import json, time, ConfigParser
# Incident Activity log Watcher
# Look for changes in the IncidentActivity log.
# The comments from the Incident Activity log will be appended to the log 
# as they arrive from the web app.
# We only need to keep track of the log length in order to
# determine if a new comment has been added.  We will output the 
# new messages that arrived during the last wait interval. 
# jdalbey  7/24/2019
# Refactored and revised to fix defect #255 5/27/2021
# Notes:
# incident_description.json contains the summary details of the incident 
# incident_activity.json contains the incident detail entries for all created incidents.
# incident_description.json fields
#      "20190523-008",  # Incident ID
#      "05\\/23",                # Date
#      "15:55.25",       # Time
#      "Hall, Kacie",    # User ID
#      "SR-73",          # Route
#      "BAKER ",                 # Location
#      "SB",             # Direction
#      "Advisory",               # Incident Category
#      "Advisory: National Weather Service Advisory",  # Incident Type
#      "RV FIRE "                # Description
#incident_activity fields:
#      "20190523-004",   # Incident ID
#      "05\\/23",               # Date
#      "14:14.04",              # Time
#      "Sarker, Afrid", # User ID
#      "CMS Activation",         # Activity Code
#      "UPDATED CMS CMS ID: 1214504   LOCATION: N I-5 12.86  Avery Pkwy"          Description

# Global variables to remember length of file between readings
# (module-level state: getLogEntries() compares these against the current
# file lengths to detect entries added since the previous poll)
file1length = 0  # number of entries seen in the activity log at last read
file2length = 0  # number of entries seen in the incident summary log at last read
configdir = "config"  # default dir for production

# Utility predicates for CMS items.
def isEmpty(cmsitem):
    # An "empty" CMS item is the sentinel string of five commas
    # (six blank comma-separated fields).
    EMPTY_ITEM = ",,,,,"
    return EMPTY_ITEM == cmsitem
def isFull(cmsitem):
    # "Full" is defined simply as the negation of "empty".
    return not isEmpty(cmsitem)
# Get the directory path of the input log files from the configuration.
# Reads <configdir>/logging_service.cfg and returns the 'ActivityLogPath'
# value from its [Paths] section.
def getLogFilePath():
    global configdir
    parser = ConfigParser.ConfigParser()
    parser.read(configdir + '/logging_service.cfg')
    return parser.get('Paths', 'ActivityLogPath')

# Get the data filenames from the configuration.
# Reads <configdir>/logging_service.cfg and returns a pair:
# (incident data filename, incident summary filename).
# jdalbey 2022.9.1
def getLogFilenames():
    global configdir
    parser = ConfigParser.ConfigParser()
    parser.read(configdir + '/logging_service.cfg')
    data_name = parser.get('Files', 'ActivityLogDataFilename')
    summary_name = parser.get('Files', 'ActivityLogSummaryFilename')
    return data_name, summary_name
    
# Read the activity log entries.
# Returns the parsed list of activity entries (the 'data' value of the
# JSON log), or an empty list if the log file is missing.
def readFile1():
    logfilename = getLogFilenames()[0]  #"IncidentActivity.log"
    output = []
    try:
        log_file = open(getLogFilePath() + logfilename, 'r')
    except IOError:
        print "Error: missing "+getLogFilePath() + logfilename + " file."
    else:
        try:
            logData = log_file.read()  # Read the activity log
        finally:
            # Close before parsing so the handle is released even when the
            # content is not valid JSON (the original leaked it on a
            # parse error because close() came after json.loads()).
            log_file.close()
        # implement ticket #192
        output = json.loads(logData)['data']  # parse the log data into a list of entries
    return output

# Read the log of new incidents
# It has a different format than incident_activity.json, so we put it in a separate function.
def readFile2():
    data_summary = getLogFilenames()[1] #"incident_description.json"

    output = []
    try:  
        data_summary = open(getLogFilePath() + data_summary, 'r')
    except IOError:
        print "Error: missing "+getLogFilePath() + data_summary + " file."
    else:
        # read in incident_description json file that contains Incident summary data
        # assuming this file is in the same folder
        summary_json = data_summary.read()
        data_lst = json.loads(summary_json)['data'] # parse the new incident data
        # Append each 'incident created' info to the results 
        for entry in data_lst:
            # put the first 3 fields as place holder to be consistent with other type of logging
            output.append([entry[0], entry[1] , entry[2], entry[3] , ", Incident Created, ", ""]) 
        data_summary.close()
    return output
    
# Retrieve new messages from the activity logs since the previous call.
# Uses the module globals file1length/file2length to remember how many
# entries each log held last time, so only newly-arrived entries are
# returned.  Returns a list of formatted strings:
#   "Activity Log,<name>,<incident#> <code> <msg>"
def getLogEntries():
    global file1length, file2length
    # Read activity log of new activity
    msgList = readFile1()
    file1items = msgList[file1length:]    # new items since last file read
    file1length = len(msgList)
    # Read activity log of new incidents
    msgList = readFile2()
    file2items = msgList[file2length:]    # new items since last file read
    file2length = len(msgList)
    # Concatenate the two logs
    currList = file1items + file2items

    resultList = []
    # Format messages into desired result format
    for item in currList:
        # implement ticket #188
        # item[0] is the Incident ID (e.g. "20190523-008"); keep only the
        # serial number after the dash.
        incident_num = item[0].split("-")[1]
        # Strip commas from the fields so they don't break the
        # comma-delimited output format.
        name = item[3].replace(',','')
        code = item[4].replace(',','')
        msg = item[5].replace(',','')
        desiredFields = "Activity Log,"+name+"," + incident_num + " " + code+" "+msg
        # Append to results list
        resultList.append(desiredFields)
    return resultList

def setup(dir):
    global configdir
    configdir = dir
    return

# Local main for unit testing
def main():
    setup("config/devlinux")
    # Loop Forever, checking every five seconds
    while True:
        # Look for new messages
        answer = getLogEntries()
        # Output results
        for item in answer:
            print item
        # wait 
        time.sleep(5)

if __name__ == "__main__":
    main()
