Index: trunk/src/python/unifiedlogger/activitylog_watcher.py
===================================================================
--- trunk/src/python/unifiedlogger/activitylog_watcher.py	(revision 549)
+++ trunk/src/python/unifiedlogger/activitylog_watcher.py	(revision 644)
@@ -1,5 +1,3 @@
 import json, time, ConfigParser
-#from copy import deepcopy
-
 # Incident Activity log Watcher
 # Look for changes in the IncidentActivity log.
@@ -10,6 +8,30 @@
 # new messages that arrived during the last wait interval. 
 # jdalbey  7/24/2019
+# Refactored and revised to fix defect #255 5/27/2021
+# Notes:
+# incident_description.json contains the summary details of the incident 
+# incident_activity.json contains the incident detail entries for all created incidents.
+# incident_description.json fields
+#      "20190523-008",  # Incident ID
+#      "05\\/23",                # Date
+#      "15:55.25",       # Time
+#      "Hall, Kacie",    # User ID
+#      "SR-73",          # Route
+#      "BAKER ",                 # Location
+#      "SB",             # Direction
+#      "Advisory",               # Incident Category
+#      "Advisory: National Weather Service Advisory",  # Incident Type
+#      "RV FIRE "                # Description
+#incident_activity fields:
+#      "20190523-004",   # Incident ID
+#      "05\\/23",               # Date
+#      "14:14.04",              # Time
+#      "Sarker, Afrid", # User ID
+#      "CMS Activation",         # Activity Code
+#      "UPDATED CMS CMS ID: 1214504   LOCATION: N I-5 12.86  Avery Pkwy"          # Description
 
-lastLineNum = 0
+# Global variables to remember length of file between readings
+file1length = 0
+file2length = 0
 
 # Utility functions
@@ -18,45 +40,62 @@
 def isFull(cmsitem):
     return not isEmpty(cmsitem)
-
-# Read the log 
-def readFile():
-    # get path to input file from configuration
+# get path to input file from configuration
+def getLogFilePath():
     config = ConfigParser.ConfigParser()
     config.read('config/logging_service.cfg')
-    logfilepath = config.get('Paths', 'ActivityLogPath')
-    logfilename = "data.json"   #"IncidentActivity.log"
-    data_summary = "data_summary.json"
-    lines = []
+    return config.get('Paths', 'ActivityLogPath')
+    
+# Read the activity log entries 
+def readFile1():
+    logfilename = "incident_activity.json"   #"IncidentActivity.log"
     output = []
-    try:
-        json_file = open (logfilepath + logfilename,'r')
+    try:  
+        log_file = open (getLogFilePath() + logfilename,'r')
     except IOError:
-        print "Error: missing "+logfilepath+logfilename+" file."
+        print "Error: missing "+getLogFilePath() + logfilename + " file."
     else:
-        jsonData=json_file.read()
+        logData=log_file.read()  # Read the activity log
         # implement ticket #192
-        output = json.loads(jsonData)['data'] 
-        # read in data_summary_json file
-        # assuming the data_summary.json is in the trunk foldera
-        data_summary = open(logfilepath + data_summary, 'r')
+        output = json.loads(logData)['data']  # parse the log data into a dict
+        log_file.close()
+    return output
+
+# Read the log of new incidents
+# It has a different format than incident_activity.json, so we put it in a separate function.
+def readFile2():
+    data_summary = "incident_description.json"
+    output = []
+    try:  
+        data_summary = open(getLogFilePath() + data_summary, 'r')
+    except IOError:
+        print "Error: missing "+getLogFilePath() + data_summary + " file."
+    else:
+        # read in incident_description json file that contains Incident summary data
+        # assuming this file is in the same folder
         summary_json = data_summary.read()
-        data_lst = json.loads(summary_json)['data']
+        data_lst = json.loads(summary_json)['data'] # parse the new incident data
+        # Append each 'incident created' info to the results 
         for entry in data_lst:
-            # put the first 3 entries as place holder to be consistent with other type of logging
+            # put the first 3 fields as place holder to be consistent with other type of logging
             output.append([entry[0], entry[1] , entry[2], entry[3] , ", Incident Created, ", ""]) 
-        json_file.close()
+        data_summary.close()
     return output
     
-def setup():
-    # nothing needed for setup
-    return
-
 # Retrieve new messages from activity log 
 def getLogEntries():
-    global lastLineNum
-    msgList = readFile()
-    currList = []
-    currList = msgList[lastLineNum:]    # new items since last file read
-    lastLineNum = len(msgList)
+    global file1length, file2length
+    # Read activity log of new activity 
+    msgList = readFile1()
+    file1items = []
+    file1items = msgList[file1length:]    # new items since last file read
+    file1length = len(msgList)
+    # Read activity log of new incidents
+    msgList = readFile2()
+    file2items = []
+    file2items = msgList[file2length:]    # new items since last file read
+    file2length = len(msgList)
+    # Concatenate the two logs
+    currList = file1items + file2items
+
     resultList = []
     # Format messages into desired result format
@@ -73,4 +112,8 @@
         resultList.append(desiredFields)
     return resultList
+
+def setup():
+    # nothing needed for setup
+    return
 
 # Local main for unit testing
