def __init__(self, dbFile: TypeVar('T', str, dict), slFilePath: str, AutoCleanUpTempFiles: bool = True):
    '''
    @param dbFile this can either be an MDF or a db file generated from an MDF
    @param slFilePath this can either be a sl file or an asl file
    @param AutoCleanUpTempFiles determines whether the generated files will be deleted
    '''
    try:
        t0 = time.time()
        self.slFilePath = slFilePath
        self.IsOpen = True
        self.UsingTempDBFile = False
        self.UsingTempSLFile = False
        self.AutoCleanUpTempFiles = AutoCleanUpTempFiles

        if isinstance(dbFile, str):
            dbFile = {"path": dbFile}
        elif "path" in dbFile:
            pass
        else:
            raise ValueError('invalid db/mdf File Path')
        self.dbFile = dbFile

        logger.info("Start Reading " + os.path.basename(dbFile["path"]))
        if os.path.splitext(slFilePath)[1] == '.asl' or IPAInterfaceLibrary.is_running_on_wivi_server():
            self.slFilePath = self.__ResolveAliases(
                dbFile["path"],
                slFilePath,
                os.path.join(outputDir, "config.sl") if IPAInterfaceLibrary.is_running_on_wivi_server() else None)
            logger.debug("Aliases Resolved")

        self.dbFileName = self.__GetDBFilePath(dbFile["path"], self.slFilePath)
        logger.debug("DB Created")
        self.__OpenDataFile(self.dbFileName, self.slFilePath)
        logger.debug("DB Opened")
        self.__SetupIndexOperator(self.slFilePath)
        logger.debug("Index Operator initialization complete")
        self.RecordTimestamp = -1
        logger.info("Finished Reading " + os.path.basename(dbFile["path"]) +
                    " Time Taken " + str(time.time() - t0))
    except ValueError as e:
        logger.error(str(e))
        raise e
def Begin(self, data: ICSDataFileType, hitDescription: str = "Hit number ", initTrigger: bool = False) -> None:
    self.numRec = 0
    self.triggered = initTrigger
    self.hitDescription = hitDescription
    self.data = data
    if IPAInterfaceLibrary.is_running_on_wivi_server():
        filenamewithoutpath = os.path.basename(data.dbFile["path"])
        self.dsr["HitList"].append({"id": data.dbFile["id"],
                                    "startDate": data.dbFile["startDate"],
                                    "vehicle": data.dbFile["vehicle"],
                                    "FilenameAndPath": filenamewithoutpath})
    else:
        self.dsr["HitList"].append({"FilenameAndPath": data.dbFile["path"]})
    self.data_to_hitlist[data] = self.dsr["HitList"][-1]
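# Usage sketch (not part of the library source above): one way a script might pair
# ICSDataFile.__init__ and DSRFile.Begin. It assumes the icsFI/icsDSR import aliases used
# in the sample scripts further below and an existing db/MDF plus .sl/.asl pair.
#
#   dsr = icsDSR.DSRFile()
#   for dbFilePath in IPAInterfaceLibrary.get_input_file_list():
#       with icsFI.ICSDataFile(dbFilePath, IPAInterfaceLibrary.get_config_file()) as data:
#           dsr.Begin(data, hitDescription="Over-threshold event ")
#           # ... seek through the file and record hits here ...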
import json
import logging
import os
import sys
import time

from ICS_IPA.DataFileIOLibraryInterface import *
from ICS_IPA import IPAInterfaceLibrary

logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# create a file handler
outputDir = os.getcwd()
if IPAInterfaceLibrary.is_running_on_wivi_server() and os.path.splitext(sys.argv[1])[1].lower() == '.json':
    outputDir = json.load(open(sys.argv[1]))["output_dir"]
fh = logging.FileHandler(os.path.join(outputDir, "IPA.log"))
fh.setLevel(logging.INFO)
fh.setFormatter(formatter)

ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(formatter)

# add the handlers to the logger
logger.addHandler(fh)
#logger.addHandler(ch)
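# Note on the Wi-Vi branch above (illustrative, not part of the module): when running on a
# Wi-Vi server, sys.argv[1] is expected to be a .json file, and the only key this block
# reads from it is "output_dir", e.g.
#
#   {"output_dir": "/path/to/output"}
#
# Any other keys in that file are ignored here.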
from MsgFileClass import msgFiles
from ICS_IPA import IPAInterfaceLibrary

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
handler = logging.FileHandler('IPA.log')
handler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)

log.info("Hello")
slFilePath = IPAInterfaceLibrary.get_config_file()
inputFilePaths = IPAInterfaceLibrary.get_input_file_list()
log.info(slFilePath)
ReportGenTimeStamp = datetime.now().strftime("%m-%d-%y_%H-%M-%S")

log.info("Analyzing input files")
msg_Files = msgFiles(inputFilePaths, ReportGenTimeStamp)

with open(slFilePath) as configFile:
    config = json.load(configFile)

if not (math.isnan(config["StartTimeInSecondsFromStartOfFile"])) and (config["StartTimeAbsolute"] != ""):
    log.info("You can filter time by using StartTimeInSecondsFromStartOfFile or StartTimeAbsolute, not both")
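# Sketch of the part of the config file that the check above expects (illustrative values,
# not a complete config): the two time filters should not both be set at once.
#
#   {
#       "StartTimeInSecondsFromStartOfFile": 5.0,
#       "StartTimeAbsolute": ""
#   }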
from ICS_IPA import IPAInterfaceLibrary
from SigEnumFile import Sig

logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
handler = logging.FileHandler('IPA.log')
handler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)

log.info("Hello")
slFilePath = IPAInterfaceLibrary.get_config_file()
dbFilePaths = IPAInterfaceLibrary.get_input_file_list()

# ------------------------------------------------------------------------------------------------------------------
dsr = icsDSR.DSRFile()  # there are multiple methods to save to a dsr file.
for dbFilePath in dbFilePaths:
    try:
        with icsFI.ICSDataFile(dbFilePath, slFilePath) as data:
            curTimestamp = data.JumpBeforeTimestamp(0)
            dataPoints = data.GetPoints()
            timeStamps = data.GetTimeStamps()
            dsr.Begin(data)
            ActiveMaskResult = data.SetActiveMask("0001")
def __init__(self, inputFilePaths, ReportGenTimeStamp):
    self.FilesList = []
    self.FilesListSorted = []
    logging.basicConfig(level=logging.INFO)
    self.log = logging.getLogger(__name__)
    self.handler = logging.FileHandler('IPA.log')
    self.handler.setLevel(logging.INFO)
    # create a logging format
    self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
    self.handler.setFormatter(self.formatter)
    self.log.addHandler(self.handler)

    if IPAInterfaceLibrary.is_running_on_wivi_server():
        OutputFilePath = os.path.dirname(sys.argv[0])
    else:
        OutputFilePath = os.path.dirname(sys.argv[0])

    for inputFilePath in inputFilePaths:
        self.FilesList.append(msgFile())
        self.FilesList[-1].InputFileName = os.path.basename(inputFilePath["path"])
        self.FilesList[-1].InputFilePath = os.path.dirname(inputFilePath["path"])
        filename, fileExtension = os.path.splitext(str(self.FilesList[-1].InputFileName))
        self.FilesList[-1].DB_FileName = os.path.join(
            OutputFilePath, filename + "_" + ReportGenTimeStamp + "_Filtrd.db2")
        self.FilesList[-1].OutputFilePath = OutputFilePath
        self.FilesList[-1].FileExtension = fileExtension
        self.FilesList[-1].FileCreatedByClass = False
        self.FilesList[-1].FileStartTime = ""
        self.FilesList[-1].FileEndTime = ""
        self.FilesList[-1].FileStartTimeRaw = 0
        self.FilesList[-1].FileEndTimeRaw = 0
        self.FilesList[-1].TempVSBExtracted = False
        zip2FolderName = ""
        self.log.info("InputFileName: " + str(self.FilesList[-1].InputFileName))

        # now extract the .vsb out of zipped input files before converting them
        if (fileExtension == ".zip"):
            with zipfile.ZipFile(os.path.join(self.FilesList[-1].InputFilePath,
                                              self.FilesList[-1].InputFileName), 'r') as zipObj:
                # Get a list of all archived file names from the zip
                listOfFileNames = zipObj.namelist()
                # Iterate over the file names
                self.log.info("First level zip filenames:")
                for zipfileName in listOfFileNames:
                    self.log.info(zipfileName)
                    # skip directories
                    if not zipfileName:
                        continue
                    if zipfileName[len(zipfileName) - 4:len(zipfileName)] == ('.vsb'):
                        zipObj.extract(zipfileName, self.FilesList[-1].OutputFilePath)
                        self.FilesList[-1].InputFileName = zipfileName
                        self.FilesList[-1].FileExtension = ".vsb"
                        self.FilesList[-1].InputFilePath = self.FilesList[-1].OutputFilePath
                        self.FilesList[-1].TempVSBExtracted = True
                        self.log.info("Only one level of zip")
                    elif zipfileName[len(zipfileName) - 8:len(zipfileName)] == ('.vsb.zip'):
                        self.log.info(".vsb.zip file found")
                        zip2FolderName = os.path.join(OutputFilePath.replace('\\', '/'),
                                                      'tempZipFolderName').replace('\\', '/')
                        zipObj.extract(zipfileName, zip2FolderName)
                        zip3FileName = os.path.join(zip2FolderName, zipfileName).replace('\\', '/')
                        with zipfile.ZipFile(zip3FileName) as zipObj2:
                            zipObj2.extract(zipObj2.namelist()[0], OutputFilePath)
                            self.FilesList[-1].InputFileName = zipObj2.namelist()[0]
                            self.log.info(str(self.FilesList[-1].InputFileName))
                            filename2, fileExtension2 = os.path.splitext(self.FilesList[-1].InputFileName)
                            self.FilesList[-1].FileExtension = fileExtension2
                            self.FilesList[-1].InputFilePath = self.FilesList[-1].OutputFilePath
                            self.FilesList[-1].TempVSBExtracted = True
                        try:
                            shutil.rmtree(zip2FolderName)
                            self.log.info("Removed top level zip file")
                        except OSError as e:
                            self.log.info("error removing temp zip folder")

        if (self.FilesList[-1].FileExtension == ".vsb"):
            try:
                # Open the message database
                self.log.info("Create database for file " + str(
                    os.path.join(self.FilesList[-1].InputFilePath, self.FilesList[-1].InputFileName)))
                self.log.info("vsb filename: " + str(
                    os.path.join(self.FilesList[-1].InputFilePath, self.FilesList[-1].InputFileName)))
                vsb.CreateDatabase(
                    os.path.join(self.FilesList[-1].InputFilePath, self.FilesList[-1].InputFileName),
                    self.FilesList[-1].DB_FileName, None)
                if ((self.FilesList[-1].TempVSBExtracted == True) and
                        (os.path.isfile(os.path.join(self.FilesList[-1].InputFilePath,
                                                     self.FilesList[-1].InputFileName)))):
                    os.remove(os.path.join(self.FilesList[-1].InputFilePath,
                                           self.FilesList[-1].InputFileName))
                    self.log.info("Removed temp input vsb file")
                self.FilesList[-1].FileCreatedByClass = True
                conn = sqlite3.connect(self.FilesList[-1].DB_FileName, timeout=10)
            except ValueError as e:
                print(str(e))
        else:
            self.FilesList[-1].DB_FileName = os.path.join(
                self.FilesList[-1].InputFilePath, self.FilesList[-1].InputFileName)
            conn = sqlite3.connect(os.path.join(self.FilesList[-1].InputFilePath,
                                                self.FilesList[-1].InputFileName), timeout=10)

        self.log.info("Get info from DB file")
        conn.text_factory = lambda x: str(x, 'utf-8', 'ignore')
        # first get the list of networks and their IDs
        cursor = conn.cursor()
        cursor.execute("SELECT Min(FirstTime), Max(LastTime) FROM Network")
        row = cursor.fetchone()
        if row is not None:
            startTime = row[0]
            self.FilesList[-1].FileStartTimeRaw = startTime
            self.FilesList[-1].FileStartTime = (datetime.fromtimestamp(
                startTime / 1e9, timezone.utc).isoformat() + '\n')
            endTime = row[1]
            self.FilesList[-1].FileEndTimeRaw = endTime
            self.FilesList[-1].FileEndTime = (datetime.fromtimestamp(
                endTime / 1e9, timezone.utc).isoformat() + '\n')

        # now get the list of existing network Ids and Names
        cursor.execute("Select DISTINCT Id, Name from Network ORDER by Id")
        row = cursor.fetchall()
        if row is not None:
            for i in range(len(row)):
                self.FilesList[-1].FileNetworks.append(file_network())
                self.FilesList[-1].FileNetworks[-1].network_id = row[i][0]
                self.FilesList[-1].FileNetworks[-1].network_name = row[i][1]

        # now get the list of MsgIDs for each network
        # NumMessages is an alias, which can be used as a shortcut in the where
        for i in range(len(self.FilesList[-1].FileNetworks)):
            self.FilesList[-1].FileNetworks[i].numberOfArbIDs = 0
            cursor.execute(
                "SELECT Id, COUNT(*) NumMessages FROM RawMessageData WHERE NetworkId = "
                + str(self.FilesList[-1].FileNetworks[i].network_id) + " GROUP BY Id")
            row = cursor.fetchall()
            for j in range(len(row)):
                self.FilesList[-1].FileNetworks[i].network_msg_ids_dec.append(row[j][0])
                self.FilesList[-1].FileNetworks[i].network_msg_ids_hex.append(hex(row[j][0]))
                self.FilesList[-1].FileNetworks[i].network_msg_num_msgs.append(row[j][1])
                self.FilesList[-1].FileNetworks[i].numberOfArbIDs = (
                    self.FilesList[-1].FileNetworks[i].numberOfArbIDs + 1)

        # now loop through each network and each ArbID and determine the min and max sample period
        for i in range(len(self.FilesList[-1].FileNetworks)):
            for j in range(self.FilesList[-1].FileNetworks[i].numberOfArbIDs):
                if self.FilesList[-1].FileNetworks[i].network_msg_num_msgs[j] > 1:
                    QueryString = "CREATE VIEW IF NOT EXISTS TempView AS SELECT * FROM RawMessageData WHERE NetworkId = " + str(self.FilesList[-1].FileNetworks[i].network_id) + " AND ID = " + str(self.FilesList[-1].FileNetworks[i].network_msg_ids_dec[j])
                    cursor.execute(QueryString)
                    QueryString = "SELECT MIN(DeltaVal), MAX(DeltaVal) FROM (SELECT (a.MessageTime - b.MessageTime) as DeltaVal FROM TempView AS a JOIN TempView AS b ON (b.MessageTime = (SELECT MAX(z.MessageTime) FROM TempView AS z WHERE z.MessageTime < a.MessageTime)))"
                    cursor.execute(QueryString)
                    row = cursor.fetchall()
                    self.FilesList[-1].FileNetworks[i].network_msg_id_min_periods.append(row[0][0] * 1e-9)
                    self.FilesList[-1].FileNetworks[i].network_msg_id_max_periods.append(row[0][1] * 1e-9)
                    cursor.execute("DROP VIEW IF EXISTS TempView")
                else:
                    self.FilesList[-1].FileNetworks[i].network_msg_id_min_periods.append(0)
                    self.FilesList[-1].FileNetworks[i].network_msg_id_max_periods.append(0)

    self.FilesListSorted = sorted(self.FilesList, key=lambda x: x.FileStartTimeRaw)
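# Standalone sketch (not part of MsgFileClass): the DeltaVal self-join used above, run
# against a tiny in-memory SQLite table so the min/max message-period calculation can be
# checked in isolation. The table and the sample timestamps are made up for illustration;
# the real code builds TempView as a view over RawMessageData instead.
import sqlite3

conn = sqlite3.connect(":memory:")
cur = conn.cursor()
cur.execute("CREATE TABLE TempView (MessageTime INTEGER)")
# three messages, 10 ms and then 25 ms apart, timestamps in nanoseconds
cur.executemany("INSERT INTO TempView VALUES (?)", [(0,), (10_000_000,), (35_000_000,)])
cur.execute(
    "SELECT MIN(DeltaVal), MAX(DeltaVal) FROM "
    "(SELECT (a.MessageTime - b.MessageTime) as DeltaVal FROM TempView AS a "
    "JOIN TempView AS b ON (b.MessageTime = "
    "(SELECT MAX(z.MessageTime) FROM TempView AS z WHERE z.MessageTime < a.MessageTime)))")
minDelta, maxDelta = cur.fetchone()
print(minDelta * 1e-9, maxDelta * 1e-9)  # 0.01 0.025 -> min and max period in seconds
conn.close()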
def __init__(self, inputFilePaths, ReportGenTimeStamp):
    self.FilesList = []
    self.FilesListSorted = []
    logging.basicConfig(level=logging.INFO)
    self.log = logging.getLogger(__name__)
    self.handler = logging.FileHandler('IPA.log')
    self.handler.setLevel(logging.INFO)
    # create a logging format
    self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
    self.handler.setFormatter(self.formatter)
    self.log.addHandler(self.handler)

    if IPAInterfaceLibrary.is_running_on_wivi_server():
        OutputFilePath = os.path.dirname(sys.argv[0]) + "\\"
    else:
        OutputFilePath = os.path.dirname(sys.argv[0]) + "\\"

    for inputFilePath in inputFilePaths:
        filenameWithoutPath = os.path.basename(inputFilePath["path"])
        filename, fileExtension = os.path.splitext(filenameWithoutPath)
        self.FilesList.append(msgFile())
        self.FilesList[-1].DB_FileName = OutputFilePath + filename + "_" + ReportGenTimeStamp + "_Filtrd.db2"
        self.FilesList[-1].FileName = filenameWithoutPath
        self.FilesList[-1].FilePath = OutputFilePath
        self.FilesList[-1].FileExtension = fileExtension
        self.FilesList[-1].FileCreatedByClass = False
        self.FilesList[-1].FileStartTime = ""
        self.FilesList[-1].FileEndTime = ""
        self.FilesList[-1].FileStartTimeRaw = 0
        self.FilesList[-1].FileEndTimeRaw = 0

        if (fileExtension == ".vsb"):
            try:
                # Open the message database
                self.log.info("Create database for file " + inputFilePath["path"])
                vsb.CreateDatabase(inputFilePath["path"], self.FilesList[-1].DB_FileName, None)
                self.FilesList[-1].FileCreatedByClass = True
                conn = sqlite3.connect(self.FilesList[-1].DB_FileName, timeout=10)
            except ValueError as e:
                print(str(e))
        else:
            self.FilesList[-1].DB_FileName = inputFilePath["path"]
            conn = sqlite3.connect(inputFilePath["path"], timeout=10)

        self.log.info("Get info from DB file")
        conn.text_factory = lambda x: str(x, 'utf-8', 'ignore')
        # first get the list of networks and their IDs
        cursor = conn.cursor()
        cursor.execute("SELECT Min(FirstTime), Max(LastTime) FROM Network")
        row = cursor.fetchone()
        if row is not None:
            startTime = row[0]
            self.FilesList[-1].FileStartTimeRaw = startTime
            self.FilesList[-1].FileStartTime = (datetime.fromtimestamp(
                startTime / 1e9, timezone.utc).isoformat() + '\n')
            endTime = row[1]
            self.FilesList[-1].FileEndTimeRaw = endTime
            self.FilesList[-1].FileEndTime = (datetime.fromtimestamp(
                endTime / 1e9, timezone.utc).isoformat() + '\n')

        # now get the list of existing network Ids and Names
        cursor.execute("Select DISTINCT Id, Name from Network ORDER by Id")
        row = cursor.fetchall()
        if row is not None:
            for i in range(len(row)):
                self.FilesList[-1].FileneNetworks.append(file_network())
                self.FilesList[-1].FileneNetworks[-1].network_id = row[i][0]
                self.FilesList[-1].FileneNetworks[-1].network_name = row[i][1]

        # now get the list of MsgIDs for each network
        # NumMessages is an alias, which can be used as a shortcut in the where
        for i in range(len(self.FilesList[-1].FileneNetworks)):
            cursor.execute(
                "SELECT Id, COUNT(*) NumMessages FROM RawMessageData WHERE NetworkId = "
                + str(self.FilesList[-1].FileneNetworks[i].network_id) + " GROUP BY Id")
            row = cursor.fetchall()
            for j in range(len(row)):
                self.FilesList[-1].FileneNetworks[i].network_msg_ids_dec.append(row[j][0])
                self.FilesList[-1].FileneNetworks[i].network_msg_ids_hex.append(hex(row[j][0]))
                self.FilesList[-1].FileneNetworks[i].network_msg_num_msgs.append(row[j][1])

    self.FilesListSorted = sorted(self.FilesList, key=lambda x: x.FileStartTimeRaw)
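# Usage sketch (illustrative, not part of the class): iterating the parsed files in
# chronological order after construction. Attribute names follow this version of the class
# (FileName, FileStartTime); the variant earlier in this listing uses InputFileName instead.
#
#   msg_Files = msgFiles(inputFilePaths, ReportGenTimeStamp)
#   for f in msg_Files.FilesListSorted:
#       print(f.FileName, f.FileStartTime)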
# set up access to IPA.log file in main script
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
handler = logging.FileHandler('IPA.log')
handler.setLevel(logging.INFO)
# create a logging format
formatter = logging.Formatter('%(asctime)s - %(name)s - %(message)s')
handler.setFormatter(formatter)
log.addHandler(handler)

# Get user input: an .asl file and one or more db files
# These next two functions are defined differently on the PC version compared to Wivi since
# user picks files on PC but Wivi server provides input file list without GUI
slFilePath = IPAInterfaceLibrary.get_config_file()
dbFilePaths = IPAInterfaceLibrary.get_input_file_list()
log.info("Hello")

with open(slFilePath) as file:
    config = json.load(file)
signals = config["Channels"]
n_signals = len(signals)
bins_list = [sig['bins'] for sig in signals]
time_tallys = [[0.0] * (len(bins_list[sig_num]) + 1) for sig_num in range(n_signals)]

MaskString = ""
for i in range(0, Sig.MaxIndexInSigClass):