def main():
    """Entry point invoked from the command line.

    Runs the S57 import pipeline according to the ``[Workflow]`` section of
    the config: optional download, extract, drop/create database, create
    and import tables (single- or multi-process depending on ``nProcs``),
    prepare the database, and an optional notification email.

    Returns:
        True when the whole workflow has completed.
    """
    # NOTE(review): this file defines main() twice with identical bodies;
    # the later definition is the one Python actually keeps.
    # Gather start time
    logger.info("******************** Process Started ***********************************")
    StartTime = time.time()
    FmtStartTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

    # Workflow steps based on settings
    section5 = config['Workflow']

    # Begin import process passing the options
    Initialize()

    File = ""
    if "Yes" in section5['downloadFile']:
        File = Utils.downloadFile()

    inputDirectory = ""
    if "Yes" in section5['extract']:
        inputDirectory = Utils.extract(File).split()

    if "Yes" in section5['dropDB']:
        DBAccess.dropDB()

    if "Yes" in section5['createDB']:
        DBAccess.createDB()

    # Bug fix: always bind S57Files so the email summary below cannot hit a
    # NameError when the createAndImportTables step is skipped, and so the
    # chunked paths report the real file count instead of 0.
    S57Files = []

    if "Yes" in section5['createAndImportTables']:
        Chunks = []
        section3 = config['S57']
        inputDirectory = section3['directory'].split()
        filemasks = section3['filemasks'].split()
        if nProcs < 0:
            # Single processor server ***Test
            S57Files = gatherFilesToProcess(inputDirectory, filemasks)
            DBAccess.createAndImportTables(S57Files, len(S57Files))
        elif nProcs < 2:
            # Single processor server
            Chunks = gatherFilesToProcessInChunks(inputDirectory, filemasks)
            S57Files = sum(Chunks, [])  # flattened, for the summary count
            DBAccess.createDBObjects(S57Files)
            DBAccess.importData(Chunks[0], nProcs)
        else:
            # Multi processor server
            Chunks = gatherFilesToProcessInChunks(inputDirectory, filemasks)
            # Create the database schema
            logger.info("Creating S57 database -- started")
            # Flatten the individual per-chunk lists
            S57Files = sum(Chunks, [])
            # Create the schema objects - tables, columns.
            # This has to be on a single core.
            DBAccess.createDBObjects(S57Files)
            jobs = []
            for i in range(nProcs):
                # Bug fix: removed an unused per-iteration `queue = Queue()`.
                process = multiprocessing.Process(
                    target=DBAccess.importData, args=(Chunks[i], i))
                jobs.append(process)
                process.start()
            for job in jobs:
                job.join()

    if "Yes" in section5['prepareDB']:
        DBAccess.prepareDB()

    # Print end and elapsed time
    ElapsedTime = time.time() - StartTime
    logger.info('Total time taken in HH:MM:SS.ms: %s',
                str(datetime.timedelta(seconds=ElapsedTime)))

    # Send email
    if section5['sendemail'] == "Yes":
        logger.info("Sending notification email")
        message = {
            'Start DateTime for Processing': FmtStartTime,
            # Bug fix: was a bare strftime(...) call (NameError unless
            # `from time import strftime` exists); time.strftime is safe
            # either way since `time` is clearly imported.
            'End DateTime for Processing':
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
            'Total Time Taken to Process in HH:MM:SS.ms':
                str(datetime.timedelta(seconds=ElapsedTime)),
            'Total Number of files Processed': len(S57Files),
            'Link to Log file': 'https://srclogix.dlinkddns.com/logs/vic.txt',
        }
        Utils.noticeEMail(message)

    logger.info("******************** Process Finished ***********************************")
    return True
def main():
    """Entry point invoked from the command line.

    Runs the S57 import pipeline according to the ``[Workflow]`` section of
    the config: optional download, extract, drop/create database, create
    and import tables (single- or multi-process depending on ``nProcs``),
    prepare the database, and an optional notification email.

    Returns:
        True when the whole workflow has completed.
    """
    # NOTE(review): this is a duplicate definition of main(); this later
    # copy is the one Python actually keeps at runtime.
    # Gather start time
    logger.info("******************** Process Started ***********************************")
    StartTime = time.time()
    FmtStartTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())

    # Workflow steps based on settings
    section5 = config['Workflow']

    # Begin import process passing the options
    Initialize()

    File = ""
    if "Yes" in section5['downloadFile']:
        File = Utils.downloadFile()

    inputDirectory = ""
    if "Yes" in section5['extract']:
        inputDirectory = Utils.extract(File).split()

    if "Yes" in section5['dropDB']:
        DBAccess.dropDB()

    if "Yes" in section5['createDB']:
        DBAccess.createDB()

    # Bug fix: always bind S57Files so the email summary below cannot hit a
    # NameError when the createAndImportTables step is skipped, and so the
    # chunked paths report the real file count instead of 0.
    S57Files = []

    if "Yes" in section5['createAndImportTables']:
        Chunks = []
        section3 = config['S57']
        inputDirectory = section3['directory'].split()
        filemasks = section3['filemasks'].split()
        if nProcs < 0:
            # Single processor server ***Test
            S57Files = gatherFilesToProcess(inputDirectory, filemasks)
            DBAccess.createAndImportTables(S57Files, len(S57Files))
        elif nProcs < 2:
            # Single processor server
            Chunks = gatherFilesToProcessInChunks(inputDirectory, filemasks)
            S57Files = sum(Chunks, [])  # flattened, for the summary count
            DBAccess.createDBObjects(S57Files)
            DBAccess.importData(Chunks[0], nProcs)
        else:
            # Multi processor server
            Chunks = gatherFilesToProcessInChunks(inputDirectory, filemasks)
            # Create the database schema
            logger.info("Creating S57 database -- started")
            # Flatten the individual per-chunk lists
            S57Files = sum(Chunks, [])
            # Create the schema objects - tables, columns.
            # This has to be on a single core.
            DBAccess.createDBObjects(S57Files)
            jobs = []
            for i in range(nProcs):
                # Bug fix: removed an unused per-iteration `queue = Queue()`.
                process = multiprocessing.Process(
                    target=DBAccess.importData, args=(Chunks[i], i))
                jobs.append(process)
                process.start()
            for job in jobs:
                job.join()

    if "Yes" in section5['prepareDB']:
        DBAccess.prepareDB()

    # Print end and elapsed time
    ElapsedTime = time.time() - StartTime
    logger.info('Total time taken in HH:MM:SS.ms: %s',
                str(datetime.timedelta(seconds=ElapsedTime)))

    # Send email
    if section5['sendemail'] == "Yes":
        logger.info("Sending notification email")
        message = {
            'Start DateTime for Processing': FmtStartTime,
            # Bug fix: was a bare strftime(...) call (NameError unless
            # `from time import strftime` exists); time.strftime is safe
            # either way since `time` is clearly imported.
            'End DateTime for Processing':
                time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()),
            'Total Time Taken to Process in HH:MM:SS.ms':
                str(datetime.timedelta(seconds=ElapsedTime)),
            'Total Number of files Processed': len(S57Files),
            'Link to Log file': 'https://srclogix.dlinkddns.com/logs/vic.txt',
        }
        Utils.noticeEMail(message)

    logger.info("******************** Process Finished ***********************************")
    return True