def dispatch(args):
    """Main entry function of the dispatcher.

    Scans the outgoing folder for series folders that are ready for
    sending and dispatches them oldest-first (by folder modification
    time). Skips the run entirely if termination has been requested or
    the configuration cannot be read.
    """
    if helper.is_terminated():
        return

    helper.g_log("events.run", 1)

    try:
        config.read_config()
    except Exception:
        logger.exception("Unable to read configuration. Skipping processing.")
        monitor.send_event(
            monitor.h_events.CONFIG_UPDATE,
            monitor.severity.WARNING,
            "Unable to read configuration (possibly locked)",
        )
        return

    success_folder = Path(config.mercure[mercure_folders.SUCCESS])
    error_folder = Path(config.mercure[mercure_folders.ERROR])
    retry_max = config.mercure["retry_max"]
    retry_delay = config.mercure["retry_delay"]

    # Collect every folder that is ready for sending together with its
    # modification time, so that the oldest DICOMs get dispatched first.
    ready_folders = []
    with os.scandir(config.mercure[mercure_folders.OUTGOING]) as it:
        for entry in it:
            if entry.is_dir() and not has_been_send(entry.path) and is_ready_for_sending(entry.path):
                ready_folders.append((entry.stat().st_mtime, entry.path))

    # Tuples sort by mtime first, i.e. oldest folder is dispatched first
    for _, folder_path in sorted(ready_folders):
        logger.info(f"Sending folder {folder_path}")
        execute(Path(folder_path), success_folder, error_folder, retry_max, retry_delay)

        # If termination is requested, stop processing series after the
        # active one has been completed
        if helper.is_terminated():
            break
def route_studies():
    """Scan the studies folder and route every complete, unlocked study."""
    studies_ready = {}

    # Collect all studies that are neither locked nor still receiving
    # data, remembering when each was last modified
    with os.scandir(config.mercure[mercure_folders.STUDIES]) as it:
        for entry in it:
            if not entry.is_dir():
                continue
            if is_study_locked(entry.path) or not is_study_complete(entry.path):
                continue
            studies_ready[entry.name] = entry.stat().st_mtime

    # Process all complete studies
    for study in sorted(studies_ready):
        try:
            route_study(study)
        except Exception:
            logger.exception(f'Problems while processing study {study}')
            # TODO: Add study events to bookkeeper
            #monitor.send_series_event(monitor.s_events.ERROR, entry, 0, "", "Exception while processing")
            monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR,
                               f"Exception while processing study {study}")

        # If termination is requested, stop processing after the active study has been completed
        if helper.is_terminated():
            return
def clean(args):
    """Main entry function of the cleaner.

    Removes expired entries from the success and discard folders, but
    only during the configured off-peak hours.
    """
    if helper.is_terminated():
        return

    helper.g_log("events.run", 1)

    try:
        config.read_config()
    except Exception:
        logger.exception("Unable to read configuration. Skipping processing.")
        monitor.send_event(
            monitor.h_events.CONFIG_UPDATE,
            monitor.severity.WARNING,
            "Unable to read configuration (possibly locked)",
        )
        return

    # TODO: Adaptively reduce the retention time if the disk space is running low
    # Only clean outside of peak hours to avoid competing with routing load
    if not _is_offpeak(
        config.mercure["offpeak_start"],
        config.mercure["offpeak_end"],
        datetime.now().time(),
    ):
        return

    retention = timedelta(seconds=config.mercure["retention"])
    clean_dir(config.mercure[mercure_folders.SUCCESS], retention)
    clean_dir(config.mercure[mercure_folders.DISCARD], retention)
def run_processor(args):
    """Main processing function that is called every second."""
    if helper.is_terminated():
        return

    try:
        config.read_config()
    except Exception:
        logger.exception("Unable to update configuration. Skipping processing.")
        monitor.send_event(monitor.h_events.CONFIG_UPDATE, monitor.severity.WARNING,
                           "Unable to update configuration (possibly locked)")
        return

    # Keep scanning until search_folder reports nothing left to process
    call_counter = 0
    while search_folder(call_counter):
        call_counter += 1
        # If termination is requested, stop processing series after the active one has been completed
        if helper.is_terminated():
            return
def run_router(args):
    """Main processing function that is called every second."""
    if helper.is_terminated():
        return

    helper.g_log('events.run', 1)

    try:
        config.read_config()
    except Exception:
        logger.exception("Unable to update configuration. Skipping processing.")
        monitor.send_event(monitor.h_events.CONFIG_UPDATE, monitor.severity.WARNING,
                           "Unable to update configuration (possibly locked)")
        return

    filecount = 0
    series = {}
    complete_series = {}
    error_files_found = False

    # Check the incoming folder for completed series. To this end, generate a
    # map of all series in the folder with the timestamp of the latest DICOM
    # file as value
    for entry in os.scandir(config.mercure['incoming_folder']):
        if entry.name.endswith(".tags") and not entry.is_dir():
            filecount += 1
            series_uid = entry.name.split('#', 1)[0]
            mtime = entry.stat().st_mtime
            # Keep only the newest timestamp seen for each series
            series[series_uid] = max(mtime, series.get(series_uid, mtime))

        # Check if at least one .error file exists. In that case, the incoming
        # folder should be searched for .error files at the end of the update run
        if (not error_files_found) and entry.name.endswith(".error"):
            error_files_found = True

    # A series counts as complete once no new file has arrived for longer
    # than the configured trigger interval
    for series_uid, last_update in series.items():
        if (time.time() - last_update) > config.mercure['series_complete_trigger']:
            complete_series[series_uid] = last_update

    helper.g_log('incoming.files', filecount)
    helper.g_log('incoming.series', len(series))

    # Process all complete series
    for series_uid in sorted(complete_series):
        try:
            route_series(series_uid)
        except Exception:
            logger.exception(f'Problems while processing series {series_uid}')
            monitor.send_series_event(monitor.s_events.ERROR, series_uid, 0, "", "Exception while processing")
            monitor.send_event(monitor.h_events.PROCESSING, monitor.severity.ERROR,
                               "Exception while processing series")

        # If termination is requested, stop processing series after the active one has been completed
        if helper.is_terminated():
            return

    if error_files_found:
        route_error_files()

    # Now, check if studies in the studies folder are ready for routing/processing
    route_studies()