def analize_last_run(current_manager):
    """Compare the current run against the most recent finished run and publish reports.

    Finds the latest finished file for the brand (two levels above the current
    drive container), builds a manager for it, generates "gap" and "new"
    reports against the current run, uploads both to the unifier IO folder,
    removes the local copies, and posts a Slack notice.

    Note: the name is kept as-is ("analize") for backward compatibility with
    existing callers.

    Args:
        current_manager: manager for the run being analyzed; must expose
            ``drive_container``, ``generate_gap_report`` and
            ``generate_new_report``.

    Returns:
        None. Returns early (no side effects) when no finished files exist.
    """
    # NOTE(review): original carried a "MAKE SURE THIS WORKS" marker on this
    # lookup — the parent.parent traversal to the brand folder is unverified.
    finished_files = find_finished_files(current_manager.drive_container.parent.parent)

    # Guard clause: nothing to diff against on the very first run.
    if not finished_files:
        return None

    latest_run = finished_files[-1]
    latest_run_path = DL.download_file(latest_run)
    latest_manager = ContainerManager([latest_run_path], current_manager.drive_container, True)

    gap_path = current_manager.generate_gap_report(latest_manager)
    new_path = current_manager.generate_new_report(latest_manager)

    # Upload both reports, then clean up the local copies.
    DL.upload_file(gap_path, unifier_io.folder_data['id'])
    DL.upload_file(new_path, unifier_io.folder_data['id'])
    os.remove(gap_path)
    os.remove(new_path)

    slack(f"Uploaded {current_manager.drive_container.path} New and Gap")
def main():
    """Build the service objects and launch one consumer thread per handler.

    Wires up the container manager, communication handler, and an RDF-backed
    dataset storage handler (credentials taken from ``config['rdf_store']``),
    then starts each consumer on its own thread. Threads are started but not
    joined; they run for the lifetime of the process.
    """
    container_manager = ContainerManager(config)
    communication_handler = CommunicationHandler()

    store_cfg = config['rdf_store']
    rdf_store = RDFStore(store_cfg['endpoint'],
                         store_cfg['username'],
                         store_cfg['password'])
    dataset_storage_handler = DatasetStorageHandler(rdf_store)

    # One (consumer entry point, handler) pair per worker thread.
    workers = (
        (consumers.run_consumer, container_manager),
        (consumers.run_communication_consumer, communication_handler),
        (consumers.dataset_storage_job_consumer, dataset_storage_handler),
    )
    for target, handler in workers:
        threading.Thread(target=target, args=(handler,)).start()
def main():
    """Run a container under capability tracing and report what it used.

    Starts the container from CLI arguments, attaches a capabilities tracer
    and analyzer fed by a shared event queue, streams container logs to
    stdout until the container exits, then stops everything and prints the
    capability report. Cleanup also fires on SIGINT/SIGTERM.
    """
    parser = Parser()
    args = parser.get_arguments()

    container_manager = ContainerManager()
    events_queue = Queue()

    capabilities_tracer = CapabilitiesTracer(events_queue)
    capabilities_tracer.start()

    container_pid = container_manager.start(args)
    container_config = container_manager.get_config()

    capabilities_analyzer = CapabilitiesAnalyzer(events_queue, container_pid, container_config)
    capabilities_analyzer.start()

    def clean_up():
        # Stop container and workers, print findings, then terminate.
        container_manager.stop()
        capabilities_tracer.stop()
        capabilities_analyzer.stop()
        capabilities_analyzer.print_report()
        sys.exit()

    def signal_handler(_signal_number, _frame):
        clean_up()

    signal.signal(signal.SIGINT, signal_handler)
    # BUG FIX: SIGINT was registered twice (copy-paste slip); the second
    # registration was presumably meant for SIGTERM so that `kill` /
    # `docker stop` also triggers cleanup — TODO confirm intent.
    signal.signal(signal.SIGTERM, signal_handler)

    # Stream container logs (bytes) until the container exits.
    for log in container_manager.logs():
        print(log.decode())
    print('Container exited')
    clean_up()
for container in files_present_queue: brand = container.parent.parent container.modify_time() # Update container time for recursive search unifier_io, _ = find_io_folders(brand) # Download brand files DL.clear_storage() print(f'Downloading {len(container.files)} {container} files from {brand}') current_downloads = download_files(container.files) # Download old files to get missing info old_transformer_files = find_finished_files(brand) print(f'Downloading {len(old_transformer_files)} old files from {brand}') previous_downloads = download_files(old_transformer_files) # Parse files container_manager = ContainerManager(current_downloads, container) if previous_downloads: previous_manager = ContainerManager(previous_downloads, unifier_io, True) container_manager.load_knowns(previous_manager) print('Exporting files') known_path = container_manager.generate_knowns() if container_manager.unknowns(): # Upload Unknowns file unknown_path = container_manager.generate_unknowns() upload = DL.upload_file(unknown_path, unifier_io.folder_data['id']) os.remove(unknown_path) slack(f'Unknowns found: uploaded {unifier_io.path}/{upload["name"]}') if SLACK else False else: # Upload knowns, gap, new, for_transformer
def main():
    """Start the run consumer and the communication consumer, each on its own thread.

    Threads are started but not joined; they run for the process lifetime.
    """
    manager = ContainerManager(config)
    handler = CommunicationHandler()

    # Launch each consumer with its corresponding handler object.
    for target, arg in ((consumers.run_consumer, manager),
                        (consumers.run_communication_consumer, handler)):
        threading.Thread(target=target, args=(arg,)).start()