# NOTE(review): this first statement appears to be the tail of a function defined
# above this chunk (it references `logger` / `multiprocessing` from file scope) —
# confirm against the full file before moving it.
logger.error(f"Error test message. {multiprocessing.current_process().name}")


# Multiprocessed
def example_multiprocessing_function(some_input, tqdm_func, global_tqdm):
    """Demo worker executed in a child process.

    Emits log records at debug/info level (tagged with the worker process
    name), delegates the actual work to ``some_other_function``, and reports
    success by returning True.
    """
    worker_name = multiprocessing.current_process().name
    logger.debug(f"Debug test message - I won't show up in console. {worker_name}")
    logger.info(f"Info test message. {worker_name}")
    some_other_function(tqdm_func, global_tqdm)
    return True


def error_callback(result):
    """Called by the pool when a task fails."""
    print("Error!")


def done_callback(result):
    """Called by the pool when a task finishes; echoes its result."""
    print("Done. Result: ", result)


def example():
    """Fan out demo tasks over a TqdmMultiProcessPool under one global bar."""
    pool = TqdmMultiProcessPool()
    process_count = 4
    task_count = 10
    initial_tasks = [(example_multiprocessing_function, (i,)) for i in range(task_count)]
    # iterations1/2/3 are presumably module-level constants defined above this
    # chunk — TODO confirm; they size the shared progress bar.
    total_iterations = iterations1 * iterations2 * iterations3 * task_count
    with tqdm.tqdm(total=total_iterations, dynamic_ncols=True) as global_progress:
        global_progress.set_description("global")
        results = pool.map(process_count, global_progress, initial_tasks,
                           error_callback, done_callback)
        print(results)


if __name__ == '__main__':
    logfile_path = "tqdm_multiprocessing_example.log"
    setup_logger_tqdm(logfile_path)  # Logger will write messages using tqdm.write
    example()
# NOTE(review): the statements down to the shutil.copy look like the tail of
# `compress_and_move` (which __main__ invokes below) whose `def` line is above
# this chunk — confirm indentation against the full file.
def on_done(_):
    """Pool completion callback; results are ignored."""
    return None

def on_error(_):
    """Pool error callback; errors are ignored here."""
    return None

global_progress = tqdm(total=len(bucket_file_paths), dynamic_ncols=True, unit="file")
_ = pool.map(global_progress, tasks, on_error, on_done)
shutil.copy(original_info_file_path, os.path.join(output_directory, "info.json"))


# CLI definition. NOTE(review): the description says "sort 13gram buckets" but
# the log file below is "compress_and_package.log" — one of the two labels is
# probably stale; verify which.
parser = argparse.ArgumentParser(description="sort 13gram buckets")
parser.add_argument("-dir", "--working_directory", required=True)
parser.add_argument("-output", "--output_directory", required=True)
parser.add_argument("-procs", "--process_count", type=int, default=8)

if __name__ == "__main__":
    version = 1.00
    print(f"Running version {version}")

    # Route log output through tqdm so progress bars stay intact.
    logfile_path = "compress_and_package.log"
    setup_logger_tqdm(logfile_path)

    cli_args = parser.parse_args()
    compress_and_move(cli_args.working_directory,
                      cli_args.output_directory,
                      cli_args.process_count)