def main(**args):
    """Run the rotation-sweep simulation described by the spec in *args*.

    Expected keys (after defaulting): 'spec', 'file', 'threads', 'mesh_mode',
    'mesh_vector1..3', 'rotations', 'angles'.  Results are written to the
    database at args['file'] by a multiprocessing pool.
    """
    # get abstract objects from specification
    spec = args['spec']
    objects = Solver.get_objects_from_spec(*spec)
    simulation = objects[0]
    arrays = objects[1:]

    # set defaults with the following priority:
    # command line arguments >> simulation object >> script defaults
    # (a key that is absent OR explicitly None is filled in)
    for k, v in simulation.items():
        if args.get(k) is None:
            args[k] = v
    for k, v in defaults.items():
        if args.get(k) is None:
            args[k] = v

    print('simulation parameters as key --> value:')
    for k, v in args.items():
        print(k, '-->', v)

    # get args needed in main (renamed locals avoid shadowing builtins)
    db_file = args['file']
    threads = args['threads']
    mode = args['mesh_mode']
    mesh_vector1 = args['mesh_vector1']
    mesh_vector2 = args['mesh_vector2']
    mesh_vector3 = args['mesh_vector3']
    rotations = args['rotations']
    a_start, a_stop, a_step = args['angles']

    # create field positions
    field_pos = util.meshview(np.linspace(*mesh_vector1),
                              np.linspace(*mesh_vector2),
                              np.linspace(*mesh_vector3), mode=mode)

    # create angles (a_stop is intended to be inclusive, hence + a_step)
    angles = np.arange(a_start, a_stop + a_step, a_step)

    # create rotation rules which will be distributed by the pool:
    # one (array_id, direction, angle) triple per array per angle,
    # grouped so each rule covers every array at one angle
    zip_args = [zip(repeat(array_id), repeat(direction), angles)
                for array_id, direction in rotations]
    rotation_rules = list(zip(*zip_args))

    # calculate job-related values
    is_complete = None
    njobs = int(np.ceil(len(field_pos) / POSITIONS_PER_PROCESS) * len(rotation_rules))
    ijob = 0

    # check for existing file
    if os.path.isfile(db_file):
        response = input('File ' + str(db_file) + ' already exists.\n' +
                         'Continue (c), overwrite (o), or do nothing (any other key)?')
        if response.lower() in ['o', 'overwrite']:
            # overwrite: remove the existing file and start a fresh database
            os.remove(db_file)
            create_database(db_file, args, njobs, field_pos)
        elif response.lower() in ['c', 'continue']:
            # continue from current progress recorded in the database
            is_complete, ijob = util.get_progress(db_file)
        else:
            raise Exception('Database already exists')
    else:
        # make directories if they do not exist (exist_ok avoids a
        # check-then-create race), then create the database
        file_dir = os.path.dirname(os.path.abspath(db_file))
        os.makedirs(file_dir, exist_ok=True)
        create_database(db_file, args, njobs, field_pos)

    # pre-bind so the finally block cannot hit an unbound name if pool
    # setup itself raises
    pool = None
    try:
        # start multiprocessing pool and run process
        write_lock = multiprocessing.Lock()
        simulation = abstract.dumps(simulation)
        arrays = abstract.dumps(arrays)
        pool = multiprocessing.Pool(threads, initializer=init_process,
                                    initargs=(write_lock, simulation, arrays))
        jobs = util.create_jobs(db_file, (field_pos, POSITIONS_PER_PROCESS),
                                (rotation_rules, 1), mode='product',
                                is_complete=is_complete)
        result = pool.imap_unordered(run_process, jobs)
        # drain the iterator; tqdm renders progress, workers do the writing
        for r in tqdm(result, desc='Simulating', total=njobs, initial=ijob):
            pass
    except Exception as e:
        print(e)
    finally:
        if pool is not None:
            pool.terminate()
            pool.close()
def main(args):
    """Command-line entry point for a transmit-beamplot simulation.

    Parses *args* (a list of argv-style strings), merges them with the
    simulation object and script defaults, and runs the simulation over a
    multiprocessing pool, writing results to the database file.
    """
    # define and parse arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('file', nargs='?')
    parser.add_argument('-s', '--spec', nargs='+')
    parser.add_argument('-t', '--threads', type=int)
    parser.add_argument('-o', '--overwrite', action='store_true')
    args = vars(parser.parse_args(args))

    # get abstract objects from specification
    spec = args['spec']
    objects = TransmitBeamplot.get_objects_from_spec(*spec)
    simulation = objects[0]
    arrays = objects[1:]

    # set defaults with the following priority:
    # command line arguments >> simulation object >> script defaults
    # (a key that is absent OR explicitly None is filled in)
    for k, v in simulation.items():
        if args.get(k) is None:
            args[k] = v
    for k, v in defaults.items():
        if args.get(k) is None:
            args[k] = v

    print('simulation parameters as key --> value:')
    for k, v in args.items():
        print(k, '-->', v)

    # get args needed in main (db_file avoids shadowing the 'file' builtin)
    db_file = args['file']
    threads = args['threads']
    overwrite = args['overwrite']
    mode = args['mesh_mode']
    mesh_vector1 = args['mesh_vector1']
    mesh_vector2 = args['mesh_vector2']
    mesh_vector3 = args['mesh_vector3']

    # create field positions
    field_pos = util.meshview(np.linspace(*mesh_vector1),
                              np.linspace(*mesh_vector2),
                              np.linspace(*mesh_vector3), mode=mode)

    # calculate job-related values
    is_complete = None
    njobs = int(np.ceil(len(field_pos) / POSITIONS_PER_PROCESS))
    ijob = 0

    # check for existing file
    if os.path.isfile(db_file):
        if overwrite:
            # overwrite: remove the existing file and start a fresh database
            os.remove(db_file)
            create_database(db_file, args, njobs, field_pos)
        else:
            # continue from current progress; nothing to do if all jobs done
            is_complete, ijob = util.get_progress(db_file)
            if np.all(is_complete):
                return
    else:
        # make directories if they do not exist (exist_ok avoids a
        # check-then-create race), then create the database
        file_dir = os.path.dirname(os.path.abspath(db_file))
        os.makedirs(file_dir, exist_ok=True)
        create_database(db_file, args, njobs, field_pos)

    # pre-bind so the finally block cannot hit an unbound name if pool
    # setup itself raises
    pool = None
    try:
        # start multiprocessing pool and run process
        write_lock = multiprocessing.Lock()
        simulation = abstract.dumps(simulation)
        arrays = abstract.dumps(arrays)
        pool = multiprocessing.Pool(threads, initializer=init_process,
                                    initargs=(write_lock, simulation, arrays))
        jobs = util.create_jobs(db_file, (field_pos, POSITIONS_PER_PROCESS),
                                mode='zip', is_complete=is_complete)
        result = pool.imap_unordered(run_process, jobs)
        # drain the iterator; tqdm renders progress, workers do the writing
        for r in tqdm(result, desc='Simulating', total=njobs, initial=ijob):
            pass
    except Exception as e:
        print(e)
    finally:
        if pool is not None:
            pool.terminate()
            pool.close()