help='hdf franek', type=bool, default=False) parser.add_argument('--parallelism', '-p', help='number of processes (unbounded if 0)', type=int, default=8) parsed_params = vars(parser.parse_args()) logging.debug("loading pickled simulation from '%s' file", parsed_params['data_root']) data_root_path = Path(parsed_params['data_root']) sim_params = pickle.load( PathProvider.new_path_provider( data_root_path).get_simulation_params_path().open('rb')) unpickled_inmem = pickle.load( PathProvider.new_path_provider( data_root_path).get_inmem_calc_path().open('rb')) unpickled_stimuluses = pickle.load( PathProvider.new_path_provider( data_root_path).get_stimuluses_path().open('rb')) for k, v in unpickled_inmem.items(): inmem[k] = v if parsed_params['hdf_franek']: hdf_command = MakeHdf5(parsed_params['data_root'], unpickled_stimuluses, sim_params) hdf_command()
# Build the list of Simulation tasks: either resume from pickled checkpoints
# (--load-simulation) or create fresh run directories and new populations.
simulation_tasks = []
if parsed_params['load_simulation']:
    # Resume path: one Simulation per checkpoint file under the given directory.
    for run in Path(parsed_params['load_simulation']).glob('*'):
        # NOTE(review): likely bug — this assigns the `load_simulation` path
        # itself, not `run`, so the glob loop variable is unused and every
        # iteration opens the same path (probably a directory, which would
        # fail). Looks like it should be `pickled_simulation_file = run`;
        # confirm against caller before changing.
        pickled_simulation_file = parsed_params['load_simulation']
        logging.debug("loading pickled simulation from {} file".format(
            pickled_simulation_file))
        # Checkpoint payload is a (step, population) pair.
        # SECURITY NOTE: pickle.load assumes the checkpoint is a trusted
        # artifact written by a previous run of this tool.
        with open(pickled_simulation_file, 'rb') as read_handle:
            step, population = pickle.load(read_handle)
        # Resume one step past the checkpointed step.
        # NOTE(review): every restored task gets num=0 — confirm whether the
        # run index should vary per checkpoint.
        simulation_tasks.append(
            Simulation(params=parsed_params, step_offset=step + 1,
                       population=population,
                       context_constructor=context_constructor,
                       num=0,
                       path_provider=PathProvider.new_path_provider(
                           parsed_params['simulation_name'])))
else:
    # Fresh-start path: wipe and recreate the simulation output directory.
    simulation_path = os.path.abspath(parsed_params['simulation_name'])
    if os.path.exists(simulation_path):
        shutil.rmtree(simulation_path, ignore_errors=True)
    os.makedirs(simulation_path)
    os.makedirs(simulation_path + '/stats')
    # One run directory ('run0', 'run1', ...) and one new Population per run.
    for run in range(parsed_params['runs']):
        population = Population(parsed_params)
        root_path = Path(simulation_path).joinpath('run' + str(run))
        path_provider = PathProvider.new_path_provider(root_path)
        path_provider.create_directory_structure()
        simulation = Simulation(params=parsed_params, step_offset=0,
                                population=population,