def run(algorithm, path, label, pre_simulation_hook=None):
    """
    Simulate a reconstruction algorithm.

    The simulation results are stored in an HDF5 database rather than
    returned by the function.

    Parameters
    ----------
    algorithm : function
        A function handle to the reconstruction algorithm.
    path : str
        The path of the HDF5 database where the results should be stored.
    label : str
        The label assigned to the simulation results.
    pre_simulation_hook : callable
        A handle to a callable which should be run *just* before the call to
        the reconstruction algorithm (the default is None, which implies that
        no pre hook is run).

    """

    tmp_dir = _split_path(path)[0] + '.tmp' + os.sep
    tmp_file = tmp_dir + label.replace('/', '#') + '.hdf5'

    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)

    if not os.path.isfile(tmp_file):
        _backup.create(tmp_file)

    done = _backup.get(tmp_file)
    shape = [len(_conf['delta']), len(_conf['rho'])]

    # Draw one distinct seed for each (delta, rho) point in the grid.
    random.seed(_conf['seed'])
    seeds = np.array(
        random.sample(iter_range, shape[0] * shape[1])).reshape(shape)

    # Only simulate the points that are not already marked as done in the
    # backup file.
    tasks = [(algorithm, (i, j), seeds[i, j], tmp_file, pre_simulation_hook)
             for i in range(shape[0]) for j in range(shape[1])
             if not done[i, j]]

    _process(_simulate, args_list=tasks, maxtasks=_conf['maxpoints'])

    # Copy the accumulated statistics from the temporary backup file to the
    # results database and clean up.
    with _File(tmp_file, 'r') as f:
        stat_time = f.root.time[:]
        stat_dist = f.root.dist[:]
        stat_mse = f.root.mse[:]
        stat_norm = f.root.norm[:]

    with _File(path, 'a') as f:
        f.create_array('/' + label, 'time', stat_time, createparents=True)
        f.create_array('/' + label, 'dist', stat_dist, createparents=True)
        f.create_array('/' + label, 'mse', stat_mse, createparents=True)
        f.create_array('/' + label, 'norm', stat_norm, createparents=True)

    os.remove(tmp_file)

    if len(os.listdir(tmp_dir)) == 0:
        os.removedirs(tmp_dir)
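# Usage sketch (hypothetical names, not part of the original module). The
# argument convention that `_simulate` uses when calling the algorithm is not
# shown here, so `*args, **kwargs` is used as a neutral placeholder.
def _example_run_with_hook():
    def dummy_algorithm(*args, **kwargs):
        # Stand-in for a real reconstruction algorithm.
        raise NotImplementedError('replace with a real reconstruction')

    def announce(*args, **kwargs):
        # Runs just before every call to the reconstruction algorithm.
        print('about to call the reconstruction algorithm')

    run(dummy_algorithm, 'phase_transition.hdf5', 'dummy/default',
        pre_simulation_hook=announce)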
def run(algorithm, path, label):
    """
    Simulate a reconstruction algorithm.

    The simulation results are stored in an HDF5 database rather than
    returned by the function.

    Parameters
    ----------
    algorithm : function
        A function handle to the reconstruction algorithm.
    path : str
        The path of the HDF5 database where the results should be stored.
    label : str
        The label assigned to the simulation results.

    """

    tmp_dir = _split_path(path)[0] + '.tmp' + os.sep
    tmp_file = tmp_dir + label.replace('/', '#') + '.hdf5'

    if not os.path.isdir(tmp_dir):
        os.mkdir(tmp_dir)

    if not os.path.isfile(tmp_file):
        _backup.create(tmp_file)

    done = _backup.get(tmp_file)

    config = _config.get()
    shape = [len(config['delta']), len(config['rho']), config['monte_carlo']]

    # Draw a set of Monte Carlo seeds for each (delta, rho) point in the grid.
    np.random.seed(_config.get('seed'))
    seeds = np.random.randint(0, 2**30, shape)

    # Only simulate the points that are not already marked as done in the
    # backup file.
    tasks = [(algorithm, (i, j), seeds[i, j], tmp_file)
             for i in range(shape[0]) for j in range(shape[1])
             if not done[i, j]]

    _process(_simulate, args_list=tasks)

    # Copy the accumulated statistics from the temporary backup file to the
    # results database and clean up.
    with _File(tmp_file, 'r') as f:
        stat_time = f.root.time[:]
        stat_dist = f.root.dist[:]

    with _File(path, 'a') as f:
        f.create_array('/' + label, 'time', stat_time, createparents=True)
        f.create_array('/' + label, 'dist', stat_dist, createparents=True)

    os.remove(tmp_file)

    if len(os.listdir(tmp_dir)) == 0:
        os.removedirs(tmp_dir)
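# Read-back sketch (an assumption: `_File` above is the PyTables file handle,
# so plain `tables` calls are used here). Reads the statistics that `run`
# stored under the given label.
def _example_read_results(path='phase_transition.hdf5', label='dummy/default'):
    import tables

    with tables.open_file(path, mode='r') as h5file:
        stat_time = h5file.get_node('/' + label + '/time')[:]
        stat_dist = h5file.get_node('/' + label + '/dist')[:]

    return stat_time, stat_dist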
def _save_output(output_path, name, fig, fig_ext, datasets):
    """
    Save figure and data output.

    Parameters
    ----------
    output_path : str
        The path to save the output to.
    name : str
        The 'fixed' part of the file name saved to.
    fig : matplotlib.figure.Figure
        The figure instance to save.
    fig_ext : str
        The file extension to use for the saved figure.
    datasets : dict
        The dict of dicts of datasets to save in an HDF5 database.

    """

    @_decorate_validation
    def validate_input():
        _generic('output_path', 'string')
        _generic('name', 'string')
        _generic('fig', mpl.figure.Figure)
        _generic('fig_ext', 'string')
        _levels('datasets', (_generic(None, 'mapping'),
                             _generic(None, 'mapping')))

    validate_input()

    # If output_path ends in a separator, save directly into that directory;
    # otherwise use the file name part of output_path as a prefix.
    if output_path[-1] == os.sep:
        path = output_path
        prefix = ''
    else:
        path, prefix, no_ext = _split_path(output_path)
        prefix = prefix + '_'

    fig.savefig(path + prefix + name + os.path.extsep + fig_ext)

    db_path = path + prefix + name + '.hdf5'
    _io.create_database(db_path)

    with _File(db_path, mode='a') as h5file:
        data_group = h5file.create_group('/', 'data', __name__ + ': ' + name)

        for dataset in datasets:
            set_group = h5file.create_group(data_group, dataset, dataset)

            for array in datasets[dataset]:
                h5file.create_array(set_group, array,
                                    datasets[dataset][array])
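# Usage sketch (hypothetical data): save a simple line plot together with its
# underlying arrays. Assumes the './results/' directory already exists;
# imports are done locally so the sketch is self-contained.
def _example_save_output():
    import numpy as np
    import matplotlib.pyplot as plt

    fig, axes = plt.subplots()
    axes.plot([0, 1], [0, 1])

    datasets = {'line': {'x': np.array([0, 1]), 'y': np.array([0, 1])}}
    _save_output('.' + os.sep + 'results' + os.sep, 'example', fig, 'png',
                 datasets)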