def load_config(config_source, select=None, verbose=True):
    """Load config parameters from file and return as a dict

    parameters
    ----------
    config_source : str
        source to load
    select : str (optional)
        select and return only a single section of the config
    verbose : bool
    """
    config_filepath = grid_strings.config_filepath(source=config_source,
                                                   module_dir='grids')
    printv(f'Loading config: {config_filepath}', verbose=verbose)

    if not os.path.exists(config_filepath):
        raise FileNotFoundError(
            f'Config file not found: {config_filepath}.'
            "\nTry making one from the template 'default.ini'")

    ini = configparser.ConfigParser()
    ini.read(config_filepath)

    # parse every option string as a python literal, grouped by section
    config = {section: {option: ast.literal_eval(ini.get(section, option))
                        for option in ini.options(section)}
              for section in ini.sections()}

    return config if select is None else config[select]
def load_model_table(batch, source, filename='MODELS.txt', verbose=True):
    """Return the model table of a given batch
    """
    source = grid_strings.source_shorthand(source=source)
    filepath = grid_strings.get_model_table_filepath(batch, source, filename)
    printv(f'Loading: {filepath}', verbose)
    return pd.read_csv(filepath, delim_whitespace=True)
def load_chain(source, version, n_steps, n_walkers, compressed=False, verbose=True):
    """Loads from file and returns np array of chain

    parameters
    ----------
    source : str
    version : int
    n_steps : int
    n_walkers : int
    compressed : bool
        load from a gzip-compressed file (.npy.gz) instead of plain .npy
        (added for consistency with save_compressed_chain; default preserves
        the original uncompressed behaviour)
    verbose : bool
    """
    extension = '.npy.gz' if compressed else '.npy'
    filename = get_mcmc_string(source=source, version=version,
                               n_steps=n_steps, n_walkers=n_walkers,
                               prefix='chain', extension=extension)
    mcmc_path = get_mcmc_path(source)
    filepath = os.path.join(mcmc_path, filename)
    pyprint.printv(f'Loading chain: {filepath}', verbose=verbose)

    if compressed:
        # context manager ensures the gzip handle is closed after loading
        with gzip.GzipFile(filepath, 'r') as f:
            return np.load(f)
    return np.load(filepath)
def save_compressed_chain(chain, source, version, verbose=True):
    """Save an MCMC chain to file as a gzip-compressed .npy
    """
    n_walkers, n_steps, _ = chain.shape
    filename = get_mcmc_string(source=source, version=version,
                               n_steps=n_steps, n_walkers=n_walkers,
                               prefix='chain', extension='.npy.gz')
    filepath = os.path.join(get_mcmc_path(source), filename)
    pyprint.printv(f'Saving compressed chain: {filepath}', verbose=verbose)

    with gzip.GzipFile(filepath, 'w') as f:
        np.save(f, chain)
def load_dump(cycle, run, batch, source, basename='xrb', prefix='', verbose=False):
    """Load and return a kepler dump file for a given model cycle
    """
    batch_str = grid_strings.get_batch_string(batch, source)
    run_str = grid_strings.get_run_string(run, basename)
    dump_filename = get_dump_filename(cycle, run, basename, prefix=prefix)

    filepath = os.path.join(MODELS_PATH, batch_str, run_str, dump_filename)
    printv(f'Loading: {filepath}', verbose=verbose)
    return kepdump.load(filepath, graphical=False, silent=True)
def save_all_plots(source, version, discard, n_steps, n_walkers=1000, display=False,
                   save=True, cap=None, posteriors=True, contours=True,
                   redshift=True, mass_radius=True, verbose=True, compressed=False):
    """Saves (and/or displays) main MCMC plots
    """
    chain = mcmc_tools.load_chain(source, version=version, n_steps=n_steps,
                                  n_walkers=n_walkers, verbose=verbose,
                                  compressed=compressed)

    # (flag, label, plot function) in the original plotting order
    plot_jobs = (
        (posteriors, 'posteriors', plot_posteriors),
        (contours, 'contours', plot_contours),
        (mass_radius, 'mass-radius', plot_mass_radius),
        (redshift, 'redshift', plot_redshift),
    )

    for enabled, label, plot_func in plot_jobs:
        if enabled:
            printv(f'Plotting {label}', verbose=verbose)
            plot_func(chain, source=source, save=save, discard=discard,
                      cap=cap, display=display, version=version)
def load_dump(cycle, run, batch, source, basename='xrb', prefix='', verbose=False):
    """Load and return a kepler dump file for a given model cycle
    """
    model_path = grid_strings.get_model_path(run=run, batch=batch,
                                             source=source, basename=basename)
    dump_filename = get_dump_filename(cycle, run, basename, prefix=prefix)

    filepath = os.path.join(model_path, dump_filename)
    printv(f'Loading: {filepath}', verbose=verbose)
    return kepdump.load(filepath, graphical=False, silent=True)
def try_mkdir(path, skip=False, verbose=True):
    """Create a directory, handling the case where it already exists

    parameters
    ----------
    path : str
        path of directory to create
    skip : bool
        if the directory already exists, continue silently instead of
        prompting the user to destroy it
    verbose : bool
    """
    printv(f'Creating directory {path}', verbose)

    if os.path.exists(path):
        if skip:
            printv('Directory already exists - skipping', verbose)
        else:
            print('Directory exists')
            # re-prompt until the user gives an unambiguous answer
            # (previously any other input silently fell through)
            while True:
                cont = input('specified? (DESTROY) [y/n]: ')
                if cont in ('y', 'Y'):
                    # check=True: fail loudly if destroy/recreate fails,
                    # consistent with the mkdir -p branch below
                    subprocess.run(['rm', '-r', path], check=True)
                    subprocess.run(['mkdir', path], check=True)
                    break
                elif cont in ('n', 'N'):
                    sys.exit()
    else:
        subprocess.run(['mkdir', '-p', path], check=True)
def load_chain(source, version, n_steps, n_walkers, compressed=False, verbose=True):
    """Loads from file and returns np array of chain

    parameters
    ----------
    source : str
    version : int
    n_steps : int
    n_walkers : int
    compressed : bool
        load from a gzip-compressed file (.npy.gz) instead of plain .npy
    verbose : bool
    """
    extension = {True: '.npy.gz', False: '.npy'}[compressed]
    filename = get_mcmc_string(source=source, version=version,
                               n_steps=n_steps, n_walkers=n_walkers,
                               prefix='chain', extension=extension)
    mcmc_path = get_mcmc_path(source)
    filepath = os.path.join(mcmc_path, filename)
    pyprint.printv(f'Loading chain: {filepath}', verbose=verbose)

    if compressed:
        # with-block fixes the leaked file handle (was never closed)
        with gzip.GzipFile(filepath, 'r') as f:
            chain = np.load(f)
    else:
        chain = np.load(filepath)

    return chain
def setup_config(source, select=None, specified=None, verbose=True):
    """Returns combined dict of params from default, source, and supplied

    parameters
    ----------
    source : str
    select : str (optional)
        select and return only a single section of the config
    specified : {}
        Overwrite default/source config with user-specified values
    verbose : bool
    """
    def overwrite_option(old_dict, new_dict):
        # in-place overwrite of old_dict entries with values from new_dict
        for key, val in new_dict.items():
            old_dict[key] = val

    if specified is None:
        specified = {}

    default_config = load_config(config_source='default', select=select,
                                 verbose=verbose)
    source_config = load_config(config_source=source, select=select,
                                verbose=verbose)

    # NOTE(review): dict() is a shallow copy, so overwrite_option below also
    # mutates the inner dicts of default_config. Harmless as long as
    # default_config is freshly loaded on every call — confirm load_config
    # never caches.
    combined_config = dict(default_config)

    for category, contents in combined_config.items():
        printv(
            f'Overwriting default {category} with source-specific and '
            f'user-supplied {category}', verbose=verbose)
        # NOTE(review): when select is given, load_config returns a flat
        # option->value dict, so 'contents' is a plain value here and
        # overwrite_option would fail on any overlapping option — confirm
        # select is only used without source/specified overrides.
        if source_config.get(category) is not None:
            overwrite_option(old_dict=contents,
                             new_dict=source_config[category])
        if specified.get(category) is not None:
            overwrite_option(old_dict=contents,
                             new_dict=specified[category])

    return combined_config
def load_grid_table(tablename, source, verbose=True, lampe_analyser=False):
    """Return a table of grid input/output

    tablename = str : table name (e.g. 'params', 'summ', 'bursts')
    source = str : name of source object
    lampe_analyser = bool : if the table is from Lampe's analyser
                            (as opposed to pyburst)
    """
    source = grid_strings.source_shorthand(source)
    # 'summ' tables are saved under the 'summary' filename prefix
    prefix = {'summ': 'summary'}.get(tablename, tablename)

    if lampe_analyser or tablename not in ('summ', 'bursts'):
        table_path = grid_strings.get_source_subdir(source, tablename)
    else:
        table_path = grid_strings.burst_analyser_path(source)

    filepath = os.path.join(table_path, f'{prefix}_{source}.txt')
    printv(f'Loading {tablename} table: {filepath}', verbose)
    return pd.read_csv(filepath, delim_whitespace=True)
def load_lum(run, batch, source, basename='xrb', reload=False, save=True,
             silent=True, check_monotonic=True, verbose=True):
    """Attempts to load pre-extracted luminosity data, or load raw binary.
    Returns [time (s), luminosity (erg/s)]

    parameters
    ----------
    run, batch, source : model identifiers
    basename : str
    reload : bool
        delete any pre-saved ascii file and re-extract from the raw binary
    save : bool
        cache extracted data as ascii for faster future loading
    silent : bool
    check_monotonic : bool
        verify lightcurve timesteps are in increasing order
    verbose : bool
    """
    def load_save(load_filepath, save_filepath):
        # extract from raw binary, optionally caching as ascii
        lum_loaded = extract_lcdata(filepath=load_filepath, silent=silent)
        if save:
            grid_tools.try_mkdir(input_path, skip=True, verbose=False)
            save_ascii(lum=lum_loaded, filepath=save_filepath, verbose=verbose)
        return lum_loaded

    batch_str = grid_strings.get_batch_string(batch, source)
    analysis_path = grid_strings.burst_analyser_path(source)
    input_path = os.path.join(analysis_path, batch_str, 'input')
    presaved_filepath = os.path.join(input_path, f'{batch_str}_{run}.txt')

    run_str = grid_strings.get_run_string(run, basename)
    model_path = grid_strings.get_model_path(run, batch, source, basename)
    binary_filepath = os.path.join(model_path, f'{run_str}.lc')

    if reload:
        pyprint.printv('Deleting preloaded file, reloading binary file',
                       verbose=verbose)
        subprocess.run(['rm', '-f', presaved_filepath])
        try:
            lum = load_save(binary_filepath, presaved_filepath)
        except FileNotFoundError:
            pyprint.printv('XXXXXXX lumfile not found. Skipping XXXXXXXX',
                           verbose=verbose)
            return
    else:
        try:
            lum = load_ascii(presaved_filepath, verbose=verbose)
        except (FileNotFoundError, OSError):
            pyprint.printv('No preloaded file found. Reloading binary',
                           verbose=verbose)
            try:
                lum = load_save(binary_filepath, presaved_filepath)
            except FileNotFoundError:
                pyprint.printv('XXXXXXX lumfile not found. Skipping XXXXXXX',
                               verbose=verbose)
                return

    if check_monotonic:
        dt = np.diff(lum[:, 0])
        # np.any replaces the unidiomatic `True in (dt < 0)` membership test
        if np.any(dt < 0):
            pyprint.print_warning('Lightcurve timesteps are not in order. '
                                  + 'Something has gone horribly wrong!', n=80)
            raise RuntimeError('Lightcurve timesteps are not in order')
    return lum
def save_ascii(lum, filepath, verbose=True):
    """Save extracted [time, lum] columns to an ascii file
    """
    pyprint.printv(f'Saving data for faster loading in: {filepath}',
                   verbose=verbose)
    np.savetxt(filepath, lum, header='time (s), luminosity (erg/s)')
def load_ascii(filepath, verbose=True):
    """Load a pre-extracted .txt file of [time, lum]
    """
    pyprint.printv(f'Loading preloaded luminosity file: {filepath}',
                   verbose=verbose)
    # skiprows=1 skips the single header line written by save_ascii
    return np.loadtxt(filepath, skiprows=1)