Example #1
def combine_run_tables(batch, source, table_name):
    """Combine summary files of runs into a batch table
    """
    print('Combining model summary tables:')
    n_runs = grid_tools.get_nruns(batch, source)
    runs = np.arange(n_runs) + 1
    table_list = []

    for run in runs:
        # progress indicator: overwrite the same console line for each run
        sys.stdout.write(f'\r{source}{batch} {run}/{runs[-1]}')
        run_table = load_run_table(run, batch, source=source, table=table_name)
        if table_name == 'bursts':
            # label each burst row with the run it came from
            run_table['run'] = run
        table_list.append(run_table)

    sys.stdout.write('\n')
    combined_table = pd.concat(table_list, ignore_index=True, sort=False)
    table_str = combined_table.to_string(index=False, justify='left')

    filepath = grid_strings.batch_table_filepath(batch, source, table_name=table_name)
    print(f'Saving: {filepath}')
    with open(filepath, 'w') as f:
        f.write(table_str)

    return combined_table
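A minimal usage sketch (the batch number and source name below are placeholders; the per-run tables must already exist on disk before they can be combined):

# Hypothetical call: merge the per-run 'bursts' tables of batch 5 of the
# 'frank' source into one batch-level table and write it to disk
bursts = combine_run_tables(batch=5, source='frank', table_name='bursts')
print(f'{len(bursts)} bursts combined')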
Example #2
def extract_batches(source,
                    batches=None,
                    save_plots=True,
                    multithread=True,
                    reload=False,
                    load_bursts=False,
                    load_summary=False,
                    basename='xrb',
                    param_table=None):
    """Do burst analysis on arbitrary number of batches"""
    t0 = time.time()
    if param_table is not None:
        print('Using models from the provided param_table')
        batches = np.unique(param_table['batch'])
    else:
        batches = grid_tools.ensure_np_list(batches)

    for batch in batches:
        print_title(f'Batch {batch}')

        # make sure the batch's analysis input/output directories exist
        analysis_path = grid_strings.batch_analysis_path(batch, source)
        for folder in ['input', 'output']:
            path = os.path.join(analysis_path, folder)
            grid_tools.try_mkdir(path, skip=True)

        if param_table is not None:
            # only analyse the runs listed for this batch in param_table
            subset = grid_tools.reduce_table(param_table,
                                             params={'batch': batch})
            runs = np.array(subset['run'])
        else:
            # otherwise analyse every run in the batch (runs are numbered from 1)
            n_runs = grid_tools.get_nruns(batch, source)
            runs = np.arange(n_runs) + 1

        if multithread:
            # extract runs in parallel across a pool of 8 worker processes
            args = []
            for run in runs:
                args.append((run, batch, source, save_plots, reload,
                             load_bursts, load_summary, basename))
            with mp.Pool(processes=8) as pool:
                pool.starmap(extract_runs, args)
        else:
            # extract all runs serially in a single call
            extract_runs(runs,
                         batch,
                         source,
                         reload=reload,
                         save_plots=save_plots,
                         load_bursts=load_bursts,
                         load_summary=load_summary,
                         basename=basename)

        print_title('Combining run tables')
        for table_name in ('summary', 'bursts'):
            burst_tools.combine_run_tables(batch,
                                           source,
                                           table_name=table_name)

    t1 = time.time()
    dt = t1 - t0
    print_title(f'Time taken: {dt:.1f} s ({dt/60:.2f} min)')
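A hedged invocation sketch (the batch numbers are placeholders; multithread=False keeps the extraction serial, which is simpler to debug than the 8-process pool):

# Hypothetical invocation: analyse batches 1-3 of the 'frank' source,
# saving plots and re-extracting everything from scratch
extract_batches(source='frank',
                batches=[1, 2, 3],
                save_plots=True,
                multithread=False,
                reload=True)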
Example #3
def batch_save(batch, source, runs=None, basename='xrb', reload=True, **kwargs):
    """Loads a collection of models and saves their lightcurves
    """
    if runs is None:
        runs = grid_tools.get_nruns(batch, source)
    runs = grid_tools.expand_runs(runs)

    for run in runs:
        load_lum(run, batch, source, basename=basename, reload=reload, **kwargs)
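A short usage sketch (the run and batch numbers are made up; passing runs=None appears to fall back to every run in the batch via grid_tools.expand_runs):

# Hypothetical call: cache the lightcurves of runs 1-4 of batch 2
batch_save(batch=2, source='frank', runs=[1, 2, 3, 4], reload=True)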
Example #4
def extract_batch_profiles(batch, source='frank', basename='xrb'):
    """Extracts and saves all profiles of all runs from a batch
    """
    nruns = grid_tools.get_nruns(batch=batch, source=source, basename=basename)

    for run in range(1, nruns + 1):
        extract_run_profiles(run=run,
                             batch=batch,
                             source=source,
                             basename=basename)
Example #5
def save_all_average_lightcurves(batches,
                                 source,
                                 align='t_peak',
                                 reload=False):
    """Saves all lightcurve files for each model in given batches
    """
    for batch in batches:
        n_runs = grid_tools.get_nruns(batch, source=source)
        for run in range(1, n_runs + 1):
            model = burst_analyser.BurstRun(run,
                                            batch=batch,
                                            source=source,
                                            reload=reload)
            model.save_average_lightcurve(align=align)
Example #6
def combine_run_summaries(batch, source):
    """Combines summary files of individual batch runs into a single table
    """
    print('Combining model summary tables:')
    n_runs = grid_tools.get_nruns(batch, source)
    runs = np.arange(n_runs) + 1
    run_tables = []

    for run in runs:
        sys.stdout.write(f'\r{source}{batch} {run}/{runs[-1]}')
        run_tables.append(load_run_table(run, batch, source=source, table='summary'))

    sys.stdout.write('\n')
    combined_table = pd.concat(run_tables, ignore_index=True)
    table_str = combined_table.to_string(index=False, justify='left')

    filepath = get_table_filepath(batch, source)
    print(f'Saving: {filepath}')
    with open(filepath, 'w') as f:
        f.write(table_str)

    return combined_table
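Note that combine_run_summaries duplicates the structure of combine_run_tables from Example #1, hard-coded to the 'summary' table and an older get_table_filepath helper; the generic combine_run_tables is presumably the intended replacement.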