Example #1
def load_qnuc_table(source, grid_version=0):
    """Loads the qnuc table for a given source and grid version
    """
    path = grid_strings.get_source_subdir(source, 'qnuc')
    prefix = f'qnuc_v{grid_version}'
    filename = grid_strings.get_source_filename(source,
                                                prefix=prefix,
                                                extension='.txt')
    filepath = os.path.join(path, filename)
    return pd.read_table(filepath, delim_whitespace=True)
Example #2
def save_qnuc_table(table, source, grid_version=0):
    """Saves a qnuc table to file for a given source and grid version
    """
    path = grid_strings.get_source_subdir(source, 'qnuc')
    prefix = f'qnuc_v{grid_version}'
    filename = grid_strings.get_source_filename(source,
                                                prefix=prefix,
                                                extension='.txt')
    filepath = os.path.join(path, filename)

    table_str = table.to_string(index=False)
    with open(filepath, 'w') as f:
        f.write(table_str)
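
A minimal round-trip sketch of the two helpers above, assuming the 'qnuc' subdirectory for the source already exists; the column names and the 'biggrid2' source name are illustrative placeholders, not taken from the snippets.

import pandas as pd

# Illustrative only: column names and source name are placeholders
table = pd.DataFrame({'accrate': [0.05, 0.10, 0.15],
                      'qnuc': [4.2, 4.6, 5.0]})

save_qnuc_table(table, source='biggrid2', grid_version=0)
loaded = load_qnuc_table('biggrid2', grid_version=0)
print(loaded)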
Example #3
def load_lum(run, batch, source, basename='xrb', reload=False, save=True,
             silent=True, check_monotonic=True):
    """Attempts to load pre-extracted luminosity data, or load raw binary.
    Returns [time (s), luminosity (erg/s)]
    """
    def load_save(load_filepath, save_filepath):
        lum_loaded = extract_lcdata(filepath=load_filepath, silent=silent)
        if save:
            try:
                save_ascii(lum=lum_loaded, filepath=save_filepath)
            except FileNotFoundError:
                print("Can't save preloaded luminosity file, path not found")
        return lum_loaded

    pyprint.print_dashes()
    batch_str = grid_strings.get_batch_string(batch, source)
    analysis_path = grid_strings.get_source_subdir(source, 'burst_analysis')
    input_path = os.path.join(analysis_path, batch_str, 'input')

    presaved_filepath = os.path.join(input_path, f'{batch_str}_{run}.txt')
    run_str = grid_strings.get_run_string(run, basename)
    model_path = grid_strings.get_model_path(run, batch, source, basename)
    binary_filepath = os.path.join(model_path, f'{run_str}.lc')
    print(binary_filepath)
    if reload:
        print('Deleting preloaded file, reloading binary file')
        subprocess.run(['rm', '-f', presaved_filepath])
        try:
            lum = load_save(binary_filepath, presaved_filepath)
        except FileNotFoundError:
            print('XXXXXXX lumfile not found. Skipping XXXXXXX')
            return
    else:
        try:
            lum = load_ascii(presaved_filepath)
        except (FileNotFoundError, OSError):
            print('No preloaded file found. Reloading binary')
            try:
                lum = load_save(binary_filepath, presaved_filepath)
            except FileNotFoundError:
                print('XXXXXXX lumfile not found. Skipping XXXXXXX')
                return

    if check_monotonic:
        dt = np.diff(lum[:, 0])
        if np.any(dt < 0):
            pyprint.print_warning('Lightcurve timesteps are not in order. '
                                  + 'Something has gone horribly wrong!', n=80)
            raise RuntimeError('Lightcurve timesteps are not in order')
    pyprint.print_dashes()
    return lum
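
A hedged usage sketch of load_lum; the run, batch and source values are placeholders, and the column layout follows the [time (s), luminosity (erg/s)] description in the docstring.

import numpy as np

# Illustrative only: run, batch and source are placeholders
lum = load_lum(run=1, batch=2, source='biggrid2')
if lum is not None:
    # column 0: time (s), column 1: luminosity (erg/s)
    print(f'time span: {lum[0, 0]:.1f} to {lum[-1, 0]:.1f} s')
    print(f'peak luminosity: {np.max(lum[:, 1]):.3e} erg/s')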
Example #4
def write_submission_script(source,
                            version,
                            n_walkers,
                            n_steps,
                            dump_step,
                            walltime=20,
                            n_threads=8,
                            cluster='icer'):
    """Writes a script for submitting a job on a cluster

    Parameter:
    ----------
    path : str
        target path for slurm script
    walltime: int
        job time limit (hr)
    """
    extensions = {'monarch': '.sh', 'icer': '.qsub'}

    job_str = get_jobstring(source=source,
                            version=version,
                            n_walkers=n_walkers,
                            n_threads=n_threads,
                            n_steps=n_steps)
    time_str = f'{walltime:02}:00:00'

    print(f'Writing submission script for cluster: {cluster}')
    ext = extensions[cluster]
    script_str = get_submission_str(source=source,
                                    version=version,
                                    n_walkers=n_walkers,
                                    n_threads=n_threads,
                                    n_steps=n_steps,
                                    time_str=time_str,
                                    job_str=job_str,
                                    cluster=cluster,
                                    dump_step=dump_step)

    # prepend_str = {True:'restart_', False:''}[restart]
    logs_path = grid_strings.get_source_subdir(source, 'logs')
    filename = f'{cluster}_{job_str}{ext}'
    filepath = os.path.join(logs_path, filename)

    with open(filepath, 'w') as f:
        f.write(script_str)
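
A sketch of how write_submission_script might be called; every argument value below is an illustrative placeholder, and 'icer' is one of the two clusters the extensions dict recognises.

# Illustrative call only: the values are placeholders
write_submission_script(source='biggrid2', version=1,
                        n_walkers=960, n_steps=1000, dump_step=100,
                        walltime=20, n_threads=8, cluster='icer')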
Example #5
def copy_lightcurve(run, batch, source, basename='xrb'):
    """Copies over full model lightcurve
    """
    dashes()
    print('Copying model lightcurve')

    path = grid_strings.get_source_subdir(source, 'burst_analysis')
    batch_str = grid_strings.get_batch_string(batch, source)
    run_str = get_run_string(run, basename)
    model_string = grid_strings.get_model_string(run=run,
                                                 batch=batch,
                                                 source=source)

    filename = f'{model_string}.txt'
    filepath = os.path.join(path, batch_str, 'input', filename)

    target_filename = f'model_lightcurve_{model_string}'
    target_filepath = os.path.join(PROJECT_PATH, source, batch_str, run_str,
                                   target_filename)

    print(f'from: {filepath}\nto: {target_filepath}')
    subprocess.run(['cp', filepath, target_filepath])
Example #6
def batch_path(batch, source='frank'):
    """Returns string of path to batch dir
    """
    path = grid_strings.get_source_subdir(source, 'profiles')
    batch_str = grid_strings.get_batch_string(batch=batch, source=source)
    return os.path.join(path, batch_str)
Example #7
# =================================================================
# Script callable from terminal to clean up chain/sampler files
# =================================================================
GRIDS_PATH = os.environ['KEPLER_GRIDS']
n_arg = len(sys.argv)

if n_arg != 4:
    print('Must provide 3 parameters: \n\t1. [source]' +
          '\n\t2. [version]\n\t3. [keep_step]')
    sys.exit()

source = sys.argv[1]
version = sys.argv[2]
keep_step = sys.argv[3]

source_path = grid_strings.get_source_subdir(source, 'mcmc')
file_list = os.listdir(source_path)
discard = []
keep = []

print(source)
print(source_path)

for file_ in file_list:
    if (source in file_ and file_ not in ['.keep', '.gitignore']
            and f'_V{version}_' in file_ and f'_S{keep_step}.' not in file_):
        discard += [file_]

    elif (source in file_ and keep_step in file_ and f'_V{version}_' in file_):
        keep += [file_]
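
To make the filter above concrete, here is a small self-contained sketch of the same classification logic run over invented filenames; the naming pattern is an assumption based only on the '_V{version}_' and '_S{keep_step}.' substrings the script checks for.

# Standalone illustration of the same filter; filenames are invented
source, version, keep_step = 'biggrid2', '3', '20000'
file_list = ['chain_biggrid2_V3_S10000.npy',
             'chain_biggrid2_V3_S20000.npy',
             'chain_biggrid2_V4_S10000.npy',
             '.gitignore']

discard, keep = [], []
for file_ in file_list:
    if (source in file_ and file_ not in ['.keep', '.gitignore']
            and f'_V{version}_' in file_ and f'_S{keep_step}.' not in file_):
        discard += [file_]
    elif (source in file_ and keep_step in file_ and f'_V{version}_' in file_):
        keep += [file_]

print('discard:', discard)   # ['chain_biggrid2_V3_S10000.npy']
print('keep:', keep)         # ['chain_biggrid2_V3_S20000.npy']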