def run_validator(*det_task_names):
    """
    Driver function to run the validator function for the desired
    detector-level EO task.
    """
    results = []
    for det_task_name in det_task_names:
        validator = globals()['validate_{}'.format(det_task_name)]
        results = validator(results, camera_info.get_det_names())
    results.extend(siteUtils.jobInfo())

    # Persist the bot_eo_acq_cfg file so that the analysis
    # configuration for this job is saved.
    acq_config = siteUtils.get_job_acq_configs()
    bot_eo_acq_cfg = os.path.basename(acq_config['bot_eo_acq_cfg'])
    if os.path.isfile(bot_eo_acq_cfg):
        results.append(lcatr.schema.fileref.make(bot_eo_acq_cfg))

    # Check if an actual path to a curated gains file has been
    # specified.  If so, then persist it.
    gains_file = siteUtils.get_analysis_run('gain',
                                            bot_eo_config_file=bot_eo_acq_cfg)
    if gains_file is not None and os.path.isfile(gains_file):
        shutil.copy(gains_file, '.')
        results.append(lcatr.schema.fileref.make(os.path.basename(gains_file)))

    lcatr.schema.write_file(results)
    lcatr.schema.validate_file()
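
# A minimal usage sketch (not from the source): run_validator resolves
# functions named validate_<task> at module scope, so a hypothetical
# validator for a task named 'bias_frame_BOT' could look like the
# following.  The glob pattern is an illustrative assumption, and this
# assumes glob and lcatr are imported as elsewhere in this module.
def validate_bias_frame_BOT(results, det_names):
    """Hypothetical validator: append filerefs for per-detector products."""
    for det_name in det_names:
        for item in glob.glob('*_{}_*bias*.fits'.format(det_name)):
            results.append(lcatr.schema.fileref.make(item))
    return results

# The driver would then be invoked as run_validator('bias_frame_BOT').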
Example No. 2
def repackage_summary_files():
    """
    Repackage summary.lims files from prior jobs as eotest results
    files.
    """
    run = siteUtils.getRunNumber()
    summary_files = siteUtils.dependency_glob('summary.lims')
    for det_name in camera_info.get_det_names():
        file_prefix = make_file_prefix(run, det_name)
        raft, slot = det_name.split('_')
        repackager = eotestUtils.JsonRepackager()
        repackager.process_files(summary_files, slot=slot, raft=raft)
        repackager.write('{}_eotest_results.fits'.format(file_prefix))
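
# For context, a hypothetical sketch of the make_file_prefix helper used
# above (its actual definition is not shown in this snippet): it
# presumably composes a unique file stem from the run number and the
# detector name.  The argument order in the output is an assumption.
def make_file_prefix(run, component_name):
    """Hypothetical helper: compose a per-run, per-component file prefix."""
    return '{}_{}'.format(component_name, run)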
Example No. 3
def run_validator(*det_task_names):
    """
    Driver function to run the validator function for the desired
    detector-level EO task.
    """
    results = []
    for det_task_name in det_task_names:
        validator = globals()['validate_{}'.format(det_task_name)]
        results = validator(results, camera_info.get_det_names())
    results.extend(siteUtils.jobInfo())

    # Persist the bot_eo_acq_cfg file so that the analysis
    # configuration for this job is saved.
    acq_config = siteUtils.get_job_acq_configs()
    bot_eo_acq_cfg = os.path.basename(acq_config['bot_eo_acq_cfg'])
    if os.path.isfile(bot_eo_acq_cfg):
        results.append(lcatr.schema.fileref.make(bot_eo_acq_cfg))

    lcatr.schema.write_file(results)
    lcatr.schema.validate_file()
Example No. 4
task_mapping = {'gain': (fe55_jh_task,),
                'bias': (bias_frame_jh_task,),
                'biasnoise': (read_noise_jh_task,),
                'dark': (dark_current_jh_task,),
                'badpixel': (bright_defects_jh_task, dark_defects_jh_task),
                'ptc': (ptc_jh_task,),
                'brighterfatter': (bf_jh_task,),
                'linearity': (flat_pairs_jh_task,),
                'cti': (cte_jh_task,),
                'tearing': (tearing_jh_task,)}

analysis_types = get_analysis_types()

# Detector-level analyses
det_names = camera_info.get_det_names()
for analysis_type in analysis_types:
    print("**************************************")
    print("Running analysis type %s" % analysis_type)
    print("**************************************")
    if analysis_type not in task_mapping:
        print("   not in task_mapping. skipping")
        continue
    tasks = task_mapping[analysis_type]
    for task in tasks:
        run_device_analysis_pool(task, det_names, processes=processes)

# Raft-level analyses
raft_names = camera_info.get_raft_names()
if 'biasnoise' in analysis_types:
    print("**************************************")
Example No. 5
def run_jh_tasks(*jh_tasks, device_names=None, processes=None, walltime=3600):
    """
    Run functions that execute tasks under the job harness in parallel.
    Each function should take a device name as its only argument; the
    parallelization takes place over device_names.

    Parameters
    ----------
    jh_tasks: list-like container of functions
        These functions are serialized and dispatched to workers on
        remote nodes, so all dependencies should be imported in the
        bodies of the functions.
    device_names: list-like container of device names [None]
        List of sensors or rafts on which to operate.  If None, then
        the sensors installed in the focal plane are used.
    processes: int [None]
        Number of processes to run in parallel.  If None, then all
        available processes may be used.
    walltime: float [3600]
        Walltime in seconds for python app execution.  If the python app
        does not return within walltime, a parsl.app.errors.AppTimeout
        exception will be raised.

    Raises
    ------
    parsl.app.errors.AppTimeout

    Notes
    -----
    Because the number of jh_task functions can vary, the keyword
    arguments must be passed explicitly by name, i.e., one cannot rely
    on argument position to pass those values.
    """
    if device_names is None:
        device_names = camera_info.get_det_names()

    # Restrict to installed rafts or sensors.  This function call
    # also writes the camera_info cache file for the eT db query.
    installed_rafts = camera_info.get_installed_raft_names()

    # Check if rafts are overridden in the lcatr.cfg file, e.g.,
    # LCATR_RAFTS='R10_R22' restricts processing to rafts R10 and R22.
    override_rafts = os.environ.get('LCATR_RAFTS', None)
    if override_rafts is not None:
        installed_rafts = override_rafts.split('_')

    device_names = [_ for _ in device_names if _[:3] in installed_rafts]

    cwd = os.path.abspath('.')

    # Query eT database for file paths from a previous run, if
    # specified, and store in a pickle file.
    hj_fp_server = siteUtils.HarnessedJobFilePaths()

    # Query for file paths for other analysis runs, if specified in
    # the bot_eo_config_file.
    for analysis_type in ('badpixel', 'bias', 'dark', 'linearity',
                          'nonlinearity'):
        hj_fp_server.query_file_paths(
            siteUtils.get_analysis_run(analysis_type))

    hj_fp_server_file = 'hj_fp_server.pkl'
    with open(hj_fp_server_file, 'wb') as output:
        pickle.dump(hj_fp_server, output)
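    # Downstream jh tasks can then unpickle this file locally instead of
    # querying the eT database themselves, e.g. (a sketch):
    #     with open(hj_fp_server_file, 'rb') as fd:
    #         hj_fp_server = pickle.load(fd)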

    # Create a GetAmplifierGains object in order to query the eT
    # database for gain results from previous runs and write a pickle
    # file that can be loaded locally from disk by the various jh
    # tasks being run in parallel to avoid eT db access contention.
    GetAmplifierGains()

    for jh_task in jh_tasks:
        # Add 30 second sleep before launching jh_task processes in
        # parallel to allow for parsl process_pool_workers from the
        # previous set of jh_task processes to finish.
        time.sleep(30)
        run_device_analysis_pool(jh_task, device_names,
                                 processes=processes, cwd=cwd,
                                 walltime=walltime)
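
# Example invocation (a sketch, not from the source).  As the Notes
# section above says, device_names, processes, and walltime must be
# passed by keyword, and each jh_task must import its own dependencies
# inside its body because the functions are serialized and shipped to
# remote workers.  The task below is hypothetical.
def example_jh_task(det_name):
    """Hypothetical per-detector task run under the job harness."""
    import siteUtils  # imported in the body for serialization
    print(siteUtils.getRunNumber(), det_name)

run_jh_tasks(example_jh_task, processes=4, walltime=1800)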
Example No. 6
"""
import os
import sys
import glob
import json
import shutil
import pickle
import subprocess
import siteUtils
from camera_components import camera_info
from bot_eo_analyses import (glob_pattern, bias_filename,
                             medianed_dark_frame, get_mask_files)

RAFTS = set(camera_info.get_raft_names())

CCDS = set(camera_info.get_det_names())

CCD_DATA_KEYS = {
    'bias_frame_BOT': ('bias_frame', 'bias_stability'),
    'fe55_analysis_BOT': ('fe55', ),
    'ptc_BOT': ('ptc', ),
    'read_noise_BOT': ('read_noise', ),
    'pixel_defects_BOT': ('bright_defects', 'dark_defects'),
    'trap_analysis_BOT': ('traps', ),
    'persistence_BOT': ('persistence_bias', 'persistence_dark'),
    'dark_current_BOT': ('dark_current', ),
    'flat_pairs_BOT': ('flat_pairs', ),
    'flat_gain_stability_BOT': ('tearing', ),
    'brighter_fatter_BOT': ('brighter_fatter', ),
    'cti_BOT': ('cte_high', 'cte_low'),
    'overscan_BOT': ('overscan', ),