def stage_isr_files(device_list, dest_dir):
    """
    Stage bias frame, dark frame, and mask files for the specified
    devices.
    """
    run = siteUtils.getRunNumber()
    fits_files = set()
    ccds = CCDS.intersection(device_list)
    for ccd in ccds:
        fits_files = fits_files.union(get_isr_files(ccd, run))
    rafts = RAFTS.intersection(device_list)
    for raft in rafts:
        if raft in ('R00', 'R04', 'R40', 'R44'):
            slots = 'SG0 SG1 SW0 SW1'.split()
        else:
            slots = 'S00 S01 S02 S10 S11 S12 S20 S21 S22'.split()
        for slot in slots:
            det_name = '_'.join((raft, slot))
            fits_files = fits_files.union(get_isr_files(det_name, run))
    for src in fits_files:
        folder = os.path.basename(os.path.dirname(src))
        os.makedirs(os.path.join(dest_dir, folder), exist_ok=True)
        dest = os.path.join(dest_dir, folder, os.path.basename(src))
        if not os.path.isfile(dest):
            print('copying', src, 'to', dest)
            shutil.copy(src, dest)
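# The sketch below is a hypothetical usage example for stage_isr_files.  It
# assumes the harness environment is configured (lcatr run number available)
# and that the module-level CCDS/RAFTS sets and get_isr_files are defined.
# The device names and destination path are illustrative only.
def _example_stage_isr_files():
    """Stage ISR files for one raft and one single CCD (illustrative)."""
    example_devices = ['R22', 'R01_S00']
    example_dest = '/scratch/bot_data/isr_staging'
    stage_isr_files(example_devices, example_dest)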
def run_dark_current_task(sensor_id):
    "Single sensor execution of dark current analysis."
    import lsst.eotest.sensor as sensorTest
    import siteUtils
    import eotestUtils

    file_prefix = '%s_%s' % (sensor_id, siteUtils.getRunNumber())
    dark_files = siteUtils.dependency_glob(
        'S*/%s_dark_dark_*.fits' % sensor_id,
        jobname=siteUtils.getProcessName('dark_raft_acq'),
        description='Dark files:')
    bias_frame = siteUtils.dependency_glob('%s_sflat*median_bias.fits'
                                           % sensor_id,
                                           description='Super bias frame:')[0]
    mask_files = \
        eotestUtils.glob_mask_files(pattern='%s_*mask.fits' % sensor_id)
    gains = eotestUtils.getSensorGains(jobname='fe55_raft_analysis',
                                       sensor_id=sensor_id)

    task = sensorTest.DarkCurrentTask()
    task.config.temp_set_point = -100.
    dark_curr_pixels, dark95s \
        = task.run(sensor_id, dark_files, mask_files, gains,
                   bias_frame=bias_frame)

    results_file \
        = siteUtils.dependency_glob('%s_eotest_results.fits' % sensor_id,
                                    jobname='read_noise_raft')[0]
    plots = sensorTest.EOTestPlots(sensor_id, results_file=results_file)
    siteUtils.make_png_file(plots.total_noise, '%s_noise.png' % file_prefix,
                            dark95s=dark95s)
def validate_tearing(results, det_names):
    """Validate the tearing analysis results."""
    run = siteUtils.getRunNumber()
    schema = lcatr.schema.get('tearing_detection_BOT')
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        tearing_results_file = '%s_tearing_stats.pkl' % file_prefix
        if not os.path.isfile(tearing_results_file):
            missing_det_names.append(det_name)
            continue
        with open(tearing_results_file, 'rb') as input_:
            tearing_stats = pickle.load(input_)
        for values in tearing_stats:
            stats = dict(kv for kv in zip(('job_name', 'subset', 'sensor_id',
                                           'detections', 'slot', 'raft'),
                                          list(values) + [slot, raft]))
            results.append(lcatr.schema.valid(schema, **stats))

    png_files = sorted(glob.glob('*_tearing.png'))
    results.extend(persist_tearing_png_files(png_files))

    report_missing_data("validate_tearing", missing_det_names)
    return results
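# Hypothetical sketch showing how the validate_* functions in this module are
# chained: each one appends lcatr schema entries and filerefs to a shared
# results list and returns it.  The detector names below are illustrative.
def _example_run_validators():
    """Accumulate results from several validators (illustrative)."""
    det_names = ['R22_S11', 'R22_S12']
    results = []
    results = validate_tearing(results, det_names)
    results = validate_traps(results, det_names)
    results = validate_read_noise(results, det_names)
    return results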
def bias_frame_jh_task(det_name):
    """JH version of the bias_frame_task."""
    import os
    import siteUtils
    import json
    from bot_eo_analyses import glob_pattern, bias_frame_task, \
        bias_stability_task

    run = siteUtils.getRunNumber()
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    bias_files \
        = siteUtils.dependency_glob(glob_pattern('bias_frame', det_name),
                                    acq_jobname=acq_jobname,
                                    description='Bias frames:')
    if not bias_files:
        print("bias_frame_task: Needed data files are missing for detector",
              det_name)
        return None

    bias_stability_files \
        = siteUtils.dependency_glob(glob_pattern('bias_stability', det_name),
                                    acq_jobname=acq_jobname,
                                    description='Bias stability frames:')
    if not bias_stability_files:
        print("bias_stability_task: Needed data files are missing "
              "for detector", det_name)
        return None
    bias_stability_files = sorted(bias_stability_files)

    bias_stability_task(run, det_name, bias_stability_files)

    return bias_frame_task(run, det_name, bias_files[1:])
def validate_overscan(results, det_names):
    """Validate the overscan analysis results."""
    run = siteUtils.getRunNumber()
    missing_det_names = set()
    for det_name in det_names:
        file_prefix = make_file_prefix(run, det_name)
        results_file = f'{file_prefix}_overscan_results.fits'
        if not os.path.isfile(results_file):
            missing_det_names.add(det_name)
        else:
            md = dict(DATA_PRODUCT='overscan_task_results', RUN=run,
                      DETECTOR=det_name)
            results.append(siteUtils.make_fileref(results_file, metadata=md))
        png_files = (glob.glob(f'{file_prefix}_*_eper_*.png')
                     + glob.glob(f'{file_prefix}_*_overscan_*.png')
                     + glob.glob(f'{file_prefix}_*_cti.png'))
        md = dict(TEST_CATEGORY='EO', DETECTOR=det_name, RUN=run)
        results.extend(
            siteUtils.persist_png_files('', file_prefix, png_files=png_files,
                                        metadata=md))
    report_missing_data('validate_overscan', missing_det_names)
    return results
def run_dark_pixels_task(sensor_id):
    "Single sensor execution of the dark pixels task."
    import lsst.eotest.sensor as sensorTest
    import siteUtils
    import eotestUtils

    acq_jobname = siteUtils.getProcessName('sflat_raft_acq')
    file_prefix = '%s_%s' % (sensor_id, siteUtils.getRunNumber())
    sflat_files = siteUtils.dependency_glob('S*/%s_sflat_500_flat_H*.fits'
                                            % sensor_id,
                                            jobname=acq_jobname,
                                            description='Superflat files:')
    bias_files = siteUtils.dependency_glob('S*/%s_sflat_bias*.fits'
                                           % sensor_id,
                                           jobname=acq_jobname,
                                           description='Bias files:')
    bias_frame = eotestUtils.make_median_bias_frame(bias_files, sensor_id,
                                                    'sflat_raft_acq')
    mask_files = \
        eotestUtils.glob_mask_files(pattern='%s_*mask.fits' % sensor_id)

    task = sensorTest.DarkPixelsTask()
    task.run(sensor_id, sflat_files, mask_files, bias_frame=bias_frame)

    siteUtils.make_png_file(sensorTest.plot_flat,
                            '%s_superflat_dark_defects.png' % file_prefix,
                            '%s_median_sflat.fits' % sensor_id,
                            title='%s, superflat for dark defects analysis'
                            % sensor_id,
                            annotation='ADU/pixel', flatten=True, binsize=4)
def run_bf_task(sensor_id):
    "Single sensor execution of the brighter-fatter task."
    import lsst.eotest.sensor as sensorTest
    import siteUtils
    import eotestUtils

    file_prefix = '%s_%s' % (sensor_id, siteUtils.getRunNumber())
    flat_files = siteUtils.dependency_glob(
        'S*/%s_flat*flat1*.fits' % sensor_id,
        jobname=siteUtils.getProcessName('flat_pair_raft_acq'),
        description='Flat files:')
    bias_frame = siteUtils.dependency_glob('%s_sflat*median_bias.fits'
                                           % sensor_id,
                                           description='Superbias files:')[0]
    mask_files = \
        eotestUtils.glob_mask_files(pattern='%s_*mask.fits' % sensor_id)

    task = sensorTest.BFTask()
    task.run(sensor_id, flat_files, mask_files=mask_files,
             bias_frame=bias_frame)

    results_file = '%s_eotest_results.fits' % sensor_id
    plots = sensorTest.EOTestPlots(sensor_id, results_file=results_file)
    siteUtils.make_png_file(plots.bf_curves,
                            '%s_brighter-fatter.png' % file_prefix,
                            bf_file='%s_bf.fits' % sensor_id)
def validate_traps(results, det_names):
    """Validate and persist trap results."""
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        trap_file = '%s_traps.fits' % file_prefix
        if not os.path.isfile(trap_file):
            missing_det_names.append(det_name)
            continue
        eotestUtils.addHeaderData(trap_file, TESTTYPE='TRAP',
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(trap_file))

        mask_file = '%s_traps_mask.fits' % file_prefix
        results.append(siteUtils.make_fileref(mask_file))

        results_file = '%s_eotest_results.fits' % file_prefix
        data = sensorTest.EOTestResults(results_file)
        amps = data['AMP']
        num_traps = data['NUM_TRAPS']

        for amp, ntrap in zip(amps, num_traps):
            results.append(
                lcatr.schema.valid(lcatr.schema.get('traps_BOT'),
                                   amp=amp, num_traps=ntrap,
                                   slot=slot, raft=raft))

    report_missing_data("validate_traps", missing_det_names)
    return results
def __init__(self, configFile, sys_paths=()):
    """
    configFile contains the names of the site-specific configuration
    files.  File basenames are provided in configFile, and the full
    paths are constructed in the _read(...) method.
    """
    super(CcsSetup, self).__init__()

    self.commands = []

    self['tsCWD'] = os.getcwd()
    self['labname'] = siteUtils.getSiteName()
    self['jobname'] = siteUtils.getJobName()
    self['CCDID'] = siteUtils.getUnitId()
    self['UNITID'] = siteUtils.getUnitId()
    self['LSSTID'] = siteUtils.getLSSTId()

    try:
        self['RUNNUM'] = siteUtils.getRunNumber()
    except Exception:
        self['RUNNUM'] = "no_lcatr_run_number"

    self['ts'] = os.getenv('CCS_TS', default='ts')
    self['archon'] = os.getenv('CCS_ARCHON', default='archon')

    # The following are only available for certain contexts.
    if 'CCS_VAC_OUTLET' in os.environ:
        self['vac_outlet'] = os.getenv('CCS_VAC_OUTLET')
    if 'CCS_CRYO_OUTLET' in os.environ:
        self['cryo_outlet'] = os.getenv('CCS_CRYO_OUTLET')
    if 'CCS_PUMP_OUTLET' in os.environ:
        self['pump_outlet'] = os.getenv('CCS_PUMP_OUTLET')

    self._read(os.path.join(siteUtils.getJobDir(), configFile))
    self.sys_paths = sys_paths
def dark_defects_jh_task(det_name):
    """JH version of single sensor execution of the dark defects task."""
    import glob
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern,\
        get_amplifier_gains, bias_filename, dark_defects_task, get_mask_files

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    sflat_files \
        = siteUtils.dependency_glob(glob_pattern('dark_defects', det_name),
                                    acq_jobname=acq_jobname)
    if not sflat_files:
        print("dark_defects_task: No high flux superflat files found for",
              det_name)
        return None
    mask_files = sorted(glob.glob(f'{file_prefix}*mask*.fits'))
    bias_frame = bias_filename(run, det_name)

    return dark_defects_task(run, det_name, sflat_files,
                             mask_files=mask_files, bias_frame=bias_frame)
def tearing_fp_heat_map(pattern='*_tearing_stats.pickle'):
    """
    Plot a heat map of the tearing detections per amp over the full
    focal plane.  Glob the data from the tearing stats pickle files
    produced by tearing_task for each CCD.
    """
    channels = {amp: 'C' + _ for amp, _ in imutils.channelIds.items()}
    # With lsst_distrib v20.0.0, the channel names for the WF sensors
    # are of the form 'C0x'.
    wf_channels = {_ + 1: f'C0{_}' for _ in range(8)}
    # The following dict has the correct mapping:
    #wf_channels = {_ + 1: f'C1{_}' for _ in range(8)}
    amp_data = defaultdict(dict)
    for item in glob.glob(pattern):
        det_name = os.path.basename(item)[:len('R22_S11')]
        ch = wf_channels if 'SW' in det_name else channels
        with open(item, 'rb') as fd:
            _, amp_counts = pickle.load(fd)
        for amp, detections in amp_counts.items():
            amp_data[det_name][ch[amp]] = detections
    fig = plt.figure()
    ax = fig.add_subplot(111)
    run = siteUtils.getRunNumber()
    plot_focal_plane(ax, amp_data, camera=camera_info.camera_object,
                     title=f'Run {run}, tearing detections')
    plt.savefig(f'LCA-10134_Cryostat-0001_{run}_tearing_detections.png')
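# Minimal usage sketch for tearing_fp_heat_map, assuming the tearing_task
# outputs ('*_tearing_stats.pickle' files) are present in the current
# directory.  The raft-restricted glob pattern below is illustrative.
def _example_tearing_heat_map():
    """Make the focal-plane tearing heat map from the pickle files."""
    tearing_fp_heat_map()
    # Or restrict to a single raft's pickle files (hypothetical pattern):
    # tearing_fp_heat_map(pattern='R22_*_tearing_stats.pickle')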
def qe_jh_task(det_name):
    """JH version of single sensor execution of the QE task."""
    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    lambda_files = siteUtils.dependency_glob(glob_pattern('qe', det_name),
                                             acq_jobname=acq_jobname)
    if not lambda_files:
        print("qe_task: QE scan files not found for detector", det_name)
        return None

    pd_ratio_file = eotestUtils.getPhotodiodeRatioFile()
    if pd_ratio_file is None:
        message = ("The BOT photodiode ratio file is "
                   + "not given in config/%s/eotest_calibrations.cfg."
                   % siteUtils.getSiteName())
        raise RuntimeError(message)

#    correction_image = eotestUtils.getIlluminationNonUniformityImage()
#    if correction_image is None:
#        print()
#        print("WARNING: The correction image file is not given in")
#        print("config/%s/eotest_calibrations.cfg." % siteUtils.getSiteName())
#        print("No correction for non-uniform illumination will be applied.")
#        print()
#        sys.stdout.flush()

    mask_files = get_mask_files(det_name)
    eotest_results_file = '{}_eotest_results.fits'.format(file_prefix)
    gains = get_amplifier_gains(eotest_results_file)
    bias_frame = bias_filename(run, det_name)

    return qe_task(run, det_name, lambda_files, pd_ratio_file, gains,
                   mask_files=mask_files, bias_frame=bias_frame,
                   mondiode_func=mondiode_value)
def validate_flat_gain_stability(results, det_names):
    """Validate the output files from the flat_gain_stability analysis."""
    if 'gainstability' not in get_analysis_types():
        return results
    run = siteUtils.getRunNumber()
    missing_det_names = set()
    for det_name in det_names:
        file_prefix = make_file_prefix(run, det_name)
        results_file = f'{file_prefix}_flat_signal_sequence.pickle'
        if not os.path.isfile(results_file):
            missing_det_names.add(det_name)
        else:
            md = dict(DATA_PRODUCT='flat_gain_stability_results')
            results.append(siteUtils.make_fileref(results_file, metadata=md))
    report_missing_data('validate_flat_gain_stability', missing_det_names)

    unit_id = siteUtils.getUnitId()
    png_files = glob.glob('*flat_gain_stability.png')
    for png_file in png_files:
        md = dict(DATA_PRODUCT='flat_gain_stability_plot')
        if unit_id in png_file:
            md['LsstId'] = unit_id
        results.append(siteUtils.make_fileref(png_file, metadata=md))

    return results
def run_read_noise_task(sensor_id):
    "Single sensor execution of the read noise task."
    file_prefix = '%s_%s' % (sensor_id, siteUtils.getRunNumber())
    bias_files = siteUtils.dependency_glob(
        'S*/%s_fe55_fe55_*.fits' % sensor_id,
        jobname=siteUtils.getProcessName('fe55_raft_acq'),
        description='Fe55 files for read noise:')
    gains = eotestUtils.getSensorGains(jobname='fe55_raft_analysis',
                                       sensor_id=sensor_id)
    system_noise = None
    mask_files = \
        eotestUtils.glob_mask_files(pattern='%s_*mask.fits' % sensor_id)

    task = sensorTest.ReadNoiseTask()
    task.config.temp_set_point = -100.
    task.run(sensor_id, bias_files, gains, system_noise=system_noise,
             mask_files=mask_files, use_overscan=True)

    # Compute amp-amp correlated noise.
    _, corr_fig, _ = correlated_noise(bias_files, target=0,
                                      make_plots=True, title=sensor_id)
    plt.figure(corr_fig.number)
    plt.savefig('%s_correlated_noise.png' % file_prefix)
def flat_gain_stability_jh_task(det_name):
    """JH version of single sensor execution of the flat gain stability
    task."""
    import glob
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern,\
        bias_filename, flat_gain_stability_task,\
        get_mask_files, medianed_dark_frame

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    flat_files = siteUtils.dependency_glob(glob_pattern('tearing', det_name),
                                           acq_jobname=acq_jobname)
    if not flat_files:
        print("flat_gain_stability_task: Flat pairs files not found for",
              det_name)
        return None

    mask_files = get_mask_files(det_name)
    bias_frame = bias_filename(run, det_name)
    dark_frame = medianed_dark_frame(det_name)

    return flat_gain_stability_task(run, det_name, flat_files,
                                    mask_files=mask_files,
                                    bias_frame=bias_frame,
                                    dark_frame=dark_frame)
def ptc_jh_task(det_name):
    """JH version of single sensor execution of the PTC task."""
    import glob
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern,\
        get_amplifier_gains, bias_filename, ptc_task, get_mask_files

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    flat_files = siteUtils.dependency_glob(glob_pattern('ptc', det_name),
                                           acq_jobname=acq_jobname)
    if not flat_files:
        print("ptc_task: Flat pairs files not found for detector", det_name)
        return None

    mask_files = get_mask_files(det_name)
    eotest_results_file = '{}_eotest_results.fits'.format(file_prefix)
    gains = get_amplifier_gains(eotest_results_file)
    bias_frame = bias_filename(run, det_name)

    return ptc_task(run, det_name, flat_files, gains,
                    mask_files=mask_files, bias_frame=bias_frame)
def dark_current_jh_task(det_name):
    """JH version of single sensor execution of the dark current task."""
    import glob
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern,\
        get_amplifier_gains, bias_filename, dark_current_task,\
        plot_ccd_total_noise, get_mask_files

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    dark_files \
        = siteUtils.dependency_glob(glob_pattern('dark_current', det_name),
                                    acq_jobname=acq_jobname,
                                    description="Dark current frames:")
    if not dark_files:
        print("dark_current_task: No dark files found for detector", det_name)
        return None

    mask_files = get_mask_files(det_name)
    eotest_results_file \
        = siteUtils.dependency_glob('{}_eotest_results.fits'.format(file_prefix),
                                    jobname='read_noise_BOT')[0]
    gains = get_amplifier_gains('{}_eotest_results.fits'.format(file_prefix))
    bias_frame = bias_filename(run, det_name)

    dark_curr_pixels, dark95s \
        = dark_current_task(run, det_name, dark_files, gains,
                            mask_files=mask_files, bias_frame=bias_frame)
    plot_ccd_total_noise(run, det_name, dark_curr_pixels, dark95s,
                         eotest_results_file)
    return dark_curr_pixels, dark95s
def read_noise_jh_task(det_name):
    """JH version of the single sensor read noise task."""
    import os
    import glob
    import logging
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern,\
        get_amplifier_gains, read_noise_task, get_mask_files

    logger = logging.getLogger('read_noise_jh_task')
    logger.setLevel(logging.INFO)

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')
    # Environment variables are strings, so cast to int before slicing.
    nbias = int(os.environ.get('LCATR_NUM_BIAS_FRAMES', 10))
    bias_files \
        = siteUtils.dependency_glob(glob_pattern('read_noise', det_name),
                                    acq_jobname=acq_jobname)[:nbias]
    if not bias_files:
        logger.info("read_noise_task: Needed data files are missing "
                    "for detector %s", det_name)
        return None
    eotest_results_file = '{}_eotest_results.fits'.format(file_prefix)
    gains = get_amplifier_gains(eotest_results_file)
    mask_files = get_mask_files(det_name)

    return read_noise_task(run, det_name, bias_files, gains,
                           mask_files=mask_files)
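# Hypothetical sketch showing how the LCATR_NUM_BIAS_FRAMES environment
# variable limits the number of bias frames used by read_noise_jh_task.
# Environment variables are strings, hence the int() cast in the task above.
# The detector name is illustrative.
def _example_read_noise_with_frame_limit():
    """Run the JH read noise task using at most 5 bias frames."""
    import os
    os.environ['LCATR_NUM_BIAS_FRAMES'] = '5'
    return read_noise_jh_task('R22_S11')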
def bf_jh_task(det_name):
    """JH version of single sensor execution of the brighter-fatter task."""
    import glob
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern,\
        bias_filename, bf_task, find_flat2_bot, get_mask_files,\
        get_amplifier_gains

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    flat_files \
        = siteUtils.dependency_glob(glob_pattern('brighter_fatter', det_name),
                                    acq_jobname=acq_jobname)
    if not flat_files:
        print("bf_jh_task: Flat pairs files not found for detector", det_name)
        return None
    flat_files = [_ for _ in flat_files if 'flat1' in _]

    mask_files = get_mask_files(det_name)
    eotest_results_file = '{}_eotest_results.fits'.format(file_prefix)
    gains = get_amplifier_gains(eotest_results_file)
    bias_frame = bias_filename(run, det_name)

    return bf_task(run, det_name, flat_files, gains,
                   mask_files=mask_files, flat2_finder=find_flat2_bot,
                   bias_frame=bias_frame)
def bright_defects_jh_task(det_name):
    """JH version of single sensor bright pixels task."""
    import glob
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern,\
        get_amplifier_gains, bias_filename, bright_defects_task, \
        get_mask_files

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    dark_files \
        = siteUtils.dependency_glob(glob_pattern('bright_defects', det_name),
                                    acq_jobname=acq_jobname)
    if not dark_files:
        print("bright_defects_task: Needed data files missing for detector",
              det_name)
        return None

    eotest_results_file = '{}_eotest_results.fits'.format(file_prefix)
    gains = get_amplifier_gains(eotest_results_file)
    mask_files = sorted(glob.glob(f'{file_prefix}*mask*.fits'))
    bias_frame = bias_filename(run, det_name)

    return bright_defects_task(run, det_name, dark_files, gains,
                               mask_files=mask_files, bias_frame=bias_frame)
def persistence_jh_task(det_name):
    """JH version of the persistence_task."""
    import siteUtils
    from bot_eo_analyses import make_file_prefix, glob_pattern, \
        bias_frame_task, get_mask_files, get_bot_eo_config, persistence_task

    run = siteUtils.getRunNumber()
    file_prefix = make_file_prefix(run, det_name)
    acq_jobname = siteUtils.getProcessName('BOT_acq')

    bias_files \
        = siteUtils.dependency_glob(glob_pattern('persistence_bias', det_name),
                                    acq_jobname=acq_jobname,
                                    description='Persistence bias frames:')
    dark_files \
        = siteUtils.dependency_glob(glob_pattern('persistence_dark', det_name),
                                    acq_jobname=acq_jobname,
                                    description='Persistence dark frames:')
    if not bias_files or not dark_files:
        print("persistence_task: Needed data files are missing for detector",
              det_name)
        return None

    # Sort by test sequence number, i.e., by filenames.
    bias_files = sorted(bias_files)
    dark_files = sorted(dark_files)

    # Make a superbias frame using the pre-exposure persistence bias
    # files, skipping the first exposure.
    superbias_frame = f'{file_prefix}_persistence_superbias.fits'
    bias_frame_task(run, det_name, bias_files[1:],
                    bias_frame=superbias_frame)

    return persistence_task(run, det_name, dark_files, superbias_frame,
                            get_mask_files(det_name))
def validate_bias_frame(results, det_names):
    """Validate and persist medianed bias frames."""
    run = siteUtils.getRunNumber()
    missing_det_names = set()
    for det_name in det_names:
        file_prefix = make_file_prefix(run, det_name)
        bias_frame = f'{file_prefix}_median_bias.fits'
        rolloff_mask = f'{file_prefix}_edge_rolloff_mask.fits'
        pca_bias_file = f'{file_prefix}_pca_bias.fits'
        pca_superbias = f'{file_prefix}_pca_superbias.fits'

        # Add/update the metadata to the primary HDU of these files.
        for fitsfile in (bias_frame, rolloff_mask, pca_bias_file,
                         pca_superbias):
            if os.path.isfile(fitsfile):
                eotestUtils.addHeaderData(fitsfile, TESTTYPE='BIAS',
                                          DATE=eotestUtils.utc_now_isoformat())
                results.append(lcatr.schema.fileref.make(fitsfile))
            else:
                missing_det_names.add(det_name)

        # Persist the PCA bias model file.
        pca_bias_model = f'{file_prefix}_pca_bias.pickle'
        if os.path.isfile(pca_bias_model):
            results.append(lcatr.schema.fileref.make(pca_bias_model))
        else:
            missing_det_names.add(det_name)

    report_missing_data('validate_bias_frames', missing_det_names)
    return results
def run_ptc_task(sensor_id):
    "Single sensor execution of the PTC task."
    import lsst.eotest.sensor as sensorTest
    import siteUtils
    import eotestUtils

    file_prefix = '%s_%s' % (sensor_id, siteUtils.getRunNumber())
    flat_files = siteUtils.dependency_glob(
        'S*/%s_flat*flat?_*.fits' % sensor_id,
        jobname=siteUtils.getProcessName('flat_pair_raft_acq'),
        description='Flat files:')
    bias_frame = siteUtils.dependency_glob('%s_sflat*median_bias.fits'
                                           % sensor_id,
                                           description='Super bias frame:')[0]
    mask_files = \
        eotestUtils.glob_mask_files(pattern='%s_*mask.fits' % sensor_id)
    gains = eotestUtils.getSensorGains(jobname='fe55_raft_analysis',
                                       sensor_id=sensor_id)

    task = sensorTest.PtcTask()
    task.run(sensor_id, flat_files, mask_files, gains, bias_frame=bias_frame)

    results_file = '%s_eotest_results.fits' % sensor_id
    plots = sensorTest.EOTestPlots(sensor_id, results_file=results_file)
    siteUtils.make_png_file(plots.ptcs, '%s_ptcs.png' % file_prefix,
                            ptc_file='%s_ptc.fits' % sensor_id)
def run_dark_pixels_task(sensor_id):
    "Single sensor execution of the dark pixels task."
    print("run_dark_pixels_task: sensor_id = ", sensor_id)
    # raft_id = os.environ['LCATR_UNIT_ID']
    # raft = camera_components.Raft.create_from_etrav(raft_id)
    # wgSlotName = siteUtils.getWGSlotNames(raft)[sensor_id];
    file_prefix = '%s_%s' % (sensor_id, siteUtils.getRunNumber())
    sflat_files = siteUtils.dependency_glob(
        'S*/%s_sflat_500_flat_*.fits' % sensor_id,
        jobname=siteUtils.getProcessName('sflat_raft_acq'),
        description='Superflat files:')
    print("sflat query: ", 'S*/%s_sflat_500_flat_*.fits' % sensor_id)
    print("sflat_files = ", sflat_files)
    mask_files = \
        eotestUtils.glob_mask_files(pattern='%s_*mask.fits' % sensor_id)

    task = sensorTest.DarkPixelsTask()
    task.run(sensor_id, sflat_files, mask_files)

    siteUtils.make_png_file(sensorTest.plot_flat,
                            '%s_superflat_dark_defects.png' % file_prefix,
                            '%s_median_sflat.fits' % sensor_id,
                            title='%s, superflat for dark defects analysis'
                            % sensor_id,
                            annotation='ADU/pixel')
def scan_mode_analysis_jh_task(raft_name):
    """JH version of scan mode analysis task."""
    import siteUtils
    from bot_eo_analyses import get_scan_mode_files, scan_mode_analysis_task

    run = siteUtils.getRunNumber()
    scan_mode_files = get_scan_mode_files(raft_name)
    return scan_mode_analysis_task(run, raft_name, scan_mode_files)
def run_qe_task(sensor_id):
    "Single sensor execution of the QE task."
    # Only process each sensor once.
    try:
        if hw_objects[sensor_id]:
            return
    except KeyError:
        hw_objects[sensor_id] = True
    print("hw_objects = ", hw_objects)

    file_prefix = '%s_%s' % (sensor_id, siteUtils.getRunNumber())
    lambda_files = siteUtils.dependency_glob(
        'S*/%s_lambda_flat_*.fits' % sensor_id,
        jobname=siteUtils.getProcessName('qe_raft_acq'),
        description='Lambda files:')

    pd_ratio_file = eotestUtils.getPhotodiodeRatioFile()
    if pd_ratio_file is None:
        message = ("The test-stand specific photodiode ratio file is "
                   + "not given in config/%s/eotest_calibrations.cfg."
                   % siteUtils.getSiteName())
        raise RuntimeError(message)

    correction_image = eotestUtils.getIlluminationNonUniformityImage()
    if correction_image is None:
        print()
        print("WARNING: The correction image file is not given in")
        print("config/%s/eotest_calibrations.cfg." % siteUtils.getSiteName())
        print("No correction for non-uniform illumination will be applied.")
        print()
        sys.stdout.flush()

    mask_files = \
        eotestUtils.glob_mask_files(pattern='%s_*mask.fits' % sensor_id)
    gains = eotestUtils.getSensorGains(jobname='fe55_raft_analysis',
                                       sensor_id=sensor_id)

    task = sensorTest.QeTask()
    task.config.temp_set_point = -100.
    task.run(sensor_id, lambda_files, pd_ratio_file, mask_files, gains,
             correction_image=correction_image)

    results_file \
        = siteUtils.dependency_glob('%s_eotest_results.fits' % sensor_id,
                                    jobname='fe55_raft_analysis',
                                    description='Fe55 results file')[0]
    plots = sensorTest.EOTestPlots(sensor_id, results_file=results_file)

    siteUtils.make_png_file(plots.qe, '%s_qe.png' % file_prefix,
                            qe_file='%s_QE.fits' % sensor_id)

    try:
        plots.flat_fields(os.path.dirname(lambda_files[0]),
                          annotation='e-/pixel, gain-corrected, '
                          'bias-subtracted')
    except Exception as eobj:
        print("Exception raised while creating flat fields:")
        print(str(eobj))
def stage_files(device_list, data_keys):
    """
    Function to stage the needed raw image files from the specified
    devices (CCDs or rafts) for the current job in the scratch area.
    """
    # Gather the filenames of the needed data.
    fits_files = set()
    for device in device_list:
        fits_files = fits_files.union(get_files(data_keys, device))
    if not fits_files:
        return

    # Make the scratch directory for the BOT data.
    run_number = siteUtils.getRunNumber()
    scratch_dir = os.environ.get('LCATR_SCRATCH_DIR', '/scratch')
    dest_dir = os.path.join(scratch_dir, 'bot_data', str(run_number))
    os.makedirs(dest_dir, exist_ok=True)

    # Glob existing files to avoid re-copying or for possible clean up.
    old_files = set(glob.glob(os.path.join(dest_dir, '*', 'MC_C*.fits')))
    old_files = old_files.union(
        glob.glob(os.path.join(dest_dir, '*', 'Photodiode*.txt')))

    # Create a dict that maps src to dest file paths.  Preserve the
    # folder name of the exposure so that the PTC and flat pairs tasks
    # can identify the paired exposures.
    new_files = dict()
    frame_dirs = set()
    for src in fits_files:
        frame_dir = os.path.dirname(src)
        frame_dirs.add(frame_dir)
        folder = os.path.basename(frame_dir)
        os.makedirs(os.path.join(dest_dir, folder), exist_ok=True)
        new_files[src] = os.path.join(dest_dir, folder,
                                      os.path.basename(src))

    # Include any Photodiode_Readings*.txt files.
    for frame_dir in frame_dirs:
        for src in glob.glob(os.path.join(frame_dir, 'Photodiode*.txt')):
            new_files[src] = os.path.join(dest_dir,
                                          os.path.basename(frame_dir),
                                          os.path.basename(src))

    # Clean up unneeded files.
    unneeded_files = old_files.difference(new_files.values())
    for item in unneeded_files:
        print('removing', item)
        os.remove(item)

    # Copy the remaining files.
    for src, dest in new_files.items():
        if dest not in old_files:
            print('copying', src, 'to', dest)
            shutil.copy(src, dest)

    stage_isr_files(device_list, dest_dir)
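# Hypothetical usage sketch for stage_files, assuming get_files and the
# LCATR_SCRATCH_DIR environment variable are provided by the harness.  The
# device list and data_keys value are illustrative placeholders; the real
# data_keys structure is whatever get_files expects for the current job.
def _example_stage_files():
    """Stage raw frames for a raft and a single CCD (illustrative)."""
    example_devices = ['R22', 'R01_S00']
    example_data_keys = 'DARK'   # placeholder for the job's frame selection
    stage_files(example_devices, example_data_keys)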
def validate_qe(results, det_names):
    """Validate the QE results."""
    run = siteUtils.getRunNumber()
    missing_det_names = []
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        qe_results_file = '%s_QE.fits' % file_prefix
        if not os.path.isfile(qe_results_file):
            missing_det_names.append(det_name)
            continue
        with fits.open(qe_results_file) as qe_results:
            qe_data = qe_results['QE_BANDS'].data
            QE = OrderedDict((band, []) for band in qe_data.field('BAND'))
            for amp in range(1, 17):
                values = qe_data.field('AMP%02i' % amp)
                for band, value in zip(QE, values):
                    QE[band].append(value)

        for band in QE:
            for amp in range(1, 17):
                results.append(
                    lcatr.schema.valid(lcatr.schema.get('qe_BOT_analysis'),
                                       band=band, QE=QE[band][amp - 1],
                                       amp=amp, slot=slot, raft=raft))

        qe_files = glob.glob('%s_*QE*.fits' % file_prefix)
        for item in qe_files:
            eotestUtils.addHeaderData(item, TESTTYPE='LAMBDA',
                                      DATE=eotestUtils.utc_now_isoformat())
        results.extend([siteUtils.make_fileref(item) for item in qe_files])

        # Persist the png files.
        metadata = dict(DETECTOR=det_name, RUN=run,
                        TESTTYPE='LAMBDA', TEST_CATEGORY='EO')
        results.extend(
            siteUtils.persist_png_files('%s*qe.png' % file_prefix,
                                        file_prefix, metadata=metadata))
        results.extend(
            siteUtils.persist_png_files('%s*flat.png' % file_prefix,
                                        file_prefix, metadata=metadata))

    report_missing_data("validate_qe", missing_det_names)
    return results
def validate_brighter_fatter(results, det_names):
    """Validate the brighter-fatter results."""
    run = siteUtils.getRunNumber()
    missing_det_names = set()
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        bf_results = '%s_bf.fits' % file_prefix
        if not os.path.isfile(bf_results):
            missing_det_names.add(det_name)
            continue
        eotestUtils.addHeaderData(bf_results, TESTTYPE='FLAT',
                                  DATE=eotestUtils.utc_now_isoformat())
        results.append(siteUtils.make_fileref(bf_results))

        results_file = '%s_eotest_results.fits' % file_prefix
        data = sensorTest.EOTestResults(results_file)
        columns = (data['AMP'], data['BF_XCORR'], data['BF_XCORR_ERR'],
                   data['BF_YCORR'], data['BF_YCORR_ERR'],
                   data['BF_SLOPEX'], data['BF_SLOPEX_ERR'],
                   data['BF_SLOPEY'], data['BF_SLOPEY_ERR'],
                   data['BF_MEAN'])
        for amp, bf_xcorr, bf_xcorr_err, bf_ycorr, bf_ycorr_err, \
                bf_slopex, bf_slopex_err, bf_slopey, bf_slopey_err, bf_mean \
                in zip(*columns):
            results.append(
                lcatr.schema.valid(lcatr.schema.get('brighter_fatter_BOT'),
                                   amp=amp,
                                   bf_xcorr=bf_xcorr,
                                   bf_xcorr_err=bf_xcorr_err,
                                   bf_ycorr=bf_ycorr,
                                   bf_ycorr_err=bf_ycorr_err,
                                   bf_slopex=bf_slopex,
                                   bf_slopex_err=bf_slopex_err,
                                   bf_slopey=bf_slopey,
                                   bf_slopey_err=bf_slopey_err,
                                   bf_mean=bf_mean, slot=slot, raft=raft))

        # Persist the png files.
        metadata = dict(DETECTOR=det_name, RUN=run,
                        TESTTYPE='FLAT', TEST_CATEGORY='EO')
        results.extend(
            siteUtils.persist_png_files('%s*brighter-fatter.png' % file_prefix,
                                        file_prefix, metadata=metadata))

    report_missing_data("validate_brighter_fatter", missing_det_names)
    return results
def validate_read_noise(results, det_names):
    """Validate and persist read noise results."""
    run = siteUtils.getRunNumber()
    missing_det_names = set()
    for det_name in det_names:
        raft, slot = det_name.split('_')
        file_prefix = make_file_prefix(run, det_name)
        read_noise_file = '%s_eotest_results.fits' % file_prefix
        if not os.path.isfile(read_noise_file):
            # No data for this detector, so note that and continue
            # with the others.
            missing_det_names.add(det_name)
            continue
        data = sensorTest.EOTestResults(read_noise_file)
        amps = data['AMP']
        read_noise_data = data['READ_NOISE']
        system_noise_data = data['SYSTEM_NOISE']
        total_noise_data = data['TOTAL_NOISE']
        for amp, read_noise, system_noise, total_noise \
                in zip(amps, read_noise_data, system_noise_data,
                       total_noise_data):
            results.append(lcatr.schema.valid(
                lcatr.schema.get('read_noise_BOT'), amp=amp,
                read_noise=read_noise, system_noise=system_noise,
                total_noise=total_noise, slot=slot, raft=raft))

        files = glob.glob('%s_read_noise?*.fits' % file_prefix)
        for fitsfile in files:
            eotestUtils.addHeaderData(fitsfile, TESTTYPE='FE55',
                                      DATE=eotestUtils.utc_now_isoformat())

        data_products = [siteUtils.make_fileref(item) for item in files]
        results.extend(data_products)

        # Persist the png files.
        metadata = dict(DETECTOR=det_name, TESTTYPE='FE55',
                        TEST_CATEGORY='EO', RUN=run)
        filename = '%s_correlated_noise.png' % file_prefix
        results.extend(siteUtils.persist_png_files(filename, file_prefix,
                                                   metadata=metadata))

    # Persist the raft-level overscan correlation plots.
    for raft in camera_info.get_installed_raft_names():
        metadata = dict(TESTTYPE='FE55', TEST_CATEGORY='EO', RAFT=raft,
                        RUN=run)
        file_prefix = make_file_prefix(run, raft)
        filename = '%s_overscan_correlations.png' % file_prefix
        results.extend(siteUtils.persist_png_files(filename, file_prefix,
                                                   metadata=metadata))

    report_missing_data("validate_read_noise", missing_det_names)
    return results
def __init__(self, configFile):
    """
    configFile contains the names of the site-specific configuration
    files.  File basenames are provided in configFile, and the full
    paths are constructed in the _read(...) method.
    """
    super(CcsSetup, self).__init__()

    if os.environ.has_key('CCS_TS8'):
        self['ts8'] = _quote(os.getenv('CCS_TS8'))
    else:
        self['ts8'] = _quote('ts8')
    if os.environ.has_key('CCS_JYTH'):
        self['jyth'] = _quote(os.getenv('CCS_JYTH'))
    else:
        self['jyth'] = _quote('JythonInterpreterConsole')
    if os.environ.has_key('CCS_JSON_PORT'):
        self['jsonport'] = os.getenv('CCS_JSON_PORT')
    else:
        self['jsonport'] = 4444
    if os.environ.has_key('CCS_PS'):
        self['ps'] = _quote(os.getenv('CCS_PS'))
    else:
        self['ps'] = _quote('ccs-rebps')
    if os.environ.has_key('CCS_TS'):
        self['ts'] = _quote(os.getenv('CCS_TS'))
    else:
        self['ts'] = _quote('ts')
    if os.environ.has_key('CCS_ARCHON'):
        self['archon'] = _quote(os.getenv('CCS_ARCHON'))
    else:
        self['archon'] = _quote('archon')

    # There are no defaults for the outlet variables.  If a script needs
    # one and it has not been defined, the script should crash.
    if os.environ.has_key('CCS_VAC_OUTLET'):
        self['vac_outlet'] = os.getenv('CCS_VAC_OUTLET')
    if os.environ.has_key('CCS_CRYO_OUTLET'):
        self['cryo_outlet'] = os.getenv('CCS_CRYO_OUTLET')
    if os.environ.has_key('CCS_PUMP_OUTLET'):
        self['pump_outlet'] = os.getenv('CCS_PUMP_OUTLET')

    self['tsCWD'] = _quote(os.getcwd())
    self['labname'] = _quote(siteUtils.getSiteName())
    self['jobname'] = _quote(siteUtils.getJobName())
    self['CCDID'] = _quote(siteUtils.getUnitId())
    self['UNITID'] = _quote(siteUtils.getUnitId())
    self['LSSTID'] = _quote(siteUtils.getLSSTId())

    unitid = siteUtils.getUnitId()
    CCDTYPE = _quote(siteUtils.getUnitType())

    ccdnames = {}
    ccdmanunames = {}
    ccdnames, ccdmanunames = siteUtils.getCCDNames()

    print "retrieved the following LSST CCD names list"
    print ccdnames
    print "retrieved the following Manufacturers CCD names list"
    print ccdmanunames

    for slot in ccdnames:
        print "CCD %s is in slot %s" % (ccdnames[slot], slot)
        self['CCD%s' % slot] = _quote(ccdnames[slot])
        if 'itl' in ccdnames[slot].lower():
            CCDTYPE = 'itl'
        if 'e2v' in ccdnames[slot].lower():
            CCDTYPE = 'e2v'
    for slot in ccdmanunames:
        print "CCD %s is in slot %s" % (ccdmanunames[slot], slot)
        self['CCDMANU%s' % slot] = _quote(ccdmanunames[slot])

    try:
        self['RUNNUM'] = _quote(siteUtils.getRunNumber())
    except:
        self['RUNNUM'] = "no_lcatr_run_number"

    self._read(os.path.join(siteUtils.getJobDir(), configFile))

    print "CCDTYPE = %s" % CCDTYPE

    self['sequence_file'] = _quote("NA")
    self['acffile'] = self['itl_acffile']   # set default type
    self['CCSCCDTYPE'] = _quote("ITL")
    if ("RTM" in unitid.upper() or "ETU" in unitid.upper()
            or "RSA" in unitid.upper()):
        if "e2v" in CCDTYPE:
            self['CCSCCDTYPE'] = _quote("E2V")
            self['acffile'] = self['e2v_acffile']
            self['sequence_file'] = self['e2v_seqfile']
        else:
            self['CCSCCDTYPE'] = _quote("ITL")
            self['acffile'] = self['itl_acffile']
            self['sequence_file'] = self['itl_seqfile']
        os.system("export | grep -i seq")
        seqdir = ""
        if os.environ.has_key('SEQUENCERFILESDIR'):
            seqdir = os.getenv('SEQUENCERFILESDIR')
        print "seqdir=", seqdir
        self['sequence_file'] = \
            self['sequence_file'].replace('${SEQUENCERFILESDIR}', seqdir)
        os.system("cp -vp %s %s" % (self['sequence_file'], self['tsCWD']))
        # now use the local copy
        # bb = self['sequence_file'].split("/")
        # self['sequence_file'] = _quote("%s/%s" % (os.getcwd(), bb[len(bb)-1].split("'")[0]))
        print "The sequence file to be used is %s" % self['sequence_file']
    else:
        if "ITL" in CCDTYPE:
            self['CCSCCDTYPE'] = _quote("ITL")
            self['acffile'] = self['itl_acffile']
        if "e2v" in CCDTYPE:
            self['CCSCCDTYPE'] = _quote("E2V")
            self['acffile'] = self['e2v_acffile']
    print "The acffile to be used is %s" % self['acffile']
#!/usr/bin/env python
import Tkinter
import glob
import shutil
import os
import matplotlib.pyplot as plt
import ccs_trending
import siteUtils
import time
import subprocess
# ccsProducer is assumed to come from the harness's ccsTools module.
from ccsTools import ccsProducer

raft_id = siteUtils.getLSSTId()
run_number = siteUtils.getRunNumber()
host = 'localhost'

jobDir = siteUtils.getJobDir()
shutil.copy("%s/ts_quantities.cfg" % jobDir, os.getcwd())
shutil.copy("%s/ts8_quantities.cfg" % jobDir, os.getcwd())

ccsProducer('RTM_thermo', 'ccsthermal.py')

cdir = os.getcwd()

#rtmstatelist = [
#"RTM_off_5min_stable__502",
#"REB_quiescient_record__505",
#"RTM_quies_5min_record__508",
#"RTM_biases_5min_record__510",
#"RTM_clears_5min_record__512",