Example #1
def test_reduce_image_GN_EEV_2x2_g(path_to_inputs):
    logutils.config(file_name='gmos_test_reduce_image_GN_EEV_2x2_g.log')

    calib_files = []

    raw_subdir = 'GMOS/GN-2002A-Q-89'

    all_files = sorted(glob.glob(
        os.path.join(path_to_inputs, raw_subdir, '*.fits')))
    assert len(all_files) > 1

    list_of_bias = dataselect.select_data(
        all_files,
        ['BIAS'],
        []
    )

    list_of_flats = dataselect.select_data(
        all_files,
        ['IMAGE', 'FLAT'],
        [],
        dataselect.expr_parser('filter_name=="g"')
    )

    # These old data don't have an OBSCLASS keyword:
    list_of_science_files = dataselect.select_data(
        all_files, [],
        ['CAL'],
        dataselect.expr_parser(
            'object=="PerseusField4" and filter_name=="g"'
        )
    )

    reduce_bias = Reduce()
    assert len(reduce_bias.files) == 0

    reduce_bias.files.extend(list_of_bias)
    assert len(reduce_bias.files) == len(list_of_bias)

    reduce_bias.runr()

    calib_files.append(
        'processed_bias:{}'.format(reduce_bias.output_filenames[0])
    )

    reduce_flats = Reduce()
    reduce_flats.files.extend(list_of_flats)
    reduce_flats.ucals = normalize_ucals(reduce_flats.files, calib_files)
    reduce_flats.runr()

    calib_files.append(
        'processed_flat:{}'.format(reduce_flats.output_filenames[0])
    )

    reduce_target = Reduce()
    reduce_target.files.extend(list_of_science_files)
    reduce_target.ucals = normalize_ucals(reduce_target.files, calib_files)
    reduce_target.runr()
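
The pattern above recurs throughout these examples: user calibrations are expressed as 'caltype:path' strings and converted with normalize_ucals before runr() is called. A minimal sketch of that pattern, with hypothetical filenames standing in for real data:

from recipe_system.reduction.coreReduce import Reduce
from recipe_system.utils.reduce_utils import normalize_ucals

# Hypothetical filenames; only the call pattern follows the example above.
science_files = ['hypothetical_science_001.fits', 'hypothetical_science_002.fits']
calib_files = ['processed_bias:hypothetical_bias.fits',
               'processed_flat:hypothetical_flat.fits']

r = Reduce()
r.files.extend(science_files)
# normalize_ucals turns the 'caltype:path' strings into the user-calibration
# mapping that Reduce expects for these input files.
r.ucals = normalize_ucals(r.files, calib_files)
r.runr()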
Example #2
def reduce(file_list,
           label,
           calib_files,
           recipe_name=None,
           save_to=None,
           user_pars=None):
    """
    Helper function used to prevent replication of code.

    Parameters
    ----------
    file_list : list
        List of files that will be reduced.
    label : str
        Label used in the log file name.
    calib_files : list
        List of calibration files properly formatted for DRAGONS Reduce().
    recipe_name : str, optional
        Name of the recipe used to reduce the data.
    save_to : str, optional
        If given, calibration type (e.g. 'processed_bias') under which the
        output is appended to calib_files, pointing at its copy in the local
        'calibrations/' directory.
    user_pars : list, optional
        List of user parameters

    Returns
    -------
    str : Output reduced file.
    list : An updated list of calibration files.
    """
    objgraph = pytest.importorskip("objgraph")

    logutils.get_logger().info("\n\n\n")
    logutils.config(file_name=f"test_image_{label}.log")
    r = Reduce()
    r.files = file_list
    r.ucals = normalize_ucals(r.files, calib_files)
    r.uparms = user_pars

    if recipe_name:
        r.recipename = recipe_name

    r.runr()
    output_file = r.output_filenames[0]

    if save_to:
        calib_files.append("{}:{}".format(
            save_to,
            os.path.join("calibrations", save_to, r.output_filenames[0])))
        [os.remove(f) for f in r.output_filenames]

    # check that we are not leaking objects
    assert len(objgraph.by_type('NDAstroData')) == 0

    return output_file, calib_files
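
A hedged usage sketch of the reduce() helper above. The file lists are hypothetical, and it is assumed that a local calibration manager stores the processed outputs under calibrations/<caltype>/, as the save_to branch implies:

# Hypothetical file lists; only the call pattern follows the helper's docstring.
bias_files = ['hypothetical_bias_001.fits', 'hypothetical_bias_002.fits']
flat_files = ['hypothetical_flat_001.fits']

calib_files = []
# The save_to branch registers the bias copy expected under calibrations/processed_bias/.
_, calib_files = reduce(bias_files, 'bias', calib_files, save_to='processed_bias')
# The second call picks the stored bias back up through normalize_ucals.
master_flat, calib_files = reduce(flat_files, 'flat', calib_files,
                                  save_to='processed_flat')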
Example #3
    def test_slitbias_in_calservice(self, get_or_create_tmpdir):
        """
        Check that:

        - A bias slit calibrator exists in the local calibrations dir;
        - It can be retrieved using a getProcessedSlitBias call.
        """
        assert len(glob.glob(os.path.join(
            os.getcwd(), 'calibrations', 'processed_bias', '*bias*slit*.fits'
        ))) == 1, "Couldn't find the stored slit bias in the calibrations " \
                  "system OR found multiples"

        # Do the master bias generation
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        # Use one of the 'dark slit' files to try and retrieve the slit bias
        reduce.files = [
            os.path.join(os.getcwd(), 'dark95_1_MEF_2x2_slit.fits'),
        ]
        reduce.mode = [
            'test',
        ]
        reduce.recipename = 'recipeRetrieveSlitBiasTest'
        # reduce.mode = ['sq', ]
        # reduce.recipename = 'makeProcessedBias'
        reduce.logfile = os.path.join(os.getcwd(),
                                      'reduce_slitbias_retrieve.log')
        # FIXME cal_service will hopefully find the calibration itself later
        reduce.ucals = normalize_ucals(reduce.files, [
            'processed_bias:{}'.format(
                glob.glob(
                    os.path.join('calibrations', 'processed_bias',
                                 '*slit*bias*.fits'))[0]),
        ])
        reduce.logmode = 'quiet'
        reduce.suffix = '_testSlitBiasRetrieve'
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)

        try:
            reduce.runr()
        except IOError as e:
            assert 0, 'Calibration system could not locate the slit bias ' \
                      'frame ({})'.format(e)
        finally:
            # Teardown code
            for _ in glob.glob(
                    os.path.join(os.getcwd(),
                                 '*{}.fits'.format(reduce.suffix)), ):
                os.remove(_)
Example #4
def processed_slit_illum(change_working_dir, path_to_inputs, request):
    """
    Returns the processed slit illumination function that will be analysed.

    Parameters
    ----------
    change_working_dir : pytest.fixture
        Fixture that changes the working directory (see :mod:`astrodata.testing`).
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.
    request : pytest.fixture
        PyTest built-in fixture containing information about parent test.

    Returns
    -------
    AstroData
        Input spectrum processed up to right before the `applyQECorrection`.
    """
    twi_filename = request.param
    twi_path = download_from_archive(twi_filename)
    twi_ad = astrodata.open(twi_path)

    print(twi_ad.tags)

    master_bias = os.path.join(
        path_to_inputs, associated_calibrations[twi_filename])

    assert os.path.exists(master_bias)

    calibration_files = ['processed_bias:{}'.format(master_bias)]

    with change_working_dir():
        print("Reducing SLITILLUM in folder:\n  {}".format(os.getcwd()))
        logutils.config(
            file_name='log_flat_{}.txt'.format(twi_ad.data_label()))

        reduce = Reduce()
        reduce.files.extend([twi_path])
        reduce.mode = 'sq'
        reduce.recipename = 'makeProcessedSlitIllum'
        reduce.ucals = normalize_ucals(reduce.files, calibration_files)
        reduce.runr()

        _processed_twi_filename = reduce.output_filenames.pop()
        _processed_twi = astrodata.open(_processed_twi_filename)

    return _processed_twi
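
Since the fixture reads request.param, a test would typically feed it through pytest's indirect parametrization, roughly as sketched below. The twilight filename is a hypothetical placeholder and would need a matching entry in associated_calibrations:

import pytest

@pytest.mark.parametrize("processed_slit_illum",
                         ["hypothetical_twilight.fits"],  # placeholder name
                         indirect=True)
def test_slit_illum_has_extensions(processed_slit_illum):
    # The fixture returns the AstroData object produced by makeProcessedSlitIllum.
    assert len(processed_slit_illum) > 0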
Example #5
    def reduce(filelist, saveto=None, label='', calib_files=None,
               recipename=None):
        red = Reduce()
        assert len(red.files) == 0
        red.files.extend(filelist)
        assert len(red.files) == len(filelist)
        if calib_files:
            red.ucals = normalize_ucals(red.files, calib_files)
        if recipename:
            red.recipename = recipename
        red.runr()
        if saveto:
            calib_files.append(f'{saveto}:{red.output_filenames[0]}')

        # check that we are not leaking objects
        assert len(objgraph.by_type('NDAstroData')) == 0
Example #6
    def _reduce_flat(datalabel, flat_fname, master_bias):
        with output_path():
            logutils.config(file_name='log_flat_{}.txt'.format(datalabel))

            calibration_files = ['processed_bias:{}'.format(master_bias)]

            reduce = Reduce()
            reduce.files.extend([flat_fname])
            reduce.mode = 'ql'
            reduce.ucals = normalize_ucals(reduce.files, calibration_files)
            reduce.runr()

            master_flat = reduce.output_filenames.pop()
            master_flat_ad = astrodata.open(master_flat)

        return master_flat_ad
Example #7
def processed_flat(change_working_dir, path_to_inputs, request):
    """
    Returns the processed flat that will be analysed.

    Parameters
    ----------
    change_working_dir : pytest.fixture
        Fixture that changes the working directory (see :mod:`astrodata.testing`).
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.
    request : pytest.fixture
        PyTest built-in fixture containing information about parent test.

    Returns
    -------
    AstroData
        Input spectrum processed up to right before the `applyQECorrection`.
    """
    np.random.seed(0)

    flat_filename = request.param
    flat_path = download_from_archive(flat_filename)
    flat_raw = astrodata.open(flat_path)

    master_bias = os.path.join(path_to_inputs,
                               associated_calibrations[flat_filename])
    calibration_files = ['processed_bias:{}'.format(master_bias)]

    with change_working_dir():
        print("Reducing FLATs in folder:\n  {}".format(os.getcwd()))
        logutils.config(
            file_name='log_flat_{}.txt'.format(flat_raw.data_label()))

        reduce = Reduce()
        reduce.files.extend([flat_path])
        reduce.mode = 'ql'
        reduce.ucals = normalize_ucals(reduce.files, calibration_files)
        reduce.runr()

        # Clean up duplicated files
        shutil.rmtree('calibrations/')

        _processed_flat_filename = reduce.output_filenames.pop()
        _processed_flat = astrodata.open(_processed_flat_filename)

    return _processed_flat
Example #8
    def _reduce_flat(data_label, flat_fnames, master_bias):
        with change_working_dir():
            print("Reducing FLATs in folder:\n  {}".format(os.getcwd()))
            logutils.config(file_name='log_flat_{}.txt'.format(data_label))

            calibration_files = ['processed_bias:{}'.format(master_bias)]

            reduce = Reduce()
            reduce.files.extend(flat_fnames)
            reduce.mode = 'ql'
            reduce.ucals = normalize_ucals(calibration_files)
            reduce.runr()

            master_flat = reduce.output_filenames.pop()
            master_flat_ad = astrodata.open(master_flat)

        return master_flat_ad
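
Note that this example calls normalize_ucals with the calibration list alone, whereas most examples on this page also pass the input file list. Both signatures appear in the sources collected here, presumably reflecting different DRAGONS releases; side by side:

# Two-argument form, as in most examples on this page:
reduce.ucals = normalize_ucals(reduce.files, calibration_files)
# Single-argument form, as in the example above:
reduce.ucals = normalize_ucals(calibration_files)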
Example #9
def test_make_processed_flat(change_working_dir, flat_fnames, master_bias,
                             path_to_inputs):
    """
    Regression test for GMOS

    Parameters
    ----------
    change_working_dir : fixture
        Custom fixture defined in astrodata.testing that temporarily changes
        the working directory.
    flat_fnames : list
        Contains the flat names that will be reduced.
    master_bias : str
        Contains the name of the master bias.
    path_to_inputs : fixture
        Custom fixture that defines where the input data is stored.
    """
    master_bias = os.path.join(path_to_inputs, master_bias)
    calibration_files = ['processed_bias:{}'.format(master_bias)]

    with change_working_dir():
        logutils.config(
            file_name=f"log_flat_{flat_fnames[0].split('.')[0]}.txt")
        r = Reduce()
        r.files = [download_from_archive(f) for f in flat_fnames]
        r.mode = 'qa'
        r.ucals = normalize_ucals(r.files, calibration_files)
        r.runr()

        # Delete files that won't be used
        shutil.rmtree('calibrations/')
        [os.remove(f) for f in glob.glob('*_forStack.fits')]

        ad = astrodata.open(r.output_filenames[0])

    for ext in ad:
        data = np.ma.masked_array(ext.data, mask=ext.mask)

        if not data.mask.all():
            np.testing.assert_allclose(np.ma.median(data.ravel()),
                                       1,
                                       atol=0.071)
            np.testing.assert_allclose(data[~data.mask], 1, atol=0.45)
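
The statistical check at the end of the test can be factored into a small reusable helper; this is only a sketch that keeps the same tolerances as the assertions above:

import numpy as np

def assert_flat_is_normalized(ad, median_atol=0.071, pixel_atol=0.45):
    """Check that every extension of a processed flat is normalized to ~1."""
    for ext in ad:
        data = np.ma.masked_array(ext.data, mask=ext.mask)
        if not data.mask.all():
            np.testing.assert_allclose(np.ma.median(data.ravel()), 1,
                                       atol=median_atol)
            np.testing.assert_allclose(data[~data.mask], 1, atol=pixel_atol)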
Example #10
    def __init__(self, sys_args=None):
        if sys_args:
            args = sys_args
        elif self._confirm_args():
            args = buildParser(__version__).parse_args()
        else:
            args = buildParser(__version__).parse_args([])

        # acquire any new astrodata classes.
        if args.adpkg:
            import_module(args.adpkg)

        self.mode = args.mode
        self.drpkg = args.drpkg
        self.files = args.files
        self.suffix = args.suffix
        self.ucals = normalize_ucals(args.files, args.user_cal)
        self.uparms = set_btypes(args.userparam)
        self._upload = args.upload
        self._output_filenames = None
        self.recipename = args.recipename if args.recipename else '_default'
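
This constructor ties the command-line interface to the same machinery the tests use: whatever arrives in args.user_cal is handed to normalize_ucals together with the input files. A minimal sketch using a hypothetical argparse.Namespace in place of real command-line parsing; the attribute names are inferred from the constructor above:

import argparse

# Hypothetical stand-in for the parsed arguments; only the attributes read by
# __init__ above are populated, with placeholder values.
args = argparse.Namespace(
    mode='sq', drpkg='geminidr', adpkg=None,
    files=['hypothetical_science.fits'], suffix=None,
    user_cal=['processed_bias:hypothetical_bias.fits'],
    userparam=None, upload=None, recipename=None,
)
# Passing an object like this as sys_args would route user_cal through
# normalize_ucals(args.files, args.user_cal), exactly as shown above.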
Example #11
    def do_master_flat(self, get_or_create_tmpdir, request):
        """
        Run the recipeFlatCreateMaster recipe.

        .. note::
            Fixture.
        """
        arm, res = request.param
        rawfilename = 'flat*{}*{}*.fits'.format(res, arm)
        # Copy the raw data file into here
        tmpsubdir, cal_service = get_or_create_tmpdir
        # Find all the relevant files
        # rawfiles = glob.glob(os.path.join(os.path.dirname(
        #     os.path.abspath(__file__)),
        #     'testdata',
        #     rawfilename))
        # for f in rawfiles:
        #     shutil.copy(f, os.path.join(tmpsubdir.dirname, tmpsubdir.basename))
        rawfiles = glob.glob(
            os.path.join(tmpsubdir.dirname, tmpsubdir.basename, rawfilename))

        # Do the master bias generation
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        reduce.files = rawfiles
        reduce.mode = [
            'test',
        ]
        reduce.recipename = 'recipeFlatCreateMaster'
        # reduce.mode = ['sq', ]
        # reduce.recipename = 'makeProcessedBias'
        reduce.logfile = os.path.join(
            tmpsubdir.dirname, tmpsubdir.basename,
            'reduce_masterflat_{}_{}.log'.format(res, arm))
        reduce.logmode = 'quiet'
        reduce.suffix = '_{}_{}_testMasterFlat'.format(res, arm)
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)
        # import pdb; pdb.set_trace()
        calibs = {
            'processed_bias':
            glob.glob(
                os.path.join('calibrations', 'processed_bias',
                             'bias*{}*.fits'.format(arm)))[0],
            'processed_dark':
            glob.glob(
                os.path.join('calibrations', 'processed_dark',
                             'dark*{}*.fits'.format(arm)))[0],
            'processed_slitflat':
            glob.glob(
                os.path.join('calibrations', 'processed_slitflat',
                             'flat*{}*slitflat*.fits'.format(res)))[0],
        }
        reduce.ucals = normalize_ucals(
            reduce.files, ['{}:{}'.format(k, v) for k, v in calibs.items()])

        reduce.runr()
        if res == 'std' and arm == 'red':
            pass
            # import pdb;
            # pdb.set_trace()

        corrfilename = '*' + reduce.suffix + '.fits'
        corrfilename = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                    glob.glob(corrfilename)[0])
        corrfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                corrfilename)

        # Return filenames of raw, subtracted files
        yield rawfiles, corrfile, calibs

        # Execute teardown code
        pass
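
The dict-to-string conversion used above recurs in several of the ghostdr examples; a tiny helper makes the intent explicit (a sketch, not part of the original code):

def calibs_to_ucal_strings(calibs):
    """Turn {'processed_bias': 'bias.fits', ...} into the
    ['processed_bias:bias.fits', ...] form expected by normalize_ucals."""
    return ['{}:{}'.format(caltype, path) for caltype, path in calibs.items()]

# Usage mirroring the fixture above:
#   reduce.ucals = normalize_ucals(reduce.files, calibs_to_ucal_strings(calibs))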
Example #12
def test_reduce_image(path_to_inputs):
    calib_files = []

    all_files = glob.glob(
        os.path.join(path_to_inputs, 'GSAOI/test_reduce/', '*.fits'))

    all_files.sort()

    assert len(all_files) > 1

    list_of_darks = dataselect.select_data(all_files, ['DARK'], [])
    list_of_darks.sort()

    list_of_kshort_flats = dataselect.select_data(
        all_files, ['FLAT'], [],
        dataselect.expr_parser('filter_name=="Kshort"'))
    list_of_kshort_flats.sort()

    list_of_h_flats = dataselect.select_data(
        all_files, ['FLAT'], [], dataselect.expr_parser('filter_name=="H"'))
    list_of_h_flats.sort()

    list_of_science_files = dataselect.select_data(
        all_files, [], [],
        dataselect.expr_parser(
            'observation_class=="science" and exposure_time==60.'))
    list_of_science_files.sort()

    for darks in [list_of_darks]:
        reduce_darks = Reduce()
        assert len(reduce_darks.files) == 0

        reduce_darks.files.extend(darks)
        assert len(reduce_darks.files) == len(darks)

        logutils.config(file_name='gsaoi_test_reduce_dark.log', mode='quiet')
        reduce_darks.runr()

        del reduce_darks

    logutils.config(file_name='gsaoi_test_reduce_bpm.log', mode='quiet')
    reduce_bpm = Reduce()
    reduce_bpm.files.extend(list_of_h_flats)
    reduce_bpm.files.extend(list_of_darks)
    reduce_bpm.recipename = 'makeProcessedBPM'
    reduce_bpm.runr()

    bpm_filename = reduce_bpm.output_filenames[0]

    del reduce_bpm

    logutils.config(file_name='gsaoi_test_reduce_flats.log', mode='quiet')
    reduce_flats = Reduce()
    reduce_flats.files.extend(list_of_kshort_flats)
    reduce_flats.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_flats.runr()

    calib_files.append('processed_flat:{}'.format(
        reduce_flats.output_filenames[0]))

    del reduce_flats

    logutils.config(file_name='gsaoi_test_reduce_science.log', mode='quiet')
    reduce_target = Reduce()
    reduce_target.files.extend(list_of_science_files)
    reduce_target.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_target.ucals = normalize_ucals(reduce_target.files, calib_files)
    reduce_target.runr()

    del reduce_target
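
dataselect.select_data is called with the same positional arguments throughout these examples: the candidate files, the tags that must be present, the tags that must be absent, and an optional parsed descriptor expression. A hedged sketch with hypothetical filenames (the dataselect import is assumed, since the examples use the module without showing it):

from gempy.adlibrary import dataselect  # import assumed

all_files = ['hypothetical_001.fits', 'hypothetical_002.fits']

science = dataselect.select_data(
    all_files,
    ['IMAGE'],                                    # tags that must be present
    ['CAL'],                                      # tags that must be absent
    dataselect.expr_parser('filter_name=="g"'))   # descriptor expression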
Example #13
def test_reduce_image(path_to_inputs):
    calib_files = []

    all_files = glob.glob(
        os.path.join(path_to_inputs, 'F2/test_reduce/', '*.fits'))

    all_files.sort()

    assert len(all_files) > 1

    darks_3s = dataselect.select_data(
        all_files, ['F2', 'DARK', 'RAW'], [],
        dataselect.expr_parser('exposure_time==3'))
    darks_3s.sort()

    darks_20s = dataselect.select_data(
        all_files, ['F2', 'DARK', 'RAW'], [],
        dataselect.expr_parser('exposure_time==20'))
    darks_20s.sort()

    darks_120s = dataselect.select_data(
        all_files, ['F2', 'DARK', 'RAW'], [],
        dataselect.expr_parser('exposure_time==120'))
    darks_120s.sort()

    flats = dataselect.select_data(all_files, ['F2', 'FLAT', 'RAW'], [],
                                   dataselect.expr_parser('filter_name=="Y"'))
    flats.sort()

    science = dataselect.select_data(
        all_files, ['F2', 'RAW'], ['CAL'],
        dataselect.expr_parser('filter_name=="Y"'))
    science.sort()

    for darks in [darks_3s, darks_20s, darks_120s]:
        reduce_darks = Reduce()
        assert len(reduce_darks.files) == 0

        reduce_darks.files.extend(darks)
        assert len(reduce_darks.files) == len(darks)

        logutils.config(file_name='f2_test_reduce_darks.log', mode='quiet')
        reduce_darks.runr()

        calib_files.append('processed_dark:{}'.format(
            reduce_darks.output_filenames[0]))

    logutils.config(file_name='f2_test_reduce_bpm.log', mode='quiet')
    reduce_bpm = Reduce()
    reduce_bpm.files.extend(flats)
    reduce_bpm.files.extend(darks_3s)
    reduce_bpm.recipename = 'makeProcessedBPM'
    reduce_bpm.runr()

    bpm_filename = reduce_bpm.output_filenames[0]

    logutils.config(file_name='f2_test_reduce_flats.log', mode='quiet')
    reduce_flats = Reduce()
    reduce_flats.files.extend(flats)
    reduce_flats.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_flats.runr()

    calib_files.append('processed_flat:{}'.format(
        reduce_flats.output_filenames[0]))

    logutils.config(file_name='f2_test_reduce_science.log', mode='quiet')
    reduce_target = Reduce()
    reduce_target.files.extend(science)
    reduce_target.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_target.ucals = normalize_ucals(reduce_target.files, calib_files)
    reduce_target.runr()
Example #14
def create_inputs():
    """
    Create inputs for `test_plot_spectra_for_qa_single_frame`.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import glob
    import os
    from geminidr.gmos.primitives_gmos_longslit import GMOSLongslit
    from gempy.utils import logutils
    from recipe_system.reduction.coreReduce import Reduce
    from recipe_system.utils.reduce_utils import normalize_ucals

    cwd = os.getcwd()
    path = f"./dragons_test_inputs/geminidr/core/{__file__.split('.')[0]}/"
    os.makedirs(path, exist_ok=True)
    os.chdir(path)

    os.makedirs("inputs/", exist_ok=True)

    for raw_list, bias_list, quartz_list, arc_list in single_aperture_data:

        if all([
                os.path.exists(f"inputs/{s.split('.')[0]}_extracted.fits")
                for s in raw_list
        ]):
            print("Skipping already created input.")
            continue

        raw_paths = [download_from_archive(f) for f in raw_list]
        bias_paths = [download_from_archive(f) for f in bias_list]
        quartz_paths = [download_from_archive(f) for f in quartz_list]
        arc_paths = [download_from_archive(f) for f in arc_list]

        cals = []
        raw_ads = [astrodata.open(p) for p in raw_paths]
        data_label = raw_ads[0].data_label()
        print('Current working directory:\n    {:s}'.format(os.getcwd()))

        if len(bias_paths):
            logutils.config(file_name='log_bias_{}.txt'.format(data_label))
            r = Reduce()
            r.files.extend(bias_paths)
            r.runr()
            master_bias = r.output_filenames.pop()
            cals.append(f"processed_bias:{master_bias}")
            del r
        else:
            master_bias = None

        if len(quartz_paths):
            logutils.config(file_name='log_quartz_{}.txt'.format(data_label))
            r = Reduce()
            r.files.extend(quartz_paths)
            r.ucals = normalize_ucals(r.files, cals)
            r.runr()
            master_quartz = r.output_filenames.pop()
            cals.append(f"processed_flat:{master_quartz}")
            del r
        else:
            master_quartz = None

        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        r = Reduce()
        r.files.extend(arc_paths)
        r.ucals = normalize_ucals(r.files, cals)
        r.runr()
        master_arc = r.output_filenames.pop()

        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = GMOSLongslit(raw_ads)
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.biasCorrect(do_bias=master_bias is not None, bias=master_bias)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.flatCorrect(do_flat=master_quartz is not None, flat=master_quartz)
        p.QECorrect(arc=master_arc)
        p.distortionCorrect(arc=master_arc)
        p.findSourceApertures(max_apertures=3)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()
        p.linearizeSpectra()

        [os.remove(s) for s in glob.glob("*_arc.fits")]
        [os.remove(s) for s in glob.glob("*_bias.fits")]
        [os.remove(s) for s in glob.glob("*_flat.fits")]
        [os.remove(s) for s in glob.glob("*_mosaic.fits")]

        os.chdir("inputs/")
        print("\n\n    Writing processed files for tests into:\n"
              "    {:s}\n\n".format(os.getcwd()))
        _ = p.writeOutputs()
        os.chdir("../")

    os.chdir(cwd)
Example #15
def test_arc_missing_pixelmodel(arm, res, epoch, get_or_create_tmpdir):
    """
    Check for the correct behaviour/error handling if PIXELMODEL extn. missing.
    """
    rawfilename = 'arc{}*{}*{}[0-9].fits'.format(epoch, res, arm)
    # Copy the raw data file into here
    tmpsubdir, cal_service = get_or_create_tmpdir
    # Find all the relevant files
    # rawfiles = glob.glob(os.path.join(os.path.dirname(
    #     os.path.abspath(__file__)),
    #     'testdata',
    #     rawfilename))
    # for f in rawfiles:
    #     shutil.copy(f, os.path.join(tmpsubdir.dirname, tmpsubdir.basename))
    rawfiles = glob.glob(
        os.path.join(tmpsubdir.dirname, tmpsubdir.basename, rawfilename))
    rawfiles_ad = [astrodata.open(_) for _ in rawfiles]

    calibs = {
        'processed_bias':
        glob.glob(
            os.path.join('calibrations', 'processed_bias',
                         'bias*{}*.fits'.format(arm)))[0],
        'processed_dark':
        glob.glob(
            os.path.join('calibrations', 'processed_dark',
                         'dark*{}*.fits'.format(arm)))[0],
        'processed_flat':
        glob.glob(
            os.path.join('calibrations', 'processed_flat',
                         'flat*{}*{}*.fits'.format(res, arm)))[0],
        'processed_slitflat':
        glob.glob(
            os.path.join('calibrations', 'processed_slitflat',
                         'flat*{}*slitflat*.fits'.format(res)))[0],
        'processed_slit':
        glob.glob(
            os.path.join('calibrations', 'processed_slit',
                         'arc{}*{}*_slit.fits'.format(epoch, res)))[0],
    }
    flat = astrodata.open(calibs['processed_flat'])
    del flat[0].PIXELMODEL
    flatname = 'flat_{}_{}_nopixmod.fits'.format(arm, res)
    flat.write(filename=flatname, overwrite=True)
    calibs['processed_flat'] = flatname

    # import pdb;
    # pdb.set_trace()

    pm = PrimitiveMapper(rawfiles_ad,
                         mode='test',
                         drpkg='ghostdr',
                         recipename='recipeArcCreateMaster',
                         usercals=normalize_ucals(
                             rawfiles,
                             ['{}:{}'.format(k, v) for k, v in calibs.items()])
                         # usercals=calibs,
                         )
    rm = RecipeMapper(
        rawfiles_ad,
        mode='test',
        drpkg='ghostdr',
        recipename='recipeArcCreateMaster',
        # usercals=calibs,
    )

    p = pm.get_applicable_primitives()
    recipe = rm.get_applicable_recipe()

    with pytest.raises(AttributeError) as e_pixmod:
        recipe(p)
        # import pdb; pdb.set_trace()
    # The assertion must sit outside the `with` block, otherwise it is never
    # reached once recipe(p) raises.
    assert 'PIXELMODEL' in str(e_pixmod.value), (
        "The error raised in this test doesn't seem to be about the missing "
        "PIXELMODEL extension, as expected.")

    # Teardown code
    os.remove(flatname)
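
Instead of going through Reduce, this test drives the recipe system directly: PrimitiveMapper receives the usercals produced by normalize_ucals, RecipeMapper selects the recipe, and the recipe is then called on the primitives object. A condensed sketch of that pattern with hypothetical inputs (the mapper imports are assumed, since the test above uses the classes without showing them):

# Imports assumed from the recipe_system mappers package.
from recipe_system.mappers.primitiveMapper import PrimitiveMapper
from recipe_system.mappers.recipeMapper import RecipeMapper

raw = ['hypothetical_arc.fits']                      # placeholder input
ads = [astrodata.open(f) for f in raw]
ucals = normalize_ucals(raw, ['processed_bias:hypothetical_bias.fits'])

pm = PrimitiveMapper(ads, mode='test', drpkg='ghostdr',
                     recipename='recipeArcCreateMaster', usercals=ucals)
rm = RecipeMapper(ads, mode='test', drpkg='ghostdr',
                  recipename='recipeArcCreateMaster')

recipe = rm.get_applicable_recipe()
recipe(pm.get_applicable_primitives())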
Example #16
def create_inputs_recipe(use_branch_name=False):
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from gempy.utils import logutils
    from geminidr.gmos.tests.spect import CREATED_INPUTS_PATH_FOR_TESTS
    from recipe_system.reduction.coreReduce import Reduce
    from recipe_system.utils.reduce_utils import normalize_ucals

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)

    input_path = os.path.join(path, "inputs/")
    os.makedirs(input_path, exist_ok=True)

    for filename, cals in associated_calibrations.items():
        print(filename)
        print(cals)

        sci_path = download_from_archive(filename)
        cals = associated_calibrations[filename]
        bias_paths = [download_from_archive(f) for f in cals['bias']]
        flat_paths = [download_from_archive(f) for f in cals['flat']]
        arc_paths = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_paths)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        calibration_files = ['processed_bias:{}'.format(bias_master)]
        del bias_reduce

        logutils.config(file_name='log_flat_{}.txt'.format(data_label))
        flat_reduce = Reduce()
        flat_reduce.files.extend(flat_paths)
        flat_reduce.ucals = normalize_ucals(calibration_files)
        flat_reduce.runr()
        flat_master = flat_reduce.output_filenames.pop()
        calibration_files.append('processed_flat:{}'.format(flat_master))
        del flat_reduce

        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_paths)
        arc_reduce.ucals = normalize_ucals(calibration_files)

        os.chdir(input_path)
        arc_reduce.runr()
        _ = arc_reduce.output_filenames.pop()
        os.chdir(path)

        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.biasCorrect(bias=bias_master)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.flatCorrect(flat=flat_master)

        os.chdir(input_path)
        _ = p.writeOutputs().pop()
        os.chdir(path)
Example #17
    def do_slit_dark(self, get_or_create_tmpdir):
        """
        Reduce the test slit dark data.

        .. note::
            Fixture.
        """
        rawfilename = 'dark*slit*.fits'
        # Copy the raw data file into here
        tmpsubdir, cal_service = get_or_create_tmpdir
        # Find all the relevant files
        rawfiles = glob.glob(
            os.path.join(tmpsubdir.dirname, tmpsubdir.basename, rawfilename))

        # Do the master bias generation
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        reduce.files = rawfiles
        reduce.mode = [
            'test',
        ]
        reduce.recipename = 'recipeSlitDarkTest'
        # Make sure refresh is used for all primitives
        reduce.upars = [
            'refresh=True',
        ]
        # FIXME cal_service will hopefully find the calibration itself later
        calibs = {
            'processed_bias':
            glob.glob(
                os.path.join('calibrations', 'processed_bias',
                             '*slit*bias*.fits'))[0],
        }
        reduce.ucals = normalize_ucals(
            reduce.files, ['{}:{}'.format(k, v) for k, v in calibs.items()])
        reduce.logfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                      'reduce_slitdark.log')
        reduce.logmode = 'standard'
        reduce.suffix = '_testSlitDark'
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)
        reduce.runr()

        corrfilename = '*' + reduce.suffix + '.fits'
        corrfilename = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                    glob.glob(corrfilename)[0])
        corrfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                corrfilename)

        # Return filenames of raw, subtracted files
        yield rawfiles, corrfile, calibs

        # import pdb; pdb.set_trace()

        # Execute teardown code
        for _ in glob.glob(
                os.path.join(
                    os.getcwd(),
                    # rawfilename,
                    corrfilename,
                )):
            os.remove(_)
Example #18
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from gempy.utils import logutils
    from recipe_system.reduction.coreReduce import Reduce
    from recipe_system.utils.reduce_utils import normalize_ucals

    from geminidr.gmos.tests.spect import CREATED_INPUTS_PATH_FOR_TESTS

    associated_calibrations = {
        "N20180109S0287.fits": {
            'bias': [
                "N20180103S0563.fits", "N20180103S0564.fits",
                "N20180103S0565.fits", "N20180103S0566.fits",
                "N20180103S0567.fits"
            ],
            'flat': ["N20180109S0288.fits"],
            'arcs': ["N20180109S0315.fits"]
        }
    }

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))
    os.makedirs("inputs/", exist_ok=True)

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        bias_path = [download_from_archive(f) for f in cals['bias']]
        flat_path = [download_from_archive(f) for f in cals['flat']]
        arc_path = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        calibration_files = ['processed_bias:{}'.format(bias_master)]
        del bias_reduce

        print('Reducing FLAT for {:s}'.format(data_label))
        logutils.config(file_name='log_flat_{}.txt'.format(data_label))
        flat_reduce = Reduce()
        flat_reduce.files.extend(flat_path)
        flat_reduce.ucals = normalize_ucals(flat_reduce.files,
                                            calibration_files)
        flat_reduce.runr()
        flat_master = flat_reduce.output_filenames.pop()
        calibration_files.append('processed_flat:{}'.format(flat_master))
        del flat_reduce

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_path)
        arc_reduce.ucals = normalize_ucals(arc_reduce.files, calibration_files)
        arc_reduce.runr()
        arc_master = arc_reduce.output_filenames.pop()
        del arc_reduce

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None, user_bpm=None, add_illum_mask=False)
        p.addVAR(read_noise=True, poisson_noise=False)
        p.overscanCorrect(function="spline",
                          high_reject=3.,
                          low_reject=3.,
                          nbiascontam=0,
                          niterate=2,
                          order=None)
        p.biasCorrect(bias=bias_master, do_bias=True)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True, read_noise=False)
        p.flatCorrect(flat=flat_master)
        p.QECorrect(arc=arc_master)
        p.distortionCorrect(arc=arc_master, order=3, subsample=1)
        p.findSourceApertures(max_apertures=1,
                              threshold=0.01,
                              min_sky_region=20)
        p.skyCorrectFromSlit(order=5, grow=0)
        p.traceApertures(order=2,
                         nsum=10,
                         step=10,
                         max_missed=5,
                         max_shift=0.05)
        p.extract1DSpectra(grow=10, method="standard", width=None)

        os.chdir("inputs/")
        processed_ad = p.writeOutputs().pop()
        print('Wrote pre-processed file to:\n'
              '    {:s}'.format(processed_ad.filename))
        os.chdir("../")
Example #19
    def do_bias_subtract(self, get_or_create_tmpdir, request):
        """
        Perform basic bias subtraction on the dark frame.

        .. note::
            Fixture.
        """
        rawfilename = 'dark*{}*.fits'.format(request.param)
        # Copy the raw data file into here
        tmpsubdir, cal_service = get_or_create_tmpdir
        # Find all the relevant files
        # rawfiles = glob.glob(os.path.join(os.path.dirname(
        #     os.path.abspath(__file__)),
        #     'testdata',
        #     rawfilename))
        # for f in rawfiles:
        #     shutil.copy(f, os.path.join(tmpsubdir.dirname, tmpsubdir.basename))
        rawfiles = glob.glob(
            os.path.join(tmpsubdir.dirname, tmpsubdir.basename, rawfilename))

        # Do the master bias generation
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        reduce.files = rawfiles[0]
        reduce.mode = [
            'test',
        ]
        reduce.recipename = 'recipeDarkBiasCorrect'
        # reduce.mode = ['sq', ]
        # reduce.recipename = 'makeProcessedBias'
        reduce.logfile = os.path.join(
            tmpsubdir.dirname, tmpsubdir.basename,
            'reduce_biascorrect_{}.log'.format(request.param))
        reduce.logmode = 'quiet'
        reduce.suffix = '_{}_testBiasCorrect'.format(request.param)
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)
        # import pdb; pdb.set_trace()
        calibs = {
            'processed_bias':
            glob.glob(
                os.path.join('calibrations', 'processed_bias',
                             'bias*{}*.fits'.format(request.param)))[0],
        }
        reduce.ucals = normalize_ucals(
            reduce.files, ['{}:{}'.format(k, v) for k, v in calibs.items()])

        # import pdb;
        # pdb.set_trace()
        reduce.runr()

        corrfilename = '*' + reduce.suffix + '.fits'
        corrfilename = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                    glob.glob(corrfilename)[0])
        corrfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                corrfilename)

        # Return filenames of raw, subtracted files
        yield rawfiles, corrfile, calibs

        # Execute teardown code
        for _ in glob.glob(
                os.path.join(os.getcwd(), '*{}.fits'.format(reduce.suffix))):
            os.remove(_)
Example #20
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from recipe_system.utils.reduce_utils import normalize_ucals
    from recipe_system.reduction.coreReduce import Reduce

    associated_calibrations = {
        "N20180109S0287.fits": {
            "arcs": ["N20180109S0315.fits"],
            "bias": [
                "N20180103S0563.fits",
                "N20180103S0564.fits",
                "N20180103S0565.fits",
                "N20180103S0566.fits",
                "N20180103S0567.fits",
            ],
            "flat": ["N20180109S0288.fits"],
        }
    }

    root_path = os.path.join("./dragons_test_inputs/")
    module_path = "geminidr/gmos/spect/{}/".format(__file__.split('.')[0])
    path = os.path.join(root_path, module_path)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        bias_path = [download_from_archive(f) for f in cals['bias']]
        flat_path = [download_from_archive(f) for f in cals['flat']]
        arc_path = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        calibration_files = ['processed_bias:{}'.format(bias_master)]
        del bias_reduce

        print('Reducing FLAT for {:s}'.format(data_label))
        logutils.config(file_name='log_flat_{}.txt'.format(data_label))
        flat_reduce = Reduce()
        flat_reduce.files.extend(flat_path)
        flat_reduce.ucals = normalize_ucals(flat_reduce.files,
                                            calibration_files)
        flat_reduce.runr()
        flat_master = flat_reduce.output_filenames.pop()
        calibration_files.append('processed_flat:{}'.format(flat_master))
        del flat_reduce

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_path)
        arc_reduce.ucals = normalize_ucals(arc_reduce.files, calibration_files)
        arc_reduce.runr()
        arc_master = arc_reduce.output_filenames.pop()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.biasCorrect(bias=bias_master)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.flatCorrect(flat=flat_master)
        p.QECorrect(arc=arc_master)
        p.distortionCorrect(arc=arc_master)
        p.findSourceApertures(max_apertures=1)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()
        p.calculateSensitivity()

        os.chdir("inputs/")
        _ = p.writeOutputs().pop()
        os.chdir("../../")
Example #21
def test_reduce_image(change_working_dir):
    with change_working_dir():
        calib_files = []
        all_files = [download_from_archive(f) for f in datasets]
        all_files.sort()
        assert len(all_files) > 1

        darks_3s = dataselect.select_data(
            all_files, ['F2', 'DARK', 'RAW'], [],
            dataselect.expr_parser('exposure_time==3'))
        darks_3s.sort()

        darks_20s = dataselect.select_data(
            all_files, ['F2', 'DARK', 'RAW'], [],
            dataselect.expr_parser('exposure_time==20'))
        darks_20s.sort()

        darks_120s = dataselect.select_data(
            all_files, ['F2', 'DARK', 'RAW'], [],
            dataselect.expr_parser('exposure_time==120'))
        darks_120s.sort()

        flats = dataselect.select_data(
            all_files, ['F2', 'FLAT', 'RAW'], [],
            dataselect.expr_parser('filter_name=="Y"'))
        flats.sort()

        science = dataselect.select_data(
            all_files, ['F2', 'RAW'], ['CAL'],
            dataselect.expr_parser('filter_name=="Y"'))
        science.sort()

        for darks in [darks_3s, darks_20s, darks_120s]:
            reduce_darks = Reduce()
            assert len(reduce_darks.files) == 0

            reduce_darks.files.extend(darks)
            assert len(reduce_darks.files) == len(darks)

            logutils.config(file_name='f2_test_reduce_darks.log', mode='quiet')
            reduce_darks.runr()

            calib_files.append('processed_dark:{}'.format(
                reduce_darks.output_filenames[0]))

        logutils.config(file_name='f2_test_reduce_bpm.log', mode='quiet')
        reduce_bpm = Reduce()
        reduce_bpm.files.extend(flats)
        assert len(reduce_bpm.files) == len(flats)

        reduce_bpm.files.extend(darks_3s)
        assert len(reduce_bpm.files) == len(flats) + len(darks_3s)

        reduce_bpm.recipename = 'makeProcessedBPM'
        reduce_bpm.runr()

        bpm_filename = reduce_bpm.output_filenames[0]

        logutils.config(file_name='f2_test_reduce_flats.log', mode='quiet')
        reduce_flats = Reduce()
        reduce_flats.files.extend(flats)
        reduce_flats.uparms = [('addDQ:user_bpm', bpm_filename)]
        reduce_flats.runr()

        calib_files.append('processed_flat:{}'.format(
            reduce_flats.output_filenames[0]))

        logutils.config(file_name='f2_test_reduce_science.log', mode='quiet')
        reduce_target = Reduce()
        reduce_target.files.extend(science)
        reduce_target.uparms = [('addDQ:user_bpm', bpm_filename)]
        reduce_target.ucals = normalize_ucals(reduce_target.files, calib_files)
        reduce_target.runr()
Example #22
    def do_slit_arc(self, request, get_or_create_tmpdir):
        """
        Reduce the test slit arc data.

        .. note::
            Fixture.
        """

        # import pdb; pdb.set_trace()

        # rawfilename = '{}*slit*.fits'.format(slit_type)
        # Copy the raw data file into here
        tmpsubdir, cal_service = get_or_create_tmpdir
        slit_type, res = request.param
        filenames = glob.glob('{}*{}*slit.fits'.format(slit_type, res))

        # Do the master bias generation
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        reduce.mode = ['test', ]
        reduce.recipename = 'recipeSlitArcTest' if slit_type == 'arc' \
            else 'recipeSlitTest'
        # Make sure refresh is used for all primitives
        reduce.upars = ['refresh=True', ]
        # FIXME cal_service will hopefully find the calibration itself later
        calibs = {
            'processed_bias': glob.glob(os.path.join(
                'calibrations',
                'processed_bias',
                '*slit*bias*.fits'))[0],
            'processed_dark': glob.glob(os.path.join(
                    'calibrations',
                    'processed_dark',
                    '*slit*dark*.fits'))[0],
            'processed_slitflat': glob.glob(os.path.join(
                'calibrations',
                'processed_slitflat',
                '*{}*slit*slitflat*.fits'.format(res, )))[0]
        }
        reduce.logfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                      'reduce_slit{}.log'.format(slit_type))
        reduce.logmode = 'standard'
        reduce.suffix = '_{}_testSlit'.format(slit_type)
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)

        corrfiles = []
        for filename in filenames:
            reduce.files = [filename, ]
            reduce.ucals = normalize_ucals(reduce.files, [
                '{}:{}'.format(k, v) for k, v in calibs.items()
            ])
            reduce.runr()

            # import pdb; pdb.set_trace()
            corrfilename = '*' + reduce.suffix + '.fits'
            corrfilename = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                        glob.glob(corrfilename)[0])
            corrfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                    corrfilename)
            corrfiles.append(corrfile)

        # Return filenames of raw, subtracted files
        yield filenames, corrfiles, calibs

        # import pdb; pdb.set_trace()

        # Execute teardown code
        for _ in glob.glob(os.path.join(
                os.getcwd(),
                # rawfilename,
                corrfilename,
        )):
            os.remove(_)
Example #23
def test_reduce_image_GS_HAM_2x2_i_std(path_to_inputs):
    logutils.config(file_name='gmos_test_reduce_image_GS_HAM_1x1_i.log')

    calib_files = []

    raw_subdir = 'GMOS/GS-2017B-Q-6'

    all_files = sorted(glob.glob(
        os.path.join(path_to_inputs, raw_subdir, '*.fits')))
    assert len(all_files) > 1

    list_of_sci_bias = dataselect.select_data(
        all_files,
        ['BIAS'],
        [],
        dataselect.expr_parser('detector_x_bin==2 and detector_y_bin==2')
    )

    list_of_sci_flats = dataselect.select_data(
        all_files,
        ['TWILIGHT'],
        [],
        dataselect.expr_parser(
            'filter_name=="i" and detector_x_bin==2 and detector_y_bin==2'
        )
    )

    list_of_science_files = dataselect.select_data(
        all_files, [],
        [],
        dataselect.expr_parser(
            'observation_class=="partnerCal" and filter_name=="i"'
        )
    )

    reduce_bias = Reduce()
    assert len(reduce_bias.files) == 0

    reduce_bias.files.extend(list_of_sci_bias)
    assert len(reduce_bias.files) == len(list_of_sci_bias)

    reduce_bias.runr()

    calib_files.append(
        'processed_bias:{}'.format(reduce_bias.output_filenames[0])
    )

    reduce_flats = Reduce()
    reduce_flats.files.extend(list_of_sci_flats)
    # reduce_flats.uparms = [('addDQ:user_bpm', 'fixed_bpm_2x2_FullFrame.fits')]
    reduce_flats.ucals = normalize_ucals(reduce_flats.files, calib_files)
    reduce_flats.runr()

    calib_files.append(
        'processed_flat:{}'.format(reduce_flats.output_filenames[0])
    )

    reduce_target = Reduce()
    reduce_target.files.extend(list_of_science_files)
    reduce_target.ucals = normalize_ucals(reduce_target.files, calib_files)
    reduce_target.uparms = [
        ('stackFrames:memory', 1),
        # ('addDQ:user_bpm', 'fixed_bpm_2x2_FullFrame.fits'),
        ('resampleToCommonFrame:interpolator', 'spline3')
    ]
    reduce_target.runr()
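
Finally, note the uparms convention used in this and several earlier examples: a list of ('primitive:parameter', value) tuples, set alongside ucals before runr(). A short hedged sketch with hypothetical filenames:

reduce_target = Reduce()
reduce_target.files.extend(['hypothetical_science.fits'])
reduce_target.ucals = normalize_ucals(reduce_target.files,
                                      ['processed_flat:hypothetical_flat.fits'])
# Each tuple overrides one parameter of one primitive, e.g. the user bad-pixel
# mask for addDQ or the interpolator used by resampleToCommonFrame.
reduce_target.uparms = [('addDQ:user_bpm', 'hypothetical_bpm.fits'),
                        ('resampleToCommonFrame:interpolator', 'spline3')]
reduce_target.runr()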