Example #1
def create_master_bias_for_tests():
    """
    Creates input bias data for tests.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    root_path = os.path.join("./dragons_test_inputs/")
    module_path = f"geminidr/gmos/recipes/ql/{__file__.split('.')[0]}/"
    path = os.path.join(root_path, module_path, "inputs/")

    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    associated_biases = {
        "S20180707S0043.fits": [
            "S20180707S0187.fits", "S20180707S0188.fits",
            "S20180707S0189.fits", "S20180707S0190.fits", "S20180707S0191.fits"
        ],
        "S20190502S0096.fits": [
            "S20190502S0221.fits", "S20190502S0222.fits",
            "S20190502S0223.fits", "S20190502S0224.fits", "S20190502S0225.fits"
        ],
        "S20200122S0020.fits": [
            "S20200121S0170.fits", "S20200121S0171.fits",
            "S20200121S0172.fits", "S20200121S0173.fits", "S20200121S0174.fits"
        ],
        "N20200101S0055.fits": [
            "N20200101S0240.fits", "N20200101S0241.fits",
            "N20200101S0242.fits", "N20200101S0243.fits", "N20200101S0244.fits"
        ],
        "S20180410S0120.fits": [
            "S20180410S0132.fits", "S20180410S0133.fits",
            "S20180410S0134.fits", "S20180410S0135.fits", "S20180410S0136.fits"
        ],
        "S20190410S0053.fits": [
            "S20190410S0297.fits", "S20190410S0298.fits",
            "S20190410S0299.fits", "S20190410S0300.fits", "S20190410S0301.fits"
        ],
    }

    for filename, bias_files in associated_biases.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        bias_paths = [download_from_archive(f) for f in bias_files]

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_paths)
        bias_reduce.runr()

        shutil.rmtree("calibrations/")
Example #2
    def _reduce_bias(datalabel, bias_fnames):
        with output_path():
            logutils.config(file_name='log_bias_{}.txt'.format(datalabel))

            reduce = Reduce()
            reduce.files.extend(bias_fnames)
            reduce.runr()

            master_bias = reduce.output_filenames.pop()

        return master_bias
Example #3
    def _reduce_arc(dlabel, arc_fnames):
        with output_path():
            # Use config to prevent duplicated outputs when running Reduce via API
            logutils.config(file_name='log_arc_{}.txt'.format(dlabel))

            reduce = Reduce()
            reduce.files.extend(arc_fnames)
            reduce.runr()

            master_arc = reduce.output_filenames.pop()
        return master_arc
Example #4
    def _reduce_bias(datalabel, bias_fnames):
        with change_working_dir():
            print("Reducing BIAS in folder:\n  {}".format(os.getcwd()))
            logutils.config(file_name='log_bias_{}.txt'.format(datalabel))

            reduce = Reduce()
            reduce.files.extend(bias_fnames)
            reduce.runr()

            master_bias = reduce.output_filenames.pop()

        return master_bias
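A minimal usage sketch for the helper above, assuming it is available in scope; the data label is a placeholder and the bias file names are borrowed from Example #1 purely for illustration:

# Hypothetical call: turn a handful of raw bias frames into one master bias.
bias_fnames = ["S20180707S0187.fits", "S20180707S0188.fits", "S20180707S0189.fits"]
master_bias = _reduce_bias("GS-2018A-Q-000-00-000", bias_fnames)
print(master_bias)  # e.g. "S20180707S0187_bias.fits"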
Example #5
    def test_can_run_reduce_bias(config):
        """
        Make sure that reducing the BIAS frames works for spectroscopic data.
        """
        logutils.config(mode='quiet',
                        file_name=os.path.join(config.log_dir,
                                               'reduce_GMOS_LS_bias.log'))

        reduce = Reduce()
        reduce.files.extend(config.biases)
        reduce.upload = 'calibs'
        reduce.runr()
Example #6
    def test_can_run_reduce_science(config):
        """
        Make sure that reducing the science frames works for spectroscopic data.
        """
        logutils.config(mode='quiet',
                        file_name=os.path.join(config.log_dir,
                                               'reduce_GMOS_LS_science.log'))

        reduce = Reduce()
        reduce.files.extend(config.science)
        reduce.upload = 'calibs'
        reduce.runr()
Example #7
    def _reduce_arc(dlabel, arc_fnames):
        with change_working_dir():
            print("Reducing ARCs in folder:\n  {}".format(os.getcwd()))
            # Use config to prevent duplicated outputs when running Reduce via API
            logutils.config(file_name='log_arc_{}.txt'.format(dlabel))

            reduce = Reduce()
            reduce.files.extend(arc_fnames)
            reduce.runr()

            master_arc = reduce.output_filenames.pop()
        return master_arc
Example #8
def reduce(file_list,
           label,
           calib_files,
           recipe_name=None,
           save_to=None,
           user_pars=None):
    """
    Helper function used to avoid code duplication.

    Parameters
    ----------
    file_list : list
        List of files that will be reduced.
    label : str
        Label used in the log file name.
    calib_files : list
        List of calibration files properly formatted for DRAGONS Reduce().
    recipe_name : str, optional
        Name of the recipe used to reduce the data.
    save_to : str, optional
        Calibration type (e.g. "processed_bias") under which the output is
        appended to `calib_files`; the local output copies are then removed.
    user_pars : list, optional
        List of user parameters passed to `Reduce`.

    Returns
    -------
    str : Output reduced file.
    list : An updated list of calibration files.
    """
    objgraph = pytest.importorskip("objgraph")

    logutils.get_logger().info("\n\n\n")
    logutils.config(file_name=f"test_image_{label}.log")
    r = Reduce()
    r.files = file_list
    r.ucals = normalize_ucals(r.files, calib_files)
    r.uparms = user_pars

    if recipe_name:
        r.recipename = recipe_name

    r.runr()
    output_file = r.output_filenames[0]

    if save_to:
        calib_files.append("{}:{}".format(
            save_to,
            os.path.join("calibrations", save_to, r.output_filenames[0])))
        [os.remove(f) for f in r.output_filenames]

    # check that we are not leaking objects
    assert len(objgraph.by_type('NDAstroData')) == 0

    return output_file, calib_files
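A sketch of how the helper above might be chained; the file names are placeholders, and the point is that the returned calibration list is threaded from one call into the next so later reductions pick up the processed calibrations via `ucals`:

calib_files = []

# Hypothetical flat reduction whose output is registered as a user calibration.
_, calib_files = reduce(["N20170101S0001.fits"], "flat", calib_files,
                        save_to="processed_flat")

# Hypothetical science reduction that receives the processed flat through ucals.
sci_file, calib_files = reduce(["N20170101S0010.fits"], "sci", calib_files)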
Example #9
def create_master_bias_for_tests():
    """
    Creates input bias data for tests.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory.

    Processed files will be stored inside a new folder called
    "./dragons_test_inputs". The sub-directory structure should reflect the one
    returned by the `path_to_inputs` fixture.
    """
    bias_datasets = [
        [f"N20190101S{n:04d}.fits" for n in range(494, 499)],
    ]

    root_path = os.path.join("./dragons_test_inputs/")
    module_path = "geminidr/gmos/recipes/qa/{:s}/inputs".format(
        __file__.split('.')[0])
    path = os.path.join(root_path, module_path)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)

    print('\n  Current working directory:\n    {:s}'.format(os.getcwd()))

    for filenames in bias_datasets:

        print('  Downloading files...')
        paths = [download_from_archive(f) for f in filenames]

        f = paths[0]
        ad = astrodata.open(f)

        if not os.path.exists(f.replace('.fits', '_bias.fits')):
            print(f"  Creating input file:\n")
            print(f"    {os.path.basename(f).replace('.fits', '_bias.fits')}")
            logutils.config(file_name=f'log_bias_{ad.data_label()}.txt')
            r = Reduce()
            r.files.extend(paths)
            r.runr()
        else:
            print(f"  Input file already exists:")
            print(f"    {f.replace('.fits', '_bias.fits')}")
Example #10
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed twilight flat data and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """

    _associated_calibrations = {
        "S20190204S0006.fits": {
            "bias": ["S20190203S0110.fits",
                     "S20190203S0109.fits",
                     "S20190203S0108.fits",
                     "S20190203S0107.fits",
                     "S20190203S0106.fits"],
        }
    }

    root_path = os.path.join("./dragons_test_inputs/")
    module_path = "geminidr/gmos/recipes/sq/test_make_processed_slit_illum/inputs"
    path = os.path.join(root_path, module_path)

    os.makedirs(path, exist_ok=True)
    os.chdir(path)

    print('Current working directory:\n    {:s}'.format(os.getcwd()))
    for filename, cals in _associated_calibrations.items():

        print('Downloading files...')
        bias_path = [download_from_archive(f) for f in cals['bias']]

        print('Reducing BIAS for {:s}'.format(filename))
        logutils.config(
            file_name='log_bias_{}.txt'.format(filename.replace(".fits", "")))

        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
Example #11
def test_primitive_not_found():
    testfile = download_from_archive("N20160524S0119.fits")

    red = Reduce()
    red.files = [testfile]
    red.recipename = 'foobar'
    with pytest.raises(RecipeNotFound, match='No primitive named foobar'):
        red.runr()
Example #12
def test_mode_not_found():
    testfile = download_from_archive("N20160524S0119.fits")

    red = Reduce()
    red.files = [testfile]
    red.mode = 'aa'
    with pytest.raises(RecipeNotFound,
                       match="GMOS recipes do not define a 'aa' recipe"):
        red.runr()
Example #13
def processed_slit_illum(change_working_dir, path_to_inputs, request):
    """
    Returns the processed slit illumination function that will be analysed.

    Parameters
    ----------
    change_working_dir : pytest.fixture
        Fixture that changes the working directory (see :mod:`astrodata.testing`).
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.
    request : pytest.fixture
        PyTest built-in fixture containing information about parent test.

    Returns
    -------
    AstroData
        Processed slit illumination frame produced by the
        `makeProcessedSlitIllum` recipe.
    """
    twi_filename = request.param
    twi_path = download_from_archive(twi_filename)
    twi_ad = astrodata.open(twi_path)

    print(twi_ad.tags)

    master_bias = os.path.join(
        path_to_inputs, associated_calibrations[twi_filename])

    assert os.path.exists(master_bias)

    calibration_files = ['processed_bias:{}'.format(master_bias)]

    with change_working_dir():
        print("Reducing SLITILLUM in folder:\n  {}".format(os.getcwd()))
        logutils.config(
            file_name='log_flat_{}.txt'.format(twi_ad.data_label()))

        reduce = Reduce()
        reduce.files.extend([twi_path])
        reduce.mode = 'sq'
        reduce.recipename = 'makeProcessedSlitIllum'
        reduce.ucals = normalize_ucals(reduce.files, calibration_files)
        reduce.runr()

        _processed_twi_filename = reduce.output_filenames.pop()
        _processed_twi = astrodata.open(_processed_twi_filename)

    return _processed_twi
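The fixture above reads its input file name from `request.param`, so tests would normally request it through indirect parametrization; a minimal sketch (the dataset name is illustrative only):

@pytest.mark.parametrize("processed_slit_illum", ["S20190204S0006.fits"], indirect=True)
def test_processed_slit_illum_is_loaded(processed_slit_illum):
    # The fixture returns an AstroData object, so it should contain extensions.
    assert len(processed_slit_illum) > 0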
Example #14
def test_make_processed_flat(change_working_dir, flat_fnames, master_bias,
                             path_to_inputs):
    """
    Regression test for GMOS processed flats.

    Parameters
    ----------
    change_working_dir : fixture
        Custom fixture defined in astrodata.testing that temporarily changes
        the working directory.
    flat_fnames : list
        Contains the flat names that will be reduced.
    master_bias : str
        Contains the name of the master bias.
    path_to_inputs : fixture
        Custom fixture that defines where the input data is stored.
    """
    master_bias = os.path.join(path_to_inputs, master_bias)
    calibration_files = ['processed_bias:{}'.format(master_bias)]

    with change_working_dir():
        logutils.config(
            file_name=f"log_flat_{flat_fnames[0].split('.')[0]}.txt")
        r = Reduce()
        r.files = [download_from_archive(f) for f in flat_fnames]
        r.mode = 'qa'
        r.ucals = normalize_ucals(r.files, calibration_files)
        r.runr()

        # Delete files that won't be used
        shutil.rmtree('calibrations/')
        [os.remove(f) for f in glob.glob('*_forStack.fits')]

        ad = astrodata.open(r.output_filenames[0])

    for ext in ad:
        data = np.ma.masked_array(ext.data, mask=ext.mask)

        if not data.mask.all():
            np.testing.assert_allclose(np.ma.median(data.ravel()),
                                       1,
                                       atol=0.071)
            np.testing.assert_allclose(data[~data.mask], 1, atol=0.45)
Example #15
    def _reduce_flat(datalabel, flat_fname, master_bias):
        with output_path():
            logutils.config(file_name='log_flat_{}.txt'.format(datalabel))

            calibration_files = ['processed_bias:{}'.format(master_bias)]

            reduce = Reduce()
            reduce.files.extend([flat_fname])
            reduce.mode = 'ql'
            reduce.ucals = normalize_ucals(reduce.files, calibration_files)
            reduce.runr()

            master_flat = reduce.output_filenames.pop()
            master_flat_ad = astrodata.open(master_flat)

        return master_flat_ad
Example #16
    def reduce(filelist, saveto=None, label='', calib_files=None,
               recipename=None):
        red = Reduce()
        assert len(red.files) == 0
        red.files.extend(filelist)
        assert len(red.files) == len(filelist)
        if calib_files:
            red.ucals = normalize_ucals(red.files, calib_files)
        if recipename:
            red.recipename = recipename
        red.runr()
        if saveto:
            calib_files.append(f'{saveto}:{red.output_filenames[0]}')

        # check that we are not leaking objects
        assert len(objgraph.by_type('NDAstroData')) == 0
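The `saveto` handling above builds user calibrations as plain "caltype:filename" strings; a minimal sketch of that format, following the two-argument `normalize_ucals` call used throughout these examples (paths are illustrative):

calib_files = [
    "processed_bias:calibrations/processed_bias/S20180707S0187_bias.fits",
    "processed_flat:calibrations/processed_flat/S20180410S0132_flat.fits",
]

red = Reduce()
red.files = ["S20180707S0043.fits"]
# normalize_ucals maps the "caltype:path" strings onto the input files.
red.ucals = normalize_ucals(red.files, calib_files)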
Example #17
def processed_flat(change_working_dir, path_to_inputs, request):
    """
    Returns the processed flat that will be analysed.

    Parameters
    ----------
    change_working_dir : pytest.fixture
        Fixture that changes the working directory (see :mod:`astrodata.testing`).
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.
    request : pytest.fixture
        PyTest built-in fixture containing information about parent test.

    Returns
    -------
    AstroData
        Processed flat frame.
    """
    np.random.seed(0)

    flat_filename = request.param
    flat_path = download_from_archive(flat_filename)
    flat_raw = astrodata.open(flat_path)

    master_bias = os.path.join(path_to_inputs,
                               associated_calibrations[flat_filename])
    calibration_files = ['processed_bias:{}'.format(master_bias)]

    with change_working_dir():
        print("Reducing FLATs in folder:\n  {}".format(os.getcwd()))
        logutils.config(
            file_name='log_flat_{}.txt'.format(flat_raw.data_label()))

        reduce = Reduce()
        reduce.files.extend([flat_path])
        reduce.mode = 'ql'
        reduce.ucals = normalize_ucals(reduce.files, calibration_files)
        reduce.runr()

        # Clean up duplicated files
        shutil.rmtree('calibrations/')

        _processed_flat_filename = reduce.output_filenames.pop()
        _processed_flat = astrodata.open(_processed_flat_filename)

    return _processed_flat
Example #18
    def _reduce_flat(data_label, flat_fnames, master_bias):
        with change_working_dir():
            print("Reducing FLATs in folder:\n  {}".format(os.getcwd()))
            logutils.config(file_name='log_flat_{}.txt'.format(data_label))

            calibration_files = ['processed_bias:{}'.format(master_bias)]

            reduce = Reduce()
            reduce.files.extend(flat_fnames)
            reduce.mode = 'ql'
            reduce.ucals = normalize_ucals(reduce.files, calibration_files)
            reduce.runr()

            master_flat = reduce.output_filenames.pop()
            master_flat_ad = astrodata.open(master_flat)

        return master_flat_ad
Example #19
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from geminidr.gmos.tests.spect import CREATED_INPUTS_PATH_FOR_TESTS
    from gempy.utils import logutils
    from recipe_system.reduction.coreReduce import Reduce

    input_data = {
        "N20180508S0021.fits": {"arc": "N20180615S0409.fits", "center": 244},
        "N20180509S0010.fits": {"arc": "N20180509S0080.fits", "center": 259},
        "N20180516S0081.fits": {"arc": "N20180516S0214.fits", "center": 255},
        "N20190201S0163.fits": {"arc": "N20190201S0162.fits", "center": 255},
        "N20190313S0114.fits": {"arc": "N20190313S0132.fits", "center": 254},
        "N20190427S0123.fits": {"arc": "N20190427S0266.fits", "center": 260},
        "N20190427S0126.fits": {"arc": "N20190427S0267.fits", "center": 259},
        "N20190427S0127.fits": {"arc": "N20190427S0268.fits", "center": 258},
        "N20190427S0141.fits": {"arc": "N20190427S0270.fits", "center": 264},
    }

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs/", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, pars in input_data.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        arc_path = download_from_archive(pars['arc'])

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend([arc_path])
        arc_reduce.runr()
        arc = arc_reduce.output_filenames.pop()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_spect.GMOSSpect([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.mosaicDetectors()
        p.distortionCorrect(arc=arc)
        _ad = p.makeIRAFCompatible().pop()
        _ad = _add_aperture_table(_ad, pars['center'])

        p = primitives_gmos_spect.GMOSSpect([_ad])
        p.traceApertures(trace_order=2, nsum=20, step=10, max_shift=0.09, max_missed=5)

        os.chdir("inputs/")
        processed_ad = p.writeOutputs().pop()
        os.chdir("../")
        print('Wrote pre-processed file to:\n'
              '    {:s}'.format(processed_ad.filename))
Example #20
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from recipe_system.utils.reduce_utils import normalize_ucals
    from recipe_system.reduction.coreReduce import Reduce

    associated_calibrations = {
        "N20180109S0287.fits": {
            "arcs": ["N20180109S0315.fits"],
            "bias": [
                "N20180103S0563.fits",
                "N20180103S0564.fits",
                "N20180103S0565.fits",
                "N20180103S0566.fits",
                "N20180103S0567.fits",
            ],
            "flat": ["N20180109S0288.fits"],
        }
    }

    root_path = os.path.join("./dragons_test_inputs/")
    module_path = "geminidr/gmos/spect/{}/".format(__file__.split('.')[0])
    path = os.path.join(root_path, module_path)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        bias_path = [download_from_archive(f) for f in cals['bias']]
        flat_path = [download_from_archive(f) for f in cals['flat']]
        arc_path = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        calibration_files = ['processed_bias:{}'.format(bias_master)]
        del bias_reduce

        print('Reducing FLAT for {:s}'.format(data_label))
        logutils.config(file_name='log_flat_{}.txt'.format(data_label))
        flat_reduce = Reduce()
        flat_reduce.files.extend(flat_path)
        flat_reduce.ucals = normalize_ucals(flat_reduce.files,
                                            calibration_files)
        flat_reduce.runr()
        flat_master = flat_reduce.output_filenames.pop()
        calibration_files.append('processed_flat:{}'.format(flat_master))
        del flat_reduce

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_path)
        arc_reduce.ucals = normalize_ucals(arc_reduce.files, calibration_files)
        arc_reduce.runr()
        arc_master = arc_reduce.output_filenames.pop()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.biasCorrect(bias=bias_master)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.flatCorrect(flat=flat_master)
        p.QECorrect(arc=arc_master)
        p.distortionCorrect(arc=arc_master)
        p.findSourceApertures(max_apertures=1)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()
        p.calculateSensitivity()

        os.chdir("inputs/")
        _ = p.writeOutputs().pop()
        os.chdir("../../")
Example #21
    def do_master_flat(self, get_or_create_tmpdir, request):
        """
        Run the recipeFlatCreateMaster recipe.

        .. note::
            Fixture.
        """
        arm, res = request.param
        rawfilename = 'flat*{}*{}*.fits'.format(res, arm)
        # Copy the raw data file into here
        tmpsubdir, cal_service = get_or_create_tmpdir
        # Find all the relevant files
        rawfiles = glob.glob(
            os.path.join(tmpsubdir.dirname, tmpsubdir.basename, rawfilename))

        # Do the master flat generation
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        reduce.files = rawfiles
        reduce.mode = [
            'test',
        ]
        reduce.recipename = 'recipeFlatCreateMaster'
        # reduce.mode = ['sq', ]
        # reduce.recipename = 'makeProcessedBias'
        reduce.logfile = os.path.join(
            tmpsubdir.dirname, tmpsubdir.basename,
            'reduce_masterflat_{}_{}.log'.format(res, arm))
        reduce.logmode = 'quiet'
        reduce.suffix = '_{}_{}_testMasterFlat'.format(res, arm)
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)
        calibs = {
            'processed_bias':
            glob.glob(
                os.path.join('calibrations', 'processed_bias',
                             'bias*{}*.fits'.format(arm)))[0],
            'processed_dark':
            glob.glob(
                os.path.join('calibrations', 'processed_dark',
                             'dark*{}*.fits'.format(arm)))[0],
            'processed_slitflat':
            glob.glob(
                os.path.join('calibrations', 'processed_slitflat',
                             'flat*{}*slitflat*.fits'.format(res)))[0],
        }
        reduce.ucals = normalize_ucals(
            reduce.files, ['{}:{}'.format(k, v) for k, v in calibs.items()])

        reduce.runr()

        corrfilename = '*' + reduce.suffix + '.fits'
        corrfilename = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                    glob.glob(corrfilename)[0])
        corrfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                corrfilename)

        # Return filenames of raw, subtracted files
        yield rawfiles, corrfile, calibs

        # Execute teardown code
        pass
Example #22
def test_reduce_image(test_path, caldb):

    logutils.config(file_name='f2_test_reduce_image.log')

    caldb.init(wipe=True)

    all_files = glob.glob(
        os.path.join(test_path, 'F2/test_reduce/', '*.fits'))
    assert len(all_files) > 1

    darks_3s = dataselect.select_data(
        all_files, ['F2', 'DARK', 'RAW'], [],
        dataselect.expr_parser('exposure_time==3'))

    darks_20s = dataselect.select_data(
        all_files, ['F2', 'DARK', 'RAW'], [],
        dataselect.expr_parser('exposure_time==20'))

    darks_120s = dataselect.select_data(
        all_files, ['F2', 'DARK', 'RAW'], [],
        dataselect.expr_parser('exposure_time==120'))

    flats = dataselect.select_data(
        all_files, ['F2', 'FLAT', 'RAW'], [],
        dataselect.expr_parser('filter_name=="Y"'))

    science = dataselect.select_data(
        all_files, ['F2', 'RAW'], ['CAL'],
        dataselect.expr_parser('filter_name=="Y"'))

    for darks in [darks_3s, darks_20s, darks_120s]:

        reduce_darks = Reduce()
        assert len(reduce_darks.files) == 0

        reduce_darks.files.extend(darks)
        assert len(reduce_darks.files) == len(darks)

        reduce_darks.runr()

        caldb.add_cal(reduce_darks.output_filenames[0])

    reduce_bpm = Reduce()
    reduce_bpm.files.extend(flats)
    reduce_bpm.files.extend(darks_3s)
    reduce_bpm.recipename = 'makeProcessedBPM'
    reduce_bpm.runr()

    bpm_filename = reduce_bpm.output_filenames[0]

    reduce_flats = Reduce()
    reduce_flats.files.extend(flats)
    reduce_flats.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_flats.runr()

    caldb.add_cal(reduce_flats.output_filenames[0])

    reduce_target = Reduce()
    reduce_target.files.extend(science)
    reduce_target.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_target.runr()

    for f in caldb.list_files():
        print(f)
Example #23
def test_reduce_image(test_path, caldb):

    logutils.config(file_name='gsaoi_test_reduce_image.log')

    caldb.init(wipe=True)

    all_files = glob.glob(
        os.path.join(test_path, 'GSAOI/test_reduce/', '*.fits'))
    assert len(all_files) > 1

    list_of_darks = dataselect.select_data(
        all_files, ['DARK'], [])

    list_of_kshort_flats = dataselect.select_data(
        all_files, ['FLAT'], [],
        dataselect.expr_parser('filter_name=="Kshort"'))

    list_of_h_flats = dataselect.select_data(
        all_files, ['FLAT'], [],
        dataselect.expr_parser('filter_name=="H"'))

    list_of_std_LHS_2026 = dataselect.select_data(
        all_files, [], [],
        dataselect.expr_parser('object=="LHS 2026"'))

    list_of_std_cskd8 = dataselect.select_data(
        all_files, [], [],
        dataselect.expr_parser('object=="cskd-8"'))

    list_of_science_files = dataselect.select_data(
        all_files, [], [],
        dataselect.expr_parser('observation_class=="science" and exposure_time==60.'))

    for darks in [list_of_darks]:

        reduce_darks = Reduce()
        assert len(reduce_darks.files) == 0

        reduce_darks.files.extend(darks)
        assert len(reduce_darks.files) == len(darks)

        reduce_darks.runr()

        caldb.add_cal(reduce_darks.output_filenames[0])

    reduce_bpm = Reduce()
    reduce_bpm.files.extend(list_of_h_flats)
    reduce_bpm.files.extend(list_of_darks)
    reduce_bpm.recipename = 'makeProcessedBPM'
    reduce_bpm.runr()

    bpm_filename = reduce_bpm.output_filenames[0]

    reduce_flats = Reduce()
    reduce_flats.files.extend(list_of_kshort_flats)
    reduce_flats.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_flats.runr()

    caldb.add_cal(reduce_flats.output_filenames[0])

    reduce_target = Reduce()
    reduce_target.files.extend(list_of_science_files)
    reduce_target.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_target.runr()

    for f in caldb.list_files():
        print(f)
Example #24
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from recipe_system.reduction.coreReduce import Reduce
    from gempy.utils import logutils

    associated_calibrations = {
        "S20190808S0048.fits": {
            "arcs": ["S20190808S0167.fits"]
        },
        "S20190808S0049.fits": {
            "arcs": ["S20190808S0168.fits"]
        },
        # "S20190808S0052.fits": {"arcs": ["S20190808S0165.fits"]}, # Can't find aperture
        "S20190808S0053.fits": {
            "arcs": ["S20190808S0169.fits"]
        },
    }

    root_path = os.path.join("./dragons_test_inputs/")
    module_path = "geminidr/gmos/spect/{}".format(__file__.split('.')[0])
    path = os.path.join(root_path, module_path)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("./inputs", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        arc_path = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_path)
        arc_reduce.runr()
        arc = arc_reduce.output_filenames.pop()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.mosaicDetectors()
        p.distortionCorrect(arc=arc)
        p.findSourceApertures(max_apertures=1)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()

        os.chdir("inputs/")
        _ = p.writeOutputs().pop()
        os.chdir("../")
Example #25
def test_reduce_image(path_to_inputs):
    calib_files = []

    all_files = glob.glob(
        os.path.join(path_to_inputs, 'GSAOI/test_reduce/', '*.fits'))

    all_files.sort()

    assert len(all_files) > 1

    list_of_darks = dataselect.select_data(all_files, ['DARK'], [])
    list_of_darks.sort()

    list_of_kshort_flats = dataselect.select_data(
        all_files, ['FLAT'], [],
        dataselect.expr_parser('filter_name=="Kshort"'))
    list_of_kshort_flats.sort()

    list_of_h_flats = dataselect.select_data(
        all_files, ['FLAT'], [], dataselect.expr_parser('filter_name=="H"'))
    list_of_h_flats.sort()

    list_of_science_files = dataselect.select_data(
        all_files, [], [],
        dataselect.expr_parser(
            'observation_class=="science" and exposure_time==60.'))
    list_of_science_files.sort()

    for darks in [list_of_darks]:
        reduce_darks = Reduce()
        assert len(reduce_darks.files) == 0

        reduce_darks.files.extend(darks)
        assert len(reduce_darks.files) == len(darks)

        logutils.config(file_name='gsaoi_test_reduce_dark.log', mode='quiet')
        reduce_darks.runr()

        del reduce_darks

    logutils.config(file_name='gsaoi_test_reduce_bpm.log', mode='quiet')
    reduce_bpm = Reduce()
    reduce_bpm.files.extend(list_of_h_flats)
    reduce_bpm.files.extend(list_of_darks)
    reduce_bpm.recipename = 'makeProcessedBPM'
    reduce_bpm.runr()

    bpm_filename = reduce_bpm.output_filenames[0]

    del reduce_bpm

    logutils.config(file_name='gsaoi_test_reduce_flats.log', mode='quiet')
    reduce_flats = Reduce()
    reduce_flats.files.extend(list_of_kshort_flats)
    reduce_flats.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_flats.runr()

    calib_files.append('processed_flat:{}'.format(
        reduce_flats.output_filenames[0]))

    del reduce_flats

    logutils.config(file_name='gsaoi_test_reduce_science.log', mode='quiet')
    reduce_target = Reduce()
    reduce_target.files.extend(list_of_science_files)
    reduce_target.uparms = [('addDQ:user_bpm', bpm_filename)]
    reduce_target.ucals = normalize_ucals(reduce_target.files, calib_files)
    reduce_target.runr()

    del reduce_target
Example #26
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from geminidr.gmos.tests.spect import CREATED_INPUTS_PATH_FOR_TESTS
    from gempy.utils import logutils
    from recipe_system.reduction.coreReduce import Reduce

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs", exist_ok=True)
    cwd = os.getcwd()

    associated_arcs = {
        "N20180109S0287.fits":
        "N20180109S0315.fits",  # GN-2017B-FT-20-13-001 B600 0.505um
        "N20190302S0089.fits":
        "N20190302S0274.fits",  # GN-2019A-Q-203-7-001 B600 0.550um
        "N20190313S0114.fits":
        "N20190313S0132.fits",  # GN-2019A-Q-325-13-001 B600 0.482um
        "N20190427S0126.fits":
        "N20190427S0267.fits",  # GN-2019A-FT-206-7-004 R400 0.625um
        "N20190910S0028.fits":
        "N20190910S0279.fits",  # GN-2019B-Q-313-5-001 B600 0.550um
        "S20180919S0139.fits":
        "S20180919S0141.fits",  # GS-2018B-Q-209-13-003 B600 0.45um
        "S20191005S0051.fits":
        "S20191005S0147.fits",  # GS-2019B-Q-132-35-001 R400 0.73um
    }

    for sci_fname, arc_fname in associated_arcs.items():

        sci_path = download_from_archive(sci_fname)
        arc_path = download_from_archive(arc_fname)

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend([arc_path])
        # arc_reduce.ucals = normalize_ucals(arc_reduce.files, calibration_files)

        os.chdir("inputs/")
        arc_reduce.runr()
        arc_ad = arc_reduce.output_filenames.pop()

        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = GMOSSpect([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.distortionCorrect(arc=arc_ad)
        p.writeOutputs()
        os.chdir("../")

    os.chdir(cwd)
Example #27
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """

    input_data = {
        "N20180508S0021.fits": {
            "arc": "N20180615S0409.fits",
        },
        "S20190204S0079.fits": {
            "arc": "S20190206S0030.fits",
        },
        "S20181024S0035.fits": {
            "arc": "S20181025S0012.fits",
        },
    }

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs/", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, pars in input_data.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        arc_path = download_from_archive(pars['arc'])

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend([arc_path])
        arc_reduce.runr()
        arc = arc_reduce.output_filenames.pop()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ()
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.mosaicDetectors()
        p.distortionCorrect(arc=arc)
        p.findSourceApertures(max_apertures=1)
        p.traceApertures(order=2,
                         nsum=20,
                         step=10,
                         max_shift=0.09,
                         max_missed=5)

        os.chdir("inputs/")
        processed_ad = p.writeOutputs().pop()
        os.chdir("../")
        print(f'Wrote pre-processed file to:\n    {processed_ad.filename}')
Example #28
def create_inputs():
    """
    Create inputs for `test_plot_spectra_for_qa_single_frame`.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import glob
    import os
    from geminidr.gmos.primitives_gmos_longslit import GMOSLongslit
    from gempy.utils import logutils
    from recipe_system.reduction.coreReduce import Reduce
    from recipe_system.utils.reduce_utils import normalize_ucals

    cwd = os.getcwd()
    path = f"./dragons_test_inputs/geminidr/core/{__file__.split('.')[0]}/"
    os.makedirs(path, exist_ok=True)
    os.chdir(path)

    os.makedirs("inputs/", exist_ok=True)

    for raw_list, bias_list, quartz_list, arc_list in single_aperture_data:

        if all([
                os.path.exists(f"inputs/{s.split('.')[0]}_extracted.fits")
                for s in raw_list
        ]):
            print("Skipping already created input.")
            continue

        raw_paths = [download_from_archive(f) for f in raw_list]
        bias_paths = [download_from_archive(f) for f in bias_list]
        quartz_paths = [download_from_archive(f) for f in quartz_list]
        arc_paths = [download_from_archive(f) for f in arc_list]

        cals = []
        raw_ads = [astrodata.open(p) for p in raw_paths]
        data_label = raw_ads[0].data_label()
        print('Current working directory:\n    {:s}'.format(os.getcwd()))

        if len(bias_paths):
            logutils.config(file_name='log_bias_{}.txt'.format(data_label))
            r = Reduce()
            r.files.extend(bias_paths)
            r.runr()
            master_bias = r.output_filenames.pop()
            cals.append(f"processed_bias:{master_bias}")
            del r
        else:
            master_bias = None

        if len(quartz_paths):
            logutils.config(file_name='log_quartz_{}.txt'.format(data_label))
            r = Reduce()
            r.files.extend(quartz_paths)
            r.ucals = normalize_ucals(r.files, cals)
            r.runr()
            master_quartz = r.output_filenames.pop()
            cals.append(f"processed_flat:{master_quartz}")
            del r
        else:
            master_quartz = None

        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        r = Reduce()
        r.files.extend(arc_paths)
        r.ucals = normalize_ucals(r.files, cals)
        r.runr()
        master_arc = r.output_filenames.pop()

        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = GMOSLongslit(raw_ads)
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.biasCorrect(do_bias=master_bias is not None, bias=master_bias)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.flatCorrect(do_flat=master_quartz is not None, flat=master_quartz)
        p.QECorrect(arc=master_arc)
        p.distortionCorrect(arc=master_arc)
        p.findSourceApertures(max_apertures=3)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()
        p.linearizeSpectra()

        [os.remove(s) for s in glob.glob("*_arc.fits")]
        [os.remove(s) for s in glob.glob("*_bias.fits")]
        [os.remove(s) for s in glob.glob("*_flat.fits")]
        [os.remove(s) for s in glob.glob("*_mosaic.fits")]

        os.chdir("inputs/")
        print("\n\n    Writing processed files for tests into:\n"
              "    {:s}\n\n".format(os.getcwd()))
        _ = p.writeOutputs()
        os.chdir("../")

    os.chdir(cwd)
Example #29
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed twilight flat data and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    from geminidr.gmos.tests.longslit import INPUTS_ROOT_PATH

    associated_calibrations = {
        "S20190204S0006.fits": {
            "bias": ["S20190203S0110.fits",
                     "S20190203S0109.fits",
                     "S20190203S0108.fits",
                     "S20190203S0107.fits",
                     "S20190203S0106.fits"],
            "twilight": ["S20190204S0006.fits"],
        },
        "N20190103S0462.fits": {
            "bias": ["N20190102S0531.fits",
                     "N20190102S0530.fits",
                     "N20190102S0529.fits",
                     "N20190102S0528.fits",
                     "N20190102S0527.fits"],
            "twilight": ["N20190103S0462.fits",
                         "N20190103S0463.fits"],
        },
        "N20190327S0056.fits": {
            "bias": ["N20190327S0098.fits",
                     "N20190327S0099.fits",
                     "N20190327S0100.fits",
                     "N20190327S0101.fits",
                     "N20190327S0102.fits"],
            "twilight": ["N20190327S0056.fits"],
        }
    }

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(INPUTS_ROOT_PATH, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        twilight_path = [download_from_archive(f) for f in cals['twilight']]
        bias_path = [download_from_archive(f) for f in cals['bias']]

        twilight_ad = astrodata.open(twilight_path[0])
        data_label = twilight_ad.data_label()

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        del bias_reduce

        print('Reducing twilight flat:')
        logutils.config(file_name='log_twilight_{}.txt'.format(data_label))

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")

            p = primitives_gmos_longslit.GMOSLongslit(
                [astrodata.open(f) for f in twilight_path])

            p.prepare()
            p.addDQ(static_bpm=None)
            p.addVAR(read_noise=True)
            p.overscanCorrect()
            p.biasCorrect(bias=bias_master)
            p.ADUToElectrons()
            p.addVAR(poisson_noise=True)
            p.stackFrames()

            # Write non-mosaicked data
            twilight = p.writeOutputs(suffix="_twilight", strip=True)[0]

            # Write mosaicked data
            p = primitives_gmos_longslit.GMOSLongslit([twilight])
            p.mosaicDetectors()
            p.writeOutputs(suffix="_mosaickedTwilight", strip=True)
Example #30
    def test_slitbias_in_calservice(self, get_or_create_tmpdir):
        """
        Check that:

        - A bias slit calibrator exists in the local calibrations dir;
        - It can be retrieved using a getProcessedSlitBias call.
        """
        assert len(glob.glob(os.path.join(
            os.getcwd(), 'calibrations', 'processed_bias', '*bias*slit*.fits'
        ))) == 1, "Couldn't find the stored slit bias in the calibrations " \
                  "system OR found multiples"

        # Set up a reduction that retrieves the slit bias
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        # Use one of the 'dark slit' files to try and retrieve the slit bias
        reduce.files = [
            os.path.join(os.getcwd(), 'dark95_1_MEF_2x2_slit.fits'),
        ]
        reduce.mode = [
            'test',
        ]
        reduce.recipename = 'recipeRetrieveSlitBiasTest'
        # reduce.mode = ['sq', ]
        # reduce.recipename = 'makeProcessedBias'
        reduce.logfile = os.path.join(os.getcwd(),
                                      'reduce_slitbias_retrieve.log')
        # FIXME cal_service will hopefully find the calibration itself later
        reduce.ucals = normalize_ucals(reduce.files, [
            'processed_bias:{}'.format(
                glob.glob(
                    os.path.join('calibrations', 'processed_bias',
                                 '*slit*bias*.fits'))[0]),
        ])
        reduce.logmode = 'quiet'
        reduce.suffix = '_testSlitBiasRetrieve'
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)

        try:
            reduce.runr()
        except IOError as e:
            assert 0, 'Calibration system could not locate the slit bias ' \
                      'frame ({})'.format(e)
        finally:
            # Teardown code
            for _ in glob.glob(
                    os.path.join(os.getcwd(),
                                 '*{}.fits'.format(reduce.suffix)), ):
                os.remove(_)
Example #31
    def do_slit_arc(self, request, get_or_create_tmpdir):
        """
        Reduce the test slit arc data.

        .. note::
            Fixture.
        """

        # rawfilename = '{}*slit*.fits'.format(slit_type)
        # Copy the raw data file into here
        tmpsubdir, cal_service = get_or_create_tmpdir
        slit_type, res = request.param
        filenames = glob.glob('{}*{}*slit.fits'.format(slit_type, res))

        # Do the slit / slit-arc reduction
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        reduce.mode = ['test', ]
        reduce.recipename = 'recipeSlitArcTest' if slit_type == 'arc' \
            else 'recipeSlitTest'
        # Make sure refresh is used for all primitives
        reduce.upars = ['refresh=True', ]
        # FIXME cal_service will hopefully find the calibration itself later
        calibs = {
            'processed_bias': glob.glob(os.path.join(
                'calibrations',
                'processed_bias',
                '*slit*bias*.fits'))[0],
            'processed_dark': glob.glob(os.path.join(
                    'calibrations',
                    'processed_dark',
                    '*slit*dark*.fits'))[0],
            'processed_slitflat': glob.glob(os.path.join(
                'calibrations',
                'processed_slitflat',
                '*{}*slit*slitflat*.fits'.format(res, )))[0]
        }
        reduce.logfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                      'reduce_slit{}.log'.format(slit_type))
        reduce.logmode = 'standard'
        reduce.suffix = '_{}_testSlit'.format(slit_type)
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)

        corrfiles = []
        for filename in filenames:
            reduce.files = [filename, ]
            reduce.ucals = normalize_ucals(reduce.files, [
                '{}:{}'.format(k, v) for k, v in calibs.items()
            ])
            reduce.runr()

            corrfilename = '*' + reduce.suffix + '.fits'
            corrfilename = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                        glob.glob(corrfilename)[0])
            corrfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                    corrfilename)
            corrfiles.append(corrfile)

        # Return filenames of raw, subtracted files
        yield filenames, corrfiles, calibs

        # Execute teardown code
        for _ in glob.glob(os.path.join(
                os.getcwd(),
                # rawfilename,
                corrfilename,
        )):
            os.remove(_)
Example #32
    def do_slit_bias(self, get_or_create_tmpdir):
        """
        Reduce the bias slit test data.

        .. note::
            Fixture.
        """
        rawfilename = 'bias*slit*.fits'
        # Copy the raw data file into here
        tmpsubdir, cal_service = get_or_create_tmpdir
        # Find all the relevant files
        rawfiles = glob.glob(
            os.path.join(tmpsubdir.dirname, tmpsubdir.basename, rawfilename))

        # Do the master bias generation
        reduce = Reduce()
        reduce.drpkg = 'ghostdr'
        reduce.files = rawfiles
        reduce.mode = [
            'test',
        ]
        reduce.recipename = 'recipeSlitBiasTest'
        reduce.logfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                      'reduce_slitbias.log')
        reduce.logmode = 'quiet'
        reduce.suffix = '_testSlitBias'
        logutils.config(file_name=reduce.logfile, mode=reduce.logmode)
        reduce.runr()

        corrfilename = '*' + reduce.suffix + '.fits'
        corrfilename = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                    glob.glob(corrfilename)[0])
        corrfile = os.path.join(tmpsubdir.dirname, tmpsubdir.basename,
                                corrfilename)

        # Return filenames of raw, subtracted files
        yield rawfiles, corrfile

        # Execute teardown code

        for _ in glob.glob(
                os.path.join(
                    os.getcwd(),
                    # rawfilename,
                    corrfilename,
                )):
            os.remove(_)