Code example #1
def test_adjust_wcs(files, path_to_inputs):
    """
    We take preprocessed files with a single aperture and run them through
    adjustWCSToReference. After this, the celestial coordinates of the aperture
    should be similar in all images.

    In order to make this a real test, we edit the gWCS objects of the inputs
    so they're wrong (except for the first one, which is the reference)
    """
    adinputs = [astrodata.open(os.path.join(path_to_inputs, f)) for f in files]
    # Hack the WCS of all but the first input so they're wrong
    for ad in adinputs[1:]:
        ad[0].wcs.pipeline[0][1]['crpix2'].offset = 600
    p = primitives_gmos_longslit.GMOSLongslit(adinputs)
    p.adjustWCSToReference()
    # Return the (RA, dec) as a SkyCoord at the location of each aperture
    # Then confirm that the sky coordinates are all similar
    skycoords = [ad[0].wcs(0, ad[0].APERTURE['c0'][0], with_units=True)[1]
                 for ad in adinputs]
    c0 = skycoords[0]
    for c in skycoords[1:]:
        assert c0.separation(c).arcsecond < 0.05


# Todo: Implement recipe to create input files
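The TODO above could eventually be filled in along the lines of the create_inputs_recipe helpers shown further down this page. A minimal, hypothetical sketch (the dataset name and the exact reduction steps needed to produce single-aperture inputs with an APERTURE table are assumptions, not the project's actual recipe):

def create_inputs_recipe():
    """Hypothetical sketch: build single-aperture inputs for test_adjust_wcs."""
    import os

    import astrodata
    from astrodata.testing import download_from_archive
    from geminidr.gmos import primitives_gmos_longslit

    datasets = ["N20180109S0287.fits"]  # placeholder; the real input list is not shown here

    os.makedirs("inputs", exist_ok=True)
    for filename in datasets:
        sci_ad = astrodata.open(download_from_archive(filename))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.mosaicDetectors()
        # test_adjust_wcs reads ad[0].APERTURE['c0'], so the inputs need an aperture table
        p.findSourceApertures(max_apertures=1)
        os.chdir("inputs")
        p.writeOutputs()
        os.chdir("..")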
Code example #2
def test_qe_correct_is_locally_continuous(ad, arc_ad, change_working_dir):

    if ad.filename == 'S20180919S0139_flatCorrected.fits':
        pytest.xfail('FIXME: this test fails following changes on the QE '
                     'curves. Needs more investigation.')

    with change_working_dir():

        logutils.config(
            file_name='log_test_continuity{}.txt'.format(ad.data_label()))
        p = primitives_gmos_longslit.GMOSLongslit([ad])
        p.QECorrect(arc=arc_ad)

        # Need these extra steps to extract and analyse the data
        p.distortionCorrect(arc=arc_ad)
        p.findSourceApertures(max_apertures=1)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()
        p.linearizeSpectra()
        processed_ad = p.writeOutputs().pop()

    for ext in processed_ad:
        assert not np.any(np.isnan(ext.data))
        assert not np.any(np.isinf(ext.data))

    basename = processed_ad.filename.replace('_linearized', '')
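    # gap_local_kw holds per-file keyword overrides for the spline gap measurement
    # (assumed to be defined at module level, alongside the test data lists)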
    kwargs = gap_local_kw.get(basename, {})
    gap = MeasureGapSizeLocallyWithSpline(processed_ad, **kwargs)

    assert abs(gap.measure_gaps(0)) < 0.05
    assert abs(gap.measure_gaps(1)) < 0.05
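
Several tests on this page receive a change_working_dir fixture and use it as a context manager. Its real implementation lives in the DRAGONS test helpers; a minimal sketch of the idea, using pytest's built-in tmp_path rather than whatever the project actually wires up, looks like this:

import contextlib
import os

import pytest


@pytest.fixture
def change_working_dir(tmp_path):
    """Return a context manager that temporarily switches to a scratch folder."""
    @contextlib.contextmanager
    def _change_working_dir():
        previous = os.getcwd()
        os.chdir(tmp_path)
        try:
            yield
        finally:
            os.chdir(previous)

    return _change_working_dir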
Code example #3
def processed_ad(request, qe_corrected_ad, get_master_arc, output_path):
    """
    Process the QE corrected data so we can measure the jump size at each gap.

    Parameters
    ----------
    request : pytest.fixture
        Fixture that contains information about this fixture's parent.
    qe_corrected_ad : AstroData
        QE corrected astrodata (as it says).
    get_master_arc : pytest.fixture
        Fixture that reads the master arc either from the permanent input folder
        or from the temporary cache folder.
    output_path : contextmanager
        Enables an easy change to a temporary folder when reducing data.

    Returns
    -------
    AstroData
        Processed 1D bias + flat + qe + distortion + sky corrected, extracted,
        and linearized spectrum.
    """
    pre_process = request.config.getoption("--force-preprocess-data")
    master_arc = get_master_arc(qe_corrected_ad, pre_process)

    with output_path():
        p = primitives_gmos_longslit.GMOSLongslit([qe_corrected_ad])
        p.distortionCorrect(arc=master_arc)
        p.findSourceApertures(max_apertures=1)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()
        p.linearizeSpectra()
        processed_ad = p.writeOutputs().pop()

    return processed_ad
Code example #4
def qe_corrected_ad(request, get_input_ad, get_master_arc, output_path):
    """
    Returns the processed spectrum right after running `applyQECorrection`.

    Parameters
    ----------
    request : pytest.fixture
        Fixture that contains information about this fixture's parent.
    get_input_ad : pytest.fixture
        Fixture that reads the input data, or caches/processes it in a temporary
        folder.
    get_master_arc : pytest.fixture
        Fixture that reads the master arc either from the permanent input folder
        or from the temporary cache folder.
    output_path : contextmanager
        Enables an easy change to a temporary folder when reducing data.

    Returns
    -------
    AstroData
        QE Corrected astrodata.
    """

    filename = request.param
    pre_process = request.config.getoption("--force-preprocess-data")

    input_ad = get_input_ad(filename, pre_process)
    master_arc = get_master_arc(input_ad, pre_process)

    with output_path():
        p = primitives_gmos_longslit.GMOSLongslit([input_ad])
        p.applyQECorrection(arc=master_arc)
        qe_corrected_ad = p.writeOutputs().pop()

    return qe_corrected_ad
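
Because qe_corrected_ad reads request.param, it is meant to be parametrized indirectly. A hedged sketch of how a test might request it (the file name below is a placeholder, not one of the project's actual datasets):

import pytest

datasets = ["some_flatCorrected_input.fits"]  # placeholder name


@pytest.mark.parametrize("qe_corrected_ad", datasets, indirect=True)
def test_qe_keyword_is_added(qe_corrected_ad):
    # QECorrect/applyQECorrection is expected to add the QECORR keyword,
    # as checked elsewhere on this page
    assert 'QECORR' in qe_corrected_ad.phu.keys()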
Code example #5
def test_full_frame_distortion_works_on_smaller_region(ad, change_working_dir):
    """
    Takes a full-frame arc and self-distortion-corrects it. It then fakes
    sub-regions of this and corrects those using the full-frame distortion to
    confirm that the result is the same as the appropriate region of the
    distortion-corrected full-frame image. There's no need to do this more
    than once for a given binning, so we loop within the function, keeping
    track of binnings we've already processed.
    """
    NSUB = 4  # we're going to take combos of horizontal quadrants
    completed_binnings = []

    xbin, ybin = ad.detector_x_bin(), ad.detector_y_bin()
    if ad.detector_roi_setting() != "Full Frame" or (xbin, ybin) in completed_binnings:
        return

    p = primitives_gmos_longslit.GMOSLongslit([ad])
    p.viewer.viewer_name = None
    ad_out = p.distortionCorrect([deepcopy(ad)], arc=ad, order=1)[0]

    for start in range(NSUB):
        for end in range(start + 1, NSUB + 1):
            ad_copy = deepcopy(ad)
            y1b = start * ad[0].shape[0] // NSUB
            y2b = end * ad[0].shape[0] // NSUB
            y1, y2 = y1b * ybin, y2b * ybin  # unbinned pixels

            # Fake the section header keywords and set the SCI and DQ
            # to the appropriate sub-region
            for ext in ad_copy:
                arrsec = ext.array_section()
                detsec = ext.detector_section()
                ext.hdr['CCDSEC'] = '[{}:{},{}:{}]'.format(arrsec.x1 + 1,
                                                           arrsec.x2, y1 + 1, y2)
                ext.hdr['DETSEC'] = '[{}:{},{}:{}]'.format(detsec.x1 + 1,
                                                           detsec.x2, y1 + 1, y2)
                ext.data = ext.data[y1b:y2b]
                ext.mask = ext.mask[y1b:y2b]
                ext.hdr['DATASEC'] = '[1:{},1:{}]'.format(ext.shape[1], y2b - y1b)
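            # Note: the 'mosaic' stream used just below is assumed to have been
            # created earlier (the full-frame variant of this test further down
            # this page shows mosaicDetectors/determineDistortion doing that)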

            ad2 = p.distortionCorrect(
                [ad_copy], arc=p.streams['mosaic'][0], order=1)[0]

            # It's GMOS LS so the offset between this AD and the full-frame
            # will be the same as the DETSEC offset, but the width may be
            # smaller so we need to shuffle the smaller image within the
            # larger one to look for a match
            ny, nx = ad2[0].shape
            xsizediff = ad_out[0].shape[1] - nx

            for xoffset in range(xsizediff + 1):
                np.testing.assert_allclose(
                    np.ma.masked_array(ad2[0].data, mask=ad2[0].mask),
                    ad_out[0].data[y1b:y1b + ny, xoffset:xoffset + nx],
                    atol=0.01,
                    err_msg="Problem with {} {}:{}".format(ad.filename, start, end))

    completed_binnings.append((xbin, ybin))
Code example #6
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive

    root_path = os.path.join("./dragons_test_inputs/")
    module_path = "geminidr/gmos/spect/{}".format(__file__.split('.')[0])
    path = os.path.join(root_path, module_path)

    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs/", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename in datasets:
        print('Downloading files...')
        basename = filename.split("_")[0] + ".fits"
        sci_path = download_from_archive(basename)
        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))

        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None, user_bpm=None, add_illum_mask=False)
        p.addVAR(read_noise=True, poisson_noise=False)
        p.overscanCorrect(function="spline",
                          high_reject=3.,
                          low_reject=3.,
                          nbiascontam=0,
                          niterate=2,
                          order=None)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True, read_noise=False)
        p.mosaicDetectors()
        p.makeIRAFCompatible()
        p.determineDistortion(**fixed_test_parameters_for_determine_distortion)

        os.chdir("inputs")
        processed_ad = p.writeOutputs().pop()
        os.chdir("../../")
        print('Wrote pre-processed file to:\n'
              '    {:s}'.format(processed_ad.filename))
Code example #7
def test_create_slit_illumination_with_multi_extension_data(
        ad, change_working_dir, request):
    """
    Test that `makeSlitIllum` can be run on multi-extension data.
    """
    plot = request.config.getoption("--do-plots")

    with change_working_dir():

        cwd = os.getcwd()
        print("Running tests inside folder:\n  {}".format(cwd))
        p = primitives_gmos_longslit.GMOSLongslit([ad])
        p.makeSlitIllum(bins=25, border=10, debug_plot=plot)
        slit_illum_ad = p.writeOutputs()[0]

        for ext, slit_ext in zip(ad, slit_illum_ad):
            assert ext.shape == slit_ext.shape

            # Create output data
            data_o = (np.ma.masked_array(ext.data, mask=ext.mask) /
                      np.ma.masked_array(slit_ext.data, mask=slit_ext.mask))

            # Bin columns
            fitter = fitting.LinearLSQFitter()
            model = models.Polynomial1D(degree=2)
            nbins = 10
            rows = np.arange(data_o.shape[0])

            for i in range(nbins):

                col_start = i * data_o.shape[1] // nbins
                col_end = (i + 1) * data_o.shape[1] // nbins

                cols = np.ma.mean(data_o[:, col_start:col_end], axis=1)

                fitted_model = fitter(model, rows, cols)

                # Check column is linear
                np.testing.assert_allclose(fitted_model.c2.value, 0, atol=0.01)

                # Check if slope is (almost) horizontal (< 1.5 deg)
                assert np.abs(
                    np.rad2deg(
                        np.arctan(fitted_model.c1.value /
                                  (rows.size // 2)))) < 1.5

    if plot:
        os.makedirs(PLOT_PATH, exist_ok=True)
        print("Renaming plots to ",
              os.path.join(PLOT_PATH, ad.filename.replace(".fits", ".png")))
        os.rename(
            os.path.join(cwd, slit_illum_ad.filename.replace(".fits", ".png")),
            os.path.join(PLOT_PATH, ad.filename.replace(".fits", ".png")))
Code example #8
def test_split_mosaic_into_extensions_metadata(filename):
    """
    Tests that the metadata is correctly propagated to the split object.
    """
    ad = astrodata.open(download_from_archive(filename))

    p = primitives_gmos_longslit.GMOSLongslit([ad])
    p.prepare()
    p.overscanCorrect()
    mosaicked_ad = p.mosaicDetectors().pop()

    ad2 = primitives_gmos_longslit._split_mosaic_into_extensions(
        ad, mosaicked_ad, border_size=10)

    for (ext, ext2) in zip(ad, ad2):
        assert (ext.shape == ext2.shape)
        assert (ext.data_section() == ext2.data_section())
        assert (ext.detector_section() == ext2.detector_section())
        assert (ext.array_section() == ext2.array_section())
Code example #9
    def _reduce_data(ad, master_arc, master_bias, master_flat):
        with output_path():
            # Use config to prevent duplicated log outputs when running Reduce via the API
            logutils.config(file_name='log_{}.txt'.format(ad.data_label()))

            p = primitives_gmos_longslit.GMOSLongslit([ad])
            p.prepare()
            p.addDQ(static_bpm=None)
            p.addVAR(read_noise=True)
            p.overscanCorrect()
            p.biasCorrect(bias=master_bias)
            p.ADUToElectrons()
            p.addVAR(poisson_noise=True)
            p.flatCorrect(flat=master_flat)
            # p.applyQECorrection(arc=master_arc)

            processed_ad = p.writeOutputs().pop()

        return processed_ad
Code example #10
File: test_qe_correct.py Project: b1quint/DRAGONS
def test_regression_on_qe_correct(ad, arc_ad, change_working_dir,
                                  ref_ad_factory):

    with change_working_dir():
        logutils.config(
            file_name='log_test_regression{}.txt'.format(ad.data_label()))
        p = primitives_gmos_longslit.GMOSLongslit([ad])
        p.QECorrect(arc=arc_ad)
        qe_corrected_ad = p.writeOutputs().pop()

    assert 'QECORR' in qe_corrected_ad.phu.keys()

    ref_ad = ref_ad_factory(qe_corrected_ad.filename)

    for qe_corrected_ext, ref_ext in zip(qe_corrected_ad, ref_ad):
        np.testing.assert_allclose(np.ma.masked_array(
            qe_corrected_ext.data, mask=qe_corrected_ext.mask),
                                   np.ma.masked_array(ref_ext.data,
                                                      mask=ref_ext.mask),
                                   atol=0.05)
Code example #11
def test_regression_in_distortion_correct(ad, change_working_dir, ref_ad_factory):
    """
    Runs the `distortionCorrect` primitive on preprocessed data and compares
    the corrected data with that in the reference file.
    """
    with change_working_dir():

        logutils.config(
            file_name='log_regression_{:s}.txt'.format(ad.data_label()))

        p = primitives_gmos_longslit.GMOSLongslit([deepcopy(ad)])
        p.viewer = geminidr.dormantViewer(p, None)
        p.distortionCorrect(arc=deepcopy(ad), order=3, subsample=1)
        dist_corrected_ad = p.writeOutputs()[0]

    ref_ad = ref_ad_factory(dist_corrected_ad.filename)
    for ext, ext_ref in zip(dist_corrected_ad, ref_ad):
        data = np.ma.masked_invalid(ext.data)
        ref_data = np.ma.masked_invalid(ext_ref.data)
        np.testing.assert_allclose(data, ref_data, atol=1)
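
The ref_ad_factory fixture used here and in the QE regression tests essentially maps an output filename to the matching reference file. A minimal sketch, assuming a path_to_refs fixture that points at the reference folder:

import os

import astrodata
import pytest


@pytest.fixture
def ref_ad_factory(path_to_refs):
    """Return a callable that opens the reference file with the given name."""
    def _open_reference(filename):
        return astrodata.open(os.path.join(path_to_refs, filename))

    return _open_reference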
Code example #12
def test_regression_sky_correct_from_slit(filename, params, refname,
                                          change_working_dir, path_to_inputs,
                                          path_to_refs):

    func = params.get('function', 'spline')
    if not func.startswith('spline') and ASTROPY_LT_42:
        pytest.skip('Astropy 4.2 is required to use the linear fitter '
                    'with weights')

    path = os.path.join(path_to_inputs, filename)
    ad = astrodata.open(path)

    with change_working_dir():
        logutils.config(file_name=f'log_regression_{ad.data_label()}.txt')
        p = primitives_gmos_longslit.GMOSLongslit([ad])
        p.viewer = geminidr.dormantViewer(p, None)
        p.skyCorrectFromSlit(**params)
        sky_subtracted_ad = p.writeOutputs(outfilename=refname).pop()

    ref_ad = astrodata.open(os.path.join(path_to_refs, refname))

    for ext, ref_ext in zip(sky_subtracted_ad, ref_ad):
        np.testing.assert_allclose(ext.data, ref_ext.data, atol=0.01)
Code example #13
def test_regression_on_qe_correct(ad, arc_ad, change_working_dir,
                                  ref_ad_factory):

    # The GMOS-N tests need to be run with `use_iraf=False` because the
    # reference files for those use the DRAGONS spline models.
    is_gmos_s = ad.instrument() == 'GMOS-S'

    with change_working_dir():
        logutils.config(
            file_name='log_test_regression{}.txt'.format(ad.data_label()))
        p = primitives_gmos_longslit.GMOSLongslit([ad])
        p.QECorrect(arc=arc_ad, use_iraf=is_gmos_s)
        qe_corrected_ad = p.writeOutputs().pop()

    assert 'QECORR' in qe_corrected_ad.phu.keys()

    ref_ad = ref_ad_factory(qe_corrected_ad.filename)

    for qe_corrected_ext, ref_ext in zip(qe_corrected_ad, ref_ad):
        np.testing.assert_allclose(np.ma.masked_array(
            qe_corrected_ext.data, mask=qe_corrected_ext.mask),
                                   np.ma.masked_array(ref_ext.data,
                                                      mask=ref_ext.mask),
                                   atol=0.05)
Code example #14
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from recipe_system.utils.reduce_utils import normalize_ucals
    from recipe_system.reduction.coreReduce import Reduce

    associated_calibrations = {
        "N20180109S0287.fits": {
            "arcs": ["N20180109S0315.fits"],
            "bias": [
                "N20180103S0563.fits",
                "N20180103S0564.fits",
                "N20180103S0565.fits",
                "N20180103S0566.fits",
                "N20180103S0567.fits",
            ],
            "flat": ["N20180109S0288.fits"],
        }
    }

    root_path = os.path.join("./dragons_test_inputs/")
    module_path = "geminidr/gmos/spect/{}/".format(__file__.split('.')[0])
    path = os.path.join(root_path, module_path)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        bias_path = [download_from_archive(f) for f in cals['bias']]
        flat_path = [download_from_archive(f) for f in cals['flat']]
        arc_path = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        calibration_files = ['processed_bias:{}'.format(bias_master)]
        del bias_reduce

        print('Reducing FLAT for {:s}'.format(data_label))
        logutils.config(file_name='log_flat_{}.txt'.format(data_label))
        flat_reduce = Reduce()
        flat_reduce.files.extend(flat_path)
        flat_reduce.ucals = normalize_ucals(flat_reduce.files,
                                            calibration_files)
        flat_reduce.runr()
        flat_master = flat_reduce.output_filenames.pop()
        calibration_files.append('processed_flat:{}'.format(flat_master))
        del flat_reduce

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_path)
        arc_reduce.ucals = normalize_ucals(arc_reduce.files, calibration_files)
        arc_reduce.runr()
        arc_master = arc_reduce.output_filenames.pop()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.biasCorrect(bias=bias_master)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.flatCorrect(flat=flat_master)
        p.QECorrect(arc=arc_master)
        p.distortionCorrect(arc=arc_master)
        p.findSourceApertures(max_apertures=1)
        p.skyCorrectFromSlit()
        p.traceApertures()
        p.extract1DSpectra()
        p.calculateSensitivity()

        os.chdir("inputs/")
        _ = p.writeOutputs().pop()
        os.chdir("../../")
Code example #15
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from gempy.utils import logutils
    from recipe_system.reduction.coreReduce import Reduce
    from recipe_system.utils.reduce_utils import normalize_ucals

    from geminidr.gmos.tests.spect import CREATED_INPUTS_PATH_FOR_TESTS

    associated_calibrations = {
        "N20180109S0287.fits": {
            'bias': [
                "N20180103S0563.fits", "N20180103S0564.fits",
                "N20180103S0565.fits", "N20180103S0566.fits",
                "N20180103S0567.fits"
            ],
            'flat': ["N20180109S0288.fits"],
            'arcs': ["N20180109S0315.fits"]
        }
    }

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))
    os.makedirs("inputs/", exist_ok=True)

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        bias_path = [download_from_archive(f) for f in cals['bias']]
        flat_path = [download_from_archive(f) for f in cals['flat']]
        arc_path = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        calibration_files = ['processed_bias:{}'.format(bias_master)]
        del bias_reduce

        print('Reducing FLAT for {:s}'.format(data_label))
        logutils.config(file_name='log_flat_{}.txt'.format(data_label))
        flat_reduce = Reduce()
        flat_reduce.files.extend(flat_path)
        flat_reduce.ucals = normalize_ucals(flat_reduce.files,
                                            calibration_files)
        flat_reduce.runr()
        flat_master = flat_reduce.output_filenames.pop()
        calibration_files.append('processed_flat:{}'.format(flat_master))
        del flat_reduce

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_path)
        arc_reduce.ucals = normalize_ucals(arc_reduce.files, calibration_files)
        arc_reduce.runr()
        arc_master = arc_reduce.output_filenames.pop()
        del arc_reduce

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None, user_bpm=None, add_illum_mask=False)
        p.addVAR(read_noise=True, poisson_noise=False)
        p.overscanCorrect(function="spline",
                          high_reject=3.,
                          low_reject=3.,
                          nbiascontam=0,
                          niterate=2,
                          order=None)
        p.biasCorrect(bias=bias_master, do_bias=True)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True, read_noise=False)
        p.flatCorrect(flat=flat_master)
        p.QECorrect(arc=arc_master)
        p.distortionCorrect(arc=arc_master, order=3, subsample=1)
        p.findSourceApertures(max_apertures=1,
                              threshold=0.01,
                              min_sky_region=20)
        p.skyCorrectFromSlit(order=5, grow=0)
        p.traceApertures(order=2,
                         nsum=10,
                         step=10,
                         max_missed=5,
                         max_shift=0.05)
        p.extract1DSpectra(grow=10, method="standard", width=None)

        os.chdir("inputs/")
        processed_ad = p.writeOutputs().pop()
        print('Wrote pre-processed file to:\n'
              '    {:s}'.format(processed_ad.filename))
        os.chdir("../")
Code example #16
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed twilight flat data and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    from geminidr.gmos.tests.longslit import INPUTS_ROOT_PATH

    associated_calibrations = {
        "S20190204S0006.fits": {
            "bias": ["S20190203S0110.fits",
                     "S20190203S0109.fits",
                     "S20190203S0108.fits",
                     "S20190203S0107.fits",
                     "S20190203S0106.fits"],
            "twilight": ["S20190204S0006.fits"],
        },
        "N20190103S0462.fits": {
            "bias": ["N20190102S0531.fits",
                     "N20190102S0530.fits",
                     "N20190102S0529.fits",
                     "N20190102S0528.fits",
                     "N20190102S0527.fits"],
            "twilight": ["N20190103S0462.fits",
                         "N20190103S0463.fits"],
        },
        "N20190327S0056.fits": {
            "bias": ["N20190327S0098.fits",
                     "N20190327S0099.fits",
                     "N20190327S0100.fits",
                     "N20190327S0101.fits",
                     "N20190327S0102.fits"],
            "twilight": ["N20190327S0056.fits"],
        }
    }

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(INPUTS_ROOT_PATH, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, cals in associated_calibrations.items():

        print('Downloading files...')
        twilight_path = [download_from_archive(f) for f in cals['twilight']]
        bias_path = [download_from_archive(f) for f in cals['bias']]

        twilight_ad = astrodata.open(twilight_path[0])
        data_label = twilight_ad.data_label()

        print('Reducing BIAS for {:s}'.format(data_label))
        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_path)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        del bias_reduce

        print('Reducing twilight flat:')
        logutils.config(file_name='log_twilight_{}.txt'.format(data_label))

        with warnings.catch_warnings():
            warnings.simplefilter("ignore")

            p = primitives_gmos_longslit.GMOSLongslit(
                [astrodata.open(f) for f in twilight_path])

            p.prepare()
            p.addDQ(static_bpm=None)
            p.addVAR(read_noise=True)
            p.overscanCorrect()
            p.biasCorrect(bias=bias_master)
            p.ADUToElectrons()
            p.addVAR(poisson_noise=True)
            p.stackFrames()

            # Write non-mosaicked data
            twilight = p.writeOutputs(suffix="_twilight", strip=True)[0]

            # Write mosaicked data
            p = primitives_gmos_longslit.GMOSLongslit([twilight])
            p.mosaicDetectors()
            p.writeOutputs(suffix="_mosaickedTwilight", strip=True)
Code example #17
def test_create_slit_illumination_with_mosaicked_data(ad, change_working_dir, request):
    """
    Test that `makeSlitIllum` can be run on mosaicked data. This primitive
    creates a 2D image that is used to normalize the input data.

    After normalization, the intensity along a column should be more or less
    constant.

    There are several ways of doing this but, given the noise levels, we bin
    the data, fit a polynomial, and check that the fitted polynomial has its 1st
    and 2nd degree coefficients almost zero.
    """
    plot = request.config.getoption("--do-plots")
    np.random.seed(0)

    with change_working_dir():

        cwd = os.getcwd()
        print("Running tests inside folder:\n  {}".format(cwd))

        assert hasattr(ad[0], "wcs")

        p = primitives_gmos_longslit.GMOSLongslit([ad])
        p.makeSlitIllum(bins=25, border=10, debug_plot=plot)
        slit_illum_ad = p.writeOutputs(
            suffix="_mosaickedSlitIllum",  strip=True)[0]

        for ext, slit_ext in zip(ad, slit_illum_ad):
            assert ext.shape == slit_ext.shape

            # Create output data
            data_o = (np.ma.masked_array(ext.data, mask=ext.mask) /
                      np.ma.masked_array(slit_ext.data, mask=slit_ext.mask))

            # Bin columns
            fitter = fitting.LinearLSQFitter()
            model = models.Polynomial1D(degree=2)
            nbins = 50
            rows = np.arange(data_o.shape[0])

            for i in range(nbins):

                col_start = i * data_o.shape[1] // nbins
                col_end = (i + 1) * data_o.shape[1] // nbins

                cols = np.ma.mean(data_o[:, col_start:col_end], axis=1)

                fitted_model = fitter(model, rows, cols)

                # Check column is linear
                np.testing.assert_allclose(fitted_model.c2.value, 0, atol=0.01)

                # Check if slope is (almost) horizontal (< 1.0 deg)
                assert np.abs(
                    np.rad2deg(
                        np.arctan(
                            fitted_model.c1.value / (rows.size // 2)))) < 1.0

    if plot:
        os.makedirs(PLOT_PATH, exist_ok=True)
        print("Renaming plots to ",
              os.path.join(PLOT_PATH, ad.filename.replace(".fits", ".png")))
        os.rename(
            os.path.join(cwd, slit_illum_ad.filename.replace(".fits", ".png")),
            os.path.join(PLOT_PATH, ad.filename.replace(".fits", ".png")))
Code example #18
def create_inputs_recipe(use_branch_name=False):
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """
    import os
    from astrodata.testing import download_from_archive
    from gempy.utils import logutils
    from geminidr.gmos.tests.spect import CREATED_INPUTS_PATH_FOR_TESTS
    from recipe_system.reduction.coreReduce import Reduce
    from recipe_system.utils.reduce_utils import normalize_ucals

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)

    input_path = os.path.join(path, "inputs/")
    os.makedirs(input_path, exist_ok=True)

    for filename, cals in associated_calibrations.items():
        print(filename)
        print(cals)

        sci_path = download_from_archive(filename)
        cals = associated_calibrations[filename]
        bias_paths = [download_from_archive(f) for f in cals['bias']]
        flat_paths = [download_from_archive(f) for f in cals['flat']]
        arc_paths = [download_from_archive(f) for f in cals['arcs']]

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        logutils.config(file_name='log_bias_{}.txt'.format(data_label))
        bias_reduce = Reduce()
        bias_reduce.files.extend(bias_paths)
        bias_reduce.runr()
        bias_master = bias_reduce.output_filenames.pop()
        calibration_files = ['processed_bias:{}'.format(bias_master)]
        del bias_reduce

        logutils.config(file_name='log_flat_{}.txt'.format(data_label))
        flat_reduce = Reduce()
        flat_reduce.files.extend(flat_paths)
        flat_reduce.ucals = normalize_ucals(calibration_files)
        flat_reduce.runr()
        flat_master = flat_reduce.output_filenames.pop()
        calibration_files.append('processed_flat:{}'.format(flat_master))
        del flat_reduce

        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend(arc_paths)
        arc_reduce.ucals = normalize_ucals(calibration_files)

        os.chdir(input_path)
        arc_reduce.runr()
        _ = arc_reduce.output_filenames.pop()
        os.chdir(path)

        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ(static_bpm=None)
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.biasCorrect(bias=bias_master)
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.flatCorrect(flat=flat_master)

        os.chdir(input_path)
        _ = p.writeOutputs().pop()
        os.chdir(path)
Code example #19
def create_inputs_recipe():
    """
    Creates input data for tests using pre-processed standard star and its
    calibration files.

    The raw files will be downloaded and saved inside the path stored in the
    `$DRAGONS_TEST/raw_inputs` directory. Processed files will be stored inside
    a new folder called "dragons_test_inputs". The sub-directory structure
    should reflect the one returned by the `path_to_inputs` fixture.
    """

    input_data = {
        "N20180508S0021.fits": {
            "arc": "N20180615S0409.fits",
        },
        "S20190204S0079.fits": {
            "arc": "S20190206S0030.fits",
        },
        "S20181024S0035.fits": {
            "arc": "S20181025S0012.fits",
        },
    }

    module_name, _ = os.path.splitext(os.path.basename(__file__))
    path = os.path.join(CREATED_INPUTS_PATH_FOR_TESTS, module_name)
    os.makedirs(path, exist_ok=True)
    os.chdir(path)
    os.makedirs("inputs/", exist_ok=True)
    print('Current working directory:\n    {:s}'.format(os.getcwd()))

    for filename, pars in input_data.items():

        print('Downloading files...')
        sci_path = download_from_archive(filename)
        arc_path = download_from_archive(pars['arc'])

        sci_ad = astrodata.open(sci_path)
        data_label = sci_ad.data_label()

        print('Reducing ARC for {:s}'.format(data_label))
        logutils.config(file_name='log_arc_{}.txt'.format(data_label))
        arc_reduce = Reduce()
        arc_reduce.files.extend([arc_path])
        arc_reduce.runr()
        arc = arc_reduce.output_filenames.pop()

        print('Reducing pre-processed data:')
        logutils.config(file_name='log_{}.txt'.format(data_label))
        p = primitives_gmos_longslit.GMOSLongslit([sci_ad])
        p.prepare()
        p.addDQ()
        p.addVAR(read_noise=True)
        p.overscanCorrect()
        p.ADUToElectrons()
        p.addVAR(poisson_noise=True)
        p.mosaicDetectors()
        p.distortionCorrect(arc=arc)
        p.findSourceApertures(max_apertures=1)
        p.traceApertures(order=2,
                         nsum=20,
                         step=10,
                         max_shift=0.09,
                         max_missed=5)

        os.chdir("inputs/")
        processed_ad = p.writeOutputs().pop()
        os.chdir("../")
        print(f'Wrote pre-processed file to:\n    {processed_ad.filename}')
Code example #20
def test_full_frame_distortion_works_on_smaller_region(fname, path_to_inputs):
    """
    Takes a full-frame arc and self-distortion-corrects it. It then fakes
    subregions of this and corrects those using the full-frame distortion to
    confirm that the result is the same as the appropriate region of the
    distortion-corrected full-frame image. There's no need to do this more
    than once for a given binning, so we loop within the function, keeping
    track of binnings we've already processed.
    """
    subpath, basename = os.path.split(fname)
    basename, extension = os.path.splitext(basename)
    basename = basename.split('_')[0] + extension

    raw_fname = testing.download_from_archive(basename, path=subpath)

    _ad = astrodata.open(os.path.join(path_to_inputs, subpath, raw_fname))

    NSUB = 4  # we're going to take combos of horizontal quadrants
    completed_binnings = []

    xbin, ybin = _ad.detector_x_bin(), _ad.detector_y_bin()

    if _ad.detector_roi_setting() != "Full Frame" or (xbin, ybin) in completed_binnings:
        return

    p = primitives_gmos_longslit.GMOSLongslit([_ad])
    p.viewer.viewer_name = None
    p.prepare()
    p.addDQ(static_bpm=None)
    p.overscanCorrect()
    p.ADUToElectrons()
    p.mosaicDetectors(outstream='mosaic')
    # Speed things up a bit with a larger step
    p.determineDistortion(stream='mosaic', step=48 // ybin)

    ad_out = p.distortionCorrect(
        [deepcopy(_ad)], arc=p.streams['mosaic'][0], order=1)[0]

    for start in range(NSUB):
        for end in range(start + 1, NSUB + 1):
            ad_copy = deepcopy(_ad)
            y1b = start * _ad[0].shape[0] // NSUB
            y2b = end * _ad[0].shape[0] // NSUB
            y1, y2 = y1b * ybin, y2b * ybin  # unbinned pixels

            # Fake the section header keywords and set the SCI and DQ
            # to the appropriate sub-region
            for ext in ad_copy:
                arrsec = ext.array_section()
                detsec = ext.detector_section()
                ext.hdr['CCDSEC'] = '[{}:{},{}:{}]'.format(arrsec.x1 + 1,
                                                           arrsec.x2, y1 + 1, y2)
                ext.hdr['DETSEC'] = '[{}:{},{}:{}]'.format(detsec.x1 + 1,
                                                           detsec.x2, y1 + 1, y2)
                ext.data = ext.data[y1b:y2b]
                ext.mask = ext.mask[y1b:y2b]
                ext.hdr['DATASEC'] = '[1:{},1:{}]'.format(ext.shape[1], y2b - y1b)
            ad2 = p.distortionCorrect([ad_copy], arc=p.streams['mosaic'][0],
                                      order=1)[0]

            # It's GMOS LS so the offset between this AD and the full-frame
            # will be the same as the DETSEC offset, but the width may be
            # smaller so we need to shuffle the smaller image within the
            # larger one to look for a match
            ny, nx = ad2[0].shape
            xsizediff = ad_out[0].shape[1] - nx

            for xoffset in range(xsizediff + 1):
                np.testing.assert_allclose(
                    np.ma.masked_array(ad2[0].data, mask=ad2[0].mask),
                    ad_out[0].data[y1b:y1b + ny, xoffset:xoffset + nx],
                    atol=0.01,
                    err_msg="Problem with {} {}:{}".format(_ad.filename, start, end))

    completed_binnings.append((xbin, ybin))