def test_append_whole_instance(test_path):

    ad = astrodata.open(os.path.join(test_path, 'GMOS/N20160524S0119.fits'))
    ad2 = astrodata.open(os.path.join(test_path, 'GMOS/N20110826S0336.fits'))

    with pytest.raises(ValueError):
        ad2.append(ad)
def test_append_slice_to_extension(test_path):

    ad = astrodata.open(os.path.join(test_path, 'GMOS/N20160524S0119.fits'))
    ad2 = astrodata.open(os.path.join(test_path, 'GMOS/N20110826S0336.fits'))

    with pytest.raises(ValueError):
        ad2[0].append(ad[0], name="FOOBAR")
def test_append_non_single_slice(test_path):

    ad = astrodata.open(os.path.join(test_path, 'GMOS/N20160524S0119.fits'))
    ad2 = astrodata.open(os.path.join(test_path, 'GMOS/N20110826S0336.fits'))

    with pytest.raises(ValueError):
        ad2.append(ad[1:])
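These append tests assume a test_path fixture pointing at the local test-data tree. A minimal sketch of such a fixture, assuming the directory comes from an environment variable (the variable name here is only an illustration, not the suite's real conftest):

import os

import pytest


@pytest.fixture
def test_path():
    # Hypothetical: resolve the data directory from an environment variable.
    path = os.getenv('TEST_PATH', '')
    if not path or not os.path.exists(path):
        pytest.skip('Test data directory not found: {!r}'.format(path))
    return path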
Example #4
    def test_clip_auxiliary_data(self):
        ad = astrodata.open(os.path.join(TESTDATAPATH, 'NIRI',
                                         'N20160620S0035.fits'))
        bpm_ad = astrodata.open('geminidr/niri/lookups/BPM/NIRI_bpm.fits')
        ret = gt.clip_auxiliary_data(ad, bpm_ad, 'bpm', np.int16)
        assert ret[0].data.shape == ad[0].data.shape
        assert np.all(ret[0].data == bpm_ad[0].data[256:768, 256:768])
Example #5
def atd4():
    """
    Verify that a MosaicAD class method can create a tiled array from
    extensions of a given name.

    The test creates a mosaic ndarray using the mosaic_image_data method
    with the parameter 'tile=True', which avoids the transformation step.

    """
    print('\n atd4 REQUIREMENT.......')
    print('Tile all IMAGE extensions matching a given extension name')
    gmos_file='../data/gS20120420S0033.fits'
    gsaoi_file='../data/guS20120413S0048.fits'
    ad = astrodata.open(gmos_file)
    mo = MosaicAD(ad, gemini_mosaic_function)
    #     Now use the mosaic_image_data method to create
    #     the mosaic tile array from the 'SCI' extname.
    tile_data = mo.mosaic_image_data(tile=True, extname='SCI') 

    # ----- Comparing input and output: GMOS image

    #     The tester should feel free to verify any input and output
    #     pixel location.

    #     For example, in a GMOS image the lower-left (2x2) corner pixels
    #     come from the first data extension; for GSAOI they come from
    #     the second extension.

    corner_gmos = ad['SCI',1].data   # For a GMOS file    
    print('ad["SCI",1].data[:2,:2]\n',corner_gmos[:2,:2])

    #     From the output mosaic we should get the same values.
    print('tile_data[:2,:2]\n',tile_data[:2,:2])

    # The top right corner of the mosaic
    nexts = ad.count_exts('SCI')
    block = ad['SCI', nexts].data    # There is one amp per block
    print('\nad["SCI",last].data[-2:,-2:]\n', block[-2:,-2:])

    # The top-right corner of the mosaic
    print('\ntile_data[-2:,-2:]\n',tile_data[-2:,-2:])

    # ----- GSAOI data
    ad = astrodata.open(gsaoi_file)
    mo = MosaicAD(ad, gemini_mosaic_function)
    tile_data = mo.mosaic_image_data(tile=True, extname='SCI') 

    print('\nGSAOI data')
    corner_gsaoi = ad['SCI',2].data   # For a GSAOI file
    print('ad["SCI",2].data[:2,:2]\n', corner_gsaoi[:2,:2])
    print('tile_data[:2,:2]\n', tile_data[:2,:2])

    #     The top right corner of the mosaic
    block4 = ad['SCI',4].data    # There is one amp per block
    print('\nblock4[-2:,-2:]\n', block4[-2:,-2:])
    print('tile_data[-2:,-2:]\n', tile_data[-2:,-2:])
    return
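Conceptually, tile=True only relocates the detector blocks next to each other without resampling, which is why the corner pixels can be compared directly as above. A toy numpy illustration of that idea (not MosaicAD itself):

import numpy as np

# Two fake amplifier blocks placed side by side, mimicking a tiled mosaic
# (pure relocation, no geometric transformation).
amp1 = np.arange(4).reshape(2, 2)
amp2 = np.arange(4, 8).reshape(2, 2)
tiled = np.hstack([amp1, amp2])

assert np.all(tiled[:, :2] == amp1)   # lower-left corner equals first block
assert np.all(tiled[:, -2:] == amp2)  # right edge equals last block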
def test_append_slice_to_extension(testfile1, testfile2):
    ad = astrodata.open(testfile2)
    ad2 = astrodata.open(testfile1)

    with pytest.raises(TypeError):
        ad2[0].append(ad[0], name="FOOBAR")

    match = "Cannot append an AstroData slice to another slice"
    with pytest.raises(ValueError, match=match):
        ad[2].FOO = ad2[1]
Example #7
def test_read_invalid_file(tmpdir, caplog):
    testfile = str(tmpdir.join('test.fits'))
    with open(testfile, 'w'):
        # create empty file
        pass

    with pytest.raises(astrodata.AstroDataError):
        astrodata.open(testfile)

    assert caplog.records[0].message.endswith('is zero size')
def test_append_single_slice(testfile1, testfile2):
    ad = astrodata.open(testfile2)
    ad2 = astrodata.open(testfile1)

    lbefore = len(ad2)
    last_ever = ad2[-1].nddata.meta['header'].get('EXTVER', -1)
    ad2.append(ad[1])

    assert len(ad2) == (lbefore + 1)
    assert np.all(ad2[-1].data == ad[1].data)
    assert last_ever < ad2[-1].nddata.meta['header'].get('EXTVER', -1)
Example #9
    def test_clip_auxiliary_data_GSAOI(self):
        ad = astrodata.open(os.path.join(TESTDATAPATH, 'GSAOI',
                                         'S20150528S0112.fits'))
        bpm_ad = astrodata.open(
            'geminidr/gsaoi/lookups/BPM/gsaoibpm_high_full.fits')
        ret = gt.clip_auxiliary_data_GSAOI(ad, bpm_ad, 'bpm', np.int16)
        for rd, cd, bd in zip(ret.data, ad.data, bpm_ad.data):
            assert rd.shape == cd.shape
            # Note this only works for unprepared data because of the ROI
            # row problem
            assert np.all(rd == bd[512:1536, 512:1536])
def test_append_single_slice(test_path):

    ad = astrodata.open(os.path.join(test_path, 'GMOS/N20160524S0119.fits'))
    ad2 = astrodata.open(os.path.join(test_path, 'GMOS/N20110826S0336.fits'))

    lbefore = len(ad2)
    last_ever = ad2[-1].nddata.meta['header'].get('EXTVER', -1)
    ad2.append(ad[1])

    assert len(ad2) == (lbefore + 1)
    assert np.all(ad2[-1].data == ad[1].data)
    assert last_ever < ad2[-1].nddata.meta['header'].get('EXTVER', -1)
    def distortion_diagnosis_plots(self):
        """
        Makes the Diagnosis Plots for `determineDistortion` and
        `distortionCorrect` for each extension inside the reduced arc.
        """
        output_file = os.path.join(self.output_folder, self.name + ".fits")
        reference_file = os.path.join(self.ref_folder, self.name + ".fits")

        ad = astrodata.open(output_file)
        ad_ref = astrodata.open(reference_file)

        self.show_distortion_map(ad)
        self.show_distortion_model_difference(ad, ad_ref)
def test_append_single_slice(testfile1, testfile2):
    ad = astrodata.open(testfile2)
    ad2 = astrodata.open(testfile1)

    lbefore = len(ad2)
    ad2.append(ad[1])

    assert len(ad2) == (lbefore + 1)
    assert np.all(ad2[-1].data == ad[1].data)

    # With a custom header
    ad2.append(ad[1], header=fits.Header({'FOO': 'BAR'}))
    assert ad2[-1].nddata.meta['header']['FOO'] == 'BAR'
Example #14
    def process_object(filename, arc, suffix="linearized"):
        """
        Helper recipe to reduce the object file.

        Returns
        -------
        AstroData
            Processed object spectrum.
        """
        from astrodata.testing import download_from_archive
        from geminidr.gmos.primitives_gmos_longslit import GMOSLongslit

        processed_filename, ext = os.path.splitext(filename)
        processed_filename += "_{:s}{:s}".format(suffix, ext)

        if os.path.exists(processed_filename):
            ad = astrodata.open(processed_filename)
        else:

            if os.path.exists(filename):
                ad = astrodata.open(filename)
            else:
                ad = astrodata.open(
                    download_from_archive(filename,
                                          path='',
                                          env_var='DRAGONS_TEST'))

            p = GMOSLongslit([ad])

            p.prepare()
            p.addDQ(static_bpm=None)
            p.addVAR(read_noise=True)
            p.overscanCorrect()
            # p.biasCorrect()
            p.ADUToElectrons()
            p.addVAR(poisson_noise=True)
            # p.flatCorrect()
            # p.applyQECorrection()
            p.distortionCorrect(arc=arc)
            p.findSourceApertures(max_apertures=1)
            p.skyCorrectFromSlit()
            p.traceApertures()
            p.extract1DSpectra()
            p.linearizeSpectra()  # TODO: needed?
            p.calculateSensitivity()

            ad = p.streams['main'][0]
            ad.write(overwrite=True)

        return ad
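A hypothetical call of the helper above; the file name and master arc below are placeholders, not datasets from this suite:

master_arc = astrodata.open("N20180109S0287_arc.fits")   # placeholder arc
std_ad = process_object("N20180109S0287.fits", arc=master_arc)
print(std_ad.filename, std_ad.tags)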
Example #15
    def _get_master_arc(ad, pre_process):

        cals = get_associated_calibrations(ad.filename.split('_')[0] + '.fits')

        arc_filename = cals[cals.caltype == 'arc'].filename.values[0]
        arc_filename = arc_filename.split('.fits')[0] + '_arc.fits'

        if pre_process:
            with output_path():
                master_arc = astrodata.open(arc_filename)
        else:
            master_arc = astrodata.open(
                os.path.join(new_path_to_inputs, arc_filename))

        return master_arc
Example #16
def processed_slit_illum(change_working_dir, path_to_inputs, request):
    """
    Returns the processed slit illumination function that will be analysed.

    Parameters
    ----------
    change_working_dir : pytest.fixture
        Fixture that changes the working directory (see :mod:`astrodata.testing`).
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.
    request : pytest.fixture
        PyTest built-in fixture containing information about the parent test.

    Returns
    -------
    AstroData
        Input spectrum processed up to just before `applyQECorrection`.
    """
    twi_filename = request.param
    twi_path = download_from_archive(twi_filename)
    twi_ad = astrodata.open(twi_path)

    print(twi_ad.tags)

    master_bias = os.path.join(
        path_to_inputs, associated_calibrations[twi_filename])

    assert os.path.exists(master_bias)

    calibration_files = ['processed_bias:{}'.format(master_bias)]

    with change_working_dir():
        print("Reducing SLITILLUM in folder:\n  {}".format(os.getcwd()))
        logutils.config(
            file_name='log_flat_{}.txt'.format(twi_ad.data_label()))

        reduce = Reduce()
        reduce.files.extend([twi_path])
        reduce.mode = 'sq'
        reduce.recipename = 'makeProcessedSlitIllum'
        reduce.ucals = normalize_ucals(reduce.files, calibration_files)
        reduce.runr()

        _processed_twi_filename = reduce.output_filenames.pop()
        _processed_twi = astrodata.open(_processed_twi_filename)

    return _processed_twi
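This fixture is driven by request.param, so a test consumes it through indirect parametrization. A minimal sketch, with a placeholder twilight file name:

import pytest


@pytest.mark.parametrize("processed_slit_illum", ["S20190204S0006.fits"],
                         indirect=True)
def test_processed_slit_illum_loads(processed_slit_illum):
    # Placeholder check: the reduced product has at least one extension.
    assert len(processed_slit_illum) > 0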
Example #17
    def recover(self):
        log.debug("OufileETIFile recover()")
        ad = astrodata.open(self.tmp_name)
        ad.filename = self.ad_name
        ad = gemini_tools.obsmode_del(ad)
        log.fullinfo(self.tmp_name + " was loaded into memory")
        return ad
Example #18
    def test_airmass_descriptor_is_none_or_float(self, test_path, filename):
        ad = astrodata.open(os.path.join(test_path, "Archive/", filename))
        try:
            assert ((type(ad.airmass()) == float)
                    or (ad.airmass() is None))
        except Exception as err:
            print("{} failed on call: {}".format(ad.airmass, str(err)))
Example #19
    def flushPixels(self, adinputs=None, force=False):
        """
        This primitive saves the inputs to disk and then reopens them so
        the pixel data are out of memory
        """
        def is_lazy(ad):
            """Determine whether an AD object is lazily-loaded"""
            for ndd in ad.nddata:
                for attr in ('_data', '_mask', '_uncertainty'):
                    item = getattr(ndd, attr)
                    if item is not None and not (hasattr(item, 'lazy') and item.lazy):
                        return False
            return True

        log = self.log

        for i, ad in enumerate(adinputs):
            if not force and is_lazy(ad):
                log.fullinfo("{} is lazily-loaded; not writing to "
                          "disk".format(ad.filename))
            else:
                # Write in current directory (hence ad.filename specified)
                log.fullinfo("Writing {} to disk and reopening".format(ad.filename))
                ad.write(ad.filename, overwrite=True)
                # We directly edit elements in the list to ensure the versions
                # in the primitivesClass stream are affected too. We also want
                # the files to retain their orig_filename attributes, which
                # would otherwise change upon loading.
                orig_filename = ad.orig_filename
                adinputs[i] = astrodata.open(ad.filename)
                adinputs[i].orig_filename = orig_filename
        return adinputs
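The essential pattern here is write-then-reopen while preserving orig_filename; a stripped-down sketch with a placeholder file name:

import astrodata

ad = astrodata.open('example.fits')       # placeholder file name
orig_filename = ad.orig_filename          # keep the original name
ad.write(ad.filename, overwrite=True)     # flush pixel data to disk
ad = astrodata.open(ad.filename)          # reopen; data are now lazily loaded
ad.orig_filename = orig_filename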
def test_select_from_inputs_primitive(test_path):

    with open("recursion.log", 'w') as _log:

        for f in glob.glob(os.path.join(test_path, 'F2', '*.fits')):

            ad = astrodata.open(f)

            p = F2Image([ad])

            p.prepare()
            p.addVAR(read_noise=True)

            try:

                print('Running tests on file: {:s}'.format(f))
                p.selectFromInputs(tags="DARK", outstream="darks")
                p.showInputs(stream="darks")
                p.showInputs()

                print("{:15s} OK".format(f))
                _log.write("\n{:15s} OK".format(f))

            except RecursionError as re:

                print("{:15s} FAIL".format(f))
                _log.write("\n{:15s} FAIL".format(f))

            del ad
            del p
Example #21
    def test_tags(self, test_path):

        ad = astrodata.open(os.path.join(test_path, filename))
        tags = ad.tags
        expected = {'UNPREPARED', 'RAW', 'SPECT', 'GEMINI', 'GRACES'}

        assert expected.issubset(tags)
Example #22
def atd8():
    """
    From a given AstroData object, the system shall offer an option to
    prevent the creation of merged table associations.

    The test creates an output AstroData object using the as_astrodata
    method with the parameter 'return_associated_bintables' set to False,
    which prevents the creation of binary tables associated with the
    reference image extension name.

    Resources:

    file: N20120121S0175_ccMeasured.fits. Contains SCI, VAR, DQ,
          OBJCAT (BINTABLE) and REFFACT (BINTABLE).

    """
    print('\n atd8 REQUIREMENT.......')
    print('Offer an option to prevent merged table associations')
    file = '../data/N20120121S0175_ccMeasured.fits'
    ad = astrodata.open(file)
    #    Creates a mosaicAD object using the input ad
    #    and the default mosaic function named 
    #    gemini_mosaic_function. 'SCI' is the default extname
    mo = MosaicAD(ad, gemini_mosaic_function)
    outad = mo.as_astrodata()
    print('.......OUTPUT AD with all extensions')
    print(outad.info())
    outad = mo.as_astrodata(return_associated_bintables=False)
    #    The tester should see that there is no BINTABLE 
    #    extension name associated with the reference image 
    #    extension name in this output.
    print('.......OUTPUT AD with no associated BINTABLE extensions')
    print(outad.info())
    return
Example #23
    def test_iterate_over_single_slice(self, test_path, filename):
        ad = astrodata.open(os.path.join(test_path, filename))

        metadata = ('SCI', 1)

        for ext in ad[0]:
            assert (ext.hdr['EXTNAME'], ext.hdr['EXTVER']) == metadata
Example #24
def atd1():
    """
    With a GMOS AstroData object, the test instantiates a MosaicAD object
    containing 'coords' as one of the attributes. The test verifies that
    coords['amp_mosaic_coord'] and ad['SCI'].detector_array.as_dict() values
    match.

    """
    print('\n atd1 REQUIREMENT.......')
    print('Instantiate an object from a supported AstroData object')
    gmos_file   = '../data/gS20120420S0033.fits'
    gsaoi_file  = '../data/guS20110324S0146.fits'
    nongem_file = '../data/kp620765.fits'

    #    Success Criterion 1. (GMOS data)
    #    The tester should provide her/his own GMOS file.
    for tfile in [gmos_file, gsaoi_file, nongem_file]:
        ad = astrodata.open(tfile)
        print('\n ...........CASE for:', ad.filename, ad.instrument())
        mo = MosaicAD(ad, gemini_mosaic_function)

        #    print DETECTOR values for all the 'SCI' extensions as
        #    a dictionary.
        print(ad.detector_section())
          
        #    print the 'amp_mosaic_coord' key value from the 'coords'
        #    attribute. This list is in increasing order of extver.
        print(mo.coords['amp_mosaic_coord'])
    return
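The by-eye comparison requested above can also be sketched as an assertion; a hedged example assuming one amplifier per 'SCI' extension:

ad = astrodata.open('../data/gS20120420S0033.fits')
mo = MosaicAD(ad, gemini_mosaic_function)
# One 'amp_mosaic_coord' entry is expected per 'SCI' extension.
assert len(mo.coords['amp_mosaic_coord']) == len(ad.detector_section())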
Example #25
    def test_read_a_keyword_from_hdr_deprecated(self):
        ad = astrodata.open('N20110826S0336.fits')

        with pytest.raises(AttributeError):
            assert ad.hdr.CCDNAME == [
                'EEV 9273-16-03', 'EEV 9273-20-04', 'EEV 9273-20-03'
            ]
Example #26
    def test_airmass_descriptor_value_is_acceptable(self, test_path, filename):
        ad = astrodata.open(os.path.join(test_path, "Archive/", filename))
        try:
            assert ((ad.airmass() is None)
                    or (ad.airmass() >= 1.0))
        except Exception as err:
            print("{} failed on call: {}".format(ad.airmass, str(err)))
Example #27
    def test_can_make_and_write_ad_object(self, test_path):

        # Creates data and ad object
        phu = fits.PrimaryHDU()
        pixel_data = np.random.rand(100, 100)

        hdu = fits.ImageHDU()
        hdu.data = pixel_data

        ad = astrodata.create(phu)
        ad.append(hdu, name='SCI')

        # Write file and test it exists properly
        test_file_location = os.path.join(
            test_path, 'created_fits_file.fits')

        if os.path.exists(test_file_location):
            os.remove(test_file_location)
        ad.write(test_file_location)

        assert os.path.exists(test_file_location)
        # Opens file again and tests data is same as above

        adnew = astrodata.open(test_file_location)
        assert np.array_equal(adnew[0].data, pixel_data)
        os.remove(test_file_location)
Example #28
    def test_can_overwrite_existing_file(self, test_path, filename):

        ad = astrodata.open(os.path.join(test_path, filename))
        test_file_location = os.path.join(test_path,
                                          'test_fits_overwrite.fits')
        if os.path.exists(test_file_location):
            os.remove(test_file_location)
        ad.write(test_file_location)

        assert os.path.exists(test_file_location)

        adnew = astrodata.open(test_file_location)
        adnew.write(overwrite=True)

        # erasing file for cleanup
        os.remove(test_file_location)
Example #29
def ad(request, path_to_inputs):
    """
    Returns the pre-processed spectrum file.

    Parameters
    ----------
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.
    request : pytest.fixture
        PyTest built-in fixture containing information about the parent test.

    Returns
    -------
    AstroData
        Input spectrum processed up to just before the `distortionDetermine`
        primitive.
    """
    filename = request.param
    path = os.path.join(path_to_inputs, filename)

    if os.path.exists(path):
        ad = astrodata.open(path)
    else:
        raise FileNotFoundError(path)

    return ad
Example #30
def test_adjust_wcs(files, path_to_inputs):
    """
    We take preprocessed files with a single aperture and run them through
    adjustWCSToReference. After this, the celestial coordinates of the aperture
    should be similar in all images.

    In order to make this a real test, we edit the gWCS objects of the inputs
    so they're wrong (except for the first one, which is the reference)
    """
    adinputs = [astrodata.open(os.path.join(path_to_inputs, f)) for f in files]
    # Hack the WCS of all but the first input so they're wrong
    for ad in adinputs[1:]:
        ad[0].wcs.pipeline[0][1]['crpix2'].offset = 600
    p = primitives_gmos_longslit.GMOSLongslit(adinputs)
    p.adjustWCSToReference()
    # Return the (RA, dec) as a SkyCoord at the location of each aperture
    # Then confirm that the sky coordinates are all similar
    skycoords = [ad[0].wcs(0, ad[0].APERTURE['c0'][0], with_units=True)[1]
                 for ad in adinputs]
    c0 = skycoords[0]
    for c in skycoords[1:]:
        assert c0.separation(c).arcsecond < 0.05


# Todo: Implement recipe to create input files
Example #31
def arc_ad(path_to_inputs, request):
    """
    Returns the master arc used during the data-set data reduction.

    Parameters
    ----------
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.
    request : pytest.fixture
        PyTest built-in fixture containing information about the parent test.

    Returns
    -------
    AstroData
        Master arc.
    """
    filename = request.param
    path = os.path.join(path_to_inputs, filename)

    if os.path.exists(path):
        print(f"Reading input arc: {path}")
        arc_ad = astrodata.open(path)
    else:
        raise FileNotFoundError(path)

    return arc_ad
Example #32
def input_ad_list(path_to_inputs):
    """
    Reads the input data from disk as AstroData objects.

    Parameters
    ----------
    path_to_inputs : pytest.fixture
        Fixture defined in :mod:`astrodata.testing` with the path to the
        pre-processed input file.

    Returns
    -------
    list of AstroData objects.
    """
    _input_ad_list = []

    for input_fname in test_datasets:
        input_path = os.path.join(path_to_inputs, input_fname)

        if os.path.exists(input_path):
            ad = astrodata.open(input_path)
        else:
            raise FileNotFoundError(input_path)

        _input_ad_list.append(ad)

    return _input_ad_list
Example #33
def test_copy(GSAOI_DARK, capsys):
    ad = astrodata.open(GSAOI_DARK)
    ad.TABLE = Table([['a', 'b', 'c'], [1, 2, 3]])
    ad[0].MYTABLE = Table([['aa', 'bb', 'cc'], [1, 2, 3]])

    ad.info()
    captured = capsys.readouterr()

    ad2 = copy.deepcopy(ad)
    ad2.info()
    captured2 = capsys.readouterr()

    # Compare that objects have the same attributes etc. with their
    # .info representation
    assert captured.out == captured2.out

    ext = ad[0]
    ext.info()
    captured = capsys.readouterr()

    ext2 = copy.deepcopy(ext)
    ext2.info()
    captured2 = capsys.readouterr()

    # Same for extension, except that first line is different (no
    # filename in the copied ext)
    assert captured.out.splitlines()[1:] == captured2.out.splitlines()[1:]
def test_append_array_to_extension_with_name_sci(testfile2):
    ad = astrodata.open(testfile2)
    assert len(ad) == 6

    ones = np.ones((10, 10))
    with pytest.raises(ValueError):
        ad[0].append(ones, name='SCI')
def test_append_array_to_root_with_arbitrary_name(testfile2):
    ad = astrodata.open(testfile2)
    assert len(ad) == 6

    ones = np.ones((10, 10))
    with pytest.raises(ValueError):
        ad.append(ones, name='ARBITRARY')
Example #36
    def test_prepare(self):
        ad = astrodata.open(os.path.join(TESTDATAPATH, 'NIRI',
                                         'N20070819S0104.fits'))
        p = NIRIImage([ad])
        ad = p.prepare()[0]
        assert ad_compare(ad, os.path.join(TESTDATAPATH, 'NIRI',
                                           'N20070819S0104_prepared.fits'))
Example #37
    def test_addVAR(self):
        ad = astrodata.open(os.path.join(TESTDATAPATH, 'NIRI',
                                         'N20070819S0104_ADUToElectrons.fits'))
        p = NIRIImage([ad])
        ad = p.addVAR(read_noise=True, poisson_noise=True)[0]
        assert ad_compare(ad, os.path.join(TESTDATAPATH, 'NIRI',
                                           'N20070819S0104_varAdded.fits'))
def test_append_table_to_extension(testfile2):
    ad = astrodata.open(testfile2)
    assert len(ad) == 6

    table = Table(([1, 2, 3], [4, 5, 6], [7, 8, 9]), names=('a', 'b', 'c'))
    ad[0].append(table, 'MYTABLE')
    assert (ad[0].MYTABLE == table).all()
def test_select_from_inputs_primitive(path_to_inputs):

    with open("recursion.log", 'w') as _log:

        for f in glob.glob(os.path.join(path_to_inputs, 'F2', '*.fits')):

            ad = astrodata.open(f)

            p = F2Image([ad])

            p.prepare()
            p.addVAR(read_noise=True)

            try:

                print('Running tests on file: {:s}'.format(f))
                p.selectFromInputs(tags="DARK", outstream="darks")
                p.showInputs(stream="darks")
                p.showInputs()

                print("{:15s} OK".format(f))
                _log.write("\n{:15s} OK".format(f))

            except RecursionError as re:

                print("{:15s} FAIL".format(f))
                _log.write("\n{:15s} FAIL".format(f))

            del ad
            del p
Example #40
    def test_slitarc_avgepoch(self, do_slit_obj, values=AVGEPOCH_VALUES):
        """
        Confirm that the average exposure epoch is correct.

        The calculation to do this is somewhat complex, so there's no easy
        way to compute in a different fashion to check it. Best bet is to use
        a pre-canned value for each type and resolution (i.e. regression test).
        """
        rawfiles, corrfiles, calibs = do_slit_obj

        # Get the res and slit_type from the filename
        for rawfile, corrfile in zip(rawfiles, corrfiles):
            # Get the res and slit_type from the corrfile name
            res, slit_type = '.'.join(corrfile.split(os.path.sep)[-1].split(
                '.')[:-1]).split('_')[-3:-1]
            corrslit = astrodata.open(corrfile)

            avgepoch = corrslit.phu.get('AVGEPOCH')
            assert avgepoch == values[slit_type][res], (
                "AVGEPOCH in {} appears to be wrong "
                "(expected {}, got {})".format(
                    rawfile, values[slit_type][res], avgepoch))
Example #41
def test_attributes(GSAOI_DARK):
    ad = astrodata.open(GSAOI_DARK)
    assert ad.shape == [(2048, 2048)] * 4
    assert [arr.shape for arr in ad.data] == [(2048, 2048)] * 4
    assert [arr.dtype for arr in ad.data] == ['f'] * 4
    assert ad.uncertainty == [None] * 4
    assert ad.variance == [None] * 4
    assert ad.mask == [None] * 4

    ad[0].variance = np.ones(ad[0].shape)
    assert isinstance(ad[0].uncertainty, ADVarianceUncertainty)
    assert_array_equal(ad[0].uncertainty.array, 1)
    assert_array_equal(ad[0].variance, 1)
    assert_array_equal(ad.variance[0], 1)

    assert all(isinstance(nd, NDAstroData) for nd in ad.nddata)
    assert [nd.shape for nd in ad.nddata] == [(2048, 2048)] * 4

    match = "Trying to assign to an AstroData object that is not a single slice"
    with pytest.raises(ValueError, match=match):
        ad.data = 1
    with pytest.raises(ValueError, match=match):
        ad.variance = 1
    with pytest.raises(ValueError, match=match):
        ad.uncertainty = 1
    with pytest.raises(ValueError, match=match):
        ad.mask = 1
Example #42
def test_from_hdulist2():
    tablehdu = fits.table_to_hdu(Table([[1]]))
    tablehdu.name = 'REFCAT'

    hdul = fits.HDUList([
        fits.PrimaryHDU(header=fits.Header({'INSTRUME': 'FISH'})),
        fits.ImageHDU(data=np.zeros(10), name='SCI', ver=1),
        fits.ImageHDU(data=np.ones(10), name='VAR', ver=1),
        fits.ImageHDU(data=np.zeros(10, dtype='uint16'), name='DQ', ver=1),
        tablehdu,
        fits.BinTableHDU.from_columns(
            [fits.Column(array=['a', 'b'], format='A', name='col')],
            ver=1,
        ),  # This HDU will be skipped because it has no EXTNAME
    ])

    with pytest.warns(UserWarning,
                      match='Skip HDU .* because it has no EXTNAME'):
        ad = astrodata.open(hdul)

    assert len(ad) == 1
    assert ad.phu['INSTRUME'] == 'FISH'
    assert_array_equal(ad[0].data, 0)
    assert_array_equal(ad[0].variance, 1)
    assert_array_equal(ad[0].mask, 0)
    assert len(ad.REFCAT) == 1
    assert ad.exposed == {'REFCAT'}
    assert ad[0].exposed == {'REFCAT'}
Example #43
def test_can_append_table_and_access_data(capsys, tmpdir):
    tbl = Table([np.zeros(10), np.ones(10)], names=['col1', 'col2'])
    phu = fits.PrimaryHDU()
    ad = astrodata.create(phu)

    with pytest.raises(ValueError,
                       match='Tables should be set directly as attribute'):
        ad.append(tbl, name='BOB')

    ad.BOB = tbl
    assert ad.exposed == {'BOB'}

    assert ad.tables == {'BOB'}
    assert np.all(ad.table()['BOB'] == tbl)

    ad.info()
    captured = capsys.readouterr()
    assert '.BOB           Table       (10, 2)' in captured.out

    # Write file and test it exists properly
    testfile = str(tmpdir.join('created_fits_file.fits'))
    ad.write(testfile)
    adnew = astrodata.open(testfile)
    assert adnew.exposed == {'BOB'}
    assert len(adnew.BOB) == 10

    del ad.BOB
    assert ad.tables == set()
    with pytest.raises(AttributeError):
        del ad.BOB
Example #44
    def flushPixels(self, adinputs=None, force=False):
        """
        This primitive saves the inputs to disk and then reopens them so
        the pixel data are out of memory
        """
        def is_lazy(ad):
            """Determine whether an AD object is lazily-loaded"""
            for ndd in ad.nddata:
                for attr in ('_data', '_mask', '_uncertainty'):
                    item = getattr(ndd, attr)
                    if item is not None and not (hasattr(item, 'lazy')
                                                 and item.lazy):
                        return False
            return True

        log = self.log

        for i, ad in enumerate(adinputs):
            if not force and is_lazy(ad):
                log.fullinfo("{} is lazily-loaded; not writing to "
                             "disk".format(ad.filename))
            else:
                # Write in current directory (hence ad.filename specified)
                log.fullinfo("Writing {} to disk and reopening".format(
                    ad.filename))
                ad.write(ad.filename, overwrite=True)
                # We directly edit elements in the list to ensure the versions
                # in the primitivesClass stream are affected too. We also want
                # the files to retain their orig_filename attributes, which
                # would otherwise change upon loading.
                orig_filename = ad.orig_filename
                adinputs[i] = astrodata.open(ad.filename)
                adinputs[i].orig_filename = orig_filename
        return adinputs
Example #45
def test_from_hdulist(NIFS_DARK):
    with fits.open(NIFS_DARK) as hdul:
        assert 'ORIGNAME' not in hdul[0].header
        ad = astrodata.open(hdul)
        assert ad.path is None
        assert ad.instrument() == 'NIFS'
        assert ad.object() == 'Dark'
        assert ad.telescope() == 'Gemini-North'
        assert len(ad) == 1
        assert ad[0].shape == (2048, 2048)

    with fits.open(NIFS_DARK) as hdul:
        # Make sure that when ORIGNAME is set, astrodata uses it
        hdul[0].header['ORIGNAME'] = 'N20160727S0077.fits'
        ad = astrodata.open(hdul)
        assert ad.path == 'N20160727S0077.fits'
def test_delete_named_associated_extension(testfile2):
    ad = astrodata.open(testfile2)
    table = Table(([1, 2, 3], [4, 5, 6], [7, 8, 9]), names=('a', 'b', 'c'))
    ad[0].append(table, 'MYTABLE')
    assert 'MYTABLE' in ad[0]
    del ad[0].MYTABLE
    assert 'MYTABLE' not in ad[0]
Example #47
    def _convert_inputs(self, inputs):
        """
        Convert files into AstroData objects.

        Parameters
        ----------
        inputs: <list>, list of FITS file names

        Returns
        -------
        allinputs: <list>, list of AstroData objects

        """
        allinputs = []
        for inp in inputs:
            try:
                ad = astrodata.open(inp)
            except AstroDataError as err:
                log.warning("Can't Load Dataset: %s" % inp)
                log.warning(err)
                continue
            except OSError as err:
                log.warning("Can't Load Dataset: %s" % inp)
                log.warning(err)
                continue

            if not len(ad):
                log.warning("%s contains no extensions." % ad.filename)
                continue

            allinputs.append(ad)

        return allinputs
Example #48
    def test_nonlinearityCorrect(self):
        ad = astrodata.open(os.path.join(TESTDATAPATH, 'NIRI',
                                         'N20070819S0104_varAdded.fits'))
        p = NIRIImage([ad])
        ad = p.nonlinearityCorrect()[0]
        assert ad_compare(ad, os.path.join(TESTDATAPATH, 'NIRI',
                                           'N20070819S0104_nonlinearityCorrected.fits'))
Example #49
    def test_array_information(self):
        ad = astrodata.open(os.path.join(TESTDATAPATH, 'GMOS',
                                         'N20110524S0358_varAdded.fits'))
        ret = gt.array_information(ad)
        assert ret == {'amps_per_array': {1: 1, 2: 1, 3: 1},
                       'amps_order': [0, 1, 2], 'array_number': [1, 2, 3],
                       'reference_extension': 2}
Example #50
def test_crop(GSAOI_DARK):
    ad = astrodata.open(GSAOI_DARK)
    assert set(ad.shape) == {(2048, 2048)}

    ad.crop(0, 0, 5, 10)
    assert len(ad.nddata) == 4
    assert set(ad.shape) == {(11, 6)}
Example #51
    def _get_master_arc(ad, pre_process):

        cals = testing.get_associated_calibrations(
            ad.filename.split('_')[0] + '.fits')

        arc_filename = cals[cals['caltype'] == 'arc']['filename'][0]
        arc_filename = arc_filename.split('.fits')[0] + '_arc.fits'

        if pre_process:
            with change_working_dir():
                master_arc = astrodata.open(arc_filename)
        else:
            master_arc = astrodata.open(
                os.path.join(path_to_inputs, arc_filename))

        return master_arc
Example #52
def niriprim2():
    file_path = download_from_archive("N20190120S0287.fits")
    ad = astrodata.open(file_path)
    ad.append(ad[0])
    p = NIRIImage([ad])
    p.addDQ()
    return p
def test_delete_named_associated_extension(test_path):
    ad = astrodata.open(os.path.join(test_path, 'GMOS/N20160524S0119.fits'))
    table = Table(([1, 2, 3], [4, 5, 6], [7, 8, 9]),
                  names=('a', 'b', 'c'))
    ad[0].append(table, 'MYTABLE')
    assert 'MYTABLE' in ad[0]
    del ad[0].MYTABLE
    assert 'MYTABLE' not in ad[0]
Example #54
    def test_do_arith_and_retain_features(self, test_path, filename):
        ad = astrodata.open(os.path.join(test_path, filename))

        ad[0].NEW_FEATURE = np.array([1, 2, 3, 4, 5])
        ad2 = ad * 5

        np.testing.assert_array_almost_equal(ad[0].NEW_FEATURE,
                                             ad2[0].NEW_FEATURE)
def test_append_var_to_root(test_path):

    test_filename = 'GMOS/N20160524S0119.fits'
    ad = astrodata.open(os.path.join(test_path, test_filename))

    var = np.random.random(ad[0].data.shape)
    with pytest.raises(ValueError):
        ad.append(var, 'VAR')
def test_append_var_to_ext(test_path):

    test_filename = 'GMOS/N20160524S0119.fits'
    ad = astrodata.open(os.path.join(test_path, test_filename))

    var = np.random.random(ad[0].data.shape)
    ad[0].append(var, 'VAR')
    assert np.abs(var - ad[0].variance).mean() < 0.00000001
def test_for_length(test_path):

    test_filename = 'GMOS/N20110826S0336.fits'
    ad = astrodata.open(os.path.join(test_path, test_filename))

    # This should force the data to be loaded.
    # Otherwise, we'll get different results - or an exception.
    assert len(ad) == len(ad.nddata)
Example #58
    def test_tags(self, test_path):

        ad = astrodata.open(os.path.join(test_path, filename))
        tags = ad.tags
        expected = {'RAW', 'GEMINI', 'NORTH', 'SIDEREAL', 'UNPREPARED',
                    'IMAGE', 'NIRI'}

        assert expected.issubset(tags)
def test_append_dq_to_ext(test_path):

    test_filename = 'GMOS/N20160524S0119.fits'
    ad = astrodata.open(os.path.join(test_path, test_filename))

    dq = np.zeros(ad[0].data.shape)
    ad[0].append(dq, 'DQ')
    assert dq is ad[0].mask
Example #60
    def test_tags(self, test_path):

        ad = astrodata.open(os.path.join(test_path, filename))
        tags = ad.tags
        expected = {'DARK', 'RAW', 'AT_ZENITH', 'NORTH', 'AZEL_TARGET',
                    'CAL', 'UNPREPARED', 'NIFS', 'GEMINI', 'NON_SIDEREAL'}

        assert expected.issubset(tags)