Code example #1
    def test_lcurve(self):
        """Test light curve production."""
        from astropy.io.fits import Header
        command = ('{0} -e {1} {2} --safe-interval '
                   '{3} {4}  --nproc 2 -b 0.5 -o {5}').format(
                       os.path.join(
                           self.datadir, 'monol_testA_nustar_fpma_ev_calib' +
                           HEN_FILE_EXTENSION), 3, 50, 100, 300,
                       os.path.join(
                           self.datadir,
                           'monol_testA_E3-50_lc' + HEN_FILE_EXTENSION))
        hen.lcurve.main(command.split())

        new_filename = \
            os.path.join(self.datadir,
                         'monol_testA_E3-50_lc' + HEN_FILE_EXTENSION)
        assert os.path.exists(new_filename)
        lc = hen.io.load_lcurve(new_filename)
        assert hasattr(lc, 'header')
        # Test that the header is correctly conserved
        Header.fromstring(lc.header)
        assert hasattr(lc, 'gti')
        gti_to_test = hen.io.load_events(self.first_event_file).gti
        assert np.allclose(gti_to_test, lc.gti)
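
Note: the header check above relies on astropy's plain-string round-trip for headers. A minimal, self-contained sketch (no HENDRICS objects involved; the keyword and value are made up):

from astropy.io.fits import Header

hdr = Header()
hdr['TELESCOP'] = ('NuSTAR', 'telescope name')   # hypothetical card
serialized = hdr.tostring()           # one long string of 80-character cards
restored = Header.fromstring(serialized)
assert restored['TELESCOP'] == 'NuSTAR'
assert restored.comments['TELESCOP'] == 'telescope name'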
Code example #2
File: test_lc.py Project: swapsha96/HENDRICS
 def test_treat_event_file_nustar(self):
     from astropy.io.fits import Header
     treat_event_file(self.fits_fileA)
     lcurve_from_events(self.new_filename)
     newfile = \
         os.path.join(self.datadir,
                      'monol_testA_nustar_fpma_lc' + HEN_FILE_EXTENSION)
     assert os.path.exists(newfile)
     ftype, data = get_file_type(newfile)
     assert ftype == 'lc'
     assert isinstance(data, Lightcurve)
     Header.fromstring(data.header)
     assert hasattr(data, 'mjdref')
     assert data.mjdref > 0
Code example #3
File: match_exposure.py Project: ivmfnal/striped
def read_bliss_file(path):
    # Imports assumed by this snippet: fitsio to read the file, numpy and
    # numpy.lib.recfunctions.rec_append_fields for the column append below.
    import fitsio
    import numpy as np
    from numpy.lib.recfunctions import rec_append_fields
    from astropy.io.fits import Header

    fits = fitsio.FITS(path)

    # The header for the image that this catalog was derived from is
    # stored in the first extension of the FITS file. However, its format
    # is unusual, so we parse it ourselves.

    # First we build a string
    hdrstr = '\n'.join(fits['LDAC_IMHEAD'].read()[0][0])
    # Then we use astropy to parse that string into a dict
    hdr = Header.fromstring(hdrstr, sep='\n')

    # Now we read the catalog
    observations = fits['LDAC_OBJECTS'].read()

    # The image header gives us access to image-level quantities, like
    # EXPNUM, CCDNUM, MJD-OBS, etc. Careful: these quantities may have a
    # different byte order than the catalog data.
    EXPNUM = np.tile(hdr['EXPNUM'], len(observations))
    CCDNUM = np.tile(hdr['CCDNUM'], len(observations))

    # We can then append those quantities to the object array
    observations = rec_append_fields(observations,
                                     names=['EXPNUM', 'CCDNUM'],
                                     data=[EXPNUM, CCDNUM])
    return observations
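
Note: the byte-order caveat in the comments above can be handled explicitly before mixing header-derived columns with catalog data. A small sketch; `to_native` is a hypothetical helper, not part of the project:

import numpy as np

def to_native(arr):
    # FITS data is big-endian on disk; cast to the machine's native byte
    # order so later comparisons and appends behave consistently.
    return arr.astype(arr.dtype.newbyteorder('='))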
Code example #4
def get_lightcurve_dataset_from_stingray_Lightcurve(lcurve,
                                                    header=None,
                                                    header_comments=None,
                                                    hduname='RATE',
                                                    column=CONFIG.TIME_COLUMN):
    from astropy.io.fits import Header

    dataset = get_hdu_type_dataset("LIGHTCURVE", [column, hduname], hduname)

    hdu_table = dataset.tables[hduname]
    if header is None:
        if not hasattr(lcurve, 'header'):
            logging.warning("Light curve has no header")
            lcurve.header = Header().tostring()

        # Parse the serialized header, then flatten it into plain dicts
        # of strings for the keyword values and their comments.
        fits_header = Header.fromstring(lcurve.header)
        header = dict()
        if header_comments is None:
            header_comments = dict()
        for header_column in fits_header:
            header[header_column] = str(fits_header[header_column])
            header_comments[header_column] = \
                str(fits_header.comments[header_column])
    hdu_table.set_header_info(header, header_comments)
    hdu_table.columns[column].add_values(lcurve.time)
    hdu_table.columns[hduname].add_values(lcurve.counts, lcurve.counts_err)

    dataset.tables["GTI"] = \
        DsHelper.get_gti_table_from_stingray_gti(lcurve.gti)

    return dataset
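
Note: the header-to-dict flattening above can be exercised on its own. A standalone sketch with a hand-built header (card contents are made up):

from astropy.io.fits import Header

fits_header = Header.fromstring("OBJECT  = 'Crab' / target name\nEND",
                                sep='\n')
header = {key: str(fits_header[key]) for key in fits_header}
comments = {key: str(fits_header.comments[key]) for key in fits_header}
assert header['OBJECT'] == 'Crab'
assert comments['OBJECT'] == 'target name'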
Code example #5
File: dataset.py Project: StingraySoftware/dave
def get_lightcurve_dataset_from_stingray_Lightcurve(lcurve, header=None,
                                                    header_comments=None,
                                                    hduname='RATE',
                                                    column=CONFIG.TIME_COLUMN):
    from astropy.io.fits import Header

    dataset = get_hdu_type_dataset("LIGHTCURVE", [column, hduname], hduname)

    hdu_table = dataset.tables[hduname]
    if header is None:
        if not hasattr(lcurve, 'header'):
            logging.warning("Light curve has no header")
            lcurve.header = Header().tostring()

        # Parse the serialized header, then flatten it into plain dicts
        # of strings for the keyword values and their comments.
        fits_header = Header.fromstring(lcurve.header)
        header = dict()
        if header_comments is None:
            header_comments = dict()
        for header_column in fits_header:
            header[header_column] = str(fits_header[header_column])
            header_comments[header_column] = \
                str(fits_header.comments[header_column])
    hdu_table.set_header_info(header, header_comments)
    hdu_table.columns[column].add_values(lcurve.time)
    hdu_table.columns[hduname].add_values(lcurve.counts,
                                          lcurve.counts_err)

    dataset.tables["GTI"] = \
        DsHelper.get_gti_table_from_stingray_gti(lcurve.gti)

    return dataset
Code example #6
File: downskim.py Project: kadrlica/desqr
def create_header(fits):
    from astropy.io.fits import Header

    if isstring(fits):
        fits = fitsio.FITS(fits)

    hdrstr = '\n'.join(fits['LDAC_IMHEAD'].read()[0][0])
    return Header.fromstring(hdrstr, sep='\n')
Code example #7
File: dexp.py Project: kadrlica/decam_archive
 @classmethod
 def create_header(cls, catalog):
     if isinstance(catalog, np.ndarray):
         data = catalog
     else:
         fits = cls.parse_catalog(catalog)
         data = fits[cls._hdrhdu].read()[0][0]
     # Drop cards whose 8-character keyword field is blank; they would
     # otherwise trip up the parser.
     data = data[~np.char.startswith(data, '        =')]
     s = '\n'.join(data)
     return Header.fromstring(s, sep='\n')
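
Note: both `create_header` variants rely on the same trick as examples #3 and #6: join the LDAC_IMHEAD card strings with newlines and let astropy parse the result. A sketch with fabricated cards:

import numpy as np
from astropy.io.fits import Header

cards = np.array(['EXPNUM  = 566509', 'CCDNUM  = 10', 'END'])  # fabricated
hdr = Header.fromstring('\n'.join(cards), sep='\n')
assert hdr['EXPNUM'] == 566509 and hdr['CCDNUM'] == 10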
Code example #8
    @classmethod
    def from_header_string(cls, string):
        """
        Create an instance from a FITS header serialized as a string.
        :param string: the header string
        :return: the new instance
        """

        header = Header.fromstring(string)
        return cls.from_header(header)
Code example #9
def test_load_data():

    expected_datacube = np.arange(120).reshape(4, 5, 6)

    expected_header = Header.fromstring("SIMPLE  =                    T / conforms to FITS standard                      BITPIX  =                   64 / array data type                                NAXIS   =                    3 / number of array dimensions                     NAXIS1  =                    6                                                  NAXIS2  =                    5                                                  NAXIS3  =                    4                                                  CTYPE1  = 'VELO-LSR'                                                            CTYPE2  = 'GLON-CAR'                                                            CTYPE3  = 'GLAT-CAR'                                                            CRVAL1  = ''                                                                    CRVAL2  = ''                                                                    CRVAL3  = ''                                                                    CDELT1  = ''                                                                    CDELT2  = ''                                                                    CDELT3  = ''                                                                    CRPIX1  = ''                                                                    CRPIX2  = ''                                                                    CRPIX3  = ''                                                                    END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             ")

    datacube, header = load_data("test_data.fits", data_path="production/test/")

    assert_allclose(datacube, expected_datacube)
    assert_equal(header, expected_header)
Code example #10
    def test_calibrate(self):
        """Test event file calibration."""
        from astropy.io.fits import Header
        command = '{0} -r {1}'.format(
            os.path.join(self.datadir,
                         'monol_testA_nustar_fpma_ev' + HEN_FILE_EXTENSION),
            os.path.join(self.datadir, 'test.rmf'))
        hen.calibrate.main(command.split())
        new_filename = os.path.join(
            self.datadir,
            'monol_testA_nustar_fpma_ev_calib' + HEN_FILE_EXTENSION)
        assert os.path.exists(new_filename)
        ev = hen.io.load_events(new_filename)
        assert hasattr(ev, 'header')

        Header.fromstring(ev.header)
        assert hasattr(ev, 'gti')
        gti_to_test = hen.io.load_events(self.first_event_file).gti
        assert np.allclose(gti_to_test, ev.gti)
Code example #11
def test_load_data():

    expected_datacube = np.arange(120).reshape(4, 5, 6)

    expected_header = Header.fromstring(
        "SIMPLE  =                    T / conforms to FITS standard                      BITPIX  =                   64 / array data type                                NAXIS   =                    3 / number of array dimensions                     NAXIS1  =                    6                                                  NAXIS2  =                    5                                                  NAXIS3  =                    4                                                  CTYPE1  = 'VELO-LSR'                                                            CTYPE2  = 'GLON-CAR'                                                            CTYPE3  = 'GLAT-CAR'                                                            CRVAL1  = ''                                                                    CRVAL2  = ''                                                                    CRVAL3  = ''                                                                    CDELT1  = ''                                                                    CDELT2  = ''                                                                    CDELT3  = ''                                                                    CRPIX1  = ''                                                                    CRPIX2  = ''                                                                    CRPIX3  = ''                                                                    END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             "
    )

    datacube, header = load_data("test_data.fits",
                                 data_path="production/test/")

    assert_allclose(datacube, expected_datacube)
    assert_equal(header, expected_header)
Code example #12
def test_permute_data_to_standard_order():

    expected_datacube = np.arange(120).reshape(4, 5, 6).transpose(2, 0, 1)

    expected_header = Header.fromstring("SIMPLE  =                    T / conforms to FITS standard                      BITPIX  =                   64 / array data type                                NAXIS   =                    3 / number of array dimensions                     NAXIS1  =                    5                                                  NAXIS2  =                    4                                                  NAXIS3  =                    6                                                  CTYPE1  = 'GLON-CAR'                                                            CTYPE2  = 'GLAT-CAR'                                                            CTYPE3  = 'VELO-LSR'                                                            CRVAL1  = ''                                                                    CRVAL2  = ''                                                                    CRVAL3  = ''                                                                    CDELT1  = ''                                                                    CDELT2  = ''                                                                    CDELT3  = ''                                                                    CRPIX1  = ''                                                                    CRPIX2  = ''                                                                    CRPIX3  = ''                                                                    END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             ")
    
    # if something's wrong with load_data then it should fail the previous test
    datacube, header = permute_data_to_standard_order(
        *load_data("test_data.fits", data_path="production/test/"))

    assert_allclose(datacube, expected_datacube)
    assert_equal(header, expected_header)
    assert_equal(expected_header['ctype1'], 'GLON-CAR')
    assert_equal(header['ctype1'], 'GLON-CAR')
Code example #13
def test_reload_dendrogram_catalog_output():
    # 1. construct a silly dendrogram, catalog, header, and metadata
    data = np.zeros((3, 3, 3))
    data[1, 1, 1] = 1
    d = Dendrogram.compute(data, min_value=0)

    catalog = Table()
    catalog['test_column'] = np.zeros(10)

    header = Header.fromstring(
        "SIMPLE  =                    T / conforms to FITS standard                      BITPIX  =                   64 / array data type                                NAXIS   =                    3 / number of array dimensions                     NAXIS1  =                    6                                                  NAXIS2  =                    5                                                  NAXIS3  =                    4                                                  CTYPE1  = 'VELO-LSR'                                                            CTYPE2  = 'GLON-CAR'                                                            CTYPE3  = 'GLAT-CAR'                                                            CRVAL1  = ''                                                                    CRVAL2  = ''                                                                    CRVAL3  = ''                                                                    CDELT1  = ''                                                                    CDELT2  = ''                                                                    CDELT3  = ''                                                                    CRPIX1  = ''                                                                    CRPIX2  = ''                                                                    CRPIX3  = ''                                                                    END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             "
    )

    metadata = {}
    metadata['data_unit'] = u.K

    # 2. save it all "manually"
    filename_base = filepath + "not_real_data_reloadtest.fits_1.000_2.000_3"

    d.save_to(filename_base + "_d.hdf5")
    catalog.write(filename_base + "_catalog.fits", overwrite=True)
    header.tofile(filename_base + "_header.fits", overwrite=True)
    pickle.dump(metadata, open(filename_base + "_metadata.p", 'wb'))

    # 3. reconstitute it using the magic function
    kwargs = {
        'data_filename': 'not_real_data_reloadtest.fits',
        'min_value': 1,
        'min_delta': 2,
        'min_npix': 3,
        'savepath': filepath
    }

    d2, catalog2, header2, metadata2 = reload_dendrogram_catalog_output(
        **kwargs)

    # 4. assert they equal the mock objects.
    assert_equal(d2.index_map, d.index_map)
    assert_array_equal(catalog2, catalog)
    assert_equal(header2, header)
    assert_equal(metadata2, metadata)

    # 4.5. stress test.
    for i in range(5):
        reload_dendrogram_catalog_output(**kwargs)

    # 5. delete the saved objects.
    os.remove(filename_base + "_d.hdf5")
    os.remove(filename_base + "_catalog.fits")
    os.remove(filename_base + "_header.fits")
    os.remove(filename_base + "_metadata.p")
Code example #14
def get_eventlist_dataset_from_stingray_Eventlist(evlist,
                                                  header=None,
                                                  header_comments=None,
                                                  hduname='EVENTS',
                                                  column=CONFIG.TIME_COLUMN):
    from astropy.io.fits import Header

    evt_columns = [column, "PI"]
    if hasattr(evlist, 'energy'):
        evt_columns = [column, "PI", "E"]

    dataset = get_hdu_type_dataset("EVENTS", evt_columns, hduname)

    hdu_table = dataset.tables[hduname]
    if header is None:
        if not hasattr(evlist, 'header'):
            logging.warning("Event list has no header")
            evlist.header = Header().tostring()

        # Parse the serialized header, then flatten it into plain dicts
        # of strings for the keyword values and their comments.
        fits_header = Header.fromstring(evlist.header)
        header = dict()
        if header_comments is None:
            header_comments = dict()
        for header_column in fits_header:
            header[header_column] = str(fits_header[header_column])
            header_comments[header_column] = \
                str(fits_header.comments[header_column])

    hdu_table.set_header_info(header, header_comments)
    hdu_table.columns[column].add_values(evlist.time)

    if hasattr(evlist, 'energy'):
        if evlist.energy is not None and len(evlist.energy) == len(
                evlist.time):
            hdu_table.columns['E'].add_values(evlist.energy)
        else:
            logging.warning(
                "Event list energy array length differs from the number "
                "of events; setting all energies to 0")
            hdu_table.columns['E'].add_values(np.zeros_like(evlist.time))

    if hasattr(evlist, 'pi') and evlist.pi is not None and len(
            evlist.pi) == len(evlist.time):
        hdu_table.columns['PI'].add_values(evlist.pi)
    else:
        logging.warn("Event list has no PI values, using np.zeros_like")
        hdu_table.columns['PI'].add_values(np.zeros_like(evlist.time))

    dataset.tables["GTI"] = \
        DsHelper.get_gti_table_from_stingray_gti(evlist.gti)

    return dataset
Code example #15
def test_permute_data_to_standard_order():

    expected_datacube = np.arange(120).reshape(4, 5, 6).transpose(2, 0, 1)

    expected_header = Header.fromstring(
        "SIMPLE  =                    T / conforms to FITS standard                      BITPIX  =                   64 / array data type                                NAXIS   =                    3 / number of array dimensions                     NAXIS1  =                    5                                                  NAXIS2  =                    4                                                  NAXIS3  =                    6                                                  CTYPE1  = 'GLON-CAR'                                                            CTYPE2  = 'GLAT-CAR'                                                            CTYPE3  = 'VELO-LSR'                                                            CRVAL1  = ''                                                                    CRVAL2  = ''                                                                    CRVAL3  = ''                                                                    CDELT1  = ''                                                                    CDELT2  = ''                                                                    CDELT3  = ''                                                                    CRPIX1  = ''                                                                    CRPIX2  = ''                                                                    CRPIX3  = ''                                                                    END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             "
    )

    # if something's wrong with load_data then it should fail the previous test
    datacube, header = permute_data_to_standard_order(
        *load_data("test_data.fits", data_path="production/test/"))

    assert_allclose(datacube, expected_datacube)
    assert_equal(header, expected_header)
    assert_equal(expected_header['ctype1'], 'GLON-CAR')
    assert_equal(header['ctype1'], 'GLON-CAR')
Code example #16
File: test_coadd.py Project: pllim/reproject
def test_coadd_solar_map():

    # This is a test that exercises a lot of different parts of the mosaicking
    # code. The idea is to take three solar images from different viewpoints
    # and combine them into a single one. This uses weight maps that are not
    # uniform and also include NaN values.

    # The reference image was generated with sunpy 3.0.1; it will not work
    # with earlier versions because of the bug fixed by
    # https://github.com/sunpy/sunpy/pull/5381.
    pytest.importorskip('sunpy', minversion='3.0.1')
    from sunpy.map import Map, all_coordinates_from_map

    # Load in three images from different viewpoints around the Sun
    filenames = ['secchi_l0_a.fits', 'aia_171_level1.fits', 'secchi_l0_b.fits']
    maps = [Map(os.path.join(DATA, f)) for f in filenames]

    # Produce weight maps that are centered on the solar disk and go to zero at the edges
    coordinates = tuple(map(all_coordinates_from_map, maps))
    input_weights = [
        coord.transform_to("heliocentric").z.value for coord in coordinates
    ]
    input_weights = [(w / np.nanmax(w))**4 for w in input_weights]

    shape_out = [90, 180]
    wcs_out = WCS(Header.fromstring(HEADER_SOLAR_OUT, sep='\n'))
    scales = [1 / 6, 1, 1 / 6]

    input_data = tuple(
        (a.data * scale, a.wcs) for (a, scale) in zip(maps, scales))

    array, footprint = reproject_and_coadd(input_data,
                                           wcs_out,
                                           shape_out,
                                           input_weights=input_weights,
                                           reproject_function=reproject_interp,
                                           match_background=True)

    header_out = wcs_out.to_header()

    # ASTROPY_LT_40: astropy v4.0 introduced new default header keywords,
    # once we support only astropy 4.0 and later we can update the reference
    # data files and remove this section.
    for key in ('MJDREFF', 'MJDREFI', 'MJDREF', 'MJD-OBS'):
        header_out.pop(key, None)

    return array_footprint_to_hdulist(array, footprint, header_out)
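
Note: the weight maps above taper from 1 at disk center toward 0 at the limb, since heliocentric z is largest where the disk faces the observer; the fourth power merely sharpens the falloff. On synthetic values:

import numpy as np

z = np.array([1.0, 0.5, 0.1, np.nan])   # stand-in for heliocentric z values
w = (z / np.nanmax(z)) ** 4             # ~1 at disk center, ~0 near the limb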
Code example #17
def test_save_dendrogram_catalog_output():

    # 1. construct a silly dendrogram, catalog, header, and metadata
    data = np.zeros((3, 3, 3))
    data[1, 1, 1] = 1
    d = Dendrogram.compute(data, min_value=0)

    catalog = Table()
    catalog['test_column'] = np.zeros(10)

    header = Header.fromstring(
        "SIMPLE  =                    T / conforms to FITS standard                      BITPIX  =                   64 / array data type                                NAXIS   =                    3 / number of array dimensions                     NAXIS1  =                    6                                                  NAXIS2  =                    5                                                  NAXIS3  =                    4                                                  CTYPE1  = 'VELO-LSR'                                                            CTYPE2  = 'GLON-CAR'                                                            CTYPE3  = 'GLAT-CAR'                                                            CRVAL1  = ''                                                                    CRVAL2  = ''                                                                    CRVAL3  = ''                                                                    CDELT1  = ''                                                                    CDELT2  = ''                                                                    CDELT3  = ''                                                                    CRPIX1  = ''                                                                    CRPIX2  = ''                                                                    CRPIX3  = ''                                                                    END                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             "
    )

    metadata = {}
    metadata['data_unit'] = u.K

    # 2. run save_dendrogram_catalog_output on them - to some safe location
    kwargs = {
        'data_filename': 'not_real_data_savetest.fits',
        'min_value': 1,
        'min_delta': 2,
        'min_npix': 3,
        'savepath': filepath
    }
    save_dendrogram_catalog_output(d, catalog, header, metadata, **kwargs)

    # 3. reload those things
    filename_base = filepath + "not_real_data_savetest.fits_1.000_2.000_3"

    d2 = Dendrogram.load_from(filename_base + "_d.hdf5")
    catalog2 = Table.read(filename_base + "_catalog.fits")
    header2 = getheader(filename_base + "_header.fits")
    metadata2 = pickle.load(open(filename_base + "_metadata.p", 'rb'))

    # 4. assert they equal the mock objects.
    assert_equal(d2.index_map, d.index_map)
    assert_array_equal(catalog2, catalog)
    assert_equal(header2, header)
    assert_equal(metadata2, metadata)

    # 5. delete the saved objects.
    os.remove(filename_base + "_d.hdf5")
    os.remove(filename_base + "_catalog.fits")
    os.remove(filename_base + "_header.fits")
    os.remove(filename_base + "_metadata.p")
Code example #18
File: dataset.py Project: StingraySoftware/dave
def get_eventlist_dataset_from_stingray_Eventlist(evlist, header=None,
                                                  header_comments=None,
                                                  hduname='EVENTS',
                                                  column=CONFIG.TIME_COLUMN):
    from astropy.io.fits import Header

    evt_columns = [column, "PI"]
    if hasattr(evlist, 'energy'):
        evt_columns = [column, "PI", "E"]

    dataset = get_hdu_type_dataset("EVENTS", evt_columns, hduname)

    hdu_table = dataset.tables[hduname]
    if header is None:
        if not hasattr(evlist, 'header'):
            logging.warning("Event list has no header")
            evlist.header = Header().tostring()

        # Parse the serialized header, then flatten it into plain dicts
        # of strings for the keyword values and their comments.
        fits_header = Header.fromstring(evlist.header)
        header = dict()
        if header_comments is None:
            header_comments = dict()
        for header_column in fits_header:
            header[header_column] = str(fits_header[header_column])
            header_comments[header_column] = \
                str(fits_header.comments[header_column])

    hdu_table.set_header_info(header, header_comments)
    hdu_table.columns[column].add_values(evlist.time)

    if hasattr(evlist, 'energy'):
        if evlist.energy is not None and len(evlist.energy) == len(evlist.time):
            hdu_table.columns['E'].add_values(evlist.energy)
        else:
            logging.warning("Event list energy array length differs from the number of events; setting all energies to 0")
            hdu_table.columns['E'].add_values(np.zeros_like(evlist.time))

    if hasattr(evlist, 'pi') and evlist.pi is not None and len(evlist.pi) == len(evlist.time):
        hdu_table.columns['PI'].add_values(evlist.pi)
    else:
        logging.warn("Event list has no PI values, using np.zeros_like")
        hdu_table.columns['PI'].add_values(np.zeros_like(evlist.time))

    dataset.tables["GTI"] = \
        DsHelper.get_gti_table_from_stingray_gti(evlist.gti)

    return dataset
Code example #19
def get_header_info(obj):
    """Get header info from a Stingray object."""
    from astropy.io.fits import Header
    header = Header.fromstring(obj.header)
    info = type('', (), {})()  # simple anonymous attribute container
    info.mjdref = high_precision_keyword_read(header, 'MJDREF')
    info.telescope = header['TELESCOP']
    info.instrument = header['INSTRUME']
    info.source = header['OBJECT']
    try:
        user = header['USER']
    except KeyError:
        user = '******'
    info.observer = user
    info.user = user
    info.tstart = header['TSTART']
    info.tstop = header['TSTOP']
    try:
        ra = header['RA_OBJ']
        dec = header['DEC_OBJ']
    except KeyError:
        ra = header['RA_PNT']
        dec = header['DEC_PNT']

    a = SkyCoord(ra, dec, unit="degree")
    info.raj = \
        (a.ra.to_string("hourangle")
         ).replace("s", "").replace("h", ":").replace("m", ":")
    info.decj = (a.dec.to_string()
                 ).replace("s", "").replace("d", ":").replace("m", ":")
    if hasattr(obj, 'e_interval'):
        e0, e1 = obj.e_interval
    elif hasattr(obj, 'energy') and obj.energy is not None:
        e0, e1 = np.min(obj.energy), np.max(obj.energy)
    else:
        e0, e1 = 0, 0
    info.centralE = (e0 + e1) / 2
    info.bandpass = e1 - e0

    return info
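
Note: the RA/Dec string munging at the end of `get_header_info` is astropy's sexagesimal formatting with the unit letters swapped for colons. A sketch on a made-up position:

from astropy.coordinates import SkyCoord

a = SkyCoord(83.63, 22.01, unit='degree')   # made-up position
raj = (a.ra.to_string('hourangle')
       ).replace('s', '').replace('h', ':').replace('m', ':')
decj = (a.dec.to_string()
        ).replace('s', '').replace('d', ':').replace('m', ':')
# raj is e.g. '5:34:31.2', decj is e.g. '22:00:36'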
Code example #20
File: dataset.py Project: ayush1999/dave
def get_eventlist_dataset_from_stingray_Eventlist(evlist,
                                                  header=None,
                                                  header_comments=None,
                                                  hduname='EVENTS',
                                                  column='TIME'):
    from astropy.io.fits import Header
    lc_columns = [column, "PI", "ENERGY"]

    dataset = get_hdu_type_dataset("EVENTS", lc_columns, hduname)

    hdu_table = dataset.tables[hduname]
    if header is None:
        if not hasattr(evlist, 'header'):
            logging.warning("Event list has no header")
            evlist.header = Header().tostring()

        # Parse the serialized header, then flatten it into plain dicts
        # of strings for the keyword values and their comments.
        fits_header = Header.fromstring(evlist.header)
        header = dict()
        if header_comments is None:
            header_comments = dict()
        for header_column in fits_header:
            header[header_column] = str(fits_header[header_column])
            header_comments[header_column] = \
                str(fits_header.comments[header_column])

    hdu_table.set_header_info(header, header_comments)
    hdu_table.columns[lc_columns[0]].add_values(evlist.time)
    if hasattr(evlist, 'energy'):
        hdu_table.columns['ENERGY'].add_values(evlist.energy)
    else:
        hdu_table.columns['ENERGY'].add_values(np.zeros_like(evlist.time))

    if hasattr(evlist, 'pi'):
        hdu_table.columns['PI'].add_values(evlist.pi)
    else:
        # Fill the PI column (not ENERGY) when no PI values are present.
        hdu_table.columns['PI'].add_values(np.zeros_like(evlist.time))

    dataset.tables["GTI"] = \
        DsHelper.get_gti_table_from_stingray_gti(evlist.gti)

    return dataset
Code example #21
def test_coadd_solar_map():

    # This is a test that exercises a lot of different parts of the mosaicking
    # code. The idea is to take three solar images from different viewpoints
    # and combine them into a single one. This uses weight maps that are not
    # uniform and also include NaN values.

    pytest.importorskip('sunpy', minversion='1.0.4')
    from sunpy.map import Map, all_coordinates_from_map

    # Load in three images from different viewpoints around the Sun
    filenames = ['secchi_l0_a.fits', 'aia_171_level1.fits', 'secchi_l0_b.fits']
    maps = [Map(os.path.join(DATA, f)) for f in filenames]

    # Produce weight maps that are centered on the solar disk and go to zero at the edges
    coordinates = tuple(map(all_coordinates_from_map, maps))
    input_weights = [
        coord.transform_to("heliocentric").z.value for coord in coordinates
    ]
    input_weights = [(w / np.nanmax(w))**4 for w in input_weights]

    shape_out = [90, 180]
    wcs_out = WCS(Header.fromstring(HEADER_SOLAR_OUT, sep='\n'))
    scales = [1 / 6, 1, 1 / 6]

    input_data = tuple(
        (a.data * scale, a.wcs) for (a, scale) in zip(maps, scales))

    array, footprint = reproject_and_coadd(input_data,
                                           wcs_out,
                                           shape_out,
                                           input_weights=input_weights,
                                           reproject_function=reproject_interp,
                                           match_background=True)

    return array_footprint_to_hdulist(array, footprint, wcs_out.to_header())
Code example #22
CTYPE3  = GLON-CAR
CRVAL1  = 10
CRVAL2  = 20
CRVAL3  = 25
CRPIX1  = 30
CRPIX2  = 40
CRPIX3  = 45
CDELT1  = -0.1
CDELT2  =  0.5
CDELT3  =  0.1
CUNIT1  = deg
CUNIT2  = Hz
CUNIT3  = deg
"""

WCS_SPECTRAL_CUBE = WCS(Header.fromstring(HEADER_SPECTRAL_CUBE, sep='\n'))
WCS_SPECTRAL_CUBE.pixel_bounds = [(-1, 11), (-2, 18), (5, 15)]


@pytest.mark.parametrize("item, ndim, expected", (
    ([Ellipsis, 10], 4, [slice(None)] * 3 + [10]),
    ([10, slice(20, 30)], 5, [10, slice(20, 30)] + [slice(None)] * 3),
    ([10, Ellipsis, 8], 10, [10] + [slice(None)] * 8 + [8])
))
def test_sanitize_slice(item, ndim, expected):
    new_item = sanitize_slices(item, ndim)
    # FIXME: do we still need the first two since the third assert
    # should cover it all?
    assert len(new_item) == ndim
    assert all(isinstance(i, (slice, int)) for i in new_item)
    assert new_item == expected
Code example #23
File: test_fitswcs.py Project: yannick1974/astropy
def header_polarized():
    return Header.fromstring(HEADER_POLARIZED, sep='\n')
Code example #24
File: test_fitswcs.py Project: yannick1974/astropy
def header_spectral_frames():
    return Header.fromstring(HEADER_SPECTRAL_FRAMES, sep='\n')
Code example #25
File: header.py Project: astromancer/salticam
def _convert(header):
    if isinstance(header, (str, bytes)):
        return Header.fromstring(header)
    if isinstance(header, Header):
        return header
    raise TypeError('Invalid type for header')
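
Note: a usage sketch for `_convert` above (the header content is arbitrary):

from astropy.io.fits import Header

h = Header()
h['NAXIS'] = 2
assert _convert(h) is h                        # Header objects pass through
assert _convert(h.tostring())['NAXIS'] == 2    # strings get parsed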
Code example #26
File: read_header.py Project: ivmfnal/striped
import fitsio
import numpy as np
import numpy.lib.recfunctions as recfuncs
from astropy.io.fits import Header

# Read the catalog FITS file
filename = '/data/bliss/566500/566509/D00566509_i_10_r1p1_fullcat.fits'
fits = fitsio.FITS(filename)

# The header for the image that this catalog was derived from is
# stored in the first extension of the FITS file. However, its format
# is unusual, so we parse it ourselves.

# First we build a string
hdrstr = '\n'.join(fits['LDAC_IMHEAD'].read()[0][0])
# Then we use astropy to parse that string into a dict
hdr = Header.fromstring(hdrstr, sep='\n')

# Now we read the catalog
catalog = fits['LDAC_OBJECTS'].read()

# The image header gives us access to image-level quantities, like
# EXPNUM, CCDNUM, MJD-OBS, etc. Careful: these quantities may have a
# different byte order than the catalog data.
EXPNUM = np.tile(hdr['EXPNUM'], len(catalog))
CCDNUM = np.tile(hdr['CCDNUM'], len(catalog))

# We can then append those quantities to the object array
data = recfuncs.rec_append_fields(catalog,
                                  names=['EXPNUM', 'CCDNUM'],
                                  data=[EXPNUM, CCDNUM])

print("Check out the byte order...")
Code example #27
CTYPE3  = GLON-CAR
CRVAL1  = 10
CRVAL2  = 20
CRVAL3  = 25
CRPIX1  = 30
CRPIX2  = 40
CRPIX3  = 45
CDELT1  = -0.1
CDELT2  =  0.5
CDELT3  =  0.1
CUNIT1  = deg
CUNIT2  = Hz
CUNIT3  = deg
"""

WCS_SPECTRAL_CUBE = WCS(Header.fromstring(HEADER_SPECTRAL_CUBE, sep='\n'))
WCS_SPECTRAL_CUBE.pixel_bounds = [(-1, 11), (-2, 18), (5, 15)]


@pytest.mark.parametrize(
    "item, ndim, expected",
    (([Ellipsis, 10], 4, [slice(None)] * 3 + [10]),
     ([10, slice(20, 30)], 5, [10, slice(20, 30)] + [slice(None)] * 3),
     ([10, Ellipsis, 8], 10, [10] + [slice(None)] * 8 + [8])))
def test_sanitize_slice(item, ndim, expected):
    new_item = sanitize_slices(item, ndim)
    # FIXME: do we still need the first two since the third assert
    # should cover it all?
    assert len(new_item) == ndim
    assert all(isinstance(i, (slice, int)) for i in new_item)
    assert new_item == expected
Code example #28
File: test_sliced_wcs.py Project: Gabriel-p/astropy
CRVAL2  = 20
CRVAL3  = 25
CRPIX1  = 30
CRPIX2  = 40
CRPIX3  = 45
CDELT1  = -0.1
CDELT2  =  0.5
CDELT3  =  0.1
CUNIT1  = deg
CUNIT2  = Hz
CUNIT3  = deg
"""

with warnings.catch_warnings():
    warnings.simplefilter('ignore', VerifyWarning)
    WCS_SPECTRAL_CUBE = WCS(Header.fromstring(HEADER_SPECTRAL_CUBE, sep='\n'))
WCS_SPECTRAL_CUBE.pixel_bounds = [(-1, 11), (-2, 18), (5, 15)]


def test_invalid_slices():
    with pytest.raises(IndexError):
        SlicedLowLevelWCS(WCS_SPECTRAL_CUBE,
                          [None, None, [False, False, False]])

    with pytest.raises(IndexError):
        SlicedLowLevelWCS(WCS_SPECTRAL_CUBE,
                          [None, None, slice(None, None, 2)])

    with pytest.raises(IndexError):
        SlicedLowLevelWCS(WCS_SPECTRAL_CUBE, [None, None, 1000.100])
Code example #29
def header_spectral_1d():
    return Header.fromstring(HEADER_SPECTRAL_1D, sep='\n')
Code example #30
File: phaseogram.py Project: InesPM/HENDRICS
def run_interactive_phaseogram(event_file,
                               freq,
                               fdot=0,
                               fddot=0,
                               nbin=64,
                               nt=32,
                               binary=False,
                               test=False,
                               binary_parameters=[None, 0, None],
                               pepoch=None,
                               norm=None,
                               plot_only=False,
                               deorbit_par=None):
    from astropy.io.fits import Header
    from astropy.coordinates import SkyCoord

    events = load_events(event_file)
    try:
        header = Header.fromstring(events.header)
        position = SkyCoord(header['RA_OBJ'],
                            header['DEC_OBJ'],
                            unit='deg',
                            frame=header['RADECSYS'].lower())
        name = header['OBJECT']
    except (KeyError, AttributeError):
        position = name = None

    pepoch_mjd = pepoch
    if pepoch is None:
        pepoch = events.gti[0, 0]
        pepoch_mjd = pepoch / 86400 + events.mjdref
    else:
        pepoch = (pepoch_mjd - events.mjdref) * 86400

    if binary:
        ip = BinaryPhaseogram(events.time,
                              freq,
                              nph=nbin,
                              nt=nt,
                              fdot=fdot,
                              test=test,
                              fddot=fddot,
                              pepoch=pepoch,
                              orbital_period=binary_parameters[0],
                              asini=binary_parameters[1],
                              t0=binary_parameters[2],
                              mjdref=events.mjdref,
                              gti=events.gti,
                              label=hen_root(event_file),
                              norm=norm,
                              object=name,
                              position=position,
                              plot_only=plot_only)
    else:
        events_save = copy.deepcopy(events)
        if deorbit_par is not None:
            events = deorbit_events(events, deorbit_par)

        ip = InteractivePhaseogram(events.time,
                                   freq,
                                   nph=nbin,
                                   nt=nt,
                                   fdot=fdot,
                                   test=test,
                                   fddot=fddot,
                                   pepoch=pepoch,
                                   mjdref=events.mjdref,
                                   gti=events.gti,
                                   label=hen_root(event_file),
                                   norm=norm,
                                   object=name,
                                   position=position,
                                   plot_only=plot_only,
                                   time_corr=events_save.time - events.time)

    return ip
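
Note: the pepoch bookkeeping above converts between mission-elapsed seconds and MJD via `mjdref`. The arithmetic on made-up numbers:

mjdref = 55197.0     # hypothetical mission reference epoch (MJD)
pepoch = 86400.0     # epoch in seconds since mjdref
pepoch_mjd = pepoch / 86400 + mjdref             # -> 55198.0
assert (pepoch_mjd - mjdref) * 86400 == pepoch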
Code example #31
File: test_fitswcs.py Project: caseyjlaw/astropy
def header_time_1d():
    return Header.fromstring(HEADER_TIME_1D, sep='\n')
Code example #32
File: test_fitswcs.py Project: caseyjlaw/astropy
CTYPE2  = DEC--TAN
CRVAL1  = 10
CRVAL2  = 20
CRPIX1  = 30
CRPIX2  = 40
CDELT1  = -0.1
CDELT2  =  0.1
CROTA2  = 0.
CUNIT1  = deg
CUNIT2  = deg
"""

with warnings.catch_warnings():
    warnings.simplefilter('ignore', VerifyWarning)
    WCS_SIMPLE_CELESTIAL = WCS(
        Header.fromstring(HEADER_SIMPLE_CELESTIAL, sep='\n'))


def test_simple_celestial():

    wcs = WCS_SIMPLE_CELESTIAL

    # Low-level API

    assert wcs.pixel_n_dim == 2
    assert wcs.world_n_dim == 2
    assert wcs.array_shape is None
    assert wcs.pixel_shape is None
    assert wcs.world_axis_physical_types == ['pos.eq.ra', 'pos.eq.dec']
    assert wcs.world_axis_units == ['deg', 'deg']
    assert wcs.pixel_axis_names == ['', '']
Code example #33
File: hst_cooksey.py Project: xiaoleihappy/igmspec
def hdf5_adddata(hdf,
                 sname,
                 meta,
                 debug=False,
                 chk_meta_only=False,
                 mk_test_file=False):
    """ Append HST/FUSE data to the h5 file

    Parameters
    ----------
    hdf : hdf5 pointer
    sname : str
      Survey name
    meta : Table
      Metadata table for the survey
    chk_meta_only : bool, optional
      Only check meta file; will not write
    mk_test_file : bool, optional
      Generate the debug test file for Travis

    Returns
    -------

    """
    Rdicts = defs.get_res_dicts()
    # Add Survey
    print("Adding {:s} survey to DB".format(sname))
    hstc_grp = hdf.create_group(sname)
    # Checks
    if sname != 'UVpSM4':
        raise IOError("Not expecting this survey..")

    # Build spectra (and parse for meta)
    nspec = len(meta)
    max_npix = 40000  # Just needs to be large enough
    # Init
    data = init_data(max_npix, include_co=True)
    spec_set = hdf[sname].create_dataset('spec',
                                         data=data,
                                         chunks=True,
                                         maxshape=(None, ),
                                         compression='gzip')
    spec_set.resize((nspec, ))
    Rlist = []
    wvminlist = []
    wvmaxlist = []
    npixlist = []
    gratinglist = []
    datelist = []
    badf = []
    badstis = []
    badghrs = []
    # Loop
    path = os.getenv('RAW_IGMSPEC') + '/HST_Cooksey/'
    maxpix = 0
    for jj, row in enumerate(meta):
        # Generate full file
        full_file = path + '{:s}/{:s}/{:s}'.format(row['QSO'], row['INSTR'],
                                                   row['SPEC_FILE'])
        # Extract
        if row['INSTR'] == 'FUSE':
            hext = 1
        else:
            hext = 0
        print("HST_Cooksey: Reading {:s}".format(full_file))
        try:
            spec = lsio.readspec(full_file, head_exten=hext, masking='edges')
        except Exception:  # BAD HEADER
            hdu = fits.open(full_file)
            head1 = hdu[1].header
            hdu[1].verify('fix')
            tbl = Table(hdu[1].data)
            spec = lsio.readspec(tbl, masking='edges')
            spec.meta['headers'][spec.select] = head1
            # Continuum
            cfile = full_file.replace('.fits', '_c.fits')
            if os.path.isfile(cfile):
                # Watch that mask!
                gdp = ~spec.data['flux'][spec.select].mask
                spec.data['co'][spec.select][gdp] = (
                    fits.open(cfile)[0].data)[gdp]
        # npix
        npix = spec.npix
        if npix > max_npix:
            raise ValueError(
                "Not enough pixels in the data... ({:d})".format(npix))
        else:
            maxpix = max(npix, maxpix)
        # Some fiddling about
        for key in ['wave', 'flux', 'sig', 'co']:
            data[key] = 0.  # Important to init (for compression too)
        data['flux'][0][:npix] = spec.flux.value
        data['sig'][0][:npix] = spec.sig.value
        data['wave'][0][:npix] = spec.wavelength.value
        if spec.co_is_set:
            try:
                data['co'][0][:npix] = spec.co.value
            except ValueError:
                pdb.set_trace()
        # Meta
        datet = None
        if row['INSTR'] == 'FUSE':
            if 'HISTORY' in spec.header.keys():
                ncards = len(spec.header['HISTORY'])
                flg_H = True
            else:
                flg_H = False
                hdu = fits.open(full_file)
                head0 = hdu[0].header
                ncards = len(head0)
                # Is this a good one?
                if 'APER_ACT' in head0:
                    pass
                else:  # Need to fight harder for the header
                    # Look for untrim
                    untrim = full_file + '.untrim'
                    if not os.path.isfile(untrim):
                        pdb.set_trace()
                    # Read
                    hduu = fits.open(untrim)
                    if 'PKS2005' in untrim:  # One extra kludge..
                        head0 = hduu[1].header
                        flg_H = True
                        ncards = len(head0['HISTORY'])
                    else:
                        head0 = hduu[0].header
                        ncards = len(head0)
                spec.meta['headers'][spec.select] = head0
            # Read from history
            for ss in range(ncards):
                if flg_H:
                    try:
                        card = Header.fromstring(spec.header['HISTORY'][ss])
                    except Exception:
                        pdb.set_trace()
                    try:
                        ckey = list(card.keys())[0]
                    except IndexError:
                        continue
                    else:
                        card0 = card[0]
                else:
                    ckey, card0 = list(spec.header.keys())[ss], spec.header[ss]
                # Parse
                if ckey == 'APERTURE':
                    aper = card0
                elif ckey == 'DETECTOR':
                    det = card0
                elif ckey == 'APER_ACT':  # Extracted aperture
                    ext_ap = card0
                elif ckey == 'DATE':  # Processing date
                    datet = card0
            gratinglist.append(ext_ap + det)
        elif row['INSTR'] == 'STIS':
            try:
                datet = spec.header['DATE']
            except KeyError:  # handful of kludged coadds
                if 'HISTORY' not in spec.header.keys():
                    # Grab from the other extension, e.g. PKS0405
                    hdu = fits.open(full_file)
                    head1 = hdu[1].header
                    spec.meta['headers'][0] = head1
                for ihist in spec.header['HISTORY']:
                    if 'TDATEOBS' in ihist:
                        idash = ihist.find('-')
                        datet = ihist[idash - 4:idash + 6]
                # Grating from name
                i0 = full_file.rfind('_')
                i1 = full_file.rfind('.fits')
                gratinglist.append(full_file[i0 + 1:i1])
                if datet is None:
                    pdb.set_trace()
            else:
                gratinglist.append(spec.header['OPT_ELEM'])
        elif row['INSTR'] == 'GHRS':
            # Date
            try:
                tmp = spec.header['DATE-OBS']
            except KeyError:
                # Pull header from parallel file
                iM = full_file.find('M_1')
                if iM <= 0:
                    iM = full_file.find('L_1')
                ofile = full_file[:iM + 1] + '_F.fits'
                if not os.path.isfile(ofile):
                    if 'NGC4151' in ofile:  # Kludge
                        ofile = ofile.replace('G160M', 'G160Mmd')
                    elif 'PKS2155-304_GHRS_G140L' in ofile:  # Kludge
                        ofile = ofile.replace('G140L', 'G140Llo')
                    elif 'PKS2155-304_GHRS_G160M' in ofile:  # Kludge
                        ofile = ofile.replace('G160M', 'G160Mmd')
                    else:
                        pdb.set_trace()
                hdu = fits.open(ofile)
                head0 = hdu[0].header
                spec.meta['headers'][spec.select] = head0
                # Recover the observation date from the parallel file header,
                # since the KeyError above means spec.header lacks DATE-OBS.
                tmp = head0['DATE-OBS']
            # Reformat
            prs = tmp.split('/')
            if prs[2][0] == '9':
                yr = '19' + prs[2]
            else:
                yr = '20' + prs[2]
            datet = yr + '-' + prs[1] + '-{:02d}'.format(int(prs[0]))
            # Grating
            gratinglist.append(spec.header['GRATING'])
        else:
            pdb.set_trace()
        if datet is None:
            try:
                datet = spec.header['DATE-OBS']
            except KeyError:
                print("Missing Header for file: {:s}".format(full_file))
                badf.append(full_file)
                datet = '9999-9-9'
        t = Time(datet, format='isot',
                 out_subfmt='date')  # Fixes to YYYY-MM-DD
        datelist.append(t.iso)
        try:
            Rlist.append(Rdicts[row['INSTR']][gratinglist[-1]])
        except KeyError:
            print(gratinglist[-1])
            pdb.set_trace()
        wvminlist.append(np.min(data['wave'][0][:npix]))
        wvmaxlist.append(np.max(data['wave'][0][:npix]))
        npixlist.append(npix)
        if chk_meta_only:
            continue
        # Only way to set the dataset correctly
        spec_set[jj] = data

    #
    if (len(badstis)) > 0:
        raise ValueError("Somehow have a bad STIS header..")
    if len(badf) > 0:
        print("We still have bad FUSE headers")
        pdb.set_trace()
    if len(badghrs) > 0:
        print("We still have bad GHRS headers")
        pdb.set_trace()
    print("Max pix = {:d}".format(maxpix))
    # Add columns
    meta.add_column(Column(npixlist, name='NPIX'))
    meta.add_column(Column(wvminlist, name='WV_MIN'))
    meta.add_column(Column(Rlist, name='R'))
    meta.add_column(Column(gratinglist, name='DISPERSER'))
    meta.add_column(Column(wvmaxlist, name='WV_MAX'))
    meta.add_column(Column(datelist, name='DATE-OBS'))
    meta.add_column(Column(np.arange(nspec, dtype=int), name='GROUP_ID'))

    # Add HDLLS meta to hdf5
    if chk_meta(meta):
        if chk_meta_only:
            pdb.set_trace()
        hdf[sname]['meta'] = meta
    else:
        pdb.set_trace()
        raise ValueError("meta file failed")
    # References
    refs = [
        dict(url='http://adsabs.harvard.edu/abs/2010ApJ...708..868C',
             bib='cooksey10')
    ]
    jrefs = ltu.jsonify(refs)
    hdf[sname]['meta'].attrs['Refs'] = json.dumps(jrefs)
    #
    return
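
Note: the FUSE branch above parses each HISTORY entry as if it were a single header card. A sketch of that per-card use of Header.fromstring (card text fabricated):

from astropy.io.fits import Header

card = Header.fromstring("APERTURE= 'LWRS    '")   # one 80-column-style card
ckey = list(card.keys())[0]
assert ckey == 'APERTURE'
assert card[ckey] == 'LWRS'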
Code example #34
File: test_fitswcs.py Project: caseyjlaw/astropy
def header_time_1d_no_obs():
    header = Header.fromstring(HEADER_TIME_1D, sep='\n')
    del header['OBSGEO-L']
    del header['OBSGEO-B']
    del header['OBSGEO-H']
    return header
Code example #35
File: test_fitswcs.py Project: Cadair/astropy
HEADER_SIMPLE_CELESTIAL = """
WCSAXES = 2
CTYPE1  = RA---TAN
CTYPE2  = DEC--TAN
CRVAL1  = 10
CRVAL2  = 20
CRPIX1  = 30
CRPIX2  = 40
CDELT1  = -0.1
CDELT2  =  0.1
CROTA2  = 0.
CUNIT1  = deg
CUNIT2  = deg
"""

WCS_SIMPLE_CELESTIAL = WCS(Header.fromstring(HEADER_SIMPLE_CELESTIAL, sep='\n'))


def test_simple_celestial():

    wcs = WCS_SIMPLE_CELESTIAL

    # Low-level API

    assert wcs.pixel_n_dim == 2
    assert wcs.world_n_dim == 2
    assert wcs.array_shape is None
    assert wcs.pixel_shape is None
    assert wcs.world_axis_physical_types == ['pos.eq.ra', 'pos.eq.dec']
    assert wcs.world_axis_units == ['deg', 'deg']