Example #1
    def test_save_longcomplex(self):
        val = np.longcomplex(1.01 + 2.3j)
        data = {'val': val}
        save_data(data, 'bubu' + HEN_FILE_EXTENSION)
        data_out = load_data('bubu' + HEN_FILE_EXTENSION)

        assert np.allclose(data['val'], data_out['val'])
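To run this round trip outside the test class, the helpers presumably come from hendrics.io (the HENDRICS package); a minimal standalone sketch under that assumption:

    import numpy as np
    from hendrics.io import save_data, load_data, HEN_FILE_EXTENSION  # assumed location

    # np.clongdouble is the non-deprecated spelling of np.longcomplex
    val = np.clongdouble(1.01 + 2.3j)
    data = {'val': val}
    save_data(data, 'bubu' + HEN_FILE_EXTENSION)        # write the dict to disk
    data_out = load_data('bubu' + HEN_FILE_EXTENSION)   # read it back
    assert np.allclose(data['val'], data_out['val'])    # value survives the round trip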
Example #2
    def test_treat_event_file_xmm(self):
        treat_event_file(self.fits_file)
        new_filename = 'monol_test_fake_xmm_epn_det01_ev' + HEN_FILE_EXTENSION
        assert os.path.exists(os.path.join(self.datadir, new_filename))
        data = load_data(os.path.join(self.datadir, new_filename))
        assert 'instr' in data
        assert 'gti' in data
        assert 'mjdref' in data
Example #3
    def test_treat_event_file_nustar(self):
        treat_event_file(self.fits_fileA)
        new_filename = 'monol_testA_nustar_fpma_ev' + HEN_FILE_EXTENSION
        assert os.path.exists(os.path.join(self.datadir, new_filename))
        data = load_data(os.path.join(self.datadir, new_filename))
        assert 'instr' in data
        assert 'gti' in data
        assert 'mjdref' in data
        assert np.isclose(data['mjdref'], ref_mjd(self.fits_fileA))
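The same flow sketched outside the test class; treat_event_file is assumed to come from hendrics.read_events and the I/O helpers from hendrics.io, and the input file name is hypothetical (the output name follows the root + mission + instrument convention seen above):

    import os
    from hendrics.io import load_data, HEN_FILE_EXTENSION   # assumed locations
    from hendrics.read_events import treat_event_file

    fits_file = 'monol_testA.evt'   # hypothetical NuSTAR event list in the working directory
    treat_event_file(fits_file)     # writes a HENDRICS event file next to the input
    new_filename = 'monol_testA_nustar_fpma_ev' + HEN_FILE_EXTENSION
    assert os.path.exists(new_filename)
    data = load_data(new_filename)  # dict-like structure with 'instr', 'gti', 'mjdref'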
Example #4
    def test_save_longcomplex(self):
        val = np.complex256(1.01 + 2.3j)
        data = {'val': val}
        with pytest.warns(UserWarning) as record:
            save_data(data, 'bubu' + HEN_FILE_EXTENSION)
        assert "complex256 yet unsupported" in record[0].message.args[0]
        data_out = load_data('bubu' + HEN_FILE_EXTENSION)

        assert np.allclose(data['val'], data_out['val'])
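The same check can lean on pytest's match argument instead of indexing the recorded warnings; a sketch under the same hendrics.io import assumption:

    import numpy as np
    import pytest
    from hendrics.io import save_data, load_data, HEN_FILE_EXTENSION  # assumed location

    def test_save_longcomplex_alt():
        val = np.complex256(1.01 + 2.3j)   # extended-precision complex scalar
        data = {'val': val}
        # pytest matches the warning message against the pattern directly
        with pytest.warns(UserWarning, match="complex256 yet unsupported"):
            save_data(data, 'bubu' + HEN_FILE_EXTENSION)
        data_out = load_data('bubu' + HEN_FILE_EXTENSION)
        assert np.allclose(data['val'], data_out['val'])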
Example #5
    def test_save_data(self):
        struct = {'a': 0.1, 'b': np.longdouble('123.4567890123456789'),
                  'c': np.longdouble([[-0.5, 3.5]]),
                  'd': 1}
        save_data(struct, self.dum)
        struct2 = load_data(self.dum)
        assert np.allclose(struct['a'], struct2['a'])
        assert np.allclose(struct['b'], struct2['b'])
        assert np.allclose(struct['c'], struct2['c'])
        assert np.allclose(struct['d'], struct2['d'])
Example #6
def get_lightcurve_fits_dataset_with_stingray(destination, hdulist, hduname='RATE',
                                            column=CONFIG.TIME_COLUMN, gtistring=CONFIG.GTI_STRING, time_offset=0):

    supported_rate_columns = set(['RATE', 'RATE1', 'COUNTS'])
    found_rate_columns = set(hdulist[hduname].data.names)
    intersection_columns = supported_rate_columns.intersection(found_rate_columns)

    #Check if HDUCLAS1 = LIGHTCURVE column exists
    logging.debug("Reading Lightcurve Fits columns")
    if "HDUCLAS1" not in hdulist[hduname].header:
        logging.warn("HDUCLAS1 not found in header: " + hduname)
        return None

    elif hdulist[hduname].header["HDUCLAS1"] != "LIGHTCURVE":
        logging.warn("HDUCLAS1 is not LIGHTCURVE")
        return None

    elif len(intersection_columns) == 0:
        logging.warn("RATE, RATE1 or COUNTS columns not found in " + str(hduname) + " HDU, found columns: " + str(hdulist[hduname].data.names))
        return None

    elif len(intersection_columns) > 1:
        logging.warn("RATE, RATE1 or COUNTS ambiguous columns found in " + str(hduname) + " HDU, found columns: " + str(hdulist[hduname].data.names))
        return None

    ratecolumn = list(intersection_columns)[0]
    if len(hdulist[hduname].data[ratecolumn].shape) != 1 \
        or not (isinstance(hdulist[hduname].data[ratecolumn][0], int) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], np.integer) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], float) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], np.floating)):
        logging.warn("Wrong data type found for column: " + str(ratecolumn) + " in " + str(hduname) + " HDU, expected Integer or Float.")
        return None

    header, header_comments = get_header(hdulist, hduname)

    # Reads the lightcurve with HENDRICS
    outfile = lcurve_from_fits(destination, gtistring=get_hdu_string_from_hdulist(gtistring, hdulist),
                             timecolumn=column, ratecolumn=ratecolumn, ratehdu=1,
                             fracexp_limit=CONFIG.FRACEXP_LIMIT)[0]

    lcurve, events_start_time = substract_tstart_from_lcurve(load_data(outfile), time_offset)

    dataset = DataSet.get_lightcurve_dataset_from_stingray_lcurve(lcurve, header, header_comments,
                                                                    hduname, column)

    # Stores the events_start_time in time column extra
    dataset.tables[hduname].columns[column].set_extra("TSTART", events_start_time)

    logging.debug("Read Lightcurve fits with stingray file successfully: " + str(destination) + ", tstart: " + str(events_start_time) + ", rate: " + str(len(lcurve["counts"])))

    return dataset
Example #7
    def test_treat_event_file_xmm(self):
        fits_file = os.path.join(self.datadir, 'monol_test_fake.evt')
        hen.fake.main([
            '--deadtime', '1e-4', '-m', 'XMM', '-i', 'epn', '--ctrate', '2000',
            '-o', fits_file
        ])

        treat_event_file(fits_file)
        new_filename = 'monol_test_fake_xmm_epn_det01_ev' + HEN_FILE_EXTENSION
        assert os.path.exists(os.path.join(self.datadir, new_filename))
        data = load_data(os.path.join(self.datadir, new_filename))
        assert 'instr' in data
        assert 'gti' in data
        assert 'mjdref' in data
        treat_event_file(fits_file, gti_split=True)
        new_filename = \
            'monol_test_fake_xmm_epn_det01_gti0_ev' + HEN_FILE_EXTENSION
        assert os.path.exists(os.path.join(self.datadir, new_filename))
        data = load_data(os.path.join(self.datadir, new_filename))
        assert 'instr' in data
        assert 'gti' in data
        assert 'mjdref' in data
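Here hen presumably refers to the HENDRICS package; the same fake-event generation can be scripted directly through hendrics.fake (a sketch, with a hypothetical output path):

    from hendrics import fake  # assumed import behind the 'hen.fake' usage above

    fits_file = 'monol_test_fake.evt'  # hypothetical output path
    # Simulate an XMM/EPN event list with ~2000 counts/s and 1e-4 s dead time
    fake.main(['--deadtime', '1e-4', '-m', 'XMM', '-i', 'epn',
               '--ctrate', '2000', '-o', fits_file])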
Example #8
    def test_treat_event_file_xmm_lensplit(self):

        treat_event_file(self.fits_file, length_split=10)
        new_filename = \
            'monol_test_fake_xmm_epn_det01_chunk000_ev' + HEN_FILE_EXTENSION
        assert os.path.exists(os.path.join(self.datadir, new_filename))
        data = load_data(os.path.join(self.datadir, new_filename))
        assert 'instr' in data
        assert 'gti' in data
        assert 'mjdref' in data
        gtis = data['gti']
        lengths = np.array([g1 - g0 for (g0, g1) in gtis])
        assert np.all(lengths <= 10)
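The gti entry is an (N, 2) array of (start, stop) times in seconds, so the length check can also be vectorized; a small self-contained illustration with made-up intervals:

    import numpy as np

    # Made-up GTIs: each row is a (start, stop) pair in seconds
    gti = np.array([[0.0, 10.0], [12.0, 19.5], [25.0, 33.0]])
    lengths = gti[:, 1] - gti[:, 0]   # duration of each good time interval
    assert np.all(lengths <= 10)      # what length_split=10 guarantees in the test above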
Example #9
    def test_load_data_fails(self):
        with pytest.raises(TypeError) as record:
            load_data('afile.fits')
        assert "The file type is not recognized" in str(record.value)
Example #10
def get_lightcurve_fits_dataset_with_stingray(destination,
                                              hdulist,
                                              hduname='RATE',
                                              column=CONFIG.TIME_COLUMN,
                                              gtistring=CONFIG.GTI_STRING,
                                              time_offset=0):

    supported_rate_columns = set(['RATE', 'RATE1', 'COUNTS'])
    found_rate_columns = set(hdulist[hduname].data.names)
    intersection_columns = supported_rate_columns.intersection(
        found_rate_columns)

    #Check if HDUCLAS1 = LIGHTCURVE column exists
    logging.debug("Reading Lightcurve Fits columns")
    if "HDUCLAS1" not in hdulist[hduname].header:
        logging.warn("HDUCLAS1 not found in header: " + hduname)
        return None

    elif hdulist[hduname].header["HDUCLAS1"] != "LIGHTCURVE":
        logging.warn("HDUCLAS1 is not LIGHTCURVE")
        return None

    elif len(intersection_columns) == 0:
        logging.warn("RATE, RATE1 or COUNTS columns not found in " +
                     str(hduname) + " HDU, found columns: " +
                     str(hdulist[hduname].data.names))
        return None

    elif len(intersection_columns) > 1:
        logging.warn("RATE, RATE1 or COUNTS ambiguous columns found in " +
                     str(hduname) + " HDU, found columns: " +
                     str(hdulist[hduname].data.names))
        return None

    ratecolumn = list(intersection_columns)[0]
    if len(hdulist[hduname].data[ratecolumn].shape) != 1 \
        or not (isinstance(hdulist[hduname].data[ratecolumn][0], int) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], np.integer) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], float) \
        or isinstance(hdulist[hduname].data[ratecolumn][0], np.floating)):
        logging.warn("Wrong data type found for column: " + str(ratecolumn) +
                     " in " + str(hduname) +
                     " HDU, expected Integer or Float.")
        return None

    header, header_comments = get_header(hdulist, hduname)

    # Reads the lightcurve with HENDRICS
    outfile = lcurve_from_fits(destination,
                               gtistring=get_hdu_string_from_hdulist(
                                   gtistring, hdulist),
                               timecolumn=column,
                               ratecolumn=ratecolumn,
                               ratehdu=1,
                               fracexp_limit=CONFIG.FRACEXP_LIMIT)[0]

    lcurve, events_start_time = substract_tstart_from_lcurve(
        load_data(outfile), time_offset)

    dataset = DataSet.get_lightcurve_dataset_from_stingray_lcurve(
        lcurve, header, header_comments, hduname, column)

    # Stores the events_start_time in time column extra
    dataset.tables[hduname].columns[column].set_extra("TSTART",
                                                      events_start_time)

    logging.debug("Read Lightcurve fits with stingray file successfully: " +
                  str(destination) + ", tstart: " + str(events_start_time) +
                  ", rate: " + str(len(lcurve["counts"])))

    return dataset
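A hypothetical call site for this helper, assuming the light curve lives in a FITS file opened with astropy; CONFIG, DataSet and the other helpers are provided by the surrounding project:

    from astropy.io import fits

    # Hypothetical file name; hduname and time_offset mirror the defaults used above
    with fits.open('source_lightcurve.lc') as hdulist:
        dataset = get_lightcurve_fits_dataset_with_stingray(
            'source_lightcurve.lc', hdulist, hduname='RATE', time_offset=0)

    if dataset is None:
        # the helper logs a warning and returns None when the HDU is not a light curve
        print('HDU was not recognized as a LIGHTCURVE')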
Example #11
    def test_treat_event_file_nustar(self):
        treat_event_file(self.fits_fileA)
        new_filename = 'monol_testA_nustar_fpma_ev' + HEN_FILE_EXTENSION
        assert os.path.exists(os.path.join(self.datadir, new_filename))
        data = load_data(os.path.join(self.datadir, new_filename))
        assert 'instr' in data