Example 1
def ism_transmittance_chunk(qso_record_table):
    """Build per-QSO ISM (interstellar-medium) delta-transmittance spectra for one chunk.

    For each QSO record, selects an extinction bin from its galactic extinction
    value, resamples the corresponding ISM extinction spectrum onto the QSO's
    wavelength grid, and rescales it by the QSO's own extinction.

    :param qso_record_table: table (or row sequence) of QSO records; rows are
        convertible via ``QSORecord.from_row`` and carry an ``'index'`` column.
    :return: tuple of (ism_delta_t as np.ndarray, None) — the second element
        keeps the chunk-result interface used by the MPI helpers.
    """
    start_offset = qso_record_table[0]['index']
    delta_transmittance_file = NpSpectrumContainer(
        readonly=True,
        filename=settings.get_delta_t_npy(),
        max_wavelength_count=1000)

    num_spectra = len(qso_record_table)
    ism_delta_t = NpSpectrumContainer(False, num_spectra=num_spectra)
    # warning: np.ndarray is not initialized by default. zeroing manually.
    ism_delta_t.zero()
    n = 0
    for i in range(len(qso_record_table)):
        qso_rec = QSORecord.from_row(qso_record_table[i])
        index = qso_rec.index

        # read original delta transmittance
        ar_redshift = delta_transmittance_file.get_wavelength(index)
        ar_ivar = delta_transmittance_file.get_ivar(index)

        ar_wavelength = (ar_redshift + 1) * lya_center  # type: np.ndarray

        # limit maximum bin number because higher extinction bins are not reliable.
        # BUG FIX: the original used max(20, size), which never limits anything;
        # min() enforces the intended cap.
        max_extinction_bin = min(20, ar_extinction_levels.size)

        if np.isfinite(qso_rec.extinction_g):
            # map the QSO's extinction onto a (fractional) bin index over the
            # full level grid; xp and fp must have equal length for np.interp.
            extinction_bin = int(
                np.round(
                    np.interp(qso_rec.extinction_g, ar_extinction_levels,
                              np.arange(ar_extinction_levels.size))))
            # clamp to the last reliable bin
            extinction_bin = min(extinction_bin, max_extinction_bin - 1)
        else:
            # no finite extinction value: fall back to the lowest bin
            extinction_bin = 0

        l_print_no_barrier("extinction_bin = ", extinction_bin)
        # resample the binned ISM spectrum onto this QSO's wavelength grid;
        # NaN outside the tabulated range so those pixels get masked below.
        ar_ism_resampled = np.interp(
            ar_wavelength,
            extinction_spectra_list[extinction_bin][0],
            extinction_spectra_list[extinction_bin][1],
            left=np.nan,
            right=np.nan)
        extinction = ar_extinction_levels[extinction_bin]
        # rescale according to QSO extinction
        l_print_no_barrier(qso_rec.extinction_g, extinction)
        ism_scale_factor = 1.
        ar_flux_new = (ar_ism_resampled - 1
                       ) * ism_scale_factor * qso_rec.extinction_g / extinction

        # keep only pixels with finite ISM flux and nonzero inverse variance
        mask = np.logical_and(np.isfinite(ar_flux_new), ar_ivar)

        ism_delta_t.set_wavelength(i, ar_redshift[mask])
        # use reciprocal to get absorption spectrum, then subtract 1 to get the delta
        ism_delta_t.set_flux(i, ar_flux_new[mask])
        # use original ivar because we are not correcting an existing spectrum
        ism_delta_t.set_ivar(i, ar_ivar[mask])

        n += 1

    l_print_no_barrier("chunk n =", n, "offset =", start_offset)
    return ism_delta_t.as_np_array(), None
Example 2
class ContinuumFitContainer(object):
    """Holds continuum-fit spectra together with per-spectrum fit metadata.

    Wraps an ``NpSpectrumContainer`` (wavelength/flux arrays) plus an astropy
    ``Table`` with one metadata row per spectrum: index, fit-quality flag,
    goodness-of-fit statistic, and signal-to-noise ratio.
    """

    def __init__(self, num_spectra=-1):
        # num_spectra: number of spectra to allocate; the -1 default mirrors
        # NpSpectrumContainer's "unspecified" convention — TODO confirm.
        self.num_spectra = num_spectra
        self.np_spectrum = NpSpectrumContainer(readonly=False, num_spectra=num_spectra)
        self.continuum_fit_metadata = table.Table()
        self.continuum_fit_metadata.add_columns(
            [table.Column(name='index', dtype='i8', unit=None, length=num_spectra),
             table.Column(name='is_good_fit', dtype='b', unit=None, length=num_spectra),
             table.Column(name='goodness_of_fit', dtype='f8', unit=None, length=num_spectra),
             table.Column(name='snr', dtype='f8', unit=None, length=num_spectra)])

        # initialize array (np.ndarray is not zeroed by default)
        self.np_spectrum.zero()

    def get_wavelength(self, n):
        """Return the wavelength array of spectrum ``n``."""
        return self.np_spectrum.get_wavelength(n)

    def get_flux(self, n):
        """Return the flux array of spectrum ``n``."""
        return self.np_spectrum.get_flux(n)

    def set_wavelength(self, n, data):
        """Store ``data`` as the wavelength array of spectrum ``n``."""
        self.np_spectrum.set_wavelength(n, data)

    def set_flux(self, n, data):
        """Store ``data`` as the flux array of spectrum ``n``."""
        self.np_spectrum.set_flux(n, data)

    def set_metadata(self, n, is_good_fit, goodness_of_fit, snr):
        """Set the metadata row for spectrum ``n`` (its 'index' field is set to ``n``)."""
        self.continuum_fit_metadata[n] = [n, is_good_fit, goodness_of_fit, snr]

    def copy_metadata(self, n, metadata):
        """Overwrite the metadata row for spectrum ``n`` with an existing row."""
        self.continuum_fit_metadata[n] = metadata

    def get_metadata(self, n):
        """Return the full metadata row for spectrum ``n``."""
        return self.continuum_fit_metadata[n]

    def get_is_good_fit(self, n):
        """Return whether the fit for spectrum ``n`` was flagged as good."""
        return self.get_metadata(n)['is_good_fit']

    def get_goodness_of_fit(self, n):
        """Return the goodness-of-fit statistic for spectrum ``n``."""
        return self.get_metadata(n)['goodness_of_fit']

    def get_snr(self, n):
        """Return the signal-to-noise ratio recorded for spectrum ``n``."""
        return self.get_metadata(n)['snr']

    @classmethod
    def from_np_array_and_object(cls, np_array, obj):
        """Reconstruct a container from a raw spectrum array and a metadata object.

        Used to rebuild an instance from serialized pieces (e.g. after an MPI
        transfer). The spectrum container is read-only in the result.

        :param np_array: array previously produced by :meth:`as_np_array`.
        :param obj: metadata table previously produced by :meth:`as_object`.
        :raises AssertionError: if ``obj`` is not a metadata table of the
            expected type.
        """
        # TODO: consider refactoring.
        np_spectrum = NpSpectrumContainer.from_np_array(np_array, readonly=True)
        new_instance = cls(num_spectra=np_spectrum.num_spectra)
        # replace spectrum container with existing data
        new_instance.np_spectrum = np_spectrum
        # replace metadata with existing metadata object.
        # isinstance is the idiomatic check (the original compared exact types);
        # still raises AssertionError on mismatch, so callers are unaffected.
        assert isinstance(obj, type(new_instance.continuum_fit_metadata))
        new_instance.continuum_fit_metadata = obj
        return new_instance

    def as_object(self):
        """Return the metadata table (suitable for :meth:`from_np_array_and_object`)."""
        return self.continuum_fit_metadata

    def as_np_array(self):
        """Return the underlying spectrum data as a plain numpy array."""
        return self.np_spectrum.as_np_array()