def test_apextract_weights():
    import tarfile

    from astropy.io import fits
    # apextract_weights is assumed to be imported at module level from megaradrp

    file_name = 'master_weights_LCB_10img_1exp.tar'

    data = fits.getdata('fiberflat_frame.fits')
    rss = apextract_weights(data, tarfile.open(file_name, 'r'))
    hdu_rss = fits.PrimaryHDU(rss)
    final = fits.HDUList([hdu_rss])
    # 'clobber' was removed in astropy 2.0; 'overwrite' is the current keyword
    final.writeto('rss.fits', overwrite=True)
    # Smoke test: passes if the extraction and FITS write complete without raising
    assert True
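The assertion above makes this a pure smoke test. If a stronger check were wanted, something along these lines could follow the extraction call; rss_sanity_check is a hypothetical helper, and the 2-D/finite expectations are assumptions about what apextract_weights returns, not documented behavior:

import numpy

def rss_sanity_check(rss):
    # Illustrative helper, not part of megaradrp: basic sanity checks
    # on the extracted row-stacked spectra.
    arr = numpy.asarray(rss)
    assert arr.ndim == 2                # one row per extracted fiber
    assert numpy.isfinite(arr).all()    # extraction left no NaN/Inf values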
Example #2
    def run_args(self, obresult, weights, wlcalib, parameters):
        # Basic processing

        self.logger.info('twilight fiber flat reduction started')

        reduced = self.bias_process_common(obresult, parameters)

        self.logger.info('extract fibers')
        rssdata = apextract_weights(reduced[0].data, weights)

        # FIXME: we are ignoring here all the possible bad pixels
        # and WL distortion when doing the normalization
        # rssdata /= rssdata.mean()  # originally uncommented
        template_header = reduced[0].header
        rsshdu = fits.PrimaryHDU(rssdata, header=template_header)
        rss = fits.HDUList([rsshdu])

        self.logger.info('extraction completed')

        self.logger.info('resampling spectra')
        final, wcsdata = self.resample_rss_flux(rsshdu.data, wlcalib)
        # Flux conservation error of the resampling, measured as
        # abs(final.sum() - hdu_t.data.sum()) / hdu_t.data.sum() * 100,
        # was ~0.4% and is now 4e-6 %

        # Measure values in final
        start = 200
        end = 2100
        self.logger.info('doing mean between columns %d-%d', start, end)
        collapse = final[:, start:end].mean(axis=1)

        # Broadcast each fiber's mean back across the full 4096-column width
        normalized = numpy.tile(collapse[:, numpy.newaxis], 4096)

        master_t_hdu = fits.PrimaryHDU(normalized, header=template_header)
        master_t = fits.HDUList([master_t_hdu])

        self.logger.info('twilight fiber flat reduction ended')
        result = self.create_result(reduced_frame=reduced, reduced_rss=rss,
                                    master_twilight_flat=master_t)
        return result
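The column-collapse normalization in the middle of the recipe can be illustrated in isolation. Below is a minimal sketch using only numpy, with a toy 3-fiber by 4096-column array; the column range and detector width come from the recipe above, while the array contents are made up for the demonstration:

import numpy

# Toy RSS: 3 fibers x 4096 wavelength bins, flat flux with small noise
rng = numpy.random.default_rng(0)
rssdata = rng.normal(1.0, 0.05, size=(3, 4096))

start, end = 200, 2100
# Per-fiber mean over the stable column range, as in the recipe
collapse = rssdata[:, start:end].mean(axis=1)
# Tile each fiber's mean across the full width: every column of a fiber
# gets the same value, yielding a constant-per-fiber normalization image
normalized = numpy.tile(collapse[:, numpy.newaxis], 4096)

assert normalized.shape == rssdata.shape
assert numpy.allclose(normalized[:, 0], collapse)

Restricting the mean to columns 200-2100 avoids the noisy detector edges, which is presumably why the recipe measures only that range before tiling the result back out to the full frame.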