Example #1
    def run(self, rinput):
        # Basic processing
        self.logger.info('start focus spectrograph')
        flow = self.create_flow(rinput.master_bias,
                                rinput.obresult.configuration.values)

        ever = []   # per-frame FWHM measurements of the detected lines
        focci = []  # focus value of each frame
        # FIXME: use tagger here
        vphs = []   # VPH grating of each frame (never appended here, see FIXME)

        for frame in rinput.obresult.images:
            hdulist = frame.open()

            focus_val = hdulist[0].header['focus']
            vph_name = hdulist[0].header['VPH']
            focci.append(focus_val)

            self.logger.info('processing frame %s', frame)
            hdulist = flow(hdulist)
            self.logger.info('extract fibers')
            rssdata = apextract_tracemap(hdulist[0].data, rinput.tracemap)
            self.logger.info('find lines and compute FWHM')
            lines_rss_fwhm = self.run_on_image(rssdata, rinput.tracemap, vph_name)

            ever.append(lines_rss_fwhm)

        self.logger.info('pair lines in images')
        line_fibers = self.filter_lines(ever)

        focus_wavelength = self.generateJSON(ever,
                                             self.get_wlcalib(rinput.wlcalib),
                                             rinput.obresult.images)

        self.logger.info('fit FWHM of lines')
        final = self.reorder_and_fit(line_fibers, focci)

        focus_median = numpy.median(final[:, 2])
        self.logger.info('median focus value is %5.2f', focus_median)

        self.logger.info('generate focus image')
        image = self.generate_image(final)
        focus_image_hdu = fits.PrimaryHDU(image)
        focus_image = fits.HDUList([focus_image_hdu])

        self.logger.info('end focus spectrograph')
        return self.create_result(focus_table=final, focus_image=focus_image,
                                  focus_wavelength=focus_wavelength)
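
The recipe collects line FWHM measurements per fiber at each focus position, fits them against the focus values in reorder_and_fit, and reports the median of the fitted values as the focus. Assuming the FWHM varies roughly quadratically with focus near best focus, a minimal sketch of such a fit is shown below; fit_best_focus is a hypothetical helper for illustration only, not the actual reorder_and_fit implementation.

import numpy


def fit_best_focus(focus_values, fwhm_values):
    # Fit FWHM(focus) with a parabola and return the focus at its minimum.
    # focus_values, fwhm_values: one pair per exposure, for a single line
    # in a single fiber.
    a, b, _c = numpy.polyfit(focus_values, fwhm_values, deg=2)
    if a <= 0:
        raise ValueError('fitted parabola has no minimum')
    return -b / (2 * a)


# Example: FWHM of one line measured at five focus positions
focus = [-2.0, -1.0, 0.0, 1.0, 2.0]
fwhm = [4.1, 3.2, 2.9, 3.3, 4.0]
print(fit_best_focus(focus, fwhm))  # approximately 0.0
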
Example #2
    def run(self, rinput):
        # Basic processing
        self.logger.info('twilight fiber flat reduction started')

        parameters = self.get_parameters(rinput)

        reduced = self.bias_process_common(rinput.obresult, parameters)

        self.logger.info('extract fibers')
        rssdata = apextract_tracemap(reduced[0].data, rinput.tracemap)

        # FIXME: we are ignoring here all the possible bad pixels
        # and WL distortion when doing the normalization
        # rssdata /= rssdata.mean()  # originally uncommented
        template_header = reduced[0].header
        rsshdu = fits.PrimaryHDU(rssdata, header=template_header)
        rss = fits.HDUList([rsshdu])

        self.logger.info('extraction completed')

        self.logger.info('resampling spectra')
        final, wcsdata = self.resample_rss_flux(rsshdu.data, self.get_wlcalib(rinput.wlcalib))
        # This value was ~0.4% and now is 4e-6 %
        # (abs(final.sum()-hdu_t.data.sum())/hdu_t.data.sum()*100)

        # Measure values in final
        start = 200
        end = 2100
        self.logger.info('doing mean between columns %d-%d', start, end)
        collapsed = final[:, start:end].mean(axis=1)

        normalized = numpy.tile(collapsed[:, numpy.newaxis], 4096)

        master_t_hdu = fits.PrimaryHDU(normalized, header=template_header)
        header_list = self.getHeaderList([reduced, rinput.obresult.images[0].open()])
        master_t = fits.HDUList([master_t_hdu]+header_list)


        self.logger.info('twilight fiber flat reduction ended')
        result = self.create_result(reduced_frame=reduced, reduced_rss=rss,
                                    master_twilight_flat=master_t)
        return result
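
The master twilight flat above is built by averaging columns 200-2100 of the resampled RSS along the wavelength axis for every fiber and tiling the per-fiber mean back over the full 4096-pixel spectral range. A standalone numpy sketch of that collapse-and-tile step follows; the array shape (623 fibers by 4096 pixels) and the random data are assumptions used only for illustration.

import numpy

# Fake RSS array, shape (n_fibers, n_wavelength_pixels); values are arbitrary.
rng = numpy.random.default_rng(0)
final = rng.normal(loc=1000.0, scale=50.0, size=(623, 4096))

start, end = 200, 2100
# Mean flux of each fiber over the chosen wavelength window -> shape (623,)
collapsed = final[:, start:end].mean(axis=1)

# Replicate the per-fiber mean across the full spectral axis -> shape (623, 4096)
normalized = numpy.tile(collapsed[:, numpy.newaxis], final.shape[1])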