Example #1
class StareSpectraRecipe(EmirRecipe):
    """Process images in Stare spectra mode"""

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterSpectralFlatFieldRequirement()
    master_sky = reqs.SpectralSkyRequirement(optional=True)

    stare = Result(prods.ProcessedMOS)

    def run(self, rinput):
        self.logger.info('starting stare spectra reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median)
        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        # Update EXP to 0
        hdr['EXP'] = 0

        self.logger.info('end stare spectra reduction')
        result = self.create_result(stare=hdulist)
        return result
Example #2
class DitherSkyRecipe(EmirRecipe):
    """Recipe to process data taken in dither sky mode.

    """

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()

    skyframe = Result(prods.MasterSky)

    def run(self, rinput):
        _logger.debug('instrument %s, mode %s', rinput.obresult.instrument,
                      rinput.obresult.mode)
        _logger.info('starting sky reduction with dither')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_segmentation(rinput,
                                                     flow,
                                                     method=median,
                                                     errors=True)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        _logger.info('end sky reduction with dither')

        result = self.create_result(skyframe=hdulist)

        return result
Example #3
class TestSkyCorrectRecipe(EmirRecipe):

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = Requirement(prods.MasterIntensityFlat,
                             'Master Sky calibration')

    frame = Result(prods.ProcessedImage)

    def run(self, rinput):
        self.logger.info('starting simple sky reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median)
        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        # Update SEC to 0
        hdr['SEC'] = 0

        result = self.create_result(frame=hdulist)

        return result
Example #4
class SlitTransmissionRecipe(EmirRecipe):
    """Recipe to calibrate the slit transmission.

    **Observing modes:**

        * Slit transmission calibration (4.4)

    **Inputs:**

        * A list of uniformly illuminated images of MSM

    **Outputs:**

     * A list of slit transmission functions

    **Procedure:**

     * TBD

    """

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()

    slit = Result(prods.SlitTransmissionCalibration)

    def run(self, rinput):
        return self.create_result(slit=prods.SlitTransmissionCalibration())
Example #5
class SkySpecRecipe(EmirRecipe):
    """Recipe to process data taken in spectral sky mode.

    """

    obresult = ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterSpectralFlatFieldRequirement()

    skyspec = Product(prods.SkySpectrum)

    def run(self, rinput):
        self.logger.info('starting spectral sky reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median,
                                                    errors=True)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        self.logger.info('end sky spectral reduction')

        result = self.create_result(skyspec=hdulist)

        return result
Example #6
class WavelengthCalibrationRecipe(EmirRecipe):
    """Recipe to calibrate the spectral response.

    **Observing modes:**

        * Wavelength calibration (4.5)

    **Inputs:**

     * List of line positions
     * Calibrations up to spectral flatfielding

    **Outputs:**

     * Wavelength calibration structure

    **Procedure:**

     * TBD
    """

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_spectral_ff = reqs.MasterSpectralFlatFieldRequirement()

    cal = Result(prods.WavelengthCalibration)

    def run(self, rinput):
        return self.create_result(cal=prods.WavelengthCalibration())
Example #7
class TelescopeFineFocusRecipe(EmirRecipe):
    """
    Recipe to compute the telescope focus.

    **Observing modes:**

        * Telescope fine focus

    **Inputs:**

     * A list of images
     * A list of sky images
     * Bias, dark, flat
     * A model of the detector
     * List of focii

    **Outputs:**

     * Best focus

    """

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    objects = Parameter([], 'List of x-y pairs of object coordinates')

    focus = Product(TelescopeFocus)

    def run(self, rinput):
        return self.create_result(focus=TelescopeFocus())
Example #8
class DTUFocusRecipe(EmirRecipe):
    """
    Recipe to compute the DTU focus.

    **Observing modes:**

        * EMIR focus control

    **Inputs:**

     * A list of images
     * A list of sky images
     * Bias, dark, flat
     * A model of the detector
     * List of focii

    **Outputs:**

     * Best focus

    """

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    objects = Parameter([], 'List of x-y pairs of object coordinates')
    msm_pattern = Parameter([], 'List of x-y pairs of slit coordinates')
    dtu_focus_range = Parameter(
        [], 'Focus range of the DTU: begin, end and step')

    focus = Product(DTUFocus)


    def run(self, rinput):
        return self.create_result(focus=DTUFocus())
Example #9
class SimpleSkyRecipe(EmirRecipe):
    """Recipe to process data taken in intensity flat-field mode.

    """

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    obresult = reqs.ObservationResultRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()

    skyframe = Result(prods.MasterSky)

    def run(self, rinput):
        _logger.info('starting sky reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median,
                                                    errors=True)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        result = self.create_result(skyframe=hdulist)

        return result
Example #10
class TelescopeRoughFocusRecipe(EmirRecipe):
    """Recipe to compute the telescope focus.

    **Observing modes:**

     * Telescope rough focus
     * Emir focus control

    **Inputs:**

     * A list of images
     * A list of sky images
     * Bias, dark, flat
     * A model of the detector
     * List of focii

    **Outputs:**

     * Best focus
    """

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    objects = Parameter([], 'List of x-y pairs of object coordinates')
    focus_range = Parameter([], 'Focus range: begin, end and step')

    focus = Result(prods.TelescopeFocus)

    def run(self, rinput):
        return self.create_result(focus=prods.TelescopeFocus())
Example #11
class IntensityFlatRecipe(EmirRecipe):
    """Recipe to process data taken in intensity flat-field mode.

    Recipe to process intensity flat-fields. The flat-on and
    flat-off images are combined (method?) separately and then subtracted
    to obtain a thermal-subtracted flat-field.

    **Observing modes:**

     * Intensity Flat-Field

    **Inputs:**

      * A master dark frame
      * Non linearity
      * A model of the detector.

    **Outputs:**

     * TBD

    **Procedure:**

     * A combined, thermal-subtracted flat field, normalized to median 1,
       with variance extension and quality flag.

    """

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    obresult = reqs.ObservationResultRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()

    flatframe = Result(prods.MasterIntensityFlat)

    def run(self, rinput):
        _logger.info('starting flat reduction')

        errors = True

        flow = self.init_filters(rinput)
        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median,
                                                    errors=errors)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        mm = hdulist[0].data.mean()
        hdr['CCDMEAN'] = mm

        hdulist[0].data /= mm
        if errors:
            hdulist['variance'].data /= (mm * mm)

        result = self.create_result(flatframe=hdulist)

        return result
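
The normalization step above divides the combined frame by its mean and scales the variance plane by the square of the same factor. The following is a minimal, illustrative sketch of that step (it assumes a 'variance' image extension as produced by the combination routine; it is not the emirdrp implementation):

import numpy
from astropy.io import fits


def normalize_flat(hdulist, errors=True):
    """Normalize a flat field in place to mean 1."""
    mm = hdulist[0].data.mean()
    hdulist[0].header['CCDMEAN'] = mm
    hdulist[0].data /= mm
    if errors:
        # the variance scales with the square of the normalization factor
        hdulist['variance'].data /= (mm * mm)
    return hdulist


# toy usage with a synthetic flat and a constant variance plane
data = numpy.random.normal(10000.0, 100.0, size=(64, 64)).astype('float32')
var = numpy.full((64, 64), 100.0 ** 2, dtype='float32')
hdul = fits.HDUList([fits.PrimaryHDU(data),
                     fits.ImageHDU(var, name='variance')])
normalize_flat(hdul)
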
Example #12
class SkySpecRecipe(EmirRecipe):
    """Recipe to process data taken in spectral sky mode.

    """

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterSpectralFlatFieldRequirement()
    master_rectwv = reqs.MasterRectWaveRequirement()
    skyspec = Result(prods.SkySpectrum)
    reduced_image = Result(prods.ProcessedMOS)

    def run(self, rinput):
        self.logger.info('starting spectral sky reduction')

        flow = self.init_filters(rinput)

        reduced_image = basic_processing_with_combination(
            rinput, flow,
            method=median,
            errors=True
        )

        hdr = reduced_image[0].header
        self.set_base_headers(hdr)

        # save intermediate image in work directory
        self.save_intermediate_img(reduced_image, 'reduced_image.fits')

        # RectWaveCoeff object with rectification and wavelength calibration
        # coefficients for the particular CSU configuration
        rectwv_coeff = rectwv_coeff_from_mos_library(
            reduced_image,
            rinput.master_rectwv
        )
        # save as JSON file in work directory
        self.save_structured_as_json(rectwv_coeff, 'rectwv_coeff.json')

        # generate associated ds9 region files and save them in work directory
        if self.intermediate_results:
            save_four_ds9(rectwv_coeff)

        # apply rectification and wavelength calibration
        skyspec = apply_rectwv_coeff(
            reduced_image,
            rectwv_coeff
        )

        self.logger.info('end sky spectral reduction')

        result = self.create_result(
            reduced_image=reduced_image,
            skyspec=skyspec
        )

        return result
Example #13
class StareImageRecipe2(EmirRecipe):
    """Process images in Stare Image Mode"""

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()

    result_image = Result(prods.ProcessedImage)

    def run(self, rinput):
        self.logger.info('starting stare image reduction (offline)')

        frames = rinput.obresult.frames
        datamodel = self.datamodel

        with extra.manage_frame(frames) as list_of:
            c_img = extra.combine_frames(list_of, datamodel, method=combine.mean, errors=False)

        flow = self.init_filters(rinput)

        # Correct Bias if needed
        # Correct Dark if needed
        # Correct FF

        processed_img = flow(c_img)

        hdr = processed_img[0].header
        self.set_base_headers(hdr)

        self.logger.debug('append BPM')
        if rinput.master_bpm is not None:
            self.logger.debug('using BPM from inputs')
            hdul_bpm = rinput.master_bpm.open()
            hdu_bpm = extra.generate_bpm_hdu(hdul_bpm[0])
        else:
            self.logger.debug('using empty BPM')
            hdu_bpm = extra.generate_empty_bpm_hdu(processed_img[0])

        # Append the BPM to the result
        processed_img.append(hdu_bpm)
        self.logger.info('end stare image (off) reduction')
        result = self.create_result(result_image=processed_img)

        return result


    def set_base_headers(self, hdr):
        """Set metadata in FITS headers."""
        hdr = super(StareImageRecipe2, self).set_base_headers(hdr)
        # Set EXP to 0
        hdr['EXP'] = 0
        return hdr
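
Both stare-image recipes finish by appending a bad-pixel-mask extension to the processed image. A minimal sketch of that pattern follows; the extension name 'BPM' and the uint8 dtype are assumptions for illustration, since the actual HDUs are built by helpers in emirdrp.core.extra:

import numpy
from astropy.io import fits


def empty_bpm_hdu(reference_hdu, extname='BPM'):
    """Return an all-zeros mask extension matching the reference image shape."""
    data = numpy.zeros(reference_hdu.data.shape, dtype='uint8')
    return fits.ImageHDU(data, name=extname)


processed = fits.HDUList([fits.PrimaryHDU(numpy.zeros((32, 32), dtype='float32'))])
processed.append(empty_bpm_hdu(processed[0]))
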
Example #14
class IntensityFlatRecipe2(EmirRecipe):
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()

    master_flatframe = Result(prods.MasterIntensityFlat)

    def run(self, rinput):
        import emirdrp.core.extra as extra
        from numina.array import combine

        _logger.info('starting flat reduction')

        frames = rinput.obresult.frames
        datamodel = self.datamodel

        with extra.manage_frame(frames) as list_of:
            c_img = extra.combine_frames2(list_of,
                                          datamodel,
                                          method=combine.mean,
                                          errors=False)

        self.save_intermediate_img(c_img, 'p0.fits')

        flow = self.init_filters(rinput)

        processed_img = flow(c_img)

        self.save_intermediate_img(processed_img, 'p1.fits')

        hdr = processed_img[0].header
        self.set_base_headers(hdr)

        import scipy.ndimage.filters

        _logger.info('median filter')
        data_smooth = scipy.ndimage.filters.median_filter(
            processed_img[0].data, size=11)

        self.save_intermediate_array(data_smooth, 'smooth.fits')

        mm = processed_img[0].data.mean()
        hdr['CCDMEAN'] = mm

        processed_img[0].data /= data_smooth

        self.save_intermediate_img(processed_img, 'p2.fits')

        result = self.create_result(master_flatframe=processed_img)

        return result
Example #15
class SpectralFlatRecipe(EmirRecipe):
    """Recipe to process data taken in intensity flat-field mode."""

    master_bpm = reqs.MasterBadPixelMaskRequirement()
    obresult = reqs.ObservationResultRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()

    flatframe = Result(prods.MasterSpectralFlat)

    def run(self, rinput):
        return self.create_result(flatframe=prods.MasterSpectralFlat())
Example #16
class StareImageBaseRecipe(EmirRecipe):
    """Process images in Stare Image Mode"""

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement(optional=True)

    frame = Result(prods.ProcessedImage)

    def __init__(self, *args, **kwargs):
        super(StareImageBaseRecipe, self).__init__(*args, **kwargs)
        if False:
            self.query_options['master_sky'] = Ignore()

    @emirdrp.decorators.loginfo
    @timeit
    def run(self, rinput):
        self.logger.info('starting stare image reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(
            rinput,
            flow,
            method=combine.median
        )
        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        if rinput.master_bpm:
            hdul_bpm = rinput.master_bpm.open()
            hdu_bpm = extra.generate_bpm_hdu(hdul_bpm[0])
        else:
            hdu_bpm = extra.generate_empty_bpm_hdu(hdulist[0])

        # Append the BPM to the result
        hdulist.append(hdu_bpm)
        self.logger.info('end stare image reduction')
        result = self.create_result(frame=hdulist)

        return result

    def set_base_headers(self, hdr):
        """Set metadata in FITS headers."""
        hdr = super(StareImageBaseRecipe, self).set_base_headers(hdr)
        # Update EXP to 0
        hdr['EXP'] = 0
        return hdr
Example #17
class MaskCheckRecipe(EmirRecipe):
    """
    Acquire a target.

    Recipe for the processing of multi-slit/long-slit check images.

    **Observing modes:**

        * MSM and LSM check

    """

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()

    def run(self, rinput):
        self.logger.info("Start MaskCheckRecipe")
        self.logger.info("End MaskCheckRecipe")
        return self.create_result()
Example #18
class TestDarkCorrectRecipe(EmirRecipe):

    obresult = ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()

    frame = Result(prods.ProcessedImage)

    def run(self, rinput):
        _logger.info('starting simple dark reduction')

        flow = self.init_filters(rinput)
        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median)
        hdr = hdulist[0].header
        hdr['NUMRNAM'] = (self.__class__.__name__, 'Numina recipe name')
        hdr['NUMRVER'] = (self.__version__, 'Numina recipe version')

        result = self.create_result(frame=hdulist)

        return result
Example #19
class StareSpectraWaveRecipe(EmirRecipe):
    """Process images in Stare spectra at the GTC.

    This recipe is intended to be used at GTC. The rectification
    and wavelength calibration can be computed from a model if this
    model (master_rectwv) is provided as input.

    """

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterSpectralFlatFieldRequirement()
    master_rectwv = reqs.MasterRectWaveRequirement(optional=True)
    master_sky = reqs.SpectralSkyRequirement(optional=True)

    reduced_image = Result(prods.ProcessedImage)
    stare = Result(prods.ProcessedMOS)

    def run(self, rinput):
        self.logger.info('starting reduction of stare spectra')

        self.logger.info(rinput.master_rectwv)

        # build object to proceed with bpm, bias, dark and flat
        flow = self.init_filters(rinput)

        # apply bpm, bias, dark and flat
        reduced_image = basic_processing_with_combination(rinput,
                                                          flow,
                                                          method=median)
        # update header with additional info
        hdr = reduced_image[0].header
        self.set_base_headers(hdr)

        # save intermediate image in work directory
        self.save_intermediate_img(reduced_image, 'reduced_image.fits')

        # rectification and wavelength calibration (if a model has
        # been provided)
        if rinput.master_rectwv:
            # RectWaveCoeff object with rectification and wavelength
            # calibration coefficients for the particular CSU configuration
            rectwv_coeff = rectwv_coeff_from_mos_library(
                reduced_image, rinput.master_rectwv)

            # apply rectification and wavelength calibration
            stare_image = apply_rectwv_coeff(reduced_image, rectwv_coeff)

            # save as JSON file in work directory
            self.save_structured_as_json(rectwv_coeff, 'rectwv_coeff.json')

            # ds9 region files (to be saved in the work directory)
            if self.intermediate_results:
                save_four_ds9(rectwv_coeff)
                save_spectral_lines_ds9(rectwv_coeff)

            # compute median spectra employing the useful region of the
            # rectified image
            if self.intermediate_results:
                for imode, outfile in enumerate([
                        'median_spectra_full', 'median_spectra_slitlets',
                        'median_spectrum_slitlets'
                ]):
                    median_image = median_slitlets_rectified(stare_image,
                                                             mode=imode)
                    self.save_intermediate_img(median_image, outfile + '.fits')

            # image_wl_calibrated = True

        else:

            stare_image = reduced_image

            self.logger.info('No wavelength calibration provided')
            grism_value = hdr.get('GRISM', 'unknown')
            self.logger.debug('GRISM is %s', grism_value)
            if grism_value.lower() == 'open':
                self.logger.debug('GRISM is %s, so this seems OK', grism_value)

            # image_wl_calibrated = False

        if rinput.master_sky:
            # Sky subtraction after rectification
            msky = rinput.master_sky.open()
            # Check if images have the same size.
            # if so, go ahead
            if msky[0].data.shape != stare_image[0].data.shape:
                self.logger.warning(
                    "sky and current image don't have the same shape")
            else:
                sky_corrector = proc.SkyCorrector(
                    msky[0].data,
                    datamodel=self.datamodel,
                    calibid=self.datamodel.get_imgid(msky))

                stare_image = sky_corrector(stare_image)
        else:
            self.logger.info('No sky image provided')

        # save results in results directory
        self.logger.info('end reduction of stare spectra')
        result = self.create_result(reduced_image=reduced_image,
                                    stare=stare_image)
        return result

    def set_base_headers(self, hdr):
        newhdr = super(StareSpectraWaveRecipe, self).set_base_headers(hdr)
        # Update EXP to 0
        newhdr['EXP'] = 0
        return newhdr
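
In the sky-subtraction branch above, the correction is delegated to numina's SkyCorrector node after a shape check. As a simplified sketch, assuming plain array subtraction (the real corrector also records calibration metadata), the logic reduces to:

import numpy


def subtract_sky(stare_data, sky_data, logger=None):
    """Subtract a master sky only when both frames have the same shape."""
    if sky_data.shape != stare_data.shape:
        if logger is not None:
            logger.warning("sky and current image don't have the same shape")
        return stare_data
    return stare_data - sky_data


# toy usage
sky = numpy.ones((2048, 2048))
science = numpy.full((2048, 2048), 5.0)
corrected = subtract_sky(science, sky)
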
Example #20
class BarDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    bars_nominal_positions = Requirement(prods.NominalPositions,
                                         'Nominal positions of the bars')
    median_filter_size = Parameter(5, 'Size of the median box')
    average_box_row_size = Parameter(
        7, 'Number of rows to average for fine centering (odd)')
    average_box_col_size = Parameter(
        21, 'Number of columns to extract for fine centering (odd)')
    fit_peak_npoints = Parameter(
        3, 'Number of points to use for fitting the peak (odd)')

    # Recipe Products
    frame = Result(prods.ProcessedImage)
    # derivative = Result(prods.ProcessedImage)
    slits = Result(tarray.ArrayType)
    positions3 = Result(tarray.ArrayType)
    positions5 = Result(tarray.ArrayType)
    positions7 = Result(tarray.ArrayType)
    positions9 = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    ROTANG = Result(float)
    TSUTC1 = Result(float)
    csupos = Result(tarray.ArrayType)
    csusens = Result(tarray.ArrayType)

    def run(self, rinput):
        self.logger.info('starting processing for bars detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        self.save_intermediate_img(hdulist, 'reduced_image.fits')

        try:
            rotang = hdr['ROTANG']
            tsutc1 = hdr['TSUTC1']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
            csupos = datamodel.get_csup_from_header(hdr)
            if len(csupos) != 2 * EMIR_NBARS:
                raise RecipeError('Number of CSUPOS != 2 * NBARS')
            csusens = datamodel.get_cs_from_header(hdr)

        except KeyError as error:
            self.logger.error(error)
            raise RecipeError(error)

        self.logger.debug('start finding bars')
        allpos, slits = find_bars(
            hdulist,
            rinput.bars_nominal_positions,
            csupos,
            dtur,
            average_box_row_size=rinput.average_box_row_size,
            average_box_col_size=rinput.average_box_col_size,
            fit_peak_npoints=rinput.fit_peak_npoints,
            median_filter_size=rinput.median_filter_size,
            logger=self.logger)

        self.logger.debug('end finding bars')

        if self.intermediate_results:
            with open('ds9.reg', 'w') as ds9reg:
                slits_to_ds9_reg(ds9reg, slits)

        result = self.create_result(
            frame=hdulist,
            slits=slits,
            positions9=allpos[9],
            positions7=allpos[7],
            positions5=allpos[5],
            positions3=allpos[3],
            DTU=dtub,
            ROTANG=rotang,
            TSUTC1=tsutc1,
            csupos=csupos,
            csusens=csusens,
        )
        return result
Example #21
class CosmeticsRecipe(EmirRecipe):
    """Detector Cosmetics.

    Recipe to find and tag bad pixels in the detector.
    """

    obresult = ObservationResultRequirement()
    insconf = InstrumentConfigurationRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    lowercut = Parameter(
        4.0, 'Values below this sigma level are flagged as dead pixels')
    uppercut = Parameter(
        4.0, 'Values above this sigma level are flagged as hot pixels')
    maxiter = Parameter(30, 'Maximum number of iterations')

    ratioframe = Result(prods.ProcessedImage)
    maskframe = Result(prods.MasterBadPixelMask)

    def run(self, rinput):

        # FIXME:
        # We need 2 flats
        # Of different exposure times
        #
        # And their calibrations
        #

        if len(rinput.obresult.frames) < 2:
            raise RecipeError('The recipe requires 2 flat frames')

        iinfo = []
        for frame in rinput.obresult.frames:
            with frame.open() as hdulist:
                iinfo.append(gather_info(hdulist))

        # Loading calibrations
        with rinput.master_bias.open() as hdul:
            readmode = hdul[0].header.get('READMODE', 'undefined')
            if readmode.lower() in ['simple', 'bias']:
                self.logger.debug('loading bias')
                mbias = hdul[0].data
                bias_corrector = proc.BiasCorrector(mbias)
            else:
                self.logger.debug('ignoring bias')
                bias_corrector = numina.util.node.IdNode()

        with rinput.master_dark.open() as mdark_hdul:
            self.logger.debug('loading dark')
            mdark = mdark_hdul[0].data
            dark_corrector = proc.DarkCorrector(mdark)

        flow = numina.util.flow.SerialFlow([bias_corrector, dark_corrector])

        self.logger.info('processing flat #1')
        with rinput.obresult.frames[0].open() as hdul:
            other = flow(hdul)
            f1 = other[0].data.copy() * iinfo[0]['texp'] * 1e-3

        self.logger.info('processing flat #2')
        with rinput.obresult.frames[1].open() as hdul:
            other = flow(hdul)
            f2 = other[0].data.copy() * iinfo[1]['texp'] * 1e-3

        # Preprocess...

        maxiter = rinput.maxiter
        lowercut = rinput.lowercut
        uppercut = rinput.uppercut

        ninvalid = 0
        mask = None

        if mask:
            m = fits.getdata(mask)
            ninvalid = numpy.count_nonzero(m)
        else:
            m = numpy.zeros_like(f1, dtype='int')

        for niter in range(1, maxiter + 1):
            self.logger.debug('iter %d', niter)
            ratio, m, sigma = cosmetics(f1,
                                        f2,
                                        m,
                                        lowercut=lowercut,
                                        uppercut=uppercut)

            if self.intermediate_results:
                with warnings.catch_warnings():
                    warnings.simplefilter('ignore')
                    fits.writeto('numina-cosmetics-i%02d.fits' % niter,
                                 ratio,
                                 overwrite=True)
                    fits.writeto('numina-mask-i%02d.fits' % niter,
                                 m,
                                 overwrite=True)
                    fits.writeto('numina-sigma-i%02d.fits' % niter,
                                 m * 0.0 + sigma,
                                 overwrite=True)
            self.logger.debug('iter %d, invalid points in input mask: %d',
                              niter, ninvalid)
            self.logger.debug('iter %d, estimated sigma is %f', niter, sigma)
            n_ninvalid = numpy.count_nonzero(m)

            # Probably there is something wrong here:
            # too many defective pixels
            if ninvalid / m.size >= 0.10:
                # This should set a flag in the output
                msg = 'fraction of defective pixels is greater than 10%'
                self.logger.warning(msg)

            if n_ninvalid == ninvalid:
                self.logger.info('convergence reached after %d iterations',
                                 niter)
                break
            self.logger.info('new invalid points: %d', n_ninvalid - ninvalid)
            ninvalid = n_ninvalid
        else:
            # This should set a flag in the output
            msg = 'convergence not reached after %d iterations' % maxiter
            self.logger.warning(msg)

        self.logger.info('number of dead pixels %d',
                         numpy.count_nonzero(m == PIXEL_DEAD))
        self.logger.info('number of hot pixels %d',
                         numpy.count_nonzero(m == PIXEL_HOT))

        if self.intermediate_results:
            with warnings.catch_warnings():
                warnings.simplefilter('ignore')
                fits.writeto('numina-cosmetics.fits', ratio, overwrite=True)
                fits.writeto('numina-mask.fits', m, overwrite=True)
                fits.writeto('numina-sigma.fits',
                             sigma * numpy.ones_like(m),
                             overwrite=True)

        hdu = fits.PrimaryHDU(ratio)
        hdr = hdu.header
        hdr['NUMXVER'] = (__version__, 'Numina package version')
        hdr['NUMRNAM'] = (self.__class__.__name__, 'Numina recipe name')
        hdr['NUMRVER'] = (self.__version__, 'Numina recipe version')
        ratiohdl = fits.HDUList([hdu])

        maskhdu = fits.PrimaryHDU(m)
        hdr = maskhdu.header
        hdr['NUMXVER'] = (__version__, 'Numina package version')
        hdr['NUMRNAM'] = (self.__class__.__name__, 'Numina recipe name')
        hdr['NUMRVER'] = (self.__version__, 'Numina recipe version')
        maskhdl = fits.HDUList([maskhdu])

        res = self.create_result(ratioframe=ratiohdl, maskframe=maskhdl)
        return res
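
The cosmetics() routine used above belongs to numina. The sketch below only illustrates the underlying idea under simplified assumptions (flag pixels whose flat ratio deviates from a robust centre by more than lowercut/uppercut sigmas, and report the scatter); it is not the actual algorithm:

import numpy

# illustrative flag values, standing in for PIXEL_DEAD / PIXEL_HOT
FLAG_DEAD, FLAG_HOT = 1, 2


def cosmetics_sketch(f1, f2, mask, lowercut=4.0, uppercut=4.0):
    """Flag dead/hot pixels from the ratio of two flats (float arrays)."""
    ratio = numpy.divide(f1, f2, out=numpy.zeros_like(f1), where=(f2 != 0))
    valid = ratio[mask == 0]
    center = numpy.median(valid)
    sigma = valid.std()
    new_mask = numpy.zeros_like(mask)
    new_mask[ratio < center - lowercut * sigma] = FLAG_DEAD
    new_mask[ratio > center + uppercut * sigma] = FLAG_HOT
    return ratio, new_mask, sigma


# toy usage with two synthetic flats and an empty starting mask
f1 = numpy.random.normal(1.0, 0.01, size=(128, 128))
f2 = numpy.random.normal(1.0, 0.01, size=(128, 128))
ratio, m, sigma = cosmetics_sketch(f1, f2, numpy.zeros((128, 128), dtype='int'))
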
Example #22
class MaskImagingRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    bars_nominal_positions = Requirement(prods.CoordinateList2DType,
                                         'Nominal positions of the bars')
    median_filter_size = Parameter(5, 'Size of the median box')
    average_box_row_size = Parameter(
        7, 'Number of rows to average for fine centering (odd)')
    average_box_col_size = Parameter(
        21, 'Number of columns to extract for fine centering (odd)')
    fit_peak_npoints = Parameter(
        3, 'Number of points to use for fitting the peak (odd)')

    # Recipe Products
    frame = Result(prods.ProcessedImage)
    # derivative = Result(prods.ProcessedImage)
    slits = Result(tarray.ArrayType)
    positions3 = Result(tarray.ArrayType)
    positions5 = Result(tarray.ArrayType)
    positions7 = Result(tarray.ArrayType)
    positions9 = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    ROTANG = Result(float)
    TSUTC1 = Result(float)
    csupos = Result(tarray.ArrayType)
    csusens = Result(tarray.ArrayType)

    def run(self, rinput):
        self.logger.info('starting processing for bars detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        try:
            rotang = hdr['ROTANG']
            tsutc1 = hdr['TSUTC1']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
            csupos = datamodel.get_csup_from_header(hdr)
            csusens = datamodel.get_cs_from_header(hdr)

        except KeyError as error:
            self.logger.error(error)
            raise numina.exceptions.RecipeError(error)

        self.logger.debug('finding bars')
        # Processed array
        arr = hdulist[0].data

        # Median filter of processed array (two times)
        mfilter_size = rinput.median_filter_size

        self.logger.debug('median filtering X, %d columns', mfilter_size)
        arr_median = median_filter(arr, size=(1, mfilter_size))
        self.logger.debug('median filtering X, %d rows', mfilter_size)
        arr_median = median_filter(arr_median, size=(mfilter_size, 1))

        # Median filter of processed array (two times) in the other direction
        # for Y coordinates
        self.logger.debug('median filtering Y, %d rows', mfilter_size)
        arr_median_alt = median_filter(arr, size=(mfilter_size, 1))
        self.logger.debug('median filtering Y, %d columns', mfilter_size)
        arr_median_alt = median_filter(arr_median_alt, size=(1, mfilter_size))

        xfac = dtur[0] / EMIR_PIXSCALE
        yfac = -dtur[1] / EMIR_PIXSCALE

        vec = [yfac, xfac]
        self.logger.debug('DTU shift is %s', vec)

        # and the table of approx positions of the slits
        barstab = rinput.bars_nominal_positions
        # Currently, we only use fields 0 and 2
        # of the nominal positions file

        # Number of rows used
        # These other parameters can be tuned as well
        bstart = 1
        bend = 2047
        self.logger.debug('ignoring columns outside %d - %d', bstart, bend - 1)

        # extract a region to average
        wy = (rinput.average_box_row_size // 2)
        wx = (rinput.average_box_col_size // 2)
        self.logger.debug('extraction window is %d rows, %d cols', 2 * wy + 1,
                          2 * wx + 1)
        # Fit the peak with these points
        wfit = 2 * (rinput.fit_peak_npoints // 2) + 1
        self.logger.debug('fit with %d points', wfit)

        # Minimum threshold
        threshold = 5 * EMIR_RON
        # Savitzky and Golay (1964) filter to compute the X derivative
        # scipy >= xx has a savgol_filter function
        # for compatibility we do it manually

        allpos = {}
        ypos3_kernel = None
        slits = numpy.zeros((EMIR_NBARS, 8), dtype='float')

        self.logger.info('start finding bars')
        for ks in [3, 5, 7, 9]:
            self.logger.debug('kernel size is %d', ks)
            # S and G kernel for derivative
            kw = ks * (ks * ks - 1) / 12.0
            coeffs_are = -numpy.arange((1 - ks) // 2, (ks - 1) // 2 + 1) / kw
            if ks == 3:
                ypos3_kernel = coeffs_are
            self.logger.debug('kernel weights are %s', coeffs_are)

            self.logger.debug('derive image in X direction')
            arr_deriv = convolve1d(arr_median, coeffs_are, axis=-1)
            # Axis 0 is
            #
            self.logger.debug('derive image in Y direction (with kernel=3)')
            arr_deriv_alt = convolve1d(arr_median_alt, ypos3_kernel, axis=0)

            positions = []
            for coords in barstab:
                lbarid = int(coords[0])
                rbarid = lbarid + EMIR_NBARS
                ref_y_coor = coords[1] + vec[1]
                poly_coeffs = coords[2:]
                prow = coor_to_pix_1d(ref_y_coor) - 1
                fits_row = prow + 1  # FITS pixel index

                # A function that returns the center of the bar
                # given its X position
                def center_of_bar(x):
                    # Pixel values are 0-based
                    return polyval(x + 1 - vec[0], poly_coeffs) + vec[1] - 1

                self.logger.debug('looking for bars with ids %d - %d', lbarid,
                                  rbarid)
                self.logger.debug('reference y position is Y %7.2f',
                                  ref_y_coor)

                # if ref_y_coor is out of limits, skip this bar
                # ref_y_coor is in FITS format
                if (ref_y_coor >= 2047) or (ref_y_coor <= 1):
                    self.logger.debug(
                        'reference y position is out of limits, skipping')
                    positions.append([lbarid, fits_row, fits_row, 1, 0, 3])
                    positions.append([rbarid, fits_row, fits_row, 1, 0, 3])
                    continue

                # Left bar
                self.logger.debug('measure left border (%d)', lbarid)

                centery, xpos, fwhm, st = char_bar_peak_l(arr_deriv,
                                                          prow,
                                                          bstart,
                                                          bend,
                                                          threshold,
                                                          center_of_bar,
                                                          wx=wx,
                                                          wy=wy,
                                                          wfit=wfit)
                xpos1 = xpos
                positions.append(
                    [lbarid, centery + 1, fits_row, xpos + 1, fwhm, st])

                # Right bar
                self.logger.debug('measure right border (%d)', rbarid)
                centery, xpos, fwhm, st = char_bar_peak_r(arr_deriv,
                                                          prow,
                                                          bstart,
                                                          bend,
                                                          threshold,
                                                          center_of_bar,
                                                          wx=wx,
                                                          wy=wy,
                                                          wfit=wfit)
                positions.append(
                    [rbarid, centery + 1, fits_row, xpos + 1, fwhm, st])
                xpos2 = xpos
                #
                if st == 0:
                    self.logger.debug('measure top-bottom borders')
                    try:
                        y1, y2, statusy = char_bar_height(arr_deriv_alt,
                                                          xpos1,
                                                          xpos2,
                                                          centery,
                                                          threshold,
                                                          wh=35,
                                                          wfit=wfit)
                    except Exception as error:
                        self.logger.warning('Error computing height: %s',
                                            error)
                        statusy = 44

                    if statusy in [0, 40]:
                        # Main border is detected
                        positions[-1][1] = y2 + 1
                        positions[-2][1] = y2 + 1
                    else:
                        # Update status
                        positions[-1][-1] = 4
                        positions[-2][-1] = 4
                else:
                    self.logger.debug('slit is not complete')
                    y1, y2 = 0, 0

                # Update positions

                self.logger.debug(
                    'bar %d centroid-y %9.4f, row %d x-pos %9.4f, FWHM %6.3f, status %d',
                    *positions[-2])
                self.logger.debug(
                    'bar %d centroid-y %9.4f, row %d x-pos %9.4f, FWHM %6.3f, status %d',
                    *positions[-1])

                if ks == 5:
                    slits[lbarid -
                          1] = [xpos1, y2, xpos2, y2, xpos2, y1, xpos1, y1]
                    # FITS coordinates
                    slits[lbarid - 1] += 1.0
                    self.logger.debug('inserting bars %d-%d into "slits"',
                                      lbarid, rbarid)

            allpos[ks] = numpy.asarray(
                positions, dtype='float')  # GCS doesn't like lists of lists

        self.logger.debug('end finding bars')
        result = self.create_result(
            frame=hdulist,
            slits=slits,
            positions9=allpos[9],
            positions7=allpos[7],
            positions5=allpos[5],
            positions3=allpos[3],
            DTU=dtub,
            ROTANG=rotang,
            TSUTC1=tsutc1,
            csupos=csupos,
            csusens=csusens,
        )
        return result
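
The recipe above builds the Savitzky-Golay first-derivative kernels by hand, as noted in its comments, for compatibility with older scipy releases. The following check is illustrative only and assumes a scipy version that provides scipy.signal.savgol_coeffs; it verifies that the hand-built kernels match the library coefficients and shows how the kernel yields the X derivative of an image:

import numpy
from scipy.signal import savgol_coeffs
from scipy.ndimage import convolve1d

for ks in [3, 5, 7, 9]:
    # same construction as in the recipe
    kw = ks * (ks * ks - 1) / 12.0
    coeffs_are = -numpy.arange((1 - ks) // 2, (ks - 1) // 2 + 1) / kw
    # savgol_coeffs returns the kernel already in convolution order
    reference = savgol_coeffs(ks, polyorder=2, deriv=1)
    assert numpy.allclose(coeffs_are, reference)

# applying the kernel along the last axis gives the X derivative of an image
image = numpy.random.random((64, 64))
deriv_x = convolve1d(image, coeffs_are, axis=-1)
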
Example #23
class BarDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    bars_nominal_positions = Requirement(prods.CoordinateList2DType,
                                         'Nominal positions of the bars')
    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the canny algorithm')
    canny_high_threshold = Parameter(0.04,
                                     'High threshold for the canny algorithm')
    canny_low_threshold = Parameter(0.01,
                                    'Low threshold for the canny algorithm')

    # Recipe Results
    frame = Result(prods.ProcessedImage)
    positions = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    ROTANG = Result(float)
    csupos = Result(tarray.ArrayType)
    csusens = Result(tarray.ArrayType)
    param_median_filter_size = Result(float)
    param_canny_high_threshold = Result(float)
    param_canny_low_threshold = Result(float)

    def run(self, rinput):

        logger = logging.getLogger('numina.recipes.emir')

        logger.info('starting processing for bars detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        try:
            rotang = hdr['ROTANG']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
            csupos = datamodel.get_csup_from_header(hdr)
            csusens = datamodel.get_cs_from_header(hdr)

        except KeyError as error:
            logger.error(error)
            raise numina.exceptions.RecipeError(error)

        logger.debug('finding bars')

        arr = hdulist[0].data

        # Median filter
        logger.debug('median filtering')
        mfilter_size = rinput.median_filter_size

        arr_median = median_filter(arr, size=mfilter_size)

        # Image is mapped between 0 and 1
        # for the full range [0: 2**16]
        logger.debug('image scaling to 0-1')
        arr_grey = normalize_raw(arr_median)

        # Find borders
        logger.debug('find borders')
        canny_sigma = rinput.canny_sigma
        # These thresholds correspond roughly to
        # value x (2**16 - 1)
        high_threshold = rinput.canny_high_threshold
        low_threshold = rinput.canny_low_threshold

        edges = canny(arr_grey,
                      sigma=canny_sigma,
                      high_threshold=high_threshold,
                      low_threshold=low_threshold)

        # Number of rows used
        # These other parameters can be tuned as well
        total = 5
        maxdist = 1.0
        bstart = 100
        bend = 1900

        positions = []
        nt = total // 2

        xfac = dtur[0] / EMIR_PIXSCALE
        yfac = -dtur[1] / EMIR_PIXSCALE

        vec = [yfac, xfac]
        logger.debug('DTU shift is %s', vec)

        # Based on the 'edges image'
        # and the table of approx positions of the slits
        barstab = rinput.bars_nominal_positions

        # Currently, we only use fields 0 and 2
        # of the nominal positions file

        for coords in barstab:
            lbarid = int(coords[0])
            rbarid = lbarid + 55
            ref_y_coor = coords[2] + vec[1]
            prow = coor_to_pix_1d(ref_y_coor) - 1
            fits_row = prow + 1  # FITS pixel index

            logger.debug('looking for bars with ids %d - %d', lbarid, rbarid)
            logger.debug('reference y position is Y %7.2f', ref_y_coor)
            # Find the position of each bar

            bpos = find_position(edges, prow, bstart, bend, total)

            nbars_found = len(bpos)

            # If no bar is found, append an empty token
            if nbars_found == 0:
                logger.debug('bars %d, %d not found at row %d', lbarid, rbarid,
                             fits_row)
                thisres1 = (lbarid, fits_row, 0, 0, 1)
                thisres2 = (rbarid, fits_row, 0, 0, 1)

            elif nbars_found == 2:

                # Order values by increasing X
                centl, centr = sorted(bpos, key=lambda cen: cen[0])
                c1 = centl[0]
                c2 = centr[0]

                logger.debug('bars found  at row %d between %7.2f - %7.2f',
                             fits_row, c1, c2)
                # Compute FWHM of the collapsed profile

                cslit = arr_grey[prow - nt:prow + nt + 1, :]
                pslit = cslit.mean(axis=0)

                # Add 1 to return FITS coordinates
                epos, epos_f, error = locate_bar_l(pslit, c1)
                thisres1 = lbarid, fits_row, epos + 1, epos_f + 1, error

                epos, epos_f, error = locate_bar_r(pslit, c2)
                thisres2 = rbarid, fits_row, epos + 1, epos_f + 1, error

            elif nbars_found == 1:
                logger.warning(
                    'only 1 edge found  at row %d, not yet implemented',
                    fits_row)
                thisres1 = (lbarid, fits_row, 0, 0, 1)
                thisres2 = (rbarid, fits_row, 0, 0, 1)

            else:
                logger.warning(
                    '3 or more edges found  at row %d, not yet implemented',
                    fits_row)
                thisres1 = (lbarid, fits_row, 0, 0, 1)
                thisres2 = (rbarid, fits_row, 0, 0, 1)

            positions.append(thisres1)
            positions.append(thisres2)

        logger.debug('end finding bars')
        result = self.create_result(
            frame=hdulist,
            positions=positions,
            DTU=dtub,
            ROTANG=rotang,
            csupos=csupos,
            csusens=csusens,
            param_median_filter_size=rinput.median_filter_size,
            param_canny_high_threshold=rinput.canny_high_threshold,
            param_canny_low_threshold=rinput.canny_low_threshold)
        return result
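
In the recipe above, normalize_raw comes from numina and canny from scikit-image. The sketch below illustrates the scaling-plus-edge-detection step under the assumption that the raw frame is simply mapped onto [0, 1] over the full 16-bit range, so the Canny thresholds act as fractions of 2**16 - 1 (this is not the numina implementation):

import numpy
from scipy.ndimage import median_filter
from skimage.feature import canny


def normalize_raw_sketch(arr, full_range=2**16 - 1):
    """Clip to the detector range and rescale onto [0, 1]."""
    return numpy.clip(arr, 0, full_range) / full_range


raw = numpy.random.randint(0, 2**16, size=(256, 256)).astype('float')
arr_median = median_filter(raw, size=5)
arr_grey = normalize_raw_sketch(arr_median)
edges = canny(arr_grey, sigma=3.0,
              high_threshold=0.04, low_threshold=0.01)
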
Example #24
class ArcCalibrationRecipe(EmirRecipe):
    """Process arc images applying wavelength calibration"""

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    bound_param = reqs.RefinedBoundaryModelParamRequirement()
    lines_catalog = Requirement(LinesCatalog, 'Catalog of lines')

    reduced_image = Result(prods.ProcessedImage)
    rectwv_coeff = Result(prods.RectWaveCoeff)
    reduced_55sp = Result(prods.ProcessedMOS)
    reduced_arc = Result(prods.ProcessedMOS)

    @emirdrp.decorators.loginfo
    def run(self, rinput):
        self.logger.info('starting rect.+wavecal. reduction of arc spectra')

        # build object to proceed with bpm, bias, dark and flat
        flow = self.init_filters(rinput)

        # apply bpm, bias, dark and flat
        reduced_image = basic_processing_with_combination(rinput,
                                                          flow,
                                                          method=median)
        # update header with additional info
        hdr = reduced_image[0].header
        self.set_base_headers(hdr)

        # save intermediate image in work directory
        self.save_intermediate_img(reduced_image, 'reduced_image.fits')

        # RectWaveCoeff object (with rectification and wavelength calibration
        # coefficients for the particular CSU configuration of the arc image)
        # and HDUList object with the FITS image corresponding to 55 median
        # spectra of each slitlet
        rectwv_coeff, reduced_55sp = rectwv_coeff_from_arc_image(
            reduced_image,
            rinput.bound_param,
            rinput.lines_catalog,
        )

        # generate associated ds9 region files and save them in work directory
        if self.intermediate_results:
            save_four_ds9(rectwv_coeff)

        # apply rectification and wavelength calibration
        reduced_arc = apply_rectwv_coeff(reduced_image, rectwv_coeff)

        # save results in result directory
        self.logger.info('end rect.+wavecal. reduction of arc spectra')
        result = self.create_result(reduced_image=reduced_image,
                                    rectwv_coeff=rectwv_coeff,
                                    reduced_55sp=reduced_55sp,
                                    reduced_arc=reduced_arc)
        return result

    def set_base_headers(self, hdr):
        newhdr = super(ArcCalibrationRecipe, self).set_base_headers(hdr)
        # Update SEC to 0
        newhdr['SEC'] = 0
        return newhdr
Example #25
class TestPinholeRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    pinhole_nominal_positions = Requirement(
        prods.CoordinateList2DType, 'Nominal positions of the pinholes')
    shift_coordinates = Parameter(
        True, 'Use header information to'
        ' shift the pinhole positions from (0,0) '
        'to X_DTU, Y_DTU')
    box_half_size = Parameter(4, 'Half of the computation box size in pixels')
    recenter = Parameter(True, 'Recenter the pinhole coordinates')
    max_recenter_radius = Parameter(2.0, 'Maximum distance for recentering')

    # Recipe Results
    frame = Result(prods.ProcessedImage)
    positions = Result(tarray.ArrayType)
    positions_alt = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    filter = Result(str)
    readmode = Result(str)
    ROTANG = Result(float)
    DETPA = Result(float)
    DTUPA = Result(float)
    param_recenter = Result(bool)
    param_max_recenter_radius = Result(float)
    param_box_half_size = Result(float)

    def run(self, rinput):
        _logger.info('starting processing for slit detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        _logger.debug('finding pinholes')

        try:
            filtername = hdr['FILTER']
            readmode = hdr['READMODE']
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
        except KeyError as error:
            _logger.error(error)
            raise numina.exceptions.RecipeError(error)

        if rinput.shift_coordinates:
            xdtur, ydtur, zdtur = dtur
            xfac = xdtur / EMIR_PIXSCALE
            yfac = -ydtur / EMIR_PIXSCALE

            vec = numpy.array([yfac, xfac])
            _logger.info('shift is %s', vec)
            ncenters = rinput.pinhole_nominal_positions + vec
        else:
            _logger.info('using pinhole coordinates as they are')
            ncenters = rinput.pinhole_nominal_positions

        _logger.info('pinhole characterization')
        positions = pinhole_char(hdulist[0].data,
                                 ncenters,
                                 box=rinput.box_half_size,
                                 recenter_pinhole=rinput.recenter,
                                 maxdist=rinput.max_recenter_radius)

        _logger.info('alternate pinhole characterization')
        positions_alt = pinhole_char2(
            hdulist[0].data,
            ncenters,
            recenter_pinhole=rinput.recenter,
            recenter_half_box=rinput.box_half_size,
            recenter_maxdist=rinput.max_recenter_radius)

        result = self.create_result(
            frame=hdulist,
            positions=positions,
            positions_alt=positions_alt,
            filter=filtername,
            DTU=dtub,
            readmode=readmode,
            ROTANG=rotang,
            DETPA=detpa,
            DTUPA=dtupa,
            param_recenter=rinput.recenter,
            param_max_recenter_radius=rinput.max_recenter_radius,
            param_box_half_size=rinput.box_half_size)
        return result
Example #26
0
class TestSlitMaskDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the Canny algorithm')
    canny_high_threshold = Parameter(0.04, 'High threshold for the Canny algorithm')
    canny_low_threshold = Parameter(0.01, 'Low threshold for the Canny algorithm')
    obj_min_size = Parameter(200, 'Minimum size of the slit')
    obj_max_size = Parameter(3000, 'Maximum size of the slit')
    slit_size_ratio = Parameter(4.0, 'Minimum ratio between height and width for slits')

    # Recipe Results
    frame = Result(prods.DataFrameType)
    slitstable = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    ROTANG = Result(float)
    DETPA = Result(float)
    DTUPA = Result(float)

    def run(self, rinput):
        self.logger.info('starting slit processing')

        self.logger.info('basic image reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)
        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        try:
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)

        except KeyError as error:
            self.logger.error(error)
            raise RecipeError(error)

        self.logger.debug('finding slits')

        # First, prefilter with median
        median_filter_size = rinput.median_filter_size
        canny_sigma = rinput.canny_sigma
        obj_min_size = rinput.obj_min_size
        obj_max_size = rinput.obj_max_size

        data1 = hdulist[0].data
        self.logger.debug('Median filter with box %d', median_filter_size)
        data2 = median_filter(data1, size=median_filter_size)

        # Grey level image
        img_grey = normalize_raw(data2)

        # Find edges with Canny
        self.logger.debug('Find edges with Canny, sigma %f', canny_sigma)
        # These thresholds correspond roughly to
        # value x (2**16 - 1)
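        # e.g. the default high threshold of 0.04 corresponds to roughly
        # 0.04 * 65535 ~ 2621 counts in the raw frame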
        high_threshold = rinput.canny_high_threshold
        low_threshold = rinput.canny_low_threshold
        self.logger.debug('Find edges, Canny high threshold %f', high_threshold)
        self.logger.debug('Find edges, Canny low threshold %f', low_threshold)
        edges = canny(img_grey, sigma=canny_sigma,
                      high_threshold=high_threshold,
                      low_threshold=low_threshold)
        # Fill edges
        self.logger.debug('Fill holes')
        fill_slits = ndimage.binary_fill_holes(edges)

        self.logger.debug('Label objects')
        label_objects, nb_labels = ndimage.label(fill_slits)
        self.logger.debug('%d objects found', nb_labels)
        # Filter on the area of the labeled region
        # Perhaps we could ignore this filtering and
        # do it later?
        self.logger.debug('Filter objects by size')
        # Sizes of regions
        sizes = numpy.bincount(label_objects.ravel())

        self.logger.debug('Min size is %d', obj_min_size)
        self.logger.debug('Max size is %d', obj_max_size)

        mask_sizes = (sizes > obj_min_size) & (sizes < obj_max_size)

        # Filter out regions
        nids, = numpy.where(mask_sizes)

        mm = numpy.in1d(label_objects, nids)
        mm.shape = label_objects.shape

        fill_slits_clean = numpy.where(mm, 1, 0)
        #plt.imshow(fill_slits_clean)

        # and relabel
        self.logger.debug('Label filtered objects')
        relabel_objects, nb_labels = ndimage.label(fill_slits_clean)
        self.logger.debug('%d objects found after filtering', nb_labels)
        ids = list(six.moves.range(1, nb_labels + 1))

        self.logger.debug('Find regions and centers')
        regions = ndimage.find_objects(relabel_objects)
        centers = ndimage.center_of_mass(data2, labels=relabel_objects,
                                         index=ids
                                         )
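        # NOTE: 'centers' is computed here but not used further in this recipe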

        table = char_slit(data2, regions,
                          slit_size_ratio=rinput.slit_size_ratio
                          )

        result = self.create_result(frame=hdulist, slitstable=table,
                                    DTU=dtub,
                                    ROTANG=rotang,
                                    DETPA=detpa,
                                    DTUPA=dtupa
                                    )

        return result
Example #27
class TestSlitDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the Canny algorithm')
    canny_high_threshold = Parameter(0.04, 'High threshold for the Canny algorithm')
    canny_low_threshold = Parameter(0.01, 'Low threshold for the Canny algorithm')

    # Recipe Results
    frame = Result(prods.ProcessedImage)
    slitstable = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    ROTANG = Result(float)
    DETPA = Result(float)
    DTUPA = Result(float)

    def run(self, rinput):
        self.logger.info('starting slit processing')

        self.logger.info('basic image reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)
        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        try:
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)

        except KeyError as error:
            self.logger.error(error)
            raise RecipeError(error)

        self.logger.debug('finding slits')


        # Filter values below 0.0
        self.logger.debug('Filter values below 0')
        data1 = hdulist[0].data[:]

        data1[data1 < 0.0] = 0.0
        # First, prefilter with median
        median_filter_size = rinput.median_filter_size
        canny_sigma = rinput.canny_sigma

        self.logger.debug('Median filter with box %d', median_filter_size)
        data2 = median_filter(data1, size=median_filter_size)

        # Grey level image
        img_grey = normalize_raw(data2)

        # Find edges with Canny
        self.logger.debug('Find edges, Canny sigma %f', canny_sigma)
        # These thresholds correspond roughly to
        # value x (2**16 - 1)
        high_threshold = rinput.canny_high_threshold
        low_threshold = rinput.canny_low_threshold
        self.logger.debug('Find edges, Canny high threshold %f', high_threshold)
        self.logger.debug('Find edges, Canny low threshold %f', low_threshold)
        edges = canny(img_grey, sigma=canny_sigma,
                      high_threshold=high_threshold,
                      low_threshold=low_threshold)
        
        # Fill edges
        self.logger.debug('Fill holes')
        # I do a dilation and erosion to fill
        # possible holes in 'edges'
        fill = ndimage.binary_dilation(edges)
        fill2 = ndimage.binary_fill_holes(fill)
        fill_slits = ndimage.binary_erosion(fill2)
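        # The dilation / fill / erosion sequence behaves like a morphological
        # closing wrapped around binary_fill_holes, so small gaps in the Canny
        # edge map do not prevent the slit interiors from being filled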

        self.logger.debug('Label objects')
        label_objects, nb_labels = ndimage.label(fill_slits)
        self.logger.debug('%d objects found', nb_labels)
        ids = list(six.moves.range(1, nb_labels + 1))

        self.logger.debug('Find regions and centers')
        regions = ndimage.find_objects(label_objects)
        centers = ndimage.center_of_mass(data2, labels=label_objects,
                                         index=ids
                                         )

        table = char_slit(data2, regions,
                          slit_size_ratio=-1.0
                          )

        result = self.create_result(frame=hdulist,
                                    slitstable=table,
                                    DTU=dtub,
                                    ROTANG=rotang,
                                    DETPA=detpa,
                                    DTUPA=dtupa
                                    )

        return result
Example #28
class TestPointSourceRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    shift_coordinates = Parameter(
        True, 'Use header information to'
        ' shift the pinhole positions from (0,0) '
        'to X_DTU, Y_DTU')
    box_half_size = Parameter(4, 'Half of the computation box size in pixels')
    recenter = Parameter(True, 'Recenter the pinhole coordinates')
    max_recenter_radius = Parameter(2.0, 'Maximum distance for recentering')

    # Recipe Results
    frame = Result(prods.ProcessedImage)
    positions = Result(tarray.ArrayType)
    positions_alt = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    filter = Result(str)
    readmode = Result(str)
    ROTANG = Result(float)
    DETPA = Result(float)
    DTUPA = Result(float)
    param_recenter = Result(bool)
    param_max_recenter_radius = Result(float)
    param_box_half_size = Result(float)

    def run(self, rinput):
        self.logger.info('starting processing for object detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        self.logger.debug('finding point sources')

        try:
            filtername = hdr['FILTER']
            readmode = hdr['READMODE']
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
        except KeyError as error:
            self.logger.error(error)
            raise RecipeError(error)

        data = hdulist[0].data

        # Copy needed in numpy 1.7
        # This seems already bitswapped??
        # FIXME: check this works offline/online
        # ndata = data.byteswap().newbyteorder()
        # data = data.byteswap(inplace=True).newbyteorder()

        snr_detect = 5.0
        fwhm = 4.0
        npixels = 15
        box_shape = [64, 64]
        self.logger.info('point source detection2')
        self.logger.info('using internal mask to remove corners')
        # Corners
        mask = numpy.zeros_like(data, dtype='int32')
        mask[2000:, 0:80] = 1
        mask[2028:, 2000:] = 1
        mask[:50, 1950:] = 1
        mask[:100, :50] = 1
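        # (the hard-coded index ranges above assume a 2048x2048 detector frame)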
        # Remove corner regions

        self.logger.info('compute background map, %s', box_shape)
        bkg = sep.Background(data)
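        # box_shape only appears in the log message; sep.Background is called
        # with its defaults, which also use a 64x64 background mesh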

        self.logger.info('reference fwhm is %5.1f pixels', fwhm)
        self.logger.info('detect threshold, %3.1f over background', snr_detect)
        self.logger.info('convolve with gaussian kernel, FWHM %3.1f pixels',
                         fwhm)
        sigma = fwhm * gaussian_fwhm_to_sigma
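        # gaussian_fwhm_to_sigma = 1 / (2 * sqrt(2 * ln 2)) ~ 0.4247, so the
        # smoothing kernel has the same FWHM as the expected point sources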
        #
        kernel = Gaussian2DKernel(sigma)
        kernel.normalize()

        thresh = snr_detect * bkg.globalrms
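        # bkg.globalrms is the global RMS of the background map, so 'thresh'
        # is an absolute per-pixel detection threshold of snr_detect sigma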
        data_s = data - bkg.back()
        objects, segmap = sep.extract(data_s,
                                      thresh,
                                      minarea=npixels,
                                      filter_kernel=kernel.array,
                                      segmentation_map=True,
                                      mask=mask)
        # overwrite so repeated runs do not fail if segmap.fits already exists
        fits.writeto('segmap.fits', segmap, overwrite=True)
        self.logger.info('detected %d objects', len(objects))

        # Hardcoded values
        rs2 = 15.0
        fit_rad = 10.0
        flux_min = 1000.0
        flux_max = 30000.0
        self.logger.debug('Flux limit is %6.1f %6.1f', flux_min, flux_max)
        # FIXME: this should be a view, not a copy
        xall = objects['x']
        yall = objects['y']
        mm = numpy.array([xall, yall]).T
        self.logger.info('computing FWHM')
        # Find objects with pairs inside fit_rad
        kdtree = KDTree(mm)
        nearobjs = (kdtree.query_ball_tree(kdtree, r=fit_rad))
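        # nearobjs[i] lists the indices of all detections within fit_rad of
        # detection i (including i itself), so a length of 1 marks an isolated
        # source; this drives the blend flag assigned below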
        positions = []
        for idx, obj in enumerate(objects):
            x0 = obj['x']
            y0 = obj['y']
            sl = image_box2d(x0, y0, data.shape, (fit_rad, fit_rad))
            # sl_sky = image_box2d(x0, y0, data.shape, (rs2, rs2))
            part_s = data_s[sl]
            # Logical coordinates
            xx0 = x0 - sl[1].start
            yy0 = y0 - sl[0].start

            _, fwhm_x, fwhm_y = compute_fwhm_2d_simple(part_s, xx0, yy0)

            if min(fwhm_x, fwhm_y) < 3:
                continue
            if flux_min > obj['peak'] or flux_max < obj['peak']:
                continue
            # nobjs is the number of object inside fit_rad
            nobjs = len(nearobjs[idx])

            flag = 0 if nobjs == 1 else 1

            positions.append([idx, x0, y0, obj['peak'], fwhm_x, fwhm_y, flag])

        self.logger.info('saving photometry')
        positions = numpy.array(positions)
        # no separate alternate characterization is computed in this recipe;
        # the same table is returned for both results
        positions_alt = positions
        self.logger.info('end processing for object detection')

        result = self.create_result(
            frame=hdulist,
            positions=positions,
            positions_alt=positions_alt,
            filter=filtername,
            DTU=dtub,
            readmode=readmode,
            ROTANG=rotang,
            DETPA=detpa,
            DTUPA=dtupa,
            param_recenter=rinput.recenter,
            param_max_recenter_radius=rinput.max_recenter_radius,
            param_box_half_size=rinput.box_half_size)
        return result
Example #29
class TestMaskRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterIntensityFlatFieldRequirement()
    master_sky = reqs.MasterSkyRequirement()

    pinhole_nominal_positions = Requirement(
        prods.CoordinateList2DType, 'Nominal positions of the pinholes')
    shift_coordinates = Parameter(
        True, 'Use header information to'
        ' shift the pinhole positions from (0,0) '
        'to X_DTU, Y_DTU')
    box_half_size = Parameter(4, 'Half of the computation box size in pixels')
    recenter = Parameter(True, 'Recenter the pinhole coordinates')
    max_recenter_radius = Parameter(2.0, 'Maximum distance for recentering')

    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the Canny algorithm')
    obj_min_size = Parameter(200, 'Minimum size of the slit')
    obj_max_size = Parameter(3000, 'Maximum size of the slit')
    slit_size_ratio = Parameter(
        4.0, 'Minimum ratio between height and width for slits')

    # Recipe Results
    frame = Result(prods.ProcessedImage)
    positions = Result(tarray.ArrayType)
    positions_alt = Result(tarray.ArrayType)
    slitstable = Result(tarray.ArrayType)
    DTU = Result(tarray.ArrayType)
    filter = Result(str)
    readmode = Result(str)
    ROTANG = Result(float)
    DETPA = Result(float)
    DTUPA = Result(float)
    param_recenter = Result(bool)
    param_max_recenter_radius = Result(float)
    param_box_half_size = Result(float)

    def run(self, rinput):
        _logger.info('starting processing for slit detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        _logger.debug('finding pinholes')

        try:
            filtername = hdr['FILTER']
            readmode = hdr['READMODE']
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
        except KeyError as error:
            _logger.error(error)
            raise numina.exceptions.RecipeError(error)

        if rinput.shift_coordinates:
            xdtur, ydtur, zdtur = dtur
            xfac = xdtur / EMIR_PIXSCALE
            yfac = -ydtur / EMIR_PIXSCALE

            vec = numpy.array([yfac, xfac])
            _logger.info('shift is %s', vec)
            ncenters = rinput.pinhole_nominal_positions + vec
        else:
            _logger.info('using pinhole coordinates as they are')
            ncenters = rinput.pinhole_nominal_positions

        _logger.info('pinhole characterization')
        positions = pinhole_char(hdulist[0].data,
                                 ncenters,
                                 box=rinput.box_half_size,
                                 recenter_pinhole=rinput.recenter,
                                 maxdist=rinput.max_recenter_radius)

        _logger.info('alternate pinhole characterization')
        positions_alt = pinhole_char2(
            hdulist[0].data,
            ncenters,
            recenter_pinhole=rinput.recenter,
            recenter_half_box=rinput.box_half_size,
            recenter_maxdist=rinput.max_recenter_radius)

        _logger.debug('finding slits')

        # First, prefilter with median
        median_filter_size = rinput.median_filter_size
        canny_sigma = rinput.canny_sigma
        obj_min_size = rinput.obj_min_size
        obj_max_size = rinput.obj_max_size

        data1 = hdulist[0].data
        _logger.debug('Median filter with box %d', median_filter_size)
        data2 = median_filter(data1, size=median_filter_size)

        # Grey level image
        img_grey = normalize(data2)

        # Find edges with Canny
        _logger.debug('Find edges with Canny, sigma %f', canny_sigma)
        edges = canny(img_grey, sigma=canny_sigma)

        # Fill edges
        _logger.debug('Fill holes')
        fill_slits = ndimage.binary_fill_holes(edges)

        _logger.debug('Label objects')
        label_objects, nb_labels = ndimage.label(fill_slits)
        _logger.debug('%d objects found', nb_labels)
        # Filter on the area of the labeled region
        # Perhaps we could ignore this filtering and
        # do it later?
        _logger.debug('Filter objects by size')
        # Sizes of regions
        sizes = numpy.bincount(label_objects.ravel())

        _logger.debug('Min size is %d', obj_min_size)
        _logger.debug('Max size is %d', obj_max_size)

        mask_sizes = (sizes > obj_min_size) & (sizes < obj_max_size)

        # Filter out regions
        nids, = numpy.where(mask_sizes)

        mm = numpy.in1d(label_objects, nids)
        mm.shape = label_objects.shape

        fill_slits_clean = numpy.where(mm, 1, 0)

        # and relabel
        _logger.debug('Label filtered objects')
        relabel_objects, nb_labels = ndimage.label(fill_slits_clean)
        _logger.debug('%d objects found after filtering', nb_labels)
        ids = list(six.moves.range(1, nb_labels + 1))

        _logger.debug('Find regions and centers')
        regions = ndimage.find_objects(relabel_objects)
        centers = ndimage.center_of_mass(data2,
                                         labels=relabel_objects,
                                         index=ids)

        table = char_slit(data2,
                          regions,
                          slit_size_ratio=rinput.slit_size_ratio)

        result = self.create_result(
            frame=hdulist,
            positions=positions,
            positions_alt=positions_alt,
            slitstable=table,
            filter=filtername,
            DTU=dtub,
            readmode=readmode,
            ROTANG=rotang,
            DETPA=detpa,
            DTUPA=dtupa,
            param_recenter=rinput.recenter,
            param_max_recenter_radius=rinput.max_recenter_radius,
            param_box_half_size=rinput.box_half_size)
        return result
Example #30
class StareSpectraRectwv(EmirRecipe):
    """Process images in Stare spectra mode applying wavelength calibration

    Note that in this case the wavelength calibration has already been
    determined.

    """

    obresult = reqs.ObservationResultRequirement()
    master_bpm = reqs.MasterBadPixelMaskRequirement()
    master_bias = reqs.MasterBiasRequirement()
    master_dark = reqs.MasterDarkRequirement()
    master_flat = reqs.MasterSpectralFlatFieldRequirement()
    rectwv_coeff = reqs.RectWaveCoeffRequirement()

    reduced_mos = Result(prods.ProcessedMOS)

    def run(self, rinput):
        self.logger.info('applying existing rect.+wavecal. calibration of '
                         'stare spectra')

        # build object to proceed with bpm, bias, dark and flat
        flow = self.init_filters(rinput)

        # apply bpm, bias, dark and flat
        reduced_image = basic_processing_with_combination(rinput,
                                                          flow,
                                                          method=median)
        # update header with additional info
        hdr = reduced_image[0].header
        self.set_base_headers(hdr)

        # save intermediate image in work directory
        self.save_intermediate_img(reduced_image, 'reduced_image.fits')

        # apply rectification and wavelength calibration
        reduced_mos = apply_rectwv_coeff(reduced_image, rinput.rectwv_coeff)

        # ds9 region files (to be saved in the work directory)
        if self.intermediate_results:
            save_four_ds9(rinput.rectwv_coeff)
            save_spectral_lines_ds9(rinput.rectwv_coeff)

        # compute median spectra employing the useful region of the
        # rectified image
        if self.intermediate_results:
            for imode, outfile in enumerate([
                    'median_spectra_full', 'median_spectra_slitlets',
                    'median_spectrum_slitlets'
            ]):
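                # the three modes are assumed to map, in order, onto the three
                # output names listed above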
                median_image = median_slitlets_rectified(reduced_mos,
                                                         mode=imode)
                self.save_intermediate_img(median_image, outfile + '.fits')

        # save results in results directory
        self.logger.info('end rect.+wavecal. reduction of stare spectra')
        result = self.create_result(reduced_mos=reduced_mos)
        return result

    def set_base_headers(self, hdr):
        newhdr = super(StareSpectraRectwv, self).set_base_headers(hdr)
        # Update EXP to 0
        newhdr['EXP'] = 0
        return newhdr