Example #1
class TargetAcquisitionRecipe(EmirRecipe):
    """
    Acquire a target.

    Recipe for the processing of target acquisition images.

    **Observing modes:**

        * Target acquisition


    """

    # Requirements
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()

    # Products
    telescope_offset = Product(TelescopeOffset)

    def run(self, rinput):
        return self.create_result(telescope_offset=TelescopeOffset())
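
All of these recipes share the same declarative pattern: requirements and products are listed as class attributes, and the framework gathers them to validate the input (rinput) and assemble the result. The toy sketch below shows how such class-level declarations can be collected; it only illustrates the pattern, it is not numina's actual machinery (Requirement, Product and Recipe are redefined locally).

# Toy sketch of the declarative recipe pattern, not numina's
# real implementation (which adds types, validation and storage).
class Requirement:
    def __init__(self, description=''):
        self.description = description


class Product:
    def __init__(self, ptype):
        self.ptype = ptype


class Recipe:
    def __init_subclass__(cls, **kwargs):
        super().__init_subclass__(**kwargs)
        # Collect declared requirements and products from the class body,
        # the way a framework would.
        cls.requirements = {k: v for k, v in vars(cls).items()
                            if isinstance(v, Requirement)}
        cls.products = {k: v for k, v in vars(cls).items()
                        if isinstance(v, Product)}


class DemoRecipe(Recipe):
    master_dark = Requirement('Master dark frame')
    frame = Product(float)


print(sorted(DemoRecipe.requirements))  # ['master_dark']
print(sorted(DemoRecipe.products))      # ['frame']
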
Example #2
class DitherSkyRecipe(EmirRecipe):
    """Recipe to process data taken in dither sky mode.

    """

    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()

    skyframe = Product(MasterIntensityFlat)

    def run(self, rinput):
        _logger.debug('instrument %s, mode %s', rinput.obresult.instrument,
                      rinput.obresult.mode)
        _logger.info('starting sky reduction with dither')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_segmentation(rinput,
                                                     flow,
                                                     method=median,
                                                     errors=True)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        _logger.info('end sky reduction with dither')

        result = self.create_result(skyframe=hdulist)

        return result
Example #3
class SimpleSkyRecipe(EmirRecipe):
    """Recipe to process data taken in intensity flat-field mode.

    """

    master_bpm = MasterBadPixelMaskRequirement()
    obresult = ObservationResultRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()

    skyframe = Product(MasterIntensityFlat)

    def run(self, rinput):
        _logger.info('starting sky reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median,
                                                    errors=True)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        result = self.create_result(skyframe=hdulist)

        return result
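
basic_processing_with_combination is a pipeline helper, but the shape of its output when errors=True is easy to mimic: a primary HDU with the combined data plus a 'variance' extension, which is how hdulist['variance'] is addressed in later examples. A minimal sketch with numpy and astropy (the 1.2533**2 factor is the asymptotic variance penalty of the median versus the mean for Gaussian noise, an assumption made here, not necessarily what the helper does internally):

import numpy
from astropy.io import fits

# Median-combine a stack of frames and attach a rough variance map.
frames = [numpy.random.normal(100.0, 5.0, (64, 64)) for _ in range(5)]
stack = numpy.stack(frames, axis=0)

combined = numpy.median(stack, axis=0)
var = 1.2533 ** 2 * stack.var(axis=0, ddof=1) / len(frames)

hdulist = fits.HDUList([
    fits.PrimaryHDU(combined),
    fits.ImageHDU(var, name='VARIANCE'),
])
print(hdulist['variance'].data.mean())
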
Example #4
class WavelengthCalibrationRecipe(EmirRecipe):
    """Recipe to calibrate the spectral response.

    **Observing modes:**

        * Wavelength calibration (4.5)

    **Inputs:**

     * List of line positions
     * Calibrations up to spectral flatfielding

    **Outputs:**

     * Wavelength calibration structure

    **Procedure:**

     * TBD
    """

    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_spectral_ff = MasterSpectralFlatFieldRequirement()

    cal = Product(WavelengthCalibration)

    def run(self, rinput):
        return self.create_result(cal=WavelengthCalibration())
Example #5
class OffsetSpectraRecipe(EmirRecipe):
    """
    Observing mode:
        Offset spectra beyond the slit
    """

    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_spectral_ff = Requirement(prods.MasterSpectralFlat,
                                     'Master spectral flatfield')
    st_calibration = Requirement(prods.SlitTransmissionCalibration,
                                 'Slit transmission calibration')
    w_calibration = Requirement(prods.WavelengthCalibration,
                                'Wavelength calibration')
    lines = Parameter('lines', None,
                      'List of x-lambda pairs of line coordinates')

    spectra = Product(prods.Spectra)
    catalog = Product(prods.LinesCatalog)

    def run(self, rinput):
        return self.create_result(spectra=prods.Spectra(),
                                  catalog=prods.LinesCatalog())
Example #6
class SlitTransmissionRecipe(EmirRecipe):
    """Recipe to calibrate the slit transmission.

    **Observing modes:**

        * Slit transmission calibration (4.4)

    **Inputs:**

        * A list of uniformly illuminated images of MSM

    **Outputs:**

     * A list of slit transmission functions

    **Procedure:**

     * TBD

    """

    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()

    slit = Product(SlitTransmissionCalibration)

    def run(self, rinput):
        return self.create_result(slit=SlitTransmissionCalibration())
Example #7
class ArcCalibrationRecipe(EmirRecipe):

    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    lines_catalog = Requirement(LinesCatalog, "Catalog of lines")
    polynomial_degree = Parameter(2,
                                  'Polynomial degree of the arc calibration')

    polynomial_coeffs = Product(ArrayType)

    def run(self, rinput):
        _logger.info('starting arc calibration')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        nslits = len(rinput.lines_catalog)
        coeff_table = numpy.zeros((nslits, rinput.polynomial_degree + 1))

        result = self.create_result(polynomial_coeffs=coeff_table)

        return result
Example #8
class TestSkyCorrectRecipe(EmirRecipe):

    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = Requirement(MasterIntensityFlat, 'Master Sky calibration')

    frame = Product(DataFrameType)

    def run(self, rinput):
        _logger.info('starting simple sky reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median)
        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        # Update SEC to 0
        hdr['SEC'] = 0

        result = self.create_result(frame=hdulist)

        return result
Example #9
class IntensityFlatRecipe(EmirRecipe):
    """Recipe to process data taken in intensity flat-field mode.

    Recipe to process intensity flat-fields. The flat-on and
    flat-off images are combined (method?) separately and then
    subtracted to obtain a thermal-subtracted flat-field.

    **Observing modes:**

     * Intensity Flat-Field

    **Inputs:**

      * A master dark frame
      * Non linearity
      * A model of the detector.

    **Outputs:**

     * TBD

    **Procedure:**

     * A combined thermal-subtracted flat field, normalized to median 1,
       with variance extension and quality flag.

    """

    master_bpm = MasterBadPixelMaskRequirement()
    obresult = ObservationResultRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()

    flatframe = Product(MasterIntensityFlat)

    def run(self, rinput):
        _logger.info('starting flat reduction')

        errors = True

        flow = self.init_filters(rinput)
        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median,
                                                    errors=errors)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        mm = hdulist[0].data.mean()
        hdr['CCDMEAN'] = mm

        hdulist[0].data /= mm
        if errors:
            hdulist['variance'].data /= (mm * mm)

        result = self.create_result(flatframe=hdulist)

        return result
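
The normalization at the end divides the data by its mean and the variance plane by the mean squared. That follows from basic error propagation: for a constant m, Var(X / m) = Var(X) / m**2. A quick numerical check:

import numpy

rng = numpy.random.default_rng(1)
data = rng.normal(1000.0, 30.0, 100000)
mm = data.mean()

# Scaling the data by 1/mm scales the variance by 1/mm**2,
# so multiplying back by mm**2 recovers the original variance.
print(data.var())
print((data / mm).var() * mm ** 2)
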
Example #10
class CSUSpectraExtractionRecipe(EmirRecipe):
    """Extract spectra in image taken with the CSU configured"""

    # Recipe Requirements
    obresult = ObservationResultRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()
    nrows_side = Parameter(5, 'Number of rows to extract around the center')
    slits_positions = Requirement(ArrayType,
                                  'Positions and widths of the slits')

    # Recipe products
    frame = Product(DataFrameType)
    rss = Product(DataFrameType)

    def run(self, rinput):
        _logger.info('starting extraction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        data1 = hdulist[0].data

        _logger.info('Create output images')
        rssdata = numpy.zeros(
            (rinput.slits_positions.shape[0], data1.shape[1]), dtype='float32')

        nrows = rinput.nrows_side
        # Loop over slits
        for idx, slit_coords in enumerate(rinput.slits_positions):

            x, y, ax, ay = slit_coords  # Coords in FITS coordinates

            ref_col = wc_to_pix(x - 1)
            ref_row = wc_to_pix(y - 1)

            _logger.info('Processing slit in column %i, row=%i', ref_col,
                         ref_row)

            # Simple extraction

            _logger.info('Extract %i rows around center', nrows)
            region = data1[ref_row - nrows:ref_row + nrows + 1, :]

            rssdata[idx, :] = region.mean(axis=0)

        hdurss = fits.PrimaryHDU(rssdata)

        result = self.create_result(frame=hdulist, rss=hdurss)

        return result
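
The extraction here is a plain boxcar: take 2 * nrows_side + 1 detector rows centered on each slit and average them along the spatial axis, producing one spectrum per slit in the row-stacked (RSS) output. The core of the loop, stand-alone on a synthetic frame (the slit rows are made up for the demo):

import numpy

data = numpy.random.normal(10.0, 1.0, (2048, 2048)).astype('float32')
slit_rows = [100, 500, 1200]   # hypothetical 0-based slit center rows
nrows = 5                      # rows to extract on each side

rss = numpy.zeros((len(slit_rows), data.shape[1]), dtype='float32')
for idx, ref_row in enumerate(slit_rows):
    # Average a band of rows around the slit center.
    region = data[ref_row - nrows:ref_row + nrows + 1, :]
    rss[idx, :] = region.mean(axis=0)

print(rss.shape)  # (3, 2048)
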
Example #11
class DarkRecipe(EmirRecipe):
    """Recipe to process data taken in Dark current image Mode.

    Recipe to process dark images. The dark images will be combined
    using the median.
    They do have to be of the same exposure time t.

    **Observing mode:**

     * Dark current Image (3.2)

    **Inputs:**

    **Outputs:**

     * A combined dark frame, with variance extension.
    """

    master_bpm = MasterBadPixelMaskRequirement()
    obresult = ObservationResultRequirement()
    master_bias = MasterBiasRequirement()

    darkframe = Product(MasterDark)

    def run(self, rinput):

        _logger.info('starting dark reduction')

        flow = self.init_filters(rinput)

        iinfo = gather_info_frames(rinput.obresult.frames)
        ref_exptime = iinfo[0]['texp']
        for el in iinfo[1:]:
            if abs(el['texp'] - ref_exptime) > 1e-4:
                _logger.error('image with wrong exposure time')
                raise RecipeError('image with wrong exposure time')

        hdulist = basic_processing_with_combination(rinput,
                                                    flow,
                                                    method=median,
                                                    errors=True)

        pdata = hdulist[0].data

        # update hdu header with
        # reduction keywords

        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        hdr['CCDMEAN'] = pdata.mean()

        _logger.info('dark reduction ended')
        result = self.create_result(darkframe=hdulist)
        return result
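
The exposure-time guard compares every frame against the first one with a small tolerance, since dark current scales with integration time and mixing exposure times would bias the median combination. The pattern in isolation:

# All exposure times must match the first frame's within a tolerance;
# the EXPTIME values here are made up for the demo.
texps = [300.0, 300.0, 300.00002, 300.0]

ref_exptime = texps[0]
for texp in texps[1:]:
    if abs(texp - ref_exptime) > 1e-4:
        raise ValueError('image with wrong exposure time')
print('all exposure times consistent')
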
Example #12
class SpectralFlatRecipe(EmirRecipe):

    master_bpm = MasterBadPixelMaskRequirement()
    obresult = ObservationResultRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()

    flatframe = Product(MasterSpectralFlat)

    def run(self, rinput):
        return self.create_result(flatframe=MasterSpectralFlat())
Example #13
class NBImageRecipeInput(RecipeInput):
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    extinction = Parameter(0.0, 'Mean atmospheric extinction')
    sources = Parameter(
        [], 'List of x, y coordinates to measure FWHM', optional=True)
    offsets = Offsets_Requirement()
    sky_images = Parameter(5, 'Images used to estimate the background'
                           ' before and after the current image')
    sky_images_sep_time = SkyImageSepTime_Requirement()
    check_photometry_levels = Parameter(
        [0.5, 0.8], 'Levels to check the flux of the objects')
    check_photometry_actions = Parameter(
        ['warn', 'warn', 'default'], 'Actions to take on images')
Example #14
class TestBiasCorrectRecipe(EmirRecipe):

    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    frame = Product(DataFrameType)

    def run(self, rinput):
        _logger.info('starting simple bias reduction')

        flow = self.init_filters(rinput)
        hdulist = basic_processing_with_combination(rinput, flow, method=median)
        hdr = hdulist[0].header
        hdr['NUMRNAM'] = (self.__class__.__name__, 'Numina recipe name')
        hdr['NUMRVER'] = (self.__version__, 'Numina recipe version')

        result = self.create_result(frame=hdulist)
        return result
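
The two NUM* keywords record provenance in the FITS header. The recipe relies on astropy's convention that assigning a (value, comment) tuple sets both the value and the comment of a keyword:

from astropy.io import fits

hdr = fits.Header()
# (value, comment) assignment; the version string is hypothetical.
hdr['NUMRNAM'] = ('TestBiasCorrectRecipe', 'Numina recipe name')
hdr['NUMRVER'] = ('0.1.0', 'Numina recipe version')

print(hdr['NUMRNAM'], '/', hdr.comments['NUMRNAM'])
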
Example #15
class RasterSpectraRecipe(EmirRecipe):
    """
    Observing mode:
        Raster spectra
    """

    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_spectral_ff = Requirement(prods.MasterSpectralFlat,
                                     'Master spectral flatfield')
    st_calibration = Requirement(prods.SlitTransmissionCalibration,
                                 'Slit transmission calibration')
    w_calibration = Requirement(prods.WavelengthCalibration,
                                'Wavelength calibration')

    cube = Product(prods.DataCube)

    def run(self, rinput):
        return self.create_result(cube=prods.DataCube())
Example #16
class MicroditheredImageRecipeInput(RecipeInput):
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    extinction = Parameter(0.0, 'Mean atmospheric extinction')
    sources = Parameter([],
                        'List of x, y coordinates to measure FWHM',
                        optional=True)
    offsets = Offsets_Requirement()
    iterations = Parameter(4, 'Iterations of the recipe')
    sky_images = Parameter(
        5, 'Images used to estimate the background before '
        'and after the current image')
    sky_images_sep_time = SkyImageSepTime_Requirement()
    check_photometry_levels = Parameter(
        [0.5, 0.8], 'Levels to check the flux of the objects')
    check_photometry_actions = Parameter(['warn', 'warn', 'default'],
                                         'Actions to take on images')
    subpixelization = Parameter(4, 'Number of subdivisions in each pixel side')
    window = Parameter([], 'Region of interest in the data', optional=True)
Example #17
class BarDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    bars_nominal_positions = Requirement(NominalPositions,
                                         'Nominal positions of the bars')
    median_filter_size = Parameter(5, 'Size of the median box')
    average_box_row_size = Parameter(
        7, 'Number of rows to average for fine centering (odd)')
    average_box_col_size = Parameter(
        21, 'Number of columns to extract for fine centering (odd)')
    fit_peak_npoints = Parameter(
        3, 'Number of points to use for fitting the peak (odd)')

    # Recipe Products
    frame = Product(DataFrameType)
    # derivative = Product(DataFrameType)
    slits = Product(ArrayType)
    positions3 = Product(ArrayType)
    positions5 = Product(ArrayType)
    positions7 = Product(ArrayType)
    positions9 = Product(ArrayType)
    DTU = Product(ArrayType)
    ROTANG = Product(float)
    TSUTC1 = Product(float)
    csupos = Product(ArrayType)
    csusens = Product(ArrayType)

    def run(self, rinput):
        self.logger.info('starting processing for bars detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        self.save_intermediate_img(hdulist, 'reduced_image.fits')

        try:
            rotang = hdr['ROTANG']
            tsutc1 = hdr['TSUTC1']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
            csupos = datamodel.get_csup_from_header(hdr)
            if len(csupos) != 2 * EMIR_NBARS:
                raise RecipeError('Number of CSUPOS != 2 * NBARS')
            csusens = datamodel.get_cs_from_header(hdr)

        except KeyError as error:
            self.logger.error(error)
            raise RecipeError(error)

        self.logger.debug('start finding bars')
        allpos, slits = self.find_bars(hdulist, rinput, csupos, dtur)
        self.logger.debug('end finding bars')

        if self.intermediate_results:
            with open('ds9.reg', 'w') as ds9reg:
                self.to_ds9_reg(ds9reg, slits)

        result = self.create_result(
            frame=hdulist,
            slits=slits,
            positions9=allpos[9],
            positions7=allpos[7],
            positions5=allpos[5],
            positions3=allpos[3],
            DTU=dtub,
            ROTANG=rotang,
            TSUTC1=tsutc1,
            csupos=csupos,
            csusens=csusens,
        )
        return result

    def median_filtering(self, hdulist, rinput):

        # Processed array
        arr = hdulist[0].data

        # Median filter of processed array (two times)
        mfilter_size = rinput.median_filter_size

        self.logger.debug('median filtering 1')
        self.logger.debug('median filtering X, %d columns', mfilter_size)
        arr_median = median_filter(arr, size=(1, mfilter_size))
        self.logger.debug('median filtering X, %d rows', mfilter_size)
        arr_median = median_filter(arr_median, size=(mfilter_size, 1))
        self.save_intermediate_array(arr_median, 'median_image.fits')

        # Median filter of processed array (two times) in the other direction
        # for Y coordinates
        self.logger.debug('median filtering 2')
        self.logger.debug('median filtering Y, %d rows', mfilter_size)
        arr_median_alt = median_filter(arr, size=(mfilter_size, 1))
        self.logger.debug('median filtering Y, %d columns', mfilter_size)
        arr_median_alt = median_filter(arr_median_alt, size=(1, mfilter_size))
        self.save_intermediate_array(arr_median_alt, 'median_image_alt.fits')

        return arr_median, arr_median_alt

    def find_bars(self, hdulist, rinput, csupos, dtur):

        self.logger.debug('filtering image')
        # Processed array
        arr_median, arr_median_alt = self.median_filtering(hdulist, rinput)

        xfac = dtur[0] / EMIR_PIXSCALE
        yfac = -dtur[1] / EMIR_PIXSCALE

        vec = [yfac, xfac]
        self.logger.debug('DTU shift is %s', vec)

        # and the table of approx positions of the slits
        barstab = rinput.bars_nominal_positions
        # Currently, we only use fields 0 and 2
        # of the nominal positions file

        # Number of rows used
        # These other parameters can also be tuned
        bstart = 1
        bend = 2047
        self.logger.debug('ignoring columns outside %d - %d', bstart, bend - 1)

        # extract a region to average
        wy = (rinput.average_box_row_size // 2)
        wx = (rinput.average_box_col_size // 2)
        self.logger.debug('extraction window is %d rows, %d cols', 2 * wy + 1,
                          2 * wx + 1)
        # Fit the peak with these points
        wfit = 2 * (rinput.fit_peak_npoints // 2) + 1
        self.logger.debug('fit with %d points', wfit)

        # Minimum threshold
        threshold = 5 * EMIR_RON
        # Savitzky and Golay (1964) filter to compute the X derivative
        # scipy >= xx has a savgol_filter function
        # for compatibility we do it manually

        allpos = {}
        ypos3_kernel = None
        slits = numpy.zeros((EMIR_NBARS, 8), dtype='float')

        self.logger.info('find peaks in derivative image')
        for ks in [3, 5, 7, 9]:
            self.logger.debug('kernel size is %d', ks)
            # S and G kernel for derivative
            kw = ks * (ks * ks - 1) / 12.0
            coeffs_are = -numpy.arange((1 - ks) // 2, (ks - 1) // 2 + 1) / kw
            if ks == 3:
                ypos3_kernel = coeffs_are
            self.logger.debug('kernel weights are %s', coeffs_are)

            self.logger.debug('derive image in X direction')
            arr_deriv = convolve1d(arr_median, coeffs_are, axis=-1)
            self.save_intermediate_array(arr_deriv,
                                         'deriv_image_k%d.fits' % ks)
            # Axis 0 is the Y direction
            #
            # self.logger.debug('derive image in Y direction (with kernel=3)')
            # arr_deriv_alt = convolve1d(arr_median_alt, ypos3_kernel, axis=0)

            positions = []
            self.logger.info('using bar parameters')
            for idx in range(EMIR_NBARS):
                params_l = barstab[idx]
                params_r = barstab[idx + EMIR_NBARS]
                lbarid = int(params_l[0])

                # CSUPOS for this bar
                rbarid = lbarid + EMIR_NBARS
                current_csupos_l = csupos[lbarid - 1]
                current_csupos_r = csupos[rbarid - 1]
                self.logger.debug('CSUPOS for bar %d is %f', lbarid,
                                  current_csupos_l)
                self.logger.debug('CSUPOS for bar %d is %f', rbarid,
                                  current_csupos_r)

                ref_y_coor_virt = params_l[1]  # Do I need to add vec[1]?
                ref_x_l_coor_virt = params_l[3] + current_csupos_l * params_l[2]
                ref_x_r_coor_virt = params_r[3] + current_csupos_r * params_r[2]
                # Transform to REAL..
                ref_x_l_coor, ref_y_l_coor = dist.exvp(ref_x_l_coor_virt,
                                                       ref_y_coor_virt)
                ref_x_r_coor, ref_y_r_coor = dist.exvp(ref_x_r_coor_virt,
                                                       ref_y_coor_virt)
                # FIXME: check if DTU has to be applied
                # ref_y_coor = ref_y_coor + vec[1]
                prow = coor_to_pix_1d(ref_y_l_coor) - 1
                fits_row = prow + 1  # FITS pixel index

                # A function that returns the center of the bar
                # given its X position
                def center_of_bar_l(x):
                    # Pixel values are 0-based
                    # return ref_y_coor + vec[1] - 1
                    # FIXME: check if DTU has to be applied
                    return ref_y_l_coor - 1

                def center_of_bar_r(x):
                    # Pixel values are 0-based
                    # return ref_y_coor + vec[1] - 1
                    # FIXME: check if DTU has to be applied
                    return ref_y_r_coor - 1

                self.logger.debug('looking for bars with ids %d - %d', lbarid,
                                  rbarid)
                self.logger.debug('ref Y virtual position is %7.2f',
                                  ref_y_coor_virt)
                self.logger.debug('ref X virtual positions are %7.2f %7.2f',
                                  ref_x_l_coor_virt, ref_x_r_coor_virt)
                self.logger.debug('ref X positions are %7.2f %7.2f',
                                  ref_x_l_coor, ref_x_r_coor)
                self.logger.debug('ref Y positions are %7.2f %7.2f',
                                  ref_y_l_coor, ref_y_r_coor)
                # if ref_y_coor is out of limits, skip this bar
                # ref_y_coor is in FITS format
                if (ref_y_l_coor >= 2047) or (ref_y_l_coor <= 1):
                    self.logger.debug(
                        'reference y position is out of limits, skipping')
                    positions.append(
                        [lbarid, fits_row, fits_row, fits_row, 1, 1, 0, 3])
                    positions.append(
                        [rbarid, fits_row, fits_row, fits_row, 1, 1, 0, 3])
                    continue

                # minimal width of the slit
                minwidth = 0.9
                if abs(ref_x_l_coor_virt - ref_x_r_coor_virt) < minwidth:
                    self.logger.debug(
                        'slit is less than %.1f virt pixels, skipping', minwidth)
                    positions.append(
                        [lbarid, fits_row, fits_row, fits_row, 1, 1, 0, 3])
                    positions.append(
                        [rbarid, fits_row, fits_row, fits_row, 1, 1, 0, 3])
                    continue

                # Left bar
                # Don't add +1 to virtual pixels
                self.logger.debug('measure left border (%d)', lbarid)
                regionw = 10
                bstart1 = coor_to_pix_1d(ref_x_l_coor - regionw)
                bend1 = coor_to_pix_1d(ref_x_l_coor + regionw) + 1
                centery, centery_virt, xpos1, xpos1_virt, fwhm, st = char_bar_peak_l(
                    arr_deriv,
                    prow,
                    bstart1,
                    bend1,
                    threshold,
                    center_of_bar_l,
                    wx=wx,
                    wy=wy,
                    wfit=wfit)

                insert1 = [
                    lbarid, centery + 1, centery_virt, fits_row, xpos1 + 1,
                    xpos1_virt, fwhm, st
                ]
                positions.append(insert1)

                # Right bar
                # Don't add +1 to virtual pixels
                self.logger.debug('measure right border (%d)', rbarid)
                bstart2 = coor_to_pix_1d(ref_x_r_coor - regionw)
                bend2 = coor_to_pix_1d(ref_x_r_coor + regionw) + 1
                centery, centery_virt, xpos2, xpos2_virt, fwhm, st = char_bar_peak_r(
                    arr_deriv,
                    prow,
                    bstart2,
                    bend2,
                    threshold,
                    center_of_bar_r,
                    wx=wx,
                    wy=wy,
                    wfit=wfit)
                # This centery/centery_virt should be equal to ref_y_coor_virt
                insert2 = [
                    rbarid, centery + 1, centery_virt, fits_row, xpos2 + 1,
                    xpos2_virt, fwhm, st
                ]
                positions.append(insert2)

                # FIXME: hardcoded value
                y1_virt = ref_y_coor_virt - 16.242
                y2_virt = ref_y_coor_virt + 16.242
                _, y1 = dist.exvp(xpos1_virt + 1, y1_virt + 1)
                _, y2 = dist.exvp(xpos2_virt + 1, y2_virt + 1)

                # Update positions

                msg = 'bar %d, centroid-y %9.4f centroid-y virt %9.4f, ' \
                      'row %d, x-pos %9.4f x-pos virt %9.4f, FWHM %6.3f, status %d'
                self.logger.debug(msg, *positions[-2])
                self.logger.debug(msg, *positions[-1])

                if ks == 5:
                    slits[lbarid - 1] = numpy.array(
                        [xpos1, y2, xpos2, y2, xpos2, y1, xpos1, y1])
                    # FITS coordinates
                    slits[lbarid - 1] += 1.0
                    self.logger.debug('inserting bars %d-%d into "slits"',
                                      lbarid, rbarid)

            allpos[ks] = numpy.asarray(
                positions, dtype='float')  # GCS doesn't like lists of lists

        return allpos, slits

    def to_ds9_reg(self, ds9reg, slits):
        """Transform fiber traces to ds9-region format.

        Parameters
        ----------
        ds9reg : TextIO
            Handle to the output file, in ds9-region format.
        """

        # open output file and insert header

        ds9reg.write('# Region file format: DS9 version 4.1\n')
        ds9reg.write(
            'global color=green dashlist=8 3 width=1 font="helvetica 10 '
            'normal roman" select=1 highlite=1 dash=0 fixed=0 edit=1 '
            'move=1 delete=1 include=1 source=1\n')
        ds9reg.write('physical\n')

        for idx, slit in enumerate(slits, 1):
            # slit stores the corners as [x1, y2, x2, y2, x2, y1, x1, y1]
            xpos1, y2, xpos2, y1 = slit[0], slit[1], slit[2], slit[5]
            xc = 0.5 * (xpos1 + xpos2) + 1
            yc = 0.5 * (y1 + y2) + 1
            xd = (xpos2 - xpos1)
            yd = (y2 - y1)
            ds9reg.write('box({0},{1},{2},{3},0)\n'.format(xc, yc, xd, yd))
            ds9reg.write('# text({0},{1}) color=red text={{{2}}}\n'.format(
                xpos1 - 5, yc, idx))
            ds9reg.write('# text({0},{1}) color=red text={{{2}}}\n'.format(
                xpos2 + 5, yc, idx + EMIR_NBARS))
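
The hand-built kernel in find_bars is the Savitzky-Golay first-derivative filter that the comment alludes to. Recent scipy exposes the same coefficients through scipy.signal.savgol_coeffs, which makes the manual construction easy to verify:

import numpy
from scipy.signal import savgol_coeffs

# The manual kernel from find_bars, for a window of ks points.
ks = 5
kw = ks * (ks * ks - 1) / 12.0
coeffs_are = -numpy.arange((1 - ks) // 2, (ks - 1) // 2 + 1) / kw

# scipy's Savitzky-Golay first-derivative coefficients (quadratic fit),
# returned in convolution order, matching convolve1d above.
reference = savgol_coeffs(ks, polyorder=2, deriv=1)

print(coeffs_are)                             # [ 0.2  0.1  0.  -0.1 -0.2]
print(numpy.allclose(coeffs_are, reference))  # True
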
Example #18
class TestMaskRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    pinhole_nominal_positions = Requirement(CoordinateList2DType,
                                            'Nominal positions of the pinholes'
                                            )
    shift_coordinates = Parameter(True, 'Use header information to'
                                  ' shift the pinhole positions from (0,0) '
                                  'to X_DTU, Y_DTU')
    box_half_size = Parameter(4, 'Half of the computation box size in pixels')
    recenter = Parameter(True, 'Recenter the pinhole coordinates')
    max_recenter_radius = Parameter(2.0, 'Maximum distance for recentering')

    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the canny algorithm')
    obj_min_size = Parameter(200, 'Minimum size of the slit')
    obj_max_size = Parameter(3000, 'Maximum size of the slit')
    slit_size_ratio = Parameter(4.0, 'Minimum ratio between height and width for slits')

    # Recipe Products
    frame = Product(DataFrameType)
    positions = Product(ArrayType)
    positions_alt = Product(ArrayType)
    slitstable = Product(ArrayType)
    DTU = Product(ArrayType)
    filter = Product(str)
    readmode = Product(str)
    ROTANG = Product(float)
    DETPA = Product(float)
    DTUPA = Product(float)
    param_recenter = Product(bool)
    param_max_recenter_radius = Product(float)
    param_box_half_size = Product(float)

    def run(self, rinput):
        _logger.info('starting processing for slit detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        _logger.debug('finding pinholes')

        try:
            filtername = hdr['FILTER']
            readmode = hdr['READMODE']
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
        except KeyError as error:
            _logger.error(error)
            raise RecipeError(error)

        if rinput.shift_coordinates:
            xdtur, ydtur, zdtur = dtur
            xfac = xdtur / EMIR_PIXSCALE
            yfac = -ydtur / EMIR_PIXSCALE

            vec = numpy.array([yfac, xfac])
            _logger.info('shift is %s', vec)
            ncenters = rinput.pinhole_nominal_positions + vec
        else:
            _logger.info('using pinhole coordinates as they are')
            ncenters = rinput.pinhole_nominal_positions

        _logger.info('pinhole characterization')
        positions = pinhole_char(
            hdulist[0].data,
            ncenters,
            box=rinput.box_half_size,
            recenter_pinhole=rinput.recenter,
            maxdist=rinput.max_recenter_radius
        )

        _logger.info('alternate pinhole characterization')
        positions_alt = pinhole_char2(
            hdulist[0].data, ncenters,
            recenter_pinhole=rinput.recenter,
            recenter_half_box=rinput.box_half_size,
            recenter_maxdist=rinput.max_recenter_radius
        )

        _logger.debug('finding slits')

        # First, prefilter with median
        median_filter_size = rinput.median_filter_size
        canny_sigma = rinput.canny_sigma
        obj_min_size = rinput.obj_min_size
        obj_max_size = rinput.obj_max_size

        data1 = hdulist[0].data
        _logger.debug('Median filter with box %d', median_filter_size)
        data2 = median_filter(data1, size=median_filter_size)

        # Grey level image
        img_grey = normalize(data2)

        # Find edges with canny
        _logger.debug('Find edges with canny, sigma %d', canny_sigma)
        edges = canny(img_grey, sigma=canny_sigma)

        # Fill edges
        _logger.debug('Fill holes')
        fill_slits = ndimage.binary_fill_holes(edges)

        _logger.debug('Label objects')
        label_objects, nb_labels = ndimage.label(fill_slits)
        _logger.debug('%d objects found', nb_labels)
        # Filter on the area of the labeled region
        # Perhaps we could ignore this filtering and
        # do it later?
        _logger.debug('Filter objects by size')
        # Sizes of regions
        sizes = numpy.bincount(label_objects.ravel())

        _logger.debug('Min size is %d', obj_min_size)
        _logger.debug('Max size is %d', obj_max_size)

        mask_sizes = (sizes > obj_min_size) & (sizes < obj_max_size)

        # Filter out regions
        nids, = numpy.where(mask_sizes)

        mm = numpy.in1d(label_objects, nids)
        mm.shape = label_objects.shape

        fill_slits_clean = numpy.where(mm, 1, 0)

        # and relabel
        _logger.debug('Label filtered objects')
        relabel_objects, nb_labels = ndimage.label(fill_slits_clean)
        _logger.debug('%d objects found after filtering', nb_labels)
        ids = list(six.moves.range(1, nb_labels + 1))

        _logger.debug('Find regions and centers')
        regions = ndimage.find_objects(relabel_objects)
        centers = ndimage.center_of_mass(data2, labels=relabel_objects,
                                         index=ids
                                         )

        table = char_slit(data2, regions,
                          slit_size_ratio=rinput.slit_size_ratio
                          )

        result = self.create_result(frame=hdulist,
                                    positions=positions,
                                    positions_alt=positions_alt,
                                    slitstable=table,
                                    filter=filtername,
                                    DTU=dtub,
                                    readmode=readmode,
                                    ROTANG=rotang,
                                    DETPA=detpa,
                                    DTUPA=dtupa,
                                    param_recenter=rinput.recenter,
                                    param_max_recenter_radius=rinput.max_recenter_radius,
                                    param_box_half_size=rinput.box_half_size
                                    )
        return result
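
The slit search in the middle of run is a standard scipy.ndimage segmentation idiom: label the connected components of the binary edge image, count pixels per label with bincount, keep only labels inside a size window, and relabel. Reduced to its core (numpy.isin is the modern spelling of the in1d call used above):

import numpy
from scipy import ndimage

rng = numpy.random.default_rng(0)
binary = rng.random((128, 128)) > 0.7  # stand-in for the filled edge image

label_objects, nb_labels = ndimage.label(binary)
# Pixels per label; label 0 is the background.
sizes = numpy.bincount(label_objects.ravel())

obj_min_size, obj_max_size = 5, 200
mask_sizes = (sizes > obj_min_size) & (sizes < obj_max_size)
mask_sizes[0] = False  # never keep the background

keep = numpy.isin(label_objects, numpy.where(mask_sizes)[0])
relabeled, nb_kept = ndimage.label(keep)
print(nb_labels, '->', nb_kept)
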
Example #19
class MultiTwilightFlatRecipe(EmirRecipe):
    """Create a list of twilight flats"""
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()

    twflatframes = Product(ListOfType(MasterIntensityFlat))

    def run(self, rinput):

        results = []
        self.logger.info('starting multi twilight flat reduction')

        # Uncomment this line
        # to revert to non-ramp
        # flow = self.init_filters(rinput)
        saturation = 45000.0

        iinfo = gather_info_frames(rinput.obresult.frames)
        image_groups = {}
        self.logger.info('group images by filter')
        for idx, info in enumerate(iinfo):
            filt = info['filter']
            if filt not in image_groups:
                self.logger.debug('new filter %s', filt)
                image_groups[filt] = []
            img = rinput.obresult.frames[idx]
            self.logger.debug('image %s in group %s', img, filt)
            image_groups[filt].append(img)

        for filt, frames in image_groups.items():
            self.logger.info('processing filter %s', filt)

            # Uncomment this line and comment the following
            # to revert to non-ramp
            # res = self.run_per_filter(frames, flow)
            try:
                res = self.run_per_filter_ramp(frames, saturation=saturation)
                results.append(res)
            except ValueError:
                self.logger.info('filter %s cannot be processed', filt)

        self.logger.info('end multi twilight flat reduction')
        result = self.create_result(twflatframes=results)

        return result

    def run_per_filter(self, frames, flow):

        errors = True
        self.logger.debug('using errors: %s', errors)
        hdulist = basic_processing_with_combination_frames(frames,
                                                           flow,
                                                           method=median,
                                                           errors=errors)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)
        mm = hdulist[0].data.mean()
        self.logger.info('mean value of flat is: %f', mm)
        hdr['CCDMEAN'] = mm

        self.logger.debug('normalize image')
        hdulist[0].data /= mm
        if errors:
            self.logger.debug('normalize VAR extension')
            hdulist['variance'].data /= (mm * mm)

        return hdulist

    def run_per_filter_ramp(self, frames, saturation, errors=False):
        imgs = [frame.open() for frame in frames]
        return self.run_img_per_filter_ramp(imgs, saturation, errors)

    def run_img_per_filter_ramp(self, imgs, saturation, errors=False):

        nimages = len(imgs)
        if nimages == 0:
            raise ValueError('len(images) == 0')

        median_frames = numpy.empty((nimages, ))
        exptime_frames = []
        utc_frames = []
        # generate 3D cube
        bshape = self.datamodel.shape
        flat_frames = numpy.empty((bshape[0], bshape[1], nimages))
        for idx, image in enumerate(imgs):
            flat_frames[:, :, idx] = image['primary'].data
            exptime_frames.append(image[0].header['EXPTIME'])
            median_frames[idx] = numpy.median(image['primary'].data)
            utc_frames.append(image[0].header['UTC'])

            self.logger.debug('image %d exptime %f median %f UTC %s', idx,
                              exptime_frames[idx], median_frames[idx],
                              utc_frames[-1])

        # filter saturated images
        good_images = median_frames < saturation
        ngood_images = good_images.sum()
        slope_scaled_var = None
        slope_scaled_num = None
        if ngood_images == 0:
            self.logger.warning('We have only %d good images', ngood_images)
            raise ValueError('No images under saturation')
        elif ngood_images < 2:
            self.logger.warning('We have only %d good images', ngood_images)
            # Reference image
            ref_image = imgs[0]
            slope_scaled = numpy.ones(bshape) * exptime_frames[0]

            if errors:
                slope_scaled_var = numpy.zeros_like(slope_scaled)
                slope_scaled_num = numpy.zeros_like(
                    slope_scaled, dtype='int16') + ngood_images
        else:
            nsaturated = nimages - good_images.sum()
            if nsaturated > 0:
                self.logger.debug(
                    'we have %d images with median value over saturation (%f)',
                    nsaturated, saturation)

            m = flat_frames[:, :, good_images]
            # Reshape array to obtain a 2D array
            m_r = m.reshape((bshape[0] * bshape[1], ngood_images))
            self.logger.debug('fitting slopes with Theil-Sen')
            # self.logger.debug('fitting slopes with mean-squares')
            # ll = nppol.polyfit(median_frames[good_images], m_r.T, deg=1)
            ll = self.filter_nsigma(median_frames[good_images], m_r.T)

            slope = ll[1].reshape(bshape)
            base = ll[0].reshape(bshape)

            # First good frame
            index_of_first_good = numpy.nonzero(good_images)[0][0]
            slope_scaled = slope * exptime_frames[index_of_first_good]
            if errors:
                slope_scaled_var = numpy.zeros_like(slope_scaled)
                slope_scaled_num = numpy.zeros_like(
                    slope_scaled, dtype='int16') + ngood_images

        cdata = []
        for idx, img in enumerate(imgs):
            if good_images[idx]:
                cdata.append(img)

        result = self.compose_result(cdata, slope_scaled, errors,
                                     slope_scaled_var, slope_scaled_num)

        return result

    def filter_nsigma(self, median_val, image_val, nsigma=10.0, nloop=1):
        # Initial estimation
        ll = fit_theil_sen(median_val, image_val)
        ni = 0
        self.logger.debug('initial estimation')
        while ni < nloop:
            # Prediction
            self.logger.debug('loop %d', ni + 1)
            base, slope = ll
            image_val_pred = base + median_val[:, numpy.newaxis] * slope
            image_diff = image_val - image_val_pred
            # Compute MAD
            mad = compute_mad(image_diff)
            sigma_robust = nsigma * 1.4826 * mad
            self.logger.debug('compute robust std deviation')
            self.logger.debug('min %7.1f max %7.1f mean %7.1f',
                              sigma_robust.min(), sigma_robust.max(),
                              sigma_robust.mean())
            # Check values over sigma
            mask_over = numpy.abs(image_diff) >= sigma_robust[:, numpy.newaxis]
            self.logger.debug('values over sigma: %d', mask_over.sum())
            # Insert expected values in image
            # instead of masking
            image_val[mask_over] = image_val_pred[mask_over]
            #
            self.logger.debug('Theil-Sen fit')
            ll = fit_theil_sen(median_val, image_val)
            ni += 1

        return ll

    def compose_result(self,
                       imgs,
                       slope_scaled,
                       errors=False,
                       slope_scaled_var=None,
                       slope_scaled_num=None):

        self.logger.debug('update result header')
        cnum = len(imgs)
        method_name = 'Theil-Sen'
        base_header = imgs[0][0].header
        cdata = imgs

        hdu = fits.PrimaryHDU(data=slope_scaled, header=base_header)
        self.set_base_headers(hdu.header)

        hdu.header['history'] = "Combined %d images using '%s'" % (cnum,
                                                                   method_name)
        hdu.header['history'] = 'Combination time {}'.format(
            datetime.datetime.utcnow().isoformat())

        for img in cdata:
            hdu.header['history'] = "Image {}".format(
                self.datamodel.get_imgid(img))

        prevnum = base_header.get('NUM-NCOM', 1)
        hdu.header['NUM-NCOM'] = prevnum * cnum
        hdu.header['UUID'] = str(uuid.uuid1())
        # Headers of last image
        hdu.header['TSUTC2'] = cdata[-1][0].header['TSUTC2']
        # TODO: use BPM to compute mean
        mm = hdu.data.mean()
        self.logger.info('mean value of flat is: %f', mm)
        hdu.header['CCDMEAN'] = mm
        self.logger.debug('normalize image')
        hdu.data /= mm

        if errors:
            varhdu = fits.ImageHDU(slope_scaled_var, name='VARIANCE')
            num = fits.ImageHDU(slope_scaled_num, name='MAP')
            self.logger.debug('normalize VAR extension')
            varhdu.data /= (mm * mm)
            result = fits.HDUList([hdu, varhdu, num])
        else:
            result = fits.HDUList([hdu])

        return result
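
filter_nsigma combines two robust estimators: a Theil-Sen fit (median of pairwise slopes) and sigma clipping based on the median absolute deviation, where 1.4826 * MAD estimates the Gaussian standard deviation. A one-dimensional sketch of the same loop, using scipy.stats.theilslopes in place of the pipeline's fit_theil_sen (the data are synthetic):

import numpy
from scipy.stats import theilslopes

rng = numpy.random.default_rng(2)
x = numpy.linspace(1000.0, 30000.0, 20)        # median counts per frame
y = 1.5 * x + 50.0 + rng.normal(0, 20.0, 20)   # one pixel along the ramp
y[7] += 5000.0                                 # a strong outlier

nsigma = 10.0
for _ in range(2):
    slope, intercept, _, _ = theilslopes(y, x)
    resid = y - (intercept + slope * x)
    # 1.4826 * MAD is a robust stand-in for the standard deviation.
    mad = numpy.median(numpy.abs(resid - numpy.median(resid)))
    over = numpy.abs(resid) >= nsigma * 1.4826 * mad
    # Replace deviant samples by the prediction instead of masking them.
    y[over] = (intercept + slope * x)[over]

print(slope, intercept)  # close to 1.5 and 50
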
Example #20
class TestPinholeRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    pinhole_nominal_positions = Requirement(CoordinateList2DType,
                                            'Nominal positions of the pinholes'
                                            )
    shift_coordinates = Parameter(True, 'Use header information to'
                                  ' shift the pinhole positions from (0,0) '
                                  'to X_DTU, Y_DTU')
    box_half_size = Parameter(4, 'Half of the computation box size in pixels')
    recenter = Parameter(True, 'Recenter the pinhole coordinates')
    max_recenter_radius = Parameter(2.0, 'Maximum distance for recentering')

    # Recipe Products
    frame = Product(DataFrameType)
    positions = Product(ArrayType)
    positions_alt = Product(ArrayType)
    DTU = Product(ArrayType)
    filter = Product(str)
    readmode = Product(str)
    ROTANG = Product(float)
    DETPA = Product(float)
    DTUPA = Product(float)
    param_recenter = Product(bool)
    param_max_recenter_radius = Product(float)
    param_box_half_size = Product(float)

    def run(self, rinput):
        _logger.info('starting processing for pinhole detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        _logger.debug('finding pinholes')

        try:
            filtername = hdr['FILTER']
            readmode = hdr['READMODE']
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
        except KeyError as error:
            _logger.error(error)
            raise RecipeError(error)

        if rinput.shift_coordinates:
            xdtur, ydtur, zdtur = dtur
            xfac = xdtur / EMIR_PIXSCALE
            yfac = -ydtur / EMIR_PIXSCALE

            vec = numpy.array([yfac, xfac])
            _logger.info('shift is %s', vec)
            ncenters = rinput.pinhole_nominal_positions + vec
        else:
            _logger.info('using pinhole coordinates as they are')
            ncenters = rinput.pinhole_nominal_positions

        _logger.info('pinhole characterization')
        positions = pinhole_char(
            hdulist[0].data,
            ncenters,
            box=rinput.box_half_size,
            recenter_pinhole=rinput.recenter,
            maxdist=rinput.max_recenter_radius
        )

        _logger.info('alternate pinhole characterization')
        positions_alt = pinhole_char2(
            hdulist[0].data, ncenters,
            recenter_pinhole=rinput.recenter,
            recenter_half_box=rinput.box_half_size,
            recenter_maxdist=rinput.max_recenter_radius
        )

        result = self.create_result(frame=hdulist,
                                    positions=positions,
                                    positions_alt=positions_alt,
                                    filter=filtername,
                                    DTU=dtub,
                                    readmode=readmode,
                                    ROTANG=rotang,
                                    DETPA=detpa,
                                    DTUPA=dtupa,
                                    param_recenter=rinput.recenter,
                                    param_max_recenter_radius=rinput.max_recenter_radius,
                                    param_box_half_size=rinput.box_half_size
                                    )
        return result
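
Both pinhole recipes apply the same coordinate shift: the DTU offset is converted to pixels through the plate scale and added to the nominal positions, with the ordering and sign convention of vec = [yfac, xfac] above. In isolation (the constant and the offsets are made-up values, not EMIR's):

import numpy

EMIR_PIXSCALE = 18.0        # hypothetical value; use the instrument constant
xdtur, ydtur = 36.0, -18.0  # hypothetical DTU offsets, same units

# Ordering follows vec = [yfac, xfac] in the recipes above.
vec = numpy.array([-ydtur / EMIR_PIXSCALE, xdtur / EMIR_PIXSCALE])

pinhole_nominal_positions = numpy.array([[100.5, 200.5], [300.0, 400.0]])
ncenters = pinhole_nominal_positions + vec
print(ncenters)  # each position shifted by (1.0, 2.0)
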
Example #21
class BarDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    bars_nominal_positions = Requirement(CoordinateList2DType,
                                         'Nominal positions of the bars')
    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the canny algorithm')
    canny_high_threshold = Parameter(0.04,
                                     'High threshold for the canny algorithm')
    canny_low_threshold = Parameter(0.01,
                                    'Low threshold for the canny algorithm')

    # Recipe Products
    frame = Product(DataFrameType)
    positions = Product(ArrayType)
    DTU = Product(ArrayType)
    ROTANG = Product(float)
    csupos = Product(ArrayType)
    csusens = Product(ArrayType)
    param_median_filter_size = Product(float)
    param_canny_high_threshold = Product(float)
    param_canny_low_threshold = Product(float)

    def run(self, rinput):

        logger = logging.getLogger('numina.recipes.emir')

        logger.info('starting processing for bars detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        try:
            rotang = hdr['ROTANG']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
            csupos = datamodel.get_csup_from_header(hdr)
            csusens = datamodel.get_cs_from_header(hdr)

        except KeyError as error:
            logger.error(error)
            raise RecipeError(error)

        logger.debug('finding bars')

        arr = hdulist[0].data

        # Median filter
        logger.debug('median filtering')
        mfilter_size = rinput.median_filter_size

        arr_median = median_filter(arr, size=mfilter_size)

        # Image is mapped between 0 and 1
        # for the full range [0: 2**16]
        logger.debug('image scaling to 0-1')
        arr_grey = normalize_raw(arr_median)

        # Find borders
        logger.debug('find borders')
        canny_sigma = rinput.canny_sigma
        # These thresholds correspond roughly with
        # value x (2**16 - 1)
        high_threshold = rinput.canny_high_threshold
        low_threshold = rinput.canny_low_threshold

        edges = canny(arr_grey,
                      sigma=canny_sigma,
                      high_threshold=high_threshold,
                      low_threshold=low_threshold)

        # Number of rows used
        # These other parameters can also be tuned
        total = 5
        maxdist = 1.0
        bstart = 100
        bend = 1900

        positions = []
        nt = total // 2

        xfac = dtur[0] / EMIR_PIXSCALE
        yfac = -dtur[1] / EMIR_PIXSCALE

        vec = [yfac, xfac]
        logger.debug('DTU shift is %s', vec)

        # Based on the 'edges image'
        # and the table of approx positions of the slits
        barstab = rinput.bars_nominal_positions

        # Currently, we only use fields 0 and 2
        # of the nominal positions file

        for coords in barstab:
            lbarid = int(coords[0])
            rbarid = lbarid + 55
            ref_y_coor = coords[2] + vec[1]
            prow = wc_to_pix_1d(ref_y_coor) - 1
            fits_row = prow + 1  # FITS pixel index

            logger.debug('looking for bars with ids %d - %d', lbarid, rbarid)
            logger.debug('reference y position is Y %7.2f', ref_y_coor)
            # Find the position of each bar

            bpos = find_position(edges, prow, bstart, bend, total)

            nbars_found = len(bpos)

            # If no bar is found, append an empty token
            if nbars_found == 0:
                logger.debug('bars %d, %d not found at row %d', lbarid, rbarid,
                             fits_row)
                thisres1 = (lbarid, fits_row, 0, 0, 1)
                thisres2 = (rbarid, fits_row, 0, 0, 1)

            elif nbars_found == 2:

                # Order values by increasing X
                centl, centr = sorted(bpos, key=lambda cen: cen[0])
                c1 = centl[0]
                c2 = centr[0]

                logger.debug('bars found at row %d between %7.2f - %7.2f',
                             fits_row, c1, c2)
                # Compute FWHM of the collapsed profile

                cslit = arr_grey[prow - nt:prow + nt + 1, :]
                pslit = cslit.mean(axis=0)

                # Add 1 to return FITS coordinates
                epos, epos_f, error = locate_bar_l(pslit, c1)
                thisres1 = lbarid, fits_row, epos + 1, epos_f + 1, error

                epos, epos_f, error = locate_bar_r(pslit, c2)
                thisres2 = rbarid, fits_row, epos + 1, epos_f + 1, error

            elif nbars_found == 1:
                logger.warning(
                    'only 1 edge found at row %d, not yet implemented',
                    fits_row)
                thisres1 = (lbarid, fits_row, 0, 0, 1)
                thisres2 = (rbarid, fits_row, 0, 0, 1)

            else:
                logger.warning(
                    '3 or more edges found at row %d, not yet implemented',
                    fits_row)
                thisres1 = (lbarid, fits_row, 0, 0, 1)
                thisres2 = (rbarid, fits_row, 0, 0, 1)

            positions.append(thisres1)
            positions.append(thisres2)

        logger.debug('end finding bars')
        result = self.create_result(
            frame=hdulist,
            positions=positions,
            DTU=dtub,
            ROTANG=rotang,
            csupos=csupos,
            csusens=csusens,
            param_median_filter_size=rinput.median_filter_size,
            param_canny_high_threshold=rinput.canny_high_threshold,
            param_canny_low_threshold=rinput.canny_low_threshold)
        return result
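
The edge-detection step maps the raw frame onto [0, 1] over the full 16-bit range before calling canny, so the 0.04/0.01 thresholds correspond roughly to raw levels of value * (2**16 - 1). A stand-alone version with skimage, replacing normalize_raw by an explicit division (the synthetic frame is made up):

import numpy
from scipy.ndimage import median_filter
from skimage.feature import canny

# A synthetic 16-bit-like frame with one bright horizontal band.
arr = numpy.full((256, 256), 2000.0)
arr[100:120, :] = 30000.0

arr_median = median_filter(arr, size=5)

# Map the full 16-bit range onto [0, 1]; the canny thresholds
# below are expressed in these normalized units.
arr_grey = arr_median / (2 ** 16 - 1)

edges = canny(arr_grey, sigma=3.0,
              high_threshold=0.04, low_threshold=0.01)
print(edges.sum(), 'edge pixels')
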
Example #22
class MaskSpectraExtractionRecipe(EmirRecipe):
    """Extract spectra from images taken with a slit mask."""

    # Recipe Requirements
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    median_filter_size = Parameter(5, 'Size of the median box')
    slits_positions = Requirement(ArrayType,
                                  'Positions and widths of the slits')

    frame = Product(DataFrameType)
    rss = Product(DataFrameType)
    regions = Product(ArrayType)

    #slitstable = Product(ArrayType)
    #DTU = Product(ArrayType)
    #ROTANG = Product(float)
    #DETPA = Product(float)
    #DTUPA = Product(float)

    def run(self, rinput):
        _logger.info('starting extraction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        # First, prefilter with median
        median_filter_size = rinput.median_filter_size

        data1 = hdulist[0].data
        _logger.debug('Median filter with box %d', median_filter_size)
        data2 = median_filter(data1, size=median_filter_size)

        # Normalize input between -1 and +1
        data3 = img_norm(data2)

        # Tracing parameters
        ws = 10
        step = 15
        hs = 15
        tol = 2
        doplot = False
        npol = 5

        _logger.info('Create output images')
        rssdata = numpy.zeros(
            (rinput.slits_positions.shape[0], data3.shape[1]), dtype='float32')

        # FIXME, number of columns depends on polynomial degree
        regiontable = numpy.zeros(
            (rinput.slits_positions.shape[0], 4 + 2 * (npol + 1)),
            dtype='float32')

        count = 0
        # Loop over slits
        for slit_coords in rinput.slits_positions:
            col, y1, y2 = convert_to_(*slit_coords)
            _logger.info('Processing slit in column %i, row1=%i, row2=%i', col,
                         y1, y2)
            xmin, xmax, ymin, ymax, pfit1, pfit2 = ex_region(data3,
                                                             col,
                                                             y1,
                                                             y2,
                                                             step,
                                                             hs,
                                                             ws,
                                                             tol=tol,
                                                             doplot=doplot)

            _logger.info('Spectrum region is %i, %i, %i, %i', xmin, xmax, ymin,
                         ymax)
            try:
                region = data1[ymin:ymax + 1, xmin:xmax + 1]
                rssdata[count, xmin:xmax + 1] = region.mean(axis=0)
            except ValueError as err:
                _logger.error("Error collapsing spectrum: %s", err)
            # IN FITS convention
            _logger.info('Create regions table')
            regiontable[count, :4] = xmin + 1, xmax + 1, ymin + 1, ymax + 1
            #regiontable[count, 4:4 + npol + 1] = pfit1
            #regiontable[count, 4 + npol + 1:] = pfit2
            count += 1

        hdurss = fits.PrimaryHDU(rssdata)

        result = self.create_result(frame=hdulist,
                                    rss=hdurss,
                                    regions=regiontable)

        return result
Example #23
class TestPointSourceRecipe(EmirRecipe):

    # Recipe Requirements
    #
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    shift_coordinates = Parameter(
        True, 'Use header information to'
        ' shift the pinhole positions from (0,0) '
        'to X_DTU, Y_DTU')
    box_half_size = Parameter(4, 'Half of the computation box size in pixels')
    recenter = Parameter(True, 'Recenter the pinhole coordinates')
    max_recenter_radius = Parameter(2.0, 'Maximum distance for recentering')

    # Recipe Products
    frame = Product(DataFrameType)
    positions = Product(ArrayType)
    positions_alt = Product(ArrayType)
    DTU = Product(ArrayType)
    filter = Product(str)
    readmode = Product(str)
    ROTANG = Product(float)
    DETPA = Product(float)
    DTUPA = Product(float)
    param_recenter = Product(bool)
    param_max_recenter_radius = Product(float)
    param_box_half_size = Product(float)

    def run(self, rinput):
        _logger.info('starting processing for object detection')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)

        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        _logger.debug('finding point sources')

        try:
            filtername = hdr['FILTER']
            readmode = hdr['READMODE']
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)
        except KeyError as error:
            _logger.error(error)
            raise RecipeError(error)

        data = hdulist[0].data

        # Copy needed in numpy 1.7
        # This seems already bitswapped??
        # FIXME: check this works offline/online
        # ndata = data.byteswap().newbyteorder()
        # data = data.byteswap(inplace=True).newbyteorder()

        snr_detect = 5.0
        fwhm = 4.0
        npixels = 15
        box_shape = [64, 64]
        _logger.info('point source detection')
        _logger.info('using internal mask to remove corners')
        # Corners
        mask = numpy.zeros_like(data, dtype='int32')
        mask[2000:, 0:80] = 1
        mask[2028:, 2000:] = 1
        mask[:50, 1950:] = 1
        mask[:100, :50] = 1
        # Remove corner regions

        _logger.info('compute background map, %s', box_shape)
        bkg = sep.Background(data)

        _logger.info('reference fwhm is %5.1f pixels', fwhm)
        _logger.info('detect threshold, %3.1f over background', snr_detect)
        _logger.info('convolve with gaussian kernel, FWHM %3.1f pixels', fwhm)
        sigma = fwhm * gaussian_fwhm_to_sigma
        #
        kernel = Gaussian2DKernel(sigma)
        kernel.normalize()

        thresh = snr_detect * bkg.globalrms
        data_s = data - bkg.back()
        objects, segmap = sep.extract(data_s, thresh,
                                      minarea=npixels,
                                      filter_kernel=kernel.array,
                                      segmentation_map=True,
                                      mask=mask)
        fits.writeto('segmap.fits', segmap, overwrite=True)
        _logger.info('detected %d objects', len(objects))

        # Hardcoded values
        rs2 = 15.0
        fit_rad = 10.0
        flux_min = 1000.0
        flux_max = 30000.0
        _logger.debug('Flux limit is %6.1f %6.1f', flux_min, flux_max)
        # FIXME: this should be a view, not a copy
        xall = objects['x']
        yall = objects['y']
        mm = numpy.array([xall, yall]).T
        _logger.info('computing FWHM')
        # Find objects with pairs inside fit_rad
        kdtree = KDTree(mm)
        nearobjs = (kdtree.query_ball_tree(kdtree, r=fit_rad))
        positions = []
        for idx, obj in enumerate(objects):
            x0 = obj['x']
            y0 = obj['y']
            sl = image_box2d(x0, y0, data.shape, (fit_rad, fit_rad))
            # sl_sky = image_box2d(x0, y0, data.shape, (rs2, rs2))
            part_s = data_s[sl]
            # Logical coordinates
            xx0 = x0 - sl[1].start
            yy0 = y0 - sl[0].start

            _, fwhm_x, fwhm_y = compute_fwhm_2d_simple(part_s, xx0, yy0)

            if min(fwhm_x, fwhm_y) < 3:
                continue
            if flux_min > obj['peak'] or flux_max < obj['peak']:
                continue
            # nobjs is the number of objects within fit_rad
            nobjs = len(nearobjs[idx])

            flag = 0 if nobjs == 1 else 1

            positions.append([idx, x0, y0, obj['peak'], fwhm_x, fwhm_y, flag])

        _logger.info('saving photometry')
        positions = numpy.array(positions)
        positions_alt = positions
        _logger.info('end processing for object detection')

        result = self.create_result(
            frame=hdulist,
            positions=positions_alt,
            positions_alt=positions_alt,
            filter=filtername,
            DTU=dtub,
            readmode=readmode,
            ROTANG=rotang,
            DETPA=detpa,
            DTUPA=dtupa,
            param_recenter=rinput.recenter,
            param_max_recenter_radius=rinput.max_recenter_radius,
            param_box_half_size=rinput.box_half_size)
        return result
Example #24
class TestSlitMaskDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the Canny algorithm')
    canny_high_threshold = Parameter(0.04, 'High threshold for the Canny algorithm')
    canny_low_threshold = Parameter(0.01, 'Low threshold for the Canny algorithm')
    obj_min_size = Parameter(200, 'Minimum size of the slit')
    obj_max_size = Parameter(3000, 'Maximum size of the slit')
    slit_size_ratio = Parameter(4.0, 'Minimum ratio between height and width for slits')

    # Recipe Results
    frame = Product(DataFrameType)
    slitstable = Product(ArrayType)
    DTU = Product(ArrayType)
    ROTANG = Product(float)
    DETPA = Product(float)
    DTUPA = Product(float)

    def run(self, rinput):
        _logger.info('starting slit processing')

        _logger.info('basic image reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)
        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        try:
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)

        except KeyError as error:
            _logger.error(error)
            raise RecipeError(error)

        _logger.debug('finding slits')

        # First, prefilter with median
        median_filter_size = rinput.median_filter_size
        canny_sigma = rinput.canny_sigma
        obj_min_size = rinput.obj_min_size
        obj_max_size = rinput.obj_max_size

        data1 = hdulist[0].data
        _logger.debug('Median filter with box %d', median_filter_size)
        data2 = median_filter(data1, size=median_filter_size)

        # Grey level image
        img_grey = normalize_raw(data2)

        # Find edges with Canny
        _logger.debug('Find edges with Canny, sigma %f', canny_sigma)
        # These thresholds correspond roughly with
        # value x (2**16 - 1)
        high_threshold = rinput.canny_high_threshold
        low_threshold = rinput.canny_low_threshold
        _logger.debug('Find edges, Canny high threshold %f', high_threshold)
        _logger.debug('Find edges, Canny low threshold %f', low_threshold)
        edges = canny(img_grey, sigma=canny_sigma,
                      high_threshold=high_threshold,
                      low_threshold=low_threshold)
        # Fill edges
        _logger.debug('Fill holes')
        fill_slits = ndimage.binary_fill_holes(edges)

        _logger.debug('Label objects')
        label_objects, nb_labels = ndimage.label(fill_slits)
        _logger.debug('%d objects found', nb_labels)
        # Filter on the area of the labeled region
        # Perhaps we could ignore this filtering and
        # do it later?
        _logger.debug('Filter objects by size')
        # Sizes of regions
        sizes = numpy.bincount(label_objects.ravel())

        _logger.debug('Min size is %d', obj_min_size)
        _logger.debug('Max size is %d', obj_max_size)

        mask_sizes = (sizes > obj_min_size) & (sizes < obj_max_size)

        # Filter out regions
        nids, = numpy.where(mask_sizes)

        mm = numpy.in1d(label_objects, nids)
        mm.shape = label_objects.shape

        fill_slits_clean = numpy.where(mm, 1, 0)
        #plt.imshow(fill_slits_clean)

        # and relabel
        _logger.debug('Label filtered objects')
        relabel_objects, nb_labels = ndimage.label(fill_slits_clean)
        _logger.debug('%d objects found after filtering', nb_labels)
        ids = list(six.moves.range(1, nb_labels + 1))

        _logger.debug('Find regions and centers')
        regions = ndimage.find_objects(relabel_objects)
        centers = ndimage.center_of_mass(data2, labels=relabel_objects,
                                         index=ids
                                         )

        table = char_slit(data2, regions,
                          slit_size_ratio=rinput.slit_size_ratio
                          )

        result = self.create_result(frame=hdulist, slitstable=table,
                                    DTU=dtub,
                                    ROTANG=rotang,
                                    DETPA=detpa,
                                    DTUPA=dtupa
                                    )

        return result
Example #25
class TestSlitDetectionRecipe(EmirRecipe):

    # Recipe Requirements
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    master_sky = MasterSkyRequirement()

    median_filter_size = Parameter(5, 'Size of the median box')
    canny_sigma = Parameter(3.0, 'Sigma for the Canny algorithm')
    canny_high_threshold = Parameter(0.04, 'High threshold for the Canny algorithm')
    canny_low_threshold = Parameter(0.01, 'Low threshold for the Canny algorithm')

    # Recipe Results
    frame = Product(DataFrameType)
    slitstable = Product(ArrayType)
    DTU = Product(ArrayType)
    ROTANG = Product(float)
    DETPA = Product(float)
    DTUPA = Product(float)

    def run(self, rinput):
        _logger.info('starting slit processing')

        _logger.info('basic image reduction')

        flow = self.init_filters(rinput)

        hdulist = basic_processing_with_combination(rinput, flow=flow)
        hdr = hdulist[0].header
        self.set_base_headers(hdr)

        try:
            rotang = hdr['ROTANG']
            detpa = hdr['DETPA']
            dtupa = hdr['DTUPA']
            dtub, dtur = datamodel.get_dtur_from_header(hdr)

        except KeyError as error:
            _logger.error(error)
            raise RecipeError(error)

        _logger.debug('finding slits')


        # Filter values below 0.0
        _logger.debug('Filter values below 0')
        data1 = hdulist[0].data[:]

        data1[data1 < 0.0] = 0.0
        # First, prefilter with median
        median_filter_size = rinput.median_filter_size
        canny_sigma = rinput.canny_sigma

        _logger.debug('Median filter with box %d', median_filter_size)
        data2 = median_filter(data1, size=median_filter_size)

        # Grey level image
        img_grey = normalize_raw(data2)

        # Find edges with Canny
        _logger.debug('Find edges, Canny sigma %f', canny_sigma)
        # These thresholds correspond roughly with
        # value x (2**16 - 1)
        high_threshold = rinput.canny_high_threshold
        low_threshold = rinput.canny_low_threshold
        _logger.debug('Find edges, Canny high threshold %f', high_threshold)
        _logger.debug('Find edges, Canny low threshold %f', low_threshold)
        edges = canny(img_grey, sigma=canny_sigma,
                      high_threshold=high_threshold,
                      low_threshold=low_threshold)
        
        # Fill edges
        _logger.debug('Fill holes')
        # I do a dilation and erosion to fill
        # possible holes in 'edges'
        fill = ndimage.binary_dilation(edges)
        fill2 = ndimage.binary_fill_holes(fill)
        fill_slits = ndimage.binary_erosion(fill2)

        _logger.debug('Label objects')
        label_objects, nb_labels = ndimage.label(fill_slits)
        _logger.debug('%d objects found', nb_labels)
        ids = list(six.moves.range(1, nb_labels + 1))

        _logger.debug('Find regions and centers')
        regions = ndimage.find_objects(label_objects)
        centers = ndimage.center_of_mass(data2, labels=label_objects,
                                         index=ids
                                         )

        table = char_slit(data2, regions,
                          slit_size_ratio=-1.0
                          )

        result = self.create_result(frame=hdulist,
                                    slitstable=table,
                                    DTU=dtub,
                                    ROTANG=rotang,
                                    DETPA=detpa,
                                    DTUPA=dtupa
                                    )

        return result
Example #26
class DitheredImageRecipe(DirectImageCommon):

    """Recipe for the reduction of imaging mode observations.

    Recipe to reduce observations obtained in imaging mode, considering
    different possibilities depending on the size of the offsets
    between individual images.
    In particular, the following observing modes are considered: stare imaging,
    nodded beamswitched imaging, and dithered imaging.

    A critical piece of information here is a table that clearly specifies
    which images can be labeled as *science*, and which ones as *sky*.
    Note that some images are used both as *science* and *sky*
    (when the size of the targets is small compared to the offsets).

    **Observing modes:**

     * StareImage
     * Nodded/Beam-switched images
     * Dithered images


    **Inputs:**

     * Science frames + [Sky Frames]
     * Observing mode name: **stare image**, **nodded beamswitched image**,
       or **dithered imaging**
     * A table relating each science image with its sky image(s) (TBD if
       it's in the FITS header and/or in other format)
     * Offsets between them (Offsets must be integer)
     * Master Dark
     * Bad pixel mask (BPM)
     * Non-linearity correction polynomials
     * Master flat (twilight/dome flats)
     * Master background (thermal background, only in K band)
     * Exposure Time (must be the same in all the frames)
     * Airmass for each frame
     * Detector model (gain, RN, readout mode)
     * Average extinction in the filter
     * Astrometric calibration (TBD)

    **Outputs:**

     * Image with three extensions: the final image scaled to the individual
       exposure time, its variance, and an exposure-time map OR the number
       of images combined (TBD)

    **Procedure:**

    Images are corrected for dark current, non-linearity and flat field. Then
    an iterative process starts (a schematic sketch follows this example):

     * Sky is computed from each frame, using the list of sky images of each
       science frame. The objects are avoided using a mask (from the second
       iteration on).

     * The relative offsets are the nominal ones from the telescope. From the
       second iteration on, we refine them using objects of appropriate
       brightness (not too bright, not too faint).

     * We combine the sky-subtracted images; the output is a new image, a
       variance image, and an exposure map (or a map of the number of images
       used).

     * An object mask is generated.

     * We recompute the sky map, using the object mask as an additional input.
       From here we iterate (typically 4 times).

     * Finally, the images are corrected from atmospheric extinction and flux
       calibrated.

     * A preliminary astrometric calibration can always be used (using
       the central coordinates of the pointing and the plate scale
       in the detector).
       A better calibration might be computed using available stars (TBD).

    """
    obresult = ObservationResultRequirement()
    master_bpm = MasterBadPixelMaskRequirement()
    master_bias = MasterBiasRequirement()
    master_dark = MasterDarkRequirement()
    master_flat = MasterIntensityFlatFieldRequirement()
    extinction = Extinction_Requirement()
    sources = Catalog_Requirement()
    offsets = Offsets_Requirement()

    iterations = Parameter(4, 'Iterations of the recipe')
    sky_images = Parameter(
        5, 'Images used to estimate the '
        'background before and after the current image')
    sky_images_sep_time = SkyImageSepTime_Requirement()
    check_photometry_levels = Parameter(
        [0.5, 0.8], 'Levels to check the flux of the objects')
    check_photometry_actions = Parameter(
        ['warn', 'warn', 'default'], 'Actions to take on images')

    frame = Product(DataFrameType)
    catalog = Product(SourcesCatalog)

    def run(self, recipe_input):
        frame, catalog = self.process(recipe_input, window=None, subpix=1,
                                      stop_after=DirectImageCommon.FULLRED)

        result = self.create_result(frame=frame, catalog=catalog)
        return result