def separateFlatStreams(self, adinputs=None, **params):
        """
        This primitive splits the flat data into two streams, the 'DFFFD_flats' stream containing DFFFD flats, and main
        containing FDDDF flats. It also warns if non-flats somehow made it into the primitive
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        # Initialize lists of AstroData objects to be added to the streams
        flat_FDDDF_list = []
        flat_DFFFD_list = []
        mislabeled = []
        for ad in adinputs:
            tags = ad.tags
            if "FLAT" in tags and ad.fiber_setup() == [
                    'Flat', 'Dark', 'Dark', 'Dark', 'Flat'
            ]:
                flat_FDDDF_list.append(ad)
                log.fullinfo("FDDDF Flat: {}".format(ad.filename))
            elif "FLAT" in tags and ad.fiber_setup() == [
                    'Dark', 'Flat', 'Flat', 'Flat', 'Dark'
            ]:
                flat_DFFFD_list.append(ad)
                log.fullinfo("DFFFD Flat: {}".format(ad.filename))
            else:
                mislabeled.append(ad)
                log.warning("Not registered as Flat: {}".format(ad.filename))
        if not flat_FDDDF_list:
            log.warning("No FDDDF Flats in input list")
        if not flat_DFFFD_list:
            log.warning("No DFFFD Flats in input list")

        self.streams["DFFFD_flats"] = flat_DFFFD_list

        return flat_FDDDF_list
    def combineFlatStreams(self, adinputs=None, source=None, **params):
        """
        This primitive recombines the flat data into one master frame, combining the main stream pre-master
        and the 'source' stream pre-master with a simple max comparison at each pix
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        if source not in self.streams.keys():
            log.info("Stream {} does not exist so nothing to transfer".format(
                source))
            return adinputs

        source_length = len(self.streams[source])
        adinputs_length = len(adinputs)
        if not (adinputs_length == source_length == 1):
            log.warning("Unexpected stream lengths: {} and {}".format(
                adinputs_length, source_length))
            return adinputs
        adoutputs = []
        adout = copy.deepcopy(adinputs[0])
        adout[0].data = np.max(
            [adinputs[0].data[0], self.streams[source][0].data[0]], axis=0)
        adoutputs.append(adout)
        return adoutputs
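# A minimal standalone sketch (not part of the class above) of the pixel-wise
# maximum combination performed by combineFlatStreams, assuming the two
# pre-master flats are plain 2-D numpy arrays of the same shape.
import numpy as np

def combine_premasters(data_fdddf, data_dfffd):
    """Return the element-wise maximum of the two pre-master flat arrays."""
    return np.max([data_fdddf, data_dfffd], axis=0)

# Usage sketch: the illuminated fibres from each pre-master survive the merge.
# combined = combine_premasters(np.ones((16, 16)), np.zeros((16, 16)))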
    def storeCalibration(self, adinputs=None, **params):
        """
        Will write calibrations in calibrations/<cal_type>/
        """ 
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        storedcals = self.cachedict["calibrations"]
        caltype = params["caltype"]
        required_tags = REQUIRED_TAG_DICT[caltype]

        # Create storage directory if it doesn't exist
        if not os.path.exists(os.path.join(storedcals, caltype)):
            os.mkdir(os.path.join(storedcals, caltype))

        for ad in adinputs:
            if not ad.tags.issuperset(required_tags):
                log.warning("File {} is not recognized as a {}. Not storing as"
                            " a calibration.".format(ad.filename, caltype))
                continue
            fname = os.path.join(storedcals, caltype, os.path.basename(ad.filename))
            ad.write(fname, overwrite=True)
            log.stdinfo("Calibration stored as {}".format(fname))
            if self.upload and 'calibs' in self.upload:
                try:
                    upload_calibration(fname)
                except:
                    log.warning("Unable to upload file to calibration system")
                else:
                    msg = "File {} uploaded to fitsstore."
                    log.stdinfo(msg.format(os.path.basename(ad.filename)))
        return adinputs
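# A minimal standalone sketch of the tag check used above. REQUIRED_TAG_DICT
# maps each calibration type to the tags a file must carry; the dictionary
# and tags below are illustrative only, not the real lookup table.
EXAMPLE_REQUIRED_TAG_DICT = {
    'processed_flat': {'FLAT', 'PROCESSED'},  # hypothetical entry
}

def is_storable_as(tags, caltype, required_tag_dict=EXAMPLE_REQUIRED_TAG_DICT):
    """Return True if the file's tag set contains every required tag."""
    return set(tags).issuperset(required_tag_dict[caltype])

# is_storable_as({'GMOS', 'IMAGE', 'FLAT', 'PROCESSED'}, 'processed_flat')  # -> True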
 def storeProcessedFlat(self, rc):
     # Instantiate the log
     log = logutils.get_logger(__name__)
     
     # Log the standard "starting primitive" debug message
     log.debug(gt.log_message("primitive", "storeProcessedFlat",
                              "starting"))
     
     # Loop over each input AstroData object in the input list
     for ad in rc.get_inputs_as_astrodata():
         
         # Updating the file name with the suffix for this primitive and
         # then report the new file to the reduction context
         ad.filename = gt.filename_updater(adinput=ad, suffix=rc["suffix"],
                                           strip=True)
         
         # Adding a PROCFLAT time stamp to the PHU
         gt.mark_history(adinput=ad, keyword="PROCFLAT")
         
         # Refresh the AD types to reflect new processed status
         ad.refresh_types()
     
     # Upload to cal system
     rc.run("storeCalibration")
     
     yield rc
    def makeLampFlat(self, adinputs=None, **params):
        """
        This separates the lamp-on and lamp-off flats, stacks them, subtracts
        one from the other, and returns that single frame. Since they are lamp
        flats, no scaling is performed during the stacking.
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        lamp_on_list = self.selectFromInputs(adinputs, tags='LAMPON')
        lamp_off_list = self.selectFromInputs(adinputs, tags='LAMPOFF')
        self.showInputs(lamp_on_list, purpose='lampOn')
        self.showInputs(lamp_off_list, purpose='lampOff')

        stack_params = self._inherit_params(params, "stackFrames")
        stack_params.update({'zero': False, 'scale': False})
        ad_on = self.stackFrames(lamp_on_list, **stack_params)
        ad_off = self.stackFrames(lamp_off_list, **stack_params)

        if ad_on and ad_off:
            log.fullinfo("Subtracting lampOff stack from lampOn stack")
            flat = ad_on[0]
            flat.subtract(ad_off[0])
            flat.update_filename(suffix="_lampOnOff")
            return [flat]
        else:
            log.warning("Cannot subtract lampOff from lampOn flats as do not "
                        "have some of each")
            if ad_on:
                log.warning("Returning stacked lamp on flats")
                return ad_on
            else:
                return []
    def standardizeObservatoryHeaders(self, adinputs=None, **params):
        """
        This primitive is used to make the changes and additions to the
        keywords in the headers of Gemini data.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by standardize"
                            "ObservatoryHeaders".format(ad.filename))
                continue

            # Update various header keywords
            log.status("Updating keywords that are common to all Gemini data")
            ad.phu.set('NSCIEXT', len(ad), self.keyword_comments['NSCIEXT'])
            ad.hdr.set('BUNIT', 'adu', self.keyword_comments['BUNIT'])

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=params["suffix"], strip=True)
        return adinputs
    def trimOverscan(self, adinputs=None, suffix=None):
        """
        The trimOverscan primitive trims the overscan region from the input
        AstroData object and updates the headers.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key) is not None:
                log.warning('No changes will be made to {}, since it has '
                            'already been processed by trimOverscan'.format(
                                ad.filename))
                continue

            ad = gt.trim_to_data_section(
                ad, keyword_comments=self.keyword_comments)

            # Set keyword, timestamp, and update filename
            ad.phu.set('TRIMMED', 'yes', self.keyword_comments['TRIMMED'])
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
    def makeLampFlat(self, adinputs=None, **params):
        """
        This produces an appropriate stacked GSAOI imaging flat, based on
        the inputs, since one of two procedures must be followed.

        In the standard recipe, the inputs will have come from getList and
        so will all have the same filter and will all need the same recipe.
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        # Leave now with empty list to avoid error when looking at adinputs[0]
        if not adinputs:
            return adinputs

        if adinputs[0].effective_wavelength(output_units='micrometers') < 1.4:
            log.stdinfo('Using stackFrames to make flatfield')
            params.update({'scale': False, 'zero': False})
            adinputs = self.stackFrames(adinputs, **params)
        else:
            log.stdinfo(
                'Using standard makeLampFlat primitive to make flatfield')
            adinputs = super().makeLampFlat(adinputs, **params)

        return adinputs
    def standardizeWCS(self,
                       adinputs=None,
                       suffix=None,
                       reference_extension=None):
        """
        This primitive updates the WCS attribute of each NDAstroData extension
        in the input AstroData objects. For spectroscopic data, it means
        replacing an imaging WCS with an approximate spectroscopic WCS.
        For multi-extension ADs, it means prepending a tiling and/or mosaic
        transform before the pixel->world transform, and giving all extensions
        copies of the reference extension's pixel->world transform.

        Parameters
        ----------
        suffix: str/None
            suffix to be added to output files
        reference_extension: int/None
            reference extension whose WCS is inherited by others
        """
        log = self.log
        timestamp_key = self.timestamp_keys[self.myself()]
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        for ad in adinputs:
            # TODO: work towards making this "if 'SPECT' in ad.tags"
            # which is why it's here and not in primitives_gmos_spect
            log.stdinfo(f"Adding spectroscopic WCS to {ad.filename}")
            if {'GMOS', 'SPECT', 'LS'}.issubset(ad.tags):
                add_longslit_wcs(ad)

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
    def storeProcessedSlitIllum(self, adinputs=None, suffix=None):
        """
        Stores the Processed Slit Illumination file.

        Parameters
        ----------
        adinputs : list of AstroData
            Data that contain the Slit Illumination Response Function.
        suffix : str
            Suffix to be added to each of the input files.

        Returns
        -------
        list of AstroData : the input data is simply forwarded.
        """
        caltype = 'processed_slitillum'
        self.log.debug(gt.log_message("primitive", self.myself(), "starting"))
        adoutputs = list()
        for ad in adinputs:
            passes = 'MAKESILL' in ad.phu
            if passes:
                procstdads = self._markAsCalibration([ad], suffix=suffix,
                                                     primname=self.myself(), keyword="PROCILLM")
                adoutputs.extend(procstdads)
            else:
                adoutputs.append(ad)
        self.storeCalibration(adinputs, caltype=caltype)
        return adoutputs
    def separateFlatsDarks(self, adinputs=None, **params):
        """
        This primitive produces two streams, one containing flats, and one
        containing darks. Other files remain in the main stream
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        # Initialize lists of AstroData objects to be added to the streams
        dark_list = []
        flat_list = []
        adoutputs = []
        for ad in adinputs:
            tags = ad.tags
            if "DARK" in tags:
                dark_list.append(ad)
                log.fullinfo("Dark: {}, {}".format(ad.data_label(),
                                                   ad.filename))
            elif "FLAT" in tags:
                flat_list.append(ad)
                log.fullinfo("Flat: {}, {}".format(ad.data_label(),
                                                   ad.filename))
            else:
                adoutputs.append(ad)
                log.warning("Not Dark/Flat: {} {}".format(
                    ad.data_label(), ad.filename))
        if not dark_list:
            log.warning("No Darks in input list")
        if not flat_list:
            log.warning("No Flats in input list")

        self.streams.update({"flats": flat_list})
        self.streams.update({"darks": dark_list})
        return adoutputs
    def trimOverscan(self, adinputs=None, suffix=None):
        """
        The trimOverscan primitive trims the overscan region from the input
        AstroData object and updates the headers.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key) is not None:
                log.warning('No changes will be made to {}, since it has '
                            'already been processed by trimOverscan'.
                            format(ad.filename))
                continue

            ad = gt.trim_to_data_section(ad,
                                    keyword_comments=self.keyword_comments)

            # Set keyword, timestamp, and update filename
            ad.phu.set('TRIMMED', 'yes', self.keyword_comments['TRIMMED'])
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
    def separateFlatsDarks(self, adinputs=None, **params):
        """
        This primitive produces two streams, one containing flats, and one
        containing darks. Other files remain in the main stream
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        # Initialize lists of AstroData objects to be added to the streams
        dark_list = []
        flat_list = []
        adoutputs = []
        for ad in adinputs:
            tags = ad.tags
            if "DARK" in tags:
                dark_list.append(ad)
                log.fullinfo("Dark: {}, {}".format(ad.data_label(), ad.filename))
            elif "FLAT" in tags:
                flat_list.append(ad)
                log.fullinfo("Flat: {}, {}".format(ad.data_label(), ad.filename))
            else:
                adoutputs.append(ad)
                log.warning("Not Dark/Flat: {} {}".format(ad.data_label(),
                                                          ad.filename))
        if not dark_list:
            log.warning("No Darks in input list")
        if not flat_list:
            log.warning("No Flats in input list")

        self.streams.update({"flats" : flat_list})
        self.streams.update({"darks" : dark_list})
        return adoutputs
    def subtractLampOnLampOff(self, rc):
        """
        This primitive subtracts the lamp-off stack from the lamp-on stack.
        It expects there to be only one file (the stack) in each stream;
        call stackLampOnLampOff to do the stacking before calling this.
        """

        # Instantiate the log
        log = logutils.get_logger(__name__)

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "subtractLampOnLampOff", "starting"))

        # Initialize the list of output AstroData objects
        adoutput_list = []

        lampon = rc.get_stream(stream="lampOn", style="AD")[0]
        lampoff = rc.get_stream(stream="lampOff", style="AD")[0]

        log.stdinfo("Lamp ON is: %s %s" % (lampon.data_label(), lampon.filename))
        log.stdinfo("Lamp OFF is: %s %s" % (lampoff.data_label(), lampoff.filename))
        lampon.sub(lampoff)
        lampon.filename = gt.filename_updater(adinput=lampon, suffix="lampOnOff")

        adoutput_list.append(lampon)
        rc.report_output(adoutput_list)

        yield rc
    def transferAttribute(self, adinputs=None, source=None, attribute=None):
        """
        This primitive takes an attribute (e.g., "mask", or "OBJCAT") from
        the AD(s) in another ("source") stream and applies it to the ADs in
        this stream. There must be either the same number of ADs in each
        stream, or only 1 in the source stream.
        
        Parameters
        ----------
        source: str
            name of stream containing ADs whose attributes you want
        attribute: str
            attribute to transfer from ADs in other stream
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        if source not in self.streams.keys():
            log.info("Stream {} does not exist so nothing to transfer".format(source))
            return adinputs

        source_length = len(self.streams[source])
        if not (source_length == 1 or source_length == len(adinputs)):
            log.warning("Incompatible stream lengths: {} and {}".
                        format(len(adinputs), source_length))
            return adinputs

        log.stdinfo("Transferring attribute {} from stream {}".format(attribute, source))

        # Keep track of whether we find anything to transfer, as failing to
        # do so might indicate a problem and we should warn the user
        found = False

        for ad1, ad2 in zip(*gt.make_lists(adinputs, self.streams[source])):
            # Attribute could be top-level or extension-level
            # Use deepcopy so references to original object don't remain
            if hasattr(ad2, attribute):

                try:
                    setattr(ad1, attribute,
                            copy.deepcopy(getattr(ad2, attribute)))

                except ValueError:  # data, mask, are gettable not settable
                    pass

                else:
                    found = True
                    continue

            for ext1, ext2 in zip(ad1, ad2):

                if hasattr(ext2, attribute):
                    setattr(ext1, attribute,
                            copy.deepcopy(getattr(ext2, attribute)))
                    found = True

        if not found:
            log.warning("Did not find any {} attributes to transfer".format(attribute))

        return adinputs
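# A minimal standalone sketch of the attribute-transfer pattern above, using
# plain objects instead of AstroData; the deepcopy ensures the destination
# keeps no references into the source object. Names here are illustrative.
import copy

def transfer_attribute_sketch(dest, source, attribute):
    """Copy `attribute` from source to dest via deepcopy, if present."""
    if hasattr(source, attribute):
        setattr(dest, attribute, copy.deepcopy(getattr(source, attribute)))
        return True
    return False

# class _Obj: pass
# a, b = _Obj(), _Obj()
# b.OBJMASK = [[0, 1], [1, 0]]
# transfer_attribute_sketch(a, b, "OBJMASK")  # -> True; a.OBJMASK is a copy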
    def standardizeObservatoryHeaders(self, adinputs=None, **params):
        """
        This primitive is used to make the changes and additions to the
        keywords in the headers of Gemini data.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by standardize"
                            "ObservatoryHeaders".format(ad.filename))
                continue

            # Update various header keywords
            log.status("Updating keywords that are common to all Gemini data")
            ad.phu.set('NSCIEXT', len(ad), self.keyword_comments['NSCIEXT'])
            ad.hdr.set('BUNIT', 'adu', self.keyword_comments['BUNIT'])
            for ext in ad:
                if 'RADECSYS' in ext.hdr:
                    ext.hdr['RADESYS'] = (ext.hdr['RADECSYS'],
                                          ext.hdr.comments['RADECSYS'])
                    del ext.hdr['RADECSYS']

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=params["suffix"], strip=True)
        return adinputs
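# A minimal standalone sketch of the RADECSYS -> RADESYS keyword rename above,
# operating on an astropy.io.fits Header directly (illustrative, not the
# primitive itself).
from astropy.io import fits

def rename_radecsys(hdr):
    """Move the deprecated RADECSYS keyword to RADESYS, keeping its comment."""
    if 'RADECSYS' in hdr:
        hdr['RADESYS'] = (hdr['RADECSYS'], hdr.comments['RADECSYS'])
        del hdr['RADECSYS']
    return hdr

# hdr = fits.Header()
# hdr['RADECSYS'] = ('FK5', 'R.A./Dec. coordinate system reference')
# rename_radecsys(hdr)['RADESYS']  # -> 'FK5'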
    def prepare(self, adinputs=None, **params):
        """
        Validate and standardize the datasets to ensure compatibility
        with the subsequent primitives.  The outputs, if written to
        disk will be given the suffix "_prepared".

        Currently, there are no input parameters associated with
        this primitive.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", "prepare", "starting"))
        timestamp_key = self.timestamp_keys["prepare"]
        sfx = params["suffix"]
        for primitive in ('validateData', 'standardizeStructure',
                          'standardizeHeaders'):
            passed_params = self._inherit_params(params, primitive)
            adinputs = getattr(self, primitive)(adinputs, **passed_params)

        for ad in adinputs:
            gt.mark_history(ad, self.myself(), timestamp_key)
            ad.update_filename(suffix=sfx, strip=True)
        return adinputs
    def separateLampOff(self, rc):
        """
        This primitive is intended to run on GCAL imaging flats.
        It goes through the input list and determines which frames are
        lamp-on and which are lamp-off.
        """
        # Instantiate the log
        log = logutils.get_logger(__name__)

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "separateLampOff", "starting"))

        # Initialize the list of output AstroData objects
        lampon_list = []
        lampoff_list = []

        # Loop over the input frames
        for ad in rc.get_inputs_as_astrodata():
            if('GCAL_IR_ON' in ad.types):
                log.stdinfo("%s is a lamp-on flat" % ad.data_label())
                #rc.run("addToList(purpose=lampOn)")
                lampon_list.append(ad)
            elif('GCAL_IR_OFF' in ad.types):
                log.stdinfo("%s is a lamp-off flat" % ad.data_label())
                #rc.run("addToList(purpose=lampOff)")
                lampoff_list.append(ad)
            else:
                log.warning("Not a GCAL flatfield? Cannot tell if it is lamp-on or lamp-off for %s" % ad.data_label())

        rc.report_output(lampon_list, stream="lampOn")
        rc.report_output(lampoff_list, stream="lampOff")

        yield rc
    def storeCalibration(self, adinputs=None, **params):
        """
        Will write calibrations in calibrations/<cal_type>/
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        storedcals = self.cachedict["calibrations"]
        caltype = params["caltype"]
        required_tags = REQUIRED_TAG_DICT[caltype]

        # Create storage directory if it doesn't exist
        if not os.path.exists(os.path.join(storedcals, caltype)):
            os.mkdir(os.path.join(storedcals, caltype))

        for ad in adinputs:
            if not ad.tags.issuperset(required_tags):
                log.warning("File {} is not recognized as a {}. Not storing as"
                            " a calibration.".format(ad.filename, caltype))
                continue
            fname = os.path.join(storedcals, caltype,
                                 os.path.basename(ad.filename))
            ad.write(fname, overwrite=True)
            log.stdinfo("Calibration stored as {}".format(fname))
            if self.upload and 'calibs' in self.upload:
                try:
                    upload_calibration(fname)
                except:
                    log.warning("Unable to upload file to calibration system")
                else:
                    msg = "File {} uploaded to fitsstore."
                    log.stdinfo(msg.format(os.path.basename(ad.filename)))
        return adinputs
 def storeCalibration(self, rc):
     # Instantiate the log
     log = logutils.get_logger(__name__)
     
     # Log the standard "starting primitive" debug message
     log.debug(gt.log_message("primitive", "storeCalibration", "starting"))
     
     # Determine the path where the calibration will be stored
     storedcals = rc["cachedict"]["storedcals"]
     
     # Loop over each input AstroData object in the input list
     for ad in rc.get_inputs_as_astrodata():
         
         # Construct the filename of the calibration, including the path
         fname = os.path.join(storedcals, os.path.basename(ad.filename))
         
         # Write the calibration to disk. Use rename=False so that
         # ad.filename does not change (i.e., does not include the
         # calibration path)
         ad.write(filename=fname, rename=False, clobber=True)
         log.stdinfo("Calibration stored as %s" % fname)
         
         if "upload" in rc.context:
             try:
                 upload_calibration(fname)
             except:
                 log.warning("Unable to upload file to calibration system")
             else:
                 log.stdinfo("File %s uploaded to fitsstore." % 
                             os.path.basename(ad.filename))
     
     yield rc
    def makeLampFlat(self, adinputs=None, **params):
        """
        This separates the lamp-on and lamp-off flats, stacks them, subtracts
        one from the other, and returns that single frame. Since they are lamp
        flats, no scaling is performed during the stacking.
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        lamp_on_list = self.selectFromInputs(adinputs, tags='LAMPON')
        lamp_off_list = self.selectFromInputs(adinputs, tags='LAMPOFF')
        self.showInputs(lamp_on_list, purpose='lampOn')
        self.showInputs(lamp_off_list, purpose='lampOff')

        stack_params = self._inherit_params(params, "stackFrames")
        stack_params.update({'zero': False, 'scale': False})
        ad_on = self.stackFrames(lamp_on_list, **stack_params)
        ad_off = self.stackFrames(lamp_off_list, **stack_params)

        if ad_on and ad_off:
            log.fullinfo("Subtracting lampOff stack from lampOn stack")
            flat = ad_on[0]
            flat.subtract(ad_off[0])
            flat.update_filename(suffix="_lampOnOff")
            return [flat]
        else:
            log.warning("Cannot subtract lampOff from lampOn flats as do not "
                        "have some of each")
            if ad_on:
                log.warning("Returning stacked lamp on flats")
                return ad_on
            else:
                return []
    def standardizeInstrumentHeaders(self, adinputs=None, suffix=None):
        """
        This primitive is used to make the changes and additions to the
        keywords in the headers of NIRI data, specifically.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        # Instantiate the log
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by "
                            "standardizeInstrumentHeaders".format(ad.filename))
                continue

            # Standardize the headers of the input AstroData object. Update
            # the keywords in the headers that are specific to NIRI.
            log.status("Updating keywords that are specific to NIRI")

            # Filter name (required for IRAF?)
            ad.phu.set('FILTER', ad.filter_name(stripID=True, pretty=True),
                       self.keyword_comments['FILTER'])

            # Pixel scale (CJS: I'm putting this in the extension too!)
            pixel_scale = ad.pixel_scale()
            ad.phu.set('PIXSCALE', pixel_scale, self.keyword_comments['PIXSCALE'])
            ad.hdr.set('PIXSCALE', pixel_scale, self.keyword_comments['PIXSCALE'])

            for desc in ('read_noise', 'gain', 'non_linear_level',
                         'saturation_level'):
                kw = ad._keyword_for(desc)
                ad.hdr.set(kw, getattr(ad, desc)()[0], self.keyword_comments[kw])
                try:
                    ad.phu.remove(kw)
                except (KeyError, AttributeError):
                    pass

            # The exposure time keyword in raw data is the exptime of each
            # coadd, but the data have been summed, not averaged, so it needs
            # to be reset to COADDS*EXPTIME. The descriptor always returns
            # that value, regardless of whether the data are prepared or
            # unprepared.
            kw = ad._keyword_for('exposure_time')
            ad.phu.set(kw, ad.exposure_time(), self.keyword_comments[kw])

            if 'SPECT' in ad.tags:
                kw = ad._keyword_for('dispersion_axis')
                ad.hdr.set(kw, 1, self.keyword_comments[kw])

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
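# A minimal standalone sketch of the exposure-time reset described above: raw
# NIRI headers record the exposure time of a single coadd, but the coadds are
# summed, so the effective exposure time is COADDS * EXPTIME.
def effective_exposure_time(exptime_per_coadd, coadds):
    """Total exposure time (seconds) of `coadds` summed coadds."""
    return coadds * exptime_per_coadd

# effective_exposure_time(30.0, 4)  # -> 120.0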
    def normalizeFlat(self, rc):
        """
        This primitive normalizes each science extension of the input
        AstroData object by its mean
        """
        # Instantiate the log
        log = logutils.get_logger(__name__)
        
        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "normalizeFlat", "starting"))
        
        # Define the keyword to be used for the time stamp for this primitive
        timestamp_key = self.timestamp_keys["normalizeFlat"]

        # Initialize the list of output AstroData objects
        adoutput_list = []
        
        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():
            
            # Check whether the normalizeFlat primitive has been run previously
            if ad.phu_get_key_value(timestamp_key):
                log.warning("No changes will be made to %s, since it has " \
                            "already been processed by normalizeFlat" \
                            % (ad.filename))
                # Append the input AstroData object to the list of output
                # AstroData objects without further processing
                adoutput_list.append(ad)
                continue
            
            # Loop over each science extension in each input AstroData object
            for ext in ad[SCI]:
                
                # Normalise the input AstroData object. Calculate the mean
                # value of the science extension
                mean = np.mean(ext.data, dtype=np.float64)
                # Divide the science extension by the mean value of the science
                # extension
                log.fullinfo("Normalizing %s[%s,%d] by dividing by the mean " \
                             "= %f" % (ad.filename, ext.extname(),
                                       ext.extver(), mean))
                ext = ext.div(mean)

            # Add the appropriate time stamps to the PHU
            gt.mark_history(adinput=ad, keyword=timestamp_key)

            # Change the filename
            ad.filename = gt.filename_updater(adinput=ad, suffix=rc["suffix"], 
                                              strip=True)

            # Append the output AstroData object to the list
            # of output AstroData objects
            adoutput_list.append(ad)
        
        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)
        
        yield rc
    def standardizeInstrumentHeaders(self, adinputs=None, suffix=None):
        """
        This primitive is used to make the changes and additions to the
        keywords in the headers of NIRI data, specifically.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        # Instantiate the log
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by "
                            "standardizeInstrumentHeaders".format(ad.filename))
                continue

            # Standardize the headers of the input AstroData object. Update
            # the keywords in the headers that are specific to NIRI.
            log.status("Updating keywords that are specific to NIRI")

            # Filter name (required for IRAF?)
            ad.phu.set('FILTER', ad.filter_name(stripID=True, pretty=True),
                       self.keyword_comments['FILTER'])

            # Pixel scale (CJS: I'm putting this in the extension too!)
            pixel_scale = ad.pixel_scale()
            ad.phu.set('PIXSCALE', pixel_scale, self.keyword_comments['PIXSCALE'])
            ad.hdr.set('PIXSCALE', pixel_scale, self.keyword_comments['PIXSCALE'])

            for desc in ('read_noise', 'gain', 'non_linear_level',
                         'saturation_level'):
                kw = ad._keyword_for(desc)
                ad.hdr.set(kw, getattr(ad, desc)()[0], self.keyword_comments[kw])
                try:
                    ad.phu.remove(kw)
                except (KeyError, AttributeError):
                    pass

            # The exposure time keyword in raw data is the exptime of each
            # coadd, but the data have been summed, not averaged, so it needs
            # to be reset to COADDS*EXPTIME. The descriptor always returns
            # that value, regardless of whether the data are prepared or
            # unprepared.
            kw = ad._keyword_for('exposure_time')
            ad.phu.set(kw, ad.exposure_time(), self.keyword_comments[kw])

            if 'SPECT' in ad.tags:
                kw = ad._keyword_for('dispersion_axis')
                ad.hdr.set(kw, 1, self.keyword_comments[kw])

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
    def stackFlats(self, adinputs=None, **params):
        """
        This primitive will combine the input flats with rejection
        parameters set appropriately for GMOS imaging twilight flats.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files

        apply_dq: bool
            apply DQ mask to data before combining? (passed to stackFrames)

        operation: str
            type of combine operation (passed to stackFrames)

        reject_method: str
            rejection method (passed to stackFrames)

        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        nframes = len(adinputs)
        if nframes < 2:
            log.stdinfo("At least two frames must be provided to stackFlats")
        else:
            # Define rejection parameters based on number of input frames,
            # to be used with minmax rejection. Note: if reject_method
            # parameter is overridden, these parameters will just be
            # ignored
            stack_params = self._inherit_params(params, "stackFrames")
            nlow, nhigh = 0, 0
            if nframes <= 2:
                stack_params["reject_method"] = "none"
            elif nframes <= 5:
                nlow, nhigh = 1, 1
            elif nframes <= 10:
                nlow, nhigh = 2, 2
            else:
                nlow, nhigh = 2, 3
            stack_params.update({
                'nlow': nlow,
                'nhigh': nhigh,
                'zero': False,
                'scale': False,
                'statsec': None,
                'separate_ext': False
            })
            log.fullinfo("For {} input frames, using reject_method={}, "
                         "nlow={}, nhigh={}".format(
                             nframes, stack_params["reject_method"], nlow,
                             nhigh))

            # Run the scaleByIntensity primitive to scale flats to the
            # same level, and then stack
            adinputs = self.scaleByIntensity(adinputs)
            adinputs = self.stackFrames(adinputs, **stack_params)
        return adinputs
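# A minimal standalone sketch of the frame-count-dependent minmax rejection
# thresholds chosen above; the default reject_method argument here is an
# assumption, and is only overridden to "none" for two or fewer frames, as
# in the primitive.
def minmax_params(nframes, reject_method="minmax"):
    """Return (reject_method, nlow, nhigh) for a given number of flats."""
    if nframes <= 2:
        return "none", 0, 0
    if nframes <= 5:
        return reject_method, 1, 1
    if nframes <= 10:
        return reject_method, 2, 2
    return reject_method, 2, 3

# minmax_params(7)  # -> ("minmax", 2, 2)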
 def storeProcessedSlitFlat(self, adinputs=None, **params):
     caltype = 'processed_slitflat'
     sfx = params["suffix"]
     self.log.debug(gt.log_message("primitive", self.myself(), "starting"))
     adinputs = self._markAsCalibration(adinputs, suffix=sfx,
                                 primname=self.myself(), keyword="PRSLITFL")
     self.storeCalibration(adinputs, caltype=caltype)
     return adinputs
    def mosaicDetectors(self, adinputs=None, **params):
        """
        This primitive does a full mosaic of all the arrays in an AD object.
        An appropriate geometry_conf.py module containing geometric information
        is required.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files.
        sci_only: bool
            mosaic only SCI image data. Default is False
        order: int (1-5)
            order of spline interpolation
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        suffix = params['suffix']
        order = params['order']
        attributes = ['data'] if params['sci_only'] else None
        geotable = import_module('.geometry_conf', self.inst_lookups)

        adoutputs = []
        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by mosaicDetectors".
                            format(ad.filename))
                adoutputs.append(ad)
                continue

            if len(ad) == 1:
                log.warning("{} has only one extension, so there's nothing "
                            "to mosaic".format(ad.filename))
                adoutputs.append(ad)
                continue

            # If there's an overscan section, we must trim it before mosaicking
            try:
                overscan_kw = ad._keyword_for('overscan_section')
            except AttributeError:  # doesn't exist for this AD, so carry on
                pass
            else:
                if overscan_kw in ad.hdr:
                    ad = gt.trim_to_data_section(ad, self.keyword_comments)

            adg = transform.create_mosaic_transform(ad, geotable)
            ad_out = adg.transform(attributes=attributes, order=order,
                                   process_objcat=False)

            ad_out.orig_filename = ad.filename
            gt.mark_history(ad_out, primname=self.myself(), keyword=timestamp_key)
            ad_out.update_filename(suffix=suffix, strip=True)
            adoutputs.append(ad_out)

        return adoutputs
    def stackFlats(self, rc):
        """
        This primitive will combine the input flats with rejection
        parameters set appropriately for GMOS imaging twilight flats.
        """
        
        # Instantiate the log
        log = gemLog.getGeminiLog(logType=rc["logType"],
                                  logLevel=rc["logLevel"])
        
        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "stackFlats", "starting"))
        
        # Check for at least 2 input frames
        adinput = rc.get_inputs_as_astrodata()
        nframes = len(adinput)
        if nframes<2:
            log.stdinfo("At least two frames must be provided to " +
                        "stackFlats")
            # Report input to RC without change
            adoutput_list = adinput
            rc.report_output(adoutput_list)
        else:
            # Define rejection parameters based on number of input frames,
            # to be used with minmax rejection. Note: if reject_method
            # parameter is overridden, these parameters will just be
            # ignored
            reject_method = rc["reject_method"]
            nlow = 0
            nhigh = 0
            if nframes <= 2:
                reject_method = None
            elif nframes <= 5:
                nlow = 1
                nhigh = 1
            elif nframes <= 10:
                nlow = 2
                nhigh = 2
            else:
                nlow = 2
                nhigh = 3
            log.fullinfo("For %d input frames, using reject_method=%s, "\
                         "nlow=%d, nhigh=%d" % 
                         (nframes, reject_method, nlow, nhigh))

            # Run the scaleByIntensity primitive to scale flats to the
            # same level
            rc.run("scaleByIntensity")

            # Run the stackFrames primitive with the defined parameters
            prim_str = "stackFrames(suffix=%s,operation=%s,mask=%s," \
                       "reject_method=%s,nlow=%s,nhigh=%s)" % \
                       (rc["suffix"],rc["operation"],rc["mask"],
                        reject_method,nlow,nhigh)
            rc.run(prim_str)
        
        yield rc
    def stackSkyFrames(self, adinputs=None, **params):
        """
        This primitive stacks the AD frames sent to it with object masking.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        apply_dq: bool
            apply DQ mask to data before combining?
        dilation: int
            dilation radius for expanding object mask
        mask_objects: bool
            mask objects from the input frames?
        nhigh: int
            number of high pixels to reject
        nlow: int
            number of low pixels to reject
        operation: str
            combine method
        reject_method: str
            type of pixel rejection (passed to gemcombine)
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        #timestamp_key = self.timestamp_keys["stackSkyFrames"]

        # If both scale and zero are set, resolve the conflict here; this is
        # not what stackFrames itself does when both are set.
        stack_params = self._inherit_params(params,
                                            'stackFrames',
                                            pass_suffix=True)
        if stack_params["scale"] and stack_params["zero"]:
            log.warning("Both the scale and zero parameters are set. "
                        "Setting zero=False.")
            stack_params["zero"] = False

        # Need to deepcopy here to avoid changing DQ of inputs
        dilation = params["dilation"]
        if params["mask_objects"]:
            # Purely cosmetic to avoid log reporting unnecessary calls to
            # dilateObjectMask
            if dilation > 0:
                adinputs = self.dilateObjectMask(adinputs, dilation=dilation)
            adinputs = self.addObjectMaskToDQ(
                [deepcopy(ad) for ad in adinputs])

        #if scale or zero:
        #    ref_bg = gt.measure_bg_from_image(adinputs[0], value_only=True)
        #    for ad in adinputs[1:]:
        #        this_bg = gt.measure_bg_from_image(ad, value_only=True)
        #        for ext, this, ref in zip(ad, this_bg, ref_bg):
        #            if scale:
        #                ext *= ref / this
        #            elif zero:
        #                ext += ref - this
        adinputs = self.stackFrames(adinputs, **stack_params)
        return adinputs
 def storeBPM(self, adinputs=None, suffix=None):
     caltype = 'bpm'
     self.log.debug(gt.log_message("primitive", self.myself(), "starting"))
     adinputs = gt.convert_to_cal_header(adinput=adinputs, caltype="bpm",
                                         keyword_comments=self.keyword_comments)
     adinputs = self._markAsCalibration(adinputs, suffix=suffix,
                 primname=self.myself(), update_datalab=False, keyword="BPM")
     self.storeCalibration(adinputs, caltype=caltype)
     return adinputs
 def storeProcessedFringe(self, adinputs=None, suffix=None):
     caltype = 'processed_fringe'
     self.log.debug(gt.log_message("primitive", self.myself(), "starting"))
     adinputs = gt.convert_to_cal_header(adinput=adinputs, caltype="fringe",
                                       keyword_comments=self.keyword_comments)
     adinputs = self._markAsCalibration(adinputs, suffix=suffix,
             primname=self.myself(), keyword="PROCFRNG", update_datalab=False)
     self.storeCalibration(adinputs, caltype=caltype)
     return adinputs
 def storeCalibration(self, adinputs=None, caltype=None):
     """
     Farm some calibration ADs out to the calibration database(s) to process.
     """
     log = self.log
     log.debug(gt.log_message("primitive", self.myself(), "starting"))
     for ad in adinputs:
         self.caldb.store_calibration(ad, caltype=caltype)
     return adinputs
 def storeBPM(self, adinputs=None, suffix=None):
     caltype = 'bpm'
     self.log.debug(gt.log_message("primitive", self.myself(), "starting"))
     adinputs = gt.convert_to_cal_header(adinput=adinputs, caltype="bpm",
                                         keyword_comments=self.keyword_comments)
     adinputs = self._markAsCalibration(adinputs, suffix=suffix,
                                        primname=self.myself(), update_datalab=False, keyword="BPM")
     self.storeCalibration(adinputs, caltype=caltype)
     return adinputs
 def storeProcessedFlat(self, adinputs=None, suffix=None, force=False):
     caltype = 'processed_flat'
     self.log.debug(gt.log_message("primitive", self.myself(), "starting"))
     if force:
         adinputs = gt.convert_to_cal_header(adinput=adinputs, caltype="flat",
                                             keyword_comments=self.keyword_comments)
     adinputs = self._markAsCalibration(adinputs, suffix=suffix,
                                        primname=self.myself(), keyword="PROCFLAT")
     self.storeCalibration(adinputs, caltype=caltype)
     return adinputs
    def gudayMate(self, *args, **kwargs):

        log = self.log
        log.stdinfo(gt.log_message("primitive", self.myself(), "starting"))
        for ad in self.adinputs:
            log.stdinfo("Hello World! This is {}".format(ad.filename))
            log.stdinfo("Sporting a tagset: {}".format(ad.tags))
            log.stdinfo("Coming to you from {}.".format(self.myself()))

        return
 def storeProcessedFlat(self, adinputs=None, suffix=None, force=False):
     caltype = 'processed_flat'
     self.log.debug(gt.log_message("primitive", self.myself(), "starting"))
     if force:
         adinputs = gt.convert_to_cal_header(adinput=adinputs, caltype="flat",
                                             keyword_comments=self.keyword_comments)
     adinputs = self._markAsCalibration(adinputs, suffix=suffix,
                                 primname=self.myself(), keyword="PROCFLAT")
     self.storeCalibration(adinputs, caltype=caltype)
     return adinputs
    def applyStackedObjectMask(self, adinputs=None, **params):
        """
        This primitive takes an image with an OBJMASK and transforms that
        OBJMASK onto the pixel planes of the input images, using their WCS
        information. If the first image is a stack, this allows us to mask
        fainter objects than can be detected in the individual input images.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        source: str
            name of stream containing single stacked image
        order: int (0-5)
            order of interpolation
        threshold: float
            threshold above which an interpolated pixel should be flagged
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        source = params["source"]
        order = params["order"]
        threshold = params["threshold"]
        sfx = params["suffix"]

        source_stream = self.streams.get(source, [])
        if len(source_stream) != 1:
            log.warning("Stream {} not found or does not contain single "
                        "AstroData object. Continuing.".format(source_stream))
            return adinputs

        ad_source = source_stream[0]
        # There's no reason why we can't handle multiple extensions
        if any(len(ad) != len(ad_source) for ad in adinputs):
            log.warning("At least one AstroData input has a different number "
                        "of extensions to the reference. Continuing.")
            return adinputs

        for ad in adinputs:
            for ext, source_ext in zip(ad, ad_source):
                transform = Transform([Pix2Sky(WCS(source_ext.hdr)),
                                       Pix2Sky(WCS(ext.hdr)).inverse])
                transform._affine = True
                try:
                    # Convert OBJMASK to float or else uint8 will be returned
                    objmask = transform.apply(source_ext.OBJMASK.astype(np.float32),
                                              output_shape=ext.shape, order=order, cval=0)
                    ext.OBJMASK = np.where(abs(objmask) > threshold, 1, 0).astype(np.uint8)
                except:  # source_ext.OBJMASK not present, or None
                    pass
                # We will deliberately keep the input image's OBJCAT (if it
                # exists) since this will be required for aligning the inputs.
            ad.update_filename(suffix=sfx, strip=True)

        return adinputs
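# A minimal standalone sketch of the final thresholding step above: the
# transformed (float) object mask is turned back into a 0/1 uint8 OBJMASK.
# The threshold value used here is illustrative.
import numpy as np

def threshold_objmask(objmask_float, threshold=0.01):
    """Flag any pixel whose interpolated mask value exceeds the threshold."""
    return np.where(np.abs(objmask_float) > threshold, 1, 0).astype(np.uint8)

# threshold_objmask(np.array([[0.0, 0.4], [0.005, 1.0]]))
# -> array([[0, 1], [0, 1]], dtype=uint8)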
    def skyCorrectNodAndShuffle(self, adinputs=None, suffix=None):
        """
        Perform sky correction on GMOS N&S images by taking each image and
        subtracting from it a shifted version of the same image.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            # Check whether this primitive has been run previously
            if ad.phu.get(timestamp_key):
                log.warning(
                    "No changes will be made to {}, since it has "
                    "already been processed by skyCorrectNodAndShuffle".format(
                        ad.filename))
                continue

            # Determine N&S offset in (binned) pixels
            shuffle = ad.shuffle_pixels() // ad.detector_y_bin()
            a_nod_count, b_nod_count = ad.nod_count()

            ad_nodded = deepcopy(ad)

            # Shuffle B position data up for all extensions (SCI, DQ, VAR)
            for ext, ext_nodded in zip(ad, ad_nodded):
                #TODO: Add DQ=16 to top and bottom?
                # Set image initially to zero
                ext_nodded.multiply(0)
                # Then replace with the upward-shifted data
                for attr in ('data', 'mask', 'variance'):
                    getattr(ext_nodded,
                            attr)[shuffle:] = getattr(ext, attr)[:-shuffle]

            # Normalize if the A and B nod counts differ
            if a_nod_count != b_nod_count:
                log.stdinfo(
                    "{} A and B nod counts differ...normalizing".format(
                        ad.filename))
                ad.multiply(0.5 * (a_nod_count + b_nod_count) / a_nod_count)
                ad_nodded.multiply(0.5 * (a_nod_count + b_nod_count) /
                                   b_nod_count)

            # Subtract nodded image from image to complete the process
            ad.subtract(ad_nodded)

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
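# A minimal standalone sketch of the nod-and-shuffle subtraction above: the
# frame is shifted up by `shuffle` rows (zero-filled at the bottom) and
# subtracted from the original. The array and shuffle value are illustrative.
import numpy as np

def subtract_shuffled(data, shuffle):
    """Subtract an upward-shifted copy of `data` from itself."""
    shifted = np.zeros_like(data)
    shifted[shuffle:] = data[:-shuffle]
    return data - shifted

# frame = np.arange(16, dtype=float).reshape(4, 4)
# subtract_shuffled(frame, 2)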
    def cutFootprints(self, rc):
        """
        This primitive will create and append multiple HDUs to the output
        AD object. Each HDU corresponds to a rectangular cut containing a
        slit from a MOS flat exposure or an XD flat exposure, as in the
        GNIRS case.

        :param logLevel: Verbosity setting for log messages to the screen.
        :type logLevel: integer from 0-6, 0=nothing to screen, 6=everything to
                        screen. OR the message level as a string (i.e.,
                        'critical', 'status', 'fullinfo'...)
        """

        # Instantiate the log
        log = logutils.get_logger(__name__)

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "cutFootprints", "starting"))

        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():
            # Call the user-level function

            # Check that the input ad has the TRACEFP extension;
            # otherwise, create it.
            if ad['TRACEFP'] is None:
                ad = trace_footprints(ad)

            log.stdinfo("Cutting footprints for: %s" % ad.filename)
            try:
                adout = cut_footprints(ad)
            except:
                log.error("Error in cut_footprints with file: %s" % ad.filename)
                # DO NOT add this input ad to the adoutput_list
                continue
               
               
            # Change the filename
            adout.filename = gt.filename_updater(adinput=ad, 
                                                 suffix=rc["suffix"],
                                                 strip=True)

            # Append the output AstroData object to the list of output 
            # AstroData objects.
            adoutput_list.append(adout)

        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)

        yield rc
    def subtract(self, rc):
        # This is a bare-bones primitive interface to the ad sub
        # function from the arith module.  The value, dictionary,
        # or AD instance to be subtracted from the input is stored in
        # rc["operand"]

        # Instantiate the log
        log = gemLog.getGeminiLog(logType=rc["logType"],
                                  logLevel=rc["logLevel"])

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "subtract", "starting"))

        # Define the keyword to be used for the time stamp for this primitive
        timestamp_key = self.timestamp_keys["subtract"]

        # Initialize the list of output AstroData objects
        adoutput_list = []
        
        # Get data to be subtracted from the RC
        operand = rc["operand"]
        if operand is None:
            log.stdinfo("No operand to subtract; no changes will be "\
                            "made to input")
        elif isinstance(operand, AstroData):
            log.stdinfo("Subtracting %s from input" % 
                        (operand.filename))
        else:
            log.stdinfo("Subtracting %s from input" % 
                        (repr(operand)))
            
        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():

            if operand is not None:
                # Subtract operand from data
                ad.sub(operand)

                # Add the appropriate time stamps to the PHU
                gt.mark_history(adinput=ad, keyword=timestamp_key)

                # Change the filename
                ad.filename = gt.filename_updater(
                    adinput=ad, suffix=rc["suffix"], strip=True)

            # Append the output AstroData object to the list
            # of output AstroData objects
            adoutput_list.append(ad)

        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)
        
        yield rc
Example #41
    def addLatencyToDQ(self, adinputs=None, **params):
        """
        Flags pixels in the DQ plane of an image based on whether the same
        pixel has been flagged as saturated in a previous image.
        
        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        non_linear : bool
            flag non-linear pixels (as well as saturated ones)?
        time: float
            time (in seconds) for which latency is an issue 
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        flags = DQ.saturated | (DQ.non_linear if params["non_linear"] else 0)
        # Create a timedelta object using the value of the "time" parameter
        seconds = datetime.timedelta(seconds=params["time"])

        # Avoids n^2 calls to the descriptor
        times = [ad.ut_datetime() for ad in adinputs]
        for i, ad in enumerate(adinputs):
            # Find which frames have their bright pixels propagated
            propagated = [
                x for x in zip(adinputs, times)
                if (x[1] < times[i] and times[i] - x[1] < seconds)
            ]
            if propagated:
                log.stdinfo('{} affected by {}'.format(
                    ad.filename,
                    ','.join([x[0].filename for x in propagated])))

                for ad_latent in list(zip(*propagated))[0]:
                    # AD extensions might not be in the same order
                    # Set aux_type to 'bpm' which means hot pixels in a subarray
                    # can still be propagated to a subsequent full-array image
                    ad_latent = gt.clip_auxiliary_data(ad,
                                                       aux=ad_latent,
                                                       aux_type='bpm')
                    for ext, ext_latent in zip(ad, ad_latent):
                        if ext_latent.mask is not None:
                            latency = np.where(ext_latent.mask & flags,
                                               DQ.cosmic_ray,
                                               0).astype(DQ.datatype)
                            ext.mask = latency if ext.mask is None \
                                else ext.mask | latency
            else:
                log.stdinfo('{} is not affected by latency'.format(
                    ad.filename))

            ad.update_filename(suffix=params["suffix"], strip=True)
        return adinputs
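
A stripped-down sketch of the flag propagation, using made-up bit values in place of the real DQ definitions:

import datetime
import numpy as np

SATURATED, COSMIC_RAY = 4, 8                      # assumed bit values, not DQ's
latency_window = datetime.timedelta(seconds=120.0)

times = [datetime.datetime(2020, 1, 1, 0, 0, s) for s in (0, 60, 300)]
masks = [np.array([[SATURATED, 0]]),
         np.zeros((1, 2), dtype=int),
         np.zeros((1, 2), dtype=int)]

for i, t in enumerate(times):
    # Frames taken shortly before this one may leave latent bright pixels
    earlier = [m for m, t_prev in zip(masks, times)
               if t_prev < t and t - t_prev < latency_window]
    for prev_mask in earlier:
        latent = np.where(prev_mask & SATURATED, COSMIC_RAY, 0)
        masks[i] = masks[i] | latent              # OR the latency flag into the DQ
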
Example #42
    def scaleByIntensity(self, adinputs=None, **params):
        """
        This primitive scales input images to the mean value of the first
        image. It is intended to be used to scale flats to the same
        level before stacking.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        ref_mean = None
        for ad in adinputs:
            # If this input hasn't been tiled at all, tile it
            ad_for_stats = self.tileArrays([deepcopy(ad)], tile_all=False)[0] \
                if len(ad)>3 else ad

            # Use CCD2, or the entire mosaic if we can't find a second extn
            try:
                data = ad_for_stats[1].data
            except IndexError:
                data = ad_for_stats[0].data

            # Take off 5% of the width as a border
            xborder = max(int(0.05 * data.shape[1]), 20)
            yborder = max(int(0.05 * data.shape[0]), 20)
            log.fullinfo("Using data section [{}:{},{}:{}] from CCD2 for "
                         "statistics".format(xborder, data.shape[1] - xborder,
                                             yborder, data.shape[0] - yborder))

            stat_region = data[yborder:-yborder, xborder:-xborder]
            mean = np.mean(stat_region)

            # Set reference level to the first image's mean
            if ref_mean is None:
                ref_mean = mean
            scale = ref_mean / mean

            # Log and save the scale factor, and multiply by it
            log.fullinfo("Relative intensity for {}: {:.3f}".format(
                ad.filename, scale))
            ad.phu.set("RELINT",
                       scale,
                       comment=self.keyword_comments["RELINT"])
            ad.multiply(scale)

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=params["suffix"], strip=True)
        return adinputs
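
In plain NumPy the scaling logic reduces to the sketch below (image sizes and levels invented):

import numpy as np

frames = [np.random.normal(level, 5.0, (200, 300)) for level in (100.0, 150.0, 80.0)]

ref_mean = None
for data in frames:
    yborder = max(int(0.05 * data.shape[0]), 20)
    xborder = max(int(0.05 * data.shape[1]), 20)
    mean = data[yborder:-yborder, xborder:-xborder].mean()
    if ref_mean is None:
        ref_mean = mean                  # first frame sets the reference level
    data *= ref_mean / mean              # in-place scaling, like ad.multiply()
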
Example #43
    def standardizeInstrumentHeaders(self, adinputs=None, **params):
        """
        This primitive is used to make the changes and additions to the
        keywords in the headers of F2 data, specifically.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by "
                            "standardizeInstrumentHeaders".format(ad.filename))
                continue

            # Standardize the headers of the input AstroData object. Update the
            # keywords in the headers that are specific to FLAMINGOS-2.
            log.status("Updating keywords that are specific to FLAMINGOS-2")

            # Filter name (required for IRAF?)
            ad.phu.set('FILTER', ad.filter_name(stripID=True, pretty=True),
                       self.keyword_comments['FILTER'])

            # Pixel scale (CJS: I'm putting this in the extension too!)
            pixel_scale = ad.pixel_scale()
            ad.phu.set('PIXSCALE', pixel_scale,
                       self.keyword_comments['PIXSCALE'])
            ad.hdr.set('PIXSCALE', pixel_scale,
                       self.keyword_comments['PIXSCALE'])

            for desc in ('read_noise', 'gain', 'non_linear_level',
                         'saturation_level'):
                kw = ad._keyword_for(desc)
                ad.hdr.set(kw,
                           getattr(ad, desc)()[0], self.keyword_comments[kw])
                try:
                    ad.phu.remove(kw)
                except (KeyError, AttributeError):
                    pass

            if 'SPECT' in ad.tags:
                kw = ad._keyword_for('dispersion_axis')
                ad.hdr.set(kw, 2, self.keyword_comments[kw])

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=params["suffix"], strip=True)
        return adinputs
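
For illustration only, the same keyword bookkeeping on a bare astropy.io.fits HDUList rather than an AstroData object (the pixel-scale value is made up):

from astropy.io import fits

hdul = fits.HDUList([fits.PrimaryHDU(), fits.ImageHDU(name='SCI')])
pixel_scale = 0.18                       # arcsec/pixel, hypothetical
for hdr in (hdul[0].header, hdul[1].header):
    hdr.set('PIXSCALE', pixel_scale, 'Pixel scale in arcsec/pixel')
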
Example #44
    def stackFrames(self, adinputs=None, **params):
        """
        Combines all the extensions in the input slit-viewer frame(s) into a
        single-extension AD instance.

        This primitive wraps the higher level
        :meth:`geminidr.core.primitives_stack.Stack.stackFrames` primitive,
        but rather than stacking separate files to form a combined file,
        it is used to stack the extensions within each slit viewer 'image'
        (collection of exposures).

        This primitive can accept the same parameter set as
        :meth:`geminidr.core.primitives_stack.Stack.stackFrames`.
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        # Keep hold of the first SLITV input's filename so the stacked
        # output has a sensible name.
        first_filename = None

        # CJS: This could be rewritten to produce one output AD for each
        # input AD by combining the extensions from each input separately.
        # That behaviour would not need the addToList/getList primitives.
        # But this is a better match to how the original code behaved.
        adoutputs = []
        extinputs = []
        for ad in adinputs:
            # CJS: Worth doing this check, I feel
            if 'SLITV' not in ad.tags:
                log.warning(
                    "{} is not a slit-viewer image. Continuing.".format(
                        ad.filename))
                adoutputs.append(ad)
                continue

            if not first_filename:
                first_filename = ad.phu['ORIGNAME'] or ad.filename
            # DQ plane is still needed so call stackFrames for ease
            # CJS: This is ugly but should go with pythonic stacking
            for index, ext in enumerate(ad, start=1):
                adext = deepcopy(ext)
                filename = filename_updater(ad, suffix='{:04d}'.format(index))
                adext.filename = filename
                adext.phu['ORIGNAME'] = filename
                extinputs.append(adext)

        adout = super(GHOSTSlit, self).stackFrames(extinputs, **params)[0]
        if first_filename:
            adout.phu['ORIGNAME'] = first_filename
        gt.mark_history(adout, primname=self.myself(), keyword=timestamp_key)
        adoutputs.append(adout)
        return adoutputs
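
Conceptually the primitive collapses the extensions of each slit-viewer 'image' into one frame; in plain NumPy that reduces to something like this sketch:

import numpy as np

# One slit-viewer "image": several short exposures stored as separate extensions
extensions = [np.random.poisson(100, (50, 50)).astype(float) for _ in range(5)]

# Collapse into a single 2-D frame; the real primitive delegates to stackFrames,
# which also carries the variance and DQ planes through
stacked = np.median(np.stack(extensions), axis=0)
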
Example #45
    def traceFootprints(self, rc):
        """
        This primitive will create and append a 'TRACEFP' binary table HDU to
        the AD object. The content of this HDU is the footprint information
        from the spectroscopic flat in the SCI array.

        :param logLevel: Verbosity setting for log messages to the screen.
        :type logLevel: integer from 0-6, 0=nothing to screen, 6=everything to
                        screen. OR the message level as a string (i.e.,
                        'critical', 'status', 'fullinfo'...)
        """
        # Instantiate the log
        log = logutils.get_logger(__name__)

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "traceFootprints", "starting"))

        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():
            # Check whether this primitive has been run previously
            if ad.phu_get_key_value("TRACEFP"):
                log.warning("%s has already been processed by traceFootprints"
                            % ad.filename)
                # Append the input AstroData object to the list of output
                # AstroData objects without further processing
                adoutput_list.append(ad)
                continue
            # Call the  user level function
            try:
                adout = trace_footprints(ad,
                                         function=rc["function"],
                                         order=rc["order"],
                                         trace_threshold=rc["trace_threshold"])
            except Exception:
                log.warning("Error in traceFootprints with file: %s" %
                            ad.filename)
                continue

            # Change the filename
            adout.filename = gt.filename_updater(adinput=ad,
                                                 suffix=rc["suffix"],
                                                 strip=True)

            # Append the output AstroData object to the list of output
            # AstroData objects.
            adoutput_list.append(adout)

        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)

        yield rc
Example #46
    def traceFootprints(self, rc):
        """
        This primitive will create and append a 'TRACEFP' binary table HDU to
        the AD object. The content of this HDU is the footprint information
        from the spectroscopic flat in the SCI array.

        :param logLevel: Verbosity setting for log messages to the screen.
        :type logLevel: integer from 0-6, 0=nothing to screen, 6=everything to
                        screen. OR the message level as a string (i.e.,
                        'critical', 'status', 'fullinfo'...)
        """
        # Instantiate the log
        log = logutils.get_logger(__name__)
        
        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "traceFootprints", "starting"))

        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():
            # Check whether this primitive has been run previously
            if ad.phu_get_key_value("TRACEFP"):
                log.warning("%s has already been processed by traceFootprints"
                            % ad.filename)
                # Append the input AstroData object to the list of output
                # AstroData objects without further processing
                adoutput_list.append(ad)
                continue
            # Call the user level function
            try:
                adout = trace_footprints(ad, function=rc["function"],
                                         order=rc["order"],
                                         trace_threshold=rc["trace_threshold"])
            except Exception:
                log.warning("Error in traceFootprints with file: %s" % ad.filename)
                continue

            # Change the filename
            adout.filename = gt.filename_updater(adinput=ad, 
                                                 suffix=rc["suffix"],
                                                 strip=True)

            # Append the output AstroData object to the list of output 
            # AstroData objects.
            adoutput_list.append(adout)

        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)

        yield rc
Example #47
    def makeFringe(self, adinputs=None, **params):
        """
        This primitive performs the bookkeeping related to the construction of
        a GMOS fringe frame. The pixel manipulation is left to makeFringeFrame

        Parameters
        ----------
        subtract_median_image: bool
            subtract a median image before finding fringes?
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        # Exit without doing anything if any of the inputs are inappropriate
        if not all(self._needs_fringe_correction(ad) for ad in adinputs):
            return adinputs
        if len(set(ad.filter_name(pretty=True) for ad in adinputs)) > 1:
            log.warning("Mismatched filters in input; not making fringe frame")
            return adinputs

        # Fringing on Cerro Pachon is generally stronger than on Maunakea.
        # A SExtractor mask alone is usually sufficient for GN data, but GS
        # data need to be median-subtracted to distinguish fringes from objects
        fringe_params = self._inherit_params(params, "makeFringeFrame", pass_suffix=True)

        # Detect sources in order to get an OBJMASK. Doing it now will aid
        # efficiency by putting the OBJMASK-added images in the list.
        # NB. If we're subtracting the median image, detectSources() has to
        # be run again anyway, so don't do it here.
        # NB2. We don't want to edit adinputs at this stage
        fringe_adinputs = (adinputs if fringe_params["subtract_median_image"]
                           else [ad if any(hasattr(ext, 'OBJMASK') for ext in ad)
                                 else self.detectSources([ad])[0]
                                 for ad in adinputs])

        # Add this frame to the list and get the full list (QA only)
        if "qa" in self.mode:
            self.addToList(fringe_adinputs, purpose='forFringe')
            fringe_adinputs = self.getList(purpose='forFringe')

        if len(fringe_adinputs) < 3:
            log.stdinfo("Fewer than 3 frames provided as input. "
                        "Not making fringe frame.")
            return adinputs

        # We have the required inputs to make a fringe frame
        fringe = self.makeFringeFrame(fringe_adinputs, **fringe_params)
        # Store the result and put the output in the "fringe" stream
        self.storeProcessedFringe(fringe)
        self.streams.update({'fringe': fringe})

        # We now return *all* the input images that required fringe correction
        # so they can all be fringe corrected
        return fringe_adinputs
Example #48
    def cutFootprints(self, rc):
        """
        This primitive will create and append multiple HDUs to the output
        AD object. Each HDU corresponds to a rectangular cut containing a
        slit from a MOS flat exposure or an XD flat exposure, as in the
        GNIRS case.

        :param logLevel: Verbosity setting for log messages to the screen.
        :type logLevel: integer from 0-6, 0=nothing to screen, 6=everything to
                        screen. OR the message level as a string (i.e.,
                        'critical', 'status', 'fullinfo'...)
        """

        # Instantiate the log
        log = logutils.get_logger(__name__)

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "cutFootprints", "starting"))

        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():
            # Check that the input ad has the TRACEFP extension;
            # otherwise, create it by calling the user level function.
            if ad['TRACEFP'] is None:
                ad = trace_footprints(ad)

            log.stdinfo("Cutting footprints for: %s" % ad.filename)
            try:
                adout = cut_footprints(ad)
            except Exception:
                log.error("Error in cut_footprints with file: %s" % ad.filename)
                # DO NOT add this input ad to the adoutput_list
                continue

            # Change the filename
            adout.filename = gt.filename_updater(adinput=ad,
                                                 suffix=rc["suffix"],
                                                 strip=True)

            # Append the output AstroData object to the list of output
            # AstroData objects.
            adoutput_list.append(adout)

        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)

        yield rc
Example #49
    def validateData(self, rc):
        """
        This primitive is used to validate NIRI data, specifically.

        :param repair: Set to True to repair the data, if necessary. Note: this
                       feature does not work yet.
        :type repair: Python boolean
        """
        # Instantiate the log
        log = logutils.get_logger(__name__)

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "validateData", "starting"))

        # Define the keyword to be used for the time stamp for this primitive
        timestamp_key = self.timestamp_keys["validateData"]

        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():

            # Check whether the validateData primitive has been run previously
            if ad.phu_get_key_value(timestamp_key):
                log.warning("No changes will be made to %s, since it has "
                            "already been processed by validateData"
                            % ad.filename)

                # Append the input AstroData object to the list of output
                # AstroData objects without further processing
                adoutput_list.append(ad)
                continue

            # Validate the input AstroData object.
            log.status("No validation required for %s" % ad.filename)

            # Add the appropriate time stamps to the PHU
            gt.mark_history(adinput=ad, keyword=timestamp_key)

            # Change the filename
            ad.filename = gt.filename_updater(adinput=ad, suffix=rc["suffix"],
                                              strip=True)

            # Append the output AstroData object to the list of output
            # AstroData objects
            adoutput_list.append(ad)

        # Report the list of output AstroData objects to the reduction context
        rc.report_output(adoutput_list)

        yield rc
Example #50
    def addIllumMaskToDQ(self, adinputs=None, suffix=None, illum_mask=None):
        """
        Adds an illumination mask to each AD object

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        illum_mask: str/None
            name of illumination mask (None -> use default)
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        # Getting all the filenames first prevents reopening the same file
        # for each science AD
        if illum_mask is None:
            illum_mask = [self._get_illum_mask_filename(ad) for ad in adinputs]

        for ad, illum in zip(
                *gt.make_lists(adinputs, illum_mask, force_ad=True)):
            if ad.phu.get(timestamp_key):
                log.warning(
                    'No changes will be made to {}, since it has '
                    'already been processed by addIllumMaskToDQ'.format(
                        ad.filename))
                continue

            if illum is None:
                # So it can be zipped with the AD
                final_illum = [None] * len(ad)
            else:
                log.fullinfo("Using {} as illumination mask".format(
                    illum.filename))
                final_illum = gt.clip_auxiliary_data(ad,
                                                     aux=illum,
                                                     aux_type='bpm',
                                                     return_dtype=DQ.datatype)

            for ext, illum_ext in zip(ad, final_illum):
                if illum_ext is not None:
                    # Ensure we're only adding the unilluminated bit
                    iext = np.where(illum_ext.data > 0, DQ.unilluminated,
                                    0).astype(DQ.datatype)
                    ext.mask = iext if ext.mask is None else ext.mask | iext

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)

        return adinputs
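
A minimal sketch of the bit arithmetic, with an invented bit value standing in for DQ.unilluminated:

import numpy as np

UNILLUMINATED = 64                                # assumed bit value
illum = np.array([[0, 1], [1, 0]])                # >0 where the detector is unilluminated
dq = np.array([[1, 0], [0, 0]], dtype=np.uint16)  # existing DQ plane

iext = np.where(illum > 0, UNILLUMINATED, 0).astype(np.uint16)
dq = iext if dq is None else dq | iext            # mirrors the primitive's update
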
Example #51
    def addObjectMaskToDQ(self, rc):
        """
        This primitive combines the object mask in a OBJMASK extension
        into the DQ plane
        """
        
        # Instantiate the log
        log = gemLog.getGeminiLog(logType=rc["logType"],
                                  logLevel=rc["logLevel"])
        
        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "addObjectMaskToDQ", "starting"))
        
        # Define the keyword to be used for the time stamp for this primitive
        timestamp_key = self.timestamp_keys["addObjectMaskToDQ"]

        # Initialize the list of output AstroData objects
        adoutput_list = []
        
        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():
            
            for sciext in ad["SCI"]:
                extver = sciext.extver()
                dqext = ad["DQ",extver]
                mask = ad["OBJMASK",extver]
                if mask is None:
                    log.warning("No object mask present for %s[SCI,%d]; "
                                "cannot apply object mask" %
                                (ad.filename, extver))
                else:
                    if dqext is not None:
                        ad["DQ",extver].data = dqext.data | mask.data
                    else:
                        dqext = deepcopy(mask)
                        dqext.rename_ext("DQ",extver)
                        ad.append(dqext)

            # Change the filename
            ad.filename = gt.filename_updater(adinput=ad, suffix=rc["suffix"], 
                                              strip=True)
            
            # Append the output AstroData object to the list 
            # of output AstroData objects
            adoutput_list.append(ad)
        
        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)
        
        yield rc
Example #52
    def wcalResampleToLinearCoords(self, rc):
        """
        Resample the input image onto linear wavelength coordinates using the
        wavelength calibration (WAVECAL) fit_image solution.
        """
        # Instantiate the log
        log = logutils.get_logger(__name__)
        
        # Define the keyword to be used for the time stamp
        timestamp_key = self.timestamp_keys["wcalResampleToLinearCoords"]

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "wcalResampleToLinearCoords", 
                                 "starting"))
                
        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():

            # Check for a wavelength solution
            if ad["WAVECAL"] is None:
                if "qa" in rc.context:
                    log.warning("No wavelength solution found for %s" %
                                ad.filename)

                    adout=ad   # Don't do anything 
                else:
                    raise Errors.InputError("No wavelength solution found "\
                                            "for %s" % ad.filename)
            else:
                # Wavelength solution found. 
                wc = Wavecal(ad)
                wc.read_wavecal_table()
                adout = wc.resample_image_asAstrodata()

            # Add the appropriate time stamps to the PHU
            gt.mark_history(adinput=adout, keyword=timestamp_key)

            # Change the filename
            adout.filename = gt.filename_updater(
                adinput=adout, suffix=rc["suffix"], strip=True)
            
            # Append the output AstroData object to the list
            # of output AstroData objects
            adoutput_list.append(adout)
        
        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)
        
        yield rc
    def determineWavelengthSolution(self, rc):

        # Instantiate the log
        log = logutils.get_logger(__name__)
        
        # Define the keyword to be used for the time stamp
        timestamp_key = self.timestamp_keys["determineWavelengthSolution"]

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "determineWavelengthSolution",
                                 "starting"))
                
        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():

            # Instantiate ETI and then run the task 
            # Run in a try/except because gswavelength sometimes fails
            # badly, and we want to be able to continue without
            # wavelength calibration in the QA case
            gswavelength_task = eti.gswavelengtheti.GswavelengthETI(rc,ad)
            try:
                adout = gswavelength_task.run()
            except Errors.OutputError:
                gswavelength_task.clean()
                if "qa" in rc.context:
                    log.warning("gswavelength failed for input " + ad.filename)
                    adoutput_list.append(ad)
                    continue
                else:
                    raise Errors.ScienceError("gswavelength failed for input "+
                                              ad.filename + ". Try interactive"+
                                              "=True")

            # Add the appropriate time stamps to the PHU
            gt.mark_history(adinput=adout, keyword=timestamp_key)

            # Change the filename
            adout.filename = gt.filename_updater(
                adinput=adout, suffix=rc["suffix"], strip=True)
            
            # Append the output AstroData object to the list
            # of output AstroData objects
            adoutput_list.append(adout)
        
        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)

        yield rc
    def skyCorrectFromSlit(self, rc):

        # Instantiate the log
        log = logutils.get_logger(__name__)
        
        # Define the keyword to be used for the time stamp
        timestamp_key = self.timestamp_keys["skyCorrectFromSlit"]

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "skyCorrectFromSlit", "starting"))
                
        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Loop over each input AstroData object in the input list
        for ad in rc.get_inputs_as_astrodata():

            try:
                xbin = ad.detector_x_bin().as_pytype()
                ybin = ad.detector_y_bin().as_pytype()
                bin_factor = xbin * ybin
                roi = ad.detector_roi_setting().as_pytype()
            except Exception:
                bin_factor = 1
                roi = "unknown"

            if bin_factor <= 2 and roi == "Full Frame" and "qa" in rc.context:
                log.warning("Frame is too large to subtract sky efficiently; "
                            "not subtracting sky for %s" % ad.filename)
                adoutput_list.append(ad)
                continue

            # Instantiate ETI and then run the task 
            gsskysub_task = eti.gsskysubeti.GsskysubETI(rc,ad)
            adout = gsskysub_task.run()

            # Add the appropriate time stamps to the PHU
            gt.mark_history(adinput=adout, keyword=timestamp_key)

            # Change the filename
            adout.filename = gt.filename_updater(
                adinput=adout, suffix=rc["suffix"], strip=True)
            
            # Append the output AstroData object to the list
            # of output AstroData objects
            adoutput_list.append(adout)
        
        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)
        
        yield rc
Example #55
    def standardizeInstrumentHeaders(self, adinputs=None, **params):
        """
        This primitive is used to make the changes and additions to the
        keywords in the headers of F2 data, specifically.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by "
                            "standardizeInstrumentHeaders".format(ad.filename))
                continue

            # Standardize the headers of the input AstroData object. Update the
            # keywords in the headers that are specific to FLAMINGOS-2.
            log.status("Updating keywords that are specific to FLAMINGOS-2")

            # Filter name (required for IRAF?)
            ad.phu.set('FILTER', ad.filter_name(stripID=True, pretty=True),
                       self.keyword_comments['FILTER'])

            # Pixel scale (CJS: I'm putting this in the extension too!)
            pixel_scale = ad.pixel_scale()
            ad.phu.set('PIXSCALE', pixel_scale, self.keyword_comments['PIXSCALE'])
            ad.hdr.set('PIXSCALE', pixel_scale, self.keyword_comments['PIXSCALE'])

            for desc in ('read_noise', 'gain', 'non_linear_level',
                         'saturation_level'):
                kw = ad._keyword_for(desc)
                ad.hdr.set(kw, getattr(ad, desc)()[0], self.keyword_comments[kw])
                try:
                    ad.phu.remove(kw)
                except (KeyError, AttributeError):
                    pass

            if 'SPECT' in ad.tags:
                kw = ad._keyword_for('dispersion_axis')
                ad.hdr.set(kw, 2, self.keyword_comments[kw])

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=params["suffix"], strip=True)
        return adinputs
Example #56
    def validateData(self, adinputs=None, suffix=None):
        """
        This is the data validation primitive. It checks that the instrument
        matches the primitivesClass and that the correct number of extensions
        is present.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        """
        log = self.log
        timestamp_key = self.timestamp_keys[self.myself()]
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        for ad in adinputs:
            if ad.phu.get(timestamp_key):
                log.warning("No changes will be made to {}, since it has "
                            "already been processed by validateData".
                            format(ad.filename))
                continue

            # Check that the input is appropriate for this primitivesClass
            # Only the instrument is checked
            inst_name = ad.instrument(generic=True)
            if inst_name not in self.tagset:
                prim_class_name = self.__class__.__name__
                raise IOError("Input file {} is {} data and not suitable for "
                    "{} class".format(ad.filename, inst_name, prim_class_name))

            # Report if this is an image without square binned pixels
            if 'IMAGE' in ad.tags:
                xbin = ad.detector_x_bin()
                ybin = ad.detector_y_bin()
                if xbin != ybin:
                    log.warning("Image {} is {} x {} binned data".
                                format(ad.filename, xbin, ybin))

            if self._has_valid_extensions(ad):
                log.fullinfo("The input file has been validated: {} contains "
                             "{} extension(s)".format(ad.filename, len(ad)))
            else:
                raise IOError("The number of extensions ({}) in {} does not "
                              "match the number expected in raw {} "
                              "data.".format(len(ad), ad.filename, inst_name))

            # Timestamp and update filename
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)
            ad.update_filename(suffix=suffix, strip=True)
        return adinputs
Example #57
    def makeFringeFrame(self, adinputs=None, **params):
        """
        Make a fringe frame from a list of images.

        Parameters
        ----------
        suffix: str
            suffix to be added to output files
        subtract_median_image: bool
            if True, create and subtract a median image before object
            detection as a first-pass fringe removal

        """
        log = self.log
        log.debug(gt.log_message("primitive", self.myself(), "starting"))

        if len(adinputs) < 3:
            log.stdinfo('Fewer than 3 frames provided as input. '
                        'Not making fringe frame.')
            return []

        adinputs = self.correctBackgroundToReference(
            [deepcopy(ad) for ad in adinputs], suffix='_bksub',
            remove_background=True, separate_ext=False)

        # If needed, construct a median image and subtract from all frames to
        # do a first-order fringe removal and hence better detect real objects
        if params["subtract_median_image"]:
            median_image = self.stackFrames(adinputs, scale=False,
                            zero=False, operation="median",
                            reject_method="minmax", nlow=0, nhigh=1)
            if len(median_image) > 1:
                raise ValueError("Problem with creating median image")
            median_image = median_image[0]
            for ad in adinputs:
                ad.subtract(median_image)
            adinputs = self.detectSources(adinputs,
                        **self._inherit_params(params, "detectSources"))
            for ad in adinputs:
                ad.add(median_image)

        # Add object mask to DQ plane and stack with masking
        # separate_ext is irrelevant unless (scale or zero) but let's be explicit
        adinputs = self.stackSkyFrames(
            adinputs, mask_objects=True, separate_ext=False, scale=False,
            zero=False,
            **self._inherit_params(params, "stackSkyFrames", pass_suffix=True))
        if len(adinputs) > 1:
            raise ValueError("Problem with stacking fringe frames")

        return adinputs
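
The subtract-median/detect/add-back trick can be sketched with plain arrays (random data, purely illustrative):

import numpy as np

frames = [np.random.normal(0.0, 1.0, (100, 100)) for _ in range(4)]

# First-pass fringe estimate: the per-pixel median across frames
median_image = np.median(np.stack(frames), axis=0)

# Detect objects on median-subtracted frames, then restore the fringe signal
# so it is still present when the object-masked frames are stacked
cleaned = [f - median_image for f in frames]
restored = [c + median_image for c in cleaned]
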
Example #58
    def makeIRAFCompatible(self, adinputs=None):
        """
        Add keywords to make the pipeline-processed file compatible
        with the tasks in the Gemini IRAF package.
        """
        log = self.log
        log.debug(gt.log_message('primitive', self.myself(), 'starting'))
        timestamp_key = self.timestamp_keys[self.myself()]

        for ad in adinputs:
            irafcompat.pipeline2iraf(ad)
            gt.mark_history(ad, primname=self.myself(), keyword=timestamp_key)

        return adinputs