Example #1
def f2_fplen(ad):
    """ Calculates the minimum (x1) and maximum (x2) footprint
        extend from the slit position for F2 MOS data
        (This code is a take from f2cut.cl)
    """
    from astrodata import Lookups

    yoffset_delta = Lookups.get_lookup_table('Gemini/F2/F2offsets.py',
                                         'yoffset_delta')
    filter_table = Lookups.get_lookup_table('Gemini/F2/F2offsets.py',
                                         'filter_table')

    header = ad.phu.header

    if 'grism' in header:
        grism = header['grism']
    elif 'grismpos' in header:
        grism = header['grismpos']
    else:
        raise KeyError('Keyword "GRISM" nor "GRISMPOS" not found in PHU.')

    if 'filter' in header:
        filter = header['filter']
    else:
        raise KeyError('Keyword "FILTER" not found in PHU.')

    if 'mospos' in header:
        slit = header['mospos'][:-1]      # All chars but last.
    else:
        raise KeyError('Keyword "MOSPOS" not found in PHU.')

    # Get the tuple value (yoffset,delta)
    yoffset,delta = yoffset_delta[(grism, filter, slit)]

    # From filter_table calculates filter_width based on cuton50 and cutoff50.
    filt_names=['center','width','cuton80','cutoff80','cuton50',
                'cutoff50','transmission']
    filter_lower = filter_table[filter][filt_names.index('cuton50')]
    filter_upper = filter_table[filter][filt_names.index('cutoff50')]
    filter_width = filter_upper - filter_lower

    # Rename delta value to orig_dispersion (more meaningful).
    orig_dispersion = delta 

    # The dispersion is required in microns, but the value
    # in the tuple (yoffset,delta) is in (negative) Angstroms
    dispersion = orig_dispersion / -10000
 
    # Form dictionary to output
    y1_off = yoffset - (filter_width / dispersion) / 2
    y2_off = yoffset + (filter_width / dispersion) / 2
    out = {'y1_off':y1_off,'y2_off':y2_off}
    return out
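
A minimal usage sketch for the function above. The file name is a made-up
placeholder for a real F2 MOS frame; f2_fplen itself only assumes an
AstroData object whose PHU carries the GRISM/FILTER/MOSPOS keywords:

from astrodata import AstroData

ad = AstroData('S20130101S0001.fits')   # hypothetical F2 MOS frame
offsets = f2_fplen(ad)
print offsets['y1_off'], offsets['y2_off']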
Example #2

    def init(self, rc):
        # Load the timestamp keyword dictionary that will be used to define
        # the keyword to be used for the time stamp for all the primitives
        # and user level functions. This only needs to be done once in the
        # highest level primitive due to primitive inheritance.
        self.timestamp_keys = Lookups.get_lookup_table(
            "Gemini/timestamp_keywords", "timestamp_keys")

        # Also load the standard comments for header keywords that will be
        # updated in the primitives
        self.keyword_comments = Lookups.get_lookup_table(
            "Gemini/keyword_comments", "keyword_comments")

        return 
Example #3

 def __init__(self):
     self.niriSpecDict = Lookups.get_lookup_table(
         "Gemini/NIRI/NIRISpecDict", "niriSpecDict")
     self.niriFilternameMapConfig = Lookups.get_lookup_table(
         "Gemini/NIRI/NIRIFilterMap", "niriFilternameMapConfig")
     self.nsappwave = Lookups.get_lookup_table(
         "Gemini/IR/nsappwavepp.fits", 1)
     
     filternamemap = {}
     for line in self.niriFilternameMapConfig:
         linefiltername = gmu.filternameFrom([line[1], line[2], line[3]])
         filternamemap.update({linefiltername:line[0]})
     self.niriFilternameMap = filternamemap
     
     GEMINI_DescriptorCalc.__init__(self)
Example #4

 def wavelength_band(self, dataset, **args):
     if "IMAGE" in dataset.types:
         # If imaging, associate the filter name with a central wavelength
         filter_table = Lookups.get_lookup_table(
             "Gemini/NIRI/NIRIFilterWavelength", "filter_wavelength")
         filter = str(dataset.filter_name(pretty=True))
         if filter in filter_table:
             ctrl_wave = filter_table[filter]
         else:
             raise Errors.TableKeyError()
     else:
         ctrl_wave = dataset.central_wavelength(asMicrometers=True)
     
     min_diff = None
     band = None
     
     for std_band, std_wave in self.std_wavelength_band.items():
         diff = abs(std_wave - ctrl_wave)
         if min_diff is None or diff < min_diff:
             min_diff = diff
             band = std_band
     
     if band is None:
         raise Errors.CalcError()
     else:
         ret_wavelength_band = band
     
     # Instantiate the return DescriptorValue (DV) object
     ret_dv = DescriptorValue(ret_wavelength_band, name="wavelength_band",
                              ad=dataset)
     return ret_dv
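
The loop above is a nearest-neighbour search over the standard band
centres. The same logic in isolation, with an assumed (illustrative)
table in micrometers:

std_wavelength_band = {'J': 1.25, 'H': 1.65, 'K': 2.20}   # illustrative

def nearest_band(ctrl_wave):
    # Return the band whose central wavelength is closest to ctrl_wave.
    return min(std_wavelength_band,
               key=lambda band: abs(std_wavelength_band[band] - ctrl_wave))

print nearest_band(2.12)   # -> K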
Example #5

def f2_appwave(ad,kwvalues):
    
    camera2wcs = Lookups.get_lookup_table('Gemini/F2/camera_f2spec.py',
                                         'camera2wcs')
    ss="%s,%s,%s,%s"%tuple(kwvalues)
    for extn in ['SCI','VAR','DQ']:
        if ad[extn] is None:
            continue
        for xad in ad[extn]:
            crval,cdelt = camera2wcs[ss] 
            update_wcs(xad,crval,cdelt)
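
The camera2wcs table is keyed by a comma-separated string built from four
header values; a sketch of the key construction with made-up values:

kwvalues = ('f/16', 'JH', 'JH_G0809', '2pix-slit')   # made-up values
ss = "%s,%s,%s,%s" % tuple(kwvalues)
print ss   # f/16,JH,JH_G0809,2pix-slit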
Example #6

 def nominal_photometric_zeropoint(self, dataset, **args):
     # Since this descriptor function accesses keywords in the headers of
     # the pixel data extensions, always construct a dictionary where the
     # key of the dictionary is an (EXTNAME, EXTVER) tuple
     ret_nominal_photometric_zeropoint = {}
     
     table = Lookups.get_lookup_table("Gemini/NIRI/Nominal_Zeropoints",
                                      "nominal_zeropoints")
     
     # Get the values of the gain, detector name and filter name using the
     # appropriate descriptors. Use as_pytype() to return the values as the
     # default python type rather than an object.
     gain = dataset.gain().as_pytype()
     camera = dataset.camera().as_pytype()
     filter_name = dataset.filter_name(pretty=True).as_pytype()
     
     if gain is None or camera is None or filter_name is None:
         # The descriptor functions return None if a value cannot be
         # found and stores the exception info. Re-raise the exception.
         # It will be dealt with by the CalculatorInterface.
         if hasattr(dataset, "exception_info"):
             raise dataset.exception_info
     
     # Get the value of the BUNIT keyword from the header of each pixel data
     # extension as a dictionary where the key of the dictionary is an
     # ("*", EXTVER) tuple 
     bunit_dict = gmu.get_key_value_dict(adinput=dataset, keyword="BUNIT")
     
     for ext_name_ver, bunit in bunit_dict.iteritems():
         # If bunit is "electron" or None, set the gain factor to 0.0 
         gain_factor = 0.0
         
         if bunit == "adu":
             gain_factor = 2.5 * math.log10(gain)
             
         nominal_zeropoint_key = (filter_name, camera)
         
         if nominal_zeropoint_key in table:
             nominal_photometric_zeropoint = (
                 table[nominal_zeropoint_key] - gain_factor)
         else:
             raise Errors.TableKeyError()
         
         # Update the dictionary with the nominal photometric zeropoint
         # value 
         ret_nominal_photometric_zeropoint.update({
             ext_name_ver:nominal_photometric_zeropoint})
     
     # Instantiate the return DescriptorValue (DV) object
     ret_dv = DescriptorValue(ret_nominal_photometric_zeropoint,
                              name="nominal_photometric_zeropoint",
                              ad=dataset)
     return ret_dv
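
The gain factor above converts a zeropoint tabulated for data in
electrons into one valid for data in ADU, zp_adu = zp_e - 2.5*log10(gain).
A quick check with made-up numbers:

import math

zp_electrons = 24.0   # made-up tabulated zeropoint
gain = 12.3           # made-up gain in e-/ADU
print "%.3f" % (zp_electrons - 2.5 * math.log10(gain))   # 21.275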
Example #7

def gnirs_appwave(ad,kwvalues):
    

    camera2delta = Lookups.get_lookup_table('Gemini/GNIRS/camera_gnirsspec.py',
                                         'camera2delta')
    camera2wcs = Lookups.get_lookup_table('Gemini/GNIRS/camera_gnirsspec.py',
                                         'camera2wcs')

    kws="%s,%s,%s,%s"%tuple(kwvalues)
    for extn in ['SCI','VAR','DQ']:
        if ad[extn] is None:
            continue
        for xad in ad[extn]:
            # NSCUTSPC is the spectrum order number 
            if xad.header.has_key('NSCUTSPC'):
               # Xdispersed data
               ss = kws+','+str(xad.header['NSCUTSPC'])
               crval,cdelt,t1,t2,t3 = camera2wcs[ss] 
            else:
               # Long Slit data
               ss = kws
               cdelt,resol,hi,lo = camera2delta[ss] 
               crval = xad.phu.header['waveleng']
            update_wcs(xad,crval,cdelt)
Example #8

def _get_static_bias_level_for_ext(adinput=None):
    """
    Determine the static bias level value from GMOS data.
    """

    # Get the static bias level lookup table
    gmosampsBias, gmosampsBiasBefore20060831 = Lookups.get_lookup_table(
        "Gemini/GMOS/GMOSAmpTables", "gmosampsBias",
        "gmosampsBiasBefore20060831")
    
    # Get the UT date, read speed setting and gain setting values using the
    # appropriate descriptors
    ut_date_dv = adinput.ut_date()
    gain_setting_dv = adinput.gain_setting()
    read_speed_setting_dv = adinput.read_speed_setting()
    
    # Get the name of the detector amplifier from the header of the specific
    # pixel data extension
    ampname = adinput.get_key_value("AMPNAME")
    ret_static_bias_level = None

    if not (ut_date_dv.is_none() and read_speed_setting_dv.is_none() and
            gain_setting_dv.is_none()) and ampname is not None:
        # Use as_pytype() to return the values as the default python type
        # rather than an object
        ut_date = str(adinput.ut_date())
        gain_setting = adinput.gain_setting().as_pytype()
        read_speed_setting = adinput.read_speed_setting().as_pytype()

        obs_ut_date = datetime(*strptime(ut_date, "%Y-%m-%d")[0:6])
        old_ut_date = datetime(2006, 8, 31, 0, 0)
        
        bias_key = (read_speed_setting, gain_setting, ampname)
        ret_static_bias_level = None

        if obs_ut_date > old_ut_date:
            if bias_key in gmosampsBias:
                ret_static_bias_level = gmosampsBias[bias_key]
        else:
            if bias_key in gmosampsBiasBefore20060831:
                ret_static_bias_level = gmosampsBiasBefore20060831[bias_key]
    
    return ret_static_bias_level
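
The date test above switches between the two amplifier tables at the
2006-08-31 boundary. A condensed sketch with dummy table contents (the
real tables live in Gemini/GMOS/GMOSAmpTables):

from datetime import datetime

new_table = {('slow', 'low', 'EEV 9273-16-03, right'): 690.0}   # dummy
old_table = {('slow', 'low', 'EEV 9273-16-03, right'): 720.0}   # dummy

def pick_bias(obs_ut_date, bias_key):
    cutover = datetime(2006, 8, 31)
    table = new_table if obs_ut_date > cutover else old_table
    return table.get(bias_key)

print pick_bias(datetime(2010, 1, 1),
                ('slow', 'low', 'EEV 9273-16-03, right'))   # 690.0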
Example #9
 def __init__(self, setref=None, storename=None):
     self.elements["user"] = getpass.getuser()
     self.storename = storename
     info = Lookups.compose_multi_table("*/warehouse_settings",
                                        "warehouse_elements",
                                        "shelf_addresses",
                                        "type_shelf_names",
                                        "type_store_precedence")
     # basic elements like root
     self.warehouse_elements = info["warehouse_elements"]
     # named storage templates
     self.shelf_addresses = info["shelf_addresses"]
     self._convert_shelf_addresses()
     # shelf_addresses/names to use for a given type
     self.type_shelf_names = info["type_shelf_names"]
     # the order to check the types, so some types override others
     # e.g. SETREF has a default storage location, a sort of misc. file
     # that should be overwritten by other packages
     self.type_store_precedence = info["type_store_precedence"]
     #print "fs20: ts_prec", self.type_store_precedence
     #print "fs21: sh_names", self.type_shelf_names
     if setref:
         self.elements_from_setref(setref)
Example #10

def _obtain_unbinned_arraygap(adinput):
    """
    This function was copied directly from primitives_GMOS.py, it
    should be refactored at some point in the future! It was tweaked
    to only return unbinned array gaps. 
    
    This function obtains the raw array gap size for the different GMOS
    detectors and returns it after correcting for binning. There are two
    values in the GMOSArrayGaps.py file in the GMOS
    lookup directory, one for unbinned data and one to be used to calculate
    the chip gap when the data are binned.
    """
    
    # Get the dictionary containing the CCD gaps
    all_arraygaps_dict = Lookups.get_lookup_table(
        "Gemini/GMOS/GMOSArrayGaps.py", "gmosArrayGaps")
    
    # Obtain the detector type for the adinput
    detector_type = adinput.phu_get_key_value("DETTYPE")
    
    # Check the read value
    if detector_type is None:
        if hasattr(adinput, "exception_info"):
            raise adinput.exception_info
        
    # We're only interested in the unbinned values for gacq
    binning = "unbinned"
        
    # Form the key
    key = (detector_type, binning)
    
    # Obtain the array gap value 
    if key in all_arraygaps_dict:
        arraygap = all_arraygaps_dict[key] 
    else:
        raise Errors.ScienceError("Array gap value not " +
                                  "found for %s" % (detector_type))
    return arraygap
Example #11

 def __init__(self):
     self.nifsArrayDict = Lookups.get_lookup_table(
         "Gemini/NIFS/NIFSArrayDict", "nifsArrayDict")
     self.nifsConfigDict = Lookups.get_lookup_table(
         "Gemini/NIFS/NIFSConfigDict", "nifsConfigDict")
     GEMINI_DescriptorCalc.__init__(self)
Example #12
                                for pkey in args.globalParams.keys():
                                    co.update({pkey:args.globalParams[pkey]})

                        # Remove after writeInt works properly
                        if args.writeInt:
                                co.update({"writeInt":True})

                        # Add the log level/name/mode to the global dict
                        co.update({'loglevel':args.loglevel})     
                        co.update({'logfile':args.logfile})       
                        co.update({'logmode':args.logmode})
                        co.update({'logindent':logutils.SW})

                        # Insert the calibration url dictionary; if given
                        # on the command line it will override the lookup
                        calurldict = Lookups.get_lookup_table("Gemini/calurl_dict",
                                                              "calurl_dict")
                        if args.cal_mgr:
                            calmgr_str = args.cal_mgr                        
                            if calmgr_str[7:12] == 'local':
                                calurldict.update({'LOCALCALMGR' : calmgr_str})
                            else:
                                calurldict.update({'CALMGR' : calmgr_str})
                        co.update({'calurl_dict':calurldict})
                        #print "REDUCE 721", co.report(internal_dict=True)

                        if useTK:
                            while not cw.bReady:
                                # This is hopefully not really needed; it
                                # gives the tk thread a chance to get running.
                                time.sleep(.1)
                            cw.new_control_window(rec,co)
Example #13

    def addDQ(self, rc):
        """
        This primitive is used to add a DQ extension to the input AstroData
        object. The value of a pixel in the DQ extension will be the sum of the
        following: (0=good, 1=bad pixel (found in bad pixel mask), 2=pixel is
        in the non-linear regime, 4=pixel is saturated). This primitive will
        trim the BPM to match the input AstroData object(s).
        
        :param bpm: The file name, including the full path, of the BPM(s) to be
                    used to flag bad pixels in the DQ extension. If only one
                    BPM is provided, that BPM will be used to flag bad pixels
                    in the DQ extension for all input AstroData object(s). If
                    more than one BPM is provided, the number of BPMs must
                    match the number of input AstroData objects. If no BPM is
                    provided, the primitive will attempt to determine an
                    appropriate BPM.
        :type bpm: string or list of strings
        """
        # Instantiate the log
        log = logutils.get_logger(__name__)
        
        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "addDQ", "starting"))
        
        # Define the keyword to be used for the time stamp for this primitive
        timestamp_key = self.timestamp_keys["addDQ"]
        
        # Initialize the list of output AstroData objects
        adoutput_list = []
        
        # Set the data type of the data quality array. It can be uint8 for
        # now; it will get converted up as we assign higher bit values, so we
        # shouldn't need to force it to 16bpp yet.
        dq_dtype = np.dtype(np.uint8)
        #dq_dtype = np.dtype(np.uint16)
        
        # Get the input AstroData objects
        adinput = rc.get_inputs_as_astrodata()
        
        # Loop over each input AstroData object in the input list
        for ad in adinput:
            
            # Check whether the addDQ primitive has been run previously
            if ad.phu_get_key_value(timestamp_key):
                log.warning("No changes will be made to %s, since it has "
                            "already been processed by addDQ" % ad.filename)
                
                # Append the input AstroData object to the list of output
                # AstroData objects without further processing
                adoutput_list.append(ad)
                continue
            
            # Parameters specified on the command line to reduce are converted
            # to strings, including None
            ##M What about if a user doesn't want to add a BPM at all?
            ##M Are None's not converted to Nonetype from the command line?
            if rc["bpm"] and rc["bpm"] != "None":
                # The user supplied an input to the bpm parameter
                bpm = rc["bpm"]
            else:
                # The user did not supply an input to the bpm parameter, so try
                # to find an appropriate one. Get the dictionary containing the
                # list of BPMs for all instruments and modes.
                all_bpm_dict = Lookups.get_lookup_table("Gemini/BPMDict",
                                                        "bpm_dict")
                
                # Call the _get_bpm_key helper function to get the key for the
                # lookup table 
                key = self._get_bpm_key(ad)
                
                # Get the appropriate BPM from the look up table
                if key in all_bpm_dict:
                    bpm = lookup_path(all_bpm_dict[key])
                else:
                    bpm = None
                    log.warning("No BPM found for %s, no BPM will be "
                                "included" % ad.filename)

            # Ensure that the BPMs are AstroData objects
            bpm_ad = None
            if bpm is not None:
                log.fullinfo("Using %s as BPM" % str(bpm))
                if isinstance(bpm, AstroData):
                    bpm_ad = bpm
                else:
                    bpm_ad = AstroData(bpm)
                    ##M Do we want to fail here depending on context?
                    if bpm_ad is None:
                        log.warning("Cannot convert %s into an AstroData "
                                    "object, no BPM will be added" % bpm)

            final_bpm = None
            if bpm_ad is not None:
                # Clip the BPM data to match the size of the input AstroData
                # object science and pad with overscan region, if necessary
                final_bpm = gt.clip_auxiliary_data(adinput=ad, aux=bpm_ad,
                                                   aux_type="bpm")[0]

            # Get the non-linear level and the saturation level using the
            # appropriate descriptors - Individual values get checked in the
            # next loop 
            non_linear_level_dv = ad.non_linear_level()
            saturation_level_dv = ad.saturation_level()

            # Loop over each science extension in each input AstroData object
            for ext in ad[SCI]:
                
                # Retrieve the extension number for this extension
                extver = ext.extver()
                
                # Check whether an extension with the same name as the DQ
                # AstroData object already exists in the input AstroData object
                if ad[DQ, extver]:
                    log.warning("A [%s,%d] extension already exists in %s"
                                % (DQ, extver, ad.filename))
                    continue
                
                # Get the non-linear level and the saturation level for this
                # extension
                non_linear_level = non_linear_level_dv.get_value(extver=extver)
                saturation_level = saturation_level_dv.get_value(extver=extver)

                # To store individual arrays created for each of the DQ bit
                # types
                dq_bit_arrays = []

                # Create an array that contains pixels that have a value of 2
                # when that pixel is in the non-linear regime in the input
                # science extension
                if non_linear_level is not None:
                    non_linear_array = None
                    if saturation_level is not None:
                        # Test the saturation level against non_linear level
                        # They can be the same or the saturation level can be
                        # greater than but not less than the non-linear level.
                        # If they are the same then only flag saturated pixels
                        # below. This just means not creating an unneccessary
                        # intermediate array.
                        if saturation_level > non_linear_level:
                            log.fullinfo("Flagging pixels in the DQ extension "
                                         "corresponding to non linear pixels "
                                         "in %s[%s,%d] using non linear "
                                         "level = %.2f" % (ad.filename, SCI,
                                                           extver,
                                                           non_linear_level))

                            non_linear_array = np.where(
                                ((ext.data >= non_linear_level) &
                                (ext.data < saturation_level)), 2, 0)
                            
                        elif saturation_level < non_linear_level:
                            log.warning("%s[%s,%d] saturation_level value is"
                                        "less than the non_linear_level not"
                                        "flagging non linear pixels" %
                                        (ad.filname, SCI, extver))
                        else:
                            log.fullinfo("Saturation and non-linear values "
                                         "for %s[%s,%d] are the same. Only "
                                         "flagging saturated pixels."
                                         % (ad.filename, SCI, extver))
                            
                    else:
                        log.fullinfo("Flagging pixels in the DQ extension "
                                     "corresponding to non linear pixels "
                                     "in %s[%s,%d] using non linear "
                                     "level = %.2f" % (ad.filename, SCI, extver,
                                                       non_linear_level))

                        non_linear_array = np.where(
                            (ext.data >= non_linear_level), 2, 0)
                    
                    dq_bit_arrays.append(non_linear_array)

                # Create an array that contains pixels that have a value of 4
                # when that pixel is saturated in the input science extension
                if saturation_level is not None:
                    saturation_array = None
                    log.fullinfo("Flagging pixels in the DQ extension "
                                 "corresponding to saturated pixels in "
                                 "%s[%s,%d] using saturation level = %.2f" %
                                 (ad.filename, SCI, extver, saturation_level))
                    saturation_array = np.where(
                        ext.data >= saturation_level, 4, 0)
                    dq_bit_arrays.append(saturation_array)
                
                # BPMs have an EXTNAME equal to DQ
                bpmname = None
                if final_bpm is not None:
                    bpm_array = None
                    bpmname = os.path.basename(final_bpm.filename)
                    log.fullinfo("Flagging pixels in the DQ extension "
                                 "corresponding to bad pixels in %s[%s,%d] "
                                 "using the BPM %s[%s,%d]" %
                                 (ad.filename, SCI, extver, bpmname, DQ, extver))
                    bpm_array = final_bpm[DQ, extver].data
                    dq_bit_arrays.append(bpm_array)
                
                # Create a single DQ extension from the three arrays (BPM,
                # non-linear and saturated)
                if not dq_bit_arrays:
                    # The BPM, non-linear and saturated arrays were not
                    # created. Create a single DQ array with all pixels set
                    # equal to 0 
                    log.fullinfo("The BPM, non-linear and saturated arrays "
                                 "were not created. Creating a single DQ "
                                 "array with all the pixels set equal to zero")
                    final_dq_array = np.zeros(ext.data.shape).astype(dq_dtype)

                else:
                    final_dq_array = self._bitwise_OR_list(dq_bit_arrays)
                    final_dq_array = final_dq_array.astype(dq_dtype)
                
                # Create a data quality AstroData object
                dq = AstroData(data=final_dq_array)
                dq.rename_ext(DQ, ver=extver)
                dq.filename = ad.filename
                
                # Call the _update_dq_header helper function to update the
                # header of the data quality extension with some useful
                # keywords
                dq = self._update_dq_header(sci=ext, dq=dq, bpmname=bpmname)
                
                # Append the DQ AstroData object to the input AstroData object
                log.fullinfo("Adding extension [%s,%d] to %s"
                             % (DQ, extver, ad.filename))
                ad.append(moredata=dq)
            
            # Add the appropriate time stamps to the PHU
            gt.mark_history(adinput=ad, keyword=timestamp_key)
            
            # Change the filename
            ad.filename = gt.filename_updater(adinput=ad, suffix=rc["suffix"],
                                              strip=True)
            
            # Append the output AstroData object to the list of output
            # AstroData objects
            adoutput_list.append(ad)

        # Report the list of output AstroData objects to the reduction context
        rc.report_output(adoutput_list)
        
        yield rc
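
The per-bit arrays built above are merged by _bitwise_OR_list (its
implementation is not shown here); the effect is a plain bitwise OR, so a
single pixel can carry several flags at once. A sketch:

import numpy as np

non_linear = np.array([0, 2, 0, 2], dtype=np.uint8)
saturated  = np.array([0, 0, 4, 4], dtype=np.uint8)
bad_pixel  = np.array([1, 0, 0, 0], dtype=np.uint8)

final_dq = np.bitwise_or.reduce([non_linear, saturated, bad_pixel])
print final_dq   # [1 2 4 6] -- 6 = non-linear (2) | saturated (4)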
Example #14
#! /usr/bin/env python
import sys, os
import time

import numpy as np
from matplotlib import pyplot as pl
import pyfits as pf

from astrodata import AstroData, new_pyfits_version
from astrodata import Lookups

from gempy.library import gfit
from gempy.adlibrary import segmentation as seg

# Load the timestamp keyword dictionary.
timestamp_keys = Lookups.get_lookup_table("Gemini/timestamp_keywords",
                                          "timestamp_keys")
def print_timing(func):
    def wrapper(*arg,**kargs):
        t1 = time.time()
        res = func(*arg,**kargs)
        t2 = time.time()
        print '%s took %0.3fs' % (func.func_name, (t2-t1))
        return res
    return wrapper
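
Applying the decorator is a one-liner; a toy example (the function and
the timing shown are illustrative only):

@print_timing
def slow_sum(n):
    return sum(xrange(n))

slow_sum(10 ** 6)   # prints something like: slow_sum took 0.045s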

def trace_footprints(ad, function='polynomial', order=2, 
                         trace_threshold=1., debug=False):
    """

    This function finds the footprint edges of spectroscopic flats, creates a 
    BINTABLE extension with the footprint parameters and appends it to the output
Example #15

 def __init__(self):
     self.f2ArrayDict = Lookups.get_lookup_table(
         "Gemini/F2/F2ArrayDict", "f2ArrayDict")
     self.f2ConfigDict = Lookups.get_lookup_table(
         "Gemini/F2/F2ConfigDict", "f2ConfigDict")
     GEMINI_DescriptorCalc.__init__(self)
Example #16

 def addMDF(self, rc):
     """
     This primitive is used to add an MDF extension to the input AstroData
      object. If only one MDF is provided, that MDF will be added to all input
     AstroData object(s). If more than one MDF is provided, the number of
     MDF AstroData objects must match the number of input AstroData objects.
     If no MDF is provided, the primitive will attempt to determine an
     appropriate MDF.
     
     :param mdf: The file name of the MDF(s) to be added to the input(s)
     :type mdf: string
     
     """
     # Instantiate the log
     log = logutils.get_logger(__name__)
     
     # Log the standard "starting primitive" debug message
     log.debug(gt.log_message("primitive", "addMDF", "starting"))
     
     # Define the keyword to be used for the time stamp for this primitive
     timestamp_key = self.timestamp_keys["addMDF"]
     
     # Initialize the list of output AstroData objects
     adoutput_list = []
     
     # Get the input AstroData objects
     adinput = rc.get_inputs_as_astrodata()
     
     # Loop over each input AstroData object in the input list
     for ad in adinput:
         
         # Check whether the addMDF primitive has been run previously
         if ad.phu_get_key_value(timestamp_key):
             log.warning("No changes will be made to %s, since it has "
                         "already been processed by addMDF" % ad.filename)
             
             # Append the input AstroData object to the list of output
             # AstroData objects without further processing
             adoutput_list.append(ad)
             continue
         
         # Check whether the input is spectroscopic data
         if "SPECT" not in ad.types:
             log.stdinfo("%s is not spectroscopic data, so no MDF will be "
                         "added" % ad.filename)
             
             # Append the input AstroData object to the list of output
             # AstroData objects without further processing
             adoutput_list.append(ad)
             continue
         
         # Check whether an MDF extension already exists in the input
         # AstroData object
         if ad["MDF"]:
             log.warning("An MDF extension already exists in %s, so no MDF "
                         "will be added" % ad.filename)
             
             # Append the input AstroData object to the list of output
             # AstroData objects without further processing
             adoutput_list.append(ad)
             continue
         
         # Parameters specified on the command line to reduce are converted
         # to strings, including None
         if rc["mdf"] and rc["mdf"] != "None":
             # The user supplied an input to the mdf parameter
             mdf = rc["mdf"]
         else:
             # The user did not supply an input to the mdf parameter, so try
             # to find an appropriate one. Get the dictionary containing the
             # list of MDFs for all instruments and modes.
             all_mdf_dict = Lookups.get_lookup_table("Gemini/MDFDict",
                                                     "mdf_dict")
             
             # The MDFs are keyed by the instrument and the MASKNAME. Get
             # the instrument and the MASKNAME values using the appropriate
             # descriptors 
             instrument = ad.instrument()
             mask_name = ad.phu_get_key_value("MASKNAME")
             
             # Create the key for the lookup table
             if instrument is None or mask_name is None:
                 log.warning("Unable to create the key for the lookup "
                             "table (%s), so no MDF will be added"
                             % ad.exception_info)
                 
                 # Append the input AstroData object to the list of output
                 # AstroData objects without further processing
                 adoutput_list.append(ad)
                 continue
             
             key = "%s_%s" % (instrument, mask_name)
             
             # Get the appropriate MDF from the look up table
             if key in all_mdf_dict:
                 mdf = lookup_path(all_mdf_dict[key])
             else:
                 # The MASKNAME keyword defines the actual name of an MDF
                 if not mask_name.endswith(".fits"):
                     mdf = "%s.fits" % mask_name
                 else:
                     mdf = str(mask_name)
                 
                  # Check if the MDF exists in the current working directory
                  if not os.path.exists(mdf):
                      log.warning("The MDF %s was not found in the current "
                                  "working directory, so no MDF will be "
                                  "added" % mdf)
                      
                      # Append the input AstroData object to the list of
                      # output AstroData objects without further processing
                      adoutput_list.append(ad)
                      continue
         
          # Ensure that the MDFs are AstroData objects
          if not isinstance(mdf, AstroData):
              mdf_ad = AstroData(mdf)
          else:
              mdf_ad = mdf
         
         if mdf_ad is None:
             log.warning("Cannot convert %s into an AstroData object, so "
                         "no MDF will be added" % mdf)
             
             # Append the input AstroData object to the list of output
             # AstroData objects without further processing
             adoutput_list.append(ad)
             continue
         
         # Check if the MDF is a single extension fits file
         if len(mdf_ad) > 1:
             log.warning("The MDF %s is not a single extension fits file, "
                         "so no MDF will be added" % mdf)
             
             # Append the input AstroData object to the list of output
             # AstroData objects without further processing
             adoutput_list.append(ad)
             continue
             
         # Name the extension appropriately
         mdf_ad.rename_ext("MDF", 1)
         
         # Append the MDF AstroData object to the input AstroData object
         log.fullinfo("Adding the MDF %s to the input AstroData object "
                      "%s" % (mdf_ad.filename, ad.filename))
         ad.append(moredata=mdf_ad)
         
         # Add the appropriate time stamps to the PHU
         gt.mark_history(adinput=ad, keyword=timestamp_key)
         
         # Change the filename
         ad.filename = gt.filename_updater(adinput=ad, suffix=rc["suffix"],
                                           strip=True)
         
         # Append the output AstroData object to the list of output
         # AstroData objects
         adoutput_list.append(ad)
     
     # Report the list of output AstroData objects to the reduction context
     rc.report_output(adoutput_list)
     
     yield rc
Example #17
from astrodata import Lookups

package_classes = Lookups.compose_multi_table(
                            "*/warehouse_settings", "warehouse_package")
package_dict = {}
if "warehouse_package" in package_classes:
    warehouse_packages = package_classes["warehouse_package"]
    for package in warehouse_packages:
        for key in package:
            if key not in package_dict:
                package_dict[key] = package[key]
else:
    warehouse_packages = []

dataset_extensions_dict = Lookups.compose_multi_table(
                        "*/filetypes", "data_object_precedence")

if "data_object_precedence" in dataset_extensions_dict:
    dataset_extensions = dataset_extensions_dict["data_object_precedence"]
else:
    dataset_extensions = None

ingest_sources_dict = Lookups.compose_multi_table(
                            "*/warehouse_daemon.py",
                            "ingest_sources")
if ingest_sources_dict and "ingest_sources" in ingest_sources_dict:
    ingest_sources = ingest_sources_dict["ingest_sources"]
else:
    ingest_sources = None
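
Each lookup above guards against a missing key the same way; for what
it's worth, the last block could be condensed with dict.get, which
behaves identically:

ingest_sources = (ingest_sources_dict or {}).get("ingest_sources")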
    
Example #18
def gmos_fplen(ad):
    """
      GMOS minimum and maximum offsets from
      the slit position in the dispersion direction
      This is a take from gscut.cl
    """
    import numpy as np
    from astrodata import Lookups

    gratings = Lookups.get_lookup_table('Gemini/GMOS/StandardGMOSGratings.py',
                                         'StandardGMOSGratings')
    filters = Lookups.get_lookup_table('Gemini/GMOS/GMOSfilters.py',
                                         'GMOSfilters')
    grating_tilt = Lookups.get_lookup_table('Gemini/GMOS/GMOSgratingTilt.py',
                                        'grating_tilt')


    # Define the spectral cut-off limit (red limit) according to the
    # detector type. Value is in nm. If needed, this can be changed to also
    # accommodate different values for GMOS-N and GMOS-S; see pixscale.

    pixscale = {   #(instrument,detector_type)
        ('GMOS-N','SDSU II CCD'):             0.0727, # GMOS-N EEV pixscale
        ('GMOS-N','SDSU II e2v DD CCD42-90'): 0.07288, # GMOS-N e2vDD pixscale
        ('GMOS-N','S10892-01'):               0.0727, # GMOS-N Hamamatsu pixscale 
        ('GMOS-S','SDSU II CCD'):             0.073, # GMOS-S EEV pixscale
               }

    detector_upper_spec_limit = {
        'SDSU II CCD':             1025,   # EEV CCDs
        'SDSU II e2v DD CCD42-90': 1050,   # e2vDD CCDs
        'S10892-01':               1080,   # Hamamatsu CCDs
                                }

    npix_y, npix_x = ad.data.shape
    xbin = ad.detector_x_bin()
    ybin = ad.detector_y_bin()
    instrument = ad.instrument().as_str()

    # Get header values. The input AD should have been
    # verified to contain a MOS image.
    phu = ad.phu_get_key_value
    dettype = phu('DETTYPE')
    grating_name = phu('GRATING')
    filter1 = phu('FILTER1')
    filter2 = phu('FILTER2')
    cwave = phu('GRWLEN')
    tilt = phu('GRTILT')
    tilt = np.radians(tilt) 

    xscale = pixscale[instrument,dettype]*xbin
    yscale = pixscale[instrument,dettype]*ybin

    # Get grating info from lookup table
    grule, gblaze, gR, gcoverage, gwave1, gwave2,\
        wavoffset, l_yoff = gratings[grating_name]

    greq=(cwave*grule)/1.e6

    # grating_tilt is a list of tuples
    greqs  = [g for g,t in grating_tilt]
    gtilts = [t for g,t in grating_tilt]
    # Interpolate at greq
    gtilt = np.interp(greq, greqs, gtilts)

    gtilt = np.radians(gtilt)
    a = np.sin(gtilt+0.872665) / np.sin(gtilt)
    gR = 206265. * greq/(0.5*81.0*np.sin(gtilt))
    nmppx = a*xscale*cwave*81.0*np.sin(gtilt)/(206265.*greq)
    wave1 = gwave1
    wave2 = gwave2

    # get filter information
    fwave1 = 0.0     ; wmn1 = 0.0     ; wmn2 = 0.0
    fwave2 = 99999.0 ; wmx1 = 99999.0 ; wmx2 = 99999.0
    if filter1 != '' and 'open' not in filter1:
        wmn1, wmx1, ffile = filters[filter1]
    if filter2 != '' and 'open' not in filter2:
        wmn2, wmx2, ffile = filters[filter2]

    fwave1 = max(wmn1,wmn2)
    fwave2 = min(wmx1,wmx2)

    # determine whether filter or grating limits wavelength coverage
    wave1 = max(wave1,fwave1)
    wave2 = min(wave2,fwave2)

    # This sets the hard red limit according to detector type if user doesn't
    # supply an upper limit
    if wave2 > detector_upper_spec_limit[dettype]:
        wave2 = detector_upper_spec_limit[dettype]

    speclist= (wave1,wave2,wavoffset,nmppx,a,cwave,l_yoff)
    return speclist
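
The wavelength bookkeeping above boils down to intersecting the grating
coverage with the filter bandpass and capping at the detector red limit.
A sketch with made-up ranges in nm:

def coverage(grating_range, filter_range, red_limit):
    wave1 = max(grating_range[0], filter_range[0])
    wave2 = min(grating_range[1], filter_range[1], red_limit)
    return wave1, wave2

print coverage((400.0, 980.0), (550.0, 1100.0), 1025)   # (550.0, 980.0)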
Example #19

def _get_static_bias_level(adinput=None):
    """
    Determine the static bias level value from GMOS data.
    """

    # Since this function accesses keywords in the headers of the pixel data
    # extensions, always construct a dictionary where the key of the dictionary
    # is an EXTVER integer
    static_bias_level = {}
    
    # Get the static bias level lookup table
    gmosampsBias, gmosampsBiasBefore20060831 = Lookups.get_lookup_table(
        "Gemini/GMOS/GMOSAmpTables", "gmosampsBias",
        "gmosampsBiasBefore20060831")
    
    # Get the UT date, read speed setting and gain setting values using the
    # appropriate descriptors
    ut_date_dv = adinput.ut_date()
    read_speed_setting_dv = adinput.read_speed_setting()
    gain_setting_dv = adinput.gain_setting()
    
    # Get the name of the detector amplifier from the header of each pixel data
    # extension as a dictionary
    ampname_dict = gmu.get_key_value_dict(
        adinput=adinput, keyword="AMPNAME", dict_key_extver=True)
    
    if not (ut_date_dv.is_none() and read_speed_setting_dv.is_none() and
            gain_setting_dv.is_none()) and ampname_dict is not None:
        
        # Use as_pytype() to return the values as the default python type
        # rather than an object
        ut_date = str(ut_date_dv)
        read_speed_setting = read_speed_setting_dv.as_pytype()
        
        # Create a gain setting dictionary where the key of the dictionary is
        # an EXTVER integer
        gain_setting_dict = gain_setting_dv.collapse_by_extver()
        
        if not gain_setting_dv.validate_collapse_by_extver(gain_setting_dict):
            # The validate_collapse_by_extver function returns False if the
            # values in the dictionary with the same EXTVER are not equal 
            raise Errors.CollapseError()
        
        obs_ut_date = datetime(*strptime(ut_date, "%Y-%m-%d")[0:6])
        old_ut_date = datetime(2006, 8, 31, 0, 0)
        
        for extver, gain_setting in gain_setting_dict.iteritems():
            ampname  = ampname_dict[extver]
            bias_key = (read_speed_setting, gain_setting, ampname)

            bias_level = None
            if obs_ut_date > old_ut_date:
                if bias_key in gmosampsBias:
                    bias_level = gmosampsBias[bias_key]
            else:
                if bias_key in gmosampsBiasBefore20060831:
                    bias_level = gmosampsBiasBefore20060831[bias_key]
            
            # Update the dictionary with the bias level value
            static_bias_level.update({extver: bias_level})
    
    # if len(static_bias_level) == 1:
    #     # Only one value will be returned
    #     ret_static_bias_level = static_bias_level.values()[0]  #!! Not a dict !
    # else:

    unique_values = set(static_bias_level.values())

    if len(unique_values) == 1 and None in unique_values:
        # The bias level was not found for any of the pixel data extensions
        # (all the values in the dictionary are equal to None)
        ret_static_bias_level = None
    else:
        ret_static_bias_level = static_bias_level
    
    return ret_static_bias_level
Example #20
def findMOSEdges(hdulist):
        """
          findMOSEdges is adapted from gmos/gscut.cl; it finds the
          REFPIX value.
          It also finds the bottom and top edges of each slice by taking
          a vertical bar on CCD2 at 1200:1500
          (vbar = np.mean(bigpix[:,1200:1500],axis=1))
        """

        from math import sin,cos 
        asecmm = 1.611444
        
        pixscale = {'GMOS-N': 0.0727, 'GMOS-S': 0.073}

        bigpix = hdulist['SCI',1].data
        
        pi = np.pi

        phu = hdulist[0]._header
        header = hdulist['SCI',1]._header
        tb = hdulist['MDF',1].data
     
        inst = phu['INSTRUME']
        
        grating = phu['grating']

        # Find the data file location; dirn is needed below to locate
        # gratingeq.dat.
        import imp
        fp, pathname, description = imp.find_module('gwavecal')
        dirn = os.path.dirname(pathname)

        GMOSfilters = Lookups.get_lookup_table('Gemini/GMOS/GMOSfilters.py','GMOSfilters')

        StandardGMOSGratings = Lookups.get_lookup_table('Gemini/GMOS/StandardGMOSGratings.py',
                                    'StandardGMOSGratings')

        grule, gblaze, gR, gcoverage, gwave1, gwave2,\
        wavoffset, l_yoff = StandardGMOSGratings[grating]

        filter1 = phu['filter1']
        filter2 = phu['filter2']

        # get filter information
        fwave1 = 0.0     ; wmn1 = 0.0     ; wmn2 = 0.0
        fwave2 = 99999.0 ; wmx1 = 99999.0 ; wmx2 = 99999.0
        if 'open' not in filter1:
            wmn1, wmx1, ffile = GMOSfilters[filter1]
        if 'open' not in filter2:
            wmn2, wmx2, ffile = GMOSfilters[filter2]

        cwave = phu['grwlen']
        tilt = phu['grtilt']
        
        tilt = tilt*pi/180.
        ss = header['ccdsum']     # Read a string 'x y'
        xbin,ybin = float(ss[0]),float(ss[2])

        xscale = pixscale[inst]*xbin
        yscale = pixscale[inst]*ybin

        greq = cwave*grule/1.e6
        gratinfile = os.path.join(dirn, 'gratingeq.dat')
        x,y = np.loadtxt(gratinfile,unpack=True)
        z = gfit.Gfit(x,y,'cubic')
        gtilt = z(greq)

        gtilt = gtilt * pi/180.
        a = sin(gtilt+0.872665) / sin(gtilt)
        gR = 206265. * greq/(0.5*81.0*sin(gtilt))
        nmppx = a*xscale*cwave*81.0*sin(gtilt)/(206265.*greq)
        wave1 = gwave1
        wave2 = gwave2

        fwave1 = max(wmn1,wmn2)
        fwave2 = min(wmx1,wmx2)

        # determine whether filter or grating limits wavelength coverage
        wave1 = max(wave1,fwave1)
        wave2 = min(wave2,fwave2)

        # in pixels
        speclen = round((wave2-wave1)/nmppx)

        # pixel value of central wavelength from left (red) end of spectrum
        #crpix1?
        pixcwave = speclen - (cwave-wave1)/nmppx

        nypix, nxpix = np.shape(bigpix)
        xcen = nxpix/2.
        ycen = nypix/2.



        sx = tb.field('slitpos_mx')
        sy = tb.field('slitpos_my')
        zx = tb.field('slitsize_mx')
        zy = tb.field('slitsize_my')
        pr = tb.field('priority')

        #loop over the slits
        g = np.argsort(sy)
        sx = sx[g]
        sy = sy[g]
        zx = zx[g]
        zy = zy[g]
        pr = pr[g]

        # Determines the edges of the stripes
        sz = np.shape(bigpix)
        cm = sz[1]/2
        vbar = np.mean(bigpix[:,cm-150:cm+150],axis=1)
        lowa, topa = mos_edges(vbar)
        cdif = len(sx)-len(lowa)
        # make these array the same length
        if cdif > 0:
            print "WARNING: The number of slits is less than the number of"
            print "         entries in tb, by:", cdif, "\n"
            lowa = np.concatenate(([0], lowa, [0]))
            topa = np.concatenate(([0], topa, [0]))
        print "NUMBER of slits found by mos_edges():", len(lowa)


        #TODO 
        #     put tilt
        crpix = [] 
        zzpeaks = []
        #for spos_mx,spos_my,ssize_mx,ssize_my,priority,le,te in zip(sx,sy,zx,zy,pr,lowa,topa):
        k = 1
        for spos_mx,spos_my,ssize_mx,ssize_my,priority in zip(sx,sy,zx,zy,pr):

            k += 1
        #    if k > 3: break

            # Convert from mask to pixel coordinates and correct for
            # binning in both directions
            #if priority == 0: continue

            xccd = spos_mx * asecmm/xscale
            if (inst=='GMOS-S'):
                # yccd=spos_my*asecmm/yscale
                # Not only there is a y-offset (85) but there is also 
                # a distortion.  The solution below is not perfect but
                # it is already much better than the first order
                # solution [Kathleen Labrie]
                yccd = 0.99911*spos_my - 1.7465E-5*spos_my**2 + \
                    3.0494E-7*spos_my**3
                yccd = yccd * asecmm/yscale
            else:
                yccd = 0.99591859227*spos_my + \
                    5.3042211333437E-8*spos_my**2 + \
                    1.7447902551997E-7*spos_my**3
                yccd = yccd * asecmm/yscale

            slitwid = ssize_mx*asecmm
            slitlen = ssize_my*asecmm

            # set slit length if the aperture is a circle,
            # radius=slitwid
            #if (slittype=="circle")
            #    slitlen=2.*slitwid

            xccd = xcen+xccd
            yccd = ycen+yccd

            # simple correction for distortion in x
            y = (yccd/nypix - 0.5)
            dx = nxpix * (0.0014*y - 0.0167*y**2)
            #print(yccd," ",y," ",dx)

            # slit height
            specwid = round(1.05*slitlen/yscale)
            center = specwid/2

            refpix = pixcwave

            # Position of object, take into account that lambda decreases
            # with x
            x1 = round(xcen-(xcen-xccd)/a-pixcwave) + wavoffset/nmppx + dx
            x2 = x1 + speclen-1
            y1 = round(yccd-center+l_yoff)
            y2 = y1 + specwid-1
            #print k,': (%.2f %.2f) (%.2f %.2f)'%(x1,x2,y1,y2),
            # check spectrum isn't off chip
            if x1 < 1:
                refpix = refpix+x1-1.
                x1 = 1

            if x2 > nxpix:
                x2 = nxpix
            if y1 < 1:
                y1 = 1
            if y2 > nypix:
                y2 = nypix

            crpix.append(refpix)
            #ss= '%.2f %.2f %.4f'% (refpix,cwave*10,-10.*nmppx),le,te,te-le
            #self.log.info(ss)

            """
              having the slits lines all vertical, i.e. applied rotation already.
              we now fit each of the slits
              See if we have shift of the lines in each of the slits
            """
        crval = cwave*10
        cdelt = -10.*nmppx

        return np.asarray(crpix), lowa, topa, crval, cdelt
Example #21
#!/usr/bin/env python
import json
from geventwebsocket import WebSocketServer, WebSocketApplication, Resource
from subprocess import Popen, PIPE
from time import sleep
import os
from astrodata import generaldata
from astrodata.generaldata import GeneralData
from astrodata import Lookups
from base64 import b64encode, b64decode
from pylab import *
import numpy as np
dw_info = Lookups.compose_multi_table(  "*/warehouse_settings", 
                                        "warehouse_elements", 
                                        "shelf_addresses", 
                                        "type_shelf_names",
                                        "type_store_precedence"
                                      )
wpack = Lookups.compose_multi_table("*/warehouse_settings", "warehouse_package")
outpacks = []
for pack in wpack:
    outpacks.append(repr(pack))
    

# type of quick view image to use; use the name from the mimetype (image/<imext>)
imext = "png"
IMEXT = imext            
                                      
dw_info["warehouse_package"] = outpacks
class EchoApplication(WebSocketApplication):
    _ra_stdin   = None
Example #22

def _set_geo_values(ad,ccdsecs,detsecs,binning):
    """Read geometric values from the Lookup tables.
       
       *Input:*
         ad: AD object

       *Output:*
         dictionary:  Keys are: gaps,blocksize,mosaic_grid,
                      shift,rotation,magnification,interpolator,ref_block


    """
    _geoVal = {'gaps':None, 'blocksize':None, 'mosaic_grid':None,
                    'shift':None, 'rotation':None, 'magnification':None,
                    'interpolator':None,'ref_block':None}

    # Get the dictionary containing the GMOS geometric information 
    # needed by mosaicAD
    instrument = str(ad.instrument())
    
    dettype =  ad.phu_get_key_value("DETTYPE")
    detector =  ad.phu_get_key_value("DETECTOR")

    x_bin,y_bin = binning
    if (x_bin > 1) or (y_bin >1):
        bin_string = "binned"
    else:
        bin_string = "unbinned"


    if ad.is_type('GMOS'):  
        lookup = 'Gemini/GMOS/geometry_conf'
    else:
        lookup = 'Gemini/'+instrument+'/geometry_conf'

    # Now we use the Lookup table service to read a given
    # list of dictionaries (the list 'dnames') located in
    # the module 'geometry_conf.py'.
    # Then we form a key based on the instrument, dettype,
    # detector, and bin_string to load the correct value
    # into the hidden dictionary _geoVal.


    # These are names using 'binned', 'unbinned'
    dnames = ('gaps_tile', 'gaps_transform','shift')
    geo_table = Lookups.get_lookup_table(lookup, *dnames)

    key = (instrument, dettype, detector, bin_string)
    for dn,gdic in zip(dnames,geo_table):
        _geoVal[dn] = gdic[key]

    # These are names using 'unbinned' only
    dnames = ('blocksize', 'mosaic_grid',\
               'rotation','magnification')
    # Reset dictionary search key for these dnames.
    key = (instrument, dettype, detector, 'unbinned')
    geo_table = Lookups.get_lookup_table(lookup, *dnames)

    for dn,gdic in zip(dnames,geo_table):
        # Get the dictionary item value given the key
        _geoVal[dn] = gdic[key]
          

    geo_table = Lookups.get_lookup_table(lookup, 'interpolator')
    _geoVal['interpolator'] = geo_table['SCI']

    geo_table = Lookups.get_lookup_table(lookup, 'ref_block')
    _geoVal['ref_block'] = np.asarray(geo_table['ref_block'])


    # Now bin the appropriate parameters.


    xshift,yshift =  np.asfarray(_geoVal['shift']).transpose()
    rot  = np.asfarray(_geoVal['rotation'])
    xrot = rot*x_bin/y_bin
    yrot = rot*y_bin/x_bin
    xshift = xshift/x_bin
    yshift = yshift/y_bin
    rotation = [(x,y) for x,y in zip(xrot,yrot)]
    shift = [(x,y) for x,y in zip(xshift,yshift)]
    # For x,y gap
    gaps_tile      = _geoVal['gaps_tile']
    gaps_transform = _geoVal['gaps_transform']
    for k in gaps_tile.keys():                # Bin the values
           gaps_tile[k] = (gaps_tile[k][0]/x_bin, gaps_tile[k][1]/y_bin)
    for k in gaps_transform.keys():           # Bin the values
           gaps_transform[k] = (gaps_transform[k][0]/x_bin,
                                gaps_transform[k][1]/y_bin)

    blocksize = np.asfarray(_geoVal['blocksize'])
    nrows = blocksize[0]/x_bin
    ncols = blocksize[1]/y_bin
    blocksize = (nrows,ncols)
    mosaic_grid = _geoVal['mosaic_grid']
    magnification = _geoVal['magnification']
    ref_block = tuple(_geoVal['ref_block'])
    interpolator = _geoVal['interpolator']
    
    nblocksx,nblocksy = mosaic_grid
    # Determine the actual block size depending on whether or not we
    # have an ROI (for return_ROI True): get the minimum lower left
    # corner from all the amplifier coords. Use as_dict in the meantime
    # because as_list deletes repeated elements.

    gap_dict = {'tile_gaps':gaps_tile, 'transform_gaps':gaps_transform}


    # Dictionary with all the values. Useful for printing.
    geodict = {'blocksize':blocksize,
               'mosaic_grid':mosaic_grid, 
               'transformation':{'shift':shift,'rotation':rotation,
                                 'magnification':magnification},
               'interpolator':interpolator,
               'ref_block':ref_block,'gap_dict':gap_dict,
               }

    return geodict   
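
The binning corrections above all follow one rule: geometry tabulated for
unbinned pixels is divided by the binning factors. A sketch for the shift
values (the numbers are made up):

x_bin, y_bin = 2.0, 2.0
unbinned_shift = [(0.0, 0.0), (-2.5, 5.62)]   # made-up unbinned offsets
shift = [(x / x_bin, y / y_bin) for x, y in unbinned_shift]
print shift   # [(0.0, 0.0), (-1.25, 2.81)]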
Example #23

    def addReferenceCatalog(self, rc):
        """
        The reference catalog is a dictionary in jhk_catalog.py


        Append the catalog as a FITS table with extenstion name
        'REFCAT', containing the following columns:

        - 'Id'       : Unique ID. Simple running number
        - 'Name'     : SDSS catalog source name
        - 'RAJ2000'  : RA as J2000 decimal degrees
        - 'DEJ2000'  : Dec as J2000 decimal degrees
        - 'J'     : SDSS u band magnitude
        - 'e_umag'   : SDSS u band magnitude error estimage
        - 'H'     : SDSS g band magnitude
        - 'e_gmag'   : SDSS g band magnitude error estimage
        - 'rmag'     : SDSS r band magnitude
        - 'e_rmag'   : SDSS r band magnitude error estimage
        - 'K'     : SDSS i band magnitude
        - 'e_imag'   : SDSS i band magnitude error estimage

        :param source: Source catalog to query. This used as the catalog
                       name on the vizier server
        :type source: string

        :param radius: The radius of the cone to query in the catalog, 
                       in degrees. Default is 4 arcmin
        :type radius: float
        """

        import pyfits as pf

        # Instantiate the log
        log = gemLog.getGeminiLog(logType=rc["logType"],
                                  logLevel=rc["logLevel"])

        # Log the standard "starting primitive" debug message
        log.debug(gt.log_message("primitive", "addReferenceCatalog", "starting"))

        # Define the keyword to be used for the time stamp for this primitive
        timestamp_key = self.timestamp_keys["addReferenceCatalog"]

        # Initialize the list of output AstroData objects
        adoutput_list = []

        # Get the necessary parameters from the RC
        source = rc["source"]
        radius = rc["radius"]

        # Get Local JHK catalog as a dictionary

        jhk = Lookups.get_lookup_table("Gemini/NIRI/jhk_catalog", "jhk") 

        #form arrays with input dict 
        ra=[]; dec=[]; vals=[]
        for key in jhk.keys():    
            ra.append(key[0])
            dec.append(key[1])
            vals.append(jhk[key])
        # sort in ra
        order = np.argsort(ra)
        ra,dec = map(np.asarray, (ra,dec))
        ra = ra[order]
        dec = dec[order]
        vals = [vals[k] for k in order]
        # Get the magnitudes and errs from each record (j,je,h,he,k,ke,name)
        vals = np.asarray([vals[k][:6] for k in range(len(ra))])
        # Separate mags into J,H,K mags arrays for clarity
        irmag={}
        irmag['Jmag']=     vals[:,0]
        irmag['Jmag_err']= vals[:,1]
        irmag['Hmag']=     vals[:,2]
        irmag['Hmag_err']= vals[:,3]
        irmag['Kmag']=     vals[:,4]
        irmag['Kmag_err']= vals[:,5]

        #print 'JMAG00:',[(irmag['Jmag'][i],irmag['Jmag_err'][i]) 
        #                for i in range(5)]

        # Loop over each input AstroData object in the input list
        adinput = rc.get_inputs_as_astrodata()
        for ad in adinput:

            try:
                input_ra = ad.ra().as_pytype()
                input_dec = ad.dec().as_pytype()
            except:
                if "qa" in rc.context:
                    log.warning("No RA/Dec in header of %s; cannot find "\
                                "reference sources" % ad.filename)
                    adoutput_list.append(ad)
                    continue
                else:
                    raise

            table_name = 'jhk.tab'
            # Loop through the science extensions
            for sciext in ad['SCI']:
                extver = sciext.extver()

                # Did we get anything?
                if (1): # We do have a dict with ra,dec
                    # Create one table per extension

                    # Create a running id number
                    refid=range(1, len(ra)+1)

                    # Make the pyfits columns and table
                    c1 = pf.Column(name="Id",format="J",array=refid)
                    c3 = pf.Column(name="RAJ2000",format="D",unit="deg",array=ra)
                    c4 = pf.Column(name="DEJ2000",format="D",unit="deg",array=dec)
                    c5 = pf.Column(name="Jmag",format="E",array=irmag['Jmag'])
                    c6 = pf.Column(name="e_Jmag",format="E",array=irmag['Jmag_err'])
                    c7 = pf.Column(name="Hmag",format="E",array=irmag['Hmag'])
                    c8 = pf.Column(name="e_Hmag",format="E",array=irmag['Hmag_err'])
                    c9 = pf.Column(name="Kmag",format="E",array=irmag['Kmag'])
                    c10= pf.Column(name="e_Kmag",format="E",array=irmag['Kmag_err'])
                    col_def = pf.ColDefs([c1,c3,c4,c5,c6,c7,c8,c9,c10])
                    tb_hdu = pf.new_table(col_def)

                    # Add comments to the REFCAT header to describe it.
                    tb_hdu.header.add_comment('Source catalog derived from the %s'
                                         ' catalog on vizier' % table_name)

                    tb_ad = AstroData(tb_hdu)
                    tb_ad.rename_ext('REFCAT', extver)

                    if(ad['REFCAT',extver]):
                        log.fullinfo("Replacing existing REFCAT in %s" % ad.filename)
                        ad.remove(('REFCAT', extver))
                    else:
                        log.fullinfo("Adding REFCAT to %s" % ad.filename)
                    ad.append(tb_ad)

            # Match the object catalog against the reference catalog
            # Update the refid and refmag columns in the object catalog
            if ad.count_exts("OBJCAT")>0:
                ad = _match_objcat_refcat(adinput=ad)[0]
            else:
                log.warning("No OBJCAT found; not matching OBJCAT to REFCAT")

            # Add the appropriate time stamps to the PHU
            gt.mark_history(adinput=ad, keyword=timestamp_key)

            # Change the filename
            ad.filename = gt.filename_updater(adinput=ad, suffix=rc["suffix"],
                                              strip=True)

            # Append the output AstroData object to the list 
            # of output AstroData objects
            adoutput_list.append(ad)

        # Report the list of output AstroData objects to the reduction
        # context
        rc.report_output(adoutput_list)

        yield rc
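
The catalog assembly above co-sorts the RA, Dec and magnitude arrays with
a single argsort; the pattern in isolation:

import numpy as np

ra = np.array([30.0, 10.0, 20.0])
dec = np.array([-5.0, 1.0, 3.0])
order = np.argsort(ra)
print ra[order]    # [ 10.  20.  30.]
print dec[order]   # [  1.   3.  -5.]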
Example #24

import numpy as np
import pywcs
from astrodata import Errors
from astrodata import Lookups
from astrodata.adutils import gemLog
from gempy.library import astrotools as at
from gempy.gemini import gemini_tools as gt
from primitives_GENERAL import GENERALPrimitives

# Load the standard comments for header keywords that will be updated
# in these functions
keyword_comments = Lookups.get_lookup_table("Gemini/keyword_comments", "keyword_comments")


class RegisterPrimitives(GENERALPrimitives):
    """
    This is the class containing all of the registration primitives for the
    GEMINI level of the type hierarchy tree. It inherits all the primitives
    from the level above, 'GENERALPrimitives'.
    """

    astrotype = "GEMINI"

    def init(self, rc):
        GENERALPrimitives.init(self, rc)
        return rc

    init.pt_hide = True

    def correctWCSToReferenceFrame(self, rc):
        """ 
Example #25

 def __init__(self):
     self.gnirsArrayDict = Lookups.get_lookup_table(
         "Gemini/GNIRS/GNIRSArrayDict", "gnirsArrayDict")
     self.gnirsConfigDict = Lookups.get_lookup_table(
         "Gemini/GNIRS/GNIRSConfigDict", "gnirsConfigDict")
     GEMINI_DescriptorCalc.__init__(self)