def parse_nist_tbl(tbl, parse_dict):
    '''Parses a NIST table using various criteria

    Parameters
    ----------
    tbl : Table
        Read previously from NIST ASCII file
    parse_dict : dict
        Dict of parsing criteria.  Read from load_parse_dict

    Returns
    -------
    tbl : Table
        Rows meeting the criteria
    '''
    # Parse
    gdI = tbl['RelInt'] >= parse_dict['min_intensity']
    try:
        gdA = tbl['Aki'] >= parse_dict['min_Aki']
    except TypeError:
        debugger.set_trace()
    gdw = tbl['wave'] >= parse_dict['min_wave']
    # Combine
    allgd = gdI & gdA & gdw
    # Return
    return tbl[allgd]
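# --- Added illustration (not part of the original module) ---
# A minimal sketch of how parse_nist_tbl might be called, assuming an astropy
# Table with the 'wave', 'RelInt' and 'Aki' columns the function reads and a
# parse_dict carrying the three threshold keys.  All values below are invented.
def _example_parse_nist_tbl():
    from astropy.table import Table
    tbl = Table({'wave': [3000., 4500., 6000.],
                 'RelInt': [10., 500., 80.],
                 'Aki': [1e6, 1e8, 1e7]})
    parse_dict = {'min_intensity': 50., 'min_Aki': 1e7, 'min_wave': 4000.}
    # Only the 4500 and 6000 Angstrom rows satisfy all three cuts
    return parse_nist_tbl(tbl, parse_dict)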
def get_pc(data, k, tol=0.0, maxiter=20, nofix=False, noortho=False):
    """ Find a rank-k basis for the rows of `data` by alternating least squares,
    i.e. iteratively solving data ~ eigv @ hidden for the basis (eigv) and the
    coefficients (hidden).  Unless noortho, the basis is Gram-Schmidt
    orthonormalized; unless nofix, it is then rotated onto the principal axes
    of the hidden coefficients (via do_pca).
    """
    p = data.shape[0]
    if p == 0:
        msgs.error("You need to supply more components in the PCA")
    #n = np.size(data)/p
    if k > p:
        debugger.set_trace()
        msgs.error("The number of principal components must be less than or equal" +
                   msgs.newline() + "to the order of the fitting function")
    # Set the initial conditions
    eigv = np.zeros((p, k))
    eigv[:k, :k] = np.identity(k)
    niter = 0
    diff = tol * 2.0 + 1.0
    while (niter < maxiter) and (diff > tol):
        hidden = np.dot(np.dot(np.linalg.inv(np.dot(eigv.T, eigv)), eigv.T), data)
        oldeigv = eigv.copy()
        eigv = np.dot(data, np.dot(hidden.T, np.linalg.inv(np.dot(hidden, hidden.T))))
        if tol > 0.0:
            diff = 0.0
            for i in range(k):
                diff += np.abs(1.0 - np.sum(oldeigv[:, i] * eigv[:, i])
                               / np.sqrt(np.sum(oldeigv[:, i]**2) * np.sum(eigv[:, i]**2)))
        niter += 1
    # Orthonormalize?
    if not noortho:
        for b in range(k):
            # Orthogonalize
            for bp in range(b):
                dot = np.sum(eigv[:, b] * eigv[:, bp])
                eigv[:, b] -= dot * eigv[:, bp]
            # Normalize
            dot = np.sum(eigv[:, b]**2)
            dot = 1.0 / np.sqrt(dot)
            eigv[:, b] *= dot
    # Project variables onto new coordinates?
    if not nofix:
        hidden = np.dot(eigv.T, data)
        eval_hidden, evec_hidden = do_pca(hidden.T, cov=True)
        eigv = np.dot(eigv, evec_hidden.T)
    hidden = np.dot(eigv.T, data)
    return eigv, hidden
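# --- Added illustration (not part of the original module) ---
# A small synthetic exercise of get_pc: the 10 rows of `data` are built from two
# hidden basis vectors, so k=2 components reconstruct them.  nofix=True is used
# so this sketch does not depend on the separate do_pca() routine.
def _example_get_pc():
    import numpy as np
    rng = np.random.RandomState(0)
    coeff = rng.randn(10, 2)            # mixing coefficients for 10 rows
    basis = rng.randn(2, 50)            # two hidden basis vectors of length 50
    data = np.dot(coeff, basis)         # shape (10, 50)
    eigv, hidden = get_pc(data, 2, nofix=True)
    # eigv has shape (10, 2), hidden has shape (2, 50), and data ~ eigv @ hidden
    return eigv, hidden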
def pix_to_amp(naxis0, naxis1, datasec, numamplifiers):
    """ Generate a frame that identifies each pixel to an amplifier,
    and then trim it to the data sections.  This frame can be used to
    later identify which trimmed pixels correspond to which amplifier

    Parameters
    ----------
    naxis0 : int
    naxis1 : int
    datasec : list
    numamplifiers : int

    Returns
    -------
    retarr : ndarray
        Frame assigning pixels to amplifiers
    """
    # Initialize the returned array
    retarr = np.zeros((naxis0, naxis1))
    for i in range(numamplifiers):
        #datasec = "datasec{0:02d}".format(i+1)
        #x0, x1 = settings.spect[dnum][datasec][0][0], settings.spect[dnum][datasec][0][1]
        #y0, y1 = settings.spect[dnum][datasec][1][0], settings.spect[dnum][datasec][1][1]
        x0, x1 = datasec[i][0][0], datasec[i][0][1]
        y0, y1 = datasec[i][1][0], datasec[i][1][1]
        if x0 < 0:
            x0 += naxis0
        if x1 <= 0:
            x1 += naxis0
        if y0 < 0:
            y0 += naxis1
        if y1 <= 0:
            y1 += naxis1
        # Fill in the pixels for this amplifier
        xv = np.arange(x0, x1)
        yv = np.arange(y0, y1)
        w = np.ix_(xv, yv)
        try:
            retarr[w] = i + 1
        except IndexError:
            debugger.set_trace()
        # Save these locations for trimming
        if i == 0:
            xfin = xv.copy()
            yfin = yv.copy()
        else:
            xfin = np.unique(np.append(xfin, xv.copy()))
            yfin = np.unique(np.append(yfin, yv.copy()))
    # Construct an array with the rows and columns to be extracted
    w = np.ix_(xfin, yfin)
    return retarr[w]
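# --- Added illustration (not part of the original module) ---
# Hypothetical two-amplifier layout for pix_to_amp: the datasec list follows the
# [[x0, x1], [y0, y1]] convention read above, with the amplifiers splitting the
# first axis of a 100x80 frame.
def _example_pix_to_amp():
    datasec = [[[0, 50], [0, 80]],      # amplifier 1
               [[50, 100], [0, 80]]]    # amplifier 2
    amp_img = pix_to_amp(100, 80, datasec, numamplifiers=2)
    # amp_img.shape == (100, 80); pixels are labelled 1 or 2 by amplifier
    return amp_img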
def set_detector(self, chip):
    detectors = [
        # Detector 1 (Thor) -- http://www.eso.org/sci/php/optdet/instruments/fors2/index.html
        pypeitpar.DetectorPar(
            dataext=0,
            specaxis=1,
            specflip=False,
            xgap=0.,
            ygap=0.,
            ysize=1.,
            platescale=0.126,        # average between order 11 & 30, see manual
            darkcurr=0.0,
            saturation=2.0e5,        # I think saturation may never be a problem here since there are many DITs
            nonlinear=0.80,
            numamplifiers=1,
            gain=0.70,
            ronoise=2.9,             # High gain
            datasec='[:,10:]',       # For 1x binning, I think
            oscansec='[:,0:10]',
            suffix='_Thor'),
        # Detector 2 (Belenos)
        pypeitpar.DetectorPar(
            dataext=0,
            specaxis=1,
            specflip=False,
            xgap=0.,
            ygap=0.,
            ysize=1.,
            platescale=0.126,        # average between order 11 & 30, see manual
            darkcurr=0.0,
            saturation=2.0e5,        # I think saturation may never be a problem here since there are many DITs
            nonlinear=0.80,
            numamplifiers=1,
            gain=0.70,
            ronoise=3.0,             # High gain
            datasec='[20:,0:2048]',
            oscansec='[4:20,4:2044]',
            suffix='_Belenos')
    ]
    if chip == 'CHIP1':
        self.detector = [detectors[0]]
    elif chip == 'CHIP2':
        debugger.set_trace()  # NEED TO SET DATASEC
        self.detector = [detectors[1]]
def load_sorted(sorted_file):
    """ Load a .sorted file (mainly to generate a .pypeit file)

    Parameters
    ----------
    sorted_file : str

    Returns
    -------
    all_setups : list
        list of all setups, e.g. ['A','B']
    all_setuplines : list
        list of lists of all setup lines which describe the setup
    all_setupfiles : list
        list of lists of all setup files including the header
    """
    all_setups, all_setuplines, all_setupfiles = [], [], []
    try:
        with open(sorted_file, 'r') as ff:
            # Should begin with ####
            fline = ff.readline()
            if fline[0:4] != '####':
                msgs.error('Bad .sorted formatting')
            # Loop on setups
            while fline[0:5] != '##end':
                # Setup lines
                setuplines = []
                while fline[0:2] != '#-':
                    fline = ff.readline()
                    # Setup name
                    if 'Setup' in fline:
                        all_setups.append(fline[6:].strip())
                    #
                    setuplines.append(fline)
                all_setuplines.append(setuplines[:-1])
                # Data files
                datafiles = []
                while fline[0:2] != '##':
                    fline = ff.readline()
                    datafiles.append(fline)
                all_setupfiles.append(datafiles[:-1])
    except:
        debugger.set_trace()
    # Return
    return all_setups, all_setuplines, all_setupfiles
def compound_meta(self, headarr, meta_key):
    """

    Args:
        headarr: list
        meta_key: str

    Returns:
        value

    """
    if meta_key == 'binning':
        binspatial, binspec = parse.parse_binning(headarr[0]['BINNING'])
        binning = parse.binning2string(binspec, binspatial)
        return binning
    elif meta_key == 'dispangle':
        if headarr[0]['GRATEPOS'] == 3:
            return headarr[0]['G3TLTWAV']
        elif headarr[0]['GRATEPOS'] == 4:
            return headarr[0]['G4TLTWAV']
        else:
            debugger.set_trace()
    else:
        msgs.error("Not ready for this compound meta")
def build_wv_calib(self, arccen, method, skip_QA=False):
    """
    Main routine to generate the wavelength solutions in a loop over slits

    Wrapper to arc.simple_calib or arc.calib_with_arclines

    self.maskslits is updated for slits that fail

    Args:
        method : str
            'simple' -- arc.simple_calib
            'arclines' -- arc.calib_with_arclines
            'holy-grail' -- wavecal.autoid.HolyGrail
            'reidentify' -- wavecal.autoid.ArchiveReid
            'full_template' -- wavecal.autoid.full_template
        skip_QA (bool, optional)

    Returns:
        dict:  self.wv_calib
    """
    # Obtain a list of good slits
    ok_mask = np.where(~self.maskslits)[0]

    # Obtain calibration for all slits
    if method == 'simple':
        lines = self.par['lamps']
        line_lists = waveio.load_line_lists(lines)

        self.wv_calib = arc.simple_calib_driver(self.msarc, line_lists, arccen, ok_mask,
                                                nfitpix=self.par['nfitpix'],
                                                IDpixels=self.par['IDpixels'],
                                                IDwaves=self.par['IDwaves'])
    elif method == 'semi-brute':
        # TODO: THIS IS CURRENTLY BROKEN
        debugger.set_trace()
        final_fit = {}
        for slit in ok_mask:
            # HACKS BY JXP
            self.par['wv_cen'] = 8670.
            self.par['disp'] = 1.524
            # ToDO remove these hacks and use the parset in semi_brute
            best_dict, ifinal_fit \
                    = autoid.semi_brute(arccen[:, slit], self.par['lamps'], self.par['wv_cen'],
                                        self.par['disp'], match_toler=self.par['match_toler'],
                                        func=self.par['func'], n_first=self.par['n_first'],
                                        sigrej_first=self.par['sigrej_first'],
                                        n_final=self.par['n_final'],
                                        sigrej_final=self.par['sigrej_final'],
                                        sigdetect=self.par['sigdetect'],
                                        nonlinear_counts=self.nonlinear_counts)
            final_fit[str(slit)] = ifinal_fit.copy()
    elif method == 'basic':
        final_fit = {}
        for slit in ok_mask:
            status, ngd_match, match_idx, scores, ifinal_fit = \
                autoid.basic(arccen[:, slit], self.par['lamps'], self.par['wv_cen'],
                             self.par['disp'], nonlinear_counts=self.nonlinear_counts)
            final_fit[str(slit)] = ifinal_fit.copy()
            if status != 1:
                self.maskslits[slit] = True
    elif method == 'holy-grail':
        # Sometimes works, sometimes fails
        arcfitter = autoid.HolyGrail(arccen, par=self.par, ok_mask=ok_mask)
        patt_dict, final_fit = arcfitter.get_results()
    elif method == 'reidentify':
        # Now preferred
        # Slit positions
        arcfitter = autoid.ArchiveReid(arccen, self.spectrograph, self.par, ok_mask=ok_mask,
                                       slit_spat_pos=self.slit_spat_pos)
        patt_dict, final_fit = arcfitter.get_results()
    elif method == 'full_template':
        # Now preferred
        if self.binspectral is None:
            msgs.error("You must specify binspectral for the full_template method!")
        final_fit = autoid.full_template(arccen, self.par, ok_mask, self.det,
                                         self.binspectral, nsnippet=self.par['nsnippet'])
    else:
        msgs.error('Unrecognized wavelength calibration method: {:}'.format(method))

    self.wv_calib = final_fit

    # Remake mask (*mainly for the QA that follows*)
    self.maskslits = self.make_maskslits(len(self.maskslits))
    ok_mask = np.where(~self.maskslits)[0]

    # QA
    if not skip_QA:
        for slit in ok_mask:
            outfile = qa.set_qa_filename(self.master_key, 'arc_fit_qa', slit=slit,
                                         out_dir=self.qa_path)
            autoid.arc_fit_qa(self.wv_calib[str(slit)], outfile=outfile)

    # Return
    self.steps.append(inspect.stack()[0][3])
    return self.wv_calib
def get_meta_value(self, ifile, meta_key, headarr=None, required=False,
                   ignore_bad_header=False, usr_row=None):
    """
    Return meta data from a given file (or its array of headers)

    Args:
        ifile: str or None
            Input filename
        meta_key: str or list of str
        headarr: list, optional
            List of headers
        required: bool, optional
            Require the meta key to be returnable
        ignore_bad_header: bool, optional
            Over-ride required;  not recommended
        usr_row: Row
            Provides user supplied frametype (and other things not used)

    Returns:
        value: value or list of values
    """
    if headarr is None:
        headarr = self.get_headarr(ifile)

    # Loop?
    if isinstance(meta_key, list):
        values = []
        for mdict in meta_key:
            values.append(self.get_meta_value(ifile, mdict, headarr=headarr, required=required))
        #
        return values

    # Are we prepared to provide this meta data?
    if meta_key not in self.meta.keys():
        if required:
            msgs.error("Need to allow for meta_key={} in your meta data".format(meta_key))
        else:
            msgs.warn("Requested meta data does not exist...")
            return None

    # Is this not derivable?  If so, use the default
    #   or search for it as a compound method
    value = None
    if self.meta[meta_key]['card'] is None:
        if 'default' in self.meta[meta_key].keys():
            value = self.meta[meta_key]['default']
        elif 'compound' in self.meta[meta_key].keys():
            value = self.compound_meta(headarr, meta_key)
        else:
            msgs.error("Failed to load spectrograph value for meta: {}".format(meta_key))
    else:
        # Grab from the header, if we can
        try:
            value = headarr[self.meta[meta_key]['ext']][self.meta[meta_key]['card']]
        except (KeyError, TypeError):
            value = None

    if value is None:
        # Was this required?
        if required:
            kerror = True
            if not ignore_bad_header:
                # Is this meta required for this frame type (Spectrograph specific)
                if ('required_ftypes' in self.meta[meta_key]) and (usr_row is not None):
                    kerror = False
                    # Is it required?
                    for ftype in usr_row['frametype'].split(','):
                        if ftype in self.meta[meta_key]['required_ftypes']:
                            kerror = True
                # Bomb out?
                if kerror:
                    msgs.error('Required meta "{:s}" did not load!  '
                               'You may have a corrupt header'.format(meta_key))
            else:
                msgs.warn("Required card {:s} missing from your header of {:s}.  "
                          "Proceeding with risk..".format(self.meta[meta_key]['card'], ifile))
        return None

    # Deal with dtype (DO THIS HERE OR IN METADATA?  I'M TORN)
    if self.meta_data_model[meta_key]['dtype'] == str:
        value = str(value).strip()
    elif self.meta_data_model[meta_key]['dtype'] == int:
        value = int(value)
    elif self.meta_data_model[meta_key]['dtype'] == float:
        value = float(value)
    elif self.meta_data_model[meta_key]['dtype'] == tuple:
        assert isinstance(value, tuple)
    else:
        debugger.set_trace()
    # Return
    return value
def read_gmos(raw_file, det=1):
    """
    Read the GMOS data file

    Parameters
    ----------
    raw_file : str
        Filename
    det : int, optional
        Detector number; Default = 1

    Returns
    -------
    array : ndarray
        Combined image
    header : FITS header
    sections : list
        List of datasec, oscansec, ampsec sections
    """
    # Check for file; allow for extra .gz, etc. suffix
    fil = glob.glob(raw_file + '*')
    if len(fil) != 1:
        msgs.error("Found {:d} files matching {:s}".format(len(fil), raw_file))
    # Read
    msgs.info("Reading GMOS file: {:s}".format(fil[0]))
    hdu = fits.open(fil[0])
    head0 = hdu[0].header
    head1 = hdu[1].header

    # Number of amplifiers (could pull from DetectorPar but this avoids needing the spectrograph, e.g. view_fits)
    numamp = (len(hdu) - 1) // 3

    # Setup for datasec, oscansec
    dsec = []
    osec = []

    # get the x and y binning factors...
    binning = head1['CCDSUM']
    xbin, ybin = [int(ibin) for ibin in binning.split(' ')]

    # First read over the header info to determine the size of the output array...
    datasec = head1['DATASEC']
    x1, x2, y1, y2 = np.array(parse.load_sections(datasec, fmt_iraf=False)).flatten()
    biassec = head1['BIASSEC']
    b1, b2, b3, b4 = np.array(parse.load_sections(biassec, fmt_iraf=False)).flatten()
    nxb = b2 - b1 + 1

    # determine the output array size...
    nx = (x2 - x1 + 1) * numamp + nxb * numamp
    ny = y2 - y1 + 1

    # allocate output array...
    array = np.zeros((nx, ny))

    if numamp == 2:
        if det == 1:  # BLUEST DETECTOR
            order = range(6, 4, -1)
        elif det == 2:  # BLUEST DETECTOR
            order = range(3, 5)
        elif det == 3:  # BLUEST DETECTOR
            order = range(1, 3)
    elif numamp == 4:
        if det == 1:  # BLUEST DETECTOR
            order = range(12, 8, -1)
        elif det == 2:  # BLUEST DETECTOR
            order = range(8, 4, -1)
        elif det == 3:  # BLUEST DETECTOR
            order = range(4, 0, -1)
    else:
        debugger.set_trace()

    # insert extensions into master image...
    for kk, jj in enumerate(order):
        # grab complete extension...
        data, overscan, datasec, biassec, x1, x2 = gemini_read_amp(hdu, jj)
        #, linebias=linebias, nobias=nobias, $
        #x1=x1, x2=x2, y1=y1, y2=y2, gaindata=gaindata)
        # insert components into output array...
        inx = data.shape[0]
        xs = inx * kk
        xe = xs + inx

        # insert data...
        # Data section
        #section = '[:,{:d}:{:d}]'.format(xs, xe)  # Eliminate lines
        section = '[{:d}:{:d},:]'.format(xs, xe)  # Eliminate lines
        dsec.append(section)
        array[xs:xe, :] = np.flipud(data)

        #; insert postdata...
        xs = nx - numamp * nxb + kk * nxb
        xe = xs + nxb
        #debugger.set_trace()
        #section = '[:,{:d}:{:d}]'.format(xs, xe)
        osection = '[{:d}:{:d},:]'.format(xs, xe)  # TRANSPOSED FOR WHAT COMES
        osec.append(osection)
        array[xs:xe, :] = overscan

    # make sure BZERO is a valid integer for IRAF
    obzero = head1['BZERO']
    #head0['O_BZERO'] = obzero
    head0['BZERO'] = 32768 - obzero

    # Return, transposing array back to goofy Python indexing
    return array, head0, (dsec, osec)
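# --- Added illustration (not part of the original module) ---
# Usage sketch for read_gmos; the filename is hypothetical and simply stands in
# for a raw GMOS multi-extension FITS frame on disk.
def _example_read_gmos():
    array, head0, (dsec, osec) = read_gmos('N20190101S0123.fits', det=1)
    # array is the mosaicked amplifier image; dsec/osec record the data and
    # overscan sections inserted for each amplifier
    return array, head0, dsec, osec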
def bpm(self, shape=None, filename=None, det=None, **null_kwargs):
    """ Generate a BPM

    Parameters
    ----------
    det : int, REQUIRED
    **null_kwargs:
        Captured and never used

    Returns
    -------
    badpix : ndarray
    """
    # Get the empty bpm: force is always True
    self.empty_bpm(shape=shape, filename=filename, det=det)

    if det == 1:
        msgs.info("Using hard-coded BPM for det=1 on GMOSs")
        # TODO: Fix this
        # Get the binning
        hdu = fits.open(filename)
        binning = hdu[1].header['CCDSUM']
        hdu.close()
        # Apply the mask
        xbin = int(binning.split(' ')[0])
        badc = 616 // xbin
        self.bpm_img[badc, :] = 1
    elif det == 2:
        msgs.info("Using hard-coded BPM for det=2 on GMOSs")
        # Get the binning
        hdu = fits.open(filename)
        binning = hdu[1].header['CCDSUM']
        hdu.close()
        # Apply the mask
        xbin = int(binning.split(' ')[0])
        if xbin != 2:
            debugger.set_trace()  # NEED TO CHECK FOR YOUR BINNING
        # Up high
        badr = (898 * 2) // xbin  # Transposed
        self.bpm_img[badr:badr + (8 * 2) // xbin, :] = 1
        # Down low
        badr = (161 * 2) // xbin  # Transposed
        self.bpm_img[badr, :] = 1
    elif det == 3:
        msgs.info("Using hard-coded BPM for det=3 on GMOSs")
        # Get the binning
        hdu = fits.open(filename)
        binning = hdu[1].header['CCDSUM']
        hdu.close()
        # Apply the mask
        xbin = int(binning.split(' ')[0])
        if xbin != 2:
            debugger.set_trace()  # NEED TO CHECK FOR YOUR BINNING
        badr = (281 * 2) // xbin  # Transposed
        self.bpm_img[badr:badr + (2 * 2) // xbin, :] = 1

    return self.bpm_img
def ech_load_specobj(fname, order=None):
    """ Load a spec1d file into a list of SpecObjExp objects

    Parameters
    ----------
    fname : str

    Returns
    -------
    specObjs : list of SpecObjExp
    head0
    """
    #if order is None:
    #    msgs.warn('You did not specify an order. Return specObjs with all orders.')
    #    specObjs, head0 = load.load_specobj(fname)
    #    return specObjs, head0

    speckeys = ['WAVE', 'SKY', 'MASK', 'FLAM', 'FLAM_IVAR', 'FLAM_SIG',
                'COUNTS_IVAR', 'COUNTS']
    #
    specObjs = []
    hdulist = fits.open(fname)
    head0 = hdulist[0].header
    for hdu in hdulist:
        if hdu.name == 'PRIMARY':
            continue
        #elif hdu.name[8:17] != 'ORDER'+'{0:04}'.format(order):
        #    continue
        # Parse name
        idx = hdu.name
        objp = idx.split('-')
        if objp[-1][0:3] == 'DET':
            det = int(objp[-1][3:])
        else:
            det = int(objp[-1][1:])
        if objp[-2][:5] == 'ORDER':
            iord = int(objp[-2][5:])
        else:
            msgs.warn('Loading longslit data ?')
            iord = int(-1)
        # If order is not None and iord != order, do not return this extension;
        # if order is None, return all extensions;
        # if order is not None and iord == order, only return the specific order you want.
        if (order is not None) and (iord != order):
            continue
        # Load data
        spec = Table(hdu.data)
        shape = (len(spec), 1024)  # 2nd number is dummy
        # New and wrong
        try:
            specobj = specobjs.SpecObj(shape, None, None, idx=idx)
        except:
            debugger.set_trace()
            msgs.error("BUG ME")
        # Add order number
        specobj.ech_orderindx = iord
        # ToDo: need to change to the real order number?
        specobj.ech_order = iord
        # Add trace
        try:
            specobj.trace_spat = spec['TRACE']
        except:
            # KLUDGE!
            specobj.trace_spat = np.arange(len(spec['BOX_WAVE']))
        # Add spectrum
        if 'BOX_COUNTS' in spec.keys():
            for skey in speckeys:
                try:
                    specobj.boxcar[skey] = spec['BOX_{:s}'.format(skey)].data
                except KeyError:
                    pass
            # Add units on wave
            specobj.boxcar['WAVE'] = specobj.boxcar['WAVE'] * units.AA
        if 'OPT_COUNTS' in spec.keys():
            for skey in speckeys:
                try:
                    specobj.optimal[skey] = spec['OPT_{:s}'.format(skey)].data
                except KeyError:
                    pass
            # Add units on wave
            specobj.optimal['WAVE'] = specobj.optimal['WAVE'] * units.AA
        # Append
        specObjs.append(specobj)
    # Return
    return specObjs, head0
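# --- Added illustration (not part of the original module) ---
# Usage sketch for ech_load_specobj; the filename is hypothetical.  Passing an
# order returns only the extensions whose name encodes that ORDER value, while
# order=None returns every extension.
def _example_ech_load_specobj():
    specObjs, head0 = ech_load_specobj('spec1d_example.fits', order=3)
    return specObjs, head0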
def find_standard(self):
    """
    Identify the standard star from the list of all spectra in the specobjs

    Wrapper to flux.find_standard which simply takes the brightest

    Returns
    -------
    self.std : SpecObj
        Corresponds to the chosen spectrum
    """
    if self.par['std_obj_id'] is not None:
        _ = self._set_std_obj()
        return
    if self.multi_det is not None:
        sv_stds = []
        # Find the standard in each detector
        for det in self.multi_det:
            stds = [sobj for sobj in self.std_specobjs if sobj.det == det]
            if len(stds) == 0:
                debugger.set_trace()
            idx = flux.find_standard(stds)
            sv_stds.append(stds[idx])
            msgs.info("Using standard {} for det={}".format(stds[idx], det))
        # Now splice
        msgs.info("Splicing the standards -- The name will be for the first detector")
        std_splice = sv_stds[0].copy()
        # Append
        for ostd in sv_stds[1:]:
            try:
                std_splice.optimal['WAVE_GRID'] = np.append(std_splice.optimal['WAVE_GRID'].value,
                                                            ostd.optimal['WAVE_GRID'].value) * units.AA
            except KeyError:
                std_splice.optimal['WAVE'] = np.append(std_splice.optimal['WAVE'].value,
                                                       ostd.optimal['WAVE'].value) * units.AA
            for key in ['COUNTS', 'COUNTS_IVAR']:
                std_splice.optimal[key] = np.append(std_splice.optimal[key], ostd.optimal[key])
        self.std = std_splice
    elif self.spectrograph.pypeline == 'Echelle':
        # Find brightest object in each order
        std_brightest = self.std_specobjs[flux.find_standard(self.std_specobjs)]
        std_objid = std_brightest['idx'].split('-')[0]
        self.std_idx = np.zeros(len(self.std_specobjs), dtype=bool)
        for ii in range(len(self.std_specobjs)):
            if std_objid in self.std_specobjs[ii]['idx']:
                self.std_idx[ii] = True
        # Set internal
        self.std = self.std_specobjs[self.std_idx]
        # Step
        self.steps.append(inspect.stack()[0][3])
        # Return
        return self.std
    else:
        # Find brightest object in the exposures
        # Searches over all slits (over all detectors), and all objects
        self.std_idx = flux.find_standard(self.std_specobjs)
        # Set internal
        self.std = self.std_specobjs[self.std_idx]
        # Step
        self.steps.append(inspect.stack()[0][3])
        # Return
        return self.std
def instr_setup(sci_ID, det, fitstbl, setup_dict=None, must_exist=False, skip_cset=False,
                config_name=None, copy=False):
    """
    DEPRECATED

    Define the instrument configuration.

    .. todo::
        - This needs to be made a class object and pulled out of core.

    configuration ID: A, B, C
    detector number: 01, 02, 03, ..
    calibration ID: aa, ab, ac, ..

    Args:
        sci_ID (:obj:`int`):
            The selected science frame (binary)
        det (:obj:`int`):
            The 1-indexed detector
        fitstbl (:class:`pypeit.metadata.PypeItMetaData`):
            The fits file metadata used by PypeIt
        setup_dict (:obj:`dict`, optional):
            The dictionary with the instrument configurations that have
            already been parsed for this execution of PypeIt.  If None,
            the dictionary is instantiated from scratch; otherwise, any
            new setup information is added to the output dictionary.
        must_exist (:obj:`bool`, optional):
            The setup must exist in the provided `setup_dict`.  If not,
            the function will raise an error.
        skip_cset (:obj:`bool`, optional):
            Skip setting the calibration identifier.  This should only
            be True when first generating an instrument .setup file.
        config_name (:obj:`str`, optional):
            Can be used to choose a specific configuration ID.
        copy (:obj:`bool`, optional):
            Do not perform any in-place additions to the setup
            dictionary.  Instead copy any input dictionary and return
            the modified dictionary.  By default, modifications are made
            to the input dictionary, which is returned instead of a
            modified copy.

    Returns:
        str, dict: Returns the string identifier for the instrument
        configuration and a dictionary with the configuration data.
        The latter is either a new object, a modified copy of the input
        dictionary, or the input dictionary after it has been modified
        in place.
    """
    debugger.set_trace()

    # Labels
    cfig_str = string.ascii_uppercase
    cstr = '--'

    # Find the first arc exposure tied to this science exposure
    idx = np.where(fitstbl.find_frames('arc', sci_ID=sci_ID))[0][0]

    # Use this exposure to pull the relevant information for the instrument setup
    dispname = fitstbl['dispname'][idx] if 'dispname' in fitstbl.keys() else 'none'
    dispangle = fitstbl['dispangle'][idx] if 'dispangle' in fitstbl.keys() else 'none'
    dichroic = fitstbl['dichroic'][idx] if 'dichroic' in fitstbl.keys() else 'none'
    decker = fitstbl['decker'][idx] if 'decker' in fitstbl.keys() else 'none'
    slitwid = fitstbl['slitwid'][idx] if 'slitwid' in fitstbl.keys() else 'none'
    slitlen = fitstbl['slitlen'][idx] if 'slitlen' in fitstbl.keys() else 'none'
    binning = fitstbl['binning'][idx] if 'binning' in fitstbl.keys() else 'none'

    # Generate the relevant dictionaries
    cdict = dict(disperser={'name': dispname, 'angle': dispangle},
                 dichroic=dichroic,
                 slit={'decker': decker, 'slitwid': slitwid, 'slitlen': slitlen})
    ddict = {'binning': binning,
             'det': det,
             'namp': fitstbl.spectrograph.detector[det-1]['numamplifiers']}

    # Configuration
    setupID = None
    if setup_dict is None:
        # Generate new configuration dictionary
        setupID = 'A' if config_name is None else config_name
        _setup_dict = dict()
        _setup_dict[setupID] = {}
        _setup_dict[setupID][cstr] = cdict
    else:
        # Try to find the setup in the existing configuration dictionary
        for ckey in setup_dict.keys():
            mtch = True
            for key in setup_dict[ckey][cstr].keys():
                # Dict?
                if isinstance(setup_dict[ckey][cstr][key], dict):
                    for ikey in setup_dict[ckey][cstr][key].keys():
                        mtch &= is_equal(setup_dict[ckey][cstr][key][ikey], cdict[key][ikey])
                else:
                    mtch &= is_equal(setup_dict[ckey][cstr][key], cdict[key])
            if mtch:
                setupID = ckey
                break
        # Augment setup_dict?
        _setup_dict = setup_dict.copy() if copy else setup_dict
        if setupID is None:
            if must_exist:
                msgs.error('This setup ID is not present in the setup_dict.')
            maxs = max(_setup_dict.keys())
            setupID = cfig_str[cfig_str.index(maxs)+1]
            _setup_dict[setupID] = {}
            _setup_dict[setupID][cstr] = cdict

    # Detector
    dkey = det_setup(_setup_dict[setupID], ddict)
    # Calib set
    if not skip_cset:
        calib_key = calib_set(_setup_dict[setupID], fitstbl, sci_ID)
    else:
        calib_key = '--'

    # Finish and return
    return '{:s}_{:s}_{:s}'.format(setupID, dkey, calib_key), _setup_dict