def create_master_bias(qd, dbFile, data_dir, master_bias='MCbias.fits',
                       overwrite=True):
    cur_dir = os.getcwd()
    #os.chdir(data_dir)
    iraf.chdir(data_dir)
    if os.path.exists(master_bias):
        if overwrite is True:
            remove = raw_input('Remove bias file? (y), n ')
            if remove != 'n':
                os.remove(master_bias)
            else:
                return None
        else:
            print('Master bias, {} already exists and overwrite={}'.format(
                master_bias, overwrite))
            return None
    print(" --Creating Bias MasterCal--")

    SQL = fileSelect.createQuery('bias', qd)
    bias_files = fileSelect.fileListQuery(dbFile, SQL, qd)
    start_date = Time(qd['DateObs'].split(':')[0], out_subfmt='date')
    end_date = qd['DateObs'].split(':')[1]
    while len(bias_files) < 7:
        start_date = start_date - 1.0 * u.day
        qd['DateObs'] = '{}:{}'.format(start_date, end_date)
        SQL = fileSelect.createQuery('bias', qd)
        bias_files = fileSelect.fileListQuery(dbFile, SQL, qd)

    gmos.gbias.unlearn()
    bias_flags = {
        'logfile': 'biasLog.txt',
        'rawpath': '',
        'fl_vardq': 'yes',
        'verbose': 'no'
    }
    print('{} bias frames used in Master Bias over date range {}'.format(
        len(bias_files), qd['DateObs']))
    if len(bias_files) < 10:
        print('******WARNING less than 10 bias files********')
    if len(bias_files) > 1:
        gmos.gbias(','.join(str(x) for x in bias_files), master_bias,
                   **bias_flags)

    # Clean up
    if not os.path.exists(master_bias):  # Check that IRAF didn't error
        sys.exit('ERROR creating Master Bias: {}'.format(master_bias))
    # Remove intermediate files
    if qd['Instrument'] == 'GMOS-N':
        image_str = 'gN'
    else:
        image_str = 'gS'
    image_str = '{}{}*.fits'.format(image_str, qd['DateObs'][0:4])
    iraf.imdel(image_str)
    flist = glob.glob('tmplist*')
    for ifile in flist:
        os.remove(ifile)
    iraf.chdir(cur_dir)
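
# Usage sketch (illustrative only): the database name, data directory and
# query values below are assumptions, not part of the pipeline itself; adapt
# them to your own obsLog.sqlite3 and raw-data directory before calling.
def _example_create_master_bias():
    qd_example = {'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 2',
                  'RoI': 'Full', 'Object': 'M8-%',
                  'DateObs': '2006-09-01:2006-10-30'}
    create_master_bias(qd_example, 'obsLog.sqlite3', './raw/',
                       master_bias='MCbias.fits', overwrite=True)
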
def create_coadd_img(qd, targets, dbFile, data_dir, prefix='mrg',
                     overwrite=True):
    '''
    Despite the fact that it looks like this can be run from outside the raw
    directory, it can't be.

    Caveats:
      * It takes a lot of patience and trial-and-error tweaking of parameters
        to get good results
      * There is little control over sky background
      * The output image is no bigger than the first (reference) image, rather
        than the union of the image footprints
    '''
    ## Co-add the images, per position and filter.
    print(" -- Begin image co-addition --")
    cur_dir = os.getcwd()
    iraf.chdir(data_dir)
    # Use primarily the default task parameters.
    gemtools.imcoadd.unlearn()
    coaddFlags = {
        'fwhm': 3,
        'datamax': 6.e4,
        'geointer': 'nearest',
        'logfile': 'imcoaddLog.txt'
    }
    filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    for f in filters:
        print(" - Co-adding science images in filter: {}".format(f))
        qd['Filter2'] = f + '_G%'
        for t in targets:
            qd['Object'] = t + '%'
            print(" - Co-adding science images for position: {}".format(t))
            outImage = t + '_' + f + '.fits'
            coAddFiles = fileSelect.fileListQuery(
                dbFile, fileSelect.createQuery('sciImg', qd), qd)
            if len(coAddFiles) > 1:
                gemtools.imcoadd(','.join(prefix + str(x) for x in coAddFiles),
                                 outimage=outImage, **coaddFlags)

    iraf.delete("*_trn*,*_pos,*_cen")
    iraf.imdelete("*badpix.pl,*_med.fits,*_mag.fits")
    #iraf.imdelete ("mrgS*.fits")
    print("=== Finished Calibration Processing ===")
    iraf.chdir(cur_dir)
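
# Usage sketch (illustrative only): the target names, database path and data
# directory are assumptions; they must match the imaging reduction that
# produced the 'mrg'-prefixed mosaicked science frames.
def _example_create_coadd_img():
    qd_example = {'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 2',
                  'RoI': 'Full', 'Object': 'M8-%',
                  'DateObs': '2006-09-01:2006-10-30'}
    create_coadd_img(qd_example, ['M8-1', 'M8-2', 'M8-3'],
                     'obsLog.sqlite3', './raw/', prefix='mrg')
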
def gmos_ls_proc2(
        sciTargets,
        stdTarget,
        dbFile='./raw/obsLog.sqlite3',
        qd_full={
            'use_me': 1,
            'Instrument': 'GMOS-S',
            'CcdBin': '2 4',
            'RoI': 'Full',
            'Disperser': 'B600+_%',
            'CentWave': 485.0,
            'AperMask': '1.0arcsec',
            'Object': 'AM2306-72%',
            'DateObs': '2007-06-05:2007-07-07'
        },
        qd_censp={
            'use_me': 1,
            'Instrument': 'GMOS-S',
            'CcdBin': '2 4',
            'RoI': 'CenSp',
            'Disperser': 'B600+_%',
            'CentWave': 485.0,
            'AperMask': '1.0arcsec',
            'Object': 'LTT9239',
            'DateObs': '2007-06-05:2007-07-07'
        },
        biasFlags={
            'logfile': 'biasLog.txt',
            'rawpath': './raw/',
            'fl_vardq': 'yes',
            'verbose': 'no'
        },
        flatFlags={
            'fl_over': 'yes',
            'fl_trim': 'yes',
            'fl_bias': 'yes',
            'fl_dark': 'no',
            'fl_fixpix': 'no',
            'fl_oversize': 'no',
            'fl_vardq': 'yes',
            'fl_fulldq': 'yes',
            'rawpath': './raw',
            'fl_inter': 'no',
            'fl_detec': 'yes',
            'function': 'spline3',
            'order': '13,11,28',
            'logfile': 'gsflatLog.txt',
            'verbose': 'no'
        },
        sciFlags={
            'fl_over': 'yes',
            'fl_trim': 'yes',
            'fl_bias': 'yes',
            'fl_gscrrej': 'no',
            'fl_dark': 'no',
            'fl_flat': 'yes',
            'fl_gmosaic': 'yes',
            'fl_fixpix': 'no',
            'fl_gsappwave': 'yes',
            'fl_oversize': 'no',
            'fl_vardq': 'yes',
            'fl_fulldq': 'yes',
            'rawpath': './raw',
            'fl_inter': 'no',
            'logfile': 'gsreduceLog.txt',
            'verbose': 'no'
        },
        waveFlags={
            'coordlist': 'gmos$data/CuAr_GMOS.dat',
            'fwidth': 6,
            'nsum': 50,
            'function': 'chebyshev',
            'order': 5,
            'fl_inter': 'no',
            'logfile': 'gswaveLog.txt',
            'verbose': 'no'
        },
        sciCombFlags={
            'combine': 'average',
            'reject': 'ccdclip',
            'fl_vardq': 'yes',
            'fl_dqprop': 'yes',
            'logfile': 'gemcombineLog.txt',
            'verbose': 'no'
        },
        transFlags={
            'fl_vardq': 'yes',
            'interptype': 'linear',
            'fl_flux': 'yes',
            'logfile': 'gstransLog.txt'
        },
        skyFlags={
            'fl_oversize': 'no',
            'fl_vardq': 'yes',
            'logfile': 'gsskysubLog.txt'
        },
        extrFlags={
            'apwidth': 3.,
            'fl_inter': 'yes',
            'find': 'yes',
            'trace': 'yes',
            'tfunction': 'chebyshev',
            'torder': '6',
            'tnsum': 20,
            'background': 'fit',
            'bfunction': 'chebyshev',
            'border': 2,
            'fl_vardq': 'no',
            'logfile': 'gsextrLog.txt'
        },
        calibFlags={
            'extinction': 'onedstds$ctioextinct.dat',
            'fl_ext': 'yes',
            'fl_scale': 'no',
            'sfunction': 'sens',
            'fl_vardq': 'yes',
            'logfile': 'gscalibrateLog.txt'
        },
        skip_wavecal=True,
        clean_files=False):
    """
    Parameters
    ----------
    dbFile : str
        Filename containing the SQL sqlite3 database created by obslog.py.
        It must be placed in the ./raw/ directory.
        Default is `./raw/obsLog.sqlite3`
    sciTargets : dict
        Dictionary with the associations of science targets and their
        associated ARC for wavelength calibration, as well as the regions
        defining the sky along the slit, e.g.
        sciTargets = {'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'},
                      'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'}}
        Note that there can be more than one target defined this way.
    stdTarget : dict
        Dictionary with the associations of standard star targets and their
        associated ARC for wavelength calibration, as well as the regions
        defining the sky along the slit, e.g.
        stdTarget = {'LTT1788': {'arc': 'S20180711S0281', 'sky': '170:380,920:1080'}}
    qd_full : dictionary
        Query Dictionary of essential parameter=value pairs for the Full RoI.
        Meant for the science object.
    qd_censp : dictionary
        Query Dictionary of essential parameter=value pairs for the CenSp RoI.
        Meant for the standard star.
    biasFlags : dict
        Dictionary for the keyword flags of the gmos.gbias() function
    flatFlags : dict
        Dictionary for the keyword flags of the gmos.gsflat() function
    sciFlags : dict
        Dictionary for the keyword flags of the gmos.gsreduce() function.
        Based on these flags, arcFlags and stdFlags dictionaries will be
        created for basic processing.
    waveFlags : dict
        Dictionary for the keyword flags of the gmos.gswavelength() function
    sciCombFlags : dict
        Dictionary for the keyword flags of the gemtools.gemcombine() function.
        Based on these flags, a stdCombFlags dictionary will be created for
        the standard-star advanced processing.
    transFlags : dict
        Dictionary for the keyword flags of the gmos.gstransform() function. xxx
    skyFlags : dict
        Dictionary for the keyword flags of the gmos.gsskysub() function
    extrFlags : dict
        Dictionary for the keyword flags of the gmos.gsextract() function
    calibFlags : dict
        XXX
    skip_wavecal : bool
        Whether to skip interactive wavelength calibration.
        Useful when this is already done.

    Returns
    -------

    """
    print("### Begin Processing GMOS/Longslit Images ###")
    print("###")
    print("=== Creating MasterCals ===")

    # From the work_directory:
    # Create the query dictionary of essential parameter=value pairs for the
    # Full and CenSp RoIs.
    qd = {'Full': qd_full, 'CenSp': qd_censp}
    print(" --Creating Bias MasterCal--")

    # Set the task parameters.
    gemtools.gemextn.unlearn()  # Disarm a bug in gbias
    gmos.gbias.unlearn()
    regions = ['Full', 'CenSp']
    for r in regions:
        # The following SQL generates the list of full-frame files to process.
        SQL = fs.createQuery('bias', qd[r])
        biasFiles = fs.fileListQuery(dbFile, SQL, qd[r])
        # The str.join() function is needed to transform a python list into a
        # comma-separated string of file names that IRAF can understand.
        if len(biasFiles) > 1:
            # NT comment: sometimes if there are too many files, gmos.gbias()
            # raises an error.
            # import pdb; pdb.set_trace()
            gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias' + r,
                       **biasFlags)

    # Clean up
    year_obs = qd_full['DateObs'].split('-')[0]
    if clean_files:
        iraf.imdel("gS{}*.fits".format(year_obs))

    ask_user("MC Bias done. Would you like to continue to proceed with GCAL Spectral Master Flats? (y/n): ", ['y', 'yes'])

    print(" -- Creating GCAL Spectral Flat-Field MasterCals --")
    # Set the task parameters.
    qd['Full'].update({'DateObs': '*'})
    qd['CenSp'].update({'DateObs': '*'})
    gmos.gireduce.unlearn()
    gmos.gsflat.unlearn()
    # Normalize the spectral flats per CCD.
    # The response fitting should be done interactively.
    if flatFlags['fl_inter'] != 'yes':
        print("The response fitting should be done interactively. Please set flatFlags['fl_inter'] = 'yes'.")
        ask_user("Do you still want to proceed despite this important warning? (y/n): ", ['yes', 'y'])

    for r in regions:
        qr = qd[r]
        flatFiles = fs.fileListQuery(dbFile, fs.createQuery('gcalFlat', qr), qr)
        if len(flatFiles) > 0:
            gmos.gsflat(','.join(str(x) for x in flatFiles), 'MCflat' + r,
                        bias='MCbias' + r, **flatFlags)

    if clean_files:
        iraf.imdel('gS{}*.fits,gsS{}*.fits'.format(year_obs, year_obs))

    ask_user("GCAL Spectral Flat-Field MasterCals done. Would you like to continue to proceed with Basic Processing? (y/n): ", ['y', 'yes'])

    print("=== Processing Science Files ===")
    print(" -- Performing Basic Processing --")
    # Set task parameters.
    gmos.gsreduce.unlearn()
    sciFlags = sciFlags  # redundant but put here because NT likes it
    arcFlags = copy.deepcopy(sciFlags)
    arcFlags.update({'fl_flat': 'no', 'fl_vardq': 'no', 'fl_fulldq': 'no'})
    stdFlags = copy.deepcopy(sciFlags)
    stdFlags.update({'fl_fixpix': 'yes', 'fl_vardq': 'no', 'fl_fulldq': 'no'})

    # Perform basic reductions on all exposures for science targets.
    print(" - Arc exposures -")
    for r in regions:
        qr = qd[r]
        arcFiles = fs.fileListQuery(dbFile, fs.createQuery('arc', qr), qr)
        if len(arcFiles) > 0:
            gmos.gsreduce(','.join(str(x) for x in arcFiles),
                          bias='MCbias' + r, **arcFlags)

    print(" - Std star exposures -")
    r = 'CenSp'
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qd[r]), qd[r])
    if len(stdFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in stdFiles), bias='MCbias' + r,
                      flatim='MCflat' + r, **stdFlags)

    print(" - Science exposures -")
    r = 'Full'
    sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qd[r]), qd[r])
    if len(sciFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in sciFiles), bias='MCbias' + r,
                      flatim='MCflat' + r, **sciFlags)

    # Clean up
    if clean_files:
        iraf.imdel('gS{}*.fits'.format(year_obs))

    ask_user("Basic processing done. Would you like to continue to determine wavelength calibration? (y/n): ", ['y', 'yes'])

    print(" -- Determine wavelength calibration --")
    # Set task parameters
    gmos.gswavelength.unlearn()
    # The fit to the dispersion relation should be performed interactively.
    # Here we will use a previously determined result.
    if waveFlags['fl_inter'] != 'yes':
        print("The fit to the dispersion relation should be performed interactively. Please set waveFlags['fl_inter'] = 'yes'.")
        ask_user("Do you still want to proceed despite this important warning? (y/n): ", ['yes', 'y'])

    # Need to select specific wavecals to match science exposures.
    # NT: we do this now from the sciTargets + stdTarget input dictionaries, e.g.
    '''
    sciTargets = {
        'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'},
        'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'},
        'AM2306-721_c': {'arc': 'gsS20070623S0091', 'sky': '170:380,920:1080'}
    }
    '''
    #prefix = 'gsS20070623S0'
    #for arc in ['071', '081', '091', '109']:
    #    gmos.gswavelength(prefix + arc, **waveFlags)
    prefix = 'gs'
    arc_files = []
    for key in sciTargets.keys():
        arc_files += [sciTargets[key]['arc']]
    for key in stdTarget.keys():
        arc_files += [stdTarget[key]['arc']]
    # import pdb; pdb.set_trace()
    if skip_wavecal is not True:
        for arc in arc_files:
            gmos.gswavelength(prefix + arc, **waveFlags)

    ### End of basic processing. Continue with advanced processing.
    ask_user("Wavelength solution done. Would you like to continue with advanced processing? (y/n): ", ['y', 'yes'])

    print(" -- Performing Advanced Processing --")
    print(" -- Combine exposures, apply dispersion, subtract sky --")
    # Set task parameters.
    gemtools.gemcombine.unlearn()
    sciCombFlags = sciCombFlags
    stdCombFlags = copy.deepcopy(sciCombFlags)
    stdCombFlags.update({'fl_vardq': 'no', 'fl_dqprop': 'no'})
    gmos.gstransform.unlearn()

    # Apply gstransform to the standard.
    # Process the Standard Star.
    prefix = "gs"
    qs = qd['CenSp']
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qs), qs)
    std_name = stdTarget.keys()[0]
    if len(stdFiles) == 0:
        raise ValueError("No standard star associated. Please check parameters of search (e.g. RoI=CentSp)")
    # import pdb; pdb.set_trace()
    if len(stdFiles) > 1:
        # import pdb; pdb.set_trace()
        gemtools.gemcombine(','.join(prefix + str(x) for x in stdFiles),
                            std_name, **stdCombFlags)
    else:
        os.system("cp {}.fits {}.fits".format(prefix + stdFiles[0], std_name))

    gmos.gstransform(std_name, wavtraname=prefix + stdTarget[std_name]['arc'],
                     **transFlags)

    # The sky regions should be selected with care, using e.g. prows/pcols:
    #   pcols ("tAM2306b.fits[SCI]", 1100, 2040, wy1=40, wy2=320)
    print("The sky regions should be selected with care, using e.g. prows/pcols (see tutorial).")
    '''
    answer = raw_input("Please provide the long_sample string to apply to gmos.gsskysub() for the standard star."
                       "e.g. '20:70,190:230' (say 'no' for using the example as the default values): ")
    if answer in ['n', 'no']:
        print("Using default long_sample set by stdTarget values {}.".format(stdTarget[std_name]['sky']))
        long_sample_std = stdTarget[std_name]['sky']
    else:
        long_sample_std = answer
    '''
    long_sample_std = stdTarget[std_name]['sky']
    ask_user("Before proceeding it is important that you have set a good sky region for the standard.\n"
             "Thus far you have selected: {}\n Would you like to proceed with the current one? (y/n): ".format(long_sample_std), ['yes', 'y'])

    # Apply sky subtraction.
    skyFlags = skyFlags
    gmos.gsskysub.unlearn()
    gmos.gsskysub('t{}'.format(std_name), long_sample=long_sample_std)
    # NT: make sure the process works ok until here before proceeding further,
    # i.e. setting the sky region manually and correctly.
    # NT: seems to be working.

    print(" -- Extract Std spectrum --")
    # Extract the std spectrum using a large aperture.
    # It's important to trace the spectra interactively.
    gmos.gsextract.unlearn()
    gmos.gsextract("st" + std_name, **extrFlags)

    print(" -- Derive the Flux calibration --")
    gmos.gsstandard.unlearn()
    sensFlags = {
        'fl_inter': 'no',
        'starname': 'XXX',
        'caldir': 'onedstds$ctionewcal/',
        'observatory': 'Gemini-South',
        'extinction': 'onedstds$ctioextinct.dat',
        'function': 'chebyshev',
        'order': 9,
        'verbose': 'no',
        'logfile': 'gsstdLog.txt'
    }
    sensFlags['starname'] = stdTarget[std_name]['iraf_name']  # replace corresponding starname
    gmos.gsstandard('est' + std_name, sfile='std.txt', sfunction='sens',
                    **sensFlags)

    ask_user("Sensitivity function from standard star done. Would you like to continue with reduction of science exposures? (y/n): ", ['yes', 'y'])

    # Process the science targets.
    # Use a dictionary to associate science targets with Arcs and sky regions.
    prefix = 'gs'
    extract_individuals = True
    for targ, p in sciTargets.iteritems():
        qs = qd['Full']
        qs['Object'] = p['name']
        # Fix up the target name for the output file.
        sciOut = p['name_out']
        sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qs), qs)
        all_files = ','.join(prefix + str(x) for x in sciFiles)
        gemtools.gemcombine(all_files, sciOut, **sciCombFlags)
        gmos.gstransform(sciOut, wavtraname=prefix + p['arc'], **transFlags)
        ask_user("It is important to select a good sky region for subtraction. Thus far you have selected {} based on the sciTargets input dictionary. Would you like to continue? (y/n): ".format(p['sky']), ['y', 'yes'])
        gmos.gsskysub('t' + sciOut, long_sample=p['sky'], **skyFlags)
        if extract_individuals:
            import pdb
            pdb.set_trace()
            for fname in sciFiles:
                gmos.gstransform(prefix + fname, wavtraname=prefix + p['arc'],
                                 **transFlags)
                gmos.gsskysub('t' + prefix + fname, long_sample=p['sky'],
                              **skyFlags)
                gmos.gscalibrate.unlearn()
                gmos.gscalibrate('st' + prefix + fname, **calibFlags)

    # Clean up
    if clean_files:
        iraf.imdel("gsS{}*.fits".format(year_obs))

    ask_user("Sky subtraction done. Would you like to continue to apply sensitivity function? (y/n): ", ['y'])

    ## Apply the sensitivity function.
    gmos.gscalibrate.unlearn()
    gmos.gscalibrate('st' + sciOut + '*', **calibFlags)
    calibFlags.update({'fl_vardq': 'no'})
    gmos.gscalibrate('est' + std_name, **calibFlags)

    print(" -- Extract Target Spectra --")
    method = 'gsextract'
    if method == 'gsextract':
        gmos.gsextract.unlearn()
        # import pdb; pdb.set_trace()
        gmos.gsextract("cst" + sciOut, **extrFlags)
    elif method == 'sarith':
        # not implemented yet
        onedspec.nsum = 4
        onedspec.sarith('cst{}.fits[SCI]'.format(sciOut), 'copy', '',
                        'ecst{}.ms'.format(sciOut), apertures='222-346x4')

    print("=== Finished Calibration Processing ===")
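
# Usage sketch (illustrative only): the target/arc/sky associations below are
# hypothetical and must be replaced with your own. As written, the science
# loop expects each sciTargets entry to provide 'name', 'name_out', 'arc' and
# 'sky' keys, and the standard-star entry to provide 'arc', 'sky' and
# 'iraf_name' (the 'l9239' value below is an assumed IRAF starname).
def _example_gmos_ls_proc2():
    sci = {'AM2306-721_a': {'name': 'AM2306-72%', 'name_out': 'AM2306-721_a',
                            'arc': 'gsS20070623S0071', 'sky': '520:720'}}
    std = {'LTT9239': {'arc': 'gsS20070623S0109', 'sky': '170:380,920:1080',
                       'iraf_name': 'l9239'}}
    gmos_ls_proc2(sci, std, dbFile='./raw/obsLog.sqlite3',
                  skip_wavecal=True, clean_files=False)
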
print(" --Creating Bias MasterCal--")

# Set the task parameters.
gemtools.gemextn.unlearn()  # Disarm a bug in gbias
gmos.gbias.unlearn()
biasFlags = {
    'logfile': 'biasLog.txt',
    'rawpath': './raw/',
    'fl_vardq': 'yes',
    'verbose': 'yes'
}
regions = ['Full', 'CenSp']
for r in regions:
    # The following SQL generates the list of full-frame files to process.
    SQL = fs.createQuery('bias', qd[r])
    biasFiles = fs.fileListQuery(dbFile, SQL, qd[r])
    # The str.join() function is needed to transform a python list into a
    # comma-separated string of file names that IRAF can understand.
    if len(biasFiles) > 1:
        gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias' + r,
                   **biasFlags)

# Clean up
iraf.imdel("gS2007*.fits")

print(" -- Creating GCAL Spectral Flat-Field MasterCals --")
# Set the task parameters.
qd['Full'].update({'DateObs': '*'})
qd['CenSp'].update({'DateObs': '*'})
gmos.gireduce.unlearn()
def gmos_img_proc2(
        dbFile="./raw/obsLog.sqlite3",
        qd={
            'use_me': 1,
            'Instrument': 'GMOS-S',
            'CcdBin': '2 2',
            'RoI': 'Full',
            'Object': 'M8-%',
            'DateObs': '2006-09-01:2006-10-30'
        },
        bias_dateobs="2006-09-01:2006-10-30",
        biasFlags={
            'logfile': 'biasLog.txt',
            'rawpath': './raw/',
            'fl_vardq': 'yes',
            'verbose': 'yes'
        },
        flat_dateobs='2006-09-10:2006-10-10',
        flatFlags={
            'fl_scale': 'yes',
            'sctype': 'mean',
            'fl_vardq': 'yes',
            'rawpath': './raw/',
            'logfile': 'giflatLog.txt',
            'verbose': 'yes'
        },
        filters=['Ha', 'HaC', 'SII', 'r', 'i'],
        sciFlags={
            'fl_over': 'yes',
            'fl_trim': 'yes',
            'fl_bias': 'yes',
            'fl_dark': 'no',
            'fl_flat': 'yes',
            'logfile': 'gireduceLog.txt',
            'rawpath': './raw/',
            'fl_vardq': 'yes',
            'bpm': bpm_gmos,
            'verbose': 'yes'
        },
        mosaicFlags={
            'fl_paste': 'no',
            'fl_fixpix': 'no',
            'fl_clean': 'yes',
            'geointer': 'nearest',
            'logfile': 'gmosaicLog.txt',
            'fl_vardq': 'yes',
            'fl_fulldq': 'yes',
            'verbose': 'yes'
        },
        coaddFlags={
            'fwhm': 3,
            'datamax': 6.e4,
            'geointer': 'nearest',
            'logfile': 'imcoaddLog.txt'
        },
        targets=['M8-1', 'M8-2', 'M8-3'],
        clean_files=False):
    """
    Parameters
    ----------
    dbFile : str
        Filename containing the SQL sqlite3 database created by obslog.py.
        It must be placed in the ./raw/ directory.
        Default is `./raw/obsLog.sqlite3`
    qd : dictionary
        Query Dictionary of essential parameter=value pairs.
        Select bias exposures within ~2 months of the target observations, e.g.
        qd = {'use_me': 1,
              'Instrument': 'GMOS-S',
              'CcdBin': '2 2',
              'RoI': 'Full',
              'Object': 'M8-%',
              'DateObs': '2006-09-01:2006-10-30'
              }
    bias_dateobs : str
        String representing the bias search Obsdate, e.g.
        bias_dateobs = `2006-09-01:2006-10-30`
    biasFlags : dict
        Dictionary for the keyword flags of the gmos.gbias() function
    flat_dateobs : str
        String representing the flat search Obsdate, e.g.
        flat_dateobs = `2006-09-10:2006-10-10`
    flatFlags : dict
        Dictionary for the keyword flags of the gmos.giflat() function, e.g.
        flatFlags = {'fl_scale': 'yes', 'sctype': 'mean', 'fl_vardq': 'yes',
                     'rawpath': './raw/', 'logfile': 'giflatLog.txt',
                     'verbose': 'yes'}
    filters : list
        List of filter names to perform reduction, e.g.
        filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    sciFlags : dict
        Dictionary for the keyword flags of the gmos.gireduce() function
    mosaicFlags : dict
        Dictionary for the keyword flags of the gmos.gmosaic() function
    coaddFlags : dict
        Dictionary for the keyword flags of the gemtools.imcoadd() function
    targets : list
        List of names of target observations for the co-addition, e.g.
        targets = ['M8-1', 'M8-2', 'M8-3']
    clean_files : bool
        Whether to clean intermediate files from the reduction process

    Returns
    -------
    Reduce GMOS imaging based on tutorial example.
    """
    print("### Begin Processing GMOS/MOS Images ###")
    print("###")
    print("=== Creating MasterCals ===")

    # From the work_directory:
    # Create the query dictionary of essential parameter=value pairs.
    # Select bias exposures within ~2 months of the target observations:
    print(" --Creating Bias MasterCal--")
    qd.update({'DateObs': bias_dateobs})
    # Set the task parameters.
    gmos.gbias.unlearn()
    # The following SQL generates the list of files to process.
    SQL = fs.createQuery('bias', qd)
    biasFiles = fs.fileListQuery(dbFile, SQL, qd)
    # The str.join() function is needed to transform a python list into a
    # string filelist that IRAF can understand.
    if len(biasFiles) > 1:
        files_all = ','.join(str(x) for x in biasFiles)
        # import pdb; pdb.set_trace()
        gmos.gbias(files_all, 'MCbias.fits', **biasFlags)

    # Clean up
    year_obs = qd['DateObs'].split('-')[0]
    if clean_files:
        iraf.imdel('gS{}*.fits'.format(year_obs))

    ask_user("MC Bias done. Would you like to continue to proceed with Master Flats? (y/n): ", ['y', 'yes'])

    print(" --Creating Twilight Imaging Flat-Field MasterCal--")
    # Select flats obtained contemporaneously with the observations.
    qd.update({'DateObs': flat_dateobs})
    # Set the task parameters.
    gmos.giflat.unlearn()
    #filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    for f in filters:
        print(" Building twilight flat MasterCal for filter: %s" % (f))
        # Select filter name using a substring of the official designation.
        qd['Filter2'] = f + '_G%'
        mcName = 'MCflat_%s.fits' % (f)
        flatFiles = fs.fileListQuery(dbFile, fs.createQuery('twiFlat', qd), qd)
        if len(flatFiles) > 0:
            files_all = ','.join(str(x) for x in flatFiles)
            # import pdb; pdb.set_trace()
            gmos.giflat(files_all, mcName, bias='MCbias', **flatFlags)

    if clean_files:
        iraf.imdel('gS{}*.fits,rgS{}*.fits'.format(year_obs, year_obs))

    ask_user("MC Flats done. Would you like to continue to proceed with processing Science Images? (y/n): ", ['yes', 'y'])

    print("=== Processing Science Images ===")
    # Remove restriction on date range.
    qd['DateObs'] = '*'
    prefix = 'rg'
    gmos.gireduce.unlearn()
    gemtools.gemextn.unlearn()  # disarms a bug in gmosaic
    gmos.gmosaic.unlearn()
    # Reduce the science images, then mosaic the extensions in a loop.
    for f in filters:
        print(" Processing science images for filter: %s" % (f))
        qd['Filter2'] = f + '_G%'
        flatFile = 'MCflat_' + f + '.fits'
        SQL = fs.createQuery('sciImg', qd)
        sciFiles = fs.fileListQuery(dbFile, SQL, qd)
        if len(sciFiles) > 0:
            # Make sure the BPM table is in sciFlags for employing the imaging
            # Static BPM for this set of detectors.
            # import pdb; pdb.set_trace()
            all_files = ','.join(str(x) for x in sciFiles)
            gmos.gireduce(all_files, bias='MCbias', flat1=flatFile, **sciFlags)
            for file in sciFiles:
                gmos.gmosaic(prefix + file, **mosaicFlags)
        else:
            print("No Science images found for filter {}. Check database.".format(f))
            import pdb
            pdb.set_trace()

    if clean_files:
        iraf.imdelete('gS{}*.fits,rgS{}*.fits'.format(year_obs, year_obs))

    ask_user("Science Images done. Would you like to continue to proceed with image co-addition? (y/n): ", ['y', 'yes'])

    ## Co-add the images, per position and filter.
    print(" -- Begin image co-addition --")
    # Use primarily the default task parameters.
    gemtools.imcoadd.unlearn()
    prefix = 'mrg'
    for f in filters:
        print(" - Co-adding science images in filter: %s" % (f))
        qd['Filter2'] = f + '_G%'
        for t in targets:
            qd['Object'] = t + '%'
            print(" - Co-adding science images for position: %s" % (t))
            outImage = t + '_' + f + '.fits'
            coAddFiles = fs.fileListQuery(dbFile, fs.createQuery('sciImg', qd),
                                          qd)
            all_files = ','.join(prefix + str(x) for x in coAddFiles)
            if all_files == '':
                print('No files available for co-addition. Check that the target names are written correctly.')
                import pdb
                pdb.set_trace()
            gemtools.imcoadd(all_files, outimage=outImage, **coaddFlags)

    ask_user("Co-addition done. Would you like to clean the latest intermediate reduction files? (y/n): ", ['y', 'yes'])

    if clean_files:
        iraf.delete("*_trn*,*_pos,*_cen")
        iraf.imdelete("*badpix.pl,*_med.fits,*_mag.fits")
        # iraf.imdelete ("mrgS*.fits")

    print("=== Finished Calibration Processing ===")
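
# Usage sketch (illustrative only): most keyword defaults already target the
# M8 tutorial data set, so a minimal call only overrides what differs; the
# values below are assumptions, not a prescription.
def _example_gmos_img_proc2():
    gmos_img_proc2(dbFile='./raw/obsLog.sqlite3',
                   filters=['Ha', 'r', 'i'],
                   targets=['M8-1', 'M8-2', 'M8-3'],
                   clean_files=True)
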
# Select bias exposures within ~2 months of the target observations:
qd = {'use_me': 1,
      'Instrument': 'GMOS-S',
      'CcdBin': '2 2',
      'RoI': 'Full',
      'Object': 'M8-%',
      'DateObs': '2006-09-01:2006-10-30'
      }

print(" --Creating Bias MasterCal--")

# Set the task parameters.
gmos.gbias.unlearn()
biasFlags = {
    'logfile': 'biasLog.txt',
    'rawpath': './raw/',
    'fl_vardq': 'yes',
    'verbose': 'no'
}
# The following SQL generates the list of files to process.
SQL = fs.createQuery('bias', qd)
biasFiles = fs.fileListQuery(dbFile, SQL, qd)
# The str.join() function is needed to transform a python list into a string
# filelist that IRAF can understand.
if len(biasFiles) > 1:
    gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias.fits',
               **biasFlags)

# Clean up
iraf.imdel('gS2006*.fits')

print(" --Creating Twilight Imaging Flat-Field MasterCal--")
# Select flats obtained contemporaneously with the observations.
qd.update({'DateObs': '2006-09-10:2006-10-10'})
# Set the task parameters.
def gmos_mos_proc():
    '''
    Modified version of the GMOS Data Reduction Cookbook companion script to
    the chapter: "Reduction of Multi-Object Spectra with IRAF"

    PyRAF script to:
    Process MOS exposures for Sculptor Dwarf field 1, in program
    GN-2008-B-Q-025.

    The names for the relevant header keywords and their expected values are
    described in the DRC chapter entitled "Supplementary Material".

    Perform the following starting in the parent work directory:
        cd /path/to/work_directory

    Place the fileSelect.py module in your work directory. Now execute this
    script from the unix prompt:
        python gmos_img_proc.py
    '''
    print("### Begin Processing GMOS/MOS Spectra ###")
    print(' ')
    print("---> You must have the MDF files:")
    print("---> GS2008BQ025-01.fits and GS2008BQ025-02.fits")
    print("---> in your work directory. ")
    print(' ')
    print("=== Creating MasterCals ===")

    dbFile = 'raw/obsLog.sqlite3'

    # Create query dictionaries for the science observations at each CentWave.
    # Select bias exposures within ~2 months of the target observations:
    qdf = {'use_me': 1,
           'CcdBin': '4 2',
           'DateObs': '2008-09-10:2008-12-12',
           #'DateObs': '2008-10-20:2008-11-21',
           'Instrument': 'GMOS-S',
           'Disperser': 'B600+_%',
           'AperMask': 'GS2008BQ025-01',
           'CentWave': 520.0,
           'Object': 'Sculptor-field1',
           'RoI': 'Full'
           }

    # Create query dictionaries for the standard star observation.
    qd_std = copy.deepcopy(qdf)
    qd_std['AperMask'] = '1.0arcsec'
    qd_std['Object'] = 'LTT1020'

    print(" --Creating Bias MasterCal-- ")
    # Use primarily the default task parameters.
    gemtools.gemextn.unlearn()  # Disarm a bug in gbias
    gmos.gbias.unlearn()
    #gmos.gbias.logfile = 'biasLog.txt'
    #gmos.gbias.rawpath = './raw/'
    #gmos.gbias.fl_vardq = 'yes'
    #gmos.gbias.verbose = 'no'
    biasFlags = {
        'logfile': 'biasLog.txt', 'rawpath': './raw/', 'fl_vardq': 'yes',
        'verbose': 'no'
    }
    # This SQL query generates the list of full-frame files to process. Note
    # that since the std star has the same RoI, CCD binning, and CCD
    # gain/read-out speed, we only need to make one bias file.
    SQL = fs.createQuery('bias', qdf)
    biasFull = fs.fileListQuery(dbFile, SQL, qdf)
    # The join function originally used runs into problems - use this f.write
    # to make a string of comma-separated files that IRAF can understand.
    print(" --Generating MasterCal for Full-- ")
    with open('biases.lis', 'w') as f:
        [f.write(x + '\n') for x in biasFull]
    # Create the bias MasterCal.
    gmos.gbias('@biases.lis', 'MCbiasFull.fits', **biasFlags)

    # Clean up
    #iraf.imdel('gS2008*.fits')

    print(" --Creating GCAL Spectral Flat-Field MasterCals--")
    # Set the task parameters.
    gmos.gireduce.unlearn()
    gmos.gsflat.unlearn()
    gmos.gsflat.fl_vardq = 'yes'
    gmos.gsflat.fl_fulldq = 'yes'
    gmos.gsflat.fl_oversize = 'no'
    gmos.gsflat.fl_inter = 'no'
    gmos.gsflat.logfile = 'gsflatLog.txt'
    gmos.gsflat.rawpath = './raw'
    gmos.gsflat.verbose = 'no'

    call("ls Sculptor*.fits", shell=True)

    # Perform flat-field normalization for the science images.
    print(" -Full Flat (GCAL & Twi) normalization for science images, non-interactive-")
    qdf['DateObs'] = '*'
    qdf['Filter2'] = 'open2-8'
    cwf = {'B6-520': 520.0, 'B6-525': 525.0, 'B6-522': 522.5}
    #flatType = ['gcalFlat', 'twiFlat']
    # No twilight flats were available for this observation - calibrating
    # using only the gcal flats.
    flatType = ['gcalFlat']
    for ft in flatType:
        for tag, w in cwf.iteritems():
            qdf['Disperser'] = tag[0:2] + '00+_%'
            qdf['CentWave'] = w
            flatName = 'MC' + ft + '-M01_' + tag
            combName = 'MC' + ft + 'Comb-M01_' + tag
            flatFull = fs.fileListQuery(dbFile, fs.createQuery(ft, qdf), qdf)
            with open('flats_sci.lis', 'w') as f:
                [f.write(x + '\n') for x in flatFull]
            print("Flatfielding for " + str(ft) + " and " + str(w))
            gmos.gsflat('@flats_sci.lis', flatName, bias='MCbiasFull',
                        fl_keep='yes', combflat=combName, fl_usegrad='yes',
                        fl_seprows='no', order='53')
            os.remove('flats_sci.lis')

    call("ls Sculptor*.fits", shell=True)

    # Perform flat-field normalization for the standard star.
    # Standard star was taken at centw 415, 520, 625.
    print(" -Full Flat (GCAL & Twi) normalization for the standard star, non-interactive-")
    qd_std['DateObs'] = '*'
    qd_std['Filter2'] = 'open2-8'
    cws = {'B6-415': 415.0, 'B6-520': 520.0, 'B6-625': 625.0}
    #flatType = ['gcalFlat', 'twiFlat']
    # No twilight flats were available for this observation - calibrating
    # using only the gcal flats.
    flatType = ['gcalFlat']
    for ft in flatType:
        for tag, w in cws.iteritems():
            qd_std['Disperser'] = tag[0:2] + '00+_%'
            qd_std['CentWave'] = w
            flatName = 'MC' + ft + '-M01_' + tag
            combName = 'MC' + ft + 'Comb-M01_' + tag
            flatFull = fs.fileListQuery(dbFile, fs.createQuery(ft, qd_std),
                                        qd_std)
            with open('flats_std.lis', 'w') as f:
                [f.write(x + '\n') for x in flatFull]
            gmos.gsflat('@flats_std.lis', flatName, bias='MCbiasFull',
                        fl_keep='yes', combflat=combName, fl_usegrad='yes',
                        fl_seprows='no', order='53')
            os.remove('flats_std.lis')

    call("ls Sculptor*.fits", shell=True)

    print("=== Processing Science Files ===")
    print(" -- Performing Basic Processing --")
    # Use primarily the default task parameters.
    gmos.gsreduce.unlearn()
    gmos.gsreduce.logfile = 'gsreduceLog.txt'
    gmos.gsreduce.rawpath = './raw'
    gmos.gsreduce.verbose = 'no'
    gmos.gsreduce.fl_fixpix = 'no'
    gmos.gsreduce.fl_oversize = 'no'
    # Perform single-frame CR rejection
    #gmos.gsreduce.fl_gscr = 'yes'

    print(" - GSReducing MOS Science and Arc exposures -")
    for tag, w in cwf.iteritems():
        qdf['Disperser'] = tag[0:2] + '00+_%'
        qdf['CentWave'] = w
        flatName = 'MCgcalFlat-M01_' + tag
        gradName = 'MCgcalFlatComb-M01_' + tag
        arcFull = fs.fileListQuery(dbFile, fs.createQuery('arc', qdf), qdf)
        gmos.gsreduce(','.join(str(x) for x in arcFull), bias='MCbiasFull',
                      gradimage=gradName, fl_flat='no')
        sciFull = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qdf), qdf)
        gmos.gsreduce(','.join(str(x) for x in sciFull), bias='MCbiasFull',
                      flatim=flatName, gradimage=gradName, fl_vardq='yes',
                      fl_fulldq='yes')

    call("ls Sculptor*.fits", shell=True)

    print(" - GSReducing Longslit Std-star and Arc exposures -")
    for tag, w in cws.iteritems():
        qd_std['Disperser'] = tag[0:2] + '00+_%'
        qd_std['CentWave'] = w
        flatName = 'MCgcalFlat-M01_' + tag
        arc_std = fs.fileListQuery(dbFile, fs.createQuery('arc', qd_std),
                                   qd_std)
        gmos.gsreduce(','.join(str(x) for x in arc_std), bias='MCbiasFull',
                      fl_flat='no')
        std_files = fs.fileListQuery(dbFile, fs.createQuery('std', qd_std),
                                     qd_std)
        gmos.gsreduce(','.join(str(x) for x in std_files), bias='MCbiasFull',
                      flatim=flatName, fl_fixpix='yes')

    call("ls Sculptor*.fits", shell=True)

    # Clean up - uncomment this eventually
    #iraf.imdel('gS2008*.fits')

    print("=== Finished Basic Calibration Processing ===")
    print("\n")

    print("=== Performing cosmic-ray rejection using gemcrspec ===")
    # Note that this construction works because there's only one exposure per
    # position/grating/cenwave combo. If you have multiple exposures, comment
    # this block out and use gemcombine to do outlier rejection when combining
    # images instead.
    gemtools.gemcrspec.unlearn()
    gemtools.gemcrspec.xorder = '9'
    gemtools.gemcrspec.yorder = '-1'
    gemtools.gemcrspec.sigclip = '4.5'
    gemtools.gemcrspec.sigfrac = '0.5'
    gemtools.gemcrspec.objlim = '1.0'
    gemtools.gemcrspec.verbose = 'no'
    prefix = 'gs'
    for tag, w in cwf.iteritems():
        qdf['Disperser'] = tag[0:2] + '00+_%'
        qdf['CentWave'] = w
        outFile = qdf['Object'] + '-M01_' + tag
        sciFull = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qdf), qdf)
        print(sciFull)
        gemtools.gemcrspec(','.join(prefix + str(x) for x in sciFull), outFile)
    # Do the same for the standard star.
    for tag, w in cws.iteritems():
        qd_std['Disperser'] = tag[0:2] + '00+_%'
        qd_std['CentWave'] = w
        outFile = qd_std['Object'] + '-M01_' + tag
        stdFull = fs.fileListQuery(dbFile, fs.createQuery('std', qd_std),
                                   qd_std)
        print(stdFull)
        gemtools.gemcrspec(','.join(prefix + str(x) for x in stdFull), outFile)

    call("ls Sculptor*.fits", shell=True)

    # Unused block for doing outlier rejection with multiple exposures per
    # position/grating/cenwave combo:
    '''
    # Use primarily the default task parameters.
    gemtools.gemcombine.unlearn()
    gemtools.gemcombine.logfile = 'gemcombineLog.txt'
    gemtools.gemcombine.reject = 'ccdclip'
    gemtools.gemcombine.fl_vardq = 'yes'
    gemtools.gemcombine.fl_dqprop = 'yes'
    gemtools.gemcombine.verbose = 'no'
    prefix = 'gs'
    for tag, w in cwf.iteritems():
        qdf['Disperser'] = tag[0:2] + '00+_%'
        qdf['CentWave'] = w
        outFile = qdf['Object'] + tag
        sciFull = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qdf), qdf)
        gemtools.gemcombine(','.join(prefix + str(x) for x in sciFull), outFile)
    # Do the same for the standard star
    for tag, w in cws.iteritems():
        qdf['Disperser'] = tag[0:2] + '00+_%'
        qdf['CentWave'] = w
        outFile = qd_std['Object'] + tag
        stdFull = fs.fileListQuery(dbFile, fs.createQuery('std', qd_std), qd_std)
        gemtools.gemcombine(','.join(prefix + str(x) for x in stdFull), outFile)
    '''

    print("=== Beginning wavelength calibration ===")
    print(" -- Deriving wavelength calibration --")
    # Begin with longslit Arcs.
    # The fit to the dispersion relation should be performed interactively;
    # here we will use a previously determined result.
    # There are many arcs to choose from: we only need one for each setting.
    gmos.gswavelength.unlearn()
    waveFlags = {
        'coordlist': 'gmos$data/CuAr_GMOS.dat', 'fwidth': 6, 'nsum': 50,
        'function': 'chebyshev', 'order': 5,
        'fl_inter': 'no', 'logfile': 'gswaveLog.txt', 'verbose': 'no'
    }
    for seq in ['091', '092', '093']:
        inFile = prefix + 'S20081129S0' + seq
        gmos.gswavelength(inFile, **waveFlags)

    # Now for the MOS arcs.
    waveFlags.update({'order': 7, 'nsum': 20, 'step': 2})
    for seq in ['249', '250', '251']:
        inFile = prefix + 'S20081120S0' + seq
        gmos.gswavelength(inFile, **waveFlags)

    call("ls Sculptor*.fits", shell=True)

    # This block is in the tutorial but it seems incorrect - it applies the
    # calibration to non-gsreduced arcs!!
    '''
    for tag, w in cwf.iteritems():
        qdf['Disperser'] = tag[0:2] + '00+_%'
        qdf['CentWave'] = w
        outFile = qdf['Object'] + tag
        arcFull = fs.fileListQuery(dbFile, fs.createQuery('arcP', qdf), qdf)
        gmos.gswavelength(','.join(prefix + str(x) for x in arcFull), **waveFlags)
    '''

    print(" -- Applying wavelength calibration -- ")
    gmos.gstransform.unlearn()
    transFlags = {
        'fl_vardq': 'no', 'interptype': 'linear', 'fl_flux': 'yes',
        'logfile': 'gstransformLog.txt', 'verbose': 'no'
    }
    # Construct a mapping for the wavelength calibration.
    # Format (arc id, sci/std id, target): 'filter/disperser'
    print(" -- Calibrating standard star exposures -- ")
    gmos.gstransform('LTT1020-M01_B6-415', wavtraname='gsS20081129S0091',
                     **transFlags)
    gmos.gstransform('LTT1020-M01_B6-520', wavtraname='gsS20081129S0092',
                     **transFlags)
    gmos.gstransform('LTT1020-M01_B6-625', wavtraname='gsS20081129S0093',
                     **transFlags)

    call("ls Sculptor*.fits", shell=True)

    # This block seems to operate on the non CR-cleaned images!
    '''
    transMap = {
        ('091', '036', 'LTT1020'): 'B6-415',
        ('092', '039', 'LTT1020'): 'B6-520',
        ('093', '040', 'LTT1020'): 'B6-625'
    }
    print(" -- Calibrating standard star exposures -- ")
    for id, tag in transMap.iteritems():
        inFile = 'gsS20081129S0' + id[1]
        wavFile = 'gsS20081129S0' + id[0]
        outFile = 't' + id[2] + '_' + tag
        gmos.gstransform(inFile, outimages=outFile, wavtraname=wavFile)
    '''

    print(" -- Calibrating MOS science exposures -- ")
    transFlags.update({'fl_vardq': 'yes'})
    gmos.gstransform('Sculptor-field1-M01_B6-520',
                     wavtraname='gsS20081120S0249', **transFlags)
    gmos.gstransform('Sculptor-field1-M01_B6-522',
                     wavtraname='gsS20081120S0250', **transFlags)
    gmos.gstransform('Sculptor-field1-M01_B6-525',
                     wavtraname='gsS20081120S0251', **transFlags)

    call("ls Sculptor*.fits", shell=True)

    # Note that our standard star is extremely bright, so the signal is
    # swamped by light from the star - this step is probably optional.
    print(" == Beginning flux calibration == ")
    print(" -- Performing sky subtraction on standard star -- ")
    # This will require summing the spectra along columns, e.g.:
    #   iraf.pcols("tLTT1020-M01_B6-415.fits[SCI]", 400, 1400, wy1=0, wy2=1000)
    # The sky regions should be selected with care, using e.g. prows/pcols.
    gmos.gsskysub.unlearn()
    gmos.gsskysub.logfile = 'gsskysubLog.txt'
    gmos.gsskysub.fl_oversize = 'no'
    gmos.gsskysub.verbose = 'no'
    gmos.gsskysub('tLTT1020-M01_*', long_sample='850:1000,1350:1500')

    call("ls Sculptor*.fits", shell=True)

    print(" -- Extracting longslit 1-D spectra of standard stars -- ")
    gmos.gsextract.unlearn()
    extrFlags = {
        'apwidth': 3., 'fl_inter': 'no', 'find': 'yes',
        'trace': 'yes', 'tfunction': 'spline3', 'tnsum': 20, 'tstep': 50,
        'weights': 'none', 'background': 'none',
        'fl_vardq': 'no', 'verbose': 'no', 'logfile': 'gsextractLog.txt'
    }
    ordc = {'B6-415': 7, 'B6-520': 8, 'B6-625': 8}
    # Perform extraction on the standard star:
    for tag, o in ordc.iteritems():
        infile = 'stLTT1020-M01_' + tag
        gmos.gsextract(infile, torder=o, **extrFlags)

    print(" -- Flux calibrating the standard star -- ")
    gmos.gsstandard.unlearn()
    sensFlags = {
        'fl_inter': 'yes', 'starname': 'l1020',
        'caldir': 'onedstds$ctionewcal/', 'observatory': 'Gemini-South',
        'extinction': 'onedstds$ctioextinct.dat',
        'function': 'spline3', 'order': 7, 'verbose': 'no',
        'logfile': 'gsstdLog.txt'
    }
    gmos.gsstandard('estLTT1020-M01_B6*', sfile='std_B6', sfunction='sens_B6',
                    **sensFlags)

    call("ls Sculptor*.fits", shell=True)

    print(" --Processing done-- ")
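
# Usage note (not from the original script): gmos_mos_proc() takes no
# arguments; the database path, query dictionaries and file names are
# hard-wired for the Sculptor Dwarf field 1 data set, so it is intended to be
# run from the work directory that holds the raw/ folder and MDF files, e.g.
#
#   from gmos_mos_module import gmos_mos_proc   # hypothetical module name
#   gmos_mos_proc()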
def gmos_img_proc2(dbFile="./raw/obsLog.sqlite3", qd={'use_me': 1,'Instrument': 'GMOS-S', 'CcdBin': '2 2', 'RoI': 'Full', 'Object': 'M8-%', 'DateObs': '2006-09-01:2006-10-30'}, bias_dateobs="2006-09-01:2006-10-30", biasFlags={'logfile': 'biasLog.txt', 'rawpath': './raw/', 'fl_vardq': 'yes', 'verbose': 'yes'}, flat_dateobs='2006-09-10:2006-10-10', flatFlags = {'fl_scale': 'yes', 'sctype': 'mean', 'fl_vardq': 'yes','rawpath': './raw/', 'logfile': 'giflatLog.txt', 'verbose': 'yes'}, filters = ['Ha', 'HaC', 'SII', 'r', 'i'], sciFlags={'fl_over': 'yes', 'fl_trim': 'yes', 'fl_bias':'yes', 'fl_dark': 'no','fl_flat': 'yes', 'logfile':'gireduceLog.txt', 'rawpath': './raw/','fl_vardq': 'yes','bpm':bpm_gmos, 'verbose': 'yes'}, mosaicFlags = {'fl_paste': 'no', 'fl_fixpix': 'no', 'fl_clean': 'yes', 'geointer': 'nearest', 'logfile': 'gmosaicLog.txt', 'fl_vardq': 'yes', 'fl_fulldq': 'yes', 'verbose': 'yes'}, coaddFlags = {'fwhm': 3, 'datamax': 6.e4, 'geointer': 'nearest', 'logfile': 'imcoaddLog.txt'}, targets = ['M8-1', 'M8-2', 'M8-3'], clean_files = False ): """ Parameters ---------- dbFile : str Filename containing the SQL sqlite3 database created by obslog.py It must be placed in the ./raw/ directory Default is `./raw/obsLog.sqlite3` qd : dictionary Query Dictionary of essential parameter=value pairs. Select bias exposures within ~2 months of the target observations e.g. qd= {'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 2', 'RoI': 'Full', 'Object': 'M8-%', 'DateObs': '2006-09-01:2006-10-30' } bias_dateobs : str String representing the bias search Obsdate e.g. bias_dateobs = `2006-09-01:2006-10-30` biasFlags : dict Dictionary for the keyword flags of gmos.gbias() function flat_dateobs : str String representing the flat search Obsdate e.g. flat_dateobs = `2006-09-10:2006-10-10` flatFlags : dict Dictionary for the keyword flags of gmos.giflat() function e.g. flatFlags = {'fl_scale': 'yes', 'sctype': 'mean', 'fl_vardq': 'yes','rawpath': './raw/', 'logfile': 'giflatLog.txt', 'verbose': 'yes'} filters : list List of filter names to perform reduction e.g. filters=['Ha', 'HaC', 'SII', 'r', 'i'] sciFlags : dict Dictionary for the keyword flags of gmos.gireduce() function mosaicFlags : dict Dictionary for the keyword flags of gmos.gimosaic() function coaddFlags : dict Dictionary for the keyword flags of gemtools.imcoadd() function targets : list List of names of target observations for the co-addition e.g. targets = ['M8-1', 'M8-2', 'M8-3'] clean_files : bool Whether to clean intermediate files from reduction process Returns ------- Reduce GMOS imaging based on tutorial example. """ print ("### Begin Processing GMOS/MOS Images ###") print ("###") print ("=== Creating MasterCals ===") # From the work_directory: # Create the query dictionary of essential parameter=value pairs. # Select bias exposures within ~2 months of the target observations: print (" --Creating Bias MasterCal--") qd.update({'DateObs': bias_dateobs}) # Set the task parameters. gmos.gbias.unlearn() # The following SQL generates the list of files to process. SQL = fs.createQuery('bias', qd) biasFiles = fs.fileListQuery(dbFile, SQL, qd) # The str.join() function is needed to transform a python list into a string # filelist that IRAF can understand. if len(biasFiles) > 1: files_all = ','.join(str(x) for x in biasFiles) # import pdb; pdb.set_trace() gmos.gbias(files_all, 'MCbias.fits', **biasFlags) # Clean up year_obs = qd['DateObs'].split('-')[0] if clean_files: iraf.imdel('gS{}*.fits'.format(year_obs)) ask_user("MC Bias done. 
Would you like to continue to proceed with Master Flats? (y/n): ",['y','yes']) print (" --Creating Twilight Imaging Flat-Field MasterCal--") # Select flats obtained contemporaneously with the observations. qd.update({'DateObs': flat_dateobs}) # Set the task parameters. gmos.giflat.unlearn() #filters = ['Ha', 'HaC', 'SII', 'r', 'i'] for f in filters: print " Building twilight flat MasterCal for filter: %s" % (f) # Select filter name using a substring of the official designation. qd['Filter2'] = f + '_G%' mcName = 'MCflat_%s.fits' % (f) flatFiles = fs.fileListQuery(dbFile, fs.createQuery('twiFlat', qd), qd) if len(flatFiles) > 0: files_all = ','.join(str(x) for x in flatFiles) # import pdb; pdb.set_trace() gmos.giflat(files_all, mcName, bias='MCbias', **flatFlags) if clean_files: iraf.imdel('gS{}*.fits,rgS{}*.fits'.format(year_obs, year_obs)) ask_user("MC Flats done. Would you like to continue to proceed with processing Science Images? (y/n): ", ['yes','y']) print ("=== Processing Science Images ===") # Remove restriction on date range qd['DateObs'] = '*' prefix = 'rg' gmos.gireduce.unlearn() gemtools.gemextn.unlearn() # disarms a bug in gmosaic gmos.gmosaic.unlearn() # Reduce the science images, then mosaic the extensions in a loop for f in filters: print " Processing science images for filter: %s" % (f) qd['Filter2'] = f + '_G%' flatFile = 'MCflat_' + f + '.fits' SQL = fs.createQuery('sciImg', qd) sciFiles = fs.fileListQuery(dbFile, SQL, qd) if len(sciFiles) > 0: # Make sure BPM table is in sciFlags for employing the imaging Static BPM for this set of detectors. # import pdb; pdb.set_trace() all_files = ','.join(str(x) for x in sciFiles) gmos.gireduce(all_files, bias='MCbias', flat1=flatFile, **sciFlags) for file in sciFiles: gmos.gmosaic(prefix + file, **mosaicFlags) else: print("No Science images found for filter {}. Check database.".format(f)) import pdb; pdb.set_trace() if clean_files: iraf.imdelete('gS{}*.fits,rgS{}*.fits'.format(year_obs,year_obs)) ask_user("Science Images done. Would you like to continue to proceed with image co-addition? (y/n): ", ['y','yes']) ## Co-add the images, per position and filter. print (" -- Begin image co-addition --") # Use primarily the default task parameters. gemtools.imcoadd.unlearn() prefix = 'mrg' for f in filters: print " - Co-addding science images in filter: %s" % (f) qd['Filter2'] = f + '_G%' for t in targets: qd['Object'] = t + '%' print " - Co-addding science images for position: %s" % (t) outImage = t + '_' + f + '.fits' coAddFiles = fs.fileListQuery(dbFile, fs.createQuery('sciImg', qd), qd) all_files = ','.join(prefix + str(x) for x in coAddFiles) if all_files == '': print('No files available for co-addition. Check that the target names are written correctly.') import pdb; pdb.set_trace() gemtools.imcoadd(all_files, outimage=outImage, **coaddFlags) ask_user("Co-addition done. Would you like to clean the latest intermediate reduction files? (y/n): ", ['y','yes']) if clean_files: iraf.delete("*_trn*,*_pos,*_cen") iraf.imdelete("*badpix.pl,*_med.fits,*_mag.fits") # iraf.imdelete ("mrgS*.fits") print ("=== Finished Calibration Processing ===")
# Select bias exposures within ~2 months of the target observations: qd = {'use_me':1, 'Instrument':'GMOS-S','CcdBin':'2 2','RoI':'Full','Object':'M8-%', 'DateObs':'2006-09-01:2006-10-30' } print (" --Creating Bias MasterCal--") # Set the task parameters. gmos.gbias.unlearn() biasFlags = { 'logfile':'biasLog.txt','rawpath':'./raw/','fl_vardq':'yes', 'verbose':'no' } # The following SQL generates the list of files to process. SQL = fs.createQuery('bias', qd) biasFiles = fs.fileListQuery(dbFile, SQL, qd) # The str.join() function is needed to transform a python list into a string # filelist that IRAF can understand. if len(biasFiles) > 1: gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias.fits', **biasFlags) # Clean up iraf.imdel('gS2006*.fits') print (" --Creating Twilight Imaging Flat-Field MasterCal--") # Select flats obtained contemporaneously with the observations. qd.update({'DateObs':'2006-09-10:2006-10-10'}) # Set the task parameters.
def gmos_ls_proc2( sciTargets, stdTarget, dbFile='./raw/obsLog.sqlite3', qd_full={'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 4', 'RoI': 'Full', 'Disperser': 'B600+_%', 'CentWave': 485.0, 'AperMask': '1.0arcsec', 'Object': 'AM2306-72%','DateObs': '2007-06-05:2007-07-07'}, qd_censp={'use_me': 1, 'Instrument': 'GMOS-S', 'CcdBin': '2 4', 'RoI': 'CenSp', 'Disperser': 'B600+_%', 'CentWave': 485.0, 'AperMask': '1.0arcsec', 'Object': 'LTT9239','DateObs': '2007-06-05:2007-07-07'}, biasFlags={'logfile': 'biasLog.txt', 'rawpath': './raw/', 'fl_vardq': 'yes', 'verbose': 'no'}, flatFlags = {'fl_over': 'yes', 'fl_trim': 'yes', 'fl_bias': 'yes', 'fl_dark': 'no', 'fl_fixpix': 'no', 'fl_oversize': 'no', 'fl_vardq': 'yes', 'fl_fulldq': 'yes','rawpath': './raw', 'fl_inter': 'no', 'fl_detec': 'yes', 'function': 'spline3', 'order': '13,11,28', 'logfile': 'gsflatLog.txt', 'verbose': 'no'}, sciFlags = {'fl_over': 'yes', 'fl_trim': 'yes', 'fl_bias': 'yes', 'fl_gscrrej': 'no','fl_dark': 'no', 'fl_flat': 'yes', 'fl_gmosaic': 'yes', 'fl_fixpix': 'no', 'fl_gsappwave': 'yes', 'fl_oversize': 'no', 'fl_vardq': 'yes', 'fl_fulldq': 'yes', 'rawpath': './raw', 'fl_inter': 'no', 'logfile': 'gsreduceLog.txt', 'verbose': 'no'}, waveFlags = {'coordlist': 'gmos$data/CuAr_GMOS.dat', 'fwidth': 6, 'nsum': 50, 'function': 'chebyshev', 'order': 5, 'fl_inter': 'no', 'logfile': 'gswaveLog.txt', 'verbose': 'no'}, sciCombFlags = {'combine': 'average', 'reject': 'ccdclip', 'fl_vardq': 'yes', 'fl_dqprop': 'yes', 'logfile': 'gemcombineLog.txt', 'verbose': 'no'}, transFlags={'fl_vardq': 'yes', 'interptype': 'linear', 'fl_flux': 'yes', 'logfile': 'gstransLog.txt'}, skyFlags={'fl_oversize': 'no', 'fl_vardq': 'yes', 'logfile': 'gsskysubLog.txt'}, extrFlags = {'apwidth': 3., 'fl_inter': 'yes', 'find': 'yes','trace': 'yes', 'tfunction': 'chebyshev', 'torder': '6', 'tnsum': 20, 'background': 'fit', 'bfunction': 'chebyshev', 'border': 2, 'fl_vardq': 'no', 'logfile': 'gsextrLog.txt'}, calibFlags = {'extinction': 'onedstds$ctioextinct.dat', 'fl_ext': 'yes', 'fl_scale': 'no','sfunction': 'sens', 'fl_vardq': 'yes', 'logfile': 'gscalibrateLog.txt'}, skip_wavecal=True, clean_files=False): """ Parameters ---------- dbFile : str Filename containing the SQL sqlite3 database created by obslog.py It must be placed in the ./raw/ directory Default is `./raw/obsLog.sqlite3` sciTargets : dict Dictionary with the associations of science targets and its associated ARC for wavelength calibration as well as the regions defining the sky along the slit. e.g. sciTargetd = {'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'}, 'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'}} Note that there could be more than one target defined this way. stdTarget : dict Dictionary with the associations of standard star targets and its associated ARC for wavelength calibration as well as the regions defining the sky along the slit. e.g. stdTarget = {'LTT1788': {'arc': 'S20180711S0281', 'sky': '170:380,920:1080'}} qd_full : dictionary Query Dictionary of essential parameter=value pairs for Full RoI. Meant for science object. qd_censp : dictionary Query Dictionary of essential parameter=value pairs for CenSp RoI. Meant for standard star. 
biasFlags : dict Dictionary for the keyword flags of gmos.gbias() function flatFlags : dict Dictionary for the keyword flags of gmos.gsflat() function sciFlags : dict Dictionary for the keyword flags of gmos.gsreduce() function Based on these flags a set of arcFlags and stdFlags dictionaries will be created for basic processing. waveFlags : dict Dictionary for the keyword flags of gmos.gswavelength() function sciCombFlags : dict Dictionary for the keyword flags of gemtools.gemcombine() function Based on these flags a set of stdCombFlags dictionary will be created for the standard advanced processing. transFlags : dict Dictionary for the keyword flags of gmos.gstransform() function. xxx skyFlags : dict Dictionary for the keyword flags of gmos.gsskysub() function extrFlags : dict Dictionary for the keywords flags of gmos.gsextract() function calibFlags : dict XXX skip_wavecal : bool Whether to skip interactive wavelength calibration. Useful when this is already done. Returns ------- """ print ("### Begin Processing GMOS/Longslit Images ###") print ("###") print ("=== Creating MasterCals ===") # From the work_directory: # Create the query dictionary of essential parameter=value pairs for Full and CenSp RoIs qd = {'Full': qd_full, 'CenSp': qd_censp} print (" --Creating Bias MasterCal--") # Set the task parameters. gemtools.gemextn.unlearn() # Disarm a bug in gbias gmos.gbias.unlearn() regions = ['Full', 'CenSp'] for r in regions: # The following SQL generates the list of full-frame files to process. SQL = fs.createQuery('bias', qd[r]) biasFiles = fs.fileListQuery(dbFile, SQL, qd[r]) # The str.join() funciton is needed to transform a python list into a # comma-separated string of file names that IRAF can understand. if len(biasFiles) > 1: # NT comment: sometimes if there are too many files, gmos.gbias() raises an error. # import pdb; pdb.set_trace() gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias' + r, **biasFlags) # Clean up year_obs = qd_full['DateObs'].split('-')[0] if clean_files: iraf.imdel("gS{}*.fits".format(year_obs)) ask_user("MC Bias done. Would you like to continue to proceed with GCAL Spectral Master Flats? (y/n): ",['y','yes']) print (" -- Creating GCAL Spectral Flat-Field MasterCals --") # Set the task parameters. qd['Full'].update({'DateObs': '*'}) qd['CenSp'].update({'DateObs': '*'}) gmos.gireduce.unlearn() gmos.gsflat.unlearn() # Normalize the spectral flats per CCD. # The response fitting should be done interactively. if flatFlags['fl_inter'] != 'yes': print("The response fitting should be done interactively. Please set flatFlags['fl_inter'] = 'yes'.") ask_user("Do you still want to proceed despite this important warning? (y/n): ", ['yes','y']) for r in regions: qr = qd[r] flatFiles = fs.fileListQuery(dbFile, fs.createQuery('gcalFlat', qr), qr) if len(flatFiles) > 0: gmos.gsflat(','.join(str(x) for x in flatFiles), 'MCflat' + r, bias='MCbias' + r, **flatFlags) if clean_files: iraf.imdel('gS{}*.fits,gsS{}*.fits'.format(year_obs, year_obs)) ask_user("GCAL Spectral Flat-Field MasterCals done. Would you like to continue to proceed with Basic Processing? (y/n): ",['y','yes']) print ("=== Processing Science Files ===") print (" -- Performing Basic Processing --") # Set task parameters. 
    gmos.gsreduce.unlearn()
    sciFlags = sciFlags  # redundant but kept here because NT likes it
    arcFlags = copy.deepcopy(sciFlags)
    arcFlags.update({'fl_flat': 'no', 'fl_vardq': 'no', 'fl_fulldq': 'no'})
    stdFlags = copy.deepcopy(sciFlags)
    stdFlags.update({'fl_fixpix': 'yes', 'fl_vardq': 'no', 'fl_fulldq': 'no'})
    # Perform basic reductions on all exposures for science targets.
    print(" - Arc exposures -")
    for r in regions:
        qr = qd[r]
        arcFiles = fs.fileListQuery(dbFile, fs.createQuery('arc', qr), qr)
        if len(arcFiles) > 0:
            gmos.gsreduce(','.join(str(x) for x in arcFiles),
                          bias='MCbias' + r, **arcFlags)
    print(" - Std star exposures -")
    r = 'CenSp'
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qd[r]), qd[r])
    if len(stdFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in stdFiles),
                      bias='MCbias' + r, flatim='MCflat' + r, **stdFlags)
    print(" - Science exposures -")
    r = 'Full'
    sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qd[r]), qd[r])
    if len(sciFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in sciFiles),
                      bias='MCbias' + r, flatim='MCflat' + r, **sciFlags)

    # Clean up
    if clean_files:
        iraf.imdel('gS{}*.fits'.format(year_obs))

    ask_user("Basic processing done. Would you like to continue to determine "
             "wavelength calibration? (y/n): ", ['y', 'yes'])

    print(" -- Determine wavelength calibration --")
    # Set task parameters
    gmos.gswavelength.unlearn()
    # The fit to the dispersion relation should be performed interactively.
    # Here we will use a previously determined result.
    if waveFlags['fl_inter'] != 'yes':
        print("The fit to the dispersion relation should be performed "
              "interactively. Please set waveFlags['fl_inter'] = 'yes'.")
        ask_user("Do you still want to proceed despite this important "
                 "warning? (y/n): ", ['yes', 'y'])

    # Need to select specific wavecals to match science exposures.
    # NT: we do this now from the sciTargets + stdTarget input dictionaries,
    # e.g.
    '''
    sciTargets = {
        'AM2306-721_a': {'arc': 'gsS20070623S0071', 'sky': '520:720'},
        'AM2306-72_b': {'arc': 'gsS20070623S0081', 'sky': '670:760,920:1020'},
        'AM2306-721_c': {'arc': 'gsS20070623S0091', 'sky': '170:380,920:1080'}
    }
    '''
    # prefix = 'gsS20070623S0'
    # for arc in ['071', '081', '091', '109']:
    #     gmos.gswavelength(prefix + arc, **waveFlags)
    prefix = 'gs'
    arc_files = []
    for key in sciTargets.keys():
        arc_files += [sciTargets[key]['arc']]
    for key in stdTarget.keys():
        arc_files += [stdTarget[key]['arc']]
    # import pdb; pdb.set_trace()
    if skip_wavecal is not True:
        for arc in arc_files:
            gmos.gswavelength(prefix + arc, **waveFlags)

    ### End of basic processing. Continue with advanced processing.
    ask_user("Wavelength solution done. Would you like to continue with "
             "advanced processing? (y/n): ", ['y', 'yes'])

    print(" -- Performing Advanced Processing --")
    print(" -- Combine exposures, apply dispersion, subtract sky --")
    # Set task parameters.
    gemtools.gemcombine.unlearn()
    sciCombFlags = sciCombFlags
    stdCombFlags = copy.deepcopy(sciCombFlags)
    stdCombFlags.update({'fl_vardq': 'no', 'fl_dqprop': 'no'})
    gmos.gstransform.unlearn()

    # Apply gstransform to the standard.
    # Process the Standard Star.
    prefix = "gs"
    qs = qd['CenSp']
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qs), qs)
    std_name = stdTarget.keys()[0]
    if len(stdFiles) == 0:
        raise ValueError("No standard star associated. Please check "
                         "parameters of search (e.g. RoI=CentSp)")
    # import pdb; pdb.set_trace()
    if len(stdFiles) > 1:
        # import pdb; pdb.set_trace()
        gemtools.gemcombine(','.join(prefix + str(x) for x in stdFiles),
                            std_name, **stdCombFlags)
    else:
        os.system("cp {}.fits {}.fits".format(prefix + stdFiles[0], std_name))

    gmos.gstransform(std_name, wavtraname=prefix + stdTarget[std_name]['arc'],
                     **transFlags)

    # The sky regions should be selected with care, using e.g. prows/pcols:
    #   pcols("tAM2306b.fits[SCI]", 1100, 2040, wy1=40, wy2=320)
    print("The sky regions should be selected with care, using e.g. "
          "prows/pcols (see tutorial).")
    '''
    answer = raw_input("Please provide the long_sample string to apply to "
                       "gmos.gsskysub() for the standard star, "
                       "e.g. '20:70,190:230' (say 'no' to use the stdTarget "
                       "values as the default): ")
    if answer in ['n', 'no']:
        print("Using default long_sample set by stdTarget values {}.".format(
            stdTarget[std_name]['sky']))
        long_sample_std = stdTarget[std_name]['sky']
    else:
        long_sample_std = answer
    '''
    long_sample_std = stdTarget[std_name]['sky']
    ask_user("Before proceeding it is important that you have set a good sky "
             "region for the standard.\n"
             "Thus far you have selected: {}\n"
             "Would you like to proceed with the current one? (y/n): ".format(
                 long_sample_std), ['yes', 'y'])

    # Apply sky subtraction.
    skyFlags = skyFlags
    gmos.gsskysub.unlearn()
    gmos.gsskysub('t{}'.format(std_name), long_sample=long_sample_std)
    # NT: make sure the process works OK up to here before proceeding further,
    # i.e. that the sky region has been set manually and correctly.
    # NT: seems to be working.

    print(" -- Extract Std spectrum --")
    # Extract the std spectrum using a large aperture.
    # It's important to trace the spectra interactively.
    gmos.gsextract.unlearn()
    gmos.gsextract("st" + std_name, **extrFlags)

    print(" -- Derive the Flux calibration --")
    gmos.gsstandard.unlearn()
    sensFlags = {
        'fl_inter': 'no',
        'starname': 'XXX',
        'caldir': 'onedstds$ctionewcal/',
        'observatory': 'Gemini-South',
        'extinction': 'onedstds$ctioextinct.dat',
        'function': 'chebyshev',
        'order': 9,
        'verbose': 'no',
        'logfile': 'gsstdLog.txt'
    }
    # Replace the placeholder with the corresponding IRAF star name.
    sensFlags['starname'] = stdTarget[std_name]['iraf_name']
    gmos.gsstandard('est' + std_name, sfile='std.txt', sfunction='sens',
                    **sensFlags)

    ask_user("Sensitivity function from standard star done. Would you like "
             "to continue with reduction of science exposures? (y/n): ",
             ['yes', 'y'])

    # Process the science targets.
    # Use a dictionary to associate science targets with Arcs and sky regions.
    prefix = 'gs'
    extract_individuals = True
    for targ, p in sciTargets.iteritems():
        qs = qd['Full']
        qs['Object'] = p['name']
        # Fix up the target name for the output file.
        sciOut = p['name_out']
        sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qs), qs)
        all_files = ','.join(prefix + str(x) for x in sciFiles)
        gemtools.gemcombine(all_files, sciOut, **sciCombFlags)
        gmos.gstransform(sciOut, wavtraname=prefix + p['arc'], **transFlags)
        ask_user("It is important to select a good sky region for "
                 "subtraction. Thus far you have selected {} based on the "
                 "sciTargets input dictionary. Would you like to continue? "
                 "(y/n): ".format(p['sky']), ['y', 'yes'])
        gmos.gsskysub('t' + sciOut, long_sample=p['sky'], **skyFlags)
        if extract_individuals:
            # import pdb; pdb.set_trace()
            for fname in sciFiles:
                gmos.gstransform(prefix + fname, wavtraname=prefix + p['arc'],
                                 **transFlags)
                gmos.gsskysub('t' + prefix + fname, long_sample=p['sky'],
                              **skyFlags)
                gmos.gscalibrate.unlearn()
                gmos.gscalibrate('st' + prefix + fname, **calibFlags)
        # Clean up
        if clean_files:
            iraf.imdel("gsS{}*.fits".format(year_obs))
        ask_user("Sky subtraction done. Would you like to continue to apply "
                 "the sensitivity function? (y/n): ", ['y'])

        ## Apply the sensitivity function.
        gmos.gscalibrate.unlearn()
        gmos.gscalibrate('st' + sciOut + '*', **calibFlags)
        calibFlags.update({'fl_vardq': 'no'})
        gmos.gscalibrate('est' + std_name, **calibFlags)

        print(" -- Extract Target Spectra --")
        method = 'gsextract'
        if method == 'gsextract':
            gmos.gsextract.unlearn()
            # import pdb; pdb.set_trace()
            gmos.gsextract("cst" + sciOut, **extrFlags)
        elif method == 'sarith':
            # not implemented yet
            onedspec.nsum = 4
            onedspec.sarith('cst{}.fits[SCI]'.format(sciOut), 'copy', '',
                            'ecst{}.ms'.format(sciOut), apertures='222-346x4')

    print("=== Finished Calibration Processing ===")
def create_master_twilight_flat(qd, dbFile, data_dir, overwrite=True):
    '''
    Despite appearances, this routine cannot be run from outside the raw
    data directory; it must be run from within it.
    '''
    cur_dir = os.getcwd()
    #os.chdir(data_dir)
    iraf.chdir(data_dir)
    print(" --Creating Twilight Imaging Flat-Field MasterCal--")
    # Set the task parameters.
    gmos.giflat.unlearn()
    flat_flags = {
        'fl_scale': 'yes',
        'sctype': 'mean',
        'fl_vardq': 'yes',
        'rawpath': '',
        'logfile': 'giflatLog.txt',
        'verbose': 'no'
    }
    if qd['Instrument'] == 'GMOS-N':
        flat_flags['bpm'] = 'gmos$data/gmos-n_bpm_HAM_22_12amp_v1.fits'
    else:
        flat_flags['bpm'] = 'gmos$data/gmos-s_bpm_HAM_22_12amp_v1.fits'
    filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    original_dateobs = qd['DateObs']
    for f in filters:
        # Select filter name using a substring of the official designation.
        qd['Filter2'] = f + '_G%'
        SQL = fileSelect.createQuery('twiFlat', qd)
        flat_files = fileSelect.fileListQuery(dbFile, SQL, qd)
        start_date = Time(qd['DateObs'].split(':')[0], out_subfmt='date')
        end_date = qd['DateObs'].split(':')[1]
        # Widen the date range (by up to a year) until enough flats are found.
        while (len(flat_files) < 7) & (
                (Time(end_date) - start_date) < 365 * u.day):
            start_date = start_date - 1.0 * u.day
            qd['DateObs'] = '{}:{}'.format(start_date, end_date)
            SQL = fileSelect.createQuery('twiFlat', qd)
            flat_files = fileSelect.fileListQuery(dbFile, SQL, qd)
        mc_name = 'MCflat_{}.fits'.format(f)
        if os.path.exists(mc_name):
            if overwrite is True:
                remove = raw_input(
                    'Remove flat file {}? (y), n '.format(mc_name))
                if remove == 'y':
                    os.remove(mc_name)
                else:
                    return None
            else:
                print('Master flat, {} already exists and overwrite={}'.format(
                    mc_name, overwrite))
                return None
        if len(flat_files) > 0:
            print(" Building twilight flat MasterCal for: {} with {} flat "
                  "frames from date range {}".format(
                      f, len(flat_files), qd['DateObs']))
            gmos.giflat(','.join(str(x) for x in flat_files), mc_name,
                        bias='MCbias', **flat_flags)
        qd['DateObs'] = original_dateobs
    # Clean up
    if not os.path.exists(mc_name):  # Check that IRAF didn't error
        sys.exit('ERROR creating Master Flat Field: {}'.format(mc_name))
    if qd['Instrument'] == 'GMOS-N':
        image_str = 'gN'
    else:
        image_str = 'gS'
    image_str = '{}{}*.fits'.format(image_str, qd['DateObs'][0:4])
    for f in filters:
        del_str = f + image_str
        iraf.imdel(del_str)
    flist = glob.glob('tmpfile*')
    for ifile in flist:
        os.remove(ifile)
    iraf.chdir(cur_dir)
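# A minimal usage sketch for create_master_twilight_flat, defined but not
# executed. The query dictionary is an assumption about what
# fileSelect.createQuery('twiFlat', qd) expects (the keys mirror those used
# elsewhere in this script); the binning, object pattern, and paths are
# placeholders to adapt. It also assumes a master bias named 'MCbias' already
# exists in data_dir (e.g. from create_master_bias).
def example_create_master_twilight_flat():
    qd_img = {
        'use_me': 1,
        'Instrument': 'GMOS-S',
        'CcdBin': '2 2',
        'RoI': 'Full',
        'Object': '%',
        'DateObs': '2007-06-05:2007-07-07'
    }
    create_master_twilight_flat(qd_img, dbFile='./raw/obsLog.sqlite3',
                                data_dir='./raw/', overwrite=True)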
def calibrate_science_images(qd, dbFile, data_dir, biasfilename='MCbias',
                             overwrite=True):
    '''
    Despite appearances, this routine cannot be run from outside the raw
    data directory; it must be run from within it.

    Bad pixel maps live in
    /Users/bostroem/anaconda/envs/geminiconda/iraf_extern/gemini/gmos/data
    You can find this directory with PyRAF: cd gmos; cd data; pwd
    '''
    print("=== Processing Science Images ===")
    cur_dir = os.getcwd()
    iraf.chdir(data_dir)
    prefix = 'rg'
    # Set task parameters.
    # Employ the imaging Static BPM for this set of detectors.
    gmos.gireduce.unlearn()
    sciFlags = {
        'fl_over': 'yes',   # Overscan subtraction
        'fl_trim': 'yes',   # Trim the overscan region
        'fl_bias': 'yes',   # Subtract bias residual
        'fl_dark': 'no',    # Subtract dark
        'fl_flat': 'yes',   # Apply flat field
        'logfile': 'gireduceLog.txt',
        'rawpath': '',
        'fl_vardq': 'yes',  # Propagate VAR and DQ extensions
        'verbose': 'no'
    }
    if qd['Instrument'] == 'GMOS-N':
        sciFlags['bpm'] = 'gmos$data/gmos-n_bpm_HAM_22_12amp_v1.fits'
    else:
        sciFlags['bpm'] = 'gmos$data/gmos-s_bpm_HAM_22_12amp_v1.fits'
    gemtools.gemextn.unlearn()  # Disarms a bug in gmosaic
    gmos.gmosaic.unlearn()
    mosaicFlags = {
        'fl_paste': 'no',
        'fl_fixpix': 'no',
        'fl_clean': 'yes',
        'geointer': 'nearest',
        'logfile': 'gmosaicLog.txt',
        'fl_vardq': 'yes',
        'fl_fulldq': 'yes',
        'verbose': 'no'
    }
    # Reduce the science images, then mosaic the extensions in a loop.
    filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    for f in filters:
        print(" Processing science images for: %s" % (f))
        qd['Filter2'] = f + '_G%'
        flatFile = 'MCflat_' + f
        sciFiles = fileSelect.fileListQuery(
            dbFile, fileSelect.createQuery('sciImg', qd), qd)
        if len(sciFiles) > 0:
            gmos.gireduce(','.join(str(x) for x in sciFiles),
                          bias=biasfilename, flat1=flatFile, **sciFlags)
            # Combine multi-extension images into one image.
            for sci_file in sciFiles:
                gmos.gmosaic(prefix + sci_file, **mosaicFlags)
    iraf.chdir(cur_dir)
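# A minimal usage sketch for calibrate_science_images, defined but not
# executed. It assumes the master bias ('MCbias') and per-filter master flats
# ('MCflat_<filter>') have already been created in data_dir by the routines
# above; the query dictionary keys and paths are illustrative assumptions that
# mirror the rest of this script.
def example_calibrate_science_images():
    qd_img = {
        'use_me': 1,
        'Instrument': 'GMOS-S',
        'CcdBin': '2 2',
        'RoI': 'Full',
        'Object': '%',
        'DateObs': '2007-06-05:2007-07-07'
    }
    calibrate_science_images(qd_img, dbFile='./raw/obsLog.sqlite3',
                             data_dir='./raw/', biasfilename='MCbias')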
def gmos_ls_proc():
    '''
    GMOS Data Reduction Cookbook companion script to the chapter:
        "Reduction of Longslit Spectra with PyRAF"

    PyRAF script to:
    Process GMOS spectra for AM2306-721, in program GS-2007A-Q-76.

    The names for the relevant header keywords and their expected values are
    described in the DRC chapter entitled "Supplementary Material".

    Perform the following starting in the parent work directory:
        cd /path/to/work_directory

    Place the fileSelect.py module in your work directory. Now execute this
    script from the unix prompt:
        python gmos_ls_proc.py
    '''
    print("### Begin Processing GMOS/Longslit Images ###")
    print("###")
    print("=== Creating MasterCals ===")

    # This whole example depends upon first having built an sqlite3 database
    # of metadata:
    #    cd ./raw
    #    python obslog.py obsLog.sqlite3
    dbFile = './raw/obsLog.sqlite3'

    # From the work_directory:
    # Create the query dictionary of essential parameter=value pairs.
    # Select bias exposures within ~2 months of the target observations:
    qd = {
        'Full': {
            'use_me': 1,
            'Instrument': 'GMOS-S',
            'CcdBin': '2 4',
            'RoI': 'Full',
            'Disperser': 'B600+_%',
            'CentWave': 485.0,
            'AperMask': '1.0arcsec',
            'Object': 'AM2306-72%',
            'DateObs': '2007-06-05:2007-07-07'
        }
    }
    # Make another copy for the CenterSpec RoI:
    qd['CenSp'] = copy.deepcopy(qd['Full'])
    qd['CenSp'].update({'RoI': 'CentSp', 'Object': 'LTT9239'})

    print(" --Creating Bias MasterCal--")
    # Set the task parameters.
    gemtools.gemextn.unlearn()  # Disarm a bug in gbias
    gmos.gbias.unlearn()
    biasFlags = {
        'logfile': 'biasLog.txt',
        'rawpath': './raw/',
        'fl_vardq': 'yes',
        'verbose': 'no'
    }
    regions = ['Full', 'CenSp']
    for r in regions:
        # The following SQL generates the list of full-frame files to process.
        SQL = fs.createQuery('bias', qd[r])
        biasFiles = fs.fileListQuery(dbFile, SQL, qd[r])
        # The str.join() function is needed to transform a python list into a
        # comma-separated string of file names that IRAF can understand.
        if len(biasFiles) > 1:
            gmos.gbias(','.join(str(x) for x in biasFiles), 'MCbias' + r,
                       **biasFlags)
    # Clean up
    iraf.imdel("gS2007*.fits")

    print(" -- Creating GCAL Spectral Flat-Field MasterCals --")
    # Set the task parameters.
    qd['Full'].update({'DateObs': '*'})
    qd['CenSp'].update({'DateObs': '*'})
    gmos.gireduce.unlearn()
    gmos.gsflat.unlearn()
    # Normalize the spectral flats per CCD.
    # The response fitting should be done interactively.
    flatFlags = {
        'fl_over': 'yes',
        'fl_trim': 'yes',
        'fl_bias': 'yes',
        'fl_dark': 'no',
        'fl_fixpix': 'no',
        'fl_oversize': 'no',
        'fl_vardq': 'yes',
        'fl_fulldq': 'yes',
        'rawpath': './raw',
        'fl_inter': 'no',
        'fl_detec': 'yes',
        'function': 'spline3',
        'order': '13,11,28',
        'logfile': 'gsflatLog.txt',
        'verbose': 'no'
    }
    for r in regions:
        qr = qd[r]
        flatFiles = fs.fileListQuery(dbFile, fs.createQuery('gcalFlat', qr),
                                     qr)
        if len(flatFiles) > 0:
            gmos.gsflat(','.join(str(x) for x in flatFiles), 'MCflat' + r,
                        bias='MCbias' + r, **flatFlags)
    iraf.imdel('gS2007*.fits,gsS2007*.fits')

    print("=== Processing Science Files ===")
    print(" -- Performing Basic Processing --")
    # Set task parameters.
    gmos.gsreduce.unlearn()
    sciFlags = {
        'fl_over': 'yes',
        'fl_trim': 'yes',
        'fl_bias': 'yes',
        'fl_gscrrej': 'no',
        'fl_dark': 'no',
        'fl_flat': 'yes',
        'fl_gmosaic': 'yes',
        'fl_fixpix': 'no',
        'fl_gsappwave': 'yes',
        'fl_oversize': 'no',
        'fl_vardq': 'yes',
        'fl_fulldq': 'yes',
        'rawpath': './raw',
        'fl_inter': 'no',
        'logfile': 'gsreduceLog.txt',
        'verbose': 'no'
    }
    arcFlags = copy.deepcopy(sciFlags)
    arcFlags.update({'fl_flat': 'no', 'fl_vardq': 'no', 'fl_fulldq': 'no'})
    stdFlags = copy.deepcopy(sciFlags)
    stdFlags.update({'fl_fixpix': 'yes', 'fl_vardq': 'no', 'fl_fulldq': 'no'})

    # Perform basic reductions on all exposures for science targets.
    print(" - Arc exposures -")
    for r in regions:
        qr = qd[r]
        arcFiles = fs.fileListQuery(dbFile, fs.createQuery('arc', qr), qr)
        if len(arcFiles) > 0:
            gmos.gsreduce(','.join(str(x) for x in arcFiles),
                          bias='MCbias' + r, **arcFlags)
    print(" - Std star exposures -")
    r = 'CenSp'
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qd[r]), qd[r])
    if len(stdFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in stdFiles),
                      bias='MCbias' + r, flatim='MCflat' + r, **stdFlags)
    print(" - Science exposures -")
    r = 'Full'
    sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qd[r]),
                                qd[r])
    if len(sciFiles) > 0:
        gmos.gsreduce(','.join(str(x) for x in sciFiles),
                      bias='MCbias' + r, flatim='MCflat' + r, **sciFlags)
    # Clean up
    iraf.imdel('gS2007*.fits')

    print(" -- Determine wavelength calibration --")
    # Set task parameters
    gmos.gswavelength.unlearn()
    waveFlags = {
        'coordlist': 'gmos$data/CuAr_GMOS.dat',
        'fwidth': 6,
        'nsum': 50,
        'function': 'chebyshev',
        'order': 5,
        'fl_inter': 'no',
        'logfile': 'gswaveLog.txt',
        'verbose': 'no'
    }
    # The fit to the dispersion relation should be performed interactively.
    # Here we will use a previously determined result.
    # Need to select specific wavecals to match science exposures.
    prefix = 'gsS20070623S0'
    for arc in ['071', '081', '091', '109']:
        gmos.gswavelength(prefix + arc, **waveFlags)

    ### End of basic processing. Continue with advanced processing.
    print(" -- Performing Advanced Processing --")
    print(" -- Combine exposures, apply dispersion, subtract sky --")
    # Set task parameters.
    gemtools.gemcombine.unlearn()
    sciCombFlags = {
        'combine': 'average',
        'reject': 'ccdclip',
        'fl_vardq': 'yes',
        'fl_dqprop': 'yes',
        'logfile': 'gemcombineLog.txt',
        'verbose': 'no'
    }
    stdCombFlags = copy.deepcopy(sciCombFlags)
    stdCombFlags.update({'fl_vardq': 'no', 'fl_dqprop': 'no'})
    gmos.gstransform.unlearn()
    transFlags = {
        'fl_vardq': 'yes',
        'interptype': 'linear',
        'fl_flux': 'yes',
        'logfile': 'gstransLog.txt'
    }
    # The sky regions should be selected with care, using e.g. prows/pcols:
    #   pcols("tAM2306b.fits[SCI]", 1100, 2040, wy1=40, wy2=320)
    gmos.gsskysub.unlearn()
    skyFlags = {
        'fl_oversize': 'no',
        'fl_vardq': 'yes',
        'logfile': 'gsskysubLog.txt'
    }

    # Process the Standard Star.
    prefix = "gs"
    qs = qd['CenSp']
    stdFiles = fs.fileListQuery(dbFile, fs.createQuery('std', qs), qs)
    gemtools.gemcombine(','.join(prefix + str(x) for x in stdFiles),
                        'LTT9239', **stdCombFlags)
    gmos.gstransform('LTT9239', wavtraname='gsS20070623S0109', **transFlags)
    gmos.gsskysub('tLTT9239', long_sample='20:70,190:230')

    print(" -- Extract Std spectrum --")
    # Extract the std spectrum using a large aperture.
    # It's important to trace the spectra interactively.
    gmos.gsextract.unlearn()
    extrFlags = {
        'apwidth': 3.,
        'fl_inter': 'no',
        'find': 'yes',
        'trace': 'yes',
        'tfunction': 'chebyshev',
        'torder': '6',
        'tnsum': 20,
        'background': 'fit',
        'bfunction': 'chebyshev',
        'border': 2,
        'fl_vardq': 'no',
        'logfile': 'gsextrLog.txt'
    }
    gmos.gsextract("stLTT9239", **extrFlags)

    print(" -- Derive the Flux calibration --")
    gmos.gsstandard.unlearn()
    sensFlags = {
        'fl_inter': 'yes',
        'starname': 'l9239',
        'caldir': 'onedstds$ctionewcal/',
        'observatory': 'Gemini-South',
        'extinction': 'onedstds$ctioextinct.dat',
        'function': 'chebyshev',
        'order': 9,
        'verbose': 'no',
        'logfile': 'gsstdLog.txt'
    }
    gmos.gsstandard('estLTT9239', sfile='std.txt', sfunction='sens',
                    **sensFlags)

    # Process the science targets.
    # Use a dictionary to associate science targets with Arcs and sky regions.
    sciTargets = {
        'AM2306-721_a': {
            'arc': 'gsS20070623S0071',
            'sky': '520:720'
        },
        'AM2306-72_b': {
            'arc': 'gsS20070623S0081',
            'sky': '670:760,920:1020'
        },
        'AM2306-721_c': {
            'arc': 'gsS20070623S0091',
            'sky': '170:380,920:1080'
        }
    }
    for targ, p in sciTargets.iteritems():
        qs = qd['Full']
        qs['Object'] = targ
        # Fix up the target name for the output file.
        sciOut = targ.split('-')[0] + targ[-1]
        sciFiles = fs.fileListQuery(dbFile, fs.createQuery('sciSpec', qs), qs)
        gemtools.gemcombine(','.join(prefix + str(x) for x in sciFiles),
                            sciOut, **sciCombFlags)
        gmos.gstransform(sciOut, wavtraname=p['arc'], **transFlags)
        gmos.gsskysub('t' + sciOut, long_sample=p['sky'], **skyFlags)
    # Clean up
    iraf.imdel("gsS2007*.fits")

    ## Apply the sensitivity function.
    gmos.gscalibrate.unlearn()
    calibFlags = {
        'extinction': 'onedstds$ctioextinct.dat',
        'fl_ext': 'yes',
        'fl_scale': 'no',
        'sfunction': 'sens',
        'fl_vardq': 'yes',
        'logfile': 'gscalibrateLog.txt'
    }
    gmos.gscalibrate('stAM2306*', **calibFlags)
    calibFlags.update({'fl_vardq': 'no'})
    gmos.gscalibrate('estLTT9239', **calibFlags)

    print(" -- Extract Target Spectra --")
    onedspec.nsum = 4
    onedspec.sarith('cstAM2306b.fits[SCI]', 'copy', '', 'ecstAM2306b.ms',
                    apertures='222-346x4')

    print("=== Finished Calibration Processing ===")
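# The docstring of gmos_ls_proc says to run this script directly from the
# shell ("python gmos_ls_proc.py"). If the script does not already define an
# entry point elsewhere, a minimal guard like the following would provide one:
if __name__ == '__main__':
    gmos_ls_proc()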