def copyFLTs(self):
    """
    Copy all _flt, _spt, and _asn files to a temporary working directory.

    :Note: One should run either copyRaws or copyFLTs but usually not both!
    """
    # make a new dir; if it already exists, empty it instead
    path = 'tmp'
    try:
        os.mkdir(path)
    except OSError:
        for d in glob.glob('./%s/*.*' % path):
            os.remove(d)

    for fle in glob.glob('./opus/*_flt.fits'):
        shutil.copy(fle, path)

    for fle in glob.glob('./support/*_spt.fits'):
        shutil.copy(fle, path)

    for fle in glob.glob('./asn/*_asn.fits'):
        shutil.copy(fle, path)

    # change the current working directory to tmp
    os.chdir(os.getcwd() + '/' + path)
    iraf.chdir(os.getcwd())

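# A hedged sketch (not from the original sources) of the same copy step using
# tempfile.mkdtemp, which sidesteps the mkdir/clean-out dance by always
# creating a fresh directory. The function name is hypothetical.
import glob
import os
import shutil
import tempfile

def copy_flts_to_tmpdir(prefix='flt_work_'):
    """Copy _flt/_spt/_asn files into a freshly created scratch directory."""
    path = tempfile.mkdtemp(prefix=prefix, dir=os.getcwd())
    for pattern in ('./opus/*_flt.fits', './support/*_spt.fits',
                    './asn/*_asn.fits'):
        for fle in glob.glob(pattern):
            shutil.copy(fle, path)
    return path
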
def create_master_bias(qd, dbFile, data_dir, master_bias='MCbias.fits',
                       overwrite=True):
    cur_dir = os.getcwd()
    #os.chdir(data_dir)
    iraf.chdir(data_dir)
    if os.path.exists(master_bias):
        if overwrite is True:
            remove = raw_input('Remove bias file? (y), n ')
            if remove != 'n':
                os.remove(master_bias)
            else:
                return None
        else:
            print('Master bias, {} already exists and overwrite={}'.format(
                master_bias, overwrite))
            return None

    print(" --Creating Bias MasterCal--")
    SQL = fileSelect.createQuery('bias', qd)
    bias_files = fileSelect.fileListQuery(dbFile, SQL, qd)
    start_date = Time(qd['DateObs'].split(':')[0], out_subfmt='date')
    end_date = qd['DateObs'].split(':')[1]
    # Step the start date backwards until at least 7 bias frames are found
    while len(bias_files) < 7:
        start_date = start_date - 1.0 * u.day
        qd['DateObs'] = '{}:{}'.format(start_date, end_date)
        SQL = fileSelect.createQuery('bias', qd)
        bias_files = fileSelect.fileListQuery(dbFile, SQL, qd)

    gmos.gbias.unlearn()
    bias_flags = {
        'logfile': 'biasLog.txt',
        'rawpath': '',
        'fl_vardq': 'yes',
        'verbose': 'no'
    }
    print('{} bias frames used in Master Bias over date range {}'.format(
        len(bias_files), qd['DateObs']))
    if len(bias_files) < 10:
        print('******WARNING less than 10 bias files********')
    if len(bias_files) > 1:
        gmos.gbias(','.join(str(x) for x in bias_files), master_bias,
                   **bias_flags)

    # Clean up
    if not os.path.exists(master_bias):  # Check that IRAF didn't error
        sys.exit('ERROR creating Master Bias: {}'.format(master_bias))

    # Remove intermediate files
    if qd['Instrument'] == 'GMOS-N':
        image_str = 'gN'
    else:
        image_str = 'gS'
    image_str = '{}{}*.fits'.format(image_str, qd['DateObs'][0:4])
    iraf.imdel(image_str)
    flist = glob.glob('tmplist*')
    for ifile in flist:
        os.remove(ifile)
    iraf.chdir(cur_dir)

def cosmic_rays(List_image):
    full_path = List_image
    path = os.path.dirname(full_path)
    List_image = full_path.replace(path + '/', '')
    os.chdir(path)
    iraf.chdir(path)
    image_list = open(List_image, 'r')
    image_out = open('c' + List_image, 'w')
    for line in image_list:
        line = line.split('\n')
        print('Cosmic rays removal: ' + line[0])
        hdulist = fits.open(line[0])
        #print hdulist[0].header['HIERARCH ESO DET OUT1 RON']
        iraf.lacos_spec.input = line[0]
        iraf.lacos_spec.output = 'c' + line[0]
        if os.path.isfile('c' + line[0]):
            os.system('rm c' + line[0])
        if os.path.isfile('mask_' + line[0]):
            os.system('rm mask_' + line[0])
        iraf.lacos_spec.outmask = 'mask_' + line[0]
        iraf.lacos_spec.readn = hdulist[0].header['HIERARCH ESO DET OUT1 RON']
        iraf.lacos_spec.gain = hdulist[0].header['HIERARCH ESO DET OUT1 GAIN']
        iraf.lacos_spec.verbose = 'no'
        iraf.lacos_spec(mode='h')
        os.system('rm mask_' + line[0])
        image_out.write('c' + line[0] + '\n')
        hdulist.close()
    image_list.close()
    image_out.close()

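# The attribute-by-attribute parameter setting above can also be written as a
# single task call with keyword arguments; a minimal sketch, assuming the
# lacos_spec task is already loaded into the iraf namespace and the ESO
# detector keywords are present (helper name is hypothetical):
from astropy.io import fits
from pyraf import iraf

def run_lacos_once(image):
    """Run lacos_spec on one image, pulling read noise and gain from its header."""
    header = fits.getheader(image)
    iraf.lacos_spec(image, 'c' + image, outmask='mask_' + image,
                    readn=header['HIERARCH ESO DET OUT1 RON'],
                    gain=header['HIERARCH ESO DET OUT1 GAIN'],
                    verbose='no', mode='h')
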
def refaver(reffiles, combined_name):
    """Average two reference files together using msarith.

    .. warning:: This task requires IRAF/PyRAF to be installed.

    Parameters
    ----------
    reffiles : list
        List of reference files to be averaged together
    combined_name : str
        Output name of the combined file

    """
    from pyraf import iraf
    from iraf import stsdas
    from iraf import mstools

    print('#-----------------------#')
    print('combining datasets')
    print(reffiles)
    print('into')
    print(combined_name)
    print('#-----------------------#')

    all_paths = {os.path.split(item)[0] for item in reffiles}
    assert len(all_paths) == 1, "More than one path found"

    initial_dir = os.getcwd()
    os.chdir(list(all_paths)[0])
    iraf.chdir(list(all_paths)[0])

    all_subfiles = []
    for subfile in reffiles:
        subfile = os.path.split(subfile)[-1]
        outfile = subfile.replace('.fits', '_aver.fits')
        print("Running msarith / 2 on {}".format(subfile))
        iraf.msarith(subfile, '/', 2, outfile, verbose=1)
        all_subfiles.append(outfile)

    assert len(all_subfiles) == 2, \
        'Length of subfiles doesnt equal 2: {}'.format(all_subfiles)

    if not combined_name.endswith('.fits'):
        combined_name = combined_name + '.fits'

    #-- remove path from output name
    combined_name = os.path.split(combined_name)[-1]

    iraf.msarith(all_subfiles[0], '+', all_subfiles[1], combined_name,
                 verbose=1)

    for filename in all_subfiles:
        os.remove(filename)

    #-- move back to beginning location
    os.chdir(initial_dir)
    iraf.chdir(initial_dir)

def calibrate_standard_images(qd, dbFile, data_dir, std_name,
                              biasfilename='MCbias', overwrite=True):
    # Note: data_dir was missing from the original signature even though the
    # body chdirs into it; added to match calibrate_science_images.
    print("=== Processing Standard Star Images ===")
    prefix = 'rg'
    cur_dir = os.getcwd()
    iraf.chdir(data_dir)
    # Set task parameters.
    # Employ the imaging Static BPM for this set of detectors.
    gmos.gireduce.unlearn()
    sciFlags = {
        'fl_over': 'yes',
        'fl_trim': 'yes',
        'fl_bias': 'yes',
        'fl_dark': 'no',
        'fl_flat': 'yes',
        'logfile': 'gireduceLog.txt',
        'rawpath': '',
        'fl_vardq': 'yes',
        #####'bpm':'bpm_gmos-s_EEV_v1_2x2_img_MEF.fits',
        'verbose': 'no'
    }
    gemtools.gemextn.unlearn()  # disarms a bug in gmosaic
    gmos.gmosaic.unlearn()
    mosaicFlags = {
        'fl_paste': 'no',
        'fl_fixpix': 'no',
        'fl_clean': 'yes',
        'geointer': 'nearest',
        'logfile': 'gmosaicLog.txt',
        'fl_vardq': 'yes',
        'fl_fulldq': 'yes',
        'verbose': 'no'
    }
    # Reduce the standard star images, then mosaic the extensions in a loop
    filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    for f in filters:
        print("    Processing images for: %s" % (f))
        qd['Filter2'] = f + '_G%'
        flatFile = 'MCflat_' + f
        sql_query = '''SELECT file FROM obslog WHERE Object='{}' AND Filter2 LIKE '{}%' '''.format(std_name, f)
        sciFiles = run_query(sql_query, dbFile)
        sciFiles = [x[0] for x in sciFiles]
        if len(sciFiles) > 0:
            gmos.gireduce(','.join(str(x) for x in sciFiles),
                          bias=biasfilename, flat1=flatFile, **sciFlags)
            for file in sciFiles:
                gmos.gmosaic(prefix + file, **mosaicFlags)

    iraf.chdir(cur_dir)

def refaver(reffiles, combined_name):
    """Average two reference files together.

    Arithmetic for the combination is done using msarith.
    """
    from pyraf import iraf
    from iraf import stsdas
    from iraf import mstools

    print('#-----------------------#')
    print('combining datasets')
    print(reffiles)
    print('into')
    print(combined_name)
    print('#-----------------------#')

    all_paths = {os.path.split(item)[0] for item in reffiles}
    assert len(all_paths) == 1, "More than one path found"

    initial_dir = os.getcwd()
    os.chdir(list(all_paths)[0])
    iraf.chdir(list(all_paths)[0])

    all_subfiles = []
    for subfile in reffiles:
        subfile = os.path.split(subfile)[-1]
        outfile = subfile.replace('.fits', '_aver.fits')
        print("Running msarith / 2 on {}".format(subfile))
        iraf.msarith(subfile, '/', 2, outfile, verbose=1)
        all_subfiles.append(outfile)

    assert len(all_subfiles) == 2, \
        'Length of subfiles doesnt equal 2: {}'.format(all_subfiles)

    if not combined_name.endswith('.fits'):
        combined_name = combined_name + '.fits'

    #-- remove path from output name
    combined_name = os.path.split(combined_name)[-1]

    iraf.msarith(all_subfiles[0], '+', all_subfiles[1], combined_name,
                 verbose=1)

    for filename in all_subfiles:
        os.remove(filename)

    #-- move back to beginning location
    os.chdir(initial_dir)
    iraf.chdir(initial_dir)

def longslit(args, waves, assoc):
    """Reduce longslit data"""
    combine = []
    mask = 'longslit'
    path = os.path.join(args.objectid, mask).replace(' ', '_')
    utils.makedir(path)
    for wave in waves:
        flats = inventory.get_file_longslit(assoc, obs='flat', wave=wave)
        arcs = inventory.get_file_longslit(assoc, obs='arc', wave=wave)
        sciences = inventory.get_file_longslit(assoc, obs='science',
                                               wave=wave)
        iraf.chdir(path)
        utils.create_symlink(args.bias, args.force_overwrite)
        for flat, arc, science in zip(flats, arcs, sciences):
            #os.symlink(os.path.join('../../', '{}.fits'.format(flat)), '{}.fits'.format(flat))
            #os.symlink(os.path.join('../../', '{}.fits'.format(arc)), '{}.fits'.format(arc))
            #os.symlink(os.path.join('../../', '{}.fits'.format(science)), '{}.fits'.format(science))
            for f in (flat, arc, science):
                utils.create_symlink(f, args.force_overwrite)
            flat, comb = tasks.call_gsflat(args, flat)
            arc = tasks.call_gsreduce(args, arc, flat, args.bias, comb)
            science = tasks.call_gsreduce(args, science, flat, args.bias,
                                          comb)
            tasks.call_gdisplay(args, science, 1)
            science = tasks.call_lacos(args, science, longslit=True)
            tasks.call_gdisplay(args, science, 1)
            tasks.call_gswave(args, arc)
            tasks.call_gstransform(args, arc, arc)
            science = tasks.call_gstransform(args, science, arc)
            tasks.call_gdisplay(args, science, 1)
            combine.append(tasks.call_gsskysub(args, science))
        if len(combine) == len(waves):
            added = tasks.call_imcombine(args, str(mask), combine,
                                         longslit=True)
            tasks.call_gdisplay(args, added, 1)
            spectra = tasks.call_gsextract(args, added)
            Naps = raw_input('Number of apertures extracted: ')
            # In case you don't see the message after so many
            # consecutive "Enters"
            while Naps == '':
                Naps = raw_input(
                    'Please enter number of apertures extracted: ')
            Naps = int(Naps)
            tasks.cut_apertures(args, spectra, '{}_'.format(args.objectid),
                                Naps)
        utils.delete('tmp*')
        iraf.chdir('../..')
    return

def create_coadd_img(qd, targets, dbFile, data_dir, prefix='mrg',
                     overwrite=True):
    '''
    Despite the fact that it looks like this can be run from outside the
    raw directory, it can't be.

    Caveats:
    * It takes a lot of patience and trial-and-error tweaking of parameters
      to get good results
    * There is little control over sky background
    * The output image is no bigger than the first (reference) image, rather
      than the union of the image footprints
    '''
    ## Co-add the images, per position and filter.
    print(" -- Begin image co-addition --")

    cur_dir = os.getcwd()
    iraf.chdir(data_dir)

    # Use primarily the default task parameters.
    gemtools.imcoadd.unlearn()
    coaddFlags = {
        'fwhm': 3,
        'datamax': 6.e4,
        'geointer': 'nearest',
        'logfile': 'imcoaddLog.txt'
    }
    filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    for f in filters:
        print("  - Co-adding science images in filter: {}".format(f))
        qd['Filter2'] = f + '_G%'
        for t in targets:
            qd['Object'] = t + '%'
            print("  - Co-adding science images for position: {}".format(t))
            outImage = t + '_' + f + '.fits'
            coAddFiles = fileSelect.fileListQuery(
                dbFile, fileSelect.createQuery('sciImg', qd), qd)
            if len(coAddFiles) > 1:
                gemtools.imcoadd(','.join(prefix + str(x)
                                          for x in coAddFiles),
                                 outimage=outImage, **coaddFlags)

    iraf.delete("*_trn*,*_pos,*_cen")
    iraf.imdelete("*badpix.pl,*_med.fits,*_mag.fits")
    #iraf.imdelete ("mrgS*.fits")
    print("=== Finished Calibration Processing ===")
    iraf.chdir(cur_dir)

def renorm_compare(sp, rnval, rnunits, bp, callable):
    """Sp and bp must have a special .syndescrip attribute that contains
    the string needed to make it with synphot"""

    # Renormalize the spectrum
    pysyn = callable(sp, bp, rnval, rnunits)
    #pysyn = sp.renorm(rnval, rnunits, bp)
    #pysyn = spectrum.StdRenorm(sp, bp, rnval, rnunits)

    userdir = tempfile.mkdtemp(suffix='pysynphot')
    old_cwd = os.getcwd()
    iraf.chdir(userdir)

    # Make a wavetable and a wavecat
    fname = "%s.fits" % rnunits
    pysyn.writefits(fname, clobber=True)
    wname = "%s.cat" % rnunits
    f = open(wname, 'w')
    f.write("box %s\n" % fname)
    f.close()
    oname = "syn_%s" % fname

    try:
        # Run countrate
        spstring = "rn(%s,%s,%s,%s) " % (sp.syndescrip, bp.syndescrip,
                                         rnval, rnunits)
        iraf.countrate(spectrum=spstring, magnitude="",
                       instrument="box(15000,30000)",
                       form=str(pysyn.fluxunits), wavecat=wname,
                       output=oname)
        syn = S.FileSpectrum(oname)

        # Check that they have the same shape and fluxunits
        assert (syn.flux.shape == pysyn.flux.shape)
        assert (type(syn.fluxunits) == type(pysyn.fluxunits))

        # Now a real test
        idx = np.where(syn.flux != 0)
        rat = (syn.flux[idx] / pysyn.flux[idx])
        q = abs(1 - rat[2:-2])
        qtrunc = (10**4 * q).astype(np.int)
        assert np.alltrue(qtrunc < 110), \
            "Min/max ratio = %f,%f" % (q.min(), q.max())
    finally:
        iraf.chdir(old_cwd)
        shutil.rmtree(userdir)

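# The truncated-ratio assertion above effectively allows ~1.1% flux
# disagreement (10**4 * |1 - ratio| < 110); an equivalent-intent check
# (a sketch, not the original test) using numpy's tolerance helpers:
import numpy as np

def fluxes_agree(syn_flux, pysyn_flux, tol=0.011):
    """True if the nonzero-flux ratio stays within `tol` of unity."""
    idx = np.where(syn_flux != 0)
    ratio = syn_flux[idx] / pysyn_flux[idx]
    return np.allclose(ratio[2:-2], 1.0, rtol=0.0, atol=tol)
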
def BIAS(bias):
    #iraf.unlearn(iraf.zerocombine)
    path_bias = os.path.dirname(bias)
    bias = bias.replace(path_bias + '/', '')
    iraf.chdir(path_bias)
    os.chdir(path_bias)
    iraf.zerocombine.input = '@' + bias
    if os.path.isfile('MasterBias.fits'):
        subprocess.call(['rm', 'MasterBias.fits'])
    iraf.zerocombine.output = 'MasterBias.fits'
    iraf.zerocombine.rdnoise = 'HIERARCH ESO DET OUT1 RON'
    iraf.zerocombine.gain = 'HIERARCH ESO DET OUT1 GAIN'
    iraf.zerocombine.ccdtype = ''
    #iraf.lpar(iraf.zerocombine)
    iraf.zerocombine(mode='h')

def longslit(args, waves, assoc):
    """Reduce longslit data"""
    combine = []
    mask = 'longslit'
    path = os.path.join(args.objectid, mask).replace(' ', '_')
    for wave in waves:
        flat = inventory.get_file(assoc, mask, obs='flat', wave=wave)
        # finding the flat is enough to know that the mask exists.
        if flat:
            arc = inventory.get_file(assoc, mask, obs='arc', wave=wave)
            science = inventory.get_file(assoc, mask, obs='science',
                                         wave=wave)
            iraf.chdir(path)
            flat, comb = tasks.call_gsflat(args, flat)
            arc = tasks.call_gsreduce(args, arc, flat, args.bias, comb)
            science = tasks.call_gsreduce(args, science, flat, args.bias,
                                          comb)
            tasks.call_gdisplay(args, science, 1)
            science = tasks.call_lacos(args, science, longslit=True)
            tasks.call_gdisplay(args, science, 1)
            tasks.call_gswave(args, arc)
            tasks.call_gstransform(args, arc, arc)
            science = tasks.call_gstransform(args, science, arc)
            tasks.call_gdisplay(args, science, 1)
            combine.append(tasks.call_gsskysub(args, science))
            if len(combine) == len(waves):
                added = tasks.call_imcombine(args, str(mask), combine,
                                             longslit=True)
                tasks.call_gdisplay(args, added, 1)
                spectra = tasks.call_gsextract(args, mask)
                Naps = raw_input('Number of apertures extracted: ')
                # In case you don't see the message after so many
                # consecutive "Enters"
                while Naps == '':
                    Naps = raw_input(
                        'Please enter number of apertures extracted: ')
                Naps = int(Naps)
                tasks.cut_apertures(args.objectid, Naps)
            utils.delete('tmp*')
            iraf.chdir('../..')
    return

def FLAT(masterbias, flat):
    # TODO: the master flat still needs to be normalized
    complete_reduction(flat, masterbias, '')
    path_flat = os.path.dirname(flat)
    flat = flat.replace(path_flat + '/', '')
    iraf.chdir(path_flat)
    os.chdir(path_flat)
    iraf.flatcombine.input = '@' + flat
    if os.path.isfile('MasterFlat.fits'):
        subprocess.call(['rm', 'MasterFlat.fits'])
    iraf.flatcombine.output = 'MasterFlat.fits'
    iraf.flatcombine.process = 'no'
    iraf.flatcombine.subsets = 'no'
    iraf.flatcombine.rdnoise = 'HIERARCH ESO DET OUT1 RON'
    iraf.flatcombine.gain = 'HIERARCH ESO DET OUT1 GAIN'
    iraf.flatcombine.ccdtype = ''
    iraf.flatcombine.statsec = '[800:900,5:105]'
    iraf.flatcombine.reject = 'minmax'
    #iraf.lpar(iraf.flatcombine)
    iraf.flatcombine(mode='h')

def calibrate_science_images(qd, dbFile, data_dir, biasfilename='MCbias',
                             overwrite=True):
    '''
    Despite the fact that it looks like this can be run from outside the
    raw directory, it can't be.

    Bad pixel maps live in
    /Users/bostroem/anaconda/envs/geminiconda/iraf_extern/gemini/gmos/data
    You can find this directory with pyraf: cd gmos; cd data; pwd
    '''
    print("=== Processing Science Images ===")
    cur_dir = os.getcwd()
    iraf.chdir(data_dir)
    prefix = 'rg'
    # Set task parameters.
    # Employ the imaging Static BPM for this set of detectors.
    gmos.gireduce.unlearn()
    sciFlags = {
        'fl_over': 'yes',   # Overscan subtraction
        'fl_trim': 'yes',   # Overscan region trimmed
        'fl_bias': 'yes',   # Subtract Bias residual
        'fl_dark': 'no',    # Subtract Dark
        'fl_flat': 'yes',   # Subtract flat
        'logfile': 'gireduceLog.txt',
        'rawpath': '',
        'fl_vardq': 'yes',  # Propagate VAR and DQ extensions
        'verbose': 'no'
    }
    if qd['Instrument'] == 'GMOS-N':
        sciFlags['bpm'] = 'gmos$data/gmos-n_bpm_HAM_22_12amp_v1.fits'
    else:
        sciFlags['bpm'] = 'gmos$data/gmos-s_bpm_HAM_22_12amp_v1.fits'
    gemtools.gemextn.unlearn()  # disarms a bug in gmosaic
    gmos.gmosaic.unlearn()
    mosaicFlags = {
        'fl_paste': 'no',
        'fl_fixpix': 'no',
        'fl_clean': 'yes',
        'geointer': 'nearest',
        'logfile': 'gmosaicLog.txt',
        'fl_vardq': 'yes',
        'fl_fulldq': 'yes',
        'verbose': 'no'
    }
    # Reduce the science images, then mosaic the extensions in a loop
    filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    for f in filters:
        print("    Processing science images for: %s" % (f))
        qd['Filter2'] = f + '_G%'
        flatFile = 'MCflat_' + f
        sciFiles = fileSelect.fileListQuery(
            dbFile, fileSelect.createQuery('sciImg', qd), qd)
        if len(sciFiles) > 0:
            gmos.gireduce(','.join(str(x) for x in sciFiles),
                          bias=biasfilename, flat1=flatFile, **sciFlags)
            # Combine multi-extension images into one image
            for file in sciFiles:
                gmos.gmosaic(prefix + file, **mosaicFlags)

    iraf.chdir(cur_dir)

def vega(rawFrame, grating, hLineInter, log, over):
    """
    Use iraf.telluric to remove H lines from standard star, then remove
    normalization added by telluric with iraf.imarith.

    The extension for vega_ext.fits is specified from grating (from header
    of telluricfile.fits).

    Args:

    """
    if grating == 'K':
        ext = '1'
        sample = "21537:21778"
        scale = 0.8
    elif grating == 'H':
        ext = '2'
        sample = "16537:17259"
        scale = 0.7
    elif grating == 'J':
        ext = '3'
        sample = "11508:13492"
        scale = 0.885
    elif grating == 'Z':
        ext = '4'
        sample = "*"
        scale = 0.8
    else:
        logging.info(
            "\nWARNING: invalid standard star band. Exiting this correction.")
        return

    if os.path.exists("1_htel" + rawFrame + ".fits"):
        if over:
            os.remove("1_htel" + rawFrame + ".fits")
            iraf.chdir(os.getcwd())
            tell_info = iraf.telluric(
                input="0_tel" + rawFrame + ".fits[1]",
                output="1_htel" + rawFrame,
                cal=RUNTIME_DATA_PATH + 'vega_ext.fits[' + ext + ']',
                xcorr='yes', tweakrms='yes', airmass=1.0, inter=hLineInter,
                sample=sample, threshold=0.1, lag=3, shift=0., dshift=0.05,
                scale=scale, dscale=0.05, offset=0., smooth=1, cursor='',
                mode='al', Stdout=1)
        else:
            logging.info(
                "Output file exists and -over not set - skipping H line "
                "correction")
            return
    else:
        iraf.chdir(os.getcwd())
        tell_info = iraf.telluric(
            input="0_tel" + rawFrame + ".fits[1]",
            output="1_htel" + rawFrame,
            cal=RUNTIME_DATA_PATH + 'vega_ext.fits[' + ext + ']',
            xcorr='yes', tweakrms='yes', airmass=1.0, inter=hLineInter,
            sample=sample, threshold=0.1, lag=3, shift=0., dshift=0.05,
            scale=scale, dscale=0.05, offset=0., smooth=1, cursor='',
            mode='al', Stdout=1)

    # need this loop to identify telluric output containing warning about
    # pix outside calibration limits (different formatting)
    if "limits" in tell_info[-1].split()[-1]:
        norm = tell_info[-2].split()[-1]
    else:
        norm = tell_info[-1].split()[-1]

    if os.path.exists("final_tel_no_hLines_no_norm.fits"):
        if over:
            # Subtle bugs in iraf mean imarith doesn't work. So we use an
            # astropy/numpy solution.
            # Open the image and the scalar we will be dividing it by.
            operand1 = astropy.io.fits.open(
                "1_htel" + rawFrame + '.fits')[0].data
            operand2 = float(norm)
            # Create a new data array
            multiplied = np.array(operand1, copy=True)
            # Don't forget to include the original header! If you don't,
            # later IRAF tasks get confused.
            header = astropy.io.fits.open(
                "1_htel" + rawFrame + '.fits')[0].header
            for i in range(len(multiplied)):
                if operand2 != 0:
                    multiplied[i] = operand1[i] / operand2
                else:
                    multiplied[i] = 1
            # Set the data and header of the in-memory image
            hdu = astropy.io.fits.PrimaryHDU(multiplied)
            hdu.header = header
            # Finally, write the new image to a new .fits file. It only has
            # one extension; zero, with a header and data.
            hdu.writeto('final_tel_no_hLines_no_norm.fits')
            #iraf.imarith(operand1="1_htel" + rawFrame, op='/', operand2=norm, result='final_tel_no_hLines_no_norm', title='', divzero=0.0, hparams='', pixtype='', calctype='', verbose='yes', noact='no', mode='al')
        else:
            logging.info(
                "Output file exists and -over not set - skipping H line "
                "normalization correction")
    else:
        #iraf.imarith(operand1="1_htel" + rawFrame, op='/', operand2=norm, result='final_tel_no_hLines_no_norm', title='', divzero=0.0, hparams='', pixtype='', calctype='', verbose='yes', noact='no', mode='al')
        operand1 = astropy.io.fits.open("1_htel" + rawFrame + '.fits')[0].data
        operand2 = float(norm)
        multiplied = np.array(operand1, copy=True)
        header = astropy.io.fits.open("1_htel" + rawFrame + '.fits')[0].header
        for i in range(len(multiplied)):
            if operand2 != 0:
                multiplied[i] = operand1[i] / operand2
            else:
                multiplied[i] = 1
        hdu = astropy.io.fits.PrimaryHDU(multiplied)
        hdu.header = header
        hdu.writeto('final_tel_no_hLines_no_norm.fits')

    if os.path.exists('final_tel_no_hLines_no_norm.fits'):
        os.remove("1_htel" + rawFrame + ".fits")
        shutil.move('final_tel_no_hLines_no_norm.fits',
                    "1_htel" + rawFrame + ".fits")

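# The per-pixel division loop in vega() above can be collapsed into a single
# vectorized numpy expression; a minimal sketch with the same divide-by-zero
# guard (hypothetical helper, not the original pipeline code):
import astropy.io.fits
import numpy as np

def divide_by_norm(infile, outfile, norm):
    """Divide a spectrum by the scalar `norm`, writing a single-extension file."""
    with astropy.io.fits.open(infile) as hdulist:
        data = hdulist[0].data
        header = hdulist[0].header
    scale = float(norm)
    result = data / scale if scale != 0 else np.ones_like(data)
    # Keep the original header so later IRAF tasks are not confused.
    astropy.io.fits.PrimaryHDU(data=result, header=header).writeto(outfile)
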
# ----- Importing IRAF from the root directory ----- #
current_dir = os.getcwd()
os.chdir(ic.dir_iraf)
from pyraf import iraf

# ----- Loading Ha sum images ----- #
from astropy.io import fits

dir_cmb = '/data/jlee/DATA/Gemini/Programs/GN-2019A-Q-215/analysis/combine/'
working_dir = dir_cmb
os.chdir(working_dir)
iraf.chdir(working_dir)

# Ha_list = sorted(glob.glob('Ha_sum-*.fits'))

# ----- Calculating WCS offsets ----- #
from astropy.io import fits

PA = 85.0 * np.pi / 180.0
for i in np.arange(len(ic.centwave)):
    ref_fits = glob.glob(ic.dir_iraf + ic.nredux + '_' + ic.centwave[i] +
                         '/*' + ic.cube_ref + '_3D.fits')
    if (ref_fits != []):
        h01 = fits.getheader(ref_fits[0], ext=0)
        f = open('offset.txt', 'w')

if __name__ == "__main__":
    # copy over sensr.fits, sensb.fits files before running this script

    # launch the image viewer
    # os.system('ds9 &')

    topdir = os.getcwd()
    # Get the raw directory
    rawpath = '%s/raw/' % topdir

    # Sort the files into the correct directories
    fs = sort()
    # Change into the reduction directory
    iraf.chdir('work')

    # Initialize variables that depend on which site was used
    extfile, observatory, base_stddir, rawpath = init_northsouth(fs, topdir,
                                                                 rawpath)
    # Get the observation type
    obstypes, obsclasses = getobstypes(fs)
    # Make the bias frame
    makebias(fs, obstypes, rawpath)
    # get the object name
    objname = getobjname(fs, obstypes)
    # Make the text files for the IRAF tasks
    maketxtfiles(fs, obstypes, obsclasses, objname)

def refaver(reffiles, combined_name):
    """Average two reference files together using msarith.

    .. warning:: This task requires IRAF/PyRAF to be installed.

    Parameters
    ----------
    reffiles : list
        List of reference files to be averaged together
    combined_name : str
        Output name of the combined file

    """
    from pyraf import iraf
    from iraf import stsdas
    from iraf import mstools

    print('#-----------------------#')
    print('combining datasets')
    print(reffiles)
    print('into')
    print(combined_name)
    print('#-----------------------#')

    all_paths = {os.path.split(item)[0] for item in reffiles}
    assert len(all_paths) == 1, "More than one path found"

    initial_dir = os.getcwd()
    os.chdir(list(all_paths)[0])
    try:
        iraf.chdir(list(all_paths)[0])
    except Exception:
        iraf.chdir(''.join(
            ['/grp/hst/stis/darks_biases/refstis_new/', list(all_paths)[0]]))

    all_subfiles = []
    for subfile in reffiles:
        subfile = os.path.split(subfile)[-1]
        outfile = subfile.replace('.fits', '_aver.fits')
        print("Running msarith / 2 on {}".format(subfile))
        iraf.msarith(subfile, '/', 2, outfile, verbose=1)
        all_subfiles.append(outfile)

    assert len(all_subfiles) == 2, \
        'Length of subfiles doesnt equal 2: {}'.format(all_subfiles)

    if not combined_name.endswith('.fits'):
        combined_name = combined_name + '.fits'

    #-- remove path from output name
    combined_name = os.path.split(combined_name)[-1]

    iraf.msarith(all_subfiles[0], '+', all_subfiles[1], combined_name,
                 verbose=1)

    for filename in all_subfiles:
        os.remove(filename)

    #-- move back to beginning location
    os.chdir(initial_dir)
    iraf.chdir(initial_dir)

def ns(args, cluster, mask, files_science, assoc, cutdir,
       align_suffix='_aligned'):
    """nod-and-shuffle -- NOT YET IMPLEMENTED"""
    Nmasks = 0
    combine = []
    print('Mask {0}'.format(mask), end=2*'\n')
    path = os.path.join(args.objectid, 'mask{0}'.format(mask))
    darks = utils.get_darks()
    for science in files_science.keys():
        arc = inventory.get_file(
            assoc, science, mask, obs='arc', wave=files_science[science])
        # finding the arc is enough to know that the mask exists.
        if not arc:
            print('Not enough data for mask {0} (science file {1})'.format(
                mask, science))
            continue
        # all observations sum to 1
        Nmasks += 1 / len(files_science.keys())
        #arc = inventory.get_file(
        #    assoc, science, mask, obs='arc', wave=files_science[science])
        #utils.copy_MDF(science, args.objectid, str(mask))
        iraf.chdir(path)
        dark = tasks.call_gbias(darks, fl_over='no', fl_trim='yes',
                                fl_vardq='no', fl_inter='no', median='no')
        science = tasks.call_gprepare(science, fl_vardq='no',
                                      fl_addmdf='yes')
        science = tasks.call_gireduce(science, bias=dark, fl_over='no',
                                      fl_trim='yes', fl_bias='yes',
                                      fl_dark='no', fl_flat='no',
                                      fl_addmdf='no')
        sciwithsky = tasks.call_gmosaic(science, fl_paste='no',
                                        geointer='linear', fl_fixpix='no',
                                        fl_clean='yes')
        science = tasks.call_gnsskysub(science, fl_paste='no',
                                       fl_fixpix='no', fl_clean='yes',
                                       fl_fixnc='no')
        science = tasks.call_gmosaic(science, fl_paste='no',
                                     geointer='linear', fl_fixpix='no',
                                     fl_clean='yes')
        offsetfile = utils.write_offsets(inimages, 'offsets.dat')
        # SHOULD PROBABLY CREATE A BPM, see step 7 of Adam's notes
        # (skipping for now)
        #science = tasks.call_imcombine(science,
        #flat, comb = tasks.call_gsflat(
        #    args, flat, fl_over='yes', fl_inter='yes', fl_answer='yes')
        #arc = tasks.call_gsreduce(args, arc, '', comb)
        #science = tasks.call_gsreduce(args, science, '', comb, mode='ns1')
        #tasks.call_gdisplay(args, science, 1)
        ##Nslits = utils.get_nslits(science)
        ##science = tasks.call_lacos(args. science, Nslits)
        #science = tasks.call_gnsskysub(science)
        #tasks.call_gdisplay(args, science, 1)
        #tasks.call_gswave(arc)
        #tasks.call_gstransform(arc, arc)
        #science = tasks.call_gsreduce(args, science, flat, '', mode='ns2')
        #science = tasks.call_gstransform(science, arc)
        ##science = tasks.call_gsreduce(args, science, flat, '', bias=False)
        ##if align:
        ##    tasks.call_align(arc, align, Nslits)
        ##science = tasks.call_gstransform(science, arc)
        ##if align:
        ##    tasks.call_align(science, align_suffix, Nslits)
        ##    tasks.call_gdisplay(args, science + align_suffix, 1)
        ##science = tasks.call_gnscombine(science, align_suffix)
        tasks.call_gdisplay(args, science, 1)
        utils.delete('tmp*')
        iraf.chdir('../..')
    # cut spectra
    tasks.cut_spectra(args, str(mask), spec='2d')
    tasks.cut_spectra(args, str(mask), spec='1d')
    check_gswave.main(
        args.objectid, mask, gmos.gswavelength.logfile, 'gswcheck.log')
    return Nmasks

arcspecs.append(np.array([[(out[1].sum(1))]]))

redfn = calfn.replace('.fits', '_spec.fits')
arcspec = np.median(np.array(arcspecs), 0)
archeader = thesearcheaders[jj]
# APNUM2 and the WCSDIM value of 3 were scrambled across the two tuples in
# the original; repaired so keys and keyvals pair one-to-one (cf. the SOFI
# variant below, where WCSDIM is set to 3).
keys = ('BANDID1', 'BANDID2', 'BANDID3', 'BANDID4', 'APNUM1', 'APNUM2',
        'WCSDIM', 'CTYPE3', 'CD3_3', 'LTM1_1', 'LTM2_2', 'LTM3_3',
        'WAT0_001', 'WAT1_001', 'WAT2_001', 'WAT3_001')
keyvals = ('spectrum: background fit, weights variance, clean yes',
           'background: background fit',
           'sigma - background fit, weights variance, clean yes',
           'wavelength', '1 1 540.99 550.99', '1 1 538.02 548.02', 3,
           'LINEAR ', 1., 1., 1., 1., 'system=equispec',
           'wtype=linear label=Pixel', 'wtype=linear', 'wtype=linear')
for kk, kv in zip(keys, keyvals):
    archeader[kk] = kv
if writefiles:
    pyfits.writeto(redfn, np.tile(arcspec, (4, 1, 1)).astype(np.float32),
                   archeader, clobber=clobber)
if wavecal:
    os.chdir(_proc)
    iraf.chdir(_proc)
    loc_redfn = os.path.split(redfn)[1]
    if waveCalRef is None:
        iraf.identify(loc_redfn, database=_wldat, ftype='emission',
                      fwidth=3, order=2, niterate=3, cradius=3,
                      coordlist=lamp_list, function='spline3')
        waveCalRef = '' + loc_redfn
    else:
        iraf.reidentify(waveCalRef, loc_redfn, interactive='no',
                        override='yes', refit='yes', nlost=1, cradius=10,
                        addfeatures='no', coordlist=lamp_list)
        #, function='spline3', order=2, niterate=3)
    disp_soln = ns.getdisp(
        _wldat + os.sep + 'id' + loc_redfn.replace('.fits', ''), 'spline3')
    if writefiles:
        ns.wspectext(loc_redfn)

datasets.append([redfns[-nthissci:], redfn])
os.chdir(dir0)
iraf.chdir(dir0)

def transform(rawdir, silent=False, verbose=False):
    '''
    Transform OH_stack 2-D FITS image for wavelength calibration checks

    Parameters
    ----------
    rawdir : str
        Path to raw files. Must end in a '/'

    silent : boolean
        Turns off stdout messages. Default: False

    verbose : boolean
        Turns on additional stdout messages. Default: True

    Returns
    -------
    2-D image with transformation called 'fOH_stack.fits' and
    'tfOH_stack.fits'

    Notes
    -----
    Created by Chun Ly, 4 July 2017
    Modified by Chun Ly, 20 September 2017
     - Call check_path()
    Modified by Chun Ly, 22 November 2017
     - Add file checking and log.warn calls
    Modified by Chun Ly, 29 November 2017
     - Bug fix: Missing else statement
    Modified by Chun Ly, 9 January 2018
     - Import glog and call for stdout and ASCII logging
    Modified by Chun Ly, 19 June 2018
     - Call QA_wave_cal.get_database_model
     - Pass function and order to nsfitcoords for OH stack
    Modified by Chun Ly, 20 June 2018
     - Set nsfitcoords xorder fitting
    Modified by Chun Ly, 22 June 2018
     - Call residual_wave_cal for wavelength solution check
    '''

    # + on 09/01/2018
    logfile = rawdir + 'OH_stack.log'
    mylogger = glog.log0(logfile)._get_logger()

    cdir = os.getcwd() + '/'  # + on 06/05/2017

    rawdir = check_path(rawdir)  # + on 20/09/2017

    iraf.chdir(rawdir)

    mylogger.info("Running nsfitcoords on OH_stack")  # Mod on 09/01/2018
    outfile1 = rawdir + 'fOH_stack.fits'
    if not exists(outfile1):
        func0, order0 = QA_wave_cal.get_database_model(rawdir, 'OH')
        iraf.gnirs.nsfitcoords('wOH_stack.fits', outprefix='',
                               outspectra='fOH_stack.fits',
                               lamp='wOH_stack.fits',
                               database='database_OH/',
                               function=func0, lyorder=order0,
                               lxorder=QA_wave_cal.xorder)
    else:
        # Mod on 09/01/2018
        mylogger.warn('File exists!!! : ' + outfile1)
        mylogger.warn('Will not run nsfitcoords on OH stacked data')

    outfile2 = rawdir + 'tfOH_stack.fits'
    if not exists(outfile2):
        iraf.gnirs.nstransform('fOH_stack.fits', outprefix='',
                               outspectra='tfOH_stack.fits',
                               database='database_OH/')
    else:
        # Mod on 09/01/2018
        mylogger.warn('File exists!!! : ' + outfile2)
        mylogger.warn('Will not run nstransform on OH stacked data')

    iraf.chdir(cdir)

    # + on 22/06/2018
    QA_wave_cal.residual_wave_cal(rawdir, dataset='OH', cal='OH')

def wave_cal(rawdir, cdir, silent=False, verbose=False):
    '''
    Run gnirs.nswavelength on OH_stack 2-D FITS image for wavelength
    calibration

    Parameters
    ----------
    rawdir : str
        Path to raw files. Must end in a '/'

    silent : boolean
        Turns off stdout messages. Default: False

    verbose : boolean
        Turns on additional stdout messages. Default: True

    Returns
    -------
    2-D image with transformation called 'fOH_stack.fits' and
    'tfOH_stack.fits'

    Notes
    -----
    Created by Chun Ly, 13 July 2017
    Modified by Chun Ly, 20 September 2017
     - Call check_path()
    Modified by Chun Ly, 16 November 2017
     - Call wave_cal_script to get PyRAF code
    Modified by Chun Ly, 16 November 2017
     - Bug fix: indentation typo with else statement
    Modified by Chun Ly, 20 November 2017
     - Bug fix: Pass in cdir
    Modified by Chun Ly, 9 January 2018
     - Import glog and call for stdout and ASCII logging
    Modified by Chun Ly, 14 June 2018
     - Import and call get_OH_centers
    Modified by Chun Ly, 19 June 2018
     - Pass gnirs logfile to mylogger
    '''

    # + on 09/01/2018
    logfile = rawdir + 'OH_stack.log'
    mylogger = glog.log0(logfile)._get_logger()

    rawdir = check_path(rawdir)  # + on 20/09/2017

    iraf.chdir(rawdir)

    timestamp = systime().replace(':', '.')
    logfile = rawdir + 'gnirs_' + timestamp + '.log'
    iraf.gemini.gnirs.logfile = logfile

    mylogger.info("GNIRS logfile : " + logfile)

    get_OH_centers.main(rawdir)

    # + on 16/11/2017
    script_file = 'wave_cal_OH.py'
    if not exists(script_file):
        wave_cal_script.main(rawdir, line_source='OH')
    else:
        # Mod on 09/01/2018
        mylogger.info('File exists!!! : ' + script_file)
        mylogger.info('Will not override!!!')

    # + on 16/11/2017
    do_run = 0
    if not exists('wOH_stack.fits'):
        do_run = 1
    if do_run:
        # Mod on 09/01/2018
        mylogger.info("In order to perform interactive calibration, open up")
        mylogger.info("a PyRAF terminal in an anaconda IRAF environment")
        mylogger.info("'cd' into " + rawdir)
        mylogger.info("Execute the following command :")
        mylogger.info("execfile('" + script_file + "')")
        t_out = raw_input(
            "## Hit RETURN when OH wavelength calibration is completed")
    else:
        # Mod on 09/01/2018
        mylogger.warn('Files exist!!!')
        mylogger.warn('Will not run nswavelength on OH stacked data')

    iraf.chdir(cdir)

def inventory(self, pattern="*.fits"):
    # Change to the night's directory
    os.chdir(self.obsnight)
    print(" Changed from directory %s --> %s" % (self.cwd, self.obsnight),
          file=sys.stderr)
    print(" We are now in: %s " % os.getcwd(), file=sys.stderr)

    # Do the same in IRAF
    iraf.chdir(self.obsnight)
    print(" We are now in Iraf's: ", file=sys.stderr)
    iraf.pwd()

    # Get all the fits file names and sort them out
    self.fitslist = glob.glob(pattern)  # Keep it relative, no absolute paths
    print(" Found %s fits files in %s " % (len(self.fitslist),
                                           self.obsnight), file=sys.stderr)
    print(" Will make data inventory now... this might take a while",
          file=sys.stderr)

    # Get the filter names
    self.getfilters()  # Returns self.filters

    # Make the images hash
    self.zeros = []
    self.dflats = {}
    self.object = {}
    self.forsky = {}  # BCS images only!
    for filter in self.filters:
        self.dflats[filter] = []
        self.object[filter] = []
        self.forsky[filter] = []

    # Loop through all files
    i = 0
    for file in self.fitslist:
        try:
            header = pyfits.getheader(file)
            filter1 = header['FILTER1']
            filter2 = header['FILTER2']
            obstype = header['OBSTYPE']
            exptime = header['EXPTIME']
            Namps = header['NAMPS']
        except:
            continue

        if filter1.split()[1] != 'Open':
            filter = filter1.split()[1]
        else:
            filter = filter2.split()[1]
        print("Found %s -- %s" % (filter, file))
        #try:
        #    filter = filter.split()[1]
        #except:
        #    filter = filter.split()[0]

        if i == 0:
            Namp0 = Namps
        if Namps != Namp0:
            print("ERROR: Namps mixed, %s - %s, image:%s" %
                  (Namp0, Namps, file))

        # Zero frames can have any filter they want
        if obstype == 'ZERO':
            self.zeros.append(file)
        # In case there is a spurious filter
        if filter not in self.filters:
            continue
        elif exptime is None:
            print(" Skipping %s, no EXPTIME key" % file, file=sys.stderr)
            continue
        elif Namps is None:
            print(" Skipping %s, no NAMPS key" % file, file=sys.stderr)
            continue
        elif obstype == 'DFLAT' or obstype == 'FLAT':
            self.dflats[filter].append(file)
        elif obstype == 'OBJECT':
            self.object[filter].append(file)
            if float(exptime) >= 80:  # Select the program files only
                self.forsky[filter].append(file)
                # tweak the filter name
                if filter in self.filters:
                    tweak_filter(file)
        i = i + 1

    self.Namps = Namp0
    print(" Image inventory ready", file=sys.stderr)

def makeBlackBody(rawFrame, grating, log, over):
    """
    - From Z header information from the cube, make a black body.
    - Make scale factor: mean of black body over fLambda.
    - Multiply blackbody spectrum by scale factor.

    Creates:
        - Unscaled blackbody, bbody.fits
        - A scaled 1D blackbody spectrum, scaledBlackBody.fits[0]
    """
    # Find the start and end wavelengths of the blackbody from our cube
    # header.
    target_header = astropy.io.fits.open('../products_uncorrected/ctfbrsn' +
                                         rawFrame + '.fits')
    wstart = target_header[1].header['CRVAL3']
    wdelt = target_header[1].header['CD3_3']
    wend = wstart + (2040 * wdelt)
    crpix3 = target_header[1].header['CRPIX3']

    # Find the standard star temperature from 0_std_starRAWNAME.txt
    try:
        with open("0_std_star" + rawFrame + ".txt", "r") as f:
            lines = f.read()
        # ['k', 'K', '7.615', '9700', 'h', 'H', '7.636', '9700',
        #  'j', 'J', '7.686', '9700', 'j', 'J', '7.686', '9700']
        lines = lines.split()
        # Mag is entry after the grating, but may also be N/A. Check for
        # that.
        for i in range(len(lines)):
            if grating in lines[i]:
                standardStarSpecTemperature = lines[i + 2]
                logging.info("Read a standard star teff of " +
                             str(standardStarSpecTemperature))
    except IOError:
        logging.info("No std_starRAWNAME.txt file found; setting the spec "
                     "temperature to 9700K for a rough flux scaling")
        standardStarSpecTemperature = 9700

    if crpix3 != 1.:
        logging.info("WARNING in Reduce: CRPIX of wavelength axis not "
                     "equal to one. Exiting flux calibration.")
        raise SystemExit

    # Make a blackbody for each of the 2040 NIFS spectral pixels.
    if os.path.exists("3_BBody" + rawFrame + ".fits"):
        if over:
            os.remove("3_BBody" + rawFrame + ".fits")
            iraf.chdir(os.getcwd())
            iraf.mk1dspec(input="3_BBody" + rawFrame, output="", title='',
                          ncols=2040, naps=1, header='', wstart=wstart,
                          wend=wend,
                          temperature=standardStarSpecTemperature)
            logging.info("\nMade a blackbody in 3_BBody{}.fits".format(
                rawFrame))
        else:
            logging.info("\nOutput exists and -over not set - skipping "
                         "production of unscaled black body")
    else:
        iraf.chdir(os.getcwd())
        iraf.mk1dspec(input="3_BBody" + rawFrame, output="", title='',
                      ncols=2040, naps=1, header='', wstart=wstart,
                      wend=wend, temperature=standardStarSpecTemperature)
        logging.info("\nMade a blackbody in 3_BBody{}.fits".format(rawFrame))

def createMaster(self):
    """
    Create master dome flat from dome flat file lists
    """
    # remove old file
    list_lampon = []
    list_lampoff = []
    if type(self.__inpfiles_on) == type(list()):
        list_lampon = self.__inpfiles_on
    elif os.path.isdir(self.__inpfiles_on):
        frlist = os.listdir(self.__inpfiles_on)
        path = os.path.abspath(self.__inpfiles_on)
        list_lampon = [
            os.path.join(path, f) for f in sorted(frlist)
            if re.match(r"luci\d.\d{8}.\d{4}.fits", f)
        ]
    else:
        raise Exception("Cannot read flat-on files")

    if type(self.__inpfiles_off) == type(list()):
        list_lampoff = self.__inpfiles_off
    elif os.path.isdir(self.__inpfiles_off):
        frlist = os.listdir(self.__inpfiles_off)
        path = os.path.abspath(self.__inpfiles_off)
        list_lampoff = [
            os.path.join(path, f) for f in sorted(frlist)
            if re.match(r"luci\d.\d{8}.\d{4}.fits", f)
        ]
    else:
        raise Exception("Cannot read flat-off files")

    if not os.path.exists(self.__tempdir):
        os.makedirs(self.__tempdir)

    ### IRAF will overwrite existing files
    iraf.clobber = 'yes'

    base, infile = os.path.split(self.__outpname)
    if not os.path.exists(base):
        os.makedirs(base)
    iraf.chdir(base)

    ### combine on-frames
    flat_lampon = os.path.join(self.__tempdir, "flat_lampON.fits")
    with open(os.path.join(self.__tempdir, "files_on.list"), 'w') as fo:
        for f in list_lampon:
            fo.write(str(f) + "\n")
    iraf.mscred.flatcombine(
        input="@" + (os.path.join(self.__tempdir,
                                  "files_on.list")).replace('//', '/'),
        output=flat_lampon, combine='median', ccdtype='', process='no',
        reject='sigclip', subset='no', scale='mode')

    ### combine off-frames
    flat_lampoff = os.path.join(self.__tempdir, "flat_lampOFF.fits")
    with open(os.path.join(self.__tempdir, "files_off.list"), 'w') as fo:
        for f in list_lampoff:
            fo.write(str(f) + "\n")
    iraf.mscred.flatcombine(
        input="@" + (os.path.join(self.__tempdir,
                                  "files_off.list")).replace('//', '/'),
        output=flat_lampoff, combine='median', ccdtype='', process='no',
        reject='sigclip', subset='no', scale='mode')

    ### subtract files
    flat_diff = os.path.join(self.__tempdir, "flat_lampON_OFF.fits")
    iraf.imarith(operand1=flat_lampon, operand2=flat_lampoff, op='-',
                 result=flat_diff)

    ### Normalize flat
    if self.__normal:
        f = fits.open(flat_diff)
        naxis1 = f[0].header['NAXIS1']
        naxis2 = f[0].header['NAXIS2']
        offset1 = int(naxis1 * 0.1)
        offset2 = int(naxis2 * 0.1)
        median = np.median(f[0].data[offset2:(naxis2 - offset2),
                                     offset1:(naxis1 - offset1)])
        iraf.imarith(operand1=flat_diff, operand2=median, op='/',
                     result=self.__outpname.replace("//", "/"))
    else:
        shutil.move(flat_diff, self.__outpname)

    iraf.chdir()

    ### Clean up
    tryrem(self.__tempdir, "flat_lampOFF.fits")
    tryrem(self.__tempdir, "flat_lampON.fits")
    tryrem(self.__tempdir, "flat_lampON_OFF.fits")
    tryrem(self.__tempdir, "files_on.list")
    tryrem(self.__tempdir, "files_off.list")

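# A compact sketch of the normalization step above, done in pure
# numpy/astropy instead of iraf.imarith: the median is taken over the frame
# with a 10% border excluded on each side, then the ON-OFF flat is divided
# by it (illustrative only; function name is hypothetical):
import numpy as np
from astropy.io import fits

def normalize_flat(flat_diff_path, out_path):
    """Divide a lamp ON-OFF flat by the median of its central region."""
    with fits.open(flat_diff_path) as f:
        data = f[0].data
        header = f[0].header
    ny, nx = data.shape
    dy, dx = int(0.1 * ny), int(0.1 * nx)
    median = np.median(data[dy:ny - dy, dx:nx - dx])
    fits.PrimaryHDU(data=data / median, header=header).writeto(out_path)
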
arcspecs.append(np.array([[(out[1].sum(1))]]))

redfn = _proc + ('SOFI_%s_%s_arcs.fits' % (targ, mode)).replace(' ', '_')
#calfn.replace('.fits', '_spec.fits')
arcspec = np.median(np.array(arcspecs), 0)
archeader = thesearcheaders[jj]
keys = ('BANDID1', 'BANDID2', 'BANDID3', 'BANDID4', 'APNUM1', 'WCSDIM',
        'CTYPE3', 'CD3_3', 'LTM1_1', 'LTM2_2', 'LTM3_3', 'WAT0_001',
        'WAT1_001', 'WAT2_001', 'WAT3_001', 'FILENAME')
keyvals = ('spectrum: background fit, weights variance, clean yes',
           'background: background fit',
           'sigma - background fit, weights variance, clean yes',
           'unknown', '1 1 540.99 550.99', 3, 'LINEAR ', 1., 1., 1., 1.,
           'system=equispec', 'wtype=linear label=Pixel', 'wtype=linear',
           'wtype=linear', os.path.split(thesearcfns[-1])[-1])
for kk, kv in zip(keys, keyvals):
    archeader[kk] = kv
if (writefiles or wavecal) and not didwavecal[mode]:
    pyfits.writeto(redfn, np.tile(arcspec, (4, 1, 1)).astype(np.float32),
                   archeader, clobber=True, output_verify='ignore')
if wavecal and not didwavecal[mode]:
    os.chdir(_proc)
    iraf.chdir(_proc)
    loc_redfn = os.path.split(redfn)[1]
    iraf.identify(loc_redfn, database=_wldat, ftype='emission', fwidth=3,
                  order=2, niterate=3, cradius=3, coordlist=lamp_list,
                  function='spline3')
    disp_soln = ns.getdisp(
        _wldat + os.sep + 'id' + loc_redfn.replace('.fits', ''), 'spline3')
    waveout = pyfits.open(loc_redfn)
    waveout[0].data[-1, 0] = disp_soln
    waveout.writeto(loc_redfn, clobber=True)
    if writefiles:
        ns.wspectext(loc_redfn)
    didwavecal[mode] = True

datasets.append([redfns[-nthissci:], redfn, mode])
#os.chdir(dir0)
#iraf.chdir(dir0)

#for the FUV data
idir = '/user/bostroem/science/12465_otfr20120425/mama/'
##idir = '/Users/bostroem/science/12465_otfr20120425/mama/'
os.chdir(idir)
flist = glob.glob('obrc04???_flt.fits') + glob.glob('obrc05???_flt.fits')
dec_dict = make_declination_dict(flist)
for targ_dec in dec_dict.keys():
    combine_dithered_images(dec_dict, targ_dec, options.use_hdr_offset)
    pdb.set_trace()
'''
#idir = '/Users/bostroem/science/12465_otfr20121109/ccd/'
#idir = '/user/bostroem/science/12465_otfr20130503/ccd/'
idir = '/Users/bostroem/science/2014_dc_aas/'
os.chdir(idir)
iraf.chdir(idir)
'''
flist = glob.glob('obrc06*_flt.fits')  #+glob.glob('ob???????_flt.fits')
dec_dict = make_declination_dict(flist)
for targ_dec in dec_dict.keys():
    combine_dithered_images(dec_dict, targ_dec, options.use_hdr_offset)
'''
flist = glob.glob('obrc01*_flc_flt.fits')  #+glob.glob('ob???????_flc_flt.fits')
dec_dict = make_declination_dict(flist)
for targ_dec in dec_dict.keys():
    combine_dithered_images(dec_dict, targ_dec, options.use_hdr_offset)
'''
flist = glob.glob('obrc07*_flt.fits')  #+glob.glob('ob???????_flt.fits')
dec_dict = make_declination_dict(flist)

def cross_check(path, cdir, dbase):
    '''
    Check arc/OH calibration against OH/arc dataset

    Parameters
    ----------
    silent : boolean
        Turns off stdout messages. Default: False

    verbose : boolean
        Turns on additional stdout messages. Default: True

    skysub : boolean
        Display skysubtracted or un-skysubtracted images. Default: False

    Returns
    -------

    Notes
    -----
    Created by Chun Ly, 31 May 2018
     - Generate transformed image with iraf.gnirs.nstransform
     - Call mylogger to warn if files exist
     - Change gnirs logfile
     - Move call to iraf.gemini.nsheaders to this function
     - iraf.nstransform does not like suffixes. Using underscores
     - Call OH_check
     - Check for database file before running nsfitcoords and nstransform
    Modified by Chun Ly, 3 June 2018
     - Bug fix: Call OH_check with cross_check=True
     - Call arc_check2 for database_OH case
    Modified by Chun Ly, 8 June 2018
     - Exit when db_file not available
    Modified by Chun Ly, 19 June 2018
     - Call get_database_model
     - Pass function and order to nsfitcoords
    Modified by Chun Ly, 20 June 2018
     - Set nsfitcoords xorder fitting
    '''

    logfile = path + 'QA_wave_cal.log'
    mylogger = glog.log0(logfile)._get_logger()

    timestamp = systime().replace(':', '.')
    logfile = path + 'gnirs_' + timestamp + '.log'
    iraf.gemini.gnirs.logfile = logfile

    iraf.gemini.nsheaders("gnirs")

    mylogger.info("Raw data is located in : %s" % path)
    mylogger.info("GNIRS logfile : " + logfile)

    iraf.chdir(path)
    if dbase == 'database/':
        infile = 'OH_stack.fits'
        outfile = 'fOH_stack_arc.fits'
        lamp = 'warc_stack.fits'
    if dbase == 'database_OH/':
        infile = 'arc_stack.fits'
        outfile = 'farc_stack_OH.fits'
        lamp = 'wOH_stack.fits'

    db_file = dbase + 'id' + lamp.replace('.fits', '_SCI_1_')
    if not exists(db_file):
        mylogger.warn("Wavelength calibration file not found : " + db_file)
        mylogger.warn("Exiting!!!")
        return
    else:
        if not exists(outfile):
            source0 = 'OH' if '_OH' in dbase else 'arc'
            func0, order0 = get_database_model(path, source0)
            iraf.gnirs.nsfitcoords(infile, outprefix='', outspectra=outfile,
                                   lamp=lamp, database=dbase,
                                   function=func0, lyorder=order0,
                                   lxorder=xorder)
        else:
            mylogger.warn('File exists! : ' + outfile)

        t_outfile = 't' + outfile
        if not exists(t_outfile):
            iraf.gnirs.nstransform(outfile, outprefix='',
                                   outspectra=t_outfile, database=dbase)
        else:
            mylogger.warn('File exists! : ' + t_outfile)

    iraf.chdir(cdir)

    if dbase == 'database/':
        OH_check(path, cross_check=True)

    # + on 03/06/2018
    if dbase == 'database_OH/':
        arc_check2(path, cross_check=True)

def dir_path(path):
    os.chdir(path)
    iraf.chdir(path)

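# dir_path() captures the pattern used throughout these snippets: keep the
# Python and IRAF working directories in lock-step. A hedged sketch of a
# context-manager variant that also restores both on exit, even when a task
# raises (helper name is hypothetical; assumes pyraf is installed):
import contextlib
import os
from pyraf import iraf

@contextlib.contextmanager
def working_dir(path):
    """Temporarily switch both the os and IRAF working directories."""
    old = os.getcwd()
    os.chdir(path)
    iraf.chdir(path)
    try:
        yield
    finally:
        os.chdir(old)
        iraf.chdir(old)

# Usage:
# with working_dir('raw/'):
#     iraf.imstat('*.fits')
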
arcspec = np.median(np.array(arcspecs), 0)
archeader = thesearcheaders[jj]
# APNUM2 and the WCSDIM value of 3 were scrambled across the two tuples in
# the original; repaired so keys and keyvals pair one-to-one.
keys = ('BANDID1', 'BANDID2', 'BANDID3', 'BANDID4', 'APNUM1', 'APNUM2',
        'WCSDIM', 'CTYPE3', 'CD3_3', 'LTM1_1', 'LTM2_2', 'LTM3_3',
        'WAT0_001', 'WAT1_001', 'WAT2_001', 'WAT3_001')
keyvals = ('spectrum: background fit, weights variance, clean yes',
           'background: background fit',
           'sigma - background fit, weights variance, clean yes',
           'wavelength', '1 1 540.99 550.99', '1 1 538.02 548.02', 3,
           'LINEAR ', 1., 1., 1., 1., 'system=equispec',
           'wtype=linear label=Pixel', 'wtype=linear', 'wtype=linear')
for kk, kv in zip(keys, keyvals):
    archeader[kk] = kv
if writefiles:
    pyfits.writeto(redfn, np.tile(arcspec, (4, 1, 1)).astype(np.float32),
                   archeader, clobber=clobber)
if wavecal:
    os.chdir(_proc)
    iraf.chdir(_proc)
    loc_redfn = os.path.split(redfn)[1]
    if waveCalRef is None:
        iraf.identify(loc_redfn, database=_wldat, ftype='emission',
                      fwidth=3, order=2, niterate=3, cradius=3,
                      coordlist=lamp_list, function='spline3')
        waveCalRef = '' + loc_redfn
    else:
        iraf.reidentify(waveCalRef, loc_redfn,

def mos(args, mask, files_science, assoc, align_suffix='_aligned'):
    """The reduction process for MOS data.

    It goes through file identification, calibration and extraction of
    spectra.
    """
    Nmasks = 0
    combine = []
    print('Mask {0}'.format(mask), end=2*'\n')
    path = os.path.join(args.objectid, mask).replace(' ', '_')
    # for now
    bias = args.bias
    # debugging - I don't think this should ever happen but hey
    if not files_science:
        raise ValueError('Empty variable `files_science`')
    for science in files_science:
        flat = inventory.get_file(
            assoc, science, mask, obs='flat', wave=files_science[science])
        # finding the flat is enough to know that the mask exists.
        if not flat:
            print('Not enough data for mask {0} (science file {1})'.format(
                mask, science))
            continue
        # all observations add up to 1
        Nmasks += 1 / len(files_science.keys())
        arc = inventory.get_file(
            assoc, science, mask, obs='arc', wave=files_science[science])
        iraf.chdir(path)
        # first gsreduce the flat to create the gradient image for gscut
        grad = tasks.create_gradimage(args, flat, bias)
        flat, comb = tasks.call_gsflat(args, flat)
        arc = tasks.call_gsreduce(args, arc, flat, bias, grad)
        science = tasks.call_gsreduce(args, science, flat, bias, grad)
        tasks.call_gdisplay(args, science, 1)
        Nslits = utils.get_nslits(science)
        science = tasks.call_lacos(args, science, Nslits)
        tasks.call_gdisplay(args, science, 1)
        tasks.call_gswave(args, arc)
        tasks.call_gstransform(args, arc, arc)
        if args.align:
            tasks.call_align(arc, align, Nslits)
        science = tasks.call_gstransform(args, science, arc)
        if args.align:
            tasks.call_align(science, align_suffix, Nslits)
            tasks.call_gdisplay(args, science + align_suffix, 1)
            science = tasks.call_gsskysub(args, science, align_suffix)
            tasks.call_gdisplay(args, science, 1)
            combine.append(science)
        else:
            tasks.call_gdisplay(args, science, 1)
            science = tasks.call_gsskysub(args, science, '')
            tasks.call_gdisplay(args, science, 1)
            combine.append(science)
        # once we've reduced all individual images
        if len(combine) == len(files_science.keys()):
            added = tasks.call_imcombine(args, mask, combine, path, Nslits)
            tasks.call_gdisplay(args, added, 1)
            spectra = tasks.call_gsextract(args, added)
            if args.align:
                aligned = tasks.call_align(added, align, Nslits)
                tasks.call_gdisplay(args, aligned, 1)
        utils.delete('tmp*')
        iraf.chdir('../..')
    # cut spectra
    tasks.cut_spectra(args, added, mask, spec='2d', path=path)
    tasks.cut_spectra(args, spectra, mask, spec='1d', path=path)
    check_gswave.main(
        args.objectid, mask, gmos.gswavelength.logfile, 'gswcheck.log')
    return Nmasks

def ratir_doall(name, ra, dec, refdate, cat="SDSS-R9", varorder=1):
    """Full pipeline for image subtractions with RATIR data."""

    filts = ["r", "i", "Z", "Y", "J", "H"]
    pixscale_dict = {'r': 0.317, 'i': 0.317, 'Z': 0.292, 'Y': 0.292,
                     'J': 0.292, 'H': 0.292}
    exptime_dict = {'r': 80.0, 'i': 80.0, 'Z': 67.11, 'Y': 67.11,
                    'J': 67.11, 'H': 67.11}
    gain_dict = {'r': 1.23, 'i': 1.23, 'Z': 2.20, 'Y': 2.20,
                 'J': 2.40, 'H': 2.40}
    readn_dict = {'r': 13.6, 'i': 13.6, 'Z': 14.70, 'Y': 14.70,
                  'J': 11.25, 'H': 11.25}
    sat_dict = {'r': 50000.0, 'i': 50000.0, 'Z': 24000.0, 'Y': 24000.0,
                'J': 24000.0, 'H': 24000.0}

    dirs = glob.glob("20??????")
    dirs.remove(refdate)

    # Create directory structure
    for filt in filts:
        os.mkdir(filt)

    # Rename "new" files
    for dir in dirs:
        if os.path.exists("%s/stack_C0_r.fits" % dir) and os.path.exists(
                "%s/stack_C0_r.rms.fits" % dir):
            shutil.copy("%s/stack_C0_r.fits" % dir, "r/%s_r.fits" % dir)
            shutil.copy("%s/stack_C0_r.rms.fits" % dir,
                        "r/%s_r.rms.fits" % dir)
        if os.path.exists("%s/stack_C1_i.fits" % dir) and os.path.exists(
                "%s/stack_C1_i.rms.fits" % dir):
            shutil.copy("%s/stack_C1_i.fits" % dir, "i/%s_i.fits" % dir)
            shutil.copy("%s/stack_C1_i.rms.fits" % dir,
                        "i/%s_i.rms.fits" % dir)
        if os.path.exists("%s/stackA_C2_ZY.fits" % dir) and os.path.exists(
                "%s/stackA_C2_ZY.rms.fits" % dir):
            shutil.copy("%s/stackA_C2_ZY.fits" % dir, "Z/%s_Z.fits" % dir)
            shutil.copy("%s/stackA_C2_ZY.rms.fits" % dir,
                        "Z/%s_Z.rms.fits" % dir)
        if os.path.exists("%s/stackB_C2_ZY.fits" % dir) and os.path.exists(
                "%s/stackB_C2_ZY.rms.fits" % dir):
            shutil.copy("%s/stackB_C2_ZY.fits" % dir, "Y/%s_Y.fits" % dir)
            shutil.copy("%s/stackB_C2_ZY.rms.fits" % dir,
                        "Y/%s_Y.rms.fits" % dir)
        if os.path.exists("%s/stackA_C3_JH.fits" % dir) and os.path.exists(
                "%s/stackA_C3_JH.rms.fits" % dir):
            shutil.copy("%s/stackA_C3_JH.fits" % dir, "J/%s_J.fits" % dir)
            shutil.copy("%s/stackA_C3_JH.rms.fits" % dir,
                        "J/%s_J.rms.fits" % dir)
        if os.path.exists("%s/stackB_C3_JH.fits" % dir) and os.path.exists(
                "%s/stackB_C3_JH.rms.fits" % dir):
            shutil.copy("%s/stackB_C3_JH.fits" % dir, "H/%s_H.fits" % dir)
            shutil.copy("%s/stackB_C3_JH.rms.fits" % dir,
                        "H/%s_H.rms.fits" % dir)

    # Rename reference files
    shutil.copy("%s/stack_C0_r.fits" % refdate, "r/ref_r.fits")
    shutil.copy("%s/stack_C0_r.rms.fits" % refdate, "r/ref_r.rms.fits")
    shutil.copy("%s/stack_C1_i.fits" % refdate, "i/ref_i.fits")
    shutil.copy("%s/stack_C1_i.rms.fits" % refdate, "i/ref_i.rms.fits")
    shutil.copy("%s/stackA_C2_ZY.fits" % refdate, "Z/ref_Z.fits")
    shutil.copy("%s/stackA_C2_ZY.rms.fits" % refdate, "Z/ref_Z.rms.fits")
    shutil.copy("%s/stackB_C2_ZY.fits" % refdate, "Y/ref_Y.fits")
    shutil.copy("%s/stackB_C2_ZY.rms.fits" % refdate, "Y/ref_Y.rms.fits")
    shutil.copy("%s/stackA_C3_JH.fits" % refdate, "J/ref_J.fits")
    shutil.copy("%s/stackA_C3_JH.rms.fits" % refdate, "J/ref_J.rms.fits")
    shutil.copy("%s/stackB_C3_JH.fits" % refdate, "H/ref_H.fits")
    shutil.copy("%s/stackB_C3_JH.rms.fits" % refdate, "H/ref_H.rms.fits")

    # Create ds9 region file for OT location
    coofile = open("Coords.reg", "w")
    coofile.write('fk5;circle(%10.5f,%10.5f,1.0") # text={%s}' %
                  (ra, dec, name))
    coofile.close()

    # Get SDSS reference stars
    os.system("getsdss.pl -r 7.0 -f sdss.reg -p %10.5f %10.5f sdss.txt" %
              (ra, dec))

    # Get 2MASS stars
    os.system("getastrom.pl -d 2mass -r 7.0 -f temp.reg %10.5f %10.5f "
              "2mass.txt" % (ra, dec))
    tmass = Starlist("temp.reg")
    for star in tmass:
        star.mags["YMAG"] = star.mags["JMAG"] + 0.5 * (
            star.mags["JMAG"] - star.mags["HMAG"]) + 0.08
    tmass.write("2mass.reg")
    os.remove("temp.reg")

    # If no SDSS coverage, copy 2mass.reg to sdss.reg (kludge until PS1)
    if os.stat("sdss.txt")[6] == 0:
        shutil.copy("ps1.reg", "sdss.reg")

    # Loop over filters
    for filt in filts:
        iraf.chdir(filt)

        # Update ref header
        update_head("ref_%s.fits" % filt, ["PIXSCALE", "SATURATE"],
                    [pixscale_dict[filt], sat_dict[filt]])

        # Generate mask for reference
        iraf.imexpr("a == 0 ? 1 : 0", "ref_%s.mask.fits" % filt,
                    "ref_%s.fits" % filt)

        # Generate variance for reference
        iraf.imcopy("ref_%s.rms.fits" % filt, "ref_%s.var.fits" % filt)
        iraf.imreplace("ref_%s.var.fits" % filt, 1.0e31, lower=INDEF,
                       upper=0.0)

        # Detect objects in reference
        iqobjs("ref_%s.fits" % filt, 3.0, sat_dict[filt],
               wtimage="ref_%s.var.fits" % filt, skyval="0.0")

        # Tweak WCS in reference image
        p60scamp("ref_%s.fits" % filt, distortdeg=1, match=yes, rms=True,
                 mask=True, cat=cat)
        os.remove("ref_%s.cat" % filt)

        # Get reference stars for PSF matching
        sdss = Starlist("../sdss.reg")
        ref = Starlist("ref_%s.fits.stars" % filt)
        sdss.wcs2pix("ref_%s.fits" % filt)
        a, b = sdss.match(ref, maxnum=1000)
        a.write("psf_%s.reg" % filt)

        # Loop over "new images"
        ims = glob.glob("????????_%s.fits" % filt)
        for im in ims:
            # Update headers
            nstack = int(get_head(im, "EXPTIME") / exptime_dict[filt])
            update_head(im, ["GAIN", "READN", "SATURATE"],
                        [nstack * gain_dict[filt],
                         readn_dict[filt] / np.sqrt(nstack),
                         sat_dict[filt]])

            # Create variance files
            iraf.imcopy("%s.rms.fits" % im[:-5], "%s.var.fits" % im[:-5])
            iraf.imreplace("%s.var.fits" % im[:-5], 1.0e31, lower=INDEF,
                           upper=0.0)

            # Create bad pixel mask
            iraf.imexpr("a == 0 ? 1 : 0", "%s.mask.fits" % im[:-5], im)

            # Update WCS for new images
            p60scamp(im, distortdeg=1, match=yes, rms=True, mask=True,
                     cat=cat)
            os.remove("%s.cat" % im[:-5])

            # Detect objects and measure seeing
            iqobjs(im, 5.0, sat_dict[filt],
                   wtimage="%s.var.fits" % im[:-5], skyval="0.0")

            # Match sources for PSF determination
            newstars = Starlist("%s.stars" % im)
            newstars.pix2wcs(im)
            newstars.wcs2pix("ref_%s.fits" % filt)
            c, d = newstars.match(a, maxnum=1000)
            d.write("psf_%s.%s.reg" % (filt, im[:-5]))

            # If there was a problem with alignment, will be no PSF stars
            if not os.stat("psf_%s.%s.reg" % (filt, im[:-5]))[6] == 0:
                if varorder == 0:
                #if (filt=="J") or (filt=="H"):
                    p60sdsssub(im, "ref_%s.fits" % filt, "../Coords.reg",
                               stamps="psf_%s.%s.reg" % (filt, im[:-5]),
                               nsx=2, nsy=2, ko=0, distortdeg=1)
                else:
                    p60sdsssub(im, "ref_%s.fits" % filt, "../Coords.reg",
                               stamps="psf_%s.%s.reg" % (filt, im[:-5]),
                               nsx=5, nsy=5, ko=1, distortdeg=1)

        # PSF photometry
        for im in ims:
            if os.path.exists("%s.sub.fits" % im[:-5]):
                psfphot(im, "psf_%s.reg" % filt, "../Coords.reg",
                        wtimage="ref_%s.var.fits" % filt,
                        varorder=varorder)

        # Photometry
        if (filt == "J") or (filt == "H") or (filt == "Y"):
            ratircal("%s.%s.dat" % (name, filt), ims, "../2mass.reg", filt)
        else:
            ratircal("%s.%s.dat" % (name, filt), ims, "../sdss.reg", filt)

        iraf.chdir("../")

    return

def complete_reduction(lista, masterbias, masterflat, cosmic='no'):
    """Run iraf.ccdproc (overscan/trim, bias, and flat corrections) on every
    frame listed in `lista`, optionally removing cosmic rays first."""
    full_bias = masterbias
    full_flat = masterflat
    full_lista = lista
    path_lista = os.path.dirname(full_lista)
    lista = full_lista.replace(path_lista + '/', '')
    # Copy the master calibration frames next to the image list if they are
    # not already there.
    if masterbias != '':
        masterbias = full_bias.replace(os.path.dirname(full_bias) + '/', '')
        if os.path.isfile(path_lista + '/' + masterbias) and os.path.dirname(full_bias) != path_lista:
            subprocess.call('rm ' + path_lista + '/' + masterbias, shell=True)
            subprocess.call('cp ' + full_bias + ' ' + path_lista, shell=True)
        if not os.path.isfile(path_lista + '/' + masterbias):
            subprocess.call('cp ' + full_bias + ' ' + path_lista, shell=True)
    if masterflat != '':
        masterflat = full_flat.replace(os.path.dirname(full_flat) + '/', '')
        if os.path.isfile(path_lista + '/' + masterflat) and os.path.dirname(full_flat) != path_lista:
            subprocess.call('rm ' + path_lista + '/' + masterflat, shell=True)
            subprocess.call('cp ' + full_flat + ' ' + path_lista, shell=True)
        if not os.path.isfile(path_lista + '/' + masterflat):
            subprocess.call('cp ' + full_flat + ' ' + path_lista, shell=True)
    os.chdir(path_lista)
    iraf.chdir(path_lista)
    print('Reduction ' + lista)
    lis = open(lista, 'r')
    for line in lis:
        line = line.split('\n')
        hdulist = fits.open(line[0])
        X = hdulist[0].header['HIERARCH ESO DET CHIP1 X']    # Start of X = 1
        Y = hdulist[0].header['HIERARCH ESO DET CHIP1 Y']    # Start of Y = 1
        NX = hdulist[0].header['HIERARCH ESO DET OUT1 NX']   # Valid pixels in X
        NY = hdulist[0].header['HIERARCH ESO DET OUT1 NY']   # Valid pixels in Y
        bias_sec = ''
        trim_sec = ''
        Y_MIN = 910   # You need to set this!
        Y_MAX = 1131  # You need to set this!
        X_TRIM = 200
        PRESCANX = hdulist[0].header['HIERARCH ESO DET OUT1 PRSCX']   # Prescan in X
        PRESCANY = hdulist[0].header['HIERARCH ESO DET OUT1 PRSCY']   # Prescan in Y
        OVERSCANX = hdulist[0].header['HIERARCH ESO DET OUT1 OVSCX']  # Overscan in X
        OVERSCANY = hdulist[0].header['HIERARCH ESO DET OUT1 OVSCY']  # Overscan in Y
        iraf.ccdproc.images = line[0]
        iraf.ccdproc.output = ''
        iraf.ccdproc.ccdtype = ''
        iraf.ccdproc.fixpix = 'no'
        iraf.ccdproc.flatcor = 'no'
        iraf.ccdproc.zerocor = 'no'
        iraf.ccdproc.darkcor = 'no'
        if str(PRESCANX) == '0' and str(PRESCANY) == '0' and str(OVERSCANX) == '0' and str(OVERSCANY) == '0':
            iraf.ccdproc.overscan = 'no'
            print('NO OVERSCAN')
            if NY > Y_MAX:
                iraf.ccdproc.trim = 'yes'
                print('TRIMMING')
                trim_sec = '[' + str(X_TRIM) + ':' + str(NX - OVERSCANX) + ',' + str(Y) + ':' + str(NY) + ']'
                iraf.ccdproc.trimsec = trim_sec
                iraf.ccdproc(mode='h')
            else:
                iraf.ccdproc.trim = 'no'
                print('NO TRIMMING')
        else:
            iraf.ccdproc.overscan = 'yes'
            iraf.ccdproc.trim = 'yes'
            # Pick the bias (overscan) section from whichever prescan/overscan
            # regions the detector actually has.
            if PRESCANX > 0 and OVERSCANX >= 0:
                if PRESCANY > 0 and OVERSCANY >= 0:
                    bias_sec = '[' + str(X) + ':' + str(PRESCANX) + ',' + str(Y) + ':' + str(PRESCANY) + ']'
                elif PRESCANY >= 0 and OVERSCANY > 0:
                    bias_sec = '[' + str(X) + ':' + str(PRESCANX) + ',' + str(NY - OVERSCANY) + ':' + str(NY) + ']'
                else:
                    bias_sec = '[' + str(X) + ':' + str(PRESCANX) + ',*]'
            elif PRESCANX >= 0 and OVERSCANX > 0:
                if PRESCANY > 0 and OVERSCANY >= 0:
                    bias_sec = '[' + str(NX - OVERSCANX) + ':' + str(NX) + ',' + str(Y) + ':' + str(PRESCANY) + ']'
                elif PRESCANY >= 0 and OVERSCANY > 0:
                    bias_sec = '[' + str(NX - OVERSCANX) + ':' + str(NX) + ',' + str(NY - OVERSCANY) + ':' + str(NY) + ']'
                else:
                    bias_sec = '[' + str(NX - OVERSCANX) + ':' + str(NX) + ',*]'
            else:
                if PRESCANY > 0 and OVERSCANY >= 0:
                    bias_sec = '[*,' + str(Y) + ':' + str(PRESCANY) + ']'
                elif PRESCANY >= 0 and OVERSCANY > 0:
                    bias_sec = '[*,' + str(NY - OVERSCANY) + ':' + str(NY) + ']'
            if NY > Y_MAX:
                trim_sec = '[' + str(X_TRIM) + ':' + str(NX - OVERSCANX) + ',' + str(Y_MIN) + ':' + str(Y_MAX) + ']'
            else:
                trim_sec = '[' + str(X_TRIM) + ':' + str(NX - OVERSCANX) + ',' + str(Y) + ':' + str(NY) + ']'
            print('OVERSCAN & TRIMMING \n %s %s' % (bias_sec, trim_sec))
            iraf.ccdproc.biassec = bias_sec
            iraf.ccdproc.trimsec = trim_sec
            iraf.ccdproc(mode='h')
        if masterbias != '' and masterflat != '':
            if cosmic == 'yes':
                c_out = single_cosmic(line[0])
                with open('c' + lista, 'a') as outfile:
                    outfile.write(c_out + '\n')
                # Build the response frame once, then divide each
                # cosmic-cleaned frame by it.
                if not os.path.isfile('response.fits'):
                    response(masterflat)
                    apply_imarith(masterflat, '/', 'response.fits')
                apply_imarith(c_out, '/', 'response.fits')
            iraf.ccdproc.flatcor = 'yes'
        if masterflat == '':
            iraf.ccdproc.flatcor = 'no'
        if masterbias == '':
            iraf.ccdproc.zerocor = 'no'
            iraf.ccdproc.darkcor = 'no'
        iraf.ccdproc.zero = masterbias
        iraf.ccdproc.flat = masterflat
        #iraf.lpar(iraf.ccdproc)
        iraf.ccdproc(mode='h')
        hdulist.close()
    lis.close()
    # Remove the copied calibration frames unless they lived here already.
    if os.path.isfile(masterbias) and os.path.dirname(full_bias) != path_lista:
        subprocess.call(['rm', masterbias])
    if os.path.isfile(masterflat) and os.path.dirname(full_flat) != path_lista:
        subprocess.call(['rm', masterflat])
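
# A minimal, self-contained sketch (not part of the pipeline above) tracing
# what the section logic in complete_reduction() produces for one hypothetical
# detector layout: a 16-pixel prescan strip on the left edge and no other
# prescan/overscan regions. All numbers below are invented for illustration.
def sketch_bias_trim_sections():
    X, Y, NX, NY = 1, 1, 2048, 1000          # hypothetical chip geometry
    PRESCANX, OVERSCANX = 16, 0              # prescan strip in X only
    PRESCANY, OVERSCANY = 0, 0
    X_TRIM, Y_MIN, Y_MAX = 200, 910, 1131
    # PRESCANX > 0 with no Y prescan/overscan -> a full-column strip in X
    bias_sec = '[%d:%d,*]' % (X, PRESCANX)
    # NY <= Y_MAX -> keep the full Y range, trim only the X edges
    trim_sec = '[%d:%d,%d:%d]' % (X_TRIM, NX - OVERSCANX, Y, NY)
    return bias_sec, trim_sec

# sketch_bias_trim_sections() -> ('[1:16,*]', '[200:2048,1:1000]')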
def getShiftScale(rawFrame, telluricInter, log, over):
    """
    Use iraf.telluric() to get the best shift and scale of a telluric
    correction spectrum.

    Writes:
        "6_shiftScale" + rawFrame + ".txt"
    """
    if os.path.exists('5_oneDCorrected' + rawFrame + '.fits') and os.path.exists("6_shiftScale" + rawFrame + ".txt"):
        if over:
            os.remove('5_oneDCorrected' + rawFrame + '.fits')
            # TODO(nat): implement logging for this
            iraf.chdir(os.getcwd())
            tell_info = iraf.telluric(
                input='4_cubeslice' + rawFrame + '.fits[0]',
                output='5_oneDCorrected' + rawFrame + '.fits',
                cal="3_chtel" + rawFrame + '.fits[0]',
                airmass=1.0, answer='yes', ignoreaps='yes', xcorr='yes',
                tweakrms='yes', inter=telluricInter, sample="*",
                threshold=0.1, lag=3, shift=0., dshift=0.1, scale=1.0,
                dscale=0.1, offset=1, smooth=1, cursor='', mode='al',
                Stdout=1)
        else:
            logging.info(
                "\nOutput exists and -over not set - skipping get shift scale of telluric correction and fit"
            )
            return
    else:
        iraf.chdir(os.getcwd())
        tell_info = iraf.telluric(
            input='4_cubeslice' + rawFrame + '.fits[0]',
            output='5_oneDCorrected' + rawFrame + '.fits',
            cal="3_chtel" + rawFrame + '.fits[0]',
            airmass=1.0, answer='yes', ignoreaps='yes', xcorr='yes',
            tweakrms='yes', inter=telluricInter, sample="*", threshold=0.1,
            lag=3, shift=0., dshift=0.1, scale=1.0, dscale=0.1, offset=1,
            smooth=1, cursor='', mode='al', Stdout=1)
    # Get the shift and scale from the list of strings iraf.telluric() returns.
    # A sample tell_info looks like:
    #   ['cubeslice.fits[0]: norm.fits[1]: cubeslice.fits[0]: dshift 5.',
    #    'window:again:window:window:again:window:window:again:window:TELLURIC:',
    #    '  Output: vtella - HE1353-1917',
    #    '  Input: cubeslice.fits[0] - HE1353-1917',
    #    '  Calibration: norm.fits[1] - Hip70765',
    #    '  Tweak: shift = 59.12, scale = 1.323, normalization = 0.9041',
    #    '  WARNING: 3 pixels outside of calibration limits']
    tellshift = 0.
    scale = 1.0
    for info in tell_info:
        # The line of interest looks like
        # '  Tweak: shift = 59.12, scale = 1.323, normalization = 0.9041'.
        if "Tweak" in info:
            # Drop the leading '  Tweak: ' (9 characters) and split on
            # whitespace: ['shift', '=', '59.12,', 'scale', '=', '1.323,', ...]
            temp = info[9:].split()
            # Index 2 is the shift and index 5 the scale, each carrying a
            # trailing comma to strip before converting to float.
            tellshift = float(temp[2].replace(',', ''))
            scale = float(temp[5].replace(',', ''))
    with open("6_shiftScale" + rawFrame + ".txt", "w") as text_file:
        text_file.write("Shift: {} Scale: {} \n".format(tellshift, scale))
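
# A minimal, self-contained sketch of the "Tweak" parsing above, runnable
# without IRAF; the sample string is taken from the comment in getShiftScale().
def parse_tweak_line(tell_info):
    tellshift, scale = 0., 1.0
    for info in tell_info:
        if "Tweak" in info:
            temp = info[9:].split()
            tellshift = float(temp[2].replace(',', ''))
            scale = float(temp[5].replace(',', ''))
    return tellshift, scale

# parse_tweak_line(['  Tweak: shift = 59.12, scale = 1.323, normalization = 0.9041'])
# -> (59.12, 1.323)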
pk_line = 1400
'''
Line for finding peaks (gfreduce)
Line/column for finding apertures (gfextract)
'''
# ---------------------------------------- #
# ----- Importing IRAF from the root directory ----- #
current_dir = os.getcwd()
os.chdir(ic.dir_iraf)
from pyraf import iraf
from pyraf.iraf import gemini, gmos
os.chdir(current_dir)
iraf.chdir(current_dir)

iraf.unlearn('gfreduce')
iraf.unlearn('gfscatsub')

# ---------- Pre-processing of the science frames ---------- #
# MDF, bias, and overscan
iraf.imdelete('g@' + ic.lst_std)
iraf.imdelete('rg@' + ic.lst_std)
iraf.gfreduce('@' + ic.lst_std, rawpath=ic.rawdir, fl_extract='no',
              bias=ic.caldir + ic.procbias, fl_over='yes',
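
# These scripts always call os.chdir() and iraf.chdir() in pairs, because
# PyRAF tracks its own working directory separately from the Python process.
# A hypothetical helper like the one below (not part of the original scripts;
# it assumes `from pyraf import iraf` has already run, as above) keeps the
# two in sync and restores them afterwards:
import contextlib

@contextlib.contextmanager
def working_directory(path):
    """Temporarily switch both the Python and IRAF working directories."""
    previous = os.getcwd()
    os.chdir(path)
    iraf.chdir(path)
    try:
        yield
    finally:
        os.chdir(previous)
        iraf.chdir(previous)

# with working_directory('/path/to/raw'):
#     iraf.imdelete('g@' + ic.lst_std)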
def create_master_twilight_flat(qd, dbFile, data_dir, overwrite=True):
    '''
    Despite appearances, this must be run from inside the raw data
    directory; it cannot be run from elsewhere.
    '''
    cur_dir = os.getcwd()
    #os.chdir(data_dir)
    iraf.chdir(data_dir)
    print(" --Creating Twilight Imaging Flat-Field MasterCal--")
    # Set the task parameters.
    gmos.giflat.unlearn()
    flat_flags = {
        'fl_scale': 'yes',
        'sctype': 'mean',
        'fl_vardq': 'yes',
        'rawpath': '',
        'logfile': 'giflatLog.txt',
        'verbose': 'no'
    }
    if qd['Instrument'] == 'GMOS-N':
        flat_flags['bpm'] = 'gmos$data/gmos-n_bpm_HAM_22_12amp_v1.fits'
    else:
        flat_flags['bpm'] = 'gmos$data/gmos-s_bpm_HAM_22_12amp_v1.fits'
    filters = ['Ha', 'HaC', 'SII', 'r', 'i']
    original_dateobs = qd['DateObs']
    for f in filters:
        # Select filter name using a substring of the official designation.
        qd['Filter2'] = f + '_G%'
        SQL = fileSelect.createQuery('twiFlat', qd)
        flat_files = fileSelect.fileListQuery(dbFile, SQL, qd)
        start_date = Time(qd['DateObs'].split(':')[0], out_subfmt='date')
        end_date = qd['DateObs'].split(':')[1]
        # Widen the date range one day at a time (up to a year back) until
        # at least 7 twilight flats are found.
        while (len(flat_files) < 7) & (
                (Time(end_date) - start_date) < 365 * u.day):
            start_date = start_date - 1.0 * u.day
            qd['DateObs'] = '{}:{}'.format(start_date, end_date)
            SQL = fileSelect.createQuery('twiFlat', qd)
            flat_files = fileSelect.fileListQuery(dbFile, SQL, qd)
        mc_name = 'MCflat_{}.fits'.format(f)
        if os.path.exists(mc_name):
            if overwrite is True:
                remove = raw_input(
                    'Remove flat file {}? (y), n '.format(mc_name))
                if remove == 'y':
                    os.remove(mc_name)
                else:
                    return None
            else:
                print('Master flat, {} already exists and overwrite={}'.format(
                    mc_name, overwrite))
                return None
        if len(flat_files) > 0:
            print(" Building twilight flat MasterCal for: {} with {} flat frames from date range {}"
                  .format(f, len(flat_files), qd['DateObs']))
            gmos.giflat(','.join(str(x) for x in flat_files), mc_name,
                        bias='MCbias', **flat_flags)
        qd['DateObs'] = original_dateobs
    # Clean up. Note this only checks the master flat from the last filter.
    if not os.path.exists(mc_name):
        sys.exit('ERROR creating Master Flat Field: {}'.format(mc_name))
    if qd['Instrument'] == 'GMOS-N':
        image_str = 'gN'
    else:
        image_str = 'gS'
    image_str = '{}{}*.fits'.format(image_str, qd['DateObs'][0:4])
    for f in filters:
        del_str = f + image_str
        iraf.imdel(del_str)
    flist = glob.glob('tmpfile*')
    for ifile in flist:
        os.remove(ifile)
    iraf.chdir(cur_dir)
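
# A hedged usage sketch for create_master_twilight_flat(). The query
# dictionary keys shown ('Instrument', 'DateObs') are the ones actually read
# above; the values, the database filename, and the data directory are all
# invented for illustration, and any extra keys your fileSelect schema needs
# must be added.
qd_example = {
    'Instrument': 'GMOS-S',
    'DateObs': '2018-01-10:2018-01-20',  # 'start:end', split on ':' above
}
# create_master_twilight_flat(qd_example, 'observations.db', './raw',
#                             overwrite=True)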
print "couldn't imcopy " + _dmask # Combine the flat-field- and dark-frame-derived pixel masks: #ir.imcalc(_fmask+","+_dmask, _mask, "im1||im2") pyfits.writeto(_mask.replace(maskfn, postfn), ny.logical_or( pyfits.getdata(_fmask.replace(maskfn, postfn)), pyfits.getdata(_dmask)).astype(float), clobber=True) ir.imcopy(_mask.replace(maskfn, postfn), _mask) if verbose: print "Done making bad pixel mask!" if procData: os.chdir(_proc) ir.chdir(_proc) if processCal: # Add 'exptime' header to all cal, target, and lamp files: ns.write_exptime(rawcal, itime=itime) #ns.write_exptime(rawlamp) # Correct for bad pixels and normalize all the frames by the flat field ir.load('crutil') ns.preprocess('@' + rawcal, '@' + proccal, qfix=qfix, qpref='', flat=_sflatdcn, mask=_mask.replace(maskfn, postfn),
#for the FUV data
idir = '/user/bostroem/science/12465_otfr20120425/mama/'
##idir = '/Users/bostroem/science/12465_otfr20120425/mama/'
os.chdir(idir)
flist = glob.glob('obrc04???_flt.fits') + glob.glob('obrc05???_flt.fits')
dec_dict = make_declination_dict(flist)
for targ_dec in dec_dict.keys():
    combine_dithered_images(dec_dict, targ_dec, options.use_hdr_offset)
pdb.set_trace()
'''
#idir = '/Users/bostroem/science/12465_otfr20121109/ccd/'
#idir = '/user/bostroem/science/12465_otfr20130503/ccd/'
idir = '/Users/bostroem/science/2014_dc_aas/'
os.chdir(idir)
iraf.chdir(idir)
'''
flist = glob.glob('obrc06*_flt.fits')  #+glob.glob('ob???????_flt.fits')
dec_dict = make_declination_dict(flist)
for targ_dec in dec_dict.keys():
    combine_dithered_images(dec_dict, targ_dec, options.use_hdr_offset)
'''
flist = glob.glob('obrc01*_flc_flt.fits')  #+glob.glob('ob???????_flc_flt.fits')
dec_dict = make_declination_dict(flist)
for targ_dec in dec_dict.keys():
    combine_dithered_images(dec_dict, targ_dec, options.use_hdr_offset)
'''
flist = glob.glob('obrc07*_flt.fits')  #+glob.glob('ob???????_flt.fits')
dec_dict = make_declination_dict(flist)
for targ_dec in dec_dict.keys():
    combine_dithered_images(dec_dict, targ_dec, options.use_hdr_offset)
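
# make_declination_dict() is not defined in this snippet. Below is a purely
# hypothetical sketch of what such a grouping helper might look like, assuming
# each _flt file carries a DEC_TARG keyword in its primary header; both the
# body and the keyword choice are assumptions, not the author's code.
from astropy.io import fits

def make_declination_dict_sketch(flist, key='DEC_TARG'):
    """Group FLT files by target declination so dithers of the same
    pointing can be combined together."""
    dec_dict = {}
    for fname in flist:
        dec = fits.getheader(fname, 0).get(key)
        dec_dict.setdefault(dec, []).append(fname)
    return dec_dict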