def get_obstype_lists(path, pattern=None, weeding=True):
    """
    Legacy "INGEST" step: classify all FITS files for one observing night into
    per-obstype lists based on the 'OBJECT' header keyword, and determine which
    simultaneous-calibration lamps (LFC and/or ThXe) actually fired.

    INPUT:
    "path"     : raw-data directory for the night; the 8-digit date (YYYYMMDD) is
                 taken from path[-9:-1], i.e. the last directory component
    "pattern"  : if provided, only files containing this string pattern are included
    "weeding"  : boolean - weed out binned observations (NAXIS2 != 4112)?
                 Binned frames are treated as acquisition images.

    OUTPUT:
    acq_list, bias_list, dark_list, flat_list, skyflat_list, domeflat_list,
    arc_list, simth_only_list, laser_only_list, laser_and_simth_list,
    stellar_list, unknown_list
    """
    date = path[-9:-1]

    if pattern is None:
        file_list = glob.glob(path + "*.fits")
    else:
        file_list = glob.glob(path + '*' + pattern + '*.fits')

    # first weed out binned observations (full frames have NAXIS2 == 4112)
    if weeding:
        unbinned = []
        binned = []
        for file in file_list:
            xdim = pyfits.getval(file, 'NAXIS2')
            if xdim == 4112:
                unbinned.append(file)
            else:
                binned.append(file)
    else:
        unbinned = file_list

    # prepare output lists; binned frames are assumed to be acquisition images
    if weeding:
        acq_list = binned[:]
    else:
        acq_list = []
    bias_list = []
    dark_list = []
    flat_list = []
    skyflat_list = []
    domeflat_list = []
    arc_list = []
    thxe_list = []
    laser_list = []
    laser_and_thxe_list = []
    stellar_list = []
    unknown_list = []

    # classify each unbinned frame by its OBJECT keyword
    for file in unbinned:
        obj_type = pyfits.getval(file, 'OBJECT')
        if obj_type.lower() == 'acquire':
            if not weeding:
                acq_list.append(file)
        elif obj_type.lower().startswith('bias'):
            bias_list.append(file)
        elif obj_type.lower().startswith('dark'):
            dark_list.append(file)
        elif obj_type.lower().startswith('flat'):
            flat_list.append(file)
        elif obj_type.lower().startswith('skyflat'):
            skyflat_list.append(file)
        elif obj_type.lower().startswith('domeflat'):
            domeflat_list.append(file)
        elif obj_type.lower().startswith('arc'):
            arc_list.append(file)
        elif obj_type.lower() in ["thxe", "thxe-only", "simth"]:
            thxe_list.append(file)
        elif obj_type.lower() in ["lc", "lc-only", "lfc", "lfc-only", "simlc"]:
            laser_list.append(file)
        elif obj_type.lower() in ["thxe+lfc", "lfc+thxe", "lc+simthxe", "lc+thxe"]:
            laser_and_thxe_list.append(file)
        elif obj_type.lower().startswith(
            ("wasp", "proxima", "kelt", "toi", "tic", "hd", "hr", "hip", "gj", "gl",
             "ast", "alpha", "beta", "gamma", "delta", "tau", "ksi", "ach", "zeta",
             "ek", '1', '2', '3', '4', '5', '6', '7', '8', '9', 'bd', 'bps', 'cd',
             'he', 'g', 'cs')):
            # known stellar-target name prefixes
            stellar_list.append(file)
        else:
            unknown_list.append(file)

    # sort out which calibration lamps were actually on for the exposures tagged
    # as either "SimLC" or "SimTh" (headers can be wrong)
    laser_only_list = []
    simth_only_list = []
    laser_and_simth_list = []
    calib_list = laser_list + thxe_list + laser_and_thxe_list
    calib_list.sort()

    if int(date) < 20190503:
        # before May 2019 the header keywords are unreliable, so look at the
        # actual 2D image (using chipmasks for LFC and simThXe regions)
        chipmask_path = '/Users/christoph/OneDrive - UNSW/chipmasks/archive/'
        try:
            chipmask = np.load(chipmask_path + 'chipmask_' + date + '.npy').item()
        except (IOError, OSError):
            # no chipmask for this night -> fall back to the 20180921 reference mask
            # (FIX: was a bare "except:", which swallowed every error type)
            chipmask = np.load(chipmask_path + 'chipmask_' + '20180921' + '.npy').item()
        for file in calib_list:
            img = correct_for_bias_and_dark_from_filename(
                file, np.zeros((4096, 4112)), np.zeros((4096, 4112)),
                gain=[1., 1.095, 1.125, 1.], scalable=False, savefile=False, path=path)
            lc = laser_on(img, chipmask)
            thxe = thxe_on(img, chipmask)
            if (not lc) and (not thxe):
                unknown_list.append(file)
            elif (lc) and (thxe):
                laser_and_simth_list.append(file)
            else:
                if lc:
                    laser_only_list.append(file)
                elif thxe:
                    simth_only_list.append(file)
    else:
        # since May 2019 the header keywords are correct, so check for LFC / ThXe
        # in the header, as that is MUCH faster
        for file in calib_list:
            lc = 0
            thxe = 0
            h = pyfits.getheader(file)
            if 'LCNEXP' in h.keys():
                # this indicates the latest version of the FITS headers (from May 2019 onwards)
                if ('LCEXP' in h.keys()) or ('LCMNEXP' in h.keys()):
                    # this indicates the LFC actually was exposed (either automatically or manually)
                    lc = 1
            else:
                # if not, just go with the OBJECT field
                if file in laser_list + laser_and_thxe_list:
                    lc = 1
            # NOTE(review): raises KeyError if 'SIMCALTT' is missing from the header - confirm it is always present
            if h['SIMCALTT'] > 0:
                thxe = 1
            if lc + thxe == 1:
                if lc == 1:
                    laser_only_list.append(file)
                else:
                    simth_only_list.append(file)
            elif lc + thxe == 2:
                laser_and_simth_list.append(file)
            else:
                unknown_list.append(file)

    return acq_list, bias_list, dark_list, flat_list, skyflat_list, domeflat_list, arc_list, simth_only_list, laser_only_list, laser_and_simth_list, stellar_list, unknown_list
arc_sublists = {'lfc': [], 'thxe': [], 'both': [], 'neither': []} # nasty temp fix to make sure we are always looking at the 2D images until the header keywords are reliable checkdate = '1' + date[1:] if int(checkdate) < 20190503: # look at the actual 2D image (using chipmasks for LFC and simThXe) to determine which calibration lamps fired for file in arc_list: img = correct_for_bias_and_dark_from_filename(file, medbias, MDS, gain=gain, scalable=False, savefile=False, path=pathdict['raw']) lc = laser_on(img, chipmask) thxe = thxe_on(img, chipmask) if (not lc) and (not thxe): arc_sublists['neither'].append(file) elif (lc) and (thxe): arc_sublists['both'].append(file) else: if lc: arc_sublists['lfc'].append(file) elif thxe: arc_sublists['thxe'].append(file) else: # since May 2019 the header keywords are correct, so check for LFC / ThXe in header, as that is MUCH faster for file in arc_list: lc = 0 thxe = 0
def get_obstype_lists(pathdict, pattern=None, weeding=True, quick=False, raw_goodonly=True, savefiles=True):
    """
    This routine performs the "INGEST" step, ie for all files in a given night it identifies the type of
    observation and sorts the files into lists. For simcalib exposures it also determines which lamps were
    actually firing, no matter what the header says, as that can often be wrong (LC / SimTh / LC+SimTh).

    INPUT:
    "pathdict"      : dictionary containing all directories relevant to the reduction
                      (keys used here: 'raw' = raw-data dir, 'cm' = chipmask dir)
    "pattern"       : if provided, only files containing a certain string pattern will be included
    "weeding"       : boolean - do you want to weed out binned observations?
    "quick"         : boolean - if TRUE, simcalib status in determined from headers alone (not from 2-dim images)
    "raw_goodonly"  : boolean - if TRUE, expect 8-digit date (YYYYMMDD) - if FALSE expect 6-digit date (YYMMDD)
    "savefiles"     : boolean - do you want to save the lists into output files

    OUTPUT:
    lists containing the filenames (incl. directory) of the respective observations of a certain type

    MODHIST:
    20200421 - CMB removed domeflat and skyflat lists (not used with Veloce)
    """
    path = pathdict['raw']
    chipmask_path = pathdict['cm']

    # parse the 8-digit date (YYYYMMDD) out of the raw-data directory name
    if raw_goodonly:
        date = path[-9:-1]
    else:
        date = '20' + path[-13:-7]

    if pattern is None:
        file_list = glob.glob(path + date[-2:] + "*.fits")
    else:
        file_list = glob.glob(path + '*' + pattern + '*.fits')

    # first weed out binned observations (full frames have NAXIS2 == 4112)
    if weeding:
        unbinned = []
        binned = []
        for file in file_list:
            xdim = pyfits.getval(file, 'NAXIS2')
            if xdim == 4112:
                unbinned.append(file)
            else:
                binned.append(file)
    else:
        unbinned = file_list

    # prepare output lists; binned frames are treated as acquisition images
    if weeding:
        acq_list = binned[:]
    else:
        acq_list = []
    bias_list = []
    dark_list = []
    flat_list = []
    # skyflat_list = []
    # domeflat_list = []
    arc_list = []
    thxe_list = []
    laser_list = []
    laser_and_thxe_list = []
    stellar_list = []
    unknown_list = []

    # classify each unbinned frame by its OBJECT keyword
    for file in unbinned:
        obj_type = pyfits.getval(file, 'OBJECT')
        if obj_type.lower() == 'acquire':
            if not weeding:
                acq_list.append(file)
        elif obj_type.lower().startswith('bias'):
            bias_list.append(file)
        elif obj_type.lower().startswith('dark'):
            dark_list.append(file)
        elif obj_type.lower().startswith('flat'):
            flat_list.append(file)
        # elif obj_type.lower().startswith('skyflat'):
        #     skyflat_list.append(file)
        # elif obj_type.lower().startswith('domeflat'):
        #     domeflat_list.append(file)
        elif obj_type.lower().startswith('arc'):
            arc_list.append(file)
        elif obj_type.lower() in ["thxe", "thxe-only", "simth"]:
            thxe_list.append(file)
        elif obj_type.lower() in ["lc", "lc-only", "lfc", "lfc-only", "simlc"]:
            laser_list.append(file)
        elif obj_type.lower() in [
            "thxe+lfc", "lfc+thxe", "lc+simthxe", "lc+thxe"
        ]:
            laser_and_thxe_list.append(file)
        elif obj_type.lower().startswith(
            ("wasp", "proxima", "kelt", "toi", "tic", "hd", "hr", "hip", "gj",
             "gl", "ast", "alpha", "beta", "gamma", "delta", "tau", "ksi",
             "ach", "zeta", "ek", '1', '2', '3', '4', '5', '6', '7', '8', '9',
             'mercury', 'bd', 'bps', 'cd', 'he', 'g', 'cs', 'bkt', 'meingast',
             'spangap', 'sarah', 'rm', 'fp', 'vel')):
            # known stellar-target / program name prefixes
            stellar_list.append(file)
        else:
            unknown_list.append(file)

    # sort out which calibration lamps were actually on for the exposures tagged as either "SimLC" or "SimTh"
    laser_only_list = []
    simth_only_list = []
    laser_and_simth_list = []
    calib_list = laser_list + thxe_list + laser_and_thxe_list
    calib_list.sort()

    # "quick" trusts the headers (uses the real date); otherwise force the
    # 2D-image check by faking a first digit of '1' so the date comparison
    # below always lands in the image-based branch (nasty temp fix)
    if quick:
        checkdate = date[:]
    else:
        checkdate = '1' + date[1:]

    if int(checkdate) < 20190503:
        # check if chipmask for that night already exists (if not revert to the closest one in time (preferably earlier in time))
        if os.path.isfile(chipmask_path + 'chipmask_' + date + '.npy'):
            chipmask = np.load(chipmask_path + 'chipmask_' + date + '.npy').item()
        else:
            cm_list = glob.glob(chipmask_path + 'chipmask*.npy')
            cm_datelist = [int(cm.split('.')[-2][-8:]) for cm in cm_list]
            cm_datelist.sort()   # need to make sure it is sorted, so that find_nearest finds the earlier one in time if two dates are found that have the same delta_t to date
            cm_dates = np.array(cm_datelist)
            alt_date = find_nearest(cm_dates, int(date))
            chipmask = np.load(chipmask_path + 'chipmask_' + str(alt_date) + '.npy').item()
        # look at the actual 2D image (using chipmasks for LFC and simThXe) to determine which calibration lamps fired
        for file in calib_list:
            img = correct_for_bias_and_dark_from_filename(
                file, np.zeros((4096, 4112)), np.zeros((4096, 4112)),
                gain=[1., 1.095, 1.125, 1.], scalable=False, savefile=False,
                path=pathdict['raw'])
            lc = laser_on(img, chipmask)
            thxe = thxe_on(img, chipmask)
            if (not lc) and (not thxe):
                unknown_list.append(file)
            elif (lc) and (thxe):
                laser_and_simth_list.append(file)
            else:
                if lc:
                    laser_only_list.append(file)
                elif thxe:
                    simth_only_list.append(file)
    else:
        # since May 2019 the header keywords are (mostly) correct, so could check for LFC / ThXe in header, as that is MUCH faster
        for file in calib_list:
            lc = 0
            thxe = 0
            h = pyfits.getheader(file)
            if 'LCNEXP' in h.keys():   # this indicates the latest version of the FITS headers (from May 2019 onwards)
                if ('LCEXP' in h.keys()) or ('LCMNEXP' in h.keys()):   # this indicates the LFC actually was actually exposed (either automatically or manually)
                    lc = 1
            else:   # if not, just go with the OBJECT field
                if file in laser_list + laser_and_thxe_list:
                    lc = 1
            # NOTE(review): raises KeyError if any of these keywords is missing - confirm they are always present post-May-2019
            if (h['SIMCALTT'] > 0) and (h['SIMCALN'] > 0) and (h['SIMCALSE'] > 0):
                thxe = 1
            if lc + thxe == 1:
                if lc == 1:
                    laser_only_list.append(file)
                else:
                    simth_only_list.append(file)
            elif lc + thxe == 2:
                laser_and_simth_list.append(file)
            else:
                unknown_list.append(file)

    # sort all lists
    acq_list.sort()
    bias_list.sort()
    dark_list.sort()
    flat_list.sort()
    arc_list.sort()
    simth_only_list.sort()
    laser_only_list.sort()
    laser_and_simth_list.sort()
    stellar_list.sort()
    unknown_list.sort()

    # optionally write one plain-text list per obstype (basenames only) into the raw-data dir
    if savefiles:
        shortfn_acq_list = [fn.split('/')[-1] for fn in acq_list]
        np.savetxt(path + date + '_acquire_list.txt', shortfn_acq_list, fmt='%s')
        shortfn_bias_list = [fn.split('/')[-1] for fn in bias_list]
        np.savetxt(path + date + '_bias_list.txt', shortfn_bias_list, fmt='%s')
        shortfn_dark_list = [fn.split('/')[-1] for fn in dark_list]
        np.savetxt(path + date + '_dark_list.txt', shortfn_dark_list, fmt='%s')
        shortfn_flat_list = [fn.split('/')[-1] for fn in flat_list]
        np.savetxt(path + date + '_flat_list.txt', shortfn_flat_list, fmt='%s')
        shortfn_arc_list = [fn.split('/')[-1] for fn in arc_list]
        np.savetxt(path + date + '_arc_list.txt', shortfn_arc_list, fmt='%s')
        shortfn_simth_only_list = [fn.split('/')[-1] for fn in simth_only_list]
        np.savetxt(path + date + '_simth_only_list.txt', shortfn_simth_only_list, fmt='%s')
        shortfn_laser_only_list = [fn.split('/')[-1] for fn in laser_only_list]
        np.savetxt(path + date + '_lfc_only_list.txt', shortfn_laser_only_list, fmt='%s')
        shortfn_laser_and_simth_list = [
            fn.split('/')[-1] for fn in laser_and_simth_list
        ]
        np.savetxt(path + date + '_lfc_and_simth_list.txt', shortfn_laser_and_simth_list, fmt='%s')
        shortfn_stellar_list = [fn.split('/')[-1] for fn in stellar_list]
        np.savetxt(path + date + '_stellar_list.txt', shortfn_stellar_list, fmt='%s')
        shortfn_unknown_list = [fn.split('/')[-1] for fn in unknown_list]
        np.savetxt(path + date + '_unknown_list.txt', shortfn_unknown_list, fmt='%s')

    # return acq_list, bias_list, dark_list, flat_list, skyflat_list, domeflat_list, arc_list, simth_only_list, laser_only_list, laser_and_simth_list, stellar_list, unknown_list
    return acq_list, bias_list, dark_list, flat_list, arc_list, simth_only_list, laser_only_list, laser_and_simth_list, stellar_list, unknown_list
def process_science_images(imglist, P_id, chipmask, mask=None, stripe_indices=None, quick_indices=None,
                           sampling_size=25, slit_height=32, qsh=23, gain=None, MB=None, ronmask=None,
                           MD=None, scalable=False, saveall=False, pathdict=None, ext_method='optimal',
                           from_indices=True, slope=True, offset=True, fibs='all', date=None, timit=False):
    """
    Process all science / calibration lamp images. This includes:

    (1) bias and dark subtraction
    (2) cosmic ray removal
    (3) background extraction and estimation
    (4) flat-fielding (ie removal of pixel-to-pixel sensitivity variations)
    =============================
    (5) extraction of stripes
    (6) extraction of 1-dim spectra
    (7) get relative intensities of different fibres
    (8) wavelength solution
    (9) barycentric correction (for stellar observations only)

    NOTE: steps (4) and (7)-(9) are currently disabled (commented out below).

    INPUT (selected):
    "imglist"        : list of raw FITS filenames to reduce (sorted in place)
    "P_id", "chipmask", "stripe_indices", "quick_indices" : tracing / masking products from earlier steps
    "gain"           : per-quadrant gain list; defaults to [1., 1., 1., 1.]
                       (FIX: was a mutable default argument)
    "MB"/"MD"/"ronmask" : master bias / master dark / read-noise mask; loaded from "pathdict['raw']" if None
    "pathdict"       : dictionary of reduction directories (required; 'raw' key is used)
    "ext_method"     : extraction method passed to the spectrum-extraction routines
    "from_indices"   : if TRUE, extract via pre-computed stripe indices rather than stripe cut-outs
    """
    print('WARNING: I commented out BARCYRORR')
    # cont = raw_input('Do you still want to continue?')
    cont = 'y'
    assert cont.lower() == 'y', 'You chose to quit!'
    assert pathdict is not None, 'ERROR: pathdict not provided!!!'

    # FIX: avoid mutable default argument; behaviour identical for callers using the default
    if gain is None:
        gain = [1., 1., 1., 1.]

    path = pathdict['raw']

    if timit:
        start_time = time.time()

    # sort image list, just in case
    imglist.sort()

    # get a list with the object names (strip any '+LC'/'+ThXe'-style suffixes)
    object_list = [
        pyfits.getval(file, 'OBJECT').split('+')[0] for file in imglist
    ]

    # determine the overall observation type from the first file
    if object_list[0][:3] == 'ARC':
        obstype = 'ARC'
    elif object_list[0].lower() in [
        "lc", "lc-only", "lfc", "lfc-only", "simlc", "thxe", "thxe-only",
        "simth", "thxe+lfc", "lfc+thxe", "lc+simthxe", "lc+thxe"
    ]:
        obstype = 'simcalib'
    else:
        obstype = 'stellar'

    if obstype in ['stellar', 'ARC']:
        # and the indices where the object changes (to figure out which observations belong to one epoch)
        changes = np.where(
            np.array(object_list)[:-1] != np.array(object_list)[1:]
        )[0] + 1   # need the plus one to get the indices of the first occasion of a new object
        # list of indices for individual epochs - there's gotta be a smarter way to do this...
        all_epoch_list = []
        if len(changes) > 0:
            all_epoch_list.append(np.arange(0, changes[0]))
            for j in range(len(changes) - 1):
                all_epoch_list.append(np.arange(changes[j], changes[j + 1]))
            all_epoch_list.append(np.arange(changes[-1], len(object_list)))
        else:
            all_epoch_list.append(np.arange(0, len(object_list)))

    #####################################
    ### (1) bias and dark subtraction ###
    #####################################
    # if INPUT arrays are not given, read them from default files
    if path is None:
        print('WARNING: output file directory not provided!!!')
        print('Using same directory as input file...')
        dum = imglist[0].split('/')
        path = imglist[0][0:-len(dum[-1])]
    if date is None:
        date = path.split('/')[-2]
    if MB is None:
        # no need to fix orientation, this is already a processed file [ADU]
        # MB = pyfits.getdata(path + 'master_bias.fits')
        MB = pyfits.getdata(path + 'median_bias.fits')
    if ronmask is None:
        # no need to fix orientation, this is already a processed file [e-]
        ronmask = pyfits.getdata(path + 'read_noise_mask.fits')
    if MD is None:
        if scalable:
            # no need to fix orientation, this is already a processed file [e-]
            MD = pyfits.getdata(path + 'master_dark_scalable.fits', 0)
            # err_MD = pyfits.getdata(path + 'master_dark_scalable.fits', 1)
        else:
            # no need to fix orientation, this is already a processed file [e-]
            print('WARNING: scalable KW not properly implemented (stellar_list can have different exposure times...)')
            texp = 600.
            MD = pyfits.getdata(path + 'master_dark_t' + str(int(np.round(texp, 0))) + '.fits', 0)
            # err_MD = pyfits.getdata(path + 'master_dark_t' + str(int(np.round(texp, 0))) + '.fits', 1)

    if not from_indices:
        ron_stripes = extract_stripes(ronmask, P_id, return_indices=False, slit_height=slit_height,
                                      savefiles=False, timit=True)

    # loop over all files
    for i, filename in enumerate(imglist):

        # (0) do some housekeeping with filenames, and check if there are multiple exposures for a given epoch of a star
        dum = filename.split('/')
        dum2 = dum[-1].split('.')
        obsname = dum2[0]
        obsnum = int(obsname[-5:])   # NOTE(review): currently unused
        object = pyfits.getval(filename, 'OBJECT').split('+')[0]
        object_indices = np.where(object == np.array(object_list))[0]   # NOTE(review): currently unused (epoch_ix is used instead)
        texp = pyfits.getval(filename, 'ELAPSED')

        # check if this exposure belongs to the same epoch as the previous one
        if obstype in ['stellar', 'ARC']:
            # list of all the observations belonging to this epoch
            epoch_ix = [
                sublist for sublist in all_epoch_list if i in sublist
            ]   # different from object_indices, as epoch_ix contains only indices for this particular epoch if there are multiple epochs of a target in a given night
            epoch_list = list(np.array(imglist)[epoch_ix])
            # make sublists according to the four possible calibration lamp configurations
            epoch_sublists = {'lfc': [], 'thxe': [], 'both': [], 'neither': []}
            if i > 0:
                if filename in epoch_list:
                    new_epoch = False
                else:
                    new_epoch = True
                    # delete existing temp bg files so we don't accidentally load them for a wrong epoch
                    if os.path.isfile(path + 'temp_bg_lfc.fits'):
                        os.remove(path + 'temp_bg_lfc.fits')
                    if os.path.isfile(path + 'temp_bg_thxe.fits'):
                        os.remove(path + 'temp_bg_thxe.fits')
                    if os.path.isfile(path + 'temp_bg_both.fits'):
                        os.remove(path + 'temp_bg_both.fits')
                    if os.path.isfile(path + 'temp_bg_neither.fits'):
                        os.remove(path + 'temp_bg_neither.fits')
            else:
                new_epoch = True
                # delete existing temp bg files so we don't accidentally load them for a wrong epoch
                if os.path.isfile(path + 'temp_bg_lfc.fits'):
                    os.remove(path + 'temp_bg_lfc.fits')
                if os.path.isfile(path + 'temp_bg_thxe.fits'):
                    os.remove(path + 'temp_bg_thxe.fits')
                if os.path.isfile(path + 'temp_bg_both.fits'):
                    os.remove(path + 'temp_bg_both.fits')
                if os.path.isfile(path + 'temp_bg_neither.fits'):
                    os.remove(path + 'temp_bg_neither.fits')
        else:
            if i == 0:
                new_epoch = True
            else:
                new_epoch = False

        print('Extracting ' + obstype + ' spectrum ' + str(i + 1) + '/' + str(len(imglist)) + ': ' + obsname)

        if obstype in ['stellar', 'ARC']:
            # nasty temp fix to make sure we are always looking at the 2D images until the header keywords are reliable
            checkdate = '1' + date[1:]
            if int(checkdate) < 20190503:
                # look at the actual 2D image (using chipmasks for LFC and simThXe) to determine which calibration lamps fired
                for file in epoch_list:
                    img = correct_for_bias_and_dark_from_filename(file, MB, MD, gain=gain, scalable=scalable,
                                                                  savefile=saveall, path=path)
                    lc = laser_on(img, chipmask)
                    thxe = thxe_on(img, chipmask)
                    if (not lc) and (not thxe):
                        epoch_sublists['neither'].append(file)
                    elif (lc) and (thxe):
                        epoch_sublists['both'].append(file)
                    else:
                        if lc:
                            epoch_sublists['lfc'].append(file)
                        elif thxe:
                            epoch_sublists['thxe'].append(file)
                # now check the calibration lamp configuration for the main observation in question
                img = correct_for_bias_and_dark_from_filename(filename, MB, MD, gain=gain, scalable=scalable,
                                                              savefile=saveall, path=path)
                lc = laser_on(img, chipmask)
                thxe = thxe_on(img, chipmask)
                if (not lc) and (not thxe):
                    lamp_config = 'neither'
                elif (lc) and (thxe):
                    lamp_config = 'both'
                else:
                    if lc:
                        lamp_config = 'lfc'
                    elif thxe:
                        lamp_config = 'thxe'
            else:
                # since May 2019 the header keywords are (mostly) correct, so could just check for LFC / ThXe in header, as that is MUCH faster
                for file in epoch_list:
                    lc = 0
                    thxe = 0
                    h = pyfits.getheader(file)
                    if 'LCNEXP' in h.keys():   # this indicates the latest version of the FITS headers (from May 2019 onwards)
                        if ('LCEXP' in h.keys()) or ('LCMNEXP' in h.keys()):   # this indicates the LFC actually was actually exposed (either automatically or manually)
                            lc = 1
                    else:   # if not, just go with the OBJECT field
                        # FIX: was pyfits.getval(filename, ...), i.e. always the
                        # current outer file, not the epoch member being classified
                        if ('LC' in pyfits.getval(file, 'OBJECT').split('+')) or (
                                'LFC' in pyfits.getval(file, 'OBJECT').split('+')):
                            lc = 1
                    if (h['SIMCALTT'] > 0) and (h['SIMCALN'] > 0) and (h['SIMCALSE'] > 0):
                        thxe = 1
                    assert lc + thxe in [
                        0, 1, 2
                    ], 'ERROR: could not establish status of LFC and simultaneous ThXe for ' + obsname + '.fits !!!'
                    if lc + thxe == 0:
                        epoch_sublists['neither'].append(file)
                    elif lc + thxe == 1:
                        if lc == 1:
                            epoch_sublists['lfc'].append(file)
                        else:
                            epoch_sublists['thxe'].append(file)
                    elif lc + thxe == 2:
                        epoch_sublists['both'].append(file)
                # now check the calibration lamp configuration for the main observation in question
                lc = 0
                thxe = 0
                h = pyfits.getheader(filename)
                if 'LCNEXP' in h.keys():   # this indicates the latest version of the FITS headers (from May 2019 onwards)
                    if ('LCEXP' in h.keys()) or ('LCMNEXP' in h.keys()):   # this indicates the LFC actually was actually exposed (either automatically or manually)
                        lc = 1
                else:   # if not latest header version, just go with the OBJECT field
                    if ('LC' in pyfits.getval(filename, 'OBJECT').split('+')) or (
                            'LFC' in pyfits.getval(filename, 'OBJECT').split('+')):
                        lc = 1
                # NOTE(review): only SIMCALTT is checked here, whereas the epoch loop
                # above also requires SIMCALN and SIMCALSE > 0 - confirm this asymmetry is intended
                if h['SIMCALTT'] > 0:
                    thxe = 1
                if lc + thxe == 0:
                    lamp_config = 'neither'
                elif lc + thxe == 1:
                    if lc == 1:
                        lamp_config = 'lfc'
                    else:
                        lamp_config = 'thxe'
                elif lc + thxe == 2:
                    lamp_config = 'both'
        else:
            # for sim. calibration images we don't need to check for the calibration lamp configuration for all exposures (done external to this function)!
            # just for the file in question and then create a dummy copy of the image list so that it is in the same format that is expected for stellar observations
            # nasty temp fix to make sure we are always looking at the 2D images until the header keywords are reliable
            checkdate = '1' + date[1:]
            if int(checkdate) < 20190503:
                # now check the calibration lamp configuration for the main observation in question
                img = correct_for_bias_and_dark_from_filename(filename, MB, MD, gain=gain, scalable=scalable,
                                                              savefile=saveall, path=path)
                lc = laser_on(img, chipmask)
                thxe = thxe_on(img, chipmask)
                if (not lc) and (not thxe):
                    lamp_config = 'neither'
                elif (lc) and (thxe):
                    lamp_config = 'both'
                else:
                    if lc:
                        lamp_config = 'lfc'
                    elif thxe:
                        lamp_config = 'thxe'
            else:
                # now check the calibration lamp configuration for the main observation in question
                lc = 0
                thxe = 0
                h = pyfits.getheader(filename)
                if 'LCNEXP' in h.keys():   # this indicates the latest version of the FITS headers (from May 2019 onwards)
                    if ('LCEXP' in h.keys()) or ('LCMNEXP' in h.keys()):   # this indicates the LFC actually was actually exposed (either automatically or manually)
                        lc = 1
                else:   # if not latest header version, just go with the OBJECT field
                    if ('LC' in pyfits.getval(filename, 'OBJECT').split('+')) or (
                            'LFC' in pyfits.getval(filename, 'OBJECT').split('+')):
                        lc = 1
                if h['SIMCALTT'] > 0:
                    thxe = 1
                if lc + thxe == 0:
                    lamp_config = 'neither'
                elif lc + thxe == 1:
                    if lc == 1:
                        lamp_config = 'lfc'
                    else:
                        lamp_config = 'thxe'
                elif lc + thxe == 2:
                    lamp_config = 'both'
            epoch_sublists = {}
            epoch_sublists[lamp_config] = imglist[:]

        # (1) call routine that does all the overscan-, bias- & dark-correction stuff and proper error treatment
        img = correct_for_bias_and_dark_from_filename(filename, MB, MD, gain=gain, scalable=scalable,
                                                      savefile=saveall, path=path)   # [e-]

        # err = np.sqrt(img + ronmask*ronmask)   # [e-]
        # TEMPFIX: (how should I be doing this properly???)
        err_img = np.sqrt(np.clip(img, 0, None) + ronmask * ronmask)   # [e-]

        ## (2) remove cosmic rays from background, then fit and remove background
        ## check if there are multiple exposures for this epoch (if yes, we can do the much simpler "median_remove_cosmics")
        if len(epoch_sublists[lamp_config]) == 1:
            # do it the hard way using LACosmic
            # identify and extract background
            bg_raw = extract_background(img, chipmask['bg'], timit=timit)
            # remove cosmics, but only from background
            cosmic_cleaned_img = remove_cosmics(bg_raw.todense(), ronmask, obsname, path, Flim=3.0, siglim=5.0,
                                                maxiter=1, savemask=False, savefile=False, save_err=False,
                                                verbose=True, timit=True)   # [e-]
            # identify and extract background from cosmic-cleaned image
            bg = extract_background(cosmic_cleaned_img, chipmask['bg'], timit=timit)
            # bg = extract_background_pid(cosmic_cleaned_img, P_id, slit_height=30, exclude_top_and_bottom=True, timit=timit)
            # fit background
            bg_coeffs, bg_img = fit_background(bg, clip=10, return_full=True, timit=timit)
        elif len(epoch_sublists[lamp_config]) == 2:
            if new_epoch or not os.path.isfile(path + 'temp_bg_' + lamp_config + '.fits'):
                # list of individual exposure times for this epoch
                subepoch_texp_list = [
                    pyfits.getval(file, 'ELAPSED') for file in epoch_sublists[lamp_config]
                ]
                tscale = np.array(subepoch_texp_list) / texp
                # get background from the element-wise minimum-image of the two images
                img1 = correct_for_bias_and_dark_from_filename(epoch_sublists[lamp_config][0], MB, MD, gain=gain,
                                                               scalable=scalable, savefile=False)
                img2 = correct_for_bias_and_dark_from_filename(epoch_sublists[lamp_config][1], MB, MD, gain=gain,
                                                               scalable=scalable, savefile=False)
                min_img = np.minimum(img1 / tscale[0], img2 / tscale[1])
                # identify and extract background from the minimum-image
                bg = extract_background(min_img, chipmask['bg'], timit=timit)
                # bg = extract_background_pid(min_img, P_id, slit_height=30, exclude_top_and_bottom=True, timit=timit)
                del min_img
                # fit background
                bg_coeffs, bg_img = fit_background(bg, clip=10, return_full=True, timit=timit)
                # save background image to temporary file for re-use later (when reducing the next file of this sublist)
                pyfits.writeto(path + 'temp_bg_' + lamp_config + '.fits', np.float32(bg_img), overwrite=True)
            else:
                # no need to re-compute background, just load it from file
                print('Loading background image for this epoch and lamp configuration...')
                bg_img = pyfits.getdata(path + 'temp_bg_' + lamp_config + '.fits')
        else:
            if new_epoch or not os.path.isfile(path + 'temp_bg_' + lamp_config + '.fits'):
                # make sure this sublist is not too long (otherwise we might run out of memory in this step)
                if len(epoch_sublists[lamp_config]) > 10:
                    mainix = epoch_sublists[lamp_config].index(filename)
                    if mainix < 5:
                        epoch_sublists[lamp_config] = epoch_sublists[lamp_config][:11]
                    elif mainix > len(epoch_sublists[lamp_config]) - 6:
                        epoch_sublists[lamp_config] = epoch_sublists[lamp_config][-11:]
                    else:
                        epoch_sublists[lamp_config] = epoch_sublists[lamp_config][mainix - 5:mainix + 6]
                # list of individual exposure times for this epoch
                subepoch_texp_list = [
                    pyfits.getval(file, 'ELAPSED') for file in epoch_sublists[lamp_config]
                ]
                tscale = np.array(subepoch_texp_list) / texp
                # make list of actual images
                img_list = []
                for file in epoch_sublists[lamp_config]:
                    img_list.append(
                        correct_for_bias_and_dark_from_filename(file, MB, MD, gain=gain, scalable=scalable,
                                                                savefile=False))
                # # index indicating which one of the files in the epoch list is the "main" one
                # main_index = np.where(np.array(epoch_ix) == i)[0][0]
                # take median after scaling to same exposure time as main exposure
                med_img = np.median(np.array(img_list) / tscale.reshape(len(img_list), 1, 1), axis=0)
                del img_list
                # identify and extract background from the median image
                bg = extract_background(med_img, chipmask['bg'], timit=timit)
                # bg = extract_background_pid(med_img, P_id, slit_height=30, exclude_top_and_bottom=True, timit=timit)
                del med_img
                # fit background
                bg_coeffs, bg_img = fit_background(bg, clip=10, return_full=True, timit=timit)
                # save background image to temporary file for re-use later (when reducing the next file of this sublist)
                pyfits.writeto(path + 'temp_bg_' + lamp_config + '.fits', np.float32(bg_img), overwrite=True)
            else:
                # no need to re-compute background, just load it from file
                print('Loading background image for this epoch and lamp configuration...')
                bg_img = pyfits.getdata(path + 'temp_bg_' + lamp_config + '.fits')

        # now actually subtract the background model
        bg_corrected_img = img - bg_img

        # # save background model to file (or APPEND TO RAW FILE???)
        # bg_fn= path + obsname + '_BG_model.fits'
        # pyfits.writeto(bg_fn, bg_img)

        # cosmic_cleaned_img = median_remove_cosmics(img_list, main_index=main_index, scales=scaled_texp, ronmask=ronmask, debug_level=1, timit=True)

        # (3) fit and remove background (ERRORS REMAIN UNCHANGED)
        # bg_corrected_img = remove_background(cosmic_cleaned_img, P_id, obsname, path, degpol=5, slit_height=slit_height, save_bg=True, savefile=True, save_err=False,
        #                                      exclude_top_and_bottom=True, verbose=True, timit=True)   # [e-]
        # bg_corrected_img = remove_background(img, P_id, obsname, path, degpol=5, slit_height=slit_height, save_bg=False, savefile=True, save_err=False,
        #                                      exclude_top_and_bottom=True, verbose=True, timit=True)   # [e-]
        # adjust errors?

        # (4) remove pixel-to-pixel sensitivity variations (2-dim)
        # XXXXXXXXXXXXXXXXXXXXXXXXXXX
        # TEMPFIX
        final_img = bg_corrected_img.copy()   # [e-]
        # final_img = img.copy()   # [e-]
        # adjust errors?

        # (5) extract stripes
        if not from_indices:
            stripes, stripe_indices = extract_stripes(final_img, P_id, return_indices=True, slit_height=slit_height,
                                                      savefiles=saveall, obsname=obsname, path=path, timit=True)
            err_stripes = extract_stripes(err_img, P_id, return_indices=False, slit_height=slit_height,
                                          savefiles=saveall, obsname=obsname + '_err', path=path, timit=True)
        if stripe_indices is None:
            # this is just to get the stripe indices in case we forgot to provide them (DONE ONLY ONCE, if at all...)
            stripes, stripe_indices = extract_stripes(final_img, P_id, return_indices=True, slit_height=slit_height,
                                                      savefiles=False, obsname=obsname, path=path, timit=True)

        # (6) perform extraction of 1-dim spectrum
        if from_indices:
            pix, flux, err = extract_spectrum_from_indices(final_img, err_img, quick_indices, method='quick',
                                                           slit_height=qsh, ronmask=ronmask, savefile=True,
                                                           filetype='fits', obsname=obsname, date=date,
                                                           pathdict=pathdict, lamp_config=lamp_config, timit=True)
            pix, flux, err = extract_spectrum_from_indices(final_img, err_img, stripe_indices, method=ext_method,
                                                           slope=slope, offset=offset, fibs=fibs,
                                                           slit_height=slit_height, ronmask=ronmask, savefile=True,
                                                           filetype='fits', obsname=obsname, date=date,
                                                           pathdict=pathdict, lamp_config=lamp_config, timit=True)
        else:
            pix, flux, err = extract_spectrum(stripes, err_stripes=err_stripes, ron_stripes=ron_stripes,
                                              method='quick', slit_height=qsh, ronmask=ronmask, savefile=True,
                                              filetype='fits', obsname=obsname, date=date, pathdict=pathdict,
                                              lamp_config=lamp_config, timit=True)
            pix, flux, err = extract_spectrum(stripes, err_stripes=err_stripes, ron_stripes=ron_stripes,
                                              method=ext_method, slope=slope, offset=offset, fibs=fibs,
                                              slit_height=slit_height, ronmask=ronmask, savefile=True,
                                              filetype='fits', obsname=obsname, date=date, pathdict=pathdict,
                                              lamp_config=lamp_config, timit=True)

        # # (7) get relative intensities of different fibres
        # if from_indices:
        #     relints = get_relints_from_indices(P_id, final_img, err_img, stripe_indices, mask=mask, sampling_size=sampling_size, slit_height=slit_height, return_full=False, timit=True)
        # else:
        #     relints = get_relints(P_id, stripes, err_stripes, mask=mask, sampling_size=sampling_size, slit_height=slit_height, return_full=False, timit=True)
        #
        # # (8) get wavelength solution
        # #XXXXX
        #
        # # (9) get barycentric correction
        # if obstype == 'stellar':
        #     bc = get_barycentric_correction(filename)
        #     bc = np.round(bc,2)
        #     if np.isnan(bc):
        #         bc = ''
        #     # write the barycentric correction into the FITS header of both the quick-extracted and the optimal-extracted reduced spectrum files
        #     outfn_list = glob.glob(path + '*' + obsname + '*extracted*')
        #     for outfn in outfn_list:
        #         pyfits.setval(outfn, 'BARYCORR', value=bc, comment='barycentric velocity correction [m/s]')
        #
        # #now append relints, wl-solution, and barycorr to extracted FITS file header
        # outfn = path + obsname + '_extracted.fits'
        # if os.path.isfile(outfn):
        #     #relative fibre intensities
        #     dum = append_relints_to_FITS(relints, outfn, nfib=19)
        #     #wavelength solution
        #     #pyfits.setval(fn, 'RELINT' + str(i + 1).zfill(2), value=relints[i], comment='fibre #' + str(fibnums[i]) + ' - ' + fibinfo[i] + ' fibre')

    if timit:
        print('Total time elapsed: ' + str(np.round(time.time() - start_time, 1)) + ' seconds')

    return