def test_history_header_access():
    """Round-trip a History object through a FITS header and verify it survives."""
    expected_history = ['2021-02-23T21:17 PypeIt Reducing target CFHQS1',
                        'Combining frames:',
                        '"DE.20100913.22358.fits.gz"',
                        'Callibration frames:',
                        'arc,tilt "DE.20100913.56927.fits.gz"',
                        'pixelflat,illumflat,trace "DE.20100913.57161.fits.gz"',
                        'pixelflat,illumflat,trace "DE.20100913.57006.fits.gz"']

    # Start from a blank primary header and an empty History
    hdr = initialize_header(primary=True)
    written = History()
    for entry in expected_history:
        written.append(entry, add_date=False)

    # Write the entries into the header, then build a fresh History from it
    written.write_to_header(hdr)
    reread = History(hdr)

    # Nothing should be lost or mangled on the round trip
    assert verify_history(reread, expected_history)
def to_file(self, ofile, primary_hdr=None, **kwargs):
    """
    Over-load :func:`pypeit.datamodel.DataContainer.to_file`
    to deal with the header

    Args:
        ofile (:obj:`str`): Filename
        primary_hdr (`astropy.io.fits.Header`_, optional):
            Primary header; a fresh one is initialized when not given.
        **kwargs:  Passed to super.to_file()
    """
    # Start from a fresh primary header unless the caller supplied one
    hdr = io.initialize_header(primary=True) if primary_hdr is None else primary_hdr

    # Build the spectrograph-specific sub-header when both the original
    # header and the spectrograph name are available
    if self.head0 is not None and self.PYP_SPEC is not None:
        spectrograph = load_spectrograph(self.PYP_SPEC)
        subheader = spectrograph.subheader_for_spec(self.head0, self.head0)
    else:
        subheader = {}

    # Fold the sub-header cards into the primary header
    for key, value in subheader.items():
        hdr[key] = value

    # Let the parent class do the actual writing
    super(OneSpec, self).to_file(ofile, primary_hdr=hdr, **kwargs)
def save(self):
    """
    Saves sensitivity to self.sensfile
    """
    msgs.info('Writing sensitivity function results to file: {:}'.format(self.sensfile))

    # Standard init, then record provenance cards
    hdr = initialize_header()
    hdr['PYP_SPEC'] = (self.spectrograph.spectrograph, 'PypeIt: Spectrograph name')
    hdr['PYPELINE'] = self.spectrograph.pypeline
    # - List the completed steps
    hdr['STEPS'] = (','.join(self.steps), 'Completed sensfunc steps')
    # - Provide the file names
    hdr['SPC1DFIL'] = self.spec1dfile

    # Assemble the HDU list: primary + image extensions + tables
    hdus = [fits.PrimaryHDU(header=hdr),
            fits.ImageHDU(data=self.wave_sens, name='WAVE'),
            fits.ImageHDU(data=self.sensfunc, name='SENSFUNC')]
    for table, extname in ((self.meta_table, 'METADATA'),
                           (self.out_table, 'OUT_TABLE')):
        table_hdu = fits.table_to_hdu(table)
        table_hdu.name = extname
        hdus.append(table_hdu)

    # Write the fits file
    fits.HDUList(hdus).writeto(self.sensfile, overwrite=True, checksum=True)
def to_hdu(self, primary_hdr=None, add_primary=False):
    """
    Construct one or more HDU extensions with the data.

    The organization of the data into separate HDUs is performed by
    :func:`_bundle`, which returns a list of objects.  If an element of
    that list is a dictionary with a single key/item pair, the key is
    used as the extension name; otherwise the extension is unnamed.
    Either way, the payload is handed to
    :func:`pypeit.io.write_to_hdu` to build the HDU.

    Args:
        primary_hdr (`astropy.io.fits.Header`):
            Header to add in the primary HDU. If None, set by
            :func:`pypeit.io.initialize_header()`.
        add_primary (:obj:`bool`, optional):
            If False, return a plain :obj:`list` of HDU objects
            (`astropy.io.fits.ImageHDU`_ or
            `astropy.io.fits.BinTableHDU`_).  If True, return an
            `astropy.io.fits.HDUList` with a primary HDU prepended,
            equivalent to::

                fits.HDUList([fits.PrimaryHDU()] + self.to_hdu())

            To give a specific primary HDU header, use ``primary_hdr``.

    Returns:
        :obj:`list`, `astropy.io.fits.HDUList`_: A list of HDUs, where
        the type depends on the value of ``add_primary``.
    """
    # Bundle the data into per-extension chunks
    bundled = self._bundle()

    # Base header: caller-supplied or freshly initialized; stamp the
    # datamodel class/version so readers can validate on load
    base_hdr = io.initialize_header() if primary_hdr is None else primary_hdr
    base_hdr['DATAMOD'] = (self.__class__.__name__, 'Datamodel class')
    base_hdr['DATAVER'] = (self.version, 'Datamodel version')

    # One HDU per bundled element; a single-key dict carries the ext name
    hdus = []
    for element in bundled:
        if isinstance(element, dict) and len(element) == 1:
            extname, payload = next(iter(element.items()))
            hdus.append(io.write_to_hdu(payload, name=extname, hdr=base_hdr))
        else:
            hdus.append(io.write_to_hdu(element, hdr=base_hdr))

    if add_primary:
        return fits.HDUList([fits.PrimaryHDU(header=base_hdr)] + hdus)
    return hdus
def write(self, outfile, hdr=None, iext=None):
    """
    Write the image(s) to a multi-extension FITS file

    Note: This method cannot be named "save" as it would conflict
    with the imported module

    Extensions will be:
      PRIMARY
      IMAGE
      IVAR (optional)
      MASK (optional)

    Args:
        outfile:
        iext (str, optional):
            Name for the first extension.  Defaults to IMAGE
        hdr (`astropy.io.fits.Header`, optional):
            The header to write; a default is initialized when not given
    """
    if hdr is None:
        hdr = initialize_header()

    # Refuse to write an incomplete image
    if not self.validate():
        msgs.warn("Image is not ready to save.")
        return

    # Primary image plus whichever optional planes are populated
    data = [self.image]
    ext = ['IMAGE' if iext is None else iext]
    for attr in ('ivar', 'mask'):
        plane = getattr(self, attr)
        if plane is not None:
            data.append(plane)
            ext.append(attr.upper())

    # TODO -- Default to float32 for float images?
    # Write the fits file
    save.write_fits(hdr, data, outfile, extnames=ext)
def test_write_and_append_history():
    """Append dated and undated entries to a History and write them to a header."""
    # Blank primary header seeds an empty History
    header = initialize_header(primary=True)
    history = History(header)

    # One entry with a timestamp prefix, one without
    test_history = 'Test history'
    history.append(test_history)
    history.append(test_history, add_date=False)

    # Flush to the header and inspect the HISTORY cards
    history.write_to_header(header)
    dated_entry = header['HISTORY'][0]
    undated_entry = header['HISTORY'][1]
    assert dated_entry[17:] == test_history
    assert undated_entry == test_history

    # The leading 16 characters must parse as an ISO-T timestamp;
    # Time raises if they do not
    t = Time(dated_entry[0:16], format='isot')
def main(args):
    """
    Executes 2d coadding

    Args:
        args: Parsed command-line arguments.  Reads ``file``, ``obj``,
            ``det``, ``basename``, ``show``, ``peaks``, ``debug_offsets``,
            ``debug`` and ``samp_fact``.
    """
    msgs.warn('PATH =' + os.getcwd())
    # Load the file
    if args.file is not None:
        spectrograph, config_lines, spec2d_files = read_coadd2d_file(args.file)
        # Parameters
        # TODO: Shouldn't this reinstantiate the same parameters used in
        # the PypeIt run that extracted the objects? Why are we not
        # just passing the pypeit file?
        # JFH: The reason is that the coadd2dfile may want different reduction parameters
        spectrograph_def_par = spectrograph.default_pypeit_par()
        parset = par.PypeItPar.from_cfg_lines(cfg_lines=spectrograph_def_par.to_config(),
                                              merge_with=config_lines)
    elif args.obj is not None:
        # TODO: We should probably be reading the pypeit file and using those parameters here rather than using the
        # default parset.
        # TODO: This needs to define the science path
        spec2d_files = glob.glob('./Science/spec2d_*' + args.obj + '*')
        head0 = fits.getheader(spec2d_files[0])
        # NOTE(review): a newer version of this script reads 'PYP_SPEC' here;
        # confirm 'SPECTROG' is the correct card for the files this supports.
        spectrograph_name = head0['SPECTROG']
        spectrograph = load_spectrograph(spectrograph_name)
        parset = spectrograph.default_pypeit_par()
    else:
        msgs.error('You must either input a coadd2d file with --file or an object name with --obj')

    # Update with configuration specific parameters (which requires science file) and initialize spectrograph
    spectrograph_cfg_lines = spectrograph.config_specific_par(spec2d_files[0]).to_config()
    parset = par.PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                          merge_with=parset.to_config())

    # If detector was passed as an argument override whatever was in the coadd2d_file
    if args.det is not None:
        msgs.info("Restricting reductions to detector={}".format(args.det))
        parset['rdx']['detnum'] = int(args.det)

    # Get headers (if possible) and base names.  A spec1d header is only
    # needed for the output; fall back to a dummy header when none exists.
    spec1d_files = [files.replace('spec2d', 'spec1d') for files in spec2d_files]
    head1d = None
    for spec1d_file in spec1d_files:
        if os.path.isfile(spec1d_file):
            head1d = fits.getheader(spec1d_file)
            break
    if head1d is None:
        msgs.warn("No 1D spectra so am generating a dummy header for output")
        head1d = io.initialize_header()

    head2d = fits.getheader(spec2d_files[0])
    if args.basename is None:
        # Default basename: third '_'-separated field of the first spec2d filename
        filename = os.path.basename(spec2d_files[0])
        basename = filename.split('_')[2]
    else:
        basename = args.basename

    # Write the par to disk
    par_outfile = basename+'_coadd2d.par'
    print("Writing the parameters to {}".format(par_outfile))
    parset.to_config(par_outfile)

    # Now run the coadds
    skysub_mode = head2d['SKYSUB']
    # Difference imaging ('DIFF' in SKYSUB) implies an IR-style reduction
    ir_redux = True if 'DIFF' in skysub_mode else False

    # Print status message
    msgs_string = 'Reducing target {:s}'.format(basename) + msgs.newline()
    msgs_string += 'Performing coadd of frames reduce with {:s} imaging'.format(skysub_mode)
    msgs_string += msgs.newline() + 'Combining frames in 2d coadd:' + msgs.newline()
    for file in spec2d_files:
        msgs_string += '{0:s}'.format(os.path.basename(file)) + msgs.newline()
    msgs.info(msgs_string)

    # TODO: This needs to be added to the parameter list for rdx
    redux_path = os.getcwd()
    master_dirname = os.path.basename(head2d['PYPMFDIR']) + '_coadd'
    master_dir = os.path.join(redux_path, master_dirname)

    # Make the new Master dir
    if not os.path.isdir(master_dir):
        msgs.info('Creating directory for Master output: {0}'.format(master_dir))
        os.makedirs(master_dir)

    # Instantiate the sci_dict
    sci_dict = OrderedDict()  # This needs to be ordered
    sci_dict['meta'] = {}
    sci_dict['meta']['vel_corr'] = 0.
    sci_dict['meta']['ir_redux'] = ir_redux

    # Find the detectors to reduce
    detectors = PypeIt.select_detectors(detnum=parset['rdx']['detnum'], ndet=spectrograph.ndet)
    if len(detectors) != spectrograph.ndet:
        msgs.warn('Not reducing detectors: {0}'.format(' '.join([str(d) for d in
                  set(np.arange(spectrograph.ndet) + 1) - set(detectors)])))

    # Loop on detectors
    for det in detectors:
        msgs.info("Working on detector {0}".format(det))
        sci_dict[det] = {}

        # Instantiate Coadd2d
        coadd = coadd2d.CoAdd2D.get_instance(spec2d_files, spectrograph, parset, det=det,
                                             offsets=parset['coadd2d']['offsets'],
                                             weights=parset['coadd2d']['weights'],
                                             ir_redux=ir_redux,
                                             debug_offsets=args.debug_offsets,
                                             debug=args.debug,
                                             samp_fact=args.samp_fact)
        # Coadd the slits
        coadd_dict_list = coadd.coadd(only_slits=None)  # TODO implement only_slits later
        # Create the psuedo images
        psuedo_dict = coadd.create_psuedo_image(coadd_dict_list)
        # Reduce
        msgs.info('Running the extraction')
        sci_dict[det]['sciimg'], sci_dict[det]['sciivar'], sci_dict[det]['skymodel'], sci_dict[det]['objmodel'], \
            sci_dict[det]['ivarmodel'], sci_dict[det]['outmask'], sci_dict[det]['specobjs'] = coadd.reduce(
            psuedo_dict, show = args.show, show_peaks = args.peaks)
        # Save psuedo image master files
        coadd.save_masters(master_dir)

    # Make the new Science dir
    # TODO: This needs to be defined by the user
    scipath = os.path.join(redux_path, 'Science_coadd')
    if not os.path.isdir(scipath):
        msgs.info('Creating directory for Science output: {0}'.format(scipath))
        os.makedirs(scipath)

    # Save the results
    save.save_all(sci_dict, coadd.stack_dict['master_key_dict'], master_dir, spectrograph,
                  head1d, head2d, scipath, basename)#, binning=coadd.binning)
def write_to_fits(self, subheader, outfile, overwrite=True, update_det=None,
                  slitspatnum=None, debug=False):
    """
    Write the set of SpecObj objects to one multi-extension FITS file

    Args:
        outfile (str):
        subheader (:obj:`dict`):
            Cards copied (upper-cased keys) into the primary header.
        overwrite (bool, optional):
        slitspatnum (:obj:`str` or :obj:`list`, optional):
          Restricted set of slits for reduction
        update_det (int or list, optional):
          If provided, do not clobber the existing file but only update
          the indicated detectors.  Useful for re-running on a subset of detectors
        debug (bool, optional):
          If True, drop into pdb before and after assembling the HDU list.
    """
    if os.path.isfile(outfile) and (not overwrite):
        msgs.warn("Outfile exists. Set overwrite=True to clobber it")
        return

    # If the file exists and update_det (and slit_spat_num) is provided, use the existing header
    # and load up all the other hdus so that we only over-write the ones
    # we are updating
    if os.path.isfile(outfile) and (update_det is not None or slitspatnum is not None):
        _specobjs = SpecObjs.from_fitsfile(outfile)
        # Mask selects the existing objects to KEEP
        mask = np.ones(_specobjs.nobj, dtype=bool)
        # Update_det
        if update_det is not None:
            # Pop out those with this detector (and slit if slit_spat_num is provided)
            for det in np.atleast_1d(update_det):
                mask[_specobjs.DET == det] = False
        elif slitspatnum is not None:  # slitspatnum
            dets, spat_ids = slittrace.parse_slitspatnum(slitspatnum)
            for det, spat_id in zip(dets, spat_ids):
                mask[(_specobjs.DET == det) & (_specobjs.SLITID == spat_id)] = False
        _specobjs = _specobjs[mask]
        # Add in the new
        for sobj in self.specobjs:
            _specobjs.add_sobj(sobj)
    else:
        _specobjs = self.specobjs

    # Build up the Header
    header = initialize_header(primary=True)
    for key in subheader.keys():
        header[key.upper()] = subheader[key]

    # Init
    prihdu = fits.PrimaryHDU()
    hdus = [prihdu]
    prihdu.header = header

    # Add class info
    prihdu.header['DMODCLS'] = (self.__class__.__name__, 'Datamodel class')
    prihdu.header['DMODVER'] = (self.version, 'Datamodel version')

    # Detector HDUs are collected once per detector, keyed by DET,
    # so duplicates across SpecObj objects collapse to one extension
    detector_hdus = {}
    nspec, ext = 0, 0
    # Loop on the SpecObj objects
    for sobj in _specobjs:
        if sobj is None:
            continue
        # HDUs
        if debug:
            import pdb
            pdb.set_trace()
        shdul = sobj.to_hdu()
        if len(shdul) == 2:  # Detector?
            detector_hdus[sobj['DET']] = shdul[1]
            shdu = [shdul[0]]
        elif len(shdul) == 1:  # Detector?
            shdu = shdul
        else:
            msgs.error("Should not get here...")
        # Check -- If sobj had only 1 array, the BinTableHDU test will fail
        assert len(shdu) == 1, 'Bad data model!!'
        assert isinstance(shdu[0], fits.hdu.table.BinTableHDU), 'Bad data model2'
        #shdu[0].header['DMODCLS'] = (self.__class__.__name__, 'Datamodel class')
        #shdu[0].header['DMODVER'] = (self.version, 'Datamodel version')
        # Name
        shdu[0].name = sobj.NAME
        # Extension: EXTnnnn cards in the primary header index the objects
        keywd = 'EXT{:04d}'.format(ext)
        prihdu.header[keywd] = sobj.NAME
        ext += 1
        nspec += 1
        # Append
        hdus += shdu

    # Deal with Detectors
    for key, item in detector_hdus.items():
        # TODO - Add EXT to the primary header for these??
        prefix = specobj.det_hdu_prefix(key)
        # Name
        if prefix not in item.name:  # In case we are re-loading
            item.name = specobj.det_hdu_prefix(key) + item.name
        # Append
        hdus += [item]

    # A few more for the header
    prihdu.header['NSPEC'] = nspec

    # Code versions (initialize_header fills version cards into the
    # existing header when passed hdr=...)
    initialize_header(hdr=prihdu.header)

    # Finish
    hdulist = fits.HDUList(hdus)
    if debug:
        import pdb
        pdb.set_trace()
    hdulist.writeto(outfile, overwrite=overwrite)
    msgs.info("Wrote 1D spectra to {:s}".format(outfile))
    return
def build_primary_hdr(self, raw_header, spectrograph, master_key_dict=None, master_dir=None,
                      redux_path=None, subheader=None, history=None):
    """
    Build the primary header for a spec2d file

    Args:
        raw_header (`astropy.io.fits.Header`_):
            Header from the raw FITS file (i.e. original header)
        spectrograph (:class:`~pypeit.spectrographs.spectrograph.Spectrograph`):
            Spectrograph used to obtain the data.
        master_key_dict (:obj:`dict`, optional):
            Dictionary of master keys from :class:`~pypeit.calibrations.Calibrations`.
        master_dir (:obj:`str`):
            Path to the ``Masters`` folder
        redux_path (:obj:`str`, optional):
            Full path to the reduction output files.
        subheader (:obj:`dict`, optional):
            Generated by
            :func:`~pypeit.spectrographs.spectrograph.Spectrograph.subheader_for_spec`.
        history (optional):
            Object with a ``write_to_header`` method; its entries are
            appended to the output header when provided.

    Returns:
        `astropy.io.fits.Header`_: The primary header for the output fits file.
    """
    hdr = io.initialize_header(primary=True)
    # Cards that must NOT be copied from the raw header (structural /
    # already set by initialize_header)
    hdukeys = ['BUNIT', 'COMMENT', '', 'BITPIX', 'NAXIS', 'NAXIS1', 'NAXIS2', 'HISTORY',
               'EXTEND', 'DATASEC']
    for key in raw_header.keys():
        # Use new ones
        if key in hdukeys:
            continue
        # Update unused ones
        hdr[key] = raw_header[key]
    # History
    if history is not None:
        history.write_to_header(hdr)
    # Sub-header
    if subheader is not None:
        for key in subheader.keys():
            hdr[key.upper()] = subheader[key]

    # PYPEIT
    # TODO Should the spectrograph be written to the header?
    hdr['PIPELINE'] = str('PYPEIT')
    hdr['PYPELINE'] = spectrograph.pypeline
    hdr['PYP_SPEC'] = spectrograph.name
    hdr['DATE-RDX'] = str(datetime.date.today().strftime('%Y-%m-%d'))

    # MasterFrame info
    # TODO -- Should this be in the header of the individual HDUs ?
    if master_key_dict is not None:
        if 'bias' in master_key_dict.keys():
            hdr['BIASMKEY'] = master_key_dict['bias']
        if 'arc' in master_key_dict.keys():
            hdr['ARCMKEY'] = master_key_dict['arc']
        if 'trace' in master_key_dict.keys():
            hdr['TRACMKEY'] = master_key_dict['trace']
        if 'flat' in master_key_dict.keys():
            hdr['FLATMKEY'] = master_key_dict['flat']

    # Processing steps
    # TODO: Assumes processing steps for all detectors are the same... Does
    # this matter?
    det = self.detectors[0]
    if self[det].process_steps is not None:
        hdr['PROCSTEP'] = (','.join(self[det].process_steps), 'Completed reduction steps')

    # Some paths
    if master_dir is not None:
        hdr['PYPMFDIR'] = str(master_dir)
    if redux_path is not None:
        hdr['PYPRDXP'] = redux_path

    # Sky sub mode
    if 'bkg_redux' in self['meta'] and self['meta']['bkg_redux']:
        hdr['SKYSUB'] = 'DIFF'
    else:
        hdr['SKYSUB'] = 'MODEL'

    # obj find mode
    if 'find_negative' in self['meta'] and self['meta']['find_negative']:
        hdr['FINDOBJ'] = 'POS_NEG'
    else:
        hdr['FINDOBJ'] = 'POS'

    return hdr
def main(args):
    """
    Executes 2d coadding

    Args:
        args: Parsed command-line arguments.  Reads ``file``, ``obj``,
            ``det``, ``basename``, ``show``, ``peaks``, ``debug_offsets``,
            ``debug`` and ``samp_fact``.
    """
    msgs.warn('PATH =' + os.getcwd())
    # Load the file
    if args.file is not None:
        spectrograph_name, config_lines, spec2d_files = io.read_spec2d_file(
            args.file, filetype="coadd2d")
        spectrograph = load_spectrograph(spectrograph_name)

        # Parameters
        # TODO: Shouldn't this reinstantiate the same parameters used in
        # the PypeIt run that extracted the objects? Why are we not
        # just passing the pypeit file?
        # JFH: The reason is that the coadd2dfile may want different reduction parameters
        spectrograph_def_par = spectrograph.default_pypeit_par()
        parset = par.PypeItPar.from_cfg_lines(
            cfg_lines=spectrograph_def_par.to_config(), merge_with=config_lines)
    elif args.obj is not None:
        # TODO: We should probably be reading the pypeit file and using those parameters here rather than using the
        # default parset.
        # TODO: This needs to define the science path
        spec2d_files = glob.glob('./Science/spec2d_*' + args.obj + '*')
        head0 = fits.getheader(spec2d_files[0])
        spectrograph_name = head0['PYP_SPEC']
        spectrograph = load_spectrograph(spectrograph_name)
        parset = spectrograph.default_pypeit_par()
    else:
        msgs.error(
            'You must either input a coadd2d file with --file or an object name with --obj'
        )

    # Update with configuration specific parameters (which requires science file) and initialize spectrograph
    spectrograph_cfg_lines = spectrograph.config_specific_par(
        spec2d_files[0]).to_config()
    parset = par.PypeItPar.from_cfg_lines(cfg_lines=spectrograph_cfg_lines,
                                          merge_with=parset.to_config())

    # If detector was passed as an argument override whatever was in the coadd2d_file
    if args.det is not None:
        msgs.info("Restricting reductions to detector={}".format(args.det))
        parset['rdx']['detnum'] = int(args.det)

    # Get headers (if possible) and base names.  A spec1d header is only
    # needed for output; fall back to a dummy header when none exists.
    spec1d_files = [
        files.replace('spec2d', 'spec1d') for files in spec2d_files
    ]
    head1d = None
    for spec1d_file in spec1d_files:
        if os.path.isfile(spec1d_file):
            head1d = fits.getheader(spec1d_file)
            break
    if head1d is None:
        msgs.warn("No 1D spectra so am generating a dummy header for output")
        head1d = io.initialize_header()

    head2d = fits.getheader(spec2d_files[0])
    if args.basename is None:
        # Default basename: third '_'-separated field of the first spec2d filename
        filename = os.path.basename(spec2d_files[0])
        basename = filename.split('_')[2]
    else:
        basename = args.basename

    # Write the par to disk
    par_outfile = basename + '_coadd2d.par'
    print("Writing the parameters to {}".format(par_outfile))
    parset.to_config(par_outfile)

    # Now run the coadds
    skysub_mode = head2d['SKYSUB']
    # Difference imaging ('DIFF' in SKYSUB) implies an IR-style reduction
    ir_redux = True if 'DIFF' in skysub_mode else False

    # Print status message
    msgs_string = 'Reducing target {:s}'.format(basename) + msgs.newline()
    msgs_string += 'Performing coadd of frames reduce with {:s} imaging'.format(
        skysub_mode)
    msgs_string += msgs.newline(
    ) + 'Combining frames in 2d coadd:' + msgs.newline()
    for file in spec2d_files:
        msgs_string += '{0:s}'.format(os.path.basename(file)) + msgs.newline()
    msgs.info(msgs_string)

    # TODO: This needs to be added to the parameter list for rdx
    redux_path = os.getcwd()
    master_dirname = os.path.basename(head2d['PYPMFDIR']) + '_coadd'
    master_dir = os.path.join(redux_path, master_dirname)

    # Make the new Master dir
    if not os.path.isdir(master_dir):
        msgs.info(
            'Creating directory for Master output: {0}'.format(master_dir))
        os.makedirs(master_dir)

    # Instantiate the sci_dict
    sci_dict = OrderedDict()  # This needs to be ordered
    sci_dict['meta'] = {}
    sci_dict['meta']['vel_corr'] = 0.
    sci_dict['meta']['ir_redux'] = ir_redux

    # Find the detectors to reduce
    detectors = PypeIt.select_detectors(detnum=parset['rdx']['detnum'],
                                        ndet=spectrograph.ndet)
    if len(detectors) != spectrograph.ndet:
        msgs.warn('Not reducing detectors: {0}'.format(' '.join([
            str(d)
            for d in set(np.arange(spectrograph.ndet) + 1) - set(detectors)
        ])))

    # Loop on detectors
    for det in detectors:
        msgs.info("Working on detector {0}".format(det))
        sci_dict[det] = {}

        # Instantiate Coadd2d
        coadd = coadd2d.CoAdd2D.get_instance(
            spec2d_files,
            spectrograph,
            parset,
            det=det,
            offsets=parset['coadd2d']['offsets'],
            weights=parset['coadd2d']['weights'],
            ir_redux=ir_redux,
            debug_offsets=args.debug_offsets,
            debug=args.debug,
            samp_fact=args.samp_fact)

        # Coadd the slits
        coadd_dict_list = coadd.coadd(
            only_slits=None)  # TODO implement only_slits later
        # Create the pseudo images
        pseudo_dict = coadd.create_pseudo_image(coadd_dict_list)
        # Reduce
        msgs.info('Running the extraction')
        # TODO -- This should mirror what is in pypeit.extract_one
        # TODO -- JFH :: This ought to return a Spec2DObj and SpecObjs which would be slurped into
        #  AllSpec2DObj and all_specobsj, as below.
        # TODO -- JFH -- Check that the slits we are using are correct
        sci_dict[det]['sciimg'], sci_dict[det]['sciivar'], sci_dict[det]['skymodel'], sci_dict[det]['objmodel'], \
            sci_dict[det]['ivarmodel'], sci_dict[det]['outmask'], sci_dict[det]['specobjs'], sci_dict[det]['detector'], \
            sci_dict[det]['slits'], sci_dict[det]['tilts'], sci_dict[det]['waveimg'] = coadd.reduce(
            pseudo_dict, show = args.show, show_peaks = args.peaks)

        # Save pseudo image master files
        #coadd.save_masters()

    # Make the new Science dir
    # TODO: This needs to be defined by the user
    scipath = os.path.join(redux_path, 'Science_coadd')
    if not os.path.isdir(scipath):
        msgs.info('Creating directory for Science output: {0}'.format(scipath))
        os.makedirs(scipath)

    # THE FOLLOWING MIMICS THE CODE IN pypeit.save_exposure()
    # TODO -- These lines should be above once reduce() passes back something sensible
    all_specobjs = specobjs.SpecObjs()
    for det in detectors:
        all_specobjs.add_sobj(sci_dict[det]['specobjs'])

    # Write
    outfile1d = os.path.join(scipath, 'spec1d_{:s}.fits'.format(basename))
    subheader = spectrograph.subheader_for_spec(head2d, head2d)
    all_specobjs.write_to_fits(subheader, outfile1d)

    # 2D spectra
    # TODO -- These lines should be above once reduce() passes back something sensible
    all_spec2d = spec2dobj.AllSpec2DObj()
    all_spec2d['meta']['ir_redux'] = ir_redux
    for det in detectors:
        all_spec2d[det] = spec2dobj.Spec2DObj(
            det=det,
            sciimg=sci_dict[det]['sciimg'],
            ivarraw=sci_dict[det]['sciivar'],
            skymodel=sci_dict[det]['skymodel'],
            objmodel=sci_dict[det]['objmodel'],
            ivarmodel=sci_dict[det]['ivarmodel'],
            # FIX: ``np.float`` was deprecated in NumPy 1.20 and removed in
            # 1.24; the builtin ``float`` is the documented equivalent alias.
            scaleimg=np.array([1.0], dtype=float),
            bpmmask=sci_dict[det]['outmask'],
            detector=sci_dict[det]['detector'],
            slits=sci_dict[det]['slits'],
            waveimg=sci_dict[det]['waveimg'],
            tilts=sci_dict[det]['tilts'],
            sci_spat_flexure=None,
            sci_spec_flexure=None,
            vel_corr=None,
            vel_type=None)

    # Build header
    outfile2d = os.path.join(scipath, 'spec2d_{:s}.fits'.format(basename))
    pri_hdr = all_spec2d.build_primary_hdr(
        head2d,
        spectrograph,
        subheader=subheader,
        # TODO -- JFH :: Decide if we need any of these
        redux_path=None,
        master_key_dict=None,
        master_dir=None)

    # Write
    all_spec2d.write_to_fits(outfile2d, pri_hdr=pri_hdr)
def write_to_fits(self, subheader, outfile, overwrite=True, update_det=None,
                  slitspatnum=None, history=None, debug=False):
    """
    Write the set of SpecObj objects to one multi-extension FITS file

    Args:
        subheader (:obj:`dict`):
            Cards copied (upper-cased keys) into the primary header.
        outfile (str):
        overwrite (bool, optional):
        slitspatnum (:obj:`str` or :obj:`list`, optional):
            Restricted set of slits for reduction.
            If provided, do not clobber the existing file but only update
            the indicated slits.  Useful for re-running on a subset of slits
        update_det (int or list, optional):
            If provided, do not clobber the existing file but only update
            the indicated detectors.  Useful for re-running on a subset of detectors
        history (optional):
            Object with a ``write_to_header`` method; entries are appended
            to the primary header.
        debug (bool, optional):
            Developer-only flag; raises NotImplementedError when set.
    """
    if os.path.isfile(outfile) and not overwrite:
        # FIX: corrected message typo 'exits' -> 'exists'
        msgs.warn(f'{outfile} exists. Set overwrite=True to overwrite it.')
        return

    # If the file exists and update_det (and slit_spat_num) is provided, use the existing header
    # and load up all the other hdus so that we only over-write the ones
    # we are updating
    if os.path.isfile(outfile) and (update_det is not None or slitspatnum is not None):
        _specobjs = SpecObjs.from_fitsfile(outfile)
        # Mask selects the existing objects to KEEP
        mask = np.ones(_specobjs.nobj, dtype=bool)
        # Update_det
        if update_det is not None:
            # Pop out those with this detector (and slit if slit_spat_num is provided)
            for det in np.atleast_1d(update_det):
                mask[_specobjs.DET == det] = False
        elif slitspatnum is not None:  # slitspatnum
            dets, spat_ids = parse.parse_slitspatnum(slitspatnum)
            for det, spat_id in zip(dets, spat_ids):
                mask[(_specobjs.DET == det) & (_specobjs.SLITID == spat_id)] = False
        _specobjs = _specobjs[mask]
        # Add in the new
        # TODO: Is the loop necessary? add_sobj can take many SpecObj objects.
        for sobj in self.specobjs:
            _specobjs.add_sobj(sobj)
    else:
        _specobjs = self.specobjs

    # Build up the Header
    header = io.initialize_header(primary=True)
    for key in subheader.keys():
        if key.upper() == 'HISTORY':
            # Only copy HISTORY from the subheader when no explicit history
            # object was given, avoiding duplicate entries below
            if history is None:
                for line in str(subheader[key.upper()]).split('\n'):
                    header[key.upper()] = line
        else:
            header[key.upper()] = subheader[key]

    # Init
    prihdu = fits.PrimaryHDU()
    hdus = [prihdu]
    prihdu.header = header

    # Add class info
    prihdu.header['DMODCLS'] = (self.__class__.__name__, 'Datamodel class')
    prihdu.header['DMODVER'] = (self.version, 'Datamodel version')

    # Add history
    if history is not None:
        history.write_to_header(prihdu.header)

    # Detector HDUs are collected once per detector, keyed by DET,
    # so duplicates across SpecObj objects collapse to one extension
    detector_hdus = {}
    nspec, ext = 0, 0
    # Loop on the SpecObj objects
    for sobj in _specobjs:
        if sobj is None:
            continue
        # HDUs
        if debug:
            raise NotImplementedError('Debugging for developers only.')
            #embed()
            #exit()
        shdul = sobj.to_hdu()
        if len(shdul) not in [1, 2]:
            msgs.error(
                'CODING ERROR: SpecObj datamodel changed. to_hdu should return 1 or 2 '
                'HDUs. If returned, the 2nd one should be the detector/mosaic.'
            )

        if len(shdul) == 2:
            detector_hdus[sobj['DET']] = shdul[1]
            shdu = [shdul[0]]
        else:
            shdu = shdul

        if len(shdu) != 1 or not isinstance(shdu[0], fits.hdu.table.BinTableHDU):
            msgs.error('CODING ERROR: SpecObj datamodel changed.')

        # Name
        shdu[0].name = sobj.NAME
        # Extension: EXTnnnn cards in the primary header index the objects
        keywd = 'EXT{:04d}'.format(ext)
        prihdu.header[keywd] = sobj.NAME
        ext += 1
        nspec += 1
        # Append
        hdus += shdu

    # Deal with Detectors
    for key, item in detector_hdus.items():
        # NOTE(review): reads a lowercase 'name' card from the detector HDU
        # header as the prefix -- confirm this card is always present.
        prefix = item.header['name']
        # Name
        if prefix not in item.name:  # In case we are re-loading
            item.name = f'{prefix}-{item.name}'
        # Append
        hdus += [item]

    # A few more for the header
    prihdu.header['NSPEC'] = nspec

    # Code versions (initialize_header fills version cards into the
    # existing header when passed hdr=...)
    io.initialize_header(hdr=prihdu.header)

    # Finish
    hdulist = fits.HDUList(hdus)
    if debug:
        raise NotImplementedError('Debugging for developers only.')
        #embed()
        #exit()
    hdulist.writeto(outfile, overwrite=overwrite)
    msgs.info("Wrote 1D spectra to {:s}".format(outfile))
    return