def to_dict(self):
    """Build a JSON-compatible dict representation of the sub-system.

    Returns
    -------
    outdict : dict
        JSON compatible dict of the sub-system attributes, including a
        nested dict of its components and a creation-date/user stamp.
    """
    import datetime
    import getpass
    # Provenance stamp
    creation_date = str(datetime.date.today().strftime('%Y-%b-%d'))
    username = getpass.getuser()
    # Core attributes
    outdict = dict(abs_type='SubSystem',
                   Name=self.name,
                   zabs=self.zabs,
                   vlim=self.vlim.to('km/s').value,
                   lbl=self.lbl,
                   CreationDate=creation_date,
                   user=username)
    # Serialize each component, keyed by its name
    outdict['components'] = {comp.name: comp.to_dict()
                             for comp in self._components}
    # Polish for JSON and return
    return ltu.jsonify_dict(outdict)
def to_dict(self):
    """Convert the component (and its absorption lines) to a dict.

    Returns
    -------
    cdict : dict
        JSON compatible dict of the component attributes; each
        absorption line is serialized under its rest wavelength.
    """
    cdict = dict(Zion=self.Zion,
                 zcomp=self.zcomp,
                 vlim=self.vlim.to('km/s').value,
                 Name=self.name,
                 RA=self.coord.ra.value,
                 DEC=self.coord.dec.value,
                 A=self.A,
                 Ej=self.Ej.to('1/cm').value,
                 comment=self.comment,
                 flag_N=self.flag_N,
                 logN=self.logN,
                 sig_logN=self.sig_logN)
    # One entry per absorption line, keyed by rest wavelength value
    cdict['lines'] = {line.wrest.value: line.to_dict()
                      for line in self._abslines}
    # Polish for JSON and return
    return ltu.jsonify_dict(cdict)
def to_dict(self):
    """Serialize the AbsSystem data to a JSON-compatible dict.

    Returns
    -------
    outdict : dict
        Dict of the system attributes plus a nested dict of its
        components, stamped with the creation date and user.
    """
    import datetime
    import getpass
    # Provenance stamp
    creation_date = str(datetime.date.today().strftime('%Y-%b-%d'))
    username = getpass.getuser()
    # Core attributes
    outdict = dict(Name=self.name,
                   abs_type=self.abs_type,
                   zabs=self.zabs,
                   vlim=self.vlim.to('km/s').value,
                   zem=self.zem,
                   NHI=self.NHI,
                   sig_NHI=self.sig_NHI,
                   RA=self.coord.ra.value,
                   DEC=self.coord.dec.value,
                   kin=self.kin,
                   Refs=self.Refs,
                   CreationDate=creation_date,
                   user=username)
    # Serialize each component, keyed by its name
    outdict['components'] = {comp.name: comp.to_dict()
                             for comp in self._components}
    # Polish for JSON and return
    return ltu.jsonify_dict(outdict)
def write_to_fits(self, outfil, clobber=True, add_wave=False):
    """ Write the spectrum to a FITS file.

    Should generate a separate code to make a Binary FITS table format

    Parameters
    ----------
    outfil : str
        Name of the FITS file
    clobber : bool, optional
        Clobber existing file?
    add_wave : bool, optional
        Force writing of wavelength array (polynomial WCS case only)

    Raises
    ------
    ValueError
        If the WCS type is unsupported or a header card cannot be copied.
    """
    # TODO
    #  1. Add unit support for wavelength arrays
    from specutils.wcs.specwcs import Spectrum1DPolynomialWCS, Spectrum1DLookupWCS
    from specutils.io import write_fits as sui_wf
    prihdu = sui_wf._make_hdu(self.data)  # Not for binary table format
    prihdu.name = "FLUX"
    hdu = fits.HDUList([prihdu])

    # The wavelength representation drives the file layout
    if type(self.wcs) is Spectrum1DPolynomialWCS:  # CRVAL1, etc. WCS
        # Encode the wavelength solution in the primary header
        wcs = self.wcs
        wcs.write_fits_header(prihdu.header)
        # Error array?
        if self.sig is not None:
            sighdu = fits.ImageHDU(self.sig)
            sighdu.name = "ERROR"
            hdu.append(sighdu)
        # Optionally also write the explicit wavelength array
        if add_wave:
            wvhdu = fits.ImageHDU(self.dispersion.value)
            wvhdu.name = "WAVELENGTH"
            hdu.append(wvhdu)
    elif type(self.wcs) is Spectrum1DLookupWCS:
        # Wavelengths as an array (without units for now)
        # Add sig, wavelength to HDU
        if self.sig is not None:
            sighdu = fits.ImageHDU(self.sig)
            sighdu.name = "ERROR"
            hdu.append(sighdu)
        wvhdu = fits.ImageHDU(self.dispersion.value)
        wvhdu.name = "WAVELENGTH"
        hdu.append(wvhdu)
    else:
        raise ValueError("write_to_fits: Not ready for this type of spectrum wavelengths")

    # Continuum, if present
    if hasattr(self, "co") and self.co is not None:
        cohdu = fits.ImageHDU(self.co)
        cohdu.name = "CONTINUUM"
        hdu.append(cohdu)

    # Copy over header cards not already set above
    if hasattr(self, "head"):
        # BUGFIX: Header.keys() is not guaranteed to return a list in
        # modern astropy; materialize it before concatenating below
        hdukeys = list(prihdu.header.keys())
        # Append ones to avoid (cards that must not be overwritten)
        hdukeys = hdukeys + ["BUNIT", "COMMENT", "", "NAXIS2", "HISTORY"]
        for key in self.head.keys():
            # Use new ones
            if key in hdukeys:
                continue
            # Update unused ones
            try:
                prihdu.header[key] = self.head[key]
            except ValueError:
                raise ValueError("l.spectra.utils: Bad header key card")
        # History
        if "HISTORY" in self.head.keys():
            # Strip \n
            tmp = str(self.head["HISTORY"]).replace("\n", " ")
            try:
                prihdu.header.add_history(str(tmp))
            except ValueError:
                # BUGFIX: was `import pdb; pdb.set_trace()` -- a debugging
                # leftover that hangs non-interactive runs; fail loudly
                # instead, consistent with the header-card handler above
                raise ValueError("l.spectra.utils: Bad HISTORY card")

    # Stash metadata as a JSON string in a single header card
    if self.meta is not None and len(self.meta) > 0:
        d = liu.jsonify_dict(self.meta)
        prihdu.header["METADATA"] = json.dumps(d)

    hdu.writeto(outfil, clobber=clobber)
    print("Wrote spectrum to {:s}".format(outfil))