def _loader(self): """ Load a template from the database Returns ------- meta: metadata of the spectra (header) lam: wavelengths flux: flux """ try: self.wave_unit = units.validate_unit(self.wave_unit) self.flux_unit = units.validate_unit(self.flux_unit) except exceptions.SynphotError: # Assumes angtroms and FLAM self.wave_unit = u.AA self.flux_unit = units.FLAM # self.resolution = self.resolution * self.wave_unit if self.data_type == "fits": # make try and except here to catch most problems meta, lam, flux = read_fits_spec(self.path, ext=1, wave_unit=self.wave_unit, flux_unit=self.flux_unit, wave_col=self.wave_column_name, flux_col=self.flux_column_name) else: meta, lam, flux = read_ascii_spec(self.path, wave_unit=self.wave_unit, flux_unit=self.flux_unit) return meta, lam, flux
def _loader(self): """ Load a filter from the database Returns ------- meta: metadata of the spectra (header) lam: wavelengths flux: flux """ try: # it should also try to read it from the file directly self.wave_unit = units.validate_unit(self.wave_unit) except exceptions.SynphotError: self.wave_unit = u.AA # make try and except here to catch most problems if self.data_type == "fits": meta, lam, trans = read_fits_spec(self.filename, ext=1, wave_unit=self.wave_unit) # wave_col=self.wave_column_name, flux_col=self.trans_column_name) elif self.data_type == "ascii": meta, lam, trans = read_ascii_spec(self.filename, wave_unit=self.wave_unit) # wave_col=self.wave_column_name, flux_col=self.trans_column_name) # , flux_unit=self._internal_flux_unit, lam = lam.to(u.AA) return meta, lam, trans.value
def _loader(self): """ Load a filter from the database Returns ------- meta: metadata of the spectra (header) lam: wavelengths flux: flux """ try: # it should also try to read it from the file directly self.wave_unit = units.validate_unit(self.wave_unit) except exceptions.SynphotError: self.wave_unit = u.AA if self.data_type == "fits": meta, lam, rvs = read_fits_spec( self.filename, ext=1, wave_unit=self.wave_unit, flux_unit=self.extinction_unit, # self._internal_flux_unit, wave_col=self.wave_column, flux_col=self.extinction_column) elif self.data_type == "ascii": meta, lam, rvs = read_ascii_spec( self.filename, wave_unit=self.wave_unit, flux_unit=self.extinction_unit, # self._internal_flux_unit, wave_col=self.wave_column, flux_col=self.extinction_column) return meta, lam, rvs
def interpolate_spectral_element(parfilename, interpval, ext=1):
    """Interpolate (or extrapolate) throughput spectra in given
    parameterized FITS table to given parameter value.

    FITS table is parsed with :func:`stsynphot.stio.read_interp_spec`.
    Parameterized values must be in ascending order in the table columns.

    If extrapolation is needed but not allowed, default throughput
    from ``THROUGHPUT`` column will be used.

    Parameters
    ----------
    parfilename : str
        Parameterized filename containing a suffix followed by a
        column name specification in square brackets.
        For example, ``path/acs_fr656n_006_syn.fits[fr656n#]``.

    interpval : float
        Desired parameter value.

    ext : int, optional
        FITS extension index of the data table.

    Returns
    -------
    sp : `synphot.spectrum.SpectralElement`
        Empirical bandpass at ``interpval``.

    Raises
    ------
    synphot.exceptions.ExtrapolationNotAllowed
        Extrapolation is not allowed by data table.

    synphot.exceptions.SynphotError
        No columns available for interpolation or extrapolation.

    """
    def_colname = 'THROUGHPUT'
    warndict = {}

    # Separate real filename and column name specification.
    xre = _interpfilepatt.search(parfilename)
    if xre is None:
        raise synexceptions.SynphotError(
            '{0} must be in the format of "path/filename.fits'
            '[col#]"'.format(parfilename))
    filename = parfilename[0:xre.start()]
    col_prefix = xre.group('col').upper()

    # Read data table.
    data, wave_unit, doshift, extrapolate = stio.read_interp_spec(
        filename, tab_ext=ext)
    wave_unit = units.validate_unit(wave_unit)
    wave0 = data['WAVELENGTH']

    # Determine the columns that bracket the desired value.
    # Grab all columns that begin with the parameter name (e.g. 'MJD#')
    # and then split off the numbers after the '#'.
    col_names = []
    col_pars = []
    for n in data.names:
        cn = n.upper()
        if cn.startswith(col_prefix):
            col_names.append(cn)
            col_pars.append(float(cn.split('#')[1]))

    if len(col_names) < 1:
        raise synexceptions.SynphotError(
            '{0} contains no interpolated columns for {1}.'.format(
                filename, col_prefix))

    # Assumes ascending order of parameter values in table.
    min_par = col_pars[0]
    max_par = col_pars[-1]

    # Exact match. No interpolation needed.
    if interpval in col_pars:
        thru = data[col_names[col_pars.index(interpval)]]

    # Need interpolation.
    elif (interpval > min_par) and (interpval < max_par):
        upper_ind = np.searchsorted(col_pars, interpval)
        lower_ind = upper_ind - 1

        thru = _interp_spec(
            interpval, wave0, col_pars[lower_ind], col_pars[upper_ind],
            data[col_names[lower_ind]], data[col_names[upper_ind]], doshift)

    # Need extrapolation, if allowed.
    elif extrapolate:
        # Extrapolate below lowest columns.
        if interpval < min_par:
            thru = _extrap_spec(interpval, min_par, col_pars[1],
                                data[col_names[0]], data[col_names[1]])

        # Extrapolate above highest columns.
        else:  # interpval > max_par
            thru = _extrap_spec(interpval, col_pars[-2], max_par,
                                data[col_names[-2]], data[col_names[-1]])

    # Extrapolation not allowed.
    else:
        # Use default, if available.
        if def_colname in data.names:
            warnings.warn(
                'Extrapolation not allowed, using default throughput for '
                '{0}.'.format(parfilename), AstropyUserWarning)
            warndict['DefaultThroughput'] = True
            thru = data[def_colname]

        # Nothing can be done.
        else:
            raise synexceptions.ExtrapolationNotAllowed(
                'No default throughput for {0}.'.format(parfilename))

    meta = {'expr': '{0}#{1:g}'.format(filename, interpval),
            'warnings': warndict}
    return SpectralElement(
        Empirical1D, points=wave0 * wave_unit, lookup_table=thru, meta=meta)
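# Usage sketch for the function above. The parameterized file name is the
# docstring's own example; 6600 is an arbitrary parameter value, and the FITS
# table must exist locally for the call to succeed.
bp = interpolate_spectral_element('path/acs_fr656n_006_syn.fits[fr656n#]',
                                  6600)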
def p_functioncall(self, tree):
    # Where all the real interpreter action is.
    # Note that things that should only be done at the top level
    # are performed in :func:`interpret` defined below.
    """ V ::= function_call ( V LPAREN V RPAREN ) """
    if not isinstance(tree[2].value, list):
        args = [tree[2].value]
    else:
        args = tree[2].value

    fname = tree[0].value
    metadata = {'expr': '{0}{1}'.format(fname, tuple(args))}

    if fname not in _SYFUNCTIONS:
        log.error('Unknown function: {0}'.format(fname))
        self.error(fname)

    else:
        # Constant spectrum
        if fname == 'unit':
            if args[1] not in _SYFORMS:
                log.error('Unrecognized unit: {0}'.format(args[1]))
                self.error(fname)
            try:
                fluxunit = units.validate_unit(args[1])
                tree.value = SourceSpectrum(
                    ConstFlux1D, amplitude=args[0] * fluxunit, meta=metadata)
            except NotImplementedError as e:
                log.error(str(e))
                self.error(fname)

        # Black body
        elif fname == 'bb':
            tree.value = SourceSpectrum(BlackBodyNorm1D, temperature=args[0])

        # Power law
        elif fname == 'pl':
            if args[2] not in _SYFORMS:
                log.error('Unrecognized unit: {0}'.format(args[2]))
                self.error(fname)
            try:
                fluxunit = units.validate_unit(args[2])
                tree.value = SourceSpectrum(
                    PowerLawFlux1D, amplitude=1 * fluxunit, x_0=args[0],
                    alpha=-args[1], meta=metadata)
            except (synexceptions.SynphotError, NotImplementedError) as e:
                log.error(str(e))
                self.error(fname)

        # Box throughput
        elif fname == 'box':
            tree.value = SpectralElement(
                Box1D, amplitude=1, x_0=args[0], width=args[1], meta=metadata)

        # Source spectrum from file
        elif fname == 'spec':
            tree.value = SourceSpectrum.from_file(irafconvert(args[0]))
            tree.value.meta.update(metadata)

        # Passband
        elif fname == 'band':
            tree.value = spectrum.band(tree[2].svalue)
            tree.value.meta.update(metadata)

        # Gaussian emission line
        elif fname == 'em':
            if args[3] not in _SYFORMS:
                log.error('Unrecognized unit: {0}'.format(args[3]))
                self.error(fname)
            x0 = args[0]
            fluxunit = units.validate_unit(args[3])
            totflux = units.convert_flux(x0, args[2] * fluxunit,
                                         units.PHOTLAM).value
            tree.value = SourceSpectrum(
                GaussianFlux1D, total_flux=totflux, mean=x0, fwhm=args[1])

        # Catalog interpolation
        elif fname == 'icat':
            tree.value = grid_to_spec(*args)

        # Renormalize source spectrum
        elif fname == 'rn':
            sp = args[0]
            bp = args[1]
            fluxunit = units.validate_unit(args[3])
            rnval = args[2] * fluxunit

            if not isinstance(sp, SourceSpectrum):
                sp = SourceSpectrum.from_file(irafconvert(sp))

            if not isinstance(bp, SpectralElement):
                bp = SpectralElement.from_file(irafconvert(bp))

            # Force the renormalization in the case of partial overlap (less
            # robust, but duplicates IRAF SYNPHOT behavior); an entirely
            # disjoint spectrum and bandpass still raises an exception.
            try:
                tree.value = sp.normalize(
                    rnval, band=bp, area=conf.area, vegaspec=spectrum.Vega)
            except synexceptions.PartialOverlap:
                tree.value = sp.normalize(
                    rnval, band=bp, area=conf.area, vegaspec=spectrum.Vega,
                    force=True)
                tree.value.warnings = {
                    'force_renorm': ('Renormalization exceeds the limit '
                                     'of the specified passband.')}
            tree.value.meta.update(metadata)

        # Redshift source spectrum (flat spectrum if fails)
        elif fname == 'z':
            sp = args[0]

            # ETC generates junk (i.e., 'null') sometimes
            if isinstance(sp, str) and sp != 'null':
                sp = SourceSpectrum.from_file(irafconvert(sp))

            if isinstance(sp, SourceSpectrum):
                tree.value = sp
                tree.value.z = args[1]
            else:
                tree.value = SourceSpectrum(ConstFlux1D, amplitude=1)

            tree.value.meta.update(metadata)

        # Extinction
        elif fname == 'ebmvx':
            try:
                tree.value = spectrum.ebmvx(args[1], args[0])
            except synexceptions.SynphotError as e:
                log.error(str(e))
                self.error(fname)
            tree.value.meta.update(metadata)

        # Default
        else:
            tree.value = ('would call {0} with the following args: '
                          '{1}'.format(fname, repr(args)))
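# Usage sketch: p_functioncall is exercised through the parser's public entry
# point rather than called directly. The classic expression below hits the
# 'rn', 'bb' and 'band' branches above; the band name and magnitude are
# arbitrary example values, and the STScI reference data (TRDS) must be
# installed for band() and the Vega normalization to resolve.
from stsynphot import spparser

sp = spparser.parse_spec('rn(bb(5000),band(johnson,v),18,vegamag)')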