def _get_milky_way_foreground(self, filename=None):
    """
    Read in the Milky Way foreground template and interpolate it onto
    this spectrum's wavelength bins.

    Source data come from Charles Danforth and are a median-combination
    of 92 normalized COS/G130M+G160M AGN spectra valid from 1130-1800A.
    """
    if filename is None:
        filename = os.path.join(trident_path(), "data",
                                "spectral_templates",
                                "mw_foreground_COS.txt")

    template = np.loadtxt(filename)
    mw_wavelength = YTArray(template[:, 0], 'angstrom')
    mw_flux = template[:, 1]

    # Bracket each output wavelength between two template samples,
    # then interpolate linearly between them.
    idx = np.digitize(self.lambda_field, mw_wavelength)
    idx = idx.clip(1, mw_wavelength.size - 1)
    gradient = (mw_flux[idx] - mw_flux[idx - 1]) / \
        (mw_wavelength[idx] - mw_wavelength[idx - 1])
    flux = gradient * (self.lambda_field - mw_wavelength[idx]) + mw_flux[idx]

    # Beyond the red edge of the template data, fall back to a flat
    # continuum of 1.
    flux[self.lambda_field > 1799.9444] = 1.0
    return flux
def _get_qso_spectrum(self, emitting_redshift, observing_redshift,
                      filename=None):
    """
    Read in the QSO background template, shift it to the effective
    redshift, and interpolate it onto this spectrum's wavelength bins.
    """
    if observing_redshift is None:
        observing_redshift = self.observing_redshift
    if emitting_redshift is None:
        emitting_redshift = 0.
    # Following Hogg (2000) eq. 13 for the effective redshift z12 of
    # observing at z1 redshift light emitted at z2:
    # 1 + z12 = (1 + z2) / (1 + z1)
    redshift_eff = (1 + emitting_redshift) / (1 + observing_redshift) - 1

    if filename is None:
        filename = os.path.join(trident_path(), "data",
                                "spectral_templates",
                                "qso_background_COS_HST.txt")

    template = np.loadtxt(filename)
    qso_wavelength = YTArray(template[:, 0], 'angstrom')
    # Shift the template wavelengths by the effective redshift.
    qso_wavelength += qso_wavelength * redshift_eff
    qso_flux = template[:, 1]

    # Bracket each output wavelength between two template samples,
    # then interpolate linearly between them.
    idx = np.digitize(self.lambda_field, qso_wavelength)
    idx = idx.clip(1, qso_wavelength.size - 1)
    gradient = (qso_flux[idx] - qso_flux[idx - 1]) / \
        (qso_wavelength[idx] - qso_wavelength[idx - 1])
    flux = gradient * (self.lambda_field - qso_wavelength[idx]) + qso_flux[idx]
    return flux
def __init__(self, function=None, width=None, filename=None):
    """
    Build a line spread function kernel, either from a kernel file or
    from an analytic function of a given width.

    **Parameters**

    function : string, optional
        Analytic kernel shape: 'boxcar' or 'gaussian'.  Used together
        with ``width`` when no ``filename`` is given.

    width : int, optional
        Width of the analytic kernel in bins.

    filename : string, optional
        Name of a two-column kernel file; looked up first in the
        current directory, then in trident's data/lsf_kernels directory.
        Takes precedence over ``function``/``width``.

    **Raises**

    RuntimeError
        If the kernel file cannot be found, or if neither a filename
        nor a function+width pair is supplied.
    """
    self.kernel = []
    self.filename = filename
    self.function = function
    self.width = width
    # if filename is defined, use it
    if filename is not None:
        # Check the local dir first, then the bundled lsf_kernels dir.
        if os.path.isfile(filename):
            kernel_path = filename
        else:
            kernel_path = os.path.join(trident_path(), "data",
                                       "lsf_kernels", filename)
            if not os.path.isfile(kernel_path):
                raise RuntimeError(
                    "LSF filename not found in current " +
                    "directory or in %s/data/lsf_kernels directory" %
                    trident_path())
        # Context manager guarantees the file is closed even if a
        # malformed line raises during parsing (the original leaked
        # the handle in that case).
        with open(kernel_path, 'r') as lsf_file:
            for line in lsf_file:
                # Second column holds the kernel amplitude.
                self.kernel.append(float(line.split()[1]))
        self.kernel = np.array(self.kernel)
        self.width = self.kernel.size
    elif function is not None and width is not None:
        if function == 'boxcar':
            if width % 2 == 0:
                # logging's .warn() is a deprecated alias of .warning()
                mylog.warning(
                    "LSF kernel must have an odd length. Reducing kernel size by 1."
                )
                width -= 1
            self.kernel = np.ones(width) / width
        elif function == 'gaussian':
            from astropy.convolution import Gaussian1DKernel
            self.kernel = Gaussian1DKernel(width)
    else:
        raise RuntimeError(
            "Either LSF filename OR function+width must be specified.")
def load_line_list_from_file(self, filename):
    """
    Load a line list from a file into the LineDatabase.

    Line list file is a tab-delimited text file in the format:

    element, ion_state, wavelength, gamma, f_value, (name)

    H, I, 1215.67, 4.69e8, 4.16e-1, Ly a

    **Parameters**

    filename : string

        The filename of the list to add.  First looks in
        trident.__path__/data/line_lists directory, then in cwd.

    **Raises**

    RuntimeError
        If the file is found in neither location.
    """
    # check to see if file exists in trident/data/line_lists
    # if not, look in cwd
    filename = os.path.join(trident_path(), "data", "line_lists", filename)
    if not os.path.isfile(filename):
        filename = filename.split(os.sep)[-1]
        if not os.path.isfile(filename):
            raise RuntimeError("line_list %s is not found in local "
                               "directory or in trident/data/line_lists "
                               % (filename.split(os.sep)[-1]))

    # Context manager closes the handle (the original left it open),
    # and iterating the file directly avoids materializing every line.
    with open(filename) as line_file:
        for line in line_file:
            online = line.rstrip().split()
            # Skip comments and any line too short to hold the five
            # mandatory columns.
            if line.startswith("#") or len(online) < 5:
                continue
            element, ion_state, wavelength, gamma, f_value = online[:5]
            # optional identifier should be added if existent
            if len(online) > 5:
                identifier = " ".join(online[5:])
            else:
                identifier = None
            self.add_line(element, ion_state, wavelength, gamma, f_value,
                          identifier=identifier)
Line from trident.lsf import \ LSF from trident.plotting import \ plot_spectrum from trident.spectrum_generator import \ SpectrumGenerator, \ valid_instruments, \ load_spectrum from trident.utilities import \ make_onezone_dataset, \ make_onezone_ray from trident.ray_generator import \ make_simple_ray, \ make_compound_ray from trident.roman import \ to_roman, \ from_roman from trident.light_ray import \ LightRay # Making installation path global path = trident_path()
def test_line_database_from_local_file():
    """
    Verify that a LineDatabase can be constructed from a line-list
    file copied into the current working directory.
    """
    # Join the path one component at a time for portability; the
    # original embedded a '/'-separated subpath in a single argument,
    # inconsistent with the rest of the package.
    line_file = os.path.join(trident_path(), 'data', 'line_lists',
                             'lines.txt')
    copyfile(line_file, 'test_lines.txt')
    try:
        ld = LineDatabase('test_lines.txt')
        print(ld)
    finally:
        # Remove the temp copy even if LineDatabase raises, so a failed
        # run does not pollute the working directory.
        os.remove('test_lines.txt')
def test_path():
    """
    Smoke test: trident_path() should be callable without raising.
    """
    trident_path()