Example #1
    def init_stars_to_process(self):
        """Initialization star and gas particle lists."""

        from caesar.group import Group, collate_group_ids
        from caesar.property_manager import ptype_ints
        from caesar.cyloser import smass_at_formation

        #if isinstance(self.groups[0],Group):
        self.ngroup, self.gasids, self.gid_bins = collate_group_ids(
            self.groups, 'gas', self.obj.simulation.ngas)
        self.ngroup, self.starids, self.sid_bins = collate_group_ids(
            self.groups, 'star', self.obj.simulation.nstar)
        #else:
        #    sys.exit('Must provide a list of Caesar groups.')
        self.scount = sum([len(i.slist) for i in self.groups])
        self.gcount = sum([len(i.glist) for i in self.groups])
        self.Nobjs = len(self.groups)

        # get original stellar mass at time of formation
        self.obj.smass_orig = smass_at_formation(self.obj,
                                                 self.groups,
                                                 self.ssp_mass,
                                                 self.ssp_ages,
                                                 self.ssp_logZ,
                                                 nproc=self.nproc)

        memlog('Loaded %d stars and %d gas in %d objects to process' %
               (self.scount, self.gcount, self.Nobjs))
        return
Example #2
 def _determine_ptypes(self):
     """Determines what particle/field types to collect."""
     self.ptypes = ['gas', 'star']
     #if 'blackholes' in self.obj._kwargs and self.obj._kwargs['blackholes']:
     self.blackholes = self.dust = self.dm2 = False
     if hasattr(self.obj, '_ds_type'):
         fields_by_type = self.obj._ds_type.ds.particle_fields_by_type
         if 'PartType5' in fields_by_type:
             if ('BH_Mdot' in fields_by_type['PartType5']
                     or 'StellarFormationTime' in fields_by_type['PartType5']):
                 self.ptypes.append('bh')
                 self.blackholes = True
         else:
             memlog('No black holes found')
     if (hasattr(self.obj, '_kwargs') and 'dust' in self.obj._kwargs
             and self.obj._kwargs['dust']):
         mylog.warning('Enabling active dust particles')
         self.ptypes.append('dust')
         self.dust = True
     self.ptypes.append('dm')
     if (hasattr(self.obj, '_kwargs') and 'dm2' in self.obj._kwargs
             and self.obj._kwargs['dm2']):
         self.ptypes.append('dm2')
         self.dm2 = True
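This _determine_ptypes keys off yt's ds.particle_fields_by_type mapping (particle type name to list of on-disk field names). A minimal sketch of inspecting that mapping for a snapshot, assuming yt is installed; the file name is a placeholder:

import yt

# Hypothetical snapshot path; any yt-loadable particle snapshot works
ds = yt.load('snapshot_305.hdf5')

# dict-like mapping: particle type name -> list of on-disk field names
fields_by_type = ds.particle_fields_by_type
print(list(fields_by_type.keys()))          # e.g. ['PartType0', 'PartType4', 'PartType5']
print(fields_by_type.get('PartType5', []))  # black-hole fields, if present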
Example #3
File: pyloser.py  Project: lq3552/caesar
 def init_ssp_table(self):
     import os
     read_flag = False
     if os.path.exists(self.ssp_table_file):
         try:
             self.read_ssp_table(self.ssp_table_file)
             memlog('Read SSP table %s' % self.ssp_table_file)
             read_flag = True
         except:
             memlog('Error reading SSP table %s, will generate...' %
                    self.ssp_table_file)
     if not read_flag:  # generate table with Caesar default options
         ssp_ages, ssp_logZ, mass_remaining, wavelengths, ssp_spectra = generate_ssp_table(
             self.ssp_table_file,
             return_table=True,
             imf_type=1,
             add_neb_emission=True,
             sfh=0,
             zcontinuous=1
         )  # note Caesar default FSPS options; run generate_ssp_table() separately to set desired FSPS options
         self.ssp_ages = np.array(ssp_ages, dtype=MY_DTYPE)
         self.ssp_logZ = np.array(ssp_logZ, dtype=MY_DTYPE)
         self.ssp_mass = np.array(mass_remaining, dtype=MY_DTYPE)
         self.ssp_wavelengths = np.array(wavelengths, dtype=MY_DTYPE)
         self.ssp_spectra = np.array(ssp_spectra, dtype=MY_DTYPE)
Example #4
File: pyloser.py  Project: lq3552/caesar
    def init_extinction(self):
        from caesar.pyloser.atten_laws import calzetti, chevallard, conroy, cardelli, smc, lmc
        wave = self.ssp_wavelengths.astype(np.float64)
        self.ext_curves = []
        self.ext_curves.append(calzetti(wave))
        self.ext_curves.append(chevallard(wave))
        self.ext_curves.append(conroy(wave))
        self.ext_curves.append(cardelli(wave))
        self.ext_curves.append(smc(wave))
        self.ext_curves.append(lmc(wave))
        self.ext_curves = np.asarray(self.ext_curves)

        memlog('Starting photometry using %s extinction law' % self.ext_law)
        if 'calzetti' in self.ext_law: self.ext_law = 0
        elif 'chevallard' in self.ext_law: self.ext_law = 1
        elif 'conroy' in self.ext_law: self.ext_law = 2
        elif self.ext_law == 'mw' or self.ext_law == 'cardelli' or 'CCM' in self.ext_law:
            self.ext_law = 3
        elif 'smc' in self.ext_law:
            self.ext_law = 4
        elif 'lmc' in self.ext_law:
            self.ext_law = 5
        elif self.ext_law == 'mix_calz_MW':
            self.ext_law = 6
        elif self.ext_law == 'composite':
            self.ext_law = 7
        else:
            mylog.warning(
                'Extinction law %s not recognized, assuming composite' %
                self.ext_law)
            self.ext_law = 7
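The if/elif chain above maps an extinction-law name onto the integer code used downstream, in the same order as the self.ext_curves list. For reference, a compact hypothetical alternative using a plain dict lookup; unlike the chain above, it matches exact keys rather than substrings:

# Hypothetical name-to-code table; ordering matches the ext_curves list above.
EXT_LAW_CODES = {
    'calzetti': 0, 'chevallard': 1, 'conroy': 2,
    'mw': 3, 'cardelli': 3, 'CCM': 3,
    'smc': 4, 'lmc': 5, 'mix_calz_MW': 6, 'composite': 7,
}

ext_law_name = 'calzetti'                          # example input
ext_law_code = EXT_LAW_CODES.get(ext_law_name, 7)  # unknown names fall back to composite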
Example #5
def write_progens(obj, data, data_type, caesar_file, index_name):
    f = h5py.File(caesar_file, 'r+')
    memlog('Writing %s info into %s' % (index_name, caesar_file))
    if check_if_progen_is_present(data_type, caesar_file, index_name):
        del f['%s_data/%s' % (data_type, index_name)]
    f.create_dataset('%s_data/%s' % (data_type, index_name), data=data,
                     compression=1)
    f.close()
    return
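For reference, a dataset written with the '%s_data/%s' layout above can be read back with plain h5py. A minimal sketch; the file name, data_type, and index_name below are placeholders, not values taken from the code above:

import h5py

# Placeholder names matching the '%s_data/%s' path convention used by write_progens
caesar_file, data_type, index_name = 'my_caesar_file.hdf5', 'galaxy', 'progen_index'

with h5py.File(caesar_file, 'r') as f:
    data = f['%s_data/%s' % (data_type, index_name)][:]
print(data.shape)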
Example #6
File: pyloser.py  Project: lq3552/caesar
def generate_ssp_table(ssp_lookup_file,
                       Zsol=Solar['total'],
                       oversample=[2, 2],
                       return_table=False,
                       **fsps_options):
    '''Generates an SSP lookup table, oversampling in [age, metallicity] by oversample.'''
    import fsps
    mylog.info('Generating SSP lookup table %s' % (ssp_lookup_file))
    mylog.info('with FSPS options: %s' % (fsps_options))
    fsps_opts = ''
    for key, value in fsps_options.items():
        fsps_opts = fsps_opts + ("{0} = {1}, ".format(key, value))
    fsps_opts = np.bytes_(fsps_opts)  # bytes scalar for HDF5 storage; np.string_ was removed in NumPy 2.0
    fsps_ssp = fsps.StellarPopulation(**fsps_options)
    wavelengths = fsps_ssp.wavelengths
    ssp_ages = []
    mass_remaining = []
    ssp_ages.append(fsps_ssp.ssp_ages[0])
    mass_remaining.append(fsps_ssp.stellar_mass[0])
    for i in range(len(fsps_ssp.ssp_ages) - 1):
        for j in range(i + 1, i + oversample[0]):
            ssp_ages.append((fsps_ssp.ssp_ages[j] - fsps_ssp.ssp_ages[j - 1]) *
                            (j - i) / oversample[0] + fsps_ssp.ssp_ages[j - 1])
            mass_remaining.append(
                (fsps_ssp.stellar_mass[j] - fsps_ssp.stellar_mass[j - 1]) *
                (j - i) / oversample[0] + fsps_ssp.stellar_mass[j - 1])
        ssp_ages.append(fsps_ssp.ssp_ages[j])
        mass_remaining.append(fsps_ssp.stellar_mass[j])
    ssp_logZ = []
    ssp_logZ.append(fsps_ssp.zlegend[0])
    for i in range(len(fsps_ssp.zlegend) - 1):
        for j in range(i + 1, i + oversample[1]):
            ssp_logZ.append((fsps_ssp.zlegend[j] - fsps_ssp.zlegend[j - 1]) *
                            (j - i) / oversample[1] + fsps_ssp.zlegend[j - 1])
        ssp_logZ.append(fsps_ssp.zlegend[j])
    ssp_logZ = np.log10(ssp_logZ)
    ssp_spectra = []
    for age in ssp_ages:
        for Zmet in ssp_logZ:
            fsps_ssp.params["logzsol"] = Zmet - np.log10(Zsol)
            spectrum = fsps_ssp.get_spectrum(tage=10**(age - 9))[1]
            ssp_spectra.append(spectrum)
    with h5py.File(ssp_lookup_file, 'w') as hf:
        hf.create_dataset('fsps_options', data=fsps_opts)
        hf.create_dataset('ages', data=ssp_ages)
        hf.create_dataset('logZ', data=ssp_logZ)
        hf.create_dataset('mass_remaining', data=mass_remaining)
        hf.create_dataset('wavelengths', data=wavelengths)
        hf.create_dataset('spectra', data=ssp_spectra)
    memlog('Generated lookup table with %d ages and %d metallicities' %
           (len(ssp_ages), len(ssp_logZ)))

    if return_table:
        return ssp_ages, ssp_logZ, mass_remaining, wavelengths, ssp_spectra
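As the in-code comment in Example #3 notes, generate_ssp_table() can be run separately to build a lookup table with the FSPS options you actually want. A minimal usage sketch, assuming python-fsps is installed and that the function is importable from caesar.pyloser.pyloser; the output filename and option values are illustrative, not defaults taken from the code:

# Illustrative only: module path, filename, and FSPS options are assumptions.
from caesar.pyloser.pyloser import generate_ssp_table

generate_ssp_table('SSP_Chabrier_neb.hdf5',   # hypothetical output file
                   oversample=[2, 2],
                   imf_type=1,                # Chabrier IMF in FSPS
                   add_neb_emission=True,
                   sfh=0,
                   zcontinuous=1)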
Example #7
    def Av_per_group(self):
        memlog('Finding LOS A_V values for %d objects' % (len(self.groups)))
        try:
            import tqdm
            groups = tqdm.tqdm(self.groups)  # show a progress bar if tqdm is available
        except ImportError:
            groups = self.groups
        for obj_ in groups:
            obj_.group_Av = self.obj.AV_star[obj_.slist]
Example #8
    def _determine_ptypes(self):
        """Determines what particle/field types to collect."""
        self.ptypes = []
        #if 'blackholes' in self.obj._kwargs and self.obj._kwargs['blackholes']:
        if ('Gas' in self.obj._ds_type.ds.particle_fields_by_type) or (
                'PartType0' in self.obj._ds_type.ds.particle_fields_by_type):
            self.ptypes.append('gas')
        if ('Stars' in self.obj._ds_type.ds.particle_fields_by_type) or (
                'PartType4' in self.obj._ds_type.ds.particle_fields_by_type):
            self.ptypes.append('star')
        self.ptypes.append('dm')
        self.blackholes = self.dust = self.dm2 = False
        if hasattr(self.obj, '_ds_type'):
            fields_by_type = self.obj._ds_type.ds.particle_fields_by_type
            if 'PartType5' in fields_by_type:
                if ('BH_Mdot' in fields_by_type['PartType5']
                        or 'StellarFormationTime' in fields_by_type['PartType5']):
                    self.ptypes.append('bh')
                    self.blackholes = True
            elif 'Bndry' in fields_by_type:
                if 'Mass' in fields_by_type['Bndry']:
                    self.ptypes.append('bh')
                    self.blackholes = True
            else:
                memlog('No black holes found')
        if (hasattr(self.obj, '_kwargs') and 'dust' in self.obj._kwargs
                and self.obj._kwargs['dust']):
            mylog.warning('Enabling active dust particles')
            self.ptypes.append('dust')
            self.dust = True

        if hasattr(self.obj, '_kwargs') and 'lowres' in self.obj._kwargs:
            if 2 in self.obj._kwargs['lowres']:
                self.ptypes.append('dm2')
                self.dm2 = True
                print(ptype_aliases[self.obj._ds_type.ds_type]['dm2'],
                      'is treated as low-resolution particles')
            if 3 in self.obj._kwargs['lowres']:
                self.ptypes.append('dm3')
                self.dm3 = True
                print(ptype_aliases[self.obj._ds_type.ds_type]['dm3'],
                      'is treated as low-resolution particles')
        print('Particle types to be loaded:',
              [ptype_aliases[self.obj._ds_type.ds_type][i] for i in self.ptypes])
Example #9
    def _photometry_init(self):
        """Collect particle information for photometry"""
        from caesar.property_manager import get_property, ptype_ints

        memlog('Loading gas and star particles for photometry')
        self._determine_ptypes()

        self.pos = np.empty((0, 3), dtype=MY_DTYPE)
        self.vel = np.empty((0, 3), dtype=MY_DTYPE)
        self.mass = np.empty(0, dtype=MY_DTYPE)
        self.ptype = np.empty(0, dtype=np.int32)
        for ip, p in enumerate(['gas', 'star']):
            data = get_property(self.obj, 'pos',
                                p).to(self.obj.units['length'])
            self.pos = np.append(self.pos, data.d, axis=0)
            data = get_property(self.obj, 'vel',
                                p).to(self.obj.units['velocity'])
            self.vel = np.append(self.vel, data.d, axis=0)
            data = get_property(self.obj, 'mass', p).to(self.obj.units['mass'])
            self.mass = np.append(self.mass, data.d, axis=0)
            self.ptype = np.append(self.ptype,
                                   np.full(len(data),
                                           ptype_ints[p],
                                           dtype=np.int32),
                                   axis=0)
        self._assign_local_lists()
        self._assign_particle_counts()
        memlog('Loaded particle data')

        self._load_gas_data()
        self._load_star_data()
        memlog('Loaded gas and star data')
Example #10
 def _member_search_init(self, select='all'):
     """Collect particle information for member_search()"""
     memlog('Initializing member search, loading particles')
     self.obj.simulation.ds_type = self.obj._ds_type.ds_type
     #         self._determine_ptypes()
     self.load_particle_data(select=select)
     memlog('Loaded particle data')
     self._assign_particle_counts()
     if ('Gas' in self.obj._ds_type.ds.particle_fields_by_type) or (
             'PartType0' in self.obj._ds_type.ds.particle_fields_by_type):
         if isinstance(select, str) and select == 'all':
             self._load_gas_data()
         else:
             self._load_gas_data(select=select[self.ptypes.index('gas')])
     if ('Stars' in self.obj._ds_type.ds.particle_fields_by_type) or (
             'PartType4' in self.obj._ds_type.ds.particle_fields_by_type):
         if isinstance(select, str) and select == 'all':
             self._load_star_data()
         else:
             self._load_star_data(select=select[self.ptypes.index('star')])
     if self.blackholes:
         if isinstance(select, str) and select == 'all':
             self._load_bh_data()
         else:
             self._load_bh_data(select=select[self.ptypes.index('bh')])
     memlog('Loaded baryon data')
Example #11
    def init_ssp_table(self):
        """Initialization SSP table, either reading it in or creating (and storing) it."""

        import os
        read_flag = False
        if os.path.exists(self.ssp_table_file):
            try:
                self.read_ssp_table(self.ssp_table_file)
                memlog('Read SSP table %s' % self.ssp_table_file)
                read_flag = True
            except:
                memlog('Error reading SSP table %s, will generate...' %
                       self.ssp_table_file)
        if not read_flag:  # generate table with Caesar default options
            if self.ssp_model == 'FSPS':
                ssp_ages, ssp_logZ, mass_remaining, wavelengths, ssp_spectra = generate_ssp_table_fsps(
                    self.ssp_table_file,
                    return_table=True,
                    imf_type=1,
                    add_neb_emission=True,
                    sfh=0,
                    zcontinuous=1
                )  # note Caesar default FSPS options; run generate_ssp_table() separately to set desired FSPS options
            elif self.ssp_model == 'BPASS':
                ssp_ages, ssp_logZ, mass_remaining, wavelengths, ssp_spectra = generate_ssp_table_bpass(
                    self.ssp_table_file, return_table=True)
            elif self.ssp_model == 'BC03':
                ssp_ages, ssp_logZ, mass_remaining, wavelengths, ssp_spectra = generate_ssp_table_bc03(
                    self.ssp_table_file, return_table=True)
            else:
                print('ssp_model=%s not recognized in generate_ssp_table()' % self.ssp_model)
                sys.exit(-1)
            self.ssp_ages = np.array(ssp_ages, dtype=MY_DTYPE)
            self.ssp_logZ = np.array(ssp_logZ, dtype=MY_DTYPE)
            self.ssp_mass = np.array(mass_remaining, dtype=MY_DTYPE)
            self.ssp_wavelengths = np.array(wavelengths, dtype=MY_DTYPE)
            self.ssp_spectra = np.array(ssp_spectra, dtype=MY_DTYPE)
Example #12
 def _member_search_init(self, select='all'):
     """Collect particle information for member_search()"""
     memlog('Initializing member search, loading particles')
     self._determine_ptypes()
     self.load_particle_data(select=select)
     memlog('Loaded particle data')
     self._assign_particle_counts()
     if select == 'all': self._load_gas_data()
     else: self._load_gas_data(select=select[self.ptypes.index('gas')])
     if select == 'all': self._load_star_data()
     else: self._load_star_data(select=select[self.ptypes.index('star')])
     if self.blackholes:
         if select == 'all': self._load_bh_data()
         else: self._load_bh_data(select=select[self.ptypes.index('bh')])
     memlog('Loaded baryon data')
Example #13
def find_progens(pid_current, pid_target, gid_current, gid_target, pid_hash, npart_target, 
                 n_most=None, min_in_common=0.1, nproc=1, reverse_match=False):
    """Find most massive and second most massive progenitor/descendants.
    
    Parameters
    ----------
    pids_current : np.ndarray
       particle IDs from the current snapshot.
    pids_target : np.ndarray
       particle IDs from the previous/next snapshot.
    gids_current : np.ndarray
       group IDs from the current snapshot.
    gids_target : np.ndarray
       group IDs from the previous/next snapshot.
    pid_hash : np.ndarray
       indexes for the start of each group in pids_current
    n_most : int 
        Find n_most most massive progenitors/descendants, None for all.
    min_in_common : float 
        Require >this fraction of parts in common between object and progenitor to be a valid progenitor.
    nproc : int
        Number of cores for multiprocessing. Note that this doesn't help much since most of the time is spent in sorting.
    reverse_match : bool
        
    """

    # Sort the progenitor IDs and object numbers for faster searching
    isort_target = np.argsort(pid_target)
    pid_target = pid_target[isort_target]  # target particles' IDs
    gid_target = gid_target[isort_target]  # galaxy IDs for the target particles
    ngal_curr = len(pid_hash)-1  # number of galaxies to find progens/descendants for
    memlog('Progen doing %d groups (nproc=%d)'%(ngal_curr,nproc))

    # Loop over current objects to find progens for each
    if nproc>1:
        prog_index_tmp, match_frac_tmp = \
                   zip(*Parallel(n_jobs=nproc)(delayed(_find_target_group)\
                   (pid_current[pid_hash[ig]:pid_hash[ig+1]],pid_target,gid_target,
                    npart_target,min_in_common,return_N=n_most,reverse_match=reverse_match) \
                  for ig in range(ngal_curr)))
        if n_most is not None:
            prog_index = np.array(prog_index_tmp,dtype=np.int32)
            match_frac = np.array(match_frac_tmp, dtype=np.float64)  # np.float was removed in NumPy 1.24
        else:
            prog_index = np.array(prog_index_tmp,dtype=object)
            match_frac = np.array(match_frac_tmp,dtype=object)
    else:
        if n_most is not None:
            prog_index = np.zeros((ngal_curr,n_most),dtype=np.int32)
            match_frac = np.zeros((ngal_curr, n_most), dtype=np.float64)
        else:
            prog_index = np.zeros(ngal_curr,dtype=object)
            match_frac = np.zeros(ngal_curr,dtype=object)
        for ig in range(ngal_curr):
            prog_index[ig], match_frac[ig] = \
                    _find_target_group(pid_current[pid_hash[ig]:pid_hash[ig+1]],pid_target,
                                       gid_target,npart_target,min_in_common,return_N=n_most, 
                                       reverse_match=reverse_match)

   
    return prog_index, match_frac
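The per-group matching is delegated to _find_target_group, which is not shown here. As a rough illustration of the idea described in the docstring (pick the target group that shares the largest fraction of particle IDs, subject to min_in_common), here is a simplified standalone sketch; it is a toy helper, not the caesar implementation:

import numpy as np

def match_group_to_target(pid_group, pid_target_sorted, gid_target_sorted,
                          min_in_common=0.1):
    """Return the target group ID sharing the largest fraction of particle IDs
    with pid_group, or -1 if the best match falls below min_in_common.
    (Hypothetical helper, not caesar's _find_target_group.)"""
    # locate each particle of the current group in the sorted target ID list
    idx = np.searchsorted(pid_target_sorted, pid_group)
    idx = np.clip(idx, 0, len(pid_target_sorted) - 1)
    found = pid_target_sorted[idx] == pid_group  # True where the ID exists in the target
    if not np.any(found):
        return -1
    # count how many matched particles land in each target group
    matched_gids, counts = np.unique(gid_target_sorted[idx[found]], return_counts=True)
    best = np.argmax(counts)
    frac = counts[best] / len(pid_group)
    return matched_gids[best] if frac > min_in_common else -1

# toy usage with made-up IDs
pid_target = np.array([3, 7, 1, 9, 4, 8])
gid_target = np.array([0, 0, 1, 1, 1, 2])
order = np.argsort(pid_target)
print(match_group_to_target(np.array([1, 4, 9, 5]), pid_target[order], gid_target[order]))  # -> 1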
Example #14
def generate_ssp_table_bc03(
        ssp_lookup_file,
        Zsol=Solar['total'],
        return_table=False,
        model_dir='/home/rad/caesar/bc03/models/Padova1994/chabrier'):
    '''Generates an SSP lookup table from BC03 data.'''
    import pandas as pd

    mylog.info('Generating BC03 SSP lookup table %s' % (ssp_lookup_file))
    mylog.info('Using BC03 files in: %s' % (model_dir))

    if 'Padova1994' in model_dir:
        metallicities = np.array([0.0001, 0.0004, 0.004, 0.008, 0.02,
                                  0.05])  # for Padova 1994 library
    elif 'Padova2000' in model_dir:
        metallicities = np.array(
            [0.0001, 0.0004, 0.004, 0.008, 0.019,
             0.03])  # for Padova 2000 library (not recommended)
    ssp_logZ = np.log10(metallicities)

    for iZ in range(len(metallicities)):
        ised_file = '%s/bc2003_hr_m%d2_chab_ssp.ised_ASCII' % (
            model_dir, iZ + 2)  # must be un-gzipped!

        # read in the entire file
        with open(ised_file, 'r') as f:
            data = f.read().split()

        # get ages
        nage = int(data[0])
        ssp_ages = np.array(
            [np.log10(max(float(x), 1.e5)) for x in data[1:nage + 1]])

        # get wavelengths
        count = len(ssp_ages)
        while (data[count] != '1221' and
               data[count] != '6900'):  # look for possible BC03 nwave values
            count += 1
        nwave = int(data[count])
        wavelengths = np.array([(float(x))
                                for x in data[count + 1:nwave + count + 1]])
        count = nwave + count + 1

        # initialize arrays
        if iZ == 0:
            ssp_spectra = np.zeros((nage * len(metallicities), nwave))
            mass_remaining = []

        # get spectra
        for iage in range(nage):
            spec = np.array([(float(x))
                             for x in data[count + 1:nwave + count + 1]])
            ssp_spectra[iZ * nage + iage] = spec * wavelengths**2 / CLIGHT_AA
            count += nwave + 54  # skip past the unknown 52 extra numbers at the end of each line

        # get mass remaining
        m_file = '%s/bc2003_hr_m%d2_chab_ssp.4color' % (model_dir, iZ + 2)
        logage, msleft = np.loadtxt(m_file, usecols=(0, 6), unpack=True)
        msleft = np.append(np.array([1.0]), msleft)
        mass_remaining.append(msleft)

    mass_remaining = np.asarray(mass_remaining).flatten()

    with h5py.File(ssp_lookup_file, 'w') as hf:
        hf.create_dataset('fsps_options', data=model_dir)
        hf.create_dataset('ages', data=ssp_ages)
        hf.create_dataset('logZ', data=ssp_logZ)
        hf.create_dataset('mass_remaining', data=mass_remaining)
        hf.create_dataset('wavelengths', data=wavelengths)
        hf.create_dataset('spectra', data=ssp_spectra)
    memlog('Generated BC03 lookup table with %d ages and %d metallicities' %
           (len(ssp_ages), len(ssp_logZ)))

    if return_table:
        return ssp_ages, ssp_logZ, mass_remaining, wavelengths, ssp_spectra
Example #15
def generate_ssp_table_bpass(
        ssp_lookup_file,
        Zsol=Solar['total'],
        return_table=False,
        model_dir='/home/rad/caesar/BPASSv2.2.1_bin-imf_chab100'):
    '''Generates an SSP lookup table from BPASS.'''
    from hoki import load
    from glob import glob

    mylog.info('Generating BPASS SSP lookup table %s' % (ssp_lookup_file))
    mylog.info('Using BPASS files in: %s' % (model_dir))

    specfiles = glob(model_dir + '/spectra*')  # these must be gunzipped
    smfiles = glob(model_dir + '/starmass*')  # these must be gunzipped
    output_temp = load.model_output(specfiles[0])
    #output_temp = output_temp[(output_temp.WL>LAMBDA_LO)&(output_temp.WL<LAMBDA_HI)]  # restrict wavelength range for faster calculations
    #print(specfiles[0],output_temp)

    ages = np.array([float(a) for a in output_temp.columns[1:]])
    age_mask = (10**ages / 1e9) < 18  # Gyr
    ages = ages[age_mask]

    wavelengths = output_temp['WL'].values
    metallicities = np.array([None] * len(specfiles))

    for i, mod in enumerate(specfiles):  # parse metallicities from filenames
        try:
            metallicities[i] = float('0.' + mod[-7:-4])
        except ValueError:  # handle the em5=1e-5 and em4=1e-4 cases
            metallicities[i] = 10**-float(mod[-5])

    # sort by increasing metallicity
    Z_idx = np.argsort(metallicities)
    metallicities = metallicities[Z_idx].astype(float)

    ssp_spectra = np.zeros((len(ages) * len(metallicities), len(wavelengths)))
    for iZ, mod in enumerate(np.array(specfiles)[Z_idx]):
        output = load.model_output(mod)
        #output = output[(output.WL>LAMBDA_LO)&(output.WL<LAMBDA_HI)]  # restrict wavelength range for faster calculations
        for iage, a in enumerate(ages):
            j = iZ * len(ages) + iage
            ssp_spectra[j] = output[str(a)].values
            ssp_spectra[j] *= wavelengths**2 / CLIGHT_AA  # convert from per AA to per Hz

    mass_remaining = []
    for i, mod in enumerate(np.array(smfiles)[Z_idx]):
        output = load.model_output(mod)
        mass_remaining.append(output['stellar_mass'].values)
    mass_remaining = np.asarray(mass_remaining).flatten() / 1.e6  # to Mo

    # convert units
    ssp_ages = ages  # log yr
    ssp_logZ = np.log10(metallicities)
    ssp_spectra /= 1e6  # to Msol
    #print(np.shape(mass_remaining),mass_remaining)

    with h5py.File(ssp_lookup_file, 'w') as hf:
        hf.create_dataset('fsps_options', data=model_dir)
        hf.create_dataset('ages', data=ssp_ages)
        hf.create_dataset('logZ', data=ssp_logZ)
        hf.create_dataset('mass_remaining', data=mass_remaining)
        hf.create_dataset('wavelengths', data=wavelengths)
        hf.create_dataset('spectra', data=ssp_spectra)
    memlog('Generated BPASS lookup table with %d ages and %d metallicities' %
           (len(ssp_ages), len(ssp_logZ)))

    if return_table:
        return ssp_ages, ssp_logZ, mass_remaining, wavelengths, ssp_spectra
Example #16
    def init_bands(self):
        """Initialization bands to compute."""

        import fsps
        if isinstance(self.band_names, str):
            self.band_names = [self.band_names]
        if self.band_names[0] == 'all':
            self.band_names = fsps.list_filters()
        elif self.band_names[0] == 'uvoir':
            self.band_names = []
            for ib, b in enumerate(fsps.list_filters()):
                band = fsps.filters.get_filter(
                    b)  # look up characteristics of desired band
                band_wave = band.transmission[0]  # filter wavelengths
                band_trans = band.transmission[1]  # filter response function
                meanwave = np.sum(
                    band.transmission[0] * band.transmission[1]) / np.sum(
                        band.transmission[1])
                if meanwave < 50000: self.band_names.append(b)
        else:
            # collect all filters containing the input string(s)
            allfilters = fsps.list_filters()
            mybands = []
            for b in self.band_names:  # check that requested bands are actually available
                for b_all in allfilters:
                    if b in b_all:
                        if b == b_all:
                            mybands.append(b_all)  # if exact match, add
                        elif len(b) > 3:
                            mybands.append(
                                b_all
                            )  # avoid adding matching short band names (e.g. 'u')
            if len(mybands) == 0:
                assert b in allfilters, 'Band %s not found among available FSPS filters! Call fsps.list_filters() to list filters.' % self.band_names
            self.band_names = mybands
        # V band is always computed, so that one has A_V (= V_dust - V_nodust)
        if 'v' not in self.band_names:
            self.band_names.append('v')

        # Madau IGM attenuation is applied directly to rest-frame bandpasses only when computing apparent magnitudes; compute this curve here for specific redshift
        redshift = self.obj.simulation.redshift
        if self.use_cosmic_ext:
            from synphot import etau_madau  # see synphot.readthedocs.io/en/latest/synphot/tutorials.html
            extcurve = etau_madau(self.ssp_wavelengths * (1. + redshift),
                                  redshift)
            cosmic_ext = extcurve(self.ssp_wavelengths)
        else:
            cosmic_ext = np.ones(len(self.ssp_wavelengths))

        # set up band information
        nbands = len(self.band_names)
        self.band_meanwave = np.zeros(nbands, dtype=MY_DTYPE)
        self.band_indexes = np.zeros(nbands + 1, dtype=np.int32)
        self.band_ftrans = np.empty(0, dtype=MY_DTYPE)
        self.band_iwave0 = np.zeros(nbands, dtype=np.int32)
        self.band_iwave1 = np.zeros(nbands, dtype=np.int32)
        self.band_indz = np.zeros(nbands + 1, dtype=np.int32)
        self.band_ztrans = np.empty(0, dtype=MY_DTYPE)
        self.band_iwz0 = np.zeros(nbands, dtype=np.int32)
        self.band_iwz1 = np.zeros(nbands, dtype=np.int32)
        for ib, b in enumerate(self.band_names):
            band = fsps.filters.get_filter(
                b)  # look up characteristics of desired band
            band_wave = band.transmission[0]  # filter wavelengths
            band_trans = band.transmission[1]  # filter response function
            self.band_meanwave[ib] = np.sum(
                band.transmission[0] * band.transmission[1]) / np.sum(
                    band.transmission[1])
            # Set up transmission curve in region probed by rest-frame band
            ind = np.where((self.ssp_wavelengths > band_wave[0])
                           & (self.ssp_wavelengths < band_wave[-1]))[
                               0]  # indices of wavelengths in the band
            self.band_iwave0[ib] = ind[0]
            self.band_iwave1[ib] = ind[-1] + 1
            ftrans = np.interp(self.ssp_wavelengths[ind], band_wave,
                               band_trans)  # transmission at those wavelengths
            dnu = (CLIGHT_AA / self.ssp_wavelengths[ind[0]:ind[-1] + 1] -
                   CLIGHT_AA / self.ssp_wavelengths[ind[0] + 1:ind[-1] + 2])  # convert to delta-nu
            #dnu = self.ssp_wavelengths[ind[0]+1:ind[-1]+2] - self.ssp_wavelengths[ind[0]:ind[-1]+1]  # delta-lambda
            self.band_ftrans = np.append(self.band_ftrans, ftrans * dnu)
            self.band_indexes[ib + 1] = len(self.band_ftrans)
            # Now set up band for apparent mag computation
            # We will blueshift the band, corresponding to redshifting the intrinsic spectrum
            ind = np.where(
                (self.ssp_wavelengths > band_wave[0] *
                 self.obj.simulation.scale_factor)
                & (self.ssp_wavelengths < band_wave[-1] *
                   self.obj.simulation.scale_factor)
            )[0]  # indices of wavelengths for redshifted rest-frame spectrum (i.e. blueshifted band)
            self.band_iwz0[ib] = ind[0]
            self.band_iwz1[ib] = ind[-1] + 1
            ftrans = np.interp(self.ssp_wavelengths[ind],
                               band_wave * self.obj.simulation.scale_factor,
                               band_trans)  # transmission at those wavelengths
            dnu = (CLIGHT_AA / self.ssp_wavelengths[ind[0]:ind[-1] + 1] -
                   CLIGHT_AA / self.ssp_wavelengths[ind[0] + 1:ind[-1] + 2])  # convert to delta-nu
            #dnu = self.ssp_wavelengths[ind[0]+1:ind[-1]+2] - self.ssp_wavelengths[ind[0]:ind[-1]+1]  # delta-lambda
            self.band_ztrans = np.append(
                self.band_ztrans, np.array(ftrans * dnu * cosmic_ext[ind]))
            self.band_indz[ib + 1] = len(self.band_ztrans)

        memlog('Computing %d bands: %s' %
               (len(self.band_names), self.band_names))
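The band_ftrans and band_ztrans arrays built above are interpolated filter transmissions weighted by the frequency-bin width dnu on the SSP wavelength grid. A standalone toy sketch of the same construction with a hypothetical flat filter and wavelength grid; the bandpass-averaged flux at the end uses the standard sum(f_nu * T dnu) / sum(T dnu) convention, which is not part of the snippet above:

import numpy as np

CLIGHT_AA = 2.998e18  # speed of light in Angstrom/s (assumed constant name)

# Hypothetical SSP wavelength grid and a toy top-hat filter, for illustration only
ssp_wavelengths = np.linspace(1000., 20000., 5000)  # Angstrom
band_wave = np.linspace(5000., 6000., 50)           # filter wavelengths
band_trans = np.ones_like(band_wave)                # flat response

# indices of SSP wavelengths covered by the band, as in init_bands
ind = np.where((ssp_wavelengths > band_wave[0]) & (ssp_wavelengths < band_wave[-1]))[0]

# interpolate the response onto the SSP grid and weight by delta-nu
ftrans = np.interp(ssp_wavelengths[ind], band_wave, band_trans)
dnu = (CLIGHT_AA / ssp_wavelengths[ind[0]:ind[-1] + 1] -
       CLIGHT_AA / ssp_wavelengths[ind[0] + 1:ind[-1] + 2])
band_weights = ftrans * dnu

# bandpass-averaged flux for a (toy, flat) spectrum f_nu on the same grid
f_nu = np.ones(len(ssp_wavelengths))
f_band = np.sum(f_nu[ind] * band_weights) / np.sum(band_weights)
print(f_band)  # -> 1.0 for a flat spectrum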