Example #1
def __init__(self, path, reference_list_path='reference_list.txt',
             out_db='mc_results.db'):
    '''Loads results*.pik files and caches them in a SQLite db.'''
    # get all results*.pik files
    files = glob.glob(os.path.join(path, 'results*.pik'))
    # make db for quick access
    if not os.path.exists(out_db):
        reference = np.loadtxt(reference_list_path)
        # create a new db
        self.db = numpy_sql(out_db)
        self.conn = self.db.cursor()
        for f in files:
            # get the result number from the file name
            num = ''.join(re.findall(r'\d', f))
            self.conn.execute('''CREATE TABLE s%s (Real_SFH real,
            Real_age real, Real_Z real, chains array, log_post array)''' % num)
            # pickle files must be opened in binary mode
            results = pik.load(open(f, 'rb'))
            # put in db
            for res in results:
                if len(results[res]) < 1:
                    continue
                samp = results[res][0]
                row = (results[res][2][0], results[res][2][1],
                       results[res][2][2], adapt_array(samp.flatchain),
                       adapt_array(samp.flatlnprobability))
                self.conn.execute('INSERT INTO s%s VALUES (?,?,?,?,?)' % num,
                                  row)
            self.conn.execute('CREATE UNIQUE INDEX i%s ON s%s (Real_SFH, Real_age, Real_Z)' % (num, num))
            self.db.commit()
    else:
        self.db = numpy_sql(out_db)
        self.conn = self.db.cursor()
    # collect the table names
    self.tables = []
    for i in self.conn.execute('select * from sqlite_master').fetchall():
        if i[0] == 'table':
            self.tables.append(i[1])
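
These snippets lean on numpy_sql, adapt_array, and convert_array from the project's util module, which are not shown. A minimal sketch of how such helpers are commonly built on sqlite3's adapter hooks; the names mirror the examples, but the bodies are assumptions, not the project's actual code:

import io
import sqlite3
import numpy as np

def adapt_array(arr):
    # serialize a numpy array into bytes for an 'array' column
    out = io.BytesIO()
    np.save(out, arr)
    return sqlite3.Binary(out.getvalue())

def convert_array(blob):
    # inverse of adapt_array: rebuild the numpy array from the stored bytes
    return np.load(io.BytesIO(blob))

def numpy_sql(db_path):
    # plain connection; ndarray values are adapted on INSERT, while reads
    # call convert_array by hand, as in the get_data example below
    sqlite3.register_adapter(np.ndarray, adapt_array)
    return sqlite3.connect(db_path)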
Example #2
def __init__(self, data, db_name='burst_dtau_10.db', have_dust=False,
             have_losvd=False):
    '''Normalizes the input spectra and opens the model database.'''
    self.has_dust = have_dust
    self.has_losvd = have_losvd
    self.db_name = db_name
    # check if data is the right type (not implemented)

    self.data = {}
    # rescale each spectrum so its mean flux is 1
    self.norm = {}
    self.norm_prior = {}
    for i in data:
        self.norm[i] = 1. / data[i][:, 1].mean()
        self.norm_prior[i] = np.log10(self.norm[i])
        self.data[i] = data[i].copy()
        self.data[i][:, 1] *= self.norm[i]
        if self.data[i].shape[1] == 3:
            # propagate the uncertainty
            self.data[i][:, 2] *= self.norm[i]
    self.db = util.numpy_sql(db_name)
    self._table_name = self.db.execute(
        'select * from sqlite_master').fetchall()[0][1]
    # record which models are available and how many galaxies to fit
    self.models = {'burst': list(data.keys())}
    # get the param range (tau, age, metal)
    self.param_range = []
    for column in ['tau', 'age', 'metalicity']:
        self.param_range.append(np.sort(np.ravel(self.db.execute(
            'Select DISTINCT %s FROM %s' %
            (column, self._table_name)).fetchall())))
    self._hull = None
    # make resolution
    self._define_resolu()
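
A hedged usage sketch for the constructor above. BurstFitter is a hypothetical class name (the snippet only shows the method), and data is assumed to map galaxy ids to (N, 2) or (N, 3) arrays of wavelength, flux, and optional error, matching how __init__ indexes it:

import numpy as np

# one fake galaxy spectrum with columns (wavelength, flux, error)
wave = np.linspace(3200, 9500, 500)
flux = np.full_like(wave, 5.0)
err = np.full_like(wave, 0.1)
data = {'gal_0': np.column_stack([wave, flux, err])}

fitter = BurstFitter(data, db_name='burst_dtau_10.db')  # hypothetical name
# after init, the fluxes are rescaled so their mean is 1
assert np.isclose(fitter.data['gal_0'][:, 1].mean(), 1.0)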
Example #3
def random_exp_gal(db_path='/home/thuso/Phd/experements/hierarical/LRG_Stack/exp_dtau_10.db'):
    '''Draws random exponential model spectra, without any background model.'''
    db = util.numpy_sql(db_path)
    param = get_points(db)
    data = tri_lin_interp(db, param, make_param_range(db))
    # Make noise (not implemented yet)

    return data, param
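
The '# Make noise' stub in both random_*_gal functions is never filled in. One plausible way to do it, as a sketch: add Gaussian noise at an assumed signal-to-noise ratio and return the per-pixel uncertainty as a third column (add_noise and snr are illustrative, not part of the project):

import numpy as np

def add_noise(data, snr=20.0, seed=None):
    # data: (N, 2) array of (wavelength, flux); snr is an assumed constant
    rng = np.random.default_rng(seed)
    noisy = data.copy()
    sigma = np.abs(noisy[:, 1]) / snr
    noisy[:, 1] += rng.normal(0.0, sigma)
    # append the per-pixel uncertainty as a third column
    return np.column_stack([noisy, sigma])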
Example #4
def random_burst_gal(db_path='/home/thuso/Phd/experements/hierarical/LRG_Stack/burst_dtau_10.db'):
    '''Draws random burst model spectra, without any background model.'''
    db = util.numpy_sql(db_path)
    param = get_points(db)
    data = tri_lin_interp(db, param[['tau', 'age', 'metalicity']],
                          make_param_range(db))
    # Make noise (not implemented yet)

    # restrict to the 3200-9500 Angstrom wavelength range
    data = data[nu.where(nu.logical_and(data[:, 0] >= 3200,
                                        data[:, 0] <= 9500))]

    return data, param
Example #5
def make_csp_lib(csp_type, csp_num=10, save_path='.'):
    '''Makes a CSP library with csp_num steps of tau from 0 to 5 Gyr.
    (Multicore processing is currently disabled; see the Pool line below.)
    '''
    # pick the CSP generator
    csp_type = csp_type.lower()
    assert csp_type in ['burst', 'exp'], "csp_type must be 'burst' or 'exp'"
    if csp_type == 'burst':
        model = make_burst
    else:
        model = make_exp
    #pool = Pool()
    tau = nu.linspace(0, 5, csp_num)
    models = map(model, tau)
    # save spectra to database
    save_name = os.path.join(save_path, '%s_dtau_%d.db' % (csp_type, csp_num))
    conn = numpy_sql(save_name)
    c = conn.cursor()
    # create table
    c.execute('''CREATE TABLE %s (imf text, model text, tau real, age real,
    metalicity real, spec array)''' % csp_type)

    for csp in models:
        for meta_gal in csp:
            for age in nu.log10(1 + meta_gal.ages):
                if age == 0:
                    # avoid a zero log age: floor the age at 10**5 yr
                    age = nu.log10(1 * 10**5)
                if nu.isclose(10**(age - 9), 20):
                    data = nu.vstack((meta_gal.ls, meta_gal.get_sed(20))).T
                else:
                    data = nu.vstack((meta_gal.ls,
                                      meta_gal.get_sed(10**(age - 9)))).T
                # reverse the row order (note: this slice drops row 0)
                data = data[-1:0:-1]
                if 'length' in meta_gal.meta_data:
                    length = float(meta_gal.meta_data['length'])
                elif 'tau' in meta_gal.meta_data:
                    length = float(meta_gal.meta_data['tau'])
                else:
                    # SSPs have no burst length
                    length = 0.
                insert = (meta_gal.meta_data['imf'],
                          meta_gal.meta_data['model'], length, age,
                          nu.log10(float(meta_gal.meta_data['met'])), data)
                c.execute('INSERT INTO %s VALUES (?,?,?,?,?,?)' % csp_type,
                          insert)
            # save after each metallicity
            conn.commit()
    conn.close()
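
The docstring mentions multicore processing, but the Pool is commented out and map runs serially. A sketch of what re-enabling it could look like, assuming make_burst and make_exp are picklable module-level functions (build_models is an illustrative helper, not project code):

from multiprocessing import Pool

import numpy as nu

def build_models(model, csp_num=10, processes=None):
    # evaluate the CSP generator over the tau grid in parallel;
    # processes=None lets Pool pick a worker count
    tau = nu.linspace(0, 5, csp_num)
    with Pool(processes) as pool:
        return pool.map(model, tau)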
Example #6
def get_data(db_path):
    '''Retrieves data from the db.
    Returns params (n_ssp, n_param), spectra (n_ssp, n_wavelength),
    and the shared wavelength grid.'''

    db = util.numpy_sql(db_path)
    # get table name
    table_name = db.execute('select * from sqlite_master').fetchall()[0][1]
    # fetch all rows
    spec, param = [], []
    for imf, model, tau, age, metal, buf_spec in db.execute(
            'SELECT * From %s' % table_name):
        spec.append(util.convert_array(buf_spec)[:, 1])
        param.append([tau, age, metal])

    param = np.array(param)
    spec = np.array(spec)
    # assumes every row shares one wavelength grid (taken from the last row)
    return param, spec, util.convert_array(buf_spec)[:, 0]
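
A short usage sketch for get_data; the db path is illustrative and assumes a library written by make_csp_lib above:

# illustrative path; any db written by make_csp_lib works
param, spec, wave = get_data('burst_dtau_10.db')

print(param.shape)  # (n_ssp, 3): tau, age, metalicity
print(spec.shape)   # (n_ssp, n_wavelength)
print(wave.shape)   # (n_wavelength,)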
Example #7
def lik(self, param, bins, return_model=False, normalize=True):
    '''Yields the log likelihood for the burst model, one galaxy at a time.'''
    for gal in param[bins]:
        # get the interpolated spectrum; first check if points are in range
        columns = ['tau', 'age', 'metalicity']
        if self.is_in_hull(param[bins][gal][columns]):
            # check that the db connection still works
            try:
                self.db.execute(
                    'select * from sqlite_master').fetchall()[0][1]
            except Exception:  # e.g. sqlite3.ProgrammingError
                # make a new database connection
                self.db = util.numpy_sql(self.db_name)

            spec = tri_lin_interp(self.db, param[bins][gal][columns],
                                  self.param_range)
        else:
            if return_model:
                yield -np.inf, gal, []
            else:
                yield -np.inf, gal
            continue
        if normalize:
            model = {'wave': spec[:, 0], 0: spec[:, 1]}
        else:
            model = {'wave': spec[:, 0],
                     0: spec[:, 1] *
                     10**param[bins][gal]['normalization'].iat[0]}
        # Redshift
        model['wave'] = ag.redshift(model['wave'],
                                    param[bins][gal]['redshift'])
        # Dust
        if self.has_dust:
            columns = ['$T_{bc}$', '$T_{ism}$']
            model = ag.dust(param[bins][gal][columns].iloc[0], model)

        # LOSVD
        if self.has_losvd:
            # wave range for convolution
            wave_range = [self.data[gal][:, 0].min(),
                          self.data[gal][:, 0].max()]
            # the resolution must already have been calculated
            columns = ['$\\sigma$', '$V$', '$h_3$', '$h_4$']
            send_param = param[bins][gal][columns].iloc[0]
            model = ag.LOSVD(model, send_param, wave_range,
                             self.resolu[gal])
        # match data wavelengths with the model
        try:
            model = ag.data_match(self.data[gal], model)
        except Exception:
            model = ag.data_match(self.data[gal], model, rebin=False)
        # calculate the MAP normalization
        norm = ag.normalize(self.data[gal], model[0])
        self.norm_prior[gal] = np.log10(norm)
        if normalize:
            model[0] *= norm
        # calculate the likelihood
        if self.data[gal].shape[1] >= 3:
            # has uncertainty
            out_lik = stats_dist.norm.logpdf(
                self.data[gal][:, 1], model[0], self.data[gal][:, 2])
        else:
            # no uncertainty or bad entry
            out_lik = stats_dist.norm.logpdf(
                self.data[gal][:, 1], model[0])
        if return_model:
            yield out_lik.sum(), gal, model
        else:
            yield out_lik.sum(), gal
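
Since lik is a generator, results stream one galaxy at a time. A hedged consumption sketch; fitter, param, and the 'bin_0' key are assumptions carried over from the earlier examples:

import numpy as np

# fitter is an instance of the class owning lik (hypothetical); param is
# assumed to be a nested mapping, bin -> galaxy -> pandas rows, as the
# subscripting in lik implies; 'bin_0' is an illustrative bin key
total_log_lik = 0.0
for log_lik, gal in fitter.lik(param, 'bin_0'):
    if not np.isfinite(log_lik):
        # this galaxy fell outside the model grid's convex hull
        continue
    total_log_lik += log_lik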