Example #1
    def run(self, file, show_output = False, run_sampling = True):

        # GW posterior calculation
        samples = get_samples(file = file)
        self.pLD = gaussian_kde(samples['luminosity_distance'])
        self.p_pos = pos_posterior(samples['ra'],samples['dec'], number = 1)
        probs = []
        self.catalog = self.GalInABox2(catalog='GLADE')
        for ra, dec in zip(self.catalog['RAJ2000'], self.catalog['DEJ2000']):
            probs.append(np.exp(self.p_pos.score_samples([[np.deg2rad(ra),np.deg2rad(dec)]]))[0]) # can this be optimized?
        self.catalog['ppos'] = np.array(probs)
        # Drop galaxies lying too far outside the position posterior
        self.catalog = self.catalog[self.catalog['ppos'] > 0.01] # empirical threshold!
        # Drop galaxies outside the distance range
        self.dropgal()
        # run
        job = cpnest.CPNest(self, verbose=1, nthreads=4, nlive=1000, maxmcmc=100)
        if run_sampling:
            job.run()
            posteriors = job.get_posterior_samples(filename = 'posterior.dat')
        # z posterior calculation
        posteriors = np.genfromtxt('posterior.dat', names = True)
        just_z = [post[0] for post in posteriors]
        self.pdfz = gaussian_kde(just_z)

        # Probability calculation and galaxy sorting
        prob = self.catalog['z'].apply(self.pdfz)
        prob = prob/prob.max()
        self.catalog['p'] = prob
        self.catalog = self.catalog.sort_values('p', ascending = False)
        # self.get_names()
        self.catalog.to_csv('rank.txt', header=True, index=None, sep='\t', mode='w')
        if show_output:
            self.plot_outputs()
Example #2
 def setUp(self):
     self.model = GaussianModel()
     self.work = cpnest.CPNest(self.model,
                               verbose=2,
                               Nlive=500,
                               maxmcmc=2000)
     self.work.run()
Example #3
 def setUp(self):
     self.model = GaussianModel()
     self.work = cpnest.CPNest(self.model,
                               verbose=1,
                               nlive=1000,
                               nthreads=2,
                               maxmcmc=200,
                               poolsize=100)
     self.work.run()
Example #4
 def run(self):
     out_dir = os.path.dirname(os.path.abspath(self.checkpoint_file))
     if self._sampler is None:
         self._sampler = cpnest.CPNest(self.model_call, verbose=1,
                                       output=out_dir,
                                       nthreads=self.nthreads,
                                       nlive=self.nlive,
                                       maxmcmc=self.maxmcmc, resume=True)
     res = self._sampler.run()
Example #5
 def setUp(self):
     self.model = GaussianModel()
     self.work = cpnest.CPNest(self.model,
                               verbose=2,
                               Nlive=500,
                               Nthreads=4,
                               maxmcmc=200,
                               balance_samplers=True)
     self.work.run()
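
Several of the test snippets in this collection (Examples #2, #3 and #5) instantiate a GaussianModel that is defined elsewhere in the cpnest test suite. A minimal sketch of such a model, assuming the standard cpnest.model.Model interface (class-level names and bounds plus a log_likelihood method), might look like the following; the parameter name and bounds are illustrative only.

from cpnest.model import Model
import numpy as np

class GaussianModel(Model):
    # hypothetical one-parameter Gaussian toy model (a sketch, not the test suite's definition)
    names = ['x']              # sampled parameter names
    bounds = [[-10.0, 10.0]]   # flat prior range for 'x'

    def log_likelihood(self, param):
        # unit-variance Gaussian likelihood centred on zero
        return -0.5 * param['x']**2 - 0.5 * np.log(2.0 * np.pi)
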
Example #6
    def run(self, file, show_output=False, run_sampling=True):

        # posteriors GW calculation
        samples = get_samples(file=file)
        self.pLD = gaussian_kde(samples['luminosity_distance'])
        self.p_pos = pos_posterior(samples['ra'], samples['dec'], number=1)
        probs = []
        for ra, dec in zip(self.catalog['RA'], self.catalog['Dec']):
            probs.append(np.exp(self.p_pos.score_samples([[ra, dec]]))[0])
        self.catalog['ppos'] = np.array(probs)
        # Dropping galaxies outside the confident volume
        # Position
        self.catalog = self.catalog[self.catalog['ppos'] > 0.01]  # empirical!
        # Distance
        self.dropgal()
        # run
        job = cpnest.CPNest(self,
                            verbose=1,
                            nthreads=4,
                            nlive=1000,
                            maxmcmc=100)
        if run_sampling:
            job.run()
            posteriors = job.get_posterior_samples(filename='posterior.dat')
        # z posteriors calculation
        posteriors = np.genfromtxt('posterior_backup.dat', names=True)
        just_z = [post[0] for post in posteriors]
        self.pdfz = gaussian_kde(just_z)

        # Probability calculation and galaxy sorting
        prob = self.catalog['z'].apply(self.pdfz)
        prob = prob / prob.max()
        self.catalog['p'] = prob
        self.catalog = self.catalog.sort_values('p', ascending=False)
        self.catalog.to_csv('rank' + self.detection_id + '.txt',
                            header=True,
                            index=None,
                            sep='&',
                            mode='w')
        if show_output:
            self.plot_outputs()
Example #7
 def setUp(self, xmax=20):
     self.model = HalfGaussianModel(xmax=xmax)
     self.work = cpnest.CPNest(self.model, verbose=1, nlive=500, nthreads=1)
     self.work.run()
Example #8
        m = p['m']
        lp -= 0.5 * (
            (m - self.mmu) /
            self.msigma)**2  # no need for normalisation constant on the prior

        return lp


nlive = 1024  # number of live points
maxmcmc = 1024  # maximum MCMC chain length
nthreads = 1  # use one CPU core

# set up the algorithm
work = cpnest.CPNest(StraightLineModel(data, x, straight_line, sigma),
                     verbose=0,
                     nthreads=nthreads,
                     nlive=nlive,
                     maxmcmc=maxmcmc)

# run the algorithm
work.run()

logZcpnest = work.NS.logZ  # value of log Z
infogaincpnest = work.NS.state.info  # value of the information gain
logZerrcpnest = np.sqrt(
    infogaincpnest / nlive)  # estimate of the statistical uncertainty on logZ

# get the null log likelihood (evidence that the data is Gaussian noise with zero mean and given standard deviation)
logZnull = work.user.log_likelihood({'m': 0., 'c': 0.})

print('Marginalised evidence is {} ± {}'.format(logZcpnest, logZerrcpnest))
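
Since the example above computes both logZcpnest and logZnull, a natural follow-up (not part of the original snippet) is to quote the log Bayes factor of the straight-line model against the noise-only hypothesis; a minimal sketch reusing those two quantities:

# log Bayes factor: straight-line model versus the zero-mean noise-only hypothesis
logBcpnest = logZcpnest - logZnull
print('log Bayes factor (signal vs. noise) is {}'.format(logBcpnest))
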
Example #9

# import cpnest
import cpnest

print('CPNest version: {}'.format(cpnest.__version__))

nlive = 1024  # number of live points
maxmcmc = 1024  # maximum MCMC chain length
nthreads = 2  # use two CPU cores

# set up the algorithm
work = cpnest.CPNest(MBHBModel(data, time, model_mbhb, sigma),
                     verbose=0,
                     nthreads=nthreads,
                     nlive=nlive,
                     maxmcmc=maxmcmc)
# run the algorithm
from datetime import datetime

t0 = datetime.now()
work.run()
t1 = datetime.now()

timecpnest = (t1 - t0)
print("Time taken to run 'CPNest' is {} seconds".format(timecpnest.total_seconds()))

logZcpnest = work.NS.logZ  # value of log Z
Example #10
def main():
    parser = argparse.ArgumentParser(description="Quasar proper motions code")

    parser.add_argument("parameter_file",
                        metavar="Parameter file",
                        type=str,
                        help=".par file")

    args = parser.parse_args()

    params = C.set_params(args.parameter_file)

    U.assert_config_params(params)

    C.check_output_dir(params['General']['output_dir'])

    C.record_config_params(params)

    data = AD.AstrometricDataframe()

    AD.load_astrometric_data(data, params=params['Data'])

    astrometric_model = S.model(data,
                                logL_method=params['MCMC']['logL_method'],
                                prior_bounds=params['MCMC']['prior_bounds'])

    nest = cpnest.CPNest(astrometric_model,
                         output=params['General']['output_dir'],
                         nthreads=params['MCMC']['nthreads'],
                         nlive=params['MCMC']['nlive'],
                         maxmcmc=params['MCMC']['maxmcmc'],
                         resume=True,
                         verbose=params['General']['verbose'])

    nest.run()

    nested_samples = nest.get_nested_samples(filename=None)
    np.savetxt(os.path.join(params['General']['output_dir'],
                            'nested_samples.dat'),
               nested_samples.ravel(),
               header=' '.join(nested_samples.dtype.names),
               newline='\n',
               delimiter=' ')

    posterior_samples = nest.get_posterior_samples(filename=None)
    np.savetxt(os.path.join(params['General']['output_dir'],
                            'posterior_samples.dat'),
               posterior_samples.ravel(),
               header=' '.join(posterior_samples.dtype.names),
               newline='\n',
               delimiter=' ')

    A_limit = PP.post_process_results(
        posterior_file=os.path.join(params['General']['output_dir'],
                                    'posterior_samples.dat'),
        which_basis=astrometric_model.which_basis,
        Lmax=params['Data']['Lmax'],
        L=astrometric_model.overlap_matrix_Cholesky,
        pol=params['Post_processing']['pol'],
        limit=params['Post_processing']['limit'])

    U.export_data(data, A_limit, output=params['General']['output_dir'])

    if params['General']['plotting']:
        P.plot(data, output=params['General']['output_dir'])
Example #11
                                    self.catalog['DEJ2000'])
        ])

        logL_detected += log_p_det
        logL = logsumexp([logL_detected, logL_non_detected])

        return logL


if __name__ == '__main__':

    Gal_cat = GalInABox([190, 200], [-22, -17], u.deg, u.deg,
                        catalog='GLADE')  #[::100]
    M = completeness(Gal_cat)
    #    NGC4993 = Vizier.query_object('NGC4993', catalog = 'GLADE')[1].to_pandas()
    #    M = completeness(NGC4993)
    # M.dropgal(nsigma = 3)
    # print([zi for zi in M.catalog['z']])
    # import matplotlib.pyplot as plt
    # from mpl_toolkits.mplot3d import Axes3D
    # fig = plt.figure()
    # ax = fig.add_subplot(111, projection='3d')
    # S = ax.scatter(M.catalog['RAJ2000'], M.catalog['DEJ2000'], M.catalog['z'], c=M.catalog['Bmag'])
    # plt.colorbar(S)
    # plt.show()
    # exit()
    #    print (M.catalog)
    #    exit()
    job = cpnest.CPNest(M, verbose=2, nthreads=4, nlive=5000, maxmcmc=1000)
    job.run()
Example #12
            return -np.inf
        # Flat prior is assumed
        return 0.

    def log_likelihood(self, x):
        logL = 0.
        mu = x['mu']
        sigma = x['sigma']
        for e in self.events:
            logL += logsumexp(e.GWlogpost + Gaussexp(e.mag, mu, sigma))
        return logL


if __name__ == '__main__':

    mag_file = 'path/to/mags/'
    magnitudes = np.genfromtxt(mag_file, names=True)
    mags_array = magnitudes['mag']
    events = np.array([event() for _ in range(250)])  # instantiate one event container per simulated detection

    for i in range(250):
        j = i + 1
        path_to_data = folder + str(j)
        data = np.genfromtxt(path_to_data + '/galaxy_ranks.txt', names=True)
        events[i].mag = mags_array
        events[i].GWlogpost = data['logposterior']

    W = mu_sigma(events)
    job = cpnest.CPNest(W, verbose=1, nthreads=4, nlive=1000, maxmcmc=1024)
    job.run()
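
Example #12 relies on an event container, a Gaussexp helper and a folder path that are defined elsewhere in that script. A minimal sketch consistent with how they are used above (purely illustrative assumptions, not the original definitions):

folder = 'path/to/events/'  # hypothetical placeholder for the per-event data directory

class event:
    # simple container for per-event quantities
    def __init__(self, mag=None, GWlogpost=None):
        self.mag = mag
        self.GWlogpost = GWlogpost

def Gaussexp(x, mu, sigma):
    # log of an unnormalised Gaussian kernel, as its use inside log_likelihood suggests
    return -0.5 * ((x - mu) / sigma)**2
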
Example #13
    import pandas as pd
    data = pd.read_csv(opts.data, sep=',')
    if opts.region is None:
        data = data.to_records()
    else:
        data = data[data['denominazione_regione'].str.contains(
            opts.region)].to_records()

    time = 1.0 + np.linspace(0.0, data.shape[0], data.shape[0])
    M = diffusion_model(data, time, growth_model=opts.model)

    if 1:
        work = cpnest.CPNest(M,
                             verbose=2,
                             poolsize=opts.poolsize,
                             nthreads=opts.threads,
                             nlive=opts.nlive,
                             maxmcmc=opts.maxmcmc,
                             output=opts.output,
                             resume=1)
        work.run()
        print('Model evidence {0}'.format(work.NS.logZ))
        x = work.get_posterior_samples(filename='posterior.dat')
    else:
        x = np.genfromtxt(os.path.join(opts.output, 'posterior.dat'),
                          names=True)

    import matplotlib
    matplotlib.use("MACOSX")
    import matplotlib.dates as mdates
    import matplotlib.ticker as ticker
    import datetime as DT
Example #14
                self.omega.om = x['om']
                self.omega.ol = x['ol']
                # Prior calculation. Each coordinate is weighted with the probabilities
                # of the GW coordinates (the 'bananas'), and so is z.
                # For the time being, it is assumed Gaussian around an event.
                log_P_Z = np.log(gaussian(x['z'], Gal.z, Gal.z / 10.0))
                log_P_S = np.log(Schechter(x['M'], self.omega))
                log_P_RA = np.log(
                    gaussian(x['ra'], Gal.ra.rad, Gal.ra.rad / 100.))
                log_P_DEC = np.log(
                    gaussian(x['dec'], Gal.dec.rad, Gal.dec.rad / 100.))
                log_P_ComVol = np.log(
                    lal.ComovingVolumeElement(x['z'], self.omega))
                return log_P_S + log_P_Z + log_P_ComVol + log_P_RA + log_P_DEC
                # PROBLEM! How do I introduce the delta(ra, dec) terms?

    def log_likelihood(self, x):
        logL = 0.0
        logL += np.log(
            gaussian(lal.LuminosityDistance(self.omega, x['z']), 33.4, 3.34))
        logL += np.log(gaussian(x['ra'], GW.ra.rad, GW.ra.rad / 10.))
        logL += np.log(gaussian(x['dec'], GW.dec.rad, GW.dec.rad / 10.))
        return logL


if __name__ == '__main__':
    M = completeness()
    job = cpnest.CPNest(M, verbose=2, nthreads=8, nlive=2000, maxmcmc=1024)
    job.run()
# GLADE galaxy catalog