Example #1
    def __init__(self, psrs, bayesephem=True, gamma_common=4.33,
                 orf='hd', wideband=False, select=None):

        # initialize standard model with fixed white noise
        # and powerlaw red noise and GW signals
        self.pta = models.model_2a(psrs, psd='powerlaw', bayesephem=bayesephem,
                                   gamma_common=gamma_common, wideband=wideband,
                                   select=select)


        # get frequencies here
        self.freqs = self._get_freqs()

        # get F-matrices and set up cache
        self.Fmats = self.get_Fmats()
        self._set_cache_parameters()

        # pulsar locations
        self.psrlocs = [p.pos for p in psrs]

        # overlap reduction function
        if orf == 'hd':
            self.orf = utils.hd_orf
        elif orf == 'dipole':
            self.orf = utils.dipole_orf
        elif orf == 'monopole':
            self.orf = utils.monopole_orf
        else:
            raise ValueError('Unknown ORF!')
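# For reference, the Hellings-Downs curve selected by the orf='hd' branch above
# can be evaluated directly from two pulsar unit position vectors. A minimal,
# self-contained sketch of that formula (hd_orf_value and the example vectors
# are illustrative, not part of the snippet; utils.hd_orf in enterprise computes
# the same quantity for distinct pulsars):
import numpy as np

def hd_orf_value(pos1, pos2):
    # (1 - cos(xi)) / 2 for angular separation xi between the two pulsars
    omc2 = (1.0 - np.dot(pos1, pos2)) / 2.0
    return 1.5 * omc2 * np.log(omc2) - 0.25 * omc2 + 0.5

p1 = np.array([1.0, 0.0, 0.0])
p2 = np.array([0.0, 1.0, 0.0])
print(hd_orf_value(p1, p2))  # ~ -0.14 for a 90-degree separation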
def test_model2a_pshift(dmx_psrs, caplog):
    caplog.set_level(logging.CRITICAL)
    m2a = models.model_2a(dmx_psrs,
                          noisedict=noise_dict,
                          pshift=True,
                          pseed=42)
    assert hasattr(m2a, 'get_lnlikelihood')
def test_model2a_broken_plaw(dmx_psrs, caplog):
    caplog.set_level(logging.CRITICAL)
    m2a = models.model_2a(dmx_psrs,
                          psd='broken_powerlaw',
                          delta_common=0,
                          noisedict=noise_dict)
    assert hasattr(m2a, 'get_lnlikelihood')
def test_hypermodel(dmx_psrs, caplog):
    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    m3a = models.model_3a(dmx_psrs, noisedict=noise_dict)
    ptas = {0: m2a, 1: m3a}
    hm = hypermodel.HyperModel(ptas)
    assert hasattr(hm, 'get_lnlikelihood')
    assert 'gw_log10_A' in hm.param_names
    assert 'nmodel' in hm.param_names
def test_setup_sampler(dmx_psrs, caplog):
    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    samp = sampler.setup_sampler(m2a, outdir=outdir, human='tester')
    assert hasattr(samp, "sample")
    paramfile = os.path.join(outdir, "pars.txt")
    assert os.path.isfile(paramfile)
    with open(paramfile, "r") as f:
        params = [line.rstrip('\n') for line in f]
    for ptapar, filepar in zip(m2a.param_names, params):
        assert ptapar == filepar
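# A minimal sketch of how the sampler returned above might be run, assuming the
# usual PTMCMCSampler interface (sample(x0, Niter, ...) with SCAM/AM/DE jump
# weights); the chain length and weights are illustrative, not taken from the
# test:
import numpy as np

x0 = np.hstack([p.sample() for p in m2a.params])  # random starting point
samp.sample(x0, int(1e5), SCAMweight=30, AMweight=15, DEweight=50)
# the chain is then written to outdir (typically chain_1.txt) next to pars.txt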
Example #6
def test_jumpproposal(dmx_psrs, caplog):
    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    jp = sampler.JumpProposal(m2a)
    assert jp.draw_from_prior.__name__ == 'draw_from_prior'
    assert jp.draw_from_signal_prior.__name__ == 'draw_from_signal_prior'
    assert (jp.draw_from_par_prior('J1713+0747').__name__ ==
            'draw_from_J1713+0747_prior')
    assert (jp.draw_from_par_log_uniform({'gw':(-20,-10)}).__name__ ==
            'draw_from_gw_log_uniform')
    assert (jp.draw_from_signal('red noise').__name__ ==
            'draw_from_red noise_signal')
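# In practice these proposals are registered with a PTMCMCSampler cycle; a
# minimal sketch (the sampler object and the weights are illustrative
# assumptions, not part of the test above):
samp = sampler.setup_sampler(m2a, outdir=outdir, human='tester')
samp.addProposalToCycle(jp.draw_from_prior, 15)
samp.addProposalToCycle(jp.draw_from_signal_prior, 15)
samp.addProposalToCycle(jp.draw_from_par_log_uniform({'gw': (-20, -10)}), 10)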
Example #7
    def __init__(self,
                 psrs,
                 bayesephem=True,
                 gamma_common=4.33,
                 orf='hd',
                 wideband=False,
                 select=None,
                 noisedict=None,
                 pta=None):

        # initialize standard model with fixed white noise
        # and powerlaw red noise and GW signals

        if pta is None:
            self.pta = models.model_2a(psrs,
                                       psd='powerlaw',
                                       bayesephem=bayesephem,
                                       gamma_common=gamma_common,
                                       is_wideband=wideband,
                                       select='backend',
                                       noisedict=noisedict)
        else:
            self.pta = pta

        self.gamma_common = gamma_common
        # get frequencies here
        self.freqs = self._get_freqs(psrs)

        # set up cache
        self._set_cache_parameters()

        # pulsar locations
        self.psrlocs = [p.pos for p in psrs]

        # overlap reduction function
        if orf == 'hd':
            self.orf = model_orfs.hd_orf
        elif orf == 'dipole':
            self.orf = model_orfs.dipole_orf
        elif orf == 'monopole':
            self.orf = model_orfs.monopole_orf
        elif orf == 'gw_monopole':
            self.orf = model_orfs.gw_monopole_orf
        elif orf == 'gw_dipole':
            self.orf = model_orfs.gw_dipole_orf
        elif orf == 'st':
            self.orf = model_orfs.st_orf
        else:
            raise ValueError('Unknown ORF!')
def test_hyper_sampler(dmx_psrs, caplog):
    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    m3a = models.model_3a(dmx_psrs, noisedict=noise_dict)
    ptas = {0: m2a, 1: m3a}
    hm = hypermodel.HyperModel(ptas)
    samp = hm.setup_sampler(outdir=outdir, human='tester')
    assert hasattr(samp, "sample")
    paramfile = os.path.join(outdir, "pars.txt")
    assert os.path.isfile(paramfile)
    with open(paramfile, "r") as f:
        params = [line.rstrip('\n') for line in f]
    for ptapar, filepar in zip(hm.param_names, params):
        assert ptapar == filepar
    x0 = hm.initial_sample()
    assert len(x0) == len(hm.param_names)
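# After sampling, the posterior odds between model 0 (m2a) and model 1 (m3a)
# can be read off the 'nmodel' samples. A minimal sketch, assuming the chain
# was written to outdir/chain_1.txt with one column per parameter followed by
# PTMCMCSampler's bookkeeping columns:
import os
import numpy as np

chain = np.loadtxt(os.path.join(outdir, 'chain_1.txt'))
nmodel = chain[:, hm.param_names.index('nmodel')]
odds_1_vs_0 = np.sum(nmodel >= 0.5) / np.sum(nmodel < 0.5)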
Example #9
def test_extend_emp_dists_1d_kde(dmx_psrs, caplog):
    with open(datadir+'/emp_dist_samples.pkl', 'rb') as fin:
        tmp_data = pickle.load(fin)

    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    new_dist = make_empirical_distributions_KDE(m2a, tmp_data['names'], tmp_data['names'],
                                                tmp_data['samples'], save_dists=False)
    new_dist = sampler.extend_emp_dists(m2a, new_dist)
    for ii in range(len(tmp_data['names'])):
        m2a.params[ii].prior._defaults['pmin'] -= 0.1
    new_dist = sampler.extend_emp_dists(m2a, new_dist)
    assert len(new_dist) == 6
    for i in range(6):
        assert new_dist[i].minval <= m2a.params[i].prior._defaults['pmin']
        assert new_dist[i].maxval >= m2a.params[i].prior._defaults['pmax']
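# The extended distributions are typically handed back to the sampler so that
# empirical-distribution jump proposals cover the (widened) priors. A minimal
# sketch, assuming setup_sampler accepts an empirical_distr keyword as in
# recent enterprise_extensions versions:
samp = sampler.setup_sampler(m2a, outdir=outdir, human='tester',
                             empirical_distr=new_dist)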
Example #10
def test_extend_emp_dists_1d(dmx_psrs, caplog):
    with open(datadir+'/emp_dist_samples.pkl', 'rb') as fin:
        tmp_data = pickle.load(fin)

    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    new_dist = make_empirical_distributions(m2a, tmp_data['names'], tmp_data['names'],
                                            tmp_data['samples'], save_dists=False)
    # run extend when edges match priors
    new_dist = sampler.extend_emp_dists(m2a, new_dist)
    # change priors so they don't match edges of
    # empirical distribution
    for ii in range(len(tmp_data['names'])):
        m2a.params[ii].prior._defaults['pmin'] -= 0.1
    new_dist = sampler.extend_emp_dists(m2a, new_dist)
    assert len(new_dist) == 6
    for i in range(6):
        assert new_dist[i]._edges[0] <= m2a.params[i].prior._defaults['pmin']
        assert new_dist[i]._edges[-1] >= m2a.params[i].prior._defaults['pmax']
Example #11
def test_extend_emp_dists_2d(dmx_psrs, caplog):
    with open(datadir+'/emp_dist_samples.pkl', 'rb') as fin:
        tmp_data = pickle.load(fin)
    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    parnames = [[tmp_data['names'][0], tmp_data['names'][1]],
                [tmp_data['names'][2], tmp_data['names'][3]],
                [tmp_data['names'][4], tmp_data['names'][5]]]
    new_dist = make_empirical_distributions(m2a, parnames, tmp_data['names'],
                                            tmp_data['samples'], save_dists=False)
    # case 1, edges match priors
    new_dist = sampler.extend_emp_dists(m2a, new_dist)
    # case 2, edges don't match priors (set priors to be different)
    for ii in range(len(tmp_data['names'])):
        m2a.params[ii].prior._defaults['pmin'] -= 0.1
        m2a.params[ii].prior._defaults['pmax'] += 0.1
    new_dist = sampler.extend_emp_dists(m2a, new_dist)
    assert len(new_dist) == 3
    for i in range(3):
        k = 2 * i
        assert new_dist[i]._edges[0][0] <= m2a.params[k].prior._defaults['pmin']
        assert new_dist[i]._edges[0][-1] >= m2a.params[k].prior._defaults['pmax']
        assert new_dist[i]._edges[1][0] <= m2a.params[k + 1].prior._defaults['pmin']
        assert new_dist[i]._edges[1][-1] >= m2a.params[k + 1].prior._defaults['pmax']
Example #12
        del psrs[idx]
    Outdir = args.outdir + '{0}/'.format(args.nyears)

with open(args.noisepath, 'r') as fin:
    noise_dict = json.load(fin)

if args.vary_gamma:
    gamma_gw = None
else:
    gamma_gw = args.gamma_gw
if args.model == 'model_2a':
    pta = models.model_2a(psrs,
                          psd=args.psd,
                          noisedict=noise_dict,
                          n_gwbfreqs=args.nfreqs,
                          gamma_common=gamma_gw,
                          upper_limit=args.gwb_ul,
                          bayesephem=args.bayes_ephem,
                          wideband=args.wideband,
                          pshift=args.pshift,
                          select='backend')
if args.model == 'model_3a':
    pta = models.model_3a(psrs,
                          psd=args.psd,
                          noisedict=noise_dict,
                          n_gwbfreqs=args.nfreqs,
                          gamma_common=gamma_gw,
                          upper_limit=args.gwb_ul,
                          bayesephem=args.bayes_ephem,
                          wideband=args.wideband,
                          pshift=args.pshift,
                          select='backend')
def test_model2a_5gwb(dmx_psrs, caplog):
    caplog.set_level(logging.CRITICAL)
    m2a = models.model_2a(dmx_psrs, n_gwbfreqs=5, noisedict=noise_dict)
    assert hasattr(m2a, 'get_lnlikelihood')
with open(args.noisepath, 'r') as fin:
    noise = json.load(fin)

if args.rn_psrs[0] == 'all':
    rn_psrs = 'all'
else:
    rn_psrs = args.rn_psrs

pta_crn = models.model_2a(psrs,
                          psd=args.psd,
                          noisedict=noise,
                          n_gwbfreqs=args.nfreqs,
                          gamma_common=args.gamma_gw,
                          delta_common=0.,
                          upper_limit=False,
                          bayesephem=args.bayes_ephem,
                          be_type='setIII',
                          wideband=False,
                          rn_psrs=rn_psrs,
                          select='backend',
                          pshift=False)

#### Sky Scramble script
if args.sky_scramble is None:
    pass
else:
    scr_npz = np.load(args.sky_scramble)
    thetas = scr_npz['thetas'][args.process, :]
    phis = scr_npz['phis'][args.process, :]
    for p, theta, phi in zip(psrs, thetas, phis):
def pta_model2a(dmx_psrs, caplog):
    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict)
    return m2a
Example #16
def pta_model2a(dmx_psrs, caplog):
    m2a = models.model_2a(dmx_psrs, noisedict=noise_dict, tnequad=True)
    return m2a
Example #17
                         upper_limit=False,
                         bayesephem=args.bayes_ephem,
                         be_type='setIII',
                         wideband=False,
                         rn_psrs=rn_psrs,
                         pshift=args.pshift,
                         pseed=args.process,
                         psr_models=False)

pta_crn = models.model_2a(psrs,
                          psd='powerlaw',
                          noisedict=noise,
                          n_gwbfreqs=args.nfreqs,
                          gamma_common=13 / 3.,
                          upper_limit=False,
                          bayesephem=args.bayes_ephem,
                          be_type='setIII',
                          wideband=False,
                          rn_psrs=rn_psrs,
                          pshift=args.pshift,
                          pseed=args.process,
                          select='backend',
                          psr_models=False)

print(pta_gw.get_lnlikelihood(np.hstack([p.sample() for p in pta_gw.params])))
print(pta_crn.get_lnlikelihood(np.hstack([p.sample() for p in pta_crn.params])))
#
# #Load in T matrix from previous run to check same random phase shifts
# if os.path.exists(args.outdir+'Tmats.npy'):
#     Tmats = np.load(args.outdir+'Tmats.npy')
#     print('Loading Tmat list from earlier incarnation.')
# else: # Save random phase shifted T matrices