def test_hypermodel(dmx_psrs, caplog):
    """A two-model HyperModel should expose a likelihood, the GW amplitude
    parameter, and the model-selection parameter `nmodel`."""
    pta_list = [models.model_2a(dmx_psrs, noisedict=noise_dict),
                models.model_3a(dmx_psrs, noisedict=noise_dict)]
    hyper = hypermodel.HyperModel(dict(enumerate(pta_list)))
    assert hasattr(hyper, 'get_lnlikelihood')
    assert 'gw_log10_A' in hyper.param_names
    assert 'nmodel' in hyper.param_names
def test_hyper_sampler(dmx_psrs, caplog):
    """setup_sampler should return a sampler, write pars.txt matching
    param_names, and initial_sample should have one entry per parameter."""
    pta_dict = {0: models.model_2a(dmx_psrs, noisedict=noise_dict),
                1: models.model_3a(dmx_psrs, noisedict=noise_dict)}
    hyper = hypermodel.HyperModel(pta_dict)
    sampler = hyper.setup_sampler(outdir=outdir, human='tester')
    assert hasattr(sampler, "sample")
    parfile = os.path.join(outdir, "pars.txt")
    assert os.path.isfile(parfile)
    with open(parfile, "r") as fh:
        file_pars = [ln.rstrip('\n') for ln in fh]
    # The file must list parameters in the same order as the model.
    for expected, found in zip(hyper.param_names, file_pars):
        assert expected == found
    start = hyper.initial_sample()
    assert len(start) == len(hyper.param_names)
# --- Beispiel #3 (scraped example separator; original score: 0) ---
    # Assemble one signal model per pulsar for each of two competing
    # configurations — "std" vs "low" (presumably different red-noise/GW
    # prior choices; names only, TODO confirm against the unseen setup above).
    std_models = [(s + rn_std + gw_std)(psr) for psr in pkl_psrs]
    low_models = [(s + rn_low + gw_low)(psr) for psr in pkl_psrs]

    pta_std = signal_base.PTA(std_models)
    pta_low = signal_base.PTA(low_models)

    # Model indices 0/1 are the labels a HyperModel's `nmodel` parameter
    # will select between.
    ptas = {0:pta_std,
            1:pta_low}

    # Fill in fixed parameter values from the `noise` dictionary.
    pta_std.set_default_params(noise)
    pta_low.set_default_params(noise)

    # Persist both PTAs; cloudpickle is used, presumably because the signal
    # closures are not plain-picklable.
    with open(args.pta_pkl,'wb') as fout:
        cloudpickle.dump(ptas,fout)

# Build the model-selection HyperModel over the two PTAs and configure the
# sampler (resuming from any existing chain in args.outdir).
hm = hypermodel.HyperModel(models=ptas)
sampler = hm.setup_sampler(outdir=args.outdir, resume=True,
                           empirical_distr=args.emp_distr)

# Best-effort: record each model's GW frequency grid for post-processing.
# A failure here must not abort the run, but the original bare `except: pass`
# also swallowed KeyboardInterrupt/SystemExit and hid the reason — catch
# Exception only, and say why it failed.
try:
    gw_std_freqs = get_freqs(pta_std, signal_id='gw')
    gw_low_freqs = get_freqs(pta_low, signal_id='gw')
    np.savetxt(args.outdir + 'low_gw_freqs.txt', gw_low_freqs, fmt='%.18e')
    np.savetxt(args.outdir + 'std_gw_freqs.txt', gw_std_freqs, fmt='%.18e')
except Exception as err:
    print('Could not write GW frequency files:', err)

# Map each model index to that model's parameter-name list.  Iterate the
# dict's own items rather than enumerate(ptas): enumerate over a dict yields
# positional counters, which only coincidentally match keys 0..N-1 (and the
# original loop's `mod` variable was unused).
model_params = {key: pta.param_names for key, pta in ptas.items()}
# --- Beispiel #4 (scraped example separator; original score: 0) ---
        # Select only this model's parameters whose names appear in
        # `param_diffs` (presumably params not already covered by a previous
        # model in the enclosing loop — TODO confirm against the unseen code).
        mask = np.array([str(p) in param_diffs for p in model.params])
        # For each selected parameter: use a value drawn via .sample(),
        # unless a fixed value is supplied in `noisedict`, in which case use
        # that.  Values are flattened so vector parameters contribute
        # multiple scalar entries.
        x0.extend([np.array(pp.sample()).ravel().tolist() \
                   if pp.name not in noisedict.keys() \
                   else np.array(noisedict[pp.name]).ravel().tolist() \
                   for pp in np.array(model.params)[mask]])

        # Accumulate the sorted union of parameter names seen so far.
        uniq_params = np.union1d([str(p) for p in model.params], \
                                  uniq_params)

    # Starting value for the model-selection parameter (nmodel in a
    # HyperModel); NOTE(review): hard-coded 0.1 selects model 0 — confirm.
    x0.extend([[0.1]])

    # Flatten the list-of-lists into a single 1-D starting vector.
    return np.array([p for sublist in x0 for p in sublist])


if params.sampler == 'ptmcmcsampler':
    # Wrap the model(s) in a HyperModel so the PTMCMC sampler can be built
    # via its helper.  NOTE(review): `pta` may be a single PTA or a dict of
    # models — can't tell from this chunk.
    super_model = hypermodel.HyperModel(pta)
    print('Super model parameters: ', super_model.params)
    print('Output directory: ', params.output_dir)
    # Filling in PTMCMCSampler jump covariance matrix
    # if params.mcmc_covm is not None:
    #   ndim = len(super_model.param_names)
    #   identity_covm = np.diag(np.ones(ndim) * 1**2)
    #   identity_covm_df = pd.DataFrame(identity_covm, \
    #                                   index=super_model.param_names, \
    #                                   columns=super_model.param_names)
    #   covm = params.mcmc_covm.combine_first(identity_covm_df)
    #   identity_covm_df.update(covm)
    #   covm = np.array(identity_covm_df)
    # else:
    #   covm = None
    # NOTE(review): these keyword arguments (n_gwbfreqs, gamma_common,
    # bayesephem, ...) look like model-construction options rather than
    # sampler-setup options — verify against the HyperModel.setup_sampler
    # signature in the version of enterprise_extensions in use.
    sampler = super_model.setup_sampler(
                         n_gwbfreqs=args.nfreqs,
                         gamma_common=args.gamma_gw,
                         delta_common=0.,
                         upper_limit=False,
                         bayesephem=args.bayes_ephem,
                         be_type='setIII',
                         wideband=False,
                         rn_psrs=rn_psrs,
                         pshift=args.pshift)
# Rebuild the two-model HyperModel and spot-check that likelihood/posterior
# values stored in the chain agree with freshly computed ones.
ptas = {0: pta_crn, 1: pta_gw}

# Optional per-model log-weights: map model index -> supplied weight.
model_wts = None if args.model_wts is None else dict(enumerate(args.model_wts))

hm = hypermodel.HyperModel(models=ptas, log_weights=model_wts)

c0 = co.HyperModelCore(args.outdir, chaindir=args.outdir)

# Draw 100 random post-burn-in rows; the chain's trailing columns appear to
# hold (..., lnpost, lnlike, _, _) — TODO confirm the column layout.
for row in np.random.randint(c0.burn, c0.chain.shape[0], 100):
    sample = c0.chain[row, :-4]
    fresh_lnlike = hm.get_lnlikelihood(sample)
    fresh_lnpost = fresh_lnlike + hm.get_lnprior(sample)
    stored_lnlike = c0.chain[row, -3]
    stored_lnpost = c0.chain[row, -4]
    print('New LnLike, Old LnLike, New-Old:', fresh_lnlike, stored_lnlike,
          fresh_lnlike - stored_lnlike)
    print('New LnPost, Old LnPost, New-Old:', fresh_lnpost, stored_lnpost,
          fresh_lnpost - stored_lnpost)