Example #1
    unlensed = mg.get_map()    # random unlensed CMB stamp from the map generator
    noise_map = ng.get_map()   # noise realization
    lensed = enlensing.displace_map(unlensed, alpha_pix, order=lens_order)  # lens by the deflection field
    tot_beamed = maps.filter_map(lensed, kbeam)  # + fg_true
    stamp = tot_beamed + noise_map  # simulated observed stamp: beamed lensed CMB + noise
    if task == 0:  # save diagnostic images for the first task only
        io.plot_img(unlensed,pout_dir + "0_unlensed.png")
        io.plot_img(lensed,pout_dir + "1_lensed.png")
        io.plot_img(fg,pout_dir + "2_fg.png")
        io.plot_img(stamp,pout_dir + "3_tot.png")

    # Bayesian

    totlnlikes = []
    for k, kamp in enumerate(bkamps):
        lnlike = maps.get_lnlike(cinvs[k], stamp) + logdets[k]  # chi-square term + log-determinant for this trial amplitude
        totlnlike = lnlike  # + lnprior[k]
        totlnlikes.append(totlnlike)
    nlnlikes = -0.5 * np.array(totlnlikes)  # log-likelihood for each trial lensing amplitude
    mstats.add_to_stats("totlikes", nlnlikes)


    # lnlikes2d = np.zeros((bkamps.size,famps.size))
    # for k,kamp in enumerate(bkamps):
    #     for j,famp in enumerate(famps):
    #         cinv_updated, det_updated = cupdater.get_cinv(k,famp)
    #         lnlike = maps.get_lnlike(cinv_updated,stamp) + det_updated
    #         lnlikes2d[k,j] = lnlike
    
    # mstats.add_to_stack("lnlike2d",-0.5*lnlikes2d)
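
For context, the quantity scanned in the loop above is the Gaussian pixel-space log-likelihood of the stamp for each trial lensing amplitude. Below is a minimal NumPy sketch of that quantity for a single amplitude, assuming a dense pixel-pixel covariance cov; stamp_lnlike is an illustrative helper, not part of the original script.

import numpy as np

def stamp_lnlike(stamp, cov):
    # Flatten the 2D stamp into a pixel vector d.
    d = np.asarray(stamp).reshape(-1)
    # chi^2 = d^T C^{-1} d, computed with a solve instead of an explicit inverse.
    chi2 = d @ np.linalg.solve(cov, d)
    # ln det C, via slogdet for numerical stability.
    _, logdet = np.linalg.slogdet(cov)
    # Same combination as -0.5 * (maps.get_lnlike(cinvs[k], stamp) + logdets[k]) above.
    return -0.5 * (chi2 + logdet)

The trial amplitude whose covariance maximizes this quantity is the estimate of the lensing amplitude.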
    
Example #2
    lensed = maps.filter_map(  # assignment inferred from Example #1; the snippet is truncated here
        enlensing.displace_map(unlensed.copy(), alpha_pix, order=lens_order),
        kbeam)
    fdownsampled = enmap.enmap(resample.resample_fft(lensed, bshape), bwcs)  # FFT-resample to the coarser stamp geometry
    stamp = fdownsampled + noise_map

    # cutout = lensed + noise_map
    # Crop the central shape-sized region from the downsampled stamp
    cutout = stamp[int(bshape[0] / 2. - shape[0] / 2.):int(bshape[0] / 2. + shape[0] / 2.),
                   int(bshape[1] / 2. - shape[1] / 2.):int(bshape[1] / 2. + shape[1] / 2.)]

    # print(cinvs[k].shape,cutout.shape)

    totlnlikes = []
    for k, kamp in enumerate(kamps):
        lnlike = maps.get_lnlike(cinvs[k], cutout) + logdets[k]  # chi-square term + log-determinant for this trial amplitude
        totlnlike = lnlike  # + lnprior[k]
        totlnlikes.append(totlnlike)
    nlnlikes = -0.5 * np.array(totlnlikes)  # log-likelihood for each trial lensing amplitude
    mstats.add_to_stats("totlikes", nlnlikes)

mstats.get_stats()  # gather statistics across all tasks

# Sum the per-stamp log-likelihood curves and shift the peak to zero
lnlikes = mstats.vectors["totlikes"].sum(axis=0)
lnlikes -= lnlikes.max()

pl = io.Plotter(xlabel="$A$", ylabel="$\\mathrm{ln}\\mathcal{L}$")
# Overplot each stamp's likelihood curve, scaled by its own maximum
for j in range(mstats.vectors["totlikes"].shape[0]):
    pl.add(kamps,
           mstats.vectors["totlikes"][j, :] / mstats.vectors["totlikes"][j, :].max())
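
A hypothetical continuation of the script above (not part of the scraped example): add the summed curve to the plot, save it, and read off the maximum-likelihood amplitude with a rough 1-sigma width from a local quadratic fit. It assumes the Plotter exposes a done(filename) method as in other orphics examples, and the output filename is illustrative.

import numpy as np

# Stacked (summed) log-likelihood curve; its peak was shifted to zero above.
pl.add(kamps, lnlikes)
pl.done("lensing_amp_lnlike.png")  # illustrative output path

# Quadratic fit lnL ~ a*A^2 + b*A + c around the peak (three neighbouring points);
# assumes the peak is not at the edge of the scanned amplitude range.
imax = int(np.argmax(lnlikes))
sel = slice(max(imax - 1, 0), min(imax + 2, len(kamps)))
a, b, c = np.polyfit(kamps[sel], lnlikes[sel], 2)
A_best = -b / (2. * a)             # vertex of the parabola
sigma_A = np.sqrt(-1. / (2. * a))  # from lnL ~ -(A - A_best)^2 / (2 sigma^2)
print("best-fit amplitude:", A_best, "+/-", sigma_A)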