def stackCats(stackfile, configname, answerfile, outfile):
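    '''Stack the shear catalogs listed in stackfile into one averaged profile.

    Loads per-halo truth values from answerfile (a pickle keyed by halo id),
    builds a sim reader and fitter from configname, constructs radial bin
    edges (linear or log spacing, per the profile builder), accumulates each
    catalog with its truth record into an OnlineStatistics object, writes the
    stacked catalog to outfile, and returns the OnlineStatistics object.
    '''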

    filebase = os.path.basename(answerfile)
    match = re.match('(.+)_answers.pkl', filebase)
    simtype = match.group(1)

    with open(answerfile, 'rb') as infile:
        answers = cPickle.load(infile)

    tostack = [x[0] for x in readtxtfile.readtxtfile(stackfile)]

    config = nfwfit.readConfiguration(configname)

    simreader = nfwfit.buildSimReader(config)

    nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())

    fitter = nfwfit.buildFitter(config)

    profile = fitter.profileBuilder

    if profile.binspacing == 'linear':
        binedges = np.linspace(profile.minradii, profile.maxradii,
                               profile.nbins + 1)
    else:
        binedges = np.logspace(np.log10(profile.minradii),
                               np.log10(profile.maxradii), profile.nbins + 1)

    stackedprofile = OnlineStatistics(binedges)

    for catalogname in tostack:

        filebase = os.path.basename(catalogname)

        match = idpatterns[simtype].match(filebase)

        haloid = int(match.group(1))

        try:
            truth = answers[haloid]
        except KeyError:
            print 'Failure at {0}'.format(catalogname)
            raise

        catalog = nfwfit.readSimCatalog(catalogname, simreader, config)

        stackedprofile.accumulate(catalog, truth)

    stackedprofile.writeSimCat(outfile)

    return stackedprofile
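
# Hypothetical usage sketch (file names are illustrative, not from the original module):
# stacked = stackCats('catalogs_to_stack.list', 'config.sh',
#                     'mxxl_answers.pkl', 'stacked_profile.out')
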
def createFakeChains(config,
                     nclusters,
                     zcluster,
                     r_mpc_edges,
                     beta_s,
                     galdensity,
                     shapenoise,
                     nsamples=1000,
                     mass=10**15.2):
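    '''Generate mock shear profiles at a fixed true mass and run MCMC on each.

    Builds nclusters clusters via createClusterSet, retries MCMC model
    construction up to 10 times on pymc.ZeroProbability, runs each chain for
    nsamples samples, and returns the true masses together with the m200
    chains (the first 200 samples are discarded as burn-in).
    '''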

    mtrues = mass * np.ones(nclusters)

    r_mpcs, shearprofiles, shearerrs = createClusterSet(
        config, mtrues, zcluster, r_mpc_edges, beta_s, galdensity, shapenoise)

    fitter = nfwfit.buildFitter(config)

    chains = []

    for i in range(nclusters):

        mcmc_model = None
        for j in range(10):
            try:
                mcmc_model = fitter.model.makeMCMCModel(
                    r_mpcs[i], shearprofiles[i], shearerrs[i], beta_s,
                    beta_s**2, zcluster)
                break
            except pymc.ZeroProbability:
                pass
        if mcmc_model is None:
            raise pymc.ZeroProbability

        manager = varcontainer.VarContainer()
        options = varcontainer.VarContainer()
        manager.options = options

        options.singlecore = True
        options.adapt_every = 100
        options.adapt_after = 100
        options.nsamples = nsamples
        manager.model = mcmc_model

        runner = pma.MyMCMemRunner()
        runner.run(manager)
        runner.finalize(manager)

        chains.append(manager.chain['m200'][200:])

    return mtrues, chains
def fitClusterSet(config, zcluster, r_mpcs, beta_s, shearprofiles, shearerrs):
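    '''Fit each mock shear profile with a minimum chi-squared NFW fit.

    Falls back to a simplex minimization if the first attempt returns None.
    Returns fitted m200 masses (scaled by fitter.model.massScale), error
    estimates, and a mask that is 0 where both fit attempts failed.
    '''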

    fitter = nfwfit.buildFitter(config)

    fitMasses = np.zeros(len(shearprofiles))
    fitErrs = np.zeros(len(shearprofiles))
    mask = np.ones(len(shearprofiles))

    for i in range(len(shearprofiles)):

        fitres = fitter.minChisqMethod(r_mpcs[i], shearprofiles[i],
                                       shearerrs[i], beta_s, beta_s**2,
                                       zcluster)
        if fitres is None:
            fitres = fitter.minChisqMethod(r_mpcs[i],
                                           shearprofiles[i],
                                           shearerrs[i],
                                           beta_s,
                                           beta_s**2,
                                           zcluster,
                                           useSimplex=True)

        if fitres is None:
            mask[i] = 0
        else:

            fitMasses[i] = fitres[0]['m200'] * fitter.model.massScale
            #            fitMasses[i] = nfwutils.Mdelta(fitres[0]['rscale'], 4.0, zcluster, 200.)
            asymerrs = fitres[1]['m200']
            try:
                fitErrs[i] = fitter.model.massScale * (asymerrs[1] -
                                                       asymerrs[0]) / 2.
            except TypeError:
                fitErrs[i] = asymerrs
#
#    mask = (mask == 1)

    return fitMasses, fitErrs, mask
def plotLogProbSurface(catalogname,
                       configfile,
                       fig=None,
                       noiselevels=[0., 0.03, 0.07, 0.15]):
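    '''Plot the mass posterior shape for a catalog at several added noise levels.

    For each noise level, Gaussian noise is added to the shear, the model
    chi-squared is evaluated on a mass grid (marginalizing over a
    concentration grid when the model has two free parameters), and the
    normalized probability versus mass is plotted on a single axis.
    '''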

    if fig is None:
        fig = pylab.figure()

    config = nfwfit.readConfiguration(configfile)

    simreader = nfwfit.buildSimReader(config)

    nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())

    catalog = nfwfit.readSimCatalog(catalogname, simreader, config)

    fitter = nfwfit.buildFitter(config)

    r_mpc, ghat, sigma_ghat, beta_s, beta_s2, zlens = fitter.prepData(
        catalog, config)

    print sigma_ghat

    fitter.model.setData(beta_s, beta_s2, zlens)

    guess = fitter.model.guess()

    massgrid = np.arange(1e14, 1e15, 2.5e13) / fitter.model.massScale
    masscenters = (massgrid[1:] + massgrid[:-1]) / 2.

    for noise in noiselevels:

        noisy_g = ghat + noise * np.random.standard_normal(len(ghat))
        noisy_sigma = np.sqrt(sigma_ghat**2 + noise**2)

        if len(guess) == 2:

            concengrid = np.arange(1., 15., 0.25)
            concencenters = (concengrid[1:] + concengrid[:-1]) / 2.

            chisqgrid = np.zeros((len(masscenters), len(concencenters)))

            for i in range(len(masscenters)):
                for j in range(len(concencenters)):

                    chisqgrid[i, j] = fitmodel.ChiSqStat(
                        noisy_g, noisy_sigma,
                        fitter.model(r_mpc, masscenters[i], concencenters[j]))

            probgrid = np.exp(-0.5 * (chisqgrid - np.max(chisqgrid)))
            massprobgrid = np.sum(probgrid, axis=1) / np.sum(probgrid)
            logprob = np.log(massprobgrid)

        else:

            chisqgrid = np.zeros(len(masscenters))

            for i in range(len(masscenters)):

                chisqgrid[i] = fitmodel.ChiSqStat(
                    noisy_g, noisy_sigma, fitter.model(r_mpc, masscenters[i]))

            logprob = -0.5 * chisqgrid

        ax = pylab.gca()
        ax.plot(masscenters, np.exp(logprob - np.max(logprob)), label=noise)

        print 'Max: {0}'.format(masscenters[np.argmax(logprob)])

    pylab.legend()
    return fig, massgrid, concengrid, chisqgrid, logprob
def consolidateFits(workdir, simtype, outdir):
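    '''Consolidate the per-halo fit outputs (*.out files) in outdir into one pickle.

    Matches each output file to the truth table, records the m200 fits and
    errors, derives m500 from the fitted mass and a concentration (measured,
    or taken from the mass-concentration relation), logs failures to a
    'fails' file, and dumps the collected arrays to consolidated.pkl.
    '''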

    failfile = open('{0}/fails'.format(outdir), 'w')

    idpattern = idpatterns[simtype]

    answers = cPickle.load(
        open('{0}/{1}_answers.pkl'.format(workdir, simtype), 'rb'))

    outputfiles = glob.glob('%s/*.out' % outdir)
    nhalos = len(outputfiles)

    ids = []
    measured_m200s = np.zeros(nhalos)
    measured_m200errs = np.zeros(nhalos)
    measured_m500s = np.zeros(nhalos)
    measured_m500errs = np.zeros(nhalos)
    measured_cs = np.zeros(nhalos)
    measured_rs = np.zeros(nhalos)

    true_m200s = np.zeros(nhalos)
    true_m500s = np.zeros(nhalos)
    true_cs = np.zeros(nhalos)
    redshifts = np.zeros(nhalos)

    results = dict(ids=ids,
                   measured_m200s=measured_m200s,
                   measured_m200errs=measured_m200errs,
                   measured_m500s=measured_m500s,
                   measured_m500errs=measured_m500errs,
                   measured_cs=measured_cs,
                   measured_rs=measured_rs,
                   true_m200s=true_m200s,
                   true_m500s=true_m500s,
                   true_cs=true_cs,
                   redshifts=redshifts)

    class WeirdException(Exception):
        pass

    #load up the environment for cosmology, and mc relation if used
    config = nfwfit.readConfiguration('{0}/config.sh'.format(outdir))
    simreader = nfwfit.buildSimReader(config)
    nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())
    fitter = nfwfit.buildFitter(config)

    configname = os.path.basename(outdir)

    for i, output in enumerate(outputfiles):

        filebase = os.path.basename(output)

        match = idpattern.match(filebase)

        try:
            haloid = int(match.group(1))
        except AttributeError as e:
            print filebase
            raise e
        except ValueError:
            haloid = match.group(1)

        try:
            truth = answers[haloid]
        except KeyError:
            print 'Failure at {0}'.format(output)
            raise
        ids.append(haloid)
        true_m200s[i] = truth['m200']
        true_m500s[i] = truth['m500']
        true_cs[i] = truth['concen']
        redshifts[i] = truth['redshift']

        with open(output, 'rb') as infile:
            measured = cPickle.load(infile)

        if measured is None:
            print 'Fail {0} {1}'.format(configname, haloid)
            failfile.write('Fail {0} {1}\n'.format(configname, haloid))
            continue

        measured_m200s[i] = (measured[0]['m200'] * fitter.model.massScale *
                             nfwutils.global_cosmology.h)
        measured_m200errs[i] = (np.mean(np.abs(measured[1]['m200'])) *
                                fitter.model.massScale *
                                nfwutils.global_cosmology.h)
        if 'c200' in measured[0]:
            measured_cs[i] = measured[0]['c200']
        else:
            ## need to dig up the mc relation
            measured_cs[i] = fitter.model.massconRelation(
                np.abs(measured_m200s[i]), redshifts[i],
                fitter.model.overdensity)

        #####
        #calculate m500

        measured_rs[i] = nfwutils.rscaleConstM(np.abs(measured_m200s[i]),
                                               measured_cs[i], redshifts[i],
                                               fitter.model.overdensity)
        measured_m500s[i] = nfwutils.Mdelta(measured_rs[i], measured_cs[i],
                                            redshifts[i], 500)

        if measured_m200s[i] < 0:
            measured_m500s[i] = -measured_m500s[i]

        if not np.isfinite(measured_m500s[i]):
            print 'NOT FINITE'
            print haloid
            print measured

    with open('%s/consolidated.pkl' % outdir, 'wb') as outfile:
        cPickle.dump(results, outfile)

    failfile.close()
def compareNoiseProfiles(data=None):
    ''' Compare intrinsic noise levels to assumed shape noise'''

    if data is None:
        data = {}

    if 'centers_mpc' not in data:

        config = nfwfit.readConfiguration(
            '/vol/euclid1/euclid1_1/dapple/mxxl_lensing/mxxlsnap54/general-c4-r10-n0_0-xrayNONE/config.sh'
        )

        simreader = nfwfit.buildSimReader(config)

        nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())

        fitter = nfwfit.buildFitter(config)

        intrnoise_profiles = []

        for haloid in range(800, 880):

            catbase = '/vol/euclid1/euclid1_raid1/dapple/mxxl_lensing/mxxlsnap54/halo_54_{}_0'.format(
                haloid)

            catalog = nfwfit.readSimCatalog(catbase, simreader, config)

            r_mpc, ghat, sigma_ghat, beta_s, beta_s2, zlens = fitter.prepData(
                catalog)

            intrnoise_profiles.append(sigma_ghat)

        intrnoise_profiles = np.row_stack(intrnoise_profiles)
        intrnoise = np.mean(intrnoise_profiles, axis=0)
        intrnoise_err = np.std(intrnoise_profiles, axis=0)

        edges_mpc = np.linspace(config.profilemin, config.profilemax,
                                config.nbins + 1)
        centers_mpc = (edges_mpc[1:] + edges_mpc[:-1]) / 2.
        dL = nfwutils.global_cosmology.angulardist(zlens)
        edges_arcmin = (edges_mpc / dL) * (180 / np.pi) * 60
        bin_areas = np.pi * (edges_arcmin[1:]**2 - edges_arcmin[:-1]**2)

        shapesigma = 0.25
        galdensity = 20.  # per sq arc min
        shape_noise = shapesigma / np.sqrt(galdensity * bin_areas)
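        # i.e. shape-noise error per radial bin = shapesigma / sqrt(N_gal),
        # with N_gal = galdensity * bin area in square arcmin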

        data['centers_mpc'] = centers_mpc
        data['intrnoise'] = intrnoise
        data['intrnoise_err'] = intrnoise_err
        data['shape_noise'] = shape_noise

    else:

        centers_mpc = data['centers_mpc']
        intrnoise = data['intrnoise']
        intrnoise_err = data['intrnoise_err']
        shape_noise = data['shape_noise']

    fig = pylab.figure()
    ax = pylab.gca()

    ax.plot(centers_mpc,
            intrnoise / intrnoise[2],
            label='Intrinsic Noise [80 cluster avg]',
            marker='None',
            linestyle='-',
            linewidth=2,
            color=pp.colors[0])
    ax.plot(centers_mpc,
            shape_noise / shape_noise[2],
            marker='None',
            linestyle='-',
            linewidth=3,
            color=pp.colors[1],
            label='Shape Noise')

    ax.legend(loc='upper right')
    ax.set_xlabel('Radius [Mpc]', fontsize=16)
    ax.set_ylabel('Relative Shear Error [Arbit Norm]', fontsize=16)

    fig.tight_layout()

    fig.savefig('figures/relative_shear_noise.png')
    fig.savefig('figures/relative_shear_noise.pdf')
    fig.savefig('figures/relative_shear_noise.eps')

    return fig, data
def massapp(catalog, config, nfwconfig):
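    '''Aperture-mass (zeta_c) profile estimate for a single cluster catalog.

    Fits an NFW model to fix the concentration and the mean convergence in
    the control annulus [r2, rmax], converts reduced shear to shear with the
    best-fit NFW kappa, log-bins the shear inside and outside r2, and for
    each inner radius r1 solves for the NFW m200 whose mean enclosed kappa
    matches the measured aperture kappa. Returns the inner radii, projected
    kappa, matching m200 values, enclosed masses, and mean enclosed densities.
    '''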

    zcluster = catalog.hdu.header['ZLENS']
    dL = nfwutils.global_cosmology.angulardist(zcluster)

    r2 = config.massappr2

    controlbins = config.controlbins
    
    minradii = config.profilemin
    rmax = config.profilemax
    nbins = config.nbins


    # fit NFW profile

    nfwfitter = nfwfit.buildFitter(nfwconfig)
    nfwm200, nfwm200err = nfwfitter.runUntilNotFail(catalog, config)
    nfwm200 = nfwm200['m200']
    c200 = nfwfitter.model.massconRelation(
        np.abs(nfwm200) * nfwfitter.model.massScale * nfwutils.global_cosmology.h,
        nfwfitter.model.zcluster, nfwfitter.model.overdensity)


    rho_c = nfwutils.global_cosmology.rho_crit(zcluster)
    rho_c_over_sigma_c = 1.5 * dL * nfwutils.global_cosmology.beta([1e6], zcluster)[0] * nfwutils.global_cosmology.hubble2(zcluster) / nfwutils.global_cosmology.v_c**2

    nfwrscale = tools.rscaleConstM(nfwm200,
                                   c200,
                                   rho_c,
                                   200)
    

    # calculate gamma for catalog
    #use kappa from best fit nfw profile



    nfwkappa = tools.NFWKappa(np.ascontiguousarray(catalog['r_mpc'], dtype='<d'), c200, nfwrscale, rho_c_over_sigma_c)
    gamma = catalog['ghat']*(1-catalog['beta_s']*nfwkappa)/catalog['beta_s']


    radii, shear, shearerr, avebeta, avebeta2, ngals = logbinning(catalog, gamma, minradii, r2, nbins)
    
    cradii, cshear, cshearerr, cavebeta, cavebeta2, cngals = logbinning(catalog, gamma, r2, rmax, controlbins)
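    # Outer (control-annulus) term of the zeta_c aperture statistic:
    # 2 * rmax^2 / (rmax^2 - r2^2) * integral of (gamma / r) dr over [r2, rmax]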
    integrand2 = cshear/cradii
    int2 = 2*rmax**2*scipy.integrate.simps(integrand2, cradii)/(rmax**2 - r2**2)
    
    #kappa aperture
    kappa_ap = avekappa(r2, rmax, nfwrscale, c200, rho_c_over_sigma_c)


    r1s = radii
    kappa_proj = np.zeros_like(r1s)
    matching_m200s = np.zeros_like(r1s)
    mass_enclosed = np.zeros_like(r1s)
    density_enclosed = np.zeros_like(r1s)

    for cur_ap_index, r1 in enumerate(r1s):

        #gamma integrals

        integrand1 = (shear/radii)[cur_ap_index:]
        res = scipy.integrate.simps(integrand1, radii[cur_ap_index:])
        int1 = 2*res

        zeta_c = int1 + int2
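        # zeta_c is the aperture densitometry statistic: the mean convergence
        # inside r1 minus the mean convergence in the control annulus [r2, rmax]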


        #find best matched nfw that reproduces kappa core

        kappa_r1 = zeta_c + kappa_ap
        kappa_proj[cur_ap_index] = kappa_r1

        ##

        def findNFW(m200):

            c200 = nfwfitter.model.massconRelation(
                np.abs(m200) * nfwfitter.model.massScale * nfwutils.global_cosmology.h,
                nfwfitter.model.zcluster, nfwfitter.model.overdensity)
        
            nfwrscale = tools.rscaleConstM(m200,
                                           c200,
                                           rho_c,
                                           200)

            avekappa = tools.aveEnclosedKappa(np.array([r1], dtype=np.double),
                                              c200,
                                              nfwrscale,
                                              rho_c_over_sigma_c)
            return avekappa - kappa_r1

        ##

        best_m200 = scipy.optimize.brentq(findNFW, 5e13, 1e16)
        matching_m200s[cur_ap_index] = best_m200
        best_c200 = nfwfitter.model.massconRelation(
            np.abs(best_m200) * nfwfitter.model.massScale * nfwutils.global_cosmology.h,
            nfwfitter.model.zcluster, nfwfitter.model.overdensity)
        
        best_nfwrscale = tools.rscaleConstM(best_m200,
                                            best_c200,
                                            rho_c,
                                            200)

        mass_enclosed[cur_ap_index] = nfwutils.massInsideR(best_nfwrscale, best_c200,
                                                           zcluster, r1)
        vol = (4./3)*np.pi*r1**3
        density_enclosed[cur_ap_index] = mass_enclosed[cur_ap_index] / vol



    return r1s, kappa_proj, matching_m200s, mass_enclosed, density_enclosed