Example #1
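# Loads a pickled fit summary, locates the matching simulation .cat file
# (searching up to two parent directories unless simdir is given), and returns
# a MassDist built from the NFW mass inside 1.5 Mpc at concentration 4.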
def processSummaryFile(resultfile, ext, simdir):

    summary = cPickle.load(open(resultfile, 'rb'))

    dir, filename = os.path.split(resultfile)
    base = filename.split(ext)[0]
    print base
    if simdir is None:

        simfile = '%s/%s.cat' % (dir, base)
        if not os.path.exists(simfile):
            simfile = '%s/../%s.cat' % (dir, base)
            if not os.path.exists(simfile):
                simfile = '%s/../../%s.cat' % (dir, base)

    else:
        simfile = '%s/%s.cat' % (simdir, base)

    sim = ldac.openObjectFile(simfile)
    scale_radius = sim.hdu.header['R_S']
    zcluster = sim.hdu.header['Z']
    r500 = nfwutils.rdelta(scale_radius, 4.0, 500)
    #    m500 = nfwutils.Mdelta(scale_radius, 4.0, zcluster, 500)
    m500 = nfwutils.massInsideR(scale_radius, 4.0, zcluster, 1.5)

    return MassDist(zcluster, scale_radius, m500, summary['quantiles'][50],
                    summary['stddev'], None), None
Example #2
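# For each cluster, reads per-bootstrap pymc pickle databases and records the
# median NFW mass inside diffR from the thinned r_scale/concentration traces;
# missing bootstrap files are flagged in the mask.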
def readMLBootstraps_diffR(dir, items, bootrange, diffR, cluster_zs):

    masses = {}
    mask = {}

    for cluster in [x[0] for x in items]:

        workdir = '%s/%s' % (dir, cluster)

        masses[cluster] = np.zeros(len(bootrange))
        mask[cluster] = np.ones(len(bootrange))

        zcluster = cluster_zs[cluster]

        for i, bootnum in enumerate(bootrange):
            inputfile = '%s/bootstrap_%d.ml.out' % (workdir, bootnum)

            if not os.path.exists(inputfile):
                mask[cluster][i] = 0
                masses[cluster][i] = -1
                continue

            db = pymc.database.pickle.load(inputfile)
            sampled_masses = np.array([nfwutils.massInsideR(rs, c, zcluster, diffR) \
                                           for rs, c in zip(db.trace('r_scale')[3000::3], db.trace('concentration')[3000::3])])

            masses[cluster][i] = np.median(sampled_masses)

    return masses, mask
Example #3
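# For each (redshift, m500) key, compares the estimated masses against the true
# NFW mass inside 1.5 Mpc and records the bias, scatter, and typical uncertainty.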
def consolidate(massdists):

    results = {}
    for key, clusters in massdists.iteritems():
        zcluster, m500 = key

        rs_true = clusters[0][0].rs

        r500 = nfwutils.rdelta(rs_true, 4.0, 500)
        #        truemass = nfwutils.massInsideR(rs_true, 4.0, zcluster, r500)
        truemass = nfwutils.massInsideR(rs_true, 4.0, zcluster, 1.5)
        massests = np.array([x.mu for x, y in clusters])
        diff = massests - truemass
        #        ml, (m, p) = sp.ConfidenceRegion(diff, bins=19)
        bias = np.average(diff)
        #        bias = ml
        scatter = np.std(diff)
        typsig = np.average(np.array([x.sig for x, y in clusters]))
        cdfs = []
        #        for summ, masses in clusters:
        #            sorted_masses = np.sort(masses)
        #            cdf = float(len(sorted_masses[sorted_masses <= truemass])) / len(sorted_masses)
        #            cdfs.append(cdf)
        #

        results[key] = (truemass, diff, bias, scatter, typsig, cdfs)

    return results
Example #4
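    # Pipeline step: dumps a sanity-check pickle, runs the pymc MCMC, and
    # converts the post-burn r_scale (and concentration) traces into masses
    # inside r500.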
    def run(self, manager):

        model = manager.model
        nsamples = manager.options.nsamples
        outputFile = manager.options.outputFile
        burn = manager.options.burn

        sanitycheck = {}
        sanitycheck['r_mpc'] = model.r_mpc
        sanitycheck['ghats'] = model.ghats
        sanitycheck['pz'] = model.pdz
        sanitycheck['betas'] = model.betas
        sanitycheck['SeqNr'] = manager.inputcat['SeqNr']
        sanitycheck['phi'] = manager.inputcat['phi']
        #        sanitycheck['rho_c'] = model.rho_c
        #        sanitycheck['rho_c_over_sigma_c'] = model.rho_c_over_sigma_c

        with open('sanitycheck.pkl', 'w') as output:
            cPickle.dump(sanitycheck, output)

        manager.mcmc = pymc.MCMC(input=model, db='pickle', dbname=outputFile)

        try:
            manager.shapedistro.sampler_callback(manager.mcmc)
        except AttributeError:
            # the shape distro doesn't define a sampler callback; that's fine
            pass

        manager.mcmc.sample(nsamples)

        if isinstance(manager.mcmc.concentration, float):
            manager.masses = np.array([ nfwutils.massInsideR(rs,
                                                             manager.mcmc.concentration,
                                                             manager.mcmc.zcluster,
                                                             manager.r500) \
                                            for rs in manager.mcmc.trace('r_scale')[burn:] ])
        else:
            manager.masses = np.array([ nfwutils.massInsideR(rs,
                                                             c,
                                                             manager.mcmc.zcluster,
                                                             manager.r500) \
                                            for rs, c in zip(manager.mcmc.trace('r_scale')[burn:],
                                                             manager.mcmc.trace('concentration')[burn:])])
Example #5
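# Reads a tabulated mass/probability grid, draws nsamples masses from its CDF
# by inverse-transform sampling, and summarizes them into a MassDist.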
def processFile(resultfile, nsamples=10000, mass_radius=1.5):

    print resultfile

    results = ldac.openObjectFile(resultfile)

    dir, filename = os.path.split(resultfile)
    filebase, ext = os.path.splitext(filename)
    simfile = '%s/%s.cat' % (dir, filebase)
    if not os.path.exists(simfile):
        simfile = '%s/../%s.cat' % (dir, filebase)
        if not os.path.exists(simfile):
            simfile = '%s/../../%s.cat' % (dir, filebase)

    sim = ldac.openObjectFile(simfile)

    scale_radius = sim.hdu.header['R_S']
    concentration = sim.hdu.header['CONCEN']
    zcluster = sim.hdu.header['Z']

    masses = results['Mass']
    pdf = np.exp(results['prob'] - max(results['prob']))

    cdf = np.cumsum(pdf)
    cdf = cdf / cdf[-1]

    buffered_cdf = np.zeros(len(cdf) + 2)
    buffered_cdf[-1] = 1.
    buffered_cdf[1:-1] = cdf

    mass_samples = []
    for i in range(nsamples):

        cdf_pick = np.random.uniform()
        inbin = np.logical_and(
            np.roll(buffered_cdf, 1) <= cdf_pick, buffered_cdf > cdf_pick)
        mass_samples.append(masses[inbin[1:-1]][0])

#    rs_samples = [nfwutils.RsMassInsideR(x, concentration, zcluster, mass_radius) \
#                      for x in mass_samples]

#    r500 = nfwutils.rdelta(scale_radius, concentration, 500)
#    m500 = nfwutils.Mdelta(scale_radius, concentration, zcluster, 500)
    m500 = nfwutils.massInsideR(scale_radius, concentration, zcluster, 1.5)

    #    masses = np.array([nfwutils.massInsideR(x, concentration, zcluster, r500) for x in rs_samples])
    masses = np.array(mass_samples)

    mu = np.median(masses)
    sig = np.std(masses)

    chisq = ((masses - mu) / sig)**2

    return MassDist(zcluster, scale_radius, m500, mu, sig, chisq), masses
Example #6
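# Parses bootstrap .cc.out files for the fitted mass inside 1.5 Mpc, converts
# it to an NFW scale radius, and re-evaluates the mass inside diffR.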
def readCCSummary_diffR(dir,
                        clusters,
                        bootrange,
                        diffR,
                        cluster_zs,
                        concentration=4.):

    masses = {}
    mask = {}

    for cluster in clusters:

        workdir = '%s/%s' % (dir, cluster)

        zcluster = cluster_zs[cluster]

        masses[cluster] = np.zeros(len(bootrange))
        mask[cluster] = np.ones(len(bootrange))

        for i, bootnum in enumerate(bootrange):
            inputfile = '%s/bootstrap_%d.cc.out' % (workdir, bootnum)

            if not os.path.exists(inputfile):
                print inputfile
                mask[cluster][i] = 0
                masses[cluster][i] = -1
                continue

            input = open(inputfile)
            for line in input.readlines():
                if cc_regex.match(line):
                    tokens = line.split()
                    mass = float(tokens[-1])
                    if mass == 0.:
                        mass = 1e13

                    rscale = nfwutils.RsMassInsideR(mass, concentration,
                                                    zcluster, 1.5)
                    masses[cluster][i] = nfwutils.massInsideR(
                        rscale, concentration, zcluster, diffR)
                    break
            input.close()

        print cluster, len(masses[cluster])

    return masses, mask
Example #7
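# Recomputes the lensing signal and beta-method scale radii under an altered
# Omega_m and returns the corresponding NFW mass inside 1 Mpc.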
def alteredCosmology(cat, om):

    cosmology = nfwutils.Cosmology(om, 1. - om)
    comovingdist = nfwutils.ComovingDistMemoization(cosmology)

    betas = nfwutils.beta_s(cat['z'], 0.3, comovingdist)
    dl = nfwutils.angulardist(0.3, comovingdist=comovingdist)

    r_mpc = cat['r_pix'] * 0.2 * (1. / 3600.) * (np.pi / 180.) * dl

    shears = tools.NFWShear(r_mpc,
                            4.0,
                            0.5,
                            0.3,
                            dl,
                            Omega_m=om,
                            Omega_l=1 - om)
    kappa = tools.NFWKappa(r_mpc,
                           4.0,
                           0.5,
                           0.3,
                           dl,
                           Omega_m=om,
                           Omega_l=1 - om)
    g = betas * shears / (1 - betas * kappa)
    scale_radii = beta_method(cat['r_pix'],
                              r_mpc,
                              g,
                              np.mean(betas),
                              np.mean(betas**2),
                              4,
                              0.3,
                              Omega_m=om,
                              Omega_l=1 - om)
    mass = nfwutils.massInsideR(scale_radii, 4.0, 0.3, 1, cosmology)

    return mass
Example #8
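# Compares ML mass grids against the true NFW mass inside mradius for each
# cluster and collects the fractional bias for every subdirectory.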
def processFracBiasData(workdir,
                        subdirs,
                        clusters,
                        redshifts,
                        concentration=4.,
                        mradius=1.5):

    fracbiases = []

    for subdir in subdirs:

        masses, errs, massgrid, scale_radii = readMLMasses(
            workdir, subdir, clusters)

        truemasses = [
            nfwutils.massInsideR(scale_radii[x], concentration, redshifts[x],
                                 mradius) for x in clusters
        ]

        fracbias = calcFracBias(massgrid, truemasses)

        fracbiases.append(fracbias)

    return fracbiases
Example #9
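# Fits the mean offset and intrinsic scatter between ML masses and the true
# NFW masses inside 1.5 Mpc for several contamination subdirectories, then
# prints and plots the 1-D posteriors.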
def fitAltOffsetScript(data=None):

    if data is None:
        data = {}

    worklist = readtxtfile('worklist')
    clusters = [x[0] for x in worklist]

    workdir = '/u/ki/dapple/nfs12/cosmos/simulations/clusters_2012-05-17-highdensity/'
    subdirs = ['%sBVRIZ' % x for x in ['', 'contam0p10/', 'contam0p20/']]
    concentration = 4.
    mradius = 1.5
    redshifts = cm.readClusterRedshifts()

    figs = []

    for subdir in subdirs:

        if subdir not in data:

            data[subdir] = {}

        curdata = data[subdir]

        if 'masses' not in curdata:

            curdata['masses'], errs, massgrid, curdata[
                'scale_radii'] = ss.readMLMasses(workdir, subdir, clusters)

        masses = curdata['masses']
        scale_radii = curdata['scale_radii']

        if 'grid' not in curdata:

            refmasses = {}

            for cluster in clusters:

                refmasses[cluster] = nfwutils.massInsideR(
                    scale_radii[cluster], concentration, redshifts[cluster],
                    mradius) * np.ones_like(masses[cluster])

            curdata['grid'], curdata['means'], curdata[
                'scatters'] = isg.intrinsicScatter(
                    refmasses,
                    masses,
                    means=1. + np.arange(-0.08, 0.08, 0.0001),
                    scatters=np.arange(0.005, 0.05, 0.0025))

            means = curdata['means']
            scatters = curdata['scatters']

            mode, (r68, r95) = isg.getdist_1d_hist(means[0],
                                                   means[1],
                                                   levels=[0.68, 0.95])
            curdata['meandist'] = (mode, r68, r95)

            mode, (r68, r95) = isg.getdist_1d_hist(scatters[0],
                                                   scatters[1],
                                                   levels=[0.68, 0.95])
            curdata['scatterdist'] = (mode, r68, r95)

        for varname in 'mean scatter'.split():

            mode, r68, r95 = curdata['%sdist' % varname]

            print mode, r68, r95

            print '%s\t%2.4f\t+%2.4f\t-%2.4f\t+%2.4f\t-%2.4f' % (
                varname, mode, r68[0][1] - mode, mode - r68[0][0],
                r95[0][1] - mode, mode - r95[0][0])

            x, prob = curdata['%ss' % varname]
            fig = isgp.plotdist_1d_hist(x, prob, mode, [r68[0], r95[0]])
            ax = fig.axes[0]
            ax.set_title('%s %s' % (subdir, varname))

            figs.append(fig)
            fig.show()

    return figs, data
Example #10
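# Plots the fractional mass bias within 1.5 Mpc versus cluster redshift,
# comparing photo-z point-estimator masses with the P(z) method.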
def PointEstPzScript(data=None):

    if data is None:

        worklist = readtxtfile('simclusterlist')
        clusters = [x[0] for x in worklist]
        redshifts = cm.readClusterRedshifts()
        properredshifts = np.array([redshifts[x] for x in clusters])

        subdirs = ['contam0p10/BVRIZ']

        MLfracbias = ss.processFracBiasData(
            '/u/ki/dapple/nfs12/cosmos/simulations/clusters_2012-05-17-highdensity',
            subdirs, clusters, redshifts)[0]

        Bpointmass, Bpointgrid, scale_radii = ss.readPointMasses(
            '/u/ki/dapple/nfs12/cosmos/simulations/clusters_2012-05-17',
            'contam0p10/newman/BVRIZ', clusters)

        truemasses = [
            nfwutils.massInsideR(scale_radii[x], 4., redshifts[x], 1.5)
            for x in clusters
        ]

        Bpointfracbias = ss.calcFracBias(Bpointgrid, truemasses)

        data = [MLfracbias, Bpointfracbias, properredshifts]

    else:

        MLfracbias = data[0]
        Bpointfracbias = data[1]
        properredshifts = data[2]

    fig = pylab.figure()

    try:

        ax = fig.add_axes([0.15, 0.12, 0.96 - 0.15, 0.95 - 0.12])

        ax.axhline(0.0, c='k', linewidth=1.25)

        Apointmean, Apointerr = ss.bootstrapMean(MLfracbias)
        Bpointmean, Bpointerr = ss.bootstrapMean(Bpointfracbias)

        ax.errorbar(properredshifts - 0.0025,
                    Bpointmean,
                    Bpointerr,
                    fmt='cs',
                    label=r'Point Estimators',
                    color='#BFBFD4')
        ax.errorbar(properredshifts + 0.0025,
                    Apointmean,
                    Apointerr,
                    fmt='ro',
                    label=r'P(z) Method')

        ax.set_xlim(0.16, 0.72)
        ax.set_ylim(-0.08, 0.19)

        ax.set_xlabel('Cluster Redshift')
        ax.set_ylabel(r'Fractional Mass Bias within 1.5 Mpc')

        #        ax.text(0.2, 0.12, r'$BVr^{+}i^{+}z^{+}$ Photo-$z$ Point Est', fontsize=16)

        ax.legend(loc='upper left', numpoints=1, ncol=1)

        fig.savefig('publication/clustersims_pointest_pz_compare.eps')

    finally:

        return fig, data
Example #11
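# Plots the fractional mass bias within 1.5 Mpc versus cluster redshift for
# the BVRIZ photo-z point-estimate masses alone.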
def MLPointEstScript(data=None):

    if data is None:

        worklist = readtxtfile('simclusterlist')
        clusters = [x[0] for x in worklist]
        redshifts = cm.readClusterRedshifts()
        properredshifts = np.array([redshifts[x] for x in clusters])

        subdirs = ['contam0p10/BVRIZ']

        #        MLfracbias = ss.processFracBiasData('/u/ki/dapple/nfs12/cosmos/simulations/publication/highsn/cluster3',
        #                                            subdirs, clusters, redshifts)[0]
        #
        #
        #        Apointmass, Apointgrid, scale_radii = ss.readPointMasses('/u/ki/dapple/nfs12/cosmos/simulations/publication/highsn/cluster3', 'contam0p10/newman/APER', clusters)
        Bpointmass, Bpointgrid, scale_radii = ss.readPointMasses(
            '/u/ki/dapple/nfs12/cosmos/simulations/clusters_2012-05-17',
            'contam0p10/newman/BVRIZ', clusters)

        truemasses = [
            nfwutils.massInsideR(scale_radii[x], 4., redshifts[x], 1.5)
            for x in clusters
        ]

        #        Apointfracbias = ss.calcFracBias(Apointgrid, truemasses)
        Bpointfracbias = ss.calcFracBias(Bpointgrid, truemasses)

        data = [None, Bpointfracbias, properredshifts]

    else:

        #        Apointfracbias = data[0]
        Bpointfracbias = data[1]
        properredshifts = data[2]

    fig = pylab.figure()

    try:

        ax = fig.add_axes([0.15, 0.12, 0.95 - 0.15, 0.95 - 0.12])

        ax.axhline(0.0, c='k', linewidth=1.25)

        #        Apointmean, Apointerr = ss.bootstrapMean(Apointfracbias)
        Bpointmean, Bpointerr = ss.bootstrapMean(Bpointfracbias)

        ax.errorbar(properredshifts,
                    Bpointmean,
                    Bpointerr,
                    fmt='bo',
                    label=r'$BVr^+i^+z^+$')
        #        ax.errorbar(properredshifts+0.0025, Apointmean, Apointerr, fmt='rs', label=r'$uBVr^+i^+z^+$')

        ax.text(0.166, 0.135, r'$BVr^+i^+z^+$ Photo-Z Point Est', fontsize=16)

        ax.set_xlim(0.16, 0.72)
        ax.set_ylim(-0.05, 0.15)

        ax.set_xlabel('Cluster Redshift', fontsize=16)
        ax.set_ylabel(r'Fractional Mass Bias within 1.5 Mpc')

        #        ax.legend(loc='lower right', numpoints = 1, ncol=2)

        fig.savefig('publication/clustersims_pointest_compare.eps')

    finally:

        return fig, data
Example #12
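# Script fragment: bootstraps Newman-method scale radii, converts each to the
# mass inside 1.5 Mpc, and packages the result as a MassDist plus FITS columns.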
print '!!!!!!!!!!!!!!'
print len(inputcat.filter(goodObjs))

rss, nfails = bm.bootstrap_newman_method(inputcat['r_mpc'][goodObjs],
                                         inputcat['ghats'][goodObjs],
                                         betas[goodObjs],
                                         sigma,
                                         concentration,
                                         zcluster,
                                         nBootstraps,
                                         msigma=msigma)

#r500 = nfwutils.rdelta(true_scale_radius, concentration, 500)
#m500 = nfwutils.Mdelta(true_scale_radius, concentration, zcluster, 500)
m15 = nfwutils.massInsideR(true_scale_radius, concentration, zcluster, 1.5)

masses = np.array([nfwutils.massInsideR(x, concentration, zcluster, 1.5) \
                       for x in rss])

mu = np.mean(masses)
sig = np.std(masses)

chisq = ((masses - mu) / sig)**2

massdist = pcs.MassDist(zcluster, true_scale_radius, m15, mu, sig, chisq)

cols = [
    pyfits.Column(name='Rs', format='E', array=rss),
    pyfits.Column(name='masses', format='E', array=masses)
]
Example #13
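# Script: calibrates the intrinsic scatter between the true NFW masses inside
# 1.5 Mpc and the ML mass estimates with an MCMC fit.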
workdir = sys.argv[1]
subdir = sys.argv[2]
outfile = sys.argv[3]
nsamples = int(sys.argv[4])

items = readtxtfile('simclusterlist')
clusters = [x[0] for x in items]

redshifts = cm.readClusterRedshifts()
properredshifts = np.array([redshifts[x] for x in clusters])


masses, errs, massgrid, scale_radii = ss.readMLMasses(workdir, subdir, clusters)
truemasses = {}
for cluster in clusters:
    truemasses[cluster] = nfwutils.massInsideR(scale_radii[cluster], 4., redshifts[cluster], 1.5)

x = np.hstack([len(masses[c])*[truemasses[c]] for c in clusters])
y = np.hstack([masses[c] for c in clusters])
yerr = np.hstack([errs[c] for c in clusters])



calibmodel = isc.IntrinsicScatter(x, y, yerr)
calibMCMC = calibmodel.buildMCMC(outfile)
calibMCMC.m_angle.value = np.pi / 4.

calibMCMC.sample(nsamples)

calibMCMC.db.close()
Example #14
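# Aperture-mass (zeta_c) estimator: fits an NFW profile, integrates the binned
# shear, matches an NFW model to the projected kappa at each aperture radius,
# and records the enclosed 3-D mass and mean density.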
def massapp(catalog, config, nfwconfig):

    zcluster = catalog.hdu.header['ZLENS']
    dL = nfwutils.global_cosmology.angulardist(zcluster)

    r2 = config.massappr2

    controlbins = config.controlbins
    
    minradii = config.profilemin
    rmax = config.profilemax
    nbins = config.nbins


    # fit NFW profile

    nfwfitter = nfwfit.buildFitter(nfwconfig)
    nfwm200, nfwm200err = nfwfitter.runUntilNotFail(catalog, config)
    nfwm200 = nfwm200['m200']
    c200 = nfwfitter.model.massconRelation(np.abs(nfwm200)*nfwfitter.model.massScale*nfwutils.global_cosmology.h, nfwfitter.model.zcluster, nfwfitter.model.overdensity)       


    rho_c = nfwutils.global_cosmology.rho_crit(zcluster)
    rho_c_over_sigma_c = 1.5 * dL * nfwutils.global_cosmology.beta([1e6], zcluster)[0] * nfwutils.global_cosmology.hubble2(zcluster) / nfwutils.global_cosmology.v_c**2

    nfwrscale = tools.rscaleConstM(nfwm200,
                                   c200,
                                   rho_c,
                                   200)
    

    # calculate gamma for the catalog, using kappa from the best-fit NFW profile

    nfwkappa = tools.NFWKappa(np.ascontiguousarray(catalog['r_mpc'], dtype='<d'), c200, nfwrscale, rho_c_over_sigma_c)
    gamma = catalog['ghat']*(1-catalog['beta_s']*nfwkappa)/catalog['beta_s']


    radii, shear, shearerr, avebeta, avebeta2, ngals = logbinning(catalog, gamma, minradii, r2, nbins)
    
    cradii, cshear, cshearerr, cavebeta, cavebeta2, cngals = logbinning(catalog, gamma, r2, rmax, controlbins)
    integrand2 = cshear/cradii
    int2 = 2*rmax**2*scipy.integrate.simps(integrand2, cradii)/(rmax**2 - r2**2)
    
    #kappa aperture
    kappa_ap = avekappa(r2, rmax, nfwrscale, c200, rho_c_over_sigma_c)


    r1s = radii
    kappa_proj = np.zeros_like(r1s)
    matching_m200s = np.zeros_like(r1s)
    mass_enclosed = np.zeros_like(r1s)
    density_enclosed = np.zeros_like(r1s)

    for cur_ap_index, r1 in enumerate(r1s):

        #gamma integrals

        integrand1 = (shear/radii)[cur_ap_index:]
        res = scipy.integrate.simps(integrand1, radii[cur_ap_index:])
        int1 = 2*res

        zeta_c = int1 + int2


        #find best matched nfw that reproduces kappa core

        kappa_r1 = zeta_c + kappa_ap
        kappa_proj[cur_ap_index] = kappa_r1

        ##

        def findNFW(m200):

            c200 = nfwfitter.model.massconRelation(np.abs(m200)*nfwfitter.model.massScale*nfwutils.global_cosmology.h, nfwfitter.model.zcluster, nfwfitter.model.overdensity)       
        
            nfwrscale = tools.rscaleConstM(m200,
                                           c200,
                                           rho_c,
                                           200)

            avekappa = tools.aveEnclosedKappa(np.array([r1], dtype=np.double),
                                              c200,
                                              nfwrscale,
                                              rho_c_over_sigma_c)
            return avekappa - kappa_r1

        ##

        best_m200 = scipy.optimize.brentq(findNFW, 5e13, 1e16)
        matching_m200s[cur_ap_index] = best_m200
        best_c200 = nfwfitter.model.massconRelation(np.abs(best_m200)*nfwfitter.model.massScale*nfwutils.global_cosmology.h, nfwfitter.model.zcluster, nfwfitter.model.overdensity)       
        
        best_nfwrscale = tools.rscaleConstM(best_m200,
                                            best_c200,
                                            rho_c,
                                            200)

        mass_enclosed[cur_ap_index] = nfwutils.massInsideR(best_nfwrscale, best_c200,
                                                           zcluster, r1)
        vol = (4./3)*np.pi*r1**3
        density_enclosed[cur_ap_index] = mass_enclosed[cur_ap_index] / vol



    return r1s, kappa_proj, matching_m200s, mass_enclosed, density_enclosed
Example #15
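# Script: reloads a pickled pymc chain for one cluster and converts the
# post-burn log_r_scale trace into masses inside r500.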
workdir = sys.argv[1]  # not shown in the excerpt; inferred from its use in createOptions below
outdir = sys.argv[2]
cluster = sys.argv[3]
filter = sys.argv[4]
image = sys.argv[5]

controller = msfd.makeController()
handler = controller.filehandler
options, args = handler.createOptions(workdir=workdir,
                                      incatalog='%s/%s.%s.%s.cut_lensing.cat' %
                                      (workdir, cluster, filter, image),
                                      cluster=cluster,
                                      filter=filter,
                                      image=image)
options, args = controller.modelbuilder.createOptions(options=options,
                                                      args=args)

options.outputFile = '%s/%s.%s.%s.out' % (outdir, cluster, filter, image)
controller.load(options, args)

db = pymc.database.pickle.load(options.outputFile)

controller.mcmc = pymc.MCMC(controller.model, db=db)

controller.masses = np.array([
    nfwutils.massInsideR(rs, controller.mcmc.concentration,
                         controller.mcmc.zcluster, controller.r500)
    for rs in np.exp(controller.mcmc.trace('log_r_scale')[3000:])
])

mm.dumpMasses(controller.masses, options.outputFile)