def testnoisebias(noise=0.04, niters=10000):

    r_mpc = np.linspace(0.25, 3.0, 20)
    beta_s = 0.5
    zcluster = 0.5

    # noiseless shear profile for a 1e15 M_sun, c=4 halo
    g = createPerfectProfile(1e15, 4.0, zcluster, r_mpc, beta_s)
    gerr = noise / np.sqrt(r_mpc)

    # m200scan = np.logspace(np.log10(1e13), np.log10(5e15), 200)
    m200scan = np.arange(-1e15 + 5e12, 5e15, 1e13)

    config = nfwfit.readConfiguration('testnoisebias.config')
    likelihood = Likelihood(config)
    likelihood.bindData(r_mpc, g, gerr, beta_s, zcluster)

    perfectpdf = np.array([likelihood(m200=curm200) for curm200 in m200scan])

    maxs = np.zeros(niters)
    for i in range(niters):
        gwnoise = g + gerr * np.random.standard_normal(size=len(g))
        likelihood.bindData(r_mpc, gwnoise, gerr, beta_s, zcluster)
        logprob = np.array([likelihood(m200=curm200) for curm200 in m200scan])
        # np.argmax avoids a shape error when the maximum is not unique
        maxs[i] = m200scan[np.argmax(logprob)]

    return r_mpc, g, gerr, m200scan, perfectpdf, maxs
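# The helper below is a minimal usage sketch added for illustration, not part
# of the original pipeline: it runs testnoisebias() and reports the fractional
# bias of the maximum-likelihood m200 relative to the 1e15 M_sun input used
# above.
def summarizeNoiseBias(noise=0.04, niters=1000):

    r_mpc, g, gerr, m200scan, perfectpdf, maxs = testnoisebias(noise=noise,
                                                               niters=niters)
    meanmax = np.mean(maxs)
    print 'mean ML m200: {0:.3e}  fractional bias: {1:.3f}'.format(
        meanmax, meanmax / 1e15 - 1.)

    return meanmax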
def stackCats(stackfile, configname, answerfile, outfile):

    filebase = os.path.basename(answerfile)
    match = re.match('(.+)_answers.pkl', filebase)
    simtype = match.group(1)

    with open(answerfile, 'rb') as input:
        answers = cPickle.load(input)

    tostack = [x[0] for x in readtxtfile.readtxtfile(stackfile)]

    config = nfwfit.readConfiguration(configname)
    simreader = nfwfit.buildSimReader(config)
    nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())
    fitter = nfwfit.buildFitter(config)

    profile = fitter.profileBuilder
    if profile.binspacing == 'linear':
        binedges = np.linspace(profile.minradii, profile.maxradii,
                               profile.nbins + 1)
    else:
        binedges = np.logspace(np.log10(profile.minradii),
                               np.log10(profile.maxradii), profile.nbins + 1)

    stackedprofile = OnlineStatistics(binedges)

    for catalogname in tostack:

        filebase = os.path.basename(catalogname)
        match = idpatterns[simtype].match(filebase)
        haloid = int(match.group(1))

        try:
            truth = answers[haloid]
        except KeyError:
            print 'Failure at {0}'.format(catalogname)
            raise

        catalog = nfwfit.readSimCatalog(catalogname, simreader, config)
        stackedprofile.accumulate(catalog, truth)

    stackedprofile.writeSimCat(outfile)

    return stackedprofile
def run(simtype, chaindir, outfile, delta, massbin=0, samples=__samples__,
        ngauss=__ngauss__, singlecore=__singlecore__):

    outfile = '{}.gauss{}'.format(outfile, ngauss)

    config = nfwfit.readConfiguration('%s/config.sh' % chaindir)
    simreader = nfwfit.buildSimReader(config)
    nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())

    massedges = rundln.defineMassEdges(simtype, delta)
    halos = dln.loadPDFs(chaindir, simtype, simreader, delta, massedges,
                         massbin)

    if len(halos) < 10:
        sys.exit(0)

    success = False
    for i in range(20):
        try:
            parts = dln.buildGaussMixture1DModel(halos, ngauss,
                                                 modeltype='ratio')
            success = True
            break
        except (AssertionError, pymc.ZeroProbability) as e:
            continue
    assert success is True

    with open('%s.massrange' % outfile, 'w') as output:
        output.write('%f\n%f\n' % (massedges[massbin], massedges[massbin + 1]))

    dln.sample(parts, outfile, samples, singlecore=singlecore)
def doMassUncert(argdict):
    return nfwnoise.calcMassUncert(**argdict)


#####


if __name__ == '__main__':

    nthreads = 4
    pool = ThreadPool(nthreads)

    nmasses = 10000
    nperthread = nmasses / nthreads

    config = nfwfit.readConfiguration('mxxl_imperial/snap41/c4_r10/config.sh')

    r_mpc = np.arange(0.75, 3.0, 0.2)
    beta_s = 0.5
    zcluster = 0.5

    masses = 10**np.random.uniform(14, 16, nmasses)
    concens = np.random.uniform(1.1, 19.9, nmasses)

    testmasses = np.logspace(14., 16., 400)
    log10testmasses = np.log10(testmasses)

    shearprofiles_p, shearerr_p = nfwnoise.createClusterSet(
        config, masses, zcluster, r_mpc, beta_s, 0.01)

    profile_threadgroups = [
        dict(config=config,
             r_mpc=r_mpc,
def plotLogProbSurface(catalogname, configfile, fig=None,
                       noiselevels=[0., 0.03, 0.07, 0.15]):

    if fig is None:
        fig = pylab.figure()

    config = nfwfit.readConfiguration(configfile)
    simreader = nfwfit.buildSimReader(config)
    nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())

    catalog = nfwfit.readSimCatalog(catalogname, simreader, config)
    fitter = nfwfit.buildFitter(config)

    r_mpc, ghat, sigma_ghat, beta_s, beta_s2, zlens = fitter.prepData(
        catalog, config)
    print sigma_ghat

    fitter.model.setData(beta_s, beta_s2, zlens)
    guess = fitter.model.guess()

    massgrid = np.arange(1e14, 1e15, 2.5e13) / fitter.model.massScale
    masscenters = (massgrid[1:] + massgrid[:-1]) / 2.

    concengrid = None

    for noise in noiselevels:

        noisy_g = ghat + noise * np.random.standard_normal(len(ghat))
        noisy_sigma = np.sqrt(sigma_ghat**2 + noise**2)

        if len(guess) == 2:
            # two-parameter model: scan mass and concentration, then
            # marginalize over concentration
            concengrid = np.arange(1., 15., 0.25)
            concencenters = (concengrid[1:] + concengrid[:-1]) / 2.

            chisqgrid = np.zeros((len(masscenters), len(concencenters)))
            for i in range(len(masscenters)):
                for j in range(len(concencenters)):
                    chisqgrid[i, j] = fitmodel.ChiSqStat(
                        noisy_g, noisy_sigma,
                        fitter.model(r_mpc, masscenters[i], concencenters[j]))

            # subtract the minimum chi-square before exponentiating to avoid
            # numerical overflow
            probgrid = np.exp(-0.5 * (chisqgrid - np.min(chisqgrid)))
            massprobgrid = np.sum(probgrid, axis=1) / np.sum(probgrid)
            logprob = np.log(massprobgrid)

        else:
            # one-parameter model: scan mass only
            chisqgrid = np.zeros(len(masscenters))
            for i in range(len(masscenters)):
                chisqgrid[i] = fitmodel.ChiSqStat(
                    noisy_g, noisy_sigma, fitter.model(r_mpc, masscenters[i]))
            logprob = -0.5 * chisqgrid

        ax = pylab.gca()
        ax.plot(masscenters, np.exp(logprob - np.max(logprob)), label=noise)
        print 'Max: {0}'.format(masscenters[np.argmax(logprob)])

    pylab.legend()

    return fig, massgrid, concengrid, chisqgrid, logprob
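# Minimal usage sketch, added for illustration (the catalog and config paths
# are placeholders, not taken from the source): overlay mass likelihood curves
# for a few added shape-noise levels and save the figure.
def plotNoiseComparison(catalogname='halo_cat.dat', configfile='config.sh'):

    fig, massgrid, concengrid, chisqgrid, logprob = plotLogProbSurface(
        catalogname, configfile, noiselevels=[0., 0.05, 0.1])
    fig.savefig('logprob_noise_comparison.png')

    return fig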
def consolidateFits(workdir, simtype, outdir):

    failfile = open('{0}/fails'.format(outdir), 'w')

    idpattern = idpatterns[simtype]

    answers = cPickle.load(
        open('{0}/{1}_answers.pkl'.format(workdir, simtype), 'rb'))

    outputfiles = glob.glob('%s/*.out' % outdir)
    nhalos = len(outputfiles)

    ids = []
    measured_m200s = np.zeros(nhalos)
    measured_m200errs = np.zeros(nhalos)
    measured_m500s = np.zeros(nhalos)
    measured_m500errs = np.zeros(nhalos)
    measured_cs = np.zeros(nhalos)
    measured_rs = np.zeros(nhalos)
    true_m200s = np.zeros(nhalos)
    true_m500s = np.zeros(nhalos)
    true_cs = np.zeros(nhalos)
    redshifts = np.zeros(nhalos)

    results = dict(ids=ids,
                   measured_m200s=measured_m200s,
                   measured_m200errs=measured_m200errs,
                   measured_m500s=measured_m500s,
                   measured_m500errs=measured_m500errs,
                   measured_cs=measured_cs,
                   measured_rs=measured_rs,
                   true_m200s=true_m200s,
                   true_m500s=true_m500s,
                   true_cs=true_cs,
                   redshifts=redshifts)

    class WeirdException(Exception):
        pass

    #load up the environment for cosmology, and mc relation if used
    config = nfwfit.readConfiguration('{0}/config.sh'.format(outdir))
    simreader = nfwfit.buildSimReader(config)
    nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())
    fitter = nfwfit.buildFitter(config)

    configname = os.path.basename(outdir)

    for i, output in enumerate(outputfiles):

        filebase = os.path.basename(output)
        match = idpattern.match(filebase)

        try:
            haloid = int(match.group(1))
        except AttributeError as e:
            print filebase
            raise e
        except ValueError:
            haloid = match.group(1)

        try:
            truth = answers[haloid]
        except KeyError:
            print 'Failure at {0}'.format(output)
            raise

        ids.append(haloid)
        true_m200s[i] = truth['m200']
        true_m500s[i] = truth['m500']
        true_cs[i] = truth['concen']
        redshifts[i] = truth['redshift']

        input = open(output)
        measured = cPickle.load(input)
        input.close()

        if measured is None:
            print 'Fail {0} {1}'.format(configname, haloid)
            failfile.write('Fail {0} {1}\n'.format(configname, haloid))
            continue

        measured_m200s[i] = measured[0]['m200'] * \
            fitter.model.massScale * nfwutils.global_cosmology.h
        measured_m200errs[i] = np.mean(np.abs(measured[1]['m200'])) * \
            fitter.model.massScale * nfwutils.global_cosmology.h

        # measured is a (best-fit, errors) pair, so the fit values live in
        # measured[0]
        if 'c200' in measured[0]:
            measured_cs[i] = measured[0]['c200']
        else:
            ## need to dig up the mc relation
            measured_cs[i] = fitter.model.massconRelation(
                np.abs(measured_m200s[i]), redshifts[i],
                fitter.model.overdensity)

        #####
        #calculate m500

        measured_rs[i] = nfwutils.rscaleConstM(np.abs(measured_m200s[i]),
                                               measured_cs[i], redshifts[i],
                                               fitter.model.overdensity)
        measured_m500s[i] = nfwutils.Mdelta(measured_rs[i], measured_cs[i],
                                            redshifts[i], 500)

        if measured_m200s[i] < 0:
            measured_m500s[i] = -measured_m500s[i]

        if not np.isfinite(measured_m500s[i]):
            print 'NOT FINITE'
            print haloid
            print measured

    cPickle.dump(results, open('%s/consolidated.pkl' % outdir, 'w'))
    failfile.close()
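# Minimal sketch, added for illustration: reload the consolidated.pkl written
# by consolidateFits() and summarize the measured-to-true m200 ratio. The
# outdir argument is whatever directory was passed to consolidateFits().
def loadConsolidated(outdir):

    with open('%s/consolidated.pkl' % outdir, 'rb') as input:
        results = cPickle.load(input)

    ratio = results['measured_m200s'] / results['true_m200s']
    print 'median m200 ratio: {0:.3f}'.format(np.median(ratio))

    return results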
def compareNoiseProfiles(data=None):
    ''' Compare intrinsic noise levels to assumed shape noise'''

    if data is None:
        data = {}

    if 'centers_mpc' not in data:

        config = nfwfit.readConfiguration(
            '/vol/euclid1/euclid1_1/dapple/mxxl_lensing/mxxlsnap54/general-c4-r10-n0_0-xrayNONE/config.sh'
        )
        simreader = nfwfit.buildSimReader(config)
        nfwutils.global_cosmology.set_cosmology(simreader.getCosmology())
        fitter = nfwfit.buildFitter(config)

        intrnoise_profiles = []
        for haloid in range(800, 880):
            catbase = '/vol/euclid1/euclid1_raid1/dapple/mxxl_lensing/mxxlsnap54/halo_54_{}_0'.format(
                haloid)
            catalog = nfwfit.readSimCatalog(catbase, simreader, config)
            r_mpc, ghat, sigma_ghat, beta_s, beta_s2, zlens = fitter.prepData(
                catalog)
            intrnoise_profiles.append(sigma_ghat)

        intrnoise_profiles = np.row_stack(intrnoise_profiles)
        intrnoise = np.mean(intrnoise_profiles, axis=0)
        intrnoise_err = np.std(intrnoise_profiles, axis=0)

        edges_mpc = np.linspace(config.profilemin, config.profilemax,
                                config.nbins + 1)
        centers_mpc = (edges_mpc[1:] + edges_mpc[:-1]) / 2.

        dL = nfwutils.global_cosmology.angulardist(zlens)
        edges_arcmin = (edges_mpc / dL) * (180 / np.pi) * 60
        bin_areas = np.pi * (edges_arcmin[1:]**2 - edges_arcmin[:-1]**2)

        shapesigma = 0.25
        galdensity = 20.  # per sq arc min
        shape_noise = shapesigma / np.sqrt(galdensity * bin_areas)

        data['centers_mpc'] = centers_mpc
        data['intrnoise'] = intrnoise
        data['intrnoise_err'] = intrnoise_err
        data['shape_noise'] = shape_noise

    else:

        centers_mpc = data['centers_mpc']
        intrnoise = data['intrnoise']
        intrnoise_err = data['intrnoise_err']
        shape_noise = data['shape_noise']

    fig = pylab.figure()
    ax = pylab.gca()

    ax.plot(centers_mpc,
            intrnoise / intrnoise[2],
            label='Intrinsic Noise [80 cluster avg]',
            marker='None',
            linestyle='-',
            linewidth=2,
            color=pp.colors[0])
    ax.plot(centers_mpc,
            shape_noise / shape_noise[2],
            marker='None',
            linestyle='-',
            linewidth=3,
            color=pp.colors[1],
            label='Shape Noise')

    ax.legend(loc='upper right')
    ax.set_xlabel('Radius [Mpc]', fontsize=16)
    ax.set_ylabel('Relative Shear Error [Arbit Norm]', fontsize=16)

    fig.tight_layout()

    fig.savefig('figures/relative_shear_noise.png')
    fig.savefig('figures/relative_shear_noise.pdf')
    fig.savefig('figures/relative_shear_noise.eps')

    return fig, data
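# Usage sketch, added for illustration: compareNoiseProfiles() caches its
# measurements in the returned dict, so the expensive loop over the 80 halo
# catalogs only runs on the first call; passing the dict back replots directly.
def replotNoiseComparison():

    fig, data = compareNoiseProfiles()       # reads the catalogs, fills data
    fig2, data = compareNoiseProfiles(data)  # replots from the cached dict
    return fig2, data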