Example #1
inj = PointSourceInjector(Gamma, sinDec_bandwidth=.05, src_dec=src_dec, seed=0)

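# alpha=0.5 with beta=0.9 asks for the flux at which 90% of signal-injected
# trials exceed the median (50% quantile) of the background TS distribution,
# i.e. the usual sensitivity; the discovery-potential calls in the later
# examples use alpha=2.867e-7 (one-sided 5 sigma) with beta=0.5 instead.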
results = PointSourceLLH.weighted_sensitivity(llh_single,
                                              src_ra=src_ra,
                                              src_dec=src_dec,
                                              alpha=.5,
                                              beta=.9,
                                              inj=inj,
                                              trials={
                                                  'n_inj': [],
                                                  'TS': [],
                                                  'nsources': [],
                                                  'gamma': []
                                              },
                                              bckg_trials=bckg_trials,
                                              eps=0.01,
                                              n_iter=250,
                                              maxtrial=1000)
# n_iter is lowered from 1000 to 250 to reduce estimation time; maxtrial is left at 1000.
print(results)

#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath(__file__))
out_dir = misc.ensure_dir(datafolder + 'sensitivity/')

# save the output
outfile = out_dir + 'single_sensitivity_dec{0:+010.5}.array'.format(dec_deg)
print('Saving {} ...'.format(outfile))
cache.save(results, outfile)
Example #2
# (the assignment below is reconstructed: the snippet is shown mid-call on the
#  source page, and the opening line is inferred from `print(sensitivity)` and
#  the commented-out weighted_sensitivity call further down)
sensitivity = PointSourceLLH.weighted_sensitivity(
    llhmodel,
    src_ra=src_ra,
    src_dec=src_dec,
    alpha=.5,
    beta=.9,
    inj=inj,
    mc=MC,
    TSval=TSval,
    w_theoMC=modelweights['{}'.format(llhweight)],
    w_theo=modelweights['{}'.format(injweight)],
    w_theo_fit=modelweights['{}'.format(injweight)],
    eps=0.05,
    n_iter=100)
print(sensitivity)

#discovery = PointSourceLLH.weighted_sensitivity(llhmodel,src_ra=src_ra,src_dec=src_dec,alpha=2.867e-7,beta=.5,inj=inj,trials={'n_inj':[],'TS':[],'nsources':[],'gamma':[]},bckg_trials=bckg_trials,eps=0.01,n_iter=250)
#print discovery

#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath(__file__))
sens_dir = misc.ensure_dir(
    '/data/user/brelethford/Output/stacking_sensitivity/4yr_Starburst/{0}yr/{1}_mhuber_git/{2}_inj/sensitivity/'
    .format(str(n), llhweight, injweight))

# save the output
outfile_sens = sens_dir + 'gamma{}.array'.format(Gamma)

print('Saving {} ...'.format(outfile_sens))
cache.save(sensitivity, outfile_sens)
#cache.save(discovery, outfile_disc)
Example #3
###################################
# BACKGROUND ONLY TS DISTRIBUTION #
###################################

print("\nRunning background only trials ...")
print("Grabbing background trials from {}...".format(bg_dir))
batches = [
    cache.load(bg_dir + f) for f in os.listdir(bg_dir)
    if f.endswith('.array')
]
trials = np.concatenate(batches, axis=1)

#save trials and fit params
outfile = sens_dir + 'trials.array'
cache.save(trials, outfile)

ts_parameters = fit_background(trials,
                               dir_name + "background_trials_%s.png" % tag)
print("Finished background fit.")

outfile = sens_dir + 'bg_fit.array'
cache.save(ts_parameters, outfile)

#These get used below for fitting the disc. pot.
median_ts, eta, ndf, scale = ts_parameters

print("\nBackground only TS > 0 described by:")
print(" Median TS: %6.4f" % median_ts)
print(" PDF(TS>0): %6.4f * chi2(ts / %.4f, ndf = %.4f)" % (eta, scale, ndf))
Example #4
# (the opening `parser.add_option(` is reconstructed: the snippet starts
#  mid-call, and the optparse-style `opts, args = parser.parse_args()` below
#  implies add_option; a matching `--batch` option is defined in the part of
#  the script not shown here)
parser.add_option(
    '--batchsize',
    dest='batchsize',
    type=int,
    default=1000,
    metavar='BATCHSIZE',
    help='Assigns how many background trials are used in each batch.')

opts, args = parser.parse_args()
batch = opts.batch
batchsize = opts.batchsize

## For this check we use the flux weighting scheme for the llh model and injection:
llhmodel = data_double.init(energy=True, weighting=flux)

bckg_trials = PointSourceLLH.background_scrambles(llhmodel,
                                                  src_ra,
                                                  src_dec,
                                                  alpha=0.5,
                                                  maxiter=batchsize)
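
# Each run of this script produces one batch of scrambled-background trials;
# batches written this way are later loaded and concatenated into a single
# trials array (see Example #3 above).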

#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath(__file__))
out_dir = misc.ensure_dir(
    '/data/user/brelethford/Output/stacking_sensitivity/2LAC/flux/background_trials/'
)

# save the output
outfile = out_dir + 'background_batch_{}.array'.format(batch)
print('Saving {} ...'.format(outfile))
cache.save(bckg_trials, outfile)
Example #5
# (the opening line is reconstructed from `print(discovery)` below and from
#  the indentation of the keyword arguments; alpha=2.867e-7 is the one-sided
#  5 sigma tail probability, so with beta=0.5 this estimates the discovery
#  potential)
discovery = PointSourceLLH.weighted_sensitivity(llhmodel,
                                                src_ra=src_ra,
                                                src_dec=src_dec,
                                                alpha=2.867e-7,
                                                beta=.5,
                                                inj=inj,
                                                trials={
                                                    'n_inj': [],
                                                    'TS': [],
                                                    'nsources': [],
                                                    'gamma': []
                                                },
                                                bckg_trials=bckg_trials,
                                                eps=0.01,
                                                n_iter=250)
print(discovery)

#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath(__file__))
#sens_dir = misc.ensure_dir ('/data/user/brelethford/Output/stacking_sensitivity/SwiftBAT70m/{0}/{1}_inj/sensitivity/'.format(llhweight, injweight))
disc_dir = misc.ensure_dir(
    '/data/user/brelethford/Output/stacking_sensitivity/SwiftBAT70m/{0}/{1}_inj/disc/'
    .format(llhweight, injweight))

# save the output
#outfile_sens = sens_dir + 'gamma{}.array'.format(Gamma)
outfile_disc = disc_dir + 'gamma{}.array'.format(Gamma)

print('Saving {} ...'.format(outfile_disc))
#cache.save(sensitivity, outfile_sens)
cache.save(discovery, outfile_disc)
Example #6
#!/usr/bin/env python

import numpy as np
from icecube.umdtools import cache

#make sources
N = 10

dec_limits = np.radians([-85.0, 85.0])
ra_limits = np.radians([0.0, 360.0])
decs = np.linspace(dec_limits[0], dec_limits[1], N)
# RA values run through two full turns and are wrapped back into [0, 2*pi)
ras = np.linspace(ra_limits[0], 2 * ra_limits[1], N) % (2 * np.pi)
ns = np.arange(1, N + 1)

params = {'n': ns, 'dec': decs, 'ra': ras}

cache.save(params, 'params.pickle')
# Also write a plain-text copy for the svn site (the pickle was saved above)
outfolder = "/data/i3home/brelethford/csky/stacktest/teststack/"
with open(outfolder + "params.txt", "w") as outfile:
    outfile.write('N     Dec (radians)     RA (radians)\n')
    for n, dec, ra in zip(ns, decs, ras):
        outfile.write('{0}     {1:f}     {2:f}\n'.format(n, dec, ra))
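
# To read the sources back in another script, cache.load (used in Example #3)
# reads back what cache.save wrote, e.g.:
#   params = cache.load('params.pickle')
#   decs, ras = params['dec'], params['ra']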
Example #7
#!/usr/bin/env python

import numpy as np
import icecube.astro as astro
from icecube.umdtools import cache

rawparams = np.genfromtxt('blackhole233.csv', skip_header=1,
                          delimiter=',').T

l, b, z, mass, flux2micron, d1Mpc, M_by_R2 = rawparams[1:]

ra, dec = astro.gal_to_equa(l, b)

params = {
    'ra': list(ra),
    'dec': list(dec),
    'z': list(z),
    'mass': list(mass),
    'flux2m': list(flux2micron),
    'distMpc': list(d1Mpc),
    'M_by_R2': list(M_by_R2)
}

cache.save(params,
           '/data/user/brelethford/Data/blackhole/pickle/params.pickle')
Example #8
#
#def get_flux_bckg_trials():
#    return PointSourceLLH.background_scrambles(llh_flux,src_ra,src_dec,alpha=0.5)
#
#bckg_trials_flux = cache.get (datafolder + 'SwiftBAT70m/pickle/bckg_trials_flux.pickle', get_flux_bckg_trials)
#
#
#def get_redshift_bckg_trials():
#    return PointSourceLLH.background_scrambles(llh_redshift,src_ra,src_dec,alpha=0.5)
#
#bckg_trials_redshift = cache.get (datafolder + 'SwiftBAT70m/pickle/bckg_trials_redshift.pickle', get_redshift_bckg_trials)
#

## This works a little differently from the single-source sensitivity: the background scrambles must be calculated ahead of time, using the definition provided in psLLH_stack.py, and the aim is to do it all within the same function. ##

## Background trials have the following keys: ##
## ['beta', 'TS_beta', 'beta_err', 'n_inj', 'nsources', 'TS', 'gamma'] ##
## A uniform weight (i.e. no weighting) is used first to produce the background trials. ##
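
# A hedged sketch of how `bckg_trials_uniform` (saved below) is presumably
# produced, following the commented-out calls above and the pattern in
# Example #4; `llh_uniform` and `batchsize` are assumed to be defined in the
# truncated part of the script:
# bckg_trials_uniform = PointSourceLLH.background_scrambles(llh_uniform,
#                                                           src_ra,
#                                                           src_dec,
#                                                           alpha=0.5,
#                                                           maxiter=batchsize)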

#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath(__file__))
out_dir = misc.ensure_dir(
    '/data/user/brelethford/Output/stacking_sensitivity/SwiftBAT70m/uniform/background_trials/'
)

# save the output
outfile = out_dir + 'background_batch_{}.array'.format(batch)
print('Saving {} ...'.format(outfile))
cache.save(bckg_trials_uniform, outfile)
Example #9
# 'r' is the signal PDF divided by the background PDF for an event;
# values of r below 1e-10 are treated as zero (min_r)
analysis.set_config(min_r=1.0e-10,
                    sigma_t_truncate=0,
                    sigma_t_min=1.0e-3,
                    sigma_t_max=1.0e7,
                    max_delang=np.pi)
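
# Illustrative only (not grbllh internals): the min_r setting amounts to
# clipping tiny per-event signal-to-background ratios, e.g.
#   r = np.where(r < 1.0e-10, 0.0, r)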

# perform background trials
analysis.set_bg_thrower()

tsd_bg_tw = grbllh.do_trials(N, [analysis],
                             llh_type=grbllh.LlhType.max_source,
                             mu=0.,
                             seed=options.randomseed)

# Store the TestStatDist object in a pickle file
if try_one_source:
    cache.save(
        tsd_bg_tw,
        "/data/user/sfahey/FRB/L2_Analysis/tsd/bg/tsd_oneSrc_seed%s_dT%.2f" %
        (str(options.randomseed).zfill(2), dT))
else:
    cache.save(
        [tsd_bg_tw],
        "/data/user/sfahey/FRB/L2_Analysis/tsd/bg/tsd_maxburst_seed%s_dT%.3f" %
        (str(options.randomseed).zfill(3), dT))

print "Time: ", time.time() - time0