Example #1
## Now to import my llh model framework. ##
import data_multi
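import os  # used below to locate this script's directory

# The other free names used below (weights, Gamma, src_ra, src_dec, bckg_trials,
# misc, cache) are defined or imported earlier in the full script this snippet
# is taken from; PointSourceInjector and MultiPointSourceLLH come from the
# skylab point-source likelihood package.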

llh40 = data_multi.init40(energy=True, weighting=weights)
llh79 = data_multi.init79(energy=True, weighting=weights)
llh86I = data_multi.init86I(energy=True, weighting=weights)
llh59 = data_multi.init59(energy=True, weighting=weights)
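# Each data_multi.initXX call builds the per-season likelihood (IC40, IC59,
# IC79, IC86-I) with the energy term enabled and the stacking source weights
# attached.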

# We've loaded in the appropriate llh samples; now let's put them all in the blender (not sure about weighting).
samples = [llh40, llh59, llh79, llh86I]

llhmodel = data_multi.multi_init(samples, energy=True)
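# multi_init stitches the four per-season likelihoods into a single
# multi-season llh model, which is what MultiPointSourceLLH.weighted_sensitivity
# operates on below.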

inj = PointSourceInjector(Gamma, sinDec_bandwidth=0.05, src_dec=src_dec, theo_weight=weights, seed=0)

sensitivity = MultiPointSourceLLH.weighted_sensitivity(
    llhmodel, src_ra=src_ra, src_dec=src_dec, alpha=.5, beta=.9, inj=inj,
    trials={'n_inj': [], 'TS': [], 'nsources': [], 'gamma': []},
    bckg_trials=bckg_trials, eps=0.02, n_iter=250)
print sensitivity
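# Convention for the call above: alpha=0.5, beta=0.9 means we solve for the
# flux at which 90% of signal-injected trials yield a TS above the median
# (50% quantile) of the background TS distribution, i.e. the sensitivity.
# The commented-out call below is the corresponding 5-sigma discovery
# potential (alpha=2.867e-7, beta=0.5).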

#discovery = PointSourceLLH.weighted_sensitivity(llhmodel,src_ra=src_ra,src_dec=src_dec,alpha=2.867e-7,beta=.5,inj=inj,trials={'n_inj':[],'TS':[],'nsources':[],'gamma':[]},bckg_trials=bckg_trials,eps=0.01,n_iter=250)
#print discovery

#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath(__file__))
sens_dir = misc.ensure_dir('/data/user/brelethford/Output/stacking_sensitivity/30youngSNR/old_sensitivity/')

# save the output
outfile_sens = sens_dir + 'gamma{}.array'.format(Gamma)

print 'Saving', outfile_sens, '...'
cache.save(sensitivity, outfile_sens)
#cache.save(discovery, outfile_disc)
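# To inspect the cached result later, one would reload it; a minimal sketch,
# assuming the project-local `cache` module exposes a `load` counterpart to
# `cache.save`:
#   sens = cache.load(outfile_sens)
#   print sens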
Example #2
# (head of this list is cut off in the snippet; reconstructed assuming one
#  PointSourceInjector per source declination, with spectral index Gamma)
inj_array = [
    PointSourceInjector(Gamma,
                        sinDec_bandwidth=0.05,
                        src_dec=np.atleast_1d(src_dec[i]))
    for i in range(len(src_dec))
]
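# One injector per stacked source lets us evaluate, at each declination
# separately, the flux that corresponds to a given number of injected events.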
flux_array = []
# fill the flux array, one injector at a time:
for i in range(len(src_dec)):
    inj = inj_array[i]
    MultiPointSourceLLH.weighted_sensitivity(llhmodel,
                                             src_ra=np.atleast_1d(0.),
                                             src_dec=np.atleast_1d(src_dec[i]),
                                             alpha=.5,
                                             beta=.9,
                                             inj=inj,
                                             trials={
                                                 'n_inj': [],
                                                 'TS': [],
                                                 'nsources': [],
                                                 'gamma': []
                                             },
                                             bckg_trials=bckg_trials,
                                             eps=10.99,
                                             n_iter=1)
    flux_array.append(inj.mu2flux(1) * 1000)
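# The single, very loose iteration above (eps=10.99, n_iter=1) just runs the
# injector through the machinery once; the number we keep is inj.mu2flux(1),
# the flux corresponding to a mean of one injected event, scaled by 1000
# (presumably a unit conversion).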

for i in range(len(src_dec)):
    print('for declination = ' + str(src_dec[i]) + ', flux = ' +
          str(flux_array[i]))


def get_results():
Example #3
batch = opts.batch
batchsize = opts.batchsize

## We'll assign the proper weighting scheme for the search, then use it to calculate and cache the associated bckg trials: ##

llh79 = data_multi.init79(energy=True, weighting=weights)
llh86I = data_multi.init86I(energy=True, weighting=weights)
llh59 = data_multi.init59(energy=True, weighting=weights)
llh40 = data_multi.init40(energy=True, weighting=weights)
# We've loaded in the appropriate llh samples; now let's put them all in the blender (not sure about weighting).

samples = [llh40, llh59, llh79, llh86I]

llhmodel = data_multi.multi_init(samples, energy=True)

bckg_trials = MultiPointSourceLLH.background_scrambles(
    llhmodel, src_ra, src_dec, alpha=0.5, maxiter=batchsize)
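# background_scrambles runs `maxiter` scrambled (background-only) trials at the
# stacked source positions, giving this batch's contribution to the background
# TS distribution.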

#choose an output dir, and make sure it exists
this_dir = os.path.dirname(os.path.abspath(__file__))
out_dir = misc.ensure_dir('/data/user/brelethford/Output/stacking_sensitivity/30youngSNR/old_background_trials/')

# save the output
outfile = out_dir + 'background_batch_{}.array'.format(batch)
print 'Saving', outfile, '...'
cache.save(bckg_trials, outfile)
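# Downstream, the per-batch files can be merged into one background TS sample;
# a minimal sketch, assuming `cache.load` mirrors `cache.save`, that the saved
# objects concatenate as arrays, and a hypothetical batch count `n_batches`:
#   import numpy as np
#   batches = [cache.load(out_dir + 'background_batch_{}.array'.format(b))
#              for b in range(n_batches)]
#   all_bckg = np.concatenate(batches)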