import sys
import itertools
import numpy as np
from scipy.signal import convolve2d
from skylab.ps_injector_stack_100 import PointSourceInjector
from skylab.psLLH_stack import MultiPointSourceLLH
from skylab.utils import poisson_weight
from optparse import OptionParser
import argparse
## I'll add an argument later to select specific weighting schemes; for now I just set the weighting manually in this script. ##
sys.path.append("/home/brelethford/Documents/IceCube_Research/Scripts/AGN_Core")
projfolder='/home/brelethford/Documents/IceCube_Research/'

## Read in background trials previously logged ##

import datatest_stack_bins

llh_single = datatest_stack_bins.init(energy=True)

mc = datatest_stack_bins.MC()

##I have to load in whichever declination I'm working on at the moment...##

parser = OptionParser (usage = '%prog [options]')
parser.add_option ('--dec', dest = 'dec', type = float,
                default = 0., metavar = 'DEC',
                help = 'sin of the source declination.')
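## A minimal sketch of the weighting-scheme argument mentioned above; the '--weights' option
## and its choices are placeholders (nothing below reads opts.weights yet) -- they just mirror
## the uniform / flux / redshift variants set up further down. ##
parser.add_option ('--weights', dest = 'weights', type = str,
                default = 'uniform', metavar = 'WEIGHTS',
                help = "source weighting scheme: 'uniform', 'flux', or 'redshift'.")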

opts, args = parser.parse_args ()
dec_deg = np.arcsin(opts.dec) * 180./np.pi


datafolder='/data/user/brelethford/Output/all_sky_sensitivity/results/single_stacked/bin_test/dec{0:+010.5}/'.format(dec_deg)
bckg_trials = {
    'n_inj': n_inj,
    'nsources': np.asarray(nsources),
    'TS': np.asarray(TS),
    'beta': beta,
    'beta_err': beta_err,
    'TS_beta': TS_beta,
    'gamma': np.asarray(gamma)
}

## These params contain everything we should need to weight our sources. Remember to convert src_dec to sindec ##

src_ra, src_dec, redshift, gamma, flux, lum = params['ra'], np.sin(
    params['dec']
), params['redshift'], params['gamma'], params['flux'], params['lum']
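## A minimal sketch of how per-source weights could be formed from these params, assuming flux
## and redshift are 1-D array-likes; the normalization here is only illustrative -- the init()
## calls below are handed the raw flux (or redshift**-2) values directly. ##
theo_weight_flux = np.asarray(flux, dtype=float)
theo_weight_flux = theo_weight_flux / theo_weight_flux.sum()
theo_weight_z = np.power(np.asarray(redshift, dtype=float), -2)
theo_weight_z = theo_weight_z / theo_weight_z.sum()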

import datatest_stack_bins

#llh_uniform = datatest_stack.init(energy=True)

llh_flux = datatest_stack_bins.init(energy=True, weighting=flux)

#llh_redshift = datatest_stack.init(energy=True, weighting = np.power(redshift,-2))

#For this part it's unnecessary to populate signal, but remember how to do so down the line...

# init likelihood class
# llh = datatest_stack.init(energy=True)
# mc = datatest_stack.MC()
# extra=datatest_stack.extra()
# dpsi=extra["dpsi"]
# print llh

### Sensitivity ###

##This one's gonna work a little differently than the single source sensitivity. First off, we need to calculate the background scrambles ahead of time, with the definition provided in psLLH_stack.py. I'll try to do this all within the same function:##
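## A rough sketch of that background-scrambles call, mirroring the usage further down in this
## file (there it is run on llh_uniform); alpha=0.5 and maxiter=1000 are illustrative values. ##
# bckg_trials = PointSourceLLH.background_scrambles(llh_flux, src_ra, src_dec,
#                                                   alpha=0.5, maxiter=1000)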
bckg_trials = {
    'n_inj': n_inj,
    'nsources': np.asarray(nsources),
    'TS': np.asarray(TS),
    'beta': beta,
    'beta_err': beta_err,
    'TS_beta': TS_beta,
    'gamma': np.asarray(gamma)
}

## These params contain everything we should need to weight our sources. Remember to convert src_dec to sindec ##

src_ra, src_dec, redshift, gamma, flux, lum = params['ra'], np.sin(
    params['dec']
), params['redshift'], params['gamma'], params['flux'], params['lum']

import datatest_stack_bins

llh_uniform = datatest_stack_bins.init(energy=True)

#llh_flux = datatest_stack.init(energy=True, weighting = flux)

#llh_redshift = datatest_stack.init(energy=True, weighting = np.power(redshift,-2))

#For this part it's unnecessary to populate signal, but remember how to do so down the line...

# init likelihood class
# llh = datatest_stack.init(energy=True)
# mc = datatest_stack.MC()
# extra=datatest_stack.extra()
# dpsi=extra["dpsi"]
# print llh

### Sensitivity ###
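## A rough sketch of the merging step implied here: per-batch pickles get reassembled into flat
## arrays before the dictionary is rebuilt below. The file pattern and 'batches' name are
## placeholders -- only the bckg_trials dictionary itself comes from this script. ##
# import pickle, glob
# batch_files = sorted(glob.glob(os.path.join(datafolder, 'bckg_trials_batch*.pkl')))
# batches = [pickle.load(open(f, 'rb')) for f in batch_files]
# n_inj = np.concatenate([b['n_inj'] for b in batches])
# nsources = np.concatenate([b['nsources'] for b in batches])
# TS = np.concatenate([b['TS'] for b in batches])
# gamma = np.concatenate([b['gamma'] for b in batches])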
# Binomial standard error on the background fraction ('fraction' successes out of 'i' trials);
# fall back to 1 if beta is degenerate.
beta_err = (np.sqrt(fraction * (1. - fraction) / float(i)) if 0 < beta < 1 else 1.)

##Now we have all the pieces of the original dictionary. Time to glue bckg_trials back together into its original form.##
bckg_trials = {
    'n_inj': n_inj,
    'nsources': np.asarray(nsources),
    'TS': np.asarray(TS),
    'beta': beta,
    'beta_err': beta_err,
    'TS_beta': TS_beta,
    'gamma': np.asarray(gamma)
}

## These params contain everything we should need to weight our sources. Remember to convert src_dec to sindec ##

src_ra, src_dec, redshift, gamma, flux, lum =  params['ra'], np.sin(params['dec']), params['redshift'], params['gamma'], params['flux'], params['lum']

import datatest_stack_bins

#llh_redshift = datatest_stack_bins.init(energy=True)

#llh_flux = datatest_stack.init(energy=True, weighting = flux)
print(type(redshift))  # quick check of the redshift container type before it gets cast for the weighting below
llh_redshift = datatest_stack_bins.init(energy=True, weighting = list(np.power(redshift,-2)))

#For this part it's unnecessary to populate signal, but remember how to do so down the line...

# init likelihood class
# llh = datatest_stack.init(energy=True)
# mc = datatest_stack.MC()
# extra=datatest_stack.extra()
# dpsi=extra["dpsi"]
# print llh

### Sensitivity ###

##This one's gonna work a little differently than the single source sensitivity. First off, we need to calculate the background scrambles ahead of time, with the definition provided in psLLH_stack.py. I'll try to do this all within the same function:##

## Background Trials have the following keys:##
##['beta', 'TS_beta', 'beta_err', 'n_inj', 'nsources', 'TS', 'gamma']##
parser = OptionParser(usage='%prog [options]')
parser.add_option('--dec', dest='dec', type=float, default=0., metavar='DEC',
    help='sin of the source declination.')
parser.add_option('--batch', dest='batch', type=int, default=0, metavar='BATCH',
    help='Assigns which batch of background trials this job computes.')
parser.add_option('--batchsize', dest='batchsize', type=int, default=1000, metavar='BATCHSIZE',
    help='Assigns how many background trials are used in each batch.')

opts, args = parser.parse_args()
dec_deg = np.arcsin(opts.dec) * 180. / np.pi

src_ra = [0.0]
src_dec = [np.arcsin(opts.dec)]  # opts.dec is sin(dec) (see dec_deg above), so arcsin rather than radians

batch = opts.batch
batchsize = opts.batchsize
import datatest_stack_bins

llh_uniform = datatest_stack_bins.init(energy=False)

##Okay, so the following is the part where we need to split this up into parallel processing. I think the pertinent variable to use here is n_iter... let's test a background scramble with n_iter=5 to see how fast it goes. Though maybe it's maxiter? Check against the previously pickled results to see how many bckg trials we got.##

bckg_trials_single = PointSourceLLH.background_scrambles(llh_uniform,
                                                         src_ra,
                                                         src_dec,
                                                         alpha=0.5,
                                                         maxiter=batchsize)

## Background Trials have the following keys:##
##['beta', 'TS_beta', 'beta_err', 'n_inj', 'nsources', 'TS', 'gamma']##
## Let's use a uniform weight (none) first to yield our bckg trials. ##

#choose an output dir, and make sure it exists
import os
this_dir = os.path.dirname(os.path.abspath(__file__))
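## One possible way to store this batch of trials for later merging; the pickle format and the
## file-name pattern are assumptions, not taken from the original script. ##
import pickle
outfile = os.path.join(this_dir, 'bckg_trials_batch{0}.pkl'.format(batch))
with open(outfile, 'wb') as f:
    pickle.dump(bckg_trials_single, f)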
import sys
import itertools
import numpy as np
from scipy.signal import convolve2d
from skylab.ps_injector_stack import PointSourceInjector
from skylab.psLLH_stack import MultiPointSourceLLH
from skylab.utils import poisson_weight
from optparse import OptionParser
import argparse
## I'll add an argument later to select specific weighting schemes; for now I just set the weighting manually in this script. ##
sys.path.append("/home/brelethford/Documents/IceCube_Research/Scripts/AGN_Core")
projfolder='/home/brelethford/Documents/IceCube_Research/'

## Read in background trials previously logged ##

import datatest_stack_bins

llh_single = datatest_stack_bins.init(energy=False)

mc = datatest_stack_bins.MC()

##I have to load in whichever declination I'm working on at the moment...##

parser = OptionParser (usage = '%prog [options]')
parser.add_option ('--dec', dest = 'dec', type = float,
                default = 0., metavar = 'DEC',
                help = 'sin of the source declination.')

opts, args = parser.parse_args ()
dec_deg = np.arcsin(opts.dec) * 180./np.pi

datafolder='/data/user/brelethford/Output/all_sky_sensitivity/results/single_stacked/no_energy/dec{0:+010.5}/'.format(dec_deg)