Example #1
    def reload_null(self):
        """
        This function reloads the null flux 
        """

        sp = SterileParams(0., 0., 0., 0.)

        if self.ui.recoBox.isChecked():
            which = config["recon_flux"] + ".dat"
        else:
            which = config["nu_flux_downsize"] + ".dat"

        f = open(gen_filename(config["datapath"], which, sp), 'rb')
        all_data = pickle.load(f)
        f.close()

        if self.ui.recoBox.isChecked():
            self.e_reco = np.array(bhist([all_data["e_reco"]]).centers)
            self.a_reco = np.array(bhist([all_data["a_reco"]]).centers)
        else:
            self.e_reco = np.array(all_data["e_true"])
            self.a_reco = np.array(all_data["a_true"])

        all_data = all_data["flux"]

        self.flux_null = np.zeros(shape=(len(self.e_reco), len(self.a_reco)))
        for key in all_data.keys():
            if self.check_key(key):
                if self.ui.recoBox.isChecked():
                    self.flux_null += np.array(all_data[key])
                else:
                    self.flux_null += self.apply_xs(np.array(all_data[key]), key)
Example #2
    def __init__(self, *args):
        for arg in args:
            if not isinstance(arg, doLLH):
                raise TypeError("Expected {}, not {}".format(doLLH, type(arg)))

        self.doLLHs = list(args)
        self._meta_skip = all(entry.skip_missing for entry in self.doLLHs)
        self._options = [part.options for part in self.doLLHs]
        self._skipped = 0
        self._done = 0

        # get the edges
        fn = gen_filename(
            data_folder, self.doLLHs[-1]._filenames, SterileParams()
        )  #just use the null for this, we're only looking at the bin edges
        obj = open(fn, 'rb')
        data = pickle.load(obj)
        obj.close()

        self._e_edges = data["e_edges"]
        n_e = len(self._e_edges) - 1
        self._a_edges = data["a_edges"]
        n_a = len(self._a_edges) - 1
        self._reco_obj = DataReco(self._e_edges * (1e9), self._a_edges,
                                  self._e_edges * (1e9), self._a_edges)
        self._reco_tensor = [[[[
            self._reco_obj.get_energy_reco_odds(j, l) *
            self._reco_obj.get_czenith_reco_odds(k, i, l) for i in range(n_a)
        ] for j in range(n_e)] for k in range(n_a)] for l in range(n_e)]
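The four-deep comprehension builds a dense smearing tensor indexed [l][k][j][i]. As a minimal sketch, assuming that ordering maps true (energy j, angle k) bins to reconstructed (energy l, angle i) bins, it could be applied to a true-binned flux with one einsum (the names here are illustrative, not part of the class):

import numpy as np

# hypothetical application of the reco tensor: true_flux[j, k] -> reco_flux[l, i]
reco_flux = np.einsum('lkji,jk->li', np.array(reco_tensor), true_flux)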
Example #3
def load(param):
    """
    Use this to load in the fluxes. If they're there, load them. If not, make them

    pass on the sterileparams object so it knows what to load and what to pass to the data maker
    """
    #filename = "null_exp.dat" if null_bool else "ster_exp.dat"
    where = "/home/benito/software/data/cascade/hg_sib/expected_fluxes_reco"

    #filename = gen_filename(where, "expected_flux_smeared.dat", param)
    filename = gen_filename(where, "best_expected_flux.dat", param)

    if os.path.exists(filename):
        print("Loading {}".format(filename))
        f = open(filename, 'rb')
        data = pickle.load(f)
        f.close()
        return data
    else:
        # only import these if we need to
        from cascade.sensitivity.make_from_mc import build_mc_flux
        from cascade.sensitivity.generate_all_integrated_fluxes import make_meta_flux

        data = make_meta_flux(param,
                              do_mc=False,
                              smeary=True,
                              good_angles=True)
        return data
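The load-else-generate pattern above recurs throughout these examples; a generic helper, as a sketch (load_or_make is hypothetical, not part of cascade):

import os
import pickle

def load_or_make(filename, maker, *args, **kwargs):
    # return the cached pickle if it exists, otherwise build and cache it
    if os.path.exists(filename):
        with open(filename, 'rb') as f:
            return pickle.load(f)
    data = maker(*args, **kwargs)
    with open(filename, 'wb') as f:
        pickle.dump(data, f, -1)
    return data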
Example #4
    def __init__(self,
                 expectation=SterileParams(),
                 use_syst=True,
                 flatten=False):
        self.f_name = gen_filename(
            config["datapath"] + "/expected_fluxes_reco/",
            "expected_flux_from_mc.dat", expectation)
        self.use_mc = True
        _generic_LLHMachine.__init__(self, expectation, use_syst, flatten)
Example #5
    def __init__(self):
        self.f_name = gen_filename(
            config["datapath"] + "/expected_fluxes_reco/", "expected_flux.dat",
            SterileParams())
        self.use_mc = False
        _generic_LLHMachine.__init__(self,
                                     SterileParams(),
                                     use_syst=True,
                                     flatten=True)
Example #6
    def get_interp_flux(self):
        """
        This creates the interpolated flux by accessing the currently set angles, finding the four neighboring fluxes, and then performing a bilinear interpolation 
        """

        # this gets the indices of the two mixing angle values neighboring the intermediate one we have now
        i_x1, i_x2 = get_loc(self.electron_angle, self.theta03s)
        i_y1, i_y2 = get_loc(self.tau_angle, self.theta23s)

        # now let's build the parameter objects using those neighboring points we have
        param_11 = SterileParams(self.theta03s[i_x1], self.thetamu,
                                 self.theta23s[i_y1], self.msq)
        param_12 = SterileParams(self.theta03s[i_x1], self.thetamu,
                                 self.theta23s[i_y2], self.msq)
        param_21 = SterileParams(self.theta03s[i_x2], self.thetamu,
                                 self.theta23s[i_y1], self.msq)
        param_22 = SterileParams(self.theta03s[i_x2], self.thetamu,
                                 self.theta23s[i_y2], self.msq)

        which = (config["recon_flux"] if self.ui.recoBox.isChecked() else
                 config["nu_flux_downsize"]) + ".dat"

        # using those indices, we generate the names of the flux files and load
        flux_11 = self._load_flux_file(
            gen_filename(config["datapath"], which, param_11))
        flux_12 = self._load_flux_file(
            gen_filename(config["datapath"], which, param_12))
        flux_21 = self._load_flux_file(
            gen_filename(config["datapath"], which, param_21))
        flux_22 = self._load_flux_file(
            gen_filename(config["datapath"], which, param_22))

        # these are useful intermediates used for my bilinear interpolation function
        p0 = (self.electron_angle, self.tau_angle)
        p1 = (self.theta03s[i_x1], self.theta23s[i_y1])
        p2 = (self.theta03s[i_x2], self.theta23s[i_y2])

        return bilinear_interp(p0, p1, p2, flux_11, flux_12, flux_21, flux_22)
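For reference, textbook bilinear interpolation with this calling convention looks like the sketch below; the actual cascade.utils.bilinear_interp may differ in detail. q11, q12, q21, q22 are the fluxes at the four corner points:

def bilinear_interp(p0, p1, p2, q11, q12, q21, q22):
    # p0 = (x, y) is the query point; p1 = (x1, y1) and p2 = (x2, y2)
    # are opposite corners of the bounding rectangle
    x, y = p0
    x1, y1 = p1
    x2, y2 = p2
    norm = (x2 - x1) * (y2 - y1)
    return (q11 * (x2 - x) * (y2 - y) + q21 * (x - x1) * (y2 - y) +
            q12 * (x2 - x) * (y - y1) + q22 * (x - x1) * (y - y1)) / norm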
Example #7
    def set_central(self, params):
        """
        Changes the expectation from which LLHs are calculated!

        Loads the expectation in, sets it 
        """
        fn = gen_filename(data_folder, self._filenames, params)
        f = open(fn, 'rb')
        data = pickle.load(f)
        f.close()

        self._expectation = data['event_rate']

        self.set_central_from_expectation(self._expectation)
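A hypothetical usage, combining this with the doLLH constructor shown in Example #23 below: build the machine at the null point, then move the expectation to a sterile one.

llh = doLLH("best_expected_flux.dat", central_exp=SterileParams(), options={})
llh.set_central(SterileParams(theta13=0.1609, msq2=4.64))  # move to a sterile point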
Example #8
    def __init__(self,
                 expectation=SterileParams(),
                 use_syst=True,
                 flatten=False,
                 smearmode=False,
                 special=False):
        suffix = "_smeared" if smearmode else ""
        suffix = "_smearedwell" if special else ""

        self.f_name = gen_filename(
            config["datapath"] + "/expected_fluxes_reco/",
            "expected_flux{}.dat".format(suffix), expectation)
        self.use_mc = False
        _generic_LLHMachine.__init__(self, expectation, use_syst, flatten,
                                     smearmode, special)
Example #9
    def load_file(self, params):
        """
        Load the corresponding file in! Return the event rate.
        """
        fn = gen_filename(data_folder, self._filenames, params)
        if not os.path.exists(fn):
            raise IOError("Couldn't find file {}".format(fn))
        f = open(fn, 'rb')
        try:
            data = pickle.load(f)
        except IOError:
            print("Error Loading File {}".format(fn))
            raise IOError("Error loading file, but not skipping these")
        f.close()
        return data
Example #10
def _load_flux(params, tracks=False):
    name = gen_filename(config["datapath"], config["nu_flux"] + ".dat", params)

    if os.path.exists(name):
        f = open(name, 'rb')
        all_data = pickle.load(f)
        f.close()
        return (all_data["e_true"], all_data["a_true"], all_data["flux"])
    else:
        kwargs = {"as_data": True}
        data = raw_flux(params, kwargs=kwargs)
        flux = {}
        for key in data.get_keys(just_casc=(not tracks), just_tracks=tracks):
            flux[key] = np.array(data.fluxes[key])
        return (np.array(data.energies), np.array(data.angles), flux)
Example #11
def generate_astr_flux(params, **kwargs):
    """
    This script generates an astrophysical flux at the given SterileParameters

    It saves the flux to the datapath that the configuration file is configured to
    """
    print("Received {} point".format(params))
    if "forced_filename" not in kwargs:
        kwargs["forced_filename"] =  gen_filename(config["datapath"], config["prop_astro_flux"], params)

    if "flavor_ratio" not in kwargs:
        flavor_ratio = get_flavor_ratio(params)
        kwargs["flavor_ratio"] = flavor_ratio
    else:
        print("Using ratio: {}".format(kwargs["flavor_ratio"]))

    kwargs["state_setter"] = astro_initial_state
    kwargs["osc"] = False

    return raw_flux(params, kwargs)
Example #12
def gen_flux(params):
    """
    First we make a neutrino flux file at the detector (true flux binning)
    Then we get the fluxes, binned in energy deposited
    Then we incorporate the detector response 
    """
    if not isinstance(params, SterileParams):
        raise TypeError("Expected {} for params, not {}".format(
            SterileParams, type(params)))

    raw_flux_name = gen_filename(config["datapath"],
                                 config["nu_flux"] + ".dat", params)
    if not (os.path.exists(raw_flux_name) and config["use_pregen_mceq_flux"]):
        raw_flux_name = raw_flux(params)

    a, b, c, d, err = generate_singly_diff_fluxes(config["n_bins"],
                                                  datafile=raw_flux_name)
    incorporate_recon(a, b, c, d, errors=err, params=params, just_flux=True)
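A hypothetical driver, just to show the entry point (the parameter points are illustrative):

for msq in (1.0, 3.3, 4.64):
    gen_flux(SterileParams(theta13=0.1609, msq2=msq))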
Example #13
def load(null_bool):
    filename = "null_exp.dat" if null_bool else "ster_exp.dat"

    if os.path.exists(filename):
        f = open(filename,'rb')
        data = pickle.load(f)
        f.close()
        return data
    else:
        if null_bool:
            params = SterileParams()
        else:
            params = SterileParams(0.0, 0.1609, 0.0, 4.7)
        # load the fluxes 
        fu_filename = gen_filename(config["datapath"], "raw_det_flux.dat", params)
        flux = Data(fu_filename)

        data = build_mc_flux(flux)
        f = open(filename, 'wb')
        pickle.dump(data, f, -1)
        f.close()
        return data
Example #14
def make_meta_flux(params, do_mc=False):
    """
    This makes and saves the expected flux at the given parameter point.
    """
    # look for the atmospheric fluxes. These should all be pre-generated
    start = time()
    kwargs = {}
    kwargs["as_data"] = True
    atmo_data = raw_flux(params, kwargs=kwargs)
    astr_data = generate_astr_flux(params, as_data=True)

    print("Calculating Expected Binned Flux at {}".format(params))
    if do_mc:
        print("In MC mode")
    # now we use these two to build the full expected flux
    if do_mc:
        full_flux = build_mc_flux(atmo_data, astr_data)
    else:
        full_flux = get_expectation(atmo_data, astr_data) #dict 
    middle = time()
    # save the object
    filename = "expected_flux_from_mc_smearedwell.dat" if do_mc else "best_expected_flux.dat"

    suffix = "{}from_mc".format("_not_" if (not do_mc) else "_")
    new_filename = gen_filename(config["datapath"]+ "/expected_fluxes_reco/", filename, params)

    print("Saving to {}".format(new_filename))
    f = open(new_filename ,'wb')
    pickle.dump(full_flux, f, -1)
    f.close()
    end = time()

    print("Flux Sim took {:.1f} seconds".format(middle-start))
    print("Saving took {:.3f} seconds".format(end-middle))

    return full_flux


Example #15
ratios = False

central_s = SterileParams()
#sterile_s = SterileParams(theta13=0.1652, theta23=0.2293, msq2=4.6416)
sterile_s = SterileParams(theta13=0.1652, theta23=0.2293, msq2=4.5)
use_params = central_s

cascade_name_root = "best_expected_flux.dat"
track_name_root = "expected_flux_from_mc_smearedwell.dat"
datadir = os.path.join(config["datapath"], "expected_fluxes_reco")
# datadir = "/home/benito/software/data/cascade/hg_sib/expected_fluxes_reco/"

sterile_casc_fname = gen_filename(datadir, cascade_name_root, sterile_s)
sterile_track_fname = gen_filename(datadir, track_name_root, sterile_s)

cascade_fname = gen_filename(datadir, cascade_name_root, central_s)
track_fname = gen_filename(datadir, track_name_root, central_s)

#data = ldata(f_name)
#data2 = ldata(f_name_s)
try:
    cascade_datadict = ldata(cascade_fname)
except IOError:
    cascade_datadict = make_meta_flux(central_s)
try:
    track_datadict = ldata(track_fname)
except IOError:
    track_datadict = make_meta_flux(central_s, do_mc=True)
Example #16
def state_setter(energies, zeniths, n_nu, kwargs):
    """
    Build an initial state that is pure nuMu and nuMuBar.

    So everything is either a zero or a one!
    """
    e_bins = len(energies)
    a_bins = len(zeniths)

    inistate = np.zeros(shape=(a_bins, e_bins, 2, n_nu))

    for i_e in range(e_bins):
        for i_a in range(a_bins):
            for neut_type in range(2):
                inistate[i_a][i_e][neut_type][1] = 1.0
    return inistate
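Since the loop only sets the nuMu entry (flavor index 1) to one for every angle, energy, and neutrino type, a vectorized equivalent is a two-liner:

inistate = np.zeros(shape=(a_bins, e_bins, 2, n_nu))
inistate[:, :, :, 1] = 1.0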


# either generate the file or load it in!
expected_fn = gen_filename(config["datapath"], "unitary_prob.dat", params)
if os.path.exists(expected_fn):
    final_probs = Data(expected_fn)
else:
    kwargs = {}
    kwargs["as_data"] = False
    kwargs["state_setter"] = state_setter
    kwargs["forced_filename"] = expected_fn
    final_probs = Data(raw_flux(params, kwargs=kwargs))

# now we need three keys
curr = "CC"  # doesn't matter
neut = "nuBar"
root_flav = "Mu"
flavors = ['E', 'Mu', 'Tau']
# flavor neutrino current
Example #17
def raw_flux(params, kwargs=None):
    """
    This is the main function. It saves a data file for the flux with a unique name for the given physics.
    """
    if kwargs is None:
        kwargs = {}
    if not isinstance(params, SterileParams):
        raise TypeError("Expected {} for params, not {}".format(
            SterileParams, type(params)))

    if "forced_filename" in kwargs:
        forced_filename = kwargs["forced_filename"]
    else:
        forced_filename = None
    if "state_setter" in kwargs:
        state_setter = kwargs["state_setter"]
    else:
        state_setter = get_initial_state

    if "osc" in kwargs:
        osc = kwargs["osc"]
    else:
        osc = True
    if not osc:
        print("NOT USING OSCILLATIONS")

    if "as_data" in kwargs:
        as_data = kwargs["as_data"]
    else:
        as_data = False

    if forced_filename is not None:
        if not isinstance(forced_filename, str):
            raise TypeError("Forced filename should be {}, or {}".format(
                str, None))

    print("Propagating Neutrinos at {}".format(params))
    n_nu = 4
    Emin = 1. * un.GeV
    Emax = 10. * un.PeV
    cos_zenith_min = -0.999
    cos_zenith_max = 0.2

    use_earth_interactions = True

    zeniths = nsq.linspace(cos_zenith_min, cos_zenith_max, angular_bins)
    energies = nsq.logspace(Emin, Emax,
                            energy_bins)  # DIFFERENT FROM NUMPY LOGSPACE

    nus_atm = nsq.nuSQUIDSAtm(zeniths, energies, n_nu, nsq.NeutrinoType.both,
                              use_earth_interactions)

    nus_atm.Set_MixingAngle(0, 1, 0.563942)
    nus_atm.Set_MixingAngle(0, 2, 0.154085)
    nus_atm.Set_MixingAngle(1, 2, 0.785398)

    #sterile parameters
    nus_atm.Set_MixingAngle(0, 3, params.theta03)
    nus_atm.Set_MixingAngle(1, 3, params.theta13)
    nus_atm.Set_MixingAngle(2, 3, params.theta23)
    nus_atm.Set_SquareMassDifference(3, params.msq2)

    nus_atm.Set_SquareMassDifference(1, 7.65e-05)
    nus_atm.Set_SquareMassDifference(2, 0.00247)

    nus_atm.SetNeutrinoCrossSections(xs)

    nus_atm.Set_TauRegeneration(True)

    # set the solver's numerical precision
    nus_atm.Set_rel_error(1.0e-6)
    nus_atm.Set_abs_error(1.0e-6)
    #nus_atm.Set_GSL_step(gsl_odeiv2_step_rk4)
    nus_atm.Set_GSL_step(nsq.GSL_STEP_FUNCTIONS.GSL_STEP_RK4)

    # we load in the initial state. Generating or Loading from a file
    inistate = state_setter(energies, zeniths, n_nu, kwargs)
    if np.min(inistate) < 0:
        raise ValueError("Found negative value in inistate: {}".format(
            np.min(inistate)))
    nus_atm.Set_initial_state(inistate, nsq.Basis.flavor)

    # we turn off the progress bar for jobs run on the cobalts
    nus_atm.Set_ProgressBar(False)
    nus_atm.Set_IncludeOscillations(osc)

    nus_atm.EvolveState()

    int_en = 700
    int_cos = 100
    int_min_e = log10(Emin)
    int_max_e = log10(Emax)

    filename = ""
    if not as_data:

        if forced_filename is None:
            filename = gen_filename(config["datapath"],
                                    config["nu_flux"] + ".dat", params)
        else:
            filename = forced_filename
        print("Saving File to {}".format(filename))

        if not config["overwrite"]:
            backup(filename)

        obj = open(filename, 'wt')
    else:
        cobalt = os.environ.get("_CONDOR_SCRATCH_DIR")
        if cobalt == None or cobalt == "" or cobalt == ".":
            this_dir = None
        else:
            this_dir = cobalt
        obj = NamedTemporaryFile(mode='wt', buffering=1, dir=this_dir)

    angle = cos_zenith_min
    energy = int_min_e
    obj.write(
        "# log10(energy) cos(zenith) flux_nuE flux_nuMu flux_nuTau flux_nuEBar flux_nuMuBar flux_nuTauBar\n"
    )
    this_value = 0.0
    while angle < cos_zenith_max:
        energy = int_min_e
        while energy < int_max_e:
            #obj.write( ~string~ )
            obj.write("{} {}".format(energy, angle))
            reg_energy = pow(10., energy)
            for flavor in range(n_nu):
                this_value = nus_atm.EvalFlavor(flavor, angle, reg_energy, 0)
                if this_value < 0:
                    this_value = 0.0
                obj.write(" {}".format(this_value))
            for flavor in range(n_nu):
                this_value = nus_atm.EvalFlavor(flavor, angle, reg_energy, 1)
                if this_value < 0:
                    this_value = 0.0
                obj.write(" {}".format(this_value))

            energy += (int_max_e - int_min_e) / int_en
            obj.write("\n")

        angle += (cos_zenith_max - cos_zenith_min) / int_cos

    if as_data:
        data_obj = Data(obj.name)
        obj.close()  # deletes tempfile
        return data_obj
    else:
        obj.close()
        return (filename)
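A minimal hypothetical call, using only the kwargs handled above: propagate the null point and keep the result in memory instead of writing a flux file.

flux_data = raw_flux(SterileParams(), kwargs={"as_data": True, "osc": True})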
Example #18

def _load_error(name):
    f = open(name, 'rb')
    all_data = pickle.load(f)
    f.close()

    e_reco = all_data["e_reco"]
    a_reco = all_data["a_reco"]
    error = all_data["error"]

    return (e_reco, a_reco, error)


e_reco, a_reco, kflux = _load_flux(
    gen_filename(config["datapath"], config["recon_flux"] + ".dat", 0.1339,
                 0.0, 1.3))

e_reco, a_reco, kerror = _load_error(
    gen_filename(config["datapath"], config["flux_error"] + ".dat", 0.1339,
                 0.0, 1.3))

flux = sum(kflux.values())
error = sum(kerror.values())

angle_bins = bhist([np.arccos(a_reco)])
angle_widths = angle_bins.widths
angle_centers = angle_bins.centers
energy_centers = np.array(bhist([e_reco]).centers)
# first dim of flux is for energy
# flux[energy][angle]

print(np.shape(flux))
Example #19
    likelihoods = [[0, 0, 0, 0.0]
                   for i in range(len(msqs) * len(theta24s) * len(theta34s))]
    chi2 = np.zeros(shape=(len(theta24s), len(theta34s), len(msqs)))

    central_chi = 0.0

    if special:
        f_name_init = "expected_flux_smearedwell.dat"
    else:
        if smear:
            f_name_init = "expected_flux_smeared.dat"
        else:
            f_name_init = "expected_flux_from_mc.dat" if use_mc else "expected_flux.dat"
    if not compare:
        f = open(
            gen_filename(config["datapath"] + "/expected_fluxes_reco/",
                         f_name_init, expectation), 'rb')
        central_chi = -2 * likelihooder.get_llh(pickle.load(f))
        f.close()
    else:
        likelihooder.set_expectation(true_data)
        central_chi = -2 * likelihooder.get_llh(true_data)

    skipped = 0
    found = 0
    for th24 in range(len(theta24s)):
        for th34 in range(len(theta34s)):
            for msq in range(len(msqs)):

                pam = SterileParams(theta13=theta24s[th24],
                                    theta23=theta34s[th34],
                                    msq2=msqs[msq])
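The same grid walk can be flattened with itertools.product; a sketch, keeping the indices so the chi2 array can still be filled:

from itertools import product

for (i24, th24), (i34, th34), (im, msq) in product(
        enumerate(theta24s), enumerate(theta34s), enumerate(msqs)):
    pam = SterileParams(theta13=th24, theta23=th34, msq2=msq)
    # ... evaluate the likelihood and fill chi2[i24][i34][im] here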
Example #20
from cascade.utils import Data

import os
from math import log10

import numpy as np
import matplotlib
matplotlib.use('TkAgg')
import matplotlib.pyplot as plt
import matplotlib.cm as cm

from cascade.utils import config, gen_filename

livetime = 10 * 3600 * 24 * 365.  # ten years, in seconds

null_pt = gen_filename(config["datapath"], config["nu_flux"], 0., 0., 0.)
sterile_pt = gen_filename(config["datapath"], config["nu_flux"], 0.13388166,
                          0.0, 1.3)
null_dat = Data(null_pt, 4)
sterile_dat = Data(sterile_pt, 4)

n_bin = 100

_ang_range = (min(null_dat.angles), max(null_dat.angles))
_eng_range = (min(null_dat.energies), max(null_dat.energies))

angles = np.linspace(_ang_range[0], _ang_range[1], n_bin + 1)
energies = np.logspace(log10(_eng_range[0]), log10(_eng_range[1]), n_bin)

#angles = null_dat._angles
#energies = np.array(null_dat._energies)
Example #21
import os

import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt

plt.style.use(os.path.join(os.path.dirname(__file__), "..", "..", "cascade.mplstyle"))

interp = True

#f = open("../cummulative_probs.dat",'rb')
#f = open("/home/benito/software/data/cascade/hg_sib//expectations/0.0/scaled_cummulative_probsnonorm_special_nosys_0.0_0.0_0.0_0.0.dat",'rb')
#f = open("/home/benito/software/data/cascade/hg_sib//expectations/0.0/scaled_cummulative_probsnonorm_special_0.0_0.0_0.0_0.0.dat",'rb')

#/home/benito/software/data/cascade/hg_sib/0.0/newSense_result_float_0.0_0.0_0.0_0.0.dat
# /home/benito/software/data/cascade/hg_sib/0.0/joint_likelihood_nosys_0.0_0.0_0.0_0.0.dat <- no sys
fname = gen_filename(config["datapath"], "joint_likelihood_smearing.dat", SterileParams(theta13=0.1652, theta23=0.2293, msq2=4.6416))

f = open(fname, 'rb')
obj = pickle.load(f)
f.close()

theta24s = obj["theta24s"]
theta34s = obj["theta34s"]
msqs = obj["msqs"]
chi2 = np.array(obj["chi2s"])

deg = 180. / np.pi



ps = [0.10] #, 0.01]
    "smear": smearing
}
mc_options = {
    "is_mc": True,
    "use_syst": systematics,
    "skip_missing": True,
    "smear": False
}

llhood = doLLH("best_expected_flux.dat", central_exp=central, options=options)
mcllhood = doLLH("expected_flux_from_mc_smearedwell.dat",
                 central_exp=central,
                 options=mc_options)
jointllh = JointLLH(mcllhood, llhood)

thetas = np.concatenate(([0], np.arcsin(np.sqrt(np.logspace(-3, 0, 90))) / 2))
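# arcsin(sqrt(x))/2 makes sin^2(2*theta) log-uniform on [1e-3, 1]; the prepended 0 is the null point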

llh_dict = Scanner(jointllh,
                   theta24s=th24s,
                   theta34s=th34s,
                   msqs=msqs,
                   th14_mode=True,
                   theta14s=thetas)
results = llh_dict.scan()

outname = "llh_1d_scan.dat"
write_dir = gen_filename(config["datapath"], outname, central)
print("Wrote to {}".format(write_dir))
f = open(write_dir, 'wb')
pickle.dump(results, f, -1)
f.close()
Example #23
    def __init__(self, filenames, central_exp=SterileParams(), options=None):
        if options is None:
            options = {}
        self._is_mc = False
        self._flatten = False
        self._upgoing = True
        self._skip_missing = False
        self._use_syst = True
        self._fix_norm = -1
        self._fixing_norm = False
        self._use_sideband_err = False
        self._smear = False
        if not isinstance(filenames, str):
            raise TypeError("Filename should be {}, not {}".format(
                str, type(filenames)))
        self._filenames = filenames

        self._parse_options(options)
        self._options = options
        if self._smear:
            # get the edges
            fn = gen_filename(data_folder, self._filenames, central_exp)
            obj = open(fn, 'rb')
            data = pickle.load(obj)
            obj.close()

            self._e_edges = data["e_edges"]
            n_e = len(self._e_edges) - 1
            self._a_edges = data["a_edges"]
            n_a = len(self._a_edges) - 1
            self._reco_obj = DataReco(self._e_edges * (1e9), self._a_edges,
                                      self._e_edges * (1e9), self._a_edges)
            self._reco_tensor = [[[[
                self._reco_obj.get_energy_reco_odds(j, l) *
                self._reco_obj.get_czenith_reco_odds(k, i, l)
                for i in range(n_a)
            ] for j in range(n_e)] for k in range(n_a)] for l in range(n_e)]

        # do the initial configuration
        self.set_central(central_exp)

        self._net_error_m_sys = np.zeros(
            shape=np.shape(self._net_error_m_stat))
        self._net_error_p_sys = np.zeros(
            shape=np.shape(self._net_error_p_stat))

        if self._use_syst:
            fn = gen_filename(data_folder, self._filenames, central_exp)
            f = open(fn, 'rb')
            expectation = pickle.load(f)
            f.close()

            ice_grad_0 = unc_wrapper(expectation, Syst.icegrad0, options)
            ice_grad_1 = unc_wrapper(expectation, Syst.icegrad1, options)
            astro_grad = unc_wrapper(expectation, Syst.astrogam, options)
            cr_grad = unc_wrapper(expectation, Syst.crgam, options)

            self._net_error_m_sys = np.sqrt(astro_grad[0]**2 + cr_grad[0]**2 +
                                            ice_grad_0[0]**2 + ice_grad_1[0]**2)

            self._net_error_p_sys = np.sqrt(astro_grad[1]**2 + cr_grad[1]**2 +
                                            ice_grad_0[1]**2 + ice_grad_1[1]**2)
"""
add_contour("/home/bsmithers/software/data/hg_sib/0.1609e0/best_llh_1_00eV_smearing_0.0_0.1609e0_0.0_1.0000e0.dat", r"1.0eV$^{2}$, $\theta_{24}=0.1609$", '-')
add_contour("/home/bsmithers/software/data/hg_sib/0.1609e0/best_llh_3_30eV_smearing_0.0_0.1609e0_0.0_3.3000e0.dat", r"3.3eV$^{2}$, $\theta_{24}=0.1609$", '-')
add_contour("/home/bsmithers/software/data/hg_sib/0.1609e0/best_llh_4_64eV_smearing_0.0_0.1609e0_0.0_4.6400e0.dat", r"4.64eV$^{2}$, $\theta_{24}=0.1609$", '-')
add_contour("/home/bsmithers/software/data/hg_sib/0.3826e0/best_llh_1_00eV_smearing_0.0_0.3826e0_0.0_1.0000e0.dat", r"1.0eV$^{2}$, $\theta_{24}=0.3826$", '--')
add_contour("/home/bsmithers/software/data/hg_sib/0.3826e0/best_llh_3_30eV_smearing_0.0_0.3826e0_0.0_3.3000e0.dat", r"3.3eV$^{2}$, $\theta_{24}=0.3826$", '--')
add_contour("/home/bsmithers/software/data/hg_sib/0.3826e0/best_llh_4_64eV_smearing_0.0_0.3826e0_0.0_4.6400e0.dat", r"4.64eV$^{2}$, $\theta_{24}=0.3826$", '--')
"""

msqs = [1.0, 3.3, 4.64]
th24s = [0.1609, 0.3826]
th34s = [0.0]
thetas = np.concatenate(([0], np.arcsin(np.sqrt(np.logspace(-3, 0, 90))) / 2))

filename = gen_filename(config["datapath"],
                        "newllh_1d_scan.dat",
                        params=SterileParams())
f = open(filename, 'rb')
data_dict = pickle.load(f)
f.close()

msqs = data_dict["msqs"]
th14s = data_dict["theta14s"]
th24s = data_dict["theta24s"]
th34s = data_dict["theta34s"]
chis = data_dict["chi2s"]
# chis = chis - np.nanmin(chis)

print("Chi Shape: {}".format(np.shape(chis)))

cl = 4 * 1.9
import os

import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
plt.style.use(os.path.join(os.path.dirname(__file__), "..", ".." , "cascade.mplstyle"))

from cascade.deporeco import DataReco


exist_masses = np.concatenate((np.array([0]), np.logspace(-2,2,40)))

fn = "best_expected_flux.dat"
data_folder = os.path.join(config["datapath"], "expected_fluxes_reco")
f_name =  gen_filename(data_folder, fn, SterileParams())
sterile_fname = gen_filename(data_folder, fn, SterileParams(theta13=0.1652, theta23=0.2293, msq2=4.6416))

#sterile_string = r"$\sin^{2}(2\theta_{24})=0.1, \sin^{2}(2\theta_{34})=0.20, \Delta m_{41}^{2}=4.64$"
sterile_string = r"Sterile Neutrino"


def loadit(filename):
    f = open(filename, 'rb')
    tdata = pickle.load(f)
    f.close()
    return tdata

data = loadit(f_name)
sdata = loadit(sterile_fname)

def _load_flux(name):
    f = open(name, 'rb')
    all_data = pickle.load(f)
    f.close()

    e_reco = all_data["e_true"]
    a_reco = all_data["a_true"]
    flux = all_data["flux"]

    return (e_reco, a_reco, flux)


e_reco, a_reco, flux = _load_flux(
    gen_filename(config["datapath"], config["nu_flux_downsize"] + ".dat",
                 SterileParams()))

energies = bhist([e_reco]).centers
a_widths = bhist([a_reco]).widths
angles = bhist([a_reco]).centers

keys = list(flux.keys())


def is_track(key):
    curr = key.split("_")[2].lower()
    if curr == "nc":
        return False
    elif curr == "cc":
        flavor = key.split("_")[0].lower()
from cascade.utils import Data, SterileParams, gen_filename, config
from cascade.sensitivity.make_from_mc import build_mc_flux
from cascade.sensitivity.eff_area_reader import quickload

import numpy as np
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
import os

# load the fluxes
null_f = gen_filename(config["datapath"], "raw_det_flux.dat", SterileParams())
ster_f = gen_filename(config["datapath"], "raw_det_flux.dat",
                      SterileParams(0.0, 0.1609, 0.0, 4.7))
null = Data(null_f)
ster = Data(ster_f)

# get the binning information
filename = "effective_area.nu_mu.txt"
area_data = quickload(
    os.path.join(
        os.path.join(config["datapath"], "charm_search_supplemental/"),
        filename))
e_edges = np.array(area_data["e_reco"])
a_edges = np.array(area_data["cos_th"])

e_centers = 0.5 * (e_edges[:-1] + e_edges[1:])
a_centers = 0.5 * (a_edges[:-1] + a_edges[1:])

# get the flux in the centers of each of the bins
null_flux = np.zeros(shape=(len(e_centers), len(a_centers)))
Example #28
    def load(self, param):
        filename = gen_filename(self.folder, self.filenames, param)
        f = open(filename, 'rb')
        data = pickle.load(f)
        f.close()
        return data
"""
import numpy as np
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
import matplotlib.cm as cm

# these are like the MOST important things, damn
from cascade.utils import bhist, SterileParams, gen_filename, config
from cascade.deposit import generate_singly_diff_fluxes

null = SterileParams(0., 0., 0., 0.)
#ster = SterileParams(0., 0.1609, 0, 4.47)
ster = SterileParams(0., 0.1609, 0.2205, 4.47)

raw_null = gen_filename(config["datapath"], config["nu_flux"] + ".dat", null)
raw_ster = gen_filename(config["datapath"], config["nu_flux"] + ".dat", ster)

n_bins = 40

true_e_edges, depo_e_edges, null_flux, czenith_edges, errs = generate_singly_diff_fluxes(
    n_bins, datafile=raw_null)
true_e_edges, depo_e_edges, ster_flux, czenith_edges, errs = generate_singly_diff_fluxes(
    n_bins, datafile=raw_ster)

true_e_widths = np.array(bhist([true_e_edges]).widths)

czeniths = np.array(bhist([czenith_edges]).centers)
energies = np.array(bhist([depo_e_edges]).centers)

keys = [str(key) for key in null_flux.keys()]
def _load_flux(name):
    f = open(name, 'rb')
    all_data = pickle.load(f)
    f.close()

    e_reco = all_data["e_reco"]
    a_reco = all_data["a_reco"]
    flux = all_data["flux"]

    return (e_reco, a_reco, flux)


#null_pt = gen_filename(config["datapath"], config["nu_flux"], 0.,0.,0.)

null = SterileParams(0., 0., 0., 0.)
mud = SterileParams(0., 0.1609, 0.2205, 4.47)
eld = SterileParams(0.13, 0., 0.0, 1.3)

e_reco, a_reco, flux_null = _load_flux(gen_filename(config["datapath"], config["recon_flux"] + ".dat", null))
e_reco, a_reco, flux_sterile = _load_flux(gen_filename(config["datapath"], config["recon_flux"] + ".dat", mud))

ex = list(flux_null.keys())[0]

null_total = np.zeros(shape=np.shape(flux_null[ex]))
sterile_total = np.zeros(shape=np.shape(flux_null[ex]))

just_nubar = False

keep_key = "Tau"  # despite the flag name, this filters on the flavor key
for key in flux_null.keys():
    if just_nubar and (keep_key not in key):
        print("Skip {}".format(key))
        continue