Beispiel #1
0
def combiner(path, filename):
    """ Combine every spectrum matching *path* into one and dump it.

    Loads each HDF5 file matched by the glob pattern, adds them all
    into the first spectrum, and writes the combined spectrum out.

    Args:
      path (string): Glob pattern matching the HDF5 spectra to combine.
      filename (string): Destination HDF5 path for the combined spectrum.

    Raises:
      ValueError: If *path* matches no files (previously this crashed
        with a NameError at the final dump).
    """
    flist = glob.glob(path)  # glob already returns a list; np.array wrapper dropped
    print(flist)
    if not flist:
        raise ValueError("No files match pattern: %s" % path)
    combined = None
    for hdf5 in flist:
        print(hdf5)
        if combined is None:
            combined = store.load(hdf5)  # first file seeds the combination
        else:
            combined.add(store.load(hdf5))
    store.dump(filename, combined)
Beispiel #2
0
    def load_pre_made(self, spectrum, global_pars):
        """ Load a pre-made convolved spectrum.

        Locates and loads, from the pre-made base directory, the
        spectrum that was pre-convolved at the current values of the
        given global fit parameters (resolution, energy-scale, shift,
        or a combination thereof).

        Args:
          spectrum (:class:`echidna.core.spectra.Spectra`): Spectrum
            to convolve.
          global_pars: Iterable of global fit parameter names used to
            build the path of the pre-convolved file.

        Returns:
          (:class:`echidna.core.spectra.Spectra`): Convolved spectrum,
            ready for applying further systematics or fitting.

        Raises:
          AttributeError: If the pre-made base directory has not been
            set beforehand.
        """
        if self._pre_made_base_dir is None:
            raise AttributeError("Pre-made directory is not set.")

        directory = self._pre_made_base_dir
        filename = spectrum.get_name()

        # Each parameter appends its current value to the path/filename
        for name in global_pars:
            parameter = self._fit_config.get_par(name)
            directory, filename = parameter.get_pre_convolved(
                directory, filename)

        # Load the located spectrum from HDF5
        return store.load(directory + filename + ".hdf5")
Beispiel #3
0
    def test_serialisation(self):
        """ Test saving and then reloading a test spectra.

        Fills a spectrum with random events, dumps it to HDF5, reloads
        it, and checks the data array and all binning metadata survive
        the round trip.
        """
        test_decays = 10
        test_spectra = spectra.Spectra("Test", test_decays)
        for _ in range(test_decays):
            energy = random.uniform(0, test_spectra._energy_high)
            radius = random.uniform(0, test_spectra._radial_high)
            time = random.uniform(0, test_spectra._time_high)
            test_spectra.fill(energy, radius, time)

        store.dump("test.hdf5", test_spectra)
        loaded_spectra = store.load("test.hdf5")
        # assertEqual gives informative failure messages, unlike
        # assertTrue(a == b)
        self.assertEqual(loaded_spectra.sum(), test_decays)
        self.assertTrue(
            numpy.array_equal(test_spectra._data, loaded_spectra._data))
        # All scalar binning metadata must round-trip unchanged
        for attr in ("_energy_low", "_energy_high", "_energy_bins",
                     "_energy_width", "_radial_low", "_radial_high",
                     "_radial_bins", "_radial_width", "_time_low",
                     "_time_high", "_time_bins", "_time_width",
                     "_num_decays"):
            self.assertEqual(getattr(test_spectra, attr),
                             getattr(loaded_spectra, attr),
                             msg="Mismatch in %s" % attr)
def main(args):
    """Smears energy and dumps spectra.

    Args:
      args (Namespace): Container for arguments. See
        >>> python dump_smeared_energy.py -h
    """
    # Resolve the output directory: a validated destination if given,
    # otherwise the directory of the input file.
    if args.dest:
        if not os.path.isdir(args.dest):
            raise ValueError("%s does not exist" % args.dest)
        directory = args.dest
        if not directory.endswith("/"):
            directory += "/"
    else:
        directory = os.path.dirname(args.path) + "/"  # strip filename
    # strip directory and extension
    filename = os.path.splitext(os.path.basename(args.path))[0]

    # Poisson smearing is the default; --gaus switches to Gaussian.
    use_poisson = not args.gaus
    if args.energy_resolution:
        energy_smear = smear.EnergySmearRes(poisson=use_poisson)
        energy_smear.set_resolution(args.energy_resolution)
    else:  # use light yield
        energy_smear = smear.EnergySmearLY(poisson=use_poisson)
        energy_smear.set_resolution(args.light_yield)

    spectrum = store.load(args.path)

    # Dispatch on the requested smearing method.
    if args.smear_method == "weight":  # Default smear method
        smear_func = energy_smear.weighted_smear
    elif args.smear_method == "random":
        smear_func = energy_smear.random_smear
    else:  # Not a valid smear method
        parser.error(args.smear_method + " is not a valid smear method")
    for par in spectrum.get_config().get_pars():
        if "energy" in par:
            spectrum = smear_func(spectrum, par=par)

    # Encode the smearing setting in the output filename.
    if args.energy_resolution:
        str_rs = str(args.energy_resolution)
        filename = directory + filename + "_" + str_rs + "rs.hdf5"
    else:
        str_ly = str(args.light_yield)
        if str_ly.endswith('.0'):
            str_ly = str_ly[:-2]
        filename = directory + filename + "_" + str_ly + "ly.hdf5"
    store.dump(filename, spectrum)
Beispiel #5
0
def main(args):
    """Shrinks parameter ranges of a spectrum and dumps it.

    (Original docstring said "Smears energy", which did not match the
    code.)

    Args:
      args (Namespace): Container for arguments. See::
        python dump_smeared_energy.py -h

    Raises:
      ValueError: If no input file, no parameters or no bounds are
        provided, or if the number of bounds does not match the number
        of parameters.
    """
    if not args.input:
        parser.print_help()
        raise ValueError("No input file provided")
    if not args.pars:
        parser.print_help()
        raise ValueError("No parameters provided")
    if not args.low and not args.up:
        parser.print_help()
        # Fixed missing space: the original implicit concatenation
        # produced "...bounds toshrink to."
        raise ValueError("Must provide lower and/or upper bounds to "
                         "shrink to.")
    spectrum = store.load(args.input)
    num_pars = len(args.pars)
    if args.low and len(args.low) != num_pars:
        raise ValueError("Must have the same number of pars as bounds")
    if args.up and len(args.up) != num_pars:
        raise ValueError("Must have the same number of pars as bounds")
    # Build the keyword bounds for Spectra.shrink from whichever side(s)
    # were supplied; this replaces three near-identical branches.
    shrink = {}
    for i, par in enumerate(args.pars):
        if args.low:
            shrink[par + "_low"] = args.low[i]
        if args.up:
            shrink[par + "_high"] = args.up[i]
    spectrum.shrink(**shrink)
    f_out = args.output
    if not f_out:
        directory = os.path.dirname(args.input)
        filename = os.path.splitext(os.path.basename(args.input))[0]
        f_out = directory + "/" + filename + "_shrunk.hdf5"
    store.dump(f_out, spectrum)
    _logger.info("Shrunk "+str(args.input)+", saved to "+str(f_out))
Beispiel #6
0
def slicer(spectrumPath, filler, nslice):
    """ Slice a spectrum into *nslice* radial shells and collect them.

    Each slice covers an equal radial range out of 6000 mm, cut in
    reconstructed energy to [0, 0.6].

    Args:
      spectrumPath (string): Path to the HDF5 spectrum to slice.
      filler (list): Output list the sliced spectra are appended to.
      nslice (int): Number of radial slices.
    """
    for i in range(nslice):
        spectrum = store.load(spectrumPath)
        print(spectrum.sum())
        # Use float arithmetic for both bounds; the original computed
        # the upper bound with integer division (a Python 2 truncation
        # bug for nslice values that do not divide 6000).
        lo = i * 6000.0 / nslice
        hi = (i + 1) * 6000.0 / nslice
        shrink_dict = {"energy_reco_low": 0.,
                       "energy_reco_high": 0.6,
                       "radial_reco_low": lo,
                       "radial_reco_high": hi}
        spectrum.cut(**shrink_dict)
        spectrum.scale(1)
        spec2 = copy.copy(spectrum)
        # Name by the actual shell bounds; the original hard-coded
        # 1000 mm steps, which was only correct for nslice == 6.
        # int(round(...)) keeps the old labels for nslice == 6.
        spec2._name = (str(int(round(lo))) + "mm to " +
                       str(int(round(hi))) + "mm")
        print(type(spec2))
        filler.append(spec2)
Beispiel #7
0
def slicerReco(spectrumPath, filler, nslice):
    """ Slice a spectrum into *nslice* reconstructed radial shells.

    Each slice covers an equal radial range out of 6000 mm, cut in
    reconstructed energy to [0, 1]; the event count per window is
    printed.

    Args:
      spectrumPath (string): Path to the HDF5 spectrum to slice.
      filler (list): Output list the sliced spectra are appended to.
      nslice (int): Number of radial slices.
    """
    for i in range(nslice):
        spectrum = store.load(spectrumPath)
        print(spectrum.sum())
        lo = i * 6000.0 / nslice
        hi = (i + 1) * 6000.0 / nslice
        # Float division for both bounds; the original truncated the
        # upper bound via Python 2 integer division.
        shrink_dict = {"energy_reco_low": 0.,
                       "energy_reco_high": 1.,
                       "radial_reco_low": lo,
                       "radial_reco_high": hi}
        spectrum.cut(**shrink_dict)
        spectrum.scale(1)
        spec2 = copy.copy(spectrum)
        spec2._name = "Reco"
        print(type(spec2))
        # Typo fixed: "number os events" -> "number of events"
        print("This gives the number of events in each window:")
        print("reco : " + str(lo) + "mm to " + str(hi) + "mm : " +
              str(spec2.sum()))
        filler.append(spec2)
Beispiel #8
0
    def load_pre_made(self, spectrum, global_pars):
        """ Load pre-made convolved spectra.

        This method is used to load a pre-made spectra convolved with
        certain resolution, energy-scale or shift values, or a
        combination of two or more at given values.

        The method loads the correct spectra from HDF5s, stored in the
        given directory, then restores the normalisation and fit
        configuration of the original spectrum onto the loaded one.

        Args:
          spectrum (:class:`echidna.core.spectra.Spectra`): Spectrum
            to convolve.
          global_pars: Iterable of global fit parameters; each
            contributes its current value to the pre-made file path
            via ``get_pre_convolved``.

        Returns:
          (:class:`echidna.core.spectra.Spectra`): Convolved spectrum,
            ready for applying further systematics or fitting.
        """
        # Locate spectrum to load from HDF5
        # Start with base spectrum name
        filename = os.path.basename(spectrum._location)
        if self._pre_made_base_dir:
            directory = self._pre_made_base_dir
        else:
            # No base directory set: look next to the original file
            directory = os.path.dirname(spectrum._location) + '/'

        # Add current value of each global parameter
        for par in global_pars:
            dim = par._dimension
            added_dim = False
            # NOTE(review): substring test on the path — assumes dimension
            # names never collide with other path components; verify.
            if dim not in directory:
                added_dim = True
                directory += dim + '/'
            directory, filename = par.get_pre_convolved(directory, filename,
                                                        added_dim)
        # Load spectrum from hdf5
        # Preserve normalisation and fit config from the original
        # spectrum so they can be re-applied to the loaded one.
        num_decays = spectrum._num_decays
        fit_config = spectrum._fit_config
        orig_num_decays = None
        if hasattr(spectrum, '_orig_num_decays'):
            orig_num_decays = spectrum._orig_num_decays
        spectrum = store.load(directory + filename)
        if orig_num_decays:
            spectrum._num_decays = orig_num_decays
        spectrum.scale(num_decays)
        spectrum._fit_config = fit_config
        return spectrum
Beispiel #9
0
def main(args):
    """ Scales and dumps spectra.

    Args:
      args (Namespace): Container for arguments. See::

        $ python dump_scaled.py -h

    Raises:
      IOError: If no file is given to scale
      ValueError: If no scale factor is given
      ValueError: If no parameter is given to scale.
      ValueError: If destination directory does not exist.
    """
    if not args.file:
        parser.print_help()
        raise IOError("No file given in command line to scale")
    if not args.scale:
        parser.print_help()
        raise ValueError("No scale factor given")
    if not args.par:
        parser.print_help()
        raise ValueError("No parameter to scale given")
    if args.dest:
        if os.path.isdir(args.dest):
            directory = args.dest
            if directory[-1] != "/":
                directory += "/"
        else:
            raise ValueError("%s does not exist." % args.dest)
    else:
        directory = os.path.dirname(args.file) + "/"
    filename = os.path.splitext(os.path.basename(args.file))[0]
    scaler = scale.Scale()
    scaler.set_scale_factor(args.scale)
    spectrum = store.load(args.file)
    scaled_spectrum = scaler.scale(spectrum, args.par)
    # Trim a cosmetic trailing ".0" / trailing fractional zeros from the
    # scale factor for the filename.  The original called
    # ``str_sc.rstrip('0')`` and discarded the result (a no-op, since
    # strings are immutable); a bare rstrip would also mangle integer
    # scales ("100" -> "1"), so only strip after a decimal point.
    str_sc = str(args.scale)
    if '.' in str_sc:
        str_sc = str_sc.rstrip('0').rstrip('.')
    filename = directory + filename + "_" + str_sc + "sc.hdf5"
    store.dump(filename, scaled_spectrum)
Beispiel #10
0
    def test_serialisation(self):
        """ Test saving and then reloading a test spectra.

        Fills a spectrum with random events, dumps it to HDF5, reloads
        it, and checks the data array and all binning metadata survive
        the round trip.
        """
        test_decays = 10
        test_spectra = spectra.Spectra("Test", test_decays)
        for _ in range(test_decays):
            energy = random.uniform(0, test_spectra._energy_high)
            radius = random.uniform(0, test_spectra._radial_high)
            time = random.uniform(0, test_spectra._time_high)
            test_spectra.fill(energy, radius, time)

        store.dump("test.hdf5", test_spectra)
        loaded_spectra = store.load("test.hdf5")
        # assertEqual gives informative failure messages, unlike
        # assertTrue(a == b)
        self.assertEqual(loaded_spectra.sum(), test_decays)
        self.assertTrue(
            numpy.array_equal(test_spectra._data, loaded_spectra._data))
        # All scalar binning metadata must round-trip unchanged
        for attr in ("_energy_low", "_energy_high", "_energy_bins",
                     "_energy_width", "_radial_low", "_radial_high",
                     "_radial_bins", "_radial_width", "_time_low",
                     "_time_high", "_time_bins", "_time_width",
                     "_num_decays"):
            self.assertEqual(getattr(test_spectra, attr),
                             getattr(loaded_spectra, attr),
                             msg="Mismatch in %s" % attr)
def main(args):
    """ Script to set 90% CL on all four Majoron-emitting modes.

    Loads the four signal spectra (n=1, 2, 3, 7) and two floating
    backgrounds (Te130_2n2b and B8_Solar) from HDF5, applies livetime
    and fiducial-volume cuts and shrinks to the ROI, then sets a 90%
    confidence limit for each mode twice: first with no penalty terms,
    then with penalty terms on the background normalisations.  Limit
    configs and systematic analysers are dumped to HDF5.

    Args:
      args (Namespace): Parsed command-line arguments; must supply
        ``signals`` (list of signal HDF5 paths), ``two_nu``
        (Te130_2n2b HDF5 path) and ``b8_solar`` (B8_Solar HDF5 path).
    """
    # Load signal spectra
    signals = []
    for signal_hdf5 in args.signals:
        spectrum = store.load(signal_hdf5)
        print spectrum._name
        print "Num decays:", spectrum._num_decays
        print "events:", spectrum.sum()
        signals.append(spectrum)

    # Load background spectra
    floating_backgrounds = []
    Te130_2n2b = store.load(args.two_nu)
    print Te130_2n2b._name
    Te130_2n2b._num_decays = Te130_2n2b.sum()  # Sum not raw events
    print "Num decays:", Te130_2n2b._num_decays
    print "events:", Te130_2n2b.sum()
    floating_backgrounds.append(Te130_2n2b)
    B8_Solar = store.load(args.b8_solar)
    print B8_Solar._name
    B8_Solar._num_decays = B8_Solar.sum()  # Sum not raw events
    print "Num decays:", B8_Solar._num_decays
    print "events:", B8_Solar.sum()
    floating_backgrounds.append(B8_Solar)

    # Apply FV and livetime cuts
    fv_radius = constants._fv_radius
    livetime = 1.0
    for spectrum in signals:
        spectrum.cut(time_low=0.0, time_high=livetime)  # cut to livetime
        spectrum.shrink(radial_low=0.0, radial_high=fv_radius)  # shrink to FV
        spectrum.shrink_to_roi(0.5, 3.0, 0)  # shrink to ROI
    for spectrum in floating_backgrounds:
        spectrum.cut(time_low=0.0, time_high=livetime)  # cut to livetime
        spectrum.shrink(radial_low=0.0, radial_high=fv_radius)  # shrink to FV
        spectrum.shrink_to_roi(0.5, 3.0, 0)  # shrink to ROI

    # Signal configuration: one config per mode, with ("_np") and
    # without penalty term, each scanning counts from the spectrum's
    # full normalisation down towards zero.
    signal_configs_np = []  # no penalty term
    signal_configs = []
    prior = 0.0

    Te130_0n2b_n1_counts = numpy.linspace(signals[0]._num_decays,
                                          0.0, 100, False)
    # endpoint=False in linspace arrays
    Te130_0n2b_n1_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n1_counts)
    Te130_0n2b_n1_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n1_counts)
    signal_configs_np.append(Te130_0n2b_n1_config_np)
    signal_configs.append(Te130_0n2b_n1_config)

    Te130_0n2b_n2_counts = numpy.linspace(signals[1]._num_decays,
                                          0.0, 100, False)
    Te130_0n2b_n2_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n2_counts)
    Te130_0n2b_n2_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n2_counts)
    signal_configs_np.append(Te130_0n2b_n2_config_np)
    signal_configs.append(Te130_0n2b_n2_config)

    Te130_0n2b_n3_counts = numpy.linspace(signals[2]._num_decays,
                                          0.0, 100, False)
    Te130_0n2b_n3_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n3_counts)
    Te130_0n2b_n3_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n3_counts)
    signal_configs_np.append(Te130_0n2b_n3_config_np)
    signal_configs.append(Te130_0n2b_n3_config)

    Te130_0n2b_n7_counts = numpy.linspace(signals[3]._num_decays,
                                          0.0, 100, False)
    Te130_0n2b_n7_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n7_counts)
    Te130_0n2b_n7_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n7_counts)
    signal_configs_np.append(Te130_0n2b_n7_config_np)
    signal_configs.append(Te130_0n2b_n7_config)

    # Background configuration
    # Te130_2n2b
    Te130_2n2b_prior = 3.7396e6  # Based on NEMO-3 T_1/2, for 1 year livetime
                                 # Since we used cut method to cut to livetime
    # No penalty term
    Te130_2n2b_counts_np = numpy.array([Te130_2n2b_prior])
    Te130_2n2b_config_np = limit_config.LimitConfig(Te130_2n2b_prior,
                                                    Te130_2n2b_counts_np)
    # With penalty term
    Te130_2n2b_counts = numpy.linspace(0.8*Te130_2n2b_prior,
                                       1.2*Te130_2n2b_prior, 51)
    # 51 bins to make sure midpoint (no variation from prior) is included
    # to use in penalty term (20%, Andy's document on systematics)
    sigma = 0.2 * Te130_2n2b_prior
    Te130_2n2b_config = limit_config.LimitConfig(Te130_2n2b_prior,
                                                 Te130_2n2b_counts, sigma)

    # B8_Solar
    # from integrating whole spectrum scaled to Valentina's number
    B8_Solar_prior = 1252.99691
    # No penalty term
    B8_Solar_counts_np = numpy.array([B8_Solar_prior])
    B8_Solar_config_np = limit_config.LimitConfig(B8_Solar_prior,
                                                  B8_Solar_counts_np)
    # With penalty term
    B8_Solar_counts = numpy.linspace(0.96*B8_Solar_prior,
                                     1.04*B8_Solar_prior, 11)
    # 11 bins to make sure midpoint (no variation from prior) is included
    sigma = 0.04 * B8_Solar_prior  # 4% To use in penalty term
    B8_Solar_config = limit_config.LimitConfig(B8_Solar_prior,
                                               B8_Solar_counts, sigma)

    # DBIsotope converter information - constant across modes
    isotope_name = "Te130"
    atm_weight_iso = 129.906229
    atm_weight_nat = 127.6
    abundance = 0.3408

    # Make a list of associated nuclear physics info
    # (phase space, matrix element) per mode, in the same order as
    # ``signals``: n=1, n=2, n=3, n=7
    nuclear_params = []
    # n=1:
    phase_space = 5.94e-16
    matrix_element = 3.97  # Averaged
    nuclear_params.append((phase_space, matrix_element))
    # n=2:
    phase_space = None
    matrix_element = None
    nuclear_params.append((phase_space, matrix_element))
    # n=3:
    phase_space = 1.06e-17  # Assuming two Majorons emitted
    matrix_element = 1.e-3
    nuclear_params.append((phase_space, matrix_element))
    # n=7:
    phase_space = 4.83e-17
    matrix_element = 1.e-3
    nuclear_params.append((phase_space, matrix_element))

    # chi squared calculator
    calculator = chi_squared.ChiSquared()

    # Set output location
    output_dir = echidna.__echidna_base__ + "/results/snoplus/"

    # First pass: limits with NO penalty terms on the backgrounds
    for signal, signal_config_np, nuclear_param in zip(signals,
                                                       signal_configs_np,
                                                       nuclear_params):
        print signal._name
        # Create no penalty limit setter
        set_limit_np = limit_setting.LimitSetting(
            signal, floating_backgrounds=floating_backgrounds)
        # Configure signal
        set_limit_np.configure_signal(signal_config_np)
        # Configure 2n2b
        set_limit_np.configure_background(Te130_2n2b._name,
                                          Te130_2n2b_config_np)
        # Configure B8
        set_limit_np.configure_background(B8_Solar._name, B8_Solar_config_np)

        # Set converter
        phase_space, matrix_element = nuclear_param
        roi_efficiency = signal.get_roi(0).get("efficiency")
        converter = decay.DBIsotope(
            isotope_name, atm_weight_iso, atm_weight_nat, abundance,
            phase_space, matrix_element, roi_efficiency=roi_efficiency)

        # Set chi squared calculator
        set_limit_np.set_calculator(calculator)

        # Get limit
        # IndexError from get_limit means no limit found in the scanned
        # counts range; report and continue with the next mode.
        try:
            limit = set_limit_np.get_limit()
            print "-----------------------------------"
            print "90% CL at " + str(limit) + " counts"
            half_life = converter.counts_to_half_life(limit, livetime=livetime)
            print "90% CL at " + str(half_life) + " yr"
            print "-----------------------------------"
        except IndexError as detail:
            print "-----------------------------------"
            print detail
            print "-----------------------------------"

    for i, signal_config_np in enumerate(signal_configs_np):
        store.dump_ndarray(output_dir+signals[i]._name+"_np.hdf5",
                           signal_config_np)
    raw_input("RETURN to continue")

    # Second pass: limits WITH penalty terms on the backgrounds
    signal_num = 0
    for signal, signal_config, nuclear_param in zip(signals, signal_configs,
                                                    nuclear_params):
        print signal._name
        # Create limit setter
        set_limit = limit_setting.LimitSetting(
            signal, floating_backgrounds=floating_backgrounds)
        # Configure signal
        set_limit.configure_signal(signal_config)
        # Configure 2n2b
        set_limit.configure_background(Te130_2n2b._name, Te130_2n2b_config,
                                       plot_systematic=True)
        # Configure B8
        set_limit.configure_background(B8_Solar._name, B8_Solar_config,
                                       plot_systematic=True)

        # Set converter
        phase_space, matrix_element = nuclear_param
        roi_efficiency = signal.get_roi(0).get("efficiency")
        converter = decay.DBIsotope(
            isotope_name, atm_weight_iso, atm_weight_nat, abundance,
            phase_space, matrix_element, roi_efficiency=roi_efficiency)

        # Set chi squared calculator
        set_limit.set_calculator(calculator)

        # Get limit
        try:
            limit = set_limit.get_limit()
            print "-----------------------------------"
            print "90% CL at " + str(limit) + " counts"
            half_life = converter.counts_to_half_life(limit, livetime=livetime)
            print "90% CL at " + str(half_life) + " yr"
            print "-----------------------------------"
        except IndexError as detail:
            print "-----------------------------------"
            print detail
            print "-----------------------------------"

        # Dump SystAnalysers to hdf5
        for syst_analyser in set_limit._syst_analysers.values():
            store.dump_ndarray(output_dir+syst_analyser._name+str(signal_num)+".hdf5",
                               syst_analyser)
        signal_num += 1

    # Dump configs to hdf5
    for i, signal_config in enumerate(signal_configs):
        store.dump_ndarray(output_dir+signals[i]._name+".hdf5", signal_config)
    store.dump_ndarray(output_dir+"Te130_2n2b_config.hdf5", Te130_2n2b_config)
    store.dump_ndarray(output_dir+"B8_Solar_config.hdf5", B8_Solar_config)
Beispiel #12
0
                                             .format(prospective_dir))

if __name__ == "__main__":
    # Command-line interface: paths to the signal and background HDF5s
    parser = argparse.ArgumentParser(
        description="Example limit setting script")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print progress and timing information")
    parser.add_argument("-s", "--signal", action=ReadableDir,
                        help="Supply path for signal hdf5 file")
    parser.add_argument("-t", "--two_nu", action=ReadableDir,
                        help="Supply paths for Te130_2n2b hdf5 files")
    parser.add_argument("-b", "--b8_solar", action=ReadableDir,
                        help="Supply paths for B8_Solar hdf5 files")
    args = parser.parse_args()

    # Load the signal spectrum and the two background spectra
    Te130_0n2b = store.load(args.signal)
    Te130_2n2b = store.load(args.two_nu)
    B8_Solar = store.load(args.b8_solar)

    # Shrink spectra to 5 years - livetime used by Andy
    # And make 3.5m fiducial volume cut
    for spectrum in (Te130_0n2b, Te130_2n2b, B8_Solar):
        spectrum.shrink(0.0, 10.0, 0.0, 3500.0, 0.0, 5.0)

    # Create list of backgrounds
    backgrounds = [Te130_2n2b, B8_Solar]
Beispiel #13
0
    # NOTE(review): this fragment starts mid-scope; its enclosing block
    # (presumably ``if __name__ == "__main__":``) is outside this view —
    # confirm against the full file.
    parser.add_argument("-s",
                        "--signal",
                        action=ReadableDir,
                        help="Supply path for signal hdf5 file")
    parser.add_argument("-t",
                        "--two_nu",
                        action=ReadableDir,
                        help="Supply paths for Te130_2n2b hdf5 files")
    parser.add_argument("-b",
                        "--b8_solar",
                        action=ReadableDir,
                        help="Supply paths for B8_Solar hdf5 files")
    args = parser.parse_args()

    # Create signal spectrum
    Te130_0n2b = store.load(args.signal)

    # Create background spectra
    Te130_2n2b = store.load(args.two_nu)
    B8_Solar = store.load(args.b8_solar)

    # Shrink spectra to 5 years - livetime used by Andy
    # And make 3.5m fiducial volume cut
    Te130_0n2b.shrink(0.0, 10.0, 0.0, 3500.0, 0.0, 5.0)
    Te130_2n2b.shrink(0.0, 10.0, 0.0, 3500.0, 0.0, 5.0)
    B8_Solar.shrink(0.0, 10.0, 0.0, 3500.0, 0.0, 5.0)

    # Create list of backgrounds
    backgrounds = []
    backgrounds.append(Te130_2n2b)
    backgrounds.append(B8_Solar)
Beispiel #14
0
def main(args):
    """ Script to set 90% CL on all four Majoron-emitting modes.
    """
    # Load signal spectra
    signals = []
    for signal_hdf5 in args.signals:
        spectrum = store.load(signal_hdf5)
        print spectrum._name
        print "Num decays:", spectrum._num_decays
        print "raw events:", spectrum._raw_events
        print "events:", spectrum.sum()
        signals.append(spectrum)

    # Load background spectra
    floating_backgrounds = []
    Xe136_2n2b = store.load(args.two_nu)
    print Xe136_2n2b._name
    print "Num decays:", Xe136_2n2b._num_decays
    print "raw events:", Xe136_2n2b._raw_events
    print "events:", Xe136_2n2b.sum()
    floating_backgrounds.append(Xe136_2n2b)
    B8_Solar = store.load(args.b8_solar)
    print B8_Solar._name
    B8_Solar._num_decays = B8_Solar.sum()  # Sum not raw events
    print "Num decays:", B8_Solar._num_decays
    print "raw events:", B8_Solar._raw_events
    print "events:", B8_Solar.sum()
    floating_backgrounds.append(B8_Solar)

    # Apply FV and livetime cuts
    fv_radius = 1200.0  # 1.2m PRC 86, 021601 (2012)
    livetime = 1.0
    for spectrum in signals:
        spectrum.cut(time_low=0.0, time_high=livetime)  # cut to livetime
        spectrum.shrink(radial_low=0.0, radial_high=fv_radius)  # shrink to FV
        spectrum.shrink_to_roi(0.5, 3.0, 0)  # shrink to ROI
    for spectrum in floating_backgrounds:
        spectrum.cut(time_low=0.0, time_high=livetime)  # cut to livetime
        spectrum.shrink(radial_low=0.0, radial_high=fv_radius)  # shrink to FV
        spectrum.shrink_to_roi(0.5, 3.0, 0)  # shrink to ROI

    # Signal configuration
    signal_configs_np = []  # no penalty term
    signal_configs = []
    prior = 0.0

    Xe136_0n2b_n1_counts = numpy.linspace(signals[0]._num_decays,
                                          0.0, 100, False)
    # endpoint=False in linspace arrays
    Xe136_0n2b_n1_config_np = limit_config.LimitConfig(prior,
                                                       Xe136_0n2b_n1_counts)
    Xe136_0n2b_n1_config = limit_config.LimitConfig(prior,
                                                    Xe136_0n2b_n1_counts)
    signal_configs_np.append(Xe136_0n2b_n1_config_np)
    signal_configs.append(Xe136_0n2b_n1_config)

    Xe136_0n2b_n2_counts = numpy.linspace(signals[1]._num_decays,
                                          0.0, 100, False)
    Xe136_0n2b_n2_config_np = limit_config.LimitConfig(prior,
                                                       Xe136_0n2b_n2_counts)
    Xe136_0n2b_n2_config = limit_config.LimitConfig(prior,
                                                    Xe136_0n2b_n2_counts)
    signal_configs_np.append(Xe136_0n2b_n2_config_np)
    signal_configs.append(Xe136_0n2b_n2_config)

    Xe136_0n2b_n3_counts = numpy.linspace(signals[2]._num_decays,
                                          0.0, 100, False)
    Xe136_0n2b_n3_config_np = limit_config.LimitConfig(prior,
                                                       Xe136_0n2b_n3_counts)
    Xe136_0n2b_n3_config = limit_config.LimitConfig(prior,
                                                    Xe136_0n2b_n3_counts)
    signal_configs_np.append(Xe136_0n2b_n3_config_np)
    signal_configs.append(Xe136_0n2b_n3_config)

    Xe136_0n2b_n7_counts = numpy.linspace(signals[3]._num_decays,
                                          0.0, 100, False)
    Xe136_0n2b_n7_config_np = limit_config.LimitConfig(prior,
                                                       Xe136_0n2b_n7_counts)
    Xe136_0n2b_n7_config = limit_config.LimitConfig(prior,
                                                    Xe136_0n2b_n7_counts)
    signal_configs_np.append(Xe136_0n2b_n7_config_np)
    signal_configs.append(Xe136_0n2b_n7_config)

    # Background configuration
    # Xe136_2n2b
    Xe136_2n2b_prior = 1.132e6  # Based on KLZ T_1/2, for 1 years
                                # Since we used cut method to cut to livetime

    # No penalty term
    Xe136_2n2b_counts_np = numpy.array([Xe136_2n2b_prior])
    Xe136_2n2b_config_np = limit_config.LimitConfig(Xe136_2n2b_prior,
                                                    Xe136_2n2b_counts_np)

    # With penalty term
    Xe136_2n2b_counts = numpy.linspace(0.947*Xe136_2n2b_prior,
                                       1.053*Xe136_2n2b_prior, 51)
    # 51 bins to make sure midpoint (no variation from prior) is included
    # to use in penalty term (5.3% PRC 86, 021601 (2012))
    sigma = 0.053 * Xe136_2n2b_prior
    Xe136_2n2b_config = limit_config.LimitConfig(Xe136_2n2b_prior,
                                                 Xe136_2n2b_counts, sigma)

    # B8_Solar
    # Assume same rate as SNO+ for now
    B8_Solar_prior = 1252.99691
    # No penalty term
    B8_Solar_counts_np = numpy.array([B8_Solar_prior])
    B8_Solar_config_np = limit_config.LimitConfig(B8_Solar_prior,
                                                  B8_Solar_counts_np)
    # With penalty term
    B8_Solar_counts = numpy.linspace(0.96*B8_Solar_prior,
                                     1.04*B8_Solar_prior, 10)
    # 11 bins to make sure midpoint (no variation from prior) is included
    sigma = 0.04 * B8_Solar_prior  # 4% To use in penalty term
    B8_Solar_config = limit_config.LimitConfig(B8_Solar_prior,
                                               B8_Solar_counts, sigma)

    # DBIsotope converter information - constant across modes
    Xe136_atm_weight = 135.907219  # Molar Mass Calculator, http://www.webqc.org/mmcalc.php, 2015-05-07
    Xe134_atm_weight = 133.90539450  # Molar Mass Calculator, http://www.webqc.org/mmcalc.php, 2015-06-03
    # We want the atomic weight of the enriched Xenon
    XeEn_atm_weight = 0.9093*Xe136_atm_weight + 0.0889*Xe134_atm_weight
    Xe136_abundance = 0.089  # Xenon @ Periodic Table of Chemical Elements, http://www/webqc.org/periodictable-Xenon-Xe.html, 05/07/2015
    loading = 0.0244  # PRC 86, 021601 (2012)
    ib_radius = 1540.  # mm, PRC 86, 021601 (2012)
    scint_density = 7.5628e-7  # kg/mm^3, calculated by A Back 2015-07-28

    # Make a list of associated nuclear physics info
    nuclear_params = []
    # n=1:
    phase_space = 6.02e-16
    matrix_element = 2.57  # Averaged
    nuclear_params.append((phase_space, matrix_element))
    # n=2:
    phase_space = None
    matrix_element = None
    nuclear_params.append((phase_space, matrix_element))
    # n=1:
    phase_space = 1.06e-17  # Assuming two Majorons emitted
    matrix_element = 1.e-3
    nuclear_params.append((phase_space, matrix_element))
    # n=1:
    phase_space = 4.54e-17
    matrix_element = 1.e-3
    nuclear_params.append((phase_space, matrix_element))

    # chi squared calculator
    calculator = chi_squared.ChiSquared()

    livetime = 112.3 / 365.25  # y, KamLAND-Zen 112.3 live days

    # Set output location
    output_dir = echidna.__echidna_base__ + "/results/snoplus/"

    for signal, signal_config_np, nuclear_param in zip(signals,
                                                       signal_configs_np,
                                                       nuclear_params):
        print signal._name
        # Create no penalty limit setter
        set_limit_np = limit_setting.LimitSetting(
            signal, floating_backgrounds=floating_backgrounds)
        # Configure signal
        set_limit_np.configure_signal(signal_config_np)
        # Configure 2n2b
        set_limit_np.configure_background(Xe136_2n2b._name,
                                          Xe136_2n2b_config_np)
        # Configure B8
        set_limit_np.configure_background(B8_Solar._name, B8_Solar_config_np)

        # Set converter
        phase_space, matrix_element = nuclear_param
        roi_efficiency = signal.get_roi(0).get("efficiency")
        converter = decay.DBIsotope(
            "Xe136", Xe136_atm_weight, XeEn_atm_weight, Xe136_abundance,
            phase_space, matrix_element, loading=loading, fv_radius=fv_radius,
            outer_radius=ib_radius, scint_density=scint_density,
            roi_efficiency=roi_efficiency)

        # Set chi squared calculator
        set_limit_np.set_calculator(calculator)

        # Get limit
        try:
            limit = set_limit_np.get_limit()
            print "-----------------------------------"
            print "90% CL at " + str(limit) + " counts"
            half_life = converter.counts_to_half_life(limit, livetime=livetime)
            print "90% CL at " + str(half_life) + " yr"
            print "-----------------------------------"
        except IndexError as detail:
            print "-----------------------------------"
            print detail
            print "-----------------------------------"

    for i, signal_config_np in enumerate(signal_configs_np):
        store.dump_ndarray(output_dir+signals[i]._name+"_np.hdf5",
                           signal_config_np)
    raw_input("RETURN to continue")

    signal_num = 0
    for signal, signal_config, nuclear_param in zip(signals, signal_configs,
                                                    nuclear_params):
        print signal._name
        # Create limit setter
        set_limit = limit_setting.LimitSetting(
            signal, floating_backgrounds=floating_backgrounds)
        # Configure signal
        set_limit.configure_signal(signal_config)
        # Configure 2n2b
        set_limit.configure_background(Xe136_2n2b._name, Xe136_2n2b_config,
                                       plot_systematic=True)
        # Configure B8
        set_limit.configure_background(B8_Solar._name, B8_Solar_config,
                                       plot_systematic=True)

        # Set converter
        phase_space, matrix_element = nuclear_param
        roi_efficiency = signal.get_roi(0).get("efficiency")
        converter = decay.DBIsotope(
            "Xe136", Xe136_atm_weight, XeEn_atm_weight, Xe136_abundance,
            phase_space, matrix_element, loading=loading, fv_radius=fv_radius,
            outer_radius=ib_radius, scint_density=scint_density,
            roi_efficiency=roi_efficiency)

        # Set chi squared calculator
        set_limit.set_calculator(calculator)

        # Get limit
        try:
            limit = set_limit.get_limit()
            print "-----------------------------------"
            print "90% CL at " + str(limit) + " counts"
            half_life = converter.counts_to_half_life(limit, livetime=livetime)
            print "90% CL at " + str(half_life) + " yr"
            print "-----------------------------------"
        except IndexError as detail:
            print "-----------------------------------"
            print detail
            print "-----------------------------------"

        # Dump SystAnalysers to hdf5
        for syst_analyser in set_limit._syst_analysers.values():
            store.dump_ndarray(output_dir+syst_analyser._name+str(signal_num)+".hdf5",
                               syst_analyser)
        signal_num += 1

    # Dump configs to hdf5
    for i, signal_config in enumerate(signal_configs):
        store.dump_ndarray(output_dir+signals[i]._name+".hdf5", signal_config)
    store.dump_ndarray(output_dir+"Xe136_2n2b_config.hdf5", Xe136_2n2b_config)
    store.dump_ndarray(output_dir+"B8_Solar_config.hdf5", B8_Solar_config)
Beispiel #15
0
  This will create the hdf5 file ``combined.hdf5``.
  There is no limit to the number of files you can combine.
"""

from echidna.output import store
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-f",
                        "--files",
                        nargs='+',
                        type=str,
                        help="Space seperated hdf5 files to combine.")
    args = parser.parse_args()
    if not args.files:
        parser.print_help()
        parser.error("Must pass more than 1 file to combine")
    if len(args.files) < 2:
        parser.print_help()
        parser.error("Must pass more than 1 file to combine")
    first = True
    for hdf5 in args.files:
        if first:
            spectrum1 = store.load(hdf5)
            first = False
        else:
            spectrum2 = store.load(hdf5)
            spectrum1.add(spectrum2)
    store.dump("combined.hdf5", spectrum1)
Beispiel #16
0
    # Set sensible levels, pick the desired colormap and define normalization
    if kwargs.get("color_scheme") is None:
        color_scheme = "hot_r"  # default
    else:
        color_scheme = kwargs.get("color_scheme")
    color_map = plt.get_cmap(color_scheme)
    linear = numpy.linspace(numpy.sqrt(data.min()),
                            numpy.sqrt(data.max()), num=100)
    locator = FixedLocator(linear**2)
    levels = locator.tick_values(data.min(), data.max())
    norm = BoundaryNorm(levels, ncolors=color_map.N)

    # Plot color map
    color_map = axis.pcolormesh(X, Y, data, cmap=color_map, norm=norm)
    color_bar = fig.colorbar(color_map)
    color_bar.set_label("Counts per bin")

    if show_plot:
        plt.show()
    return fig


if __name__ == "__main__":
    import echidna
    import echidna.output.store as store

    filename = "/data/Te130_0n2b_mc_smeared.hdf5"
    spectre = store.load(echidna.__echidna_home__ + filename)
    plot_surface(spectre, 2)
def main(args):
    """ Script to set 90% CL on all four Majoron-emitting modes.

    Loads signal and background spectra from HDF5, applies fiducial
    volume and livetime cuts, then sets a 90% confidence limit for
    each Te130 Majoron mode (n=1, 2, 3 and 7), first with no penalty
    terms and then with penalty terms on the floating backgrounds.
    Resulting limit configs and systematic analysers are dumped to
    hdf5 under ``results/snoplus``.

    Args:
      args (:class:`argparse.Namespace`): Command-line arguments.
        Expected to provide ``signals`` (list of signal hdf5 paths),
        ``two_nu`` (Te130_2n2b hdf5 path) and ``b8_solar``
        (B8_Solar hdf5 path).
    """
    # Load signal spectra
    signals = []
    for signal_hdf5 in args.signals:
        spectrum = store.load(signal_hdf5)
        print spectrum._name
        print "Num decays:", spectrum._num_decays
        print "events:", spectrum.sum()
        signals.append(spectrum)

    # Load background spectra
    floating_backgrounds = []
    Te130_2n2b = store.load(args.two_nu)
    print Te130_2n2b._name
    Te130_2n2b._num_decays = Te130_2n2b.sum()  # Sum not raw events
    print "Num decays:", Te130_2n2b._num_decays
    print "events:", Te130_2n2b.sum()
    floating_backgrounds.append(Te130_2n2b)
    B8_Solar = store.load(args.b8_solar)
    print B8_Solar._name
    B8_Solar._num_decays = B8_Solar.sum()  # Sum not raw events
    print "Num decays:", B8_Solar._num_decays
    print "events:", B8_Solar.sum()
    floating_backgrounds.append(B8_Solar)

    # Apply FV and livetime cuts
    fv_radius = constants._fv_radius
    # NOTE(review): livetime of 1 y is used both for the cut below and
    # for the half-life conversion later - confirm this is intended
    livetime = 1.0
    for spectrum in signals:
        spectrum.cut(time_low=0.0, time_high=livetime)  # cut to livetime
        spectrum.shrink(radial_low=0.0, radial_high=fv_radius)  # shrink to FV
        spectrum.shrink_to_roi(0.5, 3.0, 0)  # shrink to ROI
    for spectrum in floating_backgrounds:
        spectrum.cut(time_low=0.0, time_high=livetime)  # cut to livetime
        spectrum.shrink(radial_low=0.0, radial_high=fv_radius)  # shrink to FV
        spectrum.shrink_to_roi(0.5, 3.0, 0)  # shrink to ROI

    # Signal configuration
    signal_configs_np = []  # no penalty term
    signal_configs = []
    prior = 0.0

    Te130_0n2b_n1_counts = numpy.linspace(signals[0]._num_decays, 0.0, 100,
                                          False)
    # endpoint=False in linspace arrays
    Te130_0n2b_n1_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n1_counts)
    Te130_0n2b_n1_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n1_counts)
    signal_configs_np.append(Te130_0n2b_n1_config_np)
    signal_configs.append(Te130_0n2b_n1_config)

    Te130_0n2b_n2_counts = numpy.linspace(signals[1]._num_decays, 0.0, 100,
                                          False)
    Te130_0n2b_n2_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n2_counts)
    Te130_0n2b_n2_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n2_counts)
    signal_configs_np.append(Te130_0n2b_n2_config_np)
    signal_configs.append(Te130_0n2b_n2_config)

    Te130_0n2b_n3_counts = numpy.linspace(signals[2]._num_decays, 0.0, 100,
                                          False)
    Te130_0n2b_n3_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n3_counts)
    Te130_0n2b_n3_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n3_counts)
    signal_configs_np.append(Te130_0n2b_n3_config_np)
    signal_configs.append(Te130_0n2b_n3_config)

    Te130_0n2b_n7_counts = numpy.linspace(signals[3]._num_decays, 0.0, 100,
                                          False)
    Te130_0n2b_n7_config_np = limit_config.LimitConfig(prior,
                                                       Te130_0n2b_n7_counts)
    Te130_0n2b_n7_config = limit_config.LimitConfig(prior,
                                                    Te130_0n2b_n7_counts)
    signal_configs_np.append(Te130_0n2b_n7_config_np)
    signal_configs.append(Te130_0n2b_n7_config)

    # Background configuration
    # Te130_2n2b
    Te130_2n2b_prior = 3.7396e6  # Based on NEMO-3 T_1/2, for 1 year livetime
    # Since we used cut method to cut to livetime
    # No penalty term
    Te130_2n2b_counts_np = numpy.array([Te130_2n2b_prior])
    Te130_2n2b_config_np = limit_config.LimitConfig(Te130_2n2b_prior,
                                                    Te130_2n2b_counts_np)
    # With penalty term
    Te130_2n2b_counts = numpy.linspace(0.8 * Te130_2n2b_prior,
                                       1.2 * Te130_2n2b_prior, 51)
    # 51 bins to make sure midpoint (no variation from prior) is included
    # to use in penalty term (20%, Andy's document on systematics)
    sigma = 0.2 * Te130_2n2b_prior
    Te130_2n2b_config = limit_config.LimitConfig(Te130_2n2b_prior,
                                                 Te130_2n2b_counts, sigma)

    # B8_Solar
    # from integrating whole spectrum scaled to Valentina's number
    B8_Solar_prior = 1252.99691
    # No penalty term
    B8_Solar_counts_np = numpy.array([B8_Solar_prior])
    B8_Solar_config_np = limit_config.LimitConfig(B8_Solar_prior,
                                                  B8_Solar_counts_np)
    # With penalty term
    B8_Solar_counts = numpy.linspace(0.96 * B8_Solar_prior,
                                     1.04 * B8_Solar_prior, 11)
    # 11 bins to make sure midpoint (no variation from prior) is included
    sigma = 0.04 * B8_Solar_prior  # 4% To use in penalty term
    B8_Solar_config = limit_config.LimitConfig(B8_Solar_prior, B8_Solar_counts,
                                               sigma)

    # DBIsotope converter information - constant across modes
    isotope_name = "Te130"
    atm_weight_iso = 129.906229
    atm_weight_nat = 127.6
    abundance = 0.3408

    # Make a list of associated nuclear physics info - one
    # (phase_space, matrix_element) pair per mode, in signal order
    nuclear_params = []
    # n=1:
    phase_space = 5.94e-16
    matrix_element = 3.97  # Averaged
    nuclear_params.append((phase_space, matrix_element))
    # n=2:
    phase_space = None
    matrix_element = None
    nuclear_params.append((phase_space, matrix_element))
    # n=3:
    phase_space = 1.06e-17  # Assuming two Majorons emitted
    matrix_element = 1.e-3
    nuclear_params.append((phase_space, matrix_element))
    # n=7:
    phase_space = 4.83e-17
    matrix_element = 1.e-3
    nuclear_params.append((phase_space, matrix_element))

    # chi squared calculator
    calculator = chi_squared.ChiSquared()

    # Set output location
    output_dir = echidna.__echidna_base__ + "/results/snoplus/"

    # First pass: limits with no penalty terms
    for signal, signal_config_np, nuclear_param in zip(signals,
                                                       signal_configs_np,
                                                       nuclear_params):
        print signal._name
        # Create no penalty limit setter
        set_limit_np = limit_setting.LimitSetting(
            signal, floating_backgrounds=floating_backgrounds)
        # Configure signal
        set_limit_np.configure_signal(signal_config_np)
        # Configure 2n2b
        set_limit_np.configure_background(Te130_2n2b._name,
                                          Te130_2n2b_config_np)
        # Configure B8
        set_limit_np.configure_background(B8_Solar._name, B8_Solar_config_np)

        # Set converter
        phase_space, matrix_element = nuclear_param
        roi_efficiency = signal.get_roi(0).get("efficiency")
        converter = decay.DBIsotope(isotope_name,
                                    atm_weight_iso,
                                    atm_weight_nat,
                                    abundance,
                                    phase_space,
                                    matrix_element,
                                    roi_efficiency=roi_efficiency)

        # Set chi squared calculator
        set_limit_np.set_calculator(calculator)

        # Get limit
        try:
            limit = set_limit_np.get_limit()
            print "-----------------------------------"
            print "90% CL at " + str(limit) + " counts"
            half_life = converter.counts_to_half_life(limit, livetime=livetime)
            print "90% CL at " + str(half_life) + " yr"
            print "-----------------------------------"
        except IndexError as detail:
            print "-----------------------------------"
            print detail
            print "-----------------------------------"

    for i, signal_config_np in enumerate(signal_configs_np):
        store.dump_ndarray(output_dir + signals[i]._name + "_np.hdf5",
                           signal_config_np)
    raw_input("RETURN to continue")

    # Second pass: limits with penalty terms and systematic plots
    signal_num = 0
    for signal, signal_config, nuclear_param in zip(signals, signal_configs,
                                                    nuclear_params):
        print signal._name
        # Create limit setter
        set_limit = limit_setting.LimitSetting(
            signal, floating_backgrounds=floating_backgrounds)
        # Configure signal
        set_limit.configure_signal(signal_config)
        # Configure 2n2b
        set_limit.configure_background(Te130_2n2b._name,
                                       Te130_2n2b_config,
                                       plot_systematic=True)
        # Configure B8
        set_limit.configure_background(B8_Solar._name,
                                       B8_Solar_config,
                                       plot_systematic=True)

        # Set converter
        phase_space, matrix_element = nuclear_param
        roi_efficiency = signal.get_roi(0).get("efficiency")
        converter = decay.DBIsotope(isotope_name,
                                    atm_weight_iso,
                                    atm_weight_nat,
                                    abundance,
                                    phase_space,
                                    matrix_element,
                                    roi_efficiency=roi_efficiency)

        # Set chi squared calculator
        set_limit.set_calculator(calculator)

        # Get limit
        try:
            limit = set_limit.get_limit()
            print "-----------------------------------"
            print "90% CL at " + str(limit) + " counts"
            half_life = converter.counts_to_half_life(limit, livetime=livetime)
            print "90% CL at " + str(half_life) + " yr"
            print "-----------------------------------"
        except IndexError as detail:
            print "-----------------------------------"
            print detail
            print "-----------------------------------"

        # Dump SystAnalysers to hdf5
        for syst_analyser in set_limit._syst_analysers.values():
            store.dump_ndarray(
                output_dir + syst_analyser._name + str(signal_num) + ".hdf5",
                syst_analyser)
        signal_num += 1

    # Dump configs to hdf5
    for i, signal_config in enumerate(signal_configs):
        store.dump_ndarray(output_dir + signals[i]._name + ".hdf5",
                           signal_config)
    store.dump_ndarray(output_dir + "Te130_2n2b_config.hdf5",
                       Te130_2n2b_config)
    store.dump_ndarray(output_dir + "B8_Solar_config.hdf5", B8_Solar_config)
Beispiel #18
0
                        "e.g. 0.05 for 5 percent")
    parser.add_argument("path", type=str,
                        help="specify path to hdf5 file")
    args = parser.parse_args()

    directory = args.path[:args.path.rfind("/")+1]  # strip filename
    # strip directory and extension
    filename = args.path[args.path.rfind("/")+1:args.path.rfind(".")]

    if args.energy_resolution:
        energy_smear = smear.EnergySmearRes()
        energy_smear.set_resolution(args.energy_resolution)
    else:  # use light yield
        energy_smear = smear.EnergySmearLY()
    radial_smear = smear.RadialSmear()
    spectrum = store.load(args.path)

    if args.smear_method == "weight":  # Use default smear method
        for par in spectrum.get_config().get_pars():
            if "energy" in par:
                energy_par = par
                spectrum = energy_smear.weighted_smear(spectrum,
                                                       par=energy_par)
            elif "radial" in par:
                radial_par = par
                spectrum = radial_smear.weighted_smear(spectrum,
                                                       par=radial_par)
    elif args.smear_method == "random":
        for par in spectrum.get_config().get_pars():
            if "energy" in par:
                energy_par = par
Beispiel #19
0
"""

from echidna.output import store
from echidna.core import spectra

if __name__ == "__main__":
    import argparse
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--input", type=str,
                        help="Input spectra to rebin")
    parser.add_argument("-o", "--output", type=str, default=None,
                        help="Name of output. Default adds _rebin to name")
    parser.add_argument("-b", "--bins", nargs='+', type=int,
                        help="Number of bins for each dimension")
    args = parser.parse_args()
    directory = args.input[:args.input.rfind("/")+1]  # strip filename
    # strip directory and extension
    filename = args.input[args.input.rfind("/")+1:args.input.rfind(".")]
    spectrum = store.load(args.input)
    new_bins = args.bins
    print spectrum._data.shape
    print "sum pre bin", spectrum.sum()
    spectrum.rebin(new_bins)
    print 'Sum post bin:', spectrum.sum()
    print spectrum._data.shape
    f_out = args.output
    if not f_out:
        f_out = directory + filename + "_rebin" + ".hdf5"
    print "Rebinned", args.input, ", saved to", f_out
    store.dump(f_out, spectrum)
def main(args, name=None, floating_backgrounds=None, signals=None):
    """ The limit setting script.

    Args:
      args (:class:`argparse.Namespace`): Arguments passed via command-
        line
      name (string): Name for this configuration. If None, the name is
        read from the YAML config supplied via ``args.from_file``.
      floating_backgrounds (list): List of background spectra to float
      signals (list): List of signals to set limits for

    .. warning:: floating backgrounds and signals specified via config
      will be **appended** to the lists passed as args, so if you are
      passing a spectrum to one of these arguments, make sure it is not
      also included in the config.

    """
    # Mutable default arguments ([]) are created once and shared across
    # calls, so spectra appended during one call would leak into the
    # next. Use None sentinels and create fresh lists per call instead.
    if floating_backgrounds is None:
        floating_backgrounds = []
    if signals is None:
        signals = []

    logger = utilities.start_logging()

    if args.save_path is not None:
        logger.warning("Overriding default save path!")
        # NOTE(review): this logs output.__default_save_path__ rather
        # than args.save_path - presumably the default is re-pointed by
        # the caller before main() runs; confirm against the caller.
        logging.getLogger("extra").warning(
            " --> all output will be saved to %s" %
            output.__default_save_path__)

    # Load the YAML run configuration, closing the file promptly via a
    # context manager (the original left the handle to the GC).
    # NOTE: yaml.load can construct arbitrary Python objects - consider
    # yaml.safe_load if the config file is not fully trusted.
    with open(args.from_file, "r") as config_file:
        args_config = yaml.load(config_file)

    if not name:  # no name supplied, use name from config
        name = args_config.get("name")
        logger.info("Configuration name: %s" % name)

    logging.getLogger("extra").debug("\n\n%s\n" % yaml.dump(args_config))

    # Set plot-grab error if required
    # Error from reading values off a published plot: n_pixel pixels at
    # pixel_err each, combined in quadrature over three axes.
    if args.upper_bound or args.lower_bound:
        pixel_err = 0.005
        n_pixel = 4
        plot_grab_err = numpy.sqrt(3 * (n_pixel*pixel_err)**2)

    # Set ROI from config
    if args_config.get("roi") is not None:
        roi = args_config.get("roi")
        logger.info("Set ROI")
        logging.getLogger("extra").info("\n%s\n" % json.dumps(roi))
        if not isinstance(roi, dict):
            raise TypeError("roi should be a dictionary, "
                            "not type %s" % type(roi))
    else:
        logger.warning("No ROI found, spectra will not be shrunk")
        # Define roi so the fit.Fit(...) call below does not raise
        # NameError when no ROI is supplied (bug in the original, which
        # left roi unbound on this branch).
        roi = None

    # Set per_bin, as required
    if args_config.get("per_bin") is not None:
        per_bin = args_config.get("per_bin")
        logger.info("Storing per-bin information: %s" % per_bin)
    else:
        logger.warning("No per-bin flag found - setting per_bin to False")
        per_bin = False

    # Set store_summary, as required
    if args_config.get("store_summary") is not None:
        store_summary = args_config.get("store_summary")
        logger.info("Storing Summary information: %s" % store_summary)
    else:
        logger.warning("No store_summary flag found - "
                       "setting store_summary to False")
        store_summary = False

    # Set test_statistic
    # This is fixed
    chi_squared = test_statistic.BakerCousinsChi(per_bin=True)
    logger.info("Using test statisitc: BakerCousinsChi")

    # Set fit_config
    if args_config.get("fit_config") is not None:
        fit_config = GlobalFitConfig.load_from_file(
            args_config.get("fit_config"))
    else:  # Don't have any global fit parameters here - make blank config
        logger.warning("No fit_config path found - creating blank config")
        parameters = OrderedDict({})
        # The name set here will be the same name given to the GridSearch
        # created by the fitter, and the Summary class saved to hdf5.
        fit_config = GlobalFitConfig(name, parameters)
    logger.info("Using GlobalFitConfig with the following parameters:")
    logging.getLogger("extra").info(fit_config.get_pars())

    # Set data
    if args_config.get("data") is not None:
        logger.info("Using data spectrum %s" % args_config.get("data"))
        data = store.load(args_config.get("data"))
    else:
        logger.error("No data path found")
        logging.getLogger("extra").warning(
            " --> echidna can use total background as data, "
            "but a blank data spectrum should still be supplied.")
        raise ValueError("No data path found")

    # Apply plot-grab errors as appropriate
    if args.upper_bound:
        data_neg_errors = utilities.get_array_errors(
            data._data, lin_err=-plot_grab_err, log10=True)
        data._data = data._data + data_neg_errors
    if args.lower_bound:
        data_pos_errors = utilities.get_array_errors(
            data._data, lin_err=plot_grab_err, log10=True)
        data._data = data._data + data_pos_errors

    # Set fixed backgrounds
    # Create fixed_backgrounds dict with Spectra as keys and priors as values
    fixed_backgrounds = {}
    if args_config.get("fixed") is not None:
        if not isinstance(args_config.get("fixed"), dict):
            raise TypeError(
                "Expecting dictionary with paths to fixed backgrounds as keys "
                "and num_decays for each background as values")
        for filename, num_decays in args_config.get("fixed").iteritems():
            logger.info("Using fixed spectrum: %s (%.4f decays)" %
                        (filename, num_decays))
            spectrum = store.load(filename)

            # Add plot-grab errors as appropriate
            if args.upper_bound:
                spectrum_neg_errors = utilities.get_array_errors(
                    spectrum._data, lin_err=-plot_grab_err, log10=True)
                spectrum._data = spectrum._data + spectrum_neg_errors
            if args.lower_bound:
                spectrum_pos_errors = utilities.get_array_errors(
                    spectrum._data, lin_err=plot_grab_err, log10=True)
                spectrum._data = spectrum._data + spectrum_pos_errors

            fixed_backgrounds[spectrum] = num_decays
    else:
        logger.warning("No fixed spectra found")

    # Set floating backgrounds
    # Spectra already present in the floating_backgrounds argument take
    # precedence over duplicates named in the config.
    spectrum_names = [bkg.get_name() for bkg in floating_backgrounds]
    if args_config.get("floating") is not None:  # passed by config
        if not isinstance(args_config.get("floating"), list):
            raise TypeError("Expecting list of paths to floating backgrounds")
        for filename in args_config.get("floating"):
            spectrum = store.load(filename)
            if spectrum.get_name() not in spectrum_names:
                logger.info("Using floating background from: %s" % filename)
                floating_backgrounds.append(spectrum)
            else:  # Spectrum already loaded - passed via args
                logger.warning(
                    "Background %s already loaded. NOT using floating "
                    "background from: %s" % (spectrum.get_name(), filename))
    else:
        logger.warning("No floating backgrounds found")

    # Add plot-grab errors as appropriate
    for background in floating_backgrounds:
        if args.upper_bound:
            spectrum_neg_errors = utilities.get_array_errors(
                background._data, lin_err=-plot_grab_err, log10=True)
            background._data = background._data + spectrum_neg_errors
        if args.lower_bound:
            spectrum_pos_errors = utilities.get_array_errors(
                background._data, lin_err=plot_grab_err, log10=True)
            background._data = background._data + spectrum_pos_errors

    # Using default minimiser (GridSearch) so let Fit class handle this

    # Create fitter
    # No convolutions here --> use_pre_made = False
    fitter = fit.Fit(roi, chi_squared, fit_config, data=data,
                     fixed_backgrounds=fixed_backgrounds,
                     floating_backgrounds=floating_backgrounds,
                     per_bin=per_bin, use_pre_made=False)
    logger.info("Created fitter")

    # Make data if running sensitivity study
    if args.sensitivity:
        data = fitter.get_data()  # Already added blank spectrum
        # Add fixed background
        data.add(fitter.get_fixed_background())
        # Add floating backgrounds - scaled to prior
        for background in fitter.get_floating_backgrounds():
            prior = background.get_fit_config().get_par("rate").get_prior()
            background.scale(prior)
            data.add(background)
        # Re-set data
        fitter.set_data(data)

    # Fit with no signal
    stat_zero = fitter.fit()
    fit_results = fitter.get_fit_results()
    logger.info("Calculated stat_zero: %.4f" % numpy.sum(stat_zero))
    logger.info("Fit summary:")
    logging.getLogger("extra").info("\n%s\n" %
                                    json.dumps(fit_results.get_summary()))

    # Load signals
    # As for floating backgrounds, signals passed via args win over
    # duplicates named in the config.
    spectrum_names = [signal.get_name() for signal in signals]
    if args_config.get("signals") is not None:
        for filename in args_config.get("signals"):
            signal = store.load(filename)
            if signal.get_name() not in spectrum_names:
                logger.info("Using signal spectrum from: %s" % filename)
                signals.append(signal)
            else:  # signal already loaded - passed via args
                logger.warning(
                    "Signal %s already loaded. NOT using signal "
                    "spectrum from: %s" % (signal.get_name(), filename))
    else:
        logger.error("No signal spectra found")
        raise CompatibilityError("Must have at least one signal to set limit")

    # Add plot-grab errors as appropriate
    # For signal we want to swap negative and positive fluctuations
    # The lower bound on the limit, is when all our backgrounds have
    # fluctuated down (through plot-grabbing) but the signal has
    # fluctuated up. Then the reverse is true for the upper bound,
    # backgrounds are fluctuated up and signal is fluctuated down
    for signal in signals:
        if args.upper_bound:
            signal_pos_errors = utilities.get_array_errors(
                signal._data, lin_err=plot_grab_err, log10=True)
            signal._data = signal._data + signal_pos_errors
        if args.lower_bound:
            signal_neg_errors = utilities.get_array_errors(
                signal._data, lin_err=-plot_grab_err, log10=True)
            signal._data = signal._data + signal_neg_errors

    # KamLAND-Zen limits
    # Published half-life limits (years) used for comparison in the log
    # output below.
    klz_limits = {"Xe136_0n2b_n1": 2.6e24,
                  "Xe136_0n2b_n2": 1.0e24,
                  "Xe136_0n2b_n3": 4.5e23,
                  "Xe136_0n2b_n7": 1.1e22}

    # KamLAND-Zen detector info
    klz_detector = constants.klz_detector

    # Loop through signals and set limit for each
    for signal in signals:
        # Reset GridSearch - with added signal rate parameter
        fitter.get_fit_results().reset_grids()

        # Create converter
        # Converts between a signal count limit and a half-life limit,
        # using isotope and detector constants.
        converter = decay.DBIsotope(
            signal._name, klz_detector.get("Xe136_atm_weight"),
            klz_detector.get("XeEn_atm_weight"),
            klz_detector.get("Xe136_abundance"),
            decay.phase_spaces.get(signal._name),
            decay.matrix_elements.get(signal._name),
            loading=klz_detector.get("loading"),
            outer_radius=klz_detector.get("fv_radius"),
            scint_density=klz_detector.get("scint_density"))
        klz_limit = klz_limits.get(signal._name)

        # Create limit setter
        limit_setter = limit.Limit(signal, fitter, per_bin=per_bin)

        limit_scaling = limit_setter.get_limit(store_summary=store_summary)
        signal.scale(limit_scaling)
        half_life = converter.counts_to_half_life(
            limit_scaling,
            n_atoms=converter.get_n_atoms(
                target_mass=klz_detector.get("target_mass")),
            livetime=klz_detector.get("livetime"))

        logging.getLogger("extra").info(
            "\n########################################\n"
            "Signal: %s\n"
            "Calculated limit scaling of %.4g\n"
            " --> equivalent to %.4f events\n" %
            (signal._name, limit_scaling, signal.sum()))
        logging.getLogger("extra").info(
            "Calculated limit half life of %.4g y\n"
            " --> KamLAND-Zen equivalent limit: %.4g y\n"
            "########################################\n" %
            (half_life, klz_limit))
Beispiel #21
0
    parser.add_argument("-m",
                        "--smear_method",
                        nargs='?',
                        const="weight",
                        type=str,
                        default="weight",
                        help="specify the smearing method to use")
    parser.add_argument("path", type=str, help="specify path to hdf5 file")
    args = parser.parse_args()

    directory = args.path[:args.path.rfind("/") + 1]  # strip filename
    # strip directory and extension
    filename = args.path[args.path.rfind("/") + 1:args.path.rfind(".")]

    smearer = smear.Smear()
    spectrum = store.load(args.path)

    if args.smear_method == "weight":  # Use default smear method
        smeared_spectrum = smearer.weight_gaussian_energy_spectra(spectrum)
        smeared_spectrum = smearer.weight_gaussian_radius_spectra(
            smeared_spectrum)
    elif args.smear_method == "random":
        smeared_spectrum = smearer.random_gaussian_energy_spectra(spectrum)
        smeared_spectrum = smearer.random_gaussian_radius_spectra(
            smeared_spectrum)
    else:  # Not a valid smear method
        parser.error(args.smear_method + " is not a valid smear method")

    filename = directory + filename + "_smeared" + ".hdf5"
    store.dump(filename, smeared_spectrum)
Beispiel #22
0
                                     "script.")
    parser.add_argument("-v", "--verbose", action="store_true",
                        help="Print progress and timing information")
    parser.add_argument("-s", "--signal", action=ReadableDir,
                        help="Supply path for signal hdf5 file")
    parser.add_argument("-t", "--two_nu", action=ReadableDir,
                        help="Supply paths for Te130_2n2b hdf5 files")
    parser.add_argument("-b", "--b8_solar", action=ReadableDir,
                        help="Supply paths for B8_Solar hdf5 files")
    args = parser.parse_args()

    # REF: SNO+-doc-2593-v9 (as used by Andy)
    roi = (2.46, 2.68)

    # Create signal spectrum
    Te130_0n2b = store.load(args.signal)
    Te130_0n2b.scale(200.)
    unshrunk = Te130_0n2b.sum()
    Te130_0n2b = store.load(args.signal)
    shrink_dict = {"energy_mc_low": roi[0], "energy_mc_high": roi[1]}
    Te130_0n2b.shrink(**shrink_dict)
    Te130_0n2b.scale(200.)
    shrunk = Te130_0n2b.sum()
    scaling = shrunk/unshrunk

    # Set decay converter

    # REF: SNO+-doc-1728-v2 (all three values)
    atm_weight_iso = 129.9062244
    atm_weight_nat = 127.603
    abundance = 0.3408
    data = shrink(data, roi)
    data.rebin((n_energy_bins, n_rad_bins))
    fixed_backgrounds = spectra.Spectra("fixed_background", 0, spec_config)
    fixed_backgrounds = shrink(fixed_backgrounds, roi)
    fixed_backgrounds.rebin((n_energy_bins, n_rad_bins))
    fixed_backgrounds_dict = {}
    for idx, key in enumerate(scale_dict.keys()):
        # Check key exists
        try:
            reco_paths[key]
        except KeyError:
            print 'Warning: Spectrum file does not exist for: %s ' % key
            continue

        # Load spectrum
        spec = store.load(reco_paths[key])
        # Shrink spectra to roi
        spec = shrink(spec, roi)
        spec.rebin((n_energy_bins, n_rad_bins))
        # Scale
        spec.scale(scale_dict[key])
        # Add reco spectrum to the 'data' spectra
        data.add(spec)

        # Add spectra to data and append fixed bkgnds
        if key == signal:
            # Loadd mc spec for fitting
            signal_spec = store.load(mc_paths[key])
            signal_spec = shrink(signal_spec, roi)
            signal_spec.rebin((n_energy_bins, n_rad_bins))
            signal_spec.scale(scale_dict[key])
Beispiel #24
0
                          spectra._energy_bins)
        # NOTE(review): this branch's opening (apparently a
        # `dimension == 1` case and the start of the x/y axis
        # construction) is cut off above the visible region.
        data = spectra.surface(1)
        axis.set_xlabel("Time [yr]")
        axis.set_ylabel("Energy [MeV]")
    elif dimension == 2:
        # Time vs radius surface; spectra.surface(0) supplies the 2-D
        # data - presumably a projection over the remaining axis,
        # confirm in Spectra.surface.
        x = _produce_axis(spectra._time_low, spectra._time_high,
                          spectra._time_bins)
        y = _produce_axis(spectra._radial_low, spectra._radial_high,
                          spectra._radial_bins)
        data = spectra.surface(0)
        axis.set_xlabel("Time [yr]")
        axis.set_ylabel("Radius [mm]")
    axis.set_zlabel("Count per bin")
    print len(x), len(y), data.shape
    X, Y = numpy.meshgrid(
        x, y)  # `plot_surface` expects `x` and `y` data to be 2D
    print X.shape, Y.shape
    axis.plot_surface(X, Y, data)
    if show_plot:
        plt.show()
    return fig


if __name__ == "__main__":
    import echidna
    import echidna.output.store as store

    filename = "/data/Te130_0n2b_mc_smeared.hdf5"
    spectre = store.load(echidna.__echidna_home__ + filename)
    plot_surface(spectre, 2)
Beispiel #25
0
    $ python echidna/scripts/combine_hdf5.py -f /path/to/example1.hdf5
      /path/to/example2.hdf5

  This will create the hdf5 file ``combined.hdf5``.
  There is no limit to the number of files you can combine.
"""

from echidna.output import store
import argparse

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("-f", "--files", nargs='+', type=str,
                        help="Space seperated hdf5 files to combine.")
    args = parser.parse_args()
    if not args.files:
        parser.print_help()
        parser.error("Must pass more than 1 file to combine")
    if len(args.files) < 2:
        parser.print_help()
        parser.error("Must pass more than 1 file to combine")
    first = True
    for hdf5 in args.files:
        if first:
            spectrum1 = store.load(hdf5)
            first = False
        else:
            spectrum2 = store.load(hdf5)
            spectrum1.add(spectrum2)
    store.dump("combined.hdf5", spectrum1)
Beispiel #26
0
    def test_serialisation(self):
        """ Test saving and then reloading a test spectra.

        Dumps ``self._test_spectra`` to hdf5 and loads it back, then
        checks that the data array, the number of decays and every
        spectral-config and fit-config parameter survive the round
        trip unchanged.
        """
        test_spectra = self._test_spectra

        # Save values
        # Record every config value *before* serialisation so it can be
        # compared with the value read back from the reloaded spectrum.
        spectra_config = test_spectra.get_config()
        spectra_pars = spectra_config.get_pars()
        energy_high = spectra_config.get_par("energy_mc").get_high()
        energy_bins = spectra_config.get_par("energy_mc").get_bins()
        energy_low = spectra_config.get_par("energy_mc").get_low()
        radial_high = spectra_config.get_par("radial_mc").get_high()
        radial_bins = spectra_config.get_par("radial_mc").get_bins()
        radial_low = spectra_config.get_par("radial_mc").get_low()
        energy_width = spectra_config.get_par("energy_mc").get_width()
        radial_width = spectra_config.get_par("radial_mc").get_width()

        spectra_fit_config = test_spectra.get_fit_config()
        spectra_fit_pars = spectra_fit_config.get_pars()
        rate_prior = spectra_fit_config.get_par("rate").get_prior()
        rate_sigma = spectra_fit_config.get_par("rate").get_sigma()
        rate_low = spectra_fit_config.get_par("rate").get_low()
        rate_high = spectra_fit_config.get_par("rate").get_high()
        rate_bins = spectra_fit_config.get_par("rate").get_bins()

        # Fill spectrum
        # One uniformly-random (energy, radius) entry per expected decay.
        for x in range(0, self._test_decays):
            energy = random.uniform(energy_low, energy_high)
            radius = random.uniform(radial_low, radial_high)
            test_spectra.fill(energy_mc=energy, radial_mc=radius)

        # Dump spectrum
        store.dump("test.hdf5", test_spectra)

        # Re-load spectrum
        loaded_spectra = store.load("test.hdf5")

        # Re-load saved values
        # Same accessors as above, now against the round-tripped spectrum.
        spectra_config = loaded_spectra.get_config()
        spectra_pars2 = spectra_config.get_pars()
        energy_high2 = spectra_config.get_par("energy_mc").get_high()
        energy_bins2 = spectra_config.get_par("energy_mc").get_bins()
        energy_low2 = spectra_config.get_par("energy_mc").get_low()
        radial_high2 = spectra_config.get_par("radial_mc").get_high()
        radial_bins2 = spectra_config.get_par("radial_mc").get_bins()
        radial_low2 = spectra_config.get_par("radial_mc").get_low()
        energy_width2 = spectra_config.get_par("energy_mc").get_width()
        radial_width2 = spectra_config.get_par("radial_mc").get_width()

        spectra_fit_config = loaded_spectra.get_fit_config()
        spectra_fit_pars2 = spectra_fit_config.get_pars()
        rate_prior2 = spectra_fit_config.get_par("rate").get_prior()
        rate_sigma2 = spectra_fit_config.get_par("rate").get_sigma()
        rate_low2 = spectra_fit_config.get_par("rate").get_low()
        rate_high2 = spectra_fit_config.get_par("rate").get_high()
        rate_bins2 = spectra_fit_config.get_par("rate").get_bins()

        # Run tests
        # Total counts, raw data array and decay count must all survive.
        self.assertTrue(loaded_spectra.sum() == self._test_decays,
                        msg="Original decays: %.3f, loaded spectra sum %3f"
                        % (float(self._test_decays),
                           float(loaded_spectra.sum())))
        self.assertTrue(numpy.array_equal(self._test_spectra._data,
                                          loaded_spectra._data),
                        msg="Original _data does not match loaded _data")
        self.assertTrue(test_spectra._num_decays == loaded_spectra._num_decays,
                        msg="Original num decays: %.3f, Loaded: %.3f"
                        % (float(test_spectra._num_decays),
                           float(loaded_spectra._num_decays)))

        # Check order of parameters
        self.assertListEqual(spectra_pars, spectra_pars2)
        self.assertListEqual(spectra_fit_pars, spectra_fit_pars2)

        # Per-parameter bounds, binning and widths must be unchanged.
        self.assertTrue(energy_low == energy_low2,
                        msg="Original energy low: %.4f, Loaded: %.4f"
                        % (energy_low, energy_low2))
        self.assertTrue(energy_high == energy_high2,
                        msg="Original energy high: %.4f, Loaded: %.4f"
                        % (energy_high, energy_high2))
        self.assertTrue(energy_bins == energy_bins2,
                        msg="Original energy bins: %.4f, Loaded: %.4f"
                        % (energy_bins, energy_bins2))
        self.assertTrue(energy_width == energy_width2,
                        msg="Original energy width: %.4f, Loaded: %.4f"
                        % (energy_width, energy_width2))
        self.assertTrue(radial_low == radial_low2,
                        msg="Original radial low: %.4f, Loaded: %.4f"
                        % (radial_low, radial_low2))
        self.assertTrue(radial_high == radial_high2,
                        msg="Original radial high: %.4f, Loaded: %.4f"
                        % (radial_high, radial_high2))
        self.assertTrue(radial_bins == radial_bins2,
                        msg="Original radial bins: %.4f, Loaded: %.4f"
                        % (radial_bins, radial_bins2))
        self.assertTrue(radial_width == radial_width2,
                        msg="Original radial width: %.4f, Loaded: %.4f"
                        % (radial_width, radial_width2))
        # Fit-config ("rate") parameters must also round-trip.
        self.assertTrue(rate_prior == rate_prior2,
                        msg="Original rate prior: %.4f, Loaded: %.4f"
                        % (rate_prior, rate_prior2))
        self.assertTrue(rate_sigma == rate_sigma2,
                        msg="Original rate sigma: %.4f, Loaded: %.4f"
                        % (rate_sigma, rate_sigma2))
        self.assertTrue(rate_low == rate_low2,
                        msg="Original rate low: %.4f, Loaded: %.4f"
                        % (rate_low, rate_low2))
        self.assertTrue(rate_high == rate_high2,
                        msg="Original rate high: %.4f, Loaded: %.4f"
                        % (rate_high, rate_high2))
        self.assertTrue(rate_bins == rate_bins2,
                        msg="Original rate bins: %.4f, Loaded: %.4f"
                        % (rate_bins, rate_bins2))