Example #1
def load_cfg_to_scan_params(config=None, update_fallback=False):
    if config is None: config = glob.config
    if config is None:
        raise ValueError(
            "config.load_cfg_to_scan_params(): no config provided and no fallback set"
        )
    scan_params = []
    param_name_list = get_scan_param_names(config=config)
    for param_name in param_name_list:
        limits = utils.string_to_object(
            config.get("PARAMS", f"{param_name}.scan.limits", fallback="[]"))
        stepsize = utils.string_to_object(
            config.get("PARAMS",
                       f"{param_name}.scan.stepsize",
                       fallback="None"))
        n_points = utils.string_to_object(
            config.get("PARAMS",
                       f"{param_name}.scan.n_points",
                       fallback="None"))
        label = str(config.get("PARAMS", f"{param_name}.label", fallback=""))
        units = str(config.get("PARAMS", f"{param_name}.units", fallback=""))
        scan_params.append(
            ScanParam(name=param_name,
                      limits=limits,
                      stepsize=stepsize,
                      n_points=n_points,
                      label=label,
                      units=units))
    if update_fallback:
        glob.scan_params = scan_params
    return scan_params
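A minimal sketch, assuming the config behaves like a standard configparser object, of the kind of [PARAMS] block this loader (and get_scan_param_names() in Example #4 below) reads. The parameter name and values are hypothetical, and ast.literal_eval stands in for utils.string_to_object.

import ast
import configparser

cfg = configparser.ConfigParser()
cfg.read_string("""
[PARAMS]
ScanParams : ["mass"]
mass.scan.limits : [0, 2000]
mass.scan.stepsize : 50
mass.label : resonance mass
mass.units : GeV
""")
# Resolve each scan parameter name and parse its limits, as the loader above does
for name in ast.literal_eval(cfg.get("PARAMS", "ScanParams")):
    print(name, ast.literal_eval(cfg.get("PARAMS", f"{name}.scan.limits")))   # mass [0, 2000]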
Example #2
 def load_from_config(self, cfg, **kwargs):
     if "INPUTS" not in cfg:
         utils.error("InputStore.load_from_config()",
                     "No INPUTS in config file. Ignoring.")
         return
     inputs = utils.string_to_object(
         cfg.get("INPUTS", "Inputs", fallback="[]"))
     self.do_quick_load = glob.custom_store.get("QuickLoadDistributions",
                                                False)
     self.do_quick_save = glob.custom_store.get("QuickStoreDistributions",
                                                False)
     look_for_params = kwargs.get("look_for_params", False)
     for key in inputs:
         fname = cfg.get("INPUTS", f"{key}.file.path", fallback="")
         ftype = cfg.get("INPUTS", f"{key}.file.type", fallback="unknown")
         save_fname = "." + fname.replace("/", "_") + ".pickle"
         if self.do_quick_load and self.quick_load(
                 save_fname,
                 extract=utils.string_to_object(
                     cfg.get("INPUTS", f"{key}.extract", fallback="[]"))):
             utils.info(
                 "InputStore.load_from_config()",
                 f"File {save_fname} quick-loaded in the place of {ftype} file {fname}"
             )
         else:
             if ftype == "hepdata":
                 self.load_from_hepdata(fname,
                                        cfg=cfg,
                                        key=key,
                                        save=save_fname,
                                        look_for_params=look_for_params)
             elif ftype == "yoda":
                 self.load_from_yoda(fname,
                                     cfg=cfg,
                                     key=key,
                                     save=save_fname,
                                     look_for_params=look_for_params)
             elif ftype == "pickle":
                 self.load_from_pickle(fname,
                                       cfg=cfg,
                                       key=key,
                                       save=save_fname,
                                       look_for_params=look_for_params)
             else:
                 raise ValueError(
                     f"InputStore.load_from_config(): Input {key} file {fname} has an unrecognised type {ftype}"
                 )
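A hedged sketch of the sort of [INPUTS] block this loader iterates over, assuming a configparser-style config; the entry name and file path below are hypothetical, and the option names mirror the cfg.get(...) calls above.

import configparser

cfg = configparser.ConfigParser()
cfg.read_string("""
[INPUTS]
Inputs : ["measurement"]
measurement.file.path : data/measured_dist.pickle
measurement.file.type : pickle
measurement.extract : ["measured_xsec"]
""")
print(cfg.get("INPUTS", "measurement.file.type", fallback="unknown"))   # pickle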
Example #3
def format_axis_from_config(section, config=None, axis=None, pedantic=True):
    if config is None: config = glob.config
    if config is None:
        if not pedantic: return
        raise ValueError(
            "format_axis_from_config(): no config provided and config.config not set"
        )
    if axis is None: axis = plt.gca()
    if axis is None:
        if not pedantic: return
        raise ValueError(
            "format_axis_from_config(): no axis provided and plt.gca() is None"
        )
    xlim = utils.string_to_object(
        config.get(section, "axis.xlim", fallback="None"))
    x_minor_loc = utils.string_to_object(
        config.get(section, "xaxis.minor_locator", fallback="None"))
    x_major_loc = utils.string_to_object(
        config.get(section, "xaxis.major_locator", fallback="None"))
    x_ticks_pos = utils.string_to_object(
        config.get(section, "xaxis.ticks_position", fallback="None"))
    ylim = utils.string_to_object(
        config.get(section, "axis.ylim", fallback="None"))
    y_minor_loc = utils.string_to_object(
        config.get(section, "yaxis.minor_locator", fallback="None"))
    y_major_loc = utils.string_to_object(
        config.get(section, "yaxis.major_locator", fallback="None"))
    y_ticks_pos = utils.string_to_object(
        config.get(section, "yaxis.ticks_position", fallback="None"))
    tick_params = utils.string_to_object(
        config.get(section, "axis.tick_params", fallback="None"))
    if xlim is not None: axis.set_xlim(xlim)
    if x_minor_loc is not None:
        axis.xaxis.set_minor_locator(MultipleLocator(x_minor_loc))
    if x_major_loc is not None:
        axis.xaxis.set_major_locator(MultipleLocator(x_major_loc))
    if x_ticks_pos is not None: axis.xaxis.set_ticks_position(x_ticks_pos)
    if ylim is not None: axis.set_ylim(ylim)
    if y_minor_loc is not None:
        axis.yaxis.set_minor_locator(MultipleLocator(y_minor_loc))
    if y_major_loc is not None:
        axis.yaxis.set_major_locator(MultipleLocator(y_major_loc))
    if y_ticks_pos is not None: axis.yaxis.set_ticks_position(y_ticks_pos)
    if tick_params is not None: axis.tick_params(**tick_params)
    for label in utils.string_to_object(
            config.get(section, "Labels", fallback="[]")):
        plt.text(label[0], label[1], label[2], size="small")
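A small sketch, assuming a configparser-style config, of an axis-formatting block this function could consume. The section name follows the GET_LIMITS usage later in this listing, the values are made up, and ast.literal_eval stands in for utils.string_to_object.

import ast
import configparser

cfg = configparser.ConfigParser()
cfg.read_string("""
[GET_LIMITS]
axis.xlim : [0, 2000]
xaxis.minor_locator : 50
xaxis.major_locator : 250
axis.ylim : [0, 1]
Labels : [[100.0, 0.9, "example label"]]
""")
print(ast.literal_eval(cfg.get("GET_LIMITS", "axis.xlim")))   # [0, 2000]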
Example #4
def get_scan_param_names(config=None):
    if config is None: config = glob.config
    if config is None:
        raise ValueError(
            "config.get_scan_param_names(): no config provided and no fallback set"
        )
    return utils.string_to_object(
        config.get("PARAMS", "ScanParams", fallback="[]"))
Example #5
def parse_command_line_arguments(*argv, **kwargs):
    utils.info("parse_command_line_arguments()", "Parsing arguments")
    try:
        opts, rest = getopt.getopt(
            sys.argv[1:], "", [f"{k}=" for k in kwargs] +
            ["save=", "tag=", "show="] + [f"{a}=" for a in argv])
    except getopt.GetoptError as err:
        utils.error(
            "parse_command_line_arguments()",
            "The following error was thrown whilst parsing command-line arguments"
        )
        utils.fatal("parse_command_line_arguments()", err)
    if len(rest) != 1:
        raise ValueError(
            f"parse_command_line_arguments(): expected 1 unlabelled argument where {len(rest)} were provided"
        )
    cfg_name = rest[0]
    save_fname, do_show, tag = None, True, None
    ret = {}
    if not utils.is_file(cfg_name):
        raise RuntimeError(
            f"parse_command_line_arguments(): config file {cfg_name} not found"
        )
    for option, value in opts:
        if option in ["--tag"]:
            tag = str(value)
            utils.info("parse_command_line_arguments()",
                       f"Labelling temporary files using the tag: {tag}")
        if option in ["--save"]:
            save_fname = str(value)
            utils.info("parse_command_line_arguments()",
                       f"Opening plots file {save_fname}")
            plotting.open_plots_pdf(save_fname)
        if option in ["--show"]:
            do_show = utils.string_to_object(value)
            if type(do_show) != bool:
                raise ValueError(
                    f"parse_command_line_arguments(): --show value \"{value}\" could not be cast to a bool"
                )
        if option.lstrip("-") in argv:
            ret[option.lstrip("-")] = True

    glob.custom_store["config name"] = cfg_name
    glob.custom_store["do show plots"] = do_show
    glob.custom_store["plots filename"] = save_fname
    glob.custom_store["tag"] = tag
    return ret
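A quick, self-contained sketch of how the getopt call above splits a hypothetical command line. Note that getopt stops at the first positional argument, so the config file name has to come after the options.

import getopt

args = ["--tag", "test", "--show", "False", "my_config.ini"]   # hypothetical command line
opts, rest = getopt.getopt(args, "", ["save=", "tag=", "show=", "hello="])
print(opts)   # [('--tag', 'test'), ('--show', 'False')]
print(rest)   # ['my_config.ini']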
Example #6
def get_BSM_dists_from_input_store (in_store=None, config=None, prefix="", scan_params=None) :
	if config      is None : config   = glob.config
	if config      is None : raise ValueError("inputs.get_BSM_dists_from_input_store(): no config provided and no global fallback set")
	if in_store    is None : in_store = glob.input_store
	if in_store    is None : raise ValueError("inputs.get_BSM_dists_from_input_store(): no in_store provided and no global fallback set")
	if scan_params is None : scan_params = glob.scan_params
	if scan_params is None : raise ValueError("inputs.get_BSM_dists_from_input_store(): no scan_params provided and no global fallback set")
	ret = {}
	for BSM_input_name in utils.string_to_object(config.get("STEERING", "BSM.load")) :
		if BSM_input_name not in in_store :
			raise KeyError(f"inputs.get_BSM_dists_from_input_store(): no input {BSM_input_name} in input_store")
		i = in_store[BSM_input_name]
		values = []
		utils.info("inputs.get_BSM_dists_from_input_store()", f"Input {BSM_input_name} has params {i.params}")
		for scan_param in [p.name for p in scan_params] :
			if scan_param not in i.params :
				raise KeyError(f"inputs.get_BSM_dists_from_input_store(): input {BSM_input_name} has no param called {scan_param}")
			values.append(i.params[scan_param])
		ret[tuple(values)] = Distribution(i.dist, name=prefix+i.dist.name)
	return ret
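A hedged sketch of the [STEERING] option read above, assuming a configparser-style config. The input names are hypothetical and would have to match entries already loaded into the input store; the function then returns a dictionary keyed by tuples of scan-parameter values.

import ast
import configparser

cfg = configparser.ConfigParser()
cfg.read_string("""
[STEERING]
BSM.load : ["BSM_mass_500", "BSM_mass_1000"]
""")
print(ast.literal_eval(cfg.get("STEERING", "BSM.load")))   # ['BSM_mass_500', 'BSM_mass_1000']
# get_BSM_dists_from_input_store() would then return something like
# {(500.0,): Distribution, (1000.0,): Distribution} for a one-parameter mass scan.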
Example #7
def get_dist_from_input_store (values_dist_name, cov_dist_name, in_store=None, name="", n_toys=None) :
	if in_store is None : in_store = glob.input_store
	if in_store is None : raise ValueError("inputs.get_dist_from_input_store(): no in_store argument provided and no fallback set")
	if values_dist_name not in in_store :
		raise KeyError(f"inputs.get_dist_from_input_store(): no distribution called {values_dist_name} in input store")
	values = in_store[values_dist_name].dist.values
	if type(cov_dist_name) is str and cov_dist_name in in_store :
		cov = in_store[cov_dist_name].dist.cov
	else :
		try :
			inp = utils.string_to_object(str(cov_dist_name))
		except ValueError :
			raise KeyError(f"inputs.get_dist_from_input_store(): no distribution called {cov_dist_name} in inputs.input_store")
		if type(inp) in [list, np.ndarray] :
			cov = np.array(inp)
			if cov.shape != (len(values), len(values)) :
				raise ValueError(f"inputs.get_dist_from_input_store(): covariance matrix {cov_dist_name} does not have the expected shape")
		else :
			try :
				cov = inp*np.eye(len(values))
			except TypeError :
				raise ValueError(f"inputs.get_dist_from_input_store(): {cov_dist_name} could not be converted into a covariance matrix")
	return Distribution(values=values, cov=cov, name=name, n_toys=n_toys)
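A minimal, runnable illustration of the scalar fallback at the end of this function: a number parsed from the config is promoted to a diagonal covariance matching the length of the values vector (the numbers below are made up).

import numpy as np

values = np.array([1.2, 0.8, 1.1])    # hypothetical measured values
cov = 0.04 * np.eye(len(values))      # scalar 0.04 promoted to a 3x3 diagonal covariance
print(cov.shape)                      # (3, 3)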
Example #8
 def load_from_yoda(self, fname, **kwargs):
     subprocess.run([
         "python", "utils2/utils/py2_yoda_interface.py", "-o",
         ".tmp_processed_yoda.pickle", "-r", ".tmp_py2_yoda_info.record",
         f"{fname}"
     ])
     yoda_inputs = pickle.load(open(".tmp_processed_yoda.pickle", "rb"),
                               encoding='latin1')
     new_entries = []
     look_for_params = kwargs.get("look_for_params", False)
     if "cfg" in kwargs and "key" in kwargs:
         cfg, key = kwargs["cfg"], kwargs["key"]
         fextract = utils.string_to_object(
             cfg.get("INPUTS", f"{key}.extract", fallback="[]"))
         for entry_name in fextract:
             includes_SM = utils.string_to_object(
                 cfg.get("INPUTS",
                         f"{key}.{entry_name}.IncludesSM",
                         fallback="[]"))
             value_keys = utils.string_to_object(
                 cfg.get("INPUTS",
                         f"{key}.{entry_name}.keys.values",
                         fallback="[]"))
             covariance_keys = utils.string_to_object(
                 cfg.get("INPUTS",
                         f"{key}.{entry_name}.keys.covariance",
                         fallback="[]"))
             if covariance_keys not in ["use-errors", 0]:
                 raise NotImplementedError(
                     "InputStore.load_from_yoda(): covariance not set as use-errors or 0, currently the only supported options"
                 )
             for value_key in value_keys:
                 if value_key in yoda_inputs: continue
                 raise KeyError(
                     f"required key {value_key} has not been extracted from {fname}"
                 )
             if type(value_keys) is list and len(value_keys) > 0 and type(
                     value_keys[0]) is str:
                 values = np.concatenate([
                     np.array(yoda_inputs[value_key]["y"])
                     for value_key in value_keys
                 ])
             elif type(value_keys) is int:
                 values = np.zeros(shape=value_keys)
             else:
                 values = value_keys
             if covariance_keys == 0:
                 cov = np.zeros(shape=(len(values), len(values)))
             else:
                 cov = np.diag(
                     np.concatenate([
                         np.array(yoda_inputs[value_key]["ey_hi"])
                         for value_key in value_keys
                     ]))
             params = {}
             if look_for_params:
                 for param in glob.scan_params:
                     params[param.name] = cfg.get(
                         "INPUTS",
                         f"{key}.{entry_name}.{param.name}",
                         fallback=None)
             new_entry = Input(name=entry_name,
                               type=cfg.get("INPUTS",
                                            f"{key}.{entry_name}.type",
                                            fallback=""),
                               origin_file=fname,
                               keys=value_keys,
                               params=params,
                               dist=Distribution(name=entry_name,
                                                 values=values,
                                                 cov=cov,
                                                 includes_SM=includes_SM))
             if entry_name in self.entries:
                 utils.warning("InputStore.load_from_yoda()",
                               f"Entry named {entry_name} already exists")
             self.entries[entry_name] = new_entry
             new_entries.append(entry_name)
     if not self.do_quick_save: return
     if "save" not in kwargs:
         utils.warning(
             "InputStore.load_from_yoda",
             f"self.do_quick_save is True but no quicksave file specified for {fname}. Not saving."
         )
         return
     self.quick_save(kwargs["save"], new_entries)
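A hedged sketch of an [INPUTS] entry that this yoda loader could parse, assuming a configparser-style config; the file name, entry name, and histogram path are hypothetical, and the per-entry options mirror the cfg.get(...) calls above.

import configparser

cfg = configparser.ConfigParser()
cfg.read_string("""
[INPUTS]
Inputs : ["bsm_prediction"]
bsm_prediction.file.path : data/bsm_prediction.yoda
bsm_prediction.file.type : yoda
bsm_prediction.extract : ["bsm_xsec"]
bsm_prediction.bsm_xsec.keys.values : ["/ANALYSIS/hist_pT"]
bsm_prediction.bsm_xsec.keys.covariance : use-errors
bsm_prediction.bsm_xsec.IncludesSM : True
""")
print(cfg.get("INPUTS", "bsm_prediction.bsm_xsec.keys.covariance"))   # use-errors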
Example #9
 def load_from_pickle(self, fname, **kwargs):
     pickle_dict = pickle.load(open(fname, "rb"))
     new_entries = []
     look_for_params = kwargs.get("look_for_params", False)
     if "cfg" in kwargs and "key" in kwargs:
         cfg, key = kwargs["cfg"], kwargs["key"]
         fextract = utils.string_to_object(
             cfg.get("INPUTS", f"{key}.extract", fallback="[]"))
         for entry_name in fextract:
             includes_SM = utils.string_to_object(
                 cfg.get("INPUTS",
                         f"{key}.{entry_name}.IncludesSM",
                         fallback="[]"))
             value_keys = utils.string_to_object(
                 cfg.get("INPUTS",
                         f"{key}.{entry_name}.keys.values",
                         fallback="[]"))
             cov_keys = utils.string_to_object(
                 cfg.get("INPUTS",
                         f"{key}.{entry_name}.keys.covariance",
                         fallback="[]"))
             if type(value_keys) is list and len(value_keys) > 0 and type(
                     value_keys[0]) is str:
                 values = np.concatenate(
                     [pickle_dict[value_key] for value_key in value_keys])
             elif type(value_keys) is int:
                 values = np.zeros(shape=value_keys)
             else:
                 values = value_keys
             num_values = len(values)
             params = {}
             if look_for_params:
                 for param in glob.scan_params:
                     params[param.name] = cfg.get(
                         "INPUTS",
                         f"{key}.{entry_name}.{param.name}",
                         fallback=None)
             cov = np.zeros(shape=(num_values, num_values))
             for cov_key in cov_keys:
                 cov = cov + pickle_dict[cov_key]
             new_entry = Input(name=entry_name,
                               type=cfg.get("INPUTS",
                                            f"{key}.{entry_name}.type",
                                            fallback=""),
                               origin_file=fname,
                               keys=value_keys,
                               params=params,
                               dist=Distribution(name=entry_name,
                                                 values=values,
                                                 cov=cov,
                                                 includes_SM=includes_SM))
             if entry_name in self.entries:
                 utils.warning("InputStore.load_from_pickle()",
                               f"Entry named {entry_name} already exists")
             self.entries[entry_name] = new_entry
             new_entries.append(entry_name)
     if not self.do_quick_save: return
     if "save" not in kwargs:
         utils.warning(
             "InputStore.load_from_pickle",
             f"self.do_quick_save is True but no quicksave file specified for {fname}. Not saving."
         )
         return
     self.quick_save(kwargs["save"], new_entries)
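A small sketch of the pickled dictionary layout this loader appears to expect: value arrays stored under the keys.values names, and covariance blocks under the keys.covariance names, which the loader sums. All names and numbers here are hypothetical.

import pickle

import numpy as np

pickle_dict = {
    "measured_xsec": np.array([1.2, 0.8, 1.1]),   # picked up via keys.values
    "stat_cov": 0.01 * np.eye(3),                 # covariance blocks listed in keys.covariance
    "syst_cov": 0.02 * np.eye(3),                 # are added together by the loader
}
with open("example_input.pickle", "wb") as f:
    pickle.dump(pickle_dict, f)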
Example #10
def main():
    #
    #  config and setup
    #
    parse_command_line_arguments("hello")
    do_general_setup()
    print_setup()
    num_scan_params = len(glob.scan_params)
    #
    #  get SM expected limit
    #
    utils.info("get_limits.py",
               "Getting expected and observed confidence limits")
    exp_limit = glob.CL_generator.get_limit(glob.custom_store["SM_exp_dist"])
    obs_limit = glob.CL_generator.get_limit(glob.custom_store["meas_dist"])
    if num_scan_params == 1:
        utils.info(
            "get_limits.py",
            f"Observed {100.*glob.confidence_level:.2f}% confidence limit is {obs_limit}"
        )
        utils.info(
            "get_limits.py",
            f"Expected {100.*glob.confidence_level:.2f}% confidence limit is {exp_limit}"
        )
    #
    #  generate SM toys and get limits
    #
    utils.info(
        "get_limits.py",
        "Throwing toys around the experimental SM expectation and getting limits"
    )
    SM_toy_limits, SM_coverage_grid = get_toy_spread_of_limits()
    n_toys = len(SM_toy_limits)
    if num_scan_params == 1:
        utils.info(
            "get_limits.py",
            f"Median {100.*glob.confidence_level:.2f}% limit of SM toys is {SM_toy_limits[int(0.5*n_toys)]:.0f}"
        )
        utils.info(
            "get_limits.py",
            f"16th percentile {100.*glob.confidence_level:.2f}% limit of SM toys is {SM_toy_limits[int(0.16*n_toys)]:.0f}"
        )
        utils.info(
            "get_limits.py",
            f"84th percentile {100.*glob.confidence_level:.2f}% limit of SM toys is {SM_toy_limits[int(0.84*n_toys)]:.0f}"
        )
    #
    # plot
    #
    plotting.set_mpl_style()
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)

    if num_scan_params == 1:
        limit_toys_5pc = SM_toy_limits[int(0.05 * n_toys)]
        limit_toys_16pc = SM_toy_limits[int(0.16 * n_toys)]
        limit_toys_median = SM_toy_limits[int(0.5 * n_toys)]
        limit_toys_84pc = SM_toy_limits[int(0.84 * n_toys)]
        limit_toys_95pc = SM_toy_limits[int(0.95 * n_toys)]
        plt.axvspan(limit_toys_5pc,
                    limit_toys_95pc,
                    color="darkorange",
                    linestyle=None)
        plt.axvspan(limit_toys_16pc,
                    limit_toys_84pc,
                    color="gold",
                    linestyle=None)
        plt.plot([limit_toys_median, limit_toys_median], [0, 1],
                 color="darkblue",
                 linestyle="dashed",
                 linewidth=1)
        plt.plot([exp_limit, exp_limit], [0, 1], color="green")
        plt.plot([obs_limit, obs_limit], [0, 1], color="purple")
        ax.yaxis.set_visible(False)
        ax.set_ylim([0, 1])
    else:
        plt.contourf(SM_coverage_grid.axes[0],
                     SM_coverage_grid.axes[1],
                     SM_coverage_grid.values.transpose(),
                     [0.05, 0.16, 0.84, 0.95],
                     linestyles=None,
                     colors=["gold", "darkorange", "gold"])
        plt.contour(SM_coverage_grid.axes[0],
                    SM_coverage_grid.axes[1],
                    SM_coverage_grid.values.transpose(), [0.5],
                    linestyles="dashed",
                    colors=["darkblue"],
                    linewidths=1)
        for limit in exp_limit:
            plt.plot([x[0] for x in limit], [y[1] for y in limit],
                     color="green")
        for limit in obs_limit:
            plt.plot([x[0] for x in limit], [y[1] for y in limit],
                     color="purple")
        plt.ylabel(
            f"{glob.scan_params[1].label}  [{glob.scan_params[1].units}]",
            horizontalalignment='right',
            y=1.0,
            fontsize="large")

    format_axis_from_config("GET_LIMITS")

    plt.xlabel(f"{glob.scan_params[0].label}  [{glob.scan_params[0].units}]",
               horizontalalignment='right',
               x=1.0,
               fontsize="large")

    plt.legend([
        Line2D([0], [0], color="purple", lw=2),
        Line2D([0], [0], color="green", lw=2),
        Line2D([0], [0], color="darkblue", linestyle="dashed", lw=1),
        Patch(color="gold", linestyle=None),
        Patch(color="darkorange", linestyle=None)
    ], [
        f"Obs. ({100*glob.confidence_level:.0f}% $CL_s$)",
        f"Exp. ({100*glob.confidence_level:.0f}% $CL_s$)", "SM toys: median",
        "SM toys: 68% coverage", "SM toys: 95% coverage"
    ],
               loc=utils.string_to_object(
                   glob.config.get("GET_LIMITS",
                                   "legend.position",
                                   fallback="\'best\'")))

    if glob.custom_store["do show plots"]: plt.show()
    if glob.custom_store["plots filename"] is not None:
        plotting.save_figure(fig)
    plotting.close_plots_pdf()