def test_patchset_get_string_values(datadir):
    """PatchSet lookup accepts both the patch name and its list of values.

    Fix: the spec file was opened without being closed (ResourceWarning
    under pytest); use a context manager instead.
    """
    with open(datadir.join('patchset_good_stringvalues.json')) as spec_file:
        patchset = pyhf.PatchSet(json.load(spec_file))
    # Lookup by patch name
    assert patchset["Gtt_2100_5000_800"]
    assert patchset["Gbb_2200_5000_800"]
    # Lookup by patch values (mixed int/str entries)
    assert patchset[[2100, 800, "Gtt"]]
    assert patchset[[2100, 800, "Gbb"]]
def test_patchset_apply(datadir):
    """PatchSet.apply calls verify() exactly once per application.

    Fix: both JSON spec files were opened without being closed; use
    context managers.
    """
    with open(datadir.join('example_patchset.json')) as patchset_file:
        patchset = pyhf.PatchSet(json.load(patchset_file))
    with open(datadir.join('example_bkgonly.json')) as workspace_file:
        ws = pyhf.Workspace(json.load(workspace_file))
    with mock.patch('pyhf.patchset.PatchSet.verify') as m:
        assert m.call_count == 0
        assert patchset.apply(ws, 'patch_channel1_signal_syst1')
        assert m.call_count == 1
def get_bkg_and_signal(tarfile, directory_name, model_point):
    """Pull the background-only workspace spec and the signal patch for
    ``model_point`` out of a pyhf pallet tarfile.

    Returns a ``(background_only, signal_patch)`` tuple.
    """
    background_only = get_json_from_tarfile(
        tarfile, directory_name + "/BkgOnly.json"
    )
    patchset_spec = get_json_from_tarfile(
        tarfile, directory_name + "/patchset.json"
    )
    # PatchSet supports lookup by model point directly
    signal_patch = pyhf.PatchSet(patchset_spec)[model_point]
    return background_only, signal_patch
def get_bkg_and_signal(directory_name, model_point):
    """Load background and signal from ``directory_name``.

    Looks for ``BkgOnly.json`` (+ optional ``patchset.json``); falls back
    to the first ``*.json`` file found.

    Fixes: the fallback branch raised an opaque ``IndexError`` when the
    directory contained no JSON files — raise an explicit ``ValueError``
    instead; all file handles are now closed via context managers.

    :param directory_name: a ``pathlib.Path``-like directory to search
    :param model_point: key used to select the signal patch from the patchset
    :returns: tuple ``(background_only, signal_patch)``; ``signal_patch`` is
        ``None`` when no patchset is present
    :raises ValueError: when the directory contains no JSON files at all
    """
    bkgonly_path = directory_name / Path("BkgOnly.json")
    signal_path = directory_name / Path("patchset.json")
    signal_patch = None
    if bkgonly_path.exists() and signal_path.exists():
        with open(bkgonly_path) as bkgonly_file:
            background_only = json.load(bkgonly_file)
        with open(signal_path) as signal_file:
            patchset = pyhf.PatchSet(json.load(signal_file))
        signal_patch = patchset[model_point]
    elif bkgonly_path.exists():
        with open(bkgonly_path) as bkgonly_file:
            background_only = json.load(bkgonly_file)
    else:
        json_filenames = list(directory_name.glob("*.json"))
        if not json_filenames:
            raise ValueError(
                f"The {directory_name} directory does not contain background and signal information."
            )
        with open(json_filenames[0]) as json_file:
            background_only = json.load(json_file)
    return background_only, signal_patch
def get_bkg_and_signal(directory_name, model_point):
    """Load background and signal from ``directory_name``.

    Looks for ``BkgOnly.json`` (+ optional ``patchset.json``); falls back
    to the first ``*.json`` file found.

    Fixes: the ``ValueError`` message was missing its ``f`` prefix, so the
    literal text ``{directory_name}`` was printed instead of the path; all
    file handles are now closed via context managers.

    :param directory_name: a ``pathlib.Path``-like directory to search
    :param model_point: key used to select the signal patch from the patchset
    :returns: tuple ``(background_only, signal_patch)``; ``signal_patch`` is
        ``None`` when no patchset is present
    :raises ValueError: when the directory contains no JSON files at all
    """
    bkgonly_path = directory_name / Path("BkgOnly.json")
    signal_path = directory_name / Path("patchset.json")
    signal_patch = None
    if bkgonly_path.exists() and signal_path.exists():
        with open(bkgonly_path) as bkgonly_file:
            background_only = json.load(bkgonly_file)
        with open(signal_path) as signal_file:
            patchset = pyhf.PatchSet(json.load(signal_file))
        signal_patch = patchset[model_point]
    elif bkgonly_path.exists():
        with open(bkgonly_path) as bkgonly_file:
            background_only = json.load(bkgonly_file)
    else:
        json_filename = list(directory_name.glob("*.json"))
        if not json_filename:
            raise ValueError(
                f"The {directory_name} directory does not contain background and signal information."
            )
        with open(json_filename[0]) as json_file:
            background_only = json.load(json_file)
    return background_only, signal_patch
def test_patchset_verify(datadir):
    """A patchset consistent with its workspace verifies without error.

    Fix: both JSON spec files were opened without being closed; use
    context managers.
    """
    with open(datadir.join('example_patchset.json')) as patchset_file:
        patchset = pyhf.PatchSet(json.load(patchset_file))
    with open(datadir.join('example_bkgonly.json')) as workspace_file:
        ws = pyhf.Workspace(json.load(workspace_file))
    # verify() returns None on success (and raises on mismatch)
    assert patchset.verify(ws) is None
def test_patchset_bad(datadir, patchset_file):
    """Malformed patchset specs are rejected at construction time.

    Fix: the spec file was opened without being closed; use a context
    manager.
    """
    with open(datadir.join(patchset_file)) as spec_file:
        patchsetspec = json.load(spec_file)
    with pytest.raises(pyhf.exceptions.InvalidPatchSet):
        pyhf.PatchSet(patchsetspec)
def patchset(datadir, request):
    """Parametrized fixture: build a PatchSet from the requested spec file.

    Fix: the spec file was opened without being closed; use a context
    manager.
    """
    with open(datadir.join(request.param)) as spec_file:
        spec = json.load(spec_file)
    return pyhf.PatchSet(spec)
def test_patchset_verify_failure(datadir):
    """Verifying against an incompatible workspace raises PatchSetVerificationError.

    Fix: the spec file was opened without being closed; use a context
    manager.
    """
    with open(datadir.join('example_patchset.json')) as patchset_file:
        patchset = pyhf.PatchSet(json.load(patchset_file))
    with pytest.raises(pyhf.exceptions.PatchSetVerificationError):
        assert patchset.verify({})
def main(args):
    """Drive a distributed pyhf analysis over funcX.

    Loads the pallet config, downloads the pallet if needed, registers the
    worker functions on a funcX endpoint, runs the background-only workspace
    preparation, then fans out one inference task per patch in the patchset
    and polls until all results are in.

    NOTE(review): if ``args.config_file`` is None, ``config`` below is
    unbound and this function raises NameError — presumably the CLI always
    supplies a config file; confirm against the argument parser.
    """
    if args.config_file is not None:
        with open(args.config_file, "r") as infile:
            config = json.load(infile)

    backend = args.backend
    pallet_path = Path(config["input_prefix"]).joinpath(config["pallet_name"])

    # locally get pyhf pallet for analysis
    if not pallet_path.exists():
        download(config["pallet_url"], pallet_path)

    # Optional analysis name becomes a filename prefix, e.g. "name_BkgOnly.json"
    analysis_name = config["analysis_name"]
    analysis_prefix_str = "" if analysis_name is None else f"{analysis_name}_"
    if config["analysis_dir"] is not None:
        pallet_path = pallet_path.joinpath(config["analysis_dir"])

    with open(pallet_path.joinpath(
            f"{analysis_prefix_str}BkgOnly.json")) as bkgonly_json:
        bkgonly_workspace = json.load(bkgonly_json)

    # Initialize funcX client
    fxc = FuncXClient()
    fxc.max_requests = 200

    # Endpoint id is read from a local file rather than the config
    with open("endpoint_id.txt") as endpoint_file:
        pyhf_endpoint = str(endpoint_file.read().rstrip())

    # register functions
    prepare_func = fxc.register_function(prepare_workspace)
    infer_func = fxc.register_function(infer_hypotest)

    # execute background only workspace
    prepare_task = fxc.run(bkgonly_workspace, backend,
                           endpoint_id=pyhf_endpoint,
                           function_id=prepare_func)

    # Read patchset in while background only workspace running
    with open(pallet_path.joinpath(
            f"{analysis_prefix_str}patchset.json")) as patchset_json:
        patchset = pyhf.PatchSet(json.load(patchset_json))

    # Poll until the prepared workspace is available; get_result raises
    # while the task is still pending, so back off 10s between attempts.
    workspace = None
    while not workspace:
        try:
            workspace = fxc.get_result(prepare_task)
        except Exception as excep:
            print(f"prepare: {excep}")
            sleep(10)

    print("--------------------")
    print(workspace)

    # execute patch fits across workers and retrieve them when done
    n_patches = len(patchset.patches)
    tasks = {}
    for patch_idx in range(n_patches):
        patch = patchset.patches[patch_idx]
        task_id = fxc.run(
            workspace,
            patch.metadata,
            [patch.patch],
            backend,
            endpoint_id=pyhf_endpoint,
            function_id=infer_func,
        )
        # Track each task by patch name so results can be matched up later
        tasks[patch.name] = {"id": task_id, "result": None}

    # Poll every still-incomplete task; failed fetches (task pending) just
    # print and sleep 15s before the next sweep.
    while count_complete(tasks.values()) < n_patches:
        for task in tasks.keys():
            if not tasks[task]["result"]:
                try:
                    result = fxc.get_result(tasks[task]["id"])
                    print(
                        f"Task {task} complete, there are {count_complete(tasks.values())+1} results now"
                    )
                    tasks[task]["result"] = result
                except Exception as excep:
                    print(f"inference: {excep}")
                    sleep(15)

    print("--------------------")
    print(tasks.values())