def test_create_files_with_removal(self):
    """Test the removal of old files in create_files()"""
    testdir = tmp_directory_name()

    # Step 1: Create test directory containing random file
    os.makedirs(testdir)
    self.addCleanup(shutil.rmtree, testdir)
    testfile = os.path.join(testdir, "test.txt")
    with open(testfile, "w") as f:
        f.write("test")
    self.assertTrue(os.path.isfile(testfile))

    # Step 2: Create submission and write output to test directory
    # without overwriting of files
    test_submission = Submission()
    tab = Table("test")
    test_submission.add_table(tab)
    test_submission.create_files(testdir, remove_old=False)

    # Test file should still exist
    self.assertTrue(os.path.isfile(testfile))

    # Step 3: Recreate submission files with removal
    test_submission.create_files(testdir, remove_old=True)

    # Test file should no longer exist
    self.assertFalse(os.path.isfile(testfile))
def test_nested_files_to_copy(self):
    """Test that file copying works when tables have files."""
    # Create random test file
    testfile = "testfile.txt"
    with open(testfile, "w") as f:
        f.write("test")
    self.addCleanup(os.remove, testfile)

    # Output files
    testdirectory = "./testout"
    self.addCleanup(shutil.rmtree, testdirectory)
    self.addCleanup(os.remove, "submission.tar.gz")

    # Add resource to table, add table to Submission
    sub = Submission()
    tab = Table('test')
    tab.add_additional_resource("a_resource", testfile, True)
    sub.add_table(tab)

    # Write outputs
    sub.create_files(testdirectory)

    # Check that test file is actually in the tar ball
    with tarfile.open("submission.tar.gz", "r:gz") as tar:
        try:
            tar.getmember(testfile)
        except KeyError:
            self.fail(
                "Submission.create_files failed to write all files to tar ball.")
def test_create_files(self):
    """Test create_files() for Submission."""
    test_submission = Submission()
    try:
        test_submission.create_files("test_output")
    except TypeError:
        self.fail("Submission.create_files raised an unexpected TypeError.")
def test_create_files(self):
    """Test create_files() for Submission."""
    testdir = tmp_directory_name()
    test_submission = Submission()
    tab = Table("test")
    test_submission.add_table(tab)
    test_submission.create_files(testdir)
    self.doCleanups()
def test_create_files(self):
    """Test create_files() for Submission."""
    testdir = "test_output"
    test_submission = Submission()
    self.addCleanup(os.remove, "submission.tar.gz")
    self.addCleanup(shutil.rmtree, testdir)
    test_submission.create_files(testdir)
    self.doCleanups()
def test_yaml_output(self):
    """Test yaml dump"""
    tmp_dir = tmp_directory_name()

    # Create test dictionary
    testlist = [("x", 1.2), ("x", 2.2), ("y", 0.12), ("y", 0.22)]
    testdict = defaultdict(list)
    for key, value in testlist:
        testdict[key].append(value)

    # Create test submission
    test_submission = Submission()
    test_table = Table("TestTable")
    x_variable = Variable("X", is_independent=True, is_binned=False)
    x_variable.values = testdict['x']
    y_variable = Variable("Y", is_independent=False, is_binned=False)
    y_variable.values = testdict['y']
    test_table.add_variable(x_variable)
    test_table.add_variable(y_variable)
    test_submission.add_table(test_table)
    test_submission.create_files(tmp_dir)

    # Test read yaml file
    table_file = os.path.join(tmp_dir, "testtable.yaml")
    try:
        with open(table_file, 'r') as testfile:
            testyaml = yaml.safe_load(testfile)
    except yaml.YAMLError as exc:
        print(exc)

    # Test compare yaml file to string
    testtxt = ("dependent_variables:\n- header:\n    name: Y\n  values:\n"
               "  - value: 0.12\n  - value: 0.22\nindependent_variables:\n"
               "- header:\n    name: X\n  values:\n  - value: 1.2\n  - value: 2.2\n")
    with open(table_file, 'r') as testfile:
        testyaml = testfile.read()
    self.assertEqual(str(testyaml), testtxt)

    self.addCleanup(os.remove, "submission.tar.gz")
    self.addCleanup(shutil.rmtree, tmp_dir)
    self.doCleanups()
reader_covariance_ee_PtRap4 = RootFileReader(
    "HEPData/inputs/smp17010/folders_bornleptons/output_root/matrix13__XSRatioSystPtRap4.root")

# Read the histogram
data_covariance_ee_PtRap4 = reader_covariance_ee_PtRap4.read_hist_2d("covariance_totsum_1")

# Create variable objects
x_covariance_ee_PtRap4 = Variable("Bin X", is_independent=True, is_binned=True)
x_covariance_ee_PtRap4.values = data_covariance_ee_PtRap4["x_edges"]
y_covariance_ee_PtRap4 = Variable("Bin Y", is_independent=True, is_binned=False)
y_covariance_ee_PtRap4.values = data_covariance_ee_PtRap4["y"]
z_covariance_ee_PtRap4 = Variable("covariance Matrix", is_independent=False, is_binned=False)
z_covariance_ee_PtRap4.values = data_covariance_ee_PtRap4["z"]

table_covariance_XSRatio_ee_PtRap4 = Table("cov matr norm xs aux 6e")
table_covariance_XSRatio_ee_PtRap4.description = (
    "Covariance matrix for normalized cross sections using born level leptons for all bins "
    "used in bins of Z pt for the 1.6 < |y(Z)| < 2.4 bin in the dielectron final state.")
table_covariance_XSRatio_ee_PtRap4.location = "Supplementary material"

for var in [x_covariance_ee_PtRap4, y_covariance_ee_PtRap4, z_covariance_ee_PtRap4]:
    table_covariance_XSRatio_ee_PtRap4.add_variable(var)

submission.add_table(table_covariance_XSRatio_ee_PtRap4)

### End covariance ee

outdir = "example_output"
submission.create_files(outdir)
if fig["type_stat"].lower() == "tgraphasymmerrors": y_stat = Uncertainty("stat. uncertainty", is_symmetric=False) y_stat.values = stat["dy"] y.add_uncertainty(y_stat) elif fig["type_stat"].lower() in ["tgrapherrors", "th1"]: y_stat = Uncertainty("stat. uncertainty", is_symmetric=True) y_stat.values = stat["dy"] y.add_uncertainty(y_stat) if fig["type_syst"].lower() == "tgraphasymmerrors": y_syst = Uncertainty("syst. uncertainty", is_symmetric=False) y_syst.values = syst["dy"] y.add_uncertainty(y_syst) elif fig["type_syst"].lower() in ["tgrapherrors", "th1"]: y_syst = Uncertainty("syst. uncertainty", is_symmetric=True) y_syst.values = syst["dy"] y.add_uncertainty(y_syst) # write table if fig["type_stat"].lower() == "th2": table.add_variable(x1) table.add_variable(x2) table.add_variable(y) else: table.add_variable(x1) table.add_variable(y) submission.add_table(table) submission.create_files("output")
wcctZI.values = datactZctZIobs[:, 1]

nllctZctZIobs = Variable(r"-2$\Delta$ L (observed)", is_independent=False, is_binned=False)
nllctZctZIobs.values = datactZctZIobs[:, 2]
nllctZctZIobs.add_qualifier("SQRT(S)", "13", "TeV")
nllctZctZIobs.add_qualifier("LUMINOSITY", "137", "fb$^{-1}$")

tabctZctZI.add_variable(wcctZ)
tabctZctZI.add_variable(wcctZI)
tabctZctZI.add_variable(nllctZctZIobs)
tabctZctZI.add_image("../figures/ctZ_ctZI_wBkg.png")

#tabctZctZI.keywords()
tabctZctZI.keywords["reactions"] = ["P P --> TOP TOPBAR X", "P P --> TOP TOPBAR GAMMA"]
tabctZctZI.keywords["cmenergies"] = [13000.0]
tabctZctZI.keywords["observables"] = ["CLS"]
tabctZctZI.keywords["phrases"] = [
    "Top", "Quark", "Photon", "lepton+jets", "semileptonic",
    "Cross Section", "Proton-Proton Scattering", "Inclusive", "Differential"
]

submission.add_table(tabctZctZI)

###
### write files
###
submission.create_files()
from hepdata_lib import Submission, Table

submission = Submission()

table = Table("pa all")
table.description = "description."
table.location = "upper left."
table.keywords["observables"] = ["pa"]

from hepdata_lib import RootFileReader
reader = RootFileReader("root://eosuser.cern.ch//eos/user/v/vveckaln/analysis_MC13TeV_TTJets/plots/plotter.root")
Data = reader.read_hist_1d("L_pull_angle_allconst_reco_leading_jet_scnd_leading_jet_DeltaRgt1p0/L_pull_angle_allconst_reco_leading_jet_scnd_leading_jet_DeltaRgt1p0")
Unc = reader.read_hist_1d("L_pull_angle_allconst_reco_leading_jet_scnd_leading_jet_DeltaRgt1p0/L_pull_angle_allconst_reco_leading_jet_scnd_leading_jet_DeltaRgt1p0_totalMCUncShape")

from hepdata_lib import Variable, Uncertainty
mmed = Variable("pa", is_independent=True, is_binned=False, units="rad")
mmed.values = Data["x"]  # bin centres of the pull-angle histogram
data = Variable("N", is_independent=False, is_binned=False, units="")
data.values = Data["y"]
unc = Uncertainty("Total", is_symmetric=True)
unc.values = Unc["dy"]
data.add_uncertainty(unc)

table.add_variable(mmed)
table.add_variable(data)
submission.add_table(table)
submission.create_files("example_output")
        h = files[year].read_hist_1d("%s_%s_%s" % (
            proc, year.replace("A", "pre").replace("B", "post"), nnBin))
        varArr += h["y"]

        # For the fit and data graphs, there is an associated uncertainty,
        # so add that uncertainty to the Fit and Nobs variables
        if proc == "Fit" or proc == "Nobs":
            uncArr += h["dy"]

    v.values = varArr
    if proc == "Fit" or proc == "Nobs":
        uncVals, isSymm = makeUncArray(uncArr)
        unc = U("unc.", is_symmetric=isSymm)
        unc.values = uncVals
        v.add_uncertainty(unc)

    # Add each sig1, sig2, fit, data variable to the corresponding table
    tmap[year].add_variable(v)

for year, t in sorted(tmap.items()):
    sub.add_table(t)


if __name__ == "__main__":
    sub = Submission()
    makeFitPlotHEPData(sub)
    sub.create_files("output")
up1 = Variable(c.y_var, is_independent=False, is_binned=False, units=c.y_unit)
up1.values = graphs["Graph1"]['y']
up1.add_qualifier("Limit", "+1sigma")

down1 = Variable(c.y_var, is_independent=False, is_binned=False, units=c.y_unit)
down1.values = graphs["Graph4"]['y']
down1.add_qualifier("Limit", "-1sigma")

down2 = Variable(c.y_var, is_independent=False, is_binned=False, units=c.y_unit)
down2.values = graphs["Graph5"]['y']
down2.add_qualifier("Limit", "-2sigma")

table.add_variable(d)
table.add_variable(up2)
table.add_variable(up1)
table.add_variable(obs)
table.add_variable(exp)
table.add_variable(down1)
table.add_variable(down2)
submission.add_table(table)


if __name__ == "__main__":
    submission = Submission()
    for c in config:
        outdict = read_limit_from_pickle(c.picklePath)
        draw_limit_with_dict(c, outdict)
        add_limit_to_submission(c, submission)
    submission.create_files(out_name)