def test_6_update(self):
    """Updating the cross section must not alter the dataset's unique id."""
    # Re-create the dataset with a new cross section; the uid must be stable.
    updated = Dataset(dataset_name_fccsw, xsection=1, cache=True)
    self.assertEqual(updated.uid(), self.dataset.uid())
    self.assertEqual(updated.xsection(), 1)
    # Reloading from the cache must preserve the updated cross section.
    reloaded = Dataset(dataset_name_fccsw, cache=True)
    self.assertEqual(reloaded.xsection(), 1)
def test_5_jobtype_heppy(self):
    """The job type of a heppy dataset is detected as 'heppy'."""
    dset = Dataset(dataset_name_heppy, dataset_pattern_heppy,
                   cache=False, extract_info=True)
    self.assertEqual(dset.jobtype(), 'heppy')
def test_4_yaml(self):
    """Round-trip the dataset information through its yaml file."""
    dset = Dataset(dataset_name_heppy, dataset_pattern_heppy, cache=cache)
    # Write first, then read back; the two dictionaries must match exactly.
    written = dset._write_yaml()
    self.assertDictEqual(written, dset._read_yaml())
def test_1_publish(self):
    """The publish script must store the requested cross section."""
    xsec = random.uniform(0, 1)
    dsname = 'heppy/papas/ee_to_ZZ_condor_A_703'
    # Run the publishing script as an external process, as a user would.
    os.system('python publish.py {} -x {}'.format(dsname, xsec))
    published = Dataset(dsname)
    self.assertAlmostEqual(published.xsection(), xsec, places=7)
def setUp(self):
    """Build the reference fccsw dataset and record its expected content."""
    self.dataset = Dataset(dataset_name_fccsw, dataset_pattern_fccsw,
                           cache=False, xsection=1.8e-9)
    # Expected dataset statistics, checked by the test methods.
    self.nfiles = 10
    self.ngoodfiles = 10
    self.nevents = 10
def __init__(self, name, **kwargs):
    """Build the component from the cached dataset called *name*."""
    # Do not re-scan the files: rely entirely on the cached information.
    dset = Dataset(name, extract_info=False, cache=True)
    super(FCCComponent, self).__init__(dset.name,
                                       dset.list_of_good_files(),
                                       **kwargs)
def test_1(self):
    """An FCC component exposes the same good files as its dataset."""
    dset = Dataset(dataset_name_fccsw, dataset_pattern_fccsw, cache=False)
    component = FCCComponent(dataset_name_fccsw, xSection=dset.xsection())
    self.assertListEqual(dset.list_of_good_files(), component.files)
def setUp(self):
    """Build, persist, and describe the reference pythia8 dataset."""
    self.dataset = Dataset(dataset_name_pythia8, dataset_pattern_pythia8,
                           cache=False, extract_info=True,
                           cfg=cfg_name, xsection=1.8e-9)
    self.dataset.write()
    # Expected dataset statistics, checked by the test methods.
    self.nfiles = 5
    self.ngoodfiles = 5
    self.nevents = 50
def setUp(self):
    """Build the reference heppy dataset and record its expected content."""
    self.dataset = Dataset(dataset_name_heppy, dataset_pattern_heppy,
                           cache=False, cfg=cfg_name, xsection=1.8e-9)
    # Expected dataset statistics, checked by the test methods.
    self.nfiles = 1
    self.ngoodfiles = 1
    self.nevents = 100
def process_dataset(dsname, options):
    """Load dataset *dsname* and report it on stdout.

    In verbose mode the full dataset description is printed;
    otherwise only the dataset name is echoed.
    """
    dset = Dataset(dsname, pattern=options.wildcard,
                   xsection=options.xsection, cache=False)
    print(dset if options.verbose else dsname)
def setUp(self):
    """Build, persist, and describe the reference fccsw dataset."""
    self.dataset = Dataset(dataset_name_fccsw, dataset_pattern_fccsw,
                           extract_info=True, cache=False, xsection=1.8e-9)
    self.dataset.write()
    # Expected dataset statistics, checked by the test methods.
    self.nfiles = 10
    self.ngoodfiles = 10
    self.nevents = 10
def process_dataset(dsname, options):
    """Build dataset *dsname*, write its info file, and report it.

    In verbose mode the full dataset description is printed;
    otherwise only the dataset name is echoed.
    """
    dset = Dataset(dsname, pattern=options.wildcard,
                   extract_info=options.extract,
                   xsection=options.xsection, cache=False)
    dset.write()
    print(dset if options.verbose else dsname)
def __init__(self, name, pattern='*.root', cache=True, cfg=None,
             xsection=None, **kwargs):
    """Wrap a Dataset and expose its good files as a heppy component."""
    self.dataset = Dataset(name, pattern, cache, cfg, xsection)
    dset = self.dataset
    # The component is named after the dataset and fed only its good files.
    super(FCCComponent, self).__init__(dset.name,
                                       dset.list_of_good_files(),
                                       xSection=dset.xsection(),
                                       **kwargs)
def test_empty(self):
    """Extracting info from a dataset with no root file raises ValueError."""
    with self.assertRaises(ValueError):
        Dataset('papas/empty_dataset', '*.root', extract_info=True)
def test_5_jobtype_fccsw(self):
    """The job type of an fccsw dataset is detected as 'fccsw'."""
    dset = Dataset(dataset_name_fccsw, cache=cache)
    self.assertEqual(dset._jobtype, 'fccsw')
def test_3_nevents(self):
    """The fccsw dataset reports the expected total number of events."""
    dset = Dataset(dataset_name_fccsw, cache=cache)
    self.assertEqual(dset.nevents(), 100)
def test_4_yaml(self):
    """Round-trip the dataset information through its yaml file."""
    dset = Dataset(dataset_name_fccsw, cache=True)
    # write_yaml runs first (left-to-right evaluation), then read_yaml.
    self.assertDictEqual(dset.write_yaml(), dset.read_yaml())
def test_5_jobtype_heppy(self):
    """The job type of a heppy dataset is detected as 'heppy'."""
    dset = Dataset(dataset_name_heppy, cache=cache)
    self.assertEqual(dset._jobtype, 'heppy')
def test_3_nevents(self):
    """The heppy dataset reports the expected total number of events."""
    dset = Dataset(dataset_name_heppy, cache=True)
    self.assertEqual(dset.nevents(), self.nevents)
#this is the directory where the root files are stored outdir = '/'.join( (condor_pars["base_outputdir"], condor_pars["subdirectory"])) #create a touch file (will be removed at the end if everything works) filename = '/'.join((outdir, "finish.txt")) os.system("touch " + filename) curdir = os.getcwd() #move to the output directory os.chdir(condor_pars["base_outputdir"]) '''base directory where outputs are stored''' basedir.set_basename(condor_pars["base_outputdir"]) ds = Dataset(condor_pars["subdirectory"], pattern="*.root", xsection=None, extract_info=True, cache=False) ds.write() print "ls" os.system("ls -al " + outdir) #put a copy of inf.yaml in the work directory for easy reference os.system("cp " + condor_pars["subdirectory"] + "/info.yaml " + curdir) #remove the touch file os.system("rm " + filename) #move back to the original directory os.chdir(curdir) print "finished creation of info.yaml"
def test_no_yaml(self):
    """Without extract_info, a dataset lacking its yaml file raises IOError."""
    with self.assertRaises(IOError):
        Dataset('papas/empty_dataset', '*.root', extract_info=False)
def test_no_good_root_file(self):
    """A dataset containing no good root file raises ValueError."""
    with self.assertRaises(ValueError):
        Dataset('papas/nogood_dataset', '*.root', extract_info=True)
def test_2_cache(self):
    """A dataset read back from cache matches the one built in setUp."""
    cached = Dataset(dataset_name_heppy, cache=True)
    self.assertEqual(len(cached.all_files), self.nfiles)
    self.assertEqual(len(cached.list_of_good_files()), self.ngoodfiles)
    self.assertEqual(cached.uid(), self.dataset.uid())
def process_dataset(dsname, options):
    """Load dataset *dsname* with default settings and print it."""
    print(Dataset(dsname))