Code Example #1
 def test_6_update(self):
     """test that when something is changed, the unique id stays the same.
     """
     dataset = Dataset(dataset_name_fccsw, xsection=1, cache=True)
     self.assertEqual(dataset.uid(), self.dataset.uid())
     self.assertEqual(dataset.xsection(), 1)
     dataset = Dataset(dataset_name_fccsw, cache=True)
     self.assertEqual(dataset.xsection(), 1)
Code Example #2
 def test_5_jobtype_heppy(self):
     """test that the jobtype can be determined for heppy"""
     dataset = Dataset(dataset_name_heppy,
                       dataset_pattern_heppy, 
                       cache=False,
                       extract_info=True)
     self.assertEqual(dataset.jobtype(), 'heppy')
Code Example #3
 def test_4_yaml(self):
     """Test that the yaml file can be written and read."""
     dataset = Dataset(dataset_name_heppy, dataset_pattern_heppy, 
                       cache=cache)
     data_written = dataset._write_yaml()
     data_read = dataset._read_yaml()
     self.assertDictEqual(data_written, data_read)
Code Example #4
File: test_publish.py Project: cbernet/fcc_datasets
 def test_1_publish(self):
     """Test that the publish script is working."""
     xsection = random.uniform(0, 1)
     dsname = 'heppy/papas/ee_to_ZZ_condor_A_703'
     cmd = 'python publish.py {} -x {}'.format(dsname, xsection)
     os.system(cmd)
     dset = Dataset(dsname)
     self.assertAlmostEqual(dset.xsection(), xsection, places=7)
Code Example #5
 def setUp(self):
     self.dataset = Dataset(dataset_name_fccsw,
                            dataset_pattern_fccsw,
                            cache=False,
                            xsection=1.8e-9)
     self.nfiles = 10
     self.ngoodfiles = 10
     self.nevents = 10
Code Example #6
 def __init__(self, name, **kwargs):
     """"""
     dataset = Dataset(name, extract_info=False, cache=True)
     super(FCCComponent, self).__init__(
         dataset.name,
         dataset.list_of_good_files(),
         **kwargs
     )
Code Example #7
 def test_1(self):
     """Test FCC component creation"""
     dset = Dataset(dataset_name_fccsw, dataset_pattern_fccsw,
                    cache=False)
     comp = FCCComponent(dataset_name_fccsw, 
                         xSection=dset.xsection())
     self.assertListEqual(dset.list_of_good_files(),
                          comp.files)
Code Example #8
 def setUp(self):
     self.dataset = Dataset(dataset_name_pythia8, dataset_pattern_pythia8,
                            cache=False,
                            extract_info=True, 
                            cfg=cfg_name, xsection=1.8e-9)        
     self.dataset.write()
     self.nfiles = 5
     self.ngoodfiles = 5
     self.nevents = 50
Code Example #9
 def setUp(self):
     self.dataset = Dataset(dataset_name_heppy,
                            dataset_pattern_heppy,
                            cache=False,
                            cfg=cfg_name,
                            xsection=1.8e-9)
     self.nfiles = 1
     self.ngoodfiles = 1
     self.nevents = 100
Code Example #10
File: publish.py Project: HEP-FCC-TEST/fcc_datasets
def process_dataset(dsname, options):
    ds = Dataset(dsname,
                 pattern=options.wildcard,
                 xsection=options.xsection,
                 cache=False)
    if options.verbose:
        print ds
    else:
        print dsname
Code Example #11
 def setUp(self):
     self.dataset = Dataset(dataset_name_fccsw,
                            dataset_pattern_fccsw,
                            extract_info=True, 
                            cache=False,
                            xsection=1.8e-9)
     self.dataset.write()
     self.nfiles = 10
     self.ngoodfiles = 10
     self.nevents = 10
Code Example #12
def process_dataset(dsname, options):
    ds = Dataset(dsname,
                 pattern=options.wildcard,
                 extract_info=options.extract,
                 xsection=options.xsection,
                 cache=False)
    ds.write()
    if options.verbose:
        print ds
    else:
        print dsname
Code Example #13
 def __init__(self,
              name,
              pattern='*.root',
              cache=True,
              cfg=None,
              xsection=None,
              **kwargs):
     """"""
     self.dataset = Dataset(name, pattern, cache, cfg, xsection)
     super(FCCComponent, self).__init__(self.dataset.name,
                                        self.dataset.list_of_good_files(),
                                        xSection=self.dataset.xsection(),
                                        **kwargs)
Code Example #14
 def test_empty(self):
     """Check that an exception is raised when trying to
     read a dataset with no root file"""
     with self.assertRaises(ValueError):
         dataset = Dataset('papas/empty_dataset', '*.root', extract_info=True)
Code Example #15
 def test_5_jobtype_fccsw(self):
     """test that the jobtype can be determined for fccsw"""
     dataset = Dataset(dataset_name_fccsw, cache=cache)
     self.assertEqual(dataset._jobtype, 'fccsw')
Code Example #16
 def test_3_nevents(self):
     """Test that the number of events is correct"""
     dataset = Dataset(dataset_name_fccsw, cache=cache)
     self.assertEqual(dataset.nevents(), 100)
Code Example #17
 def test_4_yaml(self):
     """Test that the yaml file can be written and read."""
     dataset = Dataset(dataset_name_fccsw, cache=True)
     data_written = dataset.write_yaml()
     data_read = dataset.read_yaml()
     self.assertDictEqual(data_written, data_read)
Code Example #18
 def test_5_jobtype_heppy(self):
     """test that the jobtype can be determined for heppy"""
     dataset = Dataset(dataset_name_heppy, cache=cache)
     self.assertEqual(dataset._jobtype, 'heppy')
Code Example #19
 def test_3_nevents(self):
     """Test that the number of events is correct"""
     dataset = Dataset(dataset_name_heppy, cache=True)
     self.assertEqual(dataset.nevents(), self.nevents)
Code Example #20
    #this is the directory where the root files are stored
    outdir = '/'.join(
        (condor_pars["base_outputdir"], condor_pars["subdirectory"]))

    #create a touch file (will be removed at the end if everything works)
    filename = '/'.join((outdir, "finish.txt"))
    os.system("touch " + filename)
    curdir = os.getcwd()

    #move to the output directory
    os.chdir(condor_pars["base_outputdir"])
    # set the base directory where outputs are stored
    basedir.set_basename(condor_pars["base_outputdir"])
    ds = Dataset(condor_pars["subdirectory"],
                 pattern="*.root",
                 xsection=None,
                 extract_info=True,
                 cache=False)
    ds.write()

    print "ls"
    os.system("ls -al " + outdir)
    #put a copy of info.yaml in the work directory for easy reference
    os.system("cp " + condor_pars["subdirectory"] + "/info.yaml " + curdir)

    #remove the touch file
    os.system("rm " + filename)

    #move back to the original directory
    os.chdir(curdir)
    print "finished creation of info.yaml"
Code Example #21
 def test_no_yaml(self):
     with self.assertRaises(IOError):
         dataset = Dataset('papas/empty_dataset', '*.root', extract_info=False)
Code Example #22
 def test_no_good_root_file(self):
     with self.assertRaises(ValueError):
         dataset = Dataset('papas/nogood_dataset', '*.root', extract_info=True)
Code Example #23
 def test_2_cache(self):
     '''Test dataset reading from cache'''
     dataset = Dataset(dataset_name_heppy, cache=True)
     self.assertEqual(len(dataset.all_files), self.nfiles)
     self.assertEqual(len(dataset.list_of_good_files()), self.ngoodfiles)
     self.assertEqual(dataset.uid(), self.dataset.uid())
Code Example #24
def process_dataset(dsname, options):
    ds = Dataset(dsname)
    print ds
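
Taken together, these examples follow one pattern: build a Dataset from a dataset name (optionally with a file pattern, cache, extract_info, cfg, and xsection), publish it with write(), and query it through nevents(), xsection(), uid(), and list_of_good_files(). The snippet below is a minimal sketch of that pattern, assuming the fcc_datasets API shown above; the import path, dataset name, and cross section value are illustrative assumptions, not part of the original examples.

# Minimal usage sketch inferred from the examples above (not verbatim project code).
from fcc_datasets.dataset import Dataset   # import path is an assumption

# Placeholder dataset name, borrowed from the publish test above.
dsname = 'heppy/papas/ee_to_ZZ_condor_A_703'

# Build the dataset: scan the root files matching the pattern, extract their
# metadata, and attach a placeholder cross section instead of reusing a cache.
ds = Dataset(dsname,
             pattern='*.root',
             extract_info=True,
             cache=False,
             xsection=1.8e-9)
ds.write()   # publish the collected information (info.yaml)

# Query the published dataset.
print ds.nevents(), len(ds.list_of_good_files())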