def testDataFilesExternal(self):
    """Check that a known Collision16 LFN resolves replicas and metadata on the grid."""
    from Ganga.GPI import DiracFile

    # Methods
    remote_file = DiracFile(lfn='/lhcb/LHCb/Collision16/DIMUON.DST/00053485/0000/00053485_00000424_1.dimuon.dst')
    assert remote_file.getReplicas()
    assert remote_file.getMetadata()
def test__init__(self):
    """Exercise DiracFile construction: fixture values, defaults, and keyword args."""
    # Fixture-constructed instance carries the values given in setUp.
    self.assertEqual(self.df.namePattern, 'np', 'namePattern not initialised as np')
    self.assertEqual(self.df.lfn, 'lfn', 'lfn not initialised as lfn')
    self.assertEqual(self.df.localDir, 'ld', 'localDir not initialised as ld')

    # A bare DiracFile() starts from empty defaults.
    default_df = stripProxy(DiracFile())
    self.assertEqual(default_df.namePattern, '', 'namePattern not default initialised as empty')
    self.assertEqual(default_df.lfn, '', 'lfn not default initialised as empty')
    self.assertEqual(default_df.localDir, None, 'localDir not default initialised as None')
    self.assertEqual(default_df.locations, [], 'locations not initialised as empty list')

    # Keyword arguments must land on the matching attributes.
    kw_df = stripProxy(DiracFile(namePattern='np', lfn='lfn', localDir='ld'))
    self.assertEqual(
        kw_df.namePattern, 'np',
        'namePattern not keyword initialised as np, initialized as: %s\n%s' % (kw_df.namePattern, str(kw_df)))
    self.assertEqual(
        kw_df.lfn, 'lfn',
        'lfn not keyword initialised as lfn, initialized as: %s\n%s' % (kw_df.lfn, str(kw_df)))
    self.assertEqual(
        kw_df.localDir, 'ld',
        'localDir not keyword initialised as ld, initializes as %s\n%s' % (kw_df.localDir, str(kw_df.localDir)))
def testDataFilesExternal(self):
    """Verify replica and metadata lookups succeed for a known 2010 DIMUON LFN."""
    from Ganga.GPI import DiracFile

    # Methods
    grid_file = DiracFile(lfn="/lhcb/data/2010/DIMUON.DST/00008395/0000/00008395_00000326_1.dimuon.dst")
    assert grid_file.getReplicas()
    assert grid_file.getMetadata()
def submit_job(brunel_app, reco_type, input_files=None, local=RUN_LOCAL):
    """Configure and queue a Brunel reconstruction Job.

    A local run uses the Local backend, LocalFile outputs and only the first
    dataset file; a grid run uses Dirac, DiracFile outputs and the full
    dataset. Returns True once the submit has been queued.
    """
    # Set EvtMax depending on if this is a local job:
    # 2*1-1 = 1 event locally, 2*0-1 = -1 on the grid.
    brunel_app.extraOpts += 'from Configurables import Brunel\n'
    brunel_app.extraOpts += 'Brunel().EvtMax = {}'.format(2 * int(local) - 1)

    # Configure the corresponding Job
    job = Job(name='VP hybrid distortions',
              comment='{reco_type} reconstruction {suffix}'.format(
                  reco_type=reco_type, suffix=['', '(local)'][local]),
              application=brunel_app,
              splitter=SplitByFiles(filesPerJob=1, ignoremissing=True),
              parallel_submit=True)

    # Pick backend, output-file class and input slice in one place.
    if local:
        backend, file_cls, data = Local(), LocalFile, dataset[:1]
    else:
        backend, file_cls, data = Dirac(), DiracFile, dataset

    job.backend = backend
    job.outputfiles = [file_cls('*.xdst'), file_cls('*.root')]
    job.inputdata = data
    job.inputfiles = input_files or []

    queues.add(job.submit)
    return True
def upload(self, lfn, diracSE, guid=None):
    """Upload this file to the grid under *lfn* and return the DiracFile.

    NOTE(review): ``diracSE`` and ``guid`` are accepted for interface
    compatibility but are not used by this implementation.
    """
    from Ganga.GPI import DiracFile

    grid_file = DiracFile(namePattern=self.name, lfn=lfn)
    grid_file.put(force=True)
    return grid_file
def testDataFilesExternal(self):
    """Replica and metadata queries must succeed for a real 2010 grid LFN."""
    from Ganga.GPI import DiracFile

    # Methods
    dirac_file = DiracFile(
        lfn='/lhcb/data/2010/DIMUON.DST/00008395/0000/00008395_00000326_1.dimuon.dst')
    assert dirac_file.getReplicas()
    assert dirac_file.getMetadata()
def testDataFilesExternal(self):
    """Replica and metadata queries must succeed for a real Collision16 LFN."""
    from Ganga.GPI import DiracFile

    # Methods
    dirac_file = DiracFile(
        lfn='/lhcb/LHCb/Collision16/DIMUON.DST/00053485/0000/00053485_00000424_1.dimuon.dst')
    assert dirac_file.getReplicas()
    assert dirac_file.getMetadata()
def testDataFiles(self):
    """DiracFile must parse its LFN from keyword and from prefixed strings."""
    from Ganga.GPI import DiracFile, config

    # LFNs
    name = 'test.txt'
    df = DiracFile(lfn=name)
    assert df.lfn == name
    # Both lower- and upper-case prefixes are stripped to the bare LFN.
    for prefix in ('lfn:', 'LFN:'):
        df = DiracFile(prefix + name)
        assert df.lfn == name
    # not sure if this should raise an exception or not. The original test had a try..except that would always
    # pass so not sure :)
    df = DiracFile('pfn:' + name)
def testInterfaceLookFeel(self):
    """Smoke-test that an Im3Shape job can be assembled through the GPI."""
    # Just testing that the job construction works
    from Ganga.GPI import Job, Im3ShapeApp, Im3ShapeSplitter, DiracFile, LocalFile, GangaDataset, Dirac

    job = Job()
    im3_app = Im3ShapeApp(
        im3_location=DiracFile(lfn='/lsst/y1a1-v2-z/software/2016-02-24/im3shape-grid.tar.gz'),
        ini_location=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/params_disc.ini'),
        blacklist=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/blacklist-y1.txt'))
    job.application = im3_app
    job.backend = Dirac()

    input_ds = GangaDataset()
    input_ds.append(DiracFile(lfn='/lsst/DES0005+0043-z-meds-y1a1-gamma.fits.fz'))
    job.inputdata = input_ds

    job.splitter = Im3ShapeSplitter(size=20)
    job.outputfiles = [DiracFile('*.main.txt'), DiracFile('*.epoch.txt')]
def setUp(self):
    """Install stub `execute`/`add_process` hooks around a proxy-stripped DiracFile.

    The stubs capture their call-time locals via `inspect` and compare any
    key present in ``self.toCheck`` against the received argument value,
    then return ``self.returnObject``. They are monkey-patched onto the
    DiracFile module and onto WorkerThreadPool for the duration of the test.
    """
    self.returnObject = None
    self.toCheck = {}

    def execute(command, timeout=60, env=None, cwd=None, shell=False):
        # Snapshot the arguments this stub was called with.
        import inspect
        frame = inspect.currentframe()
        fedInVars = inspect.getargvalues(frame).locals
        del frame  # break the reference cycle created by currentframe()

        # Fix: .items() instead of the Python-2-only .iteritems(); iteration
        # semantics are identical and this also runs under Python 3.
        for key, value in self.toCheck.items():
            if key in fedInVars:
                self.assertEqual(fedInVars[key], value)
        return self.returnObject

    # Signature mirrors WorkerThreadPool.add_process (including its mutable
    # defaults) so the patched method is a drop-in replacement.
    def add_process(this, command, command_args=(), command_kwargs={}, timeout=60,
                    env=None, cwd=None, shell=False, priority=5, callback_func=None,
                    callback_args=(), callback_kwargs={}):
        import inspect
        frame = inspect.currentframe()
        fedInVars = inspect.getargvalues(frame).locals
        del frame

        # Fix: .items() instead of the Python-2-only .iteritems().
        for key, value in self.toCheck.items():
            if key in fedInVars:
                self.assertEqual(fedInVars[key], value)
        return self.returnObject

    self.df = stripProxy(DiracFile('np', 'ld', 'lfn'))
    self.df.locations = ['location']
    self.df.guid = 'guid'

    from Ganga.Core.GangaThread.WorkerThreads.WorkerThreadPool import WorkerThreadPool
    # Patch the module-level execute used by DiracFile and the pool's add_process.
    setattr(sys.modules[self.df.__module__], 'execute', execute)
    setattr(WorkerThreadPool, 'add_process', add_process)
def testDatasetsFunctions(self):
    """Exercise LHCbDataset construction, job assignment and set-like helpers."""
    from Ganga.GPI import DiracFile, PhysicalFile, LHCbDataset, Job, LocalFile

    # test constructors/setters
    primary = LHCbDataset(['lfn:a', 'pfn:b'])
    assert len(primary) == 2
    print(primary[0])
    assert isinstance(primary[0], DiracFile)
    assert isinstance(primary[1], LocalFile)

    primary = LHCbDataset()
    primary.files = ['lfn:a', 'pfn:b']
    assert isinstance(primary[0], DiracFile)
    assert isinstance(primary[1], LocalFile)
    assert primary.getFullFileNames() == ['LFN:a', 'PFN:b']
    primary.files.append('lfn:c')
    assert isinstance(primary[-1], DiracFile)

    # check job assignments
    job = Job()
    job.inputdata = ['lfn:a', 'pfn:b']
    assert isinstance(job.inputdata, LHCbDataset)
    job.outputfiles = ['a', DiracFile('b')]
    assert isinstance(job.outputfiles[0], LocalFile)
    print(type(job.outputfiles[1]))
    assert isinstance(job.outputfiles[1], DiracFile)

    # check the LHCbDataset functions:
    assert primary.getLFNs() == ['a', 'c']
    assert primary.getPFNs() == ['b']

    secondary = LHCbDataset(['lfn:a', 'lfn:d'])
    primary.extend(secondary, True)
    assert len(primary) == 4

    # check the useful difference functions etc
    assert sorted(primary.difference(secondary).getFileNames()) == ['b', 'c']
    assert sorted(primary.symmetricDifference(secondary).getFileNames()) == ['b', 'c']
    assert sorted(primary.intersection(secondary).getFileNames()) == ['a', 'd']
    assert sorted(primary.union(secondary).getFileNames()) == ['a', 'b', 'c', 'd']
def testInternal(self):
    """GaudiExec must round-trip its options files and accept DiracFile options.

    Builds a throwaway options file under the system temp dir, checks that
    GaudiExec reconstructs the same path and contents, attaches the app to a
    Job, then swaps in a DiracFile option and cleans up.
    """
    from Ganga.GPI import GaudiExec, Job, LocalFile, DiracFile

    tmp_fol = gettempdir()
    gaudi_testFol = path.join(tmp_fol, 'GaudiExecTest')
    shutil.rmtree(gaudi_testFol, ignore_errors=True)
    makedirs(gaudi_testFol)
    gaudi_testOpts = path.join(gaudi_testFol, 'testOpts.py')
    with open(gaudi_testOpts, 'w+') as temp_opt:
        temp_opt.write("print('hello')")
    assert path.exists(gaudi_testOpts)

    gr = GaudiExec(directory=gaudi_testFol, options=[LocalFile(gaudi_testOpts)])
    assert isinstance(
        stripProxy(gr).getOptsFiles()[0],
        stripProxy(LocalFile))

    # The opts file must reconstruct to exactly the path we wrote.
    reconstructed_path = path.join(
        stripProxy(gr).getOptsFiles()[0].localDir,
        stripProxy(gr).getOptsFiles()[0].namePattern)
    assert reconstructed_path == gaudi_testOpts
    # Fix: the original `open(...).read()` leaked the file handle; use a
    # context manager so it is closed deterministically.
    with open(reconstructed_path) as opts_file:
        assert opts_file.read() == "print('hello')"

    j = Job()
    j.application = gr
    assert isinstance(j.application, GaudiExec)

    df = DiracFile(lfn='/not/some/file')
    gr.options = [df]
    assert gr.options[0].lfn == df.lfn

    shutil.rmtree(gaudi_testFol, ignore_errors=True)
def testDatasets(self):
    """Exercise LHCbDataset/OutputData construction, job assignment and catalogs."""
    from Ganga.GPI import DiracFile, PhysicalFile, LHCbDataset, Job, LocalFile

    # test constructors/setters
    dataset = LHCbDataset(['lfn:a', 'pfn:b'])
    assert len(dataset) == 2
    print(dataset[0])
    assert isinstance(dataset[0], DiracFile)
    assert isinstance(dataset[1], PhysicalFile)

    dataset = LHCbDataset()
    dataset.files = ['lfn:a', 'pfn:b']
    assert isinstance(dataset[0], DiracFile)
    assert isinstance(dataset[1], PhysicalFile)
    dataset.files.append('lfn:c')
    assert isinstance(dataset[-1], DiracFile)

    # OutputData stores its entries as plain strings.
    out = OutputData(['a', 'b'])
    assert isinstance(out.files[0], str)
    assert isinstance(out.files[1], str)

    # check job assignments
    job = Job()
    job.inputdata = ['lfn:a', 'pfn:b']
    assert isinstance(job.inputdata, LHCbDataset)
    job.outputfiles = ['a', DiracFile('b')]
    assert isinstance(job.outputfiles[0], LocalFile)
    print(type(job.outputfiles[1]))
    assert isinstance(job.outputfiles[1], DiracFile)

    LFN_DATA = [
        'LFN:/lhcb/LHCb/Collision11/DIMUON.DST/00016768/0000/00016768_00000005_1.dimuon.dst',
        'LFN:/lhcb/LHCb/Collision11/DIMUON.DST/00016768/0000/00016768_00000006_1.dimuon.dst'
    ]
    dataset = LHCbDataset(LFN_DATA)
    assert len(dataset.getReplicas().keys()) == 2
    assert dataset.getCatalog()
)) j.comment = ( '{1} {2} MC {0} ntuple creation for k3pi mixing measurement.' .format(event_type, year, polarity) ) j.application = DaVinci(version='v41r3') j.application.optsfile = [s.format(path=base, year=year) for s in OPTIONS] if args.test: # If testing, run over a couple of files locally, # saving the results to the sandbox j.inputdata = dataset[0:1] j.backend = Local() # Prepend test string to job name j.name = 'TEST_{0}'.format(j.name) j.outputfiles = [LocalFile(tfn)] else: # If not testing, run over everything on the grid, splitting jobs # into groups of 10 files, notifying me on job completion/subjob failure, # and save the results on the grid storage j.inputdata = dataset j.backend = Dirac() j.backend.settings['CPUTime'] = 60*60*24*7 j.do_auto_resubmit = True j.splitter = SplitByFiles(filesPerJob=5, ignoremissing=True) j.postprocessors = [Notifier(address=email)] j.outputfiles = [DiracFile(tfn)] if not args.inspect_job: queues.add(j.submit) # noqa
def submitDV(jname, tag, script, outfiles, appName='DaVinci', appPath='.',
             appVer='v42r3', appArgs=None, backend=None, infiles=None,
             gaudirun=False, test=False, maxFiles=-1, do_auto_resubmit=True):
    '''
    Function for submitting a generic GaudiExec job to the grid with Ganga.
    If a local version of the application doesn't exist, prepareGaudiExec
    is used to create it. Otherwise GaudiExec is used.

    Args:
        jname: key into the module-level JobInfo table (NFiles / Input path).
        tag: suffix appended to the Ganga job name.
        script: options script handed to the application.
        outfiles: iterable of output-file patterns, wrapped as DiracFile.
        appArgs: extra application arguments (defaults to none).
        backend: Ganga backend; defaults to Dirac().
        infiles: job input files; defaults to getInputList().
        test: when True, restrict the splitter to a single file.
    '''
    #from GangaDirac.Lib.Backends import Dirac
    import os
    from Ganga.GPI import (Dirac, DiracFile, SplitByFiles, BKQuery, Job,
                           prepareGaudiExec, GaudiExec)

    # Fix: mutable default argument replaced by a None sentinel.
    if appArgs is None:
        appArgs = []
    if test:
        maxFiles = 1
    # Fix: compare against None by identity, not equality.
    if infiles is None:
        infiles = getInputList()
    if backend is None:
        backend = Dirac()

    # Reuse an existing local checkout of the app, else prepare a fresh one.
    appDir = os.path.expanduser(os.path.join(appPath, appName + 'Dev_' + appVer))
    if os.path.isdir(appDir):
        app = GaudiExec()
        app.directory = appDir
    else:
        app = prepareGaudiExec(appName, appVer, myPath=appPath)
    app.options = [script]
    app.useGaudiRun = gaudirun

    nfiles = JobInfo[jname]['NFiles']
    dpath = JobInfo[jname]['Input']

    mySplit = SplitByFiles()
    mySplit.filesPerJob = nfiles
    mySplit.maxFiles = maxFiles
    mySplit.ignoremissing = True

    j = Job(name=jname + '.' + tag,
            application=app,
            splitter=mySplit,
            inputdata=BKQuery(path=dpath).getDataset(),
            inputfiles=infiles,
            outputfiles=[DiracFile(outfile) for outfile in outfiles],
            do_auto_resubmit=do_auto_resubmit,
            backend=backend)
    j.application.extraArgs += appArgs
    j.submit()