def testInterfaceLookFeel(self):
    """Construct a full Im3Shape job (app, backend, dataset, splitter, output) without submitting."""
    # Just testing that the job construction works
    from Ganga.GPI import Job, Im3ShapeApp, Im3ShapeSplitter, DiracFile, LocalFile, GangaDataset, Dirac

    im3shape_job = Job()

    # Application: tarball of the im3shape software on the grid plus local config files
    im3shape_job.application = Im3ShapeApp(
        im3_location=DiracFile(lfn='/lsst/y1a1-v2-z/software/2016-02-24/im3shape-grid.tar.gz'),
        ini_location=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/params_disc.ini'),
        blacklist=LocalFile('/afs/cern.ch/user/r/rcurrie/cmtuser/GANGA/GANGA_LSST/install/ganga/python/blacklist-y1.txt'),
    )
    im3shape_job.backend = Dirac()

    # Single grid file as the input dataset
    dataset = GangaDataset()
    dataset.append(DiracFile(lfn='/lsst/DES0005+0043-z-meds-y1a1-gamma.fits.fz'))
    im3shape_job.inputdata = dataset

    # Split the work into chunks of 20 and collect the text outputs back onto the grid
    im3shape_job.splitter = Im3ShapeSplitter(size=20)
    im3shape_job.outputfiles = [DiracFile('*.main.txt'), DiracFile('*.epoch.txt')]
def test_g_Splitters(self):
    """Exercise GenericSplitter (single and multi attribute) and GangaDatasetSplitter.

    The ``# -- SPLITTERS ... START/STOP`` markers delimit snippets extracted
    into the user documentation and must be kept intact.
    """
    from Ganga.GPI import Job, GenericSplitter, GangaDataset, GangaDatasetSplitter, LocalFile

    # -- SPLITTERS BASICUSE START
    j = Job()
    basic_splitter = GenericSplitter()
    basic_splitter.attribute = 'application.args'
    basic_splitter.values = [['hello', 1], ['world', 2], ['again', 3]]
    j.splitter = basic_splitter
    j.submit()
    # -- SPLITTERS BASICUSE STOP

    # -- SPLITTERS SUBJOBCHECK START
    j.subjobs
    j.subjobs(0).peek("stdout")
    # -- SPLITTERS SUBJOBCHECK STOP

    # -- SPLITTERS MULTIATTRS START
    j = Job()
    multi_splitter = GenericSplitter()
    multi_splitter.multi_attrs = {
        'application.args': ['hello1', 'hello2'],
        'application.env': [{'MYENV': 'test1'}, {'MYENV': 'test2'}],
    }
    j.splitter = multi_splitter
    j.submit()
    # -- SPLITTERS MULTIATTRS STOP

    # -- SPLITTERS DATASETSPLITTER START
    j = Job()
    j.application.exe = 'more'
    j.application.args = ['__GangaInputData.txt__']
    j.inputdata = GangaDataset(files=[LocalFile('*.txt')])
    dataset_splitter = GangaDatasetSplitter()
    dataset_splitter.files_per_subjob = 2
    j.splitter = dataset_splitter
    j.submit()
def test_k_Tasks(self):
    """Exercise the Tasks framework: transforms, unit splitting, options and job chaining.

    Fixes a copy/paste bug from the original: in the "second transform"
    section the executable was assigned to ``trf1`` instead of ``trf2``,
    leaving the chained transform without an executable.

    The ``# -- TASKS ... START/STOP`` markers delimit snippets extracted
    into the user documentation and must be kept intact.
    """
    from Ganga.GPI import CoreTask, CoreTransform, Executable, Local, GenericSplitter, LocalFile, GangaDataset, \
        GangaDatasetSplitter, TaskChainInput, File, tasks

    # -- TASKS EXAMPLE START
    # First create the overall Task
    t = CoreTask()

    # Now create the Transform ( -> Job template)
    trf = CoreTransform()
    trf.application = Executable()
    trf.backend = Local()

    # Set the unit splitter (unique to CoreTransform - you may have better ways of creating units in your own
    # plugins). This will create a unit based on the splitting of any given splitter
    # If you put in your own splitter here, use the trf.fields_to_copy string list to tell Tasks which fields of
    # a Job to preserve from the split. Here, Tasks already knows about GenericSplitter and knows that we want to
    # change the 'application' object for each Unit/Master Job
    trf.unit_splitter = GenericSplitter()
    trf.unit_splitter.attribute = "application.args"
    trf.unit_splitter.values = ['arg 1', 'arg 2', 'arg 3']

    # Append the transform
    t.appendTransform(trf)

    # set the maximum number of active jobs to have running (allows for throttling)
    t.float = 100

    # run the Task
    t.run()
    # -- TASKS EXAMPLE STOP

    # -- TASKS OVERVIEW START
    tasks
    tasks(0).overview()
    # -- TASKS OVERVIEW STOP

    # Build a second, identical task to demonstrate the per-transform options below
    t = CoreTask()
    trf = CoreTransform()
    trf.application = Executable()
    trf.backend = Local()
    trf.unit_splitter = GenericSplitter()
    trf.unit_splitter.attribute = "application.args"
    trf.unit_splitter.values = ['arg 1', 'arg 2', 'arg 3']
    t.appendTransform(trf)
    t.float = 100

    # -- TASKS OPTIONS START
    # note - done at the transform level rather than task level as different backends may not need it
    trf.max_active_threads = 10  # optional - specifies the max number of submissions to queue up
    trf.submit_with_threads = True
    # -- TASKS OPTIONS STOP

    # -- TASKS JOBCHAIN START
    # Create a test script ('with' guarantees the handle is closed before the script is used)
    with open('my_script3.sh', 'w') as script:
        script.write("""#!/bin/bash
echo $PATH
ls -ltr
more __GangaInputData.txt__
echo "MY TEST FILE" > output_file.txt
sleep 120
""")

    # Create the parent task
    t = CoreTask()

    # Create the first transform
    trf1 = CoreTransform()
    trf1.application = Executable()
    trf1.application.exe = File('my_script3.sh')
    trf1.outputfiles = [LocalFile("*.txt")]
    d = GangaDataset()
    d.files = [LocalFile("*.txt")]
    d.treat_as_inputfiles = True
    trf1.addInputData(d)
    trf1.files_per_unit = 1
    trf1.submit_with_threads = True
    trf1.splitter = GangaDatasetSplitter()
    trf1.splitter.files_per_subjob = 2
    trf1.backend = Local()
    t.appendTransform(trf1)

    # Create the second transform
    trf2 = CoreTransform()
    trf2.application = Executable()
    # BUGFIX: was 'trf1.application.exe' - the second transform's exe belongs on trf2
    trf2.application.exe = File('my_script3.sh')
    trf2.submit_with_threads = True

    # Chain the second transform off the output of the first
    d = TaskChainInput()
    d.input_trf_id = trf1.getID()
    trf2.addInputData(d)
    trf2.splitter = GangaDatasetSplitter()
    trf2.splitter.files_per_subjob = 2
    trf2.backend = Local()
    t.appendTransform(trf2)

    # Set the Task running
    t.float = 1
    t.run()
def test_f_InputAndOutputData(self):
    """Exercise input/output file handling: sandbox files, peeking, wildcards and datasets.

    Fixes from the original: the three ``open(...).write(...)`` calls left
    file handles unclosed (the scripts could be submitted before being
    flushed on interpreters without refcounting) - replaced with ``with``
    blocks; a copy/pasted comment over the input-file creation is corrected.

    The ``# -- INPUTANDOUTPUTDATA ... START/STOP`` markers delimit snippets
    extracted into the user documentation and must be kept intact.
    """
    from Ganga.GPI import Job, File, LocalFile, GangaDataset, Local, plugins

    # -- INPUTANDOUTPUTDATA BASIC START
    # create a script to send
    with open('my_script2.sh', 'w') as script:
        script.write("""#!/bin/bash
ls -ltr
more "my_input.txt"
echo "TESTING" > my_output.txt
""")
    import os
    os.system('chmod +x my_script2.sh')

    # create an input file for the job to read back (comment fixed: this is not a script)
    with open('my_input.txt', 'w') as input_file:
        input_file.write('Input Testing works!')

    j = Job()
    j.application.exe = File('my_script2.sh')
    j.inputfiles = [LocalFile('my_input.txt')]
    j.outputfiles = [LocalFile('my_output.txt')]
    j.submit()
    # -- INPUTANDOUTPUTDATA BASIC STOP

    # -- INPUTANDOUTPUTDATA PEEKOUTPUT START
    j.peek()  # list output dir contents
    j.peek('my_output.txt')
    # -- INPUTANDOUTPUTDATA PEEKOUTPUT STOP

    # -- INPUTANDOUTPUTDATA FAILJOB START
    # This job will fail
    j = Job()
    j.application.exe = File('my_script2.sh')
    j.inputfiles = [LocalFile('my_input.txt')]
    j.outputfiles = [LocalFile('my_output_FAIL.txt')]
    j.submit()
    # -- INPUTANDOUTPUTDATA FAILJOB STOP

    # -- INPUTANDOUTPUTDATA WILDCARD START
    # This job will pick up both 'my_input.txt' and 'my_output.txt'
    j = Job()
    j.application.exe = File('my_script2.sh')
    j.inputfiles = [LocalFile('my_input.txt')]
    j.outputfiles = [LocalFile('*.txt')]
    j.submit()
    # -- INPUTANDOUTPUTDATA WILDCARD STOP

    # -- INPUTANDOUTPUTDATA OUTPUTFILES START
    j.outputfiles
    # -- INPUTANDOUTPUTDATA OUTPUTFILES STOP

    # -- INPUTANDOUTPUTDATA INPUTDATA START
    # Create a test script
    with open('my_script3.sh', 'w') as script:
        script.write("""#!/bin/bash
echo $PATH
ls -ltr
more __GangaInputData.txt__
echo "MY TEST FILE" > output_file.txt
""")
    import os  # re-imported so the extracted doc snippet is self-contained
    os.system('chmod +x my_script3.sh')

    # Submit a job
    j = Job()
    j.application.exe = File('my_script3.sh')
    j.inputdata = GangaDataset(files=[LocalFile('*.sh')])
    j.backend = Local()
    j.submit()
    # -- INPUTANDOUTPUTDATA INPUTDATA STOP

    # -- INPUTANDOUTPUTDATA GANGAFILES START
    plugins('gangafiles')