def __init__(self, cpu_time=129600):
    """Constructor.

    Keyword arguments:
    cpu_time -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpu_time)
    self.setName('Prod4_MC_Generation')
    self.setType('MCSimulation')
    # software identity
    self.package = 'corsika_simhessarray'
    self.program_category = 'airshower_sim'
    self.prog_name = 'corsika'
    self.version = '2018-09-19'
    self.configuration_id = 4
    self.output_data_level = DATA_LEVEL_METADATA_ID['MC0']
    # run / shower defaults
    self.start_run_number = '0'
    self.run_number = '10'
    self.n_shower = 100
    self.cta_site = 'Paranal'
    self.particle = 'gamma'
    self.pointing_dir = 'South'
    self.zenith_angle = 20.
    # output handling
    self.output_pattern = 'Data/corsika/run*/*corsika.zst'
    self.n_output_files = 1
    self.output_file_size = 1000  # kb
    self.base_path = '/vo.cta.in2p3.fr/MC/PROD4/'
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.metadata = collections.OrderedDict()
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Evndisplay_Analysis')
    self.package = 'evndisplay'
    self.program_category = 'analysis'  # 'calibimgreco'
    self.version = 'prod3_d20150831b'
    self.prefix = 'CTA.prod3S'
    self.layout_list = '3HB1 3HB2 3HB3 3HD1 3HD2 3HI1'
    # analysis configuration files
    self.calibration_file = 'prod3.peds.20150820.dst.root'
    self.reconstructionparameter = 'EVNDISP.prod3.reconstruction.runparameter.NN'
    self.NNcleaninginputcard = 'EVNDISP.NNcleaning.dat'
    # catalog / output bookkeeping
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/'
    self.outputpattern = './*evndisp.tar.gz'
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.filemetadata = {}
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.jobGroupID = 1
def execute(self):
    """Build the production LFNs for the task described in self.paramDict.

    Returns the S_OK/S_ERROR structure produced by constructProductionLFNs.
    """
    jobDescription = self.paramDict['Job']
    prodID = self.paramDict['TransformationID']
    jobID = self.paramDict['TaskID']
    inputData = self.paramDict['InputData']

    job = Job(jobDescription)
    commons = job._getParameters()  # pylint: disable=protected-access
    code = job.workflow.createCode()
    # Collect the 'listoutput' definitions emitted by the generated workflow code.
    outputList = []
    for line in code.split("\n"):
        if line.count("listoutput"):
            # NOTE: eval on internally generated workflow code, not user input.
            outputList += eval(line.split("#")[0].split("=")[-1])  # pylint: disable=eval-used
    commons['outputList'] = outputList
    commons['PRODUCTION_ID'] = prodID
    commons['JOB_ID'] = jobID
    if inputData:
        commons['InputData'] = inputData
    gLogger.debug(commons)
    result = constructProductionLFNs(commons)
    if not result['OK']:
        gLogger.error(result['Message'])
    # BUGFIX: the original returned only on failure and fell off the end
    # (returning None) on success; always hand the result back to the caller.
    return result
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Prod3MC_Generation')
    self.package = 'corsika_simhessarray'
    self.version = '2015-07-21'
    # shower / run configuration
    self.nShower = 5
    self.start_run_number = '0'
    self.run_number = '10'
    self.array_layout = 'full'
    self.template_tag = '6'
    self.cta_site = 'Paranal'
    self.particle = 'gamma'
    self.pointing_dir = 'South'
    self.zenith_angle = 20.
    self.no_sct = True
    # paths and catalogs
    self.inputpath = 'Data/sim_telarray/cta-prod3/0.0deg'
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/scratch'
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Evndisplay_CalibReco')
    self.package = 'evndisplay'
    self.program_category = 'calibimgreco'
    self.version = 'prod4_d20181110'
    self.configuration_id = 4
    self.output_data_level = DATA_LEVEL_METADATA_ID['DL1']
    self.N_output_files = 1
    self.prefix = 'CTA.prod4S'
    self.layout = '3HB9-SST'
    # analysis configuration files
    self.calibration_file = 'prod4b-SST-IPR.root'
    self.reconstructionparameter = 'EVNDISP.prod4.reconstruction.runparameter.NN.noLL'
    # catalog / output bookkeeping
    self.base_path = '/vo.cta.in2p3.fr/MC/PROD4/'
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.filemetadata = {}
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.ts_task_id = 0
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Simtel')
    self.package = 'corsika_simhessarray'
    self.program_category = 'tel_sim'
    self.version = '2018-06-12'
    self.configuration_id = 4
    self.output_data_level = 0
    self.base_path = '/vo.cta.in2p3.fr/user/c/ciro.bigongiari/Miniarray15/Simtel'
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.filemetadata = {}
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.ts_task_id = 1000
    # simulation configuration
    self.simtel_config_file = 'ASTRI_MiniArray15_Paranal_ACDC_2018_06_12.cfg'
    self.thetaP = 20.0
    self.phiP = 0.0
    self.particle = 'Proton'
    # NOTE: derived from base_path / particle / phiP set just above
    self.output_path = os.path.join(self.base_path, self.particle, str(self.phiP))
    self.se_list = json.dumps(['FRASCATI-USER', 'CNAF-USER'])
def __init__(self, cpuTime=259200):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Prod3MC_Generation')
    self.package = 'corsika_simhessarray'
    self.program_category = 'tel_sim'
    self.version = '2017-04-19'
    self.configuration_id = 0
    self.output_data_level = 0
    # shower / run configuration
    self.nShower = 100
    self.start_run_number = '0'
    self.run_number = '10'
    self.cta_site = 'Paranal'
    self.cta_site_tag = ''
    self.array_layout = 'Baseline'
    self.particle = 'gamma'
    self.pointing_dir = 'South'
    self.zenith_angle = 20.
    self.outputquery = {'MCCampaign': 'PROD3', 'outputType': {'in': ['Data', 'Log']}}
    self.inputpath = 'Data/sim_telarray/cta-prod3-demo/0.0deg'
    self.N_output_files = 1
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/'
    # NOTE(review): 'TSCatalog2' differs from the 'TSCatalog' used elsewhere
    # in this project -- confirm it is intentional.
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog2'])
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Evndisplay_CalibReco')
    self.package = 'evndisplay'
    self.program_category = 'calibimgreco'
    self.version = 'prod3b_d20180201'
    self.configuration_id = -1
    self.output_data_level = 1
    self.prefix = 'CTA.prod3Sb'
    # NOTE(review): original literal used backslash continuations inside the
    # string; the embedded whitespace runs may have differed from single spaces.
    self.layout_list = ('3HB9-FD 3HB9-FG 3HB9-FA 3HB9-ND 3HB9-NG 3HB9-NA '
                        '3HB9-TS-BB-FD 3HB9-TS-BB-FG 3HB9-TS-BB-FA '
                        '3HB9-TS-BB-ND 3HB9-TS-BB-NG 3HB9-TS-BB-NA')
    # analysis configuration files
    self.focal_file = 'CTA.prod3b.EffectiveFocalLength.dat'
    self.calibration_file = 'prod3b.Paranal-20171214.ped.root'
    self.reconstructionparameter = 'EVNDISP.prod3.reconstruction.runparameter.NN.noLL'
    self.NNcleaninginputcard = 'EVNDISP.NNcleaning.dat'
    # catalog / output bookkeeping
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/'
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.filemetadata = {}
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.ts_task_id = 0
def execute(self):
    """Build production LFNs for the task, with ILD-specific renaming.

    Returns the S_OK/S_ERROR structure from constructProductionLFNs, with
    'ProductionOutputData' rewritten for ILD job types that have input data.
    """
    jobDescription = self.paramDict['Job']
    prodID = self.paramDict['TransformationID']
    jobID = self.paramDict['TaskID']
    inputData = self.paramDict['InputData']

    job = Job(jobDescription)
    commons = job._getParameters()  # pylint: disable=protected-access
    code = job.workflow.createCode()
    # Collect the 'listoutput' definitions emitted by the generated workflow code.
    outputList = []
    for line in code.split("\n"):
        if line.count("listoutput"):
            outputList += eval(line.split("#")[0].split("=")[-1])  # pylint: disable=eval-used
    commons['outputList'] = outputList
    commons['PRODUCTION_ID'] = prodID
    commons['JOB_ID'] = jobID
    if inputData:
        commons['InputData'] = inputData
    result = constructProductionLFNs(commons)
    if not result['OK']:
        LOG.error(result['Message'])
        return result
    # BUGFIX: 'InputData' is only inserted above when input data was supplied;
    # use .get() so the lookup cannot raise KeyError for data-less tasks.
    if commons['JobType'] in ILDJOBTYPES and commons.get('InputData'):
        for index, outputFile in enumerate(result['Value']['ProductionOutputData']):
            outputFileILD = getProdFilenameFromInput(commons['InputData'], outputFile, prodID, jobID)
            result['Value']['ProductionOutputData'][index] = outputFileILD
            LOG.debug("Changed output file name from '%s' to '%s' " % (outputFile, outputFileILD))
    return result
def __init__(self, cpu_time=259200):
    """Constructor; takes almost everything from the base class.

    Keyword arguments:
    cpu_time -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpu_time)
    self.setName('Prod5MC_Generation')
    self.setType('MCSimulation')
    # software identity
    self.package = 'corsika_simtelarray'
    self.version = '2020-06-29b'
    self.compiler = 'gcc83_matchcpu'
    self.program_category = 'tel_sim'
    self.prog_name = 'sim_telarray'
    self.configuration_id = 8
    self.output_data_level = DATA_LEVEL_METADATA_ID['DL0']
    # shower / run configuration
    self.n_shower = 100
    self.start_run_number = '0'
    self.run_number = '10'
    self.cta_site = 'Paranal'
    self.particle = 'gamma'
    self.pointing_dir = 'South'
    self.zenith_angle = 20.
    self.no_sct = True
    # output handling (pattern depends on cta_site set above)
    self.output_pattern = './Data/sim_telarray/cta-prod5-%s/0.0deg/Data/*.zst'\
        % self.cta_site.lower()
    self.base_path = '/vo.cta.in2p3.fr/MC/PROD5/'
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.n_output_files = 2
    self.output_file_size = 1000  # kb
    self.metadata = collections.OrderedDict()
def job():
    """Return a simple DIRAC job with an LFN, absolute and relative sandbox entry."""
    from DIRAC.Interfaces.API.Job import Job
    sandbox = [
        'LFN:/vo/user/i/initial/important.tar.gz',
        '/abspath/absfile.xml',
        'file_in_pwd.xml',
    ]
    j = Job(stdout='printer', stderr='/dev/null')
    j.setInputSandbox(sandbox)
    return j
def __init__(self, cpuTime=259200):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Prod3MC_Generation')
    self.package = 'corsika_simhessarray'
    self.program_category = 'tel_sim'
    self.version = '2017-04-19-div'
    self.configuration_id = 3
    self.output_data_level = 0
    # shower / run configuration
    self.nShower = 100
    self.start_run_number = '0'
    self.run_number = '10'
    self.cta_site = 'Paranal'
    self.cta_site_tag = ''
    self.array_layout = 'Baseline'
    self.particle = 'gamma'
    self.pointing_dir = 'South'
    self.zenith_angle = 20.
    self.div_cfg_id = 0
    # paths, catalogs and metadata containers
    self.inputpath = 'Data/sim_telarray/cta-prod3-demo/0.0deg'
    self.N_output_files = 1
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/'
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.metadata = collections.OrderedDict()
    self.metadata_field = dict()
    self.filemetadata = dict()
def __init__(self, script, parameters=None, softwarePackage='HESS/v0.1/root', compiled=False, cpuTime=3600):
    """Build a ROOT-macro job around the given script.

    Keyword arguments:
    script -- path to the ROOT macro; its basename becomes the job name
    parameters -- optional list of macro arguments
    softwarePackage -- software package tag to attach to the job
    compiled -- if True, ask ROOT to compile the macro (ACLiC '+')
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.workflow = Workflow()
    self.executable = '$DIRACROOT/scripts/cta-root-macro'
    script_name = os.path.basename(script)
    self.setName(script_name)
    self.script = script_name
    self.setInputSandbox([script])
    self.setCPUTime(cpuTime)
    # '+' appended to the macro name triggers ACLiC compilation
    suffix = '+' if compiled else ''
    escaped = []
    if parameters:
        # escape each argument's quotes for the shell command line
        escaped = [repr(p).replace('"', "\\\\'").replace("'", "\\\\'") for p in parameters]
    self.setConfigArgs("%s%s %s" % (script_name, suffix, ' '.join(escaped)))
    self.__addSoftwarePackage(softwarePackage)
def __init__(self, cpuTime=36000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('dl1 data handler reduction')
    self.package = 'dl1_data_handler'
    self.program_category = 'calibimgreco'
    self.version = 'v0.7.4'
    self.configuration_id = 1
    self.output_data_level = DATA_LEVEL_METADATA_ID['DL1']
    self.N_output_files = 1
    self.prefix = 'CTA.prod3Sb'
    self.layout = 'Baseline'
    # catalog / output bookkeeping
    self.base_path = '/vo.cta.in2p3.fr/MC/PROD3/'
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.ts_task_id = 0
    self.split_md = 'test'
    self.config_file_name = 'grid_config_train_02052019.yml'
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Evndisplay_CalibReco')
    self.package = 'evndisplay'
    self.version = 'prod5_d20200702'
    self.compiler = 'gcc48_default'
    self.program_category = 'calibimgreco'
    self.prog_name = 'evndisp'
    self.configuration_id = 7
    self.output_data_level = 1
    self.prefix = 'CTA.prod5S'
    # NOTE(review): original literal used backslash continuations inside the
    # string; the embedded whitespace runs may have differed from single spaces.
    self.layout_list = ('BL-0LSTs15MSTs50SSTs-MSTF '
                        'BL-0LSTs15MSTs50SSTs-MSTN '
                        'BL-4LSTs25MSTs70SSTs-MSTF '
                        'BL-4LSTs25MSTs70SSTs-MSTN')
    # analysis configuration files
    self.calibration_file = 'prod5/prod5-IPR.root'
    self.reconstructionparameter = 'EVNDISP.prod5.reconstruction.runparameter'
    # catalog / output bookkeeping
    self.base_path = '/vo.cta.in2p3.fr/MC/PROD5/'
    self.metadata = collections.OrderedDict()
    self.file_meta_data = dict()
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
    self.ts_task_id = 0
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Evndisplay_CalibReco')
    self.package = 'evndisplay'
    self.program_category = 'calibimgreco'
    self.version = 'prod3b_d20170602'  # or later
    self.configuration_id = 0
    self.output_data_level = 1
    self.prefix = 'CTA.prod3Nb'
    self.layout = 'Baseline'
    # analysis configuration files
    self.calibration_file = 'gamma_20deg_180deg_run3___cta-prod3-lapalma3-2147m-LaPalma.ped.root'
    self.reconstructionparameter = 'EVNDISP.prod3.reconstruction.runparameter.NN'
    self.NNcleaninginputcard = 'EVNDISP.NNcleaning.dat'
    # catalog / output bookkeeping
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/'
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.filemetadata = {}
    # NOTE(review): 'TSCatalog2' differs from the 'TSCatalog' used elsewhere
    # in this project -- confirm it is intentional.
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog2'])
def setUp(self):
    """Copy the helper scripts locally and load the helloWorld workflows."""
    super(RegressionTestCase, self).setUp()
    gLogger.setLevel('DEBUG')
    self.dirac = Dirac()

    def _locate(name):
        # prefer the local checkout; fall back to the Jenkins workspace
        try:
            return find_all(name, rootPath, '/DIRAC/tests/Workflow/Regression')[0]
        except IndexError:  # we are in Jenkins
            return find_all(name, os.environ['WORKSPACE'], '/DIRAC/tests/Workflow/Regression')[0]

    shutil.copyfile(_locate('exe-script.py'), './exe-script.py')
    shutil.copyfile(_locate('helloWorld.py'), './helloWorld.py')

    self.j_u_hello = Job(_locate('helloWorld.xml'))
    self.j_u_helloPlus = Job(_locate('helloWorld.xml'))
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Evndisplay_Reco')
    self.package = 'evndisplay'
    self.program_category = 'reconstruction'
    self.version = 'prod3b_d20170602'  # or later
    self.configuration_id = 0
    self.output_data_level = 2
    self.prefix = 'CTA.prod3Nb'
    self.layout = 'Baseline'
    self.pointing = '180'
    # lookup tables and dispersion configuration
    self.table_file = 'tables_CTA-prod3b-LaPalma-NNq05-NN-ID0_0deg-d20160925m4-Nb.3AL4-BN15.root'
    self.disp_subdir_name = 'BDTdisp.Nb.3AL4-BN15.T1'
    self.recid = '0,1,2'  # 0 = all teltescopes, 1 = LST only, 2 = MST only
    # catalog / output bookkeeping
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/'
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.filemetadata = {}
    self.catalogs = json.dumps(['DIRACFileCatalog', 'TSCatalog'])
def __init__(self, cpuTime=432000):
    """Constructor.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.setCPUTime(cpuTime)
    # defaults
    self.setName('Mars_Analysis')
    self.package = 'chimp'
    self.version = 'prod3_xxx'
    # Mars analysis options
    self.PixelRequiredPhes = '-100.'
    self.outdir = './'
    self.MuonMode = '0'
    self.StarOutput = '-staroutput'
    # alternatives: 'point-like', 'point-like diffuse'
    self.training_type = 'diffuse'
    self.basepath = '/vo.cta.in2p3.fr/MC/PROD3/'
    self.outputpattern = './data/*.tar.gz'  # used for ctastereo
    self.fcc = FileCatalogClient()
    self.metadata = collections.OrderedDict()
    self.filemetadata = {}
    self.jobGroupID = 1
def __init__(self, cpuTime=3600):
    """Create a corsika/sim_telarray user-job wrapper.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.workflow = Workflow()
    self.executable = '$DIRACROOT/scripts/cta-corsikasimteluser'
    self.setCPUTime(cpuTime)
    # declared only; presumably assigned elsewhere in the module -- no-op here
    global argumentStr
def __init__(self, cpuTime=3600):
    """Create a read-cta job wrapper.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.workflow = Workflow()
    self.executable = '$DIRACROOT/scripts/cta-read-cta'
    self.setCPUTime(cpuTime)
    # declared only; presumably assigned elsewhere in the module -- no-op here
    global argumentStr
def job():
    """Return a simple DIRAC job with an LFN, absolute and relative sandbox entry."""
    from DIRAC.Interfaces.API.Job import Job
    sandbox = [
        "LFN:/vo/user/i/initial/important.tar.gz",
        "/abspath/absfile.xml",
        "file_in_pwd.xml",
    ]
    j = Job(stdout="printer", stderr="/dev/null")
    j.setInputSandbox(sandbox)
    return j
def __init__(self, parameters=None, cpuTime=3600):
    """Create a HAP workflow-parameter job wrapper.

    Keyword arguments:
    parameters -- optional list of strings joined into the config arguments
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.workflow = Workflow()
    self.executable = '$DIRACROOT/scripts/cta-hap-wf-param'
    self.setCPUTime(cpuTime)
    # BUGFIX: the default parameters=None used to crash in ' '.join(None);
    # fall back to an empty argument list.
    argumentStr = ' '.join(parameters or [])
    self.setConfigArgs(argumentStr)
def __init__(self, cpuTime=3600):
    """Create an evndisp standalone job wrapper.

    Keyword arguments:
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.workflow = Workflow()
    self.executable = '$DIRACROOT/scripts/cta-evndispstandalone'
    self.setCPUTime(cpuTime)
    # reset the module-level parameter-file list shared with other helpers
    global parfileList
    parfileList = []
    # declared only; presumably assigned elsewhere in the module -- no-op here
    global argumentStr
def __init__(self, script, parameters=None, cpuTime=3600):
    """Create a HAP application job wrapper.

    Keyword arguments:
    script -- path to the script; its basename becomes the job name
    parameters -- optional list of strings joined into the config arguments
    cpuTime -- max cpu time allowed for the job
    """
    Job.__init__(self)
    self.workflow = Workflow()
    self.executable = '$DIRACROOT/scripts/cta-hap-application'
    self.setName(os.path.basename(script))
    self.setCPUTime(cpuTime)
    # module-level argument string shared with other helpers
    global argumentStr
    # BUGFIX: the default parameters=None used to crash in ' '.join(None);
    # fall back to an empty argument list.
    argumentStr = ' '.join(parameters or [])
    self.setConfigArgs(argumentStr)
def test_execute(self):
    """Run locally a job that requires multi-processing."""
    mp_job = Job()
    mp_job.setName("MP-test")
    mp_job.setExecutable(self.mpExe)
    mp_job.setInputSandbox(find_all('mpTest.py', '.', 'Utilities')[0])
    mp_job.setTag('MultiProcessor')
    outcome = mp_job.runLocal(self.d)
    self.assertTrue(outcome['OK'])
def test_execute(self):
    """Run the helloWorld script locally and check that it succeeds."""
    hello_job = Job()
    hello_job.setName("helloWorld-test")
    hello_job.setExecutable(self.exeScriptLocation)
    hello_job.setLogLevel('DEBUG')
    outcome = hello_job.runLocal(self.d)
    self.assertTrue(outcome['OK'])
class RegressionTestCase(IntegrationTest):
    """Base class for the Regression test cases"""

    def setUp(self):
        """Copy the helper scripts locally and load the helloWorld workflows."""
        super(RegressionTestCase, self).setUp()
        gLogger.setLevel("DEBUG")
        self.dirac = Dirac()

        def _locate(name, subdir):
            # prefer the local checkout; fall back to the Jenkins workspace
            try:
                return find_all(name, rootPath, subdir)[0]
            except IndexError:  # we are in Jenkins
                return find_all(name, os.environ["WORKSPACE"], subdir)[0]

        shutil.copyfile(_locate("exe-script.py", "/DIRAC/tests/Workflow"), "./exe-script.py")
        shutil.copyfile(_locate("helloWorld.py", "/DIRAC/tests/Workflow"), "./helloWorld.py")

        self.j_u_hello = Job(_locate("helloWorld.xml", "/DIRAC/tests/Workflow/Regression"))
        self.j_u_hello.setConfigArgs("pilot.cfg")
        self.j_u_helloPlus = Job(_locate("helloWorld.xml", "/DIRAC/tests/Workflow/Regression"))
        self.j_u_helloPlus.setConfigArgs("pilot.cfg")

    def tearDown(self):
        """Best-effort removal of the scripts copied by setUp."""
        try:
            os.remove("exe-script.py")
            os.remove("helloWorld.py")
        except OSError:
            pass
def __init__(self, script=None):
    """Initialise an ILC job with empty sandboxes and default system config.

    Keyword arguments:
    script -- optional job description passed on to the base DiracJob
    """
    DiracJob.__init__(self, script)
    self.log = gLogger.getSubLogger("ILCJob")
    # per-job state containers
    self.applicationlist = []
    self.inputsandbox = []
    self.outputsandbox = []
    self.check = True
    self.systemConfig = ''
    # step / event bookkeeping
    self.stepnumber = 0
    self.steps = []
    self.nbevts = 0
    self.energy = 0
    self.setSystemConfig('x86_64-slc5-gcc43-opt')
def helloWorldJob():
    """Build the standard helloWorld test job from the Integration exe-script."""
    script = find_all('exe-script.py', '..', '/DIRAC/tests/Integration')[0]
    hello = Job()
    hello.setName("helloWorld")
    hello.setInputSandbox(script)
    hello.setExecutable(script, "", "helloWorld.log")
    return hello
def helloWorldJob():
    """Build the standard helloWorld test job from the WMS exe-script."""
    script = find_all('exe-script.py', '.', 'WorkloadManagementSystem')[0]
    hello = Job()
    hello.setName("helloWorld")
    hello.setInputSandbox(script)
    hello.setExecutable(script, "", "helloWorld.log")
    return hello
def test_execute(self):
    """just testing unix "ls" """
    ls_job = Job()
    ls_job.setName("ls-test")
    ls_job.setExecutable("/bin/ls", '-l')
    ls_job.setLogLevel('DEBUG')
    outcome = ls_job.runLocal(self.d)
    self.assertTrue(outcome['OK'])
def run_test_job(args):
    """Submit a trivial Hello-World job using the first simtel file from args[0]."""
    simtel_files = load_files_from_list(args[0])
    dirac = Dirac()
    test_job = Job()
    test_job.setCPUTime(500)
    test_job.setInputData(simtel_files[0])
    test_job.setExecutable('echo', 'Hello World!')
    test_job.setName('Hello World')
    res = dirac.submit(test_job)
    print('Submission Result: {}'.format(res))
    return res
def setUp(self):
    """Copy the Regression helper scripts locally and load the helloWorld workflows."""
    super(IntegrationTest, self).setUp()
    self.dirac = Dirac()

    for script in ('exe-script.py', 'helloWorld.py'):
        shutil.copyfile(find_all(script, '.', 'Regression')[0], './%s' % script)

    self.j_u_hello = Job(find_all('helloWorld.xml', '.', 'Regression')[0])
    self.j_u_helloPlus = Job(find_all('helloWorld.xml', '.', 'Regression')[0])
def test_SimpleParametricJob():
    """Check that parameter sequences end up in the generated JDL."""
    job = Job()
    job.setExecutable('myExec')
    job.setLogLevel('DEBUG')
    job.setParameterSequence('JOB_ID', [1, 2, 3], addToWorkflow=True)
    lfn_pairs = [
        ['/lhcb/data/data1', '/lhcb/data/data2'],
        ['/lhcb/data/data3', '/lhcb/data/data4'],
        ['/lhcb/data/data5', '/lhcb/data/data6'],
    ]
    job.setParameterSequence('InputData', lfn_pairs, addToWorkflow=True)
    jdl = job._toJDL()

    # reference JDL location differs between repo layouts
    try:
        with open('./DIRAC/Interfaces/API/test/testWF.jdl') as fd:
            expected = fd.read()
    except IOError:
        with open('./Interfaces/API/test/testWF.jdl') as fd:
            expected = fd.read()
    assert jdl == expected

    clad = ClassAd('[' + jdl + ']')
    arguments = clad.getAttributeString('Arguments')
    assert clad.getAttributeString('JOB_ID') == '%(JOB_ID)s'
    assert clad.getAttributeString('InputData') == '%(InputData)s'
    for fragment in ('jobDescription.xml',
                     '-o LogLevel=DEBUG',
                     '-p JOB_ID=%(JOB_ID)s',
                     '-p InputData=%(InputData)s'):
        assert fragment in arguments
def test_SimpleParametricJob( self ): job = Job() job.setExecutable( 'myExec' ) job.setLogLevel( 'DEBUG' ) parList = [1,2,3] job.setParameterSequence( 'JOB_ID', parList, addToWorkflow=True ) inputDataList = [ [ '/lhcb/data/data1', '/lhcb/data/data2' ], [ '/lhcb/data/data3', '/lhcb/data/data4' ], [ '/lhcb/data/data5', '/lhcb/data/data6' ] ] job.setParameterSequence( 'InputData', inputDataList, addToWorkflow=True ) jdl = job._toJDL() print jdl clad = ClassAd( '[' + jdl + ']' ) arguments = clad.getAttributeString( 'Arguments' ) job_id = clad.getAttributeString( 'JOB_ID' ) inputData = clad.getAttributeString( 'InputData' ) print "arguments", arguments self.assertEqual( job_id, '%(JOB_ID)s' ) self.assertEqual( inputData, '%(InputData)s' ) self.assertIn( 'jobDescription.xml', arguments ) self.assertIn( '-o LogLevel=DEBUG', arguments ) self.assertIn( '-p JOB_ID=%(JOB_ID)s', arguments ) self.assertIn( '-p InputData=%(InputData)s', arguments )
# if file already in GRID storage, skip # (you cannot overwrite it there, delete it and resubmit) # (assumes tail and wave will always be written out together) if '/'.join([output_path.strip('/'), output_filename_wave]) in GRID_filelist: print("\n{} already on GRID SE\n".format(job_name)) continue if NJobs == 0: print("maximum number of jobs to submit reached") print("breaking loop now") break else: NJobs -= 1 j = Job() # runtime in seconds times 8 (CPU normalisation factor) j.setCPUTime(6 * 3600 * 8) j.setName(job_name) j.setInputSandbox(input_sandbox) if banned_sites: j.setBannedSites(banned_sites) # j.setDestination( 'LCG.IN2P3-CC.fr' ) # mr_filter loses its executable property by uploading it to the GRID SE; reset j.setExecutable('chmod', '+x mr_filter') j.setExecutable('ls -lah') for run_file in run_filelist:
"n_non_gamma" : f.getNumberOfNonGammas(), # "ismc" : int(f.isMC()) } file_dict["%s.txt" % (fn)] = metadata # Update the user. print("*") print("* Uploading the following files:") for fn in file_dict.keys(): print("*-> '%s'" % (fn)) print("*") ## The DIRAC job to submit. j = Job(stdout='StdOut', stderr='StdErr') # Set the name of the job (viewable in the web portal). j.setName(jobname) # As we're just copying the input sandbox to the storage element # via OutputData, we'll just list the files as a check for the # output written to StdOut. j.setExecutable('/bin/ls -l') # Here we add the names of the temporary copies of the frame data # files in the dataset to the input sandbox. These will be uploaded # to the grid with the job... j.setInputSandbox(file_dict.keys()) #...and added to the desired storage element with the corresponding
# BUGFIX: the directory argument is a positional argument, not a switch.
# Script.getUnprocessedSwitches() returns (switch, value) tuples, which made
# `len(args) != 1` and `args[0]` nonsensical; switches are still consumed
# from getUnprocessedSwitches() in the loop below.
args = Script.getPositionalArgs()
if len(args) != 1:
    Script.showHelp()
directory = args[0]

UseFilter = None
for switch, switchValue in Script.getUnprocessedSwitches():
    if switch == "UseFilter":
        UseFilter = True if switchValue.lower() == "true" else False

# Let's first create the prodJobuction
prodJobType = 'Merge'
transName = 'testProduction_' + str(int(time.time()))
desc = 'just test'

prodJob = Job()
prodJob._addParameter(prodJob.workflow, 'PRODUCTION_ID', 'string', '00012345', 'ProductionID')
prodJob._addParameter(prodJob.workflow, 'JOB_ID', 'string', '00006789', 'ProductionJobID')
prodJob._addParameter(prodJob.workflow, 'eventType', 'string', 'TestEventType', 'Event Type of the prodJobuction')
prodJob._addParameter(prodJob.workflow, 'numberOfEvents', 'string', '-1', 'Number of events requested')
prodJob._addParameter(prodJob.workflow, 'ProcessingType', 'JDL', str('Test'), 'ProductionGroupOrType')
prodJob._addParameter(prodJob.workflow, 'Priority', 'JDL', str(9), 'UserPriority')
prodJob.setType(prodJobType)
prodJob.workflow.setName(transName)
prodJob.workflow.setDescrShort(desc)
prodJob.workflow.setDescription(desc)
prodJob.setCPUTime(86400)
prodJob.setInputDataPolicy('Download')
prodJob.setExecutable('/bin/ls', '-l')

# Let's submit the prodJobuction now
def helloWorldJob():
    """Build a helloWorld job using the Integration exe-script."""
    hello = Job()
    hello.setName("helloWorld")
    hello.setInputSandbox('../../Integration/exe-script.py')
    hello.setExecutable("exe-script.py", "", "helloWorld.log")
    return hello
def submitProbeJobs(self, ce):
    """Submit a software probe job to the given CE."""
    # need credentials, should be there since the initialize
    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job
    from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
    import DIRAC

    dirac = Dirac()
    ops = Operations()
    scriptname = ops.getValue("ResourceStatus/SofwareManagementScript", self.script)

    probe = Job()
    probe.setDestinationCE(ce)
    probe.setCPUTime(1000)
    probe.setName("Probe %s" % ce)
    probe.setJobGroup("SoftwareProbe")
    probe.setExecutable("%s/GlastDIRAC/ResourceStatusSystem/Client/%s" % (DIRAC.rootPath, scriptname),
                        logFile='SoftwareProbe.log')
    probe.setOutputSandbox('*.log')

    res = dirac.submit(probe)
    if not res['OK']:
        return res
    return S_OK()
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Dirac import Dirac
from DIRAC.Interfaces.API.Job import Job

dirac = Dirac()

# one-shot job that compiles mr_filter on the grid and stores the binary
j = Job()
j.setName("compile_mrfilter")
j.setCPUTime(80)
j.setInputSandbox(["dirac_compile_mrfilter_pilot.sh"])
j.setExecutable("dirac_compile_mrfilter_pilot.sh", "")
j.setOutputData(["mr_filter"], outputSE=None,
                outputPath="cta/bin/mr_filter/v3_1/")
# BUGFIX: reuse the Dirac client created above instead of instantiating a
# second, throwaway one for the submission.
dirac.submit(j)
def submit(self, param):
    """Build and submit one job described by the *param* dict.

    Returns a dict with 'submit' (bool) and, on success, 'job_id'.
    """
    job = Job()
    job.setName(param['jobName'])
    job.setExecutable(param['jobScript'], logFile=param['jobScriptLog'])
    if self.site:
        job.setDestination(self.site)
    if self.jobGroup:
        job.setJobGroup(self.jobGroup)
    job.setInputSandbox(param['inputSandbox'])
    job.setOutputSandbox(param['outputSandbox'])
    job.setOutputData(param['outputData'], outputSE=self.outputSE, outputPath=self.outputPath)

    dirac = GridDirac()
    result = dirac.submit(job)

    status = {'submit': result['OK']}
    if status['submit']:
        status['job_id'] = result['Value']
    return status
def parametricJob():
    """Build a parametric helloWorld job with two parameter sequences."""
    script = find_all('exe-script.py', '..', '/DIRAC/tests/Integration')[0]
    par_job = Job()
    par_job.setName("parametric_helloWorld_%n")
    par_job.setInputSandbox(script)
    par_job.setParameterSequence("args", ['one', 'two', 'three'])
    par_job.setParameterSequence("iargs", [1, 2, 3])
    par_job.setExecutable(script,
                          arguments=": testing %(args)s %(iargs)s",
                          logFile='helloWorld_%n.log')
    return par_job
print "Usage %s <scriptName> <jobName> <nbJobs>"%sys.argv[0] sys.exit(1) scriptName = sys.argv[1] jobName = sys.argv[2] nbJobs = int(sys.argv[3]) if not os.path.exists(jobName): os.makedirs(jobName) os.makedirs("%s/Done"%jobName) os.makedirs("%s/Failed"%jobName) else: print "Folder %s exists"%jobName sys.exit(1) f = open("%s/jobIdList.txt"%jobName, 'w') for i in xrange(nbJobs): j = Job() j.setCPUTime(10000) j.setExecutable(scriptName) j.addToOutputSandbox.append('myLog.txt') j.addToOutputSandbox.append('clock.txt') j.addToOutputSandbox.append('time.txt') dirac = Dirac() jobID = dirac.submit(j) realId = jobID.get('JobID') f.write("%s\n"%realId) f.close()
def submitJob(jobPara): dirac = Dirac() j = Job() j.setName(jobPara['jobName']) j.setJobGroup(jobPara['jobGroup']) j.setExecutable(jobPara['jobScript'], logFile = jobPara['jobScriptLog']) j.setInputSandbox(jobPara['inputSandbox']) j.setOutputSandbox(jobPara['outputSandbox']) j.setOutputData(jobPara['outputData'], jobPara['SE']) j.setDestination(jobPara['sites']) j.setCPUTime(jobPara['CPUTime']) result = dirac.submit(j) if result['OK']: print 'Job %s submitted successfully. ID = %d' %(jobPara['jobName'],result['Value']) else: print 'Job %s submitted failed' %jobPara['jobName'] return result
def test_execute(self):
    """Run helloWorld locally with custom workflow parameters and settings."""
    job = Job()
    job.setName("helloWorld-test")
    job.setExecutable(
        find_all("helloWorld.py", '.', 'Integration')[0],
        arguments="This is an argument",
        logFile="aLogFileForTest.txt",
        parameters=[('executable', 'string', '', "Executable Script"),
                    ('arguments', 'string', '', 'Arguments for executable Script'),
                    ('applicationLog', 'string', '', "Log file name"),
                    ('someCustomOne', 'string', '', "boh")],
        paramValues=[('someCustomOne', 'aCustomValue')])
    job.setBannedSites(['LCG.SiteA.com', 'DIRAC.SiteB.org'])
    job.setOwner('ownerName')
    job.setOwnerGroup('ownerGroup')
    job.setName('jobName')
    job.setJobGroup('jobGroup')
    job.setType('jobType')
    job.setDestination('DIRAC.someSite.ch')
    job.setCPUTime(12345)
    job.setLogLevel('DEBUG')
    outcome = job.runLocal(self.d)
    self.assertTrue(outcome['OK'])
""" simple hello world job """ from DIRAC.Interfaces.API.Job import Job from DIRAC.Interfaces.API.Dirac import Dirac from DIRAC.DataManagementSystem.Utilities.DMSHelpers import DMSHelpers j = Job() j.setName( "helloWorld-test" ) j.setExecutable( "exe-script.py", "", "Executable.log" ) # <-- user settings j.setCPUTime( 172800 ) tier1s = DMSHelpers().getTiers( tier = ( 0, 1 ) ) j.setBannedSites( tier1s ) # user settings --> # print j.workflow # submit the job to dirac result = Dirac().submitJob(j) print result
def _submitJob(self, result_id, executable, test_name, site_name):
    """Submit one SAM test job; *executable* is an '&'-separated file list."""
    parts = executable.split('&')
    sam_job = Job()
    # first entry is the script to run, with the result id as its argument
    sam_job.setExecutable('python', arguments=parts[0] + " " + str(result_id))
    # every listed file goes into the input sandbox
    sam_job.setInputSandbox([SAM_TEST_DIR + file_name for file_name in parts])
    sam_job.setName(test_name)
    sam_job.setJobGroup('sam_test')
    sam_job.setDestination(site_name)
    return self.dirac.submit(sam_job)
def __submit( self, site, CE, vo ):
    """ set the job and submit.

    Builds a CE-test job, temporarily switches X509_USER_PROXY to the
    VO-specific proxy (serialised via the module-level LOCK), submits,
    then restores the previous environment.

    :param site: destination site name (used only when no CE is given)
    :param CE: destination CE name (takes precedence over site)
    :param vo: VO whose proxy is used for the submission
    :return: S_OK/S_ERROR from Dirac.submit (or from getProxyByVO on failure)
    """
    job = Job()
    job.setName( self.testType )
    job.setJobGroup( 'CE-Test' )
    job.setExecutable( self.executable )
    job.setInputSandbox( '%s/%s' % ( self.__scriptPath, self.executable ) )
    if site and not CE:
        job.setDestination( site )
    if CE:
        job.setDestinationCE( CE )

    LOCK.acquire()
    # BUGFIX: the original released LOCK and restored X509_USER_PROXY only on
    # the success path; an exception in getProxyByVO or dirac.submit left the
    # lock held forever and the environment clobbered. try/finally guarantees
    # cleanup on every path.
    try:
        proxyPath = BESUtils.getProxyByVO( 'zhangxm', vo )
        if not proxyPath[ 'OK' ]:
            return proxyPath
        proxyPath = proxyPath[ 'Value' ]

        oldProxy = os.environ.get( 'X509_USER_PROXY' )
        os.environ[ 'X509_USER_PROXY' ] = proxyPath
        try:
            result = self.dirac.submit( job )
        finally:
            # restore the caller's proxy environment exactly as it was
            if oldProxy is None:
                del os.environ[ 'X509_USER_PROXY' ]
            else:
                os.environ[ 'X509_USER_PROXY' ] = oldProxy
    finally:
        LOCK.release()

    return result
def test_basicJob():
    """Serialise a Job to XML and JDL and compare against stored fixtures."""
    job = Job()
    job.setOwner('ownerName')
    job.setOwnerGroup('ownerGroup')
    job.setName('jobName')
    job.setJobGroup('jobGroup')
    job.setExecutable('someExe')
    job.setType('jobType')
    job.setDestination('ANY')

    def _read_first(primary, fallback):
        # the test may be launched from the repo root or from inside DIRAC/
        try:
            with open(primary) as fd:
                return fd.read()
        except IOError:
            with open(fallback) as fd:
                return fd.read()

    xml = job._toXML()
    expected = _read_first('./DIRAC/Interfaces/API/test/testWF.xml',
                           './Interfaces/API/test/testWF.xml')
    assert xml == expected

    expected = _read_first('./DIRAC/Interfaces/API/test/testWFSIO.jdl',
                           './Interfaces/API/test/testWFSIO.jdl')
    jdlSIO = job._toJDL(jobDescriptionObject=StringIO.StringIO(job._toXML()))
    assert jdlSIO == expected
# dirac job created by ganga from DIRAC.Interfaces.API.Job import Job from DIRAC.Interfaces.API.Dirac import Dirac j = Job() dirac = Dirac() # default commands added by ganga j.setName( "helloWorld-test" ) j.setInputSandbox( ['/afs/cern.ch/user/f/fstagni/userJobs/_inputHello.tar.bz2', '/afs/cern.ch/user/f/fstagni/userJobs/hello-script.py'] ) j.setExecutable( "exe-script.py", "", "Ganga_Executable.log" ) # <-- user settings j.setCPUTime( 172800 ) tier1s = DMSHelpers().getTiers( tier = ( 0, 1 ) ) j.setBannedSites( tier1s ) # user settings --> # print j.workflow # submit the job to dirac result = dirac.submit( j ) print result
def prepareTransformationTasks(self,transBody,taskDict,owner='',ownerGroup=''):
    # Turn each transformation task in taskDict into a submittable Job object.
    # For every task: sets owner/group/name/IDs on a Job built from transBody,
    # copies task parameters into the JDL, applies "hospital" redirection for
    # configured transformations, attaches output-data JDL parameters, and
    # stores the finished Job under taskDict[taskNumber]['TaskObject'].
    # Returns S_OK(taskDict) (or the S_ERROR from getProxyInfo).

    # Fall back to the current proxy identity when owner/group not supplied.
    if (not owner) or (not ownerGroup):
        res = getProxyInfo(False,False)
        if not res['OK']:
            return res
        proxyInfo = res['Value']
        owner = proxyInfo['username']
        ownerGroup = proxyInfo['group']

    # One Job instance is reused and re-parameterised for every task.
    oJob = Job(transBody)
    for taskNumber in sortList(taskDict.keys()):
        paramsDict = taskDict[taskNumber]
        transID = paramsDict['TransformationID']
        self.log.verbose('Setting job owner:group to %s:%s' % (owner,ownerGroup))
        oJob.setOwner(owner)
        oJob.setOwnerGroup(ownerGroup)
        # Job group and name encode the zero-padded transformation/task IDs.
        transGroup = str(transID).zfill(8)
        self.log.verbose('Adding default transformation group of %s' % (transGroup))
        oJob.setJobGroup(transGroup)
        constructedName = str(transID).zfill(8)+'_'+str(taskNumber).zfill(8)
        self.log.verbose('Setting task name to %s' % constructedName)
        oJob.setName(constructedName)
        oJob._setParamValue('PRODUCTION_ID',str(transID).zfill(8))
        oJob._setParamValue('JOB_ID',str(taskNumber).zfill(8))

        # NOTE(review): inputData stays None for the whole loop — the
        # InputData branch below calls setInputData but never assigns this
        # variable, so getOutputData always receives InputData=None. Looks
        # like an omission; confirm against getOutputData's expectations.
        inputData = None
        for paramName,paramValue in paramsDict.items():
            self.log.verbose('TransID: %s, TaskID: %s, ParamName: %s, ParamValue: %s' %(transID,taskNumber,paramName,paramValue))
            if paramName=='InputData':
                if paramValue:
                    self.log.verbose('Setting input data to %s' %paramValue)
                    oJob.setInputData(paramValue)
            elif paramName=='Site':
                if paramValue:
                    self.log.verbose('Setting allocated site to: %s' %(paramValue))
                    oJob.setDestination(paramValue)
            elif paramValue:
                # Any other non-empty parameter is passed through as a raw
                # JDL parameter.
                self.log.verbose('Setting %s to %s' % (paramName,paramValue))
                oJob._addJDLParameter(paramName,paramValue)

        # "Hospital" transformations are redirected to a debugging site/CE
        # configured under /Operations/Hospital.
        hospitalTrans = [int(x) for x in gConfig.getValue("/Operations/Hospital/Transformations",[])]
        if int(transID) in hospitalTrans:
            hospitalSite = gConfig.getValue("/Operations/Hospital/HospitalSite",'DIRAC.JobDebugger.ch')
            hospitalCEs = gConfig.getValue("/Operations/Hospital/HospitalCEs",[])
            oJob.setType('Hospital')
            oJob.setDestination(hospitalSite)
            oJob.setInputDataPolicy('download',dataScheduling=False)
            if hospitalCEs:
                oJob._addJDLParameter('GridRequiredCEs',hospitalCEs)

        # Placeholder so the key exists even if output-data generation fails.
        taskDict[taskNumber]['TaskObject'] = ''
        res = self.getOutputData({'Job':oJob._toXML(),'TransformationID':transID,'TaskID':taskNumber,'InputData':inputData})
        if not res ['OK']:
            # Skip this task but keep processing the rest.
            self.log.error("Failed to generate output data",res['Message'])
            continue
        for name,output in res['Value'].items():
            oJob._addJDLParameter(name,string.join(output,';'))
        # Re-parse the XML so each task gets an independent Job object.
        taskDict[taskNumber]['TaskObject'] = Job(oJob._toXML())
    return S_OK(taskDict)
def test_execute(self):
    """ this one tests that I can execute a job that requires multi-processing
    """
    job = Job()
    job.setName("MP-test")
    job.setExecutable(self.mpExe)
    job.setInputSandbox(find_all('mpTest.py', rootPath, 'DIRAC/tests/Utilities')[0])
    job.setTag('MultiProcessor')
    job.setLogLevel('DEBUG')

    result = job.runLocal(self.d)
    # a single-core host cannot satisfy the MultiProcessor tag, so the
    # expected outcome depends on the machine running the test
    if multiprocessing.cpu_count() > 1:
        self.assertTrue(result['OK'])
    else:
        self.assertFalse(result['OK'])
# Script fragment: when running in pipeline mode, switch to the shifter
# proxy and seed the job sandboxes from the pipeline configuration.
# (Continues past this chunk — input_sandbox_files is still being built.)
if pipeline:
    proxy = None
    op = Operations()
    #TODO: replace glast.org with VO-agnostic statement
    shifter = op.getValue("Pipeline/Shifter","/DC=org/DC=doegrids/OU=People/CN=Stephan Zimmer 799865")
    shifter_group = op.getValue("Pipeline/ShifterGroup","glast_user")
    # Download the shifter proxy to a file; abort the script if unavailable.
    result = gProxyManager.downloadProxyToFile(shifter,shifter_group,requiredTimeLeft=10000)
    if not result['OK']:
        gLogger.error("No valid proxy found; ",result['Message'])
        dexit(1)
    proxy = result[ 'Value' ]
    # Point the grid tools at the downloaded proxy for the rest of the run.
    os.environ['X509_USER_PROXY'] = proxy
    gLogger.info("using proxy %s"%proxy)

j = Job(stdout="logFile.txt",stderr="logFile.txt") # specifies the logfile
input_sandbox_files = []
output_sandbox_files = ["logFile.txt", "jobmeta.inf"]

if pipeline:
    j.setExecutionEnv(pipeline_dict) # that sets the env vars
    # Ship the pipeline config directory (or single path) in the sandbox.
    if pipeline_dict.has_key("GPL_CONFIGDIR"):
        GPL_CONFIGDIR = pipeline_dict['GPL_CONFIGDIR']
        files = []  # NOTE(review): appears unused here — possibly used later in the script
        if os.path.isdir(GPL_CONFIGDIR):
            files_to_copy = glob.glob("%s/*"%GPL_CONFIGDIR)
            for f in files_to_copy:
                if os.path.isfile(f):
                    # regular files go in with an absolute path
                    input_sandbox_files.append(os.path.abspath(f))
                else:
                    input_sandbox_files.append(f)