Example #1
def test_SimpleParametricJob():

    job = Job()
    job.setExecutable("myExec")
    job.setLogLevel("DEBUG")
    parList = [1, 2, 3]
    job.setParameterSequence("JOB_ID", parList, addToWorkflow=True)
    inputDataList = [
        ["/lhcb/data/data1", "/lhcb/data/data2"],
        ["/lhcb/data/data3", "/lhcb/data/data4"],
        ["/lhcb/data/data5", "/lhcb/data/data6"],
    ]
    job.setParameterSequence("InputData", inputDataList, addToWorkflow=True)

    jdl = job._toJDL()

    with open(join(dirname(__file__), "testWF.jdl")) as fd:
        expected = fd.read()

    assert jdl == expected

    clad = ClassAd("[" + jdl + "]")

    arguments = clad.getAttributeString("Arguments")
    job_id = clad.getAttributeString("JOB_ID")
    inputData = clad.getAttributeString("InputData")

    assert job_id == "%(JOB_ID)s"
    assert inputData == "%(InputData)s"
    assert "jobDescription.xml" in arguments
    assert "-o LogLevel=DEBUG" in arguments
    assert "-p JOB_ID=%(JOB_ID)s" in arguments
    assert "-p InputData=%(InputData)s" in arguments
Example #2
def test_SimpleParametricJob():

    job = Job()
    job.setExecutable('myExec')
    job.setLogLevel('DEBUG')
    parList = [1, 2, 3]
    job.setParameterSequence('JOB_ID', parList, addToWorkflow=True)
    inputDataList = [['/lhcb/data/data1', '/lhcb/data/data2'],
                     ['/lhcb/data/data3', '/lhcb/data/data4'],
                     ['/lhcb/data/data5', '/lhcb/data/data6']]
    job.setParameterSequence('InputData', inputDataList, addToWorkflow=True)

    jdl = job._toJDL()

    try:
        with open('./DIRAC/Interfaces/API/test/testWF.jdl') as fd:
            expected = fd.read()
    except IOError:
        with open('./Interfaces/API/test/testWF.jdl') as fd:
            expected = fd.read()

    assert jdl == expected

    clad = ClassAd('[' + jdl + ']')

    arguments = clad.getAttributeString('Arguments')
    job_id = clad.getAttributeString('JOB_ID')
    inputData = clad.getAttributeString('InputData')

    assert job_id == '%(JOB_ID)s'
    assert inputData == '%(InputData)s'
    assert 'jobDescription.xml' in arguments
    assert '-o LogLevel=DEBUG' in arguments
    assert '-p JOB_ID=%(JOB_ID)s' in arguments
    assert '-p InputData=%(InputData)s' in arguments
Example #3
def test_basicJob():
  job = Job()

  job.setOwner('ownerName')
  job.setOwnerGroup('ownerGroup')
  job.setName('jobName')
  job.setJobGroup('jobGroup')
  job.setExecutable('someExe')
  job.setType('jobType')
  job.setDestination('ANY')

  xml = job._toXML()

  try:
    with open('./DIRAC/Interfaces/API/test/testWF.xml') as fd:
      expected = fd.read()
  except IOError:
    with open('./Interfaces/API/test/testWF.xml') as fd:
      expected = fd.read()

  assert xml == expected

  try:
    with open('./DIRAC/Interfaces/API/test/testWFSIO.jdl') as fd:
      expected = fd.read()
  except IOError:
    with open('./Interfaces/API/test/testWFSIO.jdl') as fd:
      expected = fd.read()

  jdlSIO = job._toJDL(jobDescriptionObject=StringIO.StringIO(job._toXML()))
  assert jdlSIO == expected
Example #4
    def test_SimpleParametricJob(self):

        job = Job()
        job.setExecutable('myExec')
        job.setLogLevel('DEBUG')
        parList = [1, 2, 3]
        job.setParameterSequence('JOB_ID', parList, addToWorkflow=True)
        inputDataList = [['/lhcb/data/data1', '/lhcb/data/data2'],
                         ['/lhcb/data/data3', '/lhcb/data/data4'],
                         ['/lhcb/data/data5', '/lhcb/data/data6']]
        job.setParameterSequence('InputData',
                                 inputDataList,
                                 addToWorkflow=True)

        jdl = job._toJDL()

        print(jdl)

        clad = ClassAd('[' + jdl + ']')

        arguments = clad.getAttributeString('Arguments')
        job_id = clad.getAttributeString('JOB_ID')
        inputData = clad.getAttributeString('InputData')

        print "arguments", arguments

        self.assertEqual(job_id, '%(JOB_ID)s')
        self.assertEqual(inputData, '%(InputData)s')
        self.assertIn('jobDescription.xml', arguments)
        self.assertIn('-o LogLevel=DEBUG', arguments)
        self.assertIn('-p JOB_ID=%(JOB_ID)s', arguments)
        self.assertIn('-p InputData=%(InputData)s', arguments)
Example #5
def SoftClean(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    if (len(args) != 3):
        Script.showHelp()

    package = args[0]
    version = args[1]
    site = args[2]

    j = Job()

    j.setInputSandbox(['cta-swclean.py'])

    arguments = package + ' ' + version
    j.setExecutable('./cta-swclean.py', arguments)

    j.setDestination([site])

    name = 'SoftClean_' + package + '_' + version
    j.setName(name)  # use the descriptive name built above instead of a fixed string
    j.setJobGroup('SoftClean')

    j.setCPUTime(100000)

    Script.gLogger.info(j._toJDL())

    res = Dirac().submit(j)

    print(res['Value'])
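Submission helpers such as SoftClean() assume an enclosing DIRAC script that has already parsed the command line with the Script helper. A minimal driver sketch under that assumption (the argument order is taken from the function body above):

# Hypothetical driver for SoftClean(); assumes the standard DIRAC Script helper.
from DIRAC.Core.Base import Script

Script.parseCommandLine(ignoreErrors=True)  # parse switches and positional arguments
args = Script.getPositionalArgs()           # expected here: [package, version, site]
SoftClean(args)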
Example #6
def test_basicJob():
    job = Job()

    job.setOwner('ownerName')
    job.setOwnerGroup('ownerGroup')
    job.setName('jobName')
    job.setJobGroup('jobGroup')
    job.setExecutable('someExe')
    job.setType('jobType')
    job.setDestination('ANY')

    xml = job._toXML()

    try:
        with open('./DIRAC/Interfaces/API/test/testWF.xml') as fd:
            expected = fd.read()
    except IOError:
        with open('./Interfaces/API/test/testWF.xml') as fd:
            expected = fd.read()

    assert xml == expected

    try:
        with open('./DIRAC/Interfaces/API/test/testWFSIO.jdl') as fd:
            expected = fd.read()
    except IOError:
        with open('./Interfaces/API/test/testWFSIO.jdl') as fd:
            expected = fd.read()

    jdlSIO = job._toJDL(jobDescriptionObject=StringIO.StringIO(job._toXML()))
    assert jdlSIO == expected
Example #7
def softinstall( args = None ) :

  from DIRAC.Interfaces.API.Dirac import Dirac
  from DIRAC.Interfaces.API.Job import Job

  if (len(args)!=2):
    Script.showHelp()

  version = args[0]
  site = args[1]

  if version not in ['prod-2_21122012','prod-2_08032013']:
    Script.gLogger.error('Version not valid')
    Script.showHelp()

  j = Job()

  j.setInputSandbox( ['cta-swinstall.py','SoftwareInstallation.py','CODE'] )   

  j.setExecutable('./cta-swinstall.py', version)

  j.setDestination([site])

  j.setName('SoftInstall')

  j.setCPUTime(100000)

  Script.gLogger.info( j._toJDL() )

  Dirac().submit( j )
Example #8
def softinstall(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    if (len(args) < 2):
        Script.showHelp()

    version = args[0]
    site = args[1]

    if version not in ['18122013', 'v0r7p0']:
        Script.gLogger.error('Version not valid')
        Script.showHelp()

    j = Job()

    j.setInputSandbox(['cta-ctools-install.py', 'SoftwareInstallation.py'])

    j.setExecutable('./cta-ctools-install.py', version)

    j.setDestination([site])

    j.setName('ctoolsInstall')

    j.setCPUTime(100000)

    Script.gLogger.info(j._toJDL())

    if site in ['LCG.GRIF.fr', 'LCG.M3PEC.fr']:
        if site == 'LCG.GRIF.fr':
            ceList = [
                'apcce02.in2p3.fr', 'grid36.lal.in2p3.fr',
                'lpnhe-cream.in2p3.fr', 'llrcream.in2p3.fr',
                'node74.datagrid.cea.fr'
            ]
        elif site == 'LCG.M3PEC.fr':
            #      ceList = ['ce0.bordeaux.inra.fr','ce0.m3pec.u-bordeaux1.fr']
            ceList = ['ce0.bordeaux.inra.fr']
        for ce in ceList:
            j.setDestinationCE(ce)
            name = 'ctoolsInstall' + '_' + ce
            j.setName(name)
            res = Dirac().submit(j)
            print(res)
        DIRAC.exit()
    else:
        name = 'ctoolsInstall'

    j.setName(name)
    res = Dirac().submit(j)
    print(res)
Example #9
def read_hessjob(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    if (len(args) != 1):
        Script.showHelp()

    version = args[0]

    user_script = './read_hess2dst.sh'

    sim_file = 'simtel_file.list'

    infileLFNList = [
        '/vo.cta.in2p3.fr/MC/PROD2/Config_310113/prod-2_21122012_corsika/gamma/prod-2_06052013_simtel_STD/Data/002xxx/gamma_20.0_180.0_alt2662.0_run002997.simtel.gz',
        '/vo.cta.in2p3.fr/MC/PROD2/Config_310113/prod-2_21122012_corsika/gamma/prod-2_06052013_simtel_STD/Data/002xxx/gamma_20.0_180.0_alt2662.0_run002998.simtel.gz'
    ]

    f = open(sim_file, 'w')

    for infileLFN in infileLFNList:
        filein = os.path.basename(infileLFN)
        f.write(filein)
        f.write('\n')

    f.close()

    j = Job()

    j.setInputData(infileLFNList)

    options = []
    options = [sim_file]

    executablestr = "%s %s %s" % (version, user_script, ' '.join(options))

    j.setExecutable('./cta-read_hess.py', executablestr)

    j.setInputSandbox(['cta-read_hess.py', user_script, sim_file])

    j.setOutputSandbox(['read_hess.log'])

    j.setOutputData(['*dst.gz'])

    j.setName(user_script)

    j.setCPUTime(100000)

    Script.gLogger.info(j._toJDL())

    Dirac().submit(j)
Example #10
def test_MPJob(proc, minProc, maxProc, expectedProc, expectedMinProc, expectedMaxProc):

    job = Job()
    job.setExecutable("myExec")
    job.setLogLevel("DEBUG")
    job.setNumberOfProcessors(proc, minProc, maxProc)
    jdl = job._toJDL()
    clad = ClassAd("[" + jdl + "]")
    processors = clad.getAttributeInt("NumberOfProcessors")
    minProcessors = clad.getAttributeInt("MinNumberOfProcessors")
    maxProcessors = clad.getAttributeInt("MaxNumberOfProcessors")
    assert processors == expectedProc
    assert minProcessors == expectedMinProc
    assert maxProcessors == expectedMaxProc
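test_MPJob receives both its inputs and its expected results as arguments, which suggests it is driven by pytest parametrization. A sketch of such a decorator; the tuples below are placeholders for illustration only, not verified DIRAC defaults:

# Illustrative parametrization only; the expected values are placeholders.
import pytest

@pytest.mark.parametrize(
    "proc, minProc, maxProc, expectedProc, expectedMinProc, expectedMaxProc",
    [
        (2, None, None, 2, 2, 2),  # placeholder: fixed processor count
        (None, 2, 4, None, 2, 4),  # placeholder: a min/max range
    ],
)
def test_MPJob(proc, minProc, maxProc, expectedProc, expectedMinProc, expectedMaxProc):
    ...  # body as in Example #10 above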
Example #11
def test_MPJob(proc, minProc, maxProc, expectedProc, expectedMinProc,
               expectedMaxProc):

    job = Job()
    job.setExecutable('myExec')
    job.setLogLevel('DEBUG')
    job.setNumberOfProcessors(proc, minProc, maxProc)
    jdl = job._toJDL()
    clad = ClassAd('[' + jdl + ']')
    processors = clad.getAttributeInt('NumberOfProcessors')
    minProcessors = clad.getAttributeInt('MinNumberOfProcessors')
    maxProcessors = clad.getAttributeInt('MaxNumberOfProcessors')
    assert processors == expectedProc
    assert minProcessors == expectedMinProc
    assert maxProcessors == expectedMaxProc
Example #12
def CorsikaSimtelInstall(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    if (len(args) != 2):
        Script.showHelp()

    version = args[0]
    site = args[1]

    if version not in ['prod-2_13112014']:
        Script.gLogger.error('Version not valid')
        Script.showHelp()

    j = Job()
    CorsikaSimtelPack = os.path.join('corsika_simhessarray', version,
                                     'corsika_simhessarray')
    CorsikaSimtelLFN = 'LFN:' + os.path.join('/vo.cta.in2p3.fr/software',
                                             CorsikaSimtelPack) + '.tar.gz'
    j.setInputSandbox(['cta-corsikasimtel-install.py', CorsikaSimtelLFN])
    j.setExecutable('./cta-corsikasimtel-install.py', version)
    j.setDestination([site])
    j.setJobGroup('SoftInstall')
    j.setCPUTime(100000)

    if site in ['LCG.GRIF.fr', 'LCG.M3PEC.fr']:
        if site == 'LCG.GRIF.fr':
            ceList = [
                'apcce02.in2p3.fr', 'grid36.lal.in2p3.fr',
                'lpnhe-cream.in2p3.fr', 'llrcream.in2p3.fr',
                'node74.datagrid.cea.fr'
            ]
        if site == 'LCG.M3PEC.fr':
            ceList = ['ce0.bordeaux.inra.fr', 'ce0.m3pec.u-bordeaux1.fr']

        for ce in ceList:
            j.setDestinationCE(ce)
            name = 'corsikasimtelInstall' + '_' + ce
            j.setName(name)
            Dirac().submit(j)
        DIRAC.exit()

    j.setName('corsikasimtelInstall')
    Script.gLogger.info(j._toJDL())
    Dirac().submit(j)
Example #13
def test_SimpleParametricJob():

  job = Job()
  job.setExecutable('myExec')
  job.setLogLevel('DEBUG')
  parList = [1, 2, 3]
  job.setParameterSequence('JOB_ID', parList, addToWorkflow=True)
  inputDataList = [
      [
          '/lhcb/data/data1',
          '/lhcb/data/data2'
      ],
      [
          '/lhcb/data/data3',
          '/lhcb/data/data4'
      ],
      [
          '/lhcb/data/data5',
          '/lhcb/data/data6'
      ]
  ]
  job.setParameterSequence('InputData', inputDataList, addToWorkflow=True)

  jdl = job._toJDL()

  try:
    with open('./DIRAC/Interfaces/API/test/testWF.jdl') as fd:
      expected = fd.read()
  except IOError:
    with open('./Interfaces/API/test/testWF.jdl') as fd:
      expected = fd.read()

  assert jdl == expected

  clad = ClassAd('[' + jdl + ']')

  arguments = clad.getAttributeString('Arguments')
  job_id = clad.getAttributeString('JOB_ID')
  inputData = clad.getAttributeString('InputData')

  assert job_id == '%(JOB_ID)s'
  assert inputData == '%(InputData)s'
  assert 'jobDescription.xml' in arguments
  assert '-o LogLevel=DEBUG' in arguments
  assert '-p JOB_ID=%(JOB_ID)s' in arguments
  assert '-p InputData=%(InputData)s' in arguments
Example #14
def simteljob(args = None ):

  from DIRAC.Interfaces.API.Dirac import Dirac
  from DIRAC.Interfaces.API.Job import Job

  if (len(args)!=1):
    Script.showHelp()

  version = args[0]

  user_script = './run_simtel.sh'
  
  infileLFNList = ['/vo.cta.in2p3.fr/MC/PROD2/Config_120213/prod-2_21122012_corsika/proton/Data/044xxx/proton_20.0_180.0_alt2662.0_run044019.corsika.gz',
                   '/vo.cta.in2p3.fr/MC/PROD2/Config_120213/prod-2_21122012_corsika/proton/Data/044xxx/proton_20.0_180.0_alt2662.0_run044085.corsika.gz']


  for infileLFN in infileLFNList:
    filein = os.path.basename(infileLFN)

    j = Job()

    j.setInputSandbox( ['cta-simtel.py', user_script] )  
    j.setInputData(infileLFN)
  
    user_args = []
    user_args = [filein]
  
    executablestr = "%s %s %s" % ( version, user_script, ' '.join( user_args ) )

    j.setExecutable('./cta-simtel.py', executablestr)

    sim_out = 'Data/sim_telarray/cta-ultra5/0.0deg/Data/*.simtel.gz'
    log_out = 'Data/sim_telarray/cta-ultra5/0.0deg/Log/*.log.gz'
    hist_out = 'Data/sim_telarray/cta-ultra5/0.0deg/Histograms/*.hdata.gz'
   
    j.setOutputData([sim_out,log_out,hist_out])
    j.setOutputSandbox('simtel.log')
    j.setName(user_script)
    j.setCPUTime(100000)

    Script.gLogger.info( j._toJDL() )

    Dirac().submit( j )
Example #15
  def test_SimpleParametricJob( self ):

    job = Job()
    job.setExecutable( 'myExec' )
    job.setLogLevel( 'DEBUG' )
    parList = [1,2,3]
    job.setParameterSequence( 'JOB_ID', parList, addToWorkflow=True )
    inputDataList = [
      [
        '/lhcb/data/data1',
        '/lhcb/data/data2'
      ],
      [
        '/lhcb/data/data3',
        '/lhcb/data/data4'
      ],
      [
        '/lhcb/data/data5',
        '/lhcb/data/data6'
      ]
    ]
    job.setParameterSequence( 'InputData', inputDataList, addToWorkflow=True )

    jdl = job._toJDL()

    print(jdl)

    clad = ClassAd( '[' + jdl + ']' )

    arguments = clad.getAttributeString( 'Arguments' )
    job_id = clad.getAttributeString( 'JOB_ID' )
    inputData = clad.getAttributeString( 'InputData' )

    print "arguments", arguments

    self.assertEqual( job_id, '%(JOB_ID)s' )
    self.assertEqual( inputData, '%(InputData)s' )
    self.assertIn( 'jobDescription.xml', arguments )
    self.assertIn( '-o LogLevel=DEBUG', arguments )
    self.assertIn( '-p JOB_ID=%(JOB_ID)s', arguments )
    self.assertIn( '-p InputData=%(InputData)s', arguments )
Example #16
def test_basicJob():
    job = Job()

    job.setOwner("ownerName")
    job.setOwnerGroup("ownerGroup")
    job.setName("jobName")
    job.setJobGroup("jobGroup")
    job.setExecutable("someExe")
    job.setType("jobType")
    job.setDestination("ANY")

    xml = job._toXML()

    with open(join(dirname(__file__), "testWF.xml")) as fd:
        expected = fd.read()

    assert xml == expected

    with open(join(dirname(__file__), "testWFSIO.jdl")) as fd:
        expected = fd.read()

    jdlSIO = job._toJDL(jobDescriptionObject=StringIO(job._toXML()))
    assert jdlSIO == expected
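Examples #3 and #6 use the Python 2 StringIO module (StringIO.StringIO), while Example #16 calls StringIO() directly, presumably imported from io under Python 3. A version-agnostic import sketch, assuming nothing beyond the standard library:

# Pick whichever StringIO implementation is available at runtime.
try:
    from StringIO import StringIO  # Python 2
except ImportError:
    from io import StringIO        # Python 3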
Example #17
def Flux(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job
    import time
    import os.path

    user_script = './flux.sh'
    modmacro = './CTAflux_speeed.C'
    site = "PARANAL"

    if (len(args) != 5):
        Script.showHelp()

    typeofdata = "test"
    particle = args[0]
    direction = args[1]
    MOD = args[2]
    exten = args[3]
    zenName = args[4]

    # List of files over which flux should be run

    LFN_file = "./stereofiles/lfn_%s_%s_%s_%s.lfns" % (particle, exten,
                                                       zenName, direction)

    fileLength = sum(1 for line in open(LFN_file))
    f = open(LFN_file, 'r')

    if particle == "proton":
        filesPerJob = 10
    else:
        filesPerJob = 20

    fileList = []
    text_file_name = "lfnStereoFiles_%s_%s_%s_%s.txt" % (particle, exten,
                                                         typeofdata, direction)
    text_file = open(text_file_name, "w")

    # File containing the id number of files already produced. The relaunch of these jobs will be skipped
    done_file_name = "./stereofiles/done/done_%s_%s_%s_%s.lfns" % (
        particle, exten, zenName, direction)

    if os.path.exists(done_file_name):
        done_content = [
            int(line.strip()) for line in open(done_file_name, 'r')
        ]
    else:
        done_content = []

    loop = 0
    iJob = 0

    for line in f:
        loop = loop + 1
        infileLFN = line.strip()

        fileList.append(infileLFN)
        text_file.write("%s\n" % infileLFN)
        remain = loop % filesPerJob

        if remain == 0 or loop == fileLength:
            iJob = iJob + 1

            # Skipping of already finished jobs
            if iJob in done_content:
                text_file.close()
                fileList = []
                text_file = open(text_file_name, "w")
                continue

            else:
                j = Job()
                text_file.close()
                j.setInputSandbox([
                    user_script, "setupPackageMARS.sh", "CheckFileZombie.C",
                    text_file_name, modmacro
                ])

                jobName = "%s_%s_%s_%s_%s_%s_%s" % (user_script, site,
                                                    particle, direction, iJob,
                                                    exten, zenName)
                jobOut = "%s_%s_%s_%s_%s.out" % (user_script, site, particle,
                                                 direction, iJob)
                script_args = "%s %s %s %s %s %s %s" % (
                    particle, site, iJob, direction, MOD, exten, zenName)

                j.setExecutable(user_script, script_args)
                j.setOutputSandbox([jobOut, "applicationLog.txt"])
                j.setName(jobName)
                j.setBannedSites([
                    'LCG.MSFG.fr', 'LCG.M3PEC.fr', 'LCG.OBSPM.fr',
                    'LCG.UNI-DORTMUND.de', 'LCG.UNIV-LILLE.fr',
                    'LCG.Prague.cz', 'LCG.GRIF.fr'
                ])
                Script.gLogger.info(j._toJDL())
                print "Submitting job %s %s %s %s %s %s" % (
                    user_script, zenName, particle, direction, site, iJob)
                time.sleep(3)
                Dirac().submit(j)
                fileList = []
                text_file = open(text_file_name, "w")
Example #18
    j.setOutputSandbox([
        'StdOut', 'StdErr',
        'outputtxt_' + str(id_start) + '_' + str(id_end - 1) + '.txt',
        'prmon' + str(id_start) + '_' + str(id_end - 1) + '.txt'
    ])
    o_data_file = (lfn + 'second/results_experiment_' + str(expmnt) + '/' +
                   'LOS_' + str(id_start) + '_to_' + str(id_end - 1) + '.npy')
    try:
        output_process = subprocess.check_output('dirac-dms-remove-files ' +
                                                 o_data_file,
                                                 stderr=subprocess.STDOUT,
                                                 shell=True)
    except subprocess.CalledProcessError as e:
        print('Failed: ' + str(e.returncode) + ' ' + e.output)
    else:
        print "Output: ", output_process
    j.setOutputData(
        ['LOS_' + str(id_start) + '_to_' + str(id_end - 1) + '.npy'],
        outputSE=SEList,
        outputPath='/second/results_experiment_' + str(expmnt))
    try:
        diracUsername = getProxyInfo()['Value']['username']
    except:
        print('Failed to get DIRAC username. No proxy set up?')
        sys.exit(1)
    j.setJobGroup('rmsynthesis_by_' + expmnt + '_' + timestamp)
    jobID = dirac.submitJob(j)
    print('Submission Result: ', j._toJDL())
    print('\n')
Example #19
def Stereo(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './stereo.sh'
    macro = './CTAstereo.C'

    if (len(args) != 5):
        Script.showHelp()

    particle = args[0]
    typeofdata = args[1]
    direction = args[2]
    zenith = args[3]
    diffuse = args[4]

    if typeofdata == 'train':
        # The master layout with all the telescopes
        candidates = './Prod3_3HB9All_Candidates.txt'
    elif typeofdata == 'test':
        # Different layouts
        candidates = './Prod3_3HB9_Candidates_Full.txt'
    else:
        print "Invalid type of data definition!"
        Script.showHelp()
        return 1

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        # deg = "180"
    elif direction == "S":
        directionName = "south"
        # deg = "0"
    else:
        print('Wrong direction. It can only be "N" or "S".')
        Script.showHelp()
        return 1

    filesPerJob = 5

    site = "PARANAL"

    listname = './training/gamma_trainLUT_%s_%s_%s.lfns' % (zenName, diffName, direction)

    loop = 0
    iJob = 0
    # totalEntries /= (2*filesPerJob)
    # print totalEntries

    f = open(listname, 'r')
    totalEntries = sum(1 for _ in f)
    f = open(listname, 'r')
    fileList = []
    text_file_name = "lfnFiles_%s_%s_%s_%s.txt" % (particle, direction, zenName, diffuse)
    text_file = open(text_file_name, "w")
    for line in f:
        loop = loop+1
        infileLFN = line.strip()
        # filein = os.path.basename(infileLFN)
        fileList.append(infileLFN)
        text_file.write("%s\n" % infileLFN)
        remain = loop % filesPerJob

        if iJob == 10:
            break

        if loop == totalEntries:
            remain = 0

        if remain == 0:
            iJob = iJob+1

            j = Job()
            text_file.close()
            j.setInputSandbox([user_script, "setupPackageMARS.sh", text_file_name, candidates, macro])
            jobName = "%s %s %s %s %s %s %s %s" % (user_script, site, particle, typeofdata, directionName, zenName, diffName, iJob)
            jobOut = "%s_%s_%s_%s_%s.out" % (user_script, site, typeofdata, directionName, iJob)
            script_args = "%s %s %s %s %s %s %s" % (particle, typeofdata, direction, zenName, diffName, site, iJob)

            j.setExecutable(user_script, script_args)
            j.setOutputSandbox([jobOut, "applicationLog.txt"])
            j.setName(jobName)
            j.setBannedSites(['LCG.MSFG.fr', 'LCG.M3PEC.fr', 'LCG.OBSPM.fr', 'LCG.UNI-DORTMUND.de', 'LCG.UNIV-LILLE.fr', 'LCG.GRIF.fr', 'ARC.SE-SNIC-T2.se'])
            Script.gLogger.info(j._toJDL())

            print "Submitting job %s" % (jobName)
            Dirac().submit(j)
            fileList = []
            text_file = open(text_file_name, "w")
Example #20
def TrainERF(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './trainERF.sh'

    if (len(args) != 3):
        Script.showHelp()
    direction = args[0]
    zenith = args[1]
    diffuse = args[2]

    site = "PARANAL"

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        deg = "180"
    elif direction == "S":
        directionName = "south"
        deg = "0"
    else:
        print('Wrong direction. It can only be "N" or "S".')
        Script.showHelp()
        return 1

    # List of files over which the training should be done
    LFN_file = './training/gamma_trainERF_%s_%s_%s.lfns' % (zenName, diffName,
                                                            direction)

    StatFile = './Statistic_train.txt'
    for telType in range(0, 6):
        jobName = "%s_%s_%s_%s_%s" % (user_script, directionName, telType,
                                      diffName, zenName)
        jobOut = "%s_%s_%s.out" % (user_script, directionName, telType)
        script_args = "%s %s %s %s %s" % (direction, site, diffName, zenName,
                                          telType)
        j = Job()
        # create LFN list
        # LFNList = []
        # f = open(LFN_file, 'r')
        # for line in f:
        #    infileLFN = line.strip()
        #    LFNList.append(infileLFN)
        j.setInputSandbox(
            [user_script, "setupPackageMARS.sh", LFN_file, StatFile])
        j.setExecutable(user_script, script_args)
        j.setOutputSandbox([jobOut, "applicationLog.txt"])
        j.setName(jobName)
        Script.gLogger.info(j._toJDL())
        print "Launching %s %s" % (user_script, script_args)
        Dirac().submit(j)
Example #21
def TrainRF(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './trainRF.sh'

    if (len(args) != 3):
        Script.showHelp()
    direction = args[0]
    zenith = args[1]
    diffuse = args[2]

    site = "PARANAL"

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        # deg = "180"
    elif direction == "S":
        directionName = "south"
        # deg = "0"
    else:
        print('Wrong direction. It can only be "N" or "S".')
        Script.showHelp()
        return 1

    # Macro fixing the file check before continues.
    ROOTmacro = "CTAtrain.C"

    # List of files over which the training should be done
    LFN_file_gammas = './training/gamma_ghtrain_%s_%s_%s.lfns' % (
        zenName, diffName, direction)
    LFN_file_protons = './training/proton_ghtrain_%s_%s_%s.lfns' % (
        zenName, diffName, direction)

    StatFile = './Statistic_train.txt'

    for telType in range(0, 6):
        jobName = "%s_%s_%s_%s_%s" % (user_script, directionName, diffName,
                                      telType, zenName)
        jobOut = "%s_%s_%s_%s.out" % (user_script, directionName, diffName,
                                      telType)
        script_args = "%s %s %s %s %s" % (direction, site, diffName, telType,
                                          zenName)

        j = Job()

        j.setInputSandbox([
            user_script, "setupPackageMARS.sh", LFN_file_gammas,
            LFN_file_protons, ROOTmacro, StatFile
        ])
        j.setExecutable(user_script, script_args)
        j.setOutputSandbox([jobOut, "applicationLog.txt"])
        j.setName(jobName)
        Script.gLogger.info(j._toJDL())
        print "Launching %s %s" % (user_script, script_args)
        Dirac().submit(j)
Example #22
            dexit(1)
        sites = result[ 'Value' ]
        j.setDestination(sites)

    if opts.stagein is not None:
        input_stage_files = []
        # we do add. input staging
        files = opts.stagein.split(",")
        for f in files:
            if f.startswith("LFN"):
                input_stage_files.append(f)
            else:
                input_stage_files += extract_file(f)
        for f in input_stage_files:
            if not f.startswith("LFN"):
                gLogger.error("*ERROR* required inputfiles to be defined through LFN, could not find LFN in %s"%f)
                dexit(1)
        j.setInputData(input_stage_files)

    if opts.debug:
        gLogger.notice('*DEBUG* just showing the JDL of the job to be submitted')
        gLogger.notice(j._toJDL())
    
    d = Dirac(True,"myRepo.rep")
    res = d.submit(j)
    if not res['OK']:
        gLogger.error("Error during Job Submission ",res['Message'])
        dexit(1)
    JobID = res['Value']
    gLogger.notice("Your job %s (\"%s\") has been submitted."%(str(JobID),executable))
    
Example #23
def dirLUT(args=None):

    from DIRAC.Interfaces.API.Dirac import Dirac
    from DIRAC.Interfaces.API.Job import Job

    user_script = './dirLUT.sh'

    if (len(args) != 3):
        Script.showHelp()
    direction = args[0]
    zenith = args[1]
    diffuse = args[2]

    if diffuse == "0":
        diffName = "point"
    elif diffuse == "1":
        diffName = "diff"
    else:
        print "Invalid extension definition!"
        Script.showHelp()
        return 1

    if zenith == "40":
        zenName = "40deg"
    elif zenith == "20":
        zenName = "20deg"
    else:
        print "Invalid zenith definition!"
        Script.showHelp()
        return 1

    if direction == "N":
        directionName = "north"
        # deg = "180"
    elif direction == "S":
        directionName = "south"
        # deg = "0"
    else:
        print('Wrong direction. It can only be "N" or "S".')
        Script.showHelp()
        return 1

    listname = './training/gamma_trainLUT_%s_%s_%s.lfns' % (zenName, diffName,
                                                            direction)

    with open(listname) as f:
        totalEntries = sum(1 for _ in f)

    # Number of files used per job
    runN = 20

    runMin = 0
    runMax = totalEntries // runN  # integer division so range() below gets an int

    for i in range(runMin, runMax):
        jobName = "%s_%s_%s_%s_%s" % (user_script, direction, zenName,
                                      diffName, i)
        jobOut = "%s_%s_%s%s.out" % (user_script, directionName, diffName, i)
        script_args = "%s %s %s %s %s" % (direction, zenName, diffName, i,
                                          runN)
        j = Job()
        j.setInputSandbox([
            user_script, listname, "setupPackageMARS.sh", "CheckFileZombie.C"
        ])
        j.setExecutable(user_script, script_args)
        j.setOutputSandbox([jobOut, "applicationLog.txt"])
        j.setName(jobName)
        j.setBannedSites([
            'LCG.MSFG.fr', 'LCG.M3PEC.fr', 'LCG.OBSPM.fr',
            'LCG.UNI-DORTMUND.de', 'LCG.UNIV-LILLE.fr'
        ])
        Script.gLogger.info(j._toJDL())
        print "Submitting job %s" % (script_args)
        Dirac().submit(j)