Example #1
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    base_path = '/vo.cta.in2p3.fr/MC/PROD5/LaPalma/gamma/ctapipe-stage1/2284/Data/000xxx'
    input_data = [
        '%s/gamma_20deg_0deg_run183___cta-prod5-lapalma_desert-2158m-LaPalma-dark.h5'
        % base_path,
        '%s/gamma_20deg_0deg_run184___cta-prod5-lapalma_desert-2158m-LaPalma-dark.h5'
        % base_path,
        '%s/gamma_20deg_0deg_run182___cta-prod5-lapalma_desert-2158m-LaPalma-dark.h5'
        % base_path,
        '%s/gamma_20deg_0deg_run181___cta-prod5-lapalma_desert-2158m-LaPalma-dark.h5'
        % base_path,
        '%s/gamma_20deg_0deg_run176___cta-prod5-lapalma_desert-2158m-LaPalma-dark.h5'
        % base_path
    ]

    job.setInputData(input_data)
    job.setJobGroup('prod5_ctapipe_stage1_merge')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
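All of these examples branch on result['OK']: DIRAC calls return S_OK/S_ERROR style dictionaries instead of raising exceptions. A minimal sketch of the convention, with illustrative values:

# S_OK(x) yields {'OK': True, 'Value': x}; S_ERROR(msg) yields {'OK': False, 'Message': msg}
result = {'OK': True, 'Value': 12345}  # illustrative stand-in for a submitJob result
if result['OK']:
    print('Job ID:', result['Value'])
else:
    print('Failed:', result['Message'])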
Example #2
File: Job.py Project: ahaupt/DIRAC
  def runLocal( self, dirac = None ):
    """ The dirac (API) object is for local submission.
    """

    if dirac is None:
      dirac = Dirac()

    return dirac.submitJob( self, mode = 'local' )
Example #3
File: Job.py Project: Eo300/DIRAC
  def runLocal(self, dirac=None):
    """ The dirac (API) object is for local submission.
    """

    if dirac is None:
      dirac = Dirac()

    return dirac.submitJob(self, mode='local')
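Examples #2 and #3 show the same Job.runLocal helper from two forks of DIRAC; it simply delegates to submitJob with mode='local', which runs the job on the current machine instead of sending it to the WMS. A minimal usage sketch, assuming a configured DIRAC client and a valid proxy:

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Job import Job

job = Job()
job.setExecutable('/bin/echo', arguments='local test')
result = job.runLocal()  # equivalent to Dirac().submitJob(job, mode='local')
print(result)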
Example #4
def submit_WMS(job, infileList):
    """ Submit the job locally or to the WMS
    """
    dirac = Dirac()
    job.setInputData(infileList)
    res = dirac.submitJob(job)
    if res['OK']:
        Script.gLogger.notice('Submission Result: ', res['Value'])
    return res
Example #5
def submit(name,
           job_group,
           task_id,
           input_sandbox,
           output_sandbox,
           executable,
           site=None,
           banned_site=None,
           sub_ids=None):
    sub_ids = sub_ids or []  # avoid the mutable-default-argument pitfall
    dirac = Dirac()

    submit_result = {'backend_job_ids': {}}
    jobInfos = {}

    for run in range((len(sub_ids) + 99) // 100):  # batches of up to 100 sub-jobs
        ids_this_run = sub_ids[run * 100:(run + 1) * 100]
        job_names = ['%s.%s' % (name, sub_id) for sub_id in ids_this_run]
        j = Job()
        j.setName(name)
        j.setExecutable(executable)

        j.setParameterSequence('JobName', job_names, addToWorkflow=True)
        j.setParameterSequence('arguments', ids_this_run, addToWorkflow=True)

        if input_sandbox:
            j.setInputSandbox(input_sandbox)
        if output_sandbox:
            j.setOutputSandbox(output_sandbox)

        if job_group:
            j.setJobGroup(job_group)
        if site:  # set destination to a certain site; list not allowed
            j.setDestination(site)

        if banned_site:
            j.setBannedSites(banned_site)

        result = dirac.submitJob(j)

        if not result['OK']:
            sys.stdout.write('DIRAC job submit error: %s\n' %
                             result['Message'])
            sys.exit(1)

        for sub_id, dirac_id in zip(ids_this_run, result['Value']):
            submit_result['backend_job_ids'][sub_id] = dirac_id
            jobInfos[dirac_id] = {'SubID': sub_id}

    # Register on the Task Manager webapp of IHEPDIRAC
    task = RPCClient('WorkloadManagement/TaskManager')
    taskInfo = {'TaskName': name, 'JobGroup': job_group, 'JSUB-ID': task_id}
    task_result = task.createTask(name, taskInfo, jobInfos)
    task_web_id = task_result['Value']
    submit_result['backend_task_id'] = task_web_id

    return submit_result
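The loop above submits sub-jobs in batches of 100: each batch becomes one parametric job, and setParameterSequence makes result['Value'] a list of DIRAC job IDs, one per sequence entry, which is why the bookkeeping zips the two lists. A stripped-down sketch of the same pattern (the executable name is illustrative):

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Dirac import Dirac
from DIRAC.Interfaces.API.Job import Job

dirac = Dirac()
sub_ids = list(range(250))                    # 250 tasks -> 3 parametric jobs
for start in range(0, len(sub_ids), 100):     # same batches-of-100 idea as above
    batch = sub_ids[start:start + 100]
    j = Job()
    j.setExecutable('process.sh')             # illustrative executable
    j.setParameterSequence('arguments', batch, addToWorkflow=True)
    result = dirac.submitJob(j)
    if result['OK']:
        print(dict(zip(batch, result['Value'])))  # sub_id -> DIRAC job ID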
Example #6
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    job.setJobGroup('Prod5MCPipeNSBJob')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #7
def main():
  Script.registerSwitch("f:", "File=", "Writes job ids to file <value>")
  Script.registerSwitch("r:", "UseJobRepo=", "Use the job repository")
  Script.parseCommandLine(ignoreErrors=True)
  args = Script.getPositionalArgs()

  if len(args) < 1:
    Script.showHelp()

  from DIRAC.Interfaces.API.Dirac import Dirac
  unprocessed_switches = Script.getUnprocessedSwitches()
  use_repo = False
  repo_name = ""
  for sw, value in unprocessed_switches:
    if sw.lower() in ["r", "usejobrepo"]:
      use_repo = True
      repo_name = value
      repo_name = repo_name.replace(".cfg", ".repo")
  dirac = Dirac(use_repo, repo_name)
  exitCode = 0
  errorList = []

  jFile = None
  for sw, value in unprocessed_switches:
    if sw.lower() in ('f', 'file'):
      if os.path.isfile(value):
        print('Appending job ids to existing logfile: %s' % value)
        if not os.access(value, os.W_OK):
          print('Existing logfile %s must be writable by user.' % value)
      jFile = open(value, 'a')

  for jdl in args:

    result = dirac.submitJob(jdl)
    if result['OK']:
      print('JobID = %s' % (result['Value']))
      if jFile is not None:
        # parametric jobs
        if isinstance(result['Value'], list):
          jFile.write('\n'.join(str(p) for p in result['Value']))
          jFile.write('\n')
        else:
          jFile.write(str(result['Value']) + '\n')
    else:
      errorList.append((jdl, result['Message']))
      exitCode = 2

  if jFile is not None:
    jFile.close()

  for error in errorList:
    print("ERROR %s: %s" % error)

  DIRAC.exit(exitCode)
Example #8
def main():
    Script.registerSwitch("f:", "File=", "Writes job ids to file <value>")
    Script.registerSwitch("r:", "UseJobRepo=", "Use the job repository")
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument(["JDL:    Path to JDL file"])
    sws, args = Script.parseCommandLine(ignoreErrors=True)

    from DIRAC.Interfaces.API.Dirac import Dirac

    unprocessed_switches = sws
    use_repo = False
    repo_name = ""
    for sw, value in unprocessed_switches:
        if sw.lower() in ["r", "usejobrepo"]:
            use_repo = True
            repo_name = value
            repo_name = repo_name.replace(".cfg", ".repo")
    dirac = Dirac(use_repo, repo_name)
    exitCode = 0
    errorList = []

    jFile = None
    for sw, value in unprocessed_switches:
        if sw.lower() in ("f", "file"):
            if os.path.isfile(value):
                print("Appending job ids to existing logfile: %s" % value)
                if not os.access(value, os.W_OK):
                    print("Existing logfile %s must be writable by user." %
                          value)
            jFile = open(value, "a")

    for jdl in args:

        result = dirac.submitJob(jdl)
        if result["OK"]:
            print("JobID = %s" % (result["Value"]))
            if jFile is not None:
                # parametric jobs
                if isinstance(result["Value"], list):
                    jFile.write("\n".join(str(p) for p in result["Value"]))
                    jFile.write("\n")
                else:
                    jFile.write(str(result["Value"]) + "\n")
        else:
            errorList.append((jdl, result["Message"]))
            exitCode = 2

    if jFile is not None:
        jFile.close()

    for error in errorList:
        print("ERROR %s: %s" % error)

    DIRAC.exit(exitCode)
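Examples #7 and #8 are two revisions of DIRAC's dirac-wms-job-submit command. Stripped of the switch handling, the core is that submitJob accepts a path to a JDL file as well as a Job object. A sketch, assuming a job.jdl file exists in the working directory:

from DIRAC.Core.Base import Script
Script.parseCommandLine(ignoreErrors=True)
from DIRAC.Interfaces.API.Dirac import Dirac

result = Dirac().submitJob('job.jdl')  # JDL path instead of a Job instance
if result['OK']:
    print('JobID = %s' % result['Value'])
else:
    print('ERROR: %s' % result['Message'])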
Example #9
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    base_path = '/vo.cta.in2p3.fr/MC/PROD5/LaPalma/gamma/sim_telarray/2104/Data/100xxx'
    input_data = [
        '%s/gamma_20deg_180deg_run100298___cta-prod5-lapalma_desert-2158m-LaPalma-dark.simtel.zst'
        % base_path,
        '%s/gamma_20deg_180deg_run100299___cta-prod5-lapalma_desert-2158m-LaPalma-dark.simtel.zst'
        % base_path
    ]

    job.setInputData(input_data)
    job.setJobGroup('EvnDispProd5')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #10
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    base_path = '/vo.cta.in2p3.fr/MC/PROD3/LaPalma/gamma-diffuse/simtel/1600/Data/000xxx'
    input_data = [
        '%s/gamma_20deg_0deg_run100___cta-prod3-demo-2147m-LaPalma-baseline_cone10.simtel.gz'
        % base_path
    ]

    job.setInputData(input_data)
    job.setJobGroup('ctapipe_stage1_prod3')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #11
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    base_path = '/vo.cta.in2p3.fr/MC/PROD5/LaPalma/gamma-diffuse/sim_telarray/2185/Data/000xxx/'
    input_data = [
        '%s/gamma_20deg_0deg_run996___cta-prod5-lapalma_desert-2158m-LaPalma-dark_cone10.simtel.zst'
        % base_path
    ]

    job.setInputData(input_data)
    job.setJobGroup('ctapipe_stage1_prod5')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #12
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    base_path = '/vo.cta.in2p3.fr/MC/PROD5b/LaPalma/proton/sim_telarray/2295/Data/000xxx'
    input_data = [
        '%s/proton_20deg_0deg_run744___cta-prod5b-lapalma_desert-2158m-LaPalma-dark.simtel.zst'
        % base_path
    ]  # CC-IN2P3

    job.setInputData(input_data)
    job.setJobGroup('EvnDispProd5')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #13
def runProd3(args=None):
    """ Simple wrapper to create a Prod3MCUserJob and setup parameters
      from positional arguments given on the command line.
      
      Parameters:
      args -- a list of 3 strings corresponding to job arguments
              runMin runMax input_card
  """
    # get arguments
    runMin = int(args[0])
    runMax = int(args[1])
    input_card = args[2]

    # ## Create Prod3 User Job
    job = Prod3MCUserJob()

    # set package version and corsika input card. to be set before setupWorkflow
    job.setPackage('corsika_simhessarray')
    job.setVersion('2017-04-19')
    job.setInputCard(input_card)
    job.runType = 'corsika'

    # ## setup workflow: set executable and parameters
    job.setupWorkflow()

    # # set run_number as parameter for parametric jobs
    ilist = []
    for run_number in range(runMin, runMax + 1):
        ilist.append(str(run_number))
    job.setParameterSequence('run', ilist)

    # ## set job attributes
    job.setName('corsika')
    job.setInputSandbox([input_card, 'dirac_prod3_corsika_only'])
    job.setOutputSandbox(['*Log.txt'])
    job.setOutputData(['*corsika.gz'], outputPath='corsika_data')

    # # submit job
    dirac = Dirac()
    res = dirac.submitJob(job)
    # debug
    Script.gLogger.info('Submission Result: ', res)
    Script.gLogger.info(job.workflow)

    return res
Example #14
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    base_path = '/vo.cta.in2p3.fr/MC/PROD3/Paranal/gamma/simtel/2066/Data/000xxx'
    input_data = [
        '%s/gamma_20deg_0deg_run100___cta-prod3-demo_desert-2150m-Paranal-baseline.simtel.zst'
        % base_path,
        '%s/gamma_20deg_0deg_run101___cta-prod3-demo_desert-2150m-Paranal-baseline.simtel.zst'
        % base_path
    ]

    job.setInputData(input_data)
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #15
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    base_path = '/vo.cta.in2p3.fr/MC/PROD3/scratch/Paranal/gamma/simtel/954/Data/000xxx'
    # input_data = ['%s/gamma_20deg_0deg_run107___cta-prod3_desert-2150m-Paranal-merged.simtel.gz' % base_path]  # CNAF
    input_data = [
        '%s/gamma_20deg_0deg_run312___cta-prod3-sct_desert-2150m-Paranal-SCT.simtel.gz'
        % base_path
    ]  # CC-IN2P3

    job.setInputData(input_data)
    job.setJobGroup('EvnDispProd5')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #16
def submitWMS(job, infileList):
    """ Submit the job locally or to the WMS  """

    dirac = Dirac()
    job.setParameterSequence('InputData',
                             infileList,
                             addToWorkflow='ParametricInputData')
    job.setOutputData(['*simtel-dst0.gz'], outputPath='read_cta_data')
    job.setName('readctajob')
    # To allow jobs run at other sites than the site where the InputData are located
    #job.setType( 'DataReprocessing' )

    res = dirac.submitJob(job)

    if res['OK']:
        Script.gLogger.info('Submission Result: ', res['Value'])

    return res
Example #17
def submitWMS(args):

    first_line = args[0]

    job = Job()
    dirac = Dirac()

    job.setName('mandelbrot')

    job.setExecutable('git clone https://github.com/bregeon/mandel4ts.git')

    job.setExecutable('./mandel4ts/mandelbrot.py',
                      arguments="-P 0.0005 -M 1000 -L %s -N 200" % first_line)

    job.setOutputData(['data_*.bmp', 'data*.txt'])

    res = dirac.submitJob(job)

    return res
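Note that Example #17 calls setExecutable twice: each call appends a step to the job workflow, and the steps run in order on the worker node. A minimal sketch of the same multi-step idea (commands are illustrative):

from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Dirac import Dirac
from DIRAC.Interfaces.API.Job import Job

job = Job()
job.setName('two_step_demo')
job.setExecutable('/bin/echo', arguments='step one')  # workflow step 1
job.setExecutable('/bin/echo', arguments='step two')  # workflow step 2, runs after step 1
print(Dirac().submitJob(job))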
Example #18
def submit_wms(job):
    """ Submit the job to the WMS
    @todo launch job locally
    """
    dirac = Dirac()
    # base_path = '/vo.cta.in2p3.fr/MC/PROD4/Paranal/proton/corsika/1829/Data/101xxx'
    # input_data = ['%s/run101000_proton_za20deg_azm0deg-paranal-sst.corsika.zst' % base_path,
    #               '%s/run101003_proton_za20deg_azm0deg-paranal-sst.corsika.zst' % base_path,
    #               '%s/run101006_proton_za20deg_azm0deg-paranal-sst.corsika.zst' % base_path]
    base_path = '/vo.cta.in2p3.fr/user/b/bregeon/Paranal/proton/corsika/0000/Data/000xxx'
    input_data = ['%s/run22_proton_za20deg_azm0deg-paranal-sst.corsika.zst' % base_path,
                  '%s/run23_proton_za20deg_azm0deg-paranal-sst.corsika.zst' % base_path]

    job.setInputData(input_data)
    job.setJobGroup('Prod4SimtelSSTJob')
    result = dirac.submitJob(job)
    if result['OK']:
        Script.gLogger.notice('Submitted job: ', result['Value'])
    return result
Example #19
gLogger.setLevel('DEBUG')

cwd = os.path.realpath('.')

dirac = Dirac()

# Simple Hello World job to DIRAC.Jenkins.ch
gLogger.info("\n Submitting hello world job targeting DIRAC.Jenkins.ch")
helloJ = Job()
helloJ.setName("helloWorld-TEST-TO-Jenkins")
helloJ.setInputSandbox([find_all('exe-script.py', '..', '/DIRAC/tests/Workflow/')[0]])
helloJ.setExecutable("exe-script.py", "", "helloWorld.log")
helloJ.setCPUTime(1780)
helloJ.setDestination('DIRAC.Jenkins.ch')
helloJ.setLogLevel('DEBUG')
result = dirac.submitJob(helloJ)
gLogger.info("Hello world job: ", result)
if not result['OK']:
  gLogger.error("Problem submitting job", result['Message'])
  exit(1)

# Simple Hello World job to DIRAC.Jenkins.ch, that needs to be matched by an MP WN
gLogger.info("\n Submitting hello world job targeting DIRAC.Jenkins.ch and a MP WN")
helloJMP = Job()
helloJMP.setName("helloWorld-TEST-TO-Jenkins-MP")
helloJMP.setInputSandbox([find_all('exe-script.py', '..', '/DIRAC/tests/Workflow/')[0]])
helloJMP.setExecutable("exe-script.py", "", "helloWorld.log")
helloJMP.setCPUTime(1780)
helloJMP.setDestination('DIRAC.Jenkins.ch')
helloJMP.setLogLevel('DEBUG')
helloJMP.setNumberOfProcessors(2)
Example #20
    args = visit + ' ' + insidename + ' ' + str(startsensor) + ' ' + str(
        numsensors) + ' ' + str(idx)
    outputname = 'fits_' + visit + '_' + str(idx) + '.tar'

    j.setCPUTime(1209600)
    j.setExecutable('runimsim2.1.sh', arguments=args)
    j.stderr = "std.err"
    j.stdout = "std.out"
    #!!! May need the 2.1i directory here depending on visit number !!!
    j.setInputSandbox([
        "runimsim2.1.sh", "run_imsim_nersc.py",
        "LFN:/lsst/user/j/james.perry/instcats/2.1i/" + instcatname
    ])
    j.setOutputSandbox(["std.out", "std.err"])
    j.setTag(["8Processors"])
    #j.setOutputData([visit + "/" + outputname], outputPath="", outputSE=["IN2P3-CC-disk"])
    j.setOutputData([visit + "/" + outputname],
                    outputPath="",
                    outputSE=["UKI-NORTHGRID-LANCS-HEP-disk"])
    j.setPlatform("AnyPlatform")

    j.setDestination(site)

    jobID = dirac.submitJob(j)
    print("Submitted job to " + site + " as ID " + str(jobID))
    print "Status is:", dirac.status(jobID['JobID'])

    joblistfile.write(str(jobID['JobID']) + '\n')

joblistfile.close()
Example #21
from DIRAC.Core.Base import Script
Script.parseCommandLine()
from DIRAC.Interfaces.API.Job import Job
from DIRAC.Interfaces.API.Dirac import Dirac
dirac = Dirac()
j = Job()
j.setCPUTime(500)
j.setExecutable('ls')
j.setName('testjob')
res = dirac.submitJob(j)
print('Submission Result: ', res['Value'])
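A possible follow-up to the snippet above, as a sketch (assumes the submission succeeded, so res['Value'] holds the new job ID):

job_id = res['Value']
print(dirac.getJobStatus(job_id))  # e.g. {'OK': True, 'Value': {job_id: {'Status': 'Waiting', ...}}}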

Example #22
gLogger.setLevel('DEBUG')

cwd = os.path.realpath('.')

dirac = Dirac()

# Simple Hello World job to DIRAC.Jenkins.ch
gLogger.info("\n Submitting hello world job targeting DIRAC.Jenkins.ch")
helloJ = Job()
helloJ.setName("helloWorld-TEST-TO-Jenkins")
helloJ.setInputSandbox([find_all('exe-script.py', '..', '/DIRAC/tests/Workflow/')[0]])
helloJ.setExecutable("exe-script.py", "", "helloWorld.log")
helloJ.setCPUTime(17800)
helloJ.setDestination('DIRAC.Jenkins.ch')
result = dirac.submitJob(helloJ)
gLogger.info("Hello world job: ", result)
if not result['OK']:
  gLogger.error("Problem submitting job", result['Message'])
  exit(1)

# Simple Hello World job to DIRAC.Jenkins.ch, that needs to be matched by an MP WN
gLogger.info("\n Submitting hello world job targeting DIRAC.Jenkins.ch and a MP WN")
helloJMP = Job()
helloJMP.setName("helloWorld-TEST-TO-Jenkins-MP")
helloJMP.setInputSandbox([find_all('exe-script.py', '..', '/DIRAC/tests/Workflow/')[0]])
helloJMP.setExecutable("exe-script.py", "", "helloWorld.log")
helloJMP.setCPUTime(17800)
helloJMP.setDestination('DIRAC.Jenkins.ch')
helloJMP.setTag('MultiProcessor')
result = dirac.submitJob(helloJMP)  # this should make the difference!
Example #23
  print("Usage: %s <scriptName> <jobName> <nbJobs>" % sys.argv[0])
  sys.exit(1)

scriptName = sys.argv[1]
jobName = sys.argv[2]
nbJobs = int(sys.argv[3])

if not os.path.exists(jobName):
  os.makedirs(jobName)
  os.makedirs("%s/Done"%jobName)
  os.makedirs("%s/Failed"%jobName)
else:
  print "Folder %s exists"%jobName
  sys.exit(1)      

f = open("%s/jobIdList.txt"%jobName, 'w')

for i in range(nbJobs):
  j = Job()
  j.setCPUTime(10000)
  j.setExecutable(scriptName)
  j.setOutputSandbox(['myLog.txt', 'clock.txt', 'time.txt'])
  dirac = Dirac()
  jobID = dirac.submitJob(j)
  realId = jobID.get('JobID')
  f.write("%s\n"%realId)

f.close()
Example #24
def main():
    """
    Launch job on the GRID
    """
    # this thing pilots everything related to the GRID
    dirac = Dirac()

    if switches["output_type"] in "TRAINING":
        print("Preparing submission for TRAINING data")
    elif switches["output_type"] in "DL2":
        print("Preparing submission for DL2 data")
    else:
        print("You have to choose either TRAINING or DL2 as output type!")
        sys.exit()

    # Read configuration file
    cfg = load_config(switches["config_file"])

    # Analysis
    config_path = cfg["General"]["config_path"]
    config_file = cfg["General"]["config_file"]
    mode = cfg["General"]["mode"]  # One mode naw
    particle = cfg["General"]["particle"]
    estimate_energy = cfg["General"]["estimate_energy"]
    force_tailcut_for_extended_cleaning = cfg["General"][
        "force_tailcut_for_extended_cleaning"]

    # Take parameters from the analysis configuration file
    ana_cfg = load_config(os.path.join(config_path, config_file))
    config_name = ana_cfg["General"]["config_name"]
    cam_id_list = ana_cfg["General"]["cam_id_list"]

    # Regressor and classifier methods
    regressor_method = ana_cfg["EnergyRegressor"]["method_name"]
    classifier_method = ana_cfg["GammaHadronClassifier"]["method_name"]

    # Someone might want to create DL2 without score or energy estimation
    if regressor_method in ["None", "none", None]:
        use_regressor = False
    else:
        use_regressor = True

    if classifier_method in ["None", "none", None]:
        use_classifier = False
    else:
        use_classifier = True

    # GRID
    outdir = os.path.join(cfg["GRID"]["outdir"], config_name)
    n_file_per_job = cfg["GRID"]["n_file_per_job"]
    n_jobs_max = cfg["GRID"]["n_jobs_max"]
    model_dir = cfg["GRID"]["model_dir"]
    training_dir_energy = cfg["GRID"]["training_dir_energy"]
    training_dir_classification = cfg["GRID"]["training_dir_classification"]
    dl2_dir = cfg["GRID"]["dl2_dir"]
    home_grid = cfg["GRID"]["home_grid"]
    user_name = cfg["GRID"]["user_name"]
    banned_sites = cfg["GRID"]["banned_sites"]

    # HACK
    if force_tailcut_for_extended_cleaning is True:
        print("Force tail cuts for extended cleaning!!!")

    # Prepare command to launch script
    source_ctapipe = "source /cvmfs/cta.in2p3.fr/software/conda/dev/setupConda.sh"
    source_ctapipe += " && conda activate ctapipe_v0.11.0"

    if switches["output_type"] in "TRAINING":
        execute = "data_training.py"
        script_args = [
            "--config_file={}".format(config_file),
            "--estimate_energy={}".format(str(estimate_energy)),
            "--regressor_config={}.yaml".format(regressor_method),
            "--regressor_dir=./",
            "--outfile {outfile}",
            "--indir ./ --infile_list={infile_name}",
            "--max_events={}".format(switches["max_events"]),
            "--{mode}",
            "--cam_ids {}".format(cam_id_list),
        ]
        output_filename_template = "TRAINING"
    elif switches["output_type"] in "DL2":
        execute = "write_dl2.py"
        script_args = [
            "--config_file={}".format(config_file),
            "--regressor_config={}.yaml".format(regressor_method),
            "--regressor_dir=./",
            "--classifier_config={}.yaml".format(classifier_method),
            "--classifier_dir=./",
            "--outfile {outfile}",
            "--indir ./ --infile_list={infile_name}",
            "--max_events={}".format(switches["max_events"]),
            "--{mode}",
            "--force_tailcut_for_extended_cleaning={}".format(
                force_tailcut_for_extended_cleaning),
            "--cam_ids {}".format(cam_id_list),
        ]
        output_filename_template = "DL2"

    # Make the script save also the full calibrated images if required
    if switches["save_images"] is True:
        script_args.append("--save_images")

    # Make the script print debug information if required
    if switches["debug_script"] is True:
        script_args.append("--debug")

    cmd = [source_ctapipe, "&&", "./" + execute]
    cmd += script_args

    pilot_args_write = " ".join(cmd)

    # For table merging if multiple runs
    pilot_args_merge = " ".join([
        source_ctapipe,
        "&&",
        "./merge_tables.py",
        "--template_file_name",
        "{in_name}",
        "--outfile",
        "{out_name}",
    ])

    prod3b_filelist = dict()
    if estimate_energy is False and switches["output_type"] in "TRAINING":
        prod3b_filelist["gamma"] = cfg["EnergyRegressor"]["gamma_list"]
    elif estimate_energy is True and switches["output_type"] in "TRAINING":
        prod3b_filelist["gamma"] = cfg["GammaHadronClassifier"]["gamma_list"]
        prod3b_filelist["proton"] = cfg["GammaHadronClassifier"]["proton_list"]
    elif switches["output_type"] in "DL2":
        prod3b_filelist["gamma"] = cfg["Performance"]["gamma_list"]
        prod3b_filelist["proton"] = cfg["Performance"]["proton_list"]
        prod3b_filelist["electron"] = cfg["Performance"]["electron_list"]

    # from IPython import embed
    # embed()

    # Split list of files according to storage elements
    with open(prod3b_filelist[particle]) as f:
        filelist = f.readlines()

    filelist = ["{}".format(_.replace("\n", "")) for _ in filelist]
    res = dirac.splitInputData(filelist, n_file_per_job)
    list_run_to_loop_on = res["Value"]
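    # Note (sketch of the behaviour): splitInputData groups the LFNs by
    # storage element into bunches of at most n_file_per_job; each bunch in
    # res["Value"] becomes one GRID job in the submission loop further down.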

    # define a template name for the file that's going to be written out.
    # the placeholder braces are going to get set during the file-loop
    output_filename = output_filename_template
    output_path = outdir
    if estimate_energy is False and switches["output_type"] in "TRAINING":
        output_path += "/{}/".format(training_dir_energy)
        step = "energy"
    if estimate_energy is True and switches["output_type"] in "TRAINING":
        output_path += "/{}/".format(training_dir_classification)
        step = "classification"
    if switches["output_type"] in "DL2":
        if force_tailcut_for_extended_cleaning is False:
            output_path += "/{}/".format(dl2_dir)
        else:
            output_path += "/{}_force_tc_extended_cleaning/".format(dl2_dir)
        step = ""
    output_filename += "_{}.h5"

    # sets all the local files that are going to be uploaded with the job
    # plus the pickled classifier
    # if file name starts with `LFN:`, it will be copied from the GRID
    input_sandbox = [
        # Utility to assign one job to one command...
        os.path.expandvars("$GRID/pilot.sh"),
        os.path.expandvars("$PROTOPIPE/protopipe/"),
        os.path.expandvars("$GRID/merge_tables.py"),
        # python wrapper for the mr_filter wavelet cleaning
        # os.path.expandvars("$PYWI/pywi/"),
        # os.path.expandvars("$PYWICTA/pywicta/"),
        # script that is being run
        os.path.expandvars("$PROTOPIPE/protopipe/scripts/" + execute),
        # Configuration file
        os.path.expandvars(os.path.join(config_path, config_file)),
    ]

    models_to_upload = []
    configs_to_upload = []
    if estimate_energy is True and switches["output_type"] in "TRAINING":
        config_path_template = "LFN:" + os.path.join(home_grid, outdir,
                                                     model_dir, "{}.yaml")
        config_to_upload = config_path_template.format(regressor_method)
        model_path_template = "LFN:" + os.path.join(
            home_grid, outdir, model_dir, "regressor_{}_{}.pkl.gz")
        for cam_id in cam_id_list:

            model_to_upload = model_path_template.format(
                cam_id, regressor_method)  # TBC
            print("The following model(s) will be uploaded to the GRID:")
            print(model_to_upload)
            models_to_upload.append(model_to_upload)

        print(
            "The following configs(s) for such models will be uploaded to the GRID:"
        )
        print(config_to_upload)
        configs_to_upload.append(config_to_upload)
        # input_sandbox.append(model_to_upload)
    elif estimate_energy is False and switches["output_type"] in "TRAINING":
        pass
    else:  # Charge also classifer for DL2
        model_type_list = ["regressor", "classifier"]
        model_method_list = [regressor_method, classifier_method]
        config_path_template = "LFN:" + os.path.join(home_grid, outdir,
                                                     model_dir, "{}.yaml")
        model_path_template = "LFN:" + os.path.join(
            home_grid, outdir, model_dir, "{}_{}_{}.pkl.gz")
        if force_tailcut_for_extended_cleaning is True:
            force_mode = mode.replace("wave", "tail")
            print("################")
            print(force_mode)
        else:
            force_mode = mode

        for idx, model_type in enumerate(model_type_list):

            print(
                "The following configuration file will be uploaded to the GRID:"
            )

            config_to_upload = config_path_template.format(
                model_method_list[idx])
            print(config_to_upload)
            configs_to_upload.append(config_to_upload)  # upload only 1 copy

            print(
                "The following model(s) related to such configuration file will be uploaded to the GRID:"
            )

            for cam_id in cam_id_list:

                if model_type in "regressor" and use_regressor is False:
                    print("Do not upload regressor model on GRID!!!")
                    continue

                if model_type in "classifier" and use_classifier is False:
                    print("Do not upload classifier model on GRID!!!")
                    continue

                model_to_upload = model_path_template.format(
                    model_type_list[idx], cam_id, model_method_list[idx])
                print(model_to_upload)

                models_to_upload.append(model_to_upload)
                # input_sandbox.append(model_to_upload)

    # summary before submitting
    print("\nDEBUG> running as:")
    print(pilot_args_write)
    print("\nDEBUG> with input_sandbox:")
    print(input_sandbox)
    print("\nDEBUG> with output file:")
    print(output_filename.format("{job_name}"))
    print("\nDEBUG> Particles:")
    print(particle)
    print("\nDEBUG> Energy estimation:")
    print(estimate_energy)

    # ########  ##     ## ##    ## ##    ## #### ##    ##  ######
    # ##     ## ##     ## ###   ## ###   ##  ##  ###   ## ##    ##
    # ##     ## ##     ## ####  ## ####  ##  ##  ####  ## ##
    # ########  ##     ## ## ## ## ## ## ##  ##  ## ## ## ##   ####
    # ##   ##   ##     ## ##  #### ##  ####  ##  ##  #### ##    ##
    # ##    ##  ##     ## ##   ### ##   ###  ##  ##   ### ##    ##
    # ##     ##  #######  ##    ## ##    ## #### ##    ##  ######

    # list of files on the GRID SE space
    # not submitting jobs where we already have the output
    batcmd = "dirac-dms-user-lfns --BaseDir {}".format(
        os.path.join(home_grid, output_path))
    result = subprocess.check_output(batcmd, shell=True)
    try:
        grid_filelist = open(result.split()[-1]).read()
    except IOError:
        raise IOError("ERROR> cannot read GRID filelist...")

    # get jobs from today and yesterday...
    days = []
    for i in range(2):  # how many days do you want to look back?
        days.append(
            (datetime.date.today() - datetime.timedelta(days=i)).isoformat())

    # get list of run_tokens that are currently running / waiting
    running_ids = set()
    running_names = []
    for status in ["Waiting", "Running", "Checking"]:
        for day in days:
            try:
                running_ids.update(
                    dirac.selectJobs(status=status, date=day,
                                     owner=user_name)["Value"])
            except KeyError:
                pass

    n_jobs = len(running_ids)
    if n_jobs > 0:
        print("Scanning {} running/waiting jobs... please wait...".format(
            n_jobs))
        for i, job_id in enumerate(running_ids):
            if ((100 * i) // n_jobs) % 5 == 0:
                print("\r{} %".format(((20 * i) // n_jobs) * 5), end="")
            jobname = dirac.getJobAttributes(job_id)["Value"]["JobName"]
            running_names.append(jobname)
        else:
            print("\n... done")

    for bunch in list_run_to_loop_on:

        # for bunch in bunches_of_run:

        # from IPython import embed
        # embed()

        # this selects the `runxxx` part of the first and last file in the run
        # list and joins them with a dash so that we get a nice identifier in
        # the output file name.
        # if there is only one file in the list, use only that one
        # run_token = re.split('_', bunch[+0])[3]  # JLK JLK
        run_token = re.split("_", bunch[0])[3]
        if len(bunch) > 1:
            run_token = "-".join([run_token, re.split("_", bunch[-1])[3]])

        print("-" * 50)
        print("-" * 50)

        # setting output name
        output_filenames = dict()
        if switches["output_type"] in "DL2":
            job_name = "protopipe_{}_{}_{}_{}_{}".format(
                config_name, switches["output_type"], particle, run_token,
                mode)
            output_filenames[mode] = output_filename.format("_".join(
                [particle, mode, run_token]))
        else:
            job_name = "protopipe_{}_{}_{}_{}_{}_{}".format(
                config_name, switches["output_type"], step, particle,
                run_token, mode)
            output_filenames[mode] = output_filename.format("_".join(
                [step, particle, mode, run_token]))

        # if job already running / waiting, skip
        if job_name in running_names:
            print("\n WARNING> {} still running\n".format(job_name))
            continue

        print("Output file name: {}".format(output_filenames[mode]))

        # if file already in GRID storage, skip
        # (you cannot overwrite it there, delete it and resubmit)
        # (assumes tail and wave will always be written out together)
        already_exist = False
        file_on_grid = os.path.join(output_path, output_filenames[mode])
        print("DEBUG> check for existing file on GRID...")
        if file_on_grid in grid_filelist:
            print("\n WARNING> {} already on GRID SE\n".format(job_name))
            continue

        if n_jobs_max == 0:
            print("WARNING> maximum number of jobs to submit reached")
            print("WARNING> breaking loop now")
            break
        else:
            n_jobs_max -= 1

        j = Job()

        # runtime in seconds times 8 (CPU normalisation factor)
        j.setCPUTime(6 * 3600 * 8)
        j.setName(job_name)
        j.setInputSandbox(input_sandbox)

        if banned_sites:
            j.setBannedSites(banned_sites)

        # Add simtel files as input data
        j.setInputData(bunch)

        for run_file in bunch:
            file_token = re.split("_", run_file)[3]

            # wait for a random number of seconds (up to five minutes) before
            # starting to add a bit more entropy in the starting times of the
            # dirac queries.
            # if too many jobs try in parallel to access the SEs,
            # the interface crashes
            # #sleep = random.randint(0, 20)  # seconds
            # #j.setExecutable('sleep', str(sleep))

            # JLK: Try to stop doing that
            # consecutively downloads the data files, processes them,
            # deletes the input
            # and goes on to the next input file;
            # afterwards, the output files are merged
            # j.setExecutable('dirac-dms-get-file', "LFN:" + run_file)

            # source the miniconda ctapipe environment and
            # run the python script with all its arguments
            if switches["output_type"] in "DL2":
                output_filename_temp = output_filename.format("_".join(
                    [particle, mode, file_token]))
            if switches["output_type"] in "TRAINING":
                output_filename_temp = output_filename.format("_".join(
                    [step, particle, mode, file_token]))
            j.setExecutable(
                "./pilot.sh",
                pilot_args_write.format(
                    outfile=output_filename_temp,
                    infile_name=os.path.basename(run_file),
                    mode=mode,
                ),
            )

            # remove the current file to clear space
            j.setExecutable("rm", os.path.basename(run_file))

        # simple `ls` for good measure
        j.setExecutable("ls", "-lh")

        # if there is more than one file per job, merge the output tables
        if len(bunch) > 1:
            names = []

            names.append(("*_{}_".format(particle), output_filenames[mode]))

            for in_name, out_name in names:
                print("in_name: {}, out_name: {}".format(in_name, out_name))
                j.setExecutable(
                    "./pilot.sh",
                    pilot_args_merge.format(in_name=in_name,
                                            out_name=out_name),
                )

                print("DEBUG> args append: {}".format(
                    pilot_args_merge.format(in_name=in_name,
                                            out_name=out_name)))

        bunch.extend(models_to_upload)
        bunch.extend(configs_to_upload)
        j.setInputData(bunch)
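        # (Assumption: this second setInputData call replaces the one above,
        # so the job now stages the models/configs alongside the simtel files.)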

        print("Input data set to job = {}".format(bunch))

        outputs = []
        outputs.append(output_filenames[mode])
        print("Output file path: {}{}".format(output_path,
                                              output_filenames[mode]))

        j.setOutputData(outputs, outputSE=None, outputPath=output_path)

        # check if we should somehow stop doing what we are doing
        if switches["dry"] is True:
            print("\nThis is a DRY RUN! -- NO job has been submitted!")
            print("Name of the job: {}".format(job_name))
            print("Name of the output file: {}".format(outputs))
            print("Output path from GRID home: {}".format(output_path))
            break

        # this sends the job to the GRID and uploads all the
        # files into the input sandbox in the process
        print("\nSUBMITTING job with the following INPUT SANDBOX:")
        print(input_sandbox)
        print("Submission RESULT: {}\n".format(dirac.submitJob(j)["Value"]))

        # break if this is only a test submission
        if switches["test"] is True:
            print("This is a TEST RUN! -- Only ONE job will be submitted!")
            print("Name of the job: {}".format(job_name))
            print("Name of the output file: {}".format(outputs))
            print("Output path from GRID home: {}".format(output_path))
            break

        # since there are two nested loops, need to break again
        if switches["test"] is True:
            break

    try:
        os.remove("datapipe.tar.gz")
        os.remove("modules.tar.gz")
    except OSError:
        pass

    # Upload analysis configuration file for provenance

    SE_LIST = ['CC-IN2P3-USER', 'DESY-ZN-USER', 'CNAF-USER', 'CEA-USER']
    analysis_config_local = os.path.join(config_path, config_file)
    # the configuration file is uploaded to the data directory because
    # the training samples (as well as their cleaning settings) are independent
    analysis_config_dirac = os.path.join(home_grid, output_path, config_file)
    print("Uploading {} to {}...".format(analysis_config_local,
                                         analysis_config_dirac))

    if switches["dry"] is False:
        # Upload this file to all Dirac Storage Elements in SE_LIST
        for se in SE_LIST:
            # the uploaded config file overwrites any old copy
            ana_cfg_upload_cmd = "dirac-dms-add-file -f {} {} {}".format(
                analysis_config_dirac, analysis_config_local, se)
            ana_cfg_upload_result = subprocess.check_output(ana_cfg_upload_cmd,
                                                            shell=True)
            print(ana_cfg_upload_result)
    else:
        print("This is a DRY RUN! -- analysis.yaml has NOT been uploaded.")

    print("\nall done -- exiting now")
    exit()
Example #25
  def submitJob(self):
    dirac = Dirac()
    res = dirac.submitJob(self.__job)
    gLogger.notice('Job submitted: {0}'.format(res["Value"]))
    return res
Example #26
from DIRAC.Interfaces.API.Job import Job
from DIRAC.Interfaces.API.Dirac import Dirac

import pprint

dirac = Dirac()
j = Job()

j.setCPUTime(500)
j.setExecutable('/bin/echo hello')
j.setExecutable('/bin/hostname')
j.setExecutable('/bin/echo hello again')
j.setName('API')

result = dirac.submitJob(j)
print('Submission Result: ')
pprint.pprint(result)

jobid = result['JobID']

# print job id to file for future reference
joblog = open("jobid.log", "a")
joblog.write(str(jobid) + '\n')
joblog.close()

# to interactively check on job status do:
# dirac-wms-job-status -f jobid.log
print "\nThe current status of this job is:"
pprint.pprint(dirac.status(jobid))
Example #27
dirac = Dirac(use_repo, repo_name)
exitCode = 0
errorList = []

jFile = None
for sw, value in unprocessed_switches:
  if sw.lower() in ('f', 'file'):
    if os.path.isfile(value):
      print('Appending job ids to existing logfile: %s' % value)
      if not os.access(value, os.W_OK):
        print('Existing logfile %s must be writable by user.' % value)
    jFile = open(value, 'a')

for jdl in args:

  result = dirac.submitJob(jdl)
  if result['OK']:
    print('JobID = %s' % (result['Value']))
    if jFile is not None:
      # parametric jobs
      if isinstance(result['Value'], list):
        jFile.write('\n'.join(str(p) for p in result['Value']))
        jFile.write('\n')
      else:
        jFile.write(str(result['Value']) + '\n')
  else:
    errorList.append((jdl, result['Message']))
    exitCode = 2

if jFile is not None:
  jFile.close()
Example #28
diracJob.setName(jobName)

# Set the program/executable, arguments, logFile, ...
diracJob.setExecutable('echo', arguments='\"Hello world!\"')

# multiple executables can be set/appended
# diracJob.setExecutable('ls', arguments='-l')
# diracJob.setExecutable(executable, arguments='\"hello again\"')

# Set the job length, but not needed in this example
diracJob.setCPUTime(500)

print('submitting job', jobName)
dirac = Dirac()
result = dirac.submitJob(diracJob)
print('Submission Result: ', result)

# try to create job id file
try:
    jid = 'JobID'
    if jid in result:
        jid_file = open('%s.jid' % (jobName), 'w')
        jid_file.write('%s\n' % (str(result[jid]).strip()))
        jid_file.close()
    else:
        print('Unable to create jid file for this job', jobName)
except Exception as exception:
    print(str(exception))
    print('Unable to create jid file for this job', jobName)
Example #29
class CEBaseTest(TestBase):
    """
    CEBaseTest is base class for all the CE test classes. Real  CE test should
    implement its _judge method.
  """
    def __init__(self, args=None, apis=None):
        super(CEBaseTest, self).__init__(args, apis)

        self.timeout = self.args.get('timeout', 1800)
        self.vo = self.args.get('VO')
        self.testType = self.args['TestType']
        self.executable = self.args['executable']
        self.__logPath = '/opt/dirac/work/ResourceStatus/SAMTestAgent/SAM/log'
        self.__scriptPath = '/opt/dirac/pro/IHEPDIRAC/ResourceStatusSystem/SAM/sam_script'

        if 'WMSAdministrator' in self.apis:
            self.wmsAdmin = self.apis['WMSAdministrator']
        else:
            self.wmsAdmin = RPCClient('WorkloadManagement/WMSAdministrator')

        if 'Dirac' in self.apis:
            self.dirac = self.apis['Dirac']
        else:
            self.dirac = Dirac()

    def doTest(self, elementDict):
        """
      submit test job to the specified ce or cloud..
    """

        elementName = elementDict['ElementName']
        elementType = elementDict['ElementType']
        vos = elementDict['VO']

        site = None
        ce = None
        if elementType == 'ComputingElement':
            ce = elementName
        if elementType == 'CLOUD':
            site = elementName

        if self.vo:
            submitVO = self.vo
        elif vos:
            submitVO = vos[0]
        else:
            submitVO = 'bes'

        submissionTime = datetime.utcnow().replace(microsecond=0)
        sendRes = self.__submit(site, ce, submitVO)
        if not sendRes['OK']:
            return sendRes
        jobID = sendRes['Value']

        result = {
            'Result': {
                'JobID': jobID,
                'VO': submitVO,
                'SubmissionTime': submissionTime
            },
            'Finish': False
        }

        return S_OK(result)

    def __submit(self, site, CE, vo):
        """
      set the job and submit.
    """

        job = Job()
        job.setName(self.testType)
        job.setJobGroup('CE-Test')
        job.setExecutable(self.executable)
        job.setInputSandbox('%s/%s' % (self.__scriptPath, self.executable))
        if site and not CE:
            job.setDestination(site)
        if CE:
            job.setDestinationCE(CE)

        LOCK.acquire()
        proxyPath = BESUtils.getProxyByVO('zhangxm', vo)
        if not proxyPath['OK']:
            LOCK.release()
            return proxyPath
        proxyPath = proxyPath['Value']
        oldProxy = os.environ.get('X509_USER_PROXY')
        os.environ['X509_USER_PROXY'] = proxyPath
        result = self.dirac.submitJob(job)
        if oldProxy is None:
            del os.environ['X509_USER_PROXY']
        else:
            os.environ['X509_USER_PROXY'] = oldProxy
        LOCK.release()

        return result

    def getTestResult(self, elementName, vo, jobID, submissionTime):
        """
      download output sandbox and judge the test status from the log file.
    """

        isFinish = False

        res = self.__getJobOutput(jobID, vo)
        if not res['OK']:
            return res
        output = res['Value']
        status = res['Status']

        resDict = {
            'CompletionTime': None,
            'Status': None,
            'Log': None,
            'ApplicationTime': None
        }
        utcNow = datetime.utcnow().replace(microsecond=0)

        if output:
            isFinish = True
            resDict['CompletionTime'] = utcNow
            log = output['Log']
            if not output['Download']:
                resDict['Status'] = 'Unknown'
                resDict['Log'] = 'Fail to download log file for job %s: %s' % (
                    jobID, log)
            else:
                resDict['Log'] = log
                resDict['Status'] = self._judge(log)
                resDict['ApplicationTime'] = self.__getAppRunningTime(log)

        else:
            if utcNow - submissionTime >= timedelta(seconds=self.timeout):
                isFinish = True
                if elementName.split('.')[0] == 'CLOUD':
                    site = elementName
                else:
                    site = BESUtils.getSiteForCE(elementName)
                jobCount = self.wmsAdmin.getSiteSummaryWeb({'Site': site}, [],
                                                           0, 0)
                if not jobCount['OK']:
                    return jobCount
                params = jobCount['Value']['ParameterNames']
                records = jobCount['Value']['Records'][0]
                run = records[params.index('Running')]
                done = records[params.index('Done')]
                if status == 'Waiting' and run == 0 and done == 0:
                    resDict['Status'] = 'Bad'
                    resDict['Log'] = ('The test job has been waiting for %d seconds, '
                                      'but there are no running or done jobs at this '
                                      'site.' % self.timeout)
                else:
                    if run != 0:
                        resDict['Status'] = 'Busy'
                        resDict['Log'] = ('Site %s is too busy to execute this test '
                                          'job; job status is %s' % (site, status))
                    else:
                        resDict['Status'] = 'Unknown'
                        resDict['Log'] = ('Test did not complete within the timeout '
                                          'of %d seconds; job status is %s'
                                          % (self.timeout, status))
                self.dirac.killJob(jobID)

        if not isFinish:
            return S_OK()
        else:
            return S_OK(resDict)

    def __getJobOutput(self, jobID, vo):
        status = self.dirac.getJobStatus(jobID)
        if not status['OK']:
            return status
        status = status['Value'][jobID]['Status']

        if status in ('Done', 'Failed'):
            LOCK.acquire()
            proxyPath = BESUtils.getProxyByVO('zhangxm', vo)
            if not proxyPath['OK']:
                LOCK.release()
                return proxyPath
            proxyPath = proxyPath['Value']
            oldProxy = os.environ.get('X509_USER_PROXY')
            os.environ['X509_USER_PROXY'] = proxyPath
            outputRes = self.dirac.getOutputSandbox(jobID, self.__logPath)
            if oldProxy is None:
                del os.environ['X509_USER_PROXY']
            else:
                os.environ['X509_USER_PROXY'] = oldProxy
            LOCK.release()

            if not outputRes['OK']:
                ret = S_OK({'Download': False, 'Log': outputRes['Message']})
            else:
                try:
                    logfile = open(
                        '%s/%d/Script1_CodeOutput.log' %
                        (self.__logPath, jobID), 'r')
                    log = logfile.read()
                    logfile.close()
                except IOError:
                    raise
                os.system('rm -rf %s/%d' % (self.__logPath, jobID))
                ret = S_OK({'Download': True, 'Log': log})
        else:
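The acquire/swap/restore dance around X509_USER_PROXY in Example #29's __submit and __getJobOutput could be factored into a context manager. A sketch of that refactoring (not part of IHEPDIRAC, just an illustration of the same technique):

import os
from contextlib import contextmanager

@contextmanager
def temporary_proxy(proxy_path):
    """Point X509_USER_PROXY at proxy_path, restoring the previous value on exit."""
    old = os.environ.get('X509_USER_PROXY')
    os.environ['X509_USER_PROXY'] = proxy_path
    try:
        yield
    finally:
        if old is None:
            del os.environ['X509_USER_PROXY']
        else:
            os.environ['X509_USER_PROXY'] = old

# Usage sketch:
#     with temporary_proxy(proxyPath):
#         result = self.dirac.submitJob(job)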