import os
import sys

import arc


def example():
    # Creating a UserConfig object with the user's proxy
    # and the path to the trusted CA certificates
    uc = arc.UserConfig()
    uc.ProxyPath("/tmp/x509up_u%s" % os.getuid())
    uc.CACertificatesDirectory("/etc/grid-security/certificates")

    # Create a new job object with a given JobID
    job = arc.Job()
    job.JobID = "https://piff.hep.lu.se:443/arex/hYDLDmyxvUfn5h5iWqkutBwoABFKDmABFKDmIpHKDmYBFKDmtRy9En"
    job.Flavour = "ARC1"
    job.ServiceInformationURL = job.JobStatusURL = job.JobManagementURL = arc.URL("https://piff.hep.lu.se:443/arex")

    sys.stdout.write("Get job information from the computing element...\n")
    # Put the job into a JobSupervisor and update its information
    job_supervisor = arc.JobSupervisor(uc, [job])
    job_supervisor.Update()

    sys.stdout.write("Downloading results...\n")
    # Prepare a list for storing the directories of the downloaded job
    # results (in case there is more than one job)
    downloadeddirectories = arc.StringList()
    # Start retrieving the results of all the selected jobs
    #   into the "/tmp" directory (first argument),
    #   using the job ID and not the job name as the name of the
    #   subdirectory (second argument: usejobname = False),
    #   without overwriting existing directories with the same name
    #   (third argument: force = False),
    #   collecting the downloaded directories into the variable
    #   "downloadeddirectories" (fourth argument)
    success = job_supervisor.Retrieve("/tmp", False, False, downloadeddirectories)
    if not success:
        sys.stdout.write("Downloading results failed.\n")
    for downloadeddirectory in downloadeddirectories:
        sys.stdout.write("Job results were downloaded to %s\n" % str(downloadeddirectory))
        sys.stdout.write("Contents of the directory:\n")
        for filename in os.listdir(downloadeddirectory):
            sys.stdout.write("  %s\n" % filename)

import json


def workspec2arcjob(workspec):
    '''Convert WorkSpec.workAttributes to arc.Job object'''
    job = arc.Job()
    try:
        wsattrs = workspec.workAttributes['arcjob']
        proxyrole = workspec.workAttributes['proxyrole']
    except:
        # Job was not submitted yet
        return (job, arc.Time(), None)

    for attr in dir(job):
        if attr not in wsattrs or attr == 'CreationTime':
            continue

        attrtype = type(getattr(job, attr))
        # Some object types need special treatment
        if attrtype == arc.StringList:
            strlist = arc.StringList()
            for item in wsattrs[attr].split('|'):
                strlist.append(str(item))
            setattr(job, attr, strlist)
        elif attrtype == arc.StringStringMap:
            ssm = arc.StringStringMap()
            for (k, v) in json.loads(wsattrs[attr]).items():
                ssm[str(k)] = str(v)
            setattr(job, attr, ssm)
        else:
            setattr(job, attr, attrtype(str(wsattrs[attr])))
    return (job, arc.Time(str(wsattrs['ModificationTime'])), proxyrole)

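# Hypothetical usage sketch for workspec2arcjob() above, relying on the arc
# and json imports from the snippets above.  The FakeWorkSpec stand-in, its
# attribute values and the 'production' proxy role are illustrative
# assumptions; in a real setup the WorkSpec object carries these
# workAttributes after submission.
class FakeWorkSpec(object):
    def __init__(self):
        self.workAttributes = {
            'arcjob': {
                'JobID': 'https://piff.hep.lu.se:443/arex/hYDLDmyxvUfn5h5iWqkutBwoABFKDmABFKDmIpHKDmYBFKDmtRy9En',
                'JobManagementURL': 'https://piff.hep.lu.se:443/arex',
                'ModificationTime': '2024-01-01T12:00:00Z',
            },
            'proxyrole': 'production',
        }


def example_workspec_roundtrip(userconfig):
    (job, modtime, proxyrole) = workspec2arcjob(FakeWorkSpec())
    # The reconstructed arc.Job can be fed straight into a JobSupervisor,
    # just like the explicitly constructed job in example() above
    job_supervisor = arc.JobSupervisor(userconfig, [job])
    job_supervisor.Update()
    return (modtime, proxyrole)
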
def _db2job(self, dbinfo):
    '''
    Convert a dictionary of DB key/value pairs into an arc.Job object
    '''
    j = arc.Job()
    for attr in self.jobattrs:
        if attr not in dbinfo or dbinfo[attr] is None:
            continue

        # Some object types need special treatment
        if self.jobattrs[attr] == arc.StringList:
            l = arc.StringList()
            for item in dbinfo[attr].encode('utf-8').split('|'):
                l.append(item)
            setattr(j, attr, l)
            continue
        if self.jobattrs[attr] == arc.StringStringMap:
            m = arc.StringStringMap()
            d = eval(dbinfo[attr])
            if not isinstance(d, dict):
                continue
            for (k, v) in d.items():
                m[k] = v
            setattr(j, attr, m)
            continue

        setattr(j, attr, self.jobattrs[attr](str(dbinfo[attr])))
    return j

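# A possible way to build the self.jobattrs mapping consumed by _db2job()
# above: attribute name -> type, obtained by inspecting a fresh arc.Job.
# The exact set of accepted types listed here is an assumption; the real
# class may restrict or extend it.
def build_jobattrs():
    jobattrs = {}
    j = arc.Job()
    for attr in dir(j):
        if attr.startswith('_'):
            continue
        attrtype = type(getattr(j, attr))
        # Keep only plain data members that can be rebuilt from a string
        # or that _db2job() treats specially
        if attrtype in (int, str, arc.URL, arc.Time, arc.Period,
                        arc.StringList, arc.StringStringMap):
            jobattrs[attr] = attrtype
    return jobattrs
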
def fetchAll(self, jobs):
    # Get all outputs using Job Supervisor
    job_supervisor = arc.JobSupervisor(self.uc, list(jobs.values()))
    job_supervisor.Update()
    dirs = arc.StringList()
    job_supervisor.Retrieve(self.tmpdir, False, False, dirs)

    return (list(job_supervisor.GetIDsProcessed()), list(job_supervisor.GetIDsNotProcessed()))

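# Hypothetical caller for fetchAll() above.  The fetcher object, the shape
# of the jobs dict (some local id -> arc.Job) and the split into fetched and
# failed jobs are illustrative assumptions.
def example_fetch(fetcher, jobs):
    (processed, notprocessed) = fetcher.fetchAll(jobs)
    fetched = {}
    failed = {}
    for (localid, job) in jobs.items():
        if job.JobID in notprocessed:
            failed[localid] = job
        elif job.JobID in processed:
            fetched[localid] = job
    return (fetched, failed)
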
def resetJobs(self, jobstoreset):
    '''
    Empty all StringLists in jobs so that when they are updated they do not
    contain duplicate values, since ARC always appends to these lists.
    '''
    emptylist = arc.StringList()
    j = arc.Job()
    attrstoreset = [attr for attr in dir(j) if type(getattr(j, attr)) == arc.StringList]

    for jobs in jobstoreset.values():
        for job in jobs:
            for attr in attrstoreset:
                setattr(job[2], attr, emptylist)

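# Hypothetical shape of the jobstoreset argument expected by resetJobs()
# above: a mapping (e.g. per site) to lists of job tuples in which index 2
# holds the arc.Job object.  The tuple layout and the manager/arcjob names
# are illustrative assumptions.
def example_reset(manager, arcjob):
    jobstoreset = {
        'somesite': [
            # (local db id, application job id, arc.Job)
            (1, 'appjob-001', arcjob),
        ],
    }
    manager.resetJobs(jobstoreset)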