def _init(self):
    """
    Load (once per env_var) the XMLSummary 'schema' and 'summary' modules
    from the summary installation, parse self.file and store the resulting
    Summary object in self.data.

    Raises:
        GangaException: if env_var/file are unset or self.file is missing.
    """
    global xml_summary
    global xml_schema
    if not self.env_var:
        raise GangaException('XMLSummary env not set!')
    if not self.file:
        raise GangaException('File not specified!')
    if not os.path.exists(self.file):
        raise GangaException('%s does not exist!' % self.file)
    p = self._xmlPath()
    v = self.env_var
    # 'has_key' is deprecated (and gone in Python 3); use 'in' instead.
    if v not in xml_schema:
        # Force a fresh import of schema/summary for this version by dropping
        # any previously cached module of the same name.
        if 'schema' in sys.modules:
            del sys.modules['schema']
        xml_schema[v] = imp.load_source('schema', p + '/schema.py')
        if 'summary' in sys.modules:
            del sys.modules['summary']
        xml_summary[v] = imp.load_source('summary', p + '/summary.py')
        xml_summary[v].__schema__ = xml_schema[v]
    # Renamed from 'sum' to avoid shadowing the builtin.
    summary_obj = xml_summary[v].Summary(self._xmlSchema())
    summary_obj.parse(self.file)
    self.data = summary_obj
    # Leave sys.modules clean so other components can import their own
    # 'schema'/'summary' modules without clashing.
    if 'schema' in sys.modules:
        del sys.modules['schema']
    if 'summary' in sys.modules:
        del sys.modules['summary']
    return
def mergefiles(self, file_list, output_file):
    """
    Merge the XML summary files in *file_list* into *output_file* by running
    a small generated Python script in a subprocess.

    Raises:
        GangaException: if env_var is unset or the merge fails/produces no file.
    """
    if not self.env_var:
        raise GangaException('XMLSummary env not set!')
    # mkstemp instead of the insecure, deprecated mktemp; the script file is
    # now removed again once the subprocess has finished.
    fd, script_name = tempfile.mkstemp('.py')
    try:
        dummy = GaudiXMLSummary()
        dummy.env_var = self.env_var
        # write py script
        script = os.fdopen(fd, 'w')
        try:
            script.write('import sys\n')
            script.write('sys.path.append("%s") \n' % dummy._xmlPath())
            script.write('import summary \n')
            script.write('sum = summary.Merge(%s,"%s") \n'
                         % (str(file_list), dummy._xmlSchema()))
            script.write('sum.write("%s") \n' % output_file)
        finally:
            script.close()
        # run it
        proc = subprocess.Popen(['python', script_name])
        proc.wait()
        rc = proc.poll()
    finally:
        os.unlink(script_name)
    if rc != 0:
        raise GangaException('Failed to merge XML summary file!')
    if not os.path.exists(output_file):
        raise GangaException('Failed to merge XML summary file!')
def buildGangaTarget(self):
    """
    Run the project's make target that produces the ganga input sandbox for
    self.directory, rename the produced file to the expected sandbox name
    and return its absolute path.

    Raises:
        GangaException: if the build directory or the built file is missing.
    """
    logger.info(
        "Make-ing target '%s' (This may take a few minutes depending on the size of your project)" % GaudiExec.build_target)
    # Cleaning (e.g. 'make clean') is left to the user; doing it here could
    # trigger CMake problems.
    self.execCmd('make %s' % GaudiExec.build_target)

    build_dir = path.join(self.directory, 'build.%s' % self.platform, 'ganga')
    if not path.isdir(build_dir):
        raise GangaException("Target Path: %s NOT found!" % build_dir)

    built_file = path.join(build_dir, '%s' % GaudiExec.build_dest)
    if not path.isfile(built_file):
        raise GangaException("Target File: %s NOT found!" % built_file)

    final_file = path.join(build_dir, GaudiExec.cmake_sandbox_name)
    rename(built_file, final_file)
    if not path.isfile(final_file):
        raise GangaException("Wanted Target File: %s NOT found" % final_file)

    logger.info("Built %s" % final_file)
    return final_file
def __init__(self, repo, what):
    """
    Record a severe repository error: remember the repository and message,
    log the problem, disable Ganga's internal services and initialise the
    GangaException base.
    """
    self.repository = repo
    self.what = what
    logger.error("A severe error occurred in the Repository '%s': %s" % (repo.registry.name, what))
    logger.error('If you believe the problem has been solved, type "reactivate()" to re-enable ')
    disableInternalServices()
    GangaException.__init__(self, what)
def __init__(self, repo=None, what=''):
    """
    Report a severe repository error: log it, disable Ganga's internal
    services and shut down the repository runtime.

    Args:
        repo: repository object in which the error occurred (may be None)
        what (str): description of the error
    """
    GangaException.__init__(self, what)
    self.what = what
    self.repository = repo
    # repo defaults to None, so the registry-name lookup must be guarded
    # (the original crashed with AttributeError when repo was omitted).
    repo_name = repo.registry.name if repo is not None else '<unknown>'
    logger.error("A severe error occurred in the Repository '%s': %s" % (repo_name, what))
    logger.error('If you believe the problem has been solved, type "reactivate()" to re-enable ')
    try:
        from Ganga.Core.InternalServices.Coordinator import disableInternalServices
        disableInternalServices()
        logger.error("Shutting Down Repository_runtime")
        from Ganga.Runtime import Repository_runtime
        Repository_runtime.shutdown()
    except Exception:
        # narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed
        logger.error("Unable to disable Internal services, they may have already been disabled!")
def asksParameter(parameter):
    '''Interactive method requesting user the value of each parameter
    per session (FastSim, FullSim, Analysis)'''
    if parameter['customValue'] and len(parameter['values']) == 0:
        # free-text parameter: no predefined choices at all
        value = raw_input('\nEnter %s: ' % parameter['label'])
    elif not parameter['customValue'] and len(parameter['values']) == 0:
        raise GangaException('Invalid rule (customValue:False and values=0).')
    else:
        # build a small id/value table the user can pick from
        table = list()
        # enumerate replaces the original hand-rolled counter
        for i, value in enumerate(parameter['values']):
            table.append({'id': i, 'value': value})
        if parameter['customValue']:
            # extra last row allowing a free-text entry
            table.append({'id': len(table), 'value': 'Enter a custom value'})

        print('\nChoose %s:' % parameter['label'])
        column_names = ('id', 'value')
        print(utils.format_dict_table(table, column_names))
        index = utils.getIndex(maxExclusive=len(table))

        # the last row (if customValue) means "type your own value"
        if parameter['customValue'] and index == len(table) - 1:
            value = raw_input('Custom value: ')
        else:
            value = table[index]['value']

    # parameter insertion in dictionary. It will be subsequently
    # inserted into dataset analysis bookkeeping table, hstore field
    new_dataset['parameters'][parameter['name']] = value

    return value
def master_resubmit(self, rjobs):
    '''Resubmit the master job to the grid'''
    profiler = ElapsedTimeProfiler(getLogger(name='Profile.LCG'))
    profiler.start()
    job = self.getJobObject()
    ick = False
    if not job.master and len(job.subjobs) == 0:
        # case 1: master job normal resubmission
        logger.debug('rjobs: %s' % str(rjobs))
        logger.debug('mode: master job normal resubmission')
        ick = IBackend.master_resubmit(self, rjobs)
    elif job.master:
        # case 2: individual subjob resubmission
        logger.debug('mode: individual subjob resubmission')
        ick = IBackend.master_resubmit(self, rjobs)
    else:
        # case 3: master job bulk resubmission
        logger.debug('mode: master job resubmission')
        ick = self.master_bulk_resubmit(rjobs)
    if not ick:
        # fixed: the old message said 'bulk submission' although this method
        # handles (re)submission and the failure may come from any case above
        raise GangaException('ARC resubmission failure')
    profiler.check('job re-submission elapsed time')
    return ick
def strip_filename(name):
    """
    Return *name* with a leading 'LFN:' prefix (case-insensitive) removed.

    Raises:
        GangaException: if *name* starts with 'PFN:' — such strings belong
        to PhysicalFile, not LogicalFile.
    """
    prefix = name[0:4].upper() if len(name) >= 4 else ''
    if prefix == 'PFN:':
        msg = 'Can not create LogicalFile from string that begins w/ "PFN:".'\
              ' You probably want to create a PhysicalFile.'
        raise GangaException(msg)
    if prefix == 'LFN:':
        return name[4:]
    return name
def __sbcurrent(self):
    '''Reads file .sbcurrent in user software directory to find session
    name and software version. If it can not find them, throws an exception.'''
    j = self.getJobObject()
    line = None
    try:
        sbcurrent = os.path.join(j.application.software_dir, '.sbcurrent')
        f = open(sbcurrent)
        try:
            # first line holds "session/version"
            line = f.read().splitlines()[0]
        finally:
            f.close()
    except Exception:
        # narrowed from a bare 'except:' (which also caught KeyboardInterrupt);
        # a missing/unreadable/empty file is reported via the check below
        pass
    if line is None or len(line) == 0:
        raise GangaException(
            'Unable to find software and the version in .sbcurrent')
    (self.session, self.soft_version) = line.split('/')
    logger.info('Found in .sbcurrent: %s %s' % (self.session, self.soft_version))
def replicate(self, destSE=''):
    """
    Replicate every DiracFile in this dataset to *destSE*; files that fail
    are retried once. Called with no argument it delegates to
    DiracFile().replicate('') to print the list of valid SEs.

    Raises:
        GangaException: if the dataset contains no LFNs.
    """
    if not destSE:
        from GangaDirac.Lib.Files.DiracFile import DiracFile
        DiracFile().replicate('')
        return
    if not self.hasLFNs():
        raise GangaException('Cannot replicate dataset w/ no LFNs.')

    failed = []
    for dataset_file in self.files:
        if not isDiracFile(dataset_file):
            continue
        try:
            dataset_file.replicate(destSE=destSE)
        except Exception as err:
            logger.warning('Replication error for file %s (will retry in a bit).' % dataset_file.lfn)
            logger.warning("Error: %s" % str(err))
            failed.append(dataset_file)

    # one retry pass for the files that failed the first time
    for dataset_file in failed:
        try:
            dataset_file.replicate(destSE=destSE)
        except Exception as err:
            logger.warning('2nd replication attempt failed for file %s. (will not retry)' % dataset_file.lfn)
            logger.warning(str(err))
def do_collective_operation(self, keep_going, method, *args, **kwds):
    """
    Apply *method* to every object in this registry slice and collect the
    results.

    Args:
        keep_going (bool): when True, log failures and carry on with the
            remaining objects; when False, re-raise the first failure.
        method: either the name (str) of a method looked up on each object,
            or a callable invoked as method(obj, *args, **kwds).

    Returns:
        list: the per-object return values (objects that raised contribute
        no entry).

    Raises:
        GangaException: if keep_going is not a bool, or re-raised from an
            object when keep_going is False.
    """
    if not isinstance(keep_going, bool):
        raise GangaException(
            "The variable 'keep_going' must be a boolean. Probably you wanted to do %s(%s).%s()" % (self.name, keep_going, method))
    result = []
    for id, obj in self.objects.iteritems():
        try:
            if isinstance(method, str):
                # method given by name: 'doc' doubles as the log label
                doc = method
                result.append(getattr(obj, method)(*args, **kwds))
            else:
                # method given as a callable: use its docstring for logging
                try:
                    doc = method.__doc__
                except AttributeError:
                    doc = str(method)
                result.append(method(obj, *args, **kwds))
        except GangaException as x:
            # GangaExceptions are not logged here — assumed already
            # user-visible; only abort if keep_going is False
            if not keep_going:
                raise
        except Exception as x:
            logger.exception('%s %s %s: %s %s', doc, self.name, id, getName(x), str(x))
            if not keep_going:
                raise
    return result
def get_result(cmd, log_msg, except_msg):
    """
    Execute *cmd* through the Dirac API and return the result dictionary.

    On a failed result the raw reply is logged (prefixed with *log_msg*) and
    a GangaException carrying *except_msg* is raised.
    """
    from GangaBoss.Lib.DIRAC.Dirac import Dirac
    from GangaBoss.Lib.DIRAC.DiracUtils import result_ok
    result = Dirac.execAPI(cmd)
    if result_ok(result):
        return result
    logger.warning('%s: %s' % (log_msg, str(result)))
    raise GangaException(except_msg)
def _checkOtherFiles(self, other):
    """
    Return the full file names of *other*, which may be a GangaList, a
    plain list, or an LHCbDataset.

    Raises:
        GangaException: if *other* is of an unsupported type.
    """
    # fixed: the original called isType(other, []) — passing a list
    # *instance* rather than the list type — so plain lists were never
    # recognised by this branch
    if isType(other, GangaList) or isType(other, list):
        other_files = LHCbDataset(other).getFullFileNames()
    elif isType(other, LHCbDataset):
        other_files = other.getFullFileNames()
    else:
        raise GangaException("Unknown type for difference")
    return other_files
def getDataset(self, **kwargs):
    '''Get all metadata of all datasets. Public method, not exported to GPI.'''
    db_view_column = ['dataset_id', 'creation_date', 'occupancy']
    sql = 'SELECT * FROM dataset_union WHERE true'
    # default owner filter: official datasets plus the current user's
    kwargs['owner'] = kwargs.get('owner', ['official', utils.getOwner()])

    # build the WHERE clause from the supplied filters
    # (the original 'if len(kwargs) > 0' guard was redundant: kwargs always
    # contains 'owner' at this point, and looping an empty dict is a no-op;
    # .items() replaces the Python-2-only .iteritems())
    for key, value in kwargs.items():
        if key in db_view_column:
            sql += " AND %s ILIKE '%s%%'" % (key, value)
        elif key == 'files':
            sql += " AND files > %s" % value
        elif key in ['status', 'session', 'owner']:
            # multi-valued filters become an OR group
            if not isinstance(value, list):
                value = [value]
            sql += " AND (false"
            for s in value:
                sql += " OR %s ILIKE '%s%%'" % (key, s)
            sql += ")"
        else:
            # anything else is matched against the hstore 'parameters' field
            sql += " AND parameters->'%s' ILIKE '%s%%'" % (key, value)

    # clean up the query (drop the 'true'/'false' seeds where possible)
    sql = sql.replace('false OR ', '')
    sql = sql.replace('true AND ', '')
    # TODO: add control to prevent sql injection
    datasets = db.read(sql)

    if len(datasets) == 0:
        raise GangaException('No dataset found')

    # enumerate replaces the original hand-rolled counter
    for i, dataset in enumerate(datasets):
        dataset['id'] = i
        dataset['occupancy_human'] = utils.sizeof_fmt_binary(
            dataset['occupancy'])
        # derive total events from per-file events when not stored directly
        if 'evt_file' in dataset['parameters'] and not 'evt_tot' in dataset['parameters']:
            evt_file = int(dataset['parameters']['evt_file'])
            if dataset['files'] is None:
                dataset['files'] = 0
            files = int(dataset['files'])
            dataset['parameters']['evt_tot'] = evt_file * files
        if 'evt_tot' in dataset['parameters']:
            dataset['parameters']['evt_tot_human'] = utils.sizeof_fmt_decimal(
                int(dataset['parameters']['evt_tot']))
    return datasets
def __init__(self, repo=None, what=''):
    """
    Report a severe repository error: log it, disable Ganga's internal
    services and shut down the repository runtime.

    Args:
        repo: repository object in which the error occurred (may be None)
        what (str): description of the error
    """
    GangaException.__init__(self, what)
    self.what = what
    self.repository = repo
    # repo defaults to None, so the registry-name lookup must be guarded
    # (the original crashed with AttributeError when repo was omitted).
    repo_name = repo.registry.name if repo is not None else '<unknown>'
    logger.error("A severe error occurred in the Repository '%s': %s" % (repo_name, what))
    logger.error(
        'If you believe the problem has been solved, type "reactivate()" to re-enable '
    )
    try:
        from Ganga.Core.InternalServices.Coordinator import disableInternalServices
        disableInternalServices()
        logger.error("Shutting Down Repository_runtime")
        from Ganga.Runtime import Repository_runtime
        Repository_runtime.shutdown()
    except Exception:
        # narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed
        logger.error(
            "Unable to disable Internal services, they may have already been disabled!"
        )
def full_expand_filename(name):
    """
    Expand *name* into an absolute physical file name: strip any leading
    'PFN:' tag, expand environment variables/home, and absolutise plain
    paths. URL-style names (scheme prefix) are returned as expanded.

    Raises:
        GangaException: if *name* starts with 'LFN:' — such strings belong
        to LogicalFile, not PhysicalFile.
    """
    head = name[0:4].upper() if len(name) >= 4 else ''
    if head == 'LFN:':
        msg = 'Can not create PhysicalFile from string that begins w/ "LFN:".'\
              ' You probably want to create a LogicalFile.'
        raise GangaException(msg)
    if head == 'PFN:':
        name = name[4:]
    expanded_name = expandfilename(name)
    # names with a URL scheme (e.g. 'root://...') are left as-is
    urlprefix = re.compile('^(([a-zA-Z_][\w]*:)+/?)?/')
    if urlprefix.match(expanded_name):
        return expanded_name
    return os.path.abspath(expanded_name)
def fillPackedSandbox(sandbox_files, destination):
    """Put all sandbox_files into tarball called name and write it into to the destination.
    Arguments:
    'sandbox_files': a list of File or FileBuffer objects.
    'destination': a string representing the destination filename
    Return: a list containing a path to the tarball
    """
    if not sandbox_files:
        return []

    # Generalised version from Ganga/Core/Sandbox/Sandbox.py
    import tarfile
    import stat

    # "a" = append with no compression; creates the file if it doesn't
    # exist. We cannot append to a compressed tar archive, so any
    # compression must happen later.
    # renamed from 'dir' to avoid shadowing the builtin
    dest_dir, filename = os.path.split(destination)
    if not os.path.isdir(dest_dir):
        os.makedirs(dest_dir)

    tf = tarfile.open(destination, "a")
    tf.dereference = True  # --not needed in Windows
    try:
        for f in sandbox_files:
            try:
                contents = f.getContents()   # is it FileBuffer?
            except AttributeError:
                # File on disk: 'open' replaces the removed 'file' builtin,
                # and only I/O errors are treated as "file missing"
                try:
                    fileobj = open(f.name)
                except (IOError, OSError):
                    raise GangaException("File %s does not exist." % f.name)
                tinfo = tf.gettarinfo(
                    f.name, os.path.join(f.subdir, os.path.basename(f.name)))
            else:
                # FileBuffer: wrap the in-memory contents
                from StringIO import StringIO
                fileobj = StringIO(contents)
                tinfo = tarfile.TarInfo()
                tinfo.name = os.path.join(f.subdir, os.path.basename(f.name))
                import time
                tinfo.mtime = time.time()
                tinfo.size = fileobj.len

            if f.isExecutable():
                tinfo.mode = tinfo.mode | stat.S_IXUSR
            tf.addfile(tinfo, fileobj)
    finally:
        # close even on error so a partial archive isn't left open
        tf.close()

    return [destination]
def __init__(self, files=None, persistency=None, depth=0, fromRef=False):
    """
    Build an LHCbDataset from *files*, which may be None, a GangaList,
    another LHCbDataset, a single IGangaFile, a list/tuple of files or
    strings, or a single string. With fromRef=True the given list is
    adopted directly without per-element processing.
    """
    super(LHCbDataset, self).__init__()
    if files is None:
        files = []
    self.files = GangaList()
    process_files = True
    if fromRef:
        # trusted internal construction: adopt the raw list unchecked
        self.files._list.extend(files)
        process_files = False
    elif isinstance(files, GangaList):
        def isFileTest(_file):
            return isinstance(_file, IGangaFile)
        areFiles = all([isFileTest(f) for f in files._list])
        if areFiles:
            self.files._list.extend(files._list)
            process_files = False
    elif isinstance(files, LHCbDataset):
        self.files._list.extend(files.files._list)
        process_files = False

    if process_files:
        if isType(files, LHCbDataset):
            for this_file in files:
                self.files.append(deepcopy(this_file))
        elif isType(files, IGangaFile):
            # fixed: referenced the undefined name 'this_file' (NameError);
            # a single file argument must be copied from 'files' itself
            self.files.append(deepcopy(files))
        elif isType(files, (list, tuple, GangaList)):
            new_list = []
            for this_file in files:
                if type(this_file) is str:
                    new_file = string_datafile_shortcut_lhcb(this_file, None)
                elif isType(this_file, IGangaFile):
                    new_file = stripProxy(this_file)
                else:
                    new_file = strToDataFile(this_file)
                new_list.append(new_file)
            self.files.extend(new_list)
        elif type(files) is str:
            # fixed: referenced the undefined name 'this_file' (NameError);
            # a bare string argument is 'files' itself
            self.files.append(string_datafile_shortcut_lhcb(files, None), False)
        else:
            raise GangaException("Unknown object passed to LHCbDataset constructor!")

    self.files._setParent(self)

    logger.debug("Processed inputs, assigning files")
    # Feel free to turn this on again for debugging but it's potentially quite expensive
    #logger.debug( "Creating dataset with:\n%s" % self.files )
    logger.debug("Assigned files")

    self.persistency = persistency
    self.depth = depth
    logger.debug("Dataset Created")
def strToDataFile(name, allowNone=True):
    """
    Convert *name* into a file object: 'LFN:'-prefixed strings become
    DiracFiles, 'PFN:'-prefixed strings become LocalFiles. Anything else
    yields None when allowNone is True, otherwise a GangaException.
    """
    prefix = name[:4].upper() if len(name) >= 4 else ''
    if prefix == 'LFN:':
        return DiracFile(lfn=name[4:])
    if prefix == 'PFN:':
        logger.warning("PFN is slightly ambiguous, constructing LocalFile")
        return LocalFile(name[4:])
    if allowNone:
        return None
    raise GangaException("Cannot construct file object: %s" % str(name))
def strToDataFile(name, allowNone=True):
    """
    Convert *name* into a data file: 'LFN:' strings become LogicalFiles and
    'PFN:' strings become PhysicalFiles. Other strings give None when
    allowNone is True, otherwise a GangaException is raised.
    """
    prefix = name[0:4].upper() if len(name) >= 4 else ''
    if prefix == 'LFN:':
        return LogicalFile(name)
    if prefix == 'PFN:':
        return PhysicalFile(name)
    if allowNone:
        return None
    msg = 'Can only convert strings that begin w/ PFN: or '\
          'LFN: to data files.'
    raise GangaException(msg)
def getDatasetMetadata(self):
    '''Gets the dataset from the bookkeeping for current path, etc.'''
    if not self.path:
        return None
    if not self.type in ['Path', 'RunsByDate', 'Run', 'Production']:
        raise GangaException('Type="%s" is not valid.' % self.type)
    # fixed: the original used "not self.type is 'RunsByDate'", an identity
    # comparison against a string literal, which is implementation-defined;
    # use value equality instead
    if self.type != 'RunsByDate':
        # start/end dates and selections only make sense for RunsByDate
        if self.startDate:
            msg = 'startDate not supported for type="%s".' % self.type
            raise GangaException(msg)
        if self.endDate:
            msg = 'endDate not supported for type="%s".' % self.type
            raise GangaException(msg)
        if self.selection:
            msg = 'selection not supported for type="%s".' % self.type
            raise GangaException(msg)
    cmd = "getDataset('%s','%s','%s','%s','%s','%s')" % (
        self.path, self.dqflag, self.type, self.startDate, self.endDate,
        self.selection)
    from Ganga.GPIDev.Lib.GangaList.GangaList import GangaList
    knownLists = [tuple, list, GangaList]
    # a list of dqflags is passed through unquoted so it renders as a list
    if isType(self.dqflag, knownLists):
        cmd = "getDataset('%s',%s,'%s','%s','%s','%s')" % (
            self.path, self.dqflag, self.type, self.startDate, self.endDate,
            self.selection)
    result = get_result(cmd, 'BK query error.', 'BK query error.')

    files = []
    metadata = {}
    value = result['Value']
    if 'LFNs' in value:
        files = value['LFNs']
    if not type(files) is list:  # i.e. a dict of LFN:Metadata
        metadata = files.copy()

    if metadata:
        return {'OK': True, 'Value': metadata}
    return {'OK': False, 'Value': metadata}
def replicate(self, destSE='', srcSE='', locCache=''):
    '''Replicate this file to destSE. For a list of valid SE\'s, type file.replicate().'''
    # Space tokens accepted by DIRAC as replication destinations.
    tokens = get_dirac_space_tokens()
    if not destSE:
        # Called without a destination: print the valid choices and bail out.
        print "Please choose SE from:", tokens
        return
    if destSE not in tokens:
        msg = '"%s" is not a valid space token. Please choose from: %s' \
            % (destSE,str(tokens))
        raise GangaException(msg)
    # Build the DIRAC command string; srcSE/locCache may legitimately be
    # empty strings (DIRAC then picks the source replica itself).
    cmd = 'result = DiracCommands.replicateFile("%s","%s","%s","%s")' % \
        (self.name,destSE,srcSE,locCache)
    return get_result(cmd, 'Replication error', 'Error replicating file.')
def getFullFileNames(self):
    'Returns all file names w/ PFN or LFN prepended.'
    names = []
    from GangaDirac.Lib.Files.DiracFile import DiracFile
    for f in self.files:
        if isType(f, DiracFile):
            names.append('LFN:%s' % f.lfn)
        else:
            try:
                names.append('PFN:%s' % f.namePattern)
            except AttributeError:
                # narrowed from a bare 'except:'; only a missing
                # namePattern attribute is expected here
                logger.warning("Cannot determine filename for: %s " % f)
                raise GangaException("Cannot Get File Name")
    return names
def master_submit(self, rjobs, subjobconfigs, masterjobconfig):
    '''Submit the master job to the grid'''
    profiler = ElapsedTimeProfiler(getLogger(name='Profile.LCG'))
    profiler.start()

    job = self.getJobObject()

    # Check the configured ARC CE endpoint against what 'arc_info' reports
    # in the config file, and report which endpoint(s) will be used.
    rc, output = grids['GLITE'].arc_info()
    if self.CE:
        logger.info('ARC CE endpoint set to: ' + str(self.CE))
    elif rc != 0:
        raise GangaException(
            "ARC CE endpoint not set and no default settings in '%s'. " % config['ArcConfigFile'])
    else:
        logger.info("Using ARC CE endpoints defined in '%s'" % config['ArcConfigFile'])

    # Massive job preparation: bulk path for jobs with subjobs, the plain
    # IBackend path otherwise.
    if job.subjobs:
        ick = self.master_bulk_submit(rjobs, subjobconfigs, masterjobconfig)
    else:
        ick = IBackend.master_submit(self, rjobs, subjobconfigs, masterjobconfig)

    profiler.check('==> master_submit() elapsed time')
    return ick
def getOutputData(self, outputDir=None, names=None, force=False):
    """Retrieve data stored on SE to dir (default=job output workspace).
    If names=None, then all outputdata is downloaded otherwise names should
    be a list of files to download. If force=True then data will be
    redownloaded even if the file already exists.

    Note that if called on a master job then all subjobs' output will
    be downloaded. If dir is None then the subjobs' output goes into
    their individual outputworkspaces as expected. If however one
    specifies a dir then this is treated as a top dir and a subdir for
    each job will be created below it. This will avoid overwriting files
    with the same name from each subjob.

    Args:
        outputDir (str): This string represents the output dir where the sandbox is to be placed
        names (list): list of names which match namePatterns in the outputfiles
        force (bool): Force the download out data potentially overwriting existing objects
    """
    j = self.getJobObject()
    if outputDir is not None and not os.path.isdir(outputDir):
        # fixed typo in the message: 'outupt' -> 'output'
        raise GangaException("Designated output path '%s' must exist and be a directory" % outputDir)

    def download(dirac_file, job, is_subjob=False):
        # default destination: the job's own output workspace
        dirac_file.localDir = job.getOutputWorkspace().getPath()
        if outputDir is not None:
            output_dir = outputDir
            if is_subjob:
                # one subdirectory per subjob to avoid filename clashes
                output_dir = os.path.join(outputDir, job.fqid)
                if not os.path.isdir(output_dir):
                    os.mkdir(output_dir)
            dirac_file.localDir = output_dir
        # skip files that already exist locally unless force=True
        if os.path.exists(os.path.join(dirac_file.localDir, os.path.basename(dirac_file.lfn))) and not force:
            return
        try:
            if isType(dirac_file, DiracFile):
                dirac_file.get(localPath=dirac_file.localDir)
            else:
                dirac_file.get()
            return dirac_file.lfn
        # TODO: should really make the get method throw if it doesn't succeed
        except GangaException as e:
            logger.warning(e)

    succeeded = []
    if j.subjobs:
        for sj in j.subjobs:
            succeeded.extend([download(f, sj, True) for f in outputfiles_iterator(sj, DiracFile)
                              if f.lfn != '' and (names is None or f.namePattern in names)])
    else:
        succeeded.extend([download(f, j, False) for f in outputfiles_iterator(j, DiracFile)
                          if f.lfn != '' and (names is None or f.namePattern in names)])

    return filter(lambda x: x is not None, succeeded)
def mergefiles(self, file_list, output_file, jobs):
    """
    Merge the XML summary files in *file_list* into *output_file*, taking
    the Gaudi environment (XMLSUMMARYBASEROOT) from *jobs*, which may be a
    single Job or a non-empty list of Jobs.

    Raises:
        GangaException: if the environment is missing or the merge fails.
    """
    from Ganga.GPIDev.Lib.Job import Job
    from Ganga.GPIDev.Base.Proxy import isType
    gaudi_env = {}
    if isType(jobs, Job):
        gaudi_env = jobs.application.getenv()
    elif len(jobs) > 0:
        gaudi_env = jobs[0].application.getenv()
    # needed as exportmethods doesn't seem to cope with inheritance
    self.env_var = gaudi_env['XMLSUMMARYBASEROOT']
    if not self.env_var:
        raise GangaException('XMLSummary env not set!')
    # mkstemp instead of the insecure, deprecated mktemp; the script file is
    # now removed again once the subprocess has finished.
    fd, script_name = tempfile.mkstemp('.py')
    try:
        dummy = GaudiXMLSummary()
        dummy.env_var = self.env_var
        # write py script
        script = os.fdopen(fd, 'w')
        try:
            script.write('import sys\n')
            script.write('sys.path.append("%s") \n' % dummy._xmlPath())
            script.write('import summary \n')
            script.write('sum = summary.Merge(%s,"%s") \n' % (str(file_list), dummy._xmlSchema()))
            script.write('sum.write("%s") \n' % output_file)
        finally:
            script.close()
        # run it
        proc = subprocess.Popen(['python', script_name])
        proc.wait()
        rc = proc.poll()
    finally:
        os.unlink(script_name)
    if rc != 0:
        raise GangaException('Failed to merge XML summary file!')
    if not os.path.exists(output_file):
        raise GangaException('Failed to merge XML summary file!')
def _getPartitionMasterJob(self, partition):
    """Get the master job from any number of partition jobs."""
    partition_jobs = self.getPartitionJobs(partition)  # only call method once
    if not len(partition_jobs):
        raise GangaException(
            None, 'Cant get partition master job when NO jobs assigned to partition')
    # fixed: 'len(...) is 1' compared object identity on an int, which is
    # implementation-defined; use value equality
    elif len(partition_jobs) == 1:
        return partition_jobs[0]
    # Need registry access here; might be better to get the registry directly
    # as in prepared stuff, see Executable for example or even tasksregistry.py!
    return GPI.jobs(partition_jobs[0].fqid.split('.')[0])
def getFileNames(self):
    'Returns a list of the names of all files stored in the dataset.'
    names = []
    from GangaDirac.Lib.Files.DiracFile import DiracFile
    for i in self.files:
        if isType(i, DiracFile):
            names.append(i.lfn)
        else:
            try:
                names.append(i.namePattern)
            except AttributeError:
                # narrowed from a bare 'except:'; only a missing
                # namePattern attribute is expected here
                logger.warning("Cannot determine filename for: %s " % i)
                raise GangaException("Cannot Get File Name")
    return names
def _reallyUpdateLocks(self, index, failCount=0):
    """
    Refresh the timestamp of this session's lock file (self.fns[index]) by
    touching it and re-reading its time, retrying up to 4 times on unknown
    OS errors.

    Returns:
        the new timestamp, or -999. when the update ultimately failed.

    Raises:
        RepositoryError/GangaException: when the session file has vanished
        (ENOENT), i.e. was possibly deleted by another ganga session.
    """
    this_index_file = self.fns[index]
    now = None
    try:
        oldnow = self.delayread(this_index_file)
        # NOTE(review): shelling out to 'touch' with an unquoted path —
        # os.utime would avoid the subshell; confirm before changing
        os.system('touch %s' % str(this_index_file))
        now = self.delayread(this_index_file)  # os.stat(self.fn).st_ctime
    except OSError as x:
        if x.errno != errno.ENOENT:
            # Unknown OS error: warn, then retry a few times with a pause.
            logger.debug(
                "Session file timestamp could not be updated! Locks could be lost!")
            if now is None and failCount < 4:
                try:
                    logger.debug(
                        "Attempting to lock file again, unknown error:\n'%s'" % str(x))
                    import time
                    time.sleep(0.5)
                    failcount = failCount + 1
                    now = self._reallyUpdateLocks(index, failcount)
                except Exception as err:
                    now = -999.
                    logger.debug(
                        "Received another type of exception, failing to update lockfile: %s" % str(this_index_file))
            else:
                logger.warning("Failed to update lock file: %s 5 times." %
                               str(this_index_file))
                logger.warning(
                    "This could be due to a filesystem problem, or multiple versions of ganga trying to access the same file")
                now = -999.
        else:
            # ENOENT: our own session file is gone — treat as fatal.
            if self.repos[index] != None:
                raise RepositoryError(
                    self.repos[index],
                    "[SessionFileUpdate] Run: Own session file not found! Possibly deleted by another ganga session.\n\
Possible reasons could be that this computer has a very high load, or that the system clocks on computers running Ganga are not synchronized.\n\
On computers with very high load and on network filesystems, try to avoid running concurrent ganga sessions for long.\n '%s' : %s" % (this_index_file, x))
            else:
                from Ganga.Core import GangaException
                raise GangaException(
                    "Error Opening global .session file for this session: %s" % this_index_file)
    return now
def __construct__(self, args):
    """
    Construct this GangaList from a single-element args tuple holding an
    iterable (list/GangaList/tuple) or None; any other arity is deferred
    to the parent implementation.

    Raises:
        GangaException: if the single argument is not a supported iterable.
    """
    #super(GangaList, self).__construct__(args)
    if len(args) == 1:
        # fixed: the original tested isType(args[0], (len, GangaList, tuple)),
        # passing the *builtin function* len instead of the list type
        if isType(args[0], (list, GangaList, tuple)):
            for element_i in args[0]:
                # fixed: list objects have no 'expand' method — append each
                # stripped element individually
                self._list.append(self.strip_proxy(element_i))
        elif args[0] is None:
            self._list = None
        else:
            raise GangaException(
                "Construct: Attempting to assign a non list item: %s to a GangaList._list!" % str(args[0]))
    else:
        super(GangaList, self).__construct__(args)
    return
def process(self, sj_info):
    """
    Prepare one subjob's JDL from a (subjob-config, subjob) pair and record
    the resulting path; returns True on success, False on any failure.
    """
    subjob_config = sj_info[0]
    subjob = sj_info[1]
    try:
        logger.debug("preparing job %s" % subjob.getFQID('.'))
        jdl_path = subjob.backend.preparejob(subjob_config, master_input_sandbox)
        if (not jdl_path) or (not os.path.exists(jdl_path)):
            raise GangaException('job %s not properly prepared' % subjob.getFQID('.'))
        self.__appendResult__(subjob.id, jdl_path)
        return True
    except Exception as x:
        log_user_exception()
        return False
def master_submit(self, rjobs, subjobconfigs, masterjobconfig):
    '''Submit the master job to the grid'''
    profiler = ElapsedTimeProfiler(getLogger(name='Profile.LCG'))
    profiler.start()

    job = self.getJobObject()

    # finding CREAM CE endpoint for job submission
    allowed_celist = []
    try:
        allowed_celist = self.requirements.getce()
        if not self.CE and allowed_celist:
            self.CE = allowed_celist[0]
    except Exception:
        # narrowed from a bare 'except:' so Ctrl-C is not swallowed
        logger.warning(
            'CREAM CE assigment from AtlasCREAMRequirements failed.')

    # ensure the chosen CE is one of the allowed endpoints
    if self.CE and allowed_celist:
        if self.CE not in allowed_celist:
            logger.warning(
                'submission to CE not allowed: %s, use %s instead' % (self.CE, allowed_celist[0]))
            self.CE = allowed_celist[0]

    if not self.CE:
        raise GangaException('CREAM CE endpoint not set')

    # delegate proxy to CREAM CE
    if not grids['GLITE'].cream_proxy_delegation(self.CE):
        logger.warning('proxy delegation to %s failed' % self.CE)

    # doing massive job preparation
    if len(job.subjobs) == 0:
        ick = IBackend.master_submit(self, rjobs, subjobconfigs, masterjobconfig)
    else:
        ick = self.master_bulk_submit(rjobs, subjobconfigs, masterjobconfig)

    profiler.check('==> master_submit() elapsed time')
    return ick
def __init__(self, message=''):
    """Keep *message* as an attribute and initialise the GangaException base."""
    self.message = message
    GangaException.__init__(self, message)
def __init__(self, repo=None, id='', orig=None):
    """Signal access to an object that is no longer reachable in *repo*."""
    GangaException.__init__(self, "Inaccessible Object")
    self.orig = orig
    self.id = id
    self.repo = repo
def __init__(self, what=''):
    """Store the failure description and initialise the base exception."""
    self.what = what
    GangaException.__init__(self, what)
def __init__(self, txt):
    """Wrap *txt* as this exception's payload."""
    self.txt = txt
    GangaException.__init__(self, txt)