def __init__(self): logger.info("crabbackend init") super(CRABBackend, self).__init__() config = Config.getConfig('CMSSW') shell = Shell(os.path.join(config['CMSSW_SETUP'], 'CMSSW_generic.sh')) #shell = Shell(os.path.join(config['CMSSW_SETUP'], 'CMSSW_generic.sh'), # [config['CMSSW_VERSION'], config['CRAB_VERSION']]) self.crab_env = shell.env config = Config.getConfig('CRAB_CFG') self.server_name = config['server_name'] self.apiresource = config['apiresource'] self.userproxy = config['userproxy'] self.asyncdest = config['asyncdest'] logger.info("asyncdest %s" % self.asyncdest)
def __init__(self): logger.info("crabbackend init") super(CRABBackend, self).__init__() config = Config.getConfig('CMSSW') shell = Shell(os.path.join(config['CMSSW_SETUP'], 'CMSSW_generic.sh')) #shell = Shell(os.path.join(config['CMSSW_SETUP'], 'CMSSW_generic.sh'), # [config['CMSSW_VERSION'], config['CRAB_VERSION']]) self.crab_env = shell.env config = Config.getConfig('CRAB_CFG') self.server_name = config['server_name'] self.apiresource = config['apiresource'] self.userproxy = config['userproxy'] self.asyncdest = config['asyncdest'] logger.info("asyncdest %s" % self.asyncdest )
def run(self):
    # create connection
    self.log.info('Connecting to LGI project %s server %s' % (self.res._project, self.res._url))
    self.res.connect()
    self.queued = None
    config = Config.getConfig('LGI')
    # LGI update loop
    self.log.debug('Starting LGIResourceThread main loop')
    while not self.should_stop():
        now = time.time()
        try:
            work = [self._workForApp(app) for app in self.res.getApplications()]
            totalwork = sum(work)
            if self.log.isEnabledFor('DEBUG'):
                self.log.debug('LGI pending work: %s' % (dict(zip([str(x) for x in self.res.getApplications()], work))))
            if self.queued != totalwork:
                self.log.info('LGI jobs: %d waiting' % totalwork)
                self.queued = totalwork
        except Exception as e:
            self.log.warn(e)
        # and wait for next iteration
        while not self.should_stop() and time.time() - now < config['Poll']:
            time.sleep(1)
def submit(self, **opts):  # called on client, so job_info is Job object
    """Log submit event on client."""
    # if this job has a master and it is the first subjob then send
    # submitted for the master job
    if self.job_info.master is not None:
        if self.job_info.id == 0:
            masterjob_msg = self.getMessage('submitted')
            masterjob_msg['subjobs'] = len(self.job_info.master.subjobs)
            masterjob_msg['ganga_job_id'] = str(
                masterjob_msg['ganga_job_id']).split('.')[0]
            # override ganga_job_uuid as the message 'from the master' is
            # really sent from the subjob
            masterjob_msg['ganga_job_uuid'] = masterjob_msg['ganga_master_uuid']
            masterjob_msg['ganga_master_uuid'] = 0
            self.send(masterjob_msg)

    from Ganga.Utility import Config
    gangausername = Config.getConfig('Configuration')['user']
    self.job_info.info.monitoring_links.append(
        ('http://gangamon.cern.ch/ganga/#user=%s' % gangausername, 'dashboard'))

    # send submitted for this job
    msg = self.getMessage('submitted')
    from Ganga.GPI import queues
    queues.add(self.send, (msg))
def _initconfig(): """Initialize MSGMS configuration.""" try: from Ganga.Utility import Config # create configuration config = Config.makeConfig( 'MSGMS', 'Settings for the MSGMS monitoring plugin. Cannot be changed ruding the interactive Ganga session.') config.addOption( 'server', 'dashb-mb.cern.ch', 'The server to connect to') config.addOption('port', 61113, 'The port to connect to') config.addOption('username', 'ganga', '') config.addOption('password', 'analysis', '') config.addOption('message_destination', '/queue/ganga.status', '') config.addOption('usage_message_destination', "/queue/ganga.usage", '') config.addOption( 'job_submission_message_destination', "/queue/ganga.jobsubmission", '') # prevent modification during the interactive ganga session def deny_modification(name, x): raise Config.ConfigError( 'Cannot modify [MSGMS] settings (attempted %s=%s)' % (name, x)) config.attachUserHandler(deny_modification, None) except ImportError: # on worker node so Config is not needed since it is copied to MSGMS # constructor pass
def _initconfig(): """Initialize DashboardMS configuration.""" try: from Ganga.Utility import Config # create configuration config = Config.makeConfig( 'DashboardMS', 'Settings for Dashboard Messaging Service.') config.addOption('server', 'dashb-mb.cern.ch', 'The MSG server name.') config.addOption('port', 61113, 'The MSG server port.') config.addOption('user', 'ganga-atlas', '') config.addOption('password', 'analysis', '') config.addOption('destination_job_status', '/topic/dashboard.atlas.jobStatus', 'The MSG destination (topic or queue) for job status messages.') config.addOption('destination_job_processing_attributes', '/topic/dashboard.atlas.jobProcessingAttributes', 'The MSG destination (topic or queue) for job processing attributes messages.') config.addOption('destination_job_meta', '/topic/dashboard.atlas.jobMeta', 'The MSG destination (topic or queue) for job meta messages.') config.addOption('destination_task_meta', '/topic/dashboard.atlas.taskMeta', 'The MSG destination (topic or queue) for task meta messages.') config.addOption( 'task_type', 'analysis', 'The type of task. e.g. analysis, production, hammercloud,...') # prevent modification during the interactive ganga session def deny_modification(name, value): raise Config.ConfigError( 'Cannot modify [DashboardMS] settings (attempted %s=%s)' % (name, value)) config.attachUserHandler(deny_modification, None) except ImportError: # on worker node so Config is not needed since it is copied to # DashboardMS constructor pass
def __init__(self):
    GangaThread.__init__(self, 'LGI_Resource')
    self.log = getLogger('LGI.Resource.Thread')
    config = Config.getConfig('LGI')
    if not os.path.exists(config['PilotDist']):
        self.log.error('cannot connect to LGI server: pilotjob tarball not found: ' + config['PilotDist'])
    self.res = LGI.Resource(config['PilotDist'])
    # number of queued LGI jobs
    self.queued = None
def getJobInfo(self):  # called on client, so job_info is Job object
    """Create job_info from Job object."""
    if self.job_info.master is None:
        # no master job; this job is not a split job
        ganga_job_id = str(self.job_info.id)
        ganga_job_uuid = self.job_info.info.uuid
        ganga_master_uuid = 0
    else:
        # there is a master job; we are in a subjob
        ganga_job_id = str(self.job_info.master.id) + '.' + str(self.job_info.id)
        ganga_job_uuid = self.job_info.info.uuid
        ganga_master_uuid = self.job_info.master.info.uuid

    from Ganga.Utility import Config
    return {'ganga_job_uuid': ganga_job_uuid,
            'ganga_master_uuid': ganga_master_uuid,
            'ganga_user_repository': Config.getConfig('Configuration')['user']
                                     + '@' + Config.getConfig('System')['GANGA_HOSTNAME']
                                     + ':' + Config.getConfig('Configuration')['gangadir'],
            'ganga_job_id': ganga_job_id,
            'subjobs': len(self.job_info.subjobs),
            'backend': self.job_info.backend.__class__.__name__,
            'application': self.job_info.application.__class__.__name__,
            'job_name': self.job_info.name,
            'hostname': '',  # place-holder updated in getMessage
            'event': ''      # place-holder updated in getMessage
            }
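# Illustrative sketch (not in the original source): the shape of the dictionary
# returned by getJobInfo() for a hypothetical subjob 3.1 of user 'alice'. The keys
# come from the code above; the concrete values are invented placeholders.
example_job_info = {
    'ganga_job_uuid': '<uuid of this subjob>',
    'ganga_master_uuid': '<uuid of the master job>',
    'ganga_user_repository': 'alice@lxplus001.cern.ch:/home/alice/gangadir',
    'ganga_job_id': '3.1',   # '<master id>.<subjob id>' for subjobs, plain id otherwise
    'subjobs': 0,
    'backend': 'LCG',
    'application': 'Executable',
    'job_name': 'my_analysis',
    'hostname': '',          # place-holder updated in getMessage
    'event': '',             # place-holder updated in getMessage
}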
def start(self):
    config = Config.getConfig("LGI")
    if config["StatsInterval"] == 0:
        self.log.debug("Not starting LGI stats thread because [LGI]StatsInterval is zero")
        return
    if not config["StatsFile"]:
        self.log.debug("Not starting LGI stats thread because [LGI]StatsFile is empty")
        return
    if config["Enable"] is False:
        self.log.debug("Not starting LGI stats thread because [LGI]Enable is False")
        return False
    return GangaThread.start(self)
def prepare_job_config(self, job): """ Generates a CRAB config object from the Ganga job configuration. """ from WMCore.Configuration import Configuration job_config = Configuration() for section in job.backend.CRABConfig._schema.datadict.keys(): section_config = getattr(job.backend.CRABConfig, section) ganga_section_config = Config.getConfig('CRABConfig_%s' % section) task_section_config = job_config.section_(section) for parameter_name, parameter_type in section_config._schema.allItems(): parameter_value = getattr(section_config, parameter_name) if parameter_value not in (None, [None]): # CRAB Config doesn't like Ganga sequence type instead of Lists if parameter_type._meta['sequence']: parameter_value = list(parameter_value) task_section_config.__setattr__(parameter_name, parameter_value) # Updating configuration in case of Ganga inline options specified ganga_option = ganga_section_config[parameter_name] if ganga_option: # CRAB Config doesn't like Ganga sequence (or tuples) type instead of Lists # Passing sequance with Ganga inline options makes it a tuple. if parameter_type._meta['sequence']: # Ugly but we need this because otherwise tuple of 1 element with a string would # be transformed in a list of chars ( ('ab') --> ['a', 'b'] ) import json ganga_option = json.loads(json.dumps(ganga_option)) if type(ganga_option) != list: ganga_option = [ganga_option] # loads makes strings 'utf' type, CRAB3 Server wants 'str' type ganga_option = map(lambda x: str(x), ganga_option) task_section_config.__setattr__(parameter_name, ganga_option) # Some internal configuration job_config.General.workArea = job.outputdir return job_config
def _initconfigFeed(): """Initialize Feedback configuration.""" try: from Ganga.Utility import Config # create configuration config = Config.makeConfig( 'Feedback', 'Settings for the Feedback plugin. Cannot be changed ruding the interactive Ganga session.') config.addOption( 'uploadServer', 'http://gangamon.cern.ch/django/errorreports', 'The server to connect to') def deny_modification(name, x): raise Config.ConfigError( 'Cannot modify [Feedback] settings (attempted %s=%s)' % (name, x)) config.attachUserHandler(deny_modification, None) except ImportError: # on worker node so Config is not needed since it is copied to Feedback # constructor pass
def run(self):
    config = Config.getConfig("LGI")
    # wait for GPI to become ready (taken from GangaJEM)
    while not self.should_stop():
        try:
            from Ganga.GPI import jobs
            break
        except:
            pass
        time.sleep(1)
    from Ganga.GPI import LGI
    # LGI update loop
    self.log.debug("Starting LGI StatsThread main loop")
    self.data = []
    self._writeStats(self.data, config["StatsFile"])
    while not self.should_stop():
        now = time.time()
        try:
            # add new line of data
            lgiQueued = LGI.resource.queued
            if lgiQueued is None:
                lgiQueued = 0
            lgiRunning = 0  # TODO
            pilotQueued = sum([len(jobs.select(status=s)) for s in ["submitted", "submitting"]])
            pilotRunning = len(jobs.select(status="running"))
            self.data.append(
                [int(now), lgiQueued + lgiRunning, lgiRunning, pilotQueued + pilotRunning, pilotRunning])
            # trash too old lines
            self.data = filter(lambda x: now - x[0] < config["StatsHistory"], self.data)
            # write data
            self._writeStats(self.data, config["StatsFile"])
        except Exception as e:
            self.log.warn(e)
        # and wait for next iteration
        while not self.should_stop() and time.time() - now < config["StatsInterval"]:
            time.sleep(1)
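# Illustrative sketch (not in the original source): each row appended to self.data
# above has the layout shown in the header below; _writeStats itself is not part of
# this snippet, so a plain-text writer like this is only one possible implementation.
def _sketch_write_stats(rows, filename):
    out = open(filename, 'w')
    try:
        out.write('# time lgi_total lgi_running pilot_total pilot_running\n')
        for row in rows:
            out.write(' '.join([str(column) for column in row]) + '\n')
    finally:
        out.close()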
def checkReport(self, jobDoc):

    job = self.getJobObject()

    config = Config.getConfig('Metrics')
    location = config['location']
    if not os.path.exists(location):
        raise BackendError(0, 'Location %s file doesn\'t exist.' % (location))

    config = ConfigParser()
    config.read(location)

    PARAMS = [('status', 'status')]

    if config.has_section('report'):
        PARAMS += config.items('report')
    else:
        logger.warning('No report in metrics')

    for n, v in PARAMS:
        if v:
            job.backend.report[v] = jobDoc.getAttribute(v)
def checkReport(self, jobDoc):

    job = self.getJobObject()

    config = Config.getConfig('Metrics')
    location = config['location']
    if not os.path.exists(location):
        raise BackendError('CRAB', 'Location %s file doesn\'t exist.' % (location))

    config = ConfigParser()
    config.read(location)

    PARAMS = [('status', 'status')]

    if config.has_section('report'):
        PARAMS += config.items('report')
    else:
        logger.warning('No report in metrics')

    for n, v in PARAMS:
        if v:
            job.backend.report[v] = jobDoc.getAttribute(v)
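# Illustrative sketch (not in the original source): a hypothetical [Metrics]location
# file that checkReport() above would accept. The [report] section lists the
# job-document attributes that get copied into job.backend.report; the option names
# and values here are invented.
from ConfigParser import ConfigParser   # Python 2 module, matching the code above
from StringIO import StringIO

example_metrics = StringIO("[report]\n"
                           "status = status\n"
                           "exitcode = exitcode\n")
metrics = ConfigParser()
metrics.readfp(example_metrics)
print(metrics.items('report'))   # e.g. [('status', 'status'), ('exitcode', 'exitcode')]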
def _getDefaultValueInternal(self, attr, val=None, check=False):
    """ Get the default value of a schema item, both simple and component.
    If check is True then val is used instead of the default value: this is used to check
    if the val may be used as a default value (e.g. if it is OK to use it as a value in the config file)
    """

    def_name = defaultConfigSectionName(self.name)
    item = self.getItem(attr)
    stored_attr_key = def_name + ':' + str(attr)

    from Ganga.Utility.Config import Config
    is_finalized = Config._after_bootstrap

    useDefVal = False

    try:
        # Attempt to get the relevant config section
        config = Config.getConfig(def_name)

        if is_finalized and stored_attr_key in _found_attrs and not config.hasModified:
            defvalue = _found_attrs[stored_attr_key]
        else:
            if attr in config.getEffectiveOptions():
                defvalue = config[attr]
                from Ganga.GPIDev.Base.Proxy import isProxy
                if isProxy(defvalue):
                    ## Just in case a developer puts the proxied object into the default value!
                    raise GangaException("(1)Proxy found where it shouldn't be in the Config: %s" % stored_attr_key)
                _found_attrs[stored_attr_key] = defvalue
            else:
                useDefVal = True
    except (KeyError, Config.ConfigError):
        useDefVal = True

    if useDefVal:
        # hidden, protected and sequence values are not represented in config
        defvalue = item['defvalue']
        from Ganga.GPIDev.Base.Proxy import isProxy
        if isProxy(defvalue):
            ## Just in case a developer puts the proxied object into the default value!
            raise GangaException("(2)Proxy found where it shouldn't be in the Config: %s" % stored_attr_key)
        _found_attrs[stored_attr_key] = defvalue

    # in the checking mode, use the provided value instead
    if check is True:
        defvalue = val

    if isinstance(item, ComponentItem):
        # FIXME: limited support for initializing non-empty sequences (i.e.
        # apps => ['DaVinci','Executable'] is NOT correctly initialized)

        if not item['sequence']:
            if defvalue is None:
                if not item['load_default']:
                    assert(item['optional'])
                    return None

            # if a defvalue of a component item is an object (not string) just process it as for SimpleItems (useful for FileItems)
            # otherwise do a lookup via plugin registry

            category = item['category']
            if isinstance(defvalue, str) or defvalue is None:
                try:
                    config = Config.getConfig(def_name)
                    has_modified = config.hasModified
                except KeyError:
                    has_modified = False
                if category not in _found_components or has_modified:
                    _found_components[category] = allPlugins.find(category, defvalue)
                return _found_components[category]()

    # If needed/requested make a copy of the value elsewhere
    return defvalue
def report_inner(job=None, isJob=False, isTask=False): userInfoDirName = "userreport" tempDirName = "reportsRepository" # job relevant info jobSummaryFileName = "jobsummary.txt" jobFullPrintFileName = "jobfullprint.txt" repositoryPath = "repository/$usr/LocalXML/6.0/jobs/$thousandsNumxxx" # task relevant info taskSummaryFileName = "tasksummary.txt" taskFullPrintFileName = "taskfullprint.txt" tasksRepositoryPath = "repository/$usr/LocalXML/6.0/tasks/$thousandsNumxxx" # user's info environFileName = "environ.txt" userConfigFileName = "userconfig.txt" defaultConfigFileName = "gangarc.txt" ipythonHistoryFileName = "ipythonhistory.txt" gangaLogFileName = "gangalog.txt" jobsListFileName = "jobslist.txt" tasksListFileName = "taskslist.txt" from Ganga.Utility import Config uploadFileServer = Config.getConfig('Feedback')['uploadServer'] #uploadFileServer= "http://gangamon.cern.ch/django/errorreports/" #uploadFileServer= "http://ganga-ai-02.cern.ch/django/errorreports/" #uploadFileServer= "http://127.0.0.1:8000/errorreports" def printDictionary(dictionary, file=sys.stdout): for k, v in dictionary.iteritems(): print('%s: %s' % (k, v), file=file) if k == 'PYTHONPATH': global PYTHON_PATH PYTHON_PATH = v def extractFileObjects(fileName, targetDirectoryName): try: fileToRead = open(fileName, 'r') try: fileText = fileToRead.read() import re pattern = "File\(name=\'(.+?)\'" matches = re.findall(pattern, fileText) for fileName in matches: fileName = os.path.expanduser(fileName) targetFileName = os.path.join( targetDirectoryName, os.path.basename(fileName)) shutil.copyfile(fileName, targetFileName) finally: fileToRead.close() # except IOError, OSError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) def writeErrorLog(errorMessage): try: fileToWrite = open(errorLogPath, 'a') try: fileToWrite.write(errorMessage) fileToWrite.write("\n") except Exception as err: logger.debug("Err: %s" % str(err)) raise finally: fileToWrite.close() except Exception as err2: logger.debug("Err: %s" % str(err2)) pass def writeStringToFile(fileName, stringToWrite): try: # uncomment this to try the error logger #fileName = '~/' + fileName fileToWrite = open(fileName, 'w') try: fileToWrite.write(stringToWrite) except Exception as err: logger.debug("Err: %s" % str(err)) raise err finally: fileToWrite.close() # except IOError: except Exception as err: logger.debug("Err2: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) def renameDataFiles(directory): for fileName in os.listdir(directory): fullFileName = os.path.join(directory, fileName) if os.path.isfile(fullFileName): if fileName == 'data': os.rename(fullFileName, fullFileName + '.txt') else: renameDataFiles(fullFileName) import shutil import tarfile import tempfile import os userHomeDir = os.getenv("HOME") tempDir = tempfile.mkdtemp() errorLogPath = os.path.join(tempDir, 'reportErrorLog.txt') fullPathTempDir = os.path.join(tempDir, tempDirName) fullLogDirName = '' # create temp dir and specific dir for the job/user try: if not os.path.exists(fullPathTempDir): os.mkdir(fullPathTempDir) import datetime now = datetime.datetime.now() userInfoDirName = userInfoDirName + \ now.strftime("%Y-%m-%d-%H:%M:%S") fullLogDirName = os.path.join(fullPathTempDir, userInfoDirName) # if report directory exists -> delete it's content(we would like # last version of the report) if os.path.exists(fullLogDirName): shutil.rmtree(fullLogDirName) os.mkdir(fullLogDirName) # except OSError: except Exception as err: logger.debug("Err: %s" % str(err)) 
writeErrorLog(str(sys.exc_info()[1])) # import os.environ in a file fullEnvironFileName = os.path.join(fullLogDirName, environFileName) try: inputFile = open(fullEnvironFileName, 'w') try: printDictionary(os.environ, file=inputFile) print('OS VERSION : ' + platform.platform(), file=inputFile) finally: inputFile.close() # except IOError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import user config in a file userConfigFullFileName = os.path.join( fullLogDirName, userConfigFileName) try: inputFile = open(userConfigFullFileName, 'w') try: print("#GANGA_VERSION = %s" % config.System.GANGA_VERSION, file=inputFile) global GANGA_VERSION GANGA_VERSION = config.System.GANGA_VERSION # this gets the default values # Ganga.GPIDev.Lib.Config.Config.print_config_file() # this should get the changed values for c in config: print(config[c], file=inputFile) finally: inputFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # write gangarc - default configuration defaultConfigFullFileName = os.path.join( fullLogDirName, defaultConfigFileName) try: outputFile = open(os.path.join(userHomeDir, '.gangarc'), 'r') try: writeStringToFile(defaultConfigFullFileName, outputFile.read()) finally: outputFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import ipython history in a file try: ipythonFile = open( os.path.join(os.environ['IPYTHONDIR'], 'history'), 'r') try: lastIPythonCommands = ipythonFile.readlines()[-20:] writeStringToFile(os.path.join( fullLogDirName, ipythonHistoryFileName), '\n'.join(lastIPythonCommands)) #writeStringToFile(os.path.join(fullLogDirName, ipythonHistoryFileName), ipythonFile.read()) finally: ipythonFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import gangalog in a file userLogFileLocation = config["Logging"]._logfile userLogFileLocation = os.path.expanduser(userLogFileLocation) try: gangaLogFile = open(userLogFileLocation, 'r') try: writeStringToFile( os.path.join(fullLogDirName, gangaLogFileName), gangaLogFile.read()) finally: gangaLogFile.close() # except IOError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import the result of jobs command in the report jobsListFullFileName = os.path.join(fullLogDirName, jobsListFileName) try: outputFile = open(jobsListFullFileName, 'w') try: from Ganga.GPI import jobs print(jobs, file=outputFile) finally: outputFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import the result of tasks command in the report tasksListFullFileName = os.path.join(fullLogDirName, tasksListFileName) try: outputFile = open(tasksListFullFileName, 'w') try: from Ganga.GPI import tasks print(tasks, file=outputFile) finally: outputFile.close() # except IOError does not catch the exception ??? 
except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # save it here because we will change fullLogDirName, but we want this # to be the archive and to be deleted folderToArchive = fullLogDirName # import job relevant info if (job is not None and isJob): global JOB_REPORT, APPLICATION_NAME, BACKEND_NAME JOB_REPORT = True APPLICATION_NAME = getName(job.application) BACKEND_NAME = getName(job.backend) # create job folder jobFolder = 'job_%s' % str(job.fqid) fullLogDirName = os.path.join(fullLogDirName, jobFolder) os.mkdir(fullLogDirName) # import job summary in a file fullJobSummaryFileName = os.path.join( fullLogDirName, jobSummaryFileName) writeStringToFile(fullJobSummaryFileName, str(job)) # import job full print in a file fullJobPrintFileName = os.path.join( fullLogDirName, jobFullPrintFileName) try: inputFile = open(fullJobPrintFileName, 'w') try: full_print(job, inputFile) finally: inputFile.close() # except IOError, OSError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # extract file objects try: fileObjectsPath = os.path.join(fullLogDirName, 'fileobjects') os.mkdir(fileObjectsPath) extractFileObjects(fullJobSummaryFileName, fileObjectsPath) # except OSError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy dir of the job ->input/output and subjobs try: parentDir, currentDir = os.path.split(job.inputdir[:-1]) workspaceDir = os.path.join(fullLogDirName, 'workspace') shutil.copytree(parentDir, workspaceDir) # except IOError, OSError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy shared area of the job try: if hasattr(job.application, 'is_prepared'): if job.application.is_prepared is not None and job.application.is_prepared is not True: import os from Ganga.Utility.Config import getConfig from Ganga.Utility.files import expandfilename shared_path = os.path.join(expandfilename(getConfig( 'Configuration')['gangadir']), 'shared', getConfig('Configuration')['user']) shareddir = os.path.join( shared_path, job.application.is_prepared.name) if os.path.isdir(shareddir): sharedAreaDir = os.path.join( fullLogDirName, 'sharedarea') shutil.copytree(shareddir, sharedAreaDir) # except IOError, OSError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy repository job file try: indexFileName = str(job.id) + '.index' repositoryPath = repositoryPath.replace( '$usr', os.getenv("USER")) # check if the job is subjob -> different way of forming the # path to the repository is_subjob = job.fqid.find('.') > -1 if is_subjob: jobid, subjobid = job.fqid.split( '.')[0], job.fqid.split('.')[1] repositoryPath = repositoryPath.replace( '$thousandsNum', str(int(jobid) / 1000)) repositoryPath = os.path.join(repositoryPath, jobid) else: repositoryPath = repositoryPath.replace( '$thousandsNum', str(job.id / 1000)) repositoryFullPath = os.path.join( config.Configuration.gangadir, repositoryPath) indexFileSourcePath = os.path.join( repositoryFullPath, indexFileName) repositoryFullPath = os.path.join( repositoryFullPath, str(job.id)) repositoryTargetPath = os.path.join( fullLogDirName, 'repository', str(job.id)) os.mkdir(os.path.join(fullLogDirName, 'repository')) shutil.copytree(repositoryFullPath, repositoryTargetPath) # data files are copied but can not be opened -> add .txt to # their file names renameDataFiles(repositoryTargetPath) if not is_subjob: # copy 
.index file indexFileTargetPath = os.path.join( fullLogDirName, 'repository', indexFileName) shutil.copyfile(indexFileSourcePath, indexFileTargetPath) # except OSError, IOError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import task relevant info if (job is not None and isTask): # job is actually a task object task = job # create task folder taskFolder = 'task_%s' % str(task.id) fullLogDirName = os.path.join(fullLogDirName, taskFolder) os.mkdir(fullLogDirName) # import task summary in a file fullTaskSummaryFileName = os.path.join( fullLogDirName, taskSummaryFileName) writeStringToFile(fullTaskSummaryFileName, str(task)) # import task full print in a file fullTaskPrintFileName = os.path.join( fullLogDirName, taskFullPrintFileName) try: inputFile = open(fullTaskPrintFileName, 'w') try: full_print(task, inputFile) except Exception as err: logger.debug("Err: %s" % str(err)) raise err finally: inputFile.close() # except IOError, OSError: except Exception as err: logger.debug("Err2: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy shared area of the task try: if len(task.transforms) > 0: if hasattr(task.transforms[0], 'application') and hasattr(task.transforms[0].application, 'is_prepared'): if task.transforms[0].application.is_prepared is not None and task.transforms[0].application.is_prepared is not True: import os from Ganga.Utility.Config import getConfig from Ganga.Utility.files import expandfilename shared_path = os.path.join(expandfilename(getConfig( 'Configuration')['gangadir']), 'shared', getConfig('Configuration')['user']) shareddir = os.path.join( shared_path, task.transforms[0].application.is_prepared.name) if os.path.isdir(shareddir): sharedAreaDir = os.path.join( fullLogDirName, 'sharedarea') shutil.copytree(shareddir, sharedAreaDir) # except IOError, OSError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy repository task file try: indexFileName = str(task.id) + '.index' tasksRepositoryPath = tasksRepositoryPath.replace( '$usr', os.getenv("USER")) tasksRepositoryPath = tasksRepositoryPath.replace( '$thousandsNum', str(task.id / 1000)) repositoryFullPath = os.path.join( config.Configuration.gangadir, tasksRepositoryPath) indexFileSourcePath = os.path.join( repositoryFullPath, indexFileName) repositoryFullPath = os.path.join( repositoryFullPath, str(task.id)) repositoryTargetPath = os.path.join( fullLogDirName, 'repository', str(task.id)) os.mkdir(os.path.join(fullLogDirName, 'repository')) shutil.copytree(repositoryFullPath, repositoryTargetPath) # data files are copied but can not be opened -> add .txt to # their file names renameDataFiles(repositoryTargetPath) # copy .index file indexFileTargetPath = os.path.join( fullLogDirName, 'repository', indexFileName) shutil.copyfile(indexFileSourcePath, indexFileTargetPath) # except OSError, IOError: except Exception as err: logger.debug("Err %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) resultArchive = '%s.tar.gz' % folderToArchive try: resultFile = tarfile.TarFile.open(resultArchive, 'w:gz') try: resultFile.add( folderToArchive, arcname=os.path.basename(folderToArchive)) # put the error log in the archive if(os.path.exists(errorLogPath)): resultFile.add( errorLogPath, arcname=os.path.basename(errorLogPath)) except Exception as err: logger.debug("Err: %s" % str(err)) raise finally: resultFile.close() except Exception as err: logger.debug("Err2: %s" % str(err)) raise # pass # remove temp dir 
if(os.path.exists(folderToArchive)): shutil.rmtree(folderToArchive) # print the error if there is something if os.path.exists(errorLogPath): logger.error('') logger.error('An error occured while collecting report information : ' + open(errorLogPath, 'r').read()) logger.error('') # delete the errorfile from user's pc if(os.path.exists(errorLogPath)): os.remove(errorLogPath) # return the path to the archive and the path to the upload server return (resultArchive, uploadFileServer, tempDir)
def _getDefaultValueInternal(self, attr, val=None, check=False):
    """ Get the default value of a schema item, both simple and component.
    If check is True then val is used instead of the default value: this is used to check
    if the val may be used as a default value (e.g. if it is OK to use it as a value in the config file)
    """

    def_name = defaultConfigSectionName(self.name)
    item = self.getItem(attr)
    stored_option_key = def_name
    stored_attr_key = def_name + ':' + attr

    is_finalized = Config._after_bootstrap

    useDefVal = False

    try:
        # Attempt to get the relevant config section
        if is_finalized:
            if stored_option_key in _stored_configs:
                config = _stored_configs[stored_option_key]
            else:
                config = Config.getConfig(def_name)
                _stored_configs[stored_option_key] = config
        else:
            config = Config.getConfig(def_name)

        if is_finalized and not config.hasModified and stored_attr_key in _found_attrs:
            defvalue = _found_attrs[stored_attr_key]
        else:
            if is_finalized:
                if stored_option_key in _stored_options and not config.hasModified:
                    eff_ops = _stored_options[stored_option_key]
                else:
                    eff_ops = config.getEffectiveOptions()
                    _stored_options[stored_option_key] = eff_ops
            else:
                eff_ops = config.getEffectiveOptions()

            if attr in eff_ops:
                defvalue = config[attr]
                from Ganga.GPIDev.Base.Proxy import isProxy
                if isProxy(defvalue):
                    ## Just in case a developer puts the proxied object into the default value!
                    raise GangaException("(1)Proxy found where it shouldn't be in the Config: %s" % stored_attr_key)
                _found_attrs[stored_attr_key] = defvalue
            else:
                useDefVal = True
    except (KeyError, Config.ConfigError):
        useDefVal = True

    if useDefVal:
        # hidden, protected and sequence values are not represented in config
        defvalue = item['defvalue']
        from Ganga.GPIDev.Base.Proxy import isProxy
        if isProxy(defvalue):
            ## Just in case a developer puts the proxied object into the default value!
            raise GangaException("(2)Proxy found where it shouldn't be in the Config: %s" % stored_attr_key)
        _found_attrs[stored_attr_key] = defvalue

    # in the checking mode, use the provided value instead
    if check is True:
        defvalue = val

    if isinstance(item, ComponentItem):
        # FIXME: limited support for initializing non-empty sequences (i.e.
        # apps => ['DaVinci','Executable'] is NOT correctly initialized)

        if not item['sequence']:
            if defvalue is None:
                if not item['load_default']:
                    try:
                        assert(item['optional'])
                    except AssertionError:
                        raise SchemaError("This item '%s' is not a sequence, doesn't have a load_default and is not optional. This is unsupported!" % type(item))
                    return None

            # if a defvalue of a component item is an object (not string) just process it as for SimpleItems (useful for FileItems)
            # otherwise do a lookup via plugin registry

            category = item['category']
            if isinstance(defvalue, str) or defvalue is None:
                try:
                    config = Config.getConfig(def_name)
                    has_modified = config.hasModified
                except KeyError:
                    has_modified = False
                if category not in _found_components or has_modified:
                    _found_components[category] = allPlugins.find(category, defvalue)
                return _found_components[category]()

            if isclass(defvalue):
                return defvalue()

    # If needed/requested make a copy of the value elsewhere
    return defvalue
def getConfig(): """Return MSGMS Config object.""" from Ganga.Utility import Config return Config.getConfig('MSGMS')
#
# This is free software; you can redistribute it and/or modify it under the terms of
# the GNU General Public License as published by the Free Software Foundation.
#
# http://www.gnu.org/licenses/gpl.txt

import os
import time
import datetime

from InterpoList import InterpoList

from Ganga.Utility import Config
from Ganga.Utility.logging import getLogger
from Ganga.Core.GangaThread import GangaThread

# Module configuration
config = Config.makeConfig('LGI', 'Leiden Grid Initiative Pilot job framework settings')
config.addOption('PilotDist', 'pilotdist/pilotjob.tar.gz',
                 'Pilot job resource daemon tarball, fully configured for your applications and project server')
config.addOption('PilotScript', 'pilotdist/pilotrun.sh',
                 'Script to run inside pilotjob, which unpacks the tarball and executes the resource daemon')
config.addOption('SchedMin', 1, 'Minimum number of pilotjobs at all times')
config.addOption('SchedMax', 10, 'Maximum number of pilotjobs')
config.addOption('Poll', 30, 'LGI thread polling time')
config.addOption('Update', 10, 'Pilot thread update time')
config.addOption('WaitNew', 60,
                 'If after this many seconds there are (still) more LGI jobs than pilotjobs, spawn new pilotjobs.')
config.addOption('WaitTerm', 300, 'Terminate pilotjob after seconds of idle time')
config.addOption('MaxRuntime', None,
                 'Maximum run-time of pilotjobs in seconds. Leave empty to run indefinitely until batch system terminates it.')
config.addOption('StatsInterval', 0, 'Statistics logging interval, or 0 for no statistics')
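# Illustrative sketch (not in the original module): how the options registered above
# are typically read back elsewhere in this package (compare the LGI threads shown
# in the other snippets of this collection).
def _sketch_read_lgi_options():
    from Ganga.Utility import Config
    lgi_config = Config.getConfig('LGI')
    poll_interval = lgi_config['Poll']   # seconds between LGI server polls
    sched_window = (lgi_config['SchedMin'], lgi_config['SchedMax'])
    return poll_interval, sched_window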
def start(self):
    if Config.getConfig('LGI')['Enable'] is False:
        self.log.debug('Not starting LGI pilot thread because [LGI]Enable is False')
        return False
    return GangaThread.start(self)
def parseResults(self):

    job = self.getJobObject()

    server = CRABServer()
    try:
        server.status(job)
        server.getOutput(job)
    except:
        logger.error('Could not get the output of the job.')
        # Let's not raise this yet (in case of a double call).
        # raise CRABServerError('Impossible to get the output of the job')

    workdir = job.inputdata.ui_working_dir
    index = int(job.id) + 1
    doc_path = '%s/res/crab_fjr_%d.xml' % (workdir, index)

    if not os.path.exists(doc_path):
        logger.error('FJR %s not found.' % (doc_path))
        return

    try:
        doc = parse(doc_path)
    except:
        logger.error("Could not parse document. File not present?")
        return

    status = doc.firstChild.getAttribute("Status")

    if status in ["Failed"]:
        self.postMortem(job)
        job.updateStatus('failed')
    elif status in ["Success"]:
        if job.status == 'submitting':
            job.updateStatus('submitted')
        job.updateStatus('completed')
    else:
        logger.warning("UNKNOWN PARSE STATUS: " + str(status))

    config = Config.getConfig('Metrics')
    location = config['location']
    if not os.path.exists(location):
        raise BackendError(0, 'Location %s file doesn\'t exist.' % (location))

    config = ConfigParser()
    config.read(location)

    # Iterate over all of them
    SECTIONS = config.sections()
    if 'report' in SECTIONS:
        SECTIONS.remove('report')

    # Only five sections work here...
    for section in SECTIONS:

        if not job.backend.fjr.has_key(section):
            job.backend.fjr[section] = {}

        performancereport = doc.getElementsByTagName("PerformanceReport")[0]
        performancesummary = performancereport.getElementsByTagName("PerformanceSummary")

        for pfs in performancesummary:
            if pfs.getAttribute("Metric") == section:
                metrics = pfs.getElementsByTagName("Metric")
                for metric in metrics:
                    name = metric.getAttribute("Name")
                    if config.has_option(section, name):
                        # Due to names with a minus instead of an underscore, we have to do
                        # this workaround to send them to the DB.
                        name = config.get(section, name)
                        if name:
                            job.backend.fjr[section][name] = metric.getAttribute("Value")
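# Illustrative sketch (not in the original source): the minimal FJR structure that the
# PerformanceReport/PerformanceSummary/Metric loop above expects, parsed here with
# xml.dom.minidom (the parse() used above is assumed to come from that module; the
# root element name and the metric values are assumptions for illustration only).
from xml.dom.minidom import parseString

example_fjr = parseString(
    '<FrameworkJobReport Status="Success">'
    '<PerformanceReport>'
    '<PerformanceSummary Metric="cpu">'
    '<Metric Name="TotalJobCPU" Value="123.4"/>'
    '</PerformanceSummary>'
    '</PerformanceReport>'
    '</FrameworkJobReport>')
report = example_fjr.getElementsByTagName("PerformanceReport")[0]
for summary in report.getElementsByTagName("PerformanceSummary"):
    for metric in summary.getElementsByTagName("Metric"):
        print("%s %s=%s" % (summary.getAttribute("Metric"),
                            metric.getAttribute("Name"),
                            metric.getAttribute("Value")))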
              'saveoutput', 'faillimit', 'ignorelocality', 'inputdata', 'publishdbsurl',
              'activity', 'extrajdl', 'scheddname', 'collector']

spec = {}
for field in specFields:
    if getattr(job.inputdata, field) not in [None, [None]]:
        spec[field] = getattr(job.inputdata, field)

config = Config.getConfig('TASK_CFG')
for field in config:
    if field in specFields and config[field]:
        logger.info(field)
        logger.info(config[field])
        spec[field] = config[field]

spec['cachefilename'] = cachefilename
spec['cacheurl'] = 'https://cmsweb.cern.ch/crabcache'

if spec.has_key('userfiles'):
    # open the userfiles source file and add the userfiles list
    uf = open(spec['userfiles'], 'r')
    spec['userfiles'] = []
    for line in uf:
        spec['userfiles'].append(line.replace('\n', ''))
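# Illustrative sketch (not in the original source): the effect of the [TASK_CFG]
# override loop above on a toy spec dictionary. The field and option values below
# are invented; only non-empty inline options listed in specFields win.
example_spec = {'activity': 'analysis'}                  # taken from job.inputdata
example_task_cfg = {'faillimit': 10, 'collector': ''}    # stand-in for Config.getConfig('TASK_CFG')
example_spec_fields = ['activity', 'faillimit', 'collector']

for field in example_task_cfg:
    if field in example_spec_fields and example_task_cfg[field]:
        example_spec[field] = example_task_cfg[field]
# -> {'activity': 'analysis', 'faillimit': 10}; empty options such as 'collector' are ignored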
"""Utilities for using MSG within Ganga. """ # default stomp.py logging to CRITICAL import Ganga.Utility.Config as Config config = Config.getConfig("Logging") # test if stomp.py logging is already set if "stomp.py" in config: pass # config['stomp.py'] else: from Ganga.Utility.logging import getLogger, _get_logging # set stomp.py logger to CRITICAL getLogger("stomp.py").setLevel(_get_logging().CRITICAL) def createPublisher(server, port, user="******", password="******", idle_timeout=None, exit_timeout=None): """Create a new publisher thread which extends GangaThread where available (i.e. on the client) or Thread otherwise (i.e. on the worker node). N.B. If GangaThread is not available then an exit handler is added, with the given timeout. @param server: The server host name. @param user: The user name. @param password: The password. @param logger: The logger instance. @param idle_timeout: Maximum seconds to idle before closing connection. Negative value indicates never close connection.
def __init__(self):
    super(CRABBackend, self).__init__()
    config = Config.getConfig('CMSSW')
    shell = Shell(os.path.join(config['CMSSW_SETUP'], 'CMSSW_generic.sh'),
                  [config['CMSSW_VERSION'], config['CRAB_VERSION']])
    self.crab_env = shell.env
def _getDefaultValueInternal(self, attr, val=None, check=False):
    """ Get the default value of a schema item, both simple and component.
    If check is True then val is used instead of the default value: this is used to check
    if the val may be used as a default value (e.g. if it is OK to use it as a value in the config file)
    """

    def_name = defaultConfigSectionName(self.name)
    item = self.getItem(attr)
    stored_attr_key = def_name + ':' + str(attr)

    from Ganga.Utility.Config import Config
    is_finalized = Config._after_bootstrap

    useDefVal = False

    try:
        # Attempt to get the relevant config section
        config = Config.getConfig(def_name, create=False)

        if is_finalized and stored_attr_key in _found_attrs and not config.hasModified():
            defvalue = _found_attrs[stored_attr_key]
        else:
            if attr in config.getEffectiveOptions():
                defvalue = config[attr]
                from Ganga.GPIDev.Base.Proxy import isProxy
                if isProxy(defvalue):
                    ## Just in case a developer puts the proxied object into the default value!
                    raise GangaException("(1)Proxy found where it shouldn't be in the Config: %s" % stored_attr_key)
                _found_attrs[stored_attr_key] = defvalue
            else:
                useDefVal = True
    except (KeyError, Config.ConfigError):
        useDefVal = True

    if useDefVal:
        # hidden, protected and sequence values are not represented in config
        defvalue = item['defvalue']
        from Ganga.GPIDev.Base.Proxy import isProxy
        if isProxy(defvalue):
            ## Just in case a developer puts the proxied object into the default value!
            raise GangaException("(2)Proxy found where it shouldn't be in the Config: %s" % stored_attr_key)
        _found_attrs[stored_attr_key] = defvalue

    # in the checking mode, use the provided value instead
    if check is True:
        defvalue = val

    if isinstance(item, ComponentItem):
        # FIXME: limited support for initializing non-empty sequences (i.e.
        # apps => ['DaVinci','Executable'] is NOT correctly initialized)

        if not item['sequence']:
            if defvalue is None:
                if not item['load_default']:
                    assert(item['optional'])
                    return None

            # if a defvalue of a component item is an object (not string) just process it as for SimpleItems (useful for FileItems)
            # otherwise do a lookup via plugin registry

            category = item['category']
            if isinstance(defvalue, str) or defvalue is None:
                try:
                    config = Config.getConfig(def_name, create=False)
                    has_modified = config.hasModified()
                except KeyError:
                    has_modified = False
                if category not in _found_components or has_modified:
                    _found_components[category] = allPlugins.find(category, defvalue)
                return _found_components[category]()

    # make a copy of the default value (to avoid strange effects if the
    # original is modified)
    try:
        from Ganga.GPIDev.Base.Proxy import isType, getRuntimeGPIObject, stripProxy, getName
        from Ganga.GPIDev.Base.Objects import Node
        if isinstance(defvalue, Node):
            return stripProxy(getRuntimeGPIObject(getName(defvalue)))
        else:
            return copy.deepcopy(defvalue)
    except ImportError:
        return copy.deepcopy(defvalue)
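# Illustrative sketch (not in the original source): the config lookup above means a
# schema default can be overridden from the per-plugin defaults section whose name is
# produced by defaultConfigSectionName(). A hypothetical configuration fragment might
# look like the string below; the exact section name, option and value are assumptions
# shown for documentation only.
example_defaults_fragment = """
[defaults_Executable]
# overrides the schema-declared defvalue of the 'exe' item (names are hypothetical)
exe = /bin/hostname
"""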
def report_inner(job=None, isJob=False, isTask=False): userInfoDirName = "userreport" tempDirName = "reportsRepository" # job relevant info jobSummaryFileName = "jobsummary.txt" jobFullPrintFileName = "jobfullprint.txt" repositoryPath = "repository/$usr/LocalXML/6.0/jobs/$thousandsNumxxx" # task relevant info taskSummaryFileName = "tasksummary.txt" taskFullPrintFileName = "taskfullprint.txt" tasksRepositoryPath = "repository/$usr/LocalXML/6.0/tasks/$thousandsNumxxx" # user's info environFileName = "environ.txt" userConfigFileName = "userconfig.txt" defaultConfigFileName = "gangarc.txt" ipythonHistoryFileName = "ipythonhistory.txt" gangaLogFileName = "gangalog.txt" jobsListFileName = "jobslist.txt" tasksListFileName = "taskslist.txt" from Ganga.Utility import Config uploadFileServer = Config.getConfig('Feedback')['uploadServer'] #uploadFileServer= "http://gangamon.cern.ch/django/errorreports/" #uploadFileServer= "http://ganga-ai-02.cern.ch/django/errorreports/" #uploadFileServer= "http://127.0.0.1:8000/errorreports" def printDictionary(dictionary, file=sys.stdout): for k, v in dictionary.iteritems(): print('%s: %s' % (k, v), file=file) if k == 'PYTHONPATH': global PYTHON_PATH PYTHON_PATH = v def extractFileObjects(fileName, targetDirectoryName): try: fileToRead = open(fileName, 'r') try: fileText = fileToRead.read() import re pattern = "File\(name=\'(.+?)\'" matches = re.findall(pattern, fileText) for fileName in matches: fileName = os.path.expanduser(fileName) targetFileName = os.path.join( targetDirectoryName, os.path.basename(fileName)) shutil.copyfile(fileName, targetFileName) finally: fileToRead.close() # except IOError, OSError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) def writeErrorLog(errorMessage): try: fileToWrite = open(errorLogPath, 'a') try: fileToWrite.write(errorMessage) fileToWrite.write("\n") except Exception as err: logger.debug("Err: %s" % str(err)) raise finally: fileToWrite.close() except Exception as err2: logger.debug("Err: %s" % str(err2)) pass def writeStringToFile(fileName, stringToWrite): try: # uncomment this to try the error logger #fileName = '~/' + fileName fileToWrite = open(fileName, 'w') try: fileToWrite.write(stringToWrite) except Exception as err: logger.debug("Err: %s" % str(err)) raise err finally: fileToWrite.close() # except IOError: except Exception as err: logger.debug("Err2: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) def renameDataFiles(directory): for fileName in os.listdir(directory): fullFileName = os.path.join(directory, fileName) if os.path.isfile(fullFileName): if fileName == 'data': os.rename(fullFileName, fullFileName + '.txt') else: renameDataFiles(fullFileName) import shutil import tarfile import tempfile import os userHomeDir = os.getenv("HOME") tempDir = tempfile.mkdtemp() errorLogPath = os.path.join(tempDir, 'reportErrorLog.txt') fullPathTempDir = os.path.join(tempDir, tempDirName) fullLogDirName = '' # create temp dir and specific dir for the job/user try: if not os.path.exists(fullPathTempDir): os.mkdir(fullPathTempDir) import datetime now = datetime.datetime.now() userInfoDirName = userInfoDirName + \ now.strftime("%Y-%m-%d-%H:%M:%S") fullLogDirName = os.path.join(fullPathTempDir, userInfoDirName) # if report directory exists -> delete it's content(we would like # last version of the report) if os.path.exists(fullLogDirName): shutil.rmtree(fullLogDirName) os.mkdir(fullLogDirName) # except OSError: except Exception as err: logger.debug("Err: %s" % str(err)) 
writeErrorLog(str(sys.exc_info()[1])) # import os.environ in a file fullEnvironFileName = os.path.join(fullLogDirName, environFileName) try: inputFile = open(fullEnvironFileName, 'w') try: printDictionary(os.environ, file=inputFile) print('OS VERSION : ' + platform.platform(), file=inputFile) finally: inputFile.close() # except IOError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import user config in a file userConfigFullFileName = os.path.join( fullLogDirName, userConfigFileName) try: inputFile = open(userConfigFullFileName, 'w') try: print("#GANGA_VERSION = %s" % config.System.GANGA_VERSION, file=inputFile) global GANGA_VERSION GANGA_VERSION = config.System.GANGA_VERSION # this gets the default values # Ganga.GPIDev.Lib.Config.Config.print_config_file() # this should get the changed values for c in config: print(config[c], file=inputFile) finally: inputFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # write gangarc - default configuration defaultConfigFullFileName = os.path.join( fullLogDirName, defaultConfigFileName) try: outputFile = open(os.path.join(userHomeDir, '.gangarc'), 'r') try: writeStringToFile(defaultConfigFullFileName, outputFile.read()) finally: outputFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import ipython history in a file try: ipythonFile = open( os.path.join(os.environ['IPYTHONDIR'], 'history'), 'r') try: lastIPythonCommands = ipythonFile.readlines()[-20:] writeStringToFile(os.path.join( fullLogDirName, ipythonHistoryFileName), '\n'.join(lastIPythonCommands)) #writeStringToFile(os.path.join(fullLogDirName, ipythonHistoryFileName), ipythonFile.read()) finally: ipythonFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import gangalog in a file userLogFileLocation = config["Logging"]._logfile userLogFileLocation = os.path.expanduser(userLogFileLocation) try: gangaLogFile = open(userLogFileLocation, 'r') try: writeStringToFile( os.path.join(fullLogDirName, gangaLogFileName), gangaLogFile.read()) finally: gangaLogFile.close() # except IOError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import the result of jobs command in the report jobsListFullFileName = os.path.join(fullLogDirName, jobsListFileName) try: outputFile = open(jobsListFullFileName, 'w') try: from Ganga.GPI import jobs print(jobs, file=outputFile) finally: outputFile.close() # except IOError does not catch the exception ??? except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import the result of tasks command in the report tasksListFullFileName = os.path.join(fullLogDirName, tasksListFileName) try: outputFile = open(tasksListFullFileName, 'w') try: from Ganga.GPI import tasks print(tasks, file=outputFile) finally: outputFile.close() # except IOError does not catch the exception ??? 
except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # save it here because we will change fullLogDirName, but we want this # to be the archive and to be deleted folderToArchive = fullLogDirName # import job relevant info if (job is not None and isJob): global JOB_REPORT, APPLICATION_NAME, BACKEND_NAME JOB_REPORT = True APPLICATION_NAME = job.application.__class__.__name__ BACKEND_NAME = job.backend.__class__.__name__ # create job folder jobFolder = 'job_%s' % str(job.fqid) fullLogDirName = os.path.join(fullLogDirName, jobFolder) os.mkdir(fullLogDirName) # import job summary in a file fullJobSummaryFileName = os.path.join( fullLogDirName, jobSummaryFileName) writeStringToFile(fullJobSummaryFileName, str(job)) # import job full print in a file fullJobPrintFileName = os.path.join( fullLogDirName, jobFullPrintFileName) try: inputFile = open(fullJobPrintFileName, 'w') try: full_print(job, inputFile) finally: inputFile.close() # except IOError, OSError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # extract file objects try: fileObjectsPath = os.path.join(fullLogDirName, 'fileobjects') os.mkdir(fileObjectsPath) extractFileObjects(fullJobSummaryFileName, fileObjectsPath) # except OSError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy dir of the job ->input/output and subjobs try: parentDir, currentDir = os.path.split(job.inputdir[:-1]) workspaceDir = os.path.join(fullLogDirName, 'workspace') shutil.copytree(parentDir, workspaceDir) # except IOError, OSError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy shared area of the job try: if hasattr(job.application, 'is_prepared'): if job.application.is_prepared is not None and job.application.is_prepared is not True: import os from Ganga.Utility.Config import getConfig from Ganga.Utility.files import expandfilename shared_path = os.path.join(expandfilename(getConfig( 'Configuration')['gangadir']), 'shared', getConfig('Configuration')['user']) shareddir = os.path.join( shared_path, job.application.is_prepared.name) if os.path.isdir(shareddir): sharedAreaDir = os.path.join( fullLogDirName, 'sharedarea') shutil.copytree(shareddir, sharedAreaDir) # except IOError, OSError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy repository job file try: indexFileName = str(job.id) + '.index' repositoryPath = repositoryPath.replace( '$usr', os.getenv("USER")) # check if the job is subjob -> different way of forming the # path to the repository is_subjob = job.fqid.find('.') > -1 if is_subjob: jobid, subjobid = job.fqid.split( '.')[0], job.fqid.split('.')[1] repositoryPath = repositoryPath.replace( '$thousandsNum', str(int(jobid) / 1000)) repositoryPath = os.path.join(repositoryPath, jobid) else: repositoryPath = repositoryPath.replace( '$thousandsNum', str(job.id / 1000)) repositoryFullPath = os.path.join( config.Configuration.gangadir, repositoryPath) indexFileSourcePath = os.path.join( repositoryFullPath, indexFileName) repositoryFullPath = os.path.join( repositoryFullPath, str(job.id)) repositoryTargetPath = os.path.join( fullLogDirName, 'repository', str(job.id)) os.mkdir(os.path.join(fullLogDirName, 'repository')) shutil.copytree(repositoryFullPath, repositoryTargetPath) # data files are copied but can not be opened -> add .txt to # their file names renameDataFiles(repositoryTargetPath) if not 
is_subjob: # copy .index file indexFileTargetPath = os.path.join( fullLogDirName, 'repository', indexFileName) shutil.copyfile(indexFileSourcePath, indexFileTargetPath) # except OSError, IOError: except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # import task relevant info if (job is not None and isTask): # job is actually a task object task = job # create task folder taskFolder = 'task_%s' % str(task.id) fullLogDirName = os.path.join(fullLogDirName, taskFolder) os.mkdir(fullLogDirName) # import task summary in a file fullTaskSummaryFileName = os.path.join( fullLogDirName, taskSummaryFileName) writeStringToFile(fullTaskSummaryFileName, str(task)) # import task full print in a file fullTaskPrintFileName = os.path.join( fullLogDirName, taskFullPrintFileName) try: inputFile = open(fullTaskPrintFileName, 'w') try: full_print(task, inputFile) except Exception as err: logger.debug("Err: %s" % str(err)) raise err finally: inputFile.close() # except IOError, OSError: except Exception as err: logger.debug("Err2: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy shared area of the task try: if len(task.transforms) > 0: if hasattr(task.transforms[0], 'application') and hasattr(task.transforms[0].application, 'is_prepared'): if task.transforms[0].application.is_prepared is not None and task.transforms[0].application.is_prepared is not True: import os from Ganga.Utility.Config import getConfig from Ganga.Utility.files import expandfilename shared_path = os.path.join(expandfilename(getConfig( 'Configuration')['gangadir']), 'shared', getConfig('Configuration')['user']) shareddir = os.path.join( shared_path, task.transforms[0].application.is_prepared.name) if os.path.isdir(shareddir): sharedAreaDir = os.path.join( fullLogDirName, 'sharedarea') shutil.copytree(shareddir, sharedAreaDir) # except IOError, OSError except Exception as err: logger.debug("Err: %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) # copy repository task file try: indexFileName = str(task.id) + '.index' tasksRepositoryPath = tasksRepositoryPath.replace( '$usr', os.getenv("USER")) tasksRepositoryPath = tasksRepositoryPath.replace( '$thousandsNum', str(task.id / 1000)) repositoryFullPath = os.path.join( config.Configuration.gangadir, tasksRepositoryPath) indexFileSourcePath = os.path.join( repositoryFullPath, indexFileName) repositoryFullPath = os.path.join( repositoryFullPath, str(task.id)) repositoryTargetPath = os.path.join( fullLogDirName, 'repository', str(task.id)) os.mkdir(os.path.join(fullLogDirName, 'repository')) shutil.copytree(repositoryFullPath, repositoryTargetPath) # data files are copied but can not be opened -> add .txt to # their file names renameDataFiles(repositoryTargetPath) # copy .index file indexFileTargetPath = os.path.join( fullLogDirName, 'repository', indexFileName) shutil.copyfile(indexFileSourcePath, indexFileTargetPath) # except OSError, IOError: except Exception as err: logger.debug("Err %s" % str(err)) writeErrorLog(str(sys.exc_info()[1])) resultArchive = '%s.tar.gz' % folderToArchive try: resultFile = tarfile.TarFile.open(resultArchive, 'w:gz') try: resultFile.add( folderToArchive, arcname=os.path.basename(folderToArchive)) # put the error log in the archive if(os.path.exists(errorLogPath)): resultFile.add( errorLogPath, arcname=os.path.basename(errorLogPath)) except Exception as err: logger.debug("Err: %s" % str(err)) raise finally: resultFile.close() except Exception as err: logger.debug("Err2: %s" % str(err)) raise # pass # remove temp 
dir if(os.path.exists(folderToArchive)): shutil.rmtree(folderToArchive) # print the error if there is something if os.path.exists(errorLogPath): logger.error('') logger.error('An error occured while collecting report information : ' + open(errorLogPath, 'r').read()) logger.error('') # delete the errorfile from user's pc if(os.path.exists(errorLogPath)): os.remove(errorLogPath) # return the path to the archive and the path to the upload server return (resultArchive, uploadFileServer, tempDir)
"""Utilities for using MSG within Ganga. """ # default stomp.py logging to CRITICAL import Ganga.Utility.Config as Config config = Config.getConfig('Logging') # test if stomp.py logging is already set if 'stomp.py' in config: pass # config['stomp.py'] else: from Ganga.Utility.logging import getLogger import logging # set stomp.py logger to CRITICAL getLogger('stomp.py').setLevel(logging.CRITICAL) def createPublisher(server, port, user='******', password='******', idle_timeout=None, exit_timeout=None): """Create a new publisher thread which extends GangaThread where available (i.e. on the client) or Thread otherwise (i.e. on the worker node). N.B. If GangaThread is not available then an exit handler is added, with the given timeout. @param server: The server host name. @param user: The user name. @param password: The password. @param logger: The logger instance. @param idle_timeout: Maximum seconds to idle before closing connection. Negative value indicates never close connection.
def getConfig(): """Return DashboardMS Config object.""" from Ganga.Utility import Config return Config.getConfig('DashboardMS')