def __getitem__(self, p):
    try:
        return getattr(self, p)
    except AttributeError:
        msg = 'configuration section "%s" not found' % str(p)
        logger.error(msg)
        raise ConfigError(msg)
def __setattr__(self, o, v):
    # Known options are forwarded to the underlying config section; unknown
    # options are only accepted if the section is open, in which case a new
    # descriptor is attached to the proxy class on the fly.
    if o in dir(self.__class__):
        stripProxy(self).setUserValue(o, v)
    elif not stripProxy(self).is_open:
        raise ConfigError('Cannot set undefined option [%s]%s' %
                          (stripProxy(self).name, o))
    else:
        stripProxy(self)._addOpenOption(o, v)
        stripProxy(self).setUserValue(o, v)
        setattr(self.__class__, o, ConfigDescriptor(o))
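# Illustrative sketch (not part of Ganga): a minimal, self-contained version of
# the "open section" pattern used above, where unknown options are rejected
# unless the section is open, and accepted options get a class-level descriptor.
# ToyDescriptor/ToySection are assumed names used only for this example.
class ToyDescriptor(object):
    def __init__(self, name):
        self._name = name

    def __get__(self, obj, cls=None):
        return obj._values[self._name]


class ToySection(object):
    is_open = True

    def __init__(self):
        object.__setattr__(self, '_values', {})

    def __setattr__(self, name, value):
        if name not in dir(self.__class__):
            if not self.is_open:
                raise AttributeError('Cannot set undefined option %s' % name)
            # attach a descriptor so later attribute reads go through the class
            setattr(self.__class__, name, ToyDescriptor(name))
        self._values[name] = value


# Example: an open section accepts a brand-new option and serves it back.
#   s = ToySection(); s.retries = 3; s.retries  -> 3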
def master_prepare(self, app, appconfig):
    """Prepare the master job"""

    job = app._getParent()  # Returns job or subjob object
    logger.debug("AthenaLocalRTHandler master_prepare called, %s", job.id)

    if job._getRoot().subjobs:
        jobid = "%d" % (job._getRoot().id)
    else:
        jobid = "%d" % job.id

    # Generate output dataset name
    if job.outputdata:
        if job.outputdata._name == 'DQ2OutputDataset':
            dq2_datasetname = job.outputdata.datasetname
            dq2_isGroupDS = job.outputdata.isGroupDS
            dq2_groupname = job.outputdata.groupname
        else:
            dq2_datasetname = ''
            dq2_isGroupDS = False
            dq2_groupname = ''
        self.output_datasetname, self.output_lfn = dq2outputdatasetname(
            dq2_datasetname, jobid, dq2_isGroupDS, dq2_groupname)

    # Expand Athena jobOptions
    if not app.option_file and not app.command_line:
        raise ConfigError(
            "j.application.option_file='' - No Athena jobOptions files specified.")

    athena_options = ''
    inputbox = [
        File(os.path.join(os.path.dirname(__file__), 'athena-utility.sh'))
    ]
    if app.atlas_exetype in ['PYARA', 'ARES', 'ROOT', 'EXE']:
        for option_file in app.option_file:
            athena_options += ' ' + os.path.basename(option_file.name)
            inputbox += [File(option_file.name)]
        athena_options += ' %s ' % app.options
    else:
        for option_file in app.option_file:
            athena_option = os.path.basename(option_file.name)
            athena_options += ' ' + athena_option
            if app.options:
                athena_options = app.options + ' ' + athena_options
            inputbox += [File(option_file.name)]

    if app.command_line:
        athena_options = app.command_line

    athena_usersetupfile = os.path.basename(app.user_setupfile.name)

    # prepare input sandbox
    if app.user_setupfile.name:
        inputbox += [File(app.user_setupfile.name)]

    # CN: added extra test for TNTJobSplitter
    if job.inputdata and job.inputdata._name in ['DQ2Dataset', 'ATLASTier3Dataset'] or (
            job._getRoot().splitter and job._getRoot().splitter._name == 'TNTJobSplitter'):
        _append_files(inputbox, 'ganga-stage-in-out-dq2.py')
        _append_files(inputbox, 'dq2_get')
        _append_files(inputbox, 'dq2info.tar.gz')
        _append_files(inputbox, 'libdcap.so')

    if job.inputdata and job.inputdata._name == 'ATLASDataset':
        if job.inputdata.lfc:
            _append_files(inputbox, 'ganga-stagein-lfc.py')
        else:
            _append_files(inputbox, 'ganga-stagein.py')

    ## insert more scripts to inputsandbox for FileStager
    if job.inputdata and job.inputdata._name in ['DQ2Dataset'] and job.inputdata.type in ['FILE_STAGER']:
        _append_files(inputbox, 'make_filestager_joption.py', 'dm_util.py', 'fs-copy.py')

    if 'getstats.py' not in [os.path.basename(file.name) for file in inputbox]:
        _append_files(inputbox, 'getstats.py')

    if job.outputdata and job.outputdata._name == 'DQ2OutputDataset':
        if not job.outputdata.location:
            raise ApplicationConfigurationError(
                None,
                'j.outputdata.location is empty - Please specify a DQ2 output location - job not submitted !')
        if File(os.path.join(os.path.dirname(__file__), 'ganga-stage-in-out-dq2.py')) not in inputbox:
            _append_files(inputbox, 'ganga-stage-in-out-dq2.py')
        _append_files(inputbox, 'dq2info.tar.gz')
        _append_files(inputbox, 'libdcap.so')
        _append_files(inputbox, 'ganga-joboption-parse.py')

    if job.inputsandbox:
        for file in job.inputsandbox:
            inputbox += [file]

    if app.user_area.name:
        if app.is_prepared is True:
            inputbox += [File(app.user_area.name)]
        else:
            inputbox += [
                File(os.path.join(os.path.join(shared_path, app.is_prepared.name),
                                  os.path.basename(app.user_area.name)))
            ]

    if app.group_area.name and string.find(app.group_area.name, "http") < 0:
        if app.is_prepared is True:
            inputbox += [File(app.group_area.name)]
        else:
            inputbox += [
                File(os.path.join(os.path.join(shared_path, app.is_prepared.name),
                                  os.path.basename(app.group_area.name)))
            ]

    # prepare environment
    try:
        atlas_software = config['ATLAS_SOFTWARE']
    except ConfigError:
        raise ConfigError(
            'No default location of ATLAS_SOFTWARE specified in the configuration.')

    if app.atlas_release == '' and app.atlas_project != "AthAnalysisBase":
        raise ApplicationConfigurationError(
            None,
            'j.application.atlas_release is empty - No ATLAS release version found. Run prepare() or specify a version explicitly.')

    environment = {
        'ATLAS_RELEASE': app.atlas_release,
        'ATHENA_OPTIONS': athena_options,
        'ATLAS_SOFTWARE': atlas_software,
        'ATHENA_USERSETUPFILE': athena_usersetupfile,
        'ATLAS_PROJECT': app.atlas_project,
        'ATLAS_EXETYPE': app.atlas_exetype,
        'GANGA_VERSION': configSystem['GANGA_VERSION'],
        'DQ2_SETUP_SCRIPT': configDQ2['setupScript']
    }

    # Set athena architecture: 32 or 64 bit
    environment['ATLAS_ARCH'] = '32'
    cmtconfig = app.atlas_cmtconfig
    if cmtconfig.find('x86_64') >= 0:
        environment['ATLAS_ARCH'] = '64'

    environment['ATLAS_CMTCONFIG'] = app.atlas_cmtconfig
    environment['DCACHE_RA_BUFFER'] = str(config['DCACHE_RA_BUFFER'])

    if app.atlas_environment:
        for var in app.atlas_environment:
            vars = var.split('=')
            if len(vars) == 2:
                environment[vars[0]] = vars[1]

    if app.atlas_production and (app.atlas_project == 'AtlasPoint1'
                                 or app.atlas_release.find('12.') <= 0):
        environment['ATLAS_PRODUCTION'] = app.atlas_production

    if app.user_area.name:
        environment['USER_AREA'] = os.path.basename(app.user_area.name)

    if app.group_area.name:
        if string.find(app.group_area.name, "http") >= 0:
            environment['GROUP_AREA_REMOTE'] = "%s" % (app.group_area.name)
        else:
            environment['GROUP_AREA'] = os.path.basename(app.group_area.name)

    if app.max_events:
        if (app.max_events != -999) and (app.max_events > -2):
            environment['ATHENA_MAX_EVENTS'] = str(app.max_events)

    if job.inputdata and job.inputdata._name == 'StagerDataset':
        if job.inputdata.type not in ['LOCAL']:
            # Export the grid certificate directory and a host-qualified proxy
            # path so the worker node can reach back for the data.
            try:
                environment['X509CERTDIR'] = os.environ['X509_CERT_DIR']
            except KeyError:
                environment['X509CERTDIR'] = ''
            try:
                proxy = os.environ['X509_USER_PROXY']
            except KeyError:
                proxy = '/tmp/x509up_u%s' % os.getuid()
            REMOTE_PROXY = '%s:%s' % (socket.getfqdn(), proxy)
            environment['REMOTE_PROXY'] = REMOTE_PROXY
            try:
                environment['GANGA_GLITE_UI'] = configLCG['GLITE_SETUP']
            except:
                pass

    if job.inputdata and job.inputdata._name == 'DQ2Dataset':
        if job.inputdata.dataset:
            datasetname = job.inputdata.dataset
            environment['DATASETNAME'] = ':'.join(datasetname)
            environment['DATASETLOCATION'] = ':'.join(job.inputdata.get_locations())
            environment['DQ2_URL_SERVER'] = configDQ2['DQ2_URL_SERVER']
            environment['DQ2_URL_SERVER_SSL'] = configDQ2['DQ2_URL_SERVER_SSL']
            #environment['DATASETTYPE'] = job.inputdata.type
            # At present, DQ2 download is the only thing that works
            environment['DATASETTYPE'] = "DQ2_DOWNLOAD"
            if job.inputdata.accessprotocol:
                environment['DQ2_LOCAL_PROTOCOL'] = job.inputdata.accessprotocol
            try:
                environment['X509CERTDIR'] = os.environ['X509_CERT_DIR']
            except KeyError:
                environment['X509CERTDIR'] = ''
            try:
                proxy = os.environ['X509_USER_PROXY']
            except KeyError:
                proxy = '/tmp/x509up_u%s' % os.getuid()
            REMOTE_PROXY = '%s:%s' % (socket.getfqdn(), proxy)
            environment['REMOTE_PROXY'] = REMOTE_PROXY
            try:
                environment['GANGA_GLITE_UI'] = configLCG['GLITE_SETUP']
            except:
                pass
        else:
            raise ConfigError(
                "j.inputdata.dataset='' - DQ2 dataset name needs to be specified.")

        if job.inputdata.tagdataset:
            environment['TAGDATASETNAME'] = ':'.join(job.inputdata.tagdataset)

    if job.outputdata and job.outputdata._name == 'DQ2OutputDataset':
        environment['DQ2_URL_SERVER'] = configDQ2['DQ2_URL_SERVER']
        environment['DQ2_URL_SERVER_SSL'] = configDQ2['DQ2_URL_SERVER_SSL']
        try:
            environment['X509CERTDIR'] = os.environ['X509_CERT_DIR']
        except KeyError:
            environment['X509CERTDIR'] = ''
        try:
            proxy = os.environ['X509_USER_PROXY']
        except KeyError:
            proxy = '/tmp/x509up_u%s' % os.getuid()
        REMOTE_PROXY = '%s:%s' % (socket.getfqdn(), proxy)
        environment['REMOTE_PROXY'] = REMOTE_PROXY
        try:
            environment['GANGA_GLITE_UI'] = configLCG['GLITE_SETUP']
        except:
            pass

    if hasattr(job.backend, 'extraopts'):
        if job.backend.extraopts.find('site=hh') > 0:
            environment['DQ2_LOCAL_SITE_ID'] = 'DESY-HH_SCRATCHDISK'
        elif job.backend.extraopts.find('site=zn') > 0:
            environment['DQ2_LOCAL_SITE_ID'] = 'DESY-ZN_SCRATCHDISK'
        else:
            environment['DQ2_LOCAL_SITE_ID'] = configDQ2['DQ2_LOCAL_SITE_ID']
    else:
        environment['DQ2_LOCAL_SITE_ID'] = configDQ2['DQ2_LOCAL_SITE_ID']

    exe = os.path.join(os.path.dirname(__file__), 'run-athena-local.sh')

    # output sandbox
    outputbox = []
    outputGUIDs = 'output_guids'
    outputLOCATION = 'output_location'
    outputDATA = 'output_data'
    outputbox.append(outputGUIDs)
    outputbox.append(outputLOCATION)
    outputbox.append(outputDATA)
    outputbox.append('stats.pickle')

    if job.outputsandbox:
        for file in job.outputsandbox:
            outputbox += [file]

    ## retrieve the FileStager log
    if job.inputdata and job.inputdata._name in ['DQ2Dataset'] and job.inputdata.type in ['FILE_STAGER']:
        outputbox += ['FileStager.out', 'FileStager.err']

    # Switch for DEBUG print-out in logfiles
    if app.useNoDebugLogs:
        environment['GANGA_LOG_DEBUG'] = '0'
    else:
        environment['GANGA_LOG_DEBUG'] = '1'

    return StandardJobConfig(File(exe), inputbox, [], outputbox, environment)
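# Illustrative helper (not part of Ganga): the X509 certificate/proxy export
# that master_prepare() above repeats three times, factored out as a sketch.
# It uses only the standard library and mirrors the same fallbacks.
import os
import socket


def grid_proxy_environment(environment):
    environment['X509CERTDIR'] = os.environ.get('X509_CERT_DIR', '')
    proxy = os.environ.get('X509_USER_PROXY', '/tmp/x509up_u%s' % os.getuid())
    # REMOTE_PROXY is "<fully qualified host name>:<path to the proxy file>"
    environment['REMOTE_PROXY'] = '%s:%s' % (socket.getfqdn(), proxy)
    return environment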
def deny_modification(name, x):
    raise ConfigError(
        'Cannot modify [Feedback] settings (attempted %s=%s)' % (name, x))
def __getitem__(self, o):
    try:
        return getattr(self, o)
    except AttributeError as x:
        raise ConfigError('Undefined option %s (%s)' % (o, str(x)))
def __setattr__(self, p, v):
    msg = 'cannot create new configuration sections in GPI'
    logger.error(msg)
    raise ConfigError(msg)
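# Illustrative usage of the config proxies above (assumed object names, not a
# transcript of a real Ganga session): item access mirrors attribute access,
# undefined lookups raise ConfigError, and new top-level sections cannot be
# created from the GPI.
def _demo_config_proxy(config):
    lcg = config['LCG']             # equivalent to config.LCG
    try:
        config['NoSuchSection']     # 'configuration section "..." not found'
    except ConfigError:
        pass
    try:
        config.NewSection = {}      # 'cannot create new configuration sections in GPI'
    except ConfigError:
        pass
    return lcg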
def __init__(self, what):
    ConfigError.__init__(self, what)

def __init__(self, what):
    ConfigError.__init__(self, what)
def master_prepare(self, app, appconfig):
    """Prepare the master job"""

    job = app._getParent()  # Returns job or subjob object
    logger.debug('TagPrepareLCGRTHandler master_prepare called: %s', job.id)

    self.username = gridProxy.identity(safe=True)

    # prepare input sandbox
    if app.atlas_release == '':
        logger.warning('No Athena release specified - defaulting to 15.6.9')
        app.atlas_release = '15.6.9'

    logger.warning("Copying grid proxy to input sandbox for transfer to WN...")

    inputbox = [File(os.path.join(__athdirectory__, 'athena-utility.sh')),
                File(os.path.join(__directory__, 'get_tag_info.py')),
                File(os.path.join(__directory__, 'get_tag_info2.py')),
                File(os.path.join(__directory__, 'template.root')),
                File(gridProxy.location())]

    ## insert more scripts to inputsandbox for FileStager
    if job.inputdata and job.inputdata._name == 'DQ2Dataset' and job.inputdata.type in ['FILE_STAGER']:
        _append_files(inputbox, 'make_filestager_joption.py', 'dm_util.py', 'fs-copy.py')
        #_append_files(inputbox, 'make_filestager_joption.py', 'dm_util.py')

    if job.inputsandbox:
        inputbox += job.inputsandbox

    # prepare environment
    try:
        atlas_software = config['ATLAS_SOFTWARE']
    except ConfigError:
        raise ConfigError(
            'No default location of ATLAS_SOFTWARE specified in the configuration.')

    environment = {
        'MAXNUMREFS': str(app.max_num_refs),
        'STREAM_REF': app.stream_ref,
        'ATLAS_RELEASE': app.atlas_release,
        'ATHENA_OPTIONS': '',
        'ATLAS_SOFTWARE': atlas_software,
        'ATHENA_USERSETUPFILE': '',
        'ATLAS_PROJECT': '',
        'ATLAS_EXETYPE': 'ATHENA',
        'GANGA_GLITE_UI': getConfig('LCG')['GLITE_SETUP'],
        'DQ2_SETUP': getConfig('defaults_DQ2SandboxCache')['setup'],
        'GANGA_VERSION': configSystem['GANGA_VERSION'],
        'PROXY_NAME': os.path.basename(gridProxy.location()),
        'GANGA_OUTPUT_PATH': job.outputdir
    }
    if app.lcg_prepare:
        environment['LCG_PREPARE'] = '1'

    # jobscript
    exe = os.path.join(__directory__, 'run-tagprepare-local.sh')

    # output sandbox
    if app.lcg_prepare:
        outputbox = ['taginfo.pkl', 'subcoll.tar.gz']
    else:
        outputbox = ['taginfo.pkl']

    if job.outputsandbox:
        outputbox += job.outputsandbox

    return StandardJobConfig(File(exe), inputbox, [], outputbox, environment)
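# Illustrative helper (not in Ganga): render the environment dict assembled by
# master_prepare() as shell 'export' lines, e.g. to inspect what the wrapper
# script on the worker node will receive. Name and quoting are assumptions.
def environment_as_exports(environment):
    return ['export %s="%s"' % (key, value)
            for key, value in sorted(environment.items())]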
logger.warning("do not understand option %s in [Plugins]", opt) logger.debug('Reason: want %s' % str(err)) else: if tag == 'default': try: allPlugins.setDefault(category, default_plugins_cfg[opt]) except Ganga.Utility.Plugin.PluginManagerError as x: logger.warning('cannot set the default plugin "%s": %s' % (opt, x)) else: logger.warning("do not understand option %s in [Plugins]", opt) # set alias for default Batch plugin (it will not appear in the # configuration) batch_default_name = getConfig('Configuration').getEffectiveOption('Batch') try: batch_default = allPlugins.find('backends', batch_default_name) except Exception as x: from Ganga.Utility.Config import ConfigError raise ConfigError( 'Check configuration. Unable to set default Batch backend alias (%s)' % str(x)) else: allPlugins.add(batch_default, 'backends', 'Batch') from Ganga.Runtime.GPIexport import exportToInterface if not my_interface: import Ganga.GPI my_interface = Ganga.GPI exportToInterface(my_interface, 'Batch', batch_default, 'Classes')