def __loadConfigurationData(self):
    """Load the configuration data and, on a master server, normalize it.

    Ensures the backup directory exists, loads the configuration and, when
    running as master: verifies the configuration name, exports it, generates
    a version if none exists, registers this server's URL as master, and
    persists the configuration to disk if anything changed.
    """
    try:
        # Best effort: the backup directory may already exist.
        os.makedirs(os.path.join(DIRAC.rootPath, "etc", "csbackup"))
    except OSError:  # narrowed from a bare except: only ignore filesystem errors
        pass
    gConfigurationData.loadConfigurationData()
    if gConfigurationData.isMaster():
        bBuiltNewConfiguration = False
        if not gConfigurationData.getName():
            DIRAC.abort(10, "Missing name for the configuration to be exported!")
        gConfigurationData.exportName()
        sVersion = gConfigurationData.getVersion()
        if sVersion == "0":
            gLogger.info("There's no version. Generating a new one")
            gConfigurationData.generateNewVersion()
            bBuiltNewConfiguration = True
        # Make sure this server is registered in the server list.
        if self.sURL not in gConfigurationData.getServers():
            gConfigurationData.setServers(self.sURL)
            bBuiltNewConfiguration = True
        gConfigurationData.setMasterServer(self.sURL)
        if bBuiltNewConfiguration:
            gConfigurationData.writeRemoteConfigurationToDisk()
def __loadConfigurationData(self):
    """Load configuration data; on a master server normalize and persist it.

    Creates the backup directory if needed, loads the configuration and,
    when master: checks the configuration name, exports it, generates a
    version if missing, registers this server's URL, and writes the
    configuration back to disk if anything changed.
    """
    try:
        # Best effort: the directory may already exist.
        os.makedirs(os.path.join(DIRAC.rootPath, "etc", "csbackup"))
    except OSError:  # narrowed from a bare except: only ignore filesystem errors
        pass
    gConfigurationData.loadConfigurationData()
    if gConfigurationData.isMaster():
        bBuiltNewConfiguration = False
        if not gConfigurationData.getName():
            DIRAC.abort(10, "Missing name for the configuration to be exported!")
        gConfigurationData.exportName()
        sVersion = gConfigurationData.getVersion()
        if sVersion == "0":
            gLogger.info("There's no version. Generating a new one")
            gConfigurationData.generateNewVersion()
            bBuiltNewConfiguration = True
        # Make sure this server is registered in the server list.
        if self.sURL not in gConfigurationData.getServers():
            gConfigurationData.setServers(self.sURL)
            bBuiltNewConfiguration = True
        gConfigurationData.setMasterServer(self.sURL)
        if bBuiltNewConfiguration:
            gConfigurationData.writeRemoteConfigurationToDisk()
def autoRefreshAndPublish(self, sURL):
    """Enable automatic configuration refresh and start the refresher thread.

    :param str sURL: URL of the configuration server to refresh from
    """
    gLogger.debug("Setting configuration refresh as automatic")
    if not gConfigurationData.getAutoPublish():
        # Note: this branch only logs; publishing is not actually disabled here.
        gLogger.debug("Slave server won't auto publish itself")
    if not gConfigurationData.getName():
        import DIRAC
        DIRAC.abort(10, "Missing configuration name!")
    self.__url = sURL
    self.__automaticUpdate = True
    # Daemon thread: will not keep the process alive at shutdown.
    self.setDaemon(1)
    self.start()
def autoRefreshAndPublish(self, sURL):
    """Enable automatic configuration refresh and start the refresher thread.

    :param str sURL: URL of the configuration server to refresh from
    """
    gLogger.debug("Setting configuration refresh as automatic")
    if not gConfigurationData.getAutoPublish():
        # Note: this branch only logs; publishing is not actually disabled here.
        gLogger.debug("Slave server won't auto publish itself")
    if not gConfigurationData.getName():
        import DIRAC
        DIRAC.abort(10, "Missing configuration name!")
    self.__url = sURL
    self.__automaticUpdate = True
    # Daemon thread: will not keep the process alive at shutdown.
    self.setDaemon(1)
    self.start()
def __createTables(self):
    """Create tables if not already created.

    Reads the SQL schema shipped next to this module; aborts the process
    if the schema file cannot be read.
    """
    self.log.info("Creating tables in db")
    try:
        filePath = "%s/monitoringSchema.sql" % os.path.dirname(__file__)
        # with-statement guarantees the file is closed even if read() fails.
        with open(filePath) as fd:
            buff = fd.read()
    except IOError:  # Python 3 compatible form; the exception object was unused
        DIRAC.abort(1, "Can't read monitoring schema", filePath)
def __createTables(self):
    """Create tables if not already created.

    Reads the SQL schema shipped next to this module; aborts the process
    if the schema file cannot be read.
    """
    self.log.info("Creating tables in db")
    try:
        filePath = "%s/monitoringSchema.sql" % os.path.dirname(__file__)
        # with-statement guarantees the file is closed even if read() fails.
        with open(filePath) as fd:
            buff = fd.read()
    except IOError:  # Python 3 compatible form; the exception object was unused
        DIRAC.abort(1, "Can't read monitoring schema", filePath)
def autoRefreshAndPublish(self, sURL):
    """
    Start the autorefresh background task

    :param str sURL: URL of the configuration server
    """
    gLogger.debug("Setting configuration refresh as automatic")
    if not gConfigurationData.getAutoPublish():
        # Note: this branch only logs; publishing is not actually disabled here.
        gLogger.debug("Slave server won't auto publish itself")
    if not gConfigurationData.getName():
        import DIRAC
        DIRAC.abort(10, "Missing configuration name!")
    self._url = sURL
    self._automaticUpdate = True
    # Daemon thread: will not keep the process alive at shutdown.
    self.setDaemon(1)
    self.start()
class MonitoringCatalog(object):
    """Catalog of monitoring activities backed by a local sqlite database."""

    def __init__(self, dataPath):
        """Initialize monitoring catalog.

        :param str dataPath: directory holding the sqlite database file
        """
        self.dbConn = False
        self.dataPath = dataPath
        self.log = gLogger.getSubLogger("ActivityCatalog")
        self.createSchema()

    def __connect(self):
        """Connect to database (lazily, on first use)."""
        if not self.dbConn:
            dbPath = "%s/monitoring.db" % self.dataPath
            # isolation_level=None puts sqlite in autocommit mode.
            self.dbConn = sqlite3.connect(dbPath, isolation_level=None)

    def __dbExecute(self, query, values=False):
        """Execute a sql statement, retrying while the database is locked.

        :param str query: SQL statement to execute
        :param values: optional bind values for the query
        :return: the cursor used for the execution
        """
        cursor = self.dbConn.cursor()  # pylint: disable=no-member
        self.log.debug("Executing %s" % query)
        executed = False
        while not executed:
            try:
                if values:
                    cursor.execute(query, values)
                else:
                    cursor.execute(query)
                executed = True
            except sqlite3.OperationalError:
                # sqlite raises OperationalError when the db is locked by a
                # concurrent writer: back off briefly and retry. Narrowed from
                # a bare except so programming errors (bad SQL, closed conn)
                # are no longer retried forever.
                time.sleep(0.01)
        return cursor

    def __createTables(self):
        """Create tables if not already created."""
        self.log.info("Creating tables in db")
        try:
            filePath = "%s/monitoringSchema.sql" % os.path.dirname(__file__)
            # with-statement guarantees the file is closed even if read() fails.
            with open(filePath) as fd:
                buff = fd.read()
        except IOError:  # Python 3 compatible form; the exception object was unused
            DIRAC.abort(1, "Can't read monitoring schema", filePath)
        # Execute the schema one ';'-terminated statement at a time.
        while buff.find(";") > -1:
            limit = buff.find(";") + 1
            sqlQuery = buff[:limit].replace("\n", "")
            buff = buff[limit:]
            try:
                self.__dbExecute(sqlQuery)
            except Exception as e:
                DIRAC.abort(1, "Can't create tables", str(e))
def __createTables(self):
    """Creates tables if not already created.

    Reads the bundled SQL schema and executes it one ';'-terminated
    statement at a time; aborts the process on any failure.
    """
    self.log.info("Creating tables in db")
    try:
        filePath = "%s/monitoringSchema.sql" % os.path.dirname(__file__)
        # with-statement guarantees the file is closed even if read() fails.
        with open(filePath) as fd:
            buff = fd.read()
    except IOError:  # exception object was unused; no need to bind it
        DIRAC.abort(1, "Can't read monitoring schema", filePath)
    # Execute the schema one statement at a time.
    while buff.find(";") > -1:
        limit = buff.find(";") + 1
        sqlQuery = buff[:limit].replace("\n", "")
        buff = buff[limit:]
        try:
            self.__dbExecute(sqlQuery)
        except Exception as e:
            DIRAC.abort(1, "Can't create tables", str(e))
def autoRefreshAndPublish(self, sURL):
    """
    Start the autorefresh background task, called by ServiceInterface
    (the class behind the Configuration/Server handler)

    :param str sURL: URL of the configuration server
    """
    gLogger.debug("Setting configuration refresh as automatic")
    if not gConfigurationData.getAutoPublish():
        # Note: this branch only logs; publishing is not actually disabled here.
        gLogger.debug("Slave server won't auto publish itself")
    if not gConfigurationData.getName():
        import DIRAC
        DIRAC.abort(10, "Missing configuration name!")
    self._url = sURL
    self._automaticUpdate = True
    # Tornado replacement solution to the classic thread:
    # it starts the method self.__refreshLoop on the next IOLoop iteration.
    _IOLoop.current().spawn_callback(self.__refreshLoop)
def __configureDirector(self, submitPool=None):
    """Update Configuration from CS.

    If submitPool is None, disable all Directors; otherwise update the
    configuration of the BigDataDirector for that SubmitPool and enable it.

    :param submitPool: name of the submit pool to (re)configure, or None
    """
    if submitPool is None:  # 'is None' instead of '== None' (PEP 8 identity test)
        self.workDir = self.am_getOption('WorkDirectory')
        # By default disable all directors
        for director in self.directors:
            self.directors[director]['isEnabled'] = False
    else:
        if submitPool not in self.directors:
            DIRAC.abort(-1, "Submit Pool not available", submitPool)
        director = self.directors[submitPool]['director']
        # Pass reference to our CS section so that defaults can be taken from there
        director.configure(self.am_getModuleParam('section'), submitPool)
        # Enable director for job submission
        self.directors[submitPool]['isEnabled'] = True
def __configureDirector(self, submitPool=None):
    """Update Configuration from CS.

    If submitPool is None, disable all Directors; otherwise update the
    configuration of the VMDirector for that SubmitPool and enable it.

    :param submitPool: name of the submit pool to (re)configure, or None
    """
    if submitPool is None:  # 'is None' instead of '== None' (PEP 8 identity test)
        self.workDir = self.am_getOption('WorkDirectory')
        # By default disable all directors
        for director in self.directors:
            self.directors[director]['isEnabled'] = False
    else:
        if submitPool not in self.directors:
            DIRAC.abort(-1, "Submit Pool not available", submitPool)
        director = self.directors[submitPool]['director']
        # Pass reference to our CS section so that defaults can be taken from there
        director.configure(self.am_getModuleParam('section'), submitPool)
        # Enable director for pilot submission
        self.directors[submitPool]['isEnabled'] = True
def __configureDirector(self, submitPool=None):
    """Update Configuration from CS.

    With no submitPool all Directors are disabled; otherwise the
    PilotDirector of that SubmitPool is reconfigured and enabled.

    :param submitPool: name of the submit pool to (re)configure, or falsy
                       to disable every director
    """
    if not submitPool:
        self.workDir = self.am_getWorkDirectory()
        # No pool given: switch every known director off.
        for name in self.directors:
            self.directors[name]['isEnabled'] = False
        return

    # Guard: the requested pool must be known.
    if submitPool not in self.directors:
        DIRAC.abort(-1, "Submit Pool not available", submitPool)

    # Reconfigure the pool's director; our CS section supplies defaults.
    poolDirector = self.directors[submitPool]['director']
    poolDirector.configure(self.am_getModuleParam('section'), submitPool)
    # Re-enable it for pilot submission.
    self.directors[submitPool]['isEnabled'] = True
print output print '**************************' if not output['Value'][0]: ldlibs = output['Value'][1].split( '\n' ) for lib in ldlibs: if os.path.exists( lib ): if re.search( 'RELAX', lib ) is not None: filename = os.path.basename( lib ) output = shellCall( 0, 'ln -s ' + str( lib ) + ' ' + str( filename ) ) if DEBUG: if not output['OK']: print '********************************' print 'Warning, problem creating link:' print 'File: ', filename print 'Path: ', lib print output print '********************************' os.chdir( start ) sys.stdout.flush() Script.parseCommandLine() positionalArgs = Script.getPositionalArgs() if len( positionalArgs ) != 3: DIRAC.abort( 1, "Must specify which is the role you want" ) fixLDPath( *positionalArgs )
workflow.addTool( 'JobReport', JobReport( jobID ) ) workflow.addTool( 'AccountingReport', DataStoreClient() ) workflow.addTool( 'Request', RequestContainer() ) # Propagate the command line parameters to the workflow if any for name, value in wfParameters.items(): workflow.setValue( name, value ) result = workflow.execute() return result positionalArgs = Script.getPositionalArgs() if len( positionalArgs ) != 1: gLogger.debug( 'Positional arguments were %s' % ( positionalArgs ) ) DIRAC.abort( 1, "Must specify the Job XML file description" ) if os.environ.has_key( 'JOBID' ): gLogger.info( 'JobID: %s' % ( os.environ['JOBID'] ) ) jobXMLfile = positionalArgs[0] parList = Script.getUnprocessedSwitches() parDict = {} for switch, parameter in parList: if switch == "p": name, value = parameter.split( '=' ) value = value.strip() # The comma separated list in curly brackets is interpreted as a list if value.startswith("{"): value = value[1:-1].replace('"','').replace(" ",'').split(',')
def main():
    """Entry point of dirac-jobexec: execute a workflow described by a Job XML file."""
    # Register workflow parameter switch
    Script.registerSwitch(
        'p:', 'parameter=', 'Parameters that are passed directly to the workflow')
    Script.parseCommandLine()

    # from DIRAC.Core.Workflow.Parameter import *
    from DIRAC import gLogger
    from DIRAC.Core.Workflow.Workflow import fromXMLFile
    from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
    from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
    from DIRAC.RequestManagementSystem.Client.Request import Request

    # Forcing the current directory to be the first in the PYTHONPATH
    sys.path.insert(0, os.path.realpath('.'))
    gLogger.showHeaders(True)

    def jobexec(jobxml, wfParameters):
        """Load the workflow from *jobxml*, inject tools and parameters, and run it.

        :param str jobxml: path to the Job XML description
        :param dict wfParameters: parameters propagated into the workflow
        :return: the result structure of workflow.execute()
        """
        jobfile = os.path.abspath(jobxml)
        if not os.path.exists(jobfile):
            gLogger.warn('Path to specified workflow %s does not exist' % (jobfile))
            sys.exit(1)
        workflow = fromXMLFile(jobfile)
        gLogger.debug(workflow)
        code = workflow.createCode()
        gLogger.debug(code)
        jobID = 0
        if 'JOBID' in os.environ:
            jobID = os.environ['JOBID']
            gLogger.info('DIRAC JobID %s is running at site %s' % (jobID, DIRAC.siteName()))

        # Attach the standard tools the workflow modules expect.
        workflow.addTool('JobReport', JobReport(jobID))
        workflow.addTool('AccountingReport', DataStoreClient())
        workflow.addTool('Request', Request())

        # Propagate the command line parameters to the workflow if any
        for pName, pValue in wfParameters.items():
            workflow.setValue(pName, pValue)

        # Propagate the command line parameters to the workflow module instances of each step
        for stepdefinition in workflow.step_definitions.values():
            for moduleInstance in stepdefinition.module_instances:
                for pName, pValue in wfParameters.items():
                    if moduleInstance.parameters.find(pName):
                        moduleInstance.parameters.setValue(pName, pValue)

        return workflow.execute()

    positionalArgs = Script.getPositionalArgs()
    if len(positionalArgs) != 1:
        gLogger.debug('Positional arguments were %s' % (positionalArgs))
        DIRAC.abort(1, "Must specify the Job XML file description")

    if 'JOBID' in os.environ:
        gLogger.info('JobID: %s' % (os.environ['JOBID']))
jobXMLfile = positionalArgs[0]
parList = Script.getUnprocessedSwitches()
parDict = {}
# Collect -p name=value switches into a dict of workflow parameters.
for switch, parameter in parList:
    if switch == "p":
        name, value = parameter.split('=')
        value = value.strip()

        # The comma separated list in curly brackets is interpreted as a list
        if value.startswith("{"):
            value = value[1:-1].replace('"', '').replace(" ", '').split(',')
            value = ';'.join(value)

        parDict[name] = value

gLogger.debug('PYTHONPATH:\n%s' % ('\n'.join(sys.path)))
jobExec = jobexec(jobXMLfile, parDict)
if not jobExec['OK']:
    gLogger.debug('Workflow execution finished with errors, exiting')
    # Propagate the workflow's error number as the process exit code when set.
    if jobExec['Errno']:
        sys.exit(jobExec['Errno'])
    else:
        sys.exit(1)
else:
    gLogger.debug('Workflow execution successful, exiting')
    sys.exit(0)
print '**************************' if not output['Value'][0]: ldlibs = output['Value'][1].split('\n') for lib in ldlibs: if os.path.exists(lib): if re.search('RELAX', lib) is not None: filename = os.path.basename(lib) output = shellCall( 0, 'ln -s ' + str(lib) + ' ' + str(filename)) if DEBUG: if not output['OK']: print '********************************' print 'Warning, problem creating link:' print 'File: ', filename print 'Path: ', lib print output print '********************************' os.chdir(start) sys.stdout.flush() Script.parseCommandLine() positionalArgs = Script.getPositionalArgs() if len(positionalArgs) != 3: DIRAC.abort(1, "Must specify which is the role you want") fixLDPath(*positionalArgs)
workflow.setValue(pName, pValue) # Propagate the command line parameters to the workflow module instances of each step for stepdefinition in workflow.step_definitions.itervalues(): for moduleInstance in stepdefinition.module_instances: for pName, pValue in wfParameters.iteritems(): if moduleInstance.parameters.find(pName): moduleInstance.parameters.setValue(pName, pValue) return workflow.execute() positionalArgs = Script.getPositionalArgs() if len(positionalArgs) != 1: gLogger.debug('Positional arguments were %s' % (positionalArgs)) DIRAC.abort(1, "Must specify the Job XML file description") if 'JOBID' in os.environ: gLogger.info('JobID: %s' % (os.environ['JOBID'])) jobXMLfile = positionalArgs[0] parList = Script.getUnprocessedSwitches() parDict = {} for switch, parameter in parList: if switch == "p": name, value = parameter.split('=') value = value.strip() # The comma separated list in curly brackets is interpreted as a list if value.startswith("{"): value = value[1:-1].replace('"', '').replace(" ", '').split(',')
def main():
    """Entry point of dirac-jobexec: execute a workflow described by a Job XML file."""
    # Register workflow parameter switch
    Script.registerSwitch(
        "p:", "parameter=", "Parameters that are passed directly to the workflow")
    # Registering arguments will automatically add their description to the help menu
    Script.registerArgument(
        "jobXMLfile: specify path to the Job XML file description")
    Script.parseCommandLine()

    # from DIRAC.Core.Workflow.Parameter import *
    from DIRAC import gLogger
    from DIRAC.Core.Workflow.Workflow import fromXMLFile
    from DIRAC.Core.Utilities.Proxy import executeWithoutServerCertificate
    from DIRAC.WorkloadManagementSystem.Client.JobReport import JobReport
    from DIRAC.AccountingSystem.Client.DataStoreClient import DataStoreClient
    from DIRAC.RequestManagementSystem.Client.Request import Request

    # Forcing the current directory to be the first in the PYTHONPATH
    sys.path.insert(0, os.path.realpath("."))
    gLogger.showHeaders(True)

    @executeWithoutServerCertificate
    def jobexec(jobxml, wfParameters):
        """Load the workflow from *jobxml*, inject tools and parameters, and run it.

        :param str jobxml: path to the Job XML description
        :param dict wfParameters: parameters propagated into the workflow
        :return: the result structure of workflow.execute()
        """
        jobfile = os.path.abspath(jobxml)
        if not os.path.exists(jobfile):
            gLogger.warn("Path to specified workflow %s does not exist" % (jobfile))
            sys.exit(1)
        workflow = fromXMLFile(jobfile)
        gLogger.debug(workflow)
        code = workflow.createCode()
        gLogger.debug(code)
        jobID = 0
        if "JOBID" in os.environ:
            jobID = os.environ["JOBID"]
            gLogger.info("DIRAC JobID %s is running at site %s" % (jobID, DIRAC.siteName()))

        # Attach the standard tools the workflow modules expect.
        workflow.addTool("JobReport", JobReport(jobID))
        workflow.addTool("AccountingReport", DataStoreClient())
        workflow.addTool("Request", Request())

        # Propagate the command line parameters to the workflow if any
        for pName, pValue in wfParameters.items():
            workflow.setValue(pName, pValue)

        # Propagate the command line parameters to the workflow module instances of each step
        for stepdefinition in workflow.step_definitions.values():
            for moduleInstance in stepdefinition.module_instances:
                for pName, pValue in wfParameters.items():
                    if moduleInstance.parameters.find(pName):
                        moduleInstance.parameters.setValue(pName, pValue)

        return workflow.execute()
positionalArgs = Script.getPositionalArgs()
if len(positionalArgs) != 1:
    gLogger.debug("Positional arguments were %s" % (positionalArgs))
    DIRAC.abort(1, "Must specify the Job XML file description")

if "JOBID" in os.environ:
    gLogger.info("JobID: %s" % (os.environ["JOBID"]))

jobXMLfile = positionalArgs[0]
parList = Script.getUnprocessedSwitches()
parDict = {}
# Collect -p name=value switches into a dict of workflow parameters.
for switch, parameter in parList:
    if switch == "p":
        name, value = parameter.split("=")
        value = value.strip()

        # The comma separated list in curly brackets is interpreted as a list
        if value.startswith("{"):
            value = value[1:-1].replace('"', "").replace(" ", "").split(",")
            value = ";".join(value)

        parDict[name] = value

gLogger.debug("PYTHONPATH:\n%s" % ("\n".join(sys.path)))
jobExec = jobexec(jobXMLfile, parDict)
if not jobExec["OK"]:
    gLogger.debug("Workflow execution finished with errors, exiting")
    # Propagate the workflow's error number as the process exit code when set.
    if jobExec["Errno"]:
        os._exit(jobExec["Errno"])
    else:
        os._exit(1)
else:
    gLogger.debug("Workflow execution successful, exiting")
    # dirac_jobexec might interact with ARC library which cannot be closed using a simple sys.exit(0)
    # See https://bugzilla.nordugrid.org/show_bug.cgi?id=4022 for further details
    os._exit(0)