def getModelSavePath(self):
    """ Get Model Save Location """
    # Models live under the site's private dir, in a LookUpService subfolder.
    saveDir = "%s/%s" % (self.config.get(self.sitename, "privatedir"), "LookUpService")
    createDirs(saveDir)
    # Timestamped file name so each generated model is kept separately.
    stamp = datetime.datetime.now()
    parts = (saveDir, stamp.year, stamp.month, stamp.day,
             stamp.hour, stamp.minute, stamp.second)
    return "%s/%s-%s-%s:%s:%s:%s.mrml" % parts
def startwork(config=None, logger=None):
    """Main start

    Fetches registered agent IPs from the Site Frontend and regenerates the
    httpd forwarding config; restarts httpd only when the config changed.
    """
    fullURL = getFullUrl(config)
    agents = getDataFromSiteFE({}, fullURL, "/sitefe/json/frontend/ips")
    # agents is a (data, code, status, ...) tuple; anything but 'OK' aborts.
    if agents[2] != 'OK':
        print 'Received a failure getting information from Site Frontend %s' % str(agents)
        return
    workDir = config.get('frontend', 'privatedir') + "/forwardingService/"
    createDirs(workDir)
    # Work on a private copy of the live httpd config, never the original.
    copy2("/etc/httpd/conf.d/sitefe-httpd.conf", str(workDir + "httpd-copy.conf"))
    httpdCopy = readFile(str(workDir + "httpd-copy.conf"))
    try:
        newDict = evaldict(agents[0])
    except FailedToParseError as ex:
        print 'Server returned not a json loadable object. Raising error. Output %s. Errors: %s' % (str(agents), ex)
        return
    if not newDict:
        print 'Seems server returned empty dictionary. Exiting.'
        return
    newOut, changed = prepareNewHTTPDConfig(newDict, httpdCopy)
    if changed:
        # Install the regenerated config and restart apache to pick it up.
        writeNewConfig(newOut, workDir)
        copy2(str(workDir + "httpd-new.conf"), "/etc/httpd/conf.d/sitefe-httpd.conf")
        stdout = externalCommand("service httpd restart")
        print stdout
        # Restart apache...
    return
def __init__(self, configIn, loggerIn):
    """Store config/logger handles and prepare the FDT java command templates."""
    self.config = configIn
    self.logger = loggerIn
    # fdt.jar ships inside the SENSE client installation directory.
    self.fdtLoc = '%s/fdt.jar' % self.config.get('general', 'sense_client')
    # Double-% placeholders survive this interpolation; they are filled in later
    # with the actual port / parallel-stream (and, for the client, target) values.
    self.serverCmd = "java -jar %s -p %%s -P %%s -noupdates" % self.fdtLoc
    self.clientCmd = "java -jar %s -p %%s -P %%s -noupdates -c %%s -nettest" % self.fdtLoc
    # Component log directory under the private dir; make sure it exists.
    self.logDir = self.config.get('general', 'private_dir') + "/DTNRM/NetTester/logs/"
    createDirs(self.logDir)
def startwork(self):
    """Main start: run the provisioning worker once for every configured site."""
    allSites = self.config.get('general', 'sites')
    for site in allSites.split(','):
        # Each site keeps its own working area under its private dir.
        createDirs(self.config.get(site, 'privatedir') + "/ProvisioningService/")
        self.sitename = site
        self.logger.info('Working on Site %s' % self.sitename)
        self.startworkmain()
def startwork(self):
    """Announce start-up and run the policy worker for each configured site."""
    self.logger.info("=" * 80)
    self.logger.info("Component PolicyService Started")
    sites = self.config.get('general', 'sites').split(',')
    for site in sites:
        # Ensure the per-site PolicyService scratch directory exists.
        createDirs(self.config.get(site, 'privatedir') + "/PolicyService/")
        self.logger.info('Working on Site %s' % site)
        self.startworkmain(site)
def __init__(self, configFile):
    """Open the sqlite database at configFile, creating the schema on first use."""
    self.dbfile = configFile
    firstRun = not os.path.isfile(self.dbfile)
    if firstRun:
        # NOTE(review): createDirs receives the db *file* path here, while other
        # callers in this codebase pass a directory — confirm createDirs handles
        # file paths (or whether this should be the dirname).
        createDirs(self.dbfile)
    self.conn = sqlite3.connect(self.dbfile)
    self.cursor = self.conn.cursor()
    if firstRun:
        # The file did not exist before connect() created it; build the schema.
        self._createdb()
def __init__(self, config, logger):
    """Initialize the DTNRMPrint plugin: config/logger, work dir, frontend URL."""
    # DTN-RM libraries supply defaults when config/logger are not provided.
    cfg, log = getDefaultConfigAgent(COMPONENT, config, logger)
    self.config = cfg
    self.logger = log
    self.workDir = cfg.get('general', 'private_dir') + "/DTNRM/DTNRMPrint/"
    createDirs(self.workDir)
    self.fullURL = getFullUrl(cfg, cfg.get('general', 'siteName'))
    self.hostname = cfg.get('agent', 'hostname')
    self.debug = cfg.getboolean('general', "debug")
    log.info("====== DTNRMPrint Start Work at __init__. Hostname: %s", self.hostname)
def __init__(self, configIn, loggerIn, args=None):
    """Set up NetTester state: work dirs, FDT worker helper and content database."""
    cfg, log = getDefaultConfigAgent(COMPONENT, configIn, loggerIn)
    self.config = cfg
    self.logger = log
    privDir = cfg.get('general', 'private_dir')
    self.workDir = privDir + "/DTNRM/NetTester/jsons/"
    createDirs(self.workDir)
    # Ruler agent's vlan configuration area (read by this component).
    self.vlanConfDir = privDir + "/DTNRM/RulerAgent/"
    self.fdtworker = FDTWorker(cfg, log)
    self.senseclient = cfg.get('general', 'sense_client')
    # Optional command-line style overrides passed in by the caller.
    self.customInput = args
    self.IPs = []
    self.agentdb = contentDB(logger=log, config=cfg)
    log.info("==== NetTester Start Work.")
def __init__(self, config, logger):
    """Prepare the QOS component: directories, hostname, debug flag, content DB."""
    self.config, self.logger = getDefaultConfigAgent(COMPONENT, config, logger)
    privDir = self.config.get('general', 'private_dir')
    self.workDir = privDir + "/DTNRM/QOS/"
    self.configDir = privDir + "/DTNRM/RulerAgent/"
    createDirs(self.workDir)
    self.hostname = self.config.get('agent', 'hostname')
    self.debug = self.config.getboolean('general', "debug")
    self.agentdb = contentDB(logger=self.logger, config=self.config)
def startwork(self):
    """Start Provisioning Service main worker.

    Fetches the frontend state and all deltas for this site, loads the
    site-configured switch plugin, and applies reduction/addition actions
    for every known switch (and switch alias).
    """
    fullURL = getFullUrl(self.config, sitename=self.sitename)
    jOut = self.getData(fullURL, "/sitefe/json/frontend/getdata")
    workDir = self.config.get('general', 'privatedir') + "/ProvisioningService/"
    createDirs(workDir)
    if not jOut:
        self.logger.info('Seems server returned empty dictionary. Exiting.')
        return
    # Get switch information...
    # The plugin module is chosen per-site in configuration and imported by name.
    switchPlugin = self.config.get(self.sitename, 'plugin')
    self.logger.info('Will load %s switch plugin' % switchPlugin)
    method = importlib.import_module("SiteFE.ProvisioningService.Plugins.%s" % switchPlugin.lower())
    switchruler = method.mainCaller()
    topology = method.topology()
    switches = topology.getTopology()
    alliases = self.getAllAliases(switches)
    outputDict = {}
    # oldview=true asks the FE for deltas including their previous state.
    allDeltas = self.getData(fullURL, "/sitefe/v1/deltas?oldview=true")
    # Walk real switches plus their aliases, applying any deltas that match.
    for switchName in list(list(switches['switches'].keys()) + alliases):
        newDeltas = self.checkdeltas(switchName, allDeltas)
        for newDelta in newDeltas:
            outputDict.setdefault(newDelta['ID'])
            # A delta may carry both a reduction and an addition part.
            for actionKey in ['reduction', 'addition']:
                try:
                    newvlan = self.getnewvlan(newDelta, newDelta['ID'], switchName, actionKey)
                    if actionKey == 'reduction' and newDelta['ParsedDelta'][actionKey]:
                        self.deltaRemoval(newDelta, newDelta['ID'], newvlan, switchName, switchruler, fullURL)
                    elif actionKey == 'addition' and newDelta['ParsedDelta'][actionKey]:
                        if newDelta['State'] in ['cancel']:
                            # A cancelled addition is undone via the removal path;
                            # mark its own ID as the reduction target.
                            newDelta['ReductionID'] = newDelta['ID']
                            self.deltaRemoval(newDelta, newDelta['ID'], newvlan, switchName, switchruler, fullURL)
                        else:
                            self.deltaCommit(newDelta, newDelta['ID'], newvlan, switchName, switchruler, fullURL)
                    else:
                        # Neither part of the delta applied — dump it for debugging.
                        self.logger.warning('Unknown delta state')
                        pretty = pprint.PrettyPrinter(indent=4)
                        pretty.pprint(evaldict(newDelta))
                except IOError as ex:
                    print(ex)
                    raise Exception('Received IOError')
def __init__(self, config, logger):
    """QOS initializer that can also bootstrap its own config and logger."""
    self.config = config if config else getConfig()
    if logger:
        self.logger = logger
    else:
        # Fall back to a component logger below the configured log directory.
        self.logger = getLogger("%s/%s/" % (self.config.get('general', 'logDir'), COMPONENT),
                                self.config.get('general', 'logLevel'))
    privDir = self.config.get('general', 'private_dir')
    self.workDir = privDir + "/DTNRM/QOS/"
    self.configDir = privDir + "/DTNRM/RulerAgent/"
    createDirs(self.workDir)
    self.hostname = self.config.get('agent', 'hostname')
    self.debug = self.config.getboolean('general', "debug")
    self.agentdb = contentDB(logger=self.logger, config=self.config)
def startwork(config=None, logger=None):
    """Main start

    Collects per-host error/warning conditions from the Site Frontend,
    compares them with the previous run, and mails only the new ones.
    """
    # NOTE(review): this function reads the module-level CONFIG, not the
    # `config` argument — confirm that is intended.
    errors = []
    agents = getDataFromSiteFE({}, "http://localhost/", "/sitefe/json/frontend/getdata")
    if agents[2] != 'OK':
        print 'Received a failure getting information from Site Frontend %s' % str(agents)
        return
    workDir = CONFIG.get('frontend', 'privatedir') + "/notificationService/"
    mailingSender = CONFIG.get('NotificationService', 'mailingSender')
    mailingList = CONFIG.get('NotificationService', 'mailingList').split(',')
    createDirs(workDir)
    jOut = {}
    try:
        jOut = evaldict(agents[0])
    except FailedToParseError as ex:
        print 'Server returned not a json loadable object. Raising error. Output %s. Errors: %s' % (str(agents), ex)
        return
    # We start with simple error messages
    for ipaddr, values in jOut.items():
        # Check if there is any error first
        checkPluginErrors(ipaddr, values, errors)
        checkCertLifeTime(ipaddr, values, errors)
        warningsFromMonComponent(ipaddr, values, errors)
    # Compare errors with previous run and send email only if there is something new...
    lastErrors = readFile(str(workDir + "lastRunErrors.json"))
    if lastErrors:
        try:
            lastErrors = evaldict(lastErrors[0])
        except FailedToParseError as ex:
            print 'Loaded object from the system is not evaluable. Raising error. \
Output %s. Errors: %s' % (str(lastErrors), ex)
            print 'Ignoring and continue as there was no errors before'
            # A corrupt state file is treated as "no previous errors".
            lastErrors = []
    newErrors = []
    if lastErrors and errors:
        newErrors = compareErrors(lastErrors, errors)
    elif errors:
        # Means there is no previous errors.
        print errors
    elif lastErrors and not errors:
        print 'All errors were resolved...'
    print lastErrors, errors, newErrors
    if newErrors:
        prepareMailSend(newErrors, mailingSender, mailingList)
    # Persist the current error set for the next run's comparison.
    writeNewFile(errors, workDir)
    return
def __init__(self, config, logger):
    """Ruler initializer: config/logger bootstrap, work dir, frontend URL."""
    self.config = config if config else getConfig()
    if logger:
        self.logger = logger
    else:
        # Build a component logger under the configured log directory.
        self.logger = getLogger("%s/%s/" % (self.config.get('general', 'logDir'), COMPONENT),
                                self.config.get('general', 'logLevel'))
    self.workDir = self.config.get('general', 'private_dir') + "/DTNRM/RulerAgent/"
    createDirs(self.workDir)
    self.fullURL = getFullUrl(self.config, self.config.get('general', 'siteName'))
    self.noRules = self.config.getboolean('agent', 'norules')
    self.hostname = self.config.get('agent', 'hostname')
    self.logger.info("====== Ruler Start Work. Hostname: %s", self.hostname)
    self.vInterface = VInterfaces(self.config, self.logger)
def __init__(self, config, logger):
    """Ruler agent initializer with optional 'norules' toggle and QOS helper."""
    self.config, self.logger = getDefaultConfigAgent(COMPONENT, config, logger)
    self.workDir = self.config.get('general', 'private_dir') + "/DTNRM/RulerAgent/"
    createDirs(self.workDir)
    self.fullURL = getFullUrl(self.config, self.config.get('general', 'siteName'))
    # 'norules' is an optional agent setting; default is to apply rules.
    if self.config.has_option('agent', 'norules'):
        self.noRules = self.config.getboolean('agent', 'norules')
    else:
        self.noRules = False
    self.hostname = self.config.get('agent', 'hostname')
    self.logger.info("====== Ruler Start Work. Hostname: %s", self.hostname)
    self.debug = self.config.getboolean('general', "debug")
    self.pretty = pprint.PrettyPrinter(indent=4)
    self.agentdb = contentDB(logger=self.logger, config=self.config)
    self.vInterface = VInterfaces(self.config, self.logger)
    self.qosruler = QOS(self.config, self.logger)
def startwork(self):
    """ Start Policy Service

    For every configured site, make sure its work dir exists and drive all
    delta state-machine transitions against the site database.
    """
    self.logger.info("=" * 80)
    self.logger.info("Component PolicyService Started")
    for siteName in self.config.get('general', 'sites').split(','):
        workDir = self.config.get(siteName, 'privatedir') + "/PolicyService/"
        createDirs(workDir)
        # Committed to activating...
        # committing, committed, activating, activated, remove, removing, cancel
        # NOTE(review): dbobj is looked up with self.sitename even though this
        # loop iterates siteName (the sibling startwork passes the loop variable)
        # — confirm whether getVal should receive siteName here.
        dbobj = getVal(self.dbI, sitename=self.sitename)
        for job in [['committing', self.stateMachine.committing],
                    ['committed', self.stateMachine.committed],
                    ['activating', self.stateMachine.activating],
                    ['activated', self.stateMachine.activated],
                    ['remove', self.stateMachine.remove],
                    ['removing', self.stateMachine.removing],
                    ['cancel', self.stateMachine.cancel],
                    ['cancelConn', self.stateMachine.cancelledConnections]]:
            self.logger.info("Starting check on %s deltas" % job[0])
            job[1](dbobj)
def startWork(config=None, logger=None):
    """ Execute main script for DTN-RM Agent output preparation """
    workDir = config.get('general', 'private_dir') + "/DTNRM/"
    createDirs(workDir)
    # Build the host report and enrich it with the agent configuration.
    payload = prepareJsonOut(config, logger)
    fullUrl = getFullUrl(config)
    payload = appendConfig(config, payload)
    if config.getboolean('general', "debug"):
        logger.debug(pprint.PrettyPrinter(indent=4).pformat(payload))
    # Keep a local copy of exactly what gets published.
    agent = contentDB(logger=logger, config=config)
    agent.dumpFileContentAsJson(workDir + "/latest-out.json", payload)
    logger.info('Will try to publish information to SiteFE')
    fullUrl += '/sitefe'
    outVals = publishToSiteFE(payload, fullUrl, '/json/frontend/updatehost')
    publishFailed = outVals[2] != 'OK' or outVals[1] != 200
    if publishFailed and outVals[3]:
        # Update failed — try registering the host instead.
        # (assumes outVals[3] flags a retryable condition — TODO confirm)
        publishToSiteFE(payload, fullUrl, '/json/frontend/addhost')
def startwork(self):
    """Main start

    Builds a fresh MRML model of the site (hosts, routing, switches),
    stores it in the models table when it differs from the previous one,
    and prunes models older than 24 hours.
    """
    self.logger.info('Started LookupService work')
    dbObj = getVal(self.dbI, **{'sitename': self.sitename})
    workDir = self.config.get(self.sitename, 'privatedir') + "/LookUpService/"
    createDirs(workDir)
    self.newGraph = Graph()
    jOut = getAllHosts(self.sitename, self.logger)
    # ==================================================================================
    # 1. Define Basic MRML Prefixes
    # ==================================================================================
    self.defineMRMLPrefixes()
    # ==================================================================================
    # 2. Define Basic MRML Definition
    # ==================================================================================
    self.defineMRMLServices()
    self.hosts = {}
    for _, nodeDict in jOut.items():
        # ==================================================================================
        # 3. Define Node inside yaml
        # ==================================================================================
        self.defineNodeInformation(nodeDict)
        # ==================================================================================
        # 4. Define Routing Service information
        # ==================================================================================
        self.defineLayer3MRML(nodeDict)
        # ==================================================================================
        # 5. Define Host Information and all it's interfaces.
        # ==================================================================================
        self.defineHostInfo(nodeDict)
    # ==================================================================================
    # 6. Define Switch information from Switch Lookup Plugin
    # ==================================================================================
    self.addSwitchInfo(jOut)
    saveName = self.getModelSavePath()
    with open(saveName, "w") as fd:
        fd.write(self.newGraph.serialize(format='turtle'))
    # Hash is computed on the pre-delta serialization and used as the model uid.
    hashNum = generateHash(self.newGraph.serialize(format='turtle'))
    # Append all deltas to the model
    self.appendDeltas(dbObj, saveName)
    if dbObj.get('models', limit=1, search=[['uid', hashNum]]):
        raise Exception('hashNum %s is already in database...' % hashNum)
    self.logger.info('Checking if new model is different from previous')
    modelsEqual, modelinDB = self.checkForModelDiff(dbObj, saveName)
    lastKnownModel = {'uid': hashNum, 'insertdate': getUTCnow(), 'fileloc': saveName,
                      'content': str(self.newGraph.serialize(format='turtle'))}
    if modelsEqual:
        if modelinDB[0]['insertdate'] < int(getUTCnow() - 3600):
            # Force to update model every hour, Even there is no update;
            self.logger.info('Forcefully update model in db as it is older than 1h')
            dbObj.insert('models', [lastKnownModel])
        else:
            # Identical and recent: keep the DB record and drop the new file.
            self.logger.info('Models are equal.')
            lastKnownModel = modelinDB[0]
            os.unlink(saveName)
    else:
        self.logger.info('Models are different. Update DB')
        dbObj.insert('models', [lastKnownModel])
    self.logger.debug('Last Known Model: %s' % str(lastKnownModel))
    # Clean Up old models (older than 24h.)
    for model in dbObj.get('models', limit=100, orderby=['insertdate', 'ASC']):
        if model['insertdate'] < int(getUTCnow() - 86400):
            self.logger.debug('delete %s', model)
            try:
                os.unlink(model['fileloc'])
            except OSError as ex:
                # File may already be gone; still remove the DB row below.
                self.logger.debug('Got OS Error removing this model %s. Exc: %s' % (model, str(ex)))
            dbObj.delete('models', [['id', model['id']]])