def acceptDelta(self, deltapath):
    """Accept a delta request.

    Loads the delta JSON from deltapath (the file is deleted immediately
    after reading), writes each non-empty 'reduction'/'addition' content
    to a temp file, parses it, and moves the delta to the 'accepted' or
    'failed' state via the state machine.

    Args:
        deltapath: path to a temporary file holding the delta as JSON.

    Returns:
        dict: the delta dict with State, ParsedDelta, Type, ConnID set
        (or State='failed' plus Error/empty ParsedDelta on parse errors).
    """
    jOut = getAllHosts(self.sitename, self.logger)
    fileContent = self.siteDB.getFileContentAsJson(deltapath)
    os.unlink(deltapath)  # File is not needed anymore.
    toDict = dict(fileContent)
    toDict["State"] = "accepting"
    outputDict = {'addition': '', 'reduction': ''}
    try:
        self.logger.info(toDict["Content"])
        self.logger.info(type(toDict["Content"]))
        for key in ['reduction', 'addition']:
            if key in toDict["Content"] and toDict["Content"][key]:
                self.logger.info('Got Content %s for key %s', toDict["Content"][key], key)
                tmpFile = tempfile.NamedTemporaryFile(delete=False, mode="w+")
                try:
                    tmpFile.write(toDict["Content"][key])
                except ValueError as ex:
                    self.logger.info(
                        'Received ValueError. More details %s. Try to write normally with decode', ex)
                    tmpFile.write(decodebase64(toDict["Content"][key]))
                tmpFile.close()
                outputDict[key] = self.parseDeltaRequest(tmpFile.name, jOut)
                os.unlink(tmpFile.name)
    except (IOError, KeyError, AttributeError, IndentationError, ValueError,
            BadSyntax, HostNotFound, UnrecognizedDeltaOption) as ex:
        outputDict = getError(ex)
    dbobj = getVal(self.dbI, sitename=self.sitename)
    # Idiom fix: direct membership test instead of 'in list(d.keys())'.
    if 'errorType' in outputDict:
        toDict["State"] = "failed"
        toDict["Error"] = outputDict
        toDict['ParsedDelta'] = {'addition': '', 'reduction': ''}
        self.stateMachine.failed(dbobj, toDict)
    else:
        toDict["State"] = "accepted"
        connID = []
        toDict["ParsedDelta"] = outputDict
        toDict['modadd'] = 'idle'
        for key in outputDict:
            if not outputDict[key]:
                continue
            # A second non-empty key means both addition and reduction
            # are present, i.e. this is a modify request.
            toDict['Type'] = 'modify' if 'Type' in toDict else key
            # In case of modify, only addition connection IDs are stored;
            # otherwise, corresponding type connectionIDs
            if toDict['Type'] == 'modify':
                connID = []
                for item in outputDict['addition']:
                    connID.append(item['connectionID'])
            else:
                for item in outputDict[key]:
                    connID.append(item['connectionID'])
        # Robustness: always record ConnID (empty list when no content parsed).
        toDict['ConnID'] = connID
        self.stateMachine.accepted(dbobj, toDict)
        # =================================
    return toDict
def startwork(self):
    """Main start.

    Builds the MRML model graph (prefixes, services, per-node and switch
    information), serializes it to a turtle file, and inserts it into the
    database when it differs from the previous model (or forcefully once
    per hour). Finally removes models older than 24h.
    """
    self.logger.info('Started LookupService work')
    dbObj = getVal(self.dbI, **{'sitename': self.sitename})
    workDir = self.config.get(self.sitename, 'privatedir') + "/LookUpService/"
    createDirs(workDir)
    self.newGraph = Graph()
    jOut = getAllHosts(self.sitename, self.logger)
    # 1. Define Basic MRML Prefixes
    self.defineMRMLPrefixes()
    # 2. Define Basic MRML Definition
    self.defineMRMLServices()
    self.hosts = {}
    for _, nodeDict in jOut.items():
        # 3. Define Node inside yaml
        self.defineNodeInformation(nodeDict)
        # 4. Define Routing Service information
        self.defineLayer3MRML(nodeDict)
        # 5. Define Host Information and all it's interfaces.
        self.defineHostInfo(nodeDict)
    # 6. Define Switch information from Switch Lookup Plugin
    self.addSwitchInfo(jOut)
    saveName = self.getModelSavePath()
    # Perf fix: serialize once and reuse for both the file dump and the
    # hash (the original serialized the graph twice back-to-back).
    modelOut = self.newGraph.serialize(format='turtle')
    with open(saveName, "w") as fd:
        fd.write(modelOut)
    hashNum = generateHash(modelOut)
    # Append all deltas to the model
    self.appendDeltas(dbObj, saveName)
    if dbObj.get('models', limit=1, search=[['uid', hashNum]]):
        raise Exception('hashNum %s is already in database...' % hashNum)
    self.logger.info('Checking if new model is different from previous')
    modelsEqual, modelinDB = self.checkForModelDiff(dbObj, saveName)
    # NOTE(review): kept a fresh serialize here (not the hoisted one) in
    # case appendDeltas mutates self.newGraph — TODO confirm.
    lastKnownModel = {'uid': hashNum, 'insertdate': getUTCnow(), 'fileloc': saveName,
                      'content': str(self.newGraph.serialize(format='turtle'))}
    if modelsEqual:
        if modelinDB[0]['insertdate'] < int(getUTCnow() - 3600):
            # Force to update model every hour, Even there is no update;
            self.logger.info('Forcefully update model in db as it is older than 1h')
            dbObj.insert('models', [lastKnownModel])
        else:
            self.logger.info('Models are equal.')
            lastKnownModel = modelinDB[0]
            os.unlink(saveName)
    else:
        self.logger.info('Models are different. Update DB')
        dbObj.insert('models', [lastKnownModel])
    # Idiom fix: lazy logger arguments instead of eager %-formatting.
    self.logger.debug('Last Known Model: %s', lastKnownModel)
    # Clean Up old models (older than 24h.)
    for model in dbObj.get('models', limit=100, orderby=['insertdate', 'ASC']):
        if model['insertdate'] < int(getUTCnow() - 86400):
            self.logger.debug('delete %s', model)
            try:
                os.unlink(model['fileloc'])
            except OSError as ex:
                self.logger.debug('Got OS Error removing this model %s. Exc: %s', model, str(ex))
            dbObj.delete('models', [['id', model['id']]])
def getdeltaAll(sitename, deltaUID):
    """Print debug information about one delta of a site.

    Dumps the delta DB record, its state history, per-host states and
    host-state history, then re-parses the delta content through the
    PolicyService and prints the parser output. Temp files holding the
    delta content are intentionally left on disk (their locations are
    printed for manual inspection).

    Args:
        sitename: site name used for DB lookups and host listing.
        deltaUID: uid of the delta to inspect.
    """
    import pprint  # Hoisted out of the per-delta loop; debug-only helper.
    dbI = getDBConn('analyzedelta')
    dbobj = getVal(dbI, sitename=sitename)
    policer = polS.PolicyService(CONFIG, LOGGER)
    for delta in dbobj.get('deltas'):
        if delta['uid'] != deltaUID:
            continue
        delta['addition'] = evaldict(delta['addition'])
        delta['reduction'] = evaldict(delta['reduction'])
        print('=' * 80)
        print('Delta UID : ', delta['uid'])
        print('Delta RedID: ', delta['reductionid'])
        print('Delta State: ', delta['state'])
        print('Delta ModAdd: ', delta['modadd'])
        print('Delta InsDate:', delta['insertdate'])
        print('Delta Update: ', delta['updatedate'])
        print('Delta Model: ', delta['modelid'])
        print('Delta connID: ', delta['connectionid'])
        print('Delta Deltatype: ', delta['deltat'])
        print('-' * 20)
        pprint.pprint(delta)
        print('Delta times')
        for deltatimes in dbobj.get('states', search=[['deltaid', delta['uid']]]):
            print('State: %s Date: %s' % (deltatimes['state'], deltatimes['insertdate']))
        if delta['deltat'] in ['reduction', 'addition']:
            for hostname in list(delta[delta['deltat']]['hosts'].keys()):
                print('-' * 20)
                print('Host States %s' % hostname)
                for hoststate in dbobj.get('hoststates',
                                           search=[['deltaid', delta['uid']], ['hostname', hostname]]):
                    print('Host %s State %s' % (hostname, hoststate['state']))
                    print('Insertdate %s UpdateDate %s' % (hoststate['insertdate'],
                                                           hoststate['updatedate']))
                print('-' * 20)
                print('Host State History')
                for hstatehistory in dbobj.get(
                        'hoststateshistory', search=[['deltaid', delta['uid']], ['hostname', hostname]]):
                    print('State: %s, Date: %s' % (hstatehistory['state'], hstatehistory['insertdate']))
        toDict = ast.literal_eval(str(delta['content']))
        jOut = getAllHosts(sitename, LOGGER)
        for key in ['reduction', 'addition']:
            print(list(toDict.keys()))
            if key in toDict and toDict[key]:
                # BUGFIX: print() was called with logger-style lazy args
                # ('%s...', a, b) which prints a tuple; use %-formatting.
                print('Got Content %s for key %s' % (toDict[key], key))
                # BUGFIX: explicit text mode — the binary default would
                # raise TypeError (not ValueError) on a str write.
                tmpFile = tempfile.NamedTemporaryFile(delete=False, mode="w+")
                try:
                    tmpFile.write(toDict[key])
                except ValueError as ex:
                    print('Received ValueError. More details %s. Try to write normally with decode' % ex)
                    # BUGFIX: was toDict["Content"][key], which would raise
                    # KeyError here — this toDict is already the content dict.
                    tmpFile.write(decodebase64(toDict[key]))
                tmpFile.close()
                # outputDict[key] = self.parseDeltaRequest(tmpFile.name, jOut)
                print("For %s this is delta location %s" % (key, tmpFile.name))
                out = policer.parseDeltaRequest(tmpFile.name, jOut)
                if not out:
                    # Retry with the sitename-aware parser signature.
                    out = policer.parseDeltaRequest(tmpFile.name, jOut, sitename)
                print(out)
def acceptDelta(self, deltapath, sitename):
    """Accept a delta request for a specific site.

    Loads the delta JSON from deltapath (the file is deleted immediately
    after reading), writes each non-empty 'reduction'/'addition' content
    to a temp file, parses it, and moves the delta to the 'accepted' or
    'failed' state via the state machine. For reductions, resolves which
    existing connection the reduction maps onto.

    Args:
        deltapath: path to a temporary file holding the delta as JSON.
        sitename: site the delta belongs to.

    Returns:
        dict: the delta dict with State, ParsedDelta, Type, ConnID set
        (or State='failed' plus Error/empty ParsedDelta on parse errors).
    """
    jOut = getAllHosts(sitename, self.logger)
    fileContent = self.siteDB.getFileContentAsJson(deltapath)
    os.unlink(deltapath)  # File is not needed anymore.
    toDict = dict(fileContent)
    toDict["State"] = "accepting"
    outputDict = {'addition': '', 'reduction': ''}
    try:
        self.logger.info(toDict["Content"])
        for key in ['reduction', 'addition']:
            if key in toDict["Content"] and toDict["Content"][key]:
                self.logger.info('Got Content %s for key %s', toDict["Content"][key], key)
                # BUGFIX: open in text mode ("w+") like the sibling delta
                # parsers; the binary default makes the str write below
                # raise TypeError, which is not in the caught exceptions.
                tmpFile = tempfile.NamedTemporaryFile(delete=False, mode="w+")
                try:
                    tmpFile.write(toDict["Content"][key])
                except ValueError as ex:
                    self.logger.info(
                        'Received ValueError. More details %s. Try to write normally with decode', ex)
                    tmpFile.write(decodebase64(toDict["Content"][key]))
                tmpFile.close()
                outputDict[key] = self.parseDeltaRequest(tmpFile.name, jOut, sitename)
                self.logger.info("For %s this is delta location %s" % (key, tmpFile.name))
                # os.unlink(tmpFile.name)
    except (IOError, KeyError, AttributeError, IndentationError, ValueError,
            BadSyntax, HostNotFound, UnrecognizedDeltaOption) as ex:
        outputDict = getError(ex)
    dbobj = getVal(self.dbI, sitename=sitename)
    # Idiom fix: direct membership test instead of '.keys()'.
    if 'errorType' in outputDict:
        toDict["State"] = "failed"
        toDict["Error"] = outputDict
        toDict['ParsedDelta'] = {'addition': '', 'reduction': ''}
        self.stateMachine.failed(dbobj, toDict)
    else:
        toDict["State"] = "accepted"
        toDict["ParsedDelta"] = outputDict
        dtype = None
        connID = None
        for key in outputDict:
            if not outputDict[key]:
                continue
            # If key is reduction. Find out which one.
            # So this check will not be needed anymore.
            dtype = key
            connID = outputDict[key]['connectionID']
            if key == 'reduction':
                # NOTE(review): checks outputDict but sets toDict["ReductionID"],
                # so this branch always runs — confirm intended container.
                if "ReductionID" not in outputDict:
                    self.logger.info('Trying to identify which to delete')
                    reductionIDMap = self.reductionCompare(
                        sitename, outputDict[key]['connectionID'])
                    toDict["ReductionID"] = reductionIDMap
                else:
                    self.logger.info('ReductionID is already defined.')
        toDict['Type'] = dtype
        toDict['ConnID'] = connID
        toDict['modadd'] = 'idle'
        self.stateMachine.accepted(dbobj, toDict)
        # =================================
    return toDict