class PolicyService(object):
    """Policy Service which accepts and parses deltas."""

    def __init__(self, config, logger, sitename):
        self.sitename = sitename
        self.logger = logger
        self.config = config
        self.siteDB = contentDB(logger=self.logger, config=self.config)
        self.dbI = getDBConn('PolicyService')
        self.stateMachine = StateMachine(self.logger)

    def queryGraph(self, graphIn, sub=None, pre=None, obj=None, search=None):
        """Search inside the graph based on the provided parameters."""
        foundItems = []
        self.logger.debug('Searching for subject: %s predicate: %s object: %s searchLine: %s'
                          % (sub, pre, obj, search))
        for sIn, pIn, oIn in graphIn.triples((sub, pre, obj)):
            if search:
                if search == pIn:
                    self.logger.debug('Found item with search parameter')
                    self.logger.debug("s(subject  ) %s" % sIn)
                    self.logger.debug("p(predicate) %s" % pIn)
                    self.logger.debug("o(object   ) %s" % oIn)
                    self.logger.debug("-" * 50)
                    foundItems.append(oIn)
            else:
                self.logger.debug('Found item without search parameter')
                self.logger.debug("s(subject  ) %s" % sIn)
                self.logger.debug("p(predicate) %s" % pIn)
                self.logger.debug("o(object   ) %s" % oIn)
                self.logger.debug("-" * 50)
                foundItems.append(oIn)
        return foundItems

    def getTimeScheduling(self, out, gIn, prefixes):
        """Identify the LIFETIME of the request. If it fails to get a correct
        timestamp, resources will be provisioned right away."""
        for timeline in out:
            times = {}
            for timev in ['end', 'start']:
                tout = self.queryGraph(gIn, timeline,
                                       search=URIRef('%s%s' % (prefixes['nml'], timev)))
                temptime = None
                try:
                    temptime = int(time.mktime(parser.parse(str(tout[0])).timetuple()))
                    if time.daylight:
                        temptime -= 3600
                except Exception:
                    continue
                times[timev] = temptime
            if len(times.keys()) == 2:
                return times
        return {}

    def parseDeltaRequest(self, inFileName, allKnownHosts):
        """Parse delta request to json."""
        self.logger.info("Parsing delta request %s ", inFileName)
        prefixes = {}
        allOutput = []
        prefixes['site'] = "%s:%s:%s" % (self.config.get('prefixes', 'site'),
                                         self.config.get(self.sitename, 'domain'),
                                         self.config.get(self.sitename, 'year'))
        # Each configured switch that exposes a vsw (virtual switch) service gets its own prefix.
        for switchName in self.config.get(self.sitename, 'switch').split(','):
            try:
                vsw = self.config.get(switchName, 'vsw')
            except ConfigParser.NoOptionError:
                self.logger.debug('ERROR: vsw parameter is not defined for %s.', switchName)
                continue
            prefixes['main'] = URIRef("%s:service+vsw:%s" % (prefixes['site'], vsw))
            prefixes['nml'] = self.config.get('prefixes', 'nml')
            prefixes['mrs'] = self.config.get('prefixes', 'mrs')
            gIn = Graph()
            gIn.parse(inFileName, format='turtle')
            out = []
            self.logger.info('Trying to parse L2 info from delta')
            out = self.parsel2Request(allKnownHosts, prefixes, gIn, out)
            self.logger.info('Trying to parse L3 info from delta')
            out = self.parsel3Request(allKnownHosts, prefixes, gIn, out)
            allOutput.append(out)
        allOutput = list(filter(None, allOutput))
        if len(allOutput) > 1:
            msg = 'Got multiple service definitions. Not supported. Output: %s' % allOutput
            self.logger.info(msg)
            return {"errorType": 'MultipleDefs', "errorNo": '-9', "errMsg": msg}
        elif not allOutput:
            return []
        return allOutput[0]

    def parsel3Request(self, allKnownHosts, prefixes, gIn, returnout):
        """Parse Layer 3 delta request."""
        for hostname in allKnownHosts.keys():
            prefixes['mainrst'] = URIRef("%s:%s:service+rst" % (prefixes['site'], hostname))
            self.logger.info("Let's try to get the connection ID subject for %s" % prefixes['mainrst'])
            out = self.queryGraph(gIn, prefixes['mainrst'],
                                  search=URIRef('%s%s' % (prefixes['mrs'], 'providesRoutingTable')))
            if not out:
                msg = 'Connection ID was not received. Continuing with the next host.'
                self.logger.info(msg)
                continue
            outall = {}
            outall.setdefault('hosts', {})
            outall['hosts'].setdefault(hostname, {})
            for connectionID in out:
                outall['connectionID'] = str(connectionID)
                outall['hosts'][hostname]['routes'] = []
                self.logger.info('This is our connection ID: %s' % connectionID)
                self.logger.info("Now let's get all the requested info, mainly nextHop, routeFrom and routeTo")
                bidPorts = self.queryGraph(gIn, connectionID,
                                           search=URIRef('%s%s' % (prefixes['mrs'], 'hasRoute')))
                for bidPort in bidPorts:
                    route = {}
                    for flag in ['nextHop', 'routeFrom', 'routeTo']:
                        route.setdefault(flag, {})
                        out = self.queryGraph(gIn, bidPort,
                                              search=URIRef('%s%s' % (prefixes['mrs'], flag)))
                        if not out:
                            continue
                        for item in out:
                            outt = self.queryGraph(gIn, item,
                                                   search=URIRef('%s%s' % (prefixes['mrs'], 'type')))
                            outv = self.queryGraph(gIn, item,
                                                   search=URIRef('%s%s' % (prefixes['mrs'], 'value')))
                            if not outt or not outv:
                                continue
                            route[flag]['type'] = str(outt[0])
                            route[flag]['value'] = str(outv[0])
                    outall['hosts'][hostname]['routes'].append(route)
            returnout.append(outall)
            self.logger.debug('L3 Parse output: %s', outall)
        return returnout

    def parsel2Request(self, allKnownHosts, prefixes, gIn, returnout):
        """Parse Layer 2 delta request."""
        self.logger.info("Let's try to get the connection ID subject for %s" % prefixes['main'])
        connectionID = None
        out = self.queryGraph(gIn, prefixes['main'],
                              search=URIRef('%s%s' % (prefixes['mrs'], 'providesSubnet')))
        if not out:
            msg = 'Connection ID was not received. Something is wrong...'
            self.logger.info(msg)
            return []
        for connectionID in out:
            output = {}
            output['connectionID'] = str(connectionID)
            self.logger.info('This is our connection ID: %s' % connectionID)
            self.logger.info("Now let's get all the requested info, mainly bidPorts and the labelSwapping flag")
            bidPorts = self.queryGraph(gIn, connectionID,
                                       search=URIRef('%s%s' % (prefixes['nml'], 'hasBidirectionalPort')))
            out = self.queryGraph(gIn, connectionID,
                                  search=URIRef('%s%s' % (prefixes['nml'], 'labelSwapping')))
            output['labelSwapping'] = str(out[0])
            out = self.queryGraph(gIn, connectionID,
                                  search=URIRef('%s%s' % (prefixes['nml'], 'existsDuring')))
            out = self.getTimeScheduling(out, gIn, prefixes)
            if len(out.keys()) == 2:
                output['timestart'] = out['start']
                output['timeend'] = out['end']
            # =======================================================
            self.logger.info("Now let's get the info for each bidirectional port, like vlan, ip, serviceInfo")
            # We need mainly hasLabel and hasNetworkAddress
            for bidPort in bidPorts:
                self.logger.debug('Working on bidPort %s' % bidPort)
                # Get first which labels it has.
                # This provides us info about the vlan tag.
                connInfo, output = getConnInfo(bidPort, prefixes['site'], output, nostore=True)
                if connInfo not in allKnownHosts:
                    self.logger.debug('Ignore %s' % connInfo)
                    continue
                connInfo, output = getConnInfo(bidPort, prefixes['site'], output)
                alias = self.queryGraph(gIn, bidPort,
                                        search=URIRef('%s%s' % (prefixes['nml'], 'isAlias')))
                if alias and alias[0] not in bidPorts:
                    self.logger.info('Received alias for %s to %s' % (bidPort, alias))
                    bidPorts.append(alias[0])
                # Now let's get the vlan ID
                out = self.queryGraph(gIn, bidPort,
                                      search=URIRef('%s%s' % (prefixes['nml'], 'hasLabel')))
                if not out:
                    continue
                out = self.queryGraph(gIn, out[0],
                                      search=URIRef('%s%s' % (prefixes['nml'], 'value')))
                output['hosts'][connInfo]['vlan'] = str(out[0])
                # Now let's get the IP
                out = self.queryGraph(gIn, bidPort,
                                      search=URIRef('%s%s' % (prefixes['mrs'], 'hasNetworkAddress')))
                for item in out:
                    outtype = self.queryGraph(gIn, item,
                                              search=URIRef('%s%s' % (prefixes['mrs'], 'type')))
                    outval = self.queryGraph(gIn, item,
                                             search=URIRef('%s%s' % (prefixes['mrs'], 'value')))
                    if Literal('ipv4-address') in outtype:
                        if len(outval[0].split('/')) == 2:
                            output['hosts'][connInfo]['ip'] = str(outval[0])
                # Now let's get the service info and what was requested.
                out = self.queryGraph(gIn, bidPort,
                                      search=URIRef('%s%s' % (prefixes['nml'], 'hasService')))
                output['hosts'][connInfo].setdefault('params', [])
                serviceparams = {}
                if out:
                    for key in ['availableCapacity', 'granularity', 'maximumCapacity', 'priority',
                                'reservableCapacity', 'type', 'unit']:
                        tmpout = self.queryGraph(gIn, out[0],
                                                 search=URIRef('%s%s' % (prefixes['mrs'], key)))
                        if len(tmpout) >= 1:
                            serviceparams[key] = str(tmpout[0])
                output['hosts'][connInfo]['params'].append(serviceparams)
            returnout.append(output)
        return returnout

    def startwork(self):
        """Start the Policy Service."""
        self.logger.info("=" * 80)
        self.logger.info("Component PolicyService Started")
        for siteName in self.config.get('general', 'sites').split(','):
            workDir = self.config.get(siteName, 'privatedir') + "/PolicyService/"
            createDirs(workDir)
        # Committed to activating...
        # committing, committed, activating, activated, remove, removing, cancel
        dbobj = getVal(self.dbI, sitename=self.sitename)
        for job in [['committing', self.stateMachine.committing],
                    ['committed', self.stateMachine.committed],
                    ['activating', self.stateMachine.activating],
                    ['activated', self.stateMachine.activated],
                    ['remove', self.stateMachine.remove],
                    ['removing', self.stateMachine.removing],
                    ['cancel', self.stateMachine.cancel],
                    ['cancelConn', self.stateMachine.cancelledConnections]]:
            self.logger.info("Starting check on %s deltas" % job[0])
            job[1](dbobj)

    def acceptDelta(self, deltapath):
        """Accept a delta."""
        jOut = getAllHosts(self.sitename, self.logger)
        fileContent = self.siteDB.getFileContentAsJson(deltapath)
        os.unlink(deltapath)  # File is not needed anymore.
        toDict = dict(fileContent)
        toDict["State"] = "accepting"
        outputDict = {'addition': '', 'reduction': ''}
        try:
            self.logger.info(toDict["Content"])
            self.logger.info(type(toDict["Content"]))
            for key in ['reduction', 'addition']:
                if key in toDict["Content"] and toDict["Content"][key]:
                    self.logger.info('Got Content %s for key %s', toDict["Content"][key], key)
                    tmpFile = tempfile.NamedTemporaryFile(delete=False)
                    try:
                        tmpFile.write(toDict["Content"][key])
                    except ValueError as ex:
                        # The content may arrive base64-encoded; fall back to decoding it.
                        self.logger.info('Received ValueError. More details %s. '
                                         'Trying to write the base64-decoded content instead', ex)
                        tmpFile.write(decodebase64(toDict["Content"][key]))
                    tmpFile.close()
                    outputDict[key] = self.parseDeltaRequest(tmpFile.name, jOut)
                    os.unlink(tmpFile.name)
        except (IOError, KeyError, AttributeError, IndentationError, ValueError,
                BadSyntax, HostNotFound, UnrecognizedDeltaOption) as ex:
            outputDict = getError(ex)
        dbobj = getVal(self.dbI, sitename=self.sitename)
        if 'errorType' in outputDict.keys():
            toDict["State"] = "failed"
            toDict["Error"] = outputDict
            toDict['ParsedDelta'] = {'addition': '', 'reduction': ''}
            self.stateMachine.failed(dbobj, toDict)
        else:
            toDict["State"] = "accepted"
            connID = None
            for key in outputDict:
                if not outputDict[key]:
                    continue
                toDict['dtype'] = key
                toDict['Type'] = key
                self.logger.info('%s' % str(outputDict[key]))
                toDict["ParsedDelta"] = outputDict
                connID = []
                for item in outputDict[key]:
                    connID.append(item['connectionID'])
                toDict['ConnID'] = connID
                toDict['modadd'] = 'idle'
            self.stateMachine.accepted(dbobj, toDict)
            # =================================
        return toDict
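

# ---------------------------------------------------------------------------
# Minimal usage sketch (not part of the original module): shows how the
# service above could be driven per configured site. The config file path and
# the stdlib ConfigParser/logging stand-ins are assumptions for illustration
# only; a real deployment wires in the project's own config and logger
# helpers and database setup.
if __name__ == '__main__':
    import logging
    import ConfigParser as configLib  # Python 2 stdlib parser, assumed here

    logging.basicConfig(level=logging.DEBUG)
    LOGGER = logging.getLogger('PolicyService')

    CONFIG = configLib.ConfigParser()
    CONFIG.read('/etc/dtnrm/main.conf')  # hypothetical config location

    # One PolicyService instance per configured site; startwork() walks the
    # delta state machine (committing, committed, activating, ...) for that site.
    for SITENAME in CONFIG.get('general', 'sites').split(','):
        POLICER = PolicyService(CONFIG, LOGGER, SITENAME)
        POLICER.startwork()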