def SetPrecisionFromPlugin(self, collectorID, preicsionValue, customNamespaceString=None):
    """Set the display precision of a collector previously created by a user plugin.

    collectorID -- the ID the plugin used when adding the collector
    preicsionValue -- value convertible to float; stored on objCollector.Precision
        (parameter name typo kept for backward compatibility with keyword callers)
    customNamespaceString -- optional namespace override used in the lookup key
    """
    # Collector keys are stored with the configured prefix/suffix wrappers plus
    # the namespace-specific suffix.
    objCollector = self._NamespaceObject.GetCollector(
        self.__PrefixStr + collectorID + self.__SuffixStr +
        self.__specialSuffix(customNamespaceString))
    if None == objCollector:
        # BUG FIX: message previously said "Set a value" (copy-paste from
        # SetCollectorValueFromPlugin); this call sets a Precision.
        Log.getLogger().error(
            "User defined DynamicCollector tried to Set a Precision on a collector that does not exist, with ID: "
            + collectorID)
        return
    try:
        objCollector.Precision = float(preicsionValue)
    except (TypeError, ValueError):
        # BUG FIX: narrowed the bare except, and fixed the misleading text --
        # the collector DOES exist here; the precision value was invalid.
        Log.getLogger().error(
            "User defined DynamicCollector tried to Set an invalid Precision value of {0} on collector with ID: {1}"
            .format(preicsionValue, collectorID))
def SetCollectorValueFromPlugin(self, collectorID, Value, elapsedTime=None, customNamespaceString=None):
    """Push a new value into a collector created by a user plugin.

    Returns True on success, False when no collector with that ID exists.
    When elapsedTime is None it is derived from the collector's last
    collection timestamp.
    """
    lookupKey = (self.__PrefixStr + collectorID + self.__SuffixStr +
                 self.__specialSuffix(customNamespaceString))
    objCollector = self._NamespaceObject.GetCollector(lookupKey)
    if objCollector is None:
        Log.getLogger().error(
            "User defined DynamicCollector tried to Set a value to a collector that does not exist, with ID: "
            + collectorID)
        return False
    if elapsedTime is None:
        # Derive the elapsed time since this collector last collected.
        elapsedTime = Time.GetCurrMS() - objCollector._LastCollectionTime
    objCollector.SetDynamicData(Value, elapsedTime)
    return True
def getInsertTime(self, baseNode):
    """Parse the optional <InsertTime> child of baseNode into self.insertTime.

    Result stored on self.insertTime is None (tag absent), the literal
    string "Append", or an int.
    Raises pickle.UnpicklingError on duplicate tags or a non-numeric value.
    """
    insertTimeEntry = getChildNodes(baseNode, "InsertTime")
    if None == insertTimeEntry or 0 == len(insertTimeEntry):
        self.insertTime = None
    elif len(insertTimeEntry) > 1:
        Log.getLogger().error("Only 1 insert time per source.")
        raise pickle.UnpicklingError()
    elif insertTimeEntry[0].firstChild.nodeValue == "Append":
        self.insertTime = "Append"
    else:
        try:
            self.insertTime = int(insertTimeEntry[0].firstChild.nodeValue)
        except (TypeError, ValueError):
            # BUG FIX: narrowed the bare except, and str() the nodeValue --
            # a None nodeValue previously raised a TypeError while building
            # this message inside the handler.
            Log.getLogger().error(
                "Invalid numeric value for <InsertTime>: " +
                str(insertTimeEntry[0].firstChild.nodeValue))
            raise pickle.UnpicklingError()
def __init__(self, objDyna):
    """Build the plugin-facing API facade around a DynamicCollector.

    Exposes the add/set callbacks and a few settings as plain attributes
    so user plugin code never touches objDyna directly.
    """
    # Bound-method handles the plugin is allowed to call, mapped
    # facade-name -> DynamicCollector method name.
    for facadeName, sourceName in (
            ("DoesCollectorExist", "CollectorExistsFromPlugin"),
            ("AddCollector", "AddCollectorFromPlugin"),
            ("SetCollectorValue", "SetCollectorValueFromPlugin"),
            ("SetNormilization", "SetNormilizationFromPlugin"),  # (sic) kept for compatibility
            ("SetPrecision", "SetPrecisionFromPlugin"),
            ("SetScale", "SetScaleFromPlugin")):
        setattr(self, facadeName, getattr(objDyna, sourceName))
    # Filled in later by the threading framework; plugins poll it to exit.
    self.KillThreadSignalled = None
    self.LockFileName = objDyna.GetLockFile()
    self.Interval = objDyna._PollingInterval
    self.Logger = Log.getLogger()
def Collect(self):
    """Return the largest MaxCollectedValue across all input collectors, as a string.

    Returns None when there is nothing to collect or an input is not ready;
    returns the sentinel string "HelenKeller" when the first input is
    non-numeric (NOTE(review): magic sentinel kept for downstream
    compatibility -- confirm consumers before changing).
    """
    collectors = self.GetCollectors()  # renamed: previously shadowed builtin 'list'
    if len(collectors) < 1:
        return None
    try:
        for objCollector in collectors:
            # Seed the running max for any collector seen for the first time.
            if not hasattr(objCollector, "MaxCollectedValue"):
                objCollector.MaxCollectedValue = objCollector.GetLastValue()
            if not Utility.IsNumeric(collectors[0].MaxCollectedValue):
                if not self._InvalidInpWarningSent:
                    # BUG FIX: format() instead of '+' -- a non-string value
                    # previously raised TypeError while building the warning.
                    Log.getLogger().warn(
                        "An Input to Operator MaxValue is non numeric.--> {0}"
                        .format(objCollector.MaxCollectedValue))
                    self._InvalidInpWarningSent = True
                return "HelenKeller"
        maxVal = float(collectors[0].MaxCollectedValue)  # renamed: shadowed builtin 'max'
        for objCollector in collectors:
            if not Utility.IsNumeric(objCollector.MaxCollectedValue):
                return None
            val = float(objCollector.MaxCollectedValue)
            if not objCollector.ReadyForConsumption() and not objCollector.IsDefaultValue():
                return None  # hasn't yet collected
            if val > maxVal:
                maxVal = val
    except Exception:
        if not self._InvalidInpWarningSent:
            # BUG FIX: the handler itself could raise -- objCollector may not
            # have MaxCollectedValue, and '+' fails on non-strings.
            Log.getLogger().warn(
                "An Input to Operator MaxValue is non numeric.--> {0}"
                .format(getattr(objCollector, "MaxCollectedValue", "<unknown>")))
            self._InvalidInpWarningSent = True
        maxVal = ""
    return str(maxVal)
def __ReadDownstreamTargets(self, domDoc):
    """Read every <TargetConnection IP=.. PORT=..> node and register each as
    a downstream target.

    Returns the list of created Target instances, or False on a
    configuration error (missing IP/PORT, non-numeric port).
    """
    retList = []
    nodeList = domDoc.getElementsByTagName("TargetConnection")
    if None != nodeList and len(nodeList) > 0:
        for node in nodeList:
            attributes = node.attributes
            if "IP" in attributes:
                IP = Alias.Alias(attributes["IP"].nodeValue)
            else:
                Log.getLogger().error("No Target IP specified")
                return False
            if "PORT" in attributes:
                try:
                    Port = int(Alias.Alias(attributes["PORT"].nodeValue))
                except Exception as Ex:
                    Log.getLogger().error(str(Ex))
                    Log.getLogger().error("Invalid Port set for Target Connection")
                    return False
            else:
                # BUG FIX: this branch previously logged "No Target IP
                # specified" for a missing PORT attribute.
                Log.getLogger().error("No Target PORT specified")
                return False
            connType = ConnectionType.Unknown
            objTarget = Target.Target(IP, Port, connType, True)  # could be Marvin or another Oscar
            Key = IP + ":" + str(Port)
            TargetManager.GetTargetManager().AddDownstreamTarget(objTarget, Key)
            # BUG FIX: previously appended the Target module object itself
            # instead of the created instance.
            retList.append(objTarget)
    elif None == self.__DynamicConnectMarvinMap:
        Log.getLogger().error("TargetConnection not defined")
    return retList
def __init__(self,ID,TargetIP,TargetPort,DefaultInterval):
    """Create a Namespace bound to an upstream target (IP:port) over UDP.

    ID -- namespace identifier used in logs and packets
    TargetIP -- hostname or IP of the upstream target (resolved below if possible)
    TargetPort -- target UDP port
    DefaultInterval -- default collector polling interval (ms)
    """
    self.__TargetIP = TargetIP
    self.__TargetPort = int(TargetPort)
    self._DefaultInterval = int(DefaultInterval)
    self.__ListenPort = 0
    self.__ListenIP = "0.0.0.0"
    # One UDP socket shared for all sends from this namespace.
    self._Socket = socket.socket(socket.AF_INET,socket.SOCK_DGRAM)
    self._Socket.setsockopt(socket.SOL_SOCKET,socket.SO_REUSEADDR,1)
    self._DefaultPrecision = 0
    # default precision for namespace, unless overridden in namespace or
    # collector, is set when reading configuration (even if not specified)
    self._CheckMTU = True
    self._MTUWarningThreshold = 1500 - 20 - 8  # IPV4 + UDP Header
    self._LastFreshUniqueID = 0
    #self.__SEND_BUFFER_SIZE =
    #self._Socket.getsockopt(socket.SOL_SOCKET,socket.SO_SNDBUF)
    self._ID = ID
    self._Collectors = []
    self._CollectorMap = {}
    # TODO, added a map because DynamicCollecters were super slow > 1000. Get rid
    # of the_Collectors and make all use Map
    self.__Actors = []
    self.__PacketNumber = 1
    self.__objPacketNumberLock = threading.Lock()
    self._Server = None
    self._SentSizeLock = threading.Lock()
    self._SentBytes = 0
    self.__ProcessThreadGroupings = {}  # a map of collector ProcessThreads
    self.__LastActorCalled="No Actors Called Yet"
    Log.getLogger().info("Namespace [" + ID + "] Target is " + TargetIP + ":" + str(TargetPort))
    # Select the send implementation once, class-wide, based on the runtime
    # Python major version (string vs bytes handling differs).
    if sys.version_info < (3, 0):
        Namespace.SendPacket = Namespace.SendPacket_Python2
    else:
        Namespace.SendPacket = Namespace.SendPacket_Python3
    try:
        # Resolve a DNS name up front so later sends use the address directly.
        self.__TargetIP = socket.gethostbyname(self.__TargetIP)
    except:
        pass  # was likely a bad bad dns name, or was an IP address to start with
def Alias(input):
    """Expand every registered $(name) alias inside *input*.

    Expansion repeats because an alias value may itself contain aliases.
    If a $(name) is found whose alias is NOT registered, a warning is
    logged and the ORIGINAL, unmodified string is returned.
    """
    orig = input
    start = input.find("$(")  # Alias is surrounded by -->$( ) <--
    stop = input.find(")")
    while -1 != start and -1 != stop:
        name = input[start + 2:stop]
        if not AliasMgr.IsAliased(name):
            Log.getLogger().warn(
                "Something looks like an Alias, but there is no alias registered for it --> "
                + input)
            return orig
        # Splice the alias value in place of the $(name) token and rescan.
        input = input[:start] + AliasMgr.GetAlias(name) + input[stop + 1:]
        start = input.find("$(")
        stop = input.find(")")
    return input
def copyNamespace(args):
    """Copy namespace(s) matching args.namespace to a new name in every input file.

    NOTE(review): reads input/output/overwrite from the module-level g_args
    but namespace/new from the args parameter -- verify this mix is intended.
    """
    totalCopied = 0
    fCount = 0
    for inpName in glob.glob(g_args.input):
        fHandler = Actions.FileHandler(inpName)
        copiedInFile = 0
        for ns in args.namespace:
            copiedInFile += fHandler.Copy_Namespace(ns, args.new)
        fHandler.writeFile(GetTargetFileName(inpName, g_args.output), g_args.overwrite)
        Log.getLogger().info("{} namespaces copied in {}".format(copiedInFile, inpName))
        totalCopied += copiedInFile
        if copiedInFile > 0:
            fCount += 1
    Log.getLogger().info("Copied {} namespaces in {} files".format(totalCopied, fCount))
def deleteId(args):
    """Delete datapoints matching args.id from the given namespaces in every input file.

    NOTE(review): input/output/overwrite come from the module-level g_args,
    namespace/id from the args parameter -- verify this mix is intended.
    """
    totalDeleted = 0
    fCount = 0
    for inpName in glob.glob(g_args.input):
        fHandler = Actions.FileHandler(inpName)
        removedHere = 0
        for ns in args.namespace:
            removedHere += fHandler.Delete_Id(ns, args.id)
        fHandler.writeFile(GetTargetFileName(inpName, g_args.output), g_args.overwrite)
        Log.getLogger().info("{} datapoints deleted from {}".format(removedHere, inpName))
        totalDeleted += removedHere
        if removedHere > 0:
            fCount += 1
    Log.getLogger().info("Deleted {} datapoints from {} files".format(totalDeleted, fCount))
def StrokeWatchdogTimer(self):
    """Refresh this target's heartbeat and, on first contact, ask Minions to
    resend (refresh) their data.

    NOTE(review): nesting reconstructed -- the initial-refresh block is
    assumed to sit inside the m_CanTimeout guard; confirm against history.
    """
    if True == self.m_CanTimeout:
        self.m_lastHeartbeat = Time.GetCurrMS()
        self.m_hasTimedOut = False
        if False == self.m_InitialRefreshSent:
            # First time we have heard from this target: broadcast a Refresh
            # request upstream so Minions resend current values.
            self.m_InitialRefreshSent = True
            buffer = "<?xml version=\"1.0\" encoding=\"utf-8\"?>"
            buffer = buffer + "<Oscar Type=\"Refresh\">"
            buffer = buffer + "<Version>1.0</Version>"
            # Random unique ID lets Minions filter duplicate requests.
            uID = str(random.randint(0, 500000))
            buffer = buffer + "<UniqueID>" + uID + "</UniqueID>"
            buffer = buffer + "</Oscar>"
            from Helpers import TargetManager
            if TargetManager.GetTargetManager().BroadcastUpstream(buffer):
                Log.getLogger().info("Sending Refresh Request to Minions [" + uID + ']')
                TargetManager.GetTargetManager().BroadcastUpstream(buffer)  # is UDP, so send a couple more times, dups will be filtered on Minion
                TargetManager.GetTargetManager().BroadcastUpstream(buffer)
def DeltaValue(entry, delta):
    """Add *delta* to entry.Value (both parsed as float) and store back as str.

    Returns True when the value was modified, False otherwise (non-numeric
    entry value, or delta is None).
    Raises pickle.UnpicklingError when delta itself is not numeric.
    """
    retVal = False
    try:
        float(entry.Value)
    except (TypeError, ValueError):
        Log.getLogger().info(
            "Invalid <Namespace> - BoundID tried to bound non numeric data point, ID="
            + entry.ID)
        # BUG FIX: previously a bare 'return' (None); every other path in
        # this function returns a bool.
        return retVal
    if None != delta:
        try:
            delta = float(delta)
            entry.Value = str(float(entry.Value) + delta)
            retVal = True
        except (TypeError, ValueError):
            # BUG FIX: str(delta) -- '+' with a non-string delta raised a
            # TypeError inside this handler before the real error was logged.
            Log.getLogger().error("Invalid <Namespace> - delta value of " +
                                  str(delta) + " is invalid.")
            raise pickle.UnpicklingError()
    return retVal
def __GetRepeatAttribute(self, node, strItem, required=False):
    """Fetch the integer attribute *strItem* from a Repeat node.

    Returns the non-negative int value, or -1 when the attribute is absent
    or invalid (logging an error when required or malformed).
    """
    attributes = node.attributes
    if not strItem in attributes.keys():
        if True == required:
            Log.getLogger().error(
                "Error parsing " + self._ConfigFilename + ": " +
                "Repeat specified for Operator Input, but no " + strItem +
                " given. Ignoring")
            return -1
        return -1
    try:
        retVal = int(Alias.Alias(attributes[strItem].nodeValue))
    except (TypeError, ValueError):
        # BUG FIX: message previously concatenated the boolean 'required'
        # (a TypeError at log time) and misspelled "invalid"; report the
        # attribute name instead.
        Log.getLogger().error("Error parsing " + self._ConfigFilename + ": " +
                              "invalid " + strItem + " given for Repeat. Ignoring")
        return -1
    if retVal < 0:
        Log.getLogger().error("Error parsing " + self._ConfigFilename + ": " +
                              "invalid " + strItem + " given for Repeat. Ignoring")
        return -1
    return retVal
def PerformLoadFileTask(self, Params):
    """Handle an Oscar 'LoadFile' task: stop playback/recording/live data,
    read the given file and enable playback of it.

    Params -- list with exactly one entry: the filename (alias-expandable).
    Returns True on success, False otherwise.
    """
    #<?xml version="1.0" encoding="utf-8"?>
    #<Marvin Type="OscarTask">
    #  <Version>1.0</Version>
    #  <OscarID>DemoOscar</OscarID>
    #  <Task>LoadFile</Task>
    #  <Param>filename</Param>
    #</Marvin>
    if len(Params) != 1:
        Log.getLogger().error("Oscar Task to load file failed - no file specified.")
        # BUG FIX: previously fell through and indexed Params[0] anyway,
        # raising IndexError on an empty parameter list.
        return False
    filename = Alias.Alias(Params[0])
    Log.getLogger().info("Performing Oscar task: Load File -->" + str(filename))
    GuiMgr.OnStopPlayback()
    GuiMgr.OnStopRecording(True)  # drop all recorded packets
    GuiMgr.OnStopLiveData()
    if GuiMgr.ReadFromFile(filename):
        GuiMgr.OnEnablePlayback()
        GuiMgr.SetPlaybackFilename(filename)
    else:
        Log.getLogger().warning("Oscar Task to load file [" + filename + "] failed")
        return False
    return True
def deltaId(args):
    """Apply a numeric delta to datapoints matching args.id in the given
    namespaces, for every input file.

    NOTE(review): input/output/overwrite come from the module-level g_args,
    namespace/id/delta from the args parameter -- same mix as the siblings.
    """
    inpFiles = glob.glob(g_args.input)
    totalModified = 0
    fCount = 0
    for inpName in inpFiles:
        fHandler = Actions.FileHandler(inpName)
        modifiedFromFileCount = 0
        for namespace in args.namespace:
            # BUG FIX: was '=', silently discarding the counts from all but
            # the last namespace (copyNamespace/deleteId both accumulate).
            modifiedFromFileCount += fHandler.ApplyDelta_Id(namespace, args.id, args.delta)
        targetFn = GetTargetFileName(inpName, g_args.output)
        fHandler.writeFile(targetFn, g_args.overwrite)
        Log.getLogger().info("{} datapoints delta's from {}".format(
            modifiedFromFileCount, inpName))
        totalModified += modifiedFromFileCount
        if modifiedFromFileCount > 0:
            fCount += 1
    Log.getLogger().info("delta'd {} datapoints from {} files".format(
        totalModified, fCount))
def __sendConnectionInfoProc(self, fnKillSignalled, userData):
    """Worker thread: periodically announce this namespace's connection
    information (version, namespace name, listen port) until signalled."""
    # The announcement payload never changes, so build it once up front.
    parts = ["<?xml version=\"1.0\" encoding=\"utf-8\"?>",
             "<Minion Type=\"ConnectionInformation\">",
             "<Version>1.0</Version>",
             "<MinionVersion>" + VersionMgr.ReadVer() + "</MinionVersion>",
             "<Namespace>" + str(self) + "</Namespace>",
             "<Port>" + str(self._Server.getPort()) + "</Port>",
             "</Minion>"]
    buffer = "".join(parts)
    lastUpdate = 0
    while not fnKillSignalled():
        # Re-announce once per update interval.
        if lastUpdate + Namespace.ConnectionInfoUpdateInterval < Time.GetCurrMS():
            if self.SendPacket(buffer):
                Log.getLogger().debug("Sent announcement to Oscar")
            lastUpdate = Time.GetCurrMS()
        # Sleep in short slices so a kill signal is noticed quickly rather
        # than blocking for the full announcement interval.
        Sleep.SleepMs(Namespace.ConnectionUpdateThreadSleepinterval)
def ScaleID(self, namespace, node):
    """Multiply every datapoint matching the node's ID attribute in
    *namespace* by the Factor attribute, with optional Precision.

    Returns the number of entries scaled.
    Raises pickle.UnpicklingError on a missing/invalid attribute.
    """
    if not "Factor" in node.attributes:
        Log.getLogger().error("Invalid <Namespace> - Scale - no Factor specified.")
        raise pickle.UnpicklingError()
    if not "ID" in node.attributes:
        Log.getLogger().error("Invalid <Namespace> - Scale - no ID specified.")
        raise pickle.UnpicklingError()
    idLow = node.attributes["ID"].nodeValue.lower()
    if "Precision" in node.attributes:
        try:
            Precision = int(node.attributes["Precision"].nodeValue)
        except Exception as Ex:
            # BUG FIX: message concatenated the Attr OBJECT (TypeError inside
            # this handler); use its nodeValue string.
            Log.getLogger().error(
                "Invalid <Namespace> - Scale Precision - invalid value: " +
                node.attributes["Precision"].nodeValue)
            raise pickle.UnpicklingError()
    else:
        Precision = None
    try:
        factorVal = float(node.attributes["Factor"].nodeValue)
    except Exception as Ex:
        # BUG FIX: same Attr-object concatenation problem as above.
        Log.getLogger().error(
            "Invalid <Namespace> - Scale - invalid value: " +
            node.attributes["Factor"].nodeValue)
        raise pickle.UnpicklingError()
    scaleCount = 0
    for entryObj in self._namespaceMap[namespace]:
        if isinstance(entryObj, MarvinGroupData.MarvinDataGroup):
            # Groups hold their datapoints in _DataList; scale each member.
            for entry in entryObj._DataList:
                if Matches(entry.ID, idLow):
                    ScaleValue(entry, factorVal, Precision)
                    scaleCount += 1
        elif Matches(entryObj.ID, idLow):
            ScaleValue(entryObj, factorVal, Precision)
            scaleCount += 1
    return scaleCount
def writeFile(self, outfile, overWrite):
    """Pickle the merged entry list to *outfile*.

    Prompts interactively before clobbering an existing file unless
    overWrite is True. Returns True on success, False on a write failure,
    and None when the user declined to overwrite.
    """
    if False == overWrite and exists(outfile):
        answer = input('{} already exists. Overwrite? Y = yes, N = no\n'.format(outfile))
        if answer.lower() != 'y':
            Log.getLogger().warn("Skipping writing to file {}".format(outfile))
            return
    resultList = self.createMergedList()
    try:
        with open(outfile, 'w+b') as fp:
            pickle.dump(resultList, fp, pickle.DEFAULT_PROTOCOL)
        Log.getLogger().info("New file [" + outfile + "] created with " +
                             str(len(resultList)) + " entries.")
    except Exception as ex:
        print(str(ex))
        return False
    # BUG FIX: previously fell through returning None on success, making a
    # successful write indistinguishable (falsy) from the skip path.
    return True
def alternateCollectionProc(self):
    """Collect this collector's data if due, wrap it in an XML header and
    transmit it.

    Returns the number of bytes in the sent buffer, or 0 when nothing was
    collected, collection returned None, or the send failed.
    """
    if self.IsOnDemand():
        # On-demand collectors have their own path; reaching here is a
        # misconfiguration worth flagging (execution still continues).
        Log.getLogger().error("On Demand Collector called with alternateCollectionProc")
    if self.NeedsCollecting():
        startCollectionTime = Time.GetCurrMS()
        buffer = self.PerformCollection()
        TimeToCollect = Time.GetCurrMS() - startCollectionTime
        #print(TimeToCollect)
        if None != buffer:
            buffer = "<?xml version=\"1.0\" encoding=\"utf-8\"?>" + buffer
            if not self._NamespaceObject.SendPacket(buffer):
                return 0
            #self._NamespaceObject.CheckMTU(len(buffer),self._MinionID)
            if TimeToCollect > self._PollingInterval:
                # do a sanity check to see if collection time is longer than collector frequency
                Log.getLogger().warning("Collector: " + self.GetID() + " took longer to perform the actual collection than the specified frequency for it (" + str(self._PollingInterval) + ". It is suggested you change something to fix this.")
            return len(buffer)
    return 0
def __init__(self, ip=None, Port=None, ConnType=ConnectionType.Unknown, canTimeout=True):
    """Create a UDP Target and start its worker thread.

    ip -- hostname or IP of the target (may be a DNS name, re-resolved periodically)
    Port -- UDP port
    ConnType -- ConnectionType enum value
    canTimeout -- whether this target participates in watchdog timeouts
    """
    super(Target, self).__init__(ip, Port, ConnType)
    self.ConfigurationDefinedTarget = ip
    self.m_IP_InUse = None  # m_ip could be DNS name
    self.m_socket = None
    self.m_lastHeartbeat = Time.GetCurrMS()
    self.m_PacketsSent = 0
    self.m_BytestSent = 0
    self.m_InitialRefreshSent = False
    self.m_objLockDict = threading.Lock()
    self.m_SendList = []
    self.m_hasTimedOut = False
    self.m_LastDNSResolution = Time.GetCurrMS()
    self.m_DNSResolutionPeriod = 30000  # 30 seconds
    self.m_CanTimeout = canTimeout
    self.threadName = None
    self.lastRefreshRequestID = 0
    self.MarkedForRemoval = False
    try:
        self.m_socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM,
                                      socket.IPPROTO_UDP)
        self.m_socket.setblocking(True)
        self.m_socket.settimeout(0.001)
    except Exception as ex:
        # BUG FIX: previously formatted 'super.m_Connection' -- an attribute
        # lookup on the 'super' builtin, which itself raises AttributeError
        # inside this handler. Log the actual socket error instead.
        Log.getLogger().error("Error setting up Target Socket -->" + str(ex))
    self.threadName = "Target:" + self.getIP() + "[" + str(self.getPort()) + "]"
    ThreadManager.GetThreadManager().CreateThread(self.threadName, self.WorkerProc)
    ThreadManager.GetThreadManager().StartThread(self.threadName)
def __AlternateCollectionMethodMultiThread(self, fnKillSignalled, startIndex):
    """Spin up one worker thread per collector process-thread grouping and
    process the FIRST grouping on the calling thread until killed.

    fnKillSignalled -- callable polled to know when to stop
    startIndex -- unused here; kept for signature parity with the
        single-threaded variant (NOTE(review): confirm).
    """
    processedWithoutRestdummymakelooklikeother = 0
    ThreadCount = -1
    ProcessThreadCount = self.__CreateInitialCollectorThreadGroupings()
    AddActiveProcessingThreads(ProcessThreadCount)
    firstGroupID = None
    firstGroupCollectors = []
    collectorCount = len(self._Collectors)
    if collectorCount < 1:
        Log.getLogger().error("No Collectors to process")
        return
    # First grouping is kept for this thread; every other grouping gets its
    # own worker thread.
    for processThreadID, collectorList in self.__ProcessThreadGroupings.items():
        if None == firstGroupID:
            firstGroupID = processThreadID
            firstGroupCollectors = collectorList
        else:
            ID = str(self) + processThreadID
            ThreadManager.GetThreadManager().CreateThread(ID, self.__SlicedThreadProc, processThreadID)  # create a worker thread and pass it a list of collectors to update
            ThreadManager.GetThreadManager().StartThread(ID)
            ThreadCount += 1
    while not fnKillSignalled():  # now go process the 1st group in this thread
        processed = self.__CollectSingleRange(fnKillSignalled, firstGroupID)
        if processed == 0:
            # Nothing collected this pass; back off briefly.
            Sleep.SleepMs(Namespace.SleepIntervalIfNoDataCollected)
        if collectorCount != len(self._Collectors):  # dynamic collectos must have added some
            # NOTE(review): detection of dynamically added collectors is a
            # no-op here ('pass') -- looks unfinished; confirm intent.
            pass
def AddCollectorFromPlugin(self, collectorID, customNamespaceString=None):
    """Create a new collector on behalf of a user plugin.

    Returns True when the collector was created, False when the ID already
    exists or creation failed.
    """
    if self.CollectorExistsFromPlugin(collectorID):
        Log.getLogger().error(
            "User defined DynamicCollector tried to Add a collector with ID that already exists: "
            + collectorID)
        return False
    fullID = (self.__PrefixStr + collectorID + self.__SuffixStr +
              self.__specialSuffix(customNamespaceString))
    objCollector = self.__createCollector(fullID, True)
    if objCollector is None:
        Log.getLogger().error(
            "Error creating collector using User defined DynamicCollector ID: "
            + collectorID)
        return False
    # The transmitted ID excludes the namespace-specific suffix.
    objCollector._OverrideID = self.__PrefixStr + collectorID + self.__SuffixStr
    if customNamespaceString is not None:
        objCollector.SetOverrideNamespaceString(customNamespaceString)
    return True
def __CreateSendBuffer(self, value, elapsedtime, normalized):
    """Build the <Minion Type="Data"> XML payload for one collected value.

    Returns the buffer string, or None when the value is missing or empty.
    """
    if value is None:
        # whoa, this should not happen
        Log.getLogger().error("Asked to send a non existant value. ID=" + self.GetID())
        return None
    if len(str(value)) == 0:
        Log.getLogger().warn("Collector [" + self.GetID() + "] returned empty string. Dropping.")
        return None
    # Namespace may be overridden per collector.
    if self._NamespaceOverride is None:
        namespaceStr = str(self._NamespaceObject)
    else:
        namespaceStr = self._NamespaceOverride
    parts = ["<Minion Type=\"Data\">",
             "<Version>1</Version>",
             "<PacketNumber>" + str(self._NamespaceObject.getNextPacketNumber()) + "</PacketNumber>",
             "<Namespace>" + namespaceStr + "</Namespace>",
             "<ID>" + self.GetTransmitID() + "</ID>",
             "<Value>" + value + "</Value>",
             "<Normalized>" + str(normalized) + "</Normalized>",
             "<ElapsedTime>" + str(elapsedtime) + "</ElapsedTime>",
             "</Minion>"]
    return "".join(parts)
def __ReadAutoConnectInfo(self, domDoc):
    """Read every <MarvinAutoConnect Key=...> node and store md5(Key) -> Key
    so incoming bullhorn announcements can be matched by hash.

    Returns True on success, False on a configuration error.
    """
    nodeList = domDoc.getElementsByTagName("MarvinAutoConnect")
    if None != nodeList and len(nodeList) > 0:
        for node in nodeList:
            attributes = node.attributes
            if "Key" in attributes:
                Key = Alias.Alias(attributes["Key"].nodeValue)
            else:
                Log.getLogger().error("No MarvinAutoConnect Key specified")
                return False
            # Keys travel on the wire as md5 hashes, never in clear text.
            hashGen = hashlib.md5(str.encode(Key))
            HashOfKey = hashGen.hexdigest()
            if HashOfKey in self.__DynamicConnectMarvinMap.keys():
                Log.getLogger().error("Duplicate MarvinAutoConnect Keys: " + Key)
                return False
            self.__DynamicConnectMarvinMap[HashOfKey] = Key
    return True
def AddCollector(self, objCollector, beforeID=None):
    """Register a collector in the lookup map and the ordered list.

    beforeID -- when given, the collector is inserted right after that ID
    (used by dynamic collectors) instead of appended.
    Returns True on success, False for a duplicate ID.
    """
    mapKey = objCollector.GetID().lower()
    if mapKey in self._CollectorMap:
        Log.getLogger().error("Duplicate Collector found: " + objCollector.GetID())
        return False
    # Collectors are in a MAP for fast retrieval
    self._CollectorMap[mapKey] = objCollector
    # Dynamic Collectors should be inserted right AFTER the DynamicCollector
    # collector; if appended to the end, operators that use data from a
    # dynamic collector would run against stale data.
    if beforeID is None:
        self._Collectors.append(objCollector)
    else:
        InsertAfterInList(self._Collectors, beforeID, objCollector)
        groupID = objCollector.GetProcessThreadID()
        if groupID in self.__ProcessThreadGroupings.keys():
            InsertAfterInList(self.__ProcessThreadGroupings[groupID], beforeID, objCollector)
        else:
            Log.getLogger().error("Not supposed to end up here!")
    return True
def Copy_Namespace(self, origName, newName):
    """Deep-copy every namespace matching *origName* to *newName*
    (wildcards supported), retargeting each copied entry's Namespace field.

    Returns the number of namespaces copied.
    """
    namespaces = self.getMatchingNamespacesNameList(origName)
    copiedCount = 0
    for namespace in namespaces:
        newNamespaceName = HandleWildcardUpdate(namespace, newName)
        # BUG FIX: the existence check must run against the namespace MAP
        # itself; it previously tested membership in the source namespace's
        # entry LIST, which never contains a namespace name.
        if newNamespaceName in self._namespaceMap:
            Log.getLogger().error(
                "Cannot copy namespace {} to {} - it already exists".format(
                    namespace, newNamespaceName))
        else:
            copiedCount += 1
            self._namespaceMap[newNamespaceName] = copy.deepcopy(
                self._namespaceMap[namespace])
            # Fix up the Namespace field on every copied entry, including
            # members of grouped datapoints.
            for entry in self._namespaceMap[newNamespaceName]:
                if isinstance(entry, MarvinGroupData.MarvinDataGroup):
                    for subEntry in entry._DataList:
                        subEntry.Namespace = newNamespaceName
                else:
                    entry.Namespace = newNamespaceName
    return copiedCount
def HandleBullhornAnnouncement(self,node,rawData,fromAddr):
    """Handle a Marvin 'Bullhorn' dynamic-connection announcement: match the
    announced key hash against configured MarvinAutoConnect keys and, for a
    new peer, create a downstream Target for it.
    """
    #<?xml version="1.0" encoding="utf-8"?>
    #<Marvin Type="Bullhorn">
    #  <Version>1.0</Version>
    #  <UniqueID>3236</UniqueID>
    #  <Hostname>pgkutch.beervana.net</Hostname>
    #  <Key>md5 hash</Key>
    #  <Port>5000</Port>
    #</Marvin>
    try:
        version = node.getElementsByTagName('Version')[0].firstChild.nodeValue
        Hash = node.getElementsByTagName('Key')[0].firstChild.nodeValue
        Port = node.getElementsByTagName('Port')[0].firstChild.nodeValue
        UniqueID = node.getElementsByTagName('UniqueID')[0].firstChild.nodeValue
        IP = fromAddr[0].lower()
        Hostname = node.getElementsByTagName('Hostname')[0].firstChild.nodeValue
    except Exception as _:
        Statistics.GetStatistics().OnMalformedPacketReceived("Received invalid Marvin Bullhorn Packet : " + rawData)
        return
    # Only accept announcements whose md5 key hash matches a configured key.
    RemoteKey = Configuration.get().GetMarvinAutoConnectKeyFromHash(Hash)
    strID = Hostname + ":[" + IP + ":" + Port + "]"
    if None == RemoteKey:  # don't have anything configured that matches
        Log.getLogger().warning("Received Marvin Dynamic Connection Message, with no corropsonding Key from: " + strID)
        return
    strID += " Key=" + RemoteKey
    HashMapKey = IP + ":" + str(Port)
    objExisting = TargetManager.GetTargetManager().GetDownstreamTarget(HashMapKey)
    if None == objExisting:  # could be NDS name not resolved, so try by IP address
        objExisting = TargetManager.GetTargetManager().GetDownstreamTargetEx(IP, Port)
    if None != objExisting:
        if hasattr(objExisting, '_ReceivedOnUniqueID') and UniqueID != objExisting._ReceivedOnUniqueID:
            Log.getLogger().warning("Received Marvin Dynamic Connection Message, for already active connection: " + strID)
        else:
            pass  # is simply the additional packets (marvin sends multiples as it is UDP traffic)
        return
    # doesn't already exist, so let's to add!
    # NOTE(review): version "1.0" is assumed to identify a Marvin peer and
    # anything else an Oscar -- confirm against the protocol definition.
    if "1.0" == version:
        objTarget = Target.Target(IP, Port, ConnectionType.DynamicMarvin, True)
    else:
        objTarget = Target.Target(IP, Port, ConnectionType.DynamicOscar, True)
    objTarget._ReceivedOnUniqueID = UniqueID  # so we can filter out dups due to UDP
    objTarget._UserKey = RemoteKey
    TargetManager.GetTargetManager().AddDownstreamTarget(objTarget, HashMapKey)
    Log.getLogger().info("Creating Dynamic Connection:" + strID)
    return
def __BoundData(self, sendValue):
    """Apply the configured Min/Max bounds to *sendValue*.

    When a bound is exceeded, the configured BoundAction decides whether to
    clamp (Set), resend the last value (RepeatLast) or drop (None). The
    result is then passed through precision/scale adjustment. When no
    bounds are configured, or the value is non-numeric, the value is
    returned untouched.
    """
    if self._Bound_Max is None and self._Bound_Min is None:
        return sendValue  # bounding not configured for this collector
    try:
        value = float(sendValue)
    except Exception:
        Log.getLogger().warning(
            "Collector [" + self.GetID() +
            "] tried to perform data bounding, but data is not numeric.")
        return sendValue
    def _onLimit(clampTo):
        # Out-of-bounds: resolve per the configured action.
        if self._Bound_Action == BoundAction.Set:
            return clampTo
        if self._Bound_Action == BoundAction.RepeatLast:
            return self._LastSentValue
        return None  # drop the datapoint
    if self._Bound_Min is not None and value < self._Bound_Min:
        returnVal = _onLimit(self._Bound_Min)
    elif self._Bound_Max is not None and value > self._Bound_Max:
        returnVal = _onLimit(self._Bound_Max)
    else:
        returnVal = sendValue
    return self.__AssignPrecisionAndScale(returnVal, True)
def HandleIncomingWatchdogPacket(self, node, rawData, fromAddress):
    """Process an Oscar WatchdogTimer packet from a downstream (chained)
    Oscar: locate the target by address and stroke its watchdog."""
    #<?xml version="1.0" encoding="utf-8"?>
    #<Oscar Type="WatchdogTimer">
    #  <Version>1.0</Version>
    #  <Port>5000</Port>
    #</Oscar>
    Statistics.GetStatistics().OnPacketReceivedFromDownstream(rawData)
    try:
        version = node.getElementsByTagName('Version')[0].firstChild.nodeValue
        IP = fromAddress[0].lower()
        Port = node.getElementsByTagName('Port')[0].firstChild.nodeValue
    except Exception as Ex:
        Statistics.GetStatistics().OnMalformedPacketReceived(
            "Received invalid Oscar WatchdogTimer Packet : " + rawData)
        return
    Key = IP + ":" + Port
    objTarget = TargetManager.GetTargetManager().GetDownstreamTarget(Key)  # Chained Oscar
    if None == objTarget:
        objTarget = TargetManager.GetTargetManager().GetDownstreamTargetEx(
            IP, Port)  # Chained Oscar, used resolved IP
    if None == objTarget:
        # BUG FIX: the address was passed as a 2nd positional logging arg
        # (a %-format argument with no placeholder) and never appeared in
        # the log output; concatenate it into the message instead.
        Log.getLogger().warning(
            "Received Oscar Watchdog for unknown downstream Target: " +
            IP + ":" + Port)
        return
    if objTarget.getType() != ConnectionType.DownstreamOscar:
        # Would not know what this is until you hear back from it.
        objTarget.Type = ConnectionType.DownstreamOscar
    objTarget.StrokeWatchdogTimer()
def BoundID(self, namespace, node):
    """Apply Min/Max bounding to every datapoint matching the node's ID
    attribute within *namespace*.

    Returns the number of entries bounded.
    Raises pickle.UnpicklingError when the ID attribute is missing.
    """
    if not "ID" in node.attributes:
        Log.getLogger().error("Invalid <Namespace> - Bound - no ID specified.")
        raise pickle.UnpicklingError()
    id = node.attributes["ID"].nodeValue
    # Both bounds are optional; None means 'unbounded on that side'.
    maxBound = None  # renamed: previously shadowed builtins max/min
    minBound = None
    if "Max" in node.attributes:
        maxBound = node.attributes['Max'].nodeValue
    if "Min" in node.attributes:
        minBound = node.attributes['Min'].nodeValue
    boundCount = 0
    for entryObj in self._namespaceMap[namespace]:
        if isinstance(entryObj, MarvinGroupData.MarvinDataGroup):
            for entry in entryObj._DataList:
                # BUG FIX: matched the group container's ID (entryObj.ID)
                # instead of the member entry's ID -- cf. ScaleID, which
                # correctly uses entry.ID in this inner loop.
                if Matches(entry.ID, id):
                    if BoundValue(entry, minBound, maxBound):
                        boundCount += 1
        elif Matches(entryObj.ID, id):
            # (removed 'oldVAl', an unused local capturing the old value)
            if BoundValue(entryObj, minBound, maxBound):
                boundCount += 1
    Log.getLogger().info("Bound {} entries".format(boundCount))
    return boundCount