def process_event_series(self, times, eventStrings):
    """
    Process one event series and update the open-annotation state.

    For every event we look up whether it starts and/or ends a tagged state
    (self.startEvents / self.endEvents). A repeated start closes the running
    annotation as an anomaly; a matching end closes it normally.

    Args:
        times: [ndarray] the epochs of the events
        eventStrings: [list of strings] the event names, translatable via the maps

    Returns:
        list of annotation dicts that were closed during this call
    """
    finishedAnnotations = []

    def _close_open(tag, epoch, tagList):
        # pop the open annotation for `tag`, stamp end time and tags,
        # and queue it for submission
        done = self.openAnnos.pop(tag).copy()
        done["endTime"] = dates.epochToIsoString(epoch, zone='Europe/Berlin')
        done["tags"] = tagList
        finishedAnnotations.append(done)

    for tim, evStr in zip(times, eventStrings):
        if evStr not in self.startEvents and evStr not in self.endEvents:
            self.logger.debug(f"event {evStr} unknown, ignore")
        if evStr in self.startEvents:
            tag = self.startEvents[evStr]
            self.logger.debug(f" {evStr} is start of {tag}")
            if tag in self.openAnnos:
                # repeated start while this state is already running is an
                # error: submit the running annotation as an anomaly
                _close_open(tag, tim, ["anomaly", tag])
            # open a fresh annotation for this state
            self.openAnnos[tag] = {
                "type": "time",
                "endTime": dates.now_iso(),
                "startTime": dates.epochToIsoString(tim, zone='Europe/Berlin'),
                "tags": [tag, "open"]
            }
        # deliberately not an elif: an event can be in both start and end maps
        if evStr in self.endEvents:
            tag = self.endEvents[evStr]
            self.logger.debug(f" {evStr} is end of {tag}")
            if tag in self.openAnnos:
                # matching running state found, close it regularly
                _close_open(tag, tim, [tag])
            else:
                self.logger.warning(f"end event without start {tim} {evStr} ")
    if self.stateNode:
        # publish the tags of the still-open states
        self.stateNode.set_value(list(self.openAnnos.keys()))
    return finishedAnnotations
def alarm_clock(functionNode):
    """
    Periodic timer loop: whenever `nextTimeout` lies in the past, execute all
    leaves under the `target` child, advance `nextTimeout` by `periodSeconds`
    and increment `executionCounter`.

    This function never returns until the control signal is set to "stop";
    it is meant to run on its own thread. `isRunning` is set True on entry
    and False on exit.

    Args:
        functionNode: the function node holding control/signal, nextTimeout,
            periodSeconds, target and executionCounter children
    Returns:
        True when the loop was terminated via the stop signal
    """
    signalNode = functionNode.get_child("control").get_child("signal")
    logger = functionNode.get_logger()
    functionNode.get_child("isRunning").set_value(True)
    while True:
        if signalNode.get_value() == "stop":
            signalNode.set_value("nosignal")  # consume the signal
            break
        timeout = dates.date2secs(
            functionNode.get_child("nextTimeout").get_value())
        now = time.time()
        if now > timeout:
            # set the next timeout
            period = functionNode.get_child("periodSeconds").get_value()
            timeout = timeout + period
            functionNode.get_child("nextTimeout").set_value(
                dates.epochToIsoString(timeout))
            for node in functionNode.get_child("target").get_leaves():
                # fix: the node name was previously passed as a stray extra
                # logging argument (no %s placeholder), so it never appeared
                # in the log line and triggered a logging format error
                logger.debug(f"alarm_clock executes {node.get_name()}")
                node.execute()
            counterNode = functionNode.get_child("executionCounter")
            counterNode.set_value(counterNode.get_value() + 1)
        time.sleep(5)  # only sleep short to be able to break the loop with the signal
    functionNode.get_child("isRunning").set_value(False)
    return True
def thread_func(self):
    """
    Worker loop of the auto-timer thread.

    While self.running is True: ensure `isRunning` is set, and whenever
    `nextTimeout` lies in the past, execute all leaves under `target`,
    advance `nextTimeout` by `periodSeconds` and bump `executionCounter`.
    Marks `isRunning` False on exit.
    """
    while self.running:
        status = self.objectNode.get_child("isRunning").get_value()
        if not status:
            self.objectNode.get_child("isRunning").set_value(True)
        timeout = dates.date2secs(
            self.objectNode.get_child("nextTimeout").get_value())
        now = time.time()
        if now > timeout:
            # set the next timeout
            period = self.objectNode.get_child("periodSeconds").get_value()
            timeout = timeout + period
            self.objectNode.get_child("nextTimeout").set_value(
                dates.epochToIsoString(timeout))
            for node in self.objectNode.get_child("target").get_leaves():
                # fix: the node name was previously passed as a stray extra
                # logging argument (no %s placeholder), so it never appeared
                # in the log line and triggered a logging format error
                self.logger.debug(f"AutoTimerClass executes {node.get_name()}")
                node.execute()
            counterNode = self.objectNode.get_child("executionCounter")
            counterNode.set_value(counterNode.get_value() + 1)
        time.sleep(5)  # only sleep short to be able to break the loop with the signal
    self.objectNode.get_child("isRunning").set_value(False)
def find_first_timeout(self):
    """
    Advance the stored `nextTimeout` in whole `periodSeconds` steps until it
    lies in the future, write it back, and mark the timer as not running.
    """
    nextTimeOut = dates.date2secs(
        self.objectNode.get_child("nextTimeout").get_value())
    now = dates.date2secs(dates.now_iso())
    period = self.objectNode.get_child("periodSeconds").get_value()
    # step forward period by period until we are past "now"
    # NOTE(review): this loops forever if period <= 0 — confirm the node
    # is validated elsewhere
    while nextTimeOut < now:
        nextTimeOut = nextTimeOut + period
    self.objectNode.get_child("nextTimeout").set_value(
        dates.epochToIsoString(nextTimeOut))
    # fix: the return value of set_value was pointlessly bound to an
    # unused local `status`
    self.objectNode.get_child("isRunning").set_value(False)
def __generate_alarm(self, name, values, times):
    """
    Create a threshold-alarm template node under the alarm folder.

    Args:
        name: name of the variable that violated the threshold
        values: the offending values (finite entries go into the summary)
        times: epochs of the violation; times[0] becomes the alarm start
    """
    alarmTime = dates.epochToIsoString(times[0], zone='Europe/Berlin')

    def const(childName, childValue, **extra):
        # helper building one "const" child entry of the template
        entry = {"name": childName, "type": "const", "value": childValue}
        entry.update(extra)
        return entry

    messagetemplate = {
        "name": None,
        "type": "alarm",
        "children": [
            const("text", f"Variable {name} out of threshold"),
            const("level", "automatic"),
            const("confirmed", "unconfirmed",
                  enumValues=["unconfirmed", "critical", "continue", "accepted"]),
            const("startTime", alarmTime),
            const("endTime", None),
            const("confirmTime", None),
            const("mustEscalate", True),
            const("summary",
                  f"21data alarm: Variable {name} out of threshold ({values[numpy.isfinite(values)]}) at {alarmTime}"),
        ],
    }
    path = self.alarmFolder.get_browse_path() + ".thresholdAlarm_" + getRandomId()
    self.model.create_template_from_path(path, messagetemplate)
    return
def jump(functionNode):
    """
    Center the widget's visible time range on the selected StumpyMASS match.

    A match index of -1 means "jump to the motif itself"; otherwise the index
    selects an entry of the miner's results list. The widget's time span is
    kept, only its center moves.
    """
    miner = functionNode.get_parent().get_child("StumpyMASS")
    widget = miner.get_child("widget").get_target()
    widgetStartTime = dates.date2secs(widget.get_child("startTime").get_value())
    widgetEndTime = dates.date2secs(widget.get_child("endTime").get_value())
    span = widgetEndTime - widgetStartTime
    # the user selection: an index into the results list
    matchIndex = int(functionNode.get_child("match").get_value())
    if matchIndex == -1:
        # -1 selects the motif itself
        motif = miner.get_child("motif").get_target()
        match = {
            "epochStart": dates.date2secs(motif.get_child("startTime").get_value()),
            "epochEnd": dates.date2secs(motif.get_child("endTime").get_value()),
        }
    else:
        match = miner.get_child("results").get_value()[matchIndex]
    middle = match["epochStart"] + (match["epochEnd"] - match["epochStart"]) / 2
    widget.get_child("startTime").set_value(dates.epochToIsoString(middle - span / 2))
    widget.get_child("endTime").set_value(dates.epochToIsoString(middle + span / 2))
    return True
def jump(functionNode):
    """
    Center the widget's visible time range on the selected EnvelopeMiner
    match and refresh the envelope band via update().

    A match index of -1 means "jump to the motif itself"; otherwise the index
    selects an entry of the miner's results list. The widget's time span is
    kept, only its center moves.
    """
    miner = functionNode.get_parent().get_child("EnvelopeMiner")
    widget = miner.get_child("widget").get_target()
    widgetStartTime = dates.date2secs(widget.get_child("startTime").get_value())
    widgetEndTime = dates.date2secs(widget.get_child("endTime").get_value())
    span = widgetEndTime - widgetStartTime
    # the user selection: an index into the results list
    matchIndex = int(functionNode.get_child("match").get_value())
    if matchIndex == -1:
        # -1 selects the motif itself
        motif = miner.get_child("motif").get_target()
        match = {
            "epochStart": dates.date2secs(motif.get_child("startTime").get_value()),
            "epochEnd": dates.date2secs(motif.get_child("endTime").get_value()),
        }
    else:
        match = miner.get_child("results").get_value()[matchIndex]
    middle = match["epochStart"] + (match["epochEnd"] - match["epochStart"]) / 2
    widget.get_child("startTime").set_value(dates.epochToIsoString(middle - span / 2))
    widget.get_child("endTime").set_value(dates.epochToIsoString(middle + span / 2))
    if matchIndex == -1:
        # re-write the band: the match will not 100% align in time with the
        # motif, because we search for the motif only with a given step
        update(functionNode)
    else:
        update(functionNode, startTime=match["epochStart"], offset=match["offset"])
    return True
def __generate_alarm(self, lastTime):
    """
    Create a "stream data missing" alarm template node under the alarm folder.

    Best-effort: any failure is logged via the model and swallowed, so the
    caller's data path is never interrupted.

    Args:
        lastTime: epoch seconds of the last received stream data point
    """
    try:
        alarmTime = dates.epochToIsoString(time.time(), zone='Europe/Berlin')
        lastTime = dates.epochToIsoString(lastTime, zone='Europe/Berlin')
        messagetemplate = {
            "name": None, "type": "alarm", "children": [
                {"name": "text", "type": "const", "value": f"Stream data missing since {lastTime}"},
                {"name": "level", "type": "const", "value": "automatic"},
                {"name": "confirmed", "type": "const", "value": "unconfirmed", "enumValues": ["unconfirmed", "critical", "continue", "accepted"]},
                {"name": "startTime", "type": "const", "value": alarmTime},
                {"name": "endTime", "type": "const", "value": None},
                {"name": "confirmTime", "type": "const", "value": None},
                {"name": "mustEscalate", "type": "const", "value": True},
                {"name": "summary", "type": "const", "value": f"21data alarm: Stream data missing since {lastTime}"}
            ]
        }
        path = self.alarmFolder.get_browse_path() + ".StreamDataAlarm_" + getRandomId()
        self.model.create_template_from_path(path, messagetemplate)
    except Exception:
        # fix: was a bare `except:`; keep the best-effort semantics but stop
        # swallowing SystemExit/KeyboardInterrupt
        self.model.log_error()
    return
def __generate_alarm(self, name, values, times):
    """
    Create a threshold-alarm template node under the alarm folder.

    Args:
        name: name of the variable that violated the threshold
        values: the offending values (unused here, kept for interface parity)
        times: epochs of the violation; times[0] becomes the alarm start
    """
    def const(childName, childValue, **extra):
        # helper building one "const" child entry of the template
        entry = {"name": childName, "type": "const", "value": childValue}
        entry.update(extra)
        return entry

    messagetemplate = {
        "name": None,
        "type": "alarm",
        "children": [
            const("text", f"Variable {name} out of threshold"),
            const("level", "automatic"),
            const("confirmed", "unconfirmed",
                  enumValues=["unconfirmed", "critical", "continue", "accepted"]),
            const("startTime", dates.epochToIsoString(times[0])),
            const("endTime", None),
            const("confirmTime", None),
            const("mustEscalate", True),
        ],
    }
    path = self.alarmFolder.get_browse_path() + ".thresholdAlarm_" + getRandomId()
    self.model.create_template_from_path(path, messagetemplate)
    return
def envelope_miner(functionNode):
    """
    Mine a variable's time series for windows that fit inside the envelope
    (upper/lower limit curves) of the selected motif.

    Writes the found matches to the "results" child, optionally creates
    annotations for them, and drives the "control.progress" node while
    running. Honors the "stop" control signal.

    Returns:
        True (matches are delivered via the "results" child)
    """
    logger = functionNode.get_logger()
    signal = functionNode.get_child("control.signal")
    logger.info("==>>>> in envelope_miner " + functionNode.get_browse_path())
    progressNode = functionNode.get_child("control").get_child("progress")
    functionNode.get_child("results").set_value([])
    progressNode.set_value(0)
    signal.set_value(None)
    # also make sure we are in the right jump
    # when we have jumped to a different result, the envelope time series are set to different values
    # (the ones from the according match)
    # but now we want to use the envelope time series for the mining so we need to set it back to the
    # motif time with the chosen parameters, we do that with the help of the update function
    update(functionNode)
    motif = functionNode.get_child("motif").get_target()
    variable = motif.get_child("variable").get_target()
    holeSize = functionNode.get_child("holeSize").get_value()
    ts = variable.get_time_series()
    samplePeriod = motif.get_child("envelope.samplingPeriod").get_value()
    samplePointsPerWindow = motif.get_child(
        "envelope.numberSamples").get_value()
    stepSizePercent = motif.get_child(
        "envelope.step").get_value()  # in percent
    if stepSizePercent == 0:
        stepSize = 1  # choose one point for 0% settings
    else:
        stepSize = int(float(samplePointsPerWindow) * float(stepSizePercent))
        if stepSize < 1:
            stepSize = 1  # at least one point advances per step
    samplePointsPerWindow = motif.get_child(
        "envelope.numberSamples").get_value()
    windowMaker = streaming.Windowing(
        samplePeriod=samplePeriod,
        stepSize=stepSize,
        maxHoleSize=holeSize,
        samplePointsPerWindow=samplePointsPerWindow)
    numberOfWindows = (ts["__time"][-1] - ts["__time"][0]) / samplePeriod / stepSize  # approx
    windowTime = samplePointsPerWindow * samplePeriod  # seconds covered by one window
    logger.debug(
        f"producing {numberOfWindows} windows, point per window ={samplePointsPerWindow}, stepsize {stepSizePercent*100}% => {stepSize} pt, sample Period {samplePeriod}"
    )
    windowMaker.insert(ts["__time"], ts["values"])
    #old
    #upper = motif.get_child("envelope."+variable.get_name()+"_limitMax").get_time_series()["values"]
    #lower = motif.get_child("envelope."+variable.get_name()+"_limitMin").get_time_series()["values"]
    #expected = motif.get_child("envelope."+variable.get_name()+"_expected").get_time_series()["values"]
    # get the motif data with times
    motifStart = dates.date2secs(motif.get_child("startTime").get_value())
    motifEnd = dates.date2secs(motif.get_child("endTime").get_value())
    upper = motif.get_child("envelope." + variable.get_name() +
                            "_limitMax").get_time_series(motifStart, motifEnd)
    lower = motif.get_child("envelope." + variable.get_name() +
                            "_limitMin").get_time_series(motifStart, motifEnd)
    expected = motif.get_child("envelope." + variable.get_name() +
                               "_expected").get_time_series(motifStart, motifEnd)
    if functionNode.get_child("maxNumberOfMatches"):
        maxMatches = functionNode.get_child("maxNumberOfMatches").get_value()
    else:
        maxMatches = None
    matches = []
    i = 0
    last = 0
    for w in windowMaker.iterate():
        # now we have the window w =[t,v] which is of correct length and resampled, let's compare it
        # to the motif
        # first the offset
        offset = w[1][0] - expected["values"][0]
        x = w[1] - offset
        below = upper["values"] - x
        above = x - lower["values"]
        # squared distance to the expected curve, used to rank matches later
        diff = numpy.sum(numpy.power(x - expected["values"], 2))
        if numpy.all(below > 0) and numpy.all(above > 0):
            # the whole (offset-corrected) window lies strictly inside the envelope => a match
            logger.debug(
                f"match @ {w[1][0]}, iteration: {float(i)/float(numberOfWindows)}"
            )
            matches.append({
                "startTime": dates.epochToIsoString(w[0][0], 'Europe/Berlin'),
                "endTime": dates.epochToIsoString(w[0][0] + windowTime, 'Europe/Berlin'),
                "match": diff,
                "epochStart": w[0][0],
                "epochEnd": w[0][0] + windowTime,
                "offset": offset,
                "format": my_date_format(w[0][0]) + "  (match=%2.3f)" % diff
                #"below":list(numpy.copy(below)),
                #"above":list(numpy.copy(above)),
                #"x":list(numpy.copy(x)),
                #"w":list(numpy.copy(w[1])),
                #"upper":list(numpy.copy(upper["values"])),
                #"lower":list(numpy.copy(lower["values"])),
                #"expected":list(numpy.copy(expected["values"]))
            })
            if maxMatches and len(matches) == maxMatches:
                break
        i = i + 1
        progress = round(float(i) / numberOfWindows * 20)  #only 5% units on the progress bar
        if progress != last:
            progressNode.set_value(float(i) / numberOfWindows)
            last = progress
        if signal.get_value() == "stop":
            break
    #remove trivial matches inside half of the window len, we just take the best inside that area
    cleanMatches = []
    tNow = 0
    for m in matches:
        dif = m["epochStart"] - tNow
        if dif < (windowTime / 2):
            #check if this is a better match
            # NOTE(review): if the very first match started within
            # windowTime/2 of epoch 0, cleanMatches would still be empty here
            # and [-1] would raise; real epochs are large so this does not
            # trigger in practice — confirm
            if m["match"] < cleanMatches[-1]["match"]:
                #exchange it to the better match
                cleanMatches[-1] = m
            continue
        else:
            cleanMatches.append(m)
            tNow = m["epochStart"]
    #now sort the matches by match value and rescale them
    if cleanMatches == []:
        functionNode.get_child("results").set_value([])
    else:
        matchlist = numpy.asarray([m["match"] for m in cleanMatches])
        scaleMax = numpy.max(matchlist)
        scaleMin = numpy.min(matchlist)
        # rescale the distances to 0..100 for display
        matchlist = (matchlist - scaleMin) / (scaleMax - scaleMin) * 100
        sortIndices = numpy.argsort([m["match"] for m in cleanMatches])
        sortMatches = []
        for idx in sortIndices:
            m = cleanMatches[idx]
            m["format"] = my_date_format(
                m["epochStart"]) + "  (distance=%.3g)" % matchlist[idx]
            sortMatches.append(cleanMatches[idx])
        #now create the annotations and notify them in one event
        if functionNode.get_child(
                "createAnnotations") and functionNode.get_child(
                    "createAnnotations").get_value():
            myModel = functionNode.get_model()
            myModel.disable_observers()  # batch the creation into one notification
            annoFolder = functionNode.get_child("annotations")
            if maxMatches:
                _create_annos_from_matches(annoFolder,
                                           sortMatches,
                                           maxMatches=maxMatches)
            myModel.enable_observers()
            if maxMatches != 0:
                myModel.notify_observers(annoFolder.get_id(), "children")
        if maxMatches != 0:
            display_matches(functionNode, True)  # turn on the annotations
        functionNode.get_child("results").set_value(sortMatches)
    progressNode.set_value(1)
    return True
def my_date_format(epoch):
    """Format an epoch as 'YYYY-MM-DD  HH:MM:SS' (Europe/Berlin local time)."""
    iso = dates.epochToIsoString(epoch, zone='Europe/Berlin')
    # keep the date part and the seconds-resolution time part, drop the rest
    return f"{iso[0:10]}  {iso[11:19]}"
def events_to_annotations(functionNode):
    """
    Create time annotations from event series.

    Pairs of start/end event names (configured in the "eventSelection" child
    as {annotationTag: [startEvent, endEvent]}) are converted into annotations
    under the "annotations" child. A start while the same tag is already open
    closes the running annotation as an anomaly.

    Modes (children of functionNode):
        differential: only process events newer than "processedUntil" and
            update existing open annotations instead of deleting/recreating
        interleaved: allow several annotations of different tags to be open
            at the same time

    Returns:
        True
    """
    logger = functionNode.get_logger()
    logger.info("==>>>> events_to_annotations " + functionNode.get_browse_path())
    progressNode = functionNode.get_child("control").get_child("progress")
    newAnnosNode = functionNode.get_child("annotations")
    differential = functionNode.get_child("differential").get_value()
    valueNotifyNodeIds = []  # node ids to post a value notification for (nodes that were open before)
    interleaved = functionNode.get_child("interleaved")
    if interleaved:  # if the node is there
        interleaved = interleaved.get_value()
    if differential:
        processedUntil = dates.date2secs(functionNode.get_child("processedUntil").get_value(), ignoreError=False)
        if not processedUntil:
            processedUntil = 0
    else:
        processedUntil = 0
    lastTimeSeen = processedUntil  # will track the latest event time we encounter
    # eventSelection is a dict to translate and select the events from the event series
    # eg {"Preparation": ["machine1.init", "machine1.op2"], ...}: the first list
    # entry starts the annotation, the second ends it
    eventSelection = functionNode.get_child("eventSelection").get_value()
    progressNode.set_value(0)
    m = functionNode.get_model()
    startEvents = {v[0]: k for k, v in eventSelection.items()}  # always the first event
    endEvents = {v[1]: k for k, v in eventSelection.items()}  # always the second event
    evs = functionNode.get_child("eventSeries").get_targets()  # a list of nodes where the events are located
    if not differential:
        # delete all old annos
        annos = newAnnosNode.get_children()
        if annos:
            try:
                m.disable_observers()
                for anno in annos:
                    anno.delete()
            finally:
                m.enable_observers()
                m.notify_observers(newAnnosNode.get_id(), "children")
        openAnnos = []  # no open annotations per definition
    else:
        # differential process: we don't delete the old but get all open annotations
        openAnnos = []
        annos = newAnnosNode.get_children()
        for anno in annos:
            if "open" in anno.get_child("tags").get_value():
                openAnnotation = {
                    "tags": anno.get_child("tags").get_value(),
                    "startTime": anno.get_child("startTime").get_value(),
                    # keep the node handle so we can update instead of recreate
                    "node": anno
                }
                openAnnos.append(openAnnotation)
    # collect all events filtered by the selection of events
    # (fix: renamed from `filter`, which shadowed the builtin)
    eventNameFilter = list(startEvents.keys()) + list(endEvents.keys())
    newAnnotations = []  # annotations to be created; entries carrying a "node" key are updates of existing nodes
    if len(openAnnos) > 1 and not interleaved:
        logger.error("there should be only one open annotation at max in non-interleaved mode")
    # now iterate over all events
    for ev in evs:
        data = ev.get_event_series(eventFilter=eventNameFilter)  # ["__time":..., "eventStrings":...]
        times = data["__time"]
        eventStrings = data["eventStrings"]
        if differential:
            # also filter by time
            new = times > processedUntil
            times = times[new]
            # fix: numpy.str was a deprecated (and later removed) alias of str
            indexableEventStrings = numpy.asarray(eventStrings, dtype=str)
            eventStrings = indexableEventStrings[new]
        for index in range(len(times)):
            evStr = eventStrings[index]
            tim = times[index]
            if tim > lastTimeSeen:
                lastTimeSeen = tim
            # fix: removed a leftover debug print that referenced `tag` before
            # assignment and raised a NameError on the first processed event;
            # the bare "is start"/"is end" prints are gone as well
            if evStr in startEvents:
                tag = startEvents[evStr]  # this is a start of a new event
                if openAnnos:
                    # at least one annotation is running already:
                    # non-interleaved: submit all open annotations as anomaly
                    # interleaved: only submit those with the same tag as anomaly
                    # if the "open" entry came from an existing annotation in the
                    # tree, carry the node handle along so it gets updated, not recreated
                    newOpenAnnos = []
                    for openAnnotation in openAnnos:
                        if not interleaved or tag in openAnnotation["tags"]:
                            # we must close this open annotation as anomaly
                            anno = {
                                "type": "time",
                                "endTime": dates.epochToIsoString(tim, zone='Europe/Berlin'),
                                "startTime": openAnnotation["startTime"],
                                "tags": ["anomaly", tag]
                            }
                            if "node" in openAnnotation:
                                anno["node"] = openAnnotation["node"]
                            newAnnotations.append(anno)  # put the anomaly there
                        else:
                            # keep this annotation
                            newOpenAnnos.append(openAnnotation)
                    openAnnos = newOpenAnnos
                # now remember the current as the open one
                openAnnotation = {
                    "startTime": dates.epochToIsoString(tim, zone='Europe/Berlin'),
                    "tags": [tag]
                }
                openAnnos.append(openAnnotation)
            # not an elif, because an event can be in both start and end events
            if evStr in endEvents:
                # this is an end event, see if we have a matching open annotation
                tag = endEvents[evStr]
                newOpenAnnos = []
                for openAnnotation in openAnnos:
                    if tag in openAnnotation["tags"]:
                        # take this annotation, we can close it
                        anno = {
                            "type": "time",
                            "endTime": dates.epochToIsoString(tim, zone='Europe/Berlin'),
                            "startTime": openAnnotation["startTime"],
                            "tags": [tag]
                        }
                        if "node" in openAnnotation:
                            # if it was an existing annotation, we also put the anno
                            # handle to make an "update" instead of a new creation further down
                            anno["node"] = openAnnotation["node"]
                        newAnnotations.append(anno)
                    else:
                        newOpenAnnos.append(openAnnotation)  # keep this one
                if len(newOpenAnnos) == len(openAnnos):
                    # nothing was closed => end without start (typo "ende" fixed)
                    logger.warning(f"annotation creation ended without start {tim} {evStr}")
                openAnnos = newOpenAnnos
    # now create the annotations
    logger.debug(f"creating {len(newAnnotations)} annotation, have {openAnnos} open annotations")
    m.disable_observers()
    try:
        if differential:
            # in the open annotations list, we will find
            # - open annotations that have existed before and have not found an update yet and were not closed
            # - new open annotations that have started now
            for openAnnotation in openAnnos:
                nowIso = dates.epochToIsoString(time.time(), zone='Europe/Berlin')
                if "node" in openAnnotation:
                    # this existed before, we just update the endtime
                    node = openAnnotation["node"]
                    node.get_child("endTime").set_value(nowIso)  # set the current time as end time
                    valueNotifyNodeIds.append(node.get_child("endTime").get_id())
                else:
                    # this is a new open anno
                    entry = {
                        "type": "time",
                        "endTime": nowIso,
                        "startTime": openAnnotation["startTime"],
                        "tags": openAnnotation["tags"] + ["open"]
                    }
                    newAnnotations.append(entry)  # put it to the creation list
        # create and update new annotations
        for anno in newAnnotations:
            if "node" not in anno:
                newAnno = newAnnosNode.create_child(type="annotation")
                for k, v in anno.items():
                    newAnno.create_child(properties={"name": k, "value": v, "type": "const"})
            else:
                # this is an update, typically a "close" or an extension of an open annotation
                node = anno["node"]
                node.get_child("endTime").set_value(anno["endTime"])
                node.get_child("tags").set_value(anno["tags"])
                valueNotifyNodeIds.append(node.get_child("endTime").get_id())
    except Exception as ex:
        logger.error(f"error in events_to_annotations {ex}")
    m.enable_observers()
    m.notify_observers(newAnnosNode.get_id(), "children")
    # also notify the value changes of annotations that existed before
    if valueNotifyNodeIds:
        m.notify_observers(valueNotifyNodeIds, "value")  # lets the UI adjust those annotations
    isoDate = dates.epochToIsoString(lastTimeSeen, zone="Europe/Berlin")
    functionNode.get_child("processedUntil").set_value(isoDate)  # next run can skip everything before this
    return True
def feed(self, blob):
    """
    Process one incoming data blob and maintain the open-annotation state.

    Expected format of blob:
        {"type": "eventseries",
         "data": {
            "__time": [11, 12, 13, 14],
            "23488923400": ["p1.start", "p2.start", ...]
            }
        }
    The blob will have only one event variable.

    For "eventseries" blobs the events are converted into annotations (new or
    updated); for "timeseries" blobs the currently open states are attached
    under data["__states"] and the open annotations' end times are extended.
    Observer changes are collected and notified once at the end.

    Returns:
        the (possibly augmented) blob
    """
    notification = {"new": {}, "delete": {}, "modify": {}}  # keep the info which ids have changed and notify them at the end
    self.logger.debug("Events2StateClass.feed()")
    # we look for eventseries to switch the state:
    if blob["type"] == "eventseries":
        times = blob["data"]["__time"]
        # we take the first entry that is not __time, we expect there to be only ONE!
        evSeries = [v for k, v in blob["data"].items() if k != "__time"][0]
        newAnnos = self.process_event_series(times, evSeries)
        self.model.disable_observers()
        try:
            for anno in newAnnos:
                if "node" in anno:
                    # this is an update of an open existing annotation
                    existingAnno = anno["node"]
                    existingAnno.get_child("endTime").set_value(anno["endTime"])
                    existingAnno.get_child("tags").set_value(anno["tags"])
                    # build a special info dictionary for the receiver of this event
                    notification["modify"][existingAnno.get_id()] = self.__build_info(existingAnno)
                else:
                    # this is a new annotation
                    newAnno = self.newAnnosNode.create_child(type="annotation")
                    for k, v in anno.items():
                        newAnno.create_child(properties={"name": k, "value": v, "type": "const"})
                    notification["new"][newAnno.get_id()] = self.__build_info(newAnno)
            # now process the open annotations
            for tag, anno in self.openAnnos.items():
                if "node" in anno:
                    # update of exisiting
                    existingAnno = anno["node"]
                    existingAnno.get_child("endTime").set_value(anno["endTime"])
                    notification["modify"][existingAnno.get_id()] = self.__build_info(existingAnno)
                else:
                    newAnno = self.newAnnosNode.create_child(type="annotation")
                    for k, v in anno.items():
                        newAnno.create_child(properties={"name": k, "value": v, "type": "const"})
                    self.openAnnos[tag]["node"] = newAnno  # remember open annotation
                    notification["new"][newAnno.get_id()] = self.__build_info(newAnno)
        finally:
            self.model.enable_observers()
    elif blob["type"] == "timeseries":
        """
        # we need to add the state to the timeseries in the form
        {
            "type": "timeseries",
            "data": {
                "__time": [120, 130, 140, 150, 160, 170, ....]
                "var1": [20, 30, 40, 50, 60, 70, ......]
                // "var2": [2, 3, 4, 5, 6, ....]
                "__states": {
                    "euv": [True, False, True, .....]
                    "evacuating": [False, False, False, ....]
                }
            }
        }
        """
        self.model.disable_observers()
        try:
            times = blob["data"]["__time"]
            addStates = {}
            length = len(times)
            for tag, anno in self.openAnnos.items():
                if "node" in anno:
                    # update the endTime: take the last time point of the data
                    # as the end of the annotation
                    anno["node"].get_child("endTime").set_value(dates.epochToIsoString(times[-1]))
                    notification["modify"][anno["node"].get_id()] = self.__build_info(anno["node"])
                # every open tag contributes an all-True state vector
                addStates[tag] = numpy.full(length, True)
            blob["data"]["__states"] = addStates
        finally:
            self.model.enable_observers()
    # now notify the changes to annotations if there are any
    if any([v for k, v in notification.items()]):  # if any change is marked in the notifications
        self.model.notify_observers(self.newAnnosNode.get_id(), "children", eventInfo=notification)
    return blob
def minerMass(functionNode):
    """
    Find occurrences of the selected motif in the rest of the variable's time
    series using stumpy's MASS distance profile.

    The series is split at the motif into a left ("before") and right
    ("after") part, each with its own distance profile. Peaks of the negated
    profiles are match candidates; the best ones from both sides are merged
    (sorted by match quality) into the "results" child.

    Returns:
        True (matches are delivered via the "results" child)
    """
    logger = functionNode.get_logger()
    logger.info("==>>>> in stumpy mass split miner " + functionNode.get_browse_path())
    progressNode = functionNode.get_child("control").get_child("progress")
    progressNode.set_value(0)
    signal = functionNode.get_child("control.signal")
    signal.set_value(None)
    functionNode.get_child("results").set_value([])
    motifNode = functionNode.get_child("motif").get_target()
    varNode = motifNode.get_child("variable").get_target()
    startTime = motifNode.get_child("startTime").get_value()
    endTime = motifNode.get_child("endTime").get_value()
    actualMatches_before = 0
    actualMatches_after = 0
    if functionNode.get_child("maxNumberOfMatches"):
        maxMatches = functionNode.get_child("maxNumberOfMatches").get_value()
    else:
        maxMatches = None
    if maxMatches is not None:
        maxMatches_before = round(maxMatches / 4)  # roughly 25 % of the matches are taken from before the motif
        maxMatches_after = round(maxMatches / 4 * 3)  # the remaining matches are taken from after it
    else:
        # fix: previously round(None / 4) raised a TypeError when the
        # maxNumberOfMatches child was missing; no limit => take all peaks
        # (min() below then picks the actual peak count, and the progress
        # fraction j/inf harmlessly evaluates to 0)
        maxMatches_before = float("inf")
        maxMatches_after = float("inf")
    queryTimeSeries = varNode.get_time_series(start=startTime, end=endTime)
    fullTimeSeries = varNode.get_time_series()
    queryTimeSeriesTimes = queryTimeSeries['__time']
    fullTimeSeriesTimes = fullTimeSeries['__time']
    # indices of the motif's first/last sample within the full series
    endLeftPartTs = (numpy.where(fullTimeSeriesTimes == queryTimeSeriesTimes[0]))[0][0]
    startRightPartTs = (numpy.where(fullTimeSeriesTimes == queryTimeSeriesTimes[len(queryTimeSeriesTimes) - 1]))[0][0]
    queryTimeSeriesValues = queryTimeSeries['values']
    queryLength = queryTimeSeriesValues.size
    fullTimeSeriesValues = fullTimeSeries['values']
    timeSeriesLeftValues = fullTimeSeriesValues[:endLeftPartTs]
    timeSeriesRightValues = fullTimeSeriesValues[startRightPartTs:]
    timeSeriesLeftTimes = fullTimeSeriesTimes[:endLeftPartTs]
    timeSeriesRightTimes = fullTimeSeriesTimes[startRightPartTs:]
    profile_before = stp.core.mass(queryTimeSeriesValues, timeSeriesLeftValues, normalize=True)
    profile_after = stp.core.mass(queryTimeSeriesValues, timeSeriesRightValues, normalize=True)
    # suppress near-zero distances (the trivial self-match region) by lifting
    # them to the profile maximum
    maxValue_before = numpy.max(profile_before)
    profile_before = numpy.where(profile_before < 0.05, maxValue_before, profile_before)
    maxValue_after = numpy.max(profile_after)
    profile_after = numpy.where(profile_after < 0.05, maxValue_after, profile_after)
    # peaks of the negated profile = local distance minima = match candidates
    peaks_before, _ = scy.signal.find_peaks(-profile_before, distance=round(queryLength / 12), width=round(queryLength / 10))
    peaks_after, _ = scy.signal.find_peaks(-profile_after, distance=round(queryLength / 12), width=round(queryLength / 10))
    # profile (before / after) peaks --> the profile values (at peak positions)
    profile_before_peaks = profile_before[peaks_before]
    profile_after_peaks = profile_after[peaks_after]
    sorted_peaks_before = numpy.argsort(profile_before_peaks)
    sorted_peaks_after = numpy.argsort(profile_after_peaks)
    # peak sample positions, ordered best match first
    sorted_peaks_full_before = [peaks_before[k] for k in sorted_peaks_before]
    sorted_peaks_full_after = [peaks_after[k] for k in sorted_peaks_after]
    matches = []
    actualMatches_before = len(sorted_peaks_before)
    actualMatches_after = len(sorted_peaks_after)
    matches_after = []
    matches_before = []
    last = 0
    # NOTE(review): sorted_peaks_full_*[j] + queryLength can exceed the part's
    # length for a peak near the series end — confirm upstream data guarantees
    for j in range(min(maxMatches_after, actualMatches_after)):
        matches_after.append({
            "startTime":
            dates.epochToIsoString((timeSeriesRightTimes)[sorted_peaks_full_after[j]]),
            "endTime":
            dates.epochToIsoString((timeSeriesRightTimes)[sorted_peaks_full_after[j] + queryLength]),
            "match": (profile_after[peaks_after])[sorted_peaks_after[j]],
            "epochStart": (timeSeriesRightTimes)[sorted_peaks_full_after[j]],
            "epochEnd": (timeSeriesRightTimes)[sorted_peaks_full_after[j] + queryLength],
            "offset": 0,
            "format":
            my_date_format((timeSeriesRightTimes)[sorted_peaks_full_after[j]]) +
            "  (match=%2.3f)" % (profile_after[peaks_after])[sorted_peaks_after[j]]
        })
        progress = round(float(j) / maxMatches_after * 15)
        if progress != last:
            progressNode.set_value(float(j) / maxMatches_after)
            last = progress
        if signal.get_value() == "stop":
            break
    for j in range(min(maxMatches_before, actualMatches_before)):
        matches_before.append({
            "startTime":
            dates.epochToIsoString((timeSeriesLeftTimes)[sorted_peaks_full_before[j]]),
            "endTime":
            dates.epochToIsoString((timeSeriesLeftTimes)[sorted_peaks_full_before[j] + queryLength]),
            "match": (profile_before[peaks_before])[sorted_peaks_before[j]],
            "epochStart": (timeSeriesLeftTimes)[sorted_peaks_full_before[j]],
            "epochEnd": (timeSeriesLeftTimes)[sorted_peaks_full_before[j] + queryLength],
            "offset": 0,
            "format":
            my_date_format((timeSeriesLeftTimes)[sorted_peaks_full_before[j]]) +
            "  (match=%2.3f)" % (profile_before[peaks_before])[sorted_peaks_before[j]]
        })
        progress = round(float(j) / maxMatches_before * 15)
        if progress != last:
            progressNode.set_value(float(j) / maxMatches_before)
            last = progress
        if signal.get_value() == "stop":
            break
    # merge the two pre-sorted lists by ascending match distance
    idx_before = 0
    idx_after = 0
    while idx_before < len(matches_before) and idx_after < len(matches_after):
        if (matches_before[idx_before])['match'] < (matches_after[idx_after])['match']:
            matches.append(matches_before[idx_before])
            idx_before = idx_before + 1
        else:
            matches.append(matches_after[idx_after])
            idx_after = idx_after + 1
    while idx_after < len(matches_after):
        matches.append(matches_after[idx_after])
        idx_after = idx_after + 1
    while idx_before < len(matches_before):
        matches.append(matches_before[idx_before])
        idx_before = idx_before + 1
    functionNode.get_child("results").set_value(matches)
    show_timeseries_results(functionNode)
    progressNode.set_value(1)
    return True
def process_event_series(self, times, eventStrings):
    """ process one event series
        we update the self.state according to the current events

        For every event, each currently open state (self.openAnnos) is checked
        against its configured start / end / ignore event patterns
        (self.eventSelection[tag]); a restart or any non-allowed event closes
        the state as an anomaly, the configured end event closes it regularly,
        ignored events leave it untouched. Afterwards the event may open a new
        state if it is a configured start event.

        Args:
            times[ndarray] the epochs
            eventStrings: [list of strings] of the events, which can be converted per map
        Returns:
            list of annotation dicts that were closed during this call
    """
    newAnnotations = []
    for index in range(len(times)):
        evStr = eventStrings[index]
        tim = times[index]
        # first check in the running states, if we have to do something
        for tag in list(self.openAnnos):  # create a list() copy for iteration (we delete while iterating)
            # this is a running state, check for restart, end or error
            if self.__match(evStr, self.eventSelection[tag]["start"]):
                # a restart event so we submit an anomaly annotation
                anno = self.openAnnos[tag].copy()  # take the annotation away from the open list
                del self.openAnnos[tag]  # remove this from the running state list
                anno["endTime"] = dates.epochToIsoString(tim, zone='Europe/Berlin')
                anno["tags"] = ["anomaly", tag]
                newAnnotations.append(anno)  # put the anomaly node
            elif self.__match(evStr, self.eventSelection[tag]["end"]):
                # close the annotation, this is a correct standard end event
                anno = self.openAnnos[tag].copy()
                del self.openAnnos[tag]
                anno["endTime"] = dates.epochToIsoString(tim, zone='Europe/Berlin')
                anno["tags"] = [tag]
                newAnnotations.append(anno)
            elif self.__match(evStr, self.eventSelection[tag]["ignore"]):
                # explicitly ignored event: keep this state open, check the next open state
                continue
            else:
                # any other not allowed event so we submit an anomaly annotation
                anno = self.openAnnos[tag].copy()  # take the annotation away from the open list
                del self.openAnnos[tag]  # remove this from the running state list
                anno["endTime"] = dates.epochToIsoString(tim, zone='Europe/Berlin')
                anno["tags"] = ["anomaly", tag]
                newAnnotations.append(anno)  # put the anomaly node
        # now check if we start a new annotation with this event
        if evStr in self.startEvents:
            # this is a start of a new event, if it was a restart (start after start)
            # then we have covered that before and created an anomaly
            tag = self.startEvents[evStr]
            self.logger.debug(f" {evStr} is start of {tag}")
            self.openAnnos[tag] = {
                "type": "time",
                "endTime": dates.now_iso(),
                "startTime": dates.epochToIsoString(tim, zone='Europe/Berlin'),
                "tags": [tag, "open"]
            }
    if self.stateNode:
        # publish the tags of the currently open states
        self.stateNode.set_value(list(self.openAnnos.keys()))
    return newAnnotations