def getQualityValues(self, jOb, name, avgQualityType):
    q = JSONObject()
    q.absoluteValues = self.qList.get(avgQualityType).absoluteBuffer.items
    q.ratedValues = self.qList.get(avgQualityType).ratedBuffer.items
    q.name = name
    q.unit = self.unit
    jOb.values.append(q)

def listwrapper(self):
    r = JOb()
    c = 0
    r.wrappers = []
    for w in self.rm.wrappers:
        sd = w.getSensorDescription()
        # getSensorDescription may return a single description or a list;
        # normalise to a list so both cases share one code path
        for _sd in (sd if isinstance(sd, list) else [sd]):
            r.wrappers.append(JOb())
            r.wrappers[c].information = _sd.information
            r.wrappers[c].uuid = _sd.uuid
            r.wrappers[c].sensorName = _sd.sensorName
            r.wrappers[c].author = _sd.author
            r.wrappers[c].sensorID = _sd.sensorID
            r.wrappers[c].fullSensorID = _sd.fullSensorID
            r.wrappers[c].source = _sd.source
            r.wrappers[c].sensorType = _sd.sensorType
            r.wrappers[c].sourceFormat = _sd.sourceFormat
            r.wrappers[c].messagebus.routingKey = _sd.messagebus.routingKey
            c += 1
    return r.dumps()

def data_timeframe(self, uuid):
    """
    Returns the time frame (start and end date) for which data from the
    stream, identified by the UUID, is available.
    :param uuid: UUID identifying the stream
    :return: a JSON answer
    """
    resp = JOb()
    resp.uuid = uuid
    try:
        if ThreadedTriplestoreAdapter.triplestore:
            sd = self.rm.getSensorDescriptionByUUID(uuid)
            if sd:
                data = ThreadedTriplestoreAdapter.triplestore.getStreamMinMaxDate(sd.graphName, formatSensorID(sd))
                resp.status = "Ok"
                resp.message = ""
                resp.data = data
            else:
                raise Exception("no stream with given uuid known")
        else:
            resp.status = "Fail"
            resp.message = "Triplestore not activated."
    except Exception as e:
        resp.status = "Fail"
        resp.message = e.message
    return resp.dumps()

def deploy(self, f, autostart=False):
    """
    :param f: path to the deploy unit (zip file)
    :param autostart: start the wrapper(s) right after deployment
    :return: a tuple with 3 elements: 1. status as string, 2. error message
             as string, 3. list of sensor descriptions of the added wrappers
    """
    L.i("Deploying", f)
    sensordescriptions = []
    try:
        zFile = zipfile.ZipFile(f)
        if "deploy.json" in zFile.namelist():
            deployDescription = JOb(zFile.open("deploy.json", "r"))
            sys.path.insert(0, f)
            if deployDescription.isList():
                for dd in deployDescription:
                    module = __import__(dd.module)
                    wrapper = getattr(module, dd["class"])()
                    self.addWrapper(wrapper)
                    sensordescriptions.append(wrapper.getSensorDescription())
                    if autostart:
                        self.startWrapper(wrapper)
            else:
                module = __import__(deployDescription.module)
                wrapper = getattr(module, deployDescription["class"])()
                self.addWrapper(wrapper)
                sensordescriptions.append(wrapper.getSensorDescription())
                if autostart:
                    self.startWrapper(wrapper)
        return "OK", "", sensordescriptions
    except Exception as e:
        L.w("Deployment of wrapper", f, "failed.", e.message)
        return "Fail", e.message, []

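# Illustrative sketch of a deploy.json as consumed above: either a single
# object or a list, each entry naming the wrapper module and class to
# instantiate. The module/class names here are hypothetical placeholders,
# not taken from the source.
#
#   [
#       {"module": "citytrafficwrapper", "class": "CityTrafficWrapper"},
#       {"module": "cityparkingwrapper", "class": "CityParkingWrapper"}
#   ]
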
def listeventwrapper(self):
    r = JOb()
    r.eventwrapper = []
    eds = self.rm.getEventWrapperDescriptions()
    for ed in eds:
        r.eventwrapper.append(ed)
    return r.dumps()

def valueMapToJson(self, test, joblist, job=None, parent=None):
    if not parent:
        keys = [key for key in test if "." not in key]
    else:
        parentString = parent + "."
        keys = [key for key in test if parentString in key]
        keys = [key for key in keys if "." not in key.split(parentString)[1]]
    for key in keys:
        job2 = JSONObject()
        job2.name = key.split(".")[-1]
        job2.value = test[key]
        if job:
            if job.values:
                job.values.append(job2)
            else:
                job.values = []
                job.values.append(job2)
        else:
            joblist.append(job2)
        test.pop(key)
        if len(test) > 0:
            self.valueMapToJson(test, joblist, job2, key)

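# Worked example (assumed input, based on the dotted-key handling above):
# a flat map such as
#     {"process": 2.5, "process.parse": 1.0, "process.store": 1.5}
# yields one top-level object named "process" with value 2.5 whose "values"
# list holds the children "parse" (1.0) and "store" (1.5).
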
def getAvgQualities(self, currentDate):
    avgQualities = JSONObject()
    for t in self.avgTypes:
        avgQualities[t] = JSONObject()
        avgQualities[t].values = []
        avgQualities[t].startDate = str(self.getStartDate(currentDate, t))
    for key in self.qoiMetrics:
        self.qoiMetrics[key].getAvgQualities(avgQualities, key)
    return avgQualities

def finish(cls):
    jobs = JSONObject()
    jobs.stats = []
    for e in Stats.instances:
        job = JSONObject()
        # report the name of each Stats instance, not the object itself
        job.name = e.name
        job.value = e.getvalue()
        jobs.stats.append(job)
    return jobs

def listwrapperfull(self):
    r = JOb()
    r.wrappers = []
    for w in self.rm.wrappers:
        sd = w.getSensorDescription()
        if isinstance(sd, list):
            for _sd in sd:
                r.wrappers.append(_sd)
        else:
            r.wrappers.append(sd)
    return r.dumps()

def snapshot(self, uuid, start=None, end=None):
    """
    Get previous observations
    :param uuid: The uuid of the wrapper
    :param start: The start date in the format %Y-%m-%dT%H:%M:%S
    :param end: The end date in the format %Y-%m-%dT%H:%M:%S
    :return: a JSON answer
    """
    resp = JOb()
    resp.uuid = uuid
    try:
        from virtualisation.resourcemanagement.resourcemanagement import ResourceManagement
        if ResourceManagement.args.triplestore:
            sd = self.rm.getSensorDescriptionByUUID(uuid)
            if sd:
                data = ThreadedTriplestoreAdapter.triplestore.getObservationGraph(sd.graphName, formatSensorID(sd), start, end, False)
                resp.status = "Ok"
                resp.message = ""
                resp.data = data
            else:
                raise Exception("no stream with given uuid known")
        else:
            raise Exception("Triplestore not enabled in Resource Management")
    except Exception as e:
        resp.status = "Fail"
        resp.message = e.message
    return resp.dumps()

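# Sketch of the JSON answer built above (the uuid value is made up):
#   {"uuid": "0a1b-2c3d", "status": "Ok", "message": "", "data": <observation graph>}
# On any failure, status is "Fail" and message carries the exception text.
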
def getAllLastQualities(self):
    from virtualisation.wrapper.abstractwrapper import AbstractWrapper, AbstractComposedWrapper
    qualities = []
    wrappers = self.rm.wrappers
    for wrapper in wrappers:
        if isinstance(wrapper, AbstractWrapper):
            if wrapper.qoiSystem.initialised:
                qualities.append(wrapper.qoiSystem.getLastQoI())
                qualities[-1].uuid = wrapper.getSensorDescription().uuid
            else:
                resp = JOb()
                resp.uuid = wrapper.getSensorDescription().uuid
                resp.status = "Fail"
                resp.message = "Quality System not initialised for given uuid"
                qualities.append(resp)
        elif isinstance(wrapper, AbstractComposedWrapper):
            for aWrapper in wrapper.wrappers:
                if aWrapper.qoiSystem.initialised:
                    qualities.append(aWrapper.qoiSystem.getLastQoI())
                    qualities[-1].uuid = aWrapper.getSensorDescription().uuid
                else:
                    resp = JOb()
                    resp.uuid = aWrapper.getSensorDescription().uuid
                    resp.status = "Fail"
                    resp.message = "Quality System not initialised for given uuid"
                    qualities.append(resp)
    return JOb(qualities).dumps()

def update(self, data): self.updateDescription() # special case when no fields are in data # (fault recovery is not ready yet) if len(data.fields) == 0: self.rewardAndPunishment.update(False) self.absoluteValue = float("inf") self.ratedValue = self.rewardAndPunishment.value() return ts = self.repsys.timestamp self.updatecounter += 1 samplingTime = datetime.datetime.strptime(data[data.fields[0]].observationSamplingTime, AbstractClock.parserformat) age = (samplingTime - ts).total_seconds() # print "age:", age, "ts", ts, "sampling", samplingTime if self.lastUpdate == None: self.lastUpdate = ts self.rewardAndPunishment.update(True) self.absoluteValue = age self.ratedValue = 1.0 self.min = age self.mean = age else: # delta = ts - self.lastUpdate self.lastUpdate = ts if data.recovered: self.rewardAndPunishment.update(False) else: self.rewardAndPunishment.update(age <= self.annotatedAge) # delay = delta.days * 86400 + delta.seconds self.absoluteValue = age self.ratedValue = self.rewardAndPunishment.value() age = float(age) self.min = min(self.min, age) self.mean = ((self.updatecounter - 1) * self.mean) / self.updatecounter + age / self.updatecounter ageReturn = JSONObject() ageReturn.absoluteValue = self.absoluteValue ageReturn.ratedValue = self.ratedValue ageReturn.unit = self.unit return (self.name, ageReturn)
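# The running mean above uses the standard incremental form, so no sample
# history has to be kept (n is self.updatecounter, x_n the newest age):
#     mean_n = ((n - 1) * mean_(n-1)) / n + x_n / n
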
def getAvgQualities(self, jOb, name):
    for key in self.qList:
        q = JSONObject()
        q.absoluteValue = self.qList.get(key).getAbsoluteAverage()
        q.absoluteValueMin = self.qList.get(key).getAbsoluteMin()
        q.absoluteValueMax = self.qList.get(key).getAbsoluteMax()
        q.ratedValue = self.qList.get(key).getRatedAverage()
        q.ratedValueMin = self.qList.get(key).getRatedMin()
        q.ratedValueMax = self.qList.get(key).getRatedMax()
        q.name = name
        q.unit = self.unit
        jOb[key].values.append(q)

def snapshot_sql(self, uuid, start=None, end=None, format='json', last=False, fields=None, offset=0):
    if self.rm.sql:
        offset = int(offset)
        observations = self.rm.sql.get_observations(uuid, start, end, format, last, fields, offset)
        data = JOb()
        data.data = observations
        # TODO remove the 2 below
        if len(observations) == self.rm.sql.PAGINATION_LIMIT:
            data.next_url = "/api/snapshot_sql2?uuid=%s%s%s%s%s%s%s" % (
                uuid,
                "&start=" + start if start else "",
                "&end=" + end if end else "",
                "&format=" + format,
                "&last=" + str(last) if last else "",
                "&fields=" + fields if fields else "",
                "&offset=" + str(offset + self.rm.sql.PAGINATION_LIMIT))
        else:
            data.next_url = ""
        return data.dumps()
    else:
        return "Error. SQL feature not enabled in Resource Management"

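# Illustrative next_url emitted above when a full page comes back; the uuid
# and the pagination limit of 100 are made-up values:
#   /api/snapshot_sql2?uuid=0a1b-2c3d&format=json&offset=100
# Pagination advances by self.rm.sql.PAGINATION_LIMIT per request.
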
def aggregate(self, data, sensordescription):
    result = []
    try:
        dftobjs = self.dftobjects[sensordescription.uuid]
        for f in dftobjs:
            g = dftobjs[f].control(data[f])
            if g:
                r = JSONObject()
                r.graph = g
                r.sensorID = sensordescription.sensorID
                r.propertyType = sensordescription.field[f].propertyName
                r.category = sensordescription.sensorType
                result.append(r)
        return result
    except KeyError:
        Log.e("Dft aggregation failed")
        return None

def get_description(self, uuid):
    resp = JOb()
    sd = self.rm.getSensorDescriptionByUUID(uuid)
    if sd:
        resp.status = "Ok"
        resp.message = ""
        resp.data = sd
    else:
        ed = self.rm.getEventDescriptionByUUID(uuid)
        if ed:
            resp.status = "Ok"
            resp.message = ""
            resp.data = ed
        else:
            resp.status = "Fail"
            resp.message = "no stream with given uuid known"
    return resp.dumps()

def update(self, data): self.updateDescription() # special case when no fields are in data # (fault recovery is not ready yet) if len(data.fields) == 0: self.rewardAndPunishment.update(False) self.absoluteValue = float("inf") self.ratedValue = self.rewardAndPunishment.value() return ts = self.repsys.timestamp self.updatecounter += 1 latency = data.latency annotatedLatency = self.repsys.description.maxLatency if self.lastUpdate is None: self.lastUpdate = ts self.rewardAndPunishment.update(True) self.absoluteValue = latency self.ratedValue = 1.0 self.min = latency self.mean = latency else: self.lastUpdate = ts if data.recovered: self.rewardAndPunishment.update(False) else: self.rewardAndPunishment.update(annotatedLatency > latency) self.absoluteValue = latency self.ratedValue = self.rewardAndPunishment.value() self.min = min(self.min, latency) self.mean = ((self.updatecounter - 1) * self.mean) / self.updatecounter + latency / self.updatecounter lat = JSONObject() lat.absoluteValue = self.absoluteValue lat.ratedValue = self.ratedValue lat.unit = self.unit return (self.name, lat)
def makeRandomEvent(self):
    eventData = JSONObject()
    eventData.ceID = 123456
    eventData.ceType = "Aarhus_Road_Traffic_Event"
    eventData.ceName = "traffic jam"
    eventData.ceTime = 1438591234000L
    eventData.ceCoordinate = "(56.12 10.13)"
    eventData.ceLevel = 1
    eventData.dummy = True
    return eventData

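# For reference, the dummy event above serializes to (the Python 2 long
# becomes a plain JSON number once dumped):
#   {"ceID": 123456, "ceType": "Aarhus_Road_Traffic_Event",
#    "ceName": "traffic jam", "ceTime": 1438591234000,
#    "ceCoordinate": "(56.12 10.13)", "ceLevel": 1, "dummy": true}
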
def get_static_stream_data(self, uuid):
    resp = JOb()
    resp.uuid = uuid
    if uuid in self.static_stream_data_cache:
        resp.status = "Ok"
        resp.message = ""
        resp.data = self.static_stream_data_cache[uuid]
    else:
        resp.status = "Fail"
        resp.message = "No sensor stream with uuid " + uuid + " known."
    return resp.dumps()

def avg_processing_time(self, uuid=None, category=None):
    job = JSONObject()
    if uuid:
        uuid = uuid.split(",")
        for u in uuid:
            stats = Stats.getOrMake(u)
            job[u] = []
            pTimes = stats.getAverageProcessingTimes()
            if pTimes:
                job[u].extend(pTimes)
            else:
                failJob = JSONObject()
                failJob.status = "Fail"
                failJob.message = "UUID not found"
                job[u] = failJob
    elif category:
        category = category.split(",")
        avgList = {}
        for c in category:
            uuidList = self.rm.getUUIDsForCategory(c)
            statList = []
            valueMap = {}
            # _uuid avoids shadowing the uuid parameter (None in this branch)
            for _uuid in uuidList:
                stats = Stats.getOrMake(_uuid)
                self.test(valueMap, stats.getAverageProcessingTimes())
                statList += stats.getAverageProcessingTimes()
            for key, value in valueMap.items():
                valueMap[key] = value / len(uuidList)
            avgList[c] = []
            self.valueMapToJson(valueMap, avgList[c], None, None)
        job = JSONObject()
        for element in avgList:
            job[element] = []
            pTimes = avgList[element]
            if pTimes:
                job[element].extend(pTimes)
            else:
                failJob = JSONObject()
                failJob.status = "Fail"
                failJob.message = "Category not found"
                job[element] = failJob
    else:
        statList = Stats.getAllStats()
        for s in statList:
            job[s.name] = []
            job[s.name].extend(s.getAverageProcessingTimes())
    return job.dumps()

def getValues(self, t, avg=None, minimum=None, maximum=None):
    q = JSONObject()
    if avg:
        q.absoluteAvg = self.qList.get(t).getAbsoluteAverage()
        q.ratedAvg = self.qList.get(t).getRatedAverage()
    if minimum:
        q.absoluteMin = self.qList.get(t).getAbsoluteMin()
        q.ratedMin = self.qList.get(t).getRatedMin()
    if maximum:
        q.absoluteMax = self.qList.get(t).getAbsoluteMax()
        q.ratedMax = self.qList.get(t).getRatedMax()
    return q

def data_timeframe_sql(self, uuid):
    """
    Returns the time frame (start and end date) for which data from the
    stream, identified by the UUID, is available.
    :param uuid: UUID identifying the stream
    :return: a JSON answer
    """
    resp = JOb()
    resp.uuid = uuid
    try:
        if self.rm.sql:
            data = self.rm.sql.data_timeframe(uuid)
            resp.status = "Ok"
            resp.message = ""
            resp.data = data
        else:
            resp.status = "Fail"
            resp.message = "SQL feature not activated."
    except Exception as e:
        resp.status = "Fail"
        resp.message = e.message
    return resp.dumps()

def aggregate(self, data, sensordescription):
    if not data.fields:
        # Log.e("There was no data available so it could not be aggregated")
        return None
    result = []
    try:
        saxobjs = self.aggregation_objs[sensordescription.uuid]
        for f in saxobjs:
            g = saxobjs[f].control(data[f])
            if g:
                r = JSONObject()
                r.graph = g
                r.sensorID = sensordescription.sensorID
                r.propertyType = sensordescription.field[f].propertyName
                r.category = sensordescription.sensorType
                result.append(r)
        del data
        # "is not 0" compared identity, not value; a plain truth test is correct
        if result:
            Log.d2('SaxAggregator: Aggregation successful %s' % str(result))
        return result
    except Exception as e:
        Log.e("aggregation failed due to Exception", e)
        return None

def getQualityValues(self, uuid=None, types=None, avg=None, minimum=None, maximum=None):
    if types:
        types = types.split(",")
    if uuid:
        uuid = uuid.split(",")
    else:
        # get all uuids from the wrapper list
        from virtualisation.wrapper.abstractwrapper import AbstractWrapper, AbstractComposedWrapper
        uuid = []
        wrappers = self.rm.wrappers
        for wrapper in wrappers:
            if isinstance(wrapper, AbstractWrapper):
                uuid.append(wrapper.getSensorDescription().uuid)
            elif isinstance(wrapper, AbstractComposedWrapper):
                for aWrapper in wrapper.wrappers:
                    uuid.append(aWrapper.getSensorDescription().uuid)
    qualities = []
    for _uuid in uuid:
        wrapper = self.rm.getWrapperByUUID(_uuid)
        message = ""
        if wrapper:
            if wrapper.qoiSystem.initialised:
                avgQualities = []
                avgQualities.append(wrapper.qoiSystem.getLastQoI(types=types, avg=valueToBoolean(avg), minimum=valueToBoolean(minimum), maximum=valueToBoolean(maximum)))
                avgQualities[-1].uuid = wrapper.getSensorDescription().uuid
                qualities.extend(JOb(avgQualities))
            else:
                message = "AVG Quality System not initialised for given uuid"
        else:
            message = "no stream with given uuid known"
        if message:
            resp = JOb()
            resp.uuid = _uuid
            resp.status = "Fail"
            resp.message = message
            qualities.append(resp)
    return JOb(qualities).dumps()

def snapshot_last(self, uuid):
    """
    Get the last cached observation
    :param uuid: The uuid of the wrapper
    :return: a JSON answer
    """
    resp = JOb()
    resp.uuid = uuid
    if uuid in self.observation_cache:
        resp.status = "Ok"
        resp.message = ""
        resp.data = self.observation_cache[uuid]
    else:
        resp.status = "Fail"
        resp.message = "No observation with the UUID " + uuid + " cached."
    return resp.dumps()

def getAverageProcessingTimes(self):
    job = JSONObject()
    job.name = self.name
    job.value = self.mean()
    if len(self.timeElementMap) > 0:
        job.values = []
        for element in self.timeElementMap:
            if len(self.timeElementMap[element].timeElementMap) > 0:
                job.values.append(self.timeElementMap[element].getAverageProcessingTimes())
            else:
                job2 = JSONObject()
                job2.name = element
                job2.value = self.timeElementMap[element].mean()
                job.values.append(job2)
    return job

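# Assumed shape of the nested result (timer names and values are
# hypothetical): a timer with two sub-timers yields
#   {"name": "wrapper", "value": 2.5,
#    "values": [{"name": "annotate", "value": 1.0},
#               {"name": "publish", "value": 1.5}]}
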
def getLastQoI(self, types=None, avg=None, minimum=None, maximum=None):
    qoiData = JSONObject()
    for metric in self.reputationSystem.metrics:
        q = JSONObject()
        currentValues = JSONObject()
        currentValues.absoluteValue = metric.absoluteValue
        currentValues.ratedValue = metric.ratedValue
        q.unit = metric.unit
        qoiData[metric.name] = q
        q.CURRENT = currentValues
        if types:
            self.reputationSystem.avgQoIManager.getValues(q, metric.name, types, avg, minimum, maximum)
    return qoiData

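# Assumed shape of the QoI snapshot built above for a single metric (the
# metric name and numbers are made up):
#   {"Age": {"unit": "s", "CURRENT": {"absoluteValue": 3.0, "ratedValue": 0.9}}}
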
def register_event(self, eventdescription):
    resp = JOb()
    if eventdescription.filename.endswith(".json"):
        from virtualisation.resourcemanagement.resourcemanagement import ResourceManagement
        trg = os.path.join(ResourceManagement.eventdescriptionfoldername, eventdescription.filename)
        dst = open(trg, "w")
        dst.write(eventdescription.file.read())
        dst.close()
        try:
            resp.status, resp.message = self.rm.registerEvent(trg)
        except Exception as e:
            resp.status = "Fail"
            resp.message = e.message
    else:
        resp.status = "Fail"
        resp.message = "Wrong file type."
    return resp.dumps()

def deactivate_fault_recovery(self, uuid):
    """
    Disable the fault recovery for a wrapper
    :param uuid: UUID identifying the wrapper
    :return: a JSON answer
    """
    resp = JOb()
    resp.uuid = uuid
    w = self.rm.getWrapperByUUID(uuid)
    if w:
        w.deactivateFaultRecovery()
        resp.status = "Ok"
        resp.message = ""
    else:
        resp.status = "Fail"
        resp.message = "no stream with given uuid known"
    return resp.dumps()

def deploy(self, deployunit):
    resp = JOb()
    if deployunit.filename.endswith(".zip"):
        from virtualisation.resourcemanagement.resourcemanagement import ResourceManagement
        trg = os.path.join(ResourceManagement.deployfoldername, deployunit.filename)
        dst = open(trg, "w")
        dst.write(deployunit.file.read())
        dst.close()
        try:
            resp.status, resp.message, resp.sensordescriptions = self.rm.deploy(trg, True)
        except Exception as e:
            resp.status = "Fail"
            resp.message = e.message
    else:
        resp.status = "Fail"
        resp.message = "Wrong file type."
    return resp.dumps()