def deleteGraph(self, graphName):
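    """Drop the named graph for graphName from the triple store.

    Builds a SPARQL "DROP SILENT GRAPH" query (the DEFINE sql:log-enable
    pragma is a Virtuoso extension) and returns True on success, False if
    the query raises an exception.
    """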
     queryString = "DEFINE sql:log-enable 3 DROP SILENT GRAPH <" + self.getGraphURI(graphName) + ">"
     L.d("deleteGraph using query:", queryString)
     sparql = self.getSparqlObject(graphName, queryString)
     sparql.setTimeout(300)
     try:
         ret = sparql.query()
         return True
     except Exception as e:
         L.e("Error in deleteGraph:", e.message)
         return False
 def checkTimeRelevantMetrics(self, lastUpdate):
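    """Penalise time-relevant quality metrics when no update arrived since lastUpdate.

    If the stream's timestamp has not changed, every metric receives a
    non-value update and the resulting QoI values are passed to the average
    quality manager; the clock job is re-scheduled in any case.
    """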
     L.d("ReputationSystem: checkTimeRelevantMetrics called for Stream", self.description.fullSensorID)
     L.d("ReputationSystem:", lastUpdate, self.timestamp)
     if (lastUpdate is not None) and (lastUpdate == self.timestamp):  # check if there was an update in the meanwhile
         L.d("ReputationSystem: There was no update, lets punish!")
         qoiValues = {}
         for metric in self.metrics:
             value = metric.nonValueUpdate()
             if value:
                 qoiValues[value[0]] = value[1]
             
         self.avgQoIManager.calculateAvgQualities(qoiValues)
     self.addClockJob()
    def get_observations(self, uuid, start=None, end=None, format='json', onlyLast=False, fields=None, offset=0):
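        """Fetch stored observations for the sensor identified by uuid.

        start/end restrict the sampling-time window, onlyLast returns only the
        most recent observation, fields selects individual data fields
        (comma-separated), and offset is used for pagination.  Results are
        returned either annotated and serialised in the requested RDF format
        or as a plain JSON list.
        """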
        from virtualisation.resourcemanagement.resourcemanagement import ResourceManagement

        w = self.rm.getWrapperByUUID(uuid)
        if not w:
            return None

        sd = w.getSensorDescription()

        # prepare query
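        # the WHERE clause always filters on the sensor UUID; a sampling-time
        # window is only added when more than the latest observation is requested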
        _filter = ["sensor_uuid = '%s'" % uuid]
        order = "ORDER BY sampling_time"
        limitation = ""
        if onlyLast:
            order += " DESC"
        else:
            if start:
                _filter.append("sampling_time >= TIMESTAMP '%s'" % start)
            if end:
                _filter.append("sampling_time <= TIMESTAMP '%s'" % end)
        _filter = "WHERE " + " and ".join(_filter)

        if fields:
            fields = fields.split(',')
            fields_ = []
            for ft in fields:
                fields_.append("data->'%s' AS %s" % (ft, ft))
            fields_.append("quality")
        else:
            fields_ = SQL.cp_observation_fields
        limitation = "LIMIT %d" % (1 if onlyLast else self.PAGINATION_LIMIT)
        query = "SELECT %s FROM %s.cp_observations %s %s %s OFFSET %d;" % (",".join(fields_), SQL.SCHEMA, _filter, order, limitation, offset)
        # query = "SELECT %s FROM %s.cp_observations %s %s;" % (",".join(fields_), SQL.SCHEMA, _filter, order)

        L.d("SQL: executing query", query)

        try:
            # need a new cursor object so as not to interfere with the state of the class's inserting cursor
            cursor = self.conn.cursor()
            cursor.execute(query)
            data = cursor.fetchall()
            data2 = [list(x) for x in data]
            del data
            if format in ('n3', 'nt', 'xml', 'turtle', 'pretty-xml', 'trix'):
                if ResourceManagement.args.messagebus or ResourceManagement.args.triplestore:
                    if fields:
                        observations = []
                        qualities = []
                        for x in data2:
                            tmp = JOb()
                            for i in range(0, len(fields)):
                                ft = fields[i]
                                tmp[ft] = JOb(x[i])
                            tmp.fields = fields
                            observations.append(tmp)
                            qualities.append(JOb(x[-1]))
                    else:
                        observations = [JOb(x[3]) for x in data2]
                        qualities = [JOb(x[4]) for x in data2]
                    g = self.rm.annotator.annotateObservation(observations, sd, None, qualities)
                    del observations
                    del qualities
                    del query
                    return g.serialize(format=format)
                else:
                    return "Error: requires messagebus or triplestore to be enabled"
            else:
                # search all columns in each row for a datetime.datetime and format it as a string
                for i in range(0, len(data2)):
                    data2[i] = map(lambda x: x.strftime("%Y-%m-%d %H:%M:%S") if isinstance(x, datetime.datetime) else x, data2[i])
                json_list = []
                for x in data2:
                    if fields:
                        # y = JOb({})
                        y = {}
                        for i in range(0, len(fields)):
                            # ft = fields[i]
                            # y[ft] = JOb(x[i])
                            y[fields[i]] = x[i]
                        # y.quality = JOb(x[-1])
                        # y.fields = fields
                        y["fields"] = fields
                        y["quality"] = x[-1]

                    else:
                        # y = JOb(x[3])
                        # y.quality = JOb(x[4])
                        y = x[3]
                        y["quality"] = x[4]
                    json_list.append(y)
                del query
                del data2
                # return JOb(json_list).dumps()
                return json_list

        except Exception as e:
            L.e("SQL:", e)
            L.e("SQL query used:", query)
            return "Error: " + str(e)
    def update(self, data):
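        """Rate the correctness of an observation.

        Each field is checked against the data type and the optional min/max
        range from the sensor description; fields that fail are collected in
        wrongFieldList.  Updates the reward-and-punishment score, the absolute
        value, the running minimum and mean, and returns (name, JSONObject).
        """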
        self.updatecounter += 1

        # special case when no fields are in data
        # (fault recovery is not ready yet)
        if len(data.fields) == 0:
            self.rewardAndPunishment.update(False)
            self.absoluteValue = float("inf")
            self.ratedValue = self.rewardAndPunishment.value()
            return


        wrongFieldList = []
        for field in data.fields:
            if field not in data:
                wrongFieldList.append(field)
                continue

            dataTypeStr = self.repsys.description.field[field].dataType
            dataType = utils.getType(dataTypeStr)
            minValue, maxValue = self.getMinMaxValue(field, data)

            value = data[field].value
            # print "field:", field, "value:", value, "min:", minValue, "max:", maxValue, "dataType:", dataTypeStr, dataType, "value type:", type(value)

            if minValue and maxValue:
                if dataTypeStr == "datetime.datetime":
                    minValue = datetime.datetime.strptime(minValue, AbstractClock.parserformat)
                    maxValue = datetime.datetime.strptime(maxValue, AbstractClock.parserformat)
                else:
                    maxValue = dataType(maxValue)
                    minValue = dataType(minValue)

            # everything might arrive as a string => check the type first, try to cast, then check min and max
            wrongValue = False
            if not isinstance(value, dataType): # type(value) is not dataType:
                try:
                    # special handling for datetime as format is needed
                    if dataTypeStr == "datetime.datetime":
                        value = datetime.datetime.strptime(value, self.repsys.description.field[field].format)
                    else:
                        value = dataType(value)
                except ValueError:
                    wrongFieldList.append(field)
                    wrongValue = True

            if not wrongValue:
                # now check whether the value lies within the min/max interval
                if minValue and minValue != "":
                    if value < minValue:
                        wrongFieldList.append(field)
                if maxValue and maxValue != "":
                    if value > maxValue:
                        wrongFieldList.append(field)
            # print "Correctness for", self.repsys.description.fullSensorID, len(wrongFieldList), value, minValue, maxValue

        nrWrongFields = len(wrongFieldList)
        if nrWrongFields > 0:
            L.d("Correctness wrong fields:", nrWrongFields, "(", ",".join(wrongFieldList), ")")

        if data.recovered or (nrWrongFields >= 1):
            self.rewardAndPunishment.update(False)
        else:
            self.rewardAndPunishment.update(True)

        self.ratedValue = self.rewardAndPunishment.value()
        self.absoluteValue = 1 - float(nrWrongFields) / len(data.fields)
        self.min = min(self.min, self.absoluteValue)
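        # incremental mean over all updates: mean_n = ((n - 1) * mean_(n-1) + x_n) / n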
        self.mean = ((self.updatecounter - 1) * self.mean) / self.updatecounter + float(
            self.absoluteValue) / self.updatecounter

        correctness = JSONObject()
        correctness.wrongFields = wrongFieldList
        correctness.absoluteValue = self.absoluteValue
        correctness.ratedValue = self.ratedValue
        correctness.unit = self.unit
        # 		print "correctness:", self.ratedValue, self.absoluteValue
        return (self.name, correctness)
    def update(self, data):
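        """Rate the completeness of an observation.

        Compares the received fields with the non-optional fields of the
        sensor description, counts missing fields and fields carrying
        placeholder values (None, Null, empty, NA), updates the
        reward-and-punishment score and the running statistics, and returns
        (name, JSONObject).
        """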
        # special case when no fields are in data
        # (fault recovery is not ready yet)
        if len(data.fields) == 0:
            self.rewardAndPunishment.update(False)
            self.absoluteValue = float("inf")
            self.ratedValue = self.rewardAndPunishment.value()
            return

        # collect the expected fields from the sensor description; only non-optional fields are considered
        fields = self.repsys.description.fields
        fields = [x for x in fields if not self.repsys.description.field[x].optional]
        
        receivedFields = data.fields

        # check whether the expected and received field sets are identical
        # (open question: how to handle received fields that carry no values?)
        missingFields = set(fields).difference(set(receivedFields))
        nrOfMissingFields = len(missingFields)

        # now go through all fields and check for NULL, NA,...
        nrOfWrongFields = 0
        wrongFields = set()
        wrongValues = ['None', 'Null', '', 'NA']  #TODO make the list of wrong values configurable
        for field in data.fields:
            if field in data:
                value = data[field].value
                if value is None or value in wrongValues:
                    nrOfWrongFields += 1
                    wrongFields.add(field)
            else:
                nrOfWrongFields += 1
                wrongFields.add(field)

        if nrOfMissingFields > 0:
            L.d("Completeness missing fields:", nrOfMissingFields, "(", ",".join(missingFields), ")")
        if nrOfWrongFields > 0:
            L.d("Completeness wrong fields:", nrOfWrongFields, "(", ",".join(wrongFields), ")")

        length = len(self.repsys.description.fields)
        currentLength = length - nrOfMissingFields - nrOfWrongFields
        self.updatecounter += 1
        if not self.goal:
            self.goal = length
            self.min = float(length)
            self.mean = float(length)
        # 			return (length, self.rewardAndPunishment.value())
        else:
            self.min = min(self.min, currentLength)
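            # incremental mean: mean_n = ((n - 1) * mean_(n-1) + x_n) / n with x_n = currentLength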
            self.mean = ((self.updatecounter - 1) * self.mean) / self.updatecounter + float(
                currentLength) / self.updatecounter
                
        if data.recovered:
            self.rewardAndPunishment.update(False)
        else:
            self.rewardAndPunishment.update(self.goal == currentLength)
        self.absoluteValue = currentLength
        self.ratedValue = self.rewardAndPunishment.value()

        completeness = JSONObject()
        completeness.missingFields = list(missingFields | wrongFields)
        completeness.absoluteValue = self.absoluteValue
        completeness.ratedValue = self.ratedValue
        completeness.unit = self.unit


        # 		print completeness.dumps()

        # 		print "completeness:", self.name, completeness
        # 		print (self.name, missingFields)
        return (self.name, completeness)
    def update(self):
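        """Produce and distribute one observation for this wrapper.

        In replay mode the history reader/parser pair is used, otherwise the
        live connection; if no data is available, an observation is estimated
        from the per-field fault recoveries.  Each observation gets an
        observation ID, is optionally rated by the QoI system, and is handed
        to all registered receivers.
        """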
        from virtualisation.resourcemanagement.resourcemanagement import ResourceManagement

        # print "time", self.clock.now()
        latStart = datetime.now()
        L.d("processing:", self.getSensorDescription().sensorID)
        # L.d(self.clock.now())
        if self.replaymode:
            self.stats.startMeasurement("Update_replay")
            #             self.clock.pause()
            if self.historyreader:
                L.d2("abstractwrapper get data")
                self.stats.startMeasurement("Update_replay.Historyreader")
                data_raw = self.historyreader.tick(self.clock)
                self.stats.stopMeasurement("Update_replay.Historyreader")
                L.d2("abstractwrapper received data:", str(data_raw))
                if data_raw:
                    data_list = [data_raw] if not self.historyreader.multiple_observations else data_raw
                    for data in data_list:
                        try:
                            L.d2("abstractwrapper parse data")
                            # print "data to parse", data
                            self.stats.startMeasurement("Update_replay.Historyparser")
                            parsed = self.historyparser.parse(data, self.clock)
                            self.stats.stopMeasurement("Update_replay.Historyparser")
                            L.d2("abstractwrapper parsed data:", str(parsed))
                            del data
                            if parsed:
                                self.stats.startMeasurement("Update_replay.Preparation")
                                ObservationIDGenerator.addObservationIDToFields(parsed)
                                parsed.producedInReplayMode = True
                                parsed.recovered = False
                                parsed.latency = (datetime.now() - latStart).total_seconds()
                                self.stats.stopMeasurement("Update_replay.Preparation")

                                # QoI Start
                                quality = None
                                if self.qoiSystem:
                                    L.d2("abstractwrapper get quality")
                                    self.stats.startMeasurement("Update_replay.Quality")
                                    quality = self.qoiSystem.addData(self.getSensorDescription(), parsed, self.clock)
                                    self.stats.stopMeasurement("Update_replay.Quality")
                                    L.d2("abstractwrapper quality:", quality)
                                if self.faultRecoveryActive:
                                    L.d2("abstractwrapper update fault recovery")
                                    self.stats.startMeasurement("Update_replay.FaultRecoveryUpdate")
                                    self.updateFaultRecoveries(parsed, quality)
                                    self.stats.stopMeasurement("Update_replay.FaultRecoveryUpdate")
                                    L.d2("abstractwrapper fault recovery updated")

                                self.stats.startMeasurement("Update_replay.Receiver")
                                for r in self.receiver:
                                    L.d2("abstractwrapper start receiver", r)
                                    r.receive(parsed, self.getSensorDescription(), self.clock, quality)
                                    L.d2("abstractwrapper receiver", r, "finished")
                                self.stats.stopMeasurement("Update_replay.Receiver")
                        except Exception as e:
                            L.e("Error while updating sensor", self.getSensorDescription().fullSensorID, e)
                        finally:
                            if ResourceManagement.args.gentle:
                                self.clock.sleep()
                else:
                    L.d("there is no data, ask fault recovery1")
                    # L.i(self.getSensorDescription().sensorID)
                    # L.i(self.clock.now())
                    try:
                        self.stats.startMeasurement("Update_replay.Recovery")
                        data = JSONObject()
                        data.latency = 0
                        data.producedInReplayMode = True
                        data.recovered = True

                        data.fields = []
                        for n in self.getSensorDescription().fields:
                            if n in self.faultRecoveries and self.faultRecoveries[n].isReady():
                                data.fields.append(n)
                                data[n] = JSONObject()
                                # at this point the dataType is in FAULT_RECOVERY_SUPPORTED_DATATYPES, so the cast is safe
                                data[n].value = self.faultRecoveryCast(
                                    self.faultRecoveries[n].getEstimation(),
                                    self.getSensorDescription().field[n].dataType,
                                )
                                data[n].propertyName = self.getSensorDescription().field[n].propertyName
                                data[n].propertyURI = self.getSensorDescription().field[n].propertyURI
                                if "unit" in self.getSensorDescription().field[n]:
                                    data[n].unit = self.getSensorDescription().field[n].unit
                                data[n].sensorID = self.getSensorDescription().fullSensorID
                                data[n].observationSamplingTime = self.clock.timeAsString()
                                data[n].observationResultTime = data[n].observationSamplingTime
                        self.stats.stopMeasurement("Update_replay.Recovery")

                        self.stats.startMeasurement("Update_replay.ObservationIDGenerator")
                        ObservationIDGenerator.addObservationIDToFields(data)
                        self.stats.stopMeasurement("Update_replay.ObservationIDGenerator")

                        quality = None
                        if self.qoiSystem:
                            self.stats.startMeasurement("Update_replay.Quality")
                            quality = self.qoiSystem.addData(self.getSensorDescription(), data, self.clock)
                            self.stats.stopMeasurement("Update_replay.Quality")

                        self.stats.startMeasurement("Update_replay.Receiver")
                        for r in self.receiver:
                            r.receive(data, self.getSensorDescription(), self.clock, quality)
                        self.stats.stopMeasurement("Update_replay.Receiver")
                    except Exception as e:
                        L.e("Error while updating sensor", self.getSensorDescription().fullSensorID, e)
                    finally:
                        pass
                        # if ResourceManagement.args.gentle:
                        #     self.clock.sleep()
            else:
                pass  # no history reader - nothing to do
            self.stats.stopMeasurement("Update_replay")
        else:  # no replay mode
            self.stats.startMeasurement("Update_live")
            if self.connection:
                try:
                    self.stats.startMeasurement("Update_live.Connection")
                    data_raw = self.connection.next()
                    self.stats.stopMeasurement("Update_live.Connection")
                    if data_raw:
                        data_list = [data_raw] if not self.connection.multiple_observations else data_raw
                        for data in data_list:
                            self.stats.startMeasurement("Update_live.Parser")
                            parsed = self.parser.parse(data, self.clock)
                            self.stats.stopMeasurement("Update_live.Parser")
                            if parsed:
                                self.stats.startMeasurement("Update_live.Preparation")
                                ObservationIDGenerator.addObservationIDToFields(parsed)
                                parsed.producedInReplayMode = False
                                parsed.recovered = False
                                parsed.latency = (datetime.now() - latStart).total_seconds()
                                self.stats.stopMeasurement("Update_live.Preparation")

                                # QoI Start
                                quality = None
                                if self.qoiSystem:
                                    # TODO update the timestamp
                                    self.stats.startMeasurement("Update_live.Quality")
                                    quality = self.qoiSystem.addData(self.getSensorDescription(), parsed, self.clock)
                                    self.stats.stopMeasurement("Update_live.Quality")
                                if self.faultRecoveryActive:
                                    L.d2("abstractwrapper update fault recovery")
                                    self.stats.startMeasurement("Update_live.FaultRecoveryUpdate")
                                    self.updateFaultRecoveries(parsed, quality)
                                    self.stats.stopMeasurement("Update_live.FaultRecoveryUpdate")
                                    L.d2("abstractwrapper fault recovery updated")

                                self.stats.startMeasurement("Update_live.Receiver")
                                for r in self.receiver:
                                    r.receive(parsed, self.getSensorDescription(), self.clock, quality)
                                self.stats.stopMeasurement("Update_live.Receiver")
                    else:
                        # fault recovery
                        L.i("there is no data, ask fault recovery2")
                        try:
                            self.stats.startMeasurement("Update_live.Recovery")
                            data = JSONObject()
                            data.latency = 0
                            data.recovered = True
                            data.fields = []
                            for n in self.getSensorDescription().fields:
                                if n in self.faultRecoveries and self.faultRecoveries[n].isReady():
                                    data.fields.append(n)
                                    data[n] = JSONObject()
                                    data[n].value = self.faultRecoveryCast(
                                        self.faultRecoveries[n].getEstimation(),
                                        self.getSensorDescription().field[n].dataType,
                                    )
                                    data[n].propertyName = self.getSensorDescription().field[n].propertyName
                                    data[n].propertyURI = self.getSensorDescription().field[n].propertyURI
                                    if "unit" in self.getSensorDescription().field[n]:
                                        data[n].unit = self.getSensorDescription().field[n].unit
                                    data[n].sensorID = self.getSensorDescription().fullSensorID
                                    data[n].observationSamplingTime = self.clock.timeAsString()
                                    data[n].observationResultTime = data[n].observationSamplingTime
                            self.stats.stopMeasurement("Update_live.Recovery")

                            ObservationIDGenerator.addObservationIDToFields(data)
                            quality = None
                            if self.qoiSystem:
                                self.stats.startMeasurement("Update_live.Quality")
                                quality = self.qoiSystem.addData(self.getSensorDescription(), data, self.clock)
                                self.stats.stopMeasurement("Update_live.Quality")

                            self.stats.startMeasurement("Update_live.Receiver")
                            for r in self.receiver:
                                r.receive(data, self.getSensorDescription(), self.clock, quality)
                            self.stats.stopMeasurement("Update_live.Receiver")
                        except Exception as e:
                            L.e(
                                "Error while updating sensor (fault recovery)",
                                self.getSensorDescription().fullSensorID,
                                str(e),
                            )
                        finally:
                            pass
                            # if ResourceManagement.args.gentle:
                            #     self.clock.sleep()
                except Exception as e:
                    L.e(
                        "Error while updating sensor (not fault recovery)",
                        self.getSensorDescription().fullSensorID,
                        str(e),
                    )
            else:
                pass  # no live mode supported
            self.stats.stopMeasurement("Update_live")