def start(self, restart=False):
        self._startQueues()
        if self.clock:
            self.clock.stop()
        self.clock = RealClock(self.end)

        if ResourceManagement.args.pt:
            from virtualisation.resourcemanagement.performancetestreceiver import PerformanceMeterMinutes

            performancetest = PerformanceMeterMinutes()

        for w in self.wrappers:
            self.startWrapper(w, restart)
            if ResourceManagement.args.pt:
                w.addReceiver(performancetest)
        L.i(datetime.datetime.now())

        if not self.args.noQuality:
            if not self.averageStreamQuality:
                self.averageStreamQuality = AverageStreamQuality(self, self.clock)
            else:
                self.averageStreamQuality.setClock(self.clock)

        self.clock.runAsync()

        raw_input("press Enter to end.\n")
        self.clock.stop()
    def deploy(self, f, autostart=False):
        """

        :param f:
        :param autostart:
        :return: a tuple with 3 elements. 1. status as string, 2. error message as string, 3. list of uuids of added wrapper
        """
        L.i("Deploying", f)
        sensordescriptions = []
        try:
            zFile = zipfile.ZipFile(f)
            if "deploy.json" in zFile.namelist():
                deployDescription = JOb(zFile.open("deploy.json", "r"))
                sys.path.insert(0, f)
                if deployDescription.isList():
                    for dd in deployDescription:
                        module = __import__(dd.module)
                        wrapper = getattr(module, dd["class"])()
                        self.addWrapper(wrapper)
                        sensordescriptions.append(wrapper.getSensorDescription())
                        if autostart:
                            self.startWrapper(wrapper)
                else:
                    module = __import__(deployDescription.module)
                    wrapper = getattr(module, deployDescription["class"])()
                    self.addWrapper(wrapper)
                    sensordescriptions.append(wrapper.getSensorDescription())
                    if autostart:
                        self.startWrapper(wrapper)
            return "OK", "", sensordescriptions
        except Exception as e:
            L.w("Deployment of wrapper", f, "failed.", e.message)
            return "Fail", e.message, []
    @classmethod
    def registerExchanges(cls):
        for ex in RabbitMQ.exchanges:
            try:
                RabbitMQ.declareExchange(ex, _type="topic")
            except Exception as e:
                L.e('Exchange %s could not be declared: %s' % (ex, e.message))
                L.e('Exception:', str(e))
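    # declareExchange is project code; a minimal sketch of what it is assumed
    # to wrap, using the real pika 1.x client API:
    #
    #   import pika
    #   connection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
    #   channel = connection.channel()
    #   channel.exchange_declare(exchange=ex, exchange_type="topic")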
    def receiveHandler(self, item):
        parsedData, sensordescription, clock, quality = item
        L.d2(parsedData)
        if self.sql:
            self.sql.insert_observation(sensordescription, parsedData, dictdeepcopy(quality))

        if ResourceManagement.args.messagebus or ResourceManagement.args.triplestore:
            g = self.annotator.annotateObservation(parsedData, sensordescription, clock, quality)

        del quality
        if (
            ResourceManagement.args.messagebus
            and not sensordescription.no_publish_messagebus
            and "fields" in parsedData
            and len(parsedData.fields) > 0
        ):
            message = g.serialize(format="n3")
            key = sensordescription.messagebus.routingKey
            # self.messageBusQueue.add((parsedData.dumps(), self.rabbitmqchannel, RabbitMQ.exchange_data, key))
            self.messageBusQueue.add((message, RabbitMQ.exchange_annotated_data, key))
            if self.ui.api:
                self.ui.api.update_observation_cache(str(sensordescription.uuid), message)
        if ResourceManagement.args.triplestore:
            # TODO: The following line is commented out since Virtuoso causes too much trouble
            # ThreadedTriplestoreAdapter.getOrMake(sensordescription.graphName).addGraph(g)
            pass
        if ResourceManagement.args.messagebus or ResourceManagement.args.triplestore:
            del g
        if ResourceManagement.args.aggregate:
            self.aggregationQueue.add((parsedData, sensordescription))
        else:
            del parsedData
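    # The annotated observation is published as N3 to a topic exchange. A
    # minimal consumer sketch (exchange/queue wiring is an assumption; the
    # pika calls are the real client API):
    #
    #   result = channel.queue_declare("", exclusive=True)
    #   channel.queue_bind(exchange=RabbitMQ.exchange_annotated_data,
    #                      queue=result.method.queue,
    #                      routing_key=sensordescription.messagebus.routingKey)
    #   channel.basic_consume(result.method.queue, callback, auto_ack=True)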
    def aggregate(self, data, sensordescription):
        """
        this method is called when stream data has been annotated.
        :param data:
        :param sensordescription:
        :return: The aggregated data
        """
        if not data.fields:
            return None
        result = []
        try:
            aggregation_objs = self.aggregation_objs[sensordescription.uuid]
            for key, agg in aggregation_objs.items():
                agg_result = agg.control(data[key])
                if agg_result:
                    g, start, end, size = agg_result

                    r = JSONObject()
                    r.graph = g
                    r.field = sensordescription.field[key]
                    r.sensorID = sensordescription.sensorID
                    r.propertyName = sensordescription.field[key].propertyName
                    r.category = sensordescription.sensorType
                    r.aggregationMethod = sensordescription.field[key].aggregationMethod
                    r.aggregationConfiguration = sensordescription.field[key].aggregationConfiguration
                    r.start = start
                    r.end = end
                    r.size = size
                    result.append(r)
            return result
        except Exception as e:
            Log.e("aggregation failed due to Exception", e)
            return None
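    # Each entry of the returned list is a JSONObject shaped roughly like this
    # (keys from the assignments above, values illustrative):
    #
    #   {"graph": <annotation graph>, "field": <field description>,
    #    "sensorID": "...", "propertyName": "...", "category": "...",
    #    "aggregationMethod": "...", "aggregationConfiguration": {...},
    #    "start": <window start>, "end": <window end>, "size": <sample count>}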
    def __init__(self, gdi_config, rm):
        self.rm = rm
        self.PAGINATION_LIMIT = 100
        connect_str = "host='%s' dbname='%s' user='%s' password='%s' port=%d" % (
            gdi_config.host, gdi_config.database, gdi_config.username, gdi_config.password, gdi_config.port)
        self.conn = psycopg2.connect(connect_str)
        self.curs = self.conn.cursor()
        try:
            self.curs.execute("CREATE SCHEMA IF NOT EXISTS %s;" % SQL.SCHEMA)
            # self.curs.execute("CREATE TABLE IF NOT EXISTS %s.cp_sensors (sensor_uuid UUID CONSTRAINT uuid_key PRIMARY KEY, sensor_annotation_id VARCHAR, service_category VARCHAR, traffic INTEGER, geom GEOMETRY(GEOMETRY, 4326) );" % (SQL.SCHEMA,))
            self.curs.execute("CREATE TABLE IF NOT EXISTS %s.cp_sensors (sensor_uuid UUID CONSTRAINT uuid_key PRIMARY KEY, sensor_annotation_id VARCHAR, service_category VARCHAR, traffic INTEGER, geom GEOMETRY(GEOMETRY, 4326) );" % ("public",))
            cols = ["sampling_time TIMESTAMP", "sensor_uuid UUID", "observation_uuid UUID", "data JSON", "quality JSON"]
            query = 'CREATE TABLE IF NOT EXISTS %s.cp_observations ( %s, PRIMARY KEY (observation_uuid), FOREIGN KEY (sensor_uuid) REFERENCES %s.cp_sensors(sensor_uuid));\n' % (SQL.SCHEMA, ', '.join(cols), "public")
            self.curs.execute(query)

            # indexes over sampling_time and sensor_uuid;
            # CREATE INDEX has no 'IF NOT EXISTS' before PostgreSQL 9.5, so the
            # error is caught in a separate try-except. Commit first so a failed
            # CREATE INDEX (which aborts the transaction) cannot undo the tables above.
            self.conn.commit()
            try:
                query = 'CREATE INDEX "timeindex" ON %s.cp_observations USING btree (sampling_time);' % (SQL.SCHEMA,)
                self.curs.execute(query)
                query = 'CREATE INDEX uuidindex ON %s.cp_observations USING btree (sensor_uuid);' % (SQL.SCHEMA,)
                self.curs.execute(query)
            except:
                # roll back the aborted transaction so the statements below can run
                self.conn.rollback()

            # primary secondary observation_uuid map
            query = 'CREATE TABLE IF NOT EXISTS %s.p_s_observation_uuid (main UUID, secondary UUID);' % (SQL.SCHEMA,)
            self.curs.execute(query)

            self.conn.commit()
            L.i("SQL: schema/tables created")
        except Exception as e:
            L.e("SQL: Could not create schema/tables", e)
            self.conn.rollback()
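    # insert_observation (called from receiveHandler above) is project code; a
    # minimal sketch of a matching parameterized insert, assuming sampling time
    # and uuids are extracted from the observation:
    #
    #   self.curs.execute(
    #       "INSERT INTO %s.cp_observations "
    #       "(sampling_time, sensor_uuid, observation_uuid, data, quality) "
    #       "VALUES (%%s, %%s, %%s, %%s, %%s)" % SQL.SCHEMA,
    #       (sampling_time, sensor_uuid, observation_uuid,
    #        json.dumps(data), json.dumps(quality)))
    #   self.conn.commit()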
    def setReplayMode(self, mode):
        super(BrasovIncidentWrapper, self).setReplayMode(mode)
        # fieldnames of service: "id", "comments", "createdon", "description", "guid", "incidentState", "incidentid", "indsoft_publiclyvisible", "statecode", "ticketnumber", "timestamp", "title", "x", "y"
        fieldnames = [
            "id",
            "comments",
            "createdon",
            "description",
            "guid",
            "i",
            "incidentState",
            "incidentid",
            "indsoft_publiclyvisible",
            "statecode",
            "ticketnumber",
            "timestamp",
            "title",
            "x",
            "y",
        ]
        try:
            fobj = AbstractWrapper.getFileObject(__file__, "incidents%d.csv" % self.number, "rU")
            self.historyreader = CSVHistoryReader(self, fobj, delimiter=",")
            self.historyreader.multiple_observations = False
            self.historyparser = CSVParser(self, fieldnames)

        except Exception as e:
            Log.e("setReplayMode in Brasov Incident Wrapper", self.number, e)
            self.historyreader = None
    def next(self):
        url = self.source or self.wrapper.getSensorDescription().source
        try:
            return self.load(url)
        except Exception as e:
            Log.e("HttpPullConnection: failed to load", url, str(e))
            return None
    def __run(self):
        while not self.stop:
            sleep(self.delay)
            Log.i(self.counter, self.txt)
            Log.i("ThreadedTriplestoreAdapter Buffer Size:", ThreadedTriplestoreAdapter.getTotalBufferSize())
            self.counter = 0
    def setReplayMode(self, mode):
        if mode:
            try:
                self.historyreader = CSVHistoryReader(self, AbstractWrapper.getFileObject(__file__, os.path.join("historicdata", "pollution-%s.csv" % self.sensorDescription.sensorID), "rU"), delimiter=';')
                self.historyparser = CSVParser(self, self.historyreader.headers)
            except Exception as e:
                Log.e(e)
                self.historyreader = None
        super(InternalBrasovWrapper, self).setReplayMode(mode)
    def load(self, url):
        try:
            f = urllib2.urlopen(url, timeout=10)
            r = f.read()
            f.close()
            return r
        except Exception as e:
            Log.e(self.__class__.__name__, "error in load:", str(e))
            return None
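    # Note: urllib2 is Python 2 only; the Python 3 equivalent of the call
    # above is urllib.request.urlopen(url, timeout=10).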
    def start(self):
        if self.replaymode:
            try:
                self.historyreader = CSVHistoryReader(self, AbstractWrapper.getFileObject(__file__, os.path.join("historicdata", "weatherAW-%s.csv" % self.sensorDescription.sensorID), "rU"), delimiter=';')
                self.historyparser = CSVParser(self, self.historyreader.headers)
            except Exception as e:
                Log.e(e)
                self.historyreader = None
        super(InternalWeatherAWWrapper, self).start()
    def saveGraph(self, graph, graphName):
        serialisation = graph.serialize(destination=None, format='nt', encoding=None)
        queryString = "".join(["INSERT DATA INTO GRAPH <", self.getGraphURI(graphName), "> {", serialisation, "}"])
        sparql = self.getSparqlObject(graphName, queryString)
        try:
            sparql.query()
        except HTTPError as e:
            L.e("Sparql Endpoint HTTPError in saveGraph:", str(e.code), e.reason)
        except Exception as e:
            L.e("Error in saveGraph:", e.message)
    def removeSensorStream(self, sensor_uuid):
        # parameterized query so psycopg2 quotes the value safely
        sql = "DELETE FROM cp_sensors WHERE sensor_uuid=%s"
        try:
            self.curs.execute(sql, (sensor_uuid,))
            self.conn.commit()
            return True
        except:
            self.conn.rollback()
            L.e("Cannot delete Sensor: " + str({'sensor_uuid': sensor_uuid}))
            L.e("SQL query used:", sql)
            return False
    def removeAllSensorStreams(self):
        sql = "DELETE FROM cp_sensors"
        try:
            self.curs.execute(sql)
            self.conn.commit()
            return True
        except:
            self.conn.rollback()
            L.e("Cannot delete all Sensors")
            L.e("SQL query used:", sql)
            return False
    def deleteGraph(self, graphName):
        queryString = "DEFINE sql:log-enable 3 DROP SILENT GRAPH <" + self.getGraphURI(graphName) + ">"
        L.d("deleteGraph using query:", queryString)
        sparql = self.getSparqlObject(graphName, queryString)
        sparql.setTimeout(300)
        try:
            sparql.query()
            return True
        except Exception as e:
            L.e("Error in deleteGraph:", e.message)
            return False
    def createGraph(self, graphName):
        queryString = "CREATE GRAPH <" + self.getGraphURI(graphName) + ">"
        sparql = self.getSparqlObject(graphName, queryString)
        try:
            sparql.query().convert()
            return True
        except HTTPError as e:
            L.e("Sparql Endpoint HTTPError in createGraph:", str(e.code), e.reason)
            return False
        except Exception as e:
            L.e("Error in createGraph:", e.message)
            return False
    def saveTriple(self, graphName, subject, predicate, obj):
        sparql = self.getSparqlObject(graphName)
        # INSERT INTO doesn't work with a default graph set; it has to be
        # "... INSERT DATA INTO <graph> ..."
        queryString = "INSERT DATA INTO <" + self.getGraphURI(
            graphName) + "> { <" + subject + "> <" + predicate + "> <" + obj + "> }"
        sparql.setQuery(queryString)
        try:
            sparql.query()
        except HTTPError as e:
            L.e("Sparql Endpoint HTTPError in saveTriple:", str(e.code), e.reason)
        except Exception as e:
            L.e("Error in saveTriple:", e.message)
    def saveMultipleGraphs(self, serialisedGraph, graphName=None):
        queryString = "".join(["INSERT DATA { GRAPH <", self.getGraphURI(graphName), "> {", serialisedGraph, "}}"])
        sparql = self.getSparqlObject(graphName, queryString)
        sparql.queryType = INSERT
        try:
            sparql.query()
        except EndPointInternalError:  # transaction deadlock case
            raise SPARQL_Exception()
        except EndPointNotFound:  # temporary 404 error
            raise SPARQL_Exception()
        except Exception as e:
            L.e("Error in saveMultipleGraphs:", e.message)
            raise StoreOffline_Exception()
    def graphExists(self, graphName):
        queryString = "ASK { GRAPH <" + self.getGraphURI(graphName) + "> { ?s ?p ?o . }}"
        sparql = self.getSparqlObject(graphName, queryString)
        try:
            ret = sparql.query()
            retList = ret.convert()
            return retList["boolean"]
        except HTTPError as e:
            L.e("Sparql Endpoint HTTPError in graphExists:", str(e.code), e.reason)
        except Exception as e:
            L.e("Error in graphExists:", e.message)
    def initialise(self, description, clock):
        self.reputationSystem = ReputationSystem(description, clock)
        # self.reputationSystem.addSink(CSVSink())
        # self.reputationSystem.addSink(VirtuosoSink(self.messageBusQueue))
        # self.reputationSystem.addSink(TerminalSink())
        self.reputationSystem.addSink(BlackHoleSink())
        self.reputationSystem.addQoIMetric(Frequency())
        self.reputationSystem.addQoIMetric(Completeness())
        self.reputationSystem.addQoIMetric(Age())
        self.reputationSystem.addQoIMetric(Latency())
        self.reputationSystem.addQoIMetric(Correctness())
        self.initialised = True
        L.d2("CpQoiSystem initialised")
    def getStreamMinMaxDate(self, graphName, sensorName):
        queryString = """prefix ssn: <http://purl.oclc.org/NET/ssnx/ssn#> prefix tl: <http://purl.org/NET/c4dm/timeline.owl#> 
                        prefix sao: <http://purl.oclc.org/NET/UNIS/sao/sao#> prefix ces: <http://www.insight-centre.org/ces#> 
                        prefix so: <http://www.daml.org/services/owl-s/1.2/Service.owl#> prefix qoi: <http://purl.oclc.org/NET/UASO/qoi#> 
                        prefix prov: <http://www.w3.org/ns/prov#> 
                        SELECT   MAX(str(?timeValue)) as ?maxDateTime MIN(str(?timeValue)) as ?minDateTime 
                        WHERE { ?observation ssn:observationResultTime ?time . ?observation ssn:observedBy <""" + sensorName + """> . ?time tl:at ?timeValue .  }"""

        sparql = self.getSparqlObject(graphName, queryString)
        try:
            ret = sparql.query().convert()
            return ret
        except Exception as e:
            L.e("Error in getStreamMinMaxDate:", e.message)
    def end(self):
        for w in self.wrappers:
            w.stop()
        if ResourceManagement.args.messagebus:
            self.messageBusQueue.stop()
        if ResourceManagement.args.aggregate:
            self.aggregationQueue.stop()
        if self.eventWrapper:
            self.eventWrapper.stop()
        self.receiverQueue.stop()
        if ResourceManagement.args.triplestore:
            ThreadedTriplestoreAdapter.stop()
        self.stopInterface()
        L.i("REPLAY ENDED")
    def write(self, graphString):
        if ThreadedTriplestoreAdapter.triplestore:
            attempts = self.maxAttempts
            while attempts > 0:
                try:
                    ThreadedTriplestoreAdapter.triplestore.saveMultipleGraphs(graphString, self.graphName)
                    return
                except SPARQL_Exception:
                    # self.buffer.put(graphString)
                    attempts -= 1
                    L.e("Error inserting into Virtuoso, retrying")
                except StoreOffline_Exception:
                    L.e("ThreadedTriplestoreAdapter was unable to save data, store might be offline!")
                    break
            else:
                # while/else: ran out of attempts without a successful save
                L.e("ThreadedTriplestoreAdapter: giving up after", self.maxAttempts, "attempts")
    def update(self, data):
        L.d2("ReputationSystem: update called for Stream", self.description.fullSensorID)
        self.setTimestamp(data)
        returnValues = {}
        if data is not None:
            for m in self.metrics:
                value = m.update(data)
                if value:
                    returnValues[value[0]] = value[1]
                for sink in self.sinkList:
                    sink.update(m)
        self.addClockJob(True)
        self.avgQoIManager.calculateAvgQualities(returnValues)
        return returnValues
    def _run(self):
        self.abort.clear()
        if not self.clock:
            raise Exception("no clock set?")

        while not self.abort.is_set():
            if self.event.wait(1.0):
                if not self.abort.is_set():
                    try:
                        self.update()
                    except Exception as e:
                        L.e("in _run", e.message)
                    finally:
                        self.clock.continue_running()
                self.event.clear()
    def __registerSensorStream(self, sensor_uuid, sensor_id, service_category, geom, epsg):
        # parameterized query so psycopg2 quotes the values safely
        sql = ("INSERT INTO cp_sensors(sensor_uuid, sensor_annotation_id, service_category, geom) "
               "VALUES(%(sensor_uuid)s, %(sensor_id)s, %(service_category)s, "
               "ST_Transform(st_setsrid(%(geom)s::geometry, %(epsg)s), 4326))")
        params = {'sensor_uuid': sensor_uuid, 'sensor_id': sensor_id,
                  'service_category': service_category, 'geom': geom.wkb_hex, 'epsg': epsg}
        try:
            self.curs.execute(sql, params)
            self.conn.commit()
            return True
        except:
            self.conn.rollback()
            L.e("Cannot insert Sensor:", str(params))
            L.e("SQL query used:", sql)
            return False
    def _runReplay(self):
        self.abort.clear()
        if not self.clock:
            raise Exception("no clock set?")

        while not self.abort.is_set():
            if self.event.wait(1.0):
                if not self.abort.is_set():
                    try:
                        self.__forEachWrapper("update")
                    except Exception as e:
                        L.e("in _runReplay", e.message)
                    finally:
                        self.clock.continue_running()

                self.event.clear()
    def aggregate(self, data, sensordescription):
        result = []
        try:
            dftobjs = self.dftobjects[sensordescription.uuid]
            for f in dftobjs:
                g = dftobjs[f].control(data[f])
                if g:
                    r = JSONObject()
                    r.graph = g
                    r.sensorID = sensordescription.sensorID
                    r.propertyType = sensordescription.field[f].propertyName
                    r.category = sensordescription.sensorType
                    result.append(r)
            return result
        except KeyError:
            Log.e("Dft aggregation failed for sensor", sensordescription.uuid)
            return None
    def addWrapper(self, wrapper):
        # TODO: this should not be here
        if ResourceManagement.args.cleartriplestore:
            self.deleteGraphs(wrapper)
        sd = wrapper.getSensorDescription()
        try:
            # getSensorDescription may return a single description or a list
            for _sd in (sd if isinstance(sd, list) else [sd]):
                try:
                    _sd.test()
                    if ResourceManagement.args.aggregate:
                        self.aggregator.wrapper_added(_sd)
                    if self.gdiInterface:
                        self.gdiInterface.registerSensorStreamFromWKT(
                            _sd.uuid, _sd.sensorID, _sd.sensorType, _sd.location, _sd.location_epsg or 4326
                        )
                    # if self.sql:
                    #     self.sql.create_table(_sd)
                    L.i("added wrapper with ID", _sd.sensorID)
                except Exception as ex:
                    L.e("Error deploying wrapper:", str(ex))

            if ResourceManagement.args.triplestore or ResourceManagement.args.messagebus:
                # StaticAnnotator.staticAnnotationSensor(wrapper, self.config, self.messageBusQueue, self.rabbitmqchannel)
                StaticAnnotator.threadedStaticAnnotationSensor(wrapper, self.config, self.messageBusQueue, self.ui.api)
            if ResourceManagement.args.messagebus:
                wrapper.setMessageBusQueue(self.messageBusQueue)
            self.wrappers.append(wrapper)
        except Exception as ex:
            L.e(self.__class__.__name__, "Error in addWrapper:", str(ex))
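    # Typical lifecycle, wiring the methods above together (the zip path and
    # the constructor call are illustrative, not the module's actual API):
    #
    #   rm = ResourceManagement()
    #   status, error, descriptions = rm.deploy("wrappers/example.zip", autostart=False)
    #   if status == "OK":
    #       rm.start()  # blocks on raw_input until Enter is pressed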