Example #1
    def create_group_description(self, properties):
        """
        Converts a dictionary of stream or group properties into a string
        describing the group.
        """
        for prop in self.groupproperties:
            if prop not in properties:
                log("Required group property '%s' not present in %s group" % \
                        (prop, self.collection_name))
                return None

            if prop == 'persist' and properties[prop] is True:
                properties[prop] = "PERSIST"
            elif prop == 'persist' and properties[prop] is False:
                properties[prop] = "NOPERSIST"

            if prop == 'pipelining' and properties[prop] is True:
                properties[prop] = "PIPELINING"
            elif prop == 'pipelining' and properties[prop] is False:
                properties[prop] = "NOPIPELINING"

            if prop == 'caching' and properties[prop] is True:
                properties[prop] = "CACHING"
            elif prop == 'caching' and properties[prop] is False:
                properties[prop] = "NOCACHING"

        return "FROM %s FETCH %s MC %s %s %s %s %s %s %s" % (
            properties['source'], properties['destination'],
            properties['max_connections'],
            properties['max_connections_per_server'], properties['persist'],
            properties['max_persistent_connections_per_server'],
            properties['pipelining'], properties['pipelining_max_requests'],
            properties['caching'])
Example #2
    def parse_group_description(self, description):
        regex = "FROM (?P<source>[.a-zA-Z0-9_-]+) "
        regex += "TO (?P<destination>[.a-zA-Z0-9_-]+) "
        regex += "OPTION (?P<option>[a-zA-Z0-9]+) "
        regex += "(?P<split>[A-Z0-9]+)"

        parts = self._apply_group_regex(regex, description)

        if parts is None:
            return None

        if parts.group("split") not in self.splits:
            log("%s group description has no aggregation method" % \
                    (self.collection_name))
            log(description)
            return None

        keydict = {
            "source": parts.group("source"),
            "destination": parts.group("destination"),
            "packet_size": parts.group("option"),
            "aggregation": parts.group("split")
        }

        return keydict
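
The description strings consumed by this parser can be exercised with a small standalone script. This is a minimal sketch assuming a hypothetical description whose aggregation method (FAMILY) would appear in self.splits:

import re

# Hypothetical group description in the format the parser above expects.
description = "FROM ampsrc TO ampdst OPTION 84 FAMILY"

regex = ("FROM (?P<source>[.a-zA-Z0-9_-]+) "
         "TO (?P<destination>[.a-zA-Z0-9_-]+) "
         "OPTION (?P<option>[a-zA-Z0-9]+) "
         "(?P<split>[A-Z0-9]+)")

parts = re.match(regex, description)
if parts is not None:
    print({
        "source": parts.group("source"),
        "destination": parts.group("destination"),
        "packet_size": parts.group("option"),
        "aggregation": parts.group("split"),
    })
    # {'source': 'ampsrc', 'destination': 'ampdst',
    #  'packet_size': '84', 'aggregation': 'FAMILY'}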
Example #3
    def group_to_labels(self, groupid, description, lookup=True):
        labels = []

        groupparams = self.parse_group_description(description)
        if groupparams is None:
            log("Failed to parse group description to generate group labels")
            return None

        baselabel = 'group_%s' % (groupid)

        search = dict(groupparams)
        del search['direction']

        # XXX aggregation vs family
        if groupparams['direction'] in ['IN', 'BOTH']:
            label = self._generate_direction_labels(baselabel, search, 'rx',
                                                    groupparams['aggregation'],
                                                    lookup)
            #groupparams['family'], lookup)
            if label is None:
                return None
            labels += label

        if groupparams['direction'] in ['OUT', 'BOTH']:
            label = self._generate_direction_labels(baselabel, search, 'tx',
                                                    groupparams['aggregation'],
                                                    lookup)
            if label is None:
                return None
            labels += label

        return sorted(labels, key=itemgetter('shortlabel'))
Example #4
    def process_data(self, timestamp, data, source):
        for result in data['results']:
            result['source'] = source
            result['command'] = data['command']
            # TODO should we use a string like "none" instead? Or the command?
            # if the test had no destination, use the source as the destination
            if result['destination'] is None:
                result['destination'] = result['source']

            key = self._stream_key(self._stream_properties(result))

            if key in self.streams:
                stream_id = self.streams[key]
            else:
                stream_id = self.create_new_stream(
                        self._stream_properties(result),
                        timestamp, not self.have_influx)
                if stream_id < 0:
                    logger.log("AMPModule: Cannot create stream for: ")
                    logger.log("AMPModule: external %s %s\n", source,
                            result['destination'])
                    return
                self.streams[key] = stream_id

            self.insert_data(stream_id, timestamp, result)
            self.db.update_timestamp(self.datatable, [stream_id], timestamp,
                    self.have_influx)
Example #5
    def create_group_description(self, properties):
        # TODO tcpreused should always be false now, can we remove the need
        # for it to be part of the group description?
        if 'tcpreused' in properties:
            if properties['tcpreused'] is True:
                reuse = "T"
            else:
                reuse = "F"
        else:
            reuse = "F"

        if 'direction' not in properties:
            properties['direction'] = "BOTH"
        if 'family' not in properties and 'address' in properties:
            properties['family'] = \
                    self._address_to_family(properties['address'])

        for prop in self.groupproperties:
            if prop not in properties:
                log("Required group property '%s' not present in %s group" % \
                        (prop, self.collection_name))
                return None

        properties['direction'] = properties['direction'].upper()
        properties['family'] = properties['family'].upper()

        return "FROM %s TO %s DURATION %s WRITESIZE %s %s DIRECTION %s FAMILY %s PROTOCOL %s" \
                % (properties['source'], properties['destination'],
                   properties['duration'], properties['writesize'], reuse,
                   properties['direction'], properties['family'],
                   properties['protocol'])
Example #6
    def schedule_new_test(self, settings):
        query = """ INSERT INTO schedule (schedule_test, schedule_frequency,
                    schedule_start, schedule_end, schedule_period,
                    schedule_args, schedule_mesh_offset)
                    VALUES (%s, %s, %s, %s, %s, %s, %s)
                    RETURNING schedule_id """

        # TODO sanity check arguments? make sure test exists etc
        try:
            params = (settings["test"], settings["frequency"],
                      settings["start"], settings["end"], settings["period"],
                      settings["args"], settings["mesh_offset"])
        except KeyError:
            return None

        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while scheduling new test")
            self.dblock.release()
            return None

        schedule_id = self.db.cursor.fetchone()['schedule_id']
        self.db.closecursor()
        self.dblock.release()

        # add the initial set of endpoints for this test
        self.add_endpoints_to_test(schedule_id, settings["source"],
                                   settings["destination"])

        return schedule_id
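
The RETURNING clause is what lets this method obtain the new schedule_id without a second query. A minimal sketch of the same pattern using psycopg2 directly; the connection details and parameter values are assumptions for illustration:

import psycopg2

conn = psycopg2.connect(dbname="amp", user="amp", password="secret")
cur = conn.cursor()

cur.execute(
    """INSERT INTO schedule (schedule_test, schedule_frequency,
                schedule_start, schedule_end, schedule_period,
                schedule_args, schedule_mesh_offset)
       VALUES (%s, %s, %s, %s, %s, %s, %s)
       RETURNING schedule_id""",
    ("icmp", 60, None, None, None, "", 0))

# fetchone() returns the row produced by RETURNING
schedule_id = cur.fetchone()[0]
conn.commit()
print(schedule_id)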
Example #7
    def delete_endpoints(self, schedule_id, src, dst):
        query = """ DELETE FROM endpoint WHERE endpoint_schedule_id=%s """

        if self._is_mesh(src):
            # TODO set mesh_is_src=false if the mesh is no longer the source
            # of any tests
            query += " AND endpoint_source_mesh=%s"
        elif self._is_site(src):
            query += " AND endpoint_source_site=%s"
        else:
            print("source is neither mesh nor site")
            return None

        if self._is_mesh(dst):
            query += " AND endpoint_destination_mesh=%s"
        elif self._is_site(dst):
            query += " AND endpoint_destination_site=%s"
        else:
            print("destination is neither mesh nor site")
            return None

        params = (schedule_id, src, dst)
        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while deleting endpoints")
            self.dblock.release()
            return None
        count = self.db.cursor.rowcount
        self.db.closecursor()
        if count > 0:
            self._update_last_modified_schedule(schedule_id)
        self.dblock.release()
        return count > 0
Example #8
    def get_user(self, username):
        query = """ SELECT username, longname, email, roles, enabled, password
                    FROM users WHERE username = %s """
        params = (username, )

        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while fetching users")
            self.dblock.release()
            return None

        row = self.db.cursor.fetchone()

        self.db.closecursor()
        self.dblock.release()

        if row is None:
            return False

        return {
            "username": row[0],
            "longname": row[1],
            "email": row[2],
            "roles": row[3] if row[3] is not None else [],
            "enabled": row[4],
            "password": row[5],
        }
Example #9
    def create_event_filter(self, username, filtername, filterstring):
        """
        Inserts a new event filter into the filter table.

        Parameters:
          username -- the user who the new filter belongs to.
          filtername -- the name to be associated with this new filter.
          filterstring -- a string containing stringified JSON that describes
                          the filter options.

        Returns:
          the tuple (username, filtername) if the new filter is successfully
          inserted into the filter table, or None if the insertion fails.
        """
        if self.disabled:
            return None

        query = """INSERT INTO eventing.userfilters (user_id, filter_name, filter) VALUES (%s, %s, %s) """
        params = (username, filtername, filterstring)

        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while inserting new event filter")
            self.dblock.release()
            return None
        self.dblock.release()
        return username, filtername
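
The filterstring argument is expected to be stringified JSON. A minimal sketch of preparing such a string before calling this method; the filter keys and the db instance are assumptions:

import json

# hypothetical filter options -- the real keys depend on the event frontend
filteropts = {
    "sources": ["ampsrc"],
    "destinations": ["ampdst"],
    "minaffected": {"sources": 1, "targets": 1, "flows": 1},
}
filterstring = json.dumps(filteropts)

# db is assumed to be an instance of the class this method belongs to
# db.create_event_filter("admin", "default", filterstring)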
Example #10
    def getASNsByName(self, pagesize=30, offset=0, term=""):
        # the term is bound as a query parameter so we don't get sql-injected
        query = """SELECT count(*) FROM asmap WHERE CAST(asn AS TEXT) ILIKE
                %s OR asname ILIKE %s"""
        params = ("%" + term + "%", "%" + term + "%")

        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            self.dblock.release()
            log("Error while counting ASNs in the database")
            return (0, {})
        ascount = self.db.cursor.fetchone()[0]
        self.db.closecursor()
        self.dblock.release()

        query = """SELECT * FROM asmap WHERE CAST(asn AS TEXT) ILIKE
                %s OR asname ILIKE %s ORDER BY asn LIMIT %s OFFSET %s"""
        params = ("%" + term + "%", "%" + term + "%", pagesize, offset)

        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            self.dblock.release()
            log("Error while querying for all AS names")
            return (0, {})

        allasns = []
        for row in self.db.cursor:
            asstring = "AS%s %s" % (row[0], row[1])
            allasns.append({'id': str(row[0]), 'text': asstring})

            if len(allasns) > pagesize:
                break
        self.db.closecursor()
        self.dblock.release()
        return ascount, allasns
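
The user-supplied term is never interpolated into the SQL text; it is wrapped in ILIKE wildcards and bound as a parameter by the driver. A minimal standalone sketch of the same pattern using psycopg2 directly (connection details and the column list are assumptions):

import psycopg2

conn = psycopg2.connect(dbname="amp", user="amp", password="secret")
cur = conn.cursor()

term = "telecom"            # hypothetical search term
wildcard = "%" + term + "%"

# the wildcarded term is passed as a bound parameter, so the input
# cannot break out of the query
cur.execute(
    """SELECT asn, asname FROM asmap
       WHERE CAST(asn AS TEXT) ILIKE %s OR asname ILIKE %s
       ORDER BY asn LIMIT %s OFFSET %s""",
    (wildcard, wildcard, 30, 0))

for asn, asname in cur.fetchall():
    print("AS%s %s" % (asn, asname))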
Example #11
    def __init__(self, rrds, nntsc_conf, routekey, exchange, queueid):

        self.exporter = None
        self.pubthread = None

        dbconf = get_nntsc_db_config(nntsc_conf)
        if dbconf == {}:
            sys.exit(1)

        self.db = DBInsert(dbconf["name"],
                           dbconf["user"],
                           dbconf["pass"],
                           dbconf["host"],
                           cachetime=dbconf['cachetime'])
        self.db.connect_db(15)

        self.influxconf = get_influx_config(nntsc_conf)
        if self.influxconf == {}:
            sys.exit(1)

        if self.influxconf["useinflux"]:
            self.influxdb = InfluxInsertor(self.influxconf["name"],
                                           self.influxconf["user"],
                                           self.influxconf["pass"],
                                           self.influxconf["host"],
                                           self.influxconf["port"])
        else:
            self.influxdb = None

        self.smokeparser = RRDSmokepingParser(self.db, self.influxdb)
        self.smokepings = {}
        self.rrds = {}
        for r in rrds:
            if r['modsubtype'] == 'smokeping':
                lastts = self.smokeparser.get_last_timestamp(r['stream_id'])
                r['lasttimestamp'] = lastts
                self.smokepings[r['stream_id']] = r
            else:
                continue

            r['lastcommit'] = r['lasttimestamp']
            filename = str(r['filename'])
            if filename in self.rrds:
                self.rrds[filename].append(r)
            else:
                self.rrds[filename] = [r]

        liveconf = get_nntsc_config_bool(nntsc_conf, "liveexport", "enabled")
        if liveconf == "NNTSCConfigError":
            logger.log("Bad 'enabled' option for liveexport -- disabling")
            liveconf = False

        if liveconf == "NNTSCConfigMissing":
            liveconf = True

        if liveconf:
            self.exporter, self.pubthread = \
                    initExportPublisher(nntsc_conf, routekey, exchange, queueid)

            self.smokeparser.add_exporter(self.exporter)
Example #12
    def get_legend_label(self, description):
        """
        Converts a group description string into an appropriate label for
        placing on a graph legend.
        """
        groupparams = self.parse_group_description(description)
        if groupparams is None:
            log("Failed to parse group description to generate legend label")
            return None

        flags = ""
        if groupparams["flags"][0] == "T":
            flags += "+recurse "
        if groupparams["flags"][1] == "T":
            flags += "+dnssec "
        if groupparams["flags"][2] == "T":
            flags += "+nsid "

        if groupparams['aggregation'] == "FULL":
            agg = "combined instances"
        elif groupparams['aggregation'] == "FAMILY":
            agg = "IPv4/IPv6"
        elif groupparams['aggregation'] == "IPV4":
            agg = "IPv4"
        elif groupparams['aggregation'] == "IPV6":
            agg = "IPv6"
        else:
            agg = ""

        label = "%s to %s DNS, %s %s %s %s %s" % ( \
                groupparams['source'], groupparams['destination'],
                groupparams['query'], groupparams['query_class'],
                groupparams['query_type'], groupparams['udp_payload_size'],
                flags)
        return label, agg
Example #13
    def _generate_family_label(self, baselabel, search, family, lookup):
        key = baselabel + "_" + family
        shortlabel = family

        if lookup:
            streams = self.streammanager.find_streams(search)
            if streams is None:
                log("Failed to find streams for label %s, %s" % \
                        (key, self.collection_name))
                return None

            famstreams = []
            for sid, store in streams:
                if 'address' not in store:
                    continue
                if family.lower() == self._address_to_family(store['address']):
                    famstreams.append(sid)
        else:
            famstreams = []

        return {
            'labelstring': key,
            'streams': famstreams,
            'shortlabel': shortlabel
        }
Example #14
    def _start_consume(self):
        if self._prefetch != 0:
            self._channel.basic_qos(prefetch_count=self._prefetch)
        self._channel.add_on_cancel_callback(self._pikaCancelled)
        logger.log("Started consuming from %s" % (self._queuename))
        self._consumer_tag = self._channel.basic_consume(
            self.callback, self._queuename, self.noack)
Example #15
    def add_mesh_member(self, meshname, ampname):
        if self._is_mesh(ampname):
            return
        elif not self._is_site(ampname):
            # assume the destination is a site that was entered with the
            # text input box, and create it if it doesn't exist
            if self._add_basic_site(ampname) is None:
                return

        query = """ INSERT INTO member (member_meshname, member_ampname)
                    VALUES (%s, %s) """
        params = (meshname, ampname)

        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while adding mesh member")
            self.dblock.release()
            return None
        self.db.closecursor()

        # update the site so it will fetch tests belonging to the new mesh
        self._update_last_modified_site(ampname)

        # update all sites that test to this mesh to include this target
        for schedule in self.get_destination_schedule(meshname, lock=False):
            self._update_last_modified_schedule(schedule["id"])

        self.dblock.release()

        return True
Example #16
    def update_event_filter(self, username, filtername, filterstring, email):
        """
        Replaces the filter string for an existing event filter.

        Parameters:
          username -- the user who the updated filter belongs to.
          filtername -- the name of the filter to be updated.
          filterstring -- a string containing stringified JSON that describes
                          the new filter options.

        Returns:
          the tuple (username, filtername) if the filter is successfully
          updated, or None if the filter doesn't exist or the update fails.
        """
        if self.disabled:
            return None

        query = """ UPDATE eventing.userfilters SET filter = %s, email = %s
                    WHERE user_id=%s AND filter_name=%s """
        params = (filterstring, email, username, filtername)
        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while updating event filter")
            self.dblock.release()
            return None
        self.dblock.release()
        return username, filtername
Example #17
    def _sitequery(self, query, params):
        """
        Performs a basic query for sites and returns a list of results.

        Parameters:

          query -- the query to perform, as a parameterised string
          params -- a tuple containing parameters to substitute into the query

        Returns:
          a list of results returned by the query, or None if the query fails
        """
        sites = []

        self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while querying for sites")
            self.dblock.release()
            return None

        for row in self.db.cursor.fetchall():
            sites.append({
                'ampname': row[0],
                'longname': row[1] if row[1] else row[0],
                'location': row[2] if row[2] else None,
                'description': row[3] if row[3] else None,
            })

        self.db.closecursor()
        self.dblock.release()
        return sites
Example #18
    def delete_event_filter(self, username, filtername=None):
        """
        Removes an existing event filter from the filter table.

        Parameters:
          username -- the user who owns the filter to be removed.
          filtername -- the name of the filter to be removed

        Returns:
          the tuple (username, filtername) if the filter is successfully
          removed from the filter table, or None if the removal fails.
        """
        if self.disabled:
            return username, filtername

        query = "DELETE FROM eventing.userfilters WHERE user_id=%s"
        params = [username]
        if filtername is not None:
            query += " AND filter_name=%s"
            params.append(filtername)
        self.dblock.acquire()
        if self.db.executequery(query, tuple(params)) == -1:
            log("Error while removing event filter")
            self.dblock.release()
            return None
        self.dblock.release()
        return username, filtername
Example #19
    def update_test(self, schedule_id, settings):
        changes = []
        params = []

        for option in self.SCHEDULE_OPTIONS:
            if option in settings:
                changes.append("schedule_%s=%%s" % option)
                params.append(settings[option])

        # no valid options were set, do nothing and report that we did it ok
        if len(changes) < 1:
            return True

        query = "UPDATE schedule SET " + ",".join(
            changes) + " WHERE schedule_id=%s"
        params.append(schedule_id)

        self.dblock.acquire()
        if self.db.executequery(query, tuple(params)) == -1:
            log("Error while updating test")
            self.dblock.release()
            return None
        count = self.db.cursor.rowcount
        self.db.closecursor()
        if count > 0:
            self._update_last_modified_schedule(schedule_id)
        self.dblock.release()
        return count > 0
Example #20
    def fetch_specific_event(self, stream, eventid):
        """
        Fetches a specific event in the database, given the stream ID and the
        event ID.

        Parameters:
          stream -- the stream that the requested event belongs to
          eventid -- the ID number of the requested event

        Returns:
          a dictionary describing the event in question, or None if an error
          occurs or no such event exists.
        """

        if self.disabled:
            return None

        self.dblock.acquire()

        stable = "eventing.events_str%s" % (stream)
        query = "SELECT * FROM " + stable
        query += " WHERE event_id = %s"
        params = (eventid, )

        if self.db.executequery(query, params) == -1:
            log("Error while querying for a specific event (%s %s)" % \
                    (stream, eventid))
            self.dblock.release()
            return None

        result = self.db.cursor.fetchone()
        self.dblock.release()
        if result is None:
            return None
        return dict(result)
Example #21
    def _meshquery(self, query, params, lock=True):
        """
        Performs a basic query for mesh members and returns a list of results.

        Parameters:

          query -- the query to perform, as a parameterised string
          params -- a tuple containing parameters to substitute into the query

        Returns:
          a list of results returned by the query, or None if the query fails
        """
        sites = []

        if lock:
            self.dblock.acquire()
        if self.db.executequery(query, params) == -1:
            log("Error while querying mesh members")
            if lock:
                self.dblock.release()
            return None

        for row in self.db.cursor.fetchall():
            sites.append(row['ampname'])
        self.db.closecursor()
        if lock:
            self.dblock.release()
        return sites
Example #22
    def group_to_labels(self, groupid, description, lookup=True):
        """
        Converts a group description string into a set of labels describing
        each of the lines that would need to be drawn on a graph for that group.
        """
        labels = []

        groupparams = self.parse_group_description(description)
        if groupparams is None:
            log("Failed to parse group description to generate legend label")
            return None

        baselabel = 'group_%s' % (groupid)

        for key, value in groupparams.items():
            if key in self.integerproperties:
                groupparams[key] = int(value)

        if lookup:
            streams = self.streammanager.find_streams(groupparams)
            if streams is None:
                log("Failed to find streams for label %s, %s" % \
                        (baselabel, self.collection_name))
                return None
        else:
            streams = []

        labels.append({
            'labelstring': baselabel,
            'streams': streams,
            'shortlabel': '%s' % (groupparams['destination'])
        })

        return labels
Example #23
    def _generate_family_label(self, baselabel, search, family, lookup):
        key = baselabel + "_" + family
        search['family'] = family.lower()
        if search['direction'] in self.dirlabels:
            shortlabel = family + " " + self.dirlabels[search['direction']]
        else:
            shortlabel = family

        if 'protocol' in search and search['protocol'] == 'http':
            shortlabel += " (as HTTP)"

        if lookup:
            streams = self.streammanager.find_streams(search)
            if streams is None:
                log("Failed to find streams for label %s, %s" % \
                        (key, self.collection_name))
                return None

            famstreams = [item[0] for item in streams]
        else:
            famstreams = []

        return {
            'labelstring': key,
            'streams': famstreams,
            'shortlabel': shortlabel
        }
Example #24
    def _cachefetch(self, key, errorstr):
        """
        Internal helper function for finding a cache entry.

        Parameters:
          key -- the cache key to search for.
          errorstr -- a string describing what is being fetched for error
                      reporting purposes.

        Returns:
          None if no cache entry is found, otherwise the data stored using
          the given key.

        If an error occurs while searching the cache, a warning will be
        printed and None will be returned.
        """

        result = None
        with self.mcpool.reserve() as mc:
            try:
                if key in mc:
                    result = mc.get(key)
            except pylibmc.SomeErrors as e:
                log("Warning: pylibmc error when searching for %s: %s" %
                    (key, errorstr))
                log(e)

        return result
Example #25
    def group_to_labels(self, groupid, description, lookup=True):
        labels = []

        groupparams = self.parse_group_description(description)
        if groupparams is None:
            log("Failed to parse group description to generate labels")
            return None

        baselabel = 'group_%s' % (groupid)
        search = self._group_to_search(groupparams)

        if groupparams['aggregation'] in ['IPV4', 'FAMILY']:
            nextlab = self._generate_label(baselabel, search, "IPv4", lookup)
            if nextlab is None:
                return None
            labels.append(nextlab)

        if groupparams['aggregation'] in ['IPV6', 'FAMILY']:
            nextlab = self._generate_label(baselabel, search, "IPv6", lookup)
            if nextlab is None:
                return None
            labels.append(nextlab)

        if groupparams['aggregation'] == "FULL":
            nextlab = self._generate_label(baselabel, search, None, lookup)
            if nextlab is None:
                return None
            labels.append(nextlab)

        return sorted(labels, key=itemgetter('shortlabel'))
Example #26
    def insert_aspath(self, stream, ts, result):
        filtered = {}
        for col in self.asdatacolumns:
            if col["name"] in result:
                filtered[col["name"]] = result[col["name"]]
            else:
                filtered[col["name"]] = None

        try:
            self.db.insert_data(self.asdatatable, "amp-astraceroute", stream,
                                ts, filtered, {'aspath': 'varchar[]'})
        except DBQueryException as e:
            logger.log("Failed to insert new data for %s stream %d" % \
                    ("amp-astraceroute", stream))
            logger.log("Error was: %s" % (str(e)))
            raise

        colid = self._get_astraceroute_collection_id()

        if self.exporter is not None and colid > 0:
            filtered['aspath'] = result['aspath']
            filtered['aspath_length'] = result['aspathlen']
            filtered['uniqueas'] = result["uniqueas"]
            filtered['responses'] = result["responses"]
            self.exporter.publishLiveData(colid, stream, ts, filtered)
Example #27
    def create_properties_from_list(self, options, proplist):
        """
        Converts an ordered list of group properties into a dictionary
        with the property names as keys.

        Parameters:
          options -- the list of properties describing the group. The
                     properties MUST be in the same order as they are
                     listed in proplist.
          proplist -- the list of group properties, in order.

        Returns:
          a dictionary describing the group or None if no dictionary
          can be formed using the provided property list
        """
        if proplist is None:
            # Child collection hasn't provided any group property list!
            return None

        if len(options) > len(proplist):
            log("Cannot convert list of properties for %s -- too many properties" % (self.collection_name))
            return None

        props = {}
        for i in range(0, len(options)):
            sp = proplist[i]
            if sp in self.integerproperties:
                props[sp] = int(options[i])
            else:
                props[sp] = self.convert_property(sp, options[i])

        return props
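
A standalone sketch of the same positional mapping, with a hypothetical property order and one hypothetical integer property:

# hypothetical ordering of group properties for some collection
proplist = ["source", "destination", "packet_size", "aggregation"]
integerproperties = ["packet_size"]

options = ["ampsrc", "ampdst", "84", "FAMILY"]

props = {}
for name, value in zip(proplist, options):
    # integer properties are cast, everything else is kept as supplied
    props[name] = int(value) if name in integerproperties else value

print(props)
# {'source': 'ampsrc', 'destination': 'ampdst',
#  'packet_size': 84, 'aggregation': 'FAMILY'}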
Example #28
    def get_legend_label(self, description):
        groupparams = self.parse_group_description(description)
        if groupparams is None:
            log("Failed to parse group description to generate %s legend label"
                % (self.collection_name))
            return None, ""

        if groupparams['family'] == "IPV4":
            family = "IPv4"
        elif groupparams['family'] == "IPV6":
            family = "IPv6"
        elif groupparams['family'] == "FAMILY":
            family = "IPv4/IPv6"
        else:
            family = ""

        if groupparams['direction'] == "BOTH":
            dirstr = ""
        elif groupparams['direction'] == "IN":
            dirstr = " Inward"
        else:
            dirstr = " Outward"

        label = "%s : %s, %s %sB pkts, %s usec apart (DSCP %s)" % (
            groupparams['source'], groupparams['destination'],
            groupparams['packet_count'], groupparams['packet_size'],
            groupparams['packet_spacing'], groupparams['dscp'])
        return label, "%s%s" % (family, dirstr)
Example #29
    def insert_data(self, stream, ts, result, casts=None):
        filtered = {}
        if casts is None:
            casts = {}

        for col in self.datacolumns:
            if col["name"] in result:
                filtered[col["name"]] = result[col["name"]]
            else:
                filtered[col["name"]] = None

        try:
            if self.influxdb:
                self.influxdb.insert_data(self.datatable,
                                          stream, ts, filtered, casts)
            else:
                self.db.insert_data(self.datatable, self.colname, stream, ts,
                                filtered, casts)
        except DBQueryException as e:
            logger.log("Failed to insert new data for %s stream %d" % \
                    (self.colname, stream))
            logger.log("Error was: %s" % (str(e)))
            raise

        # NOTE colname is actually unused by the exporter, so don't panic
        # that we export a collection id number for streams and a string
        # for live data.
        # TODO get rid of this to avoid confusion

        colid = self._get_collection_id()

        if self.exporter is not None and colid > 0:
            self.exporter.publishLiveData(colid, stream, ts, filtered)
Example #30
    def _get_nntsc_message(self):
        """
        Waits for NNTSC to send a response to a query. Will block until
        a complete message arrives.

        Returns None if an error occurs, otherwise will return a tuple
        representing the NNTSC message. The first element of the tuple
        is the message type and the second element is a dictionary
        containing the message contents.
        """
        if self.client is None:
            return None

        while True:
            msg = self.client.parse_message()

            if msg[0] == -1:
                received = self.client.receive_message()
                if received <= 0:
                    log("Failed to receive message from NNTSC")
                    self._disconnect()
                    return None
                continue

            return msg