Example #1
    def get(self, etype, key, limit=None, dt_from=None):
        """
        Return all events where the given IP address appears among the Sources.
        
        Arguments:
        etype   entity type (str), must be 'ip'
        key     entity identifier (str), e.g. '192.0.2.42'
        limit   max number of returned events
        dt_from minimal value of DetectTime (datetime)
        
        Return a list of IDEA messages (strings).
        
        Raise BadEntityType if etype is not 'ip'.
        """
        if etype != 'ip':
            raise BadEntityType("etype must be 'ip'")

        cur = self.db.cursor()
        if dt_from is None:
            sql = "SELECT e.idea FROM events_sources as es INNER JOIN events as e ON es.message_id = e.id WHERE es.source_ip = %s ORDER BY es.detecttime DESC LIMIT %s"
            data = (Inet(key), limit)
        elif isinstance(dt_from, datetime.datetime):
            sql = "SELECT e.idea FROM events_sources as es INNER JOIN events as e ON es.message_id = e.id WHERE es.source_ip = %s AND es.detecttime >= %s ORDER BY es.detecttime DESC LIMIT %s"
            data = (Inet(key), dt_from, limit)
        else:
            raise TypeError("dt_from must be datetime instance")
        cur.execute(sql, data)
        self.db.commit()  # Every query automatically opens a transaction; close it.

        result = cur.fetchall()
        result = [row[0] for row in result]

        return result
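
For context, a call against this method might look like the following minimal sketch (the EventDatabase name and its construction are assumptions for illustration, not part of the example):

    import datetime

    # Hypothetical storage class wrapping the psycopg2 connection.
    storage = EventDatabase(db_connection)
    since = datetime.datetime(2024, 1, 1)
    # Fetch up to 10 IDEA messages where 192.0.2.42 appears among the Sources.
    ideas = storage.get('ip', '192.0.2.42', limit=10, dt_from=since)
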
Example #2
    def test_inet_conform(self):
        i = Inet("192.168.1.0/24")
        a = psycopg2.extensions.adapt(i)
        a.prepare(self.conn)
        self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")

        # adapts ok with unicode too
        i = Inet(u"192.168.1.0/24")
        a = psycopg2.extensions.adapt(i)
        a.prepare(self.conn)
        self.assertQuotedEqual(a.getquoted(), b"'192.168.1.0/24'::inet")
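
The adapt/prepare/getquoted round trip exercised by this test can be reproduced outside the test suite; a minimal sketch, assuming a reachable database (the DSN is a placeholder):

    import psycopg2
    import psycopg2.extensions
    from psycopg2.extras import Inet

    conn = psycopg2.connect("dbname=test")  # hypothetical DSN
    a = psycopg2.extensions.adapt(Inet("192.168.1.0/24"))
    a.prepare(conn)       # quoting rules depend on the connection
    print(a.getquoted())  # typically b"'192.168.1.0/24'::inet"
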
Example #3
    def test_inet_conform(self):
        from psycopg2.extras import Inet
        i = Inet("192.168.1.0/24")
        a = psycopg2.extensions.adapt(i)
        a.prepare(self.conn)
        self.assertEqual(filter_scs(self.conn, b"E'192.168.1.0/24'::inet"),
                         a.getquoted())

        # adapts ok with unicode too
        i = Inet("192.168.1.0/24")
        a = psycopg2.extensions.adapt(i)
        a.prepare(self.conn)
        self.assertEqual(filter_scs(self.conn, b"E'192.168.1.0/24'::inet"),
                         a.getquoted())
Example #4
    def store_paths(self):
        """
        Iterates over :py:attr:`.sane_paths` and inserts/updates the appropriate prefix/asn/community objects
        into the database.

        :return:
        """
        db = self.db
        session = db.session
        log.info('Saving sane paths to Postgres.')
        total_prefixes = len(self.sane_paths)
        for i, p in enumerate(self.sane_paths):
            if (i % CHUNK_SIZE) == 0:
                log.info('Saved %s out of %s prefixes.', i, total_prefixes)
                session.commit()
            p_dic = dict(p)
            if p.source_asn in self._cache['asn_in_db']:
                asn = self._cache['asn_in_db'][p.source_asn]
            else:
                asn = ASN.query.filter_by(asn=p.source_asn).first()
                if not asn:
                    asn = ASN(asn=p.source_asn,
                              as_name=self.get_as_name(p.source_asn))
                    session.add(asn)
                self._cache['asn_in_db'][p.source_asn] = asn

            communities = list(p_dic['communities'])

            del p_dic['family']
            del p_dic['source_asn']
            del p_dic['first_hop']
            del p_dic['communities']

            for x, nh in enumerate(p_dic['next_hops']):
                p_dic['next_hops'][x] = Inet(nh)

            pfx = Prefix.query.filter_by(
                source_asn=asn, prefix=p_dic['prefix']).first()  # type: Prefix
            if not pfx:
                pfx = Prefix(**p_dic,
                             source_asn=asn,
                             last_seen=datetime.utcnow())
            for k, v in p_dic.items():
                setattr(pfx, k, v)
            pfx.last_seen = datetime.utcnow()

            for c in communities:
                if c in self._cache['community_in_db']:
                    comm = self._cache['community_in_db'][c]
                else:
                    comm = Community.query.filter_by(id=c).first()
                    if not comm:
                        comm = Community(id=c)
                    self._cache['community_in_db'][c] = comm
                pfx.communities.append(comm)

            session.add(pfx)

        session.commit()
        log.info('Finished saving paths.')
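
Wrapping each next hop in Inet suggests that next_hops is a PostgreSQL inet[] array column. A SQLAlchemy model along these lines would support that; this is a sketch under that assumption, not the project's actual schema:

    from sqlalchemy import Column, Integer, String
    from sqlalchemy.dialects.postgresql import ARRAY, INET
    from sqlalchemy.ext.declarative import declarative_base

    Base = declarative_base()

    class Prefix(Base):
        __tablename__ = 'prefix'
        id = Column(Integer, primary_key=True)
        prefix = Column(String, nullable=False)
        # inet[] column; psycopg2 adapts Inet objects element by element.
        next_hops = Column(ARRAY(INET))
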
Example #5
    def update_slave(self, slave):
        slave_nickname = slave.nickname
        slave_ip = slave.ip
        slave_password = md5(slave.password.encode()).hexdigest()
        slave_owner = slave.owner

        try:
            query = "UPDATE slaves SET (slave_ip, slave_owner, slave_password) = (%s, %s, %s) " \
                    "WHERE slave_nickname = %s;"
            self._c.execute(
                query,
                [Inet(slave_ip), slave_owner, slave_password, slave_nickname])
            self._conn.commit()
        # except IntegrityError:
        #     self._conn.rollback()
        #     raise ValueError("Slave already exists")
        except DataError as e:
            self._conn.rollback()
            if len(slave_nickname) > 50:
                raise ValueError(self._rm.get_string("slave_name_too_long"))
            else:
                raise e
Example #6
    def add_slave(self, slave):
        slave_nickname = slave.nickname
        slave_ip = slave.ip
        slave_password = md5(slave.password.encode()).hexdigest()
        slave_owner = slave.owner
        try:
            query = "INSERT INTO slaves (slave_nickname, slave_ip, slave_owner, slave_password) " \
                    "VALUES (%s, %s, %s, %s);"
            self._c.execute(
                query,
                [slave_nickname,
                 Inet(slave_ip), slave_owner, slave_password])
            self._conn.commit()
        except IntegrityError:
            self._conn.rollback()
            raise ValueError("Slave already exists")
        except DataError as e:
            self._conn.rollback()
            if len(slave_nickname) > 50:
                raise ValueError(self._rm.get_string("slave_name_too_long"))
            else:
                raise e
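
The DataError branch checks len(slave_nickname) > 50, which implies a VARCHAR(50) nickname column, and the Inet wrapper implies an inet column for the address. A table definition consistent with both methods might look like this (the exact types are assumptions inferred from the code):

    import psycopg2

    conn = psycopg2.connect("dbname=slaves_db")  # hypothetical DSN
    cur = conn.cursor()
    # Hypothetical schema inferred from add_slave/update_slave.
    cur.execute("""
        CREATE TABLE IF NOT EXISTS slaves (
            slave_nickname VARCHAR(50) PRIMARY KEY, -- len > 50 raises DataError
            slave_ip       INET NOT NULL,           -- written via Inet(slave_ip)
            slave_owner    TEXT,
            slave_password CHAR(32)                 -- hex MD5 digest
        )
    """)
    conn.commit()
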
Example #7
    def adapt_ipaddressfield_value(self, value):
        if value:
            return Inet(value)
        return None
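
This is the hook Django's PostgreSQL backend uses to adapt IP values before they reach the driver, so any field stored as inet passes through it. A minimal model sketch (the model name is an assumption):

    from django.db import models

    class Host(models.Model):
        # GenericIPAddressField maps to the PostgreSQL inet type; on save,
        # the backend routes the value through adapt_ipaddressfield_value(),
        # which wraps it in Inet.
        address = models.GenericIPAddressField()
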
Example #8
    def put(self, ideas):
        """
        Store IDEA messages into the database.
        
        Arguments:
        ideas    list of IDEA messages parsed into Python-native structures
        """
        def idea2sqlvalues(idea):
            # Auxiliary function parsing an IDEA message into the VALUES part of the SQL query
            # Get ID
            try:
                id = idea['ID']
            except KeyError:
                self.log.error("ID field not found, skipping...")
                return None

            # Get all source and target IPv4 addresses in the IDEA message
            sources = [
                ip for src in idea.get('Source', [])
                for ip in src.get('IP4', [])
            ]
            targets = [
                ip for tgt in idea.get('Target', [])
                for ip in tgt.get('IP4', [])
            ]

            # Parse timestamps
            try:
                detecttime = parse_rfc_time(idea['DetectTime'])
            except KeyError:
                self.log.error(
                    "Message ID {}: DetectTime not found, skipping...".format(
                        idea.get('ID', 'N/A')))
                return None
            except ValueError:
                self.log.error(
                    "Message ID {}: Unknown format of DetectTime, skipping ..."
                    .format(idea.get('ID', 'N/A')))
                return None

            # If EventTime is not present, try WinStartTime instead (similarly for CeaseTime and WinEndTime)
            starttime = idea.get('EventTime', idea.get('WinStartTime', None))
            if starttime:
                starttime = parse_rfc_time(starttime)
            endtime = idea.get('CeaseTime', idea.get('WinEndTime', None))
            if endtime:
                endtime = parse_rfc_time(endtime)

            self.log.debug("New event: %s, %s, %s, %s, %s, %s\n%s", id,
                           sources, targets, detecttime, starttime, endtime,
                           idea)
            #print("New event: %s, %s, %s, %s, %s, %s\n%s" % (id, sources, targets, detecttime, starttime, endtime, idea))

            return (id, sources, targets, detecttime, starttime, endtime, idea)

        # Handle \u0000 characters in the Attach.Content field.
        # The \u0000 char can't be stored in PostgreSQL, so encode the attachment into base64.
        for idea in ideas:
            for attachment in idea.get('Attach', []):
                # TEMPORARY/FIXME:
                # one detector sends 'data' instead of 'Content', fix it:
                if 'data' in attachment and 'Content' not in attachment:
                    attachment['Content'] = attachment['data']
                    del attachment['data']

                if ('Content' in attachment and 'ContentEncoding' not in attachment
                        and '\u0000' in attachment['Content']):
                    self.log.info(
                        "Attachment of IDEA message {} contains '\\u0000' char - converting attachment to base64."
                        .format(idea.get('ID', '???')))
                    # encode to bytes, then to b64 and back to str
                    attachment['Content'] = base64.b64encode(
                        str(attachment['Content']).encode('utf-8')).decode(
                            'ascii')
                    attachment['ContentEncoding'] = 'base64'

        # Parse all messages at once; idea2sqlvalues returns None for
        # unparseable messages and filter() drops them.
        values = list(filter(None, map(idea2sqlvalues, ideas)))

        # Get aggregated lists of values to write to the events, events_sources and events_targets tables
        vals_events = [
            (id, None, None, detecttime, starttime, endtime, Json(idea))
            for (id, _, _, detecttime, starttime, endtime, idea) in values
        ]
        vals_sources = [(Inet(source), id, detecttime)
                        for (id, sources, _, detecttime, _, _, _) in values
                        for source in sources]
        vals_targets = [(Inet(target), id, detecttime)
                        for (id, _, targets, detecttime, _, _, _) in values
                        for target in targets]

        # Try to store all the records to the database
        cur = self.db.cursor()
        try:
            # execute_values should be much faster than many individual inserts
            execute_values(
                cur,
                "INSERT INTO events (id, sources, targets, detecttime, starttime, endtime, idea) VALUES %s",
                vals_events, "(%s, %s, %s, %s, %s, %s, %s)", 100)
            execute_values(
                cur,
                "INSERT INTO events_sources (source_ip, message_id, detecttime) VALUES %s",
                vals_sources, "(%s, %s, %s)", 100)
            execute_values(
                cur,
                "INSERT INTO events_targets (target_ip, message_id, detecttime) VALUES %s",
                vals_targets, "(%s, %s, %s)", 100)
            self.db.commit()
        except Exception as e:
            self.log.error(str(e))
            if len(values) == 1:
                self.db.rollback()
                return
            # If there was more than one message in the batch, try it again, one-by-one
            self.log.error(
                "There was an error during inserting a batch of {} IDEA messages, performing rollback of the transaction and trying to put the messages one-by-one (expect repetition of the error message) ..."
                .format(len(values)))
            # Rollback all non-committed changes
            self.db.rollback()

            # Try it again, one by one
            # (otherwise we could throw away a whole bunch of messages because of a single bad one)
            cnt_success = 0
            for id, sources, targets, detecttime, starttime, endtime, idea in values:
                cur = self.db.cursor()
                try:
                    cur.execute(
                        """
                        INSERT INTO events 
                        (id, sources, targets, detecttime, starttime, endtime, idea)
                        VALUES (%s, %s, %s, %s, %s, %s, %s)
                        """,
                        (id, None, None, detecttime, starttime, endtime,
                         Json(idea)))
                    for source in sources:
                        cur.execute(
                            "INSERT INTO events_sources (source_ip, message_id, detecttime) VALUES (%s, %s, %s)",
                            (Inet(source), id, detecttime))
                    for target in targets:
                        cur.execute(
                            "INSERT INTO events_targets (target_ip, message_id, detecttime) VALUES (%s, %s, %s)",
                            (Inet(target), id, detecttime))
                    self.db.commit()  # Close transaction (the first command opens it automatically)
                    cnt_success += 1
                except Exception as e:
                    self.log.error(str(e))
                    if str(e).startswith("duplicate"):
                        self.log.error("IDEA: " + str(idea))
                    self.db.rollback()  # Rollback all non-committed changes
            self.log.error(
                "{} messages successfully inserted.".format(cnt_success))
Example #9
    def value_to_db_ipaddress(self, value):
        if value:
            return Inet(value)
        return None
Example #10
    async def _store_path(self, p: SanePath) -> Prefix:
        p_dic = dict(p)
        # Obtain AS name via in-memory cache, database cache, or DNS lookup
        await self.get_as_name(p.source_asn)
        asn_id = p.source_asn
        communities = list(p_dic['communities'])
        del p_dic['family']
        del p_dic['source_asn']
        del p_dic['first_hop']
        del p_dic['communities']
        for x, nh in enumerate(p_dic['next_hops']):
            p_dic['next_hops'][x] = Inet(nh)

        async with self.pg_pool.acquire() as conn:
            pfx = await conn.fetchrow(
                "SELECT * FROM prefix WHERE asn_id = $1 AND prefix.prefix = $2 LIMIT 1;",
                asn_id, p_dic['prefix'])  # type: Record

            age = convert_datetime(p_dic.get('age'),
                                   fail_empty=False,
                                   if_empty=None)
            if age is not None:
                age = age.replace(tzinfo=None)
            if not pfx:
                await conn.execute(
                    "INSERT INTO prefix ("
                    "  asn_id, asn_path, prefix, next_hops, neighbor, ixp, last_seen, "
                    "  age, created_at, updated_at"
                    ")"
                    "VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10);",
                    asn_id, p_dic.get('asn_path', []), p_dic['prefix'],
                    p_dic.get('next_hops', []), p_dic.get('neighbor'),
                    p_dic.get('ixp'), datetime.utcnow(), age,
                    datetime.utcnow(), datetime.utcnow())

                pfx = await conn.fetchrow(
                    "SELECT * FROM prefix WHERE asn_id = $1 AND prefix.prefix = $2 LIMIT 1;",
                    asn_id, p_dic['prefix'])
            else:
                await conn.execute(
                    "UPDATE prefix SET asn_path = $1, next_hops = $2, neighbor = $3, ixp = $4, last_seen = $5, "
                    "age = $6, updated_at = $7 WHERE prefix = $8 AND asn_id = $9;",
                    p_dic.get('asn_path', []), p_dic.get('next_hops', []),
                    p_dic.get('neighbor'), p_dic.get('ixp'), datetime.utcnow(),
                    age, datetime.utcnow(), p_dic['prefix'], asn_id)

            for c in communities:  # type: LargeCommunity
                if c not in self._cache['community_in_db']:
                    try:
                        await conn.execute(
                            "insert into community (id, created_at, updated_at) values ($1, $2, $2);",
                            c, datetime.utcnow())
                    except asyncpg.UniqueViolationError:
                        pass

                try:
                    await conn.execute(
                        "insert into prefix_communities (prefix_id, community_id) values ($1, $2);",
                        pfx['id'], c)
                except asyncpg.UniqueViolationError:
                    pass
        return pfx
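
Note that asyncpg encodes Python's standard ipaddress objects to the PostgreSQL inet type natively, so the psycopg2 Inet wrapper is not strictly needed on this code path. An equivalent sketch without it (DSN and table are assumptions):

    import asyncio
    import ipaddress
    import asyncpg

    async def main():
        conn = await asyncpg.connect('postgresql://localhost/bgp')  # hypothetical DSN
        # asyncpg converts ipaddress objects to inet without a wrapper class.
        next_hops = [ipaddress.ip_address('203.0.113.1')]
        await conn.execute(
            'UPDATE prefix SET next_hops = $1 WHERE prefix = $2;',
            next_hops, '203.0.113.0/24')
        await conn.close()

    asyncio.run(main())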