Example 1
    def process(self):
        event = self.receive_message()

        data = event.to_json(hierarchical=self.parameters.hierarchical_output)
        try:
            while True:
                if self.separator:
                    self.con.sendall(utils.encode(data) + self.separator)
                else:
                    d = utils.encode(data)
                    msg = struct.pack('>I', len(d)) + d
                    self.con.sendall(msg)
                if self.to_intelmq:
                    response = self.con.recv(2)
                    if response == b"Ok":
                        break
                    self.logger.warning("Message not delivered, retrying.")
                    time.sleep(1)
                else:
                    break
        except socket.error as e:
            self.logger.exception("Reconnecting, %s", e)
            self.con.close()
            self.connect()
        except AttributeError:
            self.logger.info('Reconnecting.')
            self.connect()
        else:
            self.acknowledge_message()
Example 2
 def send(self, message):
     """Sends a message to the destination queues"""
     for destination_queue in self.destination_queues:
         if destination_queue in self.state:
             self.state[destination_queue].append(utils.encode(message))
         else:
             self.state[destination_queue] = [utils.encode(message)]
Example 3
    def process(self):
        event = self.receive_message()

        data = event.to_json(hierarchical=self.hierarchical_output)
        try:
            while True:
                if self.separator:
                    self.con.sendall(utils.encode(data) + self.separator)
                else:
                    d = utils.encode(data)
                    msg = struct.pack('>I', len(d)) + d
                    self.con.sendall(msg)
                if self.to_intelmq:
                    response = self.con.recv(2)
                    if response == b"Ok":
                        break
                    self.logger.warning("Message not delivered, retrying.")
                    time.sleep(1)
                else:
                    break
        except socket.error as e:
            self.logger.exception("Reconnecting, %s", e)
            self.con.close()
            self.connect()
        except AttributeError:
            self.logger.info('Reconnecting.')
            self.connect()
        else:
            self.acknowledge_message()
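The examples above frame each JSON event either with a configurable separator or with a 4-byte big-endian length prefix (struct.pack('>I', ...)), and, when the counterpart is intelmq, wait for a two-byte b"Ok" acknowledgment. A minimal, self-contained sketch of that length-prefix framing (illustration only, not intelmq code) looks like this:

    import struct

    def frame(payload: bytes) -> bytes:
        # 4-byte big-endian length prefix, then the payload, as in the sendall() branch above
        return struct.pack('>I', len(payload)) + payload

    def unframe(buffer: bytes) -> bytes:
        # counterpart a receiver could use to recover the payload (hypothetical helper)
        (length,) = struct.unpack('>I', buffer[:4])
        return buffer[4:4 + length]

    data = b'{"classification.type": "phishing"}'
    assert unframe(frame(data)) == data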
Example 4
    def hash(self, *, filter_keys=frozenset(), filter_type="blacklist"):
        """Return a SHA256 hash of the message as a hexadecimal string.
        The hash is computed over almost all key/value pairs. Depending on
        filter_type parameter (blacklist or whitelist), the keys defined in
        filter_keys_list parameter will be considered as the keys to ignore
        or the only ones to consider. If given, the filter_keys_list
        parameter should be a set.

        'time.observation' will always be ignored.
        """

        if filter_type not in ["whitelist", "blacklist"]:

            raise exceptions.InvalidArgument('filter_type',
                                             got=filter_type,
                                             expected=['whitelist', 'blacklist'])

        event_hash = hashlib.sha256()

        for key, value in sorted(self.items()):
            if "time.observation" == key:
                continue

            if filter_type == "whitelist" and key not in filter_keys:
                continue

            if filter_type == "blacklist" and key in filter_keys:
                continue

            event_hash.update(utils.encode(key))
            event_hash.update(b"\xc0")
            event_hash.update(utils.encode(repr(value)))
            event_hash.update(b"\xc0")

        return event_hash.hexdigest()
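A standalone sketch of the same filtering logic over a plain dict (not the intelmq Event class) may help show how filter_type and filter_keys interact; 'time.observation' is skipped in every case, so two events that differ only in observation time yield the same digest:

    import hashlib

    def dict_hash(data, *, filter_keys=frozenset(), filter_type="blacklist"):
        # mirrors the loop above: sort keys, skip 'time.observation', apply the filter,
        # then feed key and repr(value), each followed by b"\xc0", into SHA256
        digest = hashlib.sha256()
        for key, value in sorted(data.items()):
            if key == "time.observation":
                continue
            if filter_type == "whitelist" and key not in filter_keys:
                continue
            if filter_type == "blacklist" and key in filter_keys:
                continue
            digest.update(key.encode())
            digest.update(b"\xc0")
            digest.update(repr(value).encode())
            digest.update(b"\xc0")
        return digest.hexdigest()

    event = {"source.ip": "192.0.2.1", "classification.type": "phishing",
             "feed.name": "Example Feed", "time.observation": "2015-01-01T00:00:00+00:00"}
    print(dict_hash(event, filter_keys={"feed.name"}))                           # blacklist: drop feed.name
    print(dict_hash(event, filter_keys={"source.ip"}, filter_type="whitelist"))  # keep only source.ip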
Example 5
    def hash(self, *, filter_keys=frozenset(), filter_type="blacklist"):
        """Return a SHA256 hash of the message as a hexadecimal string.
        The hash is computed over almost all key/value pairs. Depending on
        filter_type parameter (blacklist or whitelist), the keys defined in
        filter_keys_list parameter will be considered as the keys to ignore
        or the only ones to consider. If given, the filter_keys_list
        parameter should be a set.

        'time.observation' will always be ignored.
        """

        if filter_type not in ["whitelist", "blacklist"]:

            raise exceptions.InvalidArgument('filter_type',
                                             got=filter_type,
                                             expected=['whitelist', 'blacklist'])

        event_hash = hashlib.sha256()

        for key, value in sorted(self.items()):
            if "time.observation" == key:
                continue

            if filter_type == "whitelist" and key not in filter_keys:
                continue

            if filter_type == "blacklist" and key in filter_keys:
                continue

            event_hash.update(utils.encode(key))
            event_hash.update(b"\xc0")
            event_hash.update(utils.encode(repr(value)))
            event_hash.update(b"\xc0")

        return event_hash.hexdigest()
Example 6
 def send(self, message):
     """Sends a message to the destination queues"""
     for destination_queue in self.destination_queues:
         if destination_queue in self.state:
             self.state[destination_queue].append(utils.encode(message))
         else:
             self.state[destination_queue] = [utils.encode(message)]
Example 7
    def send(self, message, path="_default", path_permissive=False):
        """Sends a message to the destination queues"""
        if path not in self.destination_queues and path_permissive:
            return

        for destination_queue in self.destination_queues[path]:
            if destination_queue in self.state:
                self.state[destination_queue].append(utils.encode(message))
            else:
                self.state[destination_queue] = [utils.encode(message)]
Example 8
    def send(self, message, path="_default", path_permissive=False):
        """Sends a message to the destination queues"""
        if path not in self.destination_queues and path_permissive:
            return

        for destination_queue in self.destination_queues[path]:
            if destination_queue in self.state:
                self.state[destination_queue].append(utils.encode(message))
            else:
                self.state[destination_queue] = [utils.encode(message)]
Example 9
    def __hash__(self):
        event_hash = hashlib.sha256()

        for key, value in sorted(self.items()):
            if "time.observation" == key:
                continue

            event_hash.update(utils.encode(key))
            event_hash.update(b"\xc0")
            event_hash.update(utils.encode(repr(value)))
            event_hash.update(b"\xc0")

        return int(event_hash.hexdigest(), 16)
Example 10
    def __hash__(self):
        event_hash = hashlib.sha256()

        for key, value in sorted(self.items()):
            if "time.observation" == key:
                continue

            event_hash.update(utils.encode(key))
            event_hash.update(b"\xc0")
            event_hash.update(utils.encode(repr(value)))
            event_hash.update(b"\xc0")

        return int(event_hash.hexdigest(), 16)
Example 11
    def send(self, message, path="_default", path_permissive=False):
        if path not in self.destination_queues and path_permissive:
            return

        message = utils.encode(message)

        try:
            queues = self.destination_queues[path]
        except KeyError as exc:
            raise exceptions.PipelineError(exc)
        if self.load_balance:
            queues = [queues[self.load_balance_iterator]]
            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(self.destination_queues[path]):
                self.load_balance_iterator = 0

        for destination_queue in queues:
            try:
                self.pipe.lpush(destination_queue, message)
            except Exception as exc:
                if 'Cannot assign requested address' in exc.args[0] or \
                        "OOM command not allowed when used memory > 'maxmemory'." in exc.args[0]:
                    raise MemoryError(exc.args[0])
                elif 'Redis is configured to save RDB snapshots, but is currently not able to persist on disk' in exc.args[0]:
                    raise IOError(28, 'No space left on device or in memory. Redis can\'t save its snapshots. '
                                      'Look at redis\'s logs.')
                raise exceptions.PipelineError(exc)
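When load_balance is enabled, each call picks exactly one queue for the given path and advances a wrapping iterator. A small self-contained sketch of that round-robin selection (a stand-in list replaces the Redis lpush) behaves like this:

    class RoundRobinSender:
        """Hypothetical stand-in that only models the queue selection, not Redis."""

        def __init__(self, destination_queues):
            self.destination_queues = destination_queues   # e.g. {"_default": ["q1", "q2"]}
            self.load_balance_iterator = 0
            self.sent = []                                  # records (queue, message) pairs

        def send(self, message, path="_default"):
            queues = self.destination_queues[path]
            queue = queues[self.load_balance_iterator]
            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(queues):
                self.load_balance_iterator = 0              # wrap around, as in the code above
            self.sent.append((queue, message))

    sender = RoundRobinSender({"_default": ["q1", "q2"]})
    for i in range(4):
        sender.send("msg%d" % i)
    print(sender.sent)   # [('q1', 'msg0'), ('q2', 'msg1'), ('q1', 'msg2'), ('q2', 'msg3')]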
Example 12
    def init(self):
        self.to_intelmq = self.counterpart_is_intelmq

        self.address = (self.ip, int(self.port))
        self.separator = utils.encode(
            self.separator) if self.separator is not None else None
        self.connect()
Example 13
    def process(self):
        report = self.receive_message()

        if report:
            report = encode(report)
            
            columns = ["source_time", "source_url", "source_ip", "source_reverse_dns", "malware", "__IGNORE__", "source_asn"]

            for row in unicodecsv.reader(StringIO(report), encoding='utf-8'):
                event = Event()

                for key, value in zip(columns, row):
                    
                    if key == "__IGNORE__":
                        continue

                    if key == "source_time":
                        value = value.replace('_', ' ')
                        value += " UTC"

                    if key == "malware":
                        value = value.lower()
                        
                    event.add(key, value)
                    
                event.add('feed', 'malwaredomainslist')
                event.add('feed_url', 'http://www.malwaredomainlist.com/updatescsv.php')
                event.add('type', 'malware')    # FIXME
                
                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                    
                self.send_message(event)
        self.acknowledge_message()
Example 14
    def send(self, message, path="_default", path_permissive=False):
        if path not in self.destination_queues and path_permissive:
            return

        message = utils.encode(message)

        try:
            queues = self.destination_queues[path]
        except KeyError as exc:
            raise exceptions.PipelineError(exc)
        if self.load_balance:
            queues = [queues[self.load_balance_iterator]]
            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(
                    self.destination_queues[path]):
                self.load_balance_iterator = 0

        for destination_queue in queues:
            try:
                self.pipe.lpush(destination_queue, message)
            except Exception as exc:
                if 'Cannot assign requested address' in exc.args[0] or \
                        "OOM command not allowed when used memory > 'maxmemory'." in exc.args[0]:
                    raise MemoryError(exc.args[0])
                elif 'Redis is configured to save RDB snapshots, but is currently not able to persist on disk' in exc.args[
                        0]:
                    raise IOError(
                        28,
                        'No space left on device or in memory. Redis can\'t save its snapshots. '
                        'Look at redis\'s logs.')
                raise exceptions.PipelineError(exc)
Example 15
    def send(self, message):
        message = utils.encode(message)
        if self.load_balance:
            destination_queue = self.destination_queues[
                self.load_balance_iterator]

            try:
                self.pipe.lpush(destination_queue, message)
            except Exception as exc:
                if 'Cannot assign requested address' in exc.args[0]:
                    raise MemoryError
                elif 'Redis is configured to save RDB snapshots, but is currently not able to persist on disk' in exc.args[0]:
                    raise IOError(28, 'No space left on device. Redis can\'t save its snapshots.')
                raise exceptions.PipelineError(exc)

            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(self.destination_queues):
                self.load_balance_iterator = 0

        else:
            for destination_queue in self.destination_queues:
                try:
                    self.pipe.lpush(destination_queue, message)
                except Exception as exc:
                    raise exceptions.PipelineError(exc)
Example 16
    def process(self):
        report = self.receive_message()
        
        if report:
            event = Event()
            report = encode(report)

            # columns according to https://www.phishtank.com/developer_info.php as of 2015/04/30:
            #   phish_id,url,phish_detail_url,submission_time,verified,verification_time,online,target
            # example:
            # 123456,http://www.example.com/,http://www.phishtank.com/phish_detail.php?phish_id=123456,2009-06-19T15:15:47+00:00,yes,2009-06-19T15:37:31+00:00,yes,1st National Example Bank
            columns = ["__IGNORE__", "source_url", "description_url", "source_time", "__IGNORE__", "__IGNORE__", "__IGNORE__", "target"]
            
            for row in unicodecsv.reader(StringIO(report), encoding='utf-8'):

                if "phish_id" in row:
                    continue		# skip header
                
                for key, value in zip(columns, row):

                    if key == "__IGNORE__":
                        continue
                    
                    event.add(key, value.strip())
                
                event.add('feed', 'phishtank')
                event.add('type', 'phishing')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                    
                self.send_message(event)
             
        self.acknowledge_message()
Example 17
    def send(self,
             message: str,
             path="_default",
             path_permissive=False) -> None:
        """
        In principle we could use AMQP's exchanges here but that architecture is incompatible
        to the format of our pipeline.conf file.
        """
        if path not in self.destination_queues and path_permissive:
            return

        message = utils.encode(message)
        try:
            queues = self.destination_queues[path]
        except KeyError as exc:
            raise exceptions.PipelineError(exc)
        if self.load_balance:
            queues = [queues[self.load_balance_iterator]]
            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(
                    self.destination_queues[path]):
                self.load_balance_iterator = 0

        for destination_queue in queues:
            self._send(destination_queue, message)
Example 18
    def send(self, message):
        message = utils.encode(message)
        if self.load_balance:
            destination_queue = self.destination_queues[
                self.load_balance_iterator]

            try:
                self.pipe.lpush(destination_queue, message)
            except Exception as exc:
                if 'Cannot assign requested address' in exc.args[0]:
                    raise MemoryError
                elif 'Redis is configured to save RDB snapshots, but is currently not able to persist on disk' in exc.args[0]:
                    raise IOError(28, 'No space left on device. Redis can\'t save its snapshots.')
                raise exceptions.PipelineError(exc)

            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(self.destination_queues):
                self.load_balance_iterator = 0

        else:
            for destination_queue in self.destination_queues:
                try:
                    self.pipe.lpush(destination_queue, message)
                except Exception as exc:
                    raise exceptions.PipelineError(exc)
Example 19
    def process(self):
        report = self.receive_message()
        
        if report:
            event = Event()
            report = encode(report)

            columns = ["__IGNORE__", "source_url", "description_url", "source_time", "__IGNORE__", "__IGNORE__", "__IGNORE__", "target"]
            
            for row in unicodecsv.reader(StringIO(report), encoding='utf-8'):

                if "phish_id" in row:
                    continue
                
                for key, value in zip(columns, row):

                    if key == "__IGNORE__":
                        continue
                    
                    event.add(key, value.strip())
                
                event.add('feed', 'phishtank')
                event.add('type', 'phishing')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)
                    
                self.send_message(event)
             
        self.acknowledge_message()
Example 20
    def init(self):
        self.to_intelmq = getattr(self.parameters, "counterpart_is_intelmq",
                                  False)

        self.address = (self.parameters.ip, int(self.parameters.port))
        self.separator = utils.encode(self.parameters.separator) if (hasattr(
            self.parameters, "separator")) else None
        self.connect()
Example 21
 def set(self, key, value, ttl=None):
     if ttl is None:
         ttl = self.ttl
     if isinstance(value, six.text_type):
         value = utils.encode(value)
     # backward compatibility (Redis v2.2)
     self.redis.setnx(key, value)
     self.redis.expire(key, ttl)
Example 22
 def set(self, key: str, value: Any, ttl: Optional[int]=None):
     if ttl is None:
         ttl = self.ttl
     if isinstance(value, str):
         value = utils.encode(value)
     # backward compatibility (Redis v2.2)
     self.redis.setnx(key, value)
     self.redis.expire(key, ttl)
Example 23
 def set(self, key, value, ttl=None):
     if ttl is None:
         ttl = self.ttl
     if isinstance(value, str):
         value = utils.encode(value)
     # backward compatibility (Redis v2.2)
     self.redis.setnx(key, value)
     self.redis.expire(key, ttl)
Example 24
 def send(self, rawdata):
     data = utils.encode(self.remove_control_char(rawdata) + '\n')
     try:
         self.udp.sendto(data, self.upd_address)
     except Exception:
         self.logger.exception('Failed to send message to %s:%s!',
                               self.udp_host, self.udp_port)
     else:
         self.acknowledge_message()
Example 25
 def cache_set(self, key: str, value: Any, ttl: Optional[int] = None):
     if ttl is None:
         ttl = self.redis_cache_ttl
     if isinstance(value, str):
         value = utils.encode(value)
     # backward compatibility (Redis v2.2)
     self.__redis.set(key, value)
     if self.redis_cache_ttl:
         self.__redis.expire(key, self.redis_cache_ttl)
Example 26
 def chunked_process_replacement(self):
     event = self.receive_message()
     data = event.to_json(hierarchical=self.parameters.hierarchical_output)
     d = utils.encode(data)
     msg = struct.pack('>I', len(d)) + d
     chunk_length = 40
     for chunk in [msg[i:i + chunk_length] for i in range(0, len(msg), chunk_length)]:
         self.con.sendall(chunk)
     self.con.recv(2)
Example 27
 def send(self, rawdata):
     data = utils.encode(self.remove_control_char(rawdata) + '\n')
     try:
         self.udp.sendto(data, self.upd_address)
     except Exception:
         self.logger.exception('Failed to send message to {}:{}!'
                               .format(self.udp_host, self.udp_port))
     else:
         self.acknowledge_message()
Example 28
 def chunked_process_replacement(self):
     event = self.receive_message()
     data = event.to_json(hierarchical=self.parameters.hierarchical_output)
     d = utils.encode(data)
     msg = struct.pack('>I', len(d)) + d
     chunk_length = 40
     for chunk in [msg[i:i + chunk_length] for i in range(0, len(msg), chunk_length)]:
         self.con.sendall(chunk)
     self.con.recv(2)
Example 29
 def send(self, rawdata):
     data = utils.encode(self.remove_control_char(rawdata) + '\n')
     try:
         self.udp.sendto(data, self.upd_address)
     except Exception:
         self.logger.exception('Failed to send message to %s:%s!',
                               self.udp_host, self.udp_port)
     else:
         self.acknowledge_message()
Example 30
    def hash(self, blacklist=frozenset()):
        """Return a sha256 hash of the message as a hexadecimal string.
        The hash is computed over almost all key/value pairs. The only
        keys omitted are 'time.observation' and all keys contained in
        the optional blacklist parameter. If given, the blacklist
        parameter should be a set.
        """
        event_hash = hashlib.sha256()

        for key, value in sorted(self.items()):
            if "time.observation" == key or key in blacklist:
                continue

            event_hash.update(utils.encode(key))
            event_hash.update(b"\xc0")
            event_hash.update(utils.encode(repr(value)))
            event_hash.update(b"\xc0")

        return event_hash.hexdigest()
Example 31
    def hash(self, blacklist=frozenset()):
        """Return a sha256 hash of the message as a hexadecimal string.
        The hash is computed over almost all key/value pairs. The only
        keys omitted are 'time.observation' and all keys contained in
        the optional blacklist parameter. If given, the blacklist
        parameter should be a set.
        """
        event_hash = hashlib.sha256()

        for key, value in sorted(self.items()):
            if "time.observation" == key or key in blacklist:
                continue

            event_hash.update(utils.encode(key))
            event_hash.update(b"\xc0")
            event_hash.update(utils.encode(repr(value)))
            event_hash.update(b"\xc0")

        return event_hash.hexdigest()
Example 32
 def process(self):
     event = self.receive_message()
     
     if event:
         event_data = unicode(event)
         event_data = encode(event_data)
         self.file.write(event_data)
         self.file.write("\n")
         self.file.flush()
     self.acknowledge_message()
Example 33
 def send_data(self, data):
     while True:
         try:
             self.con.send(utils.encode(data))
             self.con.sendall(b"\n")
             break
         except socket.error as exc:
             self.logger.error(exc.args[1] + ". Reconnecting..")
             self.con.close()
             self.connect()
         except AttributeError:
             self.connect()
Example 34
 def send_data(self, data):
     while True:
         try:
             self.con.send(utils.encode(data))
             self.con.sendall(b"")
             break
         except socket.error as exc:
             self.logger.error(exc.args[1] + ". Reconnecting..")
             self.con.close()
             self.connect()
         except AttributeError:
             self.connect()
Example 35
    def process(self):
        # self.logger.exception("EDCVARD TEST.")
        event = self.receive_message()

        data = event.to_json(hierarchical=self.parameters.hierarchical_output)
        try:
            if self.separator:
                self.con.sendall(utils.encode(data) + self.separator)
            else:
                d = utils.encode(data)
                msg = struct.pack('>I', len(d)) + d
                self.con.sendall(msg)
        except socket.error:
            # self.logger.exception("Reconnecting.")
            self.logger.info("Reconnecting!")
            self.con.close()
            self.connect()
        except AttributeError:
            self.logger.info('Reconnecting.')
            self.connect()
        else:
            self.acknowledge_message()
Example 36
    def process(self):
        event = self.receive_message()

        data = event.to_json(hierarchical=self.parameters.hierarchical_output)
        try:
            self.con.sendall(utils.encode(data) + self.separator)
        except socket.error as exc:
            self.logger.exception(exc.args[1] + ". Reconnecting..")
            self.con.close()
            self.connect()
        except AttributeError:
            self.logger.info('Reconnecting.')
            self.connect()
        else:
            self.acknowledge_message()
Example 37
    def process(self):
        event = self.receive_message()

        data = event.to_json(hierarchical=self.parameters.hierarchical_output)
        try:
            self.con.sendall(utils.encode(data) + self.separator)
        except socket.error as exc:
            self.logger.exception(exc.args[1] + ". Reconnecting..")
            self.con.close()
            self.connect()
        except AttributeError:
            self.logger.info('Reconnecting.')
            self.connect()
        else:
            self.acknowledge_message()
Example 38
    def send(self, message):
        message = utils.encode(message)
        if self.load_balance:
            destination_queue = self.destination_queues[self.load_balance_iterator]

            try:
                self.pipe.lpush(destination_queue, message)
            except Exception as exc:
                raise exceptions.PipelineError(exc)

            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(self.destination_queues):
                self.load_balance_iterator = 0

        else:
            for destination_queue in self.destination_queues:
                try:
                    self.pipe.lpush(destination_queue, message)
                except Exception as exc:
                    raise exceptions.PipelineError(exc)
Example 39
    def send(self, message: str, path="_default", path_permissive=False) -> None:
        """
        In principle we could use AMQP's exchanges here but that architecture is incompatible
        to the format of our pipeline.conf file.
        """
        if path not in self.destination_queues and path_permissive:
            return

        message = utils.encode(message)
        try:
            queues = self.destination_queues[path]
        except KeyError as exc:
            raise exceptions.PipelineError(exc)
        if self.load_balance:
            queues = [queues[self.load_balance_iterator]]
            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(self.destination_queues[path]):
                self.load_balance_iterator = 0

        for destination_queue in queues:
            self._send(destination_queue, message)
Example 40
    def send(self, message):
        message = utils.encode(message)
        if self.load_balance:
            destination_queue = self.destination_queues[
                self.load_balance_iterator]

            try:
                self.pipe.lpush(destination_queue, message)
            except Exception as exc:
                raise exceptions.PipelineError(exc)

            self.load_balance_iterator += 1
            if self.load_balance_iterator == len(self.destination_queues):
                self.load_balance_iterator = 0

        else:
            for destination_queue in self.destination_queues:
                try:
                    self.pipe.lpush(destination_queue, message)
                except Exception as exc:
                    raise exceptions.PipelineError(exc)
Example 41
    def process(self):
        report = self.receive_message()

        if report:
            report = encode(report)
            columns = {
                "phish_id": "__IGNORE__",
                "url": "source_url",
                "phish_detail_url": "description_url",
                "submission_time": "__IGNORE__",
                "verified": "__IGNORE__",
                "verification_time": "source_time",
                "online": "__IGNORE__",
                "target": "__IGNORE__"
            }

            for row in csv.DictReader(StringIO.StringIO(report)):
                event = Event()

                for key, value in row.items():
                    key = columns[key]

                    if key == "__IGNORE__":
                        continue

                    event.add(key, value.strip())

                event.add('feed', 'phishtank')
                event.add('type', 'phishing')

                event = utils.parse_source_time(event, "source_time")
                event = utils.generate_observation_time(event, "observation_time")
                event = utils.generate_reported_fields(event)

                self.send_message(event)

        self.acknowledge_message()
Example 42
 def set_input_queue(self, seq):
     """Setter for the input queue of this bot"""
     self.pipe.state["%s-input" % self.bot_id] = [utils.encode(text) for
                                                  text in seq]
Example 43
    def process(self):
        report = self.receive_message()
        report = encode(report)

        if report:
            columns = {
                "Router Name": "routername",
                "Country Code": "countrycode",
                "Bandwidth (KB/s)": "__IGNORE__",
                "Uptime (Hours)": "uptime",
                "IP Address": "ipaddress",
                "Hostname": "hostname",
                "ORPort": "orport",
                "DirPort": "dirport",
                "Flag - Authority": "__IGNORE__",
                "Flag - Exit": "flagexit",
                "Flag - Fast": "__IGNORE__",
                "Flag - Guard": "__IGNORE__",
                "Flag - Named": "__IGNORE__",
                "Flag - Stable": "__IGNORE__",
                "Flag - Running": "flagrunning",
                "Flag - Valid": "__IGNORE__",
                "Flag - V2Dir": "__IGNORE__",
                "Platform": "__IGNORE__",
                "Flag - Hibernating": "__IGNORE__",
                "Flag - Bad Exit": "__IGNORE__",
                "FirstSeen": "firstseen",
                "ASName": "asname",
                "ASNumber": "asnumber",
                "ConsensusBandwidth": "__IGNORE__",
                "OrAddress": "__IGNORE__"
            }

            rows = csv.DictReader(StringIO.StringIO(report))
            record_list = []
            for row in rows:
                event = Event()

                for key, value in row.items():
                    key = columns[key]
                    value = value.strip()

                    if (not value) or (value.lower() == 'none') or (value.lower() == 'n/a'):
                        continue

                    if key == "__IGNORE__":
                        continue

                    if key == "firstseen":
                        self.logger.info(value)
                        value = datetime.strptime(value, '%Y-%m-%d').strftime('%Y-%m-%d')

                    event.add(key, value)
                record_list.append(unicode(event))
            self.send_message(record_list)
            self.acknowledge_message()
Example 44
    def init(self):
        self.to_intelmq = getattr(self.parameters, "counterpart_is_intelmq", False)

        self.address = (self.parameters.ip, int(self.parameters.port))
        self.separator = utils.encode(self.parameters.separator) if (hasattr(self.parameters, "separator")) else None
        self.connect()
Example 45
 def init(self):
     self.address = (self.parameters.ip, int(self.parameters.port))
     self.separator = utils.encode(self.parameters.separator) if (hasattr(
         self.parameters, "separator")) else None
     self.connect()
Example 46
 def test_encode_unicode(self):
     """Tests if the decode can handle bytes."""
     self.assertEqual(SAMPLES['unicode'][0],
                      utils.encode(SAMPLES['unicode'][1]))
Example 47
 def test_encode_force(self):
     """ Test ASCII encoding enforcement. """
     self.assertEqual(b'fobar',
                      utils.encode('fo\xe4bar', encodings=('ascii', ),
                                   force=True))
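The test above expects utils.encode('fo\xe4bar', encodings=('ascii', ), force=True) to drop the character that ASCII cannot represent. A hypothetical re-implementation of that behaviour (an assumption about the semantics, not the intelmq implementation) could read:

    def encode_sketch(text, encodings=('utf-8',), force=False):
        # Try each encoding in turn; with force=True, fall back to dropping
        # characters the first encoding cannot represent (assumed semantics).
        for encoding in encodings:
            try:
                return text.encode(encoding)
            except UnicodeEncodeError:
                pass
        if force:
            return text.encode(encodings[0], errors='ignore')
        raise ValueError('Could not encode string with given encodings.')

    assert encode_sketch('fo\xe4bar', encodings=('ascii', ), force=True) == b'fobar'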
Example 48
 def init(self):
     self.address = (self.parameters.ip, int(self.parameters.port))
     self.separator = utils.encode(self.parameters.separator) if (hasattr(self.parameters, "separator")) else None
     self.connect()
Example 49
    def send(self, taxonomy, contact, query, incident_id, requestor=None):
        if not query:
            self.logger.error("No data!")
            return False
        if not requestor:
            requestor = contact

        # PREPARATION
        query = self.shrink_dict(query)
        ids = list(str(row["id"]) for row in query)

        subject = "{tax} in your network: {date}" "".format(
            date=datetime.datetime.now().strftime("%Y-%m-%d"), tax=lib.SUBJECT[taxonomy]
        )
        text = self.get_text(taxonomy)
        if six.PY2:
            csvfile = io.BytesIO()
        else:
            csvfile = io.StringIO()
        if lib.CSV_FIELDS:
            fieldnames = lib.CSV_FIELDS
        else:
            fieldnames = query[0].keys()  # send all
        writer = csv.DictWriter(
            csvfile,
            fieldnames=fieldnames,
            quoting=csv.QUOTE_MINIMAL,
            delimiter=str(";"),
            extrasaction="ignore",
            lineterminator="\n",
        )
        writer.writeheader()
        query_unicode = query
        if six.PY2:
            query = [
                {key: utils.encode(val) if isinstance(val, six.text_type) else val for key, val in row.items()}
                for row in query
            ]
        writer.writerows(query)
        # note this might contain UTF-8 chars! let's ignore utf-8 errors. sorry.
        if six.PY2:
            data = unicode(csvfile.getvalue(), "utf-8")
        else:
            data = csvfile.getvalue()
        attachment_text = data.encode("ascii", "ignore")
        attachment_lines = attachment_text.splitlines()

        if self.verbose:
            self.logger.info(text)

        showed_text = (
            "=" * 100
            + """
To: {to}
Subject: {subj}

{text}
    """.format(
                to=requestor, subj=subject, text=text
            )
        )
        showed_text_len = showed_text.count("\n")

        # SHOW DATA
        if self.table_mode and six.PY2:
            self.logger.error("Sorry, no table mode for ancient python versions!")
            self.table_mode = False
        elif self.table_mode and not six.PY2:
            if self.quiet:
                height = 80  # assume anything for quiet mode
            else:
                height = lib.getTerminalHeight() - 3 - showed_text_len
            csvfile.seek(0)
            if len(query) > height:
                with tempfile.NamedTemporaryFile(mode="w+") as handle:
                    handle.write(showed_text + "\n")
                    handle.write(tabulate.tabulate(query, headers="keys", tablefmt="psql"))
                    handle.seek(0)
                    subprocess.call(["less", handle.name])
            else:
                self.logger.info(showed_text)
                self.logger.info(tabulate.tabulate(query_unicode, headers="keys", tablefmt="psql"))
        else:
            if self.quiet:
                height = 80
            else:
                height = lib.getTerminalHeight() - 4
            if 5 + len(query) > height:  # cut query too, 5 is length of text
                self.logger.info("\n".join(showed_text.splitlines()[:5]))
                self.logger.info("...")
                self.logger.info("\n".join(attachment_lines[: height - 5]))
                self.logger.info("...")
            elif showed_text_len + len(query) > height > 5 + len(query):
                self.logger.info("\n".join(showed_text.splitlines()[: height - len(query)]))
                self.logger.info("...")
                self.logger.info(attachment_text)
            else:
                self.logger.info(showed_text)
                self.logger.info(attachment_text)
        self.logger.info("-" * 100)

        # MENU
        if self.batch and requestor:
            answer = "s"
        else:
            answer = "q"
            if self.batch:
                self.logger.error("You need to set a valid requestor!")
            else:
                answer = input(
                    "{i}{b}[a]{i}utomatic, {b}[n]{i}ext, {i}{b}[s]{i}end, show "
                    "{b}[t]{i}able, change {b}[r]{i}equestor or {b}[q]{i}uit?{r} "
                    "".format(b=bold, i=myinverted, r=reset)
                ).strip()
        if answer == "q":
            exit(0)
        elif answer == "n":
            return False
        elif answer == "a":
            self.batch = True
        elif answer == "t":
            self.table_mode = bool((self.table_mode + 1) % 2)
            return self.send(taxonomy, contact, query, incident_id, requestor)
        elif answer == "r":
            answer = input(inverted("New requestor address:") + " ").strip()
            if len(answer) == 0:
                requestor = contact
            else:
                requestor = answer
            return self.send(taxonomy, contact, query, incident_id, requestor)
        elif answer != "s":
            self.logger.error("Unknow command {!r}.".format(answer))
            return self.send(taxonomy, contact, query, incident_id, requestor)

        if text is None:
            self.logger.error("I won't send with a missing text!")
            return False

        # INVESTIGATION
        if self.dryrun:
            self.logger.info("Simulate creation of investigation.")
            investigation_id = -1
        else:
            investigation_id = self.rt.create_ticket(
                Queue="Investigations", Subject=subject, Owner=self.config["rt"]["user"], Requestor=requestor
            )

            if investigation_id == -1:
                self.logger.error("Could not create Investigation.")
                return False

            self.logger.info("Created Investigation {}.".format(investigation_id))
            if not self.rt.edit_link(incident_id, "HasMember", investigation_id):
                self.logger.error("Could not link Investigation to Incident.")
                return False

            self.executemany(
                "UPDATE events SET rtir_investigation_id = %s WHERE id = %s", [(investigation_id, evid) for evid in ids]
            )
            self.logger.info("Linked events to investigation.")

        # CORRESPOND
        filename = "%s-%s.csv" % (datetime.datetime.now().strftime("%Y-%m-%d"), taxonomy)
        if self.zipme or len(query) > self.config["rt"]["zip_threshold"]:
            attachment = io.BytesIO()
            ziphandle = zipfile.ZipFile(attachment, mode="w", compression=zipfile.ZIP_DEFLATED)
            data = csvfile.getvalue()
            if six.PY2:
                data = unicode(data, "utf-8")
            ziphandle.writestr("events.csv", data.encode("utf-8"))
            ziphandle.close()
            attachment.seek(0)
            filename += ".zip"
            mimetype = "application/octet-stream"
        else:
            attachment = csvfile
            attachment.seek(0)
            mimetype = "text/csv"

        try:
            # TODO: CC
            if self.dryrun:
                self.logger.info("Simulate creation of correspondence.")
            else:
                correspond = self.rt.reply(investigation_id, text=text, files=[(filename, attachment, mimetype)])
                if not correspond:
                    self.logger.error("Could not correspond with text and file.")
                    return False
                self.logger.info("Correspondence added to Investigation.")

            self.execute(
                "UPDATE events SET sent_at = LOCALTIMESTAMP WHERE " "rtir_investigation_id = %s", (investigation_id,)
            )
            self.logger.info("Marked events as sent.")
        except:
            self.con.rollback()
            raise
        else:
            self.con.commit()

            # RESOLVE
            try:
                if not self.dryrun and not self.rt.edit_ticket(investigation_id, Status="resolved"):
                    self.logger.error("Could not close investigation {}.".format(investigation_id))
            except IndexError:
                # Bug in RT/python-rt
                pass

        if requestor != contact:
            asns = set(str(row["source.asn"]) for row in query)
            answer = input(
                inverted("Save recipient {!r} for ASNs {!s}? [Y/n] " "".format(requestor, ", ".join(asns)))
            ).strip()
            if answer.strip().lower() in ("", "y", "j"):
                self.executemany(lib.QUERY_UPDATE_CONTACT, [(requestor, asn) for asn in asns])
                self.con.commit()
                if self.cur.rowcount == 0:
                    self.query_insert_contact(asns=asns, contact=requestor)

        return True
Example 50
 def init(self):
     self.address = (self.parameters.ip, int(self.parameters.port))
     self.separator = utils.encode(self.parameters.separator)
     self.connect()
Example 51
 def test_encode_unicode(self):
     """Tests if the decode can handle bytes."""
     self.assertEqual(SAMPLES['unicode'][0],
                      utils.encode(SAMPLES['unicode'][1]))
Example 52
 def test_encode_force(self):
     """ Test ASCII encoding enforcement. """
     self.assertEqual(
         b'fobar',
         utils.encode('fo\xe4bar', encodings=('ascii', ), force=True))
Example 53
 def set_input_queue(self, seq):
     """Setter for the input queue of this bot"""
     self.pipe.state["%s-input" %
                     self.bot_id] = [utils.encode(text) for text in seq]
Example 54
 def init(self):
     self.address = (self.parameters.ip, int(self.parameters.port))
     self.separator = utils.encode(self.parameters.separator)
     self.connect()