def run(self):
    if self.real_time:
        # Refresh new packets
        log.info("Start real-time")
        time.sleep(self.refresh_interval)
        t = datetime.datetime.utcnow()
        self.filter['time'] = {
            'start': t - datetime.timedelta(seconds=self.refresh_interval),
            'stop': t
        }
        while self.real_time:
            t = datetime.datetime.utcnow()
            # Collect packets
            self.packet_process()
            # Compute the time left before the next refresh
            now = datetime.datetime.utcnow()
            d = self.refresh_interval - (now - t).total_seconds()
            if d < 0:
                log.warning(
                    "Real-time issue: processing the data took longer than the refresh interval")
            else:
                time.sleep(d)
            # Adapt the filter for the next iteration
            self.filter['time'] = {
                'start': t,
                'stop': datetime.datetime.utcnow()
            }
        log.info("End of real-time")
    else:
        # Process only new packets
        self.packet_process()
    # Run again if real-time mode has been enabled in the meantime
    if self.real_time:
        self.run()
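# --- Example (illustration only): driving the real-time loop ---
# A minimal sketch, assuming a hypothetical Collector class that provides
# packet_process() and the real_time / refresh_interval / filter attributes
# used by run() above. Not part of the original code.
#
#     collector = Collector()            # hypothetical constructor
#     collector.real_time = True         # enable the refresh loop
#     collector.refresh_interval = 30    # seconds between two collections
#     try:
#         collector.run()                # blocks until real_time is set to False
#     except KeyboardInterrupt:
#         collector.real_time = False    # the loop exits after the current pass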
def db_connect_start():
    dbconn = pymysql.connect(host=config.config['db']['host'],
                             port=3306,
                             user=config.config['db']['user'],
                             passwd=config.config['db']['passwd'],
                             db='smp')
    log.info("DB connection started with %s" % config.config['db']['host'])
    return dbconn
def __set_to_network_level(self):
    """
    Take all packets from the server level (in the sniffers) and place them in
    the network level (edges and vertexes). Do packet correlation at the
    network level.

    :return:
    """
    log.info("Set packet to network level (correlation)")
    node_server = [n.server for n in self.net.nodes]
    for node in self.net.nodes:
        for sniffer in node.server.sniffers:
            for packet in sniffer.packets:
                # Node packet
                if packet.internal or (packet.src and packet.dst and
                                       packet.src["server"] == packet.dst["server"]):
                    node.set_packets(packet)
                    packet.state = Packet.ST_NEW
                    continue
                # Link packet
                if packet.src and packet.dst and \
                        packet.src['server'] == node.server and \
                        packet.dst['server'] in node_server:
                    # Find the matching packet on the remote side
                    remote_packet = [
                        p for sniffer in packet.dst['server'].sniffers
                        for p in sniffer.packets if packet.equals(p)
                    ]
                    if len(remote_packet) == 1:
                        remote_packet = remote_packet[0]
                        # Update info
                        packet.dst = remote_packet.dst
                        packet.state = Packet.ST_UPDATED
                    elif len(remote_packet) > 1:
                        log.warning("Multiple remote packets found!")
                        # TODO take the one with the shortest time delta
                    # Add to links
                    links_matched = [
                        l for n, l in node.remote_vertex
                        if n.server == packet.dst['server']
                    ]
                    for l in links_matched:
                        l.set_packets(packet)
                        packet.state = Packet.ST_NEW
            log.debug("Packet correlation done for sniffer %s on %s" %
                      (sniffer.__class__.__name__, node.server.name))
def __get_packets(self):
    """
    Collect all packets from the servers with a filter

    :return:
    """
    log.info("Getting packets from remote devices")
    threads = []
    try:
        for node in self.net.nodes:
            for sniffer in node.server.sniffers:
                th = threading.Thread(target=sniffer.get_packets,
                                      args=(self.filter, self.tmp_dir))
                threads.append(th)
                th.start()
        log.debug("Waiting for all downloads...")
        for th in threads:
            th.join()
        log.debug("Packets received")
    except Exception as e:
        log.error("Server execution: " + str(e))
    return
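# --- Example (illustration only): the fan-out/join pattern used above ---
# __get_packets() starts one thread per sniffer and then joins them all, so the
# total wait is bounded by the slowest download instead of the sum of all of
# them. A minimal standalone sketch of the same pattern (names are hypothetical):
#
#     import threading
#
#     def fetch(source, results):
#         results[source] = "packets from %s" % source   # placeholder work
#
#     results, threads = {}, []
#     for source in ("server-a", "server-b"):
#         th = threading.Thread(target=fetch, args=(source, results))
#         threads.append(th)
#         th.start()
#     for th in threads:
#         th.join()          # wait for every download before correlating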
def clean(self):
    log.info("Sniffer washed :)")
def capture_stop(self):
    log.info("Stopping capture...")
    self.capture_enabled = False
def capture_start(self):
    log.info("Capturing...")
    time.sleep(2)
    self.capture_enabled = True
def exe(self):
    # TODO fast remote execution (one-liner) instead of transferring a tmp script
    # Define the script name
    path_script = '/tmp/brorig_{0!s}'.format(
        base64.b32encode(uuid.uuid4().bytes)[:26])
    # Create the local script file
    if not self.file_name:
        f = open(path_script, 'w')
        f.write(self.code)
        f.close()
    chan = None
    # Transfer the script to the remote server if needed
    if self.exe_remote:
        self.connection.open_ssh_connexion()
        chan = self.connection.connection.get_transport().open_session()
        t = Transfer(self.connection.transport)
        t.put(self.file_name if self.file_name else path_script, path_script)
    # Script execution
    cmd = '{sudo}{interpret} {script} {args}'.format(
        sudo=("sudo " if self.sudo else ""),
        interpret=self.interpret,
        script=path_script,
        args=" ".join([("-" if len(str(arg)) == 1 else "--") + str(arg) + " " + str(val)
                       for arg, val in self.args.items()]))
    log.info("{1} code execution: {0:.100}".format(
        self.code, "Remote" if self.exe_remote else "Local"))
    log.debug("Launch {1} command: {0}".format(
        cmd, "remote" if self.exe_remote else "local"))
    if self.exe_remote:
        # Remote execution
        chan.exec_command(cmd)
        stdout = chan.makefile('r', -1)
        stderr = chan.makefile_stderr('r', -1)
        self.err = stderr.read()
        self.out = stdout.read()
        return_code = chan.recv_exit_status()
    else:
        # Local execution
        p = subprocess.Popen(cmd,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE,
                             shell=True)
        self.out, self.err = p.communicate()
        return_code = p.returncode
    # Remove the script (locally only if it was created here)
    if not self.file_name:
        os.remove(path_script)
    if self.exe_remote:
        self.connection.connection.exec_command(
            "rm -rf {}".format(path_script))
    # Close the remote connection
    if self.exe_remote:
        self.connection.close_ssh_connexion()
    # Error handling
    if return_code != 0 and not self.ignore_error:
        raise Exception('{1} script execution error: {0}'.format(
            self.err, "Remote" if self.exe_remote else "Local"))
    return self.out
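# --- Example (illustration only): how exe() builds the command line ---
# Given the attributes below (values are made up), the format string in exe()
# produces the command shown after the arrow:
#
#     self.sudo      = True
#     self.interpret = "python"
#     self.args      = {"v": "", "output": "/tmp/result"}
#
#     # -> "sudo python /tmp/brorig_<id> -v  --output /tmp/result"
#
# Single-character keys get a single dash, longer keys a double dash, and the
# value is appended after a space (an empty value leaves a double space).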
def db_connect_stop(dbconn):
    dbconn.close()
    log.info("DB connection stopped")
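# --- Example (illustration only): pairing db_connect_start / db_connect_stop ---
# A minimal sketch of how the two helpers above can be used together so the
# connection is closed even if a query fails (cursor usage follows the standard
# pymysql API; the query itself is made up):
#
#     dbconn = db_connect_start()
#     try:
#         with dbconn.cursor() as cursor:
#             cursor.execute("SELECT 1")
#             row = cursor.fetchone()
#     finally:
#         db_connect_stop(dbconn)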