def run(self):
    if self.real_time:
        # Refresh new packets periodically
        log.info("Start real-time")
        time.sleep(self.refresh_interval)
        t = datetime.datetime.utcnow()
        self.filter['time'] = {
            'start': t - datetime.timedelta(seconds=self.refresh_interval),
            'stop': t
        }
        while self.real_time:
            t = datetime.datetime.utcnow()
            # Collect packets
            self.packet_process()
            # Compute how long to wait before the next refresh
            now = datetime.datetime.utcnow()
            d = self.refresh_interval - (now - t).total_seconds()
            if d < 0:
                log.warning(
                    "Real-time issue: processing the data takes longer than the refresh interval")
            else:
                time.sleep(d)
            # Adapt the filter for the next iteration
            self.filter['time'] = {
                'start': t,
                'stop': datetime.datetime.utcnow()
            }
        log.info("End of real-time")
    else:
        # Process only new packets
        self.packet_process()
        # Check whether real-time mode was enabled in the meantime
        if self.real_time:
            self.run()
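# Timing sketch (assumption: refresh_interval is expressed in seconds).
# With refresh_interval = 10 and a collection pass that takes 7.5 s:
#   t   = 12:00:00.0   (start of the pass)
#   now = 12:00:07.5   (end of the pass)
#   d   = 10 - 7.5 = 2.5  -> sleep 2.5 s, next window is [12:00:00.0, 12:00:10.0]
# If the pass takes longer than refresh_interval, d becomes negative and the
# loop only logs a warning instead of sleeping.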
def __init__(self, index):
    self.index = "brorig_%s" % index.lower()
    self.es = Elasticsearch()
    if not self.es.ping():
        log.warning("Search engine not available")
        self.es = None
        return
    # Create the index only if it does not exist yet
    if not self.es.indices.exists(index=self.index):
        self.es.indices.create(index=self.index)
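# Minimal usage sketch, kept as a comment because the enclosing class is not
# shown here; the wrapper name `SearchEngine` and the document fields are
# hypothetical, only the elasticsearch-py calls (index, search) are real:
#
#   engine = SearchEngine("packets")
#   if engine.es is not None:
#       engine.es.index(index=engine.index,
#                       body={"uuid": "abc", "ts": "2017-01-01T00:00:00"})
#       hits = engine.es.search(index=engine.index,
#                               body={"query": {"match": {"uuid": "abc"}}})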
def list(self): """ Build farm of server based on configuration file according to following structure: "farm": { "servers": [ { "key": <unique_key>, "name": <name _of_server>, "cluster": <name_of_cluster_defined>, "group": <group_id>, "hostname": <server_hostanme>, "type": basic|vagrant "ssh": { "user": <login_user>, "passwd": <user_password> (optional) "pkey": <ssh_private_key> (optional) } }, ... ], "clusters": [ {"name": <id_of_cluster>}, ... ] } """ if self.servers: return self.servers self.clusters = [ server.Cluster(c["name"]) for c in config.config["farm"]["clusters"] ] for s in config.config["farm"]["servers"]: if 'type' not in s["type"] and s[ "type"] not in self.type_of_server_available: serverClass = self.type_of_server_available["basic"] log.warning( "Type of server undefined: using basic sever by default") else: serverClass = self.type_of_server_available[s["type"]] s_obj = serverClass(str(s["key"]), s["name"], [ c for c in self.clusters if c.name == s["cluster"] ][0], s["group"]) s_obj.set_ssh_info( s["hostname"], s["ssh"]["user"], passwd=s["ssh"]["passwd"] if 'passwd' in s['ssh'] else None, pkey_path=s['ssh']['pkey'] if 'pkey' in s['ssh'] else None) self.servers.append(s_obj) for s in self.servers: s.cluster.add_server(s) return self.servers
def connectivity(self, data):
    net = self.user.network
    for c in data:
        node_key = c['node']
        node = net.get_node(node_key)
        remote_conn = c['remote_conn']
        status = c['status']
        if not node:
            log.warning("Connectivity change requested for an unknown node")
            continue
        if status == "enable":
            # Add a new virtual node
            virtual_nodes = [
                n for n in net.nodes
                for conn in n.server.connectivity() if remote_conn in conn
            ]
            if len(virtual_nodes) == 1:
                virtual_node = virtual_nodes[0]
                if not isinstance(virtual_node.server, server.VirtualServer):
                    continue
                virtual_node.server.add_connectivity(node_key)
                net.set_connectivity(node)
                net.set_connectivity(virtual_node)
            else:
                vs = server.VirtualServer(remote_conn)
                vs.add_connectivity(node_key)
                net.add_node([vs])
        elif status == "disable":
            # Remove connectivity to and from the node
            remote_nodes = [
                n for n in net.nodes
                for conn in n.server.connectivity() if remote_conn in conn
            ]
            for n in remote_nodes:
                if isinstance(n.server, server.VirtualServer):
                    net.remove_connectivity(node, remote_conn)
                if not isinstance(node.server, server.VirtualServer):
                    for conn in node.server.connectivity():
                        net.remove_connectivity(n, conn)
            # Clean up virtual servers that no longer have any connection
            v_nodes = [
                n for n in net.nodes
                if isinstance(n.server, server.VirtualServer)
                and n.remote_vertex == []
            ]
            for n in v_nodes:
                net.remove_node(n)
        else:
            log.error("Invalid connectivity status")
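# Expected shape of the `data` argument, inferred from the fields read above
# (node keys, connection names and statuses below are hypothetical):
#
# data = [
#     {"node": "web1", "remote_conn": "db.example.com", "status": "enable"},
#     {"node": "web2", "remote_conn": "cache.example.com", "status": "disable"},
# ]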
def __set_to_network_level(self):
    """
    Take all packets from the server level (in the sniffers) and place them
    in the network level (edges and vertexes). Packet correlation is done at
    the network level.
    :return:
    """
    log.info("Set packets to network level (correlation)")
    node_server = [n.server for n in self.net.nodes]
    for node in self.net.nodes:
        for sniffer in node.server.sniffers:
            for packet in sniffer.packets:
                # Node packet: internal, or both ends on the same server
                if packet.internal or (packet.src and packet.dst and
                                       packet.src["server"] == packet.dst["server"]):
                    node.set_packets(packet)
                    packet.state = Packet.ST_NEW
                    continue
                # Link packet: sent by this node to another known server
                if packet.src and packet.dst \
                        and packet.src['server'] == node.server \
                        and packet.dst['server'] in node_server:
                    # Find the matching packet captured on the remote side
                    remote_packet = [
                        p for remote_sniffer in packet.dst['server'].sniffers
                        for p in remote_sniffer.packets if packet.equals(p)
                    ]
                    if len(remote_packet) == 1:
                        remote_packet = remote_packet[0]
                        # Update info from the remote capture
                        packet.dst = remote_packet.dst
                        packet.state = Packet.ST_UPDATED
                    elif len(remote_packet) > 1:
                        log.warning("Multiple remote packets found!")
                        # TODO take the one with the shortest time delta
                    # Add to links
                    links_matched = [
                        l for n, l in node.remote_vertex
                        if n.server == packet.dst['server']
                    ]
                    for l in links_matched:
                        l.set_packets(packet)
                        packet.state = Packet.ST_NEW
            log.debug("Packet correlation done for sniffer %s on %s" %
                      (sniffer.__class__.__name__, node.server.name))
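# Correlation sketch (hypothetical servers A and B): a packet captured on A
# with src=A and dst=B is compared, via `packet.equals()`, with every packet
# captured on B. If exactly one remote copy matches, the local packet inherits
# its `dst` details (ST_UPDATED) and is attached to each A->B link; packets
# whose src and dst are the same server stay on the node itself.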