def __generate_batch_packets(from_timestamp: int, time_interval: int,
                             packet_density: float):
    '''Generate a batch of packets for the given start timestamp, time
    interval (in seconds) and packet density (packets per second)
    '''
    to_timestamp = from_timestamp + time_interval
    packet_count = round(packet_density * time_interval)

    logger.log_debug(
        f'=> Generating batch packets\n'
        f'From     : {datetime.fromtimestamp(from_timestamp).isoformat()}\n'
        f'To       : {datetime.fromtimestamp(to_timestamp).isoformat()}\n'
        f'Interval : {time_interval}s\n'
        f'Density  : {packet_density}\n'
        f'Packets  : {packet_count}')

    # Draw unique timestamps by retrying on collision; note this loop cannot
    # terminate if packet_count exceeds the number of seconds in the interval
    generated_timestamps = []
    while len(generated_timestamps) < packet_count:
        timestamp = np.random.randint(from_timestamp, to_timestamp)
        if timestamp not in generated_timestamps:
            generated_timestamps.append(timestamp)

    generated_timestamps.sort()

    generated_packets = [
        __generate_packet(timestamp) for timestamp in generated_timestamps
    ]

    return generated_packets
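# A possible alternative for the retry loop above: numpy can sample distinct
# timestamps directly with replace=False, which avoids collisions entirely and
# cannot loop forever when packet_count approaches time_interval. Minimal
# standalone sketch of the same idea (hypothetical helper, not project code):
import numpy as np

def generate_unique_timestamps(from_timestamp, time_interval, packet_count):
    # One candidate per second in [from_timestamp, from_timestamp + time_interval)
    candidates = np.arange(from_timestamp, from_timestamp + time_interval)
    # Sample without replacement, capped at the number of distinct seconds
    chosen = np.random.choice(candidates,
                              size=min(packet_count, len(candidates)),
                              replace=False)
    return sorted(int(ts) for ts in chosen)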
    def is_configured(self):
        if self.__es_client.indices.exists(self.index_name):
            logger.log_debug(f"Existing index '{self.index_name}' found")
            return True
        else:
            logger.log_debug(f"No existing index '{self.index_name}' found")
            return False
def __extract_packets(pcap_file_path,
                      display_filter="tcp.flags.syn==1 and tcp.flags.ack==0"):

    logger.log_debug(
        f'Extract "{pcap_file_path}" packets using "{display_filter}"')

    # Run tshark in JSON mode, emitting only the fields consumed below;
    # -N enables name resolution for MAC/network/transport layers
    tshark_result = subprocess.run(["tshark",
                                    f"-r{os.path.abspath(pcap_file_path)}",
                                    "-Tjson",
                                    "-eframe.time_epoch",
                                    "-eip.src",
                                    "-eip.src_host",
                                    "-eip.dst",
                                    "-eip.dst_host",
                                    "-etcp.srcport",
                                    "-etcp.dstport",
                                    "-etcp.stream",
                                    "-NmnNtdv",
                                    display_filter
                                    ],
                                   capture_output=True,
                                   text=True,
                                   check=True)

    tshark_packets = json.loads(tshark_result.stdout)

    def convert_packet(packet):
        # tshark emits each extracted field as a single-element list under
        # _source.layers; unwrap and normalize the values
        return {
            'timestamp': datetime.fromtimestamp(
                float(packet['_source']['layers']['frame.time_epoch'][0])),
            'tcp_stream': int(packet['_source']['layers']['tcp.stream'][0]),
            'src': {
                'ip': packet['_source']['layers']['ip.src'][0],
                'hostname':
                    packet['_source']['layers']['ip.src_host'][0].lower(),
                'port': int(packet['_source']['layers']['tcp.srcport'][0]),
                'domain': None,
                'subdomain': None,
                'fld': None
            },
            'dst': {
                'ip': packet['_source']['layers']['ip.dst'][0],
                'hostname':
                    packet['_source']['layers']['ip.dst_host'][0].lower(),
                'port': int(packet['_source']['layers']['tcp.dstport'][0]),
                'domain': None,
                'subdomain': None,
                'fld': None
            },
            'metadata': {
                'resource_type': None
            }
        }

    packets = [convert_packet(packet) for packet in tshark_packets]
    logger.log_debug(f'Extracted {len(packets)} packets')

    return packets
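# For reference, the tshark "-T json" output that convert_packet() above
# consumes has roughly this shape (inferred from the field accesses; values
# are illustrative, not from a real capture). Note that each -e field arrives
# as a single-element list:
example_tshark_packet = {
    '_source': {
        'layers': {
            'frame.time_epoch': ['1589023456.123456'],
            'ip.src': ['192.168.1.10'],
            'ip.src_host': ['client.local'],
            'ip.dst': ['93.184.216.34'],
            'ip.dst_host': ['example.com'],
            'tcp.srcport': ['51234'],
            'tcp.dstport': ['443'],
            'tcp.stream': ['0'],
        }
    }
}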
def __delete_space(host, port):
    logger.log_debug("Deleting Kibana 'metanet' space")

    # Kibana requires the kbn-xsrf header on state-changing API requests
    response = requests.delete(
        url=f'http://{host}:{port}/api/spaces/space/metanet',
        headers={"kbn-xsrf": "true"}
    )

    response.raise_for_status()
    logger.log_debug("Kibana 'metanet' space deleted")
def __space_exists(host, port):
    response = requests.get(
        url=f'http://{host}:{port}/api/spaces/space/metanet')

    if response.status_code == 200:
        logger.log_debug("Kibana 'metanet' space exists")
        return True
    else:
        logger.log_debug(f"Kibana 'metanet' space not found. "
                         f"Code: {response.status_code}")
        return False
def verify_connection(hostname, port):
    logger.log_debug(f'Trying to connect to Kibana instance '
                     f'at {hostname}:{port}')

    try:
        response = requests.get(f'http://{hostname}:{port}/api/status')
        response.raise_for_status()

        kibana_status = response.json()
        logger.log_debug(f"Instance available. "
                         f"Version: {kibana_status['version']['number']}")
    except requests.exceptions.RequestException as ex:
        logger.log_error(ex)
        raise ConnectionError("Failed to connect to Kibana") from ex
def verify_connection(hostname, port):
    es_client = Elasticsearch(
        hosts=[{'host': hostname, 'port': port}])

    logger.log_debug(f'Trying to connect to ES instance '
                     f'at {hostname}:{port}')

    # es_client.ping() swallows transport errors and returns False when the
    # instance is unreachable, so no try/except is needed around it
    if not es_client.ping():
        logger.log_error(f'Connection failed for ES instance '
                         f'at {hostname}:{port}')
        raise ConnectionError("Failed to connect to Elasticsearch")

    logger.log_debug('Instance available')
    def cleanup(self):
        logger.log_debug("ES cleanup started")
        if self.__es_client.indices.exists(self.index_name):
            logger.log_debug(f"Removing index '{self.index_name}'")
            self.__es_client.indices.delete(self.index_name)
        else:
            logger.log_debug(f"Index '{self.index_name}' not found")

        logger.log_debug("ES cleanup completed")
def __create_space(host, port):
    logger.log_debug("Creating Kibana 'metanet' space")

    request_data = {
        "id": "metanet",
        "name": "MetaNet",
        "description": "MetaNet Space",
        "color": "#1562A2",
        "initials": "MN",
    }

    response = requests.post(
        url=f'http://{host}:{port}/api/spaces/space',
        headers={"kbn-xsrf": "true"},
        json=request_data)

    response.raise_for_status()
    logger.log_debug("Kibana 'metanet' space created")
    def index_packets(self, packets):
        logger.log_debug(f'Indexing {len(packets)} packets in ES')

        def convert_to_document(packet):
            return {
                'datetime': {
                    'timestamp': packet['timestamp'].timestamp(),
                    'year': packet['timestamp'].year,
                    'month': packet['timestamp'].month,
                    'day': packet['timestamp'].day,
                    'hour': packet['timestamp'].hour,
                    'minute': packet['timestamp'].minute,
                    'second': packet['timestamp'].second
                },
                'tcp_stream': packet['tcp_stream'],
                'source': {
                    'ip': packet['src']['ip'],
                    'port': packet['src']['port'],
                    'hostname': packet['src']['hostname'],
                    'domain': packet['src']['domain'],
                    'subdomain': packet['src']['subdomain'],
                    'fld': packet['src']['fld']
                },
                'destination': {
                    'ip': packet['dst']['ip'],
                    'port': packet['dst']['port'],
                    'hostname': packet['dst']['hostname'],
                    'domain': packet['dst']['domain'],
                    'subdomain': packet['dst']['subdomain'],
                    'fld': packet['dst']['fld']
                },
                'resource': {
                    'type': packet['metadata']['resource_type'],
                    'category': None
                }
            }

        # One HTTP request per document; acceptable for small captures (see
        # the bulk-indexing sketch after this method for larger ones)
        with click.progressbar(packets, label='Indexing') as packets_bar:
            for packet in packets_bar:
                self.__es_client.index(index=self.index_name,
                                       body=convert_to_document(packet))

        logger.log_debug("Indexing completed")
def cleanup(host, port):
    if is_configured(host, port):
        logger.log_debug("Cleaning up kibana")
        __delete_space(host, port)
        logger.log_debug("Cleanup completed")
    else:
        logger.log_debug("Nothing to cleanup")
def analyze_packets(packets):
    logger.log_debug("Analyzing packets from file")

    logger.log_debug("Loading hosts lists")
    # Context managers ensure the host-list files are closed after reading
    with open(__assets_hosts_path, 'r') as assets_file:
        asset_hosts = [line.strip() for line in assets_file]
    with open(__ads_hosts_path, 'r') as ads_file:
        ads_hosts = [line.strip() for line in ads_file]
    # Anchored at both ends so only bare IPv4-like hostnames are matched
    ipv4_regex = r'^(\d{1,3}\.){3}\d{1,3}$'

    def analyze_packet(packet):
        def fill_tld_data(field):
            try:
                lookup_hostname = packet[field]['hostname']

                if re.match(ipv4_regex, lookup_hostname):
                    logger.log_debug(
                        f'TLD lookup skipped: [{field}] {lookup_hostname}')
                    return

                tld_result = tld.get_tld(url=lookup_hostname,
                                         fix_protocol=True,
                                         as_object=True)
                packet[field]['domain'] = tld_result.domain
                packet[field]['subdomain'] = tld_result.subdomain
                packet[field]['fld'] = tld_result.fld
            except tld.exceptions.TldDomainNotFound:
                logger.log_error(
                    f'TLD lookup failed: [{field}] {lookup_hostname}')

        def fill_resource_type():
            # Classify the destination by substring match against the host lists
            is_asset = any(asset_host in packet['dst']['hostname']
                           for asset_host in asset_hosts)
            if is_asset:
                packet['metadata']['resource_type'] = 'asset'
                return

            is_ad = any(ads_host in packet['dst']['hostname']
                        for ads_host in ads_hosts)
            if is_ad:
                packet['metadata']['resource_type'] = 'ads'
                return

            packet['metadata']['resource_type'] = 'other'

        fill_tld_data('src')
        fill_tld_data('dst')
        fill_resource_type()

    with click.progressbar(packets, label='Analyzing') as packets_bar:
        for packet in packets_bar:
            analyze_packet(packet)

    logger.log_debug("Packet analysis completed")
    return packets
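# Illustration of what tld.get_tld() returns for fill_tld_data() above, using
# a made-up hostname (requires the "tld" package and its public-suffix data):
import tld

result = tld.get_tld(url='cdn.assets.example.co.uk',
                     fix_protocol=True,
                     as_object=True)
# result.domain    -> 'example'
# result.subdomain -> 'cdn.assets'
# result.fld       -> 'example.co.uk'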
def configure(host, port, resource_file, overwrite=False):
    logger.log_debug(
        f'Setting up Kibana using objects from "{resource_file.name}"')

    if not __space_exists(host, port):
        __create_space(host, port)

    logger.log_debug('Importing Kibana objects')
    # Kibana parses the overwrite query parameter as lowercase true/false;
    # interpolating the Python bool directly would yield "True"/"False"
    import_response = requests.post(
        url=f'http://{host}:{port}/s/metanet/api/saved_objects/_import?'
            f'overwrite={str(overwrite).lower()}',
        headers={"kbn-xsrf": "true"},
        files={'file': resource_file}
    )

    import_status = import_response.json()
    if not import_status['success']:
        logger.log_error(f"Failed to set up Kibana: {import_status['errors']}. "
                         f"Use --force to overwrite existing objects")
        return

    logger.log_debug(f"Kibana setup completed: "
                     f"successCount={import_status['successCount']}")
    def configure(self, force_create=False):
        logger.log_debug("Creating ES index")

        index_mapping = {
            "settings": {
                "index.mapping.ignore_malformed": True
            },
            "mappings": {
                "properties": {
                    "datetime": {
                        "properties": {
                            "timestamp": {
                                "type": "date",
                                "format": "epoch_second"
                            },
                            "year": {"type": "integer"},
                            "month": {"type": "integer"},
                            "day": {"type": "integer"},
                            "hour": {"type": "integer"},
                            "minute": {"type": "integer"},
                            "second": {"type": "integer"}
                        }
                    },
                    "tcp_stream": {"type": "integer"},
                    "source": {
                        "properties": {
                            "ip": {"type": "ip"},
                            "port": {"type": "integer"},
                            "hostname": {"type": "text"},
                            "domain": {"type": "keyword"},
                            "subdomain": {"type": "keyword"},
                            "fld": {"type": "keyword"}
                        }
                    },
                    "destination": {
                        "properties": {
                            "ip": {"type": "ip"},
                            "port": {"type": "integer"},
                            "hostname": {"type": "text"},
                            "domain": {"type": "keyword"},
                            "subdomain": {"type": "keyword"},
                            "fld": {"type": "keyword"}
                        },
                    },
                    "resource": {
                        "properties": {
                            "type": {"type": "keyword"},
                            "category": {"type": "keyword"}
                        }
                    }
                }
            }
        }

        if self.__es_client.indices.exists(self.index_name):
            logger.log_debug(f"Found existing index '{self.index_name}'")
            if not force_create:
                logger.log_error("Setup aborted. Use --force to overwrite")
                return
            logger.log_debug(f"Removing existing index '{self.index_name}'")
            self.__es_client.indices.delete(self.index_name)

        # Pass the mapping as a dict; the client serializes it to JSON itself
        self.__es_client.indices.create(self.index_name, index_mapping)

        logger.log_debug(f"Index '{self.index_name}' created: "
                         f"{json.dumps(index_mapping)}")
    def truncate(self):
        logger.log_debug("Truncating index data")
        self.cleanup()
        self.configure()
        logger.log_debug("Index truncated")