def verify_connection(hostname, port):
    logger.log_debug(f'Trying to connect to Kibana instance '
                     f'at {hostname}:{port}')
    try:
        # Query the Kibana status API to confirm the instance is reachable
        response = requests.get(f'http://{hostname}:{port}/api/status')
        response.raise_for_status()
        kibana_status = response.json()
        logger.log_debug(f"Instance available. "
                         f"Version: {kibana_status['version']['number']}")
    except requests.exceptions.RequestException as ex:
        logger.log_error(ex)
        raise ConnectionError("Failed to connect to Kibana") from ex
def verify_connection(hostname, port):
    es_client = Elasticsearch(
        hosts=[{'host': hostname, 'port': port}])
    logger.log_debug(f'Trying to connect to ES instance '
                     f'at {hostname}:{port}')
    try:
        # ping() returns False rather than raising when the cluster is unreachable
        if not es_client.ping():
            raise ConnectionError(f'Connection failed for ES instance '
                                  f'at {hostname}:{port}')
        logger.log_debug('Instance available')
    except Exception as ex:
        logger.log_error(ex)
        raise ConnectionError("Failed to connect to ElasticSearch") from ex
def fill_tld_data(field):
    try:
        lookup_hostname = packet[field]['hostname']
        # Plain IPv4 addresses have no TLD, so skip the lookup for them
        if re.match(ipv4_regex, lookup_hostname):
            logger.log_debug(
                f'TLD lookup skipped: [{field}] {lookup_hostname}')
            return
        tld_result = tld.get_tld(url=lookup_hostname,
                                 fix_protocol=True,
                                 as_object=True)
        packet[field]['domain'] = tld_result.domain
        packet[field]['subdomain'] = tld_result.subdomain
        packet[field]['fld'] = tld_result.fld
    except tld.exceptions.TldDomainNotFound:
        logger.log_error(
            f'TLD lookup failed: [{field}] {lookup_hostname}')
def configure(host, port, resource_file, overwrite=False):
    logger.log_debug(
        f'Setup kibana using objects from "{resource_file.name}"')
    if not __space_exists(host, port):
        __create_space(host, port)
    logger.log_debug('Importing kibana objects')
    # Import the saved objects into the "metanet" space via the Kibana API
    import_response = requests.post(
        url=f'http://{host}:{port}/s/metanet/api/saved_objects/_import?'
            f'overwrite={overwrite}',
        headers={"kbn-xsrf": "true"},
        files={'file': resource_file}
    )
    import_status = import_response.json()
    if not import_status['success']:
        logger.log_error(f"Failed to setup kibana: {import_status['errors']}. "
                         f"Use --force to overwrite existing objects")
        return
    logger.log_debug(f"Kibana setup completed: "
                     f"successCount={import_status['successCount']}")
def configure(self, force_create=False):
    logger.log_debug("Create ES index")
    index_mapping = {
        "settings": {
            "index.mapping.ignore_malformed": True
        },
        "mappings": {
            "properties": {
                "datetime": {
                    "properties": {
                        "timestamp": {"type": "date", "format": "epoch_second"},
                        "year": {"type": "integer"},
                        "month": {"type": "integer"},
                        "day": {"type": "integer"},
                        "hour": {"type": "integer"},
                        "minute": {"type": "integer"},
                        "second": {"type": "integer"}
                    }
                },
                "tcp_stream": {"type": "integer"},
                "source": {
                    "properties": {
                        "ip": {"type": "ip"},
                        "port": {"type": "integer"},
                        "hostname": {"type": "text"},
                        "domain": {"type": "keyword"},
                        "subdomain": {"type": "keyword"},
                        "fld": {"type": "keyword"}
                    }
                },
                "destination": {
                    "properties": {
                        "ip": {"type": "ip"},
                        "port": {"type": "integer"},
                        "hostname": {"type": "text"},
                        "domain": {"type": "keyword"},
                        "subdomain": {"type": "keyword"},
                        "fld": {"type": "keyword"}
                    }
                },
                "resource": {
                    "properties": {
                        "type": {"type": "keyword"},
                        "category": {"type": "keyword"}
                    }
                }
            }
        }
    }
    if self.__es_client.indices.exists(self.index_name):
        logger.log_debug(f"Found existing index '{self.index_name}'")
        if not force_create:
            logger.log_error("Setup aborted. Use --force to overwrite")
            return
        # Drop the existing index before recreating it with the fresh mapping
        logger.log_debug(f"Removing existing index '{self.index_name}'")
        self.__es_client.indices.delete(self.index_name)
    self.__es_client.indices.create(self.index_name, json.dumps(index_mapping))
    logger.log_debug(f"Index '{self.index_name}' created: "
                     f"{json.dumps(index_mapping)}")