def WriteHeader(self): """Sets up the Elasticsearch index and the Timesketch database object. Creates the Elasticsearch index with Timesketch specific settings and the Timesketch SearchIndex database object. """ # This cannot be static because we use the value of self._document_type # from arguments. mappings = { self._document_type: { 'properties': { 'timesketch_label': { 'type': 'nested' } } } } # Get Elasticsearch host and port from Timesketch configuration. with self._timesketch.app_context(): self._host = current_app.config['ELASTIC_HOST'] self._port = current_app.config['ELASTIC_PORT'] self._Connect() self._CreateIndexIfNotExists(self._index_name, mappings) user = None if self._timeline_owner: user = timesketch_user.User.query.filter_by( username=self._timeline_owner).first() if not user: raise RuntimeError( 'Unknown Timesketch user: {0:s}'.format(self._timeline_owner)) else: logger.warning('Timeline will be visible to all Timesketch users') with self._timesketch.app_context(): search_index = timesketch_sketch.SearchIndex.get_or_create( name=self._timeline_name, description=self._timeline_name, user=user, index_name=self._index_name) # Grant the user read permission on the mapping object and set status. # If user is None the timeline becomes visible to all users. search_index.grant_permission(user=user, permission='read') # In case we have a user grant additional permissions. if user: search_index.grant_permission(user=user, permission='write') search_index.grant_permission(user=user, permission='delete') # Let the Timesketch UI know that the timeline is processing. search_index.set_status('processing') # Save the mapping object to the Timesketch database. timesketch_db_session.add(search_index) timesketch_db_session.commit() logger.debug('Adding events to Timesketch.')
def SetIndexName(self, index_name):
  """Sets the name of the target Elasticsearch index.

  Args:
    index_name (str): name of the index.
  """
  log_message = 'Elasticsearch index name: {0:s}'.format(index_name)
  self._index_name = index_name
  logger.debug(log_message)
def SetUsername(self, username):
  """Sets the name of the user to authenticate as.

  Args:
    username (str): username to authenticate with.
  """
  log_message = 'Elasticsearch username: {0!s}'.format(username)
  self._username = username
  logger.debug(log_message)
def SetURLPrefix(self, url_prefix):
  """Sets the URL prefix.

  Args:
    url_prefix (str): URL prefix.
  """
  self._url_prefix = url_prefix
  # Fix: the original never called .format(), so the literal placeholder
  # '{0!s}' was logged instead of the configured prefix.
  logger.debug('Elasticsearch URL prefix: {0!s}'.format(url_prefix))
def SetDocumentType(self, document_type):
  """Sets the Elasticsearch document type.

  Args:
    document_type (str): document type.
  """
  log_message = 'Elasticsearch document type: {0:s}'.format(document_type)
  self._document_type = document_type
  logger.debug(log_message)
def SetPassword(self, password):
  """Sets the password used for authentication.

  Args:
    password (str): password to authenticate with.
  """
  self._password = password
  # Never log the actual password.
  logger.debug('Elastic password: ********')
def SetUseSSL(self, use_ssl):
  """Sets whether SSL is used when connecting.

  Args:
    use_ssl (bool): enforces use of ssl.
  """
  log_message = 'Elasticsearch use_ssl: {0!s}'.format(use_ssl)
  self._use_ssl = use_ssl
  logger.debug(log_message)
def SetFlushInterval(self, flush_interval):
  """Sets how many events are buffered before a bulk insert.

  Args:
    flush_interval (int): number of events to buffer before doing a bulk
        insert.
  """
  log_message = 'Elasticsearch flush interval: {0:d}'.format(flush_interval)
  self._flush_interval = flush_interval
  logger.debug(log_message)
def SetServerInformation(self, server, port):
  """Sets the host and port of the Elasticsearch server.

  Args:
    server (str): IP address or hostname of the server.
    port (int): Port number of the server.
  """
  log_message = 'Elasticsearch server: {0!s} port: {1:d}'.format(server, port)
  self._host = server
  self._port = port
  logger.debug(log_message)
def SetServerInformation(self, server, port):
  """Sets the host and port of the Elasticsearch server.

  Args:
    server (str): IP address or hostname of the server.
    port (int): Port number of the server.
  """
  log_message = 'Elasticsearch server: {0!s} port: {1:d}'.format(server, port)
  self._host = server
  self._port = port
  logger.debug(log_message)
def _Connect(self):
  """Connects to an Elasticsearch server."""
  host_configuration = {'host': self._host, 'port': self._port}

  if self._username is None:
    credentials = None
  else:
    credentials = (self._username, self._password)

  self._client = elasticsearch.Elasticsearch(
      [host_configuration], http_auth=credentials)

  logger.debug('Connected to Elasticsearch server: {0:s} port: {1:d}.'.format(
      self._host, self._port))
def WriteHeader(self): """Sets up the Elasticsearch index and the Timesketch database object. Creates the Elasticsearch index with Timesketch specific settings and the Timesketch SearchIndex database object. """ # Get Elasticsearch host and port from Timesketch configuration. with self._timesketch.app_context(): self._host = current_app.config['ELASTIC_HOST'] self._port = current_app.config['ELASTIC_PORT'] self._Connect() self._CreateIndexIfNotExists(self._index_name, self._mappings) user = None if self._timeline_owner: user = timesketch_user.User.query.filter_by( username=self._timeline_owner).first() if not user: raise RuntimeError('Unknown Timesketch user: {0:s}'.format( self._timeline_owner)) else: logger.warning('Timeline will be visible to all Timesketch users') with self._timesketch.app_context(): search_index = timesketch_sketch.SearchIndex.get_or_create( name=self._timeline_name, description=self._timeline_name, user=user, index_name=self._index_name) # Grant the user read permission on the mapping object and set status. # If user is None the timeline becomes visible to all users. search_index.grant_permission(user=user, permission='read') # In case we have a user grant additional permissions. if user: search_index.grant_permission(user=user, permission='write') search_index.grant_permission(user=user, permission='delete') # Let the Timesketch UI know that the timeline is processing. search_index.set_status('processing') # Save the mapping object to the Timesketch database. timesketch_db_session.add(search_index) timesketch_db_session.commit() logger.debug('Adding events to Timesketch.')
def __init__(self, output_mediator):
  """Initializes a Timesketch output module.

  Args:
    output_mediator (OutputMediator): mediates interactions between output
        modules and other components, such as storage and dfvfs.
  """
  stored_hostname = output_mediator.GetStoredHostname()
  if stored_hostname:
    logger.debug('Hostname: {0:s}'.format(stored_hostname))

  super(TimesketchOutputModule, self).__init__(output_mediator)
  # The stored hostname doubles as the default timeline name; it may be None.
  self._timeline_name = stored_hostname
  self._timeline_owner = None
  self._timesketch = timesketch.create_app()
def SetRawFields(self, raw_fields):
  """Sets whether raw (non-analyzed) fields are added.

  This is used for sorting and aggregations in Elasticsearch.
  https://www.elastic.co/guide/en/elasticsearch/reference/5.6/
  mapping-types.html#_multi_fields

  Args:
    raw_fields (bool): True if raw (non-analyzed) fields should be added.
  """
  self._raw_fields = raw_fields

  if raw_fields:
    log_message = 'Elasticsearch adding raw (non-analyzed) fields.'
  else:
    log_message = 'Elasticsearch not adding raw (non-analyzed) fields.'
  logger.debug(log_message)
def _Connect(self):
  """Connects to an Elasticsearch server."""
  host_configuration = {'host': self._host, 'port': self._port}

  if self._username is None:
    credentials = None
  else:
    credentials = (self._username, self._password)

  self._client = elasticsearch5.Elasticsearch(
      [host_configuration], http_auth=credentials, use_ssl=self._use_ssl,
      ca_certs=self._ca_certs)

  logger.debug('Connected to Elasticsearch server: {0:s} port: {1:d}.'.format(
      self._host, self._port))
def _Connect(self):
  """Connects to an Elasticsearch server."""
  elastic_host = {'host': self._host, 'port': self._port}
  if self._url_prefix:
    elastic_host['url_prefix'] = self._url_prefix

  elastic_http_auth = None
  if self._username is not None:
    elastic_http_auth = (self._username, self._password)

  self._client = elasticsearch.Elasticsearch(
      [elastic_host], http_auth=elastic_http_auth)

  # Fix: the original implicit string concatenation dropped the space between
  # the port and 'URL prefix', logging e.g. 'port: 9200URL prefix None.'.
  logger.debug(
      ('Connected to Elasticsearch server: {0:s} port: {1:d} '
       'URL prefix {2!s}.').format(
           self._host, self._port, self._url_prefix))
def SetCACertificatesPath(self, ca_certificates_path):
  """Sets the path to the CA certificates.

  Args:
    ca_certificates_path (str): path to file containing a list of root
        certificates to trust.

  Raises:
    BadConfigOption: if the CA certificates file does not exist.
  """
  # An empty path means no CA certificates were configured.
  if not ca_certificates_path:
    return

  if os.path.exists(ca_certificates_path):
    self._ca_certs = ca_certificates_path
    logger.debug('Elasticsearch ca_certs: {0!s}'.format(ca_certificates_path))
    return

  raise errors.BadConfigOption(
      'No such certificate file: {0:s}.'.format(ca_certificates_path))
def _FlushEvents(self):
  """Inserts the buffered event documents into Elasticsearch."""
  bulk_arguments = {
      'body': self._event_documents,
      'doc_type': self._document_type,
      'index': self._index_name,
      'request_timeout': self._DEFAULT_REQUEST_TIMEOUT}

  try:
    # pylint: disable=unexpected-keyword-arg
    # pylint does not recognizes request_timeout as a valid kwarg. According
    # to http://elasticsearch-py.readthedocs.io/en/master/api.html#timeout
    # it should be supported.
    self._client.bulk(**bulk_arguments)

  except ValueError as exception:
    # Ignore problematic events
    logger.warning('Unable to bulk insert with error: {0!s}'.format(
        exception))

  logger.debug('Inserted {0:d} events into Elasticsearch'.format(
      self._number_of_buffered_events))

  self._event_documents = []
  self._number_of_buffered_events = 0
def _Connect(self):
  """Connects to an Elasticsearch server."""
  elastic_host = {'host': self._host, 'port': self._port}
  if self._url_prefix:
    elastic_host['url_prefix'] = self._url_prefix

  elastic_http_auth = None
  if self._username is not None:
    elastic_http_auth = (self._username, self._password)

  self._client = elasticsearch.Elasticsearch(
      [elastic_host], http_auth=elastic_http_auth, use_ssl=self._use_ssl,
      ca_certs=self._ca_certs)

  # Fix: the original implicit string concatenation dropped the space between
  # the port and 'URL prefix', logging e.g. 'port: 9200URL prefix None.'.
  logger.debug(
      ('Connected to Elasticsearch server: {0:s} port: {1:d} '
       'URL prefix {2!s}.').format(
           self._host, self._port, self._url_prefix))
def _FlushEvents(self):
  """Inserts the buffered event documents into Elasticsearch."""
  try:
    # pylint: disable=unexpected-keyword-arg
    self._client.bulk(
        body=self._event_documents, index=self._index_name,
        request_timeout=self._DEFAULT_REQUEST_TIMEOUT)

  except (ValueError,
          elasticsearch.exceptions.ElasticsearchException) as exception:
    # Ignore problematic events
    logger.warning(
        'Unable to bulk insert with error: {0!s}'.format(exception))

  logger.debug('Inserted {0:d} events into Elasticsearch'.format(
      self._number_of_buffered_events))

  self._event_documents = []
  self._number_of_buffered_events = 0