def _base_query(
    self, performer_id=None, repository_id=None, account_id=None, filter_kinds=None, index=None
):
    if filter_kinds is not None:
        assert all(isinstance(kind_name, str) for kind_name in filter_kinds)

    # Scope the search to an explicit index when one is given; otherwise use the default.
    if index is not None:
        search = LogEntry.search(index=index)
    else:
        search = LogEntry.search()

    if performer_id is not None:
        assert isinstance(performer_id, int)
        search = search.filter("term", performer_id=performer_id)

    if repository_id is not None:
        assert isinstance(repository_id, int)
        search = search.filter("term", repository_id=repository_id)

    # The account filter only applies when the query is not already scoped to a repository.
    if account_id is not None and repository_id is None:
        assert isinstance(account_id, int)
        search = search.filter("term", account_id=account_id)

    # filter_kinds names log kinds to leave out, so map them to ids and exclude them.
    if filter_kinds is not None:
        kind_map = model.log.get_log_entry_kinds()
        ignore_ids = [kind_map[kind_name] for kind_name in filter_kinds]
        search = search.exclude("terms", kind_id=ignore_ids)

    return search
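# A minimal standalone sketch (not part of the source) of the query shape _base_query builds.
# It assumes the returned object behaves like an elasticsearch_dsl Search, so the same
# filter/exclude chaining is shown on a bare Search; the index pattern "logentry_*" and the
# kind ids below are hypothetical. Building and inspecting the query needs no running
# cluster; only execute() would.
from elasticsearch_dsl import Search

sketch = Search(index="logentry_*")
sketch = sketch.filter("term", repository_id=42)    # mirrors the repository_id branch
sketch = sketch.exclude("terms", kind_id=[7, 11])   # mirrors the filter_kinds exclusion
print(sketch.to_dict())  # a bool query with a term filter and a negated terms clause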
def log_action(
    self,
    kind_name,
    namespace_name=None,
    performer=None,
    ip=None,
    metadata=None,
    repository=None,
    repository_name=None,
    timestamp=None,
    is_free_namespace=False,
):
    if self._should_skip_logging and self._should_skip_logging(
        kind_name, namespace_name, is_free_namespace
    ):
        return

    # A repository_name is only meaningful together with a namespace, and must not be
    # combined with an already-resolved repository object.
    if repository_name is not None:
        assert repository is None
        assert namespace_name is not None
        repository = model.repository.get_repository(namespace_name, repository_name)

    if timestamp is None:
        timestamp = datetime.today()

    account_id = None
    performer_id = None
    repository_id = None

    if namespace_name is not None:
        account_id = model.user.get_namespace_user(namespace_name).id

    if performer is not None:
        performer_id = performer.id

    if repository is not None:
        repository_id = repository.id

    metadata_json = json.dumps(metadata or {}, default=_json_serialize)
    kind_id = model.log._get_log_entry_kind(kind_name)
    log = LogEntry(
        random_id=_random_id(),
        kind_id=kind_id,
        account_id=account_id,
        performer_id=performer_id,
        ip=ip,
        metadata_json=metadata_json,
        repository_id=repository_id,
        datetime=timestamp,
    )

    try:
        self._logs_producer.send(log)
    except LogSendException as lse:
        strict_logging_disabled = config.app_config.get("ALLOW_PULLS_WITHOUT_STRICT_LOGGING")
        # Merge the failed entry's fields into the extra dict so they are attached to the
        # log record (dict.update returns None, so it cannot be passed inline as extra).
        log_fields = {"exception": lse}
        log_fields.update(log.to_dict())
        logger.exception("log_action failed", extra=log_fields)
        if not (strict_logging_disabled and kind_name in ACTIONS_ALLOWED_WITHOUT_AUDIT_LOGGING):
            raise
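# _json_serialize is passed to json.dumps above as the `default` hook, but its body is not
# shown in this excerpt. A plausible, hypothetical sketch (not the source's implementation):
# the hook only has to turn non-JSON-native metadata values, such as datetimes, into
# something serializable.
from datetime import datetime as _dt


def _json_serialize_sketch(obj):
    """Fallback serializer for json.dumps: render datetimes as ISO strings, else str()."""
    if isinstance(obj, _dt):
        return obj.isoformat()
    return str(obj)


# Example: json.dumps({"when": _dt(2020, 1, 1)}, default=_json_serialize_sketch)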
import logging
from datetime import datetime

import pytest

from data.logs_model.logs_producer.util import logs_json_serializer
from data.logs_model.elastic_logs import LogEntry

logger = logging.getLogger(__name__)

TEST_DATETIME = datetime.utcnow()

TEST_JSON_STRING = '{"a": "b", "c": "d"}'
TEST_JSON_STRING_WITH_UNICODE = '{"éëê": "îôû"}'

VALID_LOGENTRY = LogEntry(
    random_id="123-45", ip="0.0.0.0", metadata_json=TEST_JSON_STRING, datetime=TEST_DATETIME
)
VALID_LOGENTRY_WITH_UNICODE = LogEntry(
    random_id="123-45",
    ip="0.0.0.0",
    metadata_json=TEST_JSON_STRING_WITH_UNICODE,
    datetime=TEST_DATETIME,
)

VALID_LOGENTRY_EXPECTED_OUTPUT = (
    '{"datetime": "%s", "ip": "0.0.0.0", "metadata_json": "{\\"a\\": \\"b\\", \\"c\\": \\"d\\"}", "random_id": "123-45"}'
    % TEST_DATETIME.isoformat()
).encode("ascii")
VALID_LOGENTRY_WITH_UNICODE_EXPECTED_OUTPUT = (
    '{"datetime": "%s", "ip": "0.0.0.0", "metadata_json": "{\\"\\u00e9\\u00eb\\u00ea\\": \\"\\u00ee\\u00f4\\u00fb\\"}", "random_id": "123-45"}'
    % TEST_DATETIME.isoformat()
).encode("ascii")
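# A sketch (not the source's test) of how the fixtures above would likely be exercised.
# It assumes logs_json_serializer takes a LogEntry plus a sort_keys flag and returns
# ASCII-encoded bytes with keys sorted, which is what the expected outputs imply; the test
# name and the sort_keys parameter are assumptions.
@pytest.mark.parametrize(
    "given_input, expected_output",
    [
        (VALID_LOGENTRY, VALID_LOGENTRY_EXPECTED_OUTPUT),
        (VALID_LOGENTRY_WITH_UNICODE, VALID_LOGENTRY_WITH_UNICODE_EXPECTED_OUTPUT),
    ],
)
def test_logs_json_serializer_sketch(given_input, expected_output):
    assert logs_json_serializer(given_input, sort_keys=True) == expected_output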
def _base_query(self):
    search = LogEntry.search(index=self.index)
    return search