Example no. 1
            def load_logs():
                logger.debug(
                    "Retrieving logs over range %s -> %s with namespace %s and repository %s",
                    current_start_datetime,
                    current_end_datetime,
                    namespace_id,
                    repository_id,
                )

                logs_query = model.log.get_logs_query(
                    namespace_id=namespace_id,
                    repository=repository_id,
                    start_time=current_start_datetime,
                    end_time=current_end_datetime,
                )
                logs = list(logs_query)
                for log in logs:
                    assert isinstance(log, BaseModel)
                    if namespace_id is not None:
                        assert log.account_id == namespace_id, "Expected %s, Found: %s" % (
                            namespace_id,
                            log.account_id,
                        )

                    if repository_id is not None:
                        assert log.repository_id == repository_id

                logs = [Log.for_logentry(log) for log in logs]
                return logs
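
A minimal sketch of how a closure like load_logs might be driven, assuming the enclosing export loop slices the requested range into fixed-size windows; the function name and the one-day step below are illustrative, not taken from the example.

from datetime import timedelta

def iter_export_windows(export_start, export_end, step=timedelta(days=1)):
    # Yield (start, end) pairs covering [export_start, export_end); each pair would
    # become the current_start_datetime/current_end_datetime seen by load_logs.
    current_start = export_start
    while current_start < export_end:
        current_end = min(export_end, current_start + step)
        yield current_start, current_end
        current_start = current_end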
Example no. 2
def _for_elasticsearch_logs(logs, repository_id=None, namespace_id=None):
    namespace_ids = set()
    for log in logs:
        namespace_ids.add(log.account_id)
        namespace_ids.add(log.performer_id)
        assert namespace_id is None or log.account_id == namespace_id
        assert repository_id is None or log.repository_id == repository_id

    id_user_map = model.user.get_user_map_by_ids(namespace_ids)
    return [Log.for_elasticsearch_log(log, id_user_map) for log in logs]
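
A small self-contained sketch of the bulk-lookup pattern used here, assuming model.user.get_user_map_by_ids returns a mapping from user id to user; FakeLog and resolve_users below are stand-ins for illustration only.

from collections import namedtuple

FakeLog = namedtuple("FakeLog", ["account_id", "performer_id", "repository_id"])

def resolve_users(user_ids):
    # Stand-in for model.user.get_user_map_by_ids: one bulk lookup for the whole page.
    return {user_id: "user-%s" % user_id for user_id in user_ids}

logs = [
    FakeLog(account_id=1, performer_id=2, repository_id=1),
    FakeLog(account_id=1, performer_id=3, repository_id=1),
]
namespace_ids = {log.account_id for log in logs} | {log.performer_id for log in logs}
id_user_map = resolve_users(namespace_ids)  # resolved once, then shared by every Log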
Example no. 3
    def log_action(
        self,
        kind_name,
        namespace_name=None,
        performer=None,
        ip=None,
        metadata=None,
        repository=None,
        repository_name=None,
        timestamp=None,
        is_free_namespace=False,
    ):
        timestamp = timestamp or datetime.today()

        if not repository and repository_name and namespace_name:
            repository = model.repository.get_repository(namespace_name, repository_name)

        account = None
        account_id = None
        performer_id = None
        repository_id = None

        if namespace_name is not None:
            account = model.user.get_namespace_user(namespace_name)
            account_id = account.id

        if performer is not None:
            performer_id = performer.id

        if repository is not None:
            repository_id = repository.id

        metadata_json = json.dumps(metadata or {})
        kind_id = model.log.get_log_entry_kinds()[kind_name]

        stored_log = StoredLog(
            kind_id, account_id, performer_id, ip, metadata_json, repository_id, timestamp
        )

        log = Log(
            metadata_json=metadata,
            ip=ip,
            datetime=timestamp,
            performer_email=performer.email if performer else None,
            performer_username=performer.username if performer else None,
            performer_robot=performer.robot if performer else None,
            account_organization=account.organization if account else None,
            account_username=account.username if account else None,
            account_email=account.email if account else None,
            account_robot=account.robot if account else None,
            kind_id=kind_id,
        )

        self.logs.append(LogAndRepository(log, stored_log, repository))
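
A hypothetical call shape for log_action; logs_model, user, and the "push_repo" kind name are assumptions for illustration and are not defined in the example above.

logs_model.log_action(
    "push_repo",                      # must be a key in model.log.get_log_entry_kinds()
    namespace_name="devtable",
    repository_name="simple",
    performer=user,
    ip="127.0.0.1",
    metadata={"tag": "latest"},
)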
Example no. 4
        def get_latest_logs(m):
            logs_query = model.log.get_latest_logs_query(
                performer=performer,
                repository=repository,
                namespace=namespace_name,
                ignore=filter_kinds,
                model=m,
                size=size)

            logs = list(logs_query)
            return [Log.for_logentry(log) for log in logs]
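
One plausible way a caller could combine the per-model results, assuming LOG_MODELS is the same list of log models used elsewhere in these examples; the chaining itself is an illustration, not the source's code.

import itertools

# Hypothetical caller: gather the most recent entries from every log model in turn.
latest = list(itertools.chain.from_iterable(get_latest_logs(m) for m in LOG_MODELS))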
Example no. 5
            def load_logs():
                logger.debug(
                    'Retrieving logs over range %s -> %s with namespace %s and repository %s',
                    current_start_datetime, current_end_datetime, namespace_id,
                    repository_id)

                logs_query = model.log.get_logs_query(
                    namespace=namespace_id,
                    repository=repository_id,
                    start_time=current_start_datetime,
                    end_time=current_end_datetime)
                logs = list(logs_query)
                for log in logs:
                    if namespace_id is not None:
                        assert log.account_id == namespace_id

                    if repository_id is not None:
                        assert log.repository_id == repository_id

                logs = [Log.for_logentry(log) for log in logs]
                return logs
Example no. 6
    def lookup_logs(
        self,
        start_datetime,
        end_datetime,
        performer_name=None,
        repository_name=None,
        namespace_name=None,
        filter_kinds=None,
        page_token=None,
        max_page_count=None,
    ):
        if filter_kinds is not None:
            assert all(
                isinstance(kind_name, str) for kind_name in filter_kinds)

        assert start_datetime is not None
        assert end_datetime is not None

        repository = None
        if repository_name and namespace_name:
            repository = model.repository.get_repository(
                namespace_name, repository_name)
            assert repository

        performer = None
        if performer_name:
            performer = model.user.get_user(performer_name)
            assert performer

        def get_logs(m, page_token):
            logs_query = model.log.get_logs_query(
                start_datetime,
                end_datetime,
                performer=performer,
                repository=repository,
                namespace=namespace_name,
                ignore=filter_kinds,
                model=m,
            )

            logs, next_page_token = model.modelutil.paginate(
                logs_query,
                m,
                descending=True,
                page_token=page_token,
                limit=20,
                max_page=max_page_count,
                sort_field_name="datetime",
            )

            return logs, next_page_token

        TOKEN_TABLE_ID = "tti"
        table_index = 0
        logs = []
        next_page_token = page_token or None

        # Skip empty pages (empty table)
        while len(logs) == 0 and table_index < len(LOG_MODELS) - 1:
            table_specified = (next_page_token is not None and
                               next_page_token.get(TOKEN_TABLE_ID) is not None)
            if table_specified:
                table_index = next_page_token.get(TOKEN_TABLE_ID)

            logs_result, next_page_token = get_logs(LOG_MODELS[table_index],
                                                    next_page_token)
            logs.extend(logs_result)

            if next_page_token is None and table_index < len(LOG_MODELS) - 1:
                next_page_token = {TOKEN_TABLE_ID: table_index + 1}

        return LogEntriesPage([Log.for_logentry(log) for log in logs],
                              next_page_token)
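
A hypothetical consumer of lookup_logs that follows page tokens until the results are exhausted; logs_model, start, end, and handle are stand-ins, and LogEntriesPage is assumed to expose logs and next_page_token fields as constructed above.

page_token = None
while True:
    page = logs_model.lookup_logs(
        start,
        end,
        namespace_name="devtable",
        page_token=page_token,
    )
    for log in page.logs:
        handle(log)                   # stand-in for whatever processes each Log
    page_token = page.next_page_token
    if page_token is None:            # no further pages (or tables) to read
        break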
Example no. 7
        "repository_id": 1,
        "ip": "192.168.1.2",
        "metadata_json":
        '{"\\ud83d\\ude02": "\\ud83d\\ude02\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c", "key": "value", "time": 1522639800}',
        "datetime": "2018-04-02T03:30",
    },
    "sort": [1522639800000, 233],
}

_log1 = Log(
    "{}",
    "192.168.1.1",
    parse("2018-03-08T03:30"),
    "user1.email",
    "user1.username",
    "user1.robot",
    "user1.organization",
    "user1.username",
    "user1.email",
    "user1.robot",
    1,
)
_log2 = Log(
    "{}",
    "192.168.1.2",
    parse("2018-04-02T03:30"),
    "user1.email",
    "user1.username",
    "user1.robot",
    "user1.organization",
    "user1.username",
Example no. 8
        "repository_id": 1,
        "ip": "192.168.1.2",
        "metadata_json":
        '{"\\ud83d\\ude02": "\\ud83d\\ude02\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c", "key": "value", "time": 1522639800}',
        "datetime": "2018-04-02T03:30",
    },
    "sort": [1522639800000, 233],
}

_log1 = Log(
    '{"\\ud83d\\ude02": "\\ud83d\\ude02\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c", "key": "value", "time": 1520479800}',
    "192.168.1.1",
    parse("2018-03-08T03:30"),
    "user1.email",
    "user1.username",
    "user1.robot",
    "user1.organization",
    "user1.username",
    "user1.email",
    "user1.robot",
    1,
)
_log2 = Log(
    '{"\\ud83d\\ude02": "\\ud83d\\ude02\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c", "key": "value", "time": 1522639800}',
    "192.168.1.2",
    parse("2018-04-02T03:30"),
    "user1.email",
    "user1.username",
    "user1.robot",
    "user1.organization",
    "user1.username",
Example no. 9
        "random_id": 233,
        "kind_id": 2,
        "account_id": 1,
        "performer_id": 1,
        "repository_id": 1,
        "ip": "192.168.1.2",
        "metadata_json":
        "{\"\\ud83d\\ude02\": \"\\ud83d\\ude02\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\", \"key\": \"value\", \"time\": 1522639800}",
        "datetime": "2018-04-02T03:30",
    },
    "sort": [1522639800000, 233]
}

_log1 = Log(
    "{\"\\ud83d\\ude02\": \"\\ud83d\\ude02\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\", \"key\": \"value\", \"time\": 1520479800}",
    "192.168.1.1", parse("2018-03-08T03:30"), "user1.email", "user1.username",
    "user1.robot", "user1.organization", "user1.username", "user1.email",
    "user1.robot", 1)
_log2 = Log(
    "{\"\\ud83d\\ude02\": \"\\ud83d\\ude02\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\\ud83d\\udc4c\", \"key\": \"value\", \"time\": 1522639800}",
    "192.168.1.2", parse("2018-04-02T03:30"), "user1.email", "user1.username",
    "user1.robot", "user1.organization", "user1.username", "user1.email",
    "user1.robot", 2)

SEARCH_RESPONSE_START = _status(_shards(_hits([_hit1, _hit2])))
SEARCH_RESPONSE_END = _status(_shards(_hits([_hit2])))
SEARCH_REQUEST_START = {
    "sort": [{
        "datetime": "desc"
    }, {
        "random_id.keyword": "desc"