Example #1
def test_build_UserListItem_model(fake_user: User, faker: Faker):
    # Model for the response payload of the List method (SEE https://google.aip.dev/132)

    # Typically a light version of the Get model
    _UserListItem = copy_model(
        UserGet,
        name="UserListItem",
        exclude={"display_name"},
        skip_validators=True,
    )

    assert _trim_descriptions(UserListItem.schema()) == _trim_descriptions(
        _UserListItem.schema())

    # To build the pagination model, simply apply the Page generic
    assert _trim_descriptions(
        Page[_UserListItem].schema()) == _trim_descriptions(
            Page[UserListItem].schema())

    # parse stored data
    item_user = _UserListItem.parse_obj(fake_user).dict(exclude_unset=True)

    page: PageDict = paginate_data(
        chunk=[item_user],
        request_url=URL(faker.url()).with_path("/users"),
        total=100,
        limit=1,
        offset=0,
    )
    page_users = Page[_UserListItem].parse_obj(page)
    print(page_users.json(indent=2, exclude_unset=True))
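For context, a minimal sketch of what a helper like _trim_descriptions could look like (an assumption; the actual test helper may differ): it recursively drops "description" entries so two schemas can be compared structurally, ignoring docstring-derived text.

from typing import Any


def _trim_descriptions(schema: Any) -> Any:
    # Hypothetical implementation: strip "description" keys at any depth
    # so the comparison above ignores field/model docstrings.
    if isinstance(schema, dict):
        return {
            key: _trim_descriptions(value)
            for key, value in schema.items() if key != "description"
        }
    if isinstance(schema, list):
        return [_trim_descriptions(item) for item in schema]
    return schema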
Example #2
async def _list_meta_project_iterations_handler(
        request: web.Request) -> web.Response:
    # TODO: check access to non-owned projects
    #  user_id = request[RQT_USERID_KEY]
    # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735

    # parse and validate request ----
    q = parse_query_parameters(request)
    meta_project_uuid = q.project_uuid
    meta_project_commit_id = q.ref_id

    url_for = create_url_for_function(request)
    vc_repo = VersionControlForMetaModeling(request)

    # core function ----
    iterations_range = await _get_project_iterations_range(
        vc_repo,
        meta_project_uuid,
        meta_project_commit_id,
        offset=q.offset,
        limit=q.limit,
    )

    if iterations_range.total_count == 0:
        raise web.HTTPNotFound(
            reason=
            f"No iterations found for project {meta_project_uuid=}/{meta_project_commit_id=}"
        )

    assert len(iterations_range.items) <= q.limit  # nosec

    # parse and validate response ----
    page_items = [
        ProjectIterationItem.create(
            meta_project_uuid,
            meta_project_commit_id,
            item.iteration_index,
            item.project_id,
            url_for,
        ) for item in iterations_range.items
    ]

    page = Page[ProjectIterationItem].parse_obj(
        paginate_data(
            chunk=page_items,
            request_url=request.url,
            total=iterations_range.total_count,
            limit=q.limit,
            offset=q.offset,
        ))
    return web.Response(
        text=page.json(**RESPONSE_MODEL_POLICY),
        content_type="application/json",
    )
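The handler relies on parse_query_parameters returning an object exposing project_uuid, ref_id, offset and limit. A plausible shape for that object, sketched as a pydantic v1 model (hypothetical; the names simply mirror how q is used above):

from uuid import UUID

from pydantic import BaseModel, Field

DEFAULT_NUMBER_OF_ITEMS_PER_PAGE = 20  # assumed default


class IterationsQueryParams(BaseModel):
    project_uuid: UUID  # taken from the request path
    ref_id: int  # commit id
    limit: int = Field(default=DEFAULT_NUMBER_OF_ITEMS_PER_PAGE, ge=1)
    offset: int = Field(default=0, ge=0)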
Example #3
async def list_projects(request: web.Request):
    # TODO: implement all query parameters as
    # in https://www.ibm.com/support/knowledgecenter/en/SSCRJU_3.2.0/com.ibm.swg.im.infosphere.streams.rest.api.doc/doc/restapis-queryparms-list.html
    from servicelib.aiohttp.rest_utils import extract_and_validate

    user_id, product_name = request[RQT_USERID_KEY], request[RQ_PRODUCT_KEY]
    _, query, _ = await extract_and_validate(request)

    project_type = ProjectTypeAPI(query["type"])
    offset = query["offset"]
    limit = query["limit"]
    show_hidden = query["show_hidden"]

    db: ProjectDBAPI = request.config_dict[APP_PROJECT_DBAPI]

    async def set_all_project_states(projects: List[Dict[str, Any]],
                                     project_types: List[bool]):
        await logged_gather(
            *[
                projects_api.add_project_states_for_user(
                    user_id=user_id,
                    project=prj,
                    is_template=prj_type == ProjectTypeDB.TEMPLATE,
                    app=request.app,
                ) for prj, prj_type in zip(projects, project_types)
            ],
            reraise=True,
            max_concurrency=100,
        )

    user_available_services: List[
        Dict] = await catalog.get_services_for_user_in_product(
            request.app, user_id, product_name, only_key_versions=True)

    projects, project_types, total_number_projects = await db.load_projects(
        user_id=user_id,
        filter_by_project_type=ProjectTypeAPI.to_project_type_db(project_type),
        filter_by_services=user_available_services,
        offset=offset,
        limit=limit,
        include_hidden=show_hidden,
    )
    await set_all_project_states(projects, project_types)
    page = Page[ProjectDict].parse_obj(
        paginate_data(
            chunk=projects,
            request_url=request.url,
            total=total_number_projects,
            limit=limit,
            offset=offset,
        ))
    return page.dict(**RESPONSE_MODEL_POLICY)
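All of these handlers serialize pages with **RESPONSE_MODEL_POLICY. Its definition is not shown in the examples; a plausible one (an assumption) is a dict of pydantic export flags:

# Assumed: keyword arguments forwarded to pydantic's .json()/.dict() so
# aliased fields (e.g. "_meta", "_links") are emitted and unset optional
# fields are dropped from the payload.
RESPONSE_MODEL_POLICY = {
    "by_alias": True,
    "exclude_unset": True,
}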
Example #4
async def _list_checkpoints_handler(request: web.Request):
    url_for = create_url_for_function(request)
    vc_repo = VersionControlRepository(request)

    _project_uuid = request.match_info["project_uuid"]
    _limit = int(request.query.get("limit", DEFAULT_NUMBER_OF_ITEMS_PER_PAGE))
    _offset = int(request.query.get("offset", 0))

    checkpoints: List[Checkpoint]

    checkpoints, total = await list_checkpoints_safe(
        vc_repo,
        project_uuid=_project_uuid,  # type: ignore
        offset=_offset,
        limit=_limit,
    )

    # parse and validate
    checkpoints_list = [
        CheckpointApiModel.parse_obj({
            "url":
            url_for(
                f"{__name__}._get_checkpoint_handler",
                project_uuid=_project_uuid,
                ref_id=checkpoint.id,
            ),
            **checkpoint.dict(),
        }) for checkpoint in checkpoints
    ]

    page = Page[CheckpointApiModel].parse_obj(
        paginate_data(
            chunk=checkpoints_list,
            request_url=request.url,
            total=total,
            limit=_limit,
            offset=_offset,
        ))
    return web.Response(
        text=page.json(**RESPONSE_MODEL_POLICY),
        content_type="application/json",
    )
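For reference, a sketch of what the Page[...] generic used throughout could look like in pydantic v1, assuming the conventional "_meta"/"_links" envelope (PageMetaInfoLimitOffset and PageLinks are the same models asserted against in Example #8):

from typing import Generic, List, TypeVar

from pydantic import Field
from pydantic.generics import GenericModel

ItemT = TypeVar("ItemT")


class Page(GenericModel, Generic[ItemT]):
    # Envelope produced by paginate_data; the real model may add extra
    # validation, e.g. that len(data) == meta.count.
    meta: PageMetaInfoLimitOffset = Field(..., alias="_meta")
    links: PageLinks = Field(..., alias="_links")
    data: List[ItemT]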
Example #5
async def _list_repos_handler(request: web.Request):
    # FIXME: check access to non-owned projects
    #  user_id = request[RQT_USERID_KEY]
    url_for = create_url_for_function(request)
    vc_repo = VersionControlRepository(request)

    _limit = int(request.query.get("limit", DEFAULT_NUMBER_OF_ITEMS_PER_PAGE))
    _offset = int(request.query.get("offset", 0))

    repos_rows, total_number_of_repos = await list_repos_safe(vc_repo,
                                                              offset=_offset,
                                                              limit=_limit)

    assert len(repos_rows) <= _limit  # nosec

    # parse and validate
    repos_list = [
        RepoApiModel.parse_obj({
            "url":
            url_for(
                f"{__name__}._list_checkpoints_handler",
                project_uuid=row.project_uuid,
            ),
            **dict(row.items()),
        }) for row in repos_rows
    ]

    page = Page[RepoApiModel].parse_obj(
        paginate_data(
            chunk=repos_list,
            request_url=request.url,
            total=total_number_of_repos,
            limit=_limit,
            offset=_offset,
        ))
    return web.Response(
        text=page.json(**RESPONSE_MODEL_POLICY),
        content_type="application/json",
    )
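Note the contract implied by the call sites: list_repos_safe (like list_checkpoints_safe in Example #4) returns one page of rows plus the total row count, which paginate_data needs to compute the first/prev/next/last links. A hypothetical sketch of that contract:

from typing import List, Tuple


async def list_repos_safe(vc_repo, *, offset: int,
                          limit: int) -> Tuple[List, int]:
    # Hypothetical: one LIMIT/OFFSET query for the page and one COUNT for
    # the total (the repository method names here are illustrative only).
    total = await vc_repo.count_repos()
    rows = await vc_repo.get_repos(offset=offset, limit=limit)
    return rows, total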
Example #6
async def _list_meta_project_iterations_results_handler(
        request: web.Request) -> web.Response:
    # parse and validate request ----
    q = parse_query_parameters(request)
    meta_project_uuid = q.project_uuid
    meta_project_commit_id = q.ref_id

    url_for = create_url_for_function(request)
    vc_repo = VersionControlForMetaModeling(request)

    # core function ----
    iterations_range = await _get_project_iterations_range(
        vc_repo,
        meta_project_uuid,
        meta_project_commit_id,
        offset=q.offset,
        limit=q.limit,
    )

    if iterations_range.total_count == 0:
        raise web.HTTPNotFound(
            reason=
            f"No iterations found for projects/{meta_project_uuid}/checkpoint/{meta_project_commit_id}"
        )

    assert len(iterations_range.items) <= q.limit  # nosec

    # get every project from the database and extract results
    _prj_data = {}
    for item in iterations_range.items:
        # TODO: fetch ALL project iterations at once. Otherwise they will have different results
        # TODO: if raises?
        prj = await vc_repo.get_project(f"{item.project_id}",
                                        include=["workbench"])
        _prj_data[item.project_id] = prj["workbench"]

    def _get_project_results(project_id) -> ExtractedResults:
        # TODO: if raises?
        results = extract_project_results(_prj_data[project_id])
        return results

    # parse and validate response ----
    page_items = [
        ProjectIterationResultItem.create(
            meta_project_uuid,
            meta_project_commit_id,
            item.iteration_index,
            item.project_id,
            _get_project_results(item.project_id),
            url_for,
        ) for item in iterations_range.items
    ]

    page = Page[ProjectIterationResultItem].parse_obj(
        paginate_data(
            chunk=page_items,
            request_url=request.url,
            total=iterations_range.total_count,
            limit=q.limit,
            offset=q.offset,
        ))
    return web.Response(
        text=page.json(**RESPONSE_MODEL_POLICY),
        content_type="application/json",
    )
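The TODO above notes that projects are fetched one by one, so different iterations may be read at different times. One way to narrow that window (a sketch, assuming vc_repo.get_project tolerates concurrent calls) is to fetch all workbenches at once:

import asyncio


async def _fetch_workbenches(vc_repo, project_ids):
    # Fetch every iteration's project concurrently instead of serially.
    projects = await asyncio.gather(*(vc_repo.get_project(
        f"{pid}", include=["workbench"]) for pid in project_ids))
    return {pid: prj["workbench"] for pid, prj in zip(project_ids, projects)}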
Example #7
async def _list_meta_project_iterations_handler(
        request: web.Request) -> web.Response:
    # TODO: check access to non-owned projects
    #  user_id = request[RQT_USERID_KEY]
    # SEE https://github.com/ITISFoundation/osparc-simcore/issues/2735

    # parse and validate request ----
    url_for = create_url_for_function(request)
    vc_repo = VersionControlForMetaModeling(request)

    _project_uuid = ProjectID(request.match_info["project_uuid"])
    _ref_id = request.match_info["ref_id"]

    _limit = int(request.query.get("limit", DEFAULT_NUMBER_OF_ITEMS_PER_PAGE))
    _offset = int(request.query.get("offset", 0))

    try:
        commit_id = CommitID(_ref_id)
    except ValueError as err:
        # e.g. HEAD
        raise NotImplementedError(
            "cannot convert ref (e.g. HEAD) -> commit id") from err

    # core function ----
    iterations = await _get_project_iterations_range(vc_repo,
                                                     _project_uuid,
                                                     commit_id,
                                                     offset=_offset,
                                                     limit=_limit)

    if iterations.total_count == 0:
        raise web.HTTPNotFound(
            reason=
            f"No iterations found for project {_project_uuid=}/{commit_id=}")

    assert len(iterations.items) <= _limit  # nosec

    # parse and validate response ----
    page_items = [
        ProjectIterationAsItem(
            name=
            f"projects/{_project_uuid}/checkpoint/{commit_id}/iterations/{iter_id}",
            parent=ParentMetaProjectRef(project_id=_project_uuid,
                                        ref_id=commit_id),
            workcopy_project_id=wcp_id,
            workcopy_project_url=url_for(
                "get_project",
                project_id=wcp_id,
            ),
            url=url_for(
                f"{__name__}._list_meta_project_iterations_handler",
                project_uuid=_project_uuid,
                ref_id=commit_id,
            ),
        ) for wcp_id, iter_id in iterations.items
    ]

    page = Page[ProjectIterationAsItem].parse_obj(
        paginate_data(
            chunk=page_items,
            request_url=request.url,
            total=iterations.total_count,
            limit=_limit,
            offset=_offset,
        ))
    return web.Response(
        text=page.json(**RESPONSE_MODEL_POLICY),
        content_type="application/json",
    )
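Examples #4, #5 and #7 all repeat the same int(request.query.get(...)) parsing, which raises an unhandled ValueError on non-integer input. A small shared helper could validate instead (a sketch; DEFAULT_NUMBER_OF_ITEMS_PER_PAGE is the constant already used above):

from typing import Tuple

from aiohttp import web


def get_page_query(request: web.Request) -> Tuple[int, int]:
    # Returns (limit, offset), replying 422 on malformed values instead
    # of letting a ValueError bubble up as a 500.
    try:
        limit = int(
            request.query.get("limit", DEFAULT_NUMBER_OF_ITEMS_PER_PAGE))
        offset = int(request.query.get("offset", 0))
    except ValueError as err:
        raise web.HTTPUnprocessableEntity(
            reason="limit and offset must be integers") from err
    if limit < 1 or offset < 0:
        raise web.HTTPUnprocessableEntity(
            reason="require limit >= 1 and offset >= 0")
    return limit, offset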
Example #8
def test_paginating_data(base_url):
    # create random data
    total_number_of_items = 29
    limit = 9
    data_chunk = list(range(limit))
    request_url = URL(f"{base_url}?some=1&random=4&query=true")

    number_of_chunks = total_number_of_items // limit + 1
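    # (total // limit + 1 counts chunks correctly here only because
    #  29 % 9 != 0; a divisible total would over-count by one)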
    last_chunk_size = total_number_of_items % limit
    last_chunk_offset = (number_of_chunks - 1) * len(data_chunk)

    # first "call"
    offset = 0
    data_obj: PageDict = paginate_data(
        data_chunk,
        total=total_number_of_items,
        limit=limit,
        offset=offset,
        request_url=request_url,
    )
    assert data_obj

    model_instance = Page[int].parse_obj(data_obj)
    assert model_instance
    assert model_instance.meta == PageMetaInfoLimitOffset(
        total=total_number_of_items,
        count=len(data_chunk),
        limit=limit,
        offset=offset)
    assert model_instance.links == PageLinks(
        self=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset={offset}&limit={limit}")),
        first=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset=0&limit={limit}")),
        prev=None,
        next=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset={offset+limit}&limit={limit}"
            )),
        last=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset={last_chunk_offset}&limit={limit}"
            )),
    )

    # next "call"s
    for _ in range(1, number_of_chunks - 1):
        offset += len(data_chunk)
        assert model_instance.links.next is not None

        data_obj: PageDict = paginate_data(
            data_chunk,
            request_url=URL(model_instance.links.next),
            total=total_number_of_items,
            limit=limit,
            offset=offset,
        )

        model_instance = Page[int].parse_obj(data_obj)
        assert model_instance
        assert model_instance.meta == PageMetaInfoLimitOffset(
            total=total_number_of_items,
            count=len(data_chunk),
            limit=limit,
            offset=offset,
        )
        assert model_instance.links == PageLinks(
            self=str(
                URL(base_url).with_query(
                    f"some=1&random=4&query=true&offset={offset}&limit={limit}"
                )),
            first=str(
                URL(base_url).with_query(
                    f"some=1&random=4&query=true&offset=0&limit={limit}")),
            prev=str(
                URL(base_url).with_query(
                    f"some=1&random=4&query=true&offset={offset-limit}&limit={limit}"
                )),
            next=str(
                URL(base_url).with_query(
                    f"some=1&random=4&query=true&offset={offset+limit}&limit={limit}"
                )),
            last=str(
                URL(base_url).with_query(
                    f"some=1&random=4&query=true&offset={last_chunk_offset}&limit={limit}"
                )),
        )

    # last "call"
    #
    offset += len(data_chunk)
    data_chunk = data_chunk[:last_chunk_size]

    assert offset == last_chunk_offset

    assert model_instance.links.next is not None
    data_obj: PageDict = paginate_data(
        data_chunk,
        request_url=URL(model_instance.links.next),
        total=total_number_of_items,
        limit=limit,
        offset=offset,
    )
    assert data_obj

    model_instance = Page[int].parse_obj(data_obj)
    assert model_instance

    assert model_instance.meta == PageMetaInfoLimitOffset(
        total=total_number_of_items,
        count=len(data_chunk),
        limit=limit,
        offset=offset,
    )
    assert model_instance.links == PageLinks(
        self=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset={offset}&limit={limit}")),
        first=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset=0&limit={limit}")),
        prev=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset={last_chunk_offset - limit}&limit={limit}"
            )),
        next=None,
        last=str(
            URL(base_url).with_query(
                f"some=1&random=4&query=true&offset={last_chunk_offset}&limit={limit}"
            )),
    )
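Worked through: with total_number_of_items = 29 and limit = 9 the test walks 29 // 9 + 1 = 4 pages. The first call is at offset 0, the loop issues two more calls at offsets 9 and 18, and the final call lands at last_chunk_offset = 3 * 9 = 27 with 29 % 9 = 2 remaining items, which is why prev points at offset 27 - 9 = 18 and next is None on the last page.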