def test_list_root(self, mock_assets_response):
    """List assets filtered by root ids and verify the gzipped request payload."""
    ASSETS_API.list(root_ids=[{"id": 1}, {"externalId": "abc"}], limit=10)
    recorded = mock_assets_response.calls
    assert len(recorded) == 1
    expected_body = {
        "cursor": None,
        "limit": 10,
        "filter": {"rootIds": [{"id": 1}, {"externalId": "abc"}]},
    }
    assert jsgz_load(recorded[0].request.body) == expected_body
def test_upload_using_file_handle(self, mock_file_upload_response):
    """Upload from an open text file handle; check both the metadata POST and the binary PUT."""
    file_path = os.path.join(
        os.path.dirname(__file__), "files_for_test_upload", "file_for_test_upload_1.txt"
    )
    with open(file_path) as handle:
        uploaded = FILES_API.upload_bytes(handle, name="bla")
    metadata_body = mock_file_upload_response.calls[0].response.json()
    del metadata_body["uploadUrl"]
    assert uploaded == FileMetadata._load(metadata_body)
    assert mock_file_upload_response.calls[1].request.url == "https://upload.here/"
    assert jsgz_load(mock_file_upload_response.calls[0].request.body) == {"name": "bla"}
    # The second call streams the handle itself, so the body is the text wrapper.
    assert isinstance(mock_file_upload_response.calls[1].request.body, TextIOWrapper)
def test_list_root_ids_list(self, mock_events_response):
    """Numeric root asset ids are wrapped as {"id": ...} objects in the filter."""
    EVENTS_API.list(root_asset_ids=[1, 2], limit=10)
    recorded = mock_events_response.calls
    assert len(recorded) == 1
    expected_body = {
        "cursor": None,
        "limit": 10,
        "filter": {"rootAssetIds": [{"id": 1}, {"id": 2}]},
    }
    assert jsgz_load(recorded[0].request.body) == expected_body
def test_list_root_extids_list(self, mock_events_response):
    """String root asset external ids are wrapped as {"externalId": ...} objects."""
    EVENTS_API.list(root_asset_external_ids=["1", "2"], limit=10)
    recorded = mock_events_response.calls
    assert len(recorded) == 1
    expected_body = {
        "cursor": None,
        "limit": 10,
        "filter": {"rootAssetIds": [{"externalId": "1"}, {"externalId": "2"}]},
    }
    assert jsgz_load(recorded[0].request.body) == expected_body
def test_search_dict_filter(self, mock_seq_response, filter_field):
    """A plain-dict search filter is camelized into the request body."""
    found = SEQ_API.search(filter={filter_field: True})
    assert found.dump(camel_case=True) == mock_seq_response.calls[0].response.json()["items"]
    expected_body = {
        "search": {"name": None, "description": None, "query": None},
        "filter": {"isString": True},
        "limit": None,
    }
    assert jsgz_load(mock_seq_response.calls[0].request.body) == expected_body
def test_many_sources_only(self, mock_rel_response):
    """Listing with many sources is chunked into multiple requests.

    Calling the API object directly (generator interface) with too many
    sources raises ValueError; ``list`` with ``limit=-1`` splits the 2500
    sources across three requests, none of which carry a "targets" filter.
    """
    sources = [{"resource": "Asset", "resourceId": str(i)} for i in range(2500)]
    with pytest.raises(ValueError):
        REL_API(sources=sources)
    res = REL_API.list(sources=sources, limit=-1)
    assert len(mock_rel_response.calls) == 3
    assert isinstance(res, RelationshipList)
    assert len(res) == 3
    requested_sources = []
    for call in mock_rel_response.calls:
        # Renamed from `json` to avoid shadowing the json module.
        body = jsgz_load(call.request.body)
        assert "targets" not in body["filter"]
        requested_sources.extend(s["resourceId"] for s in body["filter"]["sources"])
    # Every requested source id must come from the original list (and vice versa).
    assert {s["resourceId"] for s in sources} == set(requested_sources)
def test_search_with_filter(self, mock_seq_response):
    """Search terms and a SequenceFilter object are both serialized into the body."""
    found = SEQ_API.search(
        name="n",
        description="d",
        query="q",
        filter=SequenceFilter(last_updated_time={"max": 42}),
    )
    assert found.dump(camel_case=True) == mock_seq_response.calls[0].response.json()["items"]
    sent = jsgz_load(mock_seq_response.calls[0].request.body)
    assert sent["filter"]["lastUpdatedTime"]["max"] == 42
    assert sent["search"] == {"name": "n", "description": "d", "query": "q"}
def test_search(self, mock_seq_response):
    """A SequenceFilter with only external_id_prefix produces a minimal search body."""
    found = SEQ_API.search(filter=SequenceFilter(external_id_prefix="e"))
    assert found.dump(camel_case=True) == mock_seq_response.calls[0].response.json()["items"]
    expected_body = {
        "search": {"name": None, "description": None, "query": None},
        "filter": {"externalIdPrefix": "e"},
        "limit": None,
    }
    assert jsgz_load(mock_seq_response.calls[0].request.body) == expected_body
def test_download(self, mock_file_download_response):
    """Download by id and external id into a directory.

    Verifies the request payload, that download returns None, and that both
    files land on disk with the mocked contents.
    """
    # Renamed from `dir` to avoid shadowing the builtin.
    with TemporaryDirectory() as tmp_dir:
        res = FILES_API.download(directory=tmp_dir, id=[1], external_id=["2"])
        assert {"items": [{"id": 1}, {"externalId": "2"}]} == jsgz_load(
            mock_file_download_response.calls[0].request.body
        )
        assert res is None
        fp1 = os.path.join(tmp_dir, "file1")
        fp2 = os.path.join(tmp_dir, "file2")
        assert os.path.isfile(fp1)
        assert os.path.isfile(fp2)
        with open(fp1, "rb") as fh:
            assert b"content1" == fh.read()
        with open(fp2, "rb") as fh:
            assert b"content2" == fh.read()
def test_upload_from_directory(self, mock_file_upload_response):
    """Uploading a directory produces one metadata POST plus one binary upload per file."""
    upload_dir = os.path.join(os.path.dirname(__file__), "files_for_test_upload")
    result = FILES_API.upload(path=upload_dir)
    meta_body = mock_file_upload_response.calls[0].response.json()
    del meta_body["uploadUrl"]
    expected = FileMetadataList([FileMetadata._load(meta_body), FileMetadata._load(meta_body)])
    assert result == expected
    assert len(mock_file_upload_response.calls) == 4
    valid_names = ("file_for_test_upload_1.txt", "file_for_test_upload_2.txt")
    for call in mock_file_upload_response.calls:
        payload = call.request.body
        # Binary uploads are streamed readers; the other calls carry gzipped JSON metadata.
        if isinstance(payload, BufferedReader):
            continue
        if jsgz_load(payload)["name"] not in valid_names:
            raise AssertionError("incorrect payload: {}".format(payload))
def test_search(self, mock_ts_response):
    """A TimeSeriesFilter serializes into the camelized filter of the search body."""
    found = TS_API.search(filter=TimeSeriesFilter(is_string=True))
    assert found.dump(camel_case=True) == mock_ts_response.calls[0].response.json()["items"]
    expected_body = {
        "search": {"name": None, "description": None, "query": None},
        "filter": {"isString": True},
        "limit": None,
    }
    assert jsgz_load(mock_ts_response.calls[0].request.body) == expected_body
def download_link_callback(request):
    """Responses callback: id lookups get a download link, externalId lookups get a 400."""
    identifier = jsgz_load(request.body)["items"][0]
    if "id" in identifier:
        ok_body = json.dumps(
            {"items": [{"id": 1, "downloadUrl": "https://download.file1.here"}]}
        )
        return 200, {}, ok_body
    if "externalId" in identifier:
        error_body = json.dumps({"error": {"message": "User error", "code": 400}})
        return 400, {}, error_body
def test_call_root(self, mock_events_response):
    """Iterating the API with root/subtree filters issues exactly one request."""
    list(
        EVENTS_API(
            root_asset_ids=[23],
            root_asset_external_ids=["a", "b"],
            asset_subtree_external_ids=["a"],
            limit=10,
        )
    )
    recorded = mock_events_response.calls
    assert len(recorded) == 1
    expected_body = {
        "cursor": None,
        "limit": 10,
        "filter": {
            "rootAssetIds": [{"id": 23}, {"externalId": "a"}, {"externalId": "b"}],
            "assetSubtreeIds": [{"externalId": "a"}],
        },
    }
    assert jsgz_load(recorded[0].request.body) == expected_body
def test_insert_tuple(self, mock_post_sequence_data):
    """Rows given as (row_number, values) tuples are expanded into row objects."""
    rows = [(n, [2 * n]) for n in range(1, 11)]
    SEQ_API.data.insert(column_external_ids=["col0"], rows=rows, external_id="eid")
    expected_rows = [{"rowNumber": n, "values": [2 * n]} for n in range(1, 11)]
    expected_body = {
        "items": [{"externalId": "eid", "columns": ["col0"], "rows": expected_rows}]
    }
    assert jsgz_load(mock_post_sequence_data.calls[0].request.body) == expected_body
def test_create_schedule_with_data_spec_objects(self, mock_post_schedule, mock_data_spec):
    """A schedule created from a data-spec object dumps the spec into the request."""
    schedule = SCHEDULES_API.create_schedule(
        model_name="model1",
        schedule_name="test-schedule",
        schedule_data_spec=mock_data_spec,
        args={"k": "v"},
        metadata={"k": "v"},
    )
    assert isinstance(schedule, Schedule)
    assert schedule.name == "test-schedule"
    sent = jsgz_load(mock_post_schedule.calls[0].request.body)
    assert sent["dataSpec"] == {"spec": "spec"}
def test_download_ids_over_limit(self, mock_file_download_response):
    """Download with a request limit of 1 splits ids into separate requests.

    Both single-item payloads must have been sent (in either order) and both
    files must exist on disk.
    """
    with set_request_limit(FILES_API, 1):
        # Renamed from `dir` to avoid shadowing the builtin.
        with TemporaryDirectory() as tmp_dir:
            res = FILES_API.download(directory=tmp_dir, id=[1], external_id=["2"])
            bodies = [
                jsgz_load(mock_file_download_response.calls[i].request.body)
                for i in range(2)
            ]
            assert {"items": [{"id": 1}]} in bodies
            assert {"items": [{"externalId": "2"}]} in bodies
            assert res is None
            assert os.path.isfile(os.path.join(tmp_dir, "file1"))
            assert os.path.isfile(os.path.join(tmp_dir, "file2"))
def test_insert_multiple_DTO(self, mock_raw_row_response):
    """Inserting a list of Row DTOs posts their key/columns payloads."""
    outcome = RAW_API.rows.insert(
        "db1", "table1", row=[Row(key="row1", columns={"c1": 1, "c2": "2"})]
    )
    assert outcome is None
    sent_items = jsgz_load(mock_raw_row_response.calls[0].request.body)["items"]
    assert sent_items == [{"key": "row1", "columns": {"c1": 1, "c2": "2"}}]
def test_delete_multiple_id_and_multiple_external_id(self, mock_functions_delete_response):
    """Deleting by mixed ids and external ids sends one combined items list."""
    FUNCTIONS_API.delete(id=[1, 2, 3], external_id=["func1", "func2"])
    expected_body = {
        "items": [
            {"id": 1},
            {"id": 2},
            {"id": 3},
            {"externalId": "func1"},
            {"externalId": "func2"},
        ]
    }
    assert jsgz_load(mock_functions_delete_response.calls[0].request.body) == expected_body
def test_insert_w_rows_as_dict(self, mock_raw_row_response):
    """Rows given as a {key: columns} dict are flattened into key/columns items."""
    outcome = RAW_API.rows.insert(
        db_name="db1",
        table_name="table1",
        row={"row1": {"c1": 1, "c2": "2"}},
        ensure_parent=True,
    )
    assert outcome is None
    sent_items = jsgz_load(mock_raw_row_response.calls[0].request.body)["items"]
    assert sent_items == [{"key": "row1", "columns": {"c1": 1, "c2": "2"}}]
def test_list_with_filters(self, mock_ts_response):
    """Listing with every supported filter camelizes and wraps ids correctly."""
    listed = TS_API.list(
        is_step=True,
        is_string=False,
        metadata={"a": "b"},
        last_updated_time={"min": 45},
        created_time={"max": 123},
        asset_ids=[1, 2],
        asset_external_ids=["aeid"],
        data_set_ids=[1, 2],
        data_set_external_ids=["x"],
        root_asset_ids=[1231],
        include_metadata=False,
        asset_subtree_ids=[1],
        asset_subtree_external_ids=["a"],
    )
    assert listed.dump(camel_case=True) == mock_ts_response.calls[0].response.json()["items"]
    expected_filter = {
        "isString": False,
        "isStep": True,
        "metadata": {"a": "b"},
        "assetIds": [1, 2],
        "assetExternalIds": ["aeid"],
        "assetSubtreeIds": [{"id": 1}, {"externalId": "a"}],
        "dataSetIds": [{"id": 1}, {"id": 2}, {"externalId": "x"}],
        "rootAssetIds": [1231],
        "createdTime": {"max": 123},
        "lastUpdatedTime": {"min": 45},
    }
    assert jsgz_load(mock_ts_response.calls[0].request.body)["filter"] == expected_filter
def test_insert_single_DTO(self, mock_raw_row_response):
    """A single Row DTO is wrapped into a one-item list in the payload."""
    outcome = RAW_API.rows.insert(
        db_name="db1",
        table_name="table1",
        row=Row(key="row1", columns={"c1": 1, "c2": "2"}),
        ensure_parent=False,
    )
    assert outcome is None
    sent_items = jsgz_load(mock_raw_row_response.calls[0].request.body)["items"]
    assert sent_items == [{"key": "row1", "columns": {"c1": 1, "c2": "2"}}]
def test_suggest_fields(self, mock_suggest_ok):
    """suggest_fields forwards sources/targets and the default score threshold."""
    suggestions = EMAPI.suggest_fields(sources=[{"name": "a"}], targets=[{"name": "b"}])
    expected_body = {
        "sources": [{"name": "a"}],
        "targets": [{"name": "b"}],
        "scoreThreshold": 0.5,
    }
    assert jsgz_load(mock_suggest_ok.calls[0].request.body) == expected_body
    assert isinstance(suggestions, list)
def test_fit_cognite_resource(self, mock_fit):
    """fit dumps Cognite resources and converts true-match tuples to id pairs."""
    match_from = [TimeSeries(id=1, name="x")]
    match_to = [Asset(id=1, external_id="abc", name="x")]
    EMAPI.fit(
        match_from=match_from,
        match_to=match_to,
        true_matches=[(1, "abc")],
        feature_type="bigram",
    )
    expected_body = {
        "matchFrom": [match_from[0].dump(camel_case=True)],
        "matchTo": [match_to[0].dump(camel_case=True)],
        "trueMatches": [{"fromId": 1, "toExternalId": "abc"}],
        "featureType": "bigram",
        "ignoreMissingFields": False,
    }
    assert jsgz_load(mock_fit.calls[0].request.body) == expected_body
def test_update_single_with_update_class(self, mock_int_response):
    """Updating via an ExtractionPipelineUpdate serializes the set-operation payload."""
    update = ExtractionPipelineUpdate(external_id="py test id")
    update.description.set("New description")
    result = TEST_API.update(update)
    assert isinstance(result, ExtractionPipeline)
    assert result.dump(camel_case=True) == mock_int_response.calls[0].response.json()["items"][0]
    expected_body = {
        "items": [
            {
                "externalId": "py test id",
                "update": {"description": {"set": "New description"}},
            }
        ]
    }
    assert jsgz_load(mock_int_response.calls[0].request.body) == expected_body
def test_upload_artifacts_from_directory(self, mock_upload_artifact):
    """Artifacts are uploaded recursively; JSON bodies name files, raw bodies carry content."""
    artifacts_dir = os.path.join(
        os.path.dirname(__file__), "source_package_for_tests/artifacts"
    )
    VERSIONS_API.upload_artifacts_from_directory(
        model_name="model1", version_name="version1", directory=artifacts_dir
    )
    expected_names = [
        {"name": "artifact1.txt"},
        {"name": os.path.join("sub_dir", "artifact2.txt")},
    ]
    for call in mock_upload_artifact.calls:
        try:
            # Raw file bodies are not gzipped JSON, so jsgz_load raises OSError for them.
            assert jsgz_load(call.request.body) in expected_names
        except OSError:
            assert call.request.body in [b"content\n", b"content\r\n"]
def test_create_and_deploy_model_version(self, mock_create_and_deploy_model_version):
    """Deploying a version creates it, uploads artifacts, then posts the deploy call."""
    artifacts_dir = os.path.join(
        os.path.dirname(__file__), "source_package_for_tests/artifacts"
    )
    model_version = MODELS_API.deploy_model_version(
        model_id=1, name="mymodel", source_package_id=1, artifacts_directory=artifacts_dir
    )
    recorded = mock_create_and_deploy_model_version.calls
    assert model_version.id == 1
    expected_create = {
        "description": "",
        "metadata": {},
        "name": "mymodel",
        "sourcePackageId": 1,
    }
    assert jsgz_load(recorded[0].request.body) == expected_create
    expected_names = [
        {"name": "artifact1.txt"},
        {"name": os.path.join("sub_dir", "artifact2.txt")},
    ]
    for call in recorded[1:5]:
        try:
            # Raw file bodies are not gzipped JSON, so jsgz_load raises OSError for them.
            assert jsgz_load(call.request.body) in expected_names
        except OSError:
            assert call.request.body in [b"content\n", b"content\r\n"]
    # The final deploy call carries an empty JSON object.
    assert recorded[5].request.body == b"{}"
def test_list_subtree(self, mock_assets_response):
    """Subtree ids and external ids merge into one assetSubtreeIds filter."""
    ASSETS_API.list(asset_subtree_ids=1, asset_subtree_external_ids=["a"], limit=10)
    recorded = mock_assets_response.calls
    assert len(recorded) == 1
    expected_body = {
        "cursor": None,
        "limit": 10,
        "filter": {"assetSubtreeIds": [{"id": 1}, {"externalId": "a"}]},
    }
    assert jsgz_load(recorded[0].request.body) == expected_body
def test_insert_dataframe_extids(self, mock_post_sequence_data):
    """Inserting a DataFrame uses its index as row numbers and headers as columns."""
    import pandas as pd

    frame = pd.DataFrame(index=[123, 456])
    frame["aa"] = [1, 2]
    frame["bb"] = [5.0, 6.0]
    outcome = SEQ_API.data.insert_dataframe(frame, id=42, external_id_headers=True)
    assert outcome is None
    expected_body = {
        "items": [
            {
                "id": 42,
                "columns": ["aa", "bb"],
                "rows": [
                    {"rowNumber": 123, "values": [1, 5.0]},
                    {"rowNumber": 456, "values": [2, 6.0]},
                ],
            }
        ]
    }
    assert jsgz_load(mock_post_sequence_data.calls[0].request.body) == expected_body
def test_source_target_packing(self, mock_rel_response):
    """Scalar source/target fields are packed into resource objects in the filter."""
    listed = REL_API.list(
        source_resource="asset",
        source_resource_id="bla",
        target_resource="timeseries",
        target_resource_id="foo",
        relationship_type="belongs_to",
    )
    expected_body = {
        "filter": {
            "sources": [{"resource": "asset", "resourceId": "bla"}],
            "targets": [{"resource": "timeseries", "resourceId": "foo"}],
            "relationshipTypes": ["belongs_to"],
        },
        "limit": 25,
        "cursor": None,
    }
    assert jsgz_load(mock_rel_response.calls[0].request.body) == expected_body
    assert listed.dump(camel_case=True) == mock_rel_response.calls[0].response.json()["items"]
def download_link_callback(request):
    """Responses callback: return the download URL matching the requested identifier."""
    identifier = jsgz_load(request.body)["items"][0]
    payload = {}
    if "id" in identifier:
        payload = {"items": [{"id": 1, "downloadUrl": "https://download.file1.here"}]}
    elif "externalId" in identifier:
        payload = {"items": [{"externalId": "2", "downloadUrl": "https://download.file2.here"}]}
    return 200, {}, json.dumps(payload)