def create_asset_hierarchy():
    """Create the Tesla demo asset hierarchy in CDF.

    Builds one root asset plus four category children and posts them via
    ``create_hierarchy`` so the parent/child links are resolved server side.
    """
    assets = [Asset(name="Tesla Model 3", external_id="tesla")]
    for category in ("Climate", "Charge", "Drive", "Vehicle"):
        assets.append(
            Asset(
                name=category,
                description=f"{category} data",
                external_id=f"tesla_{category.lower()}",
                parent_external_id="tesla",
            )
        )
    client.assets.create_hierarchy(assets)
def test_init_empty_cdf(self):
    """CognitePusher init against an empty CDF creates both the time series and the root asset."""
    created_asset = Asset(id=123, external_id="asset", name="asset")
    asset_to_create = Asset(external_id="asset", name="asset")
    self.client.time_series.retrieve_multiple = Mock(
        side_effect=CogniteNotFoundError([{"externalId": "pre_gauge"}])
    )
    self.client.assets.create = Mock(return_value=created_asset)

    pusher = CognitePusher(self.client, external_id_prefix="pre_", asset=asset_to_create, push_interval=1)

    # Assert time series created
    # Hacky assert_called_once_with as the TimeSeries object is not the same obj, just equal content
    self.client.time_series.create.assert_called_once()
    print(self.client.time_series.create.call_args_list)
    expected_ts = TimeSeries(
        external_id="pre_gauge",
        name="gauge",
        legacy_name="pre_gauge",
        description="Test gauge",
        asset_id=123,
    )
    self.assertDictEqual(
        self.client.time_series.create.call_args_list[0][0][0][0].dump(),
        expected_ts.dump(),
    )
    # Assert asset created
    self.client.assets.create.assert_called_once_with(asset_to_create)
def test_to_pandas_nullable_int(self):
    """A parent_id column containing None must map to pandas' nullable Int64 dtype."""
    import pandas as pd

    assets = AssetList([Asset(parent_id=123), Asset(parent_id=None)])
    for camel_case in (False, True):
        first_dtype = assets.to_pandas(camel_case=camel_case).dtypes[0]
        assert first_dtype == pd.Int64Dtype()
def test_find_objects_to_delete_not_replicated_in_dst():
    """Assets lacking replication metadata should be flagged for deletion."""
    replicated = Asset(
        id=3,
        name="holy grenade",
        metadata={"_replicatedSource": "source_tenant", "_replicatedInternalId": 123},
    )
    empty_metadata = Asset(id=7, name="not holy grenade", metadata={})
    unrelated_metadata = Asset(id=5, name="in-holy grenade", metadata={"source": "None"})

    to_delete = find_objects_to_delete_not_replicated_in_dst(
        [replicated, empty_metadata, unrelated_metadata]
    )

    assert len(to_delete) == 2
    assert set(to_delete) == {5, 7}
    # An empty input must yield an empty result.
    assert find_objects_to_delete_not_replicated_in_dst([]) == []
def new_root_asset():
    """Fixture: create a uniquely named root asset, then delete its subtree on teardown."""
    external_id = f"my_root_{utils._auxiliary.random_string(10)}"
    yield COGNITE_CLIENT.assets.create(Asset(external_id=external_id, name="my_root"))
    COGNITE_CLIENT.assets.delete(external_id=external_id, recursive=True)
    # Verify the teardown actually removed the root.
    assert COGNITE_CLIENT.assets.retrieve(external_id=external_id) is None
def create_countries(client, locations):
    """Ensure a country asset (and a province child asset, when present) exists per location.

    Args:
        client: Cognite client used by ``retrieve_or_create_asset``.
        locations: Iterable of dicts with at least 'country' and 'province' keys.
    """
    for location in locations:
        country = location['country']
        province = location['province']
        country_external_id = country
        # Country assets hang directly off the fixed "covid19" root.
        # (Return values were bound to unused locals before; the calls are kept
        # for their create-if-missing side effect.)
        retrieve_or_create_asset(
            client,
            Asset(name=country, external_id=country_external_id, parent_external_id="covid19"),
        )
        if province != "":
            # Provinces are namespaced under their country to keep external ids unique.
            province_external_id = country_external_id + "_" + province
            retrieve_or_create_asset(
                client,
                Asset(
                    name=province,
                    external_id=province_external_id,
                    parent_external_id=country_external_id,
                ),
            )
def test_existing_mapping():
    """existing_mapping should map each asset's replicated source id to its destination id."""
    assets = [
        Asset(id=3, name="holy grenade", metadata={"_replicatedInternalId": 33}),
        Asset(id=7, name="not holy grenade", parent_id=3, metadata={"_replicatedInternalId": 77}),
        Asset(id=5, name="in-holy grenade", parent_id=7, metadata={"_replicatedInternalId": 55}),
    ]
    ids = existing_mapping(*assets)
    for asset in assets:
        assert ids[asset.metadata["_replicatedInternalId"]] == asset.id
def test_get_related_resources_should_not_return_duplicates(self, resource_class, resource_list_class, method):
    """Related-resource lookup across retrieval chunks must deduplicate overlapping results."""
    r1, r2, r3 = (resource_class(id=i) for i in (1, 2, 3))
    mock_cognite_client = mock.MagicMock()
    mock_method = getattr(mock_cognite_client, method)
    # Chunks 2 and 3 both return r2/r3; only one copy of each may survive.
    mock_method.list.side_effect = [
        resource_list_class([r1]),
        resource_list_class([r2, r3]),
        resource_list_class([r2, r3]),
    ]
    mock_method._config = mock.Mock(max_workers=3)

    assets = AssetList([Asset(id=1), Asset(id=2), Asset(id=3)], cognite_client=mock_cognite_client)
    assets._retrieve_chunk_size = 1

    resources = getattr(assets, method)()
    assert [r1, r2, r3] == resources
def generate_asset_tree(root_external_id: str, depth: int, children_per_node: int, current_depth=1):
    """Recursively build a synthetic asset tree rooted at ``root_external_id``.

    Child external ids are the parent's external id with the child index (0-9)
    appended, which is why ``children_per_node`` is capped at 10.
    """
    assert 1 <= children_per_node <= 10, "children_per_node must be between 1 and 10"
    # Only the top-level call contributes the root asset itself.
    assets = [Asset(external_id=root_external_id, name=root_external_id)] if current_depth == 1 else []
    if depth > current_depth:
        for child_index in range(children_per_node):
            child_external_id = f"{root_external_id}{child_index}"
            assets.append(
                Asset(
                    parent_external_id=root_external_id,
                    external_id=child_external_id,
                    name=child_external_id,
                )
            )
            if depth > current_depth + 1:
                assets.extend(
                    generate_asset_tree(child_external_id, depth, children_per_node, current_depth + 1)
                )
    return assets
def test_init_existing_all(self):
    """When the root asset already exists, CognitePusher retrieves it instead of failing."""
    existing_asset = Asset(id=123, external_id="assetid", name="asset")
    asset_arg = Asset(external_id="assetid", name="asset")
    self.client.assets.create = Mock(side_effect=CogniteDuplicatedError(["assetid"]))
    self.client.assets.retrieve = Mock(return_value=existing_asset)

    pusher = CognitePusher(self.client, external_id_prefix="pre_", asset=asset_arg, push_interval=1)

    # Assert time series created
    self.client.time_series.create.assert_not_called()
    # Assert asset created
    self.client.assets.create.assert_called_once_with(asset_arg)
    self.client.assets.retrieve.assert_called_once_with(external_id="assetid")
def test_fit_cognite_resource(self, mock_fit):
    """fit() should serialize CogniteResource entities and echo the request options."""
    source_entities = [TimeSeries(id=1, name="x")]
    target_entities = [Asset(id=1, name="x")]
    EMAPI.fit(
        match_from=source_entities,
        match_to=target_entities,
        true_matches=[(1, 2)],
        feature_type="bigram",
    )
    expected_body = {
        "matchFrom": [source_entities[0].dump()],
        "matchTo": [target_entities[0].dump()],
        "idField": "id",
        "trueMatches": [[1, 2]],
        "featureType": "bigram",
        "completeMissing": False,
    }
    assert expected_body == jsgz_load(mock_fit.calls[0].request.body)
def create_asset_and_timeseries(ext_id, name, symbol, asset_ext_id, root, client):
    """Retrieve (or create) the asset for ``symbol`` and attach a USD time series to it.

    Args:
        ext_id: External id for the new time series.
        name: Human-readable name; also used as the asset description.
        symbol: Ticker symbol, used as the asset name.
        asset_ext_id: External id of the asset to retrieve or create.
        root: Internal id of the parent (root) asset.
        client: Cognite client.

    Returns:
        The created TimeSeries.

    Raises:
        CogniteAPIError: Re-raised for any retrieve failure other than a 400.
    """
    try:
        asset = client.assets.retrieve(external_id=asset_ext_id)
    except CogniteAPIError as e:
        # Bug fix: non-400 errors were silently swallowed, leaving the result
        # as the initial [] and crashing later on `.id`. Propagate them instead.
        if e.code != 400:
            raise
        asset = None
    if asset is None:
        # Missing asset (400, or a None retrieve result): create it under the root.
        asset = client.assets.create(
            Asset(external_id=asset_ext_id, name=symbol, parent_id=root, description=name)
        )
        print(asset)
    ts = client.time_series.create(
        TimeSeries(external_id=ext_id, name=name, unit='USD', asset_id=asset.id)
    )
    return ts
def test_fit_cognite_resource(self, mock_fit):
    """fit() should dump entities in camelCase and expand true matches into from/to ids."""
    source_entities = [TimeSeries(id=1, name="x")]
    target_entities = [Asset(id=1, external_id="abc", name="x")]
    EMAPI.fit(
        match_from=source_entities,
        match_to=target_entities,
        true_matches=[(1, "abc")],
        feature_type="bigram",
    )
    expected_body = {
        "matchFrom": [source_entities[0].dump(camel_case=True)],
        "matchTo": [target_entities[0].dump(camel_case=True)],
        "trueMatches": [{"fromId": 1, "toExternalId": "abc"}],
        "featureType": "bigram",
        "ignoreMissingFields": False,
    }
    assert expected_body == jsgz_load(mock_fit.calls[0].request.body)
def test_update_asset_with_label(self, new_label):
    """Labels can be added to and removed from an asset via chained AssetUpdate calls."""
    created = COGNITE_CLIENT.assets.create(Asset(name="any", description="delete me"))
    assert not created.labels

    add_update = AssetUpdate(id=created.id).add_label(external_id=new_label.external_id)
    assert isinstance(add_update, AssetUpdate)
    updated = COGNITE_CLIENT.assets.update(add_update)
    assert len(updated.labels) == 1
    assert new_label.external_id == updated.labels[0]["externalId"]

    remove_update = AssetUpdate(id=created.id).remove_label(external_id=new_label.external_id)
    assert isinstance(remove_update, AssetUpdate)
    updated = COGNITE_CLIENT.assets.update(remove_update)
    assert not updated.labels

    COGNITE_CLIENT.assets.delete(id=created.id)
def start_pushers(self, cdf_client: CogniteClient, cancelation_token: Event = None) -> None:
    """Start a PrometheusPusher per configured push gateway, plus a CDF pusher when configured.

    Args:
        cdf_client: Client used by the Cognite pusher.
        cancelation_token: Event used to stop pusher threads. When omitted, a
            fresh Event is created per call. (Previously the default was
            ``Event()`` evaluated once at definition time — the mutable-default
            pitfall — so every call without an explicit token shared one Event.)
    """
    if cancelation_token is None:
        cancelation_token = Event()

    self._pushers: List[AbstractMetricsPusher] = []
    self._clear_on_stop: Dict[PrometheusPusher, int] = {}

    push_gateways = self.push_gateways or []
    for counter, push_gateway in enumerate(push_gateways):
        pusher = PrometheusPusher(
            job_name=push_gateway.job_name,
            username=push_gateway.username,
            password=push_gateway.password,
            url=push_gateway.host,
            push_interval=push_gateway.push_interval,
            thread_name=f"MetricsPusher_{counter}",
            cancelation_token=cancelation_token,
        )
        pusher.start()
        self._pushers.append(pusher)
        if push_gateway.clear_after is not None:
            # Remember gateways whose metrics should be wiped on shutdown.
            self._clear_on_stop[pusher] = push_gateway.clear_after

    if self.cognite:
        asset = None
        if self.cognite.asset_name is not None:
            asset = Asset(name=self.cognite.asset_name, external_id=self.cognite.asset_external_id)
        pusher = CognitePusher(
            cdf_client=cdf_client,
            external_id_prefix=self.cognite.external_id_prefix,
            push_interval=self.cognite.push_interval,
            asset=asset,
            thread_name="CogniteMetricsPusher",  # There is only one Cognite project as a target
            cancelation_token=cancelation_token,
        )
        pusher.start()
        self._pushers.append(pusher)
def create_assets(weather_stations: List[WeatherStation], config: WeatherConfig,
                  cdf: CogniteClient) -> Dict[WeatherStation, int]:
    """
    Create assets in CDF for all WeatherStation objects

    Args:
        weather_stations: List of weather stations
        config: Config parameters
        cdf: Cognite client

    Returns:
        Mapping from WeatherStation object to (internal) asset ID in CDF
    """
    assets = [
        Asset(
            external_id=f"{config.cognite.external_id_prefix}{station.id}",
            name=station.name,
            source="Frost",
            metadata={
                "longitude": str(station.longitude),
                "latitude": str(station.latitude),
                "station_id": station.id,
            },
        )
        for station in weather_stations
    ]

    # Todo: handle if (some) assets exists
    created_assets = cdf.assets.create(assets)

    station_to_asset_id = {}
    for asset in created_assets:
        # Match the created asset back to its station via the station_id metadata.
        matching_stations = [s for s in weather_stations if s.id == asset.metadata["station_id"]]
        station_to_asset_id[matching_stations[0]] = asset.id
    return station_to_asset_id
def start_pushers(self, cdf_client: CogniteClient) -> None:
    """Start a PrometheusPusher per configured push gateway, plus a CDF pusher when configured."""
    self._pushers: List[AbstractMetricsPusher] = []
    self._clear_on_stop: Dict[PrometheusPusher, int] = {}

    gateways = self.push_gateways or []
    for counter, push_gateway in enumerate(gateways):
        pusher = PrometheusPusher(
            job_name=push_gateway.job_name,
            username=push_gateway.username,
            password=push_gateway.password,
            url=push_gateway.host,
            push_interval=push_gateway.push_interval,
            thread_name=f"MetricsPusher_{counter}",
        )
        pusher.start()
        self._pushers.append(pusher)
        if push_gateway.clear_after is not None:
            # Remember gateways whose metrics should be wiped on shutdown.
            self._clear_on_stop[pusher] = push_gateway.clear_after

    if self.cognite:
        asset = (
            Asset(name=self.cognite.asset_name, external_id=self.cognite.asset_external_id)
            if self.cognite.asset_name is not None
            else None
        )
        pusher = CognitePusher(
            cdf_client=cdf_client,
            external_id_prefix=self.cognite.external_id_prefix,
            push_interval=self.cognite.push_interval,
            asset=asset,
            # len(gateways) equals the post-loop counter value in the original code.
            thread_name=f"MetricsPusher_{len(gateways)}",
        )
        pusher.start()
        self._pushers.append(pusher)
def test_find_objects_to_delete_if_not_in_src():
    """Only dst assets whose replicated internal id has no counterpart in src get deleted."""
    dst_assets = [
        Asset(
            id=3,
            name="holy grenade",
            metadata={"_replicatedSource": "source_tenant", "_replicatedInternalId": 3},
        ),
        # Replicated id 123 has no matching source asset -> should be deleted.
        Asset(id=13, name="unlucky holy grenade", metadata={"_replicatedInternalId": 123}),
        Asset(id=7, name="not holy grenade", metadata={}),
        Asset(id=5, name="in-holy grenade", metadata={"_replicatedInternalId": 5}),
    ]
    src_assets = [
        Asset(id=3, name="holy grenade", metadata={}),
        Asset(id=5, name="in-holy grenade", metadata={}),
    ]

    to_delete = find_objects_to_delete_if_not_in_src(src_assets, dst_assets)

    assert len(to_delete) == 1
    assert to_delete[0] == 13
    # Empty source and destination yield nothing to delete.
    assert find_objects_to_delete_if_not_in_src([], []) == []
def test_delete_with_nonexisting(self):
    """delete with ignore_unknown_ids=True must succeed despite a missing external id."""
    created = COGNITE_CLIENT.assets.create(Asset(name="any"))
    COGNITE_CLIENT.assets.delete(
        id=created.id,
        external_id="this asset does not exist",
        ignore_unknown_ids=True,
    )
    # The existing asset must still have been deleted.
    assert COGNITE_CLIENT.assets.retrieve(id=created.id) is None
def new_asset():
    """Fixture: create a throwaway asset, yield it, and verify deletion on teardown."""
    asset = COGNITE_CLIENT.assets.create(Asset(name="any"))
    yield asset
    COGNITE_CLIENT.assets.delete(id=asset.id)
    assert COGNITE_CLIENT.assets.retrieve(asset.id) is None
def test_get_files(self):
    """AssetList.files() should list files for the contained asset ids with no limit."""
    c.files.list = mock.MagicMock()
    asset_list = AssetList(resources=[Asset(id=1)], cognite_client=c)
    asset_list.files()
    assert call(asset_ids=[1], limit=-1) == c.files.list.call_args
    assert 1 == c.files.list.call_count
def test_get_subtree(self):
    """Asset.subtree() should delegate to assets.retrieve_subtree with its own id."""
    c.assets.retrieve_subtree = mock.MagicMock()
    asset = Asset(id=1, cognite_client=c)
    asset.subtree(depth=1)
    assert call(id=1, depth=1) == c.assets.retrieve_subtree.call_args
    assert 1 == c.assets.retrieve_subtree.call_count
def test_get_children(self):
    """Asset.children() should list assets whose parent id is this asset's id."""
    c.assets.list = mock.MagicMock()
    asset = Asset(id=1, cognite_client=c)
    asset.children()
    assert call(parent_ids=[1], limit=None) == c.assets.list.call_args
    assert 1 == c.assets.list.call_count
def test_get_parent(self):
    """Asset.parent() should retrieve the asset identified by parent_id."""
    c.assets.retrieve = mock.MagicMock()
    asset = Asset(parent_id=1, cognite_client=c)
    asset.parent()
    assert call(id=1) == c.assets.retrieve.call_args
    assert 1 == c.assets.retrieve.call_count
def test_get_files(self):
    """Asset.files() should list the files linked to this asset's id."""
    c.files.list = mock.MagicMock()
    asset = Asset(id=1, cognite_client=c)
    asset.files()
    assert call(asset_ids=[1]) == c.files.list.call_args
    assert 1 == c.files.list.call_count
def test_create_asset_with_label(self, new_label):
    """Creating an asset with a label should round-trip the label external id."""
    expected_labels = [{"externalId": new_label.external_id}]
    created = COGNITE_CLIENT.assets.create(Asset(name="any", labels=expected_labels))
    assert isinstance(created, Asset)
    assert expected_labels == created.labels
    COGNITE_CLIENT.assets.delete(id=created.id)
def test_make_id_object_map():
    """Only assets carrying _replicatedInternalId end up in the map, keyed by that id."""
    with_replicated_id = Asset(id=3, metadata={"_replicatedInternalId": 55})
    without_metadata = Asset(id=2)
    mapping = make_id_object_map([with_replicated_id, without_metadata])
    assert len(mapping) == 1
    assert mapping[55].id == 3