def test_all_in_cdf(self):
    time_series = [TimeSeries(external_id="a"), TimeSeries(external_id="b")]

    ensure_time_series(self.client, time_series)

    self.client.time_series.create.assert_not_called()

def update_time_series(
    src_ts: TimeSeries, dst_ts: TimeSeries, src_dst_ids_assets: Dict[int, int], project_src: str, runtime: int
) -> TimeSeries:
    """
    Makes an updated version of the destination time series based on the corresponding source time series.

    Args:
        src_ts: The time series from the source to be replicated.
        dst_ts: The time series from the destination that needs to be updated to reflect changes made to its
            source time series.
        src_dst_ids_assets: A dictionary of all the mappings of source asset id to destination asset id.
        project_src: The name of the project the object is being replicated from.
        runtime: The timestamp to be used in the new replicated metadata.

    Returns:
        The updated time series object for the replication destination.
    """
    logging.debug(f"Updating existing time series {dst_ts.id} based on source time series id {src_ts.id}")

    dst_ts.external_id = src_ts.external_id
    dst_ts.name = src_ts.name
    dst_ts.is_string = src_ts.is_string
    dst_ts.metadata = replication.new_metadata(src_ts, project_src, runtime)
    dst_ts.unit = src_ts.unit
    dst_ts.asset_id = replication.get_asset_ids([src_ts.asset_id], src_dst_ids_assets)[0] if src_ts.asset_id else None
    dst_ts.is_step = src_ts.is_step
    dst_ts.description = src_ts.description
    dst_ts.security_categories = src_ts.security_categories
    return dst_ts

def test_some_in_cdf(self):
    existing = [TimeSeries(external_id="a")]
    new = [TimeSeries(external_id="b")]

    self.client.time_series.retrieve_multiple = Mock(
        side_effect=CogniteNotFoundError([{"externalId": ts.external_id} for ts in new])
    )

    ensure_time_series(self.client, existing + new)

    self.client.time_series.create.assert_called_once_with(new)

def test_time_series_upload_queue1(self):
    created = self.client.time_series.create(
        [TimeSeries(external_id=self.time_series1), TimeSeries(external_id=self.time_series2, is_string=True)]
    )

    last_point = {"timestamp": 0}

    def store_latest(points):
        last_point["timestamp"] = max(last_point["timestamp"], *[ts["datapoints"][-1][0] for ts in points])

    queue = TimeSeriesUploadQueue(cdf_client=self.client, post_upload_function=store_latest, max_upload_interval=1)
    queue.start()

    # Create some synthetic data
    now = int(datetime.now(tz=timezone.utc).timestamp() * 1000)
    points1_1 = [(now + i * 107, random.randint(0, 10)) for i in range(10)]
    points1_2 = [(now + i * 107, random.randint(0, 10)) for i in range(10, 100)]
    points2 = [(now + i * 93, chr(97 + i)) for i in range(26)]

    queue.add_to_upload_queue(external_id=self.time_series1, datapoints=points1_1)
    queue.add_to_upload_queue(external_id=self.time_series1, datapoints=points1_2)
    queue.add_to_upload_queue(id=created[1].id, datapoints=points2)

    time.sleep(30)

    recv_points1 = self.client.datapoints.retrieve(
        external_id=self.time_series1, start="1w-ago", end="now", limit=None
    )
    recv_points2 = self.client.datapoints.retrieve(
        external_id=self.time_series2, start="1w-ago", end="now", limit=None
    )

    self.assertListEqual([int(p) for p in recv_points1.value], [p[1] for p in points1_1 + points1_2])
    self.assertListEqual(recv_points2.value, [p[1] for p in points2])
    self.assertEqual(last_point["timestamp"], points1_2[-1][0])

    queue.stop()

def test_nothing_in_cdf(self):
    time_series = [TimeSeries(external_id="a"), TimeSeries(external_id="b")]

    self.client.time_series.retrieve_multiple = Mock(
        side_effect=CogniteNotFoundError([{"externalId": ts.external_id} for ts in time_series])
    )

    ensure_time_series(self.client, time_series)

    self.client.time_series.create.assert_called_once_with(time_series)

def test_filter_objects():
    time_series = [TimeSeries(id=1, asset_id=100), TimeSeries(id=2), TimeSeries(id=3, asset_id=101)]
    events = [Event(id=10, asset_ids=[100, 101]), Event(id=11), Event(id=12, asset_ids=[101])]
    src_dst_asset_id_map = {100: 1000}

    dummy_filtered_events = filter_objects(events, src_dst_asset_id_map)
    dummy_filtered_ts = filter_objects(time_series, src_dst_asset_id_map)
    assert dummy_filtered_events == events
    assert dummy_filtered_ts == time_series

    asset_events = filter_objects(events, src_dst_asset_id_map, skip_nonasset=True)
    asset_ts = filter_objects(time_series, src_dst_asset_id_map, skip_nonasset=True)
    assert len(asset_events) == 2
    assert len(asset_ts) == 2
    for i in range(len(asset_ts)):
        assert asset_ts[i].asset_id is not None
        assert asset_events[i].asset_ids is not None

    linkable_events = filter_objects(events, src_dst_asset_id_map, skip_nonasset=True, skip_unlinkable=True)
    linkable_ts = filter_objects(time_series, src_dst_asset_id_map, skip_nonasset=True, skip_unlinkable=True)
    assert len(linkable_events) == 1
    assert len(linkable_ts) == 1
    assert linkable_events[0] == events[0]
    assert linkable_ts[0] == time_series[0]

    odd_id_events = filter_objects(events, src_dst_asset_id_map, filter_fn=lambda x: x.id % 2 == 1)
    assert len(odd_id_events) == 1
    for event in odd_id_events:
        assert event.id % 2 == 1

def list_time_series(
    weather_stations: List[WeatherStation], config: WeatherConfig, assets: Optional[Dict[WeatherStation, int]]
) -> List[TimeSeries]:
    """
    Create TimeSeries objects (without creating them in CDF) for all the sensors at all the weather stations
    configured.

    Args:
        weather_stations: List of weather stations to track
        config: Configuration parameters, among other things containing the list of elements to track
        assets: (Optional) Dictionary from WeatherStation object to asset ID. If configured to create assets,
            the time series will be associated with an asset ID.

    Returns:
        List of TimeSeries objects
    """
    time_series = []

    for weather_station in weather_stations:
        for element in config.frost.elements:
            external_id = create_external_id(config.cognite.external_id_prefix, weather_station, element)

            args = {
                "external_id": external_id,
                "legacy_name": external_id,
                "name": f"{weather_station.name}: {element.replace('_', ' ')}",
            }

            if config.extractor.create_assets:
                args["asset_id"] = assets[weather_station]

            time_series.append(TimeSeries(**args))

    return time_series

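# Hedged usage sketch (assumption, not part of the source): creating the TimeSeries objects returned by
# list_time_series in CDF. `client`, `stations`, `config` and `station_assets` are hypothetical placeholders
# for an already-configured CogniteClient, the tracked WeatherStation list, the WeatherConfig and the
# WeatherStation -> asset ID mapping. ensure_time_series (used by the tests in this section) only creates
# the series that are missing, whereas calling client.time_series.create directly would raise on duplicates.
def _sketch_create_weather_time_series(client, stations, config, station_assets):
    time_series = list_time_series(stations, config, station_assets)
    ensure_time_series(client, time_series)
    return time_series
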
def _init_cdf(self) -> None:
    """
    Initialize the CDF tenant with the necessary time series and asset.
    """
    time_series: List[TimeSeries] = []

    if self.asset is not None:
        # Ensure that the asset exists, and retrieve its internal ID
        try:
            asset = self.cdf_client.assets.create(self.asset)
        except CogniteDuplicatedError:
            asset = self.cdf_client.assets.retrieve(external_id=self.asset.external_id)

        asset_id = asset.id if asset is not None else None
    else:
        asset_id = None

    for metric in REGISTRY.collect():
        if type(metric) == Metric and metric.type in ["gauge", "counter"]:
            external_id = self.external_id_prefix + metric.name

            time_series.append(
                TimeSeries(
                    external_id=external_id,
                    name=metric.name,
                    legacy_name=external_id,
                    description=metric.documentation,
                    asset_id=asset_id,
                )
            )

    ensure_time_series(self.cdf_client, time_series)

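# Hedged usage sketch (assumption, not part of the source): how _init_cdf is typically triggered. Based on
# test_init_empty_cdf further down, constructing a CognitePusher is assumed to run the CDF initialization;
# the metric, prefix and asset names here are hypothetical.
from prometheus_client import Gauge

def _sketch_setup_metrics_pusher(cdf_client):
    example_gauge = Gauge("example_gauge", "Example gauge pushed to CDF")  # registers in the default REGISTRY
    pusher = CognitePusher(
        cdf_client,
        external_id_prefix="example_extractor_",
        asset=Asset(external_id="example_extractor", name="example_extractor"),
        push_interval=10,
    )
    return example_gauge, pusher
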
def test_time_series_upload_queue2(self):
    self.client.time_series.create(TimeSeries(external_id=self.time_series1))

    queue = TimeSeriesUploadQueue(cdf_client=self.client, max_upload_interval=1)
    queue.start()

    # Create some synthetic data
    now = int(datetime.now(tz=timezone.utc).timestamp() * 1000)
    points1 = [(now + i * 107, random.randint(0, 10)) for i in range(10)]
    points2 = [(now + i * 107, random.randint(0, 10)) for i in range(10, 20)]

    queue.add_to_upload_queue(external_id=self.time_series1, datapoints=points1)
    queue.add_to_upload_queue(external_id="noSuchExternalId", datapoints=points2)

    time.sleep(20)

    recv_points1 = self.client.datapoints.retrieve(
        external_id=self.time_series1, start="1w-ago", end="now", limit=None
    )

    self.assertListEqual([int(p) for p in recv_points1.value], [p[1] for p in points1])

    queue.stop()

def create_time_series(
    src_ts: TimeSeries, src_dst_ids_assets: Dict[int, int], project_src: str, runtime: int
) -> TimeSeries:
    """
    Make a new copy of the time series to be replicated based on a source time series.

    Args:
        src_ts: The time series from the source to be replicated to the destination.
        src_dst_ids_assets: A dictionary of all the mappings of source asset id to destination asset id.
        project_src: The name of the project the object is being replicated from.
        runtime: The timestamp to be used in the new replicated metadata.

    Returns:
        The replicated time series to be created in the destination.
    """
    logging.debug(f"Creating a new time series based on source time series id {src_ts.id}")

    return TimeSeries(
        external_id=src_ts.external_id,
        name=src_ts.name,
        is_string=src_ts.is_string,
        metadata=replication.new_metadata(src_ts, project_src, runtime),
        unit=src_ts.unit,
        asset_id=replication.get_asset_ids([src_ts.asset_id], src_dst_ids_assets)[0] if src_ts.asset_id else None,
        is_step=src_ts.is_step,
        description=src_ts.description,
        security_categories=src_ts.security_categories,
        legacy_name=src_ts.external_id,
    )

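# Hedged sketch (assumption, not part of the source): combining create_time_series and update_time_series
# into a simple replication pass. `src_ts_list`, `dst_ts_by_ext_id`, `asset_id_map`, `project_src` and
# `runtime` are hypothetical placeholders; a real replicator would batch requests and handle more edge cases.
def _sketch_replicate_time_series(client, src_ts_list, dst_ts_by_ext_id, asset_id_map, project_src, runtime):
    to_create, to_update = [], []
    for src_ts in src_ts_list:
        dst_ts = dst_ts_by_ext_id.get(src_ts.external_id)
        if dst_ts is None:
            to_create.append(create_time_series(src_ts, asset_id_map, project_src, runtime))
        else:
            to_update.append(update_time_series(src_ts, dst_ts, asset_id_map, project_src, runtime))
    if to_create:
        client.time_series.create(to_create)
    if to_update:
        client.time_series.update(to_update)
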
def test_init_empty_cdf(self):
    self.client.time_series.retrieve_multiple = Mock(
        side_effect=CogniteNotFoundError([{"externalId": "pre_gauge"}])
    )

    return_asset = Asset(id=123, external_id="asset", name="asset")
    new_asset = Asset(external_id="asset", name="asset")
    self.client.assets.create = Mock(return_value=return_asset)

    pusher = CognitePusher(self.client, external_id_prefix="pre_", asset=new_asset, push_interval=1)

    # Assert time series created
    # Hacky assert_called_once_with as the TimeSeries object is not the same obj, just equal content
    self.client.time_series.create.assert_called_once()
    print(self.client.time_series.create.call_args_list)
    self.assertDictEqual(
        self.client.time_series.create.call_args_list[0][0][0][0].dump(),
        TimeSeries(
            external_id="pre_gauge", name="gauge", legacy_name="pre_gauge", description="Test gauge", asset_id=123
        ).dump(),
    )

    # Assert asset created
    self.client.assets.create.assert_called_once_with(new_asset)

def _upload_batch(self, upload_this: List[Dict], retries=5) -> List[Dict]:
    if len(upload_this) == 0:
        return upload_this

    try:
        self.cdf_client.datapoints.insert_multiple(upload_this)

    except CogniteNotFoundError as ex:
        if not retries:
            raise ex

        if not self.create_missing:
            self.logger.error("Could not upload data points to %s: %s", str(ex.not_found), str(ex))

        # Get IDs of time series that exist, but whose upload failed because other time series in the batch do not
        retry_these = [EitherId(**id_dict) for id_dict in ex.failed if id_dict not in ex.not_found]

        if self.create_missing:
            # Get the time series that can be created
            create_these = [id_dict["externalId"] for id_dict in ex.not_found if "externalId" in id_dict]
            is_string = {
                ts_dict["externalId"]: isinstance(ts_dict["datapoints"][0][1], str)
                for ts_dict in upload_this
                if ts_dict["externalId"] in create_these
            }

            self.logger.info(f"Creating {len(create_these)} time series")
            self.cdf_client.time_series.create(
                [TimeSeries(external_id=i, is_string=is_string[i]) for i in create_these]
            )

            retry_these.extend([EitherId(external_id=i) for i in create_these])

            if len(ex.not_found) != len(create_these):
                missing = [id_dict for id_dict in ex.not_found if id_dict.get("externalId") not in create_these]
                self.logger.error(
                    f"{len(ex.not_found) - len(create_these)} time series not found, and could not be created automatically:\n"
                    + str(missing)
                    + "\nData will be dropped"
                )

        # Remove entries with non-existing time series from upload queue
        upload_this = [
            entry
            for entry in upload_this
            if EitherId(id=entry.get("id"), external_id=entry.get("externalId")) in retry_these
        ]

        # Upload remaining
        self._upload_batch(upload_this, retries - 1)

    return upload_this

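# Hedged usage sketch (assumption, not part of the source): _upload_batch only auto-creates missing time
# series when the queue was constructed with create_missing enabled. Based on the default_time_series_factory
# docstring further down, create_missing is assumed to accept a boolean; the external ID is hypothetical.
def _sketch_upload_with_create_missing(client):
    queue = TimeSeriesUploadQueue(cdf_client=client, max_upload_interval=1, create_missing=True)
    queue.start()
    # Datapoints for a time series that may not exist yet; it should be created on the first upload attempt.
    queue.add_to_upload_queue(external_id="hypothetical_new_ts", datapoints=[(1_600_000_000_000, 1.0)])
    queue.stop()
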
def create_time_series(client, data):
    types = ['confirmed', 'deaths', 'recovered']
    subtree = client.assets.retrieve_subtree(external_id='covid19')
    time_series = []
    for asset in subtree:
        for t in types:
            external_id = asset.external_id + "_" + t
            name = asset.external_id + " " + t
            time_series.append(
                TimeSeries(name=name, legacy_name=external_id, external_id=external_id, asset_id=asset.id))
    client.time_series.create(time_series)

def test_fit_cognite_resource(self, mock_fit):
    entities_from = [TimeSeries(id=1, name="x")]
    entities_to = [Asset(id=1, name="x")]
    EMAPI.fit(match_from=entities_from, match_to=entities_to, true_matches=[(1, 2)], feature_type="bigram")
    assert {
        "matchFrom": [entities_from[0].dump()],
        "matchTo": [entities_to[0].dump()],
        "idField": "id",
        "trueMatches": [[1, 2]],
        "featureType": "bigram",
        "completeMissing": False,
    } == jsgz_load(mock_fit.calls[0].request.body)

def default_time_series_factory(external_id: str, datapoints: DataPointList) -> TimeSeries:
    """
    Default time series factory used when create_missing in a TimeSeriesUploadQueue is given as a boolean.

    Args:
        external_id: External ID of the time series to create
        datapoints: The list of datapoints whose insertion was attempted

    Returns:
        A TimeSeries object with external_id set, and is_string detected automatically
    """
    is_string = (
        isinstance(datapoints[0].get("value"), str)
        if isinstance(datapoints[0], dict)
        else isinstance(datapoints[0][1], str)
    )
    return TimeSeries(external_id=external_id, is_string=is_string)

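# Hedged example (assumption, not part of the source): default_time_series_factory inspects only the first
# datapoint to decide whether the new time series is a string series. Both the tuple and dict datapoint
# shapes handled above are shown; the external IDs are hypothetical.
def _sketch_default_factory():
    numeric_ts = default_time_series_factory("hypothetical_numeric", [(1_600_000_000_000, 42.0)])
    string_ts = default_time_series_factory("hypothetical_string", [{"timestamp": 1_600_000_000_000, "value": "open"}])
    assert numeric_ts.is_string is False
    assert string_ts.is_string is True
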
def create_asset_and_timeseries(ext_id, name, symbol, asset_ext_id, root, client):
    res = []
    try:
        res = client.assets.retrieve(external_id=asset_ext_id)
    except CogniteAPIError as e:
        if e.code == 400:
            asset = Asset(external_id=asset_ext_id, name=symbol, parent_id=root, description=name)
            res = client.assets.create(asset)
            print(res)
    ts = client.time_series.create(TimeSeries(external_id=ext_id, name=name, unit='USD', asset_id=res.id))
    return ts

def test_fit_cognite_resource(self, mock_fit):
    entities_from = [TimeSeries(id=1, name="x")]
    entities_to = [Asset(id=1, external_id="abc", name="x")]
    EMAPI.fit(match_from=entities_from, match_to=entities_to, true_matches=[(1, "abc")], feature_type="bigram")
    assert {
        "matchFrom": [entities_from[0].dump(camel_case=True)],
        "matchTo": [entities_to[0].dump(camel_case=True)],
        "trueMatches": [{"fromId": 1, "toExternalId": "abc"}],
        "featureType": "bigram",
        "ignoreMissingFields": False,
    } == jsgz_load(mock_fit.calls[0].request.body)

def check_timeseries(self):
    if self.myself.property.timeseries_id:
        ts = self.client.time_series.retrieve(external_id=self.ts_ext_id)
        if ts and ts.id:
            self.myself.property.timeseries_id = str(ts.id)
            return ts.id
    try:
        ts = self.client.time_series.create(
            TimeSeries(name=self.ts_name, external_id=self.ts_ext_id, unit="beats"))
    except CogniteAPIError:
        self.is_ok = False
    except CogniteDuplicatedError:
        ts = self.client.time_series.retrieve(external_id=self.ts_ext_id)
    if ts and ts.id:
        self.myself.property.timeseries_id = str(ts.id)
    return int(self.myself.property.timeseries_id)

def test_filter_away_service_account_ts():
    ts_src = [
        TimeSeries(name="holy_timeseries_service_account_metrics", metadata={}),
        TimeSeries(name="not holy timeseries service_account_metrics", metadata={}),
        TimeSeries(name="in-holy timeseries", metadata={}),
        TimeSeries(name="secure timeseries", metadata={}, security_categories=[2]),
        TimeSeries(name="insecure timeseries 1", metadata={}, security_categories=[]),
        TimeSeries(name="insecure timeseries 2", metadata={}),
    ]

    ts_list = filter_objects(ts_src, {}, filter_fn=_is_copyable)

    assert len(ts_list) == 3
    assert ts_list[0].name == "in-holy timeseries"
    assert ts_list[1].name == "insecure timeseries 1"
    assert ts_list[2].name == "insecure timeseries 2"

def new_ts():
    ts = COGNITE_CLIENT.time_series.create(TimeSeries(name="any"))
    yield ts
    COGNITE_CLIENT.time_series.delete(id=ts.id)
    assert COGNITE_CLIENT.time_series.retrieve(ts.id) is None

def test_delete_with_nonexisting(self):
    a = COGNITE_CLIENT.time_series.create(TimeSeries(name="any"))
    COGNITE_CLIENT.time_series.delete(id=a.id, external_id="this ts does not exist", ignore_unknown_ids=True)
    assert COGNITE_CLIENT.time_series.retrieve(id=a.id) is None

def test_create_multiple(self, mock_ts_response):
    res = TS_API.create([TimeSeries(external_id="1", name="blabla")])
    assert isinstance(res, TimeSeriesList)
    assert mock_ts_response.calls[0].response.json()["items"] == res.dump(camel_case=True)

def test_update_with_resource_class(self, mock_ts_response):
    res = TS_API.update(TimeSeries(id=1))
    assert isinstance(res, TimeSeries)
    assert mock_ts_response.calls[0].response.json()["items"][0] == res.dump(camel_case=True)

def create_time_series():
    assets = client.assets.retrieve_subtree(external_id="tesla")
    asset_by_external_id = {asset.external_id: asset for asset in assets}

    # Each entry is (external_id, extra TimeSeries kwargs); the name is always equal to the external ID.
    vehicle_series = [
        ("api_version", {"is_step": True}),
        ("car_version", {"is_string": True}),
        ("df", {}),
        ("dr", {}),
        ("fd_window", {}),
        ("fp_window", {}),
        ("ft", {}),
        ("is_user_present", {"is_step": True}),
        ("locked", {"is_step": True}),
        ("odometer", {"unit": "km"}),
        ("pf", {}),
        ("pr", {}),
        ("rd_window", {}),
        ("rp_window", {}),
        ("rt", {}),
        ("sentry_mode", {"is_step": True}),
        ("valet_mode", {"is_step": True}),
    ]
    climate_series = [
        ("battery_heater", {"is_step": True}),
        ("defrost_mode", {"is_step": True}),
        ("driver_temp_setting", {"unit": "°C"}),
        ("is_auto_conditioning_on", {"is_step": True}),
        ("is_climate_on", {"is_step": True}),
        ("is_front_defroster_on", {"is_step": True}),
        ("is_preconditioning", {"is_step": True}),
        ("is_rear_defroster_on", {"is_step": True}),
        ("remote_heater_control_enabled", {"is_step": True}),
        ("side_mirror_heaters", {"is_step": True}),
        ("wiper_blade_heater", {"is_step": True}),
        ("fan_status", {}),
        ("inside_temp", {"unit": "°C"}),
        ("left_temp_direction", {}),
        ("max_avail_temp", {"unit": "°C"}),
        ("min_avail_temp", {"unit": "°C"}),
        ("outside_temp", {"unit": "°C"}),
        ("passenger_temp_setting", {"unit": "°C"}),
        ("right_temp_direction", {}),
        ("seat_heater_left", {}),
        ("seat_heater_rear_center", {}),
        ("seat_heater_rear_left", {}),
        ("seat_heater_rear_right", {}),
        ("seat_heater_right", {}),
    ]
    drive_series = [
        ("heading", {}),
        ("latitude", {}),
        ("longitude", {}),
        ("native_latitude", {}),
        ("native_longitude", {}),
        ("power", {}),
        ("shift_state", {"is_string": True}),
        ("speed", {"unit": "km/h"}),
        ("elevation", {}),
    ]
    charge_series = [
        ("battery_heater_on", {"is_step": True}),
        ("battery_level", {}),
        ("battery_range", {}),
        ("charge_current_request", {}),
        ("charge_current_request_max", {}),
        ("charge_energy_added", {}),
        ("charge_limit_soc", {}),
        ("charge_limit_soc_max", {}),
        ("charge_limit_soc_min", {}),
        ("charge_limit_soc_std", {}),
        ("charge_miles_added_ideal", {}),
        ("charge_miles_added_rated", {}),
        ("charge_port_cold_weather_mode", {"is_step": True}),
        ("charge_port_door_open", {"is_step": True}),
        ("charge_port_latch", {"is_string": True}),
        ("charge_rate", {}),
        ("charge_to_max_range", {"is_step": True}),
        ("charger_actual_current", {}),
        ("charger_pilot_current", {}),
        ("charger_power", {}),
        ("charger_voltage", {}),
        ("charging_state", {"is_string": True}),
        ("conn_charge_cable", {"is_string": True}),
        ("est_battery_range", {}),
        ("fast_charger_brand", {"is_string": True}),
        ("fast_charger_present", {"is_step": True}),
        ("fast_charger_type", {"is_string": True}),
        ("ideal_battery_range", {}),
        ("managed_charging_active", {"is_step": True}),
        ("max_range_charge_counter", {"is_step": True}),
        ("minutes_to_full_charge", {}),
        ("not_enough_power_to_heat", {"is_step": True}),
        ("scheduled_charging_pending", {"is_step": True}),
        ("time_to_full_charge", {}),
        ("trip_charging", {"is_step": True}),
        ("usable_battery_level", {}),
    ]

    sections = [
        ("Vehicle", "tesla_vehicle", vehicle_series),
        ("Climate", "tesla_climate", climate_series),
        ("Drive", "tesla_drive", drive_series),
        ("Charge", "tesla_charge", charge_series),
    ]

    for section_name, asset_external_id, series in sections:
        # Create time series for each section, one create call per series as before
        print(f"Creating time series for {section_name}")
        asset_id = asset_by_external_id[asset_external_id].id
        for external_id, extra in series:
            client.time_series.create(
                TimeSeries(name=external_id, external_id=external_id, asset_id=asset_id, **extra))