def create_stream(self, schema_name: str, stream_name: str, columns: Any) -> None:
    """Create *stream_name* under *schema_name* unless it is already cached.

    Registers the stream with the MAPI connection, then records its column
    definition in the local cache under the derived metric name.
    """
    # Guard clause: a cache hit means the stream is already known — do nothing.
    if self.try_get_stream(schema_name, stream_name) is not None:
        return
    get_mapi_connection().create_stream(schema_name, stream_name, columns)
    self._cache[get_metric_name(schema_name, stream_name)] = columns
def get_all_streams_details(self) -> List[Dict[str, Any]]:
    """Fetch every stream definition from the database, priming the cache.

    Each returned entry carries 'schema', 'stream' and 'columns' keys; any
    stream whose metric name is not yet cached has its column definition
    stored. Already-cached entries are left untouched.
    """
    streams = get_mapi_connection().get_database_streams()
    for stream in streams:
        key = get_metric_name(stream['schema'], stream['stream'])
        # Insert only on a miss (or a cached None), preserving existing entries.
        if self._cache.get(key) is None:
            self._cache[key] = stream['columns']
    return streams
def get_stream_details_by_schema(self, schema_name: str, stream_name: str) -> Dict[str, Any]:
    """Return the details dict for one stream, consulting the cache first.

    On a cache miss the details are fetched from the MAPI connection and the
    column definition is memoized for later lookups.

    Bug fix: the original returned the raw cached *columns* value on a cache
    hit, but the full details dict on a miss — so the declared
    Dict[str, Any] return type only held on the miss path. A cache hit now
    rebuilds a dict of the same shape as the entries seen in
    get_all_streams_details, so callers always receive a details dict.
    """
    metric_name = get_metric_name(schema_name, stream_name)
    cached_columns = self._cache.get(metric_name, None)
    if cached_columns is not None:
        # NOTE(review): assumes the single-stream payload carries
        # 'schema'/'stream'/'columns' keys, matching the entries returned by
        # get_database_streams — confirm against the MAPI response shape.
        return {
            'schema': schema_name,
            'stream': stream_name,
            'columns': cached_columns,
        }
    details = get_mapi_connection().get_single_database_stream(
        schema_name, stream_name)
    self._cache[metric_name] = details['columns']
    return details
def exitLine(self, ctx: influxdbParser.LineContext) -> None:
    """Fold the just-parsed line into the per-metric grouping.

    A line that produced a parse error only records that error; a clean
    line is grouped by its metric name, with value rows accumulated under
    an existing group or a new group created on first sight.
    """
    # Error path first: record the failure and skip grouping entirely.
    if self._there_is_error is not None:
        self._found_errors.append(self._there_is_error)
        return
    key = get_metric_name(self._current_schema, self._current_stream)
    group = self._grouped_streams.get(key)
    if group is not None:
        group['values'].append(self._current_values)
    else:
        self._grouped_streams[key] = {
            'schema': self._current_schema,
            'stream': self._current_stream,
            'values': [self._current_values],
            'tags': self._current_tags
        }
def try_get_stream(self, schema_name: str, stream_name: str) -> List[Dict[str, Any]]:
    """Look up a stream's cached column definition.

    Returns the cached columns for the derived metric name, or None on a
    cache miss. NOTE(review): the annotation understates the None case —
    callers such as create_stream explicitly test for None; consider
    Optional[List[Dict[str, Any]]] if typing.Optional is imported.
    """
    return self._cache.get(get_metric_name(schema_name, stream_name), None)
def delete_stream(self, schema_name: str, stream_name: str) -> None:
    """Drop a stream from the database and evict its cache entry.

    The cache entry is removed first; pop(..., None) makes the eviction a
    no-op when the stream was never cached.
    """
    self._cache.pop(get_metric_name(schema_name, stream_name), None)
    get_mapi_connection().delete_stream(schema_name, stream_name)