def _convert_timestamp(timestamp, precision=DEFAULT_WRITE_PRECISION):
    date_helper = get_date_helper()
    if isinstance(timestamp, Integral):
        return timestamp  # assume precision is correct if timestamp is int

    if isinstance(timestamp, str):
        timestamp = date_helper.parse_date(timestamp)

    if isinstance(timestamp, timedelta) or isinstance(timestamp, datetime):
        if isinstance(timestamp, datetime):
            timestamp = date_helper.to_utc(timestamp) - EPOCH

        ns = date_helper.to_nanoseconds(timestamp)

        if precision is None or precision == WritePrecision.NS:
            return ns
        elif precision == WritePrecision.US:
            return ns / 1e3
        elif precision == WritePrecision.MS:
            return ns / 1e6
        elif precision == WritePrecision.S:
            return ns / 1e9

    raise ValueError(timestamp)
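# Usage sketch (an assumption for illustration, not part of the library source):
# when a point is written with an explicit WritePrecision, the timestamp given to
# Point.time() goes through _convert_timestamp above. The URL, token, org and
# bucket below are placeholder values.
from datetime import datetime, timezone

from influxdb_client import InfluxDBClient, Point, WritePrecision
from influxdb_client.client.write_api import SYNCHRONOUS

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    point = Point("h2o_level") \
        .tag("location", "coyote_creek") \
        .field("water_level", 2.0) \
        .time(datetime(2021, 3, 20, 15, 59, 10, tzinfo=timezone.utc), WritePrecision.S)
    client.write_api(write_options=SYNCHRONOUS).write(bucket="my-bucket", record=point)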
def _to_value(self, str_val, column):
    if str_val == '' or str_val is None:
        default_value = column.default_value
        if default_value == '' or default_value is None:
            return None
        return self._to_value(default_value, column)

    if "string" == column.data_type:
        return str_val

    if "boolean" == column.data_type:
        return "true" == str_val

    if "unsignedLong" == column.data_type or "long" == column.data_type:
        return int(str_val)

    if "double" == column.data_type:
        return float(str_val)

    if "base64Binary" == column.data_type:
        return base64.b64decode(str_val)

    if "dateTime:RFC3339" == column.data_type or "dateTime:RFC3339Nano" == column.data_type:
        return get_date_helper().parse_date(str_val)

    if "duration" == column.data_type:
        # todo better type ?
        return int(str_val)
def _params_to_extern_ast(params: dict) -> List['OptionStatement']:
    statements = []
    for key, value in params.items():
        if value is None:
            continue

        if isinstance(value, bool):
            literal = BooleanLiteral("BooleanLiteral", value)
        elif isinstance(value, int):
            literal = IntegerLiteral("IntegerLiteral", str(value))
        elif isinstance(value, float):
            literal = FloatLiteral("FloatLiteral", value)
        elif isinstance(value, datetime):
            value = get_date_helper().to_utc(value)
            literal = DateTimeLiteral("DateTimeLiteral", value.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
        elif isinstance(value, timedelta):
            # convert to microseconds
            _micro_delta = int(value / timedelta(microseconds=1))
            if _micro_delta < 0:
                literal = UnaryExpression("UnaryExpression",
                                          argument=DurationLiteral("DurationLiteral", [
                                              Duration(magnitude=-_micro_delta, unit="us")]),
                                          operator="-")
            else:
                literal = DurationLiteral("DurationLiteral", [Duration(magnitude=_micro_delta, unit="us")])
        elif isinstance(value, str):
            literal = StringLiteral("StringLiteral", str(value))
        else:
            literal = value

        statements.append(OptionStatement("OptionStatement",
                                          VariableAssignment("VariableAssignment",
                                                             Identifier("Identifier", key),
                                                             literal)))
    return statements
def _parm_to_extern_ast(value) -> Union[Expression, None]:
    if value is None:
        return None
    if isinstance(value, bool):
        return BooleanLiteral("BooleanLiteral", value)
    elif isinstance(value, int):
        return IntegerLiteral("IntegerLiteral", str(value))
    elif isinstance(value, float):
        return FloatLiteral("FloatLiteral", value)
    elif isinstance(value, datetime):
        value = get_date_helper().to_utc(value)
        return DateTimeLiteral("DateTimeLiteral", value.strftime('%Y-%m-%dT%H:%M:%S.%fZ'))
    elif isinstance(value, timedelta):
        _micro_delta = int(value / timedelta(microseconds=1))
        if _micro_delta < 0:
            return UnaryExpression(
                "UnaryExpression",
                argument=DurationLiteral(
                    "DurationLiteral", [Duration(magnitude=-_micro_delta, unit="us")]),
                operator="-")
        else:
            return DurationLiteral(
                "DurationLiteral", [Duration(magnitude=_micro_delta, unit="us")])
    elif isinstance(value, str):
        return StringLiteral("StringLiteral", str(value))
    elif isinstance(value, Iterable):
        return ArrayExpression(
            "ArrayExpression",
            elements=list(map(lambda it: QueryApi._parm_to_extern_ast(it), value)))
    else:
        return value
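# Usage sketch (an assumption for illustration, not part of the library source):
# the helpers above turn plain Python values supplied via `params` into Flux AST
# literals, so they can be referenced by name inside the query text. The URL,
# token, org and bucket are placeholder values.
from datetime import datetime, timedelta, timezone

from influxdb_client import InfluxDBClient

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    query = '''
        from(bucket: bucketParam)
            |> range(start: startParam, stop: stopParam)
            |> filter(fn: (r) => r["_value"] > thresholdParam)
    '''
    params = {
        "bucketParam": "my-bucket",
        "startParam": timedelta(days=-30),
        "stopParam": datetime(2021, 3, 20, 15, 59, 10, tzinfo=timezone.utc),
        "thresholdParam": 5.5,
    }
    tables = client.query_api().query(query=query, params=params)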
def test_pass_parameters(self):
    unique = get_date_helper().to_nanoseconds(datetime.utcnow() - datetime.utcfromtimestamp(0))

    # write data
    with MultiprocessingWriter(url=self.url, token=self.token, org=self.org,
                               write_options=SYNCHRONOUS) as writer:
        writer.write(bucket="my-bucket", record=f"mem_{unique},tag=a value=5i 10",
                     write_precision=WritePrecision.S)

    # query data
    with InfluxDBClient(url=self.url, token=self.token, org=self.org) as client:
        query_api = client.query_api()
        tables = query_api.query(
            f'from(bucket: "my-bucket") |> range(start: 0) |> filter(fn: (r) => r._measurement == "mem_{unique}")',
            self.org)
        record = tables[0].records[0]
        self.assertIsNotNone(record)
        self.assertEqual("a", record["tag"])
        self.assertEqual(5, record["_value"])
        self.assertEqual(get_date_helper().to_utc(datetime.utcfromtimestamp(10)), record["_time"])
def _prepare_predicate_request(self, start, stop, predicate):
    date_helper = get_date_helper()
    if isinstance(start, datetime):
        start = date_helper.to_utc(start)
    if isinstance(stop, datetime):
        stop = date_helper.to_utc(stop)

    predicate_request = DeletePredicateRequest(start=start, stop=stop, predicate=predicate)
    return predicate_request
def test_query_profiler_present(self):
    client = self.client
    q = '''
        import "profiler"
        option profiler.enabledProfilers = ["query", "operator"]

        from(bucket:stringParam)
            |> range(start: 0, stop: callParam)
            |> last()
    '''
    p = {
        "stringParam": "my-bucket",
        "stopParam": get_date_helper().parse_date("2021-03-20T15:59:10.607352Z"),
        "durationParam": DurationLiteral("DurationLiteral", [Duration(magnitude=1, unit="d")]),
        "callParam": CallExpression(type="CallExpression", callee=Identifier(type="Identifier", name="now")),
    }
    csv_result = client.query_api(query_options=QueryOptions(profilers=None)).query(query=q, params=p)
    self.assertIsNotNone(csv_result)

    found_profiler_table = False
    found_profiler_records = False
    for table in csv_result:
        if any(filter(lambda column: (column.default_value == "_profiler"), table.columns)):
            found_profiler_table = True
            print(f"Profiler table : {table} ")
        for flux_record in table:
            if flux_record["_measurement"].startswith("profiler/"):
                found_profiler_records = True
                print(f"Profiler record: {flux_record}")

    self.assertTrue(found_profiler_table)
    self.assertTrue(found_profiler_records)

    records = client.query_api().query_stream(query=q, params=p)
    found_profiler_records = False
    for flux_record in records:
        if flux_record["_measurement"].startswith("profiler/"):
            found_profiler_records = True
            print(f"Profiler record: {flux_record}")
    self.assertTrue(found_profiler_records)
def test_query_ast(self):
    q = '''
        from(bucket:stringParam)
            |> range(start: startDuration, stop: callParam)
            |> filter(fn: (r) => r["_measurement"] == "my_measurement")
            |> filter(fn: (r) => r["_value"] > intParam)
            |> filter(fn: (r) => r["_value"] > floatParam)
            |> aggregateWindow(every: durationParam, fn: mean, createEmpty: true)
            |> sort(columns: ["_time"], desc: booleanParam)
    '''
    p = {
        "stringParam": "my-bucket",
        "stopParam": get_date_helper().parse_date("2021-03-20T15:59:10.607352Z"),
        "intParam": 2,
        "durationParam": DurationLiteral("DurationLiteral", [Duration(magnitude=1, unit="d")]),
        "startDuration": UnaryExpression(
            type="UnaryExpression",
            argument=DurationLiteral("DurationLiteral", [Duration(magnitude=30, unit="d")]),
            operator="-"),
        "callParam": CallExpression(type="CallExpression", callee=Identifier(type="Identifier", name="now")),
        "timedelta": datetime.timedelta(minutes=10),
        "floatParam": 14.01,
        "booleanParam": True,
    }

    csv_result = self.client.query_api().query_csv(query=q, params=p)
    self.assertIsNotNone(csv_result)

    val_count = 0
    for row in csv_result:
        for cell in row:
            val_count += 1

    print("Values count: ", val_count)
def test_query_profiler_enabled(self):
    q = '''
        from(bucket:stringParam)
            |> range(start: 0, stop: callParam)
            |> last()
    '''
    p = {
        "stringParam": "my-bucket",
        "stopParam": get_date_helper().parse_date("2021-03-20T15:59:10.607352Z"),
        "durationParam": DurationLiteral("DurationLiteral", [Duration(magnitude=1, unit="d")]),
        "callParam": CallExpression(type="CallExpression", callee=Identifier(type="Identifier", name="now")),
    }
    query_api = self.client.query_api(query_options=QueryOptions(profilers=["query", "operator"]))
    csv_result = query_api.query(query=q, params=p)

    for table in csv_result:
        self.assertFalse(any(filter(lambda column: (column.default_value == "_profiler"), table.columns)))
        for flux_record in table:
            self.assertFalse(flux_record["_measurement"].startswith("profiler/"))

    records = self.client.query_api().query_stream(query=q, params=p)
    for flux_record in records:
        self.assertFalse(flux_record["_measurement"].startswith("profiler/"))

    self.assertIsNotNone(csv_result)
def delete(self, start: datetime, stop: object, predicate: object, bucket: str, org: str) -> None:
    """
    Delete Time series data from InfluxDB.

    :param start: start time
    :param stop: stop time
    :param predicate: predicate
    :param bucket: bucket id or name from which data will be deleted
    :param org: organization id or name
    :return:
    """
    date_helper = get_date_helper()
    if isinstance(start, datetime):
        start = date_helper.to_utc(start)
    if isinstance(stop, datetime):
        stop = date_helper.to_utc(stop)

    predicate_request = DeletePredicateRequest(start=start, stop=stop, predicate=predicate)
    return self._service.post_delete(delete_predicate_request=predicate_request, bucket=bucket, org=org)
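# Usage sketch (an assumption for illustration, not part of the library source):
# calling delete() through the public DeleteApi; naive datetimes are converted to
# UTC by the date helper as shown above. The URL, token, org, bucket and predicate
# are placeholder values.
from datetime import datetime

from influxdb_client import InfluxDBClient

with InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org") as client:
    client.delete_api().delete(
        start=datetime(2021, 3, 1),
        stop=datetime(2021, 3, 20),
        predicate='_measurement="h2o_level" AND location="coyote_creek"',
        bucket="my-bucket",
        org="my-org")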
def test_parameter_ast(self):
    test_data = [
        [
            "stringParam", "my-bucket",
            {
                "imports": [],
                "body": [
                    {
                        "type": "OptionStatement",
                        "assignment": {
                            "type": "VariableAssignment",
                            "id": {
                                "type": "Identifier",
                                "name": "stringParam"
                            },
                            "init": {
                                "type": "StringLiteral",
                                "value": "my-bucket"
                            }
                        }
                    }
                ]
            }
        ],
        [
            "datetimeParam", get_date_helper().parse_date("2021-03-20T15:59:10.607352Z"),
            {
                "body": [{
                    "assignment": {
                        "id": {
                            "name": "datetimeParam",
                            "type": "Identifier"
                        },
                        "init": {
                            "type": "DateTimeLiteral",
                            "value": "2021-03-20T15:59:10.607352Z"
                        },
                        "type": "VariableAssignment"
                    },
                    "type": "OptionStatement"
                }],
                "imports": []
            }
        ],
        [
            "datetimeNoTZParam", datetime.datetime(2021, 3, 20, 15, 59, 10, 607352),
            {
                "body": [{
                    "assignment": {
                        "id": {
                            "name": "datetimeNoTZParam",
                            "type": "Identifier"
                        },
                        "init": {
                            "type": "DateTimeLiteral",
                            "value": "2021-03-20T15:59:10.607352Z"
                        },
                        "type": "VariableAssignment"
                    },
                    "type": "OptionStatement"
                }],
                "imports": []
            }
        ],
        [
            "timeDeltaParam", datetime.timedelta(hours=1),
            {
                "body": [{
                    "assignment": {
                        "id": {
                            "name": "timeDeltaParam",
                            "type": "Identifier"
                        },
                        "init": {
                            "type": "DurationLiteral",
                            "values": [{
                                "magnitude": 3600000000,
                                "unit": "us"
                            }]
                        },
                        "type": "VariableAssignment"
                    },
                    "type": "OptionStatement"
                }],
                "imports": []
            }
        ],
        [
            "timeDeltaNegativeParam", datetime.timedelta(minutes=-5),
            {
                "body": [{
                    "assignment": {
                        "id": {
                            "name": "timeDeltaNegativeParam",
                            "type": "Identifier"
                        },
                        "init": {
                            "argument": {
                                "type": "DurationLiteral",
                                "values": [{
                                    "magnitude": 300000000,
                                    "unit": "us"
                                }]
                            },
                            "operator": "-",
                            "type": "UnaryExpression"
                        },
                        "type": "VariableAssignment"
                    },
                    "type": "OptionStatement"
                }],
                "imports": []
            }
        ],
        [
            "booleanParam", True,
            {
                "body": [{
                    "assignment": {
                        "id": {
                            "name": "booleanParam",
                            "type": "Identifier"
                        },
                        "init": {
                            "type": "BooleanLiteral",
                            "value": True
                        },
                        "type": "VariableAssignment"
                    },
                    "type": "OptionStatement"
                }],
                "imports": []
            }
        ],
        [
            "intParam", int(10),
            {
                "body": [{
                    "assignment": {
                        "id": {
                            "name": "intParam",
                            "type": "Identifier"
                        },
                        "init": {
                            "type": "IntegerLiteral",
                            "value": "10"
                        },
                        "type": "VariableAssignment"
                    },
                    "type": "OptionStatement"
                }],
                "imports": []
            }
        ],
        [
            "floatParam", 10.333,
            {
                "body": [{
                    "assignment": {
                        "id": {
                            "name": "floatParam",
                            "type": "Identifier"
                        },
                        "init": {
                            "type": "FloatLiteral",
                            "value": 10.333
                        },
                        "type": "VariableAssignment"
                    },
                    "type": "OptionStatement"
                }],
                "imports": []
            }
        ]
    ]

    for data in test_data:
        param = {data[0]: data[1]}
        print("testing: ", param)
        ast = QueryApi._build_flux_ast(param)

        got_sanitized = self.client.api_client.sanitize_for_serialization(ast)
        self.assertEqual(
            json.dumps(got_sanitized, sort_keys=True, indent=2),
            json.dumps(data[2], sort_keys=True, indent=2))