def BestDayReport(startTime):

    # Connects to the database
    url = "http://{}:{}".format(INFLUXDB_HOST, INFLUXDB_PORT)
    client = IF(url=url, token="", org="")

    # Build trace query
    query = QUERY_BEST_DAY % (startTime, TABELA_TOTAL)
    tables = client.query_api().query(query)
    client.close()

    # The query result is a list of all tables created by the query, each of type FluxTable
    # see https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5 for more info

    # Build the result as a list of all records found in the query result
    result = []
    for table in tables:
        for record in table.records:
            subdict = {
                x: record.values[x]
                for x in ['_value', 'day', 'hour', 'location']
            }
            result.append(subdict)
    return result
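# QUERY_BEST_DAY is a %-style Flux template defined elsewhere and filled with
# (startTime, TABELA_TOTAL). Its exact text is not shown; a hypothetical sketch,
# assuming records carry the 'day', 'hour' and 'location' tags read back above:
#
# QUERY_BEST_DAY = '''
# from(bucket: "mybucket")
#     |> range(start: %s)
#     |> filter(fn: (r) => r._measurement == "%s")
#     |> group(columns: ["day", "hour", "location"])
#     |> sum()
# '''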
Example #2
    def test_query_without_credentials(self):
        _client = InfluxDBClient(url="http://localhost:9999", token="my-token-wrong-credentials", org="my-org")

        with self.assertRaises(ApiException) as ae:
            query = 'from(bucket: "my-bucket")' \
                    '|> range(start: 2020-02-19T23:30:00Z, stop: now())' \
                    f'|> filter(fn: (r) => r._measurement == "my-measurement")'
            _client.query_api().query_data_frame(query=query)

        exception = ae.exception
        self.assertEqual(401, exception.status)
        self.assertEqual("Unauthorized", exception.reason)

        _client.close()
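    # The explicit close() above can also be handled automatically: recent releases
    # of influxdb-client support the context-manager protocol. A sketch, assuming a
    # reachable server at the same URL:
    def test_query_with_context_manager(self):
        with InfluxDBClient(url="http://localhost:9999", token="my-token", org="my-org") as _client:
            _client.query_api().query('from(bucket: "my-bucket") |> range(start: -1h)')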
class InfluxDBEntityContainer(object):
    """Object used to represent an Entity Container (influxdb database)

    modelled after the SQLEntityContainer in pyslet (sqlds.py)

    container
        pyslet.odata2.csdl.EntityContainer

    connection
        dict with connection configuration
        format : {url:"http://localhost:9999", token:"my-token", org:"my-org"}
    """
    def __init__(self, container, connection, topmax, **kwargs):
        self.container = container
        try:
            self.client = InfluxDBClient(url=connection['url'], token=connection['token'], org=connection["org"])
            self.query_api = self.client.query_api()
            self._topmax = topmax

            for es in self.container.EntitySet:
                self.bind_entity_set(es)
        except Exception as e:
            logger.info("Failed to connect to initialize Influx Odata container")
            logger.exception(str(e))

    def bind_entity_set(self, entity_set):
        entity_set.bind(self.get_collection_class(), container=self)

    def get_collection_class(self):
        return InfluxDBMeasurement
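# A hypothetical wiring of the container above; 'metadata_document' and the
# connection values are placeholders, not part of this snippet:
connection = {"url": "http://localhost:9999", "token": "my-token", "org": "my-org"}
container = InfluxDBEntityContainer(container=metadata_document.root.DataServices['ds.container'],
                                    connection=connection,
                                    topmax=100)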
Example #4
class InfluxClient:
    def __init__(
        self,
        url="http://localhost:8086",
        token="IpLnoNkWhqmnSLO2ieeqmHejYrrokycO5Be8HRgM6UI1S_CO-Py2_opA2E1z6iCzJrv5U_gHGVHh5JMCFsgwjQ=="
    ):

        # You can generate a token from the "Tokens" tab in the InfluxDB UI

        self.org = "vwa"
        self.bucket = "vwa"

        self.client = InfluxDBClient(url=url, token=token)
        self.write_api = self.client.write_api(write_options=SYNCHRONOUS)
        self.b = Batcher(500, 5, self._send)
        self.q = self.client.query_api()

    def send(self, line):
        self.b.send(line)

    def sendSequence(self, sequence):
        self._send(sequence)

    def _send(self, sequence):
        try:
            self.write_api.write(self.bucket, self.org, sequence)
            print("%d items sent!" % len(sequence))
        except Exception as e:
            print("%d items not sent!" % len(sequence), e)
def main():
    parse_row.progress = 0

    url = "https://github.com/influxdata/influxdb-client-python/wiki/data/stock-prices-example.csv"
    response = requests.get(url, stream=True)
    data = rx \
        .from_iterable(DictReader(response.iter_lines(decode_unicode=True))) \
        .pipe(ops.map(lambda row: parse_row(row)))

    client = InfluxDBClient(url="http://localhost:9999", token="my-token", org="my-org", debug=False)
    write_api = client.write_api(write_options=WriteOptions(batch_size=50_000, flush_interval=10_000))

    write_api.write(bucket="my-bucket", record=data)
    write_api.close()

    query = '''
    from(bucket:"my-bucket")
            |> range(start: 0, stop: now())
            |> filter(fn: (r) => r._measurement == "financial-analysis")
            |> filter(fn: (r) => r.symbol == "AAPL")
            |> filter(fn: (r) => r._field == "close")
            |> drop(columns: ["_start", "_stop", "table", "_field","_measurement"])
    '''

    result = client.query_api().query_data_frame(query=query)
    print(result.head(100))

    """
    Close client
    """
    client.__del__()
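# parse_row is referenced above but not shown; a plausible sketch that maps one CSV
# row of the stock-prices file to a Point (column names are assumptions; the real
# version also maintains the parse_row.progress counter initialized in main):
from influxdb_client import Point

def parse_row(row):
    parse_row.progress += 1
    return Point("financial-analysis") \
        .tag("symbol", row["symbol"]) \
        .field("close", float(row["close"])) \
        .time(row["date"])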
Example #6
class BaseTest(unittest.TestCase):
    def setUp(self) -> None:
        self.conf = influxdb_client.configuration.Configuration()

        self.host = os.getenv('INFLUXDB_V2_URL', "http://localhost:8086")
        self.debug = False
        self.auth_token = os.getenv('INFLUXDB_V2_TOKEN', "my-token")
        self.org = os.getenv('INFLUXDB_V2_ORG', "my-org")

        self.client = InfluxDBClient(url=self.host,
                                     token=self.auth_token,
                                     debug=self.debug,
                                     org=self.org)
        self.api_client = self.client.api_client

        self.query_api = self.client.query_api()
        self.buckets_api = self.client.buckets_api()
        self.users_api = self.client.users_api()
        self.organizations_api = self.client.organizations_api()
        self.authorizations_api = self.client.authorizations_api()
        self.labels_api = self.client.labels_api()

        self.my_organization = self.find_my_org()

    def tearDown(self) -> None:
        self.client.close()

    def create_test_bucket(self):
        bucket_name = generate_bucket_name()
        bucket = self.buckets_api.create_bucket(bucket_name=bucket_name,
                                                org=self.my_organization,
                                                description=bucket_name +
                                                "description")
        return bucket

    def delete_test_bucket(self, bucket):
        return self.buckets_api.delete_bucket(bucket)

    def find_my_org(self) -> Organization:
        return self.client.organizations_api().find_organizations(
            org=self.org)[0]

    @staticmethod
    def log(args):
        print(">>>", args)

    @staticmethod
    def generate_name(prefix):
        assert prefix != "" or prefix is not None
        return prefix + str(datetime.datetime.now().timestamp()) + "-IT"

    @classmethod
    def retention_rule(cls) -> BucketRetentionRules:
        return BucketRetentionRules(type='expire', every_seconds=3600)

    def assertEqualIgnoringWhitespace(self, first, second, msg=None) -> None:
        whitespace_pattern = re.compile(r"\s+")
        self.assertEqual(whitespace_pattern.sub("", first),
                         whitespace_pattern.sub("", second),
                         msg=msg)
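# A minimal concrete test built on the fixture above, using its bucket helpers:
class BucketSmokeTest(BaseTest):
    def test_create_and_delete_bucket(self):
        bucket = self.create_test_bucket()
        self.assertIsNotNone(bucket.id)
        self.delete_test_bucket(bucket)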
Example #7
def insert_to_influx(data, routing_key):
    client = InfluxDBClient(
        url="http://192.168.43.70:8086",
        token=
        "a6WAt3mAwy4MP2Cq1koohw4wQTiK09M8_gx9WLpzQwtrH_w5eDRyQe1q4bMYCcKn0hT0UTdV94zsBcJgInLFgw==",
        org="pnup")
    # Synchronous writes flush before the function returns; the default batching
    # writer may drop the point if the client is garbage collected first
    write_api = client.write_api(write_options=SYNCHRONOUS)

    keys = routing_key.split('-')
    measurement = keys[0]
    namenode = keys[1]
    # client.switch_database('iot_multinode_DB')
    data = [{
        "measurement": measurement,
        "tags": {
            "name": namenode
        },
        "fields": {
            "value": data,
            # "namenode": namenode
        }
    }]

    write_api.write(bucket="iot_multinode_DB", org="pnup", record=data)
def post_influxdb():

    url = u_weather_uri
    mtk_weather = requests.get(url).json()

    logr.debug('UG Weather API results: %s', mtk_weather)

    weather = {
        'stationid': mtk_weather['observations'][0]['stationID'],
        'obsTimeUTC': mtk_weather['observations'][0]['obsTimeUtc'],
        'localtime': mtk_weather['observations'][0]['obsTimeLocal'],
        'winddir': mtk_weather['observations'][0]['winddir'],
        'humidity': mtk_weather['observations'][0]['humidity'],
        'temp': mtk_weather['observations'][0]['metric']['temp'],
        'heatindex': mtk_weather['observations'][0]['metric']['heatIndex'],
        'dewPoint': mtk_weather['observations'][0]['metric']['dewpt'],
        'windchill': mtk_weather['observations'][0]['metric']['windChill'],
        'windspeed': mtk_weather['observations'][0]['metric']['windSpeed'],
        'windgust': mtk_weather['observations'][0]['metric']['windGust'],
        'psure': mtk_weather['observations'][0]['metric']['pressure'],
        'precip': mtk_weather['observations'][0]['metric']['precipRate'],
    }

    # Calculate FFDI data point
    df = drought()
    fdi = ffdi(weather["temp"], weather["humidity"], df[0],
               weather["windspeed"])

    # Setup connection to the DB
    client = InfluxDBClient(url=influx_server,
                            token=influx_token,
                            org=influx_org)
    write_api = client.write_api(write_options=SYNCHRONOUS)
    query_api = client.query_api()

    # Form the data to ingest into InfluxDB database
    p = Point("weather") \
        .tag("location", "Mount Kuring-Gai") \
        .field("temperature", float(weather['temp'])) \
        .field("Heat Index", float(weather['heatindex'])) \
        .field("Dew Point", float(weather['dewPoint'])) \
        .field("humidity", int(weather['humidity'])) \
        .field("Wind Speed", float(weather['windspeed'])) \
        .field("Wind Gust", float(weather['windgust'])) \
        .field("Pressure", float(weather['psure'])) \
        .field("Wind Direction", float(weather['winddir'])) \
        .field("Precipitation", float(weather['precip'])) \
        .field("Heat Index", float(weather['heatindex'])) \
        .field("DroughtF", float(df[0])) \
        .field("FFDI", float(fdi)) \
        .time((weather['obsTimeUTC']))

    # Write the point into InfluxDB (the v2 client takes write_precision, not the v1 time_precision)
    write_api.write(record=p,
                    bucket=influx_bucket,
                    write_precision=WritePrecision.S)

    client.close()
    return None
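# drought() and ffdi() are defined elsewhere. For reference, one common formulation
# of the Forest Fire Danger Index is the McArthur Mark 5 equation (Noble et al., 1980);
# a sketch assuming the same argument order as the call above:
import math

def ffdi_sketch(temp, humidity, drought_factor, wind_speed):
    # FFDI = 2 * exp(-0.450 + 0.987*ln(DF) - 0.0345*RH + 0.0338*T + 0.0234*V)
    return 2.0 * math.exp(-0.450
                          + 0.987 * math.log(drought_factor)
                          - 0.0345 * humidity
                          + 0.0338 * temp
                          + 0.0234 * wind_speed)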
Example #9
def flux_view_suhu(client: FluxClient = Depends(get_flux_session)):
    query_api = client.query_api()
    tables = query_api.query('from(bucket:"test") |> range(start: -1d)')
    response = []
    for table in tables:
        print(len(table.records))
        for record in table.records:
            response.append(record.values)
    return response
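# get_flux_session is a FastAPI dependency defined elsewhere; a plausible sketch
# (FluxClient is presumably an alias of InfluxDBClient; URL/token/org are placeholders):
from influxdb_client import InfluxDBClient

def get_flux_session():
    client = InfluxDBClient(url="http://localhost:8086", token="my-token", org="my-org")
    try:
        yield client
    finally:
        client.close()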
Example #10
def sendQueryInflux(query):
    token = app.config['INFLUX_TOKEN']
    org = app.config['INFLUX_ORG']
    url = app.config['INFLUX_URL']

    client = InfluxDBClient(url=url, token=token, org=org, debug=False)
    df_result = client.query_api().query_data_frame(org=org, query=query)

    return df_result
Example #11
def send_to_influxdb():
    client = InfluxDBClient(
        url="https://eu-central-1-1.aws.cloud2.influxdata.com", token=my_token)
    try:
        kind = 'temperature'
        device = 'opt-123'
        """
		Get sensor data
		"""
        # pressure = sense.get_pressure()
        pressure = 339.3
        # pressure = round(pressure, 1)
        temp = 12.0
        # temp = sense.get_temperature()
        # temp = round(temp, 1)
        # humid = sense.get_humidity()
        humid = 99
        """
		Write data by Point structure
		"""
        point = Point(kind).tag('owner', owner).tag('device', device).field(
            'value', round(outside_temp(), 2)).time(time=datetime.utcnow())
        print(f'Writing to InfluxDB cloud: {point.to_line_protocol()} ...')

        write_api = client.write_api(write_options=SYNCHRONOUS)
        write_api.write(bucket=bucket, org=org, record=point)

        print()
        print('success')
        print()
        print()

        if False:  # flip to True to read the written point back for verification
            # Query written data
            query = f'from(bucket: "{bucket}") |> range(start: -1d) |> filter(fn: (r) => r._measurement == "{kind}")'
            print(f'Querying from InfluxDB cloud: "{query}" ...')
            print()

            query_api = client.query_api()
            tables = query_api.query(query=query, org=org)

            for table in tables:
                for row in table.records:
                    print(
                        f'{row.values["_time"]}: owner={row.values["owner"]},device={row.values["device"]} '
                        f'{row.values["_value"]} °C')

            print()
            print('success')
    except Exception as e:
        print(e)
    finally:
        client.close()
    return
Example #12
class InfluxDBLogger:
    def __init__(
        self,
        bucket_name=BUCKET,
        batch_size=LOG_BATCH_SIZE,
        data_retention=3600,
    ):
        self.organization = ORGANIZATION
        self.client = InfluxDBClient(url=INFLUXDB_URL,
                                     token=INFLUXDB_TOKEN,
                                     org=self.organization)
        self.batch_size = batch_size
        self.bucket_name = bucket_name

        self.write_api = self.client.write_api(write_options=WriteOptions(
            batch_size=self.batch_size))
        self.query_api = self.client.query_api()
        self.buckets_api = self.client.buckets_api()
        bucket = self.buckets_api.find_bucket_by_name(self.bucket_name)
        if bucket is None:
            logger.warning(f"Bucket {self.bucket_name!r} not found. "
                           f"Creating a bucket {self.bucket_name!r}.")
            retention_rules = None
            if data_retention is not None:
                retention_rules = BucketRetentionRules(
                    type="expire", every_seconds=data_retention)
            self.buckets_api.create_bucket(
                bucket_name=self.bucket_name,
                retention_rules=retention_rules,
                org=self.organization,
            )

    def send_event(self, record_type, message):
        point = Point(record_type)
        for key, value in message.items():
            point = point.field(key, value)
        self.write_api.write(bucket=self.bucket_name, record=point)

    def get_events(self, record_type):
        query = '''
            from(bucket: currentBucket)
            |> range(start: -5m, stop: now())
            |> filter(fn: (r) => r._measurement == recordType)
            |> pivot(rowKey:["_time"], columnKey: ["_field"], \
                valueColumn: "_value")
        '''
        params = {"currentBucket": self.bucket_name, "recordType": record_type}
        tables = self.query_api.query(query=query, params=params)
        if len(tables) > 0:
            table, *_ = tables
            events = table.records
        else:
            events = []
        return events
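# Example usage of the logger above (BUCKET, ORGANIZATION etc. come from module
# config). Note that the write_api batches in the background, so a just-sent
# event may not be visible to get_events() immediately:
influx_logger = InfluxDBLogger()
influx_logger.send_event("job_status", {"duration_s": 12.3, "host": "worker-1"})
recent = influx_logger.get_events("job_status")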
def OccupancyReport(LocationInventory, startTime):

    # Connects to the database
    url = "http://{}:{}".format(INFLUXDB_HOST, INFLUXDB_PORT)
    client = IF(url=url, token="", org="")

    # Prepare to create a table with the maximum occupancy per room
    TABELA_LOCATION = "LocationInventory"
    write_api = client.write_api(write_options=SYNCHRONOUS)

    # Build one record per location
    record = []
    #location_dict = json.loads(LocationInventory)
    for key, value in LocationInventory["rooms"].items():
        _point = Point(TABELA_LOCATION).tag("location",
                                            key).field("occupancy", value)
        record.append(_point)

    # Write to temporary table
    write_api.write(bucket=INFLUXDB_DBNAME, record=record)

    # Build max occupancy query
    query = QUERY_MAX_OCCUPANCY % (startTime, TABELA_TOTAL, TABELA_LOCATION)
    tables = client.query_api().query(query)
    client.close()

    # The query result is a list of all tables created by the query, each of type FluxTable
    # see https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5 for more info

    # Build the result as a list of all records found in the history tables
    result = {"PerHourHistory": [], "PerShiftHistory": []}
    for table in tables:
        for record in table.records:
            if (record.values['result'] == 'historyHour'):
                subdict = {
                    x: record.values[x]
                    for x in [
                        'location', '_value', 'max', 'year', 'month', 'day',
                        'hour'
                    ]
                }
                result["PerHourHistory"].append(subdict)
            elif (record.values['result'] == 'historyShift'):
                subdict = {
                    x: record.values[x]
                    for x in [
                        'location', '_value', 'max', 'year', 'month', 'day',
                        'shift'
                    ]
                }
                result["PerShiftHistory"].append(subdict)
    return result
class InfluxDBClientTestMock(unittest.TestCase):
    def setUp(self) -> None:
        self.influxdb_client = None
        httpretty.enable()
        httpretty.reset()

    def tearDown(self) -> None:
        if self.influxdb_client:
            self.influxdb_client.close()
        httpretty.disable()

    def test_init_without_token(self):
        httpretty.register_uri(httpretty.POST,
                               uri="http://localhost/api/v2/query",
                               status=200,
                               body="")
        self.influxdb_client = InfluxDBClient("http://localhost")
        self.assertIsNotNone(self.influxdb_client)
        self.influxdb_client.query_api().query("buckets()", "my-org")

    def test_redacted_auth_header(self):
        httpretty.register_uri(httpretty.POST,
                               uri="http://localhost/api/v2/query",
                               status=200,
                               body="")
        self.influxdb_client = InfluxDBClient("http://localhost",
                                              "my-token",
                                              debug=True)

        log_stream = StringIO()
        logger = logging.getLogger("influxdb_client.client.http")
        logger.addHandler(logging.StreamHandler(log_stream))

        self.influxdb_client.query_api().query("buckets()", "my-org")
        requests = httpretty.httpretty.latest_requests
        self.assertEqual(1, len(requests))
        self.assertEqual("Token my-token",
                         requests[0].headers["Authorization"])

        self.assertIn("Authorization: ***", log_stream.getvalue())
def compute_mrt_for_simulation_ss(simulation_id: int,
                                  autocorrelation_plot=True
                                  ) -> MeanResponseTime:
    client = InfluxDBClient(url="http://localhost:8086",
                            token="my-token",
                            org="BBM SpA")
    query_api = client.query_api()

    requests_duration = query_api.query_data_frame(
        'import "experimental"\n'
        'from(bucket:"k6") '
        '|> range(start: -1y)'
        f'|> filter(fn: (r) => r._measurement == "http_req_duration" and r._field == "value" and r.status == "200" and r.simulation == "{simulation_id}")'
        '|> map(fn:(r) => ({r with _time: experimental.subDuration(d: duration(v: int(v: r._value*1000000.0)), from: r._time)}))'
        '|> sort(columns: ["_time"], desc: false)')
    client.close()
    values = requests_duration['_value']

    if autocorrelation_plot:
        # Plot sample ACF
        plot_sample_autocorrelation(values, simulation_id)

    durations = values.to_numpy()
    print(f'Initial number of samples: {len(durations)}.')

    modulo = len(durations) % NUM_BATCH

    # If the samples are not a multiple of the number of batches
    # remove a number of initial samples corresponding to the modulo.
    if modulo != 0:
        durations = durations[modulo:]
        print(
            f'Removing {modulo} samples to get an equal number of samples in each batch ({len(durations) / NUM_BATCH}).'
        )

    batches = np.split(durations, NUM_BATCH)

    batches_mean = [np.mean(b) for b in batches]
    grand_batches_mean = np.mean(batches_mean)

    batches_mean_est = sum([(b - grand_batches_mean)**2
                            for b in batches_mean]) / (NUM_BATCH - 1)

    t_quantile = t.ppf(1 - CI_LEVEL, df=NUM_BATCH - 1)
    ci_interval = (t_quantile * math.sqrt(batches_mean_est / NUM_BATCH))

    ci_min = grand_batches_mean - ci_interval
    ci_max = grand_batches_mean + ci_interval

    mrt = MeanResponseTime(grand_batches_mean, ci_min, ci_max, ci_interval)
    print(mrt)
    return mrt
Example #16
def main():
    # Main logic resides here.

    # Connect to local influx db container.
    config_json = load_config()
    client = InfluxDBClient(url='http://localhost:8086',
                            token=config_json['token'],
                            org=config_json['org'])

    try:
        while True:
            is_chrome_open = False
            list_procs = ""
            output = subprocess.Popen(["ps", "aux"], stdout=subprocess.PIPE)
            list_procs = output.stdout.read()
            if 'Google Chrome'.encode('utf-8') in list_procs:
                is_chrome_open = True

            if is_chrome_open:
                all_tabs = subprocess.Popen(
                    ["osascript", "show_tabs_chrome.scpt"],
                    stdout=subprocess.PIPE)
                all_tabs = all_tabs.stdout.read()
                all_tabs = all_tabs.decode('utf-8').split('\n')

                parsed_links = set()
                parsed_active_link = ''
                for link in all_tabs:
                    if link:
                        string_form = link
                        if string_form.split()[0] != 'Active:':
                            parsed_url = parse_url(string_form.split()[0])
                            parsed_links.add(parsed_url)
                        elif string_form.split()[0] == 'Active:':
                            parsed_active_link = parse_url(
                                string_form.split()[1])

                write_api = client.write_api(write_options=SYNCHRONOUS)
                query_api = client.query_api()

                for link in parsed_links:
                    active_time = 0 if parsed_active_link != link else 1
                    inactive_time = 1 if parsed_active_link != link else 0
                    insert_link_query(write_api, link,
                                      config_json['bucket_name'], active_time,
                                      inactive_time)

            time.sleep(1)

    except Exception as e:
        print("Monitoring stopped:", e)
        client.close()
Example #17
def main():
    # By default the arguments are:
    my_token = 'rbuCuV6gRHPJRPIRrLB3kOp874S5mUywVUGXJIUe_o1bf2HpxSqy7E6VB9ZUHKzMK4vGNqo6g6TZipJ2PIEXog=='
    my_org = "UNIPI"
    bucket = "Bucket"
    interface = "en0"

    if len(sys.argv) == 5:
        my_token = sys.argv[1]
        my_org = sys.argv[2]
        bucket = sys.argv[3]
        interface = sys.argv[4]
    else:
        if len(sys.argv) != 1:
            print("Error: Number of arguments")
            exit(1)

    # Query for received bytes
    query1 = f'from(bucket: "{bucket}") |> range(start: -1h, stop: now()) ' \
             '|> filter(fn: (r) => r._measurement == "net")' \
             '|> filter(fn: (r) => r._field == "bytes_recv")' \
             f'|> filter(fn: (r) => r.interface == "{interface}")' \
             '|> derivative(unit: 1s, nonNegative: true, columns: ["_value"])'

    # Query for sent bytes
    query2 = f'from(bucket: "{bucket}") |> range(start: -1h, stop: now()) ' \
             '|> filter(fn: (r) => r._measurement == "net")' \
             '|> filter(fn: (r) => r._field == "bytes_sent")' \
             f'|> filter(fn: (r) => r.interface == "{interface}")' \
             '|> derivative(unit: 1s, nonNegative: true, columns: ["_value"])'

    client = InfluxDBClient(url="http://localhost:9999", token=my_token, org=my_org)
    query_api = client.query_api()

    data1 = query_api.query_data_frame(query=query1)
    data2 = query_api.query_data_frame(query=query2)

    try:
        plt.plot(data1['_time'], data1['_value'], label='Bytes Received')
        plt.plot(data2['_time'], data2['_value'], label='Bytes Sent')
    except (KeyError, TypeError):
        print("Error: check the arguments")
        exit(1)
    plt.legend(loc='upper center', bbox_to_anchor=(0.5, -0.05),
               fancybox=True, shadow=True, ncol=5)
    plt.show()

    # Close client
    client.close()
    print("close client")
def test(options):
    answer = False
    client = None
    try:
        client = InfluxDBClient(url=options["server"],
                                token=options["token"],
                                org=options["organization"],
                                timeout=10000)
        query_api = client.query_api()
        query_api.query_stream('from(bucket:"ups") |> range(start: -10m)')
        answer = True
    except Exception as e:
        logger.error('could not connect to InfluxDB server: %s.', str(e))
    finally:
        if client:
            client.close()
    return answer
Example #19
class HiveData(object):
    def __init__(self, host: str, port: int, bucket: str, token: str,
                 org: str):
        url = f"http://{host}:{port}"
        self._bucket = bucket
        self._client = InfluxDBClient(url=url, token=token, org=org)
        self._write_api = self._client.write_api(write_options=SYNCHRONOUS)
        self._query_api = self._client.query_api()

    async def write_point(self, mac: str, weight: float, temp_in: float,
                          temp_out: float) -> None:
        p = (Point("hivedata").tag("board", mac).field("weight", weight).field(
            "temperature_inside", temp_in).field("temperature_outside",
                                                 temp_out))
        self._write_api.write(bucket=self._bucket, record=p)
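# Example usage of HiveData (all values are placeholders):
import asyncio

hive = HiveData(host="localhost", port=8086, bucket="hives",
                token="my-token", org="my-org")
asyncio.run(hive.write_point(mac="aa:bb:cc:dd:ee:ff", weight=32.4,
                             temp_in=34.1, temp_out=18.7))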
Example #20
    def get_events(measurement):
        client = InfluxDBClient(url=host, token=token, org=org)
        query_api = client.query_api()

        tables = query_api.query(
            'from(bucket: "capitol_tracker") |> range(start: 1700-01-01T00:01:00Z) |> filter(fn: (r) => r["_measurement"] == "event")'
        )

        result = dict()
        # {'result': '_result', 'table': 2, '_start': datetime.datetime(1883, 7, 20, 2, 28, 48, 249303, tzinfo=datetime.timezone.utc), '_stop': datetime.datetime(2020, 6, 11, 2, 28, 48, 249303, tzinfo=datetime.timezone.utc), '_time': datetime.datetime(2016, 12, 5, 0, 0, tzinfo=datetime.timezone.utc), '_value': 2.0, '_field': 'lifecycle', '_measurement': 'event_candidate', 'effect': 'Gained_a_degree', 'name': 'Graduation'}
        for table in tables:
            for record in table.records:
                print(record.values)

                lifecycle_enum = dict([(1.0, "stop"), (0.0, "start")])

                if record.values["_value"] == 2.0:
                    event = {}

                    event["name"] = record.values["name"].replace("_", " ")
                    event["effect"] = record.values["effect"].replace("_", " ")
                    event["time"] = record.values["_time"]

                    if "event_id" in record.values:
                        event["event_id"] = record.values["event_id"].replace(
                            "_", " ")

                    result[record.values["name"]] = event
                elif record.values["_value"] < 2.0:
                    event = {}

                    if record.values["name"] in result:
                        event = result.get(record.values["name"])

                    event["name"] = record.values["name"].replace("_", " ")
                    event["effect"] = record.values["effect"].replace("_", " ")

                    if not "time" in event:
                        event["time"] = {}

                    event["time"][lifecycle_enum.get(
                        record.values["_value"])] = record.values["_time"]

                    result[record.values["name"]] = event

        return result.values()
Example #21
def setup(request):
    print("Load testing data")

    client = InfluxDBClient(url=url, token=token, org=my_org, debug=False)
    write_api = client.write_api()
    buckets_api = client.buckets_api()
    query_api = client.query_api()

    org = find_my_org(client, my_org)

    bucket = buckets_api.find_bucket_by_name(bucket_name=test_bucket)
    if bucket is not None:
        buckets_api.delete_bucket(bucket=bucket)

    bucket = buckets_api.create_bucket(bucket_name=test_bucket, org_id=org.id)

    num_records = 10
    num_series = 10

    today = datetime.utcnow()
    print("*** Write test series ***")
    for loc in range(num_series):
        for i in range(num_records):
            time_ = today - timedelta(hours=i + 1)
            point = Point(measurement_name="h2o_feet") \
                .time(time_) \
                .field("water_level", float(i)) \
                .tag("location", "location_" + str(loc)) \
                .tag("country", "country_" + str(loc))
            write_api.write(bucket=bucket.name, record=point)

    time.sleep(1)

    query = 'from(bucket:"{0}") |> range(start: 1970-01-01T00:00:00.000000001Z)'.format(
        test_bucket)

    flux_result = query_api.query(query)

    assert len(flux_result) == num_series
    records = flux_result[0].records
    assert len(records) == num_records

    ip.run_line_magic("load_ext", "flux")
    ip.run_line_magic("flux",
                      "http://localhost:9999 --token my-token --org my-org")
    request.addfinalizer(cleanup)
def compute_mrt_for_simulation_fh(simulation_id: int) -> MeanResponseTime:
    client = InfluxDBClient(url="http://localhost:8086",
                            token="my-token",
                            org="BBM SpA")
    query_api = client.query_api()

    replications_mean = []

    assert REPLICATIONS_NUM > 0, 'Replications should be greater than 0.'
    for i in range(REPLICATIONS_NUM, 0, -1):
        sim_id = simulation_id - i + 1
        print(f'Analyzing simulation {sim_id}')

        query = query_api.query_data_frame(
            'import "experimental"\n'
            'from(bucket:"k6") '
            '|> range(start: -1y)'
            f'|> filter(fn: (r) => r._measurement == "http_req_duration" and r._field == "value" and r.status == "200" and r.simulation == "{sim_id}")'
            '|> map(fn:(r) => ({r with _time: experimental.subDuration(d: duration(v: int(v: r._value*1000000.0)), from: r._time)}))'
            '|> sort(columns: ["_time"], desc: false)')

        # extract the series with the request duration values
        req_durations = query['_value']

        # store the mean of this series
        replications_mean.append(req_durations.mean())

    client.close()

    # the grand mean is the mean of the per-replication means
    grand_replications_mean = np.mean(replications_mean)

    replications_mean_est = sum([
        (b - grand_replications_mean)**2 for b in replications_mean
    ]) / (REPLICATIONS_NUM - 1)

    t_quantile = t.ppf(1 - CI_LEVEL, df=REPLICATIONS_NUM - 1)
    ci_interval_half_width = (
        t_quantile * math.sqrt(replications_mean_est / REPLICATIONS_NUM))

    ci_min = grand_replications_mean - ci_interval_half_width
    ci_max = grand_replications_mean + ci_interval_half_width

    mrt = MeanResponseTime(grand_replications_mean, ci_min, ci_max,
                           ci_interval_half_width)
    print(mrt)
    return mrt
    def get(self):
        client = InfluxDBClient(url=host, token=token, org=org)
        query_api = client.query_api()

        def format_list_filter(key):
            def format_list_filter_value(value):
                return 'r["{key}"] == "{value}"'.format(key=key, value=value)

            return format_list_filter_value

        def format_filter_statement(k_v_pair):
            if type(k_v_pair[1]) is list:
                return "filter(fn: (r) => {or_statement})".format(
                    or_statement=" or ".join(
                        list(map(format_list_filter(k_v_pair[0]),
                                 k_v_pair[1]))))

            return 'filter(fn: (r) => r["{key}"] == "{value}")'.format(
                key=k_v_pair[0], value=k_v_pair[1])

        query = 'from(bucket: "capitol_tracker") |> range(start: {start})'.format(
            start=self.time["start"])

        if type(self.measurement) is list:
            # a list of measurements becomes a single filter OR-ing the values
            query = "{base_query} |> {measurement_filter}".format(
                base_query=query,
                measurement_filter=format_filter_statement(
                    ("_measurement", self.measurement)),
            )
        else:
            query = '{base_query} |> filter(fn: (r) => r["_measurement"] == "{value}")'.format(
                value=self.measurement)

        if self.tags:
            query = "{base_query} |> {tag_filter}".format(
                base_query=query,
                tag_filter=" |> ".join(
                    list(map(format_filter_statement, self.tags.items()))),
            )

        tables = query_api.query(query)

        return tables
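# For illustration, with self.measurement = ["poll", "poll_meta"] and
# self.tags = {"state": "CA"}, the builder above produces (line breaks added):
#
#   from(bucket: "capitol_tracker") |> range(start: <start>)
#     |> filter(fn: (r) => r["_measurement"] == "poll" or r["_measurement"] == "poll_meta")
#     |> filter(fn: (r) => r["state"] == "CA")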
    def get_last_data(self, hostsname=None) -> dict:
        out_data = {}
        if hostsname:
            db_client = InfluxDBClient(url=self.url, token=self.token, org=self.org)
            self.logger.debug(self.query_string % hostsname)
            db_data = db_client.query_api().query_stream(query=(self.query_string % hostsname), org=self.org)
            for record in db_data:
                self.logger.debug(
                    f'Time {record["_time"]} Down {record["DownloadBandwidth"]}, UploadBandwidth {record["UploadBandwidth"]}, Ping {record["PingLatency"]}')
                out_data = record.values
                out_data["time"] = out_data.pop("_time")
                del out_data["result"]
                del out_data["table"]
            self.logger.debug(out_data)

            db_client.close()

        return out_data
Example #25
def get_handled_data(params):
    #Connection Influxdb
    token = current_app.config['INFLUXDB_TOKEN']
    org = current_app.config['INFLUXDB_ORG']

    # Connection to InfluxDB database
    client = InfluxDBClient(url=current_app.config['INFLUXDB_URL'],
                            token=token,
                            debug=True)

    # Query
    query_api = client.query_api()

    dict_params = {
        'begin_date': datetime.strptime(params['beginDate'], '%Y-%m-%d'),
        'end_date': datetime.strptime(params['endDate'], '%Y-%m-%d')
    }

    # InfluxDB returns a Bad Request when beginDate and endDate are equal,
    # so add one day to the end date in that case
    if dict_params.get('begin_date') == dict_params.get('end_date'):
        dict_params['end_date'] = dict_params.get('end_date') + timedelta(
            days=1)
    """
    Query: using csv library
    1st parameter : query,
    2nd parameter : Dialect instance (object) to specify details / options about CSV result
    3rd parameter : org (for organization)

    All informations above have been copied from InfluxDB UI : Telegraf 
    """
    csv_result = query_api.query_csv(
        f'''from(bucket:"{current_app.config['INFLUXDB_BUCKET']}")  
        |> range(start: begin_date, stop: end_date)''',
        dialect=Dialect(header=True,
                        delimiter=",",
                        comment_prefix="#",
                        annotations=[],
                        date_time_format="RFC3339"),
        org=org,
        params=dict_params)

    return csv_result
Example #26
    def search():
        client = InfluxDBClient(url=host, token=token, org=org)
        query_api = client.query_api()

        tables = query_api.query(
            'from(bucket: "capitol_tracker") |> filter(fn: (r) => r["_measurement"] != "event_candidate") |> filter(fn: (r) => r["_measurement"] != "event") |> range(start: 1700-01-01T00:01:00Z)'
        )

        result = []
        for table in tables:
            for record in table.records:
                poll_node = {}

                poll_node["stance"] = record.values["stance"]
                poll_node[record.values["_field"]] = record.values["_value"]
                poll_node["time"] = record.values["_time"]

                result.append(poll_node)

        return result
Example #27
def get_the_data(time):
    from influxdb_client import InfluxDBClient

    token = "CCnNI4aAzqWKUMMMFLTxE9jZLembWconNmgLNHxNpHLlR9VfvKcfexcLYWTCrvN6HGcvqMBKEXdLvfU-6l8KJA=="
    org = "autointelli"
    bucket = "autointelli"

    client = InfluxDBClient(url="http://172.16.1.4:8086", token=token, org=org)
    query_api = client.query_api()
    query = f'''
    from(bucket: "{bucket}")
    |> range(start: -{time}, stop: now())
    |> filter(fn: (r) => r._measurement == "cpu")
    |> filter(fn: (r) => r._field == "usage_user")
    |> filter(fn: (r) => r.cpu == "cpu-total")'''

    tables = query_api.query_data_frame(query)
    client.close()
    return tables
def get_influx_data_sensor(org="test", bucket="test"):
    influx_client = InfluxDBClient(
        url=globals()["INFLUXDB_URI"],
        token=
        "dpVzJpGpdTiNhjHLKOWpXf8OlY-rZUwi4Cvd10kPU86upOKBRO_TA5R6PClkVKjGj_TIXQGAm5g27wDggHJHcw==",
        org=org)
    query_api = influx_client.query_api()
    query = f'from(bucket:"{bucket}")|> range(start: -141400080m)|> group() |> filter(fn:(r) => r._measurement == "humidity")' \
            '|> filter(fn:(r) => r.uri == "u4/302/humidity/ilot2")'
    result = query_api.query(org=org, query=query)
    results = {}
    val_min = None
    val_max = None
    for table in result:
        for record in table.records:
            if record["uri"] not in results:
                results[record["uri"]] = {
                    "id": record["uri"],
                    "values": [{
                        "val_min": val_min,
                        "val_max": val_max
                    }]
                }
            results[record["uri"]]["values"].append({
                "value":
                record.get_value(),
                "value_unit":
                record["value_units"],
                "date":
                datetime.datetime.timestamp(record["_time"])
            })
            if val_max is None or record.get_value() > val_max:
                val_max = record.get_value()
            if val_min is None or record.get_value() < val_min:
                val_min = record.get_value()
    for uri in results:
        results[uri]["values"][0]["val_min"] = val_min
        results[uri]["values"][0]["val_max"] = val_max
    return build_actual_response(jsonify(results))
def TraceReport(targetUser, startTime, stopTime):

    # Connects to the database
    url = "http://{}:{}".format(INFLUXDB_HOST, INFLUXDB_PORT)
    client = IF(url=url, token="", org="")

    # Build trace query
    query = QUERY_TRACE % (targetUser, startTime, stopTime, TABELA_TRACE)
    tables = client.query_api().query(query)
    client.close()

    # The query result is a list of all tables created by the query, each of type FluxTable
    # see https://github.com/influxdata/influxdb-client-python/blob/master/influxdb_client/client/flux_table.py#L5 for more info

    # Build the result as a list of all records found in the suspect table
    result = []
    for table in tables:
        for record in table.records:
            subdict = {x: record.values[x] for x in ['local', 'userid']}
            if subdict not in result:
                result.append(subdict)
    return result
class InfluxIntegrator:
    def __init__(self, influx_url="http://141.115.103.33:9999",
                 token = None, org=None, **kwargs):
        self.org = org
        self.token = token
        self.influx_client = InfluxDBClient(url=influx_url, token=token, org=org, **kwargs)
        # Write synchronously so each call completes before returning
        self.write_api = self.influx_client.write_api(write_options=SYNCHRONOUS)
        self.query_api = self.influx_client.query_api()
        # self.database_list = self.influx_client.get_list_database()


    def write(self, bucket, measurement: str,
              time,
              field_list: list,
              tag_list: list = [],
              **kwargs):
        '''
        :param bucket: the bucket to write the data to
        :param measurement: name of the measurement
        :param time: timestamp
        :param field_list: list of (key, value) field tuples
        :param tag_list: optional list of (key, value) tag tuples
        :return: None
        '''
        point = Point(measurement)
        point.time(time, WritePrecision.MS)
        if not field_list:
            # TODO: create a dedicated NoDataException
            raise Exception("No fields to write to the database.")
        for field_tuple in field_list:
            point.field(field_tuple[0], field_tuple[1])
        for tag_tuple in tag_list:
            point.tag(tag_tuple[0], tag_tuple[1])

        self.write_api.write(bucket=bucket, record=point, org=self.org, **kwargs)
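# Example usage of the integrator above (URL, token and org are placeholders):
from datetime import datetime

integrator = InfluxIntegrator(influx_url="http://localhost:8086",
                              token="my-token", org="my-org")
integrator.write(bucket="sensors", measurement="room_climate",
                 time=datetime.utcnow(),
                 field_list=[("temperature", 21.5), ("humidity", 40)],
                 tag_list=[("room", "lab")])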