    def test_cqs_recompute_data(self):
        from core.data.cq_aggregates import cqs_recompute_data
        from core.data.influx import get_influxdb_client
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import db_aggregated_sensor_data

        result = db_aggregated_sensor_data(
            "ecotype-1_pdu-Z1.51",
            start_date=f"{TestCqAggregates.start_timestamp}s",
            end_date=f"{TestCqAggregates.end_timestamp}s",
            how="hourly")

        self.assertEqual(len(result.get("means")), 0)

        result = cqs_recompute_data()
        self.assertIsNotNone(result)

        db_name = get_influxdb_parameters().get("database")

        db_client = get_influxdb_client()

        result = db_aggregated_sensor_data(
            "ecotype-1_pdu-Z1.51",
            start_date=f"{TestCqAggregates.start_timestamp}s",
            end_date=f"{TestCqAggregates.end_timestamp}s",
            how="hourly")

        self.assertGreaterEqual(len(result.get("means")), 48)
        self.assertLessEqual(len(result.get("means")), 49)

    def test_adding_new_reading(self):
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client
        from core.config.crawlers_config import get_socomec_sensors
        from bin.sensors_crawler import new_socomec_reading
        from core.data.influx import db_last_sensors_updates

        with requests_mock.Mocker(real_http=True) as m:

            parameters = get_influxdb_parameters()
            db_client = get_influxdb_client()

            socomec_sensors = get_socomec_sensors()

            for socomec_sensor in socomec_sensors:
                new_socomec_reading(socomec_sensor)

            last_sensor_updates = db_last_sensors_updates()

            self.assertEqual(len(last_sensor_updates), 1)

            for sensor_reading in last_sensor_updates:
                self.assertIn(sensor_reading.get("sensor"),
                              ['wattmeter_condensator'])
                self.assertGreaterEqual(sensor_reading.get("last_value"), 42)
                self.assertLessEqual(sensor_reading.get("last_value"), 42)
                self.assertIn(sensor_reading.get("location"), ['B232'])
                self.assertIn(sensor_reading.get("unit"), ['W'])
                self.assertIn(sensor_reading.get("sensor_type"), ['wattmeter'])

                reading_date = arrow.get(sensor_reading.get("time"))
                self.assertLessEqual(abs(reading_date.timestamp - time.time()),
                                     120)

    def test_cqs_recreate_all(self):
        from core.data.cq_aggregates import cqs_recreate_all
        from core.data.influx import get_influxdb_client
        from core.data.influx import get_influxdb_parameters

        result = cqs_recreate_all(force_creation=True)

        self.assertIsNotNone(result)

        db_name = get_influxdb_parameters().get("database")

        db_client = get_influxdb_client()
        influx_continuous_queries = db_client.get_list_continuous_queries(
        )[1][db_name]

        for expected_cq in TestCqAggregates.expected_continuous_queries:
            expected_continuous_query_name = expected_cq.get("name")
            expected_continuous_query_query = expected_cq.get("query")

            influx_continuous_query_candidate = [
                influx_continuous_query
                for influx_continuous_query in influx_continuous_queries
                if influx_continuous_query["name"] ==
                expected_continuous_query_name
            ]
            self.assertEqual(len(influx_continuous_query_candidate), 1)

            influx_continuous_query = influx_continuous_query_candidate[0]

            self.assertEqual(influx_continuous_query.get("name"),
                             expected_continuous_query_name)
            self.assertEqual(influx_continuous_query.get("query"),
                             expected_continuous_query_query)
Example #4
def cq_drop_cqs_and_series_names(cq_name):
    db_name = get_influxdb_parameters().get("database")
    db_client = get_influxdb_client()

    multitree_data = cq_get_cqs_and_series_names(cq_name)
    series = multitree_data["series"]
    cqs = multitree_data["cqs"]

    # Use a dedicated loop variable so the cq_name parameter is not shadowed
    for cq_to_drop in cqs:
        query = """DROP CONTINUOUS QUERY %s ON %s""" % (cq_to_drop, db_name)
        logging.debug("Dropping continuous query '%s'" % cq_to_drop)
        #print(query)
        try:
            db_client.query(query, database=db_name)
        except InfluxDBClientError:
            print("cq \"%s\" was already dropped" % (cq_to_drop))
    for series_name in series:
        query = """DROP SERIES FROM %s""" % (series_name)
        #print(query)
        logging.debug("Dropping series '%s'" % series_name)
        try:
            db_client.query(query, database=db_name)
        except InfluxDBClientError:
            print("series %s was already dropped" % (series_name))

    db_client.close()
Example #5
    def test_get_influxdb_client(self):
        from core.data.influx import get_influxdb_client

        test_failed = True
        try:
            db_client = get_influxdb_client()
            test_failed = False
        except Exception:
            print("Could not connect to the database")

        self.assertFalse(test_failed)
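
All of these examples revolve around two helpers from core.data.influx that this page never shows. A minimal sketch of what they might look like, assuming the standard influxdb Python client and a flat parameters dict (the host and port keys are assumptions inferred from usage; only "database" is actually read in the snippets below):

from influxdb import InfluxDBClient

# Hypothetical connection parameters for illustration only
INFLUX_PARAMETERS = {
    "host": "127.0.0.1",
    "port": 8086,
    "database": "tests",
}

def get_influxdb_parameters():
    return INFLUX_PARAMETERS

def get_influxdb_client():
    parameters = get_influxdb_parameters()
    return InfluxDBClient(host=parameters["host"],
                          port=parameters["port"],
                          database=parameters["database"])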
Example #6
    @classmethod
    def tearDownClass(cls):
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.drop_database(db_name)
        db_client.close()
Example #7
    def test_adding_new_reading(self):
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client
        from core.config.crawlers_config import get_modbus_sensors
        from bin.modbus_crawler import new_modbus_reading
        from core.data.influx import db_last_sensors_updates

        expected_values = {
            "inrow_group_cool_output": 4200,  # big_endian_int32
            "inrow_group_cool_output2": 1200,  # little_endian_uint32
            "inrow_group_cool_output3": 6400,  # big_endian_int16
            "inrow_group_cool_output4": 12800,  # little_endian_uint16
            "inrow_group_cool_output5": 25600,  # big_endian_float32
            "inrow_group_cool_output_little": 4200,  # big_endian_int32
            "inrow_group_cool_output2_little": 1200,  # little_endian_uint32
            "inrow_group_cool_output3_little": 6400,  # big_endian_int16
            "inrow_group_cool_output4_little": 12800,  # little_endian_uint16
            "inrow_group_cool_output5_little": 25600,  # big_endian_float32
        }

        # with requests_mock.Mocker(real_http=True) as m:

        parameters = get_influxdb_parameters()
        db_client = get_influxdb_client()

        modbus_sensors = get_modbus_sensors()

        for modbus_sensor in modbus_sensors:
            new_modbus_reading(modbus_sensor)

        last_sensor_updates = db_last_sensors_updates()

        self.assertEqual(len(last_sensor_updates),
                         len(list(expected_values.keys())))

        for sensor_reading in last_sensor_updates:

            expected_value = expected_values.get(sensor_reading.get("sensor"))

            self.assertIn(sensor_reading.get("sensor"),
                          list(expected_values.keys()))
            self.assertGreaterEqual(sensor_reading.get("last_value"),
                                    expected_value)
            self.assertLessEqual(sensor_reading.get("last_value"),
                                 expected_value)
            self.assertIn(sensor_reading.get("location"), ["not\\ specified"])
            self.assertIn(sensor_reading.get("unit"), ['W'])
            self.assertIn(sensor_reading.get("sensor_type"), ['wattmeter'])

            reading_date = arrow.get(sensor_reading.get("time"))
            self.assertLessEqual(abs(reading_date.timestamp - time.time()),
                                 120)

    @classmethod
    def setUpClass(cls):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.create_database(db_name)
        db_client.close()

    def tearDown(self):
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.drop_database(db_name)
        db_client.close()

        # Stop the fake modbus agent
        self.modbus_agent.stop()
Example #10
    @classmethod
    def tearDownClass(cls):
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.drop_database(db_name)
        db_client.close()

        # Stop the TemperatureRegister agent
        cls.fake_temperature_registerer.stop()

def list_continuous_queries():
    db_client = get_influxdb_client()

    query = "show continuous queries"

    db_name = get_influxdb_parameters().get("database")

    cqs = list(db_client.query(query, database=db_name).get_points())

    multitree_nodes = get_nodes() + get_production_nodes()
    multitree_nodes_cq_ids = [n.get("id") for n in multitree_nodes if "target" not in n]

    for cq in cqs:
        _extend_description_of_cq(cq, multitree_nodes_cq_ids)

    return cqs

    def setUp(self):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.create_database(db_name)
        db_client.close()

        # Create a fake modbus agent
        self.modbus_agent = FakeModbusAgent(port=5021)
        self.modbus_agent.start()

    @classmethod
    def setUpClass(cls):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.create_database(db_name)

        # Create a few continuous queries in charge of computing aggregated data
        from core.data.cq_aggregates import cq_multitree_recreate_all
        cq_multitree_recreate_all(recreate_all=True)

        # Insert some data
        data = []
        start_timestamp = int(time.time() - 48 * 3600)
        sensors = {
            "wattmeter": ["ecotype-1_pdu-Z1.51", "ecotype-1_pdu-Z1.50"]
        }
        for sensor_type in sensors:
            for sensor in sensors.get(sensor_type):
                for value in range(0, 48 * 3600, 15):
                    data += [{
                        "measurement": "sensors",
                        "fields": {
                            "value": value
                        },
                        "time": start_timestamp + value,
                        "tags": {
                            "location": "room exterior",
                            "sensor": f"{sensor}",
                            "unit": "celsius",
                            "sensor_type": f"{sensor_type}"
                        }
                    }]
        # Write all accumulated points once, after every series is generated
        db_client.write_points(data, time_precision="s", batch_size=8 * 3600)

        # Update data of the continuous queries
        cq_multitree_recreate_all(recreate_all=False)

        db_client.close()

def flush_records():
    global influx_lock
    global RECORDS
    db_client = get_influxdb_client()

    # Copy and reset the shared buffer under the lock
    influx_lock.acquire()
    flush_data = RECORDS[:]
    RECORDS = []
    influx_lock.release()

    try:
        db_client.write_points(flush_data, time_precision="s")
        LOGGER.info("[influx] %s rows have been inserted in the database" %
                    (len(flush_data)))
    except Exception:
        LOGGER.exception("write failure")

    db_client.close()

    return False
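
flush_records() drains a module-level RECORDS buffer guarded by influx_lock; the producer side is not shown on this page. A hypothetical producer helper, assuming points follow the same shape as the write_points payloads used elsewhere in these examples:

def buffer_record(measurement, value, tags, timestamp):
    # Hypothetical helper: append one point to the shared buffer so a
    # later flush_records() call writes it in a single batch.
    global RECORDS
    point = {
        "measurement": measurement,
        "fields": {"value": value},
        "tags": tags,
        "time": timestamp,
    }
    influx_lock.acquire()
    try:
        RECORDS.append(point)
    finally:
        influx_lock.release()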
Example #15
    @classmethod
    def setUpClass(cls):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.create_database(db_name)
        db_client.close()

        # Create a TemperatureRegister agent
        from tests.utils import FakeTemperatureRegistererAgent

        cls.fake_temperature_registerer = FakeTemperatureRegistererAgent()
        cls.fake_temperature_registerer.start()

    def test_adding_new_reading(self):
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client
        from core.config.crawlers_config import get_flukso_sensors
        from bin.sensors_crawler import new_flukso_reading
        from core.data.influx import db_last_sensors_updates

        with requests_mock.Mocker(real_http=True) as m:

            now = int(time.time())
            now_60s_later = now + 60

            fake_response = [[ts, random.choice([7, 8])]
                             for ts in range(now, now_60s_later)]

            m.get(
                'http://192.168.1.3:8080/sensor/c6a2caade50d7532ee3a3292238fd587?version=1.0&interval=minute&unit=watt',
                json=fake_response)

            parameters = get_influxdb_parameters()
            db_client = get_influxdb_client()

            flukso_sensors = get_flukso_sensors()

            for flukso_sensor in flukso_sensors:
                new_flukso_reading(flukso_sensor)

            last_sensor_updates = db_last_sensors_updates()

            self.assertEqual(len(last_sensor_updates), 1)

            for sensor_reading in last_sensor_updates:
                self.assertIn(sensor_reading.get("sensor"),
                              ['watt_cooler_b232_1'])
                self.assertGreaterEqual(sensor_reading.get("last_value"), 7)
                self.assertLessEqual(sensor_reading.get("last_value"), 8)
                self.assertIn(sensor_reading.get("location"), ['B232'])
                self.assertIn(sensor_reading.get("unit"), ['W'])
                self.assertIn(sensor_reading.get("sensor_type"), ['wattmeter'])

                reading_date = arrow.get(sensor_reading.get("time"))
                self.assertGreaterEqual(reading_date.timestamp, now)
                self.assertLessEqual(reading_date.timestamp, now_60s_later)
Example #17
def flush_records(args):
    global influx_lock
    global RECORDS
    db_client = get_influxdb_client()

    influx_lock.acquire()
    flush_data = RECORDS
    RECORDS = []
    influx_lock.release()

    try:
        db_client.write_points(flush_data, time_precision="s")
        LOGGER.info("%s rows have been inserted in the database" %
                    len(flush_data))
    except Exception:
        LOGGER.exception("Writing failure")

    db_client.close()

    return False
Example #18
    @classmethod
    def setUpClass(cls):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.create_database(db_name)
        db_client.close()

        # Create a fake modbus agent
        cls.modbus_agent = FakeModbusAgent()
        cls.modbus_agent.start()

        cls.modbus_agent_little_endian = FakeModbusAgent(port=5021,
                                                         endian="little")
        cls.modbus_agent_little_endian.start()

def get_continuous_query_by_name(query_name):
    db_client = get_influxdb_client()

    query = "show continuous queries"

    db_name = get_influxdb_parameters().get("database")

    cqs = list(db_client.query(query, database=db_name).get_points())

    multitree_nodes = get_nodes()
    multitree_nodes_cq_ids = [n.get("id") for n in multitree_nodes if "target" not in n]

    cq_candidates = [cq for cq in cqs if cq.get("name") == query_name]

    if not cq_candidates:
        return None

    cq = cq_candidates[0]
    _extend_description_of_cq(cq, multitree_nodes_cq_ids)

    return cq
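
A hedged usage sketch: the dict returned for a matching name follows InfluxDB's SHOW CONTINUOUS QUERIES output (name and query keys), plus whatever _extend_description_of_cq adds.

cq = get_continuous_query_by_name("cq_measurement_downsample_1m")
if cq is None:
    print("no such continuous query")
else:
    print(cq.get("name"), "=>", cq.get("query"))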
Example #20
    def test_adding_new_reading(self):
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client
        from bin.temperature_registerer import flush_records
        from core.data.influx import db_last_sensors_updates

        parameters = get_influxdb_parameters()
        db_client = get_influxdb_client()

        import requests
        requests.post("http://127.0.0.1:5500/temperature/list",
                      json=[{
                          "sensor": "3b96f85809fc2c27",
                          "v": 42.1
                      }, {
                          "sensor": "3ba6b75809fc0c6f",
                          "v": 42.2
                      }])

        time.sleep(2)

        last_sensor_updates = db_last_sensors_updates()

        self.assertEqual(len(last_sensor_updates), 2)

        for sensor_reading in last_sensor_updates:
            self.assertIn(sensor_reading.get("sensor"),
                          ['3b96f85809fc2c27', '3ba6b75809fc0c6f'])
            self.assertGreaterEqual(sensor_reading.get("last_value"), 42.1)
            self.assertLessEqual(sensor_reading.get("last_value"), 42.2)
            self.assertIn(sensor_reading.get("location"), ["room\\ exterior"])
            self.assertIn(sensor_reading.get("unit"), ['celsius'])
            self.assertIn(sensor_reading.get("sensor_type"), ['temperature'])

            reading_date = arrow.get(sensor_reading.get("time"))
            self.assertLessEqual(abs(reading_date.timestamp - time.time()),
                                 120)
Example #21
    @classmethod
    def setUpClass(cls):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.drop_database(db_name)
        db_client.create_database(db_name)

        # Create a few continuous queries in charge of computing aggregated data
        from core.data.cq_aggregates import cq_multitree_recreate_all
        cq_multitree_recreate_all(recreate_all=True)

        # Insert some data
        data = []
        start_timestamp = int(time.time() - 12 * 3600)

        all_sensors = _load_sensors_data()
        sensors = [*all_sensors.get("temperature").values()]\
                  + [*all_sensors.get("socomec").values()]\
                  + [*all_sensors.get("flukso").values()]\
                  + [*all_sensors.get("pdus").values()]

        count = 0
        for timestamp in range(start_timestamp, start_timestamp + 12 * 3600,
                               15):
            count += 1
            for sensor in sensors:

                sensor_type = sensor.get("sensor_type")
                unit = sensor.get("unit")
                sensor_name = sensor.get("name")

                if sensor_type == "temperature":
                    value = 10 + (count % 20)
                else:
                    value = 100 + (count % 30)

                data += [{
                    "measurement": "sensors",
                    "fields": {
                        "value": value
                    },
                    "time": timestamp,
                    "tags": {
                        "location": "room exterior",
                        "sensor": f"{sensor_name}",
                        "unit": unit,
                        "sensor_type": f"{sensor_type}"
                    }
                }]
        db_client.write_points(data, time_precision="s")

        # Update data of the continuous queries
        cq_multitree_recreate_all(recreate_all=False)
        db_client.close()

        # Launch web-app
        from core.misc import ensure_admin_user_exists
        import logging
        from bin.app import app, db, login_manager

        login_manager._login_disabled = True

        logging.basicConfig(level=logging.DEBUG)
        # Create DB
        print("Creating database")
        db.create_all()

        cls.app = app
        cls.test_client = app.test_client()
Example #22
    @classmethod
    def setUpClass(cls):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.create_database(db_name)

        # Insert some data
        data = []
        cls.start_timestamp = 1566368000
        cls.end_timestamp = cls.start_timestamp + 48 * 3600
        cls.sensors = {
            "wattmeter": ["wattmeter1", "wattmeter2"],
            "temperature": [
                "3b96f85809fc2c27", "3ba6b75809fc0c6f", "3bd10a5909fc6c3b",
                "3bbefa5809fc2cb3"
            ]
        }
        for sensor_type in cls.sensors:
            for sensor in cls.sensors.get(sensor_type):
                for value in range(0, 48 * 3600, 15):
                    data += [{
                        "measurement": "sensors",
                        "fields": {
                            "value": value
                        },
                        "time": cls.start_timestamp + value,
                        "tags": {
                            "location": "room exterior",
                            "sensor": f"{sensor}",
                            "unit": "celsius",
                            "sensor_type": f"{sensor_type}"
                        }
                    }]
        # Write all accumulated points once, after every series is generated
        db_client.write_points(data,
                               time_precision="s",
                               batch_size=8 * 3600)

        # Create a few continuous queries in charge of computing aggregated data
        cqs = [{
            "name":
            "cq_measurement_downsample_1m",
            "query":
            "SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(value) AS value"
            " INTO measurement_downsample_1m"
            " FROM sensors GROUP BY time(1m), sensor"
        }, {
            "name":
            "cq_measurement_downsample_1h",
            "query":
            "SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value"
            " INTO measurement_downsample_1h "
            " FROM measurement_downsample_1m GROUP BY time(1h), sensor"
        }, {
            "name":
            "cq_measurement_downsample_1d",
            "query":
            "SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value"
            " INTO measurement_downsample_1d"
            " FROM measurement_downsample_1h GROUP BY time(1d), sensor"
        }, {
            "name":
            "cq_measurement_downsample_all_1m",
            "query":
            "SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(value) AS value"
            " INTO measurement_downsample_all_1m"
            " FROM sensors GROUP BY time(1m), sensor_type"
        }, {
            "name":
            "cq_measurement_downsample_all_1h",
            "query":
            "SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value"
            " INTO measurement_downsample_all_1h"
            " FROM measurement_downsample_all_1m GROUP BY time(1h), sensor_type"
        }, {
            "name":
            "cq_measurement_downsample_all_1d",
            "query":
            "SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value"
            " INTO measurement_downsample_all_1d"
            " FROM measurement_downsample_all_1h GROUP BY time(1d), sensor_type"
        }]

        for cq in cqs:
            db_client.create_continuous_query(cq.get("name"), cq.get("query"))

        # Execute the CQs
        for cq in cqs:
            db_client.query(cq.get("query"))

        db_client.close()
Example #23
def cqs_recompute_data():
    db_name = get_influxdb_parameters().get("database")
    db_client = get_influxdb_client()

    first_value_candidates = list(db_client.query("select first(value) from sensors", database=db_name).get_points())
    if len(first_value_candidates) == 0:
        return False

    first_value = first_value_candidates[0]
    oldest_timestamp = first_value["time"]

    aggregated_fields = """sum("value"), mean("value"), stddev("value"), count("value"), median("value"), min("value"), max("value")"""

    logging.debug("Recomputing '%s' continuous query" % ("measurement_downsample_1m"))
    query = """SELECT %s, mean("value") as value
    INTO "measurement_downsample_1m"
    FROM "sensors"
    WHERE time >= '%s'
    GROUP BY time(1m), sensor""" % (aggregated_fields, oldest_timestamp)
    #print(query)
    db_client.query(query, database=db_name)

    logging.debug("Recomputing '%s' continuous query" % ("measurement_downsample_1h"))
    query = """SELECT %s, mean("mean") as value
    INTO "measurement_downsample_1h"
    FROM "measurement_downsample_1m"
    WHERE time >= '%s'
    GROUP BY time(1h), sensor""" % (aggregated_fields, oldest_timestamp)
    #print(query)
    db_client.query(query, database=db_name)

    logging.debug("Recomputing '%s' continuous query" % ("measurement_downsample_1d"))
    query = """SELECT %s, mean("mean") as value
    INTO "measurement_downsample_1d"
    FROM "measurement_downsample_1h"
    WHERE time >= '%s'
    GROUP BY time(1d), sensor""" % (aggregated_fields, oldest_timestamp)
    #print(query)
    db_client.query(query, database=db_name)

    logging.debug("Recomputing '%s' continuous query" % ("measurement_downsample_all_1m"))
    query = """SELECT %s, mean("value") as value
    INTO "measurement_downsample_all_1m"
    FROM "sensors"
    WHERE time >= '%s'
    GROUP BY time(1m), sensor_type""" % (aggregated_fields, oldest_timestamp)
    #print(query)
    db_client.query(query, database=db_name)

    logging.debug("Recomputing '%s' continuous query" % ("measurement_downsample_all_1h"))
    query = """SELECT %s, mean("mean") as value
    INTO "measurement_downsample_all_1h"
    FROM "measurement_downsample_all_1m"
    WHERE time >= '%s'
    GROUP BY time(1h), sensor_type""" % (aggregated_fields, oldest_timestamp)
    #print(query)
    db_client.query(query, database=db_name)

    logging.debug("Recomputing '%s' continuous query" % ("measurement_downsample_all_1d"))
    query = """SELECT %s, mean("mean") as value
    INTO "measurement_downsample_all_1d"
    FROM "measurement_downsample_all_1h"
    WHERE time >= '%s'
    GROUP BY time(1d), sensor_type""" % (aggregated_fields, oldest_timestamp)
    #print(query)
    db_client.query(query, database=db_name)

    for aggregate_name in AGGREGATES_CONFIG:
        aggregate_type = AGGREGATES_CONFIG[aggregate_name]["type"]
        filter_expression = AGGREGATES_CONFIG[aggregate_name]["filter_expression"]
        aggregate_function_level1 = AGGREGATES_CONFIG[aggregate_name]["aggregate_function_level1"]
        aggregate_function_level2 = AGGREGATES_CONFIG[aggregate_name]["aggregate_function_level2"]
        aggregate_frequency = AGGREGATES_CONFIG[aggregate_name]["aggregate_frequency"]

        # CQ for making an average data periodically collected for the current aggregate
        cq_name = "cq_measurement_%s_aggregate_%s" % (aggregate_name, aggregate_frequency)
        logging.debug("Recomputing '%s' continuous query" % (cq_name))
        query = """select %s(%s) as value
        INTO "%s"
        from (
            select %s(value)
            from sensors
            where (%s) and time > '%s'
            group by time(%s), sensor
            )
        where time > '%s'
        group by time(%s)""" % (aggregate_function_level1,
                                aggregate_function_level2,
                                cq_name,
                                aggregate_function_level2,
                                filter_expression,
                                oldest_timestamp,
                                aggregate_frequency,
                                oldest_timestamp,
                                aggregate_frequency)
        # print(query)
        db_client.query(query, database=db_name)

        # Backfill the 1m/1h/1d aggregates from the base frequency view
        cq_name_1m = "cq_measurement_%s_aggregate_1m" % (aggregate_name)
        logging.debug("Recomputing '%s' continuous query" % (cq_name_1m))
        query = """SELECT %s
        INTO "cq_measurement_%s_aggregate_1m"
        FROM "%s"
        WHERE time >= '%s'
        GROUP BY time(1m), sensor_type""" % (aggregated_fields,
                                             aggregate_name,
                                             cq_name,
                                             oldest_timestamp)
        #print(query)
        db_client.query(query, database=db_name)

        cq_name_1h = "cq_measurement_%s_aggregate_1h" % (aggregate_name)
        logging.debug("Recomputing '%s' continuous query" % (cq_name_1h))
        query = """SELECT %s
        INTO "cq_measurement_%s_aggregate_1h"
        FROM "%s"
        WHERE time >= '%s'
        GROUP BY time(1h), sensor_type""" % (aggregated_fields,
                                             aggregate_name,
                                             cq_name,
                                             oldest_timestamp)
        #print(query)
        db_client.query(query, database=db_name)

        cq_name_1d = "cq_measurement_%s_aggregate_1d" % (aggregate_name)
        logging.debug("Recomputing '%s' continuous query" % (cq_name_1d))
        query = """SELECT %s
        INTO "cq_measurement_%s_aggregate_1d"
        FROM "%s"
        WHERE time >= '%s'
        GROUP BY time(1d), sensor_type""" % (aggregated_fields,
                                             aggregate_name,
                                             cq_name,
                                             oldest_timestamp)
        #print(query)
        db_client.query(query, database=db_name)

    db_client.close()

    return True

    @classmethod
    def setUpClass(cls):
        # Init InfluxDB
        from core.data.influx import get_influxdb_parameters
        from core.data.influx import get_influxdb_client

        db_name = get_influxdb_parameters().get("database")
        db_client = get_influxdb_client()

        if db_name == "pidiou":
            raise Exception("Abort: modifying 'pidiou' database")

        db_client.create_database(db_name)

        # Insert some data
        data = []
        start_timestamp = int(time.time() - 48 * 3600)
        sensors = {"wattmeter": ["ecotype-1_pdu-Z1.51", "ecotype-1_pdu-Z1.50"]}
        for sensor_type in sensors:
            for sensor in sensors.get(sensor_type):
                for value in range(0, 48 * 3600, 15):
                    data += [{
                        "measurement": "sensors",
                        "fields": {
                            "value": value
                        },
                        "time": start_timestamp + value,
                        "tags": {
                            "location": "room exterior",
                            "sensor": f"{sensor}",
                            "unit": "w",
                            "sensor_type": f"{sensor_type}"
                        }
                    }]
        # Write all accumulated points once, after every series is generated
        db_client.write_points(data,
                               time_precision="s",
                               batch_size=8 * 3600)

        cls.start_timestamp = start_timestamp
        cls.end_timestamp = start_timestamp + 48 * 3600

        # Set expected continuous queries
        cls.expected_continuous_queries = [{
            'name':
            'cq_measurement_downsample_1m',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_downsample_1m ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(value) AS value INTO tests.autogen.measurement_downsample_1m FROM tests.autogen.sensors GROUP BY time(1m), sensor END'
        }, {
            'name':
            'cq_measurement_downsample_1h',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_downsample_1h ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value INTO tests.autogen.measurement_downsample_1h FROM tests.autogen.measurement_downsample_1m GROUP BY time(1h), sensor END'
        }, {
            'name':
            'cq_measurement_downsample_1d',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_downsample_1d ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value INTO tests.autogen.measurement_downsample_1d FROM tests.autogen.measurement_downsample_1h GROUP BY time(1d), sensor END'
        }, {
            'name':
            'cq_measurement_downsample_all_1m',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_downsample_all_1m ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(value) AS value INTO tests.autogen.measurement_downsample_all_1m FROM tests.autogen.sensors GROUP BY time(1m), sensor_type END'
        }, {
            'name':
            'cq_measurement_downsample_all_1h',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_downsample_all_1h ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value INTO tests.autogen.measurement_downsample_all_1h FROM tests.autogen.measurement_downsample_all_1m GROUP BY time(1h), sensor_type END'
        }, {
            'name':
            'cq_measurement_downsample_all_1d',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_downsample_all_1d ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value), mean(mean) AS value INTO tests.autogen.measurement_downsample_all_1d FROM tests.autogen.measurement_downsample_all_1h GROUP BY time(1d), sensor_type END'
        }, {
            'name':
            'cq_measurement_wattmeters_aggregate_10s',
            'query':
            "CREATE CONTINUOUS QUERY cq_measurement_wattmeters_aggregate_10s ON tests BEGIN SELECT sum(mean) AS value INTO tests.autogen.cq_measurement_wattmeters_aggregate_10s FROM (SELECT mean(value) FROM tests.autogen.sensors WHERE (sensor = 'watt_cooler_b232_1' OR sensor = 'watt_cooler_ext_1') GROUP BY time(10s), sensor) GROUP BY time(10s) END"
        }, {
            'name':
            'cq_measurement_wattmeters_aggregate_1m',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_wattmeters_aggregate_1m ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_wattmeters_aggregate_1m FROM tests.autogen.cq_measurement_wattmeters_aggregate_10s GROUP BY time(1m), sensor_type END'
        }, {
            'name':
            'cq_measurement_wattmeters_aggregate_1h',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_wattmeters_aggregate_1h ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_wattmeters_aggregate_1h FROM tests.autogen.cq_measurement_wattmeters_aggregate_10s GROUP BY time(1h), sensor_type END'
        }, {
            'name':
            'cq_measurement_wattmeters_aggregate_1d',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_wattmeters_aggregate_1d ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_wattmeters_aggregate_1d FROM tests.autogen.cq_measurement_wattmeters_aggregate_10s GROUP BY time(1d), sensor_type END'
        }, {
            'name':
            'cq_measurement_socomecs_aggregate_30s',
            'query':
            "CREATE CONTINUOUS QUERY cq_measurement_socomecs_aggregate_30s ON tests BEGIN SELECT sum(mean) AS value INTO tests.autogen.cq_measurement_socomecs_aggregate_30s FROM (SELECT mean(value) FROM tests.autogen.sensors WHERE (sensor = 'wattmeter_condensator' OR sensor = 'wattmeter_servers' OR sensor = 'wattmeter_cooling') GROUP BY time(30s), sensor) GROUP BY time(30s) END"
        }, {
            'name':
            'cq_measurement_socomecs_aggregate_1m',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_socomecs_aggregate_1m ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_socomecs_aggregate_1m FROM tests.autogen.cq_measurement_socomecs_aggregate_30s GROUP BY time(1m), sensor_type END'
        }, {
            'name':
            'cq_measurement_socomecs_aggregate_1h',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_socomecs_aggregate_1h ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_socomecs_aggregate_1h FROM tests.autogen.cq_measurement_socomecs_aggregate_30s GROUP BY time(1h), sensor_type END'
        }, {
            'name':
            'cq_measurement_socomecs_aggregate_1d',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_socomecs_aggregate_1d ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_socomecs_aggregate_1d FROM tests.autogen.cq_measurement_socomecs_aggregate_30s GROUP BY time(1d), sensor_type END'
        }, {
            'name':
            'cq_measurement_external_temperature_aggregate_30s',
            'query':
            "CREATE CONTINUOUS QUERY cq_measurement_external_temperature_aggregate_30s ON tests BEGIN SELECT mean(mean) AS value INTO tests.autogen.cq_measurement_external_temperature_aggregate_30s FROM (SELECT mean(value) FROM tests.autogen.sensors WHERE (location = 'room exterior' AND unit = 'celsius') GROUP BY time(30s), sensor) GROUP BY time(30s) END"
        }, {
            'name':
            'cq_measurement_external_temperature_aggregate_1m',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_external_temperature_aggregate_1m ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_external_temperature_aggregate_1m FROM tests.autogen.cq_measurement_external_temperature_aggregate_30s GROUP BY time(1m), sensor_type END'
        }, {
            'name':
            'cq_measurement_external_temperature_aggregate_1h',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_external_temperature_aggregate_1h ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_external_temperature_aggregate_1h FROM tests.autogen.cq_measurement_external_temperature_aggregate_30s GROUP BY time(1h), sensor_type END'
        }, {
            'name':
            'cq_measurement_external_temperature_aggregate_1d',
            'query':
            'CREATE CONTINUOUS QUERY cq_measurement_external_temperature_aggregate_1d ON tests BEGIN SELECT sum(value), mean(value), stddev(value), count(value), median(value), min(value), max(value) INTO tests.autogen.cq_measurement_external_temperature_aggregate_1d FROM tests.autogen.cq_measurement_external_temperature_aggregate_30s GROUP BY time(1d), sensor_type END'
        }]

        db_client.close()
Example #25
def cq_create_continuous_query(cq_name, sub_query_sets):
    db_name = get_influxdb_parameters().get("database")
    db_client = get_influxdb_client()

    # Generate a filter criterion from the sensor names
    filter_expression = " or ".join(map(lambda x: """sensor='%s' """ % (x), sub_query_sets))

    aggregate_frequency = "30s"
    aggregate_function_level1 = "sum"
    aggregate_function_level2 = "mean"
    aggregated_fields = """sum("value"), mean("value"), stddev("value"), count("value"), median("value"), min("value"), max("value")"""

    # CQ for summing data collected periodically according to a sensor type
    cq_name_freq = "%s_%s" % (cq_name, aggregate_frequency)
    logging.debug("Computing '%s' continuous query" % (cq_name_freq))
    query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                select %s(%s) as value
                INTO "%s"
                from (
                    select %s(value)
                    from sensors
                    where (%s)
                    group by time(%s), sensor
                    )
                group by time(%s)
            END""" % (cq_name_freq,
                      aggregate_function_level1,
                      aggregate_function_level2,
                      cq_name_freq,
                      aggregate_function_level2,
                      filter_expression,
                      aggregate_frequency,
                      aggregate_frequency)
    #print(query)
    db_client.query(query, database=db_name)

    # CQ aggregating data from the base frequency view every minute
    cq_name_1m = "%s_1m" % (cq_name)
    logging.debug("Computing '%s' continuous query" % (cq_name_1m))
    query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                SELECT %s INTO "%s" FROM "%s" GROUP BY time(1m), sensor_type
            END""" % (cq_name_1m,
                      aggregated_fields,
                      cq_name_1m,
                      cq_name_freq)
    #print(query)
    db_client.query(query, database=db_name)

    # CQ aggregating data from the base frequency view every hour
    cq_name_1h = "%s_1h" % (cq_name)
    logging.debug("Computing '%s' continuous query" % (cq_name_1h))
    query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                SELECT %s INTO "%s" FROM "%s" GROUP BY time(1h), sensor_type
            END""" % (cq_name_1h,
                      aggregated_fields,
                      cq_name_1h,
                      cq_name_freq)
    #print(query)
    db_client.query(query, database=db_name)

    # CQ aggregating data from the base frequency view every day
    cq_name_1d = "%s_1d" % (cq_name)
    logging.debug("Computing '%s' continuous query" % (cq_name_1d))
    query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                SELECT %s INTO "%s" FROM "%s" GROUP BY time(1d), sensor_type
            END""" % (cq_name_1d,
                      aggregated_fields,
                      cq_name_1d,
                      cq_name_freq)
    #print(query)
    db_client.query(query, database=db_name)

    db_client.close()

    return True
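
A hedged usage sketch: sub_query_sets is joined into sensor='...' OR filters, so it is presumably a list of sensor names; the CQ name below is illustrative.

# Hypothetical call: create the 30s/1m/1h/1d aggregate CQs over two sensors.
cq_create_continuous_query(
    "cq_measurement_servers_aggregate",
    ["wattmeter_servers", "wattmeter_cooling"],
)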
Example #26
def cq_rebuild_continuous_query(cq_name, sub_query_sets):
    db_name = get_influxdb_parameters().get("database")
    db_client = get_influxdb_client()

    # Generate a filter criterion from the sensor names
    filter_expression = " or ".join(map(lambda x: """sensor='%s' """ % (x), sub_query_sets))

    first_value_candidates = list(db_client.query("select first(value) from sensors", database=db_name).get_points())
    if len(first_value_candidates) == 0:
        return False

    first_value = first_value_candidates[0]
    oldest_timestamp = first_value["time"]
    aggregate_frequency = "30s"
    aggregate_function_level1 = "sum"
    aggregate_function_level2 = "mean"
    aggregated_fields = """sum("value"), mean("value"), stddev("value"), count("value"), median("value"), min("value"), max("value")"""

    # CQ for making an average data periodically collected for the current aggregate
    cq_name_freq = "%s_%s" % (cq_name, aggregate_frequency)
    logging.debug("Recomputing '%s' continuous query" % (cq_name_freq))
    query = """select %s(%s) as value
            INTO "%s"
            from (
                select %s(value)
                from sensors
                where (%s) and time > '%s'
                group by time(%s), sensor
                )
            where time > '%s'
            group by time(%s)""" % (aggregate_function_level1,
                                    aggregate_function_level2,
                                    cq_name_freq,
                                    aggregate_function_level2,
                                    filter_expression,
                                    oldest_timestamp,
                                    aggregate_frequency,
                                    oldest_timestamp,
                                    aggregate_frequency)
    #print(query)
    db_client.query(query, database=db_name)

    # Backfill the 1m/1h/1d aggregates from the base frequency view
    cq_name_1m = "%s_1m" % (cq_name)
    logging.debug("Recomputing '%s' continuous query" % (cq_name_1m))
    query = """SELECT %s
            INTO "%s_1m"
            FROM "%s"
            WHERE time >= '%s'
            GROUP BY time(1m), sensor_type""" % (aggregated_fields,
                                                 cq_name,
                                                 cq_name_freq,
                                                 oldest_timestamp)
    # #print(query)
    db_client.query(query, database=db_name)

    cq_name_1h = "%s_1h" % (cq_name)
    logging.debug("Recomputing '%s' continuous query" % (cq_name_1h))
    query = """SELECT %s
            INTO "%s_1h"
            FROM "%s"
            WHERE time >= '%s'
            GROUP BY time(1h), sensor_type""" % (aggregated_fields,
                                                 cq_name,
                                                 cq_name_freq,
                                                 oldest_timestamp)
    # #print(query)
    db_client.query(query, database=db_name)

    cq_name_1d = "%s_1d" % (cq_name)
    logging.debug("Recomputing '%s' continuous query" % (cq_name_1d))
    query = """SELECT %s
            INTO "%s_1d"
            FROM "%s"
            WHERE time >= '%s'
            GROUP BY time(1d), sensor_type""" % (aggregated_fields,
                                                 cq_name,
                                                 cq_name_freq,
                                                 oldest_timestamp)
    # #print(query)
    db_client.query(query, database=db_name)

    db_client.close()

    return True
Example #27
def cqs_recreate_all(force_creation=False):
    db_name = get_influxdb_parameters().get("database")
    db_client = get_influxdb_client()

    query = "show continuous queries"
    # #print(query)
    cqs = list(db_client.query(query, database=db_name).get_points())
    # Materialize as a list: the names are iterated and then tested for
    # membership below, which an exhausted map() iterator cannot support
    cqs_names = [cq["name"] for cq in cqs]

    if force_creation:
        logging.debug("Dropping all continuous queries")
        for cq_name in cqs_names:
            query = """DROP CONTINUOUS QUERY %s ON %s""" % (cq_name, db_name)
            logging.debug("Dropping continuous query '%s'" % cq_name)
            # #print(query)
            db_client.query(query, database=db_name)
            query = """DROP SERIES from %s""" % (cq_name)
            # #print(query)
            logging.debug("Dropping series '%s'" % cq_name)
            db_client.query(query, database=db_name)

    aggregated_fields = """sum("value"), mean("value"), stddev("value"), count("value"), median("value"), min("value"), max("value")"""

    cqs_updated = False

    if force_creation or "cq_measurement_downsample_1m" not in cqs_names:
        logging.debug("Computing '%s' continuous query" % ("cq_measurement_downsample_1m"))
        query = f"""CREATE CONTINUOUS QUERY "cq_measurement_downsample_1m" ON "{db_name}"
        BEGIN
            SELECT %s, mean("value") as value INTO "measurement_downsample_1m" FROM "sensors" GROUP BY time(1m), sensor
        END""" % (aggregated_fields)
        # #print(query)
        db_client.query(query, database=db_name)
        cqs_updated = True

    if force_creation or "cq_measurement_downsample_1h" not in cqs_names:
        logging.debug("Computing '%s' continuous query" % ("cq_measurement_downsample_1h"))
        query = f"""CREATE CONTINUOUS QUERY "cq_measurement_downsample_1h" ON "{db_name}"
        BEGIN
            SELECT %s, mean("mean") as value INTO "measurement_downsample_1h" FROM "measurement_downsample_1m" GROUP BY time(1h), sensor
        END""" % (aggregated_fields)
        # #print(query)
        db_client.query(query, database=db_name)
        cqs_updated = True

    if force_creation or "cq_measurement_downsample_1d" not in cqs_names:
        logging.debug("Computing '%s' continuous query" % ("cq_measurement_downsample_1d"))
        query = f"""CREATE CONTINUOUS QUERY "cq_measurement_downsample_1d" ON "{db_name}"
        BEGIN
            SELECT %s, mean("mean") as value INTO "measurement_downsample_1d" FROM "measurement_downsample_1h" GROUP BY time(1d), sensor
        END""" % (aggregated_fields)
        # #print(query)
        db_client.query(query, database=db_name)
        cqs_updated = True

    if force_creation or "cq_measurement_downsample_all_1m" not in cqs_names:
        logging.debug("Computing '%s' continuous query" % ("cq_measurement_downsample_all_1m"))
        query = f"""CREATE CONTINUOUS QUERY "cq_measurement_downsample_all_1m" ON "{db_name}"
        BEGIN
            SELECT %s, mean("value") as value INTO "measurement_downsample_all_1m" FROM "sensors" GROUP BY time(1m), sensor_type
        END""" % (aggregated_fields)
        # #print(query)
        db_client.query(query, database=db_name)
        cqs_updated = True

    if force_creation or "cq_measurement_downsample_all_1h" not in cqs_names:
        logging.debug("Computing '%s' continuous query" % ("cq_measurement_downsample_all_1h"))
        query = f"""CREATE CONTINUOUS QUERY "cq_measurement_downsample_all_1h" ON "{db_name}"
        BEGIN
            SELECT %s, mean("mean") as value INTO "measurement_downsample_all_1h" FROM "measurement_downsample_all_1m" GROUP BY time(1h), sensor_type
        END""" % (aggregated_fields)
        # #print(query)
        db_client.query(query, database=db_name)
        cqs_updated = True

    if force_creation or "cq_measurement_downsample_all_1d" not in cqs_names:
        logging.debug("Computing '%s' continuous query" % ("cq_measurement_downsample_all_1d"))
        query = f"""CREATE CONTINUOUS QUERY "cq_measurement_downsample_all_1d" ON "{db_name}"
        BEGIN
            SELECT %s, mean("mean") as value INTO "measurement_downsample_all_1d" FROM "measurement_downsample_all_1h" GROUP BY time(1d), sensor_type
        END""" % (aggregated_fields)
        # #print(query)
        db_client.query(query, database=db_name)
        cqs_updated = True

    for aggregate_name in AGGREGATES_CONFIG:
        aggregate_type = AGGREGATES_CONFIG[aggregate_name]["type"]
        filter_expression = AGGREGATES_CONFIG[aggregate_name]["filter_expression"]
        aggregate_function_level1 = AGGREGATES_CONFIG[aggregate_name]["aggregate_function_level1"]
        aggregate_function_level2 = AGGREGATES_CONFIG[aggregate_name]["aggregate_function_level2"]
        aggregate_frequency = AGGREGATES_CONFIG[aggregate_name]["aggregate_frequency"]

        # CQ for summing data collected periodically according to a sensor type
        cq_name = "cq_measurement_%s_aggregate_%s" % (aggregate_name, aggregate_frequency)
        if force_creation or cq_name not in cqs_names:
            logging.debug("Computing '%s' continuous query" % (cq_name))
            query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                select %s(%s) as value
                INTO "%s"
                from (
                    select %s(value)
                    from sensors
                    where (%s)
                    group by time(%s), sensor
                    )
                group by time(%s)
            END""" % (cq_name,
                      aggregate_function_level1,
                      aggregate_function_level2,
                      cq_name,
                      aggregate_function_level2,
                      filter_expression,
                      aggregate_frequency,
                      aggregate_frequency)
            # #print(query)
            db_client.query(query, database=db_name)
            cqs_updated = True

        # CQ aggregating data from the base frequency view every minute
        cq_name_1m = "cq_measurement_%s_aggregate_1m" % (aggregate_name)
        if force_creation or cq_name_1m not in cqs_names:
            logging.debug("Computing '%s' continuous query" % (cq_name_1m))
            query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                SELECT %s INTO "%s" FROM "%s" GROUP BY time(1m), sensor_type
            END""" % (cq_name_1m,
                      aggregated_fields,
                      cq_name_1m,
                      cq_name)
            #print(query)
            db_client.query(query, database=db_name)
            cqs_updated = True

        # CQ aggregating data from the base frequency view every hour
        cq_name_1h = "cq_measurement_%s_aggregate_1h" % (aggregate_name)
        if force_creation or cq_name_1h not in cqs_names:
            logging.debug("Computing '%s' continuous query" % (cq_name_1h))
            query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                SELECT %s INTO "%s" FROM "%s" GROUP BY time(1h), sensor_type
            END""" % (cq_name_1h,
                      aggregated_fields,
                      cq_name_1h,
                      cq_name)
            #print(query)
            db_client.query(query, database=db_name)
            cqs_updated = True

        # CQ aggregating data from the base frequency view every day
        cq_name_1d = "cq_measurement_%s_aggregate_1d" % (aggregate_name)
        if force_creation or cq_name_1d not in cqs_names:
            logging.debug("Computing '%s' continuous query" % (cq_name_1d))
            query = f"""CREATE CONTINUOUS QUERY "%s" ON "{db_name}"
            BEGIN
                SELECT %s INTO "%s" FROM "%s" GROUP BY time(1d), sensor_type
            END""" % (cq_name_1d,
                      aggregated_fields,
                      cq_name_1d,
                      cq_name)
            #print(query)
            db_client.query(query, database=db_name)
            cqs_updated = True

    db_client.close()

    return cqs_updated
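
Taken together with cqs_recompute_data() above, a full rebuild first recreates every continuous query and then backfills the downsampled measurements, mirroring what the tests on this page do. A hypothetical one-shot maintenance script, assuming both functions live in core.data.cq_aggregates:

import logging

from core.data.cq_aggregates import cqs_recompute_data, cqs_recreate_all

if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    # Drop and recreate every continuous query...
    cqs_recreate_all(force_creation=True)
    # ...then backfill the downsampled series from the raw data.
    cqs_recompute_data()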