def forwards(self, orm):
     """Create cascading InfluxDB continuous queries: an hourly rollup of
     the raw samples, a daily rollup of the hourly one, and a weekly
     rollup of the daily one."""
     # Note: Don't use "from appname.models import ModelName". 
     # Use orm.ModelName to refer to models in this application,
     # and orm['appname.ModelName'] for models in other applications.
     # cq_1h: aggregate raw "value" samples per sensor per hour into
     # "<measurement>_1h".  The trailing True is forwarded to
     # influx_client.post — NOTE(review): its meaning is not visible in
     # this file; confirm against InfluxClient.post.
     influx_client.post('query', '''
          CREATE CONTINUOUS QUERY "cq_1h" ON "{0}" 
              RESAMPLE EVERY 1h 
              BEGIN 
                  SELECT max("value"), min("value"), mean("value"), count("value"), sum("value")  
                  INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1h), *
              END
          '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1h', INFLUX_MEASUREMENT), True)
     # cq_1d: daily rollup built from the hourly rollup; mean is
     # recomputed exactly as sum("sum")/sum("count").
     influx_client.post('query', '''
         CREATE CONTINUOUS QUERY "cq_1d" ON "{0}"
             RESAMPLE FOR 2d
             BEGIN
                 SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                 INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1d), *
             END
         '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1d', INFLUX_MEASUREMENT + '_1h'), True)
     # cq_1w: weekly rollup cascaded from the daily rollup.
     influx_client.post('query', '''
         CREATE CONTINUOUS QUERY "cq_1w" ON "{0}"
             RESAMPLE FOR 2w
             BEGIN
                 SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                 INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1w), *
             END
         '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1w', INFLUX_MEASUREMENT + '_1d'), True)
# --- Beispiel #2 (German: "Example #2", listing score 0) — separator from a scraped example listing, not code ---
 def forwards(self, orm):
     """Install the hourly/daily/weekly rollup continuous queries."""
     # Note: Don't use "from appname.models import ModelName".
     # Use orm.ModelName to refer to models in this application,
     # and orm['appname.ModelName'] for models in other applications.
     # cq_1h: aggregate raw "value" samples per sensor per hour into
     # "<measurement>_1h"; RESAMPLE EVERY 1h re-runs it each hour.
     influx_client.post(
         'query', '''
          CREATE CONTINUOUS QUERY "cq_1h" ON "{0}" 
              RESAMPLE EVERY 1h 
              BEGIN 
                  SELECT max("value"), min("value"), mean("value"), count("value"), sum("value")  
                  INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1h), *
              END
          '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1h',
                     INFLUX_MEASUREMENT), True)
     # cq_1d: daily rollup from the hourly one; mean is rebuilt exactly
     # as sum("sum")/sum("count") rather than averaging hourly means.
     influx_client.post(
         'query', '''
         CREATE CONTINUOUS QUERY "cq_1d" ON "{0}"
             RESAMPLE FOR 2d
             BEGIN
                 SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                 INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1d), *
             END
         '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1d',
                    INFLUX_MEASUREMENT + '_1h'), True)
     # cq_1w: weekly rollup cascaded from the daily rollup.
     influx_client.post(
         'query', '''
         CREATE CONTINUOUS QUERY "cq_1w" ON "{0}"
             RESAMPLE FOR 2w
             BEGIN
                 SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                 INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1w), *
             END
         '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1w',
                    INFLUX_MEASUREMENT + '_1d'), True)
# --- Beispiel #3 (German: "Example #3", listing score 0) — separator from a scraped example listing, not code ---
def add_cqs(apps, schema_editor):
    """Migration helper: create the hourly/daily/weekly rollup continuous
    queries in InfluxDB (signature matches Django's RunPython contract)."""
    # cq_1h: aggregate raw "value" samples per sensor per hour into
    # "<measurement>_1h".
    influx_client.post(
        'query', '''
     CREATE CONTINUOUS QUERY "cq_1h" ON "{0}"
         RESAMPLE EVERY 1h
         BEGIN
             SELECT max("value"), min("value"), mean("value"), count("value"), sum("value")
             INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1h), *
         END
     '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1h',
                INFLUX_MEASUREMENT), True)
    # cq_1d: daily rollup from the hourly one; mean rebuilt exactly as
    # sum("sum")/sum("count").
    influx_client.post(
        'query', '''
        CREATE CONTINUOUS QUERY "cq_1d" ON "{0}"
            RESAMPLE FOR 2d
            BEGIN
                SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1d), *
            END
        '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1d',
                   INFLUX_MEASUREMENT + '_1h'), True)
    # cq_1w: weekly rollup cascaded from the daily rollup.
    influx_client.post(
        'query', '''
        CREATE CONTINUOUS QUERY "cq_1w" ON "{0}"
            RESAMPLE FOR 2w
            BEGIN
                SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
                INTO "{1}" FROM "{2}" GROUP BY "sensor_id", time(1w), *
            END
        '''.format(INFLUX_DATABASE, INFLUX_MEASUREMENT + '_1w',
                   INFLUX_MEASUREMENT + '_1d'), True)
 def backwards(self, orm):
     """Remove the rollup continuous queries created by forwards()."""
     drop_statements = (
         'DROP CONTINUOUS QUERY "cq_1h" on "{}"'.format(INFLUX_DATABASE),
         'DROP CONTINUOUS QUERY "cq_1d" on "{}"'.format(INFLUX_DATABASE),
         'DROP CONTINUOUS QUERY "cq_1w" on "{}"'.format(INFLUX_DATABASE),
     )
     for statement in drop_statements:
         influx_client.post('query', statement, True)
# --- Beispiel #5 (German: "Example #5", listing score 0) — separator from a scraped example listing, not code ---
 def old_post_data(self,
                   site_id,
                   device_id,
                   sensor_id,
                   value,
                   timestamp=None):
     """Write a single point in the legacy line format (no metric tag).

     Builds a line-protocol record tagged with sensor/site/device ids,
     optionally appends a converted timestamp, and posts it to the
     'write' endpoint.  Raises IntegrityError if the write endpoint does
     not report a successful status code; otherwise returns the response.
     """
     converted = InfluxClient.convert_timestamp(timestamp)
     line = '{0},sensor_id={1},site_id={2},device_id={3} value={4}'.format(
         INFLUX_MEASUREMENT, sensor_id, site_id, device_id, value)
     # Only truthy converted timestamps are appended; otherwise InfluxDB
     # assigns the server-side time.
     if converted:
         line = line + ' ' + str(converted)
     response = influx_client.post('write', line)
     if response.status_code == HTTP_STATUS_SUCCESSFUL_WRITE:
         return response
     raise IntegrityError('Error storing data')
# --- Beispiel #6 (German: "Example #6", listing score 0) — separator from a scraped example listing, not code ---
def remove_cqs(apps, schema_editor):
    """Migration helper: drop the cq_1h/cq_1d/cq_1w continuous queries."""
    drop_queries = [
        'DROP CONTINUOUS QUERY "cq_1h" on "{}"'.format(INFLUX_DATABASE),
        'DROP CONTINUOUS QUERY "cq_1d" on "{}"'.format(INFLUX_DATABASE),
        'DROP CONTINUOUS QUERY "cq_1w" on "{}"'.format(INFLUX_DATABASE),
    ]
    for drop_query in drop_queries:
        influx_client.post('query', drop_query, True)
# --- Beispiel #7 (German: "Example #7", listing score 0) — separator from a scraped example listing, not code ---
 def backwards(self, orm):
     """Tear down the cq_1h/cq_1d/cq_1w rollup continuous queries."""
     for drop in ('DROP CONTINUOUS QUERY "cq_1h" on "{}"'.format(INFLUX_DATABASE),
                  'DROP CONTINUOUS QUERY "cq_1d" on "{}"'.format(INFLUX_DATABASE),
                  'DROP CONTINUOUS QUERY "cq_1w" on "{}"'.format(INFLUX_DATABASE)):
         influx_client.post('query', drop, True)
# --- Beispiel #8 (German: "Example #8", listing score 0) — separator from a scraped example listing, not code ---
 def setUpBeforeMigration(self, apps):
     """Seed pre-migration fixture data and build the rollup measurements.

     Posts ``self.scalar_data`` in the legacy line format (without the
     metric tag), then runs the same SELECT ... INTO aggregations the
     continuous queries would perform, so the _1h/_1d/_1w measurements
     are populated before the migration under test runs.
     """
     # add the data in the old format (without the metric tag)
     for data in self.scalar_data:
         self.old_post_data(data['sensor'].device.site.id,
                            data['sensor'].device.id, data['sensor'].id,
                            data['value'], data['timestamp'])
     # hourly rollup of the raw samples
     influx_client.post(
         'query', '''
             SELECT max("value"), min("value"), mean("value"), count("value"), sum("value")
             INTO "{0}" FROM "{1}" GROUP BY "sensor_id", time(1h), *
         '''.format(INFLUX_MEASUREMENT + '_1h', INFLUX_MEASUREMENT), True)
     # daily rollup from the hourly aggregates (mean rebuilt as sum/count)
     influx_client.post(
         'query', '''
             SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
             INTO "{0}" FROM "{1}" GROUP BY "sensor_id", time(1d), *
         '''.format(INFLUX_MEASUREMENT + '_1d', INFLUX_MEASUREMENT + '_1h'),
         True)
     # weekly rollup from the daily aggregates
     influx_client.post(
         'query', '''
             SELECT max("max"), min("min"), sum("sum")/sum("count") as "mean", sum("count") as "count", sum("sum")
             INTO "{0}" FROM "{1}" GROUP BY "sensor_id", time(1w), *
         '''.format(INFLUX_MEASUREMENT + '_1w', INFLUX_MEASUREMENT + '_1d'),
         True)
def add_convenience_tags(apps, schema_editor):
    """Backfill the ``metric`` tag onto already-stored InfluxDB points.

    For the raw measurement and each rollup (``_1h``/``_1d``/``_1w``),
    every sensor's points that still carry an empty ``metric`` tag are
    read out in time-ordered chunks, re-posted with the sensor's metric
    set, and the untagged originals are then deleted.  Progress is
    reported on stdout using carriage-return overdraw so the status line
    updates in place.

    NOTE(review): copying + deleting (rather than updating) is presumably
    required because InfluxDB tags are immutable — confirm.
    """
    sensors = ScalarSensor.objects.all()
    print("\n\nMigrating data for {} sensors".format(len(sensors)))
    stdout.flush()
    for agg in ["", "_1h", "_1d", "_1w"]:
        measurement = influx_client._measurement + agg
        print("Migrating data from {} measurement...".format(measurement))
        stdout.flush()

        sensorsmigrated = 0
        datamigrated = 0
        for sensor in sensors:
            device = sensor.device
            site = device.site

            print(
                "\rMigrating {} of {} sensors (requesting count)                  "
                .format(sensorsmigrated + 1, len(sensors)),
                end='')
            stdout.flush()
            # doesn't really matter which column we use for the aggregates
            countcol = "value" if agg == "" else "mean"
            countdata = influx_client.get(
                "SELECT COUNT({}) FROM {} WHERE sensor_id = '{}' AND metric = ''"
                .format(countcol, measurement, sensor.id), True).json()
            try:
                assert len(countdata["results"]) == 1
                result = countdata["results"][0]
                if result == {}:
                    # no untagged data left for this sensor/measurement
                    sensorsmigrated += 1
                    continue
                assert len(result["series"]) == 1
                series = result["series"][0]
                assert len(series["columns"]) == 2
                assert len(series["values"]) == 1
                count = series["values"][0][series["columns"].index("count")]
            except Exception:
                # Was a bare ``except:`` — narrowed so Ctrl-C/SystemExit
                # aren't intercepted.  Dump the unexpected response shape
                # before re-raising.
                print("\n==============================")
                print(countdata)
                print("================================")
                raise
            # select all this sensor's data that doesn't yet have a metric
            # apparently working through large datasets with LIMIT and OFFSET
            # is slow, so we'll go through time instead
            starttime = 0
            offset = 0
            while True:
                # note this doesn't work if you specify the time as {}ns explicitly
                query = "SELECT * FROM {} WHERE time > {} AND sensor_id = '{}' AND metric = '' LIMIT {}".format(
                    measurement, starttime, sensor.id, CHUNK_LIMIT)

                print(
                    "\rMigrating {} of {} sensors (requesting data {} of {})                  "
                    .format(sensorsmigrated + 1, len(sensors), offset + 1,
                            count),
                    end='')
                stdout.flush()
                db_data = influx_client.get_values(
                    influx_client.get(query, True, epoch="ns"))
                if len(db_data) == 0:
                    break
                print(
                    "\rMigrating {} of {} sensors (processing values for {} of {})             "
                    .format(sensorsmigrated + 1, len(sensors), offset + 1,
                            count),
                    end='')
                stdout.flush()
                if agg == "":
                    values = [d["value"] for d in db_data]
                    print(
                        "\rMigrating {} of {} sensors (processing timestamps for {} of {})             "
                        .format(sensorsmigrated + 1, len(sensors), offset + 1,
                                count),
                        end='')
                    stdout.flush()
                    # Convert one point at a time so a malformed point can be
                    # reported.  BUGFIX: the previous version used a list
                    # comprehension and then printed ``d`` in the handler, but
                    # comprehension variables don't leak in Python 3, so the
                    # handler raised NameError and masked the real error.
                    timestamps = []
                    for point in db_data:
                        try:
                            timestamps.append(ms_to_dt(point["time"] / 1000000))
                        except Exception:
                            print(point["time"])
                            raise
                    # TODO: I think under-the-hood this ends up converting back and forth
                    # between dict-of-arrays and array-of-dicts format, so there's some
                    # opportunity for optimization
                    print(
                        "\rMigrating {} of {} sensors (posting data {} of {})                "
                        .format(sensorsmigrated + 1, len(sensors), offset + 1,
                                count),
                        end='')
                    stdout.flush()
                    influx_client.post_data_bulk(site.id, device.id, sensor.id,
                                                 sensor.metric, values,
                                                 timestamps)
                else:
                    querylines = []
                    print(
                        "\rMigrating {} of {} sensors (building query for data {} of {})                "
                        .format(sensorsmigrated + 1, len(sensors), offset + 1,
                                count),
                        end='')
                    stdout.flush()
                    for data in db_data:
                        # only re-post fields that were actually present
                        fieldstrs = []
                        for rollup in ['min', 'max', 'sum', 'mean']:
                            if data[rollup] is not None:
                                fieldstrs.append('{}={}'.format(
                                    rollup, data[rollup]))
                        if data['count'] is not None:
                            # counts are integers; line protocol needs the i suffix
                            fieldstrs.append('count={}i'.format(data['count']))
                        querylines.append(
                            "{},sensor_id={},site_id={},device_id={},metric={} {} {}"
                            .format(
                                measurement, sensor.id, site.id, device.id,
                                sensor.metric, ",".join(fieldstrs),
                                InfluxClient.convert_timestamp(
                                    ms_to_dt(data['time'] / 1000000))))

                    print(
                        "\rMigrating {} of {} sensors (consolidating query for data {} of {})                "
                        .format(sensorsmigrated + 1, len(sensors), offset + 1,
                                count),
                        end='')
                    stdout.flush()
                    query = '\n'.join(querylines)
                    print(
                        "\rMigrating {} of {} sensors (posting data {} of {})                "
                        .format(sensorsmigrated + 1, len(sensors), offset + 1,
                                count),
                        end='')
                    stdout.flush()
                    response = influx_client.post('write', query)
                    if response.status_code != HTTP_STATUS_SUCCESSFUL_WRITE:
                        # BUGFIX: report the payload we actually posted; the
                        # old code formatted ``data`` (only the last parsed
                        # point), which made failures hard to diagnose.
                        raise IntegrityError(
                            'Failed Query(status {}):\n{}\nResponse:\n{}'.
                            format(response.status_code, query,
                                   response.json()))
                # advance past this chunk by time, not offset (see note above)
                starttime = db_data[-1]["time"]

                offset += len(db_data)
                datamigrated += len(db_data)

            # we've finished all the data for this sensor, delete the old data
            query = "DELETE FROM {} WHERE sensor_id = '{}' AND metric = ''".format(
                measurement, sensor.id)

            print(
                "\rMigrating {} of {} sensors (deleting old data)                  "
                .format(sensorsmigrated + 1, len(sensors)),
                end='')
            stdout.flush()
            influx_client.post("query", query, True)

            sensorsmigrated += 1
        print("\nMigrated {} data points for measurement {}\n".format(
            datamigrated, measurement))
        stdout.flush()