def save(self, **kwargs):
    """
    Custom save method.

    This function will do several things if it has not done so already:

    * Create a secret key: 3 digits followed by 4 uppercase letters.
      Not currently in use (could be used for pairing devices).
    * Register fanout continuous queries in InfluxDB.
    * Default the device name to "Device <serial>" when blank/None.
    * Create a :class:`farmer.models.DeviceSettings` object.
    * Create a :class:`webapp.models.DeviceWebSettings` object.
    """
    if self.secret_key is None:
        # NOTE(review): `random` is not cryptographically secure. Fine while
        # the key is unused; switch to `secrets`/`os.urandom` before using
        # it for device pairing.
        secret_key = ''.join(random.choice(string.digits) for i in range(3))
        secret_key += ''.join(random.choice(string.ascii_uppercase) for i in range(4))
        self.secret_key = secret_key
    if not self.fanout_query_registered:
        db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, 'root', 'root', 'seads')
        serial = str(self.serial)
        # Per-circuit fanout plus mean-wattage rollups at every resolution.
        db.query('select * from device.'+serial+' into device.'+serial+'.[circuit_pk]')
        db.query('select sum(cost) from device.'+serial+' into cost.device.'+serial+'.[circuit_pk]')
        for interval in ('1y', '1M', '1w', '1d', '1h', '1m', '1s'):
            db.query('select mean(wattage) from /^device.'+serial+'.*/ group by time('+interval+') into '+interval+'.:series_name')
        db.query('select sum(cost) from "device.'+serial+'" into cost.device.'+serial)
        self.fanout_query_registered = True
    if not self.name:
        # Covers both '' and None -- the docstring promises a non-empty default.
        self.name = "Device "+str(self.serial)
    # Imported here to avoid circular imports at module load time.
    from farmer.models import DeviceSettings
    if not DeviceSettings.objects.filter(device=self).exists():
        DeviceSettings.objects.create(device=self)
    from webapp.models import DeviceWebSettings
    if not DeviceWebSettings.objects.filter(device=self).exists():
        DeviceWebSettings.objects.create(device=self)
    # Forward kwargs (e.g. using=, force_insert=) -- previously dropped.
    super(Device, self).save(**kwargs)
def save(self, **kwargs):
    """
    Custom save method for debug test events.

    Decodes the JSON-encoded ``dataPoints`` payload and writes each
    (timestamp, wattage) pair into the ``zz_debug`` series in InfluxDB,
    then saves the model itself.
    """
    db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, 'root', 'root', 'seads')
    query = {
        'name': 'zz_debug',
        'columns': ['time', 'wattage'],
        'points': [[point['timestamp'], point['wattage']]
                   for point in json.loads(self.dataPoints)],
    }
    db.write_points([query])
    # Forward kwargs to the base save -- previously dropped.
    super(TestEvent, self).save(**kwargs)
def get_average_usage(user, notification):
    """
    Compute per-device average wattage over the notification's recurrence
    interval, plus the hungriest single appliance.

    :param user: owner whose devices are inspected.
    :param notification: object with a ``recurrences`` rule; the gap between
        its first two occurrences selects the query window/resolution.
    :returns: dict mapping device serial (str) to
        ``[total_average_wattage (int), [appliance_name, wattage (int)]]``.
    """
    # Default window: one week at hourly resolution.
    start = 'now() - 1w'
    unit = 'h'
    occurrences = notification.recurrences.occurrences()  # call once, not twice
    time_interval = occurrences[1] - occurrences[0]
    if time_interval == datetime.timedelta(days=30):
        start = 'now() - 1M'
        unit = 'd'
    elif time_interval == datetime.timedelta(days=1):
        start = 'now() - 1d'
        unit = 'm'
    elif time_interval == datetime.timedelta(days=365):
        start = 'now() - 1y'
        unit = 'd'
    stop = 'now()'
    db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, 'root', 'root', 'seads')
    result = db.query('list series')[0]
    averages = {}
    for device in Device.objects.filter(owner=user):
        prefix = 'device.' + str(device.serial)
        rg = re.compile(prefix)  # hoisted: compile once per device, not per series
        appliances = set()       # builtin set instead of deprecated sets.Set
        for series in result['points']:
            if rg.match(series[1]):
                appliance = series[1].split(prefix + '.')
                if len(appliance) >= 2:
                    appliances.add(appliance[-1])
        average_wattage = 0
        hungriest_appliance = [None, 0]
        for appliance in appliances:
            try:
                wattage = db.query('select * from 1' + unit + '.' + prefix + '.' + appliance +
                                   ' where time > ' + start + ' and time < ' + stop)[0]['points'][0][2]
            except Exception:
                # Best effort: series with no points in the window (or a
                # failed query) are simply skipped, as before.
                continue
            average_wattage += wattage
            if wattage > hungriest_appliance[1]:
                hungriest_appliance = [appliance, int(wattage)]
        averages[str(device.serial)] = [int(average_wattage), hungriest_appliance]
    return averages
def delete(self, *args, **kwargs):
    """
    Custom delete method.

    Drops every InfluxDB series and continuous query belonging to this
    device before deleting the model row itself.
    """
    db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, 'root', 'root', 'seads')
    serial = str(self.serial)
    # delete series
    series = db.query('list series')[0]['points']
    for s in series:
        if 'device.'+serial in s[1]:
            db.query('drop series '+s[1])
    # delete continuous queries (query text is column 2, id is column 1)
    queries = db.query('list continuous queries')[0]['points']
    for q in queries:
        if 'device.'+serial in q[2]:
            db.query('drop continuous query '+str(q[1]))
    # Forward args/kwargs (e.g. using=) -- previously accepted but dropped.
    super(Device, self).delete(*args, **kwargs)
def handle(self, *args, **options):
    """
    Reset usage counters for every device.

    ``args[0] == 'daily'`` zeroes only the daily kWh/cost counters;
    anything else performs the monthly reset: zero the monthly kWh total,
    drop the device back to tier 1, and record the tier change in InfluxDB.
    """
    period = args[0] if args else None  # guard: no-arg invocation used to IndexError
    for device in Device.objects.all():
        if period == 'daily':
            device.kilowatt_hours_daily = 0
            device.cost_daily = 0
        else:
            db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, "root", "root", "seads")
            tier_dict = {}
            tier_dict['name'] = "tier.device." + str(device.serial)
            tier_dict['columns'] = ['tier_level']
            # BUG FIX: influxdb 0.8 expects a list of point rows, not a
            # mapping -- matches the [[1]] format used by the other writers.
            tier_dict['points'] = [[1]]
            db.write_points([tier_dict])
            device.kilowatt_hours_monthly = 0
            rate_plan = device.devicewebsettings.rate_plans.all()[0]
            tiers = Tier.objects.filter(rate_plan=rate_plan)
            device.devicewebsettings.current_tier = tiers.get(tier_level=1)
            device.devicewebsettings.save()
        device.save()
        # NOTE(review): hardcoded log path; consider a setting.
        with open('/home/ubuntu/reset.log', 'a') as f:
            f.write('Reset ' + device.__unicode__())
            f.write('\n')
def handle(self, *args, **options):
    """
    Archive old InfluxDB data to Amazon Glacier, then delete it.

    For every device, points older than its data-retention policy are
    queried, dumped to a temp file, uploaded in 8 MB multipart chunks,
    and finally removed from the database. One multipart upload is
    completed for all devices at the end.
    """
    part_size = 8388608  # 8 MB: Glacier multipart part size
    print 'Contacting Amazon AWS...'
    glacier = boto3.client('glacier')
    multipart_upload = glacier.initiate_multipart_upload(
        vaultName=settings.GLACIER_VAULT_NAME, partSize=str(part_size))
    print 'Connected to Glacier Vault "' + settings.GLACIER_VAULT_NAME + '"'
    upload_id = multipart_upload['uploadId']
    treehash_archive = TreeHash()
    db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, 'root', 'root', 'seads')
    archive_size = 0
    for device in Device.objects.all():
        # Window: from the epoch up to (now - retention policy in months).
        start = datetime.fromtimestamp(0)
        end = datetime.now() - timedelta(days=31 * device.data_retention_policy)
        start = (datetime.now() - start).total_seconds()
        # NOTE(review): the line above is immediately clobbered -- `start`
        # always ends up 0 (query from the epoch). Confirm this is intended.
        start = 0
        end = int((datetime.now() - end).total_seconds())
        end = time.time() - end  # seconds since epoch of the cutoff
        print 'Trying ' + str(device) + '...'
        print 'Data Retention Policy: ' + str(
            device.data_retention_policy) + ' Months'
        series = 'device.' + str(device.serial)
        try:
            query = 'select * from ' + series + ' where time > ' + str(
                start) + 's and time < ' + str(end) + 's'
            points = db.query(query)
        except:
            # Query raises when the series does not exist / has no points.
            print 'No data found for ' + series + '. Skipping.'
            continue
        print "Uploading " + series + "..."
        print "Querying from " + str(datetime.fromtimestamp(
            int(start))) + " to " + str(datetime.fromtimestamp(int(end)))
        # store points in temporary file, break into 8MB parts
        with open('/tmp/temp_archive', 'wb') as f:
            f.write(json.dumps(points))
        bytes_read = 0
        bytes_sent = 0
        # NOTE(review): bytes_sent restarts at 0 for every device, so the
        # 'bytes M-N/*' ranges of different devices overlap within the one
        # shared multipart upload -- verify against the Glacier API.
        with open('/tmp/temp_archive', 'rb') as f:
            treehash_part = TreeHash()
            part = f.read(part_size)
            treehash_part.update(part)
            bytes_read += len(part)
            while part:
                response = glacier.upload_multipart_part(
                    vaultName=settings.GLACIER_VAULT_NAME,
                    uploadId=upload_id,
                    range='bytes ' + str(bytes_sent) + '-' +
                    str(bytes_read - 1) + '/*',
                    body=part,
                    checksum=treehash_part.hexdigest())
                bytes_sent += len(part)
                part = f.read(part_size)
                treehash_part.update(part)
                bytes_read += len(part)
        archive_size += 1
        # NOTE(review): archive_size counts uploads, yet it is later passed
        # as archiveSize, which Glacier documents as total size in BYTES.
        print "Successfully uploaded " + str(
            bytes_sent) + " bytes to Glacier"
        print "Deleting points from database..."
        # drop from fanout series as well
        series = db.query('list series')[0]['points']
        rg = re.compile('device.' + str(device.serial))
        for s in series:
            if rg.search(s[1]):
                db.query('delete from ' + s[1] + ' where time > ' +
                         str(start) + 's and time < ' + str(end) + 's')
        print "[DONE]"
    try:
        # NOTE(review): /tmp/temp_archive is overwritten per device, so the
        # final tree hash covers only the LAST device's data -- confirm.
        with open('/tmp/temp_archive', 'rb') as f:
            treehash_archive.update(f.read())
        response = glacier.complete_multipart_upload(
            vaultName=settings.GLACIER_VAULT_NAME,
            uploadId=upload_id,
            archiveSize=str(archive_size),
            checksum=treehash_archive.hexdigest())
        # Append the archive id so it can be retrieved later.
        with open(settings.STATIC_PATH + 'archive_ids.log', 'a') as f:
            line = {
                'archiveId': response['archiveId'],
                'timeEnd': str(end)
            }
            f.write(json.dumps(line))
            f.write(';')
        os.remove('/tmp/temp_archive')
        print "Archival Successful"
    except:
        # Reached when no device produced a temp file (nothing archived).
        print "No data to archive. Exiting."
def save(self, **kwargs):
    """
    Custom save method. This method is the powerhouse of the API. It can
    take an array of data points from a device and convert them into
    database entries in InfluxDB. The method will also keep a running count
    of how many kwh have been consumed this day and this month. If it
    exceeds the allotted kwh for the device's tier, advance the tier a
    level. If the data coming in is sufficiently in the past such that the
    database will not calculate its mean value, refresh the query to
    trigger a backfill of the data.

    When a model is being saved, it has already been created by
    :class:`microdata.views.EventViewSet`. The Event is parsed as follows::

        start = self.start
        frequency = self.frequency
        count = 0
        for point in dataPoints:
            time = start + count * (1/frequency)
            db.write_points(time, wattage)
    """
    dataPoints = json.loads(self.dataPoints)
    db = influxdb.InfluxDBClient(settings.INFLUXDB_URI,8086,'root','root','seads')
    self.dataPoints = dataPoints
    count = 0
    now = time.time()
    timestamp = now*1000  # ms; overwritten per point below
    query = {}
    query['points'] = []
    for point in dataPoints:
        wattage = point.get('wattage')
        if wattage == 0xFFFF:
            continue # Issue where a device sends overflow. Ignore for now.
        current = point.get('current')
        voltage = point.get('voltage')
        # NOTE(review): appliance_pk and event_code are read but never used.
        appliance_pk = point.get('appliance_pk')
        event_code = point.get('event_code')
        channel = point.get('channel', 1)
        # circuit 7 is the fallback when channels are not fully configured
        circuit_pk = 7
        if self.device.channel_1 and self.device.channel_2 and self.device.channel_3:
            circuit_pk = self.device.channel_1.pk or 7
            if channel == 2:
                circuit_pk = self.device.channel_2.pk or 7
            elif channel == 3:
                circuit_pk = self.device.channel_3.pk or 7
        # timestamp is millisecond resolution always
        timestamp = self.start + ((1.0/self.frequency)*count*1000)
        count += 1
        kwh = 0.0
        # energy for one sample interval: W -> kW, interval in hours
        kwh = (wattage/1000.0)*(1.0/self.frequency)*(1/3600.0)
        self.device.kilowatt_hours_monthly += kwh
        self.device.kilowatt_hours_daily += kwh
        # NOTE(review): a full model save per data point is expensive.
        self.device.save()
        tier_dict = {}
        tier_dict['name'] = "tier.device."+str(self.device.serial)
        tier_dict['columns'] = ['tier_level']
        tier_dict['points'] = []
        # Calculate percent of baseline to get tier level
        # Start by determining current time of year
        # Only do this if models exist
        try:
            this_year = datetime.now().year
            this_month = datetime.now().month
            days_this_month = monthrange(this_year,this_month)[1]
            summer_start = datetime(year=this_year,month=self.device.devicewebsettings.territories.all()[0].summer_start,day=1)
            winter_start = datetime(year=this_year,month=self.device.devicewebsettings.territories.all()[0].winter_start,day=1)
            current_season = 'summer'
            if (summer_start <= datetime.now() < winter_start) == False:
                current_season = 'winter'
            # check if we need to upgrade a tier. If at max tier, do nothing.
            if (self.device.devicewebsettings.current_tier.max_percentage_of_baseline != None):
                max_kwh_for_tier = (self.device.devicewebsettings.current_tier.max_percentage_of_baseline/100.0)*self.device.devicewebsettings.territories.all()[0].summer_rate*days_this_month
                if current_season == 'winter':
                    max_kwh_for_tier = (self.device.devicewebsettings.current_tier.max_percentage_of_baseline/100.0)*self.device.devicewebsettings.territories.all()[0].winter_rate*days_this_month
                if (self.device.kilowatt_hours_monthly > max_kwh_for_tier):
                    current_tier = self.device.devicewebsettings.current_tier
                    self.device.devicewebsettings.current_tier = Tier.objects.get(tier_level=(current_tier.tier_level + 1))
                    self.device.devicewebsettings.save()
                    tier_dict['points'].append([current_tier.tier_level + 1])
                    db.write_points([tier_dict])
            cost = self.device.devicewebsettings.current_tier.rate * kwh
            self.device.cost_daily += cost
            if (timestamp and (wattage or current or voltage)):
                query['points'].append([timestamp, wattage, current, voltage, circuit_pk, cost])
        except:
            # NOTE(review): this broad except also swallows failures of the
            # point append above, silently dropping data -- confirm intent.
            pass
    data = []
    query['name'] = 'device.'+str(self.device.serial)
    query['columns'] = ['time', 'wattage', 'current', 'voltage', 'circuit_pk', 'cost']
    data.append(query)
    self.query += str(data)
    db.write_points(data, time_precision="ms")
    # If data is older than the present, must backfill fanout queries by reloading the continuous query.
    # Could be fixed with an influxdb update
    # https://github.com/influxdb/influxdb/issues/510
    now = time.time()
    last_timestamp = (self.start + ((1.0/self.frequency)*count*1000))/1000
    existing_queries = db.query('list continuous queries')[0]['points']
    new_queries = []
    #if last_timestamp < now - 1:
    #    new_queries.append('select mean(wattage) from /^device.'+str(self.device.serial)+'.*/ group by time(1s) into 1s.:series_name')
    if last_timestamp < now - 60:
        new_queries.append('select mean(wattage) from /^device.'+str(self.device.serial)+'.*/ group by time(1m) into 1m.:series_name')
    if last_timestamp < now - 3600:
        new_queries.append('select mean(wattage) from /^device.'+str(self.device.serial)+'.*/ group by time(1h) into 1h.:series_name')
    if last_timestamp < now - 86400:
        new_queries.append('select mean(wattage) from /^device.'+str(self.device.serial)+'.*/ group by time(1d) into 1d.:series_name')
    if last_timestamp < now - 86400*7:
        new_queries.append('select mean(wattage) from /^device.'+str(self.device.serial)+'.*/ group by time(1w) into 1w.:series_name')
    # NOTE(review): days_this_month is assigned inside the try above; if that
    # block raised before the assignment this line is a NameError -- verify.
    if last_timestamp < now - 86400*days_this_month:
        new_queries.append('select mean(wattage) from /^device.'+str(self.device.serial)+'.*/ group by time(1M) into 1M.:series_name')
    if last_timestamp < now - 86400*days_this_month*12:
        new_queries.append('select mean(wattage) from /^device.'+str(self.device.serial)+'.*/ group by time(1y) into 1y.:series_name')
    # drop old continuous query, add new one. Essentially a refresh.
    for new_query in new_queries:
        for existing_query in existing_queries:
            if new_query == existing_query[2]:
                db.query('drop continuous query '+str(existing_query[1]))
        db.query(new_query)
    # Check to see if the tier series has been initialized. This should only need to happen once ever.
    try:
        db.query('select * from tier.device.'+str(self.device.serial))
    except:
        # Series missing: seed it with the current tier level.
        try:
            tier_dict = {}
            tier_dict['name'] = "tier.device."+str(self.device.serial)
            tier_dict['columns'] = ["tier_level"]
            tier_dict['points'] = [[str(self.device.devicewebsettings.current_tier.tier_level)]]
            db.write_points([tier_dict])
        except:
            pass
user_name = "test" password = "******" from jnpr.junos import Device from jnpr.junos.op.ethport import EthPortTable from time import sleep from influxdb.influxdb08 import client device = Device(host=device_ip, port=22, user=user_name, passwd=password) device.open() switch_name = device.facts['fqdn'] print 'Connected to', switch_name, '(', device.facts[ 'model'], 'running', device.facts['version'], ')' ports_table = EthPortTable(device) db = client.InfluxDBClient('localhost', 8086, 'root', 'root', 'network') print 'Connected to InfluxDB' print 'Collecting metrics...' columns = ['rx_packets', 'rx_bytes', 'tx_packets', 'tx_bytes'] while True: ports = ports_table.get() for port in ports: point = { 'name': switch_name + '.' + port['name'], 'columns': columns, 'points': [[ int(port['rx_packets']), int(port['rx_bytes']),
def main(): db = client.InfluxDBClient('localhost', INFLUX_DB_PORT, 'root', 'root', 'network') print 'Connected to InfluxDB' server(db)
def generate_points(start, stop, resolution, energy_use, device, channels):
    """
    Generate pseudo-random wattage data points for a device.

    Each circuit type has a profile with an average, minimum, maximum, and
    cutoff wattage. The average drifts by a random amount proportional to
    the profile's maximum, giving a series that appears to change slowly
    over time. This function works much like the save() method of
    :class:`microdata.models.Event`: it keeps track of the cumulative kWh
    consumed and advances the tier level when the threshold is passed.

    :param start: first timestamp (seconds).
    :param stop: end timestamp (seconds, exclusive).
    :param resolution: seconds between generated points.
    :param energy_use: usage profile -- 1 normal, 2 doubles, 3 scales by 0.3.
    :param device: the Device to generate data for.
    :param channels: iterable of circuit types; ``.name`` keys into the
        wattage profiles below.
    :returns: success message with the number of points added.
    """
    multiplier = 1
    if energy_use == 2:
        multiplier = 2
    if energy_use == 3:
        multiplier = .3
    wattages = {
        'Bedroom': {
            'avg': 200, 'cutoff': 50, 'max': 300, 'min': 0,
            'pk': CircuitType.objects.get(name='Bedroom').pk
        },
        'Kitchen': {
            'avg': 1000, 'cutoff': 500, 'max': 2000, 'min': 0,
            'pk': CircuitType.objects.get(name='Kitchen').pk
        },
        'Living Room': {
            'avg': 400, 'cutoff': 50, 'max': 1000, 'min': 0,
            'pk': CircuitType.objects.get(name='Living Room').pk
        },
    }
    # BUG FIX: the original loop iterated the characters of each profile NAME
    # ("for value in appliance") and multiplied a throwaway local, so the
    # energy_use multiplier never had any effect. Scale the wattage fields
    # in place; 'pk' is a database id and must not be scaled.
    for profile in wattages.values():
        for field in ('avg', 'cutoff', 'max', 'min'):
            profile[field] *= multiplier
    db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, "root", "root", "seads")
    count = 0
    data = []
    data_dict = {}
    data_dict['name'] = "device." + str(device.serial)
    data_dict['columns'] = ['time', 'wattage', 'circuit_pk', 'cost']
    data_dict['points'] = []
    tier_dict = {}
    tier_dict['name'] = "tier.device." + str(device.serial)
    tier_dict['columns'] = ['time', 'tier_level']
    # Local copies of the running totals; written back to the device at the end.
    kilowatt_hours_monthly = device.kilowatt_hours_monthly
    kilowatt_hours_daily = device.kilowatt_hours_daily
    max_percentage_of_baseline = device.devicewebsettings.current_tier.max_percentage_of_baseline
    current_tier = device.devicewebsettings.current_tier
    this_year = datetime.now().year
    territory = device.devicewebsettings.territories.all()[0]
    summer_start = datetime(year=this_year, month=territory.summer_start, day=1)
    winter_start = datetime(year=this_year, month=territory.winter_start, day=1)
    summer_rate = territory.summer_rate
    winter_rate = territory.winter_rate
    # Summer iff "now" falls in [summer_start, winter_start).
    current_season = 'summer'
    if not (summer_start <= datetime.now() < winter_start):
        current_season = 'winter'
    # Seed the tier series with the current level at the window start.
    tier_dict['points'] = [[start, device.devicewebsettings.current_tier.tier_level]]
    db.write_points([tier_dict])
    for i in numpy.arange(start, stop, resolution):
        kwh = 0.0
        for channel in channels:
            profile = wattages[channel.name]
            # Random walk around the running average, bounded by the profile.
            wattage = profile['avg'] + random.uniform(-profile['max'] * 0.1,
                                                      profile['max'] * 0.1)
            wattage_to_append = 0
            if wattage > profile['max']:
                wattage_to_append = profile['max']
            elif wattage < profile['cutoff']:
                wattage_to_append = 0
            elif wattage < profile['min']:
                wattage_to_append = profile['min']
            else:
                profile['avg'] = wattage  # drift the average for the next sample
                wattage_to_append = wattage
            # Energy for one sample: W -> kW, interval seconds -> hours.
            kwh = (wattage_to_append / 1000.0) * (resolution) * (1 / 3600.0)
            kilowatt_hours_monthly += kwh
            kilowatt_hours_daily += kwh
            if (max_percentage_of_baseline != None):
                max_kwh_for_tier = (max_percentage_of_baseline / 100.0) * summer_rate * 31.0
                if current_season == 'winter':
                    max_kwh_for_tier = (current_tier.max_percentage_of_baseline / 100.0) * winter_rate * 31.0
                if (kilowatt_hours_monthly > max_kwh_for_tier):
                    current_tier = device.devicewebsettings.current_tier
                    next_tier = device.devicewebsettings.rate_plans.all(
                    )[0].tier_set.all().filter(
                        tier_level=current_tier.tier_level + 1)
                    if next_tier:
                        device.devicewebsettings.current_tier = next_tier[0]
                        device.devicewebsettings.save()
                        device.save()
                        tier_dict['points'] = [[i, device.devicewebsettings.current_tier.tier_level]]
                        db.write_points([tier_dict])
            cost = current_tier.rate * kwh
            data_dict['points'].append([i, wattage_to_append, profile['pk'], cost])
            count += 1
            if count % 100000 == 0:
                # Flush in batches to bound memory usage.
                data.append(data_dict)
                db.write_points(data)
                data = []
                data_dict['points'] = []
    data.append(data_dict)
    success = ""  # robustness: previously unbound if the final write failed
    if (db.write_points(data)):
        queries = db.query('list continuous queries')[0]['points']
        # drop old queries
        serial = str(device.serial)
        for q in queries:
            if 'device.' + serial in q[2]:
                db.query('drop continuous query ' + str(q[1]))
        # add new queries: per-circuit fanout plus rollups at each resolution
        db.query('select * from device.' + serial + ' into device.' + serial + '.[circuit_pk]')
        for interval in ('1y', '1M', '1w', '1d', '1h', '1m', '1s'):
            db.query('select mean(wattage) from /^device.' + serial +
                     '.*/ group by time(' + interval + ') into ' + interval + '.:series_name')
        db.query('select sum(cost) from "device.' + serial + '" into cost.device.' + serial)
        success = "Added {0} points successfully".format(count)
    device.kilowatt_hours_monthly = kilowatt_hours_monthly
    device.kilowatt_hours_daily = kilowatt_hours_daily
    device.save()
    return success
def _drop_device_queries(db, serial):
    """Drop every continuous query referencing this device's series."""
    queries = db.query('list continuous queries')[0]['points']
    for q in queries:
        if 'device.' + serial in q[2]:
            db.query('drop continuous query ' + str(q[1]))


def _register_fanout_queries(db, serial):
    """Register the per-circuit fanout plus all time-rollup continuous queries."""
    db.query('select * from device.' + serial + ' into device.' + serial + '.[circuit_pk]')
    for interval in ('1y', '1M', '1w', '1d', '1h', '1m', '1s'):
        db.query('select mean(wattage) from /^device.' + serial +
                 '.*/ group by time(' + interval + ') into ' + interval + '.:series_name')
    db.query('select sum(cost) from "device.' + serial + '" into cost.device.' + serial)


def influxdel(request):
    """
    Debug view: delete all InfluxDB data for a device, or just refresh its
    continuous queries.

    POST with a valid :class:`DatadelForm`. When ``refresh_queries`` is
    unchecked the device is fully reset (counters zeroed, back to tier 1,
    series and events dropped); when checked only the continuous queries
    are dropped and re-registered.
    """
    success = ""
    if request.method == 'POST':
        form = DatadelForm(request.POST)
        if form.is_valid():
            device = form.cleaned_data['device']
            serial = str(device.serial)
            refresh_queries = form.cleaned_data['refresh_queries']
            db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, "root", "root", "seads")
            if refresh_queries is False:
                # Full reset: zero counters, drop back to tier 1, wipe data.
                device.kilowatt_hours_monthly = 0
                device.kilowatt_hours_daily = 0
                device.save()
                rate_plan = device.devicewebsettings.rate_plans.all()[0]
                for tier in Tier.objects.filter(rate_plan=rate_plan):
                    if tier.tier_level == 1:
                        device.devicewebsettings.current_tier = tier
                device.devicewebsettings.save()
                tier_dict = {
                    'name': "tier.device." + serial,
                    'columns': ['tier_level'],
                    'points': [[1]],
                }
                db.write_points([tier_dict])
                series = db.query('list series')[0]['points']
                rg = re.compile('device.' + serial)
                for s in series:
                    if rg.search(s[1]):
                        db.query('drop series "' + s[1] + '"')
                Event.objects.filter(device=device).delete()
                _drop_device_queries(db, serial)
            else:
                # Refresh only: drop and re-register the continuous queries.
                _drop_device_queries(db, serial)
                _register_fanout_queries(db, serial)
    else:
        form = DatadelForm()
    title = "Debug - Data Deletion"
    description = "Use this form to delete data for the device chosen."
    return render(
        request, 'debug.html', {
            'title': title,
            'description': description,
            'form': form,
            'success': success
        })
def render_chart(user, notification):
    """
    Render a per-device wattage chart for this user's notification window,
    upload it to S3, and return its public URL.

    :param user: owner whose devices are plotted.
    :param notification: object with a ``keyword`` ('daily'/'weekly'/'monthly')
        selecting the query window and resolution.
    :returns: ``[resource_url, rfc2822_timestamp]``.
    """
    date_today = datetime.datetime.today()
    date_gmtime = gmtime()
    # Random component keeps concurrently generated filenames unique.
    randbits = str(random.getrandbits(128))
    # Default ('weekly'): one week at hourly resolution.
    start = 'now() - 1w'
    unit = 'h'
    if notification.keyword == 'monthly':
        start = 'now() - 1M'
        unit = 'd'
    elif notification.keyword == 'daily':
        start = 'now() - 1d'
        unit = 'm'
    stop = 'now()'
    db = influxdb.InfluxDBClient(settings.INFLUXDB_URI, 8086, 'root', 'root', 'seads')
    fig = plt.figure(figsize=(10, 5), dpi=100)  # 1000px * 500px figure
    plt.ylabel('Watts')
    for device in Device.objects.filter(owner=user):
        points = {}  # timestamp -> summed wattage across appliances
        result = db.query('list series')[0]
        appliances = Set()
        for series in result['points']:
            rg = re.compile('device.' + str(device.serial))
            if re.match(rg, series[1]):
                # Keep only fanned-out sub-series ("device.<serial>.<appliance>").
                appliance = series[1].split('device.' + str(device.serial) + '.')
                if (len(appliance) < 2):
                    continue
                else:
                    appliances.add(appliance[-1])
        for appliance in appliances:
            query = 'select * from 1' + unit + '.device.' + str(
                device.serial
            ) + '.' + appliance + ' where time > ' + start + ' and time < ' + stop
            try:
                group = db.query(query)
            except:
                # Appliance has no rollup points in the window; skip it.
                continue
            if (len(group)):
                group = group[0]['points']
                for s in group:
                    # s[0] is the timestamp, s[2] the mean wattage.
                    if s[0] in points:
                        points[s[0]] += s[2]
                    else:
                        points[s[0]] = s[2]
        y = []
        # NOTE(review): dict iteration order is arbitrary here, so y is not
        # guaranteed to be time-sorted while x below is -- verify the plot.
        for key, value in points.iteritems():
            y.append(value)
        x = 0
        if notification.keyword == 'monthly':
            x = np.array([
                date_today - datetime.timedelta(days=i) for i in range(len(y))
            ])
        elif notification.keyword == 'weekly':
            x = np.array([
                date_today - datetime.timedelta(hours=i) for i in range(len(y))
            ])
        elif notification.keyword == 'daily':
            x = np.array([
                date_today - datetime.timedelta(minutes=i)
                for i in range(len(y))
            ])
        if (len(y) > 0):
            plt.plot(x, y, label=device)
    plt.legend(bbox_to_anchor=(0., 1.02, 1., .102),
               loc=3,
               ncol=2,
               mode="expand",
               borderaxespad=0.)
    filepath = settings.STATIC_PATH + '/webapp/img/'
    filename = notification.keyword + '_' + str(
        user.pk) + '_' + randbits + '_plot.png'
    plt.savefig(filepath + filename, bbox_inches="tight")
    s3 = boto3.resource('s3')
    # NOTE(review): this file handle is never explicitly closed.
    data = open(filepath + filename, 'rb')
    bucket = s3.Bucket(settings.S3_BUCKET)
    expires = datetime.datetime.today() + datetime.timedelta(days=90)
    bucket.put_object(Key='email/' + filename,
                      Body=data,
                      ACL='public-read',
                      Expires=str(expires))
    resource_url = 'https://' + settings.S3_BUCKET + '.s3.amazonaws.com/email/' + filename
    os.remove(filepath + filename)
    return [resource_url, strftime("%a, %d %b %Y %H:%M:%S +0000", date_gmtime)]