def serve_historical_hvac(bucketsize):
    t1 = datetime.now(TZ).replace(microsecond=0)
    t0 = get_today()
    tstats_df.window = bucketsize
    res = dofetch([tstats_view, weather_view], [tstats_df, weather_df], t0, t1)
    zones = defaultdict(lambda: defaultdict(dict))
    # forward- then back-fill so every thermostat column has a value in each bucket
    df = res['tstats'].fillna(method='ffill').fillna(method='bfill')
    for (tstat, zone, hsp, csp, temp, state) in res.query(
            'select tstat, zone, hsp_uuid, csp_uuid, temp_uuid, state_uuid from tstats'):
        zone = zone.split('#')[-1]
        zones[zone]['inside'] = json.loads(df[temp].dropna().to_json())
        zones[zone]['heating'] = json.loads(df[hsp].dropna().to_json())
        zones[zone]['outside'] = json.loads(
            res['weather_temp'].max(axis=1).dropna().to_json())
        zones[zone]['cooling'] = json.loads(df[csp].dropna().to_json())
        zones[zone]['state'] = json.loads(
            df[state].dropna().apply(state_to_string).to_json())
        # if any series came back empty, substitute placeholder values so the
        # frontend still receives a full time axis for the day
        for k, values in zones[zone].items():
            if len(values) == 0:
                fakedates = pd.date_range(t0, t1, freq=bucketsize.replace('m', 'T'))
                if k != 'state':
                    fakevals = [0] * len(fakedates)
                else:
                    fakevals = ['off'] * len(fakedates)
                zones[zone][k] = json.loads(
                    pd.DataFrame(fakevals, index=fakedates)[0].to_json())
    return jsonify(zones)
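
# NOTE: state_to_string is used above but not defined in this section. The
# sketch below is a hypothetical implementation, assuming the thermostat state
# stream encodes its mode as small integers; the exact code-to-label mapping is
# an assumption, not taken from this codebase.
def state_to_string(state):
    # map a numeric HVAC state code to the label the dashboard expects
    return {
        0: 'off',
        1: 'heat stage 1',
        2: 'cool stage 1',
        4: 'heat stage 2',
        5: 'cool stage 2',
    }.get(int(state), 'off')
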
def energy_summary(last, bucketsize):
    start_date = get_start(last)
    if last == 'year' and bucketsize == 'month':
        ranges = generate_months(get_today().month - 1)
        readings = []
        times = []
        for t0, t1 in ranges:
            meter_df.window = '15m'
            res = dofetch([meter_view], [meter_df], t1, t0)
            df = res['meters'].copy()
            df.columns = ['readings']
            df /= 4.  # divide by 4 to convert 15-minute average kW to kWh per interval
            times.append(pd.to_datetime(t1.isoformat()))
            readings.append(df['readings'].sum())
        df = pd.DataFrame(readings, index=times, columns=['readings'])
        # print('\n/api/energy/year/in/month/ df: \n', df)
        return df.fillna('myNullVal').to_json()
    meter_df.window = '15m'
    print('start_date', start_date)
    res = dofetch([meter_view], [meter_df], start_date, datetime.now(TZ))
    df = res['meters'].tz_convert(TZ).copy()
    df.columns = ['readings']
    df['readings'] /= 4.  # same 15-minute kW -> kWh conversion
    return df.fillna('myNullVal').resample(bucketsize).apply(sum).to_json()
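
# NOTE: generate_months is not defined in this section. A plausible sketch,
# assuming it yields (later, earlier) pd.Timestamp pairs -- the month to date
# first, then each preceding full month -- so that the (t0, t1) unpacking and
# the dofetch(..., t1, t0) calls in energy_summary above and power_summary
# below line up. The use of pd.DateOffset and the exact month boundaries are
# assumptions.
def generate_months(n_months):
    t0 = pd.Timestamp.now(tz=TZ)
    t1 = t0.normalize().replace(day=1)  # midnight on the 1st of this month
    yield t0, t1
    for _ in range(n_months):
        t0 = t1
        t1 = t1 - pd.DateOffset(months=1)
        yield t0, t1
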
def get_temp_per_zone(bucketsize):
    t1 = datetime.now(TZ).replace(microsecond=0)
    t0 = get_today()
    tstats_df.window = bucketsize
    res = dofetch([tstats_view, weather_view], [tstats_df, weather_df], t0, t1)
    zones = defaultdict(lambda: defaultdict(dict))
    df = res['tstats']
    for (zone, temp) in res.query('select zone, temp_uuid from tstats'):
        zone = zone.split('#')[-1]
        zones[zone] = json.loads(df[temp].fillna('myNullVal').to_json())
    return jsonify(zones)
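
# NOTE: get_today is used throughout but defined elsewhere; it is assumed to
# return midnight of the current day localized to TZ. A minimal sketch:
def get_today():
    return datetime.now(TZ).replace(hour=0, minute=0, second=0, microsecond=0)
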
def power_summary(last, bucketsize):
    # first, determine the start date from the 'last' argument
    start_date = get_start(last)
    if last == 'year' and bucketsize == 'month':
        ranges = generate_months(get_today().month - 1)
        readings = []
        times = []
        for t0, t1 in ranges:
            # one aggregated reading per month: size the window to the whole range
            meter_df.window = '{0}d'.format((t0 - t1).days)
            res = dofetch([meter_view], [meter_df], t1, t0)
            times.append(t1.tz_convert(TZ).timestamp() * 1000)
            readings.append(res['meters'].fillna('myNullVal').values[0][0])
        return jsonify({'readings': dict(zip(times, readings))})
    # otherwise, fetch at the requested bucketsize from start_date until now
    meter_df.window = bucketsize
    print('start_date', start_date)
    res = dofetch([meter_view], [meter_df], start_date, datetime.now(TZ))
    res['meters'].columns = ['readings']
    return res['meters'].tz_convert(TZ).fillna('myNullVal').to_json()
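
# NOTE: get_start is defined elsewhere; this hypothetical sketch infers its
# behavior from how power_summary and energy_summary use it -- mapping the
# 'last' path component to an absolute start datetime. The accepted keywords
# are an assumption.
def get_start(last):
    today = get_today()
    if last == 'year':
        return today.replace(month=1, day=1)
    if last == 'month':
        return today.replace(day=1)
    if last == 'week':
        return today - timedelta(days=7)
    return today  # 'day' or anything else: since midnight
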
def price():
    res = xsg.get_price(sites[0], get_today(), get_today() + timedelta(days=1))
    return res['price'].to_json()
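
# NOTE: these handlers call jsonify and read like Flask view functions, but the
# route decorators are not shown in this section. register_routes below is a
# purely illustrative helper; every URL pattern except the energy one (hinted
# at by the '/api/energy/year/in/month/' comment above) is a guess.
def register_routes(app):
    app.add_url_rule('/api/hvac/day/in/<bucketsize>', view_func=serve_historical_hvac)
    app.add_url_rule('/api/energy/<last>/in/<bucketsize>', view_func=energy_summary)
    app.add_url_rule('/api/temp/day/in/<bucketsize>', view_func=get_temp_per_zone)
    app.add_url_rule('/api/power/<last>/in/<bucketsize>', view_func=power_summary)
    app.add_url_rule('/api/price', view_func=price)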