def getDashboard_info(self, start, end, device_type="Solar", interval="900", location_list=None):
    dash = Dashboard()
    # Current-day window
    print('current', start, end)
    dpm_value, tree_save, irr_value, co2, X_dpm, Y_dpm, Y_irr, Y_tree, Y_perform, Y_expected = \
        self.getSummary_data(start, end, device_type=device_type,
                             interval=interval, location_list=location_list)
    # Last-hour window, snapped back to the most recent whole hour
    end_time = DateUtil.getlowest_min_date(min=60)
    start_time = end_time - timedelta(hours=1)
    print('last_hour', start_time, end_time)
    lh_dpm_value, lh_tree_save, lh_irr_value, lh_co2, lh_X_dpm, lh_Y_dpm, lh_Y_irr, lh_Y_tree, lh_Y_perform, lh_Y_expected = \
        self.getSummary_data(start_time, end_time, device_type=device_type,
                             interval=interval, location_list=location_list)
    dash.setE_Generation_summary(last_hour=lh_dpm_value, current=dpm_value)
    dash.setE_Generation_SummaryGraph(lh_X_dpm, lh_Y_dpm, x_uom="", y_uom="Wh")
    dash.setE_Generation_Graph(X_dpm, Y_dpm, x_uom="", y_uom="Wh")
    dash.setIrr_summary(last_hour=lh_irr_value, current=irr_value, uom='W/m2')
    dash.setIrr_SummaryGraph(lh_X_dpm, lh_Y_irr, x_uom="", y_uom="W/m2")
    dash.setIrr_Graph(X_dpm, Y_irr, x_uom="", y_uom="W/m2")
    dash.setTree_save_summary(last_hour=lh_tree_save, current=tree_save, co2=co2)
    dash.setTree_save_SummaryGraph(lh_X_dpm, lh_Y_tree)
    dash.setExpected_Graph(X_dpm, Y_expected, y_uom="kWh")
    dash.setPR_Graph(X_dpm, Y_perform, y_uom='%')
    return dash
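# DateUtil.getlowest_min_date(min=60) above appears to snap "now" down to the
# start of the current hour. A minimal stdlib stand-in (a hypothetical helper
# for illustration, not the project's DateUtil) could look like this:
def _floor_to_minutes(dt, minutes):
    from datetime import timedelta
    # Drop the sub-interval remainder: e.g. 12:47:31 with minutes=60 -> 12:00:00
    return dt - timedelta(minutes=dt.minute % minutes,
                          seconds=dt.second,
                          microseconds=dt.microsecond)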
def get_batch_job_time(self, job_name):
    data = config.energy_iot_DB_RW[TABLE.IOT_CRON_JOB_TABLE].find_one({'job_name': job_name})
    if data is not None:
        start_date = data['last_run']
    else:
        start_date = DateUtil.get_current_day(add_hr=-48)
    return start_date
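# A minimal sketch of the incremental-window pattern get_batch_job_time
# supports: resume from the stored checkpoint, defaulting to 48 hours back on
# the first run, then advance the checkpoint after a clean run. The in-memory
# dict stands in for the IOT_CRON_JOB_TABLE collection; names are illustrative.
def _incremental_window_sketch(job_store, job_name, now, default_hours=48):
    from datetime import timedelta
    last_run = job_store.get(job_name)
    start = last_run if last_run is not None else now - timedelta(hours=default_hours)
    job_store[job_name] = now  # checkpoint for the next invocation
    return start, now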
def Mapview():
    error = APP_ERRORS.NO_ERROR
    err_desc = APP_ERRORS.DESC[APP_ERRORS.NO_ERROR]
    map_data = []  # default so the template always receives a list
    try:
        start_time = request.args.get('start_time')
        end_time = request.args.get('end_time')
        device_type = request.args.get('device_type')
        if device_type is None:
            device_type = "Solar"
        if start_time is None:
            start_time = DateUtil.get_current_day()
        if end_time is None:
            end_time = start_time + timedelta(days=1)
        print(start_time, end_time)
        interval = "900"  # each slot is 900 s (15 min)
        try:
            error, map_data = agg_Services.get15MinSummaryData_for_map(
                start_time, end_time)
            if not map_data:
                map_data = []
        except Exception as err:
            error = APP_ERRORS.UNKNOWN
            err_desc = str(err)
            app.logger.error("mapview 1 >> error " + str(err))
    except Exception as err:
        app.logger.error("mapview 2 >> error " + str(err))
    return render_template('Map.html', menu="menu_map", map_data=map_data)
def inv_energy__gen():
    print('inv_energy__gen')
    # Read the filter values posted by the form
    filter_date = None
    filter_location_list = None
    filter_type = 'DAY'
    filter_type_nev = None
    try:
        filter_date = request.form['filter_date']
        filter_location_list = request.form.getlist('filter_location_list[]')
        filter_type = request.form['filter_type']
        filter_type_nev = request.form['filter_type_nev']
    except Exception as err:
        print(str(err))
    req_param = {
        'filter_date': filter_date,
        'filter_location_list': filter_location_list
    }
    # Parse the filter date; fall back to the current day
    try:
        start_time = datetime.strptime(filter_date, '%d/%m/%Y')
        end_time = start_time + timedelta(days=1)
    except Exception as err:
        print(str(err))
        start_time = None
        end_time = None
    if start_time is None:
        start_time = DateUtil.get_current_day()
    if end_time is None:
        end_time = start_time + timedelta(days=1)
    # Shift the window for NEXT/PREV navigation
    curr_day = DateUtil.get_current_day()
    if filter_type_nev == "NEXT":
        curr_day = session['last_end_time'] + timedelta(days=1)
        start_time = curr_day
        end_time = curr_day + timedelta(days=1)
    elif filter_type_nev == "PREV":
        curr_day = session['last_start_date'] - timedelta(days=1)
        start_time = curr_day
        end_time = curr_day + timedelta(days=1)
    # Pick the time range and axis format for the selected granularity
    time_axis_format = '%d-%b(%H:%M)'
    _table = TABLE.IOT_AGG_1_HOUR_SUMMARY_TABLE
    if filter_type == "WEEK":
        start_time, end_time = DateUtil.week_range(curr_day)
    elif filter_type == "MONTH":
        start_time, end_time = DateUtil.month_range(curr_day)
        time_axis_format = '%d-%b-%Y'
    elif filter_type == "YEAR":
        start_time, end_time = DateUtil.year_range(curr_day)
    session['last_start_date'] = start_time
    session['last_end_time'] = end_time
    _condition = {
        "sender_timestamp": {"$gte": start_time, "$lte": end_time},
        'sensor_type': "INVERTER",
        "irr_value": {"$exists": 'true'}
    }
    if filter_location_list and "ALL" not in filter_location_list:
        _condition['resource_path'] = {'$in': filter_location_list}
    _projection = {
        "sender_timestamp": 1,
        'resource_path': 1,
        'sensor_id': 1,
        'AGGREGATE_VALUE': 1,
        'performance_ratio': 1,
        'expected_value': 1
    }
    print("search criteria ", _condition)
    print("Projection ", _projection)
    result = iot_dao.getrecord_from_table(
        _table, _condition, _projection, _sortby='resource_path').reset_index()
    try:
        result['AGGREGATE_VALUE'] = result['AGGREGATE_VALUE'].replace(
            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
        result['AGGREGATE_VALUE'] = round(result['AGGREGATE_VALUE'], 2)
        result['performance_ratio'] = result['performance_ratio'].replace(
            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
        result['performance_ratio'] = round(result['performance_ratio'], 2)
    except Exception as err:
        print(str(err))
    agg = {"AGGREGATE_VALUE": 'sum', "performance_ratio": 'mean'}
    location_detail = uti_service.get_location_details()
    data_lst = []
    graph_set = []
    common_x = []
    max_y = 10
    sensor_id_list = []
    summary = []
    if not result.empty:
        result_4_grp = result.groupby(
            ['sender_timestamp', 'sensor_id']).agg(agg).reset_index()
        result_4_grp = result_4_grp.sort_values(['sender_timestamp'],
                                                ascending=[True])
        for sensor_id in result_4_grp['sensor_id'].unique():
            sensor_id_list.append(sensor_id)
            x = []
            y = []
            result_4_grp_tmp = result_4_grp[result_4_grp['sensor_id'] == sensor_id]
            print("sensor_id ", sensor_id)
            summary_info = {}
            max_energy = 0
            total_energy = 0
            min_energy = 1000
            for index, row in result_4_grp_tmp.iterrows():
                value = round(row['AGGREGATE_VALUE'], 2)
                x.append(row['sender_timestamp'].strftime('%d-%m-%Y %H:%M'))
                y.append(value)
                total_energy += value
                if max_energy < value:
                    max_energy = value
                # zero readings are ignored when tracking the minimum
                if row['AGGREGATE_VALUE'] != 0 and min_energy > value:
                    min_energy = row['AGGREGATE_VALUE']
                if max_y < row['AGGREGATE_VALUE']:
                    max_y = round(row['AGGREGATE_VALUE'])
            summary_info['sensor_id'] = sensor_id
            summary_info['max_energy'] = round(max_energy, 2)
            if min_energy == 1000:  # no non-zero reading was seen
                min_energy = 0
            summary_info['min_energy'] = round(min_energy, 2)
            summary_info['energy_gen'] = round(total_energy, 2)
            summary.append(summary_info)
            if len(common_x) < len(x):
                common_x = x
            graph_set.append({'X': x, 'Y': y, 'label': sensor_id, 'type': 'line'})
    asert_info_tmp = iot_dao.getJSON_LISTrecord_from_table(
        TABLE.IOT_ASSET_MSTR,
        {"sensor_id": {'$in': sensor_id_list}},
        {
            "asset_name": 1,
            "resource_path": 1,
            "asset_capacity": 1,
            "asset_location": 1,
            "asset_desc": 1,
            "sensor_id": 1,
            "sensor_type": 1,
            "model": 1,
            "module_efficiency": 1,
            "serial_no": 1,
            "total_panel_area": 1,
            "vendor_detail": 1
        }, None)
    asert_info = []
    for row in asert_info_tmp:
        asert_info.append({
            'Device Id': row['sensor_id'],
            'Location': row['resource_path'],
            'model': row['model'],
            'Device Name': row['asset_name'],
            'Device Desc': row['asset_desc'],
            'Total PV Area (SQM)': row['total_panel_area'],
            'Capacity (kWp)': row['asset_capacity']
        })
    for index, row in result.iterrows():
        data_lst.append({
            'SENDER_TIMESTAMP': row['sender_timestamp'].strftime('%d-%m-%Y %H:%M'),
            'Location': row['resource_path'],
            'Location Code': location_detail[row['resource_path']]['location_code'],
            'sensor_id': row['sensor_id'],
            'Energy (kWh)': row['AGGREGATE_VALUE'],
            'Performance Ratio (%)': row['performance_ratio']
        })
    loc_list = []
    for index, val in location_detail.items():
        val['selected'] = 'F'
        if filter_location_list and val['resource_path'] in filter_location_list:
            val['selected'] = 'Y'
        loc_list.append(val)
    return render_template('sub_pages/common_template.html',
                           pagename="inv_energy_gen_summary",
                           title="Inverter Energy Generation",
                           menu="menu_pr",
                           data=data_lst,
                           location_detail=loc_list,
                           req_param=req_param,
                           filter_type=filter_type,
                           graph_set=graph_set,
                           X_axies=common_x,
                           max_y=max_y,
                           info=asert_info,
                           summary=summary)
def inverter_pr():
    print('inverter_pr')
    # Read the filter values posted by the form
    filter_date = None
    filter_location_list = None
    filter_type = 'DAY'
    filter_type_nev = None
    try:
        filter_date = request.form['filter_date']
        filter_location_list = request.form.getlist('filter_location_list[]')
        filter_type = request.form['filter_type']
        filter_type_nev = request.form['filter_type_nev']
    except Exception as err:
        print(str(err))
    req_param = {
        'filter_date': filter_date,
        'filter_location_list': filter_location_list
    }
    # Parse the filter date; fall back to the current day
    try:
        start_time = datetime.strptime(filter_date, '%d/%m/%Y')
        end_time = start_time + timedelta(days=1)
    except Exception as err:
        print(str(err))
        start_time = None
        end_time = None
    if start_time is None:
        start_time = DateUtil.get_current_day()
    if end_time is None:
        end_time = start_time + timedelta(days=1)
    # Shift the window for NEXT/PREV navigation
    curr_day = DateUtil.get_current_day()
    if filter_type_nev == "NEXT":
        curr_day = session['last_end_time'] + timedelta(days=1)
        start_time = curr_day
        end_time = curr_day + timedelta(days=1)
    elif filter_type_nev == "PREV":
        curr_day = session['last_start_date'] - timedelta(days=1)
        start_time = curr_day
        end_time = curr_day + timedelta(days=1)
    # Pick the time range and axis format for the selected granularity
    time_axis_format = '%d-%b(%H:%M)'
    _table = TABLE.IOT_AGG_1_HOUR_SUMMARY_TABLE
    if filter_type == "WEEK":
        start_time, end_time = DateUtil.week_range(curr_day)
    elif filter_type == "MONTH":
        start_time, end_time = DateUtil.month_range(curr_day)
        time_axis_format = '%d-%b-%Y'
    elif filter_type == "YEAR":
        start_time, end_time = DateUtil.year_range(curr_day)
    session['last_start_date'] = start_time
    session['last_end_time'] = end_time
    _condition = {
        "sender_timestamp": {"$gte": start_time, "$lte": end_time},
        'sensor_type': "INVERTER",
        "irr_value": {"$exists": 'true'}
    }
    if filter_location_list and "ALL" not in filter_location_list:
        _condition['resource_path'] = {'$in': filter_location_list}
    _projection = {
        "sender_timestamp": 1,
        'resource_path': 1,
        'sensor_id': 1,
        'AGGREGATE_VALUE': 1,
        'performance_ratio': 1,
        'expected_value': 1
    }
    print("search criteria ", _condition)
    print("Projection ", _projection)
    result = iot_dao.getrecord_from_table(
        _table, _condition, _projection, _sortby='resource_path').reset_index()
    try:
        result['AGGREGATE_VALUE'] = result['AGGREGATE_VALUE'].replace(
            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
        result['AGGREGATE_VALUE'] = round(result['AGGREGATE_VALUE'], 2)
        result['performance_ratio'] = result['performance_ratio'].replace(
            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
        result['performance_ratio'] = round(result['performance_ratio'], 2)
    except Exception as err:
        print(str(err))
    agg = {"AGGREGATE_VALUE": 'sum', "performance_ratio": 'mean'}
    location_detail = uti_service.get_location_details()
    data_lst = []
    x = []
    y1 = []
    y2 = []
    if not result.empty:
        # only rows with actual generation contribute to the per-sensor PR
        result_4_grp = result[result['AGGREGATE_VALUE'] > 0]
        result_4_grp = result_4_grp.groupby(['sensor_id']).agg(agg).reset_index()
        result_4_grp = result_4_grp.sort_values(['AGGREGATE_VALUE'],
                                                ascending=[True])
        for index, row in result_4_grp.iterrows():
            x.append(row['sensor_id'])
            if row['performance_ratio'] > 100:
                row['performance_ratio'] = 100  # cap implausible PR readings
            y1.append(round(row['performance_ratio']))
            y2.append(round(row['AGGREGATE_VALUE'], 2))
    for index, row in result.iterrows():
        data_lst.append({
            'SENDER_TIMESTAMP': row['sender_timestamp'].strftime('%d-%m-%Y %H:%M'),
            'Location': row['resource_path'],
            'Location Code': location_detail[row['resource_path']]['location_code'],
            'sensor_id': row['sensor_id'],
            'Energy (kWh)': row['AGGREGATE_VALUE'],
            'Performance Ratio (%)': row['performance_ratio']
        })
    loc_list = []
    for index, val in location_detail.items():
        val['selected'] = 'F'
        if filter_location_list and val['resource_path'] in filter_location_list:
            val['selected'] = 'Y'
        loc_list.append(val)
    plot_data, layout = polt_dashboard.rander_simplegraph(
        x_axies=x,
        y_axies_01=y1,
        y_label_01="PR (%)",
        y_axies_02=y2,
        y_label_02="Energy (kWh)",
        title="Inverter vs Performance Ratio",
        type_01="Scatter",
        type_02="Bar")
    graphs = dict(data=plot_data, layout=layout)
    graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
    return render_template('sub_pages/common_template.html',
                           pagename="inv_performance_ratio_summary",
                           title="Inverter Performance Ratio",
                           menu="menu_pr",
                           data=data_lst,
                           location_detail=loc_list,
                           req_param=req_param,
                           filter_type=filter_type,
                           graphJSON=graphJSON,
                           X=x,
                           Y1=y1,
                           Y2=y2,
                           y1_lable="Performance Ratio (%)",
                           y2_lable="Energy (kWh)")
def GETDashbord_info():
    error = APP_ERRORS.NO_ERROR
    err_desc = APP_ERRORS.DESC[APP_ERRORS.NO_ERROR]
    result = Dashboard()
    try:
        filter_date = None
        filter_location_list = None
        filter_type = 'DAY'
        filter_type_nev = None
        try:
            filter_date = request.form['filter_date']
            filter_location_list = request.form.getlist('filter_location_list[]')
            filter_type = request.form['filter_type']
            filter_type_nev = request.form['filter_type_nev']
        except Exception as err:
            print(str(err))
        req_param = {
            'filter_date': filter_date,
            'filter_location_list': filter_location_list
        }
        # Parse the filter date; fall back to the current day
        try:
            start_time = datetime.strptime(filter_date, '%d/%m/%Y')
            end_time = start_time + timedelta(days=1)
        except Exception as err:
            print(str(err))
            start_time = None
            end_time = None
        if start_time is None:
            start_time = DateUtil.get_current_day()
        if end_time is None:
            end_time = start_time + timedelta(days=1)
        # Shift the window for NEXT/PREV navigation
        curr_day = DateUtil.get_current_day()
        if filter_type_nev == "NEXT":
            curr_day = session['last_end_time'] + timedelta(days=1)
            start_time = curr_day
            end_time = curr_day + timedelta(days=1)
        elif filter_type_nev == "PREV":
            curr_day = session['last_start_date'] - timedelta(days=1)
            start_time = curr_day
            end_time = curr_day + timedelta(days=1)
        interval = "900"  # each slot is 900 s (15 min)
        device_type = "Solar"
        try:
            result = agg_Services.getDashboard_info(
                start=start_time,
                end=end_time,
                device_type=device_type,
                interval=interval,
                location_list=filter_location_list)
        except Exception as err:
            traceback.print_exc()
            error = APP_ERRORS.UNKNOWN
            err_desc = str(err)
            result = Dashboard()
            app.logger.error("getDashboard_info 1 >> error " + str(err))
    except Exception as err:
        traceback.print_exc()
        result = Dashboard()
        app.logger.error("getDashboard_info 2 >> error " + str(err))
        error = APP_ERRORS.UNKNOWN
        err_desc = str(err)
    WeatherInfo = getWeatherInfo()
    result_data = MyEncoder().encode(result)
    plot_data, layout = polt_dashboard.rander_graph(result_data)
    graphs = dict(data=plot_data, layout=layout)
    graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
    location_detail = uti_service.get_location_details()
    loc_list = []
    for index, val in location_detail.items():
        val['selected'] = 'F'
        if filter_location_list and val['resource_path'] in filter_location_list:
            val['selected'] = 'Y'
        loc_list.append(val)
    return render_template('home.html',
                           menu="menu_dash",
                           dashboard_data=result,
                           weatherInfo=WeatherInfo,
                           current_day_dpm=None,
                           graphJSON=graphJSON,
                           location_detail=loc_list,
                           filter_type=filter_type,
                           req_param=req_param)
def getEnergy_info():
    start_time = request.args.get('start_date')
    end_time = request.args.get('to_date')
    location = request.args.get('location')
    sensor_type = request.args.get('sensor_type')
    if not sensor_type:
        sensor_type = 'DPM'
    projection = request.args.get('projection_' + sensor_type)
    req_param = {
        'start_date': start_time,
        'to_date': end_time,
        'location': location
    }
    # Parse the requested range; fall back to the current day
    try:
        start_time = datetime.strptime(start_time, '%Y-%m-%d')
        end_time = datetime.strptime(end_time, '%Y-%m-%d')
    except Exception as err:
        start_time = DateUtil.get_current_day()
        end_time = start_time + timedelta(days=1)
    _table = TABLE.IOT_AGG_15_MINUTES_SUMMARY_TABLE
    _condition = {"sender_timestamp": {"$gte": start_time, "$lte": end_time}}
    if location and location != "ALL":
        _condition['resource_path'] = location
    if sensor_type:
        _condition['sensor_type'] = sensor_type
    if not projection:
        projection = 'AGGREGATE_VALUE'
    _projection = {
        "sender_timestamp": 1,
        'resource_path': 1,
        'sensor_type': 1,
        projection: 1
    }
    _sortby = "sender_timestamp"
    print(_condition)
    print(_projection)
    result = iot_dao.getrecord_from_table(_table, _condition, _projection,
                                          _sortby).reset_index()
    data_lst = []
    x = []
    y = []
    for index, row in result.iterrows():
        rw = {
            'sender_timestamp': row['sender_timestamp'].strftime('%d-%m-%Y %H:%M'),
            'location': row['resource_path'],
            'sensor_type': row['sensor_type'],
            projection: row[projection]
        }
        x.append(row['sender_timestamp'].strftime('%H:%M-%d%b%Y '))
        y.append(row[projection])
        data_lst.append(rw)
    location_detail = uti_service.get_location_details()
    loc_list = []
    for index, val in location_detail.items():
        loc_list.append(val)
    plot_data, layout = polt_dashboard.rander_simplegraph(
        x_axies=x, y_axies_01=y, y_label_01=projection)
    graphs = dict(data=plot_data, layout=layout)
    graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder)
    return render_template('sub_pages/common_template.html',
                           pagename="dyn_summary_main",
                           menu="menu_sv",
                           data=data_lst,
                           location_detail=loc_list,
                           req_param=req_param,
                           graphJSON=graphJSON)
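# Illustrative request for the view above (the route path is an assumption
# since the decorator is not shown; the query parameters are the ones read
# from request.args):
#   GET /energy_info?start_date=2020-06-01&to_date=2020-06-02
#       &location=ALL&sensor_type=DPM&projection_DPM=AGGREGATE_VALUE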
def insert_1day_summary(self, duration_limit=5):
    total_inserted_rows = 0
    assert_det = dao.getAssert_master()
    loc_irr_det = dao.getLocation_IRR()
    to = DateUtil.get_current_day()
    frm = DateUtil.get_current_day() - timedelta(days=int(duration_limit))
    app.logger.info('insert_1day_summary :: ' + str(frm) + " < t >= " + str(to))
    data_set = dao.getAll_iot_Main_table_detail(
        sensor_type=None,
        device_type=Util.Util.Solar,
        from_time=frm,
        to_time=to,
        table_name=TABLE.IOT_AGG_1_HOUR_SUMMARY_TABLE,
        mandatory_col='irr_value')
    CONFIG_TABLE = util_service.get_Config_matrix()
    if data_set is None:
        app.logger.info(
            "************************ No record available ************************ "
        )
        return 0
    for device_type in data_set['device_type'].unique():
        for sensor_type in data_set['sensor_type'].unique():
            # Build the aggregation spec for this device/sensor combination
            agg_columns = CONFIG_TABLE[device_type + '#' + sensor_type]
            agg = {}
            for col in agg_columns:
                if col['is_aggr'] == 'Y':
                    fun = col['aggr_funct']
                    col_name = col['column_ref']
                    data_set[col_name] = pd.to_numeric(data_set[col_name],
                                                       errors='coerce')
                    if fun == "SUM":
                        data_set[col_name] = data_set[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                        agg[col_name] = 'sum'
                    elif fun == "AVG":
                        data_set[col_name] = data_set[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                        agg[col_name] = 'mean'
                    elif fun == "DIFF":
                        data_set[col_name] = data_set[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                        agg[col_name] = 'max'
            agg['sensor_capacity'] = 'mean'
            data_set['sensor_capacity'] = data_set['sensor_capacity'].replace(
                [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
            agg['irr_value'] = 'sum'
            data_set['irr_value'] = data_set['irr_value'].replace(
                [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
            agg['AGGREGATE_VALUE'] = 'sum'
            data_set['AGGREGATE_VALUE'] = data_set['AGGREGATE_VALUE'].replace(
                [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
            app.logger.info(agg)
            data = data_set[data_set['sensor_type'].notnull()
                            & (data_set['sensor_type'] == sensor_type)
                            & data_set['device_type'].notnull()
                            & (data_set['device_type'] == device_type)]
            data = data.groupby([
                'resource_path', 'device_type', 'sender_id', 'device_id',
                'sensor_type', 'sensor_id',
                Grouper(key='sender_timestamp', freq='1D')
            ]).agg(agg).reset_index()
            data = data.sort_values(
                ['resource_path', 'sensor_type', 'sender_timestamp'],
                ascending=[True, True, True])
            # The group-by labels each bucket by its lower bound (e.g. the
            # 12:15-12:30 slot is labelled 12:15 rather than 12:30). The
            # end-label shift is left disabled for the daily summary:
            # data['sender_timestamp'] = data['sender_timestamp'] + timedelta(hours=24)
            print(sensor_type, data.shape)
            batch_no = DateUtil().getcurr_datetime_String(format='%Y%m%d%H%M%S')
            app.logger.info(
                "-------------------------------------------------------------------------------------------"
            )
            app.logger.info(
                "************************ Insert record into Aggregation table ************************ "
            )
            app.logger.info("device_type " + device_type)
            app.logger.info("sensor_type " + sensor_type)
            app.logger.info("No of Rows " + str(len(data.index)))
            app.logger.info(
                "-------------------------------------------------------------------------------------------"
            )
            for index, row in data.iterrows():
                row['batch_no'] = batch_no
                json_res = {'Created_on': DateUtil.getcurr_datetime()}
                for i in row.keys():
                    try:
                        json_res[i] = row[i]
                    except Exception as err:
                        print(row)
                if sensor_type != Util.Util.IRR:
                    expected_value = 0
                    performance_ratio = 0
                    try:
                        irr_value = row['irr_value']
                        blk_sensor_capacity = row['sensor_capacity']
                        agg_value = row['AGGREGATE_VALUE']
                        # 0.001 scales irradiance (W/m2) against the 1000 W/m2
                        # reference; irr_value is already summed over the day's
                        # hourly rows, so no 0.25 h interval factor is applied
                        # here (compare pr_calculation)
                        expected_value = round(
                            float(irr_value * 0.001 * blk_sensor_capacity), 2)
                        # multiply by 100 to express the ratio as a percentage
                        performance_ratio = round(
                            float((agg_value / expected_value) * 100), 2)
                    except Exception as err:
                        # covers missing columns and division by zero
                        app.logger.error(str(err))
                    json_res['expected_value'] = expected_value
                    json_res['performance_ratio'] = performance_ratio
                try:
                    _json = json_res
                    _cond = {
                        "device_type": json_res['device_type'],
                        "sender_timestamp": json_res['sender_timestamp'],
                        "sender_id": json_res['sender_id'],
                        "sensor_type": json_res['sensor_type'],
                        "device_id": json_res['device_id'],
                        "resource_path": json_res['resource_path']
                    }
                    print(json_res)
                    dao.upsert(table_name=TABLE.IOT_AGG_1_DAY_SUMMARY_TABLE,
                               set_json=_json,
                               _condition=_cond)
                except Exception as err:
                    error_rec = {
                        "Error:": "Error in IOT_AGG_1_DAY_SUMMARY_TABLE (insert)",
                        "Error Desc": str(err),
                        "batchjob_time": {'from': frm, 'to': to},
                        'created_on': DateUtil.getcurr_datetime()
                    }
                    dao.insert_one_record(row=error_rec,
                                          table_name=TABLE.IOT_CRON_EXCEPTION)
            total_inserted_rows += len(data.index)
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    app.logger.info(
        "************************ Update status into Main table ************************ "
    )
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    _json = {'last_run': to}
    _cond = {"job_name": 'insert_1day_summary'}
    dao.upsert(table_name=TABLE.IOT_CRON_JOB_TABLE,
               set_json=_json,
               _condition=_cond)
    return "Done"
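# Worked example of the daily expected-value formula above (numbers are
# illustrative; irr_value is already a sum over the day's hourly rows, so no
# 0.25 h interval factor appears, unlike pr_calculation):
#   irr_value = 4500 (summed), sensor capacity = 10 kWp
#   expected_value    = 4500 * 0.001 * 10 = 45.0 kWh
#   AGGREGATE_VALUE   = 38.0 kWh
#   performance_ratio = (38.0 / 45.0) * 100 = 84.44 %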
def pr_calculation(self, duration_limit=5):
    assert_det = dao.getAssert_master()
    loc_irr_det = dao.getLocation_IRR()
    to = DateUtil.getlowest_min_date(min=30)
    frm = to - timedelta(hours=int(duration_limit))
    app.logger.info('pr_calculation :: ' + str(frm) + " < t >= " + str(to))
    data_set = dao.getAll_iot_Main_table_detail(
        sensor_type=None,
        device_type=Util.Util.Solar,
        from_time=frm,
        to_time=to,
        table_name=TABLE.IOT_AGG_15_MINUTES_SUMMARY_TABLE)
    irr_data = data_set[data_set['sensor_type'].notnull()
                        & (data_set['sensor_type'] == Util.Util.IRR)]
    if irr_data.empty:
        return 0
    # Index the irradiance readings by "<sensor_id>#<timestamp>" for lookup
    irr_summary = {}
    for index, row in irr_data.iterrows():
        sensor_id = row['sensor_id']
        sender_timestamp = row['sender_timestamp'].strftime("%Y-%m-%d:%H:%M")
        irr_summary[sensor_id + "#" + sender_timestamp] = row['irr']
    other_data = data_set[data_set['sensor_type'].notnull()
                          & (data_set['sensor_type'] != Util.Util.IRR)]
    for index, row1 in other_data.iterrows():
        expected_value = 0
        performance_ratio = 0
        irr_value = 0
        blk_sensor_capacity = 0
        try:
            agg_value = row1['AGGREGATE_VALUE']
            resource_path = row1['resource_path']
            sender_timestamp = row1['sender_timestamp'].strftime("%Y-%m-%d:%H:%M")
            irr_sensor = loc_irr_det[resource_path]
            irr_value = irr_summary.get(irr_sensor + "#" + sender_timestamp)
            if not irr_value:
                irr_value = 0
            sensor_id = row1['sensor_id']
            blk_sensor_capacity = assert_det[sensor_id]
            # expected kWh for one 15-minute slot: irradiance scaled against
            # the 1000 W/m2 reference (x 0.001), times capacity (kWp),
            # times 0.25 h for the 15-minute interval
            expected_value = round(
                float(irr_value * 0.001 * blk_sensor_capacity * 0.25), 2)
            # multiply by 100 to express the ratio as a percentage
            performance_ratio = round(float((agg_value / expected_value) * 100), 2)
        except Exception as err:
            # covers missing lookups and division by zero
            expected_value = 0
            performance_ratio = 0
        try:
            set_json = {
                'expected_value': expected_value,
                'performance_ratio': performance_ratio,
                'irr_value': irr_value,
                'sensor_capacity': blk_sensor_capacity,
                'updated_on': DateUtil.getcurr_datetime()
            }
            _condition = {'_id': row1['_id']}
            result_up = dao.update_record(set_json,
                                          TABLE.IOT_AGG_15_MINUTES_SUMMARY_TABLE,
                                          _condition,
                                          multi=False)
        except Exception as err:
            error_rec = {
                "Error:": "Error in pr_calculation (Update performance_ratio)",
                "Error Desc": str(err),
                "batchjob_time": {'from': frm, 'to': to},
                'created_on': DateUtil.getcurr_datetime()
            }
            dao.insert_one_record(row=error_rec,
                                  table_name=TABLE.IOT_CRON_EXCEPTION)
    return "Done"
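# A self-contained sketch of the 15-minute expected-energy/PR formula used in
# pr_calculation (a hypothetical helper with illustrative numbers, not part
# of the service class):
def _pr_15min_sketch(irr_w_m2, capacity_kwp, actual_kwh):
    # expected kWh for one 15-minute slot
    expected = round(float(irr_w_m2 * 0.001 * capacity_kwp * 0.25), 2)
    if expected == 0:
        return 0.0, 0.0  # mirrors the zero fallback in the except branch above
    return expected, round(float(actual_kwh / expected * 100), 2)

# _pr_15min_sketch(800, 10, 1.7) -> (2.0, 85.0)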
def insert_aggregation_15_minutes_summary(self, duration_limit=5):
    app.logger.info("****insert_aggregation_15_minutes_summary***")
    total_inserted_rows = 0
    CONFIG_TABLE = util_service.get_Config_matrix()
    to = DateUtil.getlowest_min_date(min=30)
    frm = to - timedelta(hours=int(duration_limit))
    app.logger.info('insert_aggregation_15_minutes_summary :: ' + str(frm) +
                    " < t >= " + str(to))
    df = dao.getAll_iot_Main_table_detail(
        sensor_type=None,
        from_time=frm,
        to_time=to,
        table_name=TABLE.IOT_AGG_MAX_TABLE)
    if df is None:
        app.logger.info(
            "************************ No record available ************************ "
        )
        return 0
    print('Total No of Records', df.shape)
    res_data = []
    for device_type in df['device_type'].unique():
        for sensor_type in df['sensor_type'].unique():
            # Build the aggregation spec for this device/sensor combination
            agg_columns = CONFIG_TABLE[device_type + '#' + sensor_type]
            agg = {}
            for col in agg_columns:
                if col['is_aggr'] == 'Y':
                    fun = col['aggr_funct']
                    col_name = col['column_ref']
                    df[col_name] = pd.to_numeric(df[col_name], errors='coerce')
                    if fun == "SUM":
                        agg[col_name] = 'sum'
                        df[col_name] = df[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                    elif fun == "AVG":
                        agg[col_name] = 'mean'
                        df[col_name] = df[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                    elif fun == "DIFF":
                        agg[col_name] = 'max'
                        df[col_name] = df[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
            app.logger.info(agg)
            for resource_path in df['resource_path'].unique():
                data = df[df['sensor_type'].notnull()
                          & (df['sensor_type'] == sensor_type)
                          & df['device_type'].notnull()
                          & (df['device_type'] == device_type)
                          & df['resource_path'].notnull()
                          & (df['resource_path'] == resource_path)]
                data = data.groupby([
                    'resource_path', 'device_type', 'sender_id', 'device_id',
                    'sensor_type', 'sensor_id', 'sender_timestamp'
                ]).agg(agg).reset_index()
                data = data.sort_values(
                    ['resource_path', 'sensor_type', 'sender_timestamp'],
                    ascending=[True, True, True])
                # Track the previous cumulative reading so each row can be
                # turned into an interval delta
                dpm__previous_reading = 0
                inv__previous_reading = {}
                for sensor_id in data['sensor_id'].unique():
                    inv__previous_reading[sensor_id] = 0
                for index, row in data.iterrows():
                    agg_value = 0
                    if sensor_type == Util.Util.DPM:
                        if 'Act_Energy_Del' in row.keys():
                            dpm__current_reading = row['Act_Energy_Del']
                            if index > 0:
                                try:
                                    agg_value = round(
                                        float(dpm__current_reading -
                                              dpm__previous_reading), 2)
                                    if dpm__current_reading <= 0:
                                        # the meter was reset
                                        agg_value = 0
                                except Exception as err:
                                    app.logger.error(str(err))
                                    agg_value = 0
                            dpm__previous_reading = dpm__current_reading
                    elif sensor_type == Util.Util.INV:
                        if 'E_DAY' in row.keys():
                            current_reading = row['E_DAY']
                            if index > 0:
                                try:
                                    agg_value = round(
                                        float(current_reading -
                                              inv__previous_reading.get(
                                                  row['sensor_id'])), 2)
                                    if agg_value < 0:
                                        # E_DAY was reset; the new counter value
                                        # is the interval energy
                                        agg_value = current_reading
                                except Exception as err:
                                    app.logger.error(str(err))
                            inv__previous_reading[row['sensor_id']] = current_reading
                    elif sensor_type == Util.Util.IRR:
                        if 'irr' in row.keys():
                            agg_value = row['irr']
                    # skip the first row of each batch: it has no previous reading
                    if index > 0:
                        row['AGGREGATE_VALUE'] = agg_value
                        res_data.append(row)
    batch_no = DateUtil().getcurr_datetime_String(format='%Y%m%d%H%M%S')
    for row in res_data:
        row['batch_no'] = batch_no
        json_res = {'Created_on': DateUtil.getcurr_datetime()}
        for i in row.keys():
            try:
                json_res[i] = row[i]
            except Exception as err:
                print(row)
        try:
            total_inserted_rows += 1
            _json = json_res
            _cond = {
                "device_type": json_res['device_type'],
                "sender_timestamp": json_res['sender_timestamp'],
                "sender_id": json_res['sender_id'],
                "sensor_type": json_res['sensor_type'],
                "device_id": json_res['device_id'],
                "resource_path": json_res['resource_path']
            }
            dao.upsert(table_name=TABLE.IOT_AGG_15_MINUTES_SUMMARY_TABLE,
                       set_json=_json,
                       _condition=_cond)
        except Exception as err:
            error_rec = {
                "Error:": "Error in IOT_AGG_15_MINUTES_SUMMARY_TABLE (insert)",
                "Error Desc": str(err),
                "batchjob_time": {'from': frm, 'to': to},
                'created_on': DateUtil.getcurr_datetime()
            }
            dao.insert_one_record(row=error_rec,
                                  table_name=TABLE.IOT_CRON_EXCEPTION)
    _json = {'last_run': to}
    _cond = {"job_name": 'insert_aggregation_15_minutes_summary'}
    dao.upsert(table_name=TABLE.IOT_CRON_JOB_TABLE,
               set_json=_json,
               _condition=_cond)
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    app.logger.info(
        "************************ Completed Aggregation table 15_max ************************ "
    )
    app.logger.info("total_inserted_rows: " + str(total_inserted_rows))
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    return total_inserted_rows
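# A minimal sketch of the cumulative-meter delta rule applied above for
# inverter E_DAY readings (illustrative values; a negative delta is treated
# as a counter reset, in which case the new reading becomes the interval
# energy):
def _meter_deltas_sketch(readings):
    deltas = []
    prev = None
    for curr in readings:
        if prev is None:
            prev = curr  # first row has no delta, mirroring "if index > 0"
            continue
        delta = round(float(curr - prev), 2)
        if delta < 0:
            delta = curr  # counter restarted from zero
        deltas.append(delta)
        prev = curr
    return deltas

# _meter_deltas_sketch([100.0, 102.5, 104.0, 1.2]) -> [2.5, 1.5, 1.2]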
def insert_aggregation_15_min_Max(self, duration_limit, interval, device_type, sensor_type):
    app.logger.info("****get_aggregation_tableFrom_main***")
    total_inserted_rows = 0
    CONFIG_TABLE = util_service.get_Config_matrix()
    to = DateUtil().getlowest_min_date(min=15)
    frm = to - timedelta(hours=int(duration_limit))
    app.logger.info('insert_aggregation_15_min_Max :: ' + str(frm) +
                    " < t >= " + str(to))
    df = dao.getAll_iot_Main_table_detail(sensor_type=None,
                                          from_time=frm,
                                          to_time=to,
                                          processed_status=None,
                                          table_name=TABLE.IOT_MAIN_TABLE)
    if df is None:
        app.logger.info(
            "************************ No record available ************************ "
        )
        return 0
    print('Total No of Records', df.shape)
    # The device_type/sensor_type arguments are superseded by the per-group
    # loops below, which cover every combination found in the data.
    for device_type in df['device_type'].unique():
        for sensor_type in df['sensor_type'].unique():
            agg_columns = CONFIG_TABLE[device_type + '#' + sensor_type]
            agg = {}
            for col in agg_columns:
                if col['is_aggr'] == 'Y':
                    fun = col['aggr_funct']
                    col_name = col['column_ref']
                    df[col_name] = pd.to_numeric(df[col_name], errors='coerce')
                    if fun == "SUM":
                        df[col_name] = df[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                        agg[col_name] = 'sum'
                    elif fun == "AVG":
                        df[col_name] = df[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                        agg[col_name] = 'mean'
                    elif fun == "DIFF":
                        df[col_name] = df[col_name].replace(
                            [np.NaN, ''], 0).astype(np.float64).fillna(0.0)
                        agg[col_name] = 'max'
            app.logger.info(agg)
            data = df[df['sensor_type'].notnull()
                      & (df['sensor_type'] == sensor_type)
                      & df['device_type'].notnull()
                      & (df['device_type'] == device_type)]
            data = data.groupby([
                'resource_path', 'device_type', 'sender_id', 'device_id',
                'sensor_type', 'sensor_id',
                Grouper(key='sender_timestamp', freq=interval + 's')
            ]).agg(agg).reset_index()
            data = data.sort_values(
                ['resource_path', 'sensor_type', 'sender_timestamp'],
                ascending=[True, True, True])
            # The group-by labels each bucket by its lower bound (e.g. the
            # 12:15-12:30 slot is labelled 12:15 rather than 12:30), so move
            # the label to the bucket end.
            data['sender_timestamp'] = data['sender_timestamp'] + timedelta(
                seconds=int(interval))
            print(sensor_type, data.shape)
            batch_no = DateUtil().getcurr_datetime_String(format='%Y%m%d%H%M%S')
            app.logger.info(
                "-------------------------------------------------------------------------------------------"
            )
            app.logger.info(
                "************************ Insert record into Aggregation table ************************ "
            )
            app.logger.info("device_type " + device_type)
            app.logger.info("sensor_type " + sensor_type)
            app.logger.info("No of Rows " + str(len(data.index)))
            app.logger.info(
                "-------------------------------------------------------------------------------------------"
            )
            for index, row in data.iterrows():
                row['batch_no'] = batch_no
                json_res = {'Created_on': DateUtil.getcurr_datetime()}
                for i in row.keys():
                    try:
                        json_res[i] = row[i]
                    except Exception as err:
                        print(row)
                try:
                    _json = json_res
                    _cond = {
                        "device_type": json_res['device_type'],
                        "sender_timestamp": json_res['sender_timestamp'],
                        "sender_id": json_res['sender_id'],
                        "sensor_type": json_res['sensor_type'],
                        "device_id": json_res['device_id'],
                        "resource_path": json_res['resource_path']
                    }
                    dao.upsert(table_name=TABLE.IOT_AGG_MAX_TABLE,
                               set_json=_json,
                               _condition=_cond)
                except Exception as err:
                    error_rec = {
                        "Error:": "Error in insert_aggregation_15_min_Max (insert)",
                        "Error Desc": str(err),
                        "batchjob_time": {'from': frm, 'to': to},
                        'created_on': DateUtil.getcurr_datetime()
                    }
                    dao.insert_one_record(row=error_rec,
                                          table_name=TABLE.IOT_CRON_EXCEPTION)
            total_inserted_rows += len(data.index)
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    app.logger.info(
        "************************ Update status into Main table ************************ "
    )
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    _json = {'last_run': to}
    _cond = {"job_name": 'insert_aggregation_15_min_Max'}
    dao.upsert(table_name=TABLE.IOT_CRON_JOB_TABLE,
               set_json=_json,
               _condition=_cond)
    '''
    app.logger.info("insert_aggregation_15_min_Max (Update processed_status)")
    for index, row1 in df.iterrows():
        try:
            set_json = {'processed_status': 'Y'}
            _condition = {'_id': row1['_id']}
            result_up = dao.update_record(set_json, TABLE.IOT_MAIN_TABLE,
                                          _condition, multi=False)
        except Exception as err:
            error_rec = {"Error:": "Error in insert_aggregation_15_min_Max (Update processed_status)",
                         "Error Desc": str(err),
                         "batchjob_time": {'from': frm, 'to': to},
                         'created_on': DateUtil.getcurr_datetime()}
            dao.insert_one_record(row=error_rec, table_name=TABLE.IOT_CRON_EXCEPTION)
    '''
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    app.logger.info(
        "************************ Completed Aggregation table 15_max ************************ "
    )
    app.logger.info("total_inserted_rows: " + str(total_inserted_rows))
    app.logger.info(
        "-------------------------------------------------------------------------------------------"
    )
    return total_inserted_rows
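# Stand-alone illustration of the bucket-label shift applied above: pandas
# Grouper labels each bucket by its lower bound, so after aggregation the
# label is moved to the bucket end (sample data is illustrative):
def _grouper_shift_sketch():
    import pandas as pd
    from datetime import timedelta
    df = pd.DataFrame({
        'sender_timestamp': pd.to_datetime(
            ['2020-01-01 12:16', '2020-01-01 12:29', '2020-01-01 12:31']),
        'value': [1.0, 2.0, 3.0],
    })
    out = df.groupby(pd.Grouper(key='sender_timestamp',
                                freq='900s')).agg({'value': 'sum'}).reset_index()
    # the 12:15-12:30 bucket is labelled 12:15; relabel it to 12:30
    out['sender_timestamp'] = out['sender_timestamp'] + timedelta(seconds=900)
    return out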