def on_get(req, resp):
    """Build the combined equipment efficiency report.

    Compares the energy input/output of a combined equipment over a
    reporting period against an optional base period, computes
    output/input efficiency ratios, and attaches tariff curves and
    associated point (sensor) curves as report parameters.

    Query parameters (datetimes are local, format '%Y-%m-%dT%H:%M:%S'):
        combinedequipmentid           positive integer id (required)
        periodtype                    'hourly'|'daily'|'monthly'|'yearly' (required)
        baseperiodstartdatetime       base period begin (optional)
        baseperiodenddatetime         base period end (optional)
        reportingperiodstartdatetime  reporting period begin (required)
        reportingperiodenddatetime    reporting period end (required)

    Raises:
        falcon.HTTPError: 400 for invalid parameters; 404 when the
            combined equipment or the energy categories are not found.

    Fixes over the previous revision:
      * removed a leftover debug print of the request parameters
      * cursor_historical.disconnect() / cnx_historical.close() were
        swapped (a cursor has no disconnect()); all six handles are now
        released through one helper, and the historical connection is
        also released on the success path (it used to leak)
      * `rows is not None or len(rows) > 0` corrected to `and`
      * redundant double slice `[0:19][0:19]` on tariff timestamps
      * efficiency increment rate no longer raises TypeError when a
        cumulation is None (empty base or reporting period)
      * DIGITAL_VALUE query now orders by utc_date_time like the others
    """
    combined_equipment_id = req.params.get('combinedequipmentid')
    period_type = req.params.get('periodtype')
    base_start_datetime_local = req.params.get('baseperiodstartdatetime')
    base_end_datetime_local = req.params.get('baseperiodenddatetime')
    reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime')
    reporting_end_datetime_local = req.params.get('reportingperiodenddatetime')

    ################################################################################################################
    # Step 1: valid parameters
    ################################################################################################################
    if combined_equipment_id is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_COMBINED_EQUIPMENT_ID')
    combined_equipment_id = str.strip(combined_equipment_id)
    if not combined_equipment_id.isdigit() or int(combined_equipment_id) <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_COMBINED_EQUIPMENT_ID')

    if period_type is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_PERIOD_TYPE')
    period_type = str.strip(period_type)
    if period_type not in ['hourly', 'daily', 'monthly', 'yearly']:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_PERIOD_TYPE')

    # Offset in minutes between the configured local timezone and UTC,
    # e.g. '+08:00' -> 480, '-05:30' -> -330.
    timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
    if config.utc_offset[0] == '-':
        timezone_offset = -timezone_offset

    def _parse_local_datetime(text, error_description):
        """Parse a local '%Y-%m-%dT%H:%M:%S' string into an aware UTC
        datetime; raise 400 with `error_description` when malformed."""
        try:
            return datetime.strptime(text, '%Y-%m-%dT%H:%M:%S').replace(tzinfo=timezone.utc) - \
                timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description=error_description)

    base_start_datetime_utc = None
    if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0:
        base_start_datetime_utc = _parse_local_datetime(
            str.strip(base_start_datetime_local), "API.INVALID_BASE_PERIOD_BEGINS_DATETIME")

    base_end_datetime_utc = None
    if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0:
        base_end_datetime_utc = _parse_local_datetime(
            str.strip(base_end_datetime_local), "API.INVALID_BASE_PERIOD_ENDS_DATETIME")

    if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
            base_start_datetime_utc >= base_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_BASE_PERIOD_ENDS_DATETIME')

    if reporting_start_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_BEGINS_DATETIME")
    reporting_start_datetime_utc = _parse_local_datetime(
        str.strip(reporting_start_datetime_local), "API.INVALID_REPORTING_PERIOD_BEGINS_DATETIME")

    if reporting_end_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_ENDS_DATETIME")
    reporting_end_datetime_utc = _parse_local_datetime(
        str.strip(reporting_end_datetime_local), "API.INVALID_REPORTING_PERIOD_ENDS_DATETIME")

    if reporting_start_datetime_utc >= reporting_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_REPORTING_PERIOD_ENDS_DATETIME')

    ################################################################################################################
    # Step 2: query the combined equipment
    ################################################################################################################
    cnx_system = mysql.connector.connect(**config.myems_system_db)
    cursor_system = cnx_system.cursor()
    cnx_energy = mysql.connector.connect(**config.myems_energy_db)
    cursor_energy = cnx_energy.cursor()
    cnx_historical = mysql.connector.connect(**config.myems_historical_db)
    cursor_historical = cnx_historical.cursor()

    def _close_all():
        """Release every cursor and connection opened above.

        BUGFIX: the original cleanup called cursor_historical.disconnect()
        and cnx_historical.close() — the methods were swapped between
        cursor and connection — and the success path never released the
        historical handles at all.
        """
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.disconnect()
        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.disconnect()
        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.disconnect()

    cursor_system.execute(" SELECT id, name, cost_center_id "
                          " FROM tbl_combined_equipments "
                          " WHERE id = %s ", (combined_equipment_id,))
    row_combined_equipment = cursor_system.fetchone()
    if row_combined_equipment is None:
        _close_all()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.COMBINED_EQUIPMENT_NOT_FOUND')

    combined_equipment = dict()
    combined_equipment['id'] = row_combined_equipment[0]
    combined_equipment['name'] = row_combined_equipment[1]
    combined_equipment['cost_center_id'] = row_combined_equipment[2]

    ################################################################################################################
    # Step 3: query associated equipments
    ################################################################################################################
    # todo

    ################################################################################################################
    # Step 4: query input energy categories and output energy categories
    ################################################################################################################
    def _query_energy_categories(table, start_datetime_utc, end_datetime_utc):
        """Return the set of distinct energy category ids recorded in
        `table` (a trusted literal name, never user input) for this
        combined equipment within [start_datetime_utc, end_datetime_utc)."""
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM " + table + " "
                              " WHERE combined_equipment_id = %s "
                              "     AND start_datetime_utc >= %s "
                              "     AND start_datetime_utc < %s ",
                              (combined_equipment['id'], start_datetime_utc, end_datetime_utc))
        rows = cursor_energy.fetchall()
        return set(row[0] for row in rows) if rows is not None else set()

    # union of the categories seen in the base and the reporting periods
    energy_category_set_input = \
        _query_energy_categories('tbl_combined_equipment_input_category_hourly',
                                 base_start_datetime_utc, base_end_datetime_utc) | \
        _query_energy_categories('tbl_combined_equipment_input_category_hourly',
                                 reporting_start_datetime_utc, reporting_end_datetime_utc)
    energy_category_set_output = \
        _query_energy_categories('tbl_combined_equipment_output_category_hourly',
                                 base_start_datetime_utc, base_end_datetime_utc) | \
        _query_energy_categories('tbl_combined_equipment_output_category_hourly',
                                 reporting_start_datetime_utc, reporting_end_datetime_utc)

    # query properties of all energy categories above
    cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                          " FROM tbl_energy_categories "
                          " ORDER BY id ", )
    rows_energy_categories = cursor_system.fetchall()
    if rows_energy_categories is None or len(rows_energy_categories) == 0:
        _close_all()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.ENERGY_CATEGORY_NOT_FOUND')
    energy_category_dict = dict()
    for row_energy_category in rows_energy_categories:
        if row_energy_category[0] in energy_category_set_input or \
                row_energy_category[0] in energy_category_set_output:
            energy_category_dict[row_energy_category[0]] = {
                "name": row_energy_category[1],
                "unit_of_measure": row_energy_category[2],
                "kgce": row_energy_category[3],
                "kgco2e": row_energy_category[4]}

    ################################################################################################################
    # Step 5: query associated points
    ################################################################################################################
    point_list = list()
    cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                          " FROM tbl_combined_equipments e, tbl_combined_equipments_parameters ep, tbl_points p "
                          " WHERE e.id = %s AND e.id = ep.combined_equipment_id AND ep.parameter_type = 'point' "
                          "     AND ep.point_id = p.id "
                          " ORDER BY p.id ", (combined_equipment['id'],))
    rows_points = cursor_system.fetchall()
    if rows_points is not None and len(rows_points) > 0:
        for row in rows_points:
            point_list.append({"id": row[0],
                               "name": row[1],
                               "units": row[2],
                               "object_type": row[3]})

    ################################################################################################################
    # Step 6/7/8/9: query base and reporting period energy input and output
    ################################################################################################################
    # strftime format for the chosen aggregation granularity
    period_format = {'hourly': '%Y-%m-%dT%H:%M:%S',
                     'daily': '%Y-%m-%d',
                     'monthly': '%Y-%m',
                     'yearly': '%Y'}[period_type]

    def _query_period_energy(table, energy_category_set, start_datetime_utc, end_datetime_utc):
        """Aggregate hourly rows of `table` into the requested period type.

        Returns {energy_category_id: {'timestamps': [local time strings],
                                      'values': [Decimal, ...],
                                      'subtotal': Decimal}}.
        NULL hourly values are treated as Decimal(0.0).
        """
        period_data = dict()
        if energy_category_set is not None and len(energy_category_set) > 0:
            for energy_category_id in energy_category_set:
                period_data[energy_category_id] = {'timestamps': list(),
                                                   'values': list(),
                                                   'subtotal': Decimal(0.0)}
                cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                      " FROM " + table + " "
                                      " WHERE combined_equipment_id = %s "
                                      "     AND energy_category_id = %s "
                                      "     AND start_datetime_utc >= %s "
                                      "     AND start_datetime_utc < %s "
                                      " ORDER BY start_datetime_utc ",
                                      (combined_equipment['id'], energy_category_id,
                                       start_datetime_utc, end_datetime_utc))
                rows_hourly = cursor_energy.fetchall()
                rows_periodically = utilities.aggregate_hourly_data_by_period(
                    rows_hourly, start_datetime_utc, end_datetime_utc, period_type)
                for row_periodically in rows_periodically:
                    current_datetime_local = row_periodically[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    actual_value = Decimal(0.0) if row_periodically[1] is None \
                        else row_periodically[1]
                    period_data[energy_category_id]['timestamps'].append(
                        current_datetime_local.strftime(period_format))
                    period_data[energy_category_id]['values'].append(actual_value)
                    period_data[energy_category_id]['subtotal'] += actual_value
        return period_data

    base_input = _query_period_energy('tbl_combined_equipment_input_category_hourly',
                                      energy_category_set_input,
                                      base_start_datetime_utc, base_end_datetime_utc)
    base_output = _query_period_energy('tbl_combined_equipment_output_category_hourly',
                                       energy_category_set_output,
                                       base_start_datetime_utc, base_end_datetime_utc)
    reporting_input = _query_period_energy('tbl_combined_equipment_input_category_hourly',
                                           energy_category_set_input,
                                           reporting_start_datetime_utc, reporting_end_datetime_utc)
    reporting_output = _query_period_energy('tbl_combined_equipment_output_category_hourly',
                                            energy_category_set_output,
                                            reporting_start_datetime_utc, reporting_end_datetime_utc)

    ################################################################################################################
    # Step 10: query tariff data
    ################################################################################################################
    parameters_data = dict()
    parameters_data['names'] = list()
    parameters_data['timestamps'] = list()
    parameters_data['values'] = list()
    if energy_category_set_input is not None and len(energy_category_set_input) > 0:
        for energy_category_id in energy_category_set_input:
            energy_category_tariff_dict = utilities.get_energy_category_tariffs(
                combined_equipment['cost_center_id'],
                energy_category_id,
                reporting_start_datetime_utc,
                reporting_end_datetime_utc)
            tariff_timestamp_list = list()
            tariff_value_list = list()
            for k, v in energy_category_tariff_dict.items():
                # convert k from utc to local
                k = k + timedelta(minutes=timezone_offset)
                tariff_timestamp_list.append(k.isoformat()[0:19])
                tariff_value_list.append(v)
            parameters_data['names'].append(
                'TARIFF-' + energy_category_dict[energy_category_id]['name'])
            parameters_data['timestamps'].append(tariff_timestamp_list)
            parameters_data['values'].append(tariff_value_list)

    ################################################################################################################
    # Step 11: query associated points data
    ################################################################################################################
    # historical table per point object type; names are trusted literals
    point_table_of_object_type = {'ANALOG_VALUE': 'tbl_analog_value',
                                  'ENERGY_VALUE': 'tbl_energy_value',
                                  'DIGITAL_VALUE': 'tbl_digital_value'}
    for point in point_list:
        point_values = []
        point_timestamps = []
        table = point_table_of_object_type.get(point['object_type'])
        if table is not None:
            cursor_historical.execute(" SELECT utc_date_time, actual_value "
                                      " FROM " + table + " "
                                      " WHERE point_id = %s "
                                      "       AND utc_date_time BETWEEN %s AND %s "
                                      " ORDER BY utc_date_time ",
                                      (point['id'],
                                       reporting_start_datetime_utc,
                                       reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()
            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    point_timestamps.append(
                        current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S'))
                    point_values.append(row[1])
        # an unknown object_type still contributes an (empty) series, as before
        parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
        parameters_data['timestamps'].append(point_timestamps)
        parameters_data['values'].append(point_values)

    ################################################################################################################
    # Step 12: construct the report
    ################################################################################################################
    _close_all()

    result = dict()
    result['combined_equipment'] = dict()
    result['combined_equipment']['name'] = combined_equipment['name']

    result['base_period_input'] = dict()
    result['base_period_input']['names'] = list()
    result['base_period_input']['units'] = list()
    result['base_period_input']['timestamps'] = list()
    result['base_period_input']['values'] = list()
    result['base_period_input']['subtotals'] = list()
    if energy_category_set_input is not None and len(energy_category_set_input) > 0:
        for energy_category_id in energy_category_set_input:
            result['base_period_input']['names'].append(
                energy_category_dict[energy_category_id]['name'])
            result['base_period_input']['units'].append(
                energy_category_dict[energy_category_id]['unit_of_measure'])
            result['base_period_input']['timestamps'].append(
                base_input[energy_category_id]['timestamps'])
            result['base_period_input']['values'].append(
                base_input[energy_category_id]['values'])
            result['base_period_input']['subtotals'].append(
                base_input[energy_category_id]['subtotal'])

    result['base_period_output'] = dict()
    result['base_period_output']['names'] = list()
    result['base_period_output']['units'] = list()
    result['base_period_output']['timestamps'] = list()
    result['base_period_output']['values'] = list()
    result['base_period_output']['subtotals'] = list()
    if energy_category_set_output is not None and len(energy_category_set_output) > 0:
        for energy_category_id in energy_category_set_output:
            result['base_period_output']['names'].append(
                energy_category_dict[energy_category_id]['name'])
            result['base_period_output']['units'].append(
                energy_category_dict[energy_category_id]['unit_of_measure'])
            result['base_period_output']['timestamps'].append(
                base_output[energy_category_id]['timestamps'])
            result['base_period_output']['values'].append(
                base_output[energy_category_id]['values'])
            result['base_period_output']['subtotals'].append(
                base_output[energy_category_id]['subtotal'])

    result['base_period_efficiency'] = dict()
    result['base_period_efficiency']['names'] = list()
    result['base_period_efficiency']['units'] = list()
    result['base_period_efficiency']['timestamps'] = list()
    result['base_period_efficiency']['values'] = list()
    result['base_period_efficiency']['cumulations'] = list()
    if energy_category_set_output is not None and len(energy_category_set_output) > 0:
        # one efficiency series per (output category, input category) pair
        for energy_category_id_output in energy_category_set_output:
            for energy_category_id_input in energy_category_set_input:
                result['base_period_efficiency']['names'].append(
                    energy_category_dict[energy_category_id_output]['name'] + '/' +
                    energy_category_dict[energy_category_id_input]['name'])
                result['base_period_efficiency']['units'].append(
                    energy_category_dict[energy_category_id_output]['unit_of_measure'] + '/' +
                    energy_category_dict[energy_category_id_input]['unit_of_measure'])
                result['base_period_efficiency']['timestamps'].append(
                    base_output[energy_category_id_output]['timestamps'])
                efficiency_values = list()
                for i in range(len(base_output[energy_category_id_output]['timestamps'])):
                    numerator = base_output[energy_category_id_output]['values'][i]
                    denominator = base_input[energy_category_id_input]['values'][i]
                    efficiency_values.append(
                        numerator / denominator if denominator > Decimal(0.0) else None)
                result['base_period_efficiency']['values'].append(efficiency_values)
                base_cumulation = \
                    (base_output[energy_category_id_output]['subtotal'] /
                     base_input[energy_category_id_input]['subtotal']) if \
                    base_input[energy_category_id_input]['subtotal'] > Decimal(0.0) else None
                result['base_period_efficiency']['cumulations'].append(base_cumulation)

    result['reporting_period_input'] = dict()
    result['reporting_period_input']['names'] = list()
    result['reporting_period_input']['energy_category_ids'] = list()
    result['reporting_period_input']['units'] = list()
    result['reporting_period_input']['timestamps'] = list()
    result['reporting_period_input']['values'] = list()
    result['reporting_period_input']['subtotals'] = list()
    result['reporting_period_input']['increment_rates'] = list()
    if energy_category_set_input is not None and len(energy_category_set_input) > 0:
        for energy_category_id in energy_category_set_input:
            result['reporting_period_input']['names'].append(
                energy_category_dict[energy_category_id]['name'])
            result['reporting_period_input']['energy_category_ids'].append(energy_category_id)
            result['reporting_period_input']['units'].append(
                energy_category_dict[energy_category_id]['unit_of_measure'])
            result['reporting_period_input']['timestamps'].append(
                reporting_input[energy_category_id]['timestamps'])
            result['reporting_period_input']['values'].append(
                reporting_input[energy_category_id]['values'])
            result['reporting_period_input']['subtotals'].append(
                reporting_input[energy_category_id]['subtotal'])
            result['reporting_period_input']['increment_rates'].append(
                (reporting_input[energy_category_id]['subtotal'] -
                 base_input[energy_category_id]['subtotal']) /
                base_input[energy_category_id]['subtotal']
                if base_input[energy_category_id]['subtotal'] > Decimal(0.0) else None)

    result['reporting_period_output'] = dict()
    result['reporting_period_output']['names'] = list()
    result['reporting_period_output']['energy_category_ids'] = list()
    result['reporting_period_output']['units'] = list()
    result['reporting_period_output']['timestamps'] = list()
    result['reporting_period_output']['values'] = list()
    result['reporting_period_output']['subtotals'] = list()
    result['reporting_period_output']['increment_rates'] = list()
    if energy_category_set_output is not None and len(energy_category_set_output) > 0:
        for energy_category_id in energy_category_set_output:
            result['reporting_period_output']['names'].append(
                energy_category_dict[energy_category_id]['name'])
            result['reporting_period_output']['energy_category_ids'].append(energy_category_id)
            result['reporting_period_output']['units'].append(
                energy_category_dict[energy_category_id]['unit_of_measure'])
            result['reporting_period_output']['timestamps'].append(
                reporting_output[energy_category_id]['timestamps'])
            result['reporting_period_output']['values'].append(
                reporting_output[energy_category_id]['values'])
            result['reporting_period_output']['subtotals'].append(
                reporting_output[energy_category_id]['subtotal'])
            result['reporting_period_output']['increment_rates'].append(
                (reporting_output[energy_category_id]['subtotal'] -
                 base_output[energy_category_id]['subtotal']) /
                base_output[energy_category_id]['subtotal']
                if base_output[energy_category_id]['subtotal'] > Decimal(0.0) else None)

    result['reporting_period_efficiency'] = dict()
    result['reporting_period_efficiency']['names'] = list()
    result['reporting_period_efficiency']['units'] = list()
    result['reporting_period_efficiency']['timestamps'] = list()
    result['reporting_period_efficiency']['values'] = list()
    result['reporting_period_efficiency']['cumulations'] = list()
    result['reporting_period_efficiency']['increment_rates'] = list()
    if energy_category_set_output is not None and len(energy_category_set_output) > 0:
        for energy_category_id_output in energy_category_set_output:
            for energy_category_id_input in energy_category_set_input:
                result['reporting_period_efficiency']['names'].append(
                    energy_category_dict[energy_category_id_output]['name'] + '/' +
                    energy_category_dict[energy_category_id_input]['name'])
                result['reporting_period_efficiency']['units'].append(
                    energy_category_dict[energy_category_id_output]['unit_of_measure'] + '/' +
                    energy_category_dict[energy_category_id_input]['unit_of_measure'])
                result['reporting_period_efficiency']['timestamps'].append(
                    reporting_output[energy_category_id_output]['timestamps'])
                efficiency_values = list()
                for i in range(len(reporting_output[energy_category_id_output]['timestamps'])):
                    numerator = reporting_output[energy_category_id_output]['values'][i]
                    denominator = reporting_input[energy_category_id_input]['values'][i]
                    efficiency_values.append(
                        numerator / denominator if denominator > Decimal(0.0) else None)
                result['reporting_period_efficiency']['values'].append(efficiency_values)
                base_cumulation = \
                    (base_output[energy_category_id_output]['subtotal'] /
                     base_input[energy_category_id_input]['subtotal']) if \
                    base_input[energy_category_id_input]['subtotal'] > Decimal(0.0) else None
                reporting_cumulation = \
                    (reporting_output[energy_category_id_output]['subtotal'] /
                     reporting_input[energy_category_id_input]['subtotal']) if \
                    reporting_input[energy_category_id_input]['subtotal'] > Decimal(0.0) else None
                result['reporting_period_efficiency']['cumulations'].append(reporting_cumulation)
                # BUGFIX: either cumulation may be None when its period had no
                # input energy; the old comparison raised TypeError on None
                if base_cumulation is not None and base_cumulation > Decimal(0.0) and \
                        reporting_cumulation is not None:
                    increment_rate = (reporting_cumulation - base_cumulation) / base_cumulation
                else:
                    increment_rate = None
                result['reporting_period_efficiency']['increment_rates'].append(increment_rate)

    result['parameters'] = {
        "names": parameters_data['names'],
        "timestamps": parameters_data['timestamps'],
        "values": parameters_data['values']}

    resp.body = json.dumps(result)
def on_delete(req, resp, id_):
    """Delete an offline meter by id.

    The meter may only be removed when nothing references it: virtual
    meters, spaces, combined equipments (and their parameters), tenants,
    stores, shopfloors, equipments (and their parameters) and energy flow
    diagram links are all checked first.  Responds 204 on success.

    Raises falcon.HTTPError 400 for an invalid id or an existing relation,
    404 when the meter does not exist.
    """
    if not id_.isdigit() or int(id_) <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_OFFLINE_METER_ID')

    cnx = mysql.connector.connect(**config.myems_system_db)
    cursor = cnx.cursor()

    def _abort(status, title, description):
        # Release the database resources, then raise the HTTP error.
        cursor.close()
        cnx.disconnect()
        raise falcon.HTTPError(status, title=title, description=description)

    def _abort_if_related(query, params, description):
        # Raise 400 when the given query finds any referencing rows.
        cursor.execute(query, params)
        rows = cursor.fetchall()
        if rows is not None and len(rows) > 0:
            _abort(falcon.HTTP_400, 'API.BAD_REQUEST', description)

    cursor.execute(" SELECT uuid "
                   " FROM tbl_offline_meters "
                   " WHERE id = %s ", (id_,))
    row = cursor.fetchone()
    if row is None:
        _abort(falcon.HTTP_404, 'API.NOT_FOUND', 'API.OFFLINE_METER_NOT_FOUND')
    # the uuid is needed because some tables reference meters by uuid, not id
    offline_meter_uuid = row[0]

    # check if this offline meter is being used by virtual meters
    cursor.execute(" SELECT vm.name "
                   " FROM tbl_variables va, tbl_expressions ex, tbl_virtual_meters vm "
                   " WHERE va.meter_id = %s AND va.meter_type = 'offline_meter' AND va.expression_id = ex.id "
                   " AND ex.virtual_meter_id = vm.id ", (id_,))
    if cursor.fetchone() is not None:
        _abort(falcon.HTTP_400, 'API.BAD_REQUEST',
               'API.THIS_OFFLINE_METER_IS_BEING_USED_BY_A_VIRTUAL_METER')

    # check relationship with spaces
    _abort_if_related(" SELECT id "
                      " FROM tbl_spaces_offline_meters "
                      " WHERE offline_meter_id = %s ", (id_,),
                      'API.THERE_IS_RELATION_WITH_SPACES')

    # check relation with combined equipments
    _abort_if_related(" SELECT combined_equipment_id "
                      " FROM tbl_combined_equipments_offline_meters "
                      " WHERE offline_meter_id = %s ", (id_,),
                      'API.THERE_IS_RELATION_WITH_COMBINED_EQUIPMENTS')

    # check relation with combined equipment parameters (referenced by uuid)
    _abort_if_related(" SELECT combined_equipment_id "
                      " FROM tbl_combined_equipments_parameters "
                      " WHERE numerator_meter_uuid = %s OR denominator_meter_uuid = %s",
                      (offline_meter_uuid, offline_meter_uuid,),
                      'API.THERE_IS_RELATION_WITH_COMBINED_EQUIPMENT_PARAMETERS')

    # check relations with tenants
    _abort_if_related(" SELECT tenant_id "
                      " FROM tbl_tenants_offline_meters "
                      " WHERE offline_meter_id = %s ", (id_,),
                      'API.THERE_IS_RELATION_WITH_TENANTS')

    # check relations with stores
    _abort_if_related(" SELECT store_id "
                      " FROM tbl_stores_offline_meters "
                      " WHERE offline_meter_id = %s ", (id_,),
                      'API.THERE_IS_RELATION_WITH_STORES')

    # check relations with shopfloors
    _abort_if_related(" SELECT shopfloor_id "
                      " FROM tbl_shopfloors_offline_meters "
                      " WHERE offline_meter_id = %s ", (id_,),
                      'API.THERE_IS_RELATION_WITH_SHOPFLOORS')

    # check relations with equipments
    # NOTE(review): this description says RELATIONSHIP while the sibling
    # checks say RELATION; kept byte-identical for API compatibility.
    _abort_if_related(" SELECT equipment_id "
                      " FROM tbl_equipments_offline_meters "
                      " WHERE offline_meter_id = %s ", (id_,),
                      'API.THERE_IS_RELATIONSHIP_WITH_EQUIPMENTS')

    # check relation with equipment parameters (referenced by uuid)
    _abort_if_related(" SELECT equipment_id "
                      " FROM tbl_equipments_parameters "
                      " WHERE numerator_meter_uuid = %s OR denominator_meter_uuid = %s",
                      (offline_meter_uuid, offline_meter_uuid,),
                      'API.THERE_IS_RELATION_WITH_EQUIPMENT_PARAMETERS')

    # check relation with energy flow diagram links (referenced by uuid)
    _abort_if_related(" SELECT id "
                      " FROM tbl_energy_flow_diagrams_links "
                      " WHERE meter_uuid = %s ", (offline_meter_uuid,),
                      'API.THERE_IS_RELATION_WITH_ENERGY_FLOW_DIAGRAM_LINKS')

    cursor.execute(" DELETE FROM tbl_offline_meters WHERE id = %s ", (id_,))
    cnx.commit()
    cursor.close()
    cnx.disconnect()

    resp.status = falcon.HTTP_204
def on_post(req, resp):
    """Handles POST requests: create a new offline meter.

    Expects a JSON body of the form ``{"data": {...}}``; validates every
    field, checks foreign keys, inserts the row and responds 201 with the
    Location header of the new resource.
    """
    try:
        raw_json = req.stream.read().decode('utf-8')
    except Exception as ex:
        # BUGFIX: description must be a string; the original passed the
        # exception object itself, which falcon cannot serialize.
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.ERROR',
                               description=str(ex))

    new_values = json.loads(raw_json)

    if 'name' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['name'], str) or \
            len(str.strip(new_values['data']['name'])) == 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_OFFLINE_METER_NAME')
    name = str.strip(new_values['data']['name'])

    if 'energy_category_id' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['energy_category_id'], int) or \
            new_values['data']['energy_category_id'] <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_ENERGY_CATEGORY_ID')
    energy_category_id = new_values['data']['energy_category_id']

    if 'is_counted' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['is_counted'], bool):
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_IS_COUNTED_VALUE')
    is_counted = new_values['data']['is_counted']

    # limits may be int or float (JSON numbers arrive as either)
    if 'hourly_low_limit' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['hourly_low_limit'], (float, int)):
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_HOURLY_LOW_LIMIT_VALUE')
    hourly_low_limit = new_values['data']['hourly_low_limit']

    if 'hourly_high_limit' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['hourly_high_limit'], (float, int)):
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_HOURLY_HIGH_LIMIT_VALUE')
    hourly_high_limit = new_values['data']['hourly_high_limit']

    if 'cost_center_id' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['cost_center_id'], int) or \
            new_values['data']['cost_center_id'] <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_COST_CENTER_ID')
    cost_center_id = new_values['data']['cost_center_id']

    # energy_item_id is optional; when present it must be a positive int
    if 'energy_item_id' in new_values['data'].keys() and \
            new_values['data']['energy_item_id'] is not None:
        if not isinstance(new_values['data']['energy_item_id'], int) or \
                new_values['data']['energy_item_id'] <= 0:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_ENERGY_ITEM_ID')
        energy_item_id = new_values['data']['energy_item_id']
    else:
        energy_item_id = None

    # description is optional free text
    if 'description' in new_values['data'].keys() and \
            new_values['data']['description'] is not None and \
            len(str(new_values['data']['description'])) > 0:
        description = str.strip(new_values['data']['description'])
    else:
        description = None

    cnx = mysql.connector.connect(**config.myems_system_db)
    cursor = cnx.cursor()

    # the new name must be unique
    cursor.execute(" SELECT name "
                   " FROM tbl_offline_meters "
                   " WHERE name = %s ", (name,))
    if cursor.fetchone() is not None:
        cursor.close()
        cnx.disconnect()
        # NOTE(review): status 404 with a BAD_REQUEST title is inconsistent;
        # kept as-is to avoid changing behavior clients may depend on.
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.BAD_REQUEST',
                               description='API.OFFLINE_METER_NAME_IS_ALREADY_IN_USE')

    # the energy category must exist
    cursor.execute(" SELECT name "
                   " FROM tbl_energy_categories "
                   " WHERE id = %s ", (energy_category_id,))
    if cursor.fetchone() is None:
        cursor.close()
        cnx.disconnect()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.ENERGY_CATEGORY_NOT_FOUND')

    # the optional energy item must exist and belong to the energy category
    if energy_item_id is not None:
        cursor.execute(" SELECT name, energy_category_id "
                       " FROM tbl_energy_items "
                       " WHERE id = %s ", (energy_item_id,))
        row = cursor.fetchone()
        if row is None:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.NOT_FOUND',
                                   description='API.ENERGY_ITEM_NOT_FOUND')
        if row[1] != energy_category_id:
            cursor.close()
            cnx.disconnect()
            raise falcon.HTTPError(falcon.HTTP_404,
                                   title='API.BAD_REQUEST',
                                   description='API.ENERGY_ITEM_IS_NOT_BELONG_TO_ENERGY_CATEGORY')

    # the cost center must exist
    cursor.execute(" SELECT name "
                   " FROM tbl_cost_centers "
                   " WHERE id = %s ", (cost_center_id,))
    row = cursor.fetchone()
    if row is None:
        cursor.close()
        cnx.disconnect()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.COST_CENTER_NOT_FOUND')

    add_values = (" INSERT INTO tbl_offline_meters "
                  " (name, uuid, energy_category_id, "
                  " is_counted, hourly_low_limit, hourly_high_limit, "
                  " cost_center_id, energy_item_id, description) "
                  " VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) ")
    cursor.execute(add_values, (name,
                                str(uuid.uuid4()),
                                energy_category_id,
                                is_counted,
                                hourly_low_limit,
                                hourly_high_limit,
                                cost_center_id,
                                energy_item_id,
                                description))
    new_id = cursor.lastrowid
    cnx.commit()
    cursor.close()
    cnx.disconnect()

    resp.status = falcon.HTTP_201
    resp.location = '/offlinemeters/' + str(new_id)
def on_get(req, resp):
    """Generate the shopfloor statistics report.

    Query parameters:
      shopfloorid                  -- positive integer shopfloor id
      periodtype                   -- one of hourly/daily/monthly/yearly
      baseperiodstartdatetime      -- optional local 'YYYY-MM-DDTHH:MM:SS'
      baseperiodenddatetime        -- optional local 'YYYY-MM-DDTHH:MM:SS'
      reportingperiodstartdatetime -- required local 'YYYY-MM-DDTHH:MM:SS'
      reportingperiodenddatetime   -- required local 'YYYY-MM-DDTHH:MM:SS'

    Responds with a JSON report containing base/reporting period energy
    input statistics per energy category, tariff parameters and associated
    sensor/point trend data for the reporting period.
    """
    # NOTE: removed leftover debug statement `print(req.params)`
    shopfloor_id = req.params.get('shopfloorid')
    period_type = req.params.get('periodtype')
    base_start_datetime_local = req.params.get('baseperiodstartdatetime')
    base_end_datetime_local = req.params.get('baseperiodenddatetime')
    reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime')
    reporting_end_datetime_local = req.params.get('reportingperiodenddatetime')

    ################################################################################################################
    # Step 1: valid parameters
    ################################################################################################################
    if shopfloor_id is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_SHOPFLOOR_ID')
    shopfloor_id = str.strip(shopfloor_id)
    if not shopfloor_id.isdigit() or int(shopfloor_id) <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_SHOPFLOOR_ID')

    if period_type is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_PERIOD_TYPE')
    period_type = str.strip(period_type)
    if period_type not in ['hourly', 'daily', 'monthly', 'yearly']:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_PERIOD_TYPE')

    # config.utc_offset looks like '+08:00'; convert it to signed minutes
    timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
    if config.utc_offset[0] == '-':
        timezone_offset = -timezone_offset

    def _parse_to_utc(local_string, error_description):
        # Parse a local 'YYYY-MM-DDTHH:MM:SS' string into an aware UTC datetime.
        try:
            return datetime.strptime(local_string, '%Y-%m-%dT%H:%M:%S'). \
                replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description=error_description)

    base_start_datetime_utc = None
    if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0:
        base_start_datetime_utc = _parse_to_utc(str.strip(base_start_datetime_local),
                                                "API.INVALID_BASE_PERIOD_START_DATETIME")

    base_end_datetime_utc = None
    if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0:
        base_end_datetime_utc = _parse_to_utc(str.strip(base_end_datetime_local),
                                              "API.INVALID_BASE_PERIOD_END_DATETIME")

    if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
            base_start_datetime_utc >= base_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_BASE_PERIOD_END_DATETIME')

    if reporting_start_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
    reporting_start_datetime_utc = _parse_to_utc(str.strip(reporting_start_datetime_local),
                                                 "API.INVALID_REPORTING_PERIOD_START_DATETIME")

    if reporting_end_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
    reporting_end_datetime_utc = _parse_to_utc(str.strip(reporting_end_datetime_local),
                                               "API.INVALID_REPORTING_PERIOD_END_DATETIME")

    if reporting_start_datetime_utc >= reporting_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

    ################################################################################################################
    # Step 2: query the shopfloor
    ################################################################################################################
    cnx_system = mysql.connector.connect(**config.myems_system_db)
    cursor_system = cnx_system.cursor()
    cnx_energy = mysql.connector.connect(**config.myems_energy_db)
    cursor_energy = cnx_energy.cursor()
    cnx_historical = mysql.connector.connect(**config.myems_historical_db)
    cursor_historical = cnx_historical.cursor()

    def _close_all():
        # Release every cursor and connection.
        # BUGFIX: the original swapped the historical objects
        # (cnx_historical.close() / cursor_historical.disconnect());
        # cursors have no disconnect(), so that raised AttributeError.
        cursor_system.close()
        cnx_system.disconnect()
        cursor_energy.close()
        cnx_energy.disconnect()
        cursor_historical.close()
        cnx_historical.disconnect()

    cursor_system.execute(" SELECT id, name, area, cost_center_id "
                          " FROM tbl_shopfloors "
                          " WHERE id = %s ", (shopfloor_id,))
    row_shopfloor = cursor_system.fetchone()
    if row_shopfloor is None:
        _close_all()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.SHOPFLOOR_NOT_FOUND')

    shopfloor = {'id': row_shopfloor[0],
                 'name': row_shopfloor[1],
                 'area': row_shopfloor[2],
                 'cost_center_id': row_shopfloor[3]}

    ################################################################################################################
    # Step 3: query energy categories
    ################################################################################################################
    energy_category_set = set()
    # collect the energy categories consumed in the base and reporting periods
    # BUGFIX: the original used 'or' between the None check and the length
    # check, which would call len(None) whenever fetchall() returned None.
    for period_start, period_end in ((base_start_datetime_utc, base_end_datetime_utc),
                                     (reporting_start_datetime_utc, reporting_end_datetime_utc)):
        cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                              " FROM tbl_shopfloor_input_category_hourly "
                              " WHERE shopfloor_id = %s "
                              " AND start_datetime_utc >= %s "
                              " AND start_datetime_utc < %s ",
                              (shopfloor['id'], period_start, period_end))
        rows_energy_categories = cursor_energy.fetchall()
        if rows_energy_categories is not None and len(rows_energy_categories) > 0:
            for row_energy_category in rows_energy_categories:
                energy_category_set.add(row_energy_category[0])

    # load the metadata of all energy categories and keep the consumed ones
    cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                          " FROM tbl_energy_categories "
                          " ORDER BY id ", )
    rows_energy_categories = cursor_system.fetchall()
    if rows_energy_categories is None or len(rows_energy_categories) == 0:
        _close_all()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.ENERGY_CATEGORY_NOT_FOUND')

    energy_category_dict = dict()
    for row_energy_category in rows_energy_categories:
        if row_energy_category[0] in energy_category_set:
            energy_category_dict[row_energy_category[0]] = {
                "name": row_energy_category[1],
                "unit_of_measure": row_energy_category[2],
                "kgce": row_energy_category[3],
                "kgco2e": row_energy_category[4]}

    ################################################################################################################
    # Step 4: query associated sensors
    ################################################################################################################
    point_list = list()
    cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                          " FROM tbl_shopfloors st, tbl_sensors se, tbl_shopfloors_sensors ss, "
                          " tbl_points p, tbl_sensors_points sp "
                          " WHERE st.id = %s AND st.id = ss.shopfloor_id AND ss.sensor_id = se.id "
                          " AND se.id = sp.sensor_id AND sp.point_id = p.id "
                          " ORDER BY p.id ", (shopfloor['id'],))
    rows_points = cursor_system.fetchall()
    if rows_points is not None and len(rows_points) > 0:
        for row in rows_points:
            point_list.append({"id": row[0],
                               "name": row[1],
                               "units": row[2],
                               "object_type": row[3]})

    ################################################################################################################
    # Step 5: query associated points
    ################################################################################################################
    cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                          " FROM tbl_shopfloors s, tbl_shopfloors_points sp, tbl_points p "
                          " WHERE s.id = %s AND s.id = sp.shopfloor_id AND sp.point_id = p.id "
                          " ORDER BY p.id ", (shopfloor['id'],))
    rows_points = cursor_system.fetchall()
    if rows_points is not None and len(rows_points) > 0:
        for row in rows_points:
            point_list.append({"id": row[0],
                               "name": row[1],
                               "units": row[2],
                               "object_type": row[3]})

    ################################################################################################################
    # Steps 6 & 7: query base period and reporting period energy input
    ################################################################################################################
    # how period bucket timestamps are rendered in the response
    timestamp_format = {'hourly': '%Y-%m-%dT%H:%M:%S',
                        'daily': '%Y-%m-%d',
                        'monthly': '%Y-%m',
                        'yearly': '%Y'}[period_type]

    def _query_energy_input(start_datetime_utc, end_datetime_utc):
        # Aggregate hourly energy input into period buckets with statistics
        # for every energy category consumed by this shopfloor.
        period = dict()
        for energy_category_id in energy_category_set:
            item = {'timestamps': list(),
                    'values': list(),
                    'subtotal': Decimal(0.0),
                    'mean': None, 'median': None, 'minimum': None,
                    'maximum': None, 'stdev': None, 'variance': None}
            cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                  " FROM tbl_shopfloor_input_category_hourly "
                                  " WHERE shopfloor_id = %s "
                                  " AND energy_category_id = %s "
                                  " AND start_datetime_utc >= %s "
                                  " AND start_datetime_utc < %s "
                                  " ORDER BY start_datetime_utc ",
                                  (shopfloor['id'], energy_category_id,
                                   start_datetime_utc, end_datetime_utc))
            rows_shopfloor_hourly = cursor_energy.fetchall()

            rows_shopfloor_periodically, \
                item['mean'], item['median'], item['minimum'], \
                item['maximum'], item['stdev'], item['variance'] = \
                utilities.statistics_hourly_data_by_period(rows_shopfloor_hourly,
                                                           start_datetime_utc,
                                                           end_datetime_utc,
                                                           period_type)
            for row_periodically in rows_shopfloor_periodically:
                current_datetime_local = row_periodically[0].replace(tzinfo=timezone.utc) + \
                    timedelta(minutes=timezone_offset)
                # missing hourly values count as zero in the series/subtotal
                actual_value = Decimal(0.0) if row_periodically[1] is None else row_periodically[1]
                item['timestamps'].append(current_datetime_local.strftime(timestamp_format))
                item['values'].append(actual_value)
                item['subtotal'] += actual_value
            period[energy_category_id] = item
        return period

    base = _query_energy_input(base_start_datetime_utc, base_end_datetime_utc)
    reporting = _query_energy_input(reporting_start_datetime_utc, reporting_end_datetime_utc)

    ################################################################################################################
    # Step 8: query tariff data
    ################################################################################################################
    parameters_data = dict()
    parameters_data['names'] = list()
    parameters_data['timestamps'] = list()
    parameters_data['values'] = list()
    for energy_category_id in energy_category_set:
        energy_category_tariff_dict = utilities.get_energy_category_tariffs(
            shopfloor['cost_center_id'], energy_category_id,
            reporting_start_datetime_utc, reporting_end_datetime_utc)
        tariff_timestamp_list = list()
        tariff_value_list = list()
        for k, v in energy_category_tariff_dict.items():
            # convert the tariff switching time from UTC to local;
            # (removed the redundant doubled [0:19] slice)
            k = k + timedelta(minutes=timezone_offset)
            tariff_timestamp_list.append(k.isoformat()[0:19])
            tariff_value_list.append(v)
        parameters_data['names'].append('TARIFF-' + energy_category_dict[energy_category_id]['name'])
        parameters_data['timestamps'].append(tariff_timestamp_list)
        parameters_data['values'].append(tariff_value_list)

    ################################################################################################################
    # Step 9: query associated sensors and points data
    ################################################################################################################
    # map point object types to their historical value tables
    point_value_tables = {'ANALOG_VALUE': 'tbl_analog_value',
                          'ENERGY_VALUE': 'tbl_energy_value',
                          'DIGITAL_VALUE': 'tbl_digital_value'}
    for point in point_list:
        point_values = []
        point_timestamps = []
        table_name = point_value_tables.get(point['object_type'])
        if table_name is not None:
            # BUGFIX: the original DIGITAL_VALUE query had no ORDER BY,
            # which could yield an unordered trend series.
            query = (" SELECT utc_date_time, actual_value "
                     " FROM " + table_name +
                     " WHERE point_id = %s "
                     " AND utc_date_time BETWEEN %s AND %s "
                     " ORDER BY utc_date_time ")
            cursor_historical.execute(query, (point['id'],
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()
            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    point_timestamps.append(current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S'))
                    point_values.append(row[1])
        parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
        parameters_data['timestamps'].append(point_timestamps)
        parameters_data['values'].append(point_values)

    ################################################################################################################
    # Step 10: construct the report
    ################################################################################################################
    # all queries are done; release every database resource
    # (BUGFIX: the original never closed the historical cursor/connection here)
    _close_all()

    statistic_names = ('mean', 'median', 'minimum', 'maximum', 'stdev', 'variance')

    def _per_unit_area(value):
        # Normalize a statistic by the shopfloor area when both are usable.
        if value is not None and shopfloor['area'] is not None and \
                shopfloor['area'] > Decimal(0.0):
            return value / shopfloor['area']
        return None

    def _increment_rate(reporting_value, base_value):
        # Relative change from base to reporting period; None when undefined.
        # BUGFIX: also require the reporting value to be present, otherwise
        # the subtraction raised TypeError on a None reporting statistic.
        if reporting_value is not None and base_value is not None and \
                base_value > Decimal(0.0):
            return (reporting_value - base_value) / base_value
        return None

    result = dict()
    result['shopfloor'] = {'name': shopfloor['name'],
                           'area': shopfloor['area']}

    result['base_period'] = {'names': [], 'units': [],
                             'timestamps': [], 'values': [], 'subtotals': []}
    for statistic in statistic_names:
        result['base_period'][statistic + 's'] = []

    for energy_category_id in energy_category_set:
        category = energy_category_dict[energy_category_id]
        base_item = base[energy_category_id]
        result['base_period']['names'].append(category['name'])
        result['base_period']['units'].append(category['unit_of_measure'])
        result['base_period']['timestamps'].append(base_item['timestamps'])
        result['base_period']['values'].append(base_item['values'])
        result['base_period']['subtotals'].append(base_item['subtotal'])
        for statistic in statistic_names:
            result['base_period'][statistic + 's'].append(base_item[statistic])

    result['reporting_period'] = {'names': [], 'energy_category_ids': [],
                                  'units': [], 'timestamps': [],
                                  'values': [], 'subtotals': []}
    for statistic in statistic_names:
        result['reporting_period'][statistic + 's'] = []
        result['reporting_period'][statistic + 's_per_unit_area'] = []
        result['reporting_period'][statistic + 's_increment_rate'] = []

    for energy_category_id in energy_category_set:
        category = energy_category_dict[energy_category_id]
        base_item = base[energy_category_id]
        reporting_item = reporting[energy_category_id]
        result['reporting_period']['names'].append(category['name'])
        result['reporting_period']['energy_category_ids'].append(energy_category_id)
        result['reporting_period']['units'].append(category['unit_of_measure'])
        result['reporting_period']['timestamps'].append(reporting_item['timestamps'])
        result['reporting_period']['values'].append(reporting_item['values'])
        result['reporting_period']['subtotals'].append(reporting_item['subtotal'])
        for statistic in statistic_names:
            value = reporting_item[statistic]
            result['reporting_period'][statistic + 's'].append(value)
            result['reporting_period'][statistic + 's_per_unit_area'].append(_per_unit_area(value))
            result['reporting_period'][statistic + 's_increment_rate'].append(
                _increment_rate(value, base_item[statistic]))

    result['parameters'] = {"names": parameters_data['names'],
                            "timestamps": parameters_data['timestamps'],
                            "values": parameters_data['values']}

    resp.body = json.dumps(result)
def on_get(req, resp, id_):
    """Fetch one offline meter by id.

    Expands the meter's energy category, energy item and cost center
    references into embedded {id, name, uuid} objects and writes the
    combined record to the response as JSON.

    Raises HTTP 400 for a non-positive/non-numeric id and HTTP 404 when
    no offline meter with that id exists.
    """
    if not id_.isdigit() or int(id_) <= 0:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_OFFLINE_METER_ID')

    cnx = mysql.connector.connect(**config.myems_system_db)
    cursor = cnx.cursor(dictionary=True)

    # Build id -> {id, name, uuid} lookup tables for the three referenced entities.
    query = (" SELECT id, name, uuid "
             " FROM tbl_energy_categories ")
    cursor.execute(query)
    energy_category_dict = {
        r['id']: {"id": r['id'], "name": r['name'], "uuid": r['uuid']}
        for r in (cursor.fetchall() or [])
    }

    query = (" SELECT id, name, uuid "
             " FROM tbl_energy_items ")
    cursor.execute(query)
    energy_item_dict = {
        r['id']: {"id": r['id'], "name": r['name'], "uuid": r['uuid']}
        for r in (cursor.fetchall() or [])
    }

    query = (" SELECT id, name, uuid "
             " FROM tbl_cost_centers ")
    cursor.execute(query)
    cost_center_dict = {
        r['id']: {"id": r['id'], "name": r['name'], "uuid": r['uuid']}
        for r in (cursor.fetchall() or [])
    }

    query = (" SELECT id, name, uuid, energy_category_id, "
             " is_counted, hourly_low_limit, hourly_high_limit, "
             " energy_item_id, cost_center_id, description "
             " FROM tbl_offline_meters "
             " WHERE id = %s ")
    cursor.execute(query, (id_, ))
    row = cursor.fetchone()
    cursor.close()
    cnx.disconnect()

    if row is None:
        raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                               description='API.OFFLINE_METER_NOT_FOUND')

    meta_result = {
        "id": row['id'],
        "name": row['name'],
        "uuid": row['uuid'],
        "energy_category": energy_category_dict.get(row['energy_category_id'], None),
        "is_counted": bool(row['is_counted']),
        "hourly_low_limit": row['hourly_low_limit'],
        "hourly_high_limit": row['hourly_high_limit'],
        "energy_item": energy_item_dict.get(row['energy_item_id'], None),
        "cost_center": cost_center_dict.get(row['cost_center_id'], None),
        "description": row['description']
    }
    resp.body = json.dumps(meta_result)
def on_put(req, resp, id_):
    """Handles PUT requests: rename an existing gateway.

    Expects a JSON body of the form {"data": {"name": "<new name>"}}.

    Raises HTTP 400 for an invalid id, an unreadable or malformed JSON
    body, a missing/blank name, or a name already used by another
    gateway; HTTP 404 when the gateway does not exist.
    """
    access_control(req)
    try:
        raw_json = req.stream.read().decode('utf-8')
        # Parse inside the try: a malformed body must yield a 400,
        # not an unhandled JSONDecodeError (HTTP 500).
        new_values = json.loads(raw_json)
    except Exception as ex:
        raise falcon.HTTPError(falcon.HTTP_400, 'API.ERROR', ex)

    if not id_.isdigit() or int(id_) <= 0:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_GATEWAY_ID')

    # Guard the 'data' envelope too; a missing key used to raise KeyError (HTTP 500).
    if 'data' not in new_values or \
            'name' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['name'], str) or \
            len(str.strip(new_values['data']['name'])) == 0:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_GATEWAY_NAME')
    name = str.strip(new_values['data']['name'])

    cnx = mysql.connector.connect(**config.myems_system_db)
    cursor = cnx.cursor()

    cursor.execute(" SELECT name "
                   " FROM tbl_gateways "
                   " WHERE id = %s ", (id_, ))
    if cursor.fetchone() is None:
        cursor.close()
        cnx.close()
        raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                               description='API.GATEWAY_NOT_FOUND')

    cursor.execute(" SELECT name "
                   " FROM tbl_gateways "
                   " WHERE name = %s AND id != %s ", (name, id_))
    if cursor.fetchone() is not None:
        cursor.close()
        cnx.close()
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.GATEWAY_NAME_IS_ALREADY_IN_USE')

    update_row = (" UPDATE tbl_gateways "
                  " SET name = %s "
                  " WHERE id = %s ")
    cursor.execute(update_row, (name,
                                id_, ))
    cnx.commit()
    cursor.close()
    cnx.close()

    resp.status = falcon.HTTP_200
def on_get(req, resp):
    """Report a tenant's energy input broken down by energy category.

    Query string parameters:
        tenantid / tenantuuid                                 -- tenant identifier (at least one required)
        periodtype                                            -- 'hourly' | 'daily' | 'weekly' | 'monthly' | 'yearly'
        baseperiodstartdatetime / baseperiodenddatetime       -- optional base period (local time)
        reportingperiodstartdatetime / reportingperiodenddatetime -- required reporting period (local time)

    The JSON response carries base-period and reporting-period series,
    subtotals (also expressed in kgce/kgco2e), tariff curves, associated
    sensor/point trends, and a base64-encoded Excel export of the report.
    """
    tenant_id = req.params.get('tenantid')
    tenant_uuid = req.params.get('tenantuuid')
    period_type = req.params.get('periodtype')
    base_start_datetime_local = req.params.get('baseperiodstartdatetime')
    base_end_datetime_local = req.params.get('baseperiodenddatetime')
    reporting_start_datetime_local = req.params.get('reportingperiodstartdatetime')
    reporting_end_datetime_local = req.params.get('reportingperiodenddatetime')
    ################################################################################################################
    # Step 1: valid parameters
    ################################################################################################################
    if tenant_id is None and tenant_uuid is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_TENANT_ID')

    if tenant_id is not None:
        tenant_id = str.strip(tenant_id)
        if not tenant_id.isdigit() or int(tenant_id) <= 0:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_TENANT_ID')

    if tenant_uuid is not None:
        # UUID v4; raw string avoids the deprecated '\Z' escape warning.
        regex = re.compile(r'^[a-f0-9]{8}-?[a-f0-9]{4}-?4[a-f0-9]{3}-?[89ab][a-f0-9]{3}-?[a-f0-9]{12}\Z', re.I)
        match = regex.match(str.strip(tenant_uuid))
        if not bool(match):
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_TENANT_UUID')

    if period_type is None:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_PERIOD_TYPE')
    else:
        period_type = str.strip(period_type)
        if period_type not in ['hourly', 'daily', 'weekly', 'monthly', 'yearly']:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description='API.INVALID_PERIOD_TYPE')

    timezone_offset = int(config.utc_offset[1:3]) * 60 + int(config.utc_offset[4:6])
    if config.utc_offset[0] == '-':
        timezone_offset = -timezone_offset

    # Local datetimes are naive; convert to UTC by subtracting the configured offset.
    base_start_datetime_utc = None
    if base_start_datetime_local is not None and len(str.strip(base_start_datetime_local)) > 0:
        base_start_datetime_local = str.strip(base_start_datetime_local)
        try:
            base_start_datetime_utc = datetime.strptime(base_start_datetime_local, '%Y-%m-%dT%H:%M:%S').\
                replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_BASE_PERIOD_START_DATETIME")

    base_end_datetime_utc = None
    if base_end_datetime_local is not None and len(str.strip(base_end_datetime_local)) > 0:
        base_end_datetime_local = str.strip(base_end_datetime_local)
        try:
            base_end_datetime_utc = datetime.strptime(base_end_datetime_local, '%Y-%m-%dT%H:%M:%S').\
                replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_BASE_PERIOD_END_DATETIME")

    if base_start_datetime_utc is not None and base_end_datetime_utc is not None and \
            base_start_datetime_utc >= base_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_BASE_PERIOD_END_DATETIME')

    if reporting_start_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_START_DATETIME")
    else:
        reporting_start_datetime_local = str.strip(reporting_start_datetime_local)
        try:
            reporting_start_datetime_utc = datetime.strptime(reporting_start_datetime_local, '%Y-%m-%dT%H:%M:%S').\
                replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_START_DATETIME")

    if reporting_end_datetime_local is None:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description="API.INVALID_REPORTING_PERIOD_END_DATETIME")
    else:
        reporting_end_datetime_local = str.strip(reporting_end_datetime_local)
        try:
            reporting_end_datetime_utc = datetime.strptime(reporting_end_datetime_local, '%Y-%m-%dT%H:%M:%S').\
                replace(tzinfo=timezone.utc) - timedelta(minutes=timezone_offset)
        except ValueError:
            raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                                   description="API.INVALID_REPORTING_PERIOD_END_DATETIME")

    if reporting_start_datetime_utc >= reporting_end_datetime_utc:
        raise falcon.HTTPError(falcon.HTTP_400, title='API.BAD_REQUEST',
                               description='API.INVALID_REPORTING_PERIOD_END_DATETIME')

    ################################################################################################################
    # Step 2: query the tenant
    ################################################################################################################
    cnx_system = mysql.connector.connect(**config.myems_system_db)
    cursor_system = cnx_system.cursor()

    cnx_energy = mysql.connector.connect(**config.myems_energy_db)
    cursor_energy = cnx_energy.cursor()

    cnx_historical = mysql.connector.connect(**config.myems_historical_db)
    cursor_historical = cnx_historical.cursor()

    row_tenant = None
    if tenant_id is not None:
        cursor_system.execute(" SELECT id, name, area, cost_center_id "
                              " FROM tbl_tenants "
                              " WHERE id = %s ", (tenant_id,))
        row_tenant = cursor_system.fetchone()
    elif tenant_uuid is not None:
        cursor_system.execute(" SELECT id, name, area, cost_center_id "
                              " FROM tbl_tenants "
                              " WHERE uuid = %s ", (tenant_uuid,))
        row_tenant = cursor_system.fetchone()

    if row_tenant is None:
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.close()
        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.close()
        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.close()
        raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                               description='API.TENANT_NOT_FOUND')

    tenant = dict()
    tenant['id'] = row_tenant[0]
    tenant['name'] = row_tenant[1]
    tenant['area'] = row_tenant[2]
    tenant['cost_center_id'] = row_tenant[3]

    ################################################################################################################
    # Step 3: query energy categories
    ################################################################################################################
    energy_category_set = set()
    # query energy categories in base period
    cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                          " FROM tbl_tenant_input_category_hourly "
                          " WHERE tenant_id = %s "
                          " AND start_datetime_utc >= %s "
                          " AND start_datetime_utc < %s ",
                          (tenant['id'], base_start_datetime_utc, base_end_datetime_utc))
    rows_energy_categories = cursor_energy.fetchall()
    # BUGFIX: was 'is not None or len(...) > 0', which would call len(None)
    # whenever the first operand short-circuited on a None result.
    if rows_energy_categories is not None and len(rows_energy_categories) > 0:
        for row_energy_category in rows_energy_categories:
            energy_category_set.add(row_energy_category[0])

    # query energy categories in reporting period
    cursor_energy.execute(" SELECT DISTINCT(energy_category_id) "
                          " FROM tbl_tenant_input_category_hourly "
                          " WHERE tenant_id = %s "
                          " AND start_datetime_utc >= %s "
                          " AND start_datetime_utc < %s ",
                          (tenant['id'], reporting_start_datetime_utc, reporting_end_datetime_utc))
    rows_energy_categories = cursor_energy.fetchall()
    if rows_energy_categories is not None and len(rows_energy_categories) > 0:
        for row_energy_category in rows_energy_categories:
            energy_category_set.add(row_energy_category[0])

    # query all energy categories in base period and reporting period
    cursor_system.execute(" SELECT id, name, unit_of_measure, kgce, kgco2e "
                          " FROM tbl_energy_categories "
                          " ORDER BY id ", )
    rows_energy_categories = cursor_system.fetchall()
    if rows_energy_categories is None or len(rows_energy_categories) == 0:
        if cursor_system:
            cursor_system.close()
        if cnx_system:
            cnx_system.close()
        if cursor_energy:
            cursor_energy.close()
        if cnx_energy:
            cnx_energy.close()
        if cursor_historical:
            cursor_historical.close()
        if cnx_historical:
            cnx_historical.close()
        raise falcon.HTTPError(falcon.HTTP_404, title='API.NOT_FOUND',
                               description='API.ENERGY_CATEGORY_NOT_FOUND')

    energy_category_dict = dict()
    for row_energy_category in rows_energy_categories:
        if row_energy_category[0] in energy_category_set:
            energy_category_dict[row_energy_category[0]] = {"name": row_energy_category[1],
                                                            "unit_of_measure": row_energy_category[2],
                                                            "kgce": row_energy_category[3],
                                                            "kgco2e": row_energy_category[4]}

    ################################################################################################################
    # Step 4: query associated sensors
    ################################################################################################################
    point_list = list()
    cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                          " FROM tbl_tenants t, tbl_sensors s, tbl_tenants_sensors ts, "
                          " tbl_points p, tbl_sensors_points sp "
                          " WHERE t.id = %s AND t.id = ts.tenant_id AND ts.sensor_id = s.id "
                          " AND s.id = sp.sensor_id AND sp.point_id = p.id "
                          " ORDER BY p.id ", (tenant['id'],))
    rows_points = cursor_system.fetchall()
    if rows_points is not None and len(rows_points) > 0:
        for row in rows_points:
            point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

    ################################################################################################################
    # Step 5: query associated points
    ################################################################################################################
    cursor_system.execute(" SELECT p.id, p.name, p.units, p.object_type "
                          " FROM tbl_tenants t, tbl_tenants_points tp, tbl_points p "
                          " WHERE t.id = %s AND t.id = tp.tenant_id AND tp.point_id = p.id "
                          " ORDER BY p.id ", (tenant['id'],))
    rows_points = cursor_system.fetchall()
    if rows_points is not None and len(rows_points) > 0:
        for row in rows_points:
            point_list.append({"id": row[0], "name": row[1], "units": row[2], "object_type": row[3]})

    ################################################################################################################
    # Step 6: query base period energy input
    ################################################################################################################
    base = dict()
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            kgce = energy_category_dict[energy_category_id]['kgce']
            kgco2e = energy_category_dict[energy_category_id]['kgco2e']

            base[energy_category_id] = dict()
            base[energy_category_id]['timestamps'] = list()
            base[energy_category_id]['values'] = list()
            base[energy_category_id]['subtotal'] = Decimal(0.0)
            base[energy_category_id]['subtotal_in_kgce'] = Decimal(0.0)
            base[energy_category_id]['subtotal_in_kgco2e'] = Decimal(0.0)

            cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                  " FROM tbl_tenant_input_category_hourly "
                                  " WHERE tenant_id = %s "
                                  " AND energy_category_id = %s "
                                  " AND start_datetime_utc >= %s "
                                  " AND start_datetime_utc < %s "
                                  " ORDER BY start_datetime_utc ",
                                  (tenant['id'],
                                   energy_category_id,
                                   base_start_datetime_utc,
                                   base_end_datetime_utc))
            rows_tenant_hourly = cursor_energy.fetchall()

            rows_tenant_periodically = utilities.aggregate_hourly_data_by_period(rows_tenant_hourly,
                                                                                 base_start_datetime_utc,
                                                                                 base_end_datetime_utc,
                                                                                 period_type)
            for row_tenant_periodically in rows_tenant_periodically:
                current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \
                    timedelta(minutes=timezone_offset)
                if period_type == 'hourly':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                elif period_type == 'daily':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                elif period_type == 'weekly':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                elif period_type == 'monthly':
                    current_datetime = current_datetime_local.strftime('%Y-%m')
                elif period_type == 'yearly':
                    current_datetime = current_datetime_local.strftime('%Y')

                actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1]
                base[energy_category_id]['timestamps'].append(current_datetime)
                base[energy_category_id]['values'].append(actual_value)
                base[energy_category_id]['subtotal'] += actual_value
                base[energy_category_id]['subtotal_in_kgce'] += actual_value * kgce
                base[energy_category_id]['subtotal_in_kgco2e'] += actual_value * kgco2e

    ################################################################################################################
    # Step 8: query reporting period energy input
    ################################################################################################################
    reporting = dict()
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            kgce = energy_category_dict[energy_category_id]['kgce']
            kgco2e = energy_category_dict[energy_category_id]['kgco2e']

            reporting[energy_category_id] = dict()
            reporting[energy_category_id]['timestamps'] = list()
            reporting[energy_category_id]['values'] = list()
            reporting[energy_category_id]['subtotal'] = Decimal(0.0)
            reporting[energy_category_id]['subtotal_in_kgce'] = Decimal(0.0)
            reporting[energy_category_id]['subtotal_in_kgco2e'] = Decimal(0.0)
            reporting[energy_category_id]['toppeak'] = Decimal(0.0)
            reporting[energy_category_id]['onpeak'] = Decimal(0.0)
            reporting[energy_category_id]['midpeak'] = Decimal(0.0)
            reporting[energy_category_id]['offpeak'] = Decimal(0.0)

            cursor_energy.execute(" SELECT start_datetime_utc, actual_value "
                                  " FROM tbl_tenant_input_category_hourly "
                                  " WHERE tenant_id = %s "
                                  " AND energy_category_id = %s "
                                  " AND start_datetime_utc >= %s "
                                  " AND start_datetime_utc < %s "
                                  " ORDER BY start_datetime_utc ",
                                  (tenant['id'],
                                   energy_category_id,
                                   reporting_start_datetime_utc,
                                   reporting_end_datetime_utc))
            rows_tenant_hourly = cursor_energy.fetchall()

            rows_tenant_periodically = utilities.aggregate_hourly_data_by_period(rows_tenant_hourly,
                                                                                 reporting_start_datetime_utc,
                                                                                 reporting_end_datetime_utc,
                                                                                 period_type)
            for row_tenant_periodically in rows_tenant_periodically:
                current_datetime_local = row_tenant_periodically[0].replace(tzinfo=timezone.utc) + \
                    timedelta(minutes=timezone_offset)
                if period_type == 'hourly':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                elif period_type == 'daily':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                elif period_type == 'weekly':
                    current_datetime = current_datetime_local.strftime('%Y-%m-%d')
                elif period_type == 'monthly':
                    current_datetime = current_datetime_local.strftime('%Y-%m')
                elif period_type == 'yearly':
                    current_datetime = current_datetime_local.strftime('%Y')

                actual_value = Decimal(0.0) if row_tenant_periodically[1] is None else row_tenant_periodically[1]
                reporting[energy_category_id]['timestamps'].append(current_datetime)
                reporting[energy_category_id]['values'].append(actual_value)
                reporting[energy_category_id]['subtotal'] += actual_value
                reporting[energy_category_id]['subtotal_in_kgce'] += actual_value * kgce
                reporting[energy_category_id]['subtotal_in_kgco2e'] += actual_value * kgco2e

            # accumulate top/on/mid/off-peak consumption from the raw hourly rows
            energy_category_tariff_dict = utilities.get_energy_category_peak_types(tenant['cost_center_id'],
                                                                                   energy_category_id,
                                                                                   reporting_start_datetime_utc,
                                                                                   reporting_end_datetime_utc)
            for row in rows_tenant_hourly:
                peak_type = energy_category_tariff_dict.get(row[0], None)
                if peak_type == 'toppeak':
                    reporting[energy_category_id]['toppeak'] += row[1]
                elif peak_type == 'onpeak':
                    reporting[energy_category_id]['onpeak'] += row[1]
                elif peak_type == 'midpeak':
                    reporting[energy_category_id]['midpeak'] += row[1]
                elif peak_type == 'offpeak':
                    reporting[energy_category_id]['offpeak'] += row[1]

    ################################################################################################################
    # Step 9: query tariff data
    ################################################################################################################
    parameters_data = dict()
    parameters_data['names'] = list()
    parameters_data['timestamps'] = list()
    parameters_data['values'] = list()
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            energy_category_tariff_dict = utilities.get_energy_category_tariffs(tenant['cost_center_id'],
                                                                                energy_category_id,
                                                                                reporting_start_datetime_utc,
                                                                                reporting_end_datetime_utc)
            tariff_timestamp_list = list()
            tariff_value_list = list()
            for k, v in energy_category_tariff_dict.items():
                # convert k from utc to local
                k = k + timedelta(minutes=timezone_offset)
                tariff_timestamp_list.append(k.isoformat()[0:19])
                tariff_value_list.append(v)

            parameters_data['names'].append('TARIFF-' + energy_category_dict[energy_category_id]['name'])
            parameters_data['timestamps'].append(tariff_timestamp_list)
            parameters_data['values'].append(tariff_value_list)

    ################################################################################################################
    # Step 10: query associated sensors and points data
    ################################################################################################################
    for point in point_list:
        point_values = []
        point_timestamps = []
        if point['object_type'] == 'ANALOG_VALUE':
            query = (" SELECT utc_date_time, actual_value "
                     " FROM tbl_analog_value "
                     " WHERE point_id = %s "
                     " AND utc_date_time BETWEEN %s AND %s "
                     " ORDER BY utc_date_time ")
            cursor_historical.execute(query, (point['id'],
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()
            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    point_timestamps.append(current_datetime)
                    point_values.append(row[1])
        elif point['object_type'] == 'ENERGY_VALUE':
            query = (" SELECT utc_date_time, actual_value "
                     " FROM tbl_energy_value "
                     " WHERE point_id = %s "
                     " AND utc_date_time BETWEEN %s AND %s "
                     " ORDER BY utc_date_time ")
            cursor_historical.execute(query, (point['id'],
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()
            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    point_timestamps.append(current_datetime)
                    point_values.append(row[1])
        elif point['object_type'] == 'DIGITAL_VALUE':
            query = (" SELECT utc_date_time, actual_value "
                     " FROM tbl_digital_value "
                     " WHERE point_id = %s "
                     " AND utc_date_time BETWEEN %s AND %s "
                     " ORDER BY utc_date_time ")
            cursor_historical.execute(query, (point['id'],
                                              reporting_start_datetime_utc,
                                              reporting_end_datetime_utc))
            rows = cursor_historical.fetchall()
            if rows is not None and len(rows) > 0:
                for row in rows:
                    current_datetime_local = row[0].replace(tzinfo=timezone.utc) + \
                        timedelta(minutes=timezone_offset)
                    current_datetime = current_datetime_local.strftime('%Y-%m-%dT%H:%M:%S')
                    point_timestamps.append(current_datetime)
                    point_values.append(row[1])

        parameters_data['names'].append(point['name'] + ' (' + point['units'] + ')')
        parameters_data['timestamps'].append(point_timestamps)
        parameters_data['values'].append(point_values)

    ################################################################################################################
    # Step 12: construct the report
    ################################################################################################################
    if cursor_system:
        cursor_system.close()
    if cnx_system:
        cnx_system.close()
    if cursor_energy:
        cursor_energy.close()
    if cnx_energy:
        cnx_energy.close()
    if cursor_historical:
        cursor_historical.close()
    if cnx_historical:
        cnx_historical.close()

    result = dict()
    result['tenant'] = dict()
    result['tenant']['name'] = tenant['name']
    result['tenant']['area'] = tenant['area']

    result['base_period'] = dict()
    result['base_period']['names'] = list()
    result['base_period']['units'] = list()
    result['base_period']['timestamps'] = list()
    result['base_period']['values'] = list()
    result['base_period']['subtotals'] = list()
    result['base_period']['subtotals_in_kgce'] = list()
    result['base_period']['subtotals_in_kgco2e'] = list()
    result['base_period']['total_in_kgce'] = Decimal(0.0)
    result['base_period']['total_in_kgco2e'] = Decimal(0.0)
    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            result['base_period']['names'].append(energy_category_dict[energy_category_id]['name'])
            result['base_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
            result['base_period']['timestamps'].append(base[energy_category_id]['timestamps'])
            result['base_period']['values'].append(base[energy_category_id]['values'])
            result['base_period']['subtotals'].append(base[energy_category_id]['subtotal'])
            result['base_period']['subtotals_in_kgce'].append(base[energy_category_id]['subtotal_in_kgce'])
            result['base_period']['subtotals_in_kgco2e'].append(base[energy_category_id]['subtotal_in_kgco2e'])
            result['base_period']['total_in_kgce'] += base[energy_category_id]['subtotal_in_kgce']
            result['base_period']['total_in_kgco2e'] += base[energy_category_id]['subtotal_in_kgco2e']

    result['reporting_period'] = dict()
    result['reporting_period']['names'] = list()
    result['reporting_period']['energy_category_ids'] = list()
    result['reporting_period']['units'] = list()
    result['reporting_period']['timestamps'] = list()
    result['reporting_period']['values'] = list()
    result['reporting_period']['subtotals'] = list()
    result['reporting_period']['subtotals_in_kgce'] = list()
    result['reporting_period']['subtotals_in_kgco2e'] = list()
    result['reporting_period']['subtotals_per_unit_area'] = list()
    result['reporting_period']['toppeaks'] = list()
    result['reporting_period']['onpeaks'] = list()
    result['reporting_period']['midpeaks'] = list()
    result['reporting_period']['offpeaks'] = list()
    result['reporting_period']['increment_rates'] = list()
    result['reporting_period']['total_in_kgce'] = Decimal(0.0)
    result['reporting_period']['total_in_kgco2e'] = Decimal(0.0)
    result['reporting_period']['increment_rate_in_kgce'] = Decimal(0.0)
    result['reporting_period']['increment_rate_in_kgco2e'] = Decimal(0.0)

    if energy_category_set is not None and len(energy_category_set) > 0:
        for energy_category_id in energy_category_set:
            result['reporting_period']['names'].append(energy_category_dict[energy_category_id]['name'])
            result['reporting_period']['energy_category_ids'].append(energy_category_id)
            result['reporting_period']['units'].append(energy_category_dict[energy_category_id]['unit_of_measure'])
            result['reporting_period']['timestamps'].append(reporting[energy_category_id]['timestamps'])
            result['reporting_period']['values'].append(reporting[energy_category_id]['values'])
            result['reporting_period']['subtotals'].append(reporting[energy_category_id]['subtotal'])
            result['reporting_period']['subtotals_in_kgce'].append(
                reporting[energy_category_id]['subtotal_in_kgce'])
            result['reporting_period']['subtotals_in_kgco2e'].append(
                reporting[energy_category_id]['subtotal_in_kgco2e'])
            result['reporting_period']['subtotals_per_unit_area'].append(
                reporting[energy_category_id]['subtotal'] / tenant['area'] if tenant['area'] > 0.0 else None)
            result['reporting_period']['toppeaks'].append(reporting[energy_category_id]['toppeak'])
            result['reporting_period']['onpeaks'].append(reporting[energy_category_id]['onpeak'])
            result['reporting_period']['midpeaks'].append(reporting[energy_category_id]['midpeak'])
            result['reporting_period']['offpeaks'].append(reporting[energy_category_id]['offpeak'])
            result['reporting_period']['increment_rates'].append(
                (reporting[energy_category_id]['subtotal'] - base[energy_category_id]['subtotal']) /
                base[energy_category_id]['subtotal']
                if base[energy_category_id]['subtotal'] > 0.0 else None)
            result['reporting_period']['total_in_kgce'] += reporting[energy_category_id]['subtotal_in_kgce']
            result['reporting_period']['total_in_kgco2e'] += reporting[energy_category_id]['subtotal_in_kgco2e']

    # BUGFIX: the per-unit-area figures were swapped — 'total_in_kgce_per_unit_area'
    # was computed from total_in_kgco2e and vice versa. Each now uses its own total.
    result['reporting_period']['total_in_kgce_per_unit_area'] = \
        result['reporting_period']['total_in_kgce'] / tenant['area'] if tenant['area'] > 0.0 else None
    result['reporting_period']['increment_rate_in_kgce'] = \
        (result['reporting_period']['total_in_kgce'] - result['base_period']['total_in_kgce']) / \
        result['base_period']['total_in_kgce'] if result['base_period']['total_in_kgce'] > Decimal(0.0) else None
    result['reporting_period']['total_in_kgco2e_per_unit_area'] = \
        result['reporting_period']['total_in_kgco2e'] / tenant['area'] if tenant['area'] > 0.0 else None
    result['reporting_period']['increment_rate_in_kgco2e'] = \
        (result['reporting_period']['total_in_kgco2e'] - result['base_period']['total_in_kgco2e']) / \
        result['base_period']['total_in_kgco2e'] if result['base_period']['total_in_kgco2e'] > Decimal(0.0) \
        else None

    result['parameters'] = {
        "names": parameters_data['names'],
        "timestamps": parameters_data['timestamps'],
        "values": parameters_data['values']
    }

    # export result to Excel file and then encode the file to base64 string
    result['excel_bytes_base64'] = excelexporters.tenantenergycategory.export(result,
                                                                              tenant['name'],
                                                                              reporting_start_datetime_local,
                                                                              reporting_end_datetime_local,
                                                                              period_type)

    resp.text = json.dumps(result)
def on_post(self, req, resp):
    """Handle POST requests that create a company group.

    Reads a JSON body, validates it against the 'companyGroup' request
    schema and the caller's auth token, inserts a row into
    mint_loan.mw_company_city_mapping when a 'cityProduct' key is present,
    rotates the auth token, and writes a JSON status envelope to resp.body.
    """
    output_dict = {"msgHeader": {"authToken": ""}, "data": {"updated": ""}}
    errors = utils.errors
    success = "company group successfully created"
    try:
        raw_json = req.stream.read()
        # json.loads() lost its `encoding` keyword in Python 3.9 (it was
        # ignored since 3.1); bytes input is auto-decoded per the JSON spec.
        input_dict = json.loads(raw_json)
    except Exception:
        raise falcon.HTTPError(falcon.HTTP_400, 'Invalid JSON',
                               'The JSON was incorrect.')
    try:
        if not validate.Request(api='companyGroup', request=input_dict):
            # Schema validation failed: report a JSON-format error.
            output_dict["data"].update({
                "error": 1,
                "message": errors["json"]
            })
            resp.body = json.dumps(output_dict)
        else:
            db = DB(input_dict["msgHeader"]["authLoginID"])
            val_error = validate(db).basicChecks(
                token=input_dict["msgHeader"]["authToken"])
            if val_error:
                output_dict["data"].update({
                    "error": 1,
                    "message": val_error
                })
                resp.body = json.dumps(output_dict)
            else:
                indict = input_dict["data"]
                if "cityProduct" in list(indict.keys()):
                    # NOTE(review): every column except COMPANY_SHORT_NAME,
                    # CREATED_BY and CREATED_DATE is populated from
                    # data["displayName"] — this looks like a copy/paste
                    # mistake; confirm the intended source fields.
                    inserted = db.Insert(
                        db="mint_loan",
                        table="mw_company_city_mapping",
                        compulsory=False,
                        date=False,
                        **utils.mergeDicts({
                            "COMPANY_SHORT_NAME":
                                input_dict["data"]["groupName"]
                                if input_dict["data"]["groupName"] else None,
                            "DASHBOARD_ID":
                                input_dict["data"]["displayName"]
                                if input_dict["data"]["displayName"] else None,
                            "LOAN_PACK_ID":
                                input_dict["data"]["displayName"]
                                if input_dict["data"]["displayName"] else None,
                            "INSURANCE_PACK_ID":
                                input_dict["data"]["displayName"]
                                if input_dict["data"]["displayName"] else None,
                            "INVESTMENT_PACK_ID":
                                input_dict["data"]["displayName"]
                                if input_dict["data"]["displayName"] else None,
                            "SAVINGS_ENABLED":
                                input_dict["data"]["displayName"]
                                if input_dict["data"]["displayName"] else None,
                            "CITY_ID":
                                input_dict["data"]["displayName"]
                                if input_dict["data"]["displayName"] else None,
                            "CREATED_BY":
                                input_dict["msgHeader"]["authLoginID"],
                            "CREATED_DATE":
                                datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                        }))
                # Rotate the auth token and build the success/failure envelope.
                token = generate(db).AuthToken()
                if token["updated"]:
                    output_dict["data"]["updated"] = ''
                    output_dict["data"].update({
                        "error": 0,
                        "message": success
                    })
                    output_dict["msgHeader"]["authToken"] = token["token"]
                else:
                    output_dict["data"].update({
                        "error": 1,
                        "message": errors["token"]
                    })
                resp.body = json.dumps(output_dict)
                db._DbClose_()
    except Exception:
        # Propagate unexpected failures to the framework's error handling.
        raise
def on_patch(self, req, resp):
    """Reject PATCH requests: this resource does not support partial updates."""
    raise falcon.HTTPError(falcon.HTTP_400)
def on_post(req, resp):
    """Handle POST requests that create an email server.

    Validates the JSON payload (host, port, authentication settings,
    from address), rejects duplicate hosts, inserts a row into
    tbl_email_servers and responds 201 with a Location header.

    Raises:
        falcon.HTTPError: 400 for any invalid field, 400 when the host
            is already registered.
    """
    try:
        raw_json = req.stream.read().decode('utf-8')
    except Exception as ex:
        # description must be a string; passing the exception object breaks
        # falcon's error serialization.
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.ERROR',
                               description=str(ex))

    new_values = json.loads(raw_json)

    if 'host' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['host'], str) or \
            len(str.strip(new_values['data']['host'])) == 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_EMAIL_SERVER_HOST')
    host = str.strip(new_values['data']['host'])

    if 'port' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['port'], int) or \
            new_values['data']['port'] <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_PORT')
    # The value was just validated as a positive int; keep it an int
    # (the previous float() conversion stored e.g. 25.0 in the database).
    port = int(new_values['data']['port'])

    if 'requires_authentication' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['requires_authentication'], bool):
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_REQUIRES_AUTHENTICATION')
    requires_authentication = new_values['data']['requires_authentication']

    if requires_authentication:
        if 'user_name' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['user_name'], str) or \
                len(str.strip(new_values['data']['user_name'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_USER_NAME')
        user_name = new_values['data']['user_name']
    else:
        user_name = None

    if requires_authentication:
        if 'password' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['password'], str) or \
                len(str.strip(new_values['data']['password'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_PASSWORD')
        # Stored base64-encoded, not encrypted — obfuscation only.
        password = base64.b64encode(
            bytearray(new_values['data']['password'], 'utf-8'))
    else:
        password = None

    if 'from_addr' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['from_addr'], str) or \
            len(str.strip(new_values['data']['from_addr'])) == 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_FROM_ADDR')
    from_addr = new_values['data']['from_addr']
    match = re.match(
        r'^[_a-z0-9-]+(\.[_a-z0-9-]+)*@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,4})$',
        from_addr)
    if match is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_FROM_ADDR')

    cnx = mysql.connector.connect(**config.myems_fdd_db)
    cursor = cnx.cursor()

    cursor.execute(" SELECT host "
                   " FROM tbl_email_servers "
                   " WHERE host = %s ", (host,))
    if cursor.fetchone() is not None:
        cursor.close()
        cnx.disconnect()
        # Duplicate host is a client error, not a missing resource: 400.
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.EMAIL_SERVER_HOST_IS_ALREADY_IN_USE')

    add_value = (" INSERT INTO tbl_email_servers "
                 " (host, port, requires_authentication, user_name, password, from_addr) "
                 " VALUES (%s, %s, %s, %s, %s, %s) ")
    cursor.execute(add_value, (host,
                               port,
                               requires_authentication,
                               user_name,
                               password,
                               from_addr))
    new_id = cursor.lastrowid
    cnx.commit()
    cursor.close()
    cnx.disconnect()

    resp.status = falcon.HTTP_201
    resp.location = '/emailservers/' + str(new_id)
def on_put(req, resp, id_):
    """Handle PUT requests that update an email server by id.

    Validates the path id and JSON payload, checks the row exists and the
    host is not used by another row, then updates tbl_email_servers.

    Raises:
        falcon.HTTPError: 400 for any invalid field or duplicate host,
            404 when the email server id does not exist.
    """
    try:
        raw_json = req.stream.read().decode('utf-8')
    except Exception as ex:
        # description must be a string; passing the exception object breaks
        # falcon's error serialization.
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.EXCEPTION',
                               description=str(ex))

    if not id_.isdigit() or int(id_) <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_EMAIL_SERVER_ID')

    new_values = json.loads(raw_json)

    if 'host' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['host'], str) or \
            len(str.strip(new_values['data']['host'])) == 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_EMAIL_SERVER_HOST')
    host = str.strip(new_values['data']['host'])

    if 'port' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['port'], int) or \
            new_values['data']['port'] <= 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_PORT')
    # The value was just validated as a positive int; keep it an int
    # (the previous float() conversion stored e.g. 25.0 in the database).
    port = int(new_values['data']['port'])

    if 'requires_authentication' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['requires_authentication'], bool):
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_REQUIRES_AUTHENTICATION')
    requires_authentication = new_values['data']['requires_authentication']

    if requires_authentication:
        if 'user_name' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['user_name'], str) or \
                len(str.strip(new_values['data']['user_name'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_USER_NAME')
        user_name = new_values['data']['user_name']
    else:
        user_name = None

    if requires_authentication:
        if 'password' not in new_values['data'].keys() or \
                not isinstance(new_values['data']['password'], str) or \
                len(str.strip(new_values['data']['password'])) == 0:
            raise falcon.HTTPError(falcon.HTTP_400,
                                   title='API.BAD_REQUEST',
                                   description='API.INVALID_PASSWORD')
        # Stored base64-encoded, not encrypted — obfuscation only.
        password = base64.b64encode(
            bytearray(new_values['data']['password'], 'utf-8'))
    else:
        password = None

    if 'from_addr' not in new_values['data'].keys() or \
            not isinstance(new_values['data']['from_addr'], str) or \
            len(str.strip(new_values['data']['from_addr'])) == 0:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_FROM_ADDR')
    from_addr = new_values['data']['from_addr']
    match = re.match(
        r'^[_a-z0-9-]+(\.[_a-z0-9-]+)*@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,4})$',
        from_addr)
    if match is None:
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.INVALID_FROM_ADDR')

    cnx = mysql.connector.connect(**config.myems_fdd_db)
    cursor = cnx.cursor()

    cursor.execute(" SELECT id "
                   " FROM tbl_email_servers "
                   " WHERE id = %s ", (id_,))
    if cursor.fetchone() is None:
        cursor.close()
        cnx.disconnect()
        raise falcon.HTTPError(falcon.HTTP_404,
                               title='API.NOT_FOUND',
                               description='API.EMAIL_SERVER_NOT_FOUND')

    cursor.execute(" SELECT host "
                   " FROM tbl_email_servers "
                   " WHERE host = %s AND id != %s ", (host, id_))
    if cursor.fetchone() is not None:
        cursor.close()
        cnx.disconnect()
        # Duplicate host is a client error, not a missing resource: 400.
        raise falcon.HTTPError(falcon.HTTP_400,
                               title='API.BAD_REQUEST',
                               description='API.EMAIL_SERVER_HOST_IS_ALREADY_IN_USE')

    update_row = (" UPDATE tbl_email_servers "
                  " SET host = %s, port = %s, requires_authentication = %s, "
                  "     user_name = %s, password = %s, from_addr = %s "
                  " WHERE id = %s ")
    cursor.execute(update_row, (host,
                                port,
                                requires_authentication,
                                user_name,
                                password,
                                from_addr,
                                id_,))
    cnx.commit()
    cursor.close()
    cnx.disconnect()

    resp.status = falcon.HTTP_200
def handle(ex, req, resp, params):
    """Error hook: translate any caught exception into an HTTP 725 response."""
    description = ("Sorry, couldn't write your thing to the "
                   "database. It worked on my box.")
    raise falcon.HTTPError(falcon.HTTP_725, 'Database Error', description)