def get_all_organisation_monitoring_sites(tenant):
    """
    Gets all the monitoring sites for the specified organisation.

    Args:
        tenant: the name of the organisation whose monitoring sites are to be returned.
    Returns:
        A list of the monitoring sites associated with the specified organisation name.
    """
    records = []
    # renamed from "MonitoringSite" so the local no longer shadows the class name
    site_model = monitoring_site.MonitoringSite(tenant)
    results = site_model.get_all()
    for result in results:
        measurements = result.get('LatestHourlyMeasurement')
        # BUG FIX: the original indexed measurements[-1] whenever the key was
        # present, raising IndexError when the list was empty. A truthiness
        # check covers both the missing-key and the empty-list cases.
        if measurements:
            latest = measurements[-1]
            last_hour_pm25_value = int(latest['last_hour_pm25_value'])
            last_hour = convert_date.date_to_formated_str(latest['last_hour'])
        else:
            # default placeholders when no hourly measurement is available
            last_hour_pm25_value = 0
            last_hour = ''
        records.append({
            "DeviceCode": result['DeviceCode'],
            'Parish': result['Parish'],
            'Division': result['Division'],
            'Last_Hour_PM25_Value': last_hour_pm25_value,
            'Latitude': result['Latitude'],
            'Longitude': result['Longitude'],
            'LocationCode': result['LocationCode'],
            'LastHour': last_hour,
            '_id': str(result['_id'])  # ObjectId is not JSON-serialisable; stringify
        })
    return records
def calculate_average_daily_measurements_for_last_28_days():
    """Aggregate each monitoring site's daily PM 2.5 series over the last 28
    days, compute the per-device mean, and persist the aggregates.

    Reads every monitoring-site document from Mongo, fetches its daily
    'PM 2.5' values for the past 28 days via the Graph helper, and saves one
    historical-average record per device.

    Returns:
        A (JSON response, 200) tuple confirming the records were saved.
    """
    gr = graph.Graph()
    monitoring_sites = list(
        app.mongo.db.monitoring_site.find({}, {
            "DeviceCode": 1,
            "Parish": 1,
            "LocationCode": 1,
            "Division": 1,
            "_id": 0
        }))
    devices_historical_records = []
    # The 28-day window is loop-invariant: compute it once instead of per device.
    endtime = helpers.date_to_str(datetime.now())
    starttime = helpers.date_to_str(datetime.now() - timedelta(days=28))
    # round-trip through str to truncate to the string format's precision
    created_at = helpers.str_to_date(helpers.date_to_str(datetime.now()))
    for monitoring_site_device in monitoring_sites:
        code = monitoring_site_device['DeviceCode']
        if not code:  # skip documents with an empty device code
            continue
        records = []
        pm25_daily_values = []
        average_pm25 = 0
        parish = monitoring_site_device['Parish']
        division = monitoring_site_device['Division']
        location_code = monitoring_site_device['LocationCode']
        monitoring_site_measurements_cursor = gr.get_filtered_data(
            code, starttime, endtime, 'daily', 'PM 2.5')
        for site in monitoring_site_measurements_cursor:
            pm25_value = int(site['pollutant_value'])
            records.append({'pm2_5_value': pm25_value, 'time': site["time"]})
            pm25_daily_values.append(pm25_value)
        if len(pm25_daily_values) > 0:
            average_pm25 = np.mean(pm25_daily_values)
        devices_historical_records.append({
            'deviceCode': code,
            'average_pm25': average_pm25,
            'historical_records': records,
            'Parish': parish,
            'Division': division,
            'LocationCode': location_code,
            'created_at': created_at
        })
    mongo_helpers.save_device_daily_historical_averages(
        devices_historical_records)
    return jsonify({'response': 'all new hourly measurements saved'}), 200
def get_device_past_28_days_measurements():
    """Return one device's daily PM 2.5 measurements for the last 28 days.

    Expects a 'device_code' query-string parameter; responds with the raw
    measurement documents, simplified records and the bare PM 2.5 values.
    Returns an error payload when the parameter is missing.
    """
    gr = graph.Graph()
    if request.method == 'GET':
        device_code = request.args.get('device_code')
        if device_code:
            historical_results = []
            records = []
            pm25_daily_values = []
            endtime = helpers.date_to_str(datetime.now())
            starttime = helpers.date_to_str(datetime.now() -
                                            timedelta(days=28))
            monitoring_site_measurements_cursor = gr.get_filtered_data(
                device_code, starttime, endtime, 'daily', 'PM 2.5')
            for site in monitoring_site_measurements_cursor:
                # convert once; the original repeated the int() conversion
                pm25_value = int(site['pollutant_value'])
                records.append({'pm2_5_value': pm25_value,
                                'time': site["time"]})
                pm25_daily_values.append(pm25_value)
                historical_results.append(site)
            return jsonify({
                "historical_measurements": historical_results,
                "records": records,
                "pm25_values": pm25_daily_values
            })
        else:
            return jsonify({
                "error msg":
                "device code wasn't supplied in the query string parameter."
            })
def calculate_average_daily_measurements_for_last_28_days():
    """Tenant-aware variant: aggregate each monitoring site's daily PM 2.5
    series over the last 28 days and compute the per-device mean.

    Requires a 'tenant' query-string parameter naming the organisation.
    Currently only prints the aggregated records; persisting them is still a
    TODO (see the commented save call at the bottom).

    Returns:
        A (JSON error, 400) tuple when 'tenant' is missing; otherwise None.
    """
    tenant = request.args.get('tenant')
    if not tenant:
        return jsonify({
            "message":
            "please specify the organization name. Refer to the API documentation for details.",
            "success": False
        }), 400
    MonitoringSiteModel = monitoring_site.MonitoringSite(tenant)
    monitoring_sites = MonitoringSiteModel.get_monitoring_sites()
    devices_historical_records = []
    # BUG FIX: the original called convert_date.str_to_date() on datetime
    # objects (and double-applied it for created_at). str_to_date parses a
    # string (see its use on start_date/end_date elsewhere in this file);
    # the serialising direction must be date_to_str, mirroring the
    # helpers.date_to_str usage in the sibling implementation above.
    # NOTE(review): assumes convert_date exposes date_to_str — confirm.
    created_at = convert_date.str_to_date(
        convert_date.date_to_str(datetime.now()))
    endtime = convert_date.date_to_str(datetime.now())
    starttime = convert_date.date_to_str(datetime.now() - timedelta(days=28))
    for monitoring_site_device in monitoring_sites:
        code = monitoring_site_device['DeviceCode']
        if not code:  # skip documents with an empty device code
            continue
        records = []
        pm25_daily_values = []
        average_pm25 = 0
        parish = monitoring_site_device['Parish']
        division = monitoring_site_device['Division']
        location_code = monitoring_site_device['LocationCode']
        DailyAverage = device_daily_measurements.DailyMeasurements(tenant)
        monitoring_site_measurements_cursor = DailyAverage.get_filtered_data(
            code, starttime, endtime, 'daily', 'PM 2.5')
        for site in monitoring_site_measurements_cursor:
            pm25_value = int(site['pollutant_value'])
            records.append({'pm2_5_value': pm25_value, 'time': site["time"]})
            pm25_daily_values.append(pm25_value)
        if len(pm25_daily_values) > 0:
            average_pm25 = np.mean(pm25_daily_values)
        devices_historical_records.append({
            'deviceCode': code,
            'average_pm25': average_pm25,
            'historical_records': records,
            'Parish': parish,
            'Division': division,
            'LocationCode': location_code,
            'created_at': created_at
        })
    # TODO: persist the aggregates once the tenant-aware saver is available
    # save_device_daily_historical_averages(devices_historical_records)
    print(devices_historical_records)
def get_organisation_monitoring_site_locations():
    """Return the monitoring-site locations belonging to an organisation.

    Reads the 'organisation_name' query-string parameter and responds with
    the matching location documents, serialised via bson's json_util so that
    Mongo-specific types survive the trip through jsonify.
    """
    site_model = monitoring_site.MonitoringSite()
    if request.method == 'GET':
        org_name = request.args.get('organisation_name')
        if not org_name:
            return jsonify({"error msg": "organisation name wasn't supplied in the query string parameter."})
        # materialise the cursor, then round-trip through json_util to make
        # the documents plain JSON-compatible dicts
        site_locations = list(site_model.get_monitoring_site_locations(org_name))
        serialised = json.loads(json_util.dumps(site_locations))
        return jsonify({"airquality_monitoring_sites": serialised})
def get_pm25categorycount_for_locations():
    """Return the last hour's PM 2.5 category counts for an organisation's
    locations.

    Reads the 'organisation_name' query-string parameter and responds with
    the 'pm25_categories' field of the first aggregate document, an empty
    list when there are no results, or an error payload when the parameter
    is missing.
    """
    # removed the unused MonitoringSite instance and the commented-out
    # alternative implementation the original carried
    d = dashboard.Dashboard()
    if request.method == 'GET':
        org_name = request.args.get('organisation_name')
        if org_name:
            results = d.get_pm25_category_location_count_for_the_lasthour(
                org_name)
            if results:
                return jsonify(results[0]['pm25_categories'])
            return jsonify([])
        return jsonify({"error msg": "organisation name wasn't supplied in the query string parameter."})
def get_all_device_past_28_days_measurements():
    """Return chart-ready 28-day average PM 2.5 data for every device.

    Responds with parallel lists of average values, parish labels and the
    background colours matching each average's PM 2.5 category.
    """
    ms = monitoring_site.MonitoringSite()
    if request.method == 'GET':
        values = []
        labels = []
        background_colors = []
        # the original also accumulated every document into a 'results' list
        # that was never returned — dropped as wasted work
        for site in ms.get_all_devices_past_28_days_measurements():
            values.append(int(site["average_pm25"]))
            labels.append(site["Parish"])
            background_colors.append(
                helpers.set_pm25_category_background(site["average_pm25"]))
        return jsonify({"results": {"average_pm25_values": values,
                                    "labels": labels,
                                    "background_colors": background_colors}})
    else:
        return jsonify({"error msg": "invalid request."})
def get_random_location_hourly_customised_chart_data_2():
    """Build demo chart data for one hard-coded KCCA device (A743BPWK).

    Fetches the device's monthly PM 2.5 series for a fixed date range and
    returns the chart payload, dataset styling and a generated title.
    """
    ms = monitoring_site.MonitoringSite()
    gr = graph.Graph()
    # fixed demo parameters
    device_code = 'A743BPWK'
    start_date = '2020-04-09T07:00:00.000000Z'
    end_date = '2020-05-12T07:00:00.000000Z'
    frequency = 'monthly'
    pollutant = 'PM 2.5'
    chart_type = 'pie'
    organisation_name = 'KCCA'
    parish = ' Wandegeya'
    location_code = 'KCCA_KWPE_AQ05'
    division = 'Kawempe'
    colors = ['#7F7F7F', '#E377C2', '#17BECF', '#BCBD22', '#3f51b5']  # blue,cyan, olive,
    custom_chart_title = (
        'Mean ' + frequency.capitalize() + ' ' + pollutant + ' for '
        + parish + ' Between '
        + helpers.convert_date_to_formated_str(
            helpers.str_to_date(start_date), frequency)
        + ' and '
        + helpers.convert_date_to_formated_str(
            helpers.str_to_date(end_date), frequency))
    values = []
    labels = []
    device_results = {}
    filtered_data = gr.get_filtered_data(
        device_code, start_date, end_date, frequency, pollutant)
    if filtered_data:
        for entry in filtered_data:
            values.append(entry['pollutant_value'])
            labels.append(entry['time'])
        device_results = {'pollutant_values': values, 'labels': labels}
    color = colors.pop()
    datasets = [{'data': values,
                 'label': parish + ' ' + pollutant,
                 'borderColor': color,
                 'backgroundColor': color,
                 'fill': False}]
    custom_chat_data = [{'start_date': start_date,
                         'end_date': end_date,
                         'division': division,
                         'parish': parish,
                         'frequency': frequency,
                         'pollutant': pollutant,
                         'location_code': location_code,
                         'chart_type': chart_type,
                         'chart_data': device_results}]
    return jsonify({'results': custom_chat_data,
                    'datasets': datasets,
                    'custom_chart_title': custom_chart_title})
def download_customised_data():
    """Download measurement data for the locations/pollutants in the POSTed
    JSON body, as JSON or CSV (chosen by the 'type' query-string argument).

    The POST body supplies locations, a date range, frequency, a list of
    pollutants, file type, degree-of-cleanness and the organisation name.
    For every device at every selected location, one flat dict per device is
    built and collected into `datasets`; the CSV path then flattens those
    dicts into a pandas DataFrame and returns its JSON-records serialisation.
    """
    # create an instance of the MonitoringSite class
    ms = monitoring_site.MonitoringSite()
    # create an instance of the Graph class
    gr = graph.Graph()
    if request.method != 'POST':
        return {'message': 'Method not allowed. The method is not allowed for the requested URL'}, 400
    else:
        json_data = request.get_json()
        download_type = request.args.get('type')
        # validate the request before doing any data fetching
        if not json_data:
            return {'message': 'No input data provided'}, 400
        if not download_type:
            return {'message': 'Please specify the type of data you wish to download'}, 400
        if download_type not in ['csv', 'json']:
            return {'message': 'Invalid data type. Please specify csv or json'}, 400
        locations = json_data["locations"]
        start_date = json_data["startDate"]
        end_date = json_data["endDate"]
        frequency = json_data["frequency"]
        # select multiple pollutants
        pollutants = json_data["pollutants"]
        file_type = json_data["fileType"]
        # NOTE(review): degree_of_cleanness is read from the body but never
        # used below — confirm whether filtering by it is still planned
        degree_of_cleanness = json_data["degreeOfClean"]
        organisation_name = json_data["organisation_name"]
        custom_chat_data = []
        # datasets = {}
        # displaying multiple locations
        datasets = []
        locations_devices = []
        for location in locations:
            devices = ms.get_location_devices_code(
                organisation_name, location['label'])
            # datasets[location['label']] = {}
            for device in devices:
                device_code = device['DeviceCode']
                division = device['Division']
                parish = device['Parish']
                location_code = device['LocationCode']
                values = []
                labels = []
                pollutant_list = []
                device_results = {}
                data_to_download = {}
                channel_ref = []
                # control how some of the data is accessed
                # (flag == 0 means time labels / channel refs have not yet
                # been captured for this device; they are recorded only once,
                # from the first pollutant that returns data)
                flag = 0
                # loop through pollutants selected by the user
                for pollutant in pollutants:
                    filtered_data = gr.get_filtered_data(
                        device_code, start_date, end_date, frequency,
                        pollutant['value'])
                    # one value-series per pollutant, keyed by its 'value' id
                    data_to_download[pollutant['value']] = []
                    if filtered_data:
                        for data in filtered_data:
                            values.append(data['pollutant_value'])
                            if flag == 0:
                                labels.append(data['time'])
                                channel_ref.append(device_code)
                            pollutant_list.append(pollutant['value'])
                            data_to_download[pollutant['value']].append(
                                data['pollutant_value'])
                        flag = 1
                # attach the per-device metadata alongside the value series
                data_to_download['channel_ref'] = channel_ref
                data_to_download['device_code'] = device_code
                data_to_download['division'] = division
                data_to_download['parish'] = parish
                data_to_download['time'] = labels
                data_to_download['location_code'] = location_code
                data_to_download['start_date'] = start_date
                data_to_download['end_date'] = end_date
                data_to_download['frequency'] = frequency
                data_to_download['file_type'] = file_type
                data_to_download['owner'] = organisation_name
                data_to_download['location'] = location['label']
                # datasets[location['label']] = data_to_download
                datasets.append(data_to_download)
        # downloading json
        if download_type == 'json':
            return jsonify({'results': datasets})
        # downloading csv
        if download_type == 'csv':
            # print(json.dumps(datasets, indent=1))
            # json normalization to pandas datafrome
            tempp = pd.DataFrame()
            # NOTE(review): each iteration of these loops overwrites the same
            # tempp columns, so only the last normalisation survives — confirm
            # this is the intended behaviour for multi-location requests
            for location in locations:
                temp = pd.json_normalize(datasets, 'time', ['owner'])
                tempp['datetime'] = temp[0]
            for pollutant in pollutants:
                # NOTE(review): record_path uses pollutant['label'] while the
                # series above were stored under pollutant['value'] — this
                # breaks whenever label != value; verify against the client
                temp = pd.json_normalize(datasets, pollutant['label'], [
                    'owner', 'location', 'parish', 'division', 'frequency',
                    'location_code'])
                tempp['owner'] = temp['owner']
                tempp['location'] = temp['location']
                tempp['location_code'] = temp['location_code']
                tempp['parish'] = temp['parish']
                tempp['division'] = temp['division']
                tempp['frequency'] = temp['frequency']
                tempp[pollutant['label']] = temp[0]
            # FIELDS = ["location", "PM 2.5", "start_date"]
            # df = pd.json_normalize(datasets, max_level=1)
            # print(df)
            for location in locations:
                temp = pd.json_normalize(datasets, 'channel_ref', ['owner'])
                tempp['channel_ref'] = temp[0]
            print(tempp)
            final_data = tempp.to_json(orient='records')
            # buid a dataframe from data_to_download
            # df_new = pd.DataFrame(columns=['PM 2.5', 'PM 10', 'channel_ref'])
            # for item in datasets:
            #     df_new[item] = datasets[item]
            # print(df_new)
            # print(df_new.to_json(orient='columns'))
            # return jsonify({'results': datasets})
            return final_data
def generate_customised_chart_data():
    """Build chart payloads for the locations, date range, frequency,
    pollutant and chart type selected in the POSTed JSON body.

    For each device at each selected location this produces either pie-chart
    category counts or a time series, plus Chart.js-style dataset objects and
    a generated two-part chart title.

    Returns:
        A JSON response with 'results', 'datasets' and the title parts, or a
        (JSON error, 400) tuple when no body was provided.
    """
    ms = monitoring_site.MonitoringSite()
    gr = graph.Graph()
    if request.method == 'POST':
        json_data = request.get_json()
        if not json_data:
            return {'message': 'No input data provided'}, 400
        # input_data, errors = validate_inputs(input_data=json_data) //add server side validation
        locations = json_data["locations"]
        start_date = json_data["startDate"]
        end_date = json_data["endDate"]
        frequency = json_data["frequency"]
        pollutant = json_data["pollutant"]
        chart_type = json_data["chartType"]
        organisation_name = json_data["organisation_name"]
        custom_chat_data = []
        datasets = []
        # displaying multiple locations
        locations_devices = []
        colors = ['#7F7F7F', '#E377C2', '#17BECF', '#BCBD22', '#3f51b5']
        custom_chart_title = 'Mean ' + frequency.capitalize() + ' ' + pollutant + ' for '
        locations_names = ','.join([str(location['label']) for location in locations])
        custom_chart_title = custom_chart_title + locations_names
        custom_chart_title_second_section = (
            ' Between '
            + helpers.convert_date_to_formated_str(helpers.str_to_date(start_date), frequency)
            + ' and '
            + helpers.convert_date_to_formated_str(helpers.str_to_date(end_date), frequency))
        for location in locations:
            devices = ms.get_location_devices_code(
                organisation_name, location['label'])
            for device in devices:
                device_code = device['DeviceCode']
                division = device['Division']
                parish = device['Parish']
                location_code = device['LocationCode']
                values = []
                labels = []
                background_colors = []
                device_results = {}
                # BUG FIX: the palette holds only 5 colours, so the original
                # unconditional colors.pop() raised IndexError once a request
                # covered more than five devices; fall back to grey instead.
                color = colors.pop() if colors else '#7F7F7F'
                if chart_type == 'pie':
                    filtered_data = gr.get_piechart_data(
                        device_code, start_date, end_date, frequency, pollutant)
                    if filtered_data:
                        for data in filtered_data:
                            values.append(data['category_count'])
                            labels.append(data['category_name'])
                            background_colors.append(
                                helpers.assign_color_to_pollutant_category(data['category_name']))
                        device_results = {
                            'pollutant_values': values, 'labels': labels}
                    # pie slices are coloured per category, not per device
                    dataset = {'data': values,
                               'label': parish + ' ' + pollutant,
                               'backgroundColor': background_colors}
                    datasets.append(dataset)
                    custom_chat_data.append({
                        'start_date': start_date,
                        'end_date': end_date,
                        'division': division,
                        'parish': parish,
                        'frequency': frequency,
                        'pollutant': pollutant,
                        'location_code': location_code,
                        'chart_type': chart_type,
                        'chart_data': device_results,
                        'datasets': datasets,
                        'custom_chart_title': custom_chart_title,
                        'custom_chart_title_second_section': custom_chart_title_second_section})
                else:
                    filtered_data = gr.get_filtered_data(
                        device_code, start_date, end_date, frequency, pollutant)
                    if filtered_data:
                        for data in filtered_data:
                            values.append(data['pollutant_value'])
                            labels.append(data['time'])
                        device_results = {
                            'pollutant_values': values, 'labels': labels}
                    dataset = {'data': values,
                               'label': parish + ' ' + pollutant,
                               'borderColor': color,
                               'backgroundColor': color,
                               'fill': False}
                    datasets.append(dataset)
                    measurement_units = '(µg/m3)'
                    if pollutant == 'NO2':
                        measurement_units = ' Concentration'
                    chart_label = pollutant + measurement_units
                    custom_chat_data.append({
                        'start_date': start_date,
                        'end_date': end_date,
                        'division': division,
                        'parish': parish,
                        'frequency': frequency,
                        'pollutant': pollutant,
                        'location_code': location_code,
                        'chart_type': chart_type,
                        'chart_data': device_results,
                        'datasets': datasets,
                        'custom_chart_title': custom_chart_title,
                        'chart_label': chart_label,
                        'custom_chart_title_second_section': custom_chart_title_second_section})
            locations_devices.append(devices)
        return jsonify({'results': custom_chat_data,
                        'datasets': datasets,
                        'custom_chart_title': custom_chart_title,
                        'custom_chart_title_second_section': custom_chart_title_second_section})