#### Import Libraries and Functions from PyHydroQC import anomaly_utilities, rules_detect, calibration from PyHydroQC.parameters import site_params import matplotlib.pyplot as plt import pandas as pd import math #### Retrieve data ######################################### site = 'MainStreet' sensors = ['temp', 'cond', 'ph', 'do'] years = [2014, 2015, 2016, 2017, 2018, 2019] sensor_array = anomaly_utilities.get_data(sensors=sensors, site=site, years=years, path="./LRO_data/") #### Rules Based Anomaly Detection ######################################### range_count = dict() persist_count = dict() rules_metrics = dict() for snsr in sensor_array: sensor_array[snsr], range_count[snsr] = rules_detect.range_check( df=sensor_array[snsr], maximum=site_params[site][snsr]['max_range'], minimum=site_params[site][snsr]['min_range']) sensor_array[snsr], persist_count[snsr] = rules_detect.persistence( df=sensor_array[snsr], length=site_params[site][snsr]['persist'],
#### Import libraries and workflow functions
from PyHydroQC import anomaly_utilities
from PyHydroQC import model_workflow
from PyHydroQC import rules_detect
from PyHydroQC import ARIMA_correct
from PyHydroQC import modeling_utilities
from PyHydroQC.model_workflow import ModelType
# Parameters may be specified in a parameters file or in the same script
from Examples.FB_parameters import site_params, LSTM_params, calib_params

#### Retrieve data
#########################################
# Load one year of observations for the Franklin Basin site; get_data returns
# a dict of DataFrames keyed by sensor name.
site = 'FranklinBasin'
sensors = ['temp', 'cond', 'ph', 'do']
sensor_array = anomaly_utilities.get_data(sensors=sensors, filename='FB2017.csv', path="LRO_data/")

#### Rules Based Anomaly Detection
#########################################
# Per-sensor counts of points flagged by each rule, keyed by sensor name.
range_count = {}
persist_count = {}
rules_metrics = {}
for snsr in sensor_array:
    # Hoist the per-sensor parameter lookup used by both rule checks.
    snsr_params = site_params[site][snsr]
    # Flag values outside the configured min/max range for this sensor.
    sensor_array[snsr], range_count[snsr] = rules_detect.range_check(
        df=sensor_array[snsr],
        maximum=snsr_params['max_range'],
        minimum=snsr_params['min_range'],
    )
    # Flag values that repeat for longer than the configured 'persist' length.
    sensor_array[snsr], persist_count[snsr] = rules_detect.persistence(
        df=sensor_array[snsr],
        length=snsr_params['persist'],
        output_grp=True,
    )
    # Interpolate over the flagged points before further processing.
    sensor_array[snsr] = rules_detect.interpolate(df=sensor_array[snsr])
print('Rules based detection complete.\n')

#### Detect Calibration Events
]
# NOTE(review): the leading ']' above closes a 'sites = [...]' list whose
# opening is outside this chunk — confirm against the full file.
year = [2014, 2015, 2016, 2017, 2018, 2019]
sensor = ['temp', 'cond', 'ph', 'do']
site_detect = []
rules_metrics = []
for j in range(0, len(sites)):
    site = sites[j]
    # BlackSmithFork has no 2014 data, so drop the first year for that site.
    # NOTE(review): year.pop(0) mutates the shared 'year' list for all
    # subsequent sites as well — presumably intentional only if this site is
    # processed last; verify ordering of 'sites'.
    if site == 'BlackSmithFork':
        year.pop(0)
    print(
        "\n\n###########################################\n#Processing data for site: " + sites[j] + ".\n###########################################")
    df_full, sensor_array = anomaly_utilities.get_data(sites[j], sensor, year, path="../LRO_data/")

    # RULES BASED ANOMALY DETECTION #
    #########################################
    # Per-sensor counts of points flagged by each rule for this site.
    range_count = []
    persist_count = []
    # NOTE(review): 'methods_output' is not defined anywhere in this chunk —
    # possibly a stale reference left over from a refactor of the module-level
    # 'rules_metrics' list above; confirm against the full file.
    methods_output.rules_metrics = []
    # size = []
    for i in range(0, len(sensor_array)):
        # Apply the per-site, per-sensor min/max range thresholds.
        sensor_array[sensor[i]], r_c = rules_detect.range_check(
            sensor_array[sensor[i]], site_params[j][i].max_range, site_params[j][i].min_range)
        range_count.append(r_c)
        # Flag values repeating longer than the configured persistence length.
        # NOTE(review): chunk is truncated here — 'p_c' is never appended to
        # 'persist_count' within the visible code; the rest of the loop body is
        # not visible.
        sensor_array[sensor[i]], p_c = rules_detect.persistence(
            sensor_array[sensor[i]], site_params[j][i].persist)