def get_site_list(base_url, hts, start=3400, stop=4000):
    """Return a DataFrame of sites from the given hts file.

    Parameters
    ----------
    base_url : str
        Base URL of the Hilltop web service.
    hts : str
        Server hts file name (e.g. 'WaterUse.hts').
    start, stop : int or None, optional
        Positional slice bounds used to restrict processing to a subset of
        sites. The defaults reproduce the previously hard-coded [3400:4000]
        window; pass ``start=None, stop=None`` to process all sites.

    Returns
    -------
    pandas.DataFrame
        The (possibly sliced) site list as returned by ``ws.site_list``.
    """
    all_sites = ws.site_list(base_url, hts)
    # Slicing with None bounds is a no-op, so the "all sites" case needs
    # no special handling.
    return all_sites[start:stop]
def test_site_mtypes(hts):
    """Fetch exactly one record for the first measurement of a sample site."""
    site_df = site_list(base_url, hts)
    target_site = site_df.iloc[2].SiteName
    first_mtype = measurement_list(base_url, hts, target_site).reset_index().iloc[0]
    # Use the same timestamp for both bounds so exactly one record comes back.
    start = str(first_mtype.From)
    ts = get_data(base_url, hts, target_site, first_mtype.Measurement,
                  from_date=start, to_date=start)
    assert len(ts) == 1
def get_site():
    """Prompt the user for a WAP until a valid one is entered.

    Returns
    -------
    str
        A WAP name that exists in the WaterUse.hts site list.
    """
    # Fetch the site list once, outside the retry loop: it does not change
    # between attempts and each call is a full web-service round trip.
    sites = ws.site_list('http://wateruse.ecan.govt.nz', 'WaterUse.hts')
    while True:
        site_entry = input("Enter the WAP of interest: ")
        if site_entry in sites.values:
            return site_entry
        print("The WAP you have entered is not in the WaterUse.hts file. Please try again.")
########################################### ### Parameters base_url = 'http://wateruse.ecan.govt.nz' hts = 'WQAll.hts' site = 'SQ31045' measurement = 'Total Phosphorus' from_date = '1983-11-22 10:50' to_date = '2018-04-13 14:05' dtl_method = 'trend' ########################################## ### Examples ## Get site list sites = site_list(base_url, hts) ## Get the measurement types for a specific site mtype_df1 = measurement_list(base_url, hts, site) ## Get the water quality parameter data (only applies to WQ data) mtype_df2 = wq_sample_parameter_list(base_url, hts, site) ## Get the time series data for a specific site and measurement type tsdata1 = get_data(base_url, hts, site, measurement, from_date=from_date, to_date=to_date) ## Get extra WQ time series data (only applies to WQ data) tsdata2, extra2 = get_data(base_url, hts, site, measurement, from_date=from_date, to_date=to_date, parameters=True) ## Get WQ sample data (only applies to WQ data) tsdata3 = get_data(base_url, hts, site, 'WQ Sample', from_date=from_date, to_date=to_date)
def test_site_list():
    """The full site list (with locations) should hold well over 9000 sites."""
    assert len(site_list(base_url, hts, True)) > 9000
    # Pull the candidate sites from the SQL server (function def is above this
    # fragment; `server`, `hydro_db`, etc. are its parameters or module config).
    sites = mssql.rd_sql(server, hydro_db, site_table, cols, where_in=where_in)
    # Keep only sites whose ExtSiteID looks like a WAP, e.g. 'BX23/0456'.
    # NOTE(review): the pattern should be a raw string (r'[A-Z]+\d\d/\d+') to
    # avoid invalid-escape warnings on newer Python — confirm before changing.
    sites1 = sites[sites.ExtSiteID.str.contains('[A-Z]+\d\d/\d+')].copy()
    return sites1

##############################################
### Query

# Record numbers of interest come from a local CSV; look up their consents.
sites1 = pd.read_csv(csv_path)
crc_dict = {'RecordNumber': sites1.RecordNumber.tolist()}
crc1 = rd_crc(crc_dict)

# Sites available on the Hilltop server; convert Hilltop names to ExtSiteIDs
# and drop any that could not be converted.
ht_sites = ws.site_list(ht_url, url_hts)
ht_sites['ExtSiteID'] = util.convert_site_names(ht_sites.SiteName)
ht_sites1 = ht_sites.dropna()
# Consents joined to Hilltop sites; one row per unique Hilltop SiteName.
ht_crc_wap = pd.merge(crc1, ht_sites1, on='ExtSiteID').drop_duplicates('SiteName')
ht_waps = ht_crc_wap[['SiteName']]

################################################
### Export meter name table

export_dir = r'C:\Users\hamishg\OneDrive - Environment Canterbury\Documents\_Projects\git\WaterUseQA\CreateTimeSeriesPlots'
meter_name_csv = 'list.csv'
@author: KurtV
"""

# import python modules
from hilltoppy import web_service as ws
import pandas as pd

# Set URL
base_url = 'http://wateruse.ecan.govt.nz'

# Set server hts file (Server hts name, not Hilltop01 hts name!)
# WQGroundwater.hts = \Hilltop01\Data\WQGroundwaterCombined.dsn
hts = 'WQAll.hts'

# Generate a dataframe of all sites in the server file with location data
hts_sites_df = ws.site_list(base_url, hts, location=True)

# Export hts sites table to csv file
hts_sites_df.to_csv('Sites in hts file.csv', index=False)

# Generate a list of all sites in the server file, sorted case-insensitively
hts_sites_list = sorted(hts_sites_df.SiteName.tolist(), key=str.lower)

# Generate a measurement summary and sample parameter summary for all sites
# Initialise empty lists
site_measurement_summary = []
site_sample_parameter_summary = []

# Loop through all sites in the hts file (loop body continues below this
# fragment).
for site in hts_sites_list:

    # Call site-specific measurement list
    measurement_summary = ws.measurement_list(base_url, hts, site)
# Keep only takes with a positive allocated rate.
up_takes2 = up_takes1[up_takes1.AllocatedRate > 0].copy()
# Per-WAP total allocation, then each take's share of that total.
up_takes2['AllocatedRateSum'] = up_takes2.groupby(
    'Wap')['AllocatedRate'].transform('sum')
up_takes2['AllocatedRateRatio'] = up_takes2[
    'AllocatedRate'] / up_takes2['AllocatedRateSum']
# Fraction of each WAP's allocation that is surface water.
wap_ratios = up_takes2[up_takes2.HydroGroup == 'Surface Water'].groupby(
    'Wap')['AllocatedRateRatio'].sum()
wap_ratios.index.name = 'ExtSiteID'

####################################
### Pull out the Hilltop usage data

## Determine the sites available in Hilltop
ht_sites = ws.site_list(param['Input']['hilltop_base_url'],
                        param['Input']['hilltop_hts'])
ht_sites['Wap'] = convert_site_names(ht_sites.SiteName)
# Keep Hilltop sites that map to a WAP of interest, excluding the configured
# Browns Rock WAPs.
ht_sites1 = ht_sites[
    ht_sites['Wap'].isin(wap_ratios.index) &
    ~ht_sites['Wap'].isin(param['Input']['browns_rock_waps'])].copy(
    )
ht_sites1.rename(columns={'SiteName': 'Site'}, inplace=True)

mtype_list = []
for site in ht_sites1.Site:
    # Retry the web-service call up to 10 times (loop continues past this
    # fragment; the measurement_list call below is truncated here).
    timer = 10
    while timer > 0:
        try:
            m1 = ws.measurement_list(
                param['Input']['hilltop_base_url'],
def test_site_list_with_collection(data):
    """A collection-filtered site list should still return more than 40 sites."""
    result = site_list(data['base_url'], data['hts'],
                       collection=data['collection'])
    assert len(result) > 40
def test_site_list(data):
    """The site list (with locations) should return more than 1000 sites."""
    assert len(site_list(data['base_url'], data['hts'], True)) > 1000
# Minimum record length (about 5 years) and allowed gap for a station to be used.
min_n_days = 5 * 365
min_missing_days = 14

########################################
### Get data

## Nasa data
ds1 = xr.open_dataset(nc1)
# Daily totals from the NASA variable; the * 3 is presumably a unit/interval
# scaling factor (e.g. sub-daily accumulation) — TODO confirm against nc1.
da1 = ds1[nasa_mtype].resample(time='D', closed='right',
                               label='left').sum('time') * 3
# Release the full dataset before the station loop to keep memory down.
del ds1

## Met stations
sites = ws.site_list(ecan_base_url, hts_name, True)
sites.rename(columns={'SiteName': 'Site'}, inplace=True)

# Collect the measurement summary for every station, then keep only stations
# whose record spans at least min_n_days.
mtypes = []
for s in sites['Site'].values:
    mtypes.append(ws.measurement_list(ecan_base_url, hts_name, s, mtype))

mtypes_df = pd.concat(mtypes).reset_index().drop('DataType', axis=1)
mtypes_df['n_days'] = (mtypes_df['To'] - mtypes_df['From']).dt.days
mtypes_df1 = mtypes_df[mtypes_df['n_days'] >= min_n_days].copy()

# Stations with sufficiently long records, joined back to their locations.
sites1 = pd.merge(sites, mtypes_df1, on='Site')

# Accumulator for the time-series pull (loop continues past this fragment).
ts_data = []