def get_measurement_list(site, base_url='http://wateruse.ecan.govt.nz',
                         hts='WaterUse.hts', measurements=None):
    """Extract the measurement types available for a site in a Hilltop hts file.

    Queries the Hilltop web service for the measurement summary of *site*
    and keeps only the volume/flow-related measurement types.

    Parameters
    ----------
    site : str
        Hilltop site name to query.
    base_url : str, optional
        Root URL of the Hilltop web service. Defaults to the ECan
        water-use server, preserving the original behavior.
    hts : str, optional
        Name of the hts file on the server (default 'WaterUse.hts').
    measurements : list of str, optional
        Measurement names to keep. Defaults to the standard water-use
        volume/flow set used previously.

    Returns
    -------
    pandas.DataFrame
        The measurement summary restricted to the requested types.
    """
    if measurements is None:
        # Default filter matches the original hard-coded set.
        measurements = ['Compliance Volume', 'Water Meter', 'Volume',
                        'Volume [Flow]', 'Volume [Average Flow]']
    # ws.measurement_list returns an indexed frame; reset_index turns the
    # 'Measurement' index level into a plain column we can filter on.
    raw_list = ws.measurement_list(base_url, hts, site)
    raw_list2 = raw_list.reset_index()
    filtered_list = raw_list2.loc[raw_list2['Measurement'].isin(measurements)]
    return filtered_list
def test_site_mtypes(hts):
    """A zero-length window (from == to) should return exactly one record."""
    all_sites = site_list(base_url, hts)
    target_site = all_sites.iloc[2].SiteName
    first_mtype = measurement_list(base_url, hts, target_site).reset_index().iloc[0]
    # Use the record's start timestamp as both ends of the request window.
    start = str(first_mtype.From)
    single_point = get_data(base_url, hts, target_site, first_mtype.Measurement,
                            from_date=start, to_date=start)
    assert len(single_point) == 1
# Connection details for the ECan Hilltop water-quality web service.
base_url = 'http://wateruse.ecan.govt.nz'
hts = 'WQAll.hts'
site = 'SQ31045'
measurement = 'Total Phosphorus'
from_date = '1983-11-22 10:50'
to_date = '2018-04-13 14:05'
# Method used for converting censored (below-detection-limit) values;
# presumably a Hilltop-py dtl_method option -- confirm against its docs.
dtl_method = 'trend'

##########################################
### Examples

## Get site list
sites = site_list(base_url, hts)

## Get the measurement types for a specific site
mtype_df1 = measurement_list(base_url, hts, site)

## Get the water quality parameter data (only applies to WQ data)
mtype_df2 = wq_sample_parameter_list(base_url, hts, site)

## Get the time series data for a specific site and measurement type
tsdata1 = get_data(base_url, hts, site, measurement, from_date=from_date, to_date=to_date)

## Get extra WQ time series data (only applies to WQ data)
tsdata2, extra2 = get_data(base_url, hts, site, measurement, from_date=from_date, to_date=to_date, parameters=True)

## Get WQ sample data (only applies to WQ data)
tsdata3 = get_data(base_url, hts, site, 'WQ Sample', from_date=from_date, to_date=to_date)

## Convert values under the detection limit to numeric values (only applies to WQ data)
tsdata4, extra4 = get_data(base_url, hts, site, measurement, from_date=from_date, to_date=to_date, parameters=True, dtl_method=dtl_method)
def test_measurement_list():
    """The configured site should expose more than 30 measurement types."""
    available_mtypes = measurement_list(base_url, hts, site)
    assert len(available_mtypes) > 30
hts_sites_df = ws.site_list(base_url, hts, location=True) # Export hts sites table to csv file hts_sites_df.to_csv('Sites in hts file.csv', index=False) # Generate a list of all sites in the server file hts_sites_list = sorted(hts_sites_df.SiteName.tolist(), key=str.lower) # Generate a measurement summary and sample parameter summary for all sites # Initialise empty lists site_measurement_summary = [] site_sample_parameter_summary = [] # Loop through all sites in the hts file for site in hts_sites_list: # Call site-specific measurement list measurement_summary = ws.measurement_list(base_url, hts, site) # Append list to measurement summary table site_measurement_summary.append(measurement_summary) # Try calling the site-specific sample parameter list try: sample_parameter_summary = ws.wq_sample_parameter_list( base_url, hts, site) # Append list to sample parameter summary table site_sample_parameter_summary.append(sample_parameter_summary) # Some sites have measurements but no sample parameters except ValueError: continue # Concatenate summary lists into pandas dataframes site_measurement_summary_df = pd.concat(site_measurement_summary, sort=False) site_sample_parameter_summary_df = pd.concat(site_sample_parameter_summary,
param['Input']['hilltop_hts']) ht_sites['Wap'] = convert_site_names(ht_sites.SiteName) ht_sites1 = ht_sites[ ht_sites['Wap'].isin(wap_ratios.index) & ~ht_sites['Wap'].isin(param['Input']['browns_rock_waps'])].copy( ) ht_sites1.rename(columns={'SiteName': 'Site'}, inplace=True) mtype_list = [] for site in ht_sites1.Site: timer = 10 while timer > 0: try: m1 = ws.measurement_list( param['Input']['hilltop_base_url'], param['Input']['hilltop_hts'], site) break except Exception as err: err1 = err timer = timer - 1 if timer == 0: raise ValueError(err1) else: print(err1) sleep(3) mtype_list.append(m1) mtypes = pd.concat(mtype_list).reset_index() mtypes1 = mtypes[mtypes.To >= from_date]
def test_measurement_list(data):
    """The fixture-configured site should expose more than 6 measurement types."""
    mtypes = measurement_list(data['base_url'], data['hts'], data['site'])
    assert len(mtypes) > 6
## Nasa data ds1 = xr.open_dataset(nc1) da1 = ds1[nasa_mtype].resample(time='D', closed='right', label='left').sum('time') * 3 del ds1 ## Met stations sites = ws.site_list(ecan_base_url, hts_name, True) sites.rename(columns={'SiteName': 'Site'}, inplace=True) mtypes = [] for s in sites['Site'].values: mtypes.append(ws.measurement_list(ecan_base_url, hts_name, s, mtype)) mtypes_df = pd.concat(mtypes).reset_index().drop('DataType', axis=1) mtypes_df['n_days'] = (mtypes_df['To'] - mtypes_df['From']).dt.days mtypes_df1 = mtypes_df[mtypes_df['n_days'] >= min_n_days].copy() sites1 = pd.merge(sites, mtypes_df1, on='Site') ts_data = [] for index, row in sites1.iterrows(): ts_data.append( ws.get_data(ecan_base_url, hts_name, row['Site'], row['Measurement'])) ts_data_df = pd.concat(ts_data)