import hydrogeosines as hgs
import matplotlib.pyplot as plt
import numpy as np

#%%  Testing MVC principal
# Build the Site model and load the combined GW/BP/ET record for Death Valley.
site_dv = hgs.Site('death valley', geoloc=[-116.471360, 36.408130, 688])
site_dv.import_csv(
    'tests/data/death_valley/death_valley.csv',
    input_category=["GW", "BP", "ET"],
    utc_offset=0,
    unit=["m", "m", "nstr"],
    how="add",
    check_duplicates=True,
)

# Wide view of the raw record: one column per (category, location) pair.
data = site_dv.data
raw = data.pivot(index='datetime', columns=['category', 'location'], values='value')

#%%
# Add an Earth-tide series (component 'nstr' — presumably nano-strain; confirm
# against the hgs docs).
site_dv.add_ET(et_comp='nstr')
# heads = data.pivot(index='datetime', columns=['category', 'location'], values='value')

#%% Processing
print("Correct heads ...")
processor = hgs.Processing(site_dv)

# Correct the groundwater heads using the ET time series with an 8 h lag.
corrected = processor.GW_correct(et_method='ts', lag_h=8)

#%%
# Corrected water levels ('WLc') for location BLM-1.
print(corrected['GW_correct'][('BLM-1', 'all')][0]['WLc'])
#%% Example #2
#%%  Testing MVC principal
# Death Valley site using the Rau et al. (2021) dataset.
death_valley = hgs.Site('death valley', geoloc=[-116.471360, 36.408130, 688])
death_valley.import_csv('tests/data/death_valley/Rau_et_al_2021.csv',
                        input_category=["GW", "BP", "ET"],
                        utc_offset=0,
                        unit=["m", "m", "nstr"],
                        how="add",
                        check_duplicates=True)

#%%
# tmp = death_valley.data.hgs.get_loc_unit('ET')

#%% Processing
# create an instance of Processing for the death_valley site
process = hgs.Processing(death_valley)
process.info()

#%%
# test hals method (harmonic least squares)
hals_results = process.hals()

#%% Output
csiro_output = hgs.Output(hals_results)  # process.results container or results

# for visualization
csiro_output.plot(
    folder='export'
)  # possible different plotting style methods, e.g. simple, report, etc
# BUG FIX: the export call below was missing its closing parenthesis,
# leaving this script with a syntax error.
csiro_output.export(
    folder='export'
)
#%% Example #3
import hydrogeosines as hgs
import numpy as np
import pandas as pd

#%%  Testing MVC principal
# Fowlers Gap site: one barometric, three groundwater and one ET record.
fowlers = hgs.Site('Fowlers Gap', geoloc=[141.73099, -31.2934, 160])
fowlers.import_csv(
    'tests/data/fowlers_gap/acworth_all.csv',
    input_category=['BP', 'GW', 'GW', 'GW', 'ET'],
    utc_offset=10,
    unit=['m', 'm', 'm', 'm', 'm**2/s**2'],
    loc_names=["Baro", "FG822-1", "FG822-2", "Smith", "ET"],
    how="add",
    check_duplicates=True,
)

#%%
# Chainable filters are also available, e.g.:
# .decimate(2).by_dates(start='2015-11-01', stop='2016-02-01').by_gwloc("FG822-2")
process = hgs.Processing(fowlers)


#%%
process.info()

#%% test gw_correct
# Correct GW heads using HALS-derived Earth-tide components and a 24 h lag.
gw_correct_results = process.GW_correct(lag_h=24, et_method='hals')

#%%
# Barometric efficiency in the time domain, computed with every method.
be_results = process.BE_time(method="all")

#%%
correct_output = hgs.Output(gw_correct_results)
fig = correct_output.plot(folder="export")
#%%  Testing MVC principal
# Death Valley dataset with a duplicated BLM-1 groundwater record.
site = hgs.Site('death valley', geoloc=[-116.471360, 36.408130, 688])
site.import_csv(
    'tests/data/death_valley/BLM-1_double.csv',
    input_category=["GW", "GW", "BP", "ET"],
    utc_offset=0,
    unit=["m", "m", "m", "nstr"],
    how="add",
    check_duplicates=True,
)

#%%  Testing MVC principal
# Alternative dataset (Thirlmere Lakes), kept for reference:
# site = hgs.Site('thirlmere', geoloc=[150.543527, -34.229377, 289.576])
# site.import_csv('tests/data/thirlmere_lakes/GW075409.1.2.csv',
#                         input_category=["GW","BP","ET"], utc_offset=10, unit=["m","m","nstr"],
#                         how="add", check_duplicates=True)

#%% Processing
# create an instance of Processing for this site
process = hgs.Processing(site)

# harmonic least squares; update=True presumably stores the result on the
# processing object for reuse — confirm against the hgs docs
hals_results = process.hals(update=True)

#%% estimate hydraulic properties ...
hyd_prop = process.K_Ss_estimate(loc='BLM-2', scr_len=10, case_rad=0.127,
                                 scr_rad=0.127, scr_depth=78, update=True)
# print(hyd_prop)

#%% quantify BE using the frequency domain approach
be_freq_2 = process.BE_freq(method="rau", freq_method='hals')
# print(be_freq_2)

# output = hgs.Output(process)
# output.view(): HALS as Plot (Y: Amplitude, X: Phase)
# output.export(): HALS as CSV
                        # NOTE(review): truncated fragment — the opening
                        # `example_site.import_csv('...',` line for this call
                        # was lost when the examples were concatenated.
                        loc_names=["Loc_A", "Loc_B", "Loc_C"],
                        header=None,
                        how="add",
                        check_duplicates=True)

# Load the barometric-pressure record for the same example site.
example_site.import_csv('tests/data/notebook/BP_record.csv',
                        input_category="BP",
                        utc_offset=10,
                        unit="m",
                        loc_names=["Baro"],
                        header=None,
                        how="add",
                        check_duplicates=True)
#%%
# Restrict processing to two groundwater locations of the example site.
locations = ["Loc_A", "Loc_B"]
process_loc_AB = hgs.Processing(example_site).by_gwloc(locations)
# NOTE(review): `make_regular` and `BP_align` are not defined in this view —
# presumably imported from hgs internals in the original example; verify.
regular = process_loc_AB.site.data.copy()
regular = make_regular(regular,
                       inter_max=5000,
                       part_min=60,
                       inter_max_total=40)
BP_align_out = BP_align(regular,
                        inter_max=5000,
                        part_min=60,
                        inter_max_total=40)
#%%
# Most common frequency (MCF)
# NOTE(review): `test` is not defined in this view — it comes from a part of
# the original example that was cut off.
mcf = test.copy()
mcf = mcf.hgs.filters.drop_nan  # only needed for test data due to the ignore_index in append
#TODO: replace non_valid entries? duplicates already handled at import
spl_freqs = mcf.hgs.spl_freq_groupby
#%% Example #6
                      # NOTE(review): orphaned tail of a truncated import_csv call.
                      check_duplicates=True)

# Append the short CSIRO barometric record to the same site.
csiro_site.import_csv(
    'tests/data/csiro/test_sample/CSIRO_BP_short.csv',
    input_category="BP",
    utc_offset=10,
    unit="mbar",
    loc_names="Baro",
    how="add",
    check_duplicates=True,
)

#%%
# Add an Earth-tide series (component 'g', category 4).
csiro_site.add_ET(et_comp='g', et_cat=4)

#%% Processing
# Processing instance covering the whole site ...
process_csiro = hgs.Processing(csiro_site)

# ... and one restricted to two groundwater locations.
locations = ["Loc_A", "Loc_B"]
process_csiro_SiteA_B = hgs.Processing(csiro_site).by_gwloc(locations)

#%%
# make_regular() attaches a regularly sampled data container; several methods
# reuse it automatically, which reduces computation time.
process_csiro = hgs.Processing(csiro_site).by_gwloc(locations).make_regular()

# harmonic least squares, if needed:
# hals_results = process_csiro.hals()

# barometric efficiency in the time domain, all methods
be_results = process_csiro.BE_time(method="all")
#%% Example #7
# NOTE(review): `regular`, `data2` and `csiro_site` are defined in a part of
# the original example that is not shown here.
pivot = regular.hgs.pivot

# demonstrate Most common frequency (MCF) (included in make_regular)
mcf = data2.copy()
mcf = mcf.hgs.filters.drop_nan  # used within hgs processing workflows

#TODO: replace non_valid entries? duplicates already handled at import
# Sample frequency for each group
spl_freqs = mcf.hgs.spl_freq_groupby

# Resampling for each group
mcf = mcf.hgs.resample_by_group(spl_freqs)

#%% Processing
# create Instance of Processing with csiro_site
process_csiro = hgs.Processing(csiro_site)

# create Instance of Processing for specific locations of csiro_site
locations = ["Loc_A", "Loc_D"]
process_csiro_SiteA = hgs.Processing(csiro_site).by_gwloc(locations)

# add a regularly sampled data container to the processing object
# it is automatically reused in some of the methods, reducing computation times
locations = ["Loc_A", "Loc_B"]
process_csiro = hgs.Processing(csiro_site).by_gwloc(
    locations).RegularAndAligned()

# test hals method
hals_results = process_csiro.hals()

# test be method
from copy import deepcopy

import matplotlib.pyplot as plt

# Fowlers Gap example: one barometric, three groundwater and one ET record.
fowlers_site = hgs.Site('Fowlers Gap', geoloc=[141.73099, -31.2934, 160])
fowlers_site.import_csv('tests/data/fowlers_gap/acworth_short.csv',
                        input_category=['BP', 'GW', 'GW', 'GW', 'ET'],
                        utc_offset=10,
                        unit=['m', 'm', 'm', 'm', 'm**2/s**2'],
                        loc_names=["Baro", "FG822-1", "FG822-2", "Smith", "ET"],
                        how="add",
                        check_duplicates=True)

#%%
# Full-site processor plus three filtered variants; make_regular() adds a
# regularly sampled data container that several methods reuse automatically,
# reducing computation time.
process = hgs.Processing(fowlers_site)

locations = ["FG822-1", "FG822-2"]
process_sub_loc = hgs.Processing(fowlers_site).by_gwloc(locations).make_regular()

process_sub_date = hgs.Processing(fowlers_site).by_dates('2014-12-01',
                                                         '2014-12-05')
process_sub_date_loc = (hgs.Processing(fowlers_site)
                        .by_gwloc("FG822-2")
                        .by_dates('2014-11-01', '2014-12-05'))

#%%

# test hals method
#%% Example #9
import hydrogeosines as hgs
import numpy as np
import pandas as pd

#%%  Testing MVC principal
# Fowlers Gap data loaded into a site labelled 'csiro' (label kept from the
# original example).
fowlers = hgs.Site('csiro', geoloc=[141.73099, -31.2934, 160])
fowlers.import_csv(
    'tests/data/fowlers_gap/acworth_all.csv',
    input_category=['BP', 'GW', 'GW', 'GW', 'ET'],
    utc_offset=10,
    unit=['m', 'm', 'm', 'm', 'm**2/s**2'],
    loc_names=["Baro", "FG822-1", "FG822-2", "Smith", "ET"],
    how="add",
    check_duplicates=True,
)

#%% Processing
# Restrict processing to two groundwater locations.
process = hgs.Processing(fowlers).by_gwloc(['FG822-2', 'Smith'])

# fast Fourier transform; update=True presumably stores the result on the
# processing object for reuse — confirm against the hgs docs
fft_results = process.fft(update=True)

#%% Output
csiro_output = hgs.Output(fft_results)

# for visualization
csiro_output.plot(folder='export')

#%%
test = csiro_output.export(folder='export')
#%% Example #10
# Author: Daniel  (tail of a module docstring truncated during concatenation)

import hydrogeosines as hgs
import numpy as np
import pandas as pd

#%%  Testing MVC principal
# Fowlers Gap short record loaded into a site labelled 'csiro'.
fowlers = hgs.Site('csiro', geoloc=[141.73099, -31.2934, 160])
fowlers.import_csv(
    'tests/data/fowlers_gap/acworth_short.csv',
    input_category=['BP', 'GW', 'GW', 'GW', 'ET'],
    utc_offset=10,
    unit=['m', 'm', 'm', 'm', 'm**2/s**2'],
    loc_names=["Baro", "FG822-1", "FG822-2", "Smith", "ET"],
    how="add",
    check_duplicates=True,
)

#%%
process = hgs.Processing(fowlers)

#%% test gw_correct
# Correct GW heads for barometric effects only (et_method=None disables the
# Earth-tide correction), using a 24 h lag.
gw_correct_results = process.GW_correct(lag_h=24, et_method=None)

#%%
export = hgs.Output(gw_correct_results).export(folder='export')

#%%
figs = hgs.Output(gw_correct_results).plot(folder="export")
#%% Example #11
                      # NOTE(review): truncated fragment — the opening
                      # `csiro_site.import_csv('...',` line for this call was
                      # lost when the examples were concatenated.
                      how="add",
                      check_duplicates=True)

# Append the short CSIRO barometric record to the same site.
csiro_site.import_csv('tests/data/csiro/test_sample/CSIRO_BP_short.csv',
                      input_category="BP",
                      utc_offset=10,
                      unit="mbar",
                      loc_names="Baro",
                      how="add",
                      check_duplicates=True)

data = csiro_site.data

# Add an Earth-tide series with default settings.
csiro_site.add_ET()
process_csiro = hgs.Processing(csiro_site)
# gw_correct_results = process_csiro.GW_correct(lag_h=24, fqs=None)

# Wide view of the regularly sampled data container.
pivot = process_csiro.data_regular.hgs.pivot
pivot.head(3)

# Drop every column whose 'category' level is ET.
is_et = pivot.columns.get_level_values("category") == "ET"
pivot_noET = pivot.loc[:, ~is_et]

#%%
# NOTE(review): this final example is cut off mid-call — the remaining
# import_csv arguments were lost when the file was truncated.
acworth_site = hgs.Site('acworth', geoloc=[141.762065, -31.065781, 160])

acworth_site.import_csv('tests/data/fowlers_gap/acworth_gw.csv',
                        input_category=["GW", "BP", "GW"],
                        utc_offset=10,
                        unit=["Cm", "mm", "M"],