Example #1
def res_stats_test():
    import os
    import numpy as np
    import pyemu
    from pyemu import Pst, pst_utils
    # residual functionality testing
    pst_dir = os.path.join("pst")

    p = pyemu.pst_utils.generic_pst(["p1"],["o1"])
    try:
        p.get_res_stats()
    except Exception:
        pass
    else:
        raise Exception("get_res_stats() should fail for a Pst with no residuals")

    p = Pst(os.path.join(pst_dir, "pest.pst"))
    phi_comp = p.phi_components
    #print(phi_comp)
    df = p.get_res_stats()
    assert np.abs(df.loc["rss","all"] - p.phi) < 1.0e-6,"{0},{1}".format(df.loc["rss","all"],p.phi)
    for pc in phi_comp.keys():
        assert phi_comp[pc] == p.phi_components[pc]
Example #2
# module-level imports assumed for this helper
import pandas as pd
from pyemu import Pst


def add_pi_obj_func(pst, obj_func_dict=None, out_pst_name=None):
    if not isinstance(pst, Pst):
        pst = Pst(pst)
    if obj_func_dict is None:
        obj_func_dict = {name: 1.0 for name in pst.adj_par_names}
    pi_equation = ''
    for name, coef in obj_func_dict.items():
        assert (name in pst.adj_par_names
                ), "obj func component not in adjustable pars:" + name
        if coef < 0.0:
            # the leading '-' already carries the sign, so use the magnitude
            pi_equation += ' - {0}*{1}'.format(abs(coef), name)
        else:
            pi_equation += ' + {0}*{1}'.format(coef, name)
    pi_equation += ' = 0.0'
    pilbl = "pi_obj_func"
    pi_df = pd.DataFrame(
        {
            "pilbl": pilbl,
            "equation": pi_equation,
            "weight": 0.0,
            "obgnme": pilbl
        },
        index=[pilbl])

    if pst.prior_information.shape[0] == 0:
        pst.prior_information = pi_df
    else:
        assert pilbl not in pst.prior_information.index
        # append by enlargement
        pst.prior_information.loc[pilbl, :] = pi_df.loc[pilbl, :]

    if out_pst_name is not None:
        pst.write(out_pst_name)

    return pst
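
A minimal usage sketch for the helper above; the control file name "model.pst" is hypothetical:

# a hedged sketch: assumes "model.pst" exists and pyemu is importable
pst_with_pi = add_pi_obj_func("model.pst", out_pst_name="model_pi.pst")
# the new prior-information equation is stored under the label "pi_obj_func"
print(pst_with_pi.prior_information.loc["pi_obj_func", "equation"])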
Example #3
# module-level imports assumed for this helper
import pandas as pd
from pyemu import Pst


def add_pi_obj_func(pst,obj_func_dict=None,out_pst_name=None):
    if not isinstance(pst,Pst):
        pst = Pst(pst)
    if obj_func_dict is None:
        obj_func_dict = {name:1.0 for name in pst.adj_par_names}
    pi_equation = ''
    for name,coef in obj_func_dict.items():
        assert(name in pst.adj_par_names),"obj func component not in adjustable pars:"+name
        if coef < 0.0:
            # the leading '-' already carries the sign, so use the magnitude
            pi_equation += ' - {0}*{1}'.format(abs(coef),name)
        else:
            pi_equation += ' + {0}*{1}'.format(coef, name)
    pi_equation += ' = 0.0'
    pilbl = "pi_obj_func"
    pi_df = pd.DataFrame({"pilbl":pilbl,"equation":pi_equation,"weight":0.0,"obgnme":pilbl},index=[pilbl])

    if pst.prior_information.shape[0] == 0:
        pst.prior_information = pi_df
    else:
        assert pilbl not in pst.prior_information.index
        # append by enlargement
        pst.prior_information.loc[pilbl,:] = pi_df.loc[pilbl,:]

    if out_pst_name is not None:
        pst.write(out_pst_name)

    return pst
Example #4
def res_stats_test():
    import os
    import numpy as np
    import pyemu
    from pyemu import Pst, pst_utils
    # residual functionality testing
    pst_dir = os.path.join("pst")

    p = pyemu.pst_utils.generic_pst(["p1"], ["o1"])
    try:
        p.get_res_stats()
    except Exception:
        pass
    else:
        raise Exception("get_res_stats() should fail for a Pst with no residuals")

    p = Pst(os.path.join(pst_dir, "pest.pst"))
    phi_comp = p.phi_components
    # print(phi_comp)
    df = p.get_res_stats()
    assert np.abs(df.loc["rss", "all"] - p.phi) < 1.0e-6, "{0},{1}".format(df.loc["rss", "all"], p.phi)
    for pc in phi_comp.keys():
        assert phi_comp[pc] == p.phi_components[pc]
Example #5
def parfile_test():
    import os
    import numpy as np
    import pandas as pd
    from pyemu import MonteCarlo, Ensemble, ParameterEnsemble, Pst, Cov

    jco = os.path.join("pst", "pest.jcb")
    pst = jco.replace(".jcb", ".pst")

    mc = MonteCarlo(jco=jco, pst=pst)
    mc.pst.parameter_data.loc[mc.pst.par_names[1], "scale"] = 0.001
    mc.draw(10)
    mc.parensemble.to_parfiles(os.path.join("temp", "testpar"))

    pst = Pst(pst)
    pst.parameter_data = pst.parameter_data.iloc[1:]
    # add a dummy parameter row so the realizations no longer match the control file exactly
    pst.parameter_data.loc["test", "parnme"] = "test"

    parfiles = [
        os.path.join("temp", f) for f in os.listdir("temp") if "testpar" in f
    ]
    rnames = ["test{0}".format(i) for i in range(len(parfiles))]

    pe = ParameterEnsemble.from_parfiles(pst=pst,
                                         parfile_names=parfiles,
                                         real_names=rnames)
Example #6
def res_test():
    import os
    import numpy as np
    from pyemu import Pst, pst_utils
    # residual functionality testing
    pst_dir = os.path.join('..', 'tests', "pst")

    p = Pst(os.path.join(pst_dir, "pest.pst"))
    phi_comp = p.phi_components
    assert "regul_p" in phi_comp
    assert "regul_m" in phi_comp

    p.adjust_weights_resfile()

    d = np.abs(p.phi - p.nnz_obs)
    assert d < 1.0E-5
    p.adjust_weights(obsgrp_dict={"head": 50})
    assert np.abs(p.phi_components["head"] - 50) < 1.0e-6

    # get()
    new_p = p.get()
    new_p.prior_information = p.prior_information
    new_file = os.path.join(pst_dir, "new.pst")
    new_p.write(new_file)

    p_load = Pst(new_file, resfile=p.resfile)
    for gname in p.phi_components:
        d = np.abs(p.phi_components[gname] - p_load.phi_components[gname])
        assert d < 1.0e-5
Example #7
def inf2():

    # non-pest
    import os
    import numpy as np
    from pyemu.mat import mat_handler as mhand
    from pyemu.pst import Pst
    from pyemu import Influence

    inpst = Pst(
        os.path.join("..", "verification", "Freyberg", "Freyberg_pp",
                     "freyberg_pp.pst"))

    pnames = inpst.par_names
    onames = inpst.obs_names
    npar = inpst.npar
    nobs = inpst.nobs
    j_arr = np.random.random((nobs, npar))
    parcov = mhand.Cov(x=np.eye(npar), names=pnames)
    obscov = mhand.Cov(x=np.eye(nobs), names=onames)
    jco = mhand.Jco.from_binary(inpst.filename.replace(".pst", ".jcb"))
    resf = inpst.filename.replace(".pst", ".rei")
    s = Influence(jco=jco, obscov=obscov, pst=inpst, resfile=resf)
    print(s.hat)
    print(s.observation_leverage)
    #v = s.studentized_res
    print(s.estimated_err_var)
    print(s.studentized_res)
Example #8
def reweight_test():
    import os
    import numpy as np
    from pyemu import Pst,pst_utils
    pst_dir = os.path.join("pst")
    p = Pst(os.path.join(pst_dir,"pest.pst"))
    obsgrp_dict = {"pred":1.0,"head":1.0,"conc":1.0}
    p.adjust_weights(obsgrp_dict=obsgrp_dict)
    assert np.abs(p.phi - 3.0) < 1.0e-5,p.phi

    obs = p.observation_data
    obs.loc[obs.obgnme=="pred","weight"] = 0.0
    assert np.abs(p.phi - 2.0) < 1.0e-5,p.phi

    obs_dict = {"pd_one":1.0,"pd_ten":1.0}
    p.adjust_weights(obs_dict=obs_dict)
    assert np.abs(p.phi - 4.0) < 1.0e-5,p.phi
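
The assertions above rely on Pst.adjust_weights rescaling the weights of each listed group (or observation) so that its contribution to phi equals the requested value. A minimal sketch of that scaling idea for a single group (hypothetical residuals, not pyemu's exact implementation):

import numpy as np

residuals = np.array([0.5, -1.2, 0.8])   # hypothetical residuals for one observation group
weights = np.array([1.0, 1.0, 1.0])
current_phi = float(np.sum((weights * residuals) ** 2))
target_phi = 1.0                          # e.g. obsgrp_dict={"head": 1.0}

# scaling every weight by sqrt(target/current) drives the group phi to the target
new_weights = weights * np.sqrt(target_phi / current_phi)
assert abs(float(np.sum((new_weights * residuals) ** 2)) - target_phi) < 1.0e-10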
Example #9
def flex_load_test():
    import os
    from pyemu import Pst,pst_utils
    pst_dir = os.path.join("pst")
    temp_dir = "temp"
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    # just testing all sorts of different pst files
    pst_files = os.listdir(pst_dir)
    exceptions = []
    load_fails = []
    for pst_file in pst_files:
        if pst_file.endswith(".pst") and not "missing" in pst_file:
            #if not pst_file.startswith("LPR"):
            #    continue
            print(pst_file)
            p = Pst(os.path.join(pst_dir, pst_file), flex=True)
            out_name = os.path.join(temp_dir, pst_file)
            print("write")
            p.write(out_name, update_regul=True)
            p = Pst(out_name)
           #  try:
           #      p = Pst(os.path.join(pst_dir,pst_file),flex=True)
           #  except Exception as e:
           #      exceptions.append(pst_file + " read fail: " + str(e))
           #      load_fails.append(pst_file)
           #      continue
           #  out_name = os.path.join(temp_dir,pst_file)
           #  print(out_name)
           # #p.write(out_name,update_regul=True)
           #  try:
           #      p.write(out_name,update_regul=True)
           #  except Exception as e:
           #      exceptions.append(pst_file + " write fail: " + str(e))
           #      continue
           #  print(pst_file)
           #  try:
           #      p = Pst(out_name)
           #  except Exception as e:
           #      exceptions.append(pst_file + " reload fail: " + str(e))
           #      continue

    #with open("load_fails.txt",'w') as f:
    #    [f.write(pst_file+'\n') for pst_file in load_fails]
    if len(exceptions) > 0:
        raise Exception('\n'.join(exceptions))
Example #10
def from_io_with_inschek_test():
    import os
    from pyemu import Pst, pst_utils
    # creation functionality
    dir = os.path.join("..", "verification", "10par_xsec", "template_mac")
    pst = Pst(os.path.join(dir, "pest.pst"))

    tpl_files = [os.path.join(dir, f) for f in pst.template_files]
    out_files = [os.path.join(dir, f) for f in pst.output_files]
    ins_files = [os.path.join(dir, f) for f in pst.instruction_files]
    in_files = [os.path.join(dir, f) for f in pst.input_files]

    new_pst = Pst.from_io_files(tpl_files, in_files,
                                ins_files, out_files,
                                pst_filename=os.path.join("temp", "test.pst"))
    print(new_pst.observation_data)
    return
Example #11
def pst_manip_test():
    import os
    from pyemu import Pst
    pst_dir = os.path.join("pst")
    org_path = os.path.join(pst_dir, "pest.pst")
    new_path = os.path.join("temp", "pest1.pst")
    pst = Pst(org_path)
    pst.control_data.pestmode = "regularisation"
    pst.write(new_path)
    pst = Pst(new_path)
    pst.svd_data.maxsing = 1
    pst.write(new_path)
Example #12
def pst_manip_test():
    import os
    from pyemu import Pst
    pst_dir = os.path.join('..', 'tests', "pst")
    org_path = os.path.join(pst_dir, "pest.pst")
    new_path = os.path.join(pst_dir, "pest1.pst")
    pst = Pst(org_path)
    pst.control_data.pestmode = "regularisation"
    pst.write(new_path)
    pst = Pst(new_path)

    pst.write(new_path, update_regul=True)
Example #13
def test_write_input_files():
    import os
    import shutil
    import numpy as np
    import pyemu
    from pyemu import Pst, pst_utils
    # creation functionality
    dir = os.path.join("..", "verification", "10par_xsec", "template_mac")
    if os.path.exists("temp_dir"):
        shutil.rmtree("temp_dir")
    shutil.copytree(dir,"temp_dir")
    os.chdir("temp_dir")
    pst = Pst(os.path.join("pest.pst"))
    pst.write_input_files()
    arr1 = np.loadtxt(pst.input_files[0])
    print(pst.parameter_data.parval1)
    pst.parameter_data.loc[:,"parval1"] *= 10.0
    pst.write_input_files()
    arr2 = np.loadtxt(pst.input_files[0])
    assert (arr1 * 10).sum() == arr2.sum()
    os.chdir("..")
Example #14
def res_test():
    import os
    import numpy as np
    from pyemu import Pst,pst_utils
    # residual functionality testing
    pst_dir = os.path.join("pst")

    p = Pst(os.path.join(pst_dir,"pest.pst"))
    phi_comp = p.phi_components
    assert "regul_p" in phi_comp
    assert "regul_m" in phi_comp

    p.adjust_weights_resfile()

    d = np.abs(p.phi - p.nnz_obs)
    assert d < 1.0E-5
    p.adjust_weights(obsgrp_dict={"head": 50})
    assert np.abs(p.phi_components["head"] - 50) < 1.0e-6

    # get()
    new_p = p.get()
    new_p.prior_information = p.prior_information
    new_file = os.path.join("temp", "new.pst")
    new_p.write(new_file)

    p_load = Pst(new_file,resfile=p.resfile)
    for gname in p.phi_components:
        d = np.abs(p.phi_components[gname] - p_load.phi_components[gname])
        assert d < 1.0e-5
Example #15
def parfile_test():
    import os
    import numpy as np
    import pandas as pd
    from pyemu import MonteCarlo, Ensemble, ParameterEnsemble, Pst, Cov

    jco = os.path.join("pst", "pest.jcb")
    pst = jco.replace(".jcb", ".pst")

    mc = MonteCarlo(jco=jco, pst=pst)
    mc.pst.parameter_data.loc[mc.pst.par_names[1], "scale"] = 0.001
    mc.draw(10)
    mc.parensemble.to_parfiles(os.path.join("temp","testpar"))

    pst = Pst(pst)
    pst.parameter_data = pst.parameter_data.iloc[1:]
    # add a dummy parameter row so the realizations no longer match the control file exactly
    pst.parameter_data.loc["test","parnme"] = "test"

    parfiles = [os.path.join("temp",f) for f in os.listdir("temp") if "testpar" in f]
    rnames = ["test{0}".format(i) for i in range(len(parfiles))]

    pe = ParameterEnsemble.from_parfiles(pst=pst,parfile_names=parfiles,real_names=rnames)
Example #16
def schur_test():
    import os
    import numpy as np
    from pyemu import Schur, Cov, Pst
    w_dir = os.path.join("..", "verification", "henry")
    forecasts = ["pd_ten", "c_obs10_2"]
    pst = Pst(os.path.join(w_dir, "pest.pst"))
    cov = Cov.from_parameter_data(pst)
    cov.to_uncfile(os.path.join("temp", "pest.unc"), covmat_file=None)
    cov2 = Cov.from_uncfile(os.path.join("temp", "pest.unc"))
    sc = Schur(jco=os.path.join(w_dir, "pest.jcb"),
               forecasts=forecasts,
               parcov=cov2)
    print(sc.prior_forecast)
    print(sc.posterior_forecast)
    print(sc.get_par_group_contribution())

    df = sc.get_par_group_contribution(include_prior_results=True)
    levels = list(df.columns.levels[1])
    assert "prior" in levels, levels
    assert "post" in levels, levels

    print(sc.get_parameter_summary(include_map=True))
    print(sc.get_forecast_summary(include_map=True))
    print(sc.get_removed_obs_importance(reset_zero_weight=True))

    sc = Schur(jco=os.path.join(w_dir, "pest.jcb"),
               forecasts=forecasts,
               sigma_range=6.0)
    cov = Cov.from_parameter_data(pst, sigma_range=6.0)

    assert np.abs((sc.parcov.x - cov.x).sum()) == 0.0

    sc = Schur(jco=os.path.join(w_dir, "pest.jcb"),
               forecasts=forecasts,
               sigma_range=6.0,
               scale_offset=False)
    assert np.abs((sc.parcov.x - cov.x).sum()) == 0.0

    pst.parameter_data.loc[:, "offset"] = 100.0
    cov = Cov.from_parameter_data(pst)
    sc = Schur(jco=os.path.join(w_dir, "pest.jcb"),
               pst=pst,
               forecasts=forecasts,
               sigma_range=6.0,
               scale_offset=False)
    assert np.abs((sc.parcov.x - cov.x).sum()) != 0.0

    cov = Cov.from_parameter_data(pst, scale_offset=False, sigma_range=6.0)
    assert np.abs((sc.parcov.x - cov.x).sum()) == 0.0
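
The sigma_range comparisons above reflect the convention that parameter bounds are assumed to span sigma_range standard deviations, so the implied prior standard deviation is (upper bound - lower bound) / sigma_range, computed on log10-transformed bounds for log-transformed parameters. A small sketch of that arithmetic with hypothetical bounds (not pyemu's exact code):

import numpy as np

parlbnd, parubnd = 0.1, 10.0   # hypothetical bounds of a log-transformed parameter
sigma_range = 6.0              # bounds assumed to span six standard deviations

std = (np.log10(parubnd) - np.log10(parlbnd)) / sigma_range   # (1 - (-1)) / 6 = 1/3
prior_variance = std ** 2                                     # ~0.111 on the log10 scale
print(std, prior_variance)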
Example #17
def load_test():
    import os
    from pyemu import Pst,pst_utils
    pst_dir = os.path.join('..','tests',"pst")
    temp_dir = "temp"
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    # just testing all sorts of different pst files
    pst_files = os.listdir(pst_dir)
    exceptions = []
    load_fails = []
    for pst_file in pst_files:
        if pst_file.endswith(".pst"):
            print(pst_file)
            try:
                p = Pst(os.path.join(pst_dir,pst_file))
            except Exception as e:
                exceptions.append(pst_file + " read fail: " + str(e))
                load_fails.append(pst_file)
                continue
            out_name = os.path.join(temp_dir,pst_file)
           #p.write(out_name,update_regul=True)
            try:
                p.write(out_name,update_regul=True)
            except Exception as e:
                exceptions.append(pst_file + " write fail: " + str(e))
                continue
            try:
                p = Pst(out_name)
            except Exception as e:
                exceptions.append(pst_file + " reload fail: " + str(e))
                continue

    #with open("load_fails.txt",'w') as f:
    #    [f.write(pst_file+'\n') for pst_file in load_fails]
    if len(exceptions) > 0:
        raise Exception('\n'.join(exceptions))
Example #18
def la_test_io():
    import os
    from pyemu import Schur, Cov, Pst
    w_dir = os.path.join("..", "verification", "henry")
    forecasts = ["pd_ten", "c_obs10_2"]
    pst = Pst(os.path.join(w_dir, "pest.pst"))
    cov = Cov.from_parameter_data(pst)
    cov.to_binary(os.path.join("temp", "pest.bin.cov"))
    cov.to_ascii(os.path.join("temp", "pest.txt.cov"))
    sc_bin = Schur(jco=os.path.join(w_dir, "pest.jcb"),
                   forecasts=forecasts,
                   parcov=os.path.join("temp", "pest.bin.cov"))

    sc_ascii = Schur(jco=os.path.join(w_dir, "pest.jcb"),
                     forecasts=forecasts,
                     parcov=os.path.join("temp", "pest.txt.cov"))
Example #19
def schur_test():
    import os
    from pyemu import Schur, Cov, Pst
    w_dir = os.path.join("..", "verification", "henry")
    forecasts = ["pd_ten", "c_obs10_2"]
    pst = Pst(os.path.join(w_dir, "pest.pst"))
    cov = Cov.from_parameter_data(pst)
    cov.to_uncfile(os.path.join("temp", "pest.unc"), covmat_file=None)
    cov2 = Cov.from_uncfile(os.path.join("temp", "pest.unc"))
    sc = Schur(jco=os.path.join(w_dir, "pest.jcb"),
               forecasts=forecasts,
               parcov=cov2)
    print(sc.prior_forecast)
    print(sc.posterior_forecast)
    print(sc.get_par_group_contribution())
    print(sc.get_removed_obs_group_importance())
Example #20
def pst_manip_test():
    import os
    from pyemu import Pst
    pst_dir = os.path.join('..','tests',"pst")
    org_path = os.path.join(pst_dir,"pest.pst")
    new_path = os.path.join(pst_dir,"pest1.pst")
    pst = Pst(org_path)
    pst.control_data.pestmode = "regularisation"
    pst.write(new_path)
    pst = Pst(new_path)

    pst.write(new_path,update_regul=True)
Example #21
def from_io_with_inschek_test():
    import os
    from pyemu import Pst,pst_utils
    # creation functionality
    dir = os.path.join("..","verification","10par_xsec","template_mac")
    pst = Pst(os.path.join(dir,"pest.pst"))


    tpl_files = [os.path.join(dir,f) for f in pst.template_files]
    out_files = [os.path.join(dir,f) for f in pst.output_files]
    ins_files = [os.path.join(dir,f) for f in pst.instruction_files]
    in_files = [os.path.join(dir,f) for f in pst.input_files]


    new_pst = Pst.from_io_files(tpl_files, in_files,
                                ins_files, out_files,
                                pst_filename=os.path.join("temp","test.pst"))
    print(new_pst.observation_data)
    return
Example #22
def diagonal_cov_draw_test():
    import os
    import numpy as np
    from pyemu import MonteCarlo, Cov, Pst
    jco = os.path.join("pst", "pest.jcb")
    pst = Pst(jco.replace(".jcb", ".pst"))

    mc = MonteCarlo(jco=jco, pst=pst)
    num_reals = 10
    mc.draw(num_reals, obs=True)
    print(mc.obsensemble)
    pe1 = mc.parensemble.copy()

    cov = Cov(x=mc.parcov.as_2d, names=mc.parcov.row_names)
    #print(type(cov))
    mc = MonteCarlo(jco=jco, pst=pst)
    mc.parensemble.reseed()
    mc.draw(num_reals, cov=cov)
    pe2 = mc.parensemble
Example #23
def from_dataframe_test():
    import os
    import numpy as np
    import pandas as pd
    from pyemu import MonteCarlo,Ensemble,ParameterEnsemble,Pst, Cov

    jco = os.path.join("pst","pest.jcb")
    pst = jco.replace(".jcb",".pst")
    mc = MonteCarlo(jco=jco,pst=pst)
    names = ["par_{0}".format(_) for _ in range(10)]
    df = pd.DataFrame(np.random.random((10,mc.pst.npar)),columns=mc.pst.par_names)
    mc.parensemble = ParameterEnsemble.from_dataframe(df=df,pst=mc.pst)
    print(mc.parensemble.shape)
    mc.project_parensemble()
    mc.parensemble.to_csv(os.path.join("temp","test.csv"))

    pstc = Pst(pst)
    par = pstc.parameter_data
    par.sort_values(by="parnme",ascending=False,inplace=True)
    cov = Cov.from_parameter_data(pstc)
    pe = ParameterEnsemble.from_gaussian_draw(pst=mc.pst,cov=cov)
Example #24
def flex_load_test():
    import os
    from pyemu import Pst, pst_utils
    pst_dir = os.path.join("pst")
    temp_dir = "temp"
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    # just testing all sorts of different pst files
    pst_files = os.listdir(pst_dir)
    exceptions = []
    load_fails = []
    for pst_file in pst_files:
        if pst_file.endswith(".pst") and not "missing" in pst_file:
            # if not pst_file.startswith("LPR"):
            #    continue
            print(pst_file)
            p = Pst(os.path.join(pst_dir, pst_file), flex=True)
            out_name = os.path.join(temp_dir, pst_file)
            print("write")
            p.write(out_name, update_regul=True)
            p = Pst(out_name)
        #  try:
        #      p = Pst(os.path.join(pst_dir,pst_file),flex=True)
        #  except Exception as e:
        #      exceptions.append(pst_file + " read fail: " + str(e))
        #      load_fails.append(pst_file)
        #      continue
        #  out_name = os.path.join(temp_dir,pst_file)
        #  print(out_name)
        # #p.write(out_name,update_regul=True)
        #  try:
        #      p.write(out_name,update_regul=True)
        #  except Exception as e:
        #      exceptions.append(pst_file + " write fail: " + str(e))
        #      continue
        #  print(pst_file)
        #  try:
        #      p = Pst(out_name)
        #  except Exception as e:
        #      exceptions.append(pst_file + " reload fail: " + str(e))
        #      continue

    # with open("load_fails.txt",'w') as f:
    #    [f.write(pst_file+'\n') for pst_file in load_fails]
    if len(exceptions) > 0:
        raise Exception('\n'.join(exceptions))
Example #25
def reweight_test():
    import os
    import numpy as np
    from pyemu import Pst, pst_utils
    pst_dir = os.path.join('..', 'tests', "pst")
    p = Pst(os.path.join(pst_dir, "pest.pst"))
    obsgrp_dict = {"pred": 1.0, "head": 1.0, "conc": 1.0}
    p.adjust_weights(obsgrp_dict=obsgrp_dict)
    assert np.abs(p.phi - 3.0) < 1.0e-5, p.phi

    obs = p.observation_data
    obs.loc[obs.obgnme == "pred", "weight"] = 0.0
    assert np.abs(p.phi - 2.0) < 1.0e-5, p.phi

    obs_dict = {"pd_one": 1.0, "pd_ten": 1.0}
    p.adjust_weights(obs_dict=obs_dict)
    assert np.abs(p.phi - 4.0) < 1.0e-5, p.phi
Example #26
def test_write_input_files():
    import os
    import shutil
    import numpy as np
    import pyemu
    from pyemu import Pst, pst_utils
    # creation functionality
    dir = os.path.join("..", "verification", "10par_xsec", "template_mac")
    if os.path.exists("temp_dir"):
        shutil.rmtree("temp_dir")
    shutil.copytree(dir, "temp_dir")
    os.chdir("temp_dir")
    pst = Pst(os.path.join("pest.pst"))
    pst.write_input_files()
    arr1 = np.loadtxt(pst.input_files[0])
    print(pst.parameter_data.parval1)
    pst.parameter_data.loc[:, "parval1"] *= 10.0
    pst.write_input_files()
    arr2 = np.loadtxt(pst.input_files[0])
    assert (arr1 * 10).sum() == arr2.sum()
    os.chdir("..")
Example #27
def load_test():
    import os
    from pyemu import Pst, pst_utils
    pst_dir = os.path.join("pst")
    temp_dir = "temp"
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    # just testing all sorts of different pst files
    pst_files = os.listdir(pst_dir)
    exceptions = []
    load_fails = []
    for pst_file in pst_files:
        if pst_file.endswith(".pst") and not "comments" in pst_file and \
                not "missing" in pst_file:
            print(pst_file)
            try:
                p = Pst(os.path.join(pst_dir, pst_file))
            except Exception as e:
                exceptions.append(pst_file + " read fail: " + str(e))
                load_fails.append(pst_file)
                continue
            out_name = os.path.join(temp_dir, pst_file)
            print(out_name)
            # p.write(out_name)
            try:
                p.write(out_name)
            except Exception as e:
                exceptions.append(pst_file + " write fail: " + str(e))
                continue
            print(pst_file)
            try:
                p = Pst(out_name)
            except Exception as e:
                exceptions.append(pst_file + " reload fail: " + str(e))
                continue

    # with open("load_fails.txt",'w') as f:
    #    [f.write(pst_file+'\n') for pst_file in load_fails]
    if len(exceptions) > 0:
        raise Exception('\n'.join(exceptions))
Example #28
def to_mps(
    jco,
    obj_func=None,
    obs_constraint_sense=None,
    pst=None,
    decision_var_names=None,
    mps_filename=None,
):

    if isinstance(jco, str):
        pst_name = jco.lower().replace('.jcb', ".pst").replace(".jco", ".pst")
        jco = Matrix.from_binary(jco)
    assert isinstance(jco, Matrix)

    if pst is None:
        if os.path.exists(pst_name):
            pst = Pst(pst_name)
        else:
            raise Exception("could not find pst file {0} and pst argument is None, a ".format(pst_name) +\
                            "pst instance is required for setting decision variable bound constraints")
    else:
        assert len(set(jco.row_names).difference(
            pst.observation_data.index)) == 0
        assert len(set(jco.col_names).difference(
            pst.parameter_data.index)) == 0

    if decision_var_names is None:
        decision_var_names = jco.col_names
    else:
        if not isinstance(decision_var_names, list):
            decision_var_names = [decision_var_names]
        for i, dv in enumerate(decision_var_names):
            dv = dv.lower()
            decision_var_names[i] = dv
            assert dv in jco.col_names, "decision var {0} not in jco column names".format(
                dv)
            assert dv in pst.parameter_data.index, "decision var {0} not in pst parameter names".format(
                dv)

    if obs_constraint_sense is None:
        const_groups = [
            grp for grp in pst.obs_groups if grp.lower() in OPERATOR_WORDS
        ]
        if len(const_groups) == 0:
            raise Exception("to_mps(): obs_constraint_sense is None and no "+\
                            "constraint observation groups in {0}".format(','.join(pst.obs_groups)))
        obs_constraint_sense = {}
        obs_groups = pst.observation_data.groupby(
            pst.observation_data.obgnme).groups
        for og, obs_names in obs_groups.items():
            if og == 'n':
                continue
            if og in const_groups:
                for oname in obs_names:
                    obs_constraint_sense[oname] = og

    assert isinstance(obs_constraint_sense, dict)

    operators = {}
    rhs = {}
    for obs_name, operator in obs_constraint_sense.items():
        obs_name = obs_name.lower()
        assert obs_name in pst.obs_names, "obs constraint {0} not in pst observation names".format(obs_name)
        rhs[obs_name] = pst.observation_data.loc[obs_name, "obsval"]
        assert obs_name in jco.row_names, "obs constraint {0} not in jco row names".format(
            obs_name)
        if operator.lower() not in OPERATOR_WORDS:
            if operator not in OPERATOR_SYMBOLS:
                raise Exception("operator {0} not in [{1}] or [{2}]".\
                    format(operator,','.join(OPERATOR_WORDS),','\
                           .join(OPERATOR_SYMBOLS)))
            op = OPERATOR_WORDS[OPERATOR_SYMBOLS.index(operator)]
        else:
            op = operator.lower()
        operators[obs_name] = op
        obs_constraint_sense[obs_name.lower()] = obs_constraint_sense.\
                                                 pop(obs_name)

    # build a list of constraints in order w.r.t. jco row order
    order_obs_constraints = [
        name for name in jco.row_names if name in obs_constraint_sense
    ]

    #build a list of decision var names in order WRT jco col order
    order_dec_var = [
        name for name in jco.col_names if name in decision_var_names
    ]

    #shorten constraint names if needed
    new_const_count = 0
    new_constraint_names = {}
    for name in order_obs_constraints:
        if len(name) > 8:
            new_name = name[:7] + "{0}".format(new_const_count)
            print("to_mps(): shortening constraint name {0} to {1}\n".format(name, new_name))
            new_constraint_names[name] = new_name
            new_const_count += 1
        else:
            new_constraint_names[name] = name

    #shorten decision var names if needed
    new_dec_count = 0
    new_decision_names = {}
    for name in order_dec_var:
        if len(name) > 8:
            new_name = name[:7] + "{0}".format(new_dec_count)
            print("to_mps(): shortening decision var name {0} to {1}\n".format(name, new_name))
            new_decision_names[name] = new_name
            new_dec_count += 1
        else:
            new_decision_names[name] = name

    if obj_func is None:
        # look for an obs group named 'n' with a single member
        og = pst.obs_groups
        if 'n' not in pst.obs_groups:
            raise Exception("to_mps(): obj_func is None but no "+\
                            "obs group named 'n'")
        grps = pst.observation_data.groupby(pst.observation_data.obgnme).groups
        assert len(grps["n"]) == 1,"to_mps(): 'n' obj_func group has more " +\
                                   "than one member"
        obj_name = grps['n'][0]
        obj_iidx = jco.row_names.index(obj_name)
        obj = {}
        for name in order_dec_var:
            jco_jidx = jco.col_names.index(name)
            obj[name] = jco.x[obj_iidx, jco_jidx]

    elif isinstance(obj_func, str):
        obj_func = obj_func.lower()
        assert obj_func in jco.row_names,\
            "obj_func {0} not in jco.row_names".format(obj_func)
        assert obj_func in pst.observation_data.obsnme,\
            "obj_func {0} not in pst observations".format(obj_func)

        obj_iidx = jco.row_names.index(obj_func)
        obj = {}
        for name in order_dec_var:
            jco_jidx = jco.col_names.index(name)
            obj[name] = jco.x[obj_iidx, jco_jidx]
        obj_name = str(obj_func)

    elif isinstance(obj_func, dict):
        obj = {}
        for name, value in obj_func.items():
            assert name in jco.col_names,"to_mps(): obj_func key {0} not ".format(name) +\
                "in jco col names"
            obj[name] = float(value)
        obj_name = "obj_func"
    else:
        raise NotImplementedError

    if mps_filename is None:
        mps_filename = pst.filename.replace(".pst", ".mps")

    with open(mps_filename, 'w') as f:
        f.write("NAME {0}\n".format("pest_opt"))
        f.write("ROWS\n")
        for name in order_obs_constraints:
            f.write(" {0}  {1}\n".format(operators[name],
                                         new_constraint_names[name]))
        f.write(" {0}  {1}\n".format('n', obj_name))

        f.write("COLUMNS\n")
        for dname in order_dec_var:
            jco_jidx = jco.col_names.index(dname)
            for cname in order_obs_constraints:
                jco_iidx = jco.row_names.index(cname)
                f.write("    {0:8}  {1:8}   {2:10G}\n".\
                        format(new_decision_names[dname],
                               new_constraint_names[cname],
                               jco.x[jco_iidx,jco_jidx]))
            f.write("    {0:8}  {1:8}   {2:10G}\n".\
                    format(new_decision_names[dname],
                           obj_name,obj[dname]))

        f.write("RHS\n")
        for name in order_obs_constraints:
            f.write("    {0:8}  {1:8}   {2:10G}\n".format(
                "rhs", new_constraint_names[name], rhs[name]))
        f.write("BOUNDS\n")
        for name in order_dec_var:
            up,lw = pst.parameter_data.loc[name,"parubnd"],\
                    pst.parameter_data.loc[name,"parlbnd"]
            f.write(" {0:2} {1:8}  {2:8}  {3:10G}\n".\
                    format("UP","BOUND",name,up))
            f.write(" {0:2} {1:8}  {2:8}  {3:10G}\n".\
                    format("LO","BOUND",name,lw))
        f.write("ENDATA\n")
Example #29
def to_mps(jco,obj_func=None,obs_constraint_sense=None,pst=None,
           decision_var_names=None,mps_filename=None,
           risk=0.5):
    """helper utility to write an mps file from pest-style
    jacobian matrix.  Requires the corresponding pest control
    file.

    Parameters:
        jco : pyemu.Matrix or str (filename of matrix)
        obj_func : optional.  If None, an obs group named 'n' must exist
            and must have exactly one member.  Can be a str, which
            is the name of an observation to treat as the objective function,
            or a dict, which is keyed on decision var names and valued
            with objective function coefficients.
        obs_constraint_sense : optional.  If None, obs groups are sought that
            have names "l", "g", or "e" - members of these groups are treated
            as constraints.  Otherwise, must be a dict keyed on constraint
            (obs) names with values of "l", "g", or "e".
        pst : optional.  If None, a pest control file is sought with
            filename <case>.pst.  Otherwise, must be a pyemu.Pst instance or
            a filename of a pest control file.  The control file must have an
            associated .res or .rei file - this is needed for the RHS of the
            constraints.
        decision_var_names : optional.  If None, all parameters are treated as
            decision vars.  Otherwise, must be a list of str of parameter names
            to use as decision vars.
        mps_filename : optional.  If None, then <case>.mps is written.
            Otherwise, must be a str.
        risk : float
            the level of risk tolerance/aversion in the chance constraints.
            Values other than 0.5 require at least one non-decision-var
            parameter in the jco.  Ranges from 0.0 to 1.0.
    """

    #if jco arg is a string, load a jco from binary
    if isinstance(jco,str):
        pst_name = jco.lower().replace('.jcb',".pst").replace(".jco",".pst")
        jco = Matrix.from_binary(jco)
    assert isinstance(jco,Matrix)

    # try to find a pst
    if pst is None:
        if os.path.exists(pst_name):
            pst = Pst(pst_name)
        else:
            raise Exception("could not find pst file {0} and pst argument is None, a ".format(pst_name) +\
                            "pst instance is required for setting decision variable bound constraints")
    else:
        assert len(set(jco.row_names).difference(pst.observation_data.index)) == 0
        assert len(set(jco.col_names).difference(pst.parameter_data.index)) == 0

    # make sure the pst has an associated res
    assert pst.res is not None,"could not find a residuals file (.res or .rei)" +\
                               " for control file {0}".format(pst.filename)

    # if no decision_var_names were passed, use all columns in the jco
    if decision_var_names is None:
        decision_var_names = jco.col_names

    #otherwise, do some error checking and processing
    else:
        if not isinstance(decision_var_names,list):
            decision_var_names = [decision_var_names]
        for i,dv in enumerate(decision_var_names):
            dv = dv.lower()
            decision_var_names[i] = dv
            assert dv in jco.col_names,"decision var {0} not in jco column names".format(dv)
            assert dv in pst.parameter_data.index,"decision var {0} not in pst parameter names".format(dv)

    #if no obs_constraint_sense, try to build one from the obs group info
    if obs_constraint_sense is None:
        const_groups = [grp for grp in pst.obs_groups if grp.lower() in OPERATOR_WORDS]
        if len(const_groups) == 0:
            raise Exception("to_mps(): obs_constraint_sense is None and no "+\
                            "constraint observation groups in {0}".format(','.join(pst.obs_groups)))
        obs_constraint_sense = {}
        obs_groups = pst.observation_data.groupby(pst.observation_data.obgnme).groups
        for og,obs_names in obs_groups.items():
            if og == 'n':
                continue
            if og in const_groups:
                for oname in obs_names:
                    obs_constraint_sense[oname] = og

    assert isinstance(obs_constraint_sense,dict)
    assert len(obs_constraint_sense) > 0,"no obs_constraints..."

    #build up a dict of (in)equality operators for the constraints
    operators = {}
    for obs_name,operator in obs_constraint_sense.items():
        obs_name = obs_name.lower()
        assert obs_name in pst.obs_names,"obs constraint {0} not in pst observation names".format(obs_name)
        assert obs_name in pst.res.name,"obs constraint {0} not in pst.res names".format(obs_name)
        assert obs_name in jco.row_names,"obs constraint {0} not in jco row names".format(obs_name)
        if operator.lower() not in OPERATOR_WORDS:
            if operator not in OPERATOR_SYMBOLS:
                raise Exception("operator {0} not in [{1}] or [{2}]".\
                    format(operator,','.join(OPERATOR_WORDS),','\
                           .join(OPERATOR_SYMBOLS)))
            op = OPERATOR_WORDS[OPERATOR_SYMBOLS.index(operator)]
        else:
            op = operator.lower()
        operators[obs_name] = op
        obs_constraint_sense[obs_name.lower()] = obs_constraint_sense.\
                                                 pop(obs_name)

    # build a list of constraint names in order w.r.t. jco row order
    # order_obs_constraints = [name for name in jco.row_names if name in
    #                          obs_constraint_sense]

    order_obs_constraints = list(obs_constraint_sense.keys())
    order_obs_constraints.sort()

    #build a list of decision var names in order WRT jco col order
    #order_dec_var = [name for name in jco.col_names if name in
    #                 decision_var_names]

    order_dec_var = list(decision_var_names)
    order_dec_var.sort()

    #shorten constraint names if needed
    new_const_count = 0
    new_constraint_names = {}
    for name in order_obs_constraints:
        if len(name) > 8:
            new_name = name[:7]+"{0}".format(new_const_count)
            print("to_mps(): shortening constraint name {0} to {1}\n".format(name,new_name))
            new_constraint_names[name] = new_name
            new_const_count += 1
        else:
            new_constraint_names[name] = name

    #shorten decision var names if needed
    new_dec_count = 0
    new_decision_names = {}
    for name in order_dec_var:
        if len(name) > 8:
            new_name = name[:7]+"{0}".format(new_dec_count)
            print("to_mps(): shortening decision var name {0} to {1}\n".format(name,new_name))
            new_decision_names[name] = new_name
            new_dec_count += 1
        else:
            new_decision_names[name] = name

    # if no obj_func, try to make one
    if obj_func is None:
        # look for an obs group named 'n' with a single member
        og = pst.obs_groups
        if 'n' not in pst.obs_groups:
            raise Exception("to_mps(): obj_func is None but no "+\
                            "obs group named 'n'")
        grps = pst.observation_data.groupby(pst.observation_data.obgnme).groups
        assert len(grps["n"]) == 1,"to_mps(): 'n' obj_func group has more " +\
                                   "than one member, mps only supports one obj func"
        obj_name = grps['n'][0]
        obj_iidx = jco.row_names.index(obj_name)
        obj = {}
        for name in order_dec_var:
            jco_jidx = jco.col_names.index(name)
            obj[name] = jco.x[obj_iidx,jco_jidx]

    #otherwise, parse what was passed
    elif isinstance(obj_func,str):
        obj_func = obj_func.lower()
        assert obj_func in jco.row_names,\
            "obj_func {0} not in jco.row_names".format(obj_func)
        assert obj_func in pst.observation_data.obsnme,\
            "obj_func {0} not in pst observations".format(obj_func)

        obj_iidx = jco.row_names.index(obj_func)
        obj = {}
        for name in order_dec_var:
            jco_jidx = jco.col_names.index(name)
            obj[name] = jco.x[obj_iidx,jco_jidx]
        obj_name = str(obj_func)

    elif isinstance(obj_func,dict):
        obj = {}
        for name,value in obj_func.items():
            assert name in jco.col_names,"to_mps(): obj_func key "+\
                                         "{0} not ".format(name) +\
                                         "in jco col names"
            obj[name] = float(value)
        obj_name = "obj_func"
    else:
        raise NotImplementedError("unsupported obj_func arg type {0}".format(\
                                  type(obj_func)))

    if risk != 0.5:
        try:
            from scipy.special import erfinv
        except Exception as e:
            raise Exception("to_mps() error importing erfinv from scipy.special: "+\
                            "{0}".format(str(e)))

        par_names = [name for name in jco.col_names if name not in decision_var_names]
        if len(par_names) == 0:
            raise Exception("to_mps() error: risk != 0.5, but no "+\
                            "non-decision-var parameters in the jco")
        unc_jco = jco.get(col_names=par_names)
        unc_pst = pst.get(par_names=par_names)
        sc = Schur(jco=unc_jco,pst=unc_pst,forecasts=order_obs_constraints)
        constraint_std = sc.get_forecast_summary().loc[:,"post_var"].apply(np.sqrt)
        rhs = {}

        # the probit value for a given risk...using the inverse
        # error function
        probit_val = np.sqrt(2.0) * erfinv((2.0 * risk) - 1.0)
        for name in order_obs_constraints:
            mu = unc_pst.res.loc[name,"residual"]
            std = constraint_std.loc[name]
            #if this is a less than constraint, then we want
            # to subtract
            if operators[name] == 'l':
                prob_val = mu - (probit_val * std)
            #if this is a greater than constraint, then we want
            # to add
            elif operators[name] == "g":
                prob_val = mu + (probit_val * std)
            else:
                raise NotImplementedError("chance constraints only " +\
                                          "implemented for 'l' or 'g' " +\
                                          "type constraints, not " +\
                                          "{0}".format(operators[name]))
            rhs[name] = prob_val
    else:
        rhs = {n:pst.res.loc[n,"residual"] for n in order_obs_constraints}

    if mps_filename is None:
        mps_filename = pst.filename.replace(".pst",".mps")

    with open(mps_filename,'w') as f:
        f.write("NAME {0}\n".format("pest_opt"))
        f.write("ROWS\n")
        for name in order_obs_constraints:
            f.write(" {0}  {1}\n".format(operators[name],
                                         new_constraint_names[name]))
        f.write(" {0}  {1}\n".format('n',obj_name))

        f.write("COLUMNS\n")
        for dname in order_dec_var:
            jco_jidx = jco.col_names.index(dname)
            for cname in order_obs_constraints:
                jco_iidx = jco.row_names.index(cname)
                v = jco.x[jco_iidx,jco_jidx]
                f.write("    {0:8}  {1:8}   {2:10G}\n".\
                        format(new_decision_names[dname],
                               new_constraint_names[cname],
                               v))
            # f.write("    {0:8}  {1:8}   {2:10G}\n".\
            #         format(new_decision_names[dname],
            #                obj_name,pst.parameter_data.loc[dname,"parval1"]))
            # use the parsed objective coefficients (obj), not the raw obj_func argument
            f.write("    {0:8}  {1:8}   {2:10G}\n".\
                    format(new_decision_names[dname],
                           obj_name,obj[dname]))

        f.write("RHS\n")
        for iname,name in enumerate(order_obs_constraints):
            f.write("    {0:8}  {1:8}   {2:10G}\n".
                    format("rhs",new_constraint_names[name],
                           rhs[name]))
        f.write("BOUNDS\n")
        for name in order_dec_var:
            up,lw = pst.parameter_data.loc[name,"parubnd"],\
                    pst.parameter_data.loc[name,"parlbnd"]
            f.write(" {0:2} {1:8}  {2:8}  {3:10G}\n".\
                    format("UP","BOUND",name,up))
            f.write(" {0:2} {1:8}  {2:8}  {3:10G}\n".\
                    format("LO","BOUND",name,lw))
        f.write("ENDATA\n")
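
A minimal call sketch for the writer above; the file, constraint, and decision-variable names are hypothetical, the control file must have a matching .res/.rei file, and the module-level imports and OPERATOR_* constants used by to_mps are assumed to be in place:

to_mps("pest.jcb",
       obj_func={"welrate_1": 1.0, "welrate_2": 1.0},
       obs_constraint_sense={"flx_const_1": "l", "flx_const_2": "g"},
       decision_var_names=["welrate_1", "welrate_2"],
       mps_filename="pest_opt.mps",
       risk=0.5)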
Example #30
def tenpar_plot():
    import os
    import numpy as np
    import matplotlib.pyplot as plt
    from matplotlib.backends.backend_pdf import PdfPages

    import pandas as pd
    from pyemu import Pst
    d = os.path.join("smoother","10par_xsec")
    pst = Pst(os.path.join(d,"10par_xsec.pst"))
    plt_dir = os.path.join(d,"plot")
    if not os.path.exists(plt_dir):
        os.mkdir(plt_dir)


    par_files = [os.path.join(d,f) for f in os.listdir(d) if "parensemble." in f
                 and ".png" not in f]
    par_dfs = [pd.read_csv(par_file,index_col=0).apply(np.log10) for par_file in par_files]
    par_names = list(par_dfs[0].columns)
    mx = (pst.parameter_data.loc[:,"parubnd"] * 1.1).apply(np.log10)
    mn = (pst.parameter_data.loc[:,"parlbnd"] * 0.9).apply(np.log10)

    obj_df = pd.read_csv(os.path.join(d,"10par_xsec.pst.iobj.csv"),index_col=0)
    real_cols = [col for col in obj_df.columns if col.startswith("0")]
    obj_df.loc[:,real_cols] = obj_df.loc[:,real_cols].apply(np.log10)
    obj_df.loc[:,"mean"] = obj_df.loc[:,"mean"].apply(np.log10)
    obj_df.loc[:, "std"] = obj_df.loc[:, "std"].apply(np.log10)

    fig = plt.figure(figsize=(20, 10))
    ax = plt.subplot(111)
    axt = plt.twinx()
    obj_df.loc[:, real_cols].plot(ax=ax, lw=0.5, color="0.5", alpha=0.5, legend=False)
    ax.plot(obj_df.index, obj_df.loc[:, "mean"], 'b', lw=2.5,marker='.',markersize=5)
    #ax.fill_between(obj_df.index, obj_df.loc[:, "mean"] - (1.96 * obj_df.loc[:, "std"]),
    #                obj_df.loc[:, "mean"] + (1.96 * obj_df.loc[:, "std"]),
    #                facecolor="b", edgecolor="none", alpha=0.25)
    axt.plot(obj_df.index,obj_df.loc[:,"lambda"],"k",dashes=(2,1),lw=2.5)
    ax.set_ylabel("log$_{10}$ phi")
    axt.set_ylabel("lambda")
    ax.set_title("total runs:{0}".format(obj_df.total_runs.max()))
    plt.savefig(os.path.join(plt_dir,"iobj.pdf"))
    plt.close()

    with PdfPages(os.path.join(plt_dir,"parensemble.pdf")) as pdf:

        for par_file,par_df in zip(par_files,par_dfs):
            print(par_file)
            fig = plt.figure(figsize=(20,10))

            plt.figtext(0.5,0.975,par_file,ha="center")
            axes = [plt.subplot(2,6,i+1) for i in range(len(par_names))]
            for par_name,ax in zip(par_names,axes):
                mean = par_df.loc[:,par_name].mean()
                std = par_df.loc[:,par_name].std()
                par_df.loc[:,par_name].hist(ax=ax,edgecolor="none",
                                            alpha=0.5,grid=False)
                ax.set_yticklabels([])
                ax.set_title("{0}, {1:6.2f}".\
                             format(par_name,10.0**mean))
                ax.set_xlim(mn[par_name],mx[par_name])
                ylim = ax.get_ylim()
                if "stage" in par_name:
                    val = np.log10(1.5)
                else:
                    val = np.log10(2.5)
                ticks = ["{0:2.1f}".format(x) for x in 10.0**ax.get_xticks()]
                ax.set_xticklabels(ticks,rotation=90)
                ax.plot([val,val],ylim,"k-",lw=2.0)

                ax.plot([mean,mean],ylim,"b-",lw=1.5)
                ax.plot([mean+(2.0*std),mean+(2.0*std)],ylim,"b--",lw=1.5)
                ax.plot([mean-(2.0*std),mean-(2.0*std)],ylim,"b--",lw=1.5)
            pdf.savefig()
            plt.close()




    obs_files = [os.path.join(d,f) for f in os.listdir(d) if "obsensemble." in f
                 and ".png" not in f]
    obs_dfs = [pd.read_csv(obs_file) for obs_file in obs_files]
    #print(obs_files)
    #mx = max([obs_df.obs.max() for obs_df in obs_dfs])
    #mn = min([obs_df.obs.min() for obs_df in obs_dfs])
    #print(mn,mx)
    obs_names = ["h01_04","h01_06","h01_08","h02_08"]
    #print(obs_files)
    obs_dfs = [obs_df.loc[:,obs_names] for obs_df in obs_dfs]
    mx = {obs_name:max([obs_df.loc[:,obs_name].max() for obs_df in obs_dfs]) for obs_name in obs_names}
    mn = {obs_name:min([obs_df.loc[:,obs_name].min() for obs_df in obs_dfs]) for obs_name in obs_names}

    with PdfPages(os.path.join(plt_dir,"obsensemble.pdf")) as pdf:
        for obs_file,obs_df in zip(obs_files,obs_dfs):
            fig = plt.figure(figsize=(10,10))
            plt.figtext(0.5,0.975,obs_file,ha="center")
            print(obs_file)
            axes = [plt.subplot(2,2,i+1) for i in range(len(obs_names))]
            for ax,obs_name in zip(axes,obs_names):
                mean = obs_df.loc[:,obs_name].mean()
                std = obs_df.loc[:,obs_name].std()
                obs_df.loc[:,obs_name].hist(ax=ax,edgecolor="none",
                                            alpha=0.5,grid=False)
                ax.set_yticklabels([])
                #print(ax.get_xlim(),mn[obs_name],mx[obs_name])
                ax.set_title("{0}, {1:6.2f}:{2:6.2f}".format(obs_name,mean,std))
                #ax.set_xlim(mn[obs_name],mx[obs_name])
                ax.set_xlim(0.0,20.0)
                ylim = ax.get_ylim()
                oval = pst.observation_data.loc[obs_name,"obsval"]
                ax.plot([oval,oval],ylim,"k-",lw=2)
                ax.plot([mean,mean],ylim,"b-",lw=1.5)
                ax.plot([mean+(2.0*std),mean+(2.0*std)],ylim,"b--",lw=1.5)
                ax.plot([mean-(2.0*std),mean-(2.0*std)],ylim,"b--",lw=1.5)
            pdf.savefig()
            plt.close()
Example #31
def to_mps(jco,
           obj_func=None,
           obs_constraint_sense=None,
           pst=None,
           decision_var_names=None,
           mps_filename=None,
           risk=0.5):
    """helper utility to write an mps file from pest-style
    jacobian matrix.  Requires the corresponding pest control
    file.

    Parameters:
        jco : pyemu.Matrix or str (filename of matrix)
        obj_func : optional.  If None, an obs group named 'n' must exist
            and must have exactly one member.  Can be a str, which
            is the name of an observation to treat as the objective function,
            or a dict, which is keyed on decision var names and valued
            with objective function coefficients.
        obs_constraint_sense : optional.  If None, obs groups are sought that
            have names "l", "g", or "e" - members of these groups are treated
            as constraints.  Otherwise, must be a dict keyed on constraint
            (obs) names with values of "l", "g", or "e".
        pst : optional.  If None, a pest control file is sought with
            filename <case>.pst.  Otherwise, must be a pyemu.Pst instance or
            a filename of a pest control file.  The control file must have an
            associated .res or .rei file - this is needed for the RHS of the
            constraints.
        decision_var_names : optional.  If None, all parameters are treated as
            decision vars.  Otherwise, must be a list of str of parameter names
            to use as decision vars.
        mps_filename : optional.  If None, then <case>.mps is written.
            Otherwise, must be a str.
        risk : float
            the level of risk tolerance/aversion in the chance constraints.
            Values other than 0.5 require at least one non-decision-var
            parameter in the jco.  Ranges from 0.0 to 1.0.
    """

    #if jco arg is a string, load a jco from binary
    if isinstance(jco, str):
        pst_name = jco.lower().replace('.jcb', ".pst").replace(".jco", ".pst")
        jco = Matrix.from_binary(jco)
    assert isinstance(jco, Matrix)

    # try to find a pst
    if pst is None:
        if os.path.exists(pst_name):
            pst = Pst(pst_name)
        else:
            raise Exception("could not find pst file {0} and pst argument is None, a ".format(pst_name) +\
                            "pst instance is required for setting decision variable bound constraints")
    else:
        assert len(set(jco.row_names).difference(
            pst.observation_data.index)) == 0
        assert len(set(jco.col_names).difference(
            pst.parameter_data.index)) == 0

    # make sure the pst has an associated res
    assert pst.res is not None,"could not find a residuals file (.res or .rei)" +\
                               " for control file {0}".format(pst.filename)

    # if no decision_var_names were passed, use all columns in the jco
    if decision_var_names is None:
        decision_var_names = jco.col_names

    #otherwise, do some error checking and processing
    else:
        if not isinstance(decision_var_names, list):
            decision_var_names = [decision_var_names]
        for i, dv in enumerate(decision_var_names):
            dv = dv.lower()
            decision_var_names[i] = dv
            assert dv in jco.col_names, "decision var {0} not in jco column names".format(
                dv)
            assert dv in pst.parameter_data.index, "decision var {0} not in pst parameter names".format(
                dv)

    #if no obs_constraint_sense, try to build one from the obs group info
    if obs_constraint_sense is None:
        const_groups = [
            grp for grp in pst.obs_groups if grp.lower() in OPERATOR_WORDS
        ]
        if len(const_groups) == 0:
            raise Exception("to_mps(): obs_constraint_sense is None and no "+\
                            "constraint observation groups in {0}".format(','.join(pst.obs_groups)))
        obs_constraint_sense = {}
        obs_groups = pst.observation_data.groupby(
            pst.observation_data.obgnme).groups
        for og, obs_names in obs_groups.items():
            if og == 'n':
                continue
            if og in const_groups:
                for oname in obs_names:
                    obs_constraint_sense[oname] = og

    assert isinstance(obs_constraint_sense, dict)
    assert len(obs_constraint_sense) > 0, "no obs_constraints..."

    #build up a dict of (in)equality operators for the constraints
    operators = {}
    for obs_name, operator in obs_constraint_sense.items():
        obs_name = obs_name.lower()
        assert obs_name in pst.obs_names, "obs constraint {0} not in pst observation names".format(obs_name)
        assert obs_name in pst.res.name, "obs constraint {0} not in pst.res names".format(obs_name)
        assert obs_name in jco.row_names, "obs constraint {0} not in jco row names".format(
            obs_name)
        if operator.lower() not in OPERATOR_WORDS:
            if operator not in OPERATOR_SYMBOLS:
                raise Exception("operator {0} not in [{1}] or [{2}]".\
                    format(operator,','.join(OPERATOR_WORDS),','\
                           .join(OPERATOR_SYMBOLS)))
            op = OPERATOR_WORDS[OPERATOR_SYMBOLS.index(operator)]
        else:
            op = operator.lower()
        operators[obs_name] = op
        obs_constraint_sense[obs_name.lower()] = obs_constraint_sense.\
                                                 pop(obs_name)

    # build a list of constraint names in order w.r.t. jco row order
    # order_obs_constraints = [name for name in jco.row_names if name in
    #                          obs_constraint_sense]

    order_obs_constraints = list(obs_constraint_sense.keys())
    order_obs_constraints.sort()

    #build a list of decision var names in order WRT jco col order
    #order_dec_var = [name for name in jco.col_names if name in
    #                 decision_var_names]

    order_dec_var = list(decision_var_names)
    order_dec_var.sort()

    #shorten constraint names if needed
    new_const_count = 0
    new_constraint_names = {}
    for name in order_obs_constraints:
        if len(name) > 8:
            new_name = name[:7] + "{0}".format(new_const_count)
            print("to_mps(): shortening constraint name {0} to {1}\n".format(
                name, new_name))
            new_constraint_names[name] = new_name
            new_const_count += 1
        else:
            new_constraint_names[name] = name

    #shorten decision var names if needed
    new_dec_count = 0
    new_decision_names = {}
    for name in order_dec_var:
        if len(name) > 8:
            new_name = name[:7] + "{0}".format(new_dec_count)
            print("to_mps(): shortening decision var name {0} to {1}\n".format(
                name, new_name))
            new_decision_names[name] = new_name
            new_dec_count += 1
        else:
            new_decision_names[name] = name

    # if no obj_func, try to make one
    if obj_func is None:
        # look for an obs group named 'n' with a single member
        og = pst.obs_groups
        if 'n' not in pst.obs_groups:
            raise Exception("to_mps(): obj_func is None but no "+\
                            "obs group named 'n'")
        grps = pst.observation_data.groupby(pst.observation_data.obgnme).groups
        assert len(grps["n"]) == 1,"to_mps(): 'n' obj_func group has more " +\
                                   "than one member, mps only supports one obj func"
        obj_name = grps['n'][0]
        obj_iidx = jco.row_names.index(obj_name)
        obj = {}
        for name in order_dec_var:
            jco_jidx = jco.col_names.index(name)
            obj[name] = jco.x[obj_iidx, jco_jidx]

    #otherwise, parse what was passed
    elif isinstance(obj_func, str):
        obj_func = obj_func.lower()
        assert obj_func in jco.row_names,\
            "obj_func {0} not in jco.row_names".format(obj_func)
        assert obj_func in pst.observation_data.obsnme,\
            "obj_func {0} not in pst observations".format(obj_func)

        obj_iidx = jco.row_names.index(obj_func)
        obj = {}
        for name in order_dec_var:
            jco_jidx = jco.col_names.index(name)
            obj[name] = jco.x[obj_iidx, jco_jidx]
        obj_name = str(obj_func)

    elif isinstance(obj_func, dict):
        obj = {}
        for name, value in obj_func.items():
            assert name in jco.col_names,"to_mps(): obj_func key "+\
                                         "{0} not ".format(name) +\
                                         "in jco col names"
            obj[name] = float(value)
        obj_name = "obj_func"
    else:
        raise NotImplementedError("unsupported obj_func arg type {0}".format(\
                                  type(obj_func)))

    if risk != 0.5:
        try:
            from scipy.special import erfinv
        except Exception as e:
            raise Exception("to_mps() error importing erfinv from scipy.special: "+\
                            "{0}".format(str(e)))

        par_names = [
            name for name in jco.col_names if name not in decision_var_names
        ]
        if len(par_names) == 0:
            raise Exception("to_mps() error: risk != 0.5, but no "+\
                            "non-decision-var parameters in the jco")
        unc_jco = jco.get(col_names=par_names)
        unc_pst = pst.get(par_names=par_names)
        sc = Schur(jco=unc_jco, pst=unc_pst, forecasts=order_obs_constraints)
        constraint_std = sc.get_forecast_summary().loc[:, "post_var"].apply(
            np.sqrt)
        rhs = {}

        # the probit value for a given risk...using the inverse
        # error function
        probit_val = np.sqrt(2.0) * erfinv((2.0 * risk) - 1.0)
        for name in order_obs_constraints:
            mu = unc_pst.res.loc[name, "residual"]
            std = constraint_std.loc[name]
            #if this is a less than constraint, then we want
            # to subtract
            if operators[name] == 'l':
                prob_val = mu - (probit_val * std)
            #if this is a greater than constraint, then we want
            # to add
            elif operators[name] == "g":
                prob_val = mu + (probit_val * std)
            else:
                raise NotImplementedError("chance constraints only " +\
                                          "implemented for 'l' or 'g' " +\
                                          "type constraints, not " +\
                                          "{0}".format(operators[name]))
            rhs[name] = prob_val
    else:
        rhs = {n: pst.res.loc[n, "residual"] for n in order_obs_constraints}

    if mps_filename is None:
        mps_filename = pst.filename.replace(".pst", ".mps")

    with open(mps_filename, 'w') as f:
        f.write("NAME {0}\n".format("pest_opt"))
        f.write("ROWS\n")
        for name in order_obs_constraints:
            f.write(" {0}  {1}\n".format(operators[name],
                                         new_constraint_names[name]))
        f.write(" {0}  {1}\n".format('n', obj_name))

        f.write("COLUMNS\n")
        for dname in order_dec_var:
            jco_jidx = jco.col_names.index(dname)
            for cname in order_obs_constraints:
                jco_iidx = jco.row_names.index(cname)
                v = jco.x[jco_iidx, jco_jidx]
                f.write("    {0:8}  {1:8}   {2:10G}\n".\
                        format(new_decision_names[dname],
                               new_constraint_names[cname],
                               v))
            # f.write("    {0:8}  {1:8}   {2:10G}\n".\
            #         format(new_decision_names[dname],
            #                obj_name,pst.parameter_data.loc[dname,"parval1"]))
            # use the parsed objective coefficients (obj), not the raw obj_func argument
            f.write("    {0:8}  {1:8}   {2:10G}\n".\
                    format(new_decision_names[dname],
                           obj_name,obj[dname]))

        f.write("RHS\n")
        for iname, name in enumerate(order_obs_constraints):
            f.write("    {0:8}  {1:8}   {2:10G}\n".format(
                "rhs", new_constraint_names[name], rhs[name]))
        f.write("BOUNDS\n")
        for name in order_dec_var:
            up,lw = pst.parameter_data.loc[name,"parubnd"],\
                    pst.parameter_data.loc[name,"parlbnd"]
            f.write(" {0:2} {1:8}  {2:8}  {3:10G}\n".\
                    format("UP","BOUND",name,up))
            f.write(" {0:2} {1:8}  {2:8}  {3:10G}\n".\
                    format("LO","BOUND",name,lw))
        f.write("ENDATA\n")
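
The chance-constraint shift above uses the standard-normal quantile for the requested risk, probit(risk) = sqrt(2) * erfinv(2*risk - 1). A small numerical sketch of how a constraint RHS is shifted (the mean and standard deviation below are hypothetical):

import numpy as np
from scipy.special import erfinv

risk = 0.95                                             # 95% chance of honoring the constraint
probit_val = np.sqrt(2.0) * erfinv((2.0 * risk) - 1.0)  # ~1.645

mu, std = 10.0, 2.0   # hypothetical constraint mean and posterior standard deviation
rhs_l = mu - (probit_val * std)   # 'l' (less-than) constraint: shift down, ~6.71
rhs_g = mu + (probit_val * std)   # 'g' (greater-than) constraint: shift up, ~13.29
print(probit_val, rhs_l, rhs_g)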