Example no. 1
def get_mean_temps_2k(rgi, return_prcp):
    from oggm import cfg, utils, workflow, tasks
    from oggm.core.massbalance import PastMassBalance
    # pandas and numpy are needed for the monthly climate tables below
    import numpy as np
    import pandas as pd

    # Initialize OGGM
    cfg.initialize()
    wd = utils.gettempdir(reset=True)
    cfg.PATHS['working_dir'] = wd
    utils.mkdir(wd, reset=True)
    cfg.PARAMS['baseline_climate'] = 'HISTALP'
    # and set standard histalp values
    cfg.PARAMS['temp_melt'] = -1.75
    cfg.PARAMS['prcp_scaling_factor'] = 1.75

    gdir = workflow.init_glacier_regions(rgidf=rgi.split('_')[0],
                                         from_prepro_level=3,
                                         prepro_border=10)[0]
    # run histalp climate on glacier!
    tasks.process_histalp_data(gdir)

    f = gdir.get_filepath('climate_historical')
    with utils.ncDataset(f) as nc:
        refhgt = nc.ref_hgt

    mb = PastMassBalance(gdir, check_calib_params=False)

    df = pd.DataFrame()
    df2 = pd.DataFrame()

    for y in np.arange(1870, 2015):
        for i in np.arange(9, 12):
            flyear = utils.date_to_floatyear(y, i)
            tmp = mb.get_monthly_climate([refhgt], flyear)[0]
            df.loc[y, i] = tmp.mean()

        if return_prcp:
            for i in np.arange(3, 6):
                flyear = utils.date_to_floatyear(y, i)
                pcp = mb.get_monthly_climate([refhgt], flyear)[3]
                df2.loc[y, i] = pcp.mean()

    t99 = df.loc[1984:2014, :].mean().mean()
    t85 = df.loc[1870:1900, :].mean().mean()
    t2k = df.loc[1900:2000, :].mean().mean()

    if return_prcp:
        p99 = df2.loc[1984:2014, :].mean().mean()
        p85 = df2.loc[1870:1900, :].mean().mean()
        p2k = df2.loc[1900:2000, :].mean().mean()
        return t85, t99, t2k, p85, p99, p2k

    return t85, t99, t2k
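# Hedged usage sketch for the function above (the RGI id is only a placeholder
# for a HISTALP-covered glacier; only the part before an optional '_' suffix is
# passed on to OGGM, and pandas/numpy are assumed to be available as pd/np):
t85, t99, t2k = get_mean_temps_2k('RGI60-11.00897', return_prcp=False)
t85, t99, t2k, p85, p99, p2k = get_mean_temps_2k('RGI60-11.00897', return_prcp=True)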
def run_model(param, gdir, y_t, random_climate2, t0, te):
    '''
    :param param: temperature bias, changed by the optimization
    :param gdir:  oggm.GlacierDirectory
    :param y_t: oggm.Flowlines of the observed state (year 2000)
    :param random_climate2: oggm.massbalance.RandomMassBalance
    :param t0: start year of both model runs
    :param te: end year of the past-climate run
    :return: 2 oggm.flowline.FluxBasedModels
             (glacier candidate and predicted glacier model)
    '''

    # run estimated glacier with random climate 2 until equilibrium
    # (glacier candidate)

    # estimated flowline = observed flowline
    estimated_fls = deepcopy(y_t)
    climate = deepcopy(random_climate2)
    # change temp_bias
    climate.temp_bias = param
    random_model = FluxBasedModel(estimated_fls, mb_model=climate, y0=t0)
    random_model.run_until_equilibrium()
    # run glacier candidate with past climate until 2000
    candidate_fls = deepcopy(y_t)
    for i in range(len(y_t)):
        candidate_fls[i].surface_h = random_model.fls[i].surface_h

    past_climate = PastMassBalance(gdir)
    past_model = FluxBasedModel(candidate_fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    past_model.run_until(te)

    return [random_model, past_model]
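# Hedged sketch of an objective function built on run_model, as it is used with
# scipy.optimize.minimize further below. The equal weighting of surface heights
# and widths is an assumption, not taken from the original code; numpy is
# assumed to be imported as np.
def objfunc_sketch(param, gdir, y_t, random_climate2, t0, te):
    _, past_model = run_model(param, gdir, y_t, random_climate2, t0, te)
    return (np.sum((past_model.fls[-1].surface_h - y_t[-1].surface_h) ** 2) +
            np.sum((past_model.fls[-1].widths - y_t[-1].widths) ** 2))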
def _run_parallel_experiment(gdir):

    # read flowlines from pre-processing
    fls = gdir.read_pickle('model_flowlines')
    try:
        # construct searched glacier
        # TODO: y0 in random mass balance?
        random_climate1 = RandomMassBalance(gdir, y0=1850, bias=0, seed=[1])
        random_climate1.temp_bias = -0.75
        commit_model = FluxBasedModel(fls, mb_model=random_climate1,
                                      glen_a=cfg.A, y0=1850)
        commit_model.run_until_equilibrium()
        y_t0 = deepcopy(commit_model)

    # try different seed of mass balance, if equilibrium could not be found
    except Exception:

        # construct searched glacier
        # use a different seed for the retry
        random_climate1 = RandomMassBalance(gdir, y0=1850, bias=0, seed=[2])
        commit_model = FluxBasedModel(fls, mb_model=random_climate1,
                                      glen_a=cfg.A, y0=1850)
        commit_model.run_until_equilibrium()
        y_t0 = deepcopy(commit_model)

    # construct observed glacier
    past_climate = PastMassBalance(gdir)
    commit_model2 = FluxBasedModel(commit_model.fls, mb_model=past_climate,
                                   glen_a=cfg.A, y0=1850)
    commit_model2.run_until(2000)
    y_t = deepcopy(commit_model2)

    # save output in gdir_dir
    experiment = {'y_t0': y_t0, 'y_t': y_t,
                  'climate': random_climate1}
    gdir.write_pickle(experiment, 'synthetic_experiment')
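# Hedged dispatch sketch (assumption: `gdirs` is a list of prepared
# GlacierDirectory objects; a plain multiprocessing pool mirrors the pattern
# used in the later examples):
import multiprocessing as mp
pool = mp.Pool()
pool.map(_run_parallel_experiment, gdirs)
pool.close()
pool.join()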
def _run_parallel_experiment(gdir, t0, te):
    '''
    Run the synthetic experiment for one glacier.

    :param gdir: oggm.GlacierDirectory
    :param t0: start year of the (random) climate used to build the
               synthetic state
    :param te: end year of the past-climate run (observed state)
    :return: None (writes the 'synthetic_experiment' pickle to the gdir)
    '''

    # read flowlines from pre-processing
    fls = gdir.read_pickle('model_flowlines')
    try:
        # construct searched glacier
        random_climate1 = RandomMassBalance(gdir, y0=t0, halfsize=14)

        #set temp bias negative to force a glacier retreat later
        random_climate1.temp_bias = -0.75
        commit_model = FluxBasedModel(fls,
                                      mb_model=random_climate1,
                                      glen_a=cfg.A,
                                      y0=t0)
        commit_model.run_until_equilibrium()
        y_t0 = deepcopy(commit_model)

    # try different seed of mass balance, if equilibrium could not be found
    except Exception:

        # construct searched glacier
        random_climate1 = RandomMassBalance(gdir, y0=t0, halfsize=14)
        commit_model = FluxBasedModel(fls,
                                      mb_model=random_climate1,
                                      glen_a=cfg.A,
                                      y0=t0)
        commit_model.run_until_equilibrium()
        y_t0 = deepcopy(commit_model)

    # construct observed glacier
    past_climate = PastMassBalance(gdir)
    commit_model2 = FluxBasedModel(commit_model.fls,
                                   mb_model=past_climate,
                                   glen_a=cfg.A,
                                   y0=t0)
    commit_model2.run_until(te)
    y_t = deepcopy(commit_model2)

    # save output in gdir_dir
    experiment = {'y_t0': y_t0, 'y_t': y_t, 'climate': random_climate1}
    gdir.write_pickle(experiment, 'synthetic_experiment')
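# Hedged dispatch sketch for the variant with explicit start/end years
# (functools.partial fixes t0/te; the years and `gdirs` are placeholders):
from functools import partial
import multiprocessing as mp
pool = mp.Pool()
pool.map(partial(_run_parallel_experiment, t0=1850, te=2000), gdirs)
pool.close()
pool.join()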
Example no. 5
    def test_crossval(self):

        gdirs = up_to_distrib()

        # in case we ran crossval we need to rerun
        tasks.compute_ref_t_stars(gdirs)
        workflow.execute_entity_task(tasks.local_mustar, gdirs)
        workflow.execute_entity_task(tasks.apparent_mb, gdirs)

        # before crossval
        refmustars = []
        for gdir in gdirs:
            tdf = pd.read_csv(gdir.get_filepath('local_mustar'))
            refmustars.append(tdf['mu_star'].values[0])

        tasks.crossval_t_stars(gdirs)
        file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
        df = pd.read_csv(file, index_col=0)

        # check that the process didn't break anything
        mustars = []
        for gdir in gdirs:
            tdf = pd.read_csv(gdir.get_filepath('local_mustar'))
            mustars.append(tdf['mu_star'].values[0])
        np.testing.assert_allclose(refmustars, mustars)

        # make some mb tests
        from oggm.core.massbalance import PastMassBalance
        for rid in df.index:
            gdir = [g for g in gdirs if g.rgi_id == rid][0]
            h, w = gdir.get_inversion_flowline_hw()
            cfg.PARAMS['use_bias_for_run'] = False
            mbmod = PastMassBalance(gdir)
            mbdf = gdir.get_ref_mb_data().ANNUAL_BALANCE.to_frame(name='ref')
            for yr in mbdf.index:
                mbdf.loc[yr, 'mine'] = mbmod.get_specific_mb(h, w, year=yr)
            mm = mbdf.mean()
            np.testing.assert_allclose(df.loc[rid].bias,
                                       mm['mine'] - mm['ref'],
                                       atol=1e-3)
            cfg.PARAMS['use_bias_for_run'] = True
            mbmod = PastMassBalance(gdir)
            mbdf = gdir.get_ref_mb_data().ANNUAL_BALANCE.to_frame(name='ref')
            for yr in mbdf.index:
                mbdf.loc[yr, 'mine'] = mbmod.get_specific_mb(h, w, year=yr)
            mm = mbdf.mean()
            np.testing.assert_allclose(mm['mine'], mm['ref'], atol=1e-3)
        if yr == 2000:
            plt.plot(x, fls[-1].surface_h, 'k')
        plt.plot(x, model.fls[-1].bed_h, 'k')
        i = i + 1

    plt.subplot(grid[1, :])
    for yr in np.arange(1850, 2000, 50):
        plt.axvline(x=yr, color='k')
        best_id = result['objective_' + str(yr + 50)].idxmin()
        ids = result['objective_' + str(yr + 50)].dropna().index
        if best_id not in ids:
            ids = ids.append(pd.Index([best_id]))
        for i in ids:

            fls = deepcopy(result[str(yr)].dropna().loc[i].fls)
            past_climate = PastMassBalance(gdir)
            model = FluxBasedModel(fls,
                                   mb_model=past_climate,
                                   glen_a=cfg.A,
                                   y0=yr)
            if i == best_id:
                plt.plot(model.yr, model.length_m, 'ro')
                a, b = model.run_until_and_store(yr + 50)
                b.length_m.plot(color='red')
                plt.plot(model.yr, model.length_m, 'bo')
            else:
                a, b = model.run_until_and_store(yr + 50)
                b.length_m.plot(color='grey', alpha=0.3)
        if yr == 1850:
            a, b = model.run_until_and_store(2000)
            b.length_m.plot(linestyle=':', color='red')
Example no. 7
def find_initial_state(gdir):

    global past_climate, random_climate2
    global y_2000

    global x
    global ax1, ax2, ax3

    fls = gdir.read_pickle('model_flowlines')
    past_climate = PastMassBalance(gdir)
    random_climate1 = RandomMassBalance(gdir, y0=1865, halfsize=14)


    #construct searched glacier
    commit_model = FluxBasedModel(fls, mb_model=random_climate1,
                                  glen_a=cfg.A, y0=1850)
    commit_model.run_until_equilibrium()
    y_1880 = copy.deepcopy(commit_model)

    #construct observed glacier
    commit_model2 = FluxBasedModel(commit_model.fls, mb_model=past_climate,
                                  glen_a=cfg.A, y0=1880)
    commit_model2.run_until(2000)
    y_2000 = copy.deepcopy(commit_model2)

    results = pd.DataFrame(columns=['1880', '2000', 'length_1880',
                                    'length_2000'])

    for i in range(4):
        random_climate2 = RandomMassBalance(gdir, y0=1875, halfsize=14)
        res = minimize(objfunc, [0], args=(gdir, y_2000.fls),
                       method='COBYLA',
                       tol=1e-04, options={'maxiter': 100, 'rhobeg': 1})
        #try:
        result_model_1880, result_model_2000 = run_model(res.x, gdir,
                                                         y_2000.fls)
        results = results.append({'1880': result_model_1880,
                                  '2000': result_model_2000,
                                  'length_1880': result_model_1880.length_m,
                                  'length_2000': result_model_2000.length_m},
                                 ignore_index=True)
        #except:
        #   pass

    # create plots
    for i in range(len(fls)):
        plt.figure(i, figsize=(20, 10))
        #add subplot in the corner
        fig, ax1 = plt.subplots(figsize=(20, 10))
        ax2 = fig.add_axes([0.55, 0.66, 0.3, 0.2])
        ax1.set_title(gdir.rgi_id + ' flowline ' + str(i))
        box = ax1.get_position()
        ax1.set_position([box.x0, box.y0, box.width * 0.95, box.height])

        x = np.arange(y_2000.fls[i].nx) * y_2000.fls[i].dx * y_2000.fls[i].map_dx
        for j in range(len(results)):
            ax1.plot(x, results.loc[j, '1880'].fls[i].surface_h, alpha=0.8)
            ax2.plot(x, results.loc[j, '2000'].fls[i].surface_h, alpha=0.8)

        ax1.plot(x, y_1880.fls[i].surface_h, 'k:')
        ax2.plot(x, y_2000.fls[i].surface_h, 'k')

        ax1.plot(x, y_1880.fls[i].bed_h, 'k')
        ax2.plot(x, y_2000.fls[i].bed_h, 'k')

        plot_dir = os.path.join(cfg.PATHS['working_dir'], 'plots')
        if not os.path.exists(plot_dir):
            os.makedirs(plot_dir)
        plt.savefig(os.path.join(plot_dir, gdir.rgi_id + '_fls' + str(i) + '.png'))
    #plt.show()

    results.to_csv(os.path.join(plot_dir,str(gdir.rgi_id)+'.csv'))
Example no. 8
        return res.x


if __name__ == '__main__':
    global all_shapes

    f, ax = plt.subplots(2, sharex=True)

    all_shapes = []
    cfg.initialize()
    cfg.PATHS['climate_file'] = get_demo_file('HISTALP_oetztal.nc')
    # get gdir
    gdir_hef = pickle.load(open('gdir_hef.pkl', 'rb'))

    # get climate model
    random_climate = PastMassBalance(gdir_hef)
    pickle.dump(random_climate, open('random_climate_hef.pkl', 'wb'))
    #random_climate = pickle.load(open('random_climate_hef.pkl','rb'))

    hef_fls = pickle.load(open('hef_y1.pkl', 'rb'))

    commit_model = FluxBasedModel(hef_fls,
                                  mb_model=random_climate,
                                  glen_a=cfg.A,
                                  y0=1850)

    fls_y0 = copy.deepcopy(commit_model.fls)

    commit_model.run_until(1900)
    global fls_y1
    y1 = copy.deepcopy(commit_model)
Example no. 9
def plot_length(gdir, plot_dir, t0, te):
    reconstruction = gdir.read_pickle('reconstruction_output')
    experiment = gdir.read_pickle('synthetic_experiment')
    surface_t0 = pd.DataFrame()
    widths_t0 = pd.DataFrame()
    surface_t = pd.DataFrame()
    widths_t = pd.DataFrame()
    fls_t0 = pd.DataFrame()
    for rec in reconstruction:

        if rec[0] is not None:
            # surface
            surface_t0 = surface_t0.append([rec[0].fls[-1].surface_h],
                                           ignore_index=True)
            surface_t = surface_t.append([rec[1].fls[-1].surface_h],
                                         ignore_index=True)
            widths_t0 = widths_t0.append([rec[0].fls[-1].widths],
                                         ignore_index=True)
            widths_t = widths_t.append([rec[1].fls[-1].widths],
                                       ignore_index=True)
            fls_t0 = fls_t0.append({
                'model': rec[0],
                'length': rec[0].length_m
            },
                                   ignore_index=True)

    past_climate = PastMassBalance(gdir)

    plt.figure(figsize=(25, 15))

    for i in np.arange(0, 30, 1):
        fls = gdir.read_pickle('model_flowlines')
        try:
            fls[-1].surface_h = surface_t0.iloc[i].values

            past_model = FluxBasedModel(fls,
                                        mb_model=past_climate,
                                        glen_a=cfg.A,
                                        y0=t0)
            a, b = past_model.run_until_and_store(te)
            plt.plot(b.length_m.to_series().rolling(36, center=True).mean(),
                     alpha=0.3,
                     color='grey',
                     label='')
        except Exception:
            pass

    # mean plot
    fls = gdir.read_pickle('model_flowlines')
    fls[-1].surface_h = surface_t0.median(axis=0).values

    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    a, b = past_model.run_until_and_store(te)
    plt.plot(b.length_m.to_series().rolling(36, center=True).mean(),
             linewidth=3,
             label='median')

    #objective plot

    # calculate objective
    diff_s = surface_t.subtract(experiment['y_t'].fls[-1].surface_h, axis=1)**2
    diff_w = widths_t.subtract(experiment['y_t'].fls[-1].widths, axis=1)**2
    objective = diff_s.sum(axis=1) + diff_w.sum(axis=1)
    min_id = objective.argmin(axis=0)

    fls = gdir.read_pickle('model_flowlines')
    fls[-1].surface_h = surface_t0.iloc[min_id].values

    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    a, b = past_model.run_until_and_store(te)
    plt.plot(b.length_m.to_series().rolling(36, center=True).mean(),
             'r',
             linewidth=3,
             label='best objective')

    # experiment plot
    fls = gdir.read_pickle('synthetic_experiment')['y_t0'].fls
    past_climate = PastMassBalance(gdir)
    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    a, b = past_model.run_until_and_store(te)
    #plt.plot(b.length_m.to_series().rolling(36, center=True).mean(), 'k:', linewidth=3,
    #         label='experiment')

    fls = gdir.read_pickle('model_flowlines')
    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    plt.plot([2000], [past_model.length_m], 'o')

    if gdir.name != "":
        plt.title(gdir.rgi_id + ': ' + gdir.name, fontsize=25)
    else:
        plt.title(gdir.rgi_id, fontsize=25)
    plt.ylabel('Glacier Length Change (m)', fontsize=25)
    plt.xlabel('Time', fontsize=25)
    plt.legend(loc='best', fontsize=25)
    plt.xlim((t0, te))
    plt.tick_params(axis='both', which='major', labelsize=25)
    plt.savefig(os.path.join(plot_dir, 'lengths_RGI50-11-00687.pdf'), dpi=200)
    #try:
    df = gdir.get_ref_length_data()
    df = df.loc[1855:2000]['dL']
    df = df - df.iloc[-1] + past_model.length_m
    plt.plot(df, 'k', linewidth=3, label='real observations')

    fls = gdir.read_pickle('model_flowlines')
    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    a, b = past_model.run_until_and_store(te)
    plt.plot(b.length_m.to_series().rolling(36, center=True).mean(),
             linewidth=3,
             label='default initial state')
    plt.legend(loc='best', fontsize=25)
    plt.savefig(os.path.join(plot_dir, 'lengths_RGI50-11-00687_obs.pdf'),
                dpi=200)

    #except:
    #    pass
    plt.show()
    return
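# Hedged usage sketch (assumes `gdir` is a prepared GlacierDirectory holding
# the 'reconstruction_output' and 'synthetic_experiment' pickles; the time
# window is a placeholder):
plot_dir = os.path.join(cfg.PATHS['working_dir'], 'plots')
if not os.path.exists(plot_dir):
    os.makedirs(plot_dir)
plot_length(gdir, plot_dir, t0=1850, te=2000)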
Example no. 10
def find_initial_state(gdir):

    global past_climate, random_climate2
    global y_2000

    global x
    global ax1, ax2, ax3

    fls = gdir.read_pickle('model_flowlines')
    past_climate = PastMassBalance(gdir)
    random_climate1 = RandomMassBalance(gdir, y0=1865, halfsize=14)

    #construct searched glacier
    commit_model = FluxBasedModel(fls,
                                  mb_model=random_climate1,
                                  glen_a=cfg.A,
                                  y0=1850)
    commit_model.run_until_equilibrium()
    y_1880 = copy.deepcopy(commit_model)

    #construct observed glacier
    commit_model2 = FluxBasedModel(commit_model.fls,
                                   mb_model=past_climate,
                                   glen_a=cfg.A,
                                   y0=1880)
    commit_model2.run_until(2000)
    y_2000 = copy.deepcopy(commit_model2)

    results = pd.DataFrame(columns=['1880', '2000', 'length_1880'])

    pool = mp.Pool()
    result_list = pool.map(partial(run_parallel, gdir=gdir, y_2000=y_2000),
                           range(300))
    pool.close()
    pool.join()
    result_list = [x for x in result_list if x != [None, None]]
    # create plots
    for i in range(len(result_list[0][0].fls)):
        plt.figure(i, figsize=(20, 10))
        #add subplot in the corner
        fig, ax1 = plt.subplots(figsize=(20, 10))
        ax2 = fig.add_axes([0.55, 0.66, 0.3, 0.2])
        ax1.set_title(gdir.rgi_id + ' flowline ' + str(i))
        box = ax1.get_position()
        ax1.set_position([box.x0, box.y0, box.width * 0.95, box.height])

        x = np.arange(
            y_2000.fls[i].nx) * y_2000.fls[i].dx * y_2000.fls[i].map_dx
        for j in range(len(result_list)):
            if result_list[j][0] is not None:
                ax1.plot(
                    x,
                    result_list[j][0].fls[i].surface_h,
                    alpha=0.8,
                )
                ax2.plot(
                    x,
                    result_list[j][1].fls[i].surface_h,
                    alpha=0.8,
                )

        ax1.plot(x, y_1880.fls[i].surface_h, 'k:')
        ax2.plot(x, y_2000.fls[i].surface_h, 'k')

        ax1.plot(x, y_1880.fls[i].bed_h, 'k')
        ax2.plot(x, y_2000.fls[i].bed_h, 'k')

        plot_dir = os.path.join(cfg.PATHS['working_dir'], 'plots',
                                'Ben_idea_parallel_without_length')

        if not os.path.exists(plot_dir):
            os.makedirs(plot_dir)

        plt.savefig(
            os.path.join(plot_dir, gdir.rgi_id + '_fls' + str(i) + '.png'))
    pickle.dump(result_list,
                open(os.path.join(plot_dir, gdir.rgi_id + '.pkl'), 'wb'))
    solution = [y_1880, y_2000]
    pickle.dump(
        solution,
        open(os.path.join(plot_dir, gdir.rgi_id + '_solution.pkl'), 'wb'))
Example no. 11
def test_animation(gdir):

    import matplotlib as mpl
    import matplotlib.pyplot as plt
    from matplotlib import animation
    mpl.style.use('default')

    fig = plt.figure(figsize=(20, 15))
    ax1 = plt.axes()
    fill = ax1.fill_between([], [],
                            color='grey',
                            alpha=0.1,
                            label='total range',
                            lw=2)
    fill2 = ax1.fill_between([], [], color='C0', alpha=0.5, label='IQR', lw=2)
    time_text = ax1.text(0.6, 0.95, '', transform=ax1.transAxes, size=25)

    plotlays = [2]
    plotcols = ["orange", "red", "C0"]
    label = ['default', 'best objective', 'median']
    lines = []
    for index in range(3):
        lobj = ax1.plot([], [],
                        lw=2,
                        color=plotcols[index],
                        label=label[index])[0]
        lines.append(lobj)

    fls = gdir.read_pickle('model_flowlines')
    past_climate = PastMassBalance(gdir)
    past_model = FluxBasedModel(copy.deepcopy(fls),
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=1865)
    # best objective model
    experiment = gdir.read_pickle('synthetic_experiment')
    df, best = find_best_objective(gdir, experiment['y_t'].fls, 1865, 2000)
    best_fls = copy.deepcopy(df.loc[best, '1865'].fls)
    best_model = FluxBasedModel(copy.deepcopy(best_fls),
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=1865)

    surface = pd.DataFrame()
    for i in df.index:
        surface = surface.append([df.loc[i, '1865'].fls[-1].surface_h],
                                 ignore_index=True)

    # median model
    median_fls = copy.deepcopy(gdir.read_pickle('model_flowlines'))
    median_fls[-1].surface_h = copy.deepcopy(surface.median(axis=0).values)

    median_model = FluxBasedModel(copy.deepcopy(median_fls),
                                  mb_model=past_climate,
                                  glen_a=cfg.A,
                                  y0=1865)

    # quant_25 model
    quant25_fls = copy.deepcopy(gdir.read_pickle('model_flowlines'))
    quant25_fls[-1].surface_h = copy.deepcopy(
        surface.quantile(q=0.25, axis=0).values)

    quant25_model = FluxBasedModel(copy.deepcopy(quant25_fls),
                                   mb_model=past_climate,
                                   glen_a=cfg.A,
                                   y0=1865)

    # quant_75 model
    quant75_fls = copy.deepcopy(gdir.read_pickle('model_flowlines'))
    quant75_fls[-1].surface_h = copy.deepcopy(
        surface.quantile(q=0.75, axis=0).values)

    quant75_model = FluxBasedModel(copy.deepcopy(quant75_fls),
                                   mb_model=past_climate,
                                   glen_a=cfg.A,
                                   y0=1865)

    #min
    min_fls = copy.deepcopy(gdir.read_pickle('model_flowlines'))
    min_fls[-1].surface_h = copy.deepcopy(surface.min(axis=0).values)

    min_model = FluxBasedModel(copy.deepcopy(min_fls),
                               mb_model=past_climate,
                               glen_a=cfg.A,
                               y0=1865)

    #max
    max_fls = copy.deepcopy(gdir.read_pickle('model_flowlines'))
    max_fls[-1].surface_h = copy.deepcopy(surface.max(axis=0).values)

    max_model = FluxBasedModel(copy.deepcopy(max_fls),
                               mb_model=past_climate,
                               glen_a=cfg.A,
                               y0=1865)

    x = np.arange(fls[-1].nx) * fls[-1].dx * fls[-1].map_dx

    def init():
        ax1.plot(x, fls[-1].bed_h, 'k')
        time_text.set_text('')
        for line in lines:
            line.set_data([], [])
        return lines

    def animate(t):
        if t == 1865:
            fls = gdir.read_pickle('model_flowlines')
            past_model.reset_flowlines(copy.deepcopy(fls))
            past_model.reset_y0(1865)

            best_model.reset_flowlines(copy.deepcopy(df.loc[best, '1865'].fls))
            best_model.reset_y0(1865)

            fls = gdir.read_pickle('model_flowlines')
            fls[-1].surface_h = surface.median(axis=0).values
            median_model.reset_flowlines(copy.deepcopy(fls))
            median_model.reset_y0(1865)

            fls = gdir.read_pickle('model_flowlines')
            fls[-1].surface_h = surface.quantile(0.25, axis=0).values
            quant25_model.reset_flowlines(copy.deepcopy(fls))
            quant25_model.reset_y0(1865)

            fls = gdir.read_pickle('model_flowlines')
            fls[-1].surface_h = surface.quantile(0.75, axis=0).values
            quant75_model.reset_flowlines(copy.deepcopy(fls))
            quant75_model.reset_y0(1865)

            fls = gdir.read_pickle('model_flowlines')
            fls[-1].surface_h = surface.min(axis=0).values
            min_model.reset_flowlines(copy.deepcopy(fls))
            min_model.reset_y0(1865)

            fls = gdir.read_pickle('model_flowlines')
            fls[-1].surface_h = surface.max(axis=0).values
            max_model.reset_flowlines(copy.deepcopy(fls))
            max_model.reset_y0(1865)

        else:
            past_model.run_until(t)
            best_model.run_until(t)
            median_model.run_until(t)
            min_model.run_until(t)
            max_model.run_until(t)
            quant25_model.run_until(t)
            quant75_model.run_until(t)

        time_text.set_text('time = %.1f' % t)

        y1 = past_model.fls[-1].surface_h
        y2 = best_model.fls[-1].surface_h
        y3 = median_model.fls[-1].surface_h
        y4 = min_model.fls[-1].surface_h
        y5 = max_model.fls[-1].surface_h
        y6 = quant25_model.fls[-1].surface_h
        y7 = quant75_model.fls[-1].surface_h

        xlist = [x, x, x]
        ylist = [y1, y2, y3]
        ax1.collections.clear()
        fill = ax1.fill_between(x,
                                y4,
                                y5,
                                color='grey',
                                alpha=0.2,
                                label='total range')
        fill2 = ax1.fill_between(x, y6, y7, color='C0', alpha=0.5, label='IQR')

        #for index in range(0,1):
        for lnum, line in enumerate(lines):
            line.set_data(xlist[lnum],
                          ylist[lnum])  # set data for each line separately.

        return (fill2, ) + tuple(lines) + (fill, ) + (time_text, )

    # call the animator.  blit=True means only re-draw the parts that have changed.
    ani = animation.FuncAnimation(fig,
                                  animate,
                                  frames=range(1865, 2005, 5),
                                  init_func=init,
                                  blit=True)

    plt.legend(loc='best', fontsize=20)
    plt.tick_params(axis='both', which='major', labelsize=25)
    plt.xlabel('Distance along the Flowline (m)', fontsize=25)
    plt.ylabel('Altitude (m)', fontsize=25)
    if gdir.name != "":
        plt.title(gdir.rgi_id + ': ' + gdir.name, fontsize=30)
    else:
        plt.title(gdir.rgi_id, fontsize=30)
    ani.save(os.path.join(gdir.dir, 'surface_animation.mp4'))
Example no. 12
def plot_length_change(gdir, plot_dir, t0, te, synthetic_exp=True):
    plot_dir = os.path.join(plot_dir, 'length_change')
    if not os.path.exists(plot_dir):
        os.makedirs(plot_dir)

    reconstruction = gdir.read_pickle('reconstruction_output')

    surface_t0 = pd.DataFrame()
    for rec in reconstruction:
        if rec[0] is not None:
            # surface
            surface_t0 = surface_t0.append([rec[0].fls[-1].surface_h],
                                           ignore_index=True)

    past_climate = PastMassBalance(gdir)

    fig = plt.figure(figsize=(25, 15))
    ax = fig.add_subplot(111)

    for i in np.arange(0, 5, 1):
        #fls = gdir.read_pickle('model_flowlines')
        if reconstruction[i][0] is not None:
            fls = copy.deepcopy(reconstruction[i][0].fls)
            past_model = FluxBasedModel(fls,
                                        mb_model=past_climate,
                                        glen_a=cfg.A,
                                        y0=t0)
            a, b = past_model.run_until_and_store(te)
            (b.length_m.rolling(time=36, center=True).mean() -
             b.length_m[-1]).plot(ax=ax, alpha=0.3, color='grey', label='')

    # median plot
    fls = copy.deepcopy(gdir.read_pickle('model_flowlines'))
    fls[-1].surface_h = surface_t0.median(axis=0).values

    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    a, b = past_model.run_until_and_store(te)
    (b.length_m.rolling(time=36, center=True).mean() - b.length_m[-1]).plot(
        ax=ax, linewidth=3, label='median')

    #objective plot

    # calculate objective
    if synthetic_exp:
        experiment = gdir.read_pickle('synthetic_experiment')
        df, min_id = find_best_objective(gdir, experiment['y_t'].fls, 1865,
                                         2000)
    else:
        df, min_id = find_best_objective(gdir, fls, 1865, 2000)

    fls = copy.deepcopy(df.loc[min_id, '1865'].fls)
    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    a, b = past_model.run_until_and_store(te)
    (b.length_m.rolling(time=36, center=True).mean() - b.length_m[-1]).plot(
        ax=ax, color='red', linewidth=3, label='best objective')

    fls = gdir.read_pickle('model_flowlines')
    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)

    if synthetic_exp:
        # experiment plot
        fls = copy.deepcopy(
            gdir.read_pickle('synthetic_experiment')['y_t0'].fls)
        past_climate = PastMassBalance(gdir)
        past_model = FluxBasedModel(fls,
                                    mb_model=past_climate,
                                    glen_a=cfg.A,
                                    y0=t0)
        a, b = past_model.run_until_and_store(te)
        (b.length_m.rolling(time=36, center=True).mean() -
         b.length_m[-1]).plot(ax=ax,
                              color='k',
                              linestyle=':',
                              linewidth=3,
                              label='experiment')
    else:

        try:
            df = gdir.get_ref_length_data()
            df = df.loc[1855:2000]['dL']
            df = df - df.iloc[-1]
            df = df.reset_index().set_index('years')
            df = df.rename(columns={'dL': 'real observations'})
            df.plot(ax=ax,
                    use_index=True,
                    color='k',
                    linewidth=3,
                    label='real observations')
        except Exception:
            pass

    fls = gdir.read_pickle('model_flowlines')
    past_model = FluxBasedModel(fls,
                                mb_model=past_climate,
                                glen_a=cfg.A,
                                y0=t0)
    a, b = past_model.run_until_and_store(te)
    (b.length_m.rolling(time=36, center=True).mean() - b.length_m[-1]).plot(
        ax=ax, linewidth=3, label='default initial state')
    plt.legend(loc='best', fontsize=30)
    if gdir.name != "":
        ax.set_title(gdir.rgi_id + ': ' + gdir.name, fontsize=30)
    else:
        ax.set_title(gdir.rgi_id, fontsize=30)

    ax.set_xlabel('Time', fontsize=35)
    ax.legend(loc='best', fontsize=30)
    plt.xlim((t0, te))
    plt.tick_params(axis='both', which='major', labelsize=35)
    ax.set_ylabel('Glacier Length Change (m) ', fontsize=35)
    plt.savefig(os.path.join(plot_dir,
                             'lengths_change_' + str(gdir.rgi_id) + '.png'),
                dpi=300)

    return
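# Hedged usage sketch (same assumptions as above; set synthetic_exp=False to
# compare against measured length records instead of the synthetic experiment):
plot_length_change(gdir, os.path.join(cfg.PATHS['working_dir'], 'plots'),
                   t0=1850, te=2000, synthetic_exp=True)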
def find_initial_state(gdir):

    global past_climate
    global y_1900, y_1850
    global y_start
    global x
    global ax1, ax2, ax3

    fls = gdir.read_pickle('model_flowlines')
    past_climate = PastMassBalance(gdir)
    random_climate = RandomMassBalance(gdir)
    commit_model = FluxBasedModel(fls,
                                  mb_model=random_climate,
                                  glen_a=cfg.A,
                                  y0=1850)
    commit_model.run_until_equilibrium()
    y_1850 = copy.deepcopy(commit_model)
    commit_model = FluxBasedModel(commit_model.fls,
                                  mb_model=past_climate,
                                  glen_a=cfg.A,
                                  y0=1850)

    commit_model.run_until(2000)
    y_1900 = copy.deepcopy(commit_model)
    x = np.arange(
        y_1900.fls[-1].nx) * y_1900.fls[-1].dx * y_1900.fls[-1].map_dx

    plt.figure(figsize=(20, 10))
    fig, ax1 = plt.subplots()
    ax2 = fig.add_axes([0.55, 0.66, 0.3, 0.2])
    ax1.set_title(gdir.rgi_id)

    box = ax1.get_position()
    ax1.set_position([box.x0, box.y0, box.width * 0.95, box.height])

    # Put a legend to the right of the current axis

    #plt.setp(ax1.get_xticklabels(), visible=False)
    #plt.plot(x, y_1850.fls[-1].surface_h, 'k:', label='solution')
    #plt.plot(x, y_1850.fls[-1].bed_h, 'k', label='bed')
    #plt.legend(loc='best')

    #ax2 = plt.subplot(412, sharex=ax1)
    #plt.setp(ax2.get_xticklabels(), visible=False)
    '''
    ax3 = plt.subplot(413,sharex=ax1)
    ax3.plot(x, np.zeros(len(x)), 'k--')

    ax4 = plt.subplot(414, sharex=ax1)
    ax4.plot(x, np.zeros(len(x)), 'k--')
    '''

    growing_model = FluxBasedModel(fls,
                                   mb_model=past_climate,
                                   glen_a=cfg.A,
                                   y0=1850)

    y_start = copy.deepcopy(growing_model)

    colors = [
        pylab.cm.Blues(np.linspace(0.3, 1, 2)),
        pylab.cm.Reds(np.linspace(0.3, 1, 2)),
        pylab.cm.Greens(np.linspace(0.3, 1, 2))
    ]

    #for i in [0,0.2,0.4,0.6,0.8,1,5,10,15,20,25,30,35,40,45,50]:
    j = 0

    for i in [1]:
        k = 0
        col = colors[j]
        j = j + 1
        for t in [20, 40, 60, 80, 100, 120, 140, 150]:
            res = minimize(objfunc, [0],
                           args=(
                               gdir,
                               y_1900.fls,
                               t,
                               i,
                           ),
                           method='COBYLA',
                           tol=1e-04,
                           options={
                               'maxiter': 500,
                               'rhobeg': 2
                           })
            try:
                result_model_1850, result_model_1900 = run_model(
                    res.x, gdir, y_1900.fls, t, i)

                f = np.sum(abs(result_model_1900.fls[-1].surface_h-y_1900.fls[-1].surface_h) ** 2) + \
                    np.sum(abs(y_1900.fls[-1].widths - result_model_1900.fls[-1].widths) ** 2)

                dif_s = result_model_1900.fls[-1].surface_h - y_1900.fls[
                    -1].surface_h
                dif_w = result_model_1900.fls[-1].widths - y_1900.fls[-1].widths
                #if np.max(dif_s)<40 and np.max(dif_w)<15:
                ax1.plot(x,
                         result_model_1850.fls[-1].surface_h,
                         alpha=0.8,
                         color=col[k],
                         label='t=' + str(t))
                ax2.plot(x,
                         result_model_1900.fls[-1].surface_h,
                         alpha=0.8,
                         color=col[k])

            except Exception:
                pass
            k = k + 1

    ax1.plot(x, y_1850.fls[-1].surface_h,
             'k:')  #, label='surface elevation (not known)')
    ax1.plot(x, y_1850.fls[-1].bed_h, 'k')  #, label='bed topography')
    ax2.plot(x,
             y_1900.fls[-1].surface_h,
             'k',
             label='surface elevation (observed)')
    ax2.plot(x, y_1900.fls[-1].bed_h, 'k', label='bed')
    #ax3.plot(x,np.zeros(len(x)),'k:')
    ax1.annotate('t = 1850',
                 xy=(0.1, 0.95),
                 xycoords='axes fraction',
                 fontsize=13)
    ax2.annotate('t = 2000',
                 xy=(0.1, 0.9),
                 xycoords='axes fraction',
                 fontsize=9)
    ax1.set_xlabel('Distance along the Flowline (m)')
    ax1.set_ylabel('Altitude (m)')

    ax2.set_xlabel('Distance along the Flowline (m)')
    ax2.set_ylabel('Altitude (m)')

    ax1.legend(loc='center left', bbox_to_anchor=(1, 0.5))
    ax2.legend(loc='best')
    plot_dir = os.path.join(cfg.PATHS['working_dir'], 'plots')
    if not os.path.exists(plot_dir):
        os.makedirs(plot_dir)
    plt.savefig(os.path.join(plot_dir, gdir.rgi_id + '.png'))
    plt.show()
def find_initial_state(gdir):

    global past_climate
    global y_1900
    global y_start

    fls = gdir.read_pickle('model_flowlines')
    past_climate = PastMassBalance(gdir)
    commit_model = FluxBasedModel(fls,
                                  mb_model=past_climate,
                                  glen_a=cfg.A,
                                  y0=1850)
    y_1850 = copy.deepcopy(commit_model)
    commit_model.run_until(1900)
    y_1900 = copy.deepcopy(commit_model)
    x = np.arange(
        y_1900.fls[-1].nx) * y_1900.fls[-1].dx * y_1900.fls[-1].map_dx

    plt.figure()

    ax1 = plt.subplot(311)
    ax1.set_title(gdir.rgi_id)
    plt.setp(ax1.get_xticklabels(), visible=False)
    plt.plot(x, y_1850.fls[-1].surface_h, 'k:', label='solution')
    plt.plot(x, y_1850.fls[-1].bed_h, 'k', label='bed')
    plt.legend(loc='best')

    ax2 = plt.subplot(312, sharex=ax1)
    plt.setp(ax2.get_xticklabels(), visible=False)
    ax2.plot(x, y_1900.fls[-1].surface_h, 'k:', label='solution')
    ax2.plot(x, y_1900.fls[-1].bed_h, 'k', label='bed')

    ax3 = plt.subplot(313, sharex=ax1)
    ax3.plot(x, np.zeros(len(x)), 'k--')

    growing_climate = LinearMassBalance(past_climate.get_ela(1850), 3)

    growing_model = FluxBasedModel(fls,
                                   mb_model=growing_climate,
                                   glen_a=cfg.A,
                                   y0=1850)
    #growing_model.fls[-1].surface_h=growing_model.fls[-1].bed_h
    success = 0

    y_start = copy.deepcopy(growing_model)
    y_start.run_until(1950)

    res = minimize(objfunc,
                   0.5,
                   args=(
                       gdir,
                       past_climate.get_ela(1850),
                   ),
                   method='COBYLA',
                   tol=1e-04,
                   options={
                       'maxiter': 500,
                       'rhobeg': 5
                   })
    #print(res)

    result_model_1850, result_model_1900 = run_model(
        res.x, gdir, past_climate.get_ela(1850))
    dif = result_model_1900.fls[-1].surface_h - y_1900.fls[-1].surface_h
    s = np.sum(np.abs(dif))
    print(gdir.rgi_id, s)
    if s < 25:
        #print(gdir.rgi_id, i)
        success += 1
        ax1.plot(x, result_model_1850.fls[-1].surface_h, label='optimum')
        ax2.plot(x, result_model_1900.fls[-1].surface_h, label='optimum')
        ax3.plot(x, dif)
    '''
    #ax[0].plot(x, y_start.fls[-1].surface_h, label=y_start.yr)
    ax[0].plot(x,result_model_1850.fls[-1].surface_h, label = 'optimum')
    ax[1].plot(x, result_model_1900.fls[-1].surface_h,
               label='optimum')

    ax[0].plot(x, y_1850.fls[-1].surface_h,':', label=y_1850.yr)
    ax[0].plot(x, y_1850.fls[-1].bed_h, 'k--')
    ax[0].legend(loc='best')
    ax[0].set_title(gdir.rgi_id)

    ax[1].plot(x,y_1900.fls[-1].surface_h,':',label = y_1900.yr)
    ax[1].plot(x, y_1900.fls[-1].bed_h, 'k--')
    ax[1].legend(loc='best')
    '''
    ax1.legend(loc='best')
    if success > 0:
        plot_dir = os.path.join(cfg.PATHS['working_dir'], 'plots', 'surface_h')
        if not os.path.exists(plot_dir):
            os.makedirs(plot_dir)
        plt.savefig(os.path.join(plot_dir, gdir.rgi_id + '.png'))
        plt.show()
    return True
Example no. 15
# Read the rgi file
rgidf = gpd.read_file(path.join(WORKING_DIR, 'mb_ref_glaciers.shp'))

# Go - initialize working directories
gdirs = workflow.init_glacier_regions(rgidf)

# Cross-validation
file = path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
cvdf = pd.read_csv(file, index_col=0)
for gd in gdirs:
    t_cvdf = cvdf.loc[gd.rgi_id]
    heights, widths = gd.get_inversion_flowline_hw()
    # Mass-balance model with cross-validated parameters instead
    mb_mod = PastMassBalance(gd,
                             mu_star=t_cvdf.cv_mustar,
                             bias=t_cvdf.cv_bias,
                             prcp_fac=t_cvdf.cv_prcp_fac)
    # Mass-balance timeseries, observed and simulated
    refmb = gd.get_ref_mb_data().copy()
    refmb['OGGM'] = mb_mod.get_specific_mb(heights, widths, year=refmb.index)
    # Compare their standard deviation
    std_ref = refmb.ANNUAL_BALANCE.std()
    rcor = np.corrcoef(refmb.OGGM, refmb.ANNUAL_BALANCE)[0, 1]
    if std_ref == 0:
        # I think that such a thing happens with some geodetic values
        std_ref = refmb.OGGM.std()
        rcor = 1
    # Store the scores
    cvdf.loc[gd.rgi_id,
             'CV_MB_BIAS'] = (refmb.OGGM.mean() - refmb.ANNUAL_BALANCE.mean())
    cvdf.loc[gd.rgi_id, 'CV_MB_SIGMA_BIAS'] = (refmb.OGGM.std() / std_ref)
Example no. 16
def plot_objective_surface(gdir, plot_dir, i, best=True):

    #plot_dir = os.path.join(plot_dir, 'surface')
    if not os.path.exists(plot_dir):
        os.makedirs(plot_dir)
    reconstruction = gdir.read_pickle('reconstruction_output')
    experiment = gdir.read_pickle('synthetic_experiment')
    fls = gdir.read_pickle('model_flowlines')

    surface_t0 = pd.DataFrame()
    surface_t = pd.DataFrame()
    widths_t0 = pd.DataFrame()
    widths_t = pd.DataFrame()

    analysis = pd.DataFrame()

    plt.figure(figsize=(25, 15))
    if gdir.name != "":
        plt.title(gdir.rgi_id + ': ' + gdir.name, fontsize=30)
    else:
        plt.title(gdir.rgi_id, fontsize=25)

    x = np.arange(experiment['y_t'].fls[i].nx) * \
        experiment['y_t'].fls[i].dx * experiment['y_t'].fls[-1].map_dx

    plt.annotate(r'$t =  2000$',
                 xy=(0.1, 0.95),
                 xycoords='axes fraction',
                 fontsize=30)

    for rec in reconstruction:

        if rec[0] is not None:
            # surface
            surface_t0 = surface_t0.append([rec[0].fls[i].surface_h],
                                           ignore_index=True)
            surface_t = surface_t.append([rec[1].fls[i].surface_h],
                                         ignore_index=True)
            widths_t0 = widths_t0.append([rec[0].fls[i].widths],
                                         ignore_index=True)
            widths_t = widths_t.append([rec[1].fls[i].widths],
                                       ignore_index=True)

            plt.plot(x,
                     rec[1].fls[i].surface_h -
                     experiment['y_t'].fls[-1].surface_h,
                     color='grey',
                     alpha=0.3)

    if best:
        # calculate objective
        diff_s = surface_t.subtract(experiment['y_t'].fls[-1].surface_h,
                                    axis=1)**2
        diff_w = widths_t.subtract(experiment['y_t'].fls[-1].widths, axis=1)**2
        objective = diff_s.sum(axis=1) + diff_w.sum(axis=1)
        min_id = objective.argmin(axis=0)
        plt.plot(x,
                 surface_t.iloc[min_id].values -
                 experiment['y_t'].fls[-1].surface_h,
                 'r',
                 linewidth=3,
                 label='best objective')

    # plot median
    fls[-1].surface_h = surface_t0.median(axis=0).values
    past_climate = PastMassBalance(gdir)
    model = FluxBasedModel(fls, mb_model=past_climate, y0=1865)
    model.run_until(2000)
    plt.plot(x,
             model.fls[-1].surface_h - experiment['y_t'].fls[-1].surface_h,
             linewidth=3,
             label='median')
    '''
    ax1.plot(x, surface_t0.median(axis=0), linewidth=2)
    ax1.fill_between(x, surface_t0.quantile(q=0.75, axis=0).values,
                     surface_t0.quantile(q=0.25, axis=0).values,
                     alpha=0.5,label='IQR'+r'$\left(\widehat{x}_t^j\right)$')
    ax1.fill_between(x, surface_t0.min(axis=0).values,
                     surface_t0.max(axis=0).values, alpha=0.2, color='grey',
                     label='range'+r'$\left(\widehat{x}_t^j\right)$')

    ax2.plot(x, surface_t.median(axis=0),linewidth=2)
    ax2.fill_between(x, surface_t.quantile(q=0.75, axis=0).values,
                     surface_t.quantile(q=0.25, axis=0).values,
                     alpha=0.5)
    ax2.fill_between(x, surface_t.min(axis=0).values,
                     surface_t.max(axis=0).values, alpha=0.2,color='grey')
    '''
    plt.legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=15)
    plt.xlabel('Distance along the Flowline (m)', fontsize=30)
    plt.ylabel('Difference in Surface Elevation (m)', fontsize=30)

    plt.tick_params(axis='both', which='major', labelsize=25)
    plt.savefig(os.path.join(plot_dir, 'diff_s_HEF.png'))
    plt.show()
    #plt.close()

    return
Example no. 17
def minor_xval_statistics(gdirs):
    # initialize the pandas dataframes

    # to store mass balances of every glacier
    mbdf = pd.DataFrame([], index=np.arange(1850, 2050))

    # Cross-validation
    file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
    cvdf = pd.read_csv(file, index_col=0)

    # dataframe output
    xval = pd.DataFrame([],
                        columns=[
                            'RGIId', 'Name', 'tstar_bias', 'xval_bias',
                            'interp_bias', 'mustar', 'tstar', 'xval_mustar',
                            'xval_tstar', 'interp_mustar'
                        ])

    for gd in gdirs:
        t_cvdf = cvdf.loc[gd.rgi_id]
        heights, widths = gd.get_inversion_flowline_hw()

        # Observed mass-balance
        refmb = gd.get_ref_mb_data().copy()

        # Mass-balance model with cross-validated parameters instead
        mb_mod = PastMassBalance(gd,
                                 mu_star=t_cvdf.cv_mustar,
                                 bias=t_cvdf.cv_bias,
                                 prcp_fac=t_cvdf.cv_prcp_fac)
        refmb['OGGM_cv'] = mb_mod.get_specific_mb(heights,
                                                  widths,
                                                  year=refmb.index)
        # Compare their standard deviation
        std_ref = refmb.ANNUAL_BALANCE.std()
        rcor = np.corrcoef(refmb.OGGM_cv, refmb.ANNUAL_BALANCE)[0, 1]
        if std_ref == 0:
            # I think that such a thing happens with some geodetic values
            std_ref = refmb.OGGM_cv.std()
            rcor = 1
        # Store the scores
        cvdf.loc[gd.rgi_id, 'CV_MB_BIAS'] = (refmb.OGGM_cv.mean() -
                                             refmb.ANNUAL_BALANCE.mean())
        cvdf.loc[gd.rgi_id,
                 'CV_MB_SIGMA_BIAS'] = (refmb.OGGM_cv.std() / std_ref)
        cvdf.loc[gd.rgi_id, 'CV_MB_COR'] = rcor

        # Mass-balance model with interpolated mu_star
        mb_mod = PastMassBalance(gd,
                                 mu_star=t_cvdf.interp_mustar,
                                 bias=t_cvdf.cv_bias,
                                 prcp_fac=t_cvdf.cv_prcp_fac)
        refmb['OGGM_mu_interp'] = mb_mod.get_specific_mb(heights,
                                                         widths,
                                                         year=refmb.index)
        cvdf.loc[gd.rgi_id, 'INTERP_MB_BIAS'] = (refmb.OGGM_mu_interp.mean() -
                                                 refmb.ANNUAL_BALANCE.mean())

        # Mass-balance model with best guess tstar
        mb_mod = PastMassBalance(gd,
                                 mu_star=t_cvdf.mustar,
                                 bias=t_cvdf.bias,
                                 prcp_fac=t_cvdf.prcp_fac)
        refmb['OGGM_tstar'] = mb_mod.get_specific_mb(heights,
                                                     widths,
                                                     year=refmb.index)
        cvdf.loc[gd.rgi_id, 'tstar_MB_BIAS'] = (refmb.OGGM_tstar.mean() -
                                                refmb.ANNUAL_BALANCE.mean())

        # Pandas DataFrame Output
        #
        # 1. statistics
        tbias = cvdf.loc[gd.rgi_id, 'tstar_MB_BIAS']
        xbias = cvdf.loc[gd.rgi_id, 'CV_MB_BIAS']
        ibias = cvdf.loc[gd.rgi_id, 'INTERP_MB_BIAS']
        xval = xval.append(
            {
                'Name': gd.name,
                'RGIId': gd.rgi_id,
                'tstar_bias': tbias,
                'xval_bias': xbias,
                'interp_bias': ibias,
                'mustar': t_cvdf.mustar,
                'tstar': t_cvdf.tstar,
                'xval_mustar': t_cvdf.cv_mustar,
                'xval_tstar': t_cvdf.cv_tstar,
                'interp_mustar': t_cvdf.interp_mustar
            },
            ignore_index=True)

        #
        # 2. mass balance timeseries
        mbarray = np.dstack(
            (refmb.ANNUAL_BALANCE, refmb.OGGM_tstar, refmb.OGGM_cv)).squeeze()

        mbdf_add = pd.DataFrame(
            mbarray,
            columns=[[gd.rgi_id, gd.rgi_id, gd.rgi_id],
                     ['measured', 'calibrated', 'crossvalidated']],
            index=refmb.index)
        mbdf = pd.concat([mbdf, mbdf_add], axis=1)

    mbdf.columns = pd.MultiIndex.from_tuples(mbdf.columns)

    mbdf = mbdf.dropna(how='all')

    xval.index = xval.RGIId

    return xval, mbdf
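# Hedged usage sketch (assumes `gdirs` is the list of reference glacier
# directories prepared above; output file names are placeholders):
xval, mbdf = minor_xval_statistics(gdirs)
xval.to_csv(os.path.join(cfg.PATHS['working_dir'], 'xval_statistics.csv'))
mbdf.to_csv(os.path.join(cfg.PATHS['working_dir'], 'xval_mb_timeseries.csv'))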
Example no. 18
execute_entity_task(tasks.apparent_mb, gdirs)
# Recompute after the first round - this is picky, but necessary because
# geometries may change after apparent_mb's filtering
tasks.compute_ref_t_stars(gdirs)
tasks.distribute_t_stars(gdirs)
execute_entity_task(tasks.apparent_mb, gdirs)

# Model validation
tasks.quick_crossval_t_stars(gdirs)  # for later
tasks.distribute_t_stars(gdirs)  # To restore after cross-val

# Tests: for all glaciers, the mass-balance around tstar and the
# bias with observation should be approx 0
from oggm.core.massbalance import (ConstantMassBalance, PastMassBalance)
for gd in gdirs:
    heights, widths = gd.get_inversion_flowline_hw()

    mb_mod = ConstantMassBalance(gd, bias=0)  # bias=0 because of calib!
    mb = mb_mod.get_specific_mb(heights, widths)
    np.testing.assert_allclose(mb, 0, atol=10)  # numerical errors

    mb_mod = PastMassBalance(gd)  # Here we need the computed bias
    refmb = gd.get_ref_mb_data().copy()
    refmb['OGGM'] = mb_mod.get_specific_mb(heights, widths, year=refmb.index)
    np.testing.assert_allclose(refmb.OGGM.mean(),
                               refmb.ANNUAL_BALANCE.mean(),
                               atol=10)

# Log
log.info('Calibration is done!')
Example no. 19
def quick_crossval(gdirs, xval, major=0):
    # following climate.quick_crossval_t_stars
    # but minimized for performance

    full_ref_df = pd.read_csv(os.path.join(cfg.PATHS['working_dir'],
                                           'ref_tstars.csv'),
                              index_col=0)

    tmpdf = pd.DataFrame(
        [], columns=['std_oggm', 'std_ref', 'rmse', 'core', 'bias'])

    for i, rid in enumerate(full_ref_df.index):

        # the glacier to look at
        gdir = [g for g in gdirs if g.rgi_id == rid][0]

        # the reference glaciers
        tmp_ref_df = full_ref_df.loc[full_ref_df.index != rid]

        # select reference glacier directories
        # Only necessary if tasks.compute_ref_t_stars is uncommented below
        # ref_gdirs = [g for g in gdirs if g.rgi_id != rid]

        # before the cross-val store the info about "real" mustar
        rdf = pd.read_csv(gdir.get_filepath('local_mustar'))
        full_ref_df.loc[rid, 'mustar'] = rdf['mu_star'].values[0]

        # redistribute t_star
        with utils.DisableLogger():
            # compute_ref_t_stars should be done again for
            # every crossvalidation step
            # This will/might have an influence if one of the 10 surrounding
            # glaciers of the current glacier has more than one t_star
            # If so, the currently crossvalidated glacier was probably
            # used to select one t_star for this surrounding glacier.
            #
            # But: compute_ref_t_stars is very time consuming. And the
            # influence is probably very small. Also only 40 out of the 253
            # reference glaciers do have more than one possible t_star.
            #
            # tasks.compute_ref_t_stars(ref_gdirs)
            tasks.distribute_t_stars([gdir], ref_df=tmp_ref_df)

        # read crossvalidated values
        rdf = pd.read_csv(gdir.get_filepath('local_mustar'))

        # ----
        # --- MASS-BALANCE MODEL
        heights, widths = gdir.get_inversion_flowline_hw()
        mb_mod = PastMassBalance(gdir,
                                 mu_star=rdf['mu_star'].values[0],
                                 bias=rdf['bias'].values[0],
                                 prcp_fac=rdf['prcp_fac'].values[0])

        # Mass-balance timeseries, observed and simulated
        refmb = gdir.get_ref_mb_data().copy()
        refmb['OGGM'] = mb_mod.get_specific_mb(heights,
                                               widths,
                                               year=refmb.index)

        # store single glacier results
        bias = refmb.OGGM.mean() - refmb.ANNUAL_BALANCE.mean()
        rmse = np.sqrt(np.mean((refmb.OGGM - refmb.ANNUAL_BALANCE)**2))
        rcor = np.corrcoef(refmb.OGGM, refmb.ANNUAL_BALANCE)[0, 1]

        ref_std = refmb.ANNUAL_BALANCE.std()

        # unclear how to treat this best
        if ref_std == 0:
            ref_std = refmb.OGGM.std()
            rcor = 1

        tmpdf.loc[len(tmpdf.index)] = {
            'std_oggm': refmb.OGGM.std(),
            'std_ref': ref_std,
            'bias': bias,
            'rmse': rmse,
            'core': rcor
        }

        if not major:
            # store cross validated values
            full_ref_df.loc[rid, 'cv_tstar'] = int(rdf['t_star'].values[0])
            full_ref_df.loc[rid, 'cv_mustar'] = rdf['mu_star'].values[0]
            full_ref_df.loc[rid, 'cv_bias'] = rdf['bias'].values[0]
            full_ref_df.loc[rid, 'cv_prcp_fac'] = rdf['prcp_fac'].values[0]

    # and store mean values
    std_quot = np.mean(tmpdf.std_oggm / tmpdf.std_ref)

    xval.loc[len(xval.index)] = {
        'prcpsf': cfg.PARAMS['prcp_scaling_factor'],
        'tliq': cfg.PARAMS['temp_all_liq'],
        'tmelt': cfg.PARAMS['temp_melt'],
        'tgrad': cfg.PARAMS['temp_default_gradient'],
        'std_quot': std_quot,
        'bias': tmpdf['bias'].mean(),
        'rmse': tmpdf['rmse'].mean(),
        'core': tmpdf['core'].mean()
    }

    if major:
        return xval
    else:
        for i, rid in enumerate(full_ref_df.index):
            # the glacier to look at
            gdir = full_ref_df.loc[full_ref_df.index == rid]
            # the reference glaciers
            tmp_ref_df = full_ref_df.loc[full_ref_df.index != rid]

            # Compute the distance
            distances = utils.haversine(gdir.lon.values[0], gdir.lat.values[0],
                                        tmp_ref_df.lon, tmp_ref_df.lat)

            # Take the nine closest glaciers
            aso = np.argsort(distances)[0:9]
            amin = tmp_ref_df.iloc[aso]
            distances = distances[aso]**2
            interp = np.average(amin.mustar, weights=1. / distances)
            full_ref_df.loc[rid, 'interp_mustar'] = interp
        # write
        file = os.path.join(cfg.PATHS['working_dir'], 'crossval_tstars.csv')
        full_ref_df.to_csv(file)
        # alternative: do not write csv file, but store the needed values
        # within xval_minor_statistics

        return xval
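# Hedged driver sketch: quick_crossval records the global parameters it was
# run with, so it is typically called inside a loop over a small parameter
# grid (the grid below is a placeholder):
xval = pd.DataFrame([], columns=['prcpsf', 'tliq', 'tmelt', 'tgrad',
                                 'std_quot', 'bias', 'rmse', 'core'])
for prcpsf in [1.75, 2.5]:
    cfg.PARAMS['prcp_scaling_factor'] = prcpsf
    with utils.DisableLogger():
        tasks.compute_ref_t_stars(gdirs)
        tasks.distribute_t_stars(gdirs)
    xval = quick_crossval(gdirs, xval, major=1)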
def find_initial_state(gdir):

    global past_climate
    global y_1900
    global y_start

    f, ax = plt.subplots(2, sharex=True)

    fls = gdir.read_pickle('model_flowlines')
    past_climate = PastMassBalance(gdir)
    commit_model = FluxBasedModel(fls,
                                  mb_model=past_climate,
                                  glen_a=cfg.A,
                                  y0=1850)
    y_1850 = copy.deepcopy(commit_model)
    commit_model.run_until(1900)
    y_1900 = copy.deepcopy(commit_model)

    x = np.arange(
        y_1900.fls[-1].nx) * y_1900.fls[-1].dx * y_1900.fls[-1].map_dx
    surface_h = y_1900.fls[-1].bed_h
    x0 = surface_h[np.linspace(0, y_1900.fls[-1].nx - 1, 15).astype(int)]

    cons = ({
        'type': 'ineq',
        'fun': con1,
        'args': (gdir, )
    }, {
        'type': 'ineq',
        'fun': con2,
        'args': (gdir, )
    }, {
        'type': 'ineq',
        'fun': con3,
        'args': (gdir, )
    }, {
        'type': 'ineq',
        'fun': con4,
        'args': (gdir, )
    }, {
        'type': 'ineq',
        'fun': con5,
        'args': (gdir, )
    })

    results = [parallel(x, x0, cons, gdir) for x in range(75, 225, 25)]
    #output = [p.get() for p in results]
    output = results
    pickle.dump(
        output,
        open(
            '/home/juliaeis/PycharmProjects/find_inital_state/test_HEF/solution.pkl',
            'wb'))
    for index, shape in enumerate(output):
        try:
            model, end_model = run_model(shape, gdir)
            ax[0].plot(x, model.fls[-1].surface_h, alpha=0.5)
            ax[1].plot(x, end_model.fls[-1].surface_h, alpha=0.5)
        except Exception:
            pass
    ax[0].plot(x, y_1900.fls[-1].bed_h, 'k', label='bed')
    ax[0].plot(x, y_1850.fls[-1].surface_h, 'k:', label='solution')
    ax[0].set_ylabel('Altitude (m)')
    ax[0].set_xlabel('Distance along the flowline (m)')
    ax[0].set_title('1850')
    ax[0].legend(loc='best')

    ax[1].plot(x, y_1900.fls[-1].bed_h, 'k', label='bed')
    ax[1].plot(x, y_1900.fls[-1].surface_h, 'k:', label='solution')
    ax[1].legend(loc='best')
    ax[1].set_ylabel('Altitude (m)')
    ax[1].set_xlabel('Distance along the flowline (m)')
    ax[1].set_title('1900')
    plot_dir = os.path.join(cfg.PATHS['working_dir'], 'plots')
    if not os.path.isdir(plot_dir):
        os.makedirs(plot_dir)
    plt.savefig(os.path.join(plot_dir, gdir.rgi_id + '.png'))
    plt.show()
    print(gdir.rgi_id, 'finished')
Example no. 21
    def __init__(self,
                 gdir,
                 magicc_ts=None,
                 dt_per_dt=1,
                 dp_per_dt=0,
                 mu_star=None,
                 bias=None,
                 y0=None,
                 halfsize=15,
                 filename='climate_historical',
                 input_filesuffix='',
                 **kwargs):
        """Initialize

        Parameters
        ----------
        gdir : GlacierDirectory
            the glacier directory
        magicc_ts : pd.Series
            the GMT time series
        mu_star : float, optional
            set to the alternative value of mu* you want to use
            (the default is to use the calibrated value)
        bias : float, optional
            set to the alternative value of the annual bias [mm we yr-1]
            you want to use (the default is to use the calibrated value)
        y0 : int, optional, default: tstar
            the year at the center of the period of interest. The default
            is to use tstar as center.
        dt_per_dt : float, optional, default 1
            the local climate change signal, in units of °C per °C
        dp_per_dt : float, optional, default 0
            the local precipitation change signal, as relative precipitation
            change per °C of global temperature change
        halfsize : int, optional
            the half-size of the time window (window size = 2 * halfsize + 1)
        filename : str, optional
            set to a different BASENAME if you want to use alternative climate
            data.
        input_filesuffix : str
            the file suffix of the input climate file
        """

        if magicc_ts is None:
            raise InvalidParamsError('Need a magicc ts!')

        super(MagiccMassBalance, self).__init__()
        self.mbmod = MagiccConstantMassBalance(
            gdir,
            mu_star=mu_star,
            bias=bias,
            y0=y0,
            halfsize=halfsize,
            filename=filename,
            input_filesuffix=input_filesuffix,
            **kwargs)

        self.valid_bounds = self.mbmod.valid_bounds
        self.hemisphere = gdir.hemisphere

        # Set ys and ye
        self.ys = int(magicc_ts.index[0])
        self.ye = int(magicc_ts.index[-1])

        # Correct for dp_per_dt signal
        if len(np.atleast_1d(dp_per_dt)) == 12:
            ref_t = magicc_ts.loc[y0 - halfsize:y0 + halfsize].mean()
            prcp_ts = (magicc_ts - ref_t).values[:, np.newaxis] * dp_per_dt
            prcp_ts = pd.DataFrame(data=prcp_ts,
                                   index=magicc_ts.index,
                                   columns=np.arange(1, 13))
        else:
            ref_t = magicc_ts.loc[y0 - halfsize:y0 + halfsize].mean()
            prcp_ts = (magicc_ts - ref_t) * dp_per_dt

        # We correct the original factor - don't forget to also scale the diff
        self.prcp_fac_ts = self.mbmod.prcp_fac + self.mbmod.prcp_fac * prcp_ts

        # Correct for dt_per_dt signal
        if len(np.atleast_1d(dt_per_dt)) == 12:
            magicc_ts = pd.DataFrame(data=magicc_ts.values[:, np.newaxis] *
                                     dt_per_dt,
                                     index=magicc_ts.index,
                                     columns=np.arange(1, 13))
        else:
            magicc_ts = magicc_ts * dt_per_dt

        years = magicc_ts.loc[y0 - halfsize:y0 + halfsize].index.values

        # OK now check the bias to apply based on y0 and halfsize
        fls = gdir.read_pickle('model_flowlines')
        mb_ref = PastMassBalance(gdir)
        mb_ref = mb_ref.get_specific_mb(fls=fls, year=years).mean()

        def to_minimize(temp_bias):
            self.temp_bias_ts = magicc_ts - temp_bias
            mb_mine = self.get_specific_mb(fls=fls, year=years).mean()
            return mb_mine - mb_ref

        temp_bias = optimize.brentq(to_minimize, -10, 10, xtol=1e-5)
        self.temp_bias_ts = magicc_ts - temp_bias
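# Hedged instantiation sketch (the GMT series below is synthetic; in practice
# magicc_ts would come from a MAGICC scenario and gdir from a prepared glacier
# directory, and numpy/pandas are assumed to be imported as np/pd):
years = np.arange(2000, 2101)
magicc_ts = pd.Series(np.linspace(0., 2.5, len(years)), index=years)
mb = MagiccMassBalance(gdir, magicc_ts=magicc_ts, dt_per_dt=1.2,
                       y0=2014, halfsize=10)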