Example #1
def plot_iterative_behaviour(gdir,
                             subdir,
                             figsize=(4.5, 3),
                             file_extension='png',
                             reset=False):
    fig = plt.figure(figsize=figsize)
    case = gdir.case
    ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
    inv_settings = load_pickle(
        get_subdir_filepath(gdir, subdir, 'inversion_settings'))
    noise = 0.
    if os.path.exists(get_subdir_filepath(gdir, subdir, 'dem_noise')):
        noise = np.load(get_subdir_filepath(gdir, subdir, 'dem_noise'))
    noisy_ref_surf = ref_surf + noise
    ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
    ref_inner_mask = compute_inner_mask(ref_ice_mask, full_array=True)

    dl = load_pickle(get_subdir_filepath(gdir, subdir, 'data_logger'))

    reg_parameters = inv_settings['reg_parameters']
    interesting_costs = []
    cost_names = []
    for l, lamb in enumerate(reg_parameters):
        if lamb != 0:
            interesting_costs.append(l)
            cost_names.append('J{:d}'.format(l))

    interesting_costs.append(-1)
    cost_names.append('surf_misfit')
    # make sure all directories exist:
    plot_dir = os.path.join(gdir.dir, subdir, 'plot')
    if reset:
        if os.path.exists(plot_dir):
            shutil.rmtree(plot_dir)
    os.makedirs(plot_dir, exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'bed_error'), exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'surf_error'), exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'summed_cost'), exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'gradient'), exist_ok=True)
    for c_name in cost_names:
        os.makedirs(os.path.join(plot_dir, c_name), exist_ok=True)
    dl.plot_rmses(plot_dir)
    dl.plot_c_terms(plot_dir)

    for i in dl.step_indices:
        plot_iterative_step(dl,
                            i,
                            interesting_costs,
                            cost_names,
                            plot_dir,
                            case,
                            ref_ice_mask,
                            ref_inner_mask,
                            noisy_ref_surf,
                            reg_parameters,
                            file_extension,
                            existing_fig=fig)

    plt.close(fig)
Example #2
def eval_identical_twin(idir):
    header = 'case,run,icevolerr,rmsebed,rmsesurf,biasbed,biassurf,' \
             'corr,rmsefg,biasfg,iterations,' \
             'maxbeddiffglacier,maxbeddiffdomain,' \
             'minbeddiffglacier,minbeddiffdomain,' \
             'voloutsidebounds\n'
    row = '{case:s},{run:s},{dV:.2f},{rmsebed:.1f},{rmsesurf:.1f},' \
          '{biasbed:.1f},{biassurf:.1f},{corr:.3f},{rmsefg:.1f},' \
          '{biasfg:.1f},{iterations:d},' \
          '{maxbeddiffglacier:.1f},{maxbeddiffdomain:.1f},' \
          '{minbeddiffglacier:.1f},{minbeddiffdomain:.1f},' \
          '{voloutsidebounds:.9f}'
    # TODO: max_bed_diff?
    dl = load_pickle(idir.get_subdir_filepath('data_logger'))
    vals = {}
    vals['case'] = dl.case.name
    vals['run'] = idir.inv_settings['inversion_subdir']
    ref_it = np.load(idir.gdir.get_filepath('ref_ice_thickness'))
    mod_it = (dl.surfs[-1] - dl.beds[-1])
    ref_vol = ref_it.sum()
    mod_vol = mod_it.sum()
    vals['dV'] = (mod_vol - ref_vol) / ref_vol * 1e2
    ref_ice_mask = np.load(idir.gdir.get_filepath('ref_ice_mask'))
    vals['rmsebed'] = RMSE(dl.true_bed, dl.beds[-1], ref_ice_mask)
    vals['rmsesurf'] = RMSE(dl.ref_surf, dl.surfs[-1], ref_ice_mask)
    vals['rmsefg'] = RMSE(dl.true_bed, dl.first_guessed_bed, ref_ice_mask)

    vals['biasbed'] = mean_BIAS(dl.beds[-1], dl.true_bed, ref_ice_mask)
    vals['biassurf'] = mean_BIAS(dl.surfs[-1], dl.ref_surf, ref_ice_mask)
    vals['biasfg'] = mean_BIAS(dl.first_guessed_bed, dl.true_bed,
                               ref_ice_mask)

    masked_true_it = np.ma.masked_array(dl.ref_surf - dl.true_bed,
                                         mask=np.logical_not(ref_ice_mask))
    masked_mod_it = np.ma.masked_array(dl.surfs[-1] - dl.beds[-1],
                                         mask=np.logical_not(ref_ice_mask))
    # TODO: ice thickness
    vals['corr'] = np.ma.corrcoef(masked_true_it.flatten(),
                                  masked_mod_it.flatten())[0, 1]
    vals['iterations'] = len(dl.step_indices)
    #vals['maxbeddiff'] = np.max((dl.beds[-1] - dl.true_bed) * ref_ice_mask)
    vals['maxbeddiffglacier'] = np.max((dl.beds[-1] - dl.true_bed) *
                                         ref_ice_mask)
    vals['maxbeddiffdomain'] = np.max(dl.beds[-1] - dl.true_bed)
    #vals['minbeddiff'] = np.min((dl.beds[-1] - dl.true_bed) * ref_ice_mask)
    vals['minbeddiffglacier'] = np.min((dl.beds[-1] - dl.true_bed) *
                                        ref_ice_mask)
    vals['minbeddiffdomain'] = np.min(dl.beds[-1] - dl.true_bed)
    vals['voloutsidebounds'] = np.sum((dl.surfs[-1] - dl.beds[-1])
                                      * (1 - ref_ice_mask)) * 1e-9
    data_row = row.format(**vals)

    with open(idir.get_subdir_filepath('results'), 'w') as f:
        f.writelines([header, data_row])

    return [header, data_row]
Example #3
cfg.initialize()

output_dir = '/home/philipp/HR_02/plots/bed_diff'
basedir = '/home/philipp/HR_02/'
file_extension = 'pdf'

figsize = (4.5, 3)

for case in [test_cases.Trikora]:  #, test_cases.Borden]:
    filepaths = glob.glob(
        os.path.join(basedir, '{:s}/*/data_logger.pkl'.format(case.name)))
    filepaths = sorted(filepaths)
    for path in filepaths:
        idir, temp = os.path.split(path)
        gdir, exp = os.path.split(idir)
        dl = load_pickle(path)
        exp_name = experiment_naming_engine.get_experiment_name2(exp)
        if exp_name is not None and len(dl.step_indices) > 0:
            ice_mask = np.load(os.path.join(gdir, 'ref_ice_mask.npy'))
            bed_measurements = None
            if exp_name.startswith('bed measurements'):
                bed_measurements = np.load(
                    os.path.join(idir, 'bed_measurements.pkl'))

            diff_first_guess = dl.first_guessed_bed - dl.true_bed
            diff_optimized = dl.beds[-1] - dl.true_bed
            cbar_min = min(diff_first_guess.min(), diff_optimized.min())
            cbar_max = max(diff_first_guess.max(), diff_optimized.max())
            cbar_min_max = max(abs(cbar_min), abs(cbar_max))
            norm = MidpointNormalize(midpoint=0.,
                                     vmin=-cbar_min_max,
Example #4
columns = [
    'experiment', 'experimentgroup', 'experimentsubgroup', 'subgroupindex',
    'optimizedbed', 'optimizedsurf', 'optimizedicethick', 'firstguess', 'beds',
    'surfs', 'costs', 'cterms', 'optimizedbederror', 'optimizedsurferror',
    'optimizedbedrmse', 'optimizedsurfrmse', 'optimizedbedbias',
    'optimizedsurfbias', 'firstguessrmse', 'firstguessbias',
    'firstguess_5_percentile', 'firstguess_25_percentile',
    'firstguess_75_percentile', 'firstguess_95_percentile', 'surfacenoise',
    'surfacenoisermse', 'surfacenoisebias', 'surfacenoise_5_percentile',
    'surfacenoise_25_percentile', 'surfacenoise_75_percentile',
    'surfacenoise_95_percentile', 'bedmeasurements', 'bedmeasurementsrmse',
    'bedmeasurementsbias', 'iterations', 'R', 'dV', 'warning', 'dir_path'
]
df = pd.DataFrame(columns=columns)
for path in filepaths:
    dl = load_pickle(path)
    inv_subdir = os.path.split(path)[0]
    inv_settings = load_pickle(
        os.path.join(gdir.dir, inv_subdir, 'inversion_settings.pkl'))
    experiment = inv_settings['inversion_subdir']
    surface_noise = np.zeros(true_bed.shape)
    if os.path.exists(os.path.join(gdir.dir, inv_subdir, 'dem_noise.npy')):
        surface_noise = np.load(
            os.path.join(gdir.dir, inv_subdir, 'dem_noise.npy'))
    bed_measurements = np.ma.masked_all(true_bed.shape)
    if os.path.exists(
            os.path.join(gdir.dir, inv_subdir, 'bed_measurements.pkl')):
        bed_measurements = np.load(
            os.path.join(gdir.dir, inv_subdir, 'bed_measurements.pkl'))
    warning_found = False
    # first_guessed_bed_noise = np.load(os.path.join(gdir.dir, inv_subdir,
Example #5
    def run_minimize(self):
        """
        Here the actual minimization of the cost_function is done via
        scipy.optimize.minimize.
        First, data from the glacier directory is read and optionally a
        DataLogger is created. The inversion settings used for this
        particular inversion are saved in this subdirectory. Bounds for the
        minimization are derived. Then the cost function is created and the
        minimization of this cost function started. In the end, the result is
        written to disk and optionally, further information is written to disk.

        The whole process is dominated by the set inversion settings

        Returns
        -------
        Result of minimization as scipy.optimize.minimize returns (res.x
        gives flattened ndarray with bed, needs to be reshaped)

        """

        # Copy first_guessed_bed to inversion directory
        if self.inv_settings['log_minimize_steps']:
            # TODO: really useful? -> respect reset argument in gdir?
            self.clear_dir(self.get_current_basedir())

        with rasterio.open(self.gdir.get_filepath('first_guessed_bed')) as src:
            profile = src.profile
            data = src.read(1)
        with rasterio.open(self.get_subdir_filepath('first_guessed_bed'), 'w',
                           **profile) as dst:
            dst.write(data, 1)
        if os.path.exists(self.gdir.get_filepath('first_guessed_bed_noise')):
            shutil.copy(self.gdir.get_filepath('first_guessed_bed_noise'),
                        self.get_subdir_filepath('first_guessed_bed_noise'))

        write_pickle(self.inv_settings,
                     self.get_subdir_filepath('inversion_settings'))
        # Write out reg_parameters to check easier later on
        self.write_string_to_file(self.get_subdir_filepath('reg_parameters'),
                                  str(self.inv_settings['reg_parameters']))
        self.inv_settings = load_pickle(
            self.get_subdir_filepath('inversion_settings'))
        self._read_all_data()
        self.minimize_log = ''
        self.data_logger = None
        callback = None

        if self.inv_settings['log_minimize_steps']:
            dl = DataLogger(self)
            self.data_logger = dl
            callback = self.iteration_info_callback

        # ----------------------------------------------------------------------
        # Core: things are happening here:
        bounds = self.get_bounds()

        self.cost_func = create_cost_func(self.gdir, self.data_logger,
                                          self.surf_noise,
                                          self.bed_measurements)
        res = None
        try:
            res = minimize(fun=self.cost_func,
                           x0=self.first_guessed_bed.astype(
                               np.float64).flatten(),
                           method=self.inv_settings['solver'],
                           jac=True,
                           bounds=bounds,
                           options=self.inv_settings['minimize_options'],
                           callback=callback)

            inverted_bed = res.x.reshape(self.first_guessed_bed.shape)
            # ----------------------------------------------------------------------

            profile['dtype'] = 'float64'
            with rasterio.open(self.get_subdir_filepath('inverted_bed'), 'w',
                               **profile) as dst:
                dst.write(inverted_bed, 1)

        except MemoryError as me:
            self.write_string_to_file(
                os.path.join(self.get_current_basedir(), 'warning.txt'),
                'Error during iteration: ' + str(me))

        if self.inv_settings['log_minimize_steps']:
            base_dir = self.get_current_basedir()
            # Write the log next to the other outputs instead of the cwd
            self.write_string_to_file(os.path.join(base_dir, 'log.txt'),
                                      self.minimize_log)
            dl.filter_data_from_optimization()  # Optional, if we want to
            data_logging.write_pickle(dl,
                                      self.get_subdir_filepath('data_logger'))
            #dl.plot_all(base_dir)
            #plt.close('all')

        return res
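The docstring above notes that res.x comes back as a flattened array and has to be reshaped before it can be used as a bed grid. The snippet below is a minimal usage sketch under the assumption (suggested by Example #6) that run_minimize is a method of InversionDirectory; gdir stands for an already prepared glacier directory, and the first-guess GeoTIFF is only read here to recover the grid shape.

import salem

from combine.core.inversion import InversionDirectory

# 'gdir' is assumed to be a prepared glacier directory (compare Example #6).
idir = InversionDirectory(gdir)

# Run the inversion; res is what scipy.optimize.minimize returned,
# or None if the minimization aborted with a MemoryError.
res = idir.run_minimize()

if res is not None:
    # res.x holds the flattened bed; recover the 2D grid from the first guess.
    first_guess = salem.GeoTiff(
        gdir.get_filepath('first_guessed_bed')).get_vardata()
    inverted_bed = res.x.reshape(first_guess.shape)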
Example #6
import salem
import matplotlib.pyplot as plt

from combine.core.inversion import InversionDirectory
from combine.core.data_logging import load_pickle
# test_cases and NonRGIGlacierDirectory are also needed below; their import
# lines are not part of this excerpt (presumably from the combine package).

from oggm import cfg
cfg.initialize()

basedir = '/data/philipp/thesis_test/Giluwe/perfect'

case = test_cases.Giluwe
gdir = NonRGIGlacierDirectory(case, basedir)
idir = InversionDirectory(gdir)
idir.inv_settings['inversion_counter'] = 1001

true_bed = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()

data_logger = load_pickle(idir.get_subdir_filepath('data_logger'))

plt.figure()
plt.imshow(data_logger.beds[-1] - true_bed)

subsampled_beds = data_logger.beds[-3:]

for i in [3, 4, 5, 6, 7, 8, 9, 10]:
    yfrom = 6
    yend = 8
    y = i
    xfrom = 0
    xend = -1

    plt.figure()
    for bed in subsampled_beds:
Example #7
    def inversion_settings(self):
        """Dictionary with settings for the inversion."""
        return load_pickle(self.get_filepath('inversion_settings'))
Example #8
spinup_surf = salem.GeoTiff(gdir.get_filepath('spinup_dem')).get_vardata()
reference_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
bed_2d = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()

ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
ref_it = np.load(gdir.get_filepath('ref_ice_thickness'))
spinup_it = np.load(gdir.get_filepath('spinup_ice_thickness'))
masked_ice_thick_end = np.ma.masked_array(ref_it,
                                          mask=np.logical_not(ref_ice_mask))
# masked_ice_thick_start = np.ma.masked_array(spinup_it,
#                                            mask=np.logical_not(ref_ice_mask))
masked_reference_surf = np.ma.masked_array(reference_surf,
                                           mask=np.logical_not(ref_ice_mask))

dl = load_pickle(os.path.join(gdir.dir, experiment, 'data_logger.pkl'))
inversion_settings = load_pickle(
    os.path.join(gdir.dir, experiment, 'inversion_settings.pkl'))
reg_parameters = inversion_settings['reg_parameters']

bed_differences = dl.get_bed_differences()
surf_differences = dl.get_surf_differences()
bed_rmses = [RMSE(bd, 0, ref_ice_mask) for bd in bed_differences]
surf_rmses = [RMSE(sd, 0, ref_ice_mask) for sd in surf_differences]
costs = dl.costs
c_terms = np.array(dl.c_terms).T
all_costs = []
cost_labels = []
i = 0

iteration_index = np.arange(1, len(bed_rmses) + 1)
Example #9
figsize = (4.5, 3)

cbar_min = -37.5
cbar_max = 37.5
cbar_min_max = max(abs(cbar_min), abs(cbar_max))
cbar_steps = 16

for case in [test_cases.Giluwe, test_cases.Borden]:
    filepaths = glob.glob(
        os.path.join(basedir, '{:s}/*/dem_noise.npy'.format(case.name)))
    filepaths = sorted(filepaths)
    for path in filepaths:
        idir, temp = os.path.split(path)
        gdir, exp = os.path.split(idir)
        dl = load_pickle(os.path.join(idir, 'data_logger.pkl'))
        exp_name = experiment_naming_engine.get_experiment_name2(exp)
        dem_noise = np.load(path)
        if exp_name is not None and len(dl.step_indices) > 0:
            print(exp_name + ' ' + case.name)
            print(dem_noise.min())
            print(dem_noise.max())

            ice_mask = np.load(os.path.join(gdir, 'ref_ice_mask.npy'))

            norm = MidpointNormalize(midpoint=0.,
                                     vmin=-cbar_min_max,
                                     vmax=cbar_min_max)
            # my_cmap = sns.diverging_palette(240, 15, l=40, s=99, as_cmap=True)
            my_cmap = plt.get_cmap('PuOr_r')
            plotpath = os.path.join(