Example #1
    def _read_all_data(self):
        """
        Reads all necessary information from files in gdir for
        minimization/optimization and logging.
        """
        self.true_bed = salem.GeoTiff(
            self.gdir.get_filepath('dem')).get_vardata()
        self.ref_surf = salem.GeoTiff(
            self.gdir.get_filepath('ref_dem')).get_vardata()
        self.first_guessed_bed = salem.GeoTiff(
            self.get_subdir_filepath('first_guessed_bed')).get_vardata()
        self.ice_mask = np.load(self.gdir.get_filepath('ref_ice_mask'))
        if os.path.exists(self.gdir.get_filepath('dem_noise')):
            # TODO: once surface noise is present, it cannot be removed ...
            shutil.copy(self.gdir.get_filepath('dem_noise'),
                        self.get_subdir_filepath('dem_noise'))
            self.surf_noise = np.load(self.get_subdir_filepath('dem_noise'))
        else:
            self.surf_noise = None

        if os.path.exists(self.gdir.get_filepath('bed_measurements')):
            shutil.copy(self.gdir.get_filepath('bed_measurements'),
                        self.get_subdir_filepath('bed_measurements'))
            self.bed_measurements = np.load(
                self.get_subdir_filepath('bed_measurements'))
        else:
            self.bed_measurements = None
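
Note the two I/O conventions used throughout these examples: rasters (DEMs, bed files) are read with salem.GeoTiff(...).get_vardata(), which returns a plain 2D ndarray, while masks and noise fields are stored as .npy files and read back with np.load. A minimal sketch of the pattern (file names hypothetical):

import numpy as np
import salem

# Hypothetical paths; any single-band GeoTIFF and .npy mask will do.
dem = salem.GeoTiff('dem.tif').get_vardata()   # 2D ndarray of surface heights
ice_mask = np.load('ref_ice_mask.npy')         # boolean ndarray from np.save
assert dem.shape == ice_mask.shape             # both live on the same grid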
Example #2
def create_glacier(gdir, run_spinup=True):
    """
    Creates a DEM file for a glacier surface by running a forward model:
    first a spin-up to an initial state, then a forward run from this state
    to the final reference state

    Parameters
    ----------
    gdir: NonRGIGlacierDirectory
        GlacierDirectory possibly containing spinup-state and used for
        saving the final reference state
    run_spinup: bool
        whether to run spin-up or rely on existing state

    Returns
    -------
    None; the reference surface, ice thickness and ice mask are written to
    files in gdir
    """
    inv_settings = gdir.inversion_settings
    if run_spinup:
        spinup(gdir,
               inv_settings['case'],
               inv_settings['yrs_spinup'],
               mb=inv_settings['mb_spinup'])

    spinup_it = np.load(gdir.get_filepath('spinup_ice_thickness'))
    spinup_surf = salem.GeoTiff(gdir.get_filepath('spinup_dem')).get_vardata()

    with rasterio.open(gdir.get_filepath('dem')) as src:
        bed = src.read(1)
        profile = src.profile

    ref_surf = run_forward(gdir,
                           inv_settings['case'],
                           inv_settings['yrs_forward_run'],
                           bed,
                           mb=inv_settings['mb_forward_run'],
                           init_ice_thick=spinup_it)

    profile['dtype'] = 'float32'
    with rasterio.open(gdir.get_filepath('ref_dem'), 'w', **profile) as dst:
        dst.write(ref_surf, 1)

    ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
    ref_it = ref_surf - bed
    ref_ice_mask = ref_it > 0
    np.save(gdir.get_filepath('ref_ice_thickness'), ref_it)
    np.save(gdir.get_filepath('ref_ice_mask'), ref_ice_mask)
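
The write-back in create_glacier reuses the profile of the source raster and only overrides the dtype, so the reference DEM inherits grid size, transform and CRS from the input DEM. A self-contained sketch of the same round trip (file names hypothetical):

import rasterio

with rasterio.open('dem.tif') as src:
    bed = src.read(1)          # band 1 as a 2D ndarray
    profile = src.profile      # carries transform, CRS, shape, dtype, ...

ref_surf = bed + 100.0         # stand-in for the modelled surface

profile['dtype'] = 'float32'
with rasterio.open('ref_dem.tif', 'w', **profile) as dst:
    dst.write(ref_surf.astype('float32'), 1)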
Example #3
def plot_iterative_behaviour(gdir,
                             subdir,
                             figsize=(4.5, 3),
                             file_extension='png',
                             reset=False):
    fig = plt.figure(figsize=figsize)
    case = gdir.case
    ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
    inv_settings = load_pickle(
        get_subdir_filepath(gdir, subdir, 'inversion_settings'))
    noise = 0.
    if os.path.exists(get_subdir_filepath(gdir, subdir, 'dem_noise')):
        noise = np.load(get_subdir_filepath(gdir, subdir, 'dem_noise'))
    noisy_ref_surf = ref_surf + noise
    ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
    ref_inner_mask = compute_inner_mask(ref_ice_mask, full_array=True)

    dl = load_pickle(get_subdir_filepath(gdir, subdir, 'data_logger'))

    reg_parameters = inv_settings['reg_parameters']
    interesting_costs = []
    cost_names = []
    for l, lamb in enumerate(reg_parameters):
        if lamb != 0:
            interesting_costs.append(l)
            cost_names.append('J{:d}'.format(l))

    interesting_costs.append(-1)
    cost_names.append('surf_misfit')
    # make sure all directories exist:
    plot_dir = os.path.join(gdir.dir, subdir, 'plot')
    if reset:
        if os.path.exists(plot_dir):
            shutil.rmtree(plot_dir)
    os.makedirs(plot_dir, exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'bed_error'), exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'surf_error'), exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'summed_cost'), exist_ok=True)
    os.makedirs(os.path.join(plot_dir, 'gradient'), exist_ok=True)
    for c_name in cost_names:
        os.makedirs(os.path.join(plot_dir, c_name), exist_ok=True)
    dl.plot_rmses(plot_dir)
    dl.plot_c_terms(plot_dir)

    for i in dl.step_indices:
        plot_iterative_step(dl,
                            i,
                            interesting_costs,
                            cost_names,
                            plot_dir,
                            case,
                            ref_ice_mask,
                            ref_inner_mask,
                            noisy_ref_surf,
                            reg_parameters,
                            file_extension,
                            existing_fig=fig)

    plt.close(fig)
Example #4
def add_consensus_thickness(gdir, base_url=None):
    """Add the consensus thickness estimate to the gridded_data file.

    varname: consensus_ice_thickness

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    base_url : str
        where to find the thickness data. Default is
        https://cluster.klima.uni-bremen.de/~fmaussion/icevol/composite
    """

    if base_url is None:
        base_url = default_base_url
    if not base_url.endswith('/'):
        base_url += '/'

    rgi_str = gdir.rgi_id
    rgi_reg_str = rgi_str[:8]

    url = base_url + rgi_reg_str + '/' + rgi_str + '_thickness.tif'
    input_file = utils.file_downloader(url)

    dsb = salem.GeoTiff(input_file)
    thick = utils.clip_min(dsb.get_vardata(), 0)
    in_volume = thick.sum() * dsb.grid.dx**2
    thick = gdir.grid.map_gridded_data(thick, dsb.grid, interp='linear')

    # Correct for volume
    thick = utils.clip_min(thick.filled(0), 0)
    out_volume = thick.sum() * gdir.grid.dx**2
    if out_volume > 0:
        thick *= in_volume / out_volume

    # We mask zero ice as nodata
    thick = np.where(thick == 0, np.NaN, thick)

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:

        vn = 'consensus_ice_thickness'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', (
                'y',
                'x',
            ), zlib=True)
        v.units = 'm'
        ln = 'Ice thickness from the consensus estimate'
        v.long_name = ln
        v.base_url = base_url
        v[:] = thick
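
The volume correction above is a plain rescaling: interpolation to the destination grid generally changes the total volume (sum of thickness times cell area), so the field is multiplied by the ratio of input to output volume. A self-contained illustration with synthetic numbers:

import numpy as np

dx_in, dx_out = 100.0, 120.0                 # grid spacings in metres (made up)
thick_in = np.random.rand(50, 50) * 200.0    # source thickness field
in_volume = thick_in.sum() * dx_in**2

# crude stand-in for the real regridding step
thick_out = thick_in[::2, ::2].repeat(2, axis=0).repeat(2, axis=1)
out_volume = thick_out.sum() * dx_out**2
if out_volume > 0:
    thick_out *= in_volume / out_volume      # restore the original volume

np.testing.assert_allclose(thick_out.sum() * dx_out**2, in_volume)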
Example #5
def region_grid(reg):

    global region_grids

    if reg not in region_grids:
        with utils.get_lock():
            fp = utils.file_downloader(region_files[reg]['vx'])
            ds = salem.GeoTiff(fp)
            region_grids[reg] = ds.grid

    return region_grids[reg]
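
region_grid caches one grid per region in a module-level dict, so each large velocity file is downloaded and opened only once per process; the lock only guards the download itself. The same memoization pattern, stripped down:

# Module-level cache: build each entry once, reuse it afterwards.
_grids = {}

def get_grid(region):
    if region not in _grids:
        print('building grid for', region)   # happens only on the first call
        _grids[region] = {'region': region}  # stand-in for ds.grid
    return _grids[region]

get_grid('alaska')
get_grid('alaska')                           # second call is served from the cache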
Example #6
def generate_bed_measurements(gdir, bed_measurements_mask, std=0):
    true_bed = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()
    noise = std * np.random.randn(*true_bed.shape)
    bed_measurements = (true_bed + noise) * bed_measurements_mask
    bed_measurements[np.logical_not(bed_measurements_mask)] = -np.inf
    bed_measurements = np.ma.masked_array(
        bed_measurements, mask=np.logical_not(bed_measurements_mask))
    print('Actual RMSE of bed measurements: {:g}'.format(
        RMSE(bed_measurements, true_bed)))
    # TODO: apply std scaling after masking ...?
    # TODO: investigate deviations of RMSE from numpys std
    return bed_measurements
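
Cells without measurements are both set to -inf and masked; the masked array then keeps statistics honest, because numpy.ma operations skip masked entries. A small self-contained demonstration:

import numpy as np

has_obs = np.zeros((4, 4), dtype=bool)
has_obs[1:3, 1:3] = True                     # cells with "measurements"
values = np.random.randn(4, 4)
values[~has_obs] = -np.inf                   # sentinel outside the mask
measurements = np.ma.masked_array(values, mask=~has_obs)

print(measurements.mean())                   # uses only the four valid cells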
Example #7
def add_noise_to_first_guess(gdir, noise, cut_noise=True, min_ice_thick=5):
    """
    Adds noise to the first guess. Saves this to the file with the first guess
    and also saves the applied noise.

    Parameters
    ----------
    gdir: NonRGIGlacierDirectory
        GlacierDirectory containing the first guess
    noise: ndarray
        noise to apply to the first guess
    cut_noise: bool
        whether the noise should be cut so that it does not penetrate the
        surface and a minimum ice thickness is kept when applied to the
        first guess
    min_ice_thick: float
        minimum ice thickness, only applied if noise is cut
    """

    fg_filepath = gdir.get_filepath('first_guessed_bed')

    with rasterio.open(fg_filepath) as src:
        first_guessed_bed = src.read(1)
        profile = src.profile

    if cut_noise:
        ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
        desired_rmse = RMSE(noise, 0, ref_ice_mask)
        ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()

        penetrating = (first_guessed_bed + noise - min_ice_thick > ref_surf)
        penetrating *= ref_ice_mask

        noise = np.where(penetrating,
                         ref_surf - first_guessed_bed - min_ice_thick, noise)
        # TODO: will result in problems -> iteratively?
        rmse = RMSE(noise, 0, ref_ice_mask)
        print('desired rmse: {:g}\nrmse after cutting: {:g}'.format(
            desired_rmse, rmse))
        # noise *= desired_rmse / rmse  # rescale to desired RMSE
        # if np.any(first_guessed_bed + noise > ref_surf):
        #    raise ValueError('First guess is found to penetrate ice surface; '
        #                     'Aborting')

    first_guessed_bed = first_guessed_bed + noise

    profile['dtype'] = 'float64'
    with rasterio.open(fg_filepath, 'w', **profile) as dst:
        dst.write(first_guessed_bed, 1)

    np.save(gdir.get_filepath('first_guessed_bed_noise'), noise)
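
The cut itself is an np.where replacement: wherever the perturbed bed would rise too close to the surface, the noise is replaced by the largest value still compatible with the minimum ice thickness. A simplified self-contained sketch (synthetic numbers; the inequality is written here so that a thickness of at least min_thick always remains, which the TODO in the original hints at):

import numpy as np

bed = np.array([80.0, 80.0, 80.0])      # first-guessed bed
surf = np.array([100.0, 100.0, 100.0])  # reference surface
noise = np.array([0.0, 15.0, 30.0])
min_thick = 5.0

too_high = bed + noise > surf - min_thick
noise = np.where(too_high, surf - bed - min_thick, noise)

print(surf - (bed + noise))             # -> [20. 5. 5.], never below min_thick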
Example #8
def define_g2ti_glacier(path=None, base_dir=None):

    fname = os.path.join(path, 'outlines.shp')
    ent = gpd.read_file(fname)
    rid = ent.RGIId.values[0]
    if '5a' in rid:
        rid = rid.replace('5a', '60')

    ent['RGIId'] = rid
    ent['Name'] = '' if ent['Name'][0] == 'None' else ent['Name']
    gdir = utils.GlacierDirectory(ent.iloc[0], base_dir=base_dir)
    ent.to_file(gdir.get_filepath('outlines'))

    proj_out = salem.check_crs(ent.crs)

    # Also transform the intersects if necessary
    gdf = cfg.PARAMS['intersects_gdf']
    if len(gdf) > 0:
        gdf = gdf.loc[((gdf.RGIId_1 == gdir.rgi_id) |
                       (gdf.RGIId_2 == gdir.rgi_id))]
        if len(gdf) > 0:
            gdf = salem.transform_geopandas(gdf, to_crs=proj_out)
            if hasattr(gdf.crs, 'srs'):
                # salem uses pyproj
                gdf.crs = gdf.crs.srs
            gdf.to_file(gdir.get_filepath('intersects'))
    else:
        # Sanity check
        if cfg.PARAMS['use_intersects']:
            raise RuntimeError('You seem to have forgotten to set the '
                               'intersects file for this run. OGGM works '
                               'better with such a file. If you know what '
                               'you are doing, set '
                               "cfg.PARAMS['use_intersects'] = False to "
                               "suppress this error.")

    # Topo
    shutil.copy(os.path.join(path, 'dem.tif'), gdir.get_filepath('dem'))
    mpath = gdir.get_filepath('dem').replace('dem', 'g2ti_mask')
    shutil.copy(os.path.join(path, 'mask.tif'), mpath)

    # Grid
    ds = salem.GeoTiff(gdir.get_filepath('dem'))
    ds.grid.to_json(gdir.get_filepath('glacier_grid'))
    gdir.write_pickle(['G2TI'], 'dem_source')

    return gdir
Example #9
def oggm_to_g2ti(gdir, dirname='final'):
    """From an oggm gdir to a g2ti tiff

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        the glacier directory to process
    dirname : str
        name of the output directory (default: 'final')
    """

    # Get the data
    grids_file = gdir.get_filepath('gridded_data')
    with netCDF4.Dataset(grids_file) as nc:
        with warnings.catch_warnings():
            # https://github.com/Unidata/netcdf4-python/issues/766
            warnings.filterwarnings("ignore", category=RuntimeWarning)
            thick = nc.variables['distributed_thickness'][:]

    dx = gdir.grid.dx
    vol = np.nansum(thick * dx**2)

    tpl_f = os.path.join(g2ti.geometry_dir, gdir.rgi_id[:8], gdir.rgi_id,
                         'dem.tif')
    dst = salem.GeoTiff(tpl_f)

    thick[~np.isfinite(thick)] = 0.

    dst_thick = dst.grid.map_gridded_data(thick,
                                          grid=gdir.grid,
                                          interp='spline')
    dst_thick[~np.isfinite(dst_thick)] = 0.

    # Conserve volume
    dx = dst.grid.dx
    dst_thick *= vol / np.nansum(dst_thick * dx**2)
    if not np.isclose(vol / np.nansum(dst_thick * dx**2), 1, atol=0.2):
        raise RuntimeError('Something went wrong in reproj.')

    with rasterio.open(tpl_f) as orig:
        # Set up profile for writing output
        profile = orig.profile

    ft = os.path.join(cfg.PATHS['working_dir'], dirname,
                      'RGI60-{}'.format(gdir.rgi_region))
    utils.mkdir(ft)
    ft = os.path.join(ft, 'thickness_{}.tif'.format(gdir.rgi_id))

    with rasterio.open(ft, 'w', **profile) as dest:
        dest.write(dst_thick.astype(np.float32).clip(0), 1)
Example #10
def plot_domain(gdirs, ax=None, smap=None):
    """Plot the glacier directory."""

    # Files
    gdir = gdirs[0]

    topo = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()
    try:
        smap.set_data(topo)
    except ValueError:
        pass

    cm = truncate_colormap(ALTITUDE_CMAP, minval=0.25, maxval=1.0, n=256)
    smap.set_cmap(cm)
    smap.set_plot_params(nlevels=256)

    for gdir in gdirs:
        crs = gdir.grid.center_grid

        try:
            geom = gdir.read_pickle('geometries')

            # Plot boundaries
            poly_pix = geom['polygon_pix']
            smap.set_geometry(poly_pix,
                              crs=crs,
                              fc='white',
                              alpha=0.3,
                              zorder=2,
                              linewidth=.2)
            poly_pix = utils.tolist(poly_pix)
            for _poly in poly_pix:
                for l in _poly.interiors:
                    smap.set_geometry(l, crs=crs, color='black', linewidth=0.5)
        except FileNotFoundError:
            smap.set_shapefile(gdir.read_shapefile('outlines'))

    smap.plot(ax)

    return dict(cbar_label='Alt. [m]')
Example #11
def compile_biased_first_guess(gdir, desired_mean_bias):
    ref_ice_thickness = np.load(gdir.get_filepath('ref_ice_thickness'))
    ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
    true_bed = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()

    with rasterio.open(gdir.get_filepath('ref_dem')) as src:
        surf = src.read(1)
        profile = src.profile

    gamma = desired_mean_bias / (ref_ice_thickness.sum() / ref_ice_mask.sum())

    if gamma > 1:
        raise AttributeError('Given desired mean bias not applicable; '
                             'ice volume exceeded')

    first_guessed_bed = true_bed + gamma * ref_ice_thickness

    with rasterio.open(gdir.get_filepath('first_guessed_bed'), 'w',
                       **profile) as dst:
        dst.write(first_guessed_bed, 1)

    return first_guessed_bed
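
gamma is chosen so that the mean of the added offset over glacierized cells equals desired_mean_bias: the denominator is the mean ice thickness over the ice mask, and the offset added to the true bed is gamma times the thickness. A quick numerical check:

import numpy as np

ref_it = np.array([[0.0, 10.0],
                   [20.0, 30.0]])   # ice thickness
ref_mask = ref_it > 0
desired_mean_bias = 3.0

gamma = desired_mean_bias / (ref_it.sum() / ref_mask.sum())  # mean thickness: 20
offset = gamma * ref_it

print(offset[ref_mask].mean())      # -> 3.0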
Example #12
def compile_rmsed_first_guess(gdir, desired_rmse):
    ref_ice_thickness = np.load(gdir.get_filepath('ref_ice_thickness'))
    ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
    true_bed = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()

    with rasterio.open(gdir.get_filepath('ref_dem')) as src:
        surf = src.read(1)
        profile = src.profile

    std = desired_rmse
    perturbation = std * np.random.randn(*ref_ice_mask.shape) * ref_ice_mask

    if np.any(perturbation > ref_ice_thickness):
        raise AttributeError('Perturbation peeks through surface; aborting')

    first_guessed_bed = true_bed + perturbation
    first_guessed_bed = first_guessed_bed.astype(np.float32)

    with rasterio.open(gdir.get_filepath('first_guessed_bed'), 'w',
                       **profile) as dst:
        dst.write(first_guessed_bed, 1)

    return first_guessed_bed
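
Setting the noise std to the desired RMSE works because the RMSE of zero-mean Gaussian noise (measured against zero) converges to its standard deviation for large samples (cf. the TODO in Example #6 about deviations between RMSE and numpy's std). A quick check:

import numpy as np

rng = np.random.default_rng(0)
desired_rmse = 10.0
noise = desired_rmse * rng.standard_normal(100_000)

print(np.sqrt(np.mean(noise**2)))   # ~10.0 for large samples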
Example #13
    def test_distribute(self):
        gdir = g2task.define_g2ti_glacier(self.idir, base_dir=self.testdir)

        gis.glacier_masks(gdir)
        g2task.g2ti_masks(gdir)
        out = g2task.distribute_thickness_vas(gdir,
                                              vas_c=0.034,
                                              dis_factor=0.2,
                                              topo_factor=0.2,
                                              write_tiff=True)

        ref = out * np.NaN

        i, j, _, _, thick = g2task.get_ref_gtd_data(gdir)
        ref[j, i] = thick

        out = xr.DataArray(out)
        ref = xr.DataArray(ref)
        ft = os.path.join(cfg.PATHS['working_dir'], 'final',
                          'RGI60-{}'.format(gdir.rgi_region))
        ft = os.path.join(ft, 'thickness_{}.tif'.format(gdir.rgi_id))
        tif = xr.open_rasterio(ft)

        dx2 = salem.GeoTiff(ft).grid.dx**2

        np.testing.assert_allclose((tif * dx2).sum() * 1e-9,
                                   0.034 * gdir.rgi_area_km2**1.375,
                                   rtol=0.01)

        if do_plot:
            import matplotlib.pyplot as plt
            out.plot()
            plt.figure()
            ref.plot()
            plt.figure()
            tif.plot()
            plt.show()
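
The final assertion checks the volume-area scaling relation used by distribute_thickness_vas, V = c * A**1.375 with c = 0.034 (areas in km^2, volumes in km^3), against the volume summed from the written tiff. The arithmetic on its own:

# Volume-area scaling sanity check (assumed form, from the test above)
area_km2 = 10.0
vas_c = 0.034
volume_km3 = vas_c * area_km2 ** 1.375
print(volume_km3)   # ~0.81 km^3 for a 10 km^2 glacier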
Example #14
    def test_mb(self):

        # This is a function to produce the MB function needed by Anna

        # Download the RGI file for the run
        # Make a new dataframe of those
        rgidf = gpd.read_file(get_demo_file('SouthGlacier.shp'))

        # Go - initialize working directories
        gdirs = workflow.init_glacier_regions(rgidf)

        # Preprocessing tasks
        task_list = [
            tasks.glacier_masks,
            tasks.compute_centerlines,
            tasks.initialize_flowlines,
            tasks.catchment_area,
            tasks.catchment_intersections,
            tasks.catchment_width_geom,
            tasks.catchment_width_correction,
        ]
        for task in task_list:
            execute_entity_task(task, gdirs)

        # Climate tasks -- only data IO and tstar interpolation!
        execute_entity_task(tasks.process_cru_data, gdirs)
        tasks.distribute_t_stars(gdirs)
        execute_entity_task(tasks.apparent_mb, gdirs)

        mbref = salem.GeoTiff(get_demo_file('mb_SouthGlacier.tif'))
        demref = salem.GeoTiff(get_demo_file('dem_SouthGlacier.tif'))

        mbref = mbref.get_vardata()
        mbref[mbref == -9999] = np.NaN
        demref = demref.get_vardata()[np.isfinite(mbref)]
        mbref = mbref[np.isfinite(mbref)] * 1000

        # compute the bias to make it 0 SMB on the 2D DEM
        mbmod = ConstantMassBalance(gdirs[0], bias=0)
        mymb = mbmod.get_annual_mb(demref) * cfg.SEC_IN_YEAR * cfg.RHO
        mbmod = ConstantMassBalance(gdirs[0], bias=np.average(mymb))
        mymb = mbmod.get_annual_mb(demref) * cfg.SEC_IN_YEAR * cfg.RHO
        np.testing.assert_allclose(np.average(mymb), 0., atol=1e-3)

        # Same for ref
        mbref = mbref - np.average(mbref)
        np.testing.assert_allclose(np.average(mbref), 0., atol=1e-3)

        # Fit poly
        p = np.polyfit(demref, mbref, deg=2)
        poly = np.poly1d(p)
        myfit = poly(demref)
        np.testing.assert_allclose(np.average(myfit), 0., atol=1e-3)

        if do_plot:
            import matplotlib.pyplot as plt
            plt.scatter(mbref, demref, s=5, label='Obs (2007-2012), shifted to '
                                                   'Avg(SMB) = 0')
            plt.scatter(mymb, demref, s=5, label='OGGM MB at t*')
            plt.scatter(myfit, demref, s=5, label='Polyfit', c='C3')
            plt.xlabel('MB (mm w.e yr-1)')
            plt.ylabel('Altitude (m)')
            plt.legend()
            plt.show()
Example #15
                              bounds_min_max=(2, 1000)
                              )

# Optional if not reset=True and this has already run once;
# only needed once:
# create_glacier(gdir)
fg = compile_first_guess(gdir)

#idir = InversionDirectory(gdir)

mb = gdir.inversion_settings['mb_forward_run']
if mb is None:
    mb = case.get_mb_model()

guessed_bed = torch.tensor(fg, dtype=torch.float, requires_grad=False)
spinup_surf = salem.GeoTiff(gdir.get_filepath('spinup_dem')).get_vardata()
ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
# run model forward
init_ice_thick = spinup_surf - guessed_bed

model_surf = run_forward_core(gdir.inversion_settings['yrs_forward_run'],
                              guessed_bed, case.dx, mb,
                              init_ice_thick).detach().numpy()
print('Bias: ' + str(np.sum(model_surf - ref_surf)/ice_mask.sum()))
plt.imshow(model_surf - ref_surf)
plt.show()
#dl = data_logging.load_pickle(idir.get_current_basedir() + '/data_logger.pkl')

bed = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()
print('Bias: ' + str(np.sum(fg - bed)/ice_mask.sum()))
Example #16
def _reproject_and_scale(gdir, do_error=False):
    """Reproject and scale itslive data, avoid code duplication for error"""


    reg = find_region(gdir)
    if reg is None:
        raise InvalidWorkflowError('There does not seem to be its_live data '
                                   'available for this glacier')

    vnx = 'vx'
    vny = 'vy'
    if do_error:
        vnx += '_err'
        vny += '_err'

    with utils.get_lock():
        fx = utils.file_downloader(region_files[reg][vnx])
        fy = utils.file_downloader(region_files[reg][vny])

    # Open the files
    dsx = salem.GeoTiff(fx)
    dsy = salem.GeoTiff(fy)
    # subset them to our map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = getattr(src, 'nodata', -32767.0)

    # Error files are wrong
    if nodata == 0:
        nodata = -32767.0

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    non_valid = (xx1 == nodata) | (yy1 == nodata)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN
    orig_vel = np.sqrt(xx1**2 + yy1**2)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx**2 + vy**2)
    p_ok = new_vel > 1e-5  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in x map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vx.filled(np.nan)

        vn = 'obs_icevel_y'
        if do_error:
            vn = vn.replace('obs', 'err')
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', ('y', 'x', ), zlib=True)
        v.units = 'm yr-1'
        ln = 'ITS LIVE velocity data in y map direction'
        if do_error:
            ln = 'Uncertainty of ' + ln
        v.long_name = ln
        v[:] = vy.filled(np.nan)
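
The core trick of _reproject_and_scale: velocities are treated as one-year displacements (t0 -> t1), both endpoints are transformed into the glacier projection, their difference gives the new velocity components, and the magnitudes are rescaled to the original ground-unit speeds because projections distort distances. The rescaling step in isolation:

import numpy as np

orig_vel = np.array([10.0, 20.0])   # ground-unit speeds before reprojection
vx = np.array([6.0, 0.012])         # reprojected x components (distorted)
vy = np.array([9.0, 0.016])         # reprojected y components (distorted)

new_vel = np.sqrt(vx**2 + vy**2)
p_ok = new_vel > 1e-5               # avoid division by zero
vx[p_ok] *= orig_vel[p_ok] / new_vel[p_ok]
vy[p_ok] *= orig_vel[p_ok] / new_vel[p_ok]

print(np.sqrt(vx**2 + vy**2))       # -> [10. 20.]: directions kept, speeds restored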
Example #17
from combine2d.core.arithmetics import compute_inner_mask

cfg.initialize()

basedir = '/home/philipp/HR_01/'
file_extension = 'png'
case = test_cases.BordenHR
dx = case.dx
gdir = NonRGIGlacierDirectory(case, basedir)
experiment = 'identical-twin 0'
output_dir = os.path.join(gdir.dir, experiment, 'plots')

figsize = (4.5, 3)

dl = load_pickle(os.path.join(gdir.dir, experiment, 'data_logger.pkl'))
ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
#true_bed = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()

ref_inner_mask = compute_inner_mask(ref_ice_mask, full_array=True)
#plt.figure()
#plt.imshow(ref_ice_mask)
#plt.imshow(ref_inner_mask, cmap='RdBu')
#plt.show()
inversion_settings = load_pickle(
    os.path.join(gdir.dir, experiment, 'inversion_settings.pkl'))
reg_parameters = inversion_settings['reg_parameters']
margin = np.logical_xor(ref_ice_mask, ref_inner_mask)


def get_costs_arr(reg_parameters, ref_surf, ref_ice_mask, ref_inner_mask,
Example #18
def create_cost_func(gdir,
                     data_logger=None,
                     surface_noise=None,
                     bed_measurements=None):
    """
    Creates a cost function based on the glacier directory.

    Parameters
    ----------
    gdir: NonRGIGlacierDirectory
        GlacierDirectory containing the precomputed spinup surface and
        "observed" surface for the final state; also contains the inversion
        settings
    data_logger: DataLogger
        optionally logs data
    surface_noise: ndarray
        optional noise added to the spinup and reference surfaces
    bed_measurements: ndarray
        optional masked array of bed measurements used as an additional
        constraint

    Returns
    -------
    callable
        cost function c_fun(b) returning a tuple of (cost, grad), with cost
        as float and grad being an ndarray with the same shape as b
    """

    # precompute known data to avoid recomputation during each call of
    # cost_function
    conv_filter = torch.ones((1, 1, 3, 3), requires_grad=False)
    # TODO: think about whether a cross-shaped filter is better suited (the
    # forward model has no diagonal transport)
    # conv_filter = torch.tensor([[[[0, 1, 0], [1, 1, 1], [0, 1, 0]]]],
    #                            dtype=torch.float, requires_grad=True)
    spinup_surf = salem.GeoTiff(gdir.get_filepath('spinup_dem')).get_vardata()
    ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
    if surface_noise is not None:
        spinup_surf += surface_noise
        ref_surf += surface_noise
        # TODO: allow for independent surface perturbations

    gpr = None
    if bed_measurements is not None:
        # PyTorch is a bit messy with masks.
        # Instead we use full tensors and multiply by a mask.
        gpr_data = torch.tensor(np.ma.filled(bed_measurements, -9999),
                                dtype=torch.float,
                                requires_grad=False)
        gpr_mask = torch.tensor(1 - bed_measurements.mask,
                                dtype=torch.float,
                                requires_grad=False)
        gpr = (gpr_data, gpr_mask)

    spinup_surf = torch.tensor(spinup_surf,
                               dtype=torch.float,
                               requires_grad=False)
    ref_surf = torch.tensor(ref_surf, dtype=torch.float, requires_grad=False)
    ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
    ref_ice_mask = torch.tensor(ref_ice_mask.astype(np.int),
                                dtype=torch.float,
                                requires_grad=False)
    ref_inner_mask = torch.zeros(ref_ice_mask.shape)
    ref_inner_mask[1:-1, 1:-1] = torch.conv2d(
        ref_ice_mask.unsqueeze(0).unsqueeze(0), conv_filter) == 9

    inv_settings = gdir.inversion_settings
    reg_parameters = inv_settings['reg_parameters']
    lambs1 = np.ones(300) * 0.25
    lambs1[:120] = np.logspace(-4, -1, 120)
    yrs_to_run = inv_settings['yrs_forward_run']
    case = inv_settings['case']
    mb = inv_settings['mb_forward_run']
    if mb is None:
        mb = case.get_mb_model()

    def c_fun(b):
        """
        Wrapper for cost_function; allows exchanging the cost function
        easily later on and gives it a signature exposing all true input
        arguments.

        Parameters
        ----------
        b: ndarray
            bed heights for which the costs should be calculated. (unit: [m])

        Returns
        -------
        tuple of (cost, grad) with cost as float and grad being a ndarray
        with same shape as b
        """
        return cost_function(b, reg_parameters, ref_surf, ref_ice_mask,
                             ref_inner_mask, spinup_surf, conv_filter,
                             yrs_to_run, case.dx, mb, gpr, data_logger, lambs1)

    return c_fun
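
The ref_inner_mask construction is a binary erosion: a cell belongs to the inner mask when its entire 3x3 neighbourhood is ice, implemented as a convolution with a 3x3 ones filter compared against 9. Standalone:

import torch

ice = torch.tensor([[0, 0, 0, 0, 0],
                    [0, 1, 1, 1, 0],
                    [0, 1, 1, 1, 0],
                    [0, 1, 1, 1, 0],
                    [0, 0, 0, 0, 0]], dtype=torch.float)

conv_filter = torch.ones((1, 1, 3, 3), requires_grad=False)
full_neighbourhood = torch.conv2d(ice.unsqueeze(0).unsqueeze(0),
                                  conv_filter) == 9
inner = torch.zeros_like(ice)
inner[1:-1, 1:-1] = full_neighbourhood.squeeze().float()

print(inner)   # only the centre cell of the 3x3 ice block survives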
Example #19
def velocity_to_gdir(gdir):
    """Reproject the its_live files to the given glacier directory.

    Variables are added to the gridded_data nc file.

    Reprojecting velocities from one map projection to another is done by
    reprojecting the vector distances. In this process, absolute velocities
    might change as well, because map projections do not always preserve
    distances -> we scale them back to the original velocities, as the
    ITS_LIVE documentation states that velocities are given in ground
    units, i.e. absolute velocities.

    We use bilinear interpolation to reproject the velocities to the local
    glacier map.

    Parameters
    ----------
    gdir : :py:class:`oggm.GlacierDirectory`
        where to write the data

    """

    reg = find_region(gdir)
    if reg is None:
        raise InvalidWorkflowError('There does not seem to be its_live data '
                                   'available for this glacier')

    if not gdir.has_file('gridded_data'):
        raise InvalidWorkflowError('Please run `glacier_masks` before running '
                                   'this task')

    with utils.get_lock():
        fx = utils.file_downloader(region_files[reg]['vx'])
        fy = utils.file_downloader(region_files[reg]['vy'])

    # Open the files
    dsx = salem.GeoTiff(fx)
    dsy = salem.GeoTiff(fy)
    # subset them to our map
    grid_gla = gdir.grid.center_grid
    proj_vel = dsx.grid.proj
    x0, x1, y0, y1 = grid_gla.extent_in_crs(proj_vel)
    dsx.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    dsy.set_subset(corners=((x0, y0), (x1, y1)), crs=proj_vel, margin=4)
    grid_vel = dsx.grid.center_grid

    # TODO: this should be taken care of by salem
    # https://github.com/fmaussion/salem/issues/171
    with rasterio.Env():
        with rasterio.open(fx) as src:
            nodata = getattr(src, 'nodata', -32767.0)

    # Get the coords at t0
    xx0, yy0 = grid_vel.center_grid.xy_coordinates

    # Compute coords at t1
    xx1 = dsx.get_vardata()
    yy1 = dsy.get_vardata()
    non_valid = (xx1 == nodata) | (yy1 == nodata)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN
    orig_vel = np.sqrt(xx1**2 + yy1**2)
    xx1 += xx0
    yy1 += yy0

    # Transform both to glacier proj
    xx0, yy0 = salem.transform_proj(proj_vel, grid_gla.proj, xx0, yy0)
    xx1, yy1 = salem.transform_proj(proj_vel, grid_gla.proj, xx1, yy1)

    # Correct no data after proj as well (inf)
    xx1[non_valid] = np.NaN
    yy1[non_valid] = np.NaN

    # Compute velocities from there
    vx = xx1 - xx0
    vy = yy1 - yy0

    # Scale back velocities - https://github.com/OGGM/oggm/issues/1014
    new_vel = np.sqrt(vx**2 + vy**2)
    p_ok = new_vel > 0.1  # avoid div by zero
    vx[p_ok] = vx[p_ok] * orig_vel[p_ok] / new_vel[p_ok]
    vy[p_ok] = vy[p_ok] * orig_vel[p_ok] / new_vel[p_ok]

    # And transform to local map
    vx = grid_gla.map_gridded_data(vx, grid=grid_vel, interp='linear')
    vy = grid_gla.map_gridded_data(vy, grid=grid_vel, interp='linear')

    # Write
    with utils.ncDataset(gdir.get_filepath('gridded_data'), 'a') as nc:
        vn = 'obs_icevel_x'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', (
                'y',
                'x',
            ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in x map direction'
        v[:] = vx

        vn = 'obs_icevel_y'
        if vn in nc.variables:
            v = nc.variables[vn]
        else:
            v = nc.createVariable(vn, 'f4', (
                'y',
                'x',
            ), zlib=True)
        v.units = 'm yr-1'
        v.long_name = 'ITS LIVE velocity data in y map direction'
        v[:] = vy
Example #20
def create_cost_func(gdir,
                     data_logger=None,
                     use_AD=False,
                     torch_backward_param=None):
    """
    Creates a cost function based on the glacier directory.

    Parameters
    ----------
    gdir: NonRGIGlacierDirectory
        GlacierDirectory containing the precomputed spinup surface and
        "observed" surface for the final state; also contains the inversion
        settings
    data_logger: DataLogger
        optionally logs data
    use_AD: bool
        whether to use PyTorch AD for the gradient calculation. Determines
        whether the created cost function returns [cost,] or
        [cost, gradient]
    torch_backward_param: scalar
        special parameter passed on to torch if not None. Only has an
        effect if use_AD=True

    Returns
    -------
    callable
        cost function c_fun(b) returning a tuple of (cost, grad), with cost
        as float and grad being an ndarray with the same shape as b
    """

    # precompute known data to avoid recomputation during each call of
    # cost_function
    conv_filter = torch.ones((1, 1, 3, 3), requires_grad=False)

    spinup_surf = salem.GeoTiff(gdir.get_filepath('spinup_dem')).get_vardata()
    spinup_surf = torch.tensor(spinup_surf,
                               dtype=torch.float,
                               requires_grad=False)
    ref_surf = salem.GeoTiff(gdir.get_filepath('ref_dem')).get_vardata()
    ref_surf = torch.tensor(ref_surf, dtype=torch.float, requires_grad=False)
    ref_ice_mask = np.load(gdir.get_filepath('ref_ice_mask'))
    ref_ice_mask = torch.tensor(ref_ice_mask.astype(np.int),
                                dtype=torch.float,
                                requires_grad=False)
    ref_inner_mask = torch.zeros(ref_ice_mask.shape)
    ref_inner_mask[1:-1, 1:-1] = torch.conv2d(
        ref_ice_mask.unsqueeze(0).unsqueeze(0), conv_filter) == 9

    inv_settings = gdir.inversion_settings
    reg_parameters = inv_settings['reg_parameters']
    yrs_to_run = inv_settings['yrs_forward_run']
    case = inv_settings['case']
    mb = inv_settings['mb_forward_run']
    if mb is None:
        mb = case.get_mb_model()

    def c_fun(b):
        """
        Wrapper for cost_function; allows exchanging the cost function
        easily later on and gives it a signature exposing all true input
        arguments.

        Parameters
        ----------
        b: ndarray
            bed heights for which the costs should be calculated. (unit: [m])

        Returns
        -------
        tuple of (cost, grad) with cost as float and grad being a ndarray
        with same shape as b
        """
        return cost_function(b, reg_parameters, ref_surf, ref_ice_mask,
                             ref_inner_mask, spinup_surf, conv_filter,
                             yrs_to_run, case.dx, mb, data_logger, use_AD,
                             torch_backward_param)

    return c_fun
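
The closures returned by create_cost_func have (with gradients enabled) the signature c_fun(b) -> (cost, grad), which is exactly what scipy-style optimizers accept with jac=True. A toy sketch of the same pattern (quadratic cost with analytic gradient; scipy assumed available):

import numpy as np
from scipy.optimize import minimize

def create_toy_cost_func(target):
    def c_fun(b):
        residual = b - target
        cost = 0.5 * np.sum(residual**2)
        grad = residual            # gradient, same shape as b
        return cost, grad
    return c_fun

c_fun = create_toy_cost_func(np.array([1.0, 2.0, 3.0]))
res = minimize(c_fun, x0=np.zeros(3), jac=True, method='L-BFGS-B')
print(res.x)                       # -> approx. [1. 2. 3.]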
Example #21
err_calving_front = []
rel_tol_fls = []
rel_tol_calving_front = []
length_fls = []

files_no_data = []

# We need the components of vel
fx = os.path.join(MAIN_PATH, config['vel_x_path'])
fy = os.path.join(MAIN_PATH, config['vel_y_path'])
# And Error
ex = os.path.join(MAIN_PATH, config['err_x_path'])
ey = os.path.join(MAIN_PATH, config['err_y_path'])

# Open the files with salem
dsx = salem.GeoTiff(fx)
dsy = salem.GeoTiff(fy)

dex = salem.GeoTiff(ex)
dey = salem.GeoTiff(ey)

for gdir in gdirs:

    # first we compute the centerlines as shapefile to crop the satellite
    # data
    misc.write_flowlines_to_shape(gdir, path=gdir.dir)
    shp_path = os.path.join(gdir.dir, 'RGI60-05.shp')
    shp = gpd.read_file(shp_path)

    utils_vel.its_live_to_gdir(gdir, dsx=dsx, dsy=dsy, dex=dex, dey=dey, fx=fx)
Example #22
    Returns
    -------

    """

    with rasterio.open(gdir.get_filepath('dem')) as src:
        bed = src.read(1)
        profile = src.profile

    spinup_surf = run_forward(gdir, case, yr_spinup_end, bed, mb=mb)

    profile['dtype'] = 'float32'
    with rasterio.open(gdir.get_filepath('spinup_dem'), 'w', **profile) as dst:
        dst.write(spinup_surf, 1)
    spinup_surf = salem.GeoTiff(gdir.get_filepath('spinup_dem')).get_vardata()
    spinup_it = spinup_surf - bed
    np.save(gdir.get_filepath('spinup_ice_thickness'), spinup_it)

    return spinup_surf, spinup_it


@torch.no_grad()
def run_forward(gdir, case, yrs, bed, mb=None, init_ice_thick=None):
    """
    Wrapper for run_forward_core. Derives the mass balance from case if
    none is given; accepts strings for bed files as well as ndarrays and
    tensors


    Parameters
    ----------
Example #23
from combine2d.core import gis, test_cases
from combine2d.core.utils import NonRGIGlacierDirectory
from combine2d.core.inversion import InversionDirectory
from combine2d.core.data_logging import load_pickle

from oggm import cfg
cfg.initialize()

basedir = '/data/philipp/thesis_test/Giluwe/perfect'

case = test_cases.Giluwe
gdir = NonRGIGlacierDirectory(case, basedir)
idir = InversionDirectory(gdir)
idir.inv_settings['inversion_counter'] = 1001

true_bed = salem.GeoTiff(gdir.get_filepath('dem')).get_vardata()

data_logger = load_pickle(idir.get_subdir_filepath('data_logger'))

plt.figure()
plt.imshow(data_logger.beds[-1] - true_bed)

subsampled_beds = data_logger.beds[-3:]

for i in [3, 4, 5, 6, 7, 8, 9, 10]:
    yfrom = 6
    yend = 8
    y = i
    xfrom = 0
    xend = -1