Example #1
    def test_flowlines(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)
            geometry.initialize_flowlines(gdir)

        for div_id in gdir.divide_ids:
            cls = gdir.read_pickle('inversion_flowlines', div_id=div_id)
            for cl in cls:
                for j, ip, ob in zip(cl.inflow_indices, cl.inflow_points, cl.inflows):
                    self.assertTrue(cl.line.coords[j] == ip.coords[0])
                    self.assertTrue(ob.flows_to_point.coords[0] == ip.coords[0])
                    self.assertTrue(cl.line.coords[ob.flows_to_indice] == ip.coords[0])

        lens = [len(gdir.read_pickle('centerlines', div_id=i)) for i in [1,2,3]]
        self.assertTrue(sorted(lens) == [1, 1, 3])

        x, y = map(np.array, cls[0].line.xy)
        dis = np.sqrt((x[1:] - x[:-1])**2 + (y[1:] - y[:-1])**2)
        np.testing.assert_allclose(dis*0 + cfg.params['flowline_dx'], dis,
                                   rtol=0.01)
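
These examples all fetch the single Hintereisferner outline by iterating over the one-row GeoDataFrame. For reference, a minimal sketch of the direct access the comment alludes to (assuming current geopandas behaviour, where positional indexing returns the row as a Series; this is not part of the test suite):

    # Hypothetical alternative to the iterrows() loop above: pick the only
    # row directly. `entity` then plays the same role as in the loop.
    entity = rgidf.iloc[0]
    gdir = cfg.GlacierDir(entity, base_dir=self.testdir)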
Example #2
    def test_catchment_area(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)
            geometry.catchment_area(gdir)

        for div_id in gdir.divide_ids:

            cis = gdir.read_pickle('catchment_indices', div_id=div_id)

            # The catchments must cover the glacier mask exactly, without overlap
            nc = netCDF4.Dataset(gdir.get_filepath('grids', div_id=div_id))
            mask = nc.variables['glacier_mask'][:]
            nc.close()

            mymask_a = mask * 0
            mymask_b = mask * 0
            for i, ci in enumerate(cis):
                mymask_a[tuple(ci.T)] += 1
                mymask_b[tuple(ci.T)] = i+1
            self.assertTrue(np.max(mymask_a) == 1)
            np.testing.assert_allclose(mask, mymask_a)
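
The `mymask_a[tuple(ci.T)]` lines above rely on NumPy fancy indexing: transposing an (N, 2) array of (row, col) pairs and wrapping it in a tuple yields one index array per axis. A self-contained illustration with made-up values:

    import numpy as np

    mask = np.zeros((3, 3), dtype=int)
    ci = np.array([[0, 1], [2, 2]])  # two (row, col) index pairs
    mask[tuple(ci.T)] += 1           # increments mask[0, 1] and mask[2, 2]
    assert mask.sum() == 2 and mask.max() == 1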
Example #3
    def test_downstream(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)
            centerlines.compute_downstream_lines(gdir)
Example #4
    def test_geom_width(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)
            geometry.initialize_flowlines(gdir)
            geometry.catchment_area(gdir)
            geometry.catchment_width_geom(gdir)
Example #5
    def test_width(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)
            geometry.initialize_flowlines(gdir)
            geometry.catchment_area(gdir)
            geometry.catchment_width_geom(gdir)
            geometry.catchment_width_correction(gdir)

        area = 0.
        otherarea = 0.
        hgt = []
        harea = []
        for i in gdir.divide_ids:
            cls = gdir.read_pickle('inversion_flowlines', div_id=i)
            for cl in cls:
                harea.extend(list(cl.widths * cl.dx))
                hgt.extend(list(cl.surface_h))
                area += np.sum(cl.widths * cl.dx)
            nc = netCDF4.Dataset(gdir.get_filepath('grids', div_id=i))
            otherarea += np.sum(nc.variables['glacier_mask'][:])
            nc.close()

        nc = netCDF4.Dataset(gdir.get_filepath('grids', div_id=0))
        mask = nc.variables['glacier_mask'][:]
        topo = nc.variables['topo_smoothed'][:]
        nc.close()
        rhgt = topo[np.where(mask)][:]

        tdf = gpd.GeoDataFrame.from_file(gdir.get_filepath('outlines'))
        np.testing.assert_allclose(area, otherarea, rtol=0.1)
        area *= (gdir.grid.dx) ** 2
        otherarea *= (gdir.grid.dx) ** 2
        np.testing.assert_allclose(area * 10**-6, np.float(tdf['AREA']), rtol=1e-4)

        # Check the area distribution with altitude
        bins = np.arange(utils.nicenumber(np.min(hgt), 50, lower=True),
                         utils.nicenumber(np.max(hgt), 50)+1,
                         50.)
        h1, b = np.histogram(hgt, weights=harea, density=True, bins=bins)
        h2, b = np.histogram(rhgt, density=True, bins=bins)
        self.assertTrue(utils.rmsd(h1*100*50, h2*100*50) < 1)
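
The histogram comparison above goes through `utils.rmsd`. Assuming it is the usual root-mean-square deviation (a guess from the name, not checked against the oggm source), it boils down to:

    import numpy as np

    def rmsd(a, b):
        # Sketch of the assumed behaviour of utils.rmsd: root-mean-square
        # deviation of two equally sized arrays.
        a, b = np.asarray(a), np.asarray(b)
        return np.sqrt(np.mean((a - b) ** 2))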
Example #6
    def test_glacier_masks(self):
        """Again, easy test.

        The GIS was double checked externally with IDL.
        """

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)

        nc = netCDF4.Dataset(gdir.get_filepath('grids'))
        area = np.sum(nc.variables['glacier_mask'][:] * gdir.grid.dx**2) * 10**-6
        np.testing.assert_allclose(area, gdir.glacier_area, rtol=1e-1)
        nc.close()
Example #7
    def test_find_tstars(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        gdirs = []
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)
            geometry.initialize_flowlines(gdir)
            geometry.catchment_area(gdir)
            geometry.catchment_width_geom(gdir)
            geometry.catchment_width_correction(gdir)
            gdirs.append(gdir)
        climate.distribute_climate_data(gdirs)
        climate.mu_candidates(gdir, div_id=0)

        hef_file = get_demo_file('mbdata_RGI40-11.00897.csv')
        mbdf = pd.read_csv(hef_file).set_index('YEAR')
        t_stars, bias = climate.t_star_from_refmb(gdir, mbdf['ANNUAL_BALANCE'])

        y, t, p = climate.mb_yearly_climate_on_glacier(gdir, div_id=0)

        # which years to look at
        selind = np.searchsorted(y, mbdf.index)
        t = t[selind]
        p = p[selind]

        mu_yr_clim = gdir.read_pickle('mu_candidates', div_id=0)
        for t_s, rmd in zip(t_stars, bias):
            mb_per_mu = p - mu_yr_clim.loc[t_s] * t
            md = utils.md(mbdf['ANNUAL_BALANCE'], mb_per_mu)
            np.testing.assert_allclose(md, rmd)
            self.assertTrue(np.abs(md/np.mean(mbdf['ANNUAL_BALANCE'])) < 0.1)
            r = utils.corrcoef(mbdf['ANNUAL_BALANCE'], mb_per_mu)
            self.assertTrue(r > 0.8)
Example #8
    def test_centerlines(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)

        for div_id in gdir.divide_ids:
            cls = gdir.read_pickle('centerlines', div_id=div_id)
            for cl in cls:
                for j, ip, ob in zip(cl.inflow_indices, cl.inflow_points, cl.inflows):
                    self.assertTrue(cl.line.coords[j] == ip.coords[0])
                    self.assertTrue(ob.flows_to_point.coords[0] == ip.coords[0])
                    self.assertTrue(cl.line.coords[ob.flows_to_indice] == ip.coords[0])

        lens = [len(gdir.read_pickle('centerlines', div_id=i)) for i in [1,2,3]]
        self.assertTrue(sorted(lens) == [1, 1, 3])
Example #9
    def test_mu_candidates(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        gdirs = []
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)
            geometry.initialize_flowlines(gdir)
            geometry.catchment_area(gdir)
            geometry.catchment_width_geom(gdir)
            geometry.catchment_width_correction(gdir)
            gdirs.append(gdir)
        climate.distribute_climate_data(gdirs)
        climate.mu_candidates(gdir, div_id=0)

        se = gdir.read_pickle('mu_candidates')
        self.assertTrue(se.index[0] == 1802)
        self.assertTrue(se.index[-1] == 2003)

        df = pd.DataFrame()
        df['mu'] = se

        # Check that the moving average of temperature is negatively
        # correlated with the mus
        nc_r = netCDF4.Dataset(get_demo_file('histalp_merged_hef.nc'))
        ref_t = nc_r.variables['temp'][:, 1, 1]
        nc_r.close()
        ref_t = np.mean(ref_t.reshape((len(df), 12)), 1)
        ma = np.convolve(ref_t, np.ones(31) / float(31), 'same')
        df['temp'] = ma
        df = df.dropna()
        self.assertTrue(np.corrcoef(df['mu'], df['temp'])[0, 1] < -0.75)
Example #10
def init_hef(reset=False):

    # test directory
    if not os.path.exists(testdir):
        os.makedirs(testdir)
        reset = True
    if not os.path.exists(os.path.join(testdir, 'RGI40-11.00897')):
        reset = True
    if not os.path.exists(os.path.join(testdir, 'RGI40-11.00897',
                                       'flowline_params.p')):
        reset = True

    # Init
    cfg.initialize()
    cfg.set_divides_db(get_demo_file('HEF_divided.shp'))
    cfg.paths['srtm_file'] = get_demo_file('hef_srtm.tif')
    cfg.paths['histalp_file'] = get_demo_file('histalp_merged_hef.nc')
    cfg.params['border'] = 40

    # loop because for some reason indexing won't work
    hef_file = get_demo_file('Hintereisferner.shp')
    rgidf = gpd.GeoDataFrame.from_file(hef_file)
    for index, entity in rgidf.iterrows():
        gdir = cfg.GlacierDir(entity, base_dir=testdir, reset=reset)

    if not reset:
        return gdir

    gis.define_glacier_region(gdir, entity)
    gis.glacier_masks(gdir)
    centerlines.compute_centerlines(gdir)
    centerlines.compute_downstream_lines(gdir)
    geometry.initialize_flowlines(gdir)
    geometry.catchment_area(gdir)
    geometry.catchment_width_geom(gdir)
    geometry.catchment_width_correction(gdir)
    climate.distribute_climate_data([gdir])
    climate.mu_candidates(gdir, div_id=0)
    hef_file = get_demo_file('mbdata_RGI40-11.00897.csv')
    mbdf = pd.read_csv(hef_file).set_index('YEAR')
    t_star, bias = climate.t_star_from_refmb(gdir, mbdf['ANNUAL_BALANCE'])
    climate.local_mustar_apparent_mb(gdir, t_star[-1], bias[-1])

    inversion.prepare_for_inversion(gdir)
    ref_v = 0.573 * 1e9

    def to_optimize(x):
        fd = 1.9e-24 * x[0]
        fs = 5.7e-20 * x[1]
        v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                         fs=fs,
                                                         fd=fd)
        return (v - ref_v)**2

    import scipy.optimize as optimization
    out = optimization.minimize(to_optimize, [1,1],
                                bounds=((0.01, 1), (0.01, 1)),
                                tol=1e-3)['x']
    fd = 1.9e-24 * out[0]
    fs = 5.7e-20 * out[1]
    v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                     fs=fs,
                                                     fd=fd,
                                                     write=True)
    d = dict(fs=fs, fd=fd)
    gdir.write_pickle(d, 'flowline_params')
    return gdir
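
A hedged sketch of how a test could reuse `init_hef` and the parameters it pickles (the names follow the function above; the surrounding test code is hypothetical):

    gdir = init_hef()                       # reruns the preprocessing only when needed
    params = gdir.read_pickle('flowline_params')
    fs, fd = params['fs'], params['fd']     # calibrated sliding / deformation factors
    v, _ = inversion.inversion_parabolic_point_slope(gdir, fs=fs, fd=fd)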
Example #11
    def test_invert_hef_nofs(self):

        # TODO: does not work on windows !!!
        if 'win' in sys.platform:
            print('test_invert_hef_nofs aborted due to windows.')
            return

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        geometry.initialize_flowlines(gdir)
        geometry.catchment_area(gdir)
        geometry.catchment_width_geom(gdir)
        geometry.catchment_width_correction(gdir)
        climate.distribute_climate_data([gdir])
        climate.mu_candidates(gdir, div_id=0)
        hef_file = get_demo_file('mbdata_RGI40-11.00897.csv')
        mbdf = pd.read_csv(hef_file).set_index('YEAR')
        t_star, bias = climate.t_star_from_refmb(gdir, mbdf['ANNUAL_BALANCE'])
        t_star = t_star[-1]
        bias = bias[-1]
        climate.local_mustar_apparent_mb(gdir, t_star, bias)

        # OK. Values from Fischer and Kuhn 2013
        # Area: 8.55
        # meanH = 67+-7
        # Volume = 0.573+-0.063
        # maxH = 242+-13

        inversion.prepare_for_inversion(gdir)

        ref_v = 0.573 * 1e9

        def to_optimize(x):
            fd = 1.9e-24 * x[0]
            fs = 0.
            v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                             fs=fs,
                                                             fd=fd)
            return (v - ref_v)**2

        import scipy.optimize as optimization
        out = optimization.minimize(to_optimize, [1],
                                    bounds=((0.00001, 1000000),),
                                    tol=1e-3)['x']

        self.assertTrue(out[0] > 0.1)
        self.assertTrue(out[0] < 2)

        fd = 1.9e-24 * out[0]
        fs = 0.
        v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                         fs=fs,
                                                         fd=fd,
                                                         write=True)
        np.testing.assert_allclose(ref_v, v)

        lens = [len(gdir.read_pickle('centerlines', div_id=i)) for i in [1,2,3]]
        pid = np.argmax(lens) + 1
        cls = gdir.read_pickle('inversion_output', div_id=pid)
        fls = gdir.read_pickle('inversion_flowlines', div_id=pid)
        maxs = 0.
        for cl, fl in zip(cls, fls):
            thick = cl['thick']
            shape = cl['shape']
            self.assertTrue(np.all(np.isfinite(shape)))

            mywidths = np.sqrt(4*thick/shape) / gdir.grid.dx
            np.testing.assert_allclose(fl.widths, mywidths)

            _max = np.max(thick)
            if _max > maxs:
                maxs = _max

        np.testing.assert_allclose(242, maxs, atol=30)

        c0 = gdir.read_pickle('inversion_output', div_id=2)[-1]

        def to_optimize(x):
            fd = 1.9e-24 * x[0]
            fs = 5.7e-20 * x[1]
            v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                             fs=fs,
                                                             fd=fd)
            return (v - ref_v)**2

        import scipy.optimize as optimization
        out = optimization.minimize(to_optimize, [1, 1],
                                    bounds=((0.01, 1), (0.01, 1)),
                                    tol=1e-3)['x']

        self.assertTrue(out[0] > 0.1)
        self.assertTrue(out[1] > 0.1)
        self.assertTrue(out[0] < 1)
        self.assertTrue(out[1] < 1)

        fd = 1.9e-24 * out[0]
        fs = 5.7e-20 * out[1]
        v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                         fs=fs,
                                                         fd=fd,
                                                         write=True)
        np.testing.assert_allclose(ref_v, v)
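
The width check in this test rests on simple parabola algebra: if the bed cross-section is z = shape * x**2, a glacier of thickness `thick` fills it out to a surface width of sqrt(4 * thick / shape). A quick numeric check of that relation, with made-up numbers and independent of oggm:

    import numpy as np

    shape, thick = 2.0e-3, 100.0        # hypothetical bed-shape factor and thickness
    width = np.sqrt(4 * thick / shape)  # surface width implied by the parabola
    assert np.isclose(shape * (width / 2) ** 2, thick)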
Example #12
    def test_invert_hef(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        geometry.initialize_flowlines(gdir)
        geometry.catchment_area(gdir)
        geometry.catchment_width_geom(gdir)
        geometry.catchment_width_correction(gdir)
        climate.distribute_climate_data([gdir])
        climate.mu_candidates(gdir, div_id=0)
        hef_file = get_demo_file('mbdata_RGI40-11.00897.csv')
        mbdf = pd.read_csv(hef_file).set_index('YEAR')
        t_star, bias = climate.t_star_from_refmb(gdir, mbdf['ANNUAL_BALANCE'])
        t_star = t_star[-1]
        bias = bias[-1]
        climate.local_mustar_apparent_mb(gdir, t_star, bias)

        # OK. Values from Fischer and Kuhn 2013
        # Area: 8.55
        # meanH = 67+-7
        # Volume = 0.573+-0.063
        # maxH = 242+-13
        inversion.prepare_for_inversion(gdir)

        lens = [len(gdir.read_pickle('centerlines', div_id=i)) for i in [1,2,3]]
        pid = np.argmax(lens) + 1

        # Check how many clips:
        cls = gdir.read_pickle('inversion_input', div_id=pid)
        nabove = 0
        maxs = 0.
        npoints = 0.
        for cl in cls:
            # Clip slope to avoid negative and small slopes
            slope = cl['slope_angle']
            nm = np.where(slope <  np.deg2rad(2.))
            nabove += len(nm[0])
            npoints += len(slope)
            _max = np.max(slope)
            if _max > maxs:
                maxs = _max

        self.assertTrue(nabove == 0)
        self.assertTrue(np.rad2deg(maxs) < 40.)

        ref_v = 0.573 * 1e9

        def to_optimize(x):
            fd = 1.9e-24 * x[0]
            fs = 5.7e-20 * x[1]
            v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                             fs=fs,
                                                             fd=fd)
            return (v - ref_v)**2

        import scipy.optimize as optimization
        out = optimization.minimize(to_optimize, [1, 1],
                                    bounds=((0.01, 10), (0.01, 10)),
                                    tol=1e-3)['x']

        self.assertTrue(out[0] > 0.1)
        self.assertTrue(out[1] > 0.1)
        self.assertTrue(out[0] < 1.1)
        self.assertTrue(out[1] < 1.1)
        fd = 1.9e-24 * out[0]
        fs = 5.7e-20 * out[1]
        v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                         fs=fs,
                                                         fd=fd,
                                                         write=True)
        np.testing.assert_allclose(ref_v, v)

        lens = [len(gdir.read_pickle('centerlines', div_id=i)) for i in [1,2,3]]
        pid = np.argmax(lens) + 1
        cls = gdir.read_pickle('inversion_output', div_id=pid)
        fls = gdir.read_pickle('inversion_flowlines', div_id=pid)
        maxs = 0.
        for cl, fl in zip(cls, fls):
            thick = cl['thick']
            shape = cl['shape']
            self.assertTrue(np.all(np.isfinite(shape)))

            mywidths = np.sqrt(4*thick/shape) / gdir.grid.dx
            np.testing.assert_allclose(fl.widths, mywidths)

            _max = np.max(thick)
            if _max > maxs:
                maxs = _max

        np.testing.assert_allclose(242, maxs, atol=13)

        # Check that it is not too sensitive to the dx
        cfg.params['flowline_dx'] = 1.
        geometry.initialize_flowlines(gdir)
        geometry.catchment_area(gdir)
        geometry.catchment_width_geom(gdir)
        geometry.catchment_width_correction(gdir)
        climate.distribute_climate_data([gdir])
        climate.mu_candidates(gdir, div_id=0)
        hef_file = get_demo_file('mbdata_RGI40-11.00897.csv')
        mbdf = pd.read_csv(hef_file).set_index('YEAR')
        t_star, bias = climate.t_star_from_refmb(gdir, mbdf['ANNUAL_BALANCE'])
        t_star = t_star[-1]
        bias = bias[-1]
        climate.local_mustar_apparent_mb(gdir, t_star, bias)
        inversion.prepare_for_inversion(gdir)
        v, _ = inversion.inversion_parabolic_point_slope(gdir,
                                                         fs=fs,
                                                         fd=fd,
                                                         write=True)

        np.testing.assert_allclose(ref_v, v, rtol=0.02)
        cls = gdir.read_pickle('inversion_output', div_id=pid)
        maxs = 0.
        for cl in cls:
            thick = cl['thick']
            shape = cl['shape']
            self.assertTrue(np.all(np.isfinite(shape)))
            _max = np.max(thick)
            if _max > maxs:
                maxs = _max
        # The following test fails because max thick is larger.
        # I think that dx=2 is a minimum
        # np.testing.assert_allclose(242, maxs, atol=13)
        np.testing.assert_allclose(242, maxs, atol=42)
Example #13
    def test_local_mustar(self):

        hef_file = get_demo_file('Hintereisferner.shp')
        rgidf = gpd.GeoDataFrame.from_file(hef_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
        gis.define_glacier_region(gdir, entity)
        gis.glacier_masks(gdir)
        centerlines.compute_centerlines(gdir)
        geometry.initialize_flowlines(gdir)
        geometry.catchment_area(gdir)
        geometry.catchment_width_geom(gdir)
        geometry.catchment_width_correction(gdir)
        climate.distribute_climate_data([gdir])
        climate.mu_candidates(gdir, div_id=0)

        hef_file = get_demo_file('mbdata_RGI40-11.00897.csv')
        mbdf = pd.read_csv(hef_file).set_index('YEAR')
        t_star, bias = climate.t_star_from_refmb(gdir, mbdf['ANNUAL_BALANCE'])

        t_star = t_star[-1]
        bias = bias[-1]

        climate.local_mustar_apparent_mb(gdir, t_star, bias)

        df = pd.read_csv(gdir.get_filepath('local_mustar', div_id=0))
        mu_ref = gdir.read_pickle('mu_candidates', div_id=0).loc[t_star]
        np.testing.assert_allclose(mu_ref, df['mu_star'][0], atol=1e-3)

        # Check that the apparent mb sums to zero
        for i in [0] + list(gdir.divide_ids):
             fls = gdir.read_pickle('inversion_flowlines', div_id=i)
             tmb = 0.
             for fl in fls:
                 self.assertTrue(fl.apparent_mb.shape == fl.widths.shape)
                 tmb += np.sum(fl.apparent_mb * fl.widths)
             np.testing.assert_allclose(tmb, 0., atol=0.01)
             if i == 0: continue
             np.testing.assert_allclose(fls[-1].flux[-1], 0., atol=0.01)

        # ------ Look at the mass-balance gradient
        # which years to look at
        fls = gdir.read_pickle('inversion_flowlines', div_id=0)
        mb_on_h = np.array([])
        h = np.array([])
        for fl in fls:
            y, t, p = climate.mb_yearly_climate_on_height(gdir, fl.surface_h)
            selind = np.searchsorted(y, mbdf.index)
            t = np.mean(t[:, selind], axis=1)
            p = np.mean(p[:, selind], axis=1)
            mb_on_h = np.append(mb_on_h, p - mu_ref * t)
            h = np.append(h, fl.surface_h)
        dfg = pd.read_csv(get_demo_file('mbgrads_RGI40-11.00897.csv'),
                          index_col='ALTITUDE').mean(axis=1)
        # Take the altitudes below 3100 and fit a line
        dfg = dfg[dfg.index < 3100]
        pok = np.where(h < 3100)
        from scipy.stats import linregress
        slope_obs, _, _, _, _ = linregress(dfg.index, dfg.values)
        slope_our, _, _, _, _ = linregress(h[pok], mb_on_h[pok])
        np.testing.assert_allclose(slope_obs, slope_our, rtol=0.1)
Example #14
    def test_baltoro_centerlines(self):

        cfg.params['border'] = 2
        cfg.paths['srtm_file'] =  get_demo_file('baltoro_srtm_clip.tif')

        b_file = get_demo_file('baltoro_wgs84.shp')
        rgidf = gpd.GeoDataFrame.from_file(b_file)

        kienholz_file = get_demo_file('centerlines_baltoro_wgs84.shp')
        kdf = gpd.read_file(kienholz_file)

        # loop because for some reason indexing won't work
        for index, entity in rgidf.iterrows():
            gdir = cfg.GlacierDir(entity, base_dir=self.testdir)
            gis.define_glacier_region(gdir, entity)
            gis.glacier_masks(gdir)
            centerlines.compute_centerlines(gdir)

        my_mask = np.zeros((gdir.grid.ny, gdir.grid.nx), dtype=np.uint8)
        cls = gdir.read_pickle('centerlines', div_id=1)
        for cl in cls:
            ext_yx = tuple(reversed(cl.line.xy))
            my_mask[ext_yx] = 1

        # Transform
        kien_mask = np.zeros((gdir.grid.ny, gdir.grid.nx), dtype=np.uint8)
        from shapely.ops import transform
        for index, entity in kdf.iterrows():
            def proj(lon, lat):
                return salem.transform_proj(salem.wgs84, gdir.grid.proj,
                                            lon, lat)
            kgm = transform(proj, entity.geometry)

            # Interpolate shape to a regular path
            e_line = []
            for distance in np.arange(0.0, kgm.length, gdir.grid.dx):
                e_line.append(*kgm.interpolate(distance).coords)
            kgm = shpg.LineString(e_line)

            # Transform geometry into grid coordinates
            def proj(x, y):
                return gdir.grid.transform(x, y, crs=gdir.grid.proj)
            kgm = transform(proj, kgm)

            # Round to the nearest pixel
            def project(x, y):
                return (np.rint(x).astype(np.int64),
                        np.rint(y).astype(np.int64))

            kgm = transform(project, kgm)

            ext_yx = tuple(reversed(kgm.xy))
            kien_mask[ext_yx] = 1

        # We test the Heidke Skill score of our predictions
        rest = kien_mask + 2 * my_mask
        # gr.plot_array(rest)
        na = len(np.where(rest == 3)[0])
        nb = len(np.where(rest == 2)[0])
        nc = len(np.where(rest == 1)[0])
        nd = len(np.where(rest == 0)[0])
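        # na/nb/nc/nd form the 2x2 contingency table of the two centerline
        # masks: na = pixels flagged by both (hits), nb = ours only
        # (false alarms), nc = Kienholz only (misses), nd = neither
        # (correct negatives). The standard Heidke Skill Score is
        # HSS = 2*(na*nd - nb*nc) / ((na+nc)*(nc+nd) + (na+nb)*(nb+nd)),
        # which is exactly what the two lines below compute.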
        denom = np.float64((na+nc)*(nd+nc)+(na+nb)*(nd+nb))
        hss = np.float64(2.) * ((na*nd)-(nb*nc)) / denom
        self.assertTrue(hss > 0.53)