Example #1
def test_D8_D4_fill(d4_grid):
    """
    Tests the functionality of D4 filling.
    """
    lfD8 = DepressionFinderAndRouter(
        d4_grid.mg1, pits=None, routing="D8", reroute_flow=False
    )
    lfD4 = DepressionFinderAndRouter(
        d4_grid.mg2, pits=None, routing="D4", reroute_flow=False
    )

    lfD8.map_depressions()
    lfD4.map_depressions()

    assert lfD8.number_of_lakes == 1
    assert lfD4.number_of_lakes == 3

    correct_D8_lake_map = np.empty(7 * 7, dtype=int)
    correct_D8_lake_map.fill(XX)  # XX is the "no lake" sentinel defined elsewhere in the original test module
    correct_D8_lake_map[d4_grid.lake_nodes] = 10
    correct_D4_lake_map = correct_D8_lake_map.copy()
    correct_D4_lake_map[d4_grid.lake_nodes[5:]] = 32
    correct_D4_lake_map[d4_grid.lake_nodes[-2]] = 38
    correct_D8_depths = np.zeros(7 * 7, dtype=float)
    correct_D8_depths[d4_grid.lake_nodes] = 2.0
    correct_D4_depths = correct_D8_depths.copy()
    correct_D4_depths[d4_grid.lake_nodes[5:]] = 4.0
    correct_D4_depths[d4_grid.lake_nodes[-2]] = 3.0

    assert_array_equal(lfD8.lake_map, correct_D8_lake_map)
    assert_array_equal(lfD4.lake_map, correct_D4_lake_map)

    assert d4_grid.mg1.at_node["depression__depth"] == approx(correct_D8_depths)
    assert d4_grid.mg2.at_node["depression__depth"] == approx(correct_D4_depths)
Example #2
    def __init__(self, input_file=None, params=None):
        """Initialize the StochasticDischargeHortonianModel."""

        # Call ErosionModel's init
        super(StochasticDischargeHortonianModel,
              self).__init__(input_file=input_file, params=params)

        # Instantiate components
        self.flow_router = FlowRouter(self.grid, **self.params)

        self.lake_filler = DepressionFinderAndRouter(self.grid, **self.params)

        self.rain_generator = \
            PrecipitationDistribution(delta_t=self.params['dt'],
                                      total_time=self.params['run_duration'],
                                      **self.params)

        # Add a field for discharge
        if 'surface_water__discharge' not in self.grid.at_node:
            self.grid.add_zeros('node', 'surface_water__discharge')
        self.discharge = self.grid.at_node['surface_water__discharge']                                    

        # Get the infiltration-capacity parameter
        self.infilt = self.params['infiltration_capacity']

        # Run flow routing and lake filler (only once, because we are not
        # changing topography)
        self.flow_router.run_one_step()
        self.lake_filler.map_depressions()
Example #3
def test_find_lowest_node_on_lake_perimeter_c():
    """
    Ensures the key functionality of the cfunc is working.
    """
    mg = RasterModelGrid((7, 7), xy_spacing=0.5)
    z = mg.add_field("node", "topographic__elevation", mg.node_x.copy())
    z += 0.01 * mg.node_y
    mg.at_node["topographic__elevation"].reshape(mg.shape)[2:5, 2:5] *= 0.1
    fr = FlowAccumulator(mg, flow_director="D8")
    fr.run_one_step()  # the flow "gets stuck" in the hole
    df = DepressionFinderAndRouter(mg)

    node_nbrs = df._node_nbrs
    flood_status = df.flood_status
    elev = df._elev
    BIG_ELEV = df._BIG_ELEV
    nodes_this_depression = mg.zeros("node", dtype=int)
    nodes_this_depression[0] = 16
    pit_count = 1

    assert find_lowest_node_on_lake_perimeter_c(node_nbrs, flood_status, elev,
                                                nodes_this_depression,
                                                pit_count, BIG_ELEV) == (23, 1)
    nodes_this_depression[1] = 8
    pit_count = 2
    assert find_lowest_node_on_lake_perimeter_c(node_nbrs, flood_status, elev,
                                                nodes_this_depression,
                                                pit_count, BIG_ELEV) == (0, 2)
Example #4
    def __init__(self, grid, routing_method):

        self._grid = grid

        if routing_method == "D8":
            self.fd = FlowDirectorD8(self._grid)
        elif routing_method == "Steepest":
            self.fd = FlowDirectorSteepest(self._grid)
        else:
            raise ValueError("routing_method must be either D8 or Steepest.")

        self.fa = FlowAccumulator(
            self._grid,
            surface="topographic__elevation",
            flow_director=self.fd,
            runoff_rate="average_surface_water__specific_discharge",
        )
        self.lmb = LakeMapperBarnes(
            self._grid,
            method=routing_method,
            fill_flat=False,
            surface="topographic__elevation",
            fill_surface="topographic__elevation",
            redirect_flow_steepest_descent=False,
            reaccumulate_flow=False,
            track_lakes=False,
            ignore_overfill=True,
        )
        self.dfr = DepressionFinderAndRouter(self._grid)
Example #5
    def __init__(self, modern_dem_name, outlet_id, chi_mask_dem_name=None, from_file=None):
        """Initialize MetricCalculator with names of postglacial and modern
        DEMs."""


        if from_file is None:

            # Read and remember the modern DEM (whether data or model)
            (self.grid, self.z) = self.read_topography(modern_dem_name)
            #print self.grid.x_of_node
    
            self.grid.set_watershed_boundary_condition_outlet_id(outlet_id,
                                                                 self.z, nodata_value=-9999)
    
            # Instantiate and run a FlowRouter and lake filler, so we get
            # drainage area for cumulative-area statistic, and also fields for chi.
            fr = FlowRouter(self.grid)
            dfr = DepressionFinderAndRouter(self.grid)
            fr.route_flow()
            dfr.map_depressions()
    
            # Remember modern drainage area grid
            self.area = self.grid.at_node['drainage_area']
    
            # Instantiate a ChiFinder for chi-index
            self.chi_finder = ChiFinder(self.grid, min_drainage_area=10000.,
                                        reference_concavity=0.5)
            
            core_nodes = np.zeros(self.area.shape, dtype=bool)
            core_nodes[self.grid.core_nodes] = True
            # Read and remember the MASK, if provided
            if chi_mask_dem_name is None:
                self.mask = (self.area > 1e5)
                self.till_mask = np.zeros(self.mask.shape, dtype=bool)
                self.till_mask[self.grid.core_nodes] = 1
            else:
                (self.mask_grid, zmask) = self.read_topography(chi_mask_dem_name)
                mask = (zmask > 0) * 1
                self.mask = (self.area > 1e5) * (mask == 1)

                mask_bool = (zmask > 0)
                self.till_mask = np.zeros(self.mask.shape, dtype=bool)
                self.till_mask[mask_bool * core_nodes] = 1
            
            # Create dictionary to contain metrics
            self.metric = {}
        
        else:
            with open(from_file, 'r') as f:
                metrics = load(f)
                self.modern_dem_name = metrics.pop('Topo file')

                self.metric = metrics
    
            fn_split = from_file.split('.')
            fn_split[-1] = 'chi'
            fn_split.append('txt')
            chi_filename = '.'.join(fn_split)
            self.density_chi = np.loadtxt(chi_filename)
Example #6
def d4_grid():
    """Test functionality of routing when D4 is specified.

    The elevation field in this test looks like::

    1   2   3   4   5   6   7

    1   2   3   0   5   0   7

    1   2   3   4   0   0   7

    1   2   3   0   5   6   7

    1   2   0   0   0   6   7

    1   2   3   0   5   6   7

    1   2   3   4   5   6   7
    """
    mg1 = RasterModelGrid(7, 7, 1.)
    mg2 = RasterModelGrid(7, 7, 1.)
    z = mg1.node_x.copy() + 1.
    lake_nodes = np.array([10, 16, 17, 18, 24, 32, 33, 38, 40])
    z[lake_nodes] = 0.
    mg1.add_field("node", "topographic__elevation", z, units="-")
    mg2.add_field("node", "topographic__elevation", z, units="-")

    frD8 = FlowAccumulator(mg1, flow_director='D8')
    frD4 = FlowAccumulator(mg2, flow_director='D4')
    lfD8 = DepressionFinderAndRouter(mg1, routing="D8")
    lfD4 = DepressionFinderAndRouter(mg2, routing="D4")

    class DansGrid(object):
        pass

    d4_grid = DansGrid()
    d4_grid.mg1 = mg1
    d4_grid.mg2 = mg2
    d4_grid.z = z
    d4_grid.lake_nodes = lake_nodes
    d4_grid.frD8 = frD8
    d4_grid.frD4 = frD4
    d4_grid.lfD8 = lfD8
    d4_grid.lfD4 = lfD4

    return d4_grid
Example #7
def test_route_to_multiple_error_raised():
    mg = RasterModelGrid((10, 10))
    z = mg.add_zeros("topographic__elevation", at="node")
    z += mg.x_of_node + mg.y_of_node
    fa = FlowAccumulator(mg, flow_director="MFD")
    fa.run_one_step()

    with pytest.raises(NotImplementedError):
        DepressionFinderAndRouter(mg)
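For contrast, a minimal sketch (not part of the scraped example above): with a route-to-one director such as D8, the same grid is accepted without raising NotImplementedError. The imports and field names are the standard Landlab ones used throughout these examples.

from landlab import RasterModelGrid
from landlab.components import DepressionFinderAndRouter, FlowAccumulator

mg = RasterModelGrid((10, 10))
z = mg.add_zeros("topographic__elevation", at="node")
z += mg.x_of_node + mg.y_of_node
fa = FlowAccumulator(mg, flow_director="D8")  # single-receiver (route-to-one) routing
fa.run_one_step()
df = DepressionFinderAndRouter(mg)  # accepted: receivers are one-to-one
df.map_depressions()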
Example #8
    def initialize(self, input_stream=None):
        """
        The BMI-style initialize method takes an optional input_stream
        parameter, which may be either a ModelParameterDictionary object or
        an input stream from which a ModelParameterDictionary can read values.
        """
        # Create a ModelParameterDictionary for the inputs
        if input_stream is None:
            inputs = None
        elif type(input_stream) == ModelParameterDictionary:
            inputs = input_stream
        else:
            inputs = ModelParameterDictionary(input_stream)

        # Make sure the grid includes elevation data. This means either:
        #  1. The grid has a node field called 'topographic__elevation', or
        #  2. The input file has an item called 'ELEVATION_FIELD_NAME' *and*
        #     a field by this name exists in the grid.
        try:
            self._elev = self._grid.at_node["topographic__elevation"]
        except FieldError:
            try:
                self.topo_field_name = inputs.read_string("ELEVATION_" +
                                                          "FIELD_NAME")
            except AttributeError:
                print("Error: Because your grid does not have a node field")
                print('called "topographic__elevation", you need to pass the')
                print("name of a text input file or ModelParameterDictionary,")
                print("and this file or dictionary needs to include the name")
                print("of another field in your grid that contains your")
                print("elevation data.")
                raise AttributeError
            except MissingKeyError:
                print("Error: Because your grid does not have a node field")
                print('called "topographic__elevation", your input file (or')
                print("ModelParameterDictionary) must include an entry with")
                print('the key "ELEVATION_FIELD_NAME", which gives the name')
                print("of a field in your grid that contains your elevation")
                print("data.")
                raise MissingKeyError("ELEVATION_FIELD_NAME")
            try:
                self._elev = self._grid.at_node[self.topo_field_name]
            except AttributeError:
                print(
                    "Your grid does not seem to have a node field called",
                    self.topo_field_name,
                )
        else:
            self.topo_field_name = "topographic__elevation"
        # create the only new output field:
        self.sed_fill_depth = self._grid.add_zeros("node",
                                                   "sediment_fill__depth",
                                                   noclobber=False)

        self._lf = DepressionFinderAndRouter(self._grid, routing=self._routing)
        self._fr = FlowAccumulator(self._grid, flow_director=self._routing)
Example #9
    def __init__(self, input_file=None, params=None):
        """Initialize the DrainageAreaModel."""

        # Call ErosionModel's init
        super(DrainageAreaModel, self).__init__(input_file=input_file,
                                                params=params)

        # Instantiate a FlowRouter and DepressionFinderAndRouter components
        self.flow_router = FlowRouter(self.grid, **self.params)
        self.lake_filler = DepressionFinderAndRouter(self.grid, **self.params)
Example #10
def test_pits_as_IDs(dans_grid3):
    """
    Smoke test for passing specific IDs, not an array, to the mapper.
    """
    dans_grid3.fr.run_one_step()

    pits = np.nonzero(dans_grid3.mg.at_node["flow__sink_flag"])[0]
    lf = DepressionFinderAndRouter(dans_grid3.mg, pits=pits)
    lf.map_depressions()

    assert dans_grid3.mg.at_node["drainage_area"] == approx(dans_grid3.A_new)
Example #11
def test_filling_supplied_pits(dans_grid3):
    """
    Test the filler without rerouting, but confusingly, where there *is*
    already routing information available!
    Also tests the supply of an array for 'pits'.
    """
    dans_grid3.fr.run_one_step()

    lf = DepressionFinderAndRouter(
        dans_grid3.mg, reroute_flow=False, pits="flow__sink_flag"
    )
    lf.map_depressions()
    assert_array_equal(dans_grid3.mg.at_node["flow__receiver_node"], dans_grid3.r_old)
Example #12
def test_filling_alone(dans_grid3):
    """
    Test the filler alone, w/o supplying information on the pits.

    Setting the *pits* parameter to None causes the mapper to look for pits
    using its _find_pits method.
    """
    lf = DepressionFinderAndRouter(dans_grid3.mg, reroute_flow=False, pits=None)
    assert lf._user_supplied_pits is None

    lf.map_depressions()
    assert_array_equal(
        dans_grid3.mg.at_node["flow__receiver_node"], XX * np.ones(49, dtype=int)
    )
    assert_array_equal(lf.depression_outlet_map, dans_grid3.depr_outlet_target)
Example #13
    def __init__(self, input_file=None, params=None):
        """Initialize the StreamPowerThresholdModel."""

        # Call ErosionModel's init
        super(StreamPowerThresholdModel, self).__init__(input_file=input_file,
                                                        params=params)

        # Instantiate a FlowRouter and DepressionFinderAndRouter components
        self.flow_router = FlowRouter(self.grid, **self.params)
        self.lake_filler = DepressionFinderAndRouter(self.grid, **self.params)

        # Instantiate a FastscapeEroder component
        self.eroder = StreamPowerSmoothThresholdEroder(
            self.grid,
            K_sp=self.params['K_sp'],
            threshold_sp=self.params['threshold_sp'])
Example #14
    def __init__(self, input_file=None, params=None):
        """Initialize the LinearDiffusionModel."""

        # Call ErosionModel's init
        super(EffectiveDrainageAreaModel, self).__init__(input_file=input_file,
                                                         params=params)

        # Instantiate a FlowRouter and DepressionFinderAndRouter components
        self.flow_router = FlowRouter(self.grid, **self.params)
        self.lake_filler = DepressionFinderAndRouter(self.grid, **self.params)

        # Add a field for effective drainage area
        self.eff_area = self.grid.add_zeros('node', 'effective_drainage_area')

        # Get the effective-area parameter
        self.sat_param = self.params['saturation_area_scale']
Example #15
def test_lake_mapper():
    """
    Create a test grid and run a series of tests.
    """
    # Make a test grid
    rmg = create_test_grid()

    # Instantiate a lake mapper
    # (Note that we don't need to send it an input file name, because our grid
    # already has a topographic__elevation field)
    lm = DepressionFinderAndRouter(rmg)

    # Run it on our test grid
    lm.map_depressions()

    # Run tests
    check_fields1(rmg)
    check_array_values1(rmg, lm)
    check_fields2(rmg)
    check_array_values2(rmg, lm)
Example #16
    def __init__(self, input_file=None, params=None):
        """Initialize the HybridAlluviumModel."""

        # Call ErosionModel's init
        super(HybridAlluviumModel, self).__init__(input_file=input_file,
                                                  params=params)

        # Instantiate a FlowRouter and DepressionFinderAndRouter components
        self.flow_router = FlowRouter(self.grid, **self.params)
        self.lake_filler = DepressionFinderAndRouter(self.grid, **self.params)

        #make area_field and/or discharge_field depending on discharge_method
        if self.params['discharge_method'] is not None:
            if self.params['discharge_method'] == 'area_field':
                area_field = self.grid.at_node['drainage_area']
                discharge_field = None
            elif self.params['discharge_method'] == 'discharge_field':
                discharge_field = self.grid.at_node['surface_water__discharge']
                area_field = None
            else:
                raise (KeyError)
        else:
            area_field = None
            discharge_field = None

        # Instantiate a HybridAlluvium component
        self.eroder = Space(self.grid,
                            K_sed=self.params['K_sed'],
                            K_br=self.params['K_br'],
                            F_f=self.params['F_f'],
                            phi=self.params['phi'],
                            H_star=self.params['H_star'],
                            v_s=self.params['v_s'],
                            m_sp=self.params['m_sp'],
                            n_sp=self.params['n_sp'],
                            sp_crit_sed=self.params['sp_crit_sed'],
                            sp_crit_br=self.params['sp_crit_br'],
                            method=self.params['method'],
                            discharge_method=self.params['discharge_method'],
                            area_field=area_field,
                            discharge_field=discharge_field)
Example #17
    def __init__(self, input_file=None, params=None):
        """Initialize the StreamPowerVarThresholdModel."""

        # Call ErosionModel's init
        super(StreamPowerVarThresholdModel, self).__init__(input_file=input_file,
                                                params=params)

        # Instantiate a FlowRouter and DepressionFinderAndRouter components
        self.flow_router = FlowRouter(self.grid, **self.params)
        self.lake_filler = DepressionFinderAndRouter(self.grid, **self.params)
        
        # Create a field for the (initial) erosion threshold
        self.threshold = self.grid.add_zeros('node', 'erosion__threshold')
        self.threshold[:] = self.params['threshold_sp']
        
        # Instantiate a FastscapeEroder component
        self.eroder = StreamPowerSmoothThresholdEroder(
            self.grid,
            K_sp=self.params['K_sp'],
            threshold_sp=self.threshold)

        # Get the parameter for rate of threshold increase with erosion depth
        self.thresh_change_per_depth = self.params['thresh_change_per_depth']
Example #18
v_profile = profile * vmax
accum_disp = profile * float(dxy)

# This is an array for counting how many pixels need to be moved
nshift = np.zeros(np.size(yLocation))
n_buff = 0  # optional extra buffer zone in case you only want to move a subset.

################################################################################
## Last, we instantiate landlab components that will evolve the landscape #####
################################################################################

fr = FlowRouter(rmg)  # standard D8 flow routing algorithm
sp = FastscapeEroder(rmg, K_sp='K_sp', m_sp=m, n_sp=n,
                     threshold_sp=0)  # river eroder
lin_diffuse = LinearDiffuser(rmg, linear_diffusivity='D')  #linear diffuser
fill = DepressionFinderAndRouter(rmg)  #lake filling algorithm

nts = int(num_frames)
ds = xr.Dataset(
    data_vars={
        'topographic__elevation': (
            ('time', 'y', 'x'),  # tuple of dimensions
            np.empty((nts, rmg.shape[0], rmg.shape[1])),  # n-d array of data
            {
                'units': 'meters'
            })
    },  # dictionary with data attributes
    coords={
        'x': (
            ('x'),  # tuple of dimensions
            rmg.x_of_node.reshape(
Example #19
    print('No pre-existing topography. Creating own random noise topo.')

#Create boundary conditions of the model grid (either closed or fixed-head)
for edge in (mg.nodes_at_left_edge,mg.nodes_at_right_edge, mg.nodes_at_top_edge):
    mg.status_at_node[edge] = CLOSED_BOUNDARY
for edge in (mg.nodes_at_bottom_edge):
    mg.status_at_node[edge] = FIXED_VALUE_BOUNDARY

#Initialize Fastscape
fc = FastscapeEroder(mg,
                    K_sp = ksp ,
                    m_sp = msp,
                    n_sp = nsp,
                    rainfall_intensity = 1)
fr = FlowRouter(mg)
lm = DepressionFinderAndRouter(mg)

for i in range(nSteps):
    fr.run_one_step(dt=1)
    lm.map_depressions()
    fc.run_one_step(dt=1)
    mg.at_node['topographic__elevation'][mg.core_nodes] += 0.0002

z = mg.at_node['topographic__elevation']

plt.figure()
imshow_grid(mg,z)
plt.savefig('test.png')
plt.close()

np.save('iniTopo',z)
Example #20
def test_steady_state_with_basic_solver_option():
    """
    Test that model matches the transport-limited analytical solution
    for slope/area relationship at steady state: S=((U * v_s) / (K * A^m)
    + U / (K * A^m))^(1/n).

    Also test that model matches the analytical solution for steady-state
    sediment flux: Qs = U * A * (1 - phi).
    """

    #set up a 5x5 grid with one open outlet node and low initial elevations.
    nr = 5
    nc = 5
    mg = RasterModelGrid((nr, nc), 10.0)

    z = mg.add_zeros('node', 'topographic__elevation')

    mg['node']['topographic__elevation'] += mg.node_y / 100000 \
        + mg.node_x / 100000 \
        + np.random.rand(len(mg.node_y)) / 10000
    mg.set_closed_boundaries_at_grid_edges(bottom_is_closed=True,
                                           left_is_closed=True,
                                           right_is_closed=True,
                                           top_is_closed=True)
    mg.set_watershed_boundary_condition_outlet_id(0,
                                                  mg['node']['topographic__elevation'],
                                                  -9999.)

    #Instantiate DepressionFinderAndRouter
    df = DepressionFinderAndRouter(mg)

    # Create a D8 flow handler
    fa = FlowAccumulator(mg, flow_director='D8',
                         depression_finder='DepressionFinderAndRouter')

    # Parameter values for detachment-limited test
    K = 0.01
    U = 0.0001
    dt = 1.0
    F_f = 0.0 #all sediment is considered coarse bedload
    m_sp = 0.5
    n_sp = 1.0
    v_s = 0.5
    phi=0.5

    # Instantiate the ErosionDeposition component...
    ed = ErosionDeposition(mg, K=K, F_f=F_f, phi=phi, v_s=v_s, m_sp=m_sp,
                           n_sp=n_sp, sp_crit=0, solver='basic')

    # ... and run it to steady state (5000x1-year timesteps).
    for i in range(5000):
        fa.run_one_step()
        flooded = np.where(df.flood_status==3)[0]
        ed.run_one_step(dt=dt, flooded_nodes=flooded)
        z[mg.core_nodes] += U * dt #m

    #compare numerical and analytical slope solutions
    num_slope = mg.at_node['topographic__steepest_slope'][mg.core_nodes]
    analytical_slope = (np.power(((U * v_s) / (K
        * np.power(mg.at_node['drainage_area'][mg.core_nodes], m_sp)))
        + (U / (K * np.power(mg.at_node['drainage_area'][mg.core_nodes],
        m_sp))), 1./n_sp))

    #test for match with analytical slope-area relationship
    testing.assert_array_almost_equal(num_slope, analytical_slope,
                                      decimal=8,
                                      err_msg='E/D slope-area test failed',
                                      verbose=True)

    #compare numerical and analytical sediment flux solutions
    num_sedflux = mg.at_node['sediment__flux'][mg.core_nodes]
    analytical_sedflux = (U * mg.at_node['drainage_area'][mg.core_nodes]
        * (1 - phi))

    #test for match with analytical sediment flux
    testing.assert_array_almost_equal(num_sedflux, analytical_sedflux,
                                      decimal=8,
                                      err_msg='E/D sediment flux test failed',
                                      verbose=True)
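As a quick numeric check of the slope formula quoted in the docstring above (a sketch, not part of the original test; the parameter values mirror the test, and A = 100 m^2 is an arbitrary drainage area):

U, v_s, K, m_sp, n_sp = 0.0001, 0.5, 0.01, 0.5, 1.0
A = 100.0  # arbitrary drainage area, m^2
S = ((U * v_s) / (K * A ** m_sp) + U / (K * A ** m_sp)) ** (1.0 / n_sp)
# S evaluates to 0.0015 for these values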
Example #21
def dans_grid3():
    """
    Create a 7x7 test grid with a well defined hole in it.
    """
    mg = RasterModelGrid(7, 7, 1.)

    z = np.array([
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
        [0.0, 2.0, 2.0, 2.0, 2.0, 2.0, 0.0],
        [0.0, 2.0, 1.6, 1.5, 1.6, 2.0, 0.0],
        [0.0, 2.0, 1.7, 1.6, 1.7, 2.0, 0.0],
        [0.0, 2.0, 1.8, 2.0, 2.0, 2.0, 0.0],
        [0.0, 1.0, 0.6, 1.0, 1.0, 1.0, 0.0],
        [0.0, 0.0, -0.5, 0.0, 0.0, 0.0, 0.0],
    ]).flatten()

    r_old = np.array([
        [0, 1, 2, 3, 4, 5, 6],
        [7, 1, 2, 3, 4, 5, 13],
        [14, 14, 17, 17, 17, 20, 20],
        [21, 21, 17, 17, 17, 27, 27],
        [28, 28, 37, 38, 39, 34, 34],
        [35, 44, 44, 44, 46, 41, 41],
        [42, 43, 44, 45, 46, 47, 48],
    ]).flatten()

    r_new = np.array([
        [0, 1, 2, 3, 4, 5, 6],
        [7, 1, 2, 3, 4, 5, 13],
        [14, 14, 23, 24, 24, 20, 20],
        [21, 21, 30, 30, 24, 27, 27],
        [28, 28, 37, 38, 39, 34, 34],
        [35, 44, 44, 44, 46, 41, 41],
        [42, 43, 44, 45, 46, 47, 48],
    ]).flatten()

    A_old = np.array([
        [0., 1., 1., 1., 1., 1., 0.],
        [0., 1., 1., 1., 1., 1., 0.],
        [1., 1., 1., 6., 1., 1., 1.],
        [1., 1., 1., 1., 1., 1., 1.],
        [1., 1., 1., 1., 1., 1., 1.],
        [0., 1., 2., 2., 2., 1., 1.],
        [0., 0., 5., 0., 2., 0., 0.],
    ]).flatten()

    A_new = np.array([
        [0., 1., 1., 1., 1., 1., 0.],
        [0., 1., 1., 1., 1., 1., 0.],
        [1., 1., 1., 1., 1., 1., 1.],
        [1., 1., 2., 4., 1., 1., 1.],
        [1., 1., 7., 1., 1., 1., 1.],
        [0., 1., 8., 2., 2., 1., 1.],
        [0., 0., 11., 0., 2., 0., 0.],
    ]).flatten()

    s_new = np.array([
        [0, 1, 8, 2, 9, 3, 10],
        [4, 11, 5, 12, 6, 7, 13],
        [14, 15, 20, 19, 21, 22, 27],
        [26, 28, 29, 34, 33, 35, 41],
        [40, 42, 43, 44, 36, 37, 30],
        [23, 16, 24, 17, 18, 25, 38],
        [31, 45, 46, 39, 32, 47, 48],
    ]).flatten()

    links_old = np.array([
        [-1, -1, -1, -1, -1, -1, -1],
        [-1, 7, 8, 9, 10, 11, -1],
        [-1, 26, 28, -1, 29, 31, -1],
        [-1, 39, 113, 35, 114, 44, -1],
        [-1, 52, 60, 61, 62, 57, -1],
        [-1, 146, 73, 149, 75, 70, -1],
        [-1, -1, -1, -1, -1, -1, -1],
    ]).flatten()

    links_new = np.array([
        [-1, -1, -1, -1, -1, -1, -1],
        [-1, 7, 8, 9, 10, 11, -1],
        [-1, 26, 34, 35, 115, 31, -1],
        [-1, 39, 47, 125, 42, 44, -1],
        [-1, 52, 60, 61, 62, 57, -1],
        [-1, 146, 73, 149, 75, 70, -1],
        [-1, -1, -1, -1, -1, -1, -1],
    ]).flatten()

    depr_outlet_target = np.array([
        [XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, 30, 30, 30, XX, XX],
        [XX, XX, 30, 30, 30, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX],
    ]).flatten()

    mg.add_field("node", "topographic__elevation", z, units="-")

    fr = FlowAccumulator(mg, flow_director='D8')
    lf = DepressionFinderAndRouter(mg)

    class DansGrid(object):
        pass

    dans_grid = DansGrid()
    dans_grid.mg = mg
    dans_grid.fr = fr
    dans_grid.lf = lf
    dans_grid.z = z
    dans_grid.r_new = r_new
    dans_grid.r_old = r_old
    dans_grid.A_new = A_new
    dans_grid.A_old = A_old
    dans_grid.s_new = s_new
    dans_grid.depr_outlet_target = depr_outlet_target
    dans_grid.links_old = links_old
    dans_grid.links_new = links_new

    return dans_grid
Example #22
def test_degenerate_drainage():
    """
    This "hourglass" configuration should be one of the hardest to correctly
    re-route.
    """
    mg = RasterModelGrid(9, 5)
    z_init = mg.node_x.copy() * 0.0001 + 1.
    lake_pits = np.array([7, 11, 12, 13, 17, 27, 31, 32, 33, 37])
    z_init[lake_pits] = -1.
    z_init[22] = 0.  # the common spill pt for both lakes
    z_init[21] = 0.1  # an adverse bump in the spillway
    z_init[20] = -0.2  # the spillway
    z = mg.add_field("node", "topographic__elevation", z_init)

    fr = FlowRouter(mg)
    lf = DepressionFinderAndRouter(mg)
    fr.route_flow()
    lf.map_depressions()

    #    correct_A = np.array([ 0.,   0.,   0.,   0.,   0.,
    #                           0.,   1.,   3.,   1.,   0.,
    #                           0.,   5.,   1.,   2.,   0.,
    #                           0.,   1.,  10.,   1.,   0.,
    #                          21.,  21.,   1.,   1.,   0.,
    #                           0.,   1.,   9.,   1.,   0.,
    #                           0.,   3.,   1.,   2.,   0.,
    #                           0.,   1.,   1.,   1.,   0.,
    #                           0.,   0.,   0.,   0.,   0.])

    correct_A = np.array([
        [0., 0., 0., 0., 0.],
        [0., 1., 3., 1., 0.],
        [0., 2., 4., 2., 0.],
        [0., 1., 10., 1., 0.],
        [21., 21., 1., 1., 0.],
        [0., 1., 9., 1., 0.],
        [0., 2., 2., 2., 0.],
        [0., 1., 1., 1., 0.],
        [0., 0., 0., 0., 0.],
    ]).flatten()
    thelake = np.sort(np.concatenate((lake_pits, [22])))  # sorted lake-node ids (not used below)

    assert mg.at_node["drainage_area"] == approx(correct_A)
Example #23
def test_matches_transport_solution():
    """
    Test that model matches the transport-limited analytical solution
    for slope/area relationship at steady state: S=((U * v_s) / (K_sed * A^m)
    + U / (K_sed * A^m))^(1/n).

    Also test that model matches the analytical solution for steady-state
    sediment flux: Qs = U * A * (1 - phi).
    """

    # set up a 5x5 grid with one open outlet node and low initial elevations.
    nr = 5
    nc = 5
    mg = RasterModelGrid((nr, nc), xy_spacing=10.0)

    z = mg.add_zeros("node", "topographic__elevation")
    br = mg.add_zeros("node", "bedrock__elevation")
    soil = mg.add_zeros("node", "soil__depth")

    mg["node"]["topographic__elevation"] += (
        mg.node_y / 100000 + mg.node_x / 100000 +
        np.random.rand(len(mg.node_y)) / 10000)
    mg.set_closed_boundaries_at_grid_edges(
        bottom_is_closed=True,
        left_is_closed=True,
        right_is_closed=True,
        top_is_closed=True,
    )
    mg.set_watershed_boundary_condition_outlet_id(
        0, mg["node"]["topographic__elevation"], -9999.0)
    soil[:] += 100.0  # initial soil depth of 100 m
    br[:] = z[:]
    z[:] += soil[:]

    # Instantiate DepressionFinderAndRouter
    df = DepressionFinderAndRouter(mg)

    # Create a D8 flow handler
    fa = FlowAccumulator(mg,
                         flow_director="D8",
                         depression_finder="DepressionFinderAndRouter")

    # Parameter values for detachment-limited test
    K_sed = 0.01
    U = 0.0001
    dt = 1.0
    F_f = 1.0  # all detached rock disappears; detachment-ltd end-member
    m_sp = 0.5
    n_sp = 1.0
    v_s = 0.5
    phi = 0.5

    # Instantiate the Space component...
    sp = Space(
        mg,
        K_sed=K_sed,
        K_br=0.01,
        F_f=F_f,
        phi=phi,
        H_star=1.0,
        v_s=v_s,
        m_sp=m_sp,
        n_sp=n_sp,
        sp_crit_sed=0,
        sp_crit_br=0,
    )

    # ... and run it to steady state (5000x1-year timesteps).
    for i in range(5000):
        fa.run_one_step()
        flooded = np.where(df.flood_status == 3)[0]
        sp.run_one_step(dt=dt, flooded_nodes=flooded)
        br[mg.core_nodes] += U * dt  # m
        soil[0] = 100.0  # enforce constant soil depth at boundary to keep lowering steady
        z[:] = br[:] + soil[:]

    # compare numerical and analytical slope solutions
    num_slope = mg.at_node["topographic__steepest_slope"][mg.core_nodes]
    analytical_slope = np.power(
        ((U * v_s * (1 - phi)) /
         (K_sed * np.power(mg.at_node["drainage_area"][mg.core_nodes], m_sp)))
        +
        ((U * (1 - phi)) /
         (K_sed * np.power(mg.at_node["drainage_area"][mg.core_nodes], m_sp))),
        1.0 / n_sp,
    )

    # test for match with analytical slope-area relationship
    testing.assert_array_almost_equal(
        num_slope,
        analytical_slope,
        decimal=8,
        err_msg="SPACE transport-limited slope-area test failed",
        verbose=True,
    )

    # compare numerical and analytical sediment flux solutions
    num_sedflux = mg.at_node["sediment__flux"][mg.core_nodes]
    analytical_sedflux = (
        U * mg.at_node["drainage_area"][mg.core_nodes] * (1 - phi)
    )

    # test for match with analytical sediment flux
    testing.assert_array_almost_equal(
        num_sedflux,
        analytical_sedflux,
        decimal=8,
        err_msg="SPACE transport-limited sediment flux test failed",
        verbose=True,
    )
Example #24
fa = FlowAccumulator(grid,
                     surface='topographic__elevation',
                     flow_director='D8')
lmb = LakeMapperBarnes(
    grid,
    method='D8',
    fill_flat=False,
    surface="topographic__elevation",
    fill_surface="topographic__elevation",
    redirect_flow_steepest_descent=False,
    reaccumulate_flow=False,
    track_lakes=False,
    ignore_overfill=True,
)
dfr = DepressionFinderAndRouter(grid)

ld = LinearDiffuser(grid, D)
sp = FastscapeEroder(grid, K_sp=Ksp, m_sp=0.5, n_sp=1.0, threshold_sp=E0)

for i in range(N):

    dfr._find_pits()
    if dfr._number_of_pits > 0:
        lmb.run_one_step()

    z[grid.core_nodes] += U * dt

    ld.run_one_step(dt)
    fa.run_one_step()
    sp.run_one_step(dt)
Example #25
def test_composite_pits():
    """
    A test to ensure the component correctly handles cases where there are
    multiple pits, inset into each other.
    """
    mg = RasterModelGrid((10, 10))
    z = mg.add_field("topographic__elevation", mg.node_x.copy(), at="node")
    # a sloping plane
    # np.random.seed(seed=0)
    # z += np.random.rand(100)/10000.
    # punch one big hole
    z.reshape((10, 10))[3:8, 3:8] = 0.0
    # dig a couple of inset holes
    z[57] = -1.0
    z[44] = -2.0
    z[54] = -10.0

    # make an outlet
    z[71] = 0.9

    fr = FlowAccumulator(mg, flow_director="D8")
    lf = DepressionFinderAndRouter(mg)
    fr.run_one_step()
    lf.map_depressions()

    flow_sinks_target = np.zeros(100, dtype=bool)
    flow_sinks_target[mg.boundary_nodes] = True
    # no internal sinks now:
    assert_array_equal(mg.at_node["flow__sink_flag"], flow_sinks_target)

    # test conservation of mass:
    assert mg.at_node["drainage_area"].reshape(
        (10, 10))[1:-1, 1].sum() == approx(8.0**2)
    # ^all the core nodes

    # test the actual flow field:
    #    nA = np.array([  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,
    #                     8.,   8.,   7.,   6.,   5.,   4.,   3.,   2.,   1.,   0.,
    #                     1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   0.,
    #                     1.,   1.,   1.,   4.,   2.,   2.,   8.,   4.,   1.,   0.,
    #                     1.,   1.,   1.,   8.,   3.,  15.,   3.,   2.,   1.,   0.,
    #                     1.,   1.,   1.,  13.,  25.,   6.,   3.,   2.,   1.,   0.,
    #                     1.,   1.,   1.,  45.,   3.,   3.,   5.,   2.,   1.,   0.,
    #                    50.,  50.,  49.,   3.,   2.,   2.,   2.,   4.,   1.,   0.,
    #                     1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   0.,
    #                     0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.])
    nA = np.array([
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
        [8.0, 8.0, 7.0, 6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
        [1.0, 1.0, 1.0, 4.0, 2.0, 2.0, 6.0, 4.0, 1.0, 0.0],
        [1.0, 1.0, 1.0, 6.0, 3.0, 12.0, 3.0, 2.0, 1.0, 0.0],
        [1.0, 1.0, 1.0, 8.0, 20.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [1.0, 1.0, 1.0, 35.0, 5.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [50.0, 50.0, 49.0, 13.0, 10.0, 8.0, 6.0, 4.0, 1.0, 0.0],
        [1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
    ]).flatten()
    assert_array_equal(mg.at_node["drainage_area"], nA)

    # the lake code map:
    lc = np.array([
        [XX, XX, XX, XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, 57, 57, 57, 57, 57, XX, XX],
        [XX, XX, XX, 57, 57, 57, 57, 57, XX, XX],
        [XX, XX, XX, 57, 57, 57, 57, 57, XX, XX],
        [XX, XX, XX, 57, 57, 57, 57, 57, XX, XX],
        [XX, XX, XX, 57, 57, 57, 57, 57, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX, XX, XX, XX],
    ]).flatten()

    # test the remaining properties:
    assert lf.lake_outlets.size == 1
    assert lf.lake_outlets[0] == 72
    outlets_in_map = np.unique(lf.depression_outlet_map)
    assert outlets_in_map.size == 2
    assert outlets_in_map[1] == 72
    assert lf.number_of_lakes == 1
    assert lf.lake_codes[0] == 57
    assert_array_equal(lf.lake_map, lc)
    assert lf.lake_areas[0] == approx(25.0)
    assert lf.lake_volumes[0] == approx(63.0)
Example #26
def test_three_pits():
    """
    A test to ensure the component correctly handles cases where there are
    multiple pits.
    """
    mg = RasterModelGrid((10, 10))
    z = mg.add_field("topographic__elevation", mg.node_x.copy(), at="node")
    # a sloping plane
    # np.random.seed(seed=0)
    # z += np.random.rand(100)/10000.
    # punch some holes
    z[33] = 1.0
    z[43] = 1.0
    z[37] = 4.0
    z[74:76] = 1.0
    fr = FlowAccumulator(mg, flow_director="D8")
    lf = DepressionFinderAndRouter(mg)
    fr.run_one_step()
    lf.map_depressions()

    flow_sinks_target = np.zeros(100, dtype=bool)
    flow_sinks_target[mg.boundary_nodes] = True
    # no internal sinks now:
    assert_array_equal(mg.at_node["flow__sink_flag"], flow_sinks_target)

    # test conservation of mass:
    assert mg.at_node["drainage_area"].reshape(
        (10, 10))[1:-1, 1].sum() == approx(8.0**2)
    # ^all the core nodes

    # test the actual flow field:
    nA = np.array([
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
        [8.0, 8.0, 7.0, 6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [2.0, 2.0, 1.0, 1.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.0],
        [26.0, 26.0, 25.0, 15.0, 11.0, 10.0, 9.0, 8.0, 1.0, 0.0],
        [2.0, 2.0, 1.0, 9.0, 2.0, 1.0, 1.0, 1.0, 1.0, 0.0],
        [2.0, 2.0, 1.0, 1.0, 5.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 3.0, 2.0, 1.0, 0.0],
        [20.0, 20.0, 19.0, 18.0, 17.0, 12.0, 3.0, 2.0, 1.0, 0.0],
        [2.0, 2.0, 1.0, 1.0, 1.0, 1.0, 3.0, 2.0, 1.0, 0.0],
        [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
    ]).flatten()
    assert_array_equal(mg.at_node["drainage_area"], nA)

    # test a couple more properties:
    lc = np.empty(100, dtype=int)
    lc.fill(XX)
    lc[33] = 33
    lc[43] = 33
    lc[37] = 37
    lc[74:76] = 74
    assert_array_equal(lf.lake_map, lc)
    assert_array_equal(lf.lake_codes, [33, 37, 74])
    assert lf.number_of_lakes == 3
    assert lf.lake_areas == approx([2.0, 1.0, 2.0])
    assert lf.lake_volumes == approx([2.0, 2.0, 4.0])
Example #27
def test_degenerate_drainage():
    """
    This "hourglass" configuration should be one of the hardest to correctly
    re-route.
    """
    mg = RasterModelGrid((9, 5))
    z_init = mg.node_x.copy() * 0.0001 + 1.0
    lake_pits = np.array([7, 11, 12, 13, 17, 27, 31, 32, 33, 37])
    z_init[lake_pits] = -1.0
    z_init[22] = 0.0  # the common spill pt for both lakes
    z_init[21] = 0.1  # an adverse bump in the spillway
    z_init[20] = -0.2  # the spillway
    mg.add_field("topographic__elevation", z_init, at="node")

    fr = FlowAccumulator(mg, flow_director="D8")
    lf = DepressionFinderAndRouter(mg)
    fr.run_one_step()
    lf.map_depressions()

    #    correct_A = np.array([ 0.,   0.,   0.,   0.,   0.,
    #                           0.,   1.,   3.,   1.,   0.,
    #                           0.,   5.,   1.,   2.,   0.,
    #                           0.,   1.,  10.,   1.,   0.,
    #                          21.,  21.,   1.,   1.,   0.,
    #                           0.,   1.,   9.,   1.,   0.,
    #                           0.,   3.,   1.,   2.,   0.,
    #                           0.,   1.,   1.,   1.,   0.,
    #                           0.,   0.,   0.,   0.,   0.])

    correct_A = np.array([
        [0.0, 0.0, 0.0, 0.0, 0.0],
        [0.0, 1.0, 3.0, 1.0, 0.0],
        [0.0, 2.0, 4.0, 2.0, 0.0],
        [0.0, 1.0, 10.0, 1.0, 0.0],
        [21.0, 21.0, 1.0, 1.0, 0.0],
        [0.0, 1.0, 9.0, 1.0, 0.0],
        [0.0, 2.0, 2.0, 2.0, 0.0],
        [0.0, 1.0, 1.0, 1.0, 0.0],
        [0.0, 0.0, 0.0, 0.0, 0.0],
    ]).flatten()

    assert mg.at_node["drainage_area"] == approx(correct_A)
Example #28
def test_edge_draining():
    """
    This tests when the lake attempts to drain from an edge, where an issue
    is suspected.
    """
    # Create a 7x7 test grid with a well defined hole in it, AT THE EDGE.
    mg = RasterModelGrid((7, 7))

    z = mg.node_x.copy()
    guard_sides = np.concatenate((np.arange(7, 14), np.arange(35, 42)))
    edges = np.concatenate((np.arange(7), np.arange(42, 49)))
    hole_here = np.array(([15, 16, 22, 23, 29, 30]))
    z[guard_sides] = z[13]
    z[edges] = -2.0  # force flow outwards from the tops of the guards
    z[hole_here] = -1.0

    A_new = np.array([
        [0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
        [0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
        [15.0, 5.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [0.0, 10.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [0.0, 1.0, 4.0, 3.0, 2.0, 1.0, 0.0],
        [0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
        [0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0],
    ]).flatten()

    depr_outlet_target = np.array([
        [XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX],
        [XX, 14, 14, XX, XX, XX, XX],
        [XX, 14, 14, XX, XX, XX, XX],
        [XX, 14, 14, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX],
        [XX, XX, XX, XX, XX, XX, XX],
    ]).flatten()

    mg.add_field("topographic__elevation", z, at="node", units="-")

    fr = FlowAccumulator(mg, flow_director="D8")
    lf = DepressionFinderAndRouter(mg)

    fr.run_one_step()
    lf.map_depressions()
    assert mg.at_node["drainage_area"] == approx(A_new)
    assert lf.depression_outlet_map == approx(depr_outlet_target)
Example #29
def get_ordered_cells_for_soil_moisture(grid, outlet_id=None):
    """
    Runs Landlab's FlowRouter and DepressionFinderAndRouter to
    route flow. Also orders the cells in the descending order of
    channel length (upstream cell order).
    
    Parameters:
    ==========    
    grid: grid object
        RasterModelGrid
    outlet_id: int (Optional)
        Outlet id to be set

    Returns:
    =======
    ordered_cells: np.array(dtype=int)
        cells ordered in descending order of channel length
    grid: grid object
        updated RasterModelGrid
    """

    if outlet_id is None:
        outlet_id = np.argmin(grid.at_node['topographic__elevation'])
    outlet = grid.set_watershed_boundary_condition_outlet_id(outlet_id,
        grid.at_node['topographic__elevation'], nodata_value=-9999.,)
    grid.set_closed_boundaries_at_grid_edges(True, True, True, True)
    flw_r = FlowRouter(grid)
    flw_r.run_one_step()
    df = DepressionFinderAndRouter(grid)
    df.map_depressions()
    r = grid.at_node['flow__receiver_node'][grid.node_at_core_cell]
    R = np.zeros(grid.number_of_nodes, dtype=int)
    R[grid.node_at_core_cell] = r
    channel_length = np.zeros(grid.number_of_nodes, dtype=int)
    # Compute channel lengths for each node in the wtrshd (node_at_core_cell)
    for node in grid.node_at_core_cell:
        node_c = node.copy()
        while R[node_c] != node_c:
            channel_length[node] += 1
            node_c = R[node_c]
    grid.at_node['channel_length'] = channel_length
    # Sorting nodes in the ascending order of channel length
    # NOTE: length of ordered_nodes = grid.number_of_core_cells
    ordered_nodes = grid.node_at_core_cell[
        np.argsort(channel_length[grid.node_at_core_cell])]
    # Sorting nodes in the descending order of channel length
    ordered_nodes = ordered_nodes[::-1]
    dd = 1    # switch that keeps the while loop running
    count_loops = 0  # number of passes the while loop makes
    while dd:
        dd = 0
        count_loops += 1
        sorted_order = list(ordered_nodes)
        alr_counted_ = []
        for node_ in sorted_order:
            donors = []
            donors = list(grid.node_at_core_cell[np.where(r==node_)[0]])
            if len(donors) != 0:
                for k in range(0, len(donors)):
                    if donors[k] not in alr_counted_:
                        sorted_order.insert(donors[k], sorted_order.pop(sorted_order.index(node_)))
                        dd = 1    
            alr_counted_.append(node_)
        ordered_nodes = np.array(sorted_order)
    ordered_cells = grid.cell_at_node[ordered_nodes]
    return ordered_cells, grid
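A minimal call sketch for the function above (assumes grid is a RasterModelGrid that already carries a 'topographic__elevation' node field; the outlet id of 10 is purely illustrative):

# let the function pick the lowest node as the outlet
ordered_cells, grid = get_ordered_cells_for_soil_moisture(grid)

# or supply an explicit outlet node id
ordered_cells, grid = get_ordered_cells_for_soil_moisture(grid, outlet_id=10)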
Example #30
def test_matches_bedrock_alluvial_solution():
    """
    Test that model matches the bedrock-alluvial analytical solution
    for slope/area relationship at steady state:
    S=((U * v_s * (1 - F_f)) / (K_sed * A^m) + U / (K_br * A^m))^(1/n).

    Also test that the soil depth everywhere matches the bedrock-alluvial
    analytical solution at steady state:
    H = -H_star * ln(1 - (v_s / (K_sed / (K_br * (1 - F_f)) + v_s))).
    """

    #set up a 5x5 grid with one open outlet node and low initial elevations.
    nr = 5
    nc = 5
    mg = RasterModelGrid((nr, nc), 10.0)

    z = mg.add_zeros('node', 'topographic__elevation')
    br = mg.add_zeros('node', 'bedrock__elevation')
    soil = mg.add_zeros('node', 'soil__depth')

    mg['node']['topographic__elevation'] += (
        mg.node_y / 100000 + mg.node_x / 100000 +
        np.random.rand(len(mg.node_y)) / 10000)
    mg.set_closed_boundaries_at_grid_edges(bottom_is_closed=True,
                                           left_is_closed=True,
                                           right_is_closed=True,
                                           top_is_closed=True)
    mg.set_watershed_boundary_condition_outlet_id(
        0, mg['node']['topographic__elevation'], -9999.)
    soil[:] += 0.  #initial condition of no soil depth.
    br[:] = z[:]
    z[:] += soil[:]

    #Instantiate DepressionFinderAndRouter
    df = DepressionFinderAndRouter(mg)

    # Create a D8 flow handler
    fa = FlowAccumulator(mg,
                         flow_director='D8',
                         depression_finder='DepressionFinderAndRouter')

    # Parameter values for detachment-limited test
    K_br = 0.02
    K_sed = 0.02
    U = 0.0001
    dt = 1.0
    F_f = 0.2  # 20% of eroded material leaves the system as permanently suspended fines
    m_sp = 0.5
    n_sp = 1.0
    v_s = 0.25
    H_star = 0.1

    # Instantiate the Space component...
    sp = Space(mg,
               K_sed=K_sed,
               K_br=K_br,
               F_f=F_f,
               phi=0.0,
               H_star=H_star,
               v_s=v_s,
               m_sp=m_sp,
               n_sp=n_sp,
               sp_crit_sed=0,
               sp_crit_br=0)

    # ... and run it to steady state (10000x1-year timesteps).
    for i in range(10000):
        fa.run_one_step()
        flooded = np.where(df.flood_status == 3)[0]
        sp.run_one_step(dt=dt, flooded_nodes=flooded)
        br[mg.core_nodes] += U * dt  #m
        soil[0] = 0.  #enforce 0 soil depth at boundary to keep lowering steady
        z[:] = br[:] + soil[:]

    #compare numerical and analytical slope solutions
    num_slope = mg.at_node['topographic__steepest_slope'][mg.core_nodes]
    analytical_slope = (np.power(
        ((U * v_s * (1 - F_f)) /
         (K_sed * np.power(mg.at_node['drainage_area'][mg.core_nodes], m_sp)))
        +
        (U /
         (K_br * np.power(mg.at_node['drainage_area'][mg.core_nodes], m_sp))),
        1. / n_sp))

    #test for match with analytical slope-area relationship
    testing.assert_array_almost_equal(
        num_slope,
        analytical_slope,
        decimal=8,
        err_msg='SPACE bedrock-alluvial slope-area test failed',
        verbose=True)

    #compare numerical and analytical soil depth solutions
    num_h = mg.at_node['soil__depth'][mg.core_nodes]
    analytical_h = -H_star * np.log(1 - (v_s / (K_sed / (K_br *
                                                         (1 - F_f)) + v_s)))

    #test for match with analytical sediment depth
    testing.assert_array_almost_equal(
        num_h,
        analytical_h,
        decimal=5,
        err_msg='SPACE bedrock-alluvial soil thickness test failed',
        verbose=True)
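As a rough numeric check of the analytical soil depth quoted in the docstring (a sketch, not part of the original test; the parameter values are the ones set above):

import numpy as np

H_star, v_s, K_sed, K_br, F_f = 0.1, 0.25, 0.02, 0.02, 0.2
H = -H_star * np.log(1 - (v_s / (K_sed / (K_br * (1 - F_f)) + v_s)))
# H is approximately 0.0182 m, independent of drainage area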