Example #1
def test_error_for_to_many_with_depression():
    """Check that an error is thrown when to_many methods started DF."""

    mg0 = RasterModelGrid((10, 10), spacing=(1, 1))
    z0 = mg0.add_field("topographic__elevation",
                       mg0.node_x**2 + mg0.node_y**2,
                       at="node")

    mg1 = RasterModelGrid((10, 10), spacing=(1, 1))
    z1 = mg1.add_field("topographic__elevation",
                       mg1.node_x**2 + mg1.node_y**2,
                       at="node")

    with pytest.raises(NotImplementedError):
        FlowAccumulator(mg0,
                        flow_director="MFD",
                        depression_finder="DepressionFinderAndRouter")
    with pytest.raises(NotImplementedError):
        FlowAccumulator(mg0,
                        flow_director="DINF",
                        depression_finder="DepressionFinderAndRouter")

    fa0 = FlowAccumulator(mg0, flow_director="MFD")
    fa0.run_one_step()
    with pytest.raises(NotImplementedError):
        DepressionFinderAndRouter(mg0)

    fa1 = FlowAccumulator(mg1, flow_director="DINF")
    fa1.run_one_step()
    with pytest.raises(NotImplementedError):
        DepressionFinderAndRouter(mg1)
Example #2
def test_error_for_to_many_with_depression():
    """Check that an error is raised when a to-many flow director is combined
    with the DepressionFinderAndRouter."""

    mg0 = RasterModelGrid((10, 10), spacing=(1, 1))
    z0 = mg0.add_field('topographic__elevation',
                       mg0.node_x**2 + mg0.node_y**2,
                       at='node')

    mg1 = RasterModelGrid((10, 10), spacing=(1, 1))
    z1 = mg1.add_field('topographic__elevation',
                       mg1.node_x**2 + mg1.node_y**2,
                       at='node')

    with pytest.raises(ValueError):
        FlowAccumulator(mg0,
                        flow_director='MFD',
                        depression_finder='DepressionFinderAndRouter')
    with pytest.raises(ValueError):
        FlowAccumulator(mg0,
                        flow_director='DINF',
                        depression_finder='DepressionFinderAndRouter')

    fa0 = FlowAccumulator(mg0, flow_director='MFD')
    fa0.run_one_step()
    with pytest.raises(ValueError):
        DepressionFinderAndRouter(mg0)

    fa1 = FlowAccumulator(mg1, flow_director='DINF')
    fa1.run_one_step()
    with pytest.raises(ValueError):
        DepressionFinderAndRouter(mg1)
Example #3
def test_degenerate_drainage():
    """
    This "hourglass" configuration should be one of the hardest to correctly
    re-route.
    """
    mg = RasterModelGrid(9,5)
    z_init = mg.node_x.copy()*0.0001 + 1.
    lake_pits = np.array([7, 11, 12, 13, 17, 27, 31, 32, 33, 37])
    z_init[lake_pits] = -1.
    z_init[22] = 0.  # the common spill pt for both lakes
    z_init[21] = 0.1  # an adverse bump in the spillway
    z_init[20] = -0.2  # the spillway
    z = mg.add_field('node', 'topographic__elevation', z_init)

    fr = FlowRouter(mg)
    lf = DepressionFinderAndRouter(mg)
    fr.route_flow()
    lf.map_depressions()

    correct_A = np.array([ 0.,   0.,   0.,   0.,   0.,
                           0.,   1.,   1.,   1.,   0.,
                           0.,   4.,   1.,   3.,   0.,
                           0.,   1.,  10.,   1.,   0.,
                          21.,  21.,   1.,   1.,   0.,
                           0.,   1.,   9.,   1.,   0.,
                           0.,   4.,   1.,   3.,   0.,
                           0.,   1.,   1.,   1.,   0.,
                           0.,   0.,   0.,   0.,   0.])
    
    thelake = np.sort(np.concatenate((lake_pits, [22])))  # lake nodes incl. the spill point

    assert_array_almost_equal(mg.at_node['drainage_area'], correct_A)
Example #4
def test_three_pits():
    """
    A test to ensure the component correctly handles cases where there are
    multiple pits.
    """
    mg = RasterModelGrid(10,10,1.)
    z = mg.add_field('node', 'topographic__elevation', mg.node_x.copy())
    # a sloping plane
    #np.random.seed(seed=0)
    #z += np.random.rand(100)/10000.
    # punch some holes
    z[33] = 1.
    z[43] = 1.
    z[37] = 4.
    z[74:76] = 1.
    fr = FlowRouter(mg)
    lf = DepressionFinderAndRouter(mg)
    fr.route_flow()
    lf.map_depressions()
    
    flow_sinks_target = np.zeros(100, dtype=bool)
    flow_sinks_target[mg.boundary_nodes] = True
    # no internal sinks now:
    assert_array_equal(mg.at_node['flow_sinks'], flow_sinks_target)
    
    # test conservation of mass:
    assert_almost_equal(
        mg.at_node['drainage_area'].reshape((10, 10))[1:-1, 1].sum(), 8.**2)
    # ^all the core nodes
    
    # test the actual flow field:
    nA = np.array([  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,
                     8.,   8.,   7.,   6.,   5.,   4.,   3.,   2.,   1.,   0.,
                     2.,   2.,   1.,   1.,   2.,   1.,   1.,   1.,   1.,   0.,
                    26.,  26.,  25.,  15.,  11.,  10.,   9.,   8.,   1.,   0.,
                     2.,   2.,   1.,   9.,   2.,   1.,   1.,   1.,   1.,   0.,
                     2.,   2.,   1.,   1.,   5.,   4.,   3.,   2.,   1.,   0.,
                     2.,   2.,   1.,   1.,   1.,   1.,   3.,   2.,   1.,   0.,
                    20.,  20.,  19.,  18.,  17.,  12.,   3.,   2.,   1.,   0.,
                     2.,   2.,   1.,   1.,   1.,   1.,   3.,   2.,   1.,   0.,
                     0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.])
    assert_array_equal(mg.at_node['drainage_area'], nA)
    
    #test a couple more properties:
    lc = np.empty(100, dtype=int)
    lc.fill(XX)  # XX: module-level sentinel for nodes not in any lake
    lc[33] = 33
    lc[43] = 33
    lc[37] = 37
    lc[74:76] = 74
    assert_array_equal(lf.lake_map, lc)
    assert_array_equal(lf.lake_codes, [33, 37, 74])
    assert_equal(lf.number_of_lakes, 3)
    assert_array_almost_equal(lf.lake_areas, [2., 1., 2.])
    assert_array_almost_equal(lf.lake_volumes, [2., 2., 4.])
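    # Added sketch (not in the original test): as a rough cross-check of the
    # properties above, each lake's volume should equal its summed depression
    # depth, assuming unit cell area and that 'depression__depth' stores the
    # water depth at every flooded node.
    for code, volume in zip(lf.lake_codes, lf.lake_volumes):
        lake_nodes = np.where(lf.lake_map == code)[0]
        assert_almost_equal(
            mg.at_node['depression__depth'][lake_nodes].sum(), volume)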
Example #5
def test_depression_finder_as_instance():
    mg = RasterModelGrid((5, 5), spacing=(1, 1))
    z = mg.add_field("topographic__elevation",
                     mg.node_x + mg.node_y,
                     at="node")
    df = DepressionFinderAndRouter(mg)
    fa = FlowAccumulator(mg, flow_director="D8", depression_finder=df)
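The test above only checks that an already-instantiated DepressionFinderAndRouter is accepted; a minimal end-to-end sketch of the same pattern (a sketch only, assuming the standard FlowAccumulator workflow shown elsewhere on this page) is:

from landlab import RasterModelGrid
from landlab.components import DepressionFinderAndRouter, FlowAccumulator

grid = RasterModelGrid((5, 5))
grid.add_field("topographic__elevation", grid.node_x + grid.node_y, at="node")
finder = DepressionFinderAndRouter(grid)
accumulator = FlowAccumulator(grid, flow_director="D8", depression_finder=finder)
accumulator.run_one_step()  # fills 'drainage_area' among other node fields
print(grid.at_node["drainage_area"].reshape((5, 5)))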
Example #6
def test_instantiated_depression_finder_with_kwargs():
    mg = RasterModelGrid((5, 5), xy_spacing=(1, 1))
    mg.add_field("topographic__elevation", mg.node_x + mg.node_y, at="node")
    df = DepressionFinderAndRouter(mg)
    with pytest.raises(ValueError):
        LossyFlowAccumulator(
            mg, flow_director="D8", depression_finder=df, routing="eggs"
        )
Example #7
def test_lake_mapper():
    """
    Create a test grid and run a series of tests.
    """
    # Make a test grid
    rmg = create_test_grid()

    # Instantiate a lake mapper
    # (Note that we don't need to send it an input file name, because our grid
    # already has a topographic__elevation field)
    lm = DepressionFinderAndRouter(rmg)
    
    # Run it on our test grid
    lm.map_depressions()
    
    # Run tests
    check_fields(rmg)
    check_array_values(rmg, lm)
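The helpers create_test_grid, check_fields and check_array_values live elsewhere in the test module; a self-contained sketch of the same workflow (assumed grid and checks, not the originals) is:

from landlab import RasterModelGrid
from landlab.components import DepressionFinderAndRouter, FlowAccumulator

rmg = RasterModelGrid((7, 7))
z = rmg.add_zeros("topographic__elevation", at="node")
z[:] = rmg.node_x + rmg.node_y
z[24] -= 5.0  # a single interior pit

FlowAccumulator(rmg, flow_director="D8").run_one_step()
lm = DepressionFinderAndRouter(rmg)
lm.map_depressions()

assert "depression__depth" in rmg.at_node  # field created by the mapper
assert rmg.at_node["depression__depth"][24] > 0.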
Example #8
    def initialize(self, input_stream=None):
        """
        The BMI-style initialize method takes an optional input_stream
        parameter, which may be either a ModelParameterDictionary object or
        an input stream from which a ModelParameterDictionary can read values.
        """
        # Create a ModelParameterDictionary for the inputs
        if input_stream is None:
            inputs = None
        elif isinstance(input_stream, ModelParameterDictionary):
            inputs = input_stream
        else:
            inputs = ModelParameterDictionary(input_stream)

        # Make sure the grid includes elevation data. This means either:
        #  1. The grid has a node field called 'topographic__elevation', or
        #  2. The input file has an item called 'ELEVATION_FIELD_NAME' *and*
        #     a field by this name exists in the grid.
        try:
            self._elev = self._grid.at_node['topographic__elevation']
        except FieldError:
            try:
                self.topo_field_name = inputs.read_string('ELEVATION_' +
                                                          'FIELD_NAME')
            except AttributeError:
                print('Error: Because your grid does not have a node field')
                print('called "topographic__elevation", you need to pass the')
                print('name of a text input file or ModelParameterDictionary,')
                print('and this file or dictionary needs to include the name')
                print('of another field in your grid that contains your')
                print('elevation data.')
                raise AttributeError
            except MissingKeyError:
                print('Error: Because your grid does not have a node field')
                print('called "topographic__elevation", your input file (or')
                print('ModelParameterDictionary) must include an entry with')
                print('the key "ELEVATION_FIELD_NAME", which gives the name')
                print('of a field in your grid that contains your elevation')
                print('data.')
                raise MissingKeyError('ELEVATION_FIELD_NAME')
            try:
                self._elev = self._grid.at_node[self.topo_field_name]
            except FieldError:
                print('Your grid does not seem to have a node field called',
                      self.topo_field_name)
                raise
        else:
            self.topo_field_name = 'topographic__elevation'
        # create the only new output field:
        self.sed_fill_depth = self._grid.add_zeros('node',
                                                   'sediment_fill__depth')

        self._lf = DepressionFinderAndRouter(self._grid, routing=self._routing)
        self._fr = FlowRouter(self._grid)
Example #9
def test_specifying_routing_method_wrong():
    """Test that an incompatible routing method raises an error with DepressionFinderAndRouter."""
    mg = RasterModelGrid((10, 10), xy_spacing=(1, 1))
    mg.add_field("topographic__elevation", mg.node_x + mg.node_y, at="node")

    with pytest.raises(ValueError):
        LossyFlowAccumulator(
            mg, flow_director="D4", depression_finder="DepressionFinderAndRouter"
        )

    df = DepressionFinderAndRouter(mg)
    with pytest.raises(ValueError):
        LossyFlowAccumulator(mg, flow_director="D4", depression_finder=df)
Example #10
def test_specifying_routing_method_wrong():
    """Test that an incompatible routing method raises an error with DepressionFinderAndRouter."""
    mg = RasterModelGrid((10, 10), spacing=(1, 1))
    z = mg.add_field('topographic__elevation',
                     mg.node_x + mg.node_y,
                     at='node')

    with pytest.raises(ValueError):
        FlowAccumulator(mg,
                        flow_director='D4',
                        depression_finder='DepressionFinderAndRouter')

    df = DepressionFinderAndRouter(mg)
    with pytest.raises(ValueError):
        FlowAccumulator(mg, flow_director='D4', depression_finder=df)
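Both versions of the test assert that a D4 flow director cannot be paired with the depression finder's default D8 routing; a minimal sketch of the compatible configuration (assuming the routing keyword shown in the SinkFiller example below) is:

from landlab import RasterModelGrid
from landlab.components import DepressionFinderAndRouter, FlowAccumulator

mg = RasterModelGrid((10, 10))
mg.add_field("topographic__elevation", mg.node_x + mg.node_y, at="node")
df = DepressionFinderAndRouter(mg, routing="D4")  # match the D4 director
fa = FlowAccumulator(mg, flow_director="D4", depression_finder=df)
fa.run_one_step()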
Example #11
class SinkFiller(Component):
    """
    This component identifies depressions in a topographic surface, then fills
    them in in the topography.  No attempt is made to conserve sediment mass.
    User may specify whether the holes should be filled to flat, or with a
    gradient downwards towards the depression outlet. The gradient can be
    spatially variable, and is chosen to not reverse any drainage directions
    at the perimeter of each lake.
    """
    _name = 'HoleFiller'

    _input_var_names = set(['topographic__elevation',
                            ])

    _output_var_names = set(['topographic__elevation',
                             'sediment_fill__depth',
                             ])

    _var_units = {'topographic__elevation': 'm',
                  'sediment_fill__depth': 'm',
                  }

    _var_mapping = {'topographic__elevation': 'node',
                    'sediment_fill__depth': 'node',
                    }

    _var_doc = {'topographic__elevation': 'Surface topographic elevation',
                'sediment_fill__depth': 'Depth of sediment added at each '
                                        'node',
                }

    def __init__(self, grid, input_stream=None, current_time=0.,
                 routing='D8'):
        """
        Constructor stores the grid and calls the initialize
        method.

        Parameters
        ----------
        grid : RasterModelGrid
            A landlab RasterModelGrid.
        input_stream : str, file_like, or ModelParameterDictionary, optional
            ModelParameterDictionary that holds the input parameters.
        current_time : float, optional
            The current time for the mapper.
        routing : 'D8' or 'D4' (optional)
            If grid is a raster type, controls whether fill connectivity can
            occur on diagonals ('D8', default), or only orthogonally ('D4').
            Has no effect if grid is not a raster.
        """
        self._grid = grid
        if routing != 'D8':
            assert routing == 'D4'
        self._routing = routing
        if (isinstance(self._grid, landlab.grid.raster.RasterModelGrid) and
                routing == 'D8'):
            self._D8 = True
            self.num_nbrs = 8
        else:
            self._D8 = False  # useful shorthand for this test we do a lot
            if isinstance(self._grid, landlab.grid.raster.RasterModelGrid):
                self.num_nbrs = 4
        self.initialize(input_stream)

    def initialize(self, input_stream=None):
        """
        The BMI-style initialize method takes an optional input_stream
        parameter, which may be either a ModelParameterDictionary object or
        an input stream from which a ModelParameterDictionary can read values.
        """
        # Create a ModelParameterDictionary for the inputs
        if input_stream is None:
            inputs = None
        elif isinstance(input_stream, ModelParameterDictionary):
            inputs = input_stream
        else:
            inputs = ModelParameterDictionary(input_stream)

        # Make sure the grid includes elevation data. This means either:
        #  1. The grid has a node field called 'topographic__elevation', or
        #  2. The input file has an item called 'ELEVATION_FIELD_NAME' *and*
        #     a field by this name exists in the grid.
        try:
            self._elev = self._grid.at_node['topographic__elevation']
        except FieldError:
            try:
                self.topo_field_name = inputs.read_string('ELEVATION_' +
                                                          'FIELD_NAME')
            except AttributeError:
                print('Error: Because your grid does not have a node field')
                print('called "topographic__elevation", you need to pass the')
                print('name of a text input file or ModelParameterDictionary,')
                print('and this file or dictionary needs to include the name')
                print('of another field in your grid that contains your')
                print('elevation data.')
                raise AttributeError
            except MissingKeyError:
                print('Error: Because your grid does not have a node field')
                print('called "topographic__elevation", your input file (or')
                print('ModelParameterDictionary) must include an entry with')
                print('the key "ELEVATION_FIELD_NAME", which gives the name')
                print('of a field in your grid that contains your elevation')
                print('data.')
                raise MissingKeyError('ELEVATION_FIELD_NAME')
            try:
                self._elev = self._grid.at_node[self.topo_field_name]
            except FieldError:
                print('Your grid does not seem to have a node field called',
                      self.topo_field_name)
                raise
        else:
            self.topo_field_name = 'topographic__elevation'
        # create the only new output field:
        self.sed_fill_depth = self._grid.add_zeros('node',
                                                   'sediment_fill__depth')

        self._lf = DepressionFinderAndRouter(self._grid, routing=self._routing)
        self._fr = FlowRouter(self._grid)

    def fill_pits(self, apply_slope=None):
        """
        This is the main method. Call it to fill depressions in a starting
        topography.

        Parameters
        ----------
        apply_slope : bool
            Whether to leave the filled surface flat (default), or apply a
            gradient downwards through all lake nodes towards the outlet.
            A test is performed to ensure applying this slope will not alter
            the drainage structure at the edge of the filled region (i.e.,
            that we are not accidentally reversing the flow direction far
            from the outlet.)

        Return fields
        -------------
        'topographic__elevation' : the updated elevations
        'sediment_fill__depth' : the depth of sediment added at each node
        """
        self.original_elev = self._elev.copy()
        # We need this, as we'll have to do ALL this again if we manage
        # to jack the elevs too high in one of the "subsidiary" lakes.
        # We're going to implement the lake_mapper component to do the heavy
        # lifting here, then delete its fields. This means we first need to
        # test if these fields already exist, in which case, we should *not*
        # delete them!
        existing_fields = {}
        spurious_fields = set()
        set_of_outputs = self._lf.output_var_names | self._fr.output_var_names
        try:
            set_of_outputs.remove(self.topo_field_name)
        except KeyError:
            pass
        for field in set_of_outputs:
            try:
                existing_fields[field] = self._grid.at_node[field].copy()
            except FieldError:  # not there; good!
                spurious_fields.add(field)

        self._fr.route_flow(method=self._routing)
        self._lf.map_depressions(pits=self._grid.at_node['flow_sinks'],
                                 reroute_flow=True)
        # add the depression depths to get up to flat:
        self._elev += self._grid.at_node['depression__depth']
        # if apply_slope is None, we're now done! But if not...
        if apply_slope:
            # new way of doing this - use the upstream structure! Should be
            # both more general and more efficient
            for (outlet_node, lake_code) in zip(self._lf.lake_outlets,
                                                self._lf.lake_codes):
                lake_nodes = np.where(self._lf.lake_map == lake_code)[0]
                lake_perim = self.get_lake_ext_margin(lake_nodes)
                perim_elevs = self._elev[lake_perim]
                out_elev = self._elev[outlet_node]
                lowest_elev_perim = perim_elevs[perim_elevs != out_elev].min()
                # note we exclude the outlet node
                elev_increment = ((lowest_elev_perim-self._elev[outlet_node]) /
                                  (lake_nodes.size + 2.))
                assert elev_increment > 0.
                all_ordering = self._grid.at_node['upstream_node_order']
                upstream_order_bool = np.in1d(all_ordering, lake_nodes,
                                              assume_unique=True)
                lake_upstream_order = all_ordering[upstream_order_bool]
                argsort_lake = np.argsort(lake_upstream_order)
                elevs_to_add = (np.arange(lake_nodes.size, dtype=float) +
                                1.) * elev_increment
                sorted_elevs_to_add = elevs_to_add[argsort_lake]
                self._elev[lake_nodes] += sorted_elevs_to_add
        # now put back any fields that were present initially, and wipe the
        # rest:
        for delete_me in spurious_fields:
            self._grid.delete_field('node', delete_me)
        for update_me in existing_fields:
            self._grid.at_node[update_me] = existing_fields[update_me]
        # fill the output field
        self.sed_fill_depth[:] = self._elev - self.original_elev

    def fill_pits_old(self, apply_slope=None):
        """

        .. deprecated:: 0.1.38
            Use :func:`fill_pits` instead.

        This is the main method. Call it to fill depressions in a starting
        topography.

        Parameters
        ----------
        apply_slope : None, bool, or float
            If a float is provided this is the slope of the surface down
            towards the lake outlet. Supply a small positive number, e.g.,
            1.e-5 (or True, to use this default value).
            A test is performed to ensure applying this slope will not alter
            the drainage structure at the edge of the filled region (i.e.,
            that we are not accidentally reversing the flow direction far
            from the outlet.) The component will automatically decrease the
            (supplied or default) gradient a number of times to try to
            accommodate this, but will eventually raise an OverflowError
            if it can't deal with it. If you pass True, the method will use
            the default value of 1.e-5.

        Return fields
        -------------
        'topographic__elevation' : the updated elevations
        'sediment_fill__depth' : the depth of sediment added at each node
        """
        self.original_elev = self._elev.copy()
        # We need this, as we'll have to do ALL this again if we manage
        # to jack the elevs too high in one of the "subsidiary" lakes.
        # We're going to implement the lake_mapper component to do the heavy
        # lifting here, then delete its fields. This means we first need to
        # test if these fields already exist, in which case, we should *not*
        # delete them!
        existing_fields = {}
        spurious_fields = set()
        set_of_outputs = self._lf.output_var_names | self._fr.output_var_names
        try:
            set_of_outputs.remove(self.topo_field_name)
        except KeyError:
            pass
        for field in set_of_outputs:
            try:
                existing_fields[field] = self._grid.at_node[field].copy()
            except FieldError:  # not there; good!
                spurious_fields.add(field)

        self._fr.route_flow(method=self._routing)
        self._lf.map_depressions(pits=self._grid.at_node['flow_sinks'],
                                 reroute_flow=False)
        # add the depression depths to get up to flat:
        self._elev += self._grid.at_node['depression__depth']
        # if apply_slope is None, we're now done! But if not...
        if apply_slope is True:
            apply_slope = DEFAULT_SLOPE
        elif type(apply_slope) in (float, int):
            assert apply_slope >= 0.
        if apply_slope:
            # this isn't very efficient, but OK as we're only running this
            # code ONCE in almost all use cases
            sublake = False
            unstable = True
            stability_increment = 0
            self.lake_nodes_treated = np.array([], dtype=int)
            while unstable:
                while 1:
                    for (outlet_node, lake_code) in zip(self._lf.lake_outlets,
                                                        self._lf.lake_codes):
                        self.apply_slope_current_lake(apply_slope, outlet_node,
                                                      lake_code, sublake)
                    # Call the mapper again here. Bail out if no core pits are
                    # found.
                    # This is necessary as there are some configs where adding
                    # the slope could create subsidiary pits in the topo
                    self._lf.map_depressions(pits=None, reroute_flow=False)
                    if len(self._lf.lake_outlets) == 0.:
                        break
                    self._elev += self._grid.at_node['depression__depth']
                    sublake = True
                    self.lake_nodes_treated = np.array([], dtype=int)
                # final test that all lakes are not reversing flow dirs
                all_lakes = np.where(self._lf.flood_status <
                                     BAD_INDEX_VALUE)[0]
                unstable = self.drainage_directions_change(all_lakes,
                                                           self.original_elev,
                                                           self._elev)
                if unstable:
                    apply_slope *= 0.1
                    sublake = False
                    self.lake_nodes_treated = np.array([], dtype=int)
                    self._elev[:] = self.original_elev  # put back init conds
                    stability_increment += 1
                    if stability_increment == 10:
                        raise OverflowError('Filler could not find a stable ' +
                                            'condition with a sloping ' +
                                            'surface!')
        # now put back any fields that were present initially, and wipe the
        # rest:
        for delete_me in spurious_fields:
            self._grid.delete_field('node', delete_me)
        for update_me in existing_fields:
            self._grid.at_node[update_me] = existing_fields[update_me]
        # fill the output field
        self.sed_fill_depth[:] = self._elev - self.original_elev

    def add_slopes(self, slope, outlet_node, lake_code):
        """
        Assuming you have already run the lake_mapper, adds an incline towards
        the outlet to the nodes in the lake.
        """
        new_elevs = self._elev.copy()
        outlet_coord = (self._grid.node_x[outlet_node],
                        self._grid.node_y[outlet_node])
        lake_nodes = np.where(self._lf.lake_map == lake_code)[0]
        lake_nodes = np.setdiff1d(lake_nodes, self.lake_nodes_treated)
        lake_ext_margin = self.get_lake_ext_margin(lake_nodes)
        d = self._grid.get_distances_of_nodes_to_point(outlet_coord,
                                                       node_subset=lake_nodes)
        add_vals = slope*d
        new_elevs[lake_nodes] += add_vals
        self.lake_nodes_treated = np.union1d(self.lake_nodes_treated,
                                             lake_nodes)
        return new_elevs, lake_nodes

    def get_lake_ext_margin(self, lake_nodes):
        """
        Returns the nodes forming the external margin of the lake, honoring
        the *routing* method (D4/D8) if applicable.
        """
        if self._D8 is True:
            all_poss = np.union1d(self._grid.get_active_neighbors_at_node(lake_nodes),
                                  self._grid.get_diagonal_list(lake_nodes))
        else:
            all_poss = np.unique(self._grid.get_active_neighbors_at_node(lake_nodes))
        lake_ext_edge = np.setdiff1d(all_poss, lake_nodes)
        return lake_ext_edge[lake_ext_edge != BAD_INDEX_VALUE]

    def get_lake_int_margin(self, lake_nodes, lake_ext_edge):
        """
        Returns the nodes forming the internal margin of the lake, honoring
        the *routing* method (D4/D8) if applicable.
        """
        lee = lake_ext_edge
        if self._D8 is True:
            all_poss_int = np.union1d(self._grid.get_active_neighbors_at_node(lee),
                                      self._grid.get_diagonal_list(lee))
        else:
            all_poss_int = np.unique(self._grid.get_active_neighbors_at_node(lee))
        lake_int_edge = np.intersect1d(all_poss_int, lake_nodes)
        return lake_int_edge[lake_int_edge != BAD_INDEX_VALUE]

    def apply_slope_current_lake(self, apply_slope, outlet_node, lake_code,
                                 sublake):
        """
        Wraps the add_slopes method to allow handling of conditions where the
        drainage structure would be changed or we're dealing with a sublake.
        """
        while 1:
            starting_elevs = self._elev.copy()
            self._elev[:], lake_nodes = self.add_slopes(apply_slope,
                                                        outlet_node, lake_code)
            ext_edge = self.get_lake_ext_margin(lake_nodes)
            if sublake:
                break
            else:
                if not self.drainage_directions_change(lake_nodes,
                                                       starting_elevs,
                                                       self._elev):
                    break
                else:
                    # put the elevs back...
                    self._elev[lake_nodes] = starting_elevs[lake_nodes]
                    # the slope was too big. Reduce it.
                    apply_slope *= 0.1
        # if we get here, either sublake, or drainage dirs are stable

    def drainage_directions_change(self, lake_nodes, old_elevs, new_elevs):
        """
        True if the drainage structure at lake margin changes, False otherwise.
        """
        ext_edge = self.get_lake_ext_margin(lake_nodes)
        if self._D8:
            edge_neighbors = np.hstack((self._grid.get_active_neighbors_at_node(ext_edge),
                                        self._grid.get_diagonal_list(
                                            ext_edge)))
        else:
            edge_neighbors = self._grid.get_active_neighbors_at_node(ext_edge).copy()
        edge_neighbors[edge_neighbors == BAD_INDEX_VALUE] = -1
        # ^value irrelevant
        old_neighbor_elevs = old_elevs[edge_neighbors]
        new_neighbor_elevs = new_elevs[edge_neighbors]
        # enforce the "don't change drainage direction" condition:
        edge_elevs = old_elevs[ext_edge].reshape((ext_edge.size, 1))
        cond = np.allclose((edge_elevs >= old_neighbor_elevs),
                           (edge_elevs >= new_neighbor_elevs))
        # if True, we're good, the tilting didn't mess with the fr
        return not cond
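A minimal usage sketch of the SinkFiller component defined above (a sketch only, assuming the same old-style landlab API the class itself uses and a grid that already carries a 'topographic__elevation' field):

from landlab import RasterModelGrid

mg = RasterModelGrid(20, 20, 1.)
z = mg.add_field('node', 'topographic__elevation', mg.node_x.copy())
z[125] -= 3.  # punch a pit into the sloping plane

sf = SinkFiller(mg, routing='D8')
sf.fill_pits(apply_slope=True)  # fill the pit, adding a gentle slope to the outlet
print(mg.at_node['sediment_fill__depth'][125])  # depth of fill at the pit node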
Example #12
#create the fields in the grid
mg.create_node_array_zeros('topographic__elevation')
z = mg.create_node_array_zeros() + init_elev
#z += mg.node_x*0.001
mg['node'][ 'topographic__elevation'] = z + numpy.random.rand(len(z))/1000.

#make some K values in a field to test
mg.at_node['K_values'] = 0.1+numpy.random.rand(nrows*ncols)/10.

print( 'Running ...' )
time_on = time.time()

#instantiate the components:
fr = FlowRouter(mg)
sp = StreamPowerEroder(mg, './drive_sp_params.txt')
lf = DepressionFinderAndRouter(mg)
#load the Fastscape module too, to allow direct comparison
fsp = Fsc(mg, './drive_sp_params.txt')

#perform the loop:
elapsed_time = 0. #total time in simulation
while elapsed_time < time_to_run:
#for i in xrange(10):
    print(elapsed_time)
    if elapsed_time+dt>time_to_run:
        print("Short step!")
        dt = time_to_run - elapsed_time
    mg = fr.route_flow(method='D8')
    lf.map_depressions()
    #print 'Area: ', numpy.max(mg.at_node['drainage_area'])
    #mg = fsp.erode(mg)
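    # The excerpt ends here; in a full driver script the loop would typically
    # finish with an erosion step (e.g. via the StreamPowerEroder instance
    # created above -- the exact call depends on the landlab version) and then
    # advance the clock so the while-loop can terminate:
    elapsed_time += dt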
Example #13
def test_composite_pits():
    """
    A test to ensure the component correctly handles cases where there are
    multiple pits, inset into each other.
    """
    mg = RasterModelGrid(10, 10, 1.)
    z = mg.add_field('node', 'topographic__elevation', mg.node_x.copy())
    # a sloping plane
    #np.random.seed(seed=0)
    #z += np.random.rand(100)/10000.
    # punch one big hole
    z.reshape((10,10))[3:8,3:8] = 0.
    # dig a couple of inset holes
    z[57] = -1.
    z[44] = -2.
    z[54] = -10.
    fr = FlowRouter(mg)
    lf = DepressionFinderAndRouter(mg)
    fr.route_flow()
    lf.map_depressions()
    
    flow_sinks_target = np.zeros(100, dtype=bool)
    flow_sinks_target[mg.boundary_nodes] = True
    # no internal sinks now:
    assert_array_equal(mg.at_node['flow_sinks'], flow_sinks_target)
    
    # test conservation of mass:
    assert_almost_equal(
        mg.at_node['drainage_area'].reshape((10, 10))[1:-1, 1].sum(), 8.**2)
    # ^all the core nodes
    
    # test the actual flow field:
    nA = np.array([  0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,
                     8.,   8.,   7.,   6.,   5.,   4.,   3.,   2.,   1.,   0.,
                     1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   0.,
                     1.,   1.,   1.,   4.,   2.,   2.,   8.,   4.,   1.,   0.,
                     1.,   1.,   1.,   8.,   3.,  15.,   3.,   2.,   1.,   0.,
                     1.,   1.,   1.,  13.,  25.,   6.,   3.,   2.,   1.,   0.,
                     1.,   1.,   1.,  45.,   3.,   3.,   5.,   2.,   1.,   0.,
                    50.,  50.,  49.,   3.,   2.,   2.,   2.,   4.,   1.,   0.,
                     1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   1.,   0.,
                     0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.,   0.])
    assert_array_equal(mg.at_node['drainage_area'], nA)
    
    # the lake code map (XX = module-level sentinel for nodes not in a lake):
    lc = np.array([XX, XX, XX, XX, XX, XX, XX, XX, XX, XX,
                   XX, XX, XX, XX, XX, XX, XX, XX, XX, XX,
                   XX, XX, XX, XX, XX, XX, XX, XX, XX, XX,
                   XX, XX, XX, 57, 57, 57, 57, 57, XX, XX,
                   XX, XX, XX, 57, 57, 57, 57, 57, XX, XX,
                   XX, XX, XX, 57, 57, 57, 57, 57, XX, XX,
                   XX, XX, XX, 57, 57, 57, 57, 57, XX, XX,
                   XX, XX, XX, 57, 57, 57, 57, 57, XX, XX,
                   XX, XX, XX, XX, XX, XX, XX, XX, XX, XX,
                   XX, XX, XX, XX, XX, XX, XX, XX, XX, XX])
    
    #test the remaining properties:
    assert_equal(lf.lake_outlets.size, 1)
    assert_equal(lf.lake_outlets[0], 72)
    outlets_in_map = np.unique(lf.depression_outlet_map)
    assert_equal(outlets_in_map.size, 2)
    assert_equal(outlets_in_map[0], 72)
    assert_equal(lf.number_of_lakes, 1)
    assert_equal(lf.lake_codes[0], 57)
    assert_array_equal(lf.lake_map, lc)
    assert_almost_equal(lf.lake_areas[0], 25.)
    assert_almost_equal(lf.lake_volumes[0], 63.)