Example #1
def test_compute_stats_single():
    for G, distances, betas in network_generator():
        data_dict = mock.mock_data_dict(G)
        numeric_data = mock.mock_numerical_data(len(data_dict), num_arrs=1)
        # easy version
        N_easy = networks.Network_Layer_From_nX(G, distances)
        D_easy = layers.Data_Layer_From_Dict(data_dict)
        D_easy.assign_to_network(N_easy, max_dist=500)
        D_easy.compute_stats_single('boo', numeric_data[0])
        # custom version
        N_full = networks.Network_Layer_From_nX(G, distances)
        D_full = layers.Data_Layer_From_Dict(data_dict)
        D_full.assign_to_network(N_full, max_dist=500)
        D_full.compute_aggregated(stats_keys=['boo'],
                                  stats_data_arrs=numeric_data)
        # compare
        for n_label in ['boo']:
            for s_label in [
                    'max', 'min', 'mean', 'mean_weighted', 'variance',
                    'variance_weighted'
            ]:
                for dist in distances:
                    assert np.allclose(
                        N_easy.metrics['stats'][n_label][s_label][dist],
                        N_full.metrics['stats'][n_label][s_label][dist],
                        equal_nan=True,
                        atol=0.001,
                        rtol=0)
        # check that non-single dimension arrays are caught
        with pytest.raises(ValueError):
            D_easy.compute_stats_single('boo', numeric_data)
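The ValueError check at the end hinges on array dimensionality: compute_stats_single expects a single 1-d array, whereas the mock generator returns a 2-d array with one row per requested data array (which is why numeric_data[0] is passed in the happy path). A minimal shape sketch in plain numpy; the (1, n) layout is an assumption inferred from this test rather than a documented contract:

import numpy as np

# assumed stand-in for mock.mock_numerical_data(n_points, num_arrs=1):
# one row per requested array, one column per data point
n_points = 50
numeric_data = np.random.uniform(0.0, 100000.0, size=(1, n_points))

assert numeric_data.ndim == 2     # 2-d: rejected by compute_stats_single
assert numeric_data[0].ndim == 1  # 1-d row: accepted, as in the happy path above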
Example #2
def test_compute_stats_multiple():
    for G, distances, betas in network_generator():
        data_dict = mock.mock_data_dict(G)
        numeric_data = mock.mock_numerical_data(len(data_dict), num_arrs=2)
        # easy version
        N_easy = networks.Network_Layer_From_nX(G, distances)
        D_easy = layers.Data_Layer_From_Dict(data_dict)
        D_easy.assign_to_network(N_easy, max_dist=500)
        D_easy.compute_stats_multiple(['boo', 'baa'], numeric_data)
        # custom version
        N_full = networks.Network_Layer_From_nX(G, distances)
        D_full = layers.Data_Layer_From_Dict(data_dict)
        D_full.assign_to_network(N_full, max_dist=500)
        D_full.compute_aggregated(stats_keys=['boo', 'baa'],
                                  stats_data_arrs=numeric_data)
        # compare
        for n_label in ['boo', 'baa']:
            for s_label in [
                    'max', 'min', 'mean', 'mean_weighted', 'variance',
                    'variance_weighted'
            ]:
                for dist in distances:
                    assert np.allclose(
                        N_easy.metrics['stats'][n_label][s_label][dist],
                        N_full.metrics['stats'][n_label][s_label][dist],
                        equal_nan=True,
                        atol=0.001,
                        rtol=0)
Example #3
def test_metrics_to_dict():
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    # create a network layer and run some metrics
    N = networks.Network_Layer_From_nX(G, distances=[500, 1000])

    # check with no metrics
    metrics_dict = N.metrics_to_dict()
    dict_check(metrics_dict, N)

    # check with centrality metrics
    N.compute_centrality(measures=['node_harmonic'])
    metrics_dict = N.metrics_to_dict()
    dict_check(metrics_dict, N)

    # check with data metrics
    data_dict = mock.mock_data_dict(G)
    landuse_labels = mock.mock_categorical_data(len(data_dict))
    numerical_data = mock.mock_numerical_data(len(data_dict))
    # TODO:
    '''
    D = layers.Data_Layer_From_Dict(data_dict)
    D.assign_to_network(N, max_dist=400)
    D.compute_aggregated(landuse_labels,
                         mixed_use_keys=['hill', 'shannon'],
                         accessibility_keys=['a', 'c'],
                         qs=[0, 1],
                         stats_keys=['boo'],
                         stats_data_arrs=numerical_data)
    '''
    metrics_dict = N.metrics_to_dict()
    dict_check(metrics_dict, N)
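Because metrics_to_dict returns a plain nested dict keyed by node uid, it can be handed to standard serialisers once numpy types are coerced. A hedged sketch, assuming only that leaf values may be numpy arrays or scalars:

import json
import numpy as np

def save_metrics(metrics_dict, path):
    # numpy arrays and scalars are not JSON serialisable by default,
    # so coerce them to their built-in Python equivalents
    def coerce(obj):
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        if isinstance(obj, (np.floating, np.integer)):
            return obj.item()
        raise TypeError(f'cannot serialise {type(obj)}')

    with open(path, 'w') as f:
        json.dump(metrics_dict, f, default=coerce)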
Example #4
def test_check_data_map():
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    N = networks.Network_Layer_From_nX(G, distances=[500])
    data_dict = mock.mock_data_dict(G)
    data_uids, data_map = layers.data_map_from_dict(data_dict)

    # should throw error if not assigned
    with pytest.raises(ValueError):
        checks.check_data_map(data_map)

    # should work if flag set to False
    checks.check_data_map(data_map, check_assigned=False)

    # assign then check that it runs as intended
    data_map = data.assign_to_network(data_map,
                                      N._node_data,
                                      N._edge_data,
                                      N._node_edge_map,
                                      max_dist=400)
    checks.check_data_map(data_map)

    # catch zero length data arrays
    empty_2d_arr = np.full((0, 4), np.nan)
    with pytest.raises(ValueError):
        checks.check_data_map(empty_2d_arr)

    # catch invalid dimensionality
    with pytest.raises(ValueError):
        checks.check_data_map(data_map[:, :-1])
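The two failure cases mirror the data map's layout, which Example #12 below constructs by hand: a 2-d array with four columns per data point, i.e. x, y, nearest assigned node, and next-nearest assigned node. A minimal sketch of a well-formed but unassigned map under that layout:

import numpy as np

# columns: x, y, nearest assigned node index, next-nearest assigned node index
n_points = 3
data_map = np.full((n_points, 4), np.nan)
data_map[:, 0] = [10.0, 20.0, 30.0]  # x coordinates
data_map[:, 1] = [15.0, 25.0, 35.0]  # y coordinates
# columns 2 and 3 remain NaN until assign_to_network runs, which is why
# check_data_map(data_map) raises unless check_assigned=False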
Example #5
def mmm_layercake_diversity(_graph,
                            _layer_spec,
                            _n_iters=200,
                            _spans=200,
                            _bands=4,
                            _tiers=2,
                            random_seed=0):
    np.random.seed(random_seed)

    for k in [
            'cap_step', 'dist_threshold', 'pop_threshold', 'spill_rate',
            'explore_rate', 'innovation_rate'
    ]:
        if k not in _layer_spec:
            raise AttributeError(f'Missing key {k}')

    # generate the backbone Network Layer
    # include 1 (local only) for directly counting items assigned to the current node
    # also include np.inf so that all node combinations are considered, even where the graph wraps back around
    distances = list({1, np.inf, _layer_spec['dist_threshold']})
    Netw_Layer = networks.Network_Layer_From_nX(_graph, distances=distances)

    # generate population data layer
    Pop_Layer = generate_data_layer(_spans, 20, Netw_Layer, _randomised=False)
    # population state and map
    # for this experiment assignments are not changing
    pop_state = np.full(len(Pop_Layer.uids), 1.0)  # use floats!

    # generate the landuse substrate
    # keep to 1 location per node for visualisation's sake
    # randomisation is immaterial because all will be instanced as 0.0
    Landuse_Layer = generate_data_layer(_spans,
                                        1,
                                        Netw_Layer,
                                        _randomised=False)

    # calculate neighbourhood density
    # move into loop if using dynamic populations
    Pop_Layer.compute_stats_single('pop_intensity', pop_state)
    pop_intensity = np.copy(
        Netw_Layer.metrics['stats']['pop_intensity']['sum'][1])

    # iterate
    cap_step = _layer_spec['cap_step']
    dist_threshold = _layer_spec['dist_threshold']
    pop_threshold = _layer_spec['pop_threshold']
    spill_rate = _layer_spec['spill_rate']
    explore_rate = _layer_spec['explore_rate']
    innovation_rate = _layer_spec['innovation_rate']
    landuse_state = compute_iter(_n_iters, Netw_Layer._nodes,
                                 Netw_Layer._edges, Landuse_Layer._data,
                                 np.array(Netw_Layer.distances),
                                 np.array(Netw_Layer.betas), _tiers, _bands,
                                 _spans,
                                 Netw_Layer.distances.index(dist_threshold),
                                 spill_rate, pop_threshold, cap_step,
                                 explore_rate, innovation_rate, pop_intensity)

    return landuse_state
Example #6
def test_check_network_maps():
    # network maps
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    N = networks.Network_Layer_From_nX(G, distances=[500])
    # from cityseer.util import plot
    # plot.plot_networkX_primal_or_dual(primal=G)
    # plot.plot_graph_maps(N.uids, N._node_data, N._edge_data)
    # catch zero length node and edge arrays
    empty_node_arr = np.full((0, 5), np.nan)
    with pytest.raises(ValueError):
        checks.check_network_maps(empty_node_arr, N._edge_data,
                                  N._node_edge_map)
    empty_edge_arr = np.full((0, 4), np.nan)
    with pytest.raises(ValueError):
        checks.check_network_maps(N._node_data, empty_edge_arr,
                                  N._node_edge_map)
    # check that malformed node and data maps throw errors
    with pytest.raises(ValueError):
        checks.check_network_maps(N._node_data[:, :-1], N._edge_data,
                                  N._node_edge_map)
    with pytest.raises(ValueError):
        checks.check_network_maps(N._node_data, N._edge_data[:, :-1],
                                  N._node_edge_map)
    # catch problematic edge map values
    for x in [np.nan, -1]:
        # missing start node
        corrupted_edges = N._edge_data.copy()
        corrupted_edges[0, 0] = x
        with pytest.raises(AssertionError):
            checks.check_network_maps(N._node_data, corrupted_edges,
                                      N._node_edge_map)
        # missing end node
        corrupted_edges = N._edge_data.copy()
        corrupted_edges[0, 1] = x
        with pytest.raises(KeyError):
            checks.check_network_maps(N._node_data, corrupted_edges,
                                      N._node_edge_map)
        # invalid length
        corrupted_edges = N._edge_data.copy()
        corrupted_edges[0, 2] = x
        with pytest.raises(ValueError):
            checks.check_network_maps(N._node_data, corrupted_edges,
                                      N._node_edge_map)
        # invalid angle_sum
        corrupted_edges = N._edge_data.copy()
        corrupted_edges[0, 3] = x
        with pytest.raises(ValueError):
            checks.check_network_maps(N._node_data, corrupted_edges,
                                      N._node_edge_map)
        # invalid imp_factor
        corrupted_edges = N._edge_data.copy()
        corrupted_edges[0, 4] = x
        with pytest.raises(ValueError):
            checks.check_network_maps(N._node_data, corrupted_edges,
                                      N._node_edge_map)
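The corruption loop doubles as documentation for the edge map's first five columns: start node, end node, length, angle_sum, and imp_factor; Example #10 below additionally reads in and out bearings from columns 5 and 6. A sketch of one plausible well-formed row under that reading (the concrete values are illustrative only):

import numpy as np

# assumed edge columns: start, end, length, angle_sum, imp_factor, in bearing, out bearing
edge_row = np.array([
    0.0,   # start node index: NaN or -1 here trips the AssertionError above
    1.0,   # end node index: NaN or -1 here trips the KeyError above
    25.0,  # length in metres: NaN or -1 raises ValueError
    0.0,   # angle_sum: cumulative angular change, NaN or -1 raises ValueError
    1.0,   # imp_factor: impedance multiplier, NaN or -1 raises ValueError
    90.0,  # in bearing (degrees)
    90.0,  # out bearing (degrees)
])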
Example #7
def test_compute_accessibilities():
    for G, distances, betas in network_generator():
        data_dict = mock.mock_data_dict(G)
        landuse_labels = mock.mock_categorical_data(len(data_dict))
        # easy version
        N_easy = networks.Network_Layer_From_nX(G, distances)
        D_easy = layers.Data_Layer_From_Dict(data_dict)
        D_easy.assign_to_network(N_easy, max_dist=500)
        D_easy.compute_accessibilities(landuse_labels, ['c'])
        # custom version
        N_full = networks.Network_Layer_From_nX(G, distances)
        D_full = layers.Data_Layer_From_Dict(data_dict)
        D_full.assign_to_network(N_full, max_dist=500)
        D_full.compute_aggregated(landuse_labels, accessibility_keys=['c'])
        # compare
        for d in distances:
            for wt in ['weighted', 'non_weighted']:
                assert np.allclose(N_easy.metrics['accessibility'][wt]['c'][d],
                                   N_full.metrics['accessibility'][wt]['c'][d],
                                   atol=0.001,
                                   rtol=0)
Example #8
def test_hill_diversity():
    for G, distances, betas in network_generator():
        data_dict = mock.mock_data_dict(G)
        landuse_labels = mock.mock_categorical_data(len(data_dict))
        # easy version
        N_easy = networks.Network_Layer_From_nX(G, distances)
        D_easy = layers.Data_Layer_From_Dict(data_dict)
        D_easy.assign_to_network(N_easy, max_dist=500)
        D_easy.hill_diversity(landuse_labels, qs=[0, 1, 2])
        # custom version
        N_full = networks.Network_Layer_From_nX(G, distances)
        D_full = layers.Data_Layer_From_Dict(data_dict)
        D_full.assign_to_network(N_full, max_dist=500)
        D_full.compute_aggregated(landuse_labels,
                                  mixed_use_keys=['hill'],
                                  qs=[0, 1, 2])
        # compare
        for d in distances:
            for q in [0, 1, 2]:
                assert np.allclose(N_easy.metrics['mixed_uses']['hill'][q][d],
                                   N_full.metrics['mixed_uses']['hill'][q][d],
                                   atol=0.001,
                                   rtol=0)
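For reference, the Hill numbers being compared follow the standard definition over the proportional abundances p_i of the landuse classes, with q tuning sensitivity to rare classes (q = 0 counts distinct classes; the q -> 1 limit is the exponential of Shannon entropy, matching the qs=[0, 1, 2] sweep above):

{}^{q}D = \left( \sum_{i=1}^{S} p_i^{q} \right)^{1/(1-q)}, \qquad \lim_{q \to 1} {}^{q}D = \exp\left( -\sum_{i=1}^{S} p_i \ln p_i \right)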
Example #9
def test_to_networkX():
    # also see test_graphs.test_networkX_from_graph_maps for underlying graph maps version

    # check round trip to and from graph maps results in same graph
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    # explicitly set live and weight params for equality checks
    # graph_maps_from_networkX generates these implicitly if missing
    G = graphs.nX_decompose(G, decompose_max=20)
    for n in G.nodes():
        G.nodes[n]['live'] = bool(np.random.randint(0, 2))  # randint's upper bound is exclusive
    for s, e in G.edges():
        G[s][e]['imp_factor'] = np.random.randint(0, 2)

    # add random data to check persistence at other end
    baa_node = None
    for n in G.nodes():
        baa_node = n
        G.nodes[n]['boo'] = 'baa'
        break
    boo_edge = None
    for s, e in G.edges():
        boo_edge = (s, e)
        G[s][e]['baa'] = 'boo'

    # test with metrics
    N = networks.Network_Layer_From_nX(G, distances=[500])
    N.compute_centrality(measures=['node_harmonic'])
    metrics_dict = N.metrics_to_dict()
    G_round_trip = N.to_networkX()
    for n, d in G.nodes(data=True):
        assert G_round_trip.nodes[n]['x'] == d['x']
        assert G_round_trip.nodes[n]['y'] == d['y']
        assert G_round_trip.nodes[n]['live'] == d['live']
    for s, e, d in G.edges(data=True):
        assert G_round_trip[s][e]['geom'] == d['geom']
        assert G_round_trip[s][e]['imp_factor'] == d['imp_factor']
    # check that metrics came through
    for uid, metrics in metrics_dict.items():
        assert G_round_trip.nodes[uid]['metrics'] == metrics
    # check data persistence
    assert G_round_trip.nodes[baa_node]['boo'] == 'baa'
    assert G_round_trip[boo_edge[0]][boo_edge[1]]['baa'] == 'boo'
Example #10
def test_Network_Layer_From_nX():
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        G)
    x_arr = node_data[:, 0]
    y_arr = node_data[:, 1]
    betas = np.array([-0.04, -0.02])
    distances = networks.distance_from_beta(betas)

    # test the Network_Layer_From_nX class
    for d, b in zip([distances, None], [None, betas]):
        for angular in [True, False]:
            N = networks.Network_Layer_From_nX(G, distances=d, betas=b)
            assert np.allclose(N.uids, node_uids, atol=0.001, rtol=0)
            assert np.allclose(N._node_data, node_data, atol=0.001, rtol=0)
            assert np.allclose(N._edge_data, edge_data, atol=0.001, rtol=0)
            assert np.allclose(
                N.distances, distances, atol=0.001,
                rtol=0)  # inferred automatically when only betas provided
            assert np.allclose(
                N.betas, betas, atol=0.001,
                rtol=0)  # inferred automatically when only distances provided
            assert N.min_threshold_wt == checks.def_min_thresh_wt
            assert np.allclose(N.x_arr, x_arr, atol=0.001, rtol=0)
            assert np.allclose(N.y_arr, y_arr, atol=0.001, rtol=0)
            assert np.allclose(N.live, node_data[:, 2], atol=0.001, rtol=0)
            assert np.allclose(N.edge_lengths,
                               edge_data[:, 2],
                               atol=0.001,
                               rtol=0)
            assert np.allclose(N.edge_angles,
                               edge_data[:, 3],
                               atol=0.001,
                               rtol=0)
            assert np.allclose(N.edge_impedance_factor,
                               edge_data[:, 4],
                               atol=0.001,
                               rtol=0)
            assert np.allclose(N.edge_in_bearing,
                               edge_data[:, 5],
                               atol=0.001,
                               rtol=0)
            assert np.allclose(N.edge_out_bearing,
                               edge_data[:, 6],
                               atol=0.001,
                               rtol=0)

    # check alternate min_threshold_wt gets passed through successfully
    alt_min = 0.02
    alt_distances = networks.distance_from_beta(betas,
                                                min_threshold_wt=alt_min)
    N = networks.Network_Layer_From_nX(G,
                                       betas=betas,
                                       min_threshold_wt=alt_min)
    assert np.allclose(N.distances, alt_distances, atol=0.001, rtol=0)

    # check for malformed signatures
    with pytest.raises(TypeError):
        networks.Network_Layer_From_nX('boo', distances=distances)
    with pytest.raises(ValueError):
        networks.Network_Layer_From_nX(G)  # no betas or distances
    with pytest.raises(ValueError):
        networks.Network_Layer_From_nX(G, distances=None, betas=None)
    with pytest.raises(ValueError):
        networks.Network_Layer_From_nX(G, distances=[])
    with pytest.raises(ValueError):
        networks.Network_Layer_From_nX(G, betas=[])
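The inferred distances follow from inverting the negative-exponential decay: the threshold distance is where exp(beta * d) falls to min_threshold_wt. A standalone numpy sketch of that inversion; the default of exp(-4) ~= 0.01832 for checks.def_min_thresh_wt is an assumption, under which the betas above map to 100 m and 200 m:

import numpy as np

def distance_from_beta_sketch(betas, min_threshold_wt=0.01831563888873418):
    # solve exp(beta * d) = min_threshold_wt for d, given negative betas
    betas = np.asarray(betas, dtype=float)
    return np.log(min_threshold_wt) / betas

print(distance_from_beta_sketch([-0.04, -0.02]))  # -> [100. 200.]
print(distance_from_beta_sketch([-0.04, -0.02], min_threshold_wt=0.02))  # -> approx [97.8 195.6]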
Example #11
def test_nX_from_graph_maps():
    # also see test_networks.test_to_networkX for tests on implementation via Network layer

    # check round trip to and from graph maps results in same graph
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    # explicitly set live params for equality checks
    # graph_maps_from_networkX generates these implicitly if missing
    for n in G.nodes():
        G.nodes[n]['live'] = bool(np.random.randint(0, 2))  # randint's upper bound is exclusive

    # test directly from and to graph maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)
    G_round_trip = graphs.nX_from_graph_maps(node_uids, node_data, edge_data, node_edge_map)
    assert list(G_round_trip.nodes) == list(G.nodes)
    assert list(G_round_trip.edges) == list(G.edges)

    # check with metrics dictionary
    N = networks.Network_Layer_From_nX(G, distances=[500, 1000])

    N.compute_centrality(measures=['node_harmonic'])
    data_dict = mock.mock_data_dict(G)
    landuse_labels = mock.mock_categorical_data(len(data_dict))
    D = layers.Data_Layer_From_Dict(data_dict)
    D.assign_to_network(N, max_dist=400)
    D.compute_aggregated(landuse_labels,
                         mixed_use_keys=['hill', 'shannon'],
                         accessibility_keys=['a', 'c'],
                         qs=[0, 1])
    metrics_dict = N.metrics_to_dict()
    # without backbone
    G_round_trip_data = graphs.nX_from_graph_maps(node_uids,
                                                  node_data,
                                                  edge_data,
                                                  node_edge_map,
                                                  metrics_dict=metrics_dict)
    for uid, metrics in metrics_dict.items():
        assert G_round_trip_data.nodes[uid]['metrics'] == metrics
    # with backbone
    G_round_trip_data = graphs.nX_from_graph_maps(node_uids,
                                                  node_data,
                                                  edge_data,
                                                  node_edge_map,
                                                  networkX_graph=G,
                                                  metrics_dict=metrics_dict)
    for uid, metrics in metrics_dict.items():
        assert G_round_trip_data.nodes[uid]['metrics'] == metrics

    # test with decomposed
    G_decomposed = graphs.nX_decompose(G, decompose_max=20)
    # set live explicitly
    for n in G_decomposed.nodes():
        G_decomposed.nodes[n]['live'] = bool(np.random.randint(0, 2))  # randint's upper bound is exclusive
    node_uids_d, node_data_d, edge_data_d, node_edge_map_d = graphs.graph_maps_from_nX(G_decomposed)

    G_round_trip_d = graphs.nX_from_graph_maps(node_uids_d, node_data_d, edge_data_d, node_edge_map_d)
    assert list(G_round_trip_d.nodes) == list(G_decomposed.nodes)
    # iterate the decomposed round trip (avoid shadowing node_data, which is reused below)
    for n, n_data in G_round_trip_d.nodes(data=True):
        assert n in G_decomposed
        assert n_data['live'] == G_decomposed.nodes[n]['live']
        assert n_data['x'] == G_decomposed.nodes[n]['x']
        assert n_data['y'] == G_decomposed.nodes[n]['y']
    assert G_round_trip_d.edges == G_decomposed.edges

    # error checks for when using backbone graph:
    # mismatching numbers of nodes
    corrupt_G = G.copy()
    corrupt_G.remove_node(0)
    with pytest.raises(ValueError):
        graphs.nX_from_graph_maps(node_uids, node_data, edge_data, node_edge_map, networkX_graph=corrupt_G)
    # mismatching node uid
    with pytest.raises(ValueError):
        corrupt_node_uids = list(node_uids)
        corrupt_node_uids[0] = 'boo'
        graphs.nX_from_graph_maps(corrupt_node_uids, node_data, edge_data, node_edge_map, networkX_graph=G)
Example #12
def mmml_phd(_graph, _iters: int, distances: list, avg_dwell_dens: float,
             models: dict, column_order: list, cap_step: float,
             cap_jitter: float, max_cap: float, new_threshold: float,
             random_seed: int):
    '''
    landuse states are based on the number of nodes: kept simple for visualisation

    don't confuse with the data layer, which is POI based;
    for the sake of the simulation, all POIs assume the same x, y locations as their assigned nodes

    column order must be filtered to exclude the currently targeted layer

    the tension between existing stores and latent competition (for a given capacity) is important to the dynamics
    the non-linearity between activation and deactivation is also important
    '''
    _spans = len(_graph)

    # set random seed for reproducibility and comparison across scenarios
    np.random.seed(random_seed)

    # build network layer
    Netw_Layer = networks.Network_Layer_From_nX(_graph, distances=distances)
    # calculate centralities - static
    Netw_Layer.compute_centrality(['node_harmonic_angular'], angular=True)
    Netw_Layer.compute_centrality(['node_betweenness_beta'], angular=False)

    # generate flat data layer - network assignment happens internally - note randomised=False
    Flat_Layer = generate_data_layer(_spans, 1, Netw_Layer, _randomised=False)
    # calculate dwelling density - static
    dwellings = np.full(_spans, avg_dwell_dens)
    Flat_Layer.compute_stats_single('population_density', dwellings)
    # the dwellings map is static in this case, so may as well set up front
    dwellings_map = np.full((_iters, _spans), float(avg_dwell_dens))

    # prepare the arrays for timeline maps that keep track of computations
    # one per layer / model
    landuse_maps = []
    capacitance_maps = []
    for _ in models:
        landuse_maps.append(np.full((_iters, _spans), 0.0))
        capacitance_maps.append(np.full((_iters, _spans), 0.0))
    # per layer
    n_layers = len(models)
    landuse_states = np.full((n_layers, _spans), 0.0)
    capacitances = np.full((n_layers, _spans), 0.0)
    measured_accessibility = np.full((n_layers, _spans), 0.0)
    predicted_accessibility = np.full((n_layers, _spans), 0.0)

    # iterate iters
    for n in tqdm(range(_iters)):
        # build a landuse map reflecting the current state
        # shape is sum of all landuses by x, y, nearest, next nearest
        data_uids = []
        lu_data_map = np.full((int(landuse_states.sum()), 4), np.nan)
        landuse_labels = []
        # iterate each model's layer
        current_lu_counter = 0
        for model_idx, model_key in enumerate(models.keys()):
            # get the node indices of each active landuse location (for given model's layer)
            current_active = np.where(landuse_states[model_idx] != 0)[0]
            # iterate the active node locations and add to the current landuses data structure
            for active_idx in current_active:
                # create one instance per landuse
                for active_n in range(
                        int(landuse_states[model_idx][active_idx])):
                    # get the x and y
                    x, y = (Netw_Layer.x_arr[active_idx],
                            Netw_Layer.y_arr[active_idx])
                    # add to uids
                    data_uids.append(current_lu_counter)
                    # add to the data structure: nearest node will be the current node
                    lu_data_map[current_lu_counter] = [
                        x, y, active_idx, np.nan
                    ]
                    current_lu_counter += 1
                    # add the landuse code associated with the model
                    landuse_labels.append(model_key)
        # the current landuses data structure can now be used for landuse accessibilities
        # use empty placeholders if no landuse datapoints
        # first iter needs to create keys
        if not lu_data_map.shape[0]:
            for key in models.keys():
                if key not in Netw_Layer.metrics['accessibility']['weighted']:
                    Netw_Layer.metrics['accessibility']['weighted'][key] = {}
                for dist in distances:
                    Netw_Layer.metrics['accessibility']['weighted'][key][
                        dist] = np.full(_spans, 0.0)
        else:
            # generate a datalayer
            DL = layers.Data_Layer(data_uids, lu_data_map)
            # manually assign the network layer
            DL._Network = Netw_Layer
            # compute accessibilities for label of each datapoint against list of target keys
            DL.compute_accessibilities(landuse_labels, list(models.keys()))

        # for each model, build the input data array fed to the model, then predict
        for model_idx, (model_key, model_vals) in enumerate(models.items()):
            # number of columns: all columns across all distances, minus the target column's distances
            col_num = len(column_order) * len(distances) - len(distances)
            # placeholder array
            X_arr = np.full((_spans, col_num), np.nan)
            # iterate the columns, then distances, and add to input data array
            col_num_idx = 0
            for col in column_order:
                # target columns
                if col == model_key:
                    continue
                elif col == 'c_node_harmonic_angular_{dist}':
                    c = Netw_Layer.metrics['centrality'][
                        'node_harmonic_angular']
                elif col == 'c_node_betweenness_beta_{dist}':
                    c = Netw_Layer.metrics['centrality'][
                        'node_betweenness_beta']
                elif col == 'cens_dwellings_{dist}':
                    c = Netw_Layer.metrics['stats']['population_density'][
                        'sum_weighted']
                else:
                    c = Netw_Layer.metrics['accessibility']['weighted'][col]
                for dist in distances:
                    X_arr[:, col_num_idx] = c[dist]
                    col_num_idx += 1
            assert col_num_idx == col_num
            # predict y_hat
            X_trans = model_vals['transformer'].transform(X_arr)
            predicted_accessibility[model_idx] = model_vals['model'].predict(
                X_trans).flatten()

        # extract the target column at the target distance for comparison to predicted
        for model_idx, (model_key, model_vals) in enumerate(models.items()):
            d = model_vals['target_dist']
            measured_accessibility[model_idx] = \
                Netw_Layer.metrics['accessibility']['weighted'][model_key][d]

        # adjust capacitances and landuse states
        for i in range(len(models)):
            # calculate potentials
            potentials = predicted_accessibility[i] - measured_accessibility[i]
            # currently occupied landuse locations for given layer
            occupied_loc = landuse_states[i] == 1
            # squash landuses - do this inside loop to capture previous on / off state changes
            squashed_landuses = landuse_states.sum(axis=0)
            """
            EXISTING LOCATIONS
            """
            # existing locations - OK as long as potentials are greater than measured
            exist_pot = np.copy(potentials)
            # normalise to smoothen and avoid batch-wise blocks
            exist_pot /= np.abs(
                potentials).max()  # intentionally using potentials
            # scale to capacitance step
            exist_pot *= cap_step
            # only applies to current landuse locations
            capacitances[i][occupied_loc] += exist_pot[occupied_loc]
            """
            POTENTIAL LOCATIONS
            """
            future_pot = np.copy(potentials)
            # future potentials - must exceed new_threshold to trigger new location
            future_pot -= new_threshold
            # normalise to smoothen and avoid batch-wise blocks
            future_pot /= np.abs(
                potentials).max()  # intentionally using potentials
            # scale to capacitance step
            future_pot *= cap_step
            # only applies to available locations
            capacitances[i][~occupied_loc] += future_pot[~occupied_loc]
            """
            LANDUSE STATES
            """
            # add some stochasticity - scale to capacitance step
            capacitances[i] += np.random.randn(_spans) * cap_step * cap_jitter
            # clip capacitances
            capacitances[i] = np.clip(capacitances[i], 0, max_cap)
            # turn off per current capacitance for current landuse
            off_idx = np.logical_and(capacitances[i] <= 0,
                                     landuse_states[i] == 1)
            landuse_states[i][off_idx] = 0
            # turn on if no landuses currently at this index
            on_idx = np.logical_and(capacitances[i] >= 1,
                                    squashed_landuses == 0)
            landuse_states[i][on_idx] = 1
            # record landuse and capacitance states
            capacitance_maps[i][n] = capacitances[i]
            landuse_maps[i][n] = landuse_states[i]

    return dwellings_map, landuse_maps, capacitance_maps
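The capacitance logic at the end of the loop acts as a hysteresis: a site only switches on once its capacitance has charged all the way to the on-threshold (and no other layer occupies it), and only switches off once fully drained, so jitter around intermediate values cannot flip states back and forth. A stripped-down sketch of just that mechanism with illustrative parameters:

import numpy as np

rng = np.random.default_rng(0)
spans, cap_step, cap_jitter, max_cap = 10, 0.1, 0.1, 1.0
capacitances = np.zeros(spans)
states = np.zeros(spans)

for _ in range(100):
    # stand-in drive signal: positive potentials charge, negative potentials drain
    drive = rng.standard_normal(spans) * cap_step
    capacitances += drive + rng.standard_normal(spans) * cap_step * cap_jitter
    capacitances = np.clip(capacitances, 0, max_cap)
    states[np.logical_and(capacitances <= 0, states == 1)] = 0  # fully drained: off
    states[np.logical_and(capacitances >= 1, states == 0)] = 1  # fully charged: on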
Example #13
def test_compute_aggregated_A():
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    betas = np.array([-0.01, -0.005])
    distances = networks.distance_from_beta(betas)
    # network layer
    N = networks.Network_Layer_From_nX(G, distances)
    node_map = N._node_data
    edge_map = N._edge_data
    node_edge_map = N._node_edge_map
    # data layer
    data_dict = mock.mock_data_dict(G)
    qs = np.array([0, 1, 2])
    D = layers.Data_Layer_From_Dict(data_dict)
    # check single metrics independently against underlying for some use-cases, e.g. hill, non-hill, accessibility...
    D.assign_to_network(N, max_dist=500)
    # generate some mock landuse data
    landuse_labels = mock.mock_categorical_data(len(data_dict))
    landuse_classes, landuse_encodings = layers.encode_categorical(
        landuse_labels)
    # compute hill mixed uses
    D.compute_aggregated(landuse_labels,
                         mixed_use_keys=['hill_branch_wt'],
                         qs=qs)
    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              landuse_encodings,
                              qs=qs,
                              mixed_use_hill_keys=np.array([1]))
    for q_idx, q_key in enumerate(qs):
        for d_idx, d_key in enumerate(distances):
            assert np.allclose(
                N.metrics['mixed_uses']['hill_branch_wt'][q_key][d_key],
                mu_data_hill[0][q_idx][d_idx],
                atol=0.001,
                rtol=0)
    # gini simpson
    D.compute_aggregated(landuse_labels, mixed_use_keys=['gini_simpson'])
    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              landuse_encodings,
                              mixed_use_other_keys=np.array([1]))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(N.metrics['mixed_uses']['gini_simpson'][d_key],
                           mu_data_other[0][d_idx],
                           atol=0.001,
                           rtol=0)
    # accessibilities
    D.compute_aggregated(landuse_labels, accessibility_keys=['c'])
    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              landuse_encodings,
                              accessibility_keys=np.array([landuse_classes.index('c')]))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(
            N.metrics['accessibility']['non_weighted']['c'][d_key],
            ac_data[0][d_idx],
            atol=0.001,
            rtol=0)
        assert np.allclose(N.metrics['accessibility']['weighted']['c'][d_key],
                           ac_data_wt[0][d_idx],
                           atol=0.001,
                           rtol=0)
    # also check the number of returned types for a few assortments of metrics
    mixed_uses_hill_types = np.array([
        'hill', 'hill_branch_wt', 'hill_pairwise_wt', 'hill_pairwise_disparity'
    ])
    mixed_use_other_types = np.array(
        ['shannon', 'gini_simpson', 'raos_pairwise_disparity'])
    ac_codes = np.array(landuse_classes)

    mu_hill_random = np.arange(len(mixed_uses_hill_types))
    np.random.shuffle(mu_hill_random)

    mu_other_random = np.arange(len(mixed_use_other_types))
    np.random.shuffle(mu_other_random)

    ac_random = np.arange(len(landuse_classes))
    np.random.shuffle(ac_random)

    # mock disparity matrix
    mock_disparity_wt_matrix = np.full(
        (len(landuse_classes), len(landuse_classes)), 1)

    # not necessary to do all labels, first few should do
    for mu_h_min in range(3):
        mu_h_keys = np.array(mu_hill_random[mu_h_min:])

        for mu_o_min in range(3):
            mu_o_keys = np.array(mu_other_random[mu_o_min:])

            for ac_min in range(3):
                ac_keys = np.array(ac_random[ac_min:])

                # in the final case, set accessibility to a single code otherwise an error would be raised
                if len(mu_h_keys) == 0 and len(mu_o_keys) == 0 and len(
                        ac_keys) == 0:
                    ac_keys = np.array([0])

                # randomise order of keys and metrics
                mu_h_metrics = mixed_uses_hill_types[mu_h_keys]
                mu_o_metrics = mixed_use_other_types[mu_o_keys]
                ac_metrics = ac_codes[ac_keys]

                N_temp = networks.Network_Layer_From_nX(G, distances)
                D_temp = layers.Data_Layer_From_Dict(data_dict)
                D_temp.assign_to_network(N_temp, max_dist=500)
                D_temp.compute_aggregated(
                    landuse_labels,
                    mixed_use_keys=list(mu_h_metrics) + list(mu_o_metrics),
                    accessibility_keys=ac_metrics,
                    cl_disparity_wt_matrix=mock_disparity_wt_matrix,
                    qs=qs)

                # test against underlying method
                mu_data_hill, mu_data_other, ac_data, ac_data_wt, stats_sum, stats_sum_wt, \
                stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
                    data.local_aggregator(node_map,
                                          edge_map,
                                          node_edge_map,
                                          data_map,
                                          distances,
                                          betas,
                                          landuse_encodings,
                                          qs=qs,
                                          mixed_use_hill_keys=mu_h_keys,
                                          mixed_use_other_keys=mu_o_keys,
                                          accessibility_keys=ac_keys,
                                          cl_disparity_wt_matrix=mock_disparity_wt_matrix)

                for mu_h_idx, mu_h_met in enumerate(mu_h_metrics):
                    for q_idx, q_key in enumerate(qs):
                        for d_idx, d_key in enumerate(distances):
                            assert np.allclose(
                                N_temp.metrics['mixed_uses'][mu_h_met][q_key]
                                [d_key],
                                mu_data_hill[mu_h_idx][q_idx][d_idx],
                                atol=0.001,
                                rtol=0)

                for mu_o_idx, mu_o_met in enumerate(mu_o_metrics):
                    for d_idx, d_key in enumerate(distances):
                        assert np.allclose(
                            N_temp.metrics['mixed_uses'][mu_o_met][d_key],
                            mu_data_other[mu_o_idx][d_idx],
                            atol=0.001,
                            rtol=0)

                for ac_idx, ac_met in enumerate(ac_metrics):
                    for d_idx, d_key in enumerate(distances):
                        assert np.allclose(N_temp.metrics['accessibility']
                                           ['non_weighted'][ac_met][d_key],
                                           ac_data[ac_idx][d_idx],
                                           atol=0.001,
                                           rtol=0)
                        assert np.allclose(N_temp.metrics['accessibility']
                                           ['weighted'][ac_met][d_key],
                                           ac_data_wt[ac_idx][d_idx],
                                           atol=0.001,
                                           rtol=0)

    # most integrity checks happen in underlying method, though check here for mismatching labels length and typos
    with pytest.raises(ValueError):
        D.compute_aggregated(landuse_labels[-1], mixed_use_keys=['shannon'])
    with pytest.raises(ValueError):
        D.compute_aggregated(landuse_labels, mixed_use_keys=['spelling_typo'])
    # don't check accessibility_labels for typos - because only warning is triggered (not all labels will be in all data)
    # check that unassigned data layer flags
    with pytest.raises(ValueError):
        D_new = layers.Data_Layer_From_Dict(data_dict)
        D_new.compute_aggregated(landuse_labels, mixed_use_keys=['shannon'])
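The integer key arrays handed to data.local_aggregator index into the same orderings spelled out in the test itself, which is why np.array([1]) selects hill_branch_wt in the first call and gini_simpson in the second. A quick self-check:

import numpy as np

mixed_uses_hill_types = np.array(
    ['hill', 'hill_branch_wt', 'hill_pairwise_wt', 'hill_pairwise_disparity'])
mixed_use_other_types = np.array(
    ['shannon', 'gini_simpson', 'raos_pairwise_disparity'])

assert mixed_uses_hill_types[1] == 'hill_branch_wt'  # mixed_use_hill_keys=np.array([1])
assert mixed_use_other_types[1] == 'gini_simpson'    # mixed_use_other_keys=np.array([1])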
Example #14
def mmm_layercake_phd(_graph,
                      _iters=200,
                      echo_rate=0.2,
                      echo_distance=400,
                      competition_factor=1.1,
                      death_rate=0.01,
                      flow_jitter=0.2,
                      cap_jitter=0.1,
                      _layer_specs=(),
                      random_seed=0):
    if isinstance(_layer_specs, dict):
        _layer_specs = (_layer_specs,)

    if not len(_layer_specs):
        raise AttributeError('''
            No layer specs provided, e.g. ({
                'cap_step': 0.5,
                'dist_threshold': 800,
                'pop_threshold': 800
            })
        ''')

    for l_s in _layer_specs:
        for k in ['cap_step', 'dist_threshold', 'pop_threshold']:
            if k not in l_s:
                raise AttributeError(f'Missing key {k}')

    _spans = len(_graph)

    # generate the backbone Network Layer
    # distances are drawn from the layer specs, plus the echo distance (deduplicated)
    distances = [l_s['dist_threshold'] for l_s in _layer_specs]
    distances.append(echo_distance)
    distances = list(set(distances))
    # build network layer
    Netw_Layer = networks.Network_Layer_From_nX(_graph, distances=distances)
    # generate population data layer - assignment happens internally - note randomised=False
    Pop_Layer = generate_data_layer(_spans, 1, Netw_Layer, _randomised=False)
    # population state and timeline map
    # for this experiment assignments are not changing
    pop_state = np.full(len(Pop_Layer.uids), 1.0)  # use floats!
    pop_map = np.full((_iters, _spans), 0.0)
    # generate the landuse substrate
    # keep to 1 location per node for visualisation's sake
    Landuse_Layer = generate_data_layer(_spans, 1, Netw_Layer, _randomised=False)
    # prepare the arrays for timeline maps that keep track of computations - one per layer
    landuse_maps = []
    capacitance_maps = []
    for _ in _layer_specs:
        landuse_maps.append(np.full((_iters, _spans), 0.0))
        capacitance_maps.append(np.full((_iters, _spans), 0.0))
    # flow map is based on squashed flows - only one copy necessary
    flow_map = np.full((_iters, _spans), 0.0)

    n_layers = len(_layer_specs)
    # per layer
    landuse_states = np.full((n_layers, len(Landuse_Layer.uids)), 0.0)
    capacitances = np.full((n_layers, _spans), 0.0)

    assigned_trips_actual = np.full((n_layers, _spans), 0.0)
    # assigned_trips_potential = np.full((n_layers, _spans), 0.0)
    assigned_trips_echos = np.full(_spans, 0.0)  # echos is squashed

    netw_flow_actual = np.full((n_layers, _spans), 0.0)
    # netw_flow_potential = np.full((n_layers, _spans), 0.0)
    netw_flow_echos = np.full(_spans, 0.0)  # echos is squashed

    agged_trips_actual = np.full((n_layers, _spans), 0.0)
    agged_flows_actual = np.full((n_layers, _spans), 0.0)
    # agged_trips_potential = np.full((n_layers, _spans), 0.0)
    # agged_flows_potential = np.full((n_layers, _spans), 0.0)

    # set random seed for reproducibility and comparison across scenarios
    np.random.seed(random_seed)
    # set seed landuse location (otherwise no initial flows)
    for i in range(n_layers):
        rdm_idx = np.random.choice(_spans, 1)
        landuse_states[i][rdm_idx] = 1

    # iterate
    for n in tqdm(range(_iters)):
        pop_intensity = pop_state
        # record current state - actually doesn't change for this experiment...
        pop_map[n] = pop_intensity
        # apportion flow - once per layer
        for i, l_s in enumerate(_layer_specs):
            ### ACTUAL
            # compute singly constrained realised flows
            # NOTE: do not use a capacitance weighted version:
            # singly-constrained already effectively takes competition vis-a-vis flow
            # potentials into account!!!
            Landuse_Layer.model_singly_constrained('trips',
                                                   Pop_Layer._data,
                                                   Landuse_Layer._data,
                                                   pop_intensity,
                                                   landuse_states[i])
            assigned_trips_actual[i] = \
                Netw_Layer.metrics['models']['trips'][l_s['dist_threshold']]['assigned_trips']
            netw_flow_actual[i] = Netw_Layer.metrics['models']['trips'][l_s['dist_threshold']][
                'network_flows']
            # aggregate PREVIOUS iterations echos to current trips BEFORE computing new echos
            agged_trips_actual[i] = assigned_trips_actual[i] + assigned_trips_echos
            agged_flows_actual[i] = netw_flow_actual[i] + netw_flow_echos

        # squashing
        squashed_trips = agged_trips_actual.sum(axis=0)
        squashed_flows = agged_flows_actual.sum(axis=0)
        squashed_landuses = landuse_states.sum(axis=0)
        flow_map[n] = squashed_flows  # record flow state

        ### ECHOS
        # compute singly constrained echos (spillovers) - combined for all layers
        echos = squashed_trips * echo_rate
        Landuse_Layer.model_singly_constrained('echos',
                                               Landuse_Layer._data,  # landuse to landuse
                                               Landuse_Layer._data,  # landuse to landuse
                                               echos,
                                               squashed_landuses)
        assigned_trips_echos = Netw_Layer.metrics['models']['echos'][echo_distance][
            'assigned_trips']
        netw_flow_echos = Netw_Layer.metrics['models']['echos'][echo_distance]['network_flows']

        # compute
        # the tension between existing stores and latent competition (for given capacity) is
        # important to dynamics
        # the non-linearity between activation and deactivation is also important
        for i, l_s in enumerate(_layer_specs):
            # update squashed landuses inside loop to capture changes
            squashed_landuses = landuse_states.sum(axis=0)
            '''
            FITNESS - this is based on actual trips and assessed for this layer only
            '''
            # the fitness function:
            # trips to specific location + conversion of adjacent flows
            up_locations = np.logical_and(agged_trips_actual[i] >= l_s['pop_threshold'],
                                          landuse_states[i] == 1)
            capacitances[i][up_locations] += l_s['cap_step']
            # decreases otherwise
            down_locations = np.logical_and(agged_trips_actual[i] < l_s['pop_threshold'],
                                            landuse_states[i] == 1)
            capacitances[i][down_locations] -= l_s['cap_step']
            '''
            POTENTIALS - this is based on potentials
            '''
            # identify n potential: total population / threshold for landuse
            potential = pop_state.sum() / l_s['pop_threshold']
            # multiply by competition factor and cast to int
            potential = int(potential * competition_factor)
            # identify latent demand by subtracting actual number of current landuses
            actual = landuse_states[i].sum()
            latent = potential - actual
            latent = int(np.clip(latent, 0, np.abs(latent)))
            # check for available locations
            available_locations = squashed_landuses == 0
            # identify n highest (jittered) flows at unoccupied locations
            jitter = np.random.randn(_spans) * squashed_flows.max() * flow_jitter
            sorted_flows_idx = np.argsort(squashed_flows + jitter, kind='mergesort')[::-1]
            # sort available locations accordingly and filter
            sorted_locations = available_locations[sorted_flows_idx]
            filtered_flows_idx = sorted_flows_idx[sorted_locations]
            # snip per latent demand
            filtered_flows_idx = filtered_flows_idx[:latent]
            capacitances[i][filtered_flows_idx] += l_s['cap_step']
            '''
            CAPACITANCE - non-linearity
            '''
            # add some stochasticity - scale to capacitance step
            capacitances[i] += np.random.randn(_spans) * l_s['cap_step'] * cap_jitter
            # clip capacitances to range
            capacitances[i] = np.clip(capacitances[i], 0, 1)
            # turn off and on per current capacitance
            off_idx = np.intersect1d(np.where(capacitances[i] == 0),
                                     np.where(landuse_states[i] == 1))  # use layer
            landuse_states[i][off_idx] = 0
            # cull based on death rate probability
            active_idx = np.where(landuse_states[i] == 1)[0]
            cull = np.random.choice([0, 1], active_idx.shape[0],
                                    p=[death_rate, 1 - death_rate])
            cull_idx = active_idx[cull == 0]
            landuse_states[i][cull_idx] = 0
            capacitances[i][cull_idx] = 0
            # turn on per current capacitance
            on_idx = np.intersect1d(np.where(capacitances[i] == 1),
                                    np.where(squashed_landuses == 0))  # use squashed
            landuse_states[i][on_idx] = 1
            # record landuse and capacitance states
            capacitance_maps[i][n] = capacitances[i]
            landuse_maps[i][n] = landuse_states[i]

    return pop_map, landuse_maps, capacitance_maps, flow_map
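model_singly_constrained is used here as a classic origin-constrained spatial interaction model: each origin's intensity is apportioned in full across destinations, in proportion to destination weight and a distance-decay term. In the standard formulation (my notation; cityseer's internals may differ):

T_{ij} = O_i \frac{W_j e^{\beta d_{ij}}}{\sum_k W_k e^{\beta d_{ik}}}

where O_i is the origin intensity (pop_intensity above), W_j the destination weight (the landuse state), d_{ij} the network distance, and beta a negative decay parameter; summing T_{ij} over origins gives the 'assigned_trips' read back from the metrics dict.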
Example #15
def test_compute_aggregated_B():
    '''
    Test stats component
    '''
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    betas = np.array([-0.01, -0.005])
    distances = networks.distance_from_beta(betas)
    # network layer
    N = networks.Network_Layer_From_nX(G, distances)
    node_map = N._node_data
    edge_map = N._edge_data
    node_edge_map = N._node_edge_map
    # data layer
    data_dict = mock.mock_data_dict(G)
    qs = np.array([0, 1, 2])
    D = layers.Data_Layer_From_Dict(data_dict)
    # check the stats metrics independently against the underlying method
    D.assign_to_network(N, max_dist=500)

    # generate some mock numerical data
    mock_numeric = mock.mock_numerical_data(len(data_dict), num_arrs=2)

    # generate stats
    D.compute_aggregated(stats_keys=['boo', 'baa'],
                         stats_data_arrs=mock_numeric)

    # test against underlying method
    data_map = D._data
    mu_data_hill, mu_data_other, ac_data, ac_data_wt, \
    stats_sum, stats_sum_wt, stats_mean, stats_mean_wt, stats_variance, stats_variance_wt, stats_max, stats_min = \
        data.local_aggregator(node_map,
                              edge_map,
                              node_edge_map,
                              data_map,
                              distances,
                              betas,
                              numerical_arrays=mock_numeric)

    stats_keys = [
        'max', 'min', 'sum', 'sum_weighted', 'mean', 'mean_weighted',
        'variance', 'variance_weighted'
    ]
    stats_data = [
        stats_max, stats_min, stats_sum, stats_sum_wt, stats_mean,
        stats_mean_wt, stats_variance, stats_variance_wt
    ]

    for num_idx, num_label in enumerate(['boo', 'baa']):
        for s_key, stats in zip(stats_keys, stats_data):
            for d_idx, d_key in enumerate(distances):
                assert np.allclose(N.metrics['stats'][num_label][s_key][d_key],
                                   stats[num_idx][d_idx],
                                   atol=0.001,
                                   rtol=0)

    # check that mismatching label and array lengths are caught
    for labels, arrs in (
        (['a'], mock_numeric),  # mismatching lengths
        (['a', 'b'], None),  # missing arrays
        (None, mock_numeric)):  # missing labels
        with pytest.raises(ValueError):
            D.compute_aggregated(stats_keys=labels, stats_data_arrs=arrs)
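Each statistic comes in a plain and a _weighted variant; the weighted forms presumably apply the same negative-exponential distance decay used throughout the library, so nearer data points count for more. Under that assumption, the weighted mean over data points reachable within a given threshold would be:

\bar{x}_w = \frac{\sum_i e^{\beta d_i} x_i}{\sum_i e^{\beta d_i}}

with d_i the network distance to data point i and beta the decay parameter paired with the distance threshold.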
Example #16
def test_compute_centrality():
    '''
    Underlying method also tested via test_networks.test_network_centralities
    '''
    G = mock.mock_graph()
    G = graphs.nX_simple_geoms(G)
    betas = np.array([-0.01, -0.005])
    distances = networks.distance_from_beta(betas)
    # generate data structures
    N = networks.Network_Layer_From_nX(G, distances)
    node_data = N._node_data
    edge_data = N._edge_data
    node_edge_map = N._node_edge_map
    # check measures against underlying method
    N = networks.Network_Layer_From_nX(G, distances)
    N.compute_centrality(measures=['node_density'])
    # test against underlying method
    measures_data = centrality.local_centrality(
        node_data,
        edge_data,
        node_edge_map,
        distances,
        betas,
        measure_keys=('node_density', ))
    for d_idx, d_key in enumerate(distances):
        assert np.allclose(N.metrics['centrality']['node_density'][d_key],
                           measures_data[0][d_idx])
    # also check the number of returned types for a few assortments of metrics
    measures = [
        'node_density', 'node_farness', 'node_cycles', 'node_harmonic',
        'segment_density', 'node_betweenness', 'segment_betweenness'
    ]
    np.random.shuffle(measures)  # in place
    # not necessary to do all labels, first few should do
    for min_idx in range(3):
        measure_keys = np.array(measures[min_idx:])
        N = networks.Network_Layer_From_nX(G, distances)
        N.compute_centrality(measures=measures)
        # test against underlying method
        measures_data = centrality.local_centrality(
            node_data,
            edge_data,
            node_edge_map,
            distances,
            betas,
            measure_keys=tuple(measure_keys))
        for m_idx, measure_name in enumerate(measure_keys):
            for d_idx, d_key in enumerate(distances):
                assert np.allclose(
                    N.metrics['centrality'][measure_name][d_key],
                    measures_data[m_idx][d_idx],
                    atol=0.001,
                    rtol=0)
    # check that angular gets passed through
    N_ang = networks.Network_Layer_From_nX(G, distances=[2000])
    N_ang.compute_centrality(measures=['node_harmonic_angular'], angular=True)
    N = networks.Network_Layer_From_nX(G, distances=[2000])
    N.compute_centrality(measures=['node_harmonic'], angular=False)
    assert not np.allclose(
        N_ang.metrics['centrality']['node_harmonic_angular'][2000],
        N.metrics['centrality']['node_harmonic'][2000],
        atol=0.001,
        rtol=0)
    # check that typos, duplicates, and mixed angular / non-angular are caught
    with pytest.raises(ValueError):
        N.compute_centrality(measures=['spelling_typo'])
    with pytest.raises(ValueError):
        N.compute_centrality(measures=['node_density', 'node_density'])
    with pytest.raises(ValueError):
        N.compute_centrality(
            measures=['harmonic_angle', 'node_harmonic_angular'])
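The node_harmonic measure checked here corresponds to harmonic closeness centrality, summing reciprocal shortest-path distances to all nodes reachable within the threshold:

HC(i) = \sum_{j \neq i,\; d_{ij} \le d_{max}} \frac{1}{d_{ij}}

The _angular variant substitutes cumulative angular change for metric distance, which is why the two result sets are asserted to differ above.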
Example #17
def mmm_layercake_a(_graph, _iters=200, _layer_specs=(), random_seed=0):
    if isinstance(_layer_specs, dict):
        _layer_specs = (_layer_specs, )

    if not len(_layer_specs):
        raise AttributeError('''
            No layer specs provided, e.g. ({
                'cap_step': 0.5,
                'dist_threshold': 800,
                'pop_threshold': 800,
                'spill_rate': 1.1
            })
        ''')

    for l_s in _layer_specs:
        for k in ['cap_step', 'dist_threshold', 'pop_threshold', 'spill_rate']:
            if k not in l_s:
                raise AttributeError(f'Missing key {k}')

    _spans = len(_graph)

    # generate the backbone Network Layer
    # distances are drawn from the layer specs (deduplicated)
    distances = [l_s['dist_threshold'] for l_s in _layer_specs]
    distances = list(set(distances))
    Netw_Layer = networks.Network_Layer_From_nX(_graph, distances=distances)
    # generate population data layer
    Pop_Layer = generate_data_layer(_spans, 1, Netw_Layer, _randomised=False)
    # population state and map
    # for this experiment assignments are not changing
    pop_state = np.full(len(Pop_Layer.uids), 20.0)  # use floats!
    # for plotting density onto the network nodes
    pop_map = np.full((_iters, _spans), 0.0)
    # generate the landuse substrate
    # keep to 1 location per node for visualisation's sake
    # randomisation is immaterial because all values will be initialised to 0.0
    Landuse_Layer = generate_data_layer(_spans,
                                        1,
                                        Netw_Layer,
                                        _randomised=False)
    landuse_maps = []
    capacitance_maps = []
    spillover_maps = []
    for _ in _layer_specs:
        landuse_maps.append(np.full((_iters, _spans), 0.0))
        capacitance_maps.append(np.full((_iters, _spans), 0.0))
        spillover_maps.append(np.full((_iters, _spans), 0.0))

    n_layers = len(_layer_specs)
    # per layer
    landuse_states = np.full((n_layers, len(Landuse_Layer.uids)), 0.0)
    capacitances = np.full((n_layers, _spans), 0.0)
    assigned_trips_actual = np.full((n_layers, _spans), 0.0)
    constrained_spillovers = np.full((n_layers, _spans), 0.0)
    netw_flows_actual = np.full((n_layers, _spans), 0.0)

    # left and right spatial gradients
    right_gradient = np.zeros_like(pop_state)
    left_gradient = np.zeros_like(pop_state)
    # get max and min
    ma = 0
    mi = np.inf
    for n, d in _graph.nodes(data=True):
        x = d['x']
        if x > ma:
            ma = x
        if x < mi:
            mi = x
    # set gradient strength
    for i, (n, d) in enumerate(_graph.nodes(data=True)):
        x = d['x']
        right_gradient[i] = (x - mi) / (ma - mi)
        left_gradient[i] = 1 - right_gradient[i]

    # track fifths of the iterations for switching between gradient scenarios
    inc_5 = int(_iters / 5)

    # set random seed for reproducibility and comparison across scenarios
    np.random.seed(random_seed)

    # iterate
    for n in tqdm(range(_iters)):

        # calculate neighbourhood density
        if n < inc_5:
            pop_intensity = pop_state
        elif n < inc_5 * 2:
            pop_intensity = right_gradient * pop_state
        elif n < inc_5 * 3:
            pop_intensity = left_gradient * pop_state
        elif n < inc_5 * 4:
            pop_intensity = pop_state * 2
        else:
            pop_intensity = pop_state / 2
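        # worked example of the schedule, assuming the default _iters=200 (inc_5=40):
        # iters 0-39 use the plain pop_state, 40-79 the right gradient, 80-119 the left
        # gradient, 120-159 doubled intensity, and 160-199 halved intensity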
        # record the current intensity (the underlying pop_state itself never changes in this experiment)
        pop_map[n] = pop_intensity

        # identify locations for landuse development
        # enforce single landuse per location - identify free parcels
        squashed_landuse_states = np.sum(landuse_states, axis=0)

        # resets
        assigned_trips_actual.fill(0)
        constrained_spillovers.fill(0)

        for i, l_s in enumerate(_layer_specs):
            # record current landuse state
            # strength = landuse_states[i] * capacitances[i]
            landuse_maps[i][n] = landuse_states[i]

            # compute singly constrained realised flows
            Landuse_Layer.model_singly_constrained('assigned_flows',
                                                   Pop_Layer._data,
                                                   Landuse_Layer._data,
                                                   pop_intensity,
                                                   landuse_states[i])
            assigned_trips_actual[i] = Netw_Layer.metrics['models'][
                'assigned_flows'][l_s['dist_threshold']]['assigned_trips']
            flows = Netw_Layer.metrics['models']['assigned_flows'][
                l_s['dist_threshold']]['network_flows']

            # reciprocate spillovers
            spills = assigned_trips_actual[i] * l_s['spill_rate']
            Landuse_Layer.model_singly_constrained('assigned_spillovers',
                                                   Landuse_Layer._data,
                                                   Landuse_Layer._data, spills,
                                                   squashed_landuse_states)
            constrained_spillovers[i] = Netw_Layer.metrics['models'][
                'assigned_spillovers'][l_s['dist_threshold']]['assigned_trips']
            spillovers = Netw_Layer.metrics['models']['assigned_spillovers'][
                l_s['dist_threshold']]['assigned_trips']

            # TODO:
            # important: distributing flows this way behaves differently from the manner below
            # the manner below immediately seeds the lagging edge of landuses, causing folds or pulses
            # whereas this manner mainly seeds the leading edge, nearer the actual flows
            # i.e. back-fill situations are more likely to head in the other direction
            # netw_flows_actual[i] = flows + spillovers
            # spillover_maps[i][n] = spillovers

        squashed_flows = np.sum(assigned_trips_actual, axis=0)
        squashed_spillovers = np.sum(constrained_spillovers, axis=0)
        for i, l_s in enumerate(_layer_specs):
            # distribute flows & spillovers - this does not strictly take competition into account, but is easier than a Furness process
            # TODO - spatial version: asymmetrical betweenness or some other gravity model wouldn't take competition into account...?

            Landuse_Layer.compute_stats_single('flow_intensity',
                                               squashed_flows)
            flows = Netw_Layer.metrics['stats']['flow_intensity'][
                'mean_weighted'][l_s['dist_threshold']]

            Landuse_Layer.compute_stats_single('spillover_intensity',
                                               squashed_spillovers)
            spillovers = Netw_Layer.metrics['stats']['spillover_intensity'][
                'mean_weighted'][l_s['dist_threshold']]
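            # note: 'mean_weighted' is the distance-weighted mean aggregated within the
            # distance threshold - assuming the usual negative-exponential decay weights,
            # roughly sum_j(w_j * x_j) / sum_j(w_j) with w_j = exp(-beta * d_j) - so this
            # step acts as a spatial smoothing of the squashed flows and spillovers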

            # TODO: see above
            netw_flows_actual[i] = flows + spillovers
            spillover_maps[i][n] = spillovers

        # compute caps
        for i, l_s in enumerate(_layer_specs):
            # squash inside the loop to capture changes from previous iter
            squashed_landuse_states = np.sum(landuse_states, axis=0)
            # deduce flows and update capacitances
            flows = np.copy(netw_flows_actual[i])
            flows -= l_s['pop_threshold']
            flows /= l_s['pop_threshold']
            flows *= l_s['cap_step']
            flows = np.clip(flows, -l_s['cap_step'], l_s['cap_step'])
            capacitances[i] += flows
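            # worked example, assuming pop_threshold=800 and cap_step=0.5: a node carrying
            # flows of 1200 yields (1200 - 800) / 800 * 0.5 = 0.25, i.e. a quarter-step
            # increase; flows of 400 yield -0.25; the clip bounds any single-iteration
            # change to [-0.5, 0.5]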
            # seed new locations only where unmet potential remains
            t = netw_flows_actual[i]
            potential = int(np.nansum(t) / l_s['pop_threshold'])
            existing = np.nansum(t > l_s['pop_threshold'])
            new = potential - existing
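            # e.g. total flows of 3200 against pop_threshold=800 support int(3200 / 800) = 4
            # locations; if 3 nodes already exceed the threshold then new = 4 - 3 = 1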
            if new <= 0:
                rd_idx = np.random.randint(0, _spans)
                capacitances[i][rd_idx] = 1
            else:
                # prepare jitter - only scale jitter if not all == 0, e.g. first iter, otherwise use plain jitter
                # jitter = np.random.random(_spans)
                # jitter_scale = np.nanmax(flows) - np.nanmin(flows)
                # if jitter_scale:
                # jitter *= jitter_scale  # * l_s['explore_rate']
                # add jitter to flow
                # jittered = flows + jitter
                # sort by highest jittered flow
                seed_idx = np.argsort(flows, kind='mergesort')[::-1]
                # can't use np.intersect1d because it will sort indices
                seed_idx = seed_idx[np.in1d(
                    seed_idx, np.where(squashed_landuse_states == 0))]
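                # e.g. if seed_idx is ordered [5, 2, 7] by descending flow and parcels
                # {2, 5} are free, the boolean mask from np.in1d preserves the flow
                # ordering -> [5, 2], whereas np.intersect1d would return the
                # index-sorted [2, 5]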
                seed_idx = seed_idx[:new]
                # normalise seeds and then add to capacitances for continuous changes
                # for single-element arrays the min-subtracted value is zero and dividing by
                # nanmax would give 0 / 0, so treat that case separately
                if len(seed_idx) == 1:
                    capacitances[i][seed_idx] += 1
                elif len(seed_idx) != 0:
                    seed_vals = flows[seed_idx]
                    seed_vals -= np.nanmin(seed_vals)
                    seed_vals /= np.nanmax(seed_vals)
                    # seed_vals *= l_s['cap_step']
                    capacitances[i][seed_idx] += seed_vals

            # constrain capacitance
            capacitances[i] = np.clip(capacitances[i], 0, 1)
            # deactivate dead landuses and activate new
            off_idx = np.intersect1d(np.where(capacitances[i] <= 0),
                                     np.where(squashed_landuse_states == 1))
            on_idx = np.intersect1d(np.where(capacitances[i] >= 1),
                                    np.where(squashed_landuse_states == 0))
            landuse_states[i][off_idx] = 0
            landuse_states[i][on_idx] = 1
            # record capacitance state
            capacitance_maps[i][n] = capacitances[i]

    return pop_map, landuse_maps, capacitance_maps, spillover_maps
Example No. 18
plt.style.use('./matplotlibrc')

base_path = path.dirname(__file__)

#
#
# INTRO PLOT
G = mock.mock_graph()
plot.plot_nX(G, path='graph.png', labels=True, dpi=150)

# INTRO EXAMPLE PLOTS
G = graphs.nX_simple_geoms(G)
G = graphs.nX_decompose(G, 20)

N = networks.Network_Layer_From_nX(G, distances=[400, 800])
N.compute_centrality(measures=['segment_harmonic'])

data_dict = mock.mock_data_dict(G, random_seed=25)
D = layers.Data_Layer_From_Dict(data_dict)
D.assign_to_network(N, max_dist=400)
landuse_labels = mock.mock_categorical_data(len(data_dict), random_seed=25)
D.hill_branch_wt_diversity(landuse_labels, qs=[0])
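# note: Hill diversity at q=0 reduces to richness, i.e. a count of distinct landuse
# classes, here weighted by network branch distances (the hill_branch_wt measure)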
G_metrics = N.to_networkX()

segment_harmonic_vals = []
mixed_uses_vals = []
for node, data in G_metrics.nodes(data=True):
    segment_harmonic_vals.append(
        data['metrics']['centrality']['segment_harmonic'][800])
    mixed_uses_vals.append(
Example No. 19
def mmm_layercake_b(_graph,
                    _iters=200,
                    _layer_specs=(),
                    seed=False,
                    random_seed=0):
    if isinstance(_layer_specs, dict):
        _layer_specs = (_layer_specs, )

    if not len(_layer_specs):
        raise AttributeError('''
            No layer specs provided: e.g. ({
                'cap_step': 0.5,
                'dist_threshold': 800,
                'pop_threshold': 800,
                'explore_rate': 0.5
            })
        ''')

    for l_s in _layer_specs:
        for k in [
                'cap_step', 'dist_threshold', 'pop_threshold', 'explore_rate'
        ]:
            if k not in l_s:
                raise AttributeError(f'Missing key {k}')

    _spans = len(_graph)

    # generate the backbone Network Layer
    # derive the set of unique distance thresholds from the layer specs
    distances = [l_s['dist_threshold'] for l_s in _layer_specs]
    distances = list(set(distances))
    Netw_Layer = networks.Network_Layer_From_nX(_graph, distances=distances)
    # generate population data layer - assignment happens internally - note randomised=False
    Pop_Layer = generate_data_layer(_spans, 1, Netw_Layer, _randomised=False)
    # population state and map
    # for this experiment assignments are not changing
    pop_state = np.full(len(Pop_Layer.uids), 20.0)  # use floats!
    # for plotting density onto the network nodes
    pop_map = np.full((_iters, _spans), 0.0)
    # generate the landuse substrate
    # keep to 1 location per node for visualisation's sake
    Landuse_Layer = generate_data_layer(_spans,
                                        1,
                                        Netw_Layer,
                                        _randomised=False)
    landuse_maps = []
    capacitance_maps = []
    for _ in _layer_specs:
        landuse_maps.append(np.full((_iters, _spans), 0.0))
        capacitance_maps.append(np.full((_iters, _spans), 0.0))

    n_layers = len(_layer_specs)

    landuse_states = np.full((n_layers, len(Landuse_Layer.uids)),
                             0.0)  # per layer
    capacitances = np.full((n_layers, _spans), 0.0)
    assigned_trips_actual = np.full((n_layers, _spans), 0.0)
    assigned_trips_potential = np.full((n_layers, _spans), 0.0)
    netw_flow_actual = np.full((n_layers, _spans), 0.0)
    netw_flow_potential = np.full((n_layers, _spans), 0.0)

    # left and right gradients
    gradient = np.arange(_spans)
    gradient = gradient / np.max(gradient)
    right_gradient = np.full(_spans, 0.0)
    mid = int(_spans / 2)
    right_gradient[mid:] = gradient[:mid]
    right_gradient[:mid] = gradient[mid:]
    left_gradient = np.flip(right_gradient)
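    # worked example, assuming _spans=6: gradient = [0, 0.2, 0.4, 0.6, 0.8, 1.0] and mid=3,
    # so right_gradient = [0.6, 0.8, 1.0, 0.0, 0.2, 0.4] (the two halves swapped) and
    # left_gradient is its mirror image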

    # track fifths of the iterations for switching between gradient scenarios
    inc_5 = int(_iters / 5)

    # set random seed for reproducibility and comparison across scenarios
    np.random.seed(random_seed)
    # seed
    if seed:
        for n in range(n_layers):
            rand_idx = np.random.randint(0, _spans)
            landuse_states[n][rand_idx] = 1

    # iterate
    for n in tqdm(range(_iters)):

        # calculate neighbourhood density
        pop_intensity = np.copy(pop_state)
        if n < inc_5:
            pass
        elif n < inc_5 * 2:
            pop_intensity *= left_gradient
        elif n < inc_5 * 3:
            pop_intensity *= right_gradient
        elif n < inc_5 * 4:
            pop_intensity *= 2
        else:
            pop_intensity /= 2
        # record the current intensity (the underlying pop_state itself never changes in this experiment)
        pop_map[n] = pop_intensity

        # apportion flow - once per layer
        assigned_trips_actual.fill(0)
        netw_flow_actual.fill(0)
        assigned_trips_potential.fill(0)
        netw_flow_potential.fill(0)
        flat_lu = np.full(_spans, 1.0)
        for i, l_s in enumerate(_layer_specs):
            # record current landuse state
            landuse_maps[i][n] = landuse_states[i]

            # compute singly constrained realised flows
            Landuse_Layer.model_singly_constrained('assigned_flows',
                                                   Pop_Layer._data,
                                                   Landuse_Layer._data,
                                                   pop_intensity,
                                                   landuse_states[i])
            assigned_trips_actual[i] += Netw_Layer.metrics['models'][
                'assigned_flows'][l_s['dist_threshold']]['assigned_trips']
            netw_flow_actual[i] += Netw_Layer.metrics['models'][
                'assigned_flows'][l_s['dist_threshold']]['network_flows']

            # compute potential flows (for exploration)
            # this is an even surface and doesn't need smoothing
            Landuse_Layer.model_singly_constrained('potential_flows',
                                                   Pop_Layer._data,
                                                   Landuse_Layer._data,
                                                   pop_intensity, flat_lu)
            assigned_trips_potential[i] += Netw_Layer.metrics['models'][
                'potential_flows'][l_s['dist_threshold']]['assigned_trips']
            netw_flow_potential[i] += Netw_Layer.metrics['models'][
                'potential_flows'][l_s['dist_threshold']]['network_flows']

        # compute caps
        squashed_netw_flow_actual = np.sum(netw_flow_actual, axis=0)
        squashed_netw_flow_potential = np.sum(netw_flow_potential, axis=0)
        for i, l_s in enumerate(_layer_specs):
            # deduce flows and update capacitances
            flows = np.copy(squashed_netw_flow_actual)
            flows -= l_s['pop_threshold']
            flows /= l_s['pop_threshold']
            flows *= l_s['cap_step']
            flows = np.clip(flows, -l_s['cap_step'], l_s['cap_step'])
            capacitances[i] += flows
            # constrain capacitance
            capacitances[i] = np.clip(capacitances[i], 0, 1)

            # deactivate dead landuses and activate new
            # TODO: the non-linearity of the activation / deactivation seems significant...?
            off_idx = np.intersect1d(np.where(capacitances[i] <= 0),
                                     np.where(landuse_states[i] == 1))
            landuse_states[i][off_idx] = 0
            on_idx = np.intersect1d(np.where(capacitances[i] >= 1),
                                    np.where(landuse_states[i] == 0))
            landuse_states[i][on_idx] = 1
            # record capacitance state
            capacitance_maps[i][n] = capacitances[i]

            # seed new locations only where unmet potential remains
            potential = np.nansum(
                assigned_trips_potential[i]) / l_s['pop_threshold']
            # count locations whose realised trips exceed the threshold (cf. mmm_layercake_a)
            existing = np.nansum(
                assigned_trips_actual[i] > l_s['pop_threshold'])
            # VS: existing = np.nansum(landuse_states[i] == 1)
            new = int(potential - existing)
            if new > 0:
                blended = squashed_netw_flow_actual * (
                    1 - l_s['explore_rate']
                ) + squashed_netw_flow_potential * l_s['explore_rate']
                jitter = np.random.randn(
                    _spans) * l_s['cap_step'] - l_s['cap_step'] / 2
                blended += jitter
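                # e.g. explore_rate=0.25 blends 75% realised flows with 25% potential flows;
                # note the jitter is normally distributed (randn) and centred on -cap_step / 2
                # rather than 0, but a common offset drops out of the argsort ordering below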
                # sort by highest jittered flow
                seed_idx = np.argsort(blended, kind='mergesort')[::-1]
                # can't use np.intersect1d because it will sort indices
                seed_idx = seed_idx[np.in1d(
                    seed_idx,
                    np.where(landuse_states[i] == 0)[0])]
                seed_idx = seed_idx[:new]
                # normalise seeds and then add to capacitances for continuous changes
                # for single-element arrays the min-subtracted value is zero and dividing by
                # nanmax would give 0 / 0, so treat that case separately
                if len(seed_idx) == 1:
                    capacitances[i][seed_idx] += 1
                elif len(seed_idx) != 0:
                    seed_vals = blended[seed_idx]
                    seed_vals -= np.nanmin(seed_vals)
                    seed_vals /= np.nanmax(seed_vals)
                    # seed_vals *= l_s['cap_step']
                    capacitances[i][seed_idx] += seed_vals

    return pop_map, landuse_maps, capacitance_maps
Example No. 20
def mmm_layercake_c(_graph, _iters=200, _layer_specs=(), random_seed=0):
    if isinstance(_layer_specs, dict):
        _layer_specs = (_layer_specs, )

    if not len(_layer_specs):
        raise AttributeError('''
            No layer specs provided: e.g. ({
                'cap_step': 0.5,
                'dist_threshold': 800,
                'pop_threshold': 800,
                'explore_rate': 0.5,
                'comp_rate': 0.5
            })
        ''')

    for l_s in _layer_specs:
        for k in [
                'cap_step', 'dist_threshold', 'pop_threshold', 'explore_rate',
                'comp_rate'
        ]:
            if k not in l_s:
                raise AttributeError(f'Missing key {k}')

    _spans = len(_graph)

    # generate the backbone Network Layer
    # derive the set of unique distance thresholds from the layer specs
    distances = [l_s['dist_threshold'] for l_s in _layer_specs]
    distances = list(set(distances))
    Netw_Layer = networks.Network_Layer_From_nX(_graph, distances=distances)
    # generate population data layer - assignment happens internally - note randomised=False
    Pop_Layer = generate_data_layer(_spans, 1, Netw_Layer, _randomised=False)
    # population state and map
    # for this experiment assignments are not changing
    pop_state = np.full(len(Pop_Layer.uids), 20.0)  # use floats!
    # for plotting density onto the network nodes
    pop_map = np.full((_iters, _spans), 0.0)
    # generate the landuse substrate
    # keep to 1 location per node for visualisation's sake
    Landuse_Layer = generate_data_layer(_spans,
                                        1,
                                        Netw_Layer,
                                        _randomised=False)
    landuse_maps = []
    capacitance_maps = []
    flow_maps = []
    for _ in _layer_specs:
        landuse_maps.append(np.full((_iters, _spans), 0.0))
        capacitance_maps.append(np.full((_iters, _spans), 0.0))
        flow_maps.append(np.full((_iters, _spans), 0.0))

    n_layers = len(_layer_specs)
    # per layer
    landuse_states = np.full((n_layers, len(Landuse_Layer.uids)), 0.0)
    capacitances = np.full((n_layers, _spans), 0.0)
    assigned_trips_actual = np.full((n_layers, _spans), 0.0)
    assigned_trips_potential = np.full((n_layers, _spans), 0.0)
    netw_flow_actual = np.full((n_layers, _spans), 0.0)
    netw_flow_potential = np.full((n_layers, _spans), 0.0)

    # left and right spatial gradients
    right_gradient = np.zeros_like(pop_state)
    left_gradient = np.zeros_like(pop_state)
    # get max and min
    ma = 0
    mi = np.inf
    for n, d in _graph.nodes(data=True):
        x = d['x']
        if x > ma:
            ma = x
        if x < mi:
            mi = x
    # set gradient strength
    for i, (n, d) in enumerate(_graph.nodes(data=True)):
        x = d['x']
        right_gradient[i] = (x - mi) / (ma - mi)
        left_gradient[i] = 1 - right_gradient[i]

    # track fifths of the iterations for switching between gradient scenarios
    inc_5 = int(_iters / 5)

    # set random seed for reproducibility and comparison across scenarios
    np.random.seed(random_seed)

    # iterate
    for n in tqdm(range(_iters)):

        # calculate neighbourhood density
        if n < inc_5:
            pop_intensity = pop_state
        elif n < inc_5 * 2:
            pop_intensity = right_gradient * pop_state
        elif n < inc_5 * 3:
            pop_intensity = left_gradient * pop_state
        elif n < inc_5 * 4:
            pop_intensity = pop_state * 2
        else:
            pop_intensity = pop_state / 2
        # record the current intensity (the underlying pop_state itself never changes in this experiment)
        pop_map[n] = pop_intensity

        # apportion flow - once per layer
        assigned_trips_actual.fill(0)
        netw_flow_actual.fill(0)
        assigned_trips_potential.fill(0)
        netw_flow_potential.fill(0)
        flat_lu = np.full(_spans, 1.0)
        for i, l_s in enumerate(_layer_specs):
            # record current landuse state
            landuse_maps[i][n] = landuse_states[i]

            # compute singly constrained realised flows
            Landuse_Layer.model_singly_constrained('assigned_flows',
                                                   Pop_Layer._data,
                                                   Landuse_Layer._data,
                                                   pop_intensity,
                                                   landuse_states[i])
            assigned_trips_actual[i] += Netw_Layer.metrics['models'][
                'assigned_flows'][l_s['dist_threshold']]['assigned_trips']
            netw_flow_actual[i] += Netw_Layer.metrics['models'][
                'assigned_flows'][l_s['dist_threshold']]['network_flows']

            # record flow state
            flow_maps[i][n] = netw_flow_actual[i]

            # compute potential flows (for exploration)
            # this is an even surface and doesn't need smoothing
            Landuse_Layer.model_singly_constrained('potential_flows',
                                                   Pop_Layer._data,
                                                   Landuse_Layer._data,
                                                   pop_intensity, flat_lu)
            assigned_trips_potential[i] += Netw_Layer.metrics['models'][
                'potential_flows'][l_s['dist_threshold']]['assigned_trips']
            netw_flow_potential[i] += Netw_Layer.metrics['models'][
                'potential_flows'][l_s['dist_threshold']]['network_flows']

        # compute landuse health, capacitances, and seeding
        # the tension between existing stores and latent competition (for given capacity) is important to dynamics
        # the non-linearity between activation and deactivation is also important
        squashed_netw_flow_actual = np.sum(netw_flow_actual, axis=0)
        squashed_netw_flow_potential = np.sum(netw_flow_potential, axis=0)
        for i, l_s in enumerate(_layer_specs):
            # assign health to existing locations based on actual constrained flows - using continuous form
            # don't need to filter by active because constrained flows only available for active locations
            health = np.copy(assigned_trips_actual[i])
            health -= l_s['pop_threshold']
            health /= l_s['pop_threshold']
            health *= l_s['cap_step']
            health = np.clip(health, -l_s['cap_step'], l_s['cap_step'])
            landuse_states[i] += health
            landuse_states[i] = np.clip(landuse_states[i], 0, 1)

            # capacitances are fluid - they constantly decay and need constant energy inputs to persist or grow
            capacitances[i] /= 2
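            # halving each iteration is exponential decay with a half-life of one iteration;
            # with a roughly constant seed input c per step, the recurrence x <- x / 2 + c
            # would converge towards 2c, so persistence requires ongoing flow inputs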
            # determine latent competition - start by measuring actual and potential landuse intensity
            potential = np.nansum(assigned_trips_potential[i])
            actual = np.nansum(assigned_trips_actual[i])
            # high potential represents full competition for flows, new entrants are trying to capture from existing
            # count existing locations above the threshold (per-location comparison, cf. mmm_layercake_a)
            high_potential = potential / l_s['pop_threshold'] - np.nansum(
                assigned_trips_actual[i] > l_s['pop_threshold'])
            # low potential represents weak competition for flows, new entrants only trying to capture untapped potential
            low_potential = (potential - actual) / l_s['pop_threshold']
            # apply competition rate
            new = int(low_potential * (1 - l_s['comp_rate']) +
                      high_potential * l_s['comp_rate'])
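            # worked example, assuming pop_threshold=800, potential trips of 4000, and
            # actual trips of 2400 concentrated at 2 locations above the threshold:
            # high_potential = 4000 / 800 - 2 = 3; low_potential = (4000 - 2400) / 800 = 2;
            # with comp_rate=0.5, new = int(2 * 0.5 + 3 * 0.5) = 2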
            if new > 0:
                # deduce flows - this does not need to be exact, so long as it remains proportional to the flows
                blended = squashed_netw_flow_actual * (
                    1 - l_s['explore_rate']
                ) + squashed_netw_flow_potential * l_s['explore_rate']
                jitter = np.random.randn(
                    _spans) * l_s['cap_step'] - l_s['cap_step'] / 2
                blended += jitter
                # sort by highest jittered flow
                seed_idx = np.argsort(blended, kind='mergesort')[::-1]
                # can't use np.intersect1d because it will sort indices
                # new entrants try to steal flows from existing locations, so skip currently active landuse locations
                seed_idx = seed_idx[np.in1d(
                    seed_idx,
                    np.where(landuse_states[i] == 0)[0])]
                # snip off at new
                seed_idx = seed_idx[:new]
                # normalise seeds and then add to capacitances for continuous changes
                # for single-element arrays the min-subtracted value is zero and dividing by
                # nanmax would give 0 / 0, so treat that case separately
                if len(seed_idx) == 1:
                    capacitances[i][seed_idx] += 1
                elif len(seed_idx) != 0:
                    seed_vals = blended[seed_idx]
                    seed_vals -= np.nanmin(seed_vals)
                    seed_vals /= np.nanmax(seed_vals)
                    # seed_vals *= l_s['cap_step']
                    capacitances[i][seed_idx] += seed_vals

            on_idx = np.intersect1d(np.where(capacitances[i] >= 1),
                                    np.where(landuse_states[i] == 0))
            landuse_states[i][on_idx] = 1
            # record capacitance state
            capacitance_maps[i][n] = capacitances[i]

    return pop_map, landuse_maps, capacitance_maps, flow_maps
Example No. 21
B) calculate density (normalise)
C) calculate density weighted centrality
D) randomly select two population units and relocate from lower to higher (enforce min and max constraint)
'''
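# a minimal sketch of step D above - not part of the original script - assuming numpy as
# np (as used throughout), with hypothetical arguments: pop_state holds per-node
# population counts and cent holds the density-weighted centrality used to rank nodes
def relocate_pair(pop_state, cent, unit=1.0, min_pop=0.0, max_pop=np.inf):
    # pick two node indices at random (they may coincide, in which case nothing moves)
    a, b = np.random.randint(0, len(pop_state), 2)
    # move one unit from the lower-centrality node to the higher-centrality node
    lo, hi = (a, b) if cent[a] < cent[b] else (b, a)
    # enforce the min and max population constraints before transferring
    if pop_state[lo] - unit >= min_pop and pop_state[hi] + unit <= max_pop:
        pop_state[lo] -= unit
        pop_state[hi] += unit
    return pop_state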

iters = 400
spans = 200
spine_nx = generate_graph(_spans=spans)
spine_nx_stepped = generate_graph(_spans=spans, _stepped=True)

util_funcs.plt_setup()
fig, axes = plt.subplots(1, 2, figsize=(12, 20))

for ax_n, graph in enumerate([spine_nx, spine_nx_stepped]):
    # generate the Network Layer
    Netw_Layer = networks.Network_Layer_From_nX(graph,
                                                distances=[400, 800, np.inf])
    # generate population layer
    Pop_Layer = generate_data_layer(_spans=spans,
                                    _intensity=20,
                                    _Network_Layer=Netw_Layer)
    # population state and map
    pop_state = np.full(len(Pop_Layer.uids), 1.0)  # use floats!
    pop_map = np.full((iters, spans), 0.0)
    # iterate
    for n in tqdm(range(iters)):
        # POPULATION
        # record current state
        pop_map[n] = set_current_num(Pop_Layer._data, Netw_Layer._nodes)
        # calculate the effective density
        # each population point is a single unit
        # the state technically remains the same; it is the x, y coordinates and assignments that change!