Example #1
import networkx as nx
from shapely import geometry

from cityseer.tools import graphs


def grid_ville():
    '''
    Type: Graph
    Number of nodes: 1200
    Number of edges: 1320
    Average degree:   2.2000
    Gridville summed lengths: 24200.000000000062
    '''
    grid_ville = nx.Graph()
    # divisor and extents
    div = 12
    ext = 1100
    # add nodes
    for x_id in range(div):
        for y_id in range(div):
            grid_ville.add_node(f'{x_id}_{y_id}',
                                x=ext / div * x_id,
                                y=ext / div * y_id)
    # add edges
    sum_lengths = 0
    for x_id in range(div):
        for y_id in range(div):
            node_set = []
            # last row and column do not have a next row / column
            # add edge in the x direction
            if y_id < div - 1:
                a_nd_start = f'{x_id}_{y_id}'
                a_nd_end = f'{x_id}_{y_id + 1}'
                node_set.append((a_nd_start, a_nd_end))
            # add edge in the y direction
            if x_id < div - 1:
                b_nd_start = f'{x_id}_{y_id}'
                b_nd_end = f'{x_id + 1}_{y_id}'
                node_set.append((b_nd_start, b_nd_end))
            # for x direction and y direction node sets, add edges and edge geoms
            for start, end in node_set:
                start_x = grid_ville.nodes[start]['x']
                start_y = grid_ville.nodes[start]['y']
                end_x = grid_ville.nodes[end]['x']
                end_y = grid_ville.nodes[end]['y']
                geom = geometry.LineString([(start_x, start_y),
                                            (end_x, end_y)])
                grid_ville.add_edge(start, end, geom=geom)
                sum_lengths += geom.length
    # decompose edges into segments no longer than 20m
    grid_ville = graphs.nX_decompose(grid_ville, 20)
    # print info
    print(nx.info(grid_ville))
    # report sum
    print(f'Gridville summed lengths: {sum_lengths}')
    # ready
    return grid_ville
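
A minimal sketch of what the nX_decompose call above does to a single edge geom, assuming only shapely and numpy (this illustrates the invariant the docstring reports, not cityseer's implementation): each LineString is cut into equal sub-segments no longer than the target length, so the summed length is preserved.

import numpy as np
from shapely import geometry
from shapely.ops import substring

def decompose_geom(geom, max_len):
    # cut into n equal pieces so that no piece exceeds max_len
    n = int(np.ceil(geom.length / max_len))
    step = geom.length / n
    return [substring(geom, i * step, (i + 1) * step) for i in range(n)]

pieces = decompose_geom(geometry.LineString([(0, 0), (0, 1100 / 12)]), 20)
assert np.isclose(sum(p.length for p in pieces), 1100 / 12)  # total length preserved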
Example #2
def test_to_networkX(primal_graph):
    # also see test_graphs.test_networkX_from_graph_maps for underlying graph maps version

    # check round trip to and from graph maps results in same graph
    # explicitly set live and weight params for equality checks
    # graph_maps_from_networkX generates these implicitly if missing
    G = graphs.nX_decompose(primal_graph, decompose_max=20)
    for n in G.nodes():
        G.nodes[n]['live'] = bool(np.random.randint(0, 2))
    for s, e, k in G.edges(keys=True):
        G[s][e][k]['imp_factor'] = np.random.randint(0, 2)

    # add random data to check persistence at other end
    baa_node = None
    for n in G.nodes():
        baa_node = n
        G.nodes[n]['boo'] = 'baa'
        break
    boo_edge = None
    for s, e, k in G.edges(keys=True):
        boo_edge = (s, e, k)
        G[s][e][k]['baa'] = 'boo'
        break

    # test with metrics
    N = networks.NetworkLayerFromNX(G, distances=[500])
    N.node_centrality(measures=['node_harmonic'])
    metrics_dict = N.metrics_to_dict()
    G_round_trip = N.to_networkX()
    for n, d in G.nodes(data=True):
        assert G_round_trip.nodes[n]['x'] == d['x']
        assert G_round_trip.nodes[n]['y'] == d['y']
        assert G_round_trip.nodes[n]['live'] == d['live']
    for s, e, k, d in G.edges(keys=True, data=True):
        assert G_round_trip[s][e][k]['geom'] == d['geom']
        assert G_round_trip[s][e][k]['imp_factor'] == d['imp_factor']
    # check that metrics came through
    for uid, metrics in metrics_dict.items():
        assert G_round_trip.nodes[uid]['metrics'] == metrics
    # check data persistence
    assert G_round_trip.nodes[baa_node]['boo'] == 'baa'
    assert G_round_trip[boo_edge[0]][boo_edge[1]][boo_edge[2]]['baa'] == 'boo'
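
The keys=True iteration above is needed because primal_graph is a networkx MultiGraph (Example #4 passes it as networkX_multigraph): parallel edges between the same node pair are addressed as G[s][e][k]. A standalone illustration of the access pattern:

import networkx as nx

MG = nx.MultiGraph()
MG.add_edge('a', 'b', imp_factor=1)  # stored under key 0
MG.add_edge('a', 'b', imp_factor=2)  # parallel edge, stored under key 1
for s, e, k, d in MG.edges(keys=True, data=True):
    print(s, e, k, d)  # ('a', 'b', 0, {...}) then ('a', 'b', 1, {...})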
Example #3
import os

import matplotlib.pyplot as plt
from shapely import geometry

from cityseer.metrics import networks, layers
from cityseer.tools import mock, graphs, plot

base_path = os.getcwd()
plt.style.use('matplotlibrc')

###
# INTRO PLOT
G = mock.mock_graph()
plot.plot_nX(G, labels=True, node_size=80, path='images/graph.png', dpi=150)

# INTRO EXAMPLE PLOTS
G = graphs.nX_simple_geoms(G)
G = graphs.nX_decompose(G, 20)

N = networks.NetworkLayerFromNX(G, distances=[400, 800])
N.segment_centrality(measures=['segment_harmonic'])

data_dict = mock.mock_data_dict(G, random_seed=25)
D = layers.DataLayerFromDict(data_dict)
D.assign_to_network(N, max_dist=400)
landuse_labels = mock.mock_categorical_data(len(data_dict), random_seed=25)
D.hill_branch_wt_diversity(landuse_labels, qs=[0])
G_metrics = N.to_networkX()

segment_harmonic_vals = []
mixed_uses_vals = []
for node, data in G_metrics.nodes(data=True):
    # key paths assumed from the measures computed above: segment harmonic at
    # the 800m threshold, hill branch-weighted diversity at q=0 / 400m
    segment_harmonic_vals.append(data['metrics']['centrality']['segment_harmonic'][800])
    mixed_uses_vals.append(data['metrics']['mixed_uses']['hill_branch_wt'][0][400])
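
A hedged continuation for context: the two per-node arrays can be rendered side by side with matplotlib (the original script presumably uses plot.plot_nX with node colours; this scatter sketch only assumes the x / y node attributes):

xs = [d['x'] for _, d in G_metrics.nodes(data=True)]
ys = [d['y'] for _, d in G_metrics.nodes(data=True)]
fig, axes = plt.subplots(1, 2, figsize=(10, 5))
axes[0].scatter(xs, ys, c=segment_harmonic_vals, s=8)
axes[0].set_title('segment harmonic (800m)')
axes[1].scatter(xs, ys, c=mixed_uses_vals, s=8)
axes[1].set_title('hill branch wt diversity (q=0, 400m)')
plt.show()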
Example #4
def test_nX_from_graph_maps(primal_graph):
    # also see test_networks.test_to_networkX for tests on implementation via Network layer

    # check round trip to and from graph maps results in same graph
    # explicitly set live params for equality checks
    # graph_maps_from_networkX generates these implicitly if missing
    for n in primal_graph.nodes():
        primal_graph.nodes[n]['live'] = bool(np.random.randint(0, 2))

    # test directly from and to graph maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(primal_graph)
    G_round_trip = graphs.nX_from_graph_maps(node_uids, node_data, edge_data, node_edge_map)
    assert list(G_round_trip.nodes) == list(primal_graph.nodes)
    assert list(G_round_trip.edges) == list(primal_graph.edges)

    # check with metrics dictionary
    N = networks.NetworkLayerFromNX(primal_graph, distances=[500, 1000])

    N.node_centrality(measures=['node_harmonic'])
    data_dict = mock.mock_data_dict(primal_graph)
    landuse_labels = mock.mock_categorical_data(len(data_dict))
    D = layers.DataLayerFromDict(data_dict)
    D.assign_to_network(N, max_dist=400)
    D.compute_landuses(landuse_labels,
                       mixed_use_keys=['hill', 'shannon'],
                       accessibility_keys=['a', 'c'],
                       qs=[0, 1])
    metrics_dict = N.metrics_to_dict()
    # without backbone
    G_round_trip_data = graphs.nX_from_graph_maps(node_uids,
                                                  node_data,
                                                  edge_data,
                                                  node_edge_map,
                                                  metrics_dict=metrics_dict)
    for uid, metrics in metrics_dict.items():
        assert G_round_trip_data.nodes[uid]['metrics'] == metrics
    # with backbone
    G_round_trip_data = graphs.nX_from_graph_maps(node_uids,
                                                  node_data,
                                                  edge_data,
                                                  node_edge_map,
                                                  networkX_multigraph=primal_graph,
                                                  metrics_dict=metrics_dict)
    for uid, metrics in metrics_dict.items():
        assert G_round_trip_data.nodes[uid]['metrics'] == metrics

    # test with decomposed
    G_decomposed = graphs.nX_decompose(primal_graph, decompose_max=20)
    # set live explicitly
    for n in G_decomposed.nodes():
        G_decomposed.nodes[n]['live'] = bool(np.random.randint(0, 2))
    node_uids_d, node_data_d, edge_data_d, node_edge_map_d = graphs.graph_maps_from_nX(G_decomposed)

    G_round_trip_d = graphs.nX_from_graph_maps(node_uids_d, node_data_d, edge_data_d, node_edge_map_d)
    assert list(G_round_trip_d.nodes) == list(G_decomposed.nodes)
    for n, iter_node_data in G_round_trip_d.nodes(data=True):
        assert n in G_decomposed
        assert iter_node_data['live'] == G_decomposed.nodes[n]['live']
        assert iter_node_data['x'] == G_decomposed.nodes[n]['x']
        assert iter_node_data['y'] == G_decomposed.nodes[n]['y']
    assert G_round_trip_d.edges == G_decomposed.edges

    # error checks for when using backbone graph:
    # mismatching numbers of nodes
    corrupt_G = primal_graph.copy()
    corrupt_G.remove_node(0)
    with pytest.raises(ValueError):
        graphs.nX_from_graph_maps(node_uids,
                                  node_data,
                                  edge_data,
                                  node_edge_map,
                                  networkX_multigraph=corrupt_G)
    # mismatching node uid
    with pytest.raises(KeyError):
        corrupt_node_uids = list(node_uids)
        corrupt_node_uids[0] = 'boo'
        graphs.nX_from_graph_maps(corrupt_node_uids,
                                  node_data,
                                  edge_data,
                                  node_edge_map,
                                  networkX_multigraph=primal_graph)
    # missing edge
    with pytest.raises(KeyError):
        corrupt_primal_graph = primal_graph.copy()
        corrupt_primal_graph.remove_edge(0, 1)
        graphs.nX_from_graph_maps(node_uids,
                                  node_data,
                                  edge_data,
                                  node_edge_map,
                                  networkX_multigraph=corrupt_primal_graph)
Example #5
def test_nX_decompose(primal_graph):
    # check that missing geoms throw an error
    G = primal_graph.copy()
    del G[0][1][0]['geom']
    with pytest.raises(KeyError):
        graphs.nX_decompose(G, 20)

    # check that non-LineString geoms throw an error
    G = primal_graph.copy()
    for s, e, k in G.edges(keys=True):
        G[s][e][k]['geom'] = geometry.Point([G.nodes[s]['x'], G.nodes[s]['y']])
        break
    with pytest.raises(TypeError):
        graphs.nX_decompose(G, 20)

    # test decomposition
    G = primal_graph.copy()
    # first clean the graph to strip disconnected looping component
    # this gives a start == end node situation for testing
    G_simple = graphs.nX_remove_filler_nodes(G)
    G_decompose = graphs.nX_decompose(G_simple, 50)

    # from cityseer.tools import plot
    # plot.plot_nX(G_simple, labels=True, node_size=80, plot_geoms=True)
    # plot.plot_nX(G_decompose, plot_geoms=True)
    assert nx.number_of_nodes(G_decompose) == 292
    assert nx.number_of_edges(G_decompose) == 314
    for s, e in G_decompose.edges():
        assert G_decompose.number_of_edges(s, e) == 1

    # check that total lengths are the same
    G_lens = 0
    for s, e, e_data in G_simple.edges(data=True):
        G_lens += e_data['geom'].length
    G_d_lens = 0
    for s, e, e_data in G_decompose.edges(data=True):
        G_d_lens += e_data['geom'].length
    assert np.allclose(G_lens, G_d_lens, atol=0.001, rtol=0)

    # check that geoms are correctly flipped
    G_forward = primal_graph.copy()
    G_forward_decompose = graphs.nX_decompose(G_forward, 20)

    G_backward = primal_graph.copy()
    for i, (s, e, k, d) in enumerate(G_backward.edges(data=True, keys=True)):
        # flip every third geom
        if i % 3 == 0:
            G_backward[s][e][k]['geom'] = geometry.LineString(d['geom'].coords[::-1])
    G_backward_decompose = graphs.nX_decompose(G_backward, 20)

    for n, d in G_forward_decompose.nodes(data=True):
        assert d['x'] == G_backward_decompose.nodes[n]['x']
        assert d['y'] == G_backward_decompose.nodes[n]['y']

    # test that geom coordinate mismatch throws an error
    G = primal_graph.copy()
    for k in ['x', 'y']:
        for n in G.nodes():
            G.nodes[n][k] = G.nodes[n][k] + 1
            break
        with pytest.raises(ValueError):
            graphs.nX_decompose(G, 20)
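
The length-invariance assertion in the middle of this test generalises to a small helper; a hedged sketch for any graph whose edges carry shapely LineString 'geom' attributes:

def total_geom_length(G):
    # sum the lengths of all edge geometries in the graph
    return sum(d['geom'].length for _, _, d in G.edges(data=True))

With this, the check above reduces to np.allclose(total_geom_length(G_simple), total_geom_length(G_decompose), atol=0.001, rtol=0).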
Example #6
def test_assign_to_network(primal_graph):
    # create additional dead-end scenario
    primal_graph.remove_edge(14, 15)
    primal_graph.remove_edge(15, 28)
    # G = graphs.nX_auto_edge_params(G)
    G = graphs.nX_decompose(primal_graph, 50)
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(G)
    # generate data
    data_dict = mock.mock_data_dict(G, random_seed=25)
    data_uids, data_map = layers.data_map_from_dict(data_dict)
    # override data point locations for test cases vis-a-vis isolated nodes and isolated edges
    data_map[18, :2] = [701200, 5719400]
    data_map[39, :2] = [700750, 5720025]
    data_map[26, :2] = [700400, 5719525]
    # assignments at 1600m visually confirmed in plots
    data_map_1600 = data_map.copy()
    data_map_1600 = data.assign_to_network(data_map_1600,
                                           node_data,
                                           edge_data,
                                           node_edge_map,
                                           max_dist=1600)
    targets = np.array([
        [0, 164, 163],
        [1, 42, 241],
        [2, 236, 235],
        [3, 48, 262],
        [4, 211, 212],
        [5, 236, 235],
        [6, 58, 57],
        [7, 72, 5],
        [8, 75, 76],
        [9, 92, 9],
        [10, 61, 62],
        [11, 96, 13],
        [12, 0, 59],
        [13, 98, 99],
        [14, 203, 202],
        [15, 121, 120],
        [16, 48, 262],
        [17, 2, 70],
        [18, 182, 183],
        [19, 158, 157],
        [20, 83, 84],
        [21, 2, np.nan],
        [22, 171, 170],
        [23, 266, 52],
        [24, 83, 84],
        [25, 88, 11],
        [26, 49, np.nan],
        [27, 19, 138],
        [28, 134, 135],
        [29, 262, 46],
        [30, 78, 9],
        [31, 188, 189],
        [32, 180, 181],
        [33, 95, 94],
        [34, 226, 225],
        [35, 110, 111],
        [36, 39, 228],
        [37, 158, 25],
        [38, 88, 87],
        [39, 263, np.nan],
        [40, 120, 121],
        [41, 146, 21],
        [42, 10, 97],
        [43, 119, 118],
        [44, 82, 5],
        [45, 11, 88],
        [46, 100, 99],
        [47, 138, 19],
        [48, 14, np.nan],
        [49, 106, 105]
    ])
    # for debugging
    # from cityseer.tools import plot
    # plot.plot_graph_maps(node_data, edge_data, data_map)
    # assignment map includes data x, data y, nearest assigned, next nearest assigned
    assert np.allclose(data_map_1600[:, 2:],
                       targets[:, 1:],
                       equal_nan=True,
                       atol=0,
                       rtol=0)
    # max distance of 0 should return all NaN
    data_map_test_0 = data_map.copy()
    data_map_test_0 = data.assign_to_network(data_map_test_0,
                                             node_data,
                                             edge_data,
                                             node_edge_map,
                                             max_dist=0)
    assert np.all(np.isnan(data_map_test_0[:, 2]))
    assert np.all(np.isnan(data_map_test_0[:, 3]))
    # max distance of 2000 should return no NaN for nearest
    # there will be some NaN for next nearest
    data_map_test_2000 = data_map.copy()
    data_map_test_2000 = data.assign_to_network(data_map_test_2000,
                                                node_data,
                                                edge_data,
                                                node_edge_map,
                                                max_dist=2000)
    assert not np.any(np.isnan(data_map_test_2000[:, 2]))
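
As the inline comment in the test notes, each assignment-map row holds data x, data y, the nearest assigned network index, and the next-nearest assigned index, with NaN where no assignment was possible within max_dist. A small hedged helper for counting unassigned points under that convention:

import numpy as np

def count_unassigned(data_map):
    # column 2 is the nearest assigned network index; NaN means unassigned
    return int(np.isnan(data_map[:, 2]).sum())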
Example #7
import asyncio

import networkx as nx
import numpy as np
from shapely import geometry

from cityseer.tools import graphs


def york_burb():
    '''
    NX graph summary before pruning
    Name:
    Type: Graph
    Number of nodes: 42590
    Number of edges: 43728
    Average degree:   2.0534
    NX graph summary after pruning
    Name:
    Type: Graph
    Number of nodes: 1298
    Number of edges: 1361
    Average degree:   2.0971
    Yorkburb summed lengths: 23704.85033655231
    '''
    db_config = {
        'host': 'localhost',
        'port': 5433,
        'user': '******',
        'database': 'gareth',
        'password': ''
    }
    # use process loaders to load graph
    york_burb = asyncio.run(inner_york_burb(db_config))
    # sum lengths for reference
    sum_lengths = 0
    for s, e, d in york_burb.edges(data=True):
        sum_lengths += geometry.LineString(d['geom']).length
    print(f'Yorkburb summed lengths: {sum_lengths}')
    # adjust x / y values to smaller coordinate system
    # first pass - find minimums
    min_x = min_y = np.inf
    for n, d in york_burb.nodes(data=True):
        x, y = (d['x'], d['y'])
        if x < min_x:
            min_x = x
        if y < min_y:
            min_y = y
    # second pass - adjust coordinates
    for n in york_burb.nodes():
        old_x = york_burb.nodes[n]['x']
        york_burb.nodes[n]['x'] = old_x - min_x
        old_y = york_burb.nodes[n]['y']
        york_burb.nodes[n]['y'] = old_y - min_y
    # likewise adjust and check geoms
    for s, e, d in york_burb.edges(data=True):
        old_geom = d['geom']
        new_geom = []
        for x, y in old_geom.coords:
            new_geom.append((x - min_x, y - min_y))
        new_geom = geometry.LineString(new_geom)
        d['geom'] = new_geom
        assert old_geom.length == new_geom.length
    # check that total lengths haven't changed
    post_sum_lengths = 0
    for s, e, d in york_burb.edges(data=True):
        post_sum_lengths += geometry.LineString(d['geom']).length
    assert post_sum_lengths == sum_lengths
    # relabel nodes
    rl = {}
    rl_counter = 0
    for n in york_burb.nodes():
        rl[n] = rl_counter
        rl_counter += 1
    york_burb = nx.relabel_nodes(york_burb, rl, copy=True)
    # remove link (shorten dead-end to simplify adding new routes)
    york_burb.remove_edge(1283, 1074)
    # remove node (make way for adjacent edge)
    york_burb.remove_nodes_from([157, 1089, 1144, 1163, 998, 503])
    # add nodes where necessary
    for x, y in [(460379.79, 451844.15), (460402.13, 451866.82),
                 (460429.81, 451876.83), (460462.79, 451626.64),
                 (460160.19, 451843.77), (460147.19, 451864.79),
                 (460140.00, 451826.62), (460107.08, 451863.13),
                 (460160.19, 451797.30), (460104.04, 451788.73),
                 (460188.12, 451423.61), (459840.70, 451434.38),
                 (459913.19, 451389.30)]:
        adj_x = x - min_x
        adj_y = y - min_y
        york_burb.add_node(rl_counter, x=adj_x, y=adj_y)
        rl_counter += 1
    # add missing footpaths
    for start_nd, end_nd in [
        (238, 865), (1117, 43), (797, 67), (918, 797), (795, 653), (365, 797),
        (705, 673), (230, 362), (1068, 1085), (666, 1041), (869, 426),
        (116, 991), (1097, 991), (99, 312), (771, 1113), (1069, 1218),
        (223, 447), (1167, 1186), (643, 1049), (1034, 185), (1189, 886),
        (4, 671), (60, 78), (359, 1188), (540, 1283), (1283, 770), (770, 817),
        (82, 889), (223, 306), (874, 304), (969, 478), (159, 1298),
        (1298, 1299), (1299, 1300), (1300, 1246), (659, 1300), (1028, 1299),
        (624, 1298), (1298, 616), (628, 1303), (1303, 1302), (1302, 160),
        (1302, 1304), (1305, 1304), (1304, 1306), (945, 1307), (1307, 620),
        (1307, 1275), (478, 1301), (1301, 493), (1301, 492), (1180, 1308),
        (1308, 856), (871, 1308), (870, 1309), (1309, 1310), (1310, 1172),
        (1310, 429), (1017, 778)
    ]:
        x_start = (york_burb.nodes[start_nd]['x'])
        y_start = (york_burb.nodes[start_nd]['y'])
        x_end = (york_burb.nodes[end_nd]['x'])
        y_end = (york_burb.nodes[end_nd]['y'])
        geom = geometry.LineString([(x_start, y_start), (x_end, y_end)])
        york_burb.add_edge(start_nd, end_nd, geom=geom)
    # decompose the new edges into segments no longer than 20m
    york_burb = graphs.nX_decompose(york_burb, 20)
    # ready
    return york_burb
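
The two-pass origin shift above is a generic normalisation; a compact sketch of the same idea for any graph with 'x' / 'y' node attributes and LineString 'geom' edge attributes (translation leaves edge lengths unchanged):

def shift_to_origin(G):
    min_x = min(d['x'] for _, d in G.nodes(data=True))
    min_y = min(d['y'] for _, d in G.nodes(data=True))
    # mutate node attribute dicts in place
    for _, d in G.nodes(data=True):
        d['x'] -= min_x
        d['y'] -= min_y
    # rebuild edge geoms in the shifted coordinate system
    for _, _, d in G.edges(data=True):
        d['geom'] = geometry.LineString(
            [(x - min_x, y - min_y) for x, y in d['geom'].coords])
    return G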
Example #8
import networkx as nx
import numpy as np
from shapely import geometry

from cityseer.tools import graphs


def suburb():
    '''
    Number of nodes: 1349
    Number of edges: 1348
    Average degree:   1.9985
    Suburb summed lengths: 23550.0. Last length 14.0625
    '''
    suburb = nx.Graph()
    # set params
    recursions = 7
    distance = 1200
    # set the seed centroid
    node_id = 1
    suburb.add_node(node_id, x=distance / 2, y=distance / 2)
    centroids = [node_id]
    node_id += 1
    # sum geom lengths
    sum_lengths = 0
    last_length = np.inf
    # recursively add centroids and edges
    for i in range(recursions):
        # alternate directions
        x_direction = True
        if i % 2 == 0:
            x_direction = False
            # distance only updated every second cycle
            distance = distance / 2 - 25
        # new centroids - keep separate and then replace at end of loop
        new_centroids = []
        # for each centroid
        for start_id in centroids:
            x_start = suburb.nodes[start_id]['x']
            y_start = suburb.nodes[start_id]['y']
            # add the new nodes and geoms in either direction
            for dist in [distance, -distance]:
                # create the end coordinates
                if x_direction:
                    x_centroid = x_start + dist / 2
                    y_centroid = y_start
                    x_end = x_start + dist
                    y_end = y_start
                else:
                    x_centroid = x_start
                    y_centroid = y_start + dist / 2
                    x_end = x_start
                    y_end = y_start + dist
                # calculate the new centroids and end nodes
                centroid_id = node_id
                node_id += 1
                new_centroids.append(centroid_id)  # add to new centroids
                suburb.add_node(centroid_id, x=x_centroid, y=y_centroid)
                end_id = node_id
                node_id += 1
                suburb.add_node(end_id, x=x_end, y=y_end)
                # create the new geoms and edges
                geom_a = geometry.LineString([(x_start, y_start),
                                              (x_centroid, y_centroid)])
                suburb.add_edge(start_id, centroid_id, geom=geom_a)
                sum_lengths += geom_a.length
                geom_b = geometry.LineString([(x_centroid, y_centroid),
                                              (x_end, y_end)])
                suburb.add_edge(centroid_id, end_id, geom=geom_b)
                sum_lengths += geom_b.length
                # track the last (and smallest) segment length
                last_length = geom_a.length
        centroids = new_centroids

    # decompose new edges into segments no longer than 20m
    suburb = graphs.nX_decompose(suburb, 20)
    # print info
    print(nx.info(suburb))
    # report sum
    print(f'Suburb summed lengths: {sum_lengths}. Last length {last_length}')

    return suburb


def test_decomposed_local_centrality(primal_graph):
    # centralities on the original nodes within the decomposed network should equal non-decomposed workflow
    betas = np.array([0.02, 0.01, 0.005, 0.0008, 0.0])
    distances = networks.distance_from_beta(betas)
    node_measure_keys = ('node_density', 'node_farness', 'node_cycles',
                         'node_harmonic', 'node_beta', 'node_betweenness',
                         'node_betweenness_beta')
    segment_measure_keys = ('segment_density', 'segment_harmonic',
                            'segment_beta', 'segment_betweenness')
    # test a decomposed graph
    G_decomposed = graphs.nX_decompose(primal_graph, 20)
    # graph maps
    node_uids, node_data, edge_data, node_edge_map = graphs.graph_maps_from_nX(
        primal_graph)  # generate node and edge maps
    node_uids_decomp, node_data_decomp, edge_data_decomp, node_edge_map_decomp = graphs.graph_maps_from_nX(
        G_decomposed)
    # non-decomposed case
    node_measures_data = centrality.local_node_centrality(node_data,
                                                          edge_data,
                                                          node_edge_map,
                                                          distances,
                                                          betas,
                                                          node_measure_keys,
                                                          angular=False)
    # decomposed case
    node_measures_data_decomposed = centrality.local_node_centrality(
        node_data_decomp,
        edge_data_decomp,
        node_edge_map_decomp,
        distances,
        betas,
        node_measure_keys,
        angular=False)
    # node
    d_range = len(distances)
    m_range = len(node_measure_keys)
    assert node_measures_data.shape == (m_range, d_range, len(primal_graph))
    assert node_measures_data_decomposed.shape == (m_range, d_range,
                                                   len(G_decomposed))
    # with increasing decomposition:
    # - node based measures will not match
    # - closeness segment measures will match - these measure to the cut endpoints per thresholds
    # - betweenness segment measures won't match - don't measure to cut endpoints
    # segment versions
    segment_measures_data = centrality.local_segment_centrality(
        node_data,
        edge_data,
        node_edge_map,
        distances,
        betas,
        segment_measure_keys,
        angular=False)
    segment_measures_data_decomposed = centrality.local_segment_centrality(
        node_data_decomp,
        edge_data_decomp,
        node_edge_map_decomp,
        distances,
        betas,
        segment_measure_keys,
        angular=False)
    m_range = len(segment_measure_keys)
    assert segment_measures_data.shape == (m_range, d_range, len(primal_graph))
    assert segment_measures_data_decomposed.shape == (m_range, d_range,
                                                      len(G_decomposed))
    for m_idx in range(m_range):
        for d_idx in range(d_range):
            match = np.allclose(
                segment_measures_data[m_idx][d_idx],
                # compare against the original 57 elements (those present prior to decomposition)
                segment_measures_data_decomposed[m_idx][d_idx][:57],
                atol=0.1,
                rtol=0)  # relax precision
            if m_idx in (0, 1, 2):  # closeness-type segment measures should match
                assert match
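
A hedged aside on the distance_from_beta call at the top of this test: it inverts the negative-exponential decay w = exp(-beta * d) at a minimum-weight cutoff, so each beta maps to d_max = -ln(w_min) / beta. Assuming cityseer's default cutoff of w_min = exp(-4) (the zero beta presumably maps to an unbounded threshold and is excluded here):

import numpy as np

for beta in (0.02, 0.01, 0.005, 0.0008):
    # -ln(exp(-4)) / beta = 4 / beta -> 200, 400, 800, 5000 metres
    print(f'beta={beta} -> d_max={-np.log(np.exp(-4)) / beta:.0f}m')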