def test_dict():
    """Cluster from a dict of area values with AZPReactiveTabu and compare
    the result against the known optimal partition. The parameters max_it,
    k1, k2 and n_reg are defined elsewhere in the test module."""
    value_dict = dataframe_to_dict(gdf, attr_str)
    cluster_object = AZPReactiveTabu(max_iterations=max_it, k1=k1, k2=k2,
                                     random_state=0)
    cluster_object.fit_from_dict(neighbors_dict, value_dict, n_regions=n_reg)
    result = region_list_from_array(cluster_object.labels_)
    compare_region_lists(result, optimal_clustering)
def test_dict(method):
    """Cluster from a dict of area values with PRegionsExact and compare the
    result against the known optimal partition. `method` selects the exact
    formulation and is injected by pytest (parametrization or fixture defined
    elsewhere in the module)."""
    value_dict = dataframe_to_dict(gdf, attr_str)
    cluster_object = PRegionsExact()
    cluster_object.fit_from_dict(neighbors_dict, value_dict, n_regions=2,
                                 method=method)
    result = region_list_from_array(cluster_object.labels_)
    compare_region_lists(result, optimal_clustering)
def test_dict():
    """Cluster from a dict of area values with AZPSimulatedAnnealing and
    compare the result against the known optimal partition."""
    value_dict = dataframe_to_dict(gdf, attr_str)
    cluster_object = AZPSimulatedAnnealing(init_temperature=1,
                                           max_iterations=2, random_state=0)
    cluster_object.fit_from_dict(neighbors_dict, value_dict, n_regions=2)
    result = region_list_from_array(cluster_object.labels_)
    compare_region_lists(result, optimal_clustering)
def test_dict():
    """Cluster from a dict of area values with AZP and compare the result
    against the known optimal partition."""
    value_dict = dataframe_to_dict(gdf, attr_str)
    cluster_object = AZP(random_state=0)
    cluster_object.fit_from_dict(neighbors_dict, value_dict, n_regions=2)
    result = region_list_from_array(cluster_object.labels_)
    compare_region_lists(result, optimal_clustering)
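# The sketch below is not part of the original suite. It illustrates, under
# the assumption that the same dict-based entry point also accepts the
# vector-valued fixture double_attr_dict defined in the module setup, how the
# multi-attribute case could be exercised. The expected partition is assumed
# to coincide with the scalar case because both attribute columns are copies.
def test_dict_multi_attr_sketch():
    cluster_object = AZP(random_state=0)
    cluster_object.fit_from_dict(neighbors_dict, double_attr_dict,
                                 n_regions=2)
    result = region_list_from_array(cluster_object.labels_)
    compare_region_lists(result, optimal_clustering)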
    np.array([0, 0, 0, 1, 0, 0, 1, 1, 1]))
attr_str = "attr"
spatially_extensive_attr_str = "spatially_extensive_attr"
gdf = GeoDataFrame(
    {
        attr_str: attr,
        spatially_extensive_attr_str: spatially_extensive_attr
    },
    geometry=[
        Polygon([(x, y), (x, y + 1), (x + 1, y + 1), (x + 1, y)])
        for y in range(3) for x in range(3)
    ])

# for tests with scalar attr & spatially_extensive_attr per area
attr = attr.reshape(-1, 1)
spatially_extensive_attr = spatially_extensive_attr.reshape(-1, 1)
adj, graph, neighbors_dict, w = convert_from_geodataframe(gdf)
attr_dict = dataframe_to_dict(gdf, attr_str)
spatially_extensive_attr_dict = dataframe_to_dict(
    gdf, spatially_extensive_attr_str)

# for tests where attr & spatially_extensive_attr are vectors in each area
double_attr = np.column_stack((attr, attr))
double_spatially_extensive_attr = np.column_stack(
    (spatially_extensive_attr, spatially_extensive_attr))
double_threshold = np.hstack((threshold, threshold))
double_attr_dict = dataframe_to_dict(gdf, [attr_str] * 2)
double_spatially_extensive_attr_dict = dataframe_to_dict(
    gdf, [spatially_extensive_attr_str] * 2)
from region.tests.util import region_list_from_array, convert_from_geodataframe
from region.util import dataframe_to_dict

attr = np.array([726.7, 623.6, 487.3,
                 200.4, 245.0, 481.0,
                 170.9, 225.9, 226.9])
attr_str = "attr"
gdf = GeoDataFrame(
    {attr_str: attr},
    geometry=[Polygon([(x, y),  # 3x3-grid
                       (x, y+1),
                       (x+1, y+1),
                       (x+1, y)])
              for y in range(3) for x in range(3)]
)
optimal_clustering = region_list_from_array(
    np.array([0, 0, 0, 1, 1, 0, 1, 1, 1]))

# for tests with a scalar attr per area
attr = attr.reshape(-1, 1)
adj, graph, neighbors_dict, w = convert_from_geodataframe(gdf)
attr_dict = dataframe_to_dict(gdf, attr_str)

# for tests where attr is a vector in each area
double_attr = np.column_stack((attr, attr))
double_attr_dict = dataframe_to_dict(gdf, [attr_str] * 2)
double_attr_str = [attr_str] * 2
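# A small sanity-check sketch (an addition, not in the original file). It
# relies only on names defined above and assumes region_list_from_array is
# deterministic, so two calls on the same label vector compare equal.
def test_fixture_sanity_sketch():
    # optimal_clustering above was built from exactly this label vector
    labels = np.array([0, 0, 0, 1, 1, 0, 1, 1, 1])
    assert region_list_from_array(labels) == optimal_clustering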
threshold = 2
optimal_clustering = region_list_from_array(np.array([0, 0, 1, 1]))
attr_str = "attr"
spatially_extensive_attr_str = "spatially_extensive_attr"
gdf = GeoDataFrame(
    {attr_str: attr,
     spatially_extensive_attr_str: spatially_extensive_attr},
    geometry=[Polygon([(x, y),
                       (x, y+1),
                       (x+1, y+1),
                       (x+1, y)])
              for y in range(2) for x in range(2)]
)

# for tests with scalar attr & spatially_extensive_attr per area
attr = attr.reshape(-1, 1)
spatially_extensive_attr = spatially_extensive_attr.reshape(-1, 1)
adj, graph, neighbors_dict, w = convert_from_geodataframe(gdf)
attr_dict = dataframe_to_dict(gdf, attr_str)
spatially_extensive_attr_dict = dataframe_to_dict(
    gdf, spatially_extensive_attr_str)

# for tests where attr & spatially_extensive_attr are vectors in each area
double_attr = np.column_stack((attr, attr))
double_spatially_extensive_attr = np.column_stack((spatially_extensive_attr,
                                                   spatially_extensive_attr))
double_threshold = np.hstack((threshold, threshold))
double_attr_dict = dataframe_to_dict(gdf, [attr_str] * 2)
double_spatially_extensive_attr_dict = dataframe_to_dict(
    gdf, [spatially_extensive_attr_str] * 2)
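# The sketch below is hypothetical and not part of the original suite. It
# shows how the threshold and spatially-extensive fixtures above could drive
# a max-p-regions test; the class name MaxPRegionsHeu and the exact
# fit_from_dict signature are assumptions modeled on the dict-based tests in
# this suite, not taken from the original files.
def test_dict_maxp_sketch():
    cluster_object = MaxPRegionsHeu(random_state=0)
    cluster_object.fit_from_dict(neighbors_dict, attr_dict,
                                 spatially_extensive_attr_dict,
                                 threshold=threshold)
    result = region_list_from_array(cluster_object.labels_)
    compare_region_lists(result, optimal_clustering)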