def run_stats(self):
    """Main function which runs the process."""
    # Get the common fields.
    self.admin_layer = self.cbx_aggregation_layer.currentLayer()
    input_name = self.admin_layer.name()
    field = self.cbx_indicator_field.currentField()
    self.layer = QgsProject.instance().mapLayersByName(input_name)[0]

    # Output.
    self.output_file_path = self.le_output_filepath.text()

    try:
        self.button_box_ok.setDisabled(True)
        # noinspection PyArgumentList
        QApplication.setOverrideCursor(Qt.WaitCursor)
        # noinspection PyArgumentList
        QApplication.processEvents()

        if not self.admin_layer:
            raise NoLayerProvidedException

        if not self.admin_layer and self.use_point_layer:
            raise NoLayerProvidedException

        crs_admin_layer = self.admin_layer.crs()

        # Output.
        if not self.output_file_path:
            temp_file = NamedTemporaryFile(
                delete=False, suffix='-geopublichealth.shp')
            self.output_file_path = temp_file.name
            temp_file.flush()
            temp_file.close()
        else:
            with open(self.output_file_path, 'w') as document:
                pass

        admin_layer_provider = self.layer.dataProvider()
        fields = admin_layer_provider.fields()

        if admin_layer_provider.fields().indexFromName(self.name_field) != -1:
            raise FieldExistingException(field=self.name_field)

        fields.append(QgsField('LISA_P', QVariant.Double))
        fields.append(QgsField('LISA_Z', QVariant.Double))
        fields.append(QgsField('LISA_Q', QVariant.Int))
        fields.append(QgsField('LISA_I', QVariant.Double))
        fields.append(QgsField('LISA_C', QVariant.Double))

        # QgsVectorFileWriter has been deprecated since QGIS 3.10. However, the
        # replacement create() does NOT flush features unless QGIS is closed:
        # options = QgsVectorFileWriter.SaveVectorOptions()
        # options.driverName = "ESRI Shapefile"
        # file_writer = QgsVectorFileWriter.create(
        #     self.output_file_path, fields, QgsWkbTypes.Polygon,
        #     self.admin_layer.crs(), QgsCoordinateTransformContext(), options)
        # This is currently a bug (https://github.com/qgis/QGIS/issues/35021),
        # so the deprecated constructor is kept for now.
        file_writer = QgsVectorFileWriter(
            self.output_file_path,
            'utf-8',
            fields,
            QgsWkbTypes.Polygon,
            self.admin_layer.crs(),
            'ESRI Shapefile')

        if self.cbx_contiguity.currentIndex() == 0:  # queen
            print("Info: Local Moran's using queen contiguity")
            # PySAL 2.0 API change:
            # https://github.com/pysal/pysal/blob/master/MIGRATING.md
            w = Queen.from_shapefile(self.admin_layer.source())
        else:  # 1 for rook
            print("Info: Local Moran's using rook contiguity")
            w = Rook.from_shapefile(self.admin_layer.source())

        # PySAL 2.0 no longer exposes pysal.open, so read the attribute table
        # with geopandas instead:
        # https://stackoverflow.com/questions/59455383/pysal-does-not-have-attribute-open
        import geopandas
        f = geopandas.read_file(
            self.admin_layer.source().replace('.shp', '.dbf'))
        y = f[str(field)]

        lm = Moran_Local(y, w, transformation="r", permutations=999)

        # Could make the significance level an option.
        sig_q = lm.q * (lm.p_sim <= 0.05)

        count = self.admin_layer.featureCount()
        for i, feature in enumerate(self.admin_layer.getFeatures()):
            attributes = feature.attributes()
            attributes.append(float(lm.p_sim[i]))
            attributes.append(float(lm.z_sim[i]))
            attributes.append(int(lm.q[i]))
            attributes.append(float(lm.Is[i]))
            attributes.append(int(sig_q[i]))

            new_feature = QgsFeature()
            new_geom = QgsGeometry(feature.geometry())
            new_feature.setAttributes(attributes)
            new_feature.setGeometry(new_geom)
            file_writer.addFeature(new_feature)

        del file_writer

        self.output_layer = QgsVectorLayer(
            self.output_file_path,
            "LISA Moran's I - " + field,
            'ogr')
        QgsProject.instance().addMapLayer(self.output_layer)
        self.add_symbology()
        self.signalStatus.emit(3, tr('Successful process'))

    except GeoPublicHealthException as e:
        display_message_bar(msg=e.msg, level=e.level, duration=e.duration)

    finally:
        self.button_box_ok.setDisabled(False)
        # noinspection PyArgumentList
        QApplication.restoreOverrideCursor()
        # noinspection PyArgumentList
        QApplication.processEvents()
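# A follow-on note, not part of the plugin code above: in esda's Moran_Local the
# quadrant codes stored in LISA_Q are 1 = high-high, 2 = low-high, 3 = low-low,
# 4 = high-low, and LISA_C keeps that code only where p_sim <= 0.05 (0 otherwise).
# A minimal sketch of labelling the written shapefile for symbology, assuming
# geopandas is available and `output_file_path` points at the file produced above:
import geopandas as gpd

lisa = gpd.read_file(output_file_path)
labels = {0: 'Not significant', 1: 'High-High', 2: 'Low-High',
          3: 'Low-Low', 4: 'High-Low'}
lisa['LISA_label'] = lisa['LISA_C'].astype(int).map(labels)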
def setUp(self):
    w = Rook.from_shapefile(examples.get_path("columbus.shp"))
    # `popen` is presumably the libpysal file reader aliased at module level,
    # e.g. `from libpysal.io import open as popen`.
    f = popen(examples.get_path("columbus.dbf"))
    w.transform = "r"
    self.w = w
    self.y = np.array(f.by_col["CRIME"])
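# A minimal sketch of exercising the fixture above outside the test class,
# assuming esda provides Moran_Local and that numpy, libpysal and its example
# datasets are installed; the permutation count here is only illustrative.
import numpy as np
from libpysal import examples
from libpysal.io import open as popen
from libpysal.weights import Rook
from esda.moran import Moran_Local

w = Rook.from_shapefile(examples.get_path("columbus.shp"))
w.transform = "r"
f = popen(examples.get_path("columbus.dbf"))
y = np.array(f.by_col["CRIME"])

lm = Moran_Local(y, w, transformation="r", permutations=99)
print(lm.Is[:5], lm.p_sim[:5], lm.q[:5])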
def multiscalar(geoDF, populationCensusTif):
    geoDF = geoDF.to_crs(communityArea_CRS)
    plt.figure()
    geoDF.plot(column='Gonorrhea in Females', cmap='OrRd', figsize=(10, 10),
               legend=True, scheme='quantiles')
    # geoplot.polyplot(geoDF, figsize=(20, 20))
    # pc = gpd.read_file(populationCensus)
    # print(pc.columns)
    # print(pc.crs)
    # pc.to_file(os.path.join(dataRoot, "populationCensus_save.shp"))
    # Feature_to_Raster(os.path.join(dataRoot, "populationCensus_save.shp"),
    #                   os.path.join(dataRoot, "popuC_1.tif"), 20,
    #                   field_name=['CENSUS_BLO'], NoData_value=-9999)

    # Zonal statistics of the population raster for each polygon.
    geoDF_sample = zonal_stats(geoDF, populationCensusTif,
                               stats="count min mean max median")
    geoDF_sample_df = pd.DataFrame(geoDF_sample)
    geoDF_sample_df.rename(columns={"count": "popuCount",
                                    "min": "popuMin",
                                    "mean": "popuMean",
                                    "max": "popuMax",
                                    "median": "popuMedian"}, inplace=True)
    geoDF_merge = geoDF.merge(geoDF_sample_df,
                              left_on=geoDF.index,
                              right_on=geoDF_sample_df.index)

    groups_list = ['Cancer (All Sites)', 'Lung Cancer', 'Tuberculosis',
                   'Gonorrhea in Females']
    geoDF_merge[groups_list] = geoDF_merge[groups_list].fillna(0)
    geoDF_merge["sumGroup"] = geoDF_merge[groups_list].sum(axis=1)
    # Composition of each group relative to the total over all groups.
    for group in groups_list:
        geoDF_merge['comp_' + group] = geoDF_merge[group] / geoDF_merge["sumGroup"]

    df_pts = geoDF_merge.copy()
    df = df_pts
    w_queen = Queen.from_dataframe(df)
    w_rook = Rook.from_dataframe(df)
    w_kernel_1k = Kernel.from_dataframe(df_pts, bandwidth=2500)
    w_kernel_2k = Kernel.from_dataframe(df_pts, bandwidth=3000)

    # 01 - show the spatial weights structure
    fig, ax = plt.subplots(1, 4, figsize=(16, 4))
    plot_spatial_weights(w_queen, df, ax=ax[0])
    ax[0].set_title('queen')
    plot_spatial_weights(w_rook, df, ax=ax[1])
    ax[1].set_title('rook')
    plot_spatial_weights(w_kernel_1k, df, ax=ax[2])
    ax[2].set_title('kernel 1k')
    plot_spatial_weights(w_kernel_2k, df, ax=ax[3])
    ax[3].set_title('kernel 2k')

    # 02 - show the local environment implied by each weights object
    def plot_local_environment(w, ax):
        from segregation.spatial.spatial_indexes import _build_local_environment
        d = _build_local_environment(df, groups_list, w)
        d['geometry'] = df.geometry
        d = gpd.GeoDataFrame(d)
        d.plot('Lung Cancer', k=6, scheme='quantiles', ax=ax)
        ax.axis('off')

    plt.figure()
    fig, axs = plt.subplots(1, 4, figsize=(16, 4))
    for i, wtype in enumerate([w_queen, w_rook, w_kernel_1k, w_kernel_2k]):
        plot_local_environment(w=wtype, ax=axs[i])

    # 03 - different local environments result in different segregation statistics
    # aspatial
    multiInfo = MultiInformationTheory(df, groups_list).statistic
    print("aspatial:", multiInfo)
    # rook neighborhood
    rookNeighborhood = SpatialInformationTheory(df, groups_list, w=w_rook).statistic
    print("rook neighborhood:", rookNeighborhood)
    # queen neighborhood
    queenNeighbor = SpatialInformationTheory(df, groups_list, w=w_queen).statistic
    print("queen neighborhood:", queenNeighbor)
    # 1 kilometer kernel distance neighborhood
    kernelDisNeighbor_a = SpatialInformationTheory(df, groups_list, w=w_kernel_1k).statistic
    print("kernel distance neighborhood_A:", kernelDisNeighbor_a)
    # 2 kilometer kernel distance neighborhood
    kernelDisNeighbor_b = SpatialInformationTheory(df, groups_list, w=w_kernel_2k).statistic
    print("kernel distance neighborhood_B:", kernelDisNeighbor_b)

    distances = [1000., 2000., 3000., 4000., 5000.]  # note these are floats
    euclidian_profile = compute_segregation_profile(df_pts, groups=groups_list,
                                                    distances=distances)
    print("euclidian_profile", euclidian_profile)

    # 04 - the local street network can have a big impact on the profile
    df = df.to_crs(epsg=4326)
    # Downloading a street network can take a while, so save it once and read
    # it back in with pandana:
    # net = get_osm_network(df)
    # net.save_hdf5('dc_network.h5')
    dc_networkPath = r"C:\Users\richi\omen-richiebao\omen-code\Chicago_code\Chicago Health_spatioTemporalAnalysis\data\dc_network.h5"
    net = Network.from_hdf5(dc_networkPath)
    # net = Network.from_hdf5('dc_network.h5')

    # Three segregation profiles: the euclidean one above plus two network-based ones.
    network_linear_profile = compute_segregation_profile(
        df_pts, groups=groups_list, network=net, distances=distances)
    network_exponential_profile = compute_segregation_profile(
        df_pts, groups=groups_list, network=net, distances=distances,
        decay='exp', precompute=False)

    plt.figure()
    fig, ax = plt.subplots(figsize=(12, 8))
    # Cast dict_keys/dict_values to lists before plotting; passing them directly
    # raises "TypeError: float() argument must be a string or a number, not 'dict_keys'".
    ax.scatter(list(euclidian_profile.keys()), list(euclidian_profile.values()),
               c='green', label='euclidian exp')
    ax.plot(list(euclidian_profile.keys()), list(euclidian_profile.values()), c='green')
    ax.scatter(list(network_linear_profile.keys()), list(network_linear_profile.values()),
               c='red', label='net linear')
    ax.plot(list(network_linear_profile.keys()), list(network_linear_profile.values()), c='red')
    ax.scatter(list(network_exponential_profile.keys()), list(network_exponential_profile.values()),
               c='blue', label='net exp')
    ax.plot(list(network_exponential_profile.keys()), list(network_exponential_profile.values()), c='blue')
    plt.xlabel('meters')
    plt.ylabel('SIT')
    plt.legend()
    plt.show()
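# A hypothetical invocation of multiscalar(), not part of the original script.
# The file names and the module-level `communityArea_CRS` constant are
# placeholders that would need to match the actual project data.
import geopandas as gpd

communityArea_CRS = 'EPSG:26971'  # assumed projected CRS for the Chicago data
health_by_area = gpd.read_file('community_area_health.shp')  # hypothetical vector input
multiscalar(health_by_area, 'population_census.tif')         # hypothetical population raster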
def greedy(
    gdf,
    strategy="balanced",
    balance="count",
    min_colors=4,
    sw="queen",
    min_distance=None,
    silence_warnings=True,
    interchange=False,
):
    """
    Color a GeoDataFrame using various strategies of greedy (topological) colouring.

    Attempts to color a GeoDataFrame using as few colors as possible, where no
    neighbours can have the same color as the feature itself. Offers various
    strategies ported from QGIS or implemented within networkx for greedy graph
    coloring.

    ``greedy`` will return a pandas.Series representing assigned color codes.

    Parameters
    ----------
    gdf : GeoDataFrame
        GeoDataFrame
    strategy : str (default 'balanced')
        Determine the coloring strategy. ``'balanced'`` is an algorithm based on
        QGIS topological coloring, which aims for a visual balance defined by the
        balance parameter. Other options are those supported by
        networkx.greedy_color:

        * ``'largest_first'``
        * ``'random_sequential'``
        * ``'smallest_last'``
        * ``'independent_set'``
        * ``'connected_sequential_bfs'``
        * ``'connected_sequential_dfs'``
        * ``'connected_sequential'`` (alias for the previous strategy)
        * ``'saturation_largest_first'``
        * ``'DSATUR'`` (alias for the previous strategy)

        For details see
        https://networkx.github.io/documentation/stable/reference/algorithms/generated/networkx.algorithms.coloring.greedy_color.html
    balance : str (default 'count')
        If strategy is ``'balanced'``, determine the method of color balancing.

        * ``'count'`` attempts to balance the number of features per color.
        * ``'area'`` attempts to balance the area covered by each color.
        * ``'centroid'`` attempts to balance the distance between colors based
          on the distance between centroids.
        * ``'distance'`` attempts to balance the distance between colors based
          on the distance between geometries. Slower than ``'centroid'``, but
          more precise.

        ``'centroid'`` and ``'distance'`` are significantly slower than the other
        options, especially for larger GeoDataFrames. Apart from ``'count'``, all
        require the CRS to be projected (not in degrees) to ensure metric values
        are correct.
    min_colors : int (default 4)
        If strategy is ``'balanced'``, define the minimal number of colors to be
        used.
    sw : 'queen', 'rook' or libpysal.weights.W (default 'queen')
        If min_distance is None, one can pass a ``libpysal.weights.W`` object
        denoting neighbors, or let greedy generate one based on ``'queen'`` or
        ``'rook'`` contiguity.
    min_distance : float
        Set the minimal distance between colors. If min_distance is not None, a
        slower algorithm for generating spatial weights is used, based on the
        intersection between geometries; min_distance is then used as the
        tolerance of that intersection.
    silence_warnings : bool (default True)
        Silence libpysal warnings when creating spatial weights.
    interchange : bool (default False)
        Use the color interchange algorithm (applicable for networkx strategies).
        For details see
        https://networkx.github.io/documentation/stable/reference/algorithms/generated/networkx.algorithms.coloring.greedy_color.html

    Examples
    --------
    Default:

    >>> gdf['greedy_colors'] = greedy(gdf)

    Balanced by area:

    >>> gdf['balanced_area'] = greedy(gdf, strategy='balanced',
    ...                               balance='area')

    Using rook adjacency:

    >>> gdf['rook_adjacency'] = greedy(gdf, sw='rook')

    Adding a minimal distance between colors:

    >>> gdf['min_distance'] = greedy(gdf, min_distance=100)

    Using a different coloring strategy:

    >>> gdf['smallest_last'] = greedy(gdf, strategy='smallest_last')

    Returns
    -------
    color : pd.Series
        pandas.Series representing assigned color codes
    """
    if min_distance is not None:
        sw = _geos_sw(gdf, tolerance=min_distance,
                      silence_warnings=silence_warnings)

    if not isinstance(sw, W):
        if sw == "queen":
            sw = Queen.from_dataframe(gdf, ids=gdf.index.to_list(),
                                      silence_warnings=silence_warnings)
        elif sw == "rook":
            sw = Rook.from_dataframe(gdf, ids=gdf.index.to_list(),
                                     silence_warnings=silence_warnings)

    if strategy == "balanced":
        return pd.Series(_balanced(gdf, sw, balance=balance,
                                   min_colors=min_colors))
    elif strategy in STRATEGIES:
        color = nx.greedy_color(sw.to_networkx(), strategy=strategy,
                                interchange=interchange)
        color = pd.Series(color).sort_index()
        color.index = gdf.index
        return color
    else:
        raise ValueError("{} is not a valid strategy.".format(strategy))
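# A minimal usage sketch, not from the original module: assign colors with the
# default balanced strategy and draw them. The naturalearth_lowres dataset is
# only an assumed example input; any polygon GeoDataFrame would do.
import geopandas as gpd
import matplotlib.pyplot as plt

world = gpd.read_file(gpd.datasets.get_path('naturalearth_lowres'))
world['colors'] = greedy(world)  # balanced by feature count, queen contiguity
world.plot('colors', categorical=True, cmap='Set3', edgecolor='grey', linewidth=0.3)
plt.show()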
import sys
import os

import libpysal
import geopandas
from libpysal.weights import Queen, Rook, KNN
import matplotlib.pyplot as plt

sys.path.append(os.path.abspath('..'))

libpysal.examples.available()
libpysal.examples.explain('mexico')
pth = libpysal.examples.get_path("mexicojoin.shp")
gdf = geopandas.read_file(pth)

ax = gdf.plot(edgecolor='grey', facecolor='w')
ax.set_axis_off()

# Rook contiguity: polygons are neighbours when they share an edge.
w_rook = Rook.from_dataframe(gdf)
f, ax = w_rook.plot(gdf, ax=ax,
                    edge_kws=dict(color='r', linestyle=':', linewidth=1),
                    node_kws=dict(marker=''))
ax.set_axis_off()

gdf.head()

# Queen contiguity: polygons are neighbours when they share an edge or a vertex.
w_queen = Queen.from_dataframe(gdf)
plt.show()

ax = gdf.plot(edgecolor='grey', facecolor='w')
f, ax = w_queen.plot(gdf, ax=ax,
                     edge_kws=dict(color='r', linestyle=':', linewidth=1),
                     node_kws=dict(marker=''))
ax.set_axis_off()
plt.show()
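# A brief follow-on sketch, not in the original script: the imported but unused
# KNN class builds a k-nearest-neighbour graph from the polygon centroids, which
# can be plotted the same way as the contiguity weights above; k=4 is arbitrary.
w_knn = KNN.from_dataframe(gdf, k=4)
ax = gdf.plot(edgecolor='grey', facecolor='w')
f, ax = w_knn.plot(gdf, ax=ax,
                   edge_kws=dict(color='b', linestyle=':', linewidth=1),
                   node_kws=dict(marker=''))
ax.set_axis_off()
plt.show()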