def test_CircularCompactness(self):
    self.df_buildings["area"] = self.df_buildings.geometry.area
    self.df_buildings["circom"] = mm.CircularCompactness(
        self.df_buildings, "area"
    ).series
    check = self.df_buildings.area[0] / (
        _circle_area(
            list(self.df_buildings.geometry[0].convex_hull.exterior.coords)
        )
    )
    assert self.df_buildings["circom"][0] == check
    area = self.df_buildings.geometry.area
    self.df_buildings["circom2"] = mm.CircularCompactness(
        self.df_buildings, area
    ).series
    assert self.df_buildings["circom2"][0] == check
    self.df_buildings["circom3"] = mm.CircularCompactness(self.df_buildings).series
    assert self.df_buildings["circom3"][0] == check
def time_CircularCompactness(self):
    mm.CircularCompactness(self.df_buildings, "area")
        else:
            results_list.append(0)

    series = pd.Series(results_list, index=gdf.index)
    print('Gini calculated.')
    return series


# In[ ]:

for buf in buffers:
    tessellation = gpd.read_file('data/tessellation/{0}_tessellation.shp'.format(buf))
    tessellation['area'] = tessellation.area
    tessellation['lal'] = mm.LongestAxisLength(tessellation).series
    tessellation['circom'] = mm.CircularCompactness(tessellation).series
    tessellation['shapeix'] = mm.ShapeIndex(tessellation, 'lal', 'area').series
    tessellation['rectan'] = mm.Rectangularity(tessellation, 'area').series
    tessellation['fractal'] = mm.FractalDimension(tessellation, 'area').series
    tessellation['orient'] = mm.Orientation(tessellation).series
    distancesw = libpysal.weights.DistanceBand.from_dataframe(tessellation, 400, ids='uID')
    tessellation['freq'] = mm.Neighbors(tessellation, distancesw, 'uID').series
    tessellation['car'] = mm.AreaRatio(tessellation, buildings, 'area',
                                       mm.Area(buildings).series).series
    tessellation['gini_area'] = gini_fn(tessellation, 'area', distancesw, 'uID')
    tessellation['gini_car'] = gini_fn(tessellation, 'car', distancesw, 'uID')
    tessellation.to_file('data/tessellation/{0}_tessellation.shp'.format(buf))
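
# In[ ]:

# For context, a minimal sketch of what the full gini_fn above might look like
# (an assumption -- only its tail is shown in this file). It computes the Gini
# coefficient of `values` over each unit's spatial-weights neighbourhood,
# appending 0 for units without neighbours.
import numpy as np


def gini_fn_sketch(gdf, values, sw, unique_id):
    def _gini(x):
        # Gini via Lorenz-curve shares of the sorted values.
        x = np.sort(np.asarray(x, dtype=float))
        n = len(x)
        if n == 0 or x.sum() == 0:
            return 0
        share = np.cumsum(x) / x.sum()
        return (n + 1 - 2 * share.sum()) / n

    data = gdf.set_index(unique_id)[values]
    results_list = []
    for uid in data.index:
        neighbours = sw.neighbors.get(uid, [])
        if neighbours:
            # Focal unit plus its neighbours within the weights band.
            results_list.append(_gini(data.loc[[uid] + list(neighbours)].values))
        else:
            results_list.append(0)
    return pd.Series(results_list, index=gdf.index)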
def retrieval_similar_cases(data_path, cases_path, threshold):
    threshold = json.loads(threshold)['threshold']
    data = geopandas.read_file(data_path)  # print(data.shape)  # (1, 46)
    cases = geopandas.read_file(cases_path)  # print(len(cases))  # 1055
    sf_cases = shapefile.Reader(cases_path, encoding='gbk')
    sf_data = shapefile.Reader(data_path, encoding='gbk')

    # Recode the amenity columns to binary presence flags.
    change_list = ['bank', 'hospital', 'mall', 'school', 'subway']
    for l in change_list:
        cases.loc[cases[l].notnull(), l] = 1
        cases.loc[cases[l].isnull(), l] = 0
    cases = cases.fillna(value='nan')

    # Morphometric characters of the query parcel.
    data['area'] = momepy.Area(data).series
    data['length'] = momepy.Perimeter(data).series
    data_ccd = momepy.CentroidCorners(data)
    data['ccd_means'] = data_ccd.mean
    data['ccd_std_stdev'] = data_ccd.std
    data['circ_comp'] = momepy.CircularCompactness(data).series  # circular compactness
    data['cwa'] = momepy.CompactnessWeightedAxis(data).series  # compactness-weighted axis
    data['convexity'] = momepy.Convexity(data).series  # convexity
    data['corners'] = momepy.Corners(data).series  # number of corners
    data['elongation'] = momepy.Elongation(data).series  # elongation
    data['eri'] = momepy.EquivalentRectangularIndex(data).series  # equivalent rectangular index
    data['fractal'] = momepy.FractalDimension(data).series  # fractal dimension
    data['rectangularity'] = momepy.Rectangularity(data).series  # rectangularity
    data['squ_comp'] = momepy.SquareCompactness(data).series  # square compactness
    data['long_ax'] = momepy.LongestAxisLength(data).series  # length of the longest axis
    data['shape_index'] = momepy.ShapeIndex(data, longest_axis='long_ax').series  # shape index

    # The same characters for every candidate case.
    cases['area'] = momepy.Area(cases).series
    cases['length'] = momepy.Perimeter(cases).series
    cases_ccd = momepy.CentroidCorners(cases)
    cases['ccd_means'] = cases_ccd.mean
    cases['ccd_std_stdev'] = cases_ccd.std
    cases['circ_comp'] = momepy.CircularCompactness(cases).series
    cases['cwa'] = momepy.CompactnessWeightedAxis(cases).series
    cases['convexity'] = momepy.Convexity(cases).series
    cases['corners'] = momepy.Corners(cases).series
    cases['elongation'] = momepy.Elongation(cases).series
    cases['eri'] = momepy.EquivalentRectangularIndex(cases).series
    cases['fractal'] = momepy.FractalDimension(cases).series
    cases['rectangularity'] = momepy.Rectangularity(cases).series
    cases['squ_comp'] = momepy.SquareCompactness(cases).series
    cases['long_ax'] = momepy.LongestAxisLength(cases).series
    cases['shape_index'] = momepy.ShapeIndex(cases, longest_axis='long_ax').series

    # Absolute feature differences between each case and the query parcel.
    test_x = cases.iloc[:, 18:].sub(data.iloc[0, 2:], axis=1).abs().astype('float')

    ori = []
    dft = []
    him = []
    for i in range(len(cases)):
        # Orientation difference.
        ori.append(abs(compute_ori(sf_cases.shape(i)) - compute_ori(sf_data.shape(0))))

        # Euclidean distance between the first 20 Fourier descriptors.
        fd_c, final_x_c, final_y_c = compute_DFT(sf_cases.shape(i))
        fd_d, final_x_d, final_y_d = compute_DFT(sf_data.shape(0))
        tmp = 0
        for k in range(20):
            tmp += math.pow((fd_c[k] - fd_d[k]), 2)
        dft.append(math.sqrt(tmp))

        # Hu-moment shape matching. final_x_c[:, np.newaxis] adds a new axis;
        # OpenCV contours are ndarrays of shape (n, 1, 2), not (n, 2) as one
        # might expect.
        inter = np.concatenate((final_x_c[:, np.newaxis], final_y_c[:, np.newaxis]), 1)
        inter = inter.reshape(len(final_x_c), 1, 2)
        inter_d = np.concatenate((final_x_d[:, np.newaxis], final_y_d[:, np.newaxis]), 1)
        inter_d = inter_d.reshape(len(final_x_d), 1, 2)
        him.append(cv2.matchShapes(inter, inter_d, 1, 0))

    # Scale down the area and length differences so they are comparable to the
    # other features, then append the three shape-distance columns.
    test_x['area'] = test_x['area'] * 0.000001
    test_x['length'] = test_x['length'] * 0.001
    test_x['ori'] = ori
    test_x['dft'] = dft
    test_x['shape'] = him

    loaded_model = pickle.load(open('xgb.pickle.dat', 'rb'))
    xgb_pred = loaded_model.predict_proba(test_x)
    shutil.rmtree('result')
    os.mkdir('result')
    result = {}
    for i in range(len(xgb_pred)):
        if (test_x['area'][i] < data['area'][0] * 0.25
                and test_x['ori'][i] < 30
                and xgb_pred[i][1] > threshold):  # 0.99, 26
            information = {
                '编号ID': float(cases['ID'][i]),             # case ID
                '地块名字': cases['NAME'][i],                 # parcel name
                '地块所在地': cases['city'][i],               # city
                '地块面积': cases['area'][i],                 # parcel area
                '地块朝向': compute_ori(sf_cases.shape(i)),   # orientation
                '容积率': cases['plot_area'][i],              # floor area ratio
                '价格': cases['price'][i],                    # price
                '绿化率': cases['greening_r'][i],             # greening ratio
                '建成日期': cases['build_date'][i],           # completion date
                'school': float(cases['school'][i]),
                'mall': float(cases['mall'][i]),
                'restaurant': float(cases['restaurant'][i]),
                'hospital': float(cases['hospital'][i]),
                'subway': float(cases['subway'][i]),
                'bank': float(cases['bank'][i]),
                'park': float(cases['includ_g'][i]),
                'water': float(cases['includ_w'][i])
            }
            result[i] = json.dumps(information, sort_keys=True, indent=4,
                                   separators=(',', ': '), ensure_ascii=False)

            # Draw the matched parcel outline on a blank image.
            a = 1024
            b = 512
            img = np.zeros((a, a, 3))
            img.fill(255)
            landuse = sf_cases
            l_shape = landuse.shape(i)
            l_convex = Polygon(l_shape.points).convex_hull
            # l_convex.centroid.xy: (array('d', [12945692.760656377]), array('d', [4861576.219346005]))
            x_c = l_convex.centroid.xy[0][0]
            y_c = l_convex.centroid.xy[1][0]
            l_dot = np.array(l_shape.points)
            l_nom_x = np.array(list(map(int, l_dot[:, 0]))) - int(x_c)
            l_nom_y = np.array(list(map(int, l_dot[:, 1]))) - int(y_c)
            # l_nom_x[:, np.newaxis] adds a new dimension.
            l_inter = np.concatenate((l_nom_x[:, np.newaxis] + b,
                                      minus(l_nom_y)[:, np.newaxis] + b), 1)
            # cv2.polylines(image, list of point arrays, closed, colour, width);
            # the point coordinates must be integers, not floats.
            cv2.polylines(img, [np.asarray(l_inter)], True, (0, 0, 255), 1)
            cv2.imwrite('./result/' + str(cases['ID'][i]) + '.jpg', img)

    final_data = json.dumps(result, sort_keys=True, indent=4,
                            separators=(',', ': '), ensure_ascii=False)
    print(final_data)
    sf_cases.close()
    sf_data.close()
    del cases
    del data
    return final_data
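
# A hypothetical invocation of retrieval_similar_cases; the shapefile paths and
# the JSON-encoded threshold below are placeholders, not values from the
# original project.
if __name__ == '__main__':
    retrieval_similar_cases('data/query_parcel.shp', 'data/cases.shp',
                            '{"threshold": 0.9}')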
mode="count").series streets["mdsAre"] = mm.Reached(streets, tess, "nID", "nID", spatial_weights=str_q1, mode="sum").series blg_q1 = libpysal.weights.contiguity.Queen.from_dataframe(blg) blg["libNCo"] = mm.Courtyards(blg, "bID", blg_q1).series blg["ldbPWL"] = mm.PerimeterWall(blg, blg_q1).series blocks["ldkAre"] = mm.Area(blocks).series blocks["ldkPer"] = mm.Perimeter(blocks).series blocks["lskCCo"] = mm.CircularCompactness(blocks, "ldkAre").series blocks["lskERI"] = mm.EquivalentRectangularIndex(blocks, "ldkAre", "ldkPer").series blocks["lskCWA"] = mm.CompactnessWeightedAxis(blocks, "ldkAre", "ldkPer").series blocks["ltkOri"] = mm.Orientation(blocks).series blo_q1 = libpysal.weights.contiguity.Queen.from_dataframe(blocks, ids="bID") blocks["ltkWNB"] = mm.Neighbors(blocks, blo_q1, "bID", weighted=True).series blocks["likWBB"] = mm.Count(blocks, blg, "bID", "bID", weighted=True).series tess.to_file("files/elements.gpkg", layer="tessellation", driver="GPKG") blg.to_file("files/elements.gpkg", layer="buildings", driver="GPKG") blocks.to_file("files/elements.gpkg", layer="blocks", driver="GPKG") streets.to_file("files/elements.gpkg", layer="streets", driver="GPKG")