def voronoiPolynomials(points, boundingPolygon):
    """Build a Voronoi diagram (extended out to FAR) for *points* and clip
    every cell to *boundingPolygon*.

    Returns a list of Polygon.Polygon objects, one per Voronoi region.
    Requires more than two input points.
    """
    assert len(points) > 2
    diagram = scipy.spatial.Voronoi(points)
    # See the main function in the file colorized_voronoi for details: each
    # region is a list of vertex indices (e.g. [2, 0, 5]) into the diagram's
    # vertex table, in order.
    regions, verts = voronoi_finite_polygons_2d(diagram, radius=FAR)
    cells = []
    for region in regions:
        corners = [verts[idx] for idx in region]
        cells.append(Polygon.Polygon(corners))
    # '&' intersects two polygons; re-wrap the result in Polygon.Polygon
    # because the operator returns the underlying cPolygon type.
    return [Polygon.Polygon(cell & boundingPolygon) for cell in cells]
def getVoronoi(shape, samples, vec):
    """Reconstruct a dense (h, w) matrix by painting each Voronoi cell of
    the sample points with that sample's value.

    Args:
        shape: (h, w) of the output matrix.
        samples: flattened indices of the sample positions.
        vec: values at those positions; zero entries are dropped together
            with their sample before building the diagram.

    Returns:
        (h, w) ndarray where every pixel holds the value of the nearest
        surviving sample.
    """
    h, w = shape
    # Discard zero-valued samples (treated as missing data). Named `keep`
    # rather than `mask` so it is not shadowed by the rasterization mask
    # computed inside the loop below.
    keep = np.where(vec != 0)
    samples = samples[keep]
    vec = vec[keep]
    he = np.arange(0, h)
    wi = np.arange(0, w)
    Yq, Zq = np.meshgrid(wi, he)
    Y_sample = Yq.flatten()[samples]
    Z_sample = Zq.flatten()[samples]
    points = np.column_stack((Y_sample, Z_sample))
    voronoi = Voronoi(points)
    regions, vertices = voronoi_finite_polygons_2d(voronoi)
    # Bounding rectangle used to clip the (possibly unbounded) outer cells.
    b = Polygon([(0, 0), (w - 1, 0), (w - 1, h - 1), (0, h - 1)])
    reconstructed = np.zeros((w, h))  # transposed; flipped back on return
    for i, region in enumerate(regions):
        # `cell` (not `shape`) to avoid shadowing the function parameter.
        cell = Polygon(vertices[region])
        if not b.contains(cell):
            cell = cell.intersection(b)
        x, y = cell.exterior.coords.xy
        # int32 (not int16) so images wider/taller than 32767 px don't
        # silently overflow the index arrays.
        row_indices = np.array(x, dtype=np.int32)
        column_indices = np.array(y, dtype=np.int32)
        row_min = np.amin(row_indices)
        row_max = np.amax(row_indices) + 1
        column_min = np.amin(column_indices)
        column_max = np.amax(column_indices) + 1
        # Work in a small window enclosing the cell instead of the full image.
        enclose = np.zeros((row_max - row_min, column_max - column_min))
        enclose[row_indices - row_min, column_indices - column_min] = 1
        grid_points = np.indices(enclose.shape).reshape(2, -1).T
        # Path needs a concrete vertex sequence; a bare zip iterator
        # (Python 3) is not accepted, so materialize it first.
        path = Path(list(zip(x - row_min, y - column_min)))
        mask = path.contains_points(grid_points, radius=-1e-9)
        mask = mask.reshape(enclose.shape)
        rr, cc = np.where(mask)
        reconstructed[row_min + rr, column_min + cc] = vec[i]
    return reconstructed.T
def voronoiSegmentation(polygon, npoints):
    """Segment *polygon* into *npoints* Voronoi regions.

    npoints must be an integer greater than two. Returns a list of
    DiplomacyPolygon objects covering the input polygon.
    """
    assert npoints > 2
    # Seed with uniformly random interior points, then Lloyd-relax them so
    # the resulting cells are less irregular than a raw random seeding.
    seeds = [randomPointWithin(polygon) for _ in range(npoints)]
    relaxed = lloydRelaxation(seeds, 2, boundingPolygon=polygon)
    diagram = scipy.spatial.Voronoi(relaxed)
    # Read the main function in the file colorized_voronoi for what happens
    # here: each region is a list of vertex indices (e.g. [2, 0, 5]) into
    # the diagram's vertex table, in order.
    regions, verts = voronoi_finite_polygons_2d(diagram, radius=FAR)
    cells = []
    for region in regions:
        corners = [verts[idx] for idx in region]
        cells.append(DiplomacyPolygon(corners))
    # Intersect (&) each big cell with the polygon being segmented; the
    # operator returns cPolygon.Polygon, hence the DiplomacyPolygon re-wrap.
    return [DiplomacyPolygon(polygon & cell) for cell in cells]
def getVoronoi(shape, samples, vec):
    '''
    Constructs new depth image by creating Voronoi regions.

    Args:
        shape: Shape of the depth matrix
        samples: List of flattened indices of non-NaN values in depth matrix
        vec: List of depth values at the indices given by the previous list
        * NOTE: samples and vec must be obtained from the function create_samples.createSamples()

    Returns:
        matrix: New depth matrix
    '''
    h, w = shape
    he = np.arange(0, h)
    wi = np.arange(0, w)
    Yq, Zq = np.meshgrid(wi, he)
    Y_sample = Yq.flatten()[samples]
    Z_sample = Zq.flatten()[samples]
    points = np.column_stack((Y_sample, Z_sample))
    voronoi = Voronoi(points)
    regions, vertices = voronoi_finite_polygons_2d(voronoi)
    # Bounding rectangle used to clip the (possibly unbounded) outer cells.
    b = Polygon([(0, 0), (w - 1, 0), (w - 1, h - 1), (0, h - 1)])
    reconstructed = np.zeros((w, h))  # transposed; flipped back on return
    for i, region in enumerate(regions):
        # `cell` (not `shape`) to avoid shadowing the function parameter.
        cell = Polygon(vertices[region])
        if not b.contains(cell):
            cell = cell.intersection(b)
        x, y = cell.exterior.coords.xy
        # int32 (not int16) so images wider/taller than 32767 px don't
        # silently overflow the index arrays.
        row_indices = np.array(x, dtype=np.int32)
        column_indices = np.array(y, dtype=np.int32)
        row_min = np.amin(row_indices)
        row_max = np.amax(row_indices) + 1
        column_min = np.amin(column_indices)
        column_max = np.amax(column_indices) + 1
        # Work in a small window enclosing the cell instead of the full image.
        enclose = np.zeros((row_max - row_min, column_max - column_min))
        enclose[row_indices - row_min, column_indices - column_min] = 1
        grid_points = np.indices(enclose.shape).reshape(2, -1).T
        # Path needs a concrete vertex sequence; a bare zip iterator
        # (Python 3) is not accepted, so materialize it first.
        path = Path(list(zip(x - row_min, y - column_min)))
        mask = path.contains_points(grid_points, radius=-1e-9)
        mask = mask.reshape(enclose.shape)
        rr, cc = np.where(mask)
        reconstructed[row_min + rr, column_min + cc] = vec[i]
    return reconstructed.T
df_vor["update"] = update_Z df_vor["vor_label"] = vor_cell df_vor.to_csv( "{}TrainFrac{}_PairNegWgt{}_Delta{}_GroupEvt{}_df_vor.csv".format( outputPath, tf, negative_weight, delta, group_events)) print("finish merge algorithm") file_log.write( "finish merge algorithm df_vor is saved into {}PairNegWgt{}_Delta{}_GroupEvt{}.csv \n" .format(outputPath, negative_weight, delta, group_events)) # dictionary to save {label:[p1,p2,..., pN]} label_points = df_vor.groupby('vor_label')['vor_point'].apply(list).to_dict() regions, vertices = voronoi_finite_polygons_2d(Vor.points, Vor.vertices, Vor.regions, Vor.ridge_vertices, Vor.ridge_points, Vor.point_region) # save map #print(data) df_map, drop_x, drop_y = tool.save_vor_map(data.ix[(data.target.values == 1)], regions, vertices, label_points) df_map = df_map.reset_index(drop=True) df_map.to_csv( "{}TrainFrac{}_PairNegWgt{}_Delta{}_GroupEvt{}_map.csv".format( outputPath, tf, negative_weight, delta, group_events), columns=["mvaOutput_2lss_ttV", "mvaOutput_2lss_ttbar", "vor_label"]) # ksTest # create train df df_vor_train = df_vor.groupby('vor_label').sum()[[