def polyIntegratePdf(poly, mean, cov, eps=1.0e-1, method=None):
    """Integrate a Gaussian PDF with given mean and covariance over a convex polygon.

    Args:
        poly: (n, 2) array of corner points of a convex polygon
        mean: (2,) mean vector of the Gaussian distribution
        cov: (2, 2) covariance matrix of the Gaussian distribution
        eps: adaptive-quadrature tolerance (quadrature path only)
        method: 'simulation' for a Monte-Carlo estimate; anything else uses
            adaptive quadrature over an ear-clipping triangulation

    Return:
        I: result of the integral (clamped to at most 1 on the quadrature path)
    """
    if method == 'simulation':
        # Monte-Carlo: fraction of Gaussian samples that land inside the polygon.
        count = 10000
        samples = np.random.multivariate_normal(mean, cov, count)
        prob = np.count_nonzero(in_hull(samples, poly)) / count
        assert np.isscalar(prob)
        return prob

    # Quadrature: triangulate the polygon and sum per-triangle integrals.
    density = lambda x: pdfExplicit(x, mean, cov)
    total = 0.0
    for tri in np.asarray(tripy.earclip(poly)):
        contribution, _ = quadpy.triangle.integrate_adaptive(density, tri, eps)
        total += contribution
    return min(total, 1)
def makeViz(self, reachSets):
    """Publish one LINE_STRIP marker per reachable-set outline.

    Args:
        reachSets: message with a `header` and `sets`, where each set holds
            points whose `.p` attribute is an (x, y) pair.
    """
    pointSets = [[tuple(p.p) for p in rs.set] for rs in reachSets.sets]
    # Removed dead code: a `triangleSets` list was built via tripy.earclip
    # here but never used anywhere in this method.

    header = Header()
    header.stamp = rospy.Time.now()
    header.frame_id = reachSets.header.frame_id
    origin = Pose(Point(0, 0, 0), Quaternion(0, 0, 0, 1))

    lineMarkers = MarkerArray()
    lineMarkerArray = []
    for ii in range(len(pointSets)):
        m = Marker()
        m.header = header
        m.id = self.outline_marker_id + ii
        m.action = 0
        m.pose = origin
        m.type = 4  #LINE_STRIP
        # Cycle through the configured colors, one per set.
        m.color = self.colors[ii % len(self.colors)]
        m.points = [Point(p[0], p[1], 0) for p in pointSets[ii]]
        m.scale = Vector3(.05, 0, 0)
        lineMarkerArray.append(m)
    lineMarkers.markers = lineMarkerArray
    self.outlinePub.publish(lineMarkers)
def getSurface(contour, height):
    """Triangulate a 2D contour and lift it to a flat surface at z = height.

    Returns a list of (3, 3) triangle vertex arrays plus one +z unit normal
    per triangle.
    """
    triangles = tripy.earclip(contour)
    # earclip yields tuples like (((1, 0), (0, 1), (0, 0)), ...); append the
    # constant z coordinate to every 2D vertex.
    surface = []
    for a, b, c in triangles:
        lifted = np.array([np.append(a, [height]),
                           np.append(b, [height]),
                           np.append(c, [height])])
        surface.append(lifted)
    normals = np.array([[0, 0, 1]] * len(surface))
    return surface, normals
def convert_to_triangles(self):
    """Triangulate the stored polygon and flatten the result.

    Fills ``self.vertices`` with the x/y coordinates of every triangle
    vertex in order (6 floats per triangle); the raw triangle list is kept
    in ``self.temp``.
    """
    self.temp = tripy.earclip(self.vertices_polygon)
    # Flatten triangle -> vertex -> coordinate in row-major order, matching
    # the layout index h * 6 + l * 2 + u of the previous implementation.
    self.vertices = [coordinate
                     for triangle in self.temp
                     for vertex in triangle
                     for coordinate in (vertex[0], vertex[1])]
def get_splitted_images(img, labels):
    """Split ``img`` into a wall image and a non-wall image.

    Wall polygons are burned into the first image and re-labelled as one
    axis-aligned rectangle per ear-clipping triangle (then merged); every
    other polygon goes into the second image with its label kept as-is.

    Returns ([image1, image2], [wall_labels, other_labels]).
    """
    image1 = np.full(img.shape, np.uint8(255))
    image2 = image1.copy()
    new_labels1, new_labels2 = [], []

    for entry in labels:
        # Integer exterior ring without the repeated closing vertex.
        poly = np.column_stack(entry['poly'].exterior.coords.xy).astype(np.int32)[:-1]

        # Pixels of `img` restricted to the polygon area.
        tmp = np.zeros(img.shape, np.uint8)
        cv2.fillPoly(tmp, [poly], (255, 255, 255))
        mask_out = cv2.subtract(tmp, img)
        mask_out = cv2.subtract(tmp, mask_out)

        if 'wall' in entry['name']:
            cv2.fillPoly(image1, [poly], 0)
            image1 = cv2.add(image1, mask_out)
            wall_labels = []
            for tr in tripy.earclip(poly):
                polygon = Polygon(tr)
                if not polygon.is_valid:
                    continue
                bbox = np.asarray(polygon.bounds, np.int32)
                wall_labels.append({
                    'name': entry['name'],
                    'bbox': bbox,
                    'poly': Polygon(create_rect(bbox)),
                })
            new_labels1.extend(merge_walls(wall_labels))
        else:
            cv2.fillPoly(image2, [poly], 0)
            image2 = cv2.add(image2, mask_out)
            new_labels2.append(entry)

    return [image1, image2], [new_labels1, new_labels2]
def triangule(x, y, z):
    """Triangulate the polygon given by parallel coordinate lists.

    Vertex i is (x[i], y[i]) in the plane and [x[i], y[i], z[i]] in 3D.
    Returns one [xs, ys, zs] coordinate-triple list per triangle.
    """
    # Build the 2D polygon and its 3D counterpart side by side.
    polygon = [(x[i], y[i]) for i in range(len(x))]
    polygon3d = [[x[i], y[i], z[i]] for i in range(len(x))]

    # Lift each 2D triangle back to 3D by looking vertices up in the polygon.
    triangles3D = []
    for triangle in tripy.earclip(polygon):
        indices = [polygon.index(vertex) for vertex in triangle]
        triangles3D.append([polygon3d[k] for k in indices])

    # Re-pack each triangle as parallel coordinate lists [xs, ys, zs].
    result = []
    for triangle in triangles3D:
        xs = [coord[0] for coord in triangle]
        ys = [coord[1] for coord in triangle]
        zs = [coord[2] for coord in triangle]
        result.append([xs, ys, zs])
    return result
def gen_triangles():
    """Triangulate every segment of every layer.

    Each segment is indexed as (seg[0], start, count): the matching slice of
    VERTICES is ear-clipped, the triangles are registered in the picking
    grid, and each triangle corner is appended to the layer's vertex buffer
    as (x, y, seg[0], 0).
    """
    for layer_idx, layer in enumerate(SEGMENTS):
        buffer = VERTEXBUFFERS[layer_idx]
        for seg in layer:
            start, count = seg[1], seg[2]
            tris = tripy.earclip(VERTICES[start:start + count])

            # register triangles in the picking grid
            add_tris_to_grid(layer_idx, tris)

            # one vertex-buffer entry per triangle corner
            for tri in tris:
                for corner in tri:
                    buffer.append((corner[0], corner[1], seg[0], 0))
def Triangulate(poly):
    """Ear-clip ``poly`` and return the triangles as vertex-index triples.

    ``poly`` is a list of [x, y] points; each returned triangle is a list of
    three indices into ``poly``.
    """
    # earclip returns vertices as tuples; convert to lists so they compare
    # equal to the [x, y] entries of `poly` when searching for the index.
    return [[poly.index(list(vertex)) for vertex in tri]
            for tri in tripy.earclip(poly)]
def makeViz(self, reachSets):
    """Publish RViz markers for a set of reachable sets.

    Publishes (1) one LINE_STRIP marker per set outlining its polygon, and
    (2) a single TRIANGLE_LIST marker containing the ear-clipping
    triangulation of every set, colored per set.
    """
    # One list of (x, y) tuples per reachable set.
    pointSets = [[tuple(p.p) for p in rs.set] for rs in reachSets.sets]
    # One triangle list per set (each triangle is three (x, y) tuples).
    triangleSets = [tripy.earclip(ps) for ps in pointSets]
    header = Header()
    header.stamp = rospy.Time.now()
    header.frame_id = reachSets.header.frame_id
    origin = Pose(Point(0, 0, 0), Quaternion(0, 0, 0, 1))
    lineMarkers = MarkerArray()
    lineMarkerArray = []
    for ii in range(len(pointSets)):
        m = Marker()
        m.header = header
        m.id = self.outline_marker_id + ii
        m.action = 0
        m.pose = origin
        m.type = 4  #LINE_STRIP
        # Cycle through the configured palette, one color per set.
        m.color = self.colors[ii % len(self.colors)]
        m.points = [Point(p[0], p[1], 0) for p in pointSets[ii]]
        m.scale = Vector3(.05, 0, 0)
        lineMarkerArray.append(m)
    lineMarkers.markers = lineMarkerArray
    self.outlinePub.publish(lineMarkers)
    # Flatten to a single list of triangles across all sets (despite the
    # name, each element of triPoints is a triangle, not a point).
    triPoints = [xy for tri in triangleSets for xy in tri]
    triMarker = Marker()
    triMarker.header = header
    triMarker.id = self.tri_marker_id
    triMarker.type = 11  #TRIANGLE_LIST
    triMarker.action = 0
    triMarker.pose = origin
    triMarker.color = ColorRGBA(1, 1, 1, 1)
    triMarker.scale = Vector3(1, 1, 1)
    # Three Point entries per triangle, as required by TRIANGLE_LIST.
    triMarker.points = [
        Point(p[0], p[1], 0) for tri in triPoints for p in tri
    ]
    #expand color array to cover all verts for all tris in each set with same color
    # NOTE(review): triFrequency uses the number of polygon points per set
    # (n), while the marker holds 3 * (n - 2) triangle vertices per set, so
    # len(triMarker.colors) may not match len(triMarker.points) — confirm
    # against RViz's per-vertex/per-triangle color semantics.
    triFrequency = [len(ps) for ps in pointSets]
    triColors = [
        self.colors[ii % len(self.colors)] for ii in range(len(pointSets))
    ]
    triMarker.colors = [
        c for cidx in range(len(triColors))
        for c in repeat(triColors[cidx], triFrequency[cidx])
    ]
    self.trisPub.publish(triMarker)
def test_polygon(self, poly_data):
    """Check that ear-clipping ``poly_data`` reproduces both its reference
    triangle list and (within EPSILON) its reference total area."""
    result = tripy.earclip(poly_data.vertices)
    area = tripy.calculate_total_area(result)
    error = abs(poly_data.total_area - area)
    message = '{}: area absolute error ({} - {} = {}) >= epsilon ({})'.format(
        poly_data.name,
        poly_data.total_area,
        area,
        error,
        tripy.EPSILON,
    )
    self.assertTrue(error < tripy.EPSILON, message)
    self.assertEqual(result, poly_data.triangles)
def get_svg_scene(fname: str, px_per_meter: float = 50) -> JaxScene:
    """Loads scene representation from svg file

    Closed, straight-line-only paths are converted to counterclockwise
    polygons, ear-clipped into triangles, optionally rotated per the path's
    SVG ``rotate`` transform, and collected into a scene. Non-closed or
    curved paths are skipped with a console message.

    Args:
        fname (str): path to svg file
        px_per_meter (float, optional): pixels per meters scale. Defaults to 50.

    Returns:
        Scene: scene representation instance
    """
    polygons = []
    paths, attributes, svg_attributes = sp.svg2paths(
        fname, return_svg_attributes=True)
    # Canvas size in pixels, e.g. "800px" -> 800 (used to flip the y axis).
    w, h = svg_attributes["width"], svg_attributes["height"]
    w, h = int(w.replace("px", "")), int(h.replace("px", ""))
    for path, attr in zip(paths, attributes):
        if not path.isclosed():
            print(f"Found non-closed path {path}, skipping")
            continue
        if not all([isinstance(l, Line) for l in path]):
            print(f"Only simple line figures are currently allowed, skipping")
            continue
        # TODO: check lines color and set orientation
        # (n, 2, 2) array of segments, normalized to CCW order.
        onp_polygon = sort_segments(
            segments=onp.concatenate(
                [
                    onp.array(line_begin_end(line, px_per_meter, w, h))[onp.newaxis]
                    for line in path
                ],
                axis=0,
            ),
            orientation="counterclockwise",
        )
        # triangulate:
        polygon_points = [s[0] for s in onp_polygon]
        triangles_points = tripy.earclip(polygon_points)
        idxs = onp.array([0, 1, 2])
        for triangle in triangles_points:
            # Build the three edges of the triangle: segment k runs from
            # vertex k to vertex (k + 1) mod 3.
            segments = onp.zeros((3, 2, 2), dtype=onp_polygon.dtype)
            segments[:, 0] = onp.asarray(triangle)
            segments[:, 1] = segments[(idxs + 1) % 3, 0]
            jax_polygon = create_polygon(segments)
            if "transform" in attr and "rotate" in attr["transform"]:
                # HACK: parses "rotate(angle, cx, cy)" with eval — only safe
                # for trusted SVG files; the rotation center's y is flipped
                # into the scene's coordinate frame.
                angle, cx, cy = eval(attr["transform"].replace("rotate", ""))
                cx, cy = cx, h - cy
                jax_polygon = rotate_polygon(
                    jax_polygon, angle, (cx / px_per_meter, cy / px_per_meter))
            polygons.append(jax_polygon)
    return create_scene(polygons)
def draw(firstFig, secondFig, interFigs,
         firstFigColor="blue", secondFigColor="red", interFigColor="yellow",
         labels=("first", "second", "inter", "combined")):
    """Draw two figures and their intersection on a 2x2 grid of axes.

    Args:
        firstFig, secondFig: single polygons (sequences of (x, y) points).
        interFigs: list of intersection polygons (may degenerate to points
            or segments).
        firstFigColor, secondFigColor, interFigColor: matplotlib colors.
        labels: titles for the three individual plots and the combined plot.

    One-point "polygons" are drawn as dots, two-point ones as dashed
    segments; real polygons are ear-clipped and rendered as filled triangle
    collections, each in both the combined axes and its own axes.
    """
    polyColors = [firstFigColor, secondFigColor, interFigColor]
    figures = [[firstFig], [secondFig], interFigs]
    # sharex/sharey passed by keyword: they are keyword-only in modern
    # matplotlib, so the old positional call `subplots(2, 2, True, True)`
    # breaks there.
    ax = plt.subplots(2, 2, sharex=True, sharey=True)[1]
    ax[1, 1].set_title(labels[3])
    polyNo = 0
    for polygons in figures:
        polyColor = polyColors[polyNo]
        sideAx = ax[polyNo // 2, polyNo % 2]
        sideAx.set_title(labels[polyNo])
        for polygon in polygons:
            assert (len(polygon) > 0), "Not a polygon"
            if len(polygon) == 1:
                # Single point: draw a dot. (Bug fix: the y value previously
                # reused polygon[0][0] instead of polygon[0][1].)
                ax[1, 1].plot([polygon[0][0]], [polygon[0][1]], 'yo')
                sideAx.plot([polygon[0][0]], [polygon[0][1]], 'yo')
            elif len(polygon) == 2:
                # Two points: dashed segment, nudged down slightly so it
                # stays visible over coincident polygon edges.
                ax[1, 1].plot([polygon[0][0], polygon[1][0]],
                              [polygon[0][1] - 0.1, polygon[1][1] - 0.1],
                              'yo--')
                sideAx.plot([polygon[0][0], polygon[1][0]],
                            [polygon[0][1] - 0.1, polygon[1][1] - 0.1],
                            'yo--')
            else:
                triangles = earclip(polygon)
                ax[1, 1].add_collection(
                    PolyCollection(
                        triangles, edgecolor=polyColor,
                        facecolor=polyColor))  # Main Plot polygon collection
                sideAx.add_collection(
                    PolyCollection(
                        triangles, edgecolor=polyColor,
                        facecolor=polyColor))  # Side Plot polygon collection
        polyNo += 1
    plt.autoscale()
    # Force an identical, square coordinate range on both axes.
    left, right = plt.xlim()
    down, up = plt.ylim()
    plt.xlim(min(left, down), max(right, up))
    plt.ylim(min(left, down), max(right, up))
    plt.show()
def FasterCap(
        Name,
        Polygons=None,  #List with polygons
        PolygonsNames=None):
    """
    This function receives a list of 2D polygons (array-like [N][2]) and
    creates a text file that can be used to calculate a capacitance matrix
    between objects using FasterCap in its 3D mode.

    Args:
        Name: base name of the output file; a '_FasterCap_<n>' suffix is
            added, using the first n in [0, 100) whose file does not exist.
        Polygons: list of polygons, each an array-like of (x, y) vertices.
        PolygonsNames: optional conductor name per polygon; defaults to
            'Polygon1', 'Polygon2', ...
    """
    # Bug fix: the old signature used mutable defaults ([]) and appended
    # auto-generated names into PolygonsNames, so the names accumulated
    # across calls. Use None sentinels and build a fresh list instead.
    if Polygons is None:
        Polygons = []
    if not PolygonsNames:
        PolygonsNames = ['Polygon' + str(P + 1) for P in range(len(Polygons))]

    '''Create and open file'''
    #If file exists, try next number.
    c = 0
    while c < 100:
        FileName = Name + '_FasterCap_' + str(c)
        try:
            with open(FileName + ".txt"):
                print("File " + FileName + " already exists, trying next number.")
            c += 1
        except IOError:
            # File does not exist -> this name is free.
            break

    # Bug fix: the file handle was previously never closed; use a context
    # manager so it is flushed and closed even on error.
    with open(FileName + ".txt", "a") as file:
        '''Header: setting simulation chracteristics'''
        file.write('*0 ' + FileName + '\n')
        file.write(
            '*Fast(er)Cap input file to calculate capacitance of polygon \n')
        file.write('\n')

        '''Writing the cell's polygons in terms of triangles'''
        for P, Polygon in enumerate(Polygons):
            file.write(
                '\n*G ' + str(PolygonsNames[P]) +
                '\t|3D coordinates of the three vertices of the triangle T patch\n\n'
            )
            triangles = tripy.earclip(Polygon)
            for triangle in triangles:
                file.write('T ' + str(PolygonsNames[P]) + '\t')
                for vertex in range(3):
                    file.write(
                        str(format(triangle[vertex][0], '.4f')) + '\t' +
                        str(round(triangle[vertex][1], 4)) +
                        '\t10.0\t')  #10.0 is the default z coordinate
                file.write('\n')
    return
def update(self):
    """Rebuild the filled-triangle batch and the outline for the ngon."""
    # Triangulate the ngon; each triangle becomes one 3-vertex batch entry.
    batch = pyglet.graphics.Batch()
    for tri in tripy.earclip(self.points):
        flat = tuple(coord for vertex in tri for coord in vertex)
        batch.add(3, pyglet.gl.GL_TRIANGLES, None, ('v2f', flat))
    self._tris = batch

    # Close the polygon and rebuild the outline as a Line object.
    outline = list(self.points[:])
    outline.append(self.points[0])
    self._line = Line(
        outline,
        colour=self.line_colour,
        width=self.line_width
    )
def getRndPtUniformPoly(
    poly: "The polygon for generating random points" = None
) -> "Given a polygon, generate a random point in the polygons uniformly":
    # Get list of triangles ===================================================
    newTriangles = tripy.earclip(poly)
    # The module-level lstTriangle is still extended (preserving the existing
    # side effect for any external readers), but the draw below now uses only
    # this polygon's triangles.
    # BUG FIX: previously the weighted draw ran over ALL of lstTriangle, so
    # triangles accumulated from earlier calls (other polygons) could be
    # selected, biasing — or outright breaking — the uniform sampling.
    lstTriangle.extend(newTriangles)

    # Weight them and make draws ==============================================
    lstWeight = []
    for tri in newTriangles:
        lstWeight.append(calTriangleAreaByCoords(tri[0], tri[1], tri[2]))

    # Select a triangle and randomize a point in the triangle =================
    idx = rndPick(lstWeight)
    (x, y) = getRndPtUniformTriangle(newTriangles[idx])
    return (x, y)
def _get_triangulation(polygon_object_xy):
    """Returns triangulation of polygon.

    N = number of triangles

    :param polygon_object_xy: Instance of `shapely.geometry.Polygon` with
        vertices in x-y (Cartesian) coordinates.
    :return: triangle_to_vertex_matrix: N-by-3 numpy array, where
        triangle_to_vertex_matrix[i, j] is the index of the [j]th vertex in
        the [i]th triangle.  Thus, if triangle_to_vertex_matrix[i, j] = k,
        the [j]th vertex in the [i]th triangle is the [k]th vertex in the
        original polygon.  Returns None if the polygon yields no triangles
        (preserving the original behaviour for degenerate input).
    """
    vertex_x_coords, vertex_y_coords = _polygon_to_vertex_arrays(
        polygon_object_xy)
    vertex_list_xy = front_utils._vertex_arrays_to_list(
        x_coords_metres=vertex_x_coords, y_coords_metres=vertex_y_coords)

    triangle_list = tripy.earclip(vertex_list_xy)
    num_triangles = len(triangle_list)
    if num_triangles == 0:
        return None

    # Performance fix: preallocate the N-by-3 matrix instead of growing it
    # with numpy.vstack inside the loop (which copied the whole array each
    # iteration, O(N^2) overall).
    triangle_to_vertex_matrix = numpy.full((num_triangles, 3), -1, dtype=int)

    for i in range(num_triangles):
        for j in range(3):
            # Match each triangle corner to the closest original vertex by
            # L1 distance (earclip output need not be bitwise-equal to the
            # input coordinates).
            these_x_differences = numpy.absolute(vertex_x_coords -
                                                 triangle_list[i][j][0])
            these_y_differences = numpy.absolute(vertex_y_coords -
                                                 triangle_list[i][j][1])
            these_coord_diffs = these_x_differences + these_y_differences
            triangle_to_vertex_matrix[i, j] = numpy.argmin(these_coord_diffs)

    return triangle_to_vertex_matrix
def get_ec_steps(coords):
    """Render the ear-clipping triangulation of a polygon step by step.

    Returns one image per step: frame i shows the polygon with its first i
    triangles drawn (frame 0 is the bare polygon).

    ``coords`` is assumed to be an (n, 2) numpy array — it is indexed with
    coords[:, 0] below.
    """
    triangles = np.array(tripy.earclip(np.array(coords)))

    # Performance fix: the plot bounds are loop-invariant; previously they
    # were recomputed for every frame.
    # coordinates for empty space cropping
    min_x = np.min(coords[:, 0])
    min_y = np.min(coords[:, 1])
    max_x = np.max(coords[:, 0])
    max_y = np.max(coords[:, 1])
    size_x = max_x - min_x
    size_y = max_y - min_y
    addon = np.max([size_x, size_y]) / 10  # margin around the drawing

    steps = []
    for i in range(len(triangles) + 1):
        fig, ax = plt.subplots(facecolor=(49 / 255, 52 / 255, 49 / 255))

        # create and plot polygon
        polygon = Polygon(coords, color='darkgray', zorder=1)
        ax.add_patch(polygon)

        # draw the first i triangles of the clipping sequence
        for e in range(i):
            print_triangle(ax, triangles[e])

        # crop empty space around graph
        plt.xlim(min_x - addon, max_x + addon)
        plt.ylim(min_y - addon, max_y + addon)

        # don't show axes
        ax.get_xaxis().set_visible(False)
        ax.get_yaxis().set_visible(False)
        ax.set_frame_on(False)

        # plot vertices
        ax.scatter(coords[:, 0], coords[:, 1], c='deeppink', zorder=3)

        img = fig2img(fig)
        steps.append(img)
        # Leak fix: close the figure once it is rasterized; previously every
        # frame's figure stayed open for the life of the process.
        plt.close(fig)
    return steps
def createColorPlaneFromCurve(curve, triangulate, r, g, b, center=None):
    """
    Creates a plane from a curve and a center.

    :param curve: Curve vertex list
    :param triangulate: Create plane from curve triangulation
    :param center: Center position
    :param r: Red color
    :param g: Green color
    :param b: Blue color
    :return: Merged shape
    :rtype: AdvancedGPUShape
    """
    shapes = []
    if triangulate:
        # Ear-clipping triangulation (the old comment said "delaunay", but
        # tripy.earclip implements ear clipping, not Delaunay).
        points = [(p[0], p[1]) for p in curve]
        for tri in _tripy.earclip(points):
            x1, y1 = tri[0]
            x2, y2 = tri[1]
            x3, y3 = tri[2]
            shape = createTriangleColor((x1, y1, 0), (x2, y2, 0), (x3, y3, 0),
                                        r, g, b)
            shapes.append(_toGPUShape(shape))
    else:
        # Fan triangulation around an explicit (or default) center point.
        if center is None:
            center = curve[0]
        c1, c2 = center  # hoisted: the center never changes inside the loop
        # NOTE(review): the loop stops at len(curve) - 2, so the closing
        # edge (last vertex -> first vertex) is never emitted. With the
        # default center == curve[0] that triangle would be degenerate, but
        # with a custom center it leaves a gap — confirm this is intended.
        for i in range(0, len(curve) - 1):
            x1, y1 = curve[i]
            x2, y2 = curve[(i + 1) % len(curve)]
            shape = createTriangleColor((x1, y1, 0), (x2, y2, 0), (c1, c2, 0),
                                        r, g, b)
            shapes.append(_toGPUShape(shape))
    return AdvancedGPUShape(shapes)
# For each record in stage_two (pairs of lines: a name line followed by a
# tab-separated "x,y\tx,y\t..." vertex line) triangulate the polygon and
# write one line of "x,y\t" triples per triangle to stage_T.
for r in range(0, int(len(stage_two) / 2)):
    temp = stage_two[r * 2 + 1].split("\t")
    # Parse "x,y" fields into [x, y] int pairs; empty fields stay None.
    vector_list = [None] * (len(temp) - 1)
    for i in range(0, len(vector_list)):
        if (temp[i] != ''):
            vector_list[i] = [0, 0]
            vector_list[i][0] = int(float(temp[i].split(",")[0]))
            vector_list[i][1] = int(float(temp[i].split(",")[1]))
    #remove duplicates
    # NOTE(review): this pops from vector_list while indexing over the
    # original range; the `i < len(vector_list)` guard prevents an
    # IndexError, but index shifting means runs of more than two equal
    # points may not be fully collapsed — confirm against the input data.
    for i in range(0, len(vector_list)):
        if (i < len(vector_list)):
            if (vector_list[i][0] == vector_list[
                    (i + 1) % len(vector_list)][0]
                    and vector_list[i][1] == vector_list[
                        (i + 1) % len(vector_list)][1]):
                vector_list.pop((i + 1) % len(vector_list))
    triangles = tripy.earclip(vector_list)
    # Write the record name, then one "x,y\t x,y\t x,y\t" line per triangle.
    stage_T.write(stage_two[r * 2] + "\n")
    for h in range(0, len(triangles)):
        for l in range(0, 3):
            stage_T.write(
                str(triangles[h][l][0]) + "," + str(triangles[h][l][1]) +
                "\t")
        stage_T.write("\n")
    print(stage_two[r * 2])
stage_T.close()
def __init__(self,
             path,
             root,
             src_size,
             patch_size,
             fetch_mode='area',
             label_to_use=0,
             rotation=True,
             flip=False,
             blur=0,
             he_augmentation=False,
             scale_augmentation=False,
             color_matching=None,
             dump_patch=None,
             verbose=1):
    """Parse a region-annotation dataset file and precompute sampling tables.

    Reads the text file at `path` (lines: "@slide [src_size]" entries
    followed by region headers "label... point_count" and point lines
    "x y"), triangulates every region via ear clipping, and builds the
    per-slide / per-label / per-label-slide weight tables plus Walker
    alias tables used to sample patches according to `fetch_mode`.
    """
    self.path = path
    self.root = root
    self.src_size = src_size
    self.patch_size = patch_size
    self.fetch_mode = fetch_mode
    self.label_to_use = label_to_use
    if self.fetch_mode not in OpenSlideGenerator.fetch_modes:
        raise Exception('invalid fetch_mode %r' % self.fetch_mode)
    self.rotation = rotation
    self.flip = flip
    self.blur = blur
    self.he_augmentation = he_augmentation
    self.scale_augmentation = scale_augmentation
    self.dump_patch = dump_patch
    self.verbose = verbose
    self.use_color_matching = False
    if color_matching is not None:
        # Reference image for color matching, normalized to [0, 1].
        self.match_color_prepare(cv2.imread(color_matching) / 255.0)
        self.use_color_matching = True
    self.slide_names = []
    self.labels = []  # labels[LABEL_CATEGORY][LABEL]
    self.label_of_region = []
    self.structure = []
    self.shifted_structure = []
    self.triangulation = []
    self.regions_of_label = []  # dict()
    self.regions_of_label_slide = []  # dict()
    self.src_sizes = []
    self.total_weight = 0
    self.slide_weights = []  # total weight of a slide
    self.label_weights = []  # total weight of a label
    self.label_slide_weights = [
    ]  # total weight of regions of certain label in a slide.
    self.weights = []  # overall weight
    self.weights_in_slide = []  # weight in a slide
    self.weights_in_label = []  # weight in the same label
    self.weights_in_label_slide = []  # weight in the same label and slide
    self.total_area = 0
    self.slide_areas = []  # total area of a slide
    self.label_areas = []  # total area of a label
    self.total_triangles = 0
    self.slide_triangles = []  # total triangle number for each slide
    self.label_triangles = []  # total triangle number for each label
    self.label_slide_triangles = [
    ]  # total triangule number for each label-slide pair
    self.serialized_index = [
    ]  # serialized_index[ID] -> (SLIDE_ID, REGION_ID, TRIANGLE_ID)
    self.serialized_index_slide = [
    ]  # serialized_index_slide[SLIDE_ID][ID] -> (REGION_ID, TRIANGLE_ID)
    self.serialized_index_label = [
    ]  # serialized_index_label[label][ID] -> (SLIDE_ID, REGION_ID, TRIANGLE_ID)
    self.serialized_index_label_slide = [
    ]  # *[label][SLIDE_ID][ID] -> (REGION_ID, TRIANGLE_ID)

    # variables for Walker's alias method
    self.a_area = []
    self.p_area = []
    self.a_slide = []
    self.p_slide = []
    self.a_label = []
    self.p_label = []
    self.a_label_slide = []
    self.p_label_slide = []

    # OpenSlide objects
    self.slides = []

    # log
    self.fetch_count = []  # region-wise

    # states for parsing input text file
    # 0: waiting for new file entry
    # 1: waiting for region header or svs entry
    # 2: reading a region
    state = 0
    left_points = 0
    label_buffer = []  # label_buffer[SLIDE_ID][REGION_ID][LABEL_CATEGORY]
    slide_id = -1
    region_id = -1
    with open(path) as f:
        # Strip "#" comments and surrounding whitespace from every line.
        for line in map(lambda l: l.split("#")[0].strip(), f.readlines()):
            if len(line) == 0:
                continue
            is_svs_line = (line[0] == "@")
            if is_svs_line:
                line = line[1:]
            else:
                try:
                    items = list(map(int, line.split()))
                except Exception:
                    raise Exception('invalid dataset file format!')
            if state == 0:
                # Expecting the first "@slide" entry.
                if not is_svs_line:
                    raise Exception('invalid dataset file format!')
                slide_id += 1
                region_id = 0
                svs_name = line.split()[0]
                # NOTE(review): `.isdigit` (no call) is always truthy, so any
                # second token is fed to int() — confirm whether `.isdigit()`
                # was intended.
                if len(line.split()) > 1 and line.split()[1].isdigit:
                    svs_src_size = int(line.split()[1])
                else:
                    svs_src_size = self.src_size
                self.slide_names.append(svs_name)
                self.src_sizes.append(svs_src_size)
                self.structure.append([])
                label_buffer.append([])
                state = 1
            elif state == 1:
                if is_svs_line:  # new file
                    slide_id += 1
                    region_id = 0
                    svs_name = line.split()[0]
                    # Same `.isdigit` caveat as above.
                    if len(line.split()) > 1 and line.split()[1].isdigit:
                        svs_src_size = int(line.split()[1])
                    else:
                        svs_src_size = self.src_size  # default src_size
                    self.slide_names.append(svs_name)
                    self.src_sizes.append(svs_src_size)
                    self.structure.append([])
                    label_buffer.append([])
                    state = 1
                else:  # region header
                    label_buffer[slide_id].append([])
                    # All items but the last are labels (one per category);
                    # the last item is the region's point count.
                    for label_cat, label in enumerate(items[:-1]):
                        label_buffer[slide_id][region_id].append(label)
                        # handling newly found label category
                        if len(self.labels) < label_cat + 1:
                            self.labels.append([])
                            self.regions_of_label.append(dict())
                            self.regions_of_label_slide.append(dict())
                            self.a_label.append(dict())
                            self.p_label.append(dict())
                            self.a_label_slide.append(dict())
                            self.p_label_slide.append(dict())
                            self.label_areas.append(dict())
                            self.label_weights.append(dict())
                            self.label_slide_weights.append(dict())
                            self.label_triangles.append(dict())
                            self.label_slide_triangles.append(dict())
                            self.serialized_index_label.append(dict())
                            self.serialized_index_label_slide.append(
                                dict())
                        # handling newly found label
                        if label not in self.labels[label_cat]:
                            self.labels[label_cat].append(label)
                            self.regions_of_label[label_cat][label] = []
                            self.a_label[label_cat][label] = []
                            self.p_label[label_cat][label] = []
                            self.a_label_slide[label_cat][label] = []
                            self.p_label_slide[label_cat][label] = []
                            self.label_areas[label_cat][label] = 0
                            self.label_weights[label_cat][label] = 0
                            self.label_slide_weights[label_cat][label] = []
                            self.label_triangles[label_cat][label] = 0
                            self.label_slide_triangles[label_cat][
                                label] = []
                            self.serialized_index_label[label_cat][
                                label] = []
                            self.serialized_index_label_slide[label_cat][
                                label] = []
                        self.regions_of_label[label_cat][label].append(
                            (slide_id, region_id))
                    self.structure[slide_id].append([])
                    left_points = items[-1]
                    if items[-1] < 3:
                        raise Exception(
                            'regions should consist of more than 3 points!'
                        )
                    state = 2
            elif state == 2:
                # Expecting exactly `left_points` "x y" vertex lines.
                if is_svs_line or len(items) != 2:
                    raise Exception('invalid dataset file format!')
                self.structure[-1][-1].append((items[0], items[1]))
                left_points -= 1
                if left_points == 0:
                    state = 1
                    region_id += 1
    if state != 1:  # dataset file should end with a completed region entry
        raise Exception('invalid dataset file format!')

    # set label_of_region (-1 where a region has no label in a category)
    for label_cat in range(len(self.labels)):
        self.label_of_region.append([])
        for slide_id, label_of_regions in enumerate(label_buffer):
            self.label_of_region[label_cat].append([])
            for region_id, label_of_categories in enumerate(
                    label_of_regions):
                if label_cat < len(label_of_categories):
                    self.label_of_region[label_cat][slide_id].append(
                        label_of_categories[label_cat])
                else:
                    self.label_of_region[label_cat][slide_id].append(-1)

    # calculate regions_of_label_slide
    for label_cat in range(len(self.labels)):
        for label in self.labels[label_cat]:
            self.regions_of_label_slide[label_cat][label] = []
            for i in range(len(self.structure)):
                self.regions_of_label_slide[label_cat][label].append([])

    # prepare shifted (offset) structure: regions shrunk inward by half the
    # patch source size so sampled patch centers stay inside the region
    self.shifted_structure = copy.deepcopy(self.structure)
    for i in range(len(self.shifted_structure)):
        for j in range(len(self.shifted_structure[i])):
            pco = pyclipper.PyclipperOffset()
            pco.AddPath(self.shifted_structure[i][j], pyclipper.JT_ROUND,
                        pyclipper.ET_CLOSEDPOLYGON)
            # offsetting
            shifted_region = pco.Execute(-self.src_sizes[i] / 2)
            # shifted_region = pco.Execute(0)
            if len(shifted_region) == 0:
                self.shifted_structure[i][j] = []  # collapsed to a point
            else:
                self.shifted_structure[i][j] = shifted_region[0]
            for label_cat in range(len(self.labels)):
                label = self.label_of_region[label_cat][i][j]
                if label != -1:
                    self.regions_of_label_slide[label_cat][label][
                        i].append(j)

    # load slides
    for name in self.slide_names:
        try:
            self.slides.append(OpenSlide(os.path.join(self.root, name)))
        except Exception as exc:
            raise Exception(
                'an error has occurred while reading slide "{}"'.format(
                    name))

    for label_cat in range(len(self.labels)):
        self.weights_in_label.append([])
        self.weights_in_label_slide.append([])

    # region triangulation
    total_region_count = 0
    for i in range(len(self.shifted_structure)):
        self.triangulation.append([])
        self.weights.append([])
        self.weights_in_slide.append([])
        for label_cat in range(len(self.labels)):
            self.weights_in_label[label_cat].append([])
            self.weights_in_label_slide[label_cat].append([])
        self.serialized_index_slide.append([])
        self.a_slide.append([])
        self.p_slide.append([])
        self.slide_weights.append(0)
        self.slide_triangles.append(0)
        w, h = self.slides[i].dimensions  # slide width/height
        for label_cat in range(len(self.labels)):
            for label in self.labels[label_cat]:
                self.a_label_slide[label_cat][label].append([])
                self.p_label_slide[label_cat][label].append([])
                self.serialized_index_label_slide[label_cat][label].append(
                    [])
                self.label_slide_weights[label_cat][label].append(0)
                self.label_slide_triangles[label_cat][label].append(0)
        for j in range(len(self.shifted_structure[i])):
            region = self.shifted_structure[i][j]
            total_region_count += 1
            # triangulation
            self.triangulation[-1].append(tripy.earclip(region))
            for x, y in region:
                if w < x or h < y:
                    raise Exception(
                        'invalid polygon vertex position (%d, %d) in %s!'
                        % (x, y, self.slide_names[i]))
            # triangle area calculation
            self.weights[i].append([])
            self.weights_in_slide[i].append([])
            self.slide_triangles[i] += len(self.triangulation[i][j])
            for label_cat in range(len(self.labels)):
                self.weights_in_label[label_cat][i].append([])
                self.weights_in_label_slide[label_cat][i].append([])
                label = self.label_of_region[label_cat][i][j]
                if label != -1:
                    self.label_triangles[label_cat][label] += len(
                        self.triangulation[i][j])
                    self.label_slide_triangles[label_cat][label][i] += len(
                        self.triangulation[i][j])
            for (x1, y1), (x2, y2), (x3, y3) in self.triangulation[i][j]:
                # Triangle area via the cross product of two edge vectors.
                a = x2 - x1
                b = y2 - y1
                c = x3 - x1
                d = y3 - y1
                area = abs(a * d - b * c) / 2
                # Weight = area measured in units of patch source area.
                weight = area / (self.src_sizes[i]**2)
                self.weights[i][j].append(weight)
                self.weights_in_slide[i][j].append(weight)
                self.total_weight += weight
                self.slide_weights[i] += weight
                for label_cat in range(len(self.labels)):
                    self.weights_in_label[label_cat][i][j].append(weight)
                    self.weights_in_label_slide[label_cat][i][j].append(
                        weight)
                    label = self.label_of_region[label_cat][i][j]
                    if label != -1:
                        self.label_weights[label_cat][label] += weight
                        self.label_slide_weights[label_cat][label][
                            i] += weight

    # calculate raw slide size (from the unshifted regions)
    for i in range(len(self.structure)):
        self.slide_areas.append(0)
        for j in range(len(self.structure[i])):
            region = self.structure[i][j]
            triangles = tripy.earclip(region)
            for (x1, y1), (x2, y2), (x3, y3) in triangles:
                a = x2 - x1
                b = y2 - y1
                c = x3 - x1
                d = y3 - y1
                area = abs(a * d - b * c) / 2
                self.total_area += area
                self.slide_areas[-1] += area
            # NOTE(review): `area` here is the LAST triangle's area, not the
            # region total — looks like this per-label accumulation was
            # meant to sum the whole region; confirm intent.
            for label_cat in range(len(self.labels)):
                label = self.label_of_region[label_cat][i][j]
                if label != -1:
                    self.label_areas[label_cat][label] += area

    # calculate the set of triangle weights for each fetch_mode
    # (normalize to probabilities and build the serialized index tables)
    for i in range(len(self.weights)):  # svs
        for j in range(len(self.weights[i])):  # region
            for k in range(len(self.weights[i][j])):  # triangle
                self.weights[i][j][k] /= self.total_weight
                self.weights_in_slide[i][j][k] /= self.slide_weights[i]
                self.serialized_index.append((i, j, k))
                self.serialized_index_slide[i].append((j, k))
                for label_cat in range(len(self.labels)):
                    label = self.label_of_region[label_cat][i][j]
                    if label != -1:
                        self.weights_in_label[label_cat][i][j][
                            k] /= self.label_weights[label_cat][label]
                        if self.label_slide_weights[label_cat][label][
                                i] > 0:
                            self.weights_in_label_slide[label_cat][i][j][
                                k] /= self.label_slide_weights[label_cat][
                                    label][i]
                        self.serialized_index_label[label_cat][
                            label].append((i, j, k))
                        self.serialized_index_label_slide[label_cat][
                            label][i].append((j, k))
                self.total_triangles += 1

    # Walker's alias method for weighted sampling of triangles
    def walker_precomputation(probs):
        # Builds the alias (a) and probability (p) tables for O(1) weighted
        # sampling.  `probs` is consumed (normalized then zeroed in place).
        EPS = 1e-10
        # normalization
        prob_sum = 0
        for prob in probs:
            prob_sum += prob
        prob_sum *= (1 + EPS)
        for i in range(len(probs)):
            probs[i] /= prob_sum

        a = [-1] * len(probs)
        p = [0] * len(probs)
        fixed = 0
        while fixed < len(probs):
            # block assignment of small items
            for i in range(len(probs)):
                if p[i] == 0 and probs[i] * len(probs) <= (1.0 + EPS):
                    p[i] = probs[i] * len(probs)
                    probs[i] = 0
                    fixed += 1
            # packing of large items
            for i in range(len(probs)):
                if probs[i] * len(probs) > 1.0:
                    for j in range(len(probs)):
                        if p[j] != 0 and a[j] == -1:
                            a[j] = i
                            probs[i] -= (1.0 - p[j]) / len(probs)
                            if probs[i] * len(probs) <= (1.0 + EPS):
                                break
        # fill -1 a
        for i in range(len(probs)):
            if a[i] == -1:
                a[i] = i
        return a, p

    # pre-computation for 'area' mode - all triangles are treated in single array
    probs = []
    for i in range(len(self.weights)):  # svs
        for j in range(len(self.weights[i])):  # region
            for k in range(len(self.weights[i][j])):  # triangle
                probs.append(self.weights[i][j][k])
    self.a_area, self.p_area = walker_precomputation(probs)

    # pre-computaiton for 'slide' mode
    for i in range(len(self.weights)):  # svs
        probs = []
        for j in range(len(self.weights[i])):  # region
            for k in range(len(self.weights[i][j])):  # triangle
                probs.append(self.weights_in_slide[i][j][k])
        self.a_slide[i], self.p_slide[i] = walker_precomputation(probs)

    # pre-computation for 'label' mode
    for label_cat in range(len(self.labels)):
        for label in self.labels[label_cat]:
            probs = []
            for slide_id, region_id in self.regions_of_label[label_cat][
                    label]:
                for tri_id in range(
                        len(self.weights_in_label[label_cat][slide_id]
                            [region_id])):
                    probs.append(self.weights_in_label[label_cat][slide_id]
                                 [region_id][tri_id])
            self.a_label[label_cat][label], self.p_label[label_cat][
                label] = walker_precomputation(probs)

    # pre-computation for 'label-slide' mode
    for label_cat in range(len(self.labels)):
        for label in self.labels[label_cat]:
            for slide_id in range(len(self.weights)):
                probs = []
                for region_id in self.regions_of_label_slide[label_cat][
                        label][slide_id]:
                    for tri_id in range(
                            len(self.weights_in_label_slide[label_cat]
                                [slide_id][region_id])):
                        probs.append(self.weights_in_label_slide[label_cat]
                                     [slide_id][region_id][tri_id])
                self.a_label_slide[label_cat][label][
                    slide_id], self.p_label_slide[label_cat][label][
                        slide_id] = walker_precomputation(probs)

    if self.verbose > 0:
        print('loaded {} slide(s).'.format(len(self.shifted_structure)))
        for i in range(len(self.shifted_structure)):
            print('[{}] {}'.format(i, self.slide_names[i]))
            print('- {} regions'.format(len(self.shifted_structure[i])))
            print('- {} px2'.format(self.slide_areas[i]))
            print('- patch scale:', self.src_sizes[i])
            weight_sum = 0
            for region in self.weights[i]:
                for w_triangle in region:
                    weight_sum += w_triangle
            print('- fetch probability (area mode):', weight_sum)
        # NOTE(review): the second format argument is unused by the format
        # string — possibly a leftover from a longer message.
        print('there are total {} regions.'.format(total_region_count,
                                                   int(self.total_area)))

    # patches per epoch = total annotated area in patch-source-size units
    self.patch_per_epoch = 0
    for i in range(len(self.src_sizes)):
        self.patch_per_epoch += self.slide_areas[i] / (self.src_sizes[i]**
                                                       2)
    self.patch_per_epoch = int(self.patch_per_epoch)
    if self.verbose > 0:
        print('patches per epoch is set to {}.'.format(
            self.patch_per_epoch))
        print()

    self.reset_fetch_count()
'''
Generating the coordinates of the polygon
'''
# Example of a hand-written polygon for testing:
# coord = [[1,1], [3,10], [1,40], [2,80],[12,100], [12,15],[40,10]]
# (stale commented-out sample coordinate/triangle dumps removed)
coord = generatePolygon(800, 800, 600, 0.6, 0.2, 100)
# print(coord)

'''
Triangulating the polygon
Very important bit to calculate the visibility of a given vertice
'''
traingles = tripy.earclip(coord)
#print(traingles)

# Map every triangle's vertices back to their indices in `coord`.
# NOTE(review): coord.index() assumes all polygon vertices are distinct;
# a duplicated point would resolve to its first occurrence.
trianglep = []
for traingle in traingles:
    trianglep.append((coord.index(traingle[0]), coord.index(traingle[1]),
                      coord.index(traingle[2])))
# print(trianglep)

# Array of points will be stored
'''
First
creating Point objects for each vertice
Creating a collection of vertices and instantiating polygon obj by
assigning the vertices
'''
vertices = []
nv = len(coord)
def __init__(self, path, root, src_size, patch_size, fetch_mode='area', rotation=True, flip=False, dump_patch=None):
    """Parse a region-annotation text file, triangulate every region and
    precompute Walker-alias tables for weighted triangle sampling.

    Args:
        path: annotation text file; '@name' lines start a slide entry,
            'label n_points' lines start a region, then n_points 'x y' lines.
        root: directory that contains the slide files named in `path`.
        src_size: source patch edge length used to derive patches/epoch.
        patch_size: output patch edge length (stored, not used here).
        fetch_mode: one of the class's `fetch_modes` ('area'/'slide'/'label').
        rotation, flip, dump_patch: stored sampling/debug options.

    Raises:
        Exception: on an unknown fetch_mode, a malformed annotation file,
            an unreadable slide, or a vertex outside the slide bounds.
    """
    self.path = path
    self.root = root
    self.src_size = src_size
    self.patch_size = patch_size
    self.fetch_mode = fetch_mode
    if not self.fetch_mode in LabeledOpenSlideDataset.fetch_modes:
        raise Exception('invalid fetch_mode %r' % self.fetch_mode)
    self.rotation = rotation
    self.flip = flip
    self.dump_patch = dump_patch
    self.names = []              # slide file names, index = SLIDE_ID
    self.labels = []             # labels[SLIDE_ID][REGION_ID] -> label
    self.structure = []          # structure[SLIDE_ID][REGION_ID] -> [(x, y), ...]
    self.triangulation = []      # triangulation[SLIDE_ID][REGION_ID] -> triangle list
    self.regions_of_label = dict()  # label -> [(SLIDE_ID, REGION_ID), ...]
    self.total_area = 0
    self.slide_areas = []  # total area of a slide
    self.label_areas = dict()  # total area of a label
    self.weights = []  # overall weight
    self.weights_in_slide = []  # weight in a slide
    self.weights_in_label = []  # weight in the same label
    self.total_triangles = 0
    self.slide_triangles = []  # total triangle number for each slide
    self.label_triangles = dict()  # total triangle number for each label
    self.serialized_index = []  # serialized_index[ID] -> (SLIDE_ID, REGION_ID, TRIANGLE_ID)
    self.serialized_index_slide = []  # serialized_index_slide[SLIDE_ID][ID] -> (REGION_ID, TRIANGLE_ID)
    self.serialized_index_label = dict()  # serialized_index_label[label][ID] -> (SLIDE_ID, REGION_ID, TRIANGLE_ID)
    # variables for Walker's alias method
    self.a_area = []
    self.p_area = []
    self.a_slide = []
    self.p_slide = []
    self.a_label = dict()
    self.p_label = dict()
    # OpenSlide objects
    self.slides = []
    # states for parsing input text file
    # 0: waiting for new file entry
    # 1: waiting for region header or svs entry
    # 2: reading a region
    state = 0
    left_points = 0
    with open(path) as f:
        # Strip '#' comments and surrounding whitespace from every line.
        for line in map(lambda l: l.split("#")[0].strip(), f.readlines()):
            if len(line) == 0:
                continue
            is_svs_line = (line[0] == "@")
            if is_svs_line:
                line = line[1:]
            else:
                # Every non-'@' line must be a pair of integers.
                try:
                    x, y = map(int, line.split())
                except Exception:
                    raise Exception('invalid dataset file format!')
            if state == 0:
                if not is_svs_line:
                    raise Exception('invalid dataset file format!')
                self.names.append(line)
                self.labels.append([])
                self.structure.append([])
                state = 1
            elif state == 1:
                if is_svs_line:
                    # new file
                    self.names.append(line)
                    self.labels.append([])
                    self.structure.append([])
                    # NOTE(review): a_slide/p_slide are appended here AND again
                    # once per slide in the triangulation loop below, while the
                    # very first slide (state 0 branch) gets no append here —
                    # the lists end up longer than the slide count, with stale
                    # empty tails after the per-index assignments later.
                    # Verify against the sampling code before relying on len().
                    self.a_slide.append([])
                    self.p_slide.append([])
                    state = 1
                else:
                    # region header: x is the label, y the vertex count
                    self.labels[-1].append(x)
                    # handling newly found label
                    if x not in self.regions_of_label:
                        self.regions_of_label[x] = []
                        self.a_label[x] = []
                        self.p_label[x] = []
                        self.label_areas[x] = 0
                        self.label_triangles[x] = 0
                        self.serialized_index_label[x] = []
                    self.structure[-1].append([])
                    self.regions_of_label[x].append(
                        (len(self.structure) - 1, len(self.structure[-1]) - 1))
                    left_points = y
                    if y < 3:
                        raise Exception(
                            'regions should consist of more than 3 points!'
                        )
                    state = 2
            elif state == 2:
                if is_svs_line or left_points <= 0:
                    raise Exception('invalid dataset file format!')
                self.structure[-1][-1].append((x, y))
                left_points -= 1
                if left_points == 0:
                    state = 1
    if state != 1:
        # dataset file should end with a completed region entry
        raise Exception('invalid dataset file format!')
    # load slides
    for name in self.names:
        try:
            self.slides.append(OpenSlide(os.path.join(self.root, name)))
        except Exception as exc:
            raise Exception(
                'an error has occurred while reading slide "{}"'.format(
                    name))
    # region triangulation
    total_region_count = 0
    for i in range(len(self.structure)):
        self.triangulation.append([])
        self.weights.append([])
        self.weights_in_slide.append([])
        self.weights_in_label.append([])
        self.serialized_index_slide.append([])
        self.a_slide.append([])
        self.p_slide.append([])
        self.slide_areas.append(0)
        self.slide_triangles.append(0)
        w, h = self.slides[i].dimensions  # slide width/height
        for j in range(len(self.structure[i])):
            region = self.structure[i][j]
            total_region_count += 1
            # triangulation
            self.triangulation[-1].append(tripy.earclip(region))
            # reject vertices that fall outside the slide canvas
            for x, y in region:
                if w < x or h < y:
                    raise Exception(
                        'invalid polygon vertex position (%d, %d) in %s!'
                        % (x, y, self.names[i]))
            # triangle area calculation
            self.weights[-1].append([])
            self.weights_in_slide[-1].append([])
            self.weights_in_label[-1].append([])
            self.slide_triangles[-1] += len(self.triangulation[-1][-1])
            label = self.labels[i][j]
            self.label_triangles[label] += len(self.triangulation[-1][-1])
            for (x1, y1), (x2, y2), (x3, y3) in self.triangulation[-1][-1]:
                # area = |cross product of two edge vectors| / 2
                a = x2 - x1
                b = y2 - y1
                c = x3 - x1
                d = y3 - y1
                area = abs(a * d - b * c) / 2
                self.weights[-1][-1].append(area)
                self.weights_in_slide[-1][-1].append(area)
                self.weights_in_label[-1][-1].append(area)
                self.total_area += area
                self.slide_areas[-1] += area
                self.label_areas[label] += area
    # calculate the set of triangle weights for each fetch_mode
    # (normalize in place, and serialize the (i, j, k) triangle indices)
    for i in range(len(self.weights)):  # svs
        for j in range(len(self.weights[i])):  # region
            for k in range(len(self.weights[i][j])):  # triangle
                self.weights[i][j][k] /= self.total_area
                self.weights_in_slide[i][j][k] /= self.slide_areas[i]
                label = self.labels[i][j]
                self.weights_in_label[i][j][k] /= self.label_areas[label]
                self.serialized_index.append((i, j, k))
                self.serialized_index_slide[i].append((j, k))
                self.serialized_index_label[label].append((i, j, k))
                self.total_triangles += 1

    # Walker's alias method for weighted sampling of triangles
    def walker_precomputation(probs):
        # Builds the alias (a) and probability (p) tables; `probs` is
        # consumed destructively. EPS absorbs float rounding around 1/n.
        EPS = 1e-10
        a = [-1] * len(probs)
        p = [0] * len(probs)
        fixed = 0
        while fixed < len(probs):
            # block assignment of small items
            for i in range(len(probs)):
                if p[i] == 0 and probs[i] * len(probs) <= (1.0 + EPS):
                    p[i] = probs[i] * len(probs)
                    probs[i] = 0
                    fixed += 1
            # packing of large items
            for i in range(len(probs)):
                if probs[i] * len(probs) > 1.0:
                    for j in range(len(probs)):
                        if p[j] != 0 and a[j] == -1:
                            a[j] = i
                            probs[i] -= (1.0 - p[j]) / len(probs)
                            if probs[i] * len(probs) <= 1.0:
                                break
        return a, p

    # pre-computation for 'area' mode - all triangles are treated in single array
    probs = []
    for i in range(len(self.weights)):  # svs
        for j in range(len(self.weights[i])):  # region
            for k in range(len(self.weights[i][j])):  # triangle
                probs.append(self.weights[i][j][k])
    self.a_area, self.p_area = walker_precomputation(probs)
    # pre-computation for 'slide' mode
    for i in range(len(self.weights)):  # svs
        probs = []
        for j in range(len(self.weights[i])):  # region
            for k in range(len(self.weights[i][j])):  # triangle
                probs.append(self.weights_in_slide[i][j][k])
        self.a_slide[i], self.p_slide[i] = walker_precomputation(probs)
    # pre-computation for 'label' mode
    for label in self.regions_of_label.keys():
        probs = []
        for slide_id, region_id in self.regions_of_label[label]:
            for tri_id in range(
                    len(self.weights_in_label[slide_id][region_id])):
                probs.append(
                    self.weights_in_label[slide_id][region_id][tri_id])
        self.a_label[label], self.p_label[label] = walker_precomputation(
            probs)
    print('loaded {} slide(s).'.format(len(self.structure)))
    print('there are total {} regions with total area of {} px^2.'.format(
        total_region_count, int(self.total_area)))
    # expected patches per epoch = ceil-ish of total area / src patch area
    self.patch_per_epoch = int(self.total_area / (self.src_size**2) + 1)
    print('{} / {}^2 = {} ... patches/epoch is set to {}.'.format(
        self.total_area, self.src_size, self.total_area /
        (self.src_size**2), self.patch_per_epoch))
def polygon_to_triangles(p: Polygon):
    """Ear-clip the exterior ring of polygon *p* into triangles.

    The trailing vertex of the boundary (a repeat of the first) is
    dropped before triangulation.
    """
    ring = p.boundary.coords[:-1]
    return tripy.earclip(ring)
def rndPlainNodes(N=None,
                  nodeIDs=None,
                  distr='uniformSquare',
                  distrArgs={
                      'xRange': (0, 100),
                      'yRange': (0, 100)
                  }):
    """Generate a set of randomly located nodes.

    Args:
        N: Number of vertices; ignored when `nodeIDs` is given.
        nodeIDs: Explicit list of node IDs, overrides `N`.
        distr: Spatial distribution — one of 'uniformSquare' (default),
            'uniformCircle', 'uniformPoly', 'uniformOnNetwork' (not
            available), 'clustered', 'normal2D' (not available).
        distrArgs: Distribution parameters:
            'uniformSquare': {'xRange': (lo, hi), 'yRange': (lo, hi)}
            'uniformCircle': {'centerLoc': [x, y], 'radius': r}
            'uniformPoly':   {'poly': [...]} and/or {'polys': [...]}
            'clustered':     {'numCluster', 'xRange', 'yRange'} or
                             {'centroidLocs'}; optional 'clusterDiameter'
                             (default 20)

    Returns:
        Dict mapping node ID -> {'loc': (x, y)}, or None on invalid input.
    """
    # Check for required fields ===============================================
    if N is None and nodeIDs is None:
        print(ERROR_MISSING_N)
        return
    # The mutable default in the signature is kept only for interface
    # compatibility. Work on a shallow copy so that neither the shared
    # default dict nor the caller's dict is ever mutated (the original
    # code wrote fallback values straight into distrArgs, leaking state
    # across calls).
    if distrArgs is not None:
        distrArgs = dict(distrArgs)
    # Initialize ==============================================================
    nodes = {}
    if nodeIDs is None:
        nodeIDs = list(range(N))
    # Generate instance =======================================================
    if distr == "uniformSquare":
        # Sanity check --------------------------------------------------------
        if distrArgs is None:
            distrArgs = {'xRange': (0, 100), 'yRange': (0, 100)}
        distrArgs.setdefault('xRange', (0, 100))
        distrArgs.setdefault('yRange', (0, 100))
        # Create nodes --------------------------------------------------------
        for n in nodeIDs:
            x = random.randrange(distrArgs['xRange'][0],
                                 distrArgs['xRange'][1])
            y = random.randrange(distrArgs['yRange'][0],
                                 distrArgs['yRange'][1])
            nodes[n] = {'loc': (x, y)}
    elif distr == "uniformCircle":
        # Sanity check --------------------------------------------------------
        if distrArgs is None:
            distrArgs = {'centerLoc': [0, 0], 'radius': 100}
        if 'centerLoc' not in distrArgs or 'radius' not in distrArgs:
            print(ERROR_MISSING_DISTRARGS_UNICC)
            return
        # Create nodes --------------------------------------------------------
        for n in nodeIDs:
            # Uniform over the disc: uniform angle, sqrt-transformed radius.
            theta = random.uniform(0, 2 * math.pi)
            r = math.sqrt(random.uniform(0, distrArgs['radius']**2))
            x = distrArgs['centerLoc'][0] + r * math.cos(theta)
            y = distrArgs['centerLoc'][1] + r * math.sin(theta)
            nodes[n] = {'loc': (x, y)}
    elif distr == "uniformPoly":
        # Sanity check --------------------------------------------------------
        if distrArgs is None:
            print(ERROR_MISSING_DISTRARGS)
            return
        if 'poly' not in distrArgs and 'polys' not in distrArgs:
            print(ERROR_MISSING_DISTRARGS_UNIPOLY)
            return
        # Create nodes --------------------------------------------------------
        polys = []
        if 'poly' in distrArgs:
            polys.append(distrArgs['poly'])
        if 'polys' in distrArgs:
            polys.extend(distrArgs['polys'])
        # Triangulate every polygon, then sample triangles weighted by area
        lstTriangle = []
        for p in polys:
            lstTriangle.extend(tripy.earclip(p))
        lstWeight = [
            calTriangleAreaByCoords(tri[0], tri[1], tri[2])
            for tri in lstTriangle
        ]
        for n in nodeIDs:
            idx = rndPick(lstWeight)
            (x1, y1), (x2, y2), (x3, y3) = lstTriangle[idx]
            rndR1 = np.random.uniform(0, 1)
            rndR2 = np.random.uniform(0, 1)
            # Uniform sampling inside a triangle (sqrt warp keeps it uniform)
            sq1 = math.sqrt(rndR1)
            rndX = (1 - sq1) * x1 + sq1 * (1 - rndR2) * x2 + sq1 * rndR2 * x3
            rndY = (1 - sq1) * y1 + sq1 * (1 - rndR2) * y2 + sq1 * rndR2 * y3
            nodes[n] = {'loc': (rndX, rndY)}
    elif distr == "uniformOnNetwork":
        print("Stay tune")
        return
    elif distr == "clustered":
        # Sanity check --------------------------------------------------------
        if distrArgs is None:
            print(ERROR_MISSING_DISTRARGS)
            return
        if (('numCluster' not in distrArgs or 'xRange' not in distrArgs
             or 'yRange' not in distrArgs)
                and 'centroidLocs' not in distrArgs):
            print(ERROR_MISSING_DISTRARGS_CLUSTER)
            return
        distrArgs.setdefault('clusterDiameter', 20)
        # Create nodes --------------------------------------------------------
        if 'centroidLocs' in distrArgs:
            centroidLocs = distrArgs['centroidLocs']
        else:
            centroidLocs = []
            for cl in range(distrArgs['numCluster']):
                x = random.randrange(distrArgs['xRange'][0],
                                     distrArgs['xRange'][1])
                y = random.randrange(distrArgs['yRange'][0],
                                     distrArgs['yRange'][1])
                centroidLocs.append([x, y])
        for n in nodeIDs:
            # First pick a centroid, then a point scattered around it
            ctrLoc = centroidLocs[random.randint(0, len(centroidLocs) - 1)]
            theta = random.uniform(0, 2 * math.pi)
            # NOTE(review): sqrt of the raw diameter (not of a squared
            # radius) reproduces the original spread — max radius is
            # sqrt(clusterDiameter); confirm this matches the intended
            # "diameter" semantics before changing it.
            r = math.sqrt(random.uniform(0, distrArgs['clusterDiameter']))
            x = ctrLoc[0] + r * math.cos(theta)
            y = ctrLoc[1] + r * math.sin(theta)
            nodes[n] = {'loc': (x, y)}
    elif distr == "normal2D":
        print("Stay tune")
        return
    else:
        print(ERROR_INCOR_DISTARG)
        return
    return nodes
def test_polygon(self, polygon, expected):
    """Assert that ear-clipping *polygon* yields exactly *expected*."""
    result = tripy.earclip(polygon)
    self.assertEqual(result, expected)
def triangulate_polygon(p):
    """Split polygon *p* into triangles via ear clipping."""
    return tripy.earclip(p)