def test_get_offset_polygons_truncated_square_inside_small_offset():
    """This tests a "truncated square", which is a square with the corners
    shaved off, and an inside offset that's small compared to the corner
    chamfers."""
    # 'expected_inside_p' is the expected offset polygon with a *negative*
    # offset, so it's inside the input polygon.  The corners are listed in
    # order; consecutive corners (wrapping around at the end) define the
    # boundary lines.
    corners = [(1.4142135623730951, 1.0, 0.0),
               (8.585786437626904, 1.0, 0.0),
               (9.0, 1.4142135623730951, 0.0),
               (9.0, 8.585786437626904, 0.0),
               (8.585786437626904, 9.0, 0.0),
               (1.4142135623730951, 9.0, 0.0),
               (1.0, 8.585786437626904, 0.0),
               (1.0, 1.4142135623730951, 0.0)]
    expected_inside_p = Polygon()
    for start, end in zip(corners, corners[1:] + corners[:1]):
        expected_inside_p.append(Line(start, end))
    output_p = truncated_square_p.get_offset_polygons(-1)
    print("get_offset_polygons() returned:")
    for polygon in output_p:
        print(str(polygon))
    assert len(output_p) == 1
    assert_polygons_are_identical(output_p[0], expected_inside_p)
def test_get_offset_polygons_square_inside():
    # 'expected_inside_p' is the expected offset polygon with a *negative*
    # offset, so it's inside the input polygon.  Consecutive corners
    # (wrapping around at the end) define the boundary lines.
    corners = [(1, 1, 0), (9, 1, 0), (9, 9, 0), (1, 9, 0)]
    expected_inside_p = Polygon()
    for start, end in zip(corners, corners[1:] + corners[:1]):
        expected_inside_p.append(Line(start, end))
    output_p = square_p.get_offset_polygons(-1)
    print("get_offset_polygons() returned:")
    for polygon in output_p:
        print(str(polygon))
    assert len(output_p) == 1
    assert_polygons_are_identical(output_p[0], expected_inside_p)
def append(self, item, unify_overlaps=False, allow_reverse=False):
    """Add a Line or a Polygon to this contour model.

    A Line is attached to an existing open line group (Polygon) if one of
    its endpoints connects; otherwise it starts a new group.  A Polygon is
    either stored as-is or — with unify_overlaps=True — merged with any
    overlapping polygons of the same orientation already in the model.

    @param item: the Line or Polygon to add; any other type is silently
        ignored (presumably handled by a parent class — see final branch)
    @param unify_overlaps: if True, combine the new Polygon with existing
        overlapping polygons via union/containment checks
    @param allow_reverse: if True, also try the reversed Line when looking
        for a connectable line group
    """
    super(ContourModel, self).append(item)
    if isinstance(item, Line):
        item_list = [item]
        if allow_reverse:
            # also offer the reversed line as a connection candidate
            item_list.append(Line(item.p2, item.p1))
        found = False
        # Going back from the end to start. The last line_group always has
        # the highest chance of being suitable for the next line.
        line_group_indexes = range(len(self._line_groups) - 1, -1, -1)
        for line_group_index in line_group_indexes:
            line_group = self._line_groups[line_group_index]
            for candidate in item_list:
                if line_group.is_connectable(candidate):
                    line_group.append(candidate)
                    # the group may have just become closed or adjacent to
                    # another group — try to merge
                    self._merge_polygon_if_possible(
                        line_group, allow_reverse=allow_reverse)
                    found = True
                    break
            if found:
                break
        else:
            # add a single line as part of a new group
            new_line_group = Polygon(plane=self._plane)
            new_line_group.append(item)
            self._line_groups.append(new_line_group)
    elif isinstance(item, Polygon):
        if not unify_overlaps or (len(self._line_groups) == 0):
            # store as-is; only the model limits need updating
            self._line_groups.append(item)
            # NOTE(review): next(item) calls item.__next__(); the Python 2
            # version of this code used item.next() (a method returning the
            # polygon's sub-items).  Confirm that Polygon actually provides
            # the iterator protocol here — this looks like a 2to3 artifact.
            for subitem in next(item):
                self._update_limits(subitem)
        else:
            # go through all polygons and check if they can be combined
            is_outer = item.is_outer()
            # new_queue: polygons (same orientation) still to be merged;
            # processed_polygons: polygons settled in their final form
            new_queue = [item]
            processed_polygons = []
            queue = self.get_polygons()
            while len(queue) > 0:
                polygon = queue.pop()
                if polygon.is_outer() != is_outer:
                    # opposite orientation — never combined with "item"
                    processed_polygons.append(polygon)
                else:
                    processed = []
                    while len(new_queue) > 0:
                        new = new_queue.pop()
                        if new.is_polygon_inside(polygon):
                            # "polygon" is obsoleted by "new"
                            processed.extend(new_queue)
                            break
                        elif polygon.is_polygon_inside(new):
                            # "new" is obsoleted by "polygon"
                            continue
                        elif not new.is_overlap(polygon):
                            # disjoint — keep both
                            processed.append(new)
                            continue
                        else:
                            union = polygon.union(new)
                            if union:
                                # keep same-orientation union parts in the
                                # merge queue; others are final
                                for p in union:
                                    if p.is_outer() == is_outer:
                                        new_queue.append(p)
                                    else:
                                        processed_polygons.append(p)
                            else:
                                processed.append(new)
                            break
                    else:
                        # no queued polygon superseded "polygon"
                        processed_polygons.append(polygon)
                    new_queue = processed
            # rebuild the line groups from the merge result
            while len(self._line_groups) > 0:
                self._line_groups.pop()
            log.info("Processed polygons: %s",
                     [len(p.get_lines()) for p in processed_polygons])
            log.info("New queue: %s", [len(p.get_lines()) for p in new_queue])
            for processed_polygon in processed_polygons + new_queue:
                self._line_groups.append(processed_polygon)
            # TODO: this is quite expensive - can we do it differently?
            self.reset_cache()
    else:
        # ignore any non-supported items (they are probably handled by a
        # parent class)
        pass
assert len(lines0) == len(lines1) for i in range(len(lines0)): line0 = lines0[i] line1 = lines1[i] (p00, p01) = line0.get_points() (p10, p11) = line1.get_points() assert p00 == p10 assert p01 == p11 # "square_p" is a polygon consisting of a simple square, # counter-clockwise. lines = (Line((0, 0, 0), (10, 0, 0)), Line( (10, 0, 0), (10, 10, 0)), Line((10, 10, 0), (0, 10, 0)), Line((0, 10, 0), (0, 0, 0))) square_p = Polygon() for line in lines: square_p.append(line) # "truncated_square_p" is a polygon consisting of a simple square # (counter-clockwise), but with the sharp corners replaced by small # chamfers. lines = (Line((1, 0, 0), (9, 0, 0)), Line( (9, 0, 0), (10, 1, 0)), Line( (10, 1, 0), (10, 9, 0)), Line( (10, 9, 0), (9, 10, 0)), Line( (9, 10, 0), (1, 10, 0)), Line( (1, 10, 0), (0, 9, 0)), Line( (0, 9, 0), (0, 1, 0)), Line((0, 1, 0), (1, 0, 0))) truncated_square_p = Polygon() for line in lines:
assert len(lines0) == len(lines1) for i in range(len(lines0)): line0 = lines0[i] line1 = lines1[i] (p00, p01) = line0.get_points() (p10, p11) = line1.get_points() assert p00 == p10 assert p01 == p11 # "square_p" is a polygon consisting of a simple square, # counter-clockwise. lines = (Line((0, 0, 0), (10, 0, 0)), Line( (10, 0, 0), (10, 10, 0)), Line((10, 10, 0), (0, 10, 0)), Line((0, 10, 0), (0, 0, 0))) square_p = Polygon() for line in lines: square_p.append(line) def test_get_offset_polygons_square_outside(): # 'expected_outside_p' is the expected offset polygon with a # *positive* offset, so it's outside the input polygon. lines = (Line((-1, -1, 0), (11, -1, 0)), Line( (11, -1, 0), (11, 11, 0)), Line( (11, 11, 0), (-1, 11, 0)), Line((-1, 11, 0), (-1, -1, 0))) expected_outside_p = Polygon() for line in lines: expected_outside_p.append(line) output_p = square_p.get_offset_polygons(1)
def append(self, item, unify_overlaps=False, allow_reverse=False):
    """Add a Line or a Polygon to this contour model (Python 2 dialect:
    uses xrange, item.next() and print statements).

    A Line is attached to an existing open line group (Polygon) if one of
    its endpoints connects; otherwise it starts a new group.  A Polygon is
    either stored as-is or — with unify_overlaps=True — merged with any
    overlapping polygons of the same orientation already in the model.

    @param item: the Line or Polygon to add; any other type is silently
        ignored (presumably handled by a parent class — see final branch)
    @param unify_overlaps: if True, combine the new Polygon with existing
        overlapping polygons via union/containment checks
    @param allow_reverse: if True, also try the reversed Line when looking
        for a connectable line group
    """
    super(ContourModel, self).append(item)
    if isinstance(item, Line):
        item_list = [item]
        if allow_reverse:
            # also offer the reversed line as a connection candidate
            item_list.append(Line(item.p2, item.p1))
        found = False
        # Going back from the end to start. The last line_group always has
        # the highest chance of being suitable for the next line.
        line_group_indexes = xrange(len(self._line_groups) - 1, -1, -1)
        for line_group_index in line_group_indexes:
            line_group = self._line_groups[line_group_index]
            for candidate in item_list:
                if line_group.is_connectable(candidate):
                    line_group.append(candidate)
                    # the group may have just become closed or adjacent to
                    # another group — try to merge
                    self._merge_polygon_if_possible(line_group,
                                                    allow_reverse=allow_reverse)
                    found = True
                    break
            if found:
                break
        else:
            # add a single line as part of a new group
            new_line_group = Polygon(plane=self._plane)
            new_line_group.append(item)
            self._line_groups.append(new_line_group)
    elif isinstance(item, Polygon):
        if not unify_overlaps or (len(self._line_groups) == 0):
            # store as-is; only the model limits need updating.
            # item.next() yields the polygon's sub-items (Py2 API).
            self._line_groups.append(item)
            for subitem in item.next():
                self._update_limits(subitem)
        else:
            # go through all polygons and check if they can be combined
            is_outer = item.is_outer()
            # new_queue: polygons (same orientation) still to be merged;
            # processed_polygons: polygons settled in their final form
            new_queue = [item]
            processed_polygons = []
            queue = self.get_polygons()
            while len(queue) > 0:
                polygon = queue.pop()
                if polygon.is_outer() != is_outer:
                    # opposite orientation — never combined with "item"
                    processed_polygons.append(polygon)
                else:
                    processed = []
                    while len(new_queue) > 0:
                        new = new_queue.pop()
                        if new.is_polygon_inside(polygon):
                            # "polygon" is obsoleted by "new"
                            processed.extend(new_queue)
                            break
                        elif polygon.is_polygon_inside(new):
                            # "new" is obsoleted by "polygon"
                            continue
                        elif not new.is_overlap(polygon):
                            # disjoint — keep both
                            processed.append(new)
                            continue
                        else:
                            union = polygon.union(new)
                            if union:
                                # keep same-orientation union parts in the
                                # merge queue; others are final
                                for p in union:
                                    if p.is_outer() == is_outer:
                                        new_queue.append(p)
                                    else:
                                        processed_polygons.append(p)
                            else:
                                processed.append(new)
                            break
                    else:
                        # no queued polygon superseded "polygon"
                        processed_polygons.append(polygon)
                    new_queue = processed
            # rebuild the line groups from the merge result
            while len(self._line_groups) > 0:
                self._line_groups.pop()
            print "Processed polygons: %s" % \
                str([len(p.get_lines()) for p in processed_polygons])
            print "New queue: %s" % str([len(p.get_lines())
                                         for p in new_queue])
            for processed_polygon in processed_polygons + new_queue:
                self._line_groups.append(processed_polygon)
            # TODO: this is quite expensive - can we do it differently?
            self.reset_cache()
    else:
        # ignore any non-supported items (they are probably handled by a
        # parent class)
        pass