def intersections(psegs):
    """
    Implementation of the Bentley-Ottmann algorithm.

    Input
        psegs: a list of segments
    Output
        intpoints: a list of intersection points

    Sweep status T is an AVL tree of segments ordered at the sweep line;
    eq is the event queue ordered by event point.  Helpers get_edges,
    get_lr, get_lrmost and find_new_event are defined elsewhere in the
    project (their exact contracts are assumed from use here).
    """
    eq = EventQueue(psegs)
    intpoints = []
    T = AVLTree()
    L = []
    while not eq.is_empty():              # for all events
        e = eq.events.pop(0)              # remove the event
        p = e.p                           # get event point
        L = e.edges                       # segments with p as left end
        R, C = get_edges(T, p)            # p: right (R) and interior (C)
        if len(L + R + C) > 1:            # intersection at p among L+R+C
            for s in L + R + C:
                if not s.contains(p):     # if p is interior
                    s.lp = p              # change lp and
                    s.status = INTERIOR   # status
            intpoints.append(p)
            # re-query: segments whose left point was moved to p may have
            # changed category in the status tree
            R, C = get_edges(T, p)
        # segments ending at p leave the status; segments starting at or
        # continuing through p (re)enter so neighbor order is refreshed
        for s in R + C:
            T.discard(s)
        for s in L + C:
            T.insert(s, str(s))
        if len(L + C) == 0:
            # only right endpoints at p: the former neighbors of R[0]
            # become adjacent and may intersect.
            # NOTE(review): R[0] raises IndexError if R is empty — the
            # following None check suggests a guard was intended; confirm
            # events always carry at least one edge.
            s = R[0]
            if s is not None:
                sl, sr = get_lr(T, s)
                find_new_event(sl, sr, p, eq)
        else:
            # test the outermost continuing segments against their new
            # tree neighbors
            sp, spp = get_lrmost(T, L + C)
            try:
                sl = T.prev_key(sp)
            except KeyError:              # only on last key
                sl = None
            try:
                sr = T.succ_key(spp)
            except KeyError:              # only on last key
                sr = None
            find_new_event(sl, sp, p, eq)
            find_new_event(sr, spp, p, eq)
    return intpoints
def intersections(psegs):
    """
    Bentley-Ottmann plane sweep over the segments in *psegs*.

    Returns the list of intersection points found.  Relies on the
    project-level EventQueue, AVLTree status structure and the helpers
    get_edges / get_lr / get_lrmost / find_new_event.
    """
    queue = EventQueue(psegs)
    found = []
    status = AVLTree()
    left = []
    while not queue.is_empty():
        # next event point, with the segments that start there
        event = queue.events.pop(0)
        point = event.p
        left = event.edges
        # segments that end at (right) or pass through (interior) the point
        right, interior = get_edges(status, point)
        involved = left + right + interior
        if len(involved) > 1:
            # more than one segment meets here -> report an intersection
            for seg in involved:
                if not seg.contains(point):
                    seg.lp = point
                    seg.status = INTERIOR
            found.append(point)
            # left points moved above, so re-derive right/interior sets
            right, interior = get_edges(status, point)
        # refresh the status: drop what ends or crosses, re-add what
        # starts or continues (re-insertion restores neighbor order)
        for seg in right + interior:
            status.discard(seg)
        for seg in left + interior:
            status.insert(seg, str(seg))
        continuing = left + interior
        if not continuing:
            # pure right endpoints: the old neighbors become adjacent
            seg = right[0]
            if seg is not None:
                below, above = get_lr(status, seg)
                find_new_event(below, above, point, queue)
        else:
            # probe the outermost continuing segments against their
            # current tree neighbors
            lowest, highest = get_lrmost(status, continuing)
            try:
                below = status.prev_key(lowest)
            except KeyError:    # no key below
                below = None
            try:
                above = status.succ_key(highest)
            except KeyError:    # no key above
                above = None
            find_new_event(below, lowest, point, queue)
            find_new_event(above, highest, point, queue)
    return found
class BinTree:
    """Sorted mapping from rating -> set of usernames, backed by an AVL tree.

    Improvements over the previous version: the shared class-level
    attribute ``tree = None`` is gone (each instance gets its own tree in
    ``__init__``; the class attribute served no purpose and invited
    accidental sharing), membership tests use the idiomatic
    ``not in`` / truthiness forms, and the range query is a comprehension.
    """

    def __init__(self):
        # AVLTree keeps ratings ordered, which makes range slicing cheap.
        self.tree = AVLTree()

    def insert(self, rating, username):
        """Register *username* under *rating*."""
        if rating not in self.tree:
            self.tree[rating] = set()
        self.tree[rating].add(username)

    def remove(self, rating, username):
        """Remove *username* from *rating*; drop the rating when empty.

        Raises KeyError if the username is not registered under the
        rating (same behavior as before, via set.remove).
        """
        if rating in self.tree:
            self.tree[rating].remove(username)
            if not self.tree[rating]:
                self.tree.discard(rating)

    def all_pairs_inbetween(self, low, high):
        """Return (rating, username) pairs for ratings in the slice [low, high).

        Slice bounds follow the tree implementation's slicing semantics —
        presumably half-open as in bintrees; confirm against the library.
        """
        return [(k, u) for k, v in self.tree[low:high].items() for u in v]
def intersection(polygons, canvas):
    """Sweep-line construction of the intersection of *polygons*.

    Draws debug labels on *canvas* (a Tk-style canvas, judging by
    create_text), walks polygon vertices as events in a SortedList, keeps
    active edges in an AVL status tree, and stitches new polygons together
    whenever edges of two polygons intersect.  Returns the list of
    distinct constructed polygons.

    NOTE(review): EventPoint / OrderedSegment / ConstructPolygon /
    check_intersections are project types; the comments below describe
    only what this function visibly does with them.
    """
    # Dictionary for new polygons, connecting the old to new by having old
    # polygons as keys
    new_polygons = {}
    # Add all vertices to event queue
    event_queue = SortedList()
    for polygon in polygons:
        new_polygons[polygon] = ConstructPolygon()
        for i, vertice in enumerate(polygon.vertices):
            event_queue.add(EventPoint(vertice, polygon, i))
            # debug label at the vertex position (offset into canvas coords)
            canvas.create_text(vertice.x.evalf() + 240, vertice.y.evalf() + 240,
                               fill="red", text=f"{i}")
    # Leftover debug loop — the body is commented out, so this does nothing.
    for i, e in enumerate(event_queue):
        pass
        #canvas.create_text(e.point.x.evalf()+250, e.point.y.evalf()+240, fill="blue", text=f"{i}")
    status = AVLTree()
    i = 0
    while len(event_queue) > 0:
        event = event_queue.pop(0)
        add_next = False
        add_prev = False
        #breakpoint()
        # Update status: segments to the neighbors of this vertex enter the
        # status if the neighbor is still ahead of the sweep, else leave it.
        next_in_polygon = event.next_in_polygon()
        s_next = OrderedSegment(event.polygon, event.point, next_in_polygon.point)
        previous_in_polygon = event.previous_in_polygon()
        s_prev = OrderedSegment(event.polygon, event.point, previous_in_polygon.point)
        if next_in_polygon in event_queue:
            status.insert(s_next, s_next)
            add_next = True
        else:
            status.discard(s_next)
        if previous_in_polygon in event_queue:
            status.insert(s_prev, s_prev)
            add_prev = True
        else:
            status.discard(s_prev)
        # Intersection events contribute the point to the polygon under
        # construction on whichever chains continue.
        if add_next and event.intersection:
            new_polygons[event.polygon].add_up(event.point)
        if add_prev and event.intersection:
            new_polygons[event.polygon].add_down(event.point)
        if event.mark:
            new_polygons[event.polygon].add_up(event.point)
            new_polygons[event.polygon].add_down(event.point)
        # Add intersections to event queue
        intersections_up = []
        intersections_down = []
        if add_next:
            intersections_up = check_intersections(s_next, status, event,
                                                   next_in_polygon, event_queue)
        if add_prev:
            intersections_down = check_intersections(s_prev, status,
                                                     previous_in_polygon, event,
                                                     event_queue)
        for intersection in (intersections_down + intersections_up):
            event_queue.add(intersection)
        # If there were intersections, bind the two polygons together:
        # both old polygons now map to the same ConstructPolygon, the
        # crossing segment is truncated at the first intersection, and the
        # far endpoint event is re-queued with its chain link rewired.
        if len(intersections_up) > 0:
            new_polygons[event.polygon] = new_polygons[
                intersections_up[0].intersection.next_polygon]
            status.discard(s_next)
            s_next = OrderedSegment(event.polygon, event.point,
                                    intersections_up[0].point)
            status.insert(s_next, s_next)
            event_queue.remove(next_in_polygon)
            next_in_polygon.previous_point = intersections_up[0]
            event_queue.add(next_in_polygon)
            new_polygons[event.polygon].add_up(event.point)
        if len(intersections_down) > 0:
            new_polygons[event.polygon] = new_polygons[
                intersections_down[0].intersection.next_polygon]
            status.discard(s_prev)
            s_prev = OrderedSegment(event.polygon, event.point,
                                    intersections_down[0].point)
            status.insert(s_prev, s_prev)
            event_queue.remove(previous_in_polygon)
            previous_in_polygon.next_point = intersections_down[0]
            event_queue.add(previous_in_polygon)
            new_polygons[event.polygon].add_down(event.point)
        #breakpoint()
    # Collect distinct results (merged polygons share a ConstructPolygon).
    output = []
    for p in new_polygons.values():
        polygon = p.get_polygon()
        if not polygon in output:
            output.append(polygon)
    #breakpoint()
    return output
class BeladyCache():
    """Simulation of Belady's optimal cache replacement policy.

    Eviction removes the object whose next access (``next_line_number``,
    known in advance from the trace) lies farthest in the future — that is
    the largest key in the AVL tree.  The cache keeps two views of each
    object that must stay in sync:

      * ``_cached_objects``: obj_id -> [xtime, size, obj_id, next_time]
      * ``_tree``:           next_time -> the same record

    NOTE(review): the tree is keyed by next access time alone, so two
    objects with the same next_time would collide — presumably trace
    times are unique; confirm against the trace generator.
    """

    def __init__(self, cache_size, min_obj_size, max_obj_size):
        # total capacity in bytes and bytes currently occupied
        self._max_size = cache_size
        self._used_size = 0
        # dictionary: obj_id -> object with last and next caching time
        self._cached_objects = {}
        # AVL tree: next_time -> object with last and next caching time
        self._tree = AVLTree()
        # NOTE(review): never updated anywhere in this class
        self._oldest_obj_id = None
        self._freshest_obj_id = None
        self.stats = CacheStats.CacheStats("Belady", cache_size)
        self.daily_stats = CacheStats.DailyCacheStats(cache_size)

    def get_cache_stats_total(self):
        """Return the lifetime statistics as a dict."""
        return self.stats.to_dict()

    def get_cache_stats_day(self):
        """Return today's statistics as a dict and reset the daily counters."""
        self.daily_stats.cache_used = self._used_size
        s = self.daily_stats.to_dict()
        self.daily_stats.reset()
        return s

    def get_num_cached_objects(self):
        """Number of objects currently cached."""
        return len(self._cached_objects)

    def is_cached(self, obj_id):
        """True if *obj_id* is currently in the cache."""
        return obj_id in self._cached_objects

    def is_remembered(self, obj_id):
        """Belady keeps no metadata beyond the cache itself, so this is is_cached."""
        return self.is_cached(obj_id)

    def get_free_cache_bytes(self):
        """Remaining capacity in bytes."""
        return self._max_size - self._used_size

    def update_obj_size(self, obj_id, size, delta):
        """Set a cached object's size to *size*; *delta* adjusts used bytes."""
        if obj_id in self._cached_objects:
            # update size of object in cache
            self._cached_objects[obj_id][CACHE_SIZE] = size
            # update size of object in tree
            next_time = self._cached_objects[obj_id][CACHE_NEXT_TIME]
            # inefficient: two lookups
            self._tree[next_time][CACHE_SIZE] = size
            # update size used in cache
            self._used_size += delta
            # TODO: shouldn't we also check whether the cache size is now exceeded?

    def _evict_bytes(self, bytes, xtime):
        """Evict objects (farthest next access first) until *bytes* are freed."""
        if self.stats.first_eviction_ts == 0:
            self.stats.first_eviction_ts = xtime
        # remove objects from cache
        evicted_bytes = 0
        while evicted_bytes < bytes:
            # remove object with largest next_line_number from tree
            (next_line_number, obj) = self._tree.pop_max()
            # remove same object from cache
            evicted_bytes += self._remove_cached(obj[CACHE_OBJ_ID])
            # update stats
            self.stats.cached_objects_current -= 1
            self.stats.evicted_objects += 1
            self.daily_stats.evicted_objects += 1

    def remove_cached(self, obj_id):
        """Explicit delete: remove *obj_id* and return its size, or None if absent."""
        if self.is_cached(obj_id):
            self.stats.deleted_objects += 1
            self.stats.cached_objects_current -= 1
            self.daily_stats.deleted_objects += 1
            return self._remove_cached(obj_id)
        return None

    def _remove_cached(self, obj_id):
        """Drop *obj_id* from both views and return the bytes freed (0 if absent)."""
        if obj_id in self._cached_objects:
            # remove object from cache
            obj = self._cached_objects.pop(obj_id)
            # remove object from tree
            next_line_number = obj[CACHE_NEXT_TIME]
            self._tree.discard(next_line_number)
            # adapt size
            self._used_size -= obj[CACHE_SIZE]
            return obj[CACHE_SIZE]
        return 0

    def cache_object(self, obj_id, size, xtime, next_line_number,
                     force=True, is_new=False):
        """Insert an object, evicting as needed; raises if it cannot fit.

        NOTE(review): *force* and *is_new* are accepted but never read here.
        """
        # do not cache object if next_line_number == -1 (never accessed again)
        if next_line_number == -1:
            return
        # add object to cache
        self._cached_objects[obj_id] = [xtime, size, obj_id, next_line_number]
        # add new object to tree
        self._tree[next_line_number] = [xtime, size, obj_id, next_line_number]
        # update size
        self._used_size += size
        # remove other objects from cache if necessary
        if self._used_size > self._max_size:
            bytes = self._used_size - self._max_size
            self._evict_bytes(bytes, next_line_number)
        # check whether cache is large enough; eviction may have removed the
        # new object itself if its next access was the farthest away
        if self._used_size > self._max_size:
            # remove new object
            self._cached_objects.pop(obj_id)
            self._tree.discard(next_line_number)
            raise Exception("Error, cannot cache file. Size to large: %s %d" % (obj_id, size))
        # update stats
        self.stats.cached_objects_current += 1
        self.stats.cached_objects_total += 1
        self.stats.cached_bytes_written += size
        self.daily_stats.cached_objects += 1
        self.daily_stats.cached_bytes_written += size

    def get_cached(self, obj_id, xtime, next_line):
        """GET: on a hit, re-insert with the new next-access time; return hit/miss."""
        if obj_id in self._cached_objects:
            # remove object from cache
            size = self._remove_cached(obj_id)
            # add object with new time to cache
            self.cache_object(obj_id, size, xtime, next_line)
            # update stats
            self.stats.cache_hits += 1
            self.stats.cached_bytes_read += size
            self.daily_stats.cache_hits += 1
            self.daily_stats.cached_bytes_read += size
            return True
        # update stats
        self.stats.cache_misses += 1
        self.daily_stats.cache_misses += 1
        return False

    def rename(self, from_obj_id, to_obj_id):
        """Re-key a cached object from *from_obj_id* to *to_obj_id*."""
        # Belady cache stores from_obj only if to_obj is accessed (GET),
        # possibly after a RENAME chain.
        # Belady cache does not store to_obj because it will be overwritten
        # by this function.
        if self.is_cached(to_obj_id):
            raise Exception("Error in rename(...): File cached that is not needed.")
        if self.is_cached(from_obj_id):
            # retrieve object and store it under new ID
            obj = self._cached_objects.pop(from_obj_id)
            self._cached_objects[to_obj_id] = obj
            # update ID of object in tree (same list object, shared reference)
            next_line_number = obj[CACHE_NEXT_TIME]
            self._tree[next_line_number][CACHE_OBJ_ID] = to_obj_id
def barrer(textFile):
    """Sweep-line scan reporting intersections of the segments in *textFile*.

    Each input line holds "x1 y1 x2 y2" for one segment; reading stops at
    the first blank line.  Event types: 'C' = segment start (left end),
    'F' = segment end, 'I' = intersection.  Intersection points are
    printed to stdout and written to "salida.txt".  Relies on the
    project-level ``interseccion(s1, s2)`` and on bintrees' AVLTree.

    Fixes vs. the previous version: ``linea.strip()`` returned a new
    string that was discarded, so newline-terminated blank lines never
    triggered the break; ``len(linea) is 0`` identity-compared an int
    literal (works only via CPython small-int caching); in the 'F' branch
    ``v_izq``/``v_der`` could be unbound or stale after the bare
    ``except``; bare excepts narrowed to KeyError; files are now closed
    via ``with``.
    """
    pos = 0
    S = []
    with open(textFile, 'r') as inst:
        for linea in inst:
            linea = linea.strip()   # strip() returns a new string; must rebind
            if len(linea) == 0:     # blank line terminates the input
                break
            p = linea.split()
            S.append(((float(p[0]), float(p[1])), (float(p[2]), float(p[3]))))

    # Build the event heap, normalizing each segment left-to-right.
    M = []
    for i in range(len(S)):
        ((x1, y1), (x2, y2)) = S[i]
        if x1 > x2:
            x1, y1, x2, y2 = x2, y2, x1, y1
        heappush(M, ((x1, y1), 'C', i, None))
        heappush(M, ((x2, y2), 'F', i, None))

    B = AVLTree()   # sweep status: y-coordinate -> segment index
    D = {}          # segment index -> its current key in B
    with open("salida.txt", 'w') as salida:
        while len(M) > 0:
            ((x, y), tipo, i, j) = heappop(M)
            if x < pos:   # stale event behind the sweep line
                print("descartando", x, y, tipo)
                continue
            pos = x
            print(x, y, tipo, len(M))
            if tipo == 'C':
                # segment i enters the status; test against both neighbors
                B[y] = i
                D[i] = y
                v_izq = None
                try:
                    v_izq = B.prev_key(y)
                except KeyError:
                    pass
                if v_izq is not None:
                    l = B[v_izq]
                    (xp, yp) = interseccion(S[i], S[l])
                    if xp is not None and xp > x:
                        heappush(M, ((xp, yp), 'I', l, i))
                print("izq: ", v_izq)
                v_der = None
                try:
                    v_der = B.succ_key(y)
                except KeyError:
                    pass
                if v_der is not None:
                    r = B[v_der]
                    (xp, yp) = interseccion(S[i], S[r])
                    if xp is not None and xp > x:
                        heappush(M, ((xp, yp), 'I', i, r))
                print("der: ", v_der)
            elif tipo == 'F':
                # segment i leaves; its neighbors become adjacent
                l = None
                r = None
                v_izq = None
                v_der = None
                try:
                    v_izq = B.prev_key(y)
                    l = B[v_izq]
                except KeyError:
                    pass
                try:
                    v_der = B.succ_key(y)
                    r = B[v_der]
                except KeyError:
                    pass
                print("izq: ", v_izq)
                print("der: ", v_der)
                B.discard(y)
                del D[i]
                if l is not None and r is not None:
                    (xp, yp) = interseccion(S[l], S[r])
                    if xp is not None and xp > x:
                        heappush(M, ((xp, yp), 'I', l, r))
            elif tipo == 'I':
                # intersection of segments i and j: swap their order in the
                # status and test the new outer neighbors
                if i not in D or j not in D:
                    continue
                assert B[D[i]] == i
                assert B[D[j]] == j
                B[D[i]], B[D[j]] = B[D[j]], B[D[i]]
                D[i], D[j] = D[j], D[i]
                v_izq = None
                try:
                    v_izq = B.prev_key(y)
                except KeyError:
                    pass
                if v_izq is not None:
                    l = B[v_izq]
                    (xp, yp) = interseccion(S[j], S[l])
                    if xp is not None and xp > x:
                        heappush(M, ((xp, yp), 'I', j, l))
                v_der = None
                try:
                    v_der = B.succ_key(y)
                except KeyError:
                    pass
                if v_der is not None:
                    r = B[v_der]
                    (xp, yp) = interseccion(S[i], S[r])
                    if xp is not None and xp > x:
                        heappush(M, ((xp, yp), 'I', i, r))
                print("izq: ", v_izq)
                print("der: ", v_der)
                print("%f %f" % (x, y), file=salida)
            else:
                print(tipo)