Example no. 1
    def __init__(self, logfile_full_path='logs/orderbook.log'):
        self.bids = AVLTree()
        self.asks = AVLTree()

        # Lets self.remove_order() look an order up *by order_id* in O(log n):
        # self.bids and self.asks are indexed by Order._key(), i.e. (self.price, self.size),
        # so this dict translates an order_id into that tree key.
        self.order_id_key_translate = {}
        self.trades = {}
        self.subscribers = {}
        self.logger = Logger(logfile_full_path)
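The comment above describes keeping a side dict from order_id to the (price, size) tree key so an order can be removed in O(log n). A minimal sketch of that pattern, assuming a simplified order payload (the Order class itself is not shown here):

from bintrees import AVLTree

bids = AVLTree()                 # (price, size) -> order payload
order_id_key_translate = {}      # order_id -> (price, size) tree key

def add_order(order_id, price, size):
    key = (price, size)
    bids.insert(key, {"id": order_id, "price": price, "size": size})
    order_id_key_translate[order_id] = key   # remember the key for removal by id

def remove_order(order_id):
    key = order_id_key_translate.pop(order_id)
    bids.remove(key)                         # O(log n) deletion from the AVL tree

add_order("a1", 101.5, 30)
remove_order("a1")
print(len(bids))   # 0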
Example no. 2
    def reducer(self, key, values):
        tree = AVLTree()
        points = set()
        for items in values:
            for val in items:
                y = val[2][0][1]

                if val[1]:
                    tree.insert(y, val[2])
                    try:
                        i = line_intersection(val[2], tree.succ_item(y)[1])
                        if i:
                            points.add(i)
                    except KeyError:
                        pass
                    try:
                        i = line_intersection(val[2], tree.prev_item(y)[1])
                        if i:
                            points.add(i)
                    except KeyError:
                        pass
                else:
                    try:
                        i = line_intersection(
                            tree.prev_item(y)[1],
                            tree.succ_item(y)[1])
                        if i:
                            points.add(i)
                    except KeyError:
                        pass
                    try:
                        tree.remove(y)
                    except KeyError:
                        pass
        yield key, list(points)
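The reducer above leans on bintrees raising KeyError when a key has no in-order neighbour; a small standalone sketch of that prev_item/succ_item pattern with made-up keys and payloads:

from bintrees import AVLTree

tree = AVLTree()
for y, seg in [(1.0, "s1"), (2.5, "s2"), (4.0, "s3")]:
    tree.insert(y, seg)

def neighbours(tree, y):
    # Payloads stored just below and just above key y; None when y is an endpoint.
    below = above = None
    try:
        below = tree.prev_item(y)[1]   # KeyError if y is the smallest key
    except KeyError:
        pass
    try:
        above = tree.succ_item(y)[1]   # KeyError if y is the largest key
    except KeyError:
        pass
    return below, above

print(neighbours(tree, 2.5))   # ('s1', 's3')
print(neighbours(tree, 1.0))   # (None, 's2')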
Example no. 3
def getNewAVL(seq):
    if has_fast_tree_support():
        from bintrees import FastAVLTree
        return FastAVLTree(seq)
    else:
        from bintrees import AVLTree
        return AVLTree(seq)
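getNewAVL only selects the concrete class; FastAVLTree (the C extension) and the pure-Python AVLTree accept the same constructor arguments, such as a dict or a sequence of (key, value) pairs. A brief usage sketch, assuming has_fast_tree_support() is defined as in the snippet above:

tree = getNewAVL({2: "two", 1: "one"})
tree.insert(3, "three")
print(tree.min_item())   # (1, 'one') with either implementation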
Example no. 4
 def __init__(self, boundary, obstacle):
     self.boundary = boundary
     self.obstacle = obstacle
     self.vertices = []
     self.get_vertices()
     self.current_edge_list = AVLTree()
     self.vertical_extension = []
Example no. 5
    def __init__(self, series):
        self.__x_tree = AVLTree()
        self.__y_tree = AVLTree()
        self.__mapping = {}

        for (line, (xs, ys)) in enumerate(series):
            for (point, (x, y)) in enumerate(zip(xs, ys)):
                self.__x_tree.insert(x, x)
                self.__y_tree.insert(y, y)

                if self.__mapping.get(x) is None:
                    self.__mapping[x] = {}

                if self.__mapping[x].get(y) is None:
                    self.__mapping[x][y] = []

                self.__mapping[x][y].append((line, point, x, y))
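The snippet only builds the index; one plausible way to query it (an assumption, not shown above) is to snap an arbitrary coordinate to the nearest stored value with the tree's floor/ceiling lookups and then consult __mapping:

from bintrees import AVLTree

x_tree = AVLTree()
for x in [0.0, 1.5, 3.0]:
    x_tree.insert(x, x)

def nearest_key(tree, value):
    # Hypothetical helper: closest stored key to an arbitrary value.
    candidates = []
    try:
        candidates.append(tree.floor_key(value))     # largest key <= value
    except KeyError:
        pass
    try:
        candidates.append(tree.ceiling_key(value))   # smallest key >= value
    except KeyError:
        pass
    return min(candidates, key=lambda k: abs(k - value))

print(nearest_key(x_tree, 2.0))   # 1.5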
Example no. 6
 def testRandomisedValueInsertion(self):
     pytree = AVLTree()
     for _ in range(self.TEST_SZ):
         v = self.rand_val()
         self.tree.insert(v)
         pytree.insert(v, v)
         self.assertEqual(len(self.tree), len(pytree))
         self.assertTrue(self.tree.validateBSTInvariant(self.tree.root))
Example no. 7
    def grow_lambda_function1(self):
        text = open("RecursiveLambdaFunctionGrowth.txt", "r")
        word_dict = {}
        index_dict = {}
        words_evaluated = 0
        word_list = text.read().split()

        for cnt in range(1, len(word_list)):
            index_dict[cnt - 1] = len(word_list) / cnt

        index_tree = AVLTree(index_dict)
        print "Index AVL Tree:", repr(index_tree)
        #index_tree.foreach(print_node,1)

        try:
            while words_evaluated < len(word_list):
                #word_dict[words_evaluated]=word_list[random.randint(0,len(word_list)-1)]
                #print word_list[index_tree.pop_min()[0]]
                word_dict[words_evaluated] = word_list[index_tree.pop_min()[0]]
                words_evaluated += 1
        except:
            pass

        self.lambda_comp_tree = AVLTree(word_dict)
        print "Lambda Composition AVL Tree:"
        self.lambda_comp_tree.foreach(print_node)
        iteration = 0
        while iteration < len(word_list):
            k = self.lambda_comp_tree.get(iteration)
            print "k:", k
            try:
                prev = self.lambda_comp_tree.prev_key(iteration)
                prevk = self.lambda_comp_tree.get(prev)
                print "prevk:", prevk
            except:
                pass
            try:
                succ = self.lambda_comp_tree.succ_key(iteration)
                succk = self.lambda_comp_tree.get(succ)
                print "succk:", succk
            except:
                pass
            iteration += 1
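The drain loop above works because pop_min() always hands back the smallest remaining key; a minimal Python 3 sketch of the same pattern with made-up data:

from bintrees import AVLTree

tree = AVLTree({3: "c", 1: "a", 2: "b"})
while tree:                       # checking emptiness avoids the bare except above:
    key, value = tree.pop_min()   # pop_min() raises when the tree is empty
    print(key, value)             # 1 a, then 2 b, then 3 c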
Example no. 8
    def __init__(self, workspace, disasm_view, parent=None):
        super(QLinearViewer, self).__init__(parent)

        self.workspace = workspace
        self.disasm_view = disasm_view

        self.objects = []  # Objects that will be painted

        self.cfg = None
        self.cfb = None
        self._offset_to_addr = AVLTree()
        self._addr_to_offset = AVLTree()
        self._offset_to_object = AVLTree()
        self._offset = 0
        self._paint_start_offset = 0

        self._linear_view = None  # type: QLinearGraphicsView
        self._disasms = {}

        self._init_widgets()
Example no. 9
    def __init__(self, cache_size, min_obj_size, max_obj_size):

        self._max_size = cache_size
        self._used_size = 0
        # dictionary: obj_id -> object with last and next caching time
        self._cached_objects = {}
        # AVL tree: next_time -> object with last and next caching time
        self._tree = AVLTree()
        self._oldest_obj_id = None
        self._freshest_obj_id = None

        self.stats = CacheStats.CacheStats("Belady", cache_size)
        self.daily_stats = CacheStats.DailyCacheStats(cache_size)
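The tree keyed by next caching time is what makes Belady-style eviction cheap: the victim is the cached object whose next use lies farthest in the future. A minimal sketch of that idea (the payloads and helper name are assumptions, not the original CacheStats code):

from bintrees import AVLTree

tree = AVLTree()           # next_use_time -> obj_id
tree.insert(40, "a")
tree.insert(75, "b")       # "b" is needed again farthest in the future
tree.insert(60, "c")

def evict_belady(tree):
    # Belady/MIN policy: drop the entry with the largest next-use time.
    next_time, obj_id = tree.pop_max()
    return obj_id

print(evict_belady(tree))  # 'b'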
Example no. 10
    def __init__(self, is_stack):
        """
        Constructor

        :param is_stack:    Whether this is a region map for stack frames or not. Different strategies apply for stack
                            regions.
        """
        self.is_stack = is_stack

        # An AVLTree, which maps stack addresses to region IDs
        self._address_to_region_id = AVLTree()
        # A dict, which maps region IDs to memory address ranges
        self._region_id_to_address = { }
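Looking up the region that contains a given stack address usually means finding the greatest region base that is <= the address, which with bintrees is a floor_item() lookup. A small sketch under that assumption (the addresses and region IDs are invented):

from bintrees import AVLTree

address_to_region_id = AVLTree()
address_to_region_id.insert(0x7fff0000, "region_0")
address_to_region_id.insert(0x7fff1000, "region_1")

def region_at(addr):
    try:
        base, region_id = address_to_region_id.floor_item(addr)  # greatest base <= addr
    except KeyError:
        return None                                              # below every region
    return region_id

print(region_at(0x7fff0800))   # 'region_0'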
Example no. 11
    def __init__(self, cfg=None):
        self._blanket = AVLTree()

        self._ffi = cffi.FFI()

        if cfg is not None:
            self._from_cfg(cfg)
        else:
            _l.debug(
                "CFG is not specified. Initialize CFBlanket from the knowledge base."
            )
            for func in self.kb.functions.values():
                self.add_function(func)
Example no. 12
    def grow_lambda_function2(self, wordlist):
        self.word_list = wordlist
        self.word_dict = {}

        cnt = 0
        while cnt < len(self.word_list):
            self.index_dict[cnt] = cnt
            cnt += 1

        self.index_tree = BinaryTree(self.index_dict)
        self.index_tree.foreach(self.build_lambda_comp_tree, 0)

        self.lambda_comp_tree = AVLTree(self.word_dict)
        print "==========================================================================="
        print "Lambda Composition AVL Tree (inorder traversed) is the original text itself:"
        print "==========================================================================="
        self.lambda_expression = []
        self.lambda_comp_tree.foreach(self.build_lambda_expression, 0)
        print self.lambda_expression
        print "==========================================================================="
        print "Lambda Composition AVL Tree (postorder traversed - Postfix expression):"
        print "Every parenthesis has two operands,operated by function outside:"
        print "==============================================================="
        self.lambda_expression = []
        self.lambda_comp_tree.foreach(self.build_lambda_expression, 1)
        self.lambda_composition = []
        cnt = 0

        per_random_walk_graph_tensor_neuron_network_intrinsic_merit = 0
        # Recursively evaluate the Graph Tensor Neuron Network for the random walk
        # composition tree bottom-up, as a Graph Neural Network with Tensor Neuron
        # activations for each subtree.
        while len(self.lambda_expression) > 2:
            operand2 = self.lambda_expression.pop()
            operand1 = self.lambda_expression.pop()
            function = self.lambda_expression.pop()
            subtree_graph_tensor_neuron_network_wght = self.subtree_graph_tensor_neuron_network_weight(
                operand1, function, operand2)
            self.graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
            per_random_walk_graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
            self.lambda_composition = "(" + function + "(" + operand1 + "," + operand2 + "))"
            self.lambda_expression.append(self.lambda_composition)
            cnt += 1
        if len(self.lambda_expression) > 1:
            return (
                self.lambda_expression[0] + "(" + self.lambda_expression[1] +
                ")",
                per_random_walk_graph_tensor_neuron_network_intrinsic_merit)
        else:
            return (
                self.lambda_expression[0],
                per_random_walk_graph_tensor_neuron_network_intrinsic_merit)
Example no. 13
def findIntersections(S):
    T = AVLTree()
    Q = []  # event queue with [QPointF, lineId]
    for i, l in enumerate(S):
        Q.append([l.p1(), i, "l"])  # l : left, r : right, c : center
        Q.append([l.p2(), i, "r"])
    while Q:    # stops if Q = []
        p = Q[0][0]
        Q.pop(0)
        # handleEventPoint(p)
        Up = [p]
        Lp = []
        Segs = []
        T.foreach(lambda k, v: print(k, v))
Example no. 14
def get_edges(t, p):
    """
    Gets the edges that contain point p as their right
    endpoint or in the interior
    """
    lr = []
    lc = []
    for s in AVLTree(t):
        if s.rp == p:
            lr.append(s)
        elif s.lp == p and s.status == INTERIOR:
            lc.append(s)
        elif sideplr(p, s.lp, s.rp) == 0:
            lc.append(s)
    return lr, lc
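sideplr(p, s.lp, s.rp) == 0 is used above as "p lies on the supporting line of s"; sideplr is the usual signed cross-product test. A minimal sketch of such a predicate, assuming points expose .x and .y (the real implementation is not shown here):

from collections import namedtuple

Point = namedtuple("Point", "x y")   # stand-in for whatever point type the code uses

def sideplr(p, lp, rp):
    # 0 when p is collinear with the directed line lp->rp; otherwise the sign
    # says which side p falls on. get_edges above only relies on the zero case.
    cross = (rp.x - lp.x) * (p.y - lp.y) - (rp.y - lp.y) * (p.x - lp.x)
    return (cross > 0) - (cross < 0)

print(sideplr(Point(1, 1), Point(0, 0), Point(2, 2)))   # 0: collinear
print(sideplr(Point(1, 2), Point(0, 0), Point(2, 2)))   # 1: left of lp->rp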
Example no. 15
def intersections(psegs):
    """
    Implementation of the Bentley-Ottmann algorithm.

    Input
      psegs: a list of segments

    Output
      intpoints: a list of intersection points
    """
    eq = EventQueue(psegs)
    intpoints = []
    T = AVLTree()
    L = []
    while not eq.is_empty():  # for all events
        e = eq.events.pop(0)  # remove the event
        p = e.p  # get event point
        L = e.edges  # segments with p as left end
        R, C = get_edges(T, p)  # p: right (R) and interior (C)
        if len(L + R + C) > 1:  # Intersection at p among L+R+C
            for s in L + R + C:
                if not s.contains(p):  # if p is interior
                    s.lp = p  # change lp and
                    s.status = INTERIOR  # status
            intpoints.append(p)
            R, C = get_edges(T, p)
        for s in R + C:
            T.discard(s)
        for s in L + C:
            T.insert(s, str(s))
        if len(L + C) == 0:
            s = R[0]
            if s is not None:
                sl, sr = get_lr(T, s)
                find_new_event(sl, sr, p, eq)
        else:
            sp, spp = get_lrmost(T, L + C)
            try:
                sl = T.prev_key(sp)
            except KeyError:  # only on the first (smallest) key
                sl = None
            try:
                sr = T.succ_key(spp)
            except KeyError:  # only on last key
                sr = None
            find_new_event(sl, sp, p, eq)
            find_new_event(sr, spp, p, eq)
    return intpoints
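find_new_event is not shown in the snippet; in Bentley-Ottmann it checks whether two segments that just became neighbours in T cross at a point the sweep has not reached yet and, if so, enqueues that point. A hedged, self-contained sketch of the idea, with segments as coordinate pairs and a plain list standing in for the event queue (none of these choices come from the original code):

def seg_intersection(s1, s2):
    # Segment-segment intersection for ((x1, y1), (x2, y2)) pairs; None if the
    # segments are parallel or the crossing falls outside either segment.
    (x1, y1), (x2, y2) = s1
    (x3, y3), (x4, y4) = s2
    d = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
    if d == 0:
        return None
    t = ((x1 - x3) * (y3 - y4) - (y1 - y3) * (x3 - x4)) / d
    u = ((x1 - x3) * (y1 - y2) - (y1 - y3) * (x1 - x2)) / d
    if 0 <= t <= 1 and 0 <= u <= 1:
        return (x1 + t * (x2 - x1), y1 + t * (y2 - y1))
    return None

def find_new_event(s1, s2, p, events):
    # Schedule the crossing of two newly adjacent segments if it lies strictly to
    # the right of the current event point p (left-to-right sweep assumed).
    if s1 is None or s2 is None:
        return
    ip = seg_intersection(s1, s2)
    if ip is not None and ip[0] > p[0]:
        events.append(ip)

events = []
find_new_event(((0, 0), (2, 2)), ((0, 2), (2, 0)), (0, 2), events)
print(events)   # [(1.0, 1.0)]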
Example no. 16
 def __init__(self):
     self.lambda_comp_tree = AVLTree()
     self.index_tree = BinaryTree()
     self.word_list = []
     self.word_dict = {}
     self.index_dict = {}
     self.index_list = []
     self.lambda_expression = []
     self.lambda_composition = ""
     self.graph_tensor_neuron_network_intrinsic_merit = 1.0
     self.entropy = 10000000000.0
     self.conceptnet = ConceptNet5Client()
     #self.Similarity="ConceptNet"
     self.Similarity = "WordNet"
     self.ClosedPaths = True
     self.dictionary = PyDictionary()
Example no. 17
    def testRandomRemove(self):
        pytree = AVLTree()
        for i in range(self.TEST_SZ):
            size = i
            lst = self.gen_rand_list(size)
            for value in lst:
                self.tree.insert(value)
                pytree.insert(value, value)

            random.shuffle(lst)

            for j, value in enumerate(lst):
                pytree.remove(value)
                self.tree.remove(value)
                self.assertEqual(len(pytree), len(self.tree))
                self.assertEqual(size - j - 1, len(self.tree))
        self.assertTrue(not self.tree)
Example no. 18
    def populate_distance(self):
        added = 0
        for i in range(self.N):
            E_temp = self.Dist[i]
            V_temp = {}
            Q_temp = AVLTree()

            for key in range(len(E_temp)):
                j = key
                D_temp = Distance(E_temp[key], key)
                V_temp[key] = D_temp

                if key != i:
                    Q_temp[D_temp] = D_temp

                added += 1

            self.C[i] = V_temp
            self.II[i] = 1
            self.P[i] = Q_temp

            A_i = []
            A_i.append(i)
            self.A[i] = A_i
Example no. 19
    def separateFilesToClusters(self, sDirectory, size_threshold):
        start = 0
        for root, dirs, files in os.walk(sDirectory):
            start += len(dirs)

        for i in range(self.N):
            if self.II[i] == 1:
                if len(self.A[i]) >= size_threshold:
                    sDst = join(sDirectory, str(start))
                    os.makedirs(sDst)

                    for j in range(len(self.A[i])):
                        shutil.copy(self.fileList[self.A[i][j]], sDst)

                    start += 1


if __name__ == '__main__':
    t = AVLTree()
    d1 = Distance(1.12, 1)
    d2 = Distance(0.9, 2)
    d3 = Distance(0.9, 3)

    t[d1] = d1
    t[d2] = d2
    t[d3] = d3

    del t[d3]
    print len(t)
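For the block above to work, Distance instances have to be orderable, since bintrees compares keys directly; note that d2 and d3 carry the same distance, so a tie-breaker such as the node id is presumably part of the ordering, otherwise they would collapse into one key. A sketch of what such a class could look like (an assumption; the real Distance is not shown):

import functools
from bintrees import AVLTree

@functools.total_ordering
class Distance(object):
    # Hypothetical stand-in: order primarily by distance, then by node id.
    def __init__(self, dist, node_id):
        self.dist = dist
        self.node_id = node_id
    def __eq__(self, other):
        return (self.dist, self.node_id) == (other.dist, other.node_id)
    def __lt__(self, other):
        return (self.dist, self.node_id) < (other.dist, other.node_id)

t = AVLTree()
for d in (Distance(1.12, 1), Distance(0.9, 2), Distance(0.9, 3)):
    t[d] = d
print(len(t))               # 3: the tie at 0.9 is broken by node_id
print(t.min_key().node_id)  # 2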
Example no. 20
 def __init__(self, entries, shared=None):
     self.sem_lock = Semaphore(value=1)
     self.psbt = AVLTree(entries)
     self.shared = shared
Example no. 21
def watershed(image, markers):
    # Only check the four directly adjacent (non-diagonal) neighbours
    adjacent = np.array([[-1, 0], [0, -1], [0, 1], [1, 0]])

    # Constants
    boundary, unset, unfound, maxMarker = 0, 1, -1, 258

    # compute the gradient image
    gradient = np.abs(cv2.Laplacian(image, cv2.CV_64F))

    # Initialise
    for index, value in np.ndenumerate(markers):
        if value > 0:
            gradient[index[0], index[1]] = value + 255

    gradient = gradient + 2

    # Store which points need checking
    toCheck = AVLTree()
    for i in range(1, gradient.shape[0] - 2):
        for j in range(1, gradient.shape[1] - 2):
            if gradient[i, j] < maxMarker:
                toCheck.insert((i, j), gradient[i, j])


    for i in range(2, 257):
        print(i, len(toCheck))
        gradient[gradient == i] = unset

        c = 0
        for index in toCheck.keys():
            value = gradient[index[0], index[1]]
            if value == unset:
                found = unfound

                # Find a surrounding value above 255
                for adj in adjacent:
                    if gradient[index[0] + adj[0], index[1] + adj[1]] >= maxMarker:
                        # This is a boundary point
                        if found != unfound:
                            # If this is a boundary between two different markers
                            if found != gradient[index[0] + adj[0], index[1] + adj[1]]:
                                gradient[index[0], index[1]] = boundary
                                break
                            else:
                                # same marker on both sides: leave the value unchanged
                                continue


                        # First marked neighbour found: adopt its value
                        gradient[index[0], index[1]] = gradient[index[0] + adj[0], index[1] + adj[1]]
                        toCheck.remove((index[0], index[1]))

                        # Save which piece it borders on
                        found = gradient[index[0] + adj[0], index[1] + adj[1]]
                        c += 1

        print("Found:", c)

    # Prepare for return
    gradient[gradient == 0] = 9999
    gradient = gradient - 258
    gradient[gradient == (9999 - 258)] = 255

    return gradient.astype(np.uint8)
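One caveat about the loop above: it calls toCheck.remove() while iterating toCheck.keys(), i.e. it mutates the tree during traversal, which bintrees does not promise to handle. A safer pattern is to collect the keys first and delete afterwards, e.g.:

from bintrees import AVLTree

to_check = AVLTree({(0, 0): 5, (0, 1): 3, (1, 0): 7})

# Decide what to drop first, then mutate, so the tree is never modified
# while its iterator is still walking it.
done = [key for key, value in to_check.items() if value > 4]
for key in done:
    to_check.remove(key)

print(list(to_check.keys()))   # [(0, 1)]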
Example no. 22
 def __init__(self, tree=None):
     self._storage = AVLTree() if tree is None else tree  # type: AVLTree
Example no. 23
def barrer(textFile):
    pos = 0
    inst = open(textFile, 'r')
    salida = open("salida.txt", 'w')
    S = []
    for linea in inst:
        linea = linea.strip()
        if len(linea) == 0:
            break
        p = linea.split()
        S.append(((float(p[0]), float(p[1])), (float(p[2]), float(p[3]))))

    M = []
    for i in range(len(S)):
        ((x1, y1), (x2, y2)) = S[i]
        if x1 > x2:
            x1, y1, x2, y2 = x2, y2, x1, y1
        heappush(M, ((x1, y1), 'C', i, None))
        heappush(M, ((x2, y2), 'F', i, None))


#    print(len(M))
    B = AVLTree()
    D = {}
    while len(M) > 0:
        ((x, y), tipo, i, j) = heappop(M)
        if x < pos:
            print("descartando", x, y, tipo)
            continue
        pos = x
        print(x, y, tipo, len(M))
        if tipo == 'C':
            B[y] = i
            D[i] = y
            v_izq = None
            try:
                v_izq = B.prev_key(y)
            except:
                pass
            if v_izq is not None:
                l = B[v_izq]
                (xp, yp) = interseccion(S[i], S[l])
                if xp is not None and xp > x:
                    heappush(M, ((xp, yp), 'I', l, i))
            print("izq: ", v_izq)
            v_der = None
            try:
                v_der = B.succ_key(y)
            except:
                pass
            if v_der is not None:
                r = B[v_der]
                (xp, yp) = interseccion(S[i], S[r])
                if xp is not None and xp > x:
                    heappush(M, ((xp, yp), 'I', i, r))
            print("der: ", v_der)
        elif tipo == 'F':
            l = None
            r = None
            v_izq = None
            v_der = None
            try:
                v_izq = B.prev_key(y)
                l = B[v_izq]
                v_der = B.succ_key(y)
                r = B[v_der]

            except:
                pass
            print("izq: ", v_izq)
            print("der: ", v_der)
            B.discard(y)
            del D[i]
            if l is not None and r is not None:
                (xp, yp) = interseccion(S[l], S[r])
                if xp is not None and xp > x:
                    heappush(M, ((xp, yp), 'I', l, r))
        elif tipo == 'I':
            if i not in D or j not in D:
                continue
            assert B[D[i]] == i
            assert B[D[j]] == j
            #            print("antes",B,D,i,j)
            B[D[i]], B[D[j]] = B[D[j]], B[D[i]]
            D[i], D[j] = D[j], D[i]
            #            print("despues",B,D,i,j)
            v_izq = None
            try:
                v_izq = B.prev_key(y)
            except:
                pass
            if v_izq is not None:
                l = B[v_izq]
                (xp, yp) = interseccion(S[j], S[l])
                if xp is not None and xp > x:
                    heappush(M, ((xp, yp), 'I', j, l))
            v_der = None
            try:
                v_der = B.succ_key(y)
            except:
                pass
            if v_der is not None:
                r = B[v_der]
                (xp, yp) = interseccion(S[i], S[r])
                if xp is not None and xp > x:
                    heappush(M, ((xp, yp), 'I', i, r))
            print("izq: ", v_izq)
            print("der: ", v_der)
            print("%f %f" % (x, y), file=salida)
        else:
            print(tipo)

    salida.close()
Example no. 24
def intersection(polygons, canvas):
    # Dictionary for new polygons, connecting the old to new by having old polygons as keys
    new_polygons = {}

    # Add all vertices to event queue
    event_queue = SortedList()
    for polygon in polygons:
        new_polygons[polygon] = ConstructPolygon()
        for i, vertice in enumerate(polygon.vertices):
            event_queue.add(EventPoint(vertice, polygon, i))
            canvas.create_text(vertice.x.evalf() + 240,
                               vertice.y.evalf() + 240,
                               fill="red",
                               text=f"{i}")

    for i, e in enumerate(event_queue):
        pass
        #canvas.create_text(e.point.x.evalf()+250, e.point.y.evalf()+240, fill="blue", text=f"{i}")

    status = AVLTree()
    i = 0
    while len(event_queue) > 0:
        event = event_queue.pop(0)

        add_next = False
        add_prev = False

        #breakpoint()

        # Update status
        next_in_polygon = event.next_in_polygon()
        s_next = OrderedSegment(event.polygon, event.point,
                                next_in_polygon.point)
        previous_in_polygon = event.previous_in_polygon()
        s_prev = OrderedSegment(event.polygon, event.point,
                                previous_in_polygon.point)
        if next_in_polygon in event_queue:
            status.insert(s_next, s_next)
            add_next = True
        else:
            status.discard(s_next)

        if previous_in_polygon in event_queue:
            status.insert(s_prev, s_prev)
            add_prev = True
        else:
            status.discard(s_prev)

        if add_next and event.intersection:
            new_polygons[event.polygon].add_up(event.point)
        if add_prev and event.intersection:
            new_polygons[event.polygon].add_down(event.point)
        if event.mark:
            new_polygons[event.polygon].add_up(event.point)
            new_polygons[event.polygon].add_down(event.point)

        # Add intersections to event queue
        intersections_up = []
        intersections_down = []
        if add_next:
            intersections_up = check_intersections(s_next, status, event,
                                                   next_in_polygon,
                                                   event_queue)
        if add_prev:
            intersections_down = check_intersections(s_prev, status,
                                                     previous_in_polygon,
                                                     event, event_queue)
        for intersection in (intersections_down + intersections_up):
            event_queue.add(intersection)

        # If there were intersections, bind the two polygons together
        if len(intersections_up) > 0:
            new_polygons[event.polygon] = new_polygons[
                intersections_up[0].intersection.next_polygon]
            status.discard(s_next)
            s_next = OrderedSegment(event.polygon, event.point,
                                    intersections_up[0].point)
            status.insert(s_next, s_next)

            event_queue.remove(next_in_polygon)
            next_in_polygon.previous_point = intersections_up[0]
            event_queue.add(next_in_polygon)

            new_polygons[event.polygon].add_up(event.point)
        if len(intersections_down) > 0:
            new_polygons[event.polygon] = new_polygons[
                intersections_down[0].intersection.next_polygon]
            status.discard(s_prev)
            s_prev = OrderedSegment(event.polygon, event.point,
                                    intersections_down[0].point)
            status.insert(s_prev, s_prev)

            event_queue.remove(previous_in_polygon)
            previous_in_polygon.next_point = intersections_down[0]
            event_queue.add(previous_in_polygon)

            new_polygons[event.polygon].add_down(event.point)

        #breakpoint()

    output = []
    for p in new_polygons.values():
        polygon = p.get_polygon()
        if polygon not in output:
            output.append(polygon)

    #breakpoint()

    return output
Example no. 25
    def clear(self):
        """Delete all items in data structure."""
        self._data = {}
        self._T = AVLTree()
Example no. 26
	def __init__(self):

		self.tree = AVLTree()
Example no. 27
from bintrees import AVLTree

tree = AVLTree()
tree.insert(4, "hehe4")
tree.insert(2, "hehe2")
tree.insert(6, "hehe6")
tree.insert(1, "hehe1")
tree.insert(7, "hehe7")

print(tree)
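Because the tree keeps its keys ordered, the entries come back sorted regardless of the insertion order above; continuing the same snippet:

print(list(tree.items()))   # [(1, 'hehe1'), (2, 'hehe2'), (4, 'hehe4'), (6, 'hehe6'), (7, 'hehe7')]
print(tree.min_item())      # (1, 'hehe1')
print(tree.max_item())      # (7, 'hehe7')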
Example no. 28
 def __init__(self, segments, x):
     self.segments = segments
     self.x = x
     self.tree = AVLTree()
Example no. 29
 def __init__(self, membersTypes, elements=None):
     self.membersTypes = membersTypes
     self.__treePriorities = AVLTree()
     self.__mapMessages = dict()
     if elements is not None and len(elements) > 0:
         self._extend(elements)
Example no. 30
    def __init__(self):
        """Initialize an empty priority data structure."""
        self._counter = 0L
        self._T = AVLTree()
        self._data = {}
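A guess at how such a structure might push and pop items: pairing the priority with the running counter keeps every tree key unique even when priorities collide (the method names below are assumptions, not the original API):

from bintrees import AVLTree

class SimplePriorityQueue(object):
    # Hypothetical sketch of the pattern above: (priority, counter) keys keep
    # equal priorities distinct and preserve insertion order among them.
    def __init__(self):
        self._counter = 0
        self._T = AVLTree()
        self._data = {}

    def push(self, item, priority):
        key = (priority, self._counter)
        self._counter += 1
        self._T.insert(key, item)
        self._data[item] = key

    def pop_min(self):
        key, item = self._T.pop_min()
        del self._data[item]
        return item

pq = SimplePriorityQueue()
pq.push("b", 2)
pq.push("a", 1)
pq.push("a2", 1)
print(pq.pop_min())   # 'a' (priority 1, pushed before 'a2')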