def __init__(self, boundary, obstacle):
     """Set up sweep-line decomposition state for a boundary and an obstacle."""
     self.boundary = boundary
     self.obstacle = obstacle
     # Vertex list is populated by get_vertices() right after creation.
     self.vertices = []
     self.get_vertices()
     # Sweep-line status structure and pending vertical extensions.
     self.current_edge_list = AVLTree()
     self.vertical_extension = []
Ejemplo n.º 2
0
    def reducer(self, key, values):
        """Sweep-line style reducer: collect intersection points of segments.

        Each event carries (..., is_left_endpoint, segment); segments are kept
        in an AVL tree keyed by the y coordinate of their first point.
        """
        status = AVLTree()
        found = set()
        for batch in values:
            for event in batch:
                y_coord = event[2][0][1]
                if event[1]:
                    # Left endpoint: activate the segment, then test it against
                    # both tree neighbours for an intersection.
                    status.insert(y_coord, event[2])
                    for neighbour in (status.succ_item, status.prev_item):
                        try:
                            other = neighbour(y_coord)[1]
                        except KeyError:
                            # No neighbour on that side.
                            continue
                        hit = line_intersection(event[2], other)
                        if hit:
                            found.add(hit)
                else:
                    # Right endpoint: the segment leaves; its two neighbours
                    # become adjacent and may intersect.
                    try:
                        hit = line_intersection(
                            status.prev_item(y_coord)[1],
                            status.succ_item(y_coord)[1])
                        if hit:
                            found.add(hit)
                    except KeyError:
                        pass
                    try:
                        status.remove(y_coord)
                    except KeyError:
                        pass
        yield key, list(found)
 def testRandomisedValueInsertion(self):
     """Insert random values into both trees and compare size/invariant."""
     reference = AVLTree()
     for _ in range(self.TEST_SZ):
         value = self.rand_val()
         self.tree.insert(value)
         reference.insert(value, value)
         # After every insertion the two trees must agree on size, and the
         # BST invariant must still hold for the tree under test.
         self.assertEqual(len(self.tree), len(reference))
         self.assertTrue(self.tree.validateBSTInvariant(self.tree.root))
Ejemplo n.º 4
0
    def __init__(self, logfile_full_path='logs/orderbook.log'):
        """Create an empty order book with per-side trees and a logger."""
        # Price-ordered books, one per side.
        self.bids, self.asks = AVLTree(), AVLTree()

        # order_id -> Order._key() (i.e. (price, size)), so an order can be
        # located in the trees in O(log n) by id (used by remove_order()).
        self.order_id_key_translate = {}
        self.trades, self.subscribers = {}, {}
        self.logger = Logger(logfile_full_path)
Ejemplo n.º 5
0
    def __init__(self, cache_size, min_obj_size, max_obj_size):
        """Create an empty Belady cache with the given capacity."""
        self._max_size = cache_size
        self._used_size = 0
        # obj_id -> object carrying its last and next caching times.
        self._cached_objects = {}
        # next_time -> the same objects, kept ordered for eviction choice.
        self._tree = AVLTree()
        self._oldest_obj_id = None
        self._freshest_obj_id = None

        # Lifetime and per-day statistics collectors.
        self.stats = CacheStats.CacheStats("Belady", cache_size)
        self.daily_stats = CacheStats.DailyCacheStats(cache_size)
Ejemplo n.º 6
0
    def __init__(self, cfg=None):
        """Build the blanket from a CFG when given, else from the knowledge base."""
        self._blanket = AVLTree()
        self._ffi = cffi.FFI()

        if cfg is None:
            _l.debug(
                "CFG is not specified. Initialize CFBlanket from the knowledge base."
            )
            for func in self.kb.functions.values():
                self.add_function(func)
        else:
            self._from_cfg(cfg)
Ejemplo n.º 7
0
    def __init__(self, is_stack):
        """
        Constructor.

        :param is_stack: True when this map tracks stack frames; stack maps use
                         different strategies than non-stack (heap) maps.
        """
        self.is_stack = is_stack
        # Sorted map from stack addresses to region IDs.
        self._address_to_region_id = AVLTree()
        # Reverse lookup: region ID -> memory address range.
        self._region_id_to_address = {}
Ejemplo n.º 8
0
    def grow_lambda_function2(self, wordlist):
        """Build a lambda-composition AVL tree from *wordlist* and fold it
        bottom-up into a composed expression plus an intrinsic-merit score.

        Returns a tuple (composed_expression_string, per_random_walk_merit).
        """
        self.word_list = wordlist
        self.word_dict = {}

        # Identity index map: position -> position.
        cnt = 0
        while cnt < len(self.word_list):
            self.index_dict[cnt] = cnt
            cnt += 1

        self.index_tree = BinaryTree(self.index_dict)
        self.index_tree.foreach(self.build_lambda_comp_tree, 0)

        self.lambda_comp_tree = AVLTree(self.word_dict)
        print "==========================================================================="
        print "Lambda Composition AVL Tree (inorder traversed) is the original text itself:"
        print "==========================================================================="
        # Inorder traversal (mode 0) reproduces the original text.
        self.lambda_expression = []
        self.lambda_comp_tree.foreach(self.build_lambda_expression, 0)
        print self.lambda_expression
        print "==========================================================================="
        print "Lambda Composition AVL Tree (postorder traversed - Postfix expression):"
        print "Every parenthesis has two operands,operated by function outside:"
        print "==============================================================="
        # Postorder traversal (mode 1) yields a postfix expression to evaluate.
        self.lambda_expression = []
        self.lambda_comp_tree.foreach(self.build_lambda_expression, 1)
        self.lambda_composition = []
        cnt = 0

        per_random_walk_graph_tensor_neuron_network_intrinsic_merit = 0
        #recursively evaluate the Graph Tensor Neuron Network for random walk composition tree bottom up as Graph Neural Network
        #having Tensor Neuron activations for each subtree.
        while len(self.lambda_expression) > 2:
            # Pop (operand2, operand1, function) triples off the postfix stack
            # and replace them with their textual composition.
            operand2 = self.lambda_expression.pop()
            operand1 = self.lambda_expression.pop()
            function = self.lambda_expression.pop()
            subtree_graph_tensor_neuron_network_wght = self.subtree_graph_tensor_neuron_network_weight(
                operand1, function, operand2)
            self.graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
            per_random_walk_graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
            self.lambda_composition = "(" + function + "(" + operand1 + "," + operand2 + "))"
            self.lambda_expression.append(self.lambda_composition)
            cnt += 1
        if len(self.lambda_expression) > 1:
            return (
                self.lambda_expression[0] + "(" + self.lambda_expression[1] +
                ")",
                per_random_walk_graph_tensor_neuron_network_intrinsic_merit)
        else:
            return (
                self.lambda_expression[0],
                per_random_walk_graph_tensor_neuron_network_intrinsic_merit)
Ejemplo n.º 9
0
class ParallelSBTree(object):
    """
    Parallel threading structure as a Self-balancing Tree.  Used to implement
    the Reduce Parallel Computation algorithm.  For operations over the whole
    tree forming threads
    """
    def __init__(self, entries, shared=None):
        # Semaphore guarding shared state across worker threads.
        self.sem_lock = Semaphore(value=1)
        # Underlying AVL tree holding the entries.
        self.psbt = AVLTree(entries)
        self.shared = shared

    def foreach(self, function, root):
        """Apply *function* to every node of the subtree rooted at *root*,
        spawning one thread per node and one per child subtree.

        NOTE(review): spawned threads are never joined, so callers cannot tell
        when the traversal has finished -- confirm this is intended.
        """
        if root == None:
            return

        value = root.value
        # change value to args
        worker_thread = Thread(target=function, args=(
            self.shared,
            value,
        ))
        worker_thread.start()

        # exponential spawning
        if root.left != None:
            worker_thread_l = Thread(target=self.foreach,
                                     args=(function, root.left))
            worker_thread_l.start()

        if root.right != None:
            worker_thread_r = Thread(target=self.foreach,
                                     args=(function, root.right))
            worker_thread_r.start()

    def update_node(self, key, value):
        # Replace the value stored under key (remove + re-insert).
        print self.psbt
        self.remove(key)
        self.insert(key, value)

    def new_node(self, key, value):
        # Delegate node construction to the underlying tree implementation.
        return self.psbt._new_node(key, value)

    def insert(self, key, value):
        self.psbt.insert(key, value)

    def remove(self, key):
        self.psbt.remove(key)

    def merge_result(self, shared_result):
        # Placeholder for merging per-thread results; not implemented yet.
        pass
Ejemplo n.º 10
0
def findIntersections(S):
    # Sweep-line (Bentley-Ottmann style) intersection search over segments S.
    # NOTE(review): the implementation appears unfinished -- only event-queue
    # construction and the beginning of the event handler are present; Up/Lp/
    # Segs are built but never used and nothing is returned.
    T = AVLTree()
    Q = []  # event queue with [QPointF, lineId]
    for i, l in enumerate(S):
        Q.append([l.p1(), i, "l"])  # l : left, r : right, c : center
        Q.append([l.p2(), i, "r"])
    while Q:    # stops if Q = []
        p = Q[0][0]
        Q.pop(0)
        # handleEventPoint(p)
        Up = [p]
        Lp = []
        Segs = []
        T.foreach(lambda k, v:  print(k,v ) )
Ejemplo n.º 11
0
def getNewAVL(seq):
    """Build an AVL tree from *seq*, preferring the C implementation when
    the platform supports it."""
    if not has_fast_tree_support():
        from bintrees import AVLTree
        return AVLTree(seq)
    from bintrees import FastAVLTree
    return FastAVLTree(seq)
Ejemplo n.º 12
0
 def __init__(self):
     """Set up empty state for recursive lambda-function growth."""
     self.lambda_comp_tree = AVLTree()
     self.index_tree = BinaryTree()
     self.word_list = []
     self.word_dict = {}
     self.index_dict = {}
     self.index_list = []
     self.lambda_expression = []
     self.lambda_composition = ""
     self.graph_tensor_neuron_network_intrinsic_merit = 1.0
     self.entropy = 10000000000.0
     self.conceptnet = ConceptNet5Client()
     # Similarity backend; "ConceptNet" is the alternative option.
     self.Similarity = "WordNet"
     self.ClosedPaths = True
     self.dictionary = PyDictionary()
Ejemplo n.º 13
0
	def load_objects(self, index_name):
		"""Load serialised FreebaseObject entries from the index dump file into
		self.objects (an AVLTree keyed by object id)."""
		# initialise empty tree
		self.objects = AVLTree()

		# try open file with objects
		try:
			ins = open(index_name + "/bin_tree_dump_objects.txt", "r")
		except:
			print "Index tree for objects doesnt exist"
			return

		# reading objects from file, one by one
		for line in ins:
			try:
				# load serialized data from file
				x = json.loads(line)

				# create instance of FreebaseObject
				o = FreebaseObject()
				# initialise content with object data from file
				o.deserialise(x)

				# insert loaded freebase object to objects tree
				self.objects.insert(o.get_id(),o)
			except:
				# NOTE(review): bare except hides the real parse/insert error;
				# consider logging the exception.
				print "something wrong 2"
		
		print "Index tree for objects loaded"
Ejemplo n.º 14
0
    def __init__(self, series):
        """Index every datapoint of *series* for fast coordinate lookup.

        :param series: iterable of (xs, ys) pairs, one pair per line.
        """
        self.__x_tree = AVLTree()
        self.__y_tree = AVLTree()
        # (x -> y -> list of (line, point, x, y)) records for each coordinate.
        self.__mapping = {}

        for (line, (xs, ys)) in enumerate(series):
            for (point, (x, y)) in enumerate(zip(xs, ys)):
                self.__x_tree.insert(x, x)
                self.__y_tree.insert(y, y)

                # setdefault replaces the verbose two-step "get(...) is None"
                # checks with a single idiomatic chain.
                self.__mapping.setdefault(x, {}).setdefault(y, []).append(
                    (line, point, x, y))
	def grow_lambda_function1(self):
		"""Read the input text, index its words via an AVL tree of harmonic
		ranks, build the lambda-composition tree, and print each key with its
		predecessor and successor."""
		text=open("RecursiveLambdaFunctionGrowth.txt","r")
		word_dict={}
		index_dict={}
		words_evaluated=0
		word_list=text.read().split()

		# Harmonic ranking: position -> len(word_list)/cnt.
		for cnt in range(1,len(word_list)):
			index_dict[cnt-1] = len(word_list)/cnt

		index_tree=AVLTree(index_dict)
		print "Index AVL Tree:", repr(index_tree)
		#index_tree.foreach(print_node,1)
	
		# Drain the index tree in key order to pick words for the composition.
		try:
			while words_evaluated < len(word_list):
				#word_dict[words_evaluated]=word_list[random.randint(0,len(word_list)-1)]
				#print word_list[index_tree.pop_min()[0]]
				word_dict[words_evaluated]=word_list[index_tree.pop_min()[0]]
				words_evaluated+=1
		except:
			pass
	
		self.lambda_comp_tree=AVLTree(word_dict)
		print "Lambda Composition AVL Tree:"
		self.lambda_comp_tree.foreach(print_node)
		iteration=0
		# Walk every key, printing it along with its neighbours in key order.
		while iteration < len(word_list):
			k=self.lambda_comp_tree.get(iteration)
			print "k:",k
			try:
				prev=self.lambda_comp_tree.prev_key(iteration)
				prevk=self.lambda_comp_tree.get(prev)
				print "prevk:",prevk
			except:
				pass
			try:
				succ=self.lambda_comp_tree.succ_key(iteration)
				succk=self.lambda_comp_tree.get(succ)
				print "succk:",succk
			except:
				pass
			iteration+=1
Ejemplo n.º 16
0
    def grow_lambda_function1(self):
        """Read the input text, index its words via an AVL tree of harmonic
        ranks, build the lambda-composition tree, and print each key with its
        predecessor and successor."""
        text = open("RecursiveLambdaFunctionGrowth.txt", "r")
        word_dict = {}
        index_dict = {}
        words_evaluated = 0
        word_list = text.read().split()

        # Harmonic ranking: position -> len(word_list)/cnt.
        for cnt in range(1, len(word_list)):
            index_dict[cnt - 1] = len(word_list) / cnt

        index_tree = AVLTree(index_dict)
        print "Index AVL Tree:", repr(index_tree)
        #index_tree.foreach(print_node,1)

        # Drain the index tree in key order to pick words for the composition.
        try:
            while words_evaluated < len(word_list):
                #word_dict[words_evaluated]=word_list[random.randint(0,len(word_list)-1)]
                #print word_list[index_tree.pop_min()[0]]
                word_dict[words_evaluated] = word_list[index_tree.pop_min()[0]]
                words_evaluated += 1
        except:
            pass

        self.lambda_comp_tree = AVLTree(word_dict)
        print "Lambda Composition AVL Tree:"
        self.lambda_comp_tree.foreach(print_node)
        iteration = 0
        # Walk every key, printing it along with its neighbours in key order.
        while iteration < len(word_list):
            k = self.lambda_comp_tree.get(iteration)
            print "k:", k
            try:
                prev = self.lambda_comp_tree.prev_key(iteration)
                prevk = self.lambda_comp_tree.get(prev)
                print "prevk:", prevk
            except:
                pass
            try:
                succ = self.lambda_comp_tree.succ_key(iteration)
                succk = self.lambda_comp_tree.get(succ)
                print "succk:", succk
            except:
                pass
            iteration += 1
Ejemplo n.º 17
0
    def __init__(self, is_stack):
        """
        Constructor.

        :param is_stack: Whether this region map tracks stack frames; stack and
                         non-stack maps apply different strategies.
        """
        self.is_stack = is_stack
        # Sorted mapping from stack addresses to region IDs.
        self._address_to_region_id = AVLTree()
        # Plain dict mapping region IDs back to memory address ranges.
        self._region_id_to_address = {}
Ejemplo n.º 18
0
 def __init__(self, name, genome):
     """Create a chromosome-like container bounded by telomere blocks."""
     self.name = name
     self.genome = genome
     self.sb_name_mapping = {}
     # Sentinel synteny blocks marking the two chromosome ends.
     self.start_telomer = StartTelomereSyntenyBlock(name)
     self.end_telomer = EndTelomereSyntenyBlock(name)
     self.blocks = AVLTree()
     for telomer in (self.start_telomer, self.end_telomer):
         self.add_sb(telomer)
     # maps sb into a dict of resolutions, it has mapping in
     # in each particular resolution, mapping is represented as a list,
     # this particular block is mapped to
     self.mapping = defaultdict(lambda: defaultdict(list))
Ejemplo n.º 19
0
def intersections(psegs):
    """
    Implementation of the Bentley-Ottmann algorithm.

    Input
      psegs: a list of segments

    Output
      intpoints: a list of intersection points
    """
    eq = EventQueue(psegs)
    intpoints = []
    T = AVLTree()  # sweep-line status: segments currently crossed by the line
    L = []
    while not eq.is_empty():  # for all events
        e = eq.events.pop(0)  # remove the event
        p = e.p  # get event point
        L = e.edges  # segments with p as left end
        R, C = get_edges(T, p)  # p: right (R) and interior (C)
        if len(L + R + C) > 1:  # Intersection at p among L+R+C
            for s in L + R + C:
                if not s.contains(p):  # if p is interior
                    s.lp = p  # change lp and
                    s.status = INTERIOR  # status
            intpoints.append(p)
            R, C = get_edges(T, p)
        for s in R + C:
            T.discard(s)
        for s in L + C:
            T.insert(s, str(s))
        if len(L + C) == 0:
            # p is only a right endpoint here; guard against an empty R so a
            # degenerate event cannot raise IndexError (the original R[0]
            # could, despite the None check that follows).
            s = R[0] if R else None
            if s is not None:
                sl, sr = get_lr(T, s)
                find_new_event(sl, sr, p, eq)
        else:
            sp, spp = get_lrmost(T, L + C)
            try:
                sl = T.prev_key(sp)
            except KeyError:  # only on last key
                sl = None
            try:
                sr = T.succ_key(spp)
            except KeyError:  # only on last key
                sr = None
            find_new_event(sl, sp, p, eq)
            find_new_event(sr, spp, p, eq)
    return intpoints
Ejemplo n.º 20
0
def get_edges(t, p):
    """
    Gets the edges that contain point p as their right
    endpoint or in the interior.

    Returns (lr, lc): segments with p as right endpoint, and segments with p
    interior (either explicitly marked INTERIOR or collinear per sideplr).
    """
    lr = []
    lc = []
    # Iterate the tree keys directly; the old code built a throwaway
    # AVLTree(t) copy on every call even though nothing is mutated here.
    for s in t:
        if s.rp == p:
            lr.append(s)
        elif s.lp == p and s.status == INTERIOR:
            lc.append(s)
        elif sideplr(p, s.lp, s.rp) == 0:
            lc.append(s)
    return lr, lc
Ejemplo n.º 21
0
class BinTree:
	"""Maps a rating to the set of usernames holding that rating, kept
	ordered by rating via an AVL tree."""

	tree = None

	def __init__(self):
		self.tree = AVLTree()

	def insert(self, rating, username):
		"""Add *username* under *rating*, creating the bucket on first use."""
		# "rating not in" is the idiomatic spelling of "not rating in".
		if rating not in self.tree:
			self.tree[rating] = set()
		self.tree[rating].add(username)

	def remove(self, rating, username):
		"""Drop *username* from *rating*; delete the bucket once it empties.
		Unknown ratings are a no-op."""
		if rating in self.tree:
			self.tree[rating].remove(username)
			if len(self.tree[rating]) == 0:
				self.tree.discard(rating)

	def all_pairs_inbetween(self, low, high):
		"""Return (rating, username) pairs for every rating between *low*
		and *high* (per the tree's slice semantics)."""
		pairs = []
		for k, v in self.tree[low: high].items():
			for u in v:
				pairs.append((k, u))
		return pairs
	def __init__(self):
		"""Initialise empty containers for lambda-composition growth."""
		self.lambda_comp_tree = AVLTree()
		self.index_tree = BinaryTree()
		self.word_list = []
		self.word_dict = {}
		self.index_dict = {}
		self.index_list = []
		self.lambda_expression = []
		self.lambda_composition = ""
		self.graph_tensor_neuron_network_intrinsic_merit = 1.0
		self.entropy = 10000000000.0
		self.conceptnet = ConceptNet5Client()
		# "ConceptNet" is the alternate similarity backend.
		self.Similarity = "WordNet"
		self.ClosedPaths = True
		self.dictionary = PyDictionary()
Ejemplo n.º 23
0
def intersections(psegs):
    """
    Implementation of the Bentley-Ottmann algorithm.

    Input
      psegs: a list of segments

    Output
      intpoints: a list of intersection points
    """
    eq = EventQueue(psegs)
    intpoints = []
    T = AVLTree()  # sweep-line status structure
    L = []
    while not eq.is_empty():            # for all events
        e = eq.events.pop(0)            # remove the event
        p = e.p                         # get event point
        L = e.edges                     # segments with p as left end
        R, C = get_edges(T, p)          # p: right (R) and interior (C)
        if len(L + R + C) > 1:          # Intersection at p among L+R+C
            for s in L + R + C:
                if not s.contains(p):   # if p is interior
                    s.lp = p            # change lp and
                    s.status = INTERIOR # status
            intpoints.append(p)
            R, C = get_edges(T, p)
        for s in R + C:
            T.discard(s)
        for s in L + C:
            T.insert(s, str(s))
        if len(L + C) == 0:
            # Guard against an empty R so a degenerate event cannot raise
            # IndexError (the original R[0] could, despite the None check).
            s = R[0] if R else None
            if s is not None:
                sl, sr = get_lr(T, s)
                find_new_event(sl, sr, p, eq)
        else:
            sp, spp = get_lrmost(T, L + C)
            try:
                sl = T.prev_key(sp)
            except KeyError:            # only on last key
                sl = None
            try:
                sr = T.succ_key(spp)
            except KeyError:            # only on last key
                sr = None
            find_new_event(sl, sp, p, eq)
            find_new_event(sr, spp, p, eq)
    return intpoints
	def grow_lambda_function2(self, wordlist):
		"""Build a lambda-composition AVL tree from *wordlist* and fold it
		bottom-up into a composed expression plus an intrinsic-merit score.

		Returns a tuple (composed_expression_string, per_random_walk_merit).
		"""
		self.word_list=wordlist
		self.word_dict={}
		
		# Identity index map: position -> position.
		cnt=0
		while cnt < len(self.word_list):
			self.index_dict[cnt]=cnt
			cnt+=1

		self.index_tree=BinaryTree(self.index_dict)
		self.index_tree.foreach(self.build_lambda_comp_tree,0)
		
		self.lambda_comp_tree=AVLTree(self.word_dict)
		print "==========================================================================="
		print "Lambda Composition AVL Tree (inorder traversed) is the original text itself:"
		print "==========================================================================="
		# Inorder traversal (mode 0) reproduces the original text.
		self.lambda_expression=[]
		self.lambda_comp_tree.foreach(self.build_lambda_expression, 0)
		print self.lambda_expression
		print "==========================================================================="
		print "Lambda Composition AVL Tree (postorder traversed - Postfix expression):"
		print "Every parenthesis has two operands,operated by function outside:"
		print "==============================================================="
		# Postorder traversal (mode 1) yields a postfix expression to evaluate.
		self.lambda_expression=[]
		self.lambda_comp_tree.foreach(self.build_lambda_expression, 1)
		self.lambda_composition=[]
		cnt=0

		per_random_walk_graph_tensor_neuron_network_intrinsic_merit = 0 
		#recursively evaluate the Graph Tensor Neuron Network for random walk composition tree bottom up as Graph Neural Network
		#having Tensor Neuron activations for each subtree.
		while len(self.lambda_expression) > 2 :
			# Pop (operand2, operand1, function) triples off the postfix stack
			# and replace them with their textual composition.
			operand2=self.lambda_expression.pop()
			operand1=self.lambda_expression.pop()
			function=self.lambda_expression.pop()
			subtree_graph_tensor_neuron_network_wght = self.subtree_graph_tensor_neuron_network_weight(operand1, function, operand2)
			self.graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
			per_random_walk_graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
			self.lambda_composition="("+function+"("+operand1+","+operand2+"))" 
			self.lambda_expression.append(self.lambda_composition)
			cnt+=1
		if len(self.lambda_expression) > 1:
			return (self.lambda_expression[0] + "("+self.lambda_expression[1]+")", per_random_walk_graph_tensor_neuron_network_intrinsic_merit)
		else:
			return (self.lambda_expression[0], per_random_walk_graph_tensor_neuron_network_intrinsic_merit)
class LinearStreamingInterpolator(StreamingInterpolatorBase):
    """ Linear streaming interpolator """

    def __init__(self):
        # x -> value, kept sorted so neighbours are found in O(log n).
        self._history = AVLTree()

    def insert(self, x, val):
        """
        Register a datapoint

        x (float) : the x coordinate of the datapoint
        val (float): the rest of the datapoint
        """
        self._history.insert(x, val)

    def getInterpolatedVal(self, x):
        """
        Get the interpolated value for the given x

        x (float) : the x coordinate of the datapoint
        returns (float) : interpolated value, or None when no data exists
        """
        if self._history.is_empty():
            return None

        leftX, leftVal = None, None
        rightX, rightVal = None, None
        if self._history.min_key() <= x:
            leftX, leftVal = self._history.floor_item(x)
            if leftX == x:
                return leftVal
        if self._history.max_key() >= x:
            rightX, rightVal = self._history.ceiling_item(x)
            if rightX == x:
                return rightVal

        # check if on edge -- `is None` is the correct (PEP 8) identity test;
        # `== None` invokes __eq__ and can misbehave for exotic values.
        if leftVal is None:
            return rightVal
        elif rightVal is None:
            return leftVal

        # find weighted average of the vals of the closest enclosing data
        # points
        intervalLength = abs(x - leftX) + abs(x - rightX)
        value = float(abs(x - rightX) * leftVal + abs(x - leftX) * rightVal) / intervalLength
        return value
class NearestNeighborStreamingInterpolator(StreamingInterpolatorBase):
    """ Nearest Neighbor 1D Streaming Interpolator """

    def __init__(self):
        # x -> value, kept sorted so neighbours are found in O(log n).
        self._history = AVLTree()

    def insert(self, x, val):
        """
        Register a datapoint

        x (float) : the x coordinate of the datapoint
        val (float): the rest of the datapoint
        """
        self._history.insert(x, val)

    def getInterpolatedVal(self, x):
        """
        Get the interpolated value for the given x

        x (float) : the x coordinate of the datapoint
        returns (float) : value of the nearest datapoint, or None when empty
        """
        if self._history.is_empty():
            return None

        # find the nearest point to the left and right
        leftX, leftVal = None, None
        rightX, rightVal = None, None
        if self._history.min_key() <= x:
            leftX, leftVal = self._history.floor_item(x)
        if self._history.max_key() >= x:
            rightX, rightVal = self._history.ceiling_item(x)

        # if there is only one neighbor, return it -- `is None` is the correct
        # (PEP 8) identity test; `== None` invokes __eq__.
        if leftVal is None:
            return rightVal
        elif rightVal is None:
            return leftVal

        # return the nearest neighbor (ties resolve to the right neighbour)
        if abs(x - leftX) < abs(x - rightX):
            return leftVal
        else:
            return rightVal
    def testRandomRemove(self):
        """Insert random values, remove them in shuffled order, compare trees."""
        reference = AVLTree()
        for size in range(self.TEST_SZ):
            values = self.gen_rand_list(size)
            for v in values:
                self.tree.insert(v)
                reference.insert(v, v)

            random.shuffle(values)

            # After every removal both trees must agree on the remaining count.
            for removed, v in enumerate(values):
                reference.remove(v)
                self.tree.remove(v)
                self.assertEqual(len(reference), len(self.tree))
                self.assertEqual(size - removed - 1, len(self.tree))
        self.assertTrue(not self.tree)
Ejemplo n.º 28
0
    def populate_distance(self):
        """Build per-node distance structures.

        For each node i: C[i] maps neighbour index -> Distance, P[i] is an
        AVL tree of those Distance objects (self-distance excluded), II[i] is
        initialised to 1, and A[i] starts as the singleton cluster [i].
        """
        added = 0
        for i in range(self.N):
            row = self.Dist[i]
            V_temp = {}
            Q_temp = AVLTree()

            # enumerate replaces range(len(...)) indexing; the old unused
            # alias ``j = key`` has been dropped.
            for key, dist in enumerate(row):
                D_temp = Distance(dist, key)
                V_temp[key] = D_temp

                # Skip the self-distance; keep all others ordered.
                if key != i:
                    Q_temp[D_temp] = D_temp

                added += 1

            self.C[i] = V_temp
            self.II[i] = 1
            self.P[i] = Q_temp

            self.A[i] = [i]
Ejemplo n.º 29
0
    def __init__(self, workspace, disasm_view, parent=None):
        """Create the linear viewer widget and initialise its children."""
        super(QLinearViewer, self).__init__(parent)

        self.workspace = workspace
        self.disasm_view = disasm_view

        # Objects that will be painted.
        self.objects = []

        self.cfg = None
        self.cfb = None
        # Bidirectional offset <-> address maps plus an offset -> object index.
        self._offset_to_addr = AVLTree()
        self._addr_to_offset = AVLTree()
        self._offset_to_object = AVLTree()
        self._offset = 0
        self._paint_start_offset = 0

        self._linear_view = None  # type: QLinearGraphicsView
        self._disasms = {}

        self._init_widgets()
Ejemplo n.º 30
0
setup_AVLTree = """
from __main__ import avl_build_delete, avl_build, avl_search
"""
setup_FastAVLTree = """
from __main__ import cavl_build_delete, cavl_build, cavl_search
"""

# Load the benchmark keys; the `with` statement guarantees the file handle is
# closed even if eval() fails (the old open/read/close leaked on error).
try:
    with open('testkeys.txt') as fp:
        # NOTE(review): eval() on file contents is unsafe for untrusted input;
        # kept because the file is generated locally by profile_bintree.py.
        keys = eval(fp.read())
except IOError:
    print("create 'testkeys.txt' with profile_bintree.py\n")
    sys.exit()

py_searchtree = AVLTree.from_keys(keys)
cy_searchtree = FastAVLTree.from_keys(keys)


def avl_build_delete():
    """Benchmark body: build an AVLTree from the keys, then delete every key."""
    tree = AVLTree.from_keys(keys)
    for k in keys:
        del tree[k]


def cavl_build_delete():
    """Benchmark body: build a FastAVLTree from the keys, then delete every key."""
    tree = FastAVLTree.from_keys(keys)
    for k in keys:
        del tree[k]

Ejemplo n.º 31
0
class RegionMap(object):
    """
    Mostly used in SimAbstractMemory, RegionMap stores a series of mappings between concrete memory address ranges and
    memory regions, like stack frames and heap regions.
    """

    def __init__(self, is_stack):
        """
        Constructor

        :param is_stack:    Whether this is a region map for stack frames or not. Different strategies apply for stack
                            regions.
        """
        self.is_stack = is_stack

        # An AVLTree, which maps stack addresses to region IDs
        self._address_to_region_id = AVLTree()
        # A dict, which maps region IDs to memory address ranges
        self._region_id_to_address = {}

    #
    # Properties
    #

    def __repr__(self):
        return "RegionMap<%s>" % ("S" if self.is_stack else "H")

    @property
    def is_empty(self):
        return len(self._address_to_region_id) == 0

    @property
    def stack_base(self):
        if not self.is_stack:
            raise SimRegionMapError('Calling "stack_base" on a non-stack region map.')

        return self._address_to_region_id.max_key()

    @property
    def region_ids(self):
        return self._region_id_to_address.keys()

    #
    # Public methods
    #

    def copy(self):
        r = RegionMap(is_stack=self.is_stack)

        # A shallow copy should be enough, since we never modify any RegionDescriptor object in-place
        if len(self._address_to_region_id) > 0:
            # TODO: There is a bug in bintrees 2.0.2 that prevents us from copying a non-empty AVLTree object
            # TODO: Consider submit a pull request
            r._address_to_region_id = self._address_to_region_id.copy()
        r._region_id_to_address = self._region_id_to_address.copy()

        return r

    def map(self, absolute_address, region_id, related_function_address=None):
        """
        Add a mapping between an absolute address and a region ID. If this is a stack region map, all stack regions
        beyond (lower than) this newly added regions will be discarded.

        :param absolute_address:            An absolute memory address.
        :param region_id:                   ID of the memory region.
        :param related_function_address:    A related function address, mostly used for stack regions.
        """

        if self.is_stack:
            # Sanity check
            if not region_id.startswith("stack_"):
                # BUGFIX: region_id is a string, so the format specifier must be %s;
                # the old "%d" raised TypeError instead of SimRegionMapError.
                raise SimRegionMapError('Received a non-stack memory ID "%s" in a stack region map' % region_id)

            # Remove all stack regions that are lower than the one to add
            while True:
                try:
                    addr = self._address_to_region_id.floor_key(absolute_address)
                    descriptor = self._address_to_region_id[addr]
                    # Remove this mapping
                    del self._address_to_region_id[addr]
                    # Remove this region ID from the other mapping
                    del self._region_id_to_address[descriptor.region_id]
                except KeyError:
                    break

        else:
            if absolute_address in self._address_to_region_id:
                descriptor = self._address_to_region_id[absolute_address]
                # Remove this mapping
                del self._address_to_region_id[absolute_address]
                del self._region_id_to_address[descriptor.region_id]

        # Add this new region mapping
        desc = RegionDescriptor(region_id, absolute_address, related_function_address=related_function_address)

        self._address_to_region_id[absolute_address] = desc
        self._region_id_to_address[region_id] = desc

    def unmap_by_address(self, absolute_address):
        """
        Removes a mapping based on its absolute address.

        :param absolute_address: An absolute address
        """

        desc = self._address_to_region_id[absolute_address]
        del self._address_to_region_id[absolute_address]
        del self._region_id_to_address[desc.region_id]

    def absolutize(self, region_id, relative_address):
        """
        Convert a relative address in some memory region to an absolute address.

        :param region_id:           The memory region ID
        :param relative_address:    The relative memory offset in that memory region
        :return:                    An absolute address if converted, or an exception is raised when region id does not
                                    exist.
        """

        if region_id == "global":
            # The global region always bases 0
            return relative_address

        if region_id not in self._region_id_to_address:
            raise SimRegionMapError('Non-existent region ID "%s"' % region_id)

        base_address = self._region_id_to_address[region_id].base_address
        return base_address + relative_address

    def relativize(self, absolute_address, target_region_id=None):
        """
        Convert an absolute address to the memory offset in a memory region.

        Note that if an address belongs to heap region is passed in to a stack region map, it will be converted to an
        offset included in the closest stack frame, and vice versa for passing a stack address to a heap region.
        Therefore you should only pass in address that belongs to the same category (stack or non-stack) of this region
        map.

        :param absolute_address:    An absolute memory address
        :return:                    A tuple of the closest region ID, the relative offset, and the related function
                                    address.
        """

        if target_region_id is None:
            if self.is_stack:
                # Get the base address of the stack frame it belongs to
                base_address = self._address_to_region_id.ceiling_key(absolute_address)

            else:
                try:
                    base_address = self._address_to_region_id.floor_key(absolute_address)

                except KeyError:
                    # Not found. It belongs to the global region then.
                    return "global", absolute_address, None

            descriptor = self._address_to_region_id[base_address]

        else:
            if target_region_id == "global":
                # Just return the absolute address
                return "global", absolute_address, None

            if target_region_id not in self._region_id_to_address:
                raise SimRegionMapError('Trying to relativize to a non-existent region "%s"' % target_region_id)

            descriptor = self._region_id_to_address[target_region_id]
            base_address = descriptor.base_address

        return descriptor.region_id, absolute_address - base_address, descriptor.related_function_address
Ejemplo n.º 32
0
 def __init__(self, tree=None):
     """Wrap an existing AVLTree, or create an empty one when none is given."""
     self._storage = tree if tree is not None else AVLTree()  # type: AVLTree
 def __init__(self, segments, x):
     """Record the sweep position *x* and the segments under consideration."""
     self.segments = segments
     self.x = x
     # Status structure for the sweep line.
     self.tree = AVLTree()
Ejemplo n.º 34
0
class SortedTypedList(object):
    """This data structure allows to sort and maintain sorted
    a list of objects inheriting from :class:`netzob.Common.Utils.SortableObject.SortableObject`.
    It uses an AVLTree proposed by :mod:`bintrees` to represent elements in the list.

    :TODO: The inner object __tree stores the given element using an AVLTree. Thus
    exposing new methods only requires the creation of a wrapper method in this class.

    >>> from netzob.all import *
    >>> from netzob.Common.Utils.SortedTypedList import SortedTypedList
    >>> msg1 = RawMessage(b"msg1", date=25.0)
    >>> msg2 = RawMessage(b"msg2", date=2.0)
    >>> msg3 = RawMessage(b"msg3", date=1456487548.0)
    >>> l = SortedTypedList(RawMessage, [msg2, msg3, msg1])
    >>> print(list(l.values())[0])
    [2.0 None->None] 'msg2'

    >>> msg4 = RawMessage(b"msg4", date=145548.0)
    >>> l.add(msg4)
    >>> msg5 = RawMessage(b"msg5", date=14.0)
    >>> msg6 = RawMessage(b"msg6", date=1745645548.0)
    >>> l.addAll([msg5, msg6])
    >>> print(list(l.values())[5])
    [1745645548.0 None->None] 'msg6'
    >>> print(len(l))
    6

    """

    def __init__(self, membersTypes, elements=None):
        # The type every inserted element must be an instance of.
        self.membersTypes = membersTypes
        # AVL tree of element priorities: provides the sorted iteration order.
        self.__treePriorities = AVLTree()
        # Maps a priority to the list of elements sharing it (tree keys are
        # unique, so elements with an equal priority are grouped here).
        self.__mapMessages = dict()
        if elements is not None and len(elements) > 0:
            self._extend(elements)

    def add(self, element):
        """Insert in the proper place the specified element.

        :type: any object that comply with the typed of the current list and inherits from :class:`netzob.Common.Utils.SortableObject.SortableObject`.
        :raises: a TypeError if element is None or if its type doesn't comply with
                 the definition of the list.
        """
        if element is None:
            raise TypeError("Element cannot be None")
        self._extend([element])

    def addAll(self, elements):
        """Insert in their proper place all the specified element.

        :type: a list of any object that comply with the typed of the current list and inherits from :class:`netzob.Common.Utils.SortableObject.SortableObject`.
        :raises: a TypeError if element is None or if its type doesn't comply with
                 the definition of the list.
        """
        if elements is None:
            raise TypeError("Element cannot be None")
        self._extend(elements)

    def values(self):
        """Return a list sorted with the values of the current SortedTypedList.
        :warning: modifying this list has no effect on the SortedTypedList.

        :rtype: :mod:list
        """

        l = []
        # Tree keys come out in sorted (priority) order; expand each priority
        # into the elements that share it.
        for x in list(self.__treePriorities.keys()):
            l.extend(self.__mapMessages[x])
        return l

    def clear(self):
        """remove all items from the list.
        It's a O(n) operation"""
        self.__treePriorities.clear()
        # Bug fix: the priority->elements map must be emptied as well;
        # otherwise cleared elements would resurface in values() as soon as
        # the same priority is inserted again.
        self.__mapMessages.clear()

    def _extend(self, elements):
        """Add all the elements in the current list.

        :parameter elements: a list of :class:`netzob.Common.Utils.SortableObject.SortableObject` to insert.
        :raises: TypeError if something is wrong with the given elements
        """
        # Validate everything first so a bad element cannot leave the list
        # partially extended.
        for e in elements:
            self._check(e)

        d = dict()
        for e in elements:
            d[e.priority()] = None
            if e.priority() in self.__mapMessages:
                self.__mapMessages[e.priority()].append(e)
            else:
                self.__mapMessages[e.priority()] = [e]
        self.__treePriorities.update(d)

    def _check(self, v):
        """Raise TypeError unless v matches the declared member type and is a SortableObject."""
        if not isinstance(v, self.membersTypes):
            raise TypeError(
                "Invalid type for argument, expecting: {0}, received : {1}".
                format(self.membersTypes, v.__class__.__name__))
        if not isinstance(v, SortableObject):
            raise TypeError(
                "Objects inserted in a SortedTypedList must inherits from SortableObject class"
            )

    def __len__(self):
        """Returns the number of elements in the sorted list which takes
        O(1) operation :)"""
        # NOTE(review): this counts distinct priorities; elements sharing a
        # priority are counted once — confirm whether duplicate priorities
        # are expected by callers.
        return len(self.__treePriorities)

    def __str__(self):
        return ', \n'.join([str(v) for v in list(self.values())])

    def __repr__(self):
        return repr(str(self))

    def __iter__(self):
        """SortedTypedList is an iterable over its values (and not its keys)."""
        # Bug fix: the AVL tree stores None as payload for every priority
        # (see _extend), so iterating tree.values() yielded a run of None.
        # Iterate the actual sorted elements instead.
        return iter(self.values())
Ejemplo n.º 35
0
class KeyedRegion(object):
    """
    KeyedRegion keeps a mapping between stack offsets and all variables covering that offset. It assumes no variable in
    this region overlaps with another variable in this region.

    Registers and function frames can all be viewed as a keyed region.
    """
    def __init__(self, tree=None):
        # Sorted offset -> RegionObject map; the AVL tree provides the
        # floor/ceiling lookups that __contains__, get_base_addr and _store
        # rely on.
        self._storage = AVLTree() if tree is None else tree  # type: AVLTree

    def __contains__(self, offset):
        """
        Test if there is at least one variable covering the given offset.

        :param offset: The offset to test.
        :return: True if at least one variable covers the offset.
        """

        # Candidate region: the one starting at the greatest key <= offset.
        try: base_offset, item = self._storage.floor_item(offset)  #pylint:disable=unused-variable
        except KeyError: return False

        if item.includes(offset):
            return True
        return False

    def __len__(self):
        return len(self._storage)

    def __iter__(self):
        # Yield RegionObjects in increasing offset order.
        for _, item in self._storage.items():
            yield item

    def __eq__(self, other):
        if set(self._storage.keys()) != set(other._storage.keys()):
            return False

        for k, v in self._storage.iter_items():
            if v != other._storage[k]:
                return False

        return True

    def copy(self):
        """Return a deep-ish copy: region objects are copied, variables are shared."""
        if not self._storage:
            return KeyedRegion()

        kr = KeyedRegion()
        for key, ro in self._storage.iter_items():
            kr._storage[key] = ro.copy()
        return kr

    def merge(self, other, make_phi_func=None):
        """
        Merge another KeyedRegion into this KeyedRegion.

        :param KeyedRegion other: The other instance to merge with.
        :param make_phi_func:     Optional callable used to combine conflicting variables into a phi variable.
        :return:                  This KeyedRegion instance, updated in place.
        """

        # TODO: is the current solution not optimal enough?
        for _, item in other._storage.iter_items():  # type: RegionObject
            for loc_and_var in item.objects:
                self.__store(loc_and_var, overwrite=False, make_phi_func=make_phi_func)

        return self

    def dbg_repr(self):
        """
        Get a debugging representation of this keyed region.
        :return: A string of debugging output.
        """
        keys = self._storage.keys()
        offset_to_vars = { }

        for key in sorted(keys):
            ro = self._storage[key]
            variables = [ obj.variable for obj in ro.objects ]
            offset_to_vars[key] = variables

        s = [ ]
        # Bug fix: offset_to_vars is a plain dict and dict.iteritems() does
        # not exist on Python 3; .items() matches the usage elsewhere in this
        # class (e.g. __iter__).
        for offset, variables in offset_to_vars.items():
            s.append("Offset %#x: %s" % (offset, variables))
        return "\n".join(s)

    def add_variable(self, start, variable):
        """
        Add a variable to this region at the given offset.

        :param int start:
        :param SimVariable variable:
        :return: None
        """

        self._store(start, variable, overwrite=False)

    def set_variable(self, start, variable):
        """
        Add a variable to this region at the given offset, and remove all other variables that are fully covered by
        this variable.

        :param int start:
        :param SimVariable variable:
        :return: None
        """

        self._store(start, variable, overwrite=True)

    def get_base_addr(self, addr):
        """
        Get the base offset (the key we are using to index variables covering the given offset) of a specific offset.

        :param int addr:
        :return:
        :rtype:  int or None
        """

        try: base_addr, item = self._storage.floor_item(addr)
        except KeyError: return None

        if item.includes(addr):
            return base_addr

        return None

    def get_variables_by_offset(self, start):
        """
        Find variables covering the given region offset.

        :param int start:
        :return: A list of stack variables.
        :rtype:  set
        """

        try: base_addr = self._storage.floor_key(start)
        except KeyError: return [ ]

        item = self._storage[base_addr]  # type: RegionObject
        if item.includes(start):
            return item.variables
        return [ ]


    #
    # Private methods
    #

    def _store(self, start, variable, overwrite=False):
        """
        Store a variable into the storage.

        :param int start: The beginning address of the variable.
        :param variable: The variable to store.
        :param bool overwrite: Whether existing variables should be overwritten or not.
        :return: None
        """

        loc_and_var = LocationAndVariable(start, variable)
        self.__store(loc_and_var, overwrite=overwrite)

    def __store(self, loc_and_var, overwrite=False, make_phi_func=None):
        """
        Store a variable into the storage.

        :param LocationAndVariable loc_and_var: The descriptor describing start address and the variable.
        :param bool overwrite: Whether existing variables should be overwritten or not.
        :param make_phi_func: Optional callable used to combine conflicting variables into a phi variable.
        :return: None
        """

        start = loc_and_var.start
        variable = loc_and_var.variable
        # Size-less variables are treated as occupying a single unit.
        variable_size = variable.size if variable.size is not None else 1
        end = start + variable_size

        # region items in the middle
        overlapping_items = list(self._storage.item_slice(start, end))

        # is there a region item that begins before the start and overlaps with this variable?
        try:
            floor_key, floor_item = self._storage.floor_item(start)  # type: RegionObject
            if floor_item.includes(start):
                item = (floor_key, floor_item)
                if item not in overlapping_items:
                    # insert it into the beginning
                    overlapping_items.insert(0, (floor_key, floor_item))
        except KeyError:
            # no there isn't
            pass

        # scan through the entire list of region items, split existing regions and insert new regions as needed
        to_update = { start: RegionObject(start, variable_size, { loc_and_var }) }
        last_end = start

        for _, item in overlapping_items:  # type: RegionObject
            if item.start < start:
                # we need to break this item into two
                a, b = item.split(start)
                if overwrite:
                    b.set_object(loc_and_var)
                else:
                    self._add_object_or_make_phi(b, loc_and_var, make_phi_func=make_phi_func)
                to_update[a.start] = a
                to_update[b.start] = b
                last_end = b.end
            elif item.start > last_end:
                # there is a gap between the last item and the current item
                # fill in the gap
                new_item = RegionObject(last_end, item.start - last_end, { loc_and_var })
                to_update[new_item.start] = new_item
                last_end = new_item.end
            elif item.end > end:
                # we need to split this item into two
                a, b = item.split(end)
                if overwrite:
                    a.set_object(loc_and_var)
                else:
                    self._add_object_or_make_phi(a, loc_and_var, make_phi_func=make_phi_func)
                to_update[a.start] = a
                to_update[b.start] = b
                last_end = b.end
            else:
                if overwrite:
                    item.set_object(loc_and_var)
                else:
                    self._add_object_or_make_phi(item, loc_and_var, make_phi_func=make_phi_func)
                to_update[loc_and_var.start] = item

        self._storage.update(to_update)

    def _is_overlapping(self, start, variable):
        """Test whether [start, start + variable.size) overlaps any stored region."""

        if variable.size is not None:
            # make sure this variable does not overlap with any other variable
            end = start + variable.size
            try:
                prev_offset = self._storage.floor_key(end - 1)
            except KeyError:
                prev_offset = None

            if prev_offset is not None:
                if start <= prev_offset < end:
                    return True
                # NOTE(review): elsewhere storage values are RegionObjects, but
                # here they are indexed with [0] — confirm the stored value
                # shape before relying on this path.
                prev_item = self._storage[prev_offset][0]
                prev_item_size = prev_item.size if prev_item.size is not None else 1
                if start < prev_offset + prev_item_size < end:
                    return True
        else:
            try:
                prev_offset = self._storage.floor_key(start)
            except KeyError:
                prev_offset = None

            if prev_offset is not None:
                prev_item = self._storage[prev_offset][0]
                prev_item_size = prev_item.size if prev_item.size is not None else 1
                if prev_offset <= start < prev_offset + prev_item_size:
                    return True

        return False

    def _add_object_or_make_phi(self, item, loc_and_var, make_phi_func=None):  #pylint:disable=no-self-use
        """Add loc_and_var to item, or collapse the conflicting variables into a phi variable."""
        # If no phi maker is given, or the new variable is the only variable
        # involved, just add it.
        if not make_phi_func or len({loc_and_var.variable} | item.variables) == 1:
            item.add_object(loc_and_var)
        else:
            # make a phi node
            item.set_object(LocationAndVariable(loc_and_var.start,
                                                make_phi_func(loc_and_var.variable, *item.variables)
                                                )
                            )
 def __init__(self):
     """Create the history container, backed by an empty AVL tree."""
     self._history = AVLTree()
Ejemplo n.º 37
0
    def separateFilesToClusters(self, sDirectory, size_threshold):
        """Copy the files of every active cluster (self.II[i] == 1) that has at
        least `size_threshold` members into its own numbered subdirectory of
        `sDirectory`.

        Directory numbering continues after the count of directories already
        present anywhere under `sDirectory`.
        """
        # Start numbering after all directories currently found under sDirectory.
        next_dir = sum(len(subdirs) for _, subdirs, _ in os.walk(sDirectory))

        for cluster in range(self.N):
            if self.II[cluster] != 1:
                continue
            members = self.A[cluster]
            if len(members) < size_threshold:
                continue
            target = join(sDirectory, str(next_dir))
            os.makedirs(target)
            for file_index in members:
                shutil.copy(self.fileList[file_index], target)
            next_dir += 1


if __name__ == '__main__':
    # Smoke test: populate an AVLTree keyed by Distance objects, delete one
    # entry, and report the remaining size.
    t = AVLTree()
    d1 = Distance(1.12, 1)
    d2 = Distance(0.9, 2)
    d3 = Distance(0.9, 3)

    t[d1] = d1
    t[d2] = d2
    t[d3] = d3

    del t[d3]
    # Fixed: `print len(t)` is Python-2-only syntax (SyntaxError on Python 3);
    # the call form prints the same value on both interpreters.
    print(len(t))
Ejemplo n.º 38
0
 def __init__(self, membersTypes, elements=None):
     """Build the sorted list for the given member type; optionally
     bulk-insert the provided initial elements."""
     self.membersTypes = membersTypes
     self.__mapMessages = dict()
     self.__treePriorities = AVLTree()
     if elements is not None and len(elements) > 0:
         self._extend(elements)
Ejemplo n.º 39
0
 def __init__(self, entries, shared=None):
     """Wrap the given entries in an AVL tree guarded by a binary semaphore."""
     self.shared = shared
     self.psbt = AVLTree(entries)
     self.sem_lock = Semaphore(value=1)
Ejemplo n.º 40
0
def ABguilhermeberger(reds, blus):
	"""
	Input: red and blue segments with no red/red or blue/blue crossings, and no endpoints inside segments
	Output: For each flag, the segment above and below of the same and the opposite color.
	"""
	# Sentinel segments sit strictly below/above every real segment (one pair
	# per color), so prev_item/succ_item lookups near the extremes never raise.
	# Sentinel .num values 0 and len(...)+1 frame the 1-based segment numbers.
	red_bot_sentinel = ColoredSegment(Point(MIN_COORD-1, MIN_COORD-1), Point(MAX_COORD+1, MIN_COORD-1), 'Red')
	red_top_sentinel = ColoredSegment(Point(MIN_COORD-1, MAX_COORD+1), Point(MAX_COORD+1, MAX_COORD+1), 'Red')
	red_bot_sentinel.num = 0
	red_top_sentinel.num = len(reds)+1

	blu_bot_sentinel = ColoredSegment(Point(MIN_COORD-1, MIN_COORD-1), Point(MAX_COORD+1, MIN_COORD-1), 'Blue')
	blu_top_sentinel = ColoredSegment(Point(MIN_COORD-1, MAX_COORD+1), Point(MAX_COORD+1, MAX_COORD+1), 'Blue')
	blu_bot_sentinel.num = 0
	blu_top_sentinel.num = len(blus)+1

	# A "flag" is a segment endpoint: .a is the Start flag, .b the Termi flag.
	blu_start_flags = [seg.a for seg in blus]
	blu_termi_flags = [seg.b for seg in blus]

	red_start_flags = [seg.a for seg in reds]
	red_termi_flags = [seg.b for seg in reds]

	red_flags = red_start_flags + red_termi_flags
	blu_flags = blu_start_flags + blu_termi_flags

	# Lists ordered by the flag sorting conditions defined on SI3
	red_flags = sorted(red_flags)
	blu_flags = sorted(blu_flags)
	flags = sorted(red_flags + blu_flags)

	# This list is ordered by the Y coordinate of the Start flag
	red_sweep = AVLTree()
	red_sweep.insert(red_bot_sentinel, red_bot_sentinel)
	red_sweep.insert(red_top_sentinel, red_top_sentinel)

	blu_sweep = AVLTree()
	blu_sweep.insert(blu_bot_sentinel, blu_bot_sentinel)
	blu_sweep.insert(blu_top_sentinel, blu_top_sentinel)

	def get_same_sweep(flag):
		# Sweep structure holding segments of the flag's own color.
		if flag.color() == 'Blue':
			return blu_sweep
		else:
			return red_sweep

	def get_other_sweep(flag):
		# Sweep structure holding segments of the opposite color.
		if flag.color() == 'Red':
			return blu_sweep
		else:
			return red_sweep

	# By iterating through `flags`, we are effectively sweeping a vertical line from left to right
	for flag in flags:

		segment = flag.parent
		same_sweep = get_same_sweep(flag)
		other_sweep = get_other_sweep(flag)

		if flag.kind == 'Start':
			# Entering segment: record the same-color neighbours below (sb)
			# and above (sa) of its insertion point; it stays in the sweep.
			same_sweep.insert(segment, segment)
			flag.sb = same_sweep.prev_item(segment)[0].num
			flag.sa = same_sweep.succ_item(segment)[0].num
		else:
			# Terminating flag: the segment's own number answers both
			# same-color queries, then the segment leaves the sweep.
			flag.sb = segment.num
			flag.sa = segment.num
			same_sweep.remove(segment)

		# Probe the opposite-color structure: insert temporarily, read both
		# neighbours, then remove again (the segment never belongs there).
		other_sweep.insert(segment, segment)
		flag.ob = other_sweep.prev_item(segment)[0].num
		flag.oa = other_sweep.succ_item(segment)[0].num
		other_sweep.remove(segment)

	return red_flags, blu_flags
class RecursiveLambdaFunctionGrowth(object):
	"""Builds lambda-function composition trees over random walks (or cycles)
	of a text's Recursive Gloss Overlap (RGO) definition graph, and scores the
	text with several intrinsic-merit measures (graph tensor neuron network
	merit, Korner entropy, graph density, Bose-Einstein fitness).

	NOTE(review): Python 2 code (print statements, xrange, iteritems)."""
	def __init__(self):
		self.lambda_comp_tree=AVLTree()
		self.index_tree=BinaryTree()
		self.word_list=[]
		self.word_dict={}
		self.index_dict={}
		self.index_list=[]
		self.lambda_expression=[]
		self.lambda_composition=""
		self.graph_tensor_neuron_network_intrinsic_merit=1.0
		self.entropy=10000000000.0
		self.conceptnet=ConceptNet5Client()
		#self.Similarity="ConceptNet"
		self.Similarity="WordNet"
		self.ClosedPaths=True
		self.dictionary=PyDictionary()

	# Recursive midpoint subdivision of [x, y]; appends visited midpoints to
	# self.index_list. NOTE(review): (x+y)/2 floor-divides on Python 2 ints.
	def get_next_tree_traversal_id(self,x,y):
		if y-x == 1 or x-y == 1:
			return 1 
		print "x,y:",x,y
		self.index_list.append((x+y)/2)
		self.get_next_tree_traversal_id(x,(x+y)/2)
		self.get_next_tree_traversal_id((x+y)/2,y)

	# foreach() callback: collects traversed values into self.lambda_expression.
	def build_lambda_expression(self,key,value):
		#print value,
		self.lambda_expression.append(value)

	# foreach() callback: maps tree key k to the k-th word of the input text.
	def build_lambda_comp_tree(self,k,v):
		if k < len(self.word_list):
			self.word_dict[k]=self.word_list[k]

	def return_next(self,k,v):
		return (k,v)

	def grow_lambda_function2(self, wordlist):
		"""Build an AVL composition tree from wordlist and reduce its postfix
		traversal bottom-up, accumulating the graph tensor neuron network
		merit; returns (composed lambda string, per-walk merit)."""
		self.word_list=wordlist
		self.word_dict={}

		cnt=0
		while cnt < len(self.word_list):
			self.index_dict[cnt]=cnt
			cnt+=1

		self.index_tree=BinaryTree(self.index_dict)
		self.index_tree.foreach(self.build_lambda_comp_tree,0)

		self.lambda_comp_tree=AVLTree(self.word_dict)
		print "==========================================================================="
		print "Lambda Composition AVL Tree (inorder traversed) is the original text itself:"
		print "==========================================================================="
		self.lambda_expression=[]
		self.lambda_comp_tree.foreach(self.build_lambda_expression, 0)
		print self.lambda_expression
		print "==========================================================================="
		print "Lambda Composition AVL Tree (postorder traversed - Postfix expression):"
		print "Every parenthesis has two operands,operated by function outside:"
		print "==============================================================="
		self.lambda_expression=[]
		self.lambda_comp_tree.foreach(self.build_lambda_expression, 1)
		self.lambda_composition=[]
		cnt=0

		per_random_walk_graph_tensor_neuron_network_intrinsic_merit = 0 
		#recursively evaluate the Graph Tensor Neuron Network for random walk composition tree bottom up as Graph Neural Network
		#having Tensor Neuron activations for each subtree.
		while len(self.lambda_expression) > 2 :
			operand2=self.lambda_expression.pop()
			operand1=self.lambda_expression.pop()
			function=self.lambda_expression.pop()
			subtree_graph_tensor_neuron_network_wght = self.subtree_graph_tensor_neuron_network_weight(operand1, function, operand2)
			self.graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
			per_random_walk_graph_tensor_neuron_network_intrinsic_merit += subtree_graph_tensor_neuron_network_wght
			self.lambda_composition="("+function+"("+operand1+","+operand2+"))" 
			self.lambda_expression.append(self.lambda_composition)
			cnt+=1
		if len(self.lambda_expression) > 1:
			return (self.lambda_expression[0] + "("+self.lambda_expression[1]+")", per_random_walk_graph_tensor_neuron_network_intrinsic_merit)
		else:
			return (self.lambda_expression[0], per_random_walk_graph_tensor_neuron_network_intrinsic_merit)

	def grow_lambda_function1(self):
		"""Variant that reads RecursiveLambdaFunctionGrowth.txt and orders the
		words by popping minima of an index AVL tree before composing."""
		text=open("RecursiveLambdaFunctionGrowth.txt","r")
		word_dict={}
		index_dict={}
		words_evaluated=0
		word_list=text.read().split()

		for cnt in range(1,len(word_list)):
			index_dict[cnt-1] = len(word_list)/cnt

		index_tree=AVLTree(index_dict)
		print "Index AVL Tree:", repr(index_tree)
		#index_tree.foreach(print_node,1)
	
		try:
			while words_evaluated < len(word_list):
				#word_dict[words_evaluated]=word_list[random.randint(0,len(word_list)-1)]
				#print word_list[index_tree.pop_min()[0]]
				word_dict[words_evaluated]=word_list[index_tree.pop_min()[0]]
				words_evaluated+=1
		except:
			pass
	
		self.lambda_comp_tree=AVLTree(word_dict)
		print "Lambda Composition AVL Tree:"
		self.lambda_comp_tree.foreach(print_node)
		iteration=0
		while iteration < len(word_list):
			k=self.lambda_comp_tree.get(iteration)
			print "k:",k
			try:
				prev=self.lambda_comp_tree.prev_key(iteration)
				prevk=self.lambda_comp_tree.get(prev)
				print "prevk:",prevk
			except:
				pass
			try:
				succ=self.lambda_comp_tree.succ_key(iteration)
				succk=self.lambda_comp_tree.get(succ)
				print "succk:",succk
			except:
				pass
			iteration+=1

	def get_tensor_neuron_potential_for_relation(self,synset_vertex,synset_r):
		"""Return the best (highest, excluding exactly 1.0) pairwise similarity
		between the two synset lists, using WordNet or ConceptNet."""
		smt=0.0
		similarity=0.0
		for s1, s2 in product(synset_vertex, synset_r):
			if self.Similarity=="WordNet":
				smt=wn.wup_similarity(s1,s2)
			if self.Similarity=="ConceptNet":
				s1_lemma_names=s1.lemma_names()
				s2_lemma_names=s2.lemma_names()
				smt=self.conceptnet.conceptnet_distance(s1_lemma_names[0], s2_lemma_names[0])
			#print "similarity=",smt
			if smt > similarity and smt != 1.0:
				similarity = float(smt)
		return similarity

	def subtree_graph_tensor_neuron_network_weight(self, e1, r, e2):
		"""Tensor-neuron activation for a subtree: weighted sum of the two
		operand-to-relation similarities plus a bias."""
		#relation_tensor_neuron_potential=self.get_tensor_neuron_potential_for_relation(r)
		if e1[0]=="(":
			e1_parsed=e1.split("(")
			#print "operand1:", e1_parsed[1]
			synset_e1 = wn.synsets(e1_parsed[1])
		else:
			synset_e1 = wn.synsets(e1)
			#print "operand1:", e1

		#print "Relation: ",r
		synset_r = wn.synsets(r)
		if e2[0]=="(":
			e2_parsed=e2.split("(")
			#print "operand2:", e2_parsed[1]
			synset_e2 = wn.synsets(e2_parsed[1])
		else:
			#print "operand2:", e2
			synset_e2 = wn.synsets(e2)

		similarity1 = 0.0
		similarity2 = 0.0

		#Children of each subtree are the Tensor Neuron inputs to the subtree root
		#Each subtree is evaluated as a graph neural network with weights for
		#each neural input to the subtree root. WordNet similarity is computed
		#between each child and subtree root and is presently assumed as Tensor Neuron
		#relation potential for the lack of better metric to measure word-word EEG potential.
		#If a dataset for tensor neuron potential
		#is available, it has to to be looked-up and numeric
		#potential has to be returned from here.

		similarity1 = self.get_tensor_neuron_potential_for_relation(synset_e1,synset_r)
		similarity2 = self.get_tensor_neuron_potential_for_relation(synset_e2,synset_r)

		if similarity1 == 0.0:
			similarity1 = 1.0
		if similarity2 == 0.0:
			similarity2 = 1.0

		weight1=0.5
		weight2=0.5
		bias=0.1

		#Finally a neuron activation function (simple 1-dimensional tensor) is computed and
		#returned to the subtree root for next level.
		return (weight1*similarity1 + weight2*similarity2 + bias)

	def randomwalk_lambda_function_composition_tree(self,randomwalk):
		# Thin wrapper: a random walk is composed exactly like a word list.
		randomwalk_lambdacomposition=self.grow_lambda_function2(randomwalk)
		return randomwalk_lambdacomposition

	def create_summary(self,text,corenumber=3,pathsimilarity=0.8,graphtraversedsummary=False,shortestpath=True):
		"""Summarize `text` either by traversing a k-core of its RGO graph
		(graphtraversedsummary=True) or by matching sentences against the
		graph's prominent classes; returns (summary, summary length)."""
		if graphtraversedsummary==True:
			definitiongraph=RecursiveGlossOverlap_Classifier.RecursiveGlossOverlapGraph(text)
			#This has to be replaced by a Hypergraph Transversal but NetworkX does not have Hypergraphs yet.
			#Hence approximating the transversal with a k-core which is the Graph counterpart of
			#Hypergraph transversal. Other measures create a summary too : Vertex Cover is NP-hard while Edge Cover is Polynomial Time.
			richclubcoeff=nx.rich_club_coefficient(definitiongraph.to_undirected())
			print "Rich Club Coefficient of the Recursive Gloss Overlap Definition Graph:",richclubcoeff
			kcore=nx.k_core(definitiongraph,corenumber)
			print "Text Summarized by k-core(subgraph having vertices of degree atleast k) on the Recursive Gloss Overlap graph:"
			print "=========================="
			print "Dense subgraph edges:"
			print "=========================="
			print kcore.edges()
			print "=========================="
			if shortestpath == False:
				for e in kcore.edges():
					for s1 in wn.synsets(e[0]):
						for s2 in wn.synsets(e[1]):
								if s1.path_similarity(s2) > pathsimilarity:
									lowestcommonhypernyms=s1.lowest_common_hypernyms(s2)
									for l in lowestcommonhypernyms:
										for ln in l.lemma_names():
											print e[0]," and ",e[1]," are ",ln,".",
			else:
				#Following is the slightly modified version of shortest_path_distance() function
				#in NLTK wordnet - traverses the synset path between 2 synsets instead of distance
				summary={}
				intermediates=[]
				for e in kcore.edges():
					for s1 in wn.synsets(e[0]):
						for s2 in wn.synsets(e[1]):
							s1dict = s1._shortest_hypernym_paths(False)
							s2dict = s2._shortest_hypernym_paths(False)
							s2dictkeys=s2dict.keys()
							for s,d in s1dict.iteritems():
								if s in s2dictkeys:
									slemmanames=s.lemma_names()
									if slemmanames[0] not in intermediates:
										intermediates.append(slemmanames[0])
					if len(intermediates) > 3:
						sentence1=e[0] + " is a " + intermediates[0]
						summary[sentence1]=self.relevance_to_text(sentence1,text) 
						for i in xrange(len(intermediates)-2):
							sentence2= intermediates[i] + " is a " + intermediates[i+1] + "."
							if sentence2 not in summary:
								summary[sentence2]=self.relevance_to_text(sentence2,text)
						sentence3=intermediates[len(intermediates)-1] + " is a " + e[1]
						summary[sentence3]=self.relevance_to_text(sentence3,text)
						intermediates=[]
				sorted_summary=sorted(summary,key=operator.itemgetter(1), reverse=True)
				print "==================================================================="
				print "Sorted summary created from k-core dense subgraph of text RGO"
				print "==================================================================="
				for s in sorted_summary:
					print s,
			return (sorted_summary, len(sorted_summary))
		else:
			definitiongraph_merit=RecursiveGlossOverlap_Classifier.RecursiveGlossOverlapGraph(text)
			definitiongraph=definitiongraph_merit[0]
			richclubcoeff=nx.rich_club_coefficient(definitiongraph.to_undirected(),normalized=False)
			print "Rich Club Coefficient of the Recursive Gloss Overlap Definition Graph:",richclubcoeff
			textsentences=text.split(".")
			lensummary=0
			summary=[]
			definitiongraphclasses=RecursiveGlossOverlap_Classifier.RecursiveGlossOverlap_Classify(text)
			print "Text Summarized based on the Recursive Gloss Overlap graph classes the text belongs to:"
			prominentclasses=int(len(definitiongraphclasses[0])/2)
			print "Total number of classes:",len(definitiongraphclasses[0])
			print "Number of prominent classes:",prominentclasses
			for c in definitiongraphclasses[0][:prominentclasses]:
				if len(summary) > len(textsentences) * 0.5:
					return (summary,lensummary)
				for s in textsentences:
					classsynsets=wn.synsets(c[0])
					for classsynset in classsynsets:
						if self.relevance_to_text(classsynset.definition(), s) > 0.41:
							if s not in summary:
								summary.append(s)
								lensummary += len(s)
								print s,
			return (summary,lensummary)

	def relevance_to_text(self, sentence, text):
		#Ratcliff/Obershelp gestalt string pattern matching 
		textset=set(text.split("."))
		relevancescore=0.0
		for t in textset:
			rs=difflib.SequenceMatcher(None,sentence,t).ratio()
			relevancescore=max(rs,relevancescore)
		return relevancescore 

	def instrument_relations(self, rw_words_list):
		"""Expand a walk's word list by inserting the path_between() relation
		words between consecutive vertices; falls back to the input list."""
		word_list_len=len(rw_words_list)
		instrumented_rw_words_list=[]
		if word_list_len==2:
			path=path_between(rw_words_list[0], rw_words_list[1])
			for p in path:
				instrumented_rw_words_list.append(p)
		else:
			for n in range(0,word_list_len-2): 
				path=path_between(rw_words_list[n], rw_words_list[n+1])
				for p in path:
					instrumented_rw_words_list.append(p)
		if len(instrumented_rw_words_list) > 0:
			return instrumented_rw_words_list
		else:
			return rw_words_list

	def grow_lambda_function3(self,text,level=3):
		"""Top-level driver: build the RGO graph of `text`, compose lambda
		trees over its walks (ClosedPaths=False) or simple cycles (True,
		capped at 500), and return a dict of intrinsic-merit measures."""
		stpairs=[]
		maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit=("",0.0)
		definitiongraph_merit=RecursiveGlossOverlap_Classifier.RecursiveGlossOverlapGraph(text,level)
		definitiongraph=definitiongraph_merit[0]
		sentiment=SentimentAnalyzer.SentimentAnalysis_RGO_Belief_Propagation_MarkovRandomFields(definitiongraph)
		apsp=nx.all_pairs_shortest_path(definitiongraph)
		for a in definitiongraph.nodes():
			for b in definitiongraph.nodes():
				stpairs.append((a,b))
		rw_ct=""
		if self.ClosedPaths==False:
			for k,v in stpairs:
				try:
					print "==================================================================="
					print "Random Walk between :",k," and ",v,":",apsp[k][v]
					instrumented_apspkv=self.instrument_relations(apsp[k][v])
					rw_ct=self.randomwalk_lambda_function_composition_tree(instrumented_apspkv)
					print "Random Walk Composition Tree for walk between :",k," and ",v,":",rw_ct
					print "maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit=",maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit
					print "==================================================================="
					if rw_ct[1] > maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit[1]:
						maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit=rw_ct
				except KeyError:
					pass
				rw_ct=""
		if self.ClosedPaths==True:
			allsimplecycles=nx.simple_cycles(definitiongraph)
			#allsimplecycles=nx.cycle_basis(definitiongraph)
			number_of_cycles=0
			for cycle in allsimplecycles:
				number_of_cycles += 1
				if number_of_cycles > 500:
					break
				try:
					print "==================================================================="
					print "Cycle :",cycle
					instrumented_cycle=self.instrument_relations(cycle)
					print "instrumented_cycle:",instrumented_cycle
					rw_ct=self.randomwalk_lambda_function_composition_tree(instrumented_cycle)
					print "Cycle Composition Tree for this cycle :",rw_ct
					print "maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit=",maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit
					print "==================================================================="
					if rw_ct[1] > maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit[1]:
						maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit=rw_ct
				except KeyError:
					pass
				rw_ct=""
		intrinsic_merit_dict={}
		print "grow_lambda_function3(): Graph Tensor Neuron Network Intrinsic Merit for this text:",self.graph_tensor_neuron_network_intrinsic_merit

		print "grow_lambda_function3(): Machine Translation Example - English to Kannada:"
		self.machine_translation(definitiongraph, "kn")

		self.korner_entropy(definitiongraph)
		print "grow_lambda_function3(): Korner Entropy Intrinsic Merit for this text:",self.entropy

		density = self.density(definitiongraph)
		print "grow_lambda_function3(): Graph Density (Regularity Lemma):",density

		bose_einstein_intrinsic_fitness=self.bose_einstein_intrinsic_fitness(definitiongraph)
		print "grow_lambda_function3(): Bose-Einstein Intrinsic Fitness:",bose_einstein_intrinsic_fitness

		print "grow_lambda_function3(): Maximum Per Random Walk Graph Tensor Neuron Network Intrinsic Merit :",maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit

		print "grow_lambda_function3(): Recursive Gloss Overlap Classifier classes for text:",RecursiveGlossOverlap_Classifier.RecursiveGlossOverlap_Classify(text)

		intrinsic_merit_dict["graph_tensor_neuron_network_intrinsic_merit"]=self.graph_tensor_neuron_network_intrinsic_merit
		intrinsic_merit_dict["maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit"]=maximum_per_random_walk_graph_tensor_neuron_network_intrinsic_merit
		intrinsic_merit_dict["korner_entropy"]=self.entropy
		intrinsic_merit_dict["density"]=density
		intrinsic_merit_dict["bose_einstein_intrinsic_fitness"]=bose_einstein_intrinsic_fitness
		intrinsic_merit_dict["recursive_gloss_overlap_intrinsic_merit"]=definitiongraph_merit[1]
		intrinsic_merit_dict["empath_sentiment"]=sentiment

		write_dot(definitiongraph,"RecursiveLambdaFunctionGrowth.dot")

		# Reset the running merit accumulator so subsequent calls start fresh.
		self.graph_tensor_neuron_network_intrinsic_merit=1.0
		print "intrinsic_merit_dict:",intrinsic_merit_dict
		return intrinsic_merit_dict 

	def machine_translation(self, definitiongraph, languagecode):
		# Translate every edge's endpoints and mirror each edge in both
		# directions in the translated graph.
		nodes=definitiongraph.nodes()
		edges=definitiongraph.edges()
		translationgraph=nx.DiGraph()
		for k, v in edges:
			ktrans=self.dictionary.translate(k,languagecode)
			vtrans=self.dictionary.translate(v,languagecode)
			print "k=",k,",v=",v,",ktrans=",ktrans,",vtrans=",vtrans
			translationgraph.add_edge(ktrans, vtrans)
			translationgraph.add_edge(vtrans, ktrans)
		print "TextGraph Translated to ",languagecode,":",translationgraph		

	#KornerEntropy(G) = minimum [- sum_v_in_V(G) {1/|V(G)| * log(Pr[v in Y])}] for each independent set Y
	def korner_entropy(self, definitiongraph):
		nodes=definitiongraph.nodes()
		stable_sets=[]
		for v in nodes:
			stable_sets.append(nx.maximal_independent_set(definitiongraph.to_undirected(),[v]))
		print "korner_entropy(): Stable Independent Sets:",stable_sets
		entropy=0.0
		prob_v_in_stableset=0.0
		for v in nodes:
			for s in stable_sets:
				# Near-1 / near-0 probabilities stand in for membership to
				# keep log() finite.
				if v in s:
					prob_v_in_stableset=math.log(0.999999)
				else:
					prob_v_in_stableset=math.log(0.000001)
				entropy += (-1.0) * float(1.0/len(nodes)) * prob_v_in_stableset
			if entropy < self.entropy:
				self.entropy = entropy
			entropy=0.0
		return self.entropy

	#Graph Density - Regularity Lemma
	def density(self, definitiongraph):
		dty=nx.classes.function.density(definitiongraph)
		return dty

	#Bose-Einstein Bianconi intrinsic fitness 
	def bose_einstein_intrinsic_fitness(self, definitiongraph):
		#Bose-Einstein fitness presently assumes energy of a document vertex in a link graph to be
		#the entropy or extent of chaos in the definition graph of document text 
		#This has to be replaced by a more suitable fitness measure
		#Bose-Einstein Condensation function value is hardcoded
		entropy = self.korner_entropy(definitiongraph)
		becf = 0.3
		bei_fitness = math.pow(2, -1 * becf * entropy)
		return bei_fitness
Ejemplo n.º 42
0
	def __init__(self):
		"""Create an empty AVL tree for this instance."""

		self.tree = AVLTree()
Ejemplo n.º 43
0
 def __init__(self, membersTypes, elements=None):
     # Type that every element inserted into this list must match.
     self.membersTypes = membersTypes
     # AVL tree of element priorities: maintains the sorted order.
     self.__treePriorities = AVLTree()
     # Maps a priority to the elements that carry it.
     self.__mapMessages = dict()
     if elements is not None and len(elements) > 0:
         self._extend(elements)
Ejemplo n.º 44
0
def avl_build_delete():
    """Benchmark body: build an AVLTree from the global ``keys`` and delete them all."""
    t = AVLTree.from_keys(keys)
    for k in keys:
        del t[k]
Ejemplo n.º 45
0
setup_AVLTree = """
from __main__ import avl_build_delete, avl_build, avl_search
"""
setup_FastAVLTree = """
from __main__ import cavl_build_delete, cavl_build, cavl_search
"""

# Load the benchmark key set produced by profile_bintree.py.
# 'with' guarantees the file handle is closed even if read()/eval() raises
# (the original leaked fp on any error between open() and close()).
try:
    with open('testkeys.txt') as fp:
        # SECURITY NOTE: eval() executes arbitrary Python expressions; this is
        # acceptable only because testkeys.txt is a locally generated file.
        # Prefer ast.literal_eval for anything not fully trusted.
        keys = eval(fp.read())
except IOError:
    print("create 'testkeys.txt' with profile_bintree.py\n")
    sys.exit()

# search fixtures shared by the timing functions below
py_searchtree = AVLTree.from_keys(keys)
cy_searchtree = FastAVLTree.from_keys(keys)


def avl_build_delete():
    """Construct an AVLTree from ``keys``, then tear it down key by key."""
    built = AVLTree.from_keys(keys)
    for each_key in keys:
        del built[each_key]


def cavl_build_delete():
    """Construct a FastAVLTree from ``keys``, then tear it down key by key."""
    built = FastAVLTree.from_keys(keys)
    for each_key in keys:
        del built[each_key]

Ejemplo n.º 46
0
def avl_build():
    """Benchmark body: build an AVLTree from the global ``keys`` (result discarded)."""
    built = AVLTree.from_keys(keys)
Ejemplo n.º 47
0
def avl_build_delete():
    """Build an AVLTree from ``keys`` and then remove every key again."""
    populated = AVLTree.from_keys(keys)
    for current in keys:
        del populated[current]
Ejemplo n.º 48
0
from bintrees import AVLTree

# Small AVLTree demo: insert a handful of key/value pairs (same order as the
# original script so the resulting tree shape is identical) and print the tree.
tree = AVLTree()
for num, label in ((4, "hehe4"), (2, "hehe2"), (6, "hehe6"), (1, "hehe1"), (7, "hehe7")):
    tree.insert(num, label)

print(tree)
Ejemplo n.º 49
0
def avl_build():
    """Timing target: construct an AVLTree from the benchmark ``keys`` list."""
    result = AVLTree.from_keys(keys)
class State(object):
    """Sweep-line status: the set of segments currently crossed by the
    vertical sweep line at abscissa ``x``, kept ordered in an AVL tree."""

    def __init__(self, segments, x):
        self.segments = segments
        self.x = x
        self.tree = AVLTree()

    def segment_crossing_point(self, segment_id):
        """Return the y coordinate where the segment addressed by
        *segment_id* crosses the sweep line at ``self.x``."""
        seg = self.segments[segment_id.label]
        run = seg.x2 - seg.x1
        rise = seg.y2 - seg.y1
        sweep_offset = self.x - seg.x1
        return seg.y1 + rise * (sweep_offset / run)

    def insert(self, new_segment):
        """Insert a segment; return the neighbour pairs it creates."""
        self.tree.insert(new_segment, new_segment)
        formed = []
        # predecessor pair first, successor pair second (original order)
        for lookup in (self.tree.prev_item, self.tree.succ_item):
            try:
                formed.append((lookup(new_segment)[0], new_segment))
            except KeyError:
                pass
        return formed

    def delete(self, segment):
        """Remove a segment; return the pair its former neighbours now form."""
        try:
            before = self.tree.prev_item(segment)[0]
        except KeyError:
            before = None
        try:
            after = self.tree.succ_item(segment)[0]
        except KeyError:
            after = None
        formed = []
        if before and after:
            # the removal makes the two former neighbours adjacent
            formed.append((before, after))
        self.tree.remove(segment)
        return formed

    def swap(self, segment1, segment2):
        """Swap two segments around ``self.x``; return the neighbour pairs
        created, excluding the pair formed by the two swapped segments."""
        saved_x = self.x
        self.x = saved_x - epsilon
        pairs_from_delete = self.delete(segment1)
        self.x = saved_x + epsilon
        pairs_from_insert = self.insert(segment1)
        self.x = saved_x

        return [pair for pair in pairs_from_delete + pairs_from_insert
                if pair != (segment1, segment2) and pair != (segment2, segment1)]
Ejemplo n.º 51
0
def get_intersections(spheres: List[Sphere]):
    """Sweep along x over the spheres' extents and return a set of center
    tuples of spheres found to intersect a neighbour in the sweep status."""
    start_points_map = {}
    end_points_map = {}

    for idx, sph in enumerate(spheres):
        start_points_map[(sph.center.x - sph.radius, sph.center.y)] = idx
        end_points_map[(sph.center.x + sph.radius, sph.center.y)] = idx

    # kept from the original: a sorted union of all events (currently unused)
    event_points = list(start_points_map.keys())
    event_points.extend(end_points_map.keys())
    event_points.sort()

    # reversed order lets the sweep consume events via pop() from the back
    start_points = sorted(start_points_map.keys(), reverse=True)
    end_points = sorted(end_points_map.keys(), reverse=True)

    tree = AVL()
    found = set()

    def _scan(step, sphere):
        # Walk neighbours in one direction (prev_item or succ_item) until the
        # tree edge raises KeyError, recording every intersecting pair.
        key = sphere.center
        try:
            while True:
                neighbour = step(key)[1]
                if neighbour.intersects(sphere):
                    found.add(neighbour.center.to_tuple())
                    found.add(sphere.center.to_tuple())
                key = neighbour.center
        except KeyError:
            pass

    while start_points and end_points:
        if start_points[-1] <= end_points[-1]:
            # start event: activate the sphere and scan both directions
            sphere = spheres[start_points_map[start_points[-1]]]
            tree.insert(sphere.center, sphere)
            _scan(tree.prev_item, sphere)
            _scan(tree.succ_item, sphere)
            start_points.pop()
        else:
            # end event: deactivate the sphere
            tree.remove(spheres[end_points_map[end_points[-1]]].center)
            end_points.pop()

    # drain any remaining end events
    while end_points:
        tree.remove(spheres[end_points_map[end_points[-1]]].center)
        end_points.pop()

    return found
Ejemplo n.º 52
0
class Index():
	"""In-memory search index over objects addressed by ID and by name words.

	Whole objects live in one AVL tree keyed by object ID; name words are
	indexed in a per-first-character family of AVL trees, each mapping a
	word to a comma-separated string of object IDs.
	"""

	# NOTE(review): both attributes below are class-level and therefore shared
	# by every Index instance; confirm only one Index is created at a time.
	# balanced trees to store index references (name -> ID), each letter of alphabet and digit have one tree
	hash_index_tree = {}
	# tree for storing objects by ID
	objects = AVLTree()

	def __init__(self, index_name):
		"""Load a persisted index from folder *index_name*, or start empty if None."""
		if index_name != None:
			# initialise index from folder
			self.load_persistant_index(index_name)
		else:
			# initialise new index
			self.inilialise()



	def inilialise(self):
		"""Create one empty AVL tree per hash key (method-name typo kept as-is)."""
		# NOTE(review): 'w' is absent from this key list, so words starting with
		# 'w' fall through to the universal '+' tree — confirm this is intended.
		hashKeys = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','x','y','z','0','1','2','3','4','5','6','7','8','9','+']
		
		#for each key create separate balanced tree
		for k in hashKeys:
			self.hash_index_tree[k] = AVLTree()


	# append object to index
	def index_object(self, obj):
		"""Store *obj* and index every whitespace-separated word of each of its names."""
		# store whole object in objec tree
		self.append_to_objects(obj)

		# store index reference to object for each name and each word in name of object
		object_id = obj.get_id()
		names = obj.get_names()

		# foreach names
		for n in names.keys():
			
			exploded_name = names[n].split(" ")

			# foreach word in name
			for e in exploded_name:
				# appnd to index tree
				self.append_to_index(e, object_id)



	def get_object(self, key):
		"""Return the stored object for ID *key* (None when absent)."""
		return self.objects.get(key)



	def append_to_objects(self, obj):
		"""Insert *obj* into the objects tree, keyed by its own ID."""
		self.objects.insert(obj.get_id(), obj)



	def append_to_index(self, key, object_id):
		"""Append *object_id* to the comma-separated ID string stored under word *key*."""
		# select desired tree from hash
		tree = self.select_tree(key)

		if tree == None:
			print "Error adding new entry, key was empty string. Obj.ID: " + object_id + "\n"
			return
		
		# update existing object_id's for key
		entry = tree.get(key);
		
		if entry == None:
			# append first value
			entry = str(object_id)
		else:
			# append N-th value
			entry += "," + object_id
		
		# update entry for key
		tree.insert(key, entry)


	# select indexing tree accoriding key first letter
	def select_tree(self, key):
		"""Return the tree for *key*'s first character (lowercased), the '+'
		tree for characters not in the table, or None for an empty key."""
		if(len(key) == 0):
			return None

		# get first letter of key, lowercase
		k = key[0].lower()			

		# select tree
		if k in self.hash_index_tree:
			# if firt letter is list of trees, return tree
			return self.hash_index_tree[k]
		else:
			# otherwise return universal tree
			return self.hash_index_tree["+"]



	def __str__(self):
		return str(self.hash_index_tree)


	# get items generator for index trees by key, or objects tree (k == None)
	def get_items_generator(self, k):
		"""Return the (key, value) items of index tree *k*, or of the objects tree when k is None."""
		if k == None:
			return self.objects.items()

		return self.hash_index_tree[k].items()


	# load index from existing index in folder
	def load_persistant_index(self, index_name):
		"""Rebuild the whole index (objects + all index trees) from folder *index_name*."""
		# load objects
		self.load_objects(index_name)
		# load all index trees
		self.load_index_trees(index_name)


	# load objects to memmory
	def load_objects(self, index_name):
		"""Deserialise all objects from <index_name>/bin_tree_dump_objects.txt, one JSON document per line."""
		# initialise empty tree
		self.objects = AVLTree()

		# try open file with objects
		# NOTE(review): bare except silently tolerates any open() failure
		try:
			ins = open(index_name + "/bin_tree_dump_objects.txt", "r")
		except:
			print "Index tree for objects doesnt exist"
			return

		# reading objects from file, one by one
		for line in ins:
			try:
				# load serialized data from file
				x = json.loads(line)

				# create instance of FreebaseObject
				o = FreebaseObject()
				# initialise content with object data from file
				o.deserialise(x)

				# insert loaded freebase object to objects tree
				self.objects.insert(o.get_id(),o)
			except:
				# NOTE(review): malformed lines are skipped with only a console message
				print "something wrong 2"
		
		print "Index tree for objects loaded"


	#load all index trees
	def load_index_trees(self, index_name):
		"""Deserialise every per-character index tree from <index_name>/bin_tree_dump_<key>.txt."""
		hashKeys = ['a','b','c','d','e','f','g','h','i','j','k','l','m','n','o','p','q','r','s','t','u','v','x','y','z','0','1','2','3','4','5','6','7','8','9','+']

		# for each key in hashKeys try load file with data for index tree
		for key in hashKeys:
			# create empty tree
			self.hash_index_tree[key] = AVLTree()

			# try open appropriate file
			try:
				ins = open(index_name + "/bin_tree_dump_" + key + ".txt", "r")
			except:
				print "Index tree for " + key + " doesnt exist"
				continue

			# try reads entries from file (entry = line)
			for line in ins:

				try:
					# deserialise data from file
					x = json.loads(line)
					# get key and value 
					k = x[0]
					v = x[1] 
					
					# insert data to actual index tree
					self.hash_index_tree[key].insert(k,v)
				except:
					# NOTE(review): malformed lines are skipped with only a console message
					print "something wrong"
			
			print "Index tree for " + key + " loaded"
Ejemplo n.º 53
0
 def __init__(self, segments, pos_x):
     """Initialise the sweep state.

     :param segments: indexable collection of segment objects
     :param pos_x: current x position of the sweep line
     """
     self.segments = segments
     self.pos_x = pos_x
     # balanced tree holding the segments currently crossed by the sweep line
     self.tree = AVLTree()
Ejemplo n.º 54
0
class SortedTypedList(object):
    """This data structure allows to sort and maintain sorted
    a list of objects inheriting from :class:`netzob.Common.Utils.SortableObject.SortableObject`.
    It uses an AVLTree proposed by :mod:`bintrees` to represent elements in the list.

    :TODO: The inner object __tree stores the given element using an AVLTree. Thus
    exposing new methods only requires the creation of a wrapper method in this class.

    >>> from netzob.all import *
    >>> from netzob.Common.Utils.SortedTypedList import SortedTypedList
    >>> msg1 = RawMessage(b"msg1", date=25.0)
    >>> msg2 = RawMessage(b"msg2", date=2.0)
    >>> msg3 = RawMessage(b"msg3", date=1456487548.0)
    >>> l = SortedTypedList(RawMessage, [msg2, msg3, msg1])
    >>> print(list(l.values())[0])
    [2.0 None->None] 'msg2'

    >>> msg4 = RawMessage(b"msg4", date=145548.0)
    >>> l.add(msg4)
    >>> msg5 = RawMessage(b"msg5", date=14.0)
    >>> msg6 = RawMessage(b"msg6", date=1745645548.0)
    >>> l.addAll([msg5, msg6])
    >>> print(list(l.values())[5])
    [1745645548.0 None->None] 'msg6'
    >>> print(len(l))
    6

    """
    def __init__(self, membersTypes, elements=None):
        self.membersTypes = membersTypes
        # AVL tree of priorities (values are None placeholders); it provides
        # the sorted iteration order of the list
        self.__treePriorities = AVLTree()
        # maps a priority to the list of elements carrying that priority
        self.__mapMessages = dict()
        if elements is not None and len(elements) > 0:
            self._extend(elements)

    def add(self, element):
        """Insert in the proper place the specified element.

        :type: any object that comply with the typed of the current list and inherits from :class:`netzob.Common.Utils.SortableObject.SortableObject`.
        :raises: a TypeError if element is None or if its type doesn't comply with
                 the definition of the list.
        """
        if element is None:
            raise TypeError("Element cannot be None")
        self._extend([element])

    def addAll(self, elements):
        """Insert in their proper place all the specified element.

        :type: a list of any object that comply with the typed of the current list and inherits from :class:`netzob.Common.Utils.SortableObject.SortableObject`.
        :raises: a TypeError if element is None or if its type doesn't comply with
                 the definition of the list.
        """
        if elements is None:
            raise TypeError("Element cannot be None")
        self._extend(elements)

    def values(self):
        """Return a list sorted with the values of the current SortedTypedList.
        :warning: modifying this list has no effect on the SortedTypedList.

        :rtype: :mod:list
        """
        # walk priorities in tree (sorted) order and flatten their element lists
        l = []
        for x in list(self.__treePriorities.keys()):
            l.extend(self.__mapMessages[x])
        return l

    def clear(self):
        """remove all items from the list.
        It's a O(n) operation"""
        self.__treePriorities.clear()
        # BUGFIX: the per-priority message lists must be dropped too; leaving
        # them populated made values() resurrect stale elements as soon as a
        # previously-seen priority was re-added after clear().
        self.__mapMessages.clear()

    def _extend(self, elements):
        """Add all the elements in the current list.

        :parameter elements: a list of :class:`netzob.Common.Utils.SortableObject.SortableObject` to insert.
        :raises: TypeError if something is wrong with the given elements
        """
        # validate all elements before mutating any state
        for e in elements:
            self._check(e)

        d = dict()
        for e in elements:
            d[e.priority()] = None
            if e.priority() in self.__mapMessages:
                self.__mapMessages[e.priority()].append(e)
            else:
                self.__mapMessages[e.priority()] = [e]
        self.__treePriorities.update(d)

    def _check(self, v):
        """Raise TypeError unless *v* matches the declared member type and is a SortableObject."""
        if not isinstance(v, self.membersTypes):
            raise TypeError(
                "Invalid type for argument, expecting: {0}, received : {1}".
                format(self.membersTypes, v.__class__.__name__))
        if not isinstance(v, SortableObject):
            raise TypeError(
                "Objects inserted in a SortedTypedList must inherits from SortableObject class"
            )

    def __len__(self):
        """Returns the number of elements in the sorted list which takes
        O(1) operation :)"""
        return len(self.__treePriorities)

    def __str__(self):
        return ', \n'.join([str(v) for v in list(self.values())])

    def __repr__(self):
        return repr(str(self))

    def __iter__(self):
        """SortedTypedList is an iterable over its values (and not its keys)."""
        # BUGFIX: iterate the stored elements in sorted order. The tree's
        # values are all None placeholders, so the previous implementation
        # (iterating self.__treePriorities.values()) yielded only Nones,
        # contradicting this docstring.
        return iter(self.values())
Ejemplo n.º 55
0
class _SweepState(object):
    """Internal sweep-line status: segment ids ordered in an AVL tree,
    together with the sweep line's current x position."""

    def __init__(self, segments, pos_x):
        self.segments = segments
        self.pos_x = pos_x
        self.tree = AVLTree()

    def insert_and_get_new_neighbours(self, new_segment_id):
        """Insert a segment id; return the neighbour pairs it forms."""
        self.tree.insert(new_segment_id, new_segment_id)
        formed = []
        # predecessor pair first, then successor pair (original ordering)
        for neighbour_lookup in (self.tree.prev_item, self.tree.succ_item):
            try:
                formed.append((neighbour_lookup(new_segment_id)[0], new_segment_id))
            except KeyError:
                pass
        return formed

    def erase_and_get_new_neighbours(self, segment_id):
        """Remove a segment id; return the pair its former neighbours now form."""
        def _neighbour(lookup):
            # tree edge -> no neighbour on that side
            try:
                return lookup(segment_id)[0]
            except KeyError:
                return None

        before = _neighbour(self.tree.prev_item)
        after = _neighbour(self.tree.succ_item)
        formed = [(before, after)] if before and after else []
        self.tree.remove(segment_id)
        return formed

    def swap_and_get_new_neighbours(self, segment1_id, segment2_id):
        """Swap two ids around the current x; return the resulting neighbour
        pairs, excluding the pair made of the two swapped ids themselves."""
        saved_x = self.pos_x
        self.pos_x = saved_x - epsilon
        pairs_after_erase = self.erase_and_get_new_neighbours(segment1_id)
        self.pos_x = saved_x + epsilon
        pairs_after_insert = self.insert_and_get_new_neighbours(segment1_id)
        self.pos_x = saved_x

        ignored = ((segment1_id, segment2_id), (segment2_id, segment1_id))
        return [pair for pair in pairs_after_erase + pairs_after_insert
                if pair not in ignored]

    def _get_cross_y_with_sweep_for_segment(self, segment):
        """Interpolate the y where *segment* crosses the sweep line."""
        run = segment.x2 - segment.x1
        rise = segment.y2 - segment.y1
        sweep_offset = self.pos_x - segment.x1
        return segment.y1 + rise * (sweep_offset / run)

    def get_cross_y_with_sweep_for_segment_id(self, segment_id):
        """Cross-y for the segment addressed by *segment_id*."""
        return self._get_cross_y_with_sweep_for_segment(self.segments[segment_id.idx])

    def get_segments_in_state(self):
        """Return all segment ids currently on the sweep line, in tree order."""
        return self.tree.values()
Ejemplo n.º 56
0
def sweep(points):
    """Sweep-line pass over the edges of the closed polyline *points*.

    Builds one segment per consecutive point pair (closing the loop), splits
    all segments at their pairwise intersections, draws the pieces, then walks
    the sorted endpoint events while maintaining the set of active segments.

    NOTE(review): relies on module-level helpers (Segment, Point, intersect,
    split_things, split_segment, draw_line, random_hex_color, AVLTree) and on
    the module global ``current_x`` — confirm their contracts at the call site.
    """
    # points[-1] pairs the last point with the first, closing the polygon
    segments = []
    a = points[-1]
    for b in points:
        segments.append(Segment(a, b))
        a = b
    
    n = len(segments)
    # split_points[i] collects every intersection point found on segments[i]
    split_points = [[] for _ in range(n)]

    all_intersections = [Point(0, 0)]
    for i in range(n):
        seg_ab = segments[i]
        for j in range(i+1, n):
            seg_cd = segments[j]
            intersections = intersect(seg_ab, seg_cd)
            """
            for intersection in intersections:
                closest = min(p in all_intersection, key=p.dist2(intersection))
                if intersection.dist2(closest) < 1e-10:
                    print("Merge")
            """
            split_points[i].extend(intersections)
            split_points[j].extend(intersections)

    # replace each segment by its pieces between the collected intersections
    segments = split_things(segments, split_points, split_segment)

    # fixed seed so the drawn colours are reproducible across runs
    random.seed(1)
    for segment in segments:
        draw_line(segment[0], segment[1], random_hex_color())

    # two events per segment: (lower endpoint, 0=start) and (upper endpoint, 1=end)
    event_points = []
    for segment in segments:
        a, b = segment
        if a > b:
            a, b = b, a
        event_points.append((a, 0, segment))
        event_points.append((b, 1, segment))

    event_points.sort()

    # sanity check: abort loudly if the splitting produced duplicate segments
    n = len(segments)
    for i in range(n):
        ab = segments[i]
        for j in range(i+1, n):
            cd = segments[j]
            if ab == cd:
                print("Duplicate segment", i, j)
                print(ab)
                print(cd)
                print(ab.slope())
                print(cd.slope())
                print(ab.current_y())
                print(cd.current_y())
                import sys
                sys.exit(0)
    
    # sweep phase: the module global current_x is updated per event —
    # presumably read by the segments' ordering/current_y logic; TODO confirm
    current_curves = AVLTree()
    for p, event_type, segment in event_points:
        global current_x
        current_x = p[0]
        
        if event_type == 0:
            current_curves.insert(segment, None)
        
        if event_type == 1:
            current_curves.remove(segment)
Ejemplo n.º 57
0
class Chromosome(object):
    """A chromosome holding synteny blocks in an AVL tree keyed by (start, end).

    Two telomere sentinel blocks bound the coordinate range, and ``mapping``
    records, per block and per resolution, the list of blocks it maps to.
    """
    def __init__(self, name, genome):
        self.name = name
        self.genome = genome
        # identity lookup: each registered block maps to itself
        self.sb_name_mapping = {}
        self.start_telomer = StartTelomereSyntenyBlock(name)
        self.end_telomer = EndTelomereSyntenyBlock(name)
        # blocks ordered by their (start, end) coordinate pair
        self.blocks = AVLTree()
        self.add_sb(self.start_telomer)
        self.add_sb(self.end_telomer)
        # maps sb into a dict of resolutions, it has mapping in
        # in each particular resolution, mapping is represented as a list,
        # this particular block is mapped to
        self.mapping = defaultdict(lambda: defaultdict(list))

    def add_sb(self, sb):
        """Register *sb* on this chromosome, building a SyntenyBlock from a raw tuple if needed."""
        if not isinstance(sb, SyntenyBlock):
            sb = self._create_sb(sb)
        # record which chromosome hosts the block
        self.genome.location_info[sb] = self
        self.sb_name_mapping[sb] = sb
        self.blocks.insert(key=(sb.start, sb.end), value=sb)

    def _create_sb(self, info):
        """
        example:
        sb = SyntenyBlock(name=info.name,
                          start=info.start,
                          end=info.end,
                          strand=info.strand,
                          chromosome=self)
                          """
        # info is expected to be a (name, start, end, strand) tuple
        sb = SyntenyBlock(name=info[0], start=info[1], end=info[2], strand=info[3], chromosome=self)
        return sb

    def add_mapping(self, sb, res_to_map, sbs):
        """Append each block of *sbs* to sb's mapping at resolution *res_to_map*, skipping duplicates."""
        for synteny_block in sbs:
            if synteny_block not in self.mapping[sb][res_to_map]:
                self.mapping[sb][res_to_map].append(synteny_block)

    def get_right_outer_most_mapping(self, sb, mapping):
        """Return the last block *sb* maps to at resolution *mapping*.

        NOTE(review): the fallback is start_telomer here while the 'left'
        variant falls back to end_telomer — looks swapped; confirm with callers.
        """
        if sb in self.mapping and mapping in self.mapping[sb]:
            return self.mapping[sb][mapping][-1]
        return self.start_telomer

    def get_left_outer_most_mapping(self, sb, mapping):
        """Return the first block *sb* maps to at resolution *mapping*.

        NOTE(review): falls back to end_telomer, mirroring the possibly
        swapped fallback of the 'right' variant — confirm with callers.
        """
        if sb in self.mapping and mapping in self.mapping[sb]:
            return self.mapping[sb][mapping][0]
        return self.end_telomer

    def get_sbs_between(self, sb_start, sb_end):
        """Return the blocks strictly between *sb_start* and *sb_end* in coordinate order."""
        # slice from 1: because, value_slice returns all values, which keys are greater or equals that minimum,
        # but less than maximum. Because of that "or equal" we have to slice
        result = list(self.blocks.value_slice((sb_start.start, sb_start.end), (sb_end.start, sb_end.end)))
        return result[1:] if len(result) > 0 else result

    def get_two_closest_mappings_and_relative_position(self, sb, res_to_map):
        """Return (left_mapping, right_mapping, left_steps, right_steps) for *sb*.

        When *sb* itself is mapped at *res_to_map*, both mappings are *sb* and
        both step counts are 0; otherwise walk outwards in both directions,
        counting blocks, until a mapped block (or a telomere sentinel) is hit.
        """
        l_position, r_position = 0, 0
        if res_to_map in self.mapping[sb]:
            return sb, sb, l_position, r_position
        # walk right from sb until a block mapped at res_to_map is found
        for value in self.blocks.value_slice(start_key=(sb.start, sb.end)):
            r_position += 1
            if res_to_map in self.mapping[value]:
                right_closest_mapping = value
                break
        else:
            right_closest_mapping = self.end_telomer
        # walk left from sb (reverse coordinate order) likewise
        for value in self.blocks.value_slice(end_key=(sb.start, sb.end), reverse=True):
            l_position += 1
            if res_to_map in self.mapping[value]:
                left_closest_mapping = value
                break
        else:
            left_closest_mapping = self.start_telomer
        return left_closest_mapping, right_closest_mapping, l_position, r_position

    def __hash__(self):
        # chromosome identity is its name
        return self.name.__hash__()

    def __str__(self):
        return str(self.blocks)