Example #1
def arrayRDP(arr, epsilon=0.0, n=None):
    """
    This is a slightly modified version of the _aRDP function that accepts
    as arguments the tolerance in the distance and the maximum number of
    points the algorithm can select.
    **Note:** The results of this algorithm should be identical to the _aRDP
    function if the *n* parameter is not specified. In that case, the
    performance is slightly worse, although the asymptotic complexity is the
    same. For this reason, this function internally delegates to that
    function when the *n* parameter is missing.

    Parameters
    ----------
    arr:
        Array of values of consecutive points.
    epsilon:
        Maximum difference allowed in the simplification process.
    n:
        Maximum number of points in the resulting simplified array.

    Returns
    -------
    out:
        Array of indices of the selected points.
    """
    if n is None:
        return _aRDP(arr, epsilon)
    if epsilon <= 0.0:
        raise ValueError('Epsilon must be > 0.0')
    n = n or len(arr)
    if n < 3:
        return arr
    fragments = SortedDict()
    #We store the distances as negative values due to the default order of
    #sorteddict
    dist, idx = max_vdist(arr, 0, len(arr) - 1)
    fragments[(-dist, idx)] = (0, len(arr) - 1)
    while len(fragments) < n-1:
        (dist, idx), (first, last) = fragments.popitem(last=False)
        if -dist <= epsilon:
            #Put the item back so it is not lost
            fragments[(dist, idx)] = (first, last)
            break
        else:
            #We have to break the fragment at the selected index
            dist, newidx = max_vdist(arr, first, idx)
            fragments[(-dist, newidx)] = (first, idx)
            dist, newidx = max_vdist(arr, idx, last)
            fragments[(-dist, newidx)] = (idx, last)
    #Now collect the starting index of every fragment, in order, plus the last point.
    result = SortedList(i[0] for i in fragments.itervalues())
    result.add(len(arr) - 1)
    return np.array(result)
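A minimal usage sketch for the function above (the input values are hypothetical; it assumes numpy is available as np and that the module-level helpers _aRDP and max_vdist that arrayRDP calls are present):

import numpy as np

# A short, noisy ramp; epsilon bounds the allowed vertical error and n caps
# how many indices may be selected.
signal = np.array([0.0, 0.1, 0.2, 1.5, 1.4, 1.6, 3.0, 3.1, 2.9, 5.0])

idx_eps = arrayRDP(signal, epsilon=0.5)       # n omitted: delegates to _aRDP
idx_cap = arrayRDP(signal, epsilon=0.5, n=4)  # never selects more than 4 indices
print(idx_eps)   # indices of the points kept within the epsilon tolerance
print(idx_cap)   # indices of at most n points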
Example #3
class KeyedRegion(object):
    """
    KeyedRegion keeps a mapping between stack offsets and all variables covering that offset. It assumes no variable in
    this region overlaps with another variable in this region.

    Registers and function frames can all be viewed as a keyed region.
    """
    def __init__(self, tree=None):
        self._storage = SortedDict() if tree is None else tree

    def _get_container(self, offset):
        try:
            base_offset = next(
                self._storage.irange(maximum=offset, reverse=True))
        except StopIteration:
            return offset, None
        else:
            container = self._storage[base_offset]
            if container.includes(offset):
                return base_offset, container
            return offset, None

    def __contains__(self, offset):
        """
        Test if there is at least one variable covering the given offset.

        :param offset:
        :return:
        """

        return self._get_container(offset)[1] is not None

    def __len__(self):
        return len(self._storage)

    def __iter__(self):
        return self._storage.itervalues()

    def __eq__(self, other):
        if set(self._storage.keys()) != set(other._storage.keys()):
            return False

        for k, v in self._storage.iteritems():
            if v != other._storage[k]:
                return False

        return True

    def copy(self):
        if not self._storage:
            return KeyedRegion()

        kr = KeyedRegion()
        for key, ro in self._storage.iteritems():
            kr._storage[key] = ro.copy()
        return kr

    def merge(self, other, make_phi_func=None):
        """
        Merge another KeyedRegion into this KeyedRegion.

        :param KeyedRegion other: The other instance to merge with.
        :return: None
        """

        # TODO: is the current solution not optimal enough?
        for _, item in other._storage.iteritems():  # type: RegionObject
            for loc_and_var in item.objects:
                self.__store(loc_and_var,
                             overwrite=False,
                             make_phi_func=make_phi_func)

        return self

    def dbg_repr(self):
        """
        Get a debugging representation of this keyed region.
        :return: A string of debugging output.
        """
        keys = self._storage.keys()
        offset_to_vars = {}

        for key in sorted(keys):
            ro = self._storage[key]
            variables = [obj.variable for obj in ro.objects]
            offset_to_vars[key] = variables

        s = []
        for offset, variables in offset_to_vars.iteritems():
            s.append("Offset %#x: %s" % (offset, variables))
        return "\n".join(s)

    def add_variable(self, start, variable):
        """
        Add a variable to this region at the given offset.

        :param int start:
        :param SimVariable variable:
        :return: None
        """

        self._store(start, variable, overwrite=False)

    def set_variable(self, start, variable):
        """
        Add a variable to this region at the given offset, and remove all other variables that are fully covered by
        this variable.

        :param int start:
        :param SimVariable variable:
        :return: None
        """

        self._store(start, variable, overwrite=True)

    def get_base_addr(self, addr):
        """
        Get the base offset (the key we are using to index variables covering the given offset) of a specific offset.

        :param int addr:
        :return:
        :rtype:  int or None
        """

        base_addr, container = self._get_container(addr)
        if container is None:
            return None
        else:
            return base_addr

    def get_variables_by_offset(self, start):
        """
        Find variables covering the given region offset.

        :param int start:
        :return: A list of stack variables.
        :rtype:  set
        """

        base_addr, container = self._get_container(start)
        if container is None:
            return []
        else:
            return container.variables

    #
    # Private methods
    #

    def _store(self, start, variable, overwrite=False):
        """
        Store a variable into the storage.

        :param int start: The beginning address of the variable.
        :param variable: The variable to store.
        :param bool overwrite: Whether existing variables should be overwritten or not.
        :return: None
        """

        loc_and_var = LocationAndVariable(start, variable)
        self.__store(loc_and_var, overwrite=overwrite)

    def __store(self, loc_and_var, overwrite=False, make_phi_func=None):
        """
        Store a variable into the storage.

        :param LocationAndVariable loc_and_var: The descriptor describing start address and the variable.
        :param bool overwrite: Whether existing variables should be overwritten or not.
        :return: None
        """

        start = loc_and_var.start
        variable = loc_and_var.variable
        variable_size = variable.size if variable.size is not None else 1
        end = start + variable_size

        # region items in the middle
        overlapping_items = list(self._storage.irange(start, end - 1))

        # is there a region item that begins before the start and overlaps with this variable?
        floor_key, floor_item = self._get_container(start)
        if floor_item is not None and floor_key not in overlapping_items:
            # insert its key at the beginning so the loop below can look it up in the storage
            overlapping_items.insert(0, floor_key)

        # scan through the entire list of region items, split existing regions and insert new regions as needed
        to_update = {start: RegionObject(start, variable_size, {loc_and_var})}
        last_end = start

        for floor_key in overlapping_items:
            item = self._storage[floor_key]
            if item.start < start:
                # we need to break this item into two
                a, b = item.split(start)
                if overwrite:
                    b.set_object(loc_and_var)
                else:
                    self._add_object_or_make_phi(b,
                                                 loc_and_var,
                                                 make_phi_func=make_phi_func)
                to_update[a.start] = a
                to_update[b.start] = b
                last_end = b.end
            elif item.start > last_end:
                # there is a gap between the last item and the current item
                # fill in the gap
                new_item = RegionObject(last_end, item.start - last_end,
                                        {loc_and_var})
                to_update[new_item.start] = new_item
                last_end = new_item.end
            elif item.end > end:
                # we need to split this item into two
                a, b = item.split(end)
                if overwrite:
                    a.set_object(loc_and_var)
                else:
                    self._add_object_or_make_phi(a,
                                                 loc_and_var,
                                                 make_phi_func=make_phi_func)
                to_update[a.start] = a
                to_update[b.start] = b
                last_end = b.end
            else:
                if overwrite:
                    item.set_object(loc_and_var)
                else:
                    self._add_object_or_make_phi(item,
                                                 loc_and_var,
                                                 make_phi_func=make_phi_func)
                to_update[loc_and_var.start] = item

        self._storage.update(to_update)

    def _is_overlapping(self, start, variable):

        if variable.size is not None:
            # make sure this variable does not overlap with any other variable
            end = start + variable.size
            try:
                prev_offset = next(
                    self._storage.irange(maximum=end - 1, reverse=True))
            except StopIteration:
                prev_offset = None

            if prev_offset is not None:
                if start <= prev_offset < end:
                    return True
                prev_item = self._storage[prev_offset][0]
                prev_item_size = prev_item.size if prev_item.size is not None else 1
                if start < prev_offset + prev_item_size < end:
                    return True
        else:
            try:
                prev_offset = next(
                    self._storage.irange(maximum=start, reverse=True))
            except StopIteration:
                prev_offset = None

            if prev_offset is not None:
                prev_item = self._storage[prev_offset][0]
                prev_item_size = prev_item.size if prev_item.size is not None else 1
                if prev_offset <= start < prev_offset + prev_item_size:
                    return True

        return False

    def _add_object_or_make_phi(self, item, loc_and_var, make_phi_func=None):  #pylint:disable=no-self-use
        if not make_phi_func or len({loc_and_var.variable}
                                    | item.variables) == 1:
            item.add_object(loc_and_var)
        else:
            # make a phi node
            item.set_object(
                LocationAndVariable(
                    loc_and_var.start,
                    make_phi_func(loc_and_var.variable, *item.variables)))
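The floor lookup inside _get_container is the standard SortedDict idiom for "largest key not greater than a given offset". A standalone sketch of that pattern, with made-up offsets:

from sortedcontainers import SortedDict

regions = SortedDict({0: 'var_a', 8: 'var_b', 24: 'var_c'})

def floor_key(sd, offset):
    # irange(maximum=offset, reverse=True) walks keys <= offset from the
    # largest down, so the first hit is the floor key, if any.
    try:
        return next(sd.irange(maximum=offset, reverse=True))
    except StopIteration:
        return None

print(floor_key(regions, 10))   # 8
print(floor_key(regions, -1))   # None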
Example #4
class TreePage(BasePage):
    """
    Page object, implemented with a sorted dict. Who knows what's underneath!
    """

    def __init__(self, *args, **kwargs):
        storage = kwargs.pop("storage", None)
        super(TreePage, self).__init__(*args, **kwargs)
        self._storage = SortedDict() if storage is None else storage

    def keys(self):
        if len(self._storage) == 0:
            return set()
        else:
            return set.union(*(set(range(*self._resolve_range(mo))) for mo in self._storage.itervalues()))

    def replace_mo(self, state, old_mo, new_mo):
        start, end = self._resolve_range(old_mo)
        for key in self._storage.irange(start, end-1):
            val = self._storage[key]
            if val is old_mo:
                #assert new_mo.includes(a)
                self._storage[key] = new_mo

    def store_overwrite(self, state, new_mo, start, end):
        # iterate over each item we might overwrite
        # track our mutations separately since we're in the process of iterating
        deletes = []
        updates = { start: new_mo }

        for key in self._storage.irange(maximum=end-1, reverse=True):
            old_mo = self._storage[key]

            # make sure we aren't overwriting all of an item that overlaps the end boundary
            if end < self._page_addr + self._page_size and end not in updates and old_mo.includes(end):
                updates[end] = old_mo

            # we can't set a minimum on the range because we need to do the above for
            # the first object before start too
            if key < start:
                break

            # delete any key that falls within the range
            deletes.append(key)

        #assert all(m.includes(i) for i,m in updates.items())

        # perform mutations
        for key in deletes:
            del self._storage[key]

        self._storage.update(updates)

    def store_underwrite(self, state, new_mo, start, end):
        # track the point that we need to write up to
        last_missing = end - 1
        # track also updates since we can't update while iterating
        updates = {}

        for key in self._storage.irange(maximum=end-1, reverse=True):
            mo = self._storage[key]

            # if this mo starts at or before last_missing but stops short of it,
            # there is a gap after it that must be filled with new_mo
            if mo.base <= last_missing and not mo.includes(last_missing):
                updates[max(mo.last_addr+1, start)] = new_mo
            last_missing = mo.base - 1

            # we can't set a minimum on the range because we need to do the above for
            # the first object before start too
            if last_missing < start:
                break

        # if there are no memory objects <= start, we won't have filled start yet
        if last_missing >= start:
            updates[start] = new_mo

        #assert all(m.includes(i) for i,m in updates.items())

        self._storage.update(updates)

    def load_mo(self, state, page_idx):
        """
        Loads a memory object from memory.

        :param page_idx: the index into the page
        :returns: the memory object covering that index, or None if there is none
        """

        try:
            key = next(self._storage.irange(maximum=page_idx, reverse=True))
        except StopIteration:
            return None
        else:
            return self._storage[key]

    def load_slice(self, state, start, end):
        """
        Return the memory objects overlapping with the provided slice.

        :param start: the start address
        :param end: the end address (non-inclusive)
        :returns: tuples of (starting_addr, memory_object)
        """
        keys = list(self._storage.irange(start, end-1))
        if not keys or keys[0] != start:
            try:
                key = next(self._storage.irange(maximum=start, reverse=True))
            except StopIteration:
                pass
            else:
                if self._storage[key].includes(start):
                    keys.insert(0, key)
        return [(key, self._storage[key]) for key in keys]

    def _copy_args(self):
        return { 'storage': self._storage.copy() }
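store_overwrite above also shows a common constraint when working with SortedDict: keys should not be deleted while irange is being iterated, so mutations are collected first and applied afterwards. A simplified sketch of the same pattern with toy keys and values (not real memory objects):

from sortedcontainers import SortedDict

storage = SortedDict({0: 'old0', 4: 'old4', 8: 'old8', 12: 'old12'})
start, end, new_value = 4, 12, 'new'

deletes = [k for k in storage.irange(start, end - 1)]   # keys 4 and 8
updates = {start: new_value}

for key in deletes:            # apply the mutations only after iterating
    del storage[key]
storage.update(updates)

print(list(storage.items()))   # [(0, 'old0'), (4, 'new'), (12, 'old12')]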
Example #5
def test_itervalues():
    mapping = [(val, pos) for pos, val in enumerate(string.ascii_lowercase)]
    temp = SortedDict(mapping)
    assert list(temp.itervalues()) == [pos for key, pos in mapping]
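The test relies on itervalues, which only exists in the Python 2 era sortedcontainers API; with sortedcontainers 2.x the same check would typically be written with values(), which also iterates in key order. A sketch of that variant:

import string
from sortedcontainers import SortedDict

mapping = [(val, pos) for pos, val in enumerate(string.ascii_lowercase)]
temp = SortedDict(mapping)
# keys are 'a'..'z', already in sorted order, so the values come back as 0..25
assert list(temp.values()) == [pos for key, pos in mapping]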
Example #7
class TxGraph(object):
    """represents a graph of all transactions
    within the current window

    Attributes:
        median(float)   :   the current median of the node degrees
        highMarker(int) :   the latest timestamp seen so far
        lowMarker(int)  :   the earliest timestamp of the window we are
                            interested in
        txMap(dict)     :   a collection of EdgeList's, keyed by timestamp,
                            with the value an instance of EdgeList
        edgeMap(dict)   :   a collection of all Edges within the window,
                            keyed by the name of the Edge
        nodeMap(dict)   :   a collection of Nodes within the window,
                            keyed by the name of the Node
        degreeList(list):   list of node degrees (sorted)
    """

    WINDOW_SIZE = 60
    def __init__(self):
        self.median = 0
        self.highMarker = TxGraph.WINDOW_SIZE
        self.lowMarker = 1
        self.txMap = SortedDict() #sorted by unix epoch (timestamp)
        self.edgeMap = SortedDict() #sorted by edge name
        self.nodeMap = SortedDict() #sorted by node name
        self.degreeList = SortedList() #sorted list of node degrees
    
    def __calculate_median(self, use_existing_list=False):
        """calculates median by adding degrees to a sortedlist
        """
        if not use_existing_list:
            #lets reconstruct the list
            self.degreeList = SortedList()
        
            for node in self.nodeMap.itervalues():
                if node.degree > 0:
                    self.degreeList.add(node.degree)

        listLen = len(self.degreeList)
        if listLen == 0:
            raise Exception("No items in the degreeList")

        if listLen == 1:
            return self.degreeList[0]/1.0

        if (listLen % 2) == 0: 
            return (self.degreeList[listLen/2] + self.degreeList[(listLen/2) - 1]) / 2.0
        
        return self.degreeList[listLen/2]/1.0

    
    def __get_edgelist(self, tstamp, create=True):
        """returns an instance of EdgeList with matching
        timestamp and creates one if needed
        """
        edgeList = self.txMap.get(tstamp, None)
        if edgeList is None and create is True:
            edgeList = EdgeList(tstamp)
            self.txMap[tstamp] = edgeList
        return edgeList

    
    def __getnode_with_name(self, name, create=True):
        """returns an instance of Node with matching name
        and creates one if necessary

        Args:
            name(str)   :   name of the edge
            create(bool):   flag to indicate whether to create a 
                            missing node
        """
        
        node = self.nodeMap.get(name, None)
        if node is None and create is True:
            node = Node(name)
            self.nodeMap[name] = node
        return node

    
    def __incr_degree_of_edge_nodes(self, edge):
        """increments the degree of the two nodes
        of an edge
        """

        src = self.__getnode_with_name(edge.source)
        src.incr_degree()
        
        tar = self.__getnode_with_name(edge.target)
        tar.incr_degree()
   
        return (src.degree, tar.degree)
    
    def __decr_degree_of_edge_nodes(self, edge):
        """decrements the degree of the two nodes
        of an edge
        """
        
        self.__decr_degree_of_node(edge.source)
        self.__decr_degree_of_node(edge.target)
   
    
    def __decr_degree_of_node(self, name):
        """decrements the degree of a node
        and removes it from the nodeMap if degree is 0
        """
        
        node = self.__getnode_with_name(name, create=False)
        node.decr_degree()
        
        if node.degree == 0:
            del self.nodeMap[node.name]


    def __remove_edge(self, edge):
        """removes an edge from the graph and updates the 
        degree of a node. If degree of a node goes to 0, then
        remove the node as well
        
        Args:
            egde(Edge)   :   An instance of Edge class
        """

        self.__decr_degree_of_edge_nodes(edge)
        del self.edgeMap[edge.name]

    
    def __update_tstamp_for_existing_edge(self, edgeName, tstamp):
        """updates the timestamp for an existing edge and moves
        the edge to an appropriate EdgeList
        
        Args:
            edgeName(str)   :   name of the edge to be updated
            tstamp(int)     :   unix epoch of the timestamp
        """
        
        currEdge = self.edgeMap[edgeName]
        if not currEdge:
            return
        
        if tstamp <= currEdge.tstamp:
            return #ignore older transactions within the window
        
        #remove the edge from the edgelist with old timestamp
        edgeList = self.__get_edgelist(currEdge.tstamp, create=False)
        del edgeList.edges[currEdge.name]

        #update the tstamp in the edge
        currEdge.tstamp = tstamp

        #move this edge to the correct edgelist
        edgeList = self.__get_edgelist(tstamp)
        edgeList.edges[currEdge.name] = currEdge

    
    def __update_tx_window(self):
        """updates the transaction window of the graph
        This method is called when a transaction newer than the
        current window arrives. It does the following:
        1. Gets the edgeList's that are below the lowMarker
        2. Goes through the edges and deletes them from the edgeMap
        3. Update the degree of the nodes
        4. Moves the window by deleting the stale edgeLists
        """
        tsIter = self.txMap.irange(None, self.lowMarker, inclusive=(True,False))
        lastTStamp = None
        for tstamp in tsIter:
            lastTStamp  = tstamp
            edgeList = self.txMap[tstamp]
        
            for edge in edgeList.edges.itervalues():
                self.__remove_edge(edge)

        #lets delete the stale edgelists
        if lastTStamp:
            lowIdx = self.txMap.index(lastTStamp)
            del self.txMap.iloc[:lowIdx+1]
    
    
    def process_transaction(self, tstamp, source, target):
        """this is the starting point of transaction processing.
        We first check whether the tx is within the window.
        If it is, we update the Edge (if it already exists) or
        create a new Edge if necessary, and then update the median.
        If the tx is newer than the window, we move the window,
        remove all stale (older) edges, create a new edge for the
        newer transaction, and finally update the median.
        """
        
        #basic sanity checks
        if source is None or target is None:
            raise Exception("Invalid node")

        if len(source) == 0 or len(target) == 0:
            raise Exception("Invalid node")

        if source == target:
            raise Exception("source and target cannot be the same")
        
        #timestamp of the transaction is old and can be ignored
        if tstamp < self.lowMarker:
            return

        #create a new edge representing this transaction     
        newEdge = Edge(tstamp, source, target)
        
        if tstamp <= self.highMarker:
            if newEdge.name in self.edgeMap:
                self.__update_tstamp_for_existing_edge(newEdge.name, tstamp)
                #no need to recalculate the median here since degree does not change
                return
            
            """handle new edge
            1. find the edgelist with the same timestamp (if not create it)
            2. add this edge to the edgelist and edgemap
            4. create new Nodes for the edges if needed or update their degrees
            5. update the degreeList with the new degrees
            6. recalculate the median but use the existing degreeList
            """
            edgeList = self.__get_edgelist(tstamp)
            edgeList.edges[newEdge.name] = newEdge
            self.edgeMap[newEdge.name] = newEdge

            """
            this is optimization because most of the degrees of the 
            nodes hasn't changed and therefore we can reuse the existing list
            """
            srcDegree, tarDegree = self.__incr_degree_of_edge_nodes(newEdge)
            if srcDegree == 1:
                self.degreeList.add(1)
            else:
                self.degreeList.remove(srcDegree - 1)
                self.degreeList.add(srcDegree)

            if tarDegree == 1:
                self.degreeList.add(1)
            else:
                self.degreeList.remove(tarDegree - 1)
                self.degreeList.add(tarDegree)

            self.median = self.__calculate_median(use_existing_list=True)
            return

        """this transaction is newer and we need to move the window
        1. update the low and high markers of the timestamp window
        2. create edgelist with this newer timestamp
        2. add the new edge to the edgelist
        3. add the new edge to the edgemap
        4. create new Nodes of the edges if needed or update their degrees
        5. calculate the median (but reconstruct the degreeList) 
        """
        #this tx is newer and we need to move the window
        self.highMarker = tstamp
        self.lowMarker = tstamp - TxGraph.WINDOW_SIZE + 1

        self.__update_tx_window()
        
        if newEdge.name in self.edgeMap:
            self.__update_tstamp_for_existing_edge(newEdge.name, tstamp)
        else:
            edgeList = self.__get_edgelist(tstamp)
            edgeList.edges[newEdge.name] = newEdge
            self.edgeMap[newEdge.name] = newEdge
            self.__incr_degree_of_edge_nodes(newEdge)

        self.median = self.__calculate_median()
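The heart of __calculate_median is keeping node degrees in a SortedList so that the median reduces to one or two index lookups. A standalone sketch of that idea, written with // so it also runs under Python 3 (the class above uses Python 2 style division):

from sortedcontainers import SortedList

def median_of(degrees):
    # SortedList keeps the degrees ordered, so the median is an index lookup
    ordered = SortedList(degrees)
    n = len(ordered)
    if n == 0:
        raise ValueError("no degrees")
    if n % 2 == 0:
        return (ordered[n // 2] + ordered[n // 2 - 1]) / 2.0
    return ordered[n // 2] / 1.0

print(median_of([1, 3, 2]))      # 2.0
print(median_of([1, 2, 3, 4]))   # 2.5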
Example #8
class OrderedSet(co.MutableSet, co.Sequence):
    """Like OrderedDict, OrderedSet maintains the insertion order of elements.

    For example::

        >>> ordered_set = OrderedSet('abcde')
        >>> list(ordered_set) == list('abcde')
        True
        >>> ordered_set = OrderedSet('edcba')
        >>> list(ordered_set) == list('edcba')
        True

    OrderedSet also implements the collections.Sequence interface.

    """
    def __init__(self, iterable=()):
        self._keys = {}
        self._nums = SortedDict()
        self._count = count()
        self |= iterable

    def __contains__(self, key):
        "``key in ordered_set``"
        return key in self._keys

    count = __contains__

    def __iter__(self):
        "``iter(ordered_set)``"
        return self._nums.itervalues()

    def __reversed__(self):
        "``reversed(ordered_set)``"
        _nums = self._nums
        for key in reversed(_nums):
            yield _nums[key]

    def __getitem__(self, index):
        "``ordered_set[index]`` -> element; lookup element at index."
        _nums = self._nums
        num = _nums.iloc[index]
        return _nums[num]

    def __len__(self):
        "``len(ordered_set)``"
        return len(self._keys)

    def index(self, key):
        "Return index of key."
        try:
            return self._keys[key]
        except KeyError:
            raise ValueError('%r is not in %s' % (key, type(self).__name__))

    def add(self, key):
        "Add element, key, to set."
        if key not in self._keys:
            num = next(self._count)
            self._keys[key] = num
            self._nums[num] = key

    def discard(self, key):
        "Remove element, key, from set if it is a member."
        num = self._keys.pop(key, None)
        if num is not None:
            del self._nums[num]

    def __repr__(self):
        "Text representation of set."
        return '%s(%r)' % (type(self).__name__, list(self))

    __str__ = __repr__
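A quick usage sketch of this recipe (the input string is arbitrary; like the recipe itself, it assumes the Python 2 era sortedcontainers API with itervalues and iloc):

ordered_set = OrderedSet('abracadabra')
print(list(ordered_set))        # ['a', 'b', 'r', 'c', 'd'] - insertion order, no duplicates
print(ordered_set[0])           # 'a', via the Sequence interface
print(ordered_set.index('r'))   # 2
ordered_set.discard('b')
print(list(ordered_set))        # ['a', 'r', 'c', 'd']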
Example #9
class DotMap(MutableMapping):

    def __init__(self, *args, **kwargs):
        self._map = SortedDict()
        if args:
            d = args[0]
            if type(d) is dict:
                for k, v in self.__call_items(d):
                    if type(v) is dict:
                        v = DotMap(v)
                    self._map[k] = v
        if kwargs:
            for k, v in self.__call_items(kwargs):
                self._map[k] = v

    @staticmethod
    def __call_items(obj):
        if hasattr(obj, 'iteritems') and ismethod(getattr(obj, 'iteritems')):
            return obj.iteritems()
        else:
            return obj.items()

    def items(self):
        return self.iteritems()

    def iteritems(self):
        return self.__call_items(self._map)

    def __iter__(self):
        return self._map.__iter__()

    def __setitem__(self, k, v):
        self._map[k] = v

    def __getitem__(self, k):
        if k not in self._map:
            # automatically extend to new DotMap
            self[k] = DotMap()
        return self._map[k]

    def __setattr__(self, k, v):
        if k == '_map':
            super(DotMap, self).__setattr__(k, v)
        else:
            self[k] = v

    def __getattr__(self, k):
        if k == '_map':
            return self._map
        else:
            return self[k]

    def __delattr__(self, key):
        return self._map.__delitem__(key)

    def __contains__(self, k):
        return self._map.__contains__(k)

    def __str__(self):
        items = []
        for k, v in self.__call_items(self._map):
            items.append('{0}={1}'.format(k, repr(v)))
        out = 'DotMap({0})'.format(', '.join(items))
        return out

    def __repr__(self):
        return str(self)

    def to_dict(self):
        d = {}
        for k, v in self.items():
            if type(v) is DotMap:
                v = v.to_dict()
            d[k] = v
        return d

    def pprint(self):
        pprint(self.to_dict())

    # proper dict subclassing
    def values(self):
        return self._map.values()

    @staticmethod
    def parse_other(other):
        if type(other) is DotMap:
            return other._map
        else:
            return other

    def __cmp__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__cmp__(other)

    def __eq__(self, other):
        other = DotMap.parse_other(other)
        if not isinstance(other, dict):
            return False
        return self._map.__eq__(other)

    def __ge__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__ge__(other)

    def __gt__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__gt__(other)

    def __le__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__le__(other)

    def __lt__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__lt__(other)

    def __ne__(self, other):
        other = DotMap.parse_other(other)
        return self._map.__ne__(other)

    def __delitem__(self, key):
        return self._map.__delitem__(key)

    def __len__(self):
        return self._map.__len__()

    def copy(self):
        return self

    def get(self, key, default=None):
        return self._map.get(key, default)

    def has_key(self, key):
        return key in self._map

    def iterkeys(self):
        return self._map.iterkeys()

    def itervalues(self):
        return self._map.itervalues()

    def keys(self):
        return self._map.keys()

    def pop(self, key, default=None):
        return self._map.pop(key, default)

    def setdefault(self, key, default=None):
        return self._map.setdefault(key, default)

    def viewitems(self):
        if version_info.major == 2 and version_info.minor >= 7:
            return self._map.viewitems()
        else:
            return self._map.items()

    def viewkeys(self):
        if version_info.major == 2 and version_info.minor >= 7:
            return self._map.viewkeys()
        else:
            return self._map.keys()

    def viewvalues(self):
        if version_info.major == 2 and version_info.minor >= 7:
            return self._map.viewvalues()
        else:
            return self._map.values()

    @classmethod
    def fromkeys(cls, seq, value=None):
        d = DotMap()
        d._map = SortedDict.fromkeys(seq, value)
        return d
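A short usage sketch of the attribute-style access DotMap provides (the field names are made up for illustration):

config = DotMap({'server': {'host': 'localhost', 'port': 8080}})
print(config.server.host)          # 'localhost' - nested dicts become DotMaps
config.server.timeout = 30         # attribute assignment goes through __setattr__
config.logging.level = 'DEBUG'     # missing keys are auto-created as DotMaps
print(config.to_dict())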