Example #1
0
class QtDictListModel(QAbstractListModel):
    """List model backed by a SortedDict; rows appear in key-sorted order."""

    def __init__(self):
        QAbstractListModel.__init__(self)
        self._items = SortedDict()

    def role(self, item, role):
        # Default behaviour: hand back the stored item for every role.
        # Subclasses override this to map roles to item attributes.
        return item

    def rowCount(self, parent):
        # A flat list model reports no rows beneath a valid parent.
        return 0 if parent.isValid() else len(self._items)

    def from_index(self, index):
        """Return the item for *index*, or None if it is invalid/out of range."""
        if index.isValid() and index.row() < len(self._items):
            return self._items.peekitem(index.row())[1]
        return None

    def data(self, index, role):
        item = self.from_index(index)
        return None if item is None else self.role(item, role)

    def _add(self, key, item):
        assert key not in self._items
        # The new key's sorted position is where the row will appear.
        row = self._items.bisect_left(key)
        self.beginInsertRows(QModelIndex(), row, row)
        self._items[key] = item
        self.endInsertRows()

    # TODO - removal is O(n).
    def _remove(self, key):
        assert key in self._items
        row = self._items.index(key)
        self.beginRemoveRows(QModelIndex(), row, row)
        del self._items[key]
        self.endRemoveRows()

    def _clear(self):
        self.beginRemoveRows(QModelIndex(), 0, len(self._items) - 1)
        self._items.clear()
        self.endRemoveRows()

    # O(n). Rework if it's too slow.
    def _update(self, key, roles=None):
        """Signal that the row for *key* changed (optionally only *roles*)."""
        changed = self.index(self._items.index(key), 0)
        if roles is None:
            self.dataChanged.emit(changed, changed)
        else:
            self.dataChanged.emit(changed, changed, roles)
Example #2
0
class sorted_cached_db:
    """In-memory sorted key/value cache mirrored to a Redis-tracked DB table."""

    def __init__(self):
        self.sorted_map = SortedDict()

    def if_exists(self, key):
        """Return True if *key* is present in the cache."""
        return key in self.sorted_map

    def add(self, key, value):
        """Insert or overwrite *key* in the cache."""
        self.sorted_map[key] = value

    def delete(self, key):
        """Remove *key* from the cache and the backing store."""
        if not self.if_exists(key):
            return "key not present"
        del self.sorted_map[key]
        redis.query.filter_by(key=key, type="sorted").delete()
        db.session.commit()
        return "deleted"

    def find_rank(self, key):
        """Return the zero-based sorted rank of *key* as a JSON response."""
        if not self.if_exists(key):
            return "does not exist"  # fixed garbled message ("does'not exists")
        return jsonify(self.sorted_map.index(key))

    def get_range(self, start, end):
        """Return values for ranks start..end inclusive; negative ranks count
        from the end (Redis ZRANGE-style semantics)."""
        n = len(self.sorted_map)
        if n == 0:
            # Guard: the modulo normalisation below would raise
            # ZeroDivisionError on an empty cache.
            return jsonify([])
        start = (n + start) % n
        end = (n + end) % n + 1
        return jsonify([
            self.sorted_map[key]['value']
            for key in self.sorted_map.islice(start, end)
        ])
Example #3
0
class MenuBar(QMenuBar):
    """Main-window menu bar that keeps its menus in a fixed logical order,
    even when they are created lazily and out of order."""

    def __init__(self,
                 menu_order: Tuple[Type[MainMenu], ...] = (FileMenu, ViewMenu,
                                                           ToolsMenu,
                                                           AlgorithmsMenu,
                                                           WindowsMenu,
                                                           HelpMenu)):
        # Annotation fixed: the tuple holds menu *classes* (not instances)
        # and is variadic, so Tuple[Type[MainMenu], ...] is the correct type
        # (the original Tuple[MainMenu] meant a 1-tuple of instances).
        super().__init__()

        self._menu_order = menu_order
        # Menu class -> its position in the canonical display order.
        self._menus_order_indexes = {
            menu_type: i
            for (i, menu_type) in enumerate(self._menu_order)
        }

        # {order_index: MainMenu instance}; kept sorted so already-added
        # menus can be walked in display order.
        self._ordered_added_menus = SortedDict()

    def add_menu(self, menu_type: Type[MainMenu]) -> MainMenu:
        """Instantiate *menu_type* and insert it at its canonical position."""
        menu = menu_type()
        menu_order_index = self._menu_order_index(menu_type)
        self._ordered_added_menus[menu_order_index] = menu

        menu_pos = self._ordered_added_menus.index(menu_order_index)
        if menu_pos == len(self._ordered_added_menus) - 1:
            # Canonically last among the added menus: append at the end.
            self.addMenu(menu)
        else:
            # Otherwise insert just before the next already-added menu.
            next_menu = self._ordered_added_menus.peekitem(menu_pos + 1)[1]
            self.insertMenu(next_menu.menuAction(), menu)

        return menu

    def menu(self,
             menu_type: Type[MainMenu],
             add_nonexistent: bool = True) -> Optional[MainMenu]:
        """Return the menu of *menu_type*, creating it if allowed; else None."""
        menu = self._ordered_added_menus.get(self._menu_order_index(menu_type))
        if menu is None and add_nonexistent:
            menu = self.add_menu(menu_type)
        return menu

    def add_menu_action(self,
                        menu_type: Type[MainMenu],
                        action_name,
                        method,
                        shortcut=None) -> QAction:
        """Add *action_name* triggering *method* to the given menu.

        NOTE(review): a None *shortcut* is forwarded to QMenu.addAction —
        confirm the Qt binding in use accepts None there.
        """
        return self.menu(menu_type).addAction(action_name, method, shortcut)

    def _menu_order_index(self, menu_type: Type[MainMenu]) -> int:
        # Raises KeyError for menu types outside the configured order.
        return self._menus_order_indexes[menu_type]
Example #4
0
class InMemoryStorage(object):
    """Sorted in-memory key/value store; methods return 0 on success, 1 on
    failure (NOTE: get() conflates a stored value of 1 with "missing" —
    kept for caller compatibility)."""

    def __init__(self):
        self.kvstore = SortedDict()  # keys kept in sorted order

    def get(self, k):
        """Return the value for k, or 1 if the key is missing."""
        try:
            return self.kvstore[k]
        except KeyError:  # narrowed from bare except: don't hide real bugs
            return 1

    def put(self, k, v):
        self.kvstore[k] = v
        return 0

    def delete(self, k):
        try:
            del self.kvstore[k]
            return 0
        except KeyError:  # narrowed from bare except
            return 1

    def split(self, section, keyspace_mid):
        """ delete one half of keystore for group split operation """
        midKey = None
        for key in self.kvstore.keys():  # TODO make more efficient for better performance
            if key > str(keyspace_mid):  # use iloc to estimate midpoint
                midKey = self.kvstore.index(key)
                break

        # Fixed: rebuild a SortedDict. The original assigned the raw
        # items() slice (a list of tuples) to self.kvstore, which broke
        # every subsequent dict operation on the store.
        if section:  # section is either 0 or 1
            self.kvstore = SortedDict(self.kvstore.items()[midKey:])
        else:
            self.kvstore = SortedDict(self.kvstore.items()[:midKey])
        print(self.kvstore)
        return 0

    def save(self):  # need metadata here
        save_state("data/backup/db_copy.pkl", self.kvstore)

    def load(self):
        self.kvstore = load_state("data/backup/db_copy.pkl")
 def jobScheduling(self, startTime: List[int], endTime: List[int],
                   profit: List[int]) -> int:
     """Return the maximum total profit from non-overlapping jobs.

     dp maps a start time s to the best profit achievable using only jobs
     that start at or after s.  Jobs are processed in decreasing start
     order, so every strictly-later start time is already in dp when a
     job is folded in.
     """
     tasks = SortedList(zip(startTime, endTime, profit), key=lambda t: t[0])
     n = len(tasks)
     dp = SortedDict()
     for i in reversed(range(n)):
         s1, e, p = tasks[i]
         # Option 1: take this job by itself (or improve a same-start entry).
         if s1 in dp:
             dp[s1] = max(dp[s1], p)
         else:
             dp[s1] = p
         # Option 2: chain with the first dp entry whose start >= this end.
         j = dp.bisect_left(e)
         if j < len(dp):
             s2 = dp.keys()[j]
             dp[s1] = max(dp[s1], p + dp[s2])
         # Option 3: skip this job — inherit the next-later start's best.
         k = dp.index(s1)
         if k < len(dp) - 1:
             s2 = dp.keys()[k + 1]
             dp[s1] = max(dp[s1], dp[s2])
     return max(dp.values())
Example #6
0
def test_index():
    # Letters a..z mapped to their alphabet positions; SortedDict.index
    # returns the positional rank of a key in sorted order.
    letters = SortedDict(
        (char, pos) for pos, char in enumerate(string.ascii_lowercase))
    assert letters.index('a') == 0
    # A restricted search window [3, -3) still locates 'f' at rank 5.
    assert letters.index('f', 3, -3) == 5
            return self.fr <= t and t < self.to
        return self.fr == t.fr and self.to == t.to
    def __ne__(self, t):
        # Inequality is simply the negation of equality.
        return not (self == t)
    def __hash__(self):
        # Hash the canonical text form so equal ranges hash identically.
        text = str(self)
        return hash(text)
    def __str__(self):
        # Canonical text form, e.g. "[10-20]".
        rendered = "[{}-{}]".format(self.fr, self.to)
        return rendered

from sortedcontainers import SortedDict

# Demo: a SortedDict keyed by Range objects.  Point lookups with a plain
# int rely on Range's comparison operators treating an int inside the
# range as "equal" (see Range.__eq__ above) — presumably Range also
# defines ordering against ints; TODO confirm the missing operators.
d = SortedDict()
d[Range(50,60)] = None
d[Range(40,50)] = None
d[Range(10,20)] = None
d[Range(80,90)] = None
d[Range(110,120)] = None
# Keys iterate in sorted order.
for k in d.keys():
    print(k)
print("==============================")
# d.index(point) yields the rank of the range containing the point;
# d.iloc[rank] maps the rank back to the Range key (iloc is the older,
# since-deprecated positional view in sortedcontainers).
print("85", d.iloc[d.index(85)])
print("40", d.iloc[d.index(40)])
print("49", d.iloc[d.index(49)])
print("50", d.iloc[d.index(50)])
print("59", d.iloc[d.index(59)])
try:
    d.index(60)
except ValueError:
    print("60 is not in d")
# NOTE(review): 60 is not covered by any stored range, so this lookup
# raises — presumably intentional, to demonstrate the failure mode.
d[60]
Example #8
0
class TxGraph(object):
    """represents a graph of all transactions
    within the current window

    Attributes:
        median(float)   :   the current median of the degree of the nodes
        highMarker(int) :   the latest timestamp seen so far
        lowMarker(int)  :   the earliest timestamp of the window we are
                            interested in
        txMap(dict)     :   this is a collection of EdgeList's with key being
                            the timestamp and the value an instance of EdgeList
        edgeMap(dict)   :   this is collection of all Edges within a window
                            with key being the name of an Edge
        nodeMap(dict)   :   this represents a collection of Nodes with a window
                            with key being the name of the Node
        degreeList(list):   list of degrees of noded (sorted)

    NOTE(review): Python 2 code — itervalues() and integer '/' division
    below must be ported before running on Python 3.
    """

    # Window length; timestamps are unix epochs, so presumably seconds.
    WINDOW_SIZE = 60
    def __init__(self):
        self.median = 0
        self.highMarker = TxGraph.WINDOW_SIZE
        self.lowMarker = 1
        self.txMap = SortedDict() #sorted by unix epoch (timestamp)
        self.edgeMap = SortedDict() #sorted by edge name
        self.nodeMap = SortedDict() #sorted by node name
        self.degreeList = SortedList() #sorted by degreeList

    def __calculate_median(self, use_existing_list=False):
        """calculates median by adding degrees to a sortedlist
        """
        if not use_existing_list:
            #lets reconstruct the list
            self.degreeList = SortedList()

            # itervalues() is Python-2-only.
            for node in self.nodeMap.itervalues():
                if node.degree > 0:
                    self.degreeList.add(node.degree)

        listLen = len(self.degreeList)
        if listLen == 0:
            raise Exception("No items in the degreeList")

        if listLen == 1:
            return self.degreeList[0]/1.0

        # listLen/2 is integer division on py2; on py3 it would be a float
        # index and raise TypeError.
        if (listLen % 2) == 0:
            return (self.degreeList[listLen/2] + self.degreeList[(listLen/2) - 1]) / 2.0

        return self.degreeList[listLen/2]/1.0


    def __get_edgelist(self, tstamp, create=True):
        """returns an instance of EdgeList with matching
        timestamp and creates one if needed
        """
        edgeList = self.txMap.get(tstamp, None)
        if edgeList is None and create is True:
            edgeList = EdgeList(tstamp)
            self.txMap[tstamp] = edgeList
        return edgeList


    def __getnode_with_name(self, name, create=True):
        """returns an instance of Node with matching name
        and creates one if necessary

        Args:
            name(str)   :   name of the edge
            create(bool):   flag to indicate whether to create a 
                            missing node
        """
        
        node = self.nodeMap.get(name, None)
        if node is None and create is True:
            node = Node(name)
            self.nodeMap[name] = node
        return node


    def __incr_degree_of_edge_nodes(self, edge):
        """increments the degree of the two nodes
        of an edge

        Returns:
            (src.degree, tar.degree) after the increment.
        """

        src = self.__getnode_with_name(edge.source)
        src.incr_degree()
        
        tar = self.__getnode_with_name(edge.target)
        tar.incr_degree()
   
        return (src.degree, tar.degree)
    
    def __decr_degree_of_edge_nodes(self, edge):
        """decrements the degree of the two nodes
        of an edge
        """
        
        self.__decr_degree_of_node(edge.source)
        self.__decr_degree_of_node(edge.target)
   
    
    def __decr_degree_of_node(self, name):
        """decrements the degree of a node
        and removes it from the nodeMap if degree is 0
        """
        
        node = self.__getnode_with_name(name, create=False)
        node.decr_degree()
        
        if node.degree == 0:
            del self.nodeMap[node.name]


    def __remove_edge(self, edge):
        """removes an edge from the graph and updates the 
        degree of a node. If degree of a node goes to 0, then
        remove the node as well
        
        Args:
            egde(Edge)   :   An instance of Edge class
        """

        self.__decr_degree_of_edge_nodes(edge)
        del self.edgeMap[edge.name]

    
    def __update_tstamp_for_existing_edge(self, edgeName, tstamp):
        """updates the timestamp for an existing edge and moves
        the edge to an appropriate EdgeList
        
        Args:
            edgeName(str)   :   name of the edge to be updated
            tstamp(int)     :   unix epoch of the timstamp
        """
        
        currEdge = self.edgeMap[edgeName]
        if not currEdge:
            return
        
        if tstamp <= currEdge.tstamp:
            return #ignore older transactions within the window
        
        #remove the edge from the edgelist with old timestamp
        edgeList = self.__get_edgelist(currEdge.tstamp, create=False)
        del edgeList.edges[currEdge.name]

        #update the tstamp in the edge
        currEdge.tstamp = tstamp

        #move this edge to the correct edgelist
        edgeList = self.__get_edgelist(tstamp)
        edgeList.edges[currEdge.name] = currEdge

    
    def __update_tx_window(self):
        """updates the transaction window of the graph
        This method is called when a newer transaction out the 
        window arrives. It does the following:
        1. Gets the edgeList's that are below the lowMarker
        2. Goes through the edges and deletes them from the edgeMap
        3. Update the degree of the nodes
        4. Moves the window by deleting the stale edgeLists
        """
        # Keys strictly below lowMarker: inclusive=(True, False) excludes
        # the lowMarker itself.
        tsIter = self.txMap.irange(None, self.lowMarker, inclusive=(True,False))
        lastTStamp = None
        for tstamp in tsIter:
            lastTStamp  = tstamp
            edgeList = self.txMap[tstamp]
        
            # itervalues() is Python-2-only.
            for edge in edgeList.edges.itervalues():
                self.__remove_edge(edge)

        #lets delete the stale edgelists
        if lastTStamp:
            lowIdx = self.txMap.index(lastTStamp)
            # Positional slice-deletion via the iloc view (deprecated in
            # newer sortedcontainers releases).
            del self.txMap.iloc[:lowIdx+1]
    
    
    def process_transaction(self, tstamp, source, target):
        """this is the starting point of transaction processing.
        We first check whether the tx is within the window.
        If it is, then we update the Edge (if it already exists) or 
        create a new Edge if necessary and update the median.
        If the tx is not within the window and is newer, we then
        move the window and remove all stale(older) edges and create
        a new edge for the newer transaction and finally update the
        median
        """
        
        #basic sanity checks
        if source is None or target is None:
            raise Exception("Invalid node")

        if len(source) == 0 or len(target) == 0:
            raise Exception("Invalid node")

        if source == target:
            raise Exception("source and target cannot be the same")
        
        #timestamp of the transaction is old and can be ignored
        if tstamp < self.lowMarker:
            return

        #create a new edge representing this transaction     
        newEdge = Edge(tstamp, source, target)
        
        if tstamp <= self.highMarker:
            if newEdge.name in self.edgeMap:
                self.__update_tstamp_for_existing_edge(newEdge.name, tstamp)
                #no need to recalculate the median here since degree does not change
                return
            
            """handle new edge
            1. find the edgelist with the same timestamp (if not create it)
            2. add this edge to the edgelist and edgemap
            4. create new Nodes for the edges if needed or update their degrees
            5. update the degreeList with the new degrees
            6. recalculate the median but use the existing degreeList
            """
            edgeList = self.__get_edgelist(tstamp)
            edgeList.edges[newEdge.name] = newEdge
            self.edgeMap[newEdge.name] = newEdge

            """
            this is optimization because most of the degrees of the 
            nodes hasn't changed and therefore we can reuse the existing list
            """
            # Replace each node's stale degree entry instead of rebuilding
            # the whole degreeList: degree 1 means the node is brand new.
            srcDegree, tarDegree = self.__incr_degree_of_edge_nodes(newEdge)
            if srcDegree == 1:
                self.degreeList.add(1)
            else:
                self.degreeList.remove(srcDegree - 1)
                self.degreeList.add(srcDegree)

            if tarDegree == 1:
                self.degreeList.add(1)
            else:
                self.degreeList.remove(tarDegree - 1)
                self.degreeList.add(tarDegree)

            self.median = self.__calculate_median(use_existing_list=True)
            return

        """this transaction is newer and we need to move the window
        1. update the low and high markers of the timestamp window
        2. create edgelist with this newer timestamp
        2. add the new edge to the edgelist
        3. add the new edge to the edgemap
        4. create new Nodes of the edges if needed or update their degrees
        5. calculate the median (but reconstruct the degreeList) 
        """
        #this tx is newer and we need to move the window
        self.highMarker = tstamp
        self.lowMarker = tstamp - TxGraph.WINDOW_SIZE + 1

        self.__update_tx_window()
        
        if newEdge.name in self.edgeMap:
            self.__update_tstamp_for_existing_edge(newEdge.name, tstamp)
        else:
            edgeList = self.__get_edgelist(tstamp)
            edgeList.edges[newEdge.name] = newEdge
            self.edgeMap[newEdge.name] = newEdge
            self.__incr_degree_of_edge_nodes(newEdge)

        self.median = self.__calculate_median()
Example #9
0
from random import choice
from itertools import cycle
from sortedcontainers import SortedDict
from collections import OrderedDict

# Demo: cycling over a SortedDict's values while mutating the dict.
players = {1: 'a', 2: 'b', 3: 'c'}

sd = SortedDict(players)

print(sd)
# NOTE(review): 'a' is a *value*, not a key — SortedDict.index expects a
# key, so this call raises (ValueError on py2; on py3 comparing 'a' with
# the int keys raises TypeError first).  Looks like leftover
# experimentation; confirm before reuse.
sd.index('a')
# viewvalues() is the Python 2 dict-view API (values() on py3).
sdc = cycle(sd.viewvalues())

print(next(sdc))
sd.pop(2)
# NOTE(review): continuing a cycle() over a view after popping from the
# underlying dict — behaviour depends on cycle's internal buffering.
print(next(sdc))

print(next(sdc))

# class Players(OrderedDict):
# 	def __iter__(self):
# 		return iter(self.values())

# 	def __next__(self):
# 		return

# 	def choose_starting_player(self):
# 		self.players = list(self.values())
# 		starting_player = choice(list(self.values()))
# 		self.i = list(self.items())
# 		print(self.i)
def test_index_key():
    # Keys 0..99 sorted descending via the negate key function; 7 is the
    # load factor of the old SortedDict(key, load, iterable) API.
    pairs = ((num, num) for num in range(100))
    temp = SortedDict(negate, 7, pairs)
    for num in range(100):
        assert temp.index(num) == 99 - num
Example #11
0
    def calcOrders(cols):
        """Assign integer display orders to the nodes of each column so that
        each column roughly follows the ordering of its left neighbours
        (barycenter-style averaging — presumably a Sugiyama-like layout
        heuristic; confirm against the surrounding module)."""

        def calcOrder(node):
            # Average order of the node's left-connected neighbours,
            # weighted by node.weight; None for weightless (leaf) nodes.
            if node.weight == 0:
                return None
            oSum = 0
            for lc in node.leftCons:
                oSum += lc.leftNode.order
            return float(oSum) / node.weight

        def sumWeights(nodes, start, end):
            # Total weight of nodes[start:end].
            summa = 0
            for n in nodes[start:end]:
                summa += n.weight
            return summa

        # 1st column fix order
        for i, node in enumerate(cols[0]):
            node.order = i
        # adjust column order to previous columns order
        for col in cols[1:]:
            # order by nodes weight
            weightOrderedList = sorted(col,
                                       key=lambda node: node.weight,
                                       reverse=True)
            # print 'weightOrderedList={}'.format([n.weight for n in weightOrderedList])
            dstCol = SortedDict()  # {order: Node}
            for n in weightOrderedList:  # nodes with higher weight come 1st
                # order is a space selector
                order = calcOrder(n)
                if order is None:
                    # place left leaf nodes to top
                    if len(dstCol):
                        order = dstCol.iloc[0] - 0.1
                    else:
                        order = 0.0
                elif order in dstCol:
                    # place is reserved, place to the side with lower weight sum
                    i = dstCol.index(order)
                    nodes = dstCol.values()
                    lSum = sumWeights(nodes, 0, i)
                    iNext = i + 1
                    if iNext < len(nodes):
                        rSum = sumWeights(nodes, iNext, len(nodes))
                        if lSum < rSum:
                            if i > 0:
                                order = 0.5 * (nodes[i - 1].order + order)
                            else:
                                order -= 0.1
                    else:
                        order += 0.1
                # NOTE(review): if the collision branch above leaves
                # `order` unchanged (lSum >= rSum with a right side), the
                # assignment below silently overwrites the node already
                # stored at that key — confirm this is intended.
                n.order = order
                dstCol[order] = n
            # reorder column
            col.sort(key=lambda n: n.order)
            # print 'col: {}'.format([n.order for n in col])
            # set integer order
            for i, node in enumerate(col):
                node.order = i
        for col in cols:
            for node in col:
                node.sortConnections()
Example #12
0
class CombinedLog:
    """Timestamp-ordered merge of per-peer commit logs.

    Attributes:
        logs            : SortedDict mapping timestamp -> merged commit dict
                          ({'new_file', 'modified', 'copied', 'deleted',
                          'timestamp'}).
        views           : timestamp -> {fname: (digest, peer)} snapshot of the
                          file tree after that commit was applied.
        last_timestamp  : newest timestamp seen so far.
    """

    def __init__(self):
        self.logs = SortedDict()
        self.views = dict()
        self.last_timestamp = None

    def add_commit(self, peer_name, appended_commit):
        """Insert or merge *appended_commit* from *peer_name*, then rebuild
        all views from its timestamp onward."""
        timestamp = appended_commit['timestamp']

        if self.last_timestamp is None or self.last_timestamp < timestamp:
            self.last_timestamp = timestamp

        if timestamp in self.logs:
            orig_commit = self.logs[timestamp]
            self.merge_commit(peer_name, orig_commit, appended_commit)
        else:
            # First commit at this timestamp: tag every entry with the
            # peer that reported it.
            new_file_dict = dict()
            for fname, digest in appended_commit['new_file'].items():
                new_file_dict[fname] = (digest, peer_name)

            modified_dict = dict()
            for fname, digest in appended_commit['modified'].items():
                modified_dict[fname] = (digest, peer_name)

            copied_dict = dict()
            for occurence in appended_commit['copied']:
                copied_dict[occurence] = peer_name

            deleted_dict = dict()
            for fname in appended_commit['deleted']:
                deleted_dict[fname] = peer_name

            logged_commit = {
                'new_file': new_file_dict,
                'modified': modified_dict,
                'copied': copied_dict,
                'deleted': deleted_dict,
                'timestamp': appended_commit['timestamp'],
            }

            self.logs[timestamp] = logged_commit

        self.replay_from(timestamp)

    def merge_commit(self, peer_name, orig_commit, appended_commit):
        """Merge *appended_commit* (reported by *peer_name*) into
        *orig_commit* in place; both must share the same timestamp.

        For file->digest sections the lexicographically smallest digest
        wins; for copied/deleted sections the first reporting peer wins.
        (Refactored from four copy-pasted loops — behavior unchanged.)
        """
        assert orig_commit['timestamp'] == appended_commit['timestamp']

        for section in ('new_file', 'modified'):
            orig_dict = orig_commit[section]
            for fname, new_digest in appended_commit[section].items():
                if fname not in orig_dict or orig_dict[fname][0] > new_digest:
                    orig_dict[fname] = (new_digest, peer_name)

        for section in ('copied', 'deleted'):
            orig_dict = orig_commit[section]
            for occurence in appended_commit[section]:
                if occurence not in orig_dict:
                    orig_dict[occurence] = peer_name

    def replay_from(self, timestamp):
        """Recompute the view snapshot for every logged timestamp that is
        >= *timestamp*, starting from the snapshot just before it."""
        assert timestamp in self.logs
        lower_ind = self.logs.index(timestamp) - 1

        if lower_ind >= 0:
            lower_timestamp = self.logs.peekitem(lower_ind)[0]
            cur_view = self.views[lower_timestamp]
        else:
            cur_view = dict()

        for stamp in self.logs.irange(timestamp):
            commit = self.logs[stamp]
            cur_view = cur_view.copy()

            # apply copy (only if the source file exists)
            for (from_name, to_name), peer in commit['copied'].items():
                if from_name in cur_view:
                    # workaround to copy tuple
                    digest, peer = cur_view[from_name]
                    cur_view[to_name] = (digest, peer)

            # apply creation (only if the file does not exist yet)
            for fname, (digest, peer) in commit['new_file'].items():
                if fname not in cur_view:
                    cur_view[fname] = (digest, peer)

            # apply modification (only to files that already exist)
            for fname, (digest, peer) in commit['modified'].items():
                if fname in cur_view:
                    cur_view[fname] = (digest, peer)

            # apply deletion
            for fname, peer_name in commit['deleted'].items():
                if fname in cur_view:
                    del cur_view[fname]

            self.views[stamp] = cur_view

    def get_logical_view(self):
        """Return {fname: digest} for the newest snapshot, dropping the
        peer attribution."""
        if self.views:
            assert self.last_timestamp is not None
            return {
                fname: digest
                for fname, (digest,
                            peer) in self.views[self.last_timestamp].items()
            }
        else:
            return dict()

    def get_combined_history(self):
        """Return the merged history as a list of commits with the peer
        attribution stripped (same shape as the per-peer input commits)."""
        def transform(logged_commit):
            return {
                'new_file': {
                    fname: digest
                    for fname, (digest,
                                peer) in logged_commit['new_file'].items()
                },
                'modified': {
                    fname: digest
                    for fname, (digest,
                                peer) in logged_commit['modified'].items()
                },
                'copied':
                set((from_name, to_name)
                    for (from_name,
                         to_name), peer in logged_commit['copied'].items()),
                'deleted':
                set(fname for fname, peer in logged_commit['deleted'].items()),
                'timestamp':
                logged_commit['timestamp'],
            }

        return list(map(transform, self.logs.values()))
Example #13
0
class Model(object):
  '''
  The model of a Stranbeest. The Model consists of a set of nodes, edges and boundary
  conditions. Each node has a unique name and a x and y position which may change
  whenever the simuation is incremented. Each node introduces two degrees of freedom.
  The edges are specified by the nodes they are connecting. The edges are the push/pull
  rods which connect the edges whith one another. An edges keeps the distances between
  two nodes constant and therefore constrains exactly one degree of freedom in the system.
  '''

  def __init__(self):
    '''
    Constructor
    '''
    self._nodes = SortedDict()   # name -> (x, y)
    self._edges = defaultdict(set)  # name -> set of connected node names

  def addNode(self,name,x,y):
    '''Register a new node; raises if the name is taken or an argument has the wrong type.'''
    if not isinstance(name,str  ): raise Exception("The 1st argument must be the node's name as str.")
    if not isinstance(x   ,float): raise Exception("The 2nd argument must be the node's x position as float.")
    # Fixed copy-paste bug: this message claimed "2nd argument" for y.
    if not isinstance(y   ,float): raise Exception("The 3rd argument must be the node's y position as float.")
    if name in self._nodes: raise Exception( 'There already exists a node by the name of "%(name)s"' % locals() )
    self._nodes[name] = x,y
    # NOTE(review): adding a node resets the simulation clock to 0 —
    # confirm this is intentional rather than leftover initialization.
    self.__t = 0.0
    for listener in self.onNodeAddListeners:
      listener(name,x,y)

  def addEdge(self,node1,node2):
    '''Connect two distinct nodes with a rigid rod (stored symmetrically).'''
    if node1 == node2:
      raise Exception('"node1" cannot be equal to "node2".')
    self._edges[node1].add(node2)
    self._edges[node2].add(node1)
    for listener in self.onEdgeAddListeners:
      listener( min(node1,node2), max(node1,node2) )

  def pos(self,name):
    '''Return the (x, y) position of the named node.'''
    return self._nodes[name]

  def move(self,name,x,y):
    '''Set the position of an existing node and notify listeners.'''
    self._nodes[name] = x,y
    for listener in self.onNodeMoveListeners:
      listener(name,x,y)

  def state(self):
    '''Return the flat state vector [x0, y0, x1, y1, ...] in node-name order.'''
    return fromiter( chain.from_iterable( self._nodes.values() ), float )

  def setState(self,state):
    '''Apply a flat state vector (same layout as state()) to the nodes.'''
    # Relies on the sortedcontainers keys view being indexable.
    for i,(x,y) in enumerate( zip(state[::2],state[1::2]) ):
      self.move(self._nodes.keys()[i],x,y)

  @property
  def t(self):
    '''Current simulation time.'''
    return self.__t

  def increment(self,dt):
    '''Advance the simulation by dt using classic 4th-order Runge-Kutta.'''
    v = self.v
    t0 = self.__t
    x0 = self.state()
    # https://en.wikipedia.org/wiki/Runge%E2%80%93Kutta_methods#The_Runge.E2.80.93Kutta_method
    k0 = v(x0,           t0)
    k1 = v(x0+k0*(dt/2), t0+dt/2)
    k2 = v(x0+k1*(dt/2), t0+dt/2)
    k3 = v(x0+k2*(dt),   t0+dt)
    self.setState( x0 + dt/6 * (k0+k1+k2+k3) )
    self.__t += dt

  def v(self,x,t):
    '''Solve for the velocity vector at state x, time t.

    One linear equation per edge keeps the rod length constant
    (relative velocity along the rod is zero); boundary conditions
    supply the remaining equations.
    '''
    lhs = zeros( 2*[len(x)] )
    rhs = zeros( len(x) )
    iRows = iter( range( len(x) ) )
    for start,end in self.edges():
      iStart = 2*self._nodes.index(start)
      iEnd   = 2*self._nodes.index(end)
      iRow = next(iRows)
      dx = x[iEnd+0] - x[iStart+0] 
      dy = x[iEnd+1] - x[iStart+1]
      lhs[iRow,iStart+0] = dx; lhs[iRow,iEnd+0] = -dx
      lhs[iRow,iStart+1] = dy; lhs[iRow,iEnd+1] = -dy
      rhs[iRow] = 0
    for bc in self.bcs:
      bc.addEquations(x,t,iRows,lhs,rhs)
    return linalg.solve(lhs,rhs)

  def nodes(self):
    # NOTE(review): iteritems() is Python-2-only; the rest of this class
    # otherwise runs on py3 — confirm the target interpreter.
    return self._nodes.iteritems()

  def edges(self):
    '''Yield each undirected edge exactly once as (smaller, larger) names.'''
    for node1,neighbors in self._edges.items():
      for node2 in neighbors:
        if node1 < node2:
          yield node1,node2

  bcs = []

  # NOTE(review): class-level mutable attributes are shared by ALL Model
  # instances (see FIXMEs below) — listeners registered on one model fire
  # for every model.
  onEdgeAddListeners = set() # <- FIXME should be a multiset
  onNodeAddListeners = set() # <- FIXME should be a multiset
  onNodeMoveListeners = set() # <- FIXME should be a multiset
Example #14
0
class Toolbox(QAbstractListModel):
    """A list of scheme nodes that can be dragged to the scheme.
    """

    DragMimeType = "application/x-toolbox-drag"
    """Toolbox assumes that all its items have the same MIME type."""
    def __init__(self, parent=None):
        super().__init__(parent)

        # name -> item, kept sorted by name (the displayed text).
        self._items = SortedDict()

    def getView(self):
        """Get a new QListView suitable for displaying the toolbox."""
        view = QListView()
        view.setModel(self)

        view.setSelectionMode(view.SingleSelection)
        view.setDragEnabled(True)
        view.setDragDropMode(view.DragOnly)

        return view

    def rowCount(self, parent=QModelIndex()):
        return len(self._items)

    def data(self, index: QModelIndex, role=Qt.DisplayRole):
        # Only the display role is served: the item's name (its sort key).
        if role != Qt.DisplayRole:
            return None
        return self._items.peekitem(index.row())[0]

    def headerData(self,
                   section,
                   orientation: Qt.Orientation,
                   role=Qt.DisplayRole):
        if role != Qt.DisplayRole:
            return None
        return section if orientation == Qt.Vertical else "Node toolbox"

    def addItem(self, name, item):
        """Insert *item* under *name* at its sorted row position."""
        row = self._items.bisect_right(name)

        self.beginInsertRows(QModelIndex(), row, row)
        self._items[name] = item
        self.endInsertRows()

    def removeItem(self, name):
        """Remove an item with a specified name from a toolbox.
        If an item with such a name does not exist, does nothing.
        """
        if name not in self._items:
            return None

        row = self._items.index(name)

        self.beginRemoveRows(QModelIndex(), row, row)
        removed = self._items.pop(name)
        self.endRemoveRows()

        return removed

    def flags(self, index: QModelIndex):
        base_flags = super().flags(index)
        if not index.isValid():
            return base_flags
        return base_flags | Qt.ItemIsDragEnabled

    def mimeTypes(self):
        return [self.DragMimeType]

    def mimeData(self, indexes):
        assert len(indexes) == 1

        package = QMimeData()
        payload = self._items.peekitem(indexes[0].row())[1]
        mime.dump(payload, package, self.DragMimeType, hooks=hooks.qt)

        return package

    def schemeDropEvent(self, scheme, event):
        """Event to be executed when an item from this toolbox gets dropped into a scheme.
        This function can be added as a custom drop event for this toolbox's MIME type, which is usually done by the
        SchemeEditor.
        """
        package = event.mimeData()
        node = mime.load(package, self.DragMimeType, hooks=hooks.qt)

        # Center the dropped node on the cursor position.
        half_width = node.boundingRect().size().width() / 2
        half_height = node.boundingRect().size().height() / 2
        node.setPos(event.scenePos() - QPointF(half_width, half_height))
        scheme.addItem(node)

    # Serialization ====================================================================================================
    def serialize(self) -> dict:
        return self._items

    @classmethod
    def deserialize(cls, data: dict):
        instance = cls()
        instance._items = data

        return instance
class FileTable(object):
    """Consistent-hashing ring for an SDFS cluster node (Python 2 code).

    ``ring`` maps a truncated SHA-224 hash of a node's IP to a record
    ``{'ip': <node ip>, 'files': [<sdfs filenames stored there>]}``.
    Each file is replicated on 3 consecutive ring positions (see
    lookup / insert / delete, which all walk ``xrange(3)``).
    """
    def __init__(self, myip, server):
        super(FileTable, self).__init__()
        self.ring = SortedDict()  # node hash -> {'ip': ..., 'files': [...]}
        self.hasher = hashlib.sha224
        self.myhash = self.hash(myip)  # this node's own ring position
        self.add_node(myip)

        self.server = server  # used to push replicas on node failure

    def hash(self, key):
        # Truncated SHA-224 hex digest (last 10 hex chars dropped).
        return self.hasher(key).hexdigest()[:-10]

    def hash_at(self, idx):
        """Return the hash key at ring position ``idx`` (wraps around)."""
        idx %= len(self.ring)
        hash = self.ring.iloc[idx]
        return hash

    def add_node(self, ip):
        """Insert a node into the ring with an empty file list."""
        hash = self.hash(ip)
        self.ring[hash] = {'ip': ip, 'files': []}

        SDFS_LOGGER.info('After adding %s - %s' % (ip, repr(self.ring)))

    def remove_node(self, failed_list):
        """Drop failed nodes from the ring.

        When this node is the direct successor (or, with two failures,
        the second successor) of a failed node, it inherits that node's
        files and asks ``self.server.handle_replica`` to re-replicate
        them to the right neighbours.
        """
        start_time = time.time()
        # this is for debug
        flag  = False
            
        # deep copy failed list because it will be reset soon
        ip_list = list(failed_list)

        # change the order of failed node
        # make sure the smaller id node be handled first
        if len(ip_list) == 2:
            # NOTE(review): self.hash() returns a hex *string*, so comparing
            # it to the ints 0 / len(self.ring) - 1 is always False here --
            # this branch looks dead; self.ring.index(...) was probably
            # intended. TODO confirm against the elif below.
            if self.hash(ip_list[0]) == 0 and self.hash(ip_list[1]) == len(self.ring) - 1:
                ip_list[0], ip_list[1] = ip_list[1], ip_list[0]
            elif self.ring.index(self.hash(ip_list[0])) == self.ring.index(self.hash(ip_list[1])) + 1:
                ip_list[0], ip_list[1] = ip_list[1], ip_list[0]

        for ip in ip_list:
            hash = self.hash(ip)
            idx = self.ring.index(hash)

            # if the node is not the direct successor of the failed node, do nothing
            if len(ip_list) == 2 and ip == ip_list[1] and self.hash_at((idx + 2) % len(self.ring)) == self.myhash:
                continue

            # Inherit files only if we are the failed node's successor, or its
            # second successor when two adjacent nodes failed together.
            if self.hash_at((idx + 1) % len(self.ring)) == self.myhash or (self.hash_at((idx + 2) % len(self.ring)) == self.myhash and len(ip_list) == 2):
                # this is for debug
                flag = True

                heritage = set(self.ring[hash]['files'])  # files the dead node held
                my_files = set(self.ring[self.myhash]['files'])
                next_files = set(self.ring[self.hash_at(idx + 2)]['files'])

                # Determine the destination of each inherited file: files we
                # lack come to us; files we already hold go further down the
                # ring so the replica count is restored.
                to_me = heritage - my_files
                to_next = (heritage & my_files) - next_files
                to_next_next = heritage & my_files & next_files
                replica_list = [list(to_me), list(to_next), list(to_next_next)]
                
                self.ring[self.myhash]['files'].extend(to_me)

                # handle replica
                # NOTE(review): dest for to_me is the failed node's
                # predecessor; the others are our 1st/2nd successors --
                # presumably where new copies must be created; confirm
                # against server.handle_replica.
                dest_ip_to_me = self.ring[self.hash_at(self.ring.index(hash) - 1)]['ip']
                dest_ip_to_next = self.ring[self.hash_at(self.ring.index(self.myhash) + 1)]['ip']
                dest_ip_to_next_next = self.ring[self.hash_at(self.ring.index(self.myhash) + 2)]['ip']
                dest_ip_list = [dest_ip_to_me, dest_ip_to_next, dest_ip_to_next_next]
                
                del self.ring[hash]

                self.server.handle_replica(replica_list, dest_ip_list, ip_list)
            
            else:
                del self.ring[hash]
            
            elapsed_time = time.time() - start_time
            if flag:
                print "It takes", elapsed_time, "to handle replica"

    def lookup(self, sdfs_filename):
        """Return the IPs of the 3 replica nodes responsible for a file."""
        hash = self.hash(sdfs_filename)
        # First ring position at or after the file's hash, wrapping to 0.
        idx = self.ring.bisect_left(hash) if self.ring.bisect_left(hash) < len(self.ring) else 0
        ip_list = [self.ring[self.hash_at(idx + i)]['ip'] for i in xrange(3)]
        return ip_list

    def insert(self, sdfs_filename):
        """Record ``sdfs_filename`` on its 3 successor nodes."""
        hash = self.hash(sdfs_filename)
        idx = self.ring.bisect_left(hash) if self.ring.bisect_left(hash) < len(self.ring) else 0
        for i in xrange(3):
            node_hash = self.hash_at(idx + i)
            self.ring[node_hash]['files'].append(sdfs_filename)
            
            SDFS_LOGGER.info('Inserted %s to %s' % (sdfs_filename, self.ring[node_hash]['ip']))

    def delete(self, sdfs_filename):
        """Remove ``sdfs_filename`` from its 3 successor nodes."""
        hash = self.hash(sdfs_filename)
        idx = self.ring.bisect_left(hash) if self.ring.bisect_left(hash) < len(self.ring) else 0
        for i in xrange(3):
            node_hash = self.hash_at(idx + i)
            self.ring[node_hash]['files'].remove(sdfs_filename)
            
            SDFS_LOGGER.info('Deleted %s to %s' % (sdfs_filename, self.ring[node_hash]['ip']))

    def update_replica(self, replica_list, dest_ip_list):
        """Merge ``replica_list[i]`` into dest node i's file list (deduped)."""
        for i in xrange(3):
            self.ring[self.hash(dest_ip_list[i])]['files'] = list(set(self.ring[self.hash(dest_ip_list[i])]['files'] + replica_list[i]))


    def list_my_store(self):
        """Print the files stored on this node."""
        print '-' * 5 + 'my files are:'
        for f in self.ring[self.myhash]['files']:
            print f,
        print
        print '-' * 5 + 'that is all'

    def list_file_location(self):
        """Print, for every known file, the IPs of the nodes holding it."""
        all_files = set()
        for value in self.ring.values():
            all_files.update(set(value['files']))

        for f in all_files:
            print f + ' is stored at ',
            for value in self.ring.values():
                if f in value['files']:
                    print value['ip'],
            print 
Пример #16
0
def test_index_key():
    """index() reflects descending key order under the *negate* key func."""
    sd = SortedDict(negate, ((num, num) for num in range(100)))
    sd._reset(7)
    for num in range(100):
        assert sd.index(num) == 99 - num
def test_index():
    """index() gives the alphabetical position and honors slice bounds."""
    pairs = [(letter, pos) for pos, letter in enumerate(string.ascii_lowercase)]
    sd = SortedDict(pairs)
    assert sd.index('a') == 0
    assert sd.index('f', 3, -3) == 5
class FileTable(object):
    """Consistent-hashing ring for an SDFS cluster node (Python 2 code).

    ``ring`` maps a truncated SHA-224 hash of a node's IP to a record
    ``{'ip': <node ip>, 'files': [<sdfs filenames stored there>]}``.
    Each file is replicated on 3 consecutive ring positions (see
    lookup / insert / delete, which all walk ``xrange(3)``).
    """
    def __init__(self, myip, server):
        super(FileTable, self).__init__()
        self.ring = SortedDict()  # node hash -> {'ip': ..., 'files': [...]}
        self.hasher = hashlib.sha224
        self.myhash = self.hash(myip)  # this node's own ring position
        self.add_node(myip)

        self.server = server  # used to push replicas on node failure

    def hash(self, key):
        # Truncated SHA-224 hex digest (last 10 hex chars dropped).
        return self.hasher(key).hexdigest()[:-10]

    def hash_at(self, idx):
        """Return the hash key at ring position ``idx`` (wraps around)."""
        idx %= len(self.ring)
        hash = self.ring.iloc[idx]
        return hash

    def add_node(self, ip):
        """Insert a node into the ring with an empty file list."""
        hash = self.hash(ip)
        self.ring[hash] = {'ip': ip, 'files': []}

        SDFS_LOGGER.info('After adding %s - %s' % (ip, repr(self.ring)))

    def remove_node(self, failed_list):
        """Drop failed nodes from the ring.

        When this node is the direct successor (or, with two failures,
        the second successor) of a failed node, it inherits that node's
        files and asks ``self.server.handle_replica`` to re-replicate
        them to the right neighbours.
        """
        start_time = time.time()
        # this is for debug
        flag = False

        # deep copy failed list because it will be reset soon
        ip_list = list(failed_list)

        # change the order of failed node
        # make sure the smaller id node be handled first
        if len(ip_list) == 2:
            # NOTE(review): self.hash() returns a hex *string*, so comparing
            # it to the ints 0 / len(self.ring) - 1 is always False here --
            # this branch looks dead; self.ring.index(...) was probably
            # intended. TODO confirm against the elif below.
            if self.hash(ip_list[0]) == 0 and self.hash(
                    ip_list[1]) == len(self.ring) - 1:
                ip_list[0], ip_list[1] = ip_list[1], ip_list[0]
            elif self.ring.index(self.hash(
                    ip_list[0])) == self.ring.index(self.hash(ip_list[1])) + 1:
                ip_list[0], ip_list[1] = ip_list[1], ip_list[0]

        for ip in ip_list:
            hash = self.hash(ip)
            idx = self.ring.index(hash)

            # if the node is not the direct successor of the failed node, do nothing
            if len(ip_list) == 2 and ip == ip_list[1] and self.hash_at(
                (idx + 2) % len(self.ring)) == self.myhash:
                continue

            # Inherit files only if we are the failed node's successor, or
            # its second successor when two adjacent nodes failed together.
            if self.hash_at(
                (idx + 1) % len(self.ring)) == self.myhash or (self.hash_at(
                    (idx + 2) % len(self.ring)) == self.myhash and len(ip_list)
                                                               == 2):
                # this is for debug
                flag = True

                heritage = set(self.ring[hash]['files'])  # files the dead node held
                my_files = set(self.ring[self.myhash]['files'])
                next_files = set(self.ring[self.hash_at(idx + 2)]['files'])

                # Determine the destination of each inherited file: files we
                # lack come to us; files we already hold go further down the
                # ring so the replica count is restored.
                to_me = heritage - my_files
                to_next = (heritage & my_files) - next_files
                to_next_next = heritage & my_files & next_files
                replica_list = [list(to_me), list(to_next), list(to_next_next)]

                self.ring[self.myhash]['files'].extend(to_me)

                # handle replica
                # NOTE(review): dest for to_me is the failed node's
                # predecessor; the others are our 1st/2nd successors --
                # presumably where new copies must be created; confirm
                # against server.handle_replica.
                dest_ip_to_me = self.ring[self.hash_at(
                    self.ring.index(hash) - 1)]['ip']
                dest_ip_to_next = self.ring[self.hash_at(
                    self.ring.index(self.myhash) + 1)]['ip']
                dest_ip_to_next_next = self.ring[self.hash_at(
                    self.ring.index(self.myhash) + 2)]['ip']
                dest_ip_list = [
                    dest_ip_to_me, dest_ip_to_next, dest_ip_to_next_next
                ]

                del self.ring[hash]

                self.server.handle_replica(replica_list, dest_ip_list, ip_list)

            else:
                del self.ring[hash]

            elapsed_time = time.time() - start_time
            if flag:
                print "It takes", elapsed_time, "to handle replica"

    def lookup(self, sdfs_filename):
        """Return the IPs of the 3 replica nodes responsible for a file."""
        hash = self.hash(sdfs_filename)
        # First ring position at or after the file's hash, wrapping to 0.
        idx = self.ring.bisect_left(hash) if self.ring.bisect_left(hash) < len(
            self.ring) else 0
        ip_list = [self.ring[self.hash_at(idx + i)]['ip'] for i in xrange(3)]
        return ip_list

    def insert(self, sdfs_filename):
        """Record ``sdfs_filename`` on its 3 successor nodes."""
        hash = self.hash(sdfs_filename)
        idx = self.ring.bisect_left(hash) if self.ring.bisect_left(hash) < len(
            self.ring) else 0
        for i in xrange(3):
            node_hash = self.hash_at(idx + i)
            self.ring[node_hash]['files'].append(sdfs_filename)

            SDFS_LOGGER.info('Inserted %s to %s' %
                             (sdfs_filename, self.ring[node_hash]['ip']))

    def delete(self, sdfs_filename):
        """Remove ``sdfs_filename`` from its 3 successor nodes."""
        hash = self.hash(sdfs_filename)
        idx = self.ring.bisect_left(hash) if self.ring.bisect_left(hash) < len(
            self.ring) else 0
        for i in xrange(3):
            node_hash = self.hash_at(idx + i)
            self.ring[node_hash]['files'].remove(sdfs_filename)

            SDFS_LOGGER.info('Deleted %s to %s' %
                             (sdfs_filename, self.ring[node_hash]['ip']))

    def update_replica(self, replica_list, dest_ip_list):
        """Merge ``replica_list[i]`` into dest node i's file list (deduped)."""
        for i in xrange(3):
            self.ring[self.hash(dest_ip_list[i])]['files'] = list(
                set(self.ring[self.hash(dest_ip_list[i])]['files'] +
                    replica_list[i]))

    def list_my_store(self):
        """Print the files stored on this node."""
        print '-' * 5 + 'my files are:'
        for f in self.ring[self.myhash]['files']:
            print f,
        print
        print '-' * 5 + 'that is all'

    def list_file_location(self):
        """Print, for every known file, the IPs of the nodes holding it."""
        all_files = set()
        for value in self.ring.values():
            all_files.update(set(value['files']))

        for f in all_files:
            print f + ' is stored at ',
            for value in self.ring.values():
                if f in value['files']:
                    print value['ip'],
            print