Example #1
 def initTable(results):
     nodes = []
     for addr, result in results.items():
         if result[0]:
             nodes.append(Node(result[1], addr[0], addr[1]))
     spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
                              self.ksize, self.alpha)
     return spider.find()
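Note that initTable takes no self parameter yet uses self.protocol and self.node: in the Twisted-era kademlia it is a closure defined inside Server.bootstrap(). A rough sketch of the enclosing method for context, assuming the deferredDict helper from that era's kademlia.utils:

 def bootstrap(self, addrs):
     # ping each bootstrap address; initTable (the closure above) runs once
     # every ping has resolved and seeds the crawl with whoever answered
     ds = {}
     for addr in addrs:
         ds[addr] = self.protocol.ping(addr, self.node.id)
     return deferredDict(ds).addCallback(initTable)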
Example #2
    def _setWithTimestamp(self, existingValue, key, value, requestedTimeStamp,
                          encryptionKey, ttl):
        """
        Sends the command to store the key/value pair on all required nodes.
        :param existingValue: The current (value,timestamp) associated with the key, if one exists.
        :param key: The key to store the value under.
        :param value: The value to store.
        :param requestedTimeStamp: An explicit timestamp if desired, if None the existing timestamp will be
        incremented by one.
        """
        if requestedTimeStamp is None:
            if existingValue:
                existingTimestamp = decodeTimestamp(existingValue[1], encryptionKey)
                if not existingTimestamp:
                    return defer.succeed(False)
                timestamp = str(existingTimestamp + random.randint(1, 100))
                #timestamp = existingValue[1] + 1
            else:
                timestamp = random.randint(0, 1000)

            self.log.debug(
                "setting '%s' = '%s' on network with automatic timestamp '%s'"
                % (key, value, timestamp))
        else:
            timestamp = requestedTimeStamp
            self.log.debug(
                "setting '%s' = '%s' on network with explicit timestamp '%s'" %
                (key, value, timestamp))

        dkey = digest(key)

        def store(nodes):
            self.log.info("setting '%s' on %s" % (key, map(str, nodes)))
            ds = [
                self.protocol.callStore(n, dkey, [
                    value,
                    encodeTimestamp(str(timestamp), encryptionKey),
                    encryptionKey, ttl, timestamp
                ]) for n in nodes
            ]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        self.log.debug("Found %s neighbours to store values at" % str(nearest))
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" %
                             key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(store)
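digest(key) maps the key onto the node-id space before the crawl. In the upstream kademlia.utils this is just a SHA-1 of the key bytes; a minimal equivalent for reference (the fork above may use its own variant):

import hashlib

def digest(string):
    # reduce arbitrary input to a 160-bit id, the same length as node ids
    if not isinstance(string, bytes):
        string = str(string).encode('utf8')
    return hashlib.sha1(string).digest()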
Example #3
    async def _refresh_table(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper), then republish stale keys.
        """
        results = []
        for node_id in self.protocol.get_refresh_ids():
            node = Node(node_id)
            nearest = self.protocol.router.find_neighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                     self.alpha)
            results.append(spider.find())

        await asyncio.gather(*results)

        for dkey, value in self.storage.iter_older_than(3600):
            await self.set_digest(dkey, value)
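_refresh_table is a coroutine, so something must re-run it on a timer. A minimal sketch of an hourly scheduler in the style of the upstream library; the wrapper name refresh_table and the attribute refresh_loop are illustrative:

import asyncio

def refresh_table(self):
    # fire the async refresh without awaiting it, then re-arm the hourly timer
    asyncio.ensure_future(self._refresh_table())
    loop = asyncio.get_event_loop()
    self.refresh_loop = loop.call_later(3600, self.refresh_table)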
Example #4
 async def bootstrap(self, addrs):
     """
     Bootstrap the server by connecting to other known nodes in the network.

     Args:
         addrs: A list of (ip, port) tuple pairs. Note that only IP
                addresses are acceptable; hostnames will cause an error.
     """
     log.debug("Attempting to bootstrap node with %i initial contacts",
               len(addrs))
     cos = list(map(self.bootstrap_node, addrs))
     gathered = await asyncio.gather(*cos)
     nodes = [node for node in gathered if node is not None]
     spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize,
                              self.alpha)
     return await spider.find()
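For context, a typical caller-side flow with the asyncio kademlia package looks roughly like this (addresses and port are placeholders):

import asyncio
from kademlia.network import Server

async def main():
    server = Server()
    await server.listen(8468)
    # join the overlay through one or more already-known peers
    await server.bootstrap([("1.2.3.4", 8468)])
    await server.set("some-key", "some value")
    print(await server.get("some-key"))
    server.stop()

asyncio.run(main())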
Example #5
    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for id in self.protocol.getRefreshIDs():
            node = Node(id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                     self.ksize, self.alpha)
            ds.append(spider.find())

        def republishKeys(_):
            ds = []
            # Republish keys older than one hour
            for key, value, timestamp in self.storage.iteritemsOlderThan(3600):
                ds.append(self.set(key, value, timestamp))
            return defer.gatherResults(ds)

        return defer.gatherResults(ds).addCallback(republishKeys)
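In the Twisted-based version this refresh is typically re-armed with a LoopingCall once the server starts listening; a sketch (the attribute name refreshLoop is illustrative):

from twisted.internet.task import LoopingCall

def listen(self, port):
    # ... after the UDP protocol is up ...
    self.refreshLoop = LoopingCall(self.refreshTable)
    self.refreshLoop.start(3600)  # run refreshTable once an hour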
Example #6
    async def delete_tag(self, key, value):
        dkey = digest(key)

        node = Node(dkey)
        nearest = self.protocol.router.find_neighbors(node)
        if not nearest:
            log.warning("There are no known neighbors to set key %s", key)
            return None
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        nodes = await spider.find()
        log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))

        results = [
            self.protocol.call_delete_tag(n, dkey, key, value) for n in nodes
        ]
        # return true only if at least one delete_tag call succeeded
        return any(await asyncio.gather(*results))
Example #7
    async def delete(self, key, hash=True):
        dkey = key
        if hash:
            dkey = digest(key)
        """
        if self.storage.get(dkey) is not None:
            # delete the key from here
            self.storage.delete(dkey)
        """
        # also delete the key from neighbors
        node = Node(dkey)
        nearest = self.protocol.router.find_neighbors(node)
        if not nearest:
            log.warning("There are no known neighbors to get key %s", key)
            return None
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        nodes = await spider.find()

        results = [self.protocol.call_delete(n, dkey) for n in nodes]
        # return true only if at least one delete call succeeded
        return any(await asyncio.gather(*results))
Example #8
    async def set_digest(self, dkey, key, name, value, hash=True):
        node = Node(dkey)

        nearest = self.protocol.router.find_neighbors(node)
        if not nearest:
            log.warning("There are no known neighbors to set key %s",
                        dkey.hex())
            return False

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        nodes = await spider.find()
        log.info("setting '%s' on %s", dkey.hex(), list(map(str, nodes)))

        # if this node is close too, then store here as well
        biggest = max([n.distance_to(node) for n in nodes])
        if self.node.distance_to(node) < biggest:
            self.storage.set(dkey, key, name, value, hash)
        results = [
            self.protocol.call_store(n, dkey, key, name, value, hash)
            for n in nodes
        ]
        # return true only if at least one store call succeeded
        return any(await asyncio.gather(*results))
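The local-store check relies on Kademlia's XOR metric: distance_to XORs the two ids, and the node keeps a copy itself only when it is closer to the key's digest than the farthest of the nodes returned by the crawl. A standalone illustration of the metric (xor_distance is our name, not the library's):

import hashlib

def xor_distance(id_a: bytes, id_b: bytes) -> int:
    # Kademlia's metric: XOR the ids as integers; a smaller result means
    # a longer shared prefix, i.e. the two ids are "closer" together
    return int.from_bytes(id_a, 'big') ^ int.from_bytes(id_b, 'big')

key_id = hashlib.sha1(b'some-key').digest()
node_a = hashlib.sha1(b'node-a').digest()
node_b = hashlib.sha1(b'node-b').digest()
closest = min((node_a, node_b), key=lambda n: xor_distance(n, key_id))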