Example #1
    def remove(self, key, value):
        """
        For the given key remove the given list values from the set in the network.
        """
        dkey = digest(key)
        node = Node(dkey)
        _log.debug("Server:remove %s" % base64.b64encode(dkey))

        def remove_(nodes):
            # if this node is close too, then store here as well
            max_distance = max([n.distanceTo(node) for n in nodes]) if nodes else sys.maxsize
            if self.node.distanceTo(node) < max_distance:
                try:
                    pvalue = json.loads(value)
                    self.set_keys.add(dkey)
                    if dkey in self.storage:
                        old_value = json.loads(self.storage[dkey])
                        new_value = list(set(old_value) - set(pvalue))
                        self.storage[dkey] = json.dumps(new_value)
                        _log.debug("%s local remove key: %s old: %s remove: %s new: %s" % (base64.b64encode(node.id), base64.b64encode(dkey), old_value, pvalue, new_value))
                except:
                    _log.debug("Trying to remove somthing not a JSON coded list %s" % value, exc_info=True)
            ds = [self.protocol.callRemove(n, dkey, value) for n in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(remove_)
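
The remove/append examples in this listing treat each stored value as a JSON-encoded list used as a set. A minimal, self-contained sketch of that merge and removal logic, separate from the DHT plumbing (the function names here are illustrative only):

import json

def merge_append(stored_json, appended_json):
    # Union of the stored list and the appended list, both JSON-encoded,
    # mirroring the local branch of append_() further down in this listing.
    old = json.loads(stored_json)
    new = json.loads(appended_json)
    return json.dumps(list(set(old) | set(new)))

def merge_remove(stored_json, removed_json):
    # Difference of the stored list and the removed list, mirroring remove_().
    old = json.loads(stored_json)
    gone = json.loads(removed_json)
    return json.dumps(list(set(old) - set(gone)))

print(merge_append('["a", "b"]', '["b", "c"]'))  # a permutation of ["a", "b", "c"]
print(merge_remove('["a", "b", "c"]', '["b"]'))  # a permutation of ["a", "c"]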
Example #2
    def digest_set(self, dkey, value):
        """
        Set the given SHA1 digest key to the given value in the network.
        """
        node = Node(dkey)
        # this is useful for debugging messages
        hkey = binascii.hexlify(dkey)

        def store(nodes):
            self.log.info("setting '%s' on %s" % (hkey, map(str, nodes)))
            # if this node is close too, then store here as well
            if self.node.distanceTo(node) < max(
                [n.distanceTo(node) for n in nodes]):
                self.storage[dkey] = value
            ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" %
                             hkey)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(store)
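
Several of these examples collapse their DeferredList results through self._anyRespondSuccess, whose body is not shown here. A plausible sketch, assuming each RPC result is a (peer_reached, peer_response) pair as returned by the protocol's call* methods:

    def _anyRespondSuccess(self, responses):
        # responses: the result of a DeferredList, i.e. a list of
        # (callback_fired, result) pairs.  Report success if at least one
        # peer was reached and acknowledged the request.
        for fired, result in responses:
            if fired:
                peer_reached, peer_response = result
                if peer_reached and peer_response:
                    return True
        return False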
Example #3
    def send_message(self, nodeid, message):
        """
        Send a message to a given node on the network.
        """
        hexid = binascii.hexlify(nodeid)
        self.log.debug("messaging '%s' '%s'" % (hexid, message))
        node = Node(nodeid)

        def found_callback(nodes):
            self.log.debug("nearest nodes %s" % list(map(str, nodes)))
            nodes = [n for n in nodes if n.id == nodeid]
            if len(nodes) == 0:
                self.log.debug("couldn't find destination node")
                return defer.succeed(None)
            else:
                self.log.debug("found node %s" % binascii.hexlify(nodes[0].id))
                d = self.protocol.callMessage(nodes[0], message)
                return d.addCallback(lambda r: r[1] if r[0] else None)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to find %s" % hexid)
            return defer.succeed(None)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(found_callback)
Example #4
    def direct_message(self, nodeid, message):
        """Send direct message to a node.

        Spidercrawls the network to find the node and sends the message
        directly. This will fail if the node is behind a NAT and doesn't
        have a public ip.

        Args:
            nodeid: 160-bit node id of the receiver as bytes
            message: iu-msgpack-python serializable message data

        Returns:
            Deferred own transport address (ip, port) if successful else None
        """

        def found_callback(nodes):
            nodes = [n for n in nodes if n.id == nodeid]
            if len(nodes) == 0:
                return defer.succeed(None)
            else:
                d = self.protocol.callDirectMessage(nodes[0], message)
                return d.addCallback(lambda r: r[1] if r[0] else None)

        node = KademliaNode(nodeid)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            return defer.succeed(None)
        spider = NodeSpiderCrawl(
            self.protocol, node, nearest, self.ksize, self.alpha
        )
        return spider.find().addCallback(found_callback)
Example #5
 def initTable(results):
     nodes = []
     for addr, result in results.items():
         if result[0]:
             nodes.append(Node(result[1], addr[0], addr[1]))
     spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
     return spider.find()
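
initTable above is an inner callback of a bootstrap routine: it receives a dict mapping each contacted (ip, port) address to its ping result. A sketch of the surrounding call, assuming a deferredDict helper that fires once every ping Deferred has resolved (the helper name is an assumption, not shown in these examples):

 def bootstrap(self, addrs):
     # addrs: list of (ip, port) tuples for known bootstrap nodes.
     # initTable then receives {(ip, port): (responded, node_id), ...}.
     ds = {}
     for addr in addrs:
         ds[addr] = self.protocol.ping(addr, self.node.id)
     return deferredDict(ds).addCallback(initTable)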
Example #6
    async def _refresh_table(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for node_id in self.protocol.getRefreshIDs():
            node = Node(node_id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                     self.alpha)
            ds.append(spider.find())

        # do our crawling
        await asyncio.gather(*ds)

        # now republish keys older than one hour
        for dkey, value in self.storage.iteritemsOlderThan(3600):
            values_to_republish = []
            parsed_val = json.loads(value)
            if isinstance(parsed_val, list):
                values_to_republish = [json.dumps(val) for val in parsed_val]
            else:
                values_to_republish.append(value)

            for val in values_to_republish:
                await self._call_remote_persist(dkey, val)
Example #7
    def set(self, key, value):
        """
        Set the given key to the given value in the network.
        """
        _log.debug("setting '%s' = '%s' on network" % (key, value))
        dkey = digest(key)
        node = Node(dkey)

        def store(nodes):
            _log.debug("setting '%s' to %s on %s" %
                       (key, value, map(str, nodes)))
            # if this node is close too, then store here as well
            if (not nodes or self.node.distanceTo(node) < max(
                [n.distanceTo(node) for n in nodes]) or dkey in self.storage):
                _log.debug("setting '%s' to %s locally" % (key, value))
                self.storage[dkey] = value
            ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            _log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(store)
Example #8
 def initTable(results, challenge, id):
     nodes = []
     for addr, result in results.items():
         ip = addr[0]
         port = addr[1]
         if result[0]:
             resultId = result[1]['id']
             resultIdHex = resultId.encode('hex').upper()
             resultSign = result[1]['signature']
             data = self.protocol.certificateExists(resultIdHex)
             if not data:
                 identifier = "{}cert".format(resultIdHex)
                 self.protocol.callCertFindValue(
                     Node(resultId, ip, port), Node(identifier))
             else:
                 cert_stored = self.protocol.searchForCertificate(resultIdHex)
                 try:
                     OpenSSL.crypto.verify(
                         cert_stored, resultSign, challenge, "sha256")
                 except:
                     traceback.print_exc()
                 nodes.append(Node(resultId, ip, port))
     spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
                              self.ksize, self.alpha)
     return spider.find()
Example #9
 def initTable(results):
     nodes = []
     for addr, result in results.items():
         if result[0]:
             nodes.append(Node(result[1], addr[0], addr[1]))
     spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
     return spider.find()
Example #10
 def initTable(results, challenge, id):
     nodes = []
     for addr, result in results.items():
         ip = addr[0]
         port = addr[1]
         if result[0]:
             resultId = result[1]['id']
             resultIdHex = resultId.encode('hex').upper()
             resultSign = result[1]['signature']
             data = self.protocol.certificateExists(resultIdHex)
             if not data:
                 identifier = "{}cert".format(resultIdHex)
                 self.protocol.callCertFindValue(
                     Node(resultId, ip, port), Node(identifier))
             else:
                 cert_stored = self.protocol.searchForCertificate(
                     resultIdHex)
                 try:
                     self.runtime_credentials.verify_signed_data_from_certstring(
                         cert_stored, resultSign, challenge,
                         certificate.TRUSTSTORE_TRANSPORT)
                 except:
                     traceback.print_exc()
                 nodes.append(Node(resultId, ip, port))
     spider = NodeSpiderCrawl(self.protocol, self.node, nodes,
                              self.ksize, self.alpha)
     return spider.find()
Example #11
    def send_message(self, nodeid, message):
        """
        Send a message to a given node on the network.
        """
        hexid = binascii.hexlify(nodeid)
        self.log.debug("messaging '%s' '%s'" % (hexid, message))
        node = Node(nodeid)

        def found_callback(nodes):
            self.log.debug("nearest nodes %s" % list(map(str, nodes)))
            nodes = [n for n in nodes if n.id == nodeid]
            if len(nodes) == 0:
                self.log.debug("couldn't find destination node")
                return defer.succeed(None)
            else:
                self.log.debug("found node %s" % binascii.hexlify(nodes[0].id))
                d = self.protocol.callMessage(nodes[0], message)
                return d.addCallback(lambda r: r[1] if r[0] else None)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to find %s" % hexid)
            return defer.succeed(None)
        spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                 self.ksize, self.alpha)
        return spider.find().addCallback(found_callback)
Example #12
    def set(self, key, value):
        """
        Set the given key to the given value in the network.
        """
        self.log.debug("setting '%s' = '%s' on network" % (key, value))
        dkey = digest(key)
        node = Node(dkey)

        def store(nodes):
            self.log.info("setting '%s' on %s" % (key, list(map(str, nodes))))
            # if this node is close too, then store here as well
            if self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
                self.storage[dkey] = value
            ds = [self.protocol.callStore(n, dkey, value) for n in nodes]
            d = defer.DeferredList(ds)
            d.addCallback(self._anyRespondSuccess)
            d.addErrback(self.onError)
            return d

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        d = spider.find()
        d.addCallback(store)
        d.addErrback(self.onError)
        return d
Example #13
    def append(self, key, value):
        """
        For the given key append the given list values to the set in the network.
        """
        dkey = digest(key)
        node = Node(dkey)

        def append_(nodes):
            # if this node is close too, then store here as well
            if not nodes or self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
                try:
                    pvalue = json.loads(value)
                    self.set_keys.add(dkey)
                    if dkey not in self.storage:
                        _log.debug("%s local append key: %s not in storage set value: %s" % (base64.b64encode(node.id), base64.b64encode(dkey), pvalue))
                        self.storage[dkey] = value
                    else:
                        old_value_ = self.storage[dkey]
                        old_value = json.loads(old_value_)
                        new_value = list(set(old_value + pvalue))
                        _log.debug("%s local append key: %s old: %s add: %s new: %s" % (base64.b64encode(node.id), base64.b64encode(dkey), old_value, pvalue, new_value))
                        self.storage[dkey] = json.dumps(new_value)
                except:
                    _log.debug("Trying to append something not a JSON coded list %s" % value, exc_info=True)
            ds = [self.protocol.callAppend(n, dkey, value) for n in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            _log.debug("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(append_)
Example #14
    def append(self, key, value):
        """
        Append the given value to the given key in the network.
        """
        self.log.debug("setting '%s' = '%s' on network" % (key, value))
        dkey = digest(key)

        def append(nodes, mid):
            self.log.info("setting '%s' on %s" % (key, list(map(str, nodes))))

            # TODO: Must add a transaction ID so we don't append multiple times.
            print("org mid", mid)
            mid = uuid.uuid1().hex
            print("new mid", mid)

            ds = [self.protocol.callAppend(node, mid, dkey, value) for node in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(append, "hej")
Example #15
    async def _refresh_table(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        results = []
        for node_id in self.protocol.get_refresh_ids():
            node = Node(node_id)
            nearest = self.protocol.router.find_neighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                     self.alpha)
            results.append(spider.find())

        # do our crawling
        await asyncio.gather(*results)
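
In the asyncio-based variants, _refresh_table is not awaited directly by callers; it is typically re-armed on a timer. One common pattern looks like the sketch below (the wrapper name and refresh_loop attribute are assumptions, not taken from these examples):

    def refresh_table(self):
        # Assumes `import asyncio` at module level.  Fire the refresh
        # coroutine without blocking, then re-arm the timer for an hour,
        # matching the bucket-refresh interval from the paper.
        asyncio.ensure_future(self._refresh_table())
        loop = asyncio.get_event_loop()
        self.refresh_loop = loop.call_later(3600, self.refresh_table)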
Example #16
    def append(self, key, value):
        """
        For the given key append the given list values to the set in the network.
        """
        dkey = digest(key)
        node = Node(dkey)

        def append_(nodes):
            # if this node is close too, then store here as well
            if self.node.distanceTo(node) < max(
                [n.distanceTo(node) for n in nodes]):
                try:
                    pvalue = json.loads(value)
                    self.set_keys.add(dkey)
                    if dkey not in self.storage:
                        _log.debug(
                            "%s local append key: %s not in storage set value: %s"
                            % (base64.b64encode(
                                node.id), base64.b64encode(dkey), pvalue))
                        self.storage[dkey] = value
                    else:
                        old_value_ = self.storage[dkey]
                        old_value = json.loads(old_value_)
                        new_value = list(set(old_value + pvalue))
                        _log.debug(
                            "%s local append key: %s old: %s add: %s new: %s" %
                            (base64.b64encode(node.id), base64.b64encode(dkey),
                             old_value, pvalue, new_value))
                        self.storage[dkey] = json.dumps(new_value)
                except:
                    _log.debug(
                        "Trying to append something not a JSON coded list %s" %
                        value,
                        exc_info=True)
            ds = [self.protocol.callAppend(n, dkey, value) for n in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" %
                             key)
            _log.debug("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(append_)
Example #17
    def remove(self, key, value):
        """
        For the given key remove the given list values from the set in the network.
        """
        dkey = digest(key)

        def remove(nodes):
            ds = [self.protocol.callRemove(node, dkey, value) for node in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(remove)
Example #18
    async def _refresh_table(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for node_id in self.protocol.getRefreshIDs():
            node = Node(node_id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                     self.ksize, self.alpha)
            ds.append(spider.find())

        # do our crawling
        await asyncio.gather(*ds)

        # now republish keys older than one hour
        for dkey, value in self.storage.iteritemsOlderThan(3600):
            await self.set_digest(dkey, value)
Example #19
    async def _refresh_table(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        results = []
        for node_id in self.protocol.get_refresh_ids():
            node = Node(node_id)
            nearest = self.protocol.router.find_neighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                     self.ksize, self.alpha)
            results.append(spider.find())

        # do our crawling
        await asyncio.gather(*results)

        # now republish keys older than one hour
        for dkey, value in self.storage.iter_older_than(3600):
            await self.set_digest(dkey, value)
Example #20
    def set(self, key, value):
        """
        Set the given key to the given value in the network.
        """
        self.log.debug("setting '%s' = '%s' on network" % (key, value))
        dkey = digest(key)

        def store(nodes):
            self.log.info("setting '%s' on %s" % (key, map(str, nodes)))
            ds = [self.protocol.callStore(node, dkey, value) for node in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(store)
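
A hypothetical end-to-end driver for the Twisted-based set shown above, assuming the usual Server(), listen(port) and bootstrap([(ip, port)]) entry points; the address, port and key below are placeholders:

from twisted.internet import reactor

server = Server()    # the class providing the set() method above
server.listen(8468)  # start answering DHT traffic on this UDP port

def done(result):
    print("set succeeded:", result)
    reactor.stop()

def bootstrapped(_):
    server.set("a-key", "a-value").addCallback(done)

# ("1.2.3.4", 8468) stands in for a reachable bootstrap node.
server.bootstrap([("1.2.3.4", 8468)]).addCallback(bootstrapped)
reactor.run()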
Example #21
    async def _refresh_table(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for node_id in self.protocol.getRefreshIDs():
            log.debug('node_id=%s type %s', node_id, type(node_id))
            node = Node(node_id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                     self.ksize, self.alpha)
            ds.append(spider.find())

        # do our crawling
        await asyncio.gather(*ds)

        # now republish keys older than one hour
        for dkey, value in self.storage.iteritemsOlderThan(3600):
            await self.set_digest(dkey, value)
Example #22
    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for id in self.protocol.getRefreshIDs():
            node = Node(id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest)
            ds.append(spider.find())

        def republishKeys(_):
            ds = []
            # Republish keys older than one hour
            for key, value in self.storage.iteritemsOlderThan(3600):
                ds.append(self.set(key, value))
            return defer.gatherResults(ds)

        return defer.gatherResults(ds).addCallback(republishKeys)
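
refreshTable returns a Deferred and is meant to run periodically. A sketch of one way to schedule it under Twisted, assuming it is wired up in the server's __init__:

from twisted.internet.task import LoopingCall

# e.g. in __init__: refresh the buckets once an hour and keep a handle
# so the loop can be stopped on shutdown.
self.refreshLoop = LoopingCall(self.refreshTable)
self.refreshLoop.start(3600)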
Example #23
    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for id in self.protocol.getRefreshIDs():
            node = Node(id)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest)
            ds.append(spider.find())

        def republishKeys(_):
            ds = []
            # Republish keys older than one hour
            for key, value in self.storage.iteritemsOlderThan(3600):
                ds.append(self.set(key, value))
            return defer.gatherResults(ds)

        return defer.gatherResults(ds).addCallback(republishKeys)
Example #24
    def set(self, key, value):
        """
        Set the given key to the given value in the network.
        """
        self.log.debug("setting '%s' = '%s' on network" % (key, value))
        dkey = digest(key)

        def store(nodes):
            self.log.info("setting '%s' on %s" % (key, map(str, nodes)))
            ds = [self.protocol.callStore(node, dkey, value) for node in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" %
                             key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(store)
Example #25
    def remove(self, key, value):
        """
        For the given key remove the given list values from the set in the network.
        """
        dkey = digest(key)

        def remove(nodes):
            ds = [
                self.protocol.callRemove(node, dkey, value) for node in nodes
            ]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" %
                             key)
            return defer.succeed(False)

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(remove)
Example #26
    def direct_message(self, nodeid, message):
        """Send direct message to a node.

        Spidercrawls the network to find the node and sends the message
        directly. This will fail if the node is behind a NAT and doesn't
        have a public ip.

        Args:
            nodeid: 160-bit node id of the receiver as bytes
            message: iu-msgpack-python serializable message data

        Returns:
            Deferred own transport address (ip, port) if successful else None
        """
        hexid = binascii.hexlify(nodeid)
        self.log.debug("Direct messaging %s: %s" % (hexid, message))

        def found_callback(nodes):
            nodes = [n for n in nodes if n.id == nodeid]
            if len(nodes) == 0:
                msg = "{0} couldn't find destination node {1}"
                self.log.warning(msg.format(self.get_hex_id(), hexid))
                return defer.succeed(None)
            else:
                self.log.debug("found node %s" % binascii.hexlify(nodes[0].id))
                d = self.protocol.callDirectMessage(nodes[0], message)
                return d.addCallback(lambda r: r[1] if r[0] else None)

        node = Node(nodeid)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            msg = "{0} has no known neighbors to find {1}"
            self.log.warning(msg.format(self.get_hex_id(), hexid))
            return defer.succeed(None)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize,
                                 self.alpha)
        return spider.find().addCallback(found_callback)
Example #27
    def remove(self, key, value):
        """
        For the given key remove the given list values from the set in the network.
        """
        dkey = digest(key)
        node = Node(dkey)
        _log.debug("Server:remove %s" % base64.b64encode(dkey))

        def remove_(nodes):
            # if this node is close too, then store here as well
            if not nodes or self.node.distanceTo(node) < max([n.distanceTo(node) for n in nodes]):
                try:
                    pvalue = json.loads(value)
                    self.set_keys.add(dkey)
                    if dkey in self.storage:
                        try:
                            old_value = json.loads(self.storage[dkey])
                            new_value = list(set(old_value) - set(pvalue))
                        except:
                            # When the key has been used for single values or was deleted, it does not contain a list
                            # Just empty it
                            old_value = self.storage[dkey]
                            new_value = []
                        self.storage[dkey] = json.dumps(new_value)
                        _log.debug("%s local remove key: %s old: %s remove: %s new: %s" % (base64.b64encode(node.id), base64.b64encode(dkey), old_value, pvalue, new_value))
                except:
                    _log.debug("Trying to remove somthing not a JSON coded list %s" % value, exc_info=True)
            ds = [self.protocol.callRemove(n, dkey, value) for n in nodes]
            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)

        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(remove_)
Example #28
    def direct_message(self, nodeid, message):
        """Send direct message to a node.

        Spidercrawls the network to find the node and sends the message
        directly. This will fail if the node is behind a NAT and doesn't
        have a public ip.

        Args:
            nodeid: 160-bit node id of the receiver as bytes
            message: iu-msgpack-python serializable message data

        Returns:
            Deferred own transport address (ip, port) if successful else None
        """
        hexid = binascii.hexlify(nodeid)
        self.log.debug("Direct messaging %s: %s" % (hexid, message))

        def found_callback(nodes):
            nodes = [n for n in nodes if n.id == nodeid]
            if len(nodes) == 0:
                msg = "{0} couldn't find destination node {1}"
                self.log.warning(msg.format(self.get_hex_id(), hexid))
                return defer.succeed(None)
            else:
                self.log.debug("found node %s" % binascii.hexlify(nodes[0].id))
                d = self.protocol.callDirectMessage(nodes[0], message)
                return d.addCallback(lambda r: r[1] if r[0] else None)

        node = Node(nodeid)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            msg = "{0} has no known neighbors to find {1}"
            self.log.warning(msg.format(self.get_hex_id(), hexid))
            return defer.succeed(None)
        spider = NodeSpiderCrawl(self.protocol, node, nearest,
                                 self.ksize, self.alpha)
        return spider.find().addCallback(found_callback)
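
Finally, a hypothetical caller for direct_message; per its docstring the callback receives the node's own transport address on success and None otherwise (the node handle, target id and payload below are placeholders):

def on_delivered(own_address):
    if own_address is None:
        print("direct message could not be delivered")
    else:
        ip, port = own_address
        print("delivered; own transport address seen as %s:%s" % (ip, port))

node.direct_message(target_nodeid, {"type": "ping"}).addCallback(on_delivered)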