Example #1
0
    def _process_segment(self, segnum):
        """
        Download, validate, decode, and decrypt segment ``segnum`` of
        the file this Retrieve is fetching: gather k blocks from the
        active readers, validate each one, hand them to the decoder to
        reassemble the segment, and decrypt the result.
        """
        self.log("processing segment %d" % segnum)

        # TODO: The old code uses a marker. Should this code do that
        # too? What did the Marker do?

        # Ask every active reader for its block and salt, plus whatever
        # hashes are needed to validate them; one gathered deferred per
        # reader.
        block_deferreds = []
        for reader in self._active_readers:
            started = time.time()
            block_and_salt_d = reader.get_block_and_salt(segnum)
            hash_d1, hash_d2 = self._get_needed_hashes(reader, segnum)
            d = deferredutil.gatherResults([block_and_salt_d,
                                            hash_d1, hash_d2])
            d.addCallback(self._validate_block, segnum, reader,
                          reader.server, started)
            # _handle_bad_share takes care of recoverable errors (by
            # dropping that share and returning None). Any other errors
            # (i.e. code bugs) are passed through and cause the retrieve
            # to fail.
            d.addErrback(self._handle_bad_share, [reader])
            block_deferreds.append(d)
        all_blocks = deferredutil.gatherResults(block_deferreds)
        if self._verify:
            # In verify mode the downloaded data is replaced with an
            # empty string before being recorded as the segment.
            all_blocks.addCallback(lambda ignored: "")
            all_blocks.addCallback(self._set_segment)
        else:
            all_blocks.addCallback(self._maybe_decode_and_decrypt_segment,
                                   segnum)
        return all_blocks
Example #2
0
    def _process_segment(self, segnum):
        """
        Download, validate, decode, and decrypt segment ``segnum`` of
        the file this Retrieve is fetching: gather k blocks from the
        active readers, validate each one, hand them to the decoder to
        reassemble the segment, and decrypt the result.
        """
        self.log("processing segment %d" % segnum)

        # TODO: The old code uses a marker. Should this code do that
        # too? What did the Marker do?

        # Ask every active reader for its block and salt, plus whatever
        # hashes are needed to validate them; one gathered deferred per
        # reader.
        block_deferreds = []
        for reader in self._active_readers:
            started = time.time()
            block_and_salt_d = reader.get_block_and_salt(segnum)
            hash_d1, hash_d2 = self._get_needed_hashes(reader, segnum)
            d = deferredutil.gatherResults([block_and_salt_d,
                                            hash_d1, hash_d2])
            d.addCallback(self._validate_block, segnum, reader,
                          reader.server, started)
            # _handle_bad_share takes care of recoverable errors (by
            # dropping that share and returning None). Any other errors
            # (i.e. code bugs) are passed through and cause the retrieve
            # to fail.
            d.addErrback(self._handle_bad_share, [reader])
            block_deferreds.append(d)
        all_blocks = deferredutil.gatherResults(block_deferreds)
        if self._verify:
            # In verify mode the downloaded data is replaced with an
            # empty string before being recorded as the segment.
            all_blocks.addCallback(lambda ignored: "")
            all_blocks.addCallback(self._set_segment)
        else:
            all_blocks.addCallback(self._maybe_decode_and_decrypt_segment,
                                   segnum)
        return all_blocks
Example #3
0
        def _got_buckets(result):
            # result is the (bucketdict, success) pair from the bucket
            # query above.
            bucketdict, success = result

            # Kick off a download-and-verify for every share offered.
            verify_ds = [self._download_and_verify(s, sharenum, bucket)
                         for (sharenum, bucket) in bucketdict.items()]

            all_verified = deferredutil.gatherResults(verify_ds)

            def _tally(outcomes):
                # Each outcome is (succ, sharenum, whynot); sort the
                # share numbers into verified / corrupt / incompatible.
                verified = set()
                corrupt = set()
                incompatible = set()
                for succ, sharenum, whynot in outcomes:
                    if succ:
                        verified.add(sharenum)
                    elif whynot == 'corrupt':
                        corrupt.add(sharenum)
                    elif whynot == 'incompatible':
                        incompatible.add(sharenum)
                return (verified, s, corrupt, incompatible, success)

            all_verified.addCallback(_tally)
            return all_verified
Example #4
0
    def get_block(self, blocknum):
        """Fetch block ``blocknum`` of this share along with the hashes
        needed to validate it, returning a Deferred."""
        # The first time we use this bucket we must fetch enough of the
        # share hash tree to validate our share hash up to the hashroot.
        if self.share_hash_tree.needed_hashes(self.sharenum):
            share_hashes_d = self.bucket.get_share_hashes()
        else:
            share_hashes_d = defer.succeed([])

        # Elements of the block hash tree needed to validate the
        # requested block up to the share hash. The root of the block
        # hash tree arrives via the share tree, so drop it.
        needed = self.block_hash_tree.needed_hashes(blocknum,
                                                    include_leaf=True)
        needed.discard(0)
        block_hashes_d = self.bucket.get_block_hashes(needed)

        if blocknum < self.num_blocks - 1:
            thisblocksize = self.block_size
        else:
            # The last block holds the remainder, unless the share
            # divides evenly into full-sized blocks.
            thisblocksize = (self.share_size % self.block_size
                             or self.block_size)
        block_data_d = self.bucket.get_block_data(blocknum,
                                                  self.block_size,
                                                  thisblocksize)

        dl = deferredutil.gatherResults([share_hashes_d, block_hashes_d,
                                         block_data_d])
        dl.addCallback(self._got_data, blocknum)
        return dl
Example #5
0
        def _got_buckets(result):
            # result is the (bucketdict, success) pair from the bucket
            # query above.
            bucketdict, success = result

            # Kick off a download-and-verify for every share offered.
            verify_ds = [self._download_and_verify(s, sharenum, bucket)
                         for (sharenum, bucket) in bucketdict.items()]

            all_verified = deferredutil.gatherResults(verify_ds)

            def _tally(outcomes):
                # Each outcome is (succ, sharenum, whynot); sort the
                # share numbers into verified / corrupt / incompatible.
                verified = set()
                corrupt = set()
                incompatible = set()
                for succ, sharenum, whynot in outcomes:
                    if succ:
                        verified.add(sharenum)
                    elif whynot == 'corrupt':
                        corrupt.add(sharenum)
                    elif whynot == 'incompatible':
                        incompatible.add(sharenum)
                return (verified, s, corrupt, incompatible, success)

            all_verified.addCallback(_tally)
            return all_verified
Example #6
0
    def get_block(self, blocknum):
        """Fetch block ``blocknum`` of this share along with the hashes
        needed to validate it, returning a Deferred."""
        # The first time we use this bucket we must fetch enough of the
        # share hash tree to validate our share hash up to the hashroot.
        if self.share_hash_tree.needed_hashes(self.sharenum):
            share_hashes_d = self.bucket.get_share_hashes()
        else:
            share_hashes_d = defer.succeed([])

        # Elements of the block hash tree needed to validate the
        # requested block up to the share hash. The root of the block
        # hash tree arrives via the share tree, so drop it.
        needed = self.block_hash_tree.needed_hashes(blocknum,
                                                    include_leaf=True)
        needed.discard(0)
        block_hashes_d = self.bucket.get_block_hashes(needed)

        if blocknum < self.num_blocks - 1:
            thisblocksize = self.block_size
        else:
            # The last block holds the remainder, unless the share
            # divides evenly into full-sized blocks.
            thisblocksize = (self.share_size % self.block_size
                             or self.block_size)
        block_data_d = self.bucket.get_block_data(blocknum,
                                                  self.block_size,
                                                  thisblocksize)

        dl = deferredutil.gatherResults([share_hashes_d, block_hashes_d,
                                         block_data_d])
        dl.addCallback(self._got_data, blocknum)
        return dl
Example #7
0
 def _do_update(ignored):
     # Overwrite both nodes: the MDMF node with a large payload, the
     # SDMF node with a small one; gather the two deferreds.
     big = MutableData("foo bar baz" * 100000)
     small = MutableData("foo bar baz" * 10)
     overwrites = [self.mdmf_node.overwrite(big),
                   self.sdmf_node.overwrite(small)]
     return gatherResults(overwrites)
Example #8
0
 def _get_blocks(vrbp):
     # Request every block of this share; each result is handed to
     # _discard_result.
     block_ds = []
     for num in range(veup.num_segments):
         d = vrbp.get_block(num)
         d.addCallback(_discard_result)
         block_ds.append(d)
     # Fires once every block of this share has been downloaded and
     # verified, or else it errbacks.
     return deferredutil.gatherResults(block_ds)
Example #9
0
    def start(self):
        """Examine every server, then format and return the results."""
        # Pick the per-server examination according to verify mode.
        if self._verify:
            examine = self._verify_server_shares
        else:
            examine = self._check_server_shares
        ds = [examine(server) for server in self._servers]
        d = deferredutil.gatherResults(ds)
        d.addCallback(self._format_results)
        return d
Example #10
0
    def start(self):
        """Examine every server, then format and return the results."""
        # Pick the per-server examination according to verify mode.
        if self._verify:
            examine = self._verify_server_shares
        else:
            examine = self._check_server_shares
        ds = [examine(server) for server in self._servers]
        d = deferredutil.gatherResults(ds)
        d.addCallback(self._format_results)
        return d
Example #11
0
 def test_gather_results(self):
     # An errback on any input deferred must errback the gathered
     # deferred with that same failure.
     left = defer.Deferred()
     right = defer.Deferred()
     gathered = deferredutil.gatherResults([left, right])
     left.errback(ValueError("BAD"))
     def _unexpected_success(result):
         self.fail("Should have errbacked, not resulted in %s" % (result,))
     def _expected_failure(f):
         f.trap(ValueError)
     gathered.addCallbacks(_unexpected_success, _expected_failure)
     return gathered
Example #12
0
    def _decode_and_decrypt_segments(self, ignored, data, offset):
        """
        After the servermap update, I take the encrypted and encoded
        data that the servermap fetched while doing its update and
        transform it into decoded-and-decrypted plaintext that can be
        used by the new uploadable. I return a Deferred that fires with
        the segments.
        """
        r = Retrieve(self._node, self._storage_broker, self._servermap,
                     self._version)
        # decode: takes in our blocks and salts from the servermap,
        # returns a Deferred that fires with the corresponding plaintext
        # segments. Does not download -- simply takes advantage of
        # existing infrastructure within the Retrieve class to avoid
        # duplicating code.
        sm = self._servermap
        # XXX: If the methods in the servermap don't work as
        # abstractions, you should rewrite them instead of going around
        # them.
        update_data = sm.update_data
        start_segments = {} # shnum -> start segment
        end_segments = {} # shnum -> end segment
        blockhashes = {} # shnum -> blockhash tree
        # .items() instead of the Python-2-only .iteritems() keeps this
        # portable; iteration behavior is identical here.
        for (shnum, original_data) in update_data.items():
            # Use a fresh name instead of rebinding the 'data'
            # parameter (the previous code shadowed the argument).
            matching = [d[1] for d in original_data if d[0] == self._version]
            # matching is [(blockhashes,start,end)..]

            # Every entry in our list should now be share shnum for
            # a particular version of the mutable file, so all of the
            # entries should be identical.
            datum = matching[0]
            assert [x for x in matching if x != datum] == []

            # datum is (blockhashes,start,end)
            blockhashes[shnum] = datum[0]
            start_segments[shnum] = datum[1] # (block,salt) bytestrings
            end_segments[shnum] = datum[2]

        d1 = r.decode(start_segments, self._start_segment)
        d2 = r.decode(end_segments, self._end_segment)
        d3 = defer.succeed(blockhashes)
        return deferredutil.gatherResults([d1, d2, d3])
Example #13
0
    def _decode_and_decrypt_segments(self, ignored, data, offset):
        """
        After the servermap update, I take the encrypted and encoded
        data that the servermap fetched while doing its update and
        transform it into decoded-and-decrypted plaintext that can be
        used by the new uploadable. I return a Deferred that fires with
        the segments.
        """
        r = Retrieve(self._node, self._storage_broker, self._servermap,
                     self._version)
        # decode: takes in our blocks and salts from the servermap,
        # returns a Deferred that fires with the corresponding plaintext
        # segments. Does not download -- simply takes advantage of
        # existing infrastructure within the Retrieve class to avoid
        # duplicating code.
        sm = self._servermap
        # XXX: If the methods in the servermap don't work as
        # abstractions, you should rewrite them instead of going around
        # them.
        update_data = sm.update_data
        start_segments = {} # shnum -> start segment
        end_segments = {} # shnum -> end segment
        blockhashes = {} # shnum -> blockhash tree
        # .items() instead of the Python-2-only .iteritems() keeps this
        # portable; iteration behavior is identical here.
        for (shnum, original_data) in update_data.items():
            # Use a fresh name instead of rebinding the 'data'
            # parameter (the previous code shadowed the argument).
            matching = [d[1] for d in original_data if d[0] == self._version]
            # matching is [(blockhashes,start,end)..]

            # Every entry in our list should now be share shnum for
            # a particular version of the mutable file, so all of the
            # entries should be identical.
            datum = matching[0]
            assert [x for x in matching if x != datum] == []

            # datum is (blockhashes,start,end)
            blockhashes[shnum] = datum[0]
            start_segments[shnum] = datum[1] # (block,salt) bytestrings
            end_segments[shnum] = datum[2]

        d1 = r.decode(start_segments, self._start_segment)
        d2 = r.decode(end_segments, self._end_segment)
        d3 = defer.succeed(blockhashes)
        return deferredutil.gatherResults([d1, d2, d3])