示例#1
0
    def remote_upload_chk(self, storage_index):
        """
        See ``RIHelper.upload_chk``

        Returns ``(None, helper)`` when an upload for this storage index is
        already in flight; otherwise returns a Deferred that checks whether
        the file is already present in the grid.
        """
        self.count("chk_upload_helper.upload_requests")
        lp = self.log(format="helper: upload_chk query for SI %(si)s",
                      si=si_b2a(storage_index))

        # Reuse an upload that is already running for this SI, if any.
        active = self._active_uploads.get(storage_index)
        if active is not None:
            self.log("upload is currently active", parent=lp)
            return (None, active)

        d = self._check_chk(storage_index, lp)
        d.addCallback(self._did_chk_check, storage_index, lp)

        def _log_check_failure(f):
            # Record the failure but let it propagate to the caller.
            self.log("error while checking for chk-already-in-grid",
                     failure=f, level=log.WEIRD, parent=lp, umid="jDtxZg")
            return f

        d.addErrback(_log_check_failure)
        return d
示例#2
0
    def __init__(self, storage_index, helper, storage_broker, secret_holder,
                 incoming_file, encoding_file, log_number):
        """
        Helper-side handler for one CHK upload.

        Sets up upload-status bookkeeping, a ciphertext fetcher writing to
        ``incoming_file``/``encoding_file``, and a local ciphertext reader,
        then chains: fetch done -> start reader -> start_encrypted ->
        ``_finished`` (or ``_failed`` on any error).
        """
        upload.CHKUploader.__init__(self, storage_broker, secret_holder)
        self._storage_index = storage_index
        self._helper = helper
        self._incoming_file = incoming_file
        self._encoding_file = encoding_file
        # short base32 SI prefix, used to tag log messages
        self._upload_id = si_b2a(storage_index)[:5]
        self._log_number = log_number
        self._upload_status = upload.UploadStatus()
        self._upload_status.set_helper(False)
        self._upload_status.set_storage_index(storage_index)
        self._upload_status.set_status("fetching ciphertext")
        self._upload_status.set_progress(0, 1.0)
        self._helper.log("CHKUploadHelper starting for SI %r" %
                         self._upload_id,
                         parent=log_number)

        self._storage_broker = storage_broker
        self._secret_holder = secret_holder
        self._fetcher = CHKCiphertextFetcher(self, incoming_file,
                                             encoding_file, self._log_number)
        self._reader = LocalCiphertextReader(self, storage_index,
                                             encoding_file)
        # fired with the final result (or Failure) when the upload ends
        self._finished_observers = observer.OneShotObserverList()

        self._started = time.time()
        # the pipeline: once all ciphertext has been fetched, read it back
        # locally and push the encoded shares out
        d = self._fetcher.when_done()
        d.addCallback(lambda res: self._reader.start())
        d.addCallback(lambda res: self.start_encrypted(self._reader))
        d.addCallback(self._finished)
        d.addErrback(self._failed)
示例#3
0
    def __init__(self, storage_index,
                 helper, storage_broker, secret_holder,
                 incoming_file, encoding_file,
                 log_number, progress=None):
        """
        Helper-side handler for one CHK upload.

        Sets up upload-status bookkeeping, a ciphertext fetcher writing to
        ``incoming_file``/``encoding_file``, and a local ciphertext reader,
        then chains: fetch done -> start reader -> start_encrypted ->
        ``_finished`` (or ``_failed`` on any error).  ``progress`` is passed
        through to the base ``upload.CHKUploader``.
        """
        upload.CHKUploader.__init__(self, storage_broker, secret_holder, progress=progress)
        self._storage_index = storage_index
        self._helper = helper
        self._incoming_file = incoming_file
        self._encoding_file = encoding_file
        # short base32 SI prefix, used to tag log messages
        self._upload_id = si_b2a(storage_index)[:5]
        self._log_number = log_number
        self._upload_status = upload.UploadStatus()
        self._upload_status.set_helper(False)
        self._upload_status.set_storage_index(storage_index)
        self._upload_status.set_status("fetching ciphertext")
        self._upload_status.set_progress(0, 1.0)
        self._helper.log("CHKUploadHelper starting for SI %s" % self._upload_id,
                         parent=log_number)

        self._storage_broker = storage_broker
        self._secret_holder = secret_holder
        self._fetcher = CHKCiphertextFetcher(self, incoming_file, encoding_file,
                                             self._log_number)
        self._reader = LocalCiphertextReader(self, storage_index, encoding_file)
        # fired with the final result (or Failure) when the upload ends
        self._finished_observers = observer.OneShotObserverList()

        self._started = time.time()
        # the pipeline: once all ciphertext has been fetched, read it back
        # locally and push the encoded shares out
        d = self._fetcher.when_done()
        d.addCallback(lambda res: self._reader.start())
        d.addCallback(lambda res: self.start_encrypted(self._reader))
        d.addCallback(self._finished)
        d.addErrback(self._failed)
示例#4
0
    def __init__(self, filenode, servermap, verinfo, fetch_privkey=False):
        """
        Prepare to retrieve one version (``verinfo``) of a mutable file.

        ``fetch_privkey`` asks us to also grab the private key (used during
        repair) -- skipped when the node already holds it.
        """
        self._node = filenode
        assert self._node.get_pubkey()
        self._storage_index = filenode.get_storage_index()
        assert self._node.get_readkey()
        self._last_failure = None
        prefix = si_b2a(self._storage_index)[:5]
        self._log_number = log.msg("Retrieve(%s): starting" % prefix)
        self._outstanding_queries = {}  # maps (peerid,shnum) to start_time
        self._running = True
        self._decoding = False
        self._bad_shares = set()

        self.servermap = servermap
        assert self._node.get_pubkey()
        self.verinfo = verinfo
        # during repair, we may be called upon to grab the private key, since
        # it wasn't picked up during a verify=False checker run, and we'll
        # need it for repair to generate a new version.
        self._need_privkey = fetch_privkey
        if self._node.get_privkey():
            self._need_privkey = False

        self._status = RetrieveStatus()
        self._status.set_storage_index(self._storage_index)
        self._status.set_helper(False)
        self._status.set_progress(0.0)
        self._status.set_active(True)
        # NOTE: this unpack rebinds 'prefix' (the verinfo prefix), shadowing
        # the base32 log prefix computed above -- harmless, since the log
        # prefix was only used before this point.
        (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
         offsets_tuple) = self.verinfo
        self._status.set_size(datalength)
        self._status.set_encoding(k, N)
示例#5
0
    def __init__(self, filenode, servermap, verinfo, fetch_privkey=False):
        """
        Prepare to retrieve one version (``verinfo``) of a mutable file.

        ``fetch_privkey`` asks us to also grab the private key (used during
        repair) -- skipped when the node already holds it.
        """
        self._node = filenode
        assert self._node.get_pubkey()
        self._storage_index = filenode.get_storage_index()
        assert self._node.get_readkey()
        self._last_failure = None
        prefix = si_b2a(self._storage_index)[:5]
        self._log_number = log.msg("Retrieve(%s): starting" % prefix)
        self._outstanding_queries = {} # maps (peerid,shnum) to start_time
        self._running = True
        self._decoding = False
        self._bad_shares = set()

        self.servermap = servermap
        assert self._node.get_pubkey()
        self.verinfo = verinfo
        # during repair, we may be called upon to grab the private key, since
        # it wasn't picked up during a verify=False checker run, and we'll
        # need it for repair to generate a new version.
        self._need_privkey = fetch_privkey
        if self._node.get_privkey():
            self._need_privkey = False

        self._status = RetrieveStatus()
        self._status.set_storage_index(self._storage_index)
        self._status.set_helper(False)
        self._status.set_progress(0.0)
        self._status.set_active(True)
        # NOTE: this unpack rebinds 'prefix' (the verinfo prefix), shadowing
        # the base32 log prefix computed above -- harmless, since the log
        # prefix was only used before this point.
        (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
         offsets_tuple) = self.verinfo
        self._status.set_size(datalength)
        self._status.set_encoding(k, N)
示例#6
0
 def _failed(self, f):
     """Record a failed upload: log the Failure, hand it to anyone waiting
     on the finished-observer list, report completion (size 0) to the
     helper, and drop the ciphertext reader."""
     self.log(format="CHKUploadHelper(%(si)s) failed",
              si=si_b2a(self._storage_index)[:5],
              failure=f,
              level=log.UNUSUAL)
     self._finished_observers.fire(f)
     self._helper.upload_finished(self._storage_index, 0)
     del self._reader
示例#7
0
 def _make_chk_upload_helper(self, storage_index, lp):
     """Build a fake CHK upload helper whose scratch files are named by the
     base32 form of the storage index."""
     si_s = si_b2a(storage_index)
     return CHKUploadHelper_fake(
         storage_index,
         self,
         self._storage_broker,
         self._secret_holder,
         os.path.join(self._chk_incoming, si_s),
         os.path.join(self._chk_encoding, si_s),
         lp,
     )
示例#8
0
 def write(self, i, ss, serverid, tail=0):
     """Allocate share 0 for SI #i (with *tail* spliced in as the SI's last
     byte), write b"data" into it, and return the base32 SI."""
     si = self.si(i)[:-1] + bytes(bytearray((tail, )))
     renew_secret = self.rs(i, serverid)
     cancel_secret = self.cs(i, serverid)
     had, made = ss.allocate_buckets(si, renew_secret, cancel_secret,
                                     set([0]), 99)
     bucket = made[0]
     bucket.write(0, b"data")
     bucket.close()
     return si_b2a(si)
示例#9
0
 def _failed(self, f):
     """Record a failed upload: log the Failure, hand it to anyone waiting
     on the finished-observer list, report completion (size 0) to the
     helper, and drop the ciphertext reader."""
     self.log(format="CHKUploadHelper(%(si)s) failed",
              si=si_b2a(self._storage_index)[:5],
              failure=f,
              level=log.UNUSUAL)
     self._finished_observers.fire(f)
     self._helper.upload_finished(self._storage_index, 0)
     del self._reader
示例#10
0
 def __init__(self, rref, peerid, storage_index):
     """Wrap remote bucket reference *rref*; precompute a repr string that
     names the peer and storage index for debugging."""
     self._rref = rref
     self._peerid = peerid
     self._reprstr = "<ReadBucketProxy %s to peer [%s] SI %s>" % (
         id(self),
         idlib.shortnodeid_b2a(peerid),
         si_b2a(storage_index),
     )
     self._started = False  # sent request to server
     self._ready = observer.OneShotObserverList()  # got response from server
示例#11
0
 def _make_chk_upload_helper(self, storage_index, lp):
     """Build a CHKUploadHelper whose incoming/encoding scratch files are
     named by the base32 form of the storage index."""
     si_s = si_b2a(storage_index)
     incoming_file = os.path.join(self._chk_incoming, si_s)
     encoding_file = os.path.join(self._chk_encoding, si_s)
     uh = CHKUploadHelper(storage_index, self, self._storage_broker,
                          self._secret_holder, incoming_file, encoding_file,
                          lp)
     return uh
示例#12
0
    def remote_upload_chk(self, storage_index):
        """
        Handle a client's upload_chk request.

        Returns the result of ``uh.start()`` for an already-active upload,
        or a Deferred that first checks whether the file already exists in
        the grid and, if not, creates (or reuses) an upload helper.
        """
        self.count("chk_upload_helper.upload_requests")
        r = upload.UploadResults()
        started = time.time()
        si_s = si_b2a(storage_index)
        lp = self.log(format="helper: upload_chk query for SI %(si)s", si=si_s)
        incoming_file = os.path.join(self._chk_incoming, si_s)
        encoding_file = os.path.join(self._chk_encoding, si_s)
        if storage_index in self._active_uploads:
            self.log("upload is currently active", parent=lp)
            uh = self._active_uploads[storage_index]
            return uh.start()

        d = self._check_for_chk_already_in_grid(storage_index, r, lp)

        def _checked(already_present):
            elapsed = time.time() - started
            r.timings['existence_check'] = elapsed
            if already_present:
                # the necessary results are placed in the UploadResults
                self.count("chk_upload_helper.upload_already_present")
                self.log("file already found in grid", parent=lp)
                return (r, None)

            self.count("chk_upload_helper.upload_need_upload")
            # the file is not present in the grid, by which we mean there are
            # less than 'N' shares available.
            self.log("unable to find file in the grid",
                     parent=lp,
                     level=log.NOISY)
            # We need an upload helper. Check our active uploads again in
            # case there was a race.
            if storage_index in self._active_uploads:
                self.log("upload is currently active", parent=lp)
                uh = self._active_uploads[storage_index]
            else:
                self.log("creating new upload helper", parent=lp)
                uh = self.chk_upload_helper_class(storage_index, self,
                                                  self._storage_broker,
                                                  self._secret_holder,
                                                  incoming_file, encoding_file,
                                                  r, lp)
                self._active_uploads[storage_index] = uh
                self._add_upload(uh)
            return uh.start()

        d.addCallback(_checked)

        def _err(f):
            # log the check failure, but let it propagate to the caller
            self.log("error while checking for chk-already-in-grid",
                     failure=f,
                     level=log.WEIRD,
                     parent=lp,
                     umid="jDtxZg")
            return f

        d.addErrback(_err)
        return d
示例#13
0
    def __init__(self, filenode, storage_broker, servermap, verinfo,
                 fetch_privkey=False, verify=False):
        """
        Prepare to retrieve (or verify) one version of a mutable file.

        ``verinfo`` describes the version to fetch; ``fetch_privkey`` asks
        us to also grab the private key (used during repair) unless the node
        already holds it; ``verify`` switches the downloader into
        verification mode (see the comment block below).
        """
        self._node = filenode
        _assert(self._node.get_pubkey())
        self._storage_broker = storage_broker
        self._storage_index = filenode.get_storage_index()
        _assert(self._node.get_readkey())
        self._last_failure = None
        prefix = si_b2a(self._storage_index)[:5]
        self._log_number = log.msg("Retrieve(%s): starting" % prefix)
        self._running = True
        self._decoding = False
        self._bad_shares = set()

        self.servermap = servermap
        self.verinfo = verinfo
        # TODO: make it possible to use self.verinfo.datalength instead
        # NOTE: this unpack rebinds 'prefix' (the verinfo prefix), shadowing
        # the base32 log prefix computed above -- harmless here.
        (seqnum, root_hash, IV, segsize, datalength, k, N, prefix,
         offsets_tuple) = self.verinfo
        self._data_length = datalength
        # during repair, we may be called upon to grab the private key, since
        # it wasn't picked up during a verify=False checker run, and we'll
        # need it for repair to generate a new version.
        self._need_privkey = verify or (fetch_privkey
                                        and not self._node.get_privkey())

        if self._need_privkey:
            # TODO: Evaluate the need for this. We'll use it if we want
            # to limit how many queries are on the wire for the privkey
            # at once.
            self._privkey_query_markers = [] # one Marker for each time we've
                                             # tried to get the privkey.

        # verify means that we are using the downloader logic to verify all
        # of our shares. This tells the downloader a few things.
        #
        # 1. We need to download all of the shares.
        # 2. We don't need to decode or decrypt the shares, since our
        #    caller doesn't care about the plaintext, only the
        #    information about which shares are or are not valid.
        # 3. When we are validating readers, we need to validate the
        #    signature on the prefix. Do we? We already do this in the
        #    servermap update?
        self._verify = verify

        self._status = RetrieveStatus()
        self._status.set_storage_index(self._storage_index)
        self._status.set_helper(False)
        self._status.set_progress(0.0)
        self._status.set_active(True)
        self._status.set_size(datalength)
        self._status.set_encoding(k, N)
        self.readers = {}
        self._stopped = False
        self._pause_deferred = None
        self._offset = None
        self._read_length = None
        self.log("got seqnum %d" % self.verinfo[0])
示例#14
0
 def write(self, i, ss, serverid, tail=0):
     """Allocate share 0 for SI #i (with *tail* spliced in as the SI's last
     byte via chr() -- Python-2-era bytestrings), write "data" to it through
     the remote API, and return the base32 SI."""
     si = self.si(i)
     si = si[:-1] + chr(tail)
     had, made = ss.remote_allocate_buckets(si, self.rs(i, serverid),
                                            self.cs(i, serverid), set([0]),
                                            99, FakeCanary())
     made[0].remote_write(0, "data")
     made[0].remote_close()
     return si_b2a(si)
示例#15
0
    def to_string(self):
        """Serialize this CHK verifier cap as a bytestring.

        Layout: ``URI:CHK-Verifier:<SI>:<UEB hash>:<k>:<N>:<size>``.
        (The ``long`` in the size check marks this as Python-2-era code.)
        """
        assert isinstance(self.needed_shares, int)
        assert isinstance(self.total_shares, int)
        assert isinstance(self.size, (int, long))

        return (
            b'URI:CHK-Verifier:%s:%s:%d:%d:%d' %
            (si_b2a(self.storage_index), base32.b2a(self.uri_extension_hash),
             self.needed_shares, self.total_shares, self.size))
示例#16
0
 def __init__(self, filenode, storage_broker, secret_holder, monitor):
     """Set up an immutable-file repairer; its log lines are prefixed with
     the first five base32 characters of the file's storage index."""
     log.PrefixingLogMixin.__init__(
         self, "allmydata.immutable.repairer",
         prefix=si_b2a(filenode.get_storage_index())[:5])
     self._filenode = filenode
     self._storage_broker = storage_broker
     self._secret_holder = secret_holder
     self._monitor = monitor
     self._offset = 0
示例#17
0
 def __init__(self, filenode, storage_broker, secret_holder, monitor):
     """Set up an immutable-file repairer; its log lines are prefixed with
     the first five base32 characters of the file's storage index."""
     logprefix = si_b2a(filenode.get_storage_index())[:5]
     log.PrefixingLogMixin.__init__(self, "allmydata.immutable.repairer",
                                    prefix=logprefix)
     self._filenode = filenode
     self._storage_broker = storage_broker
     self._secret_holder = secret_holder
     self._monitor = monitor
     self._offset = 0  # presumably the read position into the ciphertext -- confirm against the repair loop
示例#18
0
 def write(self, i, ss, serverid, tail=0):
     """Allocate share 0 for SI #i (with *tail* spliced in as the SI's last
     byte via chr() -- Python-2-era bytestrings), write "data" to it through
     the remote API, and return the base32 SI."""
     si = self.si(i)
     si = si[:-1] + chr(tail)
     had,made = ss.remote_allocate_buckets(si,
                                           self.rs(i, serverid),
                                           self.cs(i, serverid),
                                           set([0]), 99, FakeCanary())
     made[0].remote_write(0, "data")
     made[0].remote_close()
     return si_b2a(si)
示例#19
0
    def test_previous_upload_failed(self):
        """
        Ciphertext left in CHK_encoding/ by a crashed earlier upload must not
        prevent a later upload of the same file, and the leftover files must
        be cleaned up once that upload succeeds.
        """
        self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
        self.setUpHelper(self.basedir)

        # we want to make sure that an upload which fails (leaving the
        # ciphertext in the CHK_encoding/ directory) does not prevent a later
        # attempt to upload that file from working. We simulate this by
        # populating the directory manually. The hardest part is guessing the
        # storage index.

        k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
        n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
        max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS[
            "max_segment_size"]
        segsize = min(max_segsize, len(DATA))
        # this must be a multiple of 'required_shares'==k
        segsize = mathutil.next_multiple(segsize, k)

        key = hashutil.convergence_hash(k, n, segsize, DATA,
                                        "test convergence string")
        assert len(key) == 16
        encryptor = aes.create_encryptor(key)
        SI = hashutil.storage_index_hash(key)
        SI_s = si_b2a(SI)
        encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
        # plant the leftover ciphertext; a 'with' block guarantees the file
        # handle is closed even if the write raises
        with open(encfile, "wb") as f:
            f.write(aes.encrypt_data(encryptor, DATA))

        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        d = wait_a_few_turns()

        def _ready(res):
            # the uploader must have connected to the helper by now
            assert u._helper
            return upload_data(u, DATA, convergence="test convergence string")

        d.addCallback(_ready)

        def _uploaded(results):
            the_uri = results.get_uri()
            assert "CHK" in the_uri

        d.addCallback(_uploaded)

        def _check_empty(res):
            # the helper must have cleaned up both scratch directories
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])

        d.addCallback(_check_empty)

        return d
示例#20
0
    def to_string(self):
        """Serialize this CHK verifier cap as a string.

        Layout: ``URI:CHK-Verifier:<SI>:<UEB hash>:<k>:<N>:<size>``.
        (The ``long`` in the size check marks this as Python-2-era code.)
        """
        assert isinstance(self.needed_shares, int)
        assert isinstance(self.total_shares, int)
        assert isinstance(self.size, (int,long))

        return ('URI:CHK-Verifier:%s:%s:%d:%d:%d' %
                (si_b2a(self.storage_index),
                 base32.b2a(self.uri_extension_hash),
                 self.needed_shares,
                 self.total_shares,
                 self.size))
示例#21
0
 def _get_uri_extension(self, res):
     """Fetch the URI extension block from one of the remaining readers;
     with no readers left, log it and return None (file unavailable)."""
     # assume that we can pull the UEB from any share. If we get an error,
     # declare the whole file unavailable.
     if not self._readers:
         self.log("no readers, so no UEB", level=log.NOISY)
         return
     bucket, server = self._readers.pop()
     proxy = ReadBucketProxy(bucket, server, si_b2a(self._storage_index))
     d = proxy.get_uri_extension()
     d.addCallback(self._got_uri_extension)
     d.addErrback(self._ueb_error)
     return d
示例#22
0
    def test_previous_upload_failed(self):
        """
        Ciphertext left in CHK_encoding/ by a crashed earlier upload must not
        prevent a later upload of the same file, and the leftover files must
        be cleaned up once that upload succeeds.
        """
        self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
        self.setUpHelper(self.basedir)

        # we want to make sure that an upload which fails (leaving the
        # ciphertext in the CHK_encoding/ directory) does not prevent a later
        # attempt to upload that file from working. We simulate this by
        # populating the directory manually. The hardest part is guessing the
        # storage index.

        k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
        n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
        max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS["max_segment_size"]
        segsize = min(max_segsize, len(DATA))
        # this must be a multiple of 'required_shares'==k
        segsize = mathutil.next_multiple(segsize, k)

        key = hashutil.convergence_hash(k, n, segsize, DATA, "test convergence string")
        assert len(key) == 16
        encryptor = AES(key)
        SI = hashutil.storage_index_hash(key)
        SI_s = si_b2a(SI)
        encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
        # plant the leftover ciphertext; a 'with' block guarantees the file
        # handle is closed even if the write raises
        with open(encfile, "wb") as f:
            f.write(encryptor.process(DATA))

        u = upload.Uploader(self.helper_furl)
        u.setServiceParent(self.s)

        d = wait_a_few_turns()

        def _ready(res):
            # the uploader must have connected to the helper by now
            assert u._helper
            return upload_data(u, DATA, convergence="test convergence string")

        d.addCallback(_ready)

        def _uploaded(results):
            the_uri = results.get_uri()
            assert "CHK" in the_uri

        d.addCallback(_uploaded)

        def _check_empty(res):
            # the helper must have cleaned up both scratch directories
            files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
            self.failUnlessEqual(files, [])
            files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
            self.failUnlessEqual(files, [])

        d.addCallback(_check_empty)

        return d
示例#23
0
 def _get_uri_extension(self, res):
     """Fetch the URI extension block from one of the remaining readers;
     with no readers left, log it and return None (file unavailable)."""
     # assume that we can pull the UEB from any share. If we get an error,
     # declare the whole file unavailable.
     if not self._readers:
         self.log("no readers, so no UEB", level=log.NOISY)
         return
     b,server = self._readers.pop()
     rbp = ReadBucketProxy(b, server, si_b2a(self._storage_index))
     d = rbp.get_uri_extension()
     d.addCallback(self._got_uri_extension)
     d.addErrback(self._ueb_error)
     return d
示例#24
0
    def __init__(self, filenode, storage_broker, monitor, servermap,
                 mode=MODE_READ, add_lease=False):
        """I update a servermap, locating a sufficient number of useful
        shares and remembering where they are located.

        ``mode`` (one of the MODE_* constants) controls how much of each
        share is read (see the read-size notes below) and whether the
        private key is needed (MODE_WRITE).  ``add_lease`` presumably asks
        the queries to also add/renew leases -- confirm against the query
        code, which is not visible here.
        """

        self._node = filenode
        self._storage_broker = storage_broker
        self._monitor = monitor
        self._servermap = servermap
        self.mode = mode
        self._add_lease = add_lease
        self._running = True

        self._storage_index = filenode.get_storage_index()
        self._last_failure = None

        self._status = UpdateStatus()
        self._status.set_storage_index(self._storage_index)
        self._status.set_progress(0.0)
        self._status.set_mode(mode)

        self._servers_responded = set()

        # how much data should we read?
        #  * if we only need the checkstring, then [0:75]
        #  * if we need to validate the checkstring sig, then [543ish:799ish]
        #  * if we need the verification key, then [107:436ish]
        #   * the offset table at [75:107] tells us about the 'ish'
        #  * if we need the encrypted private key, we want [-1216ish:]
        #   * but we can't read from negative offsets
        #   * the offset table tells us the 'ish', also the positive offset
        # A future version of the SMDF slot format should consider using
        # fixed-size slots so we can retrieve less data. For now, we'll just
        # read 4000 bytes, which also happens to read enough actual data to
        # pre-fetch an 18-entry dirnode.
        self._read_size = 4000
        if mode == MODE_CHECK:
            # we use unpack_prefix_and_signature, so we need 1k
            self._read_size = 1000
        self._need_privkey = False
        if mode == MODE_WRITE and not self._node.get_privkey():
            self._need_privkey = True
        # check+repair: repair requires the privkey, so if we didn't happen
        # to ask for it during the check, we'll have problems doing the
        # publish.

        # short base32 SI prefix, used to tag log messages
        prefix = si_b2a(self._storage_index)[:5]
        self._log_number = log.msg(format="SharemapUpdater(%(si)s): starting (%(mode)s)",
                                   si=prefix, mode=mode)
示例#25
0
    def remote_upload_chk(self, storage_index):
        """
        Handle a client's upload_chk request.

        Returns the result of ``uh.start()`` for an already-active upload,
        or a Deferred that first checks whether the file already exists in
        the grid and, if not, creates (or reuses) an upload helper.
        """
        self.count("chk_upload_helper.upload_requests")
        r = upload.UploadResults()
        started = time.time()
        si_s = si_b2a(storage_index)
        lp = self.log(format="helper: upload_chk query for SI %(si)s", si=si_s)
        incoming_file = os.path.join(self._chk_incoming, si_s)
        encoding_file = os.path.join(self._chk_encoding, si_s)
        if storage_index in self._active_uploads:
            self.log("upload is currently active", parent=lp)
            uh = self._active_uploads[storage_index]
            return uh.start()

        d = self._check_for_chk_already_in_grid(storage_index, r, lp)
        def _checked(already_present):
            elapsed = time.time() - started
            r.timings['existence_check'] = elapsed
            if already_present:
                # the necessary results are placed in the UploadResults
                self.count("chk_upload_helper.upload_already_present")
                self.log("file already found in grid", parent=lp)
                return (r, None)

            self.count("chk_upload_helper.upload_need_upload")
            # the file is not present in the grid, by which we mean there are
            # less than 'N' shares available.
            self.log("unable to find file in the grid", parent=lp,
                     level=log.NOISY)
            # We need an upload helper. Check our active uploads again in
            # case there was a race.
            if storage_index in self._active_uploads:
                self.log("upload is currently active", parent=lp)
                uh = self._active_uploads[storage_index]
            else:
                self.log("creating new upload helper", parent=lp)
                uh = self.chk_upload_helper_class(storage_index, self,
                                                  self._storage_broker,
                                                  self._secret_holder,
                                                  incoming_file, encoding_file,
                                                  r, lp)
                self._active_uploads[storage_index] = uh
                self._add_upload(uh)
            return uh.start()
        d.addCallback(_checked)
        def _err(f):
            # log the check failure, but let it propagate to the caller
            self.log("error while checking for chk-already-in-grid",
                     failure=f, level=log.WEIRD, parent=lp, umid="jDtxZg")
            return f
        d.addErrback(_err)
        return d
示例#26
0
    def __init__(self, filenode, storage_broker, servermap):
        """Prepare to publish a new version of *filenode*, using *servermap*
        to decide share placement; log lines carry a short SI prefix."""
        self._node = filenode
        self._storage_broker = storage_broker
        self._servermap = servermap
        self._storage_index = self._node.get_storage_index()
        prefix = si_b2a(self._storage_index)[:5]
        self._log_prefix = prefix
        self._log_number = self.log("Publish(%s): starting" % prefix,
                                    parent=None)
        self._running = True
        self._first_write_error = None

        status = PublishStatus()
        status.set_storage_index(self._storage_index)
        status.set_helper(False)
        status.set_progress(0.0)
        status.set_active(True)
        self._status = status
示例#27
0
    def __init__(self, filenode, storage_broker, servermap):
        """Prepare to publish a new version of *filenode*, using *servermap*
        to decide share placement; log lines carry a short SI prefix."""
        self._node = filenode
        self._storage_broker = storage_broker
        self._servermap = servermap
        self._storage_index = self._node.get_storage_index()
        self._log_prefix = prefix = si_b2a(self._storage_index)[:5]
        num = self.log("Publish(%s): starting" % prefix, parent=None)
        self._log_number = num
        self._running = True
        self._first_write_error = None

        self._status = PublishStatus()
        self._status.set_storage_index(self._storage_index)
        self._status.set_helper(False)
        self._status.set_progress(0.0)
        self._status.set_active(True)
示例#28
0
    def remote_upload_chk(self, storage_index):
        """See ``RIHelper.upload_chk``: return ``(None, helper)`` when an
        upload for this SI is already running, otherwise a Deferred that
        checks whether the file is already present in the grid."""
        self.count("chk_upload_helper.upload_requests")
        lp = self.log(format="helper: upload_chk query for SI %(si)s",
                      si=si_b2a(storage_index))
        if storage_index in self._active_uploads:
            self.log("upload is currently active", parent=lp)
            uh = self._active_uploads[storage_index]
            return (None, uh)

        d = self._check_chk(storage_index, lp)
        d.addCallback(self._did_chk_check, storage_index, lp)
        def _err(f):
            # log the check failure, but let it propagate to the caller
            self.log("error while checking for chk-already-in-grid",
                     failure=f, level=log.WEIRD, parent=lp, umid="jDtxZg")
            return f
        d.addErrback(_err)
        return d
示例#29
0
 def __repr__(self):
     """Short debug representation, including the SI prefix when known."""
     if not hasattr(self, "_storage_index"):
         return "<Encoder for unknown storage index>"
     return "<Encoder for %s>" % si_b2a(self._storage_index)[:5]
示例#30
0
File: debug.py  Project: drewp/tahoe-lafs
def dump_uri_instance(u, nodeid, secret, out, show_header=True):
    """Print a human-readable description of cap *u* to *out*.

    Dispatches on the URI type (CHK / literal / SSK / directory variants);
    ``show_header`` suppresses the type banner when recursing into the
    filenode cap wrapped by a directory cap.  Secrets derived from *nodeid*
    and *secret* are printed via ``_dump_secrets``.  (Python 2 ``print >>``
    syntax.)
    """
    from allmydata import uri
    from allmydata.storage.server import si_b2a
    from allmydata.util import base32, hashutil
    from allmydata.util.encodingutil import quote_output

    if isinstance(u, uri.CHKFileURI):
        if show_header:
            print >>out, "CHK File:"
        print >>out, " key:", base32.b2a(u.key)
        print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
        print >>out, " size:", u.size
        print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
        print >>out, " storage index:", si_b2a(u.get_storage_index())
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.CHKFileVerifierURI):
        if show_header:
            print >>out, "CHK Verifier URI:"
        print >>out, " UEB hash:", base32.b2a(u.uri_extension_hash)
        print >>out, " size:", u.size
        print >>out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
        print >>out, " storage index:", si_b2a(u.get_storage_index())

    elif isinstance(u, uri.LiteralFileURI):
        if show_header:
            print >>out, "Literal File URI:"
        print >>out, " data:", quote_output(u.data)

    elif isinstance(u, uri.WriteableSSKFileURI):
        if show_header:
            print >>out, "SSK Writeable URI:"
        print >>out, " writekey:", base32.b2a(u.writekey)
        print >>out, " readkey:", base32.b2a(u.readkey)
        print >>out, " storage index:", si_b2a(u.get_storage_index())
        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
        print >>out
        if nodeid:
            # the write enabler is derived from the writekey and this node
            we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
            print >>out, " write_enabler:", base32.b2a(we)
            print >>out
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)

    elif isinstance(u, uri.ReadonlySSKFileURI):
        if show_header:
            print >>out, "SSK Read-only URI:"
        print >>out, " readkey:", base32.b2a(u.readkey)
        print >>out, " storage index:", si_b2a(u.get_storage_index())
        print >>out, " fingerprint:", base32.b2a(u.fingerprint)
    elif isinstance(u, uri.SSKVerifierURI):
        if show_header:
            print >>out, "SSK Verifier URI:"
        print >>out, " storage index:", si_b2a(u.get_storage_index())
        print >>out, " fingerprint:", base32.b2a(u.fingerprint)

    # directory caps wrap a filenode cap: recurse without the banner
    elif isinstance(u, uri.DirectoryURI):
        if show_header:
            print >>out, "Directory Writeable URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ReadonlyDirectoryURI):
        if show_header:
            print >>out, "Directory Read-only URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.DirectoryURIVerifier):
        if show_header:
            print >>out, "Directory Verifier URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    else:
        print >>out, "unknown cap type"
示例#31
0
def dump_uri_instance(u, nodeid, secret, out, show_header=True):
    """
    Write a human-readable description of the cap object ``u`` to ``out``.

    Dispatches on the concrete ``allmydata.uri`` class of ``u`` and prints
    its public fields (keys, size, k/N, storage index, fingerprint).  For
    writeable SSK/MDMF caps, if ``nodeid`` is given, also derives and prints
    the per-server write-enabler.  ``secret`` and ``nodeid`` are forwarded
    to ``_dump_secrets`` (defined elsewhere in this file) for cap types with
    derivable secrets.  Directory caps recurse into their wrapped filenode
    cap with ``show_header=False`` so only the directory header is shown.

    :param u: a cap instance from ``allmydata.uri``
    :param nodeid: optional server node id used to derive the write-enabler
    :param secret: forwarded to ``_dump_secrets``
    :param out: file-like object to print to
    :param show_header: if True, print a one-line header naming the cap type
    """
    # Function-local imports -- presumably to keep CLI startup cheap; TODO confirm.
    from allmydata import uri
    from allmydata.storage.server import si_b2a
    from allmydata.util import base32, hashutil
    from allmydata.util.encodingutil import quote_output

    # NOTE(review): branch order matters here if any of these classes are
    # subclasses of one another -- confirm against allmydata.uri before
    # reordering.
    if isinstance(u, uri.CHKFileURI):
        if show_header:
            print("CHK File:", file=out)
        print(" key:", base32.b2a(u.key), file=out)
        print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out)
        print(" size:", u.size, file=out)
        print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.CHKFileVerifierURI):
        if show_header:
            print("CHK Verifier URI:", file=out)
        print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out)
        print(" size:", u.size, file=out)
        print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)

    elif isinstance(u, uri.LiteralFileURI):
        if show_header:
            print("Literal File URI:", file=out)
        # Literal caps embed the data itself; quote it for safe terminal output.
        print(" data:", quote_output(u.data), file=out)

    elif isinstance(u, uri.WriteableSSKFileURI): # SDMF
        if show_header:
            print("SDMF Writeable URI:", file=out)
        print(" writekey:", base32.b2a(u.writekey), file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
        print(file=out)
        if nodeid:
            # The write-enabler is server-specific: derived from writekey + nodeid.
            we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
            print(" write_enabler:", base32.b2a(we), file=out)
            print(file=out)
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.ReadonlySSKFileURI):
        if show_header:
            print("SDMF Read-only URI:", file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
    elif isinstance(u, uri.SSKVerifierURI):
        if show_header:
            print("SDMF Verifier URI:", file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)

    elif isinstance(u, uri.WriteableMDMFFileURI): # MDMF
        if show_header:
            print("MDMF Writeable URI:", file=out)
        print(" writekey:", base32.b2a(u.writekey), file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
        print(file=out)
        if nodeid:
            # Same derivation as the SDMF case above.
            we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
            print(" write_enabler:", base32.b2a(we), file=out)
            print(file=out)
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.ReadonlyMDMFFileURI):
        if show_header:
            print("MDMF Read-only URI:", file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
    elif isinstance(u, uri.MDMFVerifierURI):
        if show_header:
            print("MDMF Verifier URI:", file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)


    # Directory caps wrap a filenode cap; print a header then recurse into it.
    elif isinstance(u, uri.ImmutableDirectoryURI): # CHK-based directory
        if show_header:
            print("CHK Directory URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ImmutableDirectoryURIVerifier):
        if show_header:
            print("CHK Directory Verifier URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)

    elif isinstance(u, uri.DirectoryURI): # SDMF-based directory
        if show_header:
            print("Directory Writeable URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ReadonlyDirectoryURI):
        if show_header:
            print("Directory Read-only URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.DirectoryURIVerifier):
        if show_header:
            print("Directory Verifier URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)

    elif isinstance(u, uri.MDMFDirectoryURI): # MDMF-based directory
        if show_header:
            print("Directory Writeable URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ReadonlyMDMFDirectoryURI):
        if show_header:
            print("Directory Read-only URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.MDMFDirectoryURIVerifier):
        if show_header:
            print("Directory Verifier URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)

    else:
        print("unknown cap type", file=out)
示例#32
0
 def __repr__(self):
     """Debug representation: object id, server name, and storage index."""
     fields = (id(self), self._server.get_name(), si_b2a(self._storage_index))
     return "<ReadBucketProxy %s to peer [%s] SI %s>" % fields
示例#33
0
 def to_string(self):
     """Serialize this verifier cap as a ``URI:MDMF-Verifier`` byte string."""
     assert isinstance(self.storage_index, bytes)
     assert isinstance(self.fingerprint, bytes)
     si = si_b2a(self.storage_index)
     fp = base32.b2a(self.fingerprint)
     return b'URI:MDMF-Verifier:%s:%s' % (si, fp)
示例#34
0
 def __repr__(self):
     """Human-readable identifier showing peer name and storage index."""
     return "<ReadBucketProxy {} to peer [{}] SI {}>".format(
         id(self), self._server.get_name(), si_b2a(self._storage_index))
示例#35
0
 def to_string(self):
     """Render this verifier cap in its canonical URI:MDMF-Verifier form."""
     assert isinstance(self.storage_index, str)
     assert isinstance(self.fingerprint, str)
     parts = (si_b2a(self.storage_index), base32.b2a(self.fingerprint))
     return 'URI:MDMF-Verifier:%s:%s' % parts
示例#36
0
 def __repr__(self):
     """Show the (abbreviated) storage index once it has been assigned."""
     try:
         si = self._storage_index
     except AttributeError:
         # Storage index not set yet -- encoding has not been configured.
         return "<Encoder for unknown storage index>"
     return "<Encoder for %s>" % si_b2a(si)[:5]
示例#37
0
文件: uri.py 项目: mk-fg/tahoe-lafs
 def to_string(self):
     """Return the canonical ``URI:SSK-Verifier`` string for this cap."""
     assert isinstance(self.storage_index, str)
     assert isinstance(self.fingerprint, str)
     si = si_b2a(self.storage_index)
     fp = base32.b2a(self.fingerprint)
     return "URI:SSK-Verifier:%s:%s" % (si, fp)
示例#38
0
文件: debug.py 项目: cpelsser/tamias
def dump_uri_instance(u, nodeid, secret, out, show_header=True):
    """
    Write a human-readable description of the cap object ``u`` to ``out``.

    (Python 2 variant.)  Dispatches on the concrete ``allmydata.uri`` class
    of ``u`` and prints its public fields.  If ``nodeid`` is given, the
    writeable-SSK branch also derives and prints the per-server
    write-enabler.  ``secret`` and ``nodeid`` are forwarded to
    ``_dump_secrets`` (defined elsewhere in this file).  Directory caps
    recurse into their wrapped filenode cap with ``show_header=False``.

    :param u: a cap instance from ``allmydata.uri``
    :param nodeid: optional server node id used to derive the write-enabler
    :param secret: forwarded to ``_dump_secrets``
    :param out: file-like object to print to
    :param show_header: if True, print a one-line header naming the cap type
    """
    # Function-local imports -- presumably to keep CLI startup cheap; TODO confirm.
    from allmydata import uri
    from allmydata.storage.server import si_b2a
    from allmydata.util import base32, hashutil
    from allmydata.util.encodingutil import quote_output

    if isinstance(u, uri.CHKFileURI):
        if show_header:
            print >> out, "CHK File:"
        print >> out, " key:", base32.b2a(u.key)
        print >> out, " UEB hash:", base32.b2a(u.uri_extension_hash)
        print >> out, " size:", u.size
        print >> out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
        print >> out, " storage index:", si_b2a(u.get_storage_index())
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.CHKFileVerifierURI):
        if show_header:
            print >> out, "CHK Verifier URI:"
        print >> out, " UEB hash:", base32.b2a(u.uri_extension_hash)
        print >> out, " size:", u.size
        print >> out, " k/N: %d/%d" % (u.needed_shares, u.total_shares)
        print >> out, " storage index:", si_b2a(u.get_storage_index())

    elif isinstance(u, uri.LiteralFileURI):
        if show_header:
            print >> out, "Literal File URI:"
        # Literal caps embed the data itself; quote it for safe terminal output.
        print >> out, " data:", quote_output(u.data)

    elif isinstance(u, uri.WriteableSSKFileURI):
        if show_header:
            print >> out, "SSK Writeable URI:"
        print >> out, " writekey:", base32.b2a(u.writekey)
        print >> out, " readkey:", base32.b2a(u.readkey)
        print >> out, " storage index:", si_b2a(u.get_storage_index())
        print >> out, " fingerprint:", base32.b2a(u.fingerprint)
        print >> out
        if nodeid:
            # The write-enabler is server-specific: derived from writekey + nodeid.
            we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
            print >> out, " write_enabler:", base32.b2a(we)
            print >> out
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)

    elif isinstance(u, uri.ReadonlySSKFileURI):
        if show_header:
            print >> out, "SSK Read-only URI:"
        print >> out, " readkey:", base32.b2a(u.readkey)
        print >> out, " storage index:", si_b2a(u.get_storage_index())
        print >> out, " fingerprint:", base32.b2a(u.fingerprint)
    elif isinstance(u, uri.SSKVerifierURI):
        if show_header:
            print >> out, "SSK Verifier URI:"
        print >> out, " storage index:", si_b2a(u.get_storage_index())
        print >> out, " fingerprint:", base32.b2a(u.fingerprint)

    # Directory caps wrap a filenode cap; print a header then recurse into it.
    elif isinstance(u, uri.DirectoryURI):
        if show_header:
            print >> out, "Directory Writeable URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ReadonlyDirectoryURI):
        if show_header:
            print >> out, "Directory Read-only URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.DirectoryURIVerifier):
        if show_header:
            print >> out, "Directory Verifier URI:"
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    else:
        print >> out, "unknown cap type"
示例#39
0
def dump_uri_instance(u, nodeid, secret, out, show_header=True):
    """
    Write a human-readable description of the cap object ``u`` to ``out``.

    Dispatches on the concrete ``allmydata.uri`` class of ``u`` and prints
    its public fields (keys, size, k/N, storage index, fingerprint).  For
    writeable SSK/MDMF caps, if ``nodeid`` is given, also derives and prints
    the per-server write-enabler.  ``secret`` and ``nodeid`` are forwarded
    to ``_dump_secrets`` (defined elsewhere in this file).  Directory caps
    recurse into their wrapped filenode cap with ``show_header=False``.

    :param u: a cap instance from ``allmydata.uri``
    :param nodeid: optional server node id used to derive the write-enabler
    :param secret: forwarded to ``_dump_secrets``
    :param out: file-like object to print to
    :param show_header: if True, print a one-line header naming the cap type
    """
    # Function-local imports -- presumably to keep CLI startup cheap; TODO confirm.
    from allmydata import uri
    from allmydata.storage.server import si_b2a
    from allmydata.util import base32, hashutil
    from allmydata.util.encodingutil import quote_output

    # NOTE(review): branch order matters here if any of these classes are
    # subclasses of one another -- confirm against allmydata.uri before
    # reordering.
    if isinstance(u, uri.CHKFileURI):
        if show_header:
            print("CHK File:", file=out)
        print(" key:", base32.b2a(u.key), file=out)
        print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out)
        print(" size:", u.size, file=out)
        print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.CHKFileVerifierURI):
        if show_header:
            print("CHK Verifier URI:", file=out)
        print(" UEB hash:", base32.b2a(u.uri_extension_hash), file=out)
        print(" size:", u.size, file=out)
        print(" k/N: %d/%d" % (u.needed_shares, u.total_shares), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)

    elif isinstance(u, uri.LiteralFileURI):
        if show_header:
            print("Literal File URI:", file=out)
        # Literal caps embed the data itself; quote it for safe terminal output.
        print(" data:", quote_output(u.data), file=out)

    elif isinstance(u, uri.WriteableSSKFileURI):  # SDMF
        if show_header:
            print("SDMF Writeable URI:", file=out)
        print(" writekey:", base32.b2a(u.writekey), file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
        print(file=out)
        if nodeid:
            # The write-enabler is server-specific: derived from writekey + nodeid.
            we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
            print(" write_enabler:", base32.b2a(we), file=out)
            print(file=out)
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.ReadonlySSKFileURI):
        if show_header:
            print("SDMF Read-only URI:", file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
    elif isinstance(u, uri.SSKVerifierURI):
        if show_header:
            print("SDMF Verifier URI:", file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)

    elif isinstance(u, uri.WriteableMDMFFileURI):  # MDMF
        if show_header:
            print("MDMF Writeable URI:", file=out)
        print(" writekey:", base32.b2a(u.writekey), file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
        print(file=out)
        if nodeid:
            # Same derivation as the SDMF case above.
            we = hashutil.ssk_write_enabler_hash(u.writekey, nodeid)
            print(" write_enabler:", base32.b2a(we), file=out)
            print(file=out)
        _dump_secrets(u.get_storage_index(), secret, nodeid, out)
    elif isinstance(u, uri.ReadonlyMDMFFileURI):
        if show_header:
            print("MDMF Read-only URI:", file=out)
        print(" readkey:", base32.b2a(u.readkey), file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)
    elif isinstance(u, uri.MDMFVerifierURI):
        if show_header:
            print("MDMF Verifier URI:", file=out)
        print(" storage index:", si_b2a(u.get_storage_index()), file=out)
        print(" fingerprint:", base32.b2a(u.fingerprint), file=out)

    # Directory caps wrap a filenode cap; print a header then recurse into it.
    elif isinstance(u, uri.ImmutableDirectoryURI):  # CHK-based directory
        if show_header:
            print("CHK Directory URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ImmutableDirectoryURIVerifier):
        if show_header:
            print("CHK Directory Verifier URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)

    elif isinstance(u, uri.DirectoryURI):  # SDMF-based directory
        if show_header:
            print("Directory Writeable URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ReadonlyDirectoryURI):
        if show_header:
            print("Directory Read-only URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.DirectoryURIVerifier):
        if show_header:
            print("Directory Verifier URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)

    elif isinstance(u, uri.MDMFDirectoryURI):  # MDMF-based directory
        if show_header:
            print("Directory Writeable URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.ReadonlyMDMFDirectoryURI):
        if show_header:
            print("Directory Read-only URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)
    elif isinstance(u, uri.MDMFDirectoryURIVerifier):
        if show_header:
            print("Directory Verifier URI:", file=out)
        dump_uri_instance(u._filenode_uri, nodeid, secret, out, False)

    else:
        print("unknown cap type", file=out)
示例#40
0
 def to_string(self):
     """Serialize this SSK verifier cap into its URI string form."""
     assert isinstance(self.storage_index, str)
     assert isinstance(self.fingerprint, str)
     fields = (si_b2a(self.storage_index), base32.b2a(self.fingerprint))
     return 'URI:SSK-Verifier:%s:%s' % fields