def _got_parms(parms):
    # just pretend we did the upload
    needed_shares, happy, total_shares, segsize = parms
    ueb_data = {"needed_shares": needed_shares,
                "total_shares": total_shares,
                "segment_size": segsize,
                "size": size,
                }
    ueb_hash = "fake"
    v = uri.CHKFileVerifierURI(self._storage_index, "x" * 32,
                               needed_shares, total_shares, size)
    _UR = upload.UploadResults
    ur = _UR(file_size=size,
             ciphertext_fetched=0,
             preexisting_shares=0,
             pushed_shares=total_shares,
             sharemap={},
             servermap={},
             timings={},
             uri_extension_data=ueb_data,
             uri_extension_hash=ueb_hash,
             verifycapstr=v.to_string())
    self._upload_status.set_results(ur)
    return ur
def _test(self, uebdict):
    uebstring = uri.pack_extension(uebdict)
    uebhash = hashutil.uri_extension_hash(uebstring)
    fb = FakeBucketReaderWriterProxy()
    fb.put_uri_extension(uebstring)
    verifycap = uri.CHKFileVerifierURI(storage_index='x'*16,
                                       uri_extension_hash=uebhash,
                                       needed_shares=self.K,
                                       total_shares=self.M,
                                       size=self.SIZE)
    vup = checker.ValidatedExtendedURIProxy(fb, verifycap)
    return vup.start()
def _got_parms(parms):
    needed_shares, happy, total_shares, segsize = parms
    ueb_data = {"needed_shares": needed_shares,
                "total_shares": total_shares,
                "segment_size": segsize,
                "size": size,
                }
    self._results.uri_extension_data = ueb_data
    self._results.verifycapstr = uri.CHKFileVerifierURI(self._storage_index, "x"*32,
                                                        needed_shares, total_shares,
                                                        size).to_string()
    return self._results
def done(self, res):
    self.log("upload done", level=log.OPERATIONAL)
    self.set_status("Finished")
    self.set_encode_and_push_progress(extra=1.0) # done
    now = time.time()
    h_and_c_elapsed = now - self._start_hashing_and_close_timestamp
    self._times["hashes_and_close"] = h_and_c_elapsed
    total_elapsed = now - self._start_total_timestamp
    self._times["total_encode_and_push"] = total_elapsed

    # update our sharemap
    self._shares_placed = set(self.landlords.keys())

    return uri.CHKFileVerifierURI(self._storage_index, self.uri_extension_hash,
                                  self.required_shares, self.num_shares,
                                  self.file_size)
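# A minimal sketch (not from the original encoder code) of how the verify cap
# returned by done() above can be serialized and parsed back; to_string() and
# uri.from_string() are the same calls exercised in test_pack() further below.
# The concrete argument values here are placeholders, not real hashes.
from allmydata import uri

vcap = uri.CHKFileVerifierURI("\x00" * 16,   # storage index (16 bytes, placeholder)
                              "\x00" * 32,   # URI-extension hash (32 bytes, placeholder)
                              3, 10, 1234)   # needed shares, total shares, file size
vcap_s = vcap.to_string()                    # compact string form of the cap
assert uri.from_string(vcap_s).to_string() == vcap_s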
def dump_immutable_chk_share(f, out, options):
    from allmydata import uri
    from allmydata.util import base32
    from allmydata.immutable.layout import ReadBucketProxy
    from allmydata.util.encodingutil import quote_output, to_bytes

    # use a ReadBucketProxy to parse the bucket and find the uri extension
    bp = ReadBucketProxy(None, None, '')
    offsets = bp._parse_offsets(f.read_share_data(0, 0x44))
    print("%20s: %d" % ("version", bp._version), file=out)
    seek = offsets['uri_extension']
    length = struct.unpack(bp._fieldstruct,
                           f.read_share_data(seek, bp._fieldsize))[0]
    seek += bp._fieldsize
    UEB_data = f.read_share_data(seek, length)

    unpacked = uri.unpack_extension_readable(UEB_data)
    keys1 = ("size", "num_segments", "segment_size",
             "needed_shares", "total_shares")
    keys2 = ("codec_name", "codec_params", "tail_codec_params")
    keys3 = ("plaintext_hash", "plaintext_root_hash",
             "crypttext_hash", "crypttext_root_hash",
             "share_root_hash", "UEB_hash")
    display_keys = {"size": "file_size"}

    def to_string(v):
        if isinstance(v, bytes):
            return unicode(v, "utf-8")
        else:
            return str(v)

    for k in keys1:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
    print(file=out)
    for k in keys2:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)
    print(file=out)
    for k in keys3:
        if k in unpacked:
            dk = display_keys.get(k, k)
            print("%20s: %s" % (dk, to_string(unpacked[k])), file=out)

    leftover = set(unpacked.keys()) - set(keys1 + keys2 + keys3)
    if leftover:
        print(file=out)
        print("LEFTOVER:", file=out)
        for k in sorted(leftover):
            print("%20s: %s" % (k, to_string(unpacked[k])), file=out)

    # the storage index isn't stored in the share itself, so we depend upon
    # knowing the parent directory name to get it
    pieces = options['filename'].split(os.sep)
    if len(pieces) >= 2:
        piece = to_bytes(pieces[-2])
        if base32.could_be_base32_encoded(piece):
            storage_index = base32.a2b(piece)
            uri_extension_hash = base32.a2b(unpacked["UEB_hash"])
            u = uri.CHKFileVerifierURI(storage_index, uri_extension_hash,
                                       unpacked["needed_shares"],
                                       unpacked["total_shares"],
                                       unpacked["size"])
            verify_cap = u.to_string()
            print("%20s: %s" % ("verify-cap",
                                quote_output(verify_cap, quotemarks=False)),
                  file=out)

    sizes = {}
    sizes['data'] = (offsets['plaintext_hash_tree'] - offsets['data'])
    sizes['validation'] = (offsets['uri_extension'] -
                           offsets['plaintext_hash_tree'])
    sizes['uri-extension'] = len(UEB_data)
    print(file=out)
    print(" Size of data within the share:", file=out)
    for k in sorted(sizes):
        print("%20s: %s" % (k, sizes[k]), file=out)

    if options['offsets']:
        print(file=out)
        print(" Section Offsets:", file=out)
        print("%20s: %s" % ("share data", f._data_offset), file=out)
        for k in ["data", "plaintext_hash_tree", "crypttext_hash_tree",
                  "block_hashes", "share_hashes", "uri_extension"]:
            name = {"data": "block data"}.get(k, k)
            offset = f._data_offset + offsets[k]
            print(" %20s: %s (0x%x)" % (name, offset, offset), file=out)
        print("%20s: %s" % ("leases", f._lease_offset), file=out)
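# Usage note (the exact subcommand is an assumption, not shown in this excerpt):
# this dumper is normally reached through the Tahoe-LAFS debug CLI, roughly:
#   tahoe debug dump-share <path-to-share-file>
# As the comment in the code says, the verify-cap line is only printed when the
# share's parent directory is named with its base32-encoded storage index.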
def test_pack(self):
    key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
    storage_index = hashutil.storage_index_hash(key)
    uri_extension_hash = hashutil.uri_extension_hash("stuff")
    needed_shares = 25
    total_shares = 100
    size = 1234
    u = uri.CHKFileURI(key=key,
                       uri_extension_hash=uri_extension_hash,
                       needed_shares=needed_shares,
                       total_shares=total_shares,
                       size=size)
    self.failUnlessReallyEqual(u.get_storage_index(), storage_index)
    self.failUnlessReallyEqual(u.key, key)
    self.failUnlessReallyEqual(u.uri_extension_hash, uri_extension_hash)
    self.failUnlessReallyEqual(u.needed_shares, needed_shares)
    self.failUnlessReallyEqual(u.total_shares, total_shares)
    self.failUnlessReallyEqual(u.size, size)
    self.failUnless(u.is_readonly())
    self.failIf(u.is_mutable())
    self.failUnless(IURI.providedBy(u))
    self.failUnless(IFileURI.providedBy(u))
    self.failIf(IDirnodeURI.providedBy(u))
    self.failUnlessReallyEqual(u.get_size(), 1234)

    u_ro = u.get_readonly()
    self.failUnlessIdentical(u, u_ro)

    u2 = uri.from_string(u.to_string())
    self.failUnlessReallyEqual(u2.get_storage_index(), storage_index)
    self.failUnlessReallyEqual(u2.key, key)
    self.failUnlessReallyEqual(u2.uri_extension_hash, uri_extension_hash)
    self.failUnlessReallyEqual(u2.needed_shares, needed_shares)
    self.failUnlessReallyEqual(u2.total_shares, total_shares)
    self.failUnlessReallyEqual(u2.size, size)
    self.failUnless(u2.is_readonly())
    self.failIf(u2.is_mutable())
    self.failUnless(IURI.providedBy(u2))
    self.failUnless(IFileURI.providedBy(u2))
    self.failIf(IDirnodeURI.providedBy(u2))
    self.failUnlessReallyEqual(u2.get_size(), 1234)

    u2i = uri.from_string(u.to_string(), deep_immutable=True)
    self.failUnlessReallyEqual(u.to_string(), u2i.to_string())
    u2ro = uri.from_string(uri.ALLEGED_READONLY_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2ro.to_string())
    u2imm = uri.from_string(uri.ALLEGED_IMMUTABLE_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2imm.to_string())

    v = u.get_verify_cap()
    self.failUnless(isinstance(v.to_string(), str))
    self.failUnless(v.is_readonly())
    self.failIf(v.is_mutable())

    v2 = uri.from_string(v.to_string())
    self.failUnlessReallyEqual(v, v2)

    v3 = uri.CHKFileVerifierURI(storage_index="\x00" * 16,
                                uri_extension_hash="\x00" * 32,
                                needed_shares=3,
                                total_shares=10,
                                size=1234)
    self.failUnless(isinstance(v3.to_string(), str))
    self.failUnless(v3.is_readonly())
    self.failIf(v3.is_mutable())