def test_pack_badly(self):
    """
    Constructing CHK caps with an unknown keyword argument, or with a
    required argument missing, must raise TypeError.
    """
    # bytes, not str: hashutil's hash functions operate on byte strings,
    # matching the b"..." fixtures used elsewhere in this suite.
    key = b"\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
    storage_index = hashutil.storage_index_hash(key)
    uri_extension_hash = hashutil.uri_extension_hash(b"stuff")
    needed_shares = 25
    total_shares = 100
    size = 1234
    # an unrecognized keyword must be rejected outright
    self.failUnlessRaises(
        TypeError,
        uri.CHKFileURI,
        key=key,
        uri_extension_hash=uri_extension_hash,
        needed_shares=needed_shares,
        total_shares=total_shares,
        size=size,
        bogus_extra_argument="reject me",
    )
    self.failUnlessRaises(TypeError, uri.CHKFileVerifierURI, bogus="bogus")
    # a required argument must not be optional: leave size= missing
    self.failUnlessRaises(
        TypeError,
        uri.CHKFileVerifierURI,
        storage_index=storage_index,
        uri_extension_hash=uri_extension_hash,
        needed_shares=3,
        total_shares=10,
    )
def test_pack_badly(self):
    """
    Passing an unexpected keyword argument, or omitting a required one,
    must make the CHK cap constructors raise TypeError.
    """
    key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
    storage_index = hashutil.storage_index_hash(key)
    uri_extension_hash = hashutil.uri_extension_hash("stuff")
    needed_shares = 25
    total_shares = 100
    size = 1234
    good_kwargs = dict(key=key,
                       uri_extension_hash=uri_extension_hash,
                       needed_shares=needed_shares,
                       total_shares=total_shares,
                       size=size)
    # an unrecognized keyword must be rejected
    self.failUnlessRaises(TypeError, uri.CHKFileURI,
                          bogus_extra_argument="reject me",
                          **good_kwargs)
    self.failUnlessRaises(TypeError, uri.CHKFileVerifierURI, bogus="bogus")
    # a missing required argument (size=) must also be rejected
    self.failUnlessRaises(TypeError, uri.CHKFileVerifierURI,
                          storage_index=storage_index,
                          uri_extension_hash=uri_extension_hash,
                          needed_shares=3,
                          total_shares=10)
def __init__(self, key, uri_extension_hash, needed_shares, total_shares,
             size):
    """
    Build a CHK file cap from its component fields.

    The storage index is not accepted as an argument: it is derived
    from ``key`` via ``hashutil.storage_index_hash``.

    :raises BadURIError: if the derived storage index is not 16 bytes
        long (sha256 hash truncated to 128 bits).
    """
    self.key = key
    self.uri_extension_hash = uri_extension_hash
    self.needed_shares = needed_shares
    self.total_shares = total_shares
    self.size = size
    self.storage_index = hashutil.storage_index_hash(self.key)
    # idiomatic comparison: `!=` instead of `not ... == ...`
    if len(self.storage_index) != 16:  # sha256 hash truncated to 128 bits
        raise BadURIError("storage index must be 16 bytes long")
def test_previous_upload_failed(self):
    """
    An upload which fails (leaving its ciphertext in the CHK_encoding/
    directory) must not prevent a later attempt to upload the same file
    from working.  We simulate the earlier failure by populating the
    directory manually; the hardest part is guessing the storage index.
    """
    self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
    self.setUpHelper(self.basedir)
    k = FakeClient.DEFAULT_ENCODING_PARAMETERS["k"]
    n = FakeClient.DEFAULT_ENCODING_PARAMETERS["n"]
    max_segsize = FakeClient.DEFAULT_ENCODING_PARAMETERS[
        "max_segment_size"]
    segsize = min(max_segsize, len(DATA))
    # this must be a multiple of 'required_shares'==k
    segsize = mathutil.next_multiple(segsize, k)
    key = hashutil.convergence_hash(k, n, segsize, DATA,
                                    "test convergence string")
    assert len(key) == 16
    encryptor = aes.create_encryptor(key)
    SI = hashutil.storage_index_hash(key)
    SI_s = si_b2a(SI)
    encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
    # context manager guarantees the file is closed even if
    # encrypt_data raises (the original open/close pair leaked it)
    with open(encfile, "wb") as f:
        f.write(aes.encrypt_data(encryptor, DATA))
    u = upload.Uploader(self.helper_furl)
    u.setServiceParent(self.s)
    d = wait_a_few_turns()
    def _ready(res):
        assert u._helper
        return upload_data(u, DATA, convergence="test convergence string")
    d.addCallback(_ready)
    def _uploaded(results):
        the_uri = results.get_uri()
        assert "CHK" in the_uri
    d.addCallback(_uploaded)
    def _check_empty(res):
        # a successful upload must clean up both scratch directories
        files = os.listdir(os.path.join(self.basedir, "CHK_encoding"))
        self.failUnlessEqual(files, [])
        files = os.listdir(os.path.join(self.basedir, "CHK_incoming"))
        self.failUnlessEqual(files, [])
    d.addCallback(_check_empty)
    return d
def test_previous_upload_failed(self):
    """
    Leftover ciphertext in CHK_encoding/ from an upload that failed must
    not block a later upload of the same file.  Simulate the earlier
    failure by writing the ciphertext there by hand; the tricky part is
    predicting the storage index the helper will use.
    """
    self.basedir = "helper/AssistedUpload/test_previous_upload_failed"
    self.setUpHelper(self.basedir)
    params = FakeClient.DEFAULT_ENCODING_PARAMETERS
    k = params["k"]
    n = params["n"]
    # cap the segment size at the file length, then round up to a
    # multiple of 'required_shares'==k
    segsize = min(params["max_segment_size"], len(DATA))
    segsize = mathutil.next_multiple(segsize, k)
    key = hashutil.convergence_hash(k, n, segsize, DATA,
                                    "test convergence string")
    assert len(key) == 16
    encryptor = AES(key)
    SI_s = si_b2a(hashutil.storage_index_hash(key))
    encfile = os.path.join(self.basedir, "CHK_encoding", SI_s)
    f = open(encfile, "wb")
    f.write(encryptor.process(DATA))
    f.close()
    u = upload.Uploader(self.helper_furl)
    u.setServiceParent(self.s)
    d = wait_a_few_turns()
    def _ready(ign):
        assert u._helper
        return upload_data(u, DATA, convergence="test convergence string")
    d.addCallback(_ready)
    def _uploaded(results):
        the_uri = results.get_uri()
        assert "CHK" in the_uri
    d.addCallback(_uploaded)
    def _check_empty(ign):
        # both scratch directories must be empty after the upload
        for subdir in ["CHK_encoding", "CHK_incoming"]:
            files = os.listdir(os.path.join(self.basedir, subdir))
            self.failUnlessEqual(files, [])
    d.addCallback(_check_empty)
    return d
def test_pack(self):
    """
    A CHKFileURI exposes its component fields, round-trips through its
    string and human encodings, and yields a matching verifier cap.
    """
    key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
    storage_index = hashutil.storage_index_hash(key)
    uri_extension_hash = hashutil.uri_extension_hash("stuff")
    needed_shares = 25
    total_shares = 100
    size = 1234
    u = uri.CHKFileURI(key=key,
                       uri_extension_hash=uri_extension_hash,
                       needed_shares=needed_shares,
                       total_shares=total_shares,
                       size=size)
    def _check_cap(c):
        # the per-cap field and interface checks, shared by u and u2
        self.failUnlessReallyEqual(c.get_storage_index(), storage_index)
        self.failUnlessReallyEqual(c.key, key)
        self.failUnlessReallyEqual(c.uri_extension_hash,
                                   uri_extension_hash)
        self.failUnlessReallyEqual(c.needed_shares, needed_shares)
        self.failUnlessReallyEqual(c.total_shares, total_shares)
        self.failUnlessReallyEqual(c.size, size)
        self.failUnless(c.is_readonly())
        self.failIf(c.is_mutable())
        self.failUnless(IURI.providedBy(c))
        self.failUnless(IFileURI.providedBy(c))
        self.failIf(IDirnodeURI.providedBy(c))
        self.failUnlessReallyEqual(c.get_size(), 1234)
    _check_cap(u)
    # a read-only cap is its own read-only form
    u_ro = u.get_readonly()
    self.failUnlessIdentical(u, u_ro)
    he = u.to_human_encoding()
    self.failUnlessReallyEqual(he,
                               "http://127.0.0.1:3456/uri/" + u.to_string())
    self.failUnlessReallyEqual(uri.CHKFileURI.init_from_human_encoding(he),
                               u)
    u2 = uri.from_string(u.to_string())
    _check_cap(u2)
    u2i = uri.from_string(u.to_string(), deep_immutable=True)
    self.failUnlessReallyEqual(u.to_string(), u2i.to_string())
    u2ro = uri.from_string(uri.ALLEGED_READONLY_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2ro.to_string())
    u2imm = uri.from_string(uri.ALLEGED_IMMUTABLE_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2imm.to_string())
    # the verifier cap derived from u must itself round-trip
    v = u.get_verify_cap()
    self.failUnless(isinstance(v.to_string(), str))
    self.failUnless(v.is_readonly())
    self.failIf(v.is_mutable())
    v2 = uri.from_string(v.to_string())
    self.failUnlessReallyEqual(v, v2)
    he = v.to_human_encoding()
    v2_h = uri.CHKFileVerifierURI.init_from_human_encoding(he)
    self.failUnlessReallyEqual(v2, v2_h)
    v3 = uri.CHKFileVerifierURI(storage_index="\x00"*16,
                                uri_extension_hash="\x00"*32,
                                needed_shares=3,
                                total_shares=10,
                                size=1234)
    self.failUnless(isinstance(v3.to_string(), str))
    self.failUnless(v3.is_readonly())
    self.failIf(v3.is_mutable())
def si(self, i):
    """Return a deterministic storage index derived from *i*."""
    seed = str(i)
    return hashutil.storage_index_hash(seed)
def test_pack(self):
    """
    A CHKFileURI exposes its component fields, round-trips through its
    string form, and yields a matching verifier cap.
    """
    key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
    storage_index = hashutil.storage_index_hash(key)
    uri_extension_hash = hashutil.uri_extension_hash("stuff")
    needed_shares = 25
    total_shares = 100
    size = 1234
    u = uri.CHKFileURI(key=key,
                       uri_extension_hash=uri_extension_hash,
                       needed_shares=needed_shares,
                       total_shares=total_shares,
                       size=size)
    def _check_cap(c):
        # the per-cap field and interface checks, shared by u and u2
        self.failUnlessReallyEqual(c.get_storage_index(), storage_index)
        self.failUnlessReallyEqual(c.key, key)
        self.failUnlessReallyEqual(c.uri_extension_hash,
                                   uri_extension_hash)
        self.failUnlessReallyEqual(c.needed_shares, needed_shares)
        self.failUnlessReallyEqual(c.total_shares, total_shares)
        self.failUnlessReallyEqual(c.size, size)
        self.failUnless(c.is_readonly())
        self.failIf(c.is_mutable())
        self.failUnless(IURI.providedBy(c))
        self.failUnless(IFileURI.providedBy(c))
        self.failIf(IDirnodeURI.providedBy(c))
        self.failUnlessReallyEqual(c.get_size(), 1234)
    _check_cap(u)
    # a read-only cap is its own read-only form
    u_ro = u.get_readonly()
    self.failUnlessIdentical(u, u_ro)
    u2 = uri.from_string(u.to_string())
    _check_cap(u2)
    u2i = uri.from_string(u.to_string(), deep_immutable=True)
    self.failUnlessReallyEqual(u.to_string(), u2i.to_string())
    u2ro = uri.from_string(uri.ALLEGED_READONLY_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2ro.to_string())
    u2imm = uri.from_string(uri.ALLEGED_IMMUTABLE_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2imm.to_string())
    # the verifier cap derived from u must itself round-trip
    v = u.get_verify_cap()
    self.failUnless(isinstance(v.to_string(), str))
    self.failUnless(v.is_readonly())
    self.failIf(v.is_mutable())
    v2 = uri.from_string(v.to_string())
    self.failUnlessReallyEqual(v, v2)
    v3 = uri.CHKFileVerifierURI(storage_index="\x00" * 16,
                                uri_extension_hash="\x00" * 32,
                                needed_shares=3,
                                total_shares=10,
                                size=1234)
    self.failUnless(isinstance(v3.to_string(), str))
    self.failUnless(v3.is_readonly())
    self.failIf(v3.is_mutable())
def si(self, i):
    """Return a deterministic storage index derived from *i* (bytes seed)."""
    seed = b"%d" % (i,)
    return hashutil.storage_index_hash(seed)