def test_mdmf_from_string(self):
    """Verify uri.from_string round-trips MDMF write, read-only, and verify caps."""
    # Make sure that the from_string utility function works with
    # MDMF caps.
    u1 = uri.WriteableMDMFFileURI(self.writekey, self.fingerprint)
    cap = u1.to_string()
    self.failUnless(uri.is_uri(cap))
    u2 = uri.from_string(cap)
    self.failUnlessReallyEqual(u1, u2)
    u3 = uri.from_string_mutable_filenode(cap)
    self.failUnlessEqual(u3, u1)
    # Same round-trip for the read-only cap.
    u1 = uri.ReadonlyMDMFFileURI(self.readkey, self.fingerprint)
    cap = u1.to_string()
    self.failUnless(uri.is_uri(cap))
    u2 = uri.from_string(cap)
    self.failUnlessReallyEqual(u1, u2)
    u3 = uri.from_string_mutable_filenode(cap)
    self.failUnlessEqual(u3, u1)
    # And for the verifier cap, which parses via from_string_verifier.
    u1 = uri.MDMFVerifierURI(self.storage_index, self.fingerprint)
    cap = u1.to_string()
    self.failUnless(uri.is_uri(cap))
    u2 = uri.from_string(cap)
    self.failUnlessReallyEqual(u1, u2)
    u3 = uri.from_string_verifier(cap)
    self.failUnlessEqual(u3, u1)
def test_from_future(self):
    """Unrecognized cap strings must parse to uri.UnknownURI, not raise."""
    # any URI type that we don't recognize should be treated as unknown
    future_uri = "I am a URI from the future. Whatever you do, don't "
    u = uri.from_string(future_uri)
    self.failUnless(isinstance(u, uri.UnknownURI))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u.get_readonly() is None)
    self.failUnless(u.get_error() is None)
    u2 = uri.UnknownURI(future_uri, error=CapConstraintError("..."))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u2.get_readonly() is None)
    self.failUnless(isinstance(u2.get_error(), CapConstraintError))

    # Future caps might have non-ASCII chars in them. (Or maybe not,
    # who can tell about the future?)
    future_uri = u"I am a cap from the \u263A future. Whatever you ".encode(
        'utf-8')
    u = uri.from_string(future_uri)
    self.failUnless(isinstance(u, uri.UnknownURI))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u.get_readonly() is None)
    self.failUnless(u.get_error() is None)
    u2 = uri.UnknownURI(future_uri, error=CapConstraintError("..."))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u2.get_readonly() is None)
    self.failUnless(isinstance(u2.get_error(), CapConstraintError))
def test_from_future(self):
    """Unrecognized cap strings must parse to uri.UnknownURI, not raise."""
    # any URI type that we don't recognize should be treated as unknown
    future_uri = "I am a URI from the future. Whatever you do, don't "
    u = uri.from_string(future_uri)
    self.failUnless(isinstance(u, uri.UnknownURI))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u.get_readonly() is None)
    self.failUnless(u.get_error() is None)
    u2 = uri.UnknownURI(future_uri, error=CapConstraintError("..."))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u2.get_readonly() is None)
    self.failUnless(isinstance(u2.get_error(), CapConstraintError))

    # Future caps might have non-ASCII chars in them. (Or maybe not,
    # who can tell about the future?)
    future_uri = u"I am a cap from the \u263A future. Whatever you ".encode('utf-8')
    u = uri.from_string(future_uri)
    self.failUnless(isinstance(u, uri.UnknownURI))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u.get_readonly() is None)
    self.failUnless(u.get_error() is None)
    u2 = uri.UnknownURI(future_uri, error=CapConstraintError("..."))
    self.failUnlessReallyEqual(u.to_string(), future_uri)
    self.failUnless(u2.get_readonly() is None)
    self.failUnless(isinstance(u2.get_error(), CapConstraintError))
def test_mdmf_verifier(self):
    """MDMF directory verify-caps stay immutable through serialization and attenuation."""
    # I'm not sure what I want to write here yet.
    writekey = "\x01" * 16
    fingerprint = "\x02" * 32
    uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
    d1 = uri.MDMFDirectoryURI(uri1)
    v1 = d1.get_verify_cap()
    self.failUnlessIsInstance(v1, uri.MDMFDirectoryURIVerifier)
    self.failIf(v1.is_mutable())

    # Serialize and re-parse; the verify cap must be unchanged.
    d2 = uri.from_string(d1.to_string())
    v2 = d2.get_verify_cap()
    self.failUnlessIsInstance(v2, uri.MDMFDirectoryURIVerifier)
    self.failIf(v2.is_mutable())
    self.failUnlessEqual(v2.to_string(), v1.to_string())

    # Now attenuate and make sure that works correctly.
    r3 = d2.get_readonly()
    v3 = r3.get_verify_cap()
    self.failUnlessIsInstance(v3, uri.MDMFDirectoryURIVerifier)
    self.failIf(v3.is_mutable())
    self.failUnlessEqual(v3.to_string(), v1.to_string())
    r4 = uri.from_string(r3.to_string())
    v4 = r4.get_verify_cap()
    self.failUnlessIsInstance(v4, uri.MDMFDirectoryURIVerifier)
    self.failIf(v4.is_mutable())
    self.failUnlessEqual(v4.to_string(), v3.to_string())
def _then(ign):
    # Callback: the node's write URI must parse as a writeable MDMF cap,
    # and its read-only URI as the read-only MDMF cap type.
    mdmf_uri = self.mdmf_node.get_uri()
    cap = uri.from_string(mdmf_uri)
    self.failUnless(isinstance(cap, uri.WriteableMDMFFileURI))
    readonly_mdmf_uri = self.mdmf_node.get_readonly_uri()
    cap = uri.from_string(readonly_mdmf_uri)
    self.failUnless(isinstance(cap, uri.ReadonlyMDMFFileURI))
def test_immutable(self):
    """Exercise ImmutableDirectoryURI: interfaces, round-trips, attenuation, verifier caps."""
    readkey = "\x01" * 16
    uri_extension_hash = hashutil.uri_extension_hash("stuff")
    needed_shares = 3
    total_shares = 10
    size = 1234

    fnuri = uri.CHKFileURI(key=readkey,
                           uri_extension_hash=uri_extension_hash,
                           needed_shares=needed_shares,
                           total_shares=total_shares,
                           size=size)
    fncap = fnuri.to_string()
    # Known-good serialization for the fixed key/hash above.
    self.failUnlessReallyEqual(fncap, "URI:CHK:aeaqcaibaeaqcaibaeaqcaibae:nf3nimquen7aeqm36ekgxomalstenpkvsdmf6fplj7swdatbv5oa:3:10:1234")
    u1 = uri.ImmutableDirectoryURI(fnuri)
    self.failUnless(u1.is_readonly())
    self.failIf(u1.is_mutable())
    self.failUnless(IURI.providedBy(u1))
    self.failIf(IFileURI.providedBy(u1))
    self.failUnless(IDirnodeURI.providedBy(u1))
    self.failUnless("DirectoryURI" in str(u1))
    u1_filenode = u1.get_filenode_cap()
    self.failIf(u1_filenode.is_mutable())
    self.failUnless(u1_filenode.is_readonly())
    self.failUnlessReallyEqual(u1_filenode.to_string(), fncap)
    self.failUnless(str(u1))

    # Round-trip through from_string preserves everything.
    u2 = uri.from_string(u1.to_string())
    self.failUnlessReallyEqual(u1.to_string(), u2.to_string())
    self.failUnless(u2.is_readonly())
    self.failIf(u2.is_mutable())
    self.failUnless(IURI.providedBy(u2))
    self.failIf(IFileURI.providedBy(u2))
    self.failUnless(IDirnodeURI.providedBy(u2))

    # deep_immutable parsing is a no-op for an already-immutable cap.
    u2i = uri.from_string(u1.to_string(), deep_immutable=True)
    self.failUnlessReallyEqual(u1.to_string(), u2i.to_string())

    # get_readonly on an immutable cap returns an equivalent cap.
    u3 = u2.get_readonly()
    self.failUnlessReallyEqual(u3.to_string(), u2.to_string())
    self.failUnless(str(u3))

    u3i = uri.from_string(u2.to_string(), deep_immutable=True)
    self.failUnlessReallyEqual(u2.to_string(), u3i.to_string())

    u2_verifier = u2.get_verify_cap()
    self.failUnless(isinstance(u2_verifier,
                               uri.ImmutableDirectoryURIVerifier),
                    u2_verifier)
    self.failUnless(IVerifierURI.providedBy(u2_verifier))
    u2vs = u2_verifier.to_string()
    # URI:DIR2-CHK-Verifier:$key:$ueb:$k:$n:$size
    self.failUnless(u2vs.startswith("URI:DIR2-CHK-Verifier:"), u2vs)
    u2_verifier_fileuri = u2_verifier.get_filenode_cap()
    self.failUnless(IVerifierURI.providedBy(u2_verifier_fileuri))
    u2vfs = u2_verifier_fileuri.to_string()
    # URI:CHK-Verifier:$key:$ueb:$k:$n:$size
    self.failUnlessReallyEqual(u2vfs, fnuri.get_verify_cap().to_string())
    # The dirnode verifier is the file verifier with a DIR2- prefix spliced in.
    self.failUnlessReallyEqual(u2vs[len("URI:DIR2-"):], u2vfs[len("URI:"):])
    self.failUnless(str(u2_verifier))
def _help_test(self, data):
    """Shared checks for LiteralFileURI built from *data*, incl. human-encoding round-trip."""
    u = uri.LiteralFileURI(data)
    self.failUnless(IURI.providedBy(u))
    self.failUnless(IFileURI.providedBy(u))
    self.failIf(IDirnodeURI.providedBy(u))
    self.failUnlessReallyEqual(u.data, data)
    self.failUnlessReallyEqual(u.get_size(), len(data))
    self.failUnless(u.is_readonly())
    self.failIf(u.is_mutable())

    u2 = uri.from_string(u.to_string())
    self.failUnless(IURI.providedBy(u2))
    self.failUnless(IFileURI.providedBy(u2))
    self.failIf(IDirnodeURI.providedBy(u2))
    self.failUnlessReallyEqual(u2.data, data)
    self.failUnlessReallyEqual(u2.get_size(), len(data))
    self.failUnless(u2.is_readonly())
    self.failIf(u2.is_mutable())

    # Literal caps are already immutable, so deep_immutable parsing succeeds.
    u2i = uri.from_string(u.to_string(), deep_immutable=True)
    self.failUnless(IFileURI.providedBy(u2i))
    self.failIf(IDirnodeURI.providedBy(u2i))
    self.failUnlessReallyEqual(u2i.data, data)
    self.failUnlessReallyEqual(u2i.get_size(), len(data))
    self.failUnless(u2i.is_readonly())
    self.failIf(u2i.is_mutable())

    # get_readonly returns the very same object; literals have no verify cap.
    u3 = u.get_readonly()
    self.failUnlessIdentical(u, u3)
    self.failUnlessReallyEqual(u.get_verify_cap(), None)

    he = u.to_human_encoding()
    u_h = uri.LiteralFileURI.init_from_human_encoding(he)
    self.failUnlessReallyEqual(u, u_h)
def _help_test(self, data):
    """Shared checks for LiteralFileURI built from *data*."""
    u = uri.LiteralFileURI(data)
    self.failUnless(IURI.providedBy(u))
    self.failUnless(IFileURI.providedBy(u))
    self.failIf(IDirnodeURI.providedBy(u))
    self.failUnlessReallyEqual(u.data, data)
    self.failUnlessReallyEqual(u.get_size(), len(data))
    self.failUnless(u.is_readonly())
    self.failIf(u.is_mutable())

    u2 = uri.from_string(u.to_string())
    self.failUnless(IURI.providedBy(u2))
    self.failUnless(IFileURI.providedBy(u2))
    self.failIf(IDirnodeURI.providedBy(u2))
    self.failUnlessReallyEqual(u2.data, data)
    self.failUnlessReallyEqual(u2.get_size(), len(data))
    self.failUnless(u2.is_readonly())
    self.failIf(u2.is_mutable())

    # Literal caps are already immutable, so deep_immutable parsing succeeds.
    u2i = uri.from_string(u.to_string(), deep_immutable=True)
    self.failUnless(IFileURI.providedBy(u2i))
    self.failIf(IDirnodeURI.providedBy(u2i))
    self.failUnlessReallyEqual(u2i.data, data)
    self.failUnlessReallyEqual(u2i.get_size(), len(data))
    self.failUnless(u2i.is_readonly())
    self.failIf(u2i.is_mutable())

    # get_readonly returns the very same object; literals have no verify cap.
    u3 = u.get_readonly()
    self.failUnlessIdentical(u, u3)
    self.failUnlessReallyEqual(u.get_verify_cap(), None)
def magic_folder_invite(node_directory, alias, nickname, treq):
    """
    Invite a user identified by the nickname to a folder owned by the alias

    :param unicode node_directory: The root of the Tahoe-LAFS node.

    :param unicode alias: The alias of the folder to which the invitation is
        being generated.

    :param unicode nickname: The nickname of the invitee.

    :param HTTPClient treq: An ``HTTPClient`` or similar object to use to
        make the queries.

    :return Deferred[unicode]: A secret invitation code.
    """
    # NOTE(review): this is a generator using yield/returnValue, so it is
    # presumably decorated with @inlineCallbacks at the call site above —
    # confirm in the full file.
    aliases = get_aliases(node_directory)[alias]
    nodeurl = get_node_url(node_directory)
    node_url = DecodedURL.from_text(unicode(nodeurl, 'utf-8'))

    # create an unlinked directory and get the dmd write-cap
    dmd_write_cap = yield tahoe_mkdir(node_url, treq)

    # derive a dmd read-only cap from it.
    dmd_readonly_cap = uri.from_string(
        dmd_write_cap).get_readonly().to_string()
    if dmd_readonly_cap is None:
        raise Exception("failed to diminish dmd write cap")

    # Now, we need to create a link to the nickname from inside the
    # collective to this read-cap. For that we will need to know
    # the write-cap of the collective (which is stored by the private/aliases
    # file in the node_directory) so that a link can be created inside it
    # to the .
    # To do that, we use tahoe ln dmd_read_cap <collective-write-cap>/<alias>
    magic_write_cap = get_aliases(node_directory)[alias]
    magic_readonly_cap = uri.from_string(
        magic_write_cap).get_readonly().to_string()

    # tahoe ln CLIENT_READCAP COLLECTIVE_WRITECAP/NICKNAME
    from_file = unicode(dmd_readonly_cap, 'utf-8')
    to_file = u"%s/%s" % (unicode(magic_write_cap, 'utf-8'), nickname)
    # Fix: the original wrapped this in `try: ... except Exception: raise`,
    # a no-op handler that neither logged nor transformed the error; the
    # exception propagates identically without it.
    yield tahoe_mv(node_url, aliases, from_file, to_file, treq)

    # return invite code, which is:
    #    magic_readonly_cap + INVITE_SEPARATOR + dmd_write_cap
    invite_code = "{}{}{}".format(magic_readonly_cap,
                                  INVITE_SEPARATOR,
                                  dmd_write_cap)
    returnValue(invite_code)
def create_from_cap(self, writecap, readcap=None, deep_immutable=False,
                    name=u"<unknown name>"):
    """Synchronously build a node from a cap string, memoizing known nodes.

    Takes a write cap and/or read cap (byte strings or None) and returns
    a node object; unrecognized caps yield an UnknownNode, which is never
    cached.
    """
    # this returns synchronously. It starts with a "cap string".
    assert isinstance(writecap, (str, type(None))), type(writecap)
    assert isinstance(readcap, (str, type(None))), type(readcap)

    bigcap = writecap or readcap
    if not bigcap:
        # maybe the writecap was hidden because we're in a readonly
        # directory, and the future cap format doesn't have a readcap, or
        # something.
        return UnknownNode(None, None)  # deep_immutable and name not needed

    # The name doesn't matter for caching since it's only used in the error
    # attribute of an UnknownNode, and we don't cache those.
    # Separate cache namespaces for deep-immutable vs. normal parsing.
    if deep_immutable:
        memokey = "I" + bigcap
    else:
        memokey = "M" + bigcap
    if memokey in self._node_cache:
        return self._node_cache[memokey]
    cap = uri.from_string(bigcap, deep_immutable=deep_immutable, name=name)
    node = self._create_from_single_cap(cap)
    if node:
        self._node_cache[memokey] = node  # note: WeakValueDictionary
    else:
        # don't cache UnknownNode
        node = UnknownNode(writecap, readcap,
                           deep_immutable=deep_immutable, name=name)
    return node
def test_mdmf_cap_ignore_extensions(self):
    """Fields appended after the standard MDMF cap fields must be ignored."""
    # MDMF caps can be arbitrarily extended after the fingerprint and
    # key/storage index fields. tahoe-1.9 is supposed to ignore any
    # extensions, and not add any itself.
    u1 = uri.WriteableMDMFFileURI(self.writekey, self.fingerprint)
    cap = u1.to_string()

    cap2 = cap + ":I COME FROM THE FUTURE"
    u2 = uri.WriteableMDMFFileURI.init_from_string(cap2)
    self.failUnlessReallyEqual(self.writekey, u2.writekey)
    self.failUnlessReallyEqual(self.fingerprint, u2.fingerprint)
    self.failIf(u2.is_readonly())
    self.failUnless(u2.is_mutable())

    # Even arbitrary binary garbage in the extension must parse.
    cap3 = cap + ":" + os.urandom(40)  # parse *that*!
    u3 = uri.WriteableMDMFFileURI.init_from_string(cap3)
    self.failUnlessReallyEqual(self.writekey, u3.writekey)
    self.failUnlessReallyEqual(self.fingerprint, u3.fingerprint)
    self.failIf(u3.is_readonly())
    self.failUnless(u3.is_mutable())

    cap4 = u1.get_readonly().to_string() + ":ooh scary future stuff"
    u4 = uri.from_string_mutable_filenode(cap4)
    self.failUnlessReallyEqual(self.readkey, u4.readkey)
    self.failUnlessReallyEqual(self.fingerprint, u4.fingerprint)
    self.failUnless(u4.is_readonly())
    self.failUnless(u4.is_mutable())

    cap5 = u1.get_verify_cap().to_string() + ":spoilers!"
    u5 = uri.from_string(cap5)
    self.failUnlessReallyEqual(self.storage_index, u5.storage_index)
    self.failUnlessReallyEqual(self.fingerprint, u5.fingerprint)
    self.failUnless(u5.is_readonly())
    self.failIf(u5.is_mutable())
def _finished(self, ur):
    """Translate raw upload results into HelperUploadResults and clean up.

    Fires the finished-observers with the converted results, closes the
    ciphertext reader, deletes the encoding file, and notifies the helper.
    """
    assert interfaces.IUploadResults.providedBy(ur), ur
    vcapstr = ur.get_verifycapstr()
    precondition(isinstance(vcapstr, str), vcapstr)
    v = uri.from_string(vcapstr)
    f_times = self._fetcher.get_times()

    hur = upload.HelperUploadResults()
    # Merge fetcher timings with the uploader's own timings.
    hur.timings = {
        "cumulative_fetch": f_times["cumulative_fetch"],
        "total_fetch": f_times["total"],
    }
    for key, val in ur.get_timings().items():
        hur.timings[key] = val
    hur.uri_extension_hash = v.uri_extension_hash
    hur.ciphertext_fetched = self._fetcher.get_ciphertext_fetched()
    hur.preexisting_shares = ur.get_preexisting_shares()
    # hur.sharemap needs to be {shnum: set(serverid)}
    hur.sharemap = {}
    for shnum, servers in ur.get_sharemap().items():
        hur.sharemap[shnum] = set([s.get_serverid() for s in servers])
    # and hur.servermap needs to be {serverid: set(shnum)}
    hur.servermap = {}
    for server, shnums in ur.get_servermap().items():
        hur.servermap[server.get_serverid()] = set(shnums)
    hur.pushed_shares = ur.get_pushed_shares()
    hur.file_size = ur.get_file_size()
    hur.uri_extension_data = ur.get_uri_extension_data()
    hur.verifycapstr = vcapstr

    self._reader.close()
    os.unlink(self._encoding_file)
    self._finished_observers.fire(hur)
    self._helper.upload_finished(self._storage_index, v.size)
    del self._reader
def dump_cap(options):
    """Parse the cap from *options* (possibly a /uri/ URL) and dump its details.

    Optionally decodes a node id and a client secret from the options for
    use by dump_uri_instance.
    """
    from allmydata import uri
    from allmydata.util import base32
    from base64 import b32decode
    import urlparse, urllib

    out = options.stdout
    cap = options.cap
    nodeid = None
    if options['nodeid']:
        nodeid = b32decode(options['nodeid'].upper())
    secret = None
    if options['client-secret']:
        secret = base32.a2b(options['client-secret'])
    elif options['client-dir']:
        secretfile = os.path.join(options['client-dir'], "private", "secret")
        try:
            secret = base32.a2b(open(secretfile, "r").read().strip())
        except EnvironmentError:
            # best-effort: missing/unreadable secret file just means no secret
            pass

    # Accept a full "http.../uri/<cap>" URL and extract the cap from it.
    if cap.startswith("http"):
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(cap)
        assert path.startswith("/uri/")
        cap = urllib.unquote(path[len("/uri/"):])

    u = uri.from_string(cap)

    print(file=out)
    dump_uri_instance(u, nodeid, secret, out)
def just_created(self, writecap):
    """Initialize caps and empty child state for a freshly created directory."""
    # TODO: maybe integrate this with the constructor
    readonly = uri.from_string(writecap).get_readonly()
    self.writecap = writecap
    self.readcap = readonly.to_string()
    self.mutable = True
    self.children_d = {}
    self.children = {}
def _repair_finished(repair_results):
    # Callback: record repair outcome and rebuild post-repair check results
    # from the repairer's servermap.
    self.cr_results.repair_successful = repair_results.get_successful()
    r = CheckResults(from_string(self._node.get_uri()),
                     self._storage_index)
    self.cr_results.post_repair_results = r
    self._fill_checker_results(repair_results.servermap, r)
    self.cr_results.repair_results = repair_results  # TODO?
def render_GET(self, req):
    """
    Historically, accessing this via "GET /uri?uri=<capability>"
    was/is a feature -- which simply redirects to the more-common
    "GET /uri/<capability>" with any other query args preserved.
    New code should use "/uri/<cap>"
    """
    uri_arg = req.args.get(b"uri", [None])[0]
    if uri_arg is None:
        raise WebError("GET /uri requires uri=")

    # shennanigans like putting "%2F" or just "/" itself, or ../
    # etc in the <cap> might be a vector for weirdness so we
    # validate that this is a valid capability before proceeding.
    cap = uri.from_string(uri_arg)
    if isinstance(cap, uri.UnknownURI):
        raise WebError("Invalid capability")

    # so, using URL.from_text(req.uri) isn't going to work because
    # it seems Nevow was creating absolute URLs including
    # host/port whereas req.uri is absolute (but lacks host/port)
    redir_uri = URL.from_text(req.prePathURL().decode('utf8'))
    redir_uri = redir_uri.child(urllib.quote(uri_arg).decode('utf8'))
    # add back all the query args that AREN'T "?uri="
    for k, values in req.args.items():
        if k != b"uri":
            for v in values:
                redir_uri = redir_uri.add(k.decode('utf8'), v.decode('utf8'))
    return redirectTo(redir_uri.to_text().encode('utf8'), req)
def dump_cap(options):
    """Parse the cap from *options* (possibly a /uri/ URL) and dump its details.

    Python 2 variant (uses ``print >>out``). Optionally decodes a node id
    and a client secret for use by dump_uri_instance.
    """
    from allmydata import uri
    from allmydata.util import base32
    from base64 import b32decode
    import urlparse, urllib

    out = options.stdout
    cap = options.cap
    nodeid = None
    if options['nodeid']:
        nodeid = b32decode(options['nodeid'].upper())
    secret = None
    if options['client-secret']:
        secret = base32.a2b(options['client-secret'])
    elif options['client-dir']:
        secretfile = os.path.join(options['client-dir'], "private", "secret")
        try:
            secret = base32.a2b(open(secretfile, "r").read().strip())
        except EnvironmentError:
            # best-effort: missing/unreadable secret file just means no secret
            pass

    # Accept a full "http.../uri/<cap>" URL and extract the cap from it.
    if cap.startswith("http"):
        scheme, netloc, path, params, query, fragment = urlparse.urlparse(cap)
        assert path.startswith("/uri/")
        cap = urllib.unquote(path[len("/uri/"):])

    u = uri.from_string(cap)

    print >>out
    dump_uri_instance(u, nodeid, secret, out)
def test_joined_magic_folder((rc, stdout, stderr)):
    # Python-2-only tuple-unpacking parameter.  Callback: the read-only
    # form of upload_dircap must appear in the command's stdout.
    readonly_cap = unicode(
        uri.from_string(upload_dircap).get_readonly().to_string(),
        'utf-8')
    s = re.search(readonly_cap, stdout)
    self.failUnless(s is not None)
    return None
def _finished(self, ur):
    """Translate raw upload results into HelperUploadResults and clean up.

    Fires the finished-observers with the converted results, closes the
    ciphertext reader, deletes the encoding file, and notifies the helper.
    """
    assert interfaces.IUploadResults.providedBy(ur), ur
    vcapstr = ur.get_verifycapstr()
    precondition(isinstance(vcapstr, str), vcapstr)
    v = uri.from_string(vcapstr)
    f_times = self._fetcher.get_times()

    hur = upload.HelperUploadResults()
    # Merge fetcher timings with the uploader's own timings.
    hur.timings = {"cumulative_fetch": f_times["cumulative_fetch"],
                   "total_fetch": f_times["total"],
                   }
    for key,val in ur.get_timings().items():
        hur.timings[key] = val
    hur.uri_extension_hash = v.uri_extension_hash
    hur.ciphertext_fetched = self._fetcher.get_ciphertext_fetched()
    hur.preexisting_shares = ur.get_preexisting_shares()
    # hur.sharemap needs to be {shnum: set(serverid)}
    hur.sharemap = {}
    for shnum, servers in ur.get_sharemap().items():
        hur.sharemap[shnum] = set([s.get_serverid() for s in servers])
    # and hur.servermap needs to be {serverid: set(shnum)}
    hur.servermap = {}
    for server, shnums in ur.get_servermap().items():
        hur.servermap[server.get_serverid()] = set(shnums)
    hur.pushed_shares = ur.get_pushed_shares()
    hur.file_size = ur.get_file_size()
    hur.uri_extension_data = ur.get_uri_extension_data()
    hur.verifycapstr = vcapstr

    self._reader.close()
    os.unlink(self._encoding_file)
    self._finished_observers.fire(hur)
    self._helper.upload_finished(self._storage_index, v.size)
    del self._reader
def test_joined_magic_folder(args):
    # Callback: the read-only form of upload_dircap must appear in the
    # command's stdout.  (Py3-compatible variant: explicit unpacking.)
    (rc, stdout, stderr) = args
    readonly_cap = unicode(
        uri.from_string(upload_dircap).get_readonly().to_string(),
        'utf-8')
    s = re.search(readonly_cap, stdout)
    self.assertTrue(s is not None)
    return None
def __init__(self, node, storage_broker, history, monitor):
    """Set up checker state for *node* and prepare an empty CheckResults."""
    self._node = node
    self._storage_broker = storage_broker
    self._history = history
    self._monitor = monitor
    self.bad_shares = []  # list of (nodeid,shnum,failure)
    self._storage_index = self._node.get_storage_index()
    self.results = CheckResults(from_string(node.get_uri()),
                                self._storage_index)
    self.need_repair = False
    self.responded = set()  # set of (binary) nodeids
def invite(options):
    """CLI: create a DMD for an invitee and print the resulting invite code.

    Makes a new directory, diminishes it to a read-cap, links it into the
    collective under the invitee's nickname, and prints
    ``<collective-readcap><sep><dmd-writecap>``.  Returns a process exit code.
    """
    precondition(isinstance(options.alias, unicode), alias=options.alias)
    precondition(isinstance(options.nickname, unicode), nickname=options.nickname)

    from allmydata.scripts import tahoe_mkdir
    mkdir_options = _delegate_options(options, MakeDirectoryOptions())
    mkdir_options.where = None

    rc = tahoe_mkdir.mkdir(mkdir_options)
    if rc != 0:
        print("magic-folder: failed to mkdir\n", file=options.stderr)
        return rc

    # FIXME this assumes caps are ASCII.
    dmd_write_cap = mkdir_options.stdout.getvalue().strip()
    dmd_readonly_cap = uri.from_string(
        dmd_write_cap).get_readonly().to_string()
    if dmd_readonly_cap is None:
        print("magic-folder: failed to diminish dmd write cap\n",
              file=options.stderr)
        return 1

    magic_write_cap = get_aliases(options["node-directory"])[options.alias]
    magic_readonly_cap = uri.from_string(
        magic_write_cap).get_readonly().to_string()

    # tahoe ln CLIENT_READCAP COLLECTIVE_WRITECAP/NICKNAME
    ln_options = _delegate_options(options, LnOptions())
    ln_options.from_file = unicode(dmd_readonly_cap, 'utf-8')
    ln_options.to_file = u"%s/%s" % (unicode(magic_write_cap, 'utf-8'),
                                     options.nickname)
    rc = tahoe_mv.mv(ln_options, mode="link")
    if rc != 0:
        print("magic-folder: failed to create link\n", file=options.stderr)
        print(ln_options.stderr.getvalue(), file=options.stderr)
        return rc

    # FIXME: this assumes caps are ASCII.
    print("%s%s%s" % (magic_readonly_cap, INVITE_SEPARATOR, dmd_write_cap),
          file=options.stdout)
    return 0
def _get_alias_details(nodedir):
    """Map each alias name to its read-write and read-only cap strings."""
    aliases = get_aliases(nodedir)
    details = {}
    for alias_name in sorted(aliases):
        cap = uri.from_string(aliases[alias_name])
        details[alias_name] = {
            "readwrite": cap.to_string(),
            "readonly": cap.get_readonly().to_string(),
        }
    return details
def create_from_cap(self, writecap, readcap=None, deep_immutable=False,
                    name=u"<unknown name>"):
    """Synchronously build a node from a cap string.

    Caches mutable nodes only (ticket #1679) and wraps blacklisted nodes
    in ProhibitedNode; unrecognized caps yield an (uncached) UnknownNode.
    """
    # this returns synchronously. It starts with a "cap string".
    assert isinstance(writecap, (str, type(None))), type(writecap)
    assert isinstance(readcap, (str, type(None))), type(readcap)

    bigcap = writecap or readcap
    if not bigcap:
        # maybe the writecap was hidden because we're in a readonly
        # directory, and the future cap format doesn't have a readcap, or
        # something.
        return UnknownNode(None, None)  # deep_immutable and name not needed

    # The name doesn't matter for caching since it's only used in the error
    # attribute of an UnknownNode, and we don't cache those.
    # Separate cache namespaces for deep-immutable vs. normal parsing.
    if deep_immutable:
        memokey = "I" + bigcap
    else:
        memokey = "M" + bigcap
    if memokey in self._node_cache:
        node = self._node_cache[memokey]
    else:
        cap = uri.from_string(bigcap, deep_immutable=deep_immutable,
                              name=name)
        node = self._create_from_single_cap(cap)

        # node is None for an unknown URI, otherwise it is a type for which
        # is_mutable() is known. We avoid cacheing mutable nodes due to
        # ticket #1679.
        if node is None:
            # don't cache UnknownNode
            node = UnknownNode(writecap, readcap,
                               deep_immutable=deep_immutable, name=name)
        elif node.is_mutable():
            self._node_cache[memokey] = node  # note: WeakValueDictionary

    if self.blacklist:
        si = node.get_storage_index()
        # if this node is blacklisted, return the reason, otherwise return None
        reason = self.blacklist.check_storageindex(si)
        if reason is not None:
            # The original node object is cached above, not the ProhibitedNode wrapper.
            # This ensures that removing the blacklist entry will make the node
            # accessible if create_from_cap is called again.
            node = ProhibitedNode(node, reason)
    return node
def test_mdmf_attenuation(self):
    """Attenuating an MDMF directory cap yields a consistent read-only cap."""
    writekey = "\x01" * 16
    fingerprint = "\x02" * 32
    uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
    d1 = uri.MDMFDirectoryURI(uri1)
    self.failUnless(d1.is_mutable())
    self.failIf(d1.is_readonly())
    self.failUnless(IURI.providedBy(d1))
    self.failUnless(IDirnodeURI.providedBy(d1))

    # Rebuilding the dirnode cap from its filenode cap is lossless.
    d1_uri = d1.to_string()
    d1_uri_from_fn = uri.MDMFDirectoryURI(
        d1.get_filenode_cap()).to_string()
    self.failUnlessEqual(d1_uri_from_fn, d1_uri)

    uri2 = uri.from_string(d1_uri)
    self.failUnlessIsInstance(uri2, uri.MDMFDirectoryURI)
    self.failUnless(IURI.providedBy(uri2))
    self.failUnless(IDirnodeURI.providedBy(uri2))
    self.failUnless(uri2.is_mutable())
    self.failIf(uri2.is_readonly())

    # Read-only caps remain "mutable" (the content can still change
    # underneath), but are marked readonly for this holder.
    ro = uri2.get_readonly()
    self.failUnlessIsInstance(ro, uri.ReadonlyMDMFDirectoryURI)
    self.failUnless(ro.is_mutable())
    self.failUnless(ro.is_readonly())
    self.failUnless(IURI.providedBy(ro))
    self.failUnless(IDirnodeURI.providedBy(ro))

    # A mutable cap can never satisfy deep_immutable parsing.
    ro_uri = ro.to_string()
    n = uri.from_string(ro_uri, deep_immutable=True)
    self.failUnlessIsInstance(n, uri.UnknownURI)

    fn_cap = ro.get_filenode_cap()
    fn_ro_cap = fn_cap.get_readonly()
    d3 = uri.ReadonlyMDMFDirectoryURI(fn_ro_cap)
    self.failUnlessEqual(ro.to_string(), d3.to_string())
    self.failUnless(ro.is_mutable())
    self.failUnless(ro.is_readonly())
def test_mdmf_attenuation(self):
    """Attenuating an MDMF directory cap yields a consistent read-only cap."""
    writekey = "\x01" * 16
    fingerprint = "\x02" * 32
    uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
    d1 = uri.MDMFDirectoryURI(uri1)
    self.failUnless(d1.is_mutable())
    self.failIf(d1.is_readonly())
    self.failUnless(IURI.providedBy(d1))
    self.failUnless(IDirnodeURI.providedBy(d1))

    # Rebuilding the dirnode cap from its filenode cap is lossless.
    d1_uri = d1.to_string()
    d1_uri_from_fn = uri.MDMFDirectoryURI(d1.get_filenode_cap()).to_string()
    self.failUnlessEqual(d1_uri_from_fn, d1_uri)

    uri2 = uri.from_string(d1_uri)
    self.failUnlessIsInstance(uri2, uri.MDMFDirectoryURI)
    self.failUnless(IURI.providedBy(uri2))
    self.failUnless(IDirnodeURI.providedBy(uri2))
    self.failUnless(uri2.is_mutable())
    self.failIf(uri2.is_readonly())

    # Read-only caps remain "mutable" (the content can still change
    # underneath), but are marked readonly for this holder.
    ro = uri2.get_readonly()
    self.failUnlessIsInstance(ro, uri.ReadonlyMDMFDirectoryURI)
    self.failUnless(ro.is_mutable())
    self.failUnless(ro.is_readonly())
    self.failUnless(IURI.providedBy(ro))
    self.failUnless(IDirnodeURI.providedBy(ro))

    # A mutable cap can never satisfy deep_immutable parsing.
    ro_uri = ro.to_string()
    n = uri.from_string(ro_uri, deep_immutable=True)
    self.failUnlessIsInstance(n, uri.UnknownURI)

    fn_cap = ro.get_filenode_cap()
    fn_ro_cap = fn_cap.get_readonly()
    d3 = uri.ReadonlyMDMFDirectoryURI(fn_ro_cap)
    self.failUnlessEqual(ro.to_string(), d3.to_string())
    self.failUnless(ro.is_mutable())
    self.failUnless(ro.is_readonly())
def _clobber_shares(ignored):
    # delete one, corrupt a second
    shares = self.find_uri_shares(self.uri)
    self.failUnlessReallyEqual(len(shares), 10)
    os.unlink(shares[0][2])
    cso = debug.CorruptShareOptions()
    cso.stdout = StringIO()
    cso.parseOptions([shares[1][2]])
    storage_index = uri.from_string(self.uri).get_storage_index()
    # Remember the expected report line so later output can be checked.
    self._corrupt_share_line = " server %s, SI %s, shnum %d" % \
                               (base32.b2a(shares[1][1]),
                                base32.b2a(storage_index),
                                shares[1][0])
    debug.corrupt_share(cso)
def test_mdmf(self):
    """MDMF directory caps round-trip and reject deep-immutable parsing."""
    writekey = "\x01" * 16
    fingerprint = "\x02" * 32
    uri1 = uri.WriteableMDMFFileURI(writekey, fingerprint)
    d1 = uri.MDMFDirectoryURI(uri1)
    self.failIf(d1.is_readonly())
    self.failUnless(d1.is_mutable())
    self.failUnless(IURI.providedBy(d1))
    self.failUnless(IDirnodeURI.providedBy(d1))
    d1_uri = d1.to_string()

    d2 = uri.from_string(d1_uri)
    self.failUnlessIsInstance(d2, uri.MDMFDirectoryURI)
    self.failIf(d2.is_readonly())
    self.failUnless(d2.is_mutable())
    self.failUnless(IURI.providedBy(d2))
    self.failUnless(IDirnodeURI.providedBy(d2))

    # It doesn't make sense to ask for a deep immutable URI for a
    # mutable directory, and we should get back a result to that
    # effect.
    d3 = uri.from_string(d2.to_string(), deep_immutable=True)
    self.failUnlessIsInstance(d3, uri.UnknownURI)
def _copy_share(self, share, to_server):
    """Copy a (sharenum, sharefile) share into *to_server*'s share directory."""
    (sharenum, sharefile) = share
    (id, ss) = to_server
    shares_dir = os.path.join(ss.original._server.storedir, "shares")
    si = uri.from_string(self.uri).get_storage_index()
    # Shares live under a storage-index-derived subdirectory.
    si_dir = os.path.join(shares_dir, storage_index_to_dir(si))
    if not os.path.exists(si_dir):
        os.makedirs(si_dir)
    new_sharefile = os.path.join(si_dir, str(sharenum))
    shutil.copy(sharefile, new_sharefile)
    self.shares = self.find_uri_shares(self.uri)
    # Make sure that the storage server has the share.
    self.failUnless((sharenum, ss.original._server.my_nodeid, new_sharefile)
                    in self.shares)
def _copy_share(self, share, to_server):
    """Copy a (sharenum, sharefile) share into *to_server*'s share directory."""
    (sharenum, sharefile) = share
    (id, ss) = to_server
    shares_dir = os.path.join(ss.original.storedir, "shares")
    si = uri.from_string(self.uri).get_storage_index()
    # Shares live under a storage-index-derived subdirectory.
    si_dir = os.path.join(shares_dir, storage_index_to_dir(si))
    if not os.path.exists(si_dir):
        os.makedirs(si_dir)
    new_sharefile = os.path.join(si_dir, str(sharenum))
    shutil.copy(sharefile, new_sharefile)
    self.shares = self.find_uri_shares(self.uri)
    # Make sure that the storage server has the share.
    self.failUnless((sharenum, ss.original.my_nodeid, new_sharefile)
                    in self.shares)
def _finished(self, uploadresults):
    """Finalize an upload: record fetch stats, fire observers, clean up files."""
    precondition(isinstance(uploadresults.verifycapstr, str),
                 uploadresults.verifycapstr)
    assert interfaces.IUploadResults.providedBy(uploadresults), uploadresults
    r = uploadresults
    v = uri.from_string(r.verifycapstr)
    r.uri_extension_hash = v.uri_extension_hash
    f_times = self._fetcher.get_times()
    r.timings["cumulative_fetch"] = f_times["cumulative_fetch"]
    r.ciphertext_fetched = self._fetcher.get_ciphertext_fetched()
    r.timings["total_fetch"] = f_times["total"]
    self._reader.close()
    os.unlink(self._encoding_file)
    self._finished_observers.fire(r)
    self._helper.upload_finished(self._storage_index, v.size)
    del self._reader
def invite(options):
    """CLI: create a DMD for an invitee and print the resulting invite code.

    Python 2 variant (uses ``print >>``).  Makes a new directory,
    diminishes it to a read-cap, links it into the collective under the
    invitee's nickname, and prints
    ``<collective-readcap><sep><dmd-writecap>``.  Returns an exit code.
    """
    precondition(isinstance(options.alias, unicode), alias=options.alias)
    precondition(isinstance(options.nickname, unicode), nickname=options.nickname)

    from allmydata.scripts import tahoe_mkdir
    mkdir_options = _delegate_options(options, MakeDirectoryOptions())
    mkdir_options.where = None

    rc = tahoe_mkdir.mkdir(mkdir_options)
    if rc != 0:
        print >>options.stderr, "magic-folder: failed to mkdir\n"
        return rc

    # FIXME this assumes caps are ASCII.
    dmd_write_cap = mkdir_options.stdout.getvalue().strip()
    dmd_readonly_cap = uri.from_string(dmd_write_cap).get_readonly().to_string()
    if dmd_readonly_cap is None:
        print >>options.stderr, "magic-folder: failed to diminish dmd write cap\n"
        return 1

    magic_write_cap = get_aliases(options["node-directory"])[options.alias]
    magic_readonly_cap = uri.from_string(magic_write_cap).get_readonly().to_string()

    # tahoe ln CLIENT_READCAP COLLECTIVE_WRITECAP/NICKNAME
    ln_options = _delegate_options(options, LnOptions())
    ln_options.from_file = unicode(dmd_readonly_cap, 'utf-8')
    ln_options.to_file = u"%s/%s" % (unicode(magic_write_cap, 'utf-8'),
                                     options.nickname)
    rc = tahoe_mv.mv(ln_options, mode="link")
    if rc != 0:
        print >>options.stderr, "magic-folder: failed to create link\n"
        print >>options.stderr, ln_options.stderr.getvalue()
        return rc

    # FIXME: this assumes caps are ASCII.
    print >>options.stdout, "%s%s%s" % (magic_readonly_cap,
                                        INVITE_SEPARATOR,
                                        dmd_write_cap)
    return 0
def _clobber_shares(ignored):
    # Delete one share of the unicode-named file, then corrupt a share
    # of the mutable file.
    shares = self.find_uri_shares(self.uris[u"g\u00F6\u00F6d"])
    self.failUnlessReallyEqual(len(shares), 10)
    os.unlink(shares[0][2])

    shares = self.find_uri_shares(self.uris["mutable"])
    cso = debug.CorruptShareOptions()
    cso.stdout = StringIO()
    cso.parseOptions([shares[1][2]])
    storage_index = uri.from_string(
        self.uris["mutable"]).get_storage_index()
    # Remember the expected report line so later output can be checked.
    self._corrupt_share_line = " corrupt: server %s, SI %s, shnum %d" % \
                               (str(base32.b2a(shares[1][1]), "ascii"),
                                str(base32.b2a(storage_index), "ascii"),
                                shares[1][0])
    debug.corrupt_share(cso)
def find_uri_shares(self, uri):
    """Return sorted (shnum, serverid, path) tuples for every share of *uri*
    found on the grid's storage servers."""
    storage_index = tahoe_uri.from_string(uri).get_storage_index()
    si_subdir = storage_index_to_dir(storage_index)
    found = []
    for _num, server in self.g.servers_by_number.items():
        share_dir = os.path.join(server.sharedir, si_subdir)
        if not os.path.exists(share_dir):
            continue
        for entry in os.listdir(share_dir):
            try:
                share_number = int(entry)
            except ValueError:
                continue  # non-numeric entries are not shares
            found.append((share_number, server.my_nodeid,
                          os.path.join(share_dir, entry)))
    return sorted(found)
def find_uri_shares(self, uri):
    """Return sorted (shnum, serverid, path) tuples for every share of *uri*
    found on the grid's storage servers."""
    si = tahoe_uri.from_string(uri).get_storage_index()
    prefixdir = storage_index_to_dir(si)
    shares = []
    for i,ss in self.g.servers_by_number.items():
        serverid = ss.my_nodeid
        basedir = os.path.join(ss.sharedir, prefixdir)
        if not os.path.exists(basedir):
            continue
        for f in os.listdir(basedir):
            try:
                # share files are named by their (integer) share number
                shnum = int(f)
                shares.append((shnum, serverid, os.path.join(basedir, f)))
            except ValueError:
                # non-numeric entries are not shares
                pass
    return sorted(shares)
def _reduce_max_outstanding_requests_and_download(ign):
    # Hang the first five servers that hold shares, throttle the
    # sharefinder, then start (but do not wait for) a download.
    # We need the share placement to know which servers to hang.
    si = uri.from_string(self.uri).get_storage_index()
    placed = self.c0.storage_broker.get_servers_for_psi(si)
    self._hang([(server.get_serverid(), server) for server in placed[:5]])

    node = self.c0.create_node_from_uri(self.uri)
    node._cnode._maybe_create_download_node()
    self._sf = node._cnode._node._sharefinder
    self._sf.max_outstanding_requests = 5
    self._sf.OVERDUE_TIMEOUT = 1000.0

    # Kick off the download; its result lands in `done` (from the
    # enclosing scope), which the test polls for later.
    d2 = download_to_data(node)
    d2.addBoth(done.append)
def _finished(self, uploadresults):
    """Record fetch timings on the upload results, tear down local
    state, and notify observers that this helper upload completed."""
    precondition(isinstance(uploadresults.verifycapstr, str),
                 uploadresults.verifycapstr)
    assert interfaces.IUploadResults.providedBy(uploadresults), uploadresults

    results = uploadresults
    verifycap = uri.from_string(results.verifycapstr)
    results.uri_extension_hash = verifycap.uri_extension_hash

    fetch_times = self._fetcher.get_times()
    results.timings["cumulative_fetch"] = fetch_times["cumulative_fetch"]
    results.ciphertext_fetched = self._fetcher.get_ciphertext_fetched()
    results.timings["total_fetch"] = fetch_times["total"]

    # Clean up the local ciphertext file, then fire observers and tell
    # the helper so it can forget this upload.
    self._reader.close()
    os.unlink(self._encoding_file)
    self._finished_observers.fire(results)
    self._helper.upload_finished(self._storage_index, verifycap.size)
    del self._reader
def list_directory(self, uri):
    """List the children of the directory at *uri* via the node's web API.

    Returns (through the generator/Deferred machinery) a dict mapping
    child name -> (Node, metadata-dict).  Raises Exception on a non-200
    HTTP response and ValueError if the object is not a dirnode.
    """
    # Build ".../uri/<cap>?t=json" against the node's API root.
    # NOTE(review): assumes caps are ASCII -- confirm for future cap types.
    api_uri = self.node_uri.child(
        u"uri",
        uri.to_string().decode("ascii"),
    ).add(
        u"t",
        u"json",
    ).to_uri().to_text().encode("ascii")
    # Eliot action for structured logging of this CLI operation.
    action = start_action(
        action_type=u"magic-folder:cli:list-dir",
        filenode_uri=uri.to_string().decode("ascii"),
        api_uri=api_uri,
    )
    with action.context():
        response = yield self.agent.request(
            b"GET",
            api_uri,
        )
        if response.code != 200:
            raise Exception(
                "Error response from list endpoint: {}".format(response))
        # The t=json response is a [kind, info] pair.
        kind, dirinfo = json.loads((yield readBody(response)))
        if kind != u"dirnode":
            raise ValueError(
                "Object is a {}, not a directory".format(kind))
        action.add_success_fields(children=dirinfo[u"children"], )
        # Prefer the read-write cap for each child, falling back to
        # the read-only cap when no rw_uri is present.
        returnValue({
            name: (
                Node(
                    self,
                    from_string(
                        json_metadata.get(
                            "rw_uri",
                            json_metadata["ro_uri"]).encode("ascii"),
                    ),
                ),
                json_metadata[u"metadata"],
            )
            for (name, (child_kind, json_metadata))
            in dirinfo[u"children"].items()
        })
def copy_share_to_server(self, uri, share_number, server_number): ss = self.g.servers_by_number[server_number] # Copy share i from the directory associated with the first # storage server to the directory associated with this one. assert self.g, "I tried to find a grid at self.g, but failed" assert self.shares, "I tried to find shares at self.shares, but failed" old_share_location = self.shares[share_number][2] new_share_location = os.path.join(ss.storedir, "shares") si = tahoe_uri.from_string(self.uri).get_storage_index() new_share_location = os.path.join(new_share_location, storage_index_to_dir(si)) if not os.path.exists(new_share_location): os.makedirs(new_share_location) new_share_location = os.path.join(new_share_location, str(share_number)) if old_share_location != new_share_location: shutil.copy(old_share_location, new_share_location) shares = self.find_uri_shares(uri) # Make sure that the storage server has the share. self.failUnless((share_number, ss.my_nodeid, new_share_location) in shares)
def add_node(self, node, childpath):
    """Classify *node* into the deep-stats counters.

    Branch order matters: unknown nodes first, then directories before
    mutable files; literal files are distinguished from CHK files by
    their parsed URI type.
    """
    if isinstance(node, UnknownNode):
        self.add("count-unknown")
        return
    if IDirectoryNode.providedBy(node):
        self.add("count-directories")
        return
    if IMutableFileNode.providedBy(node):
        self.add("count-files")
        self.add("count-mutable-files")
        # TODO: update the servermap, compute a size, add it to
        # size-mutable-files, max it into "largest-mutable-file"
        return
    if IImmutableFileNode.providedBy(node):
        # CHK and LIT
        self.add("count-files")
        size = node.get_size()
        self.histogram("size-files-histogram", size)
        theuri = from_string(node.get_uri())
        if isinstance(theuri, LiteralFileURI):
            self.add("count-literal-files")
            self.add("size-literal-files", size)
        else:
            self.add("count-immutable-files")
            self.add("size-immutable-files", size)
            self.max("largest-immutable-file", size)
def copy_sdmf_shares(self):
    """Short-circuit the upload process: write the canned SDMF shares
    directly into each server's share directory, one share per server,
    then confirm all ten are discoverable."""
    servernums = self.g.servers_by_number.keys()
    assert len(servernums) == 10
    # Pair each canned share number with a server.
    assignments = zip(self.sdmf_old_shares.keys(), servernums)
    si = uri.from_string(self.sdmf_old_cap).get_storage_index()
    sharedir_suffix = storage_index_to_dir(si)
    for share, servernum in assignments:
        payload = base64.b64decode(self.sdmf_old_shares[share])
        storage_path = os.path.join(self.get_serverdir(servernum),
                                    "shares", sharedir_suffix)
        fileutil.make_dirs(storage_path)
        fileutil.write(os.path.join(storage_path, "%d" % share), payload)
    # ...and verify that the shares are there.
    shares = self.find_uri_shares(self.sdmf_old_cap)
    assert len(shares) == 10
def list_aliases(options):
    """Print every alias defined in the node directory, right-aligned,
    optionally diminished to the read-only cap.

    Returns 0 on success, 1 if any name/cap could not be rendered in
    the terminal's output encoding (those are shown quoted/escaped).
    """
    nodedir = options['node-directory']
    stdout = options.stdout
    stderr = options.stderr
    aliases = get_aliases(nodedir)
    alias_names = sorted(aliases.keys())
    # Right-align alias names to the widest one ("+ [0]" handles the
    # empty-alias-list case).
    max_width = max([len(quote_output(name)) for name in alias_names] + [0])
    fmt = "%" + str(max_width) + "s: %s"
    rc = 0
    for name in alias_names:
        cap = uri.from_string(aliases[name])
        if options['readonly-uri']:
            cap = cap.get_readonly()
        try:
            print >>stdout, fmt % (unicode_to_output(name),
                                   unicode_to_output(cap.to_string().decode('utf-8')))
        except (UnicodeEncodeError, UnicodeDecodeError):
            # Fall back to an escaped rendering on stderr.
            print >>stderr, fmt % (quote_output(name),
                                   quote_output(aliases[name]))
            rc = 1
    if rc == 1:
        print >>stderr, "\nThis listing included aliases or caps that could not be converted to the terminal" \
                        "\noutput encoding. These are shown using backslash escapes and in quotes."
    return rc
def test_1654(self):
    # test that the Retrieve object unconditionally verifies the block
    # hash tree root for mutable shares. The failure mode is that
    # carefully crafted shares can cause undetected corruption (the
    # retrieve appears to finish successfully, but the result is
    # corrupted). When fixed, these shares always cause a
    # CorruptShareError, which results in NotEnoughSharesError in this
    # 2-of-2 file.
    self.basedir = "mutable/Problems/test_1654"
    self.set_up_grid(num_servers=2)
    cap = uri.from_string(TEST_1654_CAP)
    si = cap.get_storage_index()

    # Plant the two canned (maliciously crafted) shares, one per server.
    for share, shnum in [(TEST_1654_SH0, 0), (TEST_1654_SH1, 1)]:
        sharedata = base64.b64decode(share)
        storedir = self.get_serverdir(shnum)
        storage_path = os.path.join(storedir, "shares",
                                    storage_index_to_dir(si))
        fileutil.make_dirs(storage_path)
        fileutil.write(os.path.join(storage_path, "%d" % shnum),
                       sharedata)

    nm = self.g.clients[0].nodemaker
    n = nm.create_from_cap(TEST_1654_CAP)
    # to exercise the problem correctly, we must ensure that sh0 is
    # processed first, and sh1 second. NoNetworkGrid has facilities to
    # stall the first request from a single server, but it's not
    # currently easy to extend that to stall the second request (mutable
    # retrievals will see two: first the mapupdate, then the fetch).
    # However, repeated executions of this run without the #1654 fix
    # suggests that we're failing reliably even without explicit stalls,
    # probably because the servers are queried in a fixed order. So I'm
    # ok with relying upon that.
    d = self.shouldFail(NotEnoughSharesError, "test #1654 share corruption",
                        "ran out of servers",
                        n.download_best_version)
    return d
def test_pack(self):
    """Pack a CHK file URI, then check its fields, interfaces, human
    encoding, string round-trip, and derived verifier caps."""
    key = "\x00\x01\x02\x03\x04\x05\x06\x07\x08\x09\x0a\x0b\x0c\x0d\x0e\x0f"
    storage_index = hashutil.storage_index_hash(key)
    uri_extension_hash = hashutil.uri_extension_hash("stuff")
    needed_shares = 25
    total_shares = 100
    size = 1234
    u = uri.CHKFileURI(key=key,
                       uri_extension_hash=uri_extension_hash,
                       needed_shares=needed_shares,
                       total_shares=total_shares,
                       size=size)
    # field checks on the freshly constructed URI
    self.failUnlessReallyEqual(u.get_storage_index(), storage_index)
    self.failUnlessReallyEqual(u.key, key)
    self.failUnlessReallyEqual(u.uri_extension_hash, uri_extension_hash)
    self.failUnlessReallyEqual(u.needed_shares, needed_shares)
    self.failUnlessReallyEqual(u.total_shares, total_shares)
    self.failUnlessReallyEqual(u.size, size)
    self.failUnless(u.is_readonly())
    self.failIf(u.is_mutable())
    self.failUnless(IURI.providedBy(u))
    self.failUnless(IFileURI.providedBy(u))
    self.failIf(IDirnodeURI.providedBy(u))
    self.failUnlessReallyEqual(u.get_size(), 1234)

    # an immutable CHK cap is its own read-only form
    u_ro = u.get_readonly()
    self.failUnlessIdentical(u, u_ro)

    he = u.to_human_encoding()
    self.failUnlessReallyEqual(he, "http://127.0.0.1:3456/uri/" + u.to_string())
    self.failUnlessReallyEqual(uri.CHKFileURI.init_from_human_encoding(he), u)

    # parse the packed string back and re-check every field
    u2 = uri.from_string(u.to_string())
    self.failUnlessReallyEqual(u2.get_storage_index(), storage_index)
    self.failUnlessReallyEqual(u2.key, key)
    self.failUnlessReallyEqual(u2.uri_extension_hash, uri_extension_hash)
    self.failUnlessReallyEqual(u2.needed_shares, needed_shares)
    self.failUnlessReallyEqual(u2.total_shares, total_shares)
    self.failUnlessReallyEqual(u2.size, size)
    self.failUnless(u2.is_readonly())
    self.failIf(u2.is_mutable())
    self.failUnless(IURI.providedBy(u2))
    self.failUnless(IFileURI.providedBy(u2))
    self.failIf(IDirnodeURI.providedBy(u2))
    self.failUnlessReallyEqual(u2.get_size(), 1234)

    # deep_immutable and ALLEGED_* prefixes are all consistent with a
    # CHK cap, so each parse yields an equivalent cap
    u2i = uri.from_string(u.to_string(), deep_immutable=True)
    self.failUnlessReallyEqual(u.to_string(), u2i.to_string())
    u2ro = uri.from_string(uri.ALLEGED_READONLY_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2ro.to_string())
    u2imm = uri.from_string(uri.ALLEGED_IMMUTABLE_PREFIX + u.to_string())
    self.failUnlessReallyEqual(u.to_string(), u2imm.to_string())

    # derive the verifier cap and round-trip it as well
    v = u.get_verify_cap()
    self.failUnless(isinstance(v.to_string(), str))
    self.failUnless(v.is_readonly())
    self.failIf(v.is_mutable())

    v2 = uri.from_string(v.to_string())
    self.failUnlessReallyEqual(v, v2)

    he = v.to_human_encoding()
    v2_h = uri.CHKFileVerifierURI.init_from_human_encoding(he)
    self.failUnlessReallyEqual(v2, v2_h)

    # a verifier cap constructed directly from raw fields
    v3 = uri.CHKFileVerifierURI(storage_index="\x00"*16,
                                uri_extension_hash="\x00"*32,
                                needed_shares=3,
                                total_shares=10,
                                size=1234)
    self.failUnless(isinstance(v3.to_string(), str))
    self.failUnless(v3.is_readonly())
    self.failIf(v3.is_mutable())
def test_pack(self):
    """Pack a writeable SSK (mutable file) URI and check its fields,
    human encoding, read-only diminish, and verifier-cap derivation."""
    writekey = "\x01" * 16
    fingerprint = "\x02" * 32

    u = uri.WriteableSSKFileURI(writekey, fingerprint)
    self.failUnlessReallyEqual(u.writekey, writekey)
    self.failUnlessReallyEqual(u.fingerprint, fingerprint)
    self.failIf(u.is_readonly())
    self.failUnless(u.is_mutable())
    self.failUnless(IURI.providedBy(u))
    self.failUnless(IMutableFileURI.providedBy(u))
    self.failIf(IDirnodeURI.providedBy(u))
    self.failUnless("WriteableSSKFileURI" in str(u))

    he = u.to_human_encoding()
    u_h = uri.WriteableSSKFileURI.init_from_human_encoding(he)
    self.failUnlessReallyEqual(u, u_h)

    # round-trip through the packed string
    u2 = uri.from_string(u.to_string())
    self.failUnlessReallyEqual(u2.writekey, writekey)
    self.failUnlessReallyEqual(u2.fingerprint, fingerprint)
    self.failIf(u2.is_readonly())
    self.failUnless(u2.is_mutable())
    self.failUnless(IURI.providedBy(u2))
    self.failUnless(IMutableFileURI.providedBy(u2))
    self.failIf(IDirnodeURI.providedBy(u2))

    # a mutable cap must be rejected when deep_immutable is required,
    # and ALLEGED_* prefixes contradict a write cap -> UnknownURI
    u2i = uri.from_string(u.to_string(), deep_immutable=True)
    self.failUnless(isinstance(u2i, uri.UnknownURI), u2i)
    u2ro = uri.from_string(uri.ALLEGED_READONLY_PREFIX + u.to_string())
    self.failUnless(isinstance(u2ro, uri.UnknownURI), u2ro)
    u2imm = uri.from_string(uri.ALLEGED_IMMUTABLE_PREFIX + u.to_string())
    self.failUnless(isinstance(u2imm, uri.UnknownURI), u2imm)

    # diminish to the read-only form
    u3 = u2.get_readonly()
    readkey = hashutil.ssk_readkey_hash(writekey)
    self.failUnlessReallyEqual(u3.fingerprint, fingerprint)
    self.failUnlessReallyEqual(u3.readkey, readkey)
    self.failUnless(u3.is_readonly())
    self.failUnless(u3.is_mutable())
    self.failUnless(IURI.providedBy(u3))
    self.failUnless(IMutableFileURI.providedBy(u3))
    self.failIf(IDirnodeURI.providedBy(u3))

    # still mutable, so deep_immutable rejects it; ALLEGED_READONLY on
    # an already-read-only cap is consistent, ALLEGED_IMMUTABLE is not
    u3i = uri.from_string(u3.to_string(), deep_immutable=True)
    self.failUnless(isinstance(u3i, uri.UnknownURI), u3i)
    u3ro = uri.from_string(uri.ALLEGED_READONLY_PREFIX + u3.to_string())
    self.failUnlessReallyEqual(u3.to_string(), u3ro.to_string())
    u3imm = uri.from_string(uri.ALLEGED_IMMUTABLE_PREFIX + u3.to_string())
    self.failUnless(isinstance(u3imm, uri.UnknownURI), u3imm)

    he = u3.to_human_encoding()
    u3_h = uri.ReadonlySSKFileURI.init_from_human_encoding(he)
    self.failUnlessReallyEqual(u3, u3_h)

    # construct the read-only form directly from the readkey
    u4 = uri.ReadonlySSKFileURI(readkey, fingerprint)
    self.failUnlessReallyEqual(u4.fingerprint, fingerprint)
    self.failUnlessReallyEqual(u4.readkey, readkey)
    self.failUnless(u4.is_readonly())
    self.failUnless(u4.is_mutable())
    self.failUnless(IURI.providedBy(u4))
    self.failUnless(IMutableFileURI.providedBy(u4))
    self.failIf(IDirnodeURI.providedBy(u4))

    u4i = uri.from_string(u4.to_string(), deep_immutable=True)
    self.failUnless(isinstance(u4i, uri.UnknownURI), u4i)
    u4ro = uri.from_string(uri.ALLEGED_READONLY_PREFIX + u4.to_string())
    self.failUnlessReallyEqual(u4.to_string(), u4ro.to_string())
    u4imm = uri.from_string(uri.ALLEGED_IMMUTABLE_PREFIX + u4.to_string())
    self.failUnless(isinstance(u4imm, uri.UnknownURI), u4imm)

    u4a = uri.from_string(u4.to_string())
    self.failUnlessReallyEqual(u4a, u4)
    self.failUnless("ReadonlySSKFileURI" in str(u4a))
    self.failUnlessIdentical(u4a.get_readonly(), u4a)

    # verify caps from both the read and write forms must agree on the
    # storage index
    u5 = u4.get_verify_cap()
    self.failUnless(IVerifierURI.providedBy(u5))
    self.failUnlessReallyEqual(u5.get_storage_index(), u.get_storage_index())
    u7 = u.get_verify_cap()
    self.failUnless(IVerifierURI.providedBy(u7))
    self.failUnlessReallyEqual(u7.get_storage_index(), u.get_storage_index())

    he = u5.to_human_encoding()
    u5_h = uri.SSKVerifierURI.init_from_human_encoding(he)
    self.failUnlessReallyEqual(u5, u5_h)
def test_pack(self):
    """Wrap an SSK write cap in a DirectoryURI and check mutability,
    interfaces, read-only diminish, and verifier derivation."""
    writekey = "\x01" * 16
    fingerprint = "\x02" * 32
    n = uri.WriteableSSKFileURI(writekey, fingerprint)

    u1 = uri.DirectoryURI(n)
    self.failIf(u1.is_readonly())
    self.failUnless(u1.is_mutable())
    self.failUnless(IURI.providedBy(u1))
    self.failIf(IFileURI.providedBy(u1))
    self.failUnless(IDirnodeURI.providedBy(u1))
    self.failUnless("DirectoryURI" in str(u1))

    # the inner filenode cap is a writeable mutable file cap
    u1_filenode = u1.get_filenode_cap()
    self.failUnless(u1_filenode.is_mutable())
    self.failIf(u1_filenode.is_readonly())

    # round-trip through the packed string
    u2 = uri.from_string(u1.to_string())
    self.failUnlessReallyEqual(u1.to_string(), u2.to_string())
    self.failIf(u2.is_readonly())
    self.failUnless(u2.is_mutable())
    self.failUnless(IURI.providedBy(u2))
    self.failIf(IFileURI.providedBy(u2))
    self.failUnless(IDirnodeURI.providedBy(u2))

    # mutable directory caps are rejected when deep_immutable is required
    u2i = uri.from_string(u1.to_string(), deep_immutable=True)
    self.failUnless(isinstance(u2i, uri.UnknownURI))

    # read-only diminish of the directory cap
    u3 = u2.get_readonly()
    self.failUnless(u3.is_readonly())
    self.failUnless(u3.is_mutable())
    self.failUnless(IURI.providedBy(u3))
    self.failIf(IFileURI.providedBy(u3))
    self.failUnless(IDirnodeURI.providedBy(u3))

    u3i = uri.from_string(u2.to_string(), deep_immutable=True)
    self.failUnless(isinstance(u3i, uri.UnknownURI))

    # the wrapped filenode of the diminished cap is read-only mutable
    u3n = u3._filenode_uri
    self.failUnless(u3n.is_readonly())
    self.failUnless(u3n.is_mutable())
    u3_filenode = u3.get_filenode_cap()
    self.failUnless(u3_filenode.is_mutable())
    self.failUnless(u3_filenode.is_readonly())

    u3a = uri.from_string(u3.to_string())
    self.failUnlessIdentical(u3a, u3a.get_readonly())

    # construct the read-only directory cap directly; it must match
    # the diminished form
    u4 = uri.ReadonlyDirectoryURI(u2._filenode_uri.get_readonly())
    self.failUnlessReallyEqual(u4.to_string(), u3.to_string())
    self.failUnless(u4.is_readonly())
    self.failUnless(u4.is_mutable())
    self.failUnless(IURI.providedBy(u4))
    self.failIf(IFileURI.providedBy(u4))
    self.failUnless(IDirnodeURI.providedBy(u4))

    u4_verifier = u4.get_verify_cap()
    u4_verifier_filenode = u4_verifier.get_filenode_cap()
    self.failUnless(isinstance(u4_verifier_filenode, uri.SSKVerifierURI))

    # every route to a verifier cap must agree on the inner filenode
    verifiers = [u1.get_verify_cap(),
                 u2.get_verify_cap(),
                 u3.get_verify_cap(),
                 u4.get_verify_cap(),
                 uri.DirectoryURIVerifier(n.get_verify_cap()),
                 ]
    for v in verifiers:
        self.failUnless(IVerifierURI.providedBy(v))
        self.failUnlessReallyEqual(v._filenode_uri,
                                   u1.get_verify_cap()._filenode_uri)