def make_nodes(self):
    self.nodes = []
    for i in range(self.numnodes):
        nodedir = os.path.join(self.testdir, "node%d" % i)
        os.mkdir(nodedir)
        f = open(os.path.join(nodedir, "tahoe.cfg"), "w")
        f.write("[client]\n"
                "introducer.furl = %s\n"
                "shares.happy = 1\n"
                "[storage]\n"
                % (self.introducer_furl,))
        # the only tests for which we want the internal nodes to actually
        # retain shares are the ones where somebody's going to download
        # them.
        if self.mode in ("download", "download-GET", "download-GET-slow"):
            # retain shares
            pass
        else:
            # for these tests, we tell the storage servers to pretend to
            # accept shares, but really just throw them out, since we're
            # only testing upload and not download.
            f.write("debug_discard = true\n")
        if self.mode in ("receive",):
            # for this mode, the client-under-test gets all the shares,
            # so our internal nodes can refuse requests
            f.write("readonly = true\n")
        f.close()
        c = client.Client(basedir=nodedir)
        c.setServiceParent(self)
        self.nodes.append(c)
def test_loadable(self):
    basedir = "test_client.Run.test_loadable"
    os.mkdir(basedir)
    dummy = "pb://[email protected]:58889/bogus"
    fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG_I % dummy)
    fileutil.write(os.path.join(basedir, client.Client.EXIT_TRIGGER_FILE), "")
    client.Client(basedir)
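# Context for the snippets above and below: BASECONFIG and BASECONFIG_I are
# module-level constants in the test file and are not shown in this excerpt.
# A minimal sketch of what they are assumed to look like (a bare [client]
# section, with BASECONFIG_I taking an introducer FURL via %-interpolation);
# the exact contents in the real module may differ:
BASECONFIG = ("[client]\n"
              "introducer.furl = \n")

BASECONFIG_I = ("[client]\n"
                "introducer.furl = %s\n")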
def make_nodes(self):
    root = FilePath(self.testdir)
    self.nodes = []
    for i in range(self.numnodes):
        nodedir = root.child("node%d" % (i,))
        private = nodedir.child("private")
        private.makedirs()
        write_introducer(nodedir, "default", self.introducer_url)
        config = ("[client]\n"
                  "shares.happy = 1\n"
                  "[storage]\n")
        # the only tests for which we want the internal nodes to actually
        # retain shares are the ones where somebody's going to download
        # them.
        if self.mode in ("download", "download-GET", "download-GET-slow"):
            # retain shares
            pass
        else:
            # for these tests, we tell the storage servers to pretend to
            # accept shares, but really just throw them out, since we're
            # only testing upload and not download.
            config += "debug_discard = true\n"
        if self.mode in ("receive",):
            # for this mode, the client-under-test gets all the shares,
            # so our internal nodes can refuse requests
            config += "readonly = true\n"
        nodedir.child("tahoe.cfg").setContent(config)
        c = client.Client(basedir=nodedir.path)
        c.setServiceParent(self)
        self.nodes.append(c)
def _check(config, expected_furl):
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   BASECONFIG + config)
    c = client.Client(basedir)
    uploader = c.getServiceNamed("uploader")
    furl, connected = uploader.get_helper_info()
    self.failUnlessEqual(furl, expected_furl)
def test_reloadable(self):
    basedir = "test_client.Run.test_reloadable"
    os.mkdir(basedir)
    dummy = "pb://[email protected]:58889/bogus"
    open(os.path.join(basedir, "introducer.furl"), "w").write(dummy)
    c1 = client.Client(basedir)
    c1.setServiceParent(self.sparent)

    # delay to let the service start up completely. I'm not entirely sure
    # this is necessary.
    d = self.stall(delay=2.0)
    d.addCallback(lambda res: c1.disownServiceParent())
    # the cygwin buildslave seems to need more time to let the old
    # service completely shut down. When delay=0.1, I saw this test fail,
    # probably due to the logport trying to reclaim the old socket
    # number. This suggests that either we're dropping a Deferred
    # somewhere in the shutdown sequence, or that cygwin is just cranky.
    d.addCallback(self.stall, delay=2.0)
    def _restart(res):
        # TODO: pause for slightly over one second, to let
        # Client._check_hotline poll the file once. That will exercise
        # another few lines. Then add another test in which we don't
        # update the file at all, and watch to see the node shutdown.
        # (to do this, use a modified node which overrides Node.shutdown(),
        # also change _check_hotline to use it instead of a raw
        # reactor.stop, also instrument the shutdown event in an
        # attribute that we can check)
        c2 = client.Client(basedir)
        c2.setServiceParent(self.sparent)
        return c2.disownServiceParent()
    d.addCallback(_restart)
    return d
def test_loadable(self):
    basedir = "test_client.Run.test_loadable"
    os.mkdir(basedir)
    dummy = "pb://[email protected]:58889/bogus"
    open(os.path.join(basedir, "introducer.furl"), "w").write(dummy)
    open(os.path.join(basedir, "suicide_prevention_hotline"), "w")
    client.Client(basedir)
def test_nodekey_no_storage(self):
    basedir = "test_client.Basic.test_nodekey_no_storage"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   BASECONFIG +
                   "[storage]\n" +
                   "enabled = false\n")
    c = client.Client(basedir)
    self.failUnless(c.get_long_nodeid().startswith("v0-"))
def test_reserved_1(self):
    basedir = "client.Basic.test_reserved_1"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   BASECONFIG +
                   "[storage]\n" +
                   "enabled = true\n" +
                   "reserved_space = 1000\n")
    c = client.Client(basedir)
    self.failUnlessEqual(c.getServiceNamed("storage").reserved_space, 1000)
def test_ftp_auth_url(self):
    basedir = u"client.Basic.test_ftp_auth_url"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   (BASECONFIG +
                    "[ftpd]\n"
                    "enabled = true\n"
                    "port = tcp:0:interface=127.0.0.1\n"
                    "accounts.url = http://0.0.0.0/\n"))
    c = client.Client(basedir)
    # just make sure it can be instantiated
    del c
def test_loadable_old_storage_config_bits(self):
    basedir = "test_client.Basic.test_loadable_old_storage_config_bits"
    os.mkdir(basedir)
    open(os.path.join(basedir, "introducer.furl"), "w").write("")
    open(os.path.join(basedir, "readonly_storage"), "w").write("")
    open(os.path.join(basedir, "debug_discard_storage"), "w").write("")
    c = client.Client(basedir)
    s = c.getServiceNamed("storage")
    self.failUnless(s.no_storage)
    self.failUnless(s.readonly_storage)
def test_reserved_1(self):
    basedir = "client.Basic.test_reserved_1"
    os.mkdir(basedir)
    f = open(os.path.join(basedir, "tahoe.cfg"), "w")
    f.write(BASECONFIG)
    f.write("[storage]\n")
    f.write("enabled = true\n")
    f.write("reserved_space = 1000\n")
    f.close()
    c = client.Client(basedir)
    self.failUnlessEqual(c.getServiceNamed("storage").reserved_space, 1000)
def test_secrets(self):
    basedir = "test_client.Basic.test_secrets"
    os.mkdir(basedir)
    open(os.path.join(basedir, "introducer.furl"), "w").write("")
    c = client.Client(basedir)
    secret_fname = os.path.join(basedir, "private", "secret")
    self.failUnless(os.path.exists(secret_fname), secret_fname)
    renew_secret = c.get_renewal_secret()
    self.failUnless(base32.b2a(renew_secret))
    cancel_secret = c.get_cancel_secret()
    self.failUnless(base32.b2a(cancel_secret))
def test_ftp_auth_keyfile(self):
    basedir = u"client.Basic.test_ftp_auth_keyfile"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   (BASECONFIG +
                    "[ftpd]\n"
                    "enabled = true\n"
                    "port = tcp:0:interface=127.0.0.1\n"
                    "accounts.file = private/accounts\n"))
    os.mkdir(os.path.join(basedir, "private"))
    fileutil.write(os.path.join(basedir, "private", "accounts"), "\n")
    c = client.Client(basedir)
    # just make sure it can be instantiated
    del c
def test_secrets(self):
    basedir = "test_client.Basic.test_secrets"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG)
    c = client.Client(basedir)
    secret_fname = os.path.join(basedir, "private", "secret")
    self.failUnless(os.path.exists(secret_fname), secret_fname)
    renew_secret = c.get_renewal_secret()
    self.failUnless(base32.b2a(renew_secret))
    cancel_secret = c.get_cancel_secret()
    self.failUnless(base32.b2a(cancel_secret))
def _restart(res):
    # TODO: pause for slightly over one second, to let
    # Client._check_hotline poll the file once. That will exercise
    # another few lines. Then add another test in which we don't
    # update the file at all, and watch to see the node shutdown.
    # (to do this, use a modified node which overrides Node.shutdown(),
    # also change _check_hotline to use it instead of a raw
    # reactor.stop, also instrument the shutdown event in an
    # attribute that we can check)
    c2 = client.Client(basedir)
    c2.setServiceParent(self.sparent)
    return c2.disownServiceParent()
def test_manhole_keyfile(self):
    basedir = u"client.Basic.test_manhole_keyfile"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   BASECONFIG +
                   "[node]\n" +
                   "ssh.port = tcp:0:interface=127.0.0.1\n" +
                   "ssh.authorized_keys_file = relative\n")
    c = client.Client(basedir)
    m = [s for s in c if isinstance(s, AuthorizedKeysManhole)][0]
    abs_basedir = fileutil.abspath_expanduser_unicode(basedir)
    expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
    self.failUnlessReallyEqual(m.keyfile, expected)
def test_web_staticdir(self):
    basedir = u"client.Basic.test_web_staticdir"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   BASECONFIG +
                   "[node]\n" +
                   "web.port = tcp:0:interface=127.0.0.1\n" +
                   "web.static = relative\n")
    c = client.Client(basedir)
    w = c.getServiceNamed("webish")
    abs_basedir = fileutil.abspath_expanduser_unicode(basedir)
    expected = fileutil.abspath_expanduser_unicode(u"relative", abs_basedir)
    self.failUnlessReallyEqual(w.staticdir, expected)
def test_loadable_old_config_bits(self):
    basedir = "test_client.Basic.test_loadable_old_config_bits"
    os.mkdir(basedir)
    open(os.path.join(basedir, "introducer.furl"), "w").write("")
    open(os.path.join(basedir, "no_storage"), "w").write("")
    open(os.path.join(basedir, "readonly_storage"), "w").write("")
    open(os.path.join(basedir, "debug_discard_storage"), "w").write("")
    c = client.Client(basedir)
    try:
        c.getServiceNamed("storage")
        self.fail("that was supposed to fail")
    except KeyError:
        pass
def test_versions(self):
    basedir = "test_client.Basic.test_versions"
    os.mkdir(basedir)
    open(os.path.join(basedir, "introducer.furl"), "w").write("")
    c = client.Client(basedir)
    ss = c.getServiceNamed("storage")
    verdict = ss.remote_get_version()
    self.failUnlessReallyEqual(verdict["application-version"],
                               str(allmydata.__full_version__))
    self.failIfEqual(str(allmydata.__version__), "unknown")
    self.failUnless("." in str(allmydata.__full_version__),
                    "non-numeric version in '%s'" % allmydata.__version__)
    all_versions = allmydata.get_package_versions_string()
    self.failUnless(allmydata.__appname__ in all_versions)
    log.msg("tahoe versions: %s" % all_versions)
    # also test stats
    stats = c.get_stats()
    self.failUnless("node.uptime" in stats)
    self.failUnless(isinstance(stats["node.uptime"], float))
def test_comment(self):
    should_fail = [r"test#test", r"#testtest", r"test\\#test"]
    should_not_fail = [r"test\#test", r"test\\\#test", r"testtest"]

    basedir = "test_client.Basic.test_comment"
    os.mkdir(basedir)

    def write_config(s):
        config = ("[client]\n"
                  "introducer.furl = %s\n" % s)
        fileutil.write(os.path.join(basedir, "tahoe.cfg"), config)

    for s in should_fail:
        self.failUnless(Node._contains_unescaped_hash(s))
        write_config(s)
        self.failUnlessRaises(UnescapedHashError, client.Client, basedir)

    for s in should_not_fail:
        self.failIf(Node._contains_unescaped_hash(s))
        write_config(s)
        client.Client(basedir)
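# For reference, a minimal sketch of an unescaped-hash check that is
# consistent with the should_fail / should_not_fail cases exercised in
# test_comment above: a '#' counts as unescaped when it is preceded by an
# even number of backslashes (including zero). This is an illustrative
# approximation, not the actual Node._contains_unescaped_hash implementation.
import re

_UNESCAPED_HASH_RE = re.compile(r'(^|[^\\])(\\\\)*#')

def contains_unescaped_hash(s):
    return bool(_UNESCAPED_HASH_RE.search(s))

# e.g. contains_unescaped_hash(r"test\\#test") -> True
#      contains_unescaped_hash(r"test\#test")  -> False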
def run(config, stdout, stderr):
    from twisted.internet import reactor
    from twisted.python import log, logfile
    from allmydata import client

    basedir = config['basedir']
    precondition(isinstance(basedir, unicode), basedir)

    if not os.path.isdir(basedir):
        print >>stderr, "%s does not look like a directory at all" % quote_output(basedir)
        return 1
    for fn in listdir_unicode(basedir):
        if fn.endswith(u".tac"):
            tac = str(fn)
            break
    else:
        print >>stderr, "%s does not look like a node directory (no .tac file)" % quote_output(basedir)
        return 1
    if "client" not in tac:
        print >>stderr, ("%s looks like it contains a non-client node (%s).\n"
                         "Use 'tahoe start' instead of 'tahoe run'."
                         % (quote_output(basedir), tac))
        return 1

    os.chdir(basedir)

    # set up twisted logging. this will become part of the node rsn.
    logdir = os.path.join(basedir, 'logs')
    if not os.path.exists(logdir):
        os.makedirs(logdir)
    lf = logfile.LogFile('tahoesvc.log', logdir)
    log.startLogging(lf)

    # run the node itself
    c = client.Client(basedir)
    reactor.callLater(0, c.startService)  # after reactor startup
    reactor.run()

    return 0
def test_versions(self):
    basedir = "test_client.Basic.test_versions"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"),
                   BASECONFIG +
                   "[storage]\n" +
                   "enabled = true\n")
    c = client.Client(basedir)
    ss = c.getServiceNamed("storage")
    verdict = ss.remote_get_version()
    self.failUnlessReallyEqual(verdict["application-version"],
                               str(allmydata.__full_version__))
    self.failIfEqual(str(allmydata.__version__), "unknown")
    self.failUnless("." in str(allmydata.__full_version__),
                    "non-numeric version in '%s'" % allmydata.__version__)
    all_versions = allmydata.get_package_versions_string()
    self.failUnless(allmydata.__appname__ in all_versions)
    # also test stats
    stats = c.get_stats()
    self.failUnless("node.uptime" in stats)
    self.failUnless(isinstance(stats["node.uptime"], float))
def test_maker(self):
    basedir = "client/NodeMaker/maker"
    fileutil.make_dirs(basedir)
    f = open(os.path.join(basedir, "tahoe.cfg"), "w")
    f.write(BASECONFIG)
    f.close()
    c = client.Client(basedir)

    n = c.create_node_from_uri("URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failUnless(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failIf(n.is_mutable())

    n = c.create_node_from_uri("URI:LIT:n5xgk")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failUnless(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failIf(n.is_mutable())

    n = c.create_node_from_uri("URI:SSK:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failUnless(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failIf(n.is_readonly())
    self.failUnless(n.is_mutable())

    n = c.create_node_from_uri("URI:SSK-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failUnless(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failUnless(n.is_mutable())

    n = c.create_node_from_uri("URI:DIR2:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failUnless(IDirectoryNode.providedBy(n))
    self.failIf(n.is_readonly())
    self.failUnless(n.is_mutable())

    n = c.create_node_from_uri("URI:DIR2-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failUnless(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failUnless(n.is_mutable())

    unknown_rw = "lafs://from_the_future"
    unknown_ro = "lafs://readonly_from_the_future"
    n = c.create_node_from_uri(unknown_rw, unknown_ro)
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_unknown())
    self.failUnlessReallyEqual(n.get_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_write_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_readonly_uri(), "ro." + unknown_ro)

    # Note: it isn't that we *intend* to deploy non-ASCII caps in
    # the future, it is that we want to make sure older Tahoe-LAFS
    # versions wouldn't choke on them if we were to do so. See
    # #1051 and wiki:NewCapDesign for details.
    unknown_rw = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
    unknown_ro = u"lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
    n = c.create_node_from_uri(unknown_rw, unknown_ro)
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_unknown())
    self.failUnlessReallyEqual(n.get_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_write_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_readonly_uri(), "ro." + unknown_ro)
def test_create_magic_folder_service(self):
    class MockMagicFolder(service.MultiService):
        name = 'magic-folder'

        def __init__(self, client, upload_dircap, collective_dircap,
                     local_path_u, dbfile, umask, inotify=None,
                     uploader_delay=1.0, clock=None, downloader_delay=3):
            service.MultiService.__init__(self)
            self.client = client
            self._umask = umask
            self.upload_dircap = upload_dircap
            self.collective_dircap = collective_dircap
            self.local_dir = local_path_u
            self.dbfile = dbfile
            self.inotify = inotify

        def ready(self):
            pass

    self.patch(allmydata.frontends.magic_folder, 'MagicFolder', MockMagicFolder)

    upload_dircap = "URI:DIR2:blah"
    local_dir_u = self.unicode_or_fallback(u"loc\u0101l_dir", u"local_dir")
    local_dir_utf8 = local_dir_u.encode('utf-8')
    config = (BASECONFIG +
              "[storage]\n" +
              "enabled = false\n" +
              "[magic_folder]\n" +
              "enabled = true\n")

    basedir1 = "test_client.Basic.test_create_magic_folder_service1"
    os.mkdir(basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "local.directory = " + local_dir_utf8 + "\n")
    self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"), config)
    fileutil.write(os.path.join(basedir1, "private", "magic_folder_dircap"), "URI:DIR2:blah")
    fileutil.write(os.path.join(basedir1, "private", "collective_dircap"), "URI:DIR2:meow")
    self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config.replace("[magic_folder]\n", "[drop_upload]\n"))
    self.failUnlessRaises(OldConfigOptionError, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "local.directory = " + local_dir_utf8 + "\n")
    c1 = client.Client(basedir1)
    magicfolder = c1.getServiceNamed('magic-folder')
    self.failUnless(isinstance(magicfolder, MockMagicFolder), magicfolder)
    self.failUnlessReallyEqual(magicfolder.client, c1)
    self.failUnlessReallyEqual(magicfolder.upload_dircap, upload_dircap)
    self.failUnlessReallyEqual(os.path.basename(magicfolder.local_dir), local_dir_u)
    self.failUnless(magicfolder.inotify is None, magicfolder.inotify)
    self.failUnless(magicfolder.running)

    class Boom(Exception):
        pass
    def BoomMagicFolder(client, upload_dircap, collective_dircap,
                        local_path_u, dbfile, umask, inotify=None,
                        uploader_delay=1.0, clock=None, downloader_delay=3):
        raise Boom()
    self.patch(allmydata.frontends.magic_folder, 'MagicFolder', BoomMagicFolder)

    basedir2 = "test_client.Basic.test_create_magic_folder_service2"
    os.mkdir(basedir2)
    os.mkdir(os.path.join(basedir2, "private"))
    fileutil.write(os.path.join(basedir2, "tahoe.cfg"),
                   BASECONFIG +
                   "[magic_folder]\n" +
                   "enabled = true\n" +
                   "local.directory = " + local_dir_utf8 + "\n")
    fileutil.write(os.path.join(basedir2, "private", "magic_folder_dircap"), "URI:DIR2:blah")
    fileutil.write(os.path.join(basedir2, "private", "collective_dircap"), "URI:DIR2:meow")
    self.failUnlessRaises(Boom, client.Client, basedir2)
def test_loadable(self):
    basedir = "test_client.Basic.test_loadable"
    os.mkdir(basedir)
    open(os.path.join(basedir, "introducer.furl"), "w").write("")
    client.Client(basedir)
def launch_node(self):
    try:
        logmsg("main thread startup")

        # import dependencies so that py2exe finds them
        # nevow requires all these for its voodoo module import time adaptor registrations
        from nevow import accessors, appserver, static, rend, url, util, query, i18n, flat
        from nevow import guard, stan, testutil, context
        from nevow.flat import flatmdom, flatstan, twist
        from formless import webform, processors, annotate, iformless
        from decimal import Decimal
        import allmydata.web

        # junk to appease pyflakes's outrage at py2exe's needs
        [ accessors, appserver, static, rend, url, util, query, i18n, flat,
          guard, stan, testutil, context,
          flatmdom, flatstan, twist,
          webform, processors, annotate, iformless,
          Decimal,
          allmydata,
          ]

        from twisted.internet import reactor
        from twisted.python import log, logfile
        from allmydata import client

        # set up twisted logging. this will become part of the node rsn.
        logdir = os.path.join(basedir, 'logs')
        if not os.path.exists(logdir):
            os.makedirs(logdir)
        lf = logfile.LogFile('tahoesvc.log', logdir)
        log.startLogging(lf)

        # run the node itself
        c = client.Client(basedir)
        reactor.callLater(0, c.startService)  # after reactor startup
        reactor.run(installSignalHandlers=False)

        logmsg("main thread shutdown")
    except:
        logmsg("exception")
        traceback.print_exc(None, logfilehandle)
        logfilehandle.flush()
        os.abort()
def test_loadable(self):
    basedir = "test_client.Basic.test_loadable"
    os.mkdir(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG)
    client.Client(basedir)
def test_create_drop_uploader(self, mock_drop_uploader, mock_log_msg):
    class MockDropUploader(service.MultiService):
        name = 'drop-upload'

        def __init__(self, client, upload_dircap, local_dir_utf8, inotify=None):
            service.MultiService.__init__(self)
            self.client = client
            self.upload_dircap = upload_dircap
            self.local_dir_utf8 = local_dir_utf8
            self.inotify = inotify

    mock_drop_uploader.side_effect = MockDropUploader

    upload_dircap = "URI:DIR2:blah"
    local_dir_utf8 = u"loc\u0101l_dir".encode('utf-8')
    config = (BASECONFIG +
              "[storage]\n" +
              "enabled = false\n" +
              "[drop_upload]\n" +
              "enabled = true\n")

    basedir1 = "test_client.Basic.test_create_drop_uploader1"
    os.mkdir(basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "local.directory = " + local_dir_utf8 + "\n")
    self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"), config)
    fileutil.write(os.path.join(basedir1, "private", "drop_upload_dircap"), "URI:DIR2:blah")
    self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "upload.dircap = " + upload_dircap + "\n")
    self.failUnlessRaises(OldConfigOptionError, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "local.directory = " + local_dir_utf8 + "\n")
    c1 = client.Client(basedir1)
    uploader = c1.getServiceNamed('drop-upload')
    self.failUnless(isinstance(uploader, MockDropUploader), uploader)
    self.failUnlessReallyEqual(uploader.client, c1)
    self.failUnlessReallyEqual(uploader.upload_dircap, upload_dircap)
    self.failUnlessReallyEqual(uploader.local_dir_utf8, local_dir_utf8)
    self.failUnless(uploader.inotify is None, uploader.inotify)
    self.failUnless(uploader.running)

    class Boom(Exception):
        pass
    mock_drop_uploader.side_effect = Boom()

    basedir2 = "test_client.Basic.test_create_drop_uploader2"
    os.mkdir(basedir2)
    os.mkdir(os.path.join(basedir2, "private"))
    fileutil.write(os.path.join(basedir2, "tahoe.cfg"),
                   BASECONFIG +
                   "[drop_upload]\n" +
                   "enabled = true\n" +
                   "local.directory = " + local_dir_utf8 + "\n")
    fileutil.write(os.path.join(basedir2, "private", "drop_upload_dircap"), "URI:DIR2:blah")
    c2 = client.Client(basedir2)
    self.failUnlessRaises(KeyError, c2.getServiceNamed, 'drop-upload')
    self.failUnless([True for arg in mock_log_msg.call_args_list if "Boom" in repr(arg)],
                    mock_log_msg.call_args_list)
def test_maker(self):
    basedir = "client/NodeMaker/maker"
    fileutil.make_dirs(basedir)
    fileutil.write(os.path.join(basedir, "tahoe.cfg"), BASECONFIG)
    c = client.Client(basedir)

    n = c.create_node_from_uri("URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failUnless(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failIf(n.is_mutable())

    # Testing #1679. There was a bug that would occur when downloader was
    # downloading the same readcap more than once concurrently, so the
    # filenode object was cached, and there was a failure from one of the
    # servers in one of the download attempts. No subsequent download
    # attempt would attempt to use that server again, which would lead to
    # the file being undownloadable until the gateway was restarted. The
    # current fix for this (hopefully to be superceded by a better fix
    # eventually) is to prevent re-use of filenodes, so the NodeMaker is
    # hereby required *not* to cache and re-use filenodes for CHKs.
    other_n = c.create_node_from_uri("URI:CHK:6nmrpsubgbe57udnexlkiwzmlu:bjt7j6hshrlmadjyr7otq3dc24end5meo5xcr5xe5r663po6itmq:3:10:7277")
    self.failIf(n is other_n, (n, other_n))

    n = c.create_node_from_uri("URI:LIT:n5xgk")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failUnless(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failIf(n.is_mutable())

    n = c.create_node_from_uri("URI:SSK:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failUnless(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failIf(n.is_readonly())
    self.failUnless(n.is_mutable())

    n = c.create_node_from_uri("URI:SSK-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failUnless(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failUnless(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failUnless(n.is_mutable())

    n = c.create_node_from_uri("URI:DIR2:n6x24zd3seu725yluj75q5boaa:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failUnless(IDirectoryNode.providedBy(n))
    self.failIf(n.is_readonly())
    self.failUnless(n.is_mutable())

    n = c.create_node_from_uri("URI:DIR2-RO:b7sr5qsifnicca7cbk3rhrhbvq:mm6yoqjhl6ueh7iereldqxue4nene4wl7rqfjfybqrehdqmqskvq")
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failUnless(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_readonly())
    self.failUnless(n.is_mutable())

    unknown_rw = "lafs://from_the_future"
    unknown_ro = "lafs://readonly_from_the_future"
    n = c.create_node_from_uri(unknown_rw, unknown_ro)
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_unknown())
    self.failUnlessReallyEqual(n.get_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_write_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_readonly_uri(), "ro." + unknown_ro)

    # Note: it isn't that we *intend* to deploy non-ASCII caps in
    # the future, it is that we want to make sure older Tahoe-LAFS
    # versions wouldn't choke on them if we were to do so. See
    # #1051 and wiki:NewCapDesign for details.
    unknown_rw = u"lafs://from_the_future_rw_\u263A".encode('utf-8')
    unknown_ro = u"lafs://readonly_from_the_future_ro_\u263A".encode('utf-8')
    n = c.create_node_from_uri(unknown_rw, unknown_ro)
    self.failUnless(IFilesystemNode.providedBy(n))
    self.failIf(IFileNode.providedBy(n))
    self.failIf(IImmutableFileNode.providedBy(n))
    self.failIf(IMutableFileNode.providedBy(n))
    self.failIf(IDirectoryNode.providedBy(n))
    self.failUnless(n.is_unknown())
    self.failUnlessReallyEqual(n.get_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_write_uri(), unknown_rw)
    self.failUnlessReallyEqual(n.get_readonly_uri(), "ro." + unknown_ro)
def test_create_drop_uploader(self):
    class MockDropUploader(service.MultiService):
        name = 'drop-upload'

        def __init__(self, client, upload_dircap, local_dir_utf8, inotify=None):
            service.MultiService.__init__(self)
            self.client = client
            self.upload_dircap = upload_dircap
            self.local_dir_utf8 = local_dir_utf8
            self.inotify = inotify

    self.patch(allmydata.frontends.drop_upload, 'DropUploader', MockDropUploader)

    upload_dircap = "URI:DIR2:blah"
    local_dir_utf8 = u"loc\u0101l_dir".encode('utf-8')
    config = (BASECONFIG +
              "[storage]\n" +
              "enabled = false\n" +
              "[drop_upload]\n" +
              "enabled = true\n")

    basedir1 = "test_client.Basic.test_create_drop_uploader1"
    os.mkdir(basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "local.directory = " + local_dir_utf8 + "\n")
    self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"), config)
    fileutil.write(os.path.join(basedir1, "private", "drop_upload_dircap"), "URI:DIR2:blah")
    self.failUnlessRaises(MissingConfigEntry, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "upload.dircap = " + upload_dircap + "\n")
    self.failUnlessRaises(OldConfigOptionError, client.Client, basedir1)

    fileutil.write(os.path.join(basedir1, "tahoe.cfg"),
                   config + "local.directory = " + local_dir_utf8 + "\n")
    c1 = client.Client(basedir1)
    uploader = c1.getServiceNamed('drop-upload')
    self.failUnless(isinstance(uploader, MockDropUploader), uploader)
    self.failUnlessReallyEqual(uploader.client, c1)
    self.failUnlessReallyEqual(uploader.upload_dircap, upload_dircap)
    self.failUnlessReallyEqual(uploader.local_dir_utf8, local_dir_utf8)
    self.failUnless(uploader.inotify is None, uploader.inotify)
    self.failUnless(uploader.running)

    class Boom(Exception):
        pass
    def BoomDropUploader(client, upload_dircap, local_dir_utf8, inotify=None):
        raise Boom()

    logged_messages = []
    def mock_log(*args, **kwargs):
        logged_messages.append("%r %r" % (args, kwargs))
    self.patch(allmydata.util.log, 'msg', mock_log)
    self.patch(allmydata.frontends.drop_upload, 'DropUploader', BoomDropUploader)

    basedir2 = "test_client.Basic.test_create_drop_uploader2"
    os.mkdir(basedir2)
    os.mkdir(os.path.join(basedir2, "private"))
    fileutil.write(os.path.join(basedir2, "tahoe.cfg"),
                   BASECONFIG +
                   "[drop_upload]\n" +
                   "enabled = true\n" +
                   "local.directory = " + local_dir_utf8 + "\n")
    fileutil.write(os.path.join(basedir2, "private", "drop_upload_dircap"), "URI:DIR2:blah")
    c2 = client.Client(basedir2)
    self.failUnlessRaises(KeyError, c2.getServiceNamed, 'drop-upload')
    self.failUnless([True for arg in logged_messages if "Boom" in arg],
                    logged_messages)