def test_unicode_dirnames(self):
    """Exercise mkdir/cp -r/ls over a directory entry with a non-ASCII name."""
    self.basedir = "cli/Cp/unicode_dirnames"

    fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
    try:
        # Probe whether the argv encoding can represent the name at all.
        fn1_arg = fn1.encode(get_io_encoding())
        del fn1_arg # hush pyflakes
        artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")

    self.skip_if_cannot_represent_filename(fn1)

    self.set_up_grid()

    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli("mkdir", "tahoe:test/" + artonwall_arg))
    d.addCallback(lambda res: self.do_cli("cp", "-r", "tahoe:test", "tahoe:test2"))
    d.addCallback(lambda res: self.do_cli("ls", "tahoe:test2/test"))
    def _check((rc, out, err)):
        # If the output encoding cannot represent the name, 'ls' is expected
        # to fail with rc 1 and a conversion warning; otherwise it succeeds.
        try:
            unicode_to_output(u"\u00C4rtonwall")
        except UnicodeEncodeError:
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessReallyEqual(out, "")
            self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
            self.failUnlessIn("files whose names could not be converted", err)
        else:
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"\u00C4rtonwall\n")
            self.failUnlessReallyEqual(err, "")
    d.addCallback(_check)
    return d
def test_immutable_from_file_unicode(self):
    """Upload a LIT-sized file under a non-ASCII name and read it back."""
    # tahoe put "\u00E0 trier.txt" "\u00E0 trier.txt"
    try:
        a_trier_arg = u"\u00E0 trier.txt".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")

    skip_if_cannot_represent_filename(u"\u00E0 trier.txt")

    self.basedir = "cli/Put/immutable_from_file_unicode"
    self.set_up_grid(oneshare=True)

    rel_fn = os.path.join(unicode(self.basedir), u"\u00E0 trier.txt")
    # we make the file small enough to fit in a LIT file, for speed
    DATA = "short file"
    fileutil.write(rel_fn, DATA)

    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli("put", rel_fn.encode(get_io_encoding()), a_trier_arg))
    def _uploaded((rc, out, err)):
        readcap = out.strip()
        # LIT caps embed the data, so a small file must produce one.
        self.failUnless(readcap.startswith("URI:LIT:"), readcap)
        self.failUnlessIn("201 Created", err)
        self.readcap = readcap
    d.addCallback(_uploaded)
    d.addCallback(lambda res: self.do_cli("get", "tahoe:" + a_trier_arg))
    d.addCallback(lambda (rc, out, err): self.failUnlessReallyEqual(out, DATA))
    return d
def test_unicode_dirnames(self):
    """Exercise mkdir/cp -r/ls over a directory entry with a non-ASCII name."""
    self.basedir = "cli/Cp/unicode_dirnames"

    fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
    try:
        # Probe whether the argv encoding can represent the name at all.
        fn1_arg = fn1.encode(get_io_encoding())
        del fn1_arg # hush pyflakes
        artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")

    skip_if_cannot_represent_filename(fn1)

    self.set_up_grid(oneshare=True)

    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli("mkdir", "tahoe:test/" + artonwall_arg))
    d.addCallback(lambda res: self.do_cli("cp", "-r", "tahoe:test", "tahoe:test2"))
    d.addCallback(lambda res: self.do_cli("ls", "tahoe:test2/test"))
    def _check((rc, out, err)):
        # If the output encoding cannot represent the name, 'ls' is expected
        # to fail with rc 1 and a conversion warning; otherwise it succeeds.
        try:
            unicode_to_output(u"\u00C4rtonwall")
        except UnicodeEncodeError:
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessReallyEqual(out, "")
            self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
            self.failUnlessIn("files whose names could not be converted", err)
        else:
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(out.decode(get_io_encoding()), u"\u00C4rtonwall\n")
            self.failUnlessReallyEqual(err, "")
    d.addCallback(_check)
    return d
def test_unicode_filename(self):
    """cp/get/ls round-trips for one non-ASCII and one ASCII filename."""
    self.basedir = "cli/Cp/unicode_filename"

    fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
    try:
        fn1_arg = fn1.encode(get_io_encoding())
        artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest(
            "A non-ASCII command argument could not be encoded on this platform."
        )

    skip_if_cannot_represent_filename(fn1)

    self.set_up_grid(oneshare=True)

    DATA1 = "unicode file content"
    fileutil.write(fn1, DATA1)

    fn2 = os.path.join(self.basedir, "Metallica")
    DATA2 = "non-unicode file content"
    fileutil.write(fn2, DATA2)

    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli("cp", fn1_arg, "tahoe:"))
    d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg))
    d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(
        rc_out_err[1], DATA1))
    d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
    d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
    d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(
        rc_out_err[1], DATA2))
    d.addCallback(lambda res: self.do_cli("ls", "tahoe:"))
    def _check(args):
        # When the output encoding cannot show the unicode name, only the
        # ASCII entry is listed and a conversion warning goes to stderr.
        (rc, out, err) = args
        try:
            unicode_to_output(u"\u00C4rtonwall")
        except UnicodeEncodeError:
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessReallyEqual(out, "Metallica\n")
            self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
            self.failUnlessIn("files whose names could not be converted", err)
        else:
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(out.decode(get_io_encoding()),
                                       u"Metallica\n\u00C4rtonwall\n")
            self.failUnlessReallyEqual(err, "")
    d.addCallback(_check)
    return d
def test_create_unicode(self):
    """Create a non-ASCII alias, then put/ls/get files under it."""
    self.basedir = "cli/CreateAlias/create_unicode"
    self.set_up_grid(oneshare=True)

    try:
        etudes_arg = u"\u00E9tudes".encode(get_io_encoding())
        lumiere_arg = u"lumi\u00E8re.txt".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")

    d = self.do_cli("create-alias", etudes_arg)
    def _check_create_unicode((rc, out, err)):
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual(err, "")
        self.failUnlessIn("Alias %s created" % quote_output(u"\u00E9tudes"), out)

        # The alias file must map the unicode alias to a directory writecap.
        aliases = get_aliases(self.get_clientdir())
        self.failUnless(aliases[u"\u00E9tudes"].startswith("URI:DIR2:"))
    d.addCallback(_check_create_unicode)

    d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":"))
    def _check_ls1((rc, out, err)):
        # freshly created alias: empty listing
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual(err, "")
        self.failUnlessReallyEqual(out, "")
    d.addCallback(_check_ls1)

    d.addCallback(lambda res: self.do_cli("put", "-", etudes_arg + ":uploaded.txt",
                                          stdin="Blah blah blah"))

    d.addCallback(lambda res: self.do_cli("ls", etudes_arg + ":"))
    def _check_ls2((rc, out, err)):
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual(err, "")
        self.failUnlessReallyEqual(out, "uploaded.txt\n")
    d.addCallback(_check_ls2)

    d.addCallback(lambda res: self.do_cli("get", etudes_arg + ":uploaded.txt"))
    def _check_get((rc, out, err)):
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual(err, "")
        self.failUnlessReallyEqual(out, "Blah blah blah")
    d.addCallback(_check_get)

    # Ensure that an Unicode filename in an Unicode alias works as expected
    d.addCallback(lambda res: self.do_cli("put", "-", etudes_arg + ":" + lumiere_arg,
                                          stdin="Let the sunshine In!"))
    d.addCallback(lambda res: self.do_cli("get",
                                          get_aliases(self.get_clientdir())[u"\u00E9tudes"] + "/" + lumiere_arg))
    def _check_get2((rc, out, err)):
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual(err, "")
        self.failUnlessReallyEqual(out, "Let the sunshine In!")
    d.addCallback(_check_get2)

    return d
def test_unicode_filename(self):
    """cp/get/ls round-trips for one non-ASCII and one ASCII filename."""
    self.basedir = "cli/Cp/unicode_filename"

    fn1 = os.path.join(unicode(self.basedir), u"\u00C4rtonwall")
    try:
        fn1_arg = fn1.encode(get_io_encoding())
        artonwall_arg = u"\u00C4rtonwall".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")

    skip_if_cannot_represent_filename(fn1)

    self.set_up_grid(oneshare=True)

    DATA1 = "unicode file content"
    fileutil.write(fn1, DATA1)

    fn2 = os.path.join(self.basedir, "Metallica")
    DATA2 = "non-unicode file content"
    fileutil.write(fn2, DATA2)

    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli("cp", fn1_arg, "tahoe:"))
    d.addCallback(lambda res: self.do_cli("get", "tahoe:" + artonwall_arg))
    d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA1))
    d.addCallback(lambda res: self.do_cli("cp", fn2, "tahoe:"))
    d.addCallback(lambda res: self.do_cli("get", "tahoe:Metallica"))
    d.addCallback(lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA2))
    d.addCallback(lambda res: self.do_cli("ls", "tahoe:"))
    def _check(args):
        # When the output encoding cannot show the unicode name, only the
        # ASCII entry is listed and a conversion warning goes to stderr.
        (rc, out, err) = args
        try:
            unicode_to_output(u"\u00C4rtonwall")
        except UnicodeEncodeError:
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessReallyEqual(out, "Metallica\n")
            self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
            self.failUnlessIn("files whose names could not be converted", err)
        else:
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(out.decode(get_io_encoding()),
                                       u"Metallica\n\u00C4rtonwall\n")
            self.failUnlessReallyEqual(err, "")
    d.addCallback(_check)
    return d
def test_immutable_from_file_unicode(self):
    """Byte-exact put/get round trip of a LIT-sized file with a non-ASCII name."""
    # tahoe put "\u00E0 trier.txt" "\u00E0 trier.txt"
    a_trier_arg = u"\u00E0 trier.txt"

    skip_if_cannot_represent_filename(u"\u00E0 trier.txt")

    self.basedir = "cli/Put/immutable_from_file_unicode"
    self.set_up_grid(oneshare=True)

    rel_fn = os.path.join(str(self.basedir), u"\u00E0 trier.txt")
    # we make the file small enough to fit in a LIT file, for speed
    DATA = b"short file \xff bytes"
    fileutil.write(rel_fn, DATA)

    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli(
        "put", rel_fn.encode(get_io_encoding()), a_trier_arg))
    def _uploaded(args):
        (rc, out, err) = args
        readcap = out.strip()
        # LIT caps embed the data, so a small file must produce one.
        self.failUnless(readcap.startswith("URI:LIT:"), readcap)
        self.failUnlessIn("201 Created", err)
        self.readcap = readcap
    d.addCallback(_uploaded)
    d.addCallback(lambda res: self.do_cli(
        "get", "tahoe:" + a_trier_arg, return_bytes=True))
    d.addCallback(
        lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA))
    return d
def skip_if_cannot_represent_argv(u):
    """Skip the running test when u cannot be encoded as a command-line argument."""
    precondition(isinstance(u, unicode))
    io_encoding = get_io_encoding()
    try:
        u.encode(io_encoding)
    except UnicodeEncodeError:
        raise unittest.SkipTest(
            "A non-ASCII argv could not be encoded on this platform.")
def runner(argv, run_by_human=True, stdin=None, stdout=None, stderr=None,
           install_node_control=True, additional_commands=None):
    """Parse argv and dispatch a CLI subcommand.

    On a usage error this prints the relevant usage text and the error
    message to stdout and returns 1 (or re-raises when not run_by_human).
    """
    # default to the process's real streams when none are injected
    stdin = stdin or sys.stdin
    stdout = stdout or sys.stdout
    stderr = stderr or sys.stderr

    config = Options()
    if install_node_control:
        config.subCommands.extend(startstop_node.subCommands)

    # extra command tables contributed by plugins/callers
    ac_dispatch = {}
    if additional_commands:
        for ac in additional_commands:
            config.subCommands.extend(ac.subCommands)
            ac_dispatch.update(ac.dispatch)

    try:
        config.parseOptions(argv)
    except usage.error, e:
        if not run_by_human:
            raise
        # walk down to the deepest subcommand so its usage text is printed
        c = config
        while hasattr(c, 'subOptions'):
            c = c.subOptions
        print >>stdout, str(c)
        try:
            msg = e.args[0].decode(get_io_encoding())
        except Exception:
            msg = repr(e)
        print >>stdout, "%s: %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False))
        return 1
def test_get_io_encoding_not_from_stdout(self):
    """The locale's preferred encoding is used when stdout's encoding is unusable."""
    fake_preferred = ['koi8-r']
    self.patch(locale, 'getpreferredencoding', lambda: fake_preferred[0])

    fake_stdout = MockStdout()
    self.patch(sys, 'stdout', fake_stdout)

    # Windows always reports utf-8 regardless of the locale.
    expected = 'utf-8' if sys.platform == "win32" else 'koi8-r'
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    fake_stdout.encoding = None
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    fake_preferred[0] = None
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
def test_get_io_encoding_not_from_stdout(self):
    """Locale preferred encoding is consulted when stdout carries no usable encoding."""
    fake_preferred = ['koi8-r']
    self.patch(locale, 'getpreferredencoding', lambda: fake_preferred[0])

    stub_stdout = MockStdout()
    self.patch(sys, 'stdout', stub_stdout)

    # On Windows the io encoding is pinned to utf-8.
    expected = 'utf-8' if sys.platform == "win32" else 'koi8-r'
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    stub_stdout.encoding = None
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    fake_preferred[0] = None
    _reload()
    self.assertEqual(get_io_encoding(), 'utf-8')
def test_get_io_encoding(self, mock_stdout):
    """get_io_encoding() follows sys.stdout.encoding, normalizing known aliases."""
    # Both spellings of UTF-8 should normalize to 'utf-8'.
    for reported in ('UTF-8', 'cp65001'):
        mock_stdout.encoding = reported
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')

    mock_stdout.encoding = 'koi8-r'
    expected = 'utf-8' if sys.platform == "win32" else 'koi8-r'
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    mock_stdout.encoding = 'nonexistent_encoding'
    if sys.platform == "win32":
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
    else:
        # Unknown encodings must be rejected at reload time.
        self.failUnlessRaises(AssertionError, _reload)
def _check((rc, out, err)):
    # If the output encoding cannot represent the name, 'ls' fails with
    # rc 1, lists only the ASCII entry, and warns on stderr; otherwise
    # both entries are listed and stderr is empty.
    try:
        unicode_to_output(u"\u00C4rtonwall")
    except UnicodeEncodeError:
        self.failUnlessReallyEqual(rc, 1)
        self.failUnlessReallyEqual(out, "Metallica\n")
        self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
        self.failUnlessIn("files whose names could not be converted", err)
    else:
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual(out.decode(get_io_encoding()),
                                   u"Metallica\n\u00C4rtonwall\n")
        self.failUnlessReallyEqual(err, "")
def test_get_io_encoding_not_from_stdout(self):
    """Fall back to locale.getpreferredencoding() when stdout has no usable encoding."""
    preferredencoding = "koi8-r"
    def call_locale_getpreferredencoding():
        # closure: returns whatever 'preferredencoding' currently holds
        return preferredencoding
    self.patch(locale, "getpreferredencoding", call_locale_getpreferredencoding)
    mock_stdout = MockStdout()
    self.patch(sys, "stdout", mock_stdout)

    # Windows always reports utf-8 regardless of the locale.
    expected = sys.platform == "win32" and "utf-8" or "koi8-r"
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    mock_stdout.encoding = None
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    # with no locale preference either, utf-8 is the final fallback
    preferredencoding = None
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), "utf-8")
def test_immutable_from_file_unicode(self):
    """Upload a LIT-sized file under a non-ASCII name and read it back."""
    # tahoe put "\u00E0 trier.txt" "\u00E0 trier.txt"
    try:
        a_trier_arg = u"\u00E0 trier.txt".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest(
            "A non-ASCII command argument could not be encoded on this platform."
        )

    skip_if_cannot_represent_filename(u"\u00E0 trier.txt")

    self.basedir = "cli/Put/immutable_from_file_unicode"
    self.set_up_grid(oneshare=True)

    rel_fn = os.path.join(unicode(self.basedir), u"\u00E0 trier.txt")
    # we make the file small enough to fit in a LIT file, for speed
    DATA = "short file"
    fileutil.write(rel_fn, DATA)

    d = self.do_cli("create-alias", "tahoe")
    d.addCallback(lambda res: self.do_cli(
        "put", rel_fn.encode(get_io_encoding()), a_trier_arg))
    def _uploaded(args):
        (rc, out, err) = args
        readcap = out.strip()
        # LIT caps embed the data, so a small file must produce one.
        self.failUnless(readcap.startswith("URI:LIT:"), readcap)
        self.failUnlessIn("201 Created", err)
        self.readcap = readcap
    d.addCallback(_uploaded)
    d.addCallback(lambda res: self.do_cli("get", "tahoe:" + a_trier_arg))
    d.addCallback(
        lambda rc_out_err: self.failUnlessReallyEqual(rc_out_err[1], DATA))
    return d
def test_get_io_encoding_not_from_stdout(self, mock_locale_getpreferredencoding):
    """With a mocked locale, stdout lacking an encoding falls back to the locale's choice."""
    locale # hush pyflakes
    mock_locale_getpreferredencoding.return_value = 'koi8-r'

    class _StdoutStub:
        pass

    saved_stdout = sys.stdout
    sys.stdout = _StdoutStub()
    try:
        # Windows pins the io encoding to utf-8 regardless of the locale.
        expected = 'utf-8' if sys.platform == "win32" else 'koi8-r'
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), expected)

        sys.stdout.encoding = None
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), expected)

        # no locale preference either: utf-8 is the final fallback
        mock_locale_getpreferredencoding.return_value = None
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), 'utf-8')
    finally:
        sys.stdout = saved_stdout
def unicode_or_fallback(self, unicode_name, fallback_name, io_as_well=False):
    """Return unicode_name when it is representable (on the filesystem, and
    optionally in the I/O encoding); otherwise return fallback_name."""
    def _encodable(encoding):
        # True iff unicode_name survives encoding to the given codec.
        try:
            unicode_name.encode(encoding)
        except UnicodeEncodeError:
            return False
        return True

    if not unicode_platform() and not _encodable(get_filesystem_encoding()):
        return fallback_name
    if io_as_well and not _encodable(get_io_encoding()):
        return fallback_name
    return unicode_name
def test_get_io_encoding(self):
    """get_io_encoding() mirrors sys.stdout.encoding, normalized to a canonical name."""
    fake_stdout = MockStdout()
    self.patch(sys, "stdout", fake_stdout)

    # Both spellings of UTF-8 normalize to "utf-8".
    for reported in ("UTF-8", "cp65001"):
        fake_stdout.encoding = reported
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), "utf-8")

    fake_stdout.encoding = "koi8-r"
    expected = "utf-8" if sys.platform == "win32" else "koi8-r"
    _reload()
    self.failUnlessReallyEqual(get_io_encoding(), expected)

    fake_stdout.encoding = "nonexistent_encoding"
    if sys.platform == "win32":
        _reload()
        self.failUnlessReallyEqual(get_io_encoding(), "utf-8")
    else:
        # Unknown encodings must be rejected at reload time.
        self.failUnlessRaises(AssertionError, _reload)
def parse_or_exit_with_explanation(argv, stdout=sys.stdout):
    """Parse argv into an Options instance; on a usage error print the
    relevant usage text plus the error message and exit(1)."""
    config = Options()
    try:
        parse_options(argv, config=config)
    except usage.error, e:
        # walk down to the deepest subcommand so its usage text is printed
        c = config
        while hasattr(c, 'subOptions'):
            c = c.subOptions
        print >>stdout, str(c)
        try:
            msg = e.args[0].decode(get_io_encoding())
        except Exception:
            # message was not decodable; fall back to a safe repr
            msg = repr(e)
        print >>stdout, "%s: %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False))
        sys.exit(1)
def parse_or_exit_with_explanation(argv, stdout=sys.stdout):
    """Parse argv into an Options instance and return it; on a usage error
    print the relevant usage text plus the error message and exit(1)."""
    config = Options()
    try:
        parse_options(argv, config=config)
    except usage.error as e:
        # Drill down to the deepest subcommand so its usage text is shown.
        deepest = config
        while hasattr(deepest, 'subOptions'):
            deepest = deepest.subOptions
        print(str(deepest), file=stdout)
        try:
            msg = e.args[0].decode(get_io_encoding())
        except Exception:
            # message was not decodable; fall back to a safe repr
            msg = repr(e)
        print("%s: %s\n" % (sys.argv[0], quote_output(msg, quotemarks=False)), file=stdout)
        sys.exit(1)
    return config
def _check(args):
    # On an unrepresentable name, 'ls' fails with rc 1, empty stdout, and a
    # conversion warning; otherwise the unicode name is listed cleanly.
    (rc, out, err) = args
    try:
        unicode_to_output(u"\u00C4rtonwall")
    except UnicodeEncodeError:
        self.failUnlessReallyEqual(rc, 1)
        self.assertEqual(len(out), 0, out)
        self.failUnlessIn(quote_output(u"\u00C4rtonwall"), err)
        self.failUnlessIn("files whose names could not be converted", err)
    else:
        self.failUnlessReallyEqual(rc, 0)
        if PY2:
            # On Python 2 the CLI output is bytes; decode before comparing.
            out = out.decode(get_io_encoding())
        self.failUnlessReallyEqual(out, u"\u00C4rtonwall\n")
        self.assertEqual(len(err), 0, err)
def test_exclude_options_unicode(self):
    """--exclude and --exclude-from patterns containing non-ASCII characters."""
    nice_doc = u"nice_d\u00F8c.lyx"
    try:
        doc_pattern_arg = u"*d\u00F8c*".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest(
            "A non-ASCII command argument could not be encoded on this platform."
        )

    root_listdir = (u'lib.a', u'_darcs', u'subdir', nice_doc)
    basedir = "cli/Backup/exclude_options_unicode"
    fileutil.make_dirs(basedir)
    nodeurl_path = os.path.join(basedir, 'node.url')
    fileutil.write(nodeurl_path, 'http://example.net:2357/')
    def parse(args):
        # build BackupOptions for 'tahoe backup <args> from to'
        return parse_options(basedir, "backup", args)

    # test simple exclude
    backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
    filtered = list(backup_options.filter_listdir(root_listdir))
    self._check_filtering(filtered, root_listdir,
                          (u'lib.a', u'_darcs', u'subdir'), (nice_doc, ))
    # multiple exclude
    backup_options = parse(
        ['--exclude', doc_pattern_arg, '--exclude', 'lib.?', 'from', 'to'])
    filtered = list(backup_options.filter_listdir(root_listdir))
    self._check_filtering(filtered, root_listdir,
                          (u'_darcs', u'subdir'), (nice_doc, u'lib.a'))
    # read exclude patterns from file
    exclusion_string = doc_pattern_arg + "\nlib.?"
    excl_filepath = os.path.join(basedir, 'exclusion')
    fileutil.write(excl_filepath, exclusion_string)
    backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
    filtered = list(backup_options.filter_listdir(root_listdir))
    self._check_filtering(filtered, root_listdir,
                          (u'_darcs', u'subdir'), (nice_doc, u'lib.a'))
    # test that an iterator works too
    backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
    filtered = list(backup_options.filter_listdir(iter(root_listdir)))
    self._check_filtering(filtered, root_listdir,
                          (u'lib.a', u'_darcs', u'subdir'), (nice_doc, ))
def lineReceived(self, line):
    """Handle one line of streamed deep-check output: JSON status records,
    plus verbatim passthrough of an ERROR: tail to stderr."""
    if self.in_error:
        # once an ERROR: marker is seen, all remaining lines are the error text
        print(quote_output(line, quotemarks=False), file=self.stderr)
        return
    if line.startswith(b"ERROR:"):
        self.in_error = True
        self.streamer.rc = 1
        print(quote_output(line, quotemarks=False), file=self.stderr)
        return

    d = json.loads(line)
    stdout = self.stdout
    if d["type"] not in ("file", "directory"):
        return
    self.num_objects += 1
    # non-verbose means print a progress marker every 100 files
    if self.num_objects % 100 == 0:
        print("%d objects checked.." % self.num_objects, file=stdout)
    cr = d["check-results"]
    if cr["results"]["healthy"]:
        self.files_healthy += 1
    else:
        self.files_unhealthy += 1
    if self.verbose:
        # verbose means also print one line per file
        path = d["path"]
        if not path:
            path = ["<root>"]

        # LIT files and directories do not have a "summary" field.
        summary = cr.get("summary", "Healthy (LIT)")
        # When Python 2 is dropped the ensure_text()/ensure_str() will be unnecessary.
        print(ensure_text(ensure_str("%s: %s") % (quote_path(path),
                                                  quote_output(summary, quotemarks=False)),
                          encoding=get_io_encoding()), file=stdout)

    # always print out corrupt shares
    for shareloc in cr["results"].get("list-corrupt-shares", []):
        (serverid, storage_index, sharenum) = shareloc
        print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum),
              file=stdout)
def test_exclude_options_unicode(self):
    """--exclude and --exclude-from patterns containing non-ASCII characters."""
    nice_doc = u"nice_d\u00F8c.lyx"
    try:
        doc_pattern_arg = u"*d\u00F8c*".encode(get_io_encoding())
    except UnicodeEncodeError:
        raise unittest.SkipTest("A non-ASCII command argument could not be encoded on this platform.")

    root_listdir = (u'lib.a', u'_darcs', u'subdir', nice_doc)
    basedir = "cli/Backup/exclude_options_unicode"
    fileutil.make_dirs(basedir)
    nodeurl_path = os.path.join(basedir, 'node.url')
    fileutil.write(nodeurl_path, 'http://example.net:2357/')
    def parse(args):
        # build BackupOptions for 'tahoe backup <args> from to'
        return parse_options(basedir, "backup", args)

    # test simple exclude
    backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
    filtered = list(backup_options.filter_listdir(root_listdir))
    self._check_filtering(filtered, root_listdir,
                          (u'lib.a', u'_darcs', u'subdir'), (nice_doc,))
    # multiple exclude
    backup_options = parse(['--exclude', doc_pattern_arg, '--exclude', 'lib.?', 'from', 'to'])
    filtered = list(backup_options.filter_listdir(root_listdir))
    self._check_filtering(filtered, root_listdir,
                          (u'_darcs', u'subdir'), (nice_doc, u'lib.a'))
    # read exclude patterns from file
    exclusion_string = doc_pattern_arg + "\nlib.?"
    excl_filepath = os.path.join(basedir, 'exclusion')
    fileutil.write(excl_filepath, exclusion_string)
    backup_options = parse(['--exclude-from', excl_filepath, 'from', 'to'])
    filtered = list(backup_options.filter_listdir(root_listdir))
    self._check_filtering(filtered, root_listdir,
                          (u'_darcs', u'subdir'), (nice_doc, u'lib.a'))
    # test that an iterator works too
    backup_options = parse(['--exclude', doc_pattern_arg, 'from', 'to'])
    filtered = list(backup_options.filter_listdir(iter(root_listdir)))
    self._check_filtering(filtered, root_listdir,
                          (u'lib.a', u'_darcs', u'subdir'), (nice_doc,))
def create_node(config):
    """Create a new client node directory, optionally joining a grid via a
    magic-wormhole invite; yields (inlineCallbacks) and returns an exit code
    via defer.returnValue."""
    out = config.stdout
    err = config.stderr
    basedir = config['basedir']
    # This should always be called with an absolute Unicode basedir.
    precondition(isinstance(basedir, unicode), basedir)

    if os.path.exists(basedir):
        if listdir_unicode(basedir):
            print >>err, "The base directory %s is not empty." % quote_local_unicode_path(basedir)
            print >>err, "To avoid clobbering anything, I am going to quit now."
            print >>err, "Please use a different directory, or empty this one."
            defer.returnValue(-1)
        # we're willing to use an empty directory
    else:
        os.mkdir(basedir)
    write_tac(basedir, "client")

    # if we're doing magic-wormhole stuff, do it now
    if config['join'] is not None:
        try:
            remote_config = yield _get_config_via_wormhole(config)
        except RuntimeError as e:
            print >>err, str(e)
            defer.returnValue(1)

        # configuration we'll allow the inviter to set
        whitelist = [
            'shares-happy', 'shares-needed', 'shares-total',
            'introducer', 'nickname',
        ]
        sensitive_keys = ['introducer']

        print >>out, "Encoding: {shares-needed} of {shares-total} shares, on at least {shares-happy} servers".format(**remote_config)
        print >>out, "Overriding the following config:"
        for k in whitelist:
            v = remote_config.get(k, None)
            if v is not None:
                # we're faking usually argv-supplied options :/
                if isinstance(v, unicode):
                    v = v.encode(get_io_encoding())
                config[k] = v
                if k not in sensitive_keys:
                    if k not in ['shares-happy', 'shares-total', 'shares-needed']:
                        print >>out, " {}: {}".format(k, v)
                else:
                    print >>out, " {}: [sensitive data; see tahoe.cfg]".format(k)

    fileutil.make_dirs(os.path.join(basedir, "private"), 0700)
    with open(os.path.join(basedir, "tahoe.cfg"), "w") as c:
        yield write_node_config(c, config)
        write_client_config(c, config)

    print >>out, "Node created in %s" % quote_local_unicode_path(basedir)
    tahoe_cfg = quote_local_unicode_path(os.path.join(basedir, "tahoe.cfg"))
    if not config.get("introducer", ""):
        print >>out, " Please set [client]introducer.furl= in %s!" % tahoe_cfg
        print >>out, " The node cannot connect to a grid without it."
    if not config.get("nickname", ""):
        print >>out, " Please set [node]nickname= in %s" % tahoe_cfg
    defer.returnValue(0)
def lineReceived(self, line):
    """Handle one line of streamed deep-check-and-repair output: JSON
    status records, plus verbatim passthrough of an ERROR: tail to stderr."""
    if self.in_error:
        # once an ERROR: marker is seen, all remaining lines are the error text
        print(quote_output(line, quotemarks=False), file=self.stderr)
        return
    if line.startswith(b"ERROR:"):
        self.in_error = True
        self.streamer.rc = 1
        print(quote_output(line, quotemarks=False), file=self.stderr)
        return

    d = json.loads(line)
    stdout = self.stdout
    if d["type"] not in ("file", "directory"):
        return
    self.num_objects += 1
    # non-verbose means print a progress marker every 100 files
    if self.num_objects % 100 == 0:
        print("%d objects checked.." % self.num_objects, file=stdout)
    crr = d["check-and-repair-results"]
    if d["storage-index"]:
        if crr["pre-repair-results"]["results"]["healthy"]:
            was_healthy = True
            self.pre_repair_files_healthy += 1
        else:
            was_healthy = False
            self.pre_repair_files_unhealthy += 1
        if crr["post-repair-results"]["results"]["healthy"]:
            self.post_repair_files_healthy += 1
        else:
            self.post_repair_files_unhealthy += 1
    else:
        # LIT file
        was_healthy = True
        self.pre_repair_files_healthy += 1
        self.post_repair_files_healthy += 1
    if crr["repair-attempted"]:
        self.repairs_attempted += 1
    if crr["repair-successful"]:
        self.repairs_successful += 1
    if self.verbose:
        # verbose means also print one line per file
        path = d["path"]
        if not path:
            path = ["<root>"]
        # we don't seem to have a summary available, so build one
        # When Python 2 is dropped the ensure_text/ensure_str crap can be
        # dropped.
        if was_healthy:
            summary = ensure_str("healthy")
        else:
            summary = ensure_str("not healthy")
        print(ensure_text(ensure_str("%s: %s") % (quote_path(path), summary),
                          encoding=get_io_encoding()), file=stdout)

    # always print out corrupt shares
    prr = crr.get("pre-repair-results", {})
    for shareloc in prr.get("results", {}).get("list-corrupt-shares", []):
        (serverid, storage_index, sharenum) = shareloc
        print(" corrupt: %s" % _quote_serverid_index_share(serverid, storage_index, sharenum),
              file=stdout)

    # always print out repairs
    if crr["repair-attempted"]:
        if crr["repair-successful"]:
            print(" repair successful", file=stdout)
        else:
            print(" repair failed", file=stdout)
def skip_if_cannot_represent_argv(u):
    """Raise SkipTest unless u is encodable in the current I/O encoding."""
    precondition(isinstance(u, unicode))
    argv_encoding = get_io_encoding()
    try:
        u.encode(argv_encoding)
    except UnicodeEncodeError:
        raise unittest.SkipTest(
            "A non-ASCII argv could not be encoded on this platform.")
def test_list(self):
    """Exercise 'tahoe ls' against healthy, unrecoverable, unknown-cap,
    and non-ASCII-named directory entries."""
    self.basedir = "cli/List/list"
    self.set_up_grid()
    c0 = self.g.clients[0]
    small = "small"

    # u"g\u00F6\u00F6d" might not be representable in the argv and/or
    # output encodings. It is initially included in the directory in
    # any case.
    try:
        good_arg = u"g\u00F6\u00F6d".encode(get_io_encoding())
    except UnicodeEncodeError:
        good_arg = None

    try:
        good_out = u"g\u00F6\u00F6d".encode(get_io_encoding())
    except UnicodeEncodeError:
        good_out = None

    d = c0.create_dirnode()
    def _stash_root_and_create_file(n):
        self.rootnode = n
        self.rooturi = n.get_uri()
        return n.add_file(u"g\u00F6\u00F6d", upload.Data(small, convergence=""))
    d.addCallback(_stash_root_and_create_file)
    def _stash_goodcap(n):
        self.goodcap = n.get_uri()
    d.addCallback(_stash_goodcap)
    # build one subdirectory with too few shares and one with none
    d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"1share"))
    d.addCallback(lambda n: self.delete_shares_numbered(n.get_uri(), range(1,10)))
    d.addCallback(lambda ign: self.rootnode.create_subdirectory(u"0share"))
    d.addCallback(lambda n: self.delete_shares_numbered(n.get_uri(), range(0,10)))
    d.addCallback(lambda ign: self.do_cli("add-alias", "tahoe", self.rooturi))
    d.addCallback(lambda ign: self.do_cli("ls"))
    def _check1((rc,out,err)):
        # unrepresentable name: rc 1 + warning; otherwise all three listed
        if good_out is None:
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessIn("files whose names could not be converted", err)
            self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err)
            self.failUnlessReallyEqual(sorted(out.splitlines()),
                                       sorted(["0share", "1share"]))
        else:
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessReallyEqual(err, "")
            self.failUnlessReallyEqual(sorted(out.splitlines()),
                                       sorted(["0share", "1share", good_out]))
    d.addCallback(_check1)
    d.addCallback(lambda ign: self.do_cli("ls", "missing"))
    def _check2((rc,out,err)):
        # a nonexistent path must fail cleanly
        self.failIfEqual(rc, 0)
        self.failUnlessReallyEqual(err.strip(), "No such file or directory")
        self.failUnlessReallyEqual(out, "")
    d.addCallback(_check2)
    d.addCallback(lambda ign: self.do_cli("ls", "1share"))
    def _check3((rc,out,err)):
        # unrecoverable directories report the GET failure
        self.failIfEqual(rc, 0)
        self.failUnlessIn("Error during GET: 410 Gone", err)
        self.failUnlessIn("UnrecoverableFileError:", err)
        self.failUnlessIn("could not be retrieved, because there were "
                          "insufficient good shares.", err)
        self.failUnlessReallyEqual(out, "")
    d.addCallback(_check3)
    d.addCallback(lambda ign: self.do_cli("ls", "0share"))
    d.addCallback(_check3)
    def _check4((rc, out, err)):
        if good_out is None:
            self.failUnlessReallyEqual(rc, 1)
            self.failUnlessIn("files whose names could not be converted", err)
            self.failUnlessIn(quote_output(u"g\u00F6\u00F6d"), err)
            self.failUnlessReallyEqual(out, "")
        else:
            # listing a file (as dir/filename) should have the edge metadata,
            # including the filename
            self.failUnlessReallyEqual(rc, 0)
            self.failUnlessIn(good_out, out)
            self.failIfIn("-r-- %d -" % len(small), out,
                          "trailing hyphen means unknown date")

    if good_arg is not None:
        d.addCallback(lambda ign: self.do_cli("ls", "-l", good_arg))
        d.addCallback(_check4)
        # listing a file as $DIRCAP/filename should work just like dir/filename
        d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + "/" + good_arg))
        d.addCallback(_check4)
        # and similarly for $DIRCAP:./filename
        d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + ":./" + good_arg))
        d.addCallback(_check4)

    def _check5((rc, out, err)):
        # listing a raw filecap should not explode, but it will have no
        # metadata, just the size
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual("-r-- %d -" % len(small), out.strip())
    d.addCallback(lambda ign: self.do_cli("ls", "-l", self.goodcap))
    d.addCallback(_check5)

    # Now rename 'g\u00F6\u00F6d' to 'good' and repeat the tests that
    # might have been skipped due to encoding problems.
    d.addCallback(lambda ign: self.rootnode.move_child_to(u"g\u00F6\u00F6d",
                                                          self.rootnode, u"good"))
    d.addCallback(lambda ign: self.do_cli("ls"))
    def _check1_ascii((rc,out,err)):
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessReallyEqual(err, "")
        self.failUnlessReallyEqual(sorted(out.splitlines()),
                                   sorted(["0share", "1share", "good"]))
    d.addCallback(_check1_ascii)
    def _check4_ascii((rc, out, err)):
        # listing a file (as dir/filename) should have the edge metadata,
        # including the filename
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessIn("good", out)
        self.failIfIn("-r-- %d -" % len(small), out,
                      "trailing hyphen means unknown date")

    d.addCallback(lambda ign: self.do_cli("ls", "-l", "good"))
    d.addCallback(_check4_ascii)
    # listing a file as $DIRCAP/filename should work just like dir/filename
    d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + "/good"))
    d.addCallback(_check4_ascii)
    # and similarly for $DIRCAP:./filename
    d.addCallback(lambda ign: self.do_cli("ls", "-l", self.rooturi + ":./good"))
    d.addCallback(_check4_ascii)

    unknown_immcap = "imm.URI:unknown"
    def _create_unknown(ign):
        nm = c0.nodemaker
        kids = {u"unknownchild-imm": (nm.create_from_cap(unknown_immcap), {})}
        return self.rootnode.create_subdirectory(u"unknown",
                                                 initial_children=kids,
                                                 mutable=False)
    d.addCallback(_create_unknown)
    def _check6((rc, out, err)):
        # listing a directory referencing an unknown object should print
        # an extra message to stderr
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessIn("?r-- ? - unknownchild-imm\n", out)
        self.failUnlessIn("included unknown objects", err)
    d.addCallback(lambda ign: self.do_cli("ls", "-l", "unknown"))
    d.addCallback(_check6)
    def _check7((rc, out, err)):
        # listing an unknown cap directly should print an extra message
        # to stderr (currently this only works if the URI starts with 'URI:'
        # after any 'ro.' or 'imm.' prefix, otherwise it will be confused
        # with an alias).
        self.failUnlessReallyEqual(rc, 0)
        self.failUnlessIn("?r-- ? -\n", out)
        self.failUnlessIn("included unknown objects", err)
    d.addCallback(lambda ign: self.do_cli("ls", "-l", unknown_immcap))
    d.addCallback(_check7)
    return d
def create_node(config):
    """Create a new client node directory, optionally joining a grid via a
    magic-wormhole invite; yields (inlineCallbacks) and returns an exit code
    via defer.returnValue."""
    out = config.stdout
    err = config.stderr
    basedir = config['basedir']
    # This should always be called with an absolute Unicode basedir.
    precondition(isinstance(basedir, unicode), basedir)

    if os.path.exists(basedir):
        if listdir_unicode(basedir):
            print >> err, "The base directory %s is not empty." % quote_local_unicode_path(
                basedir)
            print >> err, "To avoid clobbering anything, I am going to quit now."
            print >> err, "Please use a different directory, or empty this one."
            defer.returnValue(-1)
        # we're willing to use an empty directory
    else:
        os.mkdir(basedir)
    write_tac(basedir, "client")

    # if we're doing magic-wormhole stuff, do it now
    if config['join'] is not None:
        try:
            remote_config = yield _get_config_via_wormhole(config)
        except RuntimeError as e:
            print >> err, str(e)
            defer.returnValue(1)

        # configuration we'll allow the inviter to set
        whitelist = [
            'shares-happy', 'shares-needed', 'shares-total',
            'introducer', 'nickname',
        ]
        sensitive_keys = ['introducer']

        print >> out, "Encoding: {shares-needed} of {shares-total} shares, on at least {shares-happy} servers".format(
            **remote_config)
        print >> out, "Overriding the following config:"
        for k in whitelist:
            v = remote_config.get(k, None)
            if v is not None:
                # we're faking usually argv-supplied options :/
                if isinstance(v, unicode):
                    v = v.encode(get_io_encoding())
                config[k] = v
                if k not in sensitive_keys:
                    if k not in [
                            'shares-happy', 'shares-total', 'shares-needed'
                    ]:
                        print >> out, " {}: {}".format(k, v)
                else:
                    print >> out, " {}: [sensitive data; see tahoe.cfg]".format(
                        k)

    fileutil.make_dirs(os.path.join(basedir, "private"), 0700)
    with open(os.path.join(basedir, "tahoe.cfg"), "w") as c:
        yield write_node_config(c, config)
        write_client_config(c, config)

    print >> out, "Node created in %s" % quote_local_unicode_path(basedir)
    tahoe_cfg = quote_local_unicode_path(os.path.join(basedir, "tahoe.cfg"))
    if not config.get("introducer", ""):
        print >> out, " Please set [client]introducer.furl= in %s!" % tahoe_cfg
        print >> out, " The node cannot connect to a grid without it."
    if not config.get("nickname", ""):
        print >> out, " Please set [node]nickname= in %s" % tahoe_cfg
    defer.returnValue(0)