def test_simple(self):
    """Exercise do_status with one active/recent status item of each kind."""
    status_items = [
        UploadStatus(),
        DownloadStatus(b"abcd", 12345),
        PublishStatus(),
        RetrieveStatus(),
        UpdateStatus(),
        FakeStatus(),
    ]
    # The same items serve as both "active" and "recent" operations.
    marshalled = [marshal_json(item) for item in status_items]
    responses = [
        BytesIO(
            json.dumps({
                "active": marshalled,
                "recent": marshalled,
            }).encode("utf-8")),
        BytesIO(
            json.dumps({
                "counters": {
                    "bytes_downloaded": 0,
                },
                "stats": {
                    "node.uptime": 0,
                }
            }).encode("utf-8")),
    ]

    def fake_http(*args, **kw):
        # Serve the canned responses in request order.
        return responses.pop(0)

    do_status(self.options, fake_http)
def test_any_bytes_unsupported_by_default(self):
    """By default non-UTF-8 bytes raise error."""
    bytestring = b"abc\xff\x00"
    # Every encoding entry point must reject undecodable bytes.
    encoders = [
        lambda b: jsonbytes.dumps(b),
        lambda b: jsonbytes.dumps_bytes(b),
        lambda b: json.dumps(b, cls=jsonbytes.UTF8BytesJSONEncoder),
    ]
    for encode in encoders:
        with self.assertRaises(UnicodeDecodeError):
            encode(bytestring)
def test_any_bytes(self):
    """If any_bytes is True, non-UTF-8 bytes don't break encoding."""
    bytestring = b"abc\xff\xff123"
    o = {bytestring: bytestring}
    expected = {"abc\\xff\\xff123": "abc\\xff\\xff123"}
    self.assertEqual(
        json.loads(jsonbytes.dumps(o, any_bytes=True)),
        expected,
    )
    self.assertEqual(
        json.loads(json.dumps(o, cls=jsonbytes.AnyBytesJSONEncoder)),
        expected,
    )
    # Fixed: this assertion previously duplicated the jsonbytes.dumps()
    # check above, leaving dumps_bytes(..., any_bytes=True) untested.
    self.assertEqual(
        json.loads(jsonbytes.dumps_bytes(o, any_bytes=True)),
        expected,
    )
def list_aliases(options):
    """
    Show aliases that exist.
    """
    data = _get_alias_details(options['node-directory'])

    if options['json']:
        dumped = json.dumps(data, indent=4)
        # On old Pythons json.dumps could hand back bytes; normalize to text.
        if isinstance(dumped, bytes):
            dumped = dumped.decode("utf-8")
        output = _escape_format(dumped)
    else:
        # Right-align names in a column wide enough for the longest one.
        widest = max([len(quote_output(name)) for name in data.keys()] + [0])
        template = "%" + str(widest) + "s: %s"

        def render_row(name, details):
            cap = (details['readonly'] if options['readonly-uri']
                   else details['readwrite'])
            return template % (name, cap.decode("utf-8"))

        output = "\n".join(
            render_row(name, details) for name, details in data.items())

    if output:
        # Show whatever we computed. Skip this if there is no output to avoid
        # a spurious blank line.
        show_output(options.stdout, output)
    return 0
def test_encode_unicode(self):
    """jsonbytes.dumps() encodes Unicode string as usual."""
    original = {
        u"hello": [1, u"cd"],
    }
    # A round-trip through jsonbytes.dumps + stdlib json.loads is lossless.
    self.assertEqual(json.loads(jsonbytes.dumps(original)), original)
def test_no_operations(self, http):
    """do_status renders cleanly when there are no active/recent operations."""
    status_body = json.dumps({
        "active": [],
        "recent": [],
    })
    stats_body = json.dumps({
        "counters": {
            "bytes_downloaded": 0,
        },
        "stats": {
            "node.uptime": 0,
        }
    })
    responses = [
        StringIO(ensure_text(status_body)),
        StringIO(ensure_text(stats_body)),
    ]
    # Each HTTP call consumes the next canned response.
    http.side_effect = lambda *args, **kw: responses.pop(0)
    do_status(self.options)
def render_JSON(self, req):
    """Render storage-server stats and crawler state as a JSON document."""
    req.setHeader("content-type", "text/plain")
    storage = self._storage
    payload = {
        "stats": storage.get_stats(),
        "bucket-counter": storage.bucket_counter.get_state(),
        "lease-checker": storage.lease_checker.get_state(),
        "lease-checker-progress": storage.lease_checker.get_progress(),
    }
    return json.dumps(payload, indent=1) + "\n"
def _pack_normalized_children(children, writekey, deep_immutable=False):
    """Take a dict that maps:
         children[unicode_nfc_name] = (IFileSystemNode, metadata_dict)
    and pack it into a single string, for use as the contents of the backing
    file. This is the same format as is returned by _unpack_contents. I also
    accept an AuxValueDict, in which case I'll use the auxilliary cached data
    as the pre-packed entry, which is faster than re-packing everything each
    time.

    If writekey is provided then I will superencrypt the child's writecap with
    writekey.

    If deep_immutable is True, I will require that all my children are deeply
    immutable, and will raise a MustBeDeepImmutableError if not.
    """
    precondition((writekey is None) or isinstance(writekey, bytes), writekey)

    # AuxValueDict carries a cached, already-packed entry per child; reuse it
    # below instead of re-serializing the child from scratch.
    has_aux = isinstance(children, AuxValueDict)
    entries = []
    # Sort by name so the packed representation is deterministic.
    for name in sorted(children.keys()):
        assert isinstance(name, str)
        entry = None
        (child, metadata) = children[name]
        child.raise_error()
        if deep_immutable and not child.is_allowed_in_immutable_directory():
            raise MustBeDeepImmutableError(
                "child %r is not allowed in an immutable directory" %
                (name,), name)
        if has_aux:
            # May be None/empty if no cached entry exists for this name.
            entry = children.get_aux(name)
        if not entry:
            assert IFilesystemNode.providedBy(child), (name, child)
            assert isinstance(metadata, dict)
            rw_uri = child.get_write_uri()
            if rw_uri is None:
                rw_uri = b""
            assert isinstance(rw_uri, bytes), rw_uri

            # should be prevented by MustBeDeepImmutableError check above
            assert not (rw_uri and deep_immutable)

            ro_uri = child.get_readonly_uri()
            if ro_uri is None:
                ro_uri = b""
            assert isinstance(ro_uri, bytes), ro_uri
            if writekey is not None:
                # Superencrypt the writecap so only writekey holders can
                # recover it from the packed form.
                writecap = netstring(_encrypt_rw_uri(writekey, rw_uri))
            else:
                writecap = ZERO_LEN_NETSTR
            # Entry wire layout: netstring(name) + netstring(ro_uri) +
            # writecap netstring + netstring(JSON-encoded metadata).
            entry = b"".join([netstring(name.encode("utf-8")),
                              netstring(strip_prefix_for_ro(ro_uri, deep_immutable)),
                              writecap,
                              netstring(json.dumps(metadata).encode("utf-8"))])
        entries.append(netstring(entry))
    return b"".join(entries)
def test_encode_bytes(self):
    """BytesJSONEncoder can encode bytes."""
    raw = {
        b"hello": [1, b"cd"],
    }
    decoded = {
        u"hello": [1, u"cd"],
    }
    # Bytes get passed through as if they were UTF-8 Unicode:
    serialized = jsonbytes.dumps(raw)
    self.assertEqual(json.loads(serialized), decoded)
    self.assertEqual(jsonbytes.loads(serialized), decoded)
def get_client_resource(self, configuration):
    """
    :return: A static data resource that produces the given configuration
        when rendered, as an aid to testing.
    """
    section_items = configuration.items(self._client_section_name, [])
    body = native_str_to_bytes(dumps(dict(section_items)))
    resource = Data(body, ensure_str("text/json"))
    # Give it some dynamic stuff too.
    resource.putChild(b"counter", GetCounter())
    return resource
def mkdir(contents, options):
    """Create an immutable directory via the t=mkdir-immutable web API.

    :param contents: maps child name to a (type-tag, ro_uri, metadata)
        triple describing each child.
    :param options: mapping providing at least 'node-url'.
    :returns: the new directory capability as bytes.
    :raises HTTPError: if the web API does not answer with a 2xx status.
    """
    # Shape required by t=mkdir-immutable:
    #   {childname: (type, {"ro_uri": ..., "metadata": ...})}
    kids = {
        childname: (contents[childname][0], {
            "ro_uri": contents[childname][1],
            "metadata": contents[childname][2],
        })
        for childname in contents
    }
    body = json.dumps(kids).encode("utf-8")
    url = options['node-url'] + "uri?t=mkdir-immutable"
    resp = do_http("POST", url, body)
    if resp.status < 200 or resp.status >= 300:
        raise HTTPError("Error during mkdir", resp)
    # The response body is the new dircap.
    dircap = to_bytes(resp.read().strip())
    return dircap
def test_no_operations(self):
    """do_status copes when there are no active or recent operations."""
    status_json = json.dumps({
        "active": [],
        "recent": [],
    })
    stats_json = json.dumps({
        "counters": {
            "bytes_downloaded": 0,
        },
        "stats": {
            "node.uptime": 0,
        }
    })
    responses = [
        StringIO(ensure_text(status_json)),
        StringIO(ensure_text(stats_json)),
    ]

    def fake_http(*args, **kw):
        # Serve the canned responses in request order.
        return responses.pop(0)

    do_status(self.options, fake_http)
def render_JSON(self, req):
    """Render introducer connection statuses and known servers as JSON."""
    req.setHeader("content-type", "application/json; charset=utf-8")
    summaries = [
        status.summary
        for status in self._client.introducer_connection_statuses()
    ]
    broker = self._client.get_storage_broker()
    result = {
        "introducers": {
            "statuses": summaries,
        },
        "servers": self._describe_known_servers(broker),
    }
    return json.dumps(result, indent=1) + "\n"
def _file_json_metadata(req, filenode, edge_metadata):
    """Render a ("filenode", {...}) JSON description of a file node."""
    info = get_filenode_metadata(filenode)
    # Insertion order below matters: json.dumps preserves it in the output.
    ro_uri = filenode.get_readonly_uri()
    if ro_uri:
        info['ro_uri'] = ro_uri
    rw_uri = filenode.get_write_uri()
    if rw_uri:
        info['rw_uri'] = rw_uri
    verifycap = filenode.get_verify_cap()
    if verifycap:
        info['verify_uri'] = verifycap.to_string()
    if edge_metadata is not None:
        info['metadata'] = edge_metadata
    data = ("filenode", info)
    return text_plain(json.dumps(data, indent=1) + "\n", req)
def test_encode_bytes(self):
    """BytesJSONEncoder can encode bytes.

    Bytes are presumed to be UTF-8 encoded.
    """
    snowman = u"def\N{SNOWMAN}\uFF00"
    raw = {
        b"hello": [1, b"cd", {b"abc": [123, snowman.encode("utf-8")]}],
    }
    decoded = {
        u"hello": [1, u"cd", {u"abc": [123, snowman]}],
    }
    # Bytes get passed through as if they were UTF-8 Unicode:
    serialized = jsonbytes.dumps(raw)
    self.assertEqual(json.loads(serialized), decoded)
    self.assertEqual(jsonbytes.loads(serialized), decoded)
def sign_to_foolscap(announcement, signing_key):
    """
    :param signing_key: a (private) signing key, as returned from
        e.g. :func:`allmydata.crypto.ed25519.signing_keypair_from_string`

    :returns: 3-tuple of (msg, sig, vk) where msg is a UTF8 JSON
        serialization of the `announcement` (bytes), sig is bytes (a
        signature of msg) and vk is the verifying key bytes
    """
    # return (bytes, sig-str, pubkey-str). A future HTTP-based serialization
    # will use JSON({msg:b64(JSON(msg).utf8), sig:v0-b64(sig),
    # pubkey:v0-b64(pubkey)}) .
    msg = json.dumps(announcement).encode("utf-8")
    signature = ed25519.sign_data(signing_key, msg)
    sig = b"v0-" + base32.b2a(signature)
    verifying_key = ed25519.verifying_key_from_signing_key(signing_key)
    verifying_key_string = ed25519.string_from_verifying_key(verifying_key)
    return (msg, sig, remove_prefix(verifying_key_string, b"pub-"))
def render_JSON(self, req):
    """Render deep-check-and-repair results as an indented JSON document.

    Serializes the monitor's counters, corrupt-share lists, per-file
    unhealthy results, and aggregate stats.
    """
    req.setHeader("content-type", "text/plain")
    res = self.monitor.get_status()
    data = {}
    data["finished"] = self.monitor.is_finished()
    data["root-storage-index"] = res.get_root_storage_index_string()
    c = res.get_counters()
    data["count-objects-checked"] = c["count-objects-checked"]
    data["count-objects-healthy-pre-repair"] = c[
        "count-objects-healthy-pre-repair"]
    data["count-objects-unhealthy-pre-repair"] = c[
        "count-objects-unhealthy-pre-repair"]
    data["count-objects-healthy-post-repair"] = c[
        "count-objects-healthy-post-repair"]
    data["count-objects-unhealthy-post-repair"] = c[
        "count-objects-unhealthy-post-repair"]
    data["count-repairs-attempted"] = c["count-repairs-attempted"]
    data["count-repairs-successful"] = c["count-repairs-successful"]
    data["count-repairs-unsuccessful"] = c["count-repairs-unsuccessful"]
    data["count-corrupt-shares-pre-repair"] = c[
        "count-corrupt-shares-pre-repair"]
    # BUG FIX: this previously copied the *pre*-repair counter into the
    # post-repair key, so the post-repair corrupt-share count was always
    # wrong. (Assumes the counters dict carries a post-repair entry,
    # matching the other pre/post pairs above — confirm against the
    # checker-results producer.)
    data["count-corrupt-shares-post-repair"] = c[
        "count-corrupt-shares-post-repair"]
    data["list-corrupt-shares"] = [
        (s.get_longname(), base32.b2a(storage_index), shnum)
        for (s, storage_index, shnum) in res.get_corrupt_shares()
    ]
    data["list-remaining-corrupt-shares"] = [
        (s.get_longname(), base32.b2a(storage_index), shnum)
        for (s, storage_index, shnum) in res.get_remaining_corrupt_shares()
    ]
    # Files that were unhealthy before repair was attempted.
    data["list-unhealthy-files"] = [
        (path_t, json_check_results(crr.get_pre_repair_results()))
        for (path_t, crr) in res.get_all_results().items()
        if not crr.get_pre_repair_results().is_healthy()
    ]
    data["stats"] = res.get_stats()
    return json.dumps(data, indent=1) + "\n"
def render_JSON(self, req):
    """Render deep-check status and results as an indented JSON document."""
    req.setHeader("content-type", "text/plain")
    res = self.monitor.get_status()
    counters = res.get_counters()
    corrupt = [
        (server.get_longname(), base32.b2a(storage_index), shnum)
        for (server, storage_index, shnum) in res.get_corrupt_shares()
    ]
    unhealthy = [
        (path_t, json_check_results(r))
        for (path_t, r) in res.get_all_results().items()
        if not r.is_healthy()
    ]
    # Key order matches the original serialization (json.dumps preserves it).
    data = {
        "finished": self.monitor.is_finished(),
        "root-storage-index": res.get_root_storage_index_string(),
        "count-objects-checked": counters["count-objects-checked"],
        "count-objects-healthy": counters["count-objects-healthy"],
        "count-objects-unhealthy": counters["count-objects-unhealthy"],
        "count-corrupt-shares": counters["count-corrupt-shares"],
        "list-corrupt-shares": corrupt,
        "list-unhealthy-files": unhealthy,
        "stats": res.get_stats(),
    }
    return json.dumps(data, indent=1) + "\n"
def render_JSON(self, req):
    """
    Render JSON describing this introducer node: per-service counts of
    subscribers and announcements.
    """
    # Local import keeps this block self-contained; Counter replaces the
    # hand-rolled "if key not in dict: dict[key] = 0; dict[key] += 1" idiom.
    from collections import Counter

    subscription_summary = Counter(
        s.service_name for s in self.introducer_service.get_subscribers())
    announcement_summary = Counter(
        ad.service_name for ad in self.introducer_service.get_announcements())
    res = {
        u"subscription_summary": dict(subscription_summary),
        u"announcement_summary": dict(announcement_summary),
    }
    return (json.dumps(res, indent=1) + "\n").encode("utf-8")
def test_mkdir_with_children(alice): """ create a directory using ?t=mkdir-with-children """ # create a file to put in our directory FILE_CONTENTS = u"some file contents\n" * 500 resp = requests.put( util.node_url(alice.node_dir, u"uri"), data=FILE_CONTENTS, ) filecap = resp.content.strip() # create a (sub) directory to put in our directory resp = requests.post( util.node_url(alice.node_dir, u"uri"), params={ u"t": u"mkdir", } ) # (we need both the read-write and read-only URIs I guess) dircap = resp.content dircap_obj = allmydata.uri.from_string(dircap) dircap_ro = dircap_obj.get_readonly().to_string() # create json information about our directory meta = { "a_file": [ "filenode", { "ro_uri": filecap, "metadata": { "ctime": 1202777696.7564139, "mtime": 1202777696.7564139, "tahoe": { "linkcrtime": 1202777696.7564139, "linkmotime": 1202777696.7564139 } } } ], "some_subdir": [ "dirnode", { "rw_uri": dircap, "ro_uri": dircap_ro, "metadata": { "ctime": 1202778102.7589991, "mtime": 1202778111.2160511, "tahoe": { "linkcrtime": 1202777696.7564139, "linkmotime": 1202777696.7564139 } } } ] } # create a new directory with one file and one sub-dir (all-at-once) resp = util.web_post( alice, u"uri", params={u"t": "mkdir-with-children"}, data=json.dumps(meta), ) assert resp.startswith(b"URI:DIR2") cap = allmydata.uri.from_string(resp) assert isinstance(cap, allmydata.uri.DirectoryURI)
def render_JSON(self, req):
    """Render the check-and-repair results for one object as JSON."""
    req.setHeader("content-type", "text/plain")
    serializable = json_check_and_repair_results(self._results)
    return json.dumps(serializable, indent=1) + "\n"
def render_GET(self, request):
    """Increment the counter and return its new value as a JSON body."""
    self.value = self.value + 1
    body = dumps({"value": self.value})
    return native_str_to_bytes(body)