def test_json_serialization(self, tmpdir):
    """Round-trip the JSON helpers: datetime serialization, str/bytes and
    compact/pretty variants of json_encode, and on-disk writes through
    write_json_file in both default and compact modes."""
    sample = {
        "foo": ["bar", "baz", 42],
        "t": datetime.datetime(2015, 9, 1, 4, 0, 0),
        "f": 0.42,
    }

    # default_json_serialization renders the naive datetime as ISO-8601 + "Z"
    encoded = json.dumps(
        sample,
        default=default_json_serialization,
        separators=(",", ":"),
        sort_keys=True,
    )
    assert encoded == '{"f":0.42,"foo":["bar","baz",42],"t":"2015-09-01T04:00:00Z"}'

    assert isinstance(json_encode(sample), str)
    assert isinstance(json_encode(sample, binary=True), bytes)
    assert "\n" not in json_encode(sample)
    assert "\n" in json_encode(sample, compact=False)

    target = tmpdir.join("test.json").strpath
    write_json_file(target, sample)
    with open(target, "r") as handle:
        loaded = json.load(handle)
    expected = dict(sample, t=sample["t"].isoformat() + "Z")
    assert loaded == expected

    # Compact mode must emit a single line that parses to the same content
    write_json_file(target, sample, compact=True)
    with open(target, "r") as handle:
        raw = handle.read()
    assert "\n" not in raw
    reparsed = json.loads(raw)
    assert loaded == reparsed
def _response_handler(self, method):
    """Generator wrapper around a request handler.

    Yields the "/"-split request path to the caller; when the caller's code
    finishes (or raises), turns the outcome into an HTTP reply:
      - HttpResponse raised   -> that response is sent as-is
      - any other exception   -> logged, answered with status 503
      - no exception at all   -> "no response generated", status 500
    """
    self.server.log.debug("Request: %s %r", method, self.path)
    path = self.path.lstrip("/").split("/")
    resp = None
    try:
        yield path
    except HttpResponse as ex:
        # Handlers report their result by raising HttpResponse
        resp = ex
    except Exception as ex:  # pylint: disable=broad-except
        msg = "server failure: {0.__class__.__name__}: {0}".format(ex)
        self.server.log.exception(msg)
        resp = HttpResponse(msg, status=503)
    else:
        # Handler returned without producing a response: internal error
        resp = HttpResponse("no response generated", status=500)

    # Errors are logged louder and default to a text/plain body
    if resp.error:
        self.server.log.warning(str(resp))
        resp.headers.setdefault("content-type", "text/plain")
    else:
        self.server.log.debug(str(resp))
        resp.headers.setdefault("content-type", "application/octet-stream")

    # Serialize the payload; a dict body forces the JSON content type
    if isinstance(resp.msg, dict):
        body = json_encode(resp.msg, compact=False, binary=True)
        resp.headers["content-type"] = "application/json"
    elif resp.msg:
        body = resp.msg.encode("utf-8")
    else:
        body = b""

    resp.headers["content-length"] = str(len(body))
    self.send_response(resp.status)
    for header_name, header_value in resp.headers.items():
        self.send_header(header_name, header_value)
    self.end_headers()
    self.wfile.write(body)
def test_json_serialization(self):
    """Check datetime serialization via default_json_serialization and the
    str/bytes + compact/pretty behavior of json_encode."""
    payload = {
        "foo": ["bar", "baz", 42],
        "t": datetime.datetime(2015, 9, 1, 4, 0, 0),
        "f": 0.42,
    }

    # Naive datetime must be rendered as ISO-8601 with a trailing "Z"
    dumped = json.dumps(
        payload,
        default=default_json_serialization,
        separators=(",", ":"),
        sort_keys=True,
    )
    assert dumped == '{"f":0.42,"foo":["bar","baz",42],"t":"2015-09-01T04:00:00Z"}'

    assert isinstance(json_encode(payload), str)
    assert isinstance(json_encode(payload, binary=True), bytes)
    assert "\n" not in json_encode(payload)
    assert "\n" in json_encode(payload, compact=False)
def write_backup_files(what):
    """Materialize fake delta basebackups on disk.

    *what* maps a backup name (may be falsy, in which case it is skipped)
    to a ``(wal_start, hexdigests)`` pair.  For each named backup this
    writes a snappy-compressed tar containing a ``.pghoard_tar_metadata.json``
    manifest, one empty file + ``.metadata`` per hexdigest in the delta
    directory, and a ``<backup>.metadata`` JSON sidecar.
    """
    for backup_name, backup_data in what.items():
        wal_start, hexdigests = backup_data
        if backup_name:
            backup_path = os.path.join(basebackup_storage_path, backup_name)
            # Backup names encode a timestamp as "-"/"_" separated parts
            date_parts = [int(part) for part in backup_name.replace("_", "-").split("-")]
            start_time = datetime.datetime(*date_parts, tzinfo=datetime.timezone.utc)

            manifest = {
                "manifest": {
                    "snapshot_result": {
                        "state": {
                            "files": [
                                {"relative_path": digest, "hexdigest": digest}
                                for digest in hexdigests
                            ]
                        }
                    }
                }
            }
            mtime = time.time()
            blob = io.BytesIO(common.json_encode(manifest, binary=True))
            tar_info = tarfile.TarInfo(name=".pghoard_tar_metadata.json")
            tar_info.size = len(blob.getbuffer())
            tar_info.mtime = mtime

            with open(backup_path, "wb") as fp:
                with rohmufile.file_writer(
                    compression_algorithm="snappy", compression_level=0, fileobj=fp
                ) as output_obj:
                    with tarfile.TarFile(fileobj=output_obj, mode="w") as tar:
                        tar.addfile(tar_info, blob)
                    # Size of the uncompressed tar stream written so far
                    input_size = output_obj.tell()

            # One empty payload + metadata file per referenced hexdigest
            delta_dir = Path(basebackup_delta_path)
            for digest in hexdigests:
                with open(delta_dir / digest, "w") as digest_file, \
                        open(delta_dir / (digest + ".metadata"), "w") as digest_meta_file:
                    json.dump({}, digest_file)
                    json.dump({}, digest_meta_file)

            with open(backup_path + ".metadata", "w") as fp:
                json.dump(
                    {
                        "start-wal-segment": wal_start,
                        "start-time": start_time.isoformat(),
                        "format": BaseBackupFormat.delta_v1,
                        "compression-algorithm": "snappy",
                        "original-file-size": input_size,
                    },
                    fp,
                )