def http_restore(pghoard):
    pgdata = os.path.dirname(pghoard.config["backup_sites"][pghoard.test_site]["pg_xlog_directory"])
    return HTTPRestore("localhost", pghoard.config["http_port"], site=pghoard.test_site, pgdata=pgdata)


class TestWebServer(TestCase):
    def setUp(self):
        self.log = logging.getLogger("TestWebServer")
        self.temp_dir = tempfile.mkdtemp()
        self.compressed_xlog_path = os.path.join(self.temp_dir, "default", "compressed_xlog")
        self.basebackup_path = os.path.join(self.temp_dir, "default", "basebackup")
        self.compressed_timeline_path = os.path.join(self.temp_dir, "default", "compressed_timeline")
        self.pgdata_path = os.path.join(self.temp_dir, "pgdata")
        self.pg_xlog_dir = os.path.join(self.pgdata_path, "pg_xlog")
        self.config = {
            "backup_sites": {
                "default": {
                    "pg_xlog_directory": self.pg_xlog_dir,
                    "object_storage": {},
                },
            },
            "http_address": "127.0.0.1",
            "http_port": random.randint(1024, 32000),
            "backup_location": self.temp_dir,
        }
        self.compression_queue = Queue()
        self.transfer_queue = Queue()
        os.makedirs(self.compressed_xlog_path)
        os.makedirs(self.basebackup_path)
        os.makedirs(self.compressed_timeline_path)
        os.makedirs(self.pgdata_path)
        os.makedirs(self.pg_xlog_dir)
        # Write one uncompressed WAL segment into pg_xlog and an xz-compressed
        # copy of it into the site's compressed_xlog directory
        self.uncompressed_foo_path = os.path.join(self.pg_xlog_dir, "00000001000000000000000C")
        with open(self.uncompressed_foo_path, "wb") as out_file:
            out_file.write(b"foo")
        self.foo_path = os.path.join(self.compressed_xlog_path, "00000001000000000000000C")
        with open(self.foo_path, "wb") as out_file:
            out_file.write(b"foo")
        with open(self.foo_path, "rb") as fp:
            lzma_fp = lzma_open(self.foo_path + ".xz", mode="wb", preset=0)
            lzma_fp.write(fp.read())
            lzma_fp.close()
        self.webserver = WebServer(config=self.config,
                                   compression_queue=self.compression_queue,
                                   transfer_queue=self.transfer_queue)
        self.webserver.start()
        self.http_restore = HTTPRestore("localhost", self.config["http_port"], site="default", pgdata=self.pgdata_path)
        time.sleep(0.05)  # Hack to give the server time to start up

    def test_list_empty_basebackups(self):
        self.assertEqual(self.http_restore.list_basebackups(), [])

    def test_archiving(self):
        compressor = Compressor(config=self.config,
                                compression_queue=self.compression_queue,
                                transfer_queue=self.transfer_queue)
        compressor.start()
        xlog_file = "00000001000000000000000C"
        self.assertTrue(archive(port=self.config["http_port"], site="default", xlog_file=xlog_file))
        self.assertTrue(os.path.exists(os.path.join(self.compressed_xlog_path, xlog_file)))
        self.log.error(os.path.join(self.compressed_xlog_path, xlog_file))
        compressor.running = False

    def test_archiving_backup_label_from_archive_command(self):
        # Backup label files passed through archive_command must not end up in
        # the compressed_xlog directory
        compressor = Compressor(config=self.config,
                                compression_queue=self.compression_queue,
                                transfer_queue=self.transfer_queue)
        compressor.start()
        xlog_file = "000000010000000000000002.00000028.backup"
        xlog_path = os.path.join(self.pg_xlog_dir, xlog_file)
        with open(xlog_path, "w") as fp:
            fp.write("jee")
        self.assertTrue(archive(port=self.config["http_port"], site="default", xlog_file=xlog_file))
        self.assertFalse(os.path.exists(os.path.join(self.compressed_xlog_path, xlog_file)))
        compressor.running = False

    # def test_get_basebackup_file(self):
    #     self.http_restore.get_basebackup_file()

    def test_get_archived_file(self):
        xlog_file = "00000001000000000000000F"
        filepath = os.path.join(self.compressed_xlog_path, xlog_file)
        lzma_fp = lzma_open(filepath + ".xz", mode="wb", preset=0)
        lzma_fp.write(b"jee")
        lzma_fp.close()
        self.http_restore.get_archive_file(xlog_file, "pg_xlog/" + xlog_file, path_prefix=self.pgdata_path)
        self.assertTrue(os.path.exists(os.path.join(self.pg_xlog_dir, xlog_file)))

    def tearDown(self):
        self.webserver.close()
        shutil.rmtree(self.temp_dir)
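

# Optional convenience entry point (an assumption, not part of the original
# file): since TestWebServer is a plain TestCase, the module can also be run
# directly with the standard library runner instead of the project's usual
# test runner.
if __name__ == "__main__":
    import unittest
    unittest.main()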