def testLongFilenames(self):
    """See if long quoted filenames cause crash"""
    MakeOutputDir()
    outrp = rpath.RPath(Globals.local_connection, abs_output_dir)
    inrp = rpath.RPath(Globals.local_connection,
                       os.path.join(abs_test_dir, b"quotetest"))
    re_init_rpath_dir(inrp)
    long_filename = b"A" * 200  # when quoted should cause overflow
    longrp = inrp.append(long_filename)
    longrp.touch()
    shortrp = inrp.append(b"B")
    shortrp.touch()

    rdiff_backup(1, 1, inrp.path, outrp.path, 100000,
                 extra_options=b"--override-chars-to-quote A")
    longrp_out = outrp.append(long_filename)
    self.assertFalse(longrp_out.lstat())
    shortrp_out = outrp.append(b"B")
    self.assertTrue(shortrp_out.lstat())

    rdiff_backup(1, 1, os.path.join(old_test_dir, b"empty"),
                 outrp.path, 200000)
    shortrp_out.setdata()
    self.assertFalse(shortrp_out.lstat())

    rdiff_backup(1, 1, inrp.path, outrp.path, 300000)
    shortrp_out.setdata()
    self.assertTrue(shortrp_out.lstat())
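# [Illustration, not part of the suite] Why 200 characters overflow once
# quoted: rdiff-backup's filename quoting is assumed here to replace each
# quoted character with ';' plus a three-digit ordinal (b"A" -> b";065"),
# a 4x expansion that pushes the name past the common 255-byte filename
# limit; the figures below are this sketch's arithmetic, not measured output.
unquoted_name = b"A" * 200
assumed_quoted_length = len(unquoted_name) * len(b";065")  # 4 bytes per char
assert assumed_quoted_length == 800 and assumed_quoted_length > 255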
def testRestoreSingle(self):
    """Test restoring files one at a time"""
    MakeOutputDir()
    for rfc in self.get_rfcs():
        if rfc.rf.inc_rp.isincfile():
            continue
        log.Log("Comparing %a" % (rfc.rf.inc_rp.path, ), 5)
        # compare_all() returns an empty list if everything is OK
        self.assertFalse(rfc.compare_all())
def test_rorpiter_xfer(self):
    """Test if hashes are transferred in files, rorpiter"""
    Security._security_level = "override"
    conn = SetConnections._init_connection(
        b'%b %b/server.py' % (os.fsencode(sys.executable), abs_testing_dir))
    # make a connection sanity check
    self.assertEqual(conn.reval("lambda x: x+1", 4), 5)

    fp = hash.FileWrapper(io.BytesIO(self.s1.encode()))
    conn.Globals.set('tmp_file', fp)
    fp_remote = conn.Globals.get('tmp_file')
    self.assertEqual(fp_remote.read(), self.s1.encode())
    self.assertEqual(fp_remote.close().sha1_digest, self.s1_hash)

    # Tested xfer of file, now test xfer of files in rorpiter
    root = MakeOutputDir()
    rp1 = root.append('s1')
    rp1.write_string(self.s1)
    rp2 = root.append('s2')
    rp2.write_string(self.s2)
    rp1.setfile(hash.FileWrapper(rp1.open('rb')))
    rp2.setfile(hash.FileWrapper(rp2.open('rb')))
    rpiter = iter([rp1, rp2])
    conn.Globals.set('tmp_conn_iter', rpiter)
    remote_iter = conn.Globals.get('tmp_conn_iter')

    rorp1 = next(remote_iter)
    fp = hash.FileWrapper(rorp1.open('rb'))
    read_s1 = fp.read().decode()
    self.assertEqual(read_s1, self.s1)
    ret_val = fp.close()
    self.assertIsInstance(ret_val, hash.Report)
    self.assertEqual(ret_val.sha1_digest, self.s1_hash)

    rorp2 = next(remote_iter)
    fp2 = hash.FileWrapper(rorp2.open('rb'))
    read_s2 = fp2.read().decode()
    self.assertEqual(read_s2, self.s2)
    self.assertEqual(fp2.close().sha1_digest, self.s2_hash)

    conn.quit()
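# [Illustration, not part of the suite] A minimal sketch of the contract the
# assertions above rely on, assuming (not copied from rdiff-backup's source)
# that hash.FileWrapper hashes every byte passed through read() and that
# close() returns a report whose sha1_digest holds the hex digest.
import hashlib
import io
from types import SimpleNamespace

class SketchFileWrapper:
    def __init__(self, fileobj):
        self._fileobj = fileobj
        self._sha1 = hashlib.sha1()

    def read(self, length=-1):
        data = self._fileobj.read(length)
        self._sha1.update(data)  # hash the bytes as they stream past
        return data

    def close(self):
        self._fileobj.close()
        return SimpleNamespace(sha1_digest=self._sha1.hexdigest())

wrapped = SketchFileWrapper(io.BytesIO(b"hello"))
assert wrapped.read() == b"hello"
assert wrapped.close().sha1_digest == hashlib.sha1(b"hello").hexdigest()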
def testEmptyDirInclude(self):
    """Make sure empty directories are included with **xx expressions

    This checks for a bug present in 1.0.3/1.1.5 and similar.
    """
    outrp = MakeOutputDir()
    # we need to change directory to be able to work with relative paths
    os.chdir(abs_test_dir)
    os.chdir(os.pardir)  # chdir one level up
    selrp = rpath.RPath(Globals.local_connection, 'testfiles/seltest')
    re_init_rpath_dir(selrp)
    emptydir = selrp.append('emptydir')
    emptydir.mkdir()
    rdiff_backup(1, 1, selrp.path, outrp.path,
                 extra_options=(b"--include **XX "
                                b"--exclude testfiles/seltest/YYYY"))
    outempty = outrp.append('emptydir')
    self.assertTrue(outempty.isdir(), outempty)
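# [Illustration, not part of the suite] A hedged sketch of the extended glob
# semantics behind the options above, as understood here (not rdiff-backup's
# actual matcher): '*' stops at path separators while '**' also matches
# across them; paths matching no pattern fall through to the default action
# (inclusion), which is why 'emptydir' must survive the backup.
import re

def sketch_glob_to_regex(glob):
    regex = re.escape(glob).replace(r"\*\*", ".*").replace(r"\*", "[^/]*")
    return "^%s$" % regex

assert re.match(sketch_glob_to_regex("**XX"), "testfiles/seltest/deep/nameXX")
assert not re.match(sketch_glob_to_regex("*XX"), "testfiles/seltest/nameXX")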
def testInnerRestore(self):
    """Restore part of a dir, see if hard links preserved"""
    MakeOutputDir()
    output = rpath.RPath(Globals.local_connection, abs_output_dir)
    hlout1_dir = os.path.join(abs_test_dir, b"out_hardlink1")
    hlout2_dir = os.path.join(abs_test_dir, b"out_hardlink2")

    # Now set up directories out_hardlink1 and out_hardlink2
    hlout1 = rpath.RPath(Globals.local_connection, hlout1_dir)
    if hlout1.lstat():
        hlout1.delete()
    hlout1.mkdir()
    hlout1_sub = hlout1.append("subdir")
    hlout1_sub.mkdir()
    hl1_1 = hlout1_sub.append("hardlink1")
    hl1_2 = hlout1_sub.append("hardlink2")
    hl1_3 = hlout1_sub.append("hardlink3")
    hl1_4 = hlout1_sub.append("hardlink4")
    # 1 and 2 are hard linked, as are 3 and 4
    hl1_1.touch()
    hl1_2.hardlink(hl1_1.path)
    hl1_3.touch()
    hl1_4.hardlink(hl1_3.path)

    hlout2 = rpath.RPath(Globals.local_connection, hlout2_dir)
    if hlout2.lstat():
        hlout2.delete()
    xcopytree(hlout1_dir, hlout2_dir)
    hlout2_sub = hlout2.append("subdir")
    hl2_1 = hlout2_sub.append("hardlink1")
    hl2_2 = hlout2_sub.append("hardlink2")
    hl2_3 = hlout2_sub.append("hardlink3")
    hl2_4 = hlout2_sub.append("hardlink4")
    # Now 2 and 3 are hard linked, also 1 and 4
    rpath.copy_with_attribs(hl1_1, hl2_1)
    rpath.copy_with_attribs(hl1_2, hl2_2)
    hl2_3.delete()
    hl2_3.hardlink(hl2_2.path)
    hl2_4.delete()
    hl2_4.hardlink(hl2_1.path)
    rpath.copy_attribs(hlout1_sub, hlout2_sub)

    # Now try backing up twice, making sure hard links are preserved
    InternalBackup(1, 1, hlout1.path, output.path)
    out_subdir = output.append("subdir")
    self.assertEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink2").getinode())
    self.assertEqual(
        out_subdir.append("hardlink3").getinode(),
        out_subdir.append("hardlink4").getinode())
    self.assertNotEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink3").getinode())

    time.sleep(1)
    InternalBackup(1, 1, hlout2.path, output.path)
    out_subdir.setdata()
    self.assertEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink4").getinode())
    self.assertEqual(
        out_subdir.append("hardlink2").getinode(),
        out_subdir.append("hardlink3").getinode())
    self.assertNotEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink2").getinode())

    # Now try restoring, still checking hard links.
    sub_dir = os.path.join(abs_output_dir, b"subdir")
    out2_dir = os.path.join(abs_test_dir, b"out2")
    out2 = rpath.RPath(Globals.local_connection, out2_dir)
    hlout1 = out2.append("hardlink1")
    hlout2 = out2.append("hardlink2")
    hlout3 = out2.append("hardlink3")
    hlout4 = out2.append("hardlink4")

    if out2.lstat():
        out2.delete()
    InternalRestore(1, 1, sub_dir, out2_dir, 1)
    out2.setdata()
    for rp in [hlout1, hlout2, hlout3, hlout4]:
        rp.setdata()
    self.assertEqual(hlout1.getinode(), hlout2.getinode())
    self.assertEqual(hlout3.getinode(), hlout4.getinode())
    self.assertNotEqual(hlout1.getinode(), hlout3.getinode())

    if out2.lstat():
        out2.delete()
    InternalRestore(1, 1, sub_dir, out2_dir, int(time.time()))
    out2.setdata()
    for rp in [hlout1, hlout2, hlout3, hlout4]:
        rp.setdata()
    self.assertEqual(hlout1.getinode(), hlout4.getinode())
    self.assertEqual(hlout2.getinode(), hlout3.getinode())
    self.assertNotEqual(hlout1.getinode(), hlout2.getinode())
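# [Illustration, not part of the suite] The invariant every getinode()
# assertion above relies on, shown with plain os calls: hard links to the
# same file share one inode on POSIX filesystems.
import os
import tempfile

with tempfile.TemporaryDirectory() as tmpdir:
    first = os.path.join(tmpdir, "first")
    second = os.path.join(tmpdir, "second")
    open(first, "wb").close()
    os.link(first, second)  # 'second' now hard links to 'first'
    assert os.stat(first).st_ino == os.stat(second).st_ino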
def test_moving_hardlinks(self):
    """Test moving the first hardlinked file in a series to a later place in the series.

    This test is directed at some previously buggy code that failed to
    always keep a sha1 hash in the metadata for the first (and only the
    first) file among a series of linked files. The condition that
    triggered this bug involved removing the first file from a list of
    linked files, while also adding a new file at some later position in
    the list. The total number of hardlinked files in the list remains
    unchanged. None of the files had a sha1 hash saved in its metadata.
    The bug was originally reported here:
    https://savannah.nongnu.org/bugs/?26848
    """
    # Setup initial backup
    MakeOutputDir()
    output = rpath.RPath(Globals.local_connection, abs_output_dir)
    hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

    hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
    if hlsrc.lstat():
        hlsrc.delete()
    hlsrc.mkdir()
    hlsrc_sub = hlsrc.append("subdir")
    hlsrc_sub.mkdir()
    hl_file1 = hlsrc_sub.append("hardlink1")
    hl_file1.write_string(self.hello_str)
    hl_file2 = hlsrc_sub.append("hardlink2")
    hl_file2.hardlink(hl_file1.path)

    InternalBackup(1, 1, hlsrc.path, output.path, 10000)
    out_subdir = output.append("subdir")
    self.assertEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink2").getinode())

    # validate that hashes and link counts are correctly saved in metadata
    meta_prefix = rpath.RPath(
        Globals.local_connection,
        os.path.join(abs_output_dir, b"rdiff-backup-data",
                     b"mirror_metadata"))
    incs = meta_prefix.get_incfiles_list()
    self.assertEqual(len(incs), 1)
    metadata_rp = incs[0]
    hashes, link_counts = self.extract_metadata(metadata_rp)
    # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink2
    expected_hashes = [None, None, self.hello_str_hash, None]
    self.assertEqual(expected_hashes, hashes)
    expected_link_counts = [1, 1, 2, 2]
    self.assertEqual(expected_link_counts, link_counts)

    # Move the first hardlinked file to be last
    hl_file3 = hlsrc_sub.append("hardlink3")
    rpath.rename(hl_file1, hl_file3)

    InternalBackup(1, 1, hlsrc.path, output.path, 20000)
    self.assertEqual(
        out_subdir.append("hardlink2").getinode(),
        out_subdir.append("hardlink3").getinode())

    # validate that hashes and link counts are correctly saved in metadata
    incs = meta_prefix.get_incfiles_list()
    self.assertEqual(len(incs), 2)
    if incs[0].getinctype() == b'snapshot':
        metadata_rp = incs[0]
    else:
        metadata_rp = incs[1]
    hashes, link_counts = self.extract_metadata(metadata_rp)
    # hashes for ., ./subdir, ./subdir/hardlink2, ./subdir/hardlink3
    expected_hashes = [None, None, self.hello_str_hash, None]
    # The following assertion would fail as a result of bugs that are
    # now fixed
    self.assertEqual(expected_hashes, hashes)
    expected_link_counts = [1, 1, 2, 2]
    self.assertEqual(expected_link_counts, link_counts)

    # Now try restoring, still checking hard links.
    sub_path = os.path.join(abs_output_dir, b"subdir")
    restore_path = os.path.join(abs_test_dir, b"hl_restore")
    restore_dir = rpath.RPath(Globals.local_connection, restore_path)
    hlrestore_file1 = restore_dir.append("hardlink1")
    hlrestore_file2 = restore_dir.append("hardlink2")
    hlrestore_file3 = restore_dir.append("hardlink3")

    if restore_dir.lstat():
        restore_dir.delete()
    InternalRestore(1, 1, sub_path, restore_path, 10000)
    for rp in [hlrestore_file1, hlrestore_file2]:
        rp.setdata()
    self.assertEqual(hlrestore_file1.getinode(),
                     hlrestore_file2.getinode())

    if restore_dir.lstat():
        restore_dir.delete()
    InternalRestore(1, 1, sub_path, restore_path, 20000)
    for rp in [hlrestore_file2, hlrestore_file3]:
        rp.setdata()
    self.assertEqual(hlrestore_file2.getinode(),
                     hlrestore_file3.getinode())
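# [Illustration, not part of the suite] A hedged sketch of what the
# extract_metadata() helper used above is assumed to do; the two-space
# indentation and the field names "SHA1Digest" / "NumHardLinks" reflect the
# mirror_metadata text format as understood here, not a verified copy of the
# real helper: per "File" entry, collect the sha1 hex digest (or None) and
# the hard link count (defaulting to 1 when the field is absent).
def sketch_extract_metadata(metadata_lines):
    hashes, link_counts = [], []
    for line in metadata_lines:
        if line.startswith(b"File "):
            hashes.append(None)    # no digest recorded yet for this entry
            link_counts.append(1)  # default when NumHardLinks is absent
        elif line.startswith(b"  SHA1Digest "):
            hashes[-1] = line.split()[-1].decode()
        elif line.startswith(b"  NumHardLinks "):
            link_counts[-1] = int(line.split()[-1])
    return hashes, link_counts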
def test_adding_hardlinks(self):
    """Test the addition of a new hardlinked file.

    This test is directed at some previously buggy code that
    1) failed to keep the correct number of hardlinks in the mirror
    metadata, and 2) failed to restore hardlinked files so that they
    are linked the same as when they were backed up. One of the
    conditions that triggered these bugs included adding a new
    hardlinked file somewhere in the middle of a list of previously
    linked files. The bug was originally reported here:
    https://savannah.nongnu.org/bugs/?26848
    """
    # Setup initial backup
    MakeOutputDir()
    output = rpath.RPath(Globals.local_connection, abs_output_dir)
    hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

    hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
    if hlsrc.lstat():
        hlsrc.delete()
    hlsrc.mkdir()
    hlsrc_sub = hlsrc.append("subdir")
    hlsrc_sub.mkdir()
    hl_file1 = hlsrc_sub.append("hardlink1")
    hl_file1.write_string(self.hello_str)
    hl_file3 = hlsrc_sub.append("hardlink3")
    hl_file3.hardlink(hl_file1.path)

    InternalBackup(1, 1, hlsrc.path, output.path, 10000)
    out_subdir = output.append("subdir")
    self.assertEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink3").getinode())

    # validate that hashes and link counts are correctly saved in metadata
    meta_prefix = rpath.RPath(
        Globals.local_connection,
        os.path.join(abs_output_dir, b"rdiff-backup-data",
                     b"mirror_metadata"))
    incs = meta_prefix.get_incfiles_list()
    self.assertEqual(len(incs), 1)
    metadata_rp = incs[0]
    hashes, link_counts = self.extract_metadata(metadata_rp)
    # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink3
    expected_hashes = [None, None, self.hello_str_hash, None]
    self.assertEqual(expected_hashes, hashes)
    expected_link_counts = [1, 1, 2, 2]
    self.assertEqual(expected_link_counts, link_counts)

    # Create a new hardlinked file between "hardlink1" and "hardlink3"
    # and perform another backup
    hl_file2 = hlsrc_sub.append("hardlink2")
    hl_file2.hardlink(hl_file1.path)

    InternalBackup(1, 1, hlsrc.path, output.path, 20000)
    self.assertEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink2").getinode())
    self.assertEqual(
        out_subdir.append("hardlink1").getinode(),
        out_subdir.append("hardlink3").getinode())

    # validate that hashes and link counts are correctly saved in metadata
    incs = meta_prefix.get_incfiles_list()
    self.assertEqual(len(incs), 2)
    if incs[0].getinctype() == b'snapshot':
        metadata_rp = incs[0]
    else:
        metadata_rp = incs[1]
    hashes, link_counts = self.extract_metadata(metadata_rp)
    # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink2,
    # ./subdir/hardlink3
    expected_hashes = [None, None, self.hello_str_hash, None, None]
    self.assertEqual(expected_hashes, hashes)
    expected_link_counts = [1, 1, 3, 3, 3]
    # The following assertion would fail as a result of bugs that are
    # now fixed
    self.assertEqual(expected_link_counts, link_counts)

    # Now try restoring, still checking hard links.
    sub_path = os.path.join(abs_output_dir, b"subdir")
    restore_path = os.path.join(abs_test_dir, b"hl_restore")
    restore_dir = rpath.RPath(Globals.local_connection, restore_path)
    hlrestore_file1 = restore_dir.append("hardlink1")
    hlrestore_file2 = restore_dir.append("hardlink2")
    hlrestore_file3 = restore_dir.append("hardlink3")

    if restore_dir.lstat():
        restore_dir.delete()
    InternalRestore(1, 1, sub_path, restore_path, 10000)
    for rp in [hlrestore_file1, hlrestore_file3]:
        rp.setdata()
    self.assertEqual(hlrestore_file1.getinode(),
                     hlrestore_file3.getinode())

    if restore_dir.lstat():
        restore_dir.delete()
    InternalRestore(1, 1, sub_path, restore_path, 20000)
    for rp in [hlrestore_file1, hlrestore_file2, hlrestore_file3]:
        rp.setdata()
    self.assertEqual(hlrestore_file1.getinode(),
                     hlrestore_file2.getinode())
    # The following assertion would fail as a result of bugs that are
    # now fixed
    self.assertEqual(hlrestore_file1.getinode(),
                     hlrestore_file3.getinode())
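# [Illustration, not part of the suite] Both hardlink tests above select the
# metadata increment via getinctype(); a helper with the same assumed
# semantics (one b'snapshot' among the increments holds the full metadata of
# the latest backup, the others are diffs) could look like this:
def sketch_pick_snapshot(incs):
    for inc in incs:
        if inc.getinctype() == b'snapshot':
            return inc
    raise ValueError("no snapshot increment found")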
def setUp(self):
    Globals.set('isbackup_writer', 1)
    MakeOutputDir()