def testCopy(self): """Test copy of various files""" for rp in [self.sl, self.rf, self.fifo, self.dir]: rpath.copy(rp, self.dest) assert self.dest.lstat(), "%a doesn't exist" % self.dest.path assert rpath.cmp(rp, self.dest) assert rpath.cmp(self.dest, rp) self.dest.delete()
def testCopy(self): """Test copy of various files""" for rp in [self.sl, self.rf, self.fifo, self.dir]: rpath.copy(rp, self.dest) self.assertTrue(self.dest.lstat()) self.assertTrue(rpath.cmp(rp, self.dest)) self.assertTrue(rpath.cmp(self.dest, rp)) self.dest.delete()
def testCopy(self): """Test copy of various files""" for rp in [self.sl, self.rf, self.fifo, self.dir]: rpath.copy(rp, self.dest) assert self.dest.lstat(), "%s doesn't exist" % self.dest.path assert rpath.cmp(rp, self.dest) assert rpath.cmp(self.dest, rp) self.dest.delete()
def testDuplicateMetadataTimestamp(self):
    """This test is based on issue #322 where a diff and a snapshot
    metadata mirror files had the same timestamp, which made rdiff-backup
    choke. We check that rdiff-backup still fails by default but can be
    taught to ignore the error with --allow-duplicate-timestamps so that
    the repo can be fixed."""
    # create an empty directory
    test_base_rp = self.makerp(abs_test_dir).append("dupl_meta_time")
    re_init_rpath_dir(test_base_rp)
    # create enough incremental backups to have one metadata snapshot
    # in-between, which we can manipulate to simulate the error
    source_rp = test_base_rp.append("source")
    target_rp = test_base_rp.append("target")
    source_rp.mkdir()
    for suffix in range(1, 15):
        # one new file + one backup per iteration, 10000s apart,
        # so the repo accumulates 14 increments
        source_rp.append("file%02d" % suffix).touch()
        rdiff_backup(1, 1, source_rp.__fspath__(), target_rp.__fspath__(),
                     current_time=suffix * 10000)
    # identify the oldest (aka first) mirror metadata snapshot
    # and sort the list because some filesystems don't respect the order
    rb_data_rp = target_rp.append("rdiff-backup-data")
    files_list = sorted(filter(
        lambda x: x.startswith(b"mirror_metadata."),
        rb_data_rp.listdir()))
    # NOTE(review): index 8 presumably lands on a .snapshot.gz entry in
    # the sorted listing — depends on the metadata rotation cadence
    meta_snapshot_rp = rb_data_rp.append(files_list[8])
    # create a diff with the same data as the identified snapshot,
    # i.e. a diff and a snapshot sharing the same timestamp (issue #322)
    meta_dupldiff_rp = rb_data_rp.append(files_list[8].replace(
        b".snapshot.gz", b".diff.gz"))
    rpath.copy(meta_snapshot_rp, meta_dupldiff_rp)
    # this succeeds
    rdiff_backup(1, 1, target_rp.__fspath__(), None,
                 extra_options=b"--check-destination-dir")
    # now this should fail
    source_rp.append("file15").touch()
    rdiff_backup(1, 1, source_rp.__fspath__(), target_rp.__fspath__(),
                 current_time=15 * 10000, expected_ret_val=1)
    # and this should also fail
    rdiff_backup(1, 1, target_rp.__fspath__(), None, expected_ret_val=1,
                 extra_options=b"--check-destination-dir")
    # but this should succeed
    rdiff_backup(1, 1, target_rp.__fspath__(), None,
                 extra_options=b"--allow-duplicate-timestamps "
                               b"--check-destination-dir")
    # now we can clean-up, getting rid of the duplicate metadata mirrors
    # NOTE: we could have cleaned-up even without checking/fixing the directory
    # but this shouldn't be the recommended practice.
    rdiff_backup(1, 1, target_rp.__fspath__(), None,
                 extra_options=b"--remove-older-than 100000 --force")
    # and this should at last succeed
    source_rp.append("file16").touch()
    rdiff_backup(1, 1, source_rp.__fspath__(), target_rp.__fspath__(),
                 current_time=16 * 10000)
def testCopyAttribs(self): """Test copying attributes""" t = rpath.RPath(self.lc, self.write_dir, ("testattribs", )) if t.lstat(): t.delete() for rp in [ self.noperms, self.nowrite, self.rf, self.exec1, self.exec2, self.hl1, self.dir ]: rpath.copy(rp, t) rpath.copy_attribs(rp, t) self.assertTrue(t.equal_loose(rp)) t.delete()
def testCopyAttribs(self): """Test copying attributes""" t = rpath.RPath(self.lc, self.write_dir, ("testattribs", )) if t.lstat(): t.delete() for rp in [ self.noperms, self.nowrite, self.rf, self.exec1, self.exec2, self.hl1, self.dir ]: rpath.copy(rp, t) rpath.copy_attribs(rp, t) assert t.equal_loose(rp), \ "Attributes for file %s not copied successfully" % rp.path t.delete()
def testGzipRegexp(self): """Here a .gz file shouldn't be compressed""" Globals.compression = 1 rpath.copy(rf, out_gz) assert out_gz.lstat() rp = increment.Increment(rf, out_gz, target) self.check_time(rp) assert rp.equal_verbose(out_gz, check_index=0, compare_size=0) Rdiff.patch_local(rf, rp, out2) assert rpath.cmp(out_gz, out2) rp.delete() out2.delete() out_gz.delete()
def testGzipRegexp(self): """Here a .gz file shouldn't be compressed""" Globals.compression = 1 rpath.copy(rf, out_gz) assert out_gz.lstat() rp = increment.Increment(rf, out_gz, target) self.check_time(rp) assert rp.equal_verbose(out_gz, check_index = 0, compare_size = 0) Rdiff.patch_local(rf, rp, out2) assert rpath.cmp(out_gz, out2) rp.delete() out2.delete() out_gz.delete()
def _patch_to_temp(self, basis_rp, diff_rorp, new):
    """Patch basis_rp, writing output in new, which doesn't exist yet.

    diff_rorp carries the incoming change: a hardlink flag, a full
    snapshot, or an rdiff delta against basis_rp.  After the data is
    written, the file's attributes are copied over as well.
    """
    if diff_rorp.isflaglinked():
        # hardlink to an already-restored file: no data to patch
        map_hardlinks.link_rp(diff_rorp, new, self.basis_root_rp)
        return
    if diff_rorp.get_attached_filetype() == 'snapshot':
        # full copy of the file contents
        copy_report = rpath.copy(diff_rorp, new)
    else:
        # only 'snapshot' and 'diff' are expected filetypes here
        assert diff_rorp.get_attached_filetype() == 'diff', (
            "File '{drp}' must be of type '{dtype}'.".format(
                drp=diff_rorp, dtype='diff'))
        # apply the delta on top of the basis file
        copy_report = Rdiff.patch_local(basis_rp, diff_rorp, new)
    # verify the written data against the recorded hash
    self._check_hash(copy_report, diff_rorp)
    # 'new' may legitimately not exist (e.g. deletion); only then skip attribs
    if new.lstat():
        rpath.copy_attribs(diff_rorp, new)