Example #1
    def make_unreadable(self):
        """Make unreadable input directory

        The directory needs to be readable initially (otherwise it
        just won't get backed up), and then later we will turn it
        unreadable.

        """
        rp = rpath.RPath(Globals.local_connection,
                         os.path.join(abs_test_dir, b"regress"))
        if rp.lstat():
            Myrm(rp.path)
        rp.setdata()
        rp.mkdir()
        rp1 = rp.append('unreadable_dir')
        rp1.mkdir()
        rp1_1 = rp1.append('to_be_unreadable')
        rp1_1.write_string('aensuthaoeustnahoeu')
        return rp
Example #2
def create_nested(dirname, s, depth, branch_factor):
    """Create many short files in branching directory"""

    def write(rp):
        fp = rp.open("w")
        fp.write(s)
        fp.close()

    def helper(rp, depth):
        if (not rp.isdir()):
            rp.mkdir()
        sub_rps = [rp.append("file_%d" % i) for i in range(branch_factor)]
        if depth == 1:
            list(map(write, sub_rps))
        else:
            list(map(lambda rp: helper(rp, depth - 1), sub_rps))

    re_init_subdir(abs_test_dir, b'nested_out')
    helper(rpath.RPath(Globals.local_connection, dirname), depth)
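A minimal usage sketch for the helper above, assuming the same rdiff-backup test environment (rpath, Globals, abs_test_dir) that the snippet relies on; the directory name and fan-out values are purely illustrative:

    # Hypothetical invocation: builds a tree branch_factor wide and depth levels
    # deep, with branch_factor ** depth short files at the leaves (4 ** 3 = 64 here).
    nested_dir = os.path.join(abs_test_dir, b"nested_out")
    create_nested(nested_dir, "some short file content", 3, 4)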
Example #3
    def write_metadata_to_temp(self):
        """If necessary, write metadata of bigdir to file metadata.gz"""
        global tempdir
        temprp = tempdir.append(
            "mirror_metadata.2005-11-03T14:51:06-06:00.snapshot.gz")
        if temprp.lstat(): return temprp

        self.make_temp()
        rootrp = rpath.RPath(Globals.local_connection,
                             os.path.join(old_test_dir, "bigdir"))
        rpath_iter = selection.Select(rootrp).set_iter()

        start_time = time.time()
        mf = MetadataFile(temprp, 'w')
        for rp in rpath_iter:
            mf.write_object(rp)
        mf.close()
        print("Writing metadata took %s seconds" % (time.time() - start_time))
        return temprp
Example #4
    def getinc_paths(self, basename, directory, quoted=0):
        """Returns a sorted list of files which starts with basename
        within a given directory."""

        if quoted:
            FilenameMapping.set_init_quote_vals()
            dirrp = FilenameMapping.QuotedRPath(Globals.local_connection,
                                                directory)
        else:
            dirrp = rpath.RPath(Globals.local_connection, directory)
        incbasenames = [
            filename for filename in robust.listrp(dirrp)
            if filename.startswith(basename)
        ]
        incbasenames.sort()
        incrps = list(map(dirrp.append, incbasenames))
        return [
            x.path for x in [incrp for incrp in incrps if incrp.isincfile()]
        ]
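A hypothetical call of the method above from inside the same test class, assuming a repository under abs_output_dir whose rdiff-backup-data directory already contains increments; the names and paths are illustrative:

        # Hypothetical: collect the paths of every session_statistics increment.
        data_dir = os.path.join(abs_output_dir, b"rdiff-backup-data")
        stat_paths = self.getinc_paths(b"session_statistics", data_dir)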
Example #5
    def testWriteDeltaGzip(self):
        """Same as above but delta is written gzipped"""
        rplist = [self.basis, self.new, self.delta, self.output]
        MakeRandomFile(self.basis.path)
        MakeRandomFile(self.new.path)
        list(map(rpath.RPath.setdata, [self.basis, self.new]))
        assert self.basis.lstat() and self.new.lstat()
        delta_gz = rpath.RPath(self.delta.conn, self.delta.path + b".gz")
        if delta_gz.lstat():
            delta_gz.delete()

        Rdiff.write_delta(self.basis, self.new, delta_gz, 1)
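        # The final argument (1) presumably tells write_delta to gzip-compress
        # the delta, matching the .gz name chosen above.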
        assert delta_gz.lstat()
        os.system(b"gunzip %s" % delta_gz.path)
        delta_gz.setdata()
        self.delta.setdata()
        Rdiff.patch_local(self.basis, self.delta, self.output)
        assert rpath.cmp(self.new, self.output)
        list(map(rpath.RPath.delete, rplist))
Example #6
	def testStatistics(self):
		"""Test the writing of statistics

		The file sizes are approximate because the size of directories
		could change with different file systems...

		"""
		def sorti(inclist):
			l = [(inc.getinctime(), inc) for inc in inclist]
			l.sort()
			return [inc for (t, inc) in l]

		Globals.compression = 1
		Myrm("testfiles/output")
		InternalBackup(1, 1, "testfiles/stattest1", "testfiles/output")
		InternalBackup(1, 1, "testfiles/stattest2", "testfiles/output",
					   time.time()+1)

		rbdir = rpath.RPath(Globals.local_connection,
							"testfiles/output/rdiff-backup-data")

		incs = sorti(restore.get_inclist(rbdir.append("session_statistics")))
		assert len(incs) == 2
		s2 = statistics.StatsObj().read_stats_from_rp(incs[0])
		assert s2.SourceFiles == 7
		assert 700000 <= s2.SourceFileSize < 750000, s2.SourceFileSize
		self.stats_check_initial(s2)

		root_stats = statistics.StatsObj().read_stats_from_rp(incs[1])
		assert root_stats.SourceFiles == 7, root_stats.SourceFiles
		assert 550000 <= root_stats.SourceFileSize < 570000
		assert root_stats.MirrorFiles == 7
		assert 700000 <= root_stats.MirrorFileSize < 750000
		assert root_stats.NewFiles == 1
		assert root_stats.NewFileSize == 0
		assert root_stats.DeletedFiles == 1, root_stats.DeletedFiles
		assert root_stats.DeletedFileSize == 200000
		assert 3 <= root_stats.ChangedFiles <= 4, root_stats.ChangedFiles
		assert 450000 <= root_stats.ChangedSourceSize < 470000
		assert 400000 <= root_stats.ChangedMirrorSize < 420000, \
			   root_stats.ChangedMirrorSize
		assert 10 < root_stats.IncrementFileSize < 30000
Example #7
    def test_dest_delete(self):
        """Test deleting a directory from the destination dir

        Obviously that directory can no longer be restored, but the
        rest of the files should be OK.  Just runs locally for now.

        """
        in_dir1, in_subdir, in_dir2 = self.make_dir()
        self.set_connections(None, None, None, None)
        self.exec_rb(10000, in_dir1.path, 'testfiles/output')

        out_subdir = rpath.RPath(
            Globals.local_connection,
            'testfiles/output/%s' % (in_subdir.index[-1], ))
        log.Log("Deleting %s" % (out_subdir.path, ), 3)
        out_subdir.delete()
        self.exec_rb(20000, in_dir2.path, 'testfiles/output')

        self.exec_rb_restore(10000, 'testfiles/output',
                             'testfiles/restoretarget1')
Example #8
    def test_overlapping_dirs(self):
        """Test if we can backup a directory containing the backup repo
        while ignoring this repo"""

        testrp = rpath.RPath(Globals.local_connection,
                             abs_test_dir).append('selection_overlap')
        re_init_rpath_dir(testrp)
        backuprp = testrp.append('backup')
        emptyrp = testrp.append('empty')  # just to have something to backup
        emptyrp.mkdir()

        rdiff_backup(1,
                     1,
                     testrp.path,
                     backuprp.path,
                     extra_options=b"--exclude %s" % backuprp.path)

        assert backuprp.append('rdiff-backup-data').isdir() and \
            backuprp.append('empty').isdir(), \
            "Backup to %s didn't happen properly." % backuprp.getsafepath()
Example #9
    def testEmptyDirInclude(self):
        """Make sure empty directories are included with **xx exps

        This checks for a bug present in 1.0.3/1.1.5 and similar.

        """
        outrp = MakeOutputDir()
        selrp = rpath.RPath(Globals.local_connection, 'testfiles/seltest')
        re_init_dir(selrp)
        emptydir = selrp.append('emptydir')
        emptydir.mkdir()

        rdiff_backup(1,
                     1,
                     selrp.path,
                     outrp.path,
                     extra_options=("--include **XX "
                                    "--exclude testfiles/seltest/YYYY"))

        outempty = outrp.append('emptydir')
        assert outempty.isdir(), outempty
Example #10
    def testLongSock(self):
        """Test making a socket with a long name

        On some systems the length of a socket's name is restricted and
        cannot be as long as a regular file's.  When this happens, a
        SkipFileException should be raised.

        """
        sock = rpath.RPath(self.lc, self.write_dir, (
            "socketaoeusthaoeaoeutnhaonseuhtansoeuthasoneuthasoeutnhasonuthaoensuhtasoneuhtsanouhonetuhasoneuthsaoenaonsetuaosenuhtaoensuhaoeu",
        ))
        assert not sock.lstat()
        try:
            sock.mksock()
        except rpath.SkipFileException:
            pass
        else:
            print("Warning, making long socket did not fail")
        sock.setdata()
        if sock.lstat():
            sock.delete()
Example #11
    def test_numerical_mapping(self):
        """Test --preserve-numerical-ids option

        This doesn't really test much, since we don't have a
        convenient system with different uname/ids.

        """

        def write_ownership_dir():
            """Write the directory testfiles/root_mapping"""
            rp = rpath.RPath(Globals.local_connection,
                             os.path.join(abs_test_dir, b"root_mapping"))
            re_init_rpath_dir(rp)
            rp1 = rp.append('1')
            rp1.touch()
            rp2 = rp.append('2')
            rp2.touch()
            rp2.chown(userid, 1)  # use groupid 1, usually bin
            return rp

        def get_ownership(dir_rp):
            """Return pair (ids of dir_rp/1, ids of dir_rp2) of ids"""
            rp1, rp2 = list(map(dir_rp.append, ('1', '2')))
            self.assertTrue(rp1.isreg())
            self.assertTrue(rp2.isreg())
            return (rp1.getuidgid(), rp2.getuidgid())

        in_rp = write_ownership_dir()
        out_rp = rpath.RPath(Globals.local_connection, abs_output_dir)
        if out_rp.lstat():
            Myrm(out_rp.path)

        self.assertEqual(get_ownership(in_rp), ((0, 0), (userid, 1)))

        rdiff_backup(1,
                     0,
                     in_rp.path,
                     out_rp.path,
                     extra_options=(b"--preserve-numerical-ids"))
        self.assertEqual(get_ownership(out_rp), ((0, 0), (userid, 1)))
Example #12
    def testRoot(self):
        """testRoot - / may be a counterexample to several of these.."""
        root = rpath.RPath(Globals.local_connection, "/")
        select = Select(root)
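        # The selection functions below appear to return 1 to include a path,
        # 0 to exclude it, None when the pattern has no opinion, and 2 to mark
        # a directory that must still be scanned because descendants may match.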

        assert select.glob_get_sf("/", 1)(root) == 1
        assert select.glob_get_sf("/foo", 1)(root) == 1
        assert select.glob_get_sf("/foo/bar", 1)(root) == 1
        assert select.glob_get_sf("/", 0)(root) == 0
        assert select.glob_get_sf("/foo", 0)(root) is None

        assert select.glob_get_sf("**.py", 1)(root) == 2
        assert select.glob_get_sf("**", 1)(root) == 1
        assert select.glob_get_sf("ignorecase:/", 1)(root) == 1
        assert select.glob_get_sf("**.py", 0)(root) is None
        assert select.glob_get_sf("**", 0)(root) == 0
        assert select.glob_get_sf("/foo/*", 0)(root) is None

        assert select.filelist_get_sf(io.BytesIO(b"/"), 1, "test")(root) == 1
        assert select.filelist_get_sf(io.BytesIO(b"/foo/bar"), 1, "test")(root) == 1
        assert select.filelist_get_sf(io.BytesIO(b"/"), 0, "test")(root) == 0
        assert select.filelist_get_sf(io.BytesIO(b"/foo/bar"), 0, "test")(root) is None
Example #13
	def test_write(self):
		"""Test writing to metadata file, then reading back contents"""
		global tempdir
		temprp = tempdir.append("mirror_metadata.2005-11-03T12:51:06-06:00.snapshot.gz")
		if temprp.lstat(): temprp.delete()

		self.make_temp()
		rootrp = rpath.RPath(Globals.local_connection,
							 "testfiles/various_file_types")
		dirlisting = rootrp.listdir()
		dirlisting.sort()
		rps = list(map(rootrp.append, dirlisting))

		assert not temprp.lstat()
		write_mf = MetadataFile(temprp, 'w')
		for rp in rps: write_mf.write_object(rp)
		write_mf.close()
		assert temprp.lstat()

		reread_rps = list(MetadataFile(temprp, 'r').get_objects())
		assert len(reread_rps) == len(rps), (len(reread_rps), len(rps))
		for i in range(len(reread_rps)):
			assert reread_rps[i] == rps[i], i
Example #14
	def test_long_socket(self):
		"""Test backing up a directory with long sockets in them

		For some reason many unices don't allow sockets with long
		names to be made in the usual way.

		"""
		sockdir = rpath.RPath(Globals.local_connection, "testfiles/sockettest")
		re_init_dir(sockdir)
		tmp_sock = sockdir.append("sock")
		tmp_sock.mksock()
		sock1 = sockdir.append("Long_socket_name---------------------------------------------------------------------------------------------------")
		self.assertRaises(rpath.SkipFileException, sock1.mksock)
		rpath.rename(tmp_sock, sock1)
		assert sock1.issock()
		sock2 = sockdir.append("Medium_socket_name--------------------------------------------------------------")
		sock2.mksock()

		Myrm(Local.rpout.path)
		InternalBackup(1, 1, sockdir.path, Local.rpout.path,
					   current_time = 1)
		InternalBackup(1, 1, "testfiles/empty", Local.rpout.path,
					   current_time = 10000)
Example #15
class LongNameTest(unittest.TestCase):
    """Test the longname module"""
    root_rp = rpath.RPath(Globals.local_connection, "testfiles")
    out_rp = root_rp.append_path('output')

    def test_length_limit(self):
        """Confirm that length limit is max_len

        Some of these tests depend on the length being at most
        max_len, so check to make sure it's accurate.

        """
        Myrm(self.out_rp.path)
        self.out_rp.mkdir()

        really_long = self.out_rp.append('a' * max_len)
        really_long.touch()

        try:
            too_long = self.out_rp.append("a" * (max_len + 1))
        except EnvironmentError as e:
            assert errno.errorcode[e.errno] == 'ENAMETOOLONG', e
        else:
Example #16
    def setUp(self):
        """Make testfiles/output directory and a few files"""
        Myrm(abs_output_dir)
        self.outputrp = rpath.RPath(Globals.local_connection, abs_output_dir)
        self.regfile1 = self.outputrp.append("reg1")
        self.regfile2 = self.outputrp.append("reg2")
        self.regfile3 = self.outputrp.append("reg3")

        self.outputrp.mkdir()

        with self.regfile1.open("wb") as fp:
            fp.write(b"hello")
        self.regfile1.setfile(self.regfile1.open("rb"))

        self.regfile2.touch()
        self.regfile2.setfile(self.regfile2.open("rb"))

        with self.regfile3.open("wb") as fp:
            fp.write(b"goodbye")
        self.regfile3.setfile(self.regfile3.open("rb"))

        self.regfile1.setdata()
        self.regfile2.setdata()
        self.regfile3.setdata()
Example #17
    def test_maybe_gzip(self):
        """Test MaybeGzip"""
        dirrp = rpath.RPath(self.lc, abs_output_dir)
        re_init_rpath_dir(dirrp)
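        # MaybeGzip appears to defer the choice of filename: closing without
        # writing any data leaves a plain, empty 'foo', while writing data
        # produces a gzip-compressed 'foo.gz' instead.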

        base_rp = dirrp.append('foo')
        fileobj = rpath.MaybeGzip(base_rp)
        fileobj.close()
        base_rp.setdata()
        assert base_rp.isreg(), base_rp
        assert base_rp.getsize() == 0
        base_rp.delete()

        base_gz = dirrp.append('foo.gz')
        assert not base_gz.lstat()
        fileobj = rpath.MaybeGzip(base_rp)
        fileobj.write(b"lala")
        fileobj.close()
        base_rp.setdata()
        base_gz.setdata()
        assert not base_rp.lstat()
        assert base_gz.isreg(), base_gz
        data = base_gz.get_bytes(compressed=1)
        assert data == b"lala", data
Example #18
    def test_maybe_gzip(self):
        """Test MaybeGzip"""
        dirrp = rpath.RPath(self.lc, abs_output_dir)
        re_init_rpath_dir(dirrp)

        base_rp = dirrp.append('foo')
        fileobj = rpath.MaybeGzip(base_rp)
        fileobj.close()
        base_rp.setdata()
        self.assertTrue(base_rp.isreg())
        self.assertEqual(base_rp.getsize(), 0)
        base_rp.delete()

        base_gz = dirrp.append('foo.gz')
        self.assertFalse(base_gz.lstat())
        fileobj = rpath.MaybeGzip(base_rp)
        fileobj.write(b"lala")
        fileobj.close()
        base_rp.setdata()
        base_gz.setdata()
        self.assertFalse(base_rp.lstat())
        self.assertTrue(base_gz.isreg())
        data = base_gz.get_bytes(compressed=1)
        self.assertEqual(data, b"lala")
Example #19
    def testEmptyDirInclude(self):
        """Make sure empty directories are included with **xx exps

        This checks for a bug present in 1.0.3/1.1.5 and similar.

        """
        outrp = MakeOutputDir()
        # we need to change directory to be able to work with relative paths
        os.chdir(abs_test_dir)
        os.chdir(os.pardir)  # chdir one level up
        selrp = rpath.RPath(Globals.local_connection, 'testfiles/seltest')
        re_init_rpath_dir(selrp)
        emptydir = selrp.append('emptydir')
        emptydir.mkdir()

        rdiff_backup(1,
                     1,
                     selrp.path,
                     outrp.path,
                     extra_options=(b"--include **XX "
                                    b"--exclude testfiles/seltest/YYYY"))

        outempty = outrp.append('emptydir')
        assert outempty.isdir(), outempty
Example #20
class HardlinkTest(unittest.TestCase):
    """Test cases for Hard links"""
    outputrp = rpath.RPath(Globals.local_connection, abs_output_dir)
    re_init_rpath_dir(outputrp)

    hlinks_dir = os.path.join(old_test_dir, b"hardlinks")
    hlinks_dir1 = os.path.join(hlinks_dir, b"dir1")
    hlinks_dir1copy = os.path.join(hlinks_dir, b"dir1copy")
    hlinks_dir2 = os.path.join(hlinks_dir, b"dir2")
    hlinks_dir3 = os.path.join(hlinks_dir, b"dir3")
    hlinks_rp1 = rpath.RPath(Globals.local_connection, hlinks_dir1)
    hlinks_rp1copy = rpath.RPath(Globals.local_connection, hlinks_dir1copy)
    hlinks_rp2 = rpath.RPath(Globals.local_connection, hlinks_dir2)
    hlinks_rp3 = rpath.RPath(Globals.local_connection, hlinks_dir3)
    hello_str = "Hello, world!"
    hello_str_hash = "943a702d06f34599aee1f8da8ef9f7296031d699"

    def testEquality(self):
        """Test rorp_eq function in conjunction with compare_recursive"""
        self.assertTrue(compare_recursive(self.hlinks_rp1,
                                          self.hlinks_rp1copy))
        self.assertTrue(
            compare_recursive(self.hlinks_rp1,
                              self.hlinks_rp2,
                              compare_hardlinks=None))
        self.assertFalse(
            compare_recursive(self.hlinks_rp1,
                              self.hlinks_rp2,
                              compare_hardlinks=1))

    def testBuildingDict(self):
        """See if the partial inode dictionary is correct"""
        Globals.preserve_hardlinks = 1
        reset_hardlink_dicts()
        for dsrp in selection.Select(self.hlinks_rp3).set_iter():
            Hardlink.add_rorp(dsrp)

        self.assertEqual(len(list(Hardlink._inode_index.keys())), 3)

    def testCompletedDict(self):
        """See if the hardlink dictionaries are built correctly"""
        reset_hardlink_dicts()
        for dsrp in selection.Select(self.hlinks_rp1).set_iter():
            Hardlink.add_rorp(dsrp)
            Hardlink.del_rorp(dsrp)
        self.assertEqual(Hardlink._inode_index, {})

        reset_hardlink_dicts()
        for dsrp in selection.Select(self.hlinks_rp2).set_iter():
            Hardlink.add_rorp(dsrp)
            Hardlink.del_rorp(dsrp)
        self.assertEqual(Hardlink._inode_index, {})

    def testSeries(self):
        """Test hardlink system by backing up and restoring a few dirs"""
        dirlist = [
            self.hlinks_dir1, self.hlinks_dir2, self.hlinks_dir3,
            os.path.join(old_test_dir, b'various_file_types')
        ]
        BackupRestoreSeries(None, None, dirlist, compare_hardlinks=1)
        BackupRestoreSeries(1, 1, dirlist, compare_hardlinks=1)

    def testInnerRestore(self):
        """Restore part of a dir, see if hard links preserved"""
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlout1_dir = os.path.join(abs_test_dir, b"out_hardlink1")
        hlout2_dir = os.path.join(abs_test_dir, b"out_hardlink2")

        # Now set up directories out_hardlink1 and out_hardlink2
        hlout1 = rpath.RPath(Globals.local_connection, hlout1_dir)
        if hlout1.lstat():
            hlout1.delete()
        hlout1.mkdir()
        hlout1_sub = hlout1.append("subdir")
        hlout1_sub.mkdir()
        hl1_1 = hlout1_sub.append("hardlink1")
        hl1_2 = hlout1_sub.append("hardlink2")
        hl1_3 = hlout1_sub.append("hardlink3")
        hl1_4 = hlout1_sub.append("hardlink4")
        # 1 and 2 are hard linked, as are 3 and 4
        hl1_1.touch()
        hl1_2.hardlink(hl1_1.path)
        hl1_3.touch()
        hl1_4.hardlink(hl1_3.path)

        hlout2 = rpath.RPath(Globals.local_connection, hlout2_dir)
        if hlout2.lstat():
            hlout2.delete()
        xcopytree(hlout1_dir, hlout2_dir)
        hlout2_sub = hlout2.append("subdir")
        hl2_1 = hlout2_sub.append("hardlink1")
        hl2_2 = hlout2_sub.append("hardlink2")
        hl2_3 = hlout2_sub.append("hardlink3")
        hl2_4 = hlout2_sub.append("hardlink4")
        # Now 2 and 3 are hard linked, also 1 and 4
        rpath.copy_with_attribs(hl1_1, hl2_1)
        rpath.copy_with_attribs(hl1_2, hl2_2)
        hl2_3.delete()
        hl2_3.hardlink(hl2_2.path)
        hl2_4.delete()
        hl2_4.hardlink(hl2_1.path)
        rpath.copy_attribs(hlout1_sub, hlout2_sub)

        # Now try backing up twice, making sure hard links are preserved
        InternalBackup(1, 1, hlout1.path, output.path)
        out_subdir = output.append("subdir")
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())
        self.assertEqual(
            out_subdir.append("hardlink3").getinode(),
            out_subdir.append("hardlink4").getinode())
        self.assertNotEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink3").getinode())

        time.sleep(1)
        InternalBackup(1, 1, hlout2.path, output.path)
        out_subdir.setdata()
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink4").getinode())
        self.assertEqual(
            out_subdir.append("hardlink2").getinode(),
            out_subdir.append("hardlink3").getinode())
        self.assertNotEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())

        # Now try restoring, still checking hard links.
        sub_dir = os.path.join(abs_output_dir, b"subdir")
        out2_dir = os.path.join(abs_test_dir, b"out2")
        out2 = rpath.RPath(Globals.local_connection, out2_dir)
        hlout1 = out2.append("hardlink1")
        hlout2 = out2.append("hardlink2")
        hlout3 = out2.append("hardlink3")
        hlout4 = out2.append("hardlink4")

        if out2.lstat():
            out2.delete()
        InternalRestore(1, 1, sub_dir, out2_dir, 1)
        out2.setdata()
        for rp in [hlout1, hlout2, hlout3, hlout4]:
            rp.setdata()
        self.assertEqual(hlout1.getinode(), hlout2.getinode())
        self.assertEqual(hlout3.getinode(), hlout4.getinode())
        self.assertNotEqual(hlout1.getinode(), hlout3.getinode())

        if out2.lstat():
            out2.delete()
        InternalRestore(1, 1, sub_dir, out2_dir, int(time.time()))
        out2.setdata()
        for rp in [hlout1, hlout2, hlout3, hlout4]:
            rp.setdata()
        self.assertEqual(hlout1.getinode(), hlout4.getinode())
        self.assertEqual(hlout2.getinode(), hlout3.getinode())
        self.assertNotEqual(hlout1.getinode(), hlout2.getinode())

    def extract_metadata(self, metadata_rp):
        """Return lists of hashes and hardlink counts in the metadata_rp"""
        hashes = []
        link_counts = []
        comp = metadata_rp.isinccompressed()
        extractor = metadata.RorpExtractor(metadata_rp.open("r", comp))
        for rorp in extractor.iterate():
            link_counts.append(rorp.getnumlinks())
            if rorp.has_sha1():
                hashes.append(rorp.get_sha1())
            else:
                hashes.append(None)
        return (hashes, link_counts)

    def test_adding_hardlinks(self):
        """Test the addition of a new hardlinked file.

        This test is directed at some previously buggy code that 1) failed to
        keep the correct number of hardlinks in the mirror metadata, and 2)
        failed to restore hardlinked files so that they are linked the same as
        when they were backed up. One of the conditions that triggered these
        bugs included adding a new hardlinked file somewhere in the middle of a
        list of previously linked files.  The bug was originally reported here:
        https://savannah.nongnu.org/bugs/?26848
        """

        # Setup initial backup
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

        hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
        if hlsrc.lstat():
            hlsrc.delete()
        hlsrc.mkdir()
        hlsrc_sub = hlsrc.append("subdir")
        hlsrc_sub.mkdir()
        hl_file1 = hlsrc_sub.append("hardlink1")
        hl_file1.write_string(self.hello_str)
        hl_file3 = hlsrc_sub.append("hardlink3")
        hl_file3.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 10000)
        out_subdir = output.append("subdir")
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        meta_prefix = rpath.RPath(
            Globals.local_connection,
            os.path.join(abs_output_dir, b"rdiff-backup-data",
                         b"mirror_metadata"))
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 1)
        metadata_rp = incs[0]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Create a new hardlinked file between "hardlink1" and "hardlink3" and perform another backup
        hl_file2 = hlsrc_sub.append("hardlink2")
        hl_file2.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 20000)
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 2)
        if incs[0].getinctype() == b'snapshot':
            metadata_rp = incs[0]
        else:
            metadata_rp = incs[1]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir/, ./subdir/hardlink1, ./subdir/hardlink2, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 3, 3, 3]
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(expected_link_counts, link_counts)

        # Now try restoring, still checking hard links.
        sub_path = os.path.join(abs_output_dir, b"subdir")
        restore_path = os.path.join(abs_test_dir, b"hl_restore")
        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
        hlrestore_file1 = restore_dir.append("hardlink1")
        hlrestore_file2 = restore_dir.append("hardlink2")
        hlrestore_file3 = restore_dir.append("hardlink3")

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 10000)
        for rp in [hlrestore_file1, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file3.getinode())

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 20000)
        for rp in [hlrestore_file1, hlrestore_file2, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file2.getinode())
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file3.getinode())

    def test_moving_hardlinks(self):
        """Test moving the first hardlinked file in a series to later place in the series.

        This test is directed at some previously buggy code that failed to
        always keep a sha1 hash in the metadata for the first (and only the
        first) file among a series of linked files. The condition that
        triggered this bug involved removing the first file from a list of
        linked files, while also adding a new file at some later position in
        the list. The total number of hardlinked files in the list remains
        unchanged.  None of the files had a sha1 hash saved in its metadata.
        The bug was originally reported here:
        https://savannah.nongnu.org/bugs/?26848
        """

        # Setup initial backup
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

        hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
        if hlsrc.lstat():
            hlsrc.delete()
        hlsrc.mkdir()
        hlsrc_sub = hlsrc.append("subdir")
        hlsrc_sub.mkdir()
        hl_file1 = hlsrc_sub.append("hardlink1")
        hl_file1.write_string(self.hello_str)
        hl_file2 = hlsrc_sub.append("hardlink2")
        hl_file2.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 10000)
        out_subdir = output.append("subdir")
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        meta_prefix = rpath.RPath(
            Globals.local_connection,
            os.path.join(abs_output_dir, b"rdiff-backup-data",
                         b"mirror_metadata"))
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 1)
        metadata_rp = incs[0]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink2
        expected_hashes = [None, None, self.hello_str_hash, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Move the first hardlinked file to be last
        hl_file3 = hlsrc_sub.append("hardlink3")
        rpath.rename(hl_file1, hl_file3)

        InternalBackup(1, 1, hlsrc.path, output.path, 20000)
        self.assertEqual(
            out_subdir.append("hardlink2").getinode(),
            out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 2)
        if incs[0].getinctype() == b'snapshot':
            metadata_rp = incs[0]
        else:
            metadata_rp = incs[1]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir/, ./subdir/hardlink2, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None]
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Now try restoring, still checking hard links.
        sub_path = os.path.join(abs_output_dir, b"subdir")
        restore_path = os.path.join(abs_test_dir, b"hl_restore")
        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
        hlrestore_file1 = restore_dir.append("hardlink1")
        hlrestore_file2 = restore_dir.append("hardlink2")
        hlrestore_file3 = restore_dir.append("hardlink3")

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 10000)
        for rp in [hlrestore_file1, hlrestore_file2]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file2.getinode())

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 20000)
        for rp in [hlrestore_file2, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file2.getinode(),
                         hlrestore_file3.getinode())
Example #21
    def testInnerRestore(self):
        """Restore part of a dir, see if hard links preserved"""
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlout1_dir = os.path.join(abs_test_dir, b"out_hardlink1")
        hlout2_dir = os.path.join(abs_test_dir, b"out_hardlink2")

        # Now set up directories out_hardlink1 and out_hardlink2
        hlout1 = rpath.RPath(Globals.local_connection, hlout1_dir)
        if hlout1.lstat():
            hlout1.delete()
        hlout1.mkdir()
        hlout1_sub = hlout1.append("subdir")
        hlout1_sub.mkdir()
        hl1_1 = hlout1_sub.append("hardlink1")
        hl1_2 = hlout1_sub.append("hardlink2")
        hl1_3 = hlout1_sub.append("hardlink3")
        hl1_4 = hlout1_sub.append("hardlink4")
        # 1 and 2 are hard linked, as are 3 and 4
        hl1_1.touch()
        hl1_2.hardlink(hl1_1.path)
        hl1_3.touch()
        hl1_4.hardlink(hl1_3.path)

        hlout2 = rpath.RPath(Globals.local_connection, hlout2_dir)
        if hlout2.lstat():
            hlout2.delete()
        xcopytree(hlout1_dir, hlout2_dir)
        hlout2_sub = hlout2.append("subdir")
        hl2_1 = hlout2_sub.append("hardlink1")
        hl2_2 = hlout2_sub.append("hardlink2")
        hl2_3 = hlout2_sub.append("hardlink3")
        hl2_4 = hlout2_sub.append("hardlink4")
        # Now 2 and 3 are hard linked, also 1 and 4
        rpath.copy_with_attribs(hl1_1, hl2_1)
        rpath.copy_with_attribs(hl1_2, hl2_2)
        hl2_3.delete()
        hl2_3.hardlink(hl2_2.path)
        hl2_4.delete()
        hl2_4.hardlink(hl2_1.path)
        rpath.copy_attribs(hlout1_sub, hlout2_sub)

        # Now try backing up twice, making sure hard links are preserved
        InternalBackup(1, 1, hlout1.path, output.path)
        out_subdir = output.append("subdir")
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())
        self.assertEqual(
            out_subdir.append("hardlink3").getinode(),
            out_subdir.append("hardlink4").getinode())
        self.assertNotEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink3").getinode())

        time.sleep(1)
        InternalBackup(1, 1, hlout2.path, output.path)
        out_subdir.setdata()
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink4").getinode())
        self.assertEqual(
            out_subdir.append("hardlink2").getinode(),
            out_subdir.append("hardlink3").getinode())
        self.assertNotEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())

        # Now try restoring, still checking hard links.
        sub_dir = os.path.join(abs_output_dir, b"subdir")
        out2_dir = os.path.join(abs_test_dir, b"out2")
        out2 = rpath.RPath(Globals.local_connection, out2_dir)
        hlout1 = out2.append("hardlink1")
        hlout2 = out2.append("hardlink2")
        hlout3 = out2.append("hardlink3")
        hlout4 = out2.append("hardlink4")

        if out2.lstat():
            out2.delete()
        InternalRestore(1, 1, sub_dir, out2_dir, 1)
        out2.setdata()
        for rp in [hlout1, hlout2, hlout3, hlout4]:
            rp.setdata()
        self.assertEqual(hlout1.getinode(), hlout2.getinode())
        self.assertEqual(hlout3.getinode(), hlout4.getinode())
        self.assertNotEqual(hlout1.getinode(), hlout3.getinode())

        if out2.lstat():
            out2.delete()
        InternalRestore(1, 1, sub_dir, out2_dir, int(time.time()))
        out2.setdata()
        for rp in [hlout1, hlout2, hlout3, hlout4]:
            rp.setdata()
        self.assertEqual(hlout1.getinode(), hlout4.getinode())
        self.assertEqual(hlout2.getinode(), hlout3.getinode())
        self.assertNotEqual(hlout1.getinode(), hlout2.getinode())
Example #22
    def test_moving_hardlinks(self):
        """Test moving the first hardlinked file in a series to later place in the series.

        This test is directed at some previously buggy code that failed to
        always keep a sha1 hash in the metadata for the first (and only the
        first) file among a series of linked files. The condition that
        triggered this bug involved removing the first file from a list of
        linked files, while also adding a new file at some later position in
        the list. The total number of hardlinked files in the list remains
        unchanged.  None of the files had a sha1 hash saved in its metadata.
        The bug was originally reported here:
        https://savannah.nongnu.org/bugs/?26848
        """

        # Setup initial backup
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

        hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
        if hlsrc.lstat():
            hlsrc.delete()
        hlsrc.mkdir()
        hlsrc_sub = hlsrc.append("subdir")
        hlsrc_sub.mkdir()
        hl_file1 = hlsrc_sub.append("hardlink1")
        hl_file1.write_string(self.hello_str)
        hl_file2 = hlsrc_sub.append("hardlink2")
        hl_file2.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 10000)
        out_subdir = output.append("subdir")
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        meta_prefix = rpath.RPath(
            Globals.local_connection,
            os.path.join(abs_output_dir, b"rdiff-backup-data",
                         b"mirror_metadata"))
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 1)
        metadata_rp = incs[0]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink2
        expected_hashes = [None, None, self.hello_str_hash, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Move the first hardlinked file to be last
        hl_file3 = hlsrc_sub.append("hardlink3")
        rpath.rename(hl_file1, hl_file3)

        InternalBackup(1, 1, hlsrc.path, output.path, 20000)
        self.assertEqual(
            out_subdir.append("hardlink2").getinode(),
            out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 2)
        if incs[0].getinctype() == b'snapshot':
            metadata_rp = incs[0]
        else:
            metadata_rp = incs[1]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir/, ./subdir/hardlink2, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None]
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Now try restoring, still checking hard links.
        sub_path = os.path.join(abs_output_dir, b"subdir")
        restore_path = os.path.join(abs_test_dir, b"hl_restore")
        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
        hlrestore_file1 = restore_dir.append("hardlink1")
        hlrestore_file2 = restore_dir.append("hardlink2")
        hlrestore_file3 = restore_dir.append("hardlink3")

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 10000)
        for rp in [hlrestore_file1, hlrestore_file2]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file2.getinode())

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 20000)
        for rp in [hlrestore_file2, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file2.getinode(),
                         hlrestore_file3.getinode())
Example #23
    def test_adding_hardlinks(self):
        """Test the addition of a new hardlinked file.

        This test is directed at some previously buggy code that 1) failed to
        keep the correct number of hardlinks in the mirror metadata, and 2)
        failed to restore hardlinked files so that they are linked the same as
        when they were backed up. One of the conditions that triggered these
        bugs included adding a new hardlinked file somewhere in the middle of a
        list of previously linked files.  The bug was originally reported here:
        https://savannah.nongnu.org/bugs/?26848
        """

        # Setup initial backup
        MakeOutputDir()
        output = rpath.RPath(Globals.local_connection, abs_output_dir)
        hlsrc_dir = os.path.join(abs_test_dir, b"src_hardlink")

        hlsrc = rpath.RPath(Globals.local_connection, hlsrc_dir)
        if hlsrc.lstat():
            hlsrc.delete()
        hlsrc.mkdir()
        hlsrc_sub = hlsrc.append("subdir")
        hlsrc_sub.mkdir()
        hl_file1 = hlsrc_sub.append("hardlink1")
        hl_file1.write_string(self.hello_str)
        hl_file3 = hlsrc_sub.append("hardlink3")
        hl_file3.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 10000)
        out_subdir = output.append("subdir")
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        meta_prefix = rpath.RPath(
            Globals.local_connection,
            os.path.join(abs_output_dir, b"rdiff-backup-data",
                         b"mirror_metadata"))
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 1)
        metadata_rp = incs[0]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir, ./subdir/hardlink1, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 2, 2]
        self.assertEqual(expected_link_counts, link_counts)

        # Create a new hardlinked file between "hardlink1" and "hardlink3" and perform another backup
        hl_file2 = hlsrc_sub.append("hardlink2")
        hl_file2.hardlink(hl_file1.path)

        InternalBackup(1, 1, hlsrc.path, output.path, 20000)
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink2").getinode())
        self.assertEqual(
            out_subdir.append("hardlink1").getinode(),
            out_subdir.append("hardlink3").getinode())

        # validate that hashes and link counts are correctly saved in metadata
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 2)
        if incs[0].getinctype() == b'snapshot':
            metadata_rp = incs[0]
        else:
            metadata_rp = incs[1]
        hashes, link_counts = self.extract_metadata(metadata_rp)
        # hashes for ., ./subdir/, ./subdir/hardlink1, ./subdir/hardlink2, ./subdir/hardlink3
        expected_hashes = [None, None, self.hello_str_hash, None, None]
        self.assertEqual(expected_hashes, hashes)
        expected_link_counts = [1, 1, 3, 3, 3]
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(expected_link_counts, link_counts)

        # Now try restoring, still checking hard links.
        sub_path = os.path.join(abs_output_dir, b"subdir")
        restore_path = os.path.join(abs_test_dir, b"hl_restore")
        restore_dir = rpath.RPath(Globals.local_connection, restore_path)
        hlrestore_file1 = restore_dir.append("hardlink1")
        hlrestore_file2 = restore_dir.append("hardlink2")
        hlrestore_file3 = restore_dir.append("hardlink3")

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 10000)
        for rp in [hlrestore_file1, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file3.getinode())

        if restore_dir.lstat():
            restore_dir.delete()
        InternalRestore(1, 1, sub_path, restore_path, 20000)
        for rp in [hlrestore_file1, hlrestore_file2, hlrestore_file3]:
            rp.setdata()
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file2.getinode())
        # The following assertion would fail as a result of bugs that are now fixed
        self.assertEqual(hlrestore_file1.getinode(),
                         hlrestore_file3.getinode())
Example #24
def get_local_rp(ext):
    return rpath.RPath(Globals.local_connection, os.path.join(abs_test_dir, ext))
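A minimal usage sketch, assuming the abs_test_dir, rpath and Globals names used by the surrounding test helpers; the subdirectory name is hypothetical:

    out_rp = get_local_rp(b"some_subdir")  # RPath rooted under abs_test_dir
    if not out_rp.lstat():
        out_rp.mkdir()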
Example #25
class ACLTest(unittest.TestCase):
    """Test access control lists"""

    current_user = os.getenv('RDIFF_TEST_USER',
                             pwd.getpwuid(os.getuid()).pw_name)
    current_group = os.getenv('RDIFF_TEST_GROUP',
                              grp.getgrgid(os.getgid()).gr_name)

    sample_acl = AccessControlLists((), """user::rwx
user:root:rwx
group::r-x
group:root:r-x
mask::r-x
other::---""")
    dir_acl = AccessControlLists((), """user::rwx
user:root:rwx
group::r-x
group:root:r-x
mask::r-x
other::---
default:user::rwx
default:user:root:---
default:group::r-x
default:mask::r-x
default:other::---""")
    acl1 = AccessControlLists((b'a1', ), """user::r--
user:{0}:---
group::---
group:root:---
mask::---
other::---""".format(current_user))
    acl2 = AccessControlLists((b'a2', ), """user::rwx
group::r-x
group:{0}:rwx
mask::---
other::---""".format(current_group))
    acl3 = AccessControlLists((b'a3', ), """user::rwx
user:root:---
group::r-x
mask::---
other::---""")
    empty_acl = AccessControlLists((), "user::rwx\ngroup::---\nother::---")
    acl_test1_dir = os.path.join(abs_test_dir, b'acl_test1')
    acl_test1_rpath = rpath.RPath(Globals.local_connection, acl_test1_dir)
    acl_test2_dir = os.path.join(abs_test_dir, b'acl_test2')
    acl_test2_rpath = rpath.RPath(Globals.local_connection, acl_test2_dir)
    acl_empty_dir = os.path.join(abs_test_dir, b'acl_empty')
    acl_empty_rpath = rpath.RPath(Globals.local_connection, acl_empty_dir)

    def make_temp_out_dirs(self):
        """Make temp output and restore directories empty"""
        tempdir.setdata()  # in case the file changed in-between
        if tempdir.lstat():
            tempdir.delete()
        tempdir.mkdir()
        restore_dir.setdata()  # in case the file changed in-between
        if restore_dir.lstat():
            restore_dir.delete()

    def testBasic(self):
        """Test basic writing and reading of ACLs"""
        self.make_temp_out_dirs()
        new_acl = AccessControlLists(())
        tempdir.chmod(0o700)
        new_acl.read_from_rp(tempdir)
        assert new_acl.is_basic(), str(new_acl)
        assert not new_acl == self.sample_acl
        assert new_acl != self.sample_acl
        assert new_acl == self.empty_acl, \
            (str(new_acl), str(self.empty_acl))

        self.sample_acl.write_to_rp(tempdir)
        new_acl.read_from_rp(tempdir)
        assert str(new_acl) == str(self.sample_acl), \
            (str(new_acl), str(self.sample_acl))
        assert new_acl == self.sample_acl

    def testBasicDir(self):
        """Test reading and writing of ACL w/ defaults to directory"""
        self.make_temp_out_dirs()
        new_acl = AccessControlLists(())
        new_acl.read_from_rp(tempdir)
        assert new_acl.is_basic()
        assert new_acl != self.dir_acl

        self.dir_acl.write_to_rp(tempdir)
        new_acl.read_from_rp(tempdir)
        assert not new_acl.is_basic()
        if not new_acl == self.dir_acl:
            assert new_acl.eq_verbose(self.dir_acl)
            assert 0, "Shouldn't be here---eq != eq_verbose?"

    def testRecord(self):
        """Test writing a record and reading it back"""
        record = ACL2Record(self.sample_acl)
        new_acl = Record2ACL(record)
        if new_acl != self.sample_acl:
            print("New_acl", new_acl.entry_list)
            print("sample_acl", self.sample_acl.entry_list)
            print("New_acl text", str(new_acl))
            print("sample acl text", str(self.sample_acl))
            assert 0

        record2 = ACL2Record(self.dir_acl)
        new_acl2 = Record2ACL(record2)
        if not new_acl2 == self.dir_acl:
            assert new_acl2.eq_verbose(self.dir_acl)
            assert 0

    def testExtractor(self):
        """Test seeking inside a record list"""
        record_list = """# file: 0foo
user::r--
user:{0}:---
group::---
group:root:---
mask::---
other::---
# file: 1foo/bar/baz
user::r--
user:{0}:---
group::---
group:root:---
mask::---
other::---
# file: 2foo/\\012
user::r--
user:{0}:---
group::---
group:root:---
mask::---
other::---
""".format(self.current_user)
        extractor = ACLExtractor(io.BytesIO(os.fsencode(record_list)))
        acl_iter = extractor.iterate_starting_with(())
        first = next(acl_iter)
        assert first.index == (b'0foo', ), first
        second = next(acl_iter)
        assert second.index == (b'1foo', b'bar', b'baz'), second
        third = next(acl_iter)  # Test quoted filenames
        assert third.index == (b'2foo', b'\n'), third.index
        try:
            next(acl_iter)
        except StopIteration:
            pass
        else:
            assert 0, "Too many elements in iterator"

        extractor = ACLExtractor(io.BytesIO(os.fsencode(record_list)))
        acl_iter = extractor.iterate_starting_with((b'1foo', b'bar'))
        assert next(acl_iter).index == (b'1foo', b'bar', b'baz')
        try:
            next(acl_iter)
        except StopIteration:
            pass
        else:
            assert 0, "Too many elements in iterator"

    def make_backup_dirs(self):
        """Create testfiles/acl_test[12] directories"""
        if self.acl_test1_rpath.lstat():
            self.acl_test1_rpath.delete()
        self.acl_test1_rpath.mkdir()
        rp1_1 = self.acl_test1_rpath.append('a1')
        rp1_2 = self.acl_test1_rpath.append('a2')
        rp1_3 = self.acl_test1_rpath.append('a3')
        list(map(rpath.RPath.touch, [rp1_1, rp1_2, rp1_3]))
        self.dir_acl.write_to_rp(self.acl_test1_rpath)
        self.acl1.write_to_rp(rp1_1)
        self.acl2.write_to_rp(rp1_2)
        self.acl3.write_to_rp(rp1_3)

        if self.acl_test2_rpath.lstat():
            self.acl_test2_rpath.delete()
        self.acl_test2_rpath.mkdir()
        rp2_1, rp2_2, rp2_3 = list(
            map(self.acl_test2_rpath.append, ('a1', 'a2', 'a3')))
        list(map(rpath.RPath.touch, (rp2_1, rp2_2, rp2_3)))
        self.sample_acl.write_to_rp(self.acl_test2_rpath)
        self.acl3.write_to_rp(rp2_1)
        self.acl1.write_to_rp(rp2_2)
        self.acl2.write_to_rp(rp2_3)

        # just create an empty dir for tests
        if self.acl_empty_rpath.lstat():
            self.acl_empty_rpath.delete()
        self.acl_empty_rpath.mkdir()

    def testIterate(self):
        """Test writing several records and then reading them back"""
        self.make_backup_dirs()
        self.make_temp_out_dirs()
        rp1 = self.acl_test1_rpath.append('a1')
        rp2 = self.acl_test1_rpath.append('a2')
        rp3 = self.acl_test1_rpath.append('a3')

        # Now write records corresponding to above rps into file
        Globals.rbdir = tempdir
        man = metadata.PatchDiffMan()
        writer = man.get_acl_writer('snapshot', 10000)
        for rp in [self.acl_test1_rpath, rp1, rp2, rp3]:
            acl = AccessControlLists(rp.index)
            acl.read_from_rp(rp)
            writer.write_object(acl)
        writer.close()

        # Read back records and compare
        acl_iter = man.get_acls_at_time(10000, None)
        assert acl_iter, "No acl file found"
        dir_acl_reread = next(acl_iter)
        assert dir_acl_reread == self.dir_acl
        acl1_reread = next(acl_iter)
        assert acl1_reread == self.acl1
        acl2_reread = next(acl_iter)
        assert acl2_reread == self.acl2
        acl3_reread = next(acl_iter)
        assert acl3_reread == self.acl3
        try:
            extra = next(acl_iter)
        except StopIteration:
            pass
        else:
            assert 0, "Got unexpected object: " + repr(extra)

    def testSeriesLocal(self):
        """Test backing up and restoring directories with ACLs locally"""
        self.make_backup_dirs()
        dirlist = [
            self.acl_test1_dir, self.acl_empty_dir, self.acl_test2_dir,
            self.acl_test1_dir
        ]
        BackupRestoreSeries(1, 1, dirlist, compare_acls=1)

    def testSeriesRemote(self):
        """Test backing up, restoring directories with EA remotely"""
        self.make_backup_dirs()
        dirlist = [
            self.acl_test1_dir, self.acl_test2_dir, self.acl_empty_dir,
            self.acl_test1_dir
        ]
        BackupRestoreSeries(None, None, dirlist, compare_acls=1)

    def test_final_local(self):
        """Test backing up and restoring using 'rdiff-backup' script"""
        self.make_backup_dirs()
        self.make_temp_out_dirs()
        rdiff_backup(1,
                     1,
                     self.acl_test1_rpath.path,
                     tempdir.path,
                     current_time=10000)
        assert CompareRecursive(self.acl_test1_rpath, tempdir, compare_acls=1)

        rdiff_backup(1,
                     1,
                     self.acl_test2_rpath.path,
                     tempdir.path,
                     current_time=20000)
        assert CompareRecursive(self.acl_test2_rpath, tempdir, compare_acls=1)

        rdiff_backup(1,
                     1,
                     tempdir.path,
                     restore_dir.path,
                     extra_options=b'-r 10000')
        assert CompareRecursive(self.acl_test1_rpath,
                                restore_dir,
                                compare_acls=1)

        restore_dir.delete()
        rdiff_backup(1,
                     1,
                     tempdir.path,
                     restore_dir.path,
                     extra_options=b'-r now')
        assert CompareRecursive(self.acl_test2_rpath,
                                restore_dir,
                                compare_acls=1)

    def test_acl_mapping(self):
        """Test mapping ACL names"""
        def make_dir(rootrp):
            if rootrp.lstat():
                rootrp.delete()
            rootrp.mkdir()
            rp = rootrp.append('a1')
            rp.touch()
            acl = AccessControlLists(('a1', ), """user::rwx
user:root:rwx
user:{0}:---
user:bin:r--
group::r-x
group:root:r-x
group:{1}:-w-
mask::r-x
other::---""".format(self.current_user, self.current_group))
            rp.write_acl(acl)
            return rp

        def write_mapping_file(rootrp):
            map_rp = rootrp.append('mapping_file')
            map_rp.write_string("root:{1}\n{0}:bin\nbin:root".format(
                self.current_user, self.current_group))
            return map_rp

        def get_perms_of_user(acl, user):
            """Return the permissions of ACL_USER in acl, or None"""
            for typechar, owner_pair, perms in acl.entry_list:
                if typechar == "u" and owner_pair[1] == user:
                    return perms
            return None

        self.make_temp_out_dirs()
        rootrp = rpath.RPath(Globals.local_connection,
                             os.path.join(abs_test_dir, b'acl_map_test'))
        make_dir(rootrp)
        map_rp = write_mapping_file(rootrp)

        rdiff_backup(1,
                     1,
                     rootrp.path,
                     tempdir.path,
                     extra_options=b"--user-mapping-file %b" % (map_rp.path, ))

        out_rp = tempdir.append('a1')
        assert out_rp.isreg()
        out_acl = tempdir.append('a1').get_acl()
        assert get_perms_of_user(out_acl, 'root') == 4
        assert get_perms_of_user(out_acl, self.current_user) == 7
        assert get_perms_of_user(out_acl, 'bin') == 0

    def test_acl_dropping(self):
        """Test dropping of ACL names"""
        self.make_temp_out_dirs()
        rp = tempdir.append('a1')
        rp.touch()
        """ben uses a dvorak keyboard, and these sequences are
        analogous to asdfsjkd for a qwerty user... these
        users and groups are not expected to exist. -dean"""
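        # Expected behaviour: entries for unknown users/groups are silently
        # dropped when the ACL is written (so the re-read ACL is basic),
        # unless Globals.never_drop_acls is set, in which case rdiff-backup
        # exits with a fatal error.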
        acl = AccessControlLists(('a1', ), """user::rwx
user:aoensutheu:r--
group::r-x
group:aeuai:r-x
group:enutohnh:-w-
other::---""")
        rp.write_acl(acl)
        rp2 = tempdir.append('a1')
        acl2 = AccessControlLists(('a1', ))
        acl2.read_from_rp(rp2)
        assert acl2.is_basic()
        Globals.never_drop_acls = 1
        try:
            rp.write_acl(acl)
        except SystemExit:
            pass
        else:
            assert 0, "Above should have exited with fatal error"
        Globals.never_drop_acls = None

    def test_nochange(self):
        """Make sure files with ACLs not unnecessarily flagged changed"""
        self.make_temp_out_dirs()
        self.make_backup_dirs()
        rdiff_backup(1,
                     1,
                     self.acl_test1_rpath.path,
                     tempdir.path,
                     current_time=10000)
        rdiff_backup(1,
                     1,
                     self.acl_test1_rpath.path,
                     tempdir.path,
                     current_time=20000)
        incdir = tempdir.append('rdiff-backup-data', 'increments')
        assert incdir.isdir(), incdir
        assert not incdir.listdir(), incdir.listdir()
Пример #26
0
class EATest(unittest.TestCase):
    """Test extended attributes"""
    sample_ea = ExtendedAttributes(
        (), {
            b'user.empty':
            b'',
            b'user.not_empty':
            b'foobar',
            b'user.third':
            b'hello',
            b'user.binary':
            bytes((0, 1, 2, 140)) + b'/="',
            b'user.multiline':
            b"""This is a fairly long extended attribute.
                Encoding it will require several lines of
                base64.""" + bytes((177, ) * 300)
        })
    empty_ea = ExtendedAttributes(())
    ea1 = ExtendedAttributes(('e1', ), sample_ea.attr_dict.copy())
    ea1.delete(b'user.not_empty')
    ea2 = ExtendedAttributes(('e2', ), sample_ea.attr_dict.copy())
    ea2.set(b'user.third', b'Another random attribute')
    ea3 = ExtendedAttributes(('e3', ))
    ea4 = ExtendedAttributes(('e4', ),
                             {b'user.deleted': b'File to be deleted'})
    ea_test1_dir = os.path.join(abs_test_dir, b'ea_test1')
    ea_test1_rpath = rpath.RPath(Globals.local_connection, ea_test1_dir)
    ea_test2_dir = os.path.join(abs_test_dir, b'ea_test2')
    ea_test2_rpath = rpath.RPath(Globals.local_connection, ea_test2_dir)
    ea_empty_dir = os.path.join(abs_test_dir, b'ea_empty')
    ea_empty_rpath = rpath.RPath(Globals.local_connection, ea_empty_dir)

    def make_temp_out_dirs(self):
        """Make temp output and restore directories empty"""
        tempdir.setdata()  # in case the file changed in-between
        if tempdir.lstat():
            tempdir.delete()
        tempdir.mkdir()
        restore_dir.setdata()
        if restore_dir.lstat():
            restore_dir.delete()

    def testBasic(self):
        """Test basic writing and reading of extended attributes"""
        self.make_temp_out_dirs()
        new_ea = ExtendedAttributes(())
        new_ea.read_from_rp(tempdir)
        # we ignore SELinux extended attributes for comparison
        if new_ea.attr_dict:
            new_ea.attr_dict.pop(b'security.selinux', None)
        assert not new_ea.attr_dict, "The attributes of %s should have been empty: %s" % (
            tempdir, new_ea.attr_dict)
        assert not new_ea == self.sample_ea
        assert new_ea != self.sample_ea
        assert new_ea == self.empty_ea

        self.sample_ea.write_to_rp(tempdir)
        new_ea.read_from_rp(tempdir)
        if new_ea.attr_dict:
            new_ea.attr_dict.pop(b'security.selinux', None)
        assert new_ea.attr_dict == self.sample_ea.attr_dict, \
            (new_ea.attr_dict, self.sample_ea.attr_dict)
        assert new_ea == self.sample_ea

    def testRecord(self):
        """Test writing a record and reading it back"""
        record = EA2Record(self.sample_ea)
        new_ea = Record2EA(record)
        if not new_ea == self.sample_ea:
            new_list = list(new_ea.attr_dict.keys())
            sample_list = list(self.sample_ea.attr_dict.keys())
            new_list.sort()
            sample_list.sort()
            assert new_list == sample_list, (new_list, sample_list)
            for name in new_list:
                assert self.sample_ea.get(name) == new_ea.get(name), \
                    (self.sample_ea.get(name), new_ea.get(name))
            assert self.sample_ea.index == new_ea.index, \
                (self.sample_ea.index, new_ea.index)
            assert 0, "We shouldn't have gotten this far"

    def testExtractor(self):
        """Test seeking inside a record list"""
        record_list = """# file: 0foo
user.multiline=0sVGhpcyBpcyBhIGZhaXJseSBsb25nIGV4dGVuZGVkIGF0dHJpYnV0ZS4KCQkJIEVuY29kaW5nIGl0IHdpbGwgcmVxdWlyZSBzZXZlcmFsIGxpbmVzIG9mCgkJCSBiYXNlNjQusbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGx
user.third=0saGVsbG8=
user.not_empty=0sZm9vYmFy
user.binary=0sAAECjC89Ig==
user.empty
# file: 1foo/bar/baz
user.multiline=0sVGhpcyBpcyBhIGZhaXJseSBsb25nIGV4dGVuZGVkIGF0dHJpYnV0ZS4KCQkJIEVuY29kaW5nIGl0IHdpbGwgcmVxdWlyZSBzZXZlcmFsIGxpbmVzIG9mCgkJCSBiYXNlNjQusbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGxsbGx
user.third=0saGVsbG8=
user.binary=0sAAECjC89Ig==
user.empty
# file: 2foo/\\012
user.empty
"""
        extractor = EAExtractor(io.BytesIO(os.fsencode(record_list)))
        ea_iter = extractor.iterate_starting_with(())
        first = next(ea_iter)
        assert first.index == (b'0foo', ), first
        second = next(ea_iter)
        assert second.index == (b'1foo', b'bar', b'baz'), second
        third = next(ea_iter)  # Test quoted filenames
        assert third.index == (b'2foo', b'\n'), third.index
        try:
            next(ea_iter)
        except StopIteration:
            pass
        else:
            assert 0, "Too many elements in iterator"

        extractor = EAExtractor(io.BytesIO(os.fsencode(record_list)))
        ea_iter = extractor.iterate_starting_with((b'1foo', b'bar'))
        assert next(ea_iter).index == (b'1foo', b'bar', b'baz')
        try:
            next(ea_iter)
        except StopIteration:
            pass
        else:
            assert 0, "Too many elements in iterator"

    def make_backup_dirs(self):
        """Create testfiles/ea_test[12] directories

        The goal is to set a range of extended attributes, to give the
        extended attribute code a thorough test.

        """
        if self.ea_test1_rpath.lstat():
            self.ea_test1_rpath.delete()
        if self.ea_test2_rpath.lstat():
            self.ea_test2_rpath.delete()
        self.ea_test1_rpath.mkdir()
        rp1_1 = self.ea_test1_rpath.append('e1')
        rp1_2 = self.ea_test1_rpath.append('e2')
        rp1_3 = self.ea_test1_rpath.append('e3')
        rp1_4 = self.ea_test1_rpath.append('e4')
        list(map(rpath.RPath.touch, [rp1_1, rp1_2, rp1_3, rp1_4]))
        self.sample_ea.write_to_rp(self.ea_test1_rpath)
        self.ea1.write_to_rp(rp1_1)
        self.ea2.write_to_rp(rp1_2)
        self.ea4.write_to_rp(rp1_4)

        self.ea_test2_rpath.mkdir()
        rp2_1 = self.ea_test2_rpath.append('e1')
        rp2_2 = self.ea_test2_rpath.append('e2')
        rp2_3 = self.ea_test2_rpath.append('e3')
        list(map(rpath.RPath.touch, [rp2_1, rp2_2, rp2_3]))
        self.ea3.write_to_rp(self.ea_test2_rpath)
        self.sample_ea.write_to_rp(rp2_1)
        self.ea1.write_to_rp(rp2_2)
        self.ea2.write_to_rp(rp2_3)

        # just create an empty dir for tests
        if self.ea_empty_rpath.lstat():
            self.ea_empty_rpath.delete()
        self.ea_empty_rpath.mkdir()

    def testIterate(self):
        """Test writing several records and then reading them back"""
        self.make_backup_dirs()
        rp1 = self.ea_test1_rpath.append('e1')
        rp2 = self.ea_test1_rpath.append('e2')
        rp3 = self.ea_test1_rpath.append('e3')

        # Now write records corresponding to above rps into file
        Globals.rbdir = tempdir
        man = metadata.PatchDiffMan()
        writer = man.get_ea_writer('snapshot', 10000)
        for rp in [self.ea_test1_rpath, rp1, rp2, rp3]:
            ea = ExtendedAttributes(rp.index)
            ea.read_from_rp(rp)
            writer.write_object(ea)
        writer.close()

        # Read back records and compare
        ea_iter = man.get_eas_at_time(10000, None)
        assert ea_iter, "No extended_attributes.<time> file found"
        sample_ea_reread = next(ea_iter)
        # we ignore SELinux extended attributes for comparison
        if sample_ea_reread.attr_dict:
            sample_ea_reread.attr_dict.pop(b'security.selinux', None)
        assert sample_ea_reread == self.sample_ea, "Re-read EAs %s are different from %s" % \
            (sample_ea_reread.attr_dict, self.sample_ea.attr_dict)
        ea1_reread = next(ea_iter)
        if ea1_reread.attr_dict:
            ea1_reread.attr_dict.pop(b'security.selinux', None)
        assert ea1_reread == self.ea1, "Re-read EAs %s are different from %s" % \
            (ea1_reread.attr_dict, self.ea1.attr_dict)
        ea2_reread = next(ea_iter)
        if ea2_reread.attr_dict:
            ea2_reread.attr_dict.pop(b'security.selinux', None)
        assert ea2_reread == self.ea2, "Re-read EAs %s are different from %s" % \
            (ea2_reread.attr_dict, self.ea2.attr_dict)
        ea3_reread = next(ea_iter)
        if ea3_reread.attr_dict:
            ea3_reread.attr_dict.pop(b'security.selinux', None)
        assert ea3_reread == self.ea3, "Re-read EAs %s are different from %s" % \
            (ea3_reread.attr_dict, self.ea3.attr_dict)
        try:
            next(ea_iter)
        except StopIteration:
            pass
        else:
            assert 0, "Expected end to iterator"

    def testSeriesLocal(self):
        """Test backing up and restoring directories with EAs locally"""
        self.make_backup_dirs()
        dirlist = [
            self.ea_test1_dir, self.ea_empty_dir, self.ea_test2_dir,
            self.ea_test1_dir
        ]
        BackupRestoreSeries(1, 1, dirlist, compare_eas=1)

    def testSeriesRemote(self):
        """Test backing up, restoring directories with EA remotely"""
        self.make_backup_dirs()
        dirlist = [
            self.ea_test1_dir, self.ea_test2_dir, self.ea_empty_dir,
            self.ea_test1_dir
        ]
        BackupRestoreSeries(None, None, dirlist, compare_eas=1)

    def test_final_local(self):
        """Test backing up and restoring using 'rdiff-backup' script"""
        self.make_backup_dirs()
        self.make_temp_out_dirs()
        rdiff_backup(1,
                     1,
                     self.ea_test1_rpath.path,
                     tempdir.path,
                     current_time=10000)
        assert CompareRecursive(self.ea_test1_rpath, tempdir, compare_eas=1)

        rdiff_backup(1,
                     1,
                     self.ea_test2_rpath.path,
                     tempdir.path,
                     current_time=20000)
        assert CompareRecursive(self.ea_test2_rpath, tempdir, compare_eas=1)

        rdiff_backup(1,
                     1,
                     tempdir.path,
                     restore_dir.path,
                     extra_options=b'-r 10000')
        assert CompareRecursive(self.ea_test1_rpath,
                                restore_dir,
                                compare_eas=1)
Пример #27
0
import unittest
import os
import io
import pwd
import grp
from rdiff_backup.eas_acls import AccessControlLists, metadata, ACLExtractor, \
    Record2ACL, ACL2Record, ExtendedAttributes, EAExtractor, EA2Record, Record2EA
from rdiff_backup import Globals, rpath, user_group
from commontest import rdiff_backup, abs_test_dir, abs_output_dir, abs_restore_dir, \
    BackupRestoreSeries, CompareRecursive

user_group.init_user_mapping()
user_group.init_group_mapping()
tempdir = rpath.RPath(Globals.local_connection, abs_output_dir)
restore_dir = rpath.RPath(Globals.local_connection, abs_restore_dir)


class LibrsyncTest(unittest.TestCase):
    """Test various librsync wrapper functions"""
    basis = rpath.RPath(Globals.local_connection,
                        os.path.join(abs_test_dir, b"basis"))
    new = rpath.RPath(Globals.local_connection,
                      os.path.join(abs_test_dir, b"new"))
    new2 = rpath.RPath(Globals.local_connection,
                       os.path.join(abs_test_dir, b"new2"))
    sig = rpath.RPath(Globals.local_connection,
                      os.path.join(abs_test_dir, b"signature"))
    sig2 = rpath.RPath(Globals.local_connection,
                       os.path.join(abs_test_dir, b"signature2"))
    delta = rpath.RPath(Globals.local_connection,
                        os.path.join(abs_test_dir, b"delta"))

    def sig_file_test_helper(self, blocksize, iterations, file_len=None):
        """Compare SigFile output to rdiff output at given blocksize"""
        for i in range(iterations):
            MakeRandomFile(self.basis.path, file_len)
            self._clean_file(self.sig)
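            # Probe `rdiff --help` to adapt to the installed librsync/rdiff
            # version: newer builds default to different signature/rollsum
            # algorithms, so where the options exist the test forces md4 (and
            # the classic rollsum) to stay byte-comparable with
            # librsync.SigFile below.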
            rdiff_help_text = subprocess.check_output(["rdiff", "--help"])
            if b'-R' in rdiff_help_text:
                assert not os.system(
                    b"rdiff -b %i -R rollup -H md4 signature %b %b" %
                    (blocksize, self.basis.path, self.sig.path))
            elif b'-H' in rdiff_help_text:
                assert not os.system(
                    b"rdiff -b %i -H md4 signature %b %b" %
                    (blocksize, self.basis.path, self.sig.path))
            else:
                assert not os.system(
                    b"rdiff -b %i signature %b %b" %
                    (blocksize, self.basis.path, self.sig.path))
            with self.sig.open("rb") as fp:
                rdiff_sig = fp.read()

            sf = librsync.SigFile(self.basis.open("rb"), blocksize)
            librsync_sig = sf.read()
            sf.close()

            assert rdiff_sig == librsync_sig, \
                (len(rdiff_sig), len(librsync_sig))

    def _clean_file(self, rp):
        """Make sure the given rpath is properly cleaned"""
        rp.setdata()
        if rp.lstat():
            rp.delete()

    def testSigFile(self):
        """Make sure SigFile generates same data as rdiff, blocksize 512"""
        self.sig_file_test_helper(512, 5)

    def testSigFile2(self):
        """Test SigFile like above, but try various blocksize"""
        self.sig_file_test_helper(2048, 1, 60000)
        self.sig_file_test_helper(7168, 1, 6000)
        self.sig_file_test_helper(204800, 1, 40 * 1024 * 1024)

    def testSigGenerator(self):
        """Test SigGenerator, make sure it's same as SigFile"""
        for i in range(5):
            MakeRandomFile(self.basis.path)

            sf = librsync.SigFile(self.basis.open("rb"))
            sigfile_string = sf.read()
            sf.close()

            sig_gen = librsync.SigGenerator()
            with self.basis.open("rb") as infile:
                while 1:
                    buf = infile.read(1000)
                    if not buf:
                        break
                    sig_gen.update(buf)
                siggen_string = sig_gen.getsig()

            assert sigfile_string == siggen_string, \
                (len(sigfile_string), len(siggen_string))

    def OldtestDelta(self):
        """Test delta generation against Rdiff"""
        MakeRandomFile(self.basis.path)
        assert not os.system(b"rdiff signature %s %s" %
                             (self.basis.path, self.sig.path))
        for i in range(5):
            MakeRandomFile(self.new.path)
            assert not os.system(
                b"rdiff delta %b %b %b" %
                (self.sig.path, self.new.path, self.delta.path))
            fp = self.delta.open("rb")
            rdiff_delta = fp.read()
            fp.close()

            df = librsync.DeltaFile(self.sig.open("rb"), self.new.open("rb"))
            librsync_delta = df.read()
            df.close()

            print(len(rdiff_delta), len(librsync_delta))
            print(repr(rdiff_delta[:100]))
            print(repr(librsync_delta[:100]))
            assert rdiff_delta == librsync_delta

    def testDelta(self):
        """Test delta generation by making sure rdiff can process output

        There appears to be some indeterminism so we can't just
        byte-compare the deltas produced by rdiff and DeltaFile.

        """
        MakeRandomFile(self.basis.path)
        self._clean_file(self.sig)
        assert not os.system(b"rdiff signature %s %s" %
                             (self.basis.path, self.sig.path))
        for i in range(5):
            MakeRandomFile(self.new.path)
            df = librsync.DeltaFile(self.sig.open("rb"), self.new.open("rb"))
            librsync_delta = df.read()
            df.close()
            fp = self.delta.open("wb")
            fp.write(librsync_delta)
            fp.close()

            self._clean_file(self.new2)
            assert not os.system(
                b"rdiff patch %s %s %s" %
                (self.basis.path, self.delta.path, self.new2.path))
            new_fp = self.new.open("rb")
            new = new_fp.read()
            new_fp.close()

            new2_fp = self.new2.open("rb")
            new2 = new2_fp.read()
            new2_fp.close()

            assert new == new2, (len(new), len(new2))

    def testPatch(self):
        """Test patching against Rdiff"""
        MakeRandomFile(self.basis.path)
        self._clean_file(self.sig)
        assert not os.system(b"rdiff signature %s %s" %
                             (self.basis.path, self.sig.path))
        for i in range(5):
            MakeRandomFile(self.new.path)
            self._clean_file(self.delta)
            assert not os.system(
                b"rdiff delta %s %s %s" %
                (self.sig.path, self.new.path, self.delta.path))
            fp = self.new.open("rb")
            real_new = fp.read()
            fp.close()

            pf = librsync.PatchedFile(self.basis.open("rb"),
                                      self.delta.open("rb"))
            librsync_new = pf.read()
            pf.close()

            assert real_new == librsync_new, \
                (len(real_new), len(librsync_new))
Пример #29
0
class HashTest(unittest.TestCase):
    """Test the hash module"""
    s1 = "Hello, world!"
    s1_hash = "943a702d06f34599aee1f8da8ef9f7296031d699"
    s2 = "The quick brown dog jumped over the lazy fox"
    s2_hash = "eab21fb1a18b408909bae552b847f6b13f370f62"
    s3 = "foobar"
    s3_hash = "8843d7f92416211de9ebb963ff4ce28125932878"

    root_rp = rpath.RPath(Globals.local_connection, abs_test_dir)

    def test_basic(self):
        """Compare sha1sum of a few strings"""
        b1 = self.s1.encode()
        sfile = io.BytesIO(b1)
        fw = hash.FileWrapper(sfile)
        self.assertEqual(fw.read(), b1)
        report = fw.close()
        self.assertEqual(report.sha1_digest, self.s1_hash)

        sfile2 = io.BytesIO(b1)
        fw2 = hash.FileWrapper(sfile2)
        self.assertEqual(fw2.read(5), b1[:5])
        self.assertEqual(fw2.read(), b1[5:])
        report2 = fw2.close()
        self.assertEqual(report2.sha1_digest, self.s1_hash)

    def make_dirs(self):
        """Make two input directories"""
        d1 = self.root_rp.append("hashtest1")
        re_init_rpath_dir(d1)
        d2 = self.root_rp.append("hashtest2")
        re_init_rpath_dir(d2)

        d1f1 = d1.append("file1")
        d1f1.write_string(self.s1)
        d1f1l = d1.append("file1_linked")
        d1f1l.hardlink(d1f1.path)

        d1f2 = d1.append("file2")
        d1f2.write_string(self.s2)
        d1f2l = d1.append("file2_linked")
        d1f2l.hardlink(d1f2.path)

        d1_hashlist = [
            None, self.s1_hash, self.s1_hash, self.s2_hash, self.s2_hash
        ]

        d2f1 = d2.append("file1")
        rpath.copy_with_attribs(d1f1, d2f1)
        d2f1l = d2.append("file1_linked")
        d2f1l.write_string(self.s3)

        d1f2 = d2.append("file2")
        d1f2.mkdir()

        d2_hashlist = [None, self.s1_hash, self.s3_hash, None]

        return (d1, d1_hashlist, d2, d2_hashlist)

    def extract_hashs(self, metadata_rp):
        """Return list of hashes in the metadata_rp"""
        result = []
        comp = metadata_rp.isinccompressed()
        extractor = stdattr.AttrExtractor(metadata_rp.open("r", comp))
        for rorp in extractor.iterate():
            if rorp.has_sha1():
                result.append(rorp.get_sha1())
            else:
                result.append(None)
        return result

    @unittest.skip("Skipping until hash of hard links is fixed, see issue #23."
                   )
    def test_session(self):
        """Run actual sessions and make sure proper hashes recorded

        There are a few code paths here we need to test:  creating
        ordinary files, updating ordinary files with diffs, hard
        linking, and keeping files the same.

        """
        in_rp1, hashlist1, in_rp2, hashlist2 = self.make_dirs()
        Myrm(abs_output_dir)

        rdiff_backup(1, 1, in_rp1.path, abs_output_dir, 10000)
        meta_prefix = rpath.RPath(
            Globals.local_connection,
            os.path.join(abs_output_dir, b"rdiff-backup-data",
                         b"mirror_metadata"))
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 1)
        metadata_rp = incs[0]
        hashlist = self.extract_hashs(metadata_rp)
        self.assertEqual(hashlist, hashlist1)

        rdiff_backup(1, 1, in_rp2.path, abs_output_dir, 20000)
        incs = meta_prefix.get_incfiles_list()
        self.assertEqual(len(incs), 2)
        if incs[0].getinctype() == 'snapshot':
            inc = incs[0]
        else:
            inc = incs[1]
        hashlist = self.extract_hashs(inc)
        self.assertEqual(hashlist, hashlist2)

    def test_rorpiter_xfer(self):
        """Test if hashes are transferred in files, rorpiter"""
        Security._security_level = "override"
        conn = SetConnections._init_connection(
            b'%b %b/server.py' %
            (os.fsencode(sys.executable), abs_testing_dir))
        # make a connection sanity check
        self.assertEqual(conn.reval("lambda x: x+1", 4), 5)

        fp = hash.FileWrapper(io.BytesIO(self.s1.encode()))
        conn.Globals.set('tmp_file', fp)
        fp_remote = conn.Globals.get('tmp_file')
        self.assertEqual(fp_remote.read(), self.s1.encode())
        self.assertEqual(fp_remote.close().sha1_digest, self.s1_hash)

        # Tested xfer of file, now test xfer of files in rorpiter
        root = MakeOutputDir()
        rp1 = root.append('s1')
        rp1.write_string(self.s1)
        rp2 = root.append('s2')
        rp2.write_string(self.s2)
        rp1.setfile(hash.FileWrapper(rp1.open('rb')))
        rp2.setfile(hash.FileWrapper(rp2.open('rb')))
        rpiter = iter([rp1, rp2])

        conn.Globals.set('tmp_conn_iter', rpiter)
        remote_iter = conn.Globals.get('tmp_conn_iter')

        rorp1 = next(remote_iter)
        fp = hash.FileWrapper(rorp1.open('rb'))
        read_s1 = fp.read().decode()
        self.assertEqual(read_s1, self.s1)
        ret_val = fp.close()
        self.assertIsInstance(ret_val, hash.Report)
        self.assertEqual(ret_val.sha1_digest, self.s1_hash)
        rorp2 = next(remote_iter)
        fp2 = hash.FileWrapper(rorp2.open('rb'))
        read_s2 = fp2.read().decode()
        self.assertEqual(read_s2, self.s2)
        self.assertEqual(fp2.close().sha1_digest, self.s2_hash)

        conn.quit()
Пример #30
0
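    # Build RPath objects for the pre-existing testfiles directories used by
    # this test: an empty dir, two increment test inputs and the output dir.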
    def setUp(self):
        self.lc = Globals.local_connection
        self.inc0rp = rpath.RPath(self.lc, "testfiles/empty", ())
        self.inc1rp = rpath.RPath(self.lc, "testfiles/inc-reg-perms1", ())
        self.inc2rp = rpath.RPath(self.lc, "testfiles/inc-reg-perms2", ())
        self.output = rpath.RPath(self.lc, "testfiles/output", ())