def get_selection_functions():
    """Return generators of files in source, dest"""
    src_rp.setdata()
    dest_rp.setdata()
    src_select = selection.Select(src_rp)
    dest_select = selection.Select(dest_rp)
    both = (src_select, dest_select)
    if ignore_tmp_files:
        # Temp files may legitimately remain after a backup aborted in
        # the middle, so a correctness check can be told to skip them.
        for sel in both:
            sel.add_selection_func(
                sel.regexp_get_sf(".*rdiff-backup.tmp.[^/]+$", 0))
    if exclude_rbdir:
        # Exclude rdiff-backup-data directory
        for sel in both:
            sel.parse_rbdir_exclude()
    # always exclude files with non-UTF-8 conform names
    for sel in both:
        sel.parse_brokenname_exclude()
    return src_select.set_iter(), dest_select.set_iter()
def testCompletedDict(self):
    """See if the hardlink dictionaries are built correctly"""
    # After every add/del pair the inode index must end up empty.
    for hardlink_dir in (self.hardlink_dir1, self.hardlink_dir2):
        reset_hardlink_dicts()
        for dsrp in selection.Select(hardlink_dir).set_iter():
            Hardlink.add_rorp(dsrp)
            Hardlink.del_rorp(dsrp)
        assert Hardlink._inode_index == {}, Hardlink._inode_index
def testCompletedDict(self):
    """See if the hardlink dictionaries are built correctly"""
    # Adding then deleting every rorp must leave the inode index empty.
    for hlinks_rp in (self.hlinks_rp1, self.hlinks_rp2):
        reset_hardlink_dicts()
        for dsrp in selection.Select(hlinks_rp).set_iter():
            Hardlink.add_rorp(dsrp)
            Hardlink.del_rorp(dsrp)
        self.assertEqual(Hardlink._inode_index, {})
def compare(man, rootrp, time):
    """Check that the filesystem tree matches the stored metadata."""
    sel = selection.Select(rootrp)
    # make sure incorrect files are filtered out
    sel.parse_selection_args((), ())
    fs_iter = sel.set_iter()
    meta_iter = man._get_meta_main_at_time(time, None)
    self.assertTrue(iter_equal(fs_iter, meta_iter))
def testDSIter(self):
    """Testing destructive stepping iterator from baserp

    Fixed for Python 3: the original used the Python 2 octal literal
    0200 (a SyntaxError in Python 3) and the Python 2 iterator method
    ds_iter.next() (removed in Python 3) -- replaced with 0o200 and
    the next() builtin.
    """
    for i in range(2):
        sel = selection.Select(
            destructive_stepping.DSRPath(1, self.noperms)).set_iter()
        ds_iter = sel.iterate_with_finalizer()
        # The base directory comes first; permissions were stripped.
        noperms = next(ds_iter)
        assert noperms.isdir() and noperms.getperms() == 0, \
            (noperms.isdir(), noperms.getperms())
        bar = next(ds_iter)
        assert bar.isreg() and bar.getperms() == 0, \
            "%s %s" % (bar.isreg(), bar.getperms())
        barbuf = bar.open("rb").read()
        assert len(barbuf) > 0
        foo = next(ds_iter)
        assert foo.isreg() and foo.getperms() == 0
        assert foo.getmtime() < 1000300000
        fuz = next(ds_iter)
        # 0o200 = write-only for owner
        assert fuz.isreg() and fuz.getperms() == 0o200
        fuzbuf = fuz.open("rb").read()
        assert len(fuzbuf) > 0
        self.assertRaises(StopIteration, next, ds_iter)
def get_mirror_select():
    """Return iterator of mirror rpaths"""
    mirror_base = self.base_dir.new_index(self.restore_index)
    sel = selection.Select(mirror_base)
    if not self.restore_index:
        # restoring from the repository root: the
        # rdiff-backup-data directory must be excluded
        sel.parse_rbdir_exclude()
    return sel.set_iter()
def Myrm(dirstring):
    """Run myrm on given directory string

    Fix: the original wrapped os.system() in an assert, which is
    silently stripped when Python runs with -O, so a failed removal
    would go unnoticed.  The exit status is now checked explicitly.
    """
    root_rp = rpath.RPath(Globals.local_connection, dirstring)
    for rp in selection.Select(root_rp).set_iter():
        if rp.isdir():
            rp.chmod(0o700)  # otherwise may not be able to remove
    status = os.system(b"rm -rf %s" % (root_rp.path, ))
    if status != 0:
        raise OSError("Removal of %r failed with status %d"
                      % (root_rp.path, status))
def test_write(self):
    """Test writing to metadata file, then reading back contents"""
    global tempdir
    temprp = tempdir.append(
        "mirror_metadata.2005-11-03T12:51:06-06:00.snapshot.gz")
    if temprp.lstat():
        temprp.delete()

    self.make_temp()
    rootrp = rpath.RPath(Globals.local_connection,
                         os.path.join(old_test_dir, b"various_file_types"))
    # make sure that incorrect files are ignored
    sel = selection.Select(rootrp)
    sel.parse_selection_args((), ())
    rps = list(sel.set_iter())

    self.assertFalse(temprp.lstat())
    write_mf = stdattr.AttrFile(temprp, 'w')
    for rp in rps:
        write_mf.write_object(rp)
    write_mf.close()
    self.assertTrue(temprp.lstat())

    # everything read back must equal what was written, in order
    reread_rps = list(stdattr.AttrFile(temprp, 'r').get_objects())
    self.assertEqual(len(reread_rps), len(rps))
    for reread_rp, rp in zip(reread_rps, rps):
        self.assertEqual(reread_rp, rp)
def write_dir_to_meta(manager, rp, time):
    """Record the metadata under rp to a mirror_metadata file

    Fix: the body referenced the name "man" instead of the "manager"
    parameter, leaving the parameter unused (and raising NameError
    unless a module global "man" happened to exist).
    """
    metawriter = manager.get_meta_writer('snapshot', time)
    sel = selection.Select(rp)
    sel.ParseArgs((), ())  # make sure incorrect files are filtered out
    for rorp in sel.set_iter():
        metawriter.write_object(rorp)
    metawriter.close()
def get_inc_select():
    """Return iterator of increment rpaths"""
    # first the increment files themselves ...
    yield from self.ref_inc.get_incfiles_list()
    # ... then everything below the increment directory, if any
    if self.ref_inc.isdir():
        yield from selection.Select(self.ref_inc).get_select_iter()
def testBuildingDict(self):
    """See if the partial inode dictionary is correct"""
    Globals.preserve_hardlinks = 1
    reset_hardlink_dicts()
    # add every rorp without deleting; three inode groups expected
    for dsrp in selection.Select(self.hlinks_rp3).set_iter():
        Hardlink.add_rorp(dsrp)
    self.assertEqual(len(Hardlink._inode_index), 3)
def testBuildingDict(self):
    """See if the partial inode dictionary is correct"""
    Globals.preserve_hardlinks = 1
    reset_hardlink_dicts()
    # only add rorps; three distinct inodes should be indexed
    for dsrp in selection.Select(self.hardlink_dir3).set_iter():
        Hardlink.add_rorp(dsrp)
    index = Hardlink._inode_index
    assert len(index.keys()) == 3, index
def get_inc_select():
    """Return iterator of increment rpaths"""
    inc_base = self.data_dir.append_path(b'increments', self.restore_index)
    # the increment files themselves come first
    yield from inc_base.get_incfiles_list()
    if inc_base.isdir():
        # then everything found below the increments directory
        yield from selection.Select(inc_base).set_iter()
def write_dir_to_meta(manager, rp, time):
    """Record the metadata under rp to a mirror_metadata file

    Fix: the body referenced the name "man" instead of the "manager"
    parameter, leaving the parameter unused (and raising NameError
    unless a module global "man" happened to exist).
    """
    metawriter = manager._writer_helper(b'snapshot', time,
                                        stdattr.get_plugin_class())
    sel = selection.Select(rp)
    # make sure incorrect files are filtered out
    sel.parse_selection_args((), ())
    for rorp in sel.set_iter():
        metawriter.write_object(rorp)
    metawriter.close()
def Myrm(dirstring):
    """Run myrm on given directory string"""
    root_rp = rpath.RPath(Globals.local_connection, dirstring)
    for rp in selection.Select(root_rp).set_iter():
        if rp.isdir():
            rp.chmod(0o700)  # otherwise may not be able to remove
    target = root_rp.path
    if os.path.isdir(target):
        shutil.rmtree(target)
    elif os.path.isfile(target):
        os.remove(target)
def _get_selection_functions(src_rp, dest_rp,
                             exclude_rbdir=True, ignore_tmp_files=False):
    """Return generators of files in source, dest"""
    src_rp.setdata()
    dest_rp.setdata()
    src_select = selection.Select(src_rp)
    dest_select = selection.Select(dest_rp)
    if ignore_tmp_files:
        # Temp files may legitimately remain after a backup that
        # aborted in the middle, so a correctness check of such a
        # backup can be told to skip them.
        for sel in (src_select, dest_select):
            sel._add_selection_func(
                sel._regexp_get_sf(".*rdiff-backup.tmp.[^/]+$", 0))
    if exclude_rbdir:
        # Exclude rdiff-backup-data directory
        for sel in (src_select, dest_select):
            sel.parse_rbdir_exclude()
    return src_select.get_select_iter(), dest_select.get_select_iter()
def write_metadata_to_temp(self):
    """If necessary, write metadata of bigdir to file metadata.gz

    Fix: the original used the Python 2 print statement, which is a
    SyntaxError under Python 3 -- converted to the print() function.
    """
    global tempdir
    temprp = tempdir.append(
        "mirror_metadata.2005-11-03T14:51:06-06:00.snapshot.gz")
    if temprp.lstat():
        # already written by a previous run; reuse it
        return temprp

    self.make_temp()
    rootrp = rpath.RPath(Globals.local_connection, "testfiles/bigdir")
    rpath_iter = selection.Select(rootrp).set_iter()

    start_time = time.time()
    mf = MetadataFile(temprp, 'w')
    for rp in rpath_iter:
        mf.write_object(rp)
    mf.close()
    print("Writing metadata took %s seconds" % (time.time() - start_time))
    return temprp
def set_source_select(cls, rpath, tuplelist, *filelists):
    """Initialize select object using tuplelist

    Note that each list in filelists must each be passed as separate
    arguments, so each is recognized as a file by the connection.
    Otherwise we will get an error because a list containing files
    can't be pickled.

    Also, cls._source_select needs to be cached so get_diffs below
    can retrieve the necessary rps.
    """
    sel = selection.Select(rpath)
    sel.parse_selection_args(tuplelist, filelists)
    # cache size: to and from + leeway
    cls._source_select = rorpiter.CacheIndexable(
        sel.get_select_iter(), Globals.pipeline_max_length * 3)
def _detect_resource_fork_readonly(self, dir_rp):
    """
    Test for resource fork support by testing an regular file

    Launches search for regular file in given directory.  If no
    regular file is found, resource_fork support will be turned off
    by default.
    """
    for rp in selection.Select(dir_rp).get_select_iter():
        if not rp.isreg():
            continue
        # probe the ..namedfork/rsrc pseudo-path of the first
        # regular file found; an OSError means no fork support
        try:
            fp = rp.append(b'..namedfork', b'rsrc').open('rb')
            fp.read()
            fp.close()
        except OSError:
            self.resource_forks = False
            return
        self.resource_forks = True
        return
    # no regular file found: default to no resource fork support
    self.resource_forks = False
def get_mirror_select():
    """Return iterator of mirror rpaths"""
    sel = selection.Select(self.ref_path)
    if not self.ref_index:
        # at the repository root the rdiff-backup-data
        # directory must be excluded
        sel.parse_rbdir_exclude()
    return sel.get_select_iter()
def delete_long(base_rp, length=100):
    """Delete filenames longer than length given"""
    for rp in selection.Select(base_rp).set_iter():
        basename = rp.dirsplit()[1]
        if len(basename) > length:
            rp.delete()
def get_iter_from_fs():
    """Get the combined iterator from the filesystem"""
    fs_select = selection.Select(rpath)
    # skip the rdiff-backup-data directory
    fs_select.parse_rbdir_exclude()
    return fs_select.get_select_iter()
def compare(man, rootrp, time):
    """Check that the filesystem tree matches the stored metadata."""
    sel = selection.Select(rootrp)
    # make sure incorrect files are filtered out
    sel.ParseArgs((), ())
    meta_iter = man.get_meta_at_time(time, None)
    assert iter_equal(sel.set_iter(), meta_iter)
def get_initial_iter(cls, target):
    """Return selector previously set with set_select"""
    # fall back to a fresh selection over target if none was set
    selector = cls._select if cls._select else selection.Select(target)
    return selector.get_select_iter()
def set_select(cls, target, select_opts, *filelists):
    """Return a selection object iterating the rorpaths in target"""
    if not select_opts:
        # nothing to do...
        return
    cls._select = selection.Select(target)
    cls._select.parse_selection_args(select_opts, filelists)
def write_dir_to_meta(manager, rp, time):
    """Record the metadata under rp to a mirror_metadata file

    Fix: the body referenced the name "man" instead of the "manager"
    parameter, leaving the parameter unused (and raising NameError
    unless a module global "man" happened to exist).
    """
    metawriter = manager.get_meta_writer('snapshot', time)
    for rorp in selection.Select(rp).set_iter():
        metawriter.write_object(rorp)
    metawriter.close()
def compare(man, rootrp, time):
    """Check that the filesystem tree matches the stored metadata."""
    fs_iter = selection.Select(rootrp).set_iter()
    meta_iter = man.get_meta_at_time(time, None)
    assert lazy.Iter.equal(fs_iter, meta_iter)