def process_local_dir(action, local_pathname):
    """Check local directory, set globals.local_path"""
    local_path = path.Path(path.Path(local_pathname).get_canonical())
    if action == "restore":
        if (local_path.exists() and not local_path.isemptydir()) and not globals.force:
            log.FatalError(_("Restore destination directory %s already "
                             "exists.\nWill not overwrite.") % (util.ufn(local_path.name),),
                           log.ErrorCode.restore_dir_exists)
    elif action == "verify":
        if not local_path.exists():
            log.FatalError(_("Verify directory %s does not exist") %
                           (util.ufn(local_path.name),),
                           log.ErrorCode.verify_dir_doesnt_exist)
    else:
        assert action == "full" or action == "inc"
        if not local_path.exists():
            log.FatalError(_("Backup source directory %s does not exist.") %
                           (util.ufn(local_path.name),),
                           log.ErrorCode.backup_dir_doesnt_exist)

    globals.local_path = local_path
def gpg_cycle(self, s, profile=None):
    u"""Test encryption/decryption cycle on string s"""
    epath = path.Path(u"testfiles/output/encrypted_file")
    if not profile:
        profile = self.default_profile
    encrypted_file = gpg.GPGFile(1, epath, profile)
    encrypted_file.write(s)
    encrypted_file.close()

    epath2 = path.Path(u"testfiles/output/encrypted_file")
    decrypted_file = gpg.GPGFile(0, epath2, profile)
    dec_buf = decrypted_file.read()
    decrypted_file.close()

    assert s == dec_buf, (len(s), len(dec_buf))
def test_long_filenames(self):
    """Test backing up a directory with long filenames in it"""
    # Note that some versions of ecryptfs (at least through Ubuntu 11.10)
    # have a bug where they treat the max path segment length as 143
    # instead of 255.  So make sure that these segments don't break that.
    lf_dir = path.Path("testfiles/long_filenames")
    if lf_dir.exists():
        lf_dir.deltree()
    lf_dir.mkdir()

    lf1 = lf_dir.append("AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA")
    lf1.mkdir()
    lf2 = lf1.append("BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB")
    lf2.mkdir()
    lf3 = lf2.append("CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC")
    lf3.mkdir()
    lf4 = lf3.append("DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD")
    lf4.touch()

    lf4_1 = lf3.append("SYMLINK--------------------------------------------------------------------------------------------")
    os.symlink("SYMLINK-DESTINATION-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------", lf4_1.name)
    lf4_1.setdata()
    assert lf4_1.issym()

    lf4_2 = lf3.append("DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD")
    fp = lf4_2.open("wb")
    fp.write("hello" * 1000)
    assert not fp.close()

    self.runtest(["testfiles/empty_dir", lf_dir.name,
                  "testfiles/empty_dir", lf_dir.name])
def setUp(self): UnitTestCase.setUp(self) assert not os.system(u"rm -rf testfiles") os.makedirs(u'testfiles') self.backend = None self.local = path.Path(u'testfiles/local') self.local.writefileobj(io.BytesIO(b"hello"))
def test_move(self):
    if self.backend is None:
        return
    if not hasattr(self.backend, '_move'):
        return

    copy = path.Path('testfiles/copy')
    self.local.copy(copy)

    self.backend._move(self.local, 'a')
    self.assertTrue('a' in self.backend._list())
    self.assertFalse(self.local.exists())

    getfile = path.Path('testfiles/getfile')
    self.backend._get('a', getfile)
    self.assertTrue(copy.compare_data(getfile))
def test_get(self):
    if self.backend is None:
        return
    self.backend._put(self.local, b'file-a')
    getfile = path.Path(u'{0}/testfiles/getfile'.format(_runtest_dir))
    self.backend._get(b'file-a', getfile)
    self.assertTrue(self.local.compare_data(getfile))
def setUp(self): UnitTestCase.setUp(self) assert not os.system(u"rm -rf {0}/testfiles".format(_runtest_dir)) os.makedirs(u'{0}/testfiles'.format(_runtest_dir)) self.backend = None self.local = path.Path(u'{0}/testfiles/local'.format(_runtest_dir)) self.local.writefileobj(io.BytesIO(b"hello"))
def test_move(self):
    if self.backend is None:
        return
    if not hasattr(self.backend, u'_move'):
        return

    copy = path.Path(u'{0}/testfiles/copy'.format(_runtest_dir))
    self.local.copy(copy)

    self.backend._move(self.local, b'file-a')
    self.assertTrue(b'file-a' in self.backend._list())
    self.assertFalse(self.local.exists())

    getfile = path.Path(u'{0}/testfiles/getfile'.format(_runtest_dir))
    self.backend._get(b'file-a', getfile)
    self.assertTrue(copy.compare_data(getfile))
def setUp(self): UnitTestCase.setUp(self) assert not os.system("rm -rf testfiles") os.makedirs('testfiles') self.backend = None self.local = path.Path('testfiles/local') self.local.writefileobj(StringIO.StringIO("hello"))
def test_get(self):
    if self.backend is None:
        return
    self.backend._put(self.local, 'a')
    getfile = path.Path('testfiles/getfile')
    self.backend._get('a', getfile)
    self.assertTrue(self.local.compare_data(getfile))
def test_corrupt_filelist(self):
    vi1 = manifest.VolumeInfo()
    vi1.set_info(3, (b"hello", ), None, (), None)
    vi2 = manifest.VolumeInfo()
    vi2.set_info(4, (b"goodbye", b"there"), None, (b"aoeusht", ), None)
    vi3 = manifest.VolumeInfo()
    vi3.set_info(34, (), None, (), None)

    m = manifest.Manifest()
    for vi in [vi1, vi2, vi3]:
        m.add_volume_info(vi)

    self.set_config(u'local_path', path.Path(u"Foobar"))
    m.set_dirinfo()
    m.set_files_changed_info([
        (b'one', b'new'),
        (b'two', b'changed'),
        (b'three', b'new'),
    ])

    # build manifest string
    s = m.to_string()

    # make filecount higher than files in list
    s2 = re.sub(b'Filelist 3', b'Filelist 5', s)
    m2 = manifest.Manifest().from_string(s2)
    assert hasattr(m2, u'corrupt_filelist')
def run_cycle(self, dirname_list): u"""Run diff/patch cycle on directories in dirname_list""" assert len(dirname_list) >= 2 seq_path = path.Path(u"testfiles/output/sequence") new_path = path.Path(dirname_list[0]) delta_path = path.Path(u"testfiles/output/delta.tar") sig_path = path.Path(u"testfiles/output/sig.tar") self.run_cmd(u"cp -pR %s %s" % (new_path.uc_name, seq_path.uc_name)) seq_path.setdata() self.run_rdiffdir(u"sig %s %s" % (seq_path.uc_name, sig_path.uc_name)) sig_path.setdata() assert sig_path.exists() # FIXME: How does this work? Path comparisons don't seem to work right # assert new_path.compare_recursive(seq_path, verbose = 1) for dirname in dirname_list[1:]: new_path = path.Path(dirname) # Make delta if delta_path.exists(): delta_path.delete() assert not delta_path.exists() self.run_rdiffdir( u"delta %s %s %s" % (sig_path.uc_name, new_path.uc_name, delta_path.uc_name)) delta_path.setdata() assert delta_path.exists() # patch and compare self.run_rdiffdir(u"patch %s %s" % (seq_path.uc_name, delta_path.uc_name)) seq_path.setdata() new_path.setdata() assert new_path.compare_recursive(seq_path, verbose=1) # Make new signature sig_path.delete() assert not sig_path.exists() self.run_rdiffdir(u"sig %s %s" % (seq_path.uc_name, sig_path.uc_name)) sig_path.setdata() assert sig_path.isreg()
def setUp(self):
    super(CollectionTest, self).setUp()
    self.unpack_testfiles()

    col_test_dir = path.Path(u"testfiles/collectionstest")
    archive_dir_path = col_test_dir.append(u"archive_dir")
    self.set_global(u'archive_dir_path', archive_dir_path)
    self.archive_dir_backend = backend.get_backend(u"file://testfiles/collectionstest"
                                                   u"/archive_dir")

    self.real_backend = backend.get_backend(u"file://%s/%s" %
                                            (col_test_dir.uc_name, u"remote_dir"))
    self.output_dir = path.Path(u"testfiles/output")  # used as a temp directory
    self.output_dir_backend = backend.get_backend(u"file://testfiles/output")
def try_basic(self, backend):
    """Try basic operations with given backend.

    Requires backend be empty at first, and all operations are
    allowed.
    """
    def cmp_list(l):
        """Assert that backend.list is same as l"""
        blist = backend.list()
        blist.sort()
        l.sort()
        assert blist == l, \
            ("Got list: %s Wanted: %s\n" % (repr(blist), repr(l)))

    # Identify test that's running
    print self.my_test_id, "... ",

    assert not os.system("rm -rf testfiles/backend_tmp")
    assert not os.system("mkdir testfiles/backend_tmp")

    regpath = path.Path("testfiles/various_file_types/regular_file")
    normal_file = "testfile"
    colonfile = ("file%swith.%scolons_-and%s%setc" %
                 ((globals.time_separator,) * 4))
    tmpregpath = path.Path("testfiles/backend_tmp/regfile")

    # Test list and put
    cmp_list([])
    backend.put(regpath, normal_file)
    cmp_list([normal_file])
    backend.put(regpath, colonfile)
    cmp_list([normal_file, colonfile])

    # Test get
    regfilebuf = regpath.open("rb").read()
    backend.get(colonfile, tmpregpath)
    backendbuf = tmpregpath.open("rb").read()
    assert backendbuf == regfilebuf

    # Test delete
    backend.delete([colonfile, normal_file])
    cmp_list([])
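# Illustrative sketch (an assumption for this edit, not part of the duplicity
# sources): a minimal in-memory stand-in that satisfies the four operations
# try_basic() exercises -- put(source_path, remote_name),
# get(remote_name, destination_path), list(), and delete(name_list).
# Real duplicity backends subclass duplicity.backend.Backend; this stub only
# shows the shape of the contract the test above relies on.
class InMemoryBackendStub(object):
    def __init__(self):
        self.files = {}

    def put(self, source_path, remote_filename):
        # store the file contents under its remote name
        self.files[remote_filename] = source_path.open("rb").read()

    def get(self, remote_filename, local_path):
        # write the stored contents out to the given local path.Path
        fp = local_path.open("wb")
        fp.write(self.files[remote_filename])
        fp.close()

    def list(self):
        return list(self.files.keys())

    def delete(self, filename_list):
        for filename in filename_list:
            del self.files[filename]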
def __init__(self, parsed_url):
    duplicity.backend.Backend.__init__(self, parsed_url)

    # The URL form "file:MyFile" is not a valid duplicity target.
    if not parsed_url.path.startswith(u'//'):
        raise BackendException(u"Bad file:// path syntax.")

    self.remote_pathdir = path.Path(parsed_url.path[2:])
    try:
        os.makedirs(self.remote_pathdir.base)
    except Exception:
        pass
def test_write_path(self):
    """Test reading and writing of statistics object"""
    p = path.Path("testfiles/statstest")
    if p.exists():
        p.delete()

    s = StatsObj()
    self.set_obj(s)
    s.write_stats_to_path(p)

    s2 = StatsObj()
    assert not s2.stats_equal(s)
    s2.read_stats_from_path(p)
    assert s2.stats_equal(s)
def run_cycle(self, dirname_list): """Run diff/patch cycle on directories in dirname_list""" assert len(dirname_list) >= 2 self.del_tmp() seq_path = path.Path("testfiles/output/sequence") new_path = path.Path(dirname_list[0]) delta_path = path.Path("testfiles/output/delta.tar") sig_path = path.Path("testfiles/output/sig.tar") self.run_cmd("cp -pR %s %s" % (new_path.name, seq_path.name)) seq_path.setdata() self.run_rdiffdir("sig %s %s" % (seq_path.name, sig_path.name)) sig_path.setdata() assert sig_path.exists() assert new_path.compare_recursive(seq_path, verbose=1) for dirname in dirname_list[1:]: new_path = path.Path(dirname) # Make delta if delta_path.exists(): delta_path.delete() assert not delta_path.exists() self.run_rdiffdir("delta %s %s %s" % (sig_path.name, new_path.name, delta_path.name)) delta_path.setdata() assert delta_path.exists() # patch and compare self.run_rdiffdir("patch %s %s" % (seq_path.name, delta_path.name)) seq_path.setdata() new_path.setdata() assert new_path.compare_recursive(seq_path, verbose=1) # Make new signature sig_path.delete() assert not sig_path.exists() self.run_rdiffdir("sig %s %s" % (seq_path.name, sig_path.name)) sig_path.setdata() assert sig_path.isreg()
def set_archive_dir(dirstring):
    """Check archive dir and set global"""
    if not os.path.exists(dirstring):
        try:
            os.makedirs(dirstring)
        except Exception:
            pass

    archive_dir = path.Path(dirstring)
    if not archive_dir.isdir():
        log.FatalError(_("Specified archive directory '%s' does not exist, "
                         "or is not a directory") % (util.ufn(archive_dir.name),),
                       log.ErrorCode.bad_archive_dir)

    globals.archive_dir = archive_dir
def test_gpg_signing_and_hidden_encryption(self):
    """Test to make sure GPG reports the proper signature key even with
    hidden encryption key id"""
    plaintext = "hello" * 50000

    signing_profile = gpg.GPGProfile(passphrase=self.sign_passphrase,
                                     sign_key=self.sign_key,
                                     hidden_recipients=[self.encrypt_key1])

    epath = path.Path("testfiles/output/encrypted_file")
    encrypted_signed_file = gpg.GPGFile(1, epath, signing_profile)
    encrypted_signed_file.write(plaintext)
    encrypted_signed_file.close()

    decrypted_file = gpg.GPGFile(0, epath, signing_profile)
    assert decrypted_file.read() == plaintext
    decrypted_file.close()

    sig = decrypted_file.get_signature()
    assert sig == self.sign_key, sig[-8:]
def test_gpg_signing(self):
    u"""Test to make sure GPG reports the proper signature key"""
    plaintext = b"hello" * 50000

    signing_profile = gpg.GPGProfile(passphrase=self.sign_passphrase,
                                     sign_key=self.sign_key,
                                     recipients=[self.encrypt_key1])

    epath = path.Path(u"testfiles/output/encrypted_file")
    encrypted_signed_file = gpg.GPGFile(1, epath, signing_profile)
    encrypted_signed_file.write(plaintext)
    encrypted_signed_file.close()

    decrypted_file = gpg.GPGFile(0, epath, signing_profile)
    assert decrypted_file.read() == plaintext
    decrypted_file.close()

    sig = decrypted_file.get_signature()
    assert sig == self.sign_key, sig
def test_basic(self):
    vi1 = manifest.VolumeInfo()
    vi1.set_info(3, ("hello", ), None, (), None)
    vi2 = manifest.VolumeInfo()
    vi2.set_info(4, ("goodbye", "there"), None, ("aoeusht", ), None)
    vi3 = manifest.VolumeInfo()
    vi3.set_info(34, (), None, (), None)

    m = manifest.Manifest()
    for vi in [vi1, vi2, vi3]:
        m.add_volume_info(vi)

    self.set_global('local_path', path.Path("Foobar"))
    m.set_dirinfo()

    s = m.to_string()
    assert s.lower().startswith("hostname")
    assert s.endswith("\n")

    m2 = manifest.Manifest().from_string(s)
    assert m == m2
def test_long_filenames(self): """Test backing up a directory with long filenames in it""" lf_dir = path.Path("testfiles/long_filenames") if lf_dir.exists(): lf_dir.deltree() lf_dir.mkdir() lf1 = lf_dir.append( "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" ) lf1.mkdir() lf2 = lf1.append( "BBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB" ) lf2.mkdir() lf3 = lf2.append( "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC" ) lf3.mkdir() lf4 = lf3.append( "DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD" ) lf4.touch() lf4_1 = lf3.append( "SYMLINK--------------------------------------------------------------------------------------------" ) os.symlink( "SYMLINK-DESTINATION-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------", lf4_1.name) lf4_1.setdata() assert lf4_1.issym() lf4_2 = lf3.append( "DDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDDD" ) fp = lf4_2.open("wb") fp.write("hello" * 1000) assert not fp.close() self.runtest([ "testfiles/empty_dir", lf_dir.name, "testfiles/empty_dir", lf_dir.name ])
def check_same(self, filename1, filename2):
    u"""Verify two filenames are the same"""
    path1, path2 = path.Path(filename1), path.Path(filename2)
    assert path1.compare_recursive(path2, verbose=1)
def test_sha(self):
    testhash = gpg.get_hash(
        u"SHA1",
        path.Path(u"{0}/testfiles/various_file_types/regular_file".format(_runtest_dir)))
    assert testhash == u"886d722999862724e1e62d0ac51c468ee336ef8e", testhash
def test_sha(self):
    hash = gpg.get_hash(u"SHA1",
                        path.Path(u"testfiles/various_file_types/regular_file"))
    assert hash == u"886d722999862724e1e62d0ac51c468ee336ef8e", hash
def GPGWriteFile(block_iter, filename, profile,
                 size=200 * 1024 * 1024, max_footer_size=16 * 1024):
    u"""
    Write GPG compressed file of given size

    This function writes a gpg compressed file by reading from the
    input iter and writing to filename.  When it has read an amount
    close to the size limit, it "tops off" the incoming data with
    incompressible data, to try to hit the limit exactly.

    block_iter should have methods .next(size), which returns the next
    block of data, which should be at most size bytes long.  Also
    .get_footer() returns a string to write at the end of the input
    file.  The footer should have max length max_footer_size.

    Because gpg uses compression, we don't assume that putting
    bytes_in bytes into gpg will result in bytes_out = bytes_in out.
    However, do assume that bytes_out <= bytes_in approximately.

    Returns true if succeeded in writing until end of block_iter.
    """
    # workaround for circular module imports
    from duplicity import path

    def top_off(bytelen, file):
        u"""
        Add bytelen of incompressible data to the gpg input stream
        (file.gpg_input)

        In this case we take the incompressible data from the
        beginning of filename (it should contain enough because
        size >> largest block size).
        """
        incompressible_fp = open(filename, u"rb")
        assert util.copyfileobj(incompressible_fp, file.gpg_input, bytelen) == bytelen
        incompressible_fp.close()

    def get_current_size():
        return os.stat(filename).st_size

    target_size = size - 50 * 1024  # fudge factor, compensate for gpg buffering
    data_size = target_size - max_footer_size
    file = GPGFile(True, path.Path(filename), profile)
    at_end_of_blockiter = 0
    try:
        while True:
            bytes_to_go = data_size - get_current_size()
            if bytes_to_go < block_iter.get_read_size():
                break
            try:
                data = block_iter.__next__().data
            except StopIteration:
                at_end_of_blockiter = 1
                break
            file.write(data)

        file.write(block_iter.get_footer())
        if not at_end_of_blockiter:  # don't pad last volume
            cursize = get_current_size()
            if cursize < target_size:
                top_off(target_size - cursize, file)
        file.close()
        return at_end_of_blockiter
    except Exception:
        # ensure that GPG processing terminates
        file.close()
        raise
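# Illustrative sketch (an assumption for this edit, not part of the duplicity
# sources): a minimal block iterator satisfying the protocol GPGWriteFile
# expects -- get_read_size(), get_footer(), and __next__() returning an
# object with a .data attribute.  The commented-out call at the bottom shows
# how it might be used; the passphrase and output path there are made up for
# the example.
import io


class _Block(object):
    def __init__(self, data):
        self.data = data


class LiteralBlockIter(object):
    u"""Yield fixed-size chunks of a byte string, then stop."""
    def __init__(self, data, blocksize=64 * 1024):
        self.fp = io.BytesIO(data)
        self.blocksize = blocksize

    def get_read_size(self):
        # upper bound on the size of the next block
        return self.blocksize

    def get_footer(self):
        # nothing to append after the data blocks in this sketch
        return b""

    def __iter__(self):
        return self

    def __next__(self):
        data = self.fp.read(self.blocksize)
        if not data:
            raise StopIteration
        return _Block(data)


# profile = gpg.GPGProfile(passphrase=u"test-passphrase")   # assumed passphrase
# done = GPGWriteFile(LiteralBlockIter(b"hello" * 100000),
#                     u"testfiles/output/example.gpg", profile,
#                     size=1024 * 1024)
# assert done  # the iterator was exhausted before hitting the size limit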
"duplicity-full.2002-08-17T16:17:01-07:00.vol4.difftar.gpg", "duplicity-full.2002-08-17T16:17:01-07:00.vol5.difftar.gpg", "duplicity-full.2002-08-17T16:17:01-07:00.vol6.difftar.gpg", "duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.manifest.gpg", "duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.vol1.difftar.gpg", "The following are extraneous duplicity files", "duplicity-new-signatures.2001-08-17T02:05:13-05:00.to.2002-08-17T05:05:14-05:00.sigtar.gpg", "duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg", "duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg", "duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg", "Extra stuff to be ignored" ] assert not os.system("tar xzf testfiles.tar.gz > /dev/null 2>&1") col_test_dir = path.Path("testfiles/collectionstest") archive_dir = col_test_dir.append("archive_dir") globals.archive_dir = archive_dir archive_dir_backend = backend.get_backend("file://testfiles/collectionstest" "/archive_dir") dummy_backend = None real_backend = backend.get_backend("file://%s/%s" % (col_test_dir.name, "remote_dir")) output_dir = path.Path("testfiles/output") # used as a temp directory output_dir_backend = backend.get_backend("file://testfiles/output") class CollectionTest(unittest.TestCase): """Test collections""" def setUp(self):