def cryptdisks_start_helper(self, emulated):
    """
    Test cryptdisks_start integration and emulation.

    :param emulated: :data:`True` to force the pure-Python fallback code path
                     (the ``cryptdisks_start`` program is hidden from the
                     context), :data:`False` to exercise the real command
                     line programs.

    This test requires the following line to be present in ``/etc/crypttab``::

        linux-utils /tmp/linux-utils.img /tmp/linux-utils.key discard,luks,noauto,readonly,tries=1
    """
    # Skip unless /etc/crypttab contains the expected LUKS test entry.
    if not any(entry.target == TEST_TARGET_NAME and
               entry.source == TEST_IMAGE_FILE and
               entry.key_file == TEST_KEY_FILE and
               'luks' in entry.options
               for entry in parse_crypttab()):
        return self.skipTest("/etc/crypttab isn't set up to test cryptdisks_start!")
    context = LocalContext()
    if emulated:
        # Disable the use of the `cryptdisks_start' program so that the
        # Python reimplementation is exercised instead.
        context.find_program = MagicMock(return_value=[])
    # Generate the key file.
    with TemporaryKeyFile(filename=TEST_KEY_FILE):
        # Create the image file and the encrypted filesystem.
        create_image_file(filename=TEST_IMAGE_FILE, size=coerce_size('10 MiB'))
        create_encrypted_filesystem(device_file=TEST_IMAGE_FILE, key_file=TEST_KEY_FILE)
        # Make sure the mapped device file doesn't exist yet.
        assert not os.path.exists(TEST_TARGET_DEVICE)
        # Unlock the encrypted filesystem using `cryptdisks_start'.
        if emulated:
            cryptdisks_start(context=context, target=TEST_TARGET_NAME)
        else:
            returncode, output = run_cli(cryptdisks_start_cli, TEST_TARGET_NAME)
            assert returncode == 0
        # Make sure the mapped device file has appeared.
        assert os.path.exists(TEST_TARGET_DEVICE)
        # Unlock the encrypted filesystem again (this should be a no-op).
        cryptdisks_start(context=context, target=TEST_TARGET_NAME)
        # Make sure the mapped device file still exists.
        assert os.path.exists(TEST_TARGET_DEVICE)
        # Lock the filesystem before we finish.
        if emulated:
            cryptdisks_stop(context=context, target=TEST_TARGET_NAME)
        else:
            returncode, output = run_cli(cryptdisks_stop_cli, TEST_TARGET_NAME)
            assert returncode == 0
        # Make sure the mapped device file has disappeared.
        assert not os.path.exists(TEST_TARGET_DEVICE)
        # Lock the filesystem again (this should be a no-op).
        cryptdisks_stop(context=context, target=TEST_TARGET_NAME)
        # Make sure the mapped device file is still gone.
        assert not os.path.exists(TEST_TARGET_DEVICE)
        # Test the error handling: unknown targets raise ValueError in the
        # emulated code path and ExternalCommandFailed otherwise.
        for function in cryptdisks_start, cryptdisks_stop:
            self.assertRaises(
                ValueError if emulated else ExternalCommandFailed,
                function,
                context=context,
                target=TEST_UNKNOWN_TARGET,
            )
def test_argument_validation(self):
    """Test argument validation."""
    # An unsupported ionice scheduling class must be rejected.
    exit_code, cli_output = run_cli(main, '--ionice=unsupported-class')
    assert exit_code != 0
    # A malformed rotation scheme must be rejected.
    exit_code, cli_output = run_cli(main, '--hourly=not-a-number')
    assert exit_code != 0
    # Argument validation tests that require an empty directory.
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        # A directory that doesn't exist must be reported as an error.
        missing_directory = os.path.join(root, 'does-not-exist')
        exit_code, cli_output = run_cli(main, missing_directory)
        assert exit_code != 0
        # Loading a nonexistent configuration file must raise an exception.
        missing_config = os.path.join(root, 'rotate-backups.ini')
        self.assertRaises(ValueError, lambda: list(load_config_file(missing_config)))
        # Rotating with an empty rotation scheme must raise an exception.
        self.create_sample_backup_set(root)
        self.assertRaises(ValueError, lambda: RotateBackups(rotation_scheme={}).rotate_backups(root))
    # The following check assumes the test suite isn't running as root and
    # that unprivileged users cannot access /root.
    if os.getuid() != 0:
        exit_code, cli_output = run_cli(main, '-n', '/root')
        assert exit_code != 0
def test_argument_validation(self):
    """Test argument validation done by setters of :class:`py2deb.converter.PackageConverter`."""
    converter = self.create_isolated_converter()
    # Every setter must reject invalid values with a ValueError.
    invalid_setter_calls = [
        (converter.set_repository, '/foo/bar/baz'),
        (converter.set_name_prefix, ''),
        (converter.rename_package, 'old-name', ''),
        (converter.rename_package, '', 'new-name'),
        (converter.set_install_prefix, ''),
        (converter.install_alternative, 'link', ''),
        (converter.install_alternative, '', 'path'),
        (converter.set_conversion_command, 'package-name', ''),
        (converter.set_conversion_command, '', 'command'),
    ]
    for call in invalid_setter_calls:
        self.assertRaises(ValueError, *call)
    # An unsupported command line option must be rejected.
    exit_code, output = run_cli(main, '--unsupported-option')
    assert exit_code != 0
    # A nonexistent control file must be reported as an error.
    exit_code, output = run_cli(
        main, '--report-dependencies',
        '/tmp/definitely-not-an-existing-control-file')
    assert exit_code != 0
    # A nonexistent configuration file (named through the environment)
    # must be reported as an error.
    os.environ['PY2DEB_CONFIG'] = '/tmp/definitely-not-an-existing-configuration-file'
    try:
        exit_code, output = run_cli(main)
        assert exit_code != 0
    finally:
        del os.environ['PY2DEB_CONFIG']
def test_rotate_concurrent(self):
    """Test the :func:`.rotate_concurrent()` function."""
    # The backups that rotation is expected to preserve
    # (the same expectations as in test_rotate_backups).
    expected_survivors = {
        '2013-10-10@20:07',  # monthly, yearly (1)
        '2013-11-01@20:06',  # monthly (2)
        '2013-12-01@20:07',  # monthly (3)
        '2014-01-01@20:07',  # monthly (4), yearly (2)
        '2014-02-01@20:05',  # monthly (5)
        '2014-03-01@20:04',  # monthly (6)
        '2014-04-01@20:03',  # monthly (7)
        '2014-05-01@20:06',  # monthly (8)
        '2014-06-01@20:01',  # monthly (9)
        '2014-06-09@20:01',  # weekly (1)
        '2014-06-16@20:02',  # weekly (2)
        '2014-06-23@20:04',  # weekly (3)
        '2014-06-26@20:04',  # daily (1)
        '2014-06-27@20:02',  # daily (2)
        '2014-06-28@20:02',  # daily (3)
        '2014-06-29@20:01',  # daily (4)
        '2014-06-30@20:03',  # daily (5), weekly (4)
        '2014-07-01@20:02',  # daily (6), monthly (10)
        '2014-07-02@20:03',  # hourly (1), daily (7)
        'some-random-directory',  # no recognizable time stamp, should definitely be preserved
    }
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        self.create_sample_backup_set(root)
        # The --parallel option routes rotation through rotate_concurrent().
        run_cli(
            main, '--verbose', '--hourly=24', '--daily=7', '--weekly=4',
            '--monthly=12', '--yearly=always', '--parallel', root,
        )
        assert set(os.listdir(root)) == expected_survivors
def test_argument_validation(self):
    """Test argument validation."""
    # An unsupported ionice scheduling class must be rejected.
    exit_code, cli_output = run_cli(main, '--ionice=unsupported-class')
    assert exit_code != 0
    # A malformed rotation scheme must be rejected.
    exit_code, cli_output = run_cli(main, '--hourly=not-a-number')
    assert exit_code != 0
    # Argument validation tests that require an empty directory.
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        # A directory that doesn't exist must be reported as an error.
        missing_directory = os.path.join(root, 'does-not-exist')
        exit_code, cli_output = run_cli(main, missing_directory)
        assert exit_code != 0
        # Loading a nonexistent configuration file must raise an exception.
        missing_config = os.path.join(root, 'rotate-backups.ini')
        self.assertRaises(ValueError, lambda: list(load_config_file(missing_config)))
        # Rotating with an empty rotation scheme must raise an exception.
        self.create_sample_backup_set(root)
        self.assertRaises(ValueError, lambda: RotateBackups(rotation_scheme={}).rotate_backups(root))
    # The following check assumes the test suite isn't running as root and
    # that unprivileged users cannot access /root.
    if os.getuid() != 0:
        exit_code, cli_output = run_cli(main, '-n', '/root')
        assert exit_code != 0
def test_minutely_rotation(self):
    """Test rotation with multiple backups per hour."""
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        backup_names = [
            'backup-2016-01-10_21-15-00',
            'backup-2016-01-10_21-30-00',
            'backup-2016-01-10_21-45-00',
        ]
        for backup_name in backup_names:
            os.mkdir(os.path.join(root, backup_name))
        run_cli(main, '--prefer-recent', '--relaxed', '--minutely=2', root)
        # The oldest backup should be rotated away ...
        assert not os.path.exists(os.path.join(root, backup_names[0]))
        # ... while the two most recent backups are preserved.
        assert os.path.exists(os.path.join(root, backup_names[1]))
        assert os.path.exists(os.path.join(root, backup_names[2]))
def test_prefer_new(self):
    """Test the alternative preference for the newest backup in each time slot."""
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        backup_names = [
            'backup-2016-01-10_21-15-00',
            'backup-2016-01-10_21-30-00',
            'backup-2016-01-10_21-45-00',
        ]
        for backup_name in backup_names:
            os.mkdir(os.path.join(root, backup_name))
        run_cli(main, '--hourly=1', '--prefer-recent', root)
        # With --prefer-recent only the newest backup in the hour survives.
        assert not os.path.exists(os.path.join(root, backup_names[0]))
        assert not os.path.exists(os.path.join(root, backup_names[1]))
        assert os.path.exists(os.path.join(root, backup_names[2]))
def test_relaxed_rotation(self):
    """Test relaxed rotation."""
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        backup_names = [
            'galera_backup_db4.sl.example.lab_2016-03-17_10-00',
            'galera_backup_db4.sl.example.lab_2016-03-17_12-00',
            'galera_backup_db4.sl.example.lab_2016-03-17_16-00',
        ]
        for backup_name in backup_names:
            os.mkdir(os.path.join(root, backup_name))
        run_cli(main, '--hourly=3', '--daily=1', '--relaxed', root)
        # In relaxed mode all three backups should be preserved.
        for backup_name in backup_names:
            assert os.path.exists(os.path.join(root, backup_name))
def test_invalid_arguments(self):
    """Test the handling of incorrect command line arguments."""
    # Giving more than two positional arguments should report an error.
    exit_code, merged_output = run_cli(main, 'a', 'b', 'c', merged=True)
    assert exit_code != 0
    assert "Error" in merged_output
    # An invalid `ionice' value should likewise report an error.
    exit_code, merged_output = run_cli(main, '--ionice=foo', merged=True)
    assert exit_code != 0
    assert "Error" in merged_output
def test_dry_run(self):
    """Make sure dry run doesn't remove any backups."""
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        self.create_sample_backup_set(root)
        # Rotation with --dry-run should leave every backup in place.
        run_cli(
            main, '--dry-run', '--verbose', '--daily=7', '--weekly=7',
            '--monthly=12', '--yearly=always', root,
        )
        assert set(os.listdir(root)) == SAMPLE_BACKUP_SET
def test_check_package(self):
    """Test the command line interface for static analysis of package archives."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        root_package, conflicting_package = self.create_version_conflict(directory)
        # The version conflict should cause a nonzero exit code
        # (run_cli() translates SystemExit into a return code).
        exit_code, output = run_cli(main, '--check', root_package)
        assert exit_code != 0
        # Remove the conflicting archive; the check should now pass.
        os.unlink(conflicting_package)
        exit_code, output = run_cli(main, '--check', root_package)
        assert exit_code == 0
def test_check_package(self):
    """Test the command line interface for static analysis of package archives."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        root_package, conflicting_package = self.create_version_conflict(directory)
        # The version conflict should cause a nonzero exit code
        # (run_cli() translates SystemExit into a return code).
        exit_code, output = run_cli(main, '--check', root_package)
        assert exit_code != 0
        # Remove the conflicting archive; the check should now pass.
        os.unlink(conflicting_package)
        exit_code, output = run_cli(main, '--check', root_package)
        assert exit_code == 0
def test_exclude_list(self):
    """Test exclude list logic."""
    # The backups that rotation is expected to preserve. Behind each backup
    # is the rotation scheme it falls under and a running count of preserved
    # backups within that scheme (counting up in chronological order).
    expected_survivors = {
        '2013-10-10@20:07',  # monthly (1), yearly (1)
        '2013-11-01@20:06',  # monthly (2)
        '2013-12-01@20:07',  # monthly (3)
        '2014-01-01@20:07',  # monthly (4), yearly (2)
        '2014-02-01@20:05',  # monthly (5)
        '2014-03-01@20:04',  # monthly (6)
        '2014-04-01@20:03',  # monthly (7)
        '2014-05-01@20:06',  # monthly (8)
        '2014-05-19@20:02',  # weekly (1)
        '2014-05-26@20:05',  # weekly (2)
        '2014-06-01@20:01',  # monthly (9)
        '2014-06-09@20:01',  # weekly (3)
        '2014-06-16@20:02',  # weekly (4)
        '2014-06-23@20:04',  # weekly (5)
        '2014-06-26@20:04',  # daily (1)
        '2014-06-27@20:02',  # daily (2)
        '2014-06-28@20:02',  # daily (3)
        '2014-06-29@20:01',  # daily (4)
        '2014-06-30@20:03',  # daily (5), weekly (6)
        '2014-07-01@20:02',  # daily (6), monthly (10)
        '2014-07-02@20:03',  # hourly (1), daily (7)
        'some-random-directory',  # no recognizable time stamp, should definitely be preserved
    }
    # Everything from May 2014 is excluded from rotation below and is
    # therefore also expected to be preserved.
    expected_survivors |= set(
        name for name in SAMPLE_BACKUP_SET if name.startswith('2014-05-')
    )
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        self.create_sample_backup_set(root)
        run_cli(
            main, '--verbose', '--ionice=idle', '--hourly=24', '--daily=7',
            '--weekly=4', '--monthly=12', '--yearly=always',
            '--exclude=2014-05-*', root,
        )
        assert set(os.listdir(root)) == expected_survivors
def test_minutely_rotation(self):
    """Test rotation with multiple backups per hour."""
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        backup_names = [
            'backup-2016-01-10_21-15-00',
            'backup-2016-01-10_21-30-00',
            'backup-2016-01-10_21-45-00',
        ]
        for backup_name in backup_names:
            os.mkdir(os.path.join(root, backup_name))
        run_cli(main, '--prefer-recent', '--relaxed', '--minutely=2', root)
        # The oldest backup should be rotated away ...
        assert not os.path.exists(os.path.join(root, backup_names[0]))
        # ... while the two most recent backups are preserved.
        assert os.path.exists(os.path.join(root, backup_names[1]))
        assert os.path.exists(os.path.join(root, backup_names[2]))
def test_prefer_new(self):
    """Test the alternative preference for the newest backup in each time slot."""
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        backup_names = [
            'backup-2016-01-10_21-15-00',
            'backup-2016-01-10_21-30-00',
            'backup-2016-01-10_21-45-00',
        ]
        for backup_name in backup_names:
            os.mkdir(os.path.join(root, backup_name))
        run_cli(main, '--hourly=1', '--prefer-recent', root)
        # With --prefer-recent only the newest backup in the hour survives.
        assert not os.path.exists(os.path.join(root, backup_names[0]))
        assert not os.path.exists(os.path.join(root, backup_names[1]))
        assert os.path.exists(os.path.join(root, backup_names[2]))
def test_refuse_to_overwrite(self):
    """Test that local modifications are not overwritten."""
    with TemporaryDirectory() as scratch_directory:
        config_file = os.path.join(scratch_directory, 'config')
        # Seed the configuration file with known content.
        write_file(config_file, "Original content.\n")
        # The initial run through the command line interface should succeed.
        exit_code, output = run_cli(main, config_file)
        assert exit_code == 0
        # Simulate a local modification of the generated file.
        write_file(config_file, "Not the same thing.\n")
        # The second run should detect the modification and refuse to run.
        exit_code, output = run_cli(main, config_file, merged=True)
        assert exit_code != 0
        assert "refusing to overwrite" in output
def test_refuse_to_overwrite(self):
    """Test that local modifications are not overwritten."""
    with TemporaryDirectory() as scratch_directory:
        config_file = os.path.join(scratch_directory, 'config')
        # Seed the configuration file with known content.
        write_file(config_file, "Original content.\n")
        # The initial run through the command line interface should succeed.
        exit_code, output = run_cli(main, config_file)
        assert exit_code == 0
        # Simulate a local modification of the generated file.
        write_file(config_file, "Not the same thing.\n")
        # The second run should detect the modification and refuse to run.
        exit_code, output = run_cli(main, config_file, merged=True)
        assert exit_code != 0
        assert "refusing to overwrite" in output
def test_sum_revision_numbers(self):
    """
    Test :func:`vcs_repo_mgr.sum_revision_numbers()`.

    Also exercises the `vcs-tool --sum-revisions' command line interface and
    verifies that the summed revision number increases after a new commit.
    """
    with MockedHomeDirectory() as home:
        # Prepare two local repositories.
        repo_one = GitRepo(author=AUTHOR_COMBINED, bare=False, local=os.path.join(home, 'repo-one'))
        repo_two = HgRepo(author=AUTHOR_COMBINED, bare=False, local=os.path.join(home, 'repo-two'))
        # Create an initial commit in each of the repositories.
        for repo in repo_one, repo_two:
            repo.create()
            repo.context.write_file('README', "This is a %s repository.\n" % repo.friendly_name)
            repo.add_files('README')
            repo.commit("Initial commit")
        # Check the argument validation in sum_revision_numbers().
        self.assertRaises(ValueError, sum_revision_numbers, repo_one.local)
        # Prepare a configuration file so we can test the command line interface.
        prepare_config({
            'repo-one': {
                'type': repo_one.ALIASES[0],
                'local': repo_one.local,
            },
            'repo-two': {
                'type': repo_two.ALIASES[0],
                'local': repo_two.local,
            },
        })
        # Make sure `vcs-tool --sum-revisions' works.
        # (assertEquals() is a deprecated alias of assertEqual(), removed in
        # Python 3.12, hence the modern spelling.)
        returncode, output = run_cli(
            main, '--sum-revisions',
            'repo-one', repo_one.default_revision,
            'repo-two', repo_two.default_revision,
        )
        self.assertEqual(returncode, 0)
        initial_summed_revision_number = int(output)
        self.assertEqual(initial_summed_revision_number, sum([
            repo_one.find_revision_number(),
            repo_two.find_revision_number(),
        ]))
        # Create an additional commit.
        repo_one.context.write_file('README', "Not the same contents.\n")
        repo_one.commit("Additional commit")
        # Make sure the revision number has increased.
        returncode, output = run_cli(
            main, '--sum-revisions',
            'repo-one', repo_one.default_revision,
            'repo-two', repo_two.default_revision,
        )
        # Bug fix: the exit code of the second invocation was never checked,
        # so a CLI failure would previously surface as a confusing
        # ValueError from int() instead of a clear assertion failure.
        self.assertEqual(returncode, 0)
        updated_summed_revision_number = int(output)
        assert updated_summed_revision_number > initial_summed_revision_number
def test_cli_quiet(self): """Test copying of a password without echoing the entry's text.""" # Generate a password and some additional text for a dummy password store entry. a_password = random_string() additional_text = random_string() raw_entry = a_password + "\n\n" + additional_text # Prepare a mock method to test that the password is copied, # but without actually invoking the `pass' program. copy_password_method = MagicMock() # Some voodoo to mock methods in classes that # have yet to be instantiated follows :-). mocked_class = type("TestPasswordEntry", (PasswordEntry, ), dict(text=raw_entry)) setattr(mocked_class, "copy_password", copy_password_method) with PatchedAttribute(qpass, "PasswordEntry", mocked_class): with PatchedAttribute(cli, "is_clipboard_supported", lambda: True): with TemporaryDirectory() as directory: touch(os.path.join(directory, "foo.gpg")) returncode, output = run_cli( main, "--password-store=%s" % directory, "--quiet", "foo") # Make sure the command succeeded. assert returncode == 0 # Make sure the password was copied to the clipboard. assert copy_password_method.called # Make sure no output was generated. assert not output.strip()
def test_conversion_with_configuration_file(self): """ Convert a group of packages based on the settings in a configuration file. Repeats the same test as :func:`test_conversion_of_isolated_packages()` but instead of using command line options the conversion process is configured using a configuration file. """ # Use a temporary directory as py2deb's repository directory so that we # can easily find the *.deb archive generated by py2deb. with TemporaryDirectory() as directory: configuration_file = os.path.join(directory, 'py2deb.ini') with open(configuration_file, 'w') as handle: handle.write(dedent(''' [py2deb] repository = {repository} name-prefix = pip-accel install-prefix = /usr/lib/pip-accel auto-install = false [alternatives] /usr/bin/pip-accel = /usr/lib/pip-accel/bin/pip-accel [package:pip-accel] no-name-prefix = true [package:coloredlogs] rename = pip-accel-coloredlogs-renamed ''', repository=directory)) # Run the conversion command. exit_code, output = run_cli(main, '--config=%s' % configuration_file, 'pip-accel==0.12.6') assert exit_code == 0 # Check the results. self.check_converted_pip_accel_packages(directory)
def test_natural_order(self):
    """Verify the natural order sorting of the snippets in the configuration file."""
    first = "This should be the first line.\n"
    middle = "This should appear in the middle.\n"
    last = "This should be the last line.\n"
    with TemporaryDirectory() as scratch_directory:
        config_file = os.path.join(scratch_directory, 'config')
        snippet_directory = '%s.d' % config_file
        # Create the configuration file and the snippet directory.
        write_file(config_file)
        os.makedirs(snippet_directory)
        # The numeric prefixes sort naturally as 1 < 5 < 10 even though
        # '10' would sort before '5' lexicographically.
        for number, text in ((1, first), (5, middle), (10, last)):
            write_file(os.path.join(snippet_directory, '%i.conf' % number), text)
        # Regenerate the configuration file from the snippets.
        exit_code, output = run_cli(main, config_file)
        assert exit_code == 0
        # The configuration file should exist and be nonempty.
        assert os.path.isfile(config_file)
        assert os.path.getsize(config_file) > 0
        with open(config_file) as handle:
            lines = handle.readlines()
        # All snippets should be present ...
        assert first in lines
        assert middle in lines
        assert last in lines
        # ... in natural order instead of lexicographical order.
        assert lines.index(first) < lines.index(middle)
        assert lines.index(middle) < lines.index(last)
def test_package_building(self, repository=None, overrides=None, contents=None):
    """
    Test building of Debian binary packages.

    :param repository: Optional directory to move the generated archive into
                       (the archive is deleted on cleanup otherwise).
    :param overrides: Optional mapping with control field overrides.
    :param contents: Optional mapping of relative filenames to file contents.
    :returns: The pathname of the generated ``*.deb`` archive.
    """
    # Bug fix: the original signature used mutable default arguments
    # (`overrides={}, contents={}') which share a single dict between calls;
    # None sentinels preserve the interface without that pitfall.
    overrides = {} if overrides is None else overrides
    contents = {} if contents is None else contents
    with Context() as finalizers:
        build_directory = finalizers.mkdtemp()
        control_fields = merge_control_fields(TEST_PACKAGE_FIELDS, overrides)
        # Create the package template.
        os.mkdir(os.path.join(build_directory, 'DEBIAN'))
        with open(os.path.join(build_directory, 'DEBIAN', 'control'), 'wb') as handle:
            control_fields.dump(handle)
        if contents:
            # Populate the package with the caller-supplied files.
            for filename, data in contents.items():
                filename = os.path.join(build_directory, filename)
                directory = os.path.dirname(filename)
                makedirs(directory)
                with open(filename, 'w') as handle:
                    handle.write(data)
        else:
            # Declare two configuration files (one of which won't exist).
            with open(os.path.join(build_directory, 'DEBIAN', 'conffiles'), 'wb') as handle:
                handle.write(b'/etc/file1\n')
                handle.write(b'/etc/file2\n')
            # Create the directory with configuration files.
            os.mkdir(os.path.join(build_directory, 'etc'))
            touch(os.path.join(build_directory, 'etc', 'file1'))
            touch(os.path.join(build_directory, 'etc', 'file3'))
        # Create a directory that should be cleaned up by clean_package_tree().
        makedirs(os.path.join(build_directory, 'tmp', '.git'))
        # Create a file that should be cleaned up by clean_package_tree().
        with open(os.path.join(build_directory, 'tmp', '.gitignore'), 'w') as handle:
            handle.write('\n')
        # Build the package.
        returncode, output = run_cli(main, '--build', build_directory)
        assert returncode == 0
        package_file = os.path.join(tempfile.gettempdir(),
                                    '%s_%s_%s.deb' % (control_fields['Package'],
                                                      control_fields['Version'],
                                                      control_fields['Architecture']))
        assert os.path.isfile(package_file)
        if repository:
            shutil.move(package_file, repository)
            return os.path.join(repository, os.path.basename(package_file))
        else:
            finalizers.register(os.unlink, package_file)
            # Verify the package metadata.
            fields, contents = inspect_package(package_file)
            for name in TEST_PACKAGE_FIELDS:
                assert fields[name] == TEST_PACKAGE_FIELDS[name]
            # Verify that the package contains the `/' and `/tmp'
            # directories (since it doesn't contain any actual files).
            assert contents['/'].permissions[0] == 'd'
            assert contents['/'].permissions[1:] == 'rwxr-xr-x'
            assert contents['/'].owner == 'root'
            assert contents['/'].group == 'root'
            assert contents['/tmp/'].permissions[0] == 'd'
            assert contents['/tmp/'].owner == 'root'
            assert contents['/tmp/'].group == 'root'
            # Verify that clean_package_tree() cleaned up properly
            # (`/tmp/.git' and `/tmp/.gitignore' have been cleaned up).
            assert '/tmp/.git/' not in contents
            assert '/tmp/.gitignore' not in contents
            return package_file
def test_collect_packages_concurrent(self):
    """Test concurrent collection of related packages."""
    with Context() as finalizers:
        source_directory = finalizers.mkdtemp()
        target_directory = finalizers.mkdtemp()
        # Build two top level packages plus the two packages they depend on.
        package_specs = [
            dict(Package='package-1', Depends='package-3'),
            dict(Package='package-2', Depends='package-4'),
            dict(Package='package-3'),
            dict(Package='package-4'),
        ]
        archives = [
            self.test_package_building(source_directory, overrides=spec)
            for spec in package_specs
        ]
        # Run `deb-pkg-tools --collect' on the two top level packages.
        returncode, output = run_cli(
            main, '--collect=%s' % target_directory, '--yes',
            archives[0], archives[1],
        )
        assert returncode == 0
        # All four packages (including dependencies) should be promoted.
        expected_basenames = sorted(os.path.basename(fn) for fn in archives)
        assert sorted(os.listdir(target_directory)) == expected_basenames
def test_repository_activation(self):
    """
    Test the activation of trivial repositories.

    Requires superuser privileges (apt configuration is modified) and is
    skipped when ``SKIP_SLOW_TESTS`` is set.
    """
    if SKIP_SLOW_TESTS:
        return self.skipTest("skipping slow tests")
    elif os.getuid() != 0:
        return self.skipTest("need superuser privileges")
    repository = self.test_repository_creation(preserve=True)
    returncode, output = run_cli(main, '-vv', '--activate-repo=%s' % repository)
    assert returncode == 0
    try:
        # Bug fix: the pipe opened by os.popen() was never closed, leaking a
        # file descriptor; the `with' statement closes it deterministically.
        with os.popen('apt-cache show %s' % TEST_PACKAGE_NAME) as handle:
            fields = Deb822(handle)
        # The test package should now be known to apt.
        assert fields['Package'] == TEST_PACKAGE_NAME
    finally:
        # Always deactivate the repository, even when the check fails.
        returncode, output = run_cli(main, '-vv', '--deactivate-repo=%s' % repository)
        assert returncode == 0
def test_export(self):
    """Test exporting of revisions."""
    with TemporaryDirectory() as directory:
        # Change the current working directory to our temporary directory
        # so that we can give a relative pathname to export(). This is a
        # regression test for a bug that was fixed in vcs-repo-mgr 4.1.3.
        os.chdir(directory)
        try:
            # Initialize a repository object of the parametrized type.
            repository = self.get_instance(bare=False, local=os.path.join(directory, 'repo'))
            repository.create()
            # Commit a file to the repository.
            versioned_filename = random_string(10)
            versioned_contents = random_string(250)
            self.commit_file(
                repository=repository,
                filename=versioned_filename,
                contents=versioned_contents,
                message="Initial commit",
            )
            # Export the initial revision.
            # (assertEquals()/assertTrue checks use the modern assertEqual
            # spelling because assertEquals was removed in Python 3.12.)
            export_directory_relative = 'export'
            export_directory_absolute = os.path.join(directory, export_directory_relative)
            returncode, output = run_cli(
                main,
                '--repository=%s' % repository.local,
                '--export=%s' % export_directory_relative,
            )
            self.assertEqual(returncode, 0)
            # Check that the file we committed was exported.
            exported_file = os.path.join(export_directory_absolute, versioned_filename)
            self.assertTrue(os.path.isfile(exported_file))
            with codecs.open(exported_file, 'r', 'UTF-8') as handle:
                self.assertEqual(handle.read(), versioned_contents)
        finally:
            # Bug fix: reset the working directory even when an assertion
            # above fails, otherwise later tests would run with a working
            # directory that has been deleted by TemporaryDirectory.
            os.chdir(tempfile.gettempdir())
def test_usage(self):
    """Test the usage message."""
    # The usage message should appear when no arguments are given
    # as well as when -h or --help is given.
    argument_variants = ([], ['-h'], ['--help'])
    for arguments in argument_variants:
        exit_code, output = run_cli(main, *arguments)
        assert "Usage:" in output
def test_command_line_interface(self):
    """Test the command line interface."""
    if SKIP_SLOW_TESTS:
        return self.skipTest("skipping slow tests")
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        # Check `deb-pkg-tools --inspect PKG' against the known control fields.
        package_file = self.test_package_building(directory)
        exit_code, output = run_cli(main, '--verbose', '--inspect', package_file)
        assert exit_code == 0
        output_lines = output.splitlines()
        for field, value in TEST_PACKAGE_FIELDS.items():
            assert match('^ - %s: (.+)$' % field, output_lines) == value
        # Check that `deb-pkg-tools --update=DIR' fails on a
        # directory that doesn't exist.
        exit_code, output = run_cli(main, '--update', '/a/directory/that/will/never/exist')
        assert exit_code != 0
def test_package_building(self, repository=None, overrides=None, contents=None):
    """
    Test building of Debian binary packages.

    :param repository: Optional directory to move the generated archive into
                       (the archive is deleted on cleanup otherwise).
    :param overrides: Optional mapping with control field overrides.
    :param contents: Optional mapping of relative filenames to file contents.
    :returns: The pathname of the generated ``*.deb`` archive.
    """
    # Bug fix: the original signature used mutable default arguments
    # (`overrides={}, contents={}') which share a single dict between calls;
    # None sentinels preserve the interface without that pitfall.
    overrides = {} if overrides is None else overrides
    contents = {} if contents is None else contents
    with Context() as finalizers:
        build_directory = finalizers.mkdtemp()
        control_fields = merge_control_fields(TEST_PACKAGE_FIELDS, overrides)
        # Create the package template.
        os.mkdir(os.path.join(build_directory, 'DEBIAN'))
        with open(os.path.join(build_directory, 'DEBIAN', 'control'), 'wb') as handle:
            control_fields.dump(handle)
        if contents:
            # Populate the package with the caller-supplied files.
            for filename, data in contents.items():
                filename = os.path.join(build_directory, filename)
                directory = os.path.dirname(filename)
                makedirs(directory)
                with open(filename, 'w') as handle:
                    handle.write(data)
        else:
            # Declare two configuration files (one of which won't exist).
            with open(os.path.join(build_directory, 'DEBIAN', 'conffiles'), 'wb') as handle:
                handle.write(b'/etc/file1\n')
                handle.write(b'/etc/file2\n')
            # Create the directory with configuration files.
            os.mkdir(os.path.join(build_directory, 'etc'))
            touch(os.path.join(build_directory, 'etc', 'file1'))
            touch(os.path.join(build_directory, 'etc', 'file3'))
        # Create a directory that should be cleaned up by clean_package_tree().
        makedirs(os.path.join(build_directory, 'tmp', '.git'))
        # Create a file that should be cleaned up by clean_package_tree().
        with open(os.path.join(build_directory, 'tmp', '.gitignore'), 'w') as handle:
            handle.write('\n')
        # Build the package.
        returncode, output = run_cli(main, '--build', build_directory)
        assert returncode == 0
        package_file = os.path.join(tempfile.gettempdir(),
                                    '%s_%s_%s.deb' % (control_fields['Package'],
                                                      control_fields['Version'],
                                                      control_fields['Architecture']))
        assert os.path.isfile(package_file)
        if repository:
            shutil.move(package_file, repository)
            return os.path.join(repository, os.path.basename(package_file))
        else:
            finalizers.register(os.unlink, package_file)
            # Verify the package metadata.
            fields, contents = inspect_package(package_file)
            for name in TEST_PACKAGE_FIELDS:
                assert fields[name] == TEST_PACKAGE_FIELDS[name]
            # Verify that the package contains the `/' and `/tmp'
            # directories (since it doesn't contain any actual files).
            assert contents['/'].permissions[0] == 'd'
            assert contents['/'].permissions[1:] == 'rwxr-xr-x'
            assert contents['/'].owner == 'root'
            assert contents['/'].group == 'root'
            assert contents['/tmp/'].permissions[0] == 'd'
            assert contents['/tmp/'].owner == 'root'
            assert contents['/tmp/'].group == 'root'
            # Verify that clean_package_tree() cleaned up properly
            # (`/tmp/.git' and `/tmp/.gitignore' have been cleaned up).
            assert '/tmp/.git/' not in contents
            assert '/tmp/.gitignore' not in contents
            return package_file
def test_collect_packages_with_prompt(self):
    """
    Test the confirmation prompt during interactive package collection.

    Package collection is run without ``--yes`` so that it prompts for
    confirmation; standard input is replaced to answer `y' automatically.
    """
    with Context() as finalizers:
        # Temporarily change stdin to respond with `y' (for `yes').
        # The finalizer restores the real stdin when the context exits.
        finalizers.register(setattr, sys, 'stdin', sys.stdin)
        sys.stdin = StringIO('y')
        # Prepare some packages to collect
        # (package 1 depends on package 2).
        source_directory = finalizers.mkdtemp()
        target_directory = finalizers.mkdtemp()
        package1 = self.test_package_building(
            source_directory,
            overrides=dict(
                Package='deb-pkg-tools-package-1',
                Depends='deb-pkg-tools-package-2',
            ))
        package2 = self.test_package_building(
            source_directory,
            overrides=dict(Package='deb-pkg-tools-package-2', ))
        # Run `deb-pkg-tools --collect' ...
        returncode, output = run_cli(main, '--collect=%s' % target_directory, package1)
        assert returncode == 0
        # Both the named package and its dependency should be promoted.
        assert sorted(os.listdir(target_directory)) == sorted(
            map(os.path.basename, [package1, package2]))
def test_natural_order(self):
    """Verify the natural order sorting of the snippets in the configuration file."""
    first = "This should be the first line.\n"
    middle = "This should appear in the middle.\n"
    last = "This should be the last line.\n"
    with TemporaryDirectory() as scratch_directory:
        config_file = os.path.join(scratch_directory, 'config')
        snippet_directory = '%s.d' % config_file
        # Create the configuration file and the snippet directory.
        write_file(config_file)
        os.makedirs(snippet_directory)
        # The numeric prefixes sort naturally as 1 < 5 < 10 even though
        # '10' would sort before '5' lexicographically.
        for number, text in ((1, first), (5, middle), (10, last)):
            write_file(os.path.join(snippet_directory, '%i.conf' % number), text)
        # Regenerate the configuration file from the snippets.
        exit_code, output = run_cli(main, config_file)
        assert exit_code == 0
        # The configuration file should exist and be nonempty.
        assert os.path.isfile(config_file)
        assert os.path.getsize(config_file) > 0
        with open(config_file) as handle:
            lines = handle.readlines()
        # All snippets should be present ...
        assert first in lines
        assert middle in lines
        assert last in lines
        # ... in natural order instead of lexicographical order.
        assert lines.index(first) < lines.index(middle)
        assert lines.index(middle) < lines.index(last)
def test_collect_packages_concurrent(self):
    """Test concurrent collection of related packages."""
    with Context() as finalizers:
        source_directory = finalizers.mkdtemp()
        target_directory = finalizers.mkdtemp()
        # Build two "top level" packages that each depend on one package ...
        archives = []
        for name, dependency in (('package-1', 'package-3'),
                                 ('package-2', 'package-4')):
            archives.append(self.test_package_building(
                source_directory,
                overrides=dict(Package=name, Depends=dependency)))
        # ... and the two dependencies themselves.
        for name in 'package-3', 'package-4':
            archives.append(self.test_package_building(
                source_directory,
                overrides=dict(Package=name)))
        # Collect both top level packages in one non-interactive run.
        returncode, output = run_cli(
            main,
            '--collect=%s' % target_directory,
            '--yes',
            archives[0],
            archives[1],
        )
        assert returncode == 0
        # All four packages should have been promoted.
        assert sorted(os.listdir(target_directory)) == \
            sorted(map(os.path.basename, archives))
def test_cryptdisks_start_stop_error_reporting(self):
    """Test the ``cryptdisks-start-fallback`` error reporting."""
    # Both fallback programs should report a nonzero exit status when
    # asked to operate on an unknown target.
    for fallback in (cryptdisks_start_cli, cryptdisks_stop_cli):
        returncode, output = run_cli(fallback, TEST_UNKNOWN_TARGET, merged=True)
        assert returncode != 0
def test_show_entry(self):
    """Test showing of an entry on the terminal."""
    password = random_string()
    # Some voodoo to mock methods in classes that
    # have yet to be instantiated follows :-).
    # The subclass overrides the `text' attribute so the entry's contents
    # resolve to our random password without whatever normally produces it.
    mocked_class = type(
        'TestPasswordEntry',
        (PasswordEntry, ),
        dict(text=password),
    )
    # Substitute the mocked class for the duration of the test.
    with PatchedAttribute(qpass, 'PasswordEntry', mocked_class):
        with TemporaryDirectory() as directory:
            # Create an empty *.gpg file so the store contains one entry.
            name = 'some/random/password'
            touch(os.path.join(directory, '%s.gpg' % name))
            returncode, output = run_cli(
                main,
                '--password-store=%s' % directory,
                '--no-clipboard',
                name,
            )
            assert returncode == 0
            # NOTE(review): dedent() is called with format keyword arguments,
            # so this is presumably the humanfriendly.text variant (not
            # textwrap.dedent) -- confirm against the file's imports. The
            # template literal below looks like it originally spanned
            # multiple lines; verify its whitespace survived reformatting.
            assert dedent(output) == dedent(
                """ {title} Password: {password} """,
                title=name.replace('/', ' / '),
                password=password,
            )
def test_list_releases(self):
    """Test listing of releases."""
    with MockedHomeDirectory() as home:
        repository = self.get_instance(
            bare=False,
            local=os.path.join(home, 'repo'),
            release_scheme='branches',
            release_filter=r'^r(\d{4})$',
        )
        self.create_initial_commit(repository)
        # Create release branches (matched by the release filter above)
        # as well as feature branches (which should be filtered out).
        releases = '1720', '1722', '1723', '1724', '1726'
        features = '12345', '23456', '34567', '45678'
        for release_id in releases:
            repository.create_branch('r' + release_id)
            self.commit_file(repository)
        for feature_id in features:
            repository.create_branch('c' + feature_id)
            self.commit_file(repository)
        # Expose the repository (including its release scheme and filter)
        # under a configured name.
        prepare_config({
            'list-repo': {
                'local': repository.local,
                'release-filter': repository.release_filter,
                'release-scheme': repository.release_scheme,
                'type': repository.ALIASES[0],
            }
        })
        returncode, output = run_cli(main, '--repository=list-repo', '--list-releases')
        assert returncode == 0
        listed_releases = output.splitlines()
        # Only the release branches should be reported (with the `r'
        # prefix stripped by the release filter's capture group).
        for release_id in releases:
            assert release_id in listed_releases
        for feature_id in features:
            assert feature_id not in listed_releases
def test_command_line_interface(self):
    """Test the command line interface."""
    if SKIP_SLOW_TESTS:
        return self.skipTest("skipping slow tests")
    with Context() as context:
        directory = context.mkdtemp()
        # Test `deb-pkg-tools --inspect PKG': every control field we
        # embedded in the test package should be reported.
        package_file = self.test_package_building(directory)
        returncode, output = run_cli(main, '--verbose', '--inspect', package_file)
        assert returncode == 0
        lines = output.splitlines()
        for field, value in TEST_PACKAGE_FIELDS.items():
            assert match('^ - %s: (.+)$' % field, lines) == value
        # Test `deb-pkg-tools --update=DIR' with a non-existing directory:
        # this should be reported as an error.
        returncode, output = run_cli(main, '--update', '/a/directory/that/will/never/exist')
        assert returncode != 0
def test_executable(self):
    """Test that executable files are run, and non-executable ones aren't."""
    executable_script = "#!/bin/sh\necho I am echo output.\n"
    expected_output = "I am echo output.\n"
    plain_snippet = "Don't run me.\n"
    with TemporaryDirectory() as temporary_directory:
        filename = os.path.join(temporary_directory, 'config')
        directory = '%s.d' % filename
        # Create the (empty) configuration file and its snippet directory.
        write_file(filename)
        os.makedirs(directory)
        # One snippet is marked executable (its output should be captured)
        # while the other one isn't (its literal contents should be used).
        executable_file = os.path.join(directory, '01-exec.conf')
        regular_file = os.path.join(directory, '02-noexec.conf')
        write_file(executable_file, executable_script)
        write_file(regular_file, plain_snippet)
        os.chmod(executable_file, 0o755)
        os.chmod(regular_file, 0o644)
        # Update the configuration file through the command line interface.
        returncode, output = run_cli(main, filename)
        assert returncode == 0
        # The configuration file should now exist and be non-empty.
        assert os.path.isfile(filename)
        assert os.path.getsize(filename) > 0
        with open(filename) as handle:
            lines = handle.readlines()
        # The executable snippet contributes its output, the
        # non-executable one contributes its literal contents.
        assert expected_output in lines
        assert plain_snippet in lines
def test_collect_packages(self):
    """Test the command line interface for collection of related packages."""
    with Context() as finalizers:
        source_directory = finalizers.mkdtemp()
        target_directory = finalizers.mkdtemp()
        # Build a dependency chain of three packages (1 -> 2 -> 3).
        chain = []
        for name, depends in (
            ('deb-pkg-tools-package-1', 'deb-pkg-tools-package-2'),
            ('deb-pkg-tools-package-2', 'deb-pkg-tools-package-3'),
            ('deb-pkg-tools-package-3', None),
        ):
            overrides = dict(Package=name)
            if depends:
                overrides['Depends'] = depends
            chain.append(self.test_package_building(source_directory, overrides=overrides))
        # Collecting the first package should pull in the whole chain.
        returncode, output = run_cli(
            main,
            '--yes',
            '--collect=%s' % target_directory,
            chain[0],
        )
        assert returncode == 0
        assert sorted(os.listdir(target_directory)) == \
            sorted(map(os.path.basename, chain))
def test_exclude_list(self):
    """Test that ``rsync-system-backup --exclude`` works as intended."""
    # NOTE(review): another method named test_exclude_list (for backup
    # rotation) appears later in the file; if both live in the same
    # class, one shadows the other -- confirm intent.
    with TemporaryDirectory() as temporary_directory:
        source = os.path.join(temporary_directory, 'source')
        destination = os.path.join(temporary_directory, 'destination')
        latest_directory = os.path.join(destination, 'latest')
        # Populate the source directory with one file that should be
        # copied and one file that should be excluded.
        os.makedirs(source)
        for basename, text in (
            ('included.txt', "This file should be included.\n"),
            ('excluded.txt', "This file should be excluded.\n"),
        ):
            with open(os.path.join(source, basename), 'w') as handle:
                handle.write(text)
        # Run a backup through the command line interface.
        exit_code, output = run_cli(
            main,
            '--backup',
            '--exclude=excluded.txt',
            '--no-sudo',
            '--disable-notifications',
            source, latest_directory,
        )
        assert exit_code == 0
        # Only the included file should have been copied.
        assert os.path.isfile(os.path.join(latest_directory, 'included.txt'))
        assert not os.path.exists(os.path.join(latest_directory, 'excluded.txt'))
def test_conversion_of_isolated_packages(self):
    """
    Convert a group of packages with a custom name and installation prefix.

    Converts pip-accel_ and its dependencies to a group of "isolated Debian
    packages" that are installed with a custom name prefix and installation
    prefix and sanity check the result. Also tests the ``--rename=FROM,TO``
    command line option. Performs static checks on the metadata and contents
    of the resulting package archive.

    .. _pip-accel: https://github.com/paylogic/pip-accel
    """
    # The temporary directory doubles as py2deb's repository directory so
    # the generated *.deb archives are easy to find afterwards.
    with TemporaryDirectory() as directory:
        # Run the conversion command.
        exit_code, output = run_cli(
            main,
            '--repository=%s' % directory,
            '--name-prefix=pip-accel',
            '--install-prefix=/usr/lib/pip-accel',
            # Without this option py2deb would generate a package
            # called `pip-accel-pip-accel'.
            '--no-name-prefix=pip-accel',
            # Exercise --rename=FROM,TO by renaming a dependency:
            # pip-accel-coloredlogs -> pip-accel-coloredlogs-renamed.
            '--rename=coloredlogs,pip-accel-coloredlogs-renamed',
            # Also exercise the update-alternatives integration.
            '--install-alternative=/usr/bin/pip-accel,/usr/lib/pip-accel/bin/pip-accel',
            'pip-accel==0.12.6',
        )
        assert exit_code == 0
        # Sanity check the metadata and contents of the generated archives.
        self.check_converted_pip_accel_packages(directory)
def test_rotate_backups(self): """Test the :func:`.rotate_backups()` function.""" # These are the backups expected to be preserved. After each backup # I've noted which rotation scheme it falls in and the number of # preserved backups within that rotation scheme (counting up as we # progress through the backups sorted by date). expected_to_be_preserved = set([ '2013-10-10@20:07', # monthly (1), yearly (1) '2013-11-01@20:06', # monthly (2) '2013-12-01@20:07', # monthly (3) '2014-01-01@20:07', # monthly (4), yearly (2) '2014-02-01@20:05', # monthly (5) '2014-03-01@20:04', # monthly (6) '2014-04-01@20:03', # monthly (7) '2014-05-01@20:06', # monthly (8) '2014-06-01@20:01', # monthly (9) '2014-06-09@20:01', # weekly (1) '2014-06-16@20:02', # weekly (2) '2014-06-23@20:04', # weekly (3) '2014-06-26@20:04', # daily (1) '2014-06-27@20:02', # daily (2) '2014-06-28@20:02', # daily (3) '2014-06-29@20:01', # daily (4) '2014-06-30@20:03', # daily (5), weekly (4) '2014-07-01@20:02', # daily (6), monthly (10) '2014-07-02@20:03', # hourly (1), daily (7) 'some-random-directory', # no recognizable time stamp, should definitely be preserved 'rotate-backups.ini', # no recognizable time stamp, should definitely be preserved ]) with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root: # Specify the rotation scheme and options through a configuration file. config_file = os.path.join(root, 'rotate-backups.ini') parser = configparser.RawConfigParser() parser.add_section(root) parser.set(root, 'hourly', '24') parser.set(root, 'daily', '7') parser.set(root, 'weekly', '4') parser.set(root, 'monthly', '12') parser.set(root, 'yearly', 'always') parser.set(root, 'ionice', 'idle') with open(config_file, 'w') as handle: parser.write(handle) self.create_sample_backup_set(root) run_cli(main, '--verbose', '--config=%s' % config_file) backups_that_were_preserved = set(os.listdir(root)) assert backups_that_were_preserved == expected_to_be_preserved
def test_cli_defaults(self):
    """Test default password store discovery in command line interface."""
    with MockedHomeDirectory() as home:
        # Create a password store with a single entry in the default
        # location (~/.password-store).
        touch(os.path.join(home, '.password-store', 'the-only-entry.gpg'))
        returncode, output = run_cli(main, '-l')
        assert returncode == 0
        # Listing should discover the default store and report the entry
        # without its `.gpg' extension.
        assert output.splitlines(False) == ['the-only-entry']
def test_with_repo_cli(self):
    """Test ``deb-pkg-tools --with-repo``."""
    # NOTE(review): this method is defined again verbatim later in the
    # file; that later definition shadows this one, making this copy dead
    # code. Consider removing one of the two.
    if SKIP_SLOW_TESTS:
        return self.skipTest("skipping slow tests")
    elif os.getuid() != 0:
        # Activating a repository requires modifying the apt configuration.
        return self.skipTest("need superuser privileges")
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        self.test_package_building(directory)
        with CaptureOutput() as capturer:
            run_cli(
                main,
                '--with-repo=%s' % directory,
                'apt-cache show %s' % TEST_PACKAGE_NAME,
            )
            # Check whether apt-cache sees the package.
            expected_line = "Package: %s" % TEST_PACKAGE_NAME
            assert expected_line in capturer.get_lines()
def test_with_repo_cli(self):
    """Test ``deb-pkg-tools --with-repo``."""
    # NOTE(review): this is a verbatim duplicate of an earlier definition
    # in this file; because both share the same name only this copy runs.
    # Consider removing one of the two.
    if SKIP_SLOW_TESTS:
        return self.skipTest("skipping slow tests")
    elif os.getuid() != 0:
        # Activating a repository requires modifying the apt configuration.
        return self.skipTest("need superuser privileges")
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        self.test_package_building(directory)
        with CaptureOutput() as capturer:
            run_cli(
                main,
                '--with-repo=%s' % directory,
                'apt-cache show %s' % TEST_PACKAGE_NAME,
            )
            # Check whether apt-cache sees the package.
            expected_line = "Package: %s" % TEST_PACKAGE_NAME
            assert expected_line in capturer.get_lines()
def test_dry_run(self):
    """Make sure dry run doesn't remove any backups."""
    with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root:
        self.create_sample_backup_set(root)
        # Rotate with --dry-run; the rotation scheme below would
        # normally delete most of the sample backup set.
        run_cli(
            main,
            '--dry-run',
            '--verbose',
            '--daily=7',
            '--weekly=7',
            '--monthly=12',
            '--yearly=always',
            root,
        )
        # Nothing should have been removed.
        assert set(os.listdir(root)) == SAMPLE_BACKUP_SET
def test_report_available_mirrors_more(self):
    """Test that the available mirrors are printed to the terminal."""
    # NOTE(review): the fixture path is resolved relative to the current
    # working directory, so this assumes the test runs from the project
    # root -- confirm.
    test_custom_mirrors = os.path.join(os.getcwd(), 'test_custom_mirrors.txt')
    # List mirrors from the custom file, excluding *edu* mirrors and
    # limiting the displayed URL length to 51 characters.
    exit_code, output = run_cli(
        main,
        '--list-mirrors',
        '--file-to-read', test_custom_mirrors,
        '--exclude', '*edu*',
        '--url-char-len', '51',
    )
    assert exit_code == 0
def test_rotate_backups(self): """Test the :func:`.rotate_backups()` function.""" # These are the backups expected to be preserved. After each backup # I've noted which rotation scheme it falls in and the number of # preserved backups within that rotation scheme (counting up as we # progress through the backups sorted by date). expected_to_be_preserved = set([ '2013-10-10@20:07', # monthly (1), yearly (1) '2013-11-01@20:06', # monthly (2) '2013-12-01@20:07', # monthly (3) '2014-01-01@20:07', # monthly (4), yearly (2) '2014-02-01@20:05', # monthly (5) '2014-03-01@20:04', # monthly (6) '2014-04-01@20:03', # monthly (7) '2014-05-01@20:06', # monthly (8) '2014-06-01@20:01', # monthly (9) '2014-06-09@20:01', # weekly (1) '2014-06-16@20:02', # weekly (2) '2014-06-23@20:04', # weekly (3) '2014-06-26@20:04', # daily (1) '2014-06-27@20:02', # daily (2) '2014-06-28@20:02', # daily (3) '2014-06-29@20:01', # daily (4) '2014-06-30@20:03', # daily (5), weekly (4) '2014-07-01@20:02', # daily (6), monthly (10) '2014-07-02@20:03', # hourly (1), daily (7) 'some-random-directory', # no recognizable time stamp, should definitely be preserved 'rotate-backups.ini', # no recognizable time stamp, should definitely be preserved ]) with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root: # Specify the rotation scheme and options through a configuration file. config_file = os.path.join(root, 'rotate-backups.ini') parser = configparser.RawConfigParser() parser.add_section(root) parser.set(root, 'hourly', '24') parser.set(root, 'daily', '7') parser.set(root, 'weekly', '4') parser.set(root, 'monthly', '12') parser.set(root, 'yearly', 'always') parser.set(root, 'ionice', 'idle') with open(config_file, 'w') as handle: parser.write(handle) self.create_sample_backup_set(root) run_cli(main, '--verbose', '--config=%s' % config_file) backups_that_were_preserved = set(os.listdir(root)) assert backups_that_were_preserved == expected_to_be_preserved
def test_include_list(self): """Test include list logic.""" # These are the backups expected to be preserved within the year 2014 # (other years are excluded and so should all be preserved, see below). # After each backup I've noted which rotation scheme it falls in. expected_to_be_preserved = set([ '2014-01-01@20:07', # monthly, yearly '2014-02-01@20:05', # monthly '2014-03-01@20:04', # monthly '2014-04-01@20:03', # monthly '2014-05-01@20:06', # monthly '2014-06-01@20:01', # monthly '2014-06-09@20:01', # weekly '2014-06-16@20:02', # weekly '2014-06-23@20:04', # weekly '2014-06-26@20:04', # daily '2014-06-27@20:02', # daily '2014-06-28@20:02', # daily '2014-06-29@20:01', # daily '2014-06-30@20:03', # daily, weekly '2014-07-01@20:02', # daily, monthly '2014-07-02@20:03', # hourly, daily 'some-random-directory', # no recognizable time stamp, should definitely be preserved ]) for name in SAMPLE_BACKUP_SET: if not name.startswith('2014-'): expected_to_be_preserved.add(name) with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root: self.create_sample_backup_set(root) run_cli( main, '--verbose', '--ionice=idle', '--hourly=24', '--daily=7', '--weekly=4', '--monthly=12', '--yearly=always', '--include=2014-*', root, ) backups_that_were_preserved = set(os.listdir(root)) assert backups_that_were_preserved == expected_to_be_preserved
def test_rotate_concurrent(self): """Test the :func:`.rotate_concurrent()` function.""" # These are the backups expected to be preserved # (the same as in test_rotate_backups). expected_to_be_preserved = set([ '2013-10-10@20:07', # monthly, yearly (1) '2013-11-01@20:06', # monthly (2) '2013-12-01@20:07', # monthly (3) '2014-01-01@20:07', # monthly (4), yearly (2) '2014-02-01@20:05', # monthly (5) '2014-03-01@20:04', # monthly (6) '2014-04-01@20:03', # monthly (7) '2014-05-01@20:06', # monthly (8) '2014-06-01@20:01', # monthly (9) '2014-06-09@20:01', # weekly (1) '2014-06-16@20:02', # weekly (2) '2014-06-23@20:04', # weekly (3) '2014-06-26@20:04', # daily (1) '2014-06-27@20:02', # daily (2) '2014-06-28@20:02', # daily (3) '2014-06-29@20:01', # daily (4) '2014-06-30@20:03', # daily (5), weekly (4) '2014-07-01@20:02', # daily (6), monthly (10) '2014-07-02@20:03', # hourly (1), daily (7) 'some-random-directory', # no recognizable time stamp, should definitely be preserved ]) with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root: self.create_sample_backup_set(root) run_cli( main, '--verbose', '--hourly=24', '--daily=7', '--weekly=4', '--monthly=12', '--yearly=always', '--parallel', root, ) backups_that_were_preserved = set(os.listdir(root)) assert backups_that_were_preserved == expected_to_be_preserved
def test_exclude_list(self): """Test exclude list logic.""" # These are the backups expected to be preserved. After each backup # I've noted which rotation scheme it falls in and the number of # preserved backups within that rotation scheme (counting up as we # progress through the backups sorted by date). expected_to_be_preserved = set([ '2013-10-10@20:07', # monthly (1), yearly (1) '2013-11-01@20:06', # monthly (2) '2013-12-01@20:07', # monthly (3) '2014-01-01@20:07', # monthly (4), yearly (2) '2014-02-01@20:05', # monthly (5) '2014-03-01@20:04', # monthly (6) '2014-04-01@20:03', # monthly (7) '2014-05-01@20:06', # monthly (8) '2014-05-19@20:02', # weekly (1) '2014-05-26@20:05', # weekly (2) '2014-06-01@20:01', # monthly (9) '2014-06-09@20:01', # weekly (3) '2014-06-16@20:02', # weekly (4) '2014-06-23@20:04', # weekly (5) '2014-06-26@20:04', # daily (1) '2014-06-27@20:02', # daily (2) '2014-06-28@20:02', # daily (3) '2014-06-29@20:01', # daily (4) '2014-06-30@20:03', # daily (5), weekly (6) '2014-07-01@20:02', # daily (6), monthly (10) '2014-07-02@20:03', # hourly (1), daily (7) 'some-random-directory', # no recognizable time stamp, should definitely be preserved ]) for name in SAMPLE_BACKUP_SET: if name.startswith('2014-05-'): expected_to_be_preserved.add(name) with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root: self.create_sample_backup_set(root) run_cli( main, '--verbose', '--ionice=idle', '--hourly=24', '--daily=7', '--weekly=4', '--monthly=12', '--yearly=always', '--exclude=2014-05-*', root, ) backups_that_were_preserved = set(os.listdir(root)) assert backups_that_were_preserved == expected_to_be_preserved
def test_force_overwrite(self):
    """Test that local modifications can be overwritten when allowed."""
    expected_contents = "Original content.\n"
    with TemporaryDirectory() as temporary_directory:
        filename = os.path.join(temporary_directory, 'config')
        # Create the configuration file and let the command line
        # interface initialize the ``.d`` directory.
        write_file(filename, expected_contents)
        returncode, output = run_cli(main, filename)
        assert returncode == 0
        # Simulate a local modification of the generated file.
        write_file(filename, "Not the same thing.\n")
        # With --force the usual sanity check is overridden and the
        # file is regenerated despite the local modification.
        returncode, output = run_cli(main, '--force', filename, merged=True)
        assert returncode == 0
        assert "overwriting anyway" in output
        # The original contents should have been restored.
        with open(filename) as handle:
            assert handle.read() == expected_contents
def test_vcs_control_field(self):
    """Test that Debian ``Vcs-*`` control file fields can be generated."""
    with TemporaryDirectory() as directory:
        repository = self.get_instance(bare=False, local=directory)
        self.create_initial_commit(repository)
        returncode, output = run_cli(
            main,
            '--repository=%s' % repository.local,
            '--vcs-control-field',
        )
        # assertEqual() replaces the deprecated assertEquals() alias
        # (removed in Python 3.12).
        self.assertEqual(returncode, 0)
        # The generated output should mention the control field name and
        # embed the repository's global revision id.
        assert repository.control_field in output
        assert repository.find_revision_id() in output
def test_server_to_client_full_transfer(self):
    """Test copying a file from the server to the client (no delta transfer)."""
    with Context() as context:
        # Synchronize the file using the command line interface.
        returncode, output = run_cli(
            main,
            context.source.location,
            context.target.pathname,
            capture=False,
        )
        # The command line interface should report success ...
        assert returncode == 0
        # ... and the target should be a byte-for-byte copy of the source.
        assert filecmp.cmp(context.source.pathname, context.target.pathname)
def test_include_list(self): """Test include list logic.""" # These are the backups expected to be preserved within the year 2014 # (other years are excluded and so should all be preserved, see below). # After each backup I've noted which rotation scheme it falls in. expected_to_be_preserved = set([ '2014-01-01@20:07', # monthly, yearly '2014-02-01@20:05', # monthly '2014-03-01@20:04', # monthly '2014-04-01@20:03', # monthly '2014-05-01@20:06', # monthly '2014-06-01@20:01', # monthly '2014-06-09@20:01', # weekly '2014-06-16@20:02', # weekly '2014-06-23@20:04', # weekly '2014-06-26@20:04', # daily '2014-06-27@20:02', # daily '2014-06-28@20:02', # daily '2014-06-29@20:01', # daily '2014-06-30@20:03', # daily, weekly '2014-07-01@20:02', # daily, monthly '2014-07-02@20:03', # hourly, daily 'some-random-directory', # no recognizable time stamp, should definitely be preserved ]) for name in SAMPLE_BACKUP_SET: if not name.startswith('2014-'): expected_to_be_preserved.add(name) with TemporaryDirectory(prefix='rotate-backups-', suffix='-test-suite') as root: self.create_sample_backup_set(root) run_cli( main, '--verbose', '--ionice=idle', '--hourly=24', '--daily=7', '--weekly=4', '--monthly=12', '--yearly=always', '--include=2014-*', root, ) backups_that_were_preserved = set(os.listdir(root)) assert backups_that_were_preserved == expected_to_be_preserved
def test_find_directory(self):
    """Test the translation of repository names into repository directories."""
    with MockedHomeDirectory() as home:
        repository = GitRepo(local=os.path.join(home, 'repo'))
        # Expose the repository under a configured name.
        prepare_config({
            'find-directory-test': {
                'local': repository.local,
                'type': repository.ALIASES[0],
            }
        })
        returncode, output = run_cli(
            main,
            '--repository=find-directory-test',
            '--find-directory',
        )
        # assertEqual() replaces the deprecated assertEquals() alias
        # (removed in Python 3.12).
        self.assertEqual(returncode, 0)
        # The configured name should resolve to the local directory.
        self.assertEqual(output.strip(), repository.local)
def test_find_revision_id(self):
    """Test querying the command line interface for global revision ids."""
    with TemporaryDirectory() as directory:
        repository = self.get_instance(bare=False, local=directory)
        repository.create()
        self.create_initial_commit(repository)
        # Check the global revision id of the initial commit.
        revision_id = repository.find_revision_id()
        self.assertIsInstance(revision_id, string_types)
        self.assertTrue(revision_id)
        # Get the global revision id using the command line interface.
        returncode, output = run_cli(
            main,
            '--repository=%s' % repository.local,
            '--find-revision-id',
        )
        # assertEqual() replaces the deprecated assertEquals() alias
        # (removed in Python 3.12).
        self.assertEqual(returncode, 0)
        self.assertEqual(output.strip(), revision_id)
def test_control_file_patching_and_loading(self):
    """Test patching and loading of control files."""
    deb822_package = Deb822(['Package: unpatched-example',
                             'Depends: some-dependency'])
    with Context() as finalizers:
        # tempfile.mkstemp() replaces the deprecated, race-prone
        # tempfile.mktemp() (which only picked a name without creating
        # the file). We close the descriptor immediately because the
        # file is re-opened for writing below.
        fd, control_file = tempfile.mkstemp()
        os.close(fd)
        finalizers.register(os.unlink, control_file)
        with open(control_file, 'wb') as handle:
            deb822_package.dump(handle)
        # Patch two fields through the command line interface.
        returncode, output = run_cli(
            main,
            '--patch=%s' % control_file,
            '--set=Package: patched-example',
            '--set=Depends: another-dependency',
        )
        assert returncode == 0
        # The Package field should be replaced while the Depends field
        # should have been merged with the existing relationship.
        patched_fields = load_control_file(control_file)
        assert patched_fields['Package'] == 'patched-example'
        assert str(patched_fields['Depends']) == 'another-dependency, some-dependency'
def test_select_release(self):
    """Test release selection."""
    with MockedHomeDirectory() as home:
        repository = self.get_instance(
            bare=False,
            local=os.path.join(home, 'repo'),
            release_scheme='branches',
            release_filter=r'^release-(.+)$',
        )
        self.create_initial_commit(repository)
        # Make sure the correct exception is raised when no matching release is found.
        self.assertRaises(NoMatchingReleasesError, repository.select_release, '1.1')
        # Create some release branches to test with.
        for release in ('1.0', '1.1', '1.2', '2.0', '2.1', '2.2', '2.3', '3.0', '3.1'):
            repository.create_branch('release-%s' % release)
            self.commit_file(repository)
        # Try to select a non-existing release: the highest release that
        # isn't higher than the requested one should be selected.
        release = repository.select_release('2.7')
        self.assertIsInstance(release, Release)
        # assertEqual() replaces the deprecated assertEquals() alias
        # (removed in Python 3.12).
        self.assertEqual(release.identifier, '2.3')
        self.assertEqual(release.revision.branch, 'release-2.3')
        # Try the same thing we did above, but now using the command line
        # interface. To do this we first need to configure the repository's
        # release scheme and filter.
        prepare_config({
            'select-repo': {
                'bare': 'false',
                'local': repository.local,
                'release-filter': '^release-(.+)$',
                'release-scheme': 'branches',
                'type': repository.ALIASES[0],
            }
        })
        returncode, output = run_cli(
            main,
            '--repository=select-repo',
            '--select-release=2.7',
            merged=True,
        )
        assert returncode == 0
        assert output.strip() == '2.3'
def test_create_directory(self):
    """Test that the ``.d`` directory is created on the first run."""
    expected_contents = "This content should be preserved.\n"
    with TemporaryDirectory() as temporary_directory:
        filename = os.path.join(temporary_directory, 'config')
        directory = '%s.d' % filename
        moved_file = os.path.join(directory, 'local')
        # Create a configuration file with some contents.
        write_file(filename, expected_contents)
        # The first run through the command line interface initializes
        # the ``.d`` directory.
        returncode, output = run_cli(main, filename)
        assert returncode == 0
        # The directory should have been created and the original file
        # should have been moved into it ...
        assert os.path.isdir(directory)
        assert os.path.isfile(moved_file)
        # ... while the regenerated configuration file preserves the
        # original contents.
        with open(filename) as handle:
            assert handle.readlines() == [expected_contents]
def test_collect_packages_with_prompt(self):
    """Test the confirmation prompt during interactive package collection."""
    # NOTE(review): this duplicates an earlier definition with the same
    # name in this file; since both share the name only this copy runs.
    # Consider removing one of the two.
    with Context() as finalizers:
        # Temporarily change stdin to respond with `y' (for `yes'),
        # restoring the real stdin when the context unwinds.
        finalizers.register(setattr, sys, 'stdin', sys.stdin)
        sys.stdin = StringIO('y')
        # Prepare some packages to collect.
        source_directory = finalizers.mkdtemp()
        target_directory = finalizers.mkdtemp()
        package1 = self.test_package_building(source_directory, overrides=dict(
            Package='deb-pkg-tools-package-1',
            Depends='deb-pkg-tools-package-2',
        ))
        package2 = self.test_package_building(source_directory, overrides=dict(
            Package='deb-pkg-tools-package-2',
        ))
        # Run `deb-pkg-tools --collect' ... (the prompt is answered by the
        # mocked stdin prepared above).
        returncode, output = run_cli(main, '--collect=%s' % target_directory, package1)
        assert returncode == 0
        # Collecting package1 should also have pulled in its dependency.
        assert sorted(os.listdir(target_directory)) == sorted(map(os.path.basename, [package1, package2]))
def test_find_revision_number(self):
    """Test querying the command line interface for local revision numbers."""
    with TemporaryDirectory() as directory:
        repository = self.get_instance(bare=False, local=directory)
        repository.create()
        self.create_initial_commit(repository)
        # Check the revision number of the initial commit (the assertion
        # tolerates both zero and one based numbering).
        initial_revision_number = repository.find_revision_number()
        assert initial_revision_number in (0, 1)
        # Create a second commit.
        self.create_followup_commit(repository)
        # Check that the second commit bumped the revision number.
        second_revision_number = repository.find_revision_number()
        assert second_revision_number in (1, 2)
        assert second_revision_number > initial_revision_number
        # Get the local revision number using the command line interface.
        returncode, output = run_cli(
            main,
            '--repository=%s' % repository.local,
            '--find-revision-number',
        )
        # assertEqual() replaces the deprecated assertEquals() alias
        # (removed in Python 3.12).
        self.assertEqual(returncode, 0)
        self.assertEqual(int(output), second_revision_number)