def check_package(archive, cache=None):
    """
    Perform static checks on a package's dependency set.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :raises: :exc:`BrokenPackage` when one or more checks failed.
    """
    stopwatch = Timer()
    logger.info("Checking %s ..", format_path(archive))
    related = collect_related_packages(archive, cache=cache)
    problems = []
    # First static check: duplicate files in the dependency set.
    try:
        check_duplicate_files(related, cache=cache)
    except BrokenPackage as error:
        problems.append(error)
    except ValueError:
        # Deliberately ignored -- presumably raised when the duplicate
        # files check isn't applicable to this dependency set (TODO confirm).
        pass
    # Second static check: version conflicts in the dependency set.
    try:
        check_version_conflicts(related, cache=cache)
    except BrokenPackage as error:
        problems.append(error)
    if not problems:
        logger.info("Finished checking in %s, no problems found.", stopwatch)
    elif len(problems) == 1:
        # A single failure is re-raised as-is to preserve its type.
        raise problems[0]
    else:
        # Multiple failures are merged into a single exception.
        raise BrokenPackage('\n\n'.join(map(str, problems)))
def check_package(archive, cache=None):
    """
    Perform static checks on a package's dependency set.

    :param archive: The pathname of an existing ``*.deb`` archive (a string).
    :param cache: The :py:class:`.PackageCache` to use (defaults to ``None``).
    :raises: :py:class:`BrokenPackage` when one or more checks failed.
    """
    timer = Timer()
    logger.info("Checking %s ..", format_path(archive))
    dependency_set = collect_related_packages(archive, cache=cache)
    failed_checks = []
    # Check for duplicate files in the dependency set.
    try:
        check_duplicate_files(dependency_set, cache=cache)
    except BrokenPackage as e:
        failed_checks.append(e)
    except ValueError:
        # Silenced on purpose: presumably check_duplicate_files() raises
        # ValueError when the check isn't applicable to this dependency
        # set -- TODO confirm against its implementation.
        pass
    # Check for version conflicts in the dependency set.
    try:
        check_version_conflicts(dependency_set, cache=cache)
    except BrokenPackage as e:
        failed_checks.append(e)
    # A single failure is re-raised directly (preserving its exception
    # type); multiple failures are combined into one BrokenPackage.
    if len(failed_checks) == 1:
        raise failed_checks[0]
    elif failed_checks:
        raise BrokenPackage('\n\n'.join(map(str, failed_checks)))
    else:
        logger.info("Finished checking in %s, no problems found.", timer)
def test_collect_packages_preference_for_newer_versions(self):
    """Test the preference of package collection for newer versions."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()

        def build(**fields):
            # Local helper to cut down on call-site repetition.
            return self.test_package_building(directory, overrides=dict(fields))

        top_level = build(Package='deb-pkg-tools-package-1',
                          Depends='deb-pkg-tools-package-2')
        middle_old = build(Package='deb-pkg-tools-package-2', Version='1',
                           Depends='deb-pkg-tools-package-3 (= 1)')
        middle_new = build(Package='deb-pkg-tools-package-2', Version='2',
                           Depends='deb-pkg-tools-package-3 (= 2)')
        leaf_old = build(Package='deb-pkg-tools-package-3', Version='1')
        leaf_new = build(Package='deb-pkg-tools-package-3', Version='2')
        collected = [archive.filename for archive in
                     collect_related_packages(top_level, cache=self.package_cache)]
        # Only the newest version of each dependency may be collected.
        assert middle_old not in collected
        assert middle_new in collected
        assert leaf_old not in collected
        assert leaf_new in collected
def test_collect_packages_preference_for_newer_versions(self):
    """Test the preference of package collection for newer versions."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        pkg1 = self.test_package_building(directory, overrides={
            'Package': 'deb-pkg-tools-package-1',
            'Depends': 'deb-pkg-tools-package-2',
        })
        pkg2_old = self.test_package_building(directory, overrides={
            'Package': 'deb-pkg-tools-package-2',
            'Version': '1',
            'Depends': 'deb-pkg-tools-package-3 (= 1)',
        })
        pkg2_new = self.test_package_building(directory, overrides={
            'Package': 'deb-pkg-tools-package-2',
            'Version': '2',
            'Depends': 'deb-pkg-tools-package-3 (= 2)',
        })
        pkg3_old = self.test_package_building(directory, overrides={
            'Package': 'deb-pkg-tools-package-3',
            'Version': '1',
        })
        pkg3_new = self.test_package_building(directory, overrides={
            'Package': 'deb-pkg-tools-package-3',
            'Version': '2',
        })
        filenames = [entry.filename for entry in
                     collect_related_packages(pkg1, cache=self.package_cache)]
        # The collector must pick the newest available version of each
        # dependency and skip the older builds.
        assert pkg2_old not in filenames
        assert pkg2_new in filenames
        assert pkg3_old not in filenames
        assert pkg3_new in filenames
def test_collect_packages_interactive(self):
    """Test the interactive collection of related packages."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        root = self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-1',
            Depends='deb-pkg-tools-package-2'))
        dependency = self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-2',
            Depends='deb-pkg-tools-package-3'))
        outdated = self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-3',
            Version='0.1'))
        current = self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-3',
            Version='0.2'))
        filenames = [candidate.filename for candidate in
                     collect_related_packages(root, cache=self.package_cache)]
        # The direct dependency is collected and only the newest build of
        # the transitive dependency is picked.
        assert dependency in filenames
        assert outdated not in filenames
        assert current in filenames
def test_collect_packages_interactive(self):
    """Test the interactive collection of related packages."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        first = self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-1',
            Depends='deb-pkg-tools-package-2'))
        second = self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-2',
            Depends='deb-pkg-tools-package-3'))
        # An older build of the third package that should be ignored in
        # favor of the 0.2 build below.
        self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-3'))
        newest = self.test_package_building(directory, overrides=dict(
            Package='deb-pkg-tools-package-3',
            Version='0.2'))
        collected = sorted(archive.filename for archive in
                           collect_related_packages(first, cache=self.package_cache))
        self.assertEqual(collected, [second, newest])
def test_version_conflicts_check(self):
    """Test static analysis of version conflicts."""
    with Context() as finalizers:
        # Build a dependency set that contains a version conflict.
        directory = finalizers.mkdtemp()
        root_package, conflicting_package = self.create_version_conflict(directory)
        # Pass the package cache for consistency with the other tests in
        # this file (the original call omitted it).
        packages_to_scan = collect_related_packages(root_package, cache=self.package_cache)
        # Make sure the version conflict is detected (the old comment here
        # incorrectly claimed this tested the duplicate files check).
        self.assertRaises(VersionConflictFound, check_version_conflicts,
                          packages_to_scan, self.package_cache)
        # After removing the conflicting archive the check should pass and
        # return None.
        os.unlink(conflicting_package)
        assert check_version_conflicts(packages_to_scan, cache=self.package_cache) is None
def test_version_conflicts_check(self):
    """Test static analysis of version conflicts."""
    with Context() as finalizers:
        # Check that version conflicts raise an exception.
        directory = finalizers.mkdtemp()
        root_package, conflicting_package = self.create_version_conflict(directory)
        packages_to_scan = collect_related_packages(root_package)
        # Make sure the version conflict is detected (this exercises the
        # version conflicts check, not the duplicate files check as a
        # previous comment claimed).
        self.assertRaises(VersionConflictFound, check_version_conflicts, packages_to_scan, self.package_cache)
        # After the conflicting archive is removed the check should pass
        # and return None.
        os.unlink(conflicting_package)
        assert check_version_conflicts(packages_to_scan, cache=self.package_cache) is None
def test_collect_packages_interactive(self):
    """Test the interactive collection of related packages."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        # Build the fixture archives from a list of control field sets.
        specs = [
            dict(Package='deb-pkg-tools-package-1', Depends='deb-pkg-tools-package-2'),
            dict(Package='deb-pkg-tools-package-2', Depends='deb-pkg-tools-package-3'),
            dict(Package='deb-pkg-tools-package-3', Version='0.1'),
            dict(Package='deb-pkg-tools-package-3', Version='0.2'),
        ]
        archives = [self.test_package_building(directory, overrides=spec)
                    for spec in specs]
        package1, package2, package3_1, package3_2 = archives
        related = [p.filename for p in
                   collect_related_packages(package1, cache=self.package_cache)]
        # The direct dependency is collected.
        assert package2 in related
        # Only the newest build of the transitive dependency is collected.
        assert package3_1 not in related
        assert package3_2 in related
def collect_packages_worker(args):
    """Helper for :func:`collect_packages()` that enables concurrent collection."""
    archive, cache = args
    try:
        return collect_related_packages(archive, cache=cache, interactive=False)
    except Exception:
        # The multiprocessing module doesn't preserve tracebacks when an
        # exception crosses the process boundary, so log the full traceback
        # here in the child before re-raising to the parent.
        logger.exception(compact("""
            Encountered unhandled exception during collection of related
            packages! (propagating exception to parent process)
        """))
        raise
def collect_packages(archives, directory, prompt=True, cache=None):
    """
    Interactively copy packages and their dependencies.

    :param archives: An iterable of strings with the filenames of one or more
                     ``*.deb`` files.
    :param directory: The pathname of a directory where the package archives
                      and their dependencies should be copied to (a string).
    :param prompt: ``True`` (the default) to ask the operator for confirmation
                   before copying, ``False`` to copy without asking.
    :param cache: The :class:`.PackageCache` to use (defaults to ``None``).
    """
    # Find all related packages.
    related_archives = set()
    for filename in archives:
        related_archives.add(parse_filename(filename))
        related_archives.update(collect_related_packages(filename, cache=cache))
    # Ignore package archives that are already in the target directory.
    relevant_archives = set()
    for archive in related_archives:
        basename = os.path.basename(archive.filename)
        if not os.path.isfile(os.path.join(directory, basename)):
            relevant_archives.add(archive)
    # Interactively move the package archives.
    if relevant_archives:
        relevant_archives = sorted(relevant_archives)
        pluralized = pluralize(len(relevant_archives), "package archive", "package archives")
        print("Found %s:" % pluralized)
        for file_to_collect in relevant_archives:
            print(" - %s" % format_path(file_to_collect.filename))
        try:
            if prompt:
                # Ask permission to copy the file(s). A dedicated variable
                # holds the prompt text so the `prompt' argument is no
                # longer clobbered (the original code shadowed it).
                question = "Copy %s to %s? [Y/n] " % (pluralized, format_path(directory))
                # raw_input() only exists on Python 2; it was renamed to
                # input() on Python 3 (where the original code raised
                # NameError). Resolve whichever is available at runtime.
                try:
                    read_reply = raw_input
                except NameError:
                    read_reply = input
                assert read_reply(question).lower() in ('', 'y', 'yes')
            # Copy the file(s).
            for file_to_collect in relevant_archives:
                copy_from = file_to_collect.filename
                copy_to = os.path.join(directory, os.path.basename(copy_from))
                logger.debug("Copying %s -> %s ..", format_path(copy_from), format_path(copy_to))
                shutil.copy(copy_from, copy_to)
            logger.info("Done! Copied %s to %s.", pluralized, format_path(directory))
        except (AssertionError, KeyboardInterrupt, EOFError) as e:
            if isinstance(e, KeyboardInterrupt):
                # Control-C interrupts the prompt without emitting a newline. We'll
                # print one manually so the console output doesn't look funny.
                sys.stderr.write('\n')
            logger.warning("Not copying archive(s) to %s! (aborted by user)", format_path(directory))
            if isinstance(e, KeyboardInterrupt):
                # Maybe we shouldn't actually swallow Control-C, it can make
                # for a very unfriendly user experience... :-)
                raise
def test_collect_packages_with_conflict_resolution(self):
    """Test conflict resolution in collection of related packages."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        # The following names are a bit confusing, this is to enforce
        # implicit sorting on file system level (exposing an otherwise
        # unnoticed bug).
        build = self.test_package_building
        archive_a = build(directory, overrides=dict(
            Package='package-a',
            Depends='package-b, package-c'))
        archive_b = build(directory, overrides=dict(
            Package='package-b',
            Depends='package-d'))
        archive_c = build(directory, overrides=dict(
            Package='package-c',
            Depends='package-d (= 1)'))
        archive_d1 = build(directory, overrides=dict(
            Package='package-d',
            Version='1'))
        archive_d2 = build(directory, overrides=dict(
            Package='package-d',
            Version='2'))
        filenames = [candidate.filename for candidate in
                     collect_related_packages(archive_a, cache=self.package_cache)]
        # Both direct dependencies are collected.
        assert archive_b in filenames
        assert archive_c in filenames
        # The pinned dependency `package-d (= 1)' wins over the newer
        # build, so version 1 is collected and version 2 is not.
        assert archive_d1 in filenames
        assert archive_d2 not in filenames
def collect_packages_worker(args):
    """
    Helper for :func:`collect_packages()` that enables concurrent collection.

    :param args: A tuple of ``(archive, cache)`` -- a single packed argument
                 is used because the caller dispatches through
                 :func:`multiprocessing.Pool.map`, which passes one argument
                 per call.
    :returns: Whatever :func:`collect_related_packages()` returns for the
              given archive.
    """
    try:
        return collect_related_packages(args[0], cache=args[1], interactive=False)
    except Exception:
        # Log a full traceback in the child process because the multiprocessing
        # module doesn't preserve the traceback when propagating the exception
        # to the parent process.
        logger.exception(compact("""
            Encountered unhandled exception during collection of related
            packages! (propagating exception to parent process)
        """))
        # Propagate the exception to the parent process.
        raise
def test_collect_packages_with_conflict_resolution(self):
    """Test conflict resolution in collection of related packages."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        # The following names are a bit confusing, this is to enforce
        # implicit sorting on file system level (exposing an otherwise
        # unnoticed bug).
        package_a = self.test_package_building(
            directory, overrides=dict(Package='package-a', Depends='package-b, package-c'))
        package_b = self.test_package_building(
            directory, overrides=dict(Package='package-b', Depends='package-d'))
        package_c = self.test_package_building(
            directory, overrides=dict(Package='package-c', Depends='package-d (= 1)'))
        package_d1 = self.test_package_building(
            directory, overrides=dict(Package='package-d', Version='1'))
        package_d2 = self.test_package_building(
            directory, overrides=dict(Package='package-d', Version='2'))
        collected = set(entry.filename for entry in
                        collect_related_packages(package_a, cache=self.package_cache))
        # Everything the pinned dependency chain requires was collected ...
        assert {package_b, package_c, package_d1}.issubset(collected)
        # ... and the conflicting newer version of package-d was not.
        assert package_d2 not in collected
def test_collect_packages_interactive(self):
    """Test the interactive collection of related packages."""
    with Context() as finalizers:
        directory = finalizers.mkdtemp()
        overrides_list = [
            dict(Package='deb-pkg-tools-package-1', Depends='deb-pkg-tools-package-2'),
            dict(Package='deb-pkg-tools-package-2', Depends='deb-pkg-tools-package-3'),
            dict(Package='deb-pkg-tools-package-3'),
            dict(Package='deb-pkg-tools-package-3', Version='0.2'),
        ]
        package1, package2, _, package4 = [
            self.test_package_building(directory, overrides=overrides)
            for overrides in overrides_list
        ]
        collected = sorted(p.filename for p in
                           collect_related_packages(package1, cache=self.package_cache))
        # Only the second package and the newest build of the third
        # package should have been collected.
        self.assertEqual(collected, [package2, package4])
def collect_packages(archives, directory, prompt=True, cache=None):
    """
    Interactively copy packages and their dependencies.

    :param archives: An iterable of strings with the filenames of one or more
                     ``*.deb`` files.
    :param directory: The pathname of a directory where the package archives
                      and their dependencies should be copied to (a string).
    :param prompt: ``True`` (the default) to ask the operator for confirmation
                   before copying, ``False`` to copy without asking.
    :param cache: The :class:`.PackageCache` to use (defaults to ``None``).
    """
    # Find all related packages.
    related_archives = set()
    for filename in archives:
        related_archives.add(parse_filename(filename))
        related_archives.update(collect_related_packages(filename, cache=cache))
    # Ignore package archives that are already in the target directory.
    relevant_archives = set()
    for archive in related_archives:
        basename = os.path.basename(archive.filename)
        if not os.path.isfile(os.path.join(directory, basename)):
            relevant_archives.add(archive)
    # Interactively move the package archives.
    if relevant_archives:
        relevant_archives = sorted(relevant_archives)
        pluralized = pluralize(len(relevant_archives), "package archive", "package archives")
        print("Found %s:" % pluralized)
        for file_to_collect in relevant_archives:
            print(" - %s" % format_path(file_to_collect.filename))
        try:
            if prompt:
                # Ask permission to copy the file(s). The prompt text gets
                # its own variable so the `prompt' argument isn't clobbered
                # (the original code shadowed it with this string).
                question = "Copy %s to %s? [Y/n] " % (pluralized, format_path(directory))
                # raw_input() is Python-2-only; on Python 3 it was renamed
                # to input() and the original code raised NameError here.
                try:
                    get_reply = raw_input
                except NameError:
                    get_reply = input
                assert get_reply(question).lower() in ('', 'y', 'yes')
            # Copy the file(s).
            for file_to_collect in relevant_archives:
                copy_from = file_to_collect.filename
                copy_to = os.path.join(directory, os.path.basename(copy_from))
                logger.debug("Copying %s -> %s ..", format_path(copy_from), format_path(copy_to))
                shutil.copy(copy_from, copy_to)
            logger.info("Done! Copied %s to %s.", pluralized, format_path(directory))
        except (AssertionError, KeyboardInterrupt, EOFError) as e:
            if isinstance(e, KeyboardInterrupt):
                # Control-C interrupts the prompt without emitting a newline. We'll
                # print one manually so the console output doesn't look funny.
                sys.stderr.write('\n')
            logger.warning("Not copying archive(s) to %s! (aborted by user)", format_path(directory))
            if isinstance(e, KeyboardInterrupt):
                # Maybe we shouldn't actually swallow Control-C, it can make
                # for a very unfriendly user experience... :-)
                raise
def collect_packages(archives, directory, prompt=True, cache=None, concurrency=None):
    """
    Interactively copy packages and their dependencies.

    :param archives: An iterable of strings with the filenames of one or more
                     ``*.deb`` files.
    :param directory: The pathname of a directory where the package archives
                      and dependencies should be copied to (a string).
    :param prompt: :data:`True` (the default) to ask confirmation from the
                   operator (using a confirmation prompt rendered on the
                   terminal), :data:`False` to skip the prompt.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :param concurrency: Override the number of concurrent processes (defaults
                        to the number of `archives` given or to the value of
                        :func:`multiprocessing.cpu_count()`, whichever is
                        smaller).
    :raises: :exc:`~exceptions.ValueError` when no archives are given.

    When more than one archive is given a :mod:`multiprocessing` pool is used
    to collect related archives concurrently, in order to speed up the process
    of collecting large dependency sets.
    """
    archives = list(archives)
    # The given archives are always part of the result set.
    related_archives = set(map(parse_filename, archives))
    if not archives:
        raise ValueError("At least one package archive is required!")
    elif len(archives) == 1:
        # Find the related packages of a single archive (no pool overhead).
        related_archives.update(collect_related_packages(archives[0], cache=cache))
    else:
        # Find the related packages of multiple archives (concurrently).
        with AutomaticSpinner(label="Collecting related packages"):
            concurrency = min(len(archives), concurrency or multiprocessing.cpu_count())
            pool = multiprocessing.Pool(concurrency)
            try:
                # Pack (archive, cache) tuples because Pool.map() passes a
                # single argument to collect_packages_worker().
                arguments = [(archive, cache) for archive in archives]
                for result in pool.map(collect_packages_worker, arguments, chunksize=1):
                    related_archives.update(result)
            finally:
                # Make sure the worker processes never outlive this call.
                pool.terminate()
    # Ignore package archives that are already in the target directory.
    relevant_archives = set()
    for archive in related_archives:
        basename = os.path.basename(archive.filename)
        if not os.path.isfile(os.path.join(directory, basename)):
            relevant_archives.add(archive)
    # Interactively move the package archives.
    if relevant_archives:
        relevant_archives = sorted(relevant_archives)
        pluralized = pluralize(len(relevant_archives), "package archive", "package archives")
        say("Found %s:", pluralized)
        for file_to_collect in relevant_archives:
            say(" - %s", format_path(file_to_collect.filename))
        prompt_text = "Copy %s to %s?" % (pluralized, format_path(directory))
        # Only bail out when the operator explicitly declines; with
        # prompt=False the archives are copied unconditionally.
        if prompt and not prompt_for_confirmation(prompt_text, default=True, padding=False):
            logger.warning("Not copying archive(s) to %s! (aborted by user)", format_path(directory))
        else:
            # Link or copy the file(s).
            for file_to_collect in relevant_archives:
                src = file_to_collect.filename
                dst = os.path.join(directory, os.path.basename(src))
                smart_copy(src, dst)
            logger.info("Done! Copied %s to %s.", pluralized, format_path(directory))
    else:
        logger.info("Nothing to do! (%s previously copied)",
                    pluralize(len(related_archives), "package archive"))
def collect_packages(archives, directory, prompt=True, cache=None, concurrency=None):
    """
    Interactively copy packages and their dependencies.

    :param archives: An iterable of strings with the filenames of one or more
                     ``*.deb`` files.
    :param directory: The pathname of a directory where the package archives
                      and dependencies should be copied to (a string).
    :param prompt: :data:`True` (the default) to ask confirmation from the
                   operator (using a confirmation prompt rendered on the
                   terminal), :data:`False` to skip the prompt.
    :param cache: The :class:`.PackageCache` to use (defaults to :data:`None`).
    :param concurrency: Override the number of concurrent processes (defaults
                        to the number of `archives` given or to the value of
                        :func:`multiprocessing.cpu_count()`, whichever is
                        smaller).
    :raises: :exc:`~exceptions.ValueError` when no archives are given.

    When more than one archive is given a :mod:`multiprocessing` pool is used
    to collect related archives concurrently, in order to speed up the process
    of collecting large dependency sets.
    """
    archives = list(archives)
    if not archives:
        raise ValueError("At least one package archive is required!")
    # The given archives are always part of the result set.
    collected = set(map(parse_filename, archives))
    if len(archives) == 1:
        # A single archive doesn't warrant the overhead of a process pool.
        collected.update(collect_related_packages(archives[0], cache=cache))
    else:
        # Fan the archives out over a pool of worker processes.
        with AutomaticSpinner(label="Collecting related packages"):
            num_workers = min(len(archives), concurrency or multiprocessing.cpu_count())
            pool = multiprocessing.Pool(num_workers)
            try:
                worker_args = [(fn, cache) for fn in archives]
                for partial_result in pool.map(collect_packages_worker, worker_args, chunksize=1):
                    collected.update(partial_result)
            finally:
                pool.terminate()
    # Ignore package archives that are already in the target directory.
    pending = set()
    for candidate in collected:
        target = os.path.join(directory, os.path.basename(candidate.filename))
        if not os.path.isfile(target):
            pending.add(candidate)
    if not pending:
        logger.info("Nothing to do! (%s previously copied)",
                    pluralize(len(collected), "package archive"))
        return
    # Show what was found and (optionally) ask permission to copy it.
    pending = sorted(pending)
    pluralized = pluralize(len(pending), "package archive", "package archives")
    say("Found %s:", pluralized)
    for entry in pending:
        say(" - %s", format_path(entry.filename))
    question = "Copy %s to %s?" % (pluralized, format_path(directory))
    if prompt and not prompt_for_confirmation(question, default=True, padding=False):
        logger.warning("Not copying archive(s) to %s! (aborted by user)", format_path(directory))
        return
    # Link or copy the file(s).
    for entry in pending:
        source = entry.filename
        smart_copy(source, os.path.join(directory, os.path.basename(source)))
    logger.info("Done! Copied %s to %s.", pluralized, format_path(directory))