def locked_requirements(packages=None, requirements=None, ignore_list=None):
    """Returns all requirements locked to the required versions

    :param packages: Collection of packages
    :param requirements: Collection of requirements
    :param ignore_list: List of package keys to ignore
    :return: All requirements locked to their installed versions
    """
    packages = packages or PackageCollection()
    requirements = requirements or RequirementCollection()
    # NOTE(review): ignore_list is accepted but never used by this function;
    # kept for interface compatibility -- confirm whether filtering by it
    # was intended.
    ignore_list = ignore_list or []

    # Without explicit requirements, lock every installed package as-is.
    if not requirements:
        return RequirementCollection(
            sorted(Requirement.from_package(package=package)
                   for package in packages))

    # Hoisted out of the loop: the original recomputed requirements.keys()
    # and rebuilt the flattened requirement tree for every package.
    requirement_keys = requirements.keys()
    flattened = requirements.flatten()

    locked = [
        package for package in packages
        if package.key in requirement_keys
        or required_by(requirement=package, packages=flattened)
    ]
    return RequirementCollection(sorted(locked))
def test_locked_requirements(virtualenv, mocker):
    """With or without requirements given, every package locks to ==1.0.0."""
    # preconditions
    locked_dir = PACKAGES_DIR / 'locked'
    expected_result = [
        'locked-package1', 'locked-package2', 'locked-package3'
    ]
    packages = prepare_packages(
        venv=virtualenv,
        mocker=mocker,
        packages=[locked_dir / name for name in expected_result])
    requirements_empty = RequirementCollection()
    requirements = RequirementCollection()
    requirements.append(
        Requirement.from_package(packages.get(key='locked-package1')))

    # action
    locked_no_requirements = operations.locked_requirements(
        packages=packages, requirements=requirements_empty)
    locked = operations.locked_requirements(packages=packages,
                                            requirements=requirements)

    # verification
    assert expected_result == locked_no_requirements.keys()
    assert expected_result == locked.keys()
    for result in (locked_no_requirements, locked):
        for lock in result:
            assert lock.version.specifier == '==1.0.0'
def test_check_unnecessary_requirements(capsys, test_data):
    """check_unnecessary_packages flags installed packages nobody requires."""
    # preconditions
    package_list, requirements_list, locked_list = test_data
    packages = PackageCollection([
        Package(key=key, name=key, obj=None, version=None)
        for key in package_list
    ])
    requirements = RequirementCollection([
        Requirement.from_requirement_string(item)
        for item in requirements_list
    ])
    locked = RequirementCollection([
        Requirement.from_requirement_string(item) for item in locked_list
    ])

    # action
    success = validate.check_unnecessary_packages(
        packages=packages,
        requirements=requirements,
        locked=locked,
    )

    # verification
    captured = capsys.readouterr()
    required = set(requirements_list + locked_list)
    assert success is False if len(package_list) > len(required) else success
    expected_message = (messages.PACKAGE_NOT_REQUIRED_OK
                        if success else messages.PACKAGE_NOT_REQUIRED_FOUND)
    assert expected_message in captured.out
    for package in package_list:
        if package not in required:
            assert package in captured.out
def unlocked_requirements(requirements=None):
    """Returns all requirements that do not have a set version

    :param requirements: Collection of requirements
    :return: Requirements whose versions are not set
    """
    if not requirements:
        requirements = RequirementCollection()
    # A requirement is "unlocked" when its version spec is the wildcard.
    unlocked = [
        requirement for requirement in requirements
        if str(requirement.specified_version) == Config.any_version
    ]
    unlocked.sort()
    return RequirementCollection(unlocked)
def unset_requirements(packages=None, requirements=None):
    """Returns all package requirements that are not in provided requirements

    :param packages: Collection of packages
    :param requirements: Collection of requirements
    :return: All requirements that are not in requirements
    """
    if not packages:
        packages = PackageCollection()
    if not requirements:
        requirements = RequirementCollection()
    requirement_keys = requirements.keys()
    # Only top-level (independent) packages are considered here.
    missing = (package for package in packages.independent_packages
               if package.key not in requirement_keys)
    return RequirementCollection(missing)
def test_unset_locks():
    """Requirements absent from the lock file are reported as unset."""
    # preconditions
    requirements = RequirementCollection.from_file(
        filepath=str(PACKAGES_DIR / 'requirements-unset.txt'))
    locked = RequirementCollection.from_file(
        filepath=str(PACKAGES_DIR / 'requirements-unset.lock'))

    # action
    unset = operations.unset_locks(requirements=requirements, locked=locked)

    # verification
    assert unset.keys() == ['package2', 'package3']
def unset_locks(requirements=None, locked=None):
    """Returns all requirements that are not version locked

    :param requirements: Collection of requirements
    :param locked: Collection of locked requirements
    :return: Requirements that are not locked
    """
    if not requirements:
        requirements = RequirementCollection()
    if not locked:
        locked = RequirementCollection()
    locked_keys = locked.keys()
    # Deduplicate the flattened requirements, then keep only those whose
    # key has no entry in the lock collection.
    unlocked = {requirement
                for requirement in requirements.flatten()
                if requirement.key not in locked_keys}
    return RequirementCollection(sorted(unlocked))
def required_version_mismatch(requirements=None, locked=None):
    """Return detected version mismatches between requirements and locked
    requirements

    :param requirements: Collection of requirements
    :param locked: Collection of locked requirements
    :return: All detected mismatches between requirements and locked
        requirements
    """
    # Fixed docstring: the :param locked: description was a copy-paste of
    # the return description in the original.
    requirements = requirements or RequirementCollection()
    locked = locked or RequirementCollection()
    # Pair each conflicting requirement with the version string its lock
    # entry pins it to.
    return [(requirement, str(lock.specified_version))
            for requirement in requirements
            for lock in locked
            if requirement.key == lock.key and requirement.conflicting()]
def test_unnecessary_requirements(virtualenv, mocker):
    """A package required by nothing is reported as unnecessary."""
    # preconditions
    packages = prepare_packages(
        venv=virtualenv,
        mocker=mocker,
        packages=[PACKAGES_DIR / 'example-package'])
    requirements = RequirementCollection()
    locked = RequirementCollection()

    # action
    unnecessary = operations.unnecessary_packages(packages=packages,
                                                  requirements=requirements,
                                                  locked=locked)

    # verification
    assert unnecessary.keys() == ['example-package']
def test_check_version_mismatch(capsys, test_data):
    """check_package_version_mismatch reports installed/locked differences."""
    # preconditions
    package_list, locked_list = test_data
    packages = PackageCollection([
        Package(key=key,
                name=key,
                obj=None,
                version=InstalledVersion(version))
        for key, version in package_list
    ])
    locked = RequirementCollection([
        Requirement(key=key,
                    name=key,
                    obj=None,
                    version=RequiredVersion(version))
        for key, version in locked_list
    ])

    # action
    success = validate.check_package_version_mismatch(packages=packages,
                                                      locked=locked)

    # verification
    captured = capsys.readouterr()
    assert success is False if package_list != locked_list else success
    expected = (messages.PACKAGE_VERSION_MISMATCH_OK
                if success else messages.PACKAGE_VERSION_MISMATCH_FOUND)
    assert expected in captured.out
    mismatched = [
        pkg_key for pkg_key, pkg_version in package_list
        for lock_key, lock_version in locked_list
        if pkg_key == lock_key and pkg_version != lock_version
    ]
    for key in mismatched:
        assert key in captured.out
def test_lock_version_mismatch(virtualenv, mocker):
    """Installed versions differing from the lock file are detected."""
    # preconditions
    mismatch_dir = PACKAGES_DIR / 'mismatch'
    packages = prepare_packages(
        venv=virtualenv,
        mocker=mocker,
        packages=[
            mismatch_dir / 'mismatch-package1',
            mismatch_dir / 'mismatch-package2',
            mismatch_dir / 'mismatch-package3',
        ])
    expected_mismatch = [
        (packages.get('mismatch-package2'), '==2.0.0'),
        (packages.get('mismatch-package3'), '>=3.0.0'),
    ]
    requirements = RequirementCollection.from_file(
        filepath=PACKAGES_DIR / 'requirements-mismatch.lock')

    # action
    mismatch = operations.lock_version_mismatch(packages=packages,
                                                locked=requirements)

    # verification
    assert mismatch == expected_mismatch
def parse_requirements_file(filepath):
    """Parse requirements file and return a collection of requirements

    :param filepath: Filepath of requirements file
    :return: Collection of requirements
    """
    data = []
    with open(str(filepath), 'r') as requirements_file:
        for row in requirements_file:
            # Remove whitespaces and newlines so requirement strings are
            # handled in a normalized form
            cleaned = row.replace(' ', '').strip()
            # Skip comments (including indented ones), -r/-c include
            # directives and blank lines. The original `row != '\n'` check
            # missed whitespace-only lines, which then produced empty
            # requirement strings.
            if not cleaned or cleaned.startswith(('#', '-r', '-c')):
                continue
            data.append(cleaned)
    return RequirementCollection([
        Requirement.from_requirement_string(row) for row in data
    ])
def unnecessary_locks(requirements=None, locked=None, ignore_list=None):
    """Return locked requirements that are not required by requirements files

    :param requirements: Collection of requirements
    :param locked: Collection of locked requirements
    :param ignore_list: List of package keys to ignore
    :return: All packages that do not need to be locked
    """
    requirements = requirements or RequirementCollection()
    locked = locked or RequirementCollection()
    ignore_list = ignore_list or []
    # Hoisted out of the loop: the original rebuilt the entire flattened
    # requirement tree (requirements.flatten().keys()) for every lock entry.
    required_keys = requirements.flatten().keys()
    ignored = set(ignore_list)
    return RequirementCollection(
        sorted(lock for lock in locked
               if lock.key not in required_keys and lock.key not in ignored))
def unnecessary_packages(packages=None, requirements=None, locked=None):
    """Return locked packages that are not required by specified requirements
    or locked requirements

    :param packages: Collection of packages
    :param requirements: Collection of requirements
    :param locked: Collection of locked requirements
    :return: All packages that are not required
    """
    packages = packages or PackageCollection()
    requirements = requirements or RequirementCollection()
    locked = locked or RequirementCollection()
    unset = unset_locks(requirements=requirements, locked=locked)
    # Hoisted out of the loop: the original recomputed both key lists for
    # every package; sets also make membership tests O(1).
    locked_keys = set(locked.keys())
    unset_keys = set(unset.keys())
    return PackageCollection(
        sorted(package for package in packages
               if package.key not in locked_keys
               and package.key not in unset_keys))
def list_command(args, packages=None, exit_on_failure=True):
    """List installed dependencies with configurable filters

    :param args: Command arguments
    :param packages: Collection of packages
    :param exit_on_failure: Enable/disable exiting application on failure
    :return: True on success, False if requirement file validation fails
    """
    # CLI arguments take precedence over configured defaults
    requirements_files = (args.requirements or Config.requirements_files
                          or [])
    ignore_list = (args.ignore or Config.ignore_list or [])
    printer = Printer()
    # Abort early if any requirements file is missing/unusable
    if not validate_files(files=requirements_files,
                          printer=printer,
                          exit_on_failure=exit_on_failure):
        return False
    # Merge all requirement files into a single collection
    requirements = RequirementCollection()
    for requirements_file in requirements_files:
        requirements.extend(
            RequirementCollection.from_file(filepath=requirements_file))
    # dependency_list distinguishes None from an empty collection
    requirements = requirements if requirements else None
    packages = (packages or dependency_list(ignore_list=ignore_list,
                                            requirements=requirements))
    headers = [messages.PACKAGE, messages.INSTALLED]
    # One table row per package: colorized key plus installed version
    tabular_data = [[
        printer.colored_message(message=package.key,
                                message_color=printer.color_package),
        package.version_id
    ] for package in packages]
    if tabular_data:
        printer.table(headers=headers, tabular_data=tabular_data)
    else:
        printer.info(messages.PACKAGES_NOT_FOUND)
    return True
def missing_requirements(packages=None, requirements=None, ignore_list=None):
    """Returns all requirements that are not installed

    :param packages: Collection of packages
    :param requirements: Collection of requirements
    :param ignore_list: List of package keys to ignore
    :return: All requirements that are not found in packages
    """
    packages = packages or PackageCollection()
    requirements = requirements or RequirementCollection()
    ignore_list = ignore_list or []
    # Build the exclusion set once; the original re-concatenated
    # ignore_list + packages.keys() for every requirement checked.
    excluded = set(ignore_list) | set(packages.keys())
    return [(requirement, required_by(requirement, packages))
            for requirement in requirements.flatten()
            if requirement.key not in excluded]
def lock_version_mismatch(packages=None, locked=None):
    """Return detected version mismatches between packages and locked
    requirements

    :param packages: Collection of packages
    :param locked: Collection of locked requirements
    :return: All detected mismatches between installed packages and locked
        requirements
    """
    if not packages:
        packages = PackageCollection()
    if not locked:
        locked = RequirementCollection()
    mismatches = []
    for package in packages:
        for lock in locked:
            if package.key != lock.key:
                continue
            # Same package, different pinned version -> mismatch
            if package.specified_version != lock.specified_version:
                mismatches.append((package, str(lock.specified_version)))
    return mismatches
def test_check_unset_locks(capsys, test_data):
    """check_unset_locks reports requirements missing from the lock file."""
    # preconditions
    requirements_list, locked_list = test_data
    requirements = RequirementCollection([
        Requirement.from_requirement_string(item)
        for item in requirements_list
    ])
    locked = RequirementCollection([
        Requirement.from_requirement_string(item) for item in locked_list
    ])

    # action
    success = validate.check_unset_locks(requirements=requirements,
                                         locked=locked)

    # verification
    captured = capsys.readouterr()
    assert success is False if requirements_list != locked_list else success
    expected = (messages.UNSET_LOCKS_OK
                if success else messages.UNSET_LOCKS_FOUND)
    assert expected in captured.out
    for item in requirements_list:
        if item not in locked_list:
            assert item in captured.out
def test_check_unlocked_requirements(capsys, unlocked):
    """check_unlocked_requirements reports requirements without versions."""
    # preconditions
    requirements = RequirementCollection(
        Requirement.from_requirement_string(item) for item in unlocked)

    # action
    success = validate.check_unlocked_requirements(requirements=requirements)

    # verification
    captured = capsys.readouterr()
    assert success is False if unlocked else success
    expected = (messages.UNLOCKED_REQUIREMENTS_OK
                if success else messages.UNLOCKED_REQUIREMENTS_FOUND)
    assert expected in captured.out
    for item in unlocked:
        assert item in captured.out
def test_parse_requirements_file():
    """The requirements file parses into the expected collection."""
    # preconditions
    num_packages = 3
    version = RequiredVersion('1.0.0')
    expected_requirements = RequirementCollection([
        Requirement(key='package{}'.format(index),
                    name='p{}'.format(index),
                    obj=None,
                    version=version)
        for index in range(1, num_packages + 1)
    ])

    # action
    requirements = PipParser.parse_requirements_file(
        filepath=FILE_DIR / 'requirements.txt')

    # verification
    assert requirements == expected_requirements
def test_dependency_list(virtualenv, mocker):
    """dependency_list returns all packages, or only the required ones."""
    # preconditions
    dependencies_dir = PACKAGES_DIR / 'dependencies'
    working_set = prepare_working_set(
        venv=virtualenv,
        packages=[
            dependencies_dir / 'dependency-package1',
            dependencies_dir / 'dependency-package2',
        ])
    requirements = RequirementCollection(
        [Requirement.from_requirement_string('dependency-package1')])
    mocker.patch('dante.vendor.pkg_resources.working_set', working_set)

    # action
    installed_dependencies = operations.dependency_list()
    installed_dependencies_with_requirements = operations.dependency_list(
        requirements=requirements)

    # verification
    assert installed_dependencies.keys() == [
        'dependency-package1', 'dependency-package2'
    ]
    assert installed_dependencies_with_requirements.keys() == [
        'dependency-package1'
    ]
def test_save_lock_file(mocker, tmp_path):
    """Saving a lock file produces the expected file contents."""
    # preconditions
    num_packages = 6
    version = '1.0.0'
    filepath = tmp_path / 'requirements.lock'
    expected_data = (FILE_DIR / 'requirements.lock').read_text()
    requirements = RequirementCollection([
        Requirement.from_requirement_string(
            requirement_string='package{}=={}'.format(index, version))
        for index in range(1, num_packages + 1)
    ])

    # action
    mocker.patch('dante.core.models.Requirement.version_id', version)
    PipParser.save_lock_file(requirements=requirements, filepath=filepath)

    # verification
    assert filepath.read_text() == expected_data
def lock_command(args, packages=None, exit_on_failure=True):
    """Display or save locked requirements for current environment

    :param args: Command arguments
    :param packages: Collection of packages
    :param exit_on_failure: Enable/disable exiting application on failure
    :return: True on success, False if requirement file validation fails
    """
    # CLI arguments take precedence over configured defaults
    requirements_files = (args.requirements or Config.requirements_files
                          or [])
    ignore_list = (args.ignore or Config.ignore_list or [])
    save_lock = args.save or False
    lock_filepath = args.file or Config.lock_file_path
    printer = Printer()
    # Abort early if any requirements file is missing/unusable
    if not validate_files(files=requirements_files,
                          printer=printer,
                          exit_on_failure=exit_on_failure):
        return False
    # Merge all requirement files into a single collection
    requirements = RequirementCollection()
    for requirements_file in requirements_files:
        requirements.extend(
            RequirementCollection.from_file(filepath=requirements_file))
    # Explicitly required packages must not be ignored when locking
    lock_ignore_list = [
        key for key in ignore_list if key not in requirements.keys()
    ]
    packages = (packages or dependency_list(ignore_list=lock_ignore_list))
    locked = locked_requirements(packages=packages,
                                 requirements=requirements)
    if save_lock:
        # Persist the lock to disk and report where it was written
        locked.save_lock_file(filepath=lock_filepath)
        printer.success(message=messages.LOCK_EXPORTED.format(
            file_path=lock_filepath))
    else:
        # No save requested: print pinned requirements to stdout
        for item in locked:
            printer.info("{}=={}".format(
                printer.colored_message(item.key, color.DEFAULT_PACKAGE),
                item.version_id))
    return True
def missing_requirements_command(args, packages=None, exit_on_failure=True):
    """Runs detection of required packages that are not installed

    :param args: Command arguments
    :param packages: Collection of packages
    :param exit_on_failure: Enable/disable exiting application on failure
    :return: True when nothing is missing, False otherwise (may exit the
        process instead when exit_on_failure is set)
    """
    # CLI arguments take precedence over configured defaults
    requirements_files = (
        args.requirements or Config.requirements_files or []
    )
    ignore_list = (
        args.ignore or Config.ignore_list or []
    )
    printer = Printer()
    # Abort early if any requirements file is missing/unusable
    if not validate_files(
            files=requirements_files,
            printer=printer,
            exit_on_failure=exit_on_failure):
        return False

    # Merge all requirement files into a single collection
    requirements = RequirementCollection()
    for requirements_file in requirements_files:
        requirements.extend(
            RequirementCollection.from_file(filepath=requirements_file)
        )
    packages = (
        packages or dependency_list(ignore_list=ignore_list)
    )

    # The original wrapped this call in an identity list comprehension;
    # missing_requirements already returns the (package, requirers) pairs.
    missing = missing_requirements(
        packages=packages,
        requirements=requirements,
        ignore_list=ignore_list
    )

    headers = [
        messages.PACKAGE,
        messages.REQUIRED,
        messages.REQUIRED_BY,
    ]
    tabular_data = []
    for package, requirers in missing:
        if requirers:
            # Loop variable renamed from `required_by` to avoid shadowing
            # the helper function of the same name
            for requirer, required_version in requirers:
                tabular_data.append([
                    printer.colored_message(
                        message=package.key,
                        message_color=printer.color_package
                    ),
                    required_version,
                    requirer.key,
                ])
        else:
            # Requirement comes straight from a requirements file
            tabular_data.append([
                printer.colored_message(
                    message=package.key,
                    message_color=printer.color_package
                ),
                package.version.specifier,
                "Requirements",
            ])

    if tabular_data:
        printer.error(messages.MISSING_FOUND)
        printer.table(headers=headers, tabular_data=tabular_data)
        if exit_on_failure:
            sys.exit(1)
        return False
    printer.success(messages.MISSING_OK)
    return True
def tree_command(args, packages=None, exit_on_failure=True):
    """Display dependency tree for a single package or the entire environment

    :param args: Command arguments
    :param packages: Collection of packages
    :param exit_on_failure: Enable/disable exiting application on failure
    :return: True on success, False if requirement file validation fails
        (exits with status 1 when the requested package is not installed)
    """
    package_key = args.package
    # CLI arguments take precedence over configured defaults
    requirements_files = (args.requirements or Config.requirements_files
                          or [])
    ignore_list = (args.ignore or Config.ignore_list or [])
    printer = Printer()
    # Abort early if any requirements file is missing/unusable
    if not validate_files(files=requirements_files,
                          printer=printer,
                          exit_on_failure=exit_on_failure):
        return False
    # Merge all requirement files into a single collection
    requirements = RequirementCollection()
    for requirements_file in requirements_files:
        requirements.extend(
            RequirementCollection.from_file(filepath=requirements_file))
    # Display templates: roots show only the installed version; nested
    # requirements also show the required version spec
    package_string = '{package} [{installed}: {version}]'
    requirement_string = (
        '{spacing}{package} [{installed}: {version} | {required}: {spec}]')
    packages = (packages or dependency_list(ignore_list=ignore_list))
    if package_key:
        # Single-package mode: root the tree at that package only
        package = packages.get(key=package_key)
        if not package:
            printer.error(
                messages.PACKAGE_NOT_FOUND.format(package=package_key))
            sys.exit(1)
        tree = {package: package_dependency_tree(dependency=package)}
    else:
        # Whole-environment mode; dependency_tree distinguishes None from
        # an empty requirements collection
        tree = dependency_tree(
            packages=packages,
            requirements=requirements if requirements else None)
    if not tree:
        printer.info(messages.PACKAGES_NOT_FOUND)

    def print_dependency_tree(requirements_list, indent=0):
        # Recursively print nested requirements, two extra spaces per level
        spacing = ' ' * indent
        for requirement in requirements_list:
            printer.info(
                requirement_string.format(
                    spacing=spacing,
                    package=printer.colored_message(
                        message=requirement.key,
                        message_color=printer.color_package),
                    installed=messages.INSTALLED,
                    version=requirement.version_id,
                    required=messages.REQUIRED,
                    spec=requirement.specified_version),
            )
            print_dependency_tree(requirements_list[requirement], indent + 2)

    # Print each root package followed by its nested dependency tree
    for dependency in tree:
        printer.info(
            package_string.format(package=printer.colored_message(
                message=dependency.key,
                message_color=printer.color_package),
                                  installed=messages.INSTALLED,
                                  version=dependency.version))
        print_dependency_tree(tree[dependency], indent=2)
    return True
def validate_command(args, packages=None, exit_on_failure=True):
    """Runs requirement file validation

    :param args: Command arguments
    :param packages: Collection of packages
    :param exit_on_failure: Enable/disable exiting application on failure
    :return: True when every check passes, False when a check fails
        (exits with status 1 instead when exit_on_failure is set)
    """
    strict = args.strict or False
    # CLI arguments take precedence over configured defaults
    ignore_list = (
        args.ignore or Config.ignore_list or []
    )
    requirements_files = (
        args.requirements or Config.requirements_files or []
    )
    lock_files = (
        args.lock or Config.lock_files or []
    )
    printer = Printer()
    # Abort early if any requirements or lock file is missing/unusable
    if not validate_files(
            files=requirements_files,
            printer=printer,
            exit_on_failure=exit_on_failure
            ) or not validate_files(
            files=lock_files,
            printer=printer,
            exit_on_failure=exit_on_failure):
        return False
    try:
        requirements = RequirementCollection.from_files(
            filepaths=requirements_files
        )
        locked = RequirementCollection.from_files(filepaths=lock_files)
    except Exception as e:
        # Always exit on invalid requirements
        printer.error('{}: {}'.format(messages.REQUIREMENTS_PARSING_ERROR, e))
        sys.exit(1)

    checks_ok = []
    packages = (
        packages or dependency_list(ignore_list=ignore_list)
    )
    checks_ok.append(check_unlocked_requirements(
        requirements=requirements,
        printer=printer,
    ))
    checks_ok.append(check_unset_locks(
        requirements=requirements,
        locked=locked,
        printer=printer,
    ))
    checks_ok.append(check_package_version_mismatch(
        packages=packages,
        locked=locked,
        printer=printer,
    ))
    checks_ok.append(check_requirement_version_mismatch(
        requirements=requirements,
        locked=locked,
        printer=printer,
    ))
    if strict:
        # Strict mode adds checks for packages/locks nothing requires
        checks_ok.append(check_unnecessary_packages(
            packages=packages,
            requirements=requirements,
            locked=locked,
            printer=printer,
        ))
        checks_ok.append(check_unnecessary_locks(
            requirements=requirements,
            locked=locked,
            ignore_list=ignore_list,
            printer=printer,
        ))
    # BUG FIX: the original single conditional expression parsed as
    #   sys.exit(1) if exit_on_failure else (False if not all(...) else True)
    # so the command exited with status 1 whenever exit_on_failure was set,
    # even when every check passed. Exit/fail only on an actual failure.
    if not all(checks_ok):
        if exit_on_failure:
            sys.exit(1)
        return False
    return True