def set_lock_data(self, root, packages):  # type: (...) -> bool
    files = table()
    packages = self._lock_packages(packages)

    # Retrieving hashes
    for package in packages:
        if package["name"] not in files:
            files[package["name"]] = []

        for f in package["files"]:
            file_metadata = inline_table()
            for k, v in sorted(f.items()):
                file_metadata[k] = v

            files[package["name"]].append(file_metadata)

        if files[package["name"]]:
            files[package["name"]] = item(files[package["name"]]).multiline(True)

        del package["files"]

    lock = document()
    lock["package"] = packages

    if root.extras:
        lock["extras"] = {
            extra: [dep.pretty_name for dep in deps]
            for extra, deps in sorted(root.extras.items())
        }

    lock["metadata"] = OrderedDict(
        [
            ("lock-version", self._VERSION),
            ("python-versions", root.python_versions),
            ("content-hash", self._content_hash),
            ("files", files),
        ]
    )

    if not self.is_locked() or lock != self.lock_data:
        self._write_lock_data(lock)

        return True

    return False
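# A minimal, self-contained sketch of the tomlkit calls set_lock_data relies on
# (assumes tomlkit is installed; the package name and hash below are made up):
from tomlkit import document, inline_table, item, table

file_metadata = inline_table()
file_metadata["file"] = "demo-1.0-py3-none-any.whl"
file_metadata["hash"] = "sha256:abc123"

files = table()
files["demo"] = item([file_metadata]).multiline(True)

metadata = table()
metadata["lock-version"] = "1.1"
metadata["files"] = files

lock = document()
lock["metadata"] = metadata
print(lock.as_string())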
class Executor(object):
    def __init__(self, env, pool, config, io, parallel=True):
        self._env = env
        self._io = io
        self._dry_run = False
        self._enabled = True
        self._verbose = False
        self._authenticator = Authenticator(config, self._io)
        self._chef = Chef(config, self._env)
        self._chooser = Chooser(pool, self._env)

        if parallel and not (PY2 and WINDOWS):
            # This should be directly handled by ThreadPoolExecutor
            # however, on some systems the number of CPUs cannot be determined
            # (it raises a NotImplementedError), so, in this case, we assume
            # that the system only has one CPU.
            try:
                self._max_workers = cpu_count() + 4
            except NotImplementedError:
                self._max_workers = 5
        else:
            self._max_workers = 1

        self._executor = ThreadPoolExecutor(max_workers=self._max_workers)
        self._total_operations = 0
        self._executed_operations = 0
        self._executed = {"install": 0, "update": 0, "uninstall": 0}
        self._skipped = {"install": 0, "update": 0, "uninstall": 0}
        self._sections = OrderedDict()
        self._lock = threading.Lock()
        self._shutdown = False

    @property
    def installations_count(self):  # type: () -> int
        return self._executed["install"]

    @property
    def updates_count(self):  # type: () -> int
        return self._executed["update"]

    @property
    def removals_count(self):  # type: () -> int
        return self._executed["uninstall"]

    def supports_fancy_output(self):  # type: () -> bool
        return self._io.supports_ansi() and not self._dry_run

    def disable(self):
        self._enabled = False

        return self

    def dry_run(self, dry_run=True):
        self._dry_run = dry_run

        return self

    def verbose(self, verbose=True):
        self._verbose = verbose

        return self

    def execute(self, operations):  # type: (List[Operation]) -> int
        self._total_operations = len(operations)
        for job_type in self._executed:
            self._executed[job_type] = 0
            self._skipped[job_type] = 0

        if operations and (self._enabled or self._dry_run):
            self._display_summary(operations)

        # We group operations by priority
        groups = itertools.groupby(operations, key=lambda o: -o.priority)
        self._sections = OrderedDict()
        for _, group in groups:
            tasks = []
            serial_operations = []
            for operation in group:
                if self._shutdown:
                    break

                # Some operations are unsafe, we must execute them serially in a group
                # https://github.com/python-poetry/poetry/issues/3086
                # https://github.com/python-poetry/poetry/issues/2658
                #
                # We need to explicitly check source type here, see:
                # https://github.com/python-poetry/poetry-core/pull/98
                is_parallel_unsafe = operation.job_type == "uninstall" or (
                    operation.package.develop
                    and operation.package.source_type in {"directory", "git"}
                )
                if not operation.skipped and is_parallel_unsafe:
                    serial_operations.append(operation)
                    continue

                tasks.append(self._executor.submit(self._execute_operation, operation))

            try:
                wait(tasks)

                for operation in serial_operations:
                    wait([self._executor.submit(self._execute_operation, operation)])
            except KeyboardInterrupt:
                self._shutdown = True

            if self._shutdown:
                # Cancelling further tasks from being executed
                for task in tasks:
                    task.cancel()

                self._executor.shutdown(wait=True)

                break

        return 1 if self._shutdown else 0

    def _write(self, operation, line):
        if not self.supports_fancy_output() or not self._should_write_operation(
            operation
        ):
            return

        if self._io.is_debug():
            with self._lock:
                section = self._sections[id(operation)]
                section.write_line(line)

            return

        with self._lock:
            section = self._sections[id(operation)]
            section.output.clear()
            section.write(line)

    def _execute_operation(self, operation):
        try:
            if self.supports_fancy_output():
                if id(operation) not in self._sections:
                    if self._should_write_operation(operation):
                        with self._lock:
                            self._sections[id(operation)] = self._io.section()
                            self._sections[id(operation)].write_line(
                                " <fg=blue;options=bold>•</> {message}: <fg=blue>Pending...</>".format(
                                    message=self.get_operation_message(operation),
                                ),
                            )
            else:
                if self._should_write_operation(operation):
                    if not operation.skipped:
                        self._io.write_line(
                            " <fg=blue;options=bold>•</> {message}".format(
                                message=self.get_operation_message(operation),
                            ),
                        )
                    else:
                        self._io.write_line(
                            " <fg=default;options=bold,dark>•</> {message}: "
                            "<fg=default;options=bold,dark>Skipped</> "
                            "<fg=default;options=dark>for the following reason:</> "
                            "<fg=default;options=bold,dark>{reason}</>".format(
                                message=self.get_operation_message(operation),
                                reason=operation.skip_reason,
                            )
                        )

            try:
                result = self._do_execute_operation(operation)
            except EnvCommandError as e:
                if e.e.returncode == -2:
                    result = -2
                else:
                    raise

            # If we have a result of -2 it means a KeyboardInterrupt
            # in any python subprocess, so we raise a KeyboardInterrupt
            # error to be picked up by the error handler.
            if result == -2:
                raise KeyboardInterrupt
        except Exception as e:
            try:
                from clikit.ui.components.exception_trace import ExceptionTrace

                if not self.supports_fancy_output():
                    io = self._io
                else:
                    message = " <error>•</error> {message}: <error>Failed</error>".format(
                        message=self.get_operation_message(operation, error=True),
                    )
                    self._write(operation, message)
                    io = self._sections.get(id(operation), self._io)

                with self._lock:
                    trace = ExceptionTrace(e)
                    trace.render(io)
                    io.write_line("")
            finally:
                with self._lock:
                    self._shutdown = True
        except KeyboardInterrupt:
            try:
                message = " <warning>•</warning> {message}: <warning>Cancelled</warning>".format(
                    message=self.get_operation_message(operation, warning=True),
                )
                if not self.supports_fancy_output():
                    self._io.write_line(message)
                else:
                    self._write(operation, message)
            finally:
                with self._lock:
                    self._shutdown = True

    def _do_execute_operation(self, operation):
        method = operation.job_type
        operation_message = self.get_operation_message(operation)
        if operation.skipped:
            if self.supports_fancy_output():
                self._write(
                    operation,
                    " <fg=default;options=bold,dark>•</> {message}: "
                    "<fg=default;options=bold,dark>Skipped</> "
                    "<fg=default;options=dark>for the following reason:</> "
                    "<fg=default;options=bold,dark>{reason}</>".format(
                        message=operation_message,
                        reason=operation.skip_reason,
                    ),
                )

            self._skipped[operation.job_type] += 1

            return 0

        if not self._enabled or self._dry_run:
            self._io.write_line(
                " <fg=blue;options=bold>•</> {message}".format(
                    message=operation_message,
                )
            )

            return 0

        result = getattr(self, "_execute_{}".format(method))(operation)

        if result != 0:
            return result

        message = " <fg=green;options=bold>•</> {message}".format(
            message=self.get_operation_message(operation, done=True),
        )
        self._write(operation, message)

        self._increment_operations_count(operation, True)

        return result

    def _increment_operations_count(self, operation, executed):
        with self._lock:
            if executed:
                self._executed_operations += 1
                self._executed[operation.job_type] += 1
            else:
                self._skipped[operation.job_type] += 1

    def run_pip(self, *args, **kwargs):  # type: (...) -> int
        try:
            self._env.run_pip(*args, **kwargs)
        except EnvCommandError as e:
            output = decode(e.e.output)
            if (
                "KeyboardInterrupt" in output
                or "ERROR: Operation cancelled by user" in output
            ):
                return -2

            raise

        return 0

    def get_operation_message(self, operation, done=False, error=False, warning=False):
        base_tag = "fg=default"
        operation_color = "c2"
        source_operation_color = "c2"
        package_color = "c1"

        if error:
            operation_color = "error"
        elif warning:
            operation_color = "warning"
        elif done:
            operation_color = "success"

        if operation.skipped:
            base_tag = "fg=default;options=dark"
            operation_color += "_dark"
            source_operation_color += "_dark"
            package_color += "_dark"

        if operation.job_type == "install":
            return "<{}>Installing <{}>{}</{}> (<{}>{}</>)</>".format(
                base_tag,
                package_color,
                operation.package.name,
                package_color,
                operation_color,
                operation.package.full_pretty_version,
            )

        if operation.job_type == "uninstall":
            return "<{}>Removing <{}>{}</{}> (<{}>{}</>)</>".format(
                base_tag,
                package_color,
                operation.package.name,
                package_color,
                operation_color,
                operation.package.full_pretty_version,
            )

        if operation.job_type == "update":
            return "<{}>Updating <{}>{}</{}> (<{}>{}</{}> -> <{}>{}</>)</>".format(
                base_tag,
                package_color,
                operation.initial_package.name,
                package_color,
                source_operation_color,
                operation.initial_package.full_pretty_version,
                source_operation_color,
                operation_color,
                operation.target_package.full_pretty_version,
            )

        return ""

    def _display_summary(self, operations):
        installs = 0
        updates = 0
        uninstalls = 0
        skipped = 0
        for op in operations:
            if op.skipped:
                skipped += 1
                continue

            if op.job_type == "install":
                installs += 1
            elif op.job_type == "update":
                updates += 1
            elif op.job_type == "uninstall":
                uninstalls += 1

        if not installs and not updates and not uninstalls and not self._verbose:
            self._io.write_line("")
            self._io.write_line("No dependencies to install or update")

            return

        self._io.write_line("")
        self._io.write_line(
            "<b>Package operations</b>: "
            "<info>{}</> install{}, "
            "<info>{}</> update{}, "
            "<info>{}</> removal{}"
            "{}".format(
                installs,
                "" if installs == 1 else "s",
                updates,
                "" if updates == 1 else "s",
                uninstalls,
                "" if uninstalls == 1 else "s",
                ", <info>{}</> skipped".format(skipped)
                if skipped and self._verbose
                else "",
            )
        )
        self._io.write_line("")

    def _execute_install(self, operation):  # type: (Install) -> int
        return self._install(operation)

    def _execute_update(self, operation):  # type: (Update) -> int
        return self._update(operation)

    def _execute_uninstall(self, operation):  # type: (Uninstall) -> int
        message = " <fg=blue;options=bold>•</> {message}: <info>Removing...</info>".format(
            message=self.get_operation_message(operation),
        )
        self._write(operation, message)

        return self._remove(operation)

    def _install(self, operation):
        package = operation.package
        if package.source_type == "directory":
            return self._install_directory(operation)

        if package.source_type == "git":
            return self._install_git(operation)

        if package.source_type == "file":
            archive = self._prepare_file(operation)
        elif package.source_type == "url":
            archive = self._download_link(operation, Link(package.source_url))
        else:
            archive = self._download(operation)

        operation_message = self.get_operation_message(operation)
        message = " <fg=blue;options=bold>•</> {message}: <info>Installing...</info>".format(
            message=operation_message,
        )
        self._write(operation, message)

        args = ["install", "--no-deps", str(archive)]
        if operation.job_type == "update":
            args.insert(2, "-U")

        return self.run_pip(*args)

    def _update(self, operation):
        return self._install(operation)

    def _remove(self, operation):
        package = operation.package

        # If we have a VCS package, remove its source directory
        if package.source_type == "git":
            src_dir = self._env.path / "src" / package.name
            if src_dir.exists():
                safe_rmtree(str(src_dir))

        try:
            return self.run_pip("uninstall", package.name, "-y")
        except CalledProcessError as e:
            if "not installed" in str(e):
                return 0

            raise

    def _prepare_file(self, operation):
        package = operation.package

        message = " <fg=blue;options=bold>•</> {message}: <info>Preparing...</info>".format(
            message=self.get_operation_message(operation),
        )
        self._write(operation, message)

        archive = Path(package.source_url)
        if not Path(package.source_url).is_absolute() and package.root_dir:
            archive = package.root_dir / archive

        archive = self._chef.prepare(archive)

        return archive

    def _install_directory(self, operation):
        from poetry.factory import Factory

        package = operation.package
        operation_message = self.get_operation_message(operation)

        message = " <fg=blue;options=bold>•</> {message}: <info>Building...</info>".format(
            message=operation_message,
        )
        self._write(operation, message)

        if package.root_dir:
            req = os.path.join(str(package.root_dir), package.source_url)
        else:
            req = os.path.realpath(package.source_url)

        args = ["install", "--no-deps", "-U"]

        pyproject = PyProjectTOML(os.path.join(req, "pyproject.toml"))

        if pyproject.is_poetry_project():
            # Even if there is a build system specified
            # some versions of pip (< 19.0.0) don't understand it
            # so we need to check the version of pip to know
            # if we can rely on the build system
            legacy_pip = self._env.pip_version < self._env.pip_version.__class__(
                19, 0, 0
            )
            package_poetry = Factory().create_poetry(pyproject.file.path.parent)

            if package.develop and not package_poetry.package.build_script:
                from poetry.masonry.builders.editable import EditableBuilder

                # This is a Poetry package in editable mode
                # we can use the EditableBuilder without going through pip
                # to install it, unless it has a build script.
                builder = EditableBuilder(package_poetry, self._env, NullIO())
                builder.build()

                return 0
            elif legacy_pip or package_poetry.package.build_script:
                from poetry.core.masonry.builders.sdist import SdistBuilder

                # We need to rely on creating a temporary setup.py
                # file since the version of pip does not support
                # build-systems
                # We also need it for non-PEP-517 packages
                builder = SdistBuilder(package_poetry)

                with builder.setup_py():
                    if package.develop:
                        args.append("-e")

                    args.append(req)

                    return self.run_pip(*args)

        if package.develop:
            args.append("-e")

        args.append(req)

        return self.run_pip(*args)

    def _install_git(self, operation):
        from poetry.core.vcs import Git

        package = operation.package
        operation_message = self.get_operation_message(operation)

        message = " <fg=blue;options=bold>•</> {message}: <info>Cloning...</info>".format(
            message=operation_message,
        )
        self._write(operation, message)

        src_dir = self._env.path / "src" / package.name
        if src_dir.exists():
            safe_rmtree(str(src_dir))

        src_dir.parent.mkdir(exist_ok=True)

        git = Git()
        git.clone(package.source_url, src_dir)
        git.checkout(package.source_reference, src_dir)

        # Now we just need to install from the source directory
        package._source_url = str(src_dir)

        return self._install_directory(operation)

    def _download(self, operation):  # type: (Operation) -> Path
        link = self._chooser.choose_for(operation.package)

        return self._download_link(operation, link)

    def _download_link(self, operation, link):
        package = operation.package

        archive = self._chef.get_cached_archive_for_link(link)
        if archive is link:
            # No cached distribution was found, so we download and prepare it
            try:
                archive = self._download_archive(operation, link)
            except BaseException:
                cache_directory = self._chef.get_cache_directory_for_link(link)
                cached_file = cache_directory.joinpath(link.filename)
                # We can't use unlink(missing_ok=True) because it's not available
                # in pathlib2 for Python 2.7
                if cached_file.exists():
                    cached_file.unlink()

                raise

            # TODO: Check readability of the created archive

            if not link.is_wheel:
                archive = self._chef.prepare(archive)

        if package.files:
            archive_hash = "sha256:" + FileDependency(package.name, archive).hash()
            if archive_hash not in {f["hash"] for f in package.files}:
                raise RuntimeError(
                    "Invalid hash for {} using archive {}".format(package, archive.name)
                )

        return archive

    def _download_archive(self, operation, link):  # type: (Operation, Link) -> Path
        response = self._authenticator.request(
            "get", link.url, stream=True, io=self._sections.get(id(operation), self._io)
        )
        wheel_size = response.headers.get("content-length")
        operation_message = self.get_operation_message(operation)
        message = " <fg=blue;options=bold>•</> {message}: <info>Downloading...</>".format(
            message=operation_message,
        )
        progress = None
        if self.supports_fancy_output():
            if wheel_size is None:
                self._write(operation, message)
            else:
                from clikit.ui.components.progress_bar import ProgressBar

                progress = ProgressBar(
                    self._sections[id(operation)].output, max=int(wheel_size)
                )
                progress.set_format(message + " <b>%percent%%</b>")

        if progress:
            with self._lock:
                progress.start()

        done = 0
        archive = self._chef.get_cache_directory_for_link(link) / link.filename
        archive.parent.mkdir(parents=True, exist_ok=True)
        with archive.open("wb") as f:
            for chunk in response.iter_content(chunk_size=4096):
                if not chunk:
                    break

                done += len(chunk)

                if progress:
                    with self._lock:
                        progress.set_progress(done)

                f.write(chunk)

        if progress:
            with self._lock:
                progress.finish()

        return archive

    def _should_write_operation(self, operation):  # type: (Operation) -> bool
        if not operation.skipped:
            return True

        return self._dry_run or self._verbose
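# Standalone illustration of the submit/wait pattern that execute() builds on
# (pure stdlib; the worker function and task count are made up):
from concurrent.futures import ThreadPoolExecutor, wait

with ThreadPoolExecutor(max_workers=4) as pool:
    tasks = [pool.submit(pow, 2, n) for n in range(8)]
    wait(tasks)  # block until the whole group has finished, like one priority group
    print([task.result() for task in tasks])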
def complete_package(self, package):  # type: (DependencyPackage) -> DependencyPackage
    if package.is_root():
        package = package.clone()

    if not package.is_root() and package.source_type not in {
        "directory",
        "file",
        "url",
        "git",
    }:
        package = DependencyPackage(
            package.dependency,
            self._pool.package(
                package.name,
                package.version.text,
                extras=package.requires_extras,
                repository=package.dependency.source_name,
            ),
        )

    dependencies = [
        r
        for r in package.requires
        if self._package.python_constraint.allows_any(r.python_constraint)
    ]

    # Searching for duplicate dependencies
    #
    # If the duplicate dependencies have the same constraint,
    # the requirements will be merged.
    #
    # For instance:
    #   - enum34; python_version=="2.7"
    #   - enum34; python_version=="3.3"
    #
    # will become:
    #   - enum34; python_version=="2.7" or python_version=="3.3"
    #
    # If the duplicate dependencies have different constraints
    # we have to split the dependency graph.
    #
    # An example of this is:
    #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
    #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
    duplicates = OrderedDict()
    for dep in dependencies:
        if dep.name not in duplicates:
            duplicates[dep.name] = []

        duplicates[dep.name].append(dep)

    dependencies = []
    for dep_name, deps in duplicates.items():
        if len(deps) == 1:
            dependencies.append(deps[0])
            continue

        self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))

        # Regrouping by constraint
        by_constraint = OrderedDict()
        for dep in deps:
            if dep.constraint not in by_constraint:
                by_constraint[dep.constraint] = []

            by_constraint[dep.constraint].append(dep)

        # We merge by constraint
        for constraint, _deps in by_constraint.items():
            new_markers = []
            for dep in _deps:
                marker = dep.marker.without_extras()
                if marker.is_empty():
                    # No marker or only extras
                    continue

                new_markers.append(marker)

            if not new_markers:
                continue

            dep = _deps[0]
            dep.marker = dep.marker.union(MarkerUnion(*new_markers))
            by_constraint[constraint] = [dep]

            continue

        if len(by_constraint) == 1:
            self.debug(
                "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
            )
            dependencies.append(list(by_constraint.values())[0][0])
            continue

        # We leave dependencies as-is if they have the same
        # python/platform constraints.
        # That way the resolver will pick up the conflict
        # and display a proper error.
        _deps = [value[0] for value in by_constraint.values()]
        seen = set()
        for _dep in _deps:
            pep_508_dep = _dep.to_pep_508(False)
            if ";" not in pep_508_dep:
                _requirements = ""
            else:
                _requirements = pep_508_dep.split(";")[1].strip()

            if _requirements not in seen:
                seen.add(_requirements)

        if len(_deps) != len(seen):
            for _dep in _deps:
                dependencies.append(_dep)

            continue

        # At this point, we raise an exception that will
        # tell the solver to enter compatibility mode,
        # which means it will resolve for subsets of the
        # Python constraint.
        #
        # For instance, if our root package requires Python ~2.7 || ^3.6
        # and we have one dependency that requires Python <3.6
        # and the other Python >=3.6, then the solver will solve
        # dependencies for Python >=2.7,<2.8 || >=3.4,<3.6
        # and Python >=3.6,<4.0.
        python_constraints = []
        for constraint, _deps in by_constraint.items():
            python_constraints.append(_deps[0].python_versions)

        _deps = [str(_dep[0]) for _dep in by_constraint.values()]
        self.debug(
            "<warning>Different requirements found for {}.</warning>".format(
                ", ".join(_deps[:-1]) + " and " + _deps[-1]
            )
        )

        raise CompatibilityError(*python_constraints)

    # Modifying dependencies as needed
    for dep in dependencies:
        if not package.dependency.python_constraint.is_any():
            dep.transitive_python_versions = str(
                dep.python_constraint.intersect(package.dependency.python_constraint)
            )

        if (package.dependency.is_directory() or package.dependency.is_file()) and (
            dep.is_directory() or dep.is_file()
        ):
            if dep.path.as_posix().startswith(package.source_url):
                relative = (Path(package.source_url) / dep.path).relative_to(
                    package.source_url
                )
            else:
                relative = Path(package.source_url) / dep.path

            # TODO: Improve the way we set the correct relative path for dependencies
            dep._path = relative

    package.requires = dependencies

    return package
def _dump_package(self, package):  # type: (Package) -> dict
    dependencies = OrderedDict()
    for dependency in sorted(package.requires, key=lambda d: d.name):
        if dependency.pretty_name not in dependencies:
            dependencies[dependency.pretty_name] = []

        constraint = inline_table()

        if dependency.is_directory() or dependency.is_file():
            constraint["path"] = dependency.path.as_posix()

            if dependency.is_directory() and dependency.develop:
                constraint["develop"] = True
        elif dependency.is_url():
            constraint["url"] = dependency.url
        elif dependency.is_vcs():
            constraint[dependency.vcs] = dependency.source

            if dependency.branch:
                constraint["branch"] = dependency.branch
            elif dependency.tag:
                constraint["tag"] = dependency.tag
            elif dependency.rev:
                constraint["rev"] = dependency.rev
        else:
            constraint["version"] = str(dependency.pretty_constraint)

        if dependency.extras:
            constraint["extras"] = sorted(dependency.extras)

        if dependency.is_optional():
            constraint["optional"] = True

        if not dependency.marker.is_any():
            constraint["markers"] = str(dependency.marker)

        dependencies[dependency.pretty_name].append(constraint)

    # All the constraints should have the same type,
    # but we want to simplify them if it's possible
    for dependency, constraints in tuple(dependencies.items()):
        if all(
            len(constraint) == 1 and "version" in constraint
            for constraint in constraints
        ):
            dependencies[dependency] = [
                constraint["version"] for constraint in constraints
            ]

    data = OrderedDict(
        [
            ("name", package.pretty_name),
            ("version", package.pretty_version),
            ("description", package.description or ""),
            ("category", package.category),
            ("optional", package.optional),
            ("python-versions", package.python_versions),
            ("files", sorted(package.files, key=lambda x: x["file"])),
        ]
    )

    if dependencies:
        data["dependencies"] = table()
        for k, constraints in dependencies.items():
            if len(constraints) == 1:
                data["dependencies"][k] = constraints[0]
            else:
                data["dependencies"][k] = array().multiline(True)
                for constraint in constraints:
                    data["dependencies"][k].append(constraint)

    if package.extras:
        extras = {}
        for name, deps in package.extras.items():
            # TODO: This should use dep.to_pep_508() once this is fixed
            # https://github.com/python-poetry/poetry-core/pull/102
            extras[name] = [
                dep.base_pep_508_name if not dep.constraint.is_any() else dep.name
                for dep in deps
            ]

        data["extras"] = extras

    if package.source_url:
        url = package.source_url
        if package.source_type in ["file", "directory"]:
            # The lock file should only store paths relative to the root project
            url = Path(
                os.path.relpath(
                    Path(url).as_posix(), self._lock.path.parent.as_posix()
                )
            ).as_posix()

        data["source"] = OrderedDict()

        if package.source_type:
            data["source"]["type"] = package.source_type

        data["source"]["url"] = url

        if package.source_reference:
            data["source"]["reference"] = package.source_reference

        if package.source_resolved_reference:
            data["source"]["resolved_reference"] = package.source_resolved_reference

        if package.source_type in ["directory", "git"]:
            data["develop"] = package.develop

    return data
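# The "simplify to bare version strings" step above, as a standalone snippet:
# a constraint list collapses only when "version" is the sole key of every
# entry (the constraint dicts here are made up):
constraints = [{"version": "^1.0"}]
if all(len(c) == 1 and "version" in c for c in constraints):
    constraints = [c["version"] for c in constraints]
print(constraints)  # ['^1.0'] instead of [{'version': '^1.0'}]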
def _parse_requirements(self, requirements):  # type: (List[str]) -> List[Dict[str, str]]
    from poetry.puzzle.provider import Provider

    result = []

    try:
        cwd = self.poetry.file.parent
    except RuntimeError:
        cwd = Path.cwd()

    for requirement in requirements:
        requirement = requirement.strip()
        extras = []
        extras_m = re.search(r"\[([\w\d,\-_]+)\]$", requirement)
        if extras_m:
            extras = [e.strip() for e in extras_m.group(1).split(",")]
            requirement, _ = requirement.split("[")

        url_parsed = urlparse.urlparse(requirement)
        if url_parsed.scheme and url_parsed.netloc:
            # Url
            if url_parsed.scheme in ["git+https", "git+ssh"]:
                from poetry.core.vcs.git import Git
                from poetry.core.vcs.git import ParsedUrl

                parsed = ParsedUrl.parse(requirement)
                url = Git.normalize_url(requirement)

                pair = OrderedDict([("name", parsed.name), ("git", url.url)])
                if parsed.rev:
                    pair["rev"] = url.revision

                if extras:
                    pair["extras"] = extras

                package = Provider.get_package_from_vcs(
                    "git", url.url, reference=pair.get("rev")
                )
                pair["name"] = package.name
                result.append(pair)

                continue
            elif url_parsed.scheme in ["http", "https"]:
                package = Provider.get_package_from_url(requirement)

                pair = OrderedDict(
                    [("name", package.name), ("url", package.source_url)]
                )
                if extras:
                    pair["extras"] = extras

                result.append(pair)
                continue
        elif (os.path.sep in requirement or "/" in requirement) and cwd.joinpath(
            requirement
        ).exists():
            path = cwd.joinpath(requirement)
            if path.is_file():
                package = Provider.get_package_from_file(path.resolve())
            else:
                package = Provider.get_package_from_directory(path)

            result.append(
                OrderedDict(
                    [
                        ("name", package.name),
                        ("path", path.relative_to(cwd).as_posix()),
                    ]
                    + ([("extras", extras)] if extras else [])
                )
            )

            continue

        pair = re.sub(
            "^([^@=: ]+)(?:@|==|(?<![<>~!])=|:| )(.*)$", "\\1 \\2", requirement
        )
        pair = pair.strip()

        require = OrderedDict()
        if " " in pair:
            name, version = pair.split(" ", 1)
            extras_m = re.search(r"\[([\w\d,\-_]+)\]$", name)
            if extras_m:
                extras = [e.strip() for e in extras_m.group(1).split(",")]
                name, _ = name.split("[")

            require["name"] = name
            if version != "latest":
                require["version"] = version
        else:
            m = re.match(
                r"^([^><=!: ]+)((?:>=|<=|>|<|!=|~=|~|\^).*)$", requirement.strip()
            )
            if m:
                name, constraint = m.group(1), m.group(2)
                extras_m = re.search(r"\[([\w\d,\-_]+)\]$", name)
                if extras_m:
                    extras = [e.strip() for e in extras_m.group(1).split(",")]
                    name, _ = name.split("[")

                require["name"] = name
                require["version"] = constraint
            else:
                extras_m = re.search(r"\[([\w\d,\-_]+)\]$", pair)
                if extras_m:
                    extras = [e.strip() for e in extras_m.group(1).split(",")]
                    pair, _ = pair.split("[")

                require["name"] = pair

        if extras:
            require["extras"] = extras

        result.append(require)

    return result
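# The name/version splitting used above, exercised standalone (inputs are made
# up; each supported separator is normalized to a single space before splitting):
import re

for requirement in ("requests==2.24.0", "requests@2.24.0", "flask 1.1.2"):
    pair = re.sub(
        "^([^@=: ]+)(?:@|==|(?<![<>~!])=|:| )(.*)$", "\\1 \\2", requirement
    ).strip()
    name, version = pair.split(" ", 1)
    print(name, version)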
def complete_package(self, package):  # type: (DependencyPackage) -> DependencyPackage
    if package.is_root():
        package = package.clone()
        requires = package.all_requires
    elif not package.is_root() and package.source_type not in {
        "directory",
        "file",
        "url",
        "git",
    }:
        package = DependencyPackage(
            package.dependency,
            self._pool.package(
                package.name,
                package.version.text,
                extras=package.dependency.extras,
                repository=package.dependency.source_name,
            ),
        )
        requires = package.requires
    else:
        requires = package.requires

    if self._load_deferred:
        # Retrieving constraints for deferred dependencies
        for r in requires:
            if r.is_directory():
                self.search_for_directory(r)
            elif r.is_file():
                self.search_for_file(r)
            elif r.is_vcs():
                self.search_for_vcs(r)
            elif r.is_url():
                self.search_for_url(r)

    optional_dependencies = []
    activated_extras = []
    for extra in package.dependency.extras:
        if extra not in package.extras:
            continue

        activated_extras.append(extra)
        optional_dependencies += [d.name for d in package.extras[extra]]

    _dependencies = []

    # If some extras/features were required, we need to
    # add a special dependency representing the base package
    # to the current package
    if package.dependency.extras:
        if activated_extras:
            package = package.with_features(activated_extras)

        _dependencies.append(package.without_features().to_dependency())

    for dep in requires:
        if not self._python_constraint.allows_any(dep.python_constraint):
            continue

        if dep.name in self.UNSAFE_PACKAGES:
            continue

        if self._env and not dep.marker.validate(self._env.marker_env):
            continue

        if (
            dep.is_optional()
            and dep.name not in optional_dependencies
            and not package.is_root()
        ):
            continue

        _dependencies.append(dep)

    overrides = self._overrides.get(package, {})
    dependencies = []
    overridden = []
    for dep in _dependencies:
        if dep.name in overrides:
            if dep.name in overridden:
                continue

            dependencies.append(overrides[dep.name])
            overridden.append(dep.name)

            continue

        dependencies.append(dep)

    # Searching for duplicate dependencies
    #
    # If the duplicate dependencies have the same constraint,
    # the requirements will be merged.
    #
    # For instance:
    #   - enum34; python_version=="2.7"
    #   - enum34; python_version=="3.3"
    #
    # will become:
    #   - enum34; python_version=="2.7" or python_version=="3.3"
    #
    # If the duplicate dependencies have different constraints
    # we have to split the dependency graph.
    #
    # An example of this is:
    #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
    #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
    duplicates = OrderedDict()
    for dep in dependencies:
        if dep.name not in duplicates:
            duplicates[dep.name] = []

        duplicates[dep.name].append(dep)

    dependencies = []
    for dep_name, deps in duplicates.items():
        if len(deps) == 1:
            dependencies.append(deps[0])
            continue

        self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))

        # Regrouping by constraint
        by_constraint = OrderedDict()
        for dep in deps:
            if dep.constraint not in by_constraint:
                by_constraint[dep.constraint] = []

            by_constraint[dep.constraint].append(dep)

        # We merge by constraint
        for constraint, _deps in by_constraint.items():
            new_markers = []
            for dep in _deps:
                marker = dep.marker.without_extras()
                if marker.is_any():
                    # No marker or only extras
                    continue

                new_markers.append(marker)

            if not new_markers:
                continue

            dep = _deps[0]
            dep.marker = dep.marker.union(MarkerUnion(*new_markers))
            by_constraint[constraint] = [dep]

            continue

        if len(by_constraint) == 1:
            self.debug(
                "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
            )
            dependencies.append(list(by_constraint.values())[0][0])
            continue

        # We leave dependencies as-is if they have the same
        # python/platform constraints.
        # That way the resolver will pick up the conflict
        # and display a proper error.
        _deps = [value[0] for value in by_constraint.values()]
        seen = set()
        for _dep in _deps:
            pep_508_dep = _dep.to_pep_508(False)
            if ";" not in pep_508_dep:
                _requirements = ""
            else:
                _requirements = pep_508_dep.split(";")[1].strip()

            if _requirements not in seen:
                seen.add(_requirements)

        if len(_deps) != len(seen):
            for _dep in _deps:
                dependencies.append(_dep)

            continue

        # At this point, we raise an exception that will
        # tell the solver to make new resolutions with specific overrides.
        #
        # For instance, if the foo (1.2.3) package has the following dependencies:
        #   - bar (>=2.0) ; python_version >= "3.6"
        #   - bar (<2.0) ; python_version < "3.6"
        #
        # then the solver will need to make two new resolutions
        # with the following overrides:
        #   - {<Package foo (1.2.3): {"bar": <Dependency bar (>=2.0)>}
        #   - {<Package foo (1.2.3): {"bar": <Dependency bar (<2.0)>}
        markers = []
        for constraint, _deps in by_constraint.items():
            markers.append(_deps[0].marker)

        _deps = [_dep[0] for _dep in by_constraint.values()]
        self.debug(
            "<warning>Different requirements found for {}.</warning>".format(
                ", ".join(
                    "<c1>{}</c1> <fg=default>(<c2>{}</c2>)</> with markers <b>{}</b>".format(
                        d.name,
                        d.pretty_constraint,
                        d.marker if not d.marker.is_any() else "*",
                    )
                    for d in _deps[:-1]
                )
                + " and "
                + "<c1>{}</c1> <fg=default>(<c2>{}</c2>)</> with markers <b>{}</b>".format(
                    _deps[-1].name,
                    _deps[-1].pretty_constraint,
                    _deps[-1].marker if not _deps[-1].marker.is_any() else "*",
                )
            )
        )

        # We need to check if one of the duplicate dependencies
        # has no markers. If there is one, we need to change its
        # environment markers to the inverse of the union of the
        # other dependencies markers.
        # For instance, if we have the following dependencies:
        #   - ipython
        #   - ipython (1.2.4) ; implementation_name == "pypy"
        #
        # the marker for `ipython` will become `implementation_name != "pypy"`.
        any_markers_dependencies = [d for d in _deps if d.marker.is_any()]
        other_markers_dependencies = [d for d in _deps if not d.marker.is_any()]

        if any_markers_dependencies:
            marker = other_markers_dependencies[0].marker
            for other_dep in other_markers_dependencies[1:]:
                marker = marker.union(other_dep.marker)

            for i, d in enumerate(_deps):
                if d.marker.is_any():
                    _deps[i].marker = marker.invert()

        overrides = []
        for _dep in _deps:
            current_overrides = self._overrides.copy()
            package_overrides = current_overrides.get(package, {}).copy()
            package_overrides.update({_dep.name: _dep})
            current_overrides.update({package: package_overrides})
            overrides.append(current_overrides)

        raise OverrideNeeded(*overrides)

    # Modifying dependencies as needed
    clean_dependencies = []
    for dep in dependencies:
        if not package.dependency.transitive_marker.without_extras().is_any():
            marker_intersection = package.dependency.transitive_marker.without_extras().intersect(
                dep.marker.without_extras()
            )
            if marker_intersection.is_empty():
                # The dependency is not needed, since the markers specified
                # for the current package selection are not compatible with
                # the markers for the current dependency, so we skip it
                continue

            dep.transitive_marker = marker_intersection

        if not package.dependency.python_constraint.is_any():
            python_constraint_intersection = dep.python_constraint.intersect(
                package.dependency.python_constraint
            )
            if python_constraint_intersection.is_empty():
                # This dependency is not needed under the current python constraint.
                continue

            dep.transitive_python_versions = str(python_constraint_intersection)

        clean_dependencies.append(dep)

    package.requires = clean_dependencies

    return package
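# A hedged sketch of the marker-inversion rule above, driving poetry-core's
# marker objects directly (assumes poetry-core is importable; the marker string
# mirrors the ipython/pypy example in the comments):
from poetry.core.version.markers import parse_marker

marker = parse_marker('implementation_name == "pypy"')
print(marker.invert())  # implementation_name != "pypy"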
def _dump_package(self, package):  # type: (Package) -> dict
    dependencies = {}
    for dependency in sorted(package.requires, key=lambda d: d.name):
        if dependency.pretty_name not in dependencies:
            dependencies[dependency.pretty_name] = []

        constraint = inline_table()
        constraint["version"] = str(dependency.pretty_constraint)

        if dependency.extras:
            constraint["extras"] = sorted(dependency.extras)

        if dependency.is_optional():
            constraint["optional"] = True

        if not dependency.marker.is_any():
            constraint["markers"] = str(dependency.marker)

        dependencies[dependency.pretty_name].append(constraint)

    # All the constraints should have the same type,
    # but we want to simplify them if it's possible
    for dependency, constraints in tuple(dependencies.items()):
        if all(len(constraint) == 1 for constraint in constraints):
            dependencies[dependency] = [
                constraint["version"] for constraint in constraints
            ]

    data = OrderedDict(
        [
            ("name", package.pretty_name),
            ("version", package.pretty_version),
            ("description", package.description or ""),
            ("category", package.category),
            ("optional", package.optional),
            ("python-versions", package.python_versions),
            ("files", sorted(package.files, key=lambda x: x["file"])),
        ]
    )

    if dependencies:
        data["dependencies"] = table()
        for k, constraints in dependencies.items():
            if len(constraints) == 1:
                data["dependencies"][k] = constraints[0]
            else:
                data["dependencies"][k] = array().multiline(True)
                for constraint in constraints:
                    data["dependencies"][k].append(constraint)

    if package.extras:
        extras = {}
        for name, deps in package.extras.items():
            extras[name] = [
                str(dep) if not dep.constraint.is_any() else dep.name for dep in deps
            ]

        data["extras"] = extras

    if package.source_url:
        url = package.source_url
        if package.source_type in ["file", "directory"]:
            # The lock file should only store paths relative to the root project
            url = Path(
                os.path.relpath(
                    Path(url).as_posix(), self._lock.path.parent.as_posix()
                )
            ).as_posix()

        data["source"] = OrderedDict()

        if package.source_type:
            data["source"]["type"] = package.source_type

        data["source"]["url"] = url

        if package.source_reference:
            data["source"]["reference"] = package.source_reference

        if package.source_resolved_reference:
            data["source"]["resolved_reference"] = package.source_resolved_reference

        if package.source_type == "directory":
            data["develop"] = package.develop

    return data
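# Standalone illustration of the lock-relative path rewrite used above for
# "file" and "directory" sources (the paths are made up):
import os
from pathlib import Path

source_url = "/home/user/project/libs/demo"
lock_dir = "/home/user/project"
relative = Path(
    os.path.relpath(Path(source_url).as_posix(), Path(lock_dir).as_posix())
).as_posix()
print(relative)  # libs/demo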