def handle(self):
    """Scaffold a new Python project at the path given on the command line."""
    from poetry.layouts import layout
    from poetry.utils._compat import Path
    from poetry.vcs.git import GitConfig

    # Choose between the src/ layout and the standard flat layout.
    layout_kind = "src" if self.option("src") else "standard"
    layout_cls = layout(layout_kind)

    path = Path.cwd() / Path(self.argument("path"))
    name = self.option("name") or path.name

    # Directory is not empty. Aborting.
    if path.exists() and list(path.glob("*")):
        raise RuntimeError(
            "Destination <fg=yellow>{}</> "
            "exists and is not empty".format(path)
        )

    readme_format = "rst"

    # Build a default "Name <email>" author string from the git config.
    author = None
    config = GitConfig()
    if config.get("user.name"):
        author = config["user.name"]
        author_email = config.get("user.email")
        if author_email:
            author += " <{}>".format(author_email)

    project_layout = layout_cls(
        name, "0.1.0", author=author, readme_format=readme_format
    )
    project_layout.create(path)

    self.line(
        "Created package <info>{}</> in <fg=blue>{}</>".format(
            name, path.relative_to(Path.cwd())
        )
    )
def __init__(self, name, directory=".", packages=None, includes=None):
    """Locate the module/package for *name* under *directory*.

    Looks for ``<name>/`` or ``<name>.py`` first at the top level, then
    under ``src/``, unless explicit ``packages``/``includes`` are given.

    Raises:
        ValueError: if both a package dir and a module file exist, or if
            nothing matching *name* can be found.
    """
    self._name = module_name(name)
    self._in_src = False
    self._is_package = False
    self._path = Path(directory)
    self._includes = []
    packages = packages or []
    includes = includes or []

    if not packages:
        # It must exist either as a .py file or a directory, but not both
        pkg_dir = Path(directory, self._name)
        py_file = Path(directory, self._name + ".py")
        if pkg_dir.is_dir() and py_file.is_file():
            raise ValueError("Both {} and {} exist".format(pkg_dir, py_file))
        elif pkg_dir.is_dir():
            packages = [{"include": str(pkg_dir.relative_to(self._path))}]
        elif py_file.is_file():
            packages = [{"include": str(py_file.relative_to(self._path))}]
        else:
            # Searching for a src module
            src = Path(directory, "src")
            src_pkg_dir = src / self._name
            src_py_file = src / (self._name + ".py")
            if src_pkg_dir.is_dir() and src_py_file.is_file():
                # Fixed: report the src/ paths; the previous message
                # formatted the top-level pkg_dir/py_file, which do not
                # exist on this branch.
                raise ValueError(
                    "Both {} and {} exist".format(src_pkg_dir, src_py_file)
                )
            elif src_pkg_dir.is_dir():
                packages = [
                    {
                        "include": str(src_pkg_dir.relative_to(src)),
                        "from": str(src.relative_to(self._path)),
                    }
                ]
            elif src_py_file.is_file():
                packages = [
                    {
                        "include": str(src_py_file.relative_to(src)),
                        "from": str(src.relative_to(self._path)),
                    }
                ]
            else:
                raise ValueError(
                    "No file/folder found for package {}".format(name)
                )

    for package in packages:
        self._includes.append(
            PackageInclude(self._path, package["include"], package.get("from"))
        )

    for include in includes:
        self._includes.append(Include(self._path, include))
def create_venv(cls, io, name=None, cwd=None):  # type: (IO, Optional[str], Optional[Path]) -> Env
    """Return the environment to use for the project, creating a
    virtualenv when needed.

    Resolution order: a cached env, an already-active virtualenv, an
    existing or freshly-built named virtualenv under the configured
    path, or the system environment when creation is disabled.
    """
    # Reuse the previously resolved environment, if any.
    if cls._env is not None:
        return cls._env

    env = cls.get(cwd=cwd)
    if env.is_venv():
        # Already inside a virtualenv.
        return env

    config = Config.create("config.toml")
    create_venv = config.setting("settings.virtualenvs.create")
    root_venv = config.setting("settings.virtualenvs.in-project")
    venv_path = config.setting("settings.virtualenvs.path")
    if root_venv:
        # In-project virtualenv lives at <project>/.venv
        if not cwd:
            raise RuntimeError("Unable to determine the project's directory")

        venv_path = cwd / ".venv"
    elif venv_path is None:
        venv_path = Path(CACHE_DIR) / "virtualenvs"
    else:
        venv_path = Path(venv_path)

    if not name:
        if not cwd:
            cwd = Path.cwd()

        name = cwd.name

    # Suffix the venv name with the interpreter's major.minor version.
    name = "{}-py{}".format(name, ".".join([str(v) for v in sys.version_info[:2]]))

    if root_venv:
        venv = venv_path
    else:
        venv = venv_path / name

    if not venv.exists():
        if create_venv is False:
            # Creation explicitly disabled: fall back to the system env.
            io.writeln(
                "<fg=black;bg=yellow>"
                "Skipping virtualenv creation, "
                "as specified in config file."
                "</>"
            )

            return SystemEnv(Path(sys.prefix))

        io.writeln("Creating virtualenv <info>{}</> in {}".format(name, str(venv_path)))

        cls.build_venv(str(venv))
    else:
        if io.is_very_verbose():
            io.writeln("Virtualenv <info>{}</> already exists.".format(name))

    # venv detection:
    # stdlib venv may symlink sys.executable, so we can't use realpath.
    # but others can symlink *to* the venv Python,
    # so we can't just use sys.executable.
    # So we just check every item in the symlink tree (generally <= 3)
    p = os.path.normcase(sys.executable)
    paths = [p]
    while os.path.islink(p):
        p = os.path.normcase(os.path.join(os.path.dirname(p), os.readlink(p)))
        paths.append(p)

    p_venv = os.path.normcase(str(venv))
    if any(p.startswith(p_venv) for p in paths):
        # Running properly in the virtualenv, don't need to do anything
        return SystemEnv(Path(sys.prefix), cls.get_base_prefix())

    return VirtualEnv(venv)
def handle(self):
    """Interactively build a pyproject.toml for the current directory.

    Returns 1 (error) when a pyproject.toml already exists or the user
    aborts generation; writes the file and returns None on success.
    """
    from poetry.layouts import layout
    from poetry.utils._compat import Path
    from poetry.utils.env import Env
    from poetry.vcs.git import GitConfig

    # Never overwrite an existing project file.
    if (Path.cwd() / "pyproject.toml").exists():
        self.error("A pyproject.toml file already exists.")

        return 1

    vcs_config = GitConfig()

    self.line([
        "",
        "This command will guide you through creating your <info>pyproject.toml</> config.",
        "",
    ])

    name = self.option("name")
    if not name:
        name = Path.cwd().name.lower()

    question = self.create_question(
        "Package name [<comment>{}</comment>]: ".format(name), default=name
    )
    name = self.ask(question)

    version = "0.1.0"
    question = self.create_question(
        "Version [<comment>{}</comment>]: ".format(version), default=version
    )
    version = self.ask(question)

    description = self.option("description") or ""
    question = self.create_question(
        "Description [<comment>{}</comment>]: ".format(description),
        default=description,
    )
    description = self.ask(question)

    # Default author comes from the git config as "Name <email>".
    author = self.option("author")
    if not author and vcs_config and vcs_config.get("user.name"):
        author = vcs_config["user.name"]
        author_email = vcs_config.get("user.email")
        if author_email:
            author += " <{}>".format(author_email)

    question = self.create_question(
        "Author [<comment>{}</comment>, n to skip]: ".format(author), default=author
    )
    question.validator = lambda v: self._validate_author(v, author)
    author = self.ask(question)

    if not author:
        authors = []
    else:
        authors = [author]

    license = self.option("license") or ""
    question = self.create_question(
        "License [<comment>{}</comment>]: ".format(license), default=license
    )
    question.validator = self._validate_license
    license = self.ask(question)

    # Propose the currently running interpreter as the python constraint.
    current_env = Env.get(Path.cwd())
    default_python = "^{}".format(
        ".".join(str(v) for v in current_env.version_info[:2])
    )
    question = self.create_question(
        "Compatible Python versions [<comment>{}</comment>]: ".format(default_python),
        default=default_python,
    )
    python = self.ask(question)

    self.line("")

    requirements = {}

    question = "Would you like to define your dependencies (require) interactively?"
    if self.confirm(question, True):
        requirements = self._format_requirements(
            self._determine_requirements(self.option("dependency"))
        )

    dev_requirements = {}

    # Fixed: the prompt was missing its trailing question mark.
    question = (
        "Would you like to define your dev dependencies"
        " (require-dev) interactively?"
    )
    if self.confirm(question, True):
        dev_requirements = self._format_requirements(
            self._determine_requirements(self.option("dev-dependency"))
        )

    layout_ = layout("standard")(
        name,
        version,
        description=description,
        author=authors[0] if authors else None,
        license=license,
        python=python,
        dependencies=requirements,
        dev_dependencies=dev_requirements,
    )

    content = layout_.generate_poetry_content()
    if self.input.is_interactive():
        self.line("<info>Generated file</info>")
        self.line(["", content, ""])
        if not self.confirm("Do you confirm generation?", True):
            self.line("<error>Command aborted</error>")

            return 1

    with (Path.cwd() / "pyproject.toml").open("w", encoding="utf-8") as f:
        f.write(content)
def _parse_requirements(self, requirements):  # type: (List[str]) -> List[Dict[str, str]]
    """Parse command-line requirement strings into specification dicts.

    Handles git urls (``git+https``/``git+ssh``), plain http(s) urls,
    local file/directory paths, and ``name [constraint]`` forms, each
    optionally suffixed with ``[extra1,extra2]``.
    """
    from poetry.puzzle.provider import Provider

    result = []

    try:
        cwd = self.poetry.file.parent
    except RuntimeError:
        cwd = Path.cwd()

    for requirement in requirements:
        requirement = requirement.strip()
        extras = []
        extras_m = re.search(r"\[([\w\d,-_]+)\]$", requirement)
        if extras_m:
            extras = [e.strip() for e in extras_m.group(1).split(",")]
            requirement, _ = requirement.split("[")

        url_parsed = urlparse.urlparse(requirement)
        if url_parsed.scheme and url_parsed.netloc:
            # Url
            if url_parsed.scheme in ["git+https", "git+ssh"]:
                # Fixed: strip the "git+" *prefix* exactly once;
                # str.lstrip("git+") strips a character set and could eat
                # leading characters of the remaining url.
                url = requirement
                if url.startswith("git+"):
                    url = url[len("git+"):]
                rev = None
                if "@" in url:
                    # Fixed: rsplit keeps "user@host" intact when an @rev
                    # suffix is also present (split("@") raised ValueError
                    # on two "@"s).
                    # NOTE(review): an ssh url with user@host but no @rev
                    # still mis-parses here — pre-existing; needs
                    # authority-aware parsing to fix properly.
                    url, rev = url.rsplit("@", 1)

                repo_name = url.split("/")[-1]
                # Fixed: remove a ".git" *suffix*; .rstrip(".git") also
                # stripped trailing "g"/"i"/"t" characters from the name.
                if repo_name.endswith(".git"):
                    repo_name = repo_name[: -len(".git")]

                pair = OrderedDict([("name", repo_name), ("git", url)])
                if rev:
                    pair["rev"] = rev

                if extras:
                    pair["extras"] = extras

                # Resolve the real package name from the repository.
                package = Provider.get_package_from_vcs(
                    "git", url, reference=pair.get("rev")
                )
                pair["name"] = package.name
                result.append(pair)

                continue
            elif url_parsed.scheme in ["http", "https"]:
                package = Provider.get_package_from_url(requirement)

                pair = OrderedDict([("name", package.name), ("url", package.source_url)])
                if extras:
                    pair["extras"] = extras

                result.append(pair)
                continue
        elif (os.path.sep in requirement or "/" in requirement) and cwd.joinpath(
            requirement
        ).exists():
            # Local file or directory dependency.
            path = cwd.joinpath(requirement)
            if path.is_file():
                package = Provider.get_package_from_file(path.resolve())
            else:
                package = Provider.get_package_from_directory(path)

            result.append(
                OrderedDict(
                    [
                        ("name", package.name),
                        ("path", path.relative_to(cwd).as_posix()),
                    ]
                    + ([("extras", extras)] if extras else [])
                )
            )

            continue

        # Normalize "name<sep>version" into "name version".
        pair = re.sub(
            "^([^@=: ]+)(?:@|==|(?<![<>~!])=|:| )(.*)$", "\\1 \\2", requirement
        )
        pair = pair.strip()

        require = OrderedDict()
        if " " in pair:
            # Fixed: split once; a stray extra space no longer raises a
            # ValueError on unpacking.
            name, version = pair.split(" ", 1)
            require["name"] = name
            if version != "latest":
                require["version"] = version
        else:
            m = re.match(
                "^([^><=!: ]+)((?:>=|<=|>|<|!=|~=|~|\^).*)$", requirement.strip()
            )
            if m:
                name, constraint = m.group(1), m.group(2)
                extras_m = re.search(r"\[([\w\d,-_]+)\]$", name)
                if extras_m:
                    extras = [e.strip() for e in extras_m.group(1).split(",")]
                    name, _ = name.split("[")

                require["name"] = name
                require["version"] = constraint
            else:
                extras_m = re.search(r"\[([\w\d,-_]+)\]$", pair)
                if extras_m:
                    extras = [e.strip() for e in extras_m.group(1).split(",")]
                    pair, _ = pair.split("[")

                require["name"] = pair

        if extras:
            require["extras"] = extras

        result.append(require)

    return result
def test_prepare_metadata_for_build_wheel():
    """prepare_metadata_for_build_wheel writes a complete dist-info dir."""
    entry_points = """\
[console_scripts]
extra-script=my_package.extra:main[time]
my-2nd-script=my_package:main2
my-script=my_package:main
"""
    wheel_data = """\
Wheel-Version: 1.0
Generator: poetry {}
Root-Is-Purelib: true
Tag: py3-none-any
""".format(__version__)
    metadata = """\
Metadata-Version: 2.1
Name: my-package
Version: 1.2.3
Summary: Some description.
Home-page: https://poetry.eustace.io/
License: MIT
Keywords: packaging,dependency,poetry
Author: Sébastien Eustace
Author-email: [email protected]
Requires-Python: >=3.6,<4.0
Classifier: License :: OSI Approved :: MIT License
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.6
Classifier: Programming Language :: Python :: 3.7
Classifier: Topic :: Software Development :: Build Tools
Classifier: Topic :: Software Development :: Libraries :: Python Modules
Provides-Extra: time
Requires-Dist: cachy[msgpack] (>=0.2.0,<0.3.0)
Requires-Dist: cleo (>=0.6,<0.7)
Requires-Dist: pendulum (>=1.4,<2.0); extra == "time"
Project-URL: Documentation, https://poetry.eustace.io/docs
Project-URL: Repository, https://github.com/sdispater/poetry
Description-Content-Type: text/x-rst

My Package
==========
"""
    with temporary_directory() as tmp_dir, cwd(os.path.join(fixtures, "complete")):
        dirname = api.prepare_metadata_for_build_wheel(tmp_dir)

        assert "my_package-1.2.3.dist-info" == dirname

        dist_info = Path(tmp_dir, dirname)

        # Each generated file must exist and match the expected bytes.
        expected_files = {
            "entry_points.txt": entry_points,
            "WHEEL": wheel_data,
            "METADATA": metadata,
        }
        for filename, expected in expected_files.items():
            target = dist_info / filename
            assert target.exists()
            with target.open(encoding="utf-8") as f:
                assert expected == decode(f.read())
import pytest

from poetry.masonry.utils.package_include import PackageInclude
from poetry.utils._compat import Path

# On-disk fixture tree exercised by these tests.
fixtures_dir = Path(__file__).parent / "fixtures"
with_includes = fixtures_dir / "with_includes"


def test_package_include_with_multiple_dirs():
    # Including the whole fixture directory must enumerate exactly these
    # files and sub-directories (python modules and plain files alike).
    pkg_include = PackageInclude(base=fixtures_dir, include="with_includes")
    assert pkg_include.elements == [
        with_includes / "__init__.py",
        with_includes / "bar",
        with_includes / "bar/baz.py",
        with_includes / "extra_package",
        with_includes / "extra_package/some_dir",
        with_includes / "extra_package/some_dir/foo.py",
        with_includes / "extra_package/some_dir/quux.py",
        with_includes / "not_a_python_pkg",
        with_includes / "not_a_python_pkg/baz.txt",
    ]


def test_package_include_with_simple_dir():
    # A single nested directory include yields only its contents.
    pkg_include = PackageInclude(base=with_includes, include="bar")
    assert pkg_include.elements == [with_includes / "bar/baz.py"]


# NOTE(review): the following definition is truncated in this chunk of the
# file; it is reproduced as-is.
def test_package_include_with_nested_dir():
    pkg_include = PackageInclude(base=with_includes,
def update(self, release):
    """Download *release* of poetry, vendorize its dependencies, repack it
    as a wheel and pip-install it over the current installation.
    """
    from poetry.utils._compat import Path
    from poetry.utils.helpers import temporary_directory

    version = release.version
    self.line('Updating to <info>{}</info>'.format(version))

    prefix = sys.prefix
    base_prefix = getattr(sys, 'base_prefix', None)
    real_prefix = getattr(sys, 'real_prefix', None)
    prefix_poetry = Path(prefix) / 'bin' / 'poetry'
    # Locate a pip executable: next to the installed poetry script first,
    # then via the base/real prefix when running inside a virtualenv.
    if prefix_poetry.exists():
        pip = (prefix_poetry.parent / 'pip').resolve()
    elif (base_prefix and base_prefix != prefix
          and (Path(base_prefix) / 'bin' / 'poetry').exists()):
        pip = Path(base_prefix) / 'bin' / 'pip'
    elif real_prefix:
        pip = Path(real_prefix) / 'bin' / 'pip'
    else:
        raise RuntimeError('Unable to determine poetry\'s path')

    with temporary_directory(prefix='poetry-update-') as temp_dir:
        temp_dir = Path(temp_dir)
        dist = temp_dir / 'dist'
        self.line(' - Getting dependencies')
        # Install the target poetry release (with deps) into a staging dir.
        self.process(str(pip), 'install', '-U',
                     'poetry=={}'.format(release.version),
                     '--target', str(dist))

        self.line(' - Vendorizing dependencies')

        poetry_dir = dist / 'poetry'
        vendor_dir = poetry_dir / '_vendor'

        # Everything, except poetry itself, should
        # be put in the _vendor directory
        for file in dist.glob('*'):
            if file.name.startswith('poetry'):
                continue

            dest = vendor_dir / file.name
            if file.is_dir():
                shutil.copytree(str(file), str(dest))
                shutil.rmtree(str(file))
            else:
                shutil.copy(str(file), str(dest))
                os.unlink(str(file))

        # Read the wheel tag back from the staged dist-info.
        wheel_data = dist / 'poetry-{}.dist-info'.format(version) / 'WHEEL'
        with wheel_data.open() as f:
            wheel_data = Parser().parsestr(f.read())

        tag = wheel_data['Tag']

        # Repack everything and install
        self.line(' - Updating <info>poetry</info>')
        # make_archive appends ".zip"; rename to ".whl" so pip accepts it.
        shutil.make_archive(
            str(temp_dir / 'poetry-{}-{}'.format(version, tag)),
            format='zip',
            root_dir=str(dist))
        os.rename(
            str(temp_dir / 'poetry-{}-{}.zip'.format(version, tag)),
            str(temp_dir / 'poetry-{}-{}.whl'.format(version, tag)),
        )
        self.process(
            str(pip), 'install', '--upgrade', '--no-deps',
            str(temp_dir / 'poetry-{}-{}.whl'.format(version, tag)))

    self.line('')
    self.line('<info>poetry</> (<comment>{}</>) '
              'successfully installed!'.format(version))
def fixture(name):
    """Load and parse the ``<name>.test`` TOML fixture for these tests."""
    fixture_path = Path(__file__).parent / "fixtures" / "{}.test".format(name)
    return TomlFile(fixture_path).read()
from typing import Set

import pytest

from poetry.inspection.info import PackageInfo
from poetry.inspection.info import PackageInfoError
from poetry.utils._compat import PY35
from poetry.utils._compat import CalledProcessError
from poetry.utils._compat import Path
from poetry.utils._compat import decode
from poetry.utils.env import EnvCommandError
from poetry.utils.env import VirtualEnv

# Base directories for the fixtures used by the inspection tests.
FIXTURE_DIR_BASE = Path(__file__).parent.parent / "fixtures"
FIXTURE_DIR_INSPECTIONS = FIXTURE_DIR_BASE / "inspection"


@pytest.fixture(autouse=True)
def pep517_metadata_mock():
    # Applied automatically to every test in this module; currently a no-op.
    pass


@pytest.fixture
def demo_sdist():  # type: () -> Path
    """Path to the demo source distribution fixture."""
    return FIXTURE_DIR_BASE / "distributions" / "demo-0.1.0.tar.gz"


@pytest.fixture
def demo_wheel():  # type: () -> Path
    """Path to the demo wheel fixture."""
    return FIXTURE_DIR_BASE / "distributions" / "demo-0.1.0-py2.py3-none-any.whl"
def complete_package(self, package):  # type: (DependencyPackage) -> DependencyPackage
    """Fill in *package*'s dependency list, merging or splitting
    duplicate requirements on the same dependency name.
    """
    if package.is_root():
        package = package.clone()

    # Non-root packages from remote sources are re-fetched from the pool
    # so their full metadata (with extras) is available.
    if not package.is_root() and package.source_type not in {
        "directory",
        "file",
        "git",
    }:
        package = DependencyPackage(
            package.dependency,
            self._pool.package(
                package.name, package.version.text, extras=package.requires_extras
            ),
        )

    # Keep only requirements compatible with the root python constraint.
    dependencies = [
        r
        for r in package.requires
        if self._package.python_constraint.allows_any(r.python_constraint)
    ]

    # Searching for duplicate dependencies
    #
    # If the duplicate dependencies have the same constraint,
    # the requirements will be merged.
    #
    # For instance:
    #   - enum34; python_version=="2.7"
    #   - enum34; python_version=="3.3"
    #
    # will become:
    #   - enum34; python_version=="2.7" or python_version=="3.3"
    #
    # If the duplicate dependencies have different constraints
    # we have to split the dependency graph.
    #
    # An example of this is:
    #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
    #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
    duplicates = {}
    for dep in dependencies:
        if dep.name not in duplicates:
            duplicates[dep.name] = []

        duplicates[dep.name].append(dep)

    dependencies = []
    for dep_name, deps in duplicates.items():
        if len(deps) == 1:
            dependencies.append(deps[0])
            continue

        self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))

        # Regrouping by constraint
        by_constraint = {}
        for dep in deps:
            if dep.constraint not in by_constraint:
                by_constraint[dep.constraint] = []

            by_constraint[dep.constraint].append(dep)

        # We merge by constraint
        for constraint, _deps in by_constraint.items():
            new_markers = []
            for dep in _deps:
                pep_508_dep = dep.to_pep_508(False)
                if ";" not in pep_508_dep:
                    continue

                markers = pep_508_dep.split(";")[1].strip()
                if not markers:
                    # One of the constraint has no markers
                    # so this means we don't actually need to merge
                    new_markers = []
                    break

                new_markers.append("({})".format(markers))

            if not new_markers:
                dependencies += _deps
                continue

            # Build a single requirement whose marker is the OR of all
            # duplicate markers for this constraint.
            dep = _deps[0]
            new_requirement = "{}; {}".format(
                dep.to_pep_508(False).split(";")[0], " or ".join(new_markers)
            )
            new_dep = dependency_from_pep_508(new_requirement)
            if dep.is_optional() and not dep.is_activated():
                new_dep.deactivate()
            else:
                new_dep.activate()

            by_constraint[constraint] = [new_dep]

            continue

        if len(by_constraint) == 1:
            # A single constraint remains: merged successfully.
            self.debug(
                "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
            )
            dependencies.append(list(by_constraint.values())[0][0])
            continue

        # We leave dependencies as-is if they have the same
        # python/platform constraints.
        # That way the resolver will pickup the conflict
        # and display a proper error.
        _deps = [value[0] for value in by_constraint.values()]
        seen = set()
        for _dep in _deps:
            pep_508_dep = _dep.to_pep_508(False)
            if ";" not in pep_508_dep:
                _requirements = ""
            else:
                _requirements = pep_508_dep.split(";")[1].strip()

            if _requirements not in seen:
                seen.add(_requirements)

        if len(_deps) != len(seen):
            # At least two duplicates share the same marker string.
            for _dep in _deps:
                dependencies.append(_dep)

            continue

        # At this point, we raise an exception that will
        # tell the solver to enter compatibility mode
        # which means it will resolve for subsets
        # Python constraints
        #
        # For instance, if our root package requires Python ~2.7 || ^3.6
        # And we have one dependency that requires Python <3.6
        # and the other Python >=3.6 than the solver will solve
        # dependencies for Python >=2.7,<2.8 || >=3.4,<3.6
        # and Python >=3.6,<4.0
        python_constraints = []
        for constraint, _deps in by_constraint.items():
            python_constraints.append(_deps[0].python_versions)

        _deps = [str(_dep[0]) for _dep in by_constraint.values()]
        self.debug(
            "<warning>Different requirements found for {}.</warning>".format(
                ", ".join(_deps[:-1]) + " and " + _deps[-1]
            )
        )
        raise CompatibilityError(*python_constraints)

    # Modifying dependencies as needed
    for dep in dependencies:
        if not package.dependency.python_constraint.is_any():
            # Narrow transitive python versions to the parent's constraint.
            dep.transitive_python_versions = str(
                dep.python_constraint.intersect(
                    package.dependency.python_constraint
                )
            )

        if (package.dependency.is_directory() or package.dependency.is_file()) and (
            dep.is_directory() or dep.is_file()
        ):
            # Rewrite local dependency paths relative to the parent source.
            if dep.path.as_posix().startswith(package.source_url):
                relative = (Path(package.source_url) / dep.path).relative_to(
                    package.source_url
                )
            else:
                relative = Path(package.source_url) / dep.path

            # TODO: Improve the way we set the correct relative path for dependencies
            dep._path = relative

    package.requires = dependencies

    return package
def search_for_directory(self, dependency):  # type: (DirectoryDependency) -> List[Package]
    """Build a Package from a local directory dependency.

    Poetry projects are read directly; otherwise metadata is obtained by
    running ``setup.py egg_info`` in the target environment, falling back
    to statically reading setup.py when that command fails.
    """
    if dependency.supports_poetry():
        from poetry.poetry import Poetry

        poetry = Poetry.create(dependency.full_path)

        pkg = poetry.package
        package = Package(pkg.name, pkg.version)

        for dep in pkg.requires:
            if not dep.is_optional():
                package.requires.append(dep)

        for extra, deps in pkg.extras.items():
            if extra not in package.extras:
                package.extras[extra] = []

            for dep in deps:
                package.extras[extra].append(dep)

        package.python_versions = pkg.python_versions
    else:
        # Execute egg_info
        current_dir = os.getcwd()
        os.chdir(str(dependency.full_path))

        try:
            cwd = dependency.full_path
            venv = Env.get(cwd)
            venv.run("python", "setup.py", "egg_info")
        except EnvCommandError:
            # egg_info failed: fall back to statically parsing setup.py.
            result = SetupReader.read_from_directory(dependency.full_path)
            if not result["name"]:
                # The name could not be determined
                # We use the dependency name
                result["name"] = dependency.name

            if not result["version"]:
                # The version could not be determined
                # so we raise an error since it is mandatory
                raise RuntimeError(
                    "Unable to retrieve the package version for {}".format(
                        dependency.path
                    )
                )

            package_name = result["name"]
            package_version = result["version"]
            python_requires = result["python_requires"]
            if python_requires is None:
                python_requires = "*"

            package_summary = ""

            # Rebuild a requires.txt-style blob and parse it.
            requires = ""
            for dep in result["install_requires"]:
                requires += dep + "\n"

            if result["extras_require"]:
                requires += "\n"

                for extra_name, deps in result["extras_require"].items():
                    requires += "[{}]\n".format(extra_name)

                    for dep in deps:
                        requires += dep + "\n"

                    requires += "\n"

            reqs = parse_requires(requires)
        else:
            os.chdir(current_dir)
            # Sometimes pathlib will fail on recursive
            # symbolic links, so we need to workaround it
            # and use the glob module instead.
            # Note that this does not happen with pathlib2
            # so it's safe to use it for Python < 3.4.
            if PY35:
                egg_info = next(
                    Path(p)
                    for p in glob.glob(
                        os.path.join(str(dependency.full_path), "**", "*.egg-info"),
                        recursive=True,
                    )
                )
            else:
                egg_info = next(dependency.full_path.glob("**/*.egg-info"))

            meta = pkginfo.UnpackedSDist(str(egg_info))

            package_name = meta.name
            package_version = meta.version
            package_summary = meta.summary
            python_requires = meta.requires_python

            if meta.requires_dist:
                reqs = list(meta.requires_dist)
            else:
                reqs = []
                requires = egg_info / "requires.txt"
                if requires.exists():
                    with requires.open() as f:
                        reqs = parse_requires(f.read())
        finally:
            os.chdir(current_dir)

        package = Package(package_name, package_version)

        if dependency.name != package.name:
            # For now, the dependency's name must match the actual package's name
            raise RuntimeError(
                "The dependency name for {} does not match the actual package's name: {}".format(
                    dependency.name, package.name
                )
            )

        package.description = package_summary

        for req in reqs:
            dep = dependency_from_pep_508(req)
            if dep.in_extras:
                for extra in dep.in_extras:
                    if extra not in package.extras:
                        package.extras[extra] = []

                    package.extras[extra].append(dep)

            if not dep.is_optional():
                package.requires.append(dep)

        if python_requires:
            package.python_versions = python_requires

    package.source_type = "directory"
    package.source_url = dependency.path.as_posix()

    # Activate the extras requested by the dependency.
    for extra in dependency.extras:
        if extra in package.extras:
            for dep in package.extras[extra]:
                dep.activate()

            package.requires += package.extras[extra]

    return [package]
from typing import Optional

import pytest
from pytest_mock.plugin import MockFixture

from poetry.core.packages import Package
from poetry.repositories.installed_repository import InstalledRepository
from poetry.utils._compat import PY36
from poetry.utils._compat import Path
from poetry.utils._compat import metadata
from poetry.utils._compat import zipp
from poetry.utils.env import MockEnv as BaseMockEnv

# Fixture tree laid out like a real installed prefix (purelib/platlib,
# src checkouts and vendored packages).
FIXTURES_DIR = Path(__file__).parent / "fixtures"
ENV_DIR = (FIXTURES_DIR / "installed").resolve()
SITE_PURELIB = ENV_DIR / "lib" / "python3.7" / "site-packages"
SITE_PLATLIB = ENV_DIR / "lib64" / "python3.7" / "site-packages"
SRC = ENV_DIR / "src"
VENDOR_DIR = ENV_DIR / "vendor" / "py3.7"

# Distributions expected to be discovered in the fixture environment.
# NOTE(review): this list is truncated in this chunk of the file; it is
# reproduced as-is.
INSTALLED_RESULTS = [
    metadata.PathDistribution(SITE_PURELIB / "cleo-0.7.6.dist-info"),
    metadata.PathDistribution(SRC / "pendulum" / "pendulum.egg-info"),
    metadata.PathDistribution(
        zipp.Path(str(SITE_PURELIB / "foo-0.1.0-py3.8.egg"), "EGG-INFO")
    ),
    metadata.PathDistribution(VENDOR_DIR / "attrs-19.3.0.dist-info"),
    metadata.PathDistribution(SITE_PURELIB / "standard-1.2.3.dist-info"),
    metadata.PathDistribution(SITE_PURELIB / "editable-2.3.4.dist-info"),
    metadata.PathDistribution(SITE_PURELIB / "editable-with-import-2.3.4.dist-info"),
    metadata.PathDistribution(SITE_PLATLIB / "lib64-2.3.4.dist-info"),
def load(cls, env):  # type: (Env) -> InstalledRepository
    """
    Load installed packages.

    Scans every entry of the environment's sys.path (in reverse, so
    earlier entries win), skipping poetry's own vendored packages, and
    classifies each distribution as standard, editable, vcs or path.
    """
    repo = cls()
    seen = set()
    for entry in reversed(env.sys_path):
        for distribution in sorted(
            metadata.distributions(path=[entry]),
            key=lambda d: str(d._path),
        ):
            name = distribution.metadata["name"]
            path = Path(str(distribution._path))
            version = distribution.metadata["version"]
            package = Package(name, version, version)
            package.description = distribution.metadata.get("summary", "")

            # First occurrence of a name wins.
            if package.name in seen:
                continue

            # Skip distributions vendored inside poetry itself.
            try:
                path.relative_to(_VENDORS)
            except ValueError:
                pass
            else:
                continue

            seen.add(package.name)

            repo.add_package(package)

            is_standard_package = env.is_path_relative_to_lib(path)

            if is_standard_package:
                if path.name.endswith(".dist-info"):
                    paths = cls.get_package_paths(env=env, name=package.pretty_name)
                    if paths:
                        is_editable_package = False
                        for src in paths:
                            if cls.is_vcs_package(src, env):
                                cls.set_package_vcs_properties(package, env)
                                break

                            # A source path outside the lib directories
                            # marks the package as editable.
                            if not (
                                is_editable_package
                                or env.is_path_relative_to_lib(src)
                            ):
                                is_editable_package = True
                        else:
                            # for/else: only runs when no vcs source broke
                            # out of the loop above.
                            # TODO: handle multiple source directories?
                            if is_editable_package:
                                package._source_type = "directory"
                                package._source_url = paths.pop().as_posix()

                continue

            if cls.is_vcs_package(path, env):
                cls.set_package_vcs_properties(package, env)
            else:
                # If not, it's a path dependency
                package._source_type = "directory"
                package._source_url = str(path.parent)

    return repo
import itertools from typing import Set from typing import Union from poetry.core.packages import Package from poetry.core.utils.helpers import module_name from poetry.utils._compat import Path from poetry.utils._compat import metadata from poetry.utils.env import Env from .repository import Repository _VENDORS = Path(__file__).parent.parent.joinpath("_vendor") try: FileNotFoundError except NameError: FileNotFoundError = OSError class InstalledRepository(Repository): @classmethod def get_package_paths(cls, env, name): # type: (Env, str) -> Set[Path] """ Process a .pth file within the site-packages directories, and return any valid paths. We skip executable .pth files as there is no reliable means to do this without side-effects to current run-time. Mo check is made that the item refers to a directory rather than a file, however, in order to maintain backwards compatibility, we allow non-existing paths to be discovered. The latter behaviour is different to how Python's site-specific hook configuration works.
def handle(self):
    """Interactively build a pyproject.toml for the current directory.

    Returns 1 (error) when a pyproject.toml already exists or the user
    aborts generation; writes the file and returns None on success.
    """
    from poetry.layouts import layout
    from poetry.utils._compat import Path
    from poetry.vcs.git import GitConfig

    # Never overwrite an existing project file.
    if (Path.cwd() / "pyproject.toml").exists():
        self.error("A pyproject.toml file already exists.")

        return 1

    vcs_config = GitConfig()

    self.line(
        [
            "",
            "This command will guide you through creating your <info>pyproject.toml</> config.",
            "",
        ]
    )

    name = self.option("name")
    if not name:
        name = Path.cwd().name.lower()

    question = self.create_question(
        "Package name [<comment>{}</comment>]: ".format(name), default=name
    )
    name = self.ask(question)

    version = "0.1.0"
    question = self.create_question(
        "Version [<comment>{}</comment>]: ".format(version), default=version
    )
    version = self.ask(question)

    description = self.option("description") or ""
    question = self.create_question(
        "Description [<comment>{}</comment>]: ".format(description),
        default=description,
    )
    description = self.ask(question)

    # Default author comes from the git config as "Name <email>".
    author = self.option("author")
    if not author and vcs_config and vcs_config.get("user.name"):
        author = vcs_config["user.name"]
        author_email = vcs_config.get("user.email")
        if author_email:
            author += " <{}>".format(author_email)

    question = self.create_question(
        "Author [<comment>{}</comment>, n to skip]: ".format(author), default=author
    )
    question.validator = lambda v: self._validate_author(v, author)
    author = self.ask(question)

    if not author:
        authors = []
    else:
        authors = [author]

    license = self.option("license") or ""
    question = self.create_question(
        "License [<comment>{}</comment>]: ".format(license), default=license
    )
    question.validator = self._validate_license
    license = self.ask(question)

    question = self.create_question("Compatible Python versions [*]: ", default="*")
    python = self.ask(question)

    self.line("")

    requirements = {}

    question = "Would you like to define your dependencies (require) interactively?"
    if self.confirm(question, True):
        requirements = self._format_requirements(
            self._determine_requirements(self.option("dependency"))
        )

    dev_requirements = {}

    # Fixed: the prompt was missing its trailing question mark.
    question = (
        "Would you like to define your dev dependencies"
        " (require-dev) interactively?"
    )
    if self.confirm(question, True):
        dev_requirements = self._format_requirements(
            self._determine_requirements(self.option("dev-dependency"))
        )

    layout_ = layout("standard")(
        name,
        version,
        description=description,
        author=authors[0] if authors else None,
        license=license,
        python=python,
        dependencies=requirements,
        dev_dependencies=dev_requirements,
    )

    content = layout_.generate_poetry_content()
    if self.input.is_interactive():
        self.line("<info>Generated file</info>")
        self.line(["", content, ""])
        if not self.confirm("Do you confirm generation?", True):
            self.line("<error>Command aborted</error>")

            return 1

    # Explicit encoding avoids locale-dependent output on some platforms.
    with (Path.cwd() / "pyproject.toml").open("w", encoding="utf-8") as f:
        f.write(content)
def create(cls, io, name=None, cwd=None):  # type: (...) -> Venv
    """Locate or create the virtualenv to use and return a Venv for it.

    Sets the VIRTUAL_ENV environment variable when a venv is selected;
    returns a bare instance (system environment) when already running
    inside the selected venv or when creation is disabled.
    """
    if "VIRTUAL_ENV" not in os.environ:
        # Not in a virtualenv
        # Checking if we need to create one
        # First we check if there is a .venv
        # at the root of the project.
        if cwd and (cwd / ".venv").exists():
            venv = cwd / ".venv"
        else:
            config = Config.create("config.toml")
            create_venv = config.setting("settings.virtualenvs.create")
            root_venv = config.setting("settings.virtualenvs.in-project")
            venv_path = config.setting("settings.virtualenvs.path")
            if root_venv:
                # In-project virtualenv lives at <project>/.venv
                if not cwd:
                    raise RuntimeError(
                        "Unable to determine the project's directory"
                    )

                venv_path = cwd / ".venv"
            elif venv_path is None:
                venv_path = Path(CACHE_DIR) / "virtualenvs"
            else:
                venv_path = Path(venv_path)

            if not name:
                name = Path.cwd().name

            # Suffix the venv name with the interpreter's major.minor.
            name = "{}-py{}".format(
                name, ".".join([str(v) for v in sys.version_info[:2]])
            )

            if root_venv:
                venv = venv_path
            else:
                venv = venv_path / name

            if not venv.exists():
                if create_venv is False:
                    # Creation disabled: stay in the current environment.
                    io.writeln(
                        "<fg=black;bg=yellow>"
                        "Skipping virtualenv creation, "
                        "as specified in config file."
                        "</>"
                    )

                    return cls()

                io.writeln(
                    "Creating virtualenv <info>{}</> in {}".format(
                        name, str(venv_path)
                    )
                )

                cls.build(str(venv))
            else:
                if io.is_very_verbose():
                    io.writeln(
                        "Virtualenv <info>{}</> already exists.".format(name)
                    )

        os.environ["VIRTUAL_ENV"] = str(venv)

    # venv detection:
    # stdlib venv may symlink sys.executable, so we can't use realpath.
    # but others can symlink *to* the venv Python,
    # so we can't just use sys.executable.
    # So we just check every item in the symlink tree (generally <= 3)
    p = os.path.normcase(sys.executable)
    paths = [p]
    while os.path.islink(p):
        p = os.path.normcase(os.path.join(os.path.dirname(p), os.readlink(p)))
        paths.append(p)

    p_venv = os.path.normcase(os.environ["VIRTUAL_ENV"])
    if any(p.startswith(p_venv) for p in paths):
        # Running properly in the virtualenv, don't need to do anything
        return cls()

    venv = os.environ["VIRTUAL_ENV"]

    return cls(venv)
def package():
    """Build a bare root project package anchored at the current directory."""
    root_package = ProjectPackage("root", "1.0")
    root_package.root_dir = Path.cwd()

    return root_package
def publish(self, repository_name, username, password):
    """Upload the built package to *repository_name* (PyPI by default).

    Resolves the repository url from config.toml, credentials from the
    arguments, auth.toml, or interactive prompts, then delegates to the
    uploader.
    """
    if repository_name:
        self._io.writeln(
            'Publishing <info>{}</info> (<comment>{}</comment>) '
            'to <fg=cyan>{}</>'.format(
                self._package.pretty_name,
                self._package.pretty_version,
                repository_name
            )
        )
    else:
        self._io.writeln(
            'Publishing <info>{}</info> (<comment>{}</comment>) '
            'to <fg=cyan>PyPI</>'.format(
                self._package.pretty_name,
                self._package.pretty_version
            )
        )

    if not repository_name:
        url = 'https://upload.pypi.org/legacy/'
        repository_name = 'pypi'
    else:
        # Retrieving config information
        config_file = Path(CONFIG_DIR) / 'config.toml'

        if not config_file.exists():
            raise RuntimeError(
                'Config file does not exist. '
                'Unable to get repository information'
            )

        with config_file.open() as f:
            config = toml.loads(f.read())

        if (
            'repositories' not in config
            or repository_name not in config['repositories']
        ):
            raise RuntimeError(
                'Repository {} is not defined'.format(repository_name)
            )

        url = config['repositories'][repository_name]['url']

    if not (username and password):
        # Look up stored credentials for this repository.
        auth_file = Path(CONFIG_DIR) / 'auth.toml'
        if auth_file.exists():
            with auth_file.open() as f:
                auth_config = toml.loads(f.read())

            if 'http-basic' in auth_config and repository_name in auth_config['http-basic']:
                config = auth_config['http-basic'][repository_name]

                username = config.get('username')
                password = config.get('password')

    # Requesting missing credentials
    # NOTE(review): these two prompts were redacted/garbled in the source
    # ("self._io.ask('Username:'******'Password:')"); reconstructed as
    # separate username/password prompts — confirm the exact IO method
    # used for the hidden password prompt.
    if not username:
        username = self._io.ask('Username:')

    if not password:
        password = self._io.ask_hidden('Password:')

    # TODO: handle certificates
    self._uploader.auth(username, password)

    return self._uploader.upload(url)
def test_exporter_exports_requirements_txt_with_legacy_packages_and_credentials(
    tmp_dir, poetry, config
):
    """Exporting with ``with_credentials=True`` must embed the legacy
    repository's basic-auth credentials in the ``--extra-index-url`` line."""
    # Register a legacy (PEP 503 "simple") repository with credentials.
    poetry.pool.add_repository(
        LegacyRepository(
            "custom",
            "https://example.com/simple",
            auth=Auth("https://example.com/simple", "foo", "bar"),
        )
    )
    # One PyPI package and one package sourced from the legacy repository.
    poetry.locker.mock_lock_data(
        {
            "package": [
                {
                    "name": "foo",
                    "version": "1.2.3",
                    "category": "main",
                    "optional": False,
                    "python-versions": "*",
                },
                {
                    "name": "bar",
                    "version": "4.5.6",
                    "category": "dev",
                    "optional": False,
                    "python-versions": "*",
                    "source": {
                        "type": "legacy",
                        "url": "https://example.com/simple",
                        "reference": "",
                    },
                },
            ],
            "metadata": {
                "python-versions": "*",
                "content-hash": "123456789",
                "hashes": {"foo": ["12345"], "bar": ["67890"]},
            },
        }
    )
    exporter = Exporter(poetry)

    exporter.export(
        "requirements.txt",
        Path(tmp_dir),
        "requirements.txt",
        dev=True,
        with_credentials=True,
    )

    with (Path(tmp_dir) / "requirements.txt").open(encoding="utf-8") as f:
        content = f.read()

    expected = """\
--extra-index-url https://foo:[email protected]/simple

bar==4.5.6 \\
    --hash=sha256:67890
foo==1.2.3 \\
    --hash=sha256:12345
"""

    assert expected == content
import pytest

from poetry.packages import FileDependency
from poetry.utils._compat import Path

# Directory holding the distribution fixtures used by these tests.
DIST_PATH = Path(__file__).parent.parent / "fixtures" / "distributions"


def test_file_dependency_wrong_path():
    """A path that does not exist must be rejected."""
    missing = DIST_PATH / "demo-0.2.0.tar.gz"
    with pytest.raises(ValueError):
        FileDependency("demo", missing)


def test_file_dependency_dir():
    """A directory (rather than a file) must be rejected."""
    with pytest.raises(ValueError):
        FileDependency("demo", DIST_PATH)
def test_locker_dumps_dependency_information_correctly(locker, root):
    """Path, url and git dependency sources must round-trip into the
    lock file in inline-table form."""
    root_dir = Path(__file__).parent.parent.joinpath("fixtures")
    package_a = get_package("A", "1.0.0")
    # One dependency of each source kind: editable path, plain path,
    # sdist archive path, url, and git branch.
    package_a.add_dependency(
        Factory.create_dependency(
            "B", {"path": "project_with_extras", "develop": True}, root_dir=root_dir
        )
    )
    package_a.add_dependency(
        Factory.create_dependency(
            "C",
            {"path": "directory/project_with_transitive_directory_dependencies"},
            root_dir=root_dir,
        )
    )
    package_a.add_dependency(
        Factory.create_dependency(
            "D", {"path": "distributions/demo-0.1.0.tar.gz"}, root_dir=root_dir
        )
    )
    package_a.add_dependency(
        Factory.create_dependency(
            "E", {"url": "https://python-poetry.org/poetry-1.2.0.tar.gz"}
        )
    )
    package_a.add_dependency(
        Factory.create_dependency(
            "F", {"git": "https://github.com/python-poetry/poetry.git", "branch": "foo"}
        )
    )

    packages = [package_a]

    locker.set_lock_data(root, packages)

    with locker.lock.open(encoding="utf-8") as f:
        content = f.read()

    expected = """[[package]]
name = "A"
version = "1.0.0"
description = ""
category = "main"
optional = false
python-versions = "*"

[package.dependencies]
B = {path = "project_with_extras", develop = true}
C = {path = "directory/project_with_transitive_directory_dependencies"}
D = {path = "distributions/demo-0.1.0.tar.gz"}
E = {url = "https://python-poetry.org/poetry-1.2.0.tar.gz"}
F = {git = "https://github.com/python-poetry/poetry.git", branch = "foo"}

[metadata]
lock-version = "1.1"
python-versions = "*"
content-hash = "115cf985d932e9bf5f540555bbdd75decbb62cac81e399375fc19f6277f8c1d8"

[metadata.files]
A = []
"""

    assert expected == content
def source_dir(tmp_path):  # type: (Path) -> Path
    """Yield the pytest tmp_path, re-wrapped via its POSIX string form."""
    normalized = Path(tmp_path.as_posix())
    yield normalized
def add_dependency(
    self,
    name,  # type: str
    constraint=None,  # type: Union[str, dict, None]
    category="main",  # type: str
):  # type: (...) -> Dependency
    """Parse a requirement into the appropriate Dependency subclass,
    register it on ``self.requires``/``self.dev_requires`` and return it.

    ``constraint`` may be a plain version string, or a dict describing a
    git/file/path/versioned dependency with optional python/platform
    markers and extras.
    """
    if constraint is None:
        # No constraint given: accept any version.
        constraint = "*"

    if isinstance(constraint, dict):
        optional = constraint.get("optional", False)
        python_versions = constraint.get("python")
        platform = constraint.get("platform")
        allows_prereleases = constraint.get("allows-prereleases", False)

        if "git" in constraint:
            # VCS dependency
            dependency = VCSDependency(
                name,
                "git",
                constraint["git"],
                branch=constraint.get("branch", None),
                tag=constraint.get("tag", None),
                rev=constraint.get("rev", None),
                optional=optional,
            )
        elif "file" in constraint:
            file_path = Path(constraint["file"])

            # NOTE(review): FileDependency receives the path (not ``name``)
            # as its first argument here — confirm against its signature.
            dependency = FileDependency(file_path, category=category, base=self.root_dir)
        elif "path" in constraint:
            path = Path(constraint["path"])

            # Resolve relative paths against the project root when known.
            if self.root_dir:
                is_file = (self.root_dir / path).is_file()
            else:
                is_file = path.is_file()

            if is_file:
                dependency = FileDependency(
                    path, category=category, optional=optional, base=self.root_dir
                )
            else:
                dependency = DirectoryDependency(
                    path,
                    category=category,
                    optional=optional,
                    base=self.root_dir,
                    develop=constraint.get("develop", True),
                )
        else:
            version = constraint["version"]

            dependency = Dependency(
                name,
                version,
                optional=optional,
                category=category,
                allows_prereleases=allows_prereleases,
            )

        # Build an environment marker from python/platform restrictions.
        marker = AnyMarker()
        if python_versions:
            dependency.python_versions = python_versions
            marker = marker.intersect(
                parse_marker(
                    create_nested_marker(
                        "python_version", dependency.python_constraint
                    )
                )
            )

        if platform:
            marker = marker.intersect(
                parse_marker(
                    create_nested_marker(
                        "sys_platform", parse_generic_constraint(platform)
                    )
                )
            )

        if not marker.is_any():
            dependency.marker = marker

        if "extras" in constraint:
            for extra in constraint["extras"]:
                dependency.extras.append(extra)
    else:
        # Plain version-string constraint.
        dependency = Dependency(name, constraint, category=category)

    if category == "dev":
        self.dev_requires.append(dependency)
    else:
        self.requires.append(dependency)

    return dependency
def handle(self):
    """Interactively build a pyproject.toml in the current directory.

    Walks the user through name/version/description/author/license/python
    questions, optionally collects main and dev dependencies, previews the
    generated file and writes it on confirmation.

    :return: 1 if a pyproject.toml already exists or generation is aborted
    """
    from poetry.layouts import layout
    from poetry.utils._compat import Path
    from poetry.utils.env import SystemEnv
    from poetry.vcs.git import GitConfig

    if (Path.cwd() / "pyproject.toml").exists():
        self.line("<error>A pyproject.toml file already exists.</error>")

        return 1

    vcs_config = GitConfig()

    self.line("")
    self.line(
        "This command will guide you through creating your <info>pyproject.toml</> config."
    )
    self.line("")

    name = self.option("name")
    if not name:
        # Default the package name to the directory name.
        name = Path.cwd().name.lower()

        question = self.create_question(
            "Package name [<comment>{}</comment>]: ".format(name), default=name
        )
        name = self.ask(question)

    version = "0.1.0"
    question = self.create_question(
        "Version [<comment>{}</comment>]: ".format(version), default=version
    )
    version = self.ask(question)

    description = self.option("description") or ""
    question = self.create_question(
        "Description [<comment>{}</comment>]: ".format(description),
        default=description,
    )
    description = self.ask(question)

    author = self.option("author")
    if not author and vcs_config and vcs_config.get("user.name"):
        # Fall back to git identity for the default author string.
        author = vcs_config["user.name"]
        author_email = vcs_config.get("user.email")
        if author_email:
            author += " <{}>".format(author_email)

    question = self.create_question(
        "Author [<comment>{}</comment>, n to skip]: ".format(author), default=author
    )
    question.set_validator(lambda v: self._validate_author(v, author))
    author = self.ask(question)

    if not author:
        authors = []
    else:
        authors = [author]

    license = self.option("license") or ""

    question = self.create_question(
        "License [<comment>{}</comment>]: ".format(license), default=license
    )
    question.set_validator(self._validate_license)
    license = self.ask(question)

    # Default the python constraint to the running interpreter's version.
    current_env = SystemEnv(Path(sys.executable))
    default_python = "^{}".format(
        ".".join(str(v) for v in current_env.version_info[:2])
    )
    question = self.create_question(
        "Compatible Python versions [<comment>{}</comment>]: ".format(
            default_python
        ),
        default=default_python,
    )
    python = self.ask(question)

    self.line("")

    requirements = {}

    question = "Would you like to define your main dependencies interactively?"
    help_message = (
        "You can specify a package in the following forms:\n"
        " - A single name (<b>requests</b>)\n"
        " - A name and a constraint (<b>requests ^2.23.0</b>)\n"
        " - A git url (<b>https://github.com/sdispater/poetry.git</b>)\n"
        " - A git url with a revision (<b>https://github.com/sdispater/poetry.git@develop</b>)\n"
        " - A file path (<b>../my-package/my-package.whl</b>)\n"
        " - A directory (<b>../my-package/</b>)\n"
        " - An url (<b>https://example.com/packages/my-package-0.1.0.tar.gz</b>)\n"
    )
    help_displayed = False
    if self.confirm(question, True):
        self.line(help_message)
        help_displayed = True
        requirements = self._format_requirements(
            self._determine_requirements(self.option("dependency"))
        )
        self.line("")

    dev_requirements = {}
    question = (
        "Would you like to define your dev dependencies"
        " (require-dev) interactively"
    )
    if self.confirm(question, True):
        if not help_displayed:
            # Only show the syntax help once.
            self.line(help_message)

        dev_requirements = self._format_requirements(
            self._determine_requirements(self.option("dev-dependency"))
        )
        self.line("")

    layout_ = layout("standard")(
        name,
        version,
        description=description,
        author=authors[0] if authors else None,
        license=license,
        python=python,
        dependencies=requirements,
        dev_dependencies=dev_requirements,
    )

    content = layout_.generate_poetry_content()
    if self.io.is_interactive():
        # Preview the generated file and ask for confirmation.
        self.line("<info>Generated file</info>")
        self.line("")
        self.line(content)
        self.line("")

        if not self.confirm("Do you confirm generation?", True):
            self.line("<error>Command aborted</error>")

            return 1

    with (Path.cwd() / "pyproject.toml").open("w", encoding="utf-8") as f:
        f.write(content)
def get_package_from_directory(
    cls, directory, name=None
):  # type: (Path, Optional[str]) -> Package
    """Build a Package from a source directory.

    Poetry projects are read via their pyproject.toml; anything else runs
    ``setup.py egg_info`` (falling back to static setup.py parsing when
    that fails) and reads the resulting metadata.

    :param directory: path to the package source tree
    :param name: expected package name (from the dependency), if any
    :raises RuntimeError: when the version cannot be determined or the
        discovered name does not match ``name``
    """
    supports_poetry = False
    pyproject = directory.joinpath("pyproject.toml")
    if pyproject.exists():
        pyproject = TomlFile(pyproject)
        pyproject_content = pyproject.read()
        supports_poetry = (
            "tool" in pyproject_content and "poetry" in pyproject_content["tool"]
        )

    if supports_poetry:
        poetry = Factory().create_poetry(directory)

        pkg = poetry.package
        package = Package(pkg.name, pkg.version)

        # Copy over non-optional requirements and all extras.
        for dep in pkg.requires:
            if not dep.is_optional():
                package.requires.append(dep)

        for extra, deps in pkg.extras.items():
            if extra not in package.extras:
                package.extras[extra] = []

            for dep in deps:
                package.extras[extra].append(dep)

        package.python_versions = pkg.python_versions
    else:
        # Execute egg_info
        current_dir = os.getcwd()
        os.chdir(str(directory))

        try:
            cls._execute_setup()
        except EnvCommandError:
            # egg_info failed: fall back to statically reading setup.py.
            result = SetupReader.read_from_directory(directory)
            if not result["name"]:
                # The name could not be determined
                # We use the dependency name
                result["name"] = name

            if not result["version"]:
                # The version could not be determined
                # so we raise an error since it is mandatory
                raise RuntimeError(
                    "Unable to retrieve the package version for {}".format(directory)
                )

            package_name = result["name"]
            package_version = result["version"]
            python_requires = result["python_requires"]
            if python_requires is None:
                python_requires = "*"

            package_summary = ""

            # Rebuild a requires.txt-style text to feed parse_requires.
            requires = ""
            for dep in result["install_requires"]:
                requires += dep + "\n"

            if result["extras_require"]:
                requires += "\n"

            for extra_name, deps in result["extras_require"].items():
                requires += "[{}]\n".format(extra_name)

                for dep in deps:
                    requires += dep + "\n"

                requires += "\n"

            reqs = parse_requires(requires)
        else:
            os.chdir(current_dir)
            # Sometimes pathlib will fail on recursive
            # symbolic links, so we need to workaround it
            # and use the glob module instead.
            # Note that this does not happen with pathlib2
            # so it's safe to use it for Python < 3.4.
            if PY35:
                egg_info = next(
                    Path(p)
                    for p in glob.glob(
                        os.path.join(str(directory), "**", "*.egg-info"),
                        recursive=True,
                    )
                )
            else:
                egg_info = next(directory.glob("**/*.egg-info"))

            meta = pkginfo.UnpackedSDist(str(egg_info))
            package_name = meta.name
            package_version = meta.version
            package_summary = meta.summary
            python_requires = meta.requires_python

            if meta.requires_dist:
                reqs = list(meta.requires_dist)
            else:
                reqs = []
                requires = egg_info / "requires.txt"
                if requires.exists():
                    with requires.open(encoding="utf-8") as f:
                        reqs = parse_requires(f.read())
        finally:
            # Always restore the working directory.
            os.chdir(current_dir)

        package = Package(package_name, package_version)
        package.description = package_summary

        for req in reqs:
            dep = dependency_from_pep_508(req)
            if dep.in_extras:
                for extra in dep.in_extras:
                    if extra not in package.extras:
                        package.extras[extra] = []

                    package.extras[extra].append(dep)

            if not dep.is_optional():
                package.requires.append(dep)

        if python_requires:
            package.python_versions = python_requires

    if name and name != package.name:
        # For now, the dependency's name must match the actual package's name
        raise RuntimeError(
            "The dependency name for {} does not match the actual package's name: {}".format(
                name, package.name
            )
        )

    package.source_type = "directory"
    package.source_url = directory.as_posix()

    return package
from typing import Optional
from typing import Union

import tomlkit
from cleo.testers import CommandTester

from poetry.core.semver import Version
from poetry.utils._compat import Path
from poetry.utils.env import EnvManager
from poetry.utils.env import MockEnv
from poetry.utils.toml_file import TomlFile

# Fixture project used by the tests in this module.
CWD = Path(__file__).parent.parent / "fixtures" / "simple_project"


def build_venv(path, executable=None):  # type: (Union[Path,str], Optional[str]) -> ()
    """Stub replacement for EnvManager.build_venv: only create the directory."""
    os.mkdir(str(path))


def check_output_wrapper(version=Version.parse("3.7.1")):
    """Return a fake ``check_output`` that answers EnvManager's python
    version/prefix probe commands with values derived from ``version``.
    """

    def check_output(cmd, *args, **kwargs):
        if "sys.version_info[:3]" in cmd:
            return version.text
        elif "sys.version_info[:2]" in cmd:
            return "{}.{}".format(version.major, version.minor)
        else:
            return str(Path("/prefix"))

    # FIX: the wrapper previously fell off the end and returned None,
    # making it unusable as a check_output replacement.
    return check_output
def complete_package(
    self, package
):  # type: (DependencyPackage) -> DependencyPackage
    """Resolve and clean a package's dependency list.

    Fetches full metadata from the pool (for registry packages), resolves
    deferred directory/file/vcs/url dependencies, merges duplicate
    requirements whose constraints agree, and filters out dependencies
    whose markers/python constraints are incompatible with the current
    selection.

    :raises CompatibilityError: when duplicates differ only by python
        constraint, signalling the solver to resolve per python subset
    """
    if package.is_root():
        package = package.clone()
        requires = package.all_requires
    elif not package.is_root() and package.source_type not in {
        "directory",
        "file",
        "url",
        "git",
    }:
        # Registry package: fetch the fully populated package from the pool.
        package = DependencyPackage(
            package.dependency,
            self._pool.package(
                package.name,
                package.version.text,
                extras=package.requires_extras,
                repository=package.dependency.source_name,
            ),
        )
        requires = package.requires
    else:
        requires = package.requires

    # Retrieving constraints for deferred dependencies
    for r in requires:
        if r.is_directory():
            self.search_for_directory(r)
        elif r.is_file():
            self.search_for_file(r)
        elif r.is_vcs():
            self.search_for_vcs(r)
        elif r.is_url():
            self.search_for_url(r)

    # Keep only dependencies whose python constraint can apply to the root.
    dependencies = [
        r
        for r in requires
        if self._package.python_constraint.allows_any(r.python_constraint)
    ]

    # Searching for duplicate dependencies
    #
    # If the duplicate dependencies have the same constraint,
    # the requirements will be merged.
    #
    # For instance:
    #   - enum34; python_version=="2.7"
    #   - enum34; python_version=="3.3"
    #
    # will become:
    #   - enum34; python_version=="2.7" or python_version=="3.3"
    #
    # If the duplicate dependencies have different constraints
    # we have to split the dependency graph.
    #
    # An example of this is:
    #   - pypiwin32 (220); sys_platform == "win32" and python_version >= "3.6"
    #   - pypiwin32 (219); sys_platform == "win32" and python_version < "3.6"
    duplicates = OrderedDict()
    for dep in dependencies:
        if dep.name not in duplicates:
            duplicates[dep.name] = []

        duplicates[dep.name].append(dep)

    dependencies = []
    for dep_name, deps in duplicates.items():
        if len(deps) == 1:
            dependencies.append(deps[0])
            continue

        self.debug("<debug>Duplicate dependencies for {}</debug>".format(dep_name))

        # Regrouping by constraint
        by_constraint = OrderedDict()
        for dep in deps:
            if dep.constraint not in by_constraint:
                by_constraint[dep.constraint] = []

            by_constraint[dep.constraint].append(dep)

        # We merge by constraint
        for constraint, _deps in by_constraint.items():
            new_markers = []
            for dep in _deps:
                marker = dep.marker.without_extras()
                if marker.is_any():
                    # No marker or only extras
                    continue

                new_markers.append(marker)

            if not new_markers:
                continue

            # Collapse same-constraint duplicates into one dependency
            # whose marker is the union of all markers.
            dep = _deps[0]
            dep.marker = dep.marker.union(MarkerUnion(*new_markers))
            by_constraint[constraint] = [dep]

            continue

        if len(by_constraint) == 1:
            self.debug(
                "<debug>Merging requirements for {}</debug>".format(str(deps[0]))
            )
            dependencies.append(list(by_constraint.values())[0][0])
            continue

        # We leave dependencies as-is if they have the same
        # python/platform constraints.
        # That way the resolver will pickup the conflict
        # and display a proper error.
        _deps = [value[0] for value in by_constraint.values()]
        seen = set()
        for _dep in _deps:
            pep_508_dep = _dep.to_pep_508(False)
            if ";" not in pep_508_dep:
                _requirements = ""
            else:
                _requirements = pep_508_dep.split(";")[1].strip()

            if _requirements not in seen:
                seen.add(_requirements)

        if len(_deps) != len(seen):
            # Some duplicates share a marker: keep them all and let the
            # resolver surface the conflict.
            for _dep in _deps:
                dependencies.append(_dep)

            continue

        # At this point, we raise an exception that will
        # tell the solver to enter compatibility mode
        # which means it will resolve for subsets
        # Python constraints
        #
        # For instance, if our root package requires Python ~2.7 || ^3.6
        # And we have one dependency that requires Python <3.6
        # and the other Python >=3.6 than the solver will solve
        # dependencies for Python >=2.7,<2.8 || >=3.4,<3.6
        # and Python >=3.6,<4.0
        python_constraints = []
        for constraint, _deps in by_constraint.items():
            python_constraints.append(_deps[0].python_versions)

        _deps = [str(_dep[0]) for _dep in by_constraint.values()]
        self.debug(
            "<warning>Different requirements found for {}.</warning>".format(
                ", ".join(_deps[:-1]) + " and " + _deps[-1]
            )
        )
        raise CompatibilityError(*python_constraints)

    # Modifying dependencies as needed
    clean_dependencies = []
    for dep in dependencies:
        if not package.dependency.transitive_marker.without_extras().is_any():
            marker_intersection = package.dependency.transitive_marker.without_extras().intersect(
                dep.marker.without_extras()
            )
            if marker_intersection.is_empty():
                # The dependency is not needed, since the markers specified
                # for the current package selection are not compatible with
                # the markers for the current dependency, so we skip it
                continue

            dep.transitive_marker = marker_intersection

        if not package.dependency.python_constraint.is_any():
            python_constraint_intersection = dep.python_constraint.intersect(
                package.dependency.python_constraint
            )
            if python_constraint_intersection.is_empty():
                # This dependency is not needed under current python constraint.
                continue

            dep.transitive_python_versions = str(python_constraint_intersection)

        if (package.dependency.is_directory() or package.dependency.is_file()) and (
            dep.is_directory() or dep.is_file()
        ):
            # Rebase a path dependency of a path dependency onto the
            # parent package's root directory.
            relative_path = Path(
                os.path.relpath(dep.full_path.as_posix(), package.root_dir.as_posix())
            )

            # TODO: Improve the way we set the correct relative path for dependencies
            dep._path = relative_path

        clean_dependencies.append(dep)

    package.requires = clean_dependencies

    return package
def test_get_client_cert(config):
    """A configured client-cert entry is returned as a Path."""
    cert_location = "path/to/client.pem"
    config.merge({"certificates": {"foo": {"client-cert": cert_location}}})

    result = get_client_cert(config, "foo")

    assert result == Path(cert_location)
def _execute_setup(cls):
    """Run ``setup.py egg_info`` inside a fresh throwaway virtualenv."""
    with temporary_directory() as tmp_dir:
        EnvManager.build_venv(tmp_dir)
        env_root = Path(tmp_dir)
        venv = VirtualEnv(env_root, env_root)
        venv.run("python", "setup.py", "egg_info")
def handle(self):
    """Clear poetry's repository cache.

    The ``cache`` argument is ``repo``, ``repo:package`` or
    ``repo:package:version``; the first form requires ``--all`` and
    flushes the whole repository cache, the last deletes a single entry.

    :raises RuntimeError: for unsupported forms or a missing --all flag
    :raises ValueError: for an invalid repository name or cache key
    """
    from cachy import CacheManager

    from poetry.locations import CACHE_DIR
    from poetry.utils._compat import Path

    cache = self.argument("cache")
    parts = cache.split(":")
    root = parts[0]

    base_cache = Path(CACHE_DIR) / "cache" / "repositories"
    cache_dir = base_cache / root

    # NOTE(review): intended as a containment check, but since cache_dir
    # is built as base_cache / root, relative_to only rejects absolute
    # roots; a root containing ".." may still pass — confirm intent.
    try:
        cache_dir.relative_to(base_cache)
    except ValueError:
        raise ValueError("{} is not a valid repository cache".format(root))

    cache = CacheManager(
        {
            "default": parts[0],
            "serializer": "json",
            "stores": {parts[0]: {"driver": "file", "path": str(cache_dir)}},
        }
    )

    if len(parts) == 1:
        if not self.option("all"):
            raise RuntimeError(
                "Add the --all option if you want to clear all "
                "{} caches".format(parts[0])
            )

        if not os.path.exists(cache_dir):
            self.line("No cache entries for {}".format(parts[0]))

            return 0

        # Calculate number of entries
        entries_count = 0
        for path, dirs, files in os.walk(str(cache_dir)):
            entries_count += len(files)

        delete = self.confirm("<question>Delete {} entries?</>".format(entries_count))
        if not delete:
            return 0

        cache.flush()
    elif len(parts) == 2:
        raise RuntimeError(
            "Only specifying the package name is not yet supported. "
            "Add a specific version to clear"
        )
    elif len(parts) == 3:
        package = parts[1]
        version = parts[2]

        if not cache.has("{}:{}".format(package, version)):
            self.line("No cache entries for {}:{}".format(package, version))

            return 0

        delete = self.confirm("Delete cache entry {}:{}".format(package, version))
        if not delete:
            return 0

        cache.forget("{}:{}".format(package, version))
    else:
        raise ValueError("Invalid cache key")
def project(name):
    """Return the path of the named fixture project directory."""
    fixtures_root = Path(__file__).parent / "fixtures"
    return fixtures_root / name
def add_dependency(
    self,
    name,  # type: str
    constraint=None,  # type: Union[str, dict, None]
    category="main",  # type: str
):  # type: (...) -> Dependency
    """Parse a requirement into the appropriate Dependency subclass,
    register it on ``self.requires``/``self.dev_requires`` and return it.

    ``constraint`` may be a plain version string, or a dict describing a
    git/file/path/versioned dependency with optional python/platform
    restrictions and extras.
    """
    if constraint is None:
        # No constraint given: accept any version.
        constraint = "*"

    if isinstance(constraint, dict):
        optional = constraint.get("optional", False)
        python_versions = constraint.get("python")
        platform = constraint.get("platform")
        allows_prereleases = constraint.get("allows-prereleases", False)

        if "git" in constraint:
            # VCS dependency
            dependency = VCSDependency(
                name,
                "git",
                constraint["git"],
                branch=constraint.get("branch", None),
                tag=constraint.get("tag", None),
                rev=constraint.get("rev", None),
                optional=optional,
            )
        elif "file" in constraint:
            file_path = Path(constraint["file"])

            # NOTE(review): FileDependency receives the path (not ``name``)
            # as its first argument here — confirm against its signature.
            dependency = FileDependency(
                file_path, category=category, base=self.root_dir
            )
        elif "path" in constraint:
            path = Path(constraint["path"])

            # Resolve relative paths against the project root when known.
            if self.root_dir:
                is_file = (self.root_dir / path).is_file()
            else:
                is_file = path.is_file()

            if is_file:
                dependency = FileDependency(
                    path, category=category, optional=optional, base=self.root_dir
                )
            else:
                dependency = DirectoryDependency(
                    path,
                    category=category,
                    optional=optional,
                    base=self.root_dir,
                    develop=constraint.get("develop", False),
                )
        else:
            version = constraint["version"]

            dependency = Dependency(
                name,
                version,
                optional=optional,
                category=category,
                allows_prereleases=allows_prereleases,
            )

        if python_versions:
            dependency.python_versions = python_versions

        if platform:
            dependency.platform = platform

        if "extras" in constraint:
            for extra in constraint["extras"]:
                dependency.extras.append(extra)
    else:
        # Plain version-string constraint.
        dependency = Dependency(name, constraint, category=category)

    if category == "dev":
        self.dev_requires.append(dependency)
    else:
        self.requires.append(dependency)

    return dependency
def env():
    """Fixture: a mock virtualenv environment rooted at /prefix."""
    return MockEnv(
        path=Path("/prefix"),
        base=Path("/base/prefix"),
        is_venv=True,
    )
def search_for_vcs(self, dependency):  # type: (VCSDependency) -> List[Package]
    """
    Search for the specifications that match the given VCS dependency.

    Basically, we clone the repository in a temporary directory
    and get the information we need by checking out
    the specified reference.
    """
    if dependency.vcs != "git":
        raise ValueError("Unsupported VCS dependency {}".format(dependency.vcs))

    tmp_dir = Path(mkdtemp(prefix="pypoetry-git-{}".format(dependency.name)))

    try:
        git = Git()
        git.clone(dependency.source, tmp_dir)
        git.checkout(dependency.reference, tmp_dir)
        revision = git.rev_parse(dependency.reference, tmp_dir).strip()

        if dependency.tag or dependency.rev:
            # Pin to the exact tag/rev the user asked for.
            revision = dependency.reference

        pyproject = TomlFile(tmp_dir / "pyproject.toml")
        pyproject_content = None
        has_poetry = False
        if pyproject.exists():
            pyproject_content = pyproject.read()
            has_poetry = (
                "tool" in pyproject_content and "poetry" in pyproject_content["tool"]
            )

        if pyproject_content and has_poetry:
            # If a pyproject.toml file exists
            # We use it to get the information we need
            info = pyproject_content["tool"]["poetry"]

            name = info["name"]
            version = info["version"]
            package = Package(name, version, version)
            package.source_type = dependency.vcs
            package.source_url = dependency.source
            package.source_reference = dependency.reference
            for req_name, req_constraint in info["dependencies"].items():
                if req_name == "python":
                    package.python_versions = req_constraint
                    continue

                package.add_dependency(req_name, req_constraint)
        else:
            # We need to use setup.py here
            # to figure the information we need
            # We need to place ourselves in the proper
            # folder for it to work
            venv = Venv.create(self._io)

            current_dir = os.getcwd()
            os.chdir(tmp_dir.as_posix())

            try:
                venv.run("python", "setup.py", "egg_info")

                # Sometimes pathlib will fail on recursive
                # symbolic links, so we need to workaround it
                # and use the glob module instead.
                # Note that this does not happen with pathlib2
                # so it's safe to use it for Python < 3.4.
                if PY35:
                    egg_info = next(
                        Path(p)
                        for p in glob.glob(
                            os.path.join(str(tmp_dir), "**", "*.egg-info"),
                            recursive=True,
                        )
                    )
                else:
                    egg_info = next(tmp_dir.glob("**/*.egg-info"))

                meta = pkginfo.UnpackedSDist(str(egg_info))

                if meta.requires_dist:
                    reqs = list(meta.requires_dist)
                else:
                    reqs = []
                    requires = egg_info / "requires.txt"
                    if requires.exists():
                        with requires.open() as f:
                            reqs = parse_requires(f.read())

                package = Package(meta.name, meta.version)

                for req in reqs:
                    dep = dependency_from_pep_508(req)
                    if dep.in_extras:
                        for extra in dep.in_extras:
                            if extra not in package.extras:
                                package.extras[extra] = []

                            package.extras[extra].append(dep)

                    package.requires.append(dep)
            except Exception:
                raise
            finally:
                # Always restore the working directory.
                os.chdir(current_dir)

        package.source_type = "git"
        package.source_url = dependency.source
        package.source_reference = revision
    except Exception:
        raise
    finally:
        # Always remove the temporary clone.
        shutil.rmtree(tmp_dir.as_posix())

    if dependency.name != package.name:
        # For now, the dependency's name must match the actual package's name
        raise RuntimeError(
            "The dependency name for {} does not match the actual package's name: {}".format(
                dependency.name, package.name
            )
        )

    if dependency.extras:
        for extra in dependency.extras:
            if extra in package.extras:
                for dep in package.extras[extra]:
                    dep.activate()

    return [package]
def home(self):
    """Poetry home directory: $POETRY_HOME if set, else ~/.poetry (expanded)."""
    from poetry.utils._compat import Path

    raw = os.environ.get("POETRY_HOME", "~/.poetry")
    return Path(raw).expanduser()