def test_dir_hash():
    # type: () -> None
    """CacheHelper.dir_hash must ignore compiled artifacts and __pycache__ trees."""
    with temporary_dir() as tmp_dir:

        def path(*components):
            # Small helper to keep the many join calls below readable.
            return os.path.join(tmp_dir, *components)

        safe_mkdir(path("a", "b"))
        with safe_open(path("c", "d", "e.py"), "w") as fp:
            fp.write("contents1")
        with safe_open(path("f.py"), "w") as fp:
            fp.write("contents2")
        baseline = CacheHelper.dir_hash(tmp_dir)

        # Directory names participate in the hash: renaming changes it and
        # renaming back restores it.
        os.rename(path("c"), path("c-renamed"))
        assert baseline != CacheHelper.dir_hash(tmp_dir)
        os.rename(path("c-renamed"), path("c"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)

        # Byte-compiled artifacts sitting next to sources do not affect the hash.
        touch(path("c", "d", "e.pyc"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)
        touch(path("c", "d", "e.pyc.123456789"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)

        # Nothing under a __pycache__ directory counts, not even .py sources.
        pycache_dir = path("__pycache__")
        safe_mkdir(pycache_dir)
        touch(os.path.join(pycache_dir, "f.pyc"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.pyc.123456789"))
        assert baseline == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.py"))
        assert baseline == CacheHelper.dir_hash(
            tmp_dir
        ), "All content under __pycache__ directories should be ignored."
def assert_chroot_perms(copyfn):
    """Assert copyfn preserves file permission bits into a Chroot and through its zip."""
    with temporary_dir() as src:
        regular = os.path.join(src, "one")
        touch(regular)
        script = os.path.join(src, "two")
        touch(script)
        chmod_plus_x(script)

        with temporary_dir() as dst:
            chroot = Chroot(dst)
            copyfn(chroot, regular, "one")
            copyfn(chroot, script, "two")
            # The copies inside the chroot must keep the originals' permissions.
            assert extract_perms(regular) == extract_perms(os.path.join(chroot.path(), "one"))
            assert extract_perms(script) == extract_perms(os.path.join(chroot.path(), "two"))

            zip_path = os.path.join(src, "chroot.zip")
            chroot.zip(zip_path)
            with temporary_dir() as extract_dir:
                # A zip round-trip must also preserve permissions.
                with contextlib.closing(PermPreservingZipFile(zip_path)) as zf:
                    zf.extractall(extract_dir)
                assert extract_perms(regular) == extract_perms(os.path.join(extract_dir, "one"))
                assert extract_perms(script) == extract_perms(os.path.join(extract_dir, "two"))
def pex():
    # type: () -> Iterator[str]
    """Yield the real path of a fabric PEX built with tools included."""
    with temporary_dir() as tmpdir:
        pex_path = os.path.join(tmpdir, "fabric.pex")
        src_dir = os.path.join(tmpdir, "src")
        for rel_path in ("user/__init__.py", "user/package/__init__.py"):
            touch(os.path.join(src_dir, rel_path))

        # N.B.: --unzip just speeds up runs 2+ of the pex file and is otherwise not relevant to
        # these tests.
        build_args = [
            "fabric=={}".format(FABRIC_VERSION),
            "-c",
            "fab",
            "--sources-directory",
            src_dir,
            "-o",
            pex_path,
            "--unzip",
            "--include-tools",
        ]
        run_pex_command(args=build_args)
        yield os.path.realpath(pex_path)
def test_can_write_dir_writeable_perms():
    """can_write_dir: True for a writeable dir and its would-be children, False for a file."""
    with temporary_dir() as writeable:
        assert can_write_dir(writeable)

        nested = os.path.join(writeable, 'does/not/exist/yet')
        # A path that does not exist yet under a writeable root still qualifies.
        assert can_write_dir(nested)

        # Once the path is an actual file it can no longer serve as a dir.
        touch(nested)
        assert not can_write_dir(nested), 'Should not be able to write to a file.'
def test_can_write_dir_writeable_perms():
    # type: () -> None
    """Exercise can_write_dir on an existing dir, a missing subpath, and a plain file."""
    with temporary_dir() as writeable:
        # The temporary directory itself is writeable.
        assert can_write_dir(writeable)

        target = os.path.join(writeable, "does/not/exist/yet")
        # Paths that could still be created under a writeable root also qualify.
        assert can_write_dir(target)

        # After turning the path into a file it must be rejected.
        touch(target)
        assert not can_write_dir(target), "Should not be able to write to a file."
def test_atomic_directory_empty_workdir_failure():
    """An exception escaping the context discards the work_dir and never creates target_dir."""

    class SimulatedRuntimeError(RuntimeError):
        pass

    with temporary_dir() as scratch:
        target_dir = os.path.join(scratch, 'target_dir')
        with pytest.raises(SimulatedRuntimeError):
            with atomic_directory(target_dir) as work_dir:
                touch(os.path.join(work_dir, 'created'))
                raise SimulatedRuntimeError()

        # The scratch work dir is gone and nothing was promoted to the target.
        assert not os.path.exists(work_dir), 'The work_dir should always be cleaned up.'
        assert not os.path.exists(target_dir), (
            'When the context raises the work_dir it was given should not be moved to the target_dir.'
        )
def bootstrap_python_installer(dest):
    # type: (str) -> None
    """Clone pyenv into ``dest``, retrying up to 3 times, then stamp the fingerprint file.

    :param dest: Directory to clone pyenv into; any pre-existing content is removed.
    :raises RuntimeError: If all 3 clone attempts fail.
    """
    for _ in range(3):
        # Clean before *every* attempt, not just the first: `git clone` refuses to clone
        # into an existing non-empty directory, so a partial clone left behind by a failed
        # attempt would otherwise doom all subsequent retries.
        safe_rmtree(dest)
        try:
            subprocess.check_call(["git", "clone", "https://github.com/pyenv/pyenv.git", dest])
        except subprocess.CalledProcessError as e:
            print("caught exception: %r" % e)
            continue
        else:
            break
    else:
        raise RuntimeError("Helper method could not clone pyenv from git after 3 tries")

    # Create an empty file indicating the fingerprint of the correct set of test interpreters.
    touch(os.path.join(dest, _INTERPRETER_SET_FINGERPRINT))
def create_packages(self):
    """Create missing packages joining the vendor root to the base of the vendored distribution.

    For example, given a root at ``/home/jake/dev/pantsbuild/pex`` and a vendored distribution at
    ``pex/vendor/_vendored/requests`` this method would create the following package files::

      pex/vendor/_vendored/__init__.py
      pex/vendor/_vendored/requests/__init__.py

    These package files allow for standard python importers to find vendored code via re-directs
    from a `PEP-302 <https://www.python.org/dev/peps/pep-0302/>`_ importer like
    :class:`pex.third_party.VendorImporter`.
    """
    for depth in range(1, len(self._subpath_components) + 1):
        # Touch an __init__.py at each package level between the vendor root and the dist.
        init_py_components = _PACKAGE_COMPONENTS + self._subpath_components[:depth] + ['__init__.py']
        touch(os.path.join(self.ROOT, *init_py_components))
def test_atomic_directory_empty_workdir_failure():
    # type: () -> None
    """Raising inside atomic_directory must clean the work_dir and never create target_dir."""

    class SimulatedRuntimeError(RuntimeError):
        pass

    with temporary_dir() as scratch:
        target_dir = os.path.join(scratch, "target_dir")
        with pytest.raises(SimulatedRuntimeError):
            with atomic_directory(target_dir) as work_dir:
                touch(os.path.join(work_dir, "created"))
                raise SimulatedRuntimeError()

        # Neither the scratch dir nor the target may survive the failure.
        assert not os.path.exists(work_dir), "The work_dir should always be cleaned up."  # type: ignore[unreachable]
        assert not os.path.exists(
            target_dir
        ), "When the context raises the work_dir it was given should not be moved to the target_dir."
def bootstrap_python_installer(dest):
    """Clone pyenv into ``dest`` (up to 3 attempts) and stamp the interpreter fingerprint.

    Any pre-existing content of ``dest`` is removed. Raises RuntimeError when all 3 clone
    attempts fail.
    """
    for _ in range(3):
        # Re-clean before each attempt: a failed attempt can leave a partial clone behind,
        # and `git clone` errors out when the destination exists and is non-empty, which
        # would otherwise make every retry fail for the same reason.
        safe_rmtree(dest)
        try:
            subprocess.check_call(
                ['git', 'clone', 'https://github.com/pyenv/pyenv.git', dest]
            )
        except subprocess.CalledProcessError as e:
            print('caught exception: %r' % e)
            continue
        else:
            break
    else:
        raise RuntimeError("Helper method could not clone pyenv from git after 3 tries")

    # Create an empty file indicating the fingerprint of the correct set of test interpreters.
    touch(os.path.join(dest, _INTERPRETER_SET_FINGERPRINT))
def zip_fixture():
    """Yield (zip_path, extract_dir, plain_file, exe_file) for permission round-trip tests."""
    with temporary_dir() as target_dir:
        plain = os.path.join(target_dir, "one")
        touch(plain)
        executable = os.path.join(target_dir, "two")
        touch(executable)
        chmod_plus_x(executable)
        # The fixture is only meaningful if the two entries differ in permissions.
        assert extract_perms(plain) != extract_perms(executable)

        zip_file = os.path.join(target_dir, "test.zip")
        with contextlib.closing(PermPreservingZipFile(zip_file, "w")) as zf:
            zf.write(plain, "one")
            zf.write(executable, "two")

        yield zip_file, os.path.join(target_dir, "extract"), plain, executable
def zip_fixture():
    """Build a zip of one plain and one executable file; yield paths needed by the tests."""
    with temporary_dir() as target_dir:
        one = os.path.join(target_dir, 'one')
        two = os.path.join(target_dir, 'two')
        touch(one)
        touch(two)
        chmod_plus_x(two)
        # Sanity check: the two members must carry different permission bits.
        assert extract_perms(one) != extract_perms(two)

        zip_file = os.path.join(target_dir, 'test.zip')
        with contextlib.closing(PermPreservingZipFile(zip_file, 'w')) as zf:
            for src_path, arcname in ((one, 'one'), (two, 'two')):
                zf.write(src_path, arcname)

        extract_to = os.path.join(target_dir, 'extract')
        yield zip_file, extract_to, one, two
def test_atomic_directory_empty_workdir_finalize():
    """A clean exit from atomic_directory publishes the work_dir contents as target_dir."""
    with temporary_dir() as sandbox:
        target_dir = os.path.join(sandbox, 'target_dir')
        assert not os.path.exists(target_dir)

        with atomic_directory(target_dir) as work_dir:
            # Inside the context we get a fresh, empty scratch directory...
            assert work_dir is not None
            assert os.path.exists(work_dir)
            assert os.path.isdir(work_dir)
            assert [] == os.listdir(work_dir)

            touch(os.path.join(work_dir, 'created'))

            # ...and the target stays invisible until the context exits.
            assert not os.path.exists(target_dir)

        assert not os.path.exists(work_dir), 'The work_dir should always be cleaned up.'
        assert os.path.exists(os.path.join(target_dir, 'created'))
def zip_fixture():
    """Yield a perms-preserving zip plus extraction target and the two source files."""
    with temporary_dir() as target_dir:

        def member(name):
            # All fixture paths live under the one temporary directory.
            return os.path.join(target_dir, name)

        one = member('one')
        touch(one)
        two = member('two')
        touch(two)
        chmod_plus_x(two)
        assert extract_perms(one) != extract_perms(two)

        zip_file = member('test.zip')
        with contextlib.closing(PermPreservingZipFile(zip_file, 'w')) as zf:
            zf.write(one, 'one')
            zf.write(two, 'two')

        yield zip_file, member('extract'), one, two
def test_atomic_directory_empty_workdir_finalize():
    # type: () -> None
    """On clean exit the scratch work_dir is atomically promoted to target_dir."""
    with temporary_dir() as tmp_root:
        target_dir = os.path.join(tmp_root, "target_dir")
        assert not os.path.exists(target_dir)

        with atomic_directory(target_dir) as work_dir:
            assert work_dir is not None
            # The context hands out an existing, empty scratch directory.
            assert os.path.exists(work_dir)
            assert os.path.isdir(work_dir)
            assert [] == os.listdir(work_dir)

            touch(os.path.join(work_dir, "created"))
            # Nothing is published at the target while work is in flight.
            assert not os.path.exists(target_dir)

        assert not os.path.exists(work_dir), "The work_dir should always be cleaned up."
        assert os.path.exists(os.path.join(target_dir, "created"))
def test_atomic_directory_empty_workdir_finalize():
    # type: () -> None
    """A successful non-exclusive atomic_directory run finalizes its work into target_dir."""
    with temporary_dir() as sandbox:
        target_dir = os.path.join(sandbox, "target_dir")
        assert not os.path.exists(target_dir)

        with atomic_directory(target_dir, exclusive=False) as atomic_dir:
            assert not atomic_dir.is_finalized
            assert target_dir == atomic_dir.target_dir
            # The scratch work_dir exists, is a directory, and starts out empty.
            assert os.path.exists(atomic_dir.work_dir)
            assert os.path.isdir(atomic_dir.work_dir)
            assert [] == os.listdir(atomic_dir.work_dir)

            touch(os.path.join(atomic_dir.work_dir, "created"))
            # The target must not materialize until the context exits cleanly.
            assert not os.path.exists(target_dir)

        assert not os.path.exists(atomic_dir.work_dir), "The work_dir should always be cleaned up."
        assert os.path.exists(os.path.join(target_dir, "created"))
def assert_chroot_perms(copyfn):
    """Check copyfn carries file permission bits into a Chroot and through its zipped form."""

    def same_perms(original, copy):
        # Two paths compare equal when their extracted permission bits match.
        return extract_perms(original) == extract_perms(copy)

    with temporary_dir() as src:
        one = os.path.join(src, 'one')
        touch(one)
        two = os.path.join(src, 'two')
        touch(two)
        chmod_plus_x(two)

        with temporary_dir() as dst:
            chroot = Chroot(dst)
            copyfn(chroot, one, 'one')
            copyfn(chroot, two, 'two')
            assert same_perms(one, os.path.join(chroot.path(), 'one'))
            assert same_perms(two, os.path.join(chroot.path(), 'two'))

            zip_path = os.path.join(src, 'chroot.zip')
            chroot.zip(zip_path)
            with temporary_dir() as extract_dir:
                with contextlib.closing(PermPreservingZipFile(zip_path)) as zf:
                    zf.extractall(extract_dir)
                assert same_perms(one, os.path.join(extract_dir, 'one'))
                assert same_perms(two, os.path.join(extract_dir, 'two'))
def test_is_exe(temporary_working_dir):
    # type: (str) -> None
    """is_exe accepts only regular files made executable via chmod_plus_x."""
    touch("all_exe")
    chmod_plus_x("all_exe")
    assert is_exe("all_exe")

    # 0o665 sets the execute bit only for 'other'; is_exe should reject it.
    touch("other_exe")
    os.chmod("other_exe", 0o665)
    assert not is_exe("other_exe")

    # A plain file with no execute bits at all.
    touch("not_exe")
    assert not is_exe("not_exe")

    # A directory is never an executable, even with exec bits set.
    os.mkdir("exe_dir")
    chmod_plus_x("exe_dir")
    assert not is_exe("exe_dir")
def invalid_interpreter(self):
    # type: () -> Iterator[str]
    """Yield the path of an empty file posing as a `python` binary."""
    with temporary_dir() as bin_dir:
        fake_python = os.path.join(bin_dir, "python")
        touch(fake_python)
        yield fake_python
def test_parse_requirements_stress(chroot):
    # type: (str) -> None
    """Stress-test parse_requirements across the full range of requirement line formats.

    Writes a tree of requirements/constraints files plus the local projects and archives
    they reference, parses the top-level requirements text, and asserts the exact
    normalized parse results in order.
    """
    with safe_open(os.path.join(chroot, "other-requirements.txt"), "w") as fp:
        fp.write(
            # This includes both example snippets taken directly from
            # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format
            # not already covered by
            # https://pip.pypa.io/en/stable/reference/pip_install/#example-requirements-file.
            dedent(
                """\
                SomeProject
                SomeProject == 1.3
                SomeProject >=1.2,<2.0
                SomeProject[foo, bar]
                SomeProject~=1.4.2
                SomeProject ==5.4 ; python_version < '2.7'
                SomeProject; sys_platform == 'win32'
                SomeProject @ https://example.com/somewhere/over/here
                SomeProject @ file:somewhere/over/here
                FooProject >= 1.2 --global-option="--no-user-cfg" \\
                    --install-option="--prefix='/usr/local'" \\
                    --install-option="--no-compile"
                git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709#egg=MyProject
                git+ssh://git.example.com/MyProject#egg=MyProject
                git+file:///home/user/projects/MyProject#egg=MyProject&subdirectory=pkg_dir
                # N.B. This is not from the Pip docs unlike the examples above. We just want to
                # chain in one more set of stress tests.
                -r extra/stress.txt
                """
            )
        )
    # Back the `file:somewhere/over/here` requirement above with a real local project.
    touch("somewhere/over/here/pyproject.toml")
    with safe_open(os.path.join(chroot, "extra", "stress.txt"), "w") as fp:
        fp.write(
            # These are tests of edge cases not included anywhere in the examples found in
            # https://pip.pypa.io/en/stable/reference/pip_install/#requirements-file-format.
            dedent(
                """\
                -c file:subdir/more-requirements.txt
                a/local/project[foo]; python_full_version == "2.7.8"
                ./another/local/project;python_version == "2.7.*"
                ./another/local/project
                ./
                # Local projects with basenames that are invalid Python project names (trailing _):
                tmp/tmpW8tdb_
                tmp/tmpW8tdb_[foo]
                tmp/tmpW8tdb_[foo];python_version == "3.9"
                hg+http://hg.example.com/MyProject@da39a3ee5e6b#egg=AnotherProject[extra,more];python_version=="3.9.*"&subdirectory=foo/bar
                ftp://a/${PROJECT_NAME}-1.0.tar.gz
                http://a/${PROJECT_NAME}-1.0.zip
                https://a/numpy-1.9.2-cp34-none-win32.whl
                Django@ git+https://github.com/django/django.git
                Django@git+https://github.com/django/django.git@stable/2.1.x
                Django@ git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8
                Django @ file:projects/django-2.3.zip; python_version >= "3.10"
                """
            )
        )
    # Create the local projects and archives referenced by the stress requirements above.
    touch("extra/pyproject.toml")
    touch("extra/a/local/project/pyproject.toml")
    touch("extra/another/local/project/setup.py")
    touch("extra/tmp/tmpW8tdb_/setup.py")
    touch("extra/projects/django-2.3.zip")
    with safe_open(os.path.join(chroot, "subdir", "more-requirements.txt"), "w") as fp:
        fp.write(
            # This checks requirements (`ReqInfo`s) are wrapped up into `Constraints`.
            dedent(
                """\
                AnotherProject
                """
            )
        )
    req_iter = parse_requirements(
        Source.from_text(
            # N.B.: Taken verbatim from:
            # https://pip.pypa.io/en/stable/reference/pip_install/#example-requirements-file
            dedent(
                """\
                #
                ####### example-requirements.txt #######
                #
                ###### Requirements without Version Specifiers ######
                nose
                nose-cov
                beautifulsoup4
                #
                ###### Requirements with Version Specifiers ######
                #   See https://www.python.org/dev/peps/pep-0440/#version-specifiers
                docopt == 0.6.1             # Version Matching. Must be version 0.6.1
                keyring >= 4.1.1            # Minimum version 4.1.1
                coverage != 3.5             # Version Exclusion. Anything except version 3.5
                Mopidy-Dirble ~= 1.1        # Compatible release. Same as >= 1.1, == 1.*
                #
                ###### Refer to other requirements files ######
                -r other-requirements.txt
                #
                #
                ###### A particular file ######
                ./downloads/numpy-1.9.2-cp34-none-win32.whl
                http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl
                #
                ###### Additional Requirements without Version Specifiers ######
                #   Same as 1st section, just here to show that you can put things in any order.
                rejected
                green
                #
                """
            ),
        )
    )
    # Ensure local non-distribution files matching distribution names are not erroneously probed
    # as distributions to find name and version metadata.
    touch("nose")
    touch("downloads/numpy-1.9.2-cp34-none-win32.whl")
    with environment_as(PROJECT_NAME="Project"):
        results = normalize_results(req_iter)
    # The expected results are asserted in parse order: the example-requirements.txt lines,
    # then other-requirements.txt, then extra/stress.txt (with its constraint), and finally
    # the trailing example-requirements.txt entries.
    assert [
        req(project_name="nose"),
        req(project_name="nose-cov"),
        req(project_name="beautifulsoup4"),
        req(project_name="docopt", specifier="==0.6.1"),
        req(project_name="keyring", specifier=">=4.1.1"),
        req(project_name="coverage", specifier="!=3.5"),
        req(project_name="Mopidy-Dirble", specifier="~=1.1"),
        req(project_name="SomeProject"),
        req(project_name="SomeProject", specifier="==1.3"),
        req(project_name="SomeProject", specifier=">=1.2,<2.0"),
        req(project_name="SomeProject", extras=["foo", "bar"]),
        req(project_name="SomeProject", specifier="~=1.4.2"),
        req(project_name="SomeProject", specifier="==5.4", marker="python_version < '2.7'"),
        req(project_name="SomeProject", marker="sys_platform == 'win32'"),
        url_req(project_name="SomeProject", url="https://example.com/somewhere/over/here"),
        local_req(path=os.path.realpath("somewhere/over/here")),
        req(project_name="FooProject", specifier=">=1.2"),
        url_req(
            project_name="MyProject",
            url="git+https://git.example.com/MyProject.git@da39a3ee5e6b4b0d3255bfef95601890afd80709",
        ),
        url_req(project_name="MyProject", url="git+ssh://git.example.com/MyProject"),
        url_req(project_name="MyProject", url="git+file:/home/user/projects/MyProject"),
        Constraint(DUMMY_LINE, Requirement.parse("AnotherProject")),
        local_req(
            path=os.path.realpath("extra/a/local/project"),
            extras=["foo"],
            marker="python_full_version == '2.7.8'",
        ),
        local_req(
            path=os.path.realpath("extra/another/local/project"),
            marker="python_version == '2.7.*'",
        ),
        local_req(path=os.path.realpath("extra/another/local/project")),
        local_req(path=os.path.realpath("extra")),
        local_req(path=os.path.realpath("extra/tmp/tmpW8tdb_")),
        local_req(path=os.path.realpath("extra/tmp/tmpW8tdb_"), extras=["foo"]),
        local_req(
            path=os.path.realpath("extra/tmp/tmpW8tdb_"),
            extras=["foo"],
            marker="python_version == '3.9'",
        ),
        url_req(
            project_name="AnotherProject",
            url="hg+http://hg.example.com/MyProject@da39a3ee5e6b",
            extras=["more", "extra"],
            marker="python_version == '3.9.*'",
        ),
        url_req(project_name="Project", url="ftp://a/Project-1.0.tar.gz", specifier="==1.0"),
        url_req(project_name="Project", url="http://a/Project-1.0.zip", specifier="==1.0"),
        url_req(
            project_name="numpy",
            url="https://a/numpy-1.9.2-cp34-none-win32.whl",
            specifier="==1.9.2",
        ),
        url_req(project_name="Django", url="git+https://github.com/django/django.git"),
        url_req(project_name="Django", url="git+https://github.com/django/django.git@stable/2.1.x"),
        url_req(
            project_name="Django",
            url="git+https://github.com/django/django.git@fd209f62f1d83233cc634443cfac5ee4328d98b8",
        ),
        url_req(
            project_name="Django",
            url=os.path.realpath("extra/projects/django-2.3.zip"),
            specifier="==2.3",
            marker="python_version>='3.10'",
        ),
        url_req(
            project_name="numpy",
            url=os.path.realpath("./downloads/numpy-1.9.2-cp34-none-win32.whl"),
            specifier="==1.9.2",
        ),
        url_req(
            project_name="wxPython_Phoenix",
            url="http://wxpython.org/Phoenix/snapshot-builds/wxPython_Phoenix-3.0.3.dev1820+49a8884-cp34-none-win_amd64.whl",
            specifier="==3.0.3.dev1820+49a8884",
        ),
        req(project_name="rejected"),
        req(project_name="green"),
    ] == results