def test_unpack_file_url_excludes_expected_dirs(tmpdir, exclude_dir):
    src_dir = tmpdir / 'src'
    dst_dir = tmpdir / 'dst'
    src_included_file = src_dir.joinpath('file.txt')
    src_excluded_dir = src_dir.joinpath(exclude_dir)
    src_excluded_file = src_dir.joinpath(exclude_dir, 'file.txt')
    src_included_dir = src_dir.joinpath('subdir', exclude_dir)

    # set up source directory
    src_excluded_dir.mkdir(parents=True)
    src_included_dir.mkdir(parents=True)
    src_included_file.touch()
    src_excluded_file.touch()

    dst_included_file = dst_dir.joinpath('file.txt')
    dst_excluded_dir = dst_dir.joinpath(exclude_dir)
    dst_excluded_file = dst_dir.joinpath(exclude_dir, 'file.txt')
    dst_included_dir = dst_dir.joinpath('subdir', exclude_dir)

    src_link = Link(path_to_url(src_dir))
    unpack_file_url(
        src_link,
        dst_dir,
        download_dir=None
    )
    assert not os.path.isdir(dst_excluded_dir)
    assert not os.path.isfile(dst_excluded_file)
    assert os.path.isfile(dst_included_file)
    assert os.path.isdir(dst_included_dir)

def test_unpack_file_url_thats_a_dir(self, tmpdir, data):
    self.prep(tmpdir, data)
    dist_path = data.packages.joinpath("FSPkg")
    dist_url = Link(path_to_url(dist_path))
    unpack_file_url(dist_url, self.build_dir,
                    download_dir=self.download_dir)
    assert os.path.isdir(os.path.join(self.build_dir, 'fspkg'))

def test_unpack_file_url_download_bad_hash(self, tmpdir, data,
                                           monkeypatch):
    """
    Test when an existing download has a different hash from the file url
    fragment
    """
    self.prep(tmpdir, data)

    # add in previous download (copy simple-2.0 as simple-1.0 so it has
    # the wrong hash)
    dest_file = os.path.join(self.download_dir, self.dist_file)
    copy(self.dist_path2, dest_file)

    with open(self.dist_path, 'rb') as f:
        dist_path_md5 = hashlib.md5(f.read()).hexdigest()
    with open(dest_file, 'rb') as f:
        dist_path2_md5 = hashlib.md5(f.read()).hexdigest()

    assert dist_path_md5 != dist_path2_md5
    url = '{}#md5={}'.format(self.dist_url.url, dist_path_md5)
    dist_url = Link(url)
    unpack_file_url(dist_url, self.build_dir,
                    download_dir=self.download_dir,
                    hashes=Hashes({'md5': [dist_path_md5]}))

    # confirm the hash now matches simple-1.0, i.e. the previous bad
    # download has been removed
    with open(dest_file, 'rb') as f:
        assert hashlib.md5(f.read()).hexdigest() == dist_path_md5

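# A minimal illustrative sketch (not part of the test class above) of the
# hash-fragment mechanism the surrounding tests exercise: compute a file's
# md5, append it to the file URL as a ``#md5=...`` fragment, and hand the same
# digest to unpack_file_url via a Hashes object, so an existing download with
# a different hash is rejected and replaced.  It reuses names already imported
# by this module (hashlib, Link, path_to_url, unpack_file_url, Hashes);
# ``archive_path``, ``build_dir`` and ``download_dir`` are hypothetical paths.
def _example_unpack_with_matching_hash(archive_path, build_dir, download_dir):
    with open(archive_path, 'rb') as f:
        digest = hashlib.md5(f.read()).hexdigest()
    link = Link('{}#md5={}'.format(path_to_url(archive_path), digest))
    unpack_file_url(link, build_dir, download_dir=download_dir,
                    hashes=Hashes({'md5': [digest]}))
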
def test_unpack_file_url_bad_hash(self, tmpdir, data, monkeypatch):
    """
    Test when the file url hash fragment is wrong
    """
    self.prep(tmpdir, data)
    url = '{}#md5=bogus'.format(self.dist_url.url)
    dist_url = Link(url)
    with pytest.raises(HashMismatch):
        unpack_file_url(dist_url, self.build_dir,
                        hashes=Hashes({'md5': ['bogus']}))

def test_unpack_file_url_download_already_exists(self, tmpdir, data,
                                                 monkeypatch):
    self.prep(tmpdir, data)
    # add in previous download (copy simple-2.0 as simple-1.0)
    # so we can tell it didn't get overwritten
    dest_file = os.path.join(self.download_dir, self.dist_file)
    copy(self.dist_path2, dest_file)
    with open(self.dist_path2, 'rb') as f:
        dist_path2_md5 = hashlib.md5(f.read()).hexdigest()

    unpack_file_url(self.dist_url, self.build_dir,
                    download_dir=self.download_dir)

    # our hash should be the same, i.e. not overwritten by simple-1.0 hash
    with open(dest_file, 'rb') as f:
        assert dist_path2_md5 == hashlib.md5(f.read()).hexdigest()

def unpack_url(
    link,  # type: Link
    location,  # type: str
    download_dir=None,  # type: Optional[str]
    only_download=False,  # type: bool
    session=None,  # type: Optional[PipSession]
    hashes=None,  # type: Optional[Hashes]
    progress_bar="on"  # type: str
):
    # type: (...) -> None
    """Unpack link.

    If link is a VCS link:
      if only_download, export into download_dir and ignore location
      else unpack into location
    for other types of link:
      - unpack into location
      - if download_dir, copy the file into download_dir
      - if only_download, mark location for deletion

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # non-editable vcs urls
    if is_vcs_url(link):
        unpack_vcs_link(link, location)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir, hashes=hashes)

    # http urls
    else:
        if session is None:
            session = PipSession()

        unpack_http_url(
            link,
            location,
            download_dir,
            session,
            hashes=hashes,
            progress_bar=progress_bar
        )
    if only_download:
        write_delete_marker_file(location)

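# A minimal usage sketch for unpack_url with hypothetical paths: wrapping a
# local sdist archive in a file:// Link makes the call above dispatch to
# unpack_file_url, which unpacks the archive into ``location`` and, because a
# download_dir is given, also keeps a copy of the archive there.  An http(s)
# link would instead go through unpack_http_url with the (possibly default)
# session.  Assumes the Link and path_to_url names imported by this module.
def _example_unpack_local_archive(archive_path, build_dir, download_cache):
    link = Link(path_to_url(archive_path))
    unpack_url(
        link,
        location=build_dir,           # the archive is unpacked here
        download_dir=download_cache,  # a copy of the archive is kept here
        only_download=False,          # we want the unpacked tree, not just the file
    )
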
def build(
    self,
    requirements,  # type: Iterable[InstallRequirement]
    autobuilding=False  # type: bool
):
    # type: (...) -> List[InstallRequirement]
    """Build wheels.

    :param autobuilding: If True, build wheels into the wheel cache and
        replace each sdist we built from with its newly built wheel, in
        preparation for installation; otherwise build into self._wheel_dir.
    :return: The list of requirements that failed to build.
    """
    buildset = []
    format_control = self.finder.format_control
    # Whether a cache directory is available for autobuilding=True.
    cache_available = bool(self._wheel_dir or self.wheel_cache.cache_dir)

    for req in requirements:
        ephem_cache = should_use_ephemeral_cache(
            req, format_control=format_control, autobuilding=autobuilding,
            cache_available=cache_available,
        )
        if ephem_cache is None:
            continue

        buildset.append((req, ephem_cache))

    if not buildset:
        return []

    # Is any wheel build not using the ephemeral cache?
    if any(not ephem_cache for _, ephem_cache in buildset):
        have_directory_for_build = self._wheel_dir or (
            autobuilding and self.wheel_cache.cache_dir
        )
        assert have_directory_for_build

    # TODO by @pradyunsg
    # Should break up this method into 2 separate methods.

    # Build the wheels.
    logger.info(
        'Building wheels for collected packages: %s',
        ', '.join([req.name for (req, _) in buildset]),
    )
    _cache = self.wheel_cache  # shorter name
    with indent_log():
        build_success, build_failure = [], []
        for req, ephem in buildset:
            python_tag = None
            if autobuilding:
                python_tag = pep425tags.implementation_tag
                if ephem:
                    output_dir = _cache.get_ephem_path_for_link(req.link)
                else:
                    output_dir = _cache.get_path_for_link(req.link)
                try:
                    ensure_dir(output_dir)
                except OSError as e:
                    logger.warning("Building wheel for %s failed: %s",
                                   req.name, e)
                    build_failure.append(req)
                    continue
            else:
                output_dir = self._wheel_dir
            wheel_file = self._build_one(
                req, output_dir,
                python_tag=python_tag,
            )
            if wheel_file:
                build_success.append(req)
                if autobuilding:
                    # XXX: This is mildly duplicative with prepare_files,
                    # but not close enough to pull out to a single common
                    # method.
                    # The code below assumes temporary source dirs -
                    # prevent it doing bad things.
                    if req.source_dir and not os.path.exists(
                            os.path.join(
                                req.source_dir, PIP_DELETE_MARKER_FILENAME)):
                        raise AssertionError(
                            "bad source dir - missing marker")
                    # Delete the source we built the wheel from
                    req.remove_temporary_source()
                    # set the build directory again - name is known from
                    # the work prepare_files did.
                    req.source_dir = req.build_location(
                        self.preparer.build_dir)
                    # Update the link for this.
                    req.link = Link(path_to_url(wheel_file))
                    assert req.link.is_wheel
                    # extract the wheel into the dir
                    unpack_file_url(link=req.link, location=req.source_dir)
            else:
                build_failure.append(req)

    # notify success/failure
    if build_success:
        logger.info(
            'Successfully built %s',
            ' '.join([req.name for req in build_success]),
        )
    if build_failure:
        logger.info(
            'Failed to build %s',
            ' '.join([req.name for req in build_failure]),
        )
    # Return a list of requirements that failed to build
    return build_failure

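# A short sketch (hypothetical helper, not part of WheelBuilder) of the
# "swap in the built wheel" step performed above when autobuilding: the wheel
# file produced by _build_one is wrapped in a file:// Link and unpacked into
# the requirement's fresh source directory, so installation proceeds from the
# wheel rather than the original sdist.  Assumes the Link, path_to_url and
# unpack_file_url names imported by this module.
def _example_swap_in_built_wheel(wheel_file, source_dir):
    wheel_link = Link(path_to_url(wheel_file))
    assert wheel_link.is_wheel
    unpack_file_url(link=wheel_link, location=source_dir)
    return wheel_link
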
def test_unpack_file_url_and_download(self, tmpdir, data):
    self.prep(tmpdir, data)
    unpack_file_url(self.dist_url, self.build_dir,
                    download_dir=self.download_dir)
    assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
    assert os.path.isfile(os.path.join(self.download_dir, self.dist_file))