def test_unpack_file_url_thats_a_dir(self, tmpdir, data):
    self.prep(tmpdir, data)
    dist_path = data.packages.join("FSPkg")
    dist_url = Link(path_to_url(dist_path))
    unpack_file_url(dist_url, self.build_dir,
                    download_dir=self.download_dir)
    assert os.path.isdir(os.path.join(self.build_dir, 'fspkg'))
def unpack_url(self, link, location, download_dir=None,
               only_download=False):
    if download_dir is None:
        download_dir = self.download_dir

    # non-editable vcs urls
    if is_vcs_url(link):
        if only_download:
            loc = download_dir
        else:
            loc = location
        unpack_vcs_link(link, loc, only_download)

    # file urls
    elif is_file_url(link):
        unpack_file_url(link, location, download_dir)
        if only_download:
            write_delete_marker_file(location)

    # http urls
    else:
        unpack_http_url(
            link,
            location,
            self.download_cache,
            download_dir,
            self.session,
        )
        if only_download:
            write_delete_marker_file(location)
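# A hedged usage sketch of the dispatcher above. The URLs and the Link
# construction are illustrative assumptions, not part of the original code:
#
#     link = Link('https://example.com/pkg-1.0.tar.gz')
#     self.unpack_url(link, '/tmp/build/pkg', only_download=True)
#     # -> falls through to unpack_http_url, then writes a delete marker
#
#     link = Link('git+https://example.com/pkg.git')
#     self.unpack_url(link, '/tmp/build/pkg')
#     # -> is_vcs_url(link) is true, so unpack_vcs_link handles it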
def test_unpack_file_url_download_bad_hash(self, tmpdir, data,
                                           monkeypatch):
    """
    Test when existing download has different hash from the file url
    fragment
    """
    self.prep(tmpdir, data)

    # add in previous download (copy simple-2.0 as simple-1.0 so the
    # hash is wrong)
    dest_file = os.path.join(self.download_dir, self.dist_file)
    copy(self.dist_path2, dest_file)

    dist_path_md5 = hashlib.md5(
        open(self.dist_path, 'rb').read()).hexdigest()
    dist_path2_md5 = hashlib.md5(open(dest_file, 'rb').read()).hexdigest()
    assert dist_path_md5 != dist_path2_md5

    self.dist_url.url = "%s#md5=%s" % (
        self.dist_url.url,
        dist_path_md5
    )
    unpack_file_url(self.dist_url, self.build_dir,
                    download_dir=self.download_dir)

    # confirm hash is for simple-1.0;
    # the previous bad download has been removed
    assert (hashlib.md5(open(dest_file, 'rb').read()).hexdigest() ==
            dist_path_md5), \
        hashlib.md5(open(dest_file, 'rb').read()).hexdigest()
def test_unpack_file_url_and_download(self, tmpdir, data):
    self.prep(tmpdir, data)
    unpack_file_url(self.dist_url, self.build_dir,
                    download_dir=self.download_dir)
    assert os.path.isdir(os.path.join(self.build_dir, 'simple'))
    assert os.path.isfile(os.path.join(self.download_dir, self.dist_file))
def test_unpack_file_url_bad_hash(self, tmpdir, data, monkeypatch):
    """
    Test when the file url hash fragment is wrong
    """
    self.prep(tmpdir, data)
    self.dist_url.url = "%s#md5=bogus" % self.dist_url.url
    with pytest.raises(HashMismatch):
        unpack_file_url(self.dist_url, self.build_dir)
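# For context, the "#md5=..." fragment names the expected digest of the
# file. A minimal sketch of that check, assuming a plain URL string and a
# message-style HashMismatch constructor (pip's own Link/fragment parsing
# is more involved; this is illustrative only):
def _check_md5_fragment(path, url):
    fragment = url.split('#', 1)[1] if '#' in url else ''
    if fragment.startswith('md5='):
        expected = fragment[len('md5='):]
        with open(path, 'rb') as f:
            actual = hashlib.md5(f.read()).hexdigest()
        if actual != expected:
            raise HashMismatch('%s != %s' % (actual, expected))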
def test_unpack_file_url_download_already_exists(self, tmpdir, data,
                                                 monkeypatch):
    self.prep(tmpdir, data)
    # add in previous download (copy simple-2.0 as simple-1.0)
    # so we can tell it didn't get overwritten
    dest_file = os.path.join(self.download_dir, self.dist_file)
    copy(self.dist_path2, dest_file)
    dist_path2_md5 = hashlib.md5(
        open(self.dist_path2, 'rb').read()).hexdigest()

    unpack_file_url(self.dist_url, self.build_dir,
                    download_dir=self.download_dir)

    # our hash should be the same, i.e. not overwritten by simple-1.0 hash
    assert dist_path2_md5 == hashlib.md5(
        open(dest_file, 'rb').read()).hexdigest()
def download_url(self, link, location=None):
    cache = join(self.base_path, '.cache')
    self.mkdir(cache)
    location = location or self.build_path
    if is_vcs_url(link):
        return unpack_vcs_link(link, location, only_download=False)
    elif is_file_url(link):
        return unpack_file_url(link, location)
    else:
        return unpack_http_url(link, location, cache, False)
def decompress(archive, dir_name):
    """
    Decompress an archive into a directory; the directory must already
    exist.
    """
    unpack_file_url(_DummyLink('file:' + archive), dir_name)
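# _DummyLink is referenced above but not defined here. A minimal sketch of
# what it might look like; the attribute set is an assumption, since the
# attributes unpack_file_url touches vary across pip versions, and pip's
# real Link class is the safer choice whenever it is importable:
class _DummyLink(object):
    def __init__(self, url):
        self.url = url
        self.url_without_fragment = url  # no '#md5=...' fragment here
        self.hash = None                 # nothing to verify
        self.hash_name = None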