def get_go_artifacts(url, target, ver):
    """Get artifacts required to be a go proxy alternative.

    For each of the three go-proxy files (<ver>.info, <ver>.mod, <ver>.zip),
    download it into `target` if missing, then append its sha1 to the
    upstream record.
    """
    for ext in ("info", "mod", "zip"):
        artifact = f"{ver}.{ext}"
        local_path = os.path.join(target, artifact)
        if not os.path.exists(local_path):
            # Fetch the artifact from the proxy url; failure is fatal.
            download.do_curl(os.path.join(url, artifact), dest=local_path, is_fatal=True)
        write_upstream(get_sha1sum(local_path), artifact, mode="a")
def check_or_get_file(self, upstream_url, tarfile, mode="w"):
    """Download tarball from url unless it is present locally.

    Args:
        upstream_url: URL to fetch the tarball from.
        tarfile: File name of the tarball under the download path.
        mode: File mode passed through to write_upstream ("w" or "a").

    Returns:
        The local path of the tarball.
    """
    tarball_path = self.config.download_path + "/" + tarfile
    if not os.path.isfile(tarball_path):
        # Missing locally: fetch it (fatal on failure).
        download.do_curl(upstream_url, dest=tarball_path, is_fatal=True)
    # Record the checksum in every case; previously this identical call was
    # duplicated in both branches of the if/else.
    self.write_upstream(get_sha1sum(tarball_path), tarfile, mode)
    return tarball_path
def check_or_get_file(upstream_url, tarfile, mode="w"):
    """Download tarball from url unless it is present locally.

    Args:
        upstream_url: URL to fetch the tarball from.
        tarfile: File name of the tarball under the download path; the
            special name "list" marks a go dependency list.
        mode: File mode passed through to write_upstream ("w" or "a").

    Returns:
        The local path of the tarball (even in the go-dependency case,
        matching the original behavior).
    """
    tarball_path = build.download_path + "/" + tarfile
    # check if url signifies a go dependency, which needs special handling
    if tarfile == "list":
        process_go_dependency(upstream_url, build.download_path)
    else:
        if not os.path.isfile(tarball_path):
            download.do_curl(upstream_url, dest=tarball_path, is_fatal=True)
        # Record the checksum whether or not we just downloaded; previously
        # this identical call was duplicated in both branches.
        write_upstream(get_sha1sum(tarball_path), tarfile, mode)
    return tarball_path
def fetch_shasum(shasum_url):
    """Get shasum file from gnome.org.

    Returns the shasum file content as text, or None when the download
    produced no data.
    """
    payload = download.do_curl(shasum_url)
    return payload.getvalue().decode('utf-8') if payload else None
def test_download_get_success_no_dest(self, test_curl):
    """ Test successful GET request when dest is not set. """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    # With no dest, do_curl returns the response buffer directly.
    result = download.do_curl("foo")
    self.assertEqual(result.getvalue(), b'foobar')
def pkg_search(name):
    """Query the pypi json API for name and return True if found."""
    query = f"https://pypi.org/pypi/{name}/json/"
    # do_curl returns None on failure, so truth of the lookup is just
    # whether any response came back.
    return download.do_curl(query) is not None
def get_sign(self):
    """Attempt to download gpg signature file.

    Returns True on success; on failure records the result and returns
    None (falsy), matching the original contract.
    """
    if download.do_curl(self.key_url, self.package_sign_path) is not None:
        return True
    self.print_result(False, "Unable to download file {}".format(self.key_url))
def fetch_shasum(self):
    """Fetch sha256 file associated with the package URL.

    Returns the sha256 file content as text, or None when nothing was
    downloaded.
    """
    payload = download.do_curl("{}.sha256".format(self.package_url))
    return payload.getvalue().decode('utf-8') if payload else None
def get_info(package_name):
    """Get json dump of pypi package.

    Returns the decoded JSON object, or None when the download failed.
    """
    payload = download.do_curl(PYPIORG_API.format(package_name))
    if not payload:
        return None
    return json.loads(payload.getvalue().decode('utf-8'))
def get_rubygems_info(package_name):
    """Get json dump of ruby gem.

    Returns the decoded JSON object, or None when the download failed.
    """
    payload = download.do_curl(RUBYORG_API.format(package_name))
    if not payload:
        return None
    return json.loads(payload.getvalue().decode('utf-8'))
def test_download_get_success_dest(self, test_curl, test_open):
    """ Test successful GET request when dest is set. """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    download.do_curl("foo", "testdest")
    # The payload must be written to dest in binary mode.
    test_open.assert_called_once_with('testdest', 'wb')
    test_open().write.assert_called_once_with(b'foobar')
def test_download_get_failure_fatal(self, test_curl, test_exit):
    """ Test failed GET request when is_fatal is set. """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    # Simulate the transfer itself blowing up.
    curl_mock.perform.side_effect = pycurl.error
    download.do_curl("foo", is_fatal=True)
    # A fatal failure must exit with status 1.
    test_exit.assert_called_once_with(1)
def test_download_get_failure_no_dest(self, test_curl):
    """ Test failed GET request when dest is not set. """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    # Simulate the transfer itself blowing up.
    curl_mock.perform.side_effect = pycurl.error
    self.assertIsNone(download.do_curl("foo"))
def license_from_copying_hash(copying, srcdir, config):
    """Add licenses based on the hash of the copying file.

    Reads the license file at `copying`, and either queries the configured
    license-fetch server with its sha1 or looks the sha1 up in the local
    config.license_hashes table. Side effects: appends to the module-level
    `license_files` list and `hashes` dict, and registers licenses via
    process_licenses()/add_license().
    """
    try:
        data = get_contents(copying)
    except FileNotFoundError:
        # LICENSE file is a bad symlink (qemu-4.2.0!)
        return
    if data.startswith(b'#!'):
        # Not a license if this is a script
        return
    data = decode_license(data)
    if not data:
        # Undecodable content (presumably not a text file) — nothing to do.
        return
    hash_sum = get_sha1sum(copying)
    if config.license_fetch:
        # POST the license text to the license server and use its verdict.
        values = {'hash': hash_sum, 'text': data, 'package': tarball.name}
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')
        buffer = download.do_curl(config.license_fetch, post=data, is_fatal=True)
        response = buffer.getvalue()
        page = response.decode('utf-8').strip()
        if page:
            print("License : ", page, " (server) (", hash_sum, ")")
            process_licenses(page, config.license_translations, config.license_blacklist)
            if page != "none":
                # Strip the build source directory off the front
                lic_path = copying[len(srcdir):]
                # Strip any leading slashes
                while lic_path.startswith('/'):
                    lic_path = lic_path[1:]
                lic_path = shlex.quote(lic_path)
                license_files.append(lic_path)
                hashes[lic_path] = hash_sum
            # Server answered — do not fall through to the local hash table.
            return
    # Local fallback: match the sha1 against the configured hash table.
    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum], config.license_translations, config.license_blacklist)
    else:
        if not config.license_show:
            return
        # Unknown license: point the operator at the submission URL.
        print_warning("Unknown license {0} with hash {1}".format(
            copying, hash_sum))
        hash_url = config.license_show % {'HASH': hash_sum}
        print_warning("Visit {0} to enter".format(hash_url))
def test_download_set_post(self, test_curl):
    """ Test setting of POSTFIELDS curl option """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    download.do_curl("foo", post='postdata')
    # Supplying post data must translate into the POSTFIELDS option.
    test_curl.assert_has_calls([call().setopt(MockOpts.POSTFIELDS, 'postdata')])
def test_download_write_fail_fatal(self, test_curl, test_open, test_path, test_exit):
    """ Test fatal failure to write to dest after successful GET request. """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    # The GET succeeds but opening dest for writing fails.
    test_open.side_effect = IOError
    test_path.return_value = None
    download.do_curl("foo", "testdest", is_fatal=True)
    # A fatal write failure must exit with status 1.
    test_exit.assert_called_once_with(1)
def test_download_get_write_fail_dest(self, test_curl, test_open, test_path):
    """ Test failure to write to dest after successful GET request. """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    # The GET succeeds but opening dest for writing fails.
    test_open.side_effect = IOError
    test_path.return_value = None
    self.assertIsNone(download.do_curl("foo", "testdest"))
def test_download_write_fail_remove_dest(self, test_curl, test_open, test_path, test_unlink):
    """ Test removal of dest following a write failure. """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    # The write fails and the (partial) dest file exists on disk.
    test_open.side_effect = IOError
    test_path.return_value = True
    download.do_curl("foo", "testdest")
    # The partial file must be checked for and removed.
    test_path.assert_called_once_with("testdest")
    test_unlink.assert_called_once_with("testdest")
def license_from_copying_hash(copying, srcdir):
    """Add licenses based on the hash of the copying file.

    Hashes the license file at `copying` with sha1, decodes its text using
    charset detection, then either asks the configured license server or
    consults config.license_hashes. Side effects: appends to the
    module-level `license_files` list and registers licenses via
    process_licenses()/add_license().
    """
    data = tarball.get_contents(copying)
    if data.startswith(b'#!'):
        # Not a license if this is a script
        return
    sh = hashlib.sha1()
    sh.update(data)
    hash_sum = sh.hexdigest()
    """ decode license text """
    detected = chardet.detect(data)
    license_charset = detected['encoding']
    # chardet commonly misreports these encodings as ISO-8859-1; refine the
    # guess using bytes that are characteristic of the real charsets.
    if license_charset == 'ISO-8859-1':
        if b'\xff' in data:
            license_charset = 'ISO-8859-13'
        elif b'\xd2' in data and b'\xd3' in data:
            license_charset = 'mac_roman'
    if not license_charset:
        # This is not a text file
        return
    data = data.decode(license_charset)
    if config.license_fetch:
        # POST the license text to the license server and use its verdict.
        values = {'hash': hash_sum, 'text': data, 'package': tarball.name}
        data = urllib.parse.urlencode(values)
        data = data.encode('utf-8')
        buffer = download.do_curl(config.license_fetch, post=data, is_fatal=True)
        response = buffer.getvalue()
        page = response.decode('utf-8').strip()
        if page:
            print("License : ", page, " (server) (", hash_sum, ")")
            process_licenses(page)
            if page != "none":
                # Record the license file path relative to the source dir
                # (the +1 drops the leading path separator).
                lic_path = copying[len(srcdir) + 1:]
                license_files.append(shlex.quote(lic_path))
            # Server answered — do not fall through to the local hash table.
            return
    # Local fallback: match the sha1 against the configured hash table.
    if hash_sum in config.license_hashes:
        add_license(config.license_hashes[hash_sum])
    else:
        if not config.license_show:
            return
        # Unknown license: point the operator at the submission URL.
        print_warning("Unknown license {0} with hash {1}".format(
            copying, hash_sum))
        hash_url = config.license_show % {'HASH': hash_sum}
        print_warning("Visit {0} to enter".format(hash_url))
def test_download_set_basic(self, test_curl):
    """ Test curl option settings set by default """
    curl_mock = init_curl_instance(test_curl)
    curl_mock.setopt.side_effect = test_opts
    download.do_curl("foo")
    # Every request must set the URL, follow redirects, and fail on error.
    expected = [
        call().setopt(MockOpts.URL, 'foo'),
        call().setopt(MockOpts.FOLLOWLOCATION, True),
        call().setopt(MockOpts.FAILONERROR, True),
    ]
    test_curl.assert_has_calls(expected)
def get_signature_file(package_url, package_path):
    """Attempt to build signature file URL and download it.

    Args:
        package_url: URL of the package tarball.
        package_path: Local directory to store the signature in.

    Returns:
        The downloaded signature file (do_curl's result) or None when no
        candidate URL could be fetched.
    """
    sign_urls = []
    if 'samba.org' in package_url:
        sign_urls.append(package_url + '.asc')
    elif '://pypi.' in package_url[:13]:
        sign_urls.append(package_url + '.asc')
    elif 'mirrors.kernel.org' in package_url:
        sign_urls.append(package_url + '.sig')
    else:
        # Unknown host: try the common signature extensions in order.
        # (Previously built via a generator named `iter`, shadowing the
        # builtin.)
        sign_urls.extend(package_url + "." + ext for ext in ("asc", "sig", "sign"))
    for url in sign_urls:
        dest = os.path.join(package_path, os.path.basename(url))
        sign_file = download.do_curl(url, dest)
        if sign_file is not None:
            return sign_file
    return None
def get_signature_file(package_url, package_path):
    """Attempt to build signature file URL and download it.

    Args:
        package_url: URL of the package tarball.
        package_path: Local directory to store the signature in.

    Returns:
        The downloaded signature file (do_curl's result) or None when no
        candidate URL could be fetched.
    """
    sign_urls = []
    netloc = urlparse(package_url).netloc
    if 'samba.org' in netloc:
        sign_urls.append(package_url + '.asc')
    elif any(loc in netloc for loc in PYPI_DOMAINS):
        sign_urls.append(package_url + '.asc')
    elif 'mirrors.kernel.org' in netloc:
        sign_urls.append(package_url + '.sig')
    else:
        # Unknown host: try the common signature extensions in order.
        # (Previously built via a generator named `iter`, shadowing the
        # builtin.)
        sign_urls.extend(package_url + "." + ext for ext in ("asc", "sig", "sign"))
    for url in sign_urls:
        dest = os.path.join(package_path, os.path.basename(url))
        sign_file = download.do_curl(url, dest)
        if sign_file is not None:
            return sign_file
    return None
def check_or_get_file(upstream_url, tarfile):
    """Download tarball from url unless it is present locally.

    Returns the local path of the tarball.
    """
    tarball_path = f"{build.download_path}/{tarfile}"
    if not os.path.isfile(tarball_path):
        # Not cached yet: fetch it (fatal on failure).
        download.do_curl(upstream_url, dest=tarball_path, is_fatal=True)
    return tarball_path
def git_archive_all(path, name, url, branch, force_module, force_fullclone, conf, is_fatal=True):
    """Clone package directly from a git repository.

    Handles three cases:
      * a pypi.org/project/ URL — downloads the latest source tarball;
      * any other valid URL — git-clones it, determines a version, and
        archives the clone into <name>-<version>.tar.gz;
      * a local directory path — archives it directly.

    Returns a file:// URL pointing at the produced (or downloaded) tarball.
    Exits the process on fatal errors.
    """
    cmd_args = f"{branch} {url} {name}"
    clone_path = f"{path}{name}"
    if util.debugging:
        print_debug(f"path: {path}")
        print_debug(f"force_module {str(force_module)}")
        print_debug(f"force_fullclone {str(force_fullclone)}")
    is_url = validators.url(url)
    if is_url is True:
        if "pypi.org/project/" in url:
            # PyPI project page: skip git entirely and fetch the latest
            # source distribution into the current directory.
            latest_pypi_source = latest_pypi(url, output_format="source", pre_ok=True)
            print_info(f"pypi.org/project/: {latest_pypi_source}")
            latest_pypi_source_basename = os.path.basename(latest_pypi_source)
            download.do_curl(latest_pypi_source, dest=f"./{latest_pypi_source_basename}", is_fatal=True)
            absolute_url_file = f"file://{os.path.abspath(latest_pypi_source_basename)}"
            return absolute_url_file
        else:
            # Remote git repository: clone, resolve a version, archive.
            git_clone(url=url, path=path, cmd_args=cmd_args, clone_path=clone_path, force_module=force_module, force_fullclone=force_fullclone, is_fatal=is_fatal)
            try:
                outputVersion = find_version_git(url=url, clone_path=clone_path, path=path, conf=conf)
            except:
                # NOTE(review): bare except — swallows everything including
                # KeyboardInterrupt; if not is_fatal, outputVersion may be
                # unbound below. Confirm intent.
                if is_fatal:
                    remove_clone_archive(path, clone_path, is_fatal)
                    print_fatal(f"Unexpected error: {sys.exc_info()[0]}")
                    sys.exit(1)
            # Normalize the version to a leading "v" for the archive name.
            if not outputVersion.startswith("v") and not outputVersion.startswith("V"):
                outputVersion = f"v{outputVersion}"
            clone_file = f"{name}-{outputVersion}.tar.gz"
            absolute_file_path = os.path.abspath(clone_file)
            absolute_url_file = f"file://{absolute_file_path}"
            if util.debugging:
                print_debug(f"{clone_file}")
                print_debug(f"clone_path: {clone_path}")
                print_debug(f"absolute_file_path: {absolute_file_path}")
                print_debug(f"absolute_url_file: {absolute_url_file}")
            try:
                # Archive the clone with tar piped through parallel gzip.
                process = subprocess.run(
                    f"tar --create --file=- {clone_path}/ | pigz -9 -p 20 > {clone_file}",
                    check=True,
                    shell=True,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    text=True,
                    universal_newlines=True,
                    cwd=path,
                )
            except subprocess.CalledProcessError as err:
                remove_clone_archive(path, clone_path, is_fatal)
                print_fatal(f"Unable to archive {clone_path} in {clone_file} from {url}: {err}")
                sys.exit(1)
            # Archive produced: drop the working clone and hand back the URL.
            remove_clone_archive(path, clone_path, is_fatal)
            return absolute_url_file
    else:
        if os.path.isdir(url):
            # `url` is actually a local checkout: archive it in place.
            clone_path = url
            outputVersion = find_version_git(url=url, clone_path=clone_path, path=path, conf=conf)
            if not outputVersion.startswith("v") and not outputVersion.startswith("V"):
                outputVersion = f"v{outputVersion}"
            clone_file = f"{name}-{outputVersion}.tar.gz"
            clone_path_norm = os.path.normpath(clone_path)
            absolute_file_path = os.path.abspath(clone_file)
            absolute_url_file = f"file://{absolute_file_path}"
            if util.debugging:
                print_debug(f"{clone_file}")
                print_debug(f"clone_path: {clone_path}")
                print_debug(f"absolute_file_path: {absolute_file_path}")
                print_debug(f"absolute_url_file: {absolute_url_file}")
            try:
                # Archive from the parent directory so the tarball contains
                # the directory's basename; exclude VCS metadata.
                process = subprocess.run(
                    f"tar --create --exclude=.github --exclude=.git --file=- {os.path.basename(clone_path_norm)}/ | pigz -9 -p 20 > {absolute_file_path}",
                    check=True,
                    shell=True,
                    stdout=subprocess.PIPE,
                    stderr=subprocess.STDOUT,
                    text=True,
                    universal_newlines=True,
                    cwd=os.path.dirname(clone_path_norm),
                )
            except subprocess.CalledProcessError as err:
                if is_fatal:
                    remove_clone_archive(path, clone_path, is_fatal)
                    print_fatal(f"Unable to archive {clone_path} in {clone_file} from {url}: {err}")
                    sys.exit(1)
            return absolute_url_file
        else:
            # NOTE(review): clone_file is never assigned on this path, so
            # this f-string raises NameError before printing — confirm and
            # fix upstream.
            print_fatal(f"Unable to archive {clone_path} in {clone_file} from {url}")
            sys.exit(1)