def data_from_archive(self):
    """Returns all metadata extractable from the whl pydist.json

    Returns:
        dictionary containing metadata extracted from json data
    """
    archive_data = {
        'license': self.license,
        'summary': self.summary,
        'home_page': self.home_page,
        'doc_files': self.doc_files,
        'has_pth': self.has_pth,
        'runtime_deps': utils.unique_deps(self.runtime_deps),
        # Every build needs python2-devel and setuptools on top of the
        # archive's own build requirements.
        'build_deps': utils.unique_deps(
            [['BuildRequires', 'python2-devel'],
             ['BuildRequires', 'python-setuptools']] + self.build_deps),
        'py_modules': self.modules,
        'scripts': self.scripts,
        'has_test_suite': self.has_test_suite,
        'has_extension': self.has_extension,
    }

    py_vers = self.versions_from_archive
    if py_vers:
        archive_data['base_python_version'] = py_vers[0]
        archive_data['python_versions'] = py_vers[1:]
    else:
        # No version info in the archive — fall back to configured defaults.
        archive_data['base_python_version'] = settings.DEFAULT_PYTHON_VERSION
        archive_data['python_versions'] = [settings.DEFAULT_ADDITIONAL_VERSION]

    archive_data['description'] = self.description
    return archive_data
def data_from_archive(self):
    """Returns all metadata extractable from the archive.

    Returns:
        dictionary containing metadata extracted from the archive
    """
    archive_data = {
        'license': self.license_from_archive,
        'has_pth': self.has_pth,
        'scripts': self.scripts,
        'has_extension': self.has_extension,
    }

    # Base build requirements shared by both egg and non-egg archives.
    base_build_deps = [['BuildRequires', 'python2-devel'],
                       ['BuildRequires', 'python-setuptools']]
    if self.archive.is_egg:
        archive_data['runtime_deps'] = self.runtime_deps_from_egg_info
        archive_data['build_deps'] = base_build_deps
    else:
        archive_data['runtime_deps'] = self.runtime_deps_from_setup_py
        archive_data['build_deps'] = utils.unique_deps(
            base_build_deps + self.build_deps_from_setup_py)

    py_vers = self.versions_from_archive
    if py_vers:
        archive_data['base_python_version'] = py_vers[0]
        archive_data['python_versions'] = py_vers[1:]
    else:
        # No version info found — fall back to configured defaults.
        archive_data['base_python_version'] = settings.DEFAULT_PYTHON_VERSION
        archive_data['python_versions'] = [settings.DEFAULT_ADDITIONAL_VERSION]

    (archive_data['doc_files'],
     archive_data['doc_license']) = self.separate_license_files(self.doc_files)

    archive_data['py_modules'] = self.py_modules
    archive_data['has_test_suite'] = self.has_test_suite
    archive_data['has_bundled_egg_info'] = self.has_bundled_egg_info
    archive_data['has_packages'] = self.has_packages
    archive_data['packages'] = self.packages

    sphinx_dir = self.sphinx_dir
    if sphinx_dir:
        # Strip the leading (top-level) path component of the sphinx dir.
        archive_data['sphinx_dir'] = "/".join(sphinx_dir.split("/")[1:])
        archive_data['build_deps'].append(['BuildRequires', 'python-sphinx'])

    return archive_data
def data_from_archive(self):
    """Returns all metadata extractable from the archive.

    Returns:
        dictionary containing metadata extracted from the archive
    """
    result = {}
    result['license'] = self.license_from_archive
    result['has_pth'] = self.has_pth
    result['scripts'] = self.scripts
    result['has_extension'] = self.has_extension

    default_build_deps = [['BuildRequires', 'python2-devel'],
                          ['BuildRequires', 'python-setuptools']]
    if self.archive.is_egg:
        # egg-info carries the runtime requirements directly.
        result['runtime_deps'] = self.runtime_deps_from_egg_info
        result['build_deps'] = default_build_deps
    else:
        result['runtime_deps'] = self.runtime_deps_from_setup_py
        result['build_deps'] = utils.unique_deps(
            default_build_deps + self.build_deps_from_setup_py)

    py_vers = self.versions_from_archive
    result['base_python_version'] = (
        py_vers[0] if py_vers else settings.DEFAULT_PYTHON_VERSION)
    result['python_versions'] = (
        py_vers[1:] if py_vers else [settings.DEFAULT_ADDITIONAL_VERSION])

    doc_files, doc_license = self.separate_license_files(self.doc_files)
    result['doc_files'] = doc_files
    result['doc_license'] = doc_license

    # Forward these attributes of self verbatim into the result dict.
    for attr in ('py_modules', 'has_test_suite', 'has_bundled_egg_info',
                 'has_packages', 'packages'):
        result[attr] = getattr(self, attr)

    sphinx_dir = self.sphinx_dir
    if sphinx_dir:
        # Drop the top-level path component of the sphinx directory.
        result['sphinx_dir'] = "/".join(sphinx_dir.split("/")[1:])
        result['build_deps'].append(['BuildRequires', 'python-sphinx'])

    return result
def extract_data(self):
    """Extracts data from archive.

    Returns:
        PackageData object containing the extracted data.
    """
    # Derive the RPM name unless one was supplied explicitly.
    rpm_name = (self.name_convertor.rpm_name(self.name)
                if self.rpm_name is None else self.rpm_name)
    data = PackageData(self.local_file, self.name, rpm_name, self.version)

    with self.archive:
        data.set_from(self.data_from_archive)
    data.set_from(self.data_from_venv, update=True)

    # Runtime requirements are also needed at build time; merge and dedupe.
    data.data["build_deps"] += utils.runtime_to_build(data.data["runtime_deps"])
    data.build_deps = utils.unique_deps(data.data["build_deps"])

    # for example nose has attribute `packages` but instead of name listing
    # the packages it uses a function to find them, which makes
    # data.packages an empty set
    if data.has_packages and not data.packages:
        data.packages.add(data.name)
    return data
def test_unique_deps(self, input, expected):
    """Check that unique_deps reduces the parametrized input to `expected`."""
    # Parameter names `input`/`expected` are fixed by pytest.mark.parametrize.
    deduped = utils.unique_deps(input)
    assert deduped == expected
def extract_data(self):
    """Extracts data from PyPI and archive.

    Returns:
        PackageData object containing data extracted from PyPI and archive.
        If communication with the PyPI client fails, a PackageData whose
        md5/url fields hold "FAILED TO EXTRACT FROM PYPI" is returned.
    """
    try:
        release_urls = self.client.release_urls(self.name, self.version)
        release_data = self.client.release_data(self.name, self.version)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # still propagate. TODO: log the specific failure cause.
        logger.debug("Client: %s Name: %s Version: %s.",
                     self.client, self.name, self.version)
        # logger.warn is deprecated in favor of logger.warning.
        logger.warning(
            "Some kind of error while communicating with client: %s.",
            self.client, exc_info=True)
        return PackageData(
            self.local_file,
            self.name,
            self.name_convertor.rpm_name(self.name)
            if self.rpm_name is None else self.rpm_name,
            self.version,
            "FAILED TO EXTRACT FROM PYPI",
            "FAILED TO EXTRACT FROM PYPI",
        )

    url = ""
    md5_digest = None
    if release_urls:
        # Prefer a tar.gz sdist; this intentionally keeps the *last*
        # matching URL when several are listed (original behavior).
        for release_url in release_urls:
            if release_url["url"].endswith("tar.gz"):
                url = release_url["url"]
                md5_digest = release_url["md5_digest"]
        if url == "":
            url = release_urls[0]["url"]
            md5_digest = release_urls[0]["md5_digest"]
    elif release_data:
        url = release_data["download_url"]

    data = PackageData(
        self.local_file,
        self.name,
        self.name_convertor.rpm_name(self.name)
        if self.rpm_name is None else self.rpm_name,
        self.version,
        md5_digest,
        url,
    )

    for data_field in settings.PYPI_USABLE_DATA:
        setattr(data, data_field, release_data.get(data_field, ""))
    # Remember the PyPI-reported license as a last-resort fallback.
    pypi_license = data.license

    with self.archive:
        data.set_from(self.data_from_archive)
    data.set_from(self.data_from_venv, update=True)

    setattr(data, "scripts",
            utils.remove_major_minor_suffix(data.data["scripts"]))
    # Append all runtime deps to build deps and unique the result.
    data.data["build_deps"] += utils.runtime_to_build(data.data["runtime_deps"])
    setattr(data, "build_deps", utils.unique_deps(data.data["build_deps"]))

    # for example nose has attribute `packages` but instead of name listing
    # the packages it uses a function to find them, which makes
    # data.packages an empty set
    if data.has_packages and not data.packages:
        data.packages.add(data.name)

    # we usually get better license representation from trove classifiers
    data.license = (utils.license_from_trove(
        release_data.get("classifiers", "")) or data.license or pypi_license)
    return data