def __init__(self, path, # type: Path
             category='main', # type: str
             optional=False, # type: bool
             base=None # type: Path
             ):
    """Create a dependency backed by a local archive file (wheel or sdist)."""
    self._path = path
    self._base = base

    # Resolve a relative path against the provided base directory.
    full_path = path
    if base and not path.is_absolute():
        full_path = base / path
    self._full_path = full_path

    if not full_path.exists():
        raise ValueError('File {} does not exist'.format(self._path))
    if full_path.is_dir():
        raise ValueError(
            '{} is a directory, expected a file'.format(self._path)
        )

    # A ".whl" suffix marks a wheel; anything else is treated as an sdist.
    if path.suffix == '.whl':
        self._meta = pkginfo.Wheel(str(full_path))
    else:
        self._meta = pkginfo.SDist(str(full_path))

    super(FileDependency, self).__init__(
        self._meta.name,
        self._meta.version,
        category=category,
        optional=optional,
        allows_prereleases=True
    )
def get_pkginfo(package, filename, pypiurl, digest, python_version, extra_specs, config, setup_options):
    """Download *package*'s archive, run its setup.py under a faked
    setuptools, and return the harvested metadata as a dict.

    ``digest`` is a ``(hash_type, hash_value)`` pair used to verify the
    downloaded file; a mismatch after (re-)download raises RuntimeError.
    A ``('sha256', ...)`` entry is added under ``'new_hash_value'`` when
    the supplied digest was not already sha256.
    """
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute). Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)
    if not isdir(config.src_cache):
        makedirs(config.src_cache)
    hash_type = digest[0]
    hash_value = digest[1]
    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        # Re-download when the cached copy is absent or fails verification.
        if not isfile(download_path) or \
                hashsum_file(download_path, hash_type) != hash_value:
            download(pypiurl, join(config.src_cache, filename))
            if hashsum_file(download_path, hash_type) != hash_value:
                raise RuntimeError(
                    ' Download of {} failed'
                    ' checksum type {} expected value {}. Please'
                    ' try again.'.format(package, hash_type, hash_value))
        else:
            print("Using cached download")
        # Calculate the preferred hash type here if necessary.
        # Needs to be done in this block because this is where we have
        # access to the source file.
        if hash_type != 'sha256':
            new_hash_value = hashsum_file(download_path, 'sha256')
        else:
            new_hash_value = ''
        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version,
                    extra_specs=extra_specs, config=config,
                    setup_options=setup_options)
        try:
            # presumably run_setuppy wrote pkginfo.yaml — verify against
            # its implementation.
            with open(join(tempdir, 'pkginfo.yaml')) as fn:
                pkg_info = yaml.safe_load(fn)
        except IOError:
            # Fall back to the static sdist metadata when the yaml file
            # was not produced.
            pkg_info = pkginfo.SDist(download_path).__dict__
        if new_hash_value:
            pkg_info['new_hash_value'] = ('sha256', new_hash_value)
    finally:
        # Always clean up the scratch directory, even on failure.
        rm_rf(tempdir)
    return pkg_info
def get_package_from_file(cls, file_path): # type: (Path) -> Package
    """Build a Package from a local archive (wheel or sdist) on disk."""
    if file_path.suffix == ".whl":
        metadata = pkginfo.Wheel(str(file_path))
    else:
        # Anything that is not a wheel is assumed to be an sdist.
        metadata = pkginfo.SDist(str(file_path))

    package = Package(metadata.name, metadata.version)
    package.source_type = "file"
    package.source_url = file_path.as_posix()
    package.description = metadata.summary

    # Split declared requirements into extras and mandatory dependencies.
    for requirement in metadata.requires_dist:
        dependency = dependency_from_pep_508(requirement)
        for extra in dependency.in_extras:
            package.extras.setdefault(extra, []).append(dependency)
        if not dependency.is_optional():
            package.requires.append(dependency)

    if metadata.requires_python:
        package.python_versions = metadata.requires_python

    return package
def package_info(path):
    """Collect filesystem and packaging metadata for an archive file.

    :param path: path (str or Path) to the file to inspect.
    :returns: dict with ``type`` ('wheel', 'sdist' or 'unknown'), ``path``,
        ``size``, ``modified`` (naive UTC datetime), ``filename``, plus
        ``metadata`` when pkginfo could parse the archive and ``error``
        when parsing raised ValueError.
    :raises FileNotFoundError: if *path* does not exist.
    """
    path = Path(path)
    if not path.exists():
        raise FileNotFoundError("File not found")
    # Single stat() call instead of two separate ones.
    stat = path.stat()
    d = {'type': 'unknown',
         'path': str(path),
         'size': stat.st_size,
         # utcfromtimestamp kept for backward-compatible naive datetimes
         # (deprecated in 3.12 — consider fromtimestamp(..., tz=UTC) later).
         'modified': datetime.utcfromtimestamp(stat.st_mtime),
         'filename': str(path.name)}
    pkg = None
    try:
        if path.suffix == '.whl':
            pkg = pkginfo.Wheel(path)
            d['type'] = 'wheel'
        # Generalized: also recognize .zip and .tgz sdists (previously only
        # .gz/.bz2; other suffixes still fall through as 'unknown').
        elif path.suffix in ('.gz', '.bz2', '.zip', '.tgz'):
            pkg = pkginfo.SDist(path)
            d['type'] = 'sdist'
    except ValueError as e:
        # Unparsable archive: report the error but keep the file facts.
        d['error'] = str(e)
    if pkg:
        d['metadata'] = {k: getattr(pkg, k, None) for k in pkg}
    return d
def render_package_information(self, package_name: str):
    """Render the package-info page listing downloadable archives."""
    package = Package.find_by_name(package_name)
    if package is None:
        return abort(404, "package does not exist in repository")

    folder = self.package_folder.joinpath(package_name)
    payload = {
        "name": package_name,
        "packages": [],
        "use_prompt": package.private and not self.is_pip,
    }

    # Private packages accessed through pip require a valid token;
    # otherwise render the page without any file listing.
    if self.is_pip and package.private and not package.is_valid_token(
            self.token):
        return render_template('package-info.html', **payload)

    for entry in folder.iterdir():
        if not entry.is_file():
            continue
        name = entry.name
        if not name.endswith(('.whl', '.tar.gz')):
            continue
        location = entry.as_posix()
        if name.endswith('.whl'):
            info = pkginfo.Wheel(location)
        else:
            info = pkginfo.SDist(location)
        payload["packages"].append({
            "link": f"/simple/{package_name}/{entry.name}",
            "title": entry.name,
            "requires_python": info.requires_python
        })

    return render_template('package-info.html', **payload)
def _from_sdist_file(cls, path): # type: (Path) -> PackageInfo
    """
    Helper method to parse package information from an sdist file. We attempt to
    first inspect the file using `pkginfo.SDist`. If this does not provide us with
    package requirements, we extract the source and handle it as a directory.

    :param path: The sdist file to parse information from.
    """
    info = None

    try:
        info = cls._from_distribution(pkginfo.SDist(str(path)))
    except ValueError:
        # Unable to determine dependencies
        # We pass and go deeper
        pass
    else:
        if info.requires_dist is not None:
            # we successfully retrieved dependencies from sdist metadata
            return info

    # Still no dependencies found
    # So, we unpack and introspect
    suffix = path.suffix

    if suffix == ".zip":
        context = zipfile.ZipFile
    else:
        if suffix == ".bz2":
            suffixes = path.suffixes
            if len(suffixes) > 1 and suffixes[-2] == ".tar":
                suffix = ".tar.bz2"
        else:
            suffix = ".tar.gz"

        context = tarfile.open

    with temporary_directory() as tmp:
        tmp = Path(tmp)
        with context(path.as_posix()) as archive:
            archive.extractall(tmp.as_posix())

        # a little bit of guess work to determine the directory we care about
        elements = list(tmp.glob("*"))

        if len(elements) == 1 and elements[0].is_dir():
            sdist_dir = elements[0]
        else:
            # BUGFIX: str.rstrip() strips a character *set*, not a literal
            # suffix, which over-strips names ending in any of the suffix's
            # characters (e.g. "demo-1.2a.tar.gz" -> "demo-1.2"). Slice the
            # exact suffix off instead.
            name = path.name
            if name.endswith(suffix):
                name = name[: -len(suffix)]
            sdist_dir = tmp / name
            if not sdist_dir.is_dir():
                sdist_dir = tmp

        # now this is an unpacked directory we know how to deal with
        new_info = cls.from_directory(path=sdist_dir)

        if not info:
            return new_info

        return info.update(new_info)
def _get_pkginfo(filename: str): try: if ".whl" in filename: return pkginfo.Wheel(filename) else: return pkginfo.SDist(filename) except Exception: return None
def get_pkginfo(f):
    """Return distribution metadata for archive *f*, or None.

    Dispatches on the file extension: wheels -> ``pkg.Wheel``, eggs ->
    ``pkg.BDist``, common sdist archives -> ``pkg.SDist``. Unrecognized
    extensions and unreadable archives both yield None.
    """
    # The basename portion of splitext is unused; only the extension matters.
    _, ext = os.path.splitext(f)
    try:
        if ext == '.whl':
            return pkg.Wheel(f)
        if ext == '.egg':
            return pkg.BDist(f)
        # All sdist archive flavours share one reader.
        if ext in ('.gz', '.zip', '.bz2', '.tgz'):
            return pkg.SDist(f)
    except (ReadError, ValueError):
        # Corrupt or unparsable archive: treat as "no metadata".
        return None
    # Unrecognized extension.
    return None
def get_install_command(args):
    """Build, wire up and start an Install command from parsed CLI args."""
    index = Index(os.path.expanduser('~/.curds'))
    index.scan()

    cmd = Install({
        'log_level': args.log_level,
        'pypi_urls': args.index or DEFAULT_PYPI_INDEX_LIST,
        'curdling_urls': args.curdling_index,
        'force': args.force,
        'upload': args.upload,
        'index': index,
    })

    # Separate local archive paths from the requested package names.
    tarballs = []
    remaining = []
    for requested in args.packages:
        if os.path.isfile(requested) and acceptable_file_type(requested):
            tarballs.append(requested)
        else:
            remaining.append(requested)
    args.packages = remaining
    initial_requirements = get_packages_from_args(args)

    # Callbacks that show feedback for the user (skipped in quiet mode or
    # when there is nothing to install).
    if not args.quiet and initial_requirements:
        cmd.connect('update_retrieve_and_build', build_and_retrieve_progress)
        cmd.connect('update_install', partial(progress, 'Installing'))
        cmd.connect('update_upload', partial(progress, 'Uploading'))
        cmd.connect('finished', show_report)

    # This is the last thing called in the software. It will raise a
    # SystemExit to return the right code to the OS depending on the
    # value received by the callback.
    cmd.connect('finished', handle_install_exit)

    # Start the required services and queue the installation of all the
    # received packages before returning the command instance.
    cmd.pipeline()
    cmd.start()
    for tarball in tarballs:
        metadata = pkginfo.SDist(tarball)
        cmd.queue('main', tarball=tarball, requirement=metadata.name,
                  directory=None)
    for requirement in initial_requirements:
        cmd.queue('main', requirement=requirement)
    return cmd
def search_for_file(self, dependency): # type: (FileDependency) -> List[Package]
    """Resolve a file-based dependency into a single-element package list."""
    full_path = str(dependency.full_path)
    if dependency.path.suffix == ".whl":
        meta = pkginfo.Wheel(full_path)
    else:
        # Anything that is not a wheel is treated as an sdist.
        meta = pkginfo.SDist(full_path)

    if dependency.name != meta.name:
        # For now, the dependency's name must match the actual package's name
        raise RuntimeError(
            "The dependency name for {} does not match the actual package's name: {}".format(
                dependency.name, meta.name
            )
        )

    package = Package(meta.name, meta.version)
    package.source_type = "file"
    package.source_url = dependency.path.as_posix()
    package.description = meta.summary

    # Split declared requirements into extras and mandatory dependencies.
    for requirement in meta.requires_dist:
        dep = dependency_from_pep_508(requirement)
        for extra in dep.in_extras:
            package.extras.setdefault(extra, []).append(dep)
        if not dep.is_optional():
            package.requires.append(dep)

    if meta.requires_python:
        package.python_versions = meta.requires_python

    package.hashes = [dependency.hash()]

    # Activate any extras requested on the dependency itself and pull
    # their dependencies into the required set.
    for extra in dependency.extras:
        if extra in package.extras:
            for extra_dep in package.extras[extra]:
                extra_dep.activate()
            package.requires += package.extras[extra]

    return [package]
def get_pkginfo(package, filename, pypiurl, md5, python_version, extra_specs, config, setup_options):
    """Download *package*'s archive (verified against *md5*), run its
    setup.py under a faked setuptools, and return the metadata dict.
    """
    # Unfortunately, two important pieces of metadata are only stored in
    # the package itself: the dependencies, and the entry points (if the
    # package uses distribute). Our strategy is to download the package
    # and "fake" distribute/setuptools's setup() function to get this
    # information from setup.py. If this sounds evil, keep in mind that
    # distribute itself already works by monkeypatching distutils.
    tempdir = mkdtemp('conda_skeleton_' + filename)
    if not isdir(config.src_cache):
        makedirs(config.src_cache)
    try:
        # Download it to the build source cache. That way, you have
        # it.
        download_path = join(config.src_cache, filename)
        if not isfile(download_path) or \
                hashsum_file(download_path, 'md5') != md5:
            download(pypiurl, join(config.src_cache, filename))
        else:
            print("Using cached download")
        print("Unpacking %s..." % package)
        unpack(join(config.src_cache, filename), tempdir)
        print("done")
        print("working in %s" % tempdir)
        src_dir = get_dir(tempdir)
        # TODO: find args parameters needed by run_setuppy
        run_setuppy(src_dir, tempdir, python_version,
                    extra_specs=extra_specs, config=config,
                    setup_options=setup_options)
        try:
            with open(join(tempdir, 'pkginfo.yaml')) as fn:
                # SECURITY/BUGFIX: yaml.load() without an explicit Loader can
                # construct arbitrary Python objects and is deprecated since
                # PyYAML 5.1; safe_load also matches the sibling variant of
                # this function elsewhere in the codebase.
                pkg_info = yaml.safe_load(fn)
        except IOError:
            # Fall back to static sdist metadata when no yaml was produced.
            pkg_info = pkginfo.SDist(download_path).__dict__
    finally:
        # Always clean up the scratch directory, even on failure.
        rm_rf(tempdir)
    return pkg_info
def _get_info_from_sdist(
        self, url):  # type: (str) -> Dict[str, Union[str, List, None]]
    """Download the sdist at *url* and collect its summary, required
    Python version and requirements.

    Tries, in order: pkginfo metadata, an unpacked ``.egg-info``'s
    ``requires.txt``, and finally a static (non-executing) read of
    setup.py / setup.cfg. Returns a dict with keys ``summary``,
    ``requires_python`` and ``requires_dist``.
    """
    self._log(
        "Downloading sdist: {}".format(
            urlparse.urlparse(url).path.rsplit("/")[-1]),
        level="debug",
    )
    info = {"summary": "", "requires_python": None, "requires_dist": None}
    filename = os.path.basename(urlparse.urlparse(url).path)
    with temporary_directory() as temp_dir:
        filepath = Path(temp_dir) / filename
        self._download(url, str(filepath))
        try:
            # Fast path: read what pkginfo can extract directly.
            meta = pkginfo.SDist(str(filepath))
            if meta.summary:
                info["summary"] = meta.summary
            if meta.requires_python:
                info["requires_python"] = meta.requires_python
            if meta.requires_dist:
                # Dependencies found in the metadata: nothing more to do.
                info["requires_dist"] = list(meta.requires_dist)
                return info
        except ValueError:
            # Unable to determine dependencies
            # We pass and go deeper
            pass

        # Still not dependencies found
        # So, we unpack and introspect
        suffix = filepath.suffix
        gz = None
        if suffix == ".zip":
            tar = zipfile.ZipFile(str(filepath))
        else:
            if suffix == ".bz2":
                gz = BZ2File(str(filepath))
                suffixes = filepath.suffixes
                if len(suffixes) > 1 and suffixes[-2] == ".tar":
                    suffix = ".tar.bz2"
            else:
                gz = GzipFile(str(filepath))
                suffix = ".tar.gz"
            tar = tarfile.TarFile(str(filepath), fileobj=gz)
        try:
            tar.extractall(os.path.join(temp_dir, "unpacked"))
        finally:
            # Close the decompressor and archive even if extraction fails.
            if gz:
                gz.close()
            tar.close()

        unpacked = Path(temp_dir) / "unpacked"
        # NOTE(review): str.rstrip() strips a character *set*, not a literal
        # suffix, so names ending in any of ".targz"/".tarbz2" characters
        # can be over-stripped (e.g. "foo-1.2a.tar.gz" -> "foo-1.2");
        # consider name[:-len(suffix)] — TODO confirm and fix.
        sdist_dir = unpacked / Path(filename).name.rstrip(suffix)

        # Checking for .egg-info at root
        eggs = list(sdist_dir.glob("*.egg-info"))
        if eggs:
            egg_info = eggs[0]
            requires = egg_info / "requires.txt"
            if requires.exists():
                with requires.open(encoding="utf-8") as f:
                    info["requires_dist"] = parse_requires(f.read())
                    return info

        # Searching for .egg-info in sub directories
        eggs = list(sdist_dir.glob("**/*.egg-info"))
        if eggs:
            egg_info = eggs[0]
            requires = egg_info / "requires.txt"
            if requires.exists():
                with requires.open(encoding="utf-8") as f:
                    info["requires_dist"] = parse_requires(f.read())
                    return info

        # Still nothing, try reading (without executing it)
        # the setup.py file.
        try:
            setup_info = self._inspect_sdist_with_setup(sdist_dir)
            for key, value in info.items():
                if value:
                    continue
                # Only fill in fields the earlier passes left empty.
                info[key] = setup_info[key]
            return info
        except Exception as e:
            self._log(
                "An error occurred when reading setup.py or setup.cfg: {}".
                format(str(e)),
                "warning",
            )

        return info
def _get_info_from_sdist(
        self, url):  # type: (str) -> Dict[str, Union[str, List, None]]
    """Download the sdist at *url* and collect its summary, required
    Python version and requirements.

    Tries pkginfo metadata first, then unpacks the archive and looks for
    an ``.egg-info``'s ``requires.txt``. Returns a dict with keys
    ``summary``, ``requires_python`` and ``requires_dist``.
    """
    info = {"summary": "", "requires_python": None, "requires_dist": None}
    filename = os.path.basename(urlparse.urlparse(url).path)
    with temporary_directory() as temp_dir:
        filepath = Path(temp_dir) / filename
        self._download(url, str(filepath))
        try:
            # Fast path: read what pkginfo can extract directly.
            meta = pkginfo.SDist(str(filepath))
            if meta.summary:
                info["summary"] = meta.summary
            if meta.requires_python:
                info["requires_python"] = meta.requires_python
            if meta.requires_dist:
                info["requires_dist"] = list(meta.requires_dist)
                return info
        except ValueError:
            # Unable to determine dependencies
            # We pass and go deeper
            pass

        # Still not dependencies found
        # So, we unpack and introspect
        suffix = filepath.suffix
        gz = None
        if suffix == ".zip":
            tar = zipfile.ZipFile(str(filepath))
        else:
            if suffix == ".bz2":
                gz = BZ2File(str(filepath))
                # BUGFIX: recognize ".tar.bz2" so the directory name below
                # is stripped correctly (previously only ".bz2" was removed,
                # leaving a stray ".tar" in the expected directory name).
                suffixes = filepath.suffixes
                if len(suffixes) > 1 and suffixes[-2] == ".tar":
                    suffix = ".tar.bz2"
            else:
                gz = GzipFile(str(filepath))
                suffix = ".tar.gz"
            tar = tarfile.TarFile(str(filepath), fileobj=gz)
        try:
            tar.extractall(os.path.join(temp_dir, "unpacked"))
        finally:
            # Close the decompressor and archive even if extraction fails.
            if gz:
                gz.close()
            tar.close()

        unpacked = Path(temp_dir) / "unpacked"
        # BUGFIX: str.rstrip() strips a character *set*, not a literal
        # suffix, which over-strips names ending in any of ".targz"
        # characters (e.g. "foo-1.2a.tar.gz" -> "foo-1.2"). Slice the
        # exact suffix off instead.
        name = Path(filename).name
        if name.endswith(suffix):
            name = name[: -len(suffix)]
        sdist_dir = unpacked / name

        # Checking for .egg-info at root
        eggs = list(sdist_dir.glob("*.egg-info"))
        if eggs:
            egg_info = eggs[0]
            requires = egg_info / "requires.txt"
            if requires.exists():
                with requires.open() as f:
                    info["requires_dist"] = parse_requires(f.read())
                    return info

        # Searching for .egg-info in sub directories
        eggs = list(sdist_dir.glob("**/*.egg-info"))
        if eggs:
            egg_info = eggs[0]
            requires = egg_info / "requires.txt"
            if requires.exists():
                with requires.open() as f:
                    info["requires_dist"] = parse_requires(f.read())
                    return info

        # Still nothing, assume no dependencies
        # We could probably get them by executing
        # python setup.py egg-info but I don't feel
        # confortable executing a file just for the sake
        # of getting dependencies.
        return info
def inspect_sdist(
        self, file_path):  # type: (Path) -> Dict[str, Union[str, List[str]]]
    """Extract name, version, summary and requirements from an sdist file.

    Tries, in order: pkginfo metadata, an unpacked pyproject.toml with a
    ``[tool.poetry]`` section, an ``.egg-info``'s ``requires.txt``, and
    finally a static (non-executing) read of setup.py / setup.cfg.
    Side effect: extracts the archive into an "unpacked" directory next
    to *file_path* (not cleaned up here).
    """
    info = {
        "name": "",
        "version": "",
        "summary": "",
        "requires_python": None,
        "requires_dist": None,
    }
    try:
        # Fast path: read what pkginfo can extract directly.
        meta = pkginfo.SDist(str(file_path))
        if meta.name:
            info["name"] = meta.name
        if meta.version:
            info["version"] = meta.version
        if meta.summary:
            info["summary"] = meta.summary
        if meta.requires_python:
            info["requires_python"] = meta.requires_python
        if meta.requires_dist:
            # Dependencies found in the metadata: nothing more to do.
            info["requires_dist"] = list(meta.requires_dist)
            return info
    except ValueError:
        # Unable to determine dependencies
        # We pass and go deeper
        pass

    # Still not dependencies found
    # So, we unpack and introspect
    suffix = file_path.suffix
    gz = None
    if suffix == ".zip":
        tar = zipfile.ZipFile(str(file_path))
    else:
        if suffix == ".bz2":
            gz = BZ2File(str(file_path))
            suffixes = file_path.suffixes
            if len(suffixes) > 1 and suffixes[-2] == ".tar":
                suffix = ".tar.bz2"
        else:
            gz = GzipFile(str(file_path))
            suffix = ".tar.gz"
        tar = tarfile.TarFile(str(file_path), fileobj=gz)
    try:
        tar.extractall(os.path.join(str(file_path.parent), "unpacked"))
    finally:
        # Close the decompressor and archive even if extraction fails.
        if gz:
            gz.close()
        tar.close()

    unpacked = file_path.parent / "unpacked"
    # a little bit of guess work to determine the directory we care about
    elements = list(unpacked.glob("*"))
    if len(elements) == 1 and elements[0].is_dir():
        sdist_dir = elements[0]
    else:
        # NOTE(review): str.rstrip() strips a character *set*, not a literal
        # suffix, so names ending in any of ".targz"/".tarbz2" characters
        # can be over-stripped (e.g. "foo-1.2a.tar.gz" -> "foo-1.2");
        # consider file_path.name[:-len(suffix)] — TODO confirm and fix.
        sdist_dir = unpacked / file_path.name.rstrip(suffix)

    # A poetry-managed sdist: build the info straight from pyproject.toml.
    pyproject = TomlFile(sdist_dir / "pyproject.toml")
    if pyproject.exists():
        from poetry.factory import Factory

        pyproject_content = pyproject.read()
        if "tool" in pyproject_content and "poetry" in pyproject_content[
                "tool"]:
            package = Factory().create_poetry(sdist_dir).package
            return {
                "name": package.name,
                "version": package.version.text,
                "summary": package.description,
                "requires_dist": [dep.to_pep_508()
                                  for dep in package.requires],
                "requires_python": package.python_versions,
            }

    # Checking for .egg-info at root
    eggs = list(sdist_dir.glob("*.egg-info"))
    if eggs:
        egg_info = eggs[0]
        requires = egg_info / "requires.txt"
        if requires.exists():
            with requires.open(encoding="utf-8") as f:
                info["requires_dist"] = parse_requires(f.read())
                return info

    # Searching for .egg-info in sub directories
    eggs = list(sdist_dir.glob("**/*.egg-info"))
    if eggs:
        egg_info = eggs[0]
        requires = egg_info / "requires.txt"
        if requires.exists():
            with requires.open(encoding="utf-8") as f:
                info["requires_dist"] = parse_requires(f.read())
                return info

    # Still nothing, try reading (without executing it)
    # the setup.py file.
    try:
        setup_info = self._inspect_sdist_with_setup(sdist_dir)
        for key, value in info.items():
            if value:
                continue
            # Only fill in fields the earlier passes left empty.
            info[key] = setup_info[key]
        return info
    except Exception as e:
        logger.warning(
            "An error occurred when reading setup.py or setup.cfg: {}".
            format(str(e)))

    return info
def _get_requires_dist_from_sdist(self, url
                                  ):  # type: (str) -> Union[list, None]
    """Download the sdist at *url* and return its requirements, or None.

    Tries pkginfo metadata first; failing that, unpacks the archive and
    reads an ``.egg-info``'s ``requires.txt``. setup.py is deliberately
    never executed, so "no dependencies found" yields None.
    """
    filename = os.path.basename(urlparse.urlparse(url).path)
    with temporary_directory() as temp_dir:
        filepath = Path(temp_dir) / filename
        self._download(url, str(filepath))
        try:
            # Fast path: read the requirements straight from the metadata.
            meta = pkginfo.SDist(str(filepath))
            if meta.requires_dist:
                return meta.requires_dist
        except ValueError:
            # Unable to determine dependencies
            # We pass and go deeper
            pass

        # Still not dependencies found
        # So, we unpack and introspect
        suffix = filepath.suffix
        gz = None
        if suffix == '.zip':
            tar = zipfile.ZipFile(str(filepath))
        else:
            if suffix == '.bz2':
                gz = BZ2File(str(filepath))
            else:
                gz = GzipFile(str(filepath))
            tar = tarfile.TarFile(str(filepath), fileobj=gz)
        try:
            tar.extractall(os.path.join(temp_dir, 'unpacked'))
        finally:
            # Close the decompressor and archive even if extraction fails.
            if gz:
                gz.close()
            tar.close()

        unpacked = Path(temp_dir) / 'unpacked'
        # BUGFIX: the old code used name.rstrip('.tar.gz'), which strips a
        # character *set* ('.', 't', 'a', 'r', 'g', 'z') rather than the
        # literal suffix — it could eat trailing characters of the
        # name/version (e.g. "foo-1.2a.tar.gz" -> "foo-1.2") and never
        # worked for non-.tar.gz archives. Strip the actual suffix instead.
        name = Path(filename).name
        for archive_suffix in ('.tar.gz', '.tar.bz2', '.tgz', '.zip',
                               '.bz2', '.gz'):
            if name.endswith(archive_suffix):
                name = name[: -len(archive_suffix)]
                break
        sdist_dir = unpacked / name

        # Checking for .egg-info
        eggs = list(sdist_dir.glob('*.egg-info'))
        if eggs:
            egg_info = eggs[0]
            requires = egg_info / 'requires.txt'
            if requires.exists():
                with requires.open() as f:
                    return parse_requires(f.read())
            return

        # Still nothing, assume no dependencies
        # We could probably get them by executing
        # python setup.py egg-info but I don't feel
        # confortable executing a file just for the sake
        # of getting dependencies.
        return
# # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # Extract package info metadata for use by curl. import pkginfo import sys if len(sys.argv) < 3: exit() info = pkginfo.SDist(sys.argv[1]) curl_config = open(sys.argv[2], 'w') meta_items = { 'metadata_version': info.metadata_version, 'summary': info.summary, 'home_page': info.home_page, 'author': info.author, 'author_email': info.author_email, 'license': info.license, 'description': info.description, 'keywords': info.keywords, 'platform': info.platforms, 'classifiers': info.classifiers, 'download_url': info.download_url, 'provides': info.provides,