def _get_candidates(self, name) -> dict:
    """Return all candidates for the given name which are available for the
    current python version.

    Entries in the db may be strings, which are references to another key
    on the same level; those references are resolved before use.
    Candidates whose ``python_requires`` excludes the current interpreter
    are skipped.
    """
    key = self.unify_key(name)
    # bind the per-package data once instead of probing with a bare
    # `self.data[key]` and then indexing it a second time in the loop
    try:
        versions = self.data[key]
    except KeyError:
        return {}
    candidates = {}
    for ver, pyvers in versions.items():
        # in case pyvers is a string, it is a reference to another ver which we need to resolve
        if isinstance(pyvers, str):
            pyvers = versions[pyvers]
        if self.py_ver_digits in pyvers:
            pkg_data = pyvers[self.py_ver_digits]
            # in case pkg_data is a string, it is a reference to another pyver which we need to resolve
            if isinstance(pkg_data, str):
                pkg_data = pyvers[pkg_data]
            if 'python_requires' in pkg_data:
                specs = ",".join(pkg_data['python_requires'])
                parsed_py_requires = list(parse_reqs(f"python{specs}"))
                # skip candidates that exclude the current python version
                if not filter_versions([self.py_ver.version], parsed_py_requires[0]):
                    continue
            candidates[ver] = pkg_data
    return candidates
def get_pkg_reqs(
        self,
        pkg_name,
        pkg_version: Version,
        extras=None) -> Tuple[List[Requirement], List[Requirement]]:
    """Get requirements for package.

    Returns a tuple ``(install_requires, setup_requires)``. Requirements
    are filtered by evaluating their environment markers against
    ``self.context``; requirements contributed by extras are merged into
    ``install_requires``.

    Raises:
        PackageNotFound: if the exact version is not among the candidates.
    """
    # look the candidates up only once instead of once for the membership
    # test and again for the actual access
    candidates = self._get_candidates(pkg_name)
    if pkg_version not in candidates:
        raise PackageNotFound(pkg_name, pkg_version, self.name)
    pkg = candidates[pkg_version]
    requirements = dict(setup_requires=[], install_requires=[])
    for t in ("setup_requires", "install_requires"):
        # keys are pre-initialized to [], so only present keys need parsing
        if t in pkg:
            reqs = parse_reqs(pkg[t])
            requirements[t] = list(
                filter_reqs_by_eval_marker(reqs, self.context))
    if not extras:
        extras = []
    # even if no extras are selected we need to collect reqs for extras,
    # because some extras consist of only a marker which needs to be evaluated
    requirements['install_requires'] += self.get_reqs_for_extras(
        pkg_name, pkg_version, extras)
    return requirements['install_requires'], requirements['setup_requires']
def main():
    """Script entry point: read settings from the environment, resolve the
    given requirements and write the generated overlay expression to
    ``out_file``."""
    # gather all environment inputs up front (order preserved)
    env = {
        key: load_env(key)
        for key in (
            'disable_checks',
            'nixpkgs_commit',
            'nixpkgs_tarball_sha256',
            'nixpkgs_json',
            'out_file',
            'py_ver_str',
            'prefer_nixpkgs',
            'pypi_deps_db_data_dir',
            'pypi_fetcher_commit',
            'pypi_fetcher_tarball_sha256',
            'requirements',
        )
    }
    py_ver = PyVer(env['py_ver_str'])
    nixpkgs = NixpkgsDirectory(env['nixpkgs_json'])
    deps_db = DependencyDB(py_ver, env['pypi_deps_db_data_dir'])
    generator = OverlaysGenerator(
        py_ver,
        env['nixpkgs_commit'],
        env['nixpkgs_tarball_sha256'],
        nixpkgs,
        env['pypi_fetcher_commit'],
        env['pypi_fetcher_tarball_sha256'],
        env['disable_checks'],
        ResolvelibResolver(nixpkgs, deps_db),
        prefer_nixpkgs=env['prefer_nixpkgs'],
    )
    reqs = parse_reqs(env['requirements'])
    expr = generator.generate(reqs)
    with open(env['out_file'], 'w') as f:
        f.write(expr)
def python_ok(self, build):
    """Return False if the build depends on pypy, or on a python version
    incompatible with the configured interpreter; True otherwise."""
    for dependency in build['depends']:
        # pypy builds are never acceptable
        if dependency == "pypy" or dependency.startswith("pypy "):
            return False
        if not dependency.startswith("python "):
            continue
        parsed = next(iter(parse_reqs([dependency])))
        if not filter_versions([self.py_ver_parsed], parsed):
            return False
    return True
def _python_requires_ok(self, wheel: WheelRelease): if not wheel.requires_python: return True ver = parse('.'.join(self.py_ver_digits)) try: parsed_py_requires = list(parse_reqs(f"python{wheel.requires_python}")) return bool(filter_versions([ver], parsed_py_requires[0].specs)) except RequirementParseError: print(f"WARNING: `requires_python` attribute of wheel {wheel.name}:{wheel.ver} could not be parsed") return False
def get_pkg_reqs(self, pkg_name, pkg_version: Version, extras=None) -> Tuple[List[Requirement], List[Requirement]]:
    """Return (install_requires, setup_requires) for the given package.

    Wheels carry no setup requirements, so the second element is always
    an empty list.
    """
    raw = self._choose_wheel(pkg_name, pkg_version).requires_dist
    if raw is None:
        raw = []
    # extras are handled by evaluating the requirements' environment markers
    install = list(filter_reqs_by_eval_marker(parse_reqs(raw), self.context_wheel, extras))
    return install, []
def get_pkg_reqs(self, c: Candidate) -> Tuple[List[Requirement], List[Requirement]]:
    """Return (install_requires, setup_requires) for a conda candidate;
    conda packages declare no setup requirements."""
    data = c.provider_info.data
    # always add optional dependencies ("constrains") to ensure their
    # version constraints are applied
    raw_deps = data['depends'] + data.get('constrains', [])
    kept = [d for d in raw_deps if d.split()[0] not in self.ignored_pkgs]
    return list(parse_reqs(kept)), []
def get_pkg_reqs(self, c: Candidate) -> Tuple[List[Requirement], List[Requirement]]:
    """Return (install_requires, setup_requires) for the candidate wheel.

    Wheels carry no setup requirements, so the second element is always
    an empty list.
    """
    raw = c.provider_info.data.requires_dist
    if raw is None:
        raw = []
    # extras are handled by evaluating the requirements' environment markers
    install = list(filter_reqs_by_eval_marker(parse_reqs(raw), self.context_wheel, c.selected_extras))
    return install, []
def get_pkg_reqs(
        self, c: Candidate) -> Tuple[List[Requirement], List[Requirement]]:
    """Return (install_requires, setup_requires) for a conda candidate;
    conda packages declare no setup requirements."""
    pkg_key = normalize_name(c.name)
    deviated_ver = self.deviated_version(pkg_key, c.ver, c.build)
    pkg_data = self.pkgs[pkg_key][deviated_ver][c.build]
    # always add optional dependencies ("constrains") to ensure their
    # version constraints are applied
    raw_deps = pkg_data['depends'] + pkg_data.get('constrains', [])
    kept = [d for d in raw_deps if d.split()[0] not in self.ignored_pkgs]
    return list(parse_reqs(kept)), []
def get_pkg_reqs(self, c: Candidate) -> Tuple[Optional[List[Requirement]], Optional[List[Requirement]]]:
    """Return requirements for the candidate, preferring nixpkgs metadata
    and falling back to the sdist then wheel providers.

    Returns (None, None) when no provider knows the exact version.
    """
    reqs = self.nixpkgs.get_requirements(c.name, c.ver)
    if reqs is not None:
        # nixpkgs tracks install requirements only
        return list(parse_reqs(reqs)), None
    for provider in (self.sdist_provider, self.wheel_provider):
        # first candidate with the exact version wins
        match = next(
            (cand for cand in provider.all_candidates(c.name, None, None)
             if cand.ver == c.ver),
            None)
        if match is not None:
            return provider.get_pkg_reqs(match)
    return None, None
def get_reqs_for_extras(self, pkg_name, pkg_ver, extras):
    """Collect install requirements contributed by the given extras.

    An ``extras_require`` key may embed an environment marker after a ':'
    (e.g. ``socks:python_version < "3.7"``); such requirements are only
    included when the marker evaluates to true for ``self.context``. An
    empty extra name applies unconditionally.
    """
    # use a dedicated name for the unified key; previously the loop
    # variable `name` shadowed it, which was confusing
    key = self.unify_key(pkg_name)
    pkg = self._get_candidates(key)[pkg_ver]
    selected = set(extras)
    requirements = []
    if 'extras_require' in pkg:
        for extra_name, reqs_str in pkg['extras_require'].items():
            # handle extras with marker in key; split only on the first ':'
            # so a marker containing ':' does not raise ValueError
            if ':' in extra_name:
                extra_name, marker = extra_name.split(':', 1)
                if not distlib.markers.interpret(marker, self.context):
                    continue
            if extra_name == '' or extra_name in selected:
                requirements += list(filter_reqs_by_eval_marker(parse_reqs(reqs_str), self.context))
    return requirements
def get_pkg_reqs(
        self, pkg_name, pkg_version,
        extras=None) -> Tuple[List[Requirement], List[Requirement]]:
    """Get requirements for package.

    Returns ``(install_requires, setup_requires)``. Requirements guarded by
    environment markers are stripped according to ``self.context``;
    requirements of selected extras are merged into ``install_requires``.

    Raises:
        Exception: if the package/version pair is not in the db.
    """
    ver_str = str(pkg_version)
    if not self.exists(pkg_name) or ver_str not in self[pkg_name]:
        raise Exception(f'Cannot find {pkg_name}:{pkg_version} in db')
    pkg = self[pkg_name][ver_str]
    requirements = dict(setup_requires=[], install_requires=[])
    for t in ("setup_requires", "install_requires"):
        # keys are pre-initialized to [], so only present keys need parsing
        if t in pkg:
            reqs = list(parse_reqs(pkg[t]))
            requirements[t] = list(strip_reqs_by_marker(
                reqs, self.context))
    # normalize to a set in both cases (was [] for the falsy branch)
    extras = set(extras) if extras else set()
    if 'extras_require' in pkg:
        for extra_name, reqs_str in pkg['extras_require'].items():
            # handle extras with marker in key; split only on the first ':'
            # so a marker containing ':' does not raise ValueError
            if ':' in extra_name:
                extra_name, marker = extra_name.split(':', 1)
                if not distlib.markers.interpret(marker, self.context):
                    continue
            # handle if extra's key only contains marker. like ':python_version < "3.7"'
            if extra_name == '' or extra_name in extras:
                requirements['install_requires'] += list(
                    strip_reqs_by_marker(list(parse_reqs(reqs_str)), self.context))
    return requirements['install_requires'], requirements['setup_requires']
def _get_reqs_for_extras(self, pkg, extras): if extras is None: return [] extras = set(extras) requirements = [] if 'extras_require' in pkg: for name, reqs_str in pkg['extras_require'].items(): # handle extras with marker in key if ':' in name: name, marker = name.split(':') if not distlib.markers.interpret(marker, self.context): continue if name == '' or name in extras: requirements += list(filter_reqs_by_eval_marker(parse_reqs(reqs_str), self.context)) return requirements
def do():
    """Script entry point: read configuration from the environment, resolve
    the given requirements, and write the generated overrides expression to
    ``out_file``; on an impossible resolution, report and exit(1)."""
    providers_cfg = load_env('providers')
    conda_channels = load_env('conda_channels_json')
    checks_disabled = load_env('disable_checks')
    nixpkgs_cfg = load_env('nixpkgs_json')
    target_file = load_env('out_file')
    provider_settings = ProviderSettings(providers_cfg)
    py_ver_str = load_env('py_ver_str')
    deps_db_src = load_env('pypi_deps_db_src')
    fetcher_commit = load_env('pypi_fetcher_commit')
    fetcher_sha256 = load_env('pypi_fetcher_sha256')
    requirements_str = load_env('requirements')
    # system env var has the form "<platform>-<system>"
    platform, system = load_env('system').split('-')
    py_ver = PyVer(py_ver_str)
    nixpkgs = NixpkgsIndex(nixpkgs_cfg)
    deps_provider = CombinedDependencyProvider(
        conda_channels_json=conda_channels,
        nixpkgs=nixpkgs,
        provider_settings=provider_settings,
        pypi_deps_db_src=deps_db_src,
        py_ver=py_ver,
        platform=platform,
        system=system)
    generator = OverridesGenerator(
        py_ver,
        nixpkgs,
        fetcher_commit,
        fetcher_sha256,
        checks_disabled,
        ResolvelibResolver(nixpkgs, deps_provider),
    )
    # drop requirements whose markers don't match the target environment
    reqs = filter_reqs_by_eval_marker(
        parse_reqs(requirements_str), context(py_ver, platform, system))
    try:
        expr = generator.generate(reqs)
    except ResolutionImpossible as exc:
        handle_resolution_impossible(exc, requirements_str, providers_cfg, py_ver_str)
        exit(1)
    else:
        with open(target_file, 'w') as out:
            out.write(expr)
def get_pkg_reqs(self, c: Candidate) -> Tuple[List[Requirement], List[Requirement]]:
    """Return (install_requires, setup_requires) for the candidate, with
    requirements filtered by evaluating their environment markers."""
    pkg = c.provider_info.data

    def marker_filtered(kind):
        # a missing key means no requirements of that kind are declared
        if kind not in pkg:
            return []
        return list(filter_reqs_by_eval_marker(parse_reqs(pkg[kind]), self.context))

    setup_requires = marker_filtered("setup_requires")
    install_requires = marker_filtered("install_requires")
    # even if no extras are selected we need to collect reqs for extras,
    # because some extras consist of only a marker which needs to be evaluated
    install_requires += self._get_reqs_for_extras(pkg, c.selected_extras)
    return install_requires, setup_requires