def _wheel_candidate(source, filename):
    # type: (str, str) -> Optional[Candidate]
    filename = os.path.basename(filename)
    data_parts = filename[:-4].split("-")
    if len(data_parts) < 5:
        logging.getLogger("req_compile.repository").debug(
            "Unable to use %s, improper filename", filename
        )
        return None

    has_build_tag = len(data_parts) == 6
    build_tag = ""
    if has_build_tag:
        build_tag = data_parts.pop(2)

    name = data_parts[0]
    abi = data_parts[3]
    # Convert old-style post-versions to new style so they sort correctly
    version = parse_version(data_parts[1].replace("_", "-"))
    plats = data_parts[4].split(".")
    requires_python = WheelVersionTags(tuple(data_parts[2].split(".")))

    return Candidate(
        name,
        filename,
        version,
        requires_python,
        abi if abi != "none" else None,
        plats,
        source,
        candidate_type=DistributionType.WHEEL,
        extra_sort_info=build_tag,
    )
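# For illustration only: a standalone sketch of the PEP 427 filename split
# that _wheel_candidate performs above. Wheel filenames escape "-" runs in the
# project name to "_", so a naive dash split is safe. This helper and the
# sample filename are hypothetical, not part of the module API.
def _demo_wheel_filename_split(wheel_name):
    # name-version[-build]-pytag-abitag-plattag.whl
    parts = wheel_name[: -len(".whl")].split("-")
    build = parts.pop(2) if len(parts) == 6 else ""
    name, version, py_tag, abi_tag, plat_tag = parts
    return name, version, build, py_tag, abi_tag, plat_tag

# _demo_wheel_filename_split("requests-2.25.1-py2.py3-none-any.whl")
# -> ('requests', '2.25.1', '', 'py2.py3', 'none', 'any')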
def _parse_flat_metadata(contents):
    name = None
    version = None
    raw_reqs = []

    for line in contents.split("\n"):
        lower_line = line.lower()
        if name is None and lower_line.startswith("name:"):
            name = line.split(":")[1].strip()
        elif version is None and lower_line.startswith("version:"):
            version = utils.parse_version(line.split(":")[1].strip())
        elif lower_line.startswith("requires-dist:"):
            raw_reqs.append(line.partition(":")[2].strip())

    return DistInfo(name, version, list(utils.parse_requirements(raw_reqs)))
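# A hedged aside: METADATA/PKG-INFO payloads are RFC 822 style, so the stdlib
# email parser reads the same fields. A standalone sketch on a made-up payload
# (this helper is illustrative and unused by the module):
from email.parser import Parser as _RFC822Parser

def _demo_flat_metadata(contents):
    msg = _RFC822Parser().parsestr(contents)
    return msg["Name"], msg["Version"], msg.get_all("Requires-Dist") or []

# _demo_flat_metadata("Name: example\nVersion: 1.0\nRequires-Dist: six\n")
# -> ('example', '1.0', ['six'])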
def parse_source_filename(full_filename):
    filename = full_filename.replace(".tar.gz", "")
    filename = filename.replace(".tar.bz2", "")
    filename = filename.replace(".zip", "")
    filename = filename.replace(".tgz", "")

    # Source directories don't express a version
    if full_filename == filename:
        return full_filename, None

    filename = filename.replace("_", "-")

    dash_parts = filename.split("-")
    version_start = None
    for idx, part in enumerate(dash_parts):
        if not part:
            continue
        # pylint: disable=too-many-boolean-expressions
        if (idx != 0 and idx >= len(dash_parts) - 3) and (
            part[0].isdigit()
            or (len(part) > 1 and part[0].lower() == "v" and part[1].isdigit())
        ):
            if (
                idx == len(dash_parts) - 2
                and "." in dash_parts[idx + 1]
                and ("." not in part or re.sub(r"[\d.]+", "", part))
            ):
                continue
            version_start = idx
            break

    if version_start is None:
        return os.path.basename(filename), None

    if version_start == 0:
        raise ValueError("Package name missing: {}".format(full_filename))

    pkg_name = "-".join(dash_parts[:version_start])
    version_str = "-".join(dash_parts[version_start:]).replace("_", "-")
    version_parts = version_str.split(".")
    for idx, part in enumerate(version_parts):
        if idx != 0 and (
            part.startswith("linux")
            or part.startswith("windows")
            or part.startswith("macos")
        ):
            version_parts = version_parts[:idx]
            break

    version = utils.parse_version(".".join(version_parts))
    return pkg_name, version
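# Expected behavior on a few hypothetical archive names, for illustration
# (versions come back as packaging Version objects via utils.parse_version):
#
#   parse_source_filename("zope.interface-4.5.0.tar.gz")
#       -> ("zope.interface", Version("4.5.0"))
#   parse_source_filename("typing_extensions-4.0.0.tar.gz")
#       -> ("typing-extensions", Version("4.0.0"))   # underscores normalized
#   parse_source_filename("my-source-dir")
#       -> ("my-source-dir", None)                   # no archive suffix, no version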
def __init__(
    self,
    name,  # type: str
    filename,  # type: Optional[str]
    version,  # type: packaging.version.Version
    py_version,  # type: Optional[WheelVersionTags]
    abi,  # type: Optional[str]
    plats,  # type: Union[str, Iterable[str]]
    link,  # type: Optional[str]
    candidate_type=DistributionType.SDIST,  # type: DistributionType
    extra_sort_info="",  # type: str
):
    # type: (...) -> None
    """
    Args:
        name: Name of the candidate
        filename: The filename of the source of the candidate
        version: Version of the candidate
        py_version: Python version tags supported by this candidate
        abi: The ABI implemented
        plats: Platforms supported by this candidate
        link: URL from which to obtain the wheel
        candidate_type: The nature of the candidate, describing the distribution
        extra_sort_info: Additional sort information, such as a wheel build tag
    """
    self.name = name
    self.filename = filename
    self.version = version or parse_version(
        "0.0.0"
    )  # type: packaging.version.Version
    self.py_version = py_version
    self.abi = abi
    if isinstance(plats, six.string_types):
        self.platforms = {plats}
    else:
        self.platforms = set(plats)
    self.link = link
    self.type = candidate_type

    # Sort based on tags to make sure the most specific distributions
    # are matched first
    self._sortkey = (
        None
    )  # type: Optional[Tuple[packaging.version.Version, str, int, Tuple[int, int, int, int]]]
    self._extra_sort_info = extra_sort_info

    self.preparsed = None  # type: Optional[RequirementContainer]
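# For illustration, the Candidate that _wheel_candidate above would build for
# a hypothetical wheel filename (argument values are examples, not real data):
#
#   Candidate(
#       "requests",
#       "requests-2.25.1-py2.py3-none-any.whl",
#       parse_version("2.25.1"),
#       WheelVersionTags(("py2", "py3")),
#       None,           # an abi tag of "none" is normalized to None
#       ["any"],
#       source,         # the source URL the wheel was found at
#       candidate_type=DistributionType.WHEEL,
#   )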
def _add_distribution(self, source_dir, result):
    # type: (str, RequirementContainer) -> None
    if result.version is None:
        self.logger.debug("Source dir %s did not provide a version", source_dir)
        result.version = parse_version("0")

    candidate = req_compile.repos.repository.Candidate(
        result.name,
        source_dir,
        result.version,
        None,
        None,
        "any",
        None,
        req_compile.repos.repository.DistributionType.SOURCE,
    )
    candidate.preparsed = result
    self.distributions[utils.normalize_project_name(result.name)].append(candidate)
def compile_roots(
    node,  # type: DependencyNode
    source,  # type: Optional[DependencyNode]
    repo,  # type: BaseRepository
    dists,  # type: DistributionCollection
    options,  # type: CompileOptions
    depth=1,  # type: int
    max_downgrade=MAX_DOWNGRADE,  # type: int
    _path=None,
):  # pylint: disable=too-many-statements,too-many-locals,too-many-branches
    # type: (...) -> None
    """
    Args:
        node: The node to compile
        source: The source node of this provided node. This is used to build the graph
        repo: The repository to provide candidates.
        dists: The solution that is being built incrementally
        options: Static options for the compile (including extras)
        depth: Depth the compilation has descended into
        max_downgrade: The maximum number of version downgrades that will be allowed for conflicts
        _path: The path back to root - all nodes along the way
    """
    if _path is None:
        _path = set()

    logger = LOG
    logger.debug("Processing node %s", node)

    if node.key in dists and dists[node.key] is not node:
        logger.debug("No need to process this node, it has been removed")
    elif node.metadata is not None:
        if not node.complete:
            if depth > MAX_COMPILE_DEPTH:
                raise ValueError("Recursion too deep")
            try:
                for req in sorted(node.dependencies, key=lambda node: node.key):
                    if req in _path:
                        if options.allow_circular_dependencies:
                            logger.error(
                                "Skipping node %s because it includes this node",
                                req,
                            )
                        else:
                            raise ValueError(
                                "Circular dependency: {node} -> {req} -> {node}".format(
                                    node=node,
                                    req=req,
                                )
                            )
                    else:
                        compile_roots(
                            req,
                            node,
                            repo,
                            dists,
                            options,
                            depth=depth + 1,
                            max_downgrade=max_downgrade,
                            _path=_path | {node},
                        )
                node.complete = True
            except NoCandidateException:
                if max_downgrade == 0:
                    raise
                compile_roots(
                    node,
                    source,
                    repo,
                    dists,
                    options,
                    depth=depth,
                    max_downgrade=0,
                    _path=_path,
                )
        else:
            logger.info(
                "Reusing dist %s %s", node.metadata.name, node.metadata.version
            )
    else:
        spec_req = node.build_constraints()

        if options.pinned_requirements:
            pin = options.pinned_requirements.get(
                normalize_project_name(spec_req.project_name), spec_req
            )
            spec_req = merge_requirements(spec_req, pin)

        try:
            metadata, cached = repo.get_candidate(spec_req, max_downgrade=max_downgrade)
            logger.debug(
                "Acquired candidate %s %s [%s] (%s)",
                metadata,
                spec_req,
                metadata.origin,
                "cached" if cached else "download",
            )
            reason = None
            if source is not None:
                if node in source.dependencies:
                    reason = source.dependencies[node]
                    if options.extras and isinstance(metadata.origin, SourceRepository):
                        reason = merge_requirements(
                            reason,
                            parse_requirement(
                                reason.project_name
                                + "["
                                + ",".join(options.extras)
                                + "]"
                            ),
                        )

            nodes_to_recurse = dists.add_dist(metadata, source, reason)
            for recurse_node in sorted(nodes_to_recurse):
                compile_roots(
                    recurse_node,
                    source,
                    repo,
                    dists,
                    options,
                    depth=depth + 1,
                    max_downgrade=max_downgrade,
                    _path=_path,
                )
        except NoCandidateException:
            if max_downgrade == 0:
                raise

            exc_info = sys.exc_info()

            nodes = sorted(node.reverse_deps)

            violate_score = defaultdict(int)  # type: Dict[DependencyNode, int]
            for idx, revnode in enumerate(nodes):
                for next_node in nodes[idx + 1:]:
                    if not is_possible(
                        merge_requirements(
                            revnode.dependencies[node], next_node.dependencies[node]
                        )
                    ):
                        logger.error(
                            "Requirement %s was not possible. Violating pair: %s %s",
                            node.build_constraints(),
                            revnode,
                            next_node,
                        )
                        violate_score[revnode] += 1
                        violate_score[next_node] += 1

            try:
                baddest_node = next(
                    node
                    for node, _ in sorted(
                        violate_score.items(), key=operator.itemgetter(1)
                    )
                    if node.metadata is not None and not node.metadata.meta
                )
            except StopIteration:
                six.reraise(*exc_info)

            bad_meta = baddest_node.metadata
            assert bad_meta is not None
            logger.debug("The node %s had the most conflicts", baddest_node)

            new_constraints = [
                parse_requirement("{}!={}".format(bad_meta.name, bad_meta.version))
            ]
            bad_constraint = req_compile.containers.DistInfo(
                "#bad#-{}-{}".format(baddest_node, depth),
                parse_version("0.0.0"),
                new_constraints,
                meta=True,
            )
            dists.remove_dists(baddest_node, remove_upstream=False)
            baddest_node.complete = False
            dists.remove_dists(node, remove_upstream=False)
            node.complete = False

            bad_constraints = dists.add_dist(bad_constraint, None, None)
            try:
                logger.debug("Finding new solutions for %s and %s", node, baddest_node)
                for node_to_compile in (node, baddest_node):
                    compile_roots(
                        node_to_compile,
                        None,
                        repo,
                        dists,
                        options,
                        depth=depth,
                        max_downgrade=max_downgrade - 1,
                        _path=_path,
                    )
                print(
                    "Could not use {} {} - pin to this version to see why not".format(
                        bad_meta.name, bad_meta.version
                    ),
                    file=sys.stderr,
                )
            finally:
                dists.remove_dists(bad_constraints, remove_upstream=True)
def _fetch_from_setup_py(
    source_file, name, version, extractor
):  # pylint: disable=too-many-branches
    """Attempt a set of executions to obtain metadata from the setup.py without
    having to build a wheel.

    First attempt without mocking __import__ at all. This means that projects
    which import a package inside of themselves will not succeed, but all
    other simple source distributions will. If this fails, allow mocking of
    __import__ to extract from tar files and zip files. Imports will trigger
    files to be extracted and executed. If this fails, due to true build
    prerequisites not being satisfied or the mocks being insufficient, build
    the wheel and extract the metadata from it.

    Args:
        source_file (str): The source archive or directory
        name (str): The project name. Used if it cannot be determined from the archive
        version: The version to fall back on if it cannot be determined from the archive
        extractor (Extractor): The extractor to use to obtain files from the archive

    Returns:
        (DistInfo) The resulting distribution metadata
    """
    results = None

    setattr(THREADLOCAL, "curdir", extractor.fake_root)

    def _fake_chdir(new_dir):
        if os.path.isabs(new_dir):
            dir_test = os.path.relpath(new_dir, extractor.fake_root)
            if dir_test != "." and dir_test.startswith("."):
                raise ValueError(
                    "Cannot operate outside of setup dir ({})".format(dir_test)
                )
        elif new_dir == "..":
            new_dir = "/".join(re.split(r"[/\\]", os.getcwd())[:-1])
        setattr(THREADLOCAL, "curdir", os.path.abspath(new_dir))

    def _fake_getcwd():
        return getattr(THREADLOCAL, "curdir")

    def _fake_abspath(path):
        """Return the absolute version of a path."""
        if not os.path.isabs(path):
            if six.PY2 and isinstance(
                path, unicode  # pylint: disable=undefined-variable
            ):
                cwd = os.getcwdu()  # pylint: disable=no-member
            else:
                cwd = os.getcwd()
            path = cwd + "/" + path
        return path

    # fmt: off
    patches = patch(
        os, 'chdir', _fake_chdir,
        os, 'getcwd', _fake_getcwd,
        os, 'getcwdu', _fake_getcwd,
        os.path, 'abspath', _fake_abspath,
    )
    # fmt: on

    with patches:
        setup_file = find_in_archive(extractor, "setup.py", max_depth=1)

        if name == "setuptools":
            LOG.debug("Not running setup.py for setuptools")
            return None

        if setup_file is None:
            LOG.warning(
                "Could not find a setup.py in %s", os.path.basename(source_file)
            )
            return None

        try:
            LOG.info("Parsing setup.py %s", setup_file)
            results = _parse_setup_py(name, setup_file, extractor)
        except (Exception, RuntimeError, ImportError):  # pylint: disable=broad-except
            LOG.warning("Failed to parse %s", name, exc_info=True)

    if results is None:
        results = _build_egg_info(name, extractor, setup_file)

    if results is None or (results.name is None and results.version is None):
        return None

    if results.name is None:
        results.name = name

    if results.version is None or (version and results.version != version):
        LOG.debug(
            "Parsed version of %s did not match filename %s", results.version, version
        )
        results.version = version or utils.parse_version("0.0.0")

    if not isinstance(extractor, NonExtractor) and utils.normalize_project_name(
        results.name
    ) != utils.normalize_project_name(name):
        LOG.warning("Name coming from setup.py does not match: %s", results.name)
        results.name = name

    return results
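# For illustration, the same cwd-sandboxing idea using only stdlib mocks:
# redirect os.chdir/os.getcwd to a thread-local value so a setup.py executed
# in-process cannot wander outside a fake root. A minimal sketch, assuming
# nothing about this module's patch() helper; the names are hypothetical.
import os
import threading
from unittest import mock

def _demo_sandboxed_cwd(fake_root):
    local = threading.local()
    local.curdir = fake_root

    def fake_chdir(new_dir):
        # Resolve relative to the tracked cwd and refuse to escape the root
        new_dir = os.path.normpath(os.path.join(local.curdir, new_dir))
        if not new_dir.startswith(fake_root):
            raise ValueError("Cannot operate outside of setup dir")
        local.curdir = new_dir

    return mock.patch.multiple(os, chdir=fake_chdir, getcwd=lambda: local.curdir)

# with _demo_sandboxed_cwd("/tmp/fake"):
#     os.chdir("subdir")      # ok: stays under /tmp/fake
#     os.chdir("../../etc")   # raises ValueError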
def setup(
    results, *_args, **kwargs
):  # pylint: disable=too-many-branches,too-many-locals
    # pbr uses a dangerous pattern that only works when you build using setuptools
    # d2to1 uses unknown config options in setup.cfg
    setup_frameworks = ("pbr", "d2to1", "use_pyscaffold")
    for framework in setup_frameworks:
        if framework in kwargs:
            raise ValueError("Must run egg-info if {} is used".format(framework))

    if "setup_requires" in kwargs and (
        "pbr" in kwargs["setup_requires"] or "setupmeta" in kwargs["setup_requires"]
    ):
        raise ValueError("Must run egg-info if pbr/setupmeta is in setup_requires")

    if os.path.exists("setup.cfg"):
        _add_setup_cfg_kwargs(kwargs)

    name = kwargs.get("name", None)
    version = kwargs.get("version", None)
    reqs = kwargs.get("install_requires", [])
    extra_reqs = kwargs.get("extras_require", {})

    if version is not None:
        version = utils.parse_version(str(version))

    if isinstance(reqs, str):
        reqs = [reqs]

    all_reqs = list(utils.parse_requirements(reqs))
    for extra, extra_req_strs in extra_reqs.items():
        extra = extra.strip()
        if not extra:
            continue
        try:
            if isinstance(extra_req_strs, six.string_types):
                extra_req_strs = [extra_req_strs]
            cur_reqs = utils.parse_requirements(extra_req_strs)
            if extra.startswith(":"):
                req_with_marker = [
                    parse_req_with_marker(str(cur_req), extra[1:])
                    for cur_req in cur_reqs
                ]
            else:
                req_with_marker = [
                    parse_req_with_marker(
                        str(cur_req), 'extra=="{}"'.format(extra.replace('"', '\\"'))
                    )
                    for cur_req in cur_reqs
                ]
            all_reqs.extend(req_with_marker)
        except pkg_resources.RequirementParseError as ex:
            print(
                "Failed to parse extra requirement ({}) "
                "from the set:\n{}".format(str(ex), extra_reqs),
                file=sys.stderr,
            )
            raise

    if name is not None:
        name = name.replace(" ", "-")
    results.append(DistInfo(name, version, all_reqs))

    # Some projects inspect the setup() result
    class FakeResult(object):
        def __getattr__(self, item):
            return None

    return FakeResult()
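# For illustration, the extras_require -> marker translation performed above,
# shown standalone with packaging. The helper and inputs are hypothetical: a
# named extra becomes an `extra == "<name>"` marker, while a key starting with
# ":" carries a raw environment marker for unconditional dependencies.
from packaging.requirements import Requirement

def _demo_extra_to_marker(extra, req_str):
    if extra.startswith(":"):
        marker = extra[1:]
    else:
        marker = 'extra == "{}"'.format(extra)
    return Requirement("{}; {}".format(req_str, marker))

# _demo_extra_to_marker("security", "cryptography>=2.0")
# -> Requirement('cryptography>=2.0; extra == "security"')
# _demo_extra_to_marker(':python_version<"3"', "futures")
# -> Requirement('futures; python_version < "3"')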