def generate_pipeline_products(self, graph: GraphDatabase) -> Generator[PipelineProduct, None, None]:
    """Generate projects in stack candidates.

    All the candidates are discarded after calling this function.

    :param graph: graph database instance attached to each yielded product
    :return: generator yielding one ``PipelineProduct`` per stack candidate,
        popped in heap order (best sort key first)
    """
    # Evaluate the flag once, outside the loop.  The previous
    # ``bool(os.getenv("THOTH_ADVISER_SHOW_PACKAGES", 0))`` treated ANY
    # non-empty string as truthy - exporting the variable as "0" still
    # printed packages.  Treat empty/"0"/"false" explicitly as disabled.
    show_packages = os.getenv("THOTH_ADVISER_SHOW_PACKAGES", "0").strip().lower() not in ("", "0", "false")

    while self._stack_candidates:
        heap_item = heappop(self._stack_candidates)
        sort_key, justification, stack_candidate = heap_item
        # The first component of the heap sort key is the stack score.
        score = sort_key[0]

        package_versions_locked = [
            self.get_package_version_tuple(package_tuple) for package_tuple in stack_candidate
        ]

        # Print out packages if user requested so.
        if show_packages:
            _LOGGER.info("Packages forming found stack (score: %f):", score)
            for item in stack_candidate:
                _LOGGER.info(" %r", item)

        project = Project.from_package_versions(
            packages=self.input_project.iter_dependencies(with_devel=True),
            packages_locked=package_versions_locked,
            meta=self.input_project.pipfile.meta,
        )

        yield PipelineProduct(project=project, score=score, justification=justification, graph=graph)
def test_indexes_in_meta(self):
    """Check indexes being adjusted when inserting a new package."""
    index_url = "http://tensorflow.pypi.thoth-station.ninja/index/fedora28/jemalloc/simple/tensorflow/"
    tf_package = PackageVersion(
        name="tensorflow",
        version="==1.9.0",
        develop=False,
        index=Source(index_url),
    )

    # Serialize the project and inspect the Pipfile part only.
    project_dict = Project.from_package_versions([tf_package]).to_dict()
    requirements = project_dict["requirements"]

    # No lock was computed, only the Pipfile portion is present.
    assert project_dict["requirements_locked"] is None

    expected_source = {
        "url": "http://tensorflow.pypi.thoth-station.ninja/index/fedora28/jemalloc/simple/tensorflow/",
        "verify_ssl": True,
        "name": "tensorflow-pypi-thoth-station-ninja",
    }
    assert len(requirements["source"]) == 1
    assert requirements["source"] == [expected_source]
def from_final_state(cls, *, context: Context, state: State) -> "Product":
    """Instantiate advised stack from final state produced by adviser's pipeline.

    :param context: resolver context giving access to package versions, dependents and the graph
    :param state: a final resolver state carrying the resolved dependency set
    :return: a ``Product`` wrapping the advised project, score and justification
    """
    # Local import: used to clone package-version instances below.
    import attr

    assert state.is_final(), "Instantiating product from a non-final state"

    package_versions_locked = []
    for package_tuple in state.resolved_dependencies.values():
        package_version: PackageVersion = context.get_package_version(package_tuple, graceful=False)

        # Fill package hashes before instantiating the final product.
        if not package_version.hashes:
            # We can re-use already existing package-version - in that case it already keeps hashes from
            # a previous product instantiation.
            hashes = context.graph.get_python_package_hashes_sha256(*package_tuple)
            package_version.hashes = ["sha256:" + h for h in hashes]

            if not package_version.hashes:
                _LOGGER.warning("No hashes found for package %r", package_tuple)

        # Fill environment markers by checking dependencies that introduced this dependency.
        # For direct dependencies, dependents can return an empty set (if dependency is not
        # shared with other dependencies) and marker is propagated from PackageVersion registered in
        # Context.register_package_version.
        dependents_tuples = context.dependents[package_tuple[0]][package_tuple]

        # BUGFIX: accumulate the combined marker in a local variable instead of
        # assigning ``package_version.markers`` on every iteration.  The
        # package-version instance is shared across products, so the in-place
        # mutation caused markers to leak into - and OR-accumulate across -
        # every subsequently generated product.
        environment_marker = package_version.markers
        for dependent_tuple in dependents_tuples:
            marker = context.graph.get_python_environment_marker(
                *dependent_tuple[0],
                dependency_name=package_tuple[0],
                dependency_version=package_tuple[1],
                os_name=dependent_tuple[1],
                os_version=dependent_tuple[2],
                python_version=dependent_tuple[3],
            )
            if environment_marker and marker:
                # Install dependency if any of dependents need it.
                environment_marker = f"({environment_marker}) or ({marker})"
            elif not environment_marker and marker:
                environment_marker = marker

        if environment_marker != package_version.markers:
            # Clone rather than mutate the shared instance so each product
            # carries its own marker string.
            package_version = attr.evolve(package_version, markers=environment_marker)

        package_versions_locked.append(package_version)

    advised_project = Project.from_package_versions(
        packages=list(context.project.iter_dependencies(with_devel=True)),
        packages_locked=package_versions_locked,
    )

    return cls(
        project=advised_project,
        score=state.score,
        justification=state.justification,
        advised_runtime_environment=state.advised_runtime_environment,
    )
def create_pipfile(index_url: str, framework: str, framework_version: str, pipfile_path: str):
    """Create Pipfile from inputs.

    :param index_url: Python package index URL the framework is consumed from
    :param framework: name of the framework package (e.g. ``tensorflow``)
    :param framework_version: framework version to pin (without ``==`` prefix)
    :param pipfile_path: filesystem path the resulting Pipfile is written to
    """
    packages = [
        PackageVersion(
            name=framework,  # was f"{framework}" - a no-op wrapper around a str
            version=f"=={framework_version}",
            develop=False,
            index=Source(index_url),
        )
    ]

    project = Project.from_package_versions(packages)

    # Make sure PyPI is always available as a source.
    if index_url != "https://pypi.org/simple":
        project.add_source("https://pypi.org/simple")

    project.set_python_version("3.6")

    # Serialize once and reuse for both logging and the file write; use lazy
    # %-style logging arguments instead of an eagerly-evaluated f-string.
    pipfile_content = project.pipfile.to_string()
    _LOGGER.info("Pipfile created:\n %s", pipfile_content)

    # "w" is sufficient - the file is only written, never read back.
    with open(pipfile_path, "w") as pipfile:
        pipfile.write(pipfile_content)
def from_final_state(cls, *, context: Context, state: State) -> "Product":
    """Instantiate advised stack from final state produced by adviser's pipeline.

    :param context: resolver context giving access to package versions, dependents and the graph
    :param state: a final resolver state carrying the resolved dependency set
    :return: a ``Product`` wrapping the advised project, score and justification
    """
    assert state.is_final(), "Instantiating product from a non-final state"

    package_versions_locked = []
    for package_tuple in state.resolved_dependencies.values():
        package_version: PackageVersion = context.get_package_version(package_tuple, graceful=False)

        # Fill package hashes before instantiating the final product.
        if not package_version.hashes:
            # We can re-use already existing package-version - in that case it already keeps hashes from
            # a previous product instantiation.
            hashes = context.graph.get_python_package_hashes_sha256(*package_tuple)
            package_version.hashes = ["sha256:" + h for h in hashes]

            if not package_version.hashes:
                log_once(_LOGGER, cls._LOG_HASHES, package_tuple, "No hashes found for package %r", package_tuple)

        # Fill environment markers by checking dependencies that introduced this dependency.
        # For direct dependencies, dependents can return an empty set (if dependency is not
        # shared with other dependencies) and marker is propagated from PackageVersion registered in
        # Context.register_package_version.
        dependents_tuples = context.dependents[package_tuple[0]][package_tuple]

        # Marker depends based on the stack that was resolved. Do not change package_version directly,
        # rather clone it and used a cloned version not to clash with environment markers.
        environment_markers = []
        for dependent_tuple in dependents_tuples:
            try:
                marker = context.graph.get_python_environment_marker(
                    *dependent_tuple[0],
                    dependency_name=package_tuple[0],
                    dependency_version=package_tuple[1],
                    os_name=dependent_tuple[1],
                    os_version=dependent_tuple[2],
                    python_version=dependent_tuple[3],
                )
            except NotFoundError:
                # This can happen if we do resolution that is agnostic to runtime
                # environment. In that case a dependency introduced in one runtime
                # environment does not need to co-exist in another runtime environment considering
                # marker evaluation result.
                continue

            if marker and marker not in environment_markers:
                # BUGFIX: deduplicate markers coming from multiple dependents.
                environment_markers.append(marker)
            elif not marker:
                # BUGFIX: one or multiple dependencies require this dependency to be
                # always present - previously a marker-less dependent was silently
                # skipped and markers from other dependents still restricted
                # installation.  Clear any environment markers instead.
                environment_markers.clear()
                break

        if environment_markers:
            # Install the dependency if any of the dependents need it.
            if len(environment_markers) > 1:
                markers = " or ".join(f"({m})" for m in environment_markers)
            else:
                markers = environment_markers[0]

            package_version = attr.evolve(package_version, markers=markers)

        package_versions_locked.append(package_version)

    advised_project = Project.from_package_versions(
        packages=list(context.project.iter_dependencies(with_devel=True)),
        packages_locked=package_versions_locked,
        meta=context.project.pipfile.meta,
        runtime_environment=context.project.runtime_environment,
    )

    return cls(
        project=advised_project,
        score=state.score,
        justification=state.justification,
        advised_runtime_environment=state.advised_runtime_environment,
    )
def from_final_state(cls, *, context: Context, state: State) -> "Product":
    """Instantiate advised stack from final state produced by adviser's pipeline.

    :param context: resolver context providing package versions, dependents and graph access
    :param state: a final resolver state holding the resolved dependency set
    :return: a ``Product`` wrapping the advised project together with score and justification
    """
    assert state.is_final(), "Instantiating product from a non-final state"

    package_versions_locked = []
    for package_tuple in state.resolved_dependencies.values():
        package_version: PackageVersion = context.get_package_version(package_tuple, graceful=False)

        # Fill package hashes before instantiating the final product.
        if not package_version.hashes:
            # We can re-use already existing package-version - in that case it already keeps hashes from
            # a previous product instantiation.  NOTE(review): this mutates the shared
            # package-version instance on purpose so hashes are fetched only once.
            hashes = context.graph.get_python_package_hashes_sha256(*package_tuple)
            package_version.hashes = ["sha256:" + h for h in hashes]

            if not package_version.hashes:
                # log_once keeps a per-class cache so the warning is not repeated per product.
                log_once(_LOGGER, cls._LOG_HASHES, package_tuple, "No hashes found for package %r", package_tuple)

        # Fill environment markers by checking dependencies that introduced this dependency.
        # We do it only if we have no hashes - if hashes are present, the environment marker was
        # already picked (can be set to None if no marker is present).
        # For direct dependencies, dependents can return an empty set (if dependency is not
        # shared with other dependencies) and marker is propagated from PackageVersion registered in
        # Context.register_package_version.
        dependents_tuples = context.dependents[package_tuple[0]][package_tuple]

        # Marker depends based on the stack that was resolved. Do not change package_version directly,
        # rather clone it and used a cloned version not to clash with environment markers.
        environment_markers = []
        for dependent_tuple in dependents_tuples:
            try:
                marker = context.graph.get_python_environment_marker(
                    *dependent_tuple[0],
                    dependency_name=package_tuple[0],
                    dependency_version=package_tuple[1],
                    os_name=dependent_tuple[1],
                    os_version=dependent_tuple[2],
                    python_version=dependent_tuple[3],
                )
            except NotFoundError:
                # This can happen if we do resolution that is agnostic to runtime
                # environment. In that case a dependency introduced in one runtime
                # environment does not need to co-exist in another runtime environment considering
                # marker evaluation result.
                continue

            if marker and marker not in environment_markers:
                # Deduplicate markers coming from multiple dependents.
                environment_markers.append(marker)
            elif not marker:
                # One or multiple dependencies require this dependency to be always present, clear any
                # environment markers.
                environment_markers.clear()
                break

        if environment_markers:
            # Combine markers with "or": install the dependency if any dependent needs it.
            if len(environment_markers) > 1:
                markers = " or ".join(f"({m})" for m in environment_markers)
            else:
                markers = environment_markers[0]

            # Clone instead of mutating - the same package-version instance may be
            # shared with other products that resolved different dependents.
            package_version = attr.evolve(
                package_version,
                markers=markers,
            )

        package_versions_locked.append(package_version)

    advised_project = Project.from_package_versions(
        packages=list(context.project.iter_dependencies(with_devel=True)),
        packages_locked=package_versions_locked,
        meta=context.project.pipfile.meta,
        runtime_environment=context.project.runtime_environment,
    )

    # Keep thoth section untouched.
    advised_project.pipfile.thoth = context.project.pipfile.thoth

    # Merge in any externally supplied justification - THOTH_ADVISER_METADATA is
    # presumably a JSON document whose "thoth.adviser".justification entries are
    # prepended to the state's own justification (TODO confirm against callers).
    justification_metadata: List[Dict[str, Any]] = []
    metadata = os.getenv("THOTH_ADVISER_METADATA")
    if metadata:
        try:
            metadata_content = json.loads(metadata)
            justification_metadata = (metadata_content.get("thoth.adviser") or {}).get("justification") or []
        except Exception:
            # Best effort: malformed metadata must not break product creation; log with traceback.
            _LOGGER.exception("Failed to parse adviser metadata")

    return cls(
        project=advised_project,
        score=state.score,
        justification=justification_metadata + state.justification,
        advised_runtime_environment=state.advised_runtime_environment,
        advised_manifest_changes=state.advised_manifest_changes,
    )