def _instantiate_project(
    requirements: str,
    requirements_locked: Optional[str] = None,
    *,
    runtime_environment: Optional[RuntimeEnvironment] = None,
    constraints: Optional[str] = None,
):
    """Create Project instance based on arguments passed to CLI.

    Each of ``requirements``, ``requirements_locked`` and ``constraints`` may be
    either a path to a file or the content itself (as gathered from an
    environment variable, with new lines escaped).
    """
    try:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()
    except OSError:  # FileNotFoundError is a subclass of OSError.
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

    if requirements_locked:
        try:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)
    pipfile_lock = None
    if requirements_locked and requirements_locked != "null":
        pipfile_lock = PipfileLock.from_string(requirements_locked, pipfile)

    constraints_instance = None
    if constraints:
        try:
            with open(constraints, "r") as constraints_file:
                constraints_content = constraints_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            constraints_content = constraints.replace("\\n", "\n")

        try:
            # Constraints can be supplied either as JSON or as a
            # requirements-style text file.
            constraints_instance = Constraints.from_dict(json.loads(constraints_content))
        except json.decoder.JSONDecodeError:
            constraints_instance = Constraints.from_string(constraints_content)

    runtime_environment = runtime_environment or RuntimeEnvironment.from_dict({})
    if not runtime_environment.platform:
        runtime_environment.platform = _DEFAULT_PLATFORM

    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment,
        constraints=constraints_instance or Constraints(),
    )

    return project
def get_project(
    self,
    runtime_environment_name: Optional[str] = None,
    *,
    missing_dir_ok: bool = False,
) -> Project:
    """Get the given overlay.

    Resolve the overlay directory for ``runtime_environment_name`` and build a
    ``Project`` from the requirements files found there, honoring the
    configured ``requirements_format`` (``pipenv`` or a pip-tools style).
    """
    path = self.get_overlays_directory(
        runtime_environment_name=runtime_environment_name,
        missing_dir_ok=missing_dir_ok,
    )
    runtime_environment = RuntimeEnvironment.from_dict(
        self.get_runtime_environment(runtime_environment_name))

    if self.requirements_format == "pipenv":
        pipfile_lock_path: Optional[str] = os.path.join(
            path, "Pipfile.lock")
        # Fall back to no lock file when Pipfile.lock is not present on disk.
        if pipfile_lock_path and not os.path.exists(pipfile_lock_path):
            pipfile_lock_path = None

        pipfile_path = os.path.join(path, "Pipfile")
        if not os.path.isfile(pipfile_path):
            # Materialize an empty Pipfile so a project can always be constructed.
            if not os.path.isdir(path):
                _LOGGER.info("Creating directory structure in %r", path)
                os.makedirs(path, exist_ok=True)

            pipfile = Pipfile.from_dict({})
            pipfile.to_file(path=pipfile_path)

        project = Project.from_files(
            pipfile_path=pipfile_path,
            pipfile_lock_path=pipfile_lock_path,
            runtime_environment=runtime_environment,
            without_pipfile_lock=pipfile_lock_path is None,
        )
    else:
        # pip/pip-tools style: prefer requirements.in, fall back to requirements.txt.
        requirements_in_file_path = os.path.join(path, "requirements.in")
        if not os.path.isfile(requirements_in_file_path):
            requirements_txt_file_path = os.path.join(
                path, "requirements.txt")
            if os.path.isfile(requirements_txt_file_path):
                _LOGGER.warning("Using %r for direct dependencies",
                                requirements_in_file_path)
                project = Project.from_pip_compile_files(
                    requirements_path=requirements_txt_file_path,
                    requirements_lock_path=None,
                    allow_without_lock=True,
                    runtime_environment=runtime_environment,
                )
            else:
                raise NotImplementedError(
                    "No requirements.txt/requirements.in files found, it is recommended to "
                    "use Pipenv files for managing dependencies")
        else:
            project = Project.from_pip_compile_files(
                requirements_path=requirements_in_file_path,
                requirements_lock_path=None,
                allow_without_lock=True,
                runtime_environment=runtime_environment,
            )

    return project
def test_acceptable_with_error(self) -> None:
    """Test accepted with an error."""
    package_version, project = self._get_case()
    # A solver error is reported for the package, but the sieve is configured
    # below to keep such packages anyway.
    (
        GraphDatabase.should_receive("has_python_solver_error")
        .with_args(
            package_version.name,
            package_version.locked_version,
            package_version.index.url,
            os_name=None,
            os_version=None,
            python_version=None,
        )
        .and_return(True)
        .once()
    )

    context = flexmock(
        graph=GraphDatabase(),
        project=flexmock(runtime_environment=RuntimeEnvironment.from_dict({})),
    )
    with SolvedSieve.assigned_context(context):
        sieve = SolvedSieve()
        # ``without_error=False`` means packages with solver errors are accepted.
        sieve.update_configuration({"without_error": False})
        assert list(sieve.run(p for p in [package_version])) == [package_version]
def __init__(self, runtime_environment: RuntimeEnvironment = None, graph_db=None):
    """Instantiate the graph release fetcher with an optional graph database adapter."""
    super().__init__()
    # Default to an empty runtime environment when none is supplied.
    environment = runtime_environment or RuntimeEnvironment.from_dict({})
    self.runtime_environment = environment
    self._graph_db = graph_db
def from_files(
    cls,
    pipfile_path: Optional[str] = None,
    pipfile_lock_path: Optional[str] = None,
    *,
    runtime_environment: Optional[RuntimeEnvironment] = None,
    without_pipfile_lock: bool = False,
):
    """Create project from Pipfile and Pipfile.lock files.

    Raises ``FileLoadError`` when either file cannot be loaded.  When no path
    is given the loaders fall back to files in the current working directory.
    """
    try:
        pipfile = Pipfile.from_file(pipfile_path)
    except Exception as exc:
        # Balanced parenthesis after the path (was unbalanced before).
        raise FileLoadError(
            f"Failed to load Pipfile (path: {os.getcwd() if not pipfile_path else pipfile_path}): {str(exc)}"
        ) from exc

    pipfile_lock = None
    if not without_pipfile_lock:
        try:
            pipfile_lock = PipfileLock.from_file(pipfile_lock_path, pipfile=pipfile)
        except Exception as exc:
            raise FileLoadError(
                f"Failed to load Pipfile.lock "
                f"(path: {os.getcwd() if not pipfile_lock_path else pipfile_lock_path}): {str(exc)}"
            ) from exc

    return cls(
        pipfile,
        pipfile_lock,
        runtime_environment=runtime_environment
        if runtime_environment
        else RuntimeEnvironment.from_dict({}),
    )
def test_os_sieve_no_error(self):
    """Test no error raised if no packages satisfy OS specific requirements."""
    aicoe_index = Source(
        "https://tensorflow.pypi.thoth-station.ninja/index/fedora/30/jemalloc/simple/"
    )
    tf_package = PackageVersion(
        name="tensorflow",
        version="==1.9.0",
        index=aicoe_index,
        develop=False,
    )
    sieve_context = SieveContext.from_package_versions([tf_package])

    runtime_environment = RuntimeEnvironment.from_dict(
        {"operating_system": {"name": "ubi", "version": "9"}}
    )
    project = Project.from_strings(
        pipfile_str=self._PIPFILE_CONTENT_AICOE,
        runtime_environment=runtime_environment,
    )

    os_sieve = OperatingSystemSieve(graph=None, project=project)
    os_sieve.run(sieve_context)

    # The only candidate must survive even though its OS does not match.
    assert set(sieve_context.iter_direct_dependencies_tuple()) == {
        (
            "tensorflow",
            "1.9.0",
            "https://tensorflow.pypi.thoth-station.ninja/index/fedora/30/jemalloc/simple/",
        ),
    }
def test_os_sieve(self):
    """Test removal of packages based on AICoE package source index configuration.

    We keep only TensorFlow release which is from PyPI and manylinux2010 build as
    there is no match on OS release.
    """
    package_versions = self._get_packages_aicoe()
    sieve_context = SieveContext.from_package_versions(package_versions)
    # Use a rhel 7.5 runtime environment so the OS-specific AICoE indexes with
    # a different OS release are filtered out.
    project = Project.from_strings(
        pipfile_str=self._PIPFILE_CONTENT_AICOE,
        runtime_environment=RuntimeEnvironment.from_dict(
            {"operating_system": {
                "name": "rhel",
                "version": "7.5"
            }}))
    os_sieve = OperatingSystemSieve(graph=None, project=project)
    os_sieve.run(sieve_context)
    expected = {
        ("pytest", "3.0.0", "https://pypi.org/simple"),
        ("tensorflow", "1.9.0", "https://pypi.org/simple"),
        # Filtering out this entry is left on another sieve which ensures runtime environment compatibility.
        ("tensorflow", "1.9.0",
         "https://tensorflow.pypi.thoth-station.ninja/index/manylinux2010/jemalloc/simple/"
         ),
        # These are filtered out:
        # ("tensorflow", "1.9.0", "https://tensorflow.pypi.thoth-station.ninja/index/os/fedora/30/jemalloc/simple/"),
        # ("tensorflow", "1.9.0", "https://tensorflow.pypi.thoth-station.ninja/index/os/rhel/7.6/jemalloc/simple/")
    }
    assert set(sieve_context.iter_direct_dependencies_tuple()) == expected
def test_should_include_runtime_environment(
    self,
    builder_context: PipelineBuilderContext,
    prescription_runtime_environments: Dict[str, Any],
    used_runtime_environment_dict: Dict[str, Any],
    include: bool,
) -> None:
    """Test parsing and including the given should include entry."""
    # Validate the prescription snippet against its schema first.
    PRESCRIPTION_UNIT_SHOULD_INCLUDE_RUNTIME_ENVIRONMENTS_SCHEMA(
        prescription_runtime_environments)
    builder_context.project.runtime_environment = RuntimeEnvironment.from_dict(
        used_runtime_environment_dict)
    UnitPrescription._PRESCRIPTION = {
        "name": "Bar",
        "should_include": {
            "adviser_pipeline": True,
            "runtime_environments": prescription_runtime_environments,
        },
    }
    # The unit must not already be included in the pipeline for the check to run.
    builder_context.should_receive("is_included").with_args(
        UnitPrescription).and_return(False).once()
    assert builder_context.is_adviser_pipeline()
    assert UnitPrescription._should_include_base(
        builder_context) == include
def step_impl(context) -> None:
    """Retrieve Dependency Monkey report."""
    url = (
        f"{context.scheme}://{context.management_api_host}/api/v1/dependency-monkey/"
        f"python/{context.analysis_id}/report"
    )
    response = requests.get(url)
    assert response.status_code == 200, f"Error in HTTP status code {response.status_code} for {url!r}: {response.text}"

    response_body = response.json()
    assert "report" in response_body, f"No report available in the response: {response_body}"
    assert "result" in response_body["report"], f"No result available in the response: {response_body}"

    # Bind the nested structures once and assert against them.
    result = response_body["report"]["result"]
    parameters = result["parameters"]

    assert result["error"] is False
    assert parameters["pipeline"] == context.pipeline
    assert parameters["requirements"] == context.pipfile
    assert parameters["runtime_environment"] == RuntimeEnvironment.from_dict(context.environment).to_dict()
    assert parameters["context"] == context.amun_context
    assert parameters["count"] == context.count
    assert parameters["predictor"] == context.predictor
    assert (parameters["predictor_config"] or {}) == (context.predictor_config or {})
def test_run(
    self,
    context: Context,
    runtime_environment_dict: Dict[str, Any],
    expected: List[Dict[str, str]],
    use_constraints: bool,
    use_library_usage: bool,
) -> None:
    """Test providing information about the runtime environment."""
    if use_constraints:
        context.project.constraints = Constraints.from_string("flask>=1.3.0")

    if use_library_usage:
        context.library_usage = {"flask": ["flask.App"]}

    context.project.runtime_environment = RuntimeEnvironment.from_dict(runtime_environment_dict)

    # No stack info must be present before the unit runs.
    assert not context.stack_info

    unit = self.UNIT_TESTED()
    with unit.assigned_context(context):
        result = unit.run()

    assert result is None
    assert context.stack_info == expected
    assert self.verify_justification_schema(context.stack_info)
def test_not_solved_without_error(self, context: Context) -> None:
    """Test a not found package is not accepted by sieve."""
    package_version, project = self._get_case()
    # A solver error is reported for the package; with the default sieve
    # configuration such packages are filtered out.
    (GraphDatabase.should_receive("has_python_solver_error").with_args(
        package_version.name,
        package_version.locked_version,
        package_version.index.url,
        os_name=None,
        os_version=None,
        python_version=None,
    ).and_return(True).once())

    context.graph = GraphDatabase()
    context.project = flexmock(
        runtime_environment=RuntimeEnvironment.from_dict({}))

    assert not context.stack_info, "No stack info should be provided before test run"

    sieve = SolvedSieve()
    sieve.pre_run()
    with SolvedSieve.assigned_context(context):
        assert list(sieve.run(p for p in [package_version])) == []

    assert context.stack_info, "No stack info provided by the pipeline unit"
    assert self.verify_justification_schema(context.stack_info) is True
def _instantiate_project(
    requirements: str,
    requirements_locked: typing.Optional[str],
    files: bool,
    runtime_environment: typing.Optional[RuntimeEnvironment] = None,
):
    """Create Project instance based on arguments passed to CLI.

    If ``files`` is True the arguments are treated as file paths, otherwise
    they hold the actual requirements content gathered from environment
    variables (with escaped new lines).
    """
    if files:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()

        if requirements_locked:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
            del requirements_file
    else:
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

        if requirements_locked:
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)
    pipfile_lock = (
        PipfileLock.from_string(requirements_locked, pipfile)
        if requirements_locked
        else None
    )
    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment or RuntimeEnvironment.from_dict({}),
    )

    return project
def _instantiate_project(
    requirements: str,
    requirements_locked: Optional[str] = None,
    runtime_environment: Optional[RuntimeEnvironment] = None,
):
    """Create Project instance based on arguments passed to CLI.

    ``requirements`` and ``requirements_locked`` may be either file paths or
    the content itself (as gathered from environment variables).
    """
    try:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()
    except OSError:  # FileNotFoundError is a subclass of OSError.
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

    if requirements_locked:
        try:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)
    pipfile_lock = (
        PipfileLock.from_string(requirements_locked, pipfile) if requirements_locked else None
    )
    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment or RuntimeEnvironment.from_dict({}),
    )

    return project
def test_to_dict(self, pipeline_config: PipelineConfig) -> None:
    """Test conversion to a dict."""
    report = Report(count=3, pipeline=pipeline_config)
    project = flexmock()
    project_dict = {"aresto momentum": "avada kedavra"}
    # ``to_dict`` is expected exactly twice: once via ``product.to_dict()`` in
    # the expected value below and once while serializing the report itself.
    project.should_receive("to_dict").with_args(
        keep_thoth_section=True).and_return(
            project_dict).twice()  # In test and in the report.
    product = Product(
        project=project,
        score=0.666,
        justification=[{
            "gryffindor": "le gladium leviosa"
        }],
        advised_runtime_environment=RuntimeEnvironment.from_dict(
            {"python_version": "3.6"}),
    )
    report.add_product(product)

    assert report.product_count() == 1
    assert list(report.iter_products()) == [product]
    assert report.to_dict() == {
        "pipeline": pipeline_config.to_dict(),
        "products": [product.to_dict()],
        "stack_info": [],
        "resolver_iterations": 0,
        "accepted_final_states_count": 0,
        "discarded_final_states_count": 0,
    }
def project() -> Project:
    """Create a fixture for a project representation."""
    # Allow flexmock-based expectations on these classes in tests using this fixture.
    flexmock(Project)
    flexmock(RuntimeEnvironment)

    projects_dir = AdviserTestCase.data_dir / "projects"
    return Project.from_files(
        pipfile_path=str(projects_dir / "Pipfile"),
        pipfile_lock_path=str(projects_dir / "Pipfile.lock"),
        runtime_environment=RuntimeEnvironment.from_dict({}),
    )
def from_strings(
    cls,
    pipfile_str: str,
    pipfile_lock_str: str = None,
    *,
    runtime_environment: RuntimeEnvironment = None
):
    """Create project from Pipfile and Pipfile.lock loaded into strings."""
    pipfile = Pipfile.from_string(pipfile_str)
    # The lock is optional - an empty/None string yields a project without a lock.
    pipfile_lock = (
        PipfileLock.from_string(pipfile_lock_str, pipfile) if pipfile_lock_str else None
    )
    return cls(
        pipfile,
        pipfile_lock,
        runtime_environment=runtime_environment or RuntimeEnvironment.from_dict({}),
    )
def state() -> State:
    """A fixture for a non-final state."""
    unresolved = OrderedDict()
    unresolved["flask"] = ("flask", "1.1.1", "https://pypi.org/simple")

    resolved = OrderedDict()
    resolved["hexsticker"] = ("hexsticker", "1.0.0", "https://pypi.org/simple")

    state = State(
        score=0.1,
        unresolved_dependencies=unresolved,
        resolved_dependencies=resolved,
        advised_runtime_environment=RuntimeEnvironment.from_dict({"python_version": "3.6"}),
    )
    state.add_justification([{"foo": "bar"}, {"bar": "baz"}])
    return state
def clone(self) -> "State":
    """Return a shallow copy of this state that can be used as a next state."""
    advised_env = None
    if self.advised_runtime_environment:
        # Round-trip through a dict so the clone owns an independent instance.
        advised_env = RuntimeEnvironment.from_dict(self.advised_runtime_environment.to_dict())

    return self.__class__(
        score=self.score,
        latest_version_offset=self.latest_version_offset,
        iteration=self.iteration,
        iteration_states_added=self.iteration_states_added,
        unresolved_dependencies=OrderedDict(self.unresolved_dependencies),
        resolved_dependencies=OrderedDict(self.resolved_dependencies),
        advised_runtime_environment=advised_env,
        justification=list(self.justification),
    )
def _get_state() -> State:
    """Create a fixture for a non-final state."""
    flask_triple = ("flask", "1.1.1", "https://pypi.org/simple")
    state = State(
        score=0.1,
        # Unresolved dependencies are keyed by the tuple's id, per resolver convention.
        unresolved_dependencies={"flask": {id(flask_triple): flask_triple}},
        resolved_dependencies={
            "hexsticker": ("hexsticker", "1.0.0", "https://pypi.org/simple")
        },
        advised_runtime_environment=RuntimeEnvironment.from_dict({"python_version": "3.6"}),
    )
    state.add_justification(AdviserTestCase.JUSTIFICATION_SAMPLE_1)
    return state
def test_to_dict_metadata(self, pipeline_config: PipelineConfig) -> None:
    """Test conversion to a dict with passed metadata."""
    report = Report(count=3, pipeline=pipeline_config)
    project = flexmock()
    project_dict = {"aresto momentum": "avada kedavra"}
    project.should_receive("to_dict").with_args(
        keep_thoth_section=True).and_return(project_dict)
    product = Product(
        project=project,
        score=0.666,
        justification=[{
            "gryffindor": "le gladium leviosa"
        }],
        advised_runtime_environment=RuntimeEnvironment.from_dict(
            {"python_version": "3.6"}),
    )
    report.add_product(product)

    stack_info = [{"type": "WARNING", "message": "Hello, metadata"}]
    stack_info_metadata = {
        "thoth.adviser": {
            "stack_info": stack_info,
        }
    }
    report.set_stack_info([{"foo": "bar"}])

    assert "THOTH_ADVISER_METADATA" not in os.environ
    os.environ["THOTH_ADVISER_METADATA"] = json.dumps(stack_info_metadata)
    try:
        assert report.product_count() == 1
        assert list(report.iter_products()) == [product]
        assert report.to_dict() == {
            "pipeline": pipeline_config.to_dict(),
            "products": [product.to_dict()],
            "stack_info": list(chain(stack_info, report.stack_info)),
            "resolver_iterations": 0,
            "accepted_final_states_count": 0,
            "discarded_final_states_count": 0,
        }
    finally:
        # Always remove the env var so it does not leak into other tests; the
        # previous ``except Exception`` variant left it set on the success path.
        os.environ.pop("THOTH_ADVISER_METADATA")
def get_configuration_check_report(
        self) -> Optional[Tuple[dict, List[dict]]]:
    """Get a report on project configuration for the given runtime environment.

    Returns ``None`` when there is nothing to report; otherwise a tuple of the
    recommended runtime configuration dict (or ``None`` when no configuration
    change is suggested) and a list of justification entries.
    """
    result = []
    changes_in_config = False
    # We check Python version if there is used Pipfile, it should match runtime configuration.
    pipfile_python_version = self.pipfile.meta.requires.get(
        "python_version")
    runtime_python_version = self.runtime_environment.python_version
    # Keep the current runtime environment untouched.
    recommended_runtime_configuration_entry = RuntimeEnvironment.from_dict(
        self.runtime_environment.to_dict())

    if not pipfile_python_version and not runtime_python_version:
        result.append({
            "type":
            "WARNING",
            "justification":
            "Please specify Python version in Pipfile using `pipenv --python <VERSION>` "
            "and in Thoth's configuration file to have reproducible deployment and "
            "recommendations targeting specific Python version",
        })
    elif pipfile_python_version and not runtime_python_version:
        changes_in_config = True
        recommended_runtime_configuration_entry.python_version = pipfile_python_version
        result.append({
            "type":
            "WARNING",
            "justification":
            "Use Python version in Thoth's configuration file to have "
            "recommendations matching configuration in Pipfile, configured Python version "
            f"in Pipfile is {pipfile_python_version}",
        })

    # if pipfile_python_version and runtime_python_version:
    # This case is not related to adjustments in Thoth's configuration but rather in Pipfile - that is handled
    # in scoring.

    if not result:
        return None

    # NOTE: this parses as ``(X if changes_in_config else None), result`` — a
    # two-item tuple, with the first item conditionally ``None``.
    return recommended_runtime_configuration_entry.to_dict(
        without_none=True) if changes_in_config else None, result
def clone(self) -> "State":
    """Return a shallow copy of this state that can be used as a next state."""
    advised_env = None
    if self.advised_runtime_environment:
        # Round-trip through a dict so the clone owns an independent instance.
        advised_env = RuntimeEnvironment.from_dict(self.advised_runtime_environment.to_dict())

    # Copy the outer mapping and each per-package mapping so the clone can be
    # mutated without touching this state.
    unresolved = self.unresolved_dependencies.copy()
    for name in unresolved:
        unresolved[name] = unresolved[name].copy()

    return self.__class__(
        score=self.score,
        iteration=self.iteration,
        unresolved_dependencies=unresolved,
        resolved_dependencies=self.resolved_dependencies.copy(),
        advised_runtime_environment=advised_env,
        advised_manifest_changes=self.advised_manifest_changes.copy(),
        justification=self.justification.copy(),
        parent=weakref.ref(self),
    )
def test_not_found(self) -> None:
    """Test a not found package is not accepted by sieve."""
    package_version, project = self._get_case()
    # The solver has no record of this package; ``NotFoundError`` means it
    # must be filtered out by the sieve.
    (GraphDatabase.should_receive("has_python_solver_error").with_args(
        package_version.name,
        package_version.locked_version,
        package_version.index.url,
        os_name=None,
        os_version=None,
        python_version=None,
    ).and_raise(NotFoundError).once())

    context = flexmock(
        graph=GraphDatabase(),
        project=flexmock(
            runtime_environment=RuntimeEnvironment.from_dict({})),
    )
    with SolvedSieve.assigned_context(context):
        sieve = SolvedSieve()
        assert list(sieve.run(p for p in [package_version])) == []
def from_files(
    cls,
    pipfile_path: str = None,
    pipfile_lock_path: str = None,
    *,
    runtime_environment: RuntimeEnvironment = None,
    without_pipfile_lock: bool = False,
):
    """Create project from Pipfile and Pipfile.lock files.

    Falls back to ``Pipfile``/``Pipfile.lock`` in the current working directory
    when no paths are given.  Raises ``FileLoadError`` on load failures.
    """
    try:
        with open(pipfile_path or "Pipfile", "r") as pipfile_file:
            pipfile_str = pipfile_file.read()
    except Exception as exc:
        # Balanced parenthesis after the path (was unbalanced before).
        raise FileLoadError(
            f"Failed to load Pipfile (path: {os.getcwd() if not pipfile_path else pipfile_path}): {str(exc)}"
        ) from exc

    pipfile = Pipfile.from_string(pipfile_str)

    pipfile_lock = None
    if not without_pipfile_lock:
        try:
            with open(pipfile_lock_path or "Pipfile.lock", "r") as pipfile_lock_file:
                pipfile_lock_str = pipfile_lock_file.read()
        except Exception as exc:
            raise FileLoadError(
                f"Failed to load Pipfile.lock "
                f"(path: {os.getcwd() if not pipfile_lock_path else pipfile_lock_path}): {str(exc)}"
            ) from exc

        pipfile_lock = PipfileLock.from_string(pipfile_lock_str, pipfile=pipfile)

    return cls(
        pipfile,
        pipfile_lock,
        runtime_environment=runtime_environment
        if runtime_environment
        else RuntimeEnvironment.from_dict({}),
    )
def test_os_sieve_no_remove(self):
    """Test the TensorFlow package is not removed as it has no other candidate."""
    package_versions = [
        PackageVersion(
            name="tensorflow",
            version="==1.9.0",
            index=Source(
                "https://tensorflow.pypi.thoth-station.ninja/index/fedora/30/jemalloc/simple/"
            ),
            develop=False,
        ),
        PackageVersion(
            name="pytest",
            version="==3.0.0",
            index=Source("https://pypi.org/simple"),
            develop=True,
        ),
    ]
    sieve_context = SieveContext.from_package_versions(package_versions)
    # Use a rhel 7.5 runtime environment; the fedora-specific TensorFlow index
    # does not match it, but the package is kept as it is the only candidate.
    project = Project.from_strings(
        pipfile_str=self._PIPFILE_CONTENT_AICOE,
        runtime_environment=RuntimeEnvironment.from_dict(
            {"operating_system": {
                "name": "rhel",
                "version": "7.5"
            }}))
    os_sieve = OperatingSystemSieve(graph=None, project=project)
    os_sieve.run(sieve_context)
    expected = {
        ("pytest", "3.0.0", "https://pypi.org/simple"),
        ("tensorflow", "1.9.0",
         "https://tensorflow.pypi.thoth-station.ninja/index/fedora/30/jemalloc/simple/"
         ),
    }
    assert set(sieve_context.iter_direct_dependencies_tuple()) == expected
def from_files(
    cls,
    pipfile_path: str = None,
    pipfile_lock_path: str = None,
    *,
    runtime_environment: RuntimeEnvironment = None,
    without_pipfile_lock: bool = False,
):
    """Create project from Pipfile and Pipfile.lock files."""
    # Default to files in the current working directory when no path is given.
    with open(pipfile_path or "Pipfile", "r") as f:
        pipfile = Pipfile.from_string(f.read())

    pipfile_lock = None
    if not without_pipfile_lock:
        with open(pipfile_lock_path or "Pipfile.lock", "r") as f:
            pipfile_lock = PipfileLock.from_string(f.read(), pipfile)

    environment = runtime_environment or RuntimeEnvironment.from_dict({})
    return cls(pipfile, pipfile_lock, runtime_environment=environment)
def test_not_solved_without_error(self, context: Context) -> None:
    """Test a not found package is not accepted by sieve."""
    package_version, project = self._get_case()
    # A solver error is reported; the default configuration removes the package.
    (GraphDatabase.should_receive("has_python_solver_error").with_args(
        package_version.name,
        package_version.locked_version,
        package_version.index.url,
        os_name=None,
        os_version=None,
        python_version=None,
    ).and_return(True).once())

    context.graph = GraphDatabase()
    context.project = flexmock(
        runtime_environment=RuntimeEnvironment.from_dict({}))

    assert not context.stack_info, "No stack info should be provided before test run"

    sieve = SolvedSieve()
    sieve.pre_run()
    with SolvedSieve.assigned_context(context):
        assert list(sieve.run(p for p in [package_version])) == []

    # ``post_run`` aggregates removed versions into a user-facing stack info entry.
    sieve.post_run()
    assert context.stack_info, "No stack info provided by the pipeline unit"
    assert context.stack_info == [{
        "link": jl("install_error"),
        "message": "The following versions of 'tensorflow' from "
        "'https://pypi.org/simple' were removed due to installation "
        "issues in the target environment: 2.0.0",
        "type": "WARNING",
    }]
    assert self.verify_justification_schema(context.stack_info) is True
def test_from_final_state(self, context: Context) -> None:
    """Test instantiating product from a final state.

    Walks the whole pipeline: a resolved final state plus mocked graph
    responses must produce a product with locked packages, hashes, markers
    and justification merged from ``THOTH_ADVISER_METADATA``.
    """
    state = State(
        score=0.5,
        resolved_dependencies={
            "daiquiri": ("daiquiri", "1.6.0", "https://pypi.org/simple"),
            "numpy": ("numpy", "1.17.4", "https://pypi.org/simple"),
            "tensorflow": ("tensorflow", "2.0.0", "https://pypi.org/simple"),
        },
        unresolved_dependencies={},
        advised_runtime_environment=RuntimeEnvironment.from_dict({"python_version": "3.6"}),
    )
    state.add_justification(self.JUSTIFICATION_SAMPLE_1)

    pypi = Source("https://pypi.org/simple")
    pv_daiquiri_locked = PackageVersion(name="daiquiri", version="==1.6.0", index=pypi, develop=False)
    pv_numpy_locked = PackageVersion(name="numpy", version="==1.17.4", index=pypi, develop=False)
    pv_tensorflow_locked = PackageVersion(name="tensorflow", version="==2.0.0", index=pypi, develop=False)

    # Ordered expectations: for each resolved package the context is asked for
    # the package version and the graph for its artifact hashes.
    context.should_receive("get_package_version").with_args(
        ("daiquiri", "1.6.0", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_daiquiri_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "daiquiri", "1.6.0", "https://pypi.org/simple"
    ).and_return(["000"]).ordered()

    context.should_receive("get_package_version").with_args(
        ("numpy", "1.17.4", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_numpy_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "numpy", "1.17.4", "https://pypi.org/simple"
    ).and_return(["111"]).ordered()

    context.should_receive("get_package_version").with_args(
        ("tensorflow", "2.0.0", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_tensorflow_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "tensorflow", "2.0.0", "https://pypi.org/simple"
    ).and_return(["222"]).ordered()

    # Direct dependencies as stated in the Pipfile.
    pv_daiquiri = PackageVersion(name="daiquiri", version="*", index=pypi, develop=False)
    pv_tensorflow = PackageVersion(name="tensorflow", version=">=2.0.0", index=pypi, develop=False)

    project = flexmock(
        pipfile=Pipfile.from_string(self._PIPFILE),
        runtime_environment=RuntimeEnvironment.from_dict({"operating_system": {"name": "rhel"}}),
    )
    project.should_receive("iter_dependencies").with_args(with_devel=True).and_return(
        [pv_daiquiri, pv_tensorflow]
    ).once()

    context.project = project
    # Dependency graph edges: tensorflow depends on numpy.
    context.dependencies = {
        "daiquiri": {
            ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(),
        },
        "numpy": {("numpy", "1.17.4", "https://pypi.org/simple"): set()},
        "tensorflow": {
            ("tensorflow", "2.0.0", "https://pypi.org/simple"): {("numpy", "1.17.4", "https://pypi.org/simple")}
        },
    }
    context.dependents = {
        "daiquiri": {
            ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(),
        },
        "numpy": {
            ("numpy", "1.17.4", "https://pypi.org/simple"): {
                (
                    ("tensorflow", "2.0.0", "https://pypi.org/simple"),
                    "fedora",
                    "31",
                    "3.7",
                )
            }
        },
        "tensorflow": {("tensorflow", "2.0.0", "https://pypi.org/simple"): set()},
    }
    # The environment marker recorded for the tensorflow->numpy edge ends up
    # in the generated lock file entry for numpy.
    context.graph.should_receive("get_python_environment_marker").with_args(
        "tensorflow",
        "2.0.0",
        "https://pypi.org/simple",
        dependency_name="numpy",
        dependency_version="1.17.4",
        os_name="fedora",
        os_version="31",
        python_version="3.7",
        marker_evaluation_result=True,
    ).and_return("python_version >= '3.7'").once()

    assert "THOTH_ADVISER_METADATA" not in os.environ
    metadata_justification = {"thoth.adviser": {"justification": [{"bar": "baz"}]}}
    os.environ["THOTH_ADVISER_METADATA"] = json.dumps(metadata_justification)

    try:
        product = Product.from_final_state(state=state, context=context)
    finally:
        os.environ.pop("THOTH_ADVISER_METADATA")

    assert product.score == state.score
    # Metadata justification is prepended to the state's justification.
    assert product.justification == list(
        chain(metadata_justification["thoth.adviser"]["justification"], state.justification)
    )
    assert product.advised_runtime_environment == state.advised_runtime_environment
    assert product.project.to_dict() == {
        "constraints": [],
        "requirements": {
            "packages": {
                "daiquiri": {"index": "pypi-org-simple", "version": "*"},
                "tensorflow": {"index": "pypi-org-simple", "version": ">=2.0.0"},
            },
            "dev-packages": {},
            "requires": {"python_version": "3.7"},
            "source": [
                {
                    "url": "https://pypi.org/simple",
                    "verify_ssl": True,
                    "name": "pypi-org",
                },
                {
                    "url": "https://pypi.org/simple",
                    "verify_ssl": True,
                    "name": "pypi-org-simple",
                },
            ],
            "thoth": {
                "allow_prereleases": {"black": True},
            },
        },
        "requirements_locked": {
            "_meta": {
                "sources": [
                    {"name": "pypi-org", "url": "https://pypi.org/simple", "verify_ssl": True},
                    {
                        "url": "https://pypi.org/simple",
                        "verify_ssl": True,
                        "name": "pypi-org-simple",
                    },
                ],
                "requires": {"python_version": "3.7"},
                "hash": {"sha256": "6cc8365e799b949fb6cc564cea2d8e0e8a782ab676a006e65abbe14621b93381"},
                "pipfile-spec": 6,
            },
            "default": {
                "daiquiri": {
                    "version": "==1.6.0",
                    "hashes": ["sha256:000"],
                    "index": "pypi-org-simple",
                },
                "numpy": {
                    "version": "==1.17.4",
                    "hashes": ["sha256:111"],
                    "index": "pypi-org-simple",
                    "markers": "python_version >= '3.7'",
                },
                "tensorflow": {
                    "version": "==2.0.0",
                    "hashes": ["sha256:222"],
                    "index": "pypi-org-simple",
                },
            },
            "develop": {},
        },
        "runtime_environment": {
            "hardware": {"cpu_family": None, "cpu_model": None, "gpu_model": None},
            "operating_system": {"name": "rhel", "version": None},
            "python_version": None,
            "cuda_version": None,
            "labels": None,
            "cudnn_version": None,
            "name": None,
            "platform": None,
            "base_image": None,
            "mkl_version": None,
            "openblas_version": None,
            "openmpi_version": None,
            "recommendation_type": None,
        },
    }
def update_keb_installation():
    """Load requirements files from the repository and update Kebechet installation records.

    For each runtime environment configured in the repository's ``.thoth.yaml``
    (one per overlay when ``overlays_dir`` is set, otherwise the single default
    environment), download the dependency files, build a thamos ``Project`` and
    pass its contents to the storages update function.  Installation records for
    runtime environments no longer present in the configuration are deleted.
    """
    if _SLUG is None:
        _LOGGER.info("No slug present, continuing to next step in task.")
        return

    service = GithubService(
        github_app_id=os.getenv("GITHUB_APP_ID"),
        github_app_private_key_path=os.getenv("GITHUB_PRIVATE_KEY_PATH"),
    )  # TODO: extend to use other services

    # Keep the GitHub repository handle separate from the thamos Project built
    # below — rebinding a single ``project`` name would break
    # ``get_file_content`` on subsequent loop iterations and mix up
    # ``github_repo`` / ``pipfile`` lookups.
    gh_project = service.get_project(namespace=_SLUG.split("/")[0], repo=_SLUG.split("/")[1])
    raw_thoth_config = gh_project.get_file_content(".thoth.yaml")

    with TemporaryDirectory() as repo_path, cwd(repo_path):
        thoth_config.load_config_from_string(raw_thoth_config)
        requirements_format = thoth_config.content["requirements_format"]
        overlays_dir = thoth_config.content.get("overlays_dir")

        to_update: List[RuntimeEnvironment]
        if overlays_dir is not None:
            to_update = [RuntimeEnvironment.from_dict(r) for r in thoth_config.list_runtime_environments()]
        else:
            to_update = [RuntimeEnvironment.from_dict(thoth_config.get_runtime_environment())]

        for runtime_environment in to_update:
            # Overlays keep per-environment files under <overlays_dir>/<env name>/.
            prefix = f"{overlays_dir}/{runtime_environment.name}/" if overlays_dir else ""

            if requirements_format == "pipenv":
                # NOTE(review): files are opened in binary mode, which assumes
                # get_file_content returns bytes — confirm against the service API.
                pipfile_r = gh_project.get_file_content(f"{prefix}Pipfile")
                with open("Pipfile", "wb") as f:
                    f.write(pipfile_r)

                try:
                    piplock_r = gh_project.get_file_content(f"{prefix}Pipfile.lock")
                    with open("Pipfile.lock", "wb") as f:
                        f.write(piplock_r)
                    thoth_project = Project.from_files(
                        pipfile_path="Pipfile",
                        pipfile_lock_path="Pipfile.lock",
                        # Pass the runtime environment for consistency with the
                        # without-lock branch below.
                        runtime_environment=runtime_environment,
                    )
                except Exception:
                    _LOGGER.debug("No Pipfile.lock found")
                    thoth_project = Project.from_files(
                        pipfile_path="Pipfile",
                        without_pipfile_lock=True,
                        runtime_environment=runtime_environment,
                    )
            elif requirements_format in ["pip", "pip-tools", "pip-compile"]:
                try:
                    requirements_r = gh_project.get_file_content(f"{prefix}requirements.txt")
                    with open("requirements.txt", "wb") as f:
                        f.write(requirements_r)
                    thoth_project = Project.from_pip_compile_files(
                        requirements_path="requirements.txt",
                        allow_without_lock=True,
                        runtime_environment=runtime_environment,
                    )
                except Exception:
                    _LOGGER.debug("No requirements.txt found, trying to download requirements.in")
                    requirements_r = gh_project.get_file_content(f"{prefix}requirements.in")
                    with open("requirements.in", "wb") as f:
                        # Write the downloaded content directly, consistent with the
                        # other branches (``requirements_r.content`` assumed a response
                        # object that get_file_content does not return).
                        f.write(requirements_r)
                    thoth_project = Project.from_pip_compile_files(
                        requirements_path="requirements.in",
                        allow_without_lock=True,
                        runtime_environment=runtime_environment,
                    )
                # The stray ``Project.from_pip_compile_files(allow_without_lock=True)``
                # call (missing requirements_path, discarding the project built above)
                # was removed.
            else:
                raise NotImplementedError(f"{requirements_format} requirements format not supported.")

            db.update_kebechet_installation_using_files(
                slug=_SLUG,
                runtime_environment_name=runtime_environment.name,
                installation_id=str(gh_project.github_repo.id),
                requirements=thoth_project.pipfile.to_dict(),
                # The lock file is optional (allow_without_lock/without_pipfile_lock);
                # pass None instead of raising AttributeError when it is absent.
                # NOTE(review): assumes the storages function accepts None — confirm.
                requirements_lock=thoth_project.pipfile_lock.to_dict() if thoth_project.pipfile_lock else None,
                thoth_config=thoth_config,
            )

        # Drop installation records for runtime environments that are no longer
        # configured in the repository.
        present_installations = db.get_kebechet_github_app_installations_all(slug=_SLUG)
        cur_env_names = {r.name for r in to_update}
        all_env_names = {installation["runtime_environment_name"] for installation in present_installations}
        to_delete = all_env_names - cur_env_names
        for name in to_delete:
            db.delete_kebechet_github_app_installations(slug=_SLUG, runtime_environment=name)
def test_from_final_state(self, context: Context) -> None:
    """Test instantiating product from a final state.

    Builds a fully resolved State (daiquiri, numpy, tensorflow), mocks the
    context/graph lookups Product.from_final_state performs, and checks the
    resulting Product's score, justification, advised runtime environment and
    the exact Pipfile/Pipfile.lock dictionary produced.
    """
    # Final resolver state: three locked packages, nothing left unresolved.
    state = State(
        score=0.5,
        resolved_dependencies=OrderedDict(
            {
                "daiquiri": ("daiquiri", "1.6.0", "https://pypi.org/simple"),
                "numpy": ("numpy", "1.17.4", "https://pypi.org/simple"),
                "tensorflow": ("tensorflow", "2.0.0", "https://pypi.org/simple"),
            }
        ),
        unresolved_dependencies=OrderedDict(),
        advised_runtime_environment=RuntimeEnvironment.from_dict(
            {"python_version": "3.6"}
        ),
    )
    state.add_justification([{"foo": "bar"}])

    # Locked package versions returned by the mocked context lookups.
    pypi = Source("https://pypi.org/simple")
    pv_daiquiri_locked = PackageVersion(
        name="daiquiri", version="==1.6.0", index=pypi, develop=False
    )
    pv_numpy_locked = PackageVersion(
        name="numpy", version="==1.17.4", index=pypi, develop=False
    )
    pv_tensorflow_locked = PackageVersion(
        name="tensorflow", version="==2.0.0", index=pypi, develop=False
    )

    # The .ordered() expectations pin the exact call sequence: for each resolved
    # package, first the package-version lookup, then its sha256 hashes.
    context.should_receive("get_package_version").with_args(
        ("daiquiri", "1.6.0", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_daiquiri_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "daiquiri", "1.6.0", "https://pypi.org/simple"
    ).and_return(["000"]).ordered()
    context.should_receive("get_package_version").with_args(
        ("numpy", "1.17.4", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_numpy_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "numpy", "1.17.4", "https://pypi.org/simple"
    ).and_return(["111"]).ordered()
    context.should_receive("get_package_version").with_args(
        ("tensorflow", "2.0.0", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_tensorflow_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "tensorflow", "2.0.0", "https://pypi.org/simple"
    ).and_return(["222"]).ordered()

    # Direct dependencies as declared by the user's project (numpy is only a
    # transitive dependency of tensorflow and is not iterated here).
    pv_daiquiri = PackageVersion(
        name="daiquiri", version="*", index=pypi, develop=False
    )
    pv_tensorflow = PackageVersion(
        name="tensorflow",
        version=">=2.0.0",
        index=pypi,
        develop=False
    )
    project = flexmock()
    project.should_receive("iter_dependencies").with_args(
        with_devel=True
    ).and_return([pv_daiquiri, pv_tensorflow]).once()
    context.project = project

    # Dependency graph edges: tensorflow -> numpy; daiquiri has no dependencies.
    context.dependencies = {
        "daiquiri": {
            ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(),
        },
        "numpy": {
            ("numpy", "1.17.4", "https://pypi.org/simple"): set()
        },
        "tensorflow": {
            ("tensorflow", "2.0.0", "https://pypi.org/simple"): {
                ("numpy", "1.17.4", "https://pypi.org/simple")
            }
        },
    }
    # Reverse edges with the (package, os_name, os_version, python_version)
    # environment the dependency record was gathered on — drives the
    # environment-marker lookup below.
    context.dependents = {
        "daiquiri": {
            ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(),
        },
        "numpy": {
            ("numpy", "1.17.4", "https://pypi.org/simple"): {
                (("tensorflow", "2.0.0", "https://pypi.org/simple"), "fedora", "31", "3.7")
            }
        },
        "tensorflow": {
            ("tensorflow", "2.0.0", "https://pypi.org/simple"): set()
        },
    }
    # Marker queried for the tensorflow -> numpy edge; expected to end up in the
    # lock entry for numpy.
    context.graph.should_receive("get_python_environment_marker").with_args(
        "tensorflow",
        "2.0.0",
        "https://pypi.org/simple",
        dependency_name="numpy",
        dependency_version="1.17.4",
        os_name="fedora",
        os_version="31",
        python_version="3.7",
    ).and_return("python_version >= '3.7'").once()

    product = Product.from_final_state(state=state, context=context)

    # Product mirrors the state's score, justification and advised environment.
    assert product.score == state.score
    assert product.justification == state.justification
    assert product.advised_runtime_environment == state.advised_runtime_environment
    # Exact Pipfile/Pipfile.lock serialization, including hashes returned by the
    # mocked graph calls and the marker on numpy.
    assert product.project.to_dict() == {
        "requirements": {
            "packages": {
                "daiquiri": {"index": "pypi-org", "version": "*"},
                "tensorflow": {"index": "pypi-org", "version": ">=2.0.0"},
            },
            "dev-packages": {},
            "source": [
                {
                    "url": "https://pypi.org/simple",
                    "verify_ssl": True,
                    "name": "pypi-org",
                }
            ],
        },
        "requirements_locked": {
            "_meta": {
                "sources": [
                    {
                        "url": "https://pypi.org/simple",
                        "verify_ssl": True,
                        "name": "pypi-org",
                    }
                ],
                "requires": {},
                "hash": {
                    "sha256": "c3a2f42932b6e5cd30f5664b11eda605f5fbd672f1b88729561d0d3edd10b5d9"
                },
                "pipfile-spec": 6,
            },
            "default": {
                "daiquiri": {
                    "version": "==1.6.0",
                    "hashes": ["sha256:000"],
                    "index": "pypi-org",
                },
                "numpy": {
                    "version": "==1.17.4",
                    "hashes": ["sha256:111"],
                    "index": "pypi-org",
                    "markers": "python_version >= '3.7'",
                },
                "tensorflow": {
                    "version": "==2.0.0",
                    "hashes": ["sha256:222"],
                    "index": "pypi-org",
                },
            },
            "develop": {},
        },
        "runtime_environment": {
            "hardware": {"cpu_family": None, "cpu_model": None},
            "operating_system": {"name": None, "version": None},
            "python_version": None,
            "cuda_version": None,
            "name": None,
        },
    }