def test_add_package_develop(self):
    """Test adding a development package to a project."""
    pipfiles_dir = os.path.join(self.data_dir, "pipfiles")
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(pipfiles_dir, "Pipfile_test1")),
        pipfile_lock=PipfileLock.from_file(os.path.join(pipfiles_dir, "Pipfile_test1.lock")),
    )

    foreign_source = Source(name="foo", url="https://foo.bar", verify_ssl=True, warehouse=False)
    assert "selinon" not in project.pipfile.dev_packages.packages

    # Adding a package whose source is not registered in the meta section must fail.
    with pytest.raises(InternalError):
        project.add_package("selinon", "==1.0.0", develop=True, source=foreign_source)

    registered_source = project.add_source(url="https://foo.bar")
    project.add_package("selinon", "==1.0.0", develop=True, source=registered_source)

    dev_packages = project.pipfile.dev_packages
    assert "selinon" in dev_packages.packages
    assert dev_packages["selinon"].version == "==1.0.0"
    assert dev_packages["selinon"].index.name == "foo-bar"
    assert dev_packages["selinon"].develop is True

    # Do not add the package to the lock - lock has to be explicitly done.
    assert "selinon" not in project.pipfile_lock.dev_packages.packages
def _instantiate_project(
    requirements: str,
    requirements_locked: Optional[str] = None,
    runtime_environment: Optional[RuntimeEnvironment] = None,
):
    """Create Project instance based on arguments passed to CLI.

    ``requirements`` and ``requirements_locked`` are either paths to files or
    the raw content itself (the latter when values come from environment
    variables with escaped newlines).

    :param requirements: Pipfile path or Pipfile content
    :param requirements_locked: optional Pipfile.lock path or content
    :param runtime_environment: optional runtime environment; defaults to an empty one
    :return: the instantiated Project
    """
    try:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()
    except OSError:
        # FileNotFoundError is a subclass of OSError, so a single except suffices.
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

    if requirements_locked:
        try:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)
    pipfile_lock = PipfileLock.from_string(requirements_locked, pipfile) if requirements_locked else None
    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment or RuntimeEnvironment.from_dict({}),
    )

    return project
def _instantiate_project(
    requirements: str,
    requirements_locked: typing.Optional[str],
    files: bool,
    runtime_environment: RuntimeEnvironment = None,
):
    """Create Project instance based on arguments passed to CLI."""
    if files:
        # Inputs are file paths - load the actual content from disk.
        with open(requirements, "r") as input_file:
            requirements = input_file.read()

        if requirements_locked:
            with open(requirements_locked, "r") as input_file:
                requirements_locked = input_file.read()

        del input_file
    else:
        # Inputs were gathered from environment variables - un-escape new lines.
        requirements = requirements.replace("\\n", "\n")
        if requirements_locked:
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)

    pipfile_lock = None
    if requirements_locked:
        pipfile_lock = PipfileLock.from_string(requirements_locked, pipfile)

    return Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment or RuntimeEnvironment.from_dict({}),
    )
def test_add_package_develop(self):
    pipfiles_dir = os.path.join(self.data_dir, 'pipfiles')
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(pipfiles_dir, 'Pipfile_test1')),
        pipfile_lock=PipfileLock.from_file(os.path.join(pipfiles_dir, 'Pipfile_test1.lock')),
    )

    unregistered = Source(name='foo', url='https://foo.bar', verify_ssl=True, warehouse=False)
    assert 'selinon' not in project.pipfile.dev_packages.packages

    # A source that is not registered in the meta section must be rejected.
    with pytest.raises(InternalError):
        project.add_package('selinon', '==1.0.0', develop=True, source=unregistered)

    registered = project.add_source(url='https://foo.bar')
    project.add_package('selinon', '==1.0.0', develop=True, source=registered)

    assert 'selinon' in project.pipfile.dev_packages.packages
    added = project.pipfile.dev_packages['selinon']
    assert added.version == '==1.0.0'
    assert added.index == 'foo-bar'
    assert added.develop is True

    # Do not add the package to the lock - lock has to be explicitly done.
    assert 'selinon' not in project.pipfile_lock.dev_packages.packages
def test_construct_requirements_txt(self):
    """Test construct requirements txt."""
    pipfile_lock = PipfileLock.from_file(
        os.path.join(self.data_dir, "pipfiles", "Pipfile_requirements.lock")
    )

    # NOTE(review): the expected text below was reconstructed from a
    # whitespace-mangled source; confirm exact line breaks/indentation
    # against the fixture output before merging.
    expected = """#
# This file is autogenerated by Thoth and is meant to be used with pip-compile
# as provided by pip-tools.
#
--index-url https://pypi.org/simple
--extra-index-url https://index-aicoe.a3c1.starter-us-west-1.openshiftapps.com/fedora28/1.9/jemalloc
absl-py==0.5.0 \\
    --hash=sha256:6fcc3c04dc881fd93d793674a42ee8c73155570eda8f8b90c4477c8522478b7b
click==6.6 \\
    --hash=sha256:cc6a19da8ebff6e7074f731447ef7e112bd23adf3de5c597cf9989f2fd8defe9 \\
    --hash=sha256:fcf697e1fd4b567d817c69dab10a4035937fe6af175c05fd6806b69f74cbc6c4
python-dateutil==2.7.3; python_version >= '2.7' \\
    --hash=sha256:1adb80e7a782c12e52ef9a8182bebeb73f1d7e24e374397af06fb4956c8dc5c0 \\
    --hash=sha256:e27001de32f627c22380a688bcc43ce83504a7bc5da472209b4c70f02829f0b8
tensorflow==1.9.0rc0 \\
    --hash=sha256:0588ac4f2b2e3994a5245c9be13a58e6128c26f7e6eb61c2ef90d82b58b78d4c \\
    --hash=sha256:1a83b8e789a5b9bfdfc671d4368b976a8d9cc5d217209264cc987885ff55a6b1 \\
    --hash=sha256:4dedb5dacd20df1e545835a40ad6b337fda11e32432bd643e4a8cd484d72fe0a
#
# dev packages
#
autopep8==1.4 \\
    --hash=sha256:655e3ee8b4545be6cfed18985f581ee9ecc74a232550ee46e9797b6fbf4f336d
"""
    assert pipfile_lock.construct_requirements_txt() == expected
def _instantiate_project(
    requirements: str,
    requirements_locked: Optional[str] = None,
    *,
    runtime_environment: Optional[RuntimeEnvironment] = None,
    constraints: Optional[str] = None,
):
    """Create Project instance based on arguments passed to CLI.

    Each string argument is either a path to a file or the raw content itself
    (the latter when values are passed via environment variables with escaped
    newlines).

    :param requirements: Pipfile path or Pipfile content
    :param requirements_locked: optional Pipfile.lock path or content
    :param runtime_environment: optional runtime environment; defaults to an empty one
    :param constraints: optional constraints path or content (JSON or text form)
    :return: the instantiated Project
    """
    try:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()
    except OSError:
        # FileNotFoundError is a subclass of OSError, so a single except suffices.
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

    if requirements_locked:
        try:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)

    pipfile_lock = None
    # The literal "null" is treated as "no lock file provided".
    if requirements_locked and requirements_locked != "null":
        pipfile_lock = PipfileLock.from_string(requirements_locked, pipfile)

    constraints_instance = None
    if constraints:
        try:
            with open(constraints, "r") as constraints_file:
                constraints_content = constraints_file.read()
        except OSError:
            # When we gather values from env vars, un-escape new lines.
            constraints_content = constraints.replace("\\n", "\n")

        try:
            # Constraints can be supplied as a JSON document or as a
            # requirements-style text file; try JSON first.
            constraints_instance = Constraints.from_dict(json.loads(constraints_content))
        except json.decoder.JSONDecodeError:
            constraints_instance = Constraints.from_string(constraints_content)

    runtime_environment = runtime_environment or RuntimeEnvironment.from_dict({})
    if not runtime_environment.platform:
        runtime_environment.platform = _DEFAULT_PLATFORM

    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment,
        constraints=constraints_instance or Constraints(),
    )

    return project
def test_from_string(self, pipfile_lock: str):
    """Test from string."""
    pipfiles_dir = os.path.join(self.data_dir, "pipfiles")

    with open(os.path.join(pipfiles_dir, pipfile_lock), "r") as lock_file:
        lock_content = lock_file.read()

    # The corresponding Pipfile shares the name, minus the ".lock" suffix.
    with open(os.path.join(pipfiles_dir, pipfile_lock[: -len(".lock")]), "r") as pipfile_file:
        pipfile_instance = Pipfile.from_string(pipfile_file.read())

    instance = PipfileLock.from_string(lock_content, pipfile=pipfile_instance)
    # Round trip: serializing back must reproduce the original content exactly.
    assert instance.to_string() == lock_content
def test_from_string(self, pipfile_lock: str):
    pipfiles_dir = os.path.join(self.data_dir, 'pipfiles')

    with open(os.path.join(pipfiles_dir, pipfile_lock), 'r') as lock_file:
        content = lock_file.read()

    # The matching Pipfile shares the lock file's name without '.lock'.
    pipfile_name = pipfile_lock[:-len('.lock')]
    with open(os.path.join(pipfiles_dir, pipfile_name), 'r') as pipfile_file:
        pipfile_instance = Pipfile.from_string(pipfile_file.read())

    instance = PipfileLock.from_string(content, pipfile=pipfile_instance)
    # Serialization must be loss-less.
    assert instance.to_string() == content
def test_add_source(self):
    """Test add source."""
    pipfiles_dir = os.path.join(self.data_dir, "pipfiles")
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(pipfiles_dir, "Pipfile_test1")),
        pipfile_lock=PipfileLock.from_file(os.path.join(pipfiles_dir, "Pipfile_test1.lock")),
    )

    source = project.add_source(url="https://foo.bar")

    # The source gets a generated name and the very same instance is
    # registered in both the Pipfile and the Pipfile.lock meta sections.
    assert source.name is not None
    for meta in (project.pipfile.meta, project.pipfile_lock.meta):
        assert source.name in meta.sources
        assert source is meta.sources[source.name]
def test_add_package(self):
    """Test add package."""
    pipfiles_dir = os.path.join(self.data_dir, "pipfiles")
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(pipfiles_dir, "Pipfile_test1")),
        pipfile_lock=PipfileLock.from_file(os.path.join(pipfiles_dir, "Pipfile_test1.lock")),
    )

    assert "selinon" not in project.pipfile.packages.packages
    project.add_package("selinon", "==1.0.0")
    assert "selinon" in project.pipfile.packages.packages

    added = project.pipfile.packages["selinon"]
    assert added.version == "==1.0.0"
    assert added.index is None
    assert added.develop is False

    # Do not add the package to the lock - lock has to be explicitly done.
    assert "selinon" not in project.pipfile_lock.packages.packages
def test_add_source(self):
    pipfiles_dir = os.path.join(self.data_dir, 'pipfiles')
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(pipfiles_dir, 'Pipfile_test1')),
        pipfile_lock=PipfileLock.from_file(os.path.join(pipfiles_dir, 'Pipfile_test1.lock')),
    )

    source = project.add_source(url='https://foo.bar')

    # A name is generated for the source and the identical instance is
    # stored in both meta sections.
    assert source.name is not None
    for meta in (project.pipfile.meta, project.pipfile_lock.meta):
        assert source.name in meta.sources
        assert source is meta.sources[source.name]
def test_add_package(self):
    pipfiles_dir = os.path.join(self.data_dir, 'pipfiles')
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(pipfiles_dir, 'Pipfile_test1')),
        pipfile_lock=PipfileLock.from_file(os.path.join(pipfiles_dir, 'Pipfile_test1.lock')),
    )

    assert 'selinon' not in project.pipfile.packages.packages
    project.add_package('selinon', '==1.0.0')
    assert 'selinon' in project.pipfile.packages.packages

    added = project.pipfile.packages['selinon']
    assert added.version == '==1.0.0'
    assert added.index is None
    assert added.develop is False

    # Do not add the package to the lock - lock has to be explicitly done.
    assert 'selinon' not in project.pipfile_lock.packages.packages
def test_extras_parsing(self):
    """Test extras parsing."""
    pipfiles_dir = os.path.join(self.data_dir, "pipfiles")
    pipfile_instance = Pipfile.from_file(os.path.join(pipfiles_dir, "Pipfile_extras"))
    instance = PipfileLock.from_file(
        os.path.join(pipfiles_dir, "Pipfile_extras.lock"), pipfile=pipfile_instance
    )

    assert instance is not None
    assert len(instance.packages.packages) == 34
    assert "selinon" in instance.packages.packages

    expected_extras = {"celery", "mongodb", "postgresql", "redis", "s3", "sentry"}
    package_version = instance.packages.packages["selinon"]
    # Extras must be present both in the serialized form and on the instance.
    assert set(package_version.to_dict().pop("extras")) == expected_extras
    assert set(package_version.extras) == expected_extras
def horus_extract_command(
    notebook_path: str,
    store_files_path: str,
    pipfile: bool = False,
    pipfile_lock: bool = False,
    thoth_config: bool = False,
    use_overlay: bool = False,
    force: bool = False,
):
    """Horus extract command.

    Extract dependency metadata (Pipfile, Pipfile.lock, .thoth.yaml) embedded
    in a Jupyter notebook's metadata and store it as files on disk.

    :param notebook_path: path to the notebook to read metadata from
    :param store_files_path: base directory where extracted files are stored
    :param pipfile: extract only the Pipfile
    :param pipfile_lock: extract only the Pipfile.lock
    :param thoth_config: extract only the .thoth.yaml
    :param use_overlay: store files under overlays/<kernel_name> instead of the base directory
    :param force: overwrite existing files instead of raising FileExistsError
    :return: dict with the kernel name and resolution engine found in the notebook
    :raises Exception: if the notebook kernel is not a Python kernel
    :raises KeyError: if required metadata is missing from the notebook
    :raises FileExistsError: if a target file exists and ``force`` is not set
    """
    results = {}
    results["kernel_name"] = ""
    results["resolution_engine"] = ""

    extract_all: bool = False

    if not pipfile and not pipfile_lock and not thoth_config:
        # If no parameter to be extracted is set, extract all is set.
        extract_all = True

    notebook = get_notebook_content(notebook_path=notebook_path)
    notebook_metadata = notebook.get("metadata")

    # Only Python notebooks carry dependency metadata we can handle.
    if notebook_metadata.get("language_info"):
        language = notebook_metadata["language_info"]["name"]

        if language and language != "python":
            raise Exception("Only Python kernels are currently supported.")

    # Fall back to the default kernel name when no kernelspec is present.
    if notebook_metadata.get("kernelspec"):
        kernelspec = notebook_metadata.get("kernelspec")
        kernel_name = kernelspec.get("name")
    else:
        kernel_name = "python3"

    results["kernel_name"] = kernel_name

    store_path: Path = Path(store_files_path)

    if use_overlay:
        if not kernel_name:
            raise KeyError("No kernel name identified in notebook metadata kernelspec.")

        # Overlays are stored per kernel: <store>/overlays/<kernel_name>.
        store_path = store_path.joinpath("overlays").joinpath(kernel_name)
        store_path.mkdir(parents=True, exist_ok=True)

    dependency_resolution_engine = notebook_metadata.get("dependency_resolution_engine")

    if not dependency_resolution_engine:
        raise KeyError("No Resolution engine identified in notebook metadata.")

    results["resolution_engine"] = dependency_resolution_engine

    # The Pipfile is parsed even when only the lock is requested, because
    # PipfileLock.from_string below needs the Pipfile instance.
    if pipfile or pipfile_lock or extract_all:
        pipfile_string = notebook_metadata.get("requirements")

        if not pipfile_string:
            raise KeyError("No Pipfile identified in notebook metadata.")

        pipfile_ = Pipfile.from_string(pipfile_string)

    if pipfile or extract_all:
        pipfile_path = store_path.joinpath("Pipfile")

        if pipfile_path.exists() and not force:
            raise FileExistsError(
                f"Cannot store Pipfile because it already exists at path: {pipfile_path.as_posix()!r}. "
                "Use --force to overwrite existing content or --show-only to visualize it."
            )
        else:
            pipfile_.to_file(path=pipfile_path)

    if pipfile_lock or extract_all:
        pipfile_lock_string = notebook_metadata.get("requirements_lock")

        if not pipfile_lock_string:
            raise KeyError("No Pipfile.lock identified in notebook metadata.")

        pipfile_lock_ = PipfileLock.from_string(pipfile_content=pipfile_lock_string, pipfile=pipfile_)

        pipfile_lock_path = store_path.joinpath("Pipfile.lock")

        if pipfile_lock_path.exists() and not force:
            raise FileExistsError(
                f"Cannot store Pipfile.lock because it already exists at path: {pipfile_lock_path.as_posix()!r}. "
                "Use --force to overwrite existing content or --show-only to visualize it."
            )
        else:
            pipfile_lock_.to_file(path=pipfile_lock_path)

    if thoth_config or extract_all:
        thoth_config_string = notebook_metadata.get("thoth_config")

        if not thoth_config_string:
            raise KeyError("No .thoth.yaml identified in notebook metadata.")

        config = _Configuration()
        config.load_config_from_string(thoth_config_string)

        # NOTE(review): unlike Pipfile/Pipfile.lock, the config is checked and
        # saved relative to the current working directory, not store_path —
        # presumably config.save_config() writes to CWD; confirm intended.
        yaml_path = Path(".thoth.yaml")
        if yaml_path.exists() and not force:
            raise FileExistsError(
                f"Cannot store .thoth.yaml because it already exists at path: {yaml_path.as_posix()!r}. "
                "Use --force to overwrite existing content or --show-only to visualize it."
            )
        else:
            config.save_config()

    return results
def lock_dependencies_with_pipenv(
    kernel_name: str,
    pipfile_string: str,
    kernels_path: Path = Path.home().joinpath(".local/share/thoth/kernels"),
) -> typing.Tuple[int, dict]:
    """Lock dependencies using Pipenv resolution engine.

    Creates a virtualenv for the kernel, writes the given Pipfile content into
    it, ensures pipenv is available on the host and runs ``pipenv lock``.

    :param kernel_name: name of the kernel (sub-directory under ``kernels_path``)
    :param pipfile_string: Pipfile content to lock
    :param kernels_path: base directory holding per-kernel environments
    :return: tuple of (returncode, result dict with keys
             ``requirements_lock``, ``error`` and ``error_msg``)
    """
    initial_path = Path.cwd()
    env_path = kernels_path.joinpath(kernel_name)

    # Delete and recreate the kernel folder if it is not present yet.
    if not env_path.exists():
        _ = subprocess.call(f"rm -rf ./{kernel_name} ", shell=True, cwd=kernels_path)
        env_path.mkdir(parents=True, exist_ok=True)

    result = {"requirements_lock": "", "error": False, "error_msg": ""}
    returncode = 0

    # Create virtualenv for the kernel.
    cli_run([str(env_path)])

    pipfile_path = env_path.joinpath("Pipfile")

    _LOGGER.info("Resolution engine used: pipenv")

    with open(pipfile_path, "w") as pipfile_file:
        pipfile_file.write(pipfile_string)

    _LOGGER.info(f"kernel path: {env_path}")
    _LOGGER.info(f"Input Pipfile: \n{pipfile_string}")

    # 2. Install pipenv if not installed already
    package = "pipenv"
    check_install = subprocess.run(
        f"python3 -c \"import sys, pkgutil; sys.exit(0 if pkgutil.find_loader('{package}') else 1)\"",
        shell=True,
        cwd=kernels_path,
        capture_output=True,
    )

    if check_install.returncode != 0:
        _LOGGER.debug(f"pipenv is not installed in the host!: {check_install.stderr!r}")

        try:
            subprocess.run("pip install pipenv", cwd=kernels_path, shell=True)
        except Exception as pipenv_install_error:
            _LOGGER.warning("error installing pipenv: %r", pipenv_install_error)
            result["error"] = True
            # Store a string for consistency with the other error paths
            # (previously the exception object itself was stored).
            result["error_msg"] = str(pipenv_install_error)
            returncode = 1
            os.chdir(initial_path)
            return returncode, result
    else:
        _LOGGER.debug("pipenv is already present on the host!")

    pipfile_lock_path = env_path.joinpath("Pipfile.lock")

    try:
        output = subprocess.run(
            f". {kernel_name}/bin/activate && cd {kernel_name} && pipenv lock",
            env=dict(os.environ, PIPENV_CACHE_DIR="/tmp"),
            cwd=kernels_path,
            shell=True,
            capture_output=True,
        )
    except Exception as pipenv_error:
        _LOGGER.warning("error locking dependencies using Pipenv: %r", pipenv_error)
        result["error"] = True
        result["error_msg"] = str(pipenv_error)
        returncode = 1
    else:
        # BUG FIX: this check used to run unconditionally, raising NameError on
        # the unbound ``output`` variable when subprocess.run itself raised.
        if output.returncode != 0:
            _LOGGER.warning("error in process trying to lock dependencies with pipenv: %r", output.stderr)
            result["error"] = True
            result["error_msg"] = str(output.stderr)
            returncode = 1

    os.chdir(env_path)

    if not result["error"]:
        if pipfile_lock_path.exists():
            with open(pipfile_lock_path, "r") as pipfile_lock_file:
                pipfile_lock_str = pipfile_lock_file.read()

            pipfile = Pipfile.from_string(pipfile_string)
            pipfile_lock_: PipfileLock = PipfileLock.from_string(pipfile_lock_str, pipfile=pipfile)
            result["requirements_lock"] = pipfile_lock_.to_dict()

            _LOGGER.debug(f"result from pipenv received: {result}")
        else:
            _LOGGER.warning("Pipfile.lock cannot be found at: %r", str(pipfile_lock_path))
            result["error"] = True
            result["error_msg"] = "Error retrieving Pipfile.lock created from pipenv."

    os.chdir(initial_path)

    return returncode, result
def test_construct_requirements_txt(self): pipfile_lock = PipfileLock.from_file( os.path.join(self.data_dir, 'pipfiles', 'Pipfile_requirements.lock')) assert pipfile_lock.construct_requirements_txt() == """#
def horus_show_command(
    path: str,
    pipfile: bool = False,
    pipfile_lock: bool = False,
    thoth_config: bool = False,
):
    """Horus show command.

    Render the dependency-related metadata stored in a Jupyter notebook
    (Pipfile, Pipfile.lock and .thoth.yaml) as human-readable strings.

    :param path: path to the notebook file to inspect
    :param pipfile: show only the Pipfile section
    :param pipfile_lock: show only the Pipfile.lock section
    :param thoth_config: show only the .thoth.yaml section
    :return: dict with kernel name, resolution engine and the rendered sections
    :raises Exception: if the notebook kernel is not a Python kernel
    """
    show_all: bool = False

    if not pipfile and not pipfile_lock and not thoth_config:
        # If no parameter to be shown is set, show all is set.
        show_all = True

    results = {}
    results["kernel_name"] = ""
    results["dependency_resolution_engine"] = ""
    results["pipfile"] = ""
    results["pipfile_lock"] = ""
    results["thoth_config"] = ""

    notebook = get_notebook_content(notebook_path=path)
    notebook_metadata = notebook.get("metadata")

    # Only Python notebooks carry metadata we can render.
    if notebook_metadata.get("language_info"):
        language = notebook_metadata["language_info"]["name"]

        if language and language != "python":
            raise Exception("Only Python kernels are currently supported.")

    # Fall back to the default kernel name when no kernelspec is present.
    if notebook_metadata.get("kernelspec"):
        kernelspec = notebook_metadata.get("kernelspec")
        kernel_name = kernelspec.get("name")
    else:
        kernel_name = "python3"

    results["kernel_name"] = kernel_name

    dependency_resolution_engine = notebook_metadata.get("dependency_resolution_engine")
    # NOTE(review): unlike other fields this may end up as None (not "") when
    # the metadata key is absent — confirm callers tolerate that.
    results["dependency_resolution_engine"] = dependency_resolution_engine

    pipfile_string = notebook_metadata.get("requirements")

    if pipfile or pipfile_lock or show_all:
        if not pipfile_string:
            results["pipfile"] = "No Pipfile identified in notebook metadata."
        else:
            pipfile_ = Pipfile.from_string(pipfile_string)

            if pipfile or show_all:
                results["pipfile"] = f"\nPipfile:\n\n{pipfile_.to_string()}"

    if pipfile_lock or show_all:
        # The lock can only be rendered when the Pipfile itself exists,
        # since PipfileLock.from_string requires the Pipfile instance.
        if pipfile_string:
            pipfile_lock_string = notebook_metadata.get("requirements_lock")

            if not pipfile_lock_string:
                results["pipfile_lock"] = "No Pipfile.lock identified in notebook metadata."
            else:
                pipfile_lock_ = PipfileLock.from_string(pipfile_content=pipfile_lock_string, pipfile=pipfile_)
                results["pipfile_lock"] = f"\nPipfile.lock:\n\n{pipfile_lock_.to_string()}"
        else:
            results["pipfile_lock"] = "No Pipfile identified in notebook metadata, therefore Pipfile.lock cannot be created."

    if thoth_config or show_all:
        thoth_config_string = notebook_metadata.get("thoth_config")

        if not thoth_config_string:
            results["thoth_config"] = "No .thoth.yaml identified in notebook metadata."
        else:
            config = _Configuration()
            config.load_config_from_string(thoth_config_string)
            results["thoth_config"] = f"\n.thoth.yaml:\n\n{yaml.dump(config.content)}"

    return results
def horus_set_kernel_command(
    path: str,
    kernel_name: typing.Optional[str],
    save_in_notebook: bool = True,
    resolution_engine: typing.Optional[str] = None,
    is_magic_command: bool = False,
):
    """Create kernel using dependencies in notebook metadata.

    :param path: path to the notebook to read metadata from
    :param kernel_name: kernel name to use; falls back to the notebook's kernelspec name
    :param save_in_notebook: persist the (possibly updated) kernel name back into the notebook
    :param resolution_engine: resolution engine to use; falls back to notebook metadata
    :param is_magic_command: True when invoked from a Jupyter magic (skips storing files)
    :return: dict with the kernel name and dependency resolution engine used
    :raises Exception: if the notebook kernel is not a Python kernel
    :raises KeyError: if no resolution engine can be determined
    """
    results = {}
    results["kernel_name"] = ""
    results["dependency_resolution_engine"] = ""

    # 0. Check if all metadata for dependencies are present in the notebook
    notebook = get_notebook_content(notebook_path=path)
    notebook_metadata = notebook.get("metadata")

    if notebook_metadata.get("language_info"):
        language = notebook_metadata["language_info"]["name"]

        if language and language != "python":
            raise Exception("Only Python kernels are currently supported.")

    # NOTE(review): no fallback here when "kernelspec" is missing (unlike the
    # extract/show commands) — a missing kernelspec raises AttributeError;
    # confirm whether this is intentional.
    kernelspec = notebook_metadata.get("kernelspec")
    notebook_kernel = kernelspec.get("name")

    if not kernel_name:
        kernel = notebook_kernel
    else:
        kernel = kernel_name

    # The default Python kernel is replaced with the dedicated one.
    if kernel == "python3":
        kernel = "jupyterlab-requirements"

    # Fixed: was ``results["kernel_name"]: str = kernel`` — annotating a
    # subscript target is misleading (type checkers ignore it).
    results["kernel_name"] = kernel

    home = Path.home()
    store_path: Path = home.joinpath(".local/share/thoth/kernels")

    if not resolution_engine:
        dependency_resolution_engine = notebook_metadata.get("dependency_resolution_engine")

        if not dependency_resolution_engine:
            raise KeyError("No Resolution engine identified in notebook metadata.")
    else:
        dependency_resolution_engine = resolution_engine

    results["dependency_resolution_engine"] = dependency_resolution_engine

    complete_path: Path = store_path.joinpath(kernel)

    # Recreate the kernel directory from scratch (CLI invocation only).
    if not is_magic_command:
        if complete_path.exists():
            delete_kernel(kernel_name=kernel)

        complete_path.mkdir(parents=True, exist_ok=True)

    # 1. Get Pipfile, Pipfile.lock and .thoth.yaml and store them
    #    in ./.local/share/kernel/{kernel_name}

    # requirements
    if not is_magic_command:
        pipfile_string = notebook_metadata.get("requirements")
        pipfile_ = Pipfile.from_string(pipfile_string)
        pipfile_path = complete_path.joinpath("Pipfile")
        pipfile_.to_file(path=pipfile_path)

    # requirements lock
    if not is_magic_command:
        pipfile_lock_string = notebook_metadata.get("requirements_lock")
        pipfile_lock_ = PipfileLock.from_string(pipfile_content=pipfile_lock_string, pipfile=pipfile_)
        pipfile_lock_path = complete_path.joinpath("Pipfile.lock")
        pipfile_lock_.to_file(path=pipfile_lock_path)

    # thoth configuration file (only used by the thoth resolution engine)
    if dependency_resolution_engine == "thoth" and not is_magic_command:
        thoth_config_string = notebook_metadata.get("thoth_config")
        config = _Configuration()
        config.load_config_from_string(thoth_config_string)
        config_path = complete_path.joinpath(".thoth.yaml")
        config.save_config(path=config_path)

    # 2. Create virtualenv and install dependencies
    install_packages(
        kernel_name=kernel,
        resolution_engine=dependency_resolution_engine,
        is_cli=True,
        is_magic_command=is_magic_command,
    )

    # 3. Install packages using micropipenv
    create_kernel(kernel_name=kernel)

    if save_in_notebook:
        # Update kernel name if different name selected.
        kernelspec["name"] = kernel
        notebook_metadata["kernelspec"] = kernelspec

        notebook["metadata"] = notebook_metadata
        save_notebook_content(notebook_path=path, notebook=notebook)

    return results
class TestProduct(AdviserTestCase): """Test manipulation with product.""" _PIPFILE = """ [[source]] name = "pypi-org" url = "https://pypi.org/simple" verify_ssl = true [dev-packages] [packages] tensorflow = "*" [requires] python_version = "3.7" [thoth.allow_prereleases] black = true """ _DEPENDENCIES_NO_CYCLE = { "absl-py": { ("absl-py", "0.8.1", "https://pypi.org/simple"): { ("six", "1.13.0", "https://pypi.org/simple"), ("six", "1.9.0", "https://pypi.org/simple"), } }, "astor": { ("astor", "0.8.0", "https://pypi.org/simple"): { ("six", "1.13.0", "https://pypi.org/simple"), } }, "tensorflow": { ("tensorflow", "1.9.0", "https://pypi.org/simple"): { ("astor", "0.8.0", "https://pypi.org/simple"), ("absl-py", "0.8.1", "https://pypi.org/simple"), } }, } _DEPENDENCIES_CYCLE = { "a": { ("a", "1.0.0", "https://pypi.org/simple"): { ("b", "1.0.0", "https://pypi.org/simple"), } }, "b": { ("b", "1.0.0", "https://pypi.org/simple"): { ("c", "1.1.0", "https://pypi.org/simple"), } }, "c": { ("c", "1.0.0", "https://pypi.org/simple"): { ("a", "1.0.0", "https://pypi.org/simple"), ("b", "1.0.0", "https://pypi.org/simple"), } }, } _DEPENDENCIES_NO_CYCLE_PIPFILE_LOCK = PipfileLock.from_dict( { "_meta": { "hash": {"sha256": "001e85311f65a97fe23d21061c0b68139015dfea5b4bfb8b91551ede367fe1d1"}, "pipfile-spec": 6, "requires": {"python_version": "3.6"}, "sources": [{"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True}], }, "default": { "absl-py": { "hashes": ["sha256:d9129186431e150d7fe455f1cb1ecbb92bb5dba9da9bc3ef7b012d98c4db2526"], "index": "pypi", "version": "==0.8.1", }, "astor": { "hashes": [ "sha256:37a6eed8b371f1228db08234ed7f6cfdc7817a3ed3824797e20cbb11dc2a7862", ], "index": "pypi", "version": "==0.8.0", }, "six": { "hashes": [ "sha256:13f9f196f330c7c2c5d7a5cf91af894110ca0215ac051b5844701f2bfd934d52", ], "index": "pypi", "version": "==1.13.0", }, "tensorflow": { "hashes": [ "sha256:d351f7db08b8de322536c5886fada3e37feae809bfd37368050f9eeea544b87e", ], "index": "pypi", 
"version": "==1.9.0", }, }, "develop": {}, }, pipfile=None, ) _DEPENDENCIES_CYCLE_PIPFILE_LOCK = PipfileLock.from_dict( { "_meta": { "hash": {"sha256": "FOO"}, "pipfile-spec": 6, "requires": {"python_version": "3.8"}, "sources": [{"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True}], }, "default": { "a": { "hashes": ["sha256:foo"], "index": "pypi", "version": "==1.0.0", }, "b": { "hashes": ["sha256:foo"], "index": "pypi", "version": "==1.0.0", }, "c": { "hashes": ["sha256:foo"], "index": "pypi", "version": "==1.0.0", }, }, "develop": {}, }, pipfile=None, ) def test_from_final_state(self, context: Context) -> None: """Test instantiating product from a final state.""" state = State( score=0.5, resolved_dependencies={ "daiquiri": ("daiquiri", "1.6.0", "https://pypi.org/simple"), "numpy": ("numpy", "1.17.4", "https://pypi.org/simple"), "tensorflow": ("tensorflow", "2.0.0", "https://pypi.org/simple"), }, unresolved_dependencies={}, advised_runtime_environment=RuntimeEnvironment.from_dict({"python_version": "3.6"}), ) state.add_justification(self.JUSTIFICATION_SAMPLE_1) pypi = Source("https://pypi.org/simple") pv_daiquiri_locked = PackageVersion(name="daiquiri", version="==1.6.0", index=pypi, develop=False) pv_numpy_locked = PackageVersion(name="numpy", version="==1.17.4", index=pypi, develop=False) pv_tensorflow_locked = PackageVersion(name="tensorflow", version="==2.0.0", index=pypi, develop=False) context.should_receive("get_package_version").with_args( ("daiquiri", "1.6.0", "https://pypi.org/simple"), graceful=False ).and_return(pv_daiquiri_locked).ordered() context.graph.should_receive("get_python_package_hashes_sha256").with_args( "daiquiri", "1.6.0", "https://pypi.org/simple" ).and_return(["000"]).ordered() context.should_receive("get_package_version").with_args( ("numpy", "1.17.4", "https://pypi.org/simple"), graceful=False ).and_return(pv_numpy_locked).ordered() context.graph.should_receive("get_python_package_hashes_sha256").with_args( 
"numpy", "1.17.4", "https://pypi.org/simple" ).and_return(["111"]).ordered() context.should_receive("get_package_version").with_args( ("tensorflow", "2.0.0", "https://pypi.org/simple"), graceful=False ).and_return(pv_tensorflow_locked).ordered() context.graph.should_receive("get_python_package_hashes_sha256").with_args( "tensorflow", "2.0.0", "https://pypi.org/simple" ).and_return(["222"]).ordered() pv_daiquiri = PackageVersion(name="daiquiri", version="*", index=pypi, develop=False) pv_tensorflow = PackageVersion(name="tensorflow", version=">=2.0.0", index=pypi, develop=False) project = flexmock( pipfile=Pipfile.from_string(self._PIPFILE), runtime_environment=RuntimeEnvironment.from_dict({"operating_system": {"name": "rhel"}}), ) project.should_receive("iter_dependencies").with_args(with_devel=True).and_return( [pv_daiquiri, pv_tensorflow] ).once() context.project = project context.dependencies = { "daiquiri": { ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(), }, "numpy": {("numpy", "1.17.4", "https://pypi.org/simple"): set()}, "tensorflow": { ("tensorflow", "2.0.0", "https://pypi.org/simple"): {("numpy", "1.17.4", "https://pypi.org/simple")} }, } context.dependents = { "daiquiri": { ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(), }, "numpy": { ("numpy", "1.17.4", "https://pypi.org/simple"): { ( ("tensorflow", "2.0.0", "https://pypi.org/simple"), "fedora", "31", "3.7", ) } }, "tensorflow": {("tensorflow", "2.0.0", "https://pypi.org/simple"): set()}, } context.graph.should_receive("get_python_environment_marker").with_args( "tensorflow", "2.0.0", "https://pypi.org/simple", dependency_name="numpy", dependency_version="1.17.4", os_name="fedora", os_version="31", python_version="3.7", marker_evaluation_result=True, ).and_return("python_version >= '3.7'").once() assert "THOTH_ADVISER_METADATA" not in os.environ metadata_justification = {"thoth.adviser": {"justification": [{"bar": "baz"}]}} os.environ["THOTH_ADVISER_METADATA"] = 
        # NOTE(review): the start of this test method lies outside this chunk; the
        # fragment below continues an os.environ assignment begun earlier (presumably
        # setting THOTH_ADVISER_METADATA — confirm against the full file).
        json.dumps(metadata_justification)
        try:
            product = Product.from_final_state(state=state, context=context)
        finally:
            # Always drop the env variable so it does not leak into other tests.
            os.environ.pop("THOTH_ADVISER_METADATA")

        assert product.score == state.score
        # Justification entries from the metadata env var are prepended to the
        # state's own justification.
        assert product.justification == list(
            chain(metadata_justification["thoth.adviser"]["justification"], state.justification)
        )
        assert product.advised_runtime_environment == state.advised_runtime_environment
        assert product.project.to_dict() == {
            "constraints": [],
            "requirements": {
                "packages": {
                    "daiquiri": {"index": "pypi-org-simple", "version": "*"},
                    "tensorflow": {"index": "pypi-org-simple", "version": ">=2.0.0"},
                },
                "dev-packages": {},
                "requires": {"python_version": "3.7"},
                "source": [
                    {
                        "url": "https://pypi.org/simple",
                        "verify_ssl": True,
                        "name": "pypi-org",
                    },
                    {
                        "url": "https://pypi.org/simple",
                        "verify_ssl": True,
                        "name": "pypi-org-simple",
                    },
                ],
                "thoth": {
                    "allow_prereleases": {"black": True},
                },
            },
            "requirements_locked": {
                "_meta": {
                    "sources": [
                        {"name": "pypi-org", "url": "https://pypi.org/simple", "verify_ssl": True},
                        {
                            "url": "https://pypi.org/simple",
                            "verify_ssl": True,
                            "name": "pypi-org-simple",
                        },
                    ],
                    "requires": {"python_version": "3.7"},
                    "hash": {"sha256": "6cc8365e799b949fb6cc564cea2d8e0e8a782ab676a006e65abbe14621b93381"},
                    "pipfile-spec": 6,
                },
                "default": {
                    "daiquiri": {
                        "version": "==1.6.0",
                        "hashes": ["sha256:000"],
                        "index": "pypi-org-simple",
                    },
                    "numpy": {
                        "version": "==1.17.4",
                        "hashes": ["sha256:111"],
                        "index": "pypi-org-simple",
                        "markers": "python_version >= '3.7'",
                    },
                    "tensorflow": {
                        "version": "==2.0.0",
                        "hashes": ["sha256:222"],
                        "index": "pypi-org-simple",
                    },
                },
                "develop": {},
            },
            "runtime_environment": {
                "hardware": {"cpu_family": None, "cpu_model": None, "gpu_model": None},
                "operating_system": {"name": "rhel", "version": None},
                "python_version": None,
                "cuda_version": None,
                "labels": None,
                "cudnn_version": None,
                "name": None,
                "platform": None,
                "base_image": None,
                "mkl_version": None,
                "openblas_version": None,
                "openmpi_version": None,
                "recommendation_type": None,
            },
        }

    def test_to_dict(self, context: Context) -> None:
        """Test conversion of this product into a dictionary representation."""
        project = context.project
        project.should_receive("to_dict").with_args(keep_thoth_section=True).and_return({"baz": "bar"}).once()

        advised_runtime_environment = flexmock()
        advised_runtime_environment.should_receive("to_dict").with_args().and_return({"hello": "thoth"}).once()

        advised_manifest_changes = [
            [
                {
                    "apiVersion": "apps.openshift.io/v1",
                    "kind": "DeploymentConfig",
                    "patch": {
                        "op": "add",
                        "path": "spec.template.spec.containers[0].env",
                        "value": {"name": "OMP_NUM_THREADS", "value": "1"},
                    },
                }
            ]
        ]

        product = Product(
            advised_manifest_changes=advised_manifest_changes,
            advised_runtime_environment=advised_runtime_environment,
            justification=[{"foo": "bar"}],
            project=project,
            score=0.999,
            context=context,
        )
        # The dependency graph nodes are produced from the context fixture's
        # dependency listing, not from the mocked project.
        assert product.to_dict() == {
            "score": 0.999,
            "project": {"baz": "bar"},
            "justification": [{"foo": "bar"}],
            "advised_runtime_environment": {"hello": "thoth"},
            "advised_manifest_changes": advised_manifest_changes,
            "dependency_graph": {
                "edges": [],
                "nodes": [
                    "absl-py",
                    "astor",
                    "click",
                    "flask",
                    "gast",
                    "grpcio",
                    "itsdangerous",
                    "jinja2",
                    "markdown",
                    "markupsafe",
                    "numpy",
                    "protobuf",
                    "six",
                    "tensorboard",
                    "tensorflow",
                    "termcolor",
                    "werkzeug",
                    "wheel",
                ],
            },
        }

    def test_environment_markers(self, context: Context) -> None:
        """Test handling of environment markers across multiple runs."""
        state = State(
            score=0.0,
            resolved_dependencies={
                "numpy": ("numpy", "1.0.0", "https://pypi.org/simple"),
                "tensorflow": ("tensorflow", "2.0.0", "https://pypi.org/simple"),
            },
            unresolved_dependencies={},
            justification=[{"type": "INFO", "message": "Foo bar", "link": "https://thoth-station.ninja"}],
        )

        # Make sure tested packages are not direct dependencies. In such cases the behaviour is
        # different, see other tests.
        context.project.pipfile.packages.packages.pop("tensorflow", None)
        context.project.pipfile.packages.packages.pop("numpy", None)

        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            "numpy", "1.0.0", "https://pypi.org/simple"
        ).and_return(["000"]).once()
        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            "tensorflow", "2.0.0", "https://pypi.org/simple"
        ).and_return(["111"]).once()

        pypi = Source("https://pypi.org/simple")
        pv_numpy_locked = PackageVersion(name="numpy", version="==1.0.0", index=pypi, develop=False)
        pv_tensorflow_locked = PackageVersion(name="tensorflow", version="==2.0.0", index=pypi, develop=False)

        # .twice() because Product.from_final_state is invoked two times below.
        context.should_receive("get_package_version").with_args(
            ("numpy", "1.0.0", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_numpy_locked).twice()
        context.should_receive("get_package_version").with_args(
            ("tensorflow", "2.0.0", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_tensorflow_locked).twice()

        # numpy is introduced by tensorflow on fedora 31 / Python 3.7;
        # tensorflow itself has no dependents.
        context.dependents = {
            "numpy": {
                ("numpy", "1.0.0", "https://pypi.org/simple"): {
                    (
                        ("tensorflow", "2.0.0", "https://pypi.org/simple"),
                        "fedora",
                        "31",
                        "3.7",
                    )
                }
            },
            "tensorflow": {("tensorflow", "2.0.0", "https://pypi.org/simple"): set()},
        }

        # Two different markers returned on two consecutive calls to verify
        # markers are re-resolved per run (see second assertion below).
        context.graph.should_receive("get_python_environment_marker").with_args(
            "tensorflow",
            "2.0.0",
            "https://pypi.org/simple",
            dependency_name="numpy",
            dependency_version="1.0.0",
            os_name="fedora",
            os_version="31",
            python_version="3.7",
            marker_evaluation_result=True,
        ).and_return("python_version >= '3.7'").and_return("python_version >= '3' or 1").twice()

        product = Product.from_final_state(context=context, state=state)
        expected = {
            "advised_manifest_changes": [],
            "advised_runtime_environment": None,
            "justification": [
                {
                    "link": "https://thoth-station.ninja",
                    "message": "Foo bar",
                    "type": "INFO",
                }
            ],
            "dependency_graph": {"edges": [], "nodes": ["numpy", "tensorflow"]},
            "project": {
                "constraints": [],
                "requirements": {
                    "dev-packages": {},
                    "packages": {"flask": "*"},
                    "requires": {"python_version": "3.6"},
                    "source": [
                        {
                            "name": "pypi",
                            "url": "https://pypi.org/simple",
                            "verify_ssl": True,
                        },
                        {
                            "name": "pypi-org-simple",
                            "url": "https://pypi.org/simple",
                            "verify_ssl": True,
                        },
                    ],
                    "thoth": {
                        "allow_prereleases": {},
                        "disable_index_adjustment": False,
                    },
                },
                "requirements_locked": {
                    "_meta": {
                        "hash": {"sha256": "2e49395dfa87159358e581bd22e656c27c0dab04894d1b137a14f85bb387ea51"},
                        "pipfile-spec": 6,
                        "requires": {"python_version": "3.6"},
                        "sources": [
                            {"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True},
                            {
                                "name": "pypi-org-simple",
                                "url": "https://pypi.org/simple",
                                "verify_ssl": True,
                            },
                        ],
                    },
                    "default": {
                        "numpy": {
                            "hashes": ["sha256:000"],
                            "index": "pypi-org-simple",
                            "markers": "python_version >= '3.7'",
                            "version": "==1.0.0",
                        },
                        "tensorflow": {
                            "hashes": ["sha256:111"],
                            "index": "pypi-org-simple",
                            "version": "==2.0.0",
                        },
                    },
                    "develop": {},
                },
                "runtime_environment": {
                    "base_image": None,
                    "cuda_version": None,
                    "cudnn_version": None,
                    "hardware": {"cpu_family": None, "cpu_model": None, "gpu_model": None},
                    "name": None,
                    "operating_system": {"name": None, "version": None},
                    "labels": None,
                    "openblas_version": None,
                    "openmpi_version": None,
                    "mkl_version": None,
                    "python_version": None,
                    "recommendation_type": None,
                    "platform": None,
                },
            },
            "score": 0.0,
        }
        assert product.to_dict() == expected

        # Markers should not intersect.
        product = Product.from_final_state(context=context, state=state)
        expected["project"]["requirements_locked"]["default"]["numpy"]["markers"] = "python_version >= '3' or 1"
        assert product.to_dict() == expected

    def test_environment_markers_direct_dependency(self, context: Context) -> None:
        """Test handling environment markers for direct dependencies."""
        state = State(
            score=0.0,
            resolved_dependencies={
                "numpy": ("numpy", "1.0.0", "https://pypi.org/simple"),
                "tensorflow": ("tensorflow", "2.0.0", "https://pypi.org/simple"),
            },
            unresolved_dependencies={},
            justification=[{"type": "INFO", "message": "Foo bar", "link": "https://thoth-station.ninja"}],
        )

        pypi = Source("https://pypi.org/simple")

        # Let's assume tensorflow is our direct dependency with an environment marker set. It sets also an environment
        # marker for numpy.
        context.project.pipfile.packages.packages.pop("tensorflow", None)
        tf_package_version_direct = PackageVersion(
            name="tensorflow", version=">1.0.0", index=pypi, develop=False, markers="python_version >= '3.6'"
        )
        context.project.pipfile.add_package_version(tf_package_version_direct)

        # Just to make sure numpy is not in the direct dependency listing.
        context.project.pipfile.packages.packages.pop("numpy", None)

        pv_numpy_locked = PackageVersion(name="numpy", version="==1.0.0", index=pypi, develop=False)
        pv_tensorflow_locked = PackageVersion(name="tensorflow", version="==2.0.0", index=pypi, develop=False)

        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            *pv_numpy_locked.to_tuple()
        ).and_return(["000"]).once()
        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            *pv_tensorflow_locked.to_tuple()
        ).and_return(["111"]).once()

        context.should_receive("get_package_version").with_args(
            ("numpy", "1.0.0", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_numpy_locked).once()
        context.should_receive("get_package_version").with_args(
            ("tensorflow", "2.0.0", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_tensorflow_locked).once()

        context.dependents = {
            "numpy": {
                ("numpy", "1.0.0", "https://pypi.org/simple"): {
                    (
                        ("tensorflow", "2.0.0", "https://pypi.org/simple"),
                        "fedora",
                        "31",
                        "3.7",
                    )
                }
            },
            "tensorflow": {("tensorflow", "2.0.0", "https://pypi.org/simple"): set()},
        }

        context.graph.should_receive("get_python_environment_marker").with_args(
            "tensorflow",
            "2.0.0",
            "https://pypi.org/simple",
            dependency_name="numpy",
            dependency_version="1.0.0",
            os_name="fedora",
            os_version="31",
            python_version="3.7",
            marker_evaluation_result=True,
        ).and_return("python_version >= '3.7'").once()

        product = Product.from_final_state(context=context, state=state)
        expected = {
            "advised_manifest_changes": [],
            "advised_runtime_environment": None,
            "justification": [
                {
                    "link": "https://thoth-station.ninja",
                    "message": "Foo bar",
                    "type": "INFO",
                }
            ],
            "dependency_graph": {"edges": [], "nodes": ["numpy", "tensorflow"]},
            "project": {
                "constraints": [],
                "requirements": {
                    "dev-packages": {},
                    "packages": {
                        "flask": "*",
                        "tensorflow": {
                            "index": "pypi-org-simple",
                            "markers": "python_version >= '3.6'",
                            "version": ">1.0.0",
                        },
                    },
                    "requires": {"python_version": "3.6"},
                    "source": [
                        {
                            "name": "pypi",
                            "url": "https://pypi.org/simple",
                            "verify_ssl": True,
                        },
                        {
                            "name": "pypi-org-simple",
                            "url": "https://pypi.org/simple",
                            "verify_ssl": True,
                        },
                    ],
                    "thoth": {
                        "allow_prereleases": {},
                        "disable_index_adjustment": False,
                    },
                },
                "requirements_locked": {
                    "_meta": {
                        "hash": {"sha256": "382cd84046246d08cf670a07c61628958dec76e05d2473bf936866fc54619f9a"},
                        "pipfile-spec": 6,
                        "requires": {"python_version": "3.6"},
                        "sources": [
                            {"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True},
                            {
                                "name": "pypi-org-simple",
                                "url": "https://pypi.org/simple",
                                "verify_ssl": True,
                            },
                        ],
                    },
                    "default": {
                        "numpy": {
                            "hashes": ["sha256:000"],
                            "index": "pypi-org-simple",
                            "markers": "python_version >= '3.7'",
                            "version": "==1.0.0",
                        },
                        "tensorflow": {
                            "hashes": ["sha256:111"],
                            "index": "pypi-org-simple",
                            "markers": "python_version >= '3.6'",
                            "version": "==2.0.0",
                        },
                    },
                    "develop": {},
                },
                "runtime_environment": {
                    "base_image": None,
                    "cuda_version": None,
                    "cudnn_version": None,
                    "hardware": {"cpu_family": None, "cpu_model": None, "gpu_model": None},
                    "name": None,
                    "operating_system": {"name": None, "version": None},
                    "openblas_version": None,
                    "labels": None,
                    "openmpi_version": None,
                    "mkl_version": None,
                    "python_version": None,
                    "recommendation_type": None,
                    "platform": None,
                },
            },
            "score": 0.0,
        }
        assert product.to_dict() == expected

    def test_environment_markers_shared(self, context: Context) -> None:
        """Test handling of environment markers when multiple dependencies share one."""
        state = State(
            score=0.0,
            resolved_dependencies={
                "pandas": ("pandas", "1.0.0", "https://pypi.org/simple"),
                "numpy": ("numpy", "1.0.0", "https://pypi.org/simple"),
                "tensorflow": ("tensorflow", "2.0.0", "https://pypi.org/simple"),
            },
            unresolved_dependencies={},
            justification=[{"type": "INFO", "message": "Foo bar", "link": "https://thoth-station.ninja"}],
        )

        # Make sure tested packages are not direct dependencies. In such cases the behaviour is
        # different, see other tests.
        context.project.pipfile.packages.packages.pop("pandas", None)
        context.project.pipfile.packages.packages.pop("numpy", None)
        context.project.pipfile.packages.packages.pop("tensorflow", None)

        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            "numpy", "1.0.0", "https://pypi.org/simple"
        ).and_return(["000"]).once()
        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            "tensorflow", "2.0.0", "https://pypi.org/simple"
        ).and_return(["111"]).once()
        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            "pandas", "1.0.0", "https://pypi.org/simple"
        ).and_return(["222"]).once()

        pypi = Source("https://pypi.org/simple")
        pv_numpy_locked = PackageVersion(name="numpy", version="==1.0.0", index=pypi, develop=False)
        pv_tensorflow_locked = PackageVersion(name="tensorflow", version="==2.0.0", index=pypi, develop=False)
        pv_pandas_locked = PackageVersion(name="pandas", version="==1.0.0", index=pypi, develop=False)

        context.should_receive("get_package_version").with_args(
            ("numpy", "1.0.0", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_numpy_locked).once()
        context.should_receive("get_package_version").with_args(
            ("pandas", "1.0.0", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_pandas_locked).once()
        context.should_receive("get_package_version").with_args(
            ("tensorflow", "2.0.0", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_tensorflow_locked).once()

        # numpy is introduced by both tensorflow and pandas.
        context.dependents = {
            "numpy": {
                ("numpy", "1.0.0", "https://pypi.org/simple"): [  # set to list for reproducible runs.
                    (
                        ("tensorflow", "2.0.0", "https://pypi.org/simple"),
                        "fedora",
                        "31",
                        "3.7",
                    ),
                    (
                        ("pandas", "1.0.0", "https://pypi.org/simple"),
                        "fedora",
                        "31",
                        "3.7",
                    ),
                ]
            },
            "tensorflow": {("tensorflow", "2.0.0", "https://pypi.org/simple"): set()},
            "pandas": {("pandas", "1.0.0", "https://pypi.org/simple"): set()},
        }

        # pandas places no marker (None) on numpy while tensorflow does — the
        # expected lock below therefore carries no marker for numpy.
        context.graph.should_receive("get_python_environment_marker").with_args(
            "tensorflow",
            "2.0.0",
            "https://pypi.org/simple",
            dependency_name="numpy",
            dependency_version="1.0.0",
            os_name="fedora",
            os_version="31",
            python_version="3.7",
            marker_evaluation_result=True,
        ).and_return("python_version >= '3.8'").once()
        context.graph.should_receive("get_python_environment_marker").with_args(
            "pandas",
            "1.0.0",
            "https://pypi.org/simple",
            dependency_name="numpy",
            dependency_version="1.0.0",
            os_name="fedora",
            os_version="31",
            python_version="3.7",
            marker_evaluation_result=True,
        ).and_return(None).once()

        product = Product.from_final_state(context=context, state=state)
        expected = {
            "advised_manifest_changes": [],
            "advised_runtime_environment": None,
            "dependency_graph": {"edges": [], "nodes": ["pandas", "numpy", "tensorflow"]},
            "justification": [
                {
                    "link": "https://thoth-station.ninja",
                    "message": "Foo bar",
                    "type": "INFO",
                }
            ],
            "project": {
                "constraints": [],
                "requirements": {
                    "dev-packages": {},
                    "packages": {"flask": "*"},
                    "requires": {"python_version": "3.6"},
                    "source": [
                        {"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True},
                        {"name": "pypi-org-simple", "url": "https://pypi.org/simple", "verify_ssl": True},
                    ],
                    "thoth": {
                        "allow_prereleases": {},
                        "disable_index_adjustment": False,
                    },
                },
                "requirements_locked": {
                    "_meta": {
                        "hash": {"sha256": "2e49395dfa87159358e581bd22e656c27c0dab04894d1b137a14f85bb387ea51"},
                        "pipfile-spec": 6,
                        "requires": {"python_version": "3.6"},
                        "sources": [
                            {"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True},
                            {"name": "pypi-org-simple", "url": "https://pypi.org/simple", "verify_ssl": True},
                        ],
                    },
                    "default": {
                        "numpy": {"hashes": ["sha256:000"], "index": "pypi-org-simple", "version": "==1.0.0"},
                        "pandas": {"hashes": ["sha256:222"], "index": "pypi-org-simple", "version": "==1.0.0"},
                        "tensorflow": {"hashes": ["sha256:111"], "index": "pypi-org-simple", "version": "==2.0.0"},
                    },
                    "develop": {},
                },
                "runtime_environment": {
                    "base_image": None,
                    "cuda_version": None,
                    "cudnn_version": None,
                    "hardware": {"cpu_family": None, "cpu_model": None, "gpu_model": None},
                    "name": None,
                    "operating_system": {"name": None, "version": None},
                    "labels": None,
                    "platform": None,
                    "mkl_version": None,
                    "openmpi_version": None,
                    "openblas_version": None,
                    "python_version": None,
                    "recommendation_type": None,
                },
            },
            "score": 0.0,
        }
        assert product.to_dict() == expected

    def test_no_observation(self, context: Context) -> None:
        """Test adding information about no justification added."""
        # State carries no justification of its own — the product is expected to
        # synthesize the "no observations" INFO entry.
        state = State(
            score=0.0,
            resolved_dependencies={
                "flask": ("flask", "0.12", "https://pypi.org/simple"),
            },
            unresolved_dependencies={},
        )

        context.project.pipfile.packages.packages.pop("tensorflow")

        context.graph.should_receive("get_python_package_hashes_sha256").with_args(
            "flask", "0.12", "https://pypi.org/simple"
        ).and_return(["222"]).once()

        pypi = Source("https://pypi.org/simple")
        pv_pandas_locked = PackageVersion(name="flask", version="==0.12", index=pypi, develop=False)

        context.should_receive("get_package_version").with_args(
            ("flask", "0.12", "https://pypi.org/simple"), graceful=False
        ).and_return(pv_pandas_locked).once()

        context.dependents = {
            "flask": {("flask", "0.12", "https://pypi.org/simple"): set()},
        }

        product = Product.from_final_state(context=context, state=state)
        expected = {
            "advised_manifest_changes": [],
            "advised_runtime_environment": None,
            "dependency_graph": {"edges": [], "nodes": ["flask"]},
            "justification": [
                {
                    "type": "INFO",
                    "message": "No issues spotted for this stack based on Thoth's database",
                    "link": jl("no_observations"),
                }
            ],
            "project": {
                "constraints": [],
                "requirements": {
                    "dev-packages": {},
                    "packages": {"flask": "*"},
                    "requires": {"python_version": "3.6"},
                    "source": [
                        {"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True},
                        {"name": "pypi-org-simple", "url": "https://pypi.org/simple", "verify_ssl": True},
                    ],
                    "thoth": {
                        "allow_prereleases": {},
                        "disable_index_adjustment": False,
                    },
                },
                "requirements_locked": {
                    "_meta": {
                        "hash": {"sha256": "2e49395dfa87159358e581bd22e656c27c0dab04894d1b137a14f85bb387ea51"},
                        "pipfile-spec": 6,
                        "requires": {"python_version": "3.6"},
                        "sources": [
                            {"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True},
                            {"name": "pypi-org-simple", "url": "https://pypi.org/simple", "verify_ssl": True},
                        ],
                    },
                    "default": {
                        "flask": {"hashes": ["sha256:222"], "index": "pypi-org-simple", "version": "==0.12"},
                    },
                    "develop": {},
                },
                "runtime_environment": {
                    "base_image": None,
                    "cuda_version": None,
                    "cudnn_version": None,
                    "hardware": {"cpu_family": None, "cpu_model": None, "gpu_model": None},
                    "name": None,
                    "operating_system": {"name": None, "version": None},
                    "labels": None,
                    "platform": None,
                    "mkl_version": None,
                    "openmpi_version": None,
                    "openblas_version": None,
                    "python_version": None,
                    "recommendation_type": None,
                },
            },
            "score": 0.0,
        }
        assert product.to_dict() == expected

    def test_construct_dependency_graph_basic(self) -> None:
        """Test constructing dependency graph."""
        context = flexmock(dependencies=self._DEPENDENCIES_NO_CYCLE)
        dependency_graph = Product._construct_dependency_graph(context, self._DEPENDENCIES_NO_CYCLE_PIPFILE_LOCK)
        assert dependency_graph["nodes"] == ["absl-py", "astor", "six", "tensorflow"]
        # Edge order is not guaranteed — compare as a set of index pairs.
        assert set(tuple(i) for i in dependency_graph["edges"]) == {(0, 2), (1, 2), (3, 1), (3, 0)}

    def test_construct_dependency_graph_cycle(self, context: Context) -> None:
        """Test constructing dependency graph information with cycles."""
        # NOTE(review): the `context` fixture parameter is immediately shadowed
        # by a flexmock here, mirroring the basic test above.
        context = flexmock(dependencies=self._DEPENDENCIES_CYCLE)
        dependency_graph = Product._construct_dependency_graph(context, self._DEPENDENCIES_CYCLE_PIPFILE_LOCK)
        assert dependency_graph["nodes"] == ["a", "b", "c"]
        assert set(tuple(i) for i in dependency_graph["edges"]) == {(0, 1), (2, 0), (2, 1)}
async def post(self): """Lock and install dependencies using pipenv.""" initial_path = Path.cwd() input_data = self.get_json_body() kernel_name: str = input_data["kernel_name"] requirements: dict = json.loads(input_data["requirements"]) home = Path.home() complete_path = home.joinpath(".local/share/thoth/kernels") env_path = complete_path.joinpath(kernel_name) # Delete and recreate folder if not env_path.exists(): _ = subprocess.call(f"rm -rf ./{kernel_name} ", shell=True, cwd=complete_path) env_path.mkdir(parents=True, exist_ok=True) result = {"requirements_lock": "", "error": False} ## Create virtualenv cli_run([str(env_path)]) pipfile_path = env_path.joinpath("Pipfile") pipfile_string = Pipfile.from_dict(requirements).to_string() _LOGGER.info("Resolution engine used: pipenv") with open(pipfile_path, "w") as pipfile_file: pipfile_file.write(pipfile_string) _LOGGER.info(f"kernel path: {env_path}") _LOGGER.info(f"Input Pipfile: \n{pipfile_string}") try: # TODO: check if pipenv is installed subprocess.run( f". {kernel_name}/bin/activate && cd {kernel_name} && pip install pipenv", cwd=complete_path, shell=True) except Exception as pipenv_error: _LOGGER.warning("error installing pipenv: %r", pipenv_error) result['error'] = True os.chdir(initial_path) self.finish(json.dumps(result)) try: subprocess.run( f". 
{kernel_name}/bin/activate && cd {kernel_name} && pipenv lock", env=dict(os.environ, PIPENV_CACHE_DIR='/tmp'), cwd=complete_path, shell=True) except Exception as pipenv_error: _LOGGER.warning("error locking dependencies using Pipenv: %r", pipenv_error) result['error'] = True os.chdir(env_path) if not result['error']: pipfile_lock_path = env_path.joinpath("Pipfile.lock") if pipfile_lock_path.exists(): with open(pipfile_lock_path, "r") as pipfile_lock_file: pipfile_lock_str = pipfile_lock_file.read() pipfile = Pipfile.from_string(pipfile_string) pipfile_lock_str: PipfileLock = PipfileLock.from_string( pipfile_lock_str, pipfile=pipfile) result["requirements_lock"] = pipfile_lock_str.to_dict() _LOGGER.debug(f"result from pipenv received: {result}") else: _LOGGER.warning("Pipfile.lock cannot be found at: %r", str(pipfile_lock_path)) result['error'] = True os.chdir(initial_path) self.finish(json.dumps(result))