def test_construct_requirements(self):
    """Test construct requirements.

    Verifies that a Pipfile fixture is serialized into pip-tools compatible
    requirements.in/requirements.txt content byte-for-byte.
    """
    pipfile = Pipfile.from_file(
        os.path.join(self.data_dir, "pipfiles", "Pipfile_requirements"))

    # The exact bytes matter: both construct_* methods are compared against
    # this string verbatim below.
    expected = """#
# This file is autogenerated by Thoth and is meant to be used with pip-compile
# as provided by pip-tools.
#
--index-url https://pypi.org/simple
--extra-index-url https://tensorflow.pypi.thoth-station.ninja/index/manylinux2010/AVX2/simple/

attrs>=10
connexion[swagger-ui]>=1.2; python_version < '2.7.9' or (python_version >= '3.0' and python_version < '3.4')
flask==1.1.1
aiocontextvars
sentry-sdk[flask]
tensorflow==1.13.2

#
# dev packages
#
pytest
"""
    # Both representations are expected to be identical for this fixture.
    assert pipfile.construct_requirements_in() == expected
    assert pipfile.construct_requirements_txt() == expected
def _instantiate_project(
    requirements: str,
    requirements_locked: Optional[str] = None,
    runtime_environment: Optional[RuntimeEnvironment] = None,
):
    """Create Project instance based on arguments passed to CLI.

    ``requirements`` and ``requirements_locked`` may be either file paths or
    the raw content itself (e.g. gathered from environment variables with
    escaped newlines).
    """
    try:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()
    except (OSError, FileNotFoundError):
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

    if requirements_locked:
        try:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
        except (OSError, FileNotFoundError):
            # When we gather values from env vars, un-escape new lines.
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)
    # A lock is optional; only build one when lock content was supplied.
    pipfile_lock = PipfileLock.from_string(
        requirements_locked, pipfile) if requirements_locked else None
    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment
        or RuntimeEnvironment.from_dict({}),
    )

    return project
def test_pipfile_thoth(self) -> None:
    """Test parsing Pipfile with Thoth specific section."""
    pipfile_path = os.path.join(self.data_dir, "pipfiles", "Pipfile_thoth")
    instance = Pipfile.from_file(pipfile_path)
    assert instance is not None

    # The [thoth] section values must be exposed on the parsed instance.
    assert instance.thoth.allow_prereleases.get("daiquiri") is True
    assert instance.thoth.disable_index_adjustment is True

    expected = {
        "dev-packages": {},
        "packages": {"daiquiri": "*"},
        "requires": {"python_version": "3.7"},
        "source": [{
            "name": "pypi",
            "url": "https://pypi.org/simple",
            "verify_ssl": True,
        }],
        "thoth": {
            "allow_prereleases": {"daiquiri": True},
            "disable_index_adjustment": True,
        },
    }
    assert instance.to_dict() == expected
def get_project(
    self,
    runtime_environment_name: Optional[str] = None,
    *,
    missing_dir_ok: bool = False,
) -> Project:
    """Get the given overlay.

    Builds a ``Project`` for the overlay directory belonging to
    ``runtime_environment_name``, handling both supported requirements
    formats: Pipenv files and requirements.in/requirements.txt (pip-tools).
    """
    path = self.get_overlays_directory(
        runtime_environment_name=runtime_environment_name,
        missing_dir_ok=missing_dir_ok,
    )
    runtime_environment = RuntimeEnvironment.from_dict(
        self.get_runtime_environment(runtime_environment_name))

    if self.requirements_format == "pipenv":
        # Use the Pipfile.lock only when it actually exists on disk.
        pipfile_lock_path: Optional[str] = os.path.join(
            path, "Pipfile.lock")
        if pipfile_lock_path and not os.path.exists(pipfile_lock_path):
            pipfile_lock_path = None

        pipfile_path = os.path.join(path, "Pipfile")
        if not os.path.isfile(pipfile_path):
            # No Pipfile yet: create the overlay directory if needed and
            # write an empty Pipfile so the load below has something to read.
            if not os.path.isdir(path):
                _LOGGER.info("Creating directory structure in %r", path)
                os.makedirs(path, exist_ok=True)

            pipfile = Pipfile.from_dict({})
            pipfile.to_file(path=pipfile_path)

        project = Project.from_files(
            pipfile_path=pipfile_path,
            pipfile_lock_path=pipfile_lock_path,
            runtime_environment=runtime_environment,
            without_pipfile_lock=pipfile_lock_path is None,
        )
    else:
        # pip-tools layout: prefer requirements.in, fall back to
        # requirements.txt for direct dependencies.
        requirements_in_file_path = os.path.join(path, "requirements.in")
        if not os.path.isfile(requirements_in_file_path):
            requirements_txt_file_path = os.path.join(
                path, "requirements.txt")
            if os.path.isfile(requirements_txt_file_path):
                _LOGGER.warning("Using %r for direct dependencies",
                                requirements_in_file_path)
                project = Project.from_pip_compile_files(
                    requirements_path=requirements_txt_file_path,
                    requirements_lock_path=None,
                    allow_without_lock=True,
                    runtime_environment=runtime_environment,
                )
            else:
                raise NotImplementedError(
                    "No requirements.txt/requirements.in files found, it is recommended to "
                    "use Pipenv files for managing dependencies")
        else:
            project = Project.from_pip_compile_files(
                requirements_path=requirements_in_file_path,
                requirements_lock_path=None,
                allow_without_lock=True,
                runtime_environment=runtime_environment,
            )

    return project
def _instantiate_project(
    requirements: str,
    requirements_locked: typing.Optional[str],
    files: bool,
    runtime_environment: typing.Optional[RuntimeEnvironment] = None,
):
    """Create Project instance based on arguments passed to CLI.

    If ``files`` is true, ``requirements``/``requirements_locked`` are file
    paths to read; otherwise they hold the raw content itself (e.g. gathered
    from environment variables with escaped newlines).
    """
    if files:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()

        if requirements_locked:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
            del requirements_file
    else:
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

        if requirements_locked:
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)
    # The lock is optional; construct it only when lock content is present.
    pipfile_lock = (
        PipfileLock.from_string(requirements_locked, pipfile)
        if requirements_locked
        else None
    )
    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment
        or RuntimeEnvironment.from_dict({}),
    )

    return project
def cli(
    verbose: bool,
    overlays_path: str,
    info: str,
    prescriptions_path: str,
    predictable_stack_abbreviation: str,
) -> None:
    """Create prescriptions out of a predictable stack repository.

    Iterates over the overlay directories found under ``overlays_path`` and
    creates prescription units for each overlay's direct dependencies.
    """
    if verbose:
        _LOGGER.setLevel(logging.DEBUG)

    for ps_name in os.listdir(overlays_path):
        overlays_dir = os.path.join(overlays_path, ps_name)
        # BUG FIX: `if not overlays_dir:` could never trigger because
        # os.path.join of non-empty components always returns a non-empty
        # (truthy) string. The warning text shows the intent: skip entries
        # that are not directories.
        if not os.path.isdir(overlays_dir):
            _LOGGER.warning("Skipping %r: not a directory", overlays_dir)
            continue

        _LOGGER.info("Processing overlay %r", ps_name)
        pipfile = Pipfile.from_file(os.path.join(overlays_dir, "Pipfile"))
        ps_direct_packages = list(pipfile.packages.packages)
        _create_units(
            ps_direct_packages,
            ps_name,
            info,
            prescriptions_path,
            predictable_stack_abbreviation,
        )
def test_add_package_develop(self):
    """Test adding a package to the develop (dev-packages) section."""
    pipfile = Pipfile.from_file(
        os.path.join(self.data_dir, 'pipfiles', 'Pipfile_test1'))
    pipfile_lock = PipfileLock.from_file(
        os.path.join(self.data_dir, 'pipfiles', 'Pipfile_test1.lock'))
    project = Project(pipfile=pipfile, pipfile_lock=pipfile_lock)

    source = Source(name='foo',
                    url='https://foo.bar',
                    verify_ssl=True,
                    warehouse=False)

    assert 'selinon' not in project.pipfile.dev_packages.packages
    with pytest.raises(InternalError):
        # Trying to add package with source but source is not present in the meta.
        project.add_package('selinon', '==1.0.0', develop=True, source=source)

    # Register the source first, then adding against it succeeds.
    source = project.add_source(url='https://foo.bar')
    project.add_package('selinon', '==1.0.0', develop=True, source=source)

    assert 'selinon' in project.pipfile.dev_packages.packages
    assert project.pipfile.dev_packages['selinon'].version == '==1.0.0'
    assert project.pipfile.dev_packages['selinon'].index == 'foo-bar'
    assert project.pipfile.dev_packages['selinon'].develop is True

    # Do not add the package to the lock - lock has to be explicitly done.
    assert 'selinon' not in project.pipfile_lock.dev_packages.packages
def _default_requirements_handler(args, params: dict = None, **kwargs) -> tuple:
    """Return script and params to be executed on `requirements` or `dep` command.

    :param args: parsed argparse Namespace for the magic command
    :param params: optional dict of parameters forwarded to the script
    :return: 2-tuple ``(script, params)``
        (the previous ``-> str`` annotation was wrong — the function has
        always returned both values)
    """
    params = params or dict()

    default_command = "get"
    try:
        command = f"{args.command}" if args.command is not None else default_command
    except AttributeError:
        # %requirements is an alias to 'get' if no command is provided
        command = default_command

    if command in ["get", "add"] and getattr(args, "from_file", None) is not None:
        # read the requirements from the file and change the command to set
        with args.from_file as f:
            requirements = Pipfile.from_string(f.read()).to_dict()

        setattr(args, "requirements", requirements)
        command = "set"

    script = SCRIPT_TEMPLATE % command

    # Strip quote characters from string arguments so they embed safely in the
    # generated script. Use a new name instead of shadowing the ``args``
    # Namespace, which the original code clobbered here.
    arg_dict = {
        arg: re.sub("[\"']", "", v) if isinstance(v, str) else v
        for arg, v in args._get_kwargs()
    }

    def _serialize(arg):
        # Fallback serializer for values json cannot encode natively.
        return repr(arg)  # noqa

    params.update(kwargs)
    params["magic_args"] = json.dumps(arg_dict, default=_serialize)

    return script, params
def test_add_package_develop(self):
    """Test add package develop."""
    fixtures = os.path.join(self.data_dir, "pipfiles")
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(fixtures, "Pipfile_test1")),
        pipfile_lock=PipfileLock.from_file(
            os.path.join(fixtures, "Pipfile_test1.lock")),
    )

    source = Source(name="foo",
                    url="https://foo.bar",
                    verify_ssl=True,
                    warehouse=False)

    assert "selinon" not in project.pipfile.dev_packages.packages
    with pytest.raises(InternalError):
        # Trying to add package with source but source is not present in the meta.
        project.add_package("selinon", "==1.0.0", develop=True, source=source)

    source = project.add_source(url="https://foo.bar")
    project.add_package("selinon", "==1.0.0", develop=True, source=source)

    assert "selinon" in project.pipfile.dev_packages.packages
    added = project.pipfile.dev_packages["selinon"]
    assert added.version == "==1.0.0"
    assert added.index.name == "foo-bar"
    assert added.develop is True

    # Do not add the package to the lock - lock has to be explicitly done.
    assert "selinon" not in project.pipfile_lock.dev_packages.packages
def estimate(recommendation_type: str, pipfile: str) -> None:
    """Estimate how big the dependency graph would be.

    ``pipfile`` may be either a path to a Pipfile or the Pipfile content
    itself. The estimate is communicated via the process exit code.
    """
    if recommendation_type == "latest":
        # "latest" resolution is cheap — always report the small bucket.
        sys.exit(_BUCKET_SMALL_SIZE)
    elif recommendation_type in _RESOURCE_HUNGRY_RECOMMENDATION_TYPES:
        if os.path.isfile(pipfile):
            pipfile_instance = Pipfile.from_file(pipfile)
        else:
            pipfile_instance = Pipfile.from_string(pipfile)

        # NOTE(review): no sys.exit on this branch — presumably _do_estimate
        # terminates the process itself; confirm against its implementation.
        _do_estimate(recommendation_type, pipfile_instance)
    else:
        _LOGGER.error(
            "Unknown recommendation type %r, assuming largest bucket size",
            recommendation_type)
        sys.exit(_BUCKET_LARGE_SIZE)
def test_from_string(self, pipfile: str):
    """Test parsing a Pipfile from its raw string content."""
    with open(os.path.join(self.data_dir, 'pipfiles', pipfile), 'r') as pipfile_file:
        content = pipfile_file.read()

    instance = Pipfile.from_string(content)
    # Sometimes toml does not preserve inline tables causing to_string() fail. However, we produce valid toml.
    assert instance.to_dict() == toml.loads(content)
def test_add_requirement_unknown_index(self) -> None:
    """Test adding requirement with unknown index raises an exception."""
    pipfile_path = os.path.join(
        self.data_dir, "pipfiles", "Pipfile_requirements")
    pipfile = Pipfile.from_file(pipfile_path)

    # The package must be absent beforehand so the failure comes from the
    # unknown index, not from a duplicate entry.
    assert pipfile.packages.get("thamos") is None

    with pytest.raises(SourceNotFoundError):
        pipfile.add_requirement("thamos", index_url="some-unknown-index")
def test_add_requirement_already_present(self) -> None:
    """Test adding an already existing requirement raises an exception."""
    pipfile_path = os.path.join(
        self.data_dir, "pipfiles", "Pipfile_requirements")
    pipfile = Pipfile.from_file(pipfile_path)

    # tensorflow is already listed in the fixture, so re-adding must fail.
    assert pipfile.packages.get("tensorflow") is not None

    with pytest.raises(PackageVersionAlreadyPresentError):
        pipfile.add_requirement("tensorflow")
def _instantiate_project(
    requirements: str,
    requirements_locked: Optional[str] = None,
    *,
    runtime_environment: Optional[RuntimeEnvironment] = None,
    constraints: Optional[str] = None,
):
    """Create Project instance based on arguments passed to CLI.

    ``requirements``, ``requirements_locked`` and ``constraints`` may each be
    a file path or the raw content itself (e.g. supplied via environment
    variables with escaped newlines).
    """
    try:
        with open(requirements, "r") as requirements_file:
            requirements = requirements_file.read()
    except (OSError, FileNotFoundError):
        # When we gather values from env vars, un-escape new lines.
        requirements = requirements.replace("\\n", "\n")

    if requirements_locked:
        try:
            with open(requirements_locked, "r") as requirements_file:
                requirements_locked = requirements_file.read()
        except (OSError, FileNotFoundError):
            # When we gather values from env vars, un-escape new lines.
            requirements_locked = requirements_locked.replace("\\n", "\n")

    pipfile = Pipfile.from_string(requirements)

    # The literal string "null" is treated the same as no lock supplied.
    pipfile_lock = None
    if requirements_locked and requirements_locked != "null":
        pipfile_lock = PipfileLock.from_string(requirements_locked, pipfile)

    constraints_instance = None
    if constraints:
        try:
            with open(constraints, "r") as constraints_file:
                constraints_content = constraints_file.read()
        except (OSError, FileNotFoundError):
            # When we gather values from env vars, un-escape new lines.
            constraints_content = constraints.replace("\\n", "\n")

        # Constraints may be supplied as JSON or as plain requirements text.
        try:
            constraints_instance = Constraints.from_dict(
                json.loads(constraints_content))
        except json.decoder.JSONDecodeError:
            constraints_instance = Constraints.from_string(constraints_content)

    runtime_environment = runtime_environment or RuntimeEnvironment.from_dict(
        {})
    if not runtime_environment.platform:
        runtime_environment.platform = _DEFAULT_PLATFORM

    project = Project(
        pipfile=pipfile,
        pipfile_lock=pipfile_lock,
        runtime_environment=runtime_environment,
        constraints=constraints_instance or Constraints(),
    )

    return project
async def lock_using_pipenv(self, input_data):
    """Lock and install dependencies using pipenv."""
    # Convert the serialized requirements into Pipfile (TOML) content.
    parsed_requirements = json.loads(input_data["requirements"])
    pipfile_string = Pipfile.from_dict(parsed_requirements).to_string()

    rc, output = lock_dependencies_with_pipenv(
        kernel_name=input_data["kernel_name"],
        pipfile_string=pipfile_string,
    )
    return rc, output
def test_add_requirement_no_version_specifier(self) -> None:
    """Test adding requirement to requirements section without any version specifier."""
    pipfile_path = os.path.join(
        self.data_dir, "pipfiles", "Pipfile_requirements")
    pipfile = Pipfile.from_file(pipfile_path)

    # Not present before the call...
    assert pipfile.packages.get("thamos") is None

    pipfile.add_requirement("thamos")

    # ...and recorded with a wildcard version afterwards.
    added_package_version = pipfile.packages.get("thamos")
    assert added_package_version.version == "*"
def test_add_requirement_url(self) -> None:
    """Test adding a URL-based requirement raises NotImplementedError."""
    # NOTE: the previous docstring was copy-pasted from the force-add test;
    # this test exercises PEP 508 direct-URL requirements, which are
    # unsupported even with force=True.
    pipfile = Pipfile.from_file(
        os.path.join(self.data_dir, "pipfiles", "Pipfile_requirements"))

    assert pipfile.packages.get("thamos") is None
    with pytest.raises(NotImplementedError):
        pipfile.add_requirement(
            "thoth-python @ https://github.com/thoth-station/python",
            force=True)
def test_from_string(self, pipfile_lock: str):
    """Test from string."""
    lock_path = os.path.join(self.data_dir, "pipfiles", pipfile_lock)
    pipfile_path = os.path.join(
        self.data_dir, "pipfiles", pipfile_lock[: -len(".lock")])

    with open(lock_path, "r") as lock_file:
        content = lock_file.read()
    with open(pipfile_path, "r") as pipfile_file:
        pipfile_content = pipfile_file.read()

    pipfile_instance = Pipfile.from_string(pipfile_content)
    instance = PipfileLock.from_string(content, pipfile=pipfile_instance)

    # Serializing back must reproduce the original lock file verbatim.
    assert instance.to_string() == content
def test_add_requirement_already_present_force(self) -> None:
    """Test force adding an already existing requirement replaces the entry."""
    # NOTE: the previous docstring claimed an exception is raised; the
    # assertions below show force=True succeeds and replaces the pinned
    # entry with an unpinned one.
    pipfile = Pipfile.from_file(
        os.path.join(self.data_dir, "pipfiles", "Pipfile_requirements"))

    existing_tf = pipfile.packages.get("tensorflow")
    assert existing_tf is not None
    assert existing_tf.version != "*"

    pipfile.add_requirement("tensorflow", force=True)

    added_tf = pipfile.packages.get("tensorflow")
    assert added_tf != existing_tf
    assert added_tf.version == "*"
def test_from_string(self, pipfile_lock: str):
    """Test that a Pipfile.lock parsed from a string round-trips verbatim."""
    with open(os.path.join(self.data_dir, 'pipfiles', pipfile_lock), 'r') as pipfile_lock_file:
        content = pipfile_lock_file.read()

    # The matching Pipfile is required to construct a PipfileLock instance.
    with open(
            os.path.join(self.data_dir, 'pipfiles', pipfile_lock[:-len('.lock')]), 'r') as pipfile_file:
        pipfile_content = pipfile_file.read()

    pipfile_instance = Pipfile.from_string(pipfile_content)
    instance = PipfileLock.from_string(content, pipfile=pipfile_instance)
    assert instance.to_string() == content
def test_add_source(self):
    """Test add source."""
    fixtures = os.path.join(self.data_dir, "pipfiles")
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(fixtures, "Pipfile_test1")),
        pipfile_lock=PipfileLock.from_file(
            os.path.join(fixtures, "Pipfile_test1.lock")),
    )

    source = project.add_source(url="https://foo.bar")

    # The source gets a generated name and the very same object is registered
    # in both the Pipfile and the Pipfile.lock metadata.
    assert source.name is not None
    for meta in (project.pipfile.meta, project.pipfile_lock.meta):
        assert source.name in meta.sources
        assert source is meta.sources[source.name]
def test_add_package(self):
    """Test add package."""
    fixtures = os.path.join(self.data_dir, "pipfiles")
    project = Project(
        pipfile=Pipfile.from_file(os.path.join(fixtures, "Pipfile_test1")),
        pipfile_lock=PipfileLock.from_file(
            os.path.join(fixtures, "Pipfile_test1.lock")),
    )

    assert "selinon" not in project.pipfile.packages.packages
    project.add_package("selinon", "==1.0.0")

    assert "selinon" in project.pipfile.packages.packages
    added = project.pipfile.packages["selinon"]
    assert added.version == "==1.0.0"
    assert added.index is None
    assert added.develop is False

    # Do not add the package to the lock - lock has to be explicitly done.
    assert "selinon" not in project.pipfile_lock.packages.packages
def test_add_source(self):
    """Test that adding a source registers it in Pipfile and Pipfile.lock metadata."""
    pipfile = Pipfile.from_file(
        os.path.join(self.data_dir, 'pipfiles', 'Pipfile_test1'))
    pipfile_lock = PipfileLock.from_file(
        os.path.join(self.data_dir, 'pipfiles', 'Pipfile_test1.lock'))
    project = Project(pipfile=pipfile, pipfile_lock=pipfile_lock)

    source = project.add_source(url='https://foo.bar')

    # The same source object is shared by both metadata sections.
    assert source.name is not None
    assert source.name in project.pipfile.meta.sources
    assert source is project.pipfile.meta.sources[source.name]
    assert source.name in project.pipfile_lock.meta.sources
    assert source is project.pipfile_lock.meta.sources[source.name]
def test_pipfile_extras_parsing(self):
    """Test pipfile extras parsing."""
    instance = Pipfile.from_file(
        os.path.join(self.data_dir, "pipfiles", "Pipfile_extras"))
    assert instance is not None

    # The fixture declares exactly one package: selinon with several extras.
    assert len(instance.packages.packages) == 1
    assert "selinon" in instance.packages.packages

    expected_extras = {
        "celery",
        "mongodb",
        "postgresql",
        "redis",
        "s3",
        "sentry",
    }
    package_version = instance.packages.packages["selinon"]
    assert set(package_version.to_dict().pop("extras")) == expected_extras
    assert set(package_version.extras) == expected_extras
def test_add_package(self):
    """Test adding a default (non-develop) package to a project."""
    pipfile = Pipfile.from_file(
        os.path.join(self.data_dir, 'pipfiles', 'Pipfile_test1'))
    pipfile_lock = PipfileLock.from_file(
        os.path.join(self.data_dir, 'pipfiles', 'Pipfile_test1.lock'))
    project = Project(pipfile=pipfile, pipfile_lock=pipfile_lock)

    assert 'selinon' not in project.pipfile.packages.packages
    project.add_package('selinon', '==1.0.0')

    assert 'selinon' in project.pipfile.packages.packages
    assert project.pipfile.packages['selinon'].version == '==1.0.0'
    assert project.pipfile.packages['selinon'].index is None
    assert project.pipfile.packages['selinon'].develop is False

    # Do not add the package to the lock - lock has to be explicitly done.
    assert 'selinon' not in project.pipfile_lock.packages.packages
def create_pipfile_from_packages(packages: list, python_version: str):
    """Create Pipfile from list of packages.

    :param packages: package names; each is added unpinned (version "*")
        against the public PyPI index
    :param python_version: Python version recorded in the Pipfile requires
        section
    :return: the constructed Pipfile instance
    """
    source = Source(url="https://pypi.org/simple", name="pypi", verify_ssl=True)
    pipfile_meta = PipfileMeta(
        sources={"pypi": source},
        requires={"python_version": python_version},
    )

    # Build the package versions in one pass instead of a manual append loop.
    packages_versions = [
        PackageVersion(name=package_name, version="*", develop=False)
        for package_name in packages
    ]

    return Pipfile.from_package_versions(packages=packages_versions, meta=pipfile_meta)
async def lock_using_thoth(self, input_data):
    """Lock dependencies using Thoth service."""
    # Convert the serialized requirements into Pipfile (TOML) content.
    parsed_requirements = json.loads(input_data["requirements"])
    pipfile_string = Pipfile.from_dict(parsed_requirements).to_string()

    # Forward the remaining request parameters straight to the resolver.
    returncode, advise = lock_dependencies_with_thoth(
        config=input_data["thoth_config"],
        kernel_name=input_data["kernel_name"],
        timeout=input_data["thoth_timeout"],
        force=input_data["thoth_force"],
        notebook_content=input_data["notebook_content"],
        pipfile_string=pipfile_string,
    )
    return returncode, advise
def test_add_requirement(self, is_dev: bool) -> None:
    """Test adding requirement to requirements section."""
    pipfile = Pipfile.from_file(
        os.path.join(self.data_dir, "pipfiles", "Pipfile_requirements"))
    assert pipfile.packages.get("thamos") is None

    pipfile.add_requirement(
        "Thamos[thoth,aicoe]==1.12.0; python_version > '3.5'", is_dev=is_dev)

    # The entry lands in dev-packages or packages depending on is_dev.
    section = pipfile.dev_packages if is_dev else pipfile.packages
    added_package_version = section.get("thamos")

    assert added_package_version.develop is is_dev
    assert added_package_version.index is None
    assert added_package_version.hashes == []
    assert added_package_version.name == "thamos"
    assert added_package_version.version == "==1.12.0"
    assert added_package_version.markers == 'python_version > "3.5"'
    assert set(added_package_version.extras) == {"thoth", "aicoe"}
def test_from_final_state(self, context: Context) -> None:
    """Test instantiating product from a final state.

    Sets up a fully resolved state (daiquiri, numpy, tensorflow), mocks the
    graph queries the product construction performs, and verifies the
    resulting project dict including lock content and justifications merged
    from THOTH_ADVISER_METADATA.
    """
    state = State(
        score=0.5,
        resolved_dependencies={
            "daiquiri": ("daiquiri", "1.6.0", "https://pypi.org/simple"),
            "numpy": ("numpy", "1.17.4", "https://pypi.org/simple"),
            "tensorflow": ("tensorflow", "2.0.0", "https://pypi.org/simple"),
        },
        unresolved_dependencies={},
        advised_runtime_environment=RuntimeEnvironment.from_dict({"python_version": "3.6"}),
    )
    state.add_justification(self.JUSTIFICATION_SAMPLE_1)

    pypi = Source("https://pypi.org/simple")
    # Locked package versions returned by the mocked context lookups.
    pv_daiquiri_locked = PackageVersion(name="daiquiri", version="==1.6.0", index=pypi, develop=False)
    pv_numpy_locked = PackageVersion(name="numpy", version="==1.17.4", index=pypi, develop=False)
    pv_tensorflow_locked = PackageVersion(name="tensorflow", version="==2.0.0", index=pypi, develop=False)

    # The .ordered() calls pin the exact sequence of lookups performed when
    # the lock file content is constructed.
    context.should_receive("get_package_version").with_args(
        ("daiquiri", "1.6.0", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_daiquiri_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "daiquiri", "1.6.0", "https://pypi.org/simple"
    ).and_return(["000"]).ordered()

    context.should_receive("get_package_version").with_args(
        ("numpy", "1.17.4", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_numpy_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "numpy", "1.17.4", "https://pypi.org/simple"
    ).and_return(["111"]).ordered()

    context.should_receive("get_package_version").with_args(
        ("tensorflow", "2.0.0", "https://pypi.org/simple"), graceful=False
    ).and_return(pv_tensorflow_locked).ordered()
    context.graph.should_receive("get_python_package_hashes_sha256").with_args(
        "tensorflow", "2.0.0", "https://pypi.org/simple"
    ).and_return(["222"]).ordered()

    # Direct dependencies as stated in the (mocked) project's Pipfile.
    pv_daiquiri = PackageVersion(name="daiquiri", version="*", index=pypi, develop=False)
    pv_tensorflow = PackageVersion(name="tensorflow", version=">=2.0.0", index=pypi, develop=False)

    project = flexmock(
        pipfile=Pipfile.from_string(self._PIPFILE),
        runtime_environment=RuntimeEnvironment.from_dict({"operating_system": {"name": "rhel"}}),
    )
    project.should_receive("iter_dependencies").with_args(with_devel=True).and_return(
        [pv_daiquiri, pv_tensorflow]
    ).once()

    context.project = project
    # Forward dependency mapping: package -> its resolved dependencies.
    context.dependencies = {
        "daiquiri": {
            ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(),
        },
        "numpy": {("numpy", "1.17.4", "https://pypi.org/simple"): set()},
        "tensorflow": {
            ("tensorflow", "2.0.0", "https://pypi.org/simple"): {("numpy", "1.17.4", "https://pypi.org/simple")}
        },
    }
    # Reverse mapping: package -> who depends on it (with environment info).
    context.dependents = {
        "daiquiri": {
            ("daiquiri", "1.6.0", "https://pypi.org/simple"): set(),
        },
        "numpy": {
            ("numpy", "1.17.4", "https://pypi.org/simple"): {
                (
                    ("tensorflow", "2.0.0", "https://pypi.org/simple"),
                    "fedora",
                    "31",
                    "3.7",
                )
            }
        },
        "tensorflow": {("tensorflow", "2.0.0", "https://pypi.org/simple"): set()},
    }

    # The environment marker for the numpy edge ends up in the lock entry.
    context.graph.should_receive("get_python_environment_marker").with_args(
        "tensorflow",
        "2.0.0",
        "https://pypi.org/simple",
        dependency_name="numpy",
        dependency_version="1.17.4",
        os_name="fedora",
        os_version="31",
        python_version="3.7",
        marker_evaluation_result=True,
    ).and_return("python_version >= '3.7'").once()

    # Justifications passed via THOTH_ADVISER_METADATA are prepended to the
    # state's own justification; restore the env var afterwards.
    assert "THOTH_ADVISER_METADATA" not in os.environ
    metadata_justification = {"thoth.adviser": {"justification": [{"bar": "baz"}]}}
    os.environ["THOTH_ADVISER_METADATA"] = json.dumps(metadata_justification)

    try:
        product = Product.from_final_state(state=state, context=context)
    finally:
        os.environ.pop("THOTH_ADVISER_METADATA")

    assert product.score == state.score
    assert product.justification == list(
        chain(metadata_justification["thoth.adviser"]["justification"], state.justification)
    )
    assert product.advised_runtime_environment == state.advised_runtime_environment
    assert product.project.to_dict() == {
        "constraints": [],
        "requirements": {
            "packages": {
                "daiquiri": {"index": "pypi-org-simple", "version": "*"},
                "tensorflow": {"index": "pypi-org-simple", "version": ">=2.0.0"},
            },
            "dev-packages": {},
            "requires": {"python_version": "3.7"},
            "source": [
                {
                    "url": "https://pypi.org/simple",
                    "verify_ssl": True,
                    "name": "pypi-org",
                },
                {
                    "url": "https://pypi.org/simple",
                    "verify_ssl": True,
                    "name": "pypi-org-simple",
                },
            ],
            "thoth": {
                "allow_prereleases": {"black": True},
            },
        },
        "requirements_locked": {
            "_meta": {
                "sources": [
                    {"name": "pypi-org", "url": "https://pypi.org/simple", "verify_ssl": True},
                    {
                        "url": "https://pypi.org/simple",
                        "verify_ssl": True,
                        "name": "pypi-org-simple",
                    },
                ],
                "requires": {"python_version": "3.7"},
                "hash": {"sha256": "6cc8365e799b949fb6cc564cea2d8e0e8a782ab676a006e65abbe14621b93381"},
                "pipfile-spec": 6,
            },
            "default": {
                "daiquiri": {
                    "version": "==1.6.0",
                    "hashes": ["sha256:000"],
                    "index": "pypi-org-simple",
                },
                "numpy": {
                    "version": "==1.17.4",
                    "hashes": ["sha256:111"],
                    "index": "pypi-org-simple",
                    "markers": "python_version >= '3.7'",
                },
                "tensorflow": {
                    "version": "==2.0.0",
                    "hashes": ["sha256:222"],
                    "index": "pypi-org-simple",
                },
            },
            "develop": {},
        },
        "runtime_environment": {
            "hardware": {"cpu_family": None, "cpu_model": None, "gpu_model": None},
            "operating_system": {"name": "rhel", "version": None},
            "python_version": None,
            "cuda_version": None,
            "labels": None,
            "cudnn_version": None,
            "name": None,
            "platform": None,
            "base_image": None,
            "mkl_version": None,
            "openblas_version": None,
            "openmpi_version": None,
            "recommendation_type": None,
        },
    }
def requirements(self, line: str, cell: str = None):
    """Notebook requirements management.

    Line magic: Print notebook requirements

    :param line: arguments to `%requirements` magic (see `%requiremnts --help`)
    :param cell: <empty>
    :return: None

    Cell magic: Set notebook requirements

    :param cell: Notebook requirements in Pipfile format.
    :return: None
    """
    params = dict()

    if cell is not None:
        # Cell magic: the cell body is a Pipfile — parse it and emit a
        # 'set' call to the nbrequirements frontend.
        requirements = Pipfile.from_string(cell).to_dict()

        script = """
        require(['nbrequirements'], ({cli, version}) => {
            cli('set', { requirements: $$requirements })
        })
        """
        params["requirements"] = json.dumps(requirements)
    else:
        # Line magic: build the argparse CLI for %requirements.
        parser = MagicParser(
            prog="%requirements",
            description="""
            Jupyter magic for managing notebook requirements.
            """,
        )
        # Subcommands without an explicit handler fall back to this default
        # (argparse merges top-level set_defaults into the namespace).
        parser.set_defaults(func=_default_requirements_handler)

        # main
        parser.add_argument(
            "-I",
            "--ignore-metadata",
            action="store_true",
            help="Whether to ignore embedded notebook metadata.",
        )
        parser.add_argument(
            "-f",
            "--from-file",
            type=argparse.FileType("r", encoding="utf-8"),
            help="Load requirements from a Pipfile.",
        )
        parser.add_argument(
            "--to-json",
            action="store_true",
            help="Whether to display output in JSON format.",
        )
        parser.add_argument(
            "--to-file",
            action="store_true",
            help="Whether to store output to file.",
        )
        parser.add_argument(
            "-w",
            "--overwrite",
            action="store_true",
            help="Whether to overwrite existing file.",
        )

        subparsers = parser.add_subparsers(dest="command")

        # command: add
        parser_add = subparsers.add_parser(
            "add",
            description=
            "Add dependency to notebook metadata without installing it.",
        )
        parser_add.add_argument(
            "-d",
            "--dev",
            action="store_true",
            help="Whether to store the dependency as dev-package.",
        )
        parser_add.add_argument("-v",
                                "--version",
                                type=str,
                                default="*",
                                help="Version constraint.")
        parser_add.add_argument(
            "-i",
            "--index",
            type=str,
            default="pypi",
            help=("Index (source name) for this dependency."
                  "NOTE: The source of that name must be present!"),
        )
        parser_add.add_argument(
            "--sync",
            action="store_true",
            help="Whether to sync notebook metadata with the Pipfile.",
        )
        parser_add.add_argument(
            "dependency",
            type=str,
            help="The dependency to be added to notebook metadata.",
        )
        parser_add.set_defaults(func=_default_requirements_handler)
        parser_add.add_argument(
            "--alias",
            type=str,
            help=("Alias of a package."
                  "This is useful if package name differs from import."))

        # command: add-source
        # NOTE(review): no set_defaults(func=...) here — relies on the
        # top-level default handler; confirm this is intentional.
        parser_source = subparsers.add_parser(
            "add-source",
            description="Add source index to the notebook metadata.")
        parser_source.add_argument("--name",
                                   type=str,
                                   help="Name for this index.")
        parser_source.add_argument("--url",
                                   type=str,
                                   help="URL for this index.")
        parser_source.add_argument(
            "--verify-ssl",
            type=bool,
            default=False,
            help="Whether to set verify_ssl=true",
        )
        parser_source.add_argument(
            "--sync",
            action="store_true",
            help="Whether to sync notebook metadata with the Pipfile.",
        )

        # command: lock
        parser_lock = subparsers.add_parser(
            "lock",
            add_help=False,
            parents=[parser],
            description="Lock (pin down) dependencies.",
        )
        parser_lock.add_argument(
            "--engine",
            choices=["thoth", "pipenv"],
            help="Overwrite default dependency resolution engine.",
        )
        parser_lock.add_argument(
            "-d",
            "--dev-packages",
            action="store_true",
            help=("Install both develop and default packages.\n"
                  "Only applicable when `engine='pipenv'`"),
        )
        parser_lock.add_argument(
            "--pre-releases",
            action="store_true",
            help=
            "Allow pre-releases.\nOnly applicable when `engine='pipenv'`",
        )
        parser_lock.add_argument(
            "--sync",
            action="store_true",
            help="Whether to sync notebook metadata with the Pipfile.lock.",
        )
        parser_lock.set_defaults(func=_requirements_lock)

        # command: config
        parser_config = subparsers.add_parser(
            "config",
            add_help=False,
            parents=[parser],
            description="Generate Thoth config.",
        )
        parser_config.set_defaults(func=_requirements_config)

        # command: install
        parser_install = subparsers.add_parser(
            "install",
            description=
            ("Installs provided packages and adds them to Pipfile, "
             "or (if no packages are given), installs all packages from Pipfile.lock."
             ),
        )
        parser_install.add_argument(
            "requirements",
            type=str,
            nargs=argparse.REMAINDER,
            help=(
                "[optional] Packages to be installed. "
                "If not provided, install all packages from Pipfile.lock."
            ),
        )
        parser_install.add_argument(
            "-d",
            "--dev",
            action="store_true",
            help="Install both develop and default packages.",
        )
        parser_install.add_argument("--pre",
                                    action="store_true",
                                    help="Allow pre-releases.")
        parser_install.set_defaults(func=_default_requirements_handler)

        # command: ensure
        parser_ensure = subparsers.add_parser(
            "ensure",
            description="\n".join(
                wrap(
                    "Make sure that the notebook metadata and local files are in sync\n"
                    "with the virtual environment and the Jupyter kernel.\n\n"
                    "Ensure gets a project into a complete, reproducible, and likely compilable state.",
                    width=200,
                    replace_whitespace=False,
                )),
            formatter_class=argparse.RawTextHelpFormatter,
        )
        parser_ensure.add_argument(
            "--engine",
            choices=["thoth", "pipenv"],
            help="Overwrite default dependency resolution engine.",
        )
        parser_ensure.add_argument(
            "-d",
            "--dev-packages",
            action="store_true",
            help=("Install both develop and default packages.\n"
                  "Only applicable when `engine='pipenv'`"),
        )
        parser_ensure.add_argument(
            "--pre-releases",
            action="store_true",
            help=
            "Allow pre-releases.\nOnly applicable when `engine='pipenv'`",
        )
        parser_ensure.add_argument(
            "-I",
            "--skip-kernel",
            action="store_true",
            help="Skip installation of the Jupyter kernel.",
        )
        parser_ensure.add_argument(
            "name",
            type=str,
            nargs="?",
            help=("[optional] Kernel name, otherwise use notebook name.\n"
                  "Only applicable when `--skip-kernel=false`."),
        )
        parser_ensure.set_defaults(func=_default_requirements_handler)

        # command: clear
        parser_clear = subparsers.add_parser(
            "clear",
            description=
            "Clear notebook requirements and locked requirements metadata.",
        )
        parser_clear.set_defaults(func=_default_requirements_handler)

        opts = line.split()
        try:
            args = parser.parse_args(opts)
        except MagicParserError as exc:
            print(f"\n{exc.args[0]}", file=sys.stderr)
            return

        if any([opt in {"-h", "--help"} for opt in opts]):
            # print help and return
            return

        # Dispatch to the selected subcommand's handler; handlers return the
        # script to execute plus its parameters.
        script, params = args.func(args)

    return executejs(script, **params)