def get_project(
    self,
    runtime_environment_name: Optional[str] = None,
    *,
    missing_dir_ok: bool = False,
) -> Project:
    """Get the given overlay."""
    path = self.get_overlays_directory(
        runtime_environment_name=runtime_environment_name,
        missing_dir_ok=missing_dir_ok,
    )
    runtime_environment = RuntimeEnvironment.from_dict(
        self.get_runtime_environment(runtime_environment_name)
    )

    if self.requirements_format == "pipenv":
        pipfile_lock_path: Optional[str] = os.path.join(path, "Pipfile.lock")
        if pipfile_lock_path and not os.path.exists(pipfile_lock_path):
            pipfile_lock_path = None

        pipfile_path = os.path.join(path, "Pipfile")
        if not os.path.isfile(pipfile_path):
            if not os.path.isdir(path):
                _LOGGER.info("Creating directory structure in %r", path)
                os.makedirs(path, exist_ok=True)

            pipfile = Pipfile.from_dict({})
            pipfile.to_file(path=pipfile_path)

        project = Project.from_files(
            pipfile_path=pipfile_path,
            pipfile_lock_path=pipfile_lock_path,
            runtime_environment=runtime_environment,
            without_pipfile_lock=pipfile_lock_path is None,
        )
    else:
        requirements_in_file_path = os.path.join(path, "requirements.in")
        if not os.path.isfile(requirements_in_file_path):
            requirements_txt_file_path = os.path.join(path, "requirements.txt")
            if os.path.isfile(requirements_txt_file_path):
                _LOGGER.warning("Using %r for direct dependencies", requirements_in_file_path)
                project = Project.from_pip_compile_files(
                    requirements_path=requirements_txt_file_path,
                    requirements_lock_path=None,
                    allow_without_lock=True,
                    runtime_environment=runtime_environment,
                )
            else:
                raise NotImplementedError(
                    "No requirements.txt/requirements.in files found, it is recommended to "
                    "use Pipenv files for managing dependencies"
                )
        else:
            project = Project.from_pip_compile_files(
                requirements_path=requirements_in_file_path,
                requirements_lock_path=None,
                allow_without_lock=True,
                runtime_environment=runtime_environment,
            )

    return project

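# A minimal usage sketch (not part of the original module): it assumes get_project()
# lives on thamos' configuration object, importable as `from thamos.config import config`;
# adjust the import if the method is defined on a different class in your tree.
def _example_get_project() -> None:
    """Illustrate how get_project() above could be called (hypothetical helper)."""
    from thamos.config import config as thoth_config  # assumption: thamos is installed

    project = thoth_config.get_project(
        runtime_environment_name=None,  # use the default runtime environment
        missing_dir_ok=True,  # tolerate a not-yet-created overlays directory
    )
    # Project comes from thoth-python; its Pipfile can be serialized back to TOML.
    print(project.pipfile.to_string())
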
async def lock_using_pipenv(self, input_data):
    """Lock and install dependencies using pipenv."""
    kernel_name: str = input_data["kernel_name"]
    requirements: dict = json.loads(input_data["requirements"])

    pipfile_string = Pipfile.from_dict(requirements).to_string()

    returncode, result = lock_dependencies_with_pipenv(
        kernel_name=kernel_name, pipfile_string=pipfile_string
    )

    return returncode, result

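# A hedged sketch of the payload lock_using_pipenv() expects: only the "kernel_name" and
# "requirements" keys come from the code above; the concrete values and the Pipfile-style
# requirements layout are illustrative assumptions.
def _example_lock_using_pipenv_payload() -> dict:
    """Build a hypothetical input_data payload for lock_using_pipenv()."""
    import json

    requirements = {
        "packages": {"flask": "*"},
        "dev-packages": {},
        "requires": {"python_version": "3.8"},
        "source": [
            {"name": "pypi", "url": "https://pypi.org/simple", "verify_ssl": True}
        ],
    }
    return {
        "kernel_name": "jupyterlab_requirements",
        "requirements": json.dumps(requirements),  # the handler expects a JSON string
    }
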
async def lock_using_thoth(self, input_data):
    """Lock dependencies using Thoth service."""
    config: str = input_data["thoth_config"]
    kernel_name: str = input_data["kernel_name"]
    timeout: int = input_data["thoth_timeout"]
    force: bool = input_data["thoth_force"]
    notebook_content: str = input_data["notebook_content"]
    requirements: dict = json.loads(input_data["requirements"])

    pipfile_string = Pipfile.from_dict(requirements).to_string()

    returncode, advise = lock_dependencies_with_thoth(
        config=config,
        kernel_name=kernel_name,
        timeout=timeout,
        force=force,
        notebook_content=notebook_content,
        pipfile_string=pipfile_string,
    )

    return returncode, advise

async def post(self):
    """Lock dependencies using Thoth service."""
    initial_path = Path.cwd()
    input_data = self.get_json_body()
    config: str = input_data["thoth_config"]
    kernel_name: str = input_data["kernel_name"]
    timeout: int = input_data["thoth_timeout"]
    notebook_content: str = input_data["notebook_content"]
    requirements: dict = json.loads(input_data["requirements"])

    # Get origin before changing path
    origin: str = _get_origin()
    _LOGGER.info("Origin identified by thamos: %r", origin)

    home = Path.home()
    complete_path = home.joinpath(".local/share/thoth/kernels")
    env_path = complete_path.joinpath(kernel_name)

    # Delete and recreate folder
    if not env_path.exists():
        _ = subprocess.call(f"rm -rf ./{kernel_name} ", shell=True, cwd=complete_path)

    env_path.mkdir(parents=True, exist_ok=True)
    os.chdir(env_path)

    _LOGGER.info("Resolution engine used: thoth")

    pipfile_string = Pipfile.from_dict(requirements).to_string()

    _LOGGER.info("Current path: %r", env_path.as_posix())
    _LOGGER.info(f"Input Pipfile: \n{pipfile_string}")

    advise = {"requirements": {}, "requirement_lock": {}, "error": False}

    temp = tempfile.NamedTemporaryFile(prefix="jl_thoth_", mode="w+t")

    try:
        adviser_inputs = {"pipfile": pipfile_string, "config": config, "origin": origin}
        _LOGGER.info("Adviser inputs are: %r", adviser_inputs)

        temp.write(notebook_content)
        _LOGGER.info("path to temporary file is: %r", temp.name)

        response = advise_using_config(
            pipfile=pipfile_string,
            pipfile_lock="",  # TODO: Provide Pipfile.lock retrieved?
            force=False,  # TODO: Provide force input from user?
            config=config,
            origin=origin,
            nowait=False,
            source_type=ThothAdviserIntegrationEnum.JUPYTER_NOTEBOOK,
            no_static_analysis=False,
            timeout=timeout,
            src_path=temp.name,
        )
        _LOGGER.info(f"Response: {response}")

        if not response:
            raise Exception("Analysis was not successful.")

        result, error_result = response

        if error_result:
            advise["error"] = True
        else:
            # Use report of the best one, therefore index 0
            if result["report"] and result["report"]["products"]:
                justifications = result["report"]["products"][0]["justification"]
                _LOGGER.info(f"Justification: {justifications}")

                stack_info = result["report"]["stack_info"]
                _LOGGER.debug(f"Stack info {stack_info}")

                pipfile = result["report"]["products"][0]["project"]["requirements"]
                pipfile_lock = result["report"]["products"][0]["project"]["requirements_locked"]

                advise = {"requirements": pipfile, "requirement_lock": pipfile_lock, "error": False}

    except Exception as api_error:
        _LOGGER.warning(f"error locking dependencies using Thoth: {api_error}")
        advise["error"] = True
    finally:
        temp.close()

    _LOGGER.info(f"advise received: {advise}")

    if not advise["error"]:
        try:
            requirements_format = "pipenv"
            project = Project.from_dict(pipfile, pipfile_lock)

            pipfile_path = env_path.joinpath("Pipfile")
            pipfile_lock_path = env_path.joinpath("Pipfile.lock")

            if requirements_format == "pipenv":
                _LOGGER.info("Writing to Pipfile/Pipfile.lock in %r", env_path.as_posix())
                project.to_files(pipfile_path=pipfile_path, pipfile_lock_path=pipfile_lock_path)

        except Exception as e:
            _LOGGER.warning("Requirements files have not been stored successfully %r", e)

    os.chdir(initial_path)
    self.finish(json.dumps(advise))

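# A hedged sketch of the JSON body this handler reads via get_json_body(); the key names
# ("thoth_config", "kernel_name", "thoth_timeout", "notebook_content", "requirements") come from
# the handler above, while the concrete values are illustrative assumptions only.
def _example_thoth_lock_request_body() -> dict:
    """Build a hypothetical request body for the Thoth locking handler above."""
    import json

    return {
        "thoth_config": "host: khemenu.thoth-station.ninja\n",  # contents of the user's .thoth.yaml (assumed)
        "kernel_name": "jupyterlab_requirements",
        "thoth_timeout": 180,  # seconds the adviser is given to respond (assumed value)
        "notebook_content": "{}",  # serialized notebook passed for static analysis (placeholder)
        "requirements": json.dumps({"packages": {"flask": "*"}, "dev-packages": {}}),
    }
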
def parse_adviser_output() -> None:
    """Investigate on unresolved packages in adviser output."""
    adviser_run_path = Path(os.environ["FILE_PATH"])

    file_found = True
    unresolved_found = True

    unresolved_packages = []
    packages_to_solve = {}

    if not adviser_run_path.exists():
        _LOGGER.warning(f"Cannot find the file on this path: {adviser_run_path}")
        file_found = False

    if file_found:
        with open(adviser_run_path, "r") as f:
            content = json.load(f)

        report = content["result"]["report"]

        if report:
            errors_details = report.get("_ERROR_DETAILS")
            if errors_details:
                unresolved_packages = errors_details["unresolved"]

        if not unresolved_packages:
            _LOGGER.warning("No packages to be solved with priority identified.")
            unresolved_found = False

        if unresolved_found:
            _LOGGER.info(f"Identified the following unresolved packages: {unresolved_packages}")

            parameters = content["result"]["parameters"]
            runtime_environment = parameters["project"].get("runtime_environment")

            solver = OpenShift.obtain_solver_from_runtime_environment(
                runtime_environment=runtime_environment
            )

            requirements = parameters["project"].get("requirements")

            pipfile = Pipfile.from_dict(requirements)
            packages = pipfile.packages.packages
            dev_packages = pipfile.dev_packages.packages

            for package_name in unresolved_packages:
                if package_name in packages:
                    packages_to_solve[package_name] = packages[package_name]

                if package_name in dev_packages:
                    packages_to_solve[package_name] = dev_packages[package_name]

            _LOGGER.info(f"Unresolved packages identified: {packages_to_solve}")

    output_messages = []

    for package, package_info in packages_to_solve.items():
        message_input = {
            "component_name": {"type": "str", "value": __COMPONENT_NAME__},
            "service_version": {"type": "str", "value": __service_version__},
            "package_name": {"type": "Dict", "value": package_info.name},
            "package_version": {"type": "str", "value": package_info.version},
            "index_url": {"type": "str", "value": package_info.index},
            "solver": {"type": "int", "value": solver},
        }

        # We store the message to put in the output file here.
        output_messages.append(
            {
                "topic_name": "thoth.investigator.unresolved-package",
                "message_contents": message_input,
            }
        )

    # Store the messages that need to be sent to a file.
    store_messages(output_messages)

def parse_adviser_output(file_test_path: Optional[Path] = None) -> None:
    """Investigate on unresolved packages."""
    if file_test_path:
        _LOGGER.debug("Dry run..")
        adviser_run_path = file_test_path
    else:
        adviser_run_path = Path(os.environ["FILE_PATH"])

    if not adviser_run_path.exists():
        raise FileNotFoundError(f"Cannot find the file on this path: {adviser_run_path}")

    with open(adviser_run_path, "r") as f:
        content = json.load(f)

    unresolved_packages = []
    report = content["result"]["report"]

    if report:
        errors_details = report.get("_ERROR_DETAILS")
        if errors_details:
            unresolved_packages = errors_details["unresolved"]

    if not unresolved_packages:
        _LOGGER.warning("No packages to be solved with priority identified.")
        sys.exit(2)

    parameters = content["result"]["parameters"]
    runtime_environment = parameters["project"].get("runtime_environment")

    solver = OpenShift.obtain_solver_from_runtime_environment(
        runtime_environment=runtime_environment
    )

    requirements = parameters["project"].get("requirements")

    pipfile = Pipfile.from_dict(requirements)
    packages = pipfile.packages.packages
    dev_packages = pipfile.dev_packages.packages

    packages_to_solve = {}
    for package_name in unresolved_packages:
        if package_name in packages:
            packages_to_solve[package_name] = packages[package_name]

        if package_name in dev_packages:
            packages_to_solve[package_name] = dev_packages[package_name]

    _LOGGER.info(f"Unresolved packages identified: {packages_to_solve}")

    output_messages = []

    for package, package_info in packages_to_solve.items():
        message_input = {
            "component_name": {"type": "str", "value": __COMPONENT_NAME__},
            "service_version": {"type": "str", "value": __service_version__},
            "package_name": {"type": "Dict", "value": package_info.name},
            "package_version": {"type": "str", "value": package_info.version},
            "index_url": {"type": "str", "value": package_info.index},
            "solver": {"type": "int", "value": solver},
        }

        # We store the message to put in the output file here.
        output_messages.append(
            {
                "topic_name": "thoth.investigator.unresolved-package",
                "message_contents": message_input,
            }
        )

    # Store the messages that need to be sent to a file.
    with open("/mnt/workdir/messages_to_be_sent.json", "w") as json_file:
        json.dump(output_messages, json_file)

    if output_messages:
        _LOGGER.info(f"Successfully stored file with messages to be sent: {output_messages}")

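# A small, hedged sketch of how the file written by parse_adviser_output() above could be
# consumed: the path and the "topic_name"/"message_contents" layout come from the code above,
# while the consuming loop itself is an illustrative assumption.
def _example_read_stored_messages() -> None:
    """Read back the unresolved-package messages stored by parse_adviser_output()."""
    import json

    with open("/mnt/workdir/messages_to_be_sent.json", "r") as json_file:
        messages = json.load(json_file)

    for message in messages:
        contents = message["message_contents"]
        print(
            message["topic_name"],
            contents["package_name"]["value"],
            contents["package_version"]["value"],
        )
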
async def post(self):
    """Lock and install dependencies using pipenv."""
    initial_path = Path.cwd()
    input_data = self.get_json_body()
    kernel_name: str = input_data["kernel_name"]
    requirements: dict = json.loads(input_data["requirements"])

    home = Path.home()
    complete_path = home.joinpath(".local/share/thoth/kernels")
    env_path = complete_path.joinpath(kernel_name)

    # Delete and recreate folder
    if not env_path.exists():
        _ = subprocess.call(f"rm -rf ./{kernel_name} ", shell=True, cwd=complete_path)

    env_path.mkdir(parents=True, exist_ok=True)

    result = {"requirements_lock": "", "error": False}

    # Create virtualenv
    cli_run([str(env_path)])

    pipfile_path = env_path.joinpath("Pipfile")
    pipfile_string = Pipfile.from_dict(requirements).to_string()

    _LOGGER.info("Resolution engine used: pipenv")

    with open(pipfile_path, "w") as pipfile_file:
        pipfile_file.write(pipfile_string)

    _LOGGER.info(f"kernel path: {env_path}")
    _LOGGER.info(f"Input Pipfile: \n{pipfile_string}")

    try:
        # TODO: check if pipenv is installed
        subprocess.run(
            f". {kernel_name}/bin/activate && cd {kernel_name} && pip install pipenv",
            cwd=complete_path,
            shell=True,
        )
    except Exception as pipenv_error:
        _LOGGER.warning("error installing pipenv: %r", pipenv_error)
        result["error"] = True
        os.chdir(initial_path)
        self.finish(json.dumps(result))
        # Abort here so the response is not finished twice.
        return

    try:
        subprocess.run(
            f". {kernel_name}/bin/activate && cd {kernel_name} && pipenv lock",
            env=dict(os.environ, PIPENV_CACHE_DIR="/tmp"),
            cwd=complete_path,
            shell=True,
        )
    except Exception as pipenv_error:
        _LOGGER.warning("error locking dependencies using Pipenv: %r", pipenv_error)
        result["error"] = True

    os.chdir(env_path)

    if not result["error"]:
        pipfile_lock_path = env_path.joinpath("Pipfile.lock")

        if pipfile_lock_path.exists():
            with open(pipfile_lock_path, "r") as pipfile_lock_file:
                pipfile_lock_str = pipfile_lock_file.read()

            pipfile = Pipfile.from_string(pipfile_string)
            pipfile_lock: PipfileLock = PipfileLock.from_string(pipfile_lock_str, pipfile=pipfile)

            result["requirements_lock"] = pipfile_lock.to_dict()
            _LOGGER.debug(f"result from pipenv received: {result}")
        else:
            _LOGGER.warning("Pipfile.lock cannot be found at: %r", str(pipfile_lock_path))
            result["error"] = True

    os.chdir(initial_path)
    self.finish(json.dumps(result))
