def write_to_bento(self, bento_fs: FS, build_ctx: str):
    """Write Docker build files into the bento's ``env/docker`` folder.

    Renders the Dockerfile template with the resolved base image, copies the
    helper shell scripts, and (optionally) copies a user-provided setup script.

    :param bento_fs: target bento filesystem to write into.
    :param build_ctx: build context directory used to resolve user file paths.
    :raises InvalidArgument: if ``self.setup_script`` does not resolve to a file.
    """
    docker_folder = fs.path.join("env", "docker")
    bento_fs.makedirs(docker_folder, recreate=True)

    dockerfile_path = fs.path.join(docker_folder, "Dockerfile")
    template_file = os.path.join(
        os.path.dirname(__file__), "docker", "Dockerfile.template"
    )
    with open(template_file, "r", encoding="utf-8") as f:
        dockerfile_template = f.read()

    # Use a distinct name for the file handle -- the original code shadowed the
    # `dockerfile` path variable with the open file object inside the `with`.
    with bento_fs.open(dockerfile_path, "w") as dockerfile_out:
        dockerfile_out.write(
            dockerfile_template.format(base_image=self.get_base_image_tag())
        )

    for filename in ["init.sh", "entrypoint.sh"]:
        copy_file_to_fs_folder(
            os.path.join(os.path.dirname(__file__), "docker", filename),
            bento_fs,
            docker_folder,
        )

    if self.setup_script:
        try:
            setup_script = resolve_user_filepath(self.setup_script, build_ctx)
        except FileNotFoundError as e:
            # Chain the original error so the root cause stays in the traceback.
            raise InvalidArgument(f"Invalid setup_script file: {e}") from e
        copy_file_to_fs_folder(setup_script, bento_fs, docker_folder, "setup_script")
def write_to_bento(self, bento_fs: FS, build_ctx: str):
    """Write the conda environment spec into the bento's ``env/conda`` folder.

    A user-provided ``environment_yml`` file takes precedence; otherwise an
    environment file is synthesized from ``self.dependencies`` / ``self.pip``
    / ``self.channels``. Writes nothing if no dependencies are configured.

    :param bento_fs: target bento filesystem to write into.
    :param build_ctx: build context directory used to resolve user file paths.
    """
    conda_folder = fs.path.join("env", "conda")
    bento_fs.makedirs(conda_folder, recreate=True)

    if self.environment_yml is not None:
        environment_yml_file = resolve_user_filepath(
            self.environment_yml, build_ctx)
        copy_file_to_fs_folder(
            environment_yml_file,
            bento_fs,
            conda_folder,
            dst_filename="environment_yml",
        )
        return

    # Copy the list before appending: the original aliased self.dependencies
    # and mutated it in place, so repeated calls kept appending pip entries to
    # the instance's own dependency list.
    deps_list = [] if self.dependencies is None else list(self.dependencies)
    if self.pip is not None:
        deps_list.append(dict(pip=self.pip))  # type: ignore

    if not deps_list:
        return

    yaml_content = dict(dependencies=deps_list)
    yaml_content["channels"] = (
        ["defaults"] if self.channels is None else self.channels
    )
    with bento_fs.open(fs.path.join(conda_folder, "environment_yml"), "w") as f:
        yaml.dump(yaml_content, f)
def _merge_primaries(changes_dict: ChangesDict, src_fs: FS, dst_fs: FS) -> None:
    """Sync changed component directories from ``src_fs`` into ``dst_fs``.

    For every component directory in ``src_fs`` whose LID is marked changed in
    ``changes_dict``: mirror the directory into ``dst_fs`` by removing stale
    subdirectories and files, then copying the source component files across.
    """
    # TODO Not sure that this hits all cases, including removal of
    # files and directories. Think about it.
    for dirpath in src_fs.walk.dirs(search="depth"):
        if not _is_component_path(dirpath):
            continue
        lid = dirpath_to_lid(dirpath)
        if not changes_dict.changed(lid):
            continue
        if not dst_fs.isdir(dirpath):
            dst_fs.makedirs(dirpath)
        src_sub_fs = SubFS(src_fs, dirpath)
        dst_sub_fs = SubFS(dst_fs, dirpath)
        # delete directories in dst that don't exist in src
        for subdirpath in dst_sub_fs.walk.dirs(search="depth"):
            if not src_sub_fs.isdir(subdirpath):
                dst_sub_fs.removetree(subdirpath)
        # delete the files in the destination (if any)
        for filepath in component_files(dst_fs, dirpath):
            dst_sub_fs.remove(filepath)
        # copy the new files across; src_sub_fs is reused from above (the
        # original redundantly constructed an identical SubFS here)
        for filepath in component_files(src_fs, dirpath):
            fs.copy.copy_file(src_sub_fs, filepath, dst_sub_fs, filepath)
def compile_fragment_files(
        self,
        write_fs: FS,
        found_fragments: Iterable[FoundFragment]) -> List[str]:
    """
    Compile fragment files into `parent_dir`.

    :param write_fs: filesystem to write the compiled fragments into.
    :param found_fragments: pairs of (version filesystem, fragment filename).
    :return: list of output paths written (relative to ``write_fs``).
    :raises FragmentCompilationError: if any single fragment fails to compile;
        the underlying error is chained as the cause.
    """
    outputs = []
    for version_fs, filename in found_fragments:
        try:
            fragment = self.load_fragment(version_fs.readtext(filename))
            fragment_type = fragment.get('type')
            showcontent = self.config.fragment_types.get(
                fragment_type, {}).get('showcontent', True)
            section = fragment.get('section') or None
            rendered_content = render_fragment(
                fragment,
                showcontent,
                self.config.changelog_output_type)
            if rendered_content.strip():
                filename_stem = splitext(basename(filename))[0]
                # `filter(None, ...)` drops the section when it is None so no
                # empty path component is joined in.
                output_path = join(*filter(None, [
                    section,
                    '{}.{}'.format(filename_stem, fragment_type)]))
                log.info('Compiling {} -> {}'.format(
                    version_fs.getsyspath(filename),
                    write_fs.getsyspath(output_path)))
                parent_dir = dirname(output_path)
                if parent_dir:
                    write_fs.makedirs(parent_dir, recreate=True)
                write_fs.writetext(output_path, rendered_content)
                outputs.append(output_path)
        except Exception as e:
            # Chain explicitly so the root cause of the compilation failure
            # is preserved in the traceback (original re-raised without `from`).
            raise FragmentCompilationError(filename) from e
    return outputs
def _checkin(src_fs: FS, src_path: str, dst_fs: FS, dst_path: str,
             move: bool = False):
    """Copy (or move) ``src_path`` from ``src_fs`` into ``dst_fs``.

    Creates the destination parent directory when missing. When ``move`` is
    true, a ``<src_path>.checkin`` marker containing the destination location
    (as JSON) is written back into the source filesystem.

    :return: the ``(dst_fs, dst_path)`` pair for the checked-in item.
    """
    parent_dir = Path(dst_path).parent.as_posix()
    if not dst_fs.exists(parent_dir):
        dst_fs.makedirs(parent_dir)

    # Pick the transfer primitive: file vs. directory, copy vs. move.
    if src_fs.isfile(src_path):
        transfer = move_file if move else copy_file
    else:
        transfer = move_dir if move else copy_dir
    transfer(src_fs, src_path, dst_fs, dst_path)

    if move:
        marker_path = rf'{src_path}.checkin'
        src_fs.settext(marker_path, json.dumps({repr(dst_fs): dst_path}))

    return dst_fs, dst_path
def makedirs(
    self,
    path: str,
    permissions: Optional[Permissions] = None,
    recreate: bool = False,
) -> SubFS[FS]:
    # Make a directory (and any missing intermediate directories) at `path`,
    # returning a SubFS for it. Delegates directly to the base FS.makedirs
    # implementation (explicit unbound call rather than super(), so any
    # intermediate overrides in the MRO are bypassed -- presumably deliberate;
    # NOTE(review): confirm this bypass is intended).
    # `recreate=True` suppresses the error when the directory already exists.
    return FS.makedirs(self, path, permissions=permissions, recreate=recreate)
def write_to_bento(self, bento_fs: FS, build_ctx: str):
    """Write the Python environment spec into the bento's ``env/python`` folder.

    Records the Python version, copies wheel files and the requirements
    specification (user file, inline package list, or nothing), writes any
    extra pip arguments, and optionally locks package versions via pip-compile.

    :param bento_fs: target bento filesystem to write into.
    :param build_ctx: build context directory used to resolve user file paths.
    """
    py_folder = fs.path.join("env", "python")
    wheels_folder = fs.path.join(py_folder, "wheels")
    bento_fs.makedirs(py_folder, recreate=True)

    # Save the python version of current build environment
    with bento_fs.open(fs.path.join(py_folder, "version.txt"), "w") as f:
        f.write(PYTHON_VERSION)

    # Move over required wheel files
    # Note: although wheel files outside of build_ctx will also work, we should
    # discourage users from doing that
    if self.wheels is not None:
        for whl_file in self.wheels:  # pylint: disable=not-an-iterable
            whl_file = resolve_user_filepath(whl_file, build_ctx)
            copy_file_to_fs_folder(whl_file, bento_fs, wheels_folder)

    # If BentoML is installed in editable mode, build bentoml whl and save to Bento
    build_bentoml_whl_to_target_if_in_editable_mode(
        bento_fs.getsyspath(wheels_folder))

    # Requirements source precedence: user requirements.txt file first, then
    # the inline `packages` list; with neither, there is nothing more to do.
    if self.requirements_txt is not None:
        requirements_txt_file = resolve_user_filepath(
            self.requirements_txt, build_ctx)
        copy_file_to_fs_folder(
            requirements_txt_file,
            bento_fs,
            py_folder,
            dst_filename="requirements.txt",
        )
    elif self.packages is not None:
        with bento_fs.open(fs.path.join(py_folder, "requirements.txt"), "w") as f:
            f.write("\n".join(self.packages))
    else:
        # Return early if no python packages were specified
        return

    # Collect pip command-line flags from the configured index/host options.
    pip_args: t.List[str] = []
    if self.no_index:
        pip_args.append("--no-index")
    if self.index_url:
        pip_args.append(f"--index-url={self.index_url}")
    if self.trusted_host:
        for item in self.trusted_host:  # pylint: disable=not-an-iterable
            pip_args.append(f"--trusted-host={item}")
    if self.find_links:
        for item in self.find_links:  # pylint: disable=not-an-iterable
            pip_args.append(f"--find-links={item}")
    if self.extra_index_url:
        for item in self.extra_index_url:  # pylint: disable=not-an-iterable
            pip_args.append(f"--extra-index-url={item}")
    if self.pip_args:
        # Additional user provided pip_args
        pip_args.append(self.pip_args)

    # write pip install args to a text file if applicable
    if pip_args:
        with bento_fs.open(fs.path.join(py_folder, "pip_args.txt"), "w") as f:
            f.write(" ".join(pip_args))

    if self.lock_packages:
        # Note: "--allow-unsafe" is required for including setuptools in the
        # generated requirements.lock.txt file, and setuptool is required by
        # pyfilesystem2. Once pyfilesystem2 drop setuptools as dependency, we can
        # remove the "--allow-unsafe" flag here.

        # Note: "--generate-hashes" is purposefully not used here because it will
        # break if user includes PyPI package from version control system
        pip_compile_in = bento_fs.getsyspath(
            fs.path.join(py_folder, "requirements.txt"))
        pip_compile_out = bento_fs.getsyspath(
            fs.path.join(py_folder, "requirements.lock.txt"))
        pip_compile_args = ([pip_compile_in] + pip_args + [
            "--quiet",
            "--allow-unsafe",
            "--no-header",
            f"--output-file={pip_compile_out}",
        ])
        logger.info("Locking PyPI package versions..")
        click_ctx = pip_compile_cli.make_context("pip-compile", pip_compile_args)
        try:
            pip_compile_cli.invoke(click_ctx)
        except Exception as e:
            # Locking is best-effort: on failure, fall back to the unlocked
            # requirements rather than failing the whole bento build.
            logger.error(f"Failed locking PyPI packages: {e}")
            logger.error(
                "Falling back to using user-provided package requirement specifier, equivalent to `lock_packages=False`"
            )
def _create_check_file(fs: FS, path: str): fs.makedirs(dirname(path), recreate=True) fs.create(path, wipe=True) fs.writebytes(path, bytes('This is a demo file content', encoding='ASCII')) fs.remove(path)