class Config:
    """Loaded and validated snapcraft project configuration.

    Applies extensions to the raw snapcraft.yaml, validates it, expands
    filesets and environment variables, and builds the ``PartsConfig``
    that drives the lifecycle.
    """

    @property
    def part_names(self):
        """Names of all parts defined in the project."""
        return self.parts.part_names

    @property
    def all_parts(self):
        """All part handlers managed by the parts config."""
        return self.parts.all_parts

    def __init__(self, project: project.Project) -> None:
        self.build_snaps: Set[str] = set()
        self.project = project

        # raw_snapcraft_yaml is read only, create a new copy
        snapcraft_yaml = apply_extensions(project.info.get_raw_snapcraft())

        self.validator = Validator(snapcraft_yaml)
        self.validator.validate()

        snapcraft_yaml = self._expand_filesets(snapcraft_yaml)
        self.data = self._expand_env(snapcraft_yaml)

        self.data["architectures"] = _process_architectures(
            self.data.get("architectures"), project.deb_arch)

        self._ensure_no_duplicate_app_aliases()

        self._global_grammar_processor = grammar_processing.GlobalGrammarProcessor(
            properties=self.data, project=project)

        # XXX: Resetting snap_meta due to above mangling of data.
        # Convergence to operating on snap_meta will remove this requirement...
        project._snap_meta = Snap.from_dict(self.data)

        self.parts = PartsConfig(
            parts=self.data, project=project, validator=self.validator)

    def _ensure_no_duplicate_app_aliases(self):
        """Raise DuplicateAliasError if two apps declare the same alias."""
        # Prevent multiple apps within a snap from having duplicate alias names
        aliases = []
        for app in self.data.get("apps", {}).values():
            aliases.extend(app.get("aliases", []))

        # The aliases property is actually deprecated:
        if aliases:
            deprecations.handle_deprecation_notice("dn5")

        seen = set()
        duplicates = set()
        for alias in aliases:
            if alias in seen:
                duplicates.add(alias)
            else:
                seen.add(alias)
        if duplicates:
            raise errors.DuplicateAliasError(aliases=duplicates)

    def _get_required_package_repositories(self) -> List[PackageRepository]:
        """Return snap-declared repositories plus those required by v1 plugins."""
        package_repos = self.project._snap_meta.package_repositories.copy()

        v1_plugins = [
            part.plugin
            for part in self.all_parts
            if isinstance(part.plugin, plugins.v1.PluginV1)
        ]
        for plugin in v1_plugins:
            package_repos.extend(plugin.get_required_package_repositories())

        return package_repos

    def install_package_repositories(self) -> None:
        """Install keys and sources for all required package repositories.

        Refreshes the build-package cache only if any key or source
        actually changed.
        """
        package_repos = self._get_required_package_repositories()
        if not package_repos:
            return

        # Install pre-requisite packages for apt-key, if not installed.
        repo.Repo.install_build_packages(package_names=["gnupg", "dirmngr"])

        key_assets = self.project._get_keys_path()
        key_manager = apt_key_manager.AptKeyManager(key_assets=key_assets)

        refresh_required = False
        # Reuse the list fetched above; the original queried
        # _get_required_package_repositories() a second time here, redoing
        # the copy and the v1-plugin scan for no benefit.
        for package_repo in package_repos:
            refresh_required |= key_manager.install_package_repository_key(
                package_repo=package_repo)
            refresh_required |= package_repo.install()

        if refresh_required:
            repo.Repo.refresh_build_packages()

    def get_build_packages(self) -> Set[str]:
        """Return the union of all build packages the project requires."""
        # Install/update configured package repositories.
        self.install_package_repositories()

        build_packages = self._global_grammar_processor.get_build_packages()
        build_packages |= set(self.project.additional_build_packages)

        # version: git needs the git tool at build time.
        if self.project._snap_meta.version == "git":
            build_packages.add("git")

        for part in self.all_parts:
            build_packages |= part._grammar_processor.get_build_packages()

            # TODO: this should not pass in command but the required package,
            #       where the required package is to be determined by the
            #       source handler.
            if part.source_handler and part.source_handler.command:
                # TODO get_packages_for_source_type should not be a thing.
                build_packages |= repo.Repo.get_packages_for_source_type(
                    part.source_handler.command)

            # v2 plugins expose their build packages directly; v1 plugin
            # packages are already covered by the grammar processor.
            if not isinstance(part.plugin, plugins.v1.PluginV1):
                build_packages |= part.plugin.get_build_packages()

        return build_packages

    def get_build_snaps(self) -> Set[str]:
        """Return the union of all build snaps the project requires."""
        build_snaps = set()

        # Add the base.
        if self.project._snap_meta.base is not None:
            build_snaps.add(self.project._snap_meta.base)

        for part in self.all_parts:
            build_snaps |= part._grammar_processor.get_build_snaps()
            if not isinstance(part.plugin, plugins.v1.PluginV1):
                build_snaps |= part.plugin.get_build_snaps()

        return build_snaps

    def get_project_state(self, step: steps.Step):
        """Returns a dict of states for the given step of each part."""
        state = {}
        for part in self.parts.all_parts:
            state[part.name] = states.get_state(part.part_state_dir, step)
        return state

    def snap_env(self):
        """Return the shell environment lines for running from the prime dir."""
        prime_dir = self.project.prime_dir
        env = []
        env += runtime_env(prime_dir, self.project.arch_triplet)

        dependency_paths = set()
        for part in self.parts.all_parts:
            env += part.env(prime_dir)
            dependency_paths |= part.get_primed_dependency_paths()

        # Dependency paths are only valid if they actually exist. Sorting them
        # here as well so the LD_LIBRARY_PATH is consistent between runs.
        dependency_paths = sorted(
            {path for path in dependency_paths if os.path.isdir(path)})

        if dependency_paths:
            # Add more specific LD_LIBRARY_PATH from the dependencies.
            env.append('LD_LIBRARY_PATH="' + ":".join(dependency_paths) +
                       '${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"')

        return env

    def project_env(self):
        """Return VAR="value" lines for the snapcraft global environment."""
        return [
            '{}="{}"'.format(variable, value)
            for variable, value in
            get_snapcraft_global_environment(self.project).items()
        ]

    def _expand_env(self, snapcraft_yaml):
        """Apply project-variable replacements to every top-level key.

        ``name`` and ``version`` are left untouched since the replacement
        values themselves derive from them.
        """
        environment_keys = ["name", "version"]
        # The replacement map depends only on the project, not on the key
        # being processed, so compute it once rather than once per key.
        replacements = environment_to_replacements(
            get_snapcraft_global_environment(self.project))
        for key in snapcraft_yaml:
            if key in environment_keys:
                continue
            snapcraft_yaml[key] = replace_attr(snapcraft_yaml[key], replacements)
        return snapcraft_yaml

    def _expand_filesets(self, snapcraft_yaml):
        """Expand $fileset references in each part's stage and prime lists."""
        parts = snapcraft_yaml.get("parts", {})
        for part_name in parts:
            for step in ("stage", "prime"):
                step_fileset = _expand_filesets_for(step, parts[part_name])
                parts[part_name][step] = step_fileset
        return snapcraft_yaml
class Config:
    """Loaded and validated snapcraft project configuration.

    Applies extensions to the raw snapcraft.yaml, validates it, expands
    filesets and environment variables, resolves build tools/snaps, and
    builds the ``PartsConfig`` that drives the lifecycle.
    """

    @property
    def part_names(self):
        """Names of all parts defined in the project."""
        return self.parts.part_names

    @property
    def all_parts(self):
        """All part handlers managed by the parts config."""
        return self.parts.all_parts

    def __init__(self, project: project.Project) -> None:
        self.build_snaps: Set[str] = set()
        self.project = project

        # raw_snapcraft_yaml is read only, create a new copy
        snapcraft_yaml = apply_extensions(project.info.get_raw_snapcraft())

        self.validator = Validator(snapcraft_yaml)
        self.validator.validate()

        snapcraft_yaml = self._expand_filesets(snapcraft_yaml)
        self.data = self._expand_env(snapcraft_yaml)

        self.data["architectures"] = _process_architectures(
            self.data.get("architectures"), project.deb_arch)

        self._ensure_no_duplicate_app_aliases()

        grammar_processor = grammar_processing.GlobalGrammarProcessor(
            properties=self.data, project=project)

        keys_path = project._get_keys_path()
        # NOTE: a list comprehension (not a generator) is required here so
        # that every repository is installed — any() over a generator would
        # short-circuit after the first one reporting a change.
        if any([
            package_repo.install(keys_path=keys_path)
            for package_repo in project._snap_meta.package_repositories
        ]):
            repo.Repo.refresh_build_packages()

        self.build_tools = grammar_processor.get_build_packages()
        self.build_tools |= set(project.additional_build_packages)

        # If version: git is used we want to add "git" to build-packages
        if self.data.get("version") == "git":
            self.build_tools.add("git")

        # XXX: Resetting snap_meta due to above mangling of data.
        # Convergence to operating on snap_meta will remove this requirement...
        project._snap_meta = Snap.from_dict(self.data)

        # Always add the base for building for non os and base snaps
        if project.info.base is None and project.info.type in ("app", "gadget"):
            raise SnapcraftEnvironmentError(
                "A base is required for snaps of type {!r}.".format(
                    project.info.type))

        if project.info.base is not None:
            # If the base is already installed by other means, skip its installation.
            # But, we should always add it when in a docker environment so
            # the creator of said docker image is aware that it is required.
            if common.is_process_container(
            ) or not repo.snaps.SnapPackage.is_snap_installed(
                    project.info.base):
                self.build_snaps.add(project.info.base)

        self.parts = PartsConfig(
            parts=self.data,
            project=project,
            validator=self.validator,
            build_snaps=self.build_snaps,
            build_tools=self.build_tools,
        )

    def _ensure_no_duplicate_app_aliases(self):
        """Raise DuplicateAliasError if two apps declare the same alias."""
        # Prevent multiple apps within a snap from having duplicate alias names
        aliases = []
        for app in self.data.get("apps", {}).values():
            aliases.extend(app.get("aliases", []))

        # The aliases property is actually deprecated:
        if aliases:
            deprecations.handle_deprecation_notice("dn5")

        seen = set()
        duplicates = set()
        for alias in aliases:
            if alias in seen:
                duplicates.add(alias)
            else:
                seen.add(alias)
        if duplicates:
            raise errors.DuplicateAliasError(aliases=duplicates)

    def get_project_state(self, step: steps.Step):
        """Returns a dict of states for the given step of each part."""
        state = {}
        for part in self.parts.all_parts:
            state[part.name] = states.get_state(part.part_state_dir, step)
        return state

    def stage_env(self):
        """Return the shell environment lines for running from the stage dir."""
        stage_dir = self.project.stage_dir
        env = []
        env += runtime_env(stage_dir, self.project.arch_triplet)
        env += build_env_for_stage(
            stage_dir, self.data["name"], self.project.arch_triplet)
        for part in self.parts.all_parts:
            env += part.env(stage_dir)
        return env

    def snap_env(self):
        """Return the shell environment lines for running from the prime dir."""
        prime_dir = self.project.prime_dir
        env = []
        env += runtime_env(prime_dir, self.project.arch_triplet)

        dependency_paths = set()
        for part in self.parts.all_parts:
            env += part.env(prime_dir)
            dependency_paths |= part.get_primed_dependency_paths()

        # Dependency paths are only valid if they actually exist. Sorting them
        # here as well so the LD_LIBRARY_PATH is consistent between runs.
        dependency_paths = sorted(
            {path for path in dependency_paths if os.path.isdir(path)})

        if dependency_paths:
            # Add more specific LD_LIBRARY_PATH from the dependencies.
            # Only append the pre-existing LD_LIBRARY_PATH when it is set:
            # an unconditional ":$LD_LIBRARY_PATH" leaves a trailing empty
            # entry when the variable is unset, which ld.so interprets as
            # the current working directory.
            env.append('LD_LIBRARY_PATH="' + ":".join(dependency_paths) +
                       '${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"')

        return env

    def project_env(self):
        """Return VAR="value" lines for the snapcraft global environment."""
        return [
            '{}="{}"'.format(variable, value)
            for variable, value in
            get_snapcraft_global_environment(self.project).items()
        ]

    def _expand_env(self, snapcraft_yaml):
        """Apply project-variable replacements to every top-level key.

        ``name`` and ``version`` are left untouched since the replacement
        values themselves derive from them.
        """
        environment_keys = ["name", "version"]
        # The replacement map depends only on the project, not on the key
        # being processed, so compute it once rather than once per key.
        replacements = environment_to_replacements(
            get_snapcraft_global_environment(self.project))
        for key in snapcraft_yaml:
            if key in environment_keys:
                continue
            snapcraft_yaml[key] = replace_attr(snapcraft_yaml[key], replacements)
        return snapcraft_yaml

    def _expand_filesets(self, snapcraft_yaml):
        """Expand $fileset references in each part's stage and prime lists."""
        parts = snapcraft_yaml.get("parts", {})
        for part_name in parts:
            for step in ("stage", "prime"):
                step_fileset = _expand_filesets_for(step, parts[part_name])
                parts[part_name][step] = step_fileset
        return snapcraft_yaml
class Config:
    """Loaded and validated snapcraft project configuration.

    Applies extensions to the raw snapcraft.yaml, validates it, expands
    filesets and environment variables, resolves build tools/snaps, and
    builds the ``PartsConfig`` that drives the lifecycle.
    """

    @property
    def part_names(self):
        """Names of all parts defined in the project."""
        return self.parts.part_names

    @property
    def all_parts(self):
        """All part handlers managed by the parts config."""
        return self.parts.all_parts

    def __init__(self, project: project.Project) -> None:
        self.build_snaps = set()  # type: Set[str]
        self.project = project

        # raw_snapcraft_yaml is read only, create a new copy
        snapcraft_yaml = apply_extensions(project.info.get_raw_snapcraft())

        self.validator = Validator(snapcraft_yaml)
        self.validator.validate()

        snapcraft_yaml = self._expand_filesets(snapcraft_yaml)
        self.data = self._expand_env(snapcraft_yaml)
        self._ensure_no_duplicate_app_aliases()

        grammar_processor = grammar_processing.GlobalGrammarProcessor(
            properties=self.data, project=project)
        self.build_tools = grammar_processor.get_build_packages()
        self.build_tools |= set(project.additional_build_packages)

        # Always add the base for building for non os and base snaps
        if project.info.base is not None and project.info.type not in (
                "base", "os"):
            # If the base is already installed by other means, skip its installation.
            if not repo.snaps.SnapPackage.is_snap_installed(project.info.base):
                self.build_snaps.add(project.info.base)
        elif project.info.type not in ("base", "os"):
            # This exception is here to help with porting issues with bases. In normal
            # executions, when no base is set, the legacy snapcraft will be executed.
            raise RuntimeError("A base is required for {!r} snaps.".format(
                project.info.type))

        self.parts = PartsConfig(
            parts=self.data,
            project=project,
            validator=self.validator,
            build_snaps=self.build_snaps,
            build_tools=self.build_tools,
        )

        self.data["architectures"] = _process_architectures(
            self.data.get("architectures"), project.deb_arch)

        conduct_environment_sanity_check(self.project, self.data,
                                         self.validator.schema)

    def _ensure_no_duplicate_app_aliases(self):
        """Raise DuplicateAliasError if two apps declare the same alias."""
        # Prevent multiple apps within a snap from having duplicate alias names
        aliases = []
        for app in self.data.get("apps", {}).values():
            aliases.extend(app.get("aliases", []))

        # The aliases property is actually deprecated:
        if aliases:
            deprecations.handle_deprecation_notice("dn5")

        seen = set()
        duplicates = set()
        for alias in aliases:
            if alias in seen:
                duplicates.add(alias)
            else:
                seen.add(alias)
        if duplicates:
            raise errors.DuplicateAliasError(aliases=duplicates)

    def get_project_state(self, step: steps.Step):
        """Returns a dict of states for the given step of each part."""
        state = {}
        for part in self.parts.all_parts:
            state[part.name] = states.get_state(part.plugin.statedir, step)
        return state

    def stage_env(self):
        """Return the shell environment lines for running from the stage dir."""
        stage_dir = self.project.stage_dir
        env = []
        env += runtime_env(stage_dir, self.project.arch_triplet)
        env += build_env_for_stage(
            stage_dir, self.data["name"], self.project.arch_triplet)
        for part in self.parts.all_parts:
            env += part.env(stage_dir)
        return env

    def snap_env(self):
        """Return the shell environment lines for running from the prime dir."""
        prime_dir = self.project.prime_dir
        env = []
        env += runtime_env(prime_dir, self.project.arch_triplet)

        dependency_paths = set()
        for part in self.parts.all_parts:
            env += part.env(prime_dir)
            dependency_paths |= part.get_primed_dependency_paths()

        # Dependency paths are only valid if they actually exist. Sorting them
        # here as well so the LD_LIBRARY_PATH is consistent between runs.
        dependency_paths = sorted(
            {path for path in dependency_paths if os.path.isdir(path)})

        if dependency_paths:
            # Add more specific LD_LIBRARY_PATH from the dependencies.
            # Only append the pre-existing LD_LIBRARY_PATH when it is set:
            # an unconditional ":$LD_LIBRARY_PATH" leaves a trailing empty
            # entry when the variable is unset, which ld.so interprets as
            # the current working directory.
            env.append('LD_LIBRARY_PATH="' + ":".join(dependency_paths) +
                       '${LD_LIBRARY_PATH:+:$LD_LIBRARY_PATH}"')

        return env

    def project_env(self):
        """Return VAR="value" lines for the snapcraft global environment."""
        return [
            '{}="{}"'.format(variable, value)
            for variable, value in
            snapcraft_global_environment(self.project).items()
        ]

    def _expand_env(self, snapcraft_yaml):
        """Apply project-variable replacements to every top-level key.

        ``name`` and ``version`` are left untouched since the replacement
        values themselves derive from them.
        """
        environment_keys = ["name", "version"]
        # The replacement map depends only on the project, not on the key
        # being processed, so compute it once rather than once per key.
        replacements = environment_to_replacements(
            snapcraft_global_environment(self.project))
        for key in snapcraft_yaml:
            if key in environment_keys:
                continue
            snapcraft_yaml[key] = replace_attr(snapcraft_yaml[key], replacements)
        return snapcraft_yaml

    def _expand_filesets(self, snapcraft_yaml):
        """Expand $fileset references in each part's stage and prime lists."""
        parts = snapcraft_yaml.get("parts", {})
        for part_name in parts:
            for step in ("stage", "prime"):
                step_fileset = _expand_filesets_for(step, parts[part_name])
                parts[part_name][step] = step_fileset
        return snapcraft_yaml