def execute(self, args, uargs):
    """Add and/or remove packages from a profile, then persist it.

    Packages are given as *motifs*: either full package identifier
    strings (parsed as-is) or bare package names, in which case the
    latest version is selected from the relevant candidate list.
    """
    wm = self.get_workspacemanager()
    profile = wm.get_profile(self._find_profile_name(args, wm=wm))

    def _resolve_motifs(motifs, valid_pilist):
        # Shared resolution logic for both add and remove lists:
        # exact identifiers are parsed, bare names are resolved to
        # the latest matching version among valid_pilist.
        pilist = []
        for motif in motifs:
            if PackageIdentifier.is_valid_identifier(motif):
                pilist.append(PackageIdentifier.parse(motif))
            else:
                pilist.append(find_latest_version(motif, valid_pilist))
        return pilist

    if args.pkg_add_list is not None:
        # Additions may reference any available or installed package
        candidates = list(wm.list_available_packages().keys()) + \
            list(wm.list_installed_packages().keys())
        profile.add_packages(_resolve_motifs(args.pkg_add_list, candidates))
    if args.pkg_rm_list is not None:
        # Removals are resolved against the profile's own packages
        profile.remove_packages(
            _resolve_motifs(args.pkg_rm_list, profile.packages))
    wm.update_profile(profile)
def resolve_latest(motif_list, pm):
    """Map every motif to a concrete PackageIdentifier.

    A motif is either a full identifier string (kept only if it is a
    known package) or a bare package name (resolved to the latest
    known version). Raises InvalidPackageNameException when a motif
    cannot be resolved.
    """
    # Index every known package (installed + available) by name.
    known = {}
    group_package_identifiers_by_name(
        pm.list_installed_packages().keys(), pkgmap=known)
    group_package_identifiers_by_name(
        pm.list_available_packages().keys(), pkgmap=known)
    resolved = []
    for motif in motif_list:
        candidate = None
        if PackageIdentifier.is_valid_identifier(motif):
            parsed = PackageIdentifier.parse(motif)
            # Keep the exact identifier only when it is actually known
            if parsed.name in known and parsed in known[parsed.name]:
                candidate = parsed
        elif motif in known:
            # Bare name: lists are sorted, the last entry is the latest
            candidate = known[motif][-1]
        if candidate is None:
            raise InvalidPackageNameException(motif)
        resolved.append(candidate)
    return resolved
def get_latest_ap(motif, pilist):
    """Resolve *motif* against *pilist*.

    If motif is a full identifier string, return the parsed identifier
    only when it is present in pilist. Otherwise treat motif as a bare
    package name and return the greatest matching identifier, or None
    when nothing matches.
    """
    if PackageIdentifier.is_valid_identifier(motif):
        candidate = PackageIdentifier.parse(motif)
        return candidate if candidate in pilist else None
    # Bare name: keep the highest identifier sharing that name
    matching = [pi for pi in pilist if pi.name == motif]
    return max(matching) if matching else None
def execute(self, args, uargs):
    """Update packages of the current profile to newer versions.

    Without explicit packages on the command line, every package
    referenced by the profile is considered. Each addition or upgrade
    is confirmed interactively; the profile is then persisted and
    provisioned. Raises InvalidPackageNameException for unknown motifs.
    """
    wm = self.get_workspacemanager()
    logger = wm.logger
    pfname = wm.current_profile_name
    profile = wm.get_profile(pfname)
    profile_pkg_map = profile.pkg_map

    # Index every known package (installed + available) by name
    grouped_packagesmap = group_package_identifiers_by_name(
        wm.list_installed_packages())
    grouped_packagesmap = group_package_identifiers_by_name(
        wm.list_available_packages(), pkgmap=grouped_packagesmap)

    update_pilist = []
    # Default to all packages already referenced by the profile
    motiflist = args.packages if args.packages is not None else profile_pkg_map.keys()
    for motif in motiflist:
        pi = None
        if PackageIdentifier.is_valid_identifier(motif):
            # User forced a specific version
            candidate = PackageIdentifier.parse(motif)
            if candidate.name in grouped_packagesmap and \
                    candidate in grouped_packagesmap[candidate.name]:
                pi = candidate
        elif motif in grouped_packagesmap:
            # Bare name: get latest version (lists are sorted)
            pi = grouped_packagesmap[motif][-1]
        if pi is None:
            # Unknown package identifier
            raise InvalidPackageNameException(motif)
        # NOTE: pi cannot be None past this point (the raise above),
        # so the original "pi is not None" re-check was dead code.
        if pi in update_pilist:
            continue
        # Version currently referenced by the profile, if any
        previous_pi = PackageIdentifier(pi.name, profile_pkg_map[pi.name]) \
            if pi.name in profile_pkg_map else None
        if previous_pi is None:
            # Package not in profile yet, add it
            if wm.print_with_confirm(
                    "Do you want to add package {pi}?".format(pi=pi)):
                update_pilist.append(pi)
        elif previous_pi != pi:
            # Package already in profile with a different version, update it
            if wm.print_with_confirm(
                    "Do you want to update package {pi.name} from {oldpi.version} to {pi.version}?".format(pi=pi, oldpi=previous_pi)):
                update_pilist.append(pi)
        # else: already at this exact version, nothing to do

    if len(update_pilist) == 0:
        logger.print_default("Nothing to do")
    else:
        profile.add_packages(update_pilist)
        wm.update_profile(profile)
        wm.provision_profile(profile)
def generate_repository(source_folder, output_folder):
    """Build a test package repository from *source_folder* into *output_folder*.

    Every sub-folder whose name is a valid package identifier and which
    contains a manifest is packaged into a ``.leaf`` archive. Two index
    files are generated (index.json / index2.json), some artifacts are
    deliberately corrupted for failure-path tests, and both indexes are
    GPG-signed with the test key.

    Raises ValueError on a folder/manifest naming mismatch or when
    either index would be empty.
    """
    mkdirs(output_folder)
    artifacts_list1 = []
    artifacts_list2 = []
    rm = RelengManager()
    for package_folder in source_folder.iterdir():
        if package_folder.is_dir() and PackageIdentifier.is_valid_identifier(
                package_folder.name):
            manifest_file = package_folder / LeafFiles.MANIFEST
            if manifest_file.is_file():
                manifest = Manifest.parse(manifest_file)
                # The folder name must match the manifest identifier
                if str(manifest.identifier) != package_folder.name:
                    raise ValueError(
                        "Naming error: {mf.identifier} != {folder.name}".
                        format(mf=manifest, folder=package_folder))
                filename = str(manifest.identifier) + ".leaf"
                output_file = output_folder / filename
                # Some packages use specific tar options (TAR_EXTRA_ARGS)
                tar_extraargs = TAR_EXTRA_ARGS.get(manifest.identifier)
                rm.create_package(package_folder,
                                  output_file,
                                  tar_extra_args=tar_extraargs)
                # Check that the generated archive is OK
                check_archive_format(
                    output_file,
                    tar_extraargs[0] if tar_extraargs is not None else None)
                # Create multi index.json: packages listed in
                # ALT_INDEX_CONTENT go to the second index, and also to
                # the first one when their flag is truthy; everything
                # else goes to the first index only.
                if str(manifest.identifier) in ALT_INDEX_CONTENT:
                    artifacts_list2.append(output_file)
                    if ALT_INDEX_CONTENT[str(manifest.identifier)]:
                        artifacts_list1.append(output_file)
                else:
                    artifacts_list1.append(output_file)
                # Create a problem with failure-badhash package:
                # overwrite its recorded hash with a bogus digest
                if manifest.name == "failure-badhash":
                    info_node = jloadfile(
                        rm.find_external_info_file(output_file))
                    # chosen by fair dice roll.
                    # guaranteed to be random.
                    info_node[
                        JsonConstants.
                        REMOTE_PACKAGE_HASH] = "sha384:d1083143b5c4cf7f1ddaadc391b2d0102fc9fffeb0951ec51020b512ef9548d40cd1af079a1221133faa949fdc304c41"
                    jwritefile(rm.find_external_info_file(output_file),
                               info_node,
                               pp=True)
    if len(artifacts_list1) == 0 or len(artifacts_list2) == 0:
        raise ValueError("Empty index!")
    # Tags file content seen while indexing the first repository
    with (output_folder / "multitags_1.0.leaf.tags").open("w") as fp:
        fp.write("volatileTag1\n")
        fp.write("volatileTag2")
    rm.generate_index(output_folder / "index.json",
                      artifacts_list1,
                      name="First repository",
                      description="First repository description",
                      prettyprint=True)
    # Rewrite the same tags file so the second index sees different
    # volatile tags than the first one
    with (output_folder / "multitags_1.0.leaf.tags").open("w") as fp:
        fp.write("volatileTag3\n")
        fp.write("volatileTag4")
    rm.generate_index(output_folder / "index2.json",
                      artifacts_list2,
                      name="Second repository",
                      description="Second repository description",
                      prettyprint=True)
    # Alter some values for test purpose: give failure-large-ap an
    # unrealistically large size
    index1json = jloadfile(output_folder / "index.json")
    for pkgjson in index1json[JsonConstants.REMOTE_PACKAGES]:
        if pkgjson["info"]["name"] == "failure-large-ap":
            pkgjson["size"] = 999999999999
    jwritefile(output_folder / "index.json", index1json, pp=True)
    # Sign both indexes with GPG using the test keyring
    subprocess.check_call([
        "gpg", "--homedir",
        str(TEST_GPG_HOMEDIR), "--detach-sign", "--armor",
        str(output_folder / "index.json")
    ])
    subprocess.check_call([
        "gpg", "--homedir",
        str(TEST_GPG_HOMEDIR), "--detach-sign", "--armor",
        str(output_folder / "index2.json")
    ])
def install_packages(self,
                     items: list,
                     env: Environment = None,
                     keep_folder_on_error: bool = False):
    """
    Compute dependency tree, check compatibility, download from remotes
    and extract needed packages

    @param items: mixed list of PackageIdentifier objects, identifier
                  strings, or paths to local leaf artifact files
    @param env: environment used to resolve dynamic dependencies;
                defaults to builtin + user environment
    @param keep_folder_on_error: when True, keep a failed extraction
                                 folder on disk (for debugging)
    @return: InstalledPackage list
    """
    # The whole operation runs under the application lock so no
    # concurrent leaf process can mutate the installed packages.
    with self.application_lock.acquire():
        ipmap = self.list_installed_packages()
        apmap = self.list_available_packages()
        pilist = []
        for item in items:
            if isinstance(item, PackageIdentifier):
                # Package identifier is given
                pilist.append(item)
            elif PackageIdentifier.is_valid_identifier(item):
                # Package identifier string given
                pilist.append(PackageIdentifier.parse(item))
            else:
                # If leaf artifacts are given, add/replace identifiers of available packages
                la = LeafArtifact(Path(item))
                pilist.append(la.identifier)
                apmap[la.identifier] = la
        out = []
        # Build env to resolve dynamic dependencies
        if env is None:
            env = Environment.build(self.build_builtin_environment(),
                                    self.build_user_environment())
        ap_to_install = DependencyUtils.install(pilist,
                                                apmap,
                                                ipmap,
                                                env=env)
        # Check leaf min version: refuse to install packages that
        # require a newer leaf than the one running
        min_version = check_leaf_min_version(ap_to_install)
        if min_version:
            raise LeafOutOfDateException(
                "You need to upgrade leaf to v{version} to install {text}".
                format(version=min_version,
                       text=", ".join(
                           [str(ap.identifier) for ap in ap_to_install])))
        # Check nothing to do
        if len(ap_to_install) == 0:
            self.logger.print_default("All packages are installed")
        else:
            # Check available size: sum sizes of packages that must
            # actually be downloaded (AvailablePackage instances only)
            download_totalsize = 0
            download_count = 0
            for ap in [
                    ap for ap in ap_to_install
                    if isinstance(ap, AvailablePackage)
            ]:
                download_count += 1
                if ap.size is not None:
                    download_totalsize += ap.size
            fs_check_free_space(self.download_cache_folder,
                                download_totalsize)
            # Confirm with the user before touching anything
            text = ", ".join([str(ap.identifier) for ap in ap_to_install])
            self.logger.print_quiet(
                "Packages to install: {packages}".format(packages=text))
            if download_totalsize > 0:
                self.logger.print_default("Total size:",
                                          sizeof_fmt(download_totalsize))
            self.print_with_confirm(raise_on_decline=True)
            # Install prereq packages first; any failure aborts the
            # whole installation with a PrereqException
            prereq_to_install = DependencyUtils.prereq(
                [ap.identifier for ap in ap_to_install],
                apmap,
                ipmap,
                env=env)
            if len(prereq_to_install) > 0:
                try:
                    self.__install_prereq(
                        prereq_to_install,
                        ipmap,
                        env=env,
                        keep_folder_on_error=keep_folder_on_error)
                except BaseException as e:
                    raise PrereqException(e)
            # Download ap list; local artifacts are kept as-is
            self.logger.print_default(
                "Downloading {size} package(s)".format(
                    size=download_count))
            la_to_install = []
            for mf in ap_to_install:
                if isinstance(mf, AvailablePackage):
                    la_to_install.append(self.__download_ap(mf))
                elif isinstance(mf, LeafArtifact):
                    la_to_install.append(mf)
            # Check the extracted size before unpacking anything
            extracted_totalsize = 0
            for la in la_to_install:
                if la.final_size is not None:
                    extracted_totalsize += la.final_size
                else:
                    extracted_totalsize += la.get_total_size()
            fs_check_free_space(self.install_folder, extracted_totalsize)
            # Extract la list, one artifact at a time
            for la in la_to_install:
                self.logger.print_default(
                    "[{current}/{total}] Installing {la.identifier}".
                    format(current=(len(out) + 1),
                           total=len(la_to_install),
                           la=la))
                ip = self.__extract_artifact(
                    la,
                    env,
                    ipmap,
                    keep_folder_on_error=keep_folder_on_error)
                out.append(ip)
        return out