def dep_solve(self, nvrea_list, labels=None):
    if not labels:
        labels = self.channel_map.keys()
    repos = [{"id": label, "relative_path": repodata(label)}
             for label in labels]

    print "Copying repodata, please wait."

    # dep solver expects the metadata to be in /repodata directory;
    # create temporary symlinks
    temp_repo_links = []
    repo = None
    for repo in repos:
        yum_repodata_path = "%s/repodata" % (repo['relative_path'])
        create_repodata_link(repo['relative_path'], yum_repodata_path)
        temp_repo_links.append(yum_repodata_path)
    try:
        try:
            self.solver = DepSolver(repos)
            self.__dep_solve(nvrea_list)
            self.solver.cleanup()
        except RepoError, e:
            raise UserRepoError(repo["id"], e.value)
    finally:
        # clean up temporary symlinks
        for link in temp_repo_links:
            remove_repodata_link(link)


def dep_solve(self, nvrea_list, labels=None):
    if not labels:
        labels = self.channel_map.keys()
    repos = [{"id": label, "relative_path": repodata(label)}
             for label in labels]

    print "Copying repodata, please wait."

    # dep solver expects the metadata to be in /repodata directory;
    # create temporary symlinks
    temp_repo_links = []
    repo = None
    for repo in repos:
        yum_repodata_path = "%s/repodata" % (repo['relative_path'])
        create_repodata_link(repo['relative_path'], yum_repodata_path)
        temp_repo_links.append(yum_repodata_path)
    try:
        try:
            solver = DepSolver(repos, nvrea_list)
            dep_results = solver.processResults(solver.getDependencylist())
            solver.cleanup()
            self.process_deps(dep_results)
        except RepoError, e:
            raise UserRepoError(repo["id"], e.value)
    finally:
        # clean up temporary symlinks
        for link in temp_repo_links:
            remove_repodata_link(link)


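Both dep_solve variants above rely on repodata(), create_repodata_link() and remove_repodata_link() helpers that are defined elsewhere and not shown in this listing. The following is a minimal sketch of what such helpers could look like, assuming the per-channel repodata cache lives under a single base directory; the base path and function bodies are illustrative assumptions, not the tool's actual implementation.

import os

# Assumed location of the per-channel repodata cache -- illustrative only.
REPODATA_BASE = "/var/cache/rhn/repodata"


def repodata(label):
    """Return the on-disk repodata directory for a channel label."""
    return os.path.join(REPODATA_BASE, label)


def create_repodata_link(src_path, link_path):
    """Expose src_path under the .../repodata name the dep solver expects."""
    if not os.path.exists(link_path):
        os.symlink(src_path, link_path)


def remove_repodata_link(link_path):
    """Remove a temporary symlink created by create_repodata_link()."""
    if os.path.islink(link_path):
        os.unlink(link_path)

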
def resolve_deps(self):
    '''Uses the dockerize.DepSolver class to find all the shared
    library dependencies of files installed into the Docker image.'''
    deps = DepSolver()

    # Iterate over all files in the image.
    for root, dirs, files in os.walk(self.targetdir):
        for name in files:
            path = os.path.join(root, name)
            deps.add(path)

    for src in deps.deps:
        self.copy_file(src, symlinks=symlink_options.COPY_ALL)

    # Install some basic nss libraries to permit programs to resolve
    # users, groups, and hosts.
    for libdir in deps.prefixes():
        for nsslib in ['libnss_dns.so.2',
                       'libnss_files.so.2',
                       'libnss_compat.so.2']:
            src = os.path.join(libdir, nsslib)
            LOG.info('looking for %s', src)
            if os.path.exists(src):
                self.copy_file(src, symlinks=symlink_options.COPY_ALL)


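The DepSolver used by resolve_deps() comes from dockerize and is not shown here. As a rough illustration of the underlying technique, the toy class below collects shared-library dependencies by parsing ldd output; SimpleDepSolver and its behaviour are assumptions for illustration, and the real dockerize.DepSolver may be implemented quite differently.

import os
import re
import subprocess


class SimpleDepSolver(object):
    """Toy stand-in for a shared-library dependency solver."""

    def __init__(self):
        self.deps = set()

    def add(self, path):
        """Record the libraries that ldd reports for path, if any."""
        try:
            out = subprocess.check_output(['ldd', path],
                                          stderr=subprocess.STDOUT)
        except (OSError, subprocess.CalledProcessError):
            return  # not a dynamic executable, or ldd is unavailable
        for line in out.decode('utf-8', 'replace').splitlines():
            match = re.search(r'=>\s*(\S+)\s+\(0x', line)
            if match and os.path.exists(match.group(1)):
                self.deps.add(match.group(1))

    def prefixes(self):
        """Directories containing the collected libraries."""
        return set(os.path.dirname(p) for p in self.deps)

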
def expand_file(path, sym_set, files_set, found_set, no_cpy_all):
    # Skip anything matching an IGNORE pattern.
    if any(fnmatch.fnmatch(path, n) for n in IGNORE):
        return

    # If the path lives under a COPYALL directory, expand that whole
    # directory instead (unless we are already doing so).
    if not no_cpy_all:
        dirs = [n for n in COPYALL
                if fnmatch.fnmatch(path, os.path.join(n, '*'))]
        if dirs:
            for roots in dirs:
                expand_dir(roots, sym_set, files_set, found_set, True)
            return

    # Record symlinks and expand their targets as well.
    if os.path.islink(path):
        sym_set.add(path)
        lnk = os.readlink(path)
        if not os.path.isabs(lnk):
            lnk = os.path.join(os.path.dirname(path), lnk)
        expand_file(lnk, sym_set, files_set, found_set, no_cpy_all)

    if os.path.isdir(path):
        expand_dir(path, sym_set, files_set, found_set, no_cpy_all)
        return

    if os.path.isfile(path):
        if path in files_set:
            return
        files_set.add(path)
        found_set.add(os.path.basename(path))

        # Pull in the shared-library dependencies of the file as well.
        deps = DepSolver()
        deps.add(path)
        for src in deps.deps:
            expand_file(src, sym_set, files_set, found_set, no_cpy_all)


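expand_file() above calls an expand_dir() helper that is not included in this listing. Below is a minimal sketch of a compatible helper, under the assumption that it only needs to walk the directory and feed every entry back through expand_file(); the body is an assumption, not the original implementation.

import os


def expand_dir(path, sym_set, files_set, found_set, no_cpy_all):
    # Feed every directory entry back through expand_file() so symlinks,
    # nested directories and regular files are all handled the same way.
    for entry in os.listdir(path):
        expand_file(os.path.join(path, entry),
                    sym_set, files_set, found_set, no_cpy_all)

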
class ChannelTreeCloner:
    """Usage:
        a = ChannelTreeCloner(channel_hash, xmlrpc, db, to_date, blacklist,
                              removelist, security_only, use_update_date,
                              no_errata_sync, errata, parents)
        a.create_channels()
        a.prepare()
        a.clone()
    """
    # pylint: disable=R0902
    def __init__(self, channels, remote_api, db_api, to_date, blacklist,
                 removelist, security_only, use_update_date,
                 no_errata_sync, errata, parents=None):
        self.remote_api = remote_api
        self.db_api = db_api
        self.channel_map = channels
        self.to_date = to_date
        self.cloners = []
        self.blacklist = blacklist
        self.removelist = removelist
        if parents:
            self.src_parent = parents[0]
            self.dest_parent = parents[1]
            self.parents_specified = True
        else:
            self.src_parent = None
            self.dest_parent = None
            self.parents_specified = False
        self.channel_details = None
        self.security_only = security_only
        self.use_update_date = use_update_date
        self.no_errata_sync = no_errata_sync
        self.solver = None
        self.validate_source_channels()
        for from_label in self.ordered_labels():
            to_label = self.channel_map[from_label][0]
            cloner = ChannelCloner(from_label, to_label, self.to_date,
                                   self.remote_api, self.db_api,
                                   self.security_only, self.use_update_date,
                                   self.no_errata_sync, errata)
            self.cloners.append(cloner)

    def needing_create(self):
        """
        returns a trimmed down version of channel_map where the
        value needs creating
        """
        to_create = {}
        existing = self.remote_api.list_channel_labels()
        if self.parents_specified:
            if (self.dest_parent not in existing
                    or self.src_parent not in existing):
                raise UserError("Channels specified with --parents must"
                                + " already exist.\nIf you want to clone the"
                                + " parent channels too simply add another"
                                + " --channels option.")
        for src, dest in self.channel_map.items():
            if dest[0] not in existing:
                to_create[src] = dest[0]
        return to_create

    def pending(self):
        total = 0
        for cloner in self.cloners:
            total += cloner.pending()
        return total

    def find_cloner(self, src_label):
        for cloner in self.cloners:
            if cloner.src_label() == src_label:
                return cloner

    def create_channels(self, skip_depsolve=False):
        to_create = self.needing_create()
        if len(to_create) == 0:
            return

        if self.parents_specified:
            dest_parent = [self.dest_parent]
        else:
            dest_parent = self.channel_map[self.src_parent]
        nvreas = []

        #clone the destination parent if it doesn't exist
        if dest_parent[0] in to_create.values():
            self.remote_api.clone_channel(self.src_parent, dest_parent, None)
            del to_create[self.src_parent]
            cloner = self.find_cloner(self.src_parent)
            nvreas += [pkg['nvrea'] for pkg
                       in cloner.reset_new_pkgs().values()]

        #clone the children
        for cloner in self.cloners:
            if cloner.dest_label() in to_create.values():
                dest = self.channel_map[cloner.src_label()]
                self.remote_api.clone_channel(cloner.src_label(), dest,
                                              dest_parent[0])
                nvreas += [pkg['nvrea'] for pkg
                           in cloner.reset_new_pkgs().values()]

        #dep solve all added packages with the parent channel
        if not skip_depsolve:
            self.dep_solve(nvreas, labels=(to_create.keys()
                                           + [self.src_parent]))

    def validate_source_channels(self):
        self.channel_details = self.remote_api.channel_details(
            self.channel_map, values=False)
        if not self.src_parent:
            self.src_parent = self.find_parent(self.channel_map.keys())
        self.validate_children(self.src_parent, self.channel_map.keys())

    def validate_dest_channels(self):
        self.channel_details = self.remote_api.channel_details(
            self.channel_map)
        if not self.dest_parent:
            self.dest_parent = self.find_parent(self.channel_map.values())
        self.validate_children(self.dest_parent, self.channel_map.values())

    def validate_children(self, parent, channel_list):
        """ Make sure all children are children of the parent"""
        for channel in channel_list:
            if type(channel) == type([]):
                channel = channel[0]
            if channel != parent:
                if (self.channel_details[channel]['parent_channel_label']
                        != parent):
                    raise UserError(
                        ("Child channel '%s' is not a child of "
                         + "parent channel '%s'. If you are using --config "
                         + "ensure you have not specified "
                         + "existing-parent-do-not-modify on a child "
                         + "channel.") % (channel, parent))

    def find_parent(self, label_list):
        found_list = []
        for label in label_list:
            if type(label) == type([]):
                label = label[0]
            if self.channel_details[label]['parent_channel_label'] == '':
                found_list.append(label)
        if len(found_list) == 0:
            raise UserError("Parent Channel not specified.")
        if len(found_list) > 1:
            raise UserError("Multiple parent channels specified within the "
                            + "same channel tree.")
        return found_list[0]

    def ordered_labels(self):
        """Return list of labels with parent first"""
        if self.parents_specified:
            return self.channel_map.keys()
        labels = self.channel_map.keys()
        labels.remove(self.src_parent)
        labels.insert(0, self.src_parent)
        return labels

    def prepare(self):
        self.validate_dest_channels()
        for cloner in self.cloners:
            cloner.prepare()

    def get_errata_to_clone(self):
        d_result = {}
        for cloner in self.cloners:
            d_result[cloner.src_label() + "_to_" + cloner.dest_label()] = \
                cloner.get_errata_to_clone()
        return d_result

    def pre_summary(self):
        for cloner in self.cloners:
            cloner.pre_summary()

    def clone(self, skip_depsolve=False):
        added_pkgs = []
        for cloner in self.cloners:
            cloner.process()
            pkg_diff = cloner.pkg_diff()
            added_pkgs += pkg_diff
            log_clean(0, "")
            log_clean(0, "%i packages were added to %s as a result of clone:"
                      % (len(pkg_diff), cloner.dest_label()))
            log_clean(0, "\n".join([pkg['nvrea'] for pkg in pkg_diff]))

        if len(added_pkgs) > 0 and not skip_depsolve:
            self.dep_solve([pkg['nvrea'] for pkg in added_pkgs])

    def dep_solve(self, nvrea_list, labels=None):
        if not labels:
            labels = self.channel_map.keys()
        repos = [{"id": label, "relative_path": repodata(label)}
                 for label in labels]

        print "Copying repodata, please wait."

        # dep solver expects the metadata to be in /repodata directory;
        # create temporary symlinks
        temp_repo_links = []
        repo = None
        for repo in repos:
            yum_repodata_path = "%s/repodata" % (repo['relative_path'])
            create_repodata_link(repo['relative_path'], yum_repodata_path)
            temp_repo_links.append(yum_repodata_path)
        try:
            try:
                self.solver = DepSolver(repos)
                self.__dep_solve(nvrea_list)
                self.solver.cleanup()
            except RepoError, e:
                raise UserRepoError(repo["id"], e.value)
        finally:
            # clean up temporary symlinks
            for link in temp_repo_links:
                remove_repodata_link(link)

    def __dep_solve(self, nvrea_list):
        self.solver.setPackages(nvrea_list)
        dep_results = self.solver.processResults(
            self.solver.getDependencylist())
        self.process_deps(dep_results)

    def process_deps(self, deps):
        needed_list = dict((channel[0], [])
                           for channel in self.channel_map.values())
        unsolved_deps = []

        print('Processing Dependencies:')
        pb = ProgressBar(prompt="", endTag=' - complete',
                         finalSize=len(deps), finalBarLength=40,
                         stream=sys.stdout)
        pb.printAll(1)

        #loop through all the deps and find any that don't exist in the
        # destination channels
        for pkg in deps:
            pb.addTo(1)
            pb.printIncrement()
            for solved_list in pkg.values():
                found = False
                for cloner in self.cloners:
                    exists_from = cloner.src_pkg_exist(solved_list)
                    exists_to = cloner.dest_pkg_exist(solved_list)

                    if exists_from and not exists_to:
                        #grab oldest package
                        needed_list[cloner.dest_label()].append(
                            solved_list[0])
                    elif exists_from:
                        found = True
                if not found:
                    unsolved_deps.append((pkg))

        added_nevras = []
        for cloner in self.cloners:
            needed = needed_list[cloner.dest_label()]
            if len(needed) > 0:
                added_nevras = added_nevras + cloner.process_deps(needed)

        pb.printComplete()

        # recursively solve dependencies to get dependencies-of-dependencies
        if len(added_nevras) > 0:
            print 'Dependencies added, looking for new dependencies'
            self.__dep_solve(added_nevras)

    def remove_packages(self):
        for cloner in self.cloners:
            if self.removelist:
                cloner.remove_removelist(self.removelist)
            if self.blacklist:
                cloner.remove_blacklisted(self.blacklist)


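The constructor above reads the channel mapping as channel_map[from_label][0], so each source label maps to a list whose first element is the clone's label. A minimal, purely illustrative mapping (the labels below are made up):

channel_hash = {
    'base-channel-x86_64': ['clone-base-channel-x86_64'],
    'child-channel-x86_64': ['clone-child-channel-x86_64'],
}

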
class ChannelTreeCloner:
    """Usage:
        a = ChannelTreeCloner(channel_hash, xmlrpc, db, to_date, blacklist,
                              removelist, detached, security_only,
                              use_update_date, no_errata_sync, errata,
                              parents)
        a.create_channels()
        a.prepare()
        a.clone()
    """
    # pylint: disable=R0902
    def __init__(self, channels, remote_api, db_api, to_date, blacklist,
                 removelist, detached, security_only, use_update_date,
                 no_errata_sync, errata, parents=None):
        self.remote_api = remote_api
        self.db_api = db_api
        self.channel_map = channels
        self.to_date = to_date
        self.cloners = []
        self.blacklist = blacklist
        self.removelist = removelist
        if parents:
            self.src_parent = parents[0]
            self.dest_parent = parents[1]
            self.parents_specified = True
        else:
            self.src_parent = None
            self.dest_parent = None
            self.parents_specified = False
        self.channel_details = None
        self.detached = detached
        self.security_only = security_only
        self.use_update_date = use_update_date
        self.no_errata_sync = no_errata_sync
        self.solver = None
        self.validate_source_channels()
        for from_label in self.ordered_labels():
            to_label = self.channel_map[from_label][0]
            cloner = ChannelCloner(from_label, to_label, self.to_date,
                                   self.remote_api, self.db_api,
                                   self.detached, self.security_only,
                                   self.use_update_date,
                                   self.no_errata_sync, errata)
            self.cloners.append(cloner)

    def needing_create(self):
        """
        returns a trimmed down version of channel_map where the
        value needs creating
        """
        to_create = {}
        existing = self.remote_api.list_channel_labels()
        if self.parents_specified:
            if (self.dest_parent not in existing
                    or self.src_parent not in existing):
                raise UserError("Channels specified with --parents must"
                                + " already exist.\nIf you want to clone the"
                                + " parent channels too simply add another"
                                + " --channels option.")
        for src, dest in self.channel_map.items():
            if dest[0] not in existing:
                to_create[src] = dest[0]
        return to_create

    def pending(self):
        total = 0
        for cloner in self.cloners:
            total += cloner.pending()
        return total

    def find_cloner(self, src_label):
        for cloner in self.cloners:
            if cloner.src_label() == src_label:
                return cloner

    def create_channels(self, skip_depsolve=False):
        to_create = self.needing_create()
        if len(to_create) == 0:
            return

        if self.parents_specified:
            dest_parent = [self.dest_parent]
        else:
            dest_parent = self.channel_map[self.src_parent]
        nvreas = []

        #clone the destination parent if it doesn't exist
        if dest_parent[0] in to_create.values():
            self.remote_api.clone_channel(self.src_parent, dest_parent, None)
            del to_create[self.src_parent]
            cloner = self.find_cloner(self.src_parent)
            nvreas += [pkg['nvrea'] for pkg
                       in cloner.reset_new_pkgs().values()]

        #clone the children
        for cloner in self.cloners:
            if cloner.dest_label() in to_create.values():
                dest = self.channel_map[cloner.src_label()]
                self.remote_api.clone_channel(cloner.src_label(), dest,
                                              dest_parent[0])
                nvreas += [pkg['nvrea'] for pkg
                           in cloner.reset_new_pkgs().values()]

        #dep solve all added packages with the parent channel
        if not skip_depsolve:
            self.dep_solve(nvreas, labels=(to_create.keys()
                                           + [self.src_parent]))

    def validate_source_channels(self):
        self.channel_details = self.remote_api.channel_details(
            self.channel_map, values=False)
        if not self.src_parent:
            self.src_parent = self.find_parent(self.channel_map.keys())
        self.validate_children(self.src_parent, self.channel_map.keys())

    def validate_dest_channels(self):
        self.channel_details = self.remote_api.channel_details(
            self.channel_map)
        if not self.dest_parent:
            self.dest_parent = self.find_parent(self.channel_map.values())
        self.validate_children(self.dest_parent, self.channel_map.values())

    def validate_children(self, parent, channel_list):
        """ Make sure all children are children of the parent"""
        for channel in channel_list:
            if type(channel) == type([]):
                channel = channel[0]
            if channel != parent:
                if (self.channel_details[channel]['parent_channel_label']
                        != parent):
                    raise UserError(
                        ("Child channel '%s' is not a child of "
                         + "parent channel '%s'. If you are using --config "
                         + "ensure you have not specified "
                         + "existing-parent-do-not-modify on a child "
                         + "channel.") % (channel, parent))

    def find_parent(self, label_list):
        found_list = []
        for label in label_list:
            if type(label) == type([]):
                label = label[0]
            if self.channel_details[label]['parent_channel_label'] == '':
                found_list.append(label)
        if len(found_list) == 0:
            raise UserError("Parent Channel not specified.")
        if len(found_list) > 1:
            raise UserError("Multiple parent channels specified within the "
                            + "same channel tree.")
        return found_list[0]

    def ordered_labels(self):
        """Return list of labels with parent first"""
        if self.parents_specified:
            return self.channel_map.keys()
        labels = self.channel_map.keys()
        labels.remove(self.src_parent)
        labels.insert(0, self.src_parent)
        return labels

    def prepare(self):
        self.validate_dest_channels()
        for cloner in self.cloners:
            cloner.prepare()

    def get_errata_to_clone(self):
        d_result = {}
        for cloner in self.cloners:
            d_result[cloner.src_label() + "_to_" + cloner.dest_label()] = \
                cloner.get_errata_to_clone()
        return d_result

    def pre_summary(self):
        for cloner in self.cloners:
            cloner.pre_summary()

    def clone(self, skip_depsolve=False):
        added_pkgs = []
        for cloner in self.cloners:
            cloner.process()
            pkg_diff = cloner.pkg_diff()
            added_pkgs += pkg_diff
            log_clean(0, "")
            log_clean(0, "%i packages were added to %s as a result of clone:"
                      % (len(pkg_diff), cloner.dest_label()))
            log_clean(0, "\n".join([pkg['nvrea'] for pkg in pkg_diff]))

        if len(added_pkgs) > 0 and not skip_depsolve:
            self.dep_solve([pkg['nvrea'] for pkg in added_pkgs])

    def dep_solve(self, nvrea_list, labels=None):
        if not labels:
            labels = self.channel_map.keys()
        repos = [{"id": label, "relative_path": repodata(label)}
                 for label in labels]

        print "Copying repodata, please wait."

        # dep solver expects the metadata to be in /repodata directory;
        # create temporary symlinks
        temp_repo_links = []
        repo = None
        for repo in repos:
            yum_repodata_path = "%s/repodata" % (repo['relative_path'])
            create_repodata_link(repo['relative_path'], yum_repodata_path)
            temp_repo_links.append(yum_repodata_path)
        try:
            try:
                self.solver = DepSolver(repos)
                self.__dep_solve(nvrea_list)
                self.solver.cleanup()
            except RepoError, e:
                raise UserRepoError(repo["id"], e.value)
        finally:
            # clean up temporary symlinks
            for link in temp_repo_links:
                remove_repodata_link(link)

    def __dep_solve(self, nvrea_list):
        self.solver.setPackages(nvrea_list)
        dep_results = self.solver.processResults(
            self.solver.getDependencylist())
        self.process_deps(dep_results)

    def process_deps(self, deps):
        needed_list = dict((channel[0], [])
                           for channel in self.channel_map.values())
        unsolved_deps = []

        print('Processing Dependencies:')
        pb = ProgressBar(prompt="", endTag=' - complete',
                         finalSize=len(deps), finalBarLength=40,
                         stream=sys.stdout)
        pb.printAll(1)

        #loop through all the deps and find any that don't exist in the
        # destination channels
        for pkg in deps:
            pb.addTo(1)
            pb.printIncrement()
            for solved_list in pkg.values():
                found = False
                for cloner in self.cloners:
                    exists_from = cloner.src_pkg_exist(solved_list)
                    exists_to = cloner.dest_pkg_exist(solved_list)

                    if exists_from and not exists_to:
                        #grab oldest package
                        needed_list[cloner.dest_label()].append(
                            solved_list[0])
                    elif exists_from:
                        found = True
                if not found:
                    unsolved_deps.append((pkg))

        added_nevras = []
        for cloner in self.cloners:
            needed = needed_list[cloner.dest_label()]
            if len(needed) > 0:
                added_nevras = added_nevras + cloner.process_deps(needed)

        pb.printComplete()

        # recursively solve dependencies to get dependencies-of-dependencies
        if len(added_nevras) > 0:
            print 'Dependencies added, looking for new dependencies'
            self.__dep_solve(added_nevras)

    def remove_packages(self):
        for cloner in self.cloners:
            if self.removelist:
                cloner.remove_removelist(self.removelist)
            if self.blacklist:
                cloner.remove_blacklisted(self.blacklist)


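Both versions of ChannelTreeCloner raise UserError and UserRepoError, which are defined elsewhere in the tool and not shown in this listing. Below is a minimal sketch of what such exception classes might look like, based only on how they are called above; the message wording and inheritance are assumptions.

class UserError(Exception):
    """Error caused by bad input; the message is shown to the user as-is."""

    def __init__(self, msg):
        Exception.__init__(self, msg)
        self.msg = msg


class UserRepoError(UserError):
    """Raised when a channel's repodata cannot be read by the dep solver."""

    def __init__(self, label, detail):
        UserError.__init__(
            self, "Could not read repository data for channel '%s': %s"
            % (label, detail))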