def main(argv, indirect=False): global ignore_slots, bold, order, topper #opts parsing opts = process_args(argv) ignore_slots = opts.ignore_slot use_overlays = opts.overlays # user can do both --arch=a,b,c or --arch a b c if len(opts.arch) > 1: opts.arch = ','.join(opts.arch) highlight_arch = ''.join(opts.arch).split(',') bold = opts.bold order = opts.align topper = opts.top_position prefix = opts.prefix color = opts.color package = opts.package # equery support if indirect and len(package) <= 0: msg_err = 'No packages specified' raise SystemExit(msg_err) # disable colors when redirected and they are not forced on if not color and not sys.stdout.isatty(): # disable colors porto.nocolor() keywords = keywords_header(prefix, highlight_arch, order) if len(package) > 0: mysettings = portc(local_config=False) dbapi = portdbapi(mysettings=mysettings) if not use_overlays: dbapi.porttrees = [dbapi.porttree_root] for pkg in package: process_display(pkg, keywords, dbapi) else: currdir = os.getcwd() # check if there are actualy some ebuilds ebuilds = [ '%s' % x for x in os.listdir(currdir) if fnmatch.fnmatch(x, '*.ebuild') ] if len(ebuilds) <= 0: msg_err = 'No ebuilds at "%s"' % currdir raise SystemExit(msg_err) package = '%s/%s' % (os.path.basename( os.path.abspath('../')), os.path.basename(currdir)) ourtree = os.path.abspath('../../') overlays = '%s %s' % (ports['PORTDIR_OVERLAY'], ourtree) mysettings = portc(local_config=False, env={'PORTDIR_OVERLAY': overlays}) dbapi = portdbapi(mysettings=mysettings) # specify that we want just our nice tree we are in cwd dbapi.porttrees = [ourtree] process_display(package, keywords, dbapi) return 0
def main(argv, indirect = False): global ignore_slots, bold, order, topper #opts parsing opts = process_args(argv) ignore_slots = opts.ignore_slot use_overlays = opts.overlays # user can do both --arch=a,b,c or --arch a b c if len(opts.arch) > 1: opts.arch = ','.join(opts.arch) highlight_arch = ''.join(opts.arch).split(',') bold = opts.bold order = opts.align topper = opts.top_position prefix = opts.prefix color = opts.color package = opts.package # equery support if indirect and len(package) <= 0: msg_err = 'No packages specified' raise SystemExit(msg_err) # disable colors when redirected and they are not forced on if not color and not sys.stdout.isatty(): # disable colors porto.nocolor() keywords = keywords_header(prefix, highlight_arch, order) if len(package) > 0: mysettings = portc(local_config=False) dbapi = portdbapi(mysettings=mysettings) if not use_overlays: dbapi.porttrees = [dbapi.porttree_root] for pkg in package: process_display(pkg, keywords, dbapi) else: currdir = os.getcwd() # check if there are actualy some ebuilds ebuilds = ['%s' % x for x in os.listdir(currdir) if fnmatch.fnmatch(x, '*.ebuild')] if len(ebuilds) <= 0: msg_err = 'No ebuilds at "%s"' % currdir raise SystemExit(msg_err) package= '%s/%s' % (os.path.basename(os.path.abspath('../')), os.path.basename(currdir)) ourtree = os.path.abspath('../../') overlays = '%s %s' % (ports['PORTDIR_OVERLAY'], ourtree) mysettings = portc(local_config=False, env={'PORTDIR_OVERLAY': overlays}) dbapi = portdbapi(mysettings=mysettings) # specify that we want just our nice tree we are in cwd dbapi.porttrees = [ourtree] process_display(package, keywords, dbapi) return 0
def look_for_keywords(self):
    """Rewrite self.ebuild from a filesystem path to category/name form,
    fetch its KEYWORDS from the main tree, and collect every plain ~arch
    into self.arches plus a matching arch@gentoo.org entry in self.cc."""
    # Category is the third-from-last path component:
    # .../<category>/<package>/<package-version>.ebuild
    parts = self.ebuild.split(os.sep)
    del parts[-2:]
    cat = parts.pop()
    # Package name is the file name without its .ebuild extension.
    ebuild = os.path.splitext(os.path.basename(self.ebuild))[0]
    self.ebuild = '{0}/{1}'.format(cat, ebuild)
    # Look the ebuild up against the main tree only.
    settings = portage.config(local_config=False)
    db = portage.portdbapi(mysettings=settings)
    db.porttrees = [db.porttree_root]
    keywords = db.aux_get(self.ebuild, ['KEYWORDS'], db.porttree_root)[0]
    if not keywords:
        sys.exit(ERROR_MESSAGES['keywords'].format(ebuild))
    for keyword in keywords.split():
        # Keep ~arch keywords only, and skip "exotic" arches such as
        # ~amd64-macos (anything containing a dash).
        if '~' not in keyword or '-' in keyword:
            continue
        arch = keyword.strip('~')
        self.arches.append(arch)
        self.cc.append(arch + '@gentoo.org')
def getPackagesInCatWithEclass(cur_overlay, cat, eclass):
    """Return the set of cat/pkg atoms in category `cat` of `cur_overlay`
    whose ebuilds inherit `eclass` (per their INHERITED metadata).

    cur_overlay -- overlay object exposing .root (tree path) and .name
    cat         -- category name, e.g. 'dev-python'
    eclass      -- eclass name to look for, e.g. 'distutils-r1'
    """
    cur_tree = cur_overlay.root
    # Prefer the repo's self-declared name; fall back to the overlay's name.
    try:
        with open(os.path.join(cur_tree, 'profiles/repo_name')) as f:
            cur_name = f.readline().strip()
    except FileNotFoundError:
        cur_name = cur_overlay.name
    # Point portage at exactly this one repository.
    env = os.environ.copy()
    env['PORTAGE_REPOSITORIES'] = '''
[DEFAULT]
main-repo = %s

[%s]
location = %s
''' % (cur_name, cur_name, cur_tree)
    p = portage.portdbapi(mysettings=portage.config(env=env, config_profile_path=''))
    p.frozen = False
    mypkgs = set()
    for catpkg in p.cp_all(categories=[cat]):
        for pkg in p.cp_list(catpkg):
            if pkg == '':
                print("No match for %s" % catpkg)
                continue
            try:
                aux = p.aux_get(pkg, ["INHERITED"])
            except PortageKeyError:
                print("Portage key error for %s" % repr(pkg))
                continue
            if eclass in aux[0].split():
                # BUG FIX: the original guarded this with
                # `if eclass not in mypkgs`, which compared the eclass name
                # against a set of cat/pkg atoms (which always contain '/')
                # and was therefore always true; add unconditionally.
                mypkgs.add(catpkg)
    return mypkgs
def get_buildlog_info(session, settings, pkg, build_dict):
    """Parse the portage build log for `pkg`, classify highlighted regions,
    run repoman, and return a summary dict.

    Returns a dict with keys: 'repoman_error_list', 'qa_error_list'
    (list, or False when no QA errors were seen), 'error_log_list',
    'summary_error_list' (error-class ids as strings) and 'hilight_dict'.
    """
    myportdb = portage.portdbapi(mysettings=settings)
    # Full log text keyed by line number, plus the total line count.
    logfile_text_dict, max_text_lines = get_log_text_dict(settings.get("PORTAGE_LOG_FILE"))
    hilight_dict = search_buildlog(session, logfile_text_dict, max_text_lines)
    error_log_list = []
    qa_error_list = []
    repoman_error_list = []
    sum_build_log_list = []
    error_info_list = get_error_info_list(session)
    # Collect the text of every highlighted region; css ids "3"/"4" mark QA.
    for k, v in sorted(hilight_dict.items()):
        if v['startline'] == v['endline']:
            # single-line region
            error_log_list.append(logfile_text_dict[k])
            if v['hilight_css_id'] == "3" or v['hilight_css_id'] == "4":
                # qa = 3 and 4
                qa_error_list.append(logfile_text_dict[k])
        else:
            # multi-line region: copy every line from startline to endline
            i = k
            while i != (v['endline'] + 1):
                error_log_list.append(logfile_text_dict[i])
                if v['hilight_css_id'] == "3" or v['hilight_css_id'] == "4":
                    # qa = 3 and 4
                    qa_error_list.append(logfile_text_dict[i])
                i = i + 1
    # Run repoman check_repoman()
    repoman_error_list = check_repoman(settings, myportdb, build_dict['cpv'], pkg.repo)
    if repoman_error_list:
        sum_build_log_list.append("1")  # repoman = 1
    if qa_error_list != []:
        sum_build_log_list.append("2")  # qa = 2
    else:
        # NOTE(review): deliberately becomes False (not []) when empty —
        # downstream code appears to test truthiness; confirm before changing.
        qa_error_list = False
    # Match known error signatures against lines that look like ebuild errors.
    error_search_line = "^ \\* ERROR: "
    for error_log_line in error_log_list:
        if re.search(error_search_line, error_log_line):
            for error_info in error_info_list:
                if re.search(error_info.ErrorSearch, error_log_line):
                    sum_build_log_list.append(error_info.ErrorId)
    build_log_dict = {}
    build_log_dict['repoman_error_list'] = repoman_error_list
    build_log_dict['qa_error_list'] = qa_error_list
    build_log_dict['error_log_list'] = error_log_list
    build_log_dict['summary_error_list'] = sum_build_log_list
    build_log_dict['hilight_dict'] = hilight_dict
    return build_log_dict
def create_portdb(portdir=None, cachedir=None, config_root=None,
                  target_root=None, profile=None, **kwargs):
    """Build and return a portage portdbapi rooted at *portdir*.

    PORTAGE_CONFIGROOT and ROOT environment variables supply defaults for
    config_root / target_root; overlays are disabled because metadata is
    generated for the main repo only.  Extra **kwargs are accepted and
    ignored for caller compatibility.
    """
    if cachedir is not None:
        os.environ["PORTAGE_DEPCACHEDIR"] = cachedir
    # Fill in defaults from the environment (or sensible fallbacks).
    config_root = os.environ.get("PORTAGE_CONFIGROOT", "/") if config_root is None else config_root
    target_root = os.environ.get("ROOT", "/") if target_root is None else target_root
    profile = "" if profile is None else profile

    portage = import_portage()
    # portage_const moved into the portage package; keep the legacy fallback.
    try:
        from portage import const as portage_const
    except ImportError:
        import portage_const

    # Disable overlays because we only generate metadata for the main repo.
    os.environ["PORTDIR_OVERLAY"] = ""

    settings = portage.config(
        config_profile_path=profile,
        config_incrementals=portage_const.INCREMENTALS,
        target_root=target_root,
        config_root=config_root)

    if portdir is None:
        portdir = settings["PORTDIR"]
    # The canonical path is the key for portdb.auxdb.
    portdir = os.path.realpath(portdir)
    settings["PORTDIR"] = portdir
    settings.backup_changes("PORTDIR")

    return portage.portdbapi(portdir, mysettings=settings)
def getDependencies(cur_overlay, catpkgs, levels=0, cur_level=0):
    """Return the set of package atoms that `catpkgs` depend on
    (DEPEND + RDEPEND), recursing up to `levels` levels deep.

    cur_overlay -- overlay object exposing .root (tree path) and .name
    catpkgs     -- iterable of cat/pkg atoms to start from
    levels      -- maximum recursion depth; 0 means direct deps only
    cur_level   -- internal recursion counter; leave at 0 when calling
    """
    cur_tree = cur_overlay.root
    # Prefer the repo's self-declared name; fall back to the overlay's name.
    try:
        with open(os.path.join(cur_tree, 'profiles/repo_name')) as f:
            cur_name = f.readline().strip()
    except FileNotFoundError:
        cur_name = cur_overlay.name
    # Point portage at exactly this one repository.
    env = os.environ.copy()
    env['PORTAGE_REPOSITORIES'] = '''
[DEFAULT]
main-repo = %s

[%s]
location = %s
''' % (cur_name, cur_name, cur_tree)
    p = portage.portdbapi(mysettings=portage.config(env=env, config_profile_path=''))
    p.frozen = False
    mypkgs = set()
    for catpkg in list(catpkgs):
        for pkg in p.cp_list(catpkg):
            if pkg == '':
                print("No match for %s" % catpkg)
                continue
            try:
                aux = p.aux_get(pkg, ["DEPEND", "RDEPEND"])
            except PortageKeyError:
                print("Portage key error for %s" % repr(pkg))
                # BUG FIX: was `return mypkgs`, which aborted the whole scan
                # on the first broken ebuild; skip just this ebuild, matching
                # the sibling getPackagesInCatWithEclass().
                continue
            for dep in flatten(use_reduce(aux[0] + " " + aux[1], matchall=True)):
                # ignore blockers
                if len(dep) and dep[0] == "!":
                    continue
                try:
                    mypkg = dep_getkey(dep)
                except portage.exception.InvalidAtom:
                    continue
                if mypkg not in mypkgs:
                    mypkgs.add(mypkg)
                    if levels != cur_level:
                        # BUG FIX: pass a collection, not the bare atom
                        # string — this function iterates its `catpkgs`
                        # argument, so a bare string would be walked
                        # character by character.
                        mypkgs = mypkgs.union(getDependencies(
                            cur_overlay, [mypkg],
                            levels=levels, cur_level=cur_level + 1))
    return mypkgs
def add_buildquery_main(config_id):
    """Queue build jobs for every package in every repo of the given config.

    config_id -- id of the config row whose PORTAGE_CONFIGROOT to use
    Returns True when all jobs have been queued.
    """
    conn = 0  # legacy placeholder; the helpers accept it as a dummy connection
    config_setup = get_config(conn, config_id)
    log_msg = "Adding build jobs for: %s" % (config_setup,)
    add_zobcs_logs(conn, log_msg, "info", config_id)
    check_make_conf()
    log_msg = "Check configs done"
    # BUG FIX: `config_profile` was undefined here (NameError at runtime);
    # log against config_id like every other call in this function.
    add_zobcs_logs(conn, log_msg, "info", config_id)
    # Get default config from the configs table and default_config=1
    default_config_root = "/var/cache/zobcs/" + zobcs_settings_dict['zobcs_gitreponame'] + "/" + config_setup + "/"
    # Set config_root (PORTAGE_CONFIGROOT) to default_config_root
    mysettings = portage.config(config_root=default_config_root)
    myportdb = portage.portdbapi(mysettings=mysettings)
    # Constructed for its side effects; the instance itself is unused here.
    init_package = zobcs_package(mysettings, myportdb)
    log_msg = "Setting default config to: %s" % (config_setup)
    # BUG FIX: `config_is` was undefined here (NameError at runtime).
    add_zobcs_logs(conn, log_msg, "info", config_id)
    # Use all exept 2 cores when multiprocessing
    pool_cores = multiprocessing.cpu_count()
    if pool_cores >= 3:
        use_pool_cores = pool_cores - 2
    else:
        use_pool_cores = 1
    pool = multiprocessing.Pool(processes=use_pool_cores)
    repo_trees_list = myportdb.porttrees
    for repo_dir in repo_trees_list:
        repo = myportdb.getRepositoryName(repo_dir)
        repo_dir_list = [repo_dir]
        # Get the package list from the repo
        package_list_tree = myportdb.cp_all(trees=repo_dir_list)
        for cp in sorted(package_list_tree):
            pool.apply_async(add_cpv_query_pool,
                             (mysettings, myportdb, config_id, cp, repo,))
    pool.close()
    pool.join()
    log_msg = "Adding build jobs for: %s ... Done." % (config_setup,)
    add_zobcs_logs(conn, log_msg, "info", config_id)
    return True
def config_get_use(ebuild_id, config_id):
    """Collect USE/IUSE flag information for one ebuild under one config.

    ebuild_id -- primary key into EbuildsMetadata
    config_id -- primary key into Configs (currently only used to 404-check;
                 the portage config_root is hard-wired to "/")
    Returns a dict with 'useflags', 'iuse', 'usemasked', 'use_expand_hidden'.
    """
    CS = get_object_or_404(Configs, ConfigId=config_id)
    ConfigSetupName = CS.Config
    ConfigSetupHostName = CS.HostName
    # Change config/setup
    # my_new_setup = "/var/cache/gobs/" + gobs_settings_dict['gobs_gitreponame'] + "/" + host_config + "/"
    my_new_setup = "/"
    mysettings_setup = portage.config(config_root=my_new_setup)
    myportdb_setup = portage.portdbapi(mysettings=mysettings_setup)
    # get cpv
    EM = EbuildsMetadata.objects.get(EbuildId=ebuild_id)
    C = EM.EbuildId.PackageId.CategoryId.Category
    P = EM.EbuildId.PackageId.Package
    V = EM.EbuildId.Version
    build_cpv = C + "/" + P + "-" + V
    # Get the iuse and use flags for that config/setup and cpv
    init_useflags = zobcs_use_flags(mysettings_setup, myportdb_setup, build_cpv)
    iuse_flags_list, final_use_list = init_useflags.get_flags()
    iuse_flags_list2 = []
    for iuse_line in iuse_flags_list:
        # strip +/- prefixes etc. down to the bare flag name
        iuse_flags_list2.append(init_useflags.reduce_flag(iuse_line))
    final_use, use_expand_hidden, usemasked, useforced = init_useflags.get_all_cpv_use()
    # Dict the needed info
    attDict = {}
    attDict['useflags'] = final_use_list
    attDict['iuse'] = iuse_flags_list2
    attDict['usemasked'] = usemasked
    attDict['use_expand_hidden'] = use_expand_hidden
    # Clean some cache
    myportdb_setup.close_caches()
    portage.portdbapi.portdbapi_instances.remove(myportdb_setup)
    return attDict
#!/usr/bin/python3 # This QA check will scan the meta-repo on the existing system for ebuilds that support an older version # of python3 but not python3.6. This does not scan python_single_target ebuilds but rather those that can # be built to support multiple python implementations. from merge.merge_utils import * import portage p = portage.portdbapi() p.freeze() future_aux = {} old_python_set = { "python_targets_python3_3", "python_targets_python3_4", "python_targets_python3_5" } cur_python_set = { "python_targets_python3_6" } def future_generator(): for cp in p.cp_all(): repos = p.getRepositories(catpkg=cp) cpv = p.xmatch("bestmatch-visible", cp) if cpv: future = p.async_aux_get(cpv, [ "INHERITED", "IUSE" ]) future_aux[id(future)] = (cpv, repos) yield future for future in iter_completed(future_generator()): cpv, repo = future_aux.pop(id(future)) try: result = future.result()
def get_build_dict_db(session, config_id, settings, zobcs_settings_dict, pkg):
    """Build (and partially persist) the build_dict for one package build.

    Returns the populated build_dict on success, or None (bare `return`)
    when the ebuild checksum either cannot be matched or matches duplicate
    ebuild rows in the database.
    """
    myportdb = portage.portdbapi(mysettings=settings)
    cpvr_list = catpkgsplit(pkg.cpv, silent=1)
    categories = cpvr_list[0]
    package = cpvr_list[1]
    repo = pkg.repo
    ebuild_version = cpv_getversion(pkg.cpv)
    log_msg = "Logging %s:%s" % (pkg.cpv, repo,)
    add_logs(session, log_msg, "info", config_id)
    PackageInfo = get_package_info(session, categories, package, repo)
    build_dict = {}
    build_dict['ebuild_version'] = ebuild_version
    build_dict['package_id'] = PackageInfo.PackageId
    build_dict['cpv'] = pkg.cpv
    build_dict['categories'] = categories
    build_dict['package'] = package
    build_dict['config_id'] = config_id
    # Resolve the effective USE flags for this cpv under this config.
    init_useflags = zobcs_use_flags(settings, myportdb, pkg.cpv)
    iuse_flags_list, final_use_list = init_useflags.get_flags_pkg(pkg, settings)
    iuse = []
    for iuse_line in iuse_flags_list:
        iuse.append(init_useflags.reduce_flag(iuse_line))
    iuse_flags_list2 = list(set(iuse))
    use_enable = final_use_list
    use_disable = list(set(iuse_flags_list2).difference(set(use_enable)))
    # Map use-flag db id -> enabled?
    use_flagsDict = {}
    for x in use_enable:
        use_id = get_use_id(session, x)
        use_flagsDict[use_id] = True
    for x in use_disable:
        use_id = get_use_id(session, x)
        use_flagsDict[use_id] = False
    if use_enable == [] and use_disable == []:
        build_dict['build_useflags'] = None
    else:
        build_dict['build_useflags'] = use_flagsDict
    # Checksum of the on-disk ebuild identifies the exact ebuild revision.
    pkgdir = myportdb.getRepositoryPath(repo) + "/" + categories + "/" + package
    ebuild_version_checksum_tree = portage.checksum.sha256hash(pkgdir + "/" + package + "-" + ebuild_version + ".ebuild")[0]
    build_dict['checksum'] = ebuild_version_checksum_tree
    ebuild_id_list, status = get_ebuild_id_db(session, build_dict['checksum'], build_dict['package_id'])
    if status:
        if ebuild_id_list is None:
            # Unknown checksum: refresh the package in the db and retry once.
            log_msg = "%s:%s Don't have any ebuild_id!" % (pkg.cpv, repo,)
            add_logs(session, log_msg, "info", config_id)
            update_manifest_sql(session, build_dict['package_id'], "0")
            init_package = zobcs_package(session, settings, myportdb, config_id, zobcs_settings_dict)
            init_package.update_package_db(build_dict['package_id'])
            ebuild_id_list, status = get_ebuild_id_db(session, build_dict['checksum'], build_dict['package_id'])
            if status and ebuild_id_list is None:
                log_msg = "%s:%s Don't have any ebuild_id!" % (pkg.cpv, repo,)
                add_logs(session, log_msg, "error", config_id)
        else:
            # Same checksum on multiple ebuild rows: flag them all as old.
            old_ebuild_id_list = []
            for ebuild_id in ebuild_id_list:
                log_msg = "%s:%s:%s Dups of checksums" % (pkg.cpv, repo, ebuild_id,)
                add_logs(session, log_msg, "error", config_id)
                old_ebuild_id_list.append(ebuild_id)
            add_old_ebuild(session, old_ebuild_id_list)
        # Either way the build cannot proceed with a usable ebuild id.
        return
    build_dict['ebuild_id'] = ebuild_id_list
    build_job_id = get_build_job_id(session, build_dict)
    if build_job_id is None:
        build_dict['build_job_id'] = None
    else:
        build_dict['build_job_id'] = build_job_id
    return build_dict
def main(argv, indirect = False): global ignore_slots, bold, order, topper #opts parsing opts = process_args(argv) ignore_slots = opts.ignore_slot use_overlays = opts.overlays highlight_arch = ''.join(opts.arch).split(',') bold = opts.bold order = opts.align topper = opts.top_position prefix = opts.prefix color = opts.color package = opts.package # equery support if indirect and len(package) <= 0: msg_err = 'No packages specified' raise SystemExit(msg_err) # disable colors when redirected and they are not forced on if not color and not sys.stdout.isatty(): # disable colors porto.nocolor() # Imply prefix if user specified any architectures (Bug 578496) if len(opts.arch) > 0: prefix = True keywords = keywords_header(prefix, highlight_arch, order) if len(package) > 0: mysettings = portc(local_config=False) dbapi = portdbapi(mysettings=mysettings) if not use_overlays: dbapi.porttrees = [dbapi.porttree_root] for pkg in package: process_display(pkg, keywords, dbapi) else: currdir = os.getcwd() # check if there are actualy some ebuilds ebuilds = ['%s' % x for x in os.listdir(currdir) if fnmatch.fnmatch(x, '*.ebuild')] if len(ebuilds) <= 0: msg_err = 'No ebuilds at "%s"' % currdir raise SystemExit(msg_err) package= '%s/%s' % (os.path.basename(os.path.abspath('../')), os.path.basename(currdir)) ourtree = os.path.realpath('../..') ourstat = os.stat(ourtree) ourstat = (ourstat.st_ino, ourstat.st_dev) for repo in ports.repositories: try: repostat = os.stat(repo.location) except OSError: continue if ourstat == (repostat.st_ino, repostat.st_dev): dbapi = portdbapi(mysettings=portc(local_config=False)) break else: repos = {} for repo in ports.repositories: repos[repo.name] = repo.location with open(os.path.join(ourtree, 'profiles', 'repo_name'), 'rt') as f: repo_name = f.readline().strip() repos[repo_name] = ourtree repos = ''.join('[{}]\nlocation={}\n'.format(k, v) for k, v in repos.items()) mysettings = portc(local_config=False, env={'PORTAGE_REPOSITORIES': repos}) dbapi = 
portdbapi(mysettings=mysettings) # specify that we want just our nice tree we are in cwd dbapi.porttrees = [ourtree] process_display(package, keywords, dbapi) return 0
def git_repo_sync_main(session):
    """Sync every configured git repo once all guest configs are idle.

    Waits until no guest reports a 'Runing' status, fetches/merges each git
    repo, and collects the cat/pkg atoms whose Manifests changed.
    Returns repo_cp_dict: {repo_name: {'cp_list': [cat/pkg, ...]}} for repos
    that received updates (up-to-date repos are omitted).
    """
    zobcs_settings_dict = read_config_settings()
    _hostname = zobcs_settings_dict['hostname']
    _config = zobcs_settings_dict['zobcs_config']
    config_id = get_config_id(session, _config, _hostname)
    host_config = _hostname + "/" + _config
    default_config_root = "/var/cache/zobcs/" + zobcs_settings_dict['zobcs_gitreponame'] + "/" + host_config + "/"
    mysettings = portage.config(config_root=default_config_root)
    myportdb = portage.portdbapi(mysettings=mysettings)
    GuestBusy = True
    log_msg = "Waiting for Guest to be idel"
    add_logs(session, log_msg, "info", config_id)
    # Collect the ids of all guest (non-host) configs.
    guestid_list = []
    for config in get_config_all_info(session):
        if not config.Host:
            guestid_list.append(config.ConfigId)
    # Poll every 30s until no guest is running.
    while GuestBusy:
        Status_list = []
        for guest_id in guestid_list:
            ConfigMetadata = get_configmetadata_info(session, guest_id)
            Status_list.append(ConfigMetadata.Status)
        if not 'Runing' in Status_list:
            GuestBusy = False
        else:
            time.sleep(30)
    # Remove the temporary profiles/config dir so the sync sees a clean tree.
    try:
        os.remove(mysettings['PORTDIR'] + "/profiles/config/parent")
        os.rmdir(mysettings['PORTDIR'] + "/profiles/config")
    except:
        # best-effort cleanup; the dir may simply not exist
        pass
    repo_cp_dict = {}
    for repo_dir in git_repos_list(myportdb):
        reponame = myportdb.getRepositoryName(repo_dir)
        attr = {}
        repo = git.Repo(repo_dir)
        info_list, repouptodate = git_fetch(repo)
        if not repouptodate:
            # Extract cat/pkg atoms from changed Manifest paths in the diff.
            cp_list = []
            for diff_line in repo.git.diff('HEAD^').splitlines():
                if re.search("^diff --git.*/Manifest", diff_line):
                    diff_line2 = re.split(' b/', re.sub('diff --git', '', diff_line))
                    diff_line3 = re.sub(' a/', '', diff_line2[0])
                    if diff_line3 == diff_line2[1] or "Manifest" in diff_line3:
                        cp = re.sub('/Manifest', '', diff_line3)
                        cp_list.append(cp)
                    else:
                        # renamed path: take the "b/" (new) side
                        cp = re.sub('/Manifest', '', diff_line2[1])
                        cp_list.append(cp)
            attr['cp_list'] = cp_list
            repo_cp_dict[reponame] = attr
            git_merge(repo, info_list[0])
        else:
            log_msg = "Repo %s is up to date" % (reponame)
            add_logs(session, log_msg, "info", config_id)
    # Need to add a config dir so we can use profiles/base for reading the tree.
    # We may allready have the dir on local repo when we sync.
    try:
        os.mkdir(mysettings['PORTDIR'] + "/profiles/config", 0o777)
        with open(mysettings['PORTDIR'] + "/profiles/config/parent", "w") as f:
            f.write("../base\n")
        # redundant after the `with` block, kept from the original
        f.close()
    except:
        pass
    log_msg = "Repo sync ... Done."
    add_logs(session, log_msg, "info", config_id)
    return repo_cp_dict
def emirrordist_main(args):
    """Entry point for emirrordist: parse/validate CLI options, configure
    portage for the selected repository, and run the mirror scheduler.

    args -- raw command-line argument list
    Returns an exit status (os.EX_OK on success); calls parser.error() or
    sys.exit() on invalid options or scheduler interruption.
    """
    # The calling environment is ignored, so the program is
    # completely controlled by commandline arguments.
    env = {}

    if not sys.stdout.isatty():
        portage.output.nocolor()
        env['NOCOLOR'] = 'true'

    parser, options, args = parse_args(args)

    if options.version:
        sys.stdout.write("Portage %s\n" % portage.VERSION)
        return os.EX_OK

    config_root = options.config_root

    if options.repositories_configuration is not None:
        env['PORTAGE_REPOSITORIES'] = options.repositories_configuration

    settings = portage.config(config_root=config_root,
        local_config=False, env=env)

    default_opts = None
    if not options.ignore_default_opts:
        default_opts = settings.get('EMIRRORDIST_DEFAULT_OPTS', '').split()

    if default_opts:
        # Re-parse with EMIRRORDIST_DEFAULT_OPTS prepended, then rebuild
        # settings so they reflect the merged options.
        parser, options, args = parse_args(default_opts + args)

        settings = portage.config(config_root=config_root,
            local_config=False, env=env)

    if options.repo is None:
        # Exactly one real repo besides DEFAULT: select it implicitly.
        if len(settings.repositories.prepos) == 2:
            for repo in settings.repositories:
                if repo.name != "DEFAULT":
                    options.repo = repo.name
                    break

        if options.repo is None:
            parser.error("--repo option is required")

    repo_path = settings.repositories.treemap.get(options.repo)
    if repo_path is None:
        parser.error("Unable to locate repository named '%s'" %
            (options.repo,))

    if options.jobs is not None:
        options.jobs = int(options.jobs)

    if options.load_average is not None:
        options.load_average = float(options.load_average)

    # Each *_log / *_dir option below is normalized to an absolute path and
    # checked for the required filesystem permissions before use.
    if options.failure_log is not None:
        options.failure_log = normalize_path(
            os.path.abspath(options.failure_log))
        parent_dir = os.path.dirname(options.failure_log)
        if not (os.path.isdir(parent_dir) and
            os.access(parent_dir, os.W_OK|os.X_OK)):
            parser.error(("--failure-log '%s' parent is not a "
                "writable directory") % options.failure_log)

    if options.success_log is not None:
        options.success_log = normalize_path(
            os.path.abspath(options.success_log))
        parent_dir = os.path.dirname(options.success_log)
        if not (os.path.isdir(parent_dir) and
            os.access(parent_dir, os.W_OK|os.X_OK)):
            parser.error(("--success-log '%s' parent is not a "
                "writable directory") % options.success_log)

    if options.scheduled_deletion_log is not None:
        options.scheduled_deletion_log = normalize_path(
            os.path.abspath(options.scheduled_deletion_log))
        parent_dir = os.path.dirname(options.scheduled_deletion_log)
        if not (os.path.isdir(parent_dir) and
            os.access(parent_dir, os.W_OK|os.X_OK)):
            parser.error(("--scheduled-deletion-log '%s' parent is not a "
                "writable directory") % options.scheduled_deletion_log)

        if options.deletion_db is None:
            parser.error("--scheduled-deletion-log requires --deletion-db")

    if options.deletion_delay is not None:
        # NOTE(review): `long` is the Python 2 builtin — under Python 3 this
        # raises NameError unless `long` is aliased elsewhere in the file.
        options.deletion_delay = long(options.deletion_delay)
        if options.deletion_db is None:
            parser.error("--deletion-delay requires --deletion-db")

    if options.deletion_db is not None:
        if options.deletion_delay is None:
            parser.error("--deletion-db requires --deletion-delay")
        options.deletion_db = normalize_path(
            os.path.abspath(options.deletion_db))

    if options.temp_dir is not None:
        options.temp_dir = normalize_path(
            os.path.abspath(options.temp_dir))
        if not (os.path.isdir(options.temp_dir) and
            os.access(options.temp_dir, os.W_OK|os.X_OK)):
            parser.error(("--temp-dir '%s' is not a "
                "writable directory") % options.temp_dir)

    if options.distfiles is not None:
        options.distfiles = normalize_path(
            os.path.abspath(options.distfiles))
        if not (os.path.isdir(options.distfiles) and
            os.access(options.distfiles, os.W_OK|os.X_OK)):
            parser.error(("--distfiles '%s' is not a "
                "writable directory") % options.distfiles)
    else:
        parser.error("missing required --distfiles parameter")

    if options.mirror_overrides is not None:
        options.mirror_overrides = normalize_path(
            os.path.abspath(options.mirror_overrides))
        if not (os.access(options.mirror_overrides, os.R_OK) and
            os.path.isfile(options.mirror_overrides)):
            parser.error(
                "--mirror-overrides-file '%s' is not a readable file" %
                options.mirror_overrides)

    if options.distfiles_local is not None:
        options.distfiles_local = normalize_path(
            os.path.abspath(options.distfiles_local))
        if not (os.path.isdir(options.distfiles_local) and
            os.access(options.distfiles_local, os.W_OK|os.X_OK)):
            parser.error(("--distfiles-local '%s' is not a "
                "writable directory") % options.distfiles_local)

    if options.distfiles_db is not None:
        options.distfiles_db = normalize_path(
            os.path.abspath(options.distfiles_db))

    if options.tries is not None:
        options.tries = int(options.tries)

    if options.recycle_dir is not None:
        options.recycle_dir = normalize_path(
            os.path.abspath(options.recycle_dir))
        if not (os.path.isdir(options.recycle_dir) and
            os.access(options.recycle_dir, os.W_OK|os.X_OK)):
            parser.error(("--recycle-dir '%s' is not a "
                "writable directory") % options.recycle_dir)

    if options.recycle_db is not None:
        if options.recycle_dir is None:
            parser.error("--recycle-db requires "
                "--recycle-dir to be specified")
        options.recycle_db = normalize_path(
            os.path.abspath(options.recycle_db))

    if options.recycle_deletion_delay is not None:
        options.recycle_deletion_delay = \
            long(options.recycle_deletion_delay)

    if options.fetch_log_dir is not None:
        options.fetch_log_dir = normalize_path(
            os.path.abspath(options.fetch_log_dir))
        if not (os.path.isdir(options.fetch_log_dir) and
            os.access(options.fetch_log_dir, os.W_OK|os.X_OK)):
            parser.error(("--fetch-log-dir '%s' is not a "
                "writable directory") % options.fetch_log_dir)

    if options.whitelist_from:
        # Expand file/directory arguments into a flat list of readable files.
        normalized_paths = []
        for x in options.whitelist_from:
            path = normalize_path(os.path.abspath(x))
            if not os.access(path, os.R_OK):
                parser.error("--whitelist-from '%s' is not readable" % x)
            if os.path.isfile(path):
                normalized_paths.append(path)
            elif os.path.isdir(path):
                for file in _recursive_file_list(path):
                    if not os.access(file, os.R_OK):
                        parser.error("--whitelist-from '%s' directory contains not readable file '%s'" % (x, file))
                    normalized_paths.append(file)
            else:
                parser.error("--whitelist-from '%s' is not a regular file or a directory" % x)
        options.whitelist_from = normalized_paths

    if options.strict_manifests is not None:
        if options.strict_manifests == "y":
            settings.features.add("strict")
        else:
            settings.features.discard("strict")

    settings.lock()

    portdb = portage.portdbapi(mysettings=settings)

    # Limit ebuilds to the specified repo.
    portdb.porttrees = [repo_path]

    portage.util.initialize_logger()

    if options.verbose > 0:
        l = logging.getLogger()
        l.setLevel(l.getEffectiveLevel() - 10 * options.verbose)

    with Config(options, portdb,
        SchedulerInterface(global_event_loop())) as config:

        if not options.mirror:
            parser.error('No action specified')

        returncode = os.EX_OK

        if options.mirror:
            signum = run_main_scheduler(MirrorDistTask(config))
            if signum is not None:
                sys.exit(128 + signum)

    return returncode
# NOTE(review): this chunk is a fragment of Python 2 code — the `try:`
# matching the leading `except` is outside the visible source, and the
# trailing scan loop is cut off mid-body.
except:
    pass


def echo(x):
    """Print x unless the module-level `silent` flag is set (Python 2)."""
    if not silent:
        print x


echo(output.green('Using portage tree: ') + portdir)
echo(output.green('Using distfiles directory: ') + distdir)
echo(output.green('Using DB directories: '))
for i in DBPATHS:
    echo(output.blue(' * ') + i)

db = portage.portdbapi(mysettings=portage.settings)

if not silent:
    # Python 2 trailing-comma print: stay on the same line for the spinner.
    print output.green('Gathering files... '),

update_spinner()
required_files = []
counter = 0
# Walk each installed-package DB path, category by category.
for dbpkg in DBPATHS:
    try:
        for category in sorted(os.listdir(dbpkg)):
            if (os.path.isdir(dbpkg + category)):
def emirrordist_main(args):
    """Entry point for emirrordist (mgorny fork — note the version banner;
    near-duplicate of the other emirrordist_main in this file): parse and
    validate CLI options, configure portage for the selected repository, and
    run the mirror scheduler.

    args -- raw command-line argument list
    Returns an exit status (os.EX_OK on success); calls parser.error() or
    sys.exit() on invalid options or scheduler interruption.
    """
    # The calling environment is ignored, so the program is
    # completely controlled by commandline arguments.
    env = {}

    if not sys.stdout.isatty():
        portage.output.nocolor()
        env['NOCOLOR'] = 'true'

    parser, options, args = parse_args(args)

    if options.version:
        sys.stdout.write("Portage[mgorny] %s\n" % portage.VERSION)
        return os.EX_OK

    config_root = options.config_root

    if options.repositories_configuration is not None:
        env['PORTAGE_REPOSITORIES'] = options.repositories_configuration

    settings = portage.config(config_root=config_root,
        local_config=False, env=env)

    default_opts = None
    if not options.ignore_default_opts:
        default_opts = settings.get('EMIRRORDIST_DEFAULT_OPTS', '').split()

    if default_opts:
        # Re-parse with EMIRRORDIST_DEFAULT_OPTS prepended and rebuild
        # settings so they reflect the merged options.
        parser, options, args = parse_args(default_opts + args)

        settings = portage.config(config_root=config_root,
            local_config=False, env=env)

    if options.repo is None:
        # Exactly one real repo besides DEFAULT: select it implicitly.
        if len(settings.repositories.prepos) == 2:
            for repo in settings.repositories:
                if repo.name != "DEFAULT":
                    options.repo = repo.name
                    break

        if options.repo is None:
            parser.error("--repo option is required")

    repo_path = settings.repositories.treemap.get(options.repo)
    if repo_path is None:
        parser.error("Unable to locate repository named '%s'" %
            (options.repo,))

    if options.jobs is not None:
        options.jobs = int(options.jobs)

    if options.load_average is not None:
        options.load_average = float(options.load_average)

    # Each *_log / *_dir option below is normalized to an absolute path and
    # checked for the required filesystem permissions before use.
    if options.failure_log is not None:
        options.failure_log = normalize_path(
            os.path.abspath(options.failure_log))
        parent_dir = os.path.dirname(options.failure_log)
        if not (os.path.isdir(parent_dir) and
            os.access(parent_dir, os.W_OK|os.X_OK)):
            parser.error(("--failure-log '%s' parent is not a "
                "writable directory") % options.failure_log)

    if options.success_log is not None:
        options.success_log = normalize_path(
            os.path.abspath(options.success_log))
        parent_dir = os.path.dirname(options.success_log)
        if not (os.path.isdir(parent_dir) and
            os.access(parent_dir, os.W_OK|os.X_OK)):
            parser.error(("--success-log '%s' parent is not a "
                "writable directory") % options.success_log)

    if options.scheduled_deletion_log is not None:
        options.scheduled_deletion_log = normalize_path(
            os.path.abspath(options.scheduled_deletion_log))
        parent_dir = os.path.dirname(options.scheduled_deletion_log)
        if not (os.path.isdir(parent_dir) and
            os.access(parent_dir, os.W_OK|os.X_OK)):
            parser.error(("--scheduled-deletion-log '%s' parent is not a "
                "writable directory") % options.scheduled_deletion_log)

        if options.deletion_db is None:
            parser.error("--scheduled-deletion-log requires --deletion-db")

    if options.deletion_delay is not None:
        # NOTE(review): `long` is the Python 2 builtin — under Python 3 this
        # raises NameError unless `long` is aliased elsewhere in the file.
        options.deletion_delay = long(options.deletion_delay)
        if options.deletion_db is None:
            parser.error("--deletion-delay requires --deletion-db")

    if options.deletion_db is not None:
        if options.deletion_delay is None:
            parser.error("--deletion-db requires --deletion-delay")
        options.deletion_db = normalize_path(
            os.path.abspath(options.deletion_db))

    if options.temp_dir is not None:
        options.temp_dir = normalize_path(
            os.path.abspath(options.temp_dir))
        if not (os.path.isdir(options.temp_dir) and
            os.access(options.temp_dir, os.W_OK|os.X_OK)):
            parser.error(("--temp-dir '%s' is not a "
                "writable directory") % options.temp_dir)

    if options.distfiles is not None:
        options.distfiles = normalize_path(
            os.path.abspath(options.distfiles))
        if not (os.path.isdir(options.distfiles) and
            os.access(options.distfiles, os.W_OK|os.X_OK)):
            parser.error(("--distfiles '%s' is not a "
                "writable directory") % options.distfiles)
    else:
        parser.error("missing required --distfiles parameter")

    if options.mirror_overrides is not None:
        options.mirror_overrides = normalize_path(
            os.path.abspath(options.mirror_overrides))
        if not (os.access(options.mirror_overrides, os.R_OK) and
            os.path.isfile(options.mirror_overrides)):
            parser.error(
                "--mirror-overrides-file '%s' is not a readable file" %
                options.mirror_overrides)

    if options.distfiles_local is not None:
        options.distfiles_local = normalize_path(
            os.path.abspath(options.distfiles_local))
        if not (os.path.isdir(options.distfiles_local) and
            os.access(options.distfiles_local, os.W_OK|os.X_OK)):
            parser.error(("--distfiles-local '%s' is not a "
                "writable directory") % options.distfiles_local)

    if options.distfiles_db is not None:
        options.distfiles_db = normalize_path(
            os.path.abspath(options.distfiles_db))

    if options.tries is not None:
        options.tries = int(options.tries)

    if options.recycle_dir is not None:
        options.recycle_dir = normalize_path(
            os.path.abspath(options.recycle_dir))
        if not (os.path.isdir(options.recycle_dir) and
            os.access(options.recycle_dir, os.W_OK|os.X_OK)):
            parser.error(("--recycle-dir '%s' is not a "
                "writable directory") % options.recycle_dir)

    if options.recycle_db is not None:
        if options.recycle_dir is None:
            parser.error("--recycle-db requires "
                "--recycle-dir to be specified")
        options.recycle_db = normalize_path(
            os.path.abspath(options.recycle_db))

    if options.recycle_deletion_delay is not None:
        options.recycle_deletion_delay = \
            long(options.recycle_deletion_delay)

    if options.fetch_log_dir is not None:
        options.fetch_log_dir = normalize_path(
            os.path.abspath(options.fetch_log_dir))
        if not (os.path.isdir(options.fetch_log_dir) and
            os.access(options.fetch_log_dir, os.W_OK|os.X_OK)):
            parser.error(("--fetch-log-dir '%s' is not a "
                "writable directory") % options.fetch_log_dir)

    if options.whitelist_from:
        # Expand file/directory arguments into a flat list of readable files.
        normalized_paths = []
        for x in options.whitelist_from:
            path = normalize_path(os.path.abspath(x))
            if not os.access(path, os.R_OK):
                parser.error("--whitelist-from '%s' is not readable" % x)
            if os.path.isfile(path):
                normalized_paths.append(path)
            elif os.path.isdir(path):
                for file in _recursive_file_list(path):
                    if not os.access(file, os.R_OK):
                        parser.error("--whitelist-from '%s' directory contains not readable file '%s'" % (x, file))
                    normalized_paths.append(file)
            else:
                parser.error("--whitelist-from '%s' is not a regular file or a directory" % x)
        options.whitelist_from = normalized_paths

    if options.strict_manifests is not None:
        if options.strict_manifests == "y":
            settings.features.add("strict")
        else:
            settings.features.discard("strict")

    settings.lock()

    portdb = portage.portdbapi(mysettings=settings)

    # Limit ebuilds to the specified repo.
    portdb.porttrees = [repo_path]

    portage.util.initialize_logger()

    if options.verbose > 0:
        l = logging.getLogger()
        l.setLevel(l.getEffectiveLevel() - 10 * options.verbose)

    with Config(options, portdb,
        SchedulerInterface(global_event_loop())) as config:

        if not options.mirror:
            parser.error('No action specified')

        returncode = os.EX_OK

        if options.mirror:
            signum = run_main_scheduler(MirrorDistTask(config))
            if signum is not None:
                sys.exit(128 + signum)

    return returncode
def update_cpv_db(session, repo_cp_dict, config_id, zobcs_settings_dict):
    """Update the category/package/ebuild database.

    Waits until no guest config reports a running status, then walks either
    every repository known to portage (when repo_cp_dict is None) or only
    the repo/package lists given in repo_cp_dict, dispatching one
    update_cpv_db_pool() job per package to a multiprocessing pool.

    :param session: open database session used by the helper queries
    :param repo_cp_dict: None for a full scan, or {repo: {'cp_list': [...]}}
    :param config_id: id of the config the log entries are attributed to
    :param zobcs_settings_dict: settings dict forwarded to the pool workers
    """
    log_msg = "Waiting for Guest to be idle"
    add_logs(session, log_msg, "info", config_id)
    # Collect the ids of all guest (non-host) configs.
    guestid_list = [config.ConfigId
                    for config in get_config_all_info(session)
                    if not config.Host]
    # Poll every 30s until no guest reports a running status.
    # NOTE(review): 'Runing' is the (misspelled) status value this compares
    # against in the db -- do not "fix" the spelling on this side alone.
    while True:
        status_list = [get_configmetadata_info(session, guest_id).Status
                       for guest_id in guestid_list]
        if 'Runing' not in status_list:
            break
        time.sleep(30)

    mysettings = init_portage_settings(session, config_id, zobcs_settings_dict)
    log_msg = "Checking categories, package, ebuilds"
    add_logs(session, log_msg, "info", config_id)

    # Setup portdb and a worker pool using all cores.
    myportdb = portage.portdbapi(mysettings=mysettings)
    pool = multiprocessing.Pool(processes=multiprocessing.cpu_count())

    if repo_cp_dict is None:
        # Full scan: refresh the repos db from portage, then queue every
        # package of every configured tree.
        update_repo_db(session, myportdb.getRepositories())
        for repo_dir in myportdb.porttrees:
            repo = myportdb.getRepositoryName(repo_dir)
            # Get the package list from this single repo tree.
            package_list_tree = myportdb.cp_all(trees=[repo_dir])
            for cp in sorted(package_list_tree):
                pool.apply_async(update_cpv_db_pool,
                                 (mysettings, myportdb, cp, repo,
                                  zobcs_settings_dict, config_id,))
                # use this when debugging:
                # update_cpv_db_pool(mysettings, myportdb, cp, repo, zobcs_settings_dict, config_id)
    else:
        # Partial scan: only the repos/packages the caller asked for.
        for repo, v in repo_cp_dict.items():
            update_repo_db(session, [repo])
            for cp in v['cp_list']:
                pool.apply_async(update_cpv_db_pool,
                                 (mysettings, myportdb, cp, repo,
                                  zobcs_settings_dict, config_id,))
                # use this when debugging:
                # update_cpv_db_pool(mysettings, myportdb, cp, repo, zobcs_settings_dict, config_id)

    # Close and join the multiprocessing pool.
    pool.close()
    pool.join()
    log_msg = "Checking categories, package and ebuilds ... done"
    add_logs(session, log_msg, "info", config_id)
# --- script setup: build a portage dbapi limited to a single meta-repo kit ---
import portage

# Kit name comes from the command line; the kit is expected to be checked
# out under /var/git/meta-repo/kits/.
cur_name = sys.argv[1]
cur_tree = "/var/git/meta-repo/kits/" + cur_name
cur_overlay = GitTree(cur_name, root=cur_tree)

# Per-kit/per-branch metadata cache, plus a repos.conf snippet that makes
# this kit the only (and main) repository.
env = os.environ.copy()
env['PORTAGE_DEPCACHEDIR'] = '/var/cache/edb/%s-%s-meta' % (cur_overlay.name, cur_overlay.branch)
env['PORTAGE_REPOSITORIES'] = '''
[DEFAULT]
main-repo = %s

[%s]
location = %s
''' % (cur_name, cur_name, cur_tree)

# NOTE(review): env is built but never handed to portage here -- the config
# call that would consume it is commented out below; confirm intent.
p_global = portage.portdbapi() #mysettings=portage.config(env=env, config_profile_path=''))
# vardbapi: the database of *installed* packages.
v = portage.vardbapi()

# Buckets for the report this script apparently produces.
results = { "orphaned" : [], "masked" : [], "stale" : [] }

#for catpkg in v.cp_all():
#    inst_match = v.cp_list(catpkg)
#    if len(inst_match):
#        matches = p.match(catpkg)
#        all_matches = p.xmatch("match-all", catpkg)
#        if len(matches):
def main(): """ Main function """ api = portage.portdbapi() api._aux_cache_keys.clear() api._aux_cache_keys.update(["EAPI", "KEYWORDS", "SLOT"]) root = '/' trees = { root : {'porttree' : portage.portagetree(root)} } portdb = trees[root]['porttree'].dbapi #portdb._aux_cache_keys.clear() #portdb._aux_cache_keys.update(["EAPI", "KEYWORDS", "SLOT"]) #pprint(api.porttrees) for atom in sys.argv: arches = {} keywords = {} best_revision = {} need_keyword = {} revisions = api.cp_list(atom, mytree=api.porttrees[0]) rev_keywords = {} max_arch_list = [] # Select best revision revisions.sort(package_sort) # Build maximum possible arches list for revision in revisions: # print revision aux_kw, slot = api.aux_get(revision, ['KEYWORDS', 'SLOT']) aux_kw = [kw for kw in aux_kw.replace('~', '').split() if kw and not kw.startswith('-') and kw not in ARCH_DEV and kw not in ARCH_EXP] # Build best set of keywords per SLOT for kw in aux_kw: if slot in keywords: if kw not in keywords[slot]: keywords[slot].append(kw) else: keywords[slot] = [kw] if not keywords or not slot in keywords: revisions.remove(revision) else: if revision.endswith("9999"): revisions.remove(revision) continue best_revision[slot] = revision keywords[slot] = ARCH_GNOME need_keyword[slot] = [kw for kw in keywords[slot] if kw not in aux_kw] # Show missing arches for slot in keywords: if need_keyword[slot]: # keycheck_incrementals = tuple(x for x in portage.const.INCREMENTALS if x != 'ACCEPT_KEYWORDS') # for package.use.mask support inside dep_check # dep_settings = portage.config( # config_incrementals = keycheck_incrementals, # local_config = False) # dep_settings.setcpv(revision) # dep_settings["ACCEPT_KEYWORDS"] = " ".join(keywords[slot] + need_keyword[slot]) # # for dep_type in ('DEPEND', 'RDEPEND', 'PDEPEND'): # aux_dep = api.aux_get(revision, [dep_type])[0] # print portage.dep_check(aux_dep, portdb, dep_settings, use='all', trees=trees, mode='minimum-visible') # kw_print = "" for kw in ARCH_GNOME: kw_print += " " if 
kw in need_keyword[slot]: kw_print += kw else: kw_print += " " * len(kw) print "%-60s:" % best_revision[slot], kw_print
def main(argv, indirect=False):
    """Entry point: show the keyword table for packages or the cwd ebuild dir.

    :param argv: raw command line arguments, parsed by process_args()
    :param indirect: True when invoked via equery; an empty package list is
        then an error instead of meaning "use the current directory"
    :return: 0 on success
    :raises SystemExit: with a message on usage/environment errors
    """
    global ignore_slots, bold, order, topper
    # opts parsing
    opts = process_args(argv)
    ignore_slots = opts.ignore_slot
    use_overlays = opts.overlays
    # user can do both --arch=a,b,c or --arch a b c; without this join the
    # multi-argument form would be fused into one bogus arch string
    if len(opts.arch) > 1:
        opts.arch = ','.join(opts.arch)
    highlight_arch = ''.join(opts.arch).split(',')
    bold = opts.bold
    order = opts.align
    topper = opts.top_position
    prefix = opts.prefix
    color = opts.color
    package = opts.package

    # equery support
    if indirect and not package:
        raise SystemExit('No packages specified')

    # disable colors when redirected and they are not forced on
    if not color and not sys.stdout.isatty():
        porto.nocolor()

    # Imply prefix if user specified any architectures (Bug 578496)
    if len(opts.arch) > 0:
        prefix = True

    keywords = keywords_header(prefix, highlight_arch, order)
    if len(package) > 0:
        mysettings = portc(local_config=False)
        dbapi = portdbapi(mysettings=mysettings)
        if not use_overlays:
            dbapi.porttrees = [dbapi.porttree_root]
        for pkg in package:
            process_display(pkg, keywords, dbapi)
    else:
        currdir = os.getcwd()
        # check if there are actually some ebuilds in the current directory
        ebuilds = [x for x in os.listdir(currdir)
                   if fnmatch.fnmatch(x, '*.ebuild')]
        if not ebuilds:
            raise SystemExit('No ebuilds at "%s"' % currdir)
        # category/package derived from the directory layout
        package = '%s/%s' % (os.path.basename(os.path.abspath('../')),
                             os.path.basename(currdir))
        ourtree = os.path.realpath('../..')
        ourstat = os.stat(ourtree)
        ourstat = (ourstat.st_ino, ourstat.st_dev)
        # If the tree we sit in is already a configured repository
        # (compared by inode/device, so symlinks do not fool us), plain
        # settings are enough...
        for repo in ports.repositories:
            try:
                repostat = os.stat(repo.location)
            except OSError:
                continue
            if ourstat == (repostat.st_ino, repostat.st_dev):
                dbapi = portdbapi(mysettings=portc(local_config=False))
                break
        else:
            # ...otherwise inject our tree via PORTAGE_REPOSITORIES,
            # keeping every already-configured repo available.
            repos = {}
            for repo in ports.repositories:
                repos[repo.name] = repo.location
            with open(os.path.join(ourtree, 'profiles', 'repo_name'), 'rt') as f:
                repo_name = f.readline().strip()
            repos[repo_name] = ourtree
            repos = ''.join('[{}]\nlocation={}\n'.format(k, v)
                            for k, v in repos.items())
            mysettings = portc(local_config=False,
                               env={'PORTAGE_REPOSITORIES': repos})
            dbapi = portdbapi(mysettings=mysettings)
        # specify that we want just our nice tree we are in cwd
        dbapi.porttrees = [ourtree]
        process_display(package, keywords, dbapi)
    return 0