def result(self, cp, version, urls, handler, confidence):
    from euscan.version import get_version_type

    cpv = '%s-%s' % (cp, version)
    urls = ' '.join(
        transform_url(self.config, cpv, url) for url in urls.split()
    )

    if self.config['format'] in ['json', 'dict']:
        _curr = self.queries[self.current_query]
        _curr["result"].append(
            {
                "version": version,
                "urls": urls.split(),
                "handler": handler,
                "confidence": confidence,
                "type": get_version_type(version)
            }
        )
    else:
        if not self.config['quiet']:
            print("Upstream Version:", pp.number("%s" % version), end=' ')
            print(pp.path(" %s" % urls))
        else:
            print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))

def result(self, cp, version, urls, handler, confidence):
    from euscan.version import get_version_type

    cpv = '%s-%s' % (cp, version)
    urls = ' '.join(
        transform_url(self.config, cpv, url) for url in urls.split()
    )

    if self.config['format'] in ['json', 'dict']:
        _curr = self.queries[self.current_query]
        _curr["result"].append(
            {
                "version": version,
                "urls": urls.split(),
                "handler": handler,
                "confidence": confidence,
                "type": get_version_type(version)
            }
        )
    else:
        if not self.config['quiet']:
            # Python 2 print statements converted to the print() function
            # used elsewhere in this listing.
            print("Upstream Version:", pp.number("%s" % version), end=' ')
            print(pp.path(" %s" % urls))
        else:
            print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(urls))

def display_files(contents): """Display the content of an installed package. @see: gentoolkit.package.Package.parsed_contents @type contents: dict @param contents: {'path': ['filetype', ...], ...} """ filenames = list(contents.keys()) filenames.sort() last = [] for name in filenames: if QUERY_OPTS["output_tree"]: dirdepth = name.count("/") indent = " " if dirdepth == 2: indent = " " elif dirdepth > 2: indent = " " * (dirdepth - 1) basename = name.rsplit("/", dirdepth - 1) if contents[name][0] == "dir": if len(last) == 0: last = basename pp.uprint(pp.path(indent + basename[0])) continue for i, directory in enumerate(basename): try: if directory in last[i]: continue except IndexError: pass last = basename if len(last) == 1: pp.uprint(pp.path(indent + last[0])) continue pp.uprint(pp.path(indent + "> /" + last[-1])) elif contents[name][0] == "sym": pp.uprint(pp.path(indent + "+"), end=" ") pp.uprint(pp.path_symlink(basename[-1] + " -> " + contents[name][2])) else: pp.uprint(pp.path(indent + "+ ") + basename[-1]) else: pp.uprint( format_filetype( name, contents[name], show_type=QUERY_OPTS["show_type"], show_md5=QUERY_OPTS["show_MD5"], show_timestamp=QUERY_OPTS["show_timestamp"], ) )
def display_files(contents): """Display the content of an installed package. @see: gentoolkit.package.Package.parsed_contents @type contents: dict @param contents: {'path': ['filetype', ...], ...} """ filenames = list(contents.keys()) filenames.sort() last = [] for name in filenames: if QUERY_OPTS["output_tree"]: dirdepth = name.count('/') indent = " " if dirdepth == 2: indent = " " elif dirdepth > 2: indent = " " * (dirdepth - 1) basename = name.rsplit("/", dirdepth - 1) if contents[name][0] == "dir": if len(last) == 0: last = basename pp.uprint(pp.path(indent + basename[0])) continue for i, directory in enumerate(basename): try: if directory in last[i]: continue except IndexError: pass last = basename if len(last) == 1: pp.uprint(pp.path(indent + last[0])) continue pp.uprint(pp.path(indent + "> /" + last[-1])) elif contents[name][0] == "sym": pp.uprint(pp.path(indent + "+"), end=' ') pp.uprint(pp.path_symlink(basename[-1] + " -> " + contents[name][2])) else: pp.uprint(pp.path(indent + "+ ") + basename[-1]) else: pp.uprint(format_filetype( name, contents[name], show_type=QUERY_OPTS["show_type"], show_md5=QUERY_OPTS["show_MD5"], show_timestamp=QUERY_OPTS["show_timestamp"] ))
def result(self, cp, version, url, handler, confidence):
    from euscan.helpers import get_version_type

    if self.config['format']:
        _curr = self.queries[self.current_query]
        _curr["result"].append(
            {"version": version,
             "urls": [url],
             "handler": handler,
             "confidence": confidence,
             "type": get_version_type(version)}
        )
    else:
        if not self.config['quiet']:
            # Python 2 print statements converted to the print() function
            # used elsewhere in this listing.
            print("Upstream Version:", pp.number("%s" % version), end=' ')
            print(pp.path(" %s" % url))
        else:
            print(pp.cpv("%s-%s" % (cp, version)) + ":", pp.path(url))

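# Hedged sketch of the result() contract shared by the variants above,
# assuming an euscan output object ('output') whose config/queries state has
# already been initialised by the CLI; the package, URL, handler name, and
# confidence value are made up for illustration.
output.result(
    "www-client/foo",                          # cp
    "1.2.3",                                   # candidate upstream version
    "https://example.org/foo-1.2.3.tar.gz",    # upstream URL
    "generic",                                 # handler that found it
    100,                                       # confidence score
)
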
def format_filetype(path, fdesc, show_type=False, show_md5=False,
                    show_timestamp=False):
    """Format a path for printing.

    @type path: str
    @param path: the path
    @type fdesc: list
    @param fdesc: [file_type, timestamp, MD5 sum/symlink target]
        file_type is one of dev, dir, obj, sym, fif.
        If file_type is dir, there is no timestamp or MD5 sum.
        If file_type is sym, fdesc[2] is the target of the symlink.
    @type show_type: bool
    @param show_type: if True, prepend the file's type to the formatted string
    @type show_md5: bool
    @param show_md5: if True, append MD5 sum to the formatted string
    @type show_timestamp: bool
    @param show_timestamp: if True, append time-of-creation after pathname
    @rtype: str
    @return: formatted pathname with optional added information
    """

    ftype = fpath = stamp = md5sum = ""

    if fdesc[0] == "obj":
        ftype = "file"
        fpath = path
        stamp = format_timestamp(fdesc[1])
        md5sum = fdesc[2]
    elif fdesc[0] == "dir":
        ftype = "dir"
        fpath = pp.path(path)
    elif fdesc[0] == "sym":
        ftype = "sym"
        stamp = format_timestamp(fdesc[1])
        tgt = fdesc[2].split()[0]
        if CONFIG["piping"]:
            fpath = path
        else:
            fpath = pp.path_symlink(path + " -> " + tgt)
    elif fdesc[0] == "dev":
        ftype = "dev"
        fpath = path
    elif fdesc[0] == "fif":
        ftype = "fifo"
        fpath = path
    else:
        sys.stderr.write(
            pp.error("%s has unknown type: %s" % (path, fdesc[0]))
        )

    result = ""
    if show_type:
        result += "%4s " % ftype
    result += fpath
    if show_timestamp:
        result += " " + stamp
    if show_md5:
        result += " " + md5sum

    return result

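# Illustrative calls of format_filetype() above, assuming gentoolkit is
# installed so the surrounding module globals (pp, CONFIG, format_timestamp)
# are available; the import path, paths, timestamps, and checksum are
# assumptions, following the [file_type, timestamp, MD5 sum/symlink target]
# layout described in the docstring.
from gentoolkit.equery.files import format_filetype  # assumed import path

print(format_filetype(
    "/usr/bin/foo",
    ["obj", "1693526400", "d41d8cd98f00b204e9800998ecf8427e"],
    show_type=True, show_md5=True, show_timestamp=True,
))
print(format_filetype(
    "/usr/lib/libfoo.so",
    ["sym", "1693526400", "/usr/lib/libfoo.so.1.2.3"],
    show_type=True,
))
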
def print_verbose(self, pkg, cfile):
    "Format for full output."
    file_str = pp.path(format_filetype(cfile, pkg.parsed_contents()[cfile]))
    if self.name_only:
        name = pkg.cp
    else:
        name = str(pkg.cpv)
    pp.uprint(pp.cpv(name), "(" + file_str + ")")

def scan_upstream(query):
    matches = Query(query).find(
        include_masked=True,
        in_installed=False
    )

    if not matches:
        sys.stderr.write(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
        return []

    matches = sorted(matches)
    pkg = matches.pop()
    while '9999' in pkg.version and len(matches):
        pkg = matches.pop()

    if not pkg:
        sys.stderr.write(pp.warn("Package '%s' only has a dev version (9999)"
                                 % pp.pkgquery(pkg.cp)))
        return []

    if pkg.cp in BLACKLIST_PACKAGES:
        sys.stderr.write(pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
        return []

    if not CONFIG['quiet']:
        pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())))
        pp.uprint()

        ebuild_path = pkg.ebuild_path()
        if ebuild_path:
            pp.uprint('Ebuild: ' + pp.path(os.path.normpath(ebuild_path)))

        pp.uprint('Repository: ' + pkg.repo_name())
        pp.uprint('Homepage: ' + pkg.environment("HOMEPAGE"))
        pp.uprint('Description: ' + pkg.environment("DESCRIPTION"))

    cpv = pkg.cpv
    metadata = {
        "EAPI": port_settings["EAPI"],
        "SRC_URI": pkg.environment("SRC_URI", False),
    }
    use = frozenset(port_settings["PORTAGE_USE"].split())
    try:
        alist = porttree._parse_uri_map(cpv, metadata, use=use)
        aalist = porttree._parse_uri_map(cpv, metadata)
    except Exception as e:
        sys.stderr.write(pp.warn("%s\n" % str(e)))
        sys.stderr.write(pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)))
        return []

    if "mirror" in portage.settings.features:
        urls = aalist
    else:
        urls = alist

    return scan_upstream_urls(pkg.cpv, urls)

def get_global_useflags(): """Get global and expanded USE flag variables from PORTDIR/profiles/use.desc and PORTDIR/profiles/desc/*.desc respectively. @rtype: dict @return: {'flag_name': 'flag description', ...} """ global_usedesc = {} # Get global USE flag descriptions try: path = os.path.join(settings["PORTDIR"], "profiles", "use.desc") with open( _unicode_encode(path, encoding=_encodings["fs"]), encoding=_encodings["content"], ) as open_file: for line in open_file: if line.startswith("#"): continue # Ex. of fields: ['syslog', 'Enables support for syslog\n'] fields = line.split(" - ", 1) if len(fields) == 2: global_usedesc[fields[0]] = fields[1].rstrip() except IOError: sys.stderr.write( pp.warn("Could not load USE flag descriptions from %s" % pp.path(path))) del path, open_file # Add USE_EXPANDED variables to usedesc hash -- Bug #238005 for path in glob( os.path.join(settings["PORTDIR"], "profiles", "desc", "*.desc")): try: with open( _unicode_encode(path, encoding=_encodings["fs"]), encoding=_encodings["content"], ) as open_file: for line in open_file: if line.startswith("#"): continue fields = [field.strip() for field in line.split(" - ", 1)] if len(fields) == 2: expanded_useflag = "%s_%s" % ( path.split("/")[-1][0:-5], fields[0], ) global_usedesc[expanded_useflag] = fields[1] except IOError: sys.stderr.write( pp.warn("Could not load USE flag descriptions from %s" % path)) return global_usedesc
def __init__(self, changelog_path, invalid_entry_is_fatal=False):
    if not (os.path.isfile(changelog_path) and
            os.access(changelog_path, os.R_OK)):
        raise errors.GentoolkitFatalError(
            "%s does not exist or is unreadable" % pp.path(changelog_path))
    self.changelog_path = changelog_path
    self.invalid_entry_is_fatal = invalid_entry_is_fatal

    # Process the ChangeLog:
    self.entries = self._split_changelog()
    self.indexed_entries = self._index_changelog()
    self.full = self.entries
    self.latest = self.entries[0]

def __init__(self, changelog_path, invalid_entry_is_fatal=False):
    if not (os.path.isfile(changelog_path) and
            os.access(changelog_path, os.R_OK)):
        raise errors.GentoolkitFatalError(
            "%s does not exist or is unreadable" % pp.path(changelog_path)
        )
    self.changelog_path = changelog_path
    self.invalid_entry_is_fatal = invalid_entry_is_fatal

    # Process the ChangeLog:
    self.entries = self._split_changelog()
    self.indexed_entries = self._index_changelog()
    self.full = self.entries
    self.latest = self.entries[0]

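# Hedged usage sketch for the ChangeLog constructor above; the import location
# and the ChangeLog path are assumptions (many repositories no longer ship
# per-package ChangeLog files).
from gentoolkit.helpers import ChangeLog  # assumed import path

log = ChangeLog("/var/db/repos/gentoo/app-editors/vim/ChangeLog")
print(log.latest)          # newest entry, as set by self.latest
print(len(log.entries))    # every parsed entry (self.full is an alias)
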
def get_global_useflags(): """Get global and expanded USE flag variables from PORTDIR/profiles/use.desc and PORTDIR/profiles/desc/*.desc respectively. @rtype: dict @return: {'flag_name': 'flag description', ...} """ global_usedesc = {} # Get global USE flag descriptions try: path = os.path.join(settings["PORTDIR"], 'profiles', 'use.desc') with open(_unicode_encode(path, encoding=_encodings['fs']), encoding=_encodings['content']) as open_file: for line in open_file: if line.startswith('#'): continue # Ex. of fields: ['syslog', 'Enables support for syslog\n'] fields = line.split(" - ", 1) if len(fields) == 2: global_usedesc[fields[0]] = fields[1].rstrip() except IOError: sys.stderr.write( pp.warn( "Could not load USE flag descriptions from %s" % pp.path(path) ) ) del path, open_file # Add USE_EXPANDED variables to usedesc hash -- Bug #238005 for path in glob(os.path.join(settings["PORTDIR"], 'profiles', 'desc', '*.desc')): try: with open(_unicode_encode(path, encoding=_encodings['fs']), encoding=_encodings['content']) as open_file: for line in open_file: if line.startswith('#'): continue fields = [field.strip() for field in line.split(" - ", 1)] if len(fields) == 2: expanded_useflag = "%s_%s" % \ (path.split("/")[-1][0:-5], fields[0]) global_usedesc[expanded_useflag] = fields[1] except IOError: sys.stderr.write( pp.warn("Could not load USE flag descriptions from %s" % path) ) return global_usedesc
def scan_upstream(query, on_progress=None): """ Scans the upstream searching new versions for the given query """ maxval = 3 curval = 0 matches = [] if query.endswith(".ebuild"): cpv = package_from_ebuild(query) if cpv: reload_gentoolkit() matches = [Package(cpv)] else: matches = Query(query).find( include_masked=True, in_installed=False ) if not matches: output.ewarn( pp.warn("No package matching '%s'" % pp.pkgquery(query)) ) return None matches = sorted(matches) pkg = matches.pop() while '9999' in pkg.version and len(matches): pkg = matches.pop() if not pkg: output.ewarn( pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp)) ) return None # useful data only for formatted output start_time = datetime.now() output.metadata("datetime", start_time.isoformat(), show=False) output.metadata("cp", pkg.cp, show=False) output.metadata("cpv", pkg.cpv, show=False) curval += 1 if on_progress: on_progress(maxval, curval) if pkg.cp in BLACKLIST_PACKAGES: output.ewarn( pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)) ) return None if not CONFIG['quiet']: if not CONFIG['format']: pp.uprint( " * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())) ) pp.uprint() else: output.metadata("overlay", pp.section(pkg.repo_name())) ebuild_path = pkg.ebuild_path() if ebuild_path: output.metadata( "ebuild", pp.path(os.path.normpath(ebuild_path)) ) output.metadata("repository", pkg.repo_name()) output.metadata("homepage", pkg.environment("HOMEPAGE")) output.metadata("description", pkg.environment("DESCRIPTION")) cpv = pkg.cpv metadata = { "EAPI": portage.settings["EAPI"], "SRC_URI": pkg.environment("SRC_URI", False), } use = frozenset(portage.settings["PORTAGE_USE"].split()) try: alist = porttree._parse_uri_map(cpv, metadata, use=use) aalist = porttree._parse_uri_map(cpv, metadata) except Exception as e: output.ewarn(pp.warn("%s\n" % str(e))) output.ewarn( pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv)) ) return None if "mirror" in portage.settings.features: urls = aalist else: urls = alist # output scan time for formatted output scan_time = (datetime.now() - start_time).total_seconds() output.metadata("scan_time", scan_time, show=False) curval += 1 if on_progress: on_progress(maxval, curval) result = scan_upstream_urls(pkg.cpv, urls, on_progress) curval += 1 if on_progress: on_progress(maxval, curval) return result
def scan_upstream(query, on_progress=None): """ Scans the upstream searching new versions for the given query """ matches = [] if query.endswith(".ebuild"): cpv = package_from_ebuild(query) reload_gentoolkit() if cpv: matches = [Package(cpv)] else: matches = Query(query).find( include_masked=True, in_installed=False, ) if not matches: output.ewarn(pp.warn("No package matching '%s'" % pp.pkgquery(query))) return None matches = sorted(matches) pkg = matches.pop() while '9999' in pkg.version and len(matches): pkg = matches.pop() if not pkg: output.ewarn( pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp))) return None # useful data only for formatted output start_time = datetime.now() output.metadata("datetime", start_time.isoformat(), show=False) output.metadata("cp", pkg.cp, show=False) output.metadata("cpv", pkg.cpv, show=False) if on_progress: on_progress(increment=10) if pkg.cp in BLACKLIST_PACKAGES: output.ewarn( pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp))) return None if not CONFIG['quiet']: if not CONFIG['format']: pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name()))) pp.uprint() else: output.metadata("overlay", pp.section(pkg.repo_name())) ebuild_path = pkg.ebuild_path() if ebuild_path: output.metadata("ebuild", pp.path(os.path.normpath(ebuild_path))) uris, homepage, description = pkg.environment( ('SRC_URI', 'HOMEPAGE', 'DESCRIPTION')) output.metadata("repository", pkg.repo_name()) output.metadata("homepage", homepage) output.metadata("description", description) else: uris = pkg.environment('SRC_URI') cpv = pkg.cpv uris = parse_src_uri(uris) uris_expanded = [ from_mirror(uri) if 'mirror://' in uri else uri for uri in uris ] pkg._uris = uris pkg._uris_expanded = uris_expanded versions = handlers.scan(pkg, uris, on_progress) cp, ver, rev = portage.pkgsplit(pkg.cpv) result = filter_versions(cp, versions) if on_progress: on_progress(increment=10) # output scan time for formatted output scan_time = (datetime.now() - start_time).total_seconds() output.metadata("scan_time", scan_time, show=False) is_current_version_stable = is_version_stable(ver) if len(result) > 0: if not (CONFIG['format'] or CONFIG['quiet']): print("") for cp, url, version, handler, confidence in result: if CONFIG["ignore-pre-release"]: if not is_version_stable(version): continue if CONFIG["ignore-pre-release-if-stable"]: if is_current_version_stable and \ not is_version_stable(version): continue if CONFIG['progress']: print("", file=sys.stderr) output.result(cp, version, url, handler, confidence) return result
def scan_upstream(query, on_progress=None): """ Scans the upstream searching new versions for the given query """ matches = [] if query.endswith(".ebuild"): cpv = package_from_ebuild(query) reload_gentoolkit() if cpv: matches = [Package(cpv)] else: matches = Query(query).find( include_masked=True, in_installed=False, ) if not matches: output.ewarn( pp.warn("No package matching '%s'" % pp.pkgquery(query)) ) return None matches = sorted(matches) pkg = matches.pop() while '9999' in pkg.version and len(matches): pkg = matches.pop() if not pkg: output.ewarn( pp.warn("Package '%s' only have a dev version (9999)" % pp.pkgquery(pkg.cp)) ) return None # useful data only for formatted output start_time = datetime.now() output.metadata("datetime", start_time.isoformat(), show=False) output.metadata("cp", pkg.cp, show=False) output.metadata("cpv", pkg.cpv, show=False) if on_progress: on_progress(increment=10) if pkg.cp in BLACKLIST_PACKAGES: output.ewarn( pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)) ) return None if not CONFIG['quiet']: if not CONFIG['format']: pp.uprint( " * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name())) ) pp.uprint() else: output.metadata("overlay", pp.section(pkg.repo_name())) ebuild_path = pkg.ebuild_path() if ebuild_path: output.metadata( "ebuild", pp.path(os.path.normpath(ebuild_path)) ) uris, homepage, description = pkg.environment( ('SRC_URI', 'HOMEPAGE', 'DESCRIPTION') ) output.metadata("repository", pkg.repo_name()) output.metadata("homepage", homepage) output.metadata("description", description) else: uris = pkg.environment('SRC_URI') cpv = pkg.cpv uris = parse_src_uri(uris) uris_expanded = [ from_mirror(uri) if 'mirror://' in uri else uri for uri in uris ] pkg._uris = uris pkg._uris_expanded = uris_expanded versions = handlers.scan(pkg, uris, on_progress) cp, ver, rev = portage.pkgsplit(pkg.cpv) result = filter_versions(cp, versions) if on_progress: on_progress(increment=10) # output scan time for formatted output scan_time = (datetime.now() - start_time).total_seconds() output.metadata("scan_time", scan_time, show=False) is_current_version_stable = is_version_stable(ver) if len(result) > 0: if not (CONFIG['format'] or CONFIG['quiet']): print("") for cp, url, version, handler, confidence in result: if CONFIG["ignore-pre-release"]: if not is_version_stable(version): continue if CONFIG["ignore-pre-release-if-stable"]: if is_current_version_stable and \ not is_version_stable(version): continue if CONFIG['progress']: print("", file=sys.stderr) output.result(cp, version, url, handler, confidence) return result