def get_metadata(pkg):
    """Collect upstream watch/remote-id metadata for *pkg*.

    Reads the package's metadata.xml (or a local override under
    metadata/<category>/<name>/metadata.xml when present) and returns a
    dict mapping handler name -> list of option dicts, each carrying at
    least 'type' and 'data' keys.  Returns {} when no metadata is found.
    """
    collected = {}
    xml_meta = None

    override_path = os.path.join(
        'metadata', pkg.category, pkg.name, 'metadata.xml')

    try:
        if os.path.exists(override_path):
            xml_meta = MetaDataXML(override_path)
            output.einfo('Using custom metadata: %s' % override_path)
        if not xml_meta:
            xml_meta = pkg.metadata
    except Exception as err:
        output.ewarn('Error when fetching metadata: %s' % str(err))

    if not xml_meta:
        return {}

    # Support multiple remote-id and multiple watch entries.
    for upstream_node in xml_meta._xml_tree.findall("upstream"):
        for watch in upstream_node.findall("watch"):
            opts = dict(watch.attrib)
            opts['data'] = watch.text
            # Default handler is "url" when no explicit type is given.
            handler = opts.get('type', "url")
            opts['type'] = handler
            # Mangle rules are semicolon-separated lists in the XML.
            for mangle_key in ("versionmangle", "downloadurlmangle"):
                raw = opts.get(mangle_key)
                if raw:
                    opts[mangle_key] = raw.split(";")
            collected.setdefault(handler, []).append(opts)

    for upstream_node in xml_meta._xml_tree.findall("upstream"):
        for remote in upstream_node.findall("remote-id"):
            handler = remote.attrib.get("type")
            if not handler:
                continue
            if handler in collected:
                # Fill only watch entries that have no data of their own.
                for entry in collected[handler]:
                    if not entry['data']:
                        entry['data'] = remote.text
            else:
                collected[handler] = [{'type': handler, 'data': remote.text}]

    return collected
def scan_upstream_urls(cpv, urls, on_progress):
    """
    Scan all SRC_URI urls of `cpv` for new upstream versions.

    `urls` maps each distfile name to a list of its source urls;
    `on_progress`, when set, is called as on_progress(maxval, curval).
    Returns filter_versions() applied to everything found.
    """
    versions = []

    maxval = len(urls) + 5
    curval = 1

    for filename in urls:
        curval += 1
        if on_progress:
            on_progress(maxval, curval)

        for url in urls[filename]:
            if not CONFIG['quiet'] and not CONFIG['format']:
                pp.uprint()
            output.einfo("SRC_URI is '%s'" % url)

            if '://' not in url:
                output.einfo("Invalid url '%s'" % url)
                continue

            # Try normal scan
            if CONFIG["scan-dir"]:
                try:
                    versions.extend(handlers.scan(cpv, url))
                except Exception as e:
                    # BUG FIX: Exception.message does not exist on Python 3
                    # (PEP 352); format the exception itself instead.
                    output.ewarn("Handler failed: [%s] %s" %
                                 (e.__class__.__name__, str(e)))

            # In oneshot mode, stop as soon as anything was found.
            if versions and CONFIG['oneshot']:
                break

            # Brute Force
            if CONFIG["brute-force"] > 0:
                versions.extend(handlers.brute_force(cpv, url))

            if versions and CONFIG['oneshot']:
                break

    cp, ver, rev = portage.pkgsplit(cpv)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    result = filter_versions(cp, versions)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    return result
def get_metadata(pkg):
    """Load metadata for *pkg*, preferring a local metadata.xml override.

    NOTE(review): this variant builds `metadata`/`pkg_metadata` but never
    returns them (implicitly returns None) -- it looks like a truncated
    duplicate of the fuller get_metadata elsewhere in this file; confirm
    which version is authoritative.
    """
    metadata = {}
    pkg_metadata = None

    meta_override = os.path.join(
        'metadata', pkg.category, pkg.name, 'metadata.xml')

    try:
        if os.path.exists(meta_override):
            pkg_metadata = MetaData(meta_override)
            output.einfo('Using custom metadata: %s' % meta_override)
        if not pkg_metadata:
            pkg_metadata = pkg.metadata
    # BUG FIX: `except Exception, e` is Python-2-only syntax and a
    # SyntaxError on Python 3; `as` works on both 2.6+ and 3.
    except Exception as e:
        output.ewarn('Error when fetching metadata: %s' % str(e))
def scan_url(pkg, urls, options, on_progress=None):
    """
    Scan each SRC_URI url of `pkg` with the best matching url handler.

    `urls` maps each distfile name to a list of its source urls; `options`
    is a list of option dicts passed to the handler.  `on_progress`, when
    set, is called as on_progress(increment=N) and receives a total of 70
    progress units over the whole scan.  Returns the list of versions found.
    """
    versions = []

    if on_progress:
        progress_available = 70
        num_urls = sum(len(urls[fn]) for fn in urls)
        if num_urls > 0:
            progress_increment = progress_available / num_urls
        else:
            progress_increment = 0

    for filename in urls:
        for url in urls[filename]:
            if on_progress and progress_available > 0:
                on_progress(increment=progress_increment)
                progress_available -= progress_increment

            output.einfo("SRC_URI is '%s'" % url)

            if '://' not in url:
                output.einfo("Invalid url '%s'" % url)
                continue

            try:
                url_handler = find_best_handler('url', pkg, url)
                if url_handler:
                    for o in options:
                        versions += url_handler.scan_url(pkg, url, o)
                else:
                    output.eerror("Can't find a suitable handler!")
            except Exception as e:
                # BUG FIX: Exception.message does not exist on Python 3
                # (PEP 352); format the exception itself instead.
                output.ewarn(
                    "Handler failed: [%s] %s" %
                    (e.__class__.__name__, str(e))
                )

        # In oneshot mode, stop after the first distfile that yielded results.
        if versions and CONFIG['oneshot']:
            break

    # Flush whatever progress budget is left so callers always see 70 units.
    if on_progress and progress_available > 0:
        on_progress(increment=progress_available)

    return versions
def scan_pkg(pkg, options):
    """
    Scan the Gnome json release cache for new versions of `pkg`.

    `options['data']` is the upstream package name.  Returns a list of
    (url, version, HANDLER_NAME, CONFIDENCE) tuples, or [] on a cache
    format mismatch or when no versions are listed.
    """
    package = options['data']

    output.einfo("Using Gnome json cache: " + package)

    # `with` guarantees the connection is closed even if read() fails.
    with urllib.request.urlopen('/'.join(
            [GNOME_URL_SOURCE, package, 'cache.json'])) as fp:
        content = fp.read()

    # BUG FIX: json.loads() no longer accepts an `encoding` argument
    # (removed in Python 3.9); bytes input is decoded automatically.
    cache = json.loads(content)

    # cache[0] is the format version; only format 4 is understood here.
    if cache[0] != 4:
        output.eerror('Unknow cache format detected')
        return []

    versions = cache[2][package]

    if not versions:
        return []

    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        # Prefer the best compression available for this release.
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join(
                    [GNOME_URL_SOURCE, package, up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            # BUG FIX: without this `continue` the append below reused the
            # `url` of a previous release (or raised NameError on the
            # first iteration).
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
def scan_pkg(pkg, options):
    """
    Scan the Gnome json release cache for new versions of `pkg`.

    Python-2 variant (urllib2).  `options['data']` is the upstream
    package name.  Returns a list of (url, version, HANDLER_NAME,
    CONFIDENCE) tuples, or [] on a cache format mismatch or when no
    versions are listed.
    """
    package = options['data']

    output.einfo("Using Gnome json cache: " + package)

    fp = urllib2.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    cache = json.loads(content, encoding='ascii')

    # cache[0] is the format version; only format 4 is understood here.
    if cache[0] != 4:
        output.eerror('Unknow cache format detected')
        return []

    versions = cache[2][package]

    if not versions:
        return []

    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        # Prefer the best compression available for this release.
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join([GNOME_URL_SOURCE, package,
                                up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            # BUG FIX: without this `continue` the append below reused the
            # `url` of a previous release (or raised NameError on the
            # first iteration).
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
def scan_upstream(query, on_progress=None):
    """
    Scans the upstream searching new versions for the given query.

    `query` is either a path to an .ebuild file or a package query string;
    `on_progress`, when given, is called as on_progress(increment=N).
    Returns the filtered version list, or None when no matching package
    could be scanned (no match, blacklisted, or dev-only).
    """
    matches = []

    if query.endswith(".ebuild"):
        # Direct ebuild path: resolve it to a cpv first.  reload_gentoolkit()
        # presumably refreshes gentoolkit's view of the repos -- confirm.
        cpv = package_from_ebuild(query)
        reload_gentoolkit()
        if cpv:
            matches = [Package(cpv)]
    else:
        matches = Query(query).find(
            include_masked=True,
            in_installed=False,
        )

    if not matches:
        output.ewarn(pp.warn("No package matching '%s'" % pp.pkgquery(query)))
        return None

    # Keep the highest version, skipping live (9999) ebuilds while older
    # versions remain in the sorted list.
    matches = sorted(matches)
    pkg = matches.pop()

    while '9999' in pkg.version and len(matches):
        pkg = matches.pop()

    if not pkg:
        # NOTE(review): matches.pop() is unlikely to yield a falsy value, so
        # this guard may be unreachable -- confirm before relying on it.
        output.ewarn(
            pp.warn("Package '%s' only have a dev version (9999)"
                    % pp.pkgquery(pkg.cp)))
        return None

    # useful data only for formatted output
    start_time = datetime.now()
    output.metadata("datetime", start_time.isoformat(), show=False)
    output.metadata("cp", pkg.cp, show=False)
    output.metadata("cpv", pkg.cpv, show=False)

    if on_progress:
        on_progress(increment=10)

    if pkg.cp in BLACKLIST_PACKAGES:
        output.ewarn(
            pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp)))
        return None

    if not CONFIG['quiet']:
        if not CONFIG['format']:
            pp.uprint(" * %s [%s]" % (pp.cpv(pkg.cpv),
                                      pp.section(pkg.repo_name())))
            pp.uprint()
        else:
            output.metadata("overlay", pp.section(pkg.repo_name()))

        ebuild_path = pkg.ebuild_path()
        if ebuild_path:
            output.metadata("ebuild", pp.path(os.path.normpath(ebuild_path)))

        # HOMEPAGE/DESCRIPTION are only needed for the verbose output path.
        uris, homepage, description = pkg.environment(
            ('SRC_URI', 'HOMEPAGE', 'DESCRIPTION'))

        output.metadata("repository", pkg.repo_name())
        output.metadata("homepage", homepage)
        output.metadata("description", description)
    else:
        uris = pkg.environment('SRC_URI')

    cpv = pkg.cpv

    # Parse SRC_URI and expand mirror:// uris via from_mirror(); both the
    # raw and expanded lists are stashed on the package for the handlers.
    uris = parse_src_uri(uris)
    uris_expanded = [
        from_mirror(uri) if 'mirror://' in uri else uri for uri in uris
    ]
    pkg._uris = uris
    pkg._uris_expanded = uris_expanded

    versions = handlers.scan(pkg, uris, on_progress)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    result = filter_versions(cp, versions)

    if on_progress:
        on_progress(increment=10)

    # output scan time for formatted output
    scan_time = (datetime.now() - start_time).total_seconds()
    output.metadata("scan_time", scan_time, show=False)

    is_current_version_stable = is_version_stable(ver)
    if len(result) > 0:
        if not (CONFIG['format'] or CONFIG['quiet']):
            print("")
        for cp, url, version, handler, confidence in result:
            # Honour the pre-release filtering options before reporting.
            if CONFIG["ignore-pre-release"]:
                if not is_version_stable(version):
                    continue
            if CONFIG["ignore-pre-release-if-stable"]:
                if is_current_version_stable and \
                        not is_version_stable(version):
                    continue
            if CONFIG['progress']:
                print("", file=sys.stderr)
            output.result(cp, version, url, handler, confidence)

    return result
def scan_upstream(query, on_progress=None):
    """
    Scans the upstream searching new versions for the given query.

    Older variant: `on_progress`, when given, is called positionally as
    on_progress(maxval, curval) with maxval fixed at 3 for this stage.
    Returns scan_upstream_urls()'s result, or None when no matching
    package could be scanned.
    """
    maxval = 3
    curval = 0

    matches = []

    if query.endswith(".ebuild"):
        # Direct ebuild path: resolve it to a cpv first.  reload_gentoolkit()
        # presumably refreshes gentoolkit's view of the repos -- confirm.
        cpv = package_from_ebuild(query)
        if cpv:
            reload_gentoolkit()
            matches = [Package(cpv)]
    else:
        matches = Query(query).find(
            include_masked=True,
            in_installed=False
        )

    if not matches:
        output.ewarn(
            pp.warn("No package matching '%s'" % pp.pkgquery(query))
        )
        return None

    # Keep the highest version, skipping live (9999) ebuilds while older
    # versions remain in the sorted list.
    matches = sorted(matches)
    pkg = matches.pop()

    while '9999' in pkg.version and len(matches):
        pkg = matches.pop()

    if not pkg:
        # NOTE(review): matches.pop() is unlikely to yield a falsy value, so
        # this guard may be unreachable -- confirm before relying on it.
        output.ewarn(
            pp.warn("Package '%s' only have a dev version (9999)"
                    % pp.pkgquery(pkg.cp))
        )
        return None

    # useful data only for formatted output
    start_time = datetime.now()
    output.metadata("datetime", start_time.isoformat(), show=False)
    output.metadata("cp", pkg.cp, show=False)
    output.metadata("cpv", pkg.cpv, show=False)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    if pkg.cp in BLACKLIST_PACKAGES:
        output.ewarn(
            pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp))
        )
        return None

    if not CONFIG['quiet']:
        if not CONFIG['format']:
            pp.uprint(
                " * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name()))
            )
            pp.uprint()
        else:
            output.metadata("overlay", pp.section(pkg.repo_name()))

        ebuild_path = pkg.ebuild_path()
        if ebuild_path:
            output.metadata(
                "ebuild", pp.path(os.path.normpath(ebuild_path))
            )

        output.metadata("repository", pkg.repo_name())
        output.metadata("homepage", pkg.environment("HOMEPAGE"))
        output.metadata("description", pkg.environment("DESCRIPTION"))

    cpv = pkg.cpv

    # Minimal metadata needed by portage's SRC_URI parser below.
    metadata = {
        "EAPI": portage.settings["EAPI"],
        "SRC_URI": pkg.environment("SRC_URI", False),
    }
    use = frozenset(portage.settings["PORTAGE_USE"].split())

    try:
        # alist honours the current USE flags; aalist is unrestricted --
        # presumably "all" uris, used when mirrors are enabled.  Confirm
        # against portage's _parse_uri_map semantics.
        alist = porttree._parse_uri_map(cpv, metadata, use=use)
        aalist = porttree._parse_uri_map(cpv, metadata)
    except Exception as e:
        output.ewarn(pp.warn("%s\n" % str(e)))
        output.ewarn(
            pp.warn("Invalid SRC_URI for '%s'" % pp.pkgquery(cpv))
        )
        return None

    if "mirror" in portage.settings.features:
        urls = aalist
    else:
        urls = alist

    # output scan time for formatted output
    scan_time = (datetime.now() - start_time).total_seconds()
    output.metadata("scan_time", scan_time, show=False)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    result = scan_upstream_urls(pkg.cpv, urls, on_progress)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    return result
def scan_upstream(query, on_progress=None):
    """
    Scans the upstream searching new versions for the given query.

    `query` is either a path to an .ebuild file or a package query string;
    `on_progress`, when given, is called as on_progress(increment=N).
    Returns the filtered version list, or None when no matching package
    could be scanned (no match, blacklisted, or dev-only).
    """
    matches = []

    if query.endswith(".ebuild"):
        # Direct ebuild path: resolve it to a cpv first.  reload_gentoolkit()
        # presumably refreshes gentoolkit's view of the repos -- confirm.
        cpv = package_from_ebuild(query)
        reload_gentoolkit()
        if cpv:
            matches = [Package(cpv)]
    else:
        matches = Query(query).find(
            include_masked=True,
            in_installed=False,
        )

    if not matches:
        output.ewarn(
            pp.warn("No package matching '%s'" % pp.pkgquery(query))
        )
        return None

    # Keep the highest version, skipping live (9999) ebuilds while older
    # versions remain in the sorted list.
    matches = sorted(matches)
    pkg = matches.pop()

    while '9999' in pkg.version and len(matches):
        pkg = matches.pop()

    if not pkg:
        # NOTE(review): matches.pop() is unlikely to yield a falsy value, so
        # this guard may be unreachable -- confirm before relying on it.
        output.ewarn(
            pp.warn("Package '%s' only have a dev version (9999)"
                    % pp.pkgquery(pkg.cp))
        )
        return None

    # useful data only for formatted output
    start_time = datetime.now()
    output.metadata("datetime", start_time.isoformat(), show=False)
    output.metadata("cp", pkg.cp, show=False)
    output.metadata("cpv", pkg.cpv, show=False)

    if on_progress:
        on_progress(increment=10)

    if pkg.cp in BLACKLIST_PACKAGES:
        output.ewarn(
            pp.warn("Package '%s' is blacklisted" % pp.pkgquery(pkg.cp))
        )
        return None

    if not CONFIG['quiet']:
        if not CONFIG['format']:
            pp.uprint(
                " * %s [%s]" % (pp.cpv(pkg.cpv), pp.section(pkg.repo_name()))
            )
            pp.uprint()
        else:
            output.metadata("overlay", pp.section(pkg.repo_name()))

        ebuild_path = pkg.ebuild_path()
        if ebuild_path:
            output.metadata(
                "ebuild", pp.path(os.path.normpath(ebuild_path))
            )

        # HOMEPAGE/DESCRIPTION are only needed for the verbose output path.
        uris, homepage, description = pkg.environment(
            ('SRC_URI', 'HOMEPAGE', 'DESCRIPTION')
        )

        output.metadata("repository", pkg.repo_name())
        output.metadata("homepage", homepage)
        output.metadata("description", description)
    else:
        uris = pkg.environment('SRC_URI')

    cpv = pkg.cpv

    # Parse SRC_URI and expand mirror:// uris via from_mirror(); both the
    # raw and expanded lists are stashed on the package for the handlers.
    uris = parse_src_uri(uris)
    uris_expanded = [
        from_mirror(uri) if 'mirror://' in uri else uri for uri in uris
    ]
    pkg._uris = uris
    pkg._uris_expanded = uris_expanded

    versions = handlers.scan(pkg, uris, on_progress)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    result = filter_versions(cp, versions)

    if on_progress:
        on_progress(increment=10)

    # output scan time for formatted output
    scan_time = (datetime.now() - start_time).total_seconds()
    output.metadata("scan_time", scan_time, show=False)

    is_current_version_stable = is_version_stable(ver)
    if len(result) > 0:
        if not (CONFIG['format'] or CONFIG['quiet']):
            print("")
        for cp, url, version, handler, confidence in result:
            # Honour the pre-release filtering options before reporting.
            if CONFIG["ignore-pre-release"]:
                if not is_version_stable(version):
                    continue
            if CONFIG["ignore-pre-release-if-stable"]:
                if is_current_version_stable and \
                        not is_version_stable(version):
                    continue
            if CONFIG['progress']:
                print("", file=sys.stderr)
            output.result(cp, version, url, handler, confidence)

    return result