def get_max_version(self, category, name):
    """
    Get the most recent available version of a package.

    Args:
        category: Category name.
        name: Package name.

    Returns:
        The most recent version of the package.
    """
    if not category or category not in self.categories:
        raise InvalidKeyError('No such category: ' + category)
    if category not in self.database \
            or name not in self.database[category]['packages']:
        raise InvalidKeyError('No such package: ' + category + '/' + name)
    pkgname = category + '/' + name
    versions = list(self.database[category]['packages'][name])
    max_ver = versions[0]
    for version in versions[1:]:
        if portage.pkgcmp(portage.pkgsplit(pkgname + '-' + version),
                          portage.pkgsplit(pkgname + '-' + max_ver)) > 0:
            max_ver = version
    return max_ver
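A minimal sketch of the comparison idiom used above, runnable with just portage installed ('app-misc/foo' is a hypothetical package):

import portage

# pkgsplit() yields a (name, version, revision) tuple; a missing revision is
# reported as 'r0'. pkgcmp() compares two such tuples and returns a positive
# number when the first is newer.
newer = portage.pkgsplit('app-misc/foo-1.2.3')   # ('app-misc/foo', '1.2.3', 'r0')
older = portage.pkgsplit('app-misc/foo-1.2-r1')  # ('app-misc/foo', '1.2', 'r1')
assert portage.pkgcmp(newer, older) > 0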
def revisionMatch(revisionAtom, portdb, match_type="default"): """ handler for the special >~, >=~, <=~ and <~ atoms that are supposed to behave as > and < except that they are limited to the same version, the range only applies to the revision part. @type revisionAtom: string @param revisionAtom: a <~ or >~ atom that contains the atom to match against @type portdb: portage.dbapi @param portdb: one of the portage databases to use as information source @type match_type: string @param match_type: if != "default" passed as first argument to portdb.xmatch to apply the wanted visibility filters @rtype: list of strings @return: a list with the matching versions """ if match_type == "default" or not hasattr(portdb, "xmatch"): if ":" in revisionAtom: mylist = portdb.match(re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:])) else: mylist = portdb.match(re.sub("-r[0-9]+$", "", revisionAtom[2:])) else: if ":" in revisionAtom: mylist = portdb.xmatch(match_type, re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:])) else: mylist = portdb.xmatch(match_type, re.sub("-r[0-9]+$", "", revisionAtom[2:])) rValue = [] for v in mylist: r1 = portage.pkgsplit(v)[-1][1:] r2 = portage.pkgsplit(revisionAtom[3:])[-1][1:] if eval(r1+" "+revisionAtom[0:2]+" "+r2): rValue.append(v) return rValue
def scan_url(pkg, url, options):
    output.einfo("Using BerliOS handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    project, filename = re.search(berlios_regex, url).groups()

    project_page = "http://developer.berlios.de/projects/%s" % project
    # urllib.request.urlopen().read() returns bytes under Python 3; decode
    # before matching with a str pattern
    content = urllib.request.urlopen(project_page).read().decode()

    project_id = re.search(
        r"/project/filelist.php\?group_id=(\d+)", content
    ).group(1)

    base_url = (
        "http://developer.berlios.de/project/filelist.php?group_id=%s"
        % project_id
    )

    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

    result = url_scan(pkg, base_url, file_pattern)

    ret = []
    for found_url, pv, _, _ in result:
        found_url = found_url.replace("prdownload", "download")
        ret.append((found_url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan(cpv, url):
    'http://wiki.python.org/moin/PyPiXmlRpc'

    package = guess_package(cpv, url)

    euscan.output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []
    for version in versions:
        if helpers.version_filtered(cp, ver, version):
            continue
        urls = client.release_urls(package, version)
        urls = " ".join([infos['url'] for infos in urls])
        ret.append((urls, version))
    return ret
def scan_url(pkg, url, options):
    output.einfo("Using BerliOS handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    project, filename = re.search(berlios_regex, url).groups()

    project_page = "http://developer.berlios.de/projects/%s" % project
    content = urllib.urlopen(project_page).read()

    project_id = re.search(
        r"/project/filelist.php\?group_id=(\d+)",
        content
    ).group(1)

    base_url = (
        "http://developer.berlios.de/project/filelist.php?group_id=%s" %
        project_id
    )

    file_pattern = regex_from_template(
        filename.replace(ver, "${PV}")
    )

    result = url_scan(pkg, base_url, file_pattern)

    ret = []
    for found_url, pv, _, _ in result:
        found_url = found_url.replace("prdownload", "download")
        ret.append((found_url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for version in versions:
        up_pv = version['number']
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    packages_url, package_name = options['data'].strip().split(" ", 1)

    output.einfo("Using Debian Packages: " + packages_url)

    fp = urllib.urlopen(packages_url)
    content = fp.read()

    # Support for .gz and .bz2 Packages file
    if packages_url.endswith(".bz2"):
        content = bz2.decompress(content)
    if packages_url.endswith(".gz"):
        content = zlib.decompress(content, 16 + zlib.MAX_WBITS)

    content = content.split("\n\n")

    result = []
    for package_info in content:
        package_line = re.search(r"^Package: (.*)$", package_info, re.M)
        version_line = re.search(r"^Version: (.*)$", package_info, re.M)
        if package_line and package_line.group(1) == package_name:
            if version_line:
                result.append(version_line.group(1))

    ret = []
    for up_pv in result:
        url = ""  # TODO: How to find the url?
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def re_find_package_name(package, regexps, uri):
    if not isinstance(regexps, (list, tuple)):
        regexps = ((regexps, 1),)

    match = None
    group = -1

    for regexp in regexps:
        if isinstance(regexp, (list, tuple)):
            regexp, group = regexp
        else:
            group = 1
        match = regexp.match(uri)
        if match:
            break

    if not match:
        sys.stderr.write(pp.warn("Can't find package name in '%s'" % uri))
        return None

    package_name = match.group(group)

    # Try to strip version, if present (str.replace returns a new string,
    # so the result must be assigned back)
    if "-%s" % package.version in package_name:
        package_name = package_name.replace("-%s" % package.version, "")

    cpv = "fake/" + package_name
    cpv = portage.pkgsplit(cpv)
    if cpv:
        package_name = cpv[0].replace("fake/", "", 1)

    return package_name
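What the 'fake/' round trip above produces, as a sketch (hypothetical package name, assumes portage is importable):

import portage

cpv = portage.pkgsplit('fake/requests-2.28.1')
print(cpv)                             # ('fake/requests', '2.28.1', 'r0')
print(cpv[0].replace('fake/', '', 1))  # 'requests' -- version stripped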
def scan_url(pkg, url, options):
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    remote_pkg = guess_package(cp, url)

    # einfo() takes a single message string, so format it first
    output.einfo("Using CPAN API: %s" % remote_pkg)

    return scan_pkg(pkg, {'data': remote_pkg})
def scan_url(pkg, url, options):
    'http://developer.github.com/v3/repos/downloads/'

    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    #      and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' %
                      re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: project=%s user=%s filename=%s" %
                 (project, user, filename))

    dlreq = urllib.request.urlopen(
        'https://api.github.com/repos/%s/%s/downloads' % (user, project))
    dls = json.load(dlreq)

    ret = []
    for dl in dls:
        m = fnre.match(dl['name'])
        if m:
            pv = mangling.mangle_version(m.group(1), options)
            if helpers.version_filtered(cp, ver, pv):
                continue
            url = mangling.mangle_url(dl['html_url'], options)
            ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join([mangling.mangle_url(infos['url'], options)
                         for infos in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package = options['data'].strip()

    output.einfo("Using FreeCode handler: " + package)

    fp = urllib.urlopen("http://freecode.com/projects/%s/releases" % package)
    content = fp.read()

    result = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
        content
    )

    ret = []
    for release_id, up_pv in result:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        fp = urllib.urlopen("http://freecode.com/projects/%s/releases/%s" %
                            (package, release_id))
        content = fp.read()
        download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
        fp = urllib.urlopen("http://freecode.com%s" % download_page)
        content = fp.read()
        url = re.findall(
            r'In case it doesn\'t, click here: <a href="([^"]+)"',
            content
        )[0]
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for version in versions:
        up_pv = version['number']
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan(cpv, url):
    'http://developer.github.com/v3/repos/downloads/'

    user, project, filename = guess_package(cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    #      and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' %
                      re.escape(filename).replace(re.escape(ver), '(.*?)'))

    # the original referenced an undefined name here; join the guessed parts
    output.einfo("Using github API for: " + '/'.join((user, project, filename)))

    dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' %
                            (user, project))
    dls = json.load(dlreq)

    for dl in dls:
        m = fnre.match(dl['name'])
        if m:
            pv = helpers.gentoo_mangle_version(m.group(1))
            if helpers.version_filtered(cp, ver, pv):
                continue
            yield (dl['html_url'], pv, HANDLER_NAME, CONFIDENCE)
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join([mangling.mangle_url(infos['url'], options)
                         for infos in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_upstream_urls(cpv, urls):
    versions = []

    for filename in urls:
        for url in urls[filename]:
            if not CONFIG['quiet']:
                pp.uprint()
            euscan.output.einfo("SRC_URI is '%s'" % url)

            if '://' not in url:
                euscan.output.einfo("Invalid url '%s'" % url)
                continue

            # Try normal scan
            if CONFIG["scan-dir"]:
                versions.extend(handlers.scan(cpv, url))

            if versions and CONFIG['oneshot']:
                break

            # Brute force
            if CONFIG["brute-force"] > 0:
                versions.extend(handlers.brute_force(cpv, url))

            if versions and CONFIG['oneshot']:
                break

    cp, ver, rev = portage.pkgsplit(cpv)

    return filter_versions(cp, versions)
def scan(cpv, url):
    for bu in SCANDIR_BLACKLIST_URLS:
        if re.match(bu, url):
            euscan.output.einfo("%s is blacklisted by rule %s" % (url, bu))
            return []

    resolved_url = helpers.parse_mirror(url)
    if not resolved_url:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    # 'Hack' for _beta/_rc versions where _ is used instead of -
    if ver not in resolved_url:
        newver = helpers.version_change_end_sep(ver)
        if newver and newver in resolved_url:
            euscan.output.einfo(
                "Version: using %s instead of %s" % (newver, ver)
            )
            ver = newver

    template = helpers.template_from_url(resolved_url, ver)
    if '${' not in template:
        euscan.output.einfo(
            "Url doesn't seem to depend on version: %s not found in %s" %
            (ver, resolved_url)
        )
        return []
    else:
        euscan.output.einfo("Scanning: %s" % template)

    steps = helpers.generate_scan_paths(template)
    return scan_directory_recursive(cp, ver, rev, "", steps, url)
def scan(cpv, url):
    'http://wiki.python.org/moin/PyPiXmlRpc'

    package = guess_package(cpv, url)

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []
    for up_pv in versions:
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join([infos['url'] for infos in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_url(pkg, url, options):
    'http://developer.github.com/v3/repos/downloads/'

    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    #      and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' %
                      re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: project=%s user=%s filename=%s" %
                 (project, user, filename))

    dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' %
                            (user, project))
    dls = json.load(dlreq)

    ret = []
    for dl in dls:
        m = fnre.match(dl['name'])
        if m:
            pv = mangling.mangle_version(m.group(1), options)
            if helpers.version_filtered(cp, ver, pv):
                continue
            url = mangling.mangle_url(dl['html_url'], options)
            ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package = options['data'].strip()

    output.einfo("Using FreeCode handler: " + package)

    fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases" %
                                package)
    content = str(fp.read())

    result = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
        content)

    ret = []
    for release_id, up_pv in result:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        fp = urllib.request.urlopen(
            "http://freecode.com/projects/%s/releases/%s" %
            (package, release_id))
        content = str(fp.read())
        download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
        fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
        content = str(fp.read())
        url = re.findall(r'In case it doesn\'t, click here: <a href="([^"]+)"',
                         content)[0]
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def search_ebuilds(path, portdir=True, searchdef="", repo_num="", config=None, data=None): pv = "" pkgs = [] nr = len(data['ebuilds']) + 1 if portdir: rep = darkgreen("Portage ") else: rep = red("Overlay "+str(repo_num)+" ") if isdir(path): filelist = listdir(path) for file in filelist: if file[-7:] == ".ebuild": pv = file[:-7] pkgs.append(list(pkgsplit(pv))) pkgs[-1].append(path + file) if searchdef != "" and pv == searchdef: data['defebuild'] = (searchdef, pkgs[-1][3]) if not portdir: config['found_in_overlay'] = True pkgs.sort(key=cmp_sort_key(mypkgcmp)) for pkg in pkgs: rev = "" if pkg[2] != "r0": rev = "-" + pkg[2] data['output'].append(" " + rep + " [" + bold(str(nr)) + "] " + pkg[0] + "-" + pkg[1] + rev + "\n") data['ebuilds'].append(pkg[len(pkg)-1]) nr += 1
def can_handle(pkg, url=None):
    if not url:
        return False

    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in url:
        return False

    return re.search(berlios_regex, url)
def can_handle(pkg, url=None):
    if not url:
        return False

    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in url:
        return False

    return re.match(package_name_regex, url)
def can_handle(pkg, url=None):
    if not url:
        return False

    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in url:
        return False

    return "mirror://sourceforge/" in url
def get_v(cpv):
    "cpv can be anything"
    if portage.isjustname(cpv):
        raise Exception('Input (%s) has no version!' % cpv)
    pv = portage.pkgsplit(cpv)[-2:]
    if pv[1] == 'r0':
        return pv[0]
    else:
        return '%s-%s' % (pv[0], pv[1])
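A quick trace of the 'r0' special case handled above (hypothetical inputs):

import portage

print(portage.pkgsplit('app-misc/foo-1.0'))  # ('app-misc/foo', '1.0', 'r0')
print(get_v('app-misc/foo-1.0'))             # '1.0' -- the implicit r0 is hidden
print(get_v('app-misc/foo-1.0-r2'))          # '1.0-r2'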
def guess_gem(cpv, url):
    match = re.search('mirror://rubygems/(.*).gem', url)
    if match:
        cpv = 'fake/%s' % match.group(1)

    cp, ver, rev = portage.pkgsplit(cpv)
    cat, pkg = cp.split("/")

    return pkg
def scan(cpv, url):
    cp, ver, rev = portage.pkgsplit(cpv)
    pkg = guess_package(cp, url)

    orig_url = url
    url = 'http://search.cpan.org/api/dist/%s' % pkg

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []
    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        up_pv = cpan_trim_version(up_pv)
        pv = gentoo_mangle_version(up_pv)
        up_ver = cpan_mangle_version(ver)

        if helpers.version_filtered(cp, up_ver, up_pv, cpan_vercmp):
            continue

        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:1],
            version['cpanid'],
            version['archive']
        )

        if url == orig_url:
            continue

        ret.append((url, pv))

    return ret
def guess_gem(cpv, url):
    match = re.search('mirror://rubygems/(.*).gem', url)
    if match:
        cpv = 'fake/%s' % match.group(1)

    ret = portage.pkgsplit(cpv)
    if not ret:
        return None

    cp, ver, rev = ret
    cat, pkg = cp.split("/")

    return pkg
def scan_upstream_urls(cpv, urls, on_progress):
    versions = []

    maxval = len(urls) + 5
    curval = 1

    for filename in urls:
        curval += 1
        if on_progress:
            on_progress(maxval, curval)

        for url in urls[filename]:
            if not CONFIG['quiet'] and not CONFIG['format']:
                pp.uprint()
            output.einfo("SRC_URI is '%s'" % url)

            if '://' not in url:
                output.einfo("Invalid url '%s'" % url)
                continue

            # Try normal scan
            if CONFIG["scan-dir"]:
                try:
                    versions.extend(handlers.scan(cpv, url))
                except Exception as e:
                    output.ewarn("Handler failed: [%s] %s" %
                                 (e.__class__.__name__, e.message))

            if versions and CONFIG['oneshot']:
                break

            # Brute force
            if CONFIG["brute-force"] > 0:
                versions.extend(handlers.brute_force(cpv, url))

            if versions and CONFIG['oneshot']:
                break

    cp, ver, rev = portage.pkgsplit(cpv)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    result = filter_versions(cp, versions)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    return result
def _do_symlinks(mdb, slots, atom, atom_dir, summary):
    logging.debug("working in {}".format(atom_dir))

    # for each slot, get the best match according to the profile (i.e. newer
    # stable, or newer unstable if the profile whitelists this atom)
    visibles = set()
    for slot in slots:
        for cpv in mdb.match_best_visibles("{}:{}".format(atom, slot)):
            slot, keywords = mdb.aux_get_first(cpv, ["SLOT", "KEYWORDS"])
            visibles.add(os.path.basename(cpv))
            # may use isStable() from
            # portage/package/ebuild/_config/KeywordsManager.py
            logging.debug("found {} slot:{}".format(cpv, slot))

    for root, dirs, files in os.walk(atom_dir):
        for name in files:
            head, tail = os.path.splitext(name)
            # remove invisible ebuilds
            if tail == ".ebuild" and head not in visibles:
                try:
                    _fs_remove(os.path.join(atom_dir, name),
                               mdb.assert_beneath_portdir)
                    summary.removed_ebuilds.add(head)
                except OutsideOfPortageTreeException as exc:
                    logging.warning(
                        "skipping file for deletion (duplicate atoms?): %s/%s",
                        atom_dir, name)
                    continue

    # It's more common to remove an old package version than the more
    # up-to-date one (i.e. we don't downgrade packages but can keep an old
    # version for compatibility): decrease order to keep the most up to date
    # at first.
    for i, pvr in enumerate(sorted([portage.pkgsplit(x) for x in visibles],
                                   key=functools.cmp_to_key(portage.pkgcmp),
                                   reverse=True)):
        if pvr[2] == "r0":
            name = "-".join(pvr[:-1])
        else:
            name = "-".join(pvr)
        src = "{}.ebuild".format(name)

        # ignore already equalized ebuilds
        if os.path.islink(os.path.join(atom_dir, src)):
            continue

        # the equalized name should not end with ".ebuild"
        # (cf. dbapi/porttree.py:cp_list "Invalid ebuild name" and
        # versions.py:catpkgsplit)
        dst = ".{}.ebuild.{}".format(pvr[0], i)

        try:
            _fs_move(atom_dir, src, dst, mdb.assert_beneath_portdir)
        except OutsideOfPortageTreeException as exc:
            logging.warning(
                "skipping atom for move (duplicate atoms?): %s/(%s -> %s)",
                atom_dir, src, dst)
            continue

        try:
            _fs_symlink(atom_dir, src, dst, mdb.assert_beneath_portdir)
            summary.symlinked_ebuilds += 1
        except OutsideOfPortageTreeException as exc:
            logging.warning(
                "skipping atom for symlink (duplicate atoms?): %s/(%s -> %s)",
                atom_dir, src, dst)
            continue
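The version-ordering idiom used in the loop above, isolated as a runnable sketch (made-up ebuild names): pkgcmp() is a cmp-style comparator over pkgsplit() tuples, adapted for sorted() with functools.cmp_to_key.

import functools
import portage

names = ['foo-1.0.ebuild', 'foo-1.10.ebuild', 'foo-1.2-r3.ebuild']
splits = [portage.pkgsplit(n[:-7]) for n in names]
# newest first, as in _do_symlinks(): 1.10 sorts above 1.2-r3 and 1.0,
# because version components compare numerically (10 > 2)
for pvr in sorted(splits, key=functools.cmp_to_key(portage.pkgcmp),
                  reverse=True):
    print(pvr)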
def revisionMatch(revisionAtom, portdb, match_type="default"): """ handler for the special >~, >=~, <=~ and <~ atoms that are supposed to behave as > and < except that they are limited to the same version, the range only applies to the revision part. @type revisionAtom: string @param revisionAtom: a <~ or >~ atom that contains the atom to match against @type portdb: portage.dbapi @param portdb: one of the portage databases to use as information source @type match_type: string @param match_type: if != "default" passed as first argument to portdb.xmatch to apply the wanted visibility filters @rtype: list of strings @return: a list with the matching versions """ if match_type == "default" or not hasattr(portdb, "xmatch"): if ":" in revisionAtom: mylist = portdb.match( re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:])) else: mylist = portdb.match(re.sub("-r[0-9]+$", "", revisionAtom[2:])) else: if ":" in revisionAtom: mylist = portdb.xmatch( match_type, re.sub(r'-r[0-9]+(:[^ ]+)?$', r'\1', revisionAtom[2:])) else: mylist = portdb.xmatch(match_type, re.sub("-r[0-9]+$", "", revisionAtom[2:])) rValue = [] for v in mylist: r1 = portage.pkgsplit(v)[-1][1:] r2 = portage.pkgsplit(revisionAtom[3:])[-1][1:] if eval(r1 + " " + revisionAtom[0:2] + " " + r2): rValue.append(v) return rValue
def deserialize(cls, value):
    atom = portage.dep.Atom(value)
    operator = portage.dep.get_operator(atom)
    cpv = portage.dep.dep_getcpv(atom)
    category, rest = portage.catsplit(cpv)
    if operator:
        package, version, revision = portage.pkgsplit(rest)
    else:
        package = rest
        version = ""
        operator = ""
    return Dependency(category, package, version, operator)
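The portage.dep calls used above, traced on a sample atom (a sketch; Dependency is this project's own class and is not exercised here):

import portage
import portage.dep

atom = portage.dep.Atom('>=dev-lang/python-3.10-r1')
print(portage.dep.get_operator(atom))      # '>='
cpv = portage.dep.dep_getcpv(atom)         # 'dev-lang/python-3.10-r1'
print(portage.catsplit(cpv))               # ['dev-lang', 'python-3.10-r1']
print(portage.pkgsplit('python-3.10-r1'))  # ('python', '3.10', 'r1')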
def scan_directory_recursive(cpv, url, steps):
    if not steps:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    url += steps[0][0]
    pattern = steps[0][1]
    steps = steps[1:]

    euscan.output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for version, path in results:
        if helpers.version_filtered(cp, ver, version):
            continue
        if not url.endswith('/') and not path.startswith('/'):
            path = url + '/' + path
        else:
            path = url + path
        versions.append((path, version))
        if steps:
            ret = scan_directory_recursive(cpv, path, steps)
            versions.extend(ret)

    return versions
def guess_package(cp, url):
    match = _cpan_package_name_re.search(url)

    pkg = None

    if match:
        pkg = match.group(1)
        try:
            cp, ver, rev = portage.pkgsplit('fake/' + pkg)
        except:
            pass

    cat, pkg = cp.split("/")

    return pkg
def _guess_components(cls, my_p):
    """Try to break up raw MY_P into PN and PV"""
    pn, pv = "", ""

    # Ok, we just have one automagical test here.
    # We should look at versionator.eclass for inspiration
    # and then come up with several functions.
    my_p = my_p.replace("_", "-")

    psplit = pkgsplit(my_p)
    if psplit:
        pn = psplit[0]
        pv = psplit[1]
    log.debug("guess_components got: pn(%s), pv(%s)", pn, pv)
    return pn, pv
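The single heuristic above, in isolation (hypothetical MY_P value): underscores become hyphens so pkgsplit() can locate the version boundary.

import portage

my_p = 'My_Package_1.0'.replace('_', '-')
print(portage.pkgsplit(my_p))  # ('My-Package', '1.0', 'r0')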
def getVersion(self, packageName):
    # the portage utilities are almost always going to be in
    # /usr/lib/portage/pym
    import sys
    sys.path.append('/usr/lib/portage/pym')
    import portage

    # FIXME: this takes the first package returned in the list, in the
    # case that there are slotted packages, and removes the leading
    # category such as 'sys-apps'
    gentooPackageName = portage.db["/"]["vartree"].dbapi.match(
        packageName)[0].split('/')[1]

    # this removes the distribution specific versioning returning only the
    # upstream version
    upstreamVersion = portage.pkgsplit(gentooPackageName)[1]

    # print("Version of package is: " + upstreamVersion)
    return Version.fromString(upstreamVersion)
def scan_url(pkg, url, options):
    output.einfo("Using SourceForge handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    project, filename = re.search(
        "mirror://sourceforge/([^/]+)/(?:.*/)?([^/]+)",
        url).groups()

    base_url = "http://qa.debian.org/watch/sf.php/%s" % project
    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

    result = url_scan(pkg, base_url, file_pattern)

    ret = []
    for url, pv, _, _ in result:
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_url(pkg, url, options):
    output.einfo("Using Google Code handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package_name = re.match(package_name_regex, url).group(1)
    base_url = "http://code.google.com/p/%s/downloads/list" % package_name

    file_pattern = regex_from_template(
        url.split("/")[-1].replace(ver, "${PV}"))

    result = url_scan(pkg, base_url, file_pattern)

    ret = []
    for url, pv, _, _ in result:
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def equalize(mdb, atoms=None, dry_run=False):
    """Equalize a Gentoo Portage tree.

    If `atoms` is None (or an empty list, or whatever evaluates to False),
    then this function will equalize the entire Portage tree.

    :param mdb: a set of Portage tree databases from MultiDb
    """
    global DRY_RUN
    DRY_RUN = dry_run

    # get the entire list of atoms provided by the set of Portage tree
    # databases `mdb`
    if not atoms:
        atoms = mdb.cp_all()
    atom_nb = len(atoms)

    summary = EqualizeSummary()

    for i, atom in enumerate(atoms, start=1):
        # find all the slots for this atom
        slots = set()
        for cpv in mdb.match(atom):
            slots.add(mdb.aux_get_first(cpv, ["SLOT"])[0])

        logging.debug("")
        logging.info("equalizing {}/{} {}".format(i, atom_nb, atom))

        # check if some slots are visible to the current profile
        if len(slots) != 0:
            _do_symlinks(mdb, slots, atom, mdb.get_atom_dir_selected(cpv),
                         summary)
            continue

        # remove files which are not usable with the current profile
        cpvs = mdb.match_all(atom)
        if len(cpvs) == 0:
            raise Exception(
                "Missing atom in the cache, you should run "
                "`egencache --update` for this Portage tree")
        cpv = cpvs.pop()
        atom_dir = mdb.get_atom_dir_selected(cpv)
        try:
            _fs_remove_tree(atom_dir, mdb.assert_beneath_portdir)
            summary.removed_packages.add(
                "/".join(portage.pkgsplit(cpv)[0:2]))
        except OutsideOfPortageTreeException as exc:
            logging.debug("skipping atom dir for deletion: %s", atom_dir)
            continue

    return summary
def split_uri(cls, uri):
    """Try to split a URI into PN, PV and REV

    :param uri: SRC_URI
    :type uri: string
    :returns: PN, PV, REV
    :rtype: tuple of strings

    **Example:**

    >>> Enamer.split_uri('http://www.foobar.com/foobar-1.0.tar.gz')
    ('foobar', '1.0', 'r0')
    >>> Enamer.split_uri('http://www.foobar.com/foo-2.3_beta3-r5.tar.gz')
    ('foo', '2.3_beta3', 'r5')

    """
    p = cls.get_filename(uri)
    return pkgsplit(p)
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using Gnome json cache: " + package)

    fp = urllib.request.urlopen('/'.join(
        [GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    cache = json.loads(content, encoding='ascii')

    if cache[0] != 4:
        output.eerror('Unknown cache format detected')
        return []

    versions = cache[2][package]

    if not versions:
        return []

    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join(
                    [GNOME_URL_SOURCE, package, up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            # without a tarball there is no url for this release; skip it
            # rather than reusing a stale (or unbound) url
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_url(pkg, url, options):
    if CONFIG["scan-dir"]:
        for bu in SCANDIR_BLACKLIST_URLS:
            if re.match(bu, url):
                output.einfo("%s is blacklisted by rule %s" % (url, bu))
                return []

        resolved_url = helpers.parse_mirror(url)
        if not resolved_url:
            return []

        cp, ver, rev = portage.pkgsplit(pkg.cpv)

        # 'Hack' for _beta/_rc versions where _ is used instead of -
        if ver not in resolved_url:
            newver = helpers.version_change_end_sep(ver)
            if newver and newver in resolved_url:
                output.einfo(
                    "Version: using %s instead of %s" % (newver, ver)
                )
                ver = newver

        template = helpers.template_from_url(resolved_url, ver)
        if '${' not in template:
            output.einfo(
                "Url doesn't seem to depend on version: %s not found in %s" %
                (ver, resolved_url)
            )
            return []
        else:
            output.einfo("Scanning: %s" % template)

        steps = helpers.generate_scan_paths(template)
        ret = scan_directory_recursive(cp, ver, rev, "", steps, url, options)

    if not ret:
        ret = brute_force(pkg, url)

    return ret
def __xmatch(self, pdb, package):
    """xmatch function that searches for all packages over all repos"""
    try:
        mycp = port.dep_expand(package, mydb=pdb, settings=pdb.settings).cp
    except port.exception.AmbiguousPackageName as Arg:
        msg_err = 'Ambiguous package name "%s".\n' % package
        found = 'Possibilities: %s' % Arg
        raise SystemExit('%s%s' % (msg_err, found))
    except port.exception.InvalidAtom:
        msg_err = 'No such package "%s"' % package
        raise SystemExit(msg_err)

    mysplit = mycp.split('/')
    mypkgs = []
    for oroot in pdb.porttrees:
        try:
            file_list = os.listdir(os.path.join(oroot, mycp))
        except OSError:
            continue
        for x in file_list:
            pf = x[:-7] if x[-7:] == '.ebuild' else []
            if pf:
                ps = port.pkgsplit(pf)
                if not ps or ps[0] != mysplit[1]:
                    # we got garbage or ebuild with wrong name in the dir
                    continue
                ver_match = port.versions.ver_regexp.match(
                    "-".join(ps[1:]))
                if ver_match is None or not ver_match.groups():
                    # version is not allowed by portage or unset
                    continue

                # obtain related data from metadata and append to the pkg list
                keywords, slot = self.__getMetadata(
                    pdb, mysplit[0] + '/' + pf, oroot)
                mypkgs.append(
                    [mysplit[0] + '/' + pf, oroot, slot, keywords])

    self.__packages_sort(mypkgs)
    return mypkgs
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using PyPi API: " + package)

    url = 'https://pypi.python.org/pypi/%s/json' % package

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in data['releases']:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = [entry['url'] for entry in data['releases'][up_pv]]
        urls = " ".join([mangling.mangle_url(url, options) for url in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def searchEbuilds(path, portdir=True, searchdef="", repo_num="", config=None, data=None): pv = "" pkgs = [] nr = len(data['ebuilds']) + 1 if portdir: rep = darkgreen("Portage ") else: rep = red("Overlay " + str(repo_num) + " ") if isdir(path): filelist = listdir(path) for file in filelist: if file[-7:] == ".ebuild": pv = file[:-7] pkgs.append(list(pkgsplit(pv))) pkgs[-1].append(path + file) if searchdef != "" and pv == searchdef: data['defebuild'] = (searchdef, pkgs[-1][3]) if not portdir: config['found_in_overlay'] = True pkgs.sort(mypkgcmp) for pkg in pkgs: rev = "" if pkg[2] != "r0": rev = "-" + pkg[2] data['output'].append(" " + rep + " [" + bold(str(nr)) + "] " + pkg[0] + "-" + pkg[1] + rev + "\n") data['ebuilds'].append(pkg[len(pkg) - 1]) nr += 1
def brute_force(pkg, url):
    if CONFIG["brute-force"] == 0:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    url = helpers.parse_mirror(url)
    if not url:
        return []

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, cp):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, url):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    output.einfo("Generating version from " + ver)

    components = helpers.split_version(ver)
    versions = helpers.gen_versions(components, CONFIG["brute-force"])

    # Remove unwanted versions; build a new list instead of mutating the
    # one being iterated, which would skip elements
    versions = [v for v in versions
                if helpers.vercmp(cp, ver, helpers.join_version(v)) < 0]

    if not versions:
        output.einfo("Can't generate new versions from " + ver)
        return []

    template = helpers.template_from_url(url, ver)

    if '${PV}' not in template:
        output.einfo(
            "Url doesn't seem to depend on full version: %s not found in %s" %
            (ver, url))
        return []
    else:
        output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    done = []

    while i < len(versions):
        components = versions[i]
        i += 1

        # `done` stores tuples, so compare against a tuple here as well
        if tuple(components) in done:
            continue
        done.append(tuple(components))

        version = helpers.join_version(components)

        if helpers.version_filtered(cp, ver, version):
            continue

        try_url = helpers.url_from_template(template, version)
        infos = helpers.tryurl(try_url, template)

        if not infos:
            continue

        confidence = confidence_score(try_url, url,
                                      minimum=BRUTEFORCE_CONFIDENCE)

        result.append([try_url, version, BRUTEFORCE_HANDLER_NAME, confidence])

        if len(result) > CONFIG['brute-force-false-watermark']:
            output.einfo("Broken server detected! Skipping brute force.")
            return []

        if CONFIG["brute-force-recursive"]:
            for v in helpers.gen_versions(list(components),
                                          CONFIG["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if CONFIG["oneshot"]:
            break

    return result
def pkgsplit(ebuild):
    """Split ebuild into [category/package, version, revision]"""
    debug.dprint("PORTAGELIB: pkgsplit(); calling portage function")
    return portage.pkgsplit(ebuild)
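For reference, the underlying portage.pkgsplit() contract that every example on this page relies on (a sketch with made-up inputs):

import portage

print(portage.pkgsplit('sys-apps/portage-3.0.30-r1'))  # ('sys-apps/portage', '3.0.30', 'r1')
print(portage.pkgsplit('portage-3.0.30'))              # ('portage', '3.0.30', 'r0')
print(portage.pkgsplit('portage'))                     # None -- no version part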