def scan_pkg(pkg, options):
    """Scan PyPI (legacy XML-RPC API) for new upstream versions.

    ``options['data']`` is the upstream PyPI package name.  Returns a list
    of ``(urls, pv, HANDLER_NAME, CONFIDENCE)`` tuples, where *urls* is a
    space-joined string of mangled release file URLs.
    """
    package = options['data']
    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        # FIX: always return a list on the empty path (previously this
        # returned whatever falsy value the XML-RPC call produced).
        return []

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join(
            [mangling.mangle_url(infos['url'], options) for infos in urls]
        )
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_url(pkg, url, options):
    """Scan GitHub's downloads API for new versions of this package.

    See http://developer.github.com/v3/repos/downloads/ for the API used.
    Returns a list of ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples, or
    ``None`` when the current version cannot be located in the filename.
    """
    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    escaped = re.escape(filename).replace(re.escape(ver), '(.*?)')
    fnre = re.compile('^%s$' % escaped)

    output.einfo("Using github API for: project=%s user=%s filename=%s" %
                 (project, user, filename))

    api_url = 'https://api.github.com/repos/%s/%s/downloads' % (user, project)
    downloads = json.load(urllib2.urlopen(api_url))

    results = []
    for entry in downloads:
        match = fnre.match(entry['name'])
        if not match:
            continue
        pv = mangling.mangle_version(match.group(1), options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        mangled = mangling.mangle_url(entry['html_url'], options)
        results.append((mangled, pv, HANDLER_NAME, CONFIDENCE))
    return results
def scan_pkg(pkg, options):
    """Scan PyPI (legacy XML-RPC API) for new upstream versions.

    ``options['data']`` is the upstream PyPI package name.  Returns a list
    of ``(urls, pv, HANDLER_NAME, CONFIDENCE)`` tuples, where *urls* is a
    space-joined string of mangled release file URLs.
    """
    package = options['data']
    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        # FIX: always return a list on the empty path (previously this
        # returned whatever falsy value the XML-RPC call produced).
        return []

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join(
            [mangling.mangle_url(infos['url'], options) for infos in urls]
        )
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """FreeCode handler: crawl the project's releases pages and resolve
    each acceptable release to its final download URL.

    ``options['data']`` is the FreeCode project name.  Returns a list of
    ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    project = options['data'].strip()

    output.einfo("Using FreeCode handler: " + project)

    releases_html = urllib.urlopen(
        "http://freecode.com/projects/%s/releases" % project).read()
    releases = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % project,
        releases_html)

    results = []
    for release_id, up_pv in releases:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        # Release page links to an intermediate /urls/ redirect page.
        release_html = urllib.urlopen(
            "http://freecode.com/projects/%s/releases/%s" %
            (project, release_id)).read()
        download_page = re.findall(r'<a href="(/urls/[^"]+)"',
                                   release_html)[0]

        # The redirect page embeds the real download link.
        redirect_html = urllib.urlopen(
            "http://freecode.com%s" % download_page).read()
        url = re.findall(
            r'In case it doesn\'t, click here: <a href="([^"]+)"',
            redirect_html)[0]

        results.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return results
def scan_pkg(pkg, options):
    """FreeCode handler (Python 3): crawl the project's releases pages and
    resolve each acceptable release to its final download URL.

    ``options['data']`` is the FreeCode project name.  Returns a list of
    ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    package = options['data'].strip()
    output.einfo("Using FreeCode handler: " + package)

    fp = urllib.request.urlopen(
        "http://freecode.com/projects/%s/releases" % package)
    # FIX: str(bytes) produced the repr "b'...'" (with escaped newlines),
    # corrupting the HTML being scanned; decode the response instead.
    content = fp.read().decode('utf-8', errors='replace')
    result = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
        content)

    ret = []
    for release_id, up_pv in result:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        # Release page links to an intermediate /urls/ redirect page.
        fp = urllib.request.urlopen(
            "http://freecode.com/projects/%s/releases/%s" %
            (package, release_id))
        content = fp.read().decode('utf-8', errors='replace')
        download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]

        # The redirect page embeds the real download link.
        fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
        content = fp.read().decode('utf-8', errors='replace')
        url = re.findall(
            r'In case it doesn\'t, click here: <a href="([^"]+)"',
            content)[0]

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """RubyGems handler: query rubygems.org for released gem versions.

    ``options['data']`` is the gem name.  Returns a list of
    ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    # Network failures are treated as "no results".
    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []
    if not fp:
        return []

    versions = json.loads(fp.read())

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    results = []
    for version in versions:
        up_pv = version['number']
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        gem_url = mangling.mangle_url(
            'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv), options)
        results.append((gem_url, pv, HANDLER_NAME, CONFIDENCE))
    return results
def scan_pkg(pkg, options):
    """Debian Packages-index handler: parse a Packages file for versions.

    ``options['data']`` is ``"<packages_url> <package_name>"``.  Returns a
    list of ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples; the URL field
    is currently always empty (see TODO below).
    """
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    packages_url, package_name = options['data'].strip().split(" ", 1)

    output.einfo("Using Debian Packages: " + packages_url)

    content = urllib.urlopen(packages_url).read()

    # Support for .gz and .bz2 Packages file; pick codec from extension.
    if packages_url.endswith(".bz2"):
        content = bz2.decompress(content)
    if packages_url.endswith(".gz"):
        # 16 + MAX_WBITS makes zlib expect a gzip header.
        content = zlib.decompress(content, 16 + zlib.MAX_WBITS)

    # The index is a series of blank-line-separated RFC-822-ish stanzas.
    upstream_versions = []
    for stanza in content.split("\n\n"):
        package_line = re.search(r"^Package: (.*)$", stanza, re.M)
        version_line = re.search(r"^Version: (.*)$", stanza, re.M)
        if package_line and package_line.group(1) == package_name:
            if version_line:
                upstream_versions.append(version_line.group(1))

    results = []
    for up_pv in upstream_versions:
        url = ""  # TODO: How to find the url?
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        results.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return results
def scan_pkg(pkg, options):
    """CPAN handler: query search.cpan.org for releases of a distribution.

    ``options['data']`` is the remote CPAN distribution name.  Returns a
    list of ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    remote_pkg = options["data"]

    # Defaults to CPAN mangling rules
    if "versionmangle" not in options:
        options["versionmangle"] = ["cpan", "gentoo"]

    url = "http://search.cpan.org/api/dist/%s" % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    # Network failures are treated as "no results".
    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []
    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if "releases" not in data:
        return []

    ret = []
    for version in data["releases"]:
        # if version['status'] == 'testing':
        #     continue

        up_pv = version["version"]
        pv = mangling.mangle_version(up_pv, options)

        # "v"-prefixed versions compare directly; others need CPAN-style
        # version mangling and the CPAN comparison function.
        if up_pv.startswith("v"):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        # CPAN mirrors lay out author dirs as
        # authors/id/<first letter>/<first TWO letters>/<author id>/.
        # FIX: the second component was [0:1] (one char), which builds a
        # broken mirror path; it must be the first two characters.
        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
            version["cpanid"][0],
            version["cpanid"][0:2],
            version["cpanid"],
            version["archive"],
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
def scan_pkg(pkg, options):
    """CPAN handler (Python 3): query search.cpan.org for releases.

    ``options['data']`` is the remote CPAN distribution name.  Returns a
    list of ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    remote_pkg = options['data']

    # Defaults to CPAN mangling rules
    if 'versionmangle' not in options:
        options['versionmangle'] = ['cpan', 'gentoo']

    url = 'http://search.cpan.org/api/dist/%s' % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    # Network failures are treated as "no results".
    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []
    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []
    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        pv = mangling.mangle_version(up_pv, options)

        # "v"-prefixed versions compare directly; others need CPAN-style
        # version mangling and the CPAN comparison function.
        if up_pv.startswith('v'):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        # CPAN mirrors lay out author dirs as
        # authors/id/<first letter>/<first TWO letters>/<author id>/.
        # FIX: the second component was [0:1] (one char), which builds a
        # broken mirror path; it must be the first two characters.
        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:2],
            version['cpanid'],
            version['archive']
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    """Walk one step of a mirror directory tree looking for versions.

    *steps* is a list of ``(path_component, pattern)`` pairs; the first
    step is consumed here and the remainder are handled recursively.
    Returns a list of ``(url, pv, HANDLER_NAME, confidence)`` tuples.
    """
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]
    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    # Network failures are treated as "no results".
    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []
    if not fp:
        return []

    data = fp.read()

    results = []
    # FIX: raw string for the regex — "\s" in a plain string is an
    # invalid escape sequence (DeprecationWarning/SyntaxWarning in
    # modern Python); behavior is otherwise identical.
    if re.search(r"<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []
    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        # Leaf step: record the candidate unless it is the original URL.
        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        # More steps remain: recurse into the matched directory.
        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps,
                                           orig_url, options)
            versions.extend(ret)

    return versions
def scan_pkg(pkg, options):
    """GNOME handler (Python 3): read the project's ``cache.json`` index.

    ``options['data']`` is the GNOME module name.  Returns a list of
    ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    package = options['data']
    output.einfo("Using Gnome json cache: " + package)

    fp = urllib.request.urlopen('/'.join(
        [GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    cache = json.loads(content, encoding='ascii')

    # cache.json format version 4 is the only one we understand.
    if cache[0] != 4:
        output.eerror('Unknown cache format detected')
        return []

    versions = cache[2][package]
    if not versions:
        return []
    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        # Prefer the best compression available for this release.
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join(
                    [GNOME_URL_SOURCE, package, up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            # FIX: skip this release — previously execution fell through
            # and appended a stale (or unbound) `url` from a prior loop.
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """GNOME handler: read the project's ``cache.json`` release index.

    ``options['data']`` is the GNOME module name.  Returns a list of
    ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    package = options['data']
    output.einfo("Using Gnome json cache: " + package)

    fp = urllib2.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    cache = json.loads(content, encoding='ascii')

    # cache.json format version 4 is the only one we understand.
    if cache[0] != 4:
        output.eerror('Unknown cache format detected')
        return []

    versions = cache[2][package]
    if not versions:
        return []
    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        # Prefer the best compression available for this release.
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join([GNOME_URL_SOURCE, package,
                                up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            # FIX: skip this release — previously execution fell through
            # and appended a stale (or unbound) `url` from a prior loop.
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """PEAR/PECL handler (Python 3): query a php.net channel's REST API.

    ``options['data']`` is the package name and ``options['type']`` the
    channel (e.g. pear/pecl).  Returns a list of
    ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % \
        (channel, package.lower())
    output.einfo("Using: " + url)

    # Network failures are treated as "no results".
    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []
    if not fp:
        return []

    dom = xml.dom.minidom.parseString(fp.read())

    results = []
    for node in dom.getElementsByTagName("v"):
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        tgz_url = mangling.mangle_url(
            'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv),
            options)
        results.append((tgz_url, pv, HANDLER_NAME, CONFIDENCE))
    return results
def scan_pkg(pkg, options):
    """PEAR/PECL handler: query a php.net channel's REST release list.

    ``options['data']`` is the package name and ``options['type']`` the
    channel (e.g. pear/pecl).  Returns a list of
    ``(url, pv, HANDLER_NAME, CONFIDENCE)`` tuples.
    """
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % \
        (channel, package.lower())
    output.einfo("Using: " + url)

    # Network failures are treated as "no results".
    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []
    if not fp:
        return []

    dom = xml.dom.minidom.parseString(fp.read())

    results = []
    for node in dom.getElementsByTagName("v"):
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        tgz_url = mangling.mangle_url(
            'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv),
            options)
        results.append((tgz_url, pv, HANDLER_NAME, CONFIDENCE))
    return results
def scan_pkg(pkg, options):
    """Scan PyPI's JSON API for new upstream versions.

    ``options['data']`` is the upstream PyPI package name.  Returns a list
    of ``(urls, pv, HANDLER_NAME, CONFIDENCE)`` tuples, where *urls* is a
    space-joined string of mangled release file URLs.
    """
    package = options['data']
    output.einfo("Using PyPi API: " + package)

    url = 'https://pypi.python.org/pypi/%s/json' % package

    # Network failures are treated as "no results".
    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []
    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    # FIX: `ret` was initialized twice; keep a single initialization.
    ret = []
    for up_pv in data['releases']:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = [entry['url'] for entry in data['releases'][up_pv]]
        urls = " ".join([mangling.mangle_url(url, options) for url in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret