def scan_url(pkg, url, options):
    """Scan GitHub's downloads API for newer versions of *pkg*.

    API reference: http://developer.github.com/v3/repos/downloads/

    Returns a list of (url, version, handler_name, confidence) tuples
    (empty when the ebuild version cannot be located in the filename).
    """
    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        # Fix: return an empty list instead of bare ``return`` (None) so the
        # no-match case is iterable, consistent with the success path.
        return []

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' %
                      re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: project=%s user=%s filename=%s" %
                 (project, user, filename))

    dlreq = urllib.request.urlopen(
        'https://api.github.com/repos/%s/%s/downloads' % (user, project))
    dls = json.load(dlreq)

    ret = []
    for dl in dls:
        m = fnre.match(dl['name'])
        if m:
            pv = mangling.mangle_version(m.group(1), options)
            if helpers.version_filtered(cp, ver, pv):
                continue
            url = mangling.mangle_url(dl['html_url'], options)
            ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """Query the rubygems.org JSON API for available versions of a gem.

    Returns a list of (url, version, handler_name, confidence) tuples.
    """
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    # Best-effort fetch: any network/IO failure means "no candidates".
    try:
        fp = helpers.urlopen(url)
    except (urllib.error.URLError, IOError):
        return []

    if not fp:
        return []

    versions = json.loads(fp.read())

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for entry in versions:
        upstream_pv = entry['number']
        pv = mangling.mangle_version(upstream_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, upstream_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """List releases of a package via the PyPI XMLRPC interface.

    Returns a list of (urls, version, handler_name, confidence) tuples,
    where ``urls`` is a space-joined string of release file URLs.
    """
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
    versions = client.package_releases(package)

    # Propagate the falsy value unchanged when PyPI knows no releases.
    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        release_infos = client.release_urls(package, up_pv)
        mangled = [mangling.mangle_url(infos['url'], options)
                   for infos in release_infos]
        ret.append((" ".join(mangled), pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """List releases of a package via the PyPI XMLRPC interface (Python 2).

    Returns a list of (urls, version, handler_name, confidence) tuples,
    where ``urls`` is a space-joined string of release file URLs.
    """
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    # Propagate the falsy value unchanged when PyPI knows no releases.
    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        release_infos = client.release_urls(package, up_pv)
        mangled = [mangling.mangle_url(infos['url'], options)
                   for infos in release_infos]
        ret.append((" ".join(mangled), pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_url(pkg, url, options):
    """Scan GitHub's downloads API for newer versions of *pkg* (Python 2).

    API reference: http://developer.github.com/v3/repos/downloads/

    Returns a list of (url, version, handler_name, confidence) tuples
    (empty when the ebuild version cannot be located in the filename).
    """
    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        # Fix: return an empty list instead of bare ``return`` (None) so the
        # no-match case is iterable, consistent with the success path.
        return []

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' %
                      re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: project=%s user=%s filename=%s" %
                 (project, user, filename))

    dlreq = urllib2.urlopen(
        'https://api.github.com/repos/%s/%s/downloads' % (user, project))
    dls = json.load(dlreq)

    ret = []
    for dl in dls:
        m = fnre.match(dl['name'])
        if m:
            pv = mangling.mangle_version(m.group(1), options)
            if helpers.version_filtered(cp, ver, pv):
                continue
            url = mangling.mangle_url(dl['html_url'], options)
            ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """Query the rubygems.org JSON API for available versions (Python 2).

    Returns a list of (url, version, handler_name, confidence) tuples.
    """
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    # Best-effort fetch: any network/IO failure means "no candidates".
    try:
        fp = helpers.urlopen(url)
    except (urllib2.URLError, IOError):
        return []

    if not fp:
        return []

    versions = json.loads(fp.read())

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for entry in versions:
        upstream_pv = entry['number']
        pv = mangling.mangle_version(upstream_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, upstream_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """Query the CPAN API for releases of a distribution (Python 2).

    Returns a list of (url, version, handler_name, confidence) tuples.
    """
    remote_pkg = options["data"]

    # Defaults to CPAN mangling rules
    options.setdefault("versionmangle", ["cpan", "gentoo"])

    url = "http://search.cpan.org/api/dist/%s" % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    # Best-effort fetch: any network/IO failure means "no candidates".
    try:
        fp = helpers.urlopen(url)
    except (urllib2.URLError, IOError):
        return []

    if not fp:
        return []

    data = json.loads(fp.read())

    if "releases" not in data:
        return []

    ret = []
    for release in data["releases"]:
        # if release['status'] == 'testing':
        #     continue
        up_pv = release["version"]
        pv = mangling.mangle_version(up_pv, options)

        # "v"-prefixed upstream versions compare with standard rules;
        # anything else goes through the CPAN-specific comparison.
        if up_pv.startswith("v"):
            filtered = helpers.version_filtered(cp, ver, pv)
        else:
            m_pv = cpan_mangle_version(up_pv)
            filtered = helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp)
        if filtered:
            continue

        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
            release["cpanid"][0],
            release["cpanid"][0:1],
            release["cpanid"],
            release["archive"],
        )
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """Query the CPAN API for releases of a distribution.

    Returns a list of (url, version, handler_name, confidence) tuples.
    """
    remote_pkg = options['data']

    # Defaults to CPAN mangling rules
    options.setdefault('versionmangle', ['cpan', 'gentoo'])

    url = 'http://search.cpan.org/api/dist/%s' % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    # Best-effort fetch: any network/IO failure means "no candidates".
    try:
        fp = helpers.urlopen(url)
    except (urllib.error.URLError, IOError):
        return []

    if not fp:
        return []

    data = json.loads(fp.read())

    if 'releases' not in data:
        return []

    ret = []
    for release in data['releases']:
        # if release['status'] == 'testing':
        #     continue
        up_pv = release['version']
        pv = mangling.mangle_version(up_pv, options)

        # "v"-prefixed upstream versions compare with standard rules;
        # anything else goes through the CPAN-specific comparison.
        if up_pv.startswith('v'):
            filtered = helpers.version_filtered(cp, ver, pv)
        else:
            m_pv = cpan_mangle_version(up_pv)
            filtered = helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp)
        if filtered:
            continue

        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            release['cpanid'][0],
            release['cpanid'][0:1],
            release['cpanid'],
            release['archive']
        )
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    """Recursively walk remote directory listings looking for new versions.

    ``steps`` is a list of (path-fragment, pattern) pairs; the first step is
    appended to ``url``, the listing is scanned (HTML or FTP), and remaining
    steps are recursed into.

    Returns a list of (url, version, handler_name, confidence) tuples.
    """
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]
    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    # Fix: use a raw string for the regex — ``\s`` in a plain literal is an
    # invalid escape sequence (SyntaxWarning/DeprecationWarning on modern
    # Python). Pattern value is unchanged.
    if re.search(r"<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        # Only leaf paths (no steps left) that differ from the original URL
        # become candidates; deeper paths are recursed into instead.
        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))
        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps,
                                           orig_url, options)
            versions.extend(ret)

    return versions
def scan_pkg(pkg, options):
    """List releases of a PEAR/PECL package from the channel's REST API.

    Returns a list of (url, version, handler_name, confidence) tuples.
    """
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel,
                                                           package.lower())

    output.einfo("Using: " + url)

    # Best-effort fetch: any network/IO failure means "no candidates".
    try:
        fp = helpers.urlopen(url)
    except (urllib.error.URLError, IOError):
        return []

    if not fp:
        return []

    dom = xml.dom.minidom.parseString(fp.read())

    ret = []
    for node in dom.getElementsByTagName("v"):
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """List releases of a PEAR/PECL package from the channel's REST API
    (Python 2).

    Returns a list of (url, version, handler_name, confidence) tuples.
    """
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel,
                                                           package.lower())

    output.einfo("Using: " + url)

    # Best-effort fetch: any network/IO failure means "no candidates".
    try:
        fp = helpers.urlopen(url)
    except (urllib2.URLError, IOError):
        return []

    if not fp:
        return []

    dom = xml.dom.minidom.parseString(fp.read())

    ret = []
    for node in dom.getElementsByTagName("v"):
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
def scan_pkg(pkg, options):
    """Query the PyPI JSON API for releases of a package.

    Returns a list of (urls, version, handler_name, confidence) tuples,
    where ``urls`` is a space-joined string of release file URLs.
    """
    package = options['data']

    output.einfo("Using PyPi API: " + package)

    url = 'https://pypi.python.org/pypi/%s/json' % package

    # Best-effort fetch: any network/IO failure means "no candidates".
    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    # Fix: the original initialized ``ret = []`` twice; keep a single
    # initialization.
    ret = []
    for up_pv in data['releases']:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = [entry['url'] for entry in data['releases'][up_pv]]
        urls = " ".join([mangling.mangle_url(url, options) for url in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret