Example #1
def scan_pkg(pkg, options):
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for version in versions:
        up_pv = version['number']
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
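
All the handlers on this page share the same skeleton: build an API URL, fetch it through helpers.urlopen, return [] on any network failure, then mangle and filter each upstream version before emitting (url, version, handler, confidence) tuples. As a point of comparison, here is a minimal standalone sketch of just the fetch-and-parse step for the rubygems endpoint used above, written against the standard library only; euscan's helpers, mangling, HANDLER_NAME and CONFIDENCE are deliberately left out.

import json
import urllib.error
import urllib.request


def list_gem_versions(gem):
    """Return the upstream version strings published for a gem, or []."""
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem
    try:
        # Plain stdlib fetch; whatever extra behaviour euscan's
        # helpers.urlopen adds is not reproduced here.
        with urllib.request.urlopen(url, timeout=10) as fp:
            data = fp.read()
    except (urllib.error.URLError, IOError):
        return []
    # The API returns a JSON array of version objects with a 'number' key.
    return [entry['number'] for entry in json.loads(data)]

# Example: list_gem_versions('rails') returns a list of version strings.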
Example #2
def scan_pkg(pkg, options):
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for version in versions:
        up_pv = version['number']
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #3
def scan_pkg(pkg, options):
    remote_pkg = options['data']

    # Defaults to CPAN mangling rules
    if 'versionmangle' not in options:
        options['versionmangle'] = ['cpan', 'gentoo']

    url = 'http://search.cpan.org/api/dist/%s' % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []

    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        pv = mangling.mangle_version(up_pv, options)

        if up_pv.startswith('v'):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        # CPAN stores uploads under authors/id/<X>/<XY>/<CPANID>/<archive>
        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:2],
            version['cpanid'],
            version['archive']
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
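
The CPAN handler's main twist is the download URL: CPAN mirrors keep an author's uploads under authors/id/<first letter>/<first two letters>/<full PAUSE id>/. A small illustrative helper (not part of euscan, with made-up argument values) that builds that path from a CPAN id and archive name:

def cpan_mirror_url(cpanid, archive):
    """Build a mirror://cpan path for an author's upload.

    CPAN lays author directories out as authors/id/<X>/<XY>/<CPANID>/,
    e.g. authors/id/R/RJ/RJBS/.
    """
    return 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
        cpanid[0], cpanid[0:2], cpanid, archive)

# cpan_mirror_url('RJBS', 'Some-Module-1.00.tar.gz')
# -> 'mirror://cpan/authors/id/R/RJ/RJBS/Some-Module-1.00.tar.gz'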
Example #4
def scan_pkg(pkg, options):
    remote_pkg = options["data"]

    # Defaults to CPAN mangling rules
    if "versionmangle" not in options:
        options["versionmangle"] = ["cpan", "gentoo"]

    url = "http://search.cpan.org/api/dist/%s" % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if "releases" not in data:
        return []

    ret = []

    for version in data["releases"]:
        # if version['status'] == 'testing':
        #    continue

        up_pv = version["version"]
        pv = mangling.mangle_version(up_pv, options)

        if up_pv.startswith("v"):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        # CPAN stores uploads under authors/id/<X>/<XY>/<CPANID>/<archive>
        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
            version["cpanid"][0],
            version["cpanid"][0:2],
            version["cpanid"],
            version["archive"],
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #5
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)

        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
                                           options)
            versions.extend(ret)

    return versions
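
scan_directory_recursive walks one step of the URL pattern at a time: it appends the next path component, fetches the listing, hands HTML listings to scan_html and FTP listings to scan_ftp, and only emits versions once the last step has been consumed, recursing otherwise. A rough sketch of what an HTML-listing step can look like, using only re; this is an illustration, not euscan's actual scan_html:

import re


def extract_versioned_links(html, pattern):
    """Return (captured_version, href) pairs for links matching pattern."""
    results = []
    # Pull href targets out of the directory listing ...
    for match in re.finditer(r'<\s*a\s+[^>]*href\s*=\s*["\']([^"\']+)["\']',
                             html, re.I):
        href = match.group(1)
        # ... and keep those whose name matches the version pattern.
        m = re.match(pattern, href)
        if m:
            results.append((m.group(1), href))
    return results

# extract_versioned_links('<a href="foo-1.2.tar.gz">foo</a>',
#                         r'foo-([\d.]+)\.tar\.gz')
# -> [('1.2', 'foo-1.2.tar.gz')]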
Example #6
def scan(cpv, url):
    cp, ver, rev = portage.pkgsplit(cpv)
    pkg = guess_package(cp, url)

    orig_url = url
    url = 'http://search.cpan.org/api/dist/%s' % pkg

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []

    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        up_pv = cpan_trim_version(up_pv)
        pv = gentoo_mangle_version(up_pv)
        up_ver = cpan_mangle_version(ver)

        if helpers.version_filtered(cp, up_ver, up_pv, cpan_vercmp):
            continue

        # CPAN stores uploads under authors/id/<X>/<XY>/<CPANID>/<archive>
        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:2],
            version['cpanid'],
            version['archive']
        )

        if url == orig_url:
            continue

        ret.append((url, pv))

    return ret
Example #7
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)

        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
                                           options)
            versions.extend(ret)

    return versions
Example #8
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url):
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    euscan.output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue

        if not url.endswith('/') and not path.startswith('/'):
            path = url + '/' + path
        else:
            path = url + path

        if not steps and path not in orig_url:
            versions.append((path, pv))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url)
            versions.extend(ret)

    return versions
Example #9
def scan(cpv, url):
    'http://guides.rubygems.org/rubygems-org-api/#gemversion'

    gem = guess_gem(cpv, url)
    if not gem:
        euscan.output.eerror("Can't guess gem name using %s and %s" % \
            (cpv, url))
        return []

    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    if not versions:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []

    for version in versions:
        up_pv = version['number']
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        ret.append((url, pv))

    return ret
Example #10
def scan_pkg(pkg, options):
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel,
                                                           package.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        url = mangling.mangle_url(url, options)

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
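
For PEAR/PECL channels the version list comes back as XML rather than JSON: the allreleases.xml document wraps each release's version number in a <v> element, which the handler collects with xml.dom.minidom. A self-contained sketch of just that parsing step, run against a trimmed-down, made-up stand-in for the real response:

import xml.dom.minidom

# Minimal stand-in for a channel's allreleases.xml document.
sample = """<a>
  <r><v>1.2.1</v><s>stable</s></r>
  <r><v>1.2.0</v><s>stable</s></r>
</a>"""

dom = xml.dom.minidom.parseString(sample)
# Every <v> element holds one upstream version string.
versions = [node.childNodes[0].data for node in dom.getElementsByTagName("v")]
print(versions)  # ['1.2.1', '1.2.0']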
Example #11
def scan(cpv, url):
    pkg, channel = guess_package_and_channel(cpv, url)

    orig_url = url
    url = "http://%s/rest/r/%s/allreleases.xml" % (channel, pkg.lower())

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    cp, ver, rev = portage.pkgsplit(cpv)

    for node in nodes:
        version = node.childNodes[0].data
        if helpers.version_filtered(cp, ver, version):
            continue

        url = "http://%s/get/%s-%s.tgz" % (channel, pkg, version)

        if url == orig_url:
            continue

        ret.append((url, version))

    return ret
Example #12
def scan(cpv, url):
    cp, ver, rev = portage.pkgsplit(cpv)
    pkg, channel = guess_package_and_channel(cp, url)

    orig_url = url
    url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s/get/%s-%s.tgz' % (channel, pkg, up_pv)

        if url == orig_url:
            continue

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #13
def scan_pkg(pkg, options):
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel, package.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        url = mangling.mangle_url(url, options)

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #14
def handle_directory_patterns(base, file_pattern):
    r"""
    Directory pattern matching
    e.g.: base: ftp://ftp.nessus.org/pub/nessus/nessus-([\d\.]+)/src/
          file_pattern: nessus-core-([\d\.]+)\.tar\.gz
    """
    splitted = base.split("/")
    i = 0
    basedir = []
    for elem in splitted:
        if re.search(is_pattern, elem):
            break
        basedir.append(elem)
        i += 1
    basedir = "/".join(basedir)
    directory_pattern = splitted[i]
    final = "/".join(splitted[i + 1:])

    try:
        fp = helpers.urlopen(basedir)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    if basedir.startswith("ftp://"):
        scan_data = generic.scan_ftp(data, basedir, directory_pattern)
    else:
        scan_data = generic.scan_html(data, basedir, directory_pattern)

    return [("/".join((basedir, path, final)), file_pattern)
            for _, path in scan_data]
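
handle_directory_patterns cuts the base URL at the first path component that looks like a regular expression: everything before it is fetched as a directory listing, the pattern component selects matching entries, and the remainder is glued back onto each matching path. A minimal sketch of that splitting step; the is_pattern test below is a crude stand-in for euscan's own definition:

import re

# Crude "contains regex syntax" test; euscan defines its own is_pattern.
is_pattern = re.compile(r'[\\^$[\](){}?*+|]')


def split_pattern_url(base):
    """Split a URL into (literal prefix, pattern component, remainder)."""
    parts = base.split("/")
    for i, elem in enumerate(parts):
        if is_pattern.search(elem):
            return "/".join(parts[:i]), parts[i], "/".join(parts[i + 1:])
    return base, "", ""

# split_pattern_url(r"ftp://ftp.nessus.org/pub/nessus/nessus-([\d.]+)/src/")
# -> ('ftp://ftp.nessus.org/pub/nessus', 'nessus-([\d.]+)', 'src/')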
Example #15
def handle_directory_patterns(base, file_pattern):
    r"""
    Directory pattern matching
    e.g.: base: ftp://ftp.nessus.org/pub/nessus/nessus-([\d\.]+)/src/
          file_pattern: nessus-core-([\d\.]+)\.tar\.gz
    """
    splitted = base.split("/")
    i = 0
    basedir = []
    for elem in splitted:
        if re.search(is_pattern, elem):
            break
        basedir.append(elem)
        i += 1
    basedir = "/".join(basedir)
    directory_pattern = splitted[i]
    final = "/".join(splitted[i + 1:])

    try:
        fp = helpers.urlopen(basedir)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    if basedir.startswith("ftp://"):
        scan_data = generic.scan_ftp(data, basedir, directory_pattern)
    else:
        scan_data = generic.scan_html(data, basedir, directory_pattern)

    return [("/".join((basedir, path, final)), file_pattern)
            for _, path in scan_data]
Example #16
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using PyPi API: " + package)

    url = 'https://pypi.python.org/pypi/%s/json' % package

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in data['releases']:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = [entry['url'] for entry in data['releases'][up_pv]]
        urls = " ".join([mangling.mangle_url(url, options)
                         for url in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
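
The PyPI handler reads a single JSON document per project: data['releases'] maps each released version string to its list of files, so every version's download URLs come from one request. A standalone sketch of that lookup against the same endpoint used above, with euscan's mangling and filtering omitted:

import json
import urllib.error
import urllib.request


def pypi_release_urls(package):
    """Map each released version of a PyPI package to its file URLs."""
    url = 'https://pypi.python.org/pypi/%s/json' % package
    try:
        with urllib.request.urlopen(url, timeout=10) as fp:
            data = json.loads(fp.read())
    except (urllib.error.URLError, IOError, ValueError):
        return {}
    # 'releases' maps version -> list of file entries, each with a 'url'.
    return {up_pv: [entry['url'] for entry in files]
            for up_pv, files in data.get('releases', {}).items()}

# Example: pypi_release_urls('requests') returns a {version: [file urls]} dict.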
Example #17
def scan(cpv, url):
    'http://guides.rubygems.org/rubygems-org-api/#gemversion'

    gem = guess_gem(cpv, url)
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    if not versions:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []

    for version in versions:
        version = version['number']
        if helpers.version_filtered(cp, ver, version):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, version)
        ret.append((url, version))

    return ret