Esempio n. 1
0
def scan_url(pkg, url, options):
    """Scan github's downloads API for new versions.

    See http://developer.github.com/v3/repos/downloads/
    """
    user, project, filename = guess_package(pkg.cpv, url)

    # The version must appear in the tarball name, otherwise we cannot
    # build a matching pattern.
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        return

    # Turn the filename into a regexp: escape everything, then replace
    # the (escaped) version with a capture group.
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    escaped = re.escape(filename).replace(re.escape(ver), '(.*?)')
    fnre = re.compile('^%s$' % escaped)

    output.einfo("Using github API for: project=%s user=%s filename=%s" %
                 (project, user, filename))

    api_url = 'https://api.github.com/repos/%s/%s/downloads' % (user, project)
    dls = json.load(urllib2.urlopen(api_url))

    ret = []
    for dl in dls:
        match = fnre.match(dl['name'])
        if not match:
            continue

        pv = mangling.mangle_version(match.group(1), options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        dl_url = mangling.mangle_url(dl['html_url'], options)
        ret.append((dl_url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 2
0
def scan_url(pkg, url, options):
    """Scan a BerliOS project's file list for new upstream tarballs."""
    output.einfo("Using BerliOS handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    project, filename = re.search(berlios_regex, url).groups()

    # The file list is keyed by a numeric group id, scraped from the
    # project's overview page.
    overview = urllib.request.urlopen(
        "http://developer.berlios.de/projects/%s" % project).read()
    project_id = re.search(r"/project/filelist.php\?group_id=(\d+)",
                           overview).group(1)

    base_url = ("http://developer.berlios.de/project/filelist.php?group_id=%s"
                % project_id)

    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

    ret = []
    for found_url, pv, _, _ in url_scan(pkg, base_url, file_pattern):
        # Rewrite "prdownload" links into direct "download" links.
        direct = found_url.replace("prdownload", "download")
        ret.append((direct, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 3
0
def scan_url(pkg, url, options):
    """List a project's downloads through the github API.

    See http://developer.github.com/v3/repos/downloads/
    """
    user, project, filename = guess_package(pkg.cpv, url)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    # Without the version in the filename there is nothing to match on.
    if ver not in filename:
        return

    # Build a regexp from the filename with the version replaced by a
    # capture group.
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile(
        '^%s$' % re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: project=%s user=%s filename=%s" %
                 (project, user, filename))

    api = 'https://api.github.com/repos/%s/%s/downloads' % (user, project)
    downloads = json.load(urllib.request.urlopen(api))

    ret = []
    for entry in downloads:
        match = fnre.match(entry['name'])
        if not match:
            continue
        pv = mangling.mangle_version(match.group(1), options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        mangled = mangling.mangle_url(entry['html_url'], options)
        ret.append((mangled, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 4
0
def scan_pkg(pkg, options):
    """Find new releases of the configured package on freecode.com."""
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package = options['data'].strip()

    output.einfo("Using FreeCode handler: " + package)

    listing = urllib.urlopen(
        "http://freecode.com/projects/%s/releases" % package).read()

    releases = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
        listing)

    ret = []
    for release_id, up_pv in releases:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        # Follow: release page -> intermediate /urls/ page -> final URL.
        release_page = urllib.urlopen(
            "http://freecode.com/projects/%s/releases/%s" %
            (package, release_id)).read()
        download_page = re.findall(r'<a href="(/urls/[^"]+)"',
                                   release_page)[0]
        final_page = urllib.urlopen(
            "http://freecode.com%s" % download_page).read()
        url = re.findall(
            r'In case it doesn\'t, click here: <a href="([^"]+)"',
            final_page)[0]
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 5
0
def scan_pkg(pkg, options):
    """Query the PyPI XMLRPC API for releases of the configured package."""
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    # Process versions in reverse order.
    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        mangled = [mangling.mangle_url(infos['url'], options)
                   for infos in client.release_urls(package, up_pv)]
        ret.append((" ".join(mangled), pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 6
0
def scan_pkg(pkg, options):
    """Enumerate package releases through PyPI's XMLRPC interface."""
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    proxy = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = proxy.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        release_urls = proxy.release_urls(package, up_pv)
        joined = " ".join(mangling.mangle_url(infos['url'], options)
                          for infos in release_urls)
        ret.append((joined, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 7
0
def scan_url(pkg, url, options):
    """Scan BerliOS for new versions of the package behind *url*."""
    output.einfo("Using BerliOS handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    project, filename = re.search(berlios_regex, url).groups()

    # Scrape the numeric group id from the project overview page; the
    # file list page is keyed by it.
    overview = urllib.urlopen(
        "http://developer.berlios.de/projects/%s" % project).read()
    project_id = re.search(r"/project/filelist.php\?group_id=(\d+)",
                           overview).group(1)

    base_url = ("http://developer.berlios.de/project/filelist.php?group_id=%s"
                % project_id)

    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

    ret = []
    for found_url, pv, _, _ in url_scan(pkg, base_url, file_pattern):
        direct = found_url.replace("prdownload", "download")
        ret.append((direct, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 8
0
def scan(cpv, url):
    """Scan PyPI (XMLRPC) for new versions of *cpv*.

    See http://wiki.python.org/moin/PyPiXmlRpc
    """
    package = guess_package(cpv, url)

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []
    for up_pv in versions:
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue
        release_urls = client.release_urls(package, up_pv)
        joined = " ".join(infos['url'] for infos in release_urls)
        ret.append((joined, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 9
0
File: deb.py Project: oxr463/euscan
def scan_pkg(pkg, options):
    """Look up new versions of a package in a Debian Packages index."""
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    packages_url, package_name = options['data'].strip().split(" ", 1)

    output.einfo("Using Debian Packages: " + packages_url)

    content = urllib.urlopen(packages_url).read()

    # Support for .gz and .bz2 Packages file
    if packages_url.endswith(".bz2"):
        content = bz2.decompress(content)
    if packages_url.endswith(".gz"):
        content = zlib.decompress(content, 16 + zlib.MAX_WBITS)

    # Stanzas are blank-line separated; keep versions of every stanza
    # whose Package field matches the requested name.
    result = []
    for package_info in content.split("\n\n"):
        package_line = re.search(r"^Package: (.*)$", package_info, re.M)
        version_line = re.search(r"^Version: (.*)$", package_info, re.M)
        if package_line and package_line.group(1) == package_name:
            if version_line:
                result.append(version_line.group(1))

    ret = []
    for up_pv in result:
        url = ""  # TODO: How to find the url?
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 10
0
def scan(cpv, url):
    """Scan github downloads for new versions of *cpv*.

    Uses the API described at
    http://developer.github.com/v3/repos/downloads/

    Yields (url, version, handler_name, confidence) tuples.
    """
    user, project, filename = guess_package(cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' % re.escape(filename).replace(re.escape(ver), '(.*?)'))

    # BUG FIX: the original referenced an undefined name `package` here
    # ('/'.join(package)), raising NameError at runtime; log the
    # components we actually have.
    output.einfo("Using github API for: " + '/'.join((user, project, filename)))

    dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' % (user, project))
    dls = json.load(dlreq)

    for dl in dls:
        m = fnre.match(dl['name'])

        if m:
            pv = helpers.gentoo_mangle_version(m.group(1))
            if helpers.version_filtered(cp, ver, pv):
                continue
            yield (dl['html_url'], pv, HANDLER_NAME, CONFIDENCE)
Esempio n. 11
0
def scan_pkg(pkg, options):
    """Find new releases of the configured package on freecode.com."""
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package = options['data'].strip()

    output.einfo("Using FreeCode handler: " + package)

    listing = str(urllib.request.urlopen(
        "http://freecode.com/projects/%s/releases" % package).read())

    releases = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
        listing)

    ret = []
    for release_id, up_pv in releases:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        # Follow: release page -> intermediate /urls/ page -> final URL.
        release_page = str(urllib.request.urlopen(
            "http://freecode.com/projects/%s/releases/%s" %
            (package, release_id)).read())
        download_page = re.findall(r'<a href="(/urls/[^"]+)"',
                                   release_page)[0]
        final_page = str(urllib.request.urlopen(
            "http://freecode.com%s" % download_page).read())
        url = re.findall(r'In case it doesn\'t, click here: <a href="([^"]+)"',
                         final_page)[0]
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 12
0
def scan_url(pkg, url, options):
    """Guess the CPAN distribution behind *url* and delegate to scan_pkg."""
    cp, _ver, _rev = portage.pkgsplit(pkg.cpv)
    remote_pkg = guess_package(cp, url)
    output.einfo("Using CPAN API: %s", remote_pkg)
    return scan_pkg(pkg, {'data': remote_pkg})
Esempio n. 13
0
def scan_url(pkg, url, options):
    """Resolve *url* to a CPAN distribution name and scan that distribution."""
    parts = portage.pkgsplit(pkg.cpv)
    remote_pkg = guess_package(parts[0], url)
    output.einfo("Using CPAN API: %s", remote_pkg)
    return scan_pkg(pkg, {"data": remote_pkg})
Esempio n. 14
0
def scan_pkg(pkg, options):
    """Query the CPAN API for new releases of a distribution.

    ``options['data']`` holds the remote distribution name.  Returns a
    list of (url, version, handler_name, confidence) tuples, or [] on
    any fetch/parse problem.
    """
    remote_pkg = options["data"]

    # Defaults to CPAN mangling rules
    if "versionmangle" not in options:
        options["versionmangle"] = ["cpan", "gentoo"]

    url = "http://search.cpan.org/api/dist/%s" % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    # CPAN-style spelling of the installed version, for comparisons below.
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if "releases" not in data:
        return []

    ret = []

    for version in data["releases"]:
        # if version['status'] == 'testing':
        #    continue

        up_pv = version["version"]
        pv = mangling.mangle_version(up_pv, options)

        # "v"-prefixed upstream versions are compared directly; others
        # are compared in CPAN version space using cpan_vercmp.
        if up_pv.startswith("v"):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        # NOTE(review): cpanid[0] and cpanid[0:1] are the same single
        # letter; the CPAN authors tree normally uses the first TWO
        # letters as the second path component (id/X/XY/XYZ) -- verify.
        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
            version["cpanid"][0],
            version["cpanid"][0:1],
            version["cpanid"],
            version["archive"],
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 15
0
def scan_pkg(pkg, options):
    """Query the CPAN API for new releases of a distribution.

    ``options['data']`` holds the remote distribution name.  Returns a
    list of (url, version, handler_name, confidence) tuples, or [] on
    any fetch/parse problem.
    """
    remote_pkg = options['data']

    # Defaults to CPAN mangling rules
    if 'versionmangle' not in options:
        options['versionmangle'] = ['cpan', 'gentoo']

    url = 'http://search.cpan.org/api/dist/%s' % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    # CPAN-style spelling of the installed version, for comparisons below.
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []

    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        pv = mangling.mangle_version(up_pv, options)

        # "v"-prefixed upstream versions are compared directly; others
        # are compared in CPAN version space using cpan_vercmp.
        if up_pv.startswith('v'):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        # NOTE(review): cpanid[0] and cpanid[0:1] are the same single
        # letter; the CPAN authors tree normally uses the first TWO
        # letters as the second path component (id/X/XY/XYZ) -- verify.
        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:1],
            version['cpanid'],
            version['archive']
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 16
0
def get_metadata(pkg):
    """Collect upstream-scan metadata for *pkg* from its metadata.xml.

    Prefers a local override under ./metadata/<category>/<name>/metadata.xml,
    falling back to the package's own metadata.  Returns a dict mapping
    handler type -> list of option dicts (each with at least 'type' and
    'data'), or {} when no metadata is available.
    """
    metadata = {}

    pkg_metadata = None

    meta_override = os.path.join('metadata', pkg.category, pkg.name,
                                 'metadata.xml')

    try:
        if os.path.exists(meta_override):
            pkg_metadata = MetaDataXML(meta_override)
            output.einfo('Using custom metadata: %s' % meta_override)
        if not pkg_metadata:
            pkg_metadata = pkg.metadata
    except Exception as e:
        output.ewarn('Error when fetching metadata: %s' % str(e))

    if not pkg_metadata:
        return {}

    # Support multiple remote-id and multiple watch
    # <watch> nodes: node text becomes the handler's 'data', XML
    # attributes become handler options; "type" selects the handler
    # (default: the generic "url" handler).
    for upstream in pkg_metadata._xml_tree.findall("upstream"):
        for node in upstream.findall("watch"):
            options = dict(node.attrib)
            options['data'] = node.text

            if "type" in options:
                handler = options['type']
            else:
                handler = "url"
                options['type'] = "url"

            # Mangle rules are stored as semicolon-separated lists.
            for key in ["versionmangle", "downloadurlmangle"]:
                value = options.get(key, None)
                if value:
                    options[key] = value.split(";")

            if handler not in metadata:
                metadata[handler] = []
            metadata[handler].append(options)

    # <remote-id> nodes: fill in missing 'data' on existing handler
    # entries, or create a minimal entry for handlers not seen above.
    for upstream in pkg_metadata._xml_tree.findall("upstream"):
        for node in upstream.findall("remote-id"):
            handler = node.attrib.get("type")
            if not handler:
                continue
            if handler in metadata:
                for i in range(len(metadata[handler])):
                    if not metadata[handler][i]['data']:
                        metadata[handler][i]['data'] = node.text
            else:
                metadata[handler] = [{'type': handler, 'data': node.text}]

    return metadata
Esempio n. 17
0
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    """Walk one step of a directory-listing scan, recursing into subdirs.

    *steps* is a list of (path-component, pattern) pairs; each call
    consumes the first step and recurses with the remainder.  Returns a
    list of (url, version, handler_name, confidence) tuples.
    """
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    # HTML listings are recognised by an <a href=...> tag; otherwise
    # fall back to FTP listing parsing for ftp:// urls.
    if re.search("<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)

        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        # Leaf step: record the hit unless it is the url we started from.
        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        # More steps remaining: descend into the matched directory.
        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
                                           options)
            versions.extend(ret)

    return versions
Esempio n. 18
0
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    """Walk one step of a directory-listing scan, recursing into subdirs.

    *steps* is a list of (path-component, pattern) pairs; each call
    consumes the first step and recurses with the remainder.  Returns a
    list of (url, version, handler_name, confidence) tuples.
    """
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    # HTML listings are recognised by an <a href=...> tag; otherwise
    # fall back to FTP listing parsing for ftp:// urls.
    if re.search("<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)

        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        # Leaf step: record the hit unless it is the url we started from.
        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        # More steps remaining: descend into the matched directory.
        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
                                           options)
            versions.extend(ret)

    return versions
Esempio n. 19
0
def scan(cpv, url):
    """Scan search.cpan.org for new releases of the distribution in *url*.

    Returns a list of (url, version, handler_name, confidence) tuples,
    or [] on any fetch/parse problem.
    """
    cp, ver, rev = portage.pkgsplit(cpv)
    pkg = guess_package(cp, url)

    orig_url = url
    url = 'http://search.cpan.org/api/dist/%s' % pkg

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []

    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        up_pv = cpan_trim_version(up_pv)
        pv = gentoo_mangle_version(up_pv)
        # Compare versions in CPAN version space.
        up_ver = cpan_mangle_version(ver)

        if helpers.version_filtered(cp, up_ver, up_pv, cpan_vercmp):
            continue

        # NOTE(review): cpanid[0] and cpanid[0:1] are the same single
        # letter; the CPAN authors tree normally uses the first TWO
        # letters as the second path component (id/X/XY/XYZ) -- verify.
        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:1],
            version['cpanid'],
            version['archive']
        )

        # Skip the release we already fetch.
        if url == orig_url:
            continue

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 20
0
def scan_url(pkg, url, options):
    """Find the gem name for *url* and delegate to scan_pkg.

    See http://guides.rubygems.org/rubygems-org-api/#gemversion
    """
    gem = guess_gem(pkg.cpv, url)

    if not gem:
        output.eerror("Can't guess gem name using %s and %s" %
                      (pkg.cpv, url))
        return []

    output.einfo("Using RubyGem API: %s" % gem)

    return scan_pkg(pkg, {'data': gem})
Esempio n. 21
0
def scan_url(pkg, url, options):
    """Delegate scanning of *url* to the RubyGems package handler.

    API reference: http://guides.rubygems.org/rubygems-org-api/#gemversion
    """
    gem = guess_gem(pkg.cpv, url)
    if not gem:
        msg = "Can't guess gem name using %s and %s" % (pkg.cpv, url)
        output.eerror(msg)
        return []
    output.einfo("Using RubyGem API: %s" % gem)
    return scan_pkg(pkg, {'data': gem})
Esempio n. 22
0
def scan_upstream_urls(cpv, urls, on_progress):
    """Scan every SRC_URI of *cpv* for upstream versions.

    *urls* maps filename -> list of urls; *on_progress*, when set, is
    called as on_progress(maxval, curval) to report progress.  Returns
    the filtered list of found versions.
    """
    versions = []

    maxval = len(urls) + 5
    curval = 1

    for filename in urls:
        curval += 1
        if on_progress:
            on_progress(maxval, curval)

        for url in urls[filename]:
            if not CONFIG['quiet'] and not CONFIG['format']:
                pp.uprint()
            output.einfo("SRC_URI is '%s'" % url)

            if '://' not in url:
                output.einfo("Invalid url '%s'" % url)
                continue

            # Try normal scan
            if CONFIG["scan-dir"]:
                try:
                    versions.extend(handlers.scan(cpv, url))
                except Exception as e:
                    # BUG FIX: `e.message` does not exist on most
                    # exception classes (and was removed in Python 3);
                    # str(e) is the portable spelling.
                    output.ewarn("Handler failed: [%s] %s"
                            % (e.__class__.__name__, str(e)))

            if versions and CONFIG['oneshot']:
                break

            # Brute Force
            if CONFIG["brute-force"] > 0:
                versions.extend(handlers.brute_force(cpv, url))

            if versions and CONFIG['oneshot']:
                break

    cp, ver, rev = portage.pkgsplit(cpv)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    result = filter_versions(cp, versions)

    curval += 1
    if on_progress:
        on_progress(maxval, curval)

    return result
Esempio n. 23
0
def get_metadata(pkg):
    """Load metadata for *pkg*, preferring a local metadata.xml override.

    NOTE(review): this excerpt ends here without returning `metadata`;
    it appears truncated relative to the fuller get_metadata variant.
    """
    metadata = {}

    pkg_metadata = None

    meta_override = os.path.join('metadata', pkg.category, pkg.name,
                                 'metadata.xml')

    try:
        if os.path.exists(meta_override):
            pkg_metadata = MetaData(meta_override)
            output.einfo('Using custom metadata: %s' % meta_override)
        if not pkg_metadata:
            pkg_metadata = pkg.metadata
    # BUG FIX: `except Exception, e:` is Python-2-only syntax; the
    # `as` form is valid on Python 2.6+ and Python 3, and matches the
    # other get_metadata variant.
    except Exception as e:
        output.ewarn('Error when fetching metadata: %s' % str(e))
Esempio n. 24
0
def scan_url(pkg, url, options):
    """Scan SourceForge (via the Debian QA mirror) for new tarballs."""
    output.einfo("Using SourceForge handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    project, filename = re.search(
        "mirror://sourceforge/([^/]+)/(?:.*/)?([^/]+)", url).groups()

    base_url = "http://qa.debian.org/watch/sf.php/%s" % project
    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

    return [(found, pv, HANDLER_NAME, CONFIDENCE)
            for found, pv, _, _ in url_scan(pkg, base_url, file_pattern)]
Esempio n. 25
0
def scan_url(pkg, url, options):
    """Scan a Google Code project's download list for new versions."""
    output.einfo("Using Google Code handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package_name = re.match(package_name_regex, url).group(1)
    base_url = "http://code.google.com/p/%s/downloads/list" % package_name

    template = url.split("/")[-1].replace(ver, "${PV}")
    file_pattern = regex_from_template(template)

    return [(found, pv, HANDLER_NAME, CONFIDENCE)
            for found, pv, _, _ in url_scan(pkg, base_url, file_pattern)]
Esempio n. 26
0
def scan_url(pkg, url, options):
    """Check the Google Code download list for newer releases."""
    output.einfo("Using Google Code handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package_name = re.match(package_name_regex, url).group(1)
    base_url = "http://code.google.com/p/%s/downloads/list" % package_name

    basename = url.split("/")[-1]
    file_pattern = regex_from_template(basename.replace(ver, "${PV}"))

    matches = url_scan(pkg, base_url, file_pattern)

    ret = []
    for found_url, pv, _, _ in matches:
        ret.append((found_url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 27
0
def scan_url(pkg, urls, options, on_progress=None):
    """Scan every SRC_URI of *pkg* with the best matching url handler.

    *urls* maps filename -> list of urls; *options* is a list of option
    dicts passed to the handler; *on_progress*, when set, is called with
    increment= keyword to report progress.  Returns the list of found
    versions.
    """
    versions = []

    if on_progress:
        progress_available = 70
        num_urls = sum([len(urls[fn]) for fn in urls])
        if num_urls > 0:
            progress_increment = progress_available / num_urls
        else:
            progress_increment = 0

    for filename in urls:
        for url in urls[filename]:
            if on_progress and progress_available > 0:
                on_progress(increment=progress_increment)
                progress_available -= progress_increment

            output.einfo("SRC_URI is '%s'" % url)

            if '://' not in url:
                output.einfo("Invalid url '%s'" % url)
                continue

            try:
                url_handler = find_best_handler('url', pkg, url)
                if url_handler:
                    for o in options:
                        versions += url_handler.scan_url(pkg, url, o)
                else:
                    output.eerror("Can't find a suitable handler!")
            except Exception as e:
                # BUG FIX: most exception classes have no `.message`
                # attribute (removed in Python 3); use str(e).
                output.ewarn(
                    "Handler failed: [%s] %s" %
                    (e.__class__.__name__, str(e))
                )

            if versions and CONFIG['oneshot']:
                break

    if on_progress and progress_available > 0:
        on_progress(increment=progress_available)

    return versions
Esempio n. 28
0
def scan(cpv, url):
    """Check rubygems.org for new versions of the gem behind *url*.

    API: http://guides.rubygems.org/rubygems-org-api/#gemversion
    """
    gem = guess_gem(cpv, url)
    if not gem:
        output.eerror("Can't guess gem name using %s and %s" %
                      (cpv, url))
        return []

    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    versions = json.loads(fp.read())
    if not versions:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []
    for version in versions:
        up_pv = version['number']
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue
        gem_url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        ret.append((gem_url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 29
0
def scan_pkg(pkg, options):
    """Scan the Gnome download cache (cache.json) for new versions.

    ``options['data']`` holds the Gnome package name.  Returns a list of
    (url, version, handler_name, confidence) tuples.
    """
    package = options['data']

    output.einfo("Using Gnome json cache: " + package)

    fp = urllib.request.urlopen('/'.join(
        [GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    # BUG FIX: json.loads() no longer accepts an `encoding` argument
    # (ignored since 3.1, removed in Python 3.9).
    cache = json.loads(content)

    # Only cache format version 4 is understood here.
    if cache[0] != 4:
        output.eerror('Unknow cache format detected')
        return []

    versions = cache[2][package]

    if not versions:
        return []

    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        # Prefer the best available compression for this release.
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join(
                    [GNOME_URL_SOURCE, package, up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            # BUG FIX: without this `continue` the code fell through and
            # appended `url` left over from a previous iteration (or
            # raised NameError on the first release).
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 30
0
def scan_url(pkg, url, options):
    """Look for new SourceForge tarballs through qa.debian.org's mirror."""
    output.einfo("Using SourceForge handler")

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    match = re.search("mirror://sourceforge/([^/]+)/(?:.*/)?([^/]+)", url)
    project, filename = match.groups()

    base_url = "http://qa.debian.org/watch/sf.php/%s" % project
    file_pattern = regex_from_template(filename.replace(ver, "${PV}"))

    ret = []
    for found_url, pv, _, _ in url_scan(pkg, base_url, file_pattern):
        ret.append((found_url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 31
0
File: php.py Project: mgorny/euscan
def scan(cpv, url):
    """Scan a PEAR/PECL channel's REST API for new releases.

    Returns a list of (url, version, handler_name, confidence) tuples,
    or [] on any fetch/parse problem.
    """
    cp, ver, rev = portage.pkgsplit(cpv)
    pkg, channel = guess_package_and_channel(cp, url)

    orig_url = url
    url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    # Each <v> element holds one released version string.
    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s/get/%s-%s.tgz' % (channel, pkg, up_pv)

        # Skip the release we already fetch.
        if url == orig_url:
            continue

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 32
0
def scan_pkg(pkg, options):
    """Scan the Gnome download cache (cache.json) for new versions.

    ``options['data']`` holds the Gnome package name.  Returns a list of
    (url, version, handler_name, confidence) tuples.
    """
    package = options['data']

    output.einfo("Using Gnome json cache: " + package)

    fp = urllib2.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    cache = json.loads(content, encoding='ascii')

    # Only cache format version 4 is understood here.
    if cache[0] != 4:
        output.eerror('Unknow cache format detected')
        return []

    versions = cache[2][package]

    if not versions:
        return []

    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        # Prefer the best available compression for this release.
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join([GNOME_URL_SOURCE, package,
                                 up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            # BUG FIX: without this `continue` the code fell through and
            # appended `url` left over from a previous iteration (or
            # raised NameError on the first release).
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 33
0
def scan_pkg(pkg, options):
    """List releases of a PEAR/PECL package from its REST channel."""
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel,
                                                           package.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    dom = xml.dom.minidom.parseString(fp.read())

    ret = []
    # Each <v> element holds one released version string.
    for node in dom.getElementsByTagName("v"):
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        tarball = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        ret.append((mangling.mangle_url(tarball, options),
                    pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 34
0
File: php.py Project: EvaSDK/euscan
def scan_pkg(pkg, options):
    """List releases of a PEAR/PECL package from its REST channel.

    ``options['data']`` is the package name, ``options['type']`` the
    channel name.  Returns (url, version, handler_name, confidence)
    tuples, or [] on any fetch problem.
    """
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel, package.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    # Each <v> element holds one released version string.
    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        url = mangling.mangle_url(url, options)

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Esempio n. 35
0
def scan_pkg(pkg, options):
    """Look for new releases of a package on PyPI.

    Queries the PyPI JSON API for the package named in ``options['data']``
    and returns a list of ``(urls, version, HANDLER_NAME, CONFIDENCE)``
    tuples, where ``urls`` is a space-joined string of every download URL
    for that release.  Returns an empty list on network failure or when
    the response has no ``releases`` key.
    """
    package = options['data']

    output.einfo("Using PyPi API: " + package)

    url = 'https://pypi.python.org/pypi/%s/json' % package

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    # CLEANUP: ret was initialized twice in the original; once is enough.
    ret = []
    for up_pv in data['releases']:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = [entry['url'] for entry in data['releases'][up_pv]]
        urls = " ".join([mangling.mangle_url(url, options)
                         for url in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Esempio n. 36
0
def scan_url(pkg, url, options):
    """Scan an upstream download location for new versions of *pkg*.

    When directory scanning is enabled, builds a version template from
    the resolved URL and walks the upstream directory tree; when that is
    disabled or finds nothing, falls back to brute forcing candidate
    URLs.  Returns a list of result tuples (possibly empty).
    """
    # BUGFIX: ret was only bound inside the scan-dir branch, so the
    # `if not ret:` check below raised UnboundLocalError whenever
    # CONFIG["scan-dir"] was false.
    ret = []

    if CONFIG["scan-dir"]:
        for bu in SCANDIR_BLACKLIST_URLS:
            if re.match(bu, url):
                output.einfo("%s is blacklisted by rule %s" % (url, bu))
                return []

        resolved_url = helpers.parse_mirror(url)
        if not resolved_url:
            return []

        cp, ver, rev = portage.pkgsplit(pkg.cpv)

        # 'Hack' for _beta/_rc versions where _ is used instead of -
        if ver not in resolved_url:
            newver = helpers.version_change_end_sep(ver)
            if newver and newver in resolved_url:
                output.einfo(
                    "Version: using %s instead of %s" % (newver, ver)
                )
                ver = newver

        template = helpers.template_from_url(resolved_url, ver)
        if '${' not in template:
            output.einfo(
                "Url doesn't seems to depend on version: %s not found in %s" %
                (ver, resolved_url)
            )
            return []
        else:
            output.einfo("Scanning: %s" % template)

        steps = helpers.generate_scan_paths(template)
        ret = scan_directory_recursive(cp, ver, rev, "", steps, url, options)

    if not ret:
        ret = brute_force(pkg, url)

    return ret
Esempio n. 37
0
def scan_url(pkg, url, options):
    """Scan an upstream download location for new versions of *pkg*.

    When directory scanning is enabled, builds a version template from
    the resolved URL and walks the upstream directory tree; when that is
    disabled or finds nothing, falls back to brute forcing candidate
    URLs.  Returns a list of result tuples (possibly empty).
    """
    # BUGFIX: ret was only bound inside the scan-dir branch, so the
    # `if not ret:` check below raised UnboundLocalError whenever
    # CONFIG["scan-dir"] was false.
    ret = []

    if CONFIG["scan-dir"]:
        for bu in SCANDIR_BLACKLIST_URLS:
            if re.match(bu, url):
                output.einfo("%s is blacklisted by rule %s" % (url, bu))
                return []

        resolved_url = helpers.parse_mirror(url)
        if not resolved_url:
            return []

        cp, ver, rev = portage.pkgsplit(pkg.cpv)

        # 'Hack' for _beta/_rc versions where _ is used instead of -
        if ver not in resolved_url:
            newver = helpers.version_change_end_sep(ver)
            if newver and newver in resolved_url:
                output.einfo(
                    "Version: using %s instead of %s" % (newver, ver)
                )
                ver = newver

        template = helpers.template_from_url(resolved_url, ver)
        if '${' not in template:
            output.einfo(
                "Url doesn't seems to depend on version: %s not found in %s" %
                (ver, resolved_url)
            )
            return []
        else:
            output.einfo("Scanning: %s" % template)

        steps = helpers.generate_scan_paths(template)
        ret = scan_directory_recursive(cp, ver, rev, "", steps, url, options)

    if not ret:
        ret = brute_force(pkg, url)

    return ret
Esempio n. 38
0
File: url.py Progetto: EvaSDK/euscan
def scan_pkg(pkg, options):
    """Scan *pkg* using the base URL and file pattern from watch data."""
    output.einfo("Using watch data")
    watch_base, watch_pattern = read_options(options)
    return process_scan(pkg, watch_base, watch_pattern, options)
Esempio n. 39
0
def scan_pkg(pkg, options):
    """Scan *pkg* against the location described by its watch data."""
    output.einfo("Using watch data")
    opts = read_options(options)
    return process_scan(pkg, opts[0], opts[1], options)
Esempio n. 40
0
def brute_force(pkg, url):
    """Guess new upstream versions of *pkg* by probing generated URLs.

    Derives candidate versions from the current ebuild version, substitutes
    each into a template built from *url*, and keeps every candidate whose
    URL actually exists.  Returns a list of
    ``[url, version, BRUTEFORCE_HANDLER_NAME, confidence]`` entries, or an
    empty list when brute forcing is disabled, blacklisted, or the server
    appears to answer every request (broken-server watermark).
    """
    if CONFIG["brute-force"] == 0:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    url = helpers.parse_mirror(url)
    if not url:
        return []

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, cp):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, url):
            # BUGFIX: report the URL that matched the rule, not the package
            output.einfo("%s is blacklisted by rule %s" % (url, bp))
            return []

    output.einfo("Generating version from " + ver)

    components = helpers.split_version(ver)
    versions = helpers.gen_versions(components, CONFIG["brute-force"])

    # Keep only candidates strictly newer than the current version.
    # BUGFIX: the original removed elements from `versions` while
    # iterating over it, which silently skips the element following
    # each removal.
    versions = [v for v in versions
                if helpers.vercmp(cp, ver, helpers.join_version(v)) < 0]

    if not versions:
        output.einfo("Can't generate new versions from " + ver)
        return []

    template = helpers.template_from_url(url, ver)

    if '${PV}' not in template:
        output.einfo(
            "Url doesn't seems to depend on full version: %s not found in %s" %
            (ver, url))
        return []
    else:
        output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    done = []

    # `versions` may grow while we walk it (recursive mode), so iterate
    # by index rather than with a for-loop.
    while i < len(versions):
        components = versions[i]
        i += 1
        # BUGFIX: `done` holds tuples, but the original tested the list
        # `components` for membership — a list never equals a tuple, so
        # the dedup check could never fire.
        if tuple(components) in done:
            continue
        done.append(tuple(components))

        version = helpers.join_version(components)

        if helpers.version_filtered(cp, ver, version):
            continue

        try_url = helpers.url_from_template(template, version)
        infos = helpers.tryurl(try_url, template)

        if not infos:
            continue
        confidence = confidence_score(try_url, url,
                                      minimum=BRUTEFORCE_CONFIDENCE)
        result.append([try_url, version, BRUTEFORCE_HANDLER_NAME, confidence])

        # Too many hits means the server answers 200 for everything.
        if len(result) > CONFIG['brute-force-false-watermark']:
            output.einfo(
                "Broken server detected ! Skipping brute force."
            )
            return []

        if CONFIG["brute-force-recursive"]:
            for v in helpers.gen_versions(list(components),
                                          CONFIG["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if CONFIG["oneshot"]:
            break

    return result
Esempio n. 41
0
def brute_force(pkg, url):
    """Guess new upstream versions of *pkg* by probing generated URLs.

    Derives candidate versions from the current ebuild version, substitutes
    each into a template built from *url*, and keeps every candidate whose
    URL actually exists.  Returns a list of
    ``[url, version, BRUTEFORCE_HANDLER_NAME, confidence]`` entries, or an
    empty list when brute forcing is disabled, blacklisted, or the server
    appears to answer every request (broken-server watermark).
    """
    if CONFIG["brute-force"] == 0:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    url = helpers.parse_mirror(url)
    if not url:
        return []

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, cp):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, url):
            # BUGFIX: report the URL that matched the rule, not the package
            output.einfo("%s is blacklisted by rule %s" % (url, bp))
            return []

    output.einfo("Generating version from " + ver)

    components = helpers.split_version(ver)
    versions = helpers.gen_versions(components, CONFIG["brute-force"])

    # Keep only candidates strictly newer than the current version.
    # BUGFIX: the original removed elements from `versions` while
    # iterating over it, which silently skips the element following
    # each removal.
    versions = [v for v in versions
                if helpers.vercmp(cp, ver, helpers.join_version(v)) < 0]

    if not versions:
        output.einfo("Can't generate new versions from " + ver)
        return []

    template = helpers.template_from_url(url, ver)

    if '${PV}' not in template:
        output.einfo(
            "Url doesn't seems to depend on full version: %s not found in %s" %
            (ver, url))
        return []
    else:
        output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    done = []

    # `versions` may grow while we walk it (recursive mode), so iterate
    # by index rather than with a for-loop.
    while i < len(versions):
        components = versions[i]
        i += 1
        # BUGFIX: `done` holds tuples, but the original tested the list
        # `components` for membership — a list never equals a tuple, so
        # the dedup check could never fire.
        if tuple(components) in done:
            continue
        done.append(tuple(components))

        version = helpers.join_version(components)

        if helpers.version_filtered(cp, ver, version):
            continue

        try_url = helpers.url_from_template(template, version)
        infos = helpers.tryurl(try_url, template)

        if not infos:
            continue
        confidence = confidence_score(try_url,
                                      url,
                                      minimum=BRUTEFORCE_CONFIDENCE)
        result.append([try_url, version, BRUTEFORCE_HANDLER_NAME, confidence])

        # Too many hits means the server answers 200 for everything.
        if len(result) > CONFIG['brute-force-false-watermark']:
            output.einfo("Broken server detected ! Skipping brute force.")
            return []

        if CONFIG["brute-force-recursive"]:
            for v in helpers.gen_versions(list(components),
                                          CONFIG["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if CONFIG["oneshot"]:
            break

    return result