Example #1
File: cpan.py, Project: voyageur/euscan
def scan_pkg(pkg, options):
    remote_pkg = options['data']

    # Defaults to CPAN mangling rules
    if 'versionmangle' not in options:
        options['versionmangle'] = ['cpan', 'gentoo']

    url = 'http://search.cpan.org/api/dist/%s' % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []

    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        pv = mangling.mangle_version(up_pv, options)

        if up_pv.startswith('v'):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:1],
            version['cpanid'],
            version['archive']
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #2
File: cpan.py, Project: EvaSDK/euscan
def scan_pkg(pkg, options):
    remote_pkg = options["data"]

    # Defaults to CPAN mangling rules
    if "versionmangle" not in options:
        options["versionmangle"] = ["cpan", "gentoo"]

    url = "http://search.cpan.org/api/dist/%s" % remote_pkg
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision
    m_ver = cpan_mangle_version(ver)

    output.einfo("Using CPAN API: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if "releases" not in data:
        return []

    ret = []

    for version in data["releases"]:
        # if version['status'] == 'testing':
        #    continue

        up_pv = version["version"]
        pv = mangling.mangle_version(up_pv, options)

        if up_pv.startswith("v"):
            if helpers.version_filtered(cp, ver, pv):
                continue
        else:
            m_pv = cpan_mangle_version(up_pv)
            if helpers.version_filtered(cp, m_ver, m_pv, cpan_vercmp):
                continue

        url = "mirror://cpan/authors/id/%s/%s/%s/%s" % (
            version["cpanid"][0],
            version["cpanid"][0:1],
            version["cpanid"],
            version["archive"],
        )

        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #3
File: freecode.py, Project: EvaSDK/euscan
def scan_pkg(pkg, options):
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package = options['data'].strip()

    output.einfo("Using FreeCode handler: " + package)

    fp = urllib.urlopen("http://freecode.com/projects/%s/releases" % package)
    content = fp.read()

    result = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
        content
    )

    ret = []
    for release_id, up_pv in result:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        fp = urllib.urlopen("http://freecode.com/projects/%s/releases/%s" %
                            (package, release_id))
        content = fp.read()
        download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
        fp = urllib.urlopen("http://freecode.com%s" % download_page)
        content = fp.read()
        url = re.findall(
            r'In case it doesn\'t, click here: <a href="([^"]+)"',
            content
        )[0]
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #4
File: rubygems.py, Project: voyageur/euscan
def scan_pkg(pkg, options):
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for version in versions:
        up_pv = version['number']
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #5
File: pypi.py, Project: EvaSDK/euscan
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join([mangling.mangle_url(infos['url'], options)
                         for infos in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #6
File: pypi.py, Project: tomspur/euscan
def scan(cpv, url):
    'http://wiki.python.org/moin/PyPiXmlRpc'

    package = guess_package(cpv, url)

    euscan.output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []

    for version in versions:
        if helpers.version_filtered(cp, ver, version):
            continue
        urls = client.release_urls(package, version)
        urls = " ".join([ infos['url'] for infos in urls ])
        ret.append(( urls, version ))

    return ret
Example #7
File: rubygems.py, Project: EvaSDK/euscan
def scan_pkg(pkg, options):
    gem = options['data']
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for version in versions:
        up_pv = version['number']
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        url = mangling.mangle_url(url, options)
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #8
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpc.client.ServerProxy('https://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join([mangling.mangle_url(infos['url'], options)
                         for infos in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #9
File: github.py, Project: EvaSDK/euscan
def scan_url(pkg, url, options):
    'http://developer.github.com/v3/repos/downloads/'

    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' % \
                      re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: project=%s user=%s filename=%s" % \
                 (project, user, filename))

    dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
                            (user, project))
    dls = json.load(dlreq)

    ret = []
    for dl in dls:
        m = fnre.match(dl['name'])

        if m:
            pv = mangling.mangle_version(m.group(1), options)
            if helpers.version_filtered(cp, ver, pv):
                continue

            url = mangling.mangle_url(dl['html_url'], options)
            ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #10
def scan_url(pkg, url, options):
    'http://developer.github.com/v3/repos/downloads/'

    user, project, filename = guess_package(pkg.cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(pkg.cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' % \
                      re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: project=%s user=%s filename=%s" % \
                 (project, user, filename))

    dlreq = urllib.request.urlopen('https://api.github.com/repos/%s/%s/downloads' % \
                            (user, project))
    dls = json.load(dlreq)

    ret = []
    for dl in dls:
        m = fnre.match(dl['name'])

        if m:
            pv = mangling.mangle_version(m.group(1), options)
            if helpers.version_filtered(cp, ver, pv):
                continue

            url = mangling.mangle_url(dl['html_url'], options)
            ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #11
File: pypi.py, Project: mgorny/euscan
def scan(cpv, url):
    'http://wiki.python.org/moin/PyPiXmlRpc'

    package = guess_package(cpv, url)

    output.einfo("Using PyPi XMLRPC: " + package)

    client = xmlrpclib.ServerProxy('http://pypi.python.org/pypi')
    versions = client.package_releases(package)

    if not versions:
        return versions

    versions.reverse()

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []

    for up_pv in versions:
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = client.release_urls(package, up_pv)
        urls = " ".join([infos['url'] for infos in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #12
File: github.py, Project: mgorny/euscan
def scan(cpv, url):
    'http://developer.github.com/v3/repos/downloads/'

    user, project, filename = guess_package(cpv, url)

    # find out where version is expected to be found
    cp, ver, rev = portage.pkgsplit(cpv)
    if ver not in filename:
        return

    # now create a filename-matching regexp
    # XXX: supposedly replace first with (?P<foo>...)
    # and remaining ones with (?P=foo)
    fnre = re.compile('^%s$' % re.escape(filename).replace(re.escape(ver), '(.*?)'))

    output.einfo("Using github API for: " + '/'.join([user, project]))

    dlreq = urllib2.urlopen('https://api.github.com/repos/%s/%s/downloads' % (user, project))
    dls = json.load(dlreq)

    for dl in dls:
        m = fnre.match(dl['name'])

        if m:
            pv = helpers.gentoo_mangle_version(m.group(1))
            if helpers.version_filtered(cp, ver, pv):
                continue
            yield (dl['html_url'], pv, HANDLER_NAME, CONFIDENCE)
Example #13
def scan_pkg(pkg, options):
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    package = options['data'].strip()

    output.einfo("Using FreeCode handler: " + package)

    fp = urllib.request.urlopen("http://freecode.com/projects/%s/releases" %
                                package)
    content = str(fp.read())

    result = re.findall(
        r'<a href="/projects/%s/releases/(\d+)">([^<]+)</a>' % package,
        content)

    ret = []
    for release_id, up_pv in result:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        fp = urllib.request.urlopen(
            "http://freecode.com/projects/%s/releases/%s" %
            (package, release_id))
        content = str(fp.read())
        download_page = re.findall(r'<a href="(/urls/[^"]+)"', content)[0]
        fp = urllib.request.urlopen("http://freecode.com%s" % download_page)
        content = str(fp.read())
        url = re.findall(r'In case it doesn\'t, click here: <a href="([^"]+)"',
                         content)[0]
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #14
File: deb.py, Project: oxr463/euscan
def scan_pkg(pkg, options):
    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    packages_url, package_name = options['data'].strip().split(" ", 1)

    output.einfo("Using Debian Packages: " + packages_url)

    fp = urllib.urlopen(packages_url)
    content = fp.read()

    # Support for .gz and .bz2 Packages file
    if packages_url.endswith(".bz2"):
        content = bz2.decompress(content)
    if packages_url.endswith(".gz"):
        content = zlib.decompress(content, 16 + zlib.MAX_WBITS)

    content = content.split("\n\n")

    result = []

    for package_info in content:
        package_line = re.search(r"^Package: (.*)$", package_info, re.M)
        version_line = re.search(r"^Version: (.*)$", package_info, re.M)
        if package_line and package_line.group(1) == package_name:
            if version_line:
                result.append(version_line.group(1))

    ret = []
    for up_pv in result:
        url = ""  # TODO: How to find the url?
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #15
File: cpan.py, Project: bacher09/euscan
def scan(cpv, url):
    cp, ver, rev = portage.pkgsplit(cpv)
    pkg = guess_package(cp, url)

    orig_url = url
    url = 'http://search.cpan.org/api/dist/%s' % pkg

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    ret = []

    for version in data['releases']:
        #if version['status'] == 'testing':
        #    continue

        up_pv = version['version']
        up_pv = cpan_trim_version(up_pv)
        pv = gentoo_mangle_version(up_pv)
        up_ver = cpan_mangle_version(ver)

        if helpers.version_filtered(cp, up_ver, up_pv, cpan_vercmp):
            continue

        url = 'mirror://cpan/authors/id/%s/%s/%s/%s' % (
            version['cpanid'][0],
            version['cpanid'][0:1],
            version['cpanid'],
            version['archive']
        )

        if url == orig_url:
            continue

        ret.append((url, pv))

    return ret
Example #16
File: generic.py, Project: oxr463/euscan
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)

        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
                                           options)
            versions.extend(ret)

    return versions
Example #17
File: generic.py, Project: EvaSDK/euscan
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url, options):
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data, re.I):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = mangling.mangle_version(up_pv, options)

        if helpers.version_filtered(cp, ver, pv):
            continue
        if not url.endswith("/"):
            url = url + "/"
        path = urljoin(url, path)

        if not steps and path not in orig_url:
            confidence = confidence_score(path, orig_url)
            path = mangling.mangle_url(path, options)
            versions.append((path, pv, HANDLER_NAME, confidence))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url,
                                           options)
            versions.extend(ret)

    return versions
Example #18
File: generic.py, Project: bacher09/euscan
def scan_directory_recursive(cp, ver, rev, url, steps, orig_url):
    if not steps:
        return []

    url += steps[0][0]
    pattern = steps[0][1]

    steps = steps[1:]

    euscan.output.einfo("Scanning: %s" % url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    results = []

    if re.search(r"<\s*a\s+[^>]*href", data):
        results.extend(scan_html(data, url, pattern))
    elif url.startswith('ftp://'):
        results.extend(scan_ftp(data, url, pattern))

    versions = []

    for up_pv, path in results:
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue

        if not url.endswith('/') and not path.startswith('/'):
            path = url + '/' + path
        else:
            path = url + path

        if not steps and path not in orig_url:
            versions.append((path, pv))

        if steps:
            ret = scan_directory_recursive(cp, ver, rev, path, steps, orig_url)
            versions.extend(ret)

    return versions
Example #19
File: rubygem.py, Project: bacher09/euscan
def scan(cpv, url):
    'http://guides.rubygems.org/rubygems-org-api/#gemversion'

    gem = guess_gem(cpv, url)
    if not gem:
        euscan.output.eerror("Can't guess gem name using %s and %s" % \
            (cpv, url))
        return []

    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    if not versions:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []

    for version in versions:
        up_pv = version['number']
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, up_pv)
        ret.append((url, pv))

    return ret
Example #20
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using Gnome json cache: " + package)

    fp = urllib.request.urlopen('/'.join(
        [GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    cache = json.loads(content)

    if cache[0] != 4:
        output.eerror('Unknown cache format detected')
        return []

    versions = cache[2][package]

    if not versions:
        return []

    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join(
                    [GNOME_URL_SOURCE, package, up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #21
File: php.py, Project: mgorny/euscan
def scan(cpv, url):
    cp, ver, rev = portage.pkgsplit(cpv)
    pkg, channel = guess_package_and_channel(cp, url)

    orig_url = url
    url = 'http://%s/rest/r/%s/allreleases.xml' % (channel, pkg.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = helpers.gentoo_mangle_version(up_pv)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s/get/%s-%s.tgz' % (channel, pkg, up_pv)

        if url == orig_url:
            continue

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #22
File: php.py, Project: tomspur/euscan
def scan(cpv, url):
    pkg, channel = guess_package_and_channel(cpv, url)

    orig_url = url
    url = "http://%s/rest/r/%s/allreleases.xml" % (channel, pkg.lower())

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    cp, ver, rev = portage.pkgsplit(cpv)

    for node in nodes:
        version = node.childNodes[0].data
        if helpers.version_filtered(cp, ver, version):
            continue

        url = "http://%s/get/%s-%s.tgz" % (channel, pkg, version)

        if url == orig_url:
            continue

        ret.append((url, version))

    return ret
Example #23
File: gnome.py, Project: EvaSDK/euscan
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using Gnome json cache: " + package)

    fp = urllib2.urlopen('/'.join([GNOME_URL_SOURCE, package, 'cache.json']))
    content = fp.read()
    fp.close()

    cache = json.loads(content, encoding='ascii')

    if cache[0] != 4:
        output.eerror('Unknown cache format detected')
        return []

    versions = cache[2][package]

    if not versions:
        return []

    versions.reverse()

    cp, ver, _rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in versions:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        up_files = cache[1][package][up_pv]
        for tarball_comp in ('tar.xz', 'tar.bz2', 'tar.gz'):
            if tarball_comp in up_files:
                url = '/'.join([GNOME_URL_SOURCE, package,
                                 up_files[tarball_comp]])
                break
        else:
            output.ewarn('No tarball for release %s' % up_pv)
            continue
        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #24
File: php.py, Project: voyageur/euscan
def scan_pkg(pkg, options):
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel,
                                                           package.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        url = mangling.mangle_url(url, options)

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #25
File: php.py, Project: EvaSDK/euscan
def scan_pkg(pkg, options):
    cp, ver, rev = pkg.cp, pkg.version, pkg.revision

    package = options['data']
    channel = options['type']

    url = 'http://%s.php.net/rest/r/%s/allreleases.xml' % (channel, package.lower())

    output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()

    dom = xml.dom.minidom.parseString(data)

    nodes = dom.getElementsByTagName("v")
    ret = []

    for node in nodes:
        up_pv = node.childNodes[0].data
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue

        url = 'http://%s.php.net/get/%s-%s.tgz' % (channel, package, up_pv)
        url = mangling.mangle_url(url, options)

        ret.append((url, pv, HANDLER_NAME, CONFIDENCE))

    return ret
Example #26
def scan_pkg(pkg, options):
    package = options['data']

    output.einfo("Using PyPi API: " + package)

    url = 'https://pypi.python.org/pypi/%s/json' % package

    try:
        fp = helpers.urlopen(url)
    except urllib.error.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    data = json.loads(data)

    if 'releases' not in data:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    ret = []
    for up_pv in data['releases']:
        pv = mangling.mangle_version(up_pv, options)
        if helpers.version_filtered(cp, ver, pv):
            continue
        urls = [entry['url'] for entry in data['releases'][up_pv]]
        urls = " ".join([mangling.mangle_url(url, options)
                         for url in urls])
        ret.append((urls, pv, HANDLER_NAME, CONFIDENCE))
    return ret
Example #27
File: rubygem.py, Project: tomspur/euscan
def scan(cpv, url):
    'http://guides.rubygems.org/rubygems-org-api/#gemversion'

    gem = guess_gem(cpv, url)
    url = 'http://rubygems.org/api/v1/versions/%s.json' % gem

    euscan.output.einfo("Using: " + url)

    try:
        fp = helpers.urlopen(url)
    except urllib2.URLError:
        return []
    except IOError:
        return []

    if not fp:
        return []

    data = fp.read()
    versions = json.loads(data)

    if not versions:
        return []

    cp, ver, rev = portage.pkgsplit(cpv)

    ret = []

    for version in versions:
        version = version['number']
        if helpers.version_filtered(cp, ver, version):
            continue
        url = 'http://rubygems.org/gems/%s-%s.gem' % (gem, version)
        ret.append(( url, version ))

    return ret
Example #28
File: generic.py, Project: bacher09/euscan
def brute_force(cpv, url):
    cp, ver, rev = portage.pkgsplit(cpv)

    url = helpers.parse_mirror(url)
    if not url:
        return []

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, cp):
            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, url):
            euscan.output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    euscan.output.einfo("Generating version from " + ver)

    components = helpers.split_version(ver)
    versions = helpers.gen_versions(components, CONFIG["brute-force"])

    """ Remove unwanted versions """
    versions = [v for v in versions
                if helpers.vercmp(cp, ver, helpers.join_version(v)) < 0]

    if not versions:
        euscan.output.einfo("Can't generate new versions from " + ver)
        return []

    template = helpers.template_from_url(url, ver)

    if '${PV}' not in template:
        euscan.output.einfo(
            "Url doesn't seem to depend on the full version: %s not found in %s" %
            (ver, url))
        return []
    else:
        euscan.output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    done = []

    while i < len(versions):
        components = versions[i]
        i += 1
        if components in done:
            continue
        done.append(tuple(components))

        version = helpers.join_version(components)

        if helpers.version_filtered(cp, ver, version):
            continue

        url = helpers.url_from_template(template, version)
        infos = helpers.tryurl(url, template)

        if not infos:
            continue

        result.append([url, version])

        if len(result) > CONFIG['brute-force-false-watermark']:
            euscan.output.einfo(
                "Broken server detected! Skipping brute force."
            )
            return []

        if CONFIG["brute-force-recursive"]:
            for v in helpers.gen_versions(list(components),
                                          CONFIG["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if CONFIG["oneshot"]:
            break

    return result
Example #29
File: generic.py, Project: oxr463/euscan
def brute_force(pkg, url):
    if CONFIG["brute-force"] == 0:
        return []

    cp, ver, rev = portage.pkgsplit(pkg.cpv)

    url = helpers.parse_mirror(url)
    if not url:
        return []

    for bp in BRUTEFORCE_BLACKLIST_PACKAGES:
        if re.match(bp, cp):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    for bp in BRUTEFORCE_BLACKLIST_URLS:
        if re.match(bp, url):
            output.einfo("%s is blacklisted by rule %s" % (cp, bp))
            return []

    output.einfo("Generating version from " + ver)

    components = helpers.split_version(ver)
    versions = helpers.gen_versions(components, CONFIG["brute-force"])

    # Remove unwanted versions
    versions = [v for v in versions
                if helpers.vercmp(cp, ver, helpers.join_version(v)) < 0]

    if not versions:
        output.einfo("Can't generate new versions from " + ver)
        return []

    template = helpers.template_from_url(url, ver)

    if '${PV}' not in template:
        output.einfo(
            "Url doesn't seem to depend on the full version: %s not found in %s" %
            (ver, url))
        return []
    else:
        output.einfo("Brute forcing: %s" % template)

    result = []

    i = 0
    done = []

    while i < len(versions):
        components = versions[i]
        i += 1
        if components in done:
            continue
        done.append(tuple(components))

        version = helpers.join_version(components)

        if helpers.version_filtered(cp, ver, version):
            continue

        try_url = helpers.url_from_template(template, version)
        infos = helpers.tryurl(try_url, template)

        if not infos:
            continue
        confidence = confidence_score(try_url,
                                      url,
                                      minimum=BRUTEFORCE_CONFIDENCE)
        result.append([try_url, version, BRUTEFORCE_HANDLER_NAME, confidence])

        if len(result) > CONFIG['brute-force-false-watermark']:
            output.einfo("Broken server detected! Skipping brute force.")
            return []

        if CONFIG["brute-force-recursive"]:
            for v in helpers.gen_versions(list(components),
                                          CONFIG["brute-force"]):
                if v not in versions and tuple(v) not in done:
                    versions.append(v)

        if CONFIG["oneshot"]:
            break

    return result