Пример #1
0
def source_by_metadata(key=None):
    """
    Finds all Debian source packages which have the specified metadata set.

    E.g., to find out the Maintainer of all source packages, query
    /source/by_metadata/Maintainer.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: dictionary
    @return: A list of dictionaries of
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    try:
        q = s.query(DBSource.source, SourceMetadata.value)
        q = q.join(SourceMetadata).join(MetadataKey)
        q = q.filter(MetadataKey.key == key)
        # Plain dicts so the rows are JSON-serialisable.
        ret = [{'source': p.source, 'metadata_value': p.value} for p in q]
    finally:
        # Close the session even if the query raises, so the DB
        # connection is not leaked on errors.
        s.close()

    # Declare the payload type explicitly, consistent with the sibling
    # endpoints that already set it.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #2
0
def source_by_metadata(key=None):
    """
    Finds all Debian source packages which have the specified metadata set.

    E.g., to find out the Maintainer of all source packages, query
    /source/by_metadata/Maintainer.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: dictionary
    @return: A list of dictionaries of
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    try:
        q = s.query(DBSource.source, SourceMetadata.value)
        q = q.join(SourceMetadata).join(MetadataKey)
        q = q.filter(MetadataKey.key == key)
        ret = [{'source': p.source,
                'metadata_value': p.value} for p in q]
    finally:
        # Guarantee the session is released even on query errors.
        s.close()

    # Mark the response as JSON, matching the other query endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #3
0
def binary_by_metadata(key=None):
    """
    Finds all Debian binary packages which have the specified metadata set.

    E.g., to find out the Go import paths of all Debian Go packages, query
    /binary/by_metadata/Go-Import-Path.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: dictionary
    @return: A list of dictionaries of
             - binary
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    session = DBConn().session()
    query = (session.query(DBBinary.package, DBSource.source, SourceMetadata.value)
             .join(DBSource).join(SourceMetadata).join(MetadataKey)
             .filter(MetadataKey.key == key)
             .group_by(DBBinary.package, DBSource.source, SourceMetadata.value))
    result = [{'binary': row.package,
               'source': row.source,
               'metadata_value': row.value}
              for row in query]
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(result)
Пример #4
0
def sources_in_suite(suite=None):
    """
    Returns all source packages and their versions in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')

    s = DBConn().session()
    try:
        q = s.query(DBSource).join(Suite, DBSource.suites)
        # Accept either the suite name or its codename.
        q = q.filter(or_(Suite.suite_name == suite, Suite.codename == suite))
        ret = [{'source': p.source, 'version': p.version} for p in q]
    finally:
        # Release the session even if the query raises.
        s.close()

    # Explicitly mark the payload as JSON, like the other dakweb endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #5
0
def suites():
    """
    Give information about all known suites.

    @maps: name maps to Suite: in the release file
    @maps: codename maps to Codename: in the release file.
    @maps: dakname is an internal name and should not be relied upon.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - name
             - codename
             - dakname
             - archive
             - architectures
             - components
    """

    session = DBConn().session()
    suite_query = session.query(Suite).order_by(Suite.suite_name)
    result = []
    for entry in suite_query:
        result.append({
            'name':       entry.release_suite_output,
            'codename':   entry.codename,
            'dakname':    entry.suite_name,
            'archive':    entry.archive.archive_name,
            'architectures': [arch.arch_string for arch in entry.architectures],
            'components': [comp.component_name for comp in entry.components],
        })
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(result)
Пример #6
0
def sources_in_suite(suite=None):
    """
    Returns all source packages and their versions in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')

    s = DBConn().session()
    try:
        q = s.query(DBSource).join(Suite, DBSource.suites)
        # Match on either suite name or codename.
        q = q.filter(or_(Suite.suite_name == suite, Suite.codename == suite))
        ret = [{'source': p.source,
                'version': p.version} for p in q]
    finally:
        # Never leak the session, even when the query fails.
        s.close()

    # Set the JSON content type, consistent with the sibling endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #7
0
def sha256sum_in_archive(sha256sum=None):
    """
    Check if files with matching sha256sums are known to the archive.

    @since: June 2018

    @type sha256sum: string
    @param sha256sum: SHA256 sum of the file.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - filename
             - sha256sum
             - component
    """
    if sha256sum is None:
        return bottle.HTTPError(503, 'sha256sum not specified.')

    session = DBConn().session()
    matches = session.query(PoolFile).filter(PoolFile.sha256sum == sha256sum)
    ret = [{'filename':  poolfile.filename,
            'component': poolfile.component.component_name,
            'sha256sum': poolfile.sha256sum}
           for poolfile in matches]
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #8
0
def read_number():
    """Read back the constant 7 through a raw SQL statement (test helper)."""
    db_session = DBConn().session()
    value = db_session.query('foo').from_statement(
        sql.text('select 7 as foo')).scalar()
    # Small pause to keep the original timing behaviour.
    sleep(0.1)
    db_session.close()
    return value
Пример #9
0
def export_external_signature_requests(session, path):
    """Write all pending external signature requests to *path* as JSON."""
    tbl_arch = DBConn().tbl_architecture
    tbl_ba = DBConn().tbl_bin_associations
    tbl_bin = DBConn().tbl_binaries
    tbl_esr = DBConn().tbl_external_signature_requests
    tbl_suite = DBConn().tbl_suite

    query = sql.select([tbl_bin.c.package, tbl_suite.c.suite_name, tbl_suite.c.codename, tbl_arch.c.arch_string, sql.func.max(tbl_bin.c.version)]) \
            .select_from(tbl_esr.join(tbl_suite).join(tbl_ba, tbl_ba.c.id == tbl_esr.c.association_id).join(tbl_bin).join(tbl_arch)) \
            .group_by(tbl_bin.c.package, tbl_suite.c.suite_name, tbl_suite.c.codename, tbl_arch.c.arch_string)

    packages = []
    for package, suite_name, codename, arch_string, version in session.execute(query):
        packages.append({
            'package':      package,
            'suite':        suite_name,
            'codename':     codename,
            'architecture': arch_string,
            'version':      version,
        })

    with open(path, 'w') as fh:
        json.dump({'packages': packages}, fh, indent=2)
Пример #10
0
def sha256sum_in_archive(sha256sum=None):
    """
    Check if files with matching sha256sums are known to the archive.

    @since: June 2018

    @type sha256sum: string
    @param sha256sum: SHA256 sum of the file.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - filename
             - sha256sum
             - component
    """
    if sha256sum is None:
        return bottle.HTTPError(503, 'sha256sum not specified.')

    session = DBConn().session()
    pool_files = session.query(PoolFile).filter(PoolFile.sha256sum == sha256sum)

    hits = []
    for pool_file in pool_files:
        hits.append({'filename':  pool_file.filename,
                     'component': pool_file.component.component_name,
                     'sha256sum': pool_file.sha256sum})

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(hits)
Пример #11
0
def binary_by_metadata(key=None):
    """
    Finds all Debian binary packages which have the specified metadata set.

    E.g., to find out the Go import paths of all Debian Go packages, query
    /binary/by_metadata/Go-Import-Path.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: dictionary
    @return: A list of dictionaries of
             - binary
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    try:
        q = s.query(DBBinary.package, DBSource.source, SourceMetadata.value)
        q = q.join(DBSource).join(SourceMetadata).join(MetadataKey)
        q = q.filter(MetadataKey.key == key)
        # Collapse duplicate rows produced by the joins.
        q = q.group_by(DBBinary.package, DBSource.source, SourceMetadata.value)
        ret = [{'binary': p.package,
                'source': p.source,
                'metadata_value': p.value} for p in q]
    finally:
        # Make sure the session is released even on query errors.
        s.close()

    # Previously missing: declare the JSON content type like the
    # equivalent endpoint elsewhere in this codebase does.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #12
0
def main():
    """
    Entry point for the queue-report command: parse options, then print a
    report (HTML or RFC822 format) for each configured policy queue.
    """
    global Cnf

    Cnf = utils.get_conf()
    Arguments = [('h', "help", "Queue-Report::Options::Help"),
                 ('n', "new", "Queue-Report::Options::New"),
                 ('8', '822', "Queue-Report::Options::822"),
                 ('s', "sort", "Queue-Report::Options::Sort", "HasArg"),
                 ('a', "age", "Queue-Report::Options::Age", "HasArg"),
                 ('r', "rrd", "Queue-Report::Options::Rrd", "HasArg"),
                 ('d', "directories", "Queue-Report::Options::Directories", "HasArg")]
    # Make sure boolean options exist in the configuration tree so the
    # subtree lookup below cannot fail.  'in' replaces the removed
    # Python-2 has_key() API.
    for i in ["help"]:
        if "Queue-Report::Options::%s" % (i) not in Cnf:
            Cnf["Queue-Report::Options::%s" % (i)] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Queue-Report::Options")
    if Options["Help"]:
        usage()

    if "Queue-Report::Options::New" in Cnf:
        header()

    # Command line beats configuration file, which beats the default.
    if "Queue-Report::Options::Directories" in Cnf:
        queue_names = Cnf["Queue-Report::Options::Directories"].split(",")
    elif "Queue-Report::Directories" in Cnf:
        queue_names = Cnf.value_list("Queue-Report::Directories")
    else:
        queue_names = ["byhand", "new"]

    if "Queue-Report::Options::Rrd" in Cnf:
        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
    elif "Dir::Rrd" in Cnf:
        rrd_dir = Cnf["Dir::Rrd"]
    else:
        rrd_dir = None

    f = None
    if "Queue-Report::Options::822" in Cnf:
        # Open the report file
        f = open(Cnf["Queue-Report::ReportLocations::822Location"], "w")

    session = DBConn().session()

    try:
        for queue_name in queue_names:
            queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).first()
            if queue is not None:
                process_queue(queue, f, rrd_dir)
            else:
                utils.warn("Cannot find queue %s" % queue_name)
    finally:
        # Close the report file even if queue processing fails.
        if f is not None:
            f.close()

    if "Queue-Report::Options::New" in Cnf:
        footer()
Пример #13
0
def file_in_archive(filepattern=None):
    """
    Check if a file pattern is known to the archive. Note that the
    patterns are matched against the location of the files in the
    pool, so for %tmux_2.3-1.dsc it will return t/tmux/tmux_2.3-1.dsc
    as filename.

    @since: October 2016

    @type filepattern: string

    @param filepattern: Pattern of the filenames to match. SQL LIKE
                        statement wildcard matches are supported, that
                        is % for zero, one or more characters, _ for a
                        single character match.

    @rtype: Dictionary, empty if nothing matched.
    @return: A dictionary of
             - filename
             - sha256sum
    """
    if filepattern is None:
        return bottle.HTTPError(503, 'Filepattern not specified.')

    s = DBConn().session()
    try:
        q = s.query(PoolFile)
        q = q.filter(PoolFile.filename.like(filepattern))
        ret = [{'filename': p.filename, 'sha256sum': p.sha256sum} for p in q]
    finally:
        # Release the session even if the LIKE query raises.
        s.close()

    # Previously missing: set the JSON content type like the other
    # endpoints in this API do.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #14
0
def table_row(source, version, arch, last_mod, maint, distribution, closes, fingerprint, sponsor, changedby):
    """
    Print one HTML table row (<tr>...</tr>) describing a pending upload.

    @param source: source package name
    @param version: whitespace-separated version strings
    @param arch: architecture string for the row
    @param last_mod: age of the upload in seconds (rendered via time_pp)
    @param maint: maintainer in "name:email" form
    @param distribution: iterable of target distributions
    @param closes: iterable of bug numbers closed by the upload
    @param fingerprint: OpenPGP fingerprint of the signer
    @param sponsor: sponsor login, or a falsy value when not sponsored
    @param changedby: Changed-By in "name:email" form

    Output goes to stdout; increments the module-level row_number counter
    used for even/odd row striping.
    """

    global row_number

    # Row CSS class: "exp" if any target distribution is experimental.
    trclass = "sid"
    session = DBConn().session()
    for dist in distribution:
        if dist == "experimental":
            trclass = "exp"

    # Mark as "sourceNEW" when the source is not yet in unstable/experimental.
    query = '''SELECT source
               FROM source_suite
               WHERE source = :source
               AND suite_name IN ('unstable', 'experimental')'''
    if not session.execute(query, {'source': source}).rowcount:
        trclass += " sourceNEW"
    session.commit()

    # Alternate even/odd striping based on the running row counter.
    if row_number % 2 != 0:
        print("<tr class=\"%s even\">" % (trclass))
    else:
        print("<tr class=\"%s odd\">" % (trclass))

    # NEW sources have no tracker page yet, so no link is emitted.
    if "sourceNEW" in trclass:
        print("<td class=\"package\">%s</td>" % (source))
    else:
        print("<td class=\"package\"><a href=\"https://tracker.debian.org/pkg/%(source)s\">%(source)s</a></td>" % {'source': source})
    print("<td class=\"version\">")
    for vers in version.split():
        print("<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers)))
    print("</td>")
    print("<td class=\"arch\">%s</td>" % (arch))
    print("<td class=\"distribution\">")
    for dist in distribution:
        print("%s<br/>" % (dist))
    print("</td>")
    # Age cell: absolute timestamp in the tooltip, human-readable age as text.
    print("<td class=\"age\"><abbr title=\"%s\">%s</abbr></td>" % (
        datetime.datetime.utcfromtimestamp(int(time.time()) - last_mod).strftime('%a, %d %b %Y %T UTC'),
        time_pp(last_mod),
    ))
    (name, mail) = maint.split(":", 1)

    print("<td class=\"upload-data\">")
    print("<span class=\"maintainer\">Maintainer: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))
    (name, mail) = changedby.split(":", 1)
    print("<span class=\"changed-by\">Changed-By: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))

    if sponsor:
        print("<span class=\"sponsor\">Sponsor: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(sponsor), utils.html_escape(sponsor)))

    print("<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint))
    print("</td>")

    print("<td class=\"closes\">")
    for close in closes:
        print("<a href=\"https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br/>" % (utils.html_escape(close), utils.html_escape(close)))
    print("</td></tr>")
    row_number += 1
Пример #15
0
def table_row(source, version, arch, last_mod, maint, distribution, closes, fingerprint, sponsor, changedby):
    """
    Emit one HTML <tr> row for a pending upload to stdout.

    @param source: source package name
    @param version: whitespace-separated version strings
    @param arch: architecture string displayed in the row
    @param last_mod: upload age in seconds (formatted with time_pp)
    @param maint: maintainer as "name:email"
    @param distribution: iterable of target distributions
    @param closes: iterable of bug numbers the upload closes
    @param fingerprint: OpenPGP fingerprint of the signer
    @param sponsor: sponsor login, or falsy when unsponsored
    @param changedby: Changed-By as "name:email"

    Side effect: bumps the module-global row_number used for row striping.
    """

    global row_number

    # "exp" row class when anything targets experimental, else "sid".
    trclass = "sid"
    session = DBConn().session()
    for dist in distribution:
        if dist == "experimental":
            trclass = "exp"

    # Tag the row "sourceNEW" if the source is absent from unstable/experimental.
    query = '''SELECT source
               FROM source_suite
               WHERE source = :source
               AND suite_name IN ('unstable', 'experimental')'''
    if not session.execute(query, {'source': source}).rowcount:
        trclass += " sourceNEW"
    session.commit()

    # Even/odd striping driven by the global counter.
    if row_number % 2 != 0:
        print("<tr class=\"%s even\">" % (trclass))
    else:
        print("<tr class=\"%s odd\">" % (trclass))

    # No tracker link for NEW sources (no tracker page exists yet).
    if "sourceNEW" in trclass:
        print("<td class=\"package\">%s</td>" % (source))
    else:
        print("<td class=\"package\"><a href=\"https://tracker.debian.org/pkg/%(source)s\">%(source)s</a></td>" % {'source': source})
    print("<td class=\"version\">")
    for vers in version.split():
        print("<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers)))
    print("</td>")
    print("<td class=\"arch\">%s</td>" % (arch))
    print("<td class=\"distribution\">")
    for dist in distribution:
        print("%s<br/>" % (dist))
    print("</td>")
    # Tooltip carries the exact UTC timestamp; the cell shows the pretty age.
    print("<td class=\"age\"><abbr title=\"%s\">%s</abbr></td>" % (
        datetime.datetime.utcfromtimestamp(int(time.time()) - last_mod).strftime('%a, %d %b %Y %T UTC'),
        time_pp(last_mod),
    ))
    (name, mail) = maint.split(":", 1)

    print("<td class=\"upload-data\">")
    print("<span class=\"maintainer\">Maintainer: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))
    (name, mail) = changedby.split(":", 1)
    print("<span class=\"changed-by\">Changed-By: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))

    if sponsor:
        print("<span class=\"sponsor\">Sponsor: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(sponsor), utils.html_escape(sponsor)))

    print("<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint))
    print("</td>")

    print("<td class=\"closes\">")
    for close in closes:
        print("<a href=\"https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br/>" % (utils.html_escape(close), utils.html_escape(close)))
    print("</td></tr>")
    row_number += 1
Пример #16
0
def table_row(source, version, arch, last_mod, maint, distribution, closes, fingerprint, sponsor, changedby):
    """
    Print one HTML <tr> row for a pending upload (older binNEW variant).

    @param source: source package name
    @param version: whitespace-separated version strings
    @param arch: architecture string displayed in the row
    @param last_mod: pre-formatted age string for the age cell
    @param maint: maintainer as "name:email"
    @param distribution: iterable of target distributions
    @param closes: iterable of bug numbers the upload closes
    @param fingerprint: OpenPGP fingerprint of the signer
    @param sponsor: sponsor address, or falsy when unsponsored
    @param changedby: Changed-By as "name:email"

    Fixed: Python-2-only print statements converted to the print()
    function form (also valid in Python 2 for single arguments).
    """

    global row_number

    # "exp" row class when anything targets experimental, else "sid".
    trclass = "sid"
    session = DBConn().session()
    for dist in distribution:
        if dist == "experimental":
            trclass = "exp"

    # Tag as binNEW when no source record exists yet.
    if not len(session.query(DBSource).filter_by(source=source).all()):
        trclass += " binNEW"
    session.commit()

    if row_number % 2 != 0:
        print("<tr class=\"%s even\">" % (trclass))
    else:
        print("<tr class=\"%s odd\">" % (trclass))

    # binNEW packages have no PTS page, so no link is emitted.
    if "binNEW" in trclass:
        print("<td class=\"package\">%s</td>" % (source))
    else:
        print("<td class=\"package\"><a href=\"http://packages.qa.debian.org/%(source)s\">%(source)s</a></td>" % {'source': source})
    print("<td class=\"version\">")
    for vers in version.split():
        print("<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers)))
    print("</td>")
    print("<td class=\"arch\">%s</td>" % (arch))
    print("<td class=\"distribution\">")
    for dist in distribution:
        print("%s<br/>" % (dist))
    print("</td>")
    print("<td class=\"age\">%s</td>" % (last_mod))
    (name, mail) = maint.split(":", 1)

    print("<td class=\"upload-data\">")
    print("<span class=\"maintainer\">Maintainer: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))
    (name, mail) = changedby.split(":", 1)
    print("<span class=\"changed-by\">Changed-By: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))

    if sponsor:
        try:
            (login, domain) = sponsor.split("@", 1)
            print("<span class=\"sponsor\">Sponsor: <a href=\"http://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(login), utils.html_escape(login)))
        except Exception:
            # Best-effort: a malformed sponsor address just skips the span.
            pass

    print("<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint))
    print("</td>")

    print("<td class=\"closes\">")
    for close in closes:
        print("<a href=\"http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br/>" % (utils.html_escape(close), utils.html_escape(close)))
    print("</td></tr>")
    row_number += 1
Пример #17
0
def per_arch_space_use():
    """
    Print the total pool space used per architecture, followed by the
    space used by source artifacts (.diff.gz/.tar.gz/.dsc).

    Fixed: Python-2-only print statements converted to print() calls,
    matching the Python 3 variant of this function elsewhere in the file.
    """
    session = DBConn().session()
    q = session.execute("""
SELECT a.arch_string as Architecture, sum(f.size) AS sum
  FROM files f, binaries b, architecture a
  WHERE a.id=b.architecture AND f.id=b.file
  GROUP BY a.arch_string ORDER BY sum""").fetchall()
    for j in q:
        print("%-15.15s %s" % (j[0], j[1]))
    print()
    q = session.execute("SELECT sum(size) FROM files WHERE filename ~ '.(diff.gz|tar.gz|dsc)$'").fetchall()
    print("%-15.15s %s" % ("Source", q[0][0]))
Пример #18
0
def per_arch_space_use():
    """Report pool space consumed per architecture, then by source files."""
    session = DBConn().session()
    arch_rows = session.execute("""
SELECT a.arch_string as Architecture, sum(f.size) AS sum
  FROM files f, binaries b, architecture a
  WHERE a.id=b.architecture AND f.id=b.file
  GROUP BY a.arch_string ORDER BY sum""").fetchall()
    for arch_name, total_size in arch_rows:
        print("%-15.15s %s" % (arch_name, total_size))
    print()
    source_rows = session.execute("SELECT sum(size) FROM files WHERE filename ~ '.(diff.gz|tar.gz|dsc)$'").fetchall()
    print("%-15.15s %s" % ("Source", source_rows[0][0]))
Пример #19
0
def generate_packages(suite_id, component_id, architecture_id, type_name):
    """
    Generate a Packages index file for one (suite, component,
    architecture, override-type) combination.

    @param suite_id: database id of the suite
    @param component_id: database id of the component
    @param architecture_id: database id of the architecture
    @param type_name: override type name (e.g. 'deb', 'udeb')

    @return: tuple of (PROC_STATUS_SUCCESS, human-readable message list);
             shaped for consumption by the dakmultiprocessing framework.
    """
    global _packages_query
    # Imported locally: this function runs inside worker processes.
    from daklib.filewriter import PackagesFileWriter
    from daklib.dbconn import Architecture, Component, DBConn, OverrideType, Suite
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    arch_all_id = session.query(Architecture).filter_by(arch_string='all').one().arch_id
    type_id = session.query(OverrideType).filter_by(overridetype=type_name).one().overridetype_id

    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)
    architecture = session.query(Architecture).get(architecture_id)

    overridesuite_id = suite.get_overridesuite().suite_id
    include_long_description = suite.include_long_description

    # We currently filter out the "Tag" line. They are set by external
    # overrides and NOT by the maintainer. And actually having it set by
    # maintainer means we output it twice at the moment -> which breaks
    # dselect.
    metadata_skip = ["Section", "Priority", "Tag"]
    if include_long_description:
        metadata_skip.append("Description-md5")

    writer_args = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component.component_name,
            'architecture': architecture.arch_string,
            'debtype': type_name
    }
    if suite.indices_compression is not None:
        writer_args['compression'] = suite.indices_compression
    writer = PackagesFileWriter(**writer_args)
    output = writer.open()

    # One stanza per row; stanzas are separated by a blank line.
    r = session.execute(_packages_query, {"archive_id": suite.archive.archive_id,
        "suite": suite_id, "component": component_id, 'component_name': component.component_name,
        "arch": architecture_id, "type_id": type_id, "type_name": type_name, "arch_all": arch_all_id,
        "overridesuite": overridesuite_id, "metadata_skip": metadata_skip,
        "include_long_description": 'true' if include_long_description else 'false'})
    for (stanza,) in r:
        print(stanza, file=output)
        print("", file=output)

    writer.close()

    message = ["generate-packages", suite.suite_name, component.component_name, architecture.arch_string]
    # Read-only work: discard the transaction instead of committing.
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Пример #20
0
def acl_export_per_source(acl_name):
    """Print, per fingerprint, the sources a given ACL allows uploads for."""
    session = DBConn().session()
    acl = session.query(ACL).filter_by(name=acl_name).one()

    query = r"""
      SELECT
        f.fingerprint,
        (SELECT COALESCE(u.name, '') || ' <' || u.uid || '>'
           FROM uid u
           JOIN fingerprint f2 ON u.id = f2.uid
          WHERE f2.id = f.id) AS name,
        STRING_AGG(
          a.source
          || COALESCE(' (' || (SELECT fingerprint FROM fingerprint WHERE id = a.created_by_id) || ')', ''),
          E',\n ' ORDER BY a.source)
      FROM acl_per_source a
      JOIN fingerprint f ON a.fingerprint_id = f.id
      LEFT JOIN uid u ON f.uid = u.id
      WHERE a.acl_id = :acl_id
      GROUP BY f.id, f.fingerprint
      ORDER BY name
      """

    for fpr, uid_name, allowed_sources in session.execute(query, {'acl_id': acl.id}):
        print("Fingerprint:", fpr)
        print("Uid:", uid_name)
        print("Allow:", allowed_sources)
        print()

    session.rollback()
    session.close()
Пример #21
0
def generate_sources(suite_id, component_id):
    """
    Generate a Sources index file for one (suite, component) combination.

    @param suite_id: database id of the suite
    @param component_id: database id of the component

    @return: tuple of (PROC_STATUS_SUCCESS, human-readable message list);
             shaped for consumption by the dakmultiprocessing framework.
    """
    global _sources_query
    # Imported locally: this function runs inside worker processes.
    from daklib.filewriter import SourcesFileWriter
    from daklib.dbconn import Component, DBConn, OverrideType, Suite
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    dsc_type = session.query(OverrideType).filter_by(overridetype='dsc').one().overridetype_id

    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)

    overridesuite_id = suite.get_overridesuite().suite_id

    writer_args = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component.component_name
    }
    if suite.indices_compression is not None:
        writer_args['compression'] = suite.indices_compression
    writer = SourcesFileWriter(**writer_args)
    output = writer.open()

    # run query and write Sources
    r = session.execute(_sources_query, {"suite": suite_id, "component": component_id, "component_name": component.component_name, "dsc_type": dsc_type, "overridesuite": overridesuite_id})
    for (stanza,) in r:
        print(stanza, file=output)
        print("", file=output)

    writer.close()

    message = ["generate sources", suite.suite_name, component.component_name]
    # Read-only work: discard the transaction instead of committing.
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Пример #22
0
def main():
    """Entry point for dak archive-dedup-pool: parse options, then dedup."""
    global Options, Logger

    cnf = Config()
    session = DBConn().session()

    Arguments = [('h', "help", "Archive-Dedup-Pool::Options::Help")]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    # Make sure every recognised option key exists in the configuration.
    for option in ("help",):
        option_key = "Archive-Dedup-Pool::Options::%s" % option
        if option_key not in cnf:
            cnf[option_key] = ""

    Options = cnf.subtree("Archive-Dedup-Pool::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger("archive-dedup-pool")

    dedup(session)

    Logger.close()
Пример #23
0
def acl_deny(acl_name, fingerprint, sources):
    """Remove per-source upload permissions for *fingerprint* from an ACL."""
    tbl = DBConn().tbl_acl_per_source

    session = DBConn().session()

    acl_id = session.query(ACL).filter_by(name=acl_name).one().id
    fingerprint_id = session.query(Fingerprint).filter_by(fingerprint=fingerprint).one().fingerprint_id

    # TODO: check that fpr is in ACL

    for src in sources:
        delete_stmt = (tbl.delete()
                       .where(tbl.c.acl_id == acl_id)
                       .where(tbl.c.fingerprint_id == fingerprint_id)
                       .where(tbl.c.source == src))
        outcome = session.execute(delete_stmt)
        if outcome.rowcount < 1:
            print("W: Tried to deny uploads of '{}', but was not allowed before.".format(src))

    session.commit()
Пример #24
0
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: dictionary
    @return: A list of metadata keys
    """
    s = DBConn().session()
    try:
        ret = [p.key for p in s.query(MetadataKey)]
    finally:
        # Release the session even if the query raises.
        s.close()

    # Previously missing: the identical endpoint elsewhere in this
    # codebase declares the JSON content type; do the same here.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #25
0
    def test_clone(self):
        '''
        Tests the ORMObject.clone() method.
        '''

        uid1 = Uid(uid='foobar')
        # no session yet
        self.assertRaises(RuntimeError, uid1.clone)
        self.session.add(uid1)
        # object not persistent yet
        self.assertRaises(RuntimeError, uid1.clone)
        self.session.commit()
        # test without session parameter
        uid2 = uid1.clone()
        self.assertTrue(uid1 is not uid2)
        self.assertEqual(uid1.uid, uid2.uid)
        self.assertTrue(uid2 not in uid1.session())
        self.assertTrue(uid1 not in uid2.session())
        # test with explicit session parameter
        new_session = DBConn().session()
        uid3 = uid1.clone(session=new_session)
        self.assertEqual(uid1.uid, uid3.uid)
        self.assertTrue(uid3 in new_session)
        # test for resource leaks with mass cloning
        # range() replaces Python-2-only xrange(), which is a NameError
        # on Python 3.
        for _ in range(1, 1000):
            uid1.clone()
Пример #26
0
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: dictionary
    @return: A list of metadata keys
    """
    session = DBConn().session()
    keys = [row.key for row in session.query(MetadataKey)]

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(keys)
Пример #27
0
def dsc_in_suite(suite=None, source=None):
    """
    Find all dsc files for a given source package name in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @type source: string
    @param source: Source package to query for.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - version
             - component
             - filename
             - filesize
             - sha256sum
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')
    if source is None:
        return bottle.HTTPError(503, 'Source package not specified.')

    session = DBConn().session()
    dsc_files = (session.query(DSCFile).join(PoolFile)
                 .join(DBSource).join(Suite, DBSource.suites)
                 .filter(or_(Suite.suite_name == suite, Suite.codename == suite))
                 .filter(DBSource.source == source)
                 .filter(PoolFile.filename.endswith('.dsc')))
    result = [{
        'version': dsc.source.version,
        'component': dsc.poolfile.component.component_name,
        'filename': dsc.poolfile.filename,
        'filesize': dsc.poolfile.filesize,
        'sha256sum': dsc.poolfile.sha256sum
    } for dsc in dsc_files]

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(result)
Пример #28
0
def generate_packages(suite_id, component_id, architecture_id, type_name):
    """
    Generate a Packages index file for one (suite, component,
    architecture, override-type) combination.

    @param suite_id: database id of the suite
    @param component_id: database id of the component
    @param architecture_id: database id of the architecture
    @param type_name: override type name (e.g. 'deb', 'udeb')

    @return: tuple of (PROC_STATUS_SUCCESS, human-readable message list)

    Fixed: Python-2-only ``print >>output`` chevron syntax replaced with
    ``print(..., file=output)``, matching the Python 3 variant of this
    function elsewhere in the file.
    """
    global _packages_query
    # Imported locally: this function runs inside worker processes.
    from daklib.filewriter import PackagesFileWriter
    from daklib.dbconn import Architecture, Component, DBConn, OverrideType, Suite
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    arch_all_id = session.query(Architecture).filter_by(arch_string='all').one().arch_id
    type_id = session.query(OverrideType).filter_by(overridetype=type_name).one().overridetype_id

    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)
    architecture = session.query(Architecture).get(architecture_id)

    overridesuite_id = suite.get_overridesuite().suite_id
    include_long_description = suite.include_long_description

    # We currently filter out the "Tag" line. They are set by external
    # overrides and NOT by the maintainer. And actually having it set by
    # maintainer means we output it twice at the moment -> which breaks
    # dselect.
    metadata_skip = ["Section", "Priority", "Tag"]
    if include_long_description:
        metadata_skip.append("Description-md5")

    writer_args = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component.component_name,
            'architecture': architecture.arch_string,
            'debtype': type_name
    }
    if suite.indices_compression is not None:
        writer_args['compression'] = suite.indices_compression
    writer = PackagesFileWriter(**writer_args)
    output = writer.open()

    # One stanza per row; stanzas are separated by a blank line.
    r = session.execute(_packages_query, {"archive_id": suite.archive.archive_id,
        "suite": suite_id, "component": component_id, 'component_name': component.component_name,
        "arch": architecture_id, "type_id": type_id, "type_name": type_name, "arch_all": arch_all_id,
        "overridesuite": overridesuite_id, "metadata_skip": metadata_skip,
        "include_long_description": 'true' if include_long_description else 'false'})
    for (stanza,) in r:
        print(stanza, file=output)
        print("", file=output)

    writer.close()

    message = ["generate-packages", suite.suite_name, component.component_name, architecture.arch_string]
    # Read-only work: discard the transaction instead of committing.
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Пример #29
0
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: dictionary
    @return: A list of metadata keys
    """
    session = DBConn().session()
    keys = [row.key for row in session.query(MetadataKey)]
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(keys)
Пример #30
0
def acl_set_fingerprints(acl_name, entries):
    """Replace the fingerprint set of the ACL named acl_name.

    Blank lines and lines starting with '#' in entries are skipped;
    entries that match no known key are reported and ignored.
    """
    session = DBConn().session()
    acl = session.query(ACL).filter_by(name=acl_name).one()

    # Start from an empty set, then re-add every requested fingerprint.
    acl.fingerprints.clear()
    for raw in entries:
        candidate = raw.strip()
        if not candidate or candidate.startswith('#'):
            continue

        fingerprints = get_fingerprint(candidate, session)
        if fingerprints:
            acl.fingerprints.update(fingerprints)
        else:
            print("Unknown key for '{0}'".format(candidate))

    session.commit()
Пример #31
0
def generate_translations(suite_id, component_id):
    """Write the Translation-en index for one (suite, component) pair.

    Runs the module-level _translations_query, streams each stanza to a
    TranslationFileWriter, and returns a (PROC_STATUS_SUCCESS, message)
    tuple for the multiprocessing result callback.
    """
    global _translations_query
    from daklib.filewriter import TranslationFileWriter
    from daklib.dbconn import DBConn, Suite, Component
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)

    writer_args = {
        'archive': suite.archive.path,
        'suite': suite.suite_name,
        'component': component.component_name,
        'language': 'en',
    }
    # Only pass compression when the suite configures one, so the writer's
    # own default applies otherwise.
    if suite.i18n_compression is not None:
        writer_args['compression'] = suite.i18n_compression
    writer = TranslationFileWriter(**writer_args)
    output = writer.open()

    r = session.execute(_translations_query, {
        "suite": suite_id,
        "component": component_id
    })
    for (stanza, ) in r:
        # Python 2 print statement: writes the stanza plus a newline.
        print >> output, stanza

    writer.close()

    message = [
        "generate-translations", suite.suite_name, component.component_name
    ]
    # Read-only operation: roll back to release the transaction cleanly.
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Пример #32
0
def generate_sources(suite_id, component_id):
    """Write the Sources index for one (suite, component) pair.

    Runs the module-level _sources_query, streams each stanza (followed by a
    blank separator line) to a SourcesFileWriter, and returns a
    (PROC_STATUS_SUCCESS, message) tuple for the multiprocessing callback.
    """
    global _sources_query
    from daklib.filewriter import SourcesFileWriter
    from daklib.dbconn import Component, DBConn, OverrideType, Suite
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    # Numeric id of the 'dsc' override type, used to filter the query.
    dsc_type = session.query(OverrideType).filter_by(overridetype='dsc').one().overridetype_id

    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)

    # Overrides may live in a different suite (e.g. for *-updates suites).
    overridesuite_id = suite.get_overridesuite().suite_id

    writer_args = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component.component_name
    }
    # Only pass compression when the suite configures one.
    if suite.indices_compression is not None:
        writer_args['compression'] = suite.indices_compression
    writer = SourcesFileWriter(**writer_args)
    output = writer.open()

    # run query and write Sources
    r = session.execute(_sources_query, {"suite": suite_id, "component": component_id, "component_name": component.component_name, "dsc_type": dsc_type, "overridesuite": overridesuite_id})
    for (stanza,) in r:
        # Python 2 print statements: stanza, then an empty separator line.
        print >>output, stanza
        print >>output, ""

    writer.close()

    message = ["generate sources", suite.suite_name, component.component_name]
    # Read-only operation: roll back to release the transaction cleanly.
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Пример #33
0
class ImportThread(threading.Thread):
    """Worker thread that drains a queue of .changes files and records each
    one as a known change in the database."""

    def __init__(self, parent, queue):
        threading.Thread.__init__(self)
        self.queue = queue
        self.session = DBConn().session()
        self.parent = parent
        self.die = False

    def plsDie(self):
        # Ask the thread to stop before it processes the next item.
        self.die = True

    def run(self):
        """Process queue items until told to die or the queue yields a
        falsy item (end-of-work marker)."""
        while True:
            try:
                if self.die:
                    return
                to_import = self.queue.dequeue()
                if not to_import:
                    return

                print( "Directory %s, file %7d, (%s)" % (to_import.dirpath[-10:], to_import.count, to_import.changesfile) )

                changes = Changes()
                changes.changes_file = to_import.changesfile
                changesfile = os.path.join(to_import.dirpath, to_import.changesfile)
                changes.changes = parse_changes(changesfile, signing_rules=-1)
                changes.changes["fingerprint"] = check_signature(changesfile)
                changes.add_known_changes(to_import.dirpath, session=self.session)
                self.session.commit()

            except InvalidDscError as line:
                # BUGFIX: the original referenced an undefined name `f`,
                # which raised a NameError instead of printing the warning.
                warn("syntax error in .dsc file '%s', line %s." % (changesfile, line))

            except ChangesUnicodeError:
                warn("found invalid changes file, not properly utf-8 encoded")

            except KeyboardInterrupt:
                print("Caught C-c; on ImportThread. terminating.")
                self.parent.plsDie()
                sys.exit(1)

            except:
                # Any other failure: take down the whole import run.
                self.parent.plsDie()
                sys.exit(1)
Пример #34
0
def dsc_in_suite(suite=None, source=None):
    """
    Find all dsc files for a given source package name in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @type source: string
    @param source: Source package to query for.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - version
             - component
             - filename
             - filesize
             - sha256sum
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')
    if source is None:
        return bottle.HTTPError(503, 'Source package not specified.')

    session = DBConn().session()
    query = (session.query(DSCFile)
             .join(PoolFile)
             .join(DBSource)
             .join(Suite, DBSource.suites)
             .filter(or_(Suite.suite_name == suite, Suite.codename == suite))
             .filter(DBSource.source == source)
             .filter(PoolFile.filename.endswith('.dsc')))

    results = []
    for dsc in query:
        pool_file = dsc.poolfile
        results.append({'version':   dsc.source.version,
                        'component': pool_file.component.component_name,
                        'filename':  pool_file.filename,
                        'filesize':  pool_file.filesize,
                        'sha256sum': pool_file.sha256sum})

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(results)
Пример #35
0
def main():
    """Entry point for generate-metadata: parse the command line, validate
    required configuration, and run DEP-11 metadata extraction/export for
    the requested suite."""
    cnf = Config()

    # BUGFIX: --write-hints previously reused the short option 'h', which
    # collided with --help; it now uses 'w'.  The long option is unchanged.
    Arguments = [('h',"help","DEP11::Options::Help"),
                 ('s',"suite","DEP11::Options::Suite", "HasArg"),
                 ('e',"expire","DEP11::Options::ExpireCache"),
                 ('w',"write-hints","DEP11::Options::WriteHints"),
                 ]
    # Make sure the option keys exist so lookups below cannot fail.
    for i in ["help", "suite", "ExpireCache"]:
        if not cnf.has_key("DEP11::Options::%s" % (i)):
            cnf["DEP11::Options::%s" % (i)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("DEP11::Options")

    if Options["Help"]:
        usage()
        return

    suitename = Options["Suite"]
    if not suitename:
        print("You need to specify a suite!")
        sys.exit(1)

    # check if we have some important config options set
    if not cnf.has_key("Dir::MetaInfo"):
        print("You need to specify a metadata export directory (Dir::MetaInfo)")
        sys.exit(1)
    if not cnf.has_key("DEP11::Url"):
        print("You need to specify a metadata public web URL (DEP11::Url)")
        sys.exit(1)
    if not cnf.has_key("DEP11::IconSizes"):
        print("You need to specify a list of allowed icon-sizes (DEP11::IconSizes)")
        sys.exit(1)
    if Options["WriteHints"] and not cnf.has_key("Dir::MetaInfoHints"):
        print("You need to specify an export directory for DEP-11 hints files (Dir::MetaInfoHints)")
        sys.exit(1)

    logger = daklog.Logger('generate-metadata')

    from daklib.dbconn import Component, DBConn, get_suite, Suite
    session = DBConn().session()
    suite = get_suite(suitename.lower(), session)

    if Options["ExpireCache"]:
        expire_dep11_data_cache(session, suitename, logger)

    process_suite(session, suite, logger)
    # export database content as Components-<arch>.xz YAML documents
    write_component_files(session, suite, logger)

    if Options["WriteHints"]:
        write_hints_files(session, suite, logger)

    # we're done
    logger.close()
Пример #36
0
def archives():
    """
    Give information about all known archives (sets of suites)

    @rtype: dict
    @return: list of dictionaries
    """
    session = DBConn().session()
    rows = session.query(Archive).order_by(Archive.archive_name)
    ret = [{'name':      archive.archive_name,
            'suites':    [s.suite_name for s in archive.suites]}
           for archive in rows]
    session.close()

    return json.dumps(ret)
Пример #37
0
def add_external_signature_request(session, target_suite, suite, binary):
    """Request an external signature of `binary` for `target_suite`,
    inserting a row only when no request exists yet for the same
    (association, target suite) pair."""
    tbl_ba = DBConn().tbl_bin_associations
    tbl_esr = DBConn().tbl_external_signature_requests

    # TODO [sqlalchemy >= 1.1]: use `ON CONFLICT DO NOTHING`

    # Resolve the bin_associations row linking this binary to `suite`.
    ba_query = sql.select([tbl_ba.c.id]).where(
        (tbl_ba.c.suite == suite.suite_id)
        & (tbl_ba.c.bin == binary.binary_id))
    ba_id = session.execute(ba_query).scalar()

    existing = session.execute(
        sql.select([tbl_esr])
        .where(tbl_esr.c.association_id == ba_id)
        .where(tbl_esr.c.suite_id == target_suite.suite_id)).first()

    if existing is None:
        session.execute(sql.insert(tbl_esr).values(
            association_id=ba_id,
            suite_id=target_suite.suite_id))
Пример #38
0
def all_sources():
    """
    Returns all source packages and their versions known to the archive
    (this includes NEW).

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """
    session = DBConn().session()
    ret = [{'source': src.source, 'version': src.version}
           for src in session.query(DBSource)]
    session.close()

    return json.dumps(ret)
Пример #39
0
def archives():
    """
    Give information about all known archives (sets of suites)

    @rtype: dict
    @return: list of dictionaries
    """
    session = DBConn().session()
    result = []
    for archive in session.query(Archive).order_by(Archive.archive_name):
        entry = {'name': archive.archive_name}
        entry['suites'] = [s.suite_name for s in archive.suites]
        result.append(entry)
    session.close()

    return json.dumps(result)
Пример #40
0
def all_sources():
    """
    Returns all source packages and their versions known to the archive
    (this includes NEW).

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """
    session = DBConn().session()
    result = []
    for record in session.query(DBSource):
        result.append({'source': record.source,
                       'version': record.version})
    session.close()

    return json.dumps(result)
Пример #41
0
    def get_db_value(name, default=None, rettype=None):
        """Look up configuration entry `name` in the database.

        Returns `default` when no row exists; otherwise the stored value,
        converted with `rettype` when one is given.
        """
        from daklib.dbconn import DBConfig, DBConn, NoResultFound
        try:
            row = (DBConn().session().query(DBConfig)
                   .filter(DBConfig.name == name).one())
        except NoResultFound:
            return default

        return rettype(row.value) if rettype else row.value
Пример #42
0
def generate_translations(suite_id, component_id):
    """Write the Translation-en index for one (suite, component) pair.

    Runs the module-level _translations_query, streams each stanza to a
    TranslationFileWriter, and returns a (PROC_STATUS_SUCCESS, message)
    tuple for the multiprocessing result callback.
    """
    global _translations_query
    from daklib.filewriter import TranslationFileWriter
    from daklib.dbconn import DBConn, Suite, Component
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)

    writer_args = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component.component_name,
            'language': 'en',
    }
    # Only pass compression when the suite configures one, so the writer's
    # own default applies otherwise.
    if suite.i18n_compression is not None:
        writer_args['compression'] = suite.i18n_compression
    writer = TranslationFileWriter(**writer_args)
    output = writer.open()

    r = session.execute(_translations_query, {"suite": suite_id, "component": component_id})
    for (stanza,) in r:
        # Python 2 print statement: writes the stanza plus a newline.
        print >>output, stanza

    writer.close()

    message = ["generate-translations", suite.suite_name, component.component_name]
    # Read-only operation: roll back to release the transaction cleanly.
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Пример #43
0
def file_in_archive(filepattern=None):
    """
    Check if a file pattern is known to the archive. Note that the
    patterns are matched against the location of the files in the
    pool, so for %tmux_2.3-1.dsc it will return t/tmux/tmux_2.3-1.dsc
    as filename.

    @since: October 2016

    @type filepattern: string

    @param filepattern: Pattern of the filenames to match. SQL LIKE
                        statement wildcard matches are supported, that
                        is % for zero, one or more characters, _ for a
                        single character match.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - filename
             - sha256sum
             - component
    """
    if filepattern is None:
        return bottle.HTTPError(503, 'Filepattern not specified.')

    session = DBConn().session()
    matches = session.query(PoolFile).filter(
        PoolFile.filename.like(filepattern))

    ret = [{'filename':  pf.filename,
            'component': pf.component.component_name,
            'sha256sum': pf.sha256sum}
           for pf in matches]

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #44
0
def get_provides(suite):
    """Return the set of all names Provided by binaries in `suite`.

    `suite` may be a suite name or codename.  Each comma-separated entry
    of every Provides field is stripped and collected into a set.
    """
    provides = set()
    session = DBConn().session()
    # SECURITY FIX: the suite name was previously %-interpolated directly
    # into the SQL text, allowing SQL injection; use a bound parameter.
    query = '''SELECT DISTINCT value
               FROM binaries_metadata m
               JOIN bin_associations b
               ON b.bin = m.bin_id
               WHERE key_id = (
                 SELECT key_id
                 FROM metadata_keys
                 WHERE key = 'Provides' )
               AND b.suite = (
                 SELECT id
                 FROM suite
                 WHERE suite_name = :suite
                 OR codename = :suite)'''
    for row in session.execute(query, {'suite': suite}):
        for field in row:
            # Each field is a comma-separated Provides list.
            for item in field.split(','):
                provides.add(item.strip())
    session.close()
    return provides
Пример #45
0
def suites():
    """
    Give information about all known suites.

    @maps: name maps to Suite: in the release file
    @maps: codename maps to Codename: in the release file.
    @maps: dakname is an internal name and should not be relied upon.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - name
             - codename
             - dakname
             - archive
             - architectures
             - components

    """
    session = DBConn().session()
    result = []
    for st in session.query(Suite).order_by(Suite.suite_name):
        result.append({
            'name': st.release_suite_output,
            'codename': st.codename,
            'dakname': st.suite_name,
            'archive': st.archive.archive_name,
            'architectures': [a.arch_string for a in st.architectures],
            'components': [c.component_name for c in st.components],
        })

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(result)
Пример #46
0
Файл: acl.py Проект: Debian/dak
def acl_allow(acl_name, fingerprint, sources):
    """Allow the key `fingerprint` to upload each package in `sources`
    under the ACL named `acl_name` (one acl_per_source row per package)."""
    tbl = DBConn().tbl_acl_per_source

    session = DBConn().session()

    acl_id = session.query(ACL).filter_by(name=acl_name).one().id
    fingerprint_id = (session.query(Fingerprint)
                      .filter_by(fingerprint=fingerprint)
                      .one().fingerprint_id)

    # TODO: check that fpr is in ACL

    reason = 'set by {} via CLI'.format(os.environ.get('USER', '(unknown)'))
    rows = [{'acl_id': acl_id,
             'fingerprint_id': fingerprint_id,
             'source': src,
             'reason': reason}
            for src in sources]

    session.execute(tbl.insert(), rows)

    session.commit()
Пример #47
0
def main():
    """Entry point for generate-metadata: parse options and run DEP-11
    metadata extraction and export for the requested suite."""
    cnf = Config()

    Arguments = [
        ('h', "help", "DEP11::Options::Help"),
        ('e', "expire", "DEP11::Options::ExpireCache"),
        ('s', "suite", "DEP11::Options::Suite", "HasArg"),
    ]
    # Make sure every option key exists so the lookups below cannot fail.
    for opt in ["help", "suite", "ExpireCache"]:
        if not cnf.has_key("DEP11::Options::%s" % (opt)):
            cnf["DEP11::Options::%s" % (opt)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("DEP11::Options")

    if Options["Help"]:
        usage()
        return

    suitename = Options["Suite"]
    if not suitename:
        print("You need to specify a suite!")
        return

    logger = daklog.Logger('generate-metadata')

    from daklib.dbconn import Component, DBConn, get_suite, Suite
    session = DBConn().session()
    suite = get_suite(suitename.lower(), session)

    if Options["ExpireCache"]:
        expire_dep11_data_cache(session, suitename)

    process_suite(session, suite, logger)
    # export database content as Components-<arch>.xz YAML documents
    write_component_files(suite)

    # all done
    logger.close()
Пример #48
0
def export_external_signature_requests(session, path):
    """Write all pending external signature requests to `path` as JSON.

    For every requested package the highest version per
    (package, suite, architecture) is exported.
    """
    tbl_arch = DBConn().tbl_architecture
    tbl_ba = DBConn().tbl_bin_associations
    tbl_bin = DBConn().tbl_binaries
    tbl_esr = DBConn().tbl_external_signature_requests
    tbl_suite = DBConn().tbl_suite

    query = (sql.select([tbl_bin.c.package,
                         tbl_suite.c.suite_name,
                         tbl_arch.c.arch_string,
                         sql.func.max(tbl_bin.c.version)])
             .select_from(tbl_esr.join(tbl_suite)
                          .join(tbl_ba, tbl_ba.c.id == tbl_esr.c.association_id)
                          .join(tbl_bin)
                          .join(tbl_arch))
             .group_by(tbl_bin.c.package, tbl_suite.c.suite_name,
                       tbl_arch.c.arch_string))
    requests = session.execute(query)

    data = {
        'packages': [{'package': package,
                      'suite': suite_name,
                      'architecture': arch,
                      'version': version}
                     for package, suite_name, arch, version in requests],
    }

    with open(path, 'w') as fh:
        json.dump(data, fh, indent=2)
Пример #49
0
def list_packages(packages, suites=None, components=None, architectures=None, binary_types=None,
                  source_and_binary=False, regex=False,
                  format=None, highest=None):
    """Generator yielding formatted package listings.

    Filters the package_list view by the given package names (optionally as
    regexes, optionally matching source names too) and by suite, component,
    architecture and binary type.  `format` selects the output: None for an
    aligned table, 'control-suite'/'heidi' for plain triples, 'python' for a
    single nested-dict result.  With `highest`, a trailing section listing
    the highest version per package is yielded (table/plain formats only).
    """
    session = DBConn().session()
    try:
        t = DBConn().view_package_list

        comparison_operator = "~" if regex else "="

        # OR together all requested package (and optionally source) names.
        where = sql.false()
        for package in packages:
            where = where | t.c.package.op(comparison_operator)(package)
            if source_and_binary:
                where = where | t.c.source.op(comparison_operator)(package)

        if suites is not None:
            where = where & (t.c.suite.in_(suites) | t.c.codename.in_(suites))
        if components is not None:
            where = where & t.c.component.in_(components)
        if architectures is not None:
            where = where & t.c.architecture.in_(architectures)
        if binary_types is not None:
            where = where & t.c.type.in_(binary_types)

        if format is None:
            c_architectures = daksql.string_agg(t.c.architecture, ', ', order_by=[t.c.architecture_is_source.desc(), t.c.architecture])
            query = sql.select([t.c.package, t.c.version, t.c.display_suite, c_architectures]) \
                       .where(where) \
                       .group_by(t.c.package, t.c.version, t.c.display_suite) \
                       .order_by(t.c.package, t.c.version, t.c.display_suite)
            result = session.execute(query).fetchall()

            if len(result) == 0:
                # BUGFIX: was `raise StopIteration`, which is a RuntimeError
                # inside a generator under PEP 479 (Python >= 3.7); a bare
                # return ends the generator identically.
                return

            # Column widths: at least the header width, grown to fit data.
            lengths = {
                'package': max(10, max(len(row[t.c.package]) for row in result)),
                'version': max(13, max(len(row[t.c.version]) for row in result)),
                'suite':   max(10, max(len(row[t.c.display_suite]) for row in result))
            }
            format = "{0:{lengths[package]}} | {1:{lengths[version]}} | {2:{lengths[suite]}} | {3}"

            for row in result:
                yield format.format(row[t.c.package], row[t.c.version], row[t.c.display_suite], row[c_architectures], lengths=lengths)
        elif format in ('control-suite', 'heidi'):
            query = sql.select([t.c.package, t.c.version, t.c.architecture]).where(where)
            result = session.execute(query)
            for row in result:
                yield "{0} {1} {2}".format(row[t.c.package], row[t.c.version], row[t.c.architecture])
        elif format == "python":
            c_architectures = daksql.string_agg(t.c.architecture, ',', order_by=[t.c.architecture_is_source.desc(), t.c.architecture])
            query = sql.select([t.c.package,
                                t.c.version,
                                t.c.display_suite,
                                c_architectures,
                                t.c.source,
                                t.c.source_version,
                                t.c.component]) \
                .where(where) \
                .group_by(t.c.package,
                          t.c.version,
                          t.c.display_suite,
                          t.c.source,
                          t.c.component,
                          t.c.source_version)
            result = session.execute(query).fetchall()

            if len(result) == 0:
                # BUGFIX: was `raise StopIteration` (see above).
                return

            # Arbitrarily-nested defaultdict: package -> suite -> version.
            val = lambda: defaultdict(val)
            ret = val()
            for row in result:
                ret[row[t.c.package]] \
                   [row[t.c.display_suite]] \
                   [row[t.c.version]] = {'component':      row[t.c.component],
                                         'architectures':  row[c_architectures].split(','),
                                         'source':         row[t.c.source],
                                         'source_version': row[t.c.source_version]
                                         }

            yield ret
            return
        else:
            raise ValueError("Unknown output format requested.")

        if highest is not None:
            query = sql.select([t.c.package, sql.func.max(t.c.version)]).where(where) \
                       .group_by(t.c.package).order_by(t.c.package)
            result = session.execute(query)
            yield ""
            for row in result:
                yield "{0} ({1} {2})".format(row[0], highest, row[1])
    finally:
        session.close()
Пример #50
0
def main():
    """Entry point for generate-packages-sources2: schedule Sources,
    Translation and Packages index generation across a process pool for
    every selected suite/component/architecture, then exit with the pool's
    overall status."""
    from daklib.config import Config
    from daklib import daklog

    cnf = Config()

    Arguments = [('h',"help","Generate-Packages-Sources::Options::Help"),
                 ('a','archive','Generate-Packages-Sources::Options::Archive','HasArg'),
                 ('s',"suite","Generate-Packages-Sources::Options::Suite"),
                 ('f',"force","Generate-Packages-Sources::Options::Force"),
                 ('o','option','','ArbItem')]

    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    try:
        Options = cnf.subtree("Generate-Packages-Sources::Options")
    except KeyError:
        Options = {}

    if Options.has_key("Help"):
        usage()

    from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS, PROC_STATUS_SIGNALRAISED
    pool = DakProcessPool()

    logger = daklog.Logger('generate-packages-sources2')

    from daklib.dbconn import Component, DBConn, get_suite, Suite, Archive
    session = DBConn().session()
    # Backfill missing Description-md5 values before generating indices.
    session.execute("SELECT add_missing_description_md5()")
    session.commit()

    # Either take the suites named on the command line, or all
    # non-untouchable suites (optionally limited to one archive).
    if Options.has_key("Suite"):
        suites = []
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print "I: Cannot find suite %s" % s
                logger.log(['Cannot find suite %s' % s])
    else:
        query = session.query(Suite).filter(Suite.untouchable == False)
        if 'Archive' in Options:
            query = query.join(Suite.archive).filter(Archive.archive_name==Options['Archive'])
        suites = query.all()

    force = Options.has_key("Force") and Options["Force"]


    def parse_results(message):
        # Split out into (code, msg)
        # (sic: "recieved" typo is preserved in the log text below)
        code, msg = message
        if code == PROC_STATUS_SUCCESS:
            logger.log([msg])
        elif code == PROC_STATUS_SIGNALRAISED:
            logger.log(['E: Subprocess recieved signal ', msg])
        else:
            logger.log(['E: ', msg])

    for s in suites:
        component_ids = [ c.component_id for c in s.components ]
        if s.untouchable and not force:
            import daklib.utils
            daklib.utils.fubar("Refusing to touch %s (untouchable and not forced)" % s.suite_name)
        for c in component_ids:
            # One Sources file per component; Translations only when long
            # descriptions are stripped from Packages; Packages files per
            # non-source architecture for both deb and udeb.
            pool.apply_async(generate_sources, [s.suite_id, c], callback=parse_results)
            if not s.include_long_description:
                pool.apply_async(generate_translations, [s.suite_id, c], callback=parse_results)
            for a in s.architectures:
                if a == 'source':
                    continue
                pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'deb'], callback=parse_results)
                pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'udeb'], callback=parse_results)

    pool.close()
    pool.join()

    # this script doesn't change the database
    session.close()

    logger.close()

    sys.exit(pool.overall_status())
Пример #51
0
def main():
    """Entry point for queue-report: parse options, pick the queues and
    output destinations, and run process_queue() for each policy queue."""
    global Cnf

    Cnf = utils.get_conf()
    Arguments = [('h', "help", "Queue-Report::Options::Help"),
                 ('n', "new", "Queue-Report::Options::New"),
                 ('8', '822', "Queue-Report::Options::822"),
                 ('s', "sort", "Queue-Report::Options::Sort", "HasArg"),
                 ('a', "age", "Queue-Report::Options::Age", "HasArg"),
                 ('r', "rrd", "Queue-Report::Options::Rrd", "HasArg"),
                 ('d', "directories", "Queue-Report::Options::Directories",
                  "HasArg")]
    # Make sure the option keys exist so the lookups below cannot fail.
    for i in ["help"]:
        key = "Queue-Report::Options::%s" % i
        if key not in Cnf:
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Queue-Report::Options")
    if Options["Help"]:
        usage()

    if "Queue-Report::Options::New" in Cnf:
        header()

    queue_names = []

    # Queue selection precedence: -d option, then the config list,
    # then the built-in default of byhand + new.
    if "Queue-Report::Options::Directories" in Cnf:
        for i in Cnf["Queue-Report::Options::Directories"].split(","):
            queue_names.append(i)
    elif "Queue-Report::Directories" in Cnf:
        queue_names = Cnf.value_list("Queue-Report::Directories")
    else:
        queue_names = ["byhand", "new"]

    # RRD directory: -r option, then config, else disabled.
    if "Queue-Report::Options::Rrd" in Cnf:
        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
    elif "Dir::Rrd" in Cnf:
        rrd_dir = Cnf["Dir::Rrd"]
    else:
        rrd_dir = None

    f = None
    if "Queue-Report::Options::822" in Cnf:
        # Open the report file
        # NOTE(review): when no 822Location is configured, f stays
        # sys.stdout, and the f.close() below closes stdout — confirm
        # this is intended.
        f = sys.stdout
        filename822 = Cnf.get("Queue-Report::ReportLocations::822Location")
        if filename822:
            f = open(filename822, "w")

    session = DBConn().session()

    for queue_name in queue_names:
        queue = session.query(PolicyQueue).filter_by(
            queue_name=queue_name).first()
        if queue is not None:
            process_queue(queue, f, rrd_dir)
        else:
            utils.warn("Cannot find queue %s" % queue_name)

    if "Queue-Report::Options::822" in Cnf:
        f.close()

    if "Queue-Report::Options::New" in Cnf:
        footer()
Пример #52
0
def process_queue(queue, log, rrd_dir):
    """Report the uploads waiting in a single policy queue.

    Groups the queue's uploads per source package, updates the RRD graph
    database, and emits the report in one of three formats depending on
    the Queue-Report options: RFC822-style stanzas written to C{log}
    ("822" option), HTML via table_header/table_row/table_footer ("New"
    option), or plain text on stdout (neither option).

    @param queue: PolicyQueue object to report on
    @param log: file-like object the 822-format report is written to
    @param rrd_dir: directory of the RRD graph database, or None
    """
    msg = ""
    queue_type = queue.queue_name
    session = DBConn().session()

    # Divide the .changes into per-source groups.
    per_source = {}
    total_pending = 0
    for upload in queue.uploads:
        source = upload.changes.source
        if source not in per_source:
            per_source[source] = {}
            per_source[source]["list"] = []
            per_source[source]["processed"] = ""
            handler = PolicyQueueUploadHandler(upload, session)
            if handler.get_action():
                per_source[source]["processed"] = "PENDING %s" % handler.get_action()
                total_pending += 1
        per_source[source]["list"].append(upload)
    # Determine oldest time and have note status for each source group.
    for source in per_source.keys():
        # Newest upload first; the code below relies on this ordering
        # (sorted once per group here instead of after every append).
        per_source[source]["list"].sort(key=lambda x: x.changes.created,
                                        reverse=True)
        source_list = per_source[source]["list"]
        first = source_list[0]
        oldest = time.mktime(first.changes.created.timetuple())
        have_note = 0
        for d in per_source[source]["list"]:
            mtime = time.mktime(d.changes.created.timetuple())
            if "Queue-Report::Options::New" in Cnf:
                # For the HTML report "oldest" actually tracks the newest
                # upload of the group.
                if mtime > oldest:
                    oldest = mtime
            else:
                if mtime < oldest:
                    oldest = mtime
            have_note += has_new_comment(d.policy_queue, d.changes.source,
                                         d.changes.version)
        per_source[source]["oldest"] = oldest
        if not have_note:
            per_source[source]["note_state"] = 0  # none
        elif have_note < len(source_list):
            per_source[source]["note_state"] = 1  # some
        else:
            per_source[source]["note_state"] = 2  # all
    # dict.items() is a view on Python 3 and has no .sort(); build the
    # sorted list directly.
    per_source_items = sorted(per_source.items(),
                              key=functools.cmp_to_key(sg_compare))

    update_graph_database(rrd_dir, queue_type, len(per_source_items),
                          len(queue.uploads))

    entries = []
    max_source_len = 0
    max_version_len = 0
    max_arch_len = 0
    try:
        logins = get_logins_from_ldap()
    except Exception:
        # Best effort: if LDAP is unavailable just report without logins.
        logins = dict()
    for i in per_source_items:
        maintainer = {}
        maint = ""
        distribution = ""
        closes = ""
        fingerprint = ""
        changeby = {}
        changedby = ""
        sponsor = ""
        filename = i[1]["list"][0].changes.changesname
        last_modified = time.time() - i[1]["oldest"]
        source = i[1]["list"][0].changes.source
        if len(source) > max_source_len:
            max_source_len = len(source)
        binary_list = i[1]["list"][0].binaries
        binary = ', '.join([b.package for b in binary_list])
        arches = set()
        versions = set()
        for j in i[1]["list"]:
            dbc = j.changes
            changesbase = dbc.changesname

            if "Queue-Report::Options::New" in Cnf or "Queue-Report::Options::822" in Cnf:
                # Maintainer info is only needed for the 822 and HTML reports.
                try:
                    (maintainer["maintainer822"], maintainer["maintainer2047"],
                     maintainer["maintainername"], maintainer["maintaineremail"]) = \
                        fix_maintainer(dbc.maintainer)
                except ParseMaintError:
                    print("Problems while parsing maintainer address\n")
                    maintainer["maintainername"] = "Unknown"
                    maintainer["maintaineremail"] = "Unknown"
                maint = "%s:%s" % (maintainer["maintainername"],
                                   maintainer["maintaineremail"])
                # ...likewise for the Changed-By: field if it exists.
                try:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                        fix_maintainer(dbc.changedby)
                except ParseMaintError:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                        ("", "", "", "")
                changedby = "%s:%s" % (changeby["changedbyname"],
                                       changeby["changedbyemail"])

                distribution = dbc.distribution.split()
                closes = dbc.closes

                # Only show a sponsor when the signer is neither the
                # maintainer nor the Changed-By person.
                fingerprint = dbc.fingerprint
                sponsor_name = get_uid_from_fingerprint(fingerprint).name
                sponsor_login = get_uid_from_fingerprint(fingerprint).uid
                if '@' in sponsor_login:
                    if fingerprint in logins:
                        sponsor_login = logins[fingerprint]
                if (sponsor_name != maintainer["maintainername"]
                        and sponsor_name != changeby["changedbyname"]
                        and sponsor_login + '@debian.org' !=
                        maintainer["maintaineremail"]
                        and sponsor_name != changeby["changedbyemail"]):
                    sponsor = sponsor_login

            for arch in dbc.architecture.split():
                arches.add(arch)
            versions.add(dbc.version)
        arches_list = sorted(arches, key=utils.ArchKey)
        arch_list = " ".join(arches_list)
        version_list = " ".join(sorted(versions, reverse=True))
        if len(version_list) > max_version_len:
            max_version_len = len(version_list)
        if len(arch_list) > max_arch_len:
            max_arch_len = len(arch_list)
        if i[1]["note_state"]:
            note = " | [N]"
        else:
            note = ""
        entries.append([
            source, binary, version_list, arch_list,
            per_source[source]["processed"], note, last_modified, maint,
            distribution, closes, fingerprint, sponsor, changedby, filename
        ])

    # direction entry consists of "Which field, which direction, time-consider" where
    # time-consider says how we should treat last_modified. Thats all.
    # NOTE(review): 'direction' is not defined in this function; it appears
    # to be a module-level list consumed by sortfunc — confirm.

    # Look for the options for sort and then do the sort.
    age = "h"
    if "Queue-Report::Options::Age" in Cnf:
        age = Cnf["Queue-Report::Options::Age"]
    if "Queue-Report::Options::New" in Cnf:
        # If we produce html we always have oldest first.
        direction.append([6, -1, "ao"])
    else:
        if "Queue-Report::Options::Sort" in Cnf:
            for i in Cnf["Queue-Report::Options::Sort"].split(","):
                if i == "ao":
                    # Age, oldest first.
                    direction.append([6, -1, age])
                elif i == "an":
                    # Age, newest first.
                    direction.append([6, 1, age])
                elif i == "na":
                    # Name, Ascending.
                    direction.append([0, 1, 0])
                elif i == "nd":
                    # Name, Descending.
                    direction.append([0, -1, 0])
                elif i == "nl":
                    # Notes last.
                    direction.append([5, 1, 0])
                elif i == "nf":
                    # Notes first.
                    direction.append([5, -1, 0])
    entries.sort(key=functools.cmp_to_key(sortfunc))
    # Yes, in theory you can add several sort options at the commandline with. But my mind is to small
    # at the moment to come up with a real good sorting function that considers all the sidesteps you
    # have with it. (If you combine options it will simply take the last one at the moment).
    # Will be enhanced in the future.

    if "Queue-Report::Options::822" in Cnf:
        # print stuff out in 822 format
        for entry in entries:
            (source, binary, version_list, arch_list, processed, note,
             last_modified, maint, distribution, closes, fingerprint, sponsor,
             changedby, changes_file) = entry

            # We'll always have Source, Version, Arch, Maintainer, and Dist
            # For the rest, check to see if we have them, then print them out
            log.write("Source: " + source + "\n")
            log.write("Binary: " + binary + "\n")
            log.write("Version: " + version_list + "\n")
            log.write("Architectures: ")
            log.write((", ".join(arch_list.split(" "))) + "\n")
            log.write("Age: " + time_pp(last_modified) + "\n")
            log.write("Last-Modified: " +
                      str(int(time.time()) - int(last_modified)) + "\n")
            log.write("Queue: " + queue_type + "\n")

            (name, mail) = maint.split(":", 1)
            log.write("Maintainer: " + name + " <" + mail + ">" + "\n")
            if changedby:
                (name, mail) = changedby.split(":", 1)
                log.write("Changed-By: " + name + " <" + mail + ">" + "\n")
            if sponsor:
                # The original format string was corrupted ("%[email protected]"),
                # which raises ValueError for the invalid '%[' conversion;
                # sponsors are Debian logins (see sponsor_login above).
                log.write("Sponsored-By: %s@debian.org\n" % sponsor)
            log.write("Distribution:")
            for dist in distribution:
                log.write(" " + dist)
            log.write("\n")
            log.write("Fingerprint: " + fingerprint + "\n")
            if closes:
                bug_string = ""
                for bugs in closes:
                    bug_string += "#" + bugs + ", "
                log.write("Closes: " + bug_string[:-2] + "\n")
            log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
            log.write("\n")

    total_count = len(queue.uploads)
    source_count = len(per_source_items)

    if "Queue-Report::Options::New" in Cnf:
        direction.append([6, 1, "ao"])
        entries.sort(key=functools.cmp_to_key(sortfunc))
        # Output for a html file. First table header. then table_footer.
        # Any line between them is then a <tr> printed from subroutine table_row.
        if len(entries) > 0:
            table_header(queue_type.upper(), source_count, total_count)
            for entry in entries:
                (source, binary, version_list, arch_list, processed, note,
                 last_modified, maint, distribution, closes, fingerprint,
                 sponsor, changedby, _) = entry
                table_row(source, version_list, arch_list, last_modified,
                          maint, distribution, closes, fingerprint, sponsor,
                          changedby)
            table_footer(queue_type.upper())
    elif "Queue-Report::Options::822" not in Cnf:
        # The "normal" output without any formatting.
        msg = ""
        for entry in entries:
            (source, binary, version_list, arch_list, processed, note,
             last_modified, _, _, _, _, _, _, _) = entry
            if processed:
                line_fmt = "%%-%ds | %%-%ds | %%-%ds | %%s\n" % (
                    max_source_len, max_version_len, max_arch_len)
                msg += line_fmt % (source, version_list, arch_list, processed)
            else:
                line_fmt = "%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (
                    max_source_len, max_version_len, max_arch_len)
                msg += line_fmt % (source, version_list, arch_list, note,
                                   time_pp(last_modified))

        if msg:
            print(queue_type.upper())
            print("-" * len(queue_type))
            print()
            print(msg)
            print((
                "%s %s source package%s / %s %s package%s in total / %s %s package%s to be processed."
                % (source_count, queue_type, plural(source_count), total_count,
                   queue_type, plural(total_count), total_pending, queue_type,
                   plural(total_pending))))
            print()
Пример #53
0
def main():
    """Add a new uid to the database based on a GPG key.

    Looks the key up in the active keyrings, extracts fingerprint, userid
    and email addresses, asks for confirmation, then stores the uid,
    optionally appends the addresses to the mail whitelist and sends a
    notification mail if Add-User::SendEmail is configured.
    """
    global Cnf
    keyrings = None

    Cnf = utils.get_conf()

    Arguments = [('h', "help", "Add-User::Options::Help"),
                 ('k', "key", "Add-User::Options::Key", "HasArg"),
                 ('u', "user", "Add-User::Options::User", "HasArg"),
                 ]

    # Make sure the option keys exist so the subtree lookup below works.
    for opt in ["help"]:
        key = "Add-User::Options::%s" % opt
        if key not in Cnf:
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Add-User::Options")
    if Options["help"]:
        usage()

    session = DBConn().session()

    if not keyrings:
        keyrings = get_active_keyring_paths()

    # Ask gpg for the requested key, fingerprint included.
    cmd = ["gpg", "--with-colons", "--no-secmem-warning",
           "--no-auto-check-trustdb", "--with-fingerprint",
           "--no-default-keyring"]
    cmd.extend(utils.gpg_keyring_args(keyrings).split())
    cmd.extend(["--list-key", "--", Cnf["Add-User::Options::Key"]])
    output = subprocess.check_output(cmd).rstrip()

    m = re_gpg_fingerprint_colon.search(output)
    if not m:
        print(output)
        utils.fubar("0x%s: (1) No fingerprint found in gpg output but it returned 0?\n%s"
                    % (Cnf["Add-User::Options::Key"],
                       utils.prefix_multi_line_string(output, " [GPG output:] ")))
    primary_key = m.group(1).replace(" ", "")

    # Either take uid/name from the command line or from the key itself.
    if "Add-User::Options::User" in Cnf and Cnf["Add-User::Options::User"]:
        uid = name = Cnf["Add-User::Options::User"]
    else:
        u = re_user_address.search(output)
        if not u:
            print(output)
            utils.fubar("0x%s: (2) No userid found in gpg output but it returned 0?\n%s"
                        % (Cnf["Add-User::Options::Key"],
                           utils.prefix_multi_line_string(output, " [GPG output:] ")))
        uid = u.group(1)
        name = re_user_name.search(output).group(1)

    # Look for all email addresses on the key.
    emails = []
    for line in output.split('\n'):
        e = re_user_mails.search(line)
        if e:
            emails.append(e.group(2))

    print("0x%s -> %s <%s> -> %s -> %s" % (Cnf["Add-User::Options::Key"], name, emails[0], uid, primary_key))

    prompt = "Add user %s with above data (y/N) ? " % (uid)
    if utils.our_raw_input(prompt).lower() != "y":
        return

    summary = ""

    # Add the user to the database. We provide the session, so we are
    # responsible for committing.
    uidobj = get_or_set_uid(uid, session=session)
    uid_id = uidobj.uid_id
    session.commit()

    # Append the key's addresses to the email whitelist, if configured.
    if "Dinstall::MailWhiteList" in Cnf and Cnf["Dinstall::MailWhiteList"] != "":
        f = utils.open_file(Cnf["Dinstall::MailWhiteList"], "a")
        for mail in emails:
            f.write(mail + '\n')
        f.close()

    print("Added:\nUid:\t %s (ID: %s)\nMaint:\t %s\nFP:\t %s" % (uid, uid_id,
                                                                 name, primary_key))

    # Notify the newly added user by mail, if configured to do so.
    if Cnf.find_b("Add-User::SendEmail"):
        mail = name + "<" + emails[0] + ">"
        Subst = {
            "__NEW_MAINTAINER__": mail,
            "__UID__": uid,
            "__KEYID__": Cnf["Add-User::Options::Key"],
            "__PRIMARY_KEY__": primary_key,
            "__FROM_ADDRESS__": Cnf["Dinstall::MyEmailAddress"],
            "__ADMIN_ADDRESS__": Cnf["Dinstall::MyAdminAddress"],
            "__HOSTNAME__": Cnf["Dinstall::MyHost"],
            "__DISTRO__": Cnf["Dinstall::MyDistribution"],
            "__SUMMARY__": summary,
        }
        new_add_message = utils.TemplateSubst(Subst, Cnf["Dir::Templates"] + "/add-user.added")
        utils.send_mail(new_add_message)
Пример #54
0
def number_of_packages():
    """Print a table of package counts per suite and architecture.

    Rows are architectures (source counted as its own architecture),
    columns are suites; a "-" marks architectures not enabled for a
    suite. Output goes to stdout.
    """
    arches = {}      # arch_id -> arch_string
    arch_ids = {}    # arch_string -> arch_id
    suites = {}      # suite_id -> suite_name
    suite_ids = {}   # suite_name -> suite_id
    d = {}           # suite_id -> {arch_id: count}
    session = DBConn().session()
    # Build up suite mapping
    for i in session.query(Suite).all():
        suites[i.suite_id] = i.suite_name
        suite_ids[i.suite_name] = i.suite_id
    # Build up architecture mapping
    for i in session.query(Architecture).all():
        arches[i.arch_id] = i.arch_string
        arch_ids[i.arch_string] = i.arch_id
    # Pre-create the dictionary
    for suite_id in suites.keys():
        d[suite_id] = {}
        for arch_id in arches.keys():
            d[suite_id][arch_id] = 0
    # Get the raw data for binaries
    # Simulate 'GROUP by suite, architecture' with a dictionary
    # XXX: Why don't we just get the DB to do this?
    for i in session.execute("""SELECT suite, architecture, COUNT(suite)
                                FROM bin_associations
                           LEFT JOIN binaries ON bin = binaries.id
                            GROUP BY suite, architecture""").fetchall():
        d[i[0]][i[1]] = i[2]
    # Get the raw data for source
    arch_id = arch_ids["source"]
    for i in session.execute('SELECT suite, COUNT(suite) FROM src_associations GROUP BY suite').fetchall():
        (suite_id, count) = i
        d[suite_id][arch_id] = d[suite_id][arch_id] + count
    ## Print the results
    # Setup
    suite_list = suites.values()
    suite_id_list = []
    suite_arches = {}
    for suite in suite_list:
        suite_id = suite_ids[suite]
        suite_arches[suite_id] = {}
        for arch in get_suite_architectures(suite):
            suite_arches[suite_id][arch.arch_string] = ""
        suite_id_list.append(suite_id)
    output_list = [output_format(i) for i in suite_list]
    longest_suite = longest(output_list)
    # dict.values() is a view on Python 3 and has no .sort(); sort once
    # here instead of twice as the original code did.
    arch_list = sorted(arches.values())
    longest_arch = longest(arch_list)
    # Header
    output = (" " * longest_arch) + " |"
    for suite in output_list:
        output = output + suite.center(longest_suite) + " |"
    output = output + "\n" + (len(output) * "-") + "\n"
    # per-arch data
    for arch in arch_list:
        arch_id = arch_ids[arch]
        output = output + arch.center(longest_arch) + " |"
        for suite_id in suite_id_list:
            if arch in suite_arches[suite_id]:
                count = "%d" % d[suite_id][arch_id]
            else:
                count = "-"
            output = output + count.rjust(longest_suite) + " |"
        output = output + "\n"
    print(output)
Пример #55
0
 def __init__(self, parent, queue):
     """Initialise the worker thread.

     Stores a reference to *parent* and the work *queue*, opens a fresh
     database session, and clears the 'die' flag (presumably set later
     to request shutdown — confirm against the run loop).
     """
     threading.Thread.__init__(self)
     self.parent = parent
     self.queue = queue
     self.die = False
     self.session = DBConn().session()
Пример #56
0
def main():
    global Cnf, Options, Logger

    os.umask(0o002)

    Cnf = utils.get_conf()
    Arguments = [ ('h', "help", "Generate-Index-Diffs::Options::Help"),
                  ('a', 'archive', 'Generate-Index-Diffs::Options::Archive', 'hasArg'),
                  ('c', None, "Generate-Index-Diffs::Options::CanonicalPath", "hasArg"),
                  ('p', "patchname", "Generate-Index-Diffs::Options::PatchName", "hasArg"),
                  ('d', "tmpdir", "Generate-Index-Diffs::Options::TempDir", "hasArg"),
                  ('m', "maxdiffs", "Generate-Index-Diffs::Options::MaxDiffs", "hasArg"),
                  ('n', "n-act", "Generate-Index-Diffs::Options::NoAct"),
                ]
    suites = apt_pkg.parse_commandline(Cnf,Arguments,sys.argv)
    Options = Cnf.subtree("Generate-Index-Diffs::Options")
    if Options.has_key("Help"): usage()

    maxdiffs = Options.get("MaxDiffs::Default", "56")
    maxpackages = Options.get("MaxDiffs::Packages", maxdiffs)
    maxcontents = Options.get("MaxDiffs::Contents", maxdiffs)
    maxsources = Options.get("MaxDiffs::Sources", maxdiffs)

    if not Options.has_key("PatchName"):
        format = "%Y-%m-%d-%H%M.%S"
        Options["PatchName"] = time.strftime( format )

    session = DBConn().session()

    if not suites:
        query = session.query(Suite.suite_name)
        if Options.get('Archive'):
            query = query.join(Suite.archive).filter(Archive.archive_name == Options['Archive'])
        suites = [ s.suite_name for s in query ]

    for suitename in suites:
        print "Processing: " + suitename

        suiteobj = get_suite(suitename.lower(), session=session)

        # Use the canonical version of the suite name
        suite = suiteobj.suite_name

        if suiteobj.untouchable:
            print "Skipping: " + suite + " (untouchable)"
            continue

        architectures = get_suite_architectures(suite, skipall=True, session=session)
        components = [ c.component_name for c in session.query(Component.component_name) ]

        suite_suffix = Cnf.find("Dinstall::SuiteSuffix")
        if components and suite_suffix:
            longsuite = suite + "/" + suite_suffix
        else:
            longsuite = suite

        tree = os.path.join(suiteobj.archive.path, 'dists', longsuite)

        # See if there are Translations which might need a new pdiff
        cwd = os.getcwd()
        for component in components:
            #print "DEBUG: Working on %s" % (component)
            workpath=os.path.join(tree, component, "i18n")
            if os.path.isdir(workpath):
                os.chdir(workpath)
                for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
                    for entry in filenames:
                        if not re_includeinpdiff.match(entry):
                            #print "EXCLUDING %s" % (entry)
                            continue
                        (fname, fext) = os.path.splitext(entry)
                        processfile=os.path.join(workpath, fname)
                        #print "Working: %s" % (processfile)
                        storename="%s/%s_%s_%s" % (Options["TempDir"], suite, component, fname)
                        #print "Storefile: %s" % (storename)
                        genchanges(Options, processfile + ".diff", storename, processfile, maxdiffs)
        os.chdir(cwd)

        for archobj in architectures:
            architecture = archobj.arch_string

            for component in components:
                if architecture == "source":
                    longarch = architecture
                    packages = "Sources"
                    maxsuite = maxsources
                else:
                    longarch = "binary-%s"% (architecture)
                    packages = "Packages"
                    maxsuite = maxpackages
                    # Process Contents
                    file = "%s/%s/Contents-%s" % (tree, component, architecture)
                    storename = "%s/%s_%s_contents_%s" % (Options["TempDir"], suite, component, architecture)
                    genchanges(Options, file + ".diff", storename, file, maxcontents)

                file = "%s/%s/%s/%s" % (tree, component, longarch, packages)
                storename = "%s/%s_%s_%s" % (Options["TempDir"], suite, component, architecture)
                genchanges(Options, file + ".diff", storename, file, maxsuite)