Пример #1
0
def read_number():
    """Execute a constant ``select 7`` statement and return its scalar value."""
    db_session = DBConn().session()
    value = (db_session.query('foo')
             .from_statement(sql.text('select 7 as foo'))
             .scalar())
    # Brief pause kept from the original (used when exercising connection pools).
    sleep(0.1)
    db_session.close()
    return value
Пример #2
0
def sources_in_suite(suite=None):
    """
    Returns all source packages and their versions in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')

    s = DBConn().session()
    # Accept either the suite name or its codename.
    q = s.query(DBSource).join(Suite, DBSource.suites)
    q = q.filter(or_(Suite.suite_name == suite, Suite.codename == suite))
    ret = []
    for p in q:
        ret.append({'source':    p.source,
                    'version':   p.version})

    s.close()

    # Declare the JSON payload explicitly, as the other endpoints do.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #3
0
def sha256sum_in_archive(sha256sum=None):
    """
    Check if files with matching sha256sums are known to the archive.

    @since: June 2018

    @type sha256sum: string
    @param sha256sum: SHA256 sum of the file.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - filename
             - sha256sum
             - component
    """
    if sha256sum is None:
        return bottle.HTTPError(503, 'sha256sum not specified.')

    session = DBConn().session()
    pool_files = session.query(PoolFile).filter(PoolFile.sha256sum == sha256sum)
    matches = [{'filename':  pf.filename,
                'component': pf.component.component_name,
                'sha256sum': pf.sha256sum}
               for pf in pool_files]
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(matches)
Пример #4
0
def sources_in_suite(suite=None):
    """
    Returns all source packages and their versions in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')

    s = DBConn().session()
    # Accept either the suite name or its codename.
    q = s.query(DBSource).join(Suite, DBSource.suites)
    q = q.filter(or_(Suite.suite_name == suite, Suite.codename == suite))
    ret = []
    for p in q:
        ret.append({'source':    p.source,
                    'version':   p.version})

    s.close()

    # Set the JSON content type for consistency with the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #5
0
def source_by_metadata(key=None):
    """
    Finds all Debian source packages which have the specified metadata set.

    E.g., to find out the Maintainer of all source packages, query
    /source/by_metadata/Maintainer.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: list of dictionaries
    @return: A list of dictionaries of
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    q = s.query(DBSource.source, SourceMetadata.value)
    q = q.join(SourceMetadata).join(MetadataKey)
    q = q.filter(MetadataKey.key == key)
    ret = []
    for p in q:
        ret.append({'source': p.source,
                    'metadata_value': p.value})
    s.close()

    # Declare the JSON payload explicitly, matching the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #6
0
def source_by_metadata(key=None):
    """
    Finds all Debian source packages which have the specified metadata set.

    E.g., to find out the Maintainer of all source packages, query
    /source/by_metadata/Maintainer.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: list of dictionaries
    @return: A list of dictionaries of
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    q = s.query(DBSource.source, SourceMetadata.value)
    q = q.join(SourceMetadata).join(MetadataKey)
    q = q.filter(MetadataKey.key == key)
    ret = []
    for p in q:
        ret.append({'source': p.source, 'metadata_value': p.value})
    s.close()

    # Set the JSON content type for consistency with the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #7
0
def acl_export_per_source(acl_name):
    """
    Print, per fingerprint, which source packages the named ACL allows.

    For each fingerprint attached to the ACL the output lists the key's
    uid (if any) and a comma-separated list of allowed sources, each
    annotated with the fingerprint that created the entry.

    @type acl_name: string
    @param acl_name: name of the ACL to export; raises if it does not exist
    """
    session = DBConn().session()
    # .one() raises if the ACL name is unknown or ambiguous.
    acl = session.query(ACL).filter_by(name=acl_name).one()

    # Aggregate all allowed sources per fingerprint; the correlated
    # subqueries resolve the key's uid and the creator's fingerprint.
    query = r"""
      SELECT
        f.fingerprint,
        (SELECT COALESCE(u.name, '') || ' <' || u.uid || '>'
           FROM uid u
           JOIN fingerprint f2 ON u.id = f2.uid
          WHERE f2.id = f.id) AS name,
        STRING_AGG(
          a.source
          || COALESCE(' (' || (SELECT fingerprint FROM fingerprint WHERE id = a.created_by_id) || ')', ''),
          E',\n ' ORDER BY a.source)
      FROM acl_per_source a
      JOIN fingerprint f ON a.fingerprint_id = f.id
      LEFT JOIN uid u ON f.uid = u.id
      WHERE a.acl_id = :acl_id
      GROUP BY f.id, f.fingerprint
      ORDER BY name
      """

    for row in session.execute(query, {'acl_id': acl.id}):
        print("Fingerprint:", row[0])
        print("Uid:", row[1])
        print("Allow:", row[2])
        print()

    # Read-only operation: discard the implicit transaction before closing.
    session.rollback()
    session.close()
Пример #8
0
def suites():
    """
    Give information about all known suites.

    @maps: name maps to Suite: in the release file
    @maps: codename maps to Codename: in the release file.
    @maps: dakname is an internal name and should not be relied upon.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - name
             - codename
             - dakname
             - archive
             - architectures
             - components
    """

    session = DBConn().session()
    suite_rows = session.query(Suite).order_by(Suite.suite_name)
    ret = [{'name':          entry.release_suite_output,
            'codename':      entry.codename,
            'dakname':       entry.suite_name,
            'archive':       entry.archive.archive_name,
            'architectures': [arch.arch_string for arch in entry.architectures],
            'components':    [comp.component_name for comp in entry.components]}
           for entry in suite_rows]
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #9
0
def file_in_archive(filepattern=None):
    """
    Check if a file pattern is known to the archive. Note that the
    patterns are matched against the location of the files in the
    pool, so for %tmux_2.3-1.dsc it will return t/tmux/tmux_2.3-1.dsc
    as filename.

    @since: October 2016

    @type filepattern: string

    @param filepattern: Pattern of the filenames to match. SQL LIKE
                        statement wildcard matches are supported, that
                        is % for zero, one or more characters, _ for a
                        single character match.

    @rtype: list of dictionaries, empty if nothing matched.
    @return: Dictionaries made out of
             - filename
             - sha256sum
    """
    if filepattern is None:
        return bottle.HTTPError(503, 'Filepattern not specified.')

    s = DBConn().session()
    q = s.query(PoolFile)
    q = q.filter(PoolFile.filename.like(filepattern))
    ret = []

    for p in q:
        ret.append({'filename': p.filename, 'sha256sum': p.sha256sum})

    s.close()

    # Declare the JSON payload explicitly, matching the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #10
0
def binary_by_metadata(key=None):
    """
    Finds all Debian binary packages which have the specified metadata set.

    E.g., to find out the Go import paths of all Debian Go packages, query
    /binary/by_metadata/Go-Import-Path.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: list of dictionaries
    @return: A list of dictionaries of
             - binary
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    q = s.query(DBBinary.package, DBSource.source, SourceMetadata.value)
    q = q.join(DBSource).join(SourceMetadata).join(MetadataKey)
    q = q.filter(MetadataKey.key == key)
    # Collapse duplicate rows arising from multiple suite associations.
    q = q.group_by(DBBinary.package, DBSource.source, SourceMetadata.value)
    ret = []
    for p in q:
        ret.append({'binary': p.package,
                    'source': p.source,
                    'metadata_value': p.value})
    s.close()

    # Set the JSON content type for consistency with the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #11
0
def sha256sum_in_archive(sha256sum=None):
    """
    Check if files with matching sha256sums are known to the archive.

    @since: June 2018

    @type sha256sum: string
    @param sha256sum: SHA256 sum of the file.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - filename
             - sha256sum
             - component
    """
    if sha256sum is None:
        return bottle.HTTPError(503, 'sha256sum not specified.')

    db = DBConn().session()
    pool_files = db.query(PoolFile).filter(PoolFile.sha256sum == sha256sum)

    results = []
    for pool_file in pool_files:
        entry = {'filename':  pool_file.filename,
                 'component': pool_file.component.component_name,
                 'sha256sum': pool_file.sha256sum}
        results.append(entry)

    db.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(results)
Пример #12
0
def binary_by_metadata(key=None):
    """
    Finds all Debian binary packages which have the specified metadata set.

    E.g., to find out the Go import paths of all Debian Go packages, query
    /binary/by_metadata/Go-Import-Path.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: list of dictionaries
    @return: A list of dictionaries of
             - binary
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    session = DBConn().session()
    rows = (session.query(DBBinary.package, DBSource.source, SourceMetadata.value)
            .join(DBSource).join(SourceMetadata).join(MetadataKey)
            .filter(MetadataKey.key == key)
            .group_by(DBBinary.package, DBSource.source, SourceMetadata.value))
    payload = [{'binary': row.package,
                'source': row.source,
                'metadata_value': row.value}
               for row in rows]
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(payload)
Пример #13
0
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: list
    @return: A list of metadata keys
    """
    s = DBConn().session()
    # A plain list of key names; no ordering is promised by this endpoint.
    ret = [p.key for p in s.query(MetadataKey)]
    s.close()

    # Declare the JSON payload explicitly, matching the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #14
0
def dsc_in_suite(suite=None, source=None):
    """
    Find all dsc files for a given source package name in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @type source: string
    @param source: Source package to query for.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - version
             - component
             - filename
             - filesize
             - sha256sum
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')
    if source is None:
        return bottle.HTTPError(503, 'Source package not specified.')

    session = DBConn().session()
    dsc_files = (session.query(DSCFile).join(PoolFile)
                 .join(DBSource).join(Suite, DBSource.suites)
                 .filter(or_(Suite.suite_name == suite, Suite.codename == suite))
                 .filter(DBSource.source == source)
                 .filter(PoolFile.filename.endswith('.dsc')))
    payload = [{'version': dsc.source.version,
                'component': dsc.poolfile.component.component_name,
                'filename': dsc.poolfile.filename,
                'filesize': dsc.poolfile.filesize,
                'sha256sum': dsc.poolfile.sha256sum}
               for dsc in dsc_files]

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(payload)
Пример #15
0
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: list
    @return: A list of metadata keys
    """
    session = DBConn().session()
    keys = [row.key for row in session.query(MetadataKey)]
    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(keys)
Пример #16
0
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: list
    @return: A list of metadata keys
    """
    session = DBConn().session()
    key_names = []
    for metadata_key in session.query(MetadataKey):
        key_names.append(metadata_key.key)

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(key_names)
Пример #17
0
def dsc_in_suite(suite=None, source=None):
    """
    Find all dsc files for a given source package name in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @type source: string
    @param source: Source package to query for.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - version
             - component
             - filename
             - filesize
             - sha256sum
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')
    if source is None:
        return bottle.HTTPError(503, 'Source package not specified.')

    session = DBConn().session()
    query = session.query(DSCFile).join(PoolFile)
    query = query.join(DBSource).join(Suite, DBSource.suites)
    query = query.filter(or_(Suite.suite_name == suite, Suite.codename == suite))
    query = query.filter(DBSource.source == source)
    query = query.filter(PoolFile.filename.endswith('.dsc'))

    results = []
    for dsc_file in query:
        pool_file = dsc_file.poolfile
        results.append({'version':   dsc_file.source.version,
                        'component': pool_file.component.component_name,
                        'filename':  pool_file.filename,
                        'filesize':  pool_file.filesize,
                        'sha256sum': pool_file.sha256sum})

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(results)
Пример #18
0
def archives():
    """
    Give information about all known archives (sets of suites)

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - name
             - suites
    """

    s = DBConn().session()
    q = s.query(Archive)
    q = q.order_by(Archive.archive_name)
    ret = []
    for a in q:
        ret.append({'name':      a.archive_name,
                    'suites':    [x.suite_name for x in a.suites]})

    s.close()

    # Declare the JSON payload explicitly, matching the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #19
0
def all_sources():
    """
    Returns all source packages and their versions known to the archive
    (this includes NEW).

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """

    s = DBConn().session()
    q = s.query(DBSource)
    ret = []
    for p in q:
        ret.append({'source': p.source, 'version': p.version})

    s.close()

    # Declare the JSON payload explicitly, matching the other endpoints.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Пример #20
0
def all_sources():
    """
    Returns all source packages and their versions known to the archive
    (this includes NEW).

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """

    session = DBConn().session()
    entries = [{'source': src.source, 'version': src.version}
               for src in session.query(DBSource)]
    session.close()

    return json.dumps(entries)
Пример #21
0
def archives():
    """
    Give information about all known archives (sets of suites).

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - name
             - suites
    """

    s = DBConn().session()
    # Archives sorted by name for stable output.
    q = s.query(Archive)
    q = q.order_by(Archive.archive_name)
    ret = []
    for a in q:
        ret.append({
            'name': a.archive_name,
            'suites': [x.suite_name for x in a.suites]
        })

    s.close()

    return json.dumps(ret)
Пример #22
0
def file_in_archive(filepattern=None):
    """
    Check if a file pattern is known to the archive. Note that the
    patterns are matched against the location of the files in the
    pool, so for %tmux_2.3-1.dsc it will return t/tmux/tmux_2.3-1.dsc
    as filename.

    @since: October 2016

    @type filepattern: string

    @param filepattern: Pattern of the filenames to match. SQL LIKE
                        statement wildcard matches are supported, that
                        is % for zero, one or more characters, _ for a
                        single character match.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - filename
             - sha256sum
             - component
    """
    if filepattern is None:
        return bottle.HTTPError(503, 'Filepattern not specified.')

    session = DBConn().session()
    matching_files = (session.query(PoolFile)
                      .filter(PoolFile.filename.like(filepattern)))

    payload = [{'filename':  pool_file.filename,
                'component': pool_file.component.component_name,
                'sha256sum': pool_file.sha256sum}
               for pool_file in matching_files]

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(payload)
Пример #23
0
def get_provides(suite):
    """
    Collect every name listed in a ``Provides`` field of any binary in
    the given suite (matched by suite name or codename).

    @type suite: string
    @param suite: suite name or codename to look up

    @rtype: set of strings
    @return: stripped ``Provides`` entries of all binaries in the suite
    """
    provides = set()
    session = DBConn().session()
    # Use a bound parameter (as acl_export_per_source does) instead of
    # interpolating the suite name into the SQL, which was injectable.
    query = '''SELECT DISTINCT value
               FROM binaries_metadata m
               JOIN bin_associations b
               ON b.bin = m.bin_id
               WHERE key_id = (
                 SELECT key_id
                 FROM metadata_keys
                 WHERE key = 'Provides' )
               AND b.suite = (
                 SELECT id
                 FROM suite
                 WHERE suite_name = :suite
                 OR codename = :suite)'''
    for row in session.execute(query, {'suite': suite}):
        for value in row:
            # A Provides field is a comma-separated list of names.
            for name in value.split(','):
                provides.add(name.strip())
    session.close()
    return provides
Пример #24
0
def suites():
    """
    Give information about all known suites.

    @maps: name maps to Suite: in the release file
    @maps: codename maps to Codename: in the release file.
    @maps: dakname is an internal name and should not be relied upon.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - name
             - codename
             - dakname
             - archive
             - architectures
             - components
    """

    session = DBConn().session()
    all_suites = session.query(Suite).order_by(Suite.suite_name)
    payload = []
    for st in all_suites:
        info = {}
        info['name'] = st.release_suite_output
        info['codename'] = st.codename
        info['dakname'] = st.suite_name
        info['archive'] = st.archive.archive_name
        info['architectures'] = [arch.arch_string for arch in st.architectures]
        info['components'] = [comp.component_name for comp in st.components]
        payload.append(info)

    session.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(payload)
Пример #25
0
def main():
    """
    Entry point for generate-packages-sources2: regenerate Packages,
    Sources and Translation files for the selected suites in parallel.
    """
    from daklib.config import Config
    from daklib import daklog

    cnf = Config()

    Arguments = [('h', "help", "Generate-Packages-Sources::Options::Help"),
                 ('a', 'archive',
                  'Generate-Packages-Sources::Options::Archive', 'HasArg'),
                 ('s', "suite", "Generate-Packages-Sources::Options::Suite",
                  'HasArg'),
                 ('f', "force", "Generate-Packages-Sources::Options::Force"),
                 ('o', 'option', '', 'ArbItem')]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    try:
        Options = cnf.subtree("Generate-Packages-Sources::Options")
    except KeyError:
        Options = {}

    # dict.has_key() was removed in Python 3; use the "in" operator.
    if "Help" in Options:
        usage()

    from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS, PROC_STATUS_SIGNALRAISED
    pool = DakProcessPool()

    logger = daklog.Logger('generate-packages-sources2')

    from daklib.dbconn import Component, DBConn, get_suite, Suite, Archive
    session = DBConn().session()
    session.execute("SELECT add_missing_description_md5()")
    session.commit()

    import daklib.utils

    if "Suite" in Options:
        suites = []
        suite_names = daklib.utils.split_args(Options['Suite'])
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                # print() function call: the statement form is Python 2 only.
                print("I: Cannot find suite %s" % s)
                logger.log(['Cannot find suite %s' % s])
    else:
        query = session.query(Suite).filter(Suite.untouchable == False)
        if 'Archive' in Options:
            archive_names = daklib.utils.split_args(Options['Archive'])
            query = query.join(Suite.archive).filter(
                Archive.archive_name.in_(archive_names))
        suites = query.all()

    force = "Force" in Options and Options["Force"]

    def parse_results(message):
        # Split out into (code, msg)
        code, msg = message
        if code == PROC_STATUS_SUCCESS:
            logger.log(msg)
        elif code == PROC_STATUS_SIGNALRAISED:
            logger.log(['E: Subprocess received signal ', msg])
        else:
            logger.log(['E: ', msg])

    # Lock tables so that nobody can change things underneath us
    session.execute("LOCK TABLE src_associations IN SHARE MODE")
    session.execute("LOCK TABLE bin_associations IN SHARE MODE")

    for s in suites:
        component_ids = [c.component_id for c in s.components]
        if s.untouchable and not force:
            # daklib.utils is already imported unconditionally above.
            daklib.utils.fubar(
                "Refusing to touch %s (untouchable and not forced)" %
                s.suite_name)
        for c in component_ids:
            pool.apply_async(generate_sources, [s.suite_id, c],
                             callback=parse_results)
            if not s.include_long_description:
                pool.apply_async(generate_translations, [s.suite_id, c],
                                 callback=parse_results)
            for a in s.architectures:
                if a == 'source':
                    continue
                pool.apply_async(generate_packages,
                                 [s.suite_id, c, a.arch_id, 'deb'],
                                 callback=parse_results)
                pool.apply_async(generate_packages,
                                 [s.suite_id, c, a.arch_id, 'udeb'],
                                 callback=parse_results)

    pool.close()
    pool.join()

    # this script doesn't change the database
    session.close()

    logger.close()

    sys.exit(pool.overall_status())
Пример #26
0
def list_packages(packages,
                  suites=None,
                  components=None,
                  architectures=None,
                  binary_types=None,
                  source_and_binary=False,
                  regex=False,
                  format=None,
                  highest=None):
    """
    Generate formatted listings of the given packages.

    @param packages: package names (or regexes when C{regex} is set) to list
    @param suites: restrict to these suite names or codenames
    @param components: restrict to these components
    @param architectures: restrict to these architectures
    @param binary_types: restrict to these binary types
    @param source_and_binary: also match C{packages} against source names
    @param regex: interpret C{packages} as regular expressions
    @param format: None (table), 'control-suite'/'heidi', or 'python'
    @param highest: if set, append "<package> (<highest> <max version>)" lines
    @return: iterator over output lines (a single nested dict for 'python')
    @raise ValueError: for an unknown C{format}
    """
    session = DBConn().session()
    try:
        t = DBConn().view_package_list

        # "~" is PostgreSQL's regular-expression match; "=" plain equality.
        comparison_operator = "~" if regex else "="

        where = sql.false()
        for package in packages:
            where = where | t.c.package.op(comparison_operator)(package)
            if source_and_binary:
                where = where | t.c.source.op(comparison_operator)(package)

        if suites is not None:
            where = where & (t.c.suite.in_(suites) | t.c.codename.in_(suites))
        if components is not None:
            where = where & t.c.component.in_(components)
        if architectures is not None:
            where = where & t.c.architecture.in_(architectures)
        if binary_types is not None:
            where = where & t.c.type.in_(binary_types)

        if format is None:
            c_architectures = daksql.string_agg(
                t.c.architecture,
                ', ',
                order_by=[t.c.architecture_is_source.desc(), t.c.architecture])
            query = sql.select([t.c.package, t.c.version, t.c.display_suite, c_architectures]) \
                       .where(where) \
                       .group_by(t.c.package, t.c.version, t.c.display_suite) \
                       .order_by(t.c.package, t.c.version, t.c.display_suite)
            result = session.execute(query).fetchall()

            if len(result) == 0:
                # PEP 479: raising StopIteration inside a generator becomes
                # a RuntimeError on Python 3.7+; a plain return ends it.
                return

            lengths = {
                'package':
                max(10, max(len(row[t.c.package]) for row in result)),
                'version':
                max(13, max(len(row[t.c.version]) for row in result)),
                'suite':
                max(10, max(len(row[t.c.display_suite]) for row in result))
            }
            format = "{0:{lengths[package]}} | {1:{lengths[version]}} | {2:{lengths[suite]}} | {3}"

            for row in result:
                yield format.format(row[t.c.package],
                                    row[t.c.version],
                                    row[t.c.display_suite],
                                    row[c_architectures],
                                    lengths=lengths)
        elif format in ('control-suite', 'heidi'):
            query = sql.select([t.c.package, t.c.version,
                                t.c.architecture]).where(where)
            result = session.execute(query)
            for row in result:
                yield "{0} {1} {2}".format(row[t.c.package], row[t.c.version],
                                           row[t.c.architecture])
        elif format == "python":
            c_architectures = daksql.string_agg(
                t.c.architecture,
                ',',
                order_by=[t.c.architecture_is_source.desc(), t.c.architecture])
            query = sql.select([t.c.package,
                                t.c.version,
                                t.c.display_suite,
                                c_architectures,
                                t.c.source,
                                t.c.source_version,
                                t.c.component]) \
                .where(where) \
                .group_by(t.c.package,
                          t.c.version,
                          t.c.display_suite,
                          t.c.source,
                          t.c.component,
                          t.c.source_version)
            result = session.execute(query).fetchall()

            if len(result) == 0:
                # See above: return, not raise StopIteration (PEP 479).
                return

            # Arbitrarily nested default dict: package -> suite -> version.
            def val():
                return defaultdict(val)
            ret = val()
            for row in result:
                ret[row[t.c.package]] \
                   [row[t.c.display_suite]] \
                   [row[t.c.version]] = {'component':      row[t.c.component],
                                         'architectures':  row[c_architectures].split(','),
                                         'source':         row[t.c.source],
                                         'source_version': row[t.c.source_version]
                                         }

            yield ret
            return
        else:
            raise ValueError("Unknown output format requested.")

        if highest is not None:
            query = sql.select([t.c.package, sql.func.max(t.c.version)]).where(where) \
                       .group_by(t.c.package).order_by(t.c.package)
            result = session.execute(query)
            yield ""
            for row in result:
                yield "{0} ({1} {2})".format(row[0], highest, row[1])
    finally:
        session.close()
Пример #27
0
def list_packages(packages, suites=None, components=None, architectures=None, binary_types=None,
                  source_and_binary=False, regex=False,
                  format=None, highest=None):
    """
    Generate formatted listings of the given packages.

    @param packages: package names (or regexes when C{regex} is set) to list
    @param suites: restrict to these suite names or codenames
    @param components: restrict to these components
    @param architectures: restrict to these architectures
    @param binary_types: restrict to these binary types
    @param source_and_binary: also match C{packages} against source names
    @param regex: interpret C{packages} as regular expressions
    @param format: None (table), 'control-suite'/'heidi', or 'python'
    @param highest: if set, append "<package> (<highest> <max version>)" lines
    @return: iterator over output lines (a single nested dict for 'python')
    @raise ValueError: for an unknown C{format}
    """
    session = DBConn().session()
    try:
        t = DBConn().view_package_list

        # "~" is PostgreSQL's regular-expression match; "=" plain equality.
        comparison_operator = "~" if regex else "="

        where = sql.false()
        for package in packages:
            where = where | t.c.package.op(comparison_operator)(package)
            if source_and_binary:
                where = where | t.c.source.op(comparison_operator)(package)

        if suites is not None:
            where = where & (t.c.suite.in_(suites) | t.c.codename.in_(suites))
        if components is not None:
            where = where & t.c.component.in_(components)
        if architectures is not None:
            where = where & t.c.architecture.in_(architectures)
        if binary_types is not None:
            where = where & t.c.type.in_(binary_types)

        if format is None:
            c_architectures = daksql.string_agg(t.c.architecture, ', ', order_by=[t.c.architecture_is_source.desc(), t.c.architecture])
            query = sql.select([t.c.package, t.c.version, t.c.display_suite, c_architectures]) \
                       .where(where) \
                       .group_by(t.c.package, t.c.version, t.c.display_suite) \
                       .order_by(t.c.package, t.c.version, t.c.display_suite)
            result = session.execute(query).fetchall()

            if len(result) == 0:
                # PEP 479: raising StopIteration inside a generator becomes
                # a RuntimeError on Python 3.7+; a plain return ends it.
                return

            lengths = {
                'package': max(10, max(len(row[t.c.package]) for row in result)),
                'version': max(13, max(len(row[t.c.version]) for row in result)),
                'suite':   max(10, max(len(row[t.c.display_suite]) for row in result))
            }
            format = "{0:{lengths[package]}} | {1:{lengths[version]}} | {2:{lengths[suite]}} | {3}"

            for row in result:
                yield format.format(row[t.c.package], row[t.c.version], row[t.c.display_suite], row[c_architectures], lengths=lengths)
        elif format in ('control-suite', 'heidi'):
            query = sql.select([t.c.package, t.c.version, t.c.architecture]).where(where)
            result = session.execute(query)
            for row in result:
                yield "{0} {1} {2}".format(row[t.c.package], row[t.c.version], row[t.c.architecture])
        elif format == "python":
            c_architectures = daksql.string_agg(t.c.architecture, ',', order_by=[t.c.architecture_is_source.desc(), t.c.architecture])
            query = sql.select([t.c.package,
                                t.c.version,
                                t.c.display_suite,
                                c_architectures,
                                t.c.source,
                                t.c.source_version,
                                t.c.component]) \
                .where(where) \
                .group_by(t.c.package,
                          t.c.version,
                          t.c.display_suite,
                          t.c.source,
                          t.c.component,
                          t.c.source_version)
            result = session.execute(query).fetchall()

            if len(result) == 0:
                # See above: return, not raise StopIteration (PEP 479).
                return

            # Arbitrarily nested default dict: package -> suite -> version.
            def val():
                return defaultdict(val)
            ret = val()
            for row in result:
                ret[row[t.c.package]] \
                   [row[t.c.display_suite]] \
                   [row[t.c.version]] = {'component':      row[t.c.component],
                                         'architectures':  row[c_architectures].split(','),
                                         'source':         row[t.c.source],
                                         'source_version': row[t.c.source_version]
                                         }

            yield ret
            return
        else:
            raise ValueError("Unknown output format requested.")

        if highest is not None:
            query = sql.select([t.c.package, sql.func.max(t.c.version)]).where(where) \
                       .group_by(t.c.package).order_by(t.c.package)
            result = session.execute(query)
            yield ""
            for row in result:
                yield "{0} ({1} {2})".format(row[0], highest, row[1])
    finally:
        session.close()
Пример #28
0
def read_number():
    """Run a trivial query ("select 7") against the database and return it.

    Acts as a cheap database liveness/latency probe; the short sleep
    simulates request handling time.

    @rtype: integer
    @return: the constant 7, as fetched from the database
    """
    session = DBConn().session()
    try:
        # Wrap the raw SQL in sql.text() so SQLAlchemy treats it as a
        # textual statement (matches the other read_number variant in
        # this file and is required by newer SQLAlchemy releases).
        result = session.query('foo').from_statement(
            sql.text('select 7 as foo')).scalar()
        sleep(0.1)
        return result
    finally:
        # Always release the session, even if the query raises,
        # so we do not leak a pooled database connection.
        session.close()
Example #29
0
def suite(suite=None):
    """
    Gives information about a single suite.  Note that this routine will look
    up a suite first by the main suite_name, but then also by codename if no
    suite is initially found.  It can therefore be used to canonicalise suite
    names.

    @type suite: string
    @param suite: Name or codename of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @maps: name maps to Suite: in the release file
    @maps: codename maps to Codename: in the release file.
    @maps: dakname is an internal name and should not be relied upon.

    @rtype: dictionary
    @return: A dictionary of
             - name
             - codename
             - dakname
             - archive
             - architectures
             - components
    """

    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')

    # TODO: We should probably stick this logic into daklib/dbconn.py
    so = None

    s = DBConn().session()
    # Fetch matching rows once; the original issued q.count() twice per
    # branch, i.e. up to four extra SELECT COUNT round-trips.
    matches = s.query(Suite).filter(Suite.suite_name == suite).all()

    if len(matches) > 1:
        # This would mean dak is misconfigured
        s.close()
        return bottle.HTTPError(503, 'Multiple suites found: configuration error')
    elif len(matches) == 1:
        so = matches[0]
    else:
        # Fall back to looking the suite up by its codename.
        # (The original comment wrongly said "by suite_name".)
        matches = s.query(Suite).filter(Suite.codename == suite).all()
        if len(matches) > 1:
            # This would mean dak is misconfigured
            s.close()
            return bottle.HTTPError(503, 'Multiple suites found: configuration error')
        elif len(matches) == 1:
            so = matches[0]

    if so is not None:
        so = {'name':       so.release_suite_output,
              'codename':   so.codename,
              'dakname':    so.suite_name,
              'archive':    so.archive.archive_name,
              'architectures': [x.arch_string for x in so.architectures],
              'components': [x.component_name for x in so.components]}

    s.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(so)
Example #30
0
def suite(suite=None):
    """
    Gives information about a single suite.  Note that this routine will look
    up a suite first by the main suite_name, but then also by codename if no
    suite is initially found.  It can therefore be used to canonicalise suite
    names.

    @type suite: string
    @param suite: Name or codename of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @maps: name maps to Suite: in the release file
    @maps: codename maps to Codename: in the release file.
    @maps: dakname is an internal name and should not be relied upon.

    @rtype: dictionary
    @return: A dictionary of
             - name
             - codename
             - dakname
             - archive
             - architectures
             - components
    """

    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')

    # TODO: We should probably stick this logic into daklib/dbconn.py
    so = None

    s = DBConn().session()
    # Load all matches with a single query instead of calling q.count()
    # twice per branch (each count() runs its own SELECT COUNT).
    matches = s.query(Suite).filter(Suite.suite_name == suite).all()

    if len(matches) > 1:
        # This would mean dak is misconfigured
        s.close()
        return bottle.HTTPError(503,
                                'Multiple suites found: configuration error')
    elif len(matches) == 1:
        so = matches[0]
    else:
        # Fall back to looking the suite up by its codename.
        # (The original comment wrongly said "by suite_name".)
        matches = s.query(Suite).filter(Suite.codename == suite).all()
        if len(matches) > 1:
            # This would mean dak is misconfigured
            s.close()
            return bottle.HTTPError(
                503, 'Multiple suites found: configuration error')
        elif len(matches) == 1:
            so = matches[0]

    if so is not None:
        so = {
            'name': so.release_suite_output,
            'codename': so.codename,
            'dakname': so.suite_name,
            'archive': so.archive.archive_name,
            'architectures': [x.arch_string for x in so.architectures],
            'components': [x.component_name for x in so.components]
        }

    s.close()

    # Declare the JSON content type explicitly, like the other JSON
    # endpoints in this API do.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(so)
Example #31
0
def main():
    """Entry point for generate-packages-sources2.

    Parses the command line, selects the suites to process (either the
    ones named on the command line or all touchable suites, optionally
    restricted to one archive) and generates Sources, Packages and
    Translation files per suite/component in parallel via
    DakProcessPool.  Exits with the pool's overall status.
    """
    from daklib.config import Config
    from daklib import daklog

    cnf = Config()

    Arguments = [('h',"help","Generate-Packages-Sources::Options::Help"),
                 ('a','archive','Generate-Packages-Sources::Options::Archive','HasArg'),
                 ('s',"suite","Generate-Packages-Sources::Options::Suite"),
                 ('f',"force","Generate-Packages-Sources::Options::Force"),
                 ('o','option','','ArbItem')]

    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    try:
        Options = cnf.subtree("Generate-Packages-Sources::Options")
    except KeyError:
        Options = {}

    # Use "in" rather than the deprecated has_key() throughout; the
    # Archive check further down already used this style.
    if "Help" in Options:
        usage()

    from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS, PROC_STATUS_SIGNALRAISED
    pool = DakProcessPool()

    logger = daklog.Logger('generate-packages-sources2')

    from daklib.dbconn import Component, DBConn, get_suite, Suite, Archive
    session = DBConn().session()
    session.execute("SELECT add_missing_description_md5()")
    session.commit()

    if "Suite" in Options:
        # Explicit list of suites given on the command line.
        suites = []
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print "I: Cannot find suite %s" % s
                logger.log(['Cannot find suite %s' % s])
    else:
        # Default: every touchable suite, optionally limited to one archive.
        query = session.query(Suite).filter(Suite.untouchable == False)
        if 'Archive' in Options:
            query = query.join(Suite.archive).filter(Archive.archive_name==Options['Archive'])
        suites = query.all()

    force = "Force" in Options and Options["Force"]

    def parse_results(message):
        # Split out into (code, msg) and log according to subprocess status.
        code, msg = message
        if code == PROC_STATUS_SUCCESS:
            logger.log([msg])
        elif code == PROC_STATUS_SIGNALRAISED:
            # (typo "recieved" fixed)
            logger.log(['E: Subprocess received signal ', msg])
        else:
            logger.log(['E: ', msg])

    for s in suites:
        component_ids = [c.component_id for c in s.components]
        if s.untouchable and not force:
            import daklib.utils
            daklib.utils.fubar("Refusing to touch %s (untouchable and not forced)" % s.suite_name)
        for c in component_ids:
            pool.apply_async(generate_sources, [s.suite_id, c], callback=parse_results)
            # Suites without long descriptions need Translation files.
            if not s.include_long_description:
                pool.apply_async(generate_translations, [s.suite_id, c], callback=parse_results)
            for a in s.architectures:
                if a == 'source':
                    continue
                pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'deb'], callback=parse_results)
                pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'udeb'], callback=parse_results)

    pool.close()
    pool.join()

    # this script doesn't change the database
    session.close()

    logger.close()

    sys.exit(pool.overall_status())
Example #32
0
def process_changes_files(changes_files, type, log, rrd_dir):
    """Summarise the .changes files of a queue and emit a report.

    @type changes_files: list of strings
    @param changes_files: paths of the .changes files to report on
    @type type: string
    @param type: queue name, used in headings and the 822 "Queue:" field
    @type log: file-like object
    @param log: destination for the 822-format output
    @type rrd_dir: string
    @param rrd_dir: directory passed to update_graph_database()

    Depending on Cnf options the output is HTML ("New"), RFC822-style
    stanzas written to *log* ("822"), or a plain-text table on stdout.
    NOTE(review): mutates the module-level 'direction' list used by
    sortfunc(); repeated calls accumulate sort directives.
    """
    msg = ""
    cache = {}
    unprocessed = []
    # Read in all the .changes files
    for filename in changes_files:
        try:
            u = Upload()
            u.load_changes(filename)
            cache[filename] = copy(u.pkg.changes)
            cache[filename]["filename"] = filename
        except Exception as e:
            # Best-effort: a broken .changes file is reported and skipped.
            print "WARNING: Exception %s" % e
            continue
    # Divide the .changes into per-source groups
    per_source = {}
    for filename in cache.keys():
        # NOTE(review): the line below is indented with a literal tab in
        # the original; preserved byte-for-byte (valid in Python 2 only).
	if not cache[filename].has_key("source"):
            unprocessed.append(filename)
            continue
        source = cache[filename]["source"]
        if not per_source.has_key(source):
            per_source[source] = {}
            per_source[source]["list"] = []
        per_source[source]["list"].append(cache[filename])
    # Determine oldest time and have note status for each source group
    for source in per_source.keys():
        source_list = per_source[source]["list"]
        first = source_list[0]
        oldest = os.stat(first["filename"])[stat.ST_MTIME]
        have_note = 0
        for d in per_source[source]["list"]:
            mtime = os.stat(d["filename"])[stat.ST_MTIME]
            # For the HTML ("New") report we track the newest mtime,
            # otherwise the oldest.
            if Cnf.has_key("Queue-Report::Options::New"):
                if mtime > oldest:
                    oldest = mtime
            else:
                if mtime < oldest:
                    oldest = mtime
            have_note += has_new_comment(d["source"], d["version"])
        per_source[source]["oldest"] = oldest
        if not have_note:
            per_source[source]["note_state"] = 0; # none
        elif have_note < len(source_list):
            per_source[source]["note_state"] = 1; # some
        else:
            per_source[source]["note_state"] = 2; # all
    per_source_items = per_source.items()
    per_source_items.sort(sg_compare)

    update_graph_database(rrd_dir, type, len(per_source_items), len(changes_files))

    # Build one report row per source group, tracking column widths for
    # the plain-text table layout.
    entries = []
    max_source_len = 0
    max_version_len = 0
    max_arch_len = 0
    for i in per_source_items:
        maintainer = {}
        maint=""
        distribution=""
        closes=""
        fingerprint=""
        changeby = {}
        changedby=""
        sponsor=""
        filename=i[1]["list"][0]["filename"]
        last_modified = time.time()-i[1]["oldest"]
        source = i[1]["list"][0]["source"]
        if len(source) > max_source_len:
            max_source_len = len(source)
        binary_list = i[1]["list"][0]["binary"].keys()
        binary = ', '.join(binary_list)
        arches = {}
        versions = {}
        for j in i[1]["list"]:
            changesbase = os.path.basename(j["filename"])
            try:
                session = DBConn().session()
                dbc = session.query(DBChange).filter_by(changesname=changesbase).one()
                session.close()
            except Exception as e:
                print "Can't find changes file in NEW for %s (%s)" % (changesbase, e)
                dbc = None

            if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"):
                try:
                    (maintainer["maintainer822"], maintainer["maintainer2047"],
                    maintainer["maintainername"], maintainer["maintaineremail"]) = \
                    fix_maintainer (j["maintainer"])
                except ParseMaintError as msg:
                    print "Problems while parsing maintainer address\n"
                    maintainer["maintainername"] = "Unknown"
                    maintainer["maintaineremail"] = "Unknown"
                maint="%s:%s" % (maintainer["maintainername"], maintainer["maintaineremail"])
                # ...likewise for the Changed-By: field if it exists.
                try:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                     fix_maintainer (j["changed-by"])
                except ParseMaintError as msg:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                     ("", "", "", "")
                changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"])

                distribution=j["distribution"].keys()
                closes=j["closes"].keys()
                if dbc:
                    fingerprint = dbc.fingerprint
                    sponsor_name = get_uid_from_fingerprint(fingerprint).name
                    sponsor_email = get_uid_from_fingerprint(fingerprint).uid + "@debian.org"
                    # Treat the signer as a sponsor only if they are neither
                    # the maintainer nor the Changed-By person.
                    if sponsor_name != maintainer["maintainername"] and sponsor_name != changeby["changedbyname"] and \
                    sponsor_email != maintainer["maintaineremail"] and sponsor_name != changeby["changedbyemail"]:
                        sponsor = sponsor_email

            for arch in j["architecture"].keys():
                arches[arch] = ""
            version = j["version"]
            versions[version] = ""
        arches_list = arches.keys()
        arches_list.sort(utils.arch_compare_sw)
        arch_list = " ".join(arches_list)
        version_list = " ".join(versions.keys())
        if len(version_list) > max_version_len:
            max_version_len = len(version_list)
        if len(arch_list) > max_arch_len:
            max_arch_len = len(arch_list)
        if i[1]["note_state"]:
            note = " | [N]"
        else:
            note = ""
        entries.append([source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])

    # direction entry consists of "Which field, which direction, time-consider" where
    # time-consider says how we should treat last_modified. Thats all.

    # Look for the options for sort and then do the sort.
    age = "h"
    if Cnf.has_key("Queue-Report::Options::Age"):
        age =  Cnf["Queue-Report::Options::Age"]
    if Cnf.has_key("Queue-Report::Options::New"):
    # If we produce html we always have oldest first.
        direction.append([5,-1,"ao"])
    else:
        if Cnf.has_key("Queue-Report::Options::Sort"):
            for i in Cnf["Queue-Report::Options::Sort"].split(","):
                if i == "ao":
                    # Age, oldest first.
                    direction.append([5,-1,age])
                elif i == "an":
                    # Age, newest first.
                    direction.append([5,1,age])
                elif i == "na":
                    # Name, Ascending.
                    direction.append([0,1,0])
                elif i == "nd":
                    # Name, Descending.
                    direction.append([0,-1,0])
                elif i == "nl":
                    # Notes last.
                    direction.append([4,1,0])
                elif i == "nf":
                    # Notes first.
                    direction.append([4,-1,0])
    entries.sort(lambda x, y: sortfunc(x, y))
    # Yes, in theory you can add several sort options at the commandline with. But my mind is to small
    # at the moment to come up with a real good sorting function that considers all the sidesteps you
    # have with it. (If you combine options it will simply take the last one at the moment).
    # Will be enhanced in the future.

    if Cnf.has_key("Queue-Report::Options::822"):
        # print stuff out in 822 format
        for entry in entries:
            (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry

            # We'll always have Source, Version, Arch, Mantainer, and Dist
            # For the rest, check to see if we have them, then print them out
            log.write("Source: " + source + "\n")
            log.write("Binary: " + binary + "\n")
            log.write("Version: " + version_list + "\n")
            log.write("Architectures: ")
            log.write( (", ".join(arch_list.split(" "))) + "\n")
            log.write("Age: " + time_pp(last_modified) + "\n")
            log.write("Last-Modified: " + str(int(time.time()) - int(last_modified)) + "\n")
            log.write("Queue: " + type + "\n")

            (name, mail) = maint.split(":", 1)
            log.write("Maintainer: " + name + " <"+mail+">" + "\n")
            if changedby:
               (name, mail) = changedby.split(":", 1)
               log.write("Changed-By: " + name + " <"+mail+">" + "\n")
            if sponsor:
               log.write("Sponsored-By: " + "@".join(sponsor.split("@")[:2]) + "\n")
            log.write("Distribution:")
            for dist in distribution:
               log.write(" " + dist)
            log.write("\n")
            log.write("Fingerprint: " + fingerprint + "\n")
            if closes:
                bug_string = ""
                for bugs in closes:
                    bug_string += "#"+bugs+", "
                log.write("Closes: " + bug_string[:-2] + "\n")
            log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
            log.write("\n")

    if Cnf.has_key("Queue-Report::Options::New"):
        direction.append([5,1,"ao"])
        entries.sort(lambda x, y: sortfunc(x, y))
    # Output for a html file. First table header. then table_footer.
    # Any line between them is then a <tr> printed from subroutine table_row.
        if len(entries) > 0:
            total_count = len(changes_files)
            source_count = len(per_source_items)
            table_header(type.upper(), source_count, total_count)
            for entry in entries:
                (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
                table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes, fingerprint, sponsor, changedby)
            table_footer(type.upper())
    elif not Cnf.has_key("Queue-Report::Options::822"):
    # The "normal" output without any formatting.
        format="%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (max_source_len, max_version_len, max_arch_len)

        msg = ""
        for entry in entries:
            (source, binary, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
            msg += format % (source, version_list, arch_list, note, time_pp(last_modified))

        if msg:
            total_count = len(changes_files)
            source_count = len(per_source_items)
            print type.upper()
            print "-"*len(type)
            print
            print msg
            print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count))
            print

        # Files we could not group by source (no "source" field).
        if len(unprocessed):
            print "UNPROCESSED"
            print "-----------"
            for u in unprocessed:
                print u
            print