Example #1
def file_in_archive(filepattern=None):
    """
    Check if a file pattern is known to the archive. Note that the
    patterns are matched against the location of the files in the
    pool, so for %tmux_2.3-1.dsc it will return t/tmux/tmux_2.3-1.dsc
    as filename.

    @since: October 2016

    @type filepattern: string

    @param filepattern: Pattern of the filenames to match. SQL LIKE
                        statement wildcard matches are supported, that
                        is % for zero, one or more characters, _ for a
                        single character match.

    @rtype: list of dictionaries, empty if nothing matched.
    @return: A list of dictionaries of
             - filename
             - sha256sum
    """
    if filepattern is None:
        return bottle.HTTPError(503, 'Filepattern not specified.')

    s = DBConn().session()
    q = s.query(PoolFile)
    q = q.filter(PoolFile.filename.like(filepattern))
    ret = []

    for p in q:
        ret.append({'filename': p.filename, 'sha256sum': p.sha256sum})

    s.close()

    return json.dumps(ret)
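
Since this handler is part of the dakweb bottle application, the same query can be made over plain HTTP. A minimal client sketch, assuming the route is exposed as /file_in_archive/<filepattern> on the public api.ftp-master.debian.org instance (both the route path and the base URL are assumptions, not shown in the snippet above):

import json
import urllib.parse
import urllib.request

BASE_URL = "https://api.ftp-master.debian.org"  # assumed deployment

def query_file_in_archive(pattern):
    # SQL LIKE wildcards (% and _) must survive the URL, so encode the pattern.
    url = "%s/file_in_archive/%s" % (BASE_URL, urllib.parse.quote(pattern, safe=""))
    with urllib.request.urlopen(url) as resp:
        return json.loads(resp.read().decode("utf-8"))

for entry in query_file_in_archive("%tmux_2.3-1.dsc"):
    print(entry["filename"], entry["sha256sum"])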
Example #2
    def test_clone(self):
        '''
        Tests the ORMObject.clone() method.
        '''

        uid1 = Uid(uid='foobar')
        # no session yet
        self.assertRaises(RuntimeError, uid1.clone)
        self.session.add(uid1)
        # object not persistent yet
        self.assertRaises(RuntimeError, uid1.clone)
        self.session.commit()
        # test without session parameter
        uid2 = uid1.clone()
        self.assertTrue(uid1 is not uid2)
        self.assertEqual(uid1.uid, uid2.uid)
        self.assertTrue(uid2 not in uid1.session())
        self.assertTrue(uid1 not in uid2.session())
        # test with explicit session parameter
        new_session = DBConn().session()
        uid3 = uid1.clone(session=new_session)
        self.assertEqual(uid1.uid, uid3.uid)
        self.assertTrue(uid3 in new_session)
        # test for resource leaks with mass cloning
        for _ in range(1, 1000):
            uid1.clone()
Example #3
def sha256sum_in_archive(sha256sum=None):
    """
    Check if files with matching sha256sums are known to the archive.

    @since: June 2018

    @type sha256sum: string
    @param sha256sum: SHA256 sum of the file.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - filename
             - sha256sum
             - component
    """
    if sha256sum is None:
        return bottle.HTTPError(503, 'sha256sum not specified.')

    s = DBConn().session()
    q = s.query(PoolFile)
    q = q.filter(PoolFile.sha256sum == sha256sum)
    ret = []

    for p in q:
        ret.append({'filename':  p.filename,
                    'component': p.component.component_name,
                    'sha256sum': p.sha256sum})

    s.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
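
A client usually has the file, not its checksum, so the digest has to be computed first. A small sketch using hashlib; the endpoint URL is again an assumption based on the handler name:

import hashlib
import json
import urllib.request

def local_sha256(path):
    # Hash in chunks so large pool files do not need to fit in memory.
    h = hashlib.sha256()
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest()

digest = local_sha256("tmux_2.3-1.dsc")
url = "https://api.ftp-master.debian.org/sha256sum_in_archive/%s" % digest  # assumed route
with urllib.request.urlopen(url) as resp:
    for entry in json.loads(resp.read().decode("utf-8")):
        print(entry["component"], entry["filename"])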
Example #4
def main():
    global Options, Logger

    cnf = Config()
    session = DBConn().session()

    Arguments = [('h', "help", "Archive-Dedup-Pool::Options::Help")]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    for i in ["help"]:
        key = "Archive-Dedup-Pool::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    Options = cnf.subtree("Archive-Dedup-Pool::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger("archive-dedup-pool")

    dedup(session)

    Logger.close()
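
The Arguments tuples and the Archive-Dedup-Pool::Options::* keys above follow python-apt's command-line convention. A self-contained sketch of that mechanism, using hypothetical Demo::Options::* keys instead of dak's Config wrapper:

import sys
import apt_pkg

cnf = apt_pkg.Configuration()
# (short option, long option, config key[, "HasArg" when the option takes a value])
arguments = [('h', "help", "Demo::Options::Help"),
             ('s', "suite", "Demo::Options::Suite", "HasArg")]

# Option values land in the configuration tree; positional args are returned.
positional = apt_pkg.parse_commandline(cnf, arguments, sys.argv)

if cnf.find_b("Demo::Options::Help"):
    print("usage: demo [-s suite] [files...]")
else:
    print("suite:", cnf.find("Demo::Options::Suite", "unstable"))
    print("files:", positional)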
Example #5
def source_by_metadata(key=None):
    """

    Finds all Debian source packages which have the specified metadata set.

    E.g., to find out the Maintainer of all source packages, query
    /source/by_metadata/Maintainer.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: list of dictionaries
    @return: A list of dictionaries of
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    q = s.query(DBSource.source, SourceMetadata.value)
    q = q.join(SourceMetadata).join(MetadataKey)
    q = q.filter(MetadataKey.key == key)
    ret = []
    for p in q:
        ret.append({'source': p.source, 'metadata_value': p.value})
    s.close()

    return json.dumps(ret)
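
How this handler is bound to a URL is not shown here; in a bottle application it would typically be a route with the metadata key as a path parameter. A hedged sketch of such wiring (the path and the standalone app are assumptions):

import bottle

app = bottle.Bottle()

@app.route('/source/by_metadata/<key>')
def source_by_metadata_route(key):
    # Delegate to the handler above, which already returns a JSON string.
    bottle.response.content_type = 'application/json; charset=UTF-8'
    return source_by_metadata(key)

if __name__ == '__main__':
    # Development server only; a real deployment runs behind a WSGI server.
    app.run(host='localhost', port=8080)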
Example #6
def acl_deny(acl_name, fingerprint, sources):
    tbl = DBConn().tbl_acl_per_source

    session = DBConn().session()

    acl_id = session.query(ACL).filter_by(name=acl_name).one().id
    fingerprint_id = session.query(Fingerprint).filter_by(fingerprint=fingerprint).one().fingerprint_id

    # TODO: check that fpr is in ACL

    for source in sources:
        result = session.execute(tbl.delete().where(tbl.c.acl_id == acl_id).where(tbl.c.fingerprint_id == fingerprint_id).where(tbl.c.source == source))
        if result.rowcount < 1:
            print("W: Tried to deny uploads of '{}', but was not allowed before.".format(source))

    session.commit()
Example #7
def sources_in_suite(suite=None):
    """
    Returns all source packages and their versions in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')

    s = DBConn().session()
    q = s.query(DBSource).join(Suite, DBSource.suites)
    q = q.filter(or_(Suite.suite_name == suite, Suite.codename == suite))
    ret = []
    for p in q:
        ret.append({'source':    p.source,
                    'version':   p.version})

    s.close()

    return json.dumps(ret)
Example #8
def generate_sources(suite_id, component_id):
    global _sources_query
    from daklib.filewriter import SourcesFileWriter
    from daklib.dbconn import Component, DBConn, OverrideType, Suite
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    dsc_type = session.query(OverrideType).filter_by(overridetype='dsc').one().overridetype_id

    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)

    overridesuite_id = suite.get_overridesuite().suite_id

    writer_args = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component.component_name
    }
    if suite.indices_compression is not None:
        writer_args['compression'] = suite.indices_compression
    writer = SourcesFileWriter(**writer_args)
    output = writer.open()

    # run query and write Sources
    r = session.execute(_sources_query, {"suite": suite_id, "component": component_id, "component_name": component.component_name, "dsc_type": dsc_type, "overridesuite": overridesuite_id})
    for (stanza,) in r:
        print(stanza, file=output)
        print("", file=output)

    writer.close()

    message = ["generate sources", suite.suite_name, component.component_name]
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Example #9
def read_number():
    session = DBConn().session()
    result = session.query('foo').from_statement(
        sql.text('select 7 as foo')).scalar()
    sleep(0.1)
    session.close()
    return result
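
The short sleep makes read_number a handy probe for session and connection handling under concurrency. A hedged usage sketch that calls it from several threads:

from concurrent.futures import ThreadPoolExecutor

# Run read_number() concurrently to exercise the session/connection pool.
with ThreadPoolExecutor(max_workers=4) as pool:
    results = list(pool.map(lambda _: read_number(), range(8)))

# Every call selects the literal 7, so all results should agree.
assert results == [7] * 8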
Example #10
def generate_translations(suite_id, component_id):
    global _translations_query
    from daklib.filewriter import TranslationFileWriter
    from daklib.dbconn import DBConn, Suite, Component
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)

    writer_args = {
        'archive': suite.archive.path,
        'suite': suite.suite_name,
        'component': component.component_name,
        'language': 'en',
    }
    if suite.i18n_compression is not None:
        writer_args['compression'] = suite.i18n_compression
    writer = TranslationFileWriter(**writer_args)
    output = writer.open()

    r = session.execute(_translations_query, {
        "suite": suite_id,
        "component": component_id
    })
    for (stanza, ) in r:
        print(stanza, file=output)

    writer.close()

    message = [
        "generate-translations", suite.suite_name, component.component_name
    ]
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Example #11
def acl_export_per_source(acl_name):
    session = DBConn().session()
    acl = session.query(ACL).filter_by(name=acl_name).one()

    query = r"""
      SELECT
        f.fingerprint,
        (SELECT COALESCE(u.name, '') || ' <' || u.uid || '>'
           FROM uid u
           JOIN fingerprint f2 ON u.id = f2.uid
          WHERE f2.id = f.id) AS name,
        STRING_AGG(
          a.source
          || COALESCE(' (' || (SELECT fingerprint FROM fingerprint WHERE id = a.created_by_id) || ')', ''),
          E',\n ' ORDER BY a.source)
      FROM acl_per_source a
      JOIN fingerprint f ON a.fingerprint_id = f.id
      LEFT JOIN uid u ON f.uid = u.id
      WHERE a.acl_id = :acl_id
      GROUP BY f.id, f.fingerprint
      ORDER BY name
      """

    for row in session.execute(query, {'acl_id': acl.id}):
        print("Fingerprint:", row[0])
        print("Uid:", row[1])
        print("Allow:", row[2])
        print()

    session.rollback()
    session.close()
Example #12
def binary_by_metadata(key=None):
    """

    Finds all Debian binary packages which have the specified metadata set.

    E.g., to find out the Go import paths of all Debian Go packages, query
    /binary/by_metadata/Go-Import-Path.

    @type key: string
    @param key: Metadata key to search for.

    @rtype: list of dictionaries
    @return: A list of dictionaries of
             - binary
             - source
             - metadata value
    """

    if not key:
        return bottle.HTTPError(503, 'Metadata key not specified.')

    s = DBConn().session()
    q = s.query(DBBinary.package, DBSource.source, SourceMetadata.value)
    q = q.join(DBSource).join(SourceMetadata).join(MetadataKey)
    q = q.filter(MetadataKey.key == key)
    q = q.group_by(DBBinary.package, DBSource.source, SourceMetadata.value)
    ret = []
    for p in q:
        ret.append({'binary': p.package,
                    'source': p.source,
                    'metadata_value': p.value})
    s.close()

    return json.dumps(ret)
Example #13
def table_row(source, version, arch, last_mod, maint, distribution, closes, fingerprint, sponsor, changedby):

    global row_number

    trclass = "sid"
    session = DBConn().session()
    for dist in distribution:
        if dist == "experimental":
            trclass = "exp"

    query = '''SELECT source
               FROM source_suite
               WHERE source = :source
               AND suite_name IN ('unstable', 'experimental')'''
    if not session.execute(query, {'source': source}).rowcount:
        trclass += " sourceNEW"
    session.commit()

    if row_number % 2 != 0:
        print("<tr class=\"%s even\">" % (trclass))
    else:
        print("<tr class=\"%s odd\">" % (trclass))

    if "sourceNEW" in trclass:
        print("<td class=\"package\">%s</td>" % (source))
    else:
        print("<td class=\"package\"><a href=\"https://tracker.debian.org/pkg/%(source)s\">%(source)s</a></td>" % {'source': source})
    print("<td class=\"version\">")
    for vers in version.split():
        print("<a href=\"new/%s_%s.html\">%s</a><br/>" % (source, utils.html_escape(vers), utils.html_escape(vers)))
    print("</td>")
    print("<td class=\"arch\">%s</td>" % (arch))
    print("<td class=\"distribution\">")
    for dist in distribution:
        print("%s<br/>" % (dist))
    print("</td>")
    print("<td class=\"age\"><abbr title=\"%s\">%s</abbr></td>" % (
        datetime.datetime.utcfromtimestamp(int(time.time()) - last_mod).strftime('%a, %d %b %Y %T UTC'),
        time_pp(last_mod),
    ))
    (name, mail) = maint.split(":", 1)

    print("<td class=\"upload-data\">")
    print("<span class=\"maintainer\">Maintainer: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))
    (name, mail) = changedby.split(":", 1)
    print("<span class=\"changed-by\">Changed-By: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a></span><br/>" % (utils.html_escape(mail), utils.html_escape(name)))

    if sponsor:
        print("<span class=\"sponsor\">Sponsor: <a href=\"https://qa.debian.org/developer.php?login=%s\">%s</a>@debian.org</span><br/>" % (utils.html_escape(sponsor), utils.html_escape(sponsor)))

    print("<span class=\"signature\">Fingerprint: %s</span>" % (fingerprint))
    print("</td>")

    print("<td class=\"closes\">")
    for close in closes:
        print("<a href=\"https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=%s\">#%s</a><br/>" % (utils.html_escape(close), utils.html_escape(close)))
    print("</td></tr>")
    row_number += 1
Example #14
def main():
    cnf = Config()

    Arguments = [('h',"help","DEP11::Options::Help"),
                 ('s',"suite","DEP11::Options::Suite", "HasArg"),
                 ('e',"expire","DEP11::Options::ExpireCache"),
                 ('h',"write-hints","DEP11::Options::WriteHints"),
                 ]
    for i in ["help", "suite", "ExpireCache"]:
        if not cnf.has_key("DEP11::Options::%s" % (i)):
            cnf["DEP11::Options::%s" % (i)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("DEP11::Options")

    if Options["Help"]:
        usage()
        return

    suitename = Options["Suite"]
    if not suitename:
        print("You need to specify a suite!")
        sys.exit(1)

    # check if we have some important config options set
    if not cnf.has_key("Dir::MetaInfo"):
        print("You need to specify a metadata export directory (Dir::MetaInfo)")
        sys.exit(1)
    if not cnf.has_key("DEP11::Url"):
        print("You need to specify a metadata public web URL (DEP11::Url)")
        sys.exit(1)
    if not cnf.has_key("DEP11::IconSizes"):
        print("You need to specify a list of allowed icon-sizes (DEP11::IconSizes)")
        sys.exit(1)
    if Options["WriteHints"] and not cnf.has_key("Dir::MetaInfoHints"):
        print("You need to specify an export directory for DEP-11 hints files (Dir::MetaInfoHints)")
        sys.exit(1)

    logger = daklog.Logger('generate-metadata')

    from daklib.dbconn import Component, DBConn, get_suite, Suite
    session = DBConn().session()
    suite = get_suite(suitename.lower(), session)

    if Options["ExpireCache"]:
        expire_dep11_data_cache(session, suitename, logger)

    process_suite(session, suite, logger)
    # export database content as Components-<arch>.xz YAML documents
    write_component_files(session, suite, logger)

    if Options["WriteHints"]:
        write_hints_files(session, suite, logger)

    # we're done
    logger.close()
Example #15
def add_external_signature_request(session, target_suite, suite, binary):
    tbl_ba = DBConn().tbl_bin_associations
    tbl_esr = DBConn().tbl_external_signature_requests

    # TODO [sqlalchemy >= 1.1]: use `ON CONFLICT DO NOTHING`
    #select = sql.select([tbl_ba.c.id, target_suite.suite_id]).where((tbl_ba.c.suite == suite.suite_id) & (tbl_ba.c.bin == binary.binary_id))
    #insert = pgsql.insert(tbl_esr).from_select([tbl_esr.c.association_id, tbl_esr.c.suite_id], select).on_conflict_do_nothing()
    #session.execute(insert)

    ba_id = session.execute(
        sql.select([tbl_ba.c.id
                    ]).where((tbl_ba.c.suite == suite.suite_id)
                             & (tbl_ba.c.bin == binary.binary_id))).scalar()
    exists = session.execute(
        sql.select([tbl_esr]).where(tbl_esr.c.association_id == ba_id).where(
            tbl_esr.c.suite_id == target_suite.suite_id)).first()
    if exists is None:
        insert = sql.insert(tbl_esr).values(association_id=ba_id,
                                            suite_id=target_suite.suite_id)
        session.execute(insert)
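
The commented-out TODO above sketches the same operation as a single statement. With SQLAlchemy 1.1 or newer it could look roughly like this (a sketch assuming a unique constraint on (association_id, suite_id), not a drop-in replacement):

from sqlalchemy import sql
from sqlalchemy.dialects.postgresql import insert as pg_insert

def add_external_signature_request_upsert(session, target_suite, suite, binary):
    tbl_ba = DBConn().tbl_bin_associations
    tbl_esr = DBConn().tbl_external_signature_requests

    select = sql.select([tbl_ba.c.id, sql.literal(target_suite.suite_id)]) \
        .where((tbl_ba.c.suite == suite.suite_id) & (tbl_ba.c.bin == binary.binary_id))
    insert = pg_insert(tbl_esr) \
        .from_select([tbl_esr.c.association_id, tbl_esr.c.suite_id], select) \
        .on_conflict_do_nothing()
    session.execute(insert)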
Example #16
    def get_db_value(name, default=None, rettype=None):
        from daklib.dbconn import DBConfig, DBConn, NoResultFound
        try:
            res = DBConn().session().query(DBConfig).filter(
                DBConfig.name == name).one()
        except NoResultFound:
            return default

        if rettype:
            return rettype(res.value)
        else:
            return res.value
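
A short usage note: the default is returned when the name is unknown, and rettype converts the stored text. The configuration names below are hypothetical, purely for illustration:

# Hypothetical configuration entries, for illustration only.
max_upload = get_db_value('upload_max_size', default=100 * 1024 * 1024, rettype=int)
signing_key = get_db_value('signing_keyid', default=None)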
Example #17
def per_arch_space_use():
    session = DBConn().session()
    q = session.execute("""
SELECT a.arch_string as Architecture, sum(f.size) AS sum
  FROM files f, binaries b, architecture a
  WHERE a.id=b.architecture AND f.id=b.file
  GROUP BY a.arch_string ORDER BY sum""").fetchall()
    for j in q:
        print "%-15.15s %s" % (j[0], j[1])
    print
    q = session.execute("SELECT sum(size) FROM files WHERE filename ~ '.(diff.gz|tar.gz|dsc)$'").fetchall()
    print "%-15.15s %s" % ("Source", q[0][0])
Example #18
def generate_packages(suite_id, component_id, architecture_id, type_name):
    global _packages_query
    from daklib.filewriter import PackagesFileWriter
    from daklib.dbconn import Architecture, Component, DBConn, OverrideType, Suite
    from daklib.dakmultiprocessing import PROC_STATUS_SUCCESS

    session = DBConn().session()
    arch_all_id = session.query(Architecture).filter_by(arch_string='all').one().arch_id
    type_id = session.query(OverrideType).filter_by(overridetype=type_name).one().overridetype_id

    suite = session.query(Suite).get(suite_id)
    component = session.query(Component).get(component_id)
    architecture = session.query(Architecture).get(architecture_id)

    overridesuite_id = suite.get_overridesuite().suite_id
    include_long_description = suite.include_long_description

    # We currently filter out the "Tag" line. They are set by external
    # overrides and NOT by the maintainer. And actually having it set by
    # maintainer means we output it twice at the moment -> which breaks
    # dselect.
    metadata_skip = ["Section", "Priority", "Tag"]
    if include_long_description:
        metadata_skip.append("Description-md5")

    writer_args = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component.component_name,
            'architecture': architecture.arch_string,
            'debtype': type_name
    }
    if suite.indices_compression is not None:
        writer_args['compression'] = suite.indices_compression
    writer = PackagesFileWriter(**writer_args)
    output = writer.open()

    r = session.execute(_packages_query, {"archive_id": suite.archive.archive_id,
        "suite": suite_id, "component": component_id, 'component_name': component.component_name,
        "arch": architecture_id, "type_id": type_id, "type_name": type_name, "arch_all": arch_all_id,
        "overridesuite": overridesuite_id, "metadata_skip": metadata_skip,
        "include_long_description": 'true' if include_long_description else 'false'})
    for (stanza,) in r:
        print(stanza, file=output)
        print("", file=output)

    writer.close()

    message = ["generate-packages", suite.suite_name, component.component_name, architecture.arch_string]
    session.rollback()
    return (PROC_STATUS_SUCCESS, message)
Example #19
def export_external_signature_requests(session, path):
    tbl_arch = DBConn().tbl_architecture
    tbl_ba = DBConn().tbl_bin_associations
    tbl_bin = DBConn().tbl_binaries
    tbl_esr = DBConn().tbl_external_signature_requests
    tbl_suite = DBConn().tbl_suite

    query = sql.select([tbl_bin.c.package, tbl_suite.c.suite_name, tbl_arch.c.arch_string, sql.func.max(tbl_bin.c.version)]) \
            .select_from(tbl_esr.join(tbl_suite).join(tbl_ba, tbl_ba.c.id == tbl_esr.c.association_id).join(tbl_bin).join(tbl_arch)) \
            .group_by(tbl_bin.c.package, tbl_suite.c.suite_name, tbl_arch.c.arch_string)
    requests = session.execute(query)

    data = {
        'packages': [{
            'package': row[0],
            'suite': row[1],
            'architecture': row[2],
            'version': row[3],
        } for row in requests],
    }

    with open(path, 'w') as fh:
        json.dump(data, fh, indent=2)
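
The export is plain JSON, so consumers only need the standard library to read it back. A minimal sketch (the path is whatever was passed to export_external_signature_requests):

import json

with open('external-signature-requests.json') as fh:  # path used for the export
    data = json.load(fh)

for pkg in data['packages']:
    print(pkg['suite'], pkg['package'], pkg['version'], pkg['architecture'])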
Example #20
def acl_allow(acl_name, fingerprint, sources):
    tbl = DBConn().tbl_acl_per_source

    session = DBConn().session()

    acl_id = session.query(ACL).filter_by(name=acl_name).one().id
    fingerprint_id = session.query(Fingerprint).filter_by(fingerprint=fingerprint).one().fingerprint_id

    # TODO: check that fpr is in ACL

    data = [
        {
            'acl_id': acl_id,
            'fingerprint_id': fingerprint_id,
            'source': source,
            'reason': 'set by {} via CLI'.format(os.environ.get('USER', '(unknown)')),
        }
        for source in sources
    ]

    session.execute(tbl.insert(), data)

    session.commit()
Example #21
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: list of strings
    @return: A list of metadata keys
    """
    s = DBConn().session()
    q = s.query(MetadataKey)
    ret = []
    for p in q:
        ret.append(p.key)

    s.close()

    return json.dumps(ret)
Example #22
def acl_set_fingerprints(acl_name, entries):
    session = DBConn().session()
    acl = session.query(ACL).filter_by(name=acl_name).one()

    acl.fingerprints.clear()
    for entry in entries:
        entry = entry.strip()
        if entry.startswith('#') or len(entry) == 0:
            continue

        fps = get_fingerprint(entry, session)
        if len(fps) == 0:
            print("Unknown key for '{0}'".format(entry))
        else:
            acl.fingerprints.update(fps)

    session.commit()
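
entries can be any iterable of lines; blank lines and '#' comments are skipped, so a plain text file of fingerprints or uids can be passed directly. A hedged usage sketch (both the file name and the 'dm' ACL name are illustrative):

# Hypothetical input file: one fingerprint or uid per line, '#' starts a comment.
with open('dm-keys.txt') as fh:
    acl_set_fingerprints('dm', fh)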
Example #23
def binary_metadata_keys():
    """
    List all possible metadata keys

    @rtype: list of strings
    @return: A list of metadata keys
    """
    s = DBConn().session()
    q = s.query(MetadataKey)
    ret = []
    for p in q:
        ret.append(p.key)

    s.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Example #24
def dsc_in_suite(suite=None, source=None):
    """
    Find all dsc files for a given source package name in a given suite.

    @since: December 2014

    @type suite: string
    @param suite: Name of the suite.
    @see: L{I{suites}<dakweb.queries.suite.suites>} on how to receive a list of valid suites.

    @type source: string
    @param source: Source package to query for.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - version
             - component
             - filename
             - filesize
             - sha256sum
    """
    if suite is None:
        return bottle.HTTPError(503, 'Suite not specified.')
    if source is None:
        return bottle.HTTPError(503, 'Source package not specified.')

    s = DBConn().session()
    q = s.query(DSCFile).join(PoolFile)
    q = q.join(DBSource).join(Suite, DBSource.suites)
    q = q.filter(or_(Suite.suite_name == suite, Suite.codename == suite))
    q = q.filter(DBSource.source == source)
    q = q.filter(PoolFile.filename.endswith('.dsc'))
    ret = []
    for p in q:
        ret.append({
            'version': p.source.version,
            'component': p.poolfile.component.component_name,
            'filename': p.poolfile.filename,
            'filesize': p.poolfile.filesize,
            'sha256sum': p.poolfile.sha256sum
        })

    s.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Example #25
def all_sources():
    """
    Returns all source packages and their versions known to the archive
    (this includes NEW).

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - source
             - version
    """

    s = DBConn().session()
    q = s.query(DBSource)
    ret = []
    for p in q:
        ret.append({'source': p.source, 'version': p.version})

    s.close()

    return json.dumps(ret)
Example #26
def archives():
    """
    Give information about all known archives (sets of suites)

    @rtype: dict
    @return: list of dictionaries
    """

    s = DBConn().session()
    q = s.query(Archive)
    q = q.order_by(Archive.archive_name)
    ret = []
    for a in q:
        ret.append({
            'name': a.archive_name,
            'suites': [x.suite_name for x in a.suites]
        })

    s.close()

    return json.dumps(ret)
Example #27
def main():
    cnf = Config()

    Arguments = [
        ('h', "help", "DEP11::Options::Help"),
        ('e', "expire", "DEP11::Options::ExpireCache"),
        ('s', "suite", "DEP11::Options::Suite", "HasArg"),
    ]
    for i in ["help", "suite", "ExpireCache"]:
        if not cnf.has_key("DEP11::Options::%s" % (i)):
            cnf["DEP11::Options::%s" % (i)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("DEP11::Options")

    if Options["Help"]:
        usage()
        return

    suitename = Options["Suite"]
    if not suitename:
        print("You need to specify a suite!")
        return

    logger = daklog.Logger('generate-metadata')

    from daklib.dbconn import Component, DBConn, get_suite, Suite
    session = DBConn().session()
    suite = get_suite(suitename.lower(), session)

    if Options["ExpireCache"]:
        expire_dep11_data_cache(session, suitename)

    process_suite(session, suite, logger)
    # export database content as Components-<arch>.xz YAML documents
    write_component_files(suite)

    # we're done
    logger.close()
Example #28
def get_provides(suite):
    provides = set()
    session = DBConn().session()
    # Bind the suite name as a parameter instead of interpolating it into the SQL.
    query = '''SELECT DISTINCT value
               FROM binaries_metadata m
               JOIN bin_associations b
               ON b.bin = m.bin_id
               WHERE key_id = (
                 SELECT key_id
                 FROM metadata_keys
                 WHERE key = 'Provides' )
               AND b.suite = (
                 SELECT id
                 FROM suite
                 WHERE suite_name = :suite
                 OR codename = :suite)'''
    for p in session.execute(query, {'suite': suite}):
        for e in p:
            for i in e.split(','):
                provides.add(i.strip())
    session.close()
    return provides
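
Because get_provides flattens the comma-separated Provides fields into a set, membership checks are cheap. A small usage sketch:

# Check whether some virtual packages are provided in a suite.
provides = get_provides('unstable')
for virtual in ('mail-transport-agent', 'httpd'):
    state = 'provided' if virtual in provides else 'not provided'
    print("%s is %s in unstable" % (virtual, state))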
Example #29
def suites():
    """
    Give information about all known suites.

    @maps: name maps to Suite: in the release file
    @maps: codename maps to Codename: in the release file.
    @maps: dakname is an internal name and should not be relied upon.

    @rtype: list of dictionaries
    @return: Dictionaries made out of
             - name
             - codename
             - dakname
             - archive
             - architectures
             - components

    """

    s = DBConn().session()
    q = s.query(Suite)
    q = q.order_by(Suite.suite_name)
    ret = []
    for p in q:
        ret.append({
            'name': p.release_suite_output,
            'codename': p.codename,
            'dakname': p.suite_name,
            'archive': p.archive.archive_name,
            'architectures': [x.arch_string for x in p.architectures],
            'components': [x.component_name for x in p.components]
        })

    s.close()

    bottle.response.content_type = 'application/json; charset=UTF-8'
    return json.dumps(ret)
Example #30
def main():
    global Cnf

    Cnf = utils.get_conf()
    Arguments = [('h', "help", "Queue-Report::Options::Help"),
                 ('n', "new", "Queue-Report::Options::New"),
                 ('8', '822', "Queue-Report::Options::822"),
                 ('s', "sort", "Queue-Report::Options::Sort", "HasArg"),
                 ('a', "age", "Queue-Report::Options::Age", "HasArg"),
                 ('r', "rrd", "Queue-Report::Options::Rrd", "HasArg"),
                 ('d', "directories", "Queue-Report::Options::Directories",
                  "HasArg")]
    for i in ["help"]:
        key = "Queue-Report::Options::%s" % i
        if key not in Cnf:
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Queue-Report::Options")
    if Options["Help"]:
        usage()

    if "Queue-Report::Options::New" in Cnf:
        header()

    queue_names = []

    if "Queue-Report::Options::Directories" in Cnf:
        for i in Cnf["Queue-Report::Options::Directories"].split(","):
            queue_names.append(i)
    elif "Queue-Report::Directories" in Cnf:
        queue_names = Cnf.value_list("Queue-Report::Directories")
    else:
        queue_names = ["byhand", "new"]

    if "Queue-Report::Options::Rrd" in Cnf:
        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
    elif "Dir::Rrd" in Cnf:
        rrd_dir = Cnf["Dir::Rrd"]
    else:
        rrd_dir = None

    f = None
    if "Queue-Report::Options::822" in Cnf:
        # Open the report file
        f = sys.stdout
        filename822 = Cnf.get("Queue-Report::ReportLocations::822Location")
        if filename822:
            f = open(filename822, "w")

    session = DBConn().session()

    for queue_name in queue_names:
        queue = session.query(PolicyQueue).filter_by(
            queue_name=queue_name).first()
        if queue is not None:
            process_queue(queue, f, rrd_dir)
        else:
            utils.warn("Cannot find queue %s" % queue_name)

    if "Queue-Report::Options::822" in Cnf:
        f.close()

    if "Queue-Report::Options::New" in Cnf:
        footer()