Esempio n. 1
0
def main():
    # Process NEW uploads and render one HTML page per upload.
    dak.examine_package.use_html = True
    pool = DakProcessPool(processes=5)

    # Collect the upload ids up front and close the session before the
    # worker processes start (each worker opens its own DB connection).
    session = DBConn().session()
    upload_ids = [u.id for u in init(session)]
    session.close()

    for upload_id in upload_ids:
        pool.apply_async(do_pkg, [upload_id], callback=result_callback)
    pool.close()

    #p.wait(timeout=600)
    pool.join()

    # Any page a worker did not finish gets a placeholder body.
    for htmlfile in htmlfiles_to_process:
        with open(htmlfile, "w") as fd:
            fd.write(timeout_str)

    # Query the pool status once instead of twice (check + message).
    status = pool.overall_status()
    if status != PROC_STATUS_SUCCESS:
        raise Exception("Processing failed (code %s)" % (status,))

    # Remove stale HTML pages that no longer correspond to a known source.
    files = set(os.listdir(cnf["Show-New::HTMLPath"]))
    to_delete = [
        x for x in files.difference(set(sources)) if x.endswith(".html")
    ]
    for f in to_delete:
        os.remove(os.path.join(cnf["Show-New::HTMLPath"], f))
Esempio n. 2
0
def process_suite(session, suite, logger, force=False):
    '''
    Extract new metadata for a given suite.

    For every component of the suite, find packages with meta files and
    extract their DEP-11 metadata in a process pool, then export the
    collected data via MetadataPool and rebuild the component icon tarball.

    :param session: database session used for package lookup and for
        exporting the collected metadata
    :param suite: Suite ORM object to process
    :param logger: daklog-style logger; messages are passed as lists
    :param force: when True, process the suite even if marked untouchable
    '''
    path = Config()["Dir::Pool"]

    if suite.untouchable and not force:
        import daklib.utils
        # fubar() terminates the program; the return is only a safety net.
        daklib.utils.fubar("Refusing to touch %s (untouchable and not forced)" % suite.suite_name)
        return

    for component in [c.component_name for c in suite.components]:
        mif = MetaInfoFinder(session)
        pkglist = mif.find_meta_files(component=component, suitename=suite.suite_name)

        values = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component,
        }

        pool = DakProcessPool()
        dpool = MetadataPool(values)

        def parse_results(message):
            # Worker results arrive as (status code, payload).
            code, msg = message
            if code == PROC_STATUS_SUCCESS:
                # we abuse the message return value here...
                logger.log([msg['message']])
                dpool.append_cptdata(msg['arch'], msg['cpts'])
            elif code == PROC_STATUS_SIGNALRAISED:
                # typo fix: log message previously said "recieved"
                logger.log(['E: Subprocess received signal ', msg])
            else:
                logger.log(['E: ', msg])

        cnf = Config()
        iconf = IconFinder(suite.suite_name, component)
        mde = MetadataExtractor(suite.suite_name, component,
                                cnf["Dir::MetaInfo"],
                                cnf["DEP11::Url"],
                                cnf.value_list('DEP11::IconSizes'),
                                iconf)

        for pkgname, pkg in pkglist.items():
            for arch, data in pkg.items():
                package_fname = os.path.join(path, data['filename'])
                if not os.path.exists(package_fname):
                    print('Package not found: %s' % (package_fname))
                    continue
                pool.apply_async(extract_metadata,
                                 (mde, suite.suite_name, pkgname, data['files'],
                                  data['binid'], package_fname, arch),
                                 callback=parse_results)
        pool.close()
        pool.join()

        # save new metadata to the database
        dpool.export(session)
        make_icon_tar(suite.suite_name, component)

        logger.log(["Completed metadata extraction for suite %s/%s" % (suite.suite_name, component)])
Esempio n. 3
0
def main():
    # Process NEW changes files and render their HTML pages.
    session = DBConn().session()
    changes_files = init(session)

    # use_html is a flag; set the boolean explicitly rather than 1.
    examine_package.use_html = True

    pool = DakProcessPool(processes=5)
    p = pool.map_async(do_pkg, changes_files)
    pool.close()
    # Allow the workers at most ten minutes; pages still unfinished
    # afterwards get the timeout placeholder written below.
    p.wait(timeout=600)
    for htmlfile in htmlfiles_to_process:
        with open(htmlfile, "w") as fd:
            fd.write(timeout_str)

    # Remove stale HTML pages that no longer correspond to a known source.
    files = set(os.listdir(cnf["Show-New::HTMLPath"]))
    to_delete = [x for x in files.difference(set(sources)) if x.endswith(".html")]
    for f in to_delete:
        os.remove(os.path.join(cnf["Show-New::HTMLPath"], f))
Esempio n. 4
0
    def testPool(self):
        """End-to-end check of DakProcessPool: submit 3x4 jobs and compare
        the recorded (status, payload) results against the expected list."""
        def alarm_handler(signum, frame):
            raise AssertionError('Timed out')

        # Shouldn't take us more than 15 seconds to run this test
        signal.signal(signal.SIGALRM, alarm_handler)
        signal.alarm(15)

        p = DakProcessPool()
        for s in range(3):
            for j in range(4):
                p.apply_async(async_function, [s, j])

        p.close()
        p.join()

        # Disarm the watchdog and restore the default SIGALRM handler.
        signal.alarm(0)
        signal.signal(signal.SIGALRM, signal.SIG_DFL)

        expected = [(PROC_STATUS_SUCCESS, 'blah, 0, 0'),
                    (PROC_STATUS_MISCFAILURE, 'Test custom error return'),
                    (PROC_STATUS_SUCCESS, 'blah, 0, 2'),
                    (PROC_STATUS_EXCEPTION,
                     'Exception: Test uncaught exception handling'),
                    (PROC_STATUS_SIGNALRAISED, 15),
                    (PROC_STATUS_SIGNALRAISED, 13),
                    (PROC_STATUS_SIGNALRAISED, 14),
                    (PROC_STATUS_SIGNALRAISED, 1),
                    (PROC_STATUS_SUCCESS, 'blah, 2, 0'),
                    (PROC_STATUS_SUCCESS, 'blah, 2, 1'),
                    (PROC_STATUS_SUCCESS, 'blah, 2, 2'),
                    (PROC_STATUS_EXCEPTION,
                     'Exception: Test uncaught exception handling')]

        self.assertEqual(len(p.results), len(expected))

        # When a result does not match exactly, fall back to comparing the
        # status code and only the first line of the payload — presumably
        # exception payloads carry a multi-line traceback; confirm against
        # DakProcessPool's result format.
        for r in range(len(p.results)):
            if p.results[r] != expected[r]:
                code, info = p.results[r]
                line1 = info.splitlines()[0]
                self.assertEqual(code, expected[r][0])
                self.assertEqual(line1, expected[r][1])
Esempio n. 5
0
def main():
    """Render HTML pages for NEW uploads using a small worker pool."""
    examine_package.use_html = True
    pool = DakProcessPool(processes=5)

    # Fetch the ids first so no DB session is shared with the workers.
    db_session = DBConn().session()
    ids = [upload.id for upload in init(db_session)]
    db_session.close()

    for current_id in ids:
        pool.apply_async(do_pkg, [current_id])
    pool.close()

    #p.wait(timeout=600)
    pool.join()
    # Pages that were not produced get the timeout placeholder.
    for htmlfile in htmlfiles_to_process:
        with open(htmlfile, "w") as fd:
            fd.write(timeout_str)

    # Clean up HTML files that no longer belong to a known source.
    html_dir = cnf["Show-New::HTMLPath"]
    files = set(os.listdir(html_dir))
    to_delete = filter(lambda name: name.endswith(".html"), files.difference(set(sources)))
    for f in to_delete:
        os.remove(os.path.join(html_dir, f))
Esempio n. 6
0
def main():
    """Process queued uploads in parallel and emit one HTML page each."""
    examine_package.use_html = True
    pool = DakProcessPool(processes=5)

    # Read the upload ids, then release the session before forking workers.
    session = DBConn().session()
    pending = [entry.id for entry in init(session)]
    session.close()

    for uid in pending:
        pool.apply_async(do_pkg, [uid])
    pool.close()

    # p.wait(timeout=600)
    pool.join()
    # Unfinished pages receive the standard timeout text.
    for htmlfile in htmlfiles_to_process:
        with open(htmlfile, "w") as fd:
            fd.write(timeout_str)

    # Delete leftover .html files with no matching source package.
    known = set(sources)
    files = set(os.listdir(cnf["Show-New::HTMLPath"]))
    to_delete = filter(lambda name: name.endswith(".html"), files.difference(known))
    for f in to_delete:
        os.remove(os.path.join(cnf["Show-New::HTMLPath"], f))
Esempio n. 7
0
    def testPool(self):
        """Submit 3x4 jobs to a DakProcessPool and verify every recorded
        (status, payload) pair matches the expected sequence exactly."""
        def on_alarm(signum, frame):
            raise AssertionError("Timed out")

        # Watchdog: abort the test if the pool hangs for over 15 seconds.
        signal.signal(signal.SIGALRM, on_alarm)
        signal.alarm(15)

        pool = DakProcessPool()
        for series in range(3):
            for step in range(4):
                pool.apply_async(test_function, [series, step])

        pool.close()
        pool.join()

        # Disarm the watchdog and restore the default SIGALRM behaviour.
        signal.alarm(0)
        signal.signal(signal.SIGALRM, signal.SIG_DFL)

        expected = [
            (PROC_STATUS_SUCCESS, "blah, 0, 0"),
            (PROC_STATUS_MISCFAILURE, "Test custom error return"),
            (PROC_STATUS_SUCCESS, "blah, 0, 2"),
            (PROC_STATUS_EXCEPTION, "Test uncaught exception handling"),
            (PROC_STATUS_SIGNALRAISED, 15),
            (PROC_STATUS_SIGNALRAISED, 13),
            (PROC_STATUS_SIGNALRAISED, 14),
            (PROC_STATUS_SIGNALRAISED, 1),
            (PROC_STATUS_SUCCESS, "blah, 2, 0"),
            (PROC_STATUS_SUCCESS, "blah, 2, 1"),
            (PROC_STATUS_SUCCESS, "blah, 2, 2"),
            (PROC_STATUS_EXCEPTION, "Test uncaught exception handling"),
        ]

        self.assertEqual(len(pool.results), len(expected))

        for got, want in zip(pool.results, expected):
            self.assertEqual(got, want)
Esempio n. 8
0
def main():
    # Build file lists for every selected suite/component/architecture pair
    # using a process pool; options not given on the command line default to
    # all values known to the database.
    cnf = Config()
    Logger = daklog.Logger('generate-filelist')
    Arguments = [('h', "help",         "Filelist::Options::Help"),
                 ('s', "suite",        "Filelist::Options::Suite", "HasArg"),
                 ('c', "component",    "Filelist::Options::Component", "HasArg"),
                 ('a', "architecture", "Filelist::Options::Architecture", "HasArg"),
                 ('i', "incremental",  "Filelist::Options::Incremental")]
    session = DBConn().session()
    query_suites = session.query(Suite)
    suites = [suite.suite_name for suite in query_suites]
    # Pre-seed each option with the comma-joined list of all known values so
    # parse_commandline below only overrides what the user actually passed.
    # (has_key/.encode(): this block is Python 2 code.)
    if not cnf.has_key('Filelist::Options::Suite'):
        cnf['Filelist::Options::Suite'] = ','.join(suites).encode()
    query_components = session.query(Component)
    components = \
        [component.component_name for component in query_components]
    if not cnf.has_key('Filelist::Options::Component'):
        cnf['Filelist::Options::Component'] = ','.join(components).encode()
    query_architectures = session.query(Architecture)
    architectures = \
        [architecture.arch_string for architecture in query_architectures]
    if not cnf.has_key('Filelist::Options::Architecture'):
        cnf['Filelist::Options::Architecture'] = ','.join(architectures).encode()
    cnf['Filelist::Options::Help'] = ''
    cnf['Filelist::Options::Incremental'] = ''
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Filelist::Options")
    if Options['Help']:
        usage()
    pool = DakProcessPool()
    # Narrow the earlier full-table queries down to the selected values.
    query_suites = query_suites. \
        filter(Suite.suite_name.in_(utils.split_args(Options['Suite'])))
    query_components = query_components. \
        filter(Component.component_name.in_(utils.split_args(Options['Component'])))
    query_architectures = query_architectures. \
        filter(Architecture.arch_string.in_(utils.split_args(Options['Architecture'])))

    def parse_results(message):
        # Worker results arrive as (status code, payload).
        code, msg = message
        if code == PROC_STATUS_SUCCESS:
            Logger.log([msg])
        elif code == PROC_STATUS_SIGNALRAISED:
            Logger.log(['E: Subprocess recieved signal ', msg])
        else:
            Logger.log(['E: ', msg])

    for suite in query_suites:
        suite_id = suite.suite_id
        for component in query_components:
            component_id = component.component_id
            for architecture in query_architectures:
                architecture_id = architecture.arch_id
                # Skip combinations the suite does not actually carry.
                if architecture not in suite.architectures:
                    pass
                elif architecture.arch_string == 'source':
                    pool.apply_async(writeSourceList,
                        (suite_id, component_id, Options['Incremental']), callback=parse_results)
                elif architecture.arch_string == 'all':
                    # arch 'all' gets both a deb and a udeb list.
                    pool.apply_async(writeAllList,
                        (suite_id, component_id, architecture_id, 'deb',
                            Options['Incremental']), callback=parse_results)
                    pool.apply_async(writeAllList,
                        (suite_id, component_id, architecture_id, 'udeb',
                            Options['Incremental']), callback=parse_results)
                else: # arch any
                    pool.apply_async(writeBinaryList,
                        (suite_id, component_id, architecture_id, 'deb',
                            Options['Incremental']), callback=parse_results)
                    pool.apply_async(writeBinaryList,
                        (suite_id, component_id, architecture_id, 'udeb',
                            Options['Incremental']), callback=parse_results)
    pool.close()
    pool.join()

    # this script doesn't change the database
    session.close()

    Logger.close()

    sys.exit(pool.overall_status())
Esempio n. 9
0
def main():
    # Generate release files for the selected suites using a process pool.
    global Logger

    cnf = Config()

    # Ensure every option key exists so the lookups below cannot fail.
    for i in ["Help", "Suite", "Force", "Quiet"]:
        key = "Generate-Releases::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    Arguments = [('h', "help", "Generate-Releases::Options::Help"),
                 ('a', 'archive', 'Generate-Releases::Options::Archive', 'HasArg'),
                 ('s', "suite", "Generate-Releases::Options::Suite"),
                 ('f', "force", "Generate-Releases::Options::Force"),
                 ('q', "quiet", "Generate-Releases::Options::Quiet"),
                 ('o', 'option', '', 'ArbItem')]

    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Generate-Releases::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('generate-releases')
    pool = DakProcessPool()

    session = DBConn().session()

    if Options["Suite"]:
        # Explicit suite names given: look each up, warn about unknown ones.
        suites = []
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print("cannot find suite %s" % s)
                Logger.log(['cannot find suite %s' % s])
    else:
        # Default: every touchable suite, optionally limited by archive name.
        query = session.query(Suite).filter(Suite.untouchable == False)  # noqa:E712
        if 'Archive' in Options:
            archive_names = utils.split_args(Options['Archive'])
            query = query.join(Suite.archive).filter(Archive.archive_name.in_(archive_names))
        suites = query.all()

    for s in suites:
        # Setup a multiprocessing Pool. As many workers as we have CPU cores.
        if s.untouchable and not Options["Force"]:
            print("Skipping %s (untouchable)" % s.suite_name)
            continue

        if not Options["Quiet"]:
            print("Processing %s" % s.suite_name)
        Logger.log(['Processing release file for Suite: %s' % (s.suite_name)])
        pool.apply_async(generate_helper, (s.suite_id, ))

    # No more work will be added to our pool, close it and then wait for all to finish
    pool.close()
    pool.join()

    retcode = pool.overall_status()

    if retcode > 0:
        # TODO: CENTRAL FUNCTION FOR THIS / IMPROVE LOGGING
        Logger.log(['Release file generation broken: %s' % (','.join([str(x[1]) for x in pool.results]))])

    Logger.close()

    sys.exit(retcode)
Esempio n. 10
0
def main():
    # Generate Packages/Sources (and translations) index files for the
    # selected suites via a process pool. Python 2 code (print statement,
    # has_key).
    from daklib.config import Config
    from daklib import daklog

    cnf = Config()

    Arguments = [('h', "help", "Generate-Packages-Sources::Options::Help"),
                 ('a', 'archive',
                  'Generate-Packages-Sources::Options::Archive', 'HasArg'),
                 ('s', "suite", "Generate-Packages-Sources::Options::Suite",
                  'HasArg'),
                 ('f', "force", "Generate-Packages-Sources::Options::Force"),
                 ('o', 'option', '', 'ArbItem')]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    try:
        Options = cnf.subtree("Generate-Packages-Sources::Options")
    except KeyError:
        # No options given at all: fall back to an empty mapping.
        Options = {}

    if Options.has_key("Help"):
        usage()

    from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS, PROC_STATUS_SIGNALRAISED
    pool = DakProcessPool()

    logger = daklog.Logger('generate-packages-sources2')

    from daklib.dbconn import Component, DBConn, get_suite, Suite, Archive
    session = DBConn().session()
    # Make sure description md5 sums are present before generating indexes.
    session.execute("SELECT add_missing_description_md5()")
    session.commit()

    import daklib.utils

    if Options.has_key("Suite"):
        # Explicit suite names: resolve each, warn about unknown ones.
        suites = []
        suite_names = daklib.utils.split_args(Options['Suite'])
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print "I: Cannot find suite %s" % s
                logger.log(['Cannot find suite %s' % s])
    else:
        # Default: all touchable suites, optionally limited by archive.
        query = session.query(Suite).filter(Suite.untouchable == False)
        if 'Archive' in Options:
            archive_names = daklib.utils.split_args(Options['Archive'])
            query = query.join(Suite.archive).filter(
                Archive.archive_name.in_(archive_names))
        suites = query.all()

    force = Options.has_key("Force") and Options["Force"]

    def parse_results(message):
        # Worker results arrive as (status code, payload).
        code, msg = message
        if code == PROC_STATUS_SUCCESS:
            logger.log(msg)
        elif code == PROC_STATUS_SIGNALRAISED:
            logger.log(['E: Subprocess received signal ', msg])
        else:
            logger.log(['E: ', msg])

    # Lock tables so that nobody can change things underneath us
    session.execute("LOCK TABLE src_associations IN SHARE MODE")
    session.execute("LOCK TABLE bin_associations IN SHARE MODE")

    for s in suites:
        component_ids = [c.component_id for c in s.components]
        # fubar() aborts the whole run on an untouchable, unforced suite.
        if s.untouchable and not force:
            import daklib.utils
            daklib.utils.fubar(
                "Refusing to touch %s (untouchable and not forced)" %
                s.suite_name)
        for c in component_ids:
            pool.apply_async(generate_sources, [s.suite_id, c],
                             callback=parse_results)
            if not s.include_long_description:
                # Long descriptions live in separate Translation-en files.
                pool.apply_async(generate_translations, [s.suite_id, c],
                                 callback=parse_results)
            for a in s.architectures:
                if a == 'source':
                    continue
                pool.apply_async(generate_packages,
                                 [s.suite_id, c, a.arch_id, 'deb'],
                                 callback=parse_results)
                pool.apply_async(generate_packages,
                                 [s.suite_id, c, a.arch_id, 'udeb'],
                                 callback=parse_results)

    pool.close()
    pool.join()

    # this script doesn't change the database
    session.close()

    logger.close()

    sys.exit(pool.overall_status())
Esempio n. 11
0
def main():
    # Older variant of generate-packages-sources2's entry point (Python 2:
    # print statement, has_key). Fans out index generation per suite,
    # component and architecture over a process pool.
    from daklib.config import Config
    from daklib import daklog

    cnf = Config()

    Arguments = [('h',"help","Generate-Packages-Sources::Options::Help"),
                 ('a','archive','Generate-Packages-Sources::Options::Archive','HasArg'),
                 ('s',"suite","Generate-Packages-Sources::Options::Suite"),
                 ('f',"force","Generate-Packages-Sources::Options::Force"),
                 ('o','option','','ArbItem')]

    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    try:
        Options = cnf.subtree("Generate-Packages-Sources::Options")
    except KeyError:
        # No options given at all: fall back to an empty mapping.
        Options = {}

    if Options.has_key("Help"):
        usage()

    from daklib.dakmultiprocessing import DakProcessPool, PROC_STATUS_SUCCESS, PROC_STATUS_SIGNALRAISED
    pool = DakProcessPool()

    logger = daklog.Logger('generate-packages-sources2')

    from daklib.dbconn import Component, DBConn, get_suite, Suite, Archive
    session = DBConn().session()
    # Make sure description md5 sums are present before generating indexes.
    session.execute("SELECT add_missing_description_md5()")
    session.commit()

    if Options.has_key("Suite"):
        # Explicit suite names: resolve each, warn about unknown ones.
        suites = []
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print "I: Cannot find suite %s" % s
                logger.log(['Cannot find suite %s' % s])
    else:
        # Default: all touchable suites, optionally limited to one archive.
        query = session.query(Suite).filter(Suite.untouchable == False)
        if 'Archive' in Options:
            query = query.join(Suite.archive).filter(Archive.archive_name==Options['Archive'])
        suites = query.all()

    force = Options.has_key("Force") and Options["Force"]


    def parse_results(message):
        # Worker results arrive as (status code, payload).
        code, msg = message
        if code == PROC_STATUS_SUCCESS:
            logger.log([msg])
        elif code == PROC_STATUS_SIGNALRAISED:
            logger.log(['E: Subprocess recieved signal ', msg])
        else:
            logger.log(['E: ', msg])

    for s in suites:
        component_ids = [ c.component_id for c in s.components ]
        # fubar() aborts the whole run on an untouchable, unforced suite.
        if s.untouchable and not force:
            import daklib.utils
            daklib.utils.fubar("Refusing to touch %s (untouchable and not forced)" % s.suite_name)
        for c in component_ids:
            pool.apply_async(generate_sources, [s.suite_id, c], callback=parse_results)
            if not s.include_long_description:
                # Long descriptions live in separate translation files.
                pool.apply_async(generate_translations, [s.suite_id, c], callback=parse_results)
            for a in s.architectures:
                if a == 'source':
                    continue
                pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'deb'], callback=parse_results)
                pool.apply_async(generate_packages, [s.suite_id, c, a.arch_id, 'udeb'], callback=parse_results)

    pool.close()
    pool.join()

    # this script doesn't change the database
    session.close()

    logger.close()

    sys.exit(pool.overall_status())
Esempio n. 12
0
def main ():
    # Generate release files for the selected suites (Python 2 variant:
    # print statement, has_key).
    global Logger

    cnf = Config()

    # Ensure every option key exists so the lookups below cannot fail.
    for i in ["Help", "Suite", "Force", "Quiet"]:
        if not cnf.has_key("Generate-Releases::Options::%s" % (i)):
            cnf["Generate-Releases::Options::%s" % (i)] = ""

    Arguments = [('h',"help","Generate-Releases::Options::Help"),
                 ('a','archive','Generate-Releases::Options::Archive','HasArg'),
                 ('s',"suite","Generate-Releases::Options::Suite"),
                 ('f',"force","Generate-Releases::Options::Force"),
                 ('q',"quiet","Generate-Releases::Options::Quiet"),
                 ('o','option','','ArbItem')]

    suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Generate-Releases::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('generate-releases')
    pool = DakProcessPool()

    session = DBConn().session()

    if Options["Suite"]:
        # Explicit suite names: resolve each, warn about unknown ones.
        suites = []
        for s in suite_names:
            suite = get_suite(s.lower(), session)
            if suite:
                suites.append(suite)
            else:
                print "cannot find suite %s" % s
                Logger.log(['cannot find suite %s' % s])
    else:
        # Default: all touchable suites, optionally limited to one archive.
        query = session.query(Suite).filter(Suite.untouchable == False)
        if 'Archive' in Options:
            query = query.join(Suite.archive).filter(Archive.archive_name==Options['Archive'])
        suites = query.all()

    # NOTE(review): 'broken' appears unused in this function.
    broken=[]

    for s in suites:
        # Setup a multiprocessing Pool. As many workers as we have CPU cores.
        if s.untouchable and not Options["Force"]:
            print "Skipping %s (untouchable)" % s.suite_name
            continue

        if not Options["Quiet"]:
            print "Processing %s" % s.suite_name
        Logger.log(['Processing release file for Suite: %s' % (s.suite_name)])
        pool.apply_async(generate_helper, (s.suite_id, ))

    # No more work will be added to our pool, close it and then wait for all to finish
    pool.close()
    pool.join()

    retcode = pool.overall_status()

    if retcode > 0:
        # TODO: CENTRAL FUNCTION FOR THIS / IMPROVE LOGGING
        Logger.log(['Release file generation broken: %s' % (','.join([str(x[1]) for x in pool.results]))])

    Logger.close()

    sys.exit(retcode)
Esempio n. 13
0
def process_suite(session, suite, logger, force=False):
    '''
    Run by main to loop for different component and architecture.

    For every component of the suite, extract metadata from each package in
    a process pool, export the collected data through MetadataPool and
    rebuild the component's icon tarball.

    :param session: database session used for package lookup and export
    :param suite: Suite ORM object to process
    :param logger: daklog-style logger; messages are passed as lists
    :param force: when True, process the suite even if marked untouchable
    '''
    path = Config()["Dir::Pool"]

    if suite.untouchable and not force:
        import daklib.utils
        # fubar() terminates the program; the return is only a safety net.
        daklib.utils.fubar(
            "Refusing to touch %s (untouchable and not forced)" %
            suite.suite_name)
        return

    for component in [c.component_name for c in suite.components]:
        mif = MetaInfoFinder(session)
        pkglist = mif.find_meta_files(component=component,
                                      suitename=suite.suite_name)

        values = {
            'archive': suite.archive.path,
            'suite': suite.suite_name,
            'component': component,
        }

        pool = DakProcessPool()
        dpool = MetadataPool(values)

        def parse_results(message):
            # Worker results arrive as (status code, payload).
            code, msg = message
            if code == PROC_STATUS_SUCCESS:
                # we abuse the message return value here...
                logger.log([msg['message']])
                dpool.append_cptdata(msg['arch'], msg['cpts'])
            elif code == PROC_STATUS_SIGNALRAISED:
                # typo fix: log message previously said "recieved"
                logger.log(['E: Subprocess received signal ', msg])
            else:
                logger.log(['E: ', msg])

        for pkgname, pkg in pkglist.items():
            for arch, data in pkg.items():
                package_fname = os.path.join(path, data['filename'])
                if not os.path.exists(package_fname):
                    print('Package not found: %s' % (package_fname))
                    continue
                pool.apply_async(
                    extract_metadata,
                    (suite.suite_name, component, pkgname, data['files'],
                     data['binid'], package_fname, arch),
                    callback=parse_results)
        pool.close()
        pool.join()

        # Save metadata of all binaries of the Components-arch
        # This would require a lock
        dpool.export(session)
        make_icon_tar(suite.suite_name, component)

        logger.log([
            "Completed metadata extraction for suite %s/%s" %
            (suite.suite_name, component)
        ])
Esempio n. 14
0
def main():
    # Build file lists for every selected suite/component/architecture pair
    # using a process pool; options not given on the command line default to
    # all values known to the database. Python 2 code (has_key, .encode()).
    cnf = Config()
    Logger = daklog.Logger('generate-filelist')
    Arguments = [('h', "help", "Filelist::Options::Help"),
                 ('s', "suite", "Filelist::Options::Suite", "HasArg"),
                 ('c', "component", "Filelist::Options::Component", "HasArg"),
                 ('a', "architecture", "Filelist::Options::Architecture",
                  "HasArg"),
                 ('i', "incremental", "Filelist::Options::Incremental")]
    session = DBConn().session()
    query_suites = session.query(Suite)
    suites = [suite.suite_name for suite in query_suites]
    # Pre-seed each option with the comma-joined list of all known values so
    # parse_commandline below only overrides what the user actually passed.
    if not cnf.has_key('Filelist::Options::Suite'):
        cnf['Filelist::Options::Suite'] = ','.join(suites).encode()
    query_components = session.query(Component)
    components = \
        [component.component_name for component in query_components]
    if not cnf.has_key('Filelist::Options::Component'):
        cnf['Filelist::Options::Component'] = ','.join(components).encode()
    query_architectures = session.query(Architecture)
    architectures = \
        [architecture.arch_string for architecture in query_architectures]
    if not cnf.has_key('Filelist::Options::Architecture'):
        cnf['Filelist::Options::Architecture'] = ','.join(
            architectures).encode()
    cnf['Filelist::Options::Help'] = ''
    cnf['Filelist::Options::Incremental'] = ''
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Filelist::Options")
    if Options['Help']:
        usage()
    pool = DakProcessPool()
    # Narrow the earlier full-table queries down to the selected values.
    query_suites = query_suites. \
        filter(Suite.suite_name.in_(utils.split_args(Options['Suite'])))
    query_components = query_components. \
        filter(Component.component_name.in_(utils.split_args(Options['Component'])))
    query_architectures = query_architectures. \
        filter(Architecture.arch_string.in_(utils.split_args(Options['Architecture'])))

    def parse_results(message):
        # Worker results arrive as (status code, payload).
        code, msg = message
        if code == PROC_STATUS_SUCCESS:
            Logger.log([msg])
        elif code == PROC_STATUS_SIGNALRAISED:
            Logger.log(['E: Subprocess recieved signal ', msg])
        else:
            Logger.log(['E: ', msg])

    for suite in query_suites:
        suite_id = suite.suite_id
        for component in query_components:
            component_id = component.component_id
            for architecture in query_architectures:
                architecture_id = architecture.arch_id
                # Skip combinations the suite does not actually carry.
                if architecture not in suite.architectures:
                    pass
                elif architecture.arch_string == 'source':
                    pool.apply_async(
                        writeSourceList,
                        (suite_id, component_id, Options['Incremental']),
                        callback=parse_results)
                elif architecture.arch_string == 'all':
                    # arch 'all' gets both a deb and a udeb list.
                    pool.apply_async(writeAllList,
                                     (suite_id, component_id, architecture_id,
                                      'deb', Options['Incremental']),
                                     callback=parse_results)
                    pool.apply_async(writeAllList,
                                     (suite_id, component_id, architecture_id,
                                      'udeb', Options['Incremental']),
                                     callback=parse_results)
                else:  # arch any
                    pool.apply_async(writeBinaryList,
                                     (suite_id, component_id, architecture_id,
                                      'deb', Options['Incremental']),
                                     callback=parse_results)
                    pool.apply_async(writeBinaryList,
                                     (suite_id, component_id, architecture_id,
                                      'udeb', Options['Incremental']),
                                     callback=parse_results)
    pool.close()
    pool.join()

    # this script doesn't change the database
    session.close()

    Logger.close()

    sys.exit(pool.overall_status())