Example #1
def sign_release_dir(suite, dirname):
    cnf = Config()

    if 'Dinstall::SigningKeyring' in cnf or 'Dinstall::SigningHomedir' in cnf:
        args = {
            'keyids': suite.signingkeys or [],
            'pubring': cnf.get('Dinstall::SigningPubKeyring') or None,
            'secring': cnf.get('Dinstall::SigningKeyring') or None,
            'homedir': cnf.get('Dinstall::SigningHomedir') or None,
            'passphrase_file': cnf.get('Dinstall::SigningPassphraseFile') or None,
        }

        relname = os.path.join(dirname, 'Release')

        dest = os.path.join(dirname, 'Release.gpg')
        if os.path.exists(dest):
            os.unlink(dest)

        inlinedest = os.path.join(dirname, 'InRelease')
        if os.path.exists(inlinedest):
            os.unlink(inlinedest)

        with open(relname, 'r') as stdin:
            with open(dest, 'w') as stdout:
                daklib.gpg.sign(stdin, stdout, inline=False, **args)
            stdin.seek(0)
            with open(inlinedest, 'w') as stdout:
                daklib.gpg.sign(stdin, stdout, inline=True, **args)
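A note on the "cnf.get(...) or None" idiom in the args dict above: it maps both missing and empty configuration values to None, so daklib.gpg.sign sees "unset" rather than an empty string. A minimal self-contained illustration (a plain dict stands in for dak's Config object):

# Illustrative sketch only: a plain dict stands in for dak's Config object.
cnf = {'Dinstall::SigningHomedir': ''}

homedir = cnf.get('Dinstall::SigningHomedir') or None   # '' -> None
secring = cnf.get('Dinstall::SigningKeyring') or None   # missing key -> None
assert homedir is None and secring is None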
Example #2
    def prepare(self):
        """prepare upload for further processing

        This copies the files involved to a temporary directory.  If you use
        this method directly, you have to remove the directory given by the
        C{directory} attribute yourself afterwards.

        Instead of using the method directly, you can also use a with-statement::

           with ArchiveUpload(...) as upload:
              ...

        This will automatically handle any required cleanup.
        """
        assert self.directory is None
        assert self.original_changes.valid_signature

        cnf = Config()
        session = self.transaction.session

        group = cnf.get('Dinstall::UnprivGroup') or None
        self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
                                            mode=0o2750, group=group)
        with FilesystemTransaction() as fs:
            src = os.path.join(self.original_directory, self.original_changes.filename)
            dst = os.path.join(self.directory, self.original_changes.filename)
            fs.copy(src, dst, mode=0o640)

            self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)

            for f in self.changes.files.itervalues():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(src):
                    continue
                fs.copy(src, dst, mode=0o640)

            source = None
            try:
                source = self.changes.source
            except Exception:
                # Do not raise an exception here if the .dsc is invalid.
                pass

            if source is not None:
                for f in source.files.itervalues():
                    src = os.path.join(self.original_directory, f.filename)
                    dst = os.path.join(self.directory, f.filename)
                    if not os.path.exists(dst):
                        try:
                            db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                            db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                            fs.copy(db_archive_file.path, dst, mode=0o640)
                        except KeyError:
                            # Ignore if get_file could not find it. Upload will
                            # probably be rejected later.
                            pass
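The docstring of prepare() above promises that the with-statement form cleans up the temporary directory automatically. A minimal sketch of that contract using only the standard library; UploadLike is a hypothetical stand-in, not daklib's actual ArchiveUpload:

# Illustrative sketch of the context-manager cleanup contract described
# above; not daklib's actual implementation.
import shutil
import tempfile

class UploadLike(object):
    def __init__(self):
        self.directory = None

    def prepare(self):
        # the real method copies the upload's files into this directory
        self.directory = tempfile.mkdtemp()

    def __enter__(self):
        self.prepare()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # runs even if the with-body raised: remove the temporary directory
        if self.directory is not None:
            shutil.rmtree(self.directory)
            self.directory = None

with UploadLike() as upload:
    pass  # work with the files under upload.directory here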
Example #3
def _subst_for_upload(upload):
    """ Prepare substitutions used for announce mails.

    @type  upload: L{daklib.upload.Source} or L{daklib.upload.Binary}
    @param upload: upload to handle

    @rtype: dict
    @returns: A dict of substitution values for use by L{daklib.utils.TemplateSubst}
    """
    cnf = Config()

    maintainer = upload.maintainer or cnf['Dinstall::MyEmailAddress']
    changed_by = upload.changed_by or maintainer
    if upload.sourceful:
        maintainer_to = mail_addresses_for_upload(maintainer, changed_by, upload.fingerprint)
    else:
        maintainer_to = mail_addresses_for_upload(maintainer, maintainer, upload.fingerprint)

    bcc = 'X-DAK: dak {0}'.format(upload.program)
    if 'Dinstall::Bcc' in cnf:
        bcc = '{0}\nBcc: {1}'.format(bcc, cnf['Dinstall::Bcc'])

    subst = {
        '__DISTRO__': cnf['Dinstall::MyDistribution'],
        '__BUG_SERVER__': cnf.get('Dinstall::BugServer'),
        '__ADMIN_ADDRESS__': cnf['Dinstall::MyAdminAddress'],
        '__DAK_ADDRESS__': cnf['Dinstall::MyEmailAddress'],
        '__REJECTOR_ADDRESS__': cnf['Dinstall::MyEmailAddress'],
        '__MANUAL_REJECT_MESSAGE__': '',

        '__BCC__': bcc,

        '__MAINTAINER__': changed_by,
        '__MAINTAINER_FROM__': fix_maintainer(changed_by)[1],
        '__MAINTAINER_TO__': ', '.join(maintainer_to),
        '__CHANGES_FILENAME__': upload.changes_filename,
        '__FILE_CONTENTS__': upload.changes,
        '__SOURCE__': upload.source,
        '__VERSION__': upload.version,
        '__ARCHITECTURE__': upload.architecture,
        '__WARNINGS__': '\n'.join(upload.warnings),
        }

    override_maintainer = cnf.get('Dinstall::OverrideMaintainer')
    if override_maintainer:
        subst['__MAINTAINER_FROM__'] = subst['__MAINTAINER_TO__'] = override_maintainer

    return subst
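The dict returned above maps __PLACEHOLDER__ keys to their values; TemplateSubst later expands them into a mail template. A rough sketch of what such an expansion does (plain string replacement; daklib.utils.TemplateSubst may differ in detail):

# Illustrative sketch, not daklib.utils.TemplateSubst itself.
def template_subst(subst, template_text):
    for key, value in subst.items():
        template_text = template_text.replace(key, str(value))
    return template_text

template = "From: __DAK_ADDRESS__\nTo: __MAINTAINER_TO__\nSubject: Accepted __SOURCE__ __VERSION__\n"
print(template_subst({'__DAK_ADDRESS__': 'dak@example.org',
                      '__MAINTAINER_TO__': 'maintainer@example.org',
                      '__SOURCE__': 'hello',
                      '__VERSION__': '1.0-1'}, template))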
Example #4
def main():
    global Options, Logger, Sections, Priorities

    cnf = Config()
    session = DBConn().session()

    Arguments = [('a', "automatic", "Process-New::Options::Automatic"),
                 ('b', "no-binaries", "Process-New::Options::No-Binaries"),
                 ('c', "comments", "Process-New::Options::Comments"),
                 ('h', "help", "Process-New::Options::Help"),
                 ('m', "manual-reject", "Process-New::Options::Manual-Reject",
                  "HasArg"), ('t', "trainee", "Process-New::Options::Trainee"),
                 ('q', 'queue', 'Process-New::Options::Queue', 'HasArg'),
                 ('n', "no-action", "Process-New::Options::No-Action")]

    changes_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    for i in [
            "automatic", "no-binaries", "comments", "help", "manual-reject",
            "no-action", "version", "trainee"
    ]:
        if not cnf.has_key("Process-New::Options::%s" % (i)):
            cnf["Process-New::Options::%s" % (i)] = ""

    queue_name = cnf.get('Process-New::Options::Queue', 'new')
    new_queue = session.query(PolicyQueue).filter_by(
        queue_name=queue_name).one()
    if len(changes_files) == 0:
        uploads = new_queue.uploads
    else:
        uploads = session.query(PolicyQueueUpload).filter_by(policy_queue=new_queue) \
            .join(DBChange).filter(DBChange.changesname.in_(changes_files)).all()

    Options = cnf.subtree("Process-New::Options")

    if Options["Help"]:
        usage()

    if not Options["No-Action"]:
        try:
            Logger = daklog.Logger("process-new")
        except CantOpenError as e:
            Options["Trainee"] = "True"

    Sections = Section_Completer(session)
    Priorities = Priority_Completer(session)
    readline.parse_and_bind("tab: complete")

    if len(uploads) > 1:
        sys.stderr.write("Sorting changes...\n")
        uploads = sort_uploads(new_queue, uploads, session,
                               Options["No-Binaries"])

    if Options["Comments"]:
        show_new_comments(uploads, session)
    else:
        for upload in uploads:
            do_pkg(upload, session)

    end()
Example #5
    def action_dm_migrate(self, fingerprint, section, session):
        self._action_dm_admin_common(fingerprint, section, session)
        cnf = Config()
        acl_name = cnf.get('Command::DM::ACL', 'dm')
        acl = session.query(ACL).filter_by(name=acl_name).one()

        fpr_hash_from = section['From'].translate(None, ' ')
        fpr_from = session.query(Fingerprint).filter_by(fingerprint=fpr_hash_from).first()
        if fpr_from is None:
            self.result.append('Unknown fingerprint (From): {0}\nNo action taken.'.format(fpr_hash_from))
            return

        fpr_hash_to = section['To'].translate(None, ' ')
        fpr_to = session.query(Fingerprint).filter_by(fingerprint=fpr_hash_to).first()
        if fpr_to is None:
            self.result.append('Unknown fingerprint (To): {0}\nNo action taken.'.format(fpr_hash_to))
            return
        if fpr_to.keyring is None or fpr_to.keyring.keyring_name not in cnf.value_list('Command::DM::Keyrings'):
            self.result.append('Key (To) {0} is not in DM keyring.\nNo action taken.'.format(fpr_to.fingerprint))
            return

        self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to)])

        sources = []
        for entry in session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr_from):
            self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to), 'source={0}'.format(entry.source)])
            entry.fingerprint = fpr_to
            sources.append(entry.source)

        self.result.append('Migrated {0} to {1}.\n{2} acl entries changed: {3}'.format(fpr_hash_from, fpr_hash_to, len(sources), ", ".join(sources)))

        session.commit()
Example #6
def do_pkg(upload_id):
    cnf = Config()

    session = DBConn().session()
    upload = session.query(PolicyQueueUpload).filter_by(id=upload_id).one()

    queue = upload.policy_queue
    changes = upload.changes

    origchanges = os.path.join(queue.path, changes.changesname)
    print origchanges

    htmlname = "{0}_{1}.html".format(changes.source, changes.version)
    htmlfile = os.path.join(cnf['Show-New::HTMLPath'], htmlname)

    # Have we already processed this?
    if os.path.exists(htmlfile) and \
        os.stat(htmlfile).st_mtime > time.mktime(changes.created.timetuple()):
        with open(htmlfile, "r") as fd:
            if fd.read() != timeout_str:
                sources.append(htmlname)
                return (PROC_STATUS_SUCCESS,
                        '%s already up-to-date' % htmlfile)

    # Go, process it... Now!
    htmlfiles_to_process.append(htmlfile)
    sources.append(htmlname)

    group = cnf.get('Dinstall::UnprivGroup') or None

    with open(htmlfile, 'w') as outfile:
        with policy.UploadCopy(upload, group=group) as upload_copy:
            handler = policy.PolicyQueueUploadHandler(upload, session)
            missing = [(o['type'], o['package'])
                       for o in handler.missing_overrides()]
            distribution = changes.distribution

            print >> outfile, html_header(changes.source, missing)
            print >> outfile, examine_package.display_changes(
                distribution, origchanges)

            if upload.source is not None and ('dsc',
                                              upload.source.source) in missing:
                fn = os.path.join(upload_copy.directory,
                                  upload.source.poolfile.basename)
                print >> outfile, examine_package.check_dsc(
                    distribution, fn, session)
            for binary in upload.binaries:
                if (binary.binarytype, binary.package) not in missing:
                    continue
                fn = os.path.join(upload_copy.directory,
                                  binary.poolfile.basename)
                print >> outfile, examine_package.check_deb(
                    distribution, fn, session)

            print >> outfile, html_footer()

    session.close()
    htmlfiles_to_process.remove(htmlfile)
    return (PROC_STATUS_SUCCESS, '{0} already updated'.format(htmlfile))
Example #7
    def _do_bts_versiontracking(self):
        cnf = Config()
        fs = self.transaction.fs

        btsdir = cnf.get('Dir::BTSVersionTrack')
        if btsdir is None or btsdir == '':
            return

        base = os.path.join(btsdir, self.changes.filename[:-8])

        # version history
        sourcedir = self.unpacked_source()
        if sourcedir is not None:
            fh = open(os.path.join(sourcedir, 'debian', 'changelog'), 'r')
            versions = fs.create("{0}.versions".format(base), mode=0o644)
            for line in fh.readlines():
                if re_changelog_versions.match(line):
                    versions.write(line)
            fh.close()
            versions.close()

        # binary -> source mapping
        debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
        for binary in self.changes.binaries:
            control = binary.control
            source_package, source_version = binary.source
            line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
            print >>debinfo, line
        debinfo.close()
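The version-history pass above keeps only changelog lines matching re_changelog_versions, which is defined elsewhere in daklib. A plausible stand-in that matches Debian changelog header lines; the exact pattern is an assumption, not dak's:

import re

# Hypothetical stand-in for daklib's re_changelog_versions: matches
# "package (version) distribution; urgency=..." header lines.
re_changelog_header = re.compile(r'^\w[-+0-9a-z.]+ \([^()\s]+\)')

assert re_changelog_header.match("hello (2.10-3) unstable; urgency=medium\n")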
Example #8
    def action_dm_remove(self, fingerprint, section, session):
        self._action_dm_admin_common(fingerprint, section, session)

        cnf = Config()
        acl_name = cnf.get('Command::DM::ACL', 'dm')
        acl = session.query(ACL).filter_by(name=acl_name).one()

        fpr_hash = section['Fingerprint'].translate(None, ' ')
        fpr = session.query(Fingerprint).filter_by(
            fingerprint=fpr_hash).first()
        if fpr is None:
            self.result.append(
                'Unknown fingerprint: {0}\nNo action taken.'.format(fpr_hash))
            return

        self.log.log(['dm-remove', fpr.fingerprint])

        count = 0
        for entry in session.query(ACLPerSource).filter_by(acl=acl,
                                                           fingerprint=fpr):
            self.log.log([
                'dm-remove', fpr.fingerprint, 'source={0}'.format(entry.source)
            ])
            count += 1
            session.delete(entry)

        self.result.append('Removed: {0}.\n{1} acl entries removed.'.format(
            fpr.fingerprint, count))

        session.commit()
Example #9
def lock_package(package):
    """
    Lock C{package} so that no one else jumps in and processes it.

    @type package: string
    @param package: source package name to lock
    """

    cnf = Config()

    path = os.path.join(cnf.get("Process-New::LockDir", cnf['Dir::Lock']),
                        package)

    try:
        fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDONLY)
    except OSError as e:
        if e.errno == errno.EEXIST or e.errno == errno.EACCES:
            user = pwd.getpwuid(
                os.stat(path)[stat.ST_UID])[4].split(',')[0].replace('.', '')
            raise AlreadyLockedError(user)
        raise  # propagate unexpected errors instead of yielding an undefined fd

    try:
        yield fd
    finally:
        os.unlink(path)
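Since lock_package() yields, it is presumably decorated with contextlib.contextmanager in the dak tree (the decorator is not shown in this excerpt). A self-contained sketch of the same O_CREAT | O_EXCL locking pattern, with hypothetical names:

import contextlib
import errno
import os

@contextlib.contextmanager
def exclusive_lock(path):
    # O_CREAT | O_EXCL makes acquisition atomic: os.open fails with EEXIST
    # if another process already created the lock file.
    try:
        fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDONLY)
    except OSError as e:
        if e.errno in (errno.EEXIST, errno.EACCES):
            raise RuntimeError('already locked: {0}'.format(path))
        raise
    try:
        yield fd
    finally:
        os.close(fd)
        os.unlink(path)

with exclusive_lock('/tmp/example.lock'):
    pass  # critical section: only one process holds the lock at a time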
Example #10
def main():
    global Options, Logger

    cnf = Config()

    for i in ["Help"]:
        key = "Manage-External-Signature-Requests::Options::{}".format(i)
        if key not in cnf:
            cnf[key] = ""

    Arguments = [('h', "help",
                  "Manage-External-Signature-Requests::Options::Help")]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Manage-External-Signature-Requests::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('manage-external-signature-requests')

    if 'External-Signature-Requests' not in cnf:
        print("DAK not configured to handle external signature requests")
        return

    config = cnf.subtree('External-Signature-Requests')

    session = DBConn().session()

    export_external_signature_requests(session, config['Export'])

    if 'ExportSigningKeys' in config:
        args = {
            'pubring': cnf.get('Dinstall::SigningPubKeyring') or None,
            'secring': cnf.get('Dinstall::SigningKeyring') or None,
            'homedir': cnf.get('Dinstall::SigningHomedir') or None,
            'passphrase_file': cnf.get('Dinstall::SigningPassphraseFile')
            or None,
        }
        sign_external_signature_requests(
            session, config['Export'], config.value_list('ExportSigningKeys'),
            args)

    session.close()

    Logger.close()
Example #11
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg))

        fd, temp_filename = utils.temp_filename(mode=0o644)
        temptagfile = os.fdopen(fd, 'w')
        for tags in lintiantags.itervalues():
            for tag in tags:
                print >>temptagfile, tag
        temptagfile.close()

        changespath = os.path.join(upload.directory, changes.filename)
        try:
            cmd = []
            result = 0

            user = cnf.get('Dinstall::UnprivUser') or None
            if user is not None:
                cmd.extend(['sudo', '-H', '-u', user])

            cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath])
            output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            result = e.returncode
            output = e.output
        finally:
            os.unlink(temp_filename)

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." % \
                (changespath, result))
            utils.warn(utils.prefix_multi_line_string(output, \
                " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
Example #12
def announce_accept(upload):
    """ Announce an upload.

    @type  upload: L{daklib.upload.Source} or L{daklib.upload.Binary}
    @param upload: upload to handle
    """

    cnf = Config()
    subst = _subst_for_upload(upload)
    whitelists = _whitelists(upload)

    accepted_to_real_suite = any(suite.policy_queue is None or suite in upload.from_policy_suites for suite in upload.suites)

    suite_names = []
    for suite in upload.suites:
        if suite.policy_queue:
            suite_names.append("{0}->{1}".format(suite.suite_name, suite.policy_queue.queue_name))
        else:
            suite_names.append(suite.suite_name)
    suite_names.extend(suite.suite_name for suite in upload.from_policy_suites)
    subst['__SUITE__'] = ', '.join(suite_names) or '(none)'

    message = TemplateSubst(subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.accepted'))
    send_mail(message, whitelists=whitelists)

    if accepted_to_real_suite and upload.sourceful:
        # send mail to announce lists and tracking server
        announce = set()
        for suite in upload.suites:
            if suite.policy_queue is None or suite in upload.from_policy_suites:
                announce.update(suite.announce or [])

        announce_list_address = ", ".join(announce)

        # According to #890944, this mail should be sent to dispatch@<TrackingServer>
        # to avoid bouncing mails: the per-package email alias does not exist
        # yet shortly after the package has been accepted.
        tracker = cnf.get('Dinstall::TrackingServer')
        if tracker:
            announce_list_address = "{0}\nBcc: dispatch@{1}".format(announce_list_address, tracker)

        if len(announce_list_address) != 0:
            my_subst = subst.copy()
            my_subst['__ANNOUNCE_LIST_ADDRESS__'] = announce_list_address

            message = TemplateSubst(my_subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.announce'))
            send_mail(message, whitelists=whitelists)

    close_bugs_default = cnf.find_b('Dinstall::CloseBugs')
    close_bugs = any(s.close_bugs if s.close_bugs is not None else close_bugs_default for s in upload.suites)
    if accepted_to_real_suite and upload.sourceful and close_bugs:
        for bug in upload.bugs:
            my_subst = subst.copy()
            my_subst['__BUG_NUMBER__'] = str(bug)

            message = TemplateSubst(my_subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.bug-close'))
            send_mail(message, whitelists=whitelists)
Example #13
def main():
    global Options
    cnf = Config()

    Arguments = [
        ('h', "help", "Auto-Decruft::Options::Help"),
        ('n', "dry-run", "Auto-Decruft::Options::Dry-Run"),
        ('d', "debug", "Auto-Decruft::Options::Debug"),
        ('s', "suite", "Auto-Decruft::Options::Suite", "HasArg"),
        # The "\0" seems to be the only way to disable short options.
        ("\0", 'if-newer-version-in', "Auto-Decruft::Options::OtherSuite",
         "HasArg"),
        ("\0", 'if-newer-version-in-rm-msg',
         "Auto-Decruft::Options::OtherSuiteRMMsg", "HasArg")
    ]
    for i in ["help", "Dry-Run", "Debug", "OtherSuite", "OtherSuiteRMMsg"]:
        if not cnf.has_key("Auto-Decruft::Options::%s" % (i)):
            cnf["Auto-Decruft::Options::%s" % (i)] = ""

    cnf["Auto-Decruft::Options::Suite"] = cnf.get("Dinstall::DefaultSuite",
                                                  "unstable")

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Auto-Decruft::Options")
    if Options["Help"]:
        usage()

    debug = False
    dryrun = False
    if Options["Dry-Run"]:
        dryrun = True
    if Options["Debug"]:
        debug = True

    if Options["OtherSuite"] and not Options["OtherSuiteRMMsg"]:
        utils.fubar(
            "--if-newer-version-in requires --if-newer-version-in-rm-msg")

    session = DBConn().session()

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    auto_decruft_suite(suite_name, suite_id, session, dryrun, debug)

    if Options["OtherSuite"]:
        osuite = get_suite(Options["OtherSuite"].lower(), session).suite_name
        decruft_newer_version_in(osuite, suite_name, suite_id,
                                 Options["OtherSuiteRMMsg"], session, dryrun)

    if not dryrun:
        session.commit()
Example #14
def main():
    global Options
    cnf = Config()

    Arguments = [('h', "help", "Auto-Decruft::Options::Help"),
                 ('n', "dry-run", "Auto-Decruft::Options::Dry-Run"),
                 ('d', "debug", "Auto-Decruft::Options::Debug"),
                 ('s', "suite", "Auto-Decruft::Options::Suite", "HasArg"),
                 # The "\0" seems to be the only way to disable short options.
                 ("\0", 'if-newer-version-in', "Auto-Decruft::Options::OtherSuite", "HasArg"),
                 ("\0", 'if-newer-version-in-rm-msg', "Auto-Decruft::Options::OtherSuiteRMMsg", "HasArg"),
                 ("\0", 'decruft-equal-versions', "Auto-Decruft::Options::OtherSuiteDecruftEqual")
                ]
    for i in ["help", "Dry-Run", "Debug", "OtherSuite", "OtherSuiteRMMsg", "OtherSuiteDecruftEqual"]:
        key = "Auto-Decruft::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    cnf["Auto-Decruft::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Auto-Decruft::Options")
    if Options["Help"]:
        usage()

    debug = False
    dryrun = False
    decruft_equal_versions = False
    if Options["Dry-Run"]:
        dryrun = True
    if Options["Debug"]:
        debug = True
    if Options["OtherSuiteDecruftEqual"]:
        decruft_equal_versions = True

    if Options["OtherSuite"] and not Options["OtherSuiteRMMsg"]:
        utils.fubar("--if-newer-version-in requires --if-newer-version-in-rm-msg")

    session = DBConn().session()

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    auto_decruft_suite(suite_name, suite_id, session, dryrun, debug)

    if Options["OtherSuite"]:
        osuite = get_suite(Options["OtherSuite"].lower(), session).suite_name
        decruft_newer_version_in(osuite, suite_name, suite_id, Options["OtherSuiteRMMsg"], session, dryrun, decruft_equal_versions)

    if not dryrun:
        session.commit()
Example #15
def main(argv=None):
    if argv is None:
        argv = sys.argv

    arguments = [('h', 'help', 'Process-Commands::Options::Help'),
                 ('d', 'directory', 'Process-Commands::Options::Directory', 'HasArg')]

    cnf = Config()
    cnf['Process-Commands::Options::Dummy'] = ''
    filenames = apt_pkg.parse_commandline(cnf.Cnf, arguments, argv)
    options = cnf.subtree('Process-Commands::Options')

    if 'Help' in options or (len(filenames) == 0 and 'Directory' not in options):
        usage()
        sys.exit(0)

    log = Logger('command')

    now = datetime.datetime.now()
    donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
    rejectdir = cnf['Dir::Reject']

    if len(filenames) == 0:
        filenames = [ fn for fn in os.listdir(options['Directory']) if fn.endswith('.dak-commands') ]

    for fn in filenames:
        basename = os.path.basename(fn)
        if not fn.endswith('.dak-commands'):
            log.log(['unexpected filename', basename])
            continue

        with open(fn, 'r') as fh:
            data = fh.read()

        try:
            command = CommandFile(basename, data, log)
            command.evaluate()
        except:
            created = os.stat(fn).st_mtime
            now = time.time()
            too_new = (now - created < int(cnf.get('Dinstall::SkipTime', '60')))
            if too_new:
                log.log(['skipped (too new)'])
                continue
            log.log(['reject', basename])
            dst = find_next_free(os.path.join(rejectdir, basename))
        else:
            log.log(['done', basename])
            dst = find_next_free(os.path.join(donedir, basename))

        with FilesystemTransaction() as fs:
            fs.unlink(fn)
            fs.create(dst, mode=0o644).write(data)
            fs.commit()

    log.close()
Example #16
def main():
    global Options, Logger, Sections, Priorities

    cnf = Config()
    session = DBConn().session()

    Arguments = [('a', "automatic", "Process-New::Options::Automatic"),
                 ('b', "no-binaries", "Process-New::Options::No-Binaries"),
                 ('c', "comments", "Process-New::Options::Comments"),
                 ('h', "help", "Process-New::Options::Help"),
                 ('m', "manual-reject", "Process-New::Options::Manual-Reject", "HasArg"),
                 ('t', "trainee", "Process-New::Options::Trainee"),
                 ('q', 'queue', 'Process-New::Options::Queue', 'HasArg'),
                 ('n', "no-action", "Process-New::Options::No-Action")]

    changes_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    for i in ["automatic", "no-binaries", "comments", "help", "manual-reject", "no-action", "version", "trainee"]:
        key = "Process-New::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    queue_name = cnf.get('Process-New::Options::Queue', 'new')
    new_queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).one()
    if len(changes_files) == 0:
        uploads = new_queue.uploads
    else:
        uploads = session.query(PolicyQueueUpload).filter_by(policy_queue=new_queue) \
            .join(DBChange).filter(DBChange.changesname.in_(changes_files)).all()

    Options = cnf.subtree("Process-New::Options")

    if Options["Help"]:
        usage()

    if not Options["No-Action"]:
        try:
            Logger = daklog.Logger("process-new")
        except CantOpenError as e:
            Options["Trainee"] = "True"

    Sections = Section_Completer(session)
    Priorities = Priority_Completer(session)
    readline.parse_and_bind("tab: complete")

    if len(uploads) > 1:
        print("Sorting changes...", file=sys.stderr)
        uploads = sort_uploads(new_queue, uploads, session, Options["No-Binaries"])

    if Options["Comments"]:
        show_new_comments(uploads, session)
    else:
        for upload in uploads:
            do_pkg(upload, session)

    end()
Example #17
def main():
    global Options, Logger

    cnf = Config()

    for i in ["Help"]:
        key = "Manage-External-Signature-Requests::Options::{}".format(i)
        if key not in cnf:
            cnf[key] = ""

    Arguments = [('h', "help", "Manage-External-Signature-Requests::Options::Help")]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Manage-External-Signature-Requests::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('manage-external-signature-requests')

    if 'External-Signature-Requests' not in cnf:
        print("DAK not configured to handle external signature requests")
        return

    config = cnf.subtree('External-Signature-Requests')

    session = DBConn().session()

    export_external_signature_requests(session, config['Export'])

    if 'ExportSigningKeys' in config:
        args = {
            'pubring': cnf.get('Dinstall::SigningPubKeyring') or None,
            'secring': cnf.get('Dinstall::SigningKeyring') or None,
            'homedir': cnf.get('Dinstall::SigningHomedir') or None,
            'passphrase_file': cnf.get('Dinstall::SigningPassphraseFile') or None,
        }
        sign_external_signature_requests(session, config['Export'], config.value_list('ExportSigningKeys'), args)

    session.close()

    Logger.close()
Example #18
def do_update(self):
    """
    Add suite options for overrides and control-suite to DB
    """
    print(__doc__)
    try:
        cnf = Config()

        c = self.db.cursor()

        c.execute("ALTER TABLE suite ADD COLUMN overrideprocess BOOLEAN NOT NULL DEFAULT FALSE")
        c.execute("COMMENT ON COLUMN suite.overrideprocess IS %s", ['If true, check-overrides will process the suite by default'])
        c.execute("ALTER TABLE suite ADD COLUMN overrideorigin TEXT DEFAULT NULL")
        c.execute("COMMENT ON COLUMN suite.overrideprocess IS %s", ['If NOT NULL, check-overrides will take missing overrides from the named suite'])

        # Migrate config file values into database
        if "Check-Overrides::OverrideSuites" in cnf:
            for suitename in cnf.subtree("Check-Overrides::OverrideSuites").list():
                if cnf.get("Check-Overrides::OverrideSuites::%s::Process" % suitename, "0") == "1":
                    print("Marking %s to have overrides processed automatically" % suitename.lower())
                    c.execute("UPDATE suite SET overrideprocess = TRUE WHERE suite_name = %s", [suitename.lower()])

                originsuite = cnf.get("Check-Overrides::OverrideSuites::%s::OriginSuite" % suitename, '')
                if originsuite != '':
                    print("Setting %s to use %s as origin for overrides" % (suitename.lower(), originsuite.lower()))
                    c.execute("UPDATE suite SET overrideorigin = %s WHERE suite_name = %s", [originsuite.lower(), suitename.lower()])

        c.execute("ALTER TABLE suite ADD COLUMN allowcsset BOOLEAN NOT NULL DEFAULT FALSE")
        c.execute("COMMENT ON COLUMN suite.allowcsset IS %s", ['Allow control-suite to be used with the --set option without forcing'])

        # Import historical hard-coded values
        c.execute("UPDATE suite SET allowcsset = TRUE WHERE suite_name IN ('testing', 'squeeze-updates')")

        c.execute("UPDATE config SET value = '70' WHERE name = 'db_revision'")
        self.db.commit()

    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError('Unable to apply sick update 70, rollback issued. Error message : %s' % (str(msg)))
Example #19
def main():
    global Options, Logger

    cnf = Config()

    for i in ["Help", "No-Action", "Maximum" ]:
        if not cnf.has_key("Clean-Suites::Options::%s" % (i)):
            cnf["Clean-Suites::Options::%s" % (i)] = ""

    Arguments = [('h',"help","Clean-Suites::Options::Help"),
                 ('n',"no-action","Clean-Suites::Options::No-Action"),
                 ('m',"maximum","Clean-Suites::Options::Maximum", "HasArg")]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Clean-Suites::Options")

    if cnf["Clean-Suites::Options::Maximum"] != "":
        try:
            # Only use Maximum if it's an integer
            max_delete = int(cnf["Clean-Suites::Options::Maximum"])
            if max_delete < 1:
                utils.fubar("If given, Maximum must be at least 1")
        except ValueError as e:
            utils.fubar("If given, Maximum must be an integer")
    else:
        max_delete = None

    if Options["Help"]:
        usage()

    Logger = daklog.Logger("clean-suites", debug=Options["No-Action"])

    session = DBConn().session()

    now_date = datetime.now()

    # Stay of execution; default to 1.5 days
    soe = int(cnf.get('Clean-Suites::StayOfExecution', '129600'))

    delete_date = now_date - timedelta(seconds=soe)

    check_binaries(now_date, delete_date, max_delete, session)
    clean_binaries(now_date, delete_date, max_delete, session)
    check_sources(now_date, delete_date, max_delete, session)
    check_files(now_date, delete_date, max_delete, session)
    clean(now_date, delete_date, max_delete, session)
    clean_maintainers(now_date, delete_date, max_delete, session)
    clean_fingerprints(now_date, delete_date, max_delete, session)
    clean_empty_directories(session)

    Logger.close()
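For reference, Clean-Suites::StayOfExecution is given in seconds; the default 129600 used above is exactly 1.5 days (129600 / 86400 == 1.5):

from datetime import datetime, timedelta

soe = 129600  # seconds; 129600 / 86400 == 1.5 days
delete_date = datetime.now() - timedelta(seconds=soe)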
Example #20
def do_pkg(upload_id):
    cnf = Config()

    session = DBConn().session()
    upload = session.query(PolicyQueueUpload).filter_by(id=upload_id).one()

    queue = upload.policy_queue
    changes = upload.changes

    origchanges = os.path.join(queue.path, changes.changesname)
    print origchanges

    htmlname = "{0}_{1}.html".format(changes.source, changes.version)
    htmlfile = os.path.join(cnf["Show-New::HTMLPath"], htmlname)

    # Have we already processed this?
    if os.path.exists(htmlfile) and os.stat(htmlfile).st_mtime > time.mktime(changes.created.timetuple()):
        with open(htmlfile, "r") as fd:
            if fd.read() != timeout_str:
                sources.append(htmlname)
                return (PROC_STATUS_SUCCESS, "%s already up-to-date" % htmlfile)

    # Go, process it... Now!
    htmlfiles_to_process.append(htmlfile)
    sources.append(htmlname)

    group = cnf.get("Dinstall::UnprivGroup") or None

    with open(htmlfile, "w") as outfile:
        with policy.UploadCopy(upload, group=group) as upload_copy:
            handler = policy.PolicyQueueUploadHandler(upload, session)
            missing = [(o["type"], o["package"]) for o in handler.missing_overrides()]
            distribution = changes.distribution

            print >> outfile, html_header(changes.source, missing)
            print >> outfile, examine_package.display_changes(distribution, origchanges)

            if upload.source is not None and ("dsc", upload.source.source) in missing:
                fn = os.path.join(upload_copy.directory, upload.source.poolfile.basename)
                print >> outfile, examine_package.check_dsc(distribution, fn, session)
            for binary in upload.binaries:
                if (binary.binarytype, binary.package) not in missing:
                    continue
                fn = os.path.join(upload_copy.directory, binary.poolfile.basename)
                print >> outfile, examine_package.check_deb(distribution, fn, session)

            print >> outfile, html_footer()

    session.close()
    htmlfiles_to_process.remove(htmlfile)
    return (PROC_STATUS_SUCCESS, "{0} already updated".format(htmlfile))
Example #21
    def get_transitions(self):
        cnf = Config()
        path = cnf.get('Dinstall::ReleaseTransitions', '')
        if path == '' or not os.path.exists(path):
            return None

        with open(path, 'r') as fh:
            contents = fh.read()
        try:
            transitions = yaml.safe_load(contents)
            return transitions
        except yaml.YAMLError as msg:
            utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

        return None
Example #22
def do_lintian(filename):
    cnf = Config()
    cmd = []

    user = cnf.get('Dinstall::UnprivUser') or None
    if user is not None:
        cmd.extend(['sudo', '-H', '-u', user])

    color = 'always'
    if use_html:
        color = 'html'

    cmd.extend(['lintian', '--show-overrides', '--color', color, "--", filename])

    return do_command(cmd, escaped=True)
Example #23
def do_lintian(filename):
    cnf = Config()
    cmd = []

    user = cnf.get("Dinstall::UnprivUser") or None
    if user is not None:
        cmd.extend(["sudo", "-H", "-u", user])

    color = "always"
    if use_html:
        color = "html"

    cmd.extend(["lintian", "--show-overrides", "--color", color, "--", filename])

    return do_command(cmd, escaped=True)
Example #24
def announce_accept(upload):
    cnf = Config()
    subst = _subst_for_upload(upload)
    whitelists = _whitelists(upload)

    accepted_to_real_suite = any(suite.policy_queue is None or suite in upload.from_policy_suites for suite in upload.suites)

    suite_names = []
    for suite in upload.suites:
        if suite.policy_queue:
            suite_names.append("{0}->{1}".format(suite.suite_name, suite.policy_queue.queue_name))
        else:
            suite_names.append(suite.suite_name)
    suite_names.extend(suite.suite_name for suite in upload.from_policy_suites)
    subst['__SUITE__'] = ', '.join(suite_names) or '(none)'

    message = TemplateSubst(subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.accepted'))
    send_mail(message, whitelists=whitelists)

    if accepted_to_real_suite and upload.sourceful:
        # send mail to announce lists and tracking server
        announce = set()
        for suite in upload.suites:
            if suite.policy_queue is None or suite in upload.from_policy_suites:
                announce.update(suite.announce or [])

        announce_list_address = ", ".join(announce)

        tracking = cnf.get('Dinstall::TrackingServer')
        if tracking:
            announce_list_address = "{0}\nBcc: {1}@{2}".format(announce_list_address, upload.source, tracking)

        if len(announce_list_address) != 0:
            my_subst = subst.copy()
            my_subst['__ANNOUNCE_LIST_ADDRESS__'] = announce_list_address

            message = TemplateSubst(my_subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.announce'))
            send_mail(message, whitelists=whitelists)

    close_bugs_default = cnf.find_b('Dinstall::CloseBugs')
    close_bugs = any(s.close_bugs if s.close_bugs is not None else close_bugs_default for s in upload.suites)
    if accepted_to_real_suite and upload.sourceful and close_bugs:
        for bug in upload.bugs:
            my_subst = subst.copy()
            my_subst['__BUG_NUMBER__'] = str(bug)

            message = TemplateSubst(my_subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.bug-close'))
            send_mail(message, whitelists=whitelists)
Example #25
def do_update(self):
    print __doc__
    try:
        cnf = Config()

        c = self.db.cursor()

        stayofexecution = cnf.get('Clean-Suites::StayOfExecution', '129600')
        c.execute("ALTER TABLE archive ADD COLUMN stayofexecution INTERVAL NOT NULL DEFAULT %s", (stayofexecution,))
        c.execute("UPDATE archive SET stayofexecution='0' WHERE name IN ('new', 'policy', 'build-queues')")

        c.execute("UPDATE config SET value = '77' WHERE name = 'db_revision'")
        self.db.commit()

    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError('Unable to apply sick update 77, rollback issued. Error message: {0}'.format(msg))
Example #26
def check_daily_lock():
    """
    Raises CantGetLockError if the dinstall lock file already exists.
    """

    cnf = Config()
    try:
        lockfile = cnf.get("Process-New::DinstallLockFile",
                           os.path.join(cnf['Dir::Lock'], 'processnew.lock'))

        os.open(lockfile,
                os.O_RDONLY | os.O_CREAT | os.O_EXCL)
    except OSError as e:
        if e.errno == errno.EEXIST or e.errno == errno.EACCES:
            raise CantGetLockError
        raise  # propagate unexpected errors

    os.unlink(lockfile)
Example #27
def do_pkg(upload, session):
    # Try to get an included dsc
    dsc = upload.source

    cnf = Config()
    group = cnf.get('Dinstall::UnprivGroup') or None

    try:
        with lock_package(upload.changes.source), \
                UploadCopy(upload, group=group) as upload_copy:
            handler = PolicyQueueUploadHandler(upload, session)
            if handler.get_action() is not None:
                print("PENDING %s\n" % handler.get_action())
                return

            do_new(upload, upload_copy, handler, session)
    except AlreadyLockedError as e:
        print("Seems to be locked by %s already, skipping..." % (e))
Example #28
def do_lintian(filename):
    cnf = Config()
    cmd = []

    user = cnf.get('Dinstall::UnprivUser') or None
    if user is not None:
        cmd.extend(['sudo', '-H', '-u', user])

    color = 'always'
    if use_html:
        color = 'html'

    cmd.extend(
        ['lintian', '--show-overrides', '--color', color, "--", filename])

    try:
        return do_command(cmd, escaped=True)
    except OSError as e:
        return (colour_output("Running lintian failed: %s" % (e), "error"))
Example #29
def lock_package(package):
    """
    Lock C{package} so that no one else jumps in and processes it.

    @type package: string
    @param package: source package name to lock
    """

    cnf = Config()

    path = os.path.join(cnf.get("Process-New::LockDir", cnf['Dir::Lock']), package)

    try:
        fd = os.open(path, os.O_CREAT | os.O_EXCL | os.O_RDONLY)
    except OSError as e:
        if e.errno == errno.EEXIST or e.errno == errno.EACCES:
            user = pwd.getpwuid(os.stat(path)[stat.ST_UID])[4].split(',')[0].replace('.', '')
            raise AlreadyLockedError(user)
        raise  # propagate unexpected errors instead of yielding an undefined fd

    try:
        yield fd
    finally:
        os.unlink(path)
Example #30
def do_pkg(upload, session):
    # Try to get an included dsc
    dsc = upload.source

    cnf = Config()
    group = cnf.get('Dinstall::UnprivGroup') or None

    #bcc = "X-DAK: dak process-new"
    #if cnf.has_key("Dinstall::Bcc"):
    #    u.Subst["__BCC__"] = bcc + "\nBcc: %s" % (cnf["Dinstall::Bcc"])
    #else:
    #    u.Subst["__BCC__"] = bcc

    try:
        with lock_package(upload.changes.source):
            with UploadCopy(upload, group=group) as upload_copy:
                handler = PolicyQueueUploadHandler(upload, session)
                if handler.get_action() is not None:
                    print "PENDING %s\n" % handler.get_action()
                    return

                do_new(upload, upload_copy, handler, session)
    except AlreadyLockedError as e:
        print "Seems to be locked by %s already, skipping..." % (e)
Example #31
def do_pkg(upload, session):
    # Try to get an included dsc
    dsc = upload.source

    cnf = Config()
    group = cnf.get('Dinstall::UnprivGroup') or None

    #bcc = "X-DAK: dak process-new"
    #if cnf.has_key("Dinstall::Bcc"):
    #    u.Subst["__BCC__"] = bcc + "\nBcc: %s" % (cnf["Dinstall::Bcc"])
    #else:
    #    u.Subst["__BCC__"] = bcc

    try:
        with lock_package(upload.changes.source):
            with UploadCopy(upload, group=group) as upload_copy:
                handler = PolicyQueueUploadHandler(upload, session)
                if handler.get_action() is not None:
                    print "PENDING %s\n" % handler.get_action()
                    return

                do_new(upload, upload_copy, handler, session)
    except AlreadyLockedError as e:
        print "Seems to be locked by %s already, skipping..." % (e)
Example #32
    def action_dm_remove(self, fingerprint, section, session):
        self._action_dm_admin_common(fingerprint, section, session)

        cnf = Config()
        acl_name = cnf.get('Command::DM::ACL', 'dm')
        acl = session.query(ACL).filter_by(name=acl_name).one()

        fpr_hash = section['Fingerprint'].translate(None, ' ')
        fpr = session.query(Fingerprint).filter_by(fingerprint=fpr_hash).first()
        if fpr is None:
            self.result.append('Unknown fingerprint: {0}\nNo action taken.'.format(fpr_hash))
            return

        self.log.log(['dm-remove', fpr.fingerprint])

        count = 0
        for entry in session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr):
            self.log.log(['dm-remove', fpr.fingerprint, 'source={0}'.format(entry.source)])
            count += 1
            session.delete(entry)

        self.result.append('Removed: {0}.\n{1} acl entries removed.'.format(fpr.fingerprint, count))

        session.commit()
Example #33
    def check(self, upload):
        changes = upload.changes

        # Only check sourceful uploads.
        if changes.source is None:
            return True
        # Only check uploads to unstable or experimental.
        if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions:
            return True

        cnf = Config()
        if 'Dinstall::LintianTags' not in cnf:
            return True
        tagfile = cnf['Dinstall::LintianTags']

        with open(tagfile, 'r') as sourcefile:
            sourcecontent = sourcefile.read()
        try:
            lintiantags = yaml.safe_load(sourcecontent)['lintian']
        except yaml.YAMLError as msg:
            raise Exception(
                'Could not read lintian tags file {0}, YAML error: {1}'.format(
                    tagfile, msg))

        with tempfile.NamedTemporaryFile(mode="w+t") as temptagfile:
            os.fchmod(temptagfile.fileno(), 0o644)
            for tags in six.itervalues(lintiantags):
                for tag in tags:
                    print(tag, file=temptagfile)
            temptagfile.flush()

            changespath = os.path.join(upload.directory, changes.filename)

            cmd = []
            user = cnf.get('Dinstall::UnprivUser') or None
            if user is not None:
                cmd.extend(['sudo', '-H', '-u', user])
            cmd.extend([
                '/usr/bin/lintian', '--show-overrides', '--tags-from-file',
                temptagfile.name, changespath
            ])
            process = subprocess.Popen(cmd,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.STDOUT)
            output_raw = process.communicate()[0]
            output = six.ensure_text(output_raw)
            result = process.returncode

        if result == 2:
            utils.warn("lintian failed for %s [return code: %s]." %
                       (changespath, result))
            utils.warn(
                utils.prefix_multi_line_string(output, " [possible output:] "))

        parsed_tags = lintian.parse_lintian_output(output)
        rejects = list(
            lintian.generate_reject_messages(parsed_tags, lintiantags))
        if len(rejects) != 0:
            raise Reject('\n'.join(rejects))

        return True
Example #34
def clean(now_date, archives, max_delete, session):
    cnf = Config()

    count = 0
    size = 0

    Logger.log(["Cleaning out packages..."])

    morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue'))
    morguesubdir = cnf.get("Clean-Suites::MorgueSubDir", 'pool')

    # Build directory as morguedir/morguesubdir/year/month/day
    dest = os.path.join(morguedir, morguesubdir, str(now_date.year),
                        '%.2d' % now_date.month, '%.2d' % now_date.day)

    if not Options["No-Action"] and not os.path.exists(dest):
        os.makedirs(dest)

    # Delete from source
    Logger.log(["Deleting from source table..."])
    q = session.execute("""
      WITH
      deleted_sources AS (
        DELETE FROM source
         USING files f
         WHERE source.file = f.id
           AND NOT EXISTS (SELECT 1 FROM files_archive_map af
                                    JOIN archive_delete_date ad ON af.archive_id = ad.archive_id
                                   WHERE af.file_id = source.file
                                     AND (af.last_used IS NULL OR af.last_used > ad.delete_date))
        RETURNING source.id AS id, f.filename AS filename
      ),
      deleted_dsc_files AS (
        DELETE FROM dsc_files df WHERE df.source IN (SELECT id FROM deleted_sources)
        RETURNING df.file AS file_id
      ),
      now_unused_source_files AS (
        UPDATE files_archive_map af
           SET last_used = '1977-03-13 13:37:42' -- Kill it now. We waited long enough before removing the .dsc.
         WHERE af.file_id IN (SELECT file_id FROM deleted_dsc_files)
           AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = af.file_id)
      )
      SELECT filename FROM deleted_sources""")
    for s in q:
        Logger.log(["delete source", s[0]])

    if not Options["No-Action"]:
        session.commit()

    # Delete files from the pool
    old_files = session.query(ArchiveFile).filter(
        sql.text(
            'files_archive_map.last_used <= (SELECT delete_date FROM archive_delete_date ad WHERE ad.archive_id = files_archive_map.archive_id)'
        )).join(Archive)
    if max_delete is not None:
        old_files = old_files.limit(max_delete)
        Logger.log(["Limiting removals to %d" % max_delete])

    if archives is not None:
        archive_ids = [a.archive_id for a in archives]
        old_files = old_files.filter(ArchiveFile.archive_id.in_(archive_ids))

    for af in old_files:
        filename = af.path
        try:
            st = os.lstat(filename)
        except FileNotFoundError:
            Logger.log(["database referred to non-existing file", filename])
            session.delete(af)
            continue
        Logger.log(["delete archive file", filename])
        if stat.S_ISLNK(st.st_mode):
            count += 1
            Logger.log(["delete symlink", filename])
            if not Options["No-Action"]:
                os.unlink(filename)
                session.delete(af)
        elif stat.S_ISREG(st.st_mode):
            size += st.st_size
            count += 1

            dest_filename = dest + '/' + os.path.basename(filename)
            # If the destination file exists; try to find another filename to use
            if os.path.lexists(dest_filename):
                dest_filename = utils.find_next_free(dest_filename)

            if not Options["No-Action"]:
                if af.archive.use_morgue:
                    Logger.log(["move to morgue", filename, dest_filename])
                    utils.move(filename, dest_filename)
                else:
                    Logger.log(["removed file", filename])
                    os.unlink(filename)
                session.delete(af)

        else:
            utils.fubar("%s is neither symlink nor file?!" % (filename))

    if count > 0:
        Logger.log(["total", count, utils.size_type(size)])

    # Delete entries in files no longer referenced by any archive
    query = """
       DELETE FROM files f
        WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af WHERE af.file_id = f.id)
    """
    session.execute(query)

    if not Options["No-Action"]:
        session.commit()
Example #35
    def update_db(self):
        # Ok, try and find the configuration table
        print("Determining dak database revision ...")
        cnf = Config()
        logger = Logger('update-db')
        modules = []

        try:
            # Build a connect string
            if "DB::Service" in cnf:
                connect_str = "service=%s" % cnf["DB::Service"]
            else:
                connect_str = "dbname=%s" % (cnf["DB::Name"])
                if "DB::Host" in cnf and cnf["DB::Host"] != '':
                    connect_str += " host=%s" % (cnf["DB::Host"])
                if "DB::Port" in cnf and cnf["DB::Port"] != '-1':
                    connect_str += " port=%d" % (int(cnf["DB::Port"]))

            self.db = psycopg2.connect(connect_str)

            db_role = cnf.get("DB::Role")
            if db_role:
                self.db.cursor().execute('SET ROLE "{}"'.format(db_role))

        except Exception as e:
            print("FATAL: Failed connect to database (%s)" % str(e))
            sys.exit(1)

        database_revision = int(self.get_db_rev())
        logger.log(
            ['transaction id before update: %s' % self.get_transaction_id()])

        if database_revision == -1:
            print("dak database schema predates update-db.")
            print("")
            print(
                "This script will attempt to upgrade it to the lastest, but may fail."
            )
            print(
                "Please make sure you have a database backup handy. If you don't, press Ctrl-C now!"
            )
            print("")
            print("Continuing in five seconds ...")
            time.sleep(5)
            print("")
            print("Attempting to upgrade pre-zero database to zero")

            self.update_db_to_zero()
            database_revision = 0

        dbfiles = glob(
            os.path.join(os.path.dirname(__file__), 'dakdb/update*.py'))
        required_database_schema = max(
            map(int, findall(r'update(\d+)\.py', " ".join(dbfiles))))

        print("dak database schema at %d" % database_revision)
        print("dak version requires schema %d" % required_database_schema)

        if database_revision < required_database_schema:
            print("\nUpdates to be applied:")
            for i in range(database_revision + 1,
                           required_database_schema + 1):
                dakdb = __import__("dakdb", globals(), locals(),
                                   ['update' + str(i)])
                update_module = getattr(dakdb, "update" + str(i))
                print(
                    "Update %d: %s" %
                    (i, next(s
                             for s in update_module.__doc__.split("\n") if s)))
                modules.append((update_module, i))
            if not Config().find_b("Update-DB::Options::Yes", False):
                prompt = "\nUpdate database? (y/N) "
                answer = utils.our_raw_input(prompt)
                if answer.upper() != 'Y':
                    sys.exit(0)
        else:
            print("no updates required")
            logger.log(["no updates required"])
            sys.exit(0)

        for module in modules:
            (update_module, i) = module
            try:
                update_module.do_update(self)
                message = "updated database schema from %d to %d" % (
                    database_revision, i)
                print(message)
                logger.log([message])
            except DBUpdateError as e:
                # Seems the update did not work.
                print("Was unable to update database schema from %d to %d." %
                      (database_revision, i))
                print("The error message received was %s" % (e))
                logger.log(["DB Schema upgrade failed"])
                logger.close()
                utils.fubar("DB Schema upgrade failed")
            database_revision += 1
        logger.close()
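
The schema discovery in update_db reduces to globbing dakdb/update*.py and taking the highest number. That step in isolation (the directory argument is illustrative):

import os
from glob import glob
from re import findall

def required_schema(dakdb_dir):
    # Every dakdb/updateNN.py bumps the schema by one; the largest NN
    # is the revision this version of dak expects.
    dbfiles = glob(os.path.join(dakdb_dir, 'update*.py'))
    return max(map(int, findall(r'update(\d+)\.py', " ".join(dbfiles))))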
Beispiel #38
0
def main():
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    Arguments = [('h',"help","Cruft-Report::Options::Help"),
                 ('m',"mode","Cruft-Report::Options::Mode", "HasArg"),
                 ('R',"rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s',"suite","Cruft-Report::Options::Suite","HasArg"),
                 ('w',"wanna-build-dump","Cruft-Report::Options::Wanna-Build-Dump","HasArg")]
    for i in [ "help", "Rdep-Check" ]:
        if not cnf.has_key("Cruft-Report::Options::%s" % (i)):
            cnf["Cruft-Report::Options::%s" % (i)] = ""

    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    if not cnf.has_key("Cruft-Report::Options::Mode"):
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if not cnf.has_key("Cruft-Report::Options::Wanna-Build-Dump"):
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
    if Options["Mode"] == "daily":
        checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu" ]
    elif Options["Mode"] == "full":
        checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu", "dubious nbs", "bnb", "bms", "anais" ]
    elif Options["Mode"] == "bdo":
        checks = [ "nbs",  "obsolete source" ]
    else:
        utils.warn("%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood." % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}

    anais_output = ""

    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources.gz" % (suite.archive.path, suite_name, component)
        # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
        (fd, temp_filename) = utils.temp_filename()
        (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
        if (result != 0):
            sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
            sys.exit(result)
        sources = utils.open_file(temp_filename)
        Sources = apt_pkg.TagFile(sources)
        while Sources.step():
            source = Sources.section.find('Package')
            source_version = Sources.section.find('Version')
            architecture = Sources.section.find('Architecture')
            binaries = Sources.section.find('Binary')
            binaries_list = [ i.strip() for i in binaries.split(',') ]

            if "bnb" in checks:
                # Check for binaries not built on any architecture.
                for binary in binaries_list:
                    if not bins_in_suite.has_key(binary):
                        bin_not_built.setdefault(source, {})
                        bin_not_built[source][binary] = ""

            if "anais" in checks:
                anais_output += do_anais(architecture, binaries_list, source, session)

            # build indices for checking "no source" later
            source_index = component + '/' + source
            src_pkgs[source] = source_index
            for binary in binaries_list:
                bin_pkgs[binary] = source
            source_binaries[source] = binaries
            source_versions[source] = source_version

        sources.close()
        os.unlink(temp_filename)

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "experimental":
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [ a.arch_string for a in get_suite_architectures(suite_name,
                                                                         skipsrc=True, skipall=True,
                                                                         session=session) ]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
                continue
            filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (suite.archive.path, suite_name, component, architecture)
            # apt_pkg.TagFile needs a real file handle
            (fd, temp_filename) = utils.temp_filename()
            (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
            if (result != 0):
                sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
                sys.exit(result)

            if "nfu" in checks:
                nfu_packages.setdefault(architecture,[])
                nfu_entries = parse_nfu(architecture)

            packages = utils.open_file(temp_filename)
            Packages = apt_pkg.TagFile(packages)
            while Packages.step():
                package = Packages.section.find('Package')
                source = Packages.section.find('Source', "")
                version = Packages.section.find('Version')
                if source == "":
                    source = package
                if bin2source.has_key(package) and \
                       apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                else:
                    bin2source[package] = {}
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                if source.find("(") != -1:
                    m = re_extract_src_version.match(source)
                    source = m.group(1)
                    version = m.group(2)
                if not bin_pkgs.has_key(package):
                    nbs.setdefault(source,{})
                    nbs[source].setdefault(package, {})
                    nbs[source][package][version] = ""
                else:
                    if "nfu" in checks:
                        if package in nfu_entries and \
                               version != source_versions[source]: # only suggest to remove out-of-date packages
                            nfu_packages[architecture].append((package,version,source_versions[source]))
                    
            packages.close()
            os.unlink(temp_filename)

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
    dubious_nbs = {}
    for source in nbs.keys():
        for package in nbs[source].keys():
            versions = nbs[source][package].keys()
            versions.sort(apt_pkg.version_compare)
            latest_version = versions.pop()
            source_version = source_versions.get(source,"0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package, suite_id, session)

    if "nviu" in checks:
        do_newer_version('unstable', 'experimental', 'NVIU', session)

    if "nvit" in checks:
        do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)

    ###

    if Options["Mode"] == "full":
        print "="*75
        print

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print "Unbuilt binary packages"
        print "-----------------------"
        print
        keys = bin_not_built.keys()
        keys.sort()
        for source in keys:
            binaries = bin_not_built[source].keys()
            binaries.sort()
            print " o %s: %s" % (source, ", ".join(binaries))
        print

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print "Architecture Not Allowed In Source"
        print "----------------------------------"
        print anais_output
        print

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)
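
The NBS bookkeeping above is a nested dict, source -> binary package -> versions seen in the Packages files; a package only counts as "dubious" NBS when its latest version still matches the source version. A toy illustration, using plain string sorting as a stand-in for apt_pkg.version_compare (real Debian version ordering differs):

nbs = {"foo": {"foo-data": {"1.0-1": "", "1.0-2": ""}}}
source_versions = {"foo": "1.0-2"}

for source, pkgs in nbs.items():
    for package, versions in pkgs.items():
        latest = sorted(versions)[-1]  # stand-in for Debian version ordering
        if latest == source_versions.get(source, "0"):
            print("dubious NBS: %s/%s %s" % (source, package, latest))
        else:
            print("real NBS: %s/%s %s" % (source, package, latest))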
Beispiel #39
0
    def action_dm(self, fingerprint, section, session):
        cnf = Config()

        if (
            "Command::DM::AdminKeyrings" not in cnf
            or "Command::DM::ACL" not in cnf
            or "Command::DM::Keyrings" not in cnf
        ):
            raise CommandError("DM command is not configured for this archive.")

        allowed_keyrings = cnf.value_list("Command::DM::AdminKeyrings")
        if fingerprint.keyring.keyring_name not in allowed_keyrings:
            raise CommandError("Key {0} is not allowed to set DM".format(fingerprint.fingerprint))

        acl_name = cnf.get("Command::DM::ACL", "dm")
        acl = session.query(ACL).filter_by(name=acl_name).one()

        fpr_hash = section["Fingerprint"].translate(None, " ")
        fpr = session.query(Fingerprint).filter_by(fingerprint=fpr_hash).first()
        if fpr is None:
            raise CommandError("Unknown fingerprint {0}".format(fpr_hash))
        if fpr.keyring is None or fpr.keyring.keyring_name not in cnf.value_list("Command::DM::Keyrings"):
            raise CommandError("Key {0} is not in DM keyring.".format(fpr.fingerprint))
        addresses = gpg_get_key_addresses(fpr.fingerprint)
        if len(addresses) > 0:
            self.cc.append(addresses[0])

        self.log.log(["dm", "fingerprint", fpr.fingerprint])
        self.result.append("Fingerprint: {0}".format(fpr.fingerprint))
        if len(addresses) > 0:
            self.log.log(["dm", "uid", addresses[0]])
            self.result.append("Uid: {0}".format(addresses[0]))

        for source in self._split_packages(section.get("Allow", "")):
            # Check for existence of source package to catch typos
            if session.query(DBSource).filter_by(source=source).first() is None:
                raise CommandError("Tried to grant permissions for unknown source package: {0}".format(source))

            if session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr, source=source).first() is None:
                aps = ACLPerSource()
                aps.acl = acl
                aps.fingerprint = fpr
                aps.source = source
                aps.created_by = fingerprint
                aps.reason = section.get("Reason")
                session.add(aps)
                self.log.log(["dm", "allow", fpr.fingerprint, source])
                self.result.append("Allowed: {0}".format(source))
            else:
                self.result.append("Already-Allowed: {0}".format(source))

        session.flush()

        for source in self._split_packages(section.get("Deny", "")):
            count = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr, source=source).delete()
            if count == 0:
                raise CommandError(
                    "Tried to remove upload permissions for package {0}, "
                    "but no upload permissions were granted before.".format(source)
                )

            self.log.log(["dm", "deny", fpr.fingerprint, source])
            self.result.append("Denied: {0}".format(source))

        session.commit()
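
The Allow/Deny handling above is an idempotent grant set: granting an existing permission reports Already-Allowed, denying an absent one is an error. The same state machine with a plain set instead of ACLPerSource rows (illustrative only):

granted = set()

def allow(source):
    if source in granted:
        return "Already-Allowed: %s" % source
    granted.add(source)
    return "Allowed: %s" % source

def deny(source):
    if source not in granted:
        raise ValueError("no upload permissions were granted before")
    granted.remove(source)
    return "Denied: %s" % source

print(allow("foo"))  # Allowed: foo
print(allow("foo"))  # Already-Allowed: foo
print(deny("foo"))   # Denied: foo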
Beispiel #40
0
def main():
    cnf = Config()

    Arguments = [
        ('n', "no-action", "Import-Users-From-Passwd::Options::No-Action"),
        ('q', "quiet", "Import-Users-From-Passwd::Options::Quiet"),
        ('v', "verbose", "Import-Users-From-Passwd::Options::Verbose"),
        ('h', "help", "Import-Users-From-Passwd::Options::Help")
    ]
    for i in ["no-action", "quiet", "verbose", "help"]:
        key = "Import-Users-From-Passwd::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Import-Users-From-Passwd::Options")

    if Options["Help"]:
        usage()
    elif arguments:
        utils.warn(
            "dak import-users-from-passwd takes no non-option arguments.")
        usage(1)

    session = DBConn().session()
    valid_gid = cnf.get("Import-Users-From-Passwd::ValidGID", "")
    if valid_gid:
        debiangrp = grp.getgrnam(valid_gid).gr_mem
    else:
        debiangrp = []

    passwd_unames = {}
    for entry in pwd.getpwall():
        uname = entry[0]
        if uname not in debiangrp:
            if Options["Verbose"]:
                print("Skipping %s (Not in group %s)." % (uname, valid_gid))
            continue
        passwd_unames[uname] = ""

    postgres_unames = {}
    q = session.execute("SELECT usename FROM pg_user")
    for i in q.fetchall():
        uname = i[0]
        postgres_unames[uname] = ""

    known_postgres_unames = {}
    for i in cnf.get("Import-Users-From-Passwd::KnownPostgres", "").split(","):
        uname = i.strip()
        known_postgres_unames[uname] = ""

    for uname in sorted(postgres_unames):
        if uname not in passwd_unames and uname not in known_postgres_unames:
            print(
                "I: Deleting %s from Postgres, no longer in passwd or list of known Postgres users"
                % (uname))
            q = session.execute('DROP USER "%s"' % (uname))

    safe_name = re.compile('^[A-Za-z0-9]+$')
    for uname in sorted(passwd_unames):
        if uname not in postgres_unames:
            if not Options["Quiet"]:
                print("Creating %s user in Postgres." % (uname))
            if not Options["No-Action"]:
                if safe_name.match(uname):
                    # NB: I never figured out how to use a bind parameter for this query
                    # XXX: Fix this as it looks like a potential SQL injection attack to me
                    #      (hence the safe_name match we do)
                    try:
                        q = session.execute('CREATE USER "%s"' % (uname))
                        session.commit()
                    except Exception as e:
                        utils.warn("Could not create user %s (%s)" %
                                   (uname, str(e)))
                        session.rollback()
                else:
                    print("NOT CREATING USER %s.  Doesn't match safety regex" %
                          uname)

    session.commit()
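
The XXX above is justified: the role name is interpolated into DDL with only a regex as guard. With a raw psycopg2 connection (SQLAlchemy hands one out via session.connection().connection), identifier quoting can replace the regex gate; a sketch, not the code dak uses:

from psycopg2 import sql

def create_pg_user(conn, uname):
    # sql.Identifier quotes the role name safely, so arbitrary
    # characters in uname cannot escape the statement.
    with conn.cursor() as cur:
        cur.execute(sql.SQL("CREATE USER {}").format(sql.Identifier(uname)))
    conn.commit()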
Beispiel #41
0
def clean(now_date, archives, max_delete, session):
    cnf = Config()

    count = 0
    size = 0

    Logger.log(["Cleaning out packages..."])

    morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue'))
    morguesubdir = cnf.get("Clean-Suites::MorgueSubDir", 'pool')

    # Build directory as morguedir/morguesubdir/year/month/day
    dest = os.path.join(morguedir,
                        morguesubdir,
                        str(now_date.year),
                        '%.2d' % now_date.month,
                        '%.2d' % now_date.day)

    if not Options["No-Action"] and not os.path.exists(dest):
        os.makedirs(dest)

    # Delete from source
    Logger.log(["Deleting from source table..."])
    q = session.execute("""
      WITH
      deleted_sources AS (
        DELETE FROM source
         USING files f
         WHERE source.file = f.id
           AND NOT EXISTS (SELECT 1 FROM files_archive_map af
                                    JOIN archive_delete_date ad ON af.archive_id = ad.archive_id
                                   WHERE af.file_id = source.file
                                     AND (af.last_used IS NULL OR af.last_used > ad.delete_date))
        RETURNING source.id AS id, f.filename AS filename
      ),
      deleted_dsc_files AS (
        DELETE FROM dsc_files df WHERE df.source IN (SELECT id FROM deleted_sources)
        RETURNING df.file AS file_id
      ),
      now_unused_source_files AS (
        UPDATE files_archive_map af
           SET last_used = '1977-03-13 13:37:42' -- Kill it now. We waited long enough before removing the .dsc.
         WHERE af.file_id IN (SELECT file_id FROM deleted_dsc_files)
           AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = af.file_id)
      )
      SELECT filename FROM deleted_sources""")
    for s in q:
        Logger.log(["delete source", s[0]])

    if not Options["No-Action"]:
        session.commit()

    # Delete files from the pool
    old_files = session.query(ArchiveFile).filter('files_archive_map.last_used <= (SELECT delete_date FROM archive_delete_date ad WHERE ad.archive_id = files_archive_map.archive_id)').join(Archive)
    if max_delete is not None:
        old_files = old_files.limit(max_delete)
        Logger.log(["Limiting removals to %d" % max_delete])

    if archives is not None:
        archive_ids = [ a.archive_id for a in archives ]
        old_files = old_files.filter(ArchiveFile.archive_id.in_(archive_ids))

    for af in old_files:
        filename = af.path
        if not os.path.exists(filename):
            Logger.log(["database referred to non-existing file", af.path])
            session.delete(af)
            continue
        Logger.log(["delete archive file", filename])
        if os.path.isfile(filename):
            if os.path.islink(filename):
                count += 1
                Logger.log(["delete symlink", filename])
                if not Options["No-Action"]:
                    os.unlink(filename)
            else:
                size += os.stat(filename)[stat.ST_SIZE]
                count += 1

                dest_filename = dest + '/' + os.path.basename(filename)
                # If the destination file exists, try to find another filename to use
                if os.path.lexists(dest_filename):
                    dest_filename = utils.find_next_free(dest_filename)

                if not Options["No-Action"]:
                    if af.archive.use_morgue:
                        Logger.log(["move to morgue", filename, dest_filename])
                        utils.move(filename, dest_filename)
                    else:
                        Logger.log(["removed file", filename])
                        os.unlink(filename)

            if not Options["No-Action"]:
                session.delete(af)
                session.commit()

        else:
            utils.fubar("%s is neither symlink nor file?!" % (filename))

    if count > 0:
        Logger.log(["total", count, utils.size_type(size)])

    # Delete entries in files no longer referenced by any archive
    query = """
       DELETE FROM files f
        WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af WHERE af.file_id = f.id)
    """
    session.execute(query)

    if not Options["No-Action"]:
        session.commit()
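
The closing DELETE, dropping files rows that no files_archive_map row references any more, is plain SQL and easy to test in isolation. A self-contained illustration with sqlite3 and toy tables:

import sqlite3

db = sqlite3.connect(":memory:")
db.executescript("""
  CREATE TABLE files (id INTEGER PRIMARY KEY, filename TEXT);
  CREATE TABLE files_archive_map (file_id INTEGER, archive_id INTEGER);
  INSERT INTO files VALUES (1, 'kept.deb'), (2, 'orphan.deb');
  INSERT INTO files_archive_map VALUES (1, 10);
""")
db.execute("""
  DELETE FROM files
   WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af
                      WHERE af.file_id = files.id)
""")
print(db.execute("SELECT filename FROM files").fetchall())  # [('kept.deb',)]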
Beispiel #42
0
def comment_accept(upload, srcqueue, comments, transaction):
    for byhand in upload.byhand:
        path = os.path.join(srcqueue.path, byhand.filename)
        if os.path.exists(path):
            raise Exception(
                'E: cannot ACCEPT upload with unprocessed byhand file {0}'.
                format(byhand.filename))

    cnf = Config()

    fs = transaction.fs
    session = transaction.session
    changesname = upload.changes.changesname
    allow_tainted = srcqueue.suite.archive.tainted

    # We need overrides to get the target component
    overridesuite = upload.target_suite
    if overridesuite.overridesuite is not None:
        overridesuite = session.query(Suite).filter_by(
            suite_name=overridesuite.overridesuite).one()

    def binary_component_func(db_binary):
        override = session.query(Override).filter_by(suite=overridesuite, package=db_binary.package) \
            .join(OverrideType).filter(OverrideType.overridetype == db_binary.binarytype) \
            .join(Component).one()
        return override.component

    def source_component_func(db_source):
        override = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
            .join(Component).one()
        return override.component

    all_target_suites = [upload.target_suite]
    all_target_suites.extend(
        [q.suite for q in upload.target_suite.copy_queues])

    for suite in all_target_suites:
        if upload.source is not None:
            transaction.copy_source(upload.source,
                                    suite,
                                    source_component_func(upload.source),
                                    allow_tainted=allow_tainted)
        for db_binary in upload.binaries:
            # build queues may miss the source package if this is a binary-only upload
            if suite != upload.target_suite:
                transaction.copy_source(db_binary.source,
                                        suite,
                                        source_component_func(
                                            db_binary.source),
                                        allow_tainted=allow_tainted)
            transaction.copy_binary(
                db_binary,
                suite,
                binary_component_func(db_binary),
                allow_tainted=allow_tainted,
                extra_archives=[upload.target_suite.archive])

    # Copy .changes if needed
    if upload.target_suite.copychanges:
        src = os.path.join(upload.policy_queue.path,
                           upload.changes.changesname)
        dst = os.path.join(upload.target_suite.path,
                           upload.changes.changesname)
        fs.copy(src, dst, mode=upload.target_suite.archive.mode)

    # Copy upload to Process-Policy::CopyDir
    # Used on security.d.o to sync accepted packages to ftp-master, but this
    # should eventually be replaced by something else.
    copydir = cnf.get('Process-Policy::CopyDir') or None
    if copydir is not None:
        mode = upload.target_suite.archive.mode
        if upload.source is not None:
            for f in [df.poolfile for df in upload.source.srcfiles]:
                dst = os.path.join(copydir, f.basename)
                if not os.path.exists(dst):
                    fs.copy(f.fullpath, dst, mode=mode)

        for db_binary in upload.binaries:
            f = db_binary.poolfile
            dst = os.path.join(copydir, f.basename)
            if not os.path.exists(dst):
                fs.copy(f.fullpath, dst, mode=mode)

        src = os.path.join(upload.policy_queue.path,
                           upload.changes.changesname)
        dst = os.path.join(copydir, upload.changes.changesname)
        if not os.path.exists(dst):
            fs.copy(src, dst, mode=mode)

    if upload.source is not None and not Options['No-Action']:
        urgency = upload.changes.urgency
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(upload.source.source, upload.source.version, urgency)

    print "  ACCEPT"
    if not Options['No-Action']:
        Logger.log(["Policy Queue ACCEPT", srcqueue.queue_name, changesname])

    pu = get_processed_upload(upload)
    daklib.announce.announce_accept(pu)

    # TODO: code duplication. Similar code is in process-upload.
    # Move .changes to done
    src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
    now = datetime.datetime.now()
    donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
    dst = os.path.join(donedir, upload.changes.changesname)
    dst = utils.find_next_free(dst)
    fs.copy(src, dst, mode=0o644)

    remove_upload(upload, transaction)
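
comment_accept resolves the target component through the override tables, indirecting via overridesuite when one is set. The indirection itself, reduced to plain dicts (table layout is illustrative):

suites = {"stable": {"overridesuite": None},
          "stable-updates": {"overridesuite": "stable"}}
overrides = {("stable", "foo", "deb"): "main"}

def component_for(suite_name, package, overridetype="deb"):
    # Follow the overridesuite indirection before the override lookup.
    target = suites[suite_name]["overridesuite"] or suite_name
    return overrides[(target, package, overridetype)]

print(component_for("stable-updates", "foo"))  # main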
Beispiel #43
0
    def generate_release_files(self):
        """
        Generate Release files for the given suite

        @type suite: string
        @param suite: Suite name
        """

        suite = self.suite
        session = object_session(suite)

        architectures = get_suite_architectures(suite.suite_name, skipall=True, skipsrc=True, session=session)

        # Attribs contains a tuple of field names and the database names to use to
        # fill them in
        attribs = (('Origin',      'origin'),
                   ('Label',       'label'),
                   ('Suite',       'release_suite_output'),
                   ('Version',     'version'),
                   ('Codename',    'codename'),
                   ('Changelogs',  'changelog_url'),
                   )

        # A "Sub" Release file has slightly different fields
        subattribs = (('Archive',  'suite_name'),
                      ('Origin',   'origin'),
                      ('Label',    'label'),
                      ('Version',  'version'))

        # Boolean stuff. If we find it true in the database, write out "yes" into the release file
        boolattrs = (('NotAutomatic',         'notautomatic'),
                     ('ButAutomaticUpgrades', 'butautomaticupgrades'),
                     ('Acquire-By-Hash',      'byhash'),
                     )

        cnf = Config()
        cnf_suite_suffix = cnf.get("Dinstall::SuiteSuffix", "").rstrip("/")

        suite_suffix = utils.suite_suffix(suite.suite_name)

        self.create_output_directories()
        self.create_release_symlinks()

        outfile = os.path.join(self.suite_release_path(), "Release")
        out = open(outfile + ".new", "w")

        for key, dbfield in attribs:
            # Hack to skip NULL Version fields as we used to do this
            # We should probably just always ignore anything which is None
            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
                continue

            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))

        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))

        if suite.validtime:
            validtime = float(suite.validtime)
            out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time() + validtime))))

        for key, dbfield in boolattrs:
            if getattr(suite, dbfield, False):
                out.write("%s: yes\n" % (key))

        out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))

        components = [c.component_name for c in suite.components]

        out.write("Components: %s\n" % (" ".join(components)))

        # For exact compatibility with old g-r, write out Description here instead
        # of with the rest of the DB fields above
        if getattr(suite, 'description') is not None:
            out.write("Description: %s\n" % suite.description)

        for comp in components:
            for dirpath, dirnames, filenames in os.walk(os.path.join(self.suite_path(), comp), topdown=True):
                if not re_gensubrelease.match(dirpath):
                    continue

                subfile = os.path.join(dirpath, "Release")
                subrel = open(subfile + '.new', "w")

                for key, dbfield in subattribs:
                    if getattr(suite, dbfield) is not None:
                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))

                for key, dbfield in boolattrs:
                    if getattr(suite, dbfield, False):
                        subrel.write("%s: yes\n" % (key))

                subrel.write("Component: %s%s\n" % (suite_suffix, comp))

                # Urgh, but until we have all the suite/component/arch stuff in the DB,
                # this'll have to do
                arch = os.path.split(dirpath)[-1]
                if arch.startswith('binary-'):
                    arch = arch[7:]

                subrel.write("Architecture: %s\n" % (arch))
                subrel.close()

                os.rename(subfile + '.new', subfile)

        # Now that we have done the groundwork, we want to get off and add the files with
        # their checksums to the main Release file
        oldcwd = os.getcwd()

        os.chdir(self.suite_path())

        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]

        fileinfo = {}
        fileinfo_byhash = {}

        uncompnotseen = {}

        for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
            # SuiteSuffix deprecation:
            # components on security-master are updates/{main,contrib,non-free}, but
            # we want dists/${suite}/main.  Until we can rename the components,
            # we cheat by having an updates -> . symlink.  This should not be visited.
            if cnf_suite_suffix:
                path = os.path.join(dirpath, cnf_suite_suffix)
                try:
                    target = os.readlink(path)
                    if target == ".":
                        dirnames.remove(cnf_suite_suffix)
                except (OSError, ValueError):
                    pass
            for entry in filenames:
                if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
                    continue

                filename = os.path.join(dirpath.lstrip('./'), entry)

                if re_includeinrelease_byhash.match(entry):
                    fileinfo[filename] = fileinfo_byhash[filename] = {}
                elif re_includeinrelease_plain.match(entry):
                    fileinfo[filename] = {}
                # Skip things we don't want to include
                else:
                    continue

                contents = open(filename, 'r').read()

                # If we find a file for which we have a compressed version and
                # haven't yet seen the uncompressed one, store the possibility
                # for future use
                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (XzFile, filename)

                fileinfo[filename]['len'] = len(contents)

                for hf in hashes:
                    fileinfo[filename][hf.release_field] = hf.func(contents)

        for filename, comp in uncompnotseen.items():
            # If we've already seen the uncompressed file, we don't
            # need to do anything again
            if filename in fileinfo:
                continue

            fileinfo[filename] = {}

            # File handler is comp[0], filename of compressed file is comp[1]
            contents = comp[0](comp[1], 'r').read()

            fileinfo[filename]['len'] = len(contents)

            for hf in hashes:
                fileinfo[filename][hf.release_field] = hf.func(contents)

        for field in sorted(h.release_field for h in hashes):
            out.write('%s:\n' % field)
            for filename in sorted(fileinfo.keys()):
                out.write(" %s %8d %s\n" % (fileinfo[filename][field], fileinfo[filename]['len'], filename))

        out.close()
        os.rename(outfile + '.new', outfile)

        self._update_hashfile_table(session, fileinfo_byhash, hashes)
        self._make_byhash_links(fileinfo_byhash, hashes)
        self._make_byhash_base_symlink(fileinfo_byhash, hashes)

        sign_release_dir(suite, os.path.dirname(outfile))

        os.chdir(oldcwd)

        return
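
The checksum sections written at the end are one header per digest followed by "hash size filename" lines. A minimal sketch of that formatting step with hashlib, assuming the contents have already been read:

import hashlib

def release_hash_lines(field, func, contents_by_name):
    # contents_by_name: filename -> bytes, as collected by the walk above.
    lines = ["%s:" % field]
    for filename in sorted(contents_by_name):
        contents = contents_by_name[filename]
        lines.append(" %s %8d %s" % (func(contents).hexdigest(),
                                     len(contents), filename))
    return "\n".join(lines)

print(release_hash_lines("SHA256", hashlib.sha256,
                         {"main/source/Release": b"Archive: unstable\n"}))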
Beispiel #44
0
def main():
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    Arguments = [('h', "help", "Cruft-Report::Options::Help"),
                 ('m', "mode", "Cruft-Report::Options::Mode", "HasArg"),
                 ('R', "rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s', "suite", "Cruft-Report::Options::Suite", "HasArg"),
                 ('w', "wanna-build-dump",
                  "Cruft-Report::Options::Wanna-Build-Dump", "HasArg")]
    for i in ["help", "Rdep-Check"]:
        if not cnf.has_key("Cruft-Report::Options::%s" % (i)):
            cnf["Cruft-Report::Options::%s" % (i)] = ""

    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite",
                                                  "unstable")

    if not cnf.has_key("Cruft-Report::Options::Mode"):
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if not cnf.has_key("Cruft-Report::Options::Wanna-Build-Dump"):
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
    if Options["Mode"] == "daily":
        checks = [
            "nbs", "nviu", "nvit", "obsolete source", "outdated non-free",
            "nfu"
        ]
    elif Options["Mode"] == "full":
        checks = [
            "nbs", "nviu", "nvit", "obsolete source", "outdated non-free",
            "nfu", "dubious nbs", "bnb", "bms", "anais"
        ]
    elif Options["Mode"] == "bdo":
        checks = ["nbs", "obsolete source"]
    else:
        utils.warn(
            "%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood."
            % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}

    anais_output = ""

    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources.gz" % (suite.archive.path,
                                                         suite_name, component)
        # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
        (fd, temp_filename) = utils.temp_filename()
        (result, output) = commands.getstatusoutput("gunzip -c %s > %s" %
                                                    (filename, temp_filename))
        if (result != 0):
            sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
            sys.exit(result)
        sources = utils.open_file(temp_filename)
        Sources = apt_pkg.TagFile(sources)
        while Sources.step():
            source = Sources.section.find('Package')
            source_version = Sources.section.find('Version')
            architecture = Sources.section.find('Architecture')
            binaries = Sources.section.find('Binary')
            binaries_list = [i.strip() for i in binaries.split(',')]

            if "bnb" in checks:
                # Check for binaries not built on any architecture.
                for binary in binaries_list:
                    if not bins_in_suite.has_key(binary):
                        bin_not_built.setdefault(source, {})
                        bin_not_built[source][binary] = ""

            if "anais" in checks:
                anais_output += do_anais(architecture, binaries_list, source,
                                         session)

            # build indices for checking "no source" later
            source_index = component + '/' + source
            src_pkgs[source] = source_index
            for binary in binaries_list:
                bin_pkgs[binary] = source
            source_binaries[source] = binaries
            source_versions[source] = source_version

        sources.close()
        os.unlink(temp_filename)

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "staging":
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [
            a.arch_string for a in get_suite_architectures(
                suite_name, skipsrc=True, skipall=True, session=session)
        ]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match(
                    "kfreebsd", architecture):
                continue
            filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
                suite.archive.path, suite_name, component, architecture)
            # apt_pkg.TagFile needs a real file handle
            (fd, temp_filename) = utils.temp_filename()
            (result, output) = commands.getstatusoutput(
                "gunzip -c %s > %s" % (filename, temp_filename))
            if (result != 0):
                sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
                sys.exit(result)

            if "nfu" in checks:
                nfu_packages.setdefault(architecture, [])
                nfu_entries = parse_nfu(architecture)

            packages = utils.open_file(temp_filename)
            Packages = apt_pkg.TagFile(packages)
            while Packages.step():
                package = Packages.section.find('Package')
                source = Packages.section.find('Source', "")
                version = Packages.section.find('Version')
                if source == "":
                    source = package
                if bin2source.has_key(package) and \
                       apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                else:
                    bin2source[package] = {}
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                if source.find("(") != -1:
                    m = re_extract_src_version.match(source)
                    source = m.group(1)
                    version = m.group(2)
                if not bin_pkgs.has_key(package):
                    nbs.setdefault(source, {})
                    nbs[source].setdefault(package, {})
                    nbs[source][package][version] = ""
                else:
                    if "nfu" in checks:
                        if package in nfu_entries and \
                               version != source_versions[source]: # only suggest to remove out-of-date packages
                            nfu_packages[architecture].append(
                                (package, version, source_versions[source]))

            packages.close()
            os.unlink(temp_filename)

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
    dubious_nbs = {}
    for source in nbs.keys():
        for package in nbs[source].keys():
            versions = nbs[source][package].keys()
            versions.sort(apt_pkg.version_compare)
            latest_version = versions.pop()
            source_version = source_versions.get(source, "0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package, suite_id,
                        session)

    if "nviu" in checks:
        do_newer_version('chromodoris', 'staging', 'NVIU', session)

    # FIXME: Not used in Tanglu
    #if "nvit" in checks:
    #    do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)

    ###

    if Options["Mode"] == "full":
        print "=" * 75
        print

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print "Unbuilt binary packages"
        print "-----------------------"
        print
        keys = bin_not_built.keys()
        keys.sort()
        for source in keys:
            binaries = bin_not_built[source].keys()
            binaries.sort()
            print " o %s: %s" % (source, ", ".join(binaries))
        print

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print "Architecture Not Allowed In Source"
        print "----------------------------------"
        print anais_output
        print

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)
Beispiel #45
0
def clean(now_date, delete_date, max_delete, session):
    cnf = Config()

    count = 0
    size = 0

    print "Cleaning out packages..."

    morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue'))
    morguesubdir = cnf.get("Clean-Suites::MorgueSubDir", 'pool')

    # Build directory as morguedir/morguesubdir/year/month/day
    dest = os.path.join(morguedir,
                        morguesubdir,
                        str(now_date.year),
                        '%.2d' % now_date.month,
                        '%.2d' % now_date.day)

    if not Options["No-Action"] and not os.path.exists(dest):
        os.makedirs(dest)

    # Delete from source
    print "Deleting from source table... "
    q = session.execute("""
SELECT s.id, f.filename FROM source s, files f
  WHERE f.last_used <= :deletedate
        AND s.file = f.id
        AND s.id NOT IN (SELECT src_id FROM extra_src_references)""", {'deletedate': delete_date})
    for s in q.fetchall():
        Logger.log(["delete source", s[1], s[0]])
        if not Options["No-Action"]:
            session.execute("DELETE FROM dsc_files WHERE source = :s_id", {"s_id":s[0]})
            session.execute("DELETE FROM source WHERE id = :s_id", {"s_id":s[0]})

    if not Options["No-Action"]:
        session.commit()

    # Delete files from the pool
    old_files = session.query(PoolFile).filter(PoolFile.last_used <= delete_date)
    if max_delete is not None:
        old_files = old_files.limit(max_delete)
        print "Limiting removals to %d" % max_delete

    for pf in old_files:
        filename = os.path.join(pf.location.path, pf.filename)
        if not os.path.exists(filename):
            utils.warn("can not find '%s'." % (filename))
            continue
        Logger.log(["delete pool file", filename])
        if os.path.isfile(filename):
            if os.path.islink(filename):
                count += 1
                Logger.log(["delete symlink", filename])
                if not Options["No-Action"]:
                    os.unlink(filename)
            else:
                size += os.stat(filename)[stat.ST_SIZE]
                count += 1

                dest_filename = dest + '/' + os.path.basename(filename)
                # If the destination file exists, try to find another filename to use
                if os.path.exists(dest_filename):
                    dest_filename = utils.find_next_free(dest_filename)

                Logger.log(["move to morgue", filename, dest_filename])
                if not Options["No-Action"]:
                    utils.move(filename, dest_filename)

            if not Options["No-Action"]:
                session.delete(pf)
                session.commit()

        else:
            utils.fubar("%s is neither symlink nor file?!" % (filename))

    if count > 0:
        Logger.log(["total", count, utils.size_type(size)])
        print "Cleaned %d files, %s." % (count, utils.size_type(size))
Beispiel #46
0
def comment_accept(upload, srcqueue, comments, transaction):
    for byhand in upload.byhand:
        path = os.path.join(srcqueue.path, byhand.filename)
        if os.path.exists(path):
            raise Exception('E: cannot ACCEPT upload with unprocessed byhand file {0}'.format(byhand.filename))

    cnf = Config()

    fs = transaction.fs
    session = transaction.session
    changesname = upload.changes.changesname
    allow_tainted = srcqueue.suite.archive.tainted

    # We need overrides to get the target component
    overridesuite = upload.target_suite
    if overridesuite.overridesuite is not None:
        overridesuite = session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()

    def binary_component_func(db_binary):
        section = db_binary.proxy['Section']
        component_name = 'main'
        if section.find('/') != -1:
            component_name = section.split('/', 1)[0]
        return get_mapped_component(component_name, session=session)

    def source_component_func(db_source):
        package_list = PackageList(db_source.proxy)
        component = source_component_from_package_list(package_list, upload.target_suite)
        if component is not None:
            return get_mapped_component(component.component_name, session=session)

        # Fallback for packages without Package-List field
        query = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
            .join(Component)
        return query.one().component

    all_target_suites = [upload.target_suite]
    all_target_suites.extend([q.suite for q in upload.target_suite.copy_queues])

    for suite in all_target_suites:
        if upload.source is not None:
            transaction.copy_source(upload.source, suite, source_component_func(upload.source), allow_tainted=allow_tainted)
        for db_binary in upload.binaries:
            # build queues may miss the source package if this is a binary-only upload
            if suite != upload.target_suite:
                transaction.copy_source(db_binary.source, suite, source_component_func(db_binary.source), allow_tainted=allow_tainted)
            transaction.copy_binary(db_binary, suite, binary_component_func(db_binary), allow_tainted=allow_tainted, extra_archives=[upload.target_suite.archive])

    # Copy .changes if needed
    if upload.target_suite.copychanges:
        src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
        dst = os.path.join(upload.target_suite.path, upload.changes.changesname)
        fs.copy(src, dst, mode=upload.target_suite.archive.mode)

    # Copy upload to Process-Policy::CopyDir
    # Used on security.d.o to sync accepted packages to ftp-master, but this
    # should eventually be replaced by something else.
    copydir = cnf.get('Process-Policy::CopyDir') or None
    if copydir is not None:
        mode = upload.target_suite.archive.mode
        if upload.source is not None:
            for f in [ df.poolfile for df in upload.source.srcfiles ]:
                dst = os.path.join(copydir, f.basename)
                if not os.path.exists(dst):
                    fs.copy(f.fullpath, dst, mode=mode)

        for db_binary in upload.binaries:
            f = db_binary.poolfile
            dst = os.path.join(copydir, f.basename)
            if not os.path.exists(dst):
                fs.copy(f.fullpath, dst, mode=mode)

        src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
        dst = os.path.join(copydir, upload.changes.changesname)
        if not os.path.exists(dst):
            fs.copy(src, dst, mode=mode)

    if upload.source is not None and not Options['No-Action']:
        urgency = upload.changes.urgency
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(upload.source.source, upload.source.version, urgency)

    print "  ACCEPT"
    if not Options['No-Action']:
        Logger.log(["Policy Queue ACCEPT", srcqueue.queue_name, changesname])

    pu = get_processed_upload(upload)
    daklib.announce.announce_accept(pu)

    # TODO: code duplication. Similar code is in process-upload.
    # Move .changes to done
    src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
    now = datetime.datetime.now()
    donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
    dst = os.path.join(donedir, upload.changes.changesname)
    dst = utils.find_next_free(dst)
    fs.copy(src, dst, mode=0o644)

    remove_upload(upload, transaction)
Beispiel #47
0
def main():
    cnf = Config()

    Arguments = [('n', "no-action", "Import-Users-From-Passwd::Options::No-Action"),
                 ('q', "quiet", "Import-Users-From-Passwd::Options::Quiet"),
                 ('v', "verbose", "Import-Users-From-Passwd::Options::Verbose"),
                 ('h', "help", "Import-Users-From-Passwd::Options::Help")]
    for i in [ "no-action", "quiet", "verbose", "help" ]:
        if not cnf.has_key("Import-Users-From-Passwd::Options::%s" % (i)):
            cnf["Import-Users-From-Passwd::Options::%s" % (i)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Import-Users-From-Passwd::Options")

    if Options["Help"]:
        usage()
    elif arguments:
        utils.warn("dak import-users-from-passwd takes no non-option arguments.")
        usage(1)

    session = DBConn().session()
    valid_gid = cnf.get("Import-Users-From-Passwd::ValidGID", "")
    if valid_gid:
        debiangrp = grp.getgrnam(valid_gid).gr_mem
    else:
        debiangrp = []

    passwd_unames = {}
    for entry in pwd.getpwall():
        uname = entry[0]
        if uname not in debiangrp:
            if Options["Verbose"]:
                print "Skipping %s (Not in group %s)." % (uname, valid_gid)
            continue
        passwd_unames[uname] = ""

    postgres_unames = {}
    q = session.execute("SELECT usename FROM pg_user")
    for i in q.fetchall():
        uname = i[0]
        postgres_unames[uname] = ""

    known_postgres_unames = {}
    for i in cnf.get("Import-Users-From-Passwd::KnownPostgres","").split(","):
        uname = i.strip()
        known_postgres_unames[uname] = ""

    keys = postgres_unames.keys()
    keys.sort()
    for uname in keys:
        if not passwd_unames.has_key(uname) and not known_postgres_unames.has_key(uname):
            print "I: Deleting %s from Postgres, no longer in passwd or list of known Postgres users" % (uname)
            q = session.execute('DROP USER "%s"' % (uname))

    keys = passwd_unames.keys()
    keys.sort()
    safe_name = re.compile('^[A-Za-z0-9]+$')
    for uname in keys:
        if not postgres_unames.has_key(uname):
            if not Options["Quiet"]:
                print "Creating %s user in Postgres." % (uname)
            if not Options["No-Action"]:
                if safe_name.match(uname):
                    # NB: I never figured out how to use a bind parameter for this query
                    # XXX: Fix this as it looks like a potential SQL injection attack to me
                    #      (hence the safe_name match we do)
                    try:
                        q = session.execute('CREATE USER "%s"' % (uname))
                        session.commit()
                    except Exception as e:
                        utils.warn("Could not create user %s (%s)" % (uname, str(e)))
                        session.rollback()
                else:
                    print "NOT CREATING USER %s.  Doesn't match safety regex" % uname

    session.commit()
Beispiel #48
0
    def action_dm(self, fingerprint, section, session):
        cnf = Config()

        if 'Command::DM::AdminKeyrings' not in cnf \
                or 'Command::DM::ACL' not in cnf \
                or 'Command::DM::Keyrings' not in cnf:
            raise CommandError('DM command is not configured for this archive.')

        allowed_keyrings = cnf.value_list('Command::DM::AdminKeyrings')
        if fingerprint.keyring.keyring_name not in allowed_keyrings:
            raise CommandError('Key {0} is not allowed to set DM'.format(fingerprint.fingerprint))

        acl_name = cnf.get('Command::DM::ACL', 'dm')
        acl = session.query(ACL).filter_by(name=acl_name).one()

        fpr_hash = section['Fingerprint'].translate(None, ' ')
        fpr = session.query(Fingerprint).filter_by(fingerprint=fpr_hash).first()
        if fpr is None:
            raise CommandError('Unknown fingerprint {0}'.format(fpr_hash))
        if fpr.keyring is None or fpr.keyring.keyring_name not in cnf.value_list('Command::DM::Keyrings'):
            raise CommandError('Key {0} is not in DM keyring.'.format(fpr.fingerprint))
        addresses = gpg_get_key_addresses(fpr.fingerprint)
        if len(addresses) > 0:
            self.cc.append(addresses[0])

        self.log.log(['dm', 'fingerprint', fpr.fingerprint])
        self.result.append('Fingerprint: {0}'.format(fpr.fingerprint))
        if len(addresses) > 0:
            self.log.log(['dm', 'uid', addresses[0]])
            self.result.append('Uid: {0}'.format(addresses[0]))

        for source in self._split_packages(section.get('Allow', '')):
            # Check for existence of source package to catch typos
            if session.query(DBSource).filter_by(source=source).first() is None:
                raise CommandError('Tried to grant permissions for unknown source package: {0}'.format(source))

            if session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr, source=source).first() is None:
                aps = ACLPerSource()
                aps.acl = acl
                aps.fingerprint = fpr
                aps.source = source
                aps.created_by = fingerprint
                aps.reason = section.get('Reason')
                session.add(aps)
                self.log.log(['dm', 'allow', fpr.fingerprint, source])
                self.result.append('Allowed: {0}'.format(source))
            else:
                self.result.append('Already-Allowed: {0}'.format(source))

        session.flush()

        for source in self._split_packages(section.get('Deny', '')):
            count = session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr, source=source).delete()
            if count == 0:
                raise CommandError('Tried to remove upload permissions for package {0}, '
                                   'but no upload permissions were granted before.'.format(source))

            self.log.log(['dm', 'deny', fpr.fingerprint, source])
            self.result.append('Denied: {0}'.format(source))

        session.commit()
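
For reference, the section that action_dm consumes would look roughly like the following inside a signed *.dak-commands file (field names are taken from the code above; the fingerprint and package names are placeholders, and the file's signed header paragraph is omitted):

Action: dm
Fingerprint: 0123 4567 89AB CDEF 0123 4567 89AB CDEF 0123 4567
Allow: somepackage otherpackage
Deny: oldpackage
Reason: DM permissions requested on debian-newmaint

Note that spaces in the Fingerprint value are stripped by the translate() call before the database lookup, so the spaced form shown here is accepted.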
Beispiel #49
0
def main(argv=None):
    if argv is None:
        argv = sys.argv

    arguments = [('h', 'help', 'Process-Commands::Options::Help'),
                 ('d', 'directory', 'Process-Commands::Options::Directory',
                  'HasArg')]

    cnf = Config()
    cnf['Process-Commands::Options::Dummy'] = ''
    filenames = apt_pkg.parse_commandline(cnf.Cnf, arguments, argv)
    options = cnf.subtree('Process-Commands::Options')

    if 'Help' in options or (len(filenames) == 0
                             and 'Directory' not in options):
        usage()
        sys.exit(0)

    log = Logger('command')

    now = datetime.datetime.now()
    donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
    rejectdir = cnf['Dir::Reject']

    if len(filenames) == 0:
        cdir = options['Directory']
        filenames = [
            os.path.join(cdir, fn) for fn in os.listdir(cdir)
            if fn.endswith('.dak-commands')
        ]

    for fn in filenames:
        basename = os.path.basename(fn)
        if not fn.endswith('.dak-commands'):
            log.log(['unexpected filename', basename])
            continue

        with open(fn, 'r') as fh:
            data = fh.read()

        try:
            command = CommandFile(basename, data, log)
            command.evaluate()
        except CommandError as e:
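            # The upload may still be in progress; skip files younger than
            # Dinstall::SkipTime seconds instead of rejecting them outright.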
            created = os.stat(fn).st_mtime
            now = time.time()
            too_new = (now - created < int(cnf.get('Dinstall::SkipTime',
                                                   '60')))
            if too_new:
                log.log(['skipped (too new)'])
                continue
            log.log(['reject', basename, e])
            dst = find_next_free(os.path.join(rejectdir, basename))
        except Exception as e:
            log.log_traceback('Exception while processing %s:' % (basename), e)
            dst = find_next_free(os.path.join(rejectdir, basename))
        else:
            log.log(['done', basename])
            dst = find_next_free(os.path.join(donedir, basename))

        with FilesystemTransaction() as fs:
            fs.unlink(fn)
            fs.create(dst, mode=0o644).write(data)
            fs.commit()

    log.close()
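
main() leans on find_next_free to pick a destination that does not collide with an earlier processed file of the same name. The real helper lives in daklib.utils; a minimal sketch of the behaviour it needs:

import os

def find_next_free(dest, too_many=100):
    # Try dest, then dest.0, dest.1, ... until an unused name is found, so a
    # re-processed command file never overwrites an earlier copy.
    orig_dest = dest
    extra = 0
    while os.path.exists(dest):
        if extra >= too_many:
            raise RuntimeError('could not find a free name for %s' % orig_dest)
        dest = '%s.%d' % (orig_dest, extra)
        extra += 1
    return dest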
Beispiel #51
0
def comment_accept(upload, srcqueue, comments, transaction):
    for byhand in upload.byhand:
        path = os.path.join(srcqueue.path, byhand.filename)
        if os.path.exists(path):
            raise Exception('E: cannot ACCEPT upload with unprocessed byhand file {0}'.format(byhand.filename))

    cnf = Config()

    fs = transaction.fs
    session = transaction.session
    changesname = upload.changes.changesname
    allow_tainted = srcqueue.suite.archive.tainted

    # We need overrides to get the target component
    overridesuite = upload.target_suite
    if overridesuite.overridesuite is not None:
        overridesuite = session.query(Suite).filter_by(suite_name=overridesuite.overridesuite).one()

    def binary_component_func(db_binary):
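        # e.g. a Section of 'contrib/net' maps to component 'contrib', while a
        # plain 'net' falls through to the 'main' default.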
        section = db_binary.proxy['Section']
        component_name = 'main'
        if section.find('/') != -1:
            component_name = section.split('/', 1)[0]
        return get_mapped_component(component_name, session=session)

    def is_debug_binary(db_binary):
        return daklib.utils.is_in_debug_section(db_binary.proxy)

    def has_debug_binaries(upload):
        return any((is_debug_binary(x) for x in upload.binaries))

    def source_component_func(db_source):
        package_list = PackageList(db_source.proxy)
        component = source_component_from_package_list(package_list, upload.target_suite)
        if component is not None:
            return get_mapped_component(component.component_name, session=session)

        # Fallback for packages without Package-List field
        query = session.query(Override).filter_by(suite=overridesuite, package=db_source.source) \
            .join(OverrideType).filter(OverrideType.overridetype == 'dsc') \
            .join(Component)
        return query.one().component

    all_target_suites = [upload.target_suite]
    all_target_suites.extend([q.suite for q in upload.target_suite.copy_queues])

    for suite in all_target_suites:
        debug_suite = suite.debug_suite

        if upload.source is not None:
            # If we have Source in this upload, let's include it in the
            # target suite.
            transaction.copy_source(
                upload.source,
                suite,
                source_component_func(upload.source),
                allow_tainted=allow_tainted,
            )

            if debug_suite is not None and has_debug_binaries(upload):
                # If we're handling a debug package, we also need to include
                # the source in the debug suite.
                transaction.copy_source(
                    upload.source,
                    debug_suite,
                    source_component_func(upload.source),
                    allow_tainted=allow_tainted,
                )

        for db_binary in upload.binaries:
            # Now, let's work out where to copy this guy to -- if it's
            # a debug binary, and the suite has a debug suite, let's go
            # ahead and target the debug suite rather than the stock
            # suite.
            copy_to_suite = suite
            if debug_suite is not None and is_debug_binary(db_binary):
                copy_to_suite = debug_suite

            # build queues and debug suites may miss the source package
            # if this is a binary-only upload.
            if copy_to_suite != upload.target_suite:
                transaction.copy_source(
                    db_binary.source,
                    copy_to_suite,
                    source_component_func(db_binary.source),
                    allow_tainted=allow_tainted,
                )

            transaction.copy_binary(
                db_binary,
                copy_to_suite,
                binary_component_func(db_binary),
                allow_tainted=allow_tainted,
                extra_archives=[upload.target_suite.archive],
            )

            check_upload_for_external_signature_request(session, suite, copy_to_suite, db_binary)

        suite.update_last_changed()

    # Copy .changes if needed
    if upload.target_suite.copychanges:
        src = os.path.join(upload.policy_queue.path, upload.changes.changesname)
        dst = os.path.join(upload.target_suite.path, upload.changes.changesname)
        fs.copy(src, dst, mode=upload.target_suite.archive.mode)

    # List of files in the queue directory
    queue_files = [changesname]
    chg = daklib.upload.Changes(upload.policy_queue.path, changesname, keyrings=[], require_signature=False)
    queue_files.extend(f.filename for f in chg.buildinfo_files)

    # Copy upload to Process-Policy::CopyDir
    # Used on security.d.o to sync accepted packages to ftp-master, but this
    # should eventually be replaced by something else.
    copydir = cnf.get('Process-Policy::CopyDir') or None
    if copydir is not None:
        mode = upload.target_suite.archive.mode
        if upload.source is not None:
            for f in [ df.poolfile for df in upload.source.srcfiles ]:
                dst = os.path.join(copydir, f.basename)
                if not os.path.exists(dst):
                    fs.copy(f.fullpath, dst, mode=mode)

        for db_binary in upload.binaries:
            f = db_binary.poolfile
            dst = os.path.join(copydir, f.basename)
            if not os.path.exists(dst):
                fs.copy(f.fullpath, dst, mode=mode)

        for fn in queue_files:
            src = os.path.join(upload.policy_queue.path, fn)
            dst = os.path.join(copydir, fn)
            # We check for `src` to exist as old uploads in policy queues
            # might still miss the `.buildinfo` files.
            if os.path.exists(src) and not os.path.exists(dst):
                fs.copy(src, dst, mode=mode)

    if upload.source is not None and not Options['No-Action']:
        urgency = upload.changes.urgency
        # As per policy 5.6.17, the urgency can be followed by a space and a
        # comment.  Extract only the urgency from the string.
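        # e.g. an urgency of 'low (HIGH for users of feature X)' reduces to 'low'.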
        if ' ' in urgency:
            (urgency, comment) = urgency.split(' ', 1)
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(upload.source.source, upload.source.version, urgency)

    print "  ACCEPT"
    if not Options['No-Action']:
        Logger.log(["Policy Queue ACCEPT", srcqueue.queue_name, changesname])

    pu = get_processed_upload(upload)
    daklib.announce.announce_accept(pu)

    # TODO: code duplication. Similar code is in process-upload.
    # Move .changes to done
    now = datetime.datetime.now()
    donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
    for fn in queue_files:
        src = os.path.join(upload.policy_queue.path, fn)
        if os.path.exists(src):
            dst = os.path.join(donedir, fn)
            dst = utils.find_next_free(dst)
            fs.copy(src, dst, mode=0o644)

    remove_upload(upload, transaction)
Beispiel #52
0
def main():
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    Arguments = [('h', "help", "Cruft-Report::Options::Help"),
                 ('m', "mode", "Cruft-Report::Options::Mode", "HasArg"),
                 ('R', "rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s', "suite", "Cruft-Report::Options::Suite", "HasArg"),
                 ('w', "wanna-build-dump",
                  "Cruft-Report::Options::Wanna-Build-Dump", "HasArg")]
    for i in ["help", "Rdep-Check"]:
        key = "Cruft-Report::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite",
                                                  "unstable")

    if "Cruft-Report::Options::Mode" not in cnf:
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if "Cruft-Report::Options::Wanna-Build-Dump" not in cnf:
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
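    # (nbs = not built from source, nviu/nvit = newer version in
    # unstable/testing, nfu = not for us, anais = architecture not allowed
    # in source)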
    if Options["Mode"] == "daily":
        checks = [
            "nbs", "nviu", "nvit", "obsolete source", "outdated non-free",
            "nfu", "nbs metadata"
        ]
    elif Options["Mode"] == "full":
        checks = [
            "nbs", "nviu", "nvit", "obsolete source", "outdated non-free",
            "nfu", "nbs metadata", "dubious nbs", "bnb", "bms", "anais"
        ]
    elif Options["Mode"] == "bdo":
        checks = ["nbs", "obsolete source"]
    else:
        utils.warn(
            "%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood."
            % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}
    source_binaries = {}

    anais_output = ""

    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "nbs metadata" in checks:
        reportNBSMetadata(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources" % (suite.archive.path,
                                                      suite_name, component)
        filename = utils.find_possibly_compressed_file(filename)
        with apt_pkg.TagFile(filename) as Sources:
            while Sources.step():
                source = Sources.section.find('Package')
                source_version = Sources.section.find('Version')
                architecture = Sources.section.find('Architecture')
                binaries = Sources.section.find('Binary')
                binaries_list = [i.strip() for i in binaries.split(',')]

                if "bnb" in checks:
                    # Check for binaries not built on any architecture.
                    for binary in binaries_list:
                        if binary not in bins_in_suite:
                            bin_not_built.setdefault(source, {})
                            bin_not_built[source][binary] = ""

                if "anais" in checks:
                    anais_output += do_anais(architecture, binaries_list,
                                             source, session)

                # build indices for checking "no source" later
                source_index = component + '/' + source
                src_pkgs[source] = source_index
                for binary in binaries_list:
                    bin_pkgs[binary] = source
                source_binaries[source] = binaries
                source_versions[source] = source_version

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "experimental":
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [
            a.arch_string for a in get_suite_architectures(
                suite_name, skipsrc=True, skipall=True, session=session)
        ]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match(
                    "kfreebsd", architecture):
                continue

            if "nfu" in checks:
                nfu_packages.setdefault(architecture, [])
                nfu_entries = parse_nfu(architecture)

            filename = "%s/dists/%s/%s/binary-%s/Packages" % (
                suite.archive.path, suite_name, component, architecture)
            filename = utils.find_possibly_compressed_file(filename)
            with apt_pkg.TagFile(filename) as Packages:
                while Packages.step():
                    package = Packages.section.find('Package')
                    source = Packages.section.find('Source', "")
                    version = Packages.section.find('Version')
                    if source == "":
                        source = package
                    # Record the highest version seen for each binary package.
                    if package not in bin2source or \
                           apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                        bin2source[package] = {}
                        bin2source[package]["version"] = version
                        bin2source[package]["source"] = source
                    if source.find("(") != -1:
                        m = re_extract_src_version.match(source)
                        source = m.group(1)
                        version = m.group(2)
                    if package not in bin_pkgs:
                        nbs.setdefault(source, {})
                        nbs[source].setdefault(package, {})
                        nbs[source][package][version] = ""
                    else:
                        if "nfu" in checks:
                            if package in nfu_entries and \
                                   version != source_versions[source]: # only suggest to remove out-of-date packages
                                nfu_packages[architecture].append(
                                    (package, version,
                                     source_versions[source]))

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
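    # e.g. an orphaned binary at the same version as its source is only
    # 'dubious' NBS, while one left over at an older version is real NBS.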
    dubious_nbs = {}
    version_sort_key = functools.cmp_to_key(apt_pkg.version_compare)
    for source in nbs:
        for package in nbs[source]:
            latest_version = max(nbs[source][package], key=version_sort_key)
            source_version = source_versions.get(source, "0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package, suite_id,
                        session)

    if "nviu" in checks:
        do_newer_version('unstable', 'experimental', 'NVIU', session)

    if "nvit" in checks:
        do_newer_version('testing', 'testing-proposed-updates', 'NVIT',
                         session)

    ###

    if Options["Mode"] == "full":
        print("=" * 75)
        print()

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print("Unbuilt binary packages")
        print("-----------------------")
        print()
        for source in sorted(bin_not_built):
            binaries = sorted(bin_not_built[source])
            print(" o %s: %s" % (source, ", ".join(binaries)))
        print()

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print("Architecture Not Allowed In Source")
        print("----------------------------------")
        print(anais_output)
        print()

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)
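
Given the option table at the top of main(), a typical invocation of this report would be something along the lines of:

dak cruft-report --mode full --suite unstable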