Code Example #1
File: clean_suites.py  Project: stapelberg/dak
def clean_byhash(now_date, session):
    cnf = Config()
    suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

    Logger.log(["Cleaning out unused by-hash files..."])

    q = session.execute("""
        DELETE FROM hashfile h
        USING suite s, archive a
        WHERE s.id = h.suite_id
          AND a.id = s.archive_id
          AND h.unreferenced + a.stayofexecution < CURRENT_TIMESTAMP
        RETURNING a.path, s.suite_name, h.path""")
    count = q.rowcount

    if not Options["No-Action"]:
        for base, suite, path in q:
            filename = os.path.join(base, 'dists', suite, suite_suffix, path)
            try:
                os.unlink(filename)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
                Logger.log(['database referred to non-existing file', filename])
            else:
                Logger.log(['delete hashfile', suite, path])
        session.commit()

    if count > 0:
        Logger.log(["total", count])
Code Example #2
File: checks.py  Project: tanglu-org/tdak
    def check(self, upload):
        cnf = Config()
        future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
        past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

        class TarTime(object):
            def __init__(self):
                self.future_files = dict()
                self.past_files = dict()
            def callback(self, member, data):
                if member.mtime > future_cutoff:
                    self.future_files[member.name] = member.mtime
                elif member.mtime < past_cutoff:
                    self.past_files[member.name] = member.mtime

        def format_reason(filename, direction, files):
            reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
            for fn, ts in files.iteritems():
                reason += "  {0} ({1})".format(fn, time.ctime(ts))
            return reason

        for binary in upload.changes.binaries:
            filename = binary.hashed_file.filename
            path = os.path.join(upload.directory, filename)
            deb = apt_inst.DebFile(path)
            tar = TarTime()
            deb.control.go(tar.callback)
            if tar.future_files:
                raise Reject(format_reason(filename, 'future', tar.future_files))
            if tar.past_files:
                raise Reject(format_reason(filename, 'past', tar.past_files))
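
The check above walks the control tarball of each .deb via apt_inst and flags members whose mtime lies beyond a future grace period or before a past cutoff. A rough, self-contained sketch of the same classification using the stdlib tarfile module instead of apt_inst (the defaults mirror the configuration defaults above; the tar path in the comment is hypothetical):

import tarfile
import time

def classify_mtimes(tar_path, future_grace=24 * 3600, past_cutoff_year='1975'):
    # Split tar members into "too new" and "too old" by mtime, mirroring TarTime.
    future_cutoff = time.time() + future_grace
    past_cutoff = time.mktime(time.strptime(past_cutoff_year, '%Y'))
    future_files, past_files = {}, {}
    with tarfile.open(tar_path) as tar:
        for member in tar.getmembers():
            if member.mtime > future_cutoff:
                future_files[member.name] = member.mtime
            elif member.mtime < past_cutoff:
                past_files[member.name] = member.mtime
    return future_files, past_files

# e.g. classify_mtimes('control.tar.gz')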
Code Example #3
File: clean_suites.py  Project: Debian/dak
def clean_byhash(now_date, session):
    cnf = Config()
    suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

    Logger.log(["Cleaning out unused by-hash files..."])

    q = session.execute("""
        DELETE FROM hashfile h
        USING suite s, archive a
        WHERE s.id = h.suite_id
          AND a.id = s.archive_id
          AND h.unreferenced + a.stayofexecution < CURRENT_TIMESTAMP
        RETURNING a.path, s.suite_name, h.path""")
    count = q.rowcount

    if not Options["No-Action"]:
        for base, suite, path in q:
            filename = os.path.join(base, 'dists', suite, suite_suffix, path)
            try:
                os.unlink(filename)
            except OSError as exc:
                if exc.errno != errno.ENOENT:
                    raise
                Logger.log(['database referred to non-existing file', filename])
            else:
                Logger.log(['delete hashfile', suite, path])
        session.commit()

    if count > 0:
        Logger.log(["total", count])
Code Example #4
    def suite_path(self):
        """
        Absolute path to the suite-specific files.
        """
        cnf = Config()
        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

        return os.path.join(self.suite.archive.path, 'dists',
                            self.suite.suite_name, suite_suffix)
Code Example #5
File: generate_releases.py  Project: Debian/dak
    def suite_path(self):
        """
        Absolute path to the suite-specific files.
        """
        cnf = Config()
        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

        return os.path.join(self.suite.archive.path, 'dists',
                            self.suite.suite_name, suite_suffix)
Code Example #6
    def suite_release_path(self):
        """
        Absolute path where Release files are physically stored.
        This should be a path that sorts after the dists/ directory.
        """
        cnf = Config()
        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

        return os.path.join(self.suite.archive.path, 'zzz-dists',
                            self.suite.suite_name, suite_suffix)
Code Example #7
File: generate_releases.py  Project: Debian/dak
    def suite_release_path(self):
        """
        Absolute path where Release files are physically stored.
        This should be a path that sorts after the dists/ directory.
        """
        cnf = Config()
        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

        return os.path.join(self.suite.archive.path, 'zzz-dists',
                            self.suite.suite_name, suite_suffix)
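
Both path helpers above are plain os.path.join compositions; the only difference is 'dists' versus 'zzz-dists', the latter chosen so the release area sorts after dists/. A quick illustration with made-up values (Dinstall::SuiteSuffix is normally empty):

import os

archive_path = '/srv/archive'      # invented example path
suite_name = 'unstable'
suite_suffix = ''                  # empty Dinstall::SuiteSuffix

print(os.path.join(archive_path, 'dists', suite_name, suite_suffix))
# -> /srv/archive/dists/unstable/  (the empty suffix only leaves a trailing separator)
print(os.path.join(archive_path, 'zzz-dists', suite_name, suite_suffix))
# -> /srv/archive/zzz-dists/unstable/  (sorts after dists/, as the docstring requires)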
Code Example #8
File: override.py  Project: ximion/dak
def main():
    cnf = Config()

    Arguments = [
        ("h", "help", "Override::Options::Help"),
        ("c", "check", "Override::Options::Check"),
        ("d", "done", "Override::Options::Done", "HasArg"),
        ("n", "no-action", "Override::Options::No-Action"),
        ("s", "suite", "Override::Options::Suite", "HasArg"),
    ]
    for i in ["help", "check", "no-action"]:
        if not cnf.has_key("Override::Options::%s" % (i)):
            cnf["Override::Options::%s" % (i)] = ""
    if not cnf.has_key("Override::Options::Suite"):
        cnf["Override::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Override::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    if not arguments:
        utils.fubar("package name is a required argument.")

    package = arguments.pop(0)
    suite_name = Options["Suite"]
    if arguments and len(arguments) > 2:
        utils.fubar("Too many arguments")

    suite = get_suite(suite_name, session)
    if suite is None:
        utils.fubar("Unknown suite '{0}'".format(suite_name))

    if arguments and len(arguments) == 1:
        # Determine if the argument is a priority or a section...
        arg = arguments.pop()
        q = session.execute(
            """
        SELECT ( SELECT COUNT(*) FROM section WHERE section = :arg ) AS secs,
               ( SELECT COUNT(*) FROM priority WHERE priority = :arg ) AS prios
               """,
            {"arg": arg},
        )
        r = q.fetchall()
        if r[0][0] == 1:
            arguments = (arg, ".")
        elif r[0][1] == 1:
            arguments = (".", arg)
        else:
            utils.fubar("%s is not a valid section or priority" % (arg))

    # Retrieve current section/priority...
    oldsection, oldsourcesection, oldpriority = None, None, None
    for packagetype in ["source", "binary"]:
        eqdsc = "!="
        if packagetype == "source":
            eqdsc = "="
        q = session.execute(
            """
    SELECT priority.priority AS prio, section.section AS sect, override_type.type AS type
      FROM override, priority, section, suite, override_type
     WHERE override.priority = priority.id
       AND override.type = override_type.id
       AND override_type.type %s 'dsc'
       AND override.section = section.id
       AND override.package = :package
       AND override.suite = suite.id
       AND suite.suite_name = :suite_name
        """
            % (eqdsc),
            {"package": package, "suite_name": suite_name},
        )

        if q.rowcount == 0:
            continue
        if q.rowcount > 1:
            utils.fubar("%s is ambiguous. Matches %d packages" % (package, q.rowcount))

        r = q.fetchone()
        if packagetype == "binary":
            oldsection = r[1]
            oldpriority = r[0]
        else:
            oldsourcesection = r[1]
            oldpriority = "source"

    if not oldpriority and not oldsourcesection:
        utils.fubar("Unable to find package %s" % (package))

    if oldsection and oldsourcesection and oldsection != oldsourcesection:
        # When setting overrides, both source & binary will become the same section
        utils.warn("Source is in section '%s' instead of '%s'" % (oldsourcesection, oldsection))

    if not oldsection:
        oldsection = oldsourcesection

    if not arguments:
        print "%s is in section '%s' at priority '%s'" % (package, oldsection, oldpriority)
        sys.exit(0)

    # At this point, we have a new section and priority... check they're valid...
    newsection, newpriority = arguments

    if newsection == ".":
        newsection = oldsection
    if newpriority == ".":
        newpriority = oldpriority

    s = get_section(newsection, session)
    if s is None:
        utils.fubar("Supplied section %s is invalid" % (newsection))
    newsecid = s.section_id

    p = get_priority(newpriority, session)
    if p is None:
        utils.fubar("Supplied priority %s is invalid" % (newpriority))
    newprioid = p.priority_id

    if newpriority == oldpriority and newsection == oldsection:
        print "I: Doing nothing"
        sys.exit(0)

    if oldpriority == "source" and newpriority != "source":
        utils.fubar("Trying to change priority of a source-only package")

    if Options["Check"] and newpriority != oldpriority:
        check_override_compliance(package, p, suite.archive.path, suite_name, cnf, session)

    # If we're in no-action mode
    if Options["No-Action"]:
        if newpriority != oldpriority:
            print "I: Would change priority from %s to %s" % (oldpriority, newpriority)
        if newsection != oldsection:
            print "I: Would change section from %s to %s" % (oldsection, newsection)
        if Options.has_key("Done"):
            print "I: Would also close bug(s): %s" % (Options["Done"])

        sys.exit(0)

    if newpriority != oldpriority:
        print "I: Will change priority from %s to %s" % (oldpriority, newpriority)

    if newsection != oldsection:
        print "I: Will change section from %s to %s" % (oldsection, newsection)

    if not Options.has_key("Done"):
        pass
        # utils.warn("No bugs to close have been specified. Noone will know you have done this.")
    else:
        print "I: Will close bug(s): %s" % (Options["Done"])

    game_over()

    Logger = daklog.Logger("override")

    dsc_otype_id = get_override_type("dsc").overridetype_id

    # We're already in a transaction
    # We're in "do it" mode, we have something to do... do it
    if newpriority != oldpriority:
        session.execute(
            """
        UPDATE override
           SET priority = :newprioid
         WHERE package = :package
           AND override.type != :otypedsc
           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
            {"newprioid": newprioid, "package": package, "otypedsc": dsc_otype_id, "suite_name": suite_name},
        )

        Logger.log(["changed priority", package, oldpriority, newpriority])

    if newsection != oldsection:
        q = session.execute(
            """
        UPDATE override
           SET section = :newsecid
         WHERE package = :package
           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
            {"newsecid": newsecid, "package": package, "suite_name": suite_name},
        )

        Logger.log(["changed section", package, oldsection, newsection])

    session.commit()

    if Options.has_key("Done"):
        if not cnf.has_key("Dinstall::BugServer"):
            utils.warn("Asked to send Done message but Dinstall::BugServer is not configured")
            Logger.close()
            return

        Subst = {}
        Subst["__OVERRIDE_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        bcc = []
        if cnf.find("Dinstall::Bcc") != "":
            bcc.append(cnf["Dinstall::Bcc"])
        if bcc:
            Subst["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst["__BCC__"] = "X-Filler: 42"
        if cnf.has_key("Dinstall::PackagesServer"):
            Subst["__CC__"] = "Cc: " + package + "@" + cnf["Dinstall::PackagesServer"] + "\nX-DAK: dak override"
        else:
            Subst["__CC__"] = "X-DAK: dak override"
        Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst["__WHOAMI__"] = utils.whoami()
        Subst["__SOURCE__"] = package

        summary = "Concerning package %s...\n" % (package)
        summary += "Operating on the %s suite\n" % (suite_name)
        if newpriority != oldpriority:
            summary += "Changed priority from %s to %s\n" % (oldpriority, newpriority)
        if newsection != oldsection:
            summary += "Changed section from %s to %s\n" % (oldsection, newsection)
        Subst["__SUMMARY__"] = summary

        template = os.path.join(cnf["Dir::Templates"], "override.bug-close")
        for bug in utils.split_args(Options["Done"]):
            Subst["__BUG_NUMBER__"] = bug
            mail_message = utils.TemplateSubst(Subst, template)
            utils.send_mail(mail_message)
            Logger.log(["closed bug", bug])

    Logger.close()
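
The bug-closing mail at the end is built by filling __KEY__ placeholders in a template via utils.TemplateSubst. A minimal stand-in for that substitution step (this is not dak's implementation; the template text, bug number and server name are invented):

def substitute(subst, template):
    # Replace every __KEY__ placeholder with its value from the subst dict.
    for key, value in subst.items():
        template = template.replace(key, str(value))
    return template

template = "To: __BUG_NUMBER__-done@__BUG_SERVER__\nSubject: override change\n\n__SUMMARY__"
subst = {
    "__BUG_NUMBER__": "123456",
    "__BUG_SERVER__": "bugs.example.org",
    "__SUMMARY__": "Changed priority from optional to extra\n",
}
print(substitute(subst, template))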
Code Example #9
    def generate_release_files(self):
        """
        Generate Release files for the given suite

        @type suite: string
        @param suite: Suite name
        """

        suite = self.suite
        session = object_session(suite)

        architectures = get_suite_architectures(suite.suite_name,
                                                skipall=True,
                                                skipsrc=True,
                                                session=session)

        # Attribs contains a tuple of field names and the database names to use to
        # fill them in
        attribs = (
            ('Origin', 'origin'),
            ('Label', 'label'),
            ('Suite', 'release_suite_output'),
            ('Version', 'version'),
            ('Codename', 'codename'),
            ('Changelogs', 'changelog_url'),
        )

        # A "Sub" Release file has slightly different fields
        subattribs = (('Archive', 'suite_name'), ('Origin', 'origin'),
                      ('Label', 'label'), ('Version', 'version'))

        # Boolean stuff. If we find it true in database, write out "yes" into the release file
        boolattrs = (
            ('NotAutomatic', 'notautomatic'),
            ('ButAutomaticUpgrades', 'butautomaticupgrades'),
            ('Acquire-By-Hash', 'byhash'),
        )

        cnf = Config()

        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

        self.create_output_directories()
        self.create_release_symlinks()

        outfile = os.path.join(self.suite_release_path(), "Release")
        out = open(outfile + ".new", "w")

        for key, dbfield in attribs:
            # Hack to skip NULL Version fields as we used to do this
            # We should probably just always ignore anything which is None
            if key in ("Version",
                       "Changelogs") and getattr(suite, dbfield) is None:
                continue

            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))

        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC",
                                                time.gmtime(time.time()))))

        if suite.validtime:
            validtime = float(suite.validtime)
            out.write("Valid-Until: %s\n" %
                      (time.strftime("%a, %d %b %Y %H:%M:%S UTC",
                                     time.gmtime(time.time() + validtime))))

        for key, dbfield in boolattrs:
            if getattr(suite, dbfield, False):
                out.write("%s: yes\n" % (key))

        out.write("Architectures: %s\n" %
                  (" ".join([a.arch_string for a in architectures])))

        components = [c.component_name for c in suite.components]

        out.write("Components: %s\n" % (" ".join(components)))

        # For exact compatibility with old g-r, write out Description here instead
        # of with the rest of the DB fields above
        if getattr(suite, 'description') is not None:
            out.write("Description: %s\n" % suite.description)

        for comp in components:
            for dirpath, dirnames, filenames in os.walk(os.path.join(
                    self.suite_path(), comp),
                                                        topdown=True):
                if not re_gensubrelease.match(dirpath):
                    continue

                subfile = os.path.join(dirpath, "Release")
                subrel = open(subfile + '.new', "w")

                for key, dbfield in subattribs:
                    if getattr(suite, dbfield) is not None:
                        subrel.write("%s: %s\n" %
                                     (key, getattr(suite, dbfield)))

                for key, dbfield in boolattrs:
                    if getattr(suite, dbfield, False):
                        subrel.write("%s: yes\n" % (key))

                subrel.write("Component: %s%s\n" % (suite_suffix, comp))

                # Urgh, but until we have all the suite/component/arch stuff in the DB,
                # this'll have to do
                arch = os.path.split(dirpath)[-1]
                if arch.startswith('binary-'):
                    arch = arch[7:]

                subrel.write("Architecture: %s\n" % (arch))
                subrel.close()

                os.rename(subfile + '.new', subfile)

        # Now that we have done the groundwork, we want to get off and add the files with
        # their checksums to the main Release file
        oldcwd = os.getcwd()

        os.chdir(self.suite_path())

        hashes = [x for x in RELEASE_HASHES if x.db_name in suite.checksums]

        fileinfo = {}
        fileinfo_byhash = {}

        uncompnotseen = {}

        for dirpath, dirnames, filenames in os.walk(".",
                                                    followlinks=True,
                                                    topdown=True):
            for entry in filenames:
                if dirpath == '.' and entry in [
                        "Release", "Release.gpg", "InRelease"
                ]:
                    continue

                filename = os.path.join(dirpath.lstrip('./'), entry)

                if re_includeinrelease_byhash.match(entry):
                    fileinfo[filename] = fileinfo_byhash[filename] = {}
                elif re_includeinrelease_plain.match(entry):
                    fileinfo[filename] = {}
                # Skip things we don't want to include
                else:
                    continue

                contents = open(filename, 'r').read()

                # If we find a file for which we have a compressed version and
                # haven't yet seen the uncompressed one, store the possibility
                # for future use
                if entry.endswith(
                        ".gz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
                elif entry.endswith(
                        ".bz2") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
                elif entry.endswith(
                        ".xz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (XzFile, filename)

                fileinfo[filename]['len'] = len(contents)

                for hf in hashes:
                    fileinfo[filename][hf.release_field] = hf.func(contents)

        for filename, comp in uncompnotseen.items():
            # If we've already seen the uncompressed file, we don't
            # need to do anything again
            if filename in fileinfo:
                continue

            fileinfo[filename] = {}

            # File handler is comp[0], filename of compressed file is comp[1]
            contents = comp[0](comp[1], 'r').read()

            fileinfo[filename]['len'] = len(contents)

            for hf in hashes:
                fileinfo[filename][hf.release_field] = hf.func(contents)

        for field in sorted(h.release_field for h in hashes):
            out.write('%s:\n' % field)
            for filename in sorted(fileinfo.keys()):
                out.write(" %s %8d %s\n" %
                          (fileinfo[filename][field],
                           fileinfo[filename]['len'], filename))

        out.close()
        os.rename(outfile + '.new', outfile)

        self._update_hashfile_table(session, fileinfo_byhash, hashes)
        self._make_byhash_links(fileinfo_byhash, hashes)
        self._make_byhash_base_symlink(fileinfo_byhash, hashes)

        sign_release_dir(suite, os.path.dirname(outfile))

        os.chdir(oldcwd)

        return
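
The per-file entries written into the main Release file follow the " <hash> <size> <path>" layout, one block per hash algorithm. A compact sketch of such a block with hashlib standing in for dak's RELEASE_HASHES helpers (the file names in the usage comment are hypothetical):

import hashlib

def release_checksum_block(paths, field='SHA256', algo=hashlib.sha256):
    # Return one Release checksum section for the given files.
    lines = ['%s:\n' % field]
    for path in sorted(paths):
        with open(path, 'rb') as fh:
            contents = fh.read()
        lines.append(' %s %8d %s\n' % (algo(contents).hexdigest(), len(contents), path))
    return ''.join(lines)

# e.g. release_checksum_block(['main/binary-amd64/Packages.gz', 'main/source/Sources.gz'])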
Code Example #10
File: rm.py  Project: pombreda/dak-dep11
def main ():
    global Options

    cnf = Config()

    Arguments = [('h',"help","Rm::Options::Help"),
                 ('a',"architecture","Rm::Options::Architecture", "HasArg"),
                 ('b',"binary", "Rm::Options::Binary"),
                 ('B',"binary-only", "Rm::Options::Binary-Only"),
                 ('c',"component", "Rm::Options::Component", "HasArg"),
                 ('C',"carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"), # Bugs to Cc
                 ('d',"done","Rm::Options::Done", "HasArg"), # Bugs fixed
                 ('D',"do-close","Rm::Options::Do-Close"),
                 ('R',"rdep-check", "Rm::Options::Rdep-Check"),
                 ('m',"reason", "Rm::Options::Reason", "HasArg"), # Hysterical raisins; -m is old-dinstall option for rejection reason
                 ('n',"no-action","Rm::Options::No-Action"),
                 ('p',"partial", "Rm::Options::Partial"),
                 ('s',"suite","Rm::Options::Suite", "HasArg"),
                 ('S',"source-only", "Rm::Options::Source-Only"),
                 ]

    for i in [ "architecture", "binary", "binary-only", "carbon-copy", "component",
               "done", "help", "no-action", "partial", "rdep-check", "reason",
               "source-only", "Do-Close" ]:
        if not cnf.has_key("Rm::Options::%s" % (i)):
            cnf["Rm::Options::%s" % (i)] = ""
    if not cnf.has_key("Rm::Options::Suite"):
        cnf["Rm::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Rm::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    # Sanity check options
    if not arguments:
        utils.fubar("need at least one package name as an argument.")
    if Options["Architecture"] and Options["Source-Only"]:
        utils.fubar("can't use -a/--architecture and -S/--source-only options simultaneously.")
    if ((Options["Binary"] and Options["Source-Only"])
            or (Options["Binary"] and Options["Binary-Only"])
            or (Options["Binary-Only"] and Options["Source-Only"])):
        utils.fubar("Only one of -b/--binary, -B/--binary-only and -S/--source-only can be used.")
    if Options.has_key("Carbon-Copy") and not Options.has_key("Done"):
        utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.")
    if Options["Architecture"] and not Options["Partial"]:
        utils.warn("-a/--architecture implies -p/--partial.")
        Options["Partial"] = "true"
    if Options["Do-Close"] and not Options["Done"]:
        utils.fubar("No.")
    if (Options["Do-Close"]
           and (Options["Binary"] or Options["Binary-Only"] or Options["Source-Only"])):
        utils.fubar("No.")

    # Force the admin to tell someone if we're not doing a 'dak
    # cruft-report' inspired removal (or closing a bug, which counts
    # as telling someone).
    if not Options["No-Action"] and not Options["Carbon-Copy"] \
           and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1:
        utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")

    # Process -C/--carbon-copy
    #
    # Accept 3 types of arguments (space separated):
    #  1) a number - assumed to be a bug number, mailed to <number>@Dinstall::BugServer
    #  2) the keyword 'package' - cc's <package>@Dinstall::PackagesServer for every argument
    #  3) contains a '@' - assumed to be an email address, used unmodified
    #
    carbon_copy = []
    for copy_to in utils.split_args(Options.get("Carbon-Copy")):
        if copy_to.isdigit():
            if cnf.has_key("Dinstall::BugServer"):
                carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
            else:
                utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to)
        elif copy_to == 'package':
            for package in arguments:
                if cnf.has_key("Dinstall::PackagesServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"])
                if cnf.has_key("Dinstall::TrackingServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::TrackingServer"])
        elif '@' in copy_to:
            carbon_copy.append(copy_to)
        else:
            utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to))

    if Options["Binary"]:
        field = "b.package"
    else:
        field = "s.source"
    con_packages = "AND %s IN (%s)" % (field, ", ".join([ repr(i) for i in arguments ]))

    (con_suites, con_architectures, con_components, check_source) = \
                 utils.parse_args(Options)

    # Additional suite checks
    suite_ids_list = []
    whitelists = []
    suites = utils.split_args(Options["Suite"])
    suites_list = utils.join_with_commas_and(suites)
    if not Options["No-Action"]:
        for suite in suites:
            s = get_suite(suite, session=session)
            if s is not None:
                suite_ids_list.append(s.suite_id)
                whitelists.append(s.mail_whitelist)
            if suite in ("oldstable", "stable"):
                print "**WARNING** About to remove from the (old)stable suite!"
                print "This should only be done just prior to a (point) release and not at"
                print "any other time."
                game_over()
            elif suite == "testing":
                print "**WARNING About to remove from the testing suite!"
                print "There's no need to do this normally as removals from unstable will"
                print "propogate to testing automagically."
                game_over()

    # Additional architecture checks
    if Options["Architecture"] and check_source:
        utils.warn("'source' in -a/--argument makes no sense and is ignored.")

    # Additional component processing
    over_con_components = con_components.replace("c.id", "component")

    # Don't do dependency checks on multiple suites
    if Options["Rdep-Check"] and len(suites) > 1:
        utils.fubar("Reverse dependency check on multiple suites is not implemented.")

    to_remove = []
    maintainers = {}

    # We have 3 modes of package selection: binary, source-only, binary-only
    # and source+binary.

    # XXX: TODO: This all needs converting to use placeholders or the object
    #            API. It's an SQL injection dream at the moment

    if Options["Binary"]:
        # Removal by binary package name
        q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, files_archive_map af, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures))
        to_remove.extend(q)
    else:
        # Source-only
        if not Options["Binary-Only"]:
            q = session.execute("SELECT s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, archive, files f, files_archive_map af, component c WHERE sa.source = s.id AND sa.suite = su.id AND archive.id = su.archive_id AND s.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s" % (con_packages, con_suites, con_components))
            to_remove.extend(q)
        if not Options["Source-Only"]:
            # Source + Binary
            q = session.execute("""
                    SELECT b.package, b.version, a.arch_string, b.id, b.maintainer
                    FROM binaries b
                         JOIN bin_associations ba ON b.id = ba.bin
                         JOIN architecture a ON b.architecture = a.id
                         JOIN suite su ON ba.suite = su.id
                         JOIN archive ON archive.id = su.archive_id
                         JOIN files_archive_map af ON b.file = af.file_id AND af.archive_id = archive.id
                         JOIN component c ON af.component_id = c.id
                         JOIN source s ON b.source = s.id
                         JOIN src_associations sa ON s.id = sa.source AND sa.suite = su.id
                    WHERE TRUE %s %s %s %s""" % (con_packages, con_suites, con_components, con_architectures))
            to_remove.extend(q)

    if not to_remove:
        print "Nothing to do."
        sys.exit(0)

    # If we don't have a reason; spawn an editor so the user can add one
    # Write the rejection email out as the <foo>.reason file
    if not Options["Reason"] and not Options["No-Action"]:
        (fd, temp_filename) = utils.temp_filename()
        editor = os.environ.get("EDITOR","vi")
        result = os.system("%s %s" % (editor, temp_filename))
        if result != 0:
            utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result)
        temp_file = utils.open_file(temp_filename)
        for line in temp_file.readlines():
            Options["Reason"] += line
        temp_file.close()
        os.unlink(temp_filename)

    # Generate the summary of what's to be removed
    d = {}
    for i in to_remove:
        package = i[0]
        version = i[1]
        architecture = i[2]
        maintainer = i[4]
        maintainers[maintainer] = ""
        if not d.has_key(package):
            d[package] = {}
        if not d[package].has_key(version):
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    maintainer_list = []
    for maintainer_id in maintainers.keys():
        maintainer_list.append(get_maintainer(maintainer_id).name)
    summary = ""
    removals = d.keys()
    removals.sort()
    versions = []
    for package in removals:
        versions = d[package].keys()
        versions.sort(apt_pkg.version_compare)
        for version in versions:
            d[package][version].sort(utils.arch_compare_sw)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
    print "Will remove the following packages from %s:" % (suites_list)
    print
    print summary
    print "Maintainer: %s" % ", ".join(maintainer_list)
    if Options["Done"]:
        print "Will also close bugs: "+Options["Done"]
    if carbon_copy:
        print "Will also send CCs to: " + ", ".join(carbon_copy)
    if Options["Do-Close"]:
        print "Will also close associated bug reports."
    print
    print "------------------- Reason -------------------"
    print Options["Reason"]
    print "----------------------------------------------"
    print

    if Options["Rdep-Check"]:
        arches = utils.split_args(Options["Architecture"])
        reverse_depends_check(removals, suites[0], arches, session)

    # If -n/--no-action, drop out here
    if Options["No-Action"]:
        sys.exit(0)

    print "Going to remove the packages now."
    game_over()

    whoami = utils.whoami()
    date = commands.getoutput('date -R')

    # Log first; if it all falls apart I want a record that we at least tried.
    logfile = utils.open_file(cnf["Rm::LogFile"], 'a')
    logfile.write("=========================================================================\n")
    logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
    logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
    if Options["Done"]:
        logfile.write("Closed bugs: %s\n" % (Options["Done"]))
    logfile.write("\n------------------- Reason -------------------\n%s\n" % (Options["Reason"]))
    logfile.write("----------------------------------------------\n")

    # Do the same in rfc822 format
    logfile822 = utils.open_file(cnf["Rm::LogFile822"], 'a')
    logfile822.write("Date: %s\n" % date)
    logfile822.write("Ftpmaster: %s\n" % whoami)
    logfile822.write("Suite: %s\n" % suites_list)
    sources = []
    binaries = []
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    element[2] = sub("source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))
    if sources:
        logfile822.write("Sources:\n")
        for source in sources:
            logfile822.write(" %s\n" % source)
    if binaries:
        logfile822.write("Binaries:\n")
        for binary in binaries:
            logfile822.write(" %s\n" % binary)
    logfile822.write("Reason: %s\n" % Options["Reason"].replace('\n', '\n '))
    if Options["Done"]:
        logfile822.write("Bug: %s\n" % Options["Done"])

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    # Do the actual deletion
    print "Deleting...",
    sys.stdout.flush()

    for i in to_remove:
        package = i[0]
        architecture = i[2]
        package_id = i[3]
        for suite_id in suite_ids_list:
            if architecture == "source":
                session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                {'packageid': package_id, 'suiteid': suite_id})
                #print "DELETE FROM src_associations WHERE source = %s AND suite = %s" % (package_id, suite_id)
            else:
                session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                {'packageid': package_id, 'suiteid': suite_id})
                #print "DELETE FROM bin_associations WHERE bin = %s AND suite = %s" % (package_id, suite_id)
            # Delete from the override file
            if not Options["Partial"]:
                if architecture == "source":
                    type_id = dsc_type_id
                else:
                    type_id = deb_type_id
                # TODO: Again, fix this properly to remove the remaining non-bind argument
                session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (over_con_components), {'package': package, 'typeid': type_id, 'suiteid': suite_id})
    session.commit()
    print "done."

    # If we don't have a Bug server configured, we're done
    if not cnf.has_key("Dinstall::BugServer"):
        if Options["Done"] or Options["Do-Close"]:
            print "Cannot send mail to BugServer as Dinstall::BugServer is not configured"

        logfile.write("=========================================================================\n")
        logfile.close()

        logfile822.write("\n")
        logfile822.close()

        return

    # read common subst variables for all bug closure mails
    Subst_common = {}
    Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
    Subst_common["__CC__"] = "X-DAK: dak rm"
    if carbon_copy:
        Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
    Subst_common["__SUITE_LIST__"] = suites_list
    Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
    Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
    Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
    Subst_common["__WHOAMI__"] = whoami

    # Send the bug closing messages
    if Options["Done"]:
        Subst_close_rm = Subst_common
        bcc = []
        if cnf.find("Dinstall::Bcc") != "":
            bcc.append(cnf["Dinstall::Bcc"])
        if cnf.find("Rm::Bcc") != "":
            bcc.append(cnf["Rm::Bcc"])
        if bcc:
            Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst_close_rm["__BCC__"] = "X-Filler: 42"
        summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, Options["Reason"])
        summarymail += "----------------------------------------------\n"
        Subst_close_rm["__SUMMARY__"] = summarymail

        for bug in utils.split_args(Options["Done"]):
            Subst_close_rm["__BUG_NUMBER__"] = bug
            if Options["Do-Close"]:
                mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close-with-related")
            else:
                mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
            utils.send_mail(mail_message, whitelists=whitelists)

    # close associated bug reports
    if Options["Do-Close"]:
        Subst_close_other = Subst_common
        bcc = []
        wnpp = utils.parse_wnpp_bug_file()
        versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
        if len(versions) == 1:
            Subst_close_other["__VERSION__"] = versions[0]
        else:
            utils.fubar("Closing bugs with multiple package versions is not supported.  Do it yourself.")
        if bcc:
            Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst_close_other["__BCC__"] = "X-Filler: 42"
        # at this point, I just assume, that the first closed bug gives
        # some useful information on why the package got removed
        Subst_close_other["__BUG_NUMBER__"] = utils.split_args(Options["Done"])[0]
        if len(sources) == 1:
            source_pkg = source.split("_", 1)[0]
        else:
            utils.fubar("Closing bugs for multiple source packages is not supported.  Do it yourself.")
        Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
        Subst_close_other["__SOURCE__"] = source_pkg
        merged_bugs = set()
        other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open', 'status', 'forwarded')
        if other_bugs:
            for bugno in other_bugs:
                if bugno not in merged_bugs:
                    for bug in bts.get_status(bugno):
                        for merged in bug.mergedwith:
                            other_bugs.remove(merged)
                            merged_bugs.add(merged)
            logfile.write("Also closing bug(s):")
            logfile822.write("Also-Bugs:")
            for bug in other_bugs:
                Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                logfile.write(" " + str(bug))
                logfile822.write(" " + str(bug))
            logfile.write("\n")
            logfile822.write("\n")
        if source_pkg in wnpp.keys():
            logfile.write("Also closing WNPP bug(s):")
            logfile822.write("Also-WNPP:")
            for bug in wnpp[source_pkg]:
                # the wnpp-rm file we parse also contains our removal
                # bugs, filtering that out
                if bug != Subst_close_other["__BUG_NUMBER__"]:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
            logfile.write("\n")
            logfile822.write("\n")

        mail_message = utils.TemplateSubst(Subst_close_other,cnf["Dir::Templates"]+"/rm.bug-close-related")
        if Subst_close_other["__BUG_NUMBER_ALSO__"]:
            utils.send_mail(mail_message)


    logfile.write("=========================================================================\n")
    logfile.close()

    logfile822.write("\n")
    logfile822.close()
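
The -C/--carbon-copy handling above accepts three kinds of tokens: a bug number, the keyword 'package', or a literal address containing '@'. A stripped-down sketch of that classification, with configuration lookups reduced to plain arguments and placeholder server names in the usage comment:

def expand_carbon_copy(copy_args, packages, bug_server, packages_server=None, tracking_server=None):
    # Turn -C/--carbon-copy tokens into a list of mail addresses.
    carbon_copy = []
    for copy_to in copy_args:
        if copy_to.isdigit():
            carbon_copy.append(copy_to + "@" + bug_server)
        elif copy_to == 'package':
            for package in packages:
                if packages_server:
                    carbon_copy.append(package + "@" + packages_server)
                if tracking_server:
                    carbon_copy.append(package + "@" + tracking_server)
        elif '@' in copy_to:
            carbon_copy.append(copy_to)
        else:
            raise ValueError("Invalid carbon-copy argument '%s'" % copy_to)
    return carbon_copy

# e.g. expand_carbon_copy(['1234', 'package'], ['foo'], 'bugs.example.org', 'packages.example.org')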
Code Example #11
File: generate_releases.py  Project: julian-klode/dak
    def generate_release_files(self):
        """
        Generate Release files for the given suite

        @type suite: string
        @param suite: Suite name
        """

        suite = self.suite
        session = object_session(suite)

        architectures = get_suite_architectures(suite.suite_name, skipall=True, skipsrc=True, session=session)

        # Attribs contains a tuple of field names and the database names to use to
        # fill them in
        attribs = ( ('Origin',      'origin'),
                    ('Label',       'label'),
                    ('Suite',       'release_suite_output'),
                    ('Version',     'version'),
                    ('Codename',    'codename'),
                    ('Changelogs',  'changelog_url'),
                  )

        # A "Sub" Release file has slightly different fields
        subattribs = ( ('Archive',  'suite_name'),
                       ('Origin',   'origin'),
                       ('Label',    'label'),
                       ('Version',  'version') )

        # Boolean stuff. If we find it true in database, write out "yes" into the release file
        boolattrs = ( ('NotAutomatic',         'notautomatic'),
                      ('ButAutomaticUpgrades', 'butautomaticupgrades') )

        cnf = Config()

        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

        outfile = os.path.join(suite.archive.path, 'dists', suite.suite_name, suite_suffix, "Release")
        out = open(outfile + ".new", "w")

        for key, dbfield in attribs:
            # Hack to skip NULL Version fields as we used to do this
            # We should probably just always ignore anything which is None
            if key in ("Version", "Changelogs") and getattr(suite, dbfield) is None:
                continue

            out.write("%s: %s\n" % (key, getattr(suite, dbfield)))

        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))

        if suite.validtime:
            validtime=float(suite.validtime)
            out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()+validtime))))

        for key, dbfield in boolattrs:
            if getattr(suite, dbfield, False):
                out.write("%s: yes\n" % (key))

        out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))

        components = [ c.component_name for c in suite.components ]

        out.write("Components: %s\n" % (" ".join(components)))

        # For exact compatibility with old g-r, write out Description here instead
        # of with the rest of the DB fields above
        if getattr(suite, 'description') is not None:
            out.write("Description: %s\n" % suite.description)

        for comp in components:
            for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
                if not re_gensubrelease.match(dirpath):
                    continue

                subfile = os.path.join(dirpath, "Release")
                subrel = open(subfile + '.new', "w")

                for key, dbfield in subattribs:
                    if getattr(suite, dbfield) is not None:
                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))

                for key, dbfield in boolattrs:
                    if getattr(suite, dbfield, False):
                        subrel.write("%s: yes\n" % (key))

                subrel.write("Component: %s%s\n" % (suite_suffix, comp))

                # Urgh, but until we have all the suite/component/arch stuff in the DB,
                # this'll have to do
                arch = os.path.split(dirpath)[-1]
                if arch.startswith('binary-'):
                    arch = arch[7:]

                subrel.write("Architecture: %s\n" % (arch))
                subrel.close()

                os.rename(subfile + '.new', subfile)

        # Now that we have done the groundwork, we want to get off and add the files with
        # their checksums to the main Release file
        oldcwd = os.getcwd()

        os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))

        hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
                      'SHA1' : apt_pkg.sha1sum,
                      'SHA256' : apt_pkg.sha256sum }

        fileinfo = {}

        uncompnotseen = {}

        for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
            for entry in filenames:
                # Skip things we don't want to include
                if not re_includeinrelease.match(entry):
                    continue

                if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
                    continue

                filename = os.path.join(dirpath.lstrip('./'), entry)
                fileinfo[filename] = {}
                contents = open(filename, 'r').read()

                # If we find a file for which we have a compressed version and
                # haven't yet seen the uncompressed one, store the possibility
                # for future use
                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (XzFile, filename)

                fileinfo[filename]['len'] = len(contents)

                for hf, func in hashfuncs.items():
                    fileinfo[filename][hf] = func(contents)

        for filename, comp in uncompnotseen.items():
            # If we've already seen the uncompressed file, we don't
            # need to do anything again
            if filename in fileinfo:
                continue

            fileinfo[filename] = {}

            # File handler is comp[0], filename of compressed file is comp[1]
            contents = comp[0](comp[1], 'r').read()

            fileinfo[filename]['len'] = len(contents)

            for hf, func in hashfuncs.items():
                fileinfo[filename][hf] = func(contents)


        for h in sorted(hashfuncs.keys()):
            out.write('%s:\n' % h)
            for filename in sorted(fileinfo.keys()):
                out.write(" %s %8d %s\n" % (fileinfo[filename][h], fileinfo[filename]['len'], filename))

        out.close()
        os.rename(outfile + '.new', outfile)

        sign_release_dir(suite, os.path.dirname(outfile))

        os.chdir(oldcwd)

        return
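
The uncompnotseen bookkeeping above records, for every compressed index, how to read it back in case the uncompressed file never shows up in the walk. A rough standalone sketch of that fallback map, with the stdlib lzma module standing in for dak's XzFile wrapper and invented file names in the usage comment:

import bz2
import gzip
try:
    import lzma                    # stand-in for dak's XzFile wrapper
except ImportError:
    lzma = None

OPENERS = {'.gz': (gzip.GzipFile, 3), '.bz2': (bz2.BZ2File, 4)}
if lzma is not None:
    OPENERS['.xz'] = (lzma.LZMAFile, 3)

def uncompressed_fallbacks(filenames):
    # Map each missing uncompressed name to (opener, compressed filename).
    seen = set(filenames)
    fallbacks = {}
    for name in filenames:
        for ext, (opener, strip) in OPENERS.items():
            if name.endswith(ext):
                base = name[:-strip]
                if base not in seen and base not in fallbacks:
                    fallbacks[base] = (opener, name)
    return fallbacks

# e.g. uncompressed_fallbacks(['main/binary-amd64/Packages.gz', 'main/i18n/Translation-en.bz2'])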
Code Example #12
File: override.py  Project: pombreda/tanglu-dak
def main():
    cnf = Config()

    Arguments = [
        ('h', "help", "Override::Options::Help"),
        ('c', "check", "Override::Options::Check"),
        ('d', "done", "Override::Options::Done", "HasArg"),
        ('n', "no-action", "Override::Options::No-Action"),
        ('s', "suite", "Override::Options::Suite", "HasArg"),
    ]
    for i in ["help", "check", "no-action"]:
        if not cnf.has_key("Override::Options::%s" % (i)):
            cnf["Override::Options::%s" % (i)] = ""
    if not cnf.has_key("Override::Options::Suite"):
        cnf["Override::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Override::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    if not arguments:
        utils.fubar("package name is a required argument.")

    package = arguments.pop(0)
    suite_name = Options["Suite"]
    if arguments and len(arguments) > 2:
        utils.fubar("Too many arguments")

    suite = get_suite(suite_name, session)
    if suite is None:
        utils.fubar("Unknown suite '{0}'".format(suite_name))

    if arguments and len(arguments) == 1:
        # Determine if the argument is a priority or a section...
        arg = arguments.pop()
        q = session.execute(
            """
        SELECT ( SELECT COUNT(*) FROM section WHERE section = :arg ) AS secs,
               ( SELECT COUNT(*) FROM priority WHERE priority = :arg ) AS prios
               """, {'arg': arg})
        r = q.fetchall()
        if r[0][0] == 1:
            arguments = (arg, ".")
        elif r[0][1] == 1:
            arguments = (".", arg)
        else:
            utils.fubar("%s is not a valid section or priority" % (arg))

    # Retrieve current section/priority...
    oldsection, oldsourcesection, oldpriority = None, None, None
    for packagetype in ['source', 'binary']:
        eqdsc = '!='
        if packagetype == 'source':
            eqdsc = '='
        q = session.execute(
            """
    SELECT priority.priority AS prio, section.section AS sect, override_type.type AS type
      FROM override, priority, section, suite, override_type
     WHERE override.priority = priority.id
       AND override.type = override_type.id
       AND override_type.type %s 'dsc'
       AND override.section = section.id
       AND override.package = :package
       AND override.suite = suite.id
       AND suite.suite_name = :suite_name
        """ % (eqdsc), {
                'package': package,
                'suite_name': suite_name
            })

        if q.rowcount == 0:
            continue
        if q.rowcount > 1:
            utils.fubar("%s is ambiguous. Matches %d packages" %
                        (package, q.rowcount))

        r = q.fetchone()
        if packagetype == 'binary':
            oldsection = r[1]
            oldpriority = r[0]
        else:
            oldsourcesection = r[1]
            oldpriority = 'source'

    if not oldpriority and not oldsourcesection:
        utils.fubar("Unable to find package %s" % (package))

    if oldsection and oldsourcesection and oldsection != oldsourcesection:
        # When setting overrides, both source & binary will become the same section
        utils.warn("Source is in section '%s' instead of '%s'" %
                   (oldsourcesection, oldsection))

    if not oldsection:
        oldsection = oldsourcesection

    if not arguments:
        print "%s is in section '%s' at priority '%s'" % (package, oldsection,
                                                          oldpriority)
        sys.exit(0)

    # At this point, we have a new section and priority... check they're valid...
    newsection, newpriority = arguments

    if newsection == ".":
        newsection = oldsection
    if newpriority == ".":
        newpriority = oldpriority

    s = get_section(newsection, session)
    if s is None:
        utils.fubar("Supplied section %s is invalid" % (newsection))
    newsecid = s.section_id

    p = get_priority(newpriority, session)
    if p is None:
        utils.fubar("Supplied priority %s is invalid" % (newpriority))
    newprioid = p.priority_id

    if newpriority == oldpriority and newsection == oldsection:
        print "I: Doing nothing"
        sys.exit(0)

    if oldpriority == 'source' and newpriority != 'source':
        utils.fubar("Trying to change priority of a source-only package")

    if Options["Check"] and newpriority != oldpriority:
        check_override_compliance(package, p, suite.archive.path, suite_name,
                                  cnf, session)

    # If we're in no-action mode
    if Options["No-Action"]:
        if newpriority != oldpriority:
            print "I: Would change priority from %s to %s" % (oldpriority,
                                                              newpriority)
        if newsection != oldsection:
            print "I: Would change section from %s to %s" % (oldsection,
                                                             newsection)
        if Options.has_key("Done"):
            print "I: Would also close bug(s): %s" % (Options["Done"])

        sys.exit(0)

    if newpriority != oldpriority:
        print "I: Will change priority from %s to %s" % (oldpriority,
                                                         newpriority)

    if newsection != oldsection:
        print "I: Will change section from %s to %s" % (oldsection, newsection)

    if not Options.has_key("Done"):
        pass
        #utils.warn("No bugs to close have been specified. Noone will know you have done this.")
    else:
        print "I: Will close bug(s): %s" % (Options["Done"])

    game_over()

    Logger = daklog.Logger("override")

    dsc_otype_id = get_override_type('dsc').overridetype_id

    # We're already in a transaction
    # We're in "do it" mode, we have something to do... do it
    if newpriority != oldpriority:
        session.execute(
            """
        UPDATE override
           SET priority = :newprioid
         WHERE package = :package
           AND override.type != :otypedsc
           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
            {
                'newprioid': newprioid,
                'package': package,
                'otypedsc': dsc_otype_id,
                'suite_name': suite_name
            })

        Logger.log(["changed priority", package, oldpriority, newpriority])

    if newsection != oldsection:
        q = session.execute(
            """
        UPDATE override
           SET section = :newsecid
         WHERE package = :package
           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
            {
                'newsecid': newsecid,
                'package': package,
                'suite_name': suite_name
            })

        Logger.log(["changed section", package, oldsection, newsection])

    session.commit()

    if Options.has_key("Done"):
        if not cnf.has_key("Dinstall::BugServer"):
            utils.warn(
                "Asked to send Done message but Dinstall::BugServer is not configured"
            )
            Logger.close()
            return

        Subst = {}
        Subst["__OVERRIDE_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        bcc = []
        if cnf.find("Dinstall::Bcc") != "":
            bcc.append(cnf["Dinstall::Bcc"])
        if bcc:
            Subst["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst["__BCC__"] = "X-Filler: 42"
        if cnf.has_key("Dinstall::PackagesServer"):
            Subst["__CC__"] = "Cc: " + package + "@" + cnf[
                "Dinstall::PackagesServer"] + "\nX-DAK: dak override"
        else:
            Subst["__CC__"] = "X-DAK: dak override"
        Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst["__WHOAMI__"] = utils.whoami()
        Subst["__SOURCE__"] = package

        summary = "Concerning package %s...\n" % (package)
        summary += "Operating on the %s suite\n" % (suite_name)
        if newpriority != oldpriority:
            summary += "Changed priority from %s to %s\n" % (oldpriority,
                                                             newpriority)
        if newsection != oldsection:
            summary += "Changed section from %s to %s\n" % (oldsection,
                                                            newsection)
        Subst["__SUMMARY__"] = summary

        template = os.path.join(cnf["Dir::Templates"], "override.bug-close")
        for bug in utils.split_args(Options["Done"]):
            Subst["__BUG_NUMBER__"] = bug
            mail_message = utils.TemplateSubst(Subst, template)
            utils.send_mail(mail_message)
            Logger.log(["closed bug", bug])

    Logger.close()
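The bug-closing mail above is assembled by filling __KEY__ placeholders in a template file via utils.TemplateSubst. As a rough, stand-alone sketch of that substitution style (this is not dak's actual helper, and the template text and values below are hypothetical, chosen only for illustration):

def template_subst(subst_map, template_text):
    # Replace each __KEY__ placeholder occurring in the template with the
    # corresponding value from subst_map; placeholders with no entry in the
    # map are left untouched.
    for key, value in subst_map.items():
        template_text = template_text.replace(key, str(value))
    return template_text

# Hypothetical usage mirroring the override bug-close mail:
template = "To: __BUG_NUMBER__-done@__BUG_SERVER__\nBcc: __BCC__\n\n__SUMMARY__\n"
mail_body = template_subst({"__BUG_NUMBER__": "123456",
                            "__BUG_SERVER__": "bugs.example.org",
                            "__BCC__": "archive@example.org",
                            "__SUMMARY__": "Changed priority from optional to extra"},
                           template)

Simple string replacement is enough here because the placeholder names are fixed and never overlap; a real implementation may also want to fail loudly on placeholders that remain unfilled.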
Code example #13
0
    def generate_release_files(self):
        """
        Generate Release files for the given suite

        @type suite: string
        @param suite: Suite name
        """

        suite = self.suite
        session = object_session(suite)

        architectures = get_suite_architectures(suite.suite_name, skipall=True, skipsrc=True, session=session)

        # attribs is a sequence of (Release field name, database attribute) pairs
        # used to fill in the main Release file
        attribs = ( ('Origin',      'origin'),
                    ('Label',       'label'),
                    ('Suite',       'suite_name'),
                    ('Version',     'version'),
                    ('Codename',    'codename') )

        # A "Sub" Release file has slightly different fields
        subattribs = ( ('Archive',  'suite_name'),
                       ('Origin',   'origin'),
                       ('Label',    'label'),
                       ('Version',  'version') )

        # Boolean fields: if the database value is true, write "yes" into the Release file
        boolattrs = ( ('NotAutomatic',         'notautomatic'),
                      ('ButAutomaticUpgrades', 'butautomaticupgrades') )

        cnf = Config()

        suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

        outfile = os.path.join(suite.archive.path, 'dists', suite.suite_name, suite_suffix, "Release")
        out = open(outfile + ".new", "w")

        for key, dbfield in attribs:
            if getattr(suite, dbfield) is not None:
                # TEMPORARY HACK HACK HACK until we change the way we store the suite names etc
                if key == 'Suite' and getattr(suite, dbfield) == 'squeeze-updates':
                    out.write("Suite: oldstable-updates\n")
                elif key == 'Suite' and getattr(suite, dbfield) == 'wheezy-updates':
                    out.write("Suite: stable-updates\n")
                elif key == 'Suite' and getattr(suite, dbfield) == 'jessie-updates':
                    out.write("Suite: testing-updates\n")
                else:
                    out.write("%s: %s\n" % (key, getattr(suite, dbfield)))

        out.write("Date: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()))))

        if suite.validtime:
            validtime=float(suite.validtime)
            out.write("Valid-Until: %s\n" % (time.strftime("%a, %d %b %Y %H:%M:%S UTC", time.gmtime(time.time()+validtime))))

        for key, dbfield in boolattrs:
            if getattr(suite, dbfield, False):
                out.write("%s: yes\n" % (key))

        out.write("Architectures: %s\n" % (" ".join([a.arch_string for a in architectures])))

        components = [ c.component_name for c in suite.components ]

        out.write("Components: %s\n" % (" ".join(components)))

        # For exact compatibility with old g-r, write out Description here instead
        # of with the rest of the DB fields above
        if getattr(suite, 'description') is not None:
            out.write("Description: %s\n" % suite.description)

        for comp in components:
            for dirpath, dirnames, filenames in os.walk(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix, comp), topdown=True):
                if not re_gensubrelease.match(dirpath):
                    continue

                subfile = os.path.join(dirpath, "Release")
                subrel = open(subfile + '.new', "w")

                for key, dbfield in subattribs:
                    if getattr(suite, dbfield) is not None:
                        subrel.write("%s: %s\n" % (key, getattr(suite, dbfield)))

                for key, dbfield in boolattrs:
                    if getattr(suite, dbfield, False):
                        subrel.write("%s: yes\n" % (key))

                subrel.write("Component: %s%s\n" % (suite_suffix, comp))

                # Urgh, but until we have all the suite/component/arch stuff in the DB,
                # this'll have to do
                arch = os.path.split(dirpath)[-1]
                if arch.startswith('binary-'):
                    arch = arch[7:]

                subrel.write("Architecture: %s\n" % (arch))
                subrel.close()

                os.rename(subfile + '.new', subfile)

        # Now that the sub-Release files are in place, walk the tree and add the
        # remaining files, with their sizes and checksums, to the main Release file
        oldcwd = os.getcwd()

        os.chdir(os.path.join(suite.archive.path, "dists", suite.suite_name, suite_suffix))

        hashfuncs = { 'MD5Sum' : apt_pkg.md5sum,
                      'SHA1' : apt_pkg.sha1sum,
                      'SHA256' : apt_pkg.sha256sum }

        fileinfo = {}

        uncompnotseen = {}

        for dirpath, dirnames, filenames in os.walk(".", followlinks=True, topdown=True):
            for entry in filenames:
                # Skip things we don't want to include
                if not re_includeinrelease.match(entry):
                    continue

                if dirpath == '.' and entry in ["Release", "Release.gpg", "InRelease"]:
                    continue

                filename = os.path.join(dirpath.lstrip('./'), entry)
                fileinfo[filename] = {}
                contents = open(filename, 'r').read()

                # If we find a file for which we have a compressed version and
                # haven't yet seen the uncompressed one, store the possibility
                # for future use
                if entry.endswith(".gz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (gzip.GzipFile, filename)
                elif entry.endswith(".bz2") and filename[:-4] not in uncompnotseen:
                    uncompnotseen[filename[:-4]] = (bz2.BZ2File, filename)
                elif entry.endswith(".xz") and filename[:-3] not in uncompnotseen:
                    uncompnotseen[filename[:-3]] = (XzFile, filename)

                fileinfo[filename]['len'] = len(contents)

                for hf, func in hashfuncs.items():
                    fileinfo[filename][hf] = func(contents)

        for filename, comp in uncompnotseen.items():
            # If we've already seen the uncompressed file, we don't
            # need to do anything again
            if filename in fileinfo.keys():
                continue

            # Skip uncompressed Contents files as they're huge, take ages to
            # checksum and we checksum the compressed ones anyways
            if os.path.basename(filename).startswith("Contents"):
                continue

            fileinfo[filename] = {}

            # File handler is comp[0], filename of compressed file is comp[1]
            contents = comp[0](comp[1], 'r').read()

            fileinfo[filename]['len'] = len(contents)

            for hf, func in hashfuncs.items():
                fileinfo[filename][hf] = func(contents)


        for h in sorted(hashfuncs.keys()):
            out.write('%s:\n' % h)
            for filename in sorted(fileinfo.keys()):
                out.write(" %s %8d %s\n" % (fileinfo[filename][h], fileinfo[filename]['len'], filename))

        out.close()
        os.rename(outfile + '.new', outfile)

        sign_release_dir(suite, os.path.dirname(outfile))

        os.chdir(oldcwd)

        return
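The checksum sections written at the end follow the usual apt Release layout: one "<HashName>:" header per algorithm, then one line per file of the form " <digest> <size> <path relative to dists/<suite>>". A rough stand-alone sketch of that output, using hashlib in place of the apt_pkg hashing helpers the code above relies on (the file list here is hypothetical):

import hashlib

def checksum_block(paths, algo, field_name):
    # Build one checksum section of a Release file: a field header followed by
    # " <hexdigest> <size in bytes> <relative path>" for every listed file.
    lines = ["%s:\n" % field_name]
    for path in sorted(paths):
        with open(path, "rb") as fh:
            data = fh.read()
        digest = hashlib.new(algo, data).hexdigest()
        lines.append(" %s %8d %s\n" % (digest, len(data), path))
    return "".join(lines)

# Hypothetical usage: the three sections appended to the main Release file.
# paths = ["main/binary-amd64/Packages.gz", "main/source/Sources.gz"]
# tail = "".join(checksum_block(paths, algo, name)
#                for algo, name in (("md5", "MD5Sum"), ("sha1", "SHA1"), ("sha256", "SHA256")))

Using hashlib keeps the sketch dependency-free; the real code instead calls apt_pkg.md5sum, apt_pkg.sha1sum and apt_pkg.sha256sum on the file contents it has already read.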