Example 1
    def check(self, upload):
        changes = upload.changes
        control = changes.changes
        fn = changes.filename

        for field in ('Distribution', 'Source', 'Binary', 'Architecture', 'Version', 'Maintainer', 'Files', 'Changes', 'Description'):
            if field not in control:
                raise Reject('{0}: misses mandatory field {1}'.format(fn, field))

        check_fields_for_valid_utf8(fn, control)

        source_match = re_field_source.match(control['Source'])
        if not source_match:
            raise Reject('{0}: Invalid Source field'.format(fn))
        version_match = re_field_version.match(control['Version'])
        if not version_match:
            raise Reject('{0}: Invalid Version field'.format(fn))
        version_without_epoch = version_match.group('without_epoch')

        match = re_file_changes.match(fn)
        if not match:
            raise Reject('{0}: Does not match re_file_changes'.format(fn))
        if match.group('package') != source_match.group('package'):
            raise Reject('{0}: Filename does not match Source field'.format(fn))
        if match.group('version') != version_without_epoch:
            raise Reject('{0}: Filename does not match Version field'.format(fn))

        for bn in changes.binary_names:
            if not re_field_package.match(bn):
                raise Reject('{0}: Invalid binary package name {1}'.format(fn, bn))

        if 'source' in changes.architectures and changes.source is None:
            raise Reject("Changes has architecture source, but no source found.")
        if changes.source is not None and 'source' not in changes.architectures:
            raise Reject("Upload includes source, but changes does not say so.")

        try:
            fix_maintainer(changes.changes['Maintainer'])
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Maintainer field: {1}'.format(changes.filename, e))

        try:
            changed_by = changes.changes.get('Changed-By')
            if changed_by is not None:
                fix_maintainer(changed_by)
        except ParseMaintError as e:
            raise Reject('{0}: Failed to parse Changed-By field: {1}'.format(changes.filename, e))

        if len(changes.files) == 0:
            raise Reject("Changes includes no files.")

        for bugnum in changes.closed_bugs:
            if not re_isanum.match(bugnum):
                raise Reject('{0}: "{1}" in Closes field is not a number'.format(changes.filename, bugnum))

        return True
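The field checks above only need mapping-style access to the parsed control stanza. A minimal, self-contained sketch of the mandatory-field loop against a plain dict (file name and field values are made up; Reject and the dak regexes are not needed for this part):

# Hypothetical illustration of the mandatory-field check above.
MANDATORY = ('Distribution', 'Source', 'Binary', 'Architecture', 'Version',
             'Maintainer', 'Files', 'Changes', 'Description')

control = {'Source': 'hello', 'Version': '1.0-1'}  # deliberately incomplete
fn = 'hello_1.0-1_source.changes'
missing = [field for field in MANDATORY if field not in control]
if missing:
    # The real check raises Reject for the first missing field; here we just report them all.
    print('{0}: misses mandatory fields: {1}'.format(fn, ', '.join(missing)))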
Example 2
    def assertValid(self, input, a, b, c, d):
        a_, b_, c_, d_ = fix_maintainer(input)

        self.assertEqual(a, a_)
        self.assertEqual(b, b_)
        self.assertEqual(c, c_)
        self.assertEqual(d, d_)
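The four expected values suggest that fix_maintainer returns the tuple (RFC 822 form, RFC 2047 form, name, email address), which matches the maintainer822/maintainer2047/maintainername/maintaineremail unpacking seen in Example 5 below. For comparison, the standard library extracts only the (name, email) pair, which corresponds to the last two positions checked here:

# Comparison only, not part of the dak test suite; the address is made up.
from email.utils import parseaddr

print(parseaddr('John Doe <jdoe@example.org>'))  # ('John Doe', 'jdoe@example.org')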
Example 3
    def _notify_uploader(self):
        cnf = Config()

        bcc = 'X-DAK: dak process-command'
        if 'Dinstall::Bcc' in cnf:
            bcc = '{0}\nBcc: {1}'.format(bcc, cnf['Dinstall::Bcc'])

        cc = set(fix_maintainer(address)[1] for address in self.cc)

        subst = {
            '__DAK_ADDRESS__': cnf['Dinstall::MyEmailAddress'],
            '__MAINTAINER_TO__': fix_maintainer(self.uploader)[1],
            '__CC__': ", ".join(cc),
            '__BCC__': bcc,
            '__RESULTS__': "\n".join(self.result),
            '__FILENAME__': self.filename,
            }

        message = TemplateSubst(subst, os.path.join(cnf['Dir::Templates'], 'process-command.processed'))

        send_mail(message)
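TemplateSubst fills the __PLACEHOLDER__ tokens of the mail template from the subst dict before the result is handed to send_mail. A minimal sketch of that substitution idea on an inline string (not dak's actual TemplateSubst; template text and addresses are made up):

# Minimal sketch of __KEY__-style template substitution (hypothetical values).
def subst_template(subst, template):
    for key, value in subst.items():
        template = template.replace(key, str(value))
    return template

subst = {
    '__DAK_ADDRESS__': 'Archive Admin <dak@example.org>',
    '__MAINTAINER_TO__': 'uploader@example.org',
    '__RESULTS__': 'command file processed',
}
template = 'From: __DAK_ADDRESS__\nTo: __MAINTAINER_TO__\n\n__RESULTS__\n'
print(subst_template(subst, template))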
Example 4
def _subst_for_upload(upload):
    """ Prepare substitutions used for announce mails.

    @type  upload: L{daklib.upload.Source} or L{daklib.upload.Binary}
    @param upload: upload to handle

    @rtype: dict
    @returns: A dict of substitution values for use by L{daklib.utils.TemplateSubst}
    """
    cnf = Config()

    maintainer = upload.maintainer or cnf['Dinstall::MyEmailAddress']
    changed_by = upload.changed_by or maintainer
    if upload.sourceful:
        maintainer_to = mail_addresses_for_upload(maintainer, changed_by, upload.fingerprint)
    else:
        maintainer_to = mail_addresses_for_upload(maintainer, maintainer, upload.fingerprint)

    bcc = 'X-DAK: dak {0}'.format(upload.program)
    if 'Dinstall::Bcc' in cnf:
        bcc = '{0}\nBcc: {1}'.format(bcc, cnf['Dinstall::Bcc'])

    subst = {
        '__DISTRO__': cnf['Dinstall::MyDistribution'],
        '__BUG_SERVER__': cnf.get('Dinstall::BugServer'),
        '__ADMIN_ADDRESS__': cnf['Dinstall::MyAdminAddress'],
        '__DAK_ADDRESS__': cnf['Dinstall::MyEmailAddress'],
        '__REJECTOR_ADDRESS__': cnf['Dinstall::MyEmailAddress'],
        '__MANUAL_REJECT_MESSAGE__': '',

        '__BCC__': bcc,

        '__MAINTAINER__': changed_by,
        '__MAINTAINER_FROM__': fix_maintainer(changed_by)[1],
        '__MAINTAINER_TO__': ', '.join(maintainer_to),
        '__CHANGES_FILENAME__': upload.changes_filename,
        '__FILE_CONTENTS__': upload.changes,
        '__SOURCE__': upload.source,
        '__VERSION__': upload.version,
        '__ARCHITECTURE__': upload.architecture,
        '__WARNINGS__': '\n'.join(upload.warnings),
        }

    override_maintainer = cnf.get('Dinstall::OverrideMaintainer')
    if override_maintainer:
        subst['__MAINTAINER_FROM__'] = subst['__MAINTAINER_TO__'] = override_maintainer

    return subst
Example 5
def process_queue(queue, log, rrd_dir):
    msg = ""
    type = queue.queue_name
    session = DBConn().session()

    # Divide the .changes into per-source groups
    per_source = {}
    total_pending = 0
    for upload in queue.uploads:
        source = upload.changes.source
        if source not in per_source:
            per_source[source] = {}
            per_source[source]["list"] = []
            per_source[source]["processed"] = ""
            handler = PolicyQueueUploadHandler(upload, session)
            if handler.get_action():
                per_source[source]["processed"] = "PENDING %s" % handler.get_action()
                total_pending += 1
        per_source[source]["list"].append(upload)
        per_source[source]["list"].sort(key=lambda x: x.changes.created,
                                        reverse=True)
    # Determine the oldest timestamp and the note status for each source group
    for source in per_source.keys():
        source_list = per_source[source]["list"]
        first = source_list[0]
        oldest = time.mktime(first.changes.created.timetuple())
        have_note = 0
        for d in per_source[source]["list"]:
            mtime = time.mktime(d.changes.created.timetuple())
            if "Queue-Report::Options::New" in Cnf:
                if mtime > oldest:
                    oldest = mtime
            else:
                if mtime < oldest:
                    oldest = mtime
            have_note += has_new_comment(d.policy_queue, d.changes.source,
                                         d.changes.version)
        per_source[source]["oldest"] = oldest
        if not have_note:
            per_source[source]["note_state"] = 0  # none
        elif have_note < len(source_list):
            per_source[source]["note_state"] = 1  # some
        else:
            per_source[source]["note_state"] = 2  # all
    per_source_items = sorted(per_source.items(), key=functools.cmp_to_key(sg_compare))

    update_graph_database(rrd_dir, type, len(per_source_items),
                          len(queue.uploads))

    entries = []
    max_source_len = 0
    max_version_len = 0
    max_arch_len = 0
    try:
        logins = get_logins_from_ldap()
    except Exception:
        logins = dict()
    for i in per_source_items:
        maintainer = {}
        maint = ""
        distribution = ""
        closes = ""
        fingerprint = ""
        changeby = {}
        changedby = ""
        sponsor = ""
        filename = i[1]["list"][0].changes.changesname
        last_modified = time.time() - i[1]["oldest"]
        source = i[1]["list"][0].changes.source
        if len(source) > max_source_len:
            max_source_len = len(source)
        binary_list = i[1]["list"][0].binaries
        binary = ', '.join([b.package for b in binary_list])
        arches = set()
        versions = set()
        for j in i[1]["list"]:
            dbc = j.changes
            changesbase = dbc.changesname

            if "Queue-Report::Options::New" in Cnf or "Queue-Report::Options::822" in Cnf:
                try:
                    (maintainer["maintainer822"], maintainer["maintainer2047"],
                    maintainer["maintainername"], maintainer["maintaineremail"]) = \
                    fix_maintainer(dbc.maintainer)
                except ParseMaintError as msg:
                    print("Problems while parsing maintainer address\n")
                    maintainer["maintainername"] = "Unknown"
                    maintainer["maintaineremail"] = "Unknown"
                maint = "%s:%s" % (maintainer["maintainername"],
                                   maintainer["maintaineremail"])
                # ...likewise for the Changed-By: field if it exists.
                try:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                        fix_maintainer(dbc.changedby)
                except ParseMaintError as msg:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                        ("", "", "", "")
                changedby = "%s:%s" % (changeby["changedbyname"],
                                       changeby["changedbyemail"])

                distribution = dbc.distribution.split()
                closes = dbc.closes

                fingerprint = dbc.fingerprint
                sponsor_name = get_uid_from_fingerprint(fingerprint).name
                sponsor_login = get_uid_from_fingerprint(fingerprint).uid
                if '@' in sponsor_login:
                    if fingerprint in logins:
                        sponsor_login = logins[fingerprint]
                if (sponsor_name != maintainer["maintainername"]
                        and sponsor_name != changeby["changedbyname"]
                        and sponsor_login + '@debian.org' !=
                        maintainer["maintaineremail"]
                        and sponsor_name != changeby["changedbyemail"]):
                    sponsor = sponsor_login

            for arch in dbc.architecture.split():
                arches.add(arch)
            versions.add(dbc.version)
        arches_list = sorted(arches, key=utils.ArchKey)
        arch_list = " ".join(arches_list)
        version_list = " ".join(sorted(versions, reverse=True))
        if len(version_list) > max_version_len:
            max_version_len = len(version_list)
        if len(arch_list) > max_arch_len:
            max_arch_len = len(arch_list)
        if i[1]["note_state"]:
            note = " | [N]"
        else:
            note = ""
        entries.append([
            source, binary, version_list, arch_list,
            per_source[source]["processed"], note, last_modified, maint,
            distribution, closes, fingerprint, sponsor, changedby, filename
        ])

    # Each direction entry consists of "which field, which direction, time-consider",
    # where time-consider says how we should treat last_modified.

    # Look for the sort options and then do the sort.
    age = "h"
    if "Queue-Report::Options::Age" in Cnf:
        age = Cnf["Queue-Report::Options::Age"]
    if "Queue-Report::Options::New" in Cnf:
        # If we produce HTML we always list the oldest first.
        direction.append([6, -1, "ao"])
    else:
        if "Queue-Report::Options::Sort" in Cnf:
            for i in Cnf["Queue-Report::Options::Sort"].split(","):
                if i == "ao":
                    # Age, oldest first.
                    direction.append([6, -1, age])
                elif i == "an":
                    # Age, newest first.
                    direction.append([6, 1, age])
                elif i == "na":
                    # Name, Ascending.
                    direction.append([0, 1, 0])
                elif i == "nd":
                    # Name, Descending.
                    direction.append([0, -1, 0])
                elif i == "nl":
                    # Notes last.
                    direction.append([5, 1, 0])
                elif i == "nf":
                    # Notes first.
                    direction.append([5, -1, 0])
    entries.sort(key=functools.cmp_to_key(sortfunc))
    # In theory several sort options can be combined on the command line, but there is
    # no sorting function yet that handles all the corner cases that combination brings:
    # if options are combined, only the last one currently takes effect. To be enhanced
    # in the future.

    if "Queue-Report::Options::822" in Cnf:
        # print stuff out in 822 format
        for entry in entries:
            (source, binary, version_list, arch_list, processed, note,
             last_modified, maint, distribution, closes, fingerprint, sponsor,
             changedby, changes_file) = entry

            # We'll always have Source, Version, Arch, Maintainer, and Dist
            # For the rest, check to see if we have them, then print them out
            log.write("Source: " + source + "\n")
            log.write("Binary: " + binary + "\n")
            log.write("Version: " + version_list + "\n")
            log.write("Architectures: ")
            log.write((", ".join(arch_list.split(" "))) + "\n")
            log.write("Age: " + time_pp(last_modified) + "\n")
            log.write("Last-Modified: " +
                      str(int(time.time()) - int(last_modified)) + "\n")
            log.write("Queue: " + type + "\n")

            (name, mail) = maint.split(":", 1)
            log.write("Maintainer: " + name + " <" + mail + ">" + "\n")
            if changedby:
                (name, mail) = changedby.split(":", 1)
                log.write("Changed-By: " + name + " <" + mail + ">" + "\n")
            if sponsor:
                log.write("Sponsored-By: %s@debian.org\n" % sponsor)
            log.write("Distribution:")
            for dist in distribution:
                log.write(" " + dist)
            log.write("\n")
            log.write("Fingerprint: " + fingerprint + "\n")
            if closes:
                bug_string = ""
                for bugs in closes:
                    bug_string += "#" + bugs + ", "
                log.write("Closes: " + bug_string[:-2] + "\n")
            log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
            log.write("\n")

    total_count = len(queue.uploads)
    source_count = len(per_source_items)

    if "Queue-Report::Options::New" in Cnf:
        direction.append([6, 1, "ao"])
        entries.sort(key=functools.cmp_to_key(sortfunc))
        # Output for an HTML file: first table_header, then table_footer.
        # Every line between them is a <tr> printed by the table_row subroutine.
        if len(entries) > 0:
            table_header(type.upper(), source_count, total_count)
            for entry in entries:
                (source, binary, version_list, arch_list, processed, note,
                 last_modified, maint, distribution, closes, fingerprint,
                 sponsor, changedby, _) = entry
                table_row(source, version_list, arch_list, last_modified,
                          maint, distribution, closes, fingerprint, sponsor,
                          changedby)
            table_footer(type.upper())
    elif "Queue-Report::Options::822" not in Cnf:
        # The "normal" output without any formatting.
        msg = ""
        for entry in entries:
            (source, binary, version_list, arch_list, processed, note,
             last_modified, _, _, _, _, _, _, _) = entry
            if processed:
                format = "%%-%ds | %%-%ds | %%-%ds | %%s\n" % (
                    max_source_len, max_version_len, max_arch_len)
                msg += format % (source, version_list, arch_list, processed)
            else:
                format = "%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (
                    max_source_len, max_version_len, max_arch_len)
                msg += format % (source, version_list, arch_list, note,
                                 time_pp(last_modified))

        if msg:
            print(type.upper())
            print("-" * len(type))
            print()
            print(msg)
            print((
                "%s %s source package%s / %s %s package%s in total / %s %s package%s to be processed."
                % (source_count, type, plural(source_count), total_count, type,
                   plural(total_count), total_pending, type,
                   plural(total_pending))))
            print()
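direction and sortfunc are module-level helpers of the queue-report script and are not shown here. A minimal sketch of the mechanism the comments above describe, assuming entries are compared field by field according to the accumulated direction table (field index and sort order; the time-consider handling is omitted):

# Minimal sketch, not dak's actual sortfunc: field-by-field comparison driven
# by a direction table of [field_index, order] pairs.
import functools

direction = [[0, 1], [1, -1]]  # hypothetical: field 0 ascending, then field 1 descending

def sortfunc(a, b):
    for field_index, order in direction:
        if a[field_index] < b[field_index]:
            return -order
        if a[field_index] > b[field_index]:
            return order
    return 0

entries = [['foo', 3], ['bar', 5], ['foo', 9]]
entries.sort(key=functools.cmp_to_key(sortfunc))
print(entries)  # [['bar', 5], ['foo', 9], ['foo', 3]]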
Example 6
    def assertNotValid(self, input):
        self.assertRaises(ParseMaintError, lambda: fix_maintainer(input))
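A hypothetical negative case, assuming fix_maintainer raises ParseMaintError for a maintainer string whose angle brackets are unbalanced (the address and the expected behaviour are illustrative, not taken from the dak test suite):

# Hypothetical usage inside the same TestCase:
# self.assertNotValid('John Doe <jdoe@example.org')  # missing closing '>'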
Example 7
def process_changes_files(changes_files, type, log, rrd_dir):
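    # Legacy Python 2 variant of the queue report (print statements, dict.has_key):
    # it reads and parses .changes files from disk via Upload.load_changes() instead
    # of iterating policy-queue uploads from the database as in Example 5.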
    msg = ""
    cache = {}
    unprocessed = []
    # Read in all the .changes files
    for filename in changes_files:
        try:
            u = Upload()
            u.load_changes(filename)
            cache[filename] = copy(u.pkg.changes)
            cache[filename]["filename"] = filename
        except Exception as e:
            print "WARNING: Exception %s" % e
            continue
    # Divide the .changes into per-source groups
    per_source = {}
    for filename in cache.keys():
        if not cache[filename].has_key("source"):
            unprocessed.append(filename)
            continue
        source = cache[filename]["source"]
        if not per_source.has_key(source):
            per_source[source] = {}
            per_source[source]["list"] = []
        per_source[source]["list"].append(cache[filename])
    # Determine the oldest timestamp and the note status for each source group
    for source in per_source.keys():
        source_list = per_source[source]["list"]
        first = source_list[0]
        oldest = os.stat(first["filename"])[stat.ST_MTIME]
        have_note = 0
        for d in per_source[source]["list"]:
            mtime = os.stat(d["filename"])[stat.ST_MTIME]
            if Cnf.has_key("Queue-Report::Options::New"):
                if mtime > oldest:
                    oldest = mtime
            else:
                if mtime < oldest:
                    oldest = mtime
            have_note += has_new_comment(d["source"], d["version"])
        per_source[source]["oldest"] = oldest
        if not have_note:
            per_source[source]["note_state"] = 0  # none
        elif have_note < len(source_list):
            per_source[source]["note_state"] = 1  # some
        else:
            per_source[source]["note_state"] = 2  # all
    per_source_items = per_source.items()
    per_source_items.sort(sg_compare)

    update_graph_database(rrd_dir, type, len(per_source_items), len(changes_files))

    entries = []
    max_source_len = 0
    max_version_len = 0
    max_arch_len = 0
    for i in per_source_items:
        maintainer = {}
        maint = ""
        distribution = ""
        closes = ""
        fingerprint = ""
        changeby = {}
        changedby = ""
        sponsor = ""
        filename = i[1]["list"][0]["filename"]
        last_modified = time.time() - i[1]["oldest"]
        source = i[1]["list"][0]["source"]
        if len(source) > max_source_len:
            max_source_len = len(source)
        binary_list = i[1]["list"][0]["binary"].keys()
        binary = ', '.join(binary_list)
        arches = {}
        versions = {}
        for j in i[1]["list"]:
            changesbase = os.path.basename(j["filename"])
            try:
                session = DBConn().session()
                dbc = session.query(DBChange).filter_by(changesname=changesbase).one()
                session.close()
            except Exception as e:
                print "Can't find changes file in NEW for %s (%s)" % (changesbase, e)
                dbc = None

            if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"):
                try:
                    (maintainer["maintainer822"], maintainer["maintainer2047"],
                    maintainer["maintainername"], maintainer["maintaineremail"]) = \
                    fix_maintainer (j["maintainer"])
                except ParseMaintError as msg:
                    print "Problems while parsing maintainer address\n"
                    maintainer["maintainername"] = "Unknown"
                    maintainer["maintaineremail"] = "Unknown"
                maint="%s:%s" % (maintainer["maintainername"], maintainer["maintaineremail"])
                # ...likewise for the Changed-By: field if it exists.
                try:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                     fix_maintainer (j["changed-by"])
                except ParseMaintError as msg:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                     ("", "", "", "")
                changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"])

                distribution=j["distribution"].keys()
                closes=j["closes"].keys()
                if dbc:
                    fingerprint = dbc.fingerprint
                    sponsor_name = get_uid_from_fingerprint(fingerprint).name
                    sponsor_email = get_uid_from_fingerprint(fingerprint).uid + "@debian.org"
                    if sponsor_name != maintainer["maintainername"] and sponsor_name != changeby["changedbyname"] and \
                    sponsor_email != maintainer["maintaineremail"] and sponsor_name != changeby["changedbyemail"]:
                        sponsor = sponsor_email

            for arch in j["architecture"].keys():
                arches[arch] = ""
            version = j["version"]
            versions[version] = ""
        arches_list = arches.keys()
        arches_list.sort(utils.arch_compare_sw)
        arch_list = " ".join(arches_list)
        version_list = " ".join(versions.keys())
        if len(version_list) > max_version_len:
            max_version_len = len(version_list)
        if len(arch_list) > max_arch_len:
            max_arch_len = len(arch_list)
        if i[1]["note_state"]:
            note = " | [N]"
        else:
            note = ""
        entries.append([source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])

    # Each direction entry consists of "which field, which direction, time-consider",
    # where time-consider says how we should treat last_modified.

    # Look for the sort options and then do the sort.
    age = "h"
    if Cnf.has_key("Queue-Report::Options::Age"):
        age =  Cnf["Queue-Report::Options::Age"]
    if Cnf.has_key("Queue-Report::Options::New"):
        # If we produce HTML we always list the oldest first.
        direction.append([5,-1,"ao"])
    else:
        if Cnf.has_key("Queue-Report::Options::Sort"):
            for i in Cnf["Queue-Report::Options::Sort"].split(","):
                if i == "ao":
                    # Age, oldest first.
                    direction.append([5,-1,age])
                elif i == "an":
                    # Age, newest first.
                    direction.append([5,1,age])
                elif i == "na":
                    # Name, Ascending.
                    direction.append([0,1,0])
                elif i == "nd":
                    # Name, Descending.
                    direction.append([0,-1,0])
                elif i == "nl":
                    # Notes last.
                    direction.append([4,1,0])
                elif i == "nf":
                    # Notes first.
                    direction.append([4,-1,0])
    entries.sort(lambda x, y: sortfunc(x, y))
    # In theory several sort options can be combined on the command line, but there is
    # no sorting function yet that handles all the corner cases that combination brings:
    # if options are combined, only the last one currently takes effect. To be enhanced
    # in the future.

    if Cnf.has_key("Queue-Report::Options::822"):
        # print stuff out in 822 format
        for entry in entries:
            (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry

            # We'll always have Source, Version, Arch, Maintainer, and Dist
            # For the rest, check to see if we have them, then print them out
            log.write("Source: " + source + "\n")
            log.write("Binary: " + binary + "\n")
            log.write("Version: " + version_list + "\n")
            log.write("Architectures: ")
            log.write( (", ".join(arch_list.split(" "))) + "\n")
            log.write("Age: " + time_pp(last_modified) + "\n")
            log.write("Last-Modified: " + str(int(time.time()) - int(last_modified)) + "\n")
            log.write("Queue: " + type + "\n")

            (name, mail) = maint.split(":", 1)
            log.write("Maintainer: " + name + " <"+mail+">" + "\n")
            if changedby:
               (name, mail) = changedby.split(":", 1)
               log.write("Changed-By: " + name + " <"+mail+">" + "\n")
            if sponsor:
               log.write("Sponsored-By: " + "@".join(sponsor.split("@")[:2]) + "\n")
            log.write("Distribution:")
            for dist in distribution:
               log.write(" " + dist)
            log.write("\n")
            log.write("Fingerprint: " + fingerprint + "\n")
            if closes:
                bug_string = ""
                for bugs in closes:
                    bug_string += "#"+bugs+", "
                log.write("Closes: " + bug_string[:-2] + "\n")
            log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
            log.write("\n")

    if Cnf.has_key("Queue-Report::Options::New"):
        direction.append([5,1,"ao"])
        entries.sort(lambda x, y: sortfunc(x, y))
        # Output for an HTML file: first table_header, then table_footer.
        # Every line between them is a <tr> printed by the table_row subroutine.
        if len(entries) > 0:
            total_count = len(changes_files)
            source_count = len(per_source_items)
            table_header(type.upper(), source_count, total_count)
            for entry in entries:
                (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
                table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes, fingerprint, sponsor, changedby)
            table_footer(type.upper())
    elif not Cnf.has_key("Queue-Report::Options::822"):
    # The "normal" output without any formatting.
        format="%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (max_source_len, max_version_len, max_arch_len)

        msg = ""
        for entry in entries:
            (source, binary, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
            msg += format % (source, version_list, arch_list, note, time_pp(last_modified))

        if msg:
            total_count = len(changes_files)
            source_count = len(per_source_items)
            print type.upper()
            print "-"*len(type)
            print
            print msg
            print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count))
            print

        if len(unprocessed):
            print "UNPROCESSED"
            print "-----------"
            for u in unprocessed:
                print u
            print