Example #1
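This function, from dak (the Debian archive kit), renders an HTML summary page for a package sitting in the NEW queue; an mtime check lets it skip packages whose page is already up to date.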
def do_pkg(changes_file):
    changes_file = utils.validate_changes_file_arg(changes_file, 0)
    if not changes_file:
        return
    print "\n" + changes_file

    u = Upload()
    u.pkg.changes_file = changes_file
    # We can afford not to check the signature before loading the changes file,
    # as we've validated it already (otherwise it couldn't be in NEW),
    # and this lets us skip over already-processed files more quickly.
    u.load_changes(changes_file)

    origchanges = os.path.abspath(u.pkg.changes_file)

    # Still be cautious in case parsing the changes file went badly
    if u.pkg.changes.has_key('source') and u.pkg.changes.has_key('version'):
        htmlname = u.pkg.changes["source"] + "_" + u.pkg.changes["version"] + ".html"
        htmlfile = os.path.join(cnf["Show-New::HTMLPath"], htmlname)
    else:
        # Changes file was bad
        print "Changes file %s missing source or version field" % changes_file
        return

    # Have we already processed this?
    if os.path.exists(htmlfile) and \
       os.stat(htmlfile).st_mtime > os.stat(origchanges).st_mtime:
        with open(htmlfile, "r") as fd:
            if fd.read() != timeout_str:
                sources.append(htmlname)
                return (PROC_STATUS_SUCCESS,
                        '%s already up-to-date' % htmlfile)

    # Now we'll load the fingerprint
    session = DBConn().session()
    htmlfiles_to_process.append(htmlfile)
    (u.pkg.changes["fingerprint"], rejects) = utils.check_signature(changes_file, session=session)
    new_queue = get_policy_queue('new', session)
    u.pkg.directory = new_queue.path
    u.update_subst()
    files = u.pkg.files
    changes = u.pkg.changes
    sources.append(htmlname)

    for deb_filename, f in files.items():
        if deb_filename.endswith(".udeb") or deb_filename.endswith(".deb"):
            u.binary_file_checks(deb_filename, session)
            u.check_binary_against_db(deb_filename, session)
        else:
            u.source_file_checks(deb_filename, session)
            u.check_source_against_db(deb_filename, session)
    u.pkg.changes["suite"] = u.pkg.changes["distribution"]

    new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, 0, dsc=u.pkg.dsc, session=session)

    outfile = open(htmlfile, "w")

    filestoexamine = []
    for pkg in new.keys():
        for fn in new[pkg]["files"]:
            filestoexamine.append(fn)

    print >> outfile, html_header(changes["source"], filestoexamine)

    check_valid(new, session)
    distribution = changes["distribution"].keys()[0]
    print >> outfile, examine_package.display_changes(distribution, changes_file)

    for fn in filter(lambda fn: fn.endswith(".dsc"), filestoexamine):
        print >> outfile, examine_package.check_dsc(distribution, fn, session)
    for fn in filter(lambda fn: fn.endswith(".deb") or fn.endswith(".udeb"), filestoexamine):
        print >> outfile, examine_package.check_deb(distribution, fn, session)

    print >> outfile, html_footer()

    outfile.close()
    session.close()

    htmlfiles_to_process.remove(htmlfile)
    return (PROC_STATUS_SUCCESS, '%s already updated' % htmlfile)
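The early return above hinges on a single mtime comparison between the generated HTML page and the original .changes file. A minimal, self-contained sketch of that caching check (the helper name is hypothetical):

import os

def needs_regeneration(changes_path, html_path):
    # Rebuild if the HTML page is missing or not newer than the .changes file.
    if not os.path.exists(html_path):
        return True
    return os.stat(html_path).st_mtime <= os.stat(changes_path).st_mtime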
Example #2
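A small maintenance script: it walks /srv/dak/done for already-processed .changes files and imports each changelog into the database via Upload.store_changelog().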
import fnmatch
import os
import sys

sys.path.append('/srv/dak/dak')
from daklib.dbconn import *
from daklib import utils
from daklib.queue import Upload

i = 0
t = 0
pattern = '*.changes'
changes_dir = '/srv/dak/done'

def find_changes(pattern, root):
    for path, dirs, files in os.walk(os.path.abspath(root)):
        for filename in fnmatch.filter(files, pattern):
            yield os.path.join(path, filename)

for changes_file in find_changes(pattern, changes_dir):
    t = t + 1
for changes_file in find_changes(pattern, changes_dir):
    u = Upload()
    u.pkg.changes_file = changes_file
    (u.pkg.changes["fingerprint"], rejects) = utils.check_signature(changes_file)
    if u.load_changes(changes_file):
        try:
            u.store_changelog()
        except Exception as e:
            print 'Unable to handle %s: %s' % (changes_file, e)
    else:
        print u.rejects
    i = i + 1
    sys.stdout.write('%d out of %d processed\r' % (i, t))
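The script walks the directory tree twice, once only to count the files. A sketch of a single-pass variant that collects the matches up front:

changes_files = list(find_changes(pattern, changes_dir))
t = len(changes_files)
for i, changes_file in enumerate(changes_files, 1):
    # ... same per-file processing as above ...
    sys.stdout.write('%d out of %d processed\r' % (i, t))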
Example #3
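The same changelog-import loop as Example #2, pointed at the ftp-master done queue.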
import fnmatch
import os
import sys

from daklib import utils
from daklib.queue import Upload

i = 0
t = 0
pattern = '*.changes'
changes_dir = '/srv/ftp.debian.org/queue/done'


def find_changes(pattern, root):
    for path, dirs, files in os.walk(os.path.abspath(root)):
        for filename in fnmatch.filter(files, pattern):
            yield os.path.join(path, filename)


for changes_file in find_changes(pattern, changes_dir):
    t = t + 1
for changes_file in find_changes(pattern, changes_dir):
    u = Upload()
    u.pkg.changes_file = changes_file
    (u.pkg.changes["fingerprint"],
     rejects) = utils.check_signature(changes_file)
    if u.load_changes(changes_file):
        try:
            u.store_changelog()
        except Exception as e:
            print 'Unable to handle %s: %s' % (changes_file, e)
    else:
        print u.rejects
    i = i + 1
    sys.stdout.write('%d out of %d processed\r' % (i, t))
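If the done directory is known to be flat (no subdirectories), the os.walk/fnmatch generator could be replaced with a plain glob; a sketch:

import glob
import os

for changes_file in sorted(glob.glob(os.path.join(changes_dir, '*.changes'))):
    # ... same per-file processing as above ...
    pass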
Example #4
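dak's queue-report generator: it groups the .changes files by source package, gathers maintainer, sponsor, architecture and version data, and emits the report as RFC 822 stanzas, an HTML table, or plain text depending on the options.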
def process_changes_files(changes_files, type, log, rrd_dir):
    msg = ""
    cache = {}
    unprocessed = []
    # Read in all the .changes files
    for filename in changes_files:
        try:
            u = Upload()
            u.load_changes(filename)
            cache[filename] = copy(u.pkg.changes)
            cache[filename]["filename"] = filename
        except Exception as e:
            print "WARNING: Exception %s" % e
            continue
    # Divide the .changes into per-source groups
    per_source = {}
    for filename in cache.keys():
        if not cache[filename].has_key("source"):
            unprocessed.append(filename)
            continue
        source = cache[filename]["source"]
        if not per_source.has_key(source):
            per_source[source] = {}
            per_source[source]["list"] = []
        per_source[source]["list"].append(cache[filename])
    # Determine oldest time and have note status for each source group
    for source in per_source.keys():
        source_list = per_source[source]["list"]
        first = source_list[0]
        oldest = os.stat(first["filename"])[stat.ST_MTIME]
        have_note = 0
        for d in per_source[source]["list"]:
            mtime = os.stat(d["filename"])[stat.ST_MTIME]
            if Cnf.has_key("Queue-Report::Options::New"):
                if mtime > oldest:
                    oldest = mtime
            else:
                if mtime < oldest:
                    oldest = mtime
            have_note += has_new_comment(d["source"], d["version"])
        per_source[source]["oldest"] = oldest
        if not have_note:
            per_source[source]["note_state"] = 0  # none
        elif have_note < len(source_list):
            per_source[source]["note_state"] = 1  # some
        else:
            per_source[source]["note_state"] = 2  # all
    per_source_items = per_source.items()
    per_source_items.sort(sg_compare)

    update_graph_database(rrd_dir, type, len(per_source_items), len(changes_files))

    entries = []
    max_source_len = 0
    max_version_len = 0
    max_arch_len = 0
    for i in per_source_items:
        maintainer = {}
        maint = ""
        distribution = ""
        closes = ""
        fingerprint = ""
        changeby = {}
        changedby = ""
        sponsor = ""
        filename = i[1]["list"][0]["filename"]
        last_modified = time.time() - i[1]["oldest"]
        source = i[1]["list"][0]["source"]
        if len(source) > max_source_len:
            max_source_len = len(source)
        binary_list = i[1]["list"][0]["binary"].keys()
        binary = ', '.join(binary_list)
        arches = {}
        versions = {}
        for j in i[1]["list"]:
            changesbase = os.path.basename(j["filename"])
            session = DBConn().session()
            try:
                dbc = session.query(DBChange).filter_by(changesname=changesbase).one()
            except Exception as e:
                print "Can't find changes file in NEW for %s (%s)" % (changesbase, e)
                dbc = None
            finally:
                session.close()

            if Cnf.has_key("Queue-Report::Options::New") or Cnf.has_key("Queue-Report::Options::822"):
                try:
                    (maintainer["maintainer822"], maintainer["maintainer2047"],
                    maintainer["maintainername"], maintainer["maintaineremail"]) = \
                    fix_maintainer (j["maintainer"])
                except ParseMaintError as msg:
                    print "Problems while parsing maintainer address\n"
                    maintainer["maintainername"] = "Unknown"
                    maintainer["maintaineremail"] = "Unknown"
                maint="%s:%s" % (maintainer["maintainername"], maintainer["maintaineremail"])
                # ...likewise for the Changed-By: field if it exists.
                try:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                     fix_maintainer(j["changed-by"])
                except ParseMaintError:
                    (changeby["changedby822"], changeby["changedby2047"],
                     changeby["changedbyname"], changeby["changedbyemail"]) = \
                     ("", "", "", "")
                changedby="%s:%s" % (changeby["changedbyname"], changeby["changedbyemail"])

                distribution=j["distribution"].keys()
                closes=j["closes"].keys()
                if dbc:
                    fingerprint = dbc.fingerprint
                    uploader = get_uid_from_fingerprint(fingerprint)
                    sponsor_name = uploader.name
                    sponsor_email = uploader.uid + "@debian.org"
                    if sponsor_name != maintainer["maintainername"] and \
                       sponsor_name != changeby["changedbyname"] and \
                       sponsor_email != maintainer["maintaineremail"] and \
                       sponsor_email != changeby["changedbyemail"]:
                        sponsor = sponsor_email

            for arch in j["architecture"].keys():
                arches[arch] = ""
            version = j["version"]
            versions[version] = ""
        arches_list = arches.keys()
        arches_list.sort(utils.arch_compare_sw)
        arch_list = " ".join(arches_list)
        version_list = " ".join(versions.keys())
        if len(version_list) > max_version_len:
            max_version_len = len(version_list)
        if len(arch_list) > max_arch_len:
            max_arch_len = len(arch_list)
        if i[1]["note_state"]:
            note = " | [N]"
        else:
            note = ""
        entries.append([source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, filename])

    # A direction entry consists of [field, direction, time-consider], where
    # time-consider says how we should treat last_modified. That's all.
    # (A sketch of a comparison function using these triplets follows this
    # example.)

    # Look for the sort options and then do the sort.
    age = "h"
    if Cnf.has_key("Queue-Report::Options::Age"):
        age =  Cnf["Queue-Report::Options::Age"]
    if Cnf.has_key("Queue-Report::Options::New"):
    # If we produce html we always have oldest first.
        direction.append([5,-1,"ao"])
    else:
        if Cnf.has_key("Queue-Report::Options::Sort"):
            for i in Cnf["Queue-Report::Options::Sort"].split(","):
                if i == "ao":
                    # Age, oldest first.
                    direction.append([5,-1,age])
                elif i == "an":
                    # Age, newest first.
                    direction.append([5,1,age])
                elif i == "na":
                    # Name, Ascending.
                    direction.append([0,1,0])
                elif i == "nd":
                    # Name, Descending.
                    direction.append([0,-1,0])
                elif i == "nl":
                    # Notes last.
                    direction.append([4,1,0])
                elif i == "nf":
                    # Notes first.
                    direction.append([4,-1,0])
    entries.sort(sortfunc)
    # In theory several sort options can be combined on the command line,
    # but there is no good sorting function yet that handles all the corner
    # cases that creates: if options are combined, only the last one takes
    # effect. To be enhanced in the future.

    if Cnf.has_key("Queue-Report::Options::822"):
        # print stuff out in 822 format
        for entry in entries:
            (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, changes_file) = entry

            # We'll always have Source, Version, Arch, Maintainer, and Dist.
            # For the rest, check whether we have them, then print them out.
            log.write("Source: " + source + "\n")
            log.write("Binary: " + binary + "\n")
            log.write("Version: " + version_list + "\n")
            log.write("Architectures: ")
            log.write( (", ".join(arch_list.split(" "))) + "\n")
            log.write("Age: " + time_pp(last_modified) + "\n")
            log.write("Last-Modified: " + str(int(time.time()) - int(last_modified)) + "\n")
            log.write("Queue: " + type + "\n")

            (name, mail) = maint.split(":", 1)
            log.write("Maintainer: " + name + " <" + mail + ">\n")
            if changedby:
                (name, mail) = changedby.split(":", 1)
                log.write("Changed-By: " + name + " <" + mail + ">\n")
            if sponsor:
                log.write("Sponsored-By: " + "@".join(sponsor.split("@")[:2]) + "\n")
            log.write("Distribution:")
            for dist in distribution:
               log.write(" " + dist)
            log.write("\n")
            log.write("Fingerprint: " + fingerprint + "\n")
            if closes:
                log.write("Closes: " + ", ".join("#" + bug for bug in closes) + "\n")
            log.write("Changes-File: " + os.path.basename(changes_file) + "\n")
            log.write("\n")

    if Cnf.has_key("Queue-Report::Options::New"):
        direction.append([5,1,"ao"])
        entries.sort(lambda x, y: sortfunc(x, y))
    # Output for a html file. First table header. then table_footer.
    # Any line between them is then a <tr> printed from subroutine table_row.
        if len(entries) > 0:
            total_count = len(changes_files)
            source_count = len(per_source_items)
            table_header(type.upper(), source_count, total_count)
            for entry in entries:
                (source, binary, version_list, arch_list, note, last_modified, maint, distribution, closes, fingerprint, sponsor, changedby, undef) = entry
                table_row(source, version_list, arch_list, time_pp(last_modified), maint, distribution, closes, fingerprint, sponsor, changedby)
            table_footer(type.upper())
    elif not Cnf.has_key("Queue-Report::Options::822"):
        # The "normal" output without any formatting.
        format = "%%-%ds | %%-%ds | %%-%ds%%s | %%s old\n" % (max_source_len, max_version_len, max_arch_len)

        msg = ""
        for entry in entries:
            (source, binary, version_list, arch_list, note, last_modified, undef, undef, undef, undef, undef, undef, undef) = entry
            msg += format % (source, version_list, arch_list, note, time_pp(last_modified))

        if msg:
            total_count = len(changes_files)
            source_count = len(per_source_items)
            print type.upper()
            print "-"*len(type)
            print
            print msg
            print "%s %s source package%s / %s %s package%s in total." % (source_count, type, plural(source_count), total_count, type, plural(total_count))
            print

        if unprocessed:
            print "UNPROCESSED"
            print "-----------"
            for u in unprocessed:
                print u
            print
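For orientation, here is a sketch of a comparison function consuming those [field, direction, time-consider] triplets; dak's real sortfunc reads the direction list as a global and may differ in detail, so treat this as illustrative only:

def sortfunc_sketch(a, b, direction):
    # Each entry is [index into the entry tuple, +1 or -1, time-consider].
    for field, order, _time_consider in direction:
        result = cmp(a[field], b[field]) * order  # Python 2 cmp()
        if result != 0:
            return result
    return 0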
Example #5
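A constructor that reconciles the NEW queue with the database: each .changes file in the queue is loaded, its files are registered as ChangePendingFile rows (with checksums recomputed when the .changes did not record them), and files missing from the queue directory are looked up in the pool instead.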
    def __init__(self, session=None):
        cnf = Config()
        try:
            newq = get_policy_queue('new', session)
            for changes_fn in glob.glob(newq.path + "/*.changes"):
                changes_bn = os.path.basename(changes_fn)
                chg = get_dbchange(changes_bn, session)

                u = Upload()
                success = u.load_changes(changes_fn)
                u.pkg.changes_file = changes_bn
                u.check_hashes()

                if not chg:
                    chg = u.pkg.add_known_changes(newq.path, newq.policy_queue_id, session)
                    session.add(chg)

                if not success:
                    log.critical("failed to load %s" % changes_fn)
                    sys.exit(1)
                else:
                    log.critical("ACCLAIM: %s" % changes_fn)

                files = []
                for chg_fn in u.pkg.files.keys():
                    try:
                        f = open(os.path.join(newq.path, chg_fn))
                        cpf = ChangePendingFile()
                        cpf.filename = chg_fn
                        cpf.size = u.pkg.files[chg_fn]['size']
                        cpf.md5sum = u.pkg.files[chg_fn]['md5sum']

                        if u.pkg.files[chg_fn].has_key('sha1sum'):
                            cpf.sha1sum = u.pkg.files[chg_fn]['sha1sum']
                        else:
                            log.warning("Having to generate sha1sum for %s" % chg_fn)
                            f.seek(0)
                            cpf.sha1sum = apt_pkg.sha1sum(f)

                        if u.pkg.files[chg_fn].has_key('sha256sum'):
                            cpf.sha256sum = u.pkg.files[chg_fn]['sha256sum']
                        else:
                            log.warning("Having to generate sha256sum for %s" % chg_fn)
                            f.seek(0)
                            cpf.sha256sum = apt_pkg.sha256sum(f)

                        session.add(cpf)
                        files.append(cpf)
                        f.close()
                    except IOError:
                        # Can't find the file, try to look it up in the pool
                        poolname = poolify(u.pkg.changes["source"], u.pkg.files[chg_fn]["component"])
                        l = get_location(cnf["Dir::Pool"], u.pkg.files[chg_fn]["component"], session=session)
                        if not l:
                            log.critical("ERROR: Can't find location for %s (component %s)" % (chg_fn, u.pkg.files[chg_fn]["component"]))

                        found, poolfile = check_poolfile(os.path.join(poolname, chg_fn),
                                                         u.pkg.files[chg_fn]['size'],
                                                         u.pkg.files[chg_fn]["md5sum"],
                                                         l.location_id,
                                                         session=session)

                        if found is None:
                            log.critical("ERROR: Found multiple files for %s in pool" % chg_fn)
                            sys.exit(1)
                        elif found is False and poolfile is not None:
                            log.critical("ERROR: md5sum / size mismatch for %s in pool" % chg_fn)
                            sys.exit(1)
                        else:
                            if poolfile is None:
                                log.critical("ERROR: Could not find %s in pool" % chg_fn)
                                sys.exit(1)
                            else:
                                chg.poolfiles.append(poolfile)

                chg.files = files

            session.commit()

        except KeyboardInterrupt:
            utils.warn("Caught C-c; terminating.")
            self.plsDie()
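The fallback above recomputes a digest whenever the .changes file did not record one, using apt_pkg's hashing on an open file object. Outside of dak, the same streaming pattern with plain hashlib (helper name hypothetical) looks like:

import hashlib

def file_digest(path, algo='sha256'):
    # Stream the file in chunks so large packages are not read into memory at once.
    h = hashlib.new(algo)
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(65536), b''):
            h.update(chunk)
    return h.hexdigest()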