Exemplo n.º 1
0
def check_dscs():
    """
    Validate every .dsc file registered in the archive.

    Walks all source packages ordered by name and version, parses each
    package's .dsc and reports (via utils.warn) any that is syntactically
    invalid, badly encoded, missing, or otherwise unparseable.
    """
    bad = 0

    sources = DBConn().session().query(DBSource).order_by(DBSource.source, DBSource.version)
    for source in sources:
        dsc_path = source.poolfile.fullpath
        try:
            utils.parse_changes(dsc_path, signing_rules=1, dsc_file=1)
        except InvalidDscError:
            utils.warn("syntax error in .dsc file %s" % dsc_path)
            bad += 1
        except ChangesUnicodeError:
            utils.warn("found invalid dsc file (%s), not properly utf-8 encoded" % dsc_path)
            bad += 1
        except CantOpenError:
            utils.warn("missing dsc file (%s)" % dsc_path)
            bad += 1
        except Exception as e:
            utils.warn("miscellaneous error parsing dsc file (%s): %s" % (dsc_path, str(e)))
            bad += 1

    if bad:
        utils.warn("Found %s invalid .dsc files." % (bad))
Exemplo n.º 2
0
def check_dscs():
    """
    Walk the archive's source packages and verify that each .dsc file
    parses cleanly, warning about every broken one found.
    """
    broken = 0

    session = DBConn().session()
    query = session.query(DBSource).order_by(DBSource.source, DBSource.version)
    for source_pkg in query:
        path = source_pkg.poolfile.fullpath
        try:
            utils.parse_changes(path, signing_rules=1, dsc_file=1)
            continue  # parsed fine, nothing to count
        except InvalidDscError:
            utils.warn("syntax error in .dsc file %s" % path)
        except ChangesUnicodeError:
            utils.warn(
                "found invalid dsc file (%s), not properly utf-8 encoded" % path)
        except CantOpenError:
            utils.warn("missing dsc file (%s)" % path)
        except Exception as e:
            utils.warn("miscellaneous error parsing dsc file (%s): %s" %
                       (path, str(e)))
        broken += 1

    if broken:
        utils.warn("Found %s invalid .dsc files." % (broken))
Exemplo n.º 3
0
def flush_orphans():
    """
    Remove old orphaned files from the current directory (Incoming).

    A file is an orphan when it is not referenced by any .changes file
    (nor by a .dsc listed in one).  Orphans with an mtime older than
    delete_date are removed; newer ones are kept for a later run.
    """
    all_files = {}
    changes_files = []

    Logger.log(["check Incoming for old orphaned files", os.getcwd()])
    # Build up the list of all files in the directory
    for entry in os.listdir('.'):
        if os.path.isfile(entry):
            all_files[entry] = 1
            if entry.endswith(".changes"):
                changes_files.append(entry)

    # Process all .changes and .dsc files.
    for changes_filename in changes_files:
        try:
            changes = utils.parse_changes(changes_filename)
            files = utils.build_file_list(changes)
        except Exception:
            # A bare 'except:' here would also swallow KeyboardInterrupt
            # and SystemExit; only ordinary errors should be skipped.
            utils.warn("error processing '%s'; skipping it. [Got %s]" %
                       (changes_filename, sys.exc_info()[0]))
            continue

        dsc_files = {}
        for f in files.keys():
            if f.endswith(".dsc"):
                try:
                    dsc = utils.parse_changes(f, dsc_file=1)
                    # NOTE(review): this overwrites rather than accumulates,
                    # so only the last .dsc's file list survives — confirm
                    # whether multiple .dsc per .changes can occur.
                    dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
                except Exception:
                    utils.warn("error processing '%s'; skipping it. [Got %s]" %
                               (f, sys.exc_info()[0]))
                    continue

        # Ensure all the files we've seen aren't deleted
        keys = []
        for i in (files.keys(), dsc_files.keys(), [changes_filename]):
            keys.extend(i)
        for key in keys:
            if key in all_files:
                if Options["Verbose"]:
                    print("Skipping, has parents, '%s'." % (key))
                del all_files[key]

    # Anything left at this stage is not referenced by a .changes (or
    # a .dsc) and should be deleted if old enough.
    for f in all_files.keys():
        if os.stat(f)[stat.ST_MTIME] < delete_date:
            remove('Incoming', f)
        else:
            if Options["Verbose"]:
                print("Skipping, too new, '%s'." % (os.path.basename(f)))
Exemplo n.º 4
0
def flush_orphans():
    """
    Remove old orphaned files from the current directory (Incoming).

    A file is an orphan when it is not referenced by any .changes file
    (nor by a .dsc listed in one).  Orphans with an mtime older than
    delete_date are removed; newer ones are kept for a later run.

    Updated from Python 2 idioms (print statements, dict.has_key, bare
    except clauses) to match the Python 3 variant of this function.
    """
    all_files = {}
    changes_files = []

    Logger.log(["check Incoming for old orphaned files", os.getcwd()])
    # Build up the list of all files in the directory
    for i in os.listdir('.'):
        if os.path.isfile(i):
            all_files[i] = 1
            if i.endswith(".changes"):
                changes_files.append(i)

    # Process all .changes and .dsc files.
    for changes_filename in changes_files:
        try:
            changes = utils.parse_changes(changes_filename)
            files = utils.build_file_list(changes)
        except Exception:
            # A bare 'except:' would also swallow KeyboardInterrupt.
            utils.warn("error processing '%s'; skipping it. [Got %s]" % (changes_filename, sys.exc_info()[0]))
            continue

        dsc_files = {}
        for f in files.keys():
            if f.endswith(".dsc"):
                try:
                    dsc = utils.parse_changes(f, dsc_file=1)
                    dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
                except Exception:
                    utils.warn("error processing '%s'; skipping it. [Got %s]" % (f, sys.exc_info()[0]))
                    continue

        # Ensure all the files we've seen aren't deleted
        keys = []
        for i in (files.keys(), dsc_files.keys(), [changes_filename]):
            keys.extend(i)
        for key in keys:
            if key in all_files:
                if Options["Verbose"]:
                    print("Skipping, has parents, '%s'." % (key))
                del all_files[key]

    # Anything left at this stage is not referenced by a .changes (or
    # a .dsc) and should be deleted if old enough.
    for f in all_files.keys():
        if os.stat(f)[stat.ST_MTIME] < delete_date:
            remove('Incoming', f)
        else:
            if Options["Verbose"]:
                print("Skipping, too new, '%s'." % (os.path.basename(f)))
Exemplo n.º 5
0
def check_files_in_dsc():
    """
    Ensure each .dsc lists appropriate files in its Files field (according
    to the format announced in its Format field).

    Fixes over the original: the undefined name 'ql' (a NameError as soon
    as any file matched), a regex-style LIKE pattern that could never
    match, and path concatenation passed to os.path.join as one argument.
    """
    count = 0

    print("Building list of database files...")
    # SQL LIKE uses '%' as its wildcard; '.dsc$' is a regex anchor that
    # would match no rows at all.
    q = DBConn().session().query(PoolFile).filter(PoolFile.filename.like('%.dsc'))

    if q.count() > 0:
        print("Checking %d files..." % q.count())
    else:
        print("No files to check.")

    for pf in q.all():
        # Join the two path components; the original concatenated them and
        # then called os.path.join on the single result.
        filename = os.path.abspath(os.path.join(pf.location.path, pf.filename))

        try:
            # NB: don't enforce .dsc syntax
            dsc = utils.parse_changes(filename, dsc_file=1)
        except Exception:
            # utils.fubar aborts the run; narrow except keeps C-c working.
            utils.fubar("error parsing .dsc file '%s'." % (filename))

        reasons = utils.check_dsc_files(filename, dsc)
        for r in reasons:
            utils.warn(r)

        if len(reasons) > 0:
            count += 1

    if count:
        utils.warn("Found %s invalid .dsc files." % (count))
Exemplo n.º 6
0
    def run(self):
        """
        Worker loop: dequeue .changes files and import them into the DB.

        Returns when asked to die or when the queue yields a falsy item;
        on C-c or any unexpected error it tells the parent thread pool to
        shut down and exits the process.
        """
        while True:
            try:
                if self.die:
                    return
                to_import = self.queue.dequeue()
                if not to_import:
                    return

                print("Directory %s, file %7d, (%s)" % (to_import.dirpath[-10:], to_import.count, to_import.changesfile))

                changes = Changes()
                changes.changes_file = to_import.changesfile
                changesfile = os.path.join(to_import.dirpath, to_import.changesfile)
                changes.changes = parse_changes(changesfile, signing_rules=-1)
                changes.changes["fingerprint"] = check_signature(changesfile)
                changes.add_known_changes(to_import.dirpath, session=self.session)
                self.session.commit()

            except InvalidDscError as line:
                # The original referenced an undefined name 'f' here, which
                # raised NameError whenever this handler fired.  Report the
                # file that was actually being parsed.
                warn("syntax error in .dsc file '%s', line %s." % (changesfile, line))

            except ChangesUnicodeError:
                warn("found invalid changes file, not properly utf-8 encoded")

            except KeyboardInterrupt:
                print("Caught C-c; on ImportThread. terminating.")
                self.parent.plsDie()
                sys.exit(1)

            except Exception:
                # Narrowed from a bare 'except:' so SystemExit propagates.
                self.parent.plsDie()
                sys.exit(1)
Exemplo n.º 7
0
def check_files_in_dsc():
    """
    Ensure each .dsc lists appropriate files in its Files field (according
    to the format announced in its Format field).

    Fixes over the original: the undefined name 'ql' (a NameError as soon
    as any file matched), a regex-style LIKE pattern that could never
    match, and path concatenation passed to os.path.join as one argument.
    """
    count = 0

    print("Building list of database files...")
    # SQL LIKE uses '%' as its wildcard; '.dsc$' is a regex anchor that
    # would match no rows at all.
    q = DBConn().session().query(PoolFile).filter(
        PoolFile.filename.like('%.dsc'))

    if q.count() > 0:
        print("Checking %d files..." % q.count())
    else:
        print("No files to check.")

    for pf in q.all():
        # Join the two path components; the original concatenated them and
        # then called os.path.join on the single result.
        filename = os.path.abspath(os.path.join(pf.location.path,
                                                pf.filename))

        try:
            # NB: don't enforce .dsc syntax
            dsc = utils.parse_changes(filename, dsc_file=1)
        except Exception:
            # utils.fubar aborts the run; narrow except keeps C-c working.
            utils.fubar("error parsing .dsc file '%s'." % (filename))

        reasons = utils.check_dsc_files(filename, dsc)
        for r in reasons:
            utils.warn(r)

        if len(reasons) > 0:
            count += 1

    if count:
        utils.warn("Found %s invalid .dsc files." % (count))
Exemplo n.º 8
0
def check_changes(changes_filename):
    """
    Parse a .changes file and print reports for it and for every .deb,
    .udeb and .dsc file it lists.
    """
    try:
        changes = utils.parse_changes(changes_filename)
    except ChangesUnicodeError:
        utils.warn("Encoding problem with changes file %s" % (changes_filename))
        # 'changes' was never assigned; carrying on (as the original did)
        # raised NameError right after the warning.
        return
    print(display_changes(changes['distribution'], changes_filename))

    files = utils.build_file_list(changes)
    for f in files.keys():
        if f.endswith(".deb") or f.endswith(".udeb"):
            print(check_deb(changes['distribution'], f))
        if f.endswith(".dsc"):
            print(check_dsc(changes['distribution'], f))
Exemplo n.º 9
0
def chk_bd_process_dir(unused, dirname, filenames):
    """
    Directory-walk callback: parse every .dsc under dirname and report
    Build-Depends / Build-Depends-Indep fields apt_pkg cannot parse.
    """
    for name in filenames:
        if not name.endswith(".dsc"):
            continue
        filename = os.path.abspath(dirname + '/' + name)
        dsc = utils.parse_changes(filename, dsc_file=1)
        for field_name in ["build-depends", "build-depends-indep"]:
            field = dsc.get(field_name)
            if field:
                try:
                    apt_pkg.parse_src_depends(field)
                except Exception:
                    # Narrowed from a bare 'except:'; the trailing 'pass'
                    # after print was dead code and is dropped.
                    print("E: [%s] %s: %s" % (filename, field_name, field))
Exemplo n.º 10
0
def chk_bd_process_dir(unused, dirname, filenames):
    """
    Directory-walk callback: parse every .dsc under dirname and report
    Build-Depends / Build-Depends-Indep fields apt_pkg cannot parse.
    """
    for name in filenames:
        if not name.endswith(".dsc"):
            continue
        filename = os.path.abspath(dirname + '/' + name)
        dsc = utils.parse_changes(filename, dsc_file=1)
        for field_name in ["build-depends", "build-depends-indep"]:
            field = dsc.get(field_name)
            if field:
                try:
                    apt_pkg.parse_src_depends(field)
                except Exception:
                    # Narrowed from a bare 'except:'; the trailing 'pass'
                    # after print was dead code and is dropped.
                    print("E: [%s] %s: %s" % (filename, field_name, field))
Exemplo n.º 11
0
def check_changes(changes_filename):
    """
    Parse a .changes file and return a combined report for it and for
    every .deb, .udeb and .dsc file it lists.

    Returns an empty string when the changes file cannot be decoded.
    """
    try:
        changes = utils.parse_changes(changes_filename)
    except UnicodeDecodeError:
        utils.warn("Encoding problem with changes file %s" %
                   (changes_filename))
        # 'changes' was never assigned; carrying on (as the original did)
        # raised NameError right after the warning.
        return ""
    output = display_changes(changes['distribution'], changes_filename)

    files = utils.build_file_list(changes)
    for f in files.keys():
        if f.endswith(".deb") or f.endswith(".udeb"):
            output += check_deb(changes['distribution'], f)
        if f.endswith(".dsc"):
            output += check_dsc(changes['distribution'], f)
        # else: => byhand
    return six.ensure_str(output)
Exemplo n.º 12
0
def read_changes_or_dsc(suite, filename, session=None):
    """
    Render the fields of a .changes or .dsc file for display.

    Parses the file, prettifies selected fields (dependency links,
    architecture/distribution colouring, files/changes/description
    formatting) and returns the reassembled text in the file's original
    field order, with checksums-* fields dropped.  Returns an error
    placeholder when the file cannot be parsed.
    """
    dsc = {}

    dsc_file = utils.open_file(filename)
    try:
        dsc = utils.parse_changes(filename, dsc_file=1)
    except Exception:
        return formatted_text("can't parse .dsc control info")
    finally:
        # The original returned on parse failure without closing the
        # handle, leaking the file descriptor.
        dsc_file.close()

    filecontents = strip_pgp_signature(filename)
    keysinorder = []
    for l in filecontents.split('\n'):
        m = re.match(r'([-a-zA-Z0-9]*):', l)
        if m:
            keysinorder.append(m.group(1))

    for k in dsc.keys():
        if k in ("build-depends", "build-depends-indep"):
            dsc[k] = create_depends_string(suite, split_depends(dsc[k]),
                                           session)
        elif k == "architecture":
            if (dsc["architecture"] != "any"):
                dsc['architecture'] = colour_output(dsc["architecture"],
                                                    'arch')
        elif k == "distribution":
            if dsc["distribution"] not in ('unstable', 'experimental'):
                dsc['distribution'] = colour_output(dsc["distribution"],
                                                    'distro')
        elif k in ("files", "changes", "description"):
            if use_html:
                dsc[k] = formatted_text(dsc[k], strip=True)
            else:
                dsc[k] = ('\n' + '\n'.join(
                    map(lambda x: ' ' + x, dsc[k].split('\n')))).rstrip()
        else:
            dsc[k] = escape_if_needed(dsc[k])

    # Drop checksum fields from the display order.
    keysinorder = filter(lambda x: not x.lower().startswith('checksums-'),
                         keysinorder)

    filecontents = '\n'.join(
        map(lambda x: format_field(x, dsc[x.lower()]), keysinorder)) + '\n'
    return filecontents
Exemplo n.º 13
0
def read_changes_or_dsc(suite, filename, session=None):
    """
    Render the fields of a .changes or .dsc file for display.

    Parses the file, prettifies selected fields (dependency links,
    architecture/distribution colouring, files/changes/description
    formatting) and returns the reassembled text in the file's original
    field order, with checksums-* fields dropped.  Returns an error
    placeholder when the file cannot be parsed.
    """
    dsc = {}

    dsc_file = utils.open_file(filename)
    try:
        dsc = utils.parse_changes(filename, dsc_file=1)
    except Exception:
        return formatted_text("can't parse .dsc control info")
    finally:
        # The original returned on parse failure without closing the
        # handle, leaking the file descriptor.
        dsc_file.close()

    filecontents = strip_pgp_signature(filename)
    keysinorder = []
    for l in filecontents.split('\n'):
        m = re.match(r'([-a-zA-Z0-9]*):', l)
        if m:
            keysinorder.append(m.group(1))

    for k in dsc.keys():
        if k in ("build-depends", "build-depends-indep"):
            dsc[k] = create_depends_string(suite, split_depends(dsc[k]), session)
        elif k == "architecture":
            if (dsc["architecture"] != "any"):
                dsc['architecture'] = colour_output(dsc["architecture"], 'arch')
        elif k == "distribution":
            if dsc["distribution"] not in ('unstable', 'experimental'):
                dsc['distribution'] = colour_output(dsc["distribution"], 'distro')
        elif k in ("files", "changes", "description"):
            if use_html:
                dsc[k] = formatted_text(dsc[k], strip=True)
            else:
                dsc[k] = ('\n' + '\n'.join(map(lambda x: ' ' + x, dsc[k].split('\n')))).rstrip()
        else:
            dsc[k] = escape_if_needed(dsc[k])

    # Drop checksum fields from the display order.
    keysinorder = filter(lambda x: not x.lower().startswith('checksums-'), keysinorder)

    filecontents = '\n'.join(map(lambda x: format_field(x, dsc[x.lower()]), keysinorder)) + '\n'
    return filecontents
Exemplo n.º 14
0
def check_pkg(upload):
    """
    Page an upload's examination report through 'less'.

    Temporarily redirects sys.stdout to a 'less -R -' pipe, prints the
    .changes report plus per-file .deb/.dsc reports for files marked
    "new", then restores stdout.  EPIPE (user quit the pager) and C-c are
    swallowed with a warning.

    The original only restored sys.stdout on the happy path and on the
    re-raise branch; an unexpected exception left stdout pointing at the
    pager.  The restore now lives in a finally block.
    """
    save_stdout = sys.stdout
    try:
        sys.stdout = os.popen("less -R -", 'w', 0)
        changes = utils.parse_changes(upload.pkg.changes_file)
        print(examine_package.display_changes(changes['distribution'], upload.pkg.changes_file))
        files = upload.pkg.files
        for f in files.keys():
            if "new" in files[f]:
                ftype = files[f]["type"]
                if ftype == "deb":
                    print(examine_package.check_deb(changes['distribution'], f))
                elif ftype == "dsc":
                    print(examine_package.check_dsc(changes['distribution'], f))
        print(examine_package.output_package_relations())
    except IOError as e:
        if e.errno == errno.EPIPE:
            # Pager closed early; nothing to do.
            utils.warn("[examine_package] Caught EPIPE; skipping.")
        else:
            raise
    except KeyboardInterrupt:
        utils.warn("[examine_package] Caught C-c; skipping.")
    finally:
        sys.stdout = save_stdout
Exemplo n.º 15
0
Arquivo: rm.py Projeto: evgeni/dak
def main ():
    global Options

    cnf = Config()

    Arguments = [('h',"help","Rm::Options::Help"),
                 ('a',"architecture","Rm::Options::Architecture", "HasArg"),
                 ('b',"binary", "Rm::Options::Binary-Only"),
                 ('c',"component", "Rm::Options::Component", "HasArg"),
                 ('C',"carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"), # Bugs to Cc
                 ('d',"done","Rm::Options::Done", "HasArg"), # Bugs fixed
                 ('D',"do-close","Rm::Options::Do-Close"),
                 ('R',"rdep-check", "Rm::Options::Rdep-Check"),
                 ('m',"reason", "Rm::Options::Reason", "HasArg"), # Hysterical raisins; -m is old-dinstall option for rejection reason
                 ('n',"no-action","Rm::Options::No-Action"),
                 ('p',"partial", "Rm::Options::Partial"),
                 ('s',"suite","Rm::Options::Suite", "HasArg"),
                 ('S',"source-only", "Rm::Options::Source-Only"),
                 ]

    for i in [ "architecture", "binary-only", "carbon-copy", "component",
               "done", "help", "no-action", "partial", "rdep-check", "reason",
               "source-only", "Do-Close" ]:
        if not cnf.has_key("Rm::Options::%s" % (i)):
            cnf["Rm::Options::%s" % (i)] = ""
    if not cnf.has_key("Rm::Options::Suite"):
        cnf["Rm::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Rm::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    # Sanity check options
    if not arguments:
        utils.fubar("need at least one package name as an argument.")
    if Options["Architecture"] and Options["Source-Only"]:
        utils.fubar("can't use -a/--architecture and -S/--source-only options simultaneously.")
    if Options["Binary-Only"] and Options["Source-Only"]:
        utils.fubar("can't use -b/--binary-only and -S/--source-only options simultaneously.")
    if Options.has_key("Carbon-Copy") and not Options.has_key("Done"):
        utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.")
    if Options["Architecture"] and not Options["Partial"]:
        utils.warn("-a/--architecture implies -p/--partial.")
        Options["Partial"] = "true"
    if Options["Do-Close"] and not Options["Done"]:
        utils.fubar("No.")
    if Options["Do-Close"] and Options["Binary-Only"]:
        utils.fubar("No.")
    if Options["Do-Close"] and Options["Source-Only"]:
        utils.fubar("No.")
    if Options["Do-Close"] and Options["Suite"] != 'unstable':
        utils.fubar("No.")

    # Force the admin to tell someone if we're not doing a 'dak
    # cruft-report' inspired removal (or closing a bug, which counts
    # as telling someone).
    if not Options["No-Action"] and not Options["Carbon-Copy"] \
           and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1:
        utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")

    # Process -C/--carbon-copy
    #
    # Accept 3 types of arguments (space separated):
    #  1) a number - assumed to be a bug number, i.e. [email protected]
    #  2) the keyword 'package' - cc's [email protected] for every argument
    #  3) contains a '@' - assumed to be an email address, used unmofidied
    #
    carbon_copy = []
    for copy_to in utils.split_args(Options.get("Carbon-Copy")):
        if copy_to.isdigit():
            if cnf.has_key("Dinstall::BugServer"):
                carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
            else:
                utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to)
        elif copy_to == 'package':
            for package in arguments:
                if cnf.has_key("Dinstall::PackagesServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"])
                if cnf.has_key("Dinstall::TrackingServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::TrackingServer"])
        elif '@' in copy_to:
            carbon_copy.append(copy_to)
        else:
            utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to))

    if Options["Binary-Only"]:
        field = "b.package"
    else:
        field = "s.source"
    con_packages = "AND %s IN (%s)" % (field, ", ".join([ repr(i) for i in arguments ]))

    (con_suites, con_architectures, con_components, check_source) = \
                 utils.parse_args(Options)

    # Additional suite checks
    suite_ids_list = []
    suites = utils.split_args(Options["Suite"])
    suites_list = utils.join_with_commas_and(suites)
    if not Options["No-Action"]:
        for suite in suites:
            s = get_suite(suite, session=session)
            if s is not None:
                suite_ids_list.append(s.suite_id)
            if suite in ("oldstable", "stable"):
                print "**WARNING** About to remove from the (old)stable suite!"
                print "This should only be done just prior to a (point) release and not at"
                print "any other time."
                game_over()
            elif suite == "testing":
                print "**WARNING About to remove from the testing suite!"
                print "There's no need to do this normally as removals from unstable will"
                print "propogate to testing automagically."
                game_over()

    # Additional architecture checks
    if Options["Architecture"] and check_source:
        utils.warn("'source' in -a/--argument makes no sense and is ignored.")

    # Additional component processing
    over_con_components = con_components.replace("c.id", "component")

    # Don't do dependency checks on multiple suites
    if Options["Rdep-Check"] and len(suites) > 1:
        utils.fubar("Reverse dependency check on multiple suites is not implemented.")

    print "Working...",
    sys.stdout.flush()
    to_remove = []
    maintainers = {}

    # We have 3 modes of package selection: binary-only, source-only
    # and source+binary.  The first two are trivial and obvious; the
    # latter is a nasty mess, but very nice from a UI perspective so
    # we try to support it.

    # XXX: TODO: This all needs converting to use placeholders or the object
    #            API. It's an SQL injection dream at the moment

    if Options["Binary-Only"]:
        # Binary-only
        q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, location l, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures))
        for i in q.fetchall():
            to_remove.append(i)
    else:
        # Source-only
        source_packages = {}
        q = session.execute("SELECT l.path, f.filename, s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, files f, location l, component c WHERE sa.source = s.id AND sa.suite = su.id AND s.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s" % (con_packages, con_suites, con_components))
        for i in q.fetchall():
            source_packages[i[2]] = i[:2]
            to_remove.append(i[2:])
        if not Options["Source-Only"]:
            # Source + Binary
            binary_packages = {}
            # First get a list of binary package names we suspect are linked to the source
            q = session.execute("SELECT DISTINCT b.package FROM binaries b, source s, src_associations sa, suite su, files f, location l, component c WHERE b.source = s.id AND sa.source = s.id AND sa.suite = su.id AND s.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s" % (con_packages, con_suites, con_components))
            for i in q.fetchall():
                binary_packages[i[0]] = ""
            # Then parse each .dsc that we found earlier to see what binary packages it thinks it produces
            for i in source_packages.keys():
                filename = "/".join(source_packages[i])
                try:
                    dsc = utils.parse_changes(filename, dsc_file=1)
                except CantOpenError:
                    utils.warn("couldn't open '%s'." % (filename))
                    continue
                for package in dsc.get("binary").split(','):
                    package = package.strip()
                    binary_packages[package] = ""
            # Then for each binary package: find any version in
            # unstable, check the Source: field in the deb matches our
            # source package and if so add it to the list of packages
            # to be removed.
            for package in binary_packages.keys():
                q = session.execute("SELECT l.path, f.filename, b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, location l, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND f.location = l.id AND l.component = c.id %s %s %s AND b.package = '%s'" % (con_suites, con_components, con_architectures, package))
                for i in q.fetchall():
                    filename = "/".join(i[:2])
                    control = apt_pkg.TagSection(utils.deb_extract_control(utils.open_file(filename)))
                    source = control.find("Source", control.find("Package"))
                    source = re_strip_source_version.sub('', source)
                    if source_packages.has_key(source):
                        to_remove.append(i[2:])
    print "done."

    if not to_remove:
        print "Nothing to do."
        sys.exit(0)

    # If we don't have a reason; spawn an editor so the user can add one
    # Write the rejection email out as the <foo>.reason file
    if not Options["Reason"] and not Options["No-Action"]:
        (fd, temp_filename) = utils.temp_filename()
        editor = os.environ.get("EDITOR","vi")
        result = os.system("%s %s" % (editor, temp_filename))
        if result != 0:
            utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result)
        temp_file = utils.open_file(temp_filename)
        for line in temp_file.readlines():
            Options["Reason"] += line
        temp_file.close()
        os.unlink(temp_filename)

    # Generate the summary of what's to be removed
    d = {}
    for i in to_remove:
        package = i[0]
        version = i[1]
        architecture = i[2]
        maintainer = i[4]
        maintainers[maintainer] = ""
        if not d.has_key(package):
            d[package] = {}
        if not d[package].has_key(version):
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    maintainer_list = []
    for maintainer_id in maintainers.keys():
        maintainer_list.append(get_maintainer(maintainer_id).name)
    summary = ""
    removals = d.keys()
    removals.sort()
    versions = []
    for package in removals:
        versions = d[package].keys()
        versions.sort(apt_pkg.version_compare)
        for version in versions:
            d[package][version].sort(utils.arch_compare_sw)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
    print "Will remove the following packages from %s:" % (suites_list)
    print
    print summary
    print "Maintainer: %s" % ", ".join(maintainer_list)
    if Options["Done"]:
        print "Will also close bugs: "+Options["Done"]
    if carbon_copy:
        print "Will also send CCs to: " + ", ".join(carbon_copy)
    if Options["Do-Close"]:
        print "Will also close associated bug reports."
    print
    print "------------------- Reason -------------------"
    print Options["Reason"]
    print "----------------------------------------------"
    print

    if Options["Rdep-Check"]:
        arches = utils.split_args(Options["Architecture"])
        reverse_depends_check(removals, suites[0], arches, session)

    # If -n/--no-action, drop out here
    if Options["No-Action"]:
        sys.exit(0)

    print "Going to remove the packages now."
    game_over()

    whoami = utils.whoami()
    date = commands.getoutput('date -R')

    # Log first; if it all falls apart I want a record that we at least tried.
    logfile = utils.open_file(cnf["Rm::LogFile"], 'a')
    logfile.write("=========================================================================\n")
    logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
    logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
    if Options["Done"]:
        logfile.write("Closed bugs: %s\n" % (Options["Done"]))
    logfile.write("\n------------------- Reason -------------------\n%s\n" % (Options["Reason"]))
    logfile.write("----------------------------------------------\n")

    # Do the same in rfc822 format
    logfile822 = utils.open_file(cnf["Rm::LogFile822"], 'a')
    logfile822.write("Date: %s\n" % date)
    logfile822.write("Ftpmaster: %s\n" % whoami)
    logfile822.write("Suite: %s\n" % suites_list)
    sources = []
    binaries = []
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    element[2] = sub("source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))
    if sources:
        logfile822.write("Sources:\n")
        for source in sources:
            logfile822.write(" %s\n" % source)
    if binaries:
        logfile822.write("Binaries:\n")
        for binary in binaries:
            logfile822.write(" %s\n" % binary)
    logfile822.write("Reason: %s\n" % Options["Reason"].replace('\n', '\n '))
    if Options["Done"]:
        logfile822.write("Bug: %s\n" % Options["Done"])

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    # Do the actual deletion
    print "Deleting...",
    sys.stdout.flush()

    for i in to_remove:
        package = i[0]
        architecture = i[2]
        package_id = i[3]
        for suite_id in suite_ids_list:
            if architecture == "source":
                session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                {'packageid': package_id, 'suiteid': suite_id})
                #print "DELETE FROM src_associations WHERE source = %s AND suite = %s" % (package_id, suite_id)
            else:
                session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                {'packageid': package_id, 'suiteid': suite_id})
                #print "DELETE FROM bin_associations WHERE bin = %s AND suite = %s" % (package_id, suite_id)
            # Delete from the override file
            if not Options["Partial"]:
                if architecture == "source":
                    type_id = dsc_type_id
                else:
                    type_id = deb_type_id
                # TODO: Again, fix this properly to remove the remaining non-bind argument
                session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (over_con_components), {'package': package, 'typeid': type_id, 'suiteid': suite_id})
    session.commit()
    print "done."

    # If we don't have a Bug server configured, we're done
    if not cnf.has_key("Dinstall::BugServer"):
        if Options["Done"] or Options["Do-Close"]:
            print "Cannot send mail to BugServer as Dinstall::BugServer is not configured"

        logfile.write("=========================================================================\n")
        logfile.close()

        logfile822.write("\n")
        logfile822.close()

        return

    # read common subst variables for all bug closure mails
    Subst_common = {}
    Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
    Subst_common["__CC__"] = "X-DAK: dak rm"
    if carbon_copy:
        Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
    Subst_common["__SUITE_LIST__"] = suites_list
    Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
    Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
    Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
    Subst_common["__WHOAMI__"] = whoami

    # Send the bug closing messages
    if Options["Done"]:
        Subst_close_rm = Subst_common
        bcc = []
        if cnf.find("Dinstall::Bcc") != "":
            bcc.append(cnf["Dinstall::Bcc"])
        if cnf.find("Rm::Bcc") != "":
            bcc.append(cnf["Rm::Bcc"])
        if bcc:
            Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst_close_rm["__BCC__"] = "X-Filler: 42"
        summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, Options["Reason"])
        summarymail += "----------------------------------------------\n"
        Subst_close_rm["__SUMMARY__"] = summarymail

        whereami = utils.where_am_i()
        Archive = get_archive(whereami, session)
        if Archive is None:
            utils.warn("Cannot find archive %s.  Setting blank values for origin" % whereami)
            Subst_close_rm["__MASTER_ARCHIVE__"] = ""
            Subst_close_rm["__PRIMARY_MIRROR__"] = ""
        else:
            Subst_close_rm["__MASTER_ARCHIVE__"] = Archive.origin_server
            Subst_close_rm["__PRIMARY_MIRROR__"] = Archive.primary_mirror

        for bug in utils.split_args(Options["Done"]):
            Subst_close_rm["__BUG_NUMBER__"] = bug
            if Options["Do-Close"]:
                mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close-with-related")
            else:
                mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
            utils.send_mail(mail_message)

    # close associated bug reports
    if Options["Do-Close"]:
        Subst_close_other = Subst_common
        bcc = []
        wnpp = utils.parse_wnpp_bug_file()
        versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
        if len(versions) == 1:
            Subst_close_other["__VERSION__"] = versions[0]
        else:
            utils.fubar("Closing bugs with multiple package versions is not supported.  Do it yourself.")
        if bcc:
            Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst_close_other["__BCC__"] = "X-Filler: 42"
        # at this point, I just assume, that the first closed bug gives
        # some useful information on why the package got removed
        Subst_close_other["__BUG_NUMBER__"] = utils.split_args(Options["Done"])[0]
        if len(sources) == 1:
            source_pkg = source.split("_", 1)[0]
        else:
            utils.fubar("Closing bugs for multiple source pakcages is not supported.  Do it yourself.")
        Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
        Subst_close_other["__SOURCE__"] = source_pkg
        other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open')
        if other_bugs:
            logfile.write("Also closing bug(s):")
            logfile822.write("Also-Bugs:")
            for bug in other_bugs:
                Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                logfile.write(" " + str(bug))
                logfile822.write(" " + str(bug))
            logfile.write("\n")
            logfile822.write("\n")
        if source_pkg in wnpp.keys():
            logfile.write("Also closing WNPP bug(s):")
            logfile822.write("Also-WNPP:")
            for bug in wnpp[source_pkg]:
                # the wnpp-rm file we parse also contains our removal
                # bugs, filtering that out
                if bug != Subst_close_other["__BUG_NUMBER__"]:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
            logfile.write("\n")
            logfile822.write("\n")

        mail_message = utils.TemplateSubst(Subst_close_other,cnf["Dir::Templates"]+"/rm.bug-close-related")
        if Subst_close_other["__BUG_NUMBER_ALSO__"]:
            utils.send_mail(mail_message)


    logfile.write("=========================================================================\n")
    logfile.close()

    logfile822.write("\n")
    logfile822.close()
Exemplo n.º 16
0
def do_update(self):
    print "Adding known_changes table"

    try:
        c = self.db.cursor()
        c.execute("""
                    CREATE TABLE known_changes (
                    id SERIAL PRIMARY KEY,
                    changesname TEXT NOT NULL,
                    seen TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(),
                    source TEXT NOT NULL,
                    binaries TEXT NOT NULL,
                    architecture TEXT NOT NULL,
                    version TEXT NOT NULL,
                    distribution TEXT NOT NULL,
                    urgency TEXT NOT NULL,
                    maintainer TEXT NOT NULL,
                    fingerprint TEXT NOT NULL,
                    changedby TEXT NOT NULL,
                    date TEXT NOT NULL,
                    UNIQUE (changesname)
            )
        """)
        c.execute("CREATE INDEX changesname_ind ON known_changes(changesname)")
        c.execute("CREATE INDEX changestimestamp_ind ON known_changes(seen)")
        c.execute("CREATE INDEX changessource_ind ON known_changes(source)")
        c.execute("CREATE INDEX changesdistribution_ind ON known_changes(distribution)")
        c.execute("CREATE INDEX changesurgency_ind ON known_changes(urgency)")

        c.execute("GRANT ALL ON known_changes TO ftpmaster;")
        c.execute("GRANT SELECT ON known_changes TO public;")

        c.execute("UPDATE config SET value = '18' WHERE name = 'db_revision'")
        self.db.commit()

        print "Done. Now looking for old changes files"
        count = 0
        failure = 0
        cnf = Config()
        for directory in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]:
            checkdir = cnf["Dir::Queue::%s" % (directory) ]
            if os.path.exists(checkdir):
                print "Looking into %s" % (checkdir)
                for filename in os.listdir(checkdir):
                    if not filename.endswith(".changes"):
                        # Only interested in changes files.
                        continue
                    try:
                        count += 1
                        print "Directory %s, file %7d, failures %3d. (%s)" % (directory, count, failure, filename)
                        changes = Changes()
                        changes.changes_file = filename
                        changesfile = os.path.join(checkdir, filename)
                        changes.changes = parse_changes(changesfile, signing_rules=-1)
                        changes.changes["fingerprint"] = check_signature(changesfile)
                        changes.add_known_changes(directory)
                    except InvalidDscError as line:
                        warn("syntax error in .dsc file '%s', line %s." % (f, line))
                        failure += 1
                    except ChangesUnicodeError:
                        warn("found invalid changes file, not properly utf-8 encoded")
                        failure += 1

    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError("Unable to apply knownchanges update 18, rollback issued. Error message : %s" % (str(msg)))
Exemplo n.º 17
0
 def assertParse(self, filename, *args):
     """Parse the named fixture file, forwarding extra args to parse_changes."""
     fixture_path = fixture(filename)
     return parse_changes(fixture_path, *args)
Exemplo n.º 18
0
 def assertParse(self, filename, *args):
     """Helper: resolve *filename* to a fixture path and parse it."""
     # Resolve the fixture location first, then delegate the real work.
     path = fixture(filename)
     result = parse_changes(path, *args)
     return result
Exemplo n.º 19
0
def do_update(self):
    print "Adding known_changes table"

    try:
        c = self.db.cursor()
        c.execute("""
                    CREATE TABLE known_changes (
                    id SERIAL PRIMARY KEY,
                    changesname TEXT NOT NULL,
                    seen TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(),
                    source TEXT NOT NULL,
                    binaries TEXT NOT NULL,
                    architecture TEXT NOT NULL,
                    version TEXT NOT NULL,
                    distribution TEXT NOT NULL,
                    urgency TEXT NOT NULL,
                    maintainer TEXT NOT NULL,
                    fingerprint TEXT NOT NULL,
                    changedby TEXT NOT NULL,
                    date TEXT NOT NULL,
                    UNIQUE (changesname)
            )
        """)
        c.execute("CREATE INDEX changesname_ind ON known_changes(changesname)")
        c.execute("CREATE INDEX changestimestamp_ind ON known_changes(seen)")
        c.execute("CREATE INDEX changessource_ind ON known_changes(source)")
        c.execute(
            "CREATE INDEX changesdistribution_ind ON known_changes(distribution)"
        )
        c.execute("CREATE INDEX changesurgency_ind ON known_changes(urgency)")

        c.execute("GRANT ALL ON known_changes TO ftpmaster;")
        c.execute("GRANT SELECT ON known_changes TO public;")

        c.execute("UPDATE config SET value = '18' WHERE name = 'db_revision'")
        self.db.commit()

        print "Done. Now looking for old changes files"
        count = 0
        failure = 0
        cnf = Config()
        for directory in [
                "Accepted", "Byhand", "Done", "New", "ProposedUpdates",
                "OldProposedUpdates"
        ]:
            checkdir = cnf["Dir::Queue::%s" % (directory)]
            if os.path.exists(checkdir):
                print "Looking into %s" % (checkdir)
                for filename in os.listdir(checkdir):
                    if not filename.endswith(".changes"):
                        # Only interested in changes files.
                        continue
                    try:
                        count += 1
                        print "Directory %s, file %7d, failures %3d. (%s)" % (
                            directory, count, failure, filename)
                        changes = Changes()
                        changes.changes_file = filename
                        changesfile = os.path.join(checkdir, filename)
                        changes.changes = parse_changes(changesfile,
                                                        signing_rules=-1)
                        changes.changes["fingerprint"] = check_signature(
                            changesfile)
                        changes.add_known_changes(directory)
                    except InvalidDscError as line:
                        warn("syntax error in .dsc file '%s', line %s." %
                             (f, line))
                        failure += 1
                    except ChangesUnicodeError:
                        warn(
                            "found invalid changes file, not properly utf-8 encoded"
                        )
                        failure += 1

    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError(
            "Unable to apply knownchanges update 18, rollback issued. Error message : %s"
            % (str(msg)))