def main ():
    global Cnf, db_files, waste, excluded

#    Cnf = utils.get_conf()

    Arguments = [('h',"help","Examine-Package::Options::Help"),
                 ('H',"html-output","Examine-Package::Options::Html-Output"),
                ]
    for i in [ "Help", "Html-Output", "partial-html" ]:
        if not Cnf.has_key("Examine-Package::Options::%s" % (i)):
            Cnf["Examine-Package::Options::%s" % (i)] = ""

    args = apt_pkg.parse_commandline(Cnf,Arguments,sys.argv)

    Options = Cnf.subtree("Examine-Package::Options")

    if Options["Help"]:
        usage()

    if Options["Html-Output"]:
        global use_html
        use_html = True

    stdout_fd = sys.stdout

    for f in args:
        try:
            if not Options["Html-Output"]:
                # Pipe output for each argument through less
                less_cmd = ("less", "-R", "-")
                less_process = daklib.daksubprocess.Popen(less_cmd, stdin=subprocess.PIPE, bufsize=0)
                less_fd = less_process.stdin
                # -R added to display raw control chars for colour
                sys.stdout = less_fd
            try:
                if f.endswith(".changes"):
                    check_changes(f)
                elif f.endswith(".deb") or f.endswith(".udeb"):
                    # default to unstable when we don't have a .changes file
                    # perhaps this should be a command line option?
                    print check_deb('unstable', f)
                elif f.endswith(".dsc"):
                    print check_dsc('unstable', f)
                else:
                    utils.fubar("Unrecognised file type: '%s'." % (f))
            finally:
                print output_package_relations()
                if not Options["Html-Output"]:
                    # Reset stdout here so future less invocations aren't FUBAR
                    less_fd.close()
                    less_process.wait()
                    sys.stdout = stdout_fd
        except IOError as e:
            if errno.errorcode[e.errno] == 'EPIPE':
                utils.warn("[examine-package] Caught EPIPE; skipping.")
                pass
            else:
                raise
        except KeyboardInterrupt:
            utils.warn("[examine-package] Caught C-c; skipping.")
            pass
def init (self):
    cnf = Config()
    arguments = [('h', "help", "Update-DB::Options::Help")]
    for i in [ "help" ]:
        if not cnf.has_key("Update-DB::Options::%s" % (i)):
            cnf["Update-DB::Options::%s" % (i)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    options = cnf.subtree("Update-DB::Options")
    if options["Help"]:
        self.usage()
    elif arguments:
        utils.warn("dak update-db takes no arguments.")
        self.usage(exit_code=1)

    try:
        if os.path.isdir(cnf["Dir::Lock"]):
            lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'dinstall.lock'), os.O_RDWR | os.O_CREAT)
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        else:
            utils.warn("Lock directory doesn't exist yet - not locking")
    except IOError as e:
        if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
            utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")

    self.update_db()
def init(self):
    cnf = Config()
    arguments = [('h', "help", "Update-DB::Options::Help"),
                 ("y", "yes", "Update-DB::Options::Yes")]
    for i in ["help"]:
        key = "Update-DB::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    options = cnf.subtree("Update-DB::Options")
    if options["Help"]:
        self.usage()
    elif arguments:
        utils.warn("dak update-db takes no arguments.")
        self.usage(exit_code=1)

    try:
        if os.path.isdir(cnf["Dir::Lock"]):
            lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'daily.lock'), os.O_RDONLY | os.O_CREAT)
            fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        else:
            utils.warn("Lock directory doesn't exist yet - not locking")
    except IOError as e:
        if e.errno in (errno.EACCES, errno.EAGAIN):
            utils.fubar("Couldn't obtain lock, looks like archive is doing something, try again later.")

    self.update_db()
def check_files_in_dsc():
    """
    Ensure each .dsc lists appropriate files in its Files field (according
    to the format announced in its Format field).
    """
    count = 0

    print "Building list of database files..."
    # Match every pool file ending in ".dsc" (SQL LIKE wildcard, not a regex).
    q = DBConn().session().query(PoolFile).filter(PoolFile.filename.like('%.dsc'))

    if q.count() > 0:
        print "Checking %d files..." % q.count()
    else:
        print "No files to check."

    for pf in q.all():
        filename = os.path.abspath(os.path.join(pf.location.path, pf.filename))

        try:
            # NB: don't enforce .dsc syntax
            dsc = utils.parse_changes(filename, dsc_file=1)
        except:
            utils.fubar("error parsing .dsc file '%s'." % (filename))

        reasons = utils.check_dsc_files(filename, dsc)
        for r in reasons:
            utils.warn(r)

        if len(reasons) > 0:
            count += 1

    if count:
        utils.warn("Found %s invalid .dsc files." % (count))
def version_checks(package, architecture, target_suite, new_version, session, force=False):
    if architecture == "source":
        suite_version_list = get_suite_version_by_source(package, session)
    else:
        suite_version_list = get_suite_version_by_package(package, architecture, session)

    must_be_newer_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan")]
    must_be_older_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan")]

    # Must be newer than an existing version in target_suite
    if target_suite not in must_be_newer_than:
        must_be_newer_than.append(target_suite)

    violations = False

    for suite, version in suite_version_list:
        cmp = apt_pkg.version_compare(new_version, version)
        # for control-suite we allow equal version (for uploads, we don't)
        if suite in must_be_newer_than and cmp < 0:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* newer than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True
        if suite in must_be_older_than and cmp > 0:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* older than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True

    if violations:
        if force:
            utils.warn("Continuing anyway (forced)...")
        else:
            utils.fubar("Aborting. Version checks violated and not forced.")
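# For reference, a minimal sketch (not part of dak) of the comparison semantics
# version_checks() relies on: apt_pkg.version_compare() returns a negative, zero
# or positive integer (not necessarily -1/0/1), so the checks only test the sign.
# Assumes python-apt is installed.
import apt_pkg

apt_pkg.init_system()
assert apt_pkg.version_compare("1.0-1", "1.0-2") < 0   # strictly older
assert apt_pkg.version_compare("1.0-1", "1.0-1") == 0  # equal
assert apt_pkg.version_compare("1.0-2", "1.0-1") > 0   # strictly newer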
def init(self):
    cnf = Config()
    arguments = [('h', "help", "Update-DB::Options::Help"),
                 ("y", "yes", "Update-DB::Options::Yes")]
    for i in ["help"]:
        if not cnf.has_key("Update-DB::Options::%s" % (i)):
            cnf["Update-DB::Options::%s" % (i)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    options = cnf.subtree("Update-DB::Options")
    if options["Help"]:
        self.usage()
    elif arguments:
        utils.warn("dak update-db takes no arguments.")
        self.usage(exit_code=1)

    try:
        if os.path.isdir(cnf["Dir::Lock"]):
            lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'dinstall.lock'), os.O_RDWR | os.O_CREAT)
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        else:
            utils.warn("Lock directory doesn't exist yet - not locking")
    except IOError as e:
        if errno.errorcode[e.errno] == 'EACCES' or errno.errorcode[e.errno] == 'EAGAIN':
            utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")

    self.update_db()
def version_checks(package, architecture, target_suite, new_version, session, force=False):
    if architecture == "source":
        suite_version_list = get_suite_version_by_source(package, session)
    else:
        suite_version_list = get_suite_version_by_package(package, architecture, session)

    must_be_newer_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan")]
    must_be_older_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan")]

    # Must be newer than an existing version in target_suite
    if target_suite not in must_be_newer_than:
        must_be_newer_than.append(target_suite)

    violations = False

    for suite, version in suite_version_list:
        cmp = apt_pkg.version_compare(new_version, version)
        # strict checks: an equal version also counts as a violation here
        if suite in must_be_newer_than and cmp < 1:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* newer than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True
        if suite in must_be_older_than and cmp > -1:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* older than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True

    if violations:
        if force:
            utils.warn("Continuing anyway (forced)...")
        else:
            utils.fubar("Aborting. Version checks violated and not forced.")
def main():
    cnf = Config()

    Arguments = [('a', "architecture", "Ls::Options::Architecture", "HasArg"),
                 ('b', "binarytype", "Ls::Options::BinaryType", "HasArg"),
                 ('c', "component", "Ls::Options::Component", "HasArg"),
                 ('f', "format", "Ls::Options::Format", "HasArg"),
                 ('g', "greaterorequal", "Ls::Options::GreaterOrEqual"),
                 ('G', "greaterthan", "Ls::Options::GreaterThan"),
                 ('r', "regex", "Ls::Options::Regex"),
                 ('s', "suite", "Ls::Options::Suite", "HasArg"),
                 ('S', "source-and-binary", "Ls::Options::Source-And-Binary"),
                 ('h', "help", "Ls::Options::Help")]
    for i in ["architecture", "binarytype", "component", "format",
              "greaterorequal", "greaterthan", "regex", "suite",
              "source-and-binary", "help"]:
        key = "Ls::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    packages = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Ls::Options")

    if Options["Help"]:
        usage()
    if not packages:
        utils.fubar("need at least one package name as an argument.")

    # Handle buildd maintenance helper options
    if Options["GreaterOrEqual"] or Options["GreaterThan"]:
        if Options["GreaterOrEqual"] and Options["GreaterThan"]:
            utils.fubar("-g/--greaterorequal and -G/--greaterthan are mutually exclusive.")
        if not Options["Suite"]:
            Options["Suite"] = "unstable"

    kwargs = dict()

    if Options["Regex"]:
        kwargs['regex'] = True
    if Options["Source-And-Binary"]:
        kwargs['source_and_binary'] = True
    if Options["Suite"]:
        kwargs['suites'] = utils.split_args(Options['Suite'])
    if Options["Architecture"]:
        kwargs['architectures'] = utils.split_args(Options['Architecture'])
    if Options['BinaryType']:
        kwargs['binary_types'] = utils.split_args(Options['BinaryType'])
    if Options['Component']:
        kwargs['components'] = utils.split_args(Options['Component'])

    if Options['Format']:
        kwargs['format'] = Options['Format']
    if Options['GreaterOrEqual']:
        kwargs['highest'] = '>='
    elif Options['GreaterThan']:
        kwargs['highest'] = '>>'

    for line in list_packages(packages, **kwargs):
        print(line)
def edit_transitions():
    """ Edit the defined transitions. """
    trans_file = Cnf["Dinstall::ReleaseTransitions"]
    edit_file = temp_transitions_file(load_transitions(trans_file))

    editor = os.environ.get("EDITOR", "vi")

    while True:
        result = os.system("%s %s" % (editor, edit_file))
        if result != 0:
            os.unlink(edit_file)
            utils.fubar("%s invocation failed for %s, not removing tempfile." % (editor, edit_file))

        # Now try to load the new file
        test = load_transitions(edit_file)

        if test is None:
            # Edit is broken
            print("Edit was unparsable.")
            prompt = "[E]dit again, Drop changes?"
            default = "E"
        else:
            print("Edit looks okay.\n")
            print("The following transitions are defined:")
            print("------------------------------------------------------------------------")
            transition_info(test)

            prompt = "[S]ave, Edit again, Drop changes?"
            default = "S"

        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            if answer == "":
                answer = default
            answer = answer[:1].upper()

        if answer == 'E':
            continue
        elif answer == 'D':
            os.unlink(edit_file)
            print("OK, discarding changes")
            sys.exit(0)
        elif answer == 'S':
            # Ready to save
            break
        else:
            print("You pressed something you shouldn't have :(")
            sys.exit(1)

    # We seem to be done and also have a working file. Copy over.
    write_transitions_from_file(edit_file)
    os.unlink(edit_file)

    print("Transitions file updated.")
def main():
    global Options
    cnf = Config()

    Arguments = [('h', "help", "Auto-Decruft::Options::Help"),
                 ('n', "dry-run", "Auto-Decruft::Options::Dry-Run"),
                 ('d', "debug", "Auto-Decruft::Options::Debug"),
                 ('s', "suite", "Auto-Decruft::Options::Suite", "HasArg"),
                 # The "\0" seems to be the only way to disable short options.
                 ("\0", 'if-newer-version-in', "Auto-Decruft::Options::OtherSuite", "HasArg"),
                 ("\0", 'if-newer-version-in-rm-msg', "Auto-Decruft::Options::OtherSuiteRMMsg", "HasArg")]
    for i in ["help", "Dry-Run", "Debug", "OtherSuite", "OtherSuiteRMMsg"]:
        if not cnf.has_key("Auto-Decruft::Options::%s" % (i)):
            cnf["Auto-Decruft::Options::%s" % (i)] = ""

    cnf["Auto-Decruft::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Auto-Decruft::Options")
    if Options["Help"]:
        usage()

    debug = False
    dryrun = False
    if Options["Dry-Run"]:
        dryrun = True
    if Options["Debug"]:
        debug = True
    if Options["OtherSuite"] and not Options["OtherSuiteRMMsg"]:
        utils.fubar("--if-newer-version-in requires --if-newer-version-in-rm-msg")

    session = DBConn().session()

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    auto_decruft_suite(suite_name, suite_id, session, dryrun, debug)

    if Options["OtherSuite"]:
        osuite = get_suite(Options["OtherSuite"].lower(), session).suite_name
        decruft_newer_version_in(osuite, suite_name, suite_id, Options["OtherSuiteRMMsg"], session, dryrun)

    if not dryrun:
        session.commit()
def main():
    global Options
    cnf = Config()

    Arguments = [('h', "help", "Auto-Decruft::Options::Help"),
                 ('n', "dry-run", "Auto-Decruft::Options::Dry-Run"),
                 ('d', "debug", "Auto-Decruft::Options::Debug"),
                 ('s', "suite", "Auto-Decruft::Options::Suite", "HasArg"),
                 # The "\0" seems to be the only way to disable short options.
                 ("\0", 'if-newer-version-in', "Auto-Decruft::Options::OtherSuite", "HasArg"),
                 ("\0", 'if-newer-version-in-rm-msg', "Auto-Decruft::Options::OtherSuiteRMMsg", "HasArg"),
                 ("\0", 'decruft-equal-versions', "Auto-Decruft::Options::OtherSuiteDecruftEqual")]
    for i in ["help", "Dry-Run", "Debug", "OtherSuite", "OtherSuiteRMMsg", "OtherSuiteDecruftEqual"]:
        key = "Auto-Decruft::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    cnf["Auto-Decruft::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Auto-Decruft::Options")
    if Options["Help"]:
        usage()

    debug = False
    dryrun = False
    decruft_equal_versions = False
    if Options["Dry-Run"]:
        dryrun = True
    if Options["Debug"]:
        debug = True
    if Options["OtherSuiteDecruftEqual"]:
        decruft_equal_versions = True
    if Options["OtherSuite"] and not Options["OtherSuiteRMMsg"]:
        utils.fubar("--if-newer-version-in requires --if-newer-version-in-rm-msg")

    session = DBConn().session()

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    auto_decruft_suite(suite_name, suite_id, session, dryrun, debug)

    if Options["OtherSuite"]:
        osuite = get_suite(Options["OtherSuite"].lower(), session).suite_name
        decruft_newer_version_in(osuite, suite_name, suite_id, Options["OtherSuiteRMMsg"], session, dryrun, decruft_equal_versions)

    if not dryrun:
        session.commit()
def spawn(command):
    if not re_taint_free.match(command):
        utils.fubar("Invalid character in \"%s\"." % (command))

    if Options["No-Action"]:
        print "[%s]" % (command)
    else:
        (result, output) = commands.getstatusoutput(command)
        if (result != 0):
            utils.fubar("Invocation of '%s' failed:\n%s\n" % (command, output), result)
def do_dir(target, config_name):
    """If 'target' exists, make sure it is a directory.  If it doesn't, create it."""
    if os.path.exists(target):
        if not os.path.isdir(target):
            utils.fubar("%s (%s) is not a directory." % (target, config_name))
    else:
        print "Creating %s ..." % (target)
        os.makedirs(target)
def edit_transitions():
    """ Edit the defined transitions. """
    trans_file = Cnf["Dinstall::ReleaseTransitions"]
    edit_file = temp_transitions_file(load_transitions(trans_file))

    editor = os.environ.get("EDITOR", "vi")

    while True:
        result = os.system("%s %s" % (editor, edit_file))
        if result != 0:
            os.unlink(edit_file)
            utils.fubar("%s invocation failed for %s, not removing tempfile." % (editor, edit_file))

        # Now try to load the new file
        test = load_transitions(edit_file)

        if test == None:
            # Edit is broken
            print "Edit was unparsable."
            prompt = "[E]dit again, Drop changes?"
            default = "E"
        else:
            print "Edit looks okay.\n"
            print "The following transitions are defined:"
            print "------------------------------------------------------------------------"
            transition_info(test)

            prompt = "[S]ave, Edit again, Drop changes?"
            default = "S"

        answer = "XXX"
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            if answer == "":
                answer = default
            answer = answer[:1].upper()

        if answer == 'E':
            continue
        elif answer == 'D':
            os.unlink(edit_file)
            print "OK, discarding changes"
            sys.exit(0)
        elif answer == 'S':
            # Ready to save
            break
        else:
            print "You pressed something you shouldn't have :("
            sys.exit(1)

    # We seem to be done and also have a working file. Copy over.
    write_transitions_from_file(edit_file)
    os.unlink(edit_file)

    print "Transitions file updated."
def spawn(command):
    if not re_taint_free.match(command):
        utils.fubar("Invalid character in \"%s\"." % (command))

    if Options["No-Action"]:
        print("[%s]" % (command))
    else:
        try:
            subprocess.check_output(command.split(), stderr=subprocess.STDOUT)
        except subprocess.CalledProcessError as e:
            utils.fubar("Invocation of '%s' failed:\n%s\n" % (command, e.output.rstrip()), e.returncode)
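# Minimal sketch (not part of dak) of the failure path the py3 spawn() above
# handles: subprocess.check_output() raises CalledProcessError, which carries
# the exit status and the captured output. Assumes a POSIX `false` command.
import subprocess

try:
    subprocess.check_output(["false"], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
    assert e.returncode != 0
    assert isinstance(e.output, bytes)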
def edit_new(overrides, upload, session):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    print_new(upload, overrides, indexed=0, session=session, file=temp_file)
    temp_file.close()

    # Spawn an editor on that file
    editor = os.environ.get("EDITOR", "vi")
    result = os.system("%s %s" % (editor, temp_filename))
    if result != 0:
        utils.fubar("%s invocation failed for %s." % (editor, temp_filename), result)

    # Read the edited data back in
    temp_file = utils.open_file(temp_filename)
    lines = temp_file.readlines()
    temp_file.close()
    os.unlink(temp_filename)

    overrides_map = dict([((o['type'], o['package']), o) for o in overrides])
    new_overrides = []
    # Parse the new data
    for line in lines:
        line = line.strip()
        if line == "" or line[0] == '#':
            continue
        s = line.split()
        # Pad the list if necessary
        s[len(s):3] = [None] * (3 - len(s))
        (pkg, priority, section) = s[:3]
        if pkg.find(':') != -1:
            type, pkg = pkg.split(':', 1)
        else:
            type = 'deb'
        o = overrides_map.get((type, pkg), None)
        if o is None:
            utils.warn("Ignoring unknown package '%s'" % (pkg))
        else:
            if section.find('/') != -1:
                component = section.split('/', 1)[0]
            else:
                component = 'main'
            new_overrides.append(dict(
                    package=pkg,
                    type=type,
                    section=section,
                    component=component,
                    priority=priority,
                    included=o['included'],
                    ))
    return new_overrides
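# Small illustration (not from dak) of the list-padding idiom used in edit_new()
# above: a short "package [priority [section]]" line is padded to exactly three
# elements so the unpacking into (pkg, priority, section) cannot fail.
s = "mypkg optional".split()
s[len(s):3] = [None] * (3 - len(s))
assert s == ["mypkg", "optional", None]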
def main():
    cnf = Config()

    Arguments = [('h', "help", "Make-Overrides::Options::Help")]
    for i in ["help"]:
        key = "Make-Overrides::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Overrides::Options")
    if Options["Help"]:
        usage()

    d = DBConn()
    session = d.session()

    for suite in session.query(Suite).filter(Suite.overrideprocess == True):  # noqa:E712
        if suite.untouchable:
            print("Skipping %s as it is marked as untouchable" % suite.suite_name)
            continue

        sys.stderr.write("Processing %s...\n" % (suite.suite_name))

        override_suite = suite.overridecodename or suite.codename

        for component in session.query(Component).all():
            for otype in session.query(OverrideType).all():
                otype_name = otype.overridetype
                cname = component.component_name

                # TODO: Stick suffix info in database (or get rid of it)
                if otype_name == "deb":
                    suffix = ""
                elif otype_name == "udeb":
                    if cname == "contrib":
                        continue  # Ick2
                    suffix = ".debian-installer"
                elif otype_name == "dsc":
                    suffix = ".src"
                else:
                    utils.fubar("Don't understand OverrideType %s" % otype.overridetype)

                cname = cname.replace('/', '_')
                filename = os.path.join(cnf["Dir::Override"], "override.%s.%s%s" % (override_suite, cname, suffix))

                output_file = utils.open_file(filename, 'w')
                do_list(output_file, suite, component, otype, session)
                output_file.close()
def main():
    global Options, Logger

    cnf = Config()

    for i in ["Help", "No-Action", "Maximum"]:
        if not cnf.has_key("Clean-Suites::Options::%s" % (i)):
            cnf["Clean-Suites::Options::%s" % (i)] = ""

    Arguments = [('h',"help","Clean-Suites::Options::Help"),
                 ('n',"no-action","Clean-Suites::Options::No-Action"),
                 ('m',"maximum","Clean-Suites::Options::Maximum", "HasArg")]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Clean-Suites::Options")

    if cnf["Clean-Suites::Options::Maximum"] != "":
        try:
            # Only use Maximum if it's an integer
            max_delete = int(cnf["Clean-Suites::Options::Maximum"])
            if max_delete < 1:
                utils.fubar("If given, Maximum must be at least 1")
        except ValueError as e:
            utils.fubar("If given, Maximum must be an integer")
    else:
        max_delete = None

    if Options["Help"]:
        usage()

    Logger = daklog.Logger("clean-suites", debug=Options["No-Action"])

    session = DBConn().session()

    now_date = datetime.now()

    # Stay of execution; default to 1.5 days
    soe = int(cnf.get('Clean-Suites::StayOfExecution', '129600'))

    delete_date = now_date - timedelta(seconds=soe)

    check_binaries(now_date, delete_date, max_delete, session)
    clean_binaries(now_date, delete_date, max_delete, session)
    check_sources(now_date, delete_date, max_delete, session)
    check_files(now_date, delete_date, max_delete, session)
    clean(now_date, delete_date, max_delete, session)
    clean_maintainers(now_date, delete_date, max_delete, session)
    clean_fingerprints(now_date, delete_date, max_delete, session)
    clean_empty_directories(session)

    Logger.close()
def main():
    Cnf = utils.get_conf()
    Arguments = [('h', 'help', 'Make-Changelog::Options::Help'),
                 ('a', 'archive', 'Make-Changelog::Options::Archive', 'HasArg'),
                 ('s', 'suite', 'Make-Changelog::Options::Suite', 'HasArg'),
                 ('b', 'base-suite', 'Make-Changelog::Options::Base-Suite', 'HasArg'),
                 ('n', 'binnmu', 'Make-Changelog::Options::binNMU'),
                 ('e', 'export', 'Make-Changelog::Options::export'),
                 ('p', 'progress', 'Make-Changelog::Options::progress')]

    for i in ['help', 'suite', 'base-suite', 'binnmu', 'export', 'progress']:
        key = 'Make-Changelog::Options::%s' % i
        if key not in Cnf:
            Cnf[key] = ''

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
    Options = Cnf.subtree('Make-Changelog::Options')
    suite = Cnf['Make-Changelog::Options::Suite']
    base_suite = Cnf['Make-Changelog::Options::Base-Suite']
    binnmu = Cnf['Make-Changelog::Options::binNMU']
    export = Cnf['Make-Changelog::Options::export']
    progress = Cnf['Make-Changelog::Options::progress']

    if Options['help'] or not (suite and base_suite) and not export:
        usage()

    for s in suite, base_suite:
        if not export and not get_suite(s):
            utils.fubar('Invalid suite "%s"' % s)

    session = DBConn().session()

    if export:
        archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
        exportpath = archive.changelog
        if exportpath:
            export_files(session, archive, exportpath, progress)
            generate_export_filelist(exportpath)
        else:
            utils.fubar('No changelog export path defined')
    elif binnmu:
        display_changes(get_binary_uploads(suite, base_suite, session), 3)
    else:
        display_changes(get_source_uploads(suite, base_suite, session), 2)

    session.commit()
def main():
    cnf = Config()

    Arguments = [('h', "help", "Make-Overrides::Options::Help")]
    for i in ["help"]:
        key = "Make-Overrides::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Overrides::Options")
    if Options["Help"]:
        usage()

    d = DBConn()
    session = d.session()

    for suite in session.query(Suite).filter(Suite.overrideprocess == True):  # noqa:E712
        if suite.untouchable:
            print("Skipping %s as it is marked as untouchable" % suite.suite_name)
            continue

        print("Processing %s..." % (suite.suite_name), file=sys.stderr)

        override_suite = suite.overridecodename or suite.codename

        for component in session.query(Component).all():
            for otype in session.query(OverrideType).all():
                otype_name = otype.overridetype
                cname = component.component_name

                # TODO: Stick suffix info in database (or get rid of it)
                if otype_name == "deb":
                    suffix = ""
                elif otype_name == "udeb":
                    if cname == "contrib":
                        continue  # Ick2
                    suffix = ".debian-installer"
                elif otype_name == "dsc":
                    suffix = ".src"
                else:
                    utils.fubar("Don't understand OverrideType %s" % otype.overridetype)

                cname = cname.replace('/', '_')
                filename = os.path.join(cnf["Dir::Override"], "override.%s.%s%s" % (override_suite, cname, suffix))

                output_file = utils.open_file(filename, 'w')
                do_list(output_file, suite, component, otype, session)
                output_file.close()
def main():
    Cnf = utils.get_conf()
    Arguments = [('h','help','Make-Changelog::Options::Help'),
                 ('a','archive','Make-Changelog::Options::Archive','HasArg'),
                 ('s','suite','Make-Changelog::Options::Suite','HasArg'),
                 ('b','base-suite','Make-Changelog::Options::Base-Suite','HasArg'),
                 ('n','binnmu','Make-Changelog::Options::binNMU'),
                 ('e','export','Make-Changelog::Options::export'),
                 ('p','progress','Make-Changelog::Options::progress')]

    for i in ['help', 'suite', 'base-suite', 'binnmu', 'export', 'progress']:
        if not Cnf.has_key('Make-Changelog::Options::%s' % (i)):
            Cnf['Make-Changelog::Options::%s' % (i)] = ''

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
    Options = Cnf.subtree('Make-Changelog::Options')
    suite = Cnf['Make-Changelog::Options::Suite']
    base_suite = Cnf['Make-Changelog::Options::Base-Suite']
    binnmu = Cnf['Make-Changelog::Options::binNMU']
    export = Cnf['Make-Changelog::Options::export']
    progress = Cnf['Make-Changelog::Options::progress']

    if Options['help'] or not (suite and base_suite) and not export:
        usage()

    for s in suite, base_suite:
        if not export and not get_suite(s):
            utils.fubar('Invalid suite "%s"' % s)

    session = DBConn().session()

    if export:
        archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()
        exportpath = archive.changelog
        if exportpath:
            export_files(session, archive, exportpath, progress)
            generate_export_filelist(exportpath)
        else:
            utils.fubar('No changelog export path defined')
    elif binnmu:
        display_changes(get_binary_uploads(suite, base_suite, session), 3)
    else:
        display_changes(get_source_uploads(suite, base_suite, session), 2)

    session.commit()
def lock_file(f):
    """
    Lock a file

    @attention: This function may run B{within sudo}
    """
    for retry in range(10):
        lock_fd = os.open(f, os.O_RDWR | os.O_CREAT)
        try:
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
            return lock_fd
        except OSError as e:
            if errno.errorcode[e.errno] == "EACCES" or errno.errorcode[e.errno] == "EEXIST":
                print "Unable to get lock for %s (try %d of 10)" % (f, retry + 1)
                time.sleep(60)
            else:
                raise

    utils.fubar("Couldn't obtain lock for %s." % (f))
def main():
    Cnf = utils.get_conf()
    cnf = Config()
    Arguments = [('h','help','Make-Changelog::Options::Help'),
                 ('s','suite','Make-Changelog::Options::Suite','HasArg'),
                 ('b','base-suite','Make-Changelog::Options::Base-Suite','HasArg'),
                 ('n','binnmu','Make-Changelog::Options::binNMU'),
                 ('e','export','Make-Changelog::Options::export')]

    for i in ['help', 'suite', 'base-suite', 'binnmu', 'export']:
        if not Cnf.has_key('Make-Changelog::Options::%s' % (i)):
            Cnf['Make-Changelog::Options::%s' % (i)] = ''

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)
    Options = Cnf.subtree('Make-Changelog::Options')
    suite = Cnf['Make-Changelog::Options::Suite']
    base_suite = Cnf['Make-Changelog::Options::Base-Suite']
    binnmu = Cnf['Make-Changelog::Options::binNMU']
    export = Cnf['Make-Changelog::Options::export']

    if Options['help'] or not (suite and base_suite) and not export:
        usage()

    for s in suite, base_suite:
        if not export and not get_suite(s):
            utils.fubar('Invalid suite "%s"' % s)

    session = DBConn().session()

    if export:
        if cnf.exportpath:
            exportpath = os.path.join(Cnf['Dir::Export'], cnf.exportpath)
            export_files(session, Cnf['Dir::Pool'], exportpath)
        else:
            utils.fubar('No changelog export path defined')
    elif binnmu:
        display_changes(get_binary_uploads(suite, base_suite, session), 3)
    else:
        display_changes(get_source_uploads(suite, base_suite, session), 2)

    session.commit()
def list_overrides(suite, component, otype, session):
    dat = {}
    s = get_suite(suite, session)
    if s is None:
        utils.fubar("Suite '%s' not recognised." % (suite))
    dat['suiteid'] = s.suite_id

    c = get_component(component, session)
    if c is None:
        utils.fubar("Component '%s' not recognised." % (component))
    dat['componentid'] = c.component_id

    o = get_override_type(otype)
    if o is None:
        utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (otype))
    dat['typeid'] = o.overridetype_id

    if otype == "dsc":
        q = session.execute("""SELECT o.package, s.section, o.maintainer
                                 FROM override o, section s
                                WHERE o.suite = :suiteid AND o.component = :componentid
                                  AND o.type = :typeid AND o.section = s.id
                             ORDER BY s.section, o.package""", dat)
        for i in q.fetchall():
            print(utils.result_join(i))
    else:
        q = session.execute("""SELECT o.package, p.priority, s.section, o.maintainer, p.level
                                 FROM override o, priority p, section s
                                WHERE o.suite = :suiteid AND o.component = :componentid
                                  AND o.type = :typeid AND o.priority = p.id AND o.section = s.id
                             ORDER BY s.section, p.level, o.package""", dat)
        for i in q.fetchall():
            print(utils.result_join(i[:-1]))
def init(cnf):
    global delete_date, del_dir

    # Used for directory naming
    now_date = datetime.now()

    # Used for working out times
    delete_date = int(time.time()) - (int(Options["Days"]) * 86400)  # 86400 seconds per day

    morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue'))
    morguesubdir = cnf.get("Clean-Queues::MorgueSubDir", 'queue')

    # Build directory as morguedir/morguesubdir/year/month/day
    del_dir = os.path.join(morguedir,
                           morguesubdir,
                           str(now_date.year),
                           '%.2d' % now_date.month,
                           '%.2d' % now_date.day)

    # Ensure a directory exists to remove files to
    if not Options["No-Action"]:
        if not os.path.exists(del_dir):
            os.makedirs(del_dir, 0o2775)
        if not os.path.isdir(del_dir):
            utils.fubar("%s must be a directory." % (del_dir))

    # Move to the directory to clean
    incoming = Options.get("Incoming")
    if not incoming:
        incoming = cnf.get('Dir::Unchecked')
        if not incoming:
            utils.fubar("Cannot find 'unchecked' directory")

    try:
        os.chdir(incoming)
    except OSError as e:
        utils.fubar("Cannot chdir to %s" % incoming)
def edit_new (new, upload):
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    print_new (new, upload, indexed=0, file=temp_file)
    temp_file.close()
    # Spawn an editor on that file
    editor = os.environ.get("EDITOR","vi")
    result = os.system("%s %s" % (editor, temp_filename))
    if result != 0:
        utils.fubar ("%s invocation failed for %s." % (editor, temp_filename), result)
    # Read the edited data back in
    temp_file = utils.open_file(temp_filename)
    lines = temp_file.readlines()
    temp_file.close()
    os.unlink(temp_filename)
    # Parse the new data
    for line in lines:
        line = line.strip()
        if line == "":
            continue
        s = line.split()
        # Pad the list if necessary
        s[len(s):3] = [None] * (3-len(s))
        (pkg, priority, section) = s[:3]
        if not new.has_key(pkg):
            utils.warn("Ignoring unknown package '%s'" % (pkg))
        else:
            # Strip off any invalid markers, print_new will readd them.
            if section.endswith("[!]"):
                section = section[:-3]
            if priority.endswith("[!]"):
                priority = priority[:-3]
            for f in new[pkg]["files"]:
                upload.pkg.files[f]["section"] = section
                upload.pkg.files[f]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["priority"] = priority
def __call__(self, package: str, architecture: str, new_version: str):
    if architecture == "source":
        suite_version_list = get_suite_version_by_source(package, self.session)
    else:
        suite_version_list = get_suite_version_by_package(package, architecture, self.session)

    violations = False

    for suite, version in suite_version_list:
        cmp = apt_pkg.version_compare(new_version, version)
        # for control-suite we allow equal version (for uploads, we don't)
        if suite in self.must_be_newer_than and cmp < 0:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* newer than %s in %s" % (package, architecture, new_version, self.target_suite, version, suite))
            violations = True
        if suite in self.must_be_older_than and cmp > 0:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* older than %s in %s" % (package, architecture, new_version, self.target_suite, version, suite))
            violations = True

    if violations:
        if self.force:
            utils.warn("Continuing anyway (forced)...")
        else:
            utils.fubar("Aborting. Version checks violated and not forced.")
def init (cnf):
    global delete_date, del_dir

    # Used for directory naming
    now_date = datetime.now()

    # Used for working out times
    delete_date = int(time.time()) - (int(Options["Days"]) * 86400)  # 86400 seconds per day

    morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue'))
    morguesubdir = cnf.get("Clean-Queues::MorgueSubDir", 'queue')

    # Build directory as morguedir/morguesubdir/year/month/day
    del_dir = os.path.join(morguedir,
                           morguesubdir,
                           str(now_date.year),
                           '%.2d' % now_date.month,
                           '%.2d' % now_date.day)

    # Ensure a directory exists to remove files to
    if not Options["No-Action"]:
        if not os.path.exists(del_dir):
            os.makedirs(del_dir, 0o2775)
        if not os.path.isdir(del_dir):
            utils.fubar("%s must be a directory." % (del_dir))

    # Move to the directory to clean
    incoming = Options["Incoming"]
    if incoming == "":
        incoming_queue = get_policy_queue('unchecked')
        if not incoming_queue:
            utils.fubar("Cannot find 'unchecked' queue")
        incoming = incoming_queue.path

    try:
        os.chdir(incoming)
    except OSError as e:
        utils.fubar("Cannot chdir to %s" % incoming)
def update_db(self):
    # Ok, try and find the configuration table
    print("Determining dak database revision ...")
    cnf = Config()
    logger = Logger('update-db')
    modules = []

    try:
        # Build a connect string
        if "DB::Service" in cnf:
            connect_str = "service=%s" % cnf["DB::Service"]
        else:
            connect_str = "dbname=%s" % (cnf["DB::Name"])
            if "DB::Host" in cnf and cnf["DB::Host"] != '':
                connect_str += " host=%s" % (cnf["DB::Host"])
            if "DB::Port" in cnf and cnf["DB::Port"] != '-1':
                connect_str += " port=%d" % (int(cnf["DB::Port"]))

        self.db = psycopg2.connect(connect_str)

        db_role = cnf.get("DB::Role")
        if db_role:
            self.db.cursor().execute('SET ROLE "{}"'.format(db_role))

    except Exception as e:
        print("FATAL: Failed connect to database (%s)" % str(e))
        sys.exit(1)

    database_revision = int(self.get_db_rev())
    logger.log(['transaction id before update: %s' % self.get_transaction_id()])

    if database_revision == -1:
        print("dak database schema predates update-db.")
        print("")
        print("This script will attempt to upgrade it to the latest, but may fail.")
        print("Please make sure you have a database backup handy. If you don't, press Ctrl-C now!")
        print("")
        print("Continuing in five seconds ...")
        time.sleep(5)
        print("")
        print("Attempting to upgrade pre-zero database to zero")
        self.update_db_to_zero()
        database_revision = 0

    dbfiles = glob(os.path.join(os.path.dirname(__file__), 'dakdb/update*.py'))
    required_database_schema = max(map(int, findall(r'update(\d+).py', " ".join(dbfiles))))

    print("dak database schema at %d" % database_revision)
    print("dak version requires schema %d" % required_database_schema)

    if database_revision < required_database_schema:
        print("\nUpdates to be applied:")
        for i in range(database_revision, required_database_schema):
            i += 1
            dakdb = __import__("dakdb", globals(), locals(), ['update' + str(i)])
            update_module = getattr(dakdb, "update" + str(i))
            print("Update %d: %s" % (i, next(s for s in update_module.__doc__.split("\n") if s)))
            modules.append((update_module, i))
        if not Config().find_b("Update-DB::Options::Yes", False):
            prompt = "\nUpdate database? (y/N) "
            answer = utils.our_raw_input(prompt)
            if answer.upper() != 'Y':
                sys.exit(0)
    else:
        print("no updates required")
        logger.log(["no updates required"])
        sys.exit(0)

    for module in modules:
        (update_module, i) = module
        try:
            update_module.do_update(self)
            message = "updated database schema from %d to %d" % (database_revision, i)
            print(message)
            logger.log([message])
        except DBUpdateError as e:
            # Seems the update did not work.
            print("Was unable to update database schema from %d to %d." % (database_revision, i))
            print("The error message received was %s" % (e))
            logger.log(["DB Schema upgrade failed"])
            logger.close()
            utils.fubar("DB Schema upgrade failed")

        database_revision += 1

    logger.close()
def main ():
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    Arguments = [('h',"help","Cruft-Report::Options::Help"),
                 ('m',"mode","Cruft-Report::Options::Mode", "HasArg"),
                 ('R',"rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s',"suite","Cruft-Report::Options::Suite","HasArg"),
                 ('w',"wanna-build-dump","Cruft-Report::Options::Wanna-Build-Dump","HasArg")]
    for i in [ "help", "Rdep-Check" ]:
        if not cnf.has_key("Cruft-Report::Options::%s" % (i)):
            cnf["Cruft-Report::Options::%s" % (i)] = ""

    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    if not cnf.has_key("Cruft-Report::Options::Mode"):
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if not cnf.has_key("Cruft-Report::Options::Wanna-Build-Dump"):
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
    if Options["Mode"] == "daily":
        checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu" ]
    elif Options["Mode"] == "full":
        checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu",
                   "dubious nbs", "bnb", "bms", "anais" ]
    elif Options["Mode"] == "bdo":
        checks = [ "nbs", "obsolete source" ]
    else:
        utils.warn("%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood." % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}

    anais_output = ""

    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())
    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources.gz" % (suite.archive.path, suite_name, component)
        # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
        (fd, temp_filename) = utils.temp_filename()
        (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
        if (result != 0):
            sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
            sys.exit(result)

        sources = utils.open_file(temp_filename)
        Sources = apt_pkg.TagFile(sources)
        while Sources.step():
            source = Sources.section.find('Package')
            source_version = Sources.section.find('Version')
            architecture = Sources.section.find('Architecture')
            binaries = Sources.section.find('Binary')
            binaries_list = [ i.strip() for i in binaries.split(',') ]

            if "bnb" in checks:
                # Check for binaries not built on any architecture.
                for binary in binaries_list:
                    if not bins_in_suite.has_key(binary):
                        bin_not_built.setdefault(source, {})
                        bin_not_built[source][binary] = ""

            if "anais" in checks:
                anais_output += do_anais(architecture, binaries_list, source, session)

            # build indices for checking "no source" later
            source_index = component + '/' + source
            src_pkgs[source] = source_index
            for binary in binaries_list:
                bin_pkgs[binary] = source
            source_binaries[source] = binaries
            source_versions[source] = source_version

        sources.close()
        os.unlink(temp_filename)

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "experimental":
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [ a.arch_string for a in get_suite_architectures(suite_name,
                                                                         skipsrc=True, skipall=True,
                                                                         session=session) ]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
                continue
            filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (suite.archive.path, suite_name, component, architecture)
            # apt_pkg.TagFile needs a real file handle
            (fd, temp_filename) = utils.temp_filename()
            (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
            if (result != 0):
                sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
                sys.exit(result)

            if "nfu" in checks:
                nfu_packages.setdefault(architecture,[])
                nfu_entries = parse_nfu(architecture)

            packages = utils.open_file(temp_filename)
            Packages = apt_pkg.TagFile(packages)
            while Packages.step():
                package = Packages.section.find('Package')
                source = Packages.section.find('Source', "")
                version = Packages.section.find('Version')
                if source == "":
                    source = package
                if bin2source.has_key(package) and \
                       apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                else:
                    bin2source[package] = {}
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                if source.find("(") != -1:
                    m = re_extract_src_version.match(source)
                    source = m.group(1)
                    version = m.group(2)
                if not bin_pkgs.has_key(package):
                    nbs.setdefault(source,{})
                    nbs[source].setdefault(package, {})
                    nbs[source][package][version] = ""
                else:
                    if "nfu" in checks:
                        if package in nfu_entries and \
                               version != source_versions[source]: # only suggest to remove out-of-date packages
                            nfu_packages[architecture].append((package,version,source_versions[source]))

            packages.close()
            os.unlink(temp_filename)

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
    dubious_nbs = {}
    for source in nbs.keys():
        for package in nbs[source].keys():
            versions = nbs[source][package].keys()
            versions.sort(apt_pkg.version_compare)
            latest_version = versions.pop()
            source_version = source_versions.get(source,"0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package, suite_id, session)

    if "nviu" in checks:
        do_newer_version('unstable', 'experimental', 'NVIU', session)

    if "nvit" in checks:
        do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)

    ###

    if Options["Mode"] == "full":
        print "="*75
        print

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print "Unbuilt binary packages"
        print "-----------------------"
        print
        keys = bin_not_built.keys()
        keys.sort()
        for source in keys:
            binaries = bin_not_built[source].keys()
            binaries.sort()
            print " o %s: %s" % (source, ", ".join(binaries))
        print

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print "Architecture Not Allowed In Source"
        print "----------------------------------"
        print anais_output
        print

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)
def main():
    cnf = Config()

    Arguments = [('h',"help","Find-Null-Maintainers::Options::Help")]
    for i in [ "help" ]:
        if not cnf.has_key("Find-Null-Maintainers::Options::%s" % (i)):
            cnf["Find-Null-Maintainers::Options::%s" % (i)] = ""

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Find-Null-Maintainers::Options")
    if Options["Help"]:
        usage()

    if not cnf.has_key('Import-LDAP-Fingerprints::LDAPServer'):
        fubar("Import-LDAP-Fingerprints::LDAPServer not configured")

    if not cnf.has_key('Import-LDAP-Fingerprints::LDAPDn'):
        fubar("Import-LDAP-Fingerprints::LDAPDn not configured")

    session = DBConn().session()

    print "Getting info from the LDAP server..."
    LDAPDn = cnf["Import-LDAP-Fingerprints::LDAPDn"]
    LDAPServer = cnf["Import-LDAP-Fingerprints::LDAPServer"]
    l = ldap.open(LDAPServer)
    l.simple_bind_s("","")
    Attrs = l.search_s(LDAPDn, ldap.SCOPE_ONELEVEL,
                       "(&(keyfingerprint=*)(gidnumber=%s))" % (cnf["Import-Users-From-Passwd::ValidGID"]),
                       ["uid", "cn", "mn", "sn", "createTimestamp"])

    db_uid = {}
    db_unstable_uid = {}

    print "Getting UID info for entire archive..."
    q = session.execute("SELECT DISTINCT u.uid FROM uid u, fingerprint f WHERE f.uid = u.id")
    for i in q.fetchall():
        db_uid[i[0]] = ""

    print "Getting UID info for unstable..."
    q = session.execute("""
SELECT DISTINCT u.uid FROM suite su, src_associations sa, source s, fingerprint f, uid u
 WHERE f.uid = u.id AND sa.source = s.id AND sa.suite = su.id
   AND su.suite_name = 'unstable' AND s.sig_fpr = f.id
UNION
SELECT DISTINCT u.uid FROM suite su, bin_associations ba, binaries b, fingerprint f, uid u
 WHERE f.uid = u.id AND ba.bin = b.id AND ba.suite = su.id
   AND su.suite_name = 'unstable' AND b.sig_fpr = f.id""")
    for i in q.fetchall():
        db_unstable_uid[i[0]] = ""

    now = time.time()

    for i in Attrs:
        entry = i[1]
        uid = entry["uid"][0]
        created = time.mktime(time.strptime(entry["createTimestamp"][0][:8], '%Y%m%d'))
        diff = now - created
        # 31536000 is 1 year in seconds, i.e. 60 * 60 * 24 * 365
        if diff < 31536000 / 2:
            when = "Less than 6 months ago"
        elif diff < 31536000:
            when = "Less than 1 year ago"
        elif diff < 31536000 * 1.5:
            when = "Less than 18 months ago"
        elif diff < 31536000 * 2:
            when = "Less than 2 years ago"
        elif diff < 31536000 * 3:
            when = "Less than 3 years ago"
        else:
            when = "More than 3 years ago"
        name = " ".join([get_ldap_value(entry, "cn"),
                         get_ldap_value(entry, "mn"),
                         get_ldap_value(entry, "sn")])
        if not db_uid.has_key(uid):
            print "NONE %s (%s) %s" % (uid, name, when)
        else:
            if not db_unstable_uid.has_key(uid):
                print "NOT_UNSTABLE %s (%s) %s" % (uid, name, when)
def main():
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    Arguments = [('h', "help", "Cruft-Report::Options::Help"),
                 ('m', "mode", "Cruft-Report::Options::Mode", "HasArg"),
                 ('R', "rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s', "suite", "Cruft-Report::Options::Suite", "HasArg"),
                 ('w', "wanna-build-dump", "Cruft-Report::Options::Wanna-Build-Dump", "HasArg")]
    for i in ["help", "Rdep-Check"]:
        key = "Cruft-Report::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    if "Cruft-Report::Options::Mode" not in cnf:
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if "Cruft-Report::Options::Wanna-Build-Dump" not in cnf:
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
    if Options["Mode"] == "daily":
        checks = ["nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu", "nbs metadata"]
    elif Options["Mode"] == "full":
        checks = ["nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu", "nbs metadata",
                  "dubious nbs", "bnb", "bms", "anais"]
    elif Options["Mode"] == "bdo":
        checks = ["nbs", "obsolete source"]
    else:
        utils.warn("%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood." % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}

    anais_output = ""

    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())
    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "nbs metadata" in checks:
        reportNBSMetadata(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources" % (suite.archive.path, suite_name, component)
        filename = utils.find_possibly_compressed_file(filename)
        with apt_pkg.TagFile(filename) as Sources:
            while Sources.step():
                source = Sources.section.find('Package')
                source_version = Sources.section.find('Version')
                architecture = Sources.section.find('Architecture')
                binaries = Sources.section.find('Binary')
                binaries_list = [i.strip() for i in binaries.split(',')]

                if "bnb" in checks:
                    # Check for binaries not built on any architecture.
                    for binary in binaries_list:
                        if binary not in bins_in_suite:
                            bin_not_built.setdefault(source, {})
                            bin_not_built[source][binary] = ""

                if "anais" in checks:
                    anais_output += do_anais(architecture, binaries_list, source, session)

                # build indices for checking "no source" later
                source_index = component + '/' + source
                src_pkgs[source] = source_index
                for binary in binaries_list:
                    bin_pkgs[binary] = source
                source_binaries[source] = binaries
                source_versions[source] = source_version

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "experimental":
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [a.arch_string for a in get_suite_architectures(suite_name,
                                                                        skipsrc=True, skipall=True,
                                                                        session=session)]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
                continue

            if "nfu" in checks:
                nfu_packages.setdefault(architecture, [])
                nfu_entries = parse_nfu(architecture)

            filename = "%s/dists/%s/%s/binary-%s/Packages" % (suite.archive.path, suite_name, component, architecture)
            filename = utils.find_possibly_compressed_file(filename)
            with apt_pkg.TagFile(filename) as Packages:
                while Packages.step():
                    package = Packages.section.find('Package')
                    source = Packages.section.find('Source', "")
                    version = Packages.section.find('Version')
                    if source == "":
                        source = package
                    if package in bin2source and \
                            apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                        bin2source[package]["version"] = version
                        bin2source[package]["source"] = source
                    else:
                        bin2source[package] = {}
                        bin2source[package]["version"] = version
                        bin2source[package]["source"] = source
                    if source.find("(") != -1:
                        m = re_extract_src_version.match(source)
                        source = m.group(1)
                        version = m.group(2)
                    if package not in bin_pkgs:
                        nbs.setdefault(source, {})
                        nbs[source].setdefault(package, {})
                        nbs[source][package][version] = ""
                    else:
                        if "nfu" in checks:
                            if package in nfu_entries and \
                                    version != source_versions[source]:  # only suggest to remove out-of-date packages
                                nfu_packages[architecture].append((package, version, source_versions[source]))

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
    dubious_nbs = {}
    version_sort_key = functools.cmp_to_key(apt_pkg.version_compare)
    for source in nbs:
        for package in nbs[source]:
            latest_version = max(nbs[source][package], key=version_sort_key)
            source_version = source_versions.get(source, "0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package, suite_id, session)

    if "nviu" in checks:
        do_newer_version('unstable', 'experimental', 'NVIU', session)

    if "nvit" in checks:
        do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)

    ###

    if Options["Mode"] == "full":
        print("=" * 75)
        print()

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print("Unbuilt binary packages")
        print("-----------------------")
        print()
        for source in sorted(bin_not_built):
            binaries = sorted(bin_not_built[source])
            print(" o %s: %s" % (source, ", ".join(binaries)))
        print()

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print("Architecture Not Allowed In Source")
        print("----------------------------------")
        print(anais_output)
        print()

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)
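# Hedged sketch (not part of dak) of the "latest version" selection used above:
# functools.cmp_to_key() turns apt_pkg.version_compare() into a sort key, so
# max() picks the highest Debian version string. Assumes python-apt is installed.
import functools

import apt_pkg

apt_pkg.init_system()
versions = ["1.0-1", "1.0-10", "1.0-2"]
assert max(versions, key=functools.cmp_to_key(apt_pkg.version_compare)) == "1.0-10"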
def main():
    global Logger

    cnf = Config()
    Arguments = [('a', "add", "Control-Overrides::Options::Add"),
                 ('c', "component", "Control-Overrides::Options::Component", "HasArg"),
                 ('h', "help", "Control-Overrides::Options::Help"),
                 ('l', "list", "Control-Overrides::Options::List"),
                 ('q', "quiet", "Control-Overrides::Options::Quiet"),
                 ('s', "suite", "Control-Overrides::Options::Suite", "HasArg"),
                 ('S', "set", "Control-Overrides::Options::Set"),
                 ('C', "change", "Control-Overrides::Options::Change"),
                 ('n', "no-action", "Control-Overrides::Options::No-Action"),
                 ('t', "type", "Control-Overrides::Options::Type", "HasArg")]

    # Default arguments
    for i in ["add", "help", "list", "quiet", "set", "change", "no-action"]:
        key = "Control-Overrides::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    if "Control-Overrides::Options::Component" not in cnf:
        cnf["Control-Overrides::Options::Component"] = "main"
    if "Control-Overrides::Options::Suite" not in cnf:
        cnf["Control-Overrides::Options::Suite"] = "unstable"
    if "Control-Overrides::Options::Type" not in cnf:
        cnf["Control-Overrides::Options::Type"] = "deb"

    file_list = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    if cnf["Control-Overrides::Options::Help"]:
        usage()

    session = DBConn().session()

    mode = None
    for i in ["add", "list", "set", "change"]:
        if cnf["Control-Overrides::Options::%s" % (i)]:
            if mode:
                utils.fubar("Can not perform more than one action at once.")
            mode = i

    # Need an action...
    if mode is None:
        utils.fubar("No action specified.")

    (suite, component, otype) = (cnf["Control-Overrides::Options::Suite"],
                                 cnf["Control-Overrides::Options::Component"],
                                 cnf["Control-Overrides::Options::Type"])

    if mode == "list":
        list_overrides(suite, component, otype, session)
    else:
        if get_suite(suite).untouchable:
            utils.fubar("%s: suite is untouchable" % suite)

        action = True
        if cnf["Control-Overrides::Options::No-Action"]:
            utils.warn("In No-Action Mode")
            action = False

        Logger = daklog.Logger("control-overrides", mode)
        if file_list:
            for f in file_list:
                process_file(utils.open_file(f), suite, component, otype, mode, action, session)
        else:
            process_file(sys.stdin, suite, component, otype, mode, action, session)
        Logger.close()
def main(): global Options, Logger cnf = Config() for i in ["Help", "No-Action", "Maximum"]: key = "Clean-Suites::Options::%s" % i if key not in cnf: cnf[key] = "" Arguments = [('h', "help", "Clean-Suites::Options::Help"), ('a', 'archive', 'Clean-Suites::Options::Archive', 'HasArg'), ('n', "no-action", "Clean-Suites::Options::No-Action"), ('m', "maximum", "Clean-Suites::Options::Maximum", "HasArg")] apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) Options = cnf.subtree("Clean-Suites::Options") if cnf["Clean-Suites::Options::Maximum"] != "": try: # Only use Maximum if it's an integer max_delete = int(cnf["Clean-Suites::Options::Maximum"]) if max_delete < 1: utils.fubar("If given, Maximum must be at least 1") except ValueError as e: utils.fubar("If given, Maximum must be an integer") else: max_delete = None if Options["Help"]: usage() program = "clean-suites" if Options['No-Action']: program = "clean-suites (no action)" Logger = daklog.Logger(program, debug=Options["No-Action"]) session = DBConn().session() archives = None if 'Archive' in Options: archive_names = Options['Archive'].split(',') archives = session.query(Archive).filter( Archive.archive_name.in_(archive_names)).all() if len(archives) == 0: utils.fubar('Unknown archive.') now_date = datetime.now() set_archive_delete_dates(now_date, session) check_binaries(now_date, session) clean_binaries(now_date, session) check_sources(now_date, session) check_files(now_date, session) clean(now_date, archives, max_delete, session) clean_maintainers(now_date, session) clean_fingerprints(now_date, session) clean_byhash(now_date, session) clean_empty_directories(session) session.rollback() Logger.close()
def main(): cnf = Config() Arguments = [ ("h", "help", "Override::Options::Help"), ("c", "check", "Override::Options::Check"), ("d", "done", "Override::Options::Done", "HasArg"), ("n", "no-action", "Override::Options::No-Action"), ("s", "suite", "Override::Options::Suite", "HasArg"), ] for i in ["help", "check", "no-action"]: if not cnf.has_key("Override::Options::%s" % (i)): cnf["Override::Options::%s" % (i)] = "" if not cnf.has_key("Override::Options::Suite"): cnf["Override::Options::Suite"] = "unstable" arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) Options = cnf.subtree("Override::Options") if Options["Help"]: usage() session = DBConn().session() if not arguments: utils.fubar("package name is a required argument.") package = arguments.pop(0) suite_name = Options["Suite"] if arguments and len(arguments) > 2: utils.fubar("Too many arguments") suite = get_suite(suite_name, session) if suite is None: utils.fubar("Unknown suite '{0}'".format(suite_name)) if arguments and len(arguments) == 1: # Determine if the argument is a priority or a section... arg = arguments.pop() q = session.execute( """ SELECT ( SELECT COUNT(*) FROM section WHERE section = :arg ) AS secs, ( SELECT COUNT(*) FROM priority WHERE priority = :arg ) AS prios """, {"arg": arg}, ) r = q.fetchall() if r[0][0] == 1: arguments = (arg, ".") elif r[0][1] == 1: arguments = (".", arg) else: utils.fubar("%s is not a valid section or priority" % (arg)) # Retrieve current section/priority... oldsection, oldsourcesection, oldpriority = None, None, None for packagetype in ["source", "binary"]: eqdsc = "!=" if packagetype == "source": eqdsc = "=" q = session.execute( """ SELECT priority.priority AS prio, section.section AS sect, override_type.type AS type FROM override, priority, section, suite, override_type WHERE override.priority = priority.id AND override.type = override_type.id AND override_type.type %s 'dsc' AND override.section = section.id AND override.package = :package AND override.suite = suite.id AND suite.suite_name = :suite_name """ % (eqdsc), {"package": package, "suite_name": suite_name}, ) if q.rowcount == 0: continue if q.rowcount > 1: utils.fubar("%s is ambiguous. Matches %d packages" % (package, q.rowcount)) r = q.fetchone() if packagetype == "binary": oldsection = r[1] oldpriority = r[0] else: oldsourcesection = r[1] oldpriority = "source" if not oldpriority and not oldsourcesection: utils.fubar("Unable to find package %s" % (package)) if oldsection and oldsourcesection and oldsection != oldsourcesection: # When setting overrides, both source & binary will become the same section utils.warn("Source is in section '%s' instead of '%s'" % (oldsourcesection, oldsection)) if not oldsection: oldsection = oldsourcesection if not arguments: print "%s is in section '%s' at priority '%s'" % (package, oldsection, oldpriority) sys.exit(0) # At this point, we have a new section and priority... check they're valid... 
newsection, newpriority = arguments if newsection == ".": newsection = oldsection if newpriority == ".": newpriority = oldpriority s = get_section(newsection, session) if s is None: utils.fubar("Supplied section %s is invalid" % (newsection)) newsecid = s.section_id p = get_priority(newpriority, session) if p is None: utils.fubar("Supplied priority %s is invalid" % (newpriority)) newprioid = p.priority_id if newpriority == oldpriority and newsection == oldsection: print "I: Doing nothing" sys.exit(0) if oldpriority == "source" and newpriority != "source": utils.fubar("Trying to change priority of a source-only package") if Options["Check"] and newpriority != oldpriority: check_override_compliance(package, p, suite.archive.path, suite_name, cnf, session) # If we're in no-action mode if Options["No-Action"]: if newpriority != oldpriority: print "I: Would change priority from %s to %s" % (oldpriority, newpriority) if newsection != oldsection: print "I: Would change section from %s to %s" % (oldsection, newsection) if Options.has_key("Done"): print "I: Would also close bug(s): %s" % (Options["Done"]) sys.exit(0) if newpriority != oldpriority: print "I: Will change priority from %s to %s" % (oldpriority, newpriority) if newsection != oldsection: print "I: Will change section from %s to %s" % (oldsection, newsection) if not Options.has_key("Done"): pass # utils.warn("No bugs to close have been specified. Noone will know you have done this.") else: print "I: Will close bug(s): %s" % (Options["Done"]) game_over() Logger = daklog.Logger("override") dsc_otype_id = get_override_type("dsc").overridetype_id # We're already in a transaction # We're in "do it" mode, we have something to do... do it if newpriority != oldpriority: session.execute( """ UPDATE override SET priority = :newprioid WHERE package = :package AND override.type != :otypedsc AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""", {"newprioid": newprioid, "package": package, "otypedsc": dsc_otype_id, "suite_name": suite_name}, ) Logger.log(["changed priority", package, oldpriority, newpriority]) if newsection != oldsection: q = session.execute( """ UPDATE override SET section = :newsecid WHERE package = :package AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""", {"newsecid": newsecid, "package": package, "suite_name": suite_name}, ) Logger.log(["changed section", package, oldsection, newsection]) session.commit() if Options.has_key("Done"): if not cnf.has_key("Dinstall::BugServer"): utils.warn("Asked to send Done message but Dinstall::BugServer is not configured") Logger.close() return Subst = {} Subst["__OVERRIDE_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"] Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"] bcc = [] if cnf.find("Dinstall::Bcc") != "": bcc.append(cnf["Dinstall::Bcc"]) if bcc: Subst["__BCC__"] = "Bcc: " + ", ".join(bcc) else: Subst["__BCC__"] = "X-Filler: 42" if cnf.has_key("Dinstall::PackagesServer"): Subst["__CC__"] = "Cc: " + package + "@" + cnf["Dinstall::PackagesServer"] + "\nX-DAK: dak override" else: Subst["__CC__"] = "X-DAK: dak override" Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"] Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"] Subst["__WHOAMI__"] = utils.whoami() Subst["__SOURCE__"] = package summary = "Concerning package %s...\n" % (package) summary += "Operating on the %s suite\n" % (suite_name) if newpriority != oldpriority: summary += "Changed priority from %s to %s\n" % (oldpriority, newpriority) if newsection != oldsection: summary += "Changed 
section from %s to %s\n" % (oldsection, newsection) Subst["__SUMMARY__"] = summary template = os.path.join(cnf["Dir::Templates"], "override.bug-close") for bug in utils.split_args(Options["Done"]): Subst["__BUG_NUMBER__"] = bug mail_message = utils.TemplateSubst(Subst, template) utils.send_mail(mail_message) Logger.log(["closed bug", bug]) Logger.close()
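The single-argument case in the override command above relies on one query that counts matches in both the section and priority tables. A self-contained sketch of that classification idea (not dak code), using the stdlib sqlite3 module and made-up rows:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript("""
    CREATE TABLE section (section TEXT);
    CREATE TABLE priority (priority TEXT);
    INSERT INTO section VALUES ('devel'), ('libs');
    INSERT INTO priority VALUES ('optional'), ('extra');
""")

def classify(arg):
    # Mirrors the dual sub-select: count hits in both tables in one round trip.
    secs, prios = conn.execute(
        "SELECT (SELECT COUNT(*) FROM section WHERE section = :arg),"
        "       (SELECT COUNT(*) FROM priority WHERE priority = :arg)",
        {"arg": arg}).fetchone()
    if secs == 1:
        return ("section", arg)
    if prios == 1:
        return ("priority", arg)
    return (None, arg)

print(classify("devel"))     # ('section', 'devel')
print(classify("optional"))  # ('priority', 'optional')
print(classify("bogus"))     # (None, 'bogus')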
def main(): global Options cnf = Config() Arguments = [('h', "help", "Rm::Options::Help"), ('A', 'no-arch-all-rdeps', 'Rm::Options::NoArchAllRdeps'), ('a', "architecture", "Rm::Options::Architecture", "HasArg"), ('b', "binary", "Rm::Options::Binary"), ('B', "binary-only", "Rm::Options::Binary-Only"), ('c', "component", "Rm::Options::Component", "HasArg"), ('C', "carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"), # Bugs to Cc ('d', "done", "Rm::Options::Done", "HasArg"), # Bugs fixed ('D', "do-close", "Rm::Options::Do-Close"), ('R', "rdep-check", "Rm::Options::Rdep-Check"), ('m', "reason", "Rm::Options::Reason", "HasArg"), # Hysterical raisins; -m is old-dinstall option for rejection reason ('n', "no-action", "Rm::Options::No-Action"), ('p', "partial", "Rm::Options::Partial"), ('s', "suite", "Rm::Options::Suite", "HasArg"), ('S', "source-only", "Rm::Options::Source-Only"), ] for i in ['NoArchAllRdeps', "architecture", "binary", "binary-only", "carbon-copy", "component", "done", "help", "no-action", "partial", "rdep-check", "reason", "source-only", "Do-Close"]: key = "Rm::Options::%s" % (i) if key not in cnf: cnf[key] = "" if "Rm::Options::Suite" not in cnf: cnf["Rm::Options::Suite"] = "unstable" arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) Options = cnf.subtree("Rm::Options") if Options["Help"]: usage() session = DBConn().session() # Sanity check options if not arguments: utils.fubar("need at least one package name as an argument.") if Options["Architecture"] and Options["Source-Only"]: utils.fubar("can't use -a/--architecture and -S/--source-only options simultaneously.") if ((Options["Binary"] and Options["Source-Only"]) or (Options["Binary"] and Options["Binary-Only"]) or (Options["Binary-Only"] and Options["Source-Only"])): utils.fubar("Only one of -b/--binary, -B/--binary-only and -S/--source-only can be used.") if Options["Carbon-Copy"] and not Options["Done"]: utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.") if Options["Architecture"] and not Options["Partial"]: utils.warn("-a/--architecture implies -p/--partial.") Options["Partial"] = "true" if Options["Do-Close"] and not Options["Done"]: utils.fubar("No.") if (Options["Do-Close"] and (Options["Binary"] or Options["Binary-Only"] or Options["Source-Only"])): utils.fubar("No.") # Force the admin to tell someone if we're not doing a 'dak # cruft-report' inspired removal (or closing a bug, which counts # as telling someone). if not Options["No-Action"] and not Options["Carbon-Copy"] \ and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1: utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")
if Options["Binary"]: field = "b.package" else: field = "s.source" con_packages = "AND %s IN (%s)" % (field, ", ".join([repr(i) for i in arguments])) (con_suites, con_architectures, con_components, check_source) = \ utils.parse_args(Options) # Additional suite checks suite_ids_list = [] whitelists = [] suites = utils.split_args(Options["Suite"]) suites_list = utils.join_with_commas_and(suites) if not Options["No-Action"]: for suite in suites: s = get_suite(suite, session=session) if s is not None: suite_ids_list.append(s.suite_id) whitelists.append(s.mail_whitelist) if suite in ("oldstable", "stable"): print("**WARNING** About to remove from the (old)stable suite!") print("This should only be done just prior to a (point) release and not at") print("any other time.") game_over() elif suite == "testing": print("**WARNING** About to remove from the testing suite!") print("There's no need to do this normally as removals from unstable will") print("propagate to testing automagically.") game_over() # Additional architecture checks if Options["Architecture"] and check_source: utils.warn("'source' in -a/--architecture makes no sense and is ignored.") # Don't do dependency checks on multiple suites if Options["Rdep-Check"] and len(suites) > 1: utils.fubar("Reverse dependency check on multiple suites is not implemented.") to_remove = [] maintainers = {} # We have 3 modes of package selection: binary, source-only, binary-only # and source+binary. # XXX: TODO: This all needs converting to use placeholders or the object # API. It's an SQL injection dream at the moment if Options["Binary"]: # Removal by binary package name q = session.execute(""" SELECT b.package, b.version, a.arch_string, b.id, b.maintainer, s.source FROM binaries b JOIN source s ON s.id = b.source JOIN bin_associations ba ON ba.bin = b.id JOIN architecture a ON a.id = b.architecture JOIN suite su ON su.id = ba.suite JOIN files f ON f.id = b.file JOIN files_archive_map af ON af.file_id = f.id AND af.archive_id = su.archive_id JOIN component c ON c.id = af.component_id WHERE TRUE %s %s %s %s """ % (con_packages, con_suites, con_components, con_architectures)) to_remove.extend(q) else: # Source-only if not Options["Binary-Only"]: q = session.execute(""" SELECT s.source, s.version, 'source', s.id, s.maintainer, s.source FROM source s JOIN src_associations sa ON sa.source = s.id JOIN suite su ON su.id = sa.suite JOIN archive ON archive.id = su.archive_id JOIN files f ON f.id = s.file JOIN files_archive_map af ON af.file_id = f.id AND af.archive_id = su.archive_id JOIN component c ON c.id = af.component_id WHERE TRUE %s %s %s """ % (con_packages, con_suites, con_components)) to_remove.extend(q) if not Options["Source-Only"]: # Source + Binary q = session.execute(""" SELECT b.package, b.version, a.arch_string, b.id, b.maintainer, s.source FROM binaries b JOIN bin_associations ba ON b.id = ba.bin JOIN architecture a ON b.architecture = a.id JOIN suite su ON ba.suite = su.id JOIN archive ON archive.id = su.archive_id JOIN files_archive_map af ON b.file = af.file_id AND af.archive_id = archive.id JOIN component c ON af.component_id = c.id JOIN source s ON b.source = s.id JOIN src_associations sa ON s.id = sa.source AND sa.suite = su.id WHERE TRUE %s %s %s %s""" % (con_packages, con_suites, con_components, con_architectures)) to_remove.extend(q) if not to_remove: print("Nothing to do.") sys.exit(0) # Process -C/--carbon-copy # # Accept 3 types of arguments (space separated): # 1) a number - assumed to be a bug number; the Cc goes to that number at Dinstall::BugServer
# 2) the keyword 'package' - cc's <package> at Dinstall::PackagesServer for every source package being removed # 3) contains a '@' - assumed to be an email address, used unmodified # carbon_copy = [] for copy_to in utils.split_args(Options.get("Carbon-Copy")): if copy_to.isdigit(): if "Dinstall::BugServer" in cnf: carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"]) else: utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to) elif copy_to == 'package': for package in set([s[5] for s in to_remove]): if "Dinstall::PackagesServer" in cnf: carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"]) elif '@' in copy_to: carbon_copy.append(copy_to) else: utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to)) # If we don't have a reason; spawn an editor so the user can add one # Write the rejection email out as the <foo>.reason file if not Options["Reason"] and not Options["No-Action"]: (fd, temp_filename) = utils.temp_filename() editor = os.environ.get("EDITOR", "vi") result = os.system("%s %s" % (editor, temp_filename)) if result != 0: utils.fubar("%s invocation failed for `%s'!" % (editor, temp_filename), result) temp_file = utils.open_file(temp_filename) for line in temp_file.readlines(): Options["Reason"] += line temp_file.close() os.unlink(temp_filename) # Generate the summary of what's to be removed d = {} for i in to_remove: package = i[0] version = i[1] architecture = i[2] maintainer = i[4] maintainers[maintainer] = "" if package not in d: d[package] = {} if version not in d[package]: d[package][version] = [] if architecture not in d[package][version]: d[package][version].append(architecture) maintainer_list = [] for maintainer_id in maintainers.keys(): maintainer_list.append(get_maintainer(maintainer_id).name) summary = "" removals = sorted(d) for package in removals: versions = sorted(d[package], key=functools.cmp_to_key(apt_pkg.version_compare)) for version in versions: d[package][version].sort(key=utils.ArchKey) summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version])) print("Will remove the following packages from %s:" % (suites_list)) print() print(summary) print("Maintainer: %s" % ", ".join(maintainer_list)) if Options["Done"]: print("Will also close bugs: " + Options["Done"]) if carbon_copy: print("Will also send CCs to: " + ", ".join(carbon_copy)) if Options["Do-Close"]: print("Will also close associated bug reports.") print() print("------------------- Reason -------------------") print(Options["Reason"]) print("----------------------------------------------") print() if Options["Rdep-Check"]: arches = utils.split_args(Options["Architecture"]) include_arch_all = Options['NoArchAllRdeps'] == '' reverse_depends_check(removals, suites[0], arches, session, include_arch_all=include_arch_all) # If -n/--no-action, drop out here if Options["No-Action"]: sys.exit(0) print("Going to remove the packages now.") game_over() # Do the actual deletion print("Deleting...", end=' ') sys.stdout.flush() try: bugs = utils.split_args(Options["Done"]) remove(session, Options["Reason"], suites, to_remove, partial=Options["Partial"], components=utils.split_args(Options["Component"]), done_bugs=bugs, carbon_copy=carbon_copy, close_related_bugs=Options["Do-Close"]) except ValueError as ex: utils.fubar(str(ex)) else: print("done.")
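The -C/--carbon-copy handling above follows the three-way rule spelled out in the comment. A standalone sketch of that rule (not dak code); the server names are hypothetical stand-ins for Dinstall::BugServer and Dinstall::PackagesServer:

def expand_carbon_copy(args, removed_sources,
                       bug_server="bugs.example.org",            # stand-in for Dinstall::BugServer
                       packages_server="packages.example.org"):  # stand-in for Dinstall::PackagesServer
    carbon_copy = []
    for copy_to in args:
        if copy_to.isdigit():
            # a bare number is taken as a bug number at the BTS
            carbon_copy.append("%s@%s" % (copy_to, bug_server))
        elif copy_to == "package":
            # the keyword expands to one address per source package being removed
            carbon_copy.extend("%s@%s" % (src, packages_server) for src in sorted(removed_sources))
        elif "@" in copy_to:
            # anything containing '@' is used as a literal address
            carbon_copy.append(copy_to)
        else:
            raise ValueError("not a bug number, 'package' or email address: %r" % copy_to)
    return carbon_copy

print(expand_carbon_copy(["123456", "package", "someone@example.org"], {"foo", "bar"}))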
def main(): global Logger cnf = Config() Arguments = [('a', "add", "Control-Suite::Options::Add", "HasArg"), ('b', "britney", "Control-Suite::Options::Britney"), ('f', 'force', 'Control-Suite::Options::Force'), ('h', "help", "Control-Suite::Options::Help"), ('l', "list", "Control-Suite::Options::List", "HasArg"), ('r', "remove", "Control-Suite::Options::Remove", "HasArg"), ('s', "set", "Control-Suite::Options::Set", "HasArg")] for i in ["add", "britney", "help", "list", "remove", "set", "version"]: key = "Control-Suite::Options::%s" % i if key not in cnf: cnf[key] = "" try: file_list = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) except SystemError as e: print("%s\n" % e) usage(1) Options = cnf.subtree("Control-Suite::Options") if Options["Help"]: usage() force = "Force" in Options and Options["Force"] action = None for i in ("add", "list", "remove", "set"): if cnf["Control-Suite::Options::%s" % (i)] != "": suite_name = cnf["Control-Suite::Options::%s" % (i)] if action: utils.fubar("Can only perform one action at a time.") action = i # Need an action... if action is None: utils.fubar("No action specified.") britney = False if action == "set" and cnf["Control-Suite::Options::Britney"]: britney = True if action == "list": session = DBConn().session() suite = session.query(Suite).filter_by(suite_name=suite_name).one() get_list(suite, session) else: Logger = daklog.Logger("control-suite") with ArchiveTransaction() as transaction: session = transaction.session suite = session.query(Suite).filter_by(suite_name=suite_name).one() if action == "set" and not suite.allowcsset: if force: utils.warn("Would not normally allow setting suite {0} (allowcsset is FALSE), but --force used".format(suite_name)) else: utils.fubar("Will not reset suite {0} due to its database configuration (allowcsset is FALSE)".format(suite_name)) if file_list: for f in file_list: process_file(utils.open_file(f), suite, action, transaction, britney, force) else: process_file(sys.stdin, suite, action, transaction, britney, force) Logger.close()
def clean(now_date, delete_date, max_delete, session): cnf = Config() count = 0 size = 0 print "Cleaning out packages..." morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue')) morguesubdir = cnf.get("Clean-Suites::MorgueSubDir", 'pool') # Build directory as morguedir/morguesubdir/year/month/day dest = os.path.join(morguedir, morguesubdir, str(now_date.year), '%.2d' % now_date.month, '%.2d' % now_date.day) if not Options["No-Action"] and not os.path.exists(dest): os.makedirs(dest) # Delete from source print "Deleting from source table... " q = session.execute(""" SELECT s.id, f.filename FROM source s, files f WHERE f.last_used <= :deletedate AND s.file = f.id AND s.id NOT IN (SELECT src_id FROM extra_src_references)""", {'deletedate': delete_date}) for s in q.fetchall(): Logger.log(["delete source", s[1], s[0]]) if not Options["No-Action"]: session.execute("DELETE FROM dsc_files WHERE source = :s_id", {"s_id":s[0]}) session.execute("DELETE FROM source WHERE id = :s_id", {"s_id":s[0]}) if not Options["No-Action"]: session.commit() # Delete files from the pool old_files = session.query(PoolFile).filter(PoolFile.last_used <= delete_date) if max_delete is not None: old_files = old_files.limit(max_delete) print "Limiting removals to %d" % max_delete for pf in old_files: filename = os.path.join(pf.location.path, pf.filename) if not os.path.exists(filename): utils.warn("can not find '%s'." % (filename)) continue Logger.log(["delete pool file", filename]) if os.path.isfile(filename): if os.path.islink(filename): count += 1 Logger.log(["delete symlink", filename]) if not Options["No-Action"]: os.unlink(filename) else: size += os.stat(filename)[stat.ST_SIZE] count += 1 dest_filename = dest + '/' + os.path.basename(filename) # If the destination file exists; try to find another filename to use if os.path.exists(dest_filename): dest_filename = utils.find_next_free(dest_filename) Logger.log(["move to morgue", filename, dest_filename]) if not Options["No-Action"]: utils.move(filename, dest_filename) if not Options["No-Action"]: session.delete(pf) session.commit() else: utils.fubar("%s is neither symlink nor file?!" % (filename)) if count > 0: Logger.log(["total", count, utils.size_type(size)]) print "Cleaned %d files, %s." % (count, utils.size_type(size))
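The morgue destination above is just a dated directory plus a "find a free filename" fallback. A small sketch of both pieces (not dak code; next_free is a hypothetical stand-in for utils.find_next_free, whose real naming scheme may differ):

import os
from datetime import date

def morgue_dest(morguedir, morguesubdir, day=None):
    # morguedir/morguesubdir/YYYY/MM/DD, as built in clean() above
    day = day or date.today()
    return os.path.join(morguedir, morguesubdir, str(day.year), "%.2d" % day.month, "%.2d" % day.day)

def next_free(path):
    # hypothetical stand-in: append .1, .2, ... until the name is unused
    if not os.path.exists(path):
        return path
    n = 1
    while os.path.exists("%s.%d" % (path, n)):
        n += 1
    return "%s.%d" % (path, n)

print(morgue_dest("/srv/morgue", "pool", date(2024, 3, 7)))  # /srv/morgue/pool/2024/03/07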
def main(): global Logger, Options, sections, priorities cnf = Config() Arguments = [('h', "help", "Check-Overrides::Options::Help"), ('n', "no-action", "Check-Overrides::Options::No-Action")] for i in ["help", "no-action"]: key = "Check-Overrides::Options::%s" % i if key not in cnf: cnf[key] = "" apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) Options = cnf.subtree("Check-Overrides::Options") if Options["Help"]: usage() session = DBConn().session() # init sections, priorities: # We need forward and reverse sections = get_sections(session) for name, entry in list(sections.items()): sections[entry] = name priorities = get_priorities(session) for name, entry in list(priorities.items()): priorities[entry] = name if not Options["No-Action"]: Logger = daklog.Logger("check-overrides") else: Logger = daklog.Logger("check-overrides", 1) for suite in session.query(Suite).filter(Suite.overrideprocess == True): # noqa:E712 originosuite = None originremark = '' if suite.overrideorigin is not None: originosuite = get_suite(suite.overrideorigin, session) if originosuite is None: utils.fubar("%s has an override origin suite of %s but it doesn't exist!" % (suite.suite_name, suite.overrideorigin)) originosuite = originosuite.suite_name originremark = " taking missing from %s" % originosuite print("Processing %s%s..." % (suite.suite_name, originremark)) # Get a list of all suites that use the override file of 'suite.suite_name' as # well as the suite ocodename = suite.codename suiteids = [x.suite_id for x in session.query(Suite).filter(Suite.overridecodename == ocodename).all()] if suite.suite_id not in suiteids: suiteids.append(suite.suite_id) if len(suiteids) < 1: utils.fubar("Couldn't find id's of all suites: %s" % suiteids) for component in session.query(Component).all(): # It is crucial for the dsc override creation based on binary # overrides that 'dsc' goes first component_name = component.component_name otypes = ['dsc'] for ot in session.query(OverrideType): if ot.overridetype == 'dsc': continue otypes.append(ot.overridetype) for otype in otypes: print("Processing %s [%s - %s]" % (suite.suite_name, component_name, otype)) sys.stdout.flush() process(suite.suite_name, suiteids, originosuite, component_name, otype, session) Logger.close()
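The section/priority caches above are turned into two-way lookups by writing the reverse mapping back into the same dict. A minimal illustration (not dak code) with made-up data:

sections = {"devel": 1, "libs": 2}           # name -> id, as loaded from the database
for name, entry in list(sections.items()):   # list() so we can mutate while iterating
    sections[entry] = name                   # also map id -> name in the same dict

print(sections["devel"], sections[1])        # 1 devel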
def auto_decruft_suite(suite_name, suite_id, session, dryrun, debug): """Run the auto-decrufter on a given suite @type suite_name: string @param suite_name: The name of the suite to remove from @type suite_id: int @param suite_id: The id of the suite denoted by suite_name @type session: SQLA Session @param session: The database session in use @type dryrun: bool @param dryrun: If True, just print the actions rather than actually doing them @type debug: bool @param debug: If True, print some extra information """ all_architectures = [a.arch_string for a in get_suite_architectures(suite_name)] pkg_arch2groups = defaultdict(set) group_order = [] groups = {} full_removal_request = [] group_generator = chain( compute_sourceless_groups(suite_id, session), compute_nbs_groups(suite_id, suite_name, session) ) for group in group_generator: group_name = group["name"] pkgs = group["packages"] affected_archs = group["architectures"] # If we remove an arch:all package, then the breakage can occur on any # of the architectures. if "all" in affected_archs: affected_archs = all_architectures for pkg_arch in product(pkgs, affected_archs): pkg_arch2groups[pkg_arch].add(group_name) if group_name not in groups: groups[group_name] = group group_order.append(group_name) else: # This case usually happens when versions differ between architectures... if debug: print("N: Merging group %s" % (group_name)) groups[group_name] = merge_group(groups[group_name], group) for group_name in group_order: removal_request = groups[group_name]["removal_request"] full_removal_request.extend(removal_request.iteritems()) if not groups: if debug: print("N: Found no candidates") return if debug: print("N: Considering to remove the following packages:") for group_name in sorted(groups): group_info = groups[group_name] pkgs = group_info["packages"] archs = group_info["architectures"] print("N: * %s: %s [%s]" % (group_name, ", ".join(pkgs), " ".join(archs))) if debug: print("N: Compiling ReverseDependencyChecker (RDC) - please hold ...") rdc = ReverseDependencyChecker(session, suite_name) if debug: print("N: Computing initial breakage...") breakage = rdc.check_reverse_depends(full_removal_request) while breakage: by_breakers = [(len(breakage[x]), x, breakage[x]) for x in breakage] by_breakers.sort(reverse=True) if debug: print("N: - Removal would break %s (package, architecture)-pairs" % (len(breakage))) print("N: - full breakage:") for _, breaker, broken in by_breakers: bname = "%s/%s" % breaker broken_str = ", ".join("%s/%s" % b for b in sorted(broken)) print("N: * %s => %s" % (bname, broken_str)) averted_breakage = set() for _, package_arch, breakage in by_breakers: if breakage <= averted_breakage: # We already avoided this break continue guilty_groups = pkg_arch2groups[package_arch] if not guilty_groups: utils.fubar("Cannot figure what group provided %s" % str(package_arch)) if debug: # Only output it, if it truly a new group being discarded # - a group can reach this part multiple times, if it breaks things on # more than one architecture. This being rather common in fact. 
already_discard = True if any(group_name for group_name in guilty_groups if group_name in groups): already_discard = False if not already_discard: avoided = sorted(breakage - averted_breakage) print("N: - skipping removal of %s (breakage: %s)" % (", ".join(sorted(guilty_groups)), str(avoided))) averted_breakage |= breakage for group_name in guilty_groups: if group_name in groups: del groups[group_name] if not groups: if debug: print("N: Nothing left to remove") return if debug: print("N: Now considering to remove: %s" % str(", ".join(sorted(groups.iterkeys())))) # Rebuild the removal request with the remaining groups and off # we go to (not) break the world once more time full_removal_request = [] for group_info in groups.itervalues(): full_removal_request.extend(group_info["removal_request"].iteritems()) breakage = rdc.check_reverse_depends(full_removal_request) if debug: print("N: Removal looks good") if dryrun: print("Would remove the equivalent of:") for group_name in group_order: if group_name not in groups: continue group_info = groups[group_name] pkgs = group_info["packages"] archs = group_info["architectures"] message = group_info["message"] # Embed the -R just in case someone wants to run it manually later print(' dak rm -m "{message}" -s {suite} -a {architectures} -p -R -b {packages}'.format( message=message, suite=suite_name, architectures=",".join(archs), packages=" ".join(pkgs), )) print() print("Note: The removals may be interdependent. A non-breaking result may require the execution of all") print("of the removals") else: remove_groups(groups.itervalues(), suite_id, suite_name, session)
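auto_decruft_suite() maps every (package, architecture) pair back to the removal groups that touch it, expanding arch:all to every architecture of the suite. A standalone sketch of that indexing step (not dak code) with invented groups:

from collections import defaultdict
from itertools import product

all_architectures = ["amd64", "arm64"]
groups = [
    {"name": "src:foo", "packages": ["foo", "libfoo1"], "architectures": ["all"]},
    {"name": "src:bar", "packages": ["bar"], "architectures": ["amd64"]},
]

pkg_arch2groups = defaultdict(set)
for group in groups:
    archs = group["architectures"]
    if "all" in archs:
        # an arch:all package can break reverse dependencies on any architecture
        archs = all_architectures
    for pkg_arch in product(group["packages"], archs):
        pkg_arch2groups[pkg_arch].add(group["name"])

print(sorted(pkg_arch2groups[("foo", "arm64")]))   # ['src:foo']
print(sorted(pkg_arch2groups[("bar", "amd64")]))   # ['src:bar']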
def main(): global Cnf keyrings = None Cnf = utils.get_conf() Arguments = [('h', "help", "Add-User::Options::Help"), ('k', "key", "Add-User::Options::Key", "HasArg"), ('u', "user", "Add-User::Options::User", "HasArg"), ] for i in ["help"]: key = "Add-User::Options::%s" % i if key not in Cnf: Cnf[key] = "" apt_pkg.parse_commandline(Cnf, Arguments, sys.argv) Options = Cnf.subtree("Add-User::Options") if Options["help"]: usage() session = DBConn().session() if not keyrings: keyrings = get_active_keyring_paths() cmd = ["gpg", "--with-colons", "--no-secmem-warning", "--no-auto-check-trustdb", "--with-fingerprint", "--no-default-keyring"] cmd.extend(utils.gpg_keyring_args(keyrings).split()) cmd.extend(["--list-key", "--", Cnf["Add-User::Options::Key"]]) output = subprocess.check_output(cmd).rstrip() m = re_gpg_fingerprint_colon.search(output) if not m: print(output) utils.fubar("0x%s: (1) No fingerprint found in gpg output but it returned 0?\n%s" % (Cnf["Add-User::Options::Key"], utils.prefix_multi_line_string(output, " [GPG output:] "))) primary_key = m.group(1) primary_key = primary_key.replace(" ", "") uid = "" if "Add-User::Options::User" in Cnf and Cnf["Add-User::Options::User"]: uid = Cnf["Add-User::Options::User"] name = Cnf["Add-User::Options::User"] else: u = re_user_address.search(output) if not u: print(output) utils.fubar("0x%s: (2) No userid found in gpg output but it returned 0?\n%s" % (Cnf["Add-User::Options::Key"], utils.prefix_multi_line_string(output, " [GPG output:] "))) uid = u.group(1) n = re_user_name.search(output) name = n.group(1) # Look for all email addresses on the key. emails = [] for line in output.split('\n'): e = re_user_mails.search(line) if not e: continue emails.append(e.group(2)) print("0x%s -> %s <%s> -> %s -> %s" % (Cnf["Add-User::Options::Key"], name, emails[0], uid, primary_key)) prompt = "Add user %s with above data (y/N) ? " % (uid) yn = utils.our_raw_input(prompt).lower() if yn == "y": # Create an account for the user? summary = "" # Now add user to the database. # Note that we provide a session, so we're responsible for committing uidobj = get_or_set_uid(uid, session=session) uid_id = uidobj.uid_id session.commit() # Lets add user to the email-whitelist file if its configured. if "Dinstall::MailWhiteList" in Cnf and Cnf["Dinstall::MailWhiteList"] != "": f = utils.open_file(Cnf["Dinstall::MailWhiteList"], "a") for mail in emails: f.write(mail + '\n') f.close() print("Added:\nUid:\t %s (ID: %s)\nMaint:\t %s\nFP:\t %s" % (uid, uid_id, name, primary_key)) # Should we send mail to the newly added user? if Cnf.find_b("Add-User::SendEmail"): mail = name + "<" + emails[0] + ">" Subst = {} Subst["__NEW_MAINTAINER__"] = mail Subst["__UID__"] = uid Subst["__KEYID__"] = Cnf["Add-User::Options::Key"] Subst["__PRIMARY_KEY__"] = primary_key Subst["__FROM_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"] Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"] Subst["__HOSTNAME__"] = Cnf["Dinstall::MyHost"] Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"] Subst["__SUMMARY__"] = summary new_add_message = utils.TemplateSubst(Subst, Cnf["Dir::Templates"] + "/add-user.added") utils.send_mail(new_add_message) else: uid = None
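add-user extracts the fingerprint and user id from machine-readable gpg output. A rough, standalone sketch (not dak code) of reading --with-colons records by field position; the listing below is made up, and dak's real re_gpg_fingerprint_colon / re_user_* regexes are more careful than this:

sample = (
    "pub:u:4096:1:0123456789ABCDEF:1500000000:::u:::scESC::::::23::0:\n"
    "fpr:::::::::ABCDEF0123456789ABCDEF0123456789ABCDEF01:\n"
    "uid:u::::1500000000::0123456789ABCDEF0123456789ABCDEF01234567::Jane Developer <jane@example.org>::::::::::0:\n"
)

for line in sample.splitlines():
    fields = line.split(":")
    if fields[0] == "fpr":
        print("fingerprint:", fields[9])   # field 10 of an fpr record is the fingerprint
    elif fields[0] == "uid":
        print("user id:", fields[9])       # field 10 of a uid record is the user id string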
def clean(now_date, archives, max_delete, session): cnf = Config() count = 0 size = 0 Logger.log(["Cleaning out packages..."]) morguedir = cnf.get("Dir::Morgue", os.path.join("Dir::Pool", 'morgue')) morguesubdir = cnf.get("Clean-Suites::MorgueSubDir", 'pool') # Build directory as morguedir/morguesubdir/year/month/day dest = os.path.join(morguedir, morguesubdir, str(now_date.year), '%.2d' % now_date.month, '%.2d' % now_date.day) if not Options["No-Action"] and not os.path.exists(dest): os.makedirs(dest) # Delete from source Logger.log(["Deleting from source table..."]) q = session.execute(""" WITH deleted_sources AS ( DELETE FROM source USING files f WHERE source.file = f.id AND NOT EXISTS (SELECT 1 FROM files_archive_map af JOIN archive_delete_date ad ON af.archive_id = ad.archive_id WHERE af.file_id = source.file AND (af.last_used IS NULL OR af.last_used > ad.delete_date)) RETURNING source.id AS id, f.filename AS filename ), deleted_dsc_files AS ( DELETE FROM dsc_files df WHERE df.source IN (SELECT id FROM deleted_sources) RETURNING df.file AS file_id ), now_unused_source_files AS ( UPDATE files_archive_map af SET last_used = '1977-03-13 13:37:42' -- Kill it now. We waited long enough before removing the .dsc. WHERE af.file_id IN (SELECT file_id FROM deleted_dsc_files) AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = af.file_id) ) SELECT filename FROM deleted_sources""") for s in q: Logger.log(["delete source", s[0]]) if not Options["No-Action"]: session.commit() # Delete files from the pool old_files = session.query(ArchiveFile).filter( sql.text( 'files_archive_map.last_used <= (SELECT delete_date FROM archive_delete_date ad WHERE ad.archive_id = files_archive_map.archive_id)' )).join(Archive) if max_delete is not None: old_files = old_files.limit(max_delete) Logger.log(["Limiting removals to %d" % max_delete]) if archives is not None: archive_ids = [a.archive_id for a in archives] old_files = old_files.filter(ArchiveFile.archive_id.in_(archive_ids)) for af in old_files: filename = af.path try: st = os.lstat(filename) except FileNotFoundError: Logger.log(["database referred to non-existing file", filename]) session.delete(af) continue Logger.log(["delete archive file", filename]) if stat.S_ISLNK(st.st_mode): count += 1 Logger.log(["delete symlink", filename]) if not Options["No-Action"]: os.unlink(filename) session.delete(af) elif stat.S_ISREG(st.st_mode): size += st.st_size count += 1 dest_filename = dest + '/' + os.path.basename(filename) # If the destination file exists; try to find another filename to use if os.path.lexists(dest_filename): dest_filename = utils.find_next_free(dest_filename) if not Options["No-Action"]: if af.archive.use_morgue: Logger.log(["move to morgue", filename, dest_filename]) utils.move(filename, dest_filename) else: Logger.log(["removed file", filename]) os.unlink(filename) session.delete(af) else: utils.fubar("%s is neither symlink nor file?!" % (filename)) if count > 0: Logger.log(["total", count, utils.size_type(size)]) # Delete entries in files no longer referenced by any archive query = """ DELETE FROM files f WHERE NOT EXISTS (SELECT 1 FROM files_archive_map af WHERE af.file_id = f.id) """ session.execute(query) if not Options["No-Action"]: session.commit()
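The pool-cleaning loop above dispatches on lstat() results so that symlinks are merely unlinked while regular files are counted and moved to the morgue. A self-contained sketch of that dispatch (not dak code), run against a throwaway temporary directory on a POSIX system:

import os
import stat
import tempfile

with tempfile.TemporaryDirectory() as d:
    target = os.path.join(d, "file.deb")
    link = os.path.join(d, "link.deb")
    with open(target, "wb") as fh:
        fh.write(b"dummy contents")
    os.symlink(target, link)

    for path in (link, target):
        st = os.lstat(path)                 # lstat: look at the link itself, don't follow it
        if stat.S_ISLNK(st.st_mode):
            print("symlink, would just unlink:", path)
        elif stat.S_ISREG(st.st_mode):
            print("regular file, %d bytes, would go to the morgue: %s" % (st.st_size, path))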
def main (): global Options cnf = Config() Arguments = [('h',"help","Rm::Options::Help"), ('A','no-arch-all-rdeps','Rm::Options::NoArchAllRdeps'), ('a',"architecture","Rm::Options::Architecture", "HasArg"), ('b',"binary", "Rm::Options::Binary"), ('B',"binary-only", "Rm::Options::Binary-Only"), ('c',"component", "Rm::Options::Component", "HasArg"), ('C',"carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"), # Bugs to Cc ('d',"done","Rm::Options::Done", "HasArg"), # Bugs fixed ('D',"do-close","Rm::Options::Do-Close"), ('R',"rdep-check", "Rm::Options::Rdep-Check"), ('m',"reason", "Rm::Options::Reason", "HasArg"), # Hysterical raisins; -m is old-dinstall option for rejection reason ('n',"no-action","Rm::Options::No-Action"), ('p',"partial", "Rm::Options::Partial"), ('s',"suite","Rm::Options::Suite", "HasArg"), ('S',"source-only", "Rm::Options::Source-Only"), ] for i in [ 'NoArchAllRdeps', "architecture", "binary", "binary-only", "carbon-copy", "component", "done", "help", "no-action", "partial", "rdep-check", "reason", "source-only", "Do-Close" ]: if not cnf.has_key("Rm::Options::%s" % (i)): cnf["Rm::Options::%s" % (i)] = "" if not cnf.has_key("Rm::Options::Suite"): cnf["Rm::Options::Suite"] = "unstable" arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) Options = cnf.subtree("Rm::Options") if Options["Help"]: usage() session = DBConn().session() # Sanity check options if not arguments: utils.fubar("need at least one package name as an argument.") if Options["Architecture"] and Options["Source-Only"]: utils.fubar("can't use -a/--architecture and -S/--source-only options simultaneously.") if ((Options["Binary"] and Options["Source-Only"]) or (Options["Binary"] and Options["Binary-Only"]) or (Options["Binary-Only"] and Options["Source-Only"])): utils.fubar("Only one of -b/--binary, -B/--binary-only and -S/--source-only can be used.") if Options.has_key("Carbon-Copy") and not Options.has_key("Done"): utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.") if Options["Architecture"] and not Options["Partial"]: utils.warn("-a/--architecture implies -p/--partial.") Options["Partial"] = "true" if Options["Do-Close"] and not Options["Done"]: utils.fubar("No.") if (Options["Do-Close"] and (Options["Binary"] or Options["Binary-Only"] or Options["Source-Only"])): utils.fubar("No.") # Force the admin to tell someone if we're not doing a 'dak # cruft-report' inspired removal (or closing a bug, which counts # as telling someone). if not Options["No-Action"] and not Options["Carbon-Copy"] \ and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1: utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.") # Process -C/--carbon-copy # # Accept 3 types of arguments (space separated): # 1) a number - assumed to be a bug number, i.e. 
the Cc goes to that number at Dinstall::BugServer # 2) the keyword 'package' - cc's <package> at Dinstall::PackagesServer (and Dinstall::TrackingServer, if configured) for every argument # 3) contains a '@' - assumed to be an email address, used unmodified # carbon_copy = [] for copy_to in utils.split_args(Options.get("Carbon-Copy")): if copy_to.isdigit(): if cnf.has_key("Dinstall::BugServer"): carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"]) else: utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to) elif copy_to == 'package': for package in arguments: if cnf.has_key("Dinstall::PackagesServer"): carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"]) if cnf.has_key("Dinstall::TrackingServer"): carbon_copy.append(package + "@" + cnf["Dinstall::TrackingServer"]) elif '@' in copy_to: carbon_copy.append(copy_to) else: utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to)) if Options["Binary"]: field = "b.package" else: field = "s.source" con_packages = "AND %s IN (%s)" % (field, ", ".join([ repr(i) for i in arguments ])) (con_suites, con_architectures, con_components, check_source) = \ utils.parse_args(Options) # Additional suite checks suite_ids_list = [] whitelists = [] suites = utils.split_args(Options["Suite"]) suites_list = utils.join_with_commas_and(suites) if not Options["No-Action"]: for suite in suites: s = get_suite(suite, session=session) if s is not None: suite_ids_list.append(s.suite_id) whitelists.append(s.mail_whitelist) if suite in ("oldstable", "stable"): print "**WARNING** About to remove from the (old)stable suite!" print "This should only be done just prior to a (point) release and not at" print "any other time." game_over() elif suite == "testing": print "**WARNING** About to remove from the testing suite!" print "There's no need to do this normally as removals from unstable will" print "propagate to testing automagically." game_over() # Additional architecture checks if Options["Architecture"] and check_source: utils.warn("'source' in -a/--architecture makes no sense and is ignored.") # Don't do dependency checks on multiple suites if Options["Rdep-Check"] and len(suites) > 1: utils.fubar("Reverse dependency check on multiple suites is not implemented.") to_remove = [] maintainers = {} # We have 3 modes of package selection: binary, source-only, binary-only # and source+binary. # XXX: TODO: This all needs converting to use placeholders or the object # API. 
It's an SQL injection dream at the moment if Options["Binary"]: # Removal by binary package name q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, files_archive_map af, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures)) to_remove.extend(q) else: # Source-only if not Options["Binary-Only"]: q = session.execute("SELECT s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, archive, files f, files_archive_map af, component c WHERE sa.source = s.id AND sa.suite = su.id AND archive.id = su.archive_id AND s.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s" % (con_packages, con_suites, con_components)) to_remove.extend(q) if not Options["Source-Only"]: # Source + Binary q = session.execute(""" SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b JOIN bin_associations ba ON b.id = ba.bin JOIN architecture a ON b.architecture = a.id JOIN suite su ON ba.suite = su.id JOIN archive ON archive.id = su.archive_id JOIN files_archive_map af ON b.file = af.file_id AND af.archive_id = archive.id JOIN component c ON af.component_id = c.id JOIN source s ON b.source = s.id JOIN src_associations sa ON s.id = sa.source AND sa.suite = su.id WHERE TRUE %s %s %s %s""" % (con_packages, con_suites, con_components, con_architectures)) to_remove.extend(q) if not to_remove: print "Nothing to do." sys.exit(0) # If we don't have a reason; spawn an editor so the user can add one # Write the rejection email out as the <foo>.reason file if not Options["Reason"] and not Options["No-Action"]: (fd, temp_filename) = utils.temp_filename() editor = os.environ.get("EDITOR","vi") result = os.system("%s %s" % (editor, temp_filename)) if result != 0: utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result) temp_file = utils.open_file(temp_filename) for line in temp_file.readlines(): Options["Reason"] += line temp_file.close() os.unlink(temp_filename) # Generate the summary of what's to be removed d = {} for i in to_remove: package = i[0] version = i[1] architecture = i[2] maintainer = i[4] maintainers[maintainer] = "" if not d.has_key(package): d[package] = {} if not d[package].has_key(version): d[package][version] = [] if architecture not in d[package][version]: d[package][version].append(architecture) maintainer_list = [] for maintainer_id in maintainers.keys(): maintainer_list.append(get_maintainer(maintainer_id).name) summary = "" removals = d.keys() removals.sort() for package in removals: versions = d[package].keys() versions.sort(apt_pkg.version_compare) for version in versions: d[package][version].sort(utils.arch_compare_sw) summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version])) print "Will remove the following packages from %s:" % (suites_list) print print summary print "Maintainer: %s" % ", ".join(maintainer_list) if Options["Done"]: print "Will also close bugs: "+Options["Done"] if carbon_copy: print "Will also send CCs to: " + ", ".join(carbon_copy) if Options["Do-Close"]: print "Will also close associated bug reports." 
print print "------------------- Reason -------------------" print Options["Reason"] print "----------------------------------------------" print if Options["Rdep-Check"]: arches = utils.split_args(Options["Architecture"]) include_arch_all = Options['NoArchAllRdeps'] == '' reverse_depends_check(removals, suites[0], arches, session, include_arch_all=include_arch_all) # If -n/--no-action, drop out here if Options["No-Action"]: sys.exit(0) print "Going to remove the packages now." game_over() # Do the actual deletion print "Deleting...", sys.stdout.flush() try: bugs = utils.split_args(Options["Done"]) remove(session, Options["Reason"], suites, to_remove, partial=Options["Partial"], components=utils.split_args(Options["Component"]), done_bugs=bugs, carbon_copy=carbon_copy, close_related_bugs=Options["Do-Close"] ) except ValueError as ex: utils.fubar(ex.message) else: print "done."
def auto_decruft_suite(suite_name, suite_id, session, dryrun, debug): """Run the auto-decrufter on a given suite @type suite_name: string @param suite_name: The name of the suite to remove from @type suite_id: int @param suite_id: The id of the suite denoted by suite_name @type session: SQLA Session @param session: The database session in use @type dryrun: bool @param dryrun: If True, just print the actions rather than actually doing them @type debug: bool @param debug: If True, print some extra information """ all_architectures = [ a.arch_string for a in get_suite_architectures(suite_name) ] pkg_arch2groups = defaultdict(set) group_order = [] groups = {} full_removal_request = [] group_generator = chain(compute_sourceless_groups(suite_id, session), compute_nbs_groups(suite_id, suite_name, session)) for group in group_generator: group_name = group["name"] pkgs = group["packages"] affected_archs = group["architectures"] # If we remove an arch:all package, then the breakage can occur on any # of the architectures. if "all" in affected_archs: affected_archs = all_architectures for pkg_arch in product(pkgs, affected_archs): pkg_arch2groups[pkg_arch].add(group_name) if group_name not in groups: groups[group_name] = group group_order.append(group_name) else: # This case usually happens when versions differ between architectures... if debug: print("N: Merging group %s" % (group_name)) groups[group_name] = merge_group(groups[group_name], group) for group_name in group_order: removal_request = groups[group_name]["removal_request"] full_removal_request.extend(six.iteritems(removal_request)) if not groups: if debug: print("N: Found no candidates") return if debug: print("N: Considering to remove the following packages:") for group_name in sorted(groups): group_info = groups[group_name] pkgs = group_info["packages"] archs = group_info["architectures"] print("N: * %s: %s [%s]" % (group_name, ", ".join(pkgs), " ".join(archs))) if debug: print("N: Compiling ReverseDependencyChecker (RDC) - please hold ...") rdc = ReverseDependencyChecker(session, suite_name) if debug: print("N: Computing initial breakage...") breakage = rdc.check_reverse_depends(full_removal_request) while breakage: by_breakers = [(len(breakage[x]), x, breakage[x]) for x in breakage] by_breakers.sort(reverse=True) if debug: print("N: - Removal would break %s (package, architecture)-pairs" % (len(breakage))) print("N: - full breakage:") for _, breaker, broken in by_breakers: bname = "%s/%s" % breaker broken_str = ", ".join("%s/%s" % b for b in sorted(broken)) print("N: * %s => %s" % (bname, broken_str)) averted_breakage = set() for _, package_arch, breakage in by_breakers: if breakage <= averted_breakage: # We already avoided this break continue guilty_groups = pkg_arch2groups[package_arch] if not guilty_groups: utils.fubar("Cannot figure what group provided %s" % str(package_arch)) if debug: # Only output it, if it truly a new group being discarded # - a group can reach this part multiple times, if it breaks things on # more than one architecture. This being rather common in fact. 
already_discard = True if any(group_name for group_name in guilty_groups if group_name in groups): already_discard = False if not already_discard: avoided = sorted(breakage - averted_breakage) print("N: - skipping removal of %s (breakage: %s)" % (", ".join(sorted(guilty_groups)), str(avoided))) averted_breakage |= breakage for group_name in guilty_groups: if group_name in groups: del groups[group_name] if not groups: if debug: print("N: Nothing left to remove") return if debug: print("N: Now considering to remove: %s" % str(", ".join(sorted(six.iterkeys(groups))))) # Rebuild the removal request with the remaining groups and off # we go to (not) break the world once more time full_removal_request = [] for group_info in six.itervalues(groups): full_removal_request.extend( six.iteritems(group_info["removal_request"])) breakage = rdc.check_reverse_depends(full_removal_request) if debug: print("N: Removal looks good") if dryrun: print("Would remove the equivalent of:") for group_name in group_order: if group_name not in groups: continue group_info = groups[group_name] pkgs = group_info["packages"] archs = group_info["architectures"] message = group_info["message"] # Embed the -R just in case someone wants to run it manually later print( ' dak rm -m "{message}" -s {suite} -a {architectures} -p -R -b {packages}' .format( message=message, suite=suite_name, architectures=",".join(archs), packages=" ".join(pkgs), )) print() print( "Note: The removals may be interdependent. A non-breaking result may require the execution of all" ) print("of the removals") else: remove_groups(six.itervalues(groups), suite_id, suite_name, session)
def process_file(file, suite, component, otype, mode, action, session): cnf = Config() s = get_suite(suite, session=session) if s is None: utils.fubar("Suite '%s' not recognised." % (suite)) suite_id = s.suite_id c = get_component(component, session=session) if c is None: utils.fubar("Component '%s' not recognised." % (component)) component_id = c.component_id o = get_override_type(otype) if o is None: utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc.)" % (otype)) type_id = o.overridetype_id # --set is done mostly internal for performance reasons; most # invocations of --set will be updates and making people wait 2-3 # minutes while 6000 select+inserts are run needlessly isn't cool. original = {} new = {} c_skipped = 0 c_added = 0 c_updated = 0 c_removed = 0 c_error = 0 q = session.execute("""SELECT o.package, o.priority, o.section, o.maintainer, p.priority, s.section FROM override o, priority p, section s WHERE o.suite = :suiteid AND o.component = :componentid AND o.type = :typeid and o.priority = p.id and o.section = s.id""", {'suiteid': suite_id, 'componentid': component_id, 'typeid': type_id}) for i in q.fetchall(): original[i[0]] = i[1:] start_time = time.time() section_cache = get_sections(session) priority_cache = get_priorities(session) # Our session is already in a transaction for line in file.readlines(): line = re_comments.sub('', line).strip() if line == "": continue maintainer_override = None if otype == "dsc": split_line = line.split(None, 2) if len(split_line) == 2: (package, section) = split_line elif len(split_line) == 3: (package, section, maintainer_override) = split_line else: utils.warn("'%s' does not break into 'package section [maintainer-override]'." % (line)) c_error += 1 continue priority = "extra" else: # binary or udeb split_line = line.split(None, 3) if len(split_line) == 3: (package, priority, section) = split_line elif len(split_line) == 4: (package, priority, section, maintainer_override) = split_line else: utils.warn("'%s' does not break into 'package priority section [maintainer-override]'." % (line)) c_error += 1 continue if section not in section_cache: utils.warn("'%s' is not a valid section. ['%s' in suite %s, component %s]." % (section, package, suite, component)) c_error += 1 continue section_id = section_cache[section] if priority not in priority_cache: utils.warn("'%s' is not a valid priority. ['%s' in suite %s, component %s]." % (priority, package, suite, component)) c_error += 1 continue priority_id = priority_cache[priority] if package in new: utils.warn("Can't insert duplicate entry for '%s'; ignoring all but the first. 
[suite %s, component %s]" % (package, suite, component)) c_error += 1 continue new[package] = "" if package in original: (old_priority_id, old_section_id, old_maintainer_override, old_priority, old_section) = original[package] if mode == "add" or old_priority_id == priority_id and \ old_section_id == section_id and \ old_maintainer_override == maintainer_override: # If it's unchanged or we're in 'add only' mode, ignore it c_skipped += 1 continue else: # If it's changed, delete the old one so we can # reinsert it with the new information c_updated += 1 if action: session.execute("""DELETE FROM override WHERE suite = :suite AND component = :component AND package = :package AND type = :typeid""", {'suite': suite_id, 'component': component_id, 'package': package, 'typeid': type_id}) # Log changes if old_priority_id != priority_id: Logger.log(["changed priority", package, old_priority, priority]) if old_section_id != section_id: Logger.log(["changed section", package, old_section, section]) if old_maintainer_override != maintainer_override: Logger.log(["changed maintainer override", package, old_maintainer_override, maintainer_override]) update_p = 1 elif mode == "change": # Ignore additions in 'change only' mode c_skipped += 1 continue else: c_added += 1 update_p = 0 if action: if not maintainer_override: m_o = None else: m_o = maintainer_override session.execute("""INSERT INTO override (suite, component, type, package, priority, section, maintainer) VALUES (:suiteid, :componentid, :typeid, :package, :priorityid, :sectionid, :maintainer)""", {'suiteid': suite_id, 'componentid': component_id, 'typeid': type_id, 'package': package, 'priorityid': priority_id, 'sectionid': section_id, 'maintainer': m_o}) if not update_p: Logger.log(["new override", suite, component, otype, package, priority, section, maintainer_override]) if mode == "set": # Delete any packages which were removed for package in original.keys(): if package not in new: if action: session.execute("""DELETE FROM override WHERE suite = :suiteid AND component = :componentid AND package = :package AND type = :typeid""", {'suiteid': suite_id, 'componentid': component_id, 'package': package, 'typeid': type_id}) c_removed += 1 Logger.log(["removed override", suite, component, otype, package]) if action: session.commit() if not cnf["Control-Overrides::Options::Quiet"]: print("Done in %d seconds. [Updated = %d, Added = %d, Removed = %d, Skipped = %d, Errors = %d]" % (int(time.time() - start_time), c_updated, c_added, c_removed, c_skipped, c_error)) Logger.log(["set complete", c_updated, c_added, c_removed, c_skipped, c_error])
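process_file() above accepts two line layouts: "package section [maintainer-override]" for dsc overrides and "package priority section [maintainer-override]" otherwise. A standalone sketch of that split (not dak code); the '#' comment stripping here is a crude stand-in for re_comments:

def parse_override_line(line, otype):
    line = line.split("#", 1)[0].strip()    # crude stand-in for re_comments
    if not line:
        return None
    if otype == "dsc":
        parts = line.split(None, 2)         # package section [maintainer-override]
        if len(parts) == 2:
            return (parts[0], "extra", parts[1], None)     # dsc overrides always get priority 'extra'
        if len(parts) == 3:
            return (parts[0], "extra", parts[1], parts[2])
    else:
        parts = line.split(None, 3)         # package priority section [maintainer-override]
        if len(parts) == 3:
            return (parts[0], parts[1], parts[2], None)
        if len(parts) == 4:
            return tuple(parts)
    raise ValueError("malformed override line: %r" % line)

print(parse_override_line("dak optional devel", "deb"))
print(parse_override_line("dak devel Joe Maintainer <joe@example.org>", "dsc"))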
def main():
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    Arguments = [('h', "help", "Cruft-Report::Options::Help"),
                 ('m', "mode", "Cruft-Report::Options::Mode", "HasArg"),
                 ('R', "rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s', "suite", "Cruft-Report::Options::Suite", "HasArg"),
                 ('w', "wanna-build-dump", "Cruft-Report::Options::Wanna-Build-Dump", "HasArg")]
    for i in ["help", "Rdep-Check"]:
        if not cnf.has_key("Cruft-Report::Options::%s" % (i)):
            cnf["Cruft-Report::Options::%s" % (i)] = ""

    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    if not cnf.has_key("Cruft-Report::Options::Mode"):
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if not cnf.has_key("Cruft-Report::Options::Wanna-Build-Dump"):
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
    if Options["Mode"] == "daily":
        checks = ["nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu"]
    elif Options["Mode"] == "full":
        checks = ["nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu",
                  "dubious nbs", "bnb", "bms", "anais"]
    elif Options["Mode"] == "bdo":
        checks = ["nbs", "obsolete source"]
    else:
        utils.warn("%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood." % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}
    anais_output = ""
    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())
    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources.gz" % (suite.archive.path, suite_name, component)
        # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
        (fd, temp_filename) = utils.temp_filename()
        (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
        if (result != 0):
            sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
            sys.exit(result)

        sources = utils.open_file(temp_filename)
        Sources = apt_pkg.TagFile(sources)
        while Sources.step():
            source = Sources.section.find('Package')
            source_version = Sources.section.find('Version')
            architecture = Sources.section.find('Architecture')
            binaries = Sources.section.find('Binary')
            binaries_list = [i.strip() for i in binaries.split(',')]

            if "bnb" in checks:
                # Check for binaries not built on any architecture.
                for binary in binaries_list:
                    if not bins_in_suite.has_key(binary):
                        bin_not_built.setdefault(source, {})
                        bin_not_built[source][binary] = ""

            if "anais" in checks:
                anais_output += do_anais(architecture, binaries_list, source, session)

            # build indices for checking "no source" later
            source_index = component + '/' + source
            src_pkgs[source] = source_index
            for binary in binaries_list:
                bin_pkgs[binary] = source
            source_binaries[source] = binaries
            source_versions[source] = source_version

        sources.close()
        os.unlink(temp_filename)

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "staging":
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [a.arch_string for a in get_suite_architectures(suite_name,
                                                                        skipsrc=True, skipall=True,
                                                                        session=session)]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
                continue
            filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (suite.archive.path, suite_name, component, architecture)
            # apt_pkg.TagFile needs a real file handle
            (fd, temp_filename) = utils.temp_filename()
            (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
            if (result != 0):
                sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
                sys.exit(result)

            if "nfu" in checks:
                nfu_packages.setdefault(architecture, [])
                nfu_entries = parse_nfu(architecture)

            packages = utils.open_file(temp_filename)
            Packages = apt_pkg.TagFile(packages)
            while Packages.step():
                package = Packages.section.find('Package')
                source = Packages.section.find('Source', "")
                version = Packages.section.find('Version')
                if source == "":
                    source = package
                if bin2source.has_key(package) and \
                       apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                else:
                    bin2source[package] = {}
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                if source.find("(") != -1:
                    m = re_extract_src_version.match(source)
                    source = m.group(1)
                    version = m.group(2)
                if not bin_pkgs.has_key(package):
                    nbs.setdefault(source, {})
                    nbs[source].setdefault(package, {})
                    nbs[source][package][version] = ""
                else:
                    if "nfu" in checks:
                        if package in nfu_entries and \
                               version != source_versions[source]:  # only suggest to remove out-of-date packages
                            nfu_packages[architecture].append((package, version, source_versions[source]))

            packages.close()
            os.unlink(temp_filename)

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
    dubious_nbs = {}
    for source in nbs.keys():
        for package in nbs[source].keys():
            versions = nbs[source][package].keys()
            versions.sort(apt_pkg.version_compare)
            latest_version = versions.pop()
            source_version = source_versions.get(source, "0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package, suite_id, session)

    if "nviu" in checks:
        do_newer_version('chromodoris', 'staging', 'NVIU', session)

    # FIXME: Not used in Tanglu
    #if "nvit" in checks:
    #    do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)

    ###

    if Options["Mode"] == "full":
        print "=" * 75
        print

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print "Unbuilt binary packages"
        print "-----------------------"
        print
        keys = bin_not_built.keys()
        keys.sort()
        for source in keys:
            binaries = bin_not_built[source].keys()
            binaries.sort()
            print " o %s: %s" % (source, ", ".join(binaries))
        print

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print "Architecture Not Allowed In Source"
        print "----------------------------------"
        print anais_output
        print

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)

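# Illustrative sketch (not part of dak): the gunzip-to-temp-file step used twice in
# main() above could also be done without shelling out, by decompressing with the
# gzip module into a named temporary file; apt_pkg.TagFile still gets a real file
# handle that way. The helper name is hypothetical, and in a real script the imports
# would live at the top of the module.
import gzip
import shutil
import tempfile

def _gunzip_to_tempfile(gz_path):
    """Decompress gz_path into a temporary file and return the temporary file's path."""
    src = gzip.open(gz_path, 'rb')
    dst = tempfile.NamedTemporaryFile(delete=False)
    try:
        shutil.copyfileobj(src, dst)
    finally:
        src.close()
        dst.close()
    return dst.name

# e.g. temp_filename = _gunzip_to_tempfile(filename)
#      ... parse with apt_pkg.TagFile(utils.open_file(temp_filename)) ...
#      os.unlink(temp_filename)
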
def process(osuite, affected_suites, originosuite, component, otype, session):
    global Logger, Options, sections, priorities

    o = get_suite(osuite, session)
    if o is None:
        utils.fubar("Suite '%s' not recognised." % (osuite))
    osuite_id = o.suite_id

    originosuite_id = None
    if originosuite:
        oo = get_suite(originosuite, session)
        if oo is None:
            utils.fubar("Suite '%s' not recognised." % (originosuite))
        originosuite_id = oo.suite_id

    c = get_component(component, session)
    if c is None:
        utils.fubar("Component '%s' not recognised." % (component))
    component_id = c.component_id

    ot = get_override_type(otype, session)
    if ot is None:
        utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc)" % (otype))
    type_id = ot.overridetype_id
    dsc_type_id = get_override_type("dsc", session).overridetype_id

    source_priority_id = get_priority("source", session).priority_id

    if otype == "deb" or otype == "udeb":
        packages = {}
        # TODO: Fix to use placeholders (check how to with arrays)
        q = session.execute("""
SELECT b.package
  FROM binaries b
  JOIN bin_associations ba ON b.id = ba.bin
  JOIN suite ON ba.suite = suite.id
  JOIN files_archive_map af ON b.file = af.file_id AND suite.archive_id = af.archive_id
 WHERE b.type = :otype AND ba.suite IN (%s) AND af.component_id = :component_id
""" % (",".join([str(i) for i in affected_suites])),
                            {'otype': otype, 'component_id': component_id})
        for i in q.fetchall():
            packages[i[0]] = 0

    src_packages = {}
    q = session.execute("""
SELECT s.source
  FROM source s
  JOIN src_associations sa ON s.id = sa.source
  JOIN suite ON sa.suite = suite.id
  JOIN files_archive_map af ON s.file = af.file_id AND suite.archive_id = af.archive_id
 WHERE sa.suite IN (%s) AND af.component_id = :component_id
""" % (",".join([str(i) for i in affected_suites])),
                        {'component_id': component_id})
    for i in q.fetchall():
        src_packages[i[0]] = 0

    # -----------
    # Drop unused overrides

    q = session.execute("""SELECT package, priority, section, maintainer FROM override
                           WHERE suite = :suite_id AND component = :component_id AND type = :type_id""",
                        {'suite_id': osuite_id, 'component_id': component_id, 'type_id': type_id})
    # We're already within a transaction
    if otype == "dsc":
        for i in q.fetchall():
            package = i[0]
            if package in src_packages:
                src_packages[package] = 1
            else:
                if package in blacklist:
                    utils.warn("%s in incoming, not touching" % package)
                    continue
                Logger.log(["removing unused override", osuite, component, otype,
                            package, priorities[i[1]], sections[i[2]], i[3]])
                if not Options["No-Action"]:
                    session.execute("""DELETE FROM override
                                       WHERE package = :package AND suite = :suite_id
                                         AND component = :component_id AND type = :type_id
                                         AND created < now() - interval '14 days'""",
                                    {'package': package, 'suite_id': osuite_id,
                                     'component_id': component_id, 'type_id': type_id})

        # create source overrides based on binary overrides, as source
        # overrides not always get created
        q = session.execute("""SELECT package, priority, section, maintainer FROM override
                               WHERE suite = :suite_id AND component = :component_id""",
                            {'suite_id': osuite_id, 'component_id': component_id})
        for i in q.fetchall():
            package = i[0]
            if package not in src_packages or src_packages[package]:
                continue
            src_packages[package] = 1

            Logger.log(["add missing override", osuite, component, otype,
                        package, "source", sections[i[2]], i[3]])
            if not Options["No-Action"]:
                session.execute("""INSERT INTO override (package, suite, component,
                                                         priority, section, type, maintainer)
                                   VALUES (:package, :suite_id, :component_id,
                                           :priority_id, :section_id, :type_id, :maintainer)""",
                                {'package': package, 'suite_id': osuite_id,
                                 'component_id': component_id, 'priority_id': source_priority_id,
                                 'section_id': i[2], 'type_id': dsc_type_id, 'maintainer': i[3]})

        # Check whether originosuite has an override for us we can
        # copy
        if originosuite:
            q = session.execute("""SELECT origin.package, origin.priority, origin.section,
                                          origin.maintainer, target.priority, target.section,
                                          target.maintainer
                                   FROM override origin
                                   LEFT JOIN override target ON (origin.package = target.package
                                                                 AND target.suite = :suite_id
                                                                 AND origin.component = target.component
                                                                 AND origin.type = target.type)
                                   WHERE origin.suite = :originsuite_id
                                     AND origin.component = :component_id
                                     AND origin.type = :type_id""",
                                {'suite_id': osuite_id, 'originsuite_id': originosuite_id,
                                 'component_id': component_id, 'type_id': type_id})
            for i in q.fetchall():
                package = i[0]
                if package not in src_packages or src_packages[package]:
                    if i[4] and (i[1] != i[4] or i[2] != i[5] or i[3] != i[6]):
                        Logger.log(["syncing override", osuite, component, otype, package,
                                    "source", sections[i[5]], i[6], "source", sections[i[2]], i[3]])
                        if not Options["No-Action"]:
                            session.execute("""UPDATE override SET priority = :priority,
                                                                   section = :section,
                                                                   maintainer = :maintainer
                                               WHERE package = :package AND suite = :suite_id
                                                 AND component = :component_id AND type = :type_id""",
                                            {'priority': i[1], 'section': i[2], 'maintainer': i[3],
                                             'package': package, 'suite_id': osuite_id,
                                             'component_id': component_id, 'type_id': dsc_type_id})
                    continue

                # we can copy
                src_packages[package] = 1
                Logger.log(["copying missing override", osuite, component, otype,
                            package, "source", sections[i[2]], i[3]])
                if not Options["No-Action"]:
                    session.execute("""INSERT INTO override (package, suite, component,
                                                             priority, section, type, maintainer)
                                       VALUES (:package, :suite_id, :component_id,
                                               :priority_id, :section_id, :type_id, :maintainer)""",
                                    {'package': package, 'suite_id': osuite_id,
                                     'component_id': component_id, 'priority_id': source_priority_id,
                                     'section_id': i[2], 'type_id': dsc_type_id, 'maintainer': i[3]})

        for package, hasoverride in list(src_packages.items()):
            if not hasoverride:
                utils.warn("%s has no override!" % package)

    else:  # binary override
        for i in q.fetchall():
            package = i[0]
            if package in packages:
                packages[package] = 1
            else:
                if package in blacklist:
                    utils.warn("%s in incoming, not touching" % package)
                    continue
                Logger.log(["removing unused override", osuite, component, otype,
                            package, priorities[i[1]], sections[i[2]], i[3]])
                if not Options["No-Action"]:
                    session.execute("""DELETE FROM override
                                       WHERE package = :package AND suite = :suite_id
                                         AND component = :component_id AND type = :type_id
                                         AND created < now() - interval '14 days'""",
                                    {'package': package, 'suite_id': osuite_id,
                                     'component_id': component_id, 'type_id': type_id})

        # Check whether originosuite has an override for us we can
        # copy
        if originosuite:
            q = session.execute("""SELECT origin.package, origin.priority, origin.section,
                                          origin.maintainer, target.priority, target.section,
                                          target.maintainer
                                   FROM override origin
                                   LEFT JOIN override target ON (origin.package = target.package
                                                                 AND target.suite = :suite_id
                                                                 AND origin.component = target.component
                                                                 AND origin.type = target.type)
                                   WHERE origin.suite = :originsuite_id
                                     AND origin.component = :component_id
                                     AND origin.type = :type_id""",
                                {'suite_id': osuite_id, 'originsuite_id': originosuite_id,
                                 'component_id': component_id, 'type_id': type_id})
            for i in q.fetchall():
                package = i[0]
                if package not in packages or packages[package]:
                    if i[4] and (i[1] != i[4] or i[2] != i[5] or i[3] != i[6]):
                        Logger.log(["syncing override", osuite, component, otype, package,
                                    priorities[i[4]], sections[i[5]], i[6],
                                    priorities[i[1]], sections[i[2]], i[3]])
                        if not Options["No-Action"]:
                            session.execute("""UPDATE override SET priority = :priority_id,
                                                                   section = :section_id,
                                                                   maintainer = :maintainer
                                               WHERE package = :package AND suite = :suite_id
                                                 AND component = :component_id AND type = :type_id""",
                                            {'priority_id': i[1], 'section_id': i[2], 'maintainer': i[3],
                                             'package': package, 'suite_id': osuite_id,
                                             'component_id': component_id, 'type_id': type_id})
                    continue

                # we can copy
                packages[package] = 1
                Logger.log(["copying missing override", osuite, component, otype,
                            package, priorities[i[1]], sections[i[2]], i[3]])
                if not Options["No-Action"]:
                    session.execute("""INSERT INTO override (package, suite, component,
                                                             priority, section, type, maintainer)
                                       VALUES (:package, :suite_id, :component_id,
                                               :priority_id, :section_id, :type_id, :maintainer)""",
                                    {'package': package, 'suite_id': osuite_id,
                                     'component_id': component_id, 'priority_id': i[1],
                                     'section_id': i[2], 'type_id': type_id, 'maintainer': i[3]})

        for package, hasoverride in list(packages.items()):
            if not hasoverride:
                utils.warn("%s has no override!" % package)

    session.commit()
    sys.stdout.flush()

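# Illustrative sketch (not part of dak): a hypothetical invocation of process()
# above. The suite and component names and the way affected_suites is gathered are
# examples only; the real caller collects them from the configuration.
#
#   session = DBConn().session()
#   suite_ids = [get_suite(s, session).suite_id for s in ("staging", "chromodoris")]
#   for otype in ("deb", "udeb", "dsc"):
#       process("staging", suite_ids, None, "main", otype, session)
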
def main():
    global Options, Logger

    cnf = Config()
    summarystats = SummaryStats()

    Arguments = [('a', "automatic", "Dinstall::Options::Automatic"),
                 ('h', "help", "Dinstall::Options::Help"),
                 ('n', "no-action", "Dinstall::Options::No-Action"),
                 ('p', "no-lock", "Dinstall::Options::No-Lock"),
                 ('s', "no-mail", "Dinstall::Options::No-Mail"),
                 ('d', "directory", "Dinstall::Options::Directory", "HasArg")]

    for i in ["automatic", "help", "no-action", "no-lock", "no-mail",
              "version", "directory"]:
        key = "Dinstall::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    changes_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Dinstall::Options")

    if Options["Help"]:
        usage()

    # -n/--dry-run invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""

    # Obtain lock if not in no-action mode and initialize the log
    if not Options["No-Action"]:
        lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'process-upload.lock'), os.O_RDWR | os.O_CREAT)
        try:
            fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                utils.fubar("Couldn't obtain lock; assuming another 'dak process-upload' is already running.")
            else:
                raise

        # Initialise UrgencyLog() - it will deal with the case where we don't
        # want to log urgencies
        urgencylog = UrgencyLog()

    Logger = daklog.Logger("process-upload", Options["No-Action"])

    # If we have a directory flag, use it to find our files
    if cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn("Directory provided so ignoring files given on command line")

        changes_files = utils.get_changes_files(cnf["Dinstall::Options::Directory"])
        Logger.log(["Using changes files from directory", cnf["Dinstall::Options::Directory"], len(changes_files)])
    elif not len(changes_files) > 0:
        utils.fubar("No changes files given and no directory specified")
    else:
        Logger.log(["Using changes files from command-line", len(changes_files)])

    process_changes(changes_files)

    if summarystats.accept_count:
        sets = "set"
        if summarystats.accept_count > 1:
            sets = "sets"
        print("Installed %d package %s, %s." % (summarystats.accept_count, sets,
                                                utils.size_type(int(summarystats.accept_bytes))))
        Logger.log(["total", summarystats.accept_count, summarystats.accept_bytes])

    if summarystats.reject_count:
        sets = "set"
        if summarystats.reject_count > 1:
            sets = "sets"
        print("Rejected %d package %s." % (summarystats.reject_count, sets))
        Logger.log(["rejected", summarystats.reject_count])

    if not Options["No-Action"]:
        urgencylog.close()

    Logger.close()