def init(self):
    """Parse command-line options, take the dinstall lock and run the DB update."""
    cnf = Config()
    arguments = [('h', "help", "Update-DB::Options::Help")]
    for i in ["help"]:
        # Ensure the option key exists so subtree() lookups below cannot fail.
        key = "Update-DB::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    options = cnf.subtree("Update-DB::Options")
    if options["Help"]:
        self.usage()
    elif arguments:
        utils.warn("dak update-db takes no arguments.")
        self.usage(exit_code=1)

    try:
        if os.path.isdir(cnf["Dir::Lock"]):
            lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'dinstall.lock'),
                              os.O_RDWR | os.O_CREAT)
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        else:
            utils.warn("Lock directory doesn't exist yet - not locking")
    except IOError as e:
        # Compare errno values directly instead of round-tripping through the
        # errno.errorcode name table (consistent with the newer init()).
        if e.errno in (errno.EACCES, errno.EAGAIN):
            utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")

    self.update_db()
def parse_nfu(architecture):
    """Return the set of source packages marked Not-For-Us in the
    wanna-build dump for the given architecture (empty set if no dump)."""
    cnf = Config()
    # Example line: utils/hpodder_1.1.5.0: Not-For-Us [optional:out-of-date]
    r = re.compile(r"^\w+/([^_]+)_.*: Not-For-Us")

    ret = set()

    filename = "%s/%s-all.txt" % (cnf["Cruft-Report::Options::Wanna-Build-Dump"], architecture)

    # Not all architectures may have a wanna-build dump, so we want to
    # ignore missing files.
    if os.path.exists(filename):
        f = utils.open_file(filename)
        try:
            for line in f:
                # Continuation lines are indented; only header lines matter.
                # startswith() is safe on empty strings, unlike line[0].
                if line.startswith(' '):
                    continue
                m = r.match(line)
                if m:
                    ret.add(m.group(1))
        finally:
            # Close even if a line triggers an unexpected error.
            f.close()
    else:
        utils.warn("No wanna-build dump file for architecture %s" % architecture)

    return ret
def check_files(now_date, session):
    # Stamps pool files that are referenced by neither a binary nor a source
    # package with last_used = now_date, so a later cleanup pass deletes them.
    #
    # FIXME: this is evil; nothing should ever be in this state.  if
    # they are, it's a bug.
    # However, we've discovered it happens sometimes so we print a huge warning
    # and then mark the file for deletion. This probably masks a bug somewhere
    # else but is better than collecting cruft forever
    Logger.log(["Checking for unused files..."])
    # RETURNING hands back (archive name, filename) for every row updated,
    # so each orphan can be warned about individually below.
    q = session.execute("""UPDATE files_archive_map af SET last_used = :last_used FROM files f, archive WHERE af.file_id = f.id AND af.archive_id = archive.id AND NOT EXISTS (SELECT 1 FROM binaries b WHERE b.file = af.file_id) AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = af.file_id) AND af.last_used IS NULL RETURNING archive.name, f.filename""", {'last_used': now_date})

    for x in q:
        utils.warn("orphaned file: {0}".format(x))
        Logger.log(["set lastused", x[0], x[1], "ORPHANED FILE"])

    if not Options["No-Action"]:
        session.commit()
def check_files_in_dsc(): """ Ensure each .dsc lists appropriate files in its Files field (according to the format announced in its Format field). """ count = 0 print "Building list of database files..." q = DBConn().session().query(PoolFile).filter(PoolFile.filename.like('.dsc$')) if q.count() > 0: print "Checking %d files..." % len(ql) else: print "No files to check." for pf in q.all(): filename = os.path.abspath(os.path.join(pf.location.path + pf.filename)) try: # NB: don't enforce .dsc syntax dsc = utils.parse_changes(filename, dsc_file=1) except: utils.fubar("error parsing .dsc file '%s'." % (filename)) reasons = utils.check_dsc_files(filename, dsc) for r in reasons: utils.warn(r) if len(reasons) > 0: count += 1 if count: utils.warn("Found %s invalid .dsc files." % (count))
def run(self):
    """Consume ChangesToImport items from the queue and import each
    .changes file into the database until the queue is exhausted."""
    while True:
        try:
            if self.die:
                return
            to_import = self.queue.dequeue()
            if not to_import:
                return

            print("Directory %s, file %7d, (%s)" % (to_import.dirpath[-10:], to_import.count, to_import.changesfile))

            changes = Changes()
            changes.changes_file = to_import.changesfile
            changesfile = os.path.join(to_import.dirpath, to_import.changesfile)
            changes.changes = parse_changes(changesfile, signing_rules=-1)
            changes.changes["fingerprint"] = check_signature(changesfile)
            changes.add_known_changes(to_import.dirpath, session=self.session)
            self.session.commit()
        except InvalidDscError as line:
            # 'changesfile' is bound before parse_changes() (the only raiser
            # of InvalidDscError); the old code referenced an undefined 'f'.
            warn("syntax error in .dsc file '%s', line %s." % (changesfile, line))
        except ChangesUnicodeError:
            warn("found invalid changes file, not properly utf-8 encoded")
        except KeyboardInterrupt:
            print("Caught C-c; on ImportThread. terminating.")
            self.parent.plsDie()
            sys.exit(1)
        except:
            # Any other error is fatal for the whole importer: ask the parent
            # to shut down all threads before exiting.
            self.parent.plsDie()
            sys.exit(1)
def main():
    """Entry point for 'dak queue-report': report on packages in policy queues."""
    global Cnf

    Cnf = utils.get_conf()
    Arguments = [('h', "help", "Queue-Report::Options::Help"),
                 ('n', "new", "Queue-Report::Options::New"),
                 ('8', '822', "Queue-Report::Options::822"),
                 ('s', "sort", "Queue-Report::Options::Sort", "HasArg"),
                 ('a', "age", "Queue-Report::Options::Age", "HasArg"),
                 ('r', "rrd", "Queue-Report::Options::Rrd", "HasArg"),
                 ('d', "directories", "Queue-Report::Options::Directories", "HasArg")]
    for opt in ["help"]:
        key = "Queue-Report::Options::%s" % opt
        if not Cnf.has_key(key):
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Queue-Report::Options")
    if Options["Help"]:
        usage()

    if Cnf.has_key("Queue-Report::Options::New"):
        header()

    # Queue selection: command line beats configuration beats built-in default.
    if Cnf.has_key("Queue-Report::Options::Directories"):
        queue_names = Cnf["Queue-Report::Options::Directories"].split(",")
    elif Cnf.has_key("Queue-Report::Directories"):
        queue_names = Cnf.value_list("Queue-Report::Directories")
    else:
        queue_names = ["byhand", "new"]

    # Same precedence for the rrd database directory.
    if Cnf.has_key("Queue-Report::Options::Rrd"):
        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
    elif Cnf.has_key("Dir::Rrd"):
        rrd_dir = Cnf["Dir::Rrd"]
    else:
        rrd_dir = None

    f = None
    if Cnf.has_key("Queue-Report::Options::822"):
        # Open the report file
        f = open(Cnf["Queue-Report::ReportLocations::822Location"], "w")

    session = DBConn().session()

    for queue_name in queue_names:
        queue = session.query(PolicyQueue).filter_by(queue_name=queue_name).first()
        if queue is None:
            utils.warn("Cannot find queue %s" % queue_name)
        else:
            process_queue(queue, f, rrd_dir)

    if Cnf.has_key("Queue-Report::Options::822"):
        f.close()

    if Cnf.has_key("Queue-Report::Options::New"):
        footer()
def init(self):
    """Parse options for 'dak update-db', grab the daily lock and update."""
    cnf = Config()
    arguments = [('h', "help", "Update-DB::Options::Help"),
                 ("y", "yes", "Update-DB::Options::Yes")]
    for opt in ["help"]:
        optkey = "Update-DB::Options::%s" % opt
        if optkey not in cnf:
            cnf[optkey] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    options = cnf.subtree("Update-DB::Options")
    if options["Help"]:
        self.usage()
    elif arguments:
        utils.warn("dak update-db takes no arguments.")
        self.usage(exit_code=1)

    try:
        if not os.path.isdir(cnf["Dir::Lock"]):
            utils.warn("Lock directory doesn't exist yet - not locking")
        else:
            # Hold the daily lock so we never race the regular archive run.
            lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'daily.lock'),
                              os.O_RDONLY | os.O_CREAT)
            fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError as e:
        if e.errno in (errno.EACCES, errno.EAGAIN):
            utils.fubar("Couldn't obtain lock, looks like archive is doing something, try again later.")

    self.update_db()
def check_files(now_date, delete_date, max_delete, session):
    # Flags pool files that nothing references any more (no binary, source,
    # changes or build-queue row) by setting last_used = now_date, so a later
    # pass can delete them.  'delete_date' and 'max_delete' are unused here.
    #
    # FIXME: this is evil; nothing should ever be in this state.  if
    # they are, it's a bug.
    # However, we've discovered it happens sometimes so we print a huge warning
    # and then mark the file for deletion. This probably masks a bug somewhere
    # else but is better than collecting cruft forever
    print "Checking for unused files..."
    q = session.execute("""SELECT id, filename FROM files f WHERE NOT EXISTS (SELECT 1 FROM binaries b WHERE b.file = f.id) AND NOT EXISTS (SELECT 1 FROM dsc_files df WHERE df.file = f.id) AND NOT EXISTS (SELECT 1 FROM changes_pool_files cpf WHERE cpf.fileid = f.id) AND NOT EXISTS (SELECT 1 FROM build_queue_files qf WHERE qf.fileid = f.id) AND last_used IS NULL ORDER BY filename""")

    ql = q.fetchall()
    if len(ql) > 0:
        utils.warn("check_files found something it shouldn't")
        for x in ql:
            utils.warn("orphaned file: %s" % x)
            Logger.log(["set lastused", x[1], "ORPHANED FILE"])
            if not Options["No-Action"]:
                # Stamp the orphan now; actual deletion happens elsewhere
                # once last_used is old enough.
                session.execute("UPDATE files SET last_used = :lastused WHERE id = :fileid",
                                {'lastused': now_date, 'fileid': x[0]})

        if not Options["No-Action"]:
            session.commit()
def check_dscs():
    """Parse every .dsc file in the archive and report each invalid one."""
    bad = 0
    query = DBConn().session().query(DBSource).order_by(DBSource.source, DBSource.version)
    for src in query:
        path = src.poolfile.fullpath
        try:
            utils.parse_changes(path, signing_rules=1, dsc_file=1)
        except InvalidDscError:
            utils.warn("syntax error in .dsc file %s" % path)
            bad += 1
        except ChangesUnicodeError:
            utils.warn("found invalid dsc file (%s), not properly utf-8 encoded" % path)
            bad += 1
        except CantOpenError:
            utils.warn("missing dsc file (%s)" % path)
            bad += 1
        except Exception as e:
            utils.warn("miscellaneous error parsing dsc file (%s): %s" % (path, str(e)))
            bad += 1

    if bad:
        utils.warn("Found %s invalid .dsc files." % (bad))
def main (): global Cnf, db_files, waste, excluded # Cnf = utils.get_conf() Arguments = [('h',"help","Examine-Package::Options::Help"), ('H',"html-output","Examine-Package::Options::Html-Output"), ] for i in [ "Help", "Html-Output", "partial-html" ]: if not Cnf.has_key("Examine-Package::Options::%s" % (i)): Cnf["Examine-Package::Options::%s" % (i)] = "" args = apt_pkg.parse_commandline(Cnf,Arguments,sys.argv) Options = Cnf.subtree("Examine-Package::Options") if Options["Help"]: usage() if Options["Html-Output"]: global use_html use_html = True stdout_fd = sys.stdout for f in args: try: if not Options["Html-Output"]: # Pipe output for each argument through less less_cmd = ("less", "-R", "-") less_process = daklib.daksubprocess.Popen(less_cmd, stdin=subprocess.PIPE, bufsize=0) less_fd = less_process.stdin # -R added to display raw control chars for colour sys.stdout = less_fd try: if f.endswith(".changes"): check_changes(f) elif f.endswith(".deb") or f.endswith(".udeb"): # default to unstable when we don't have a .changes file # perhaps this should be a command line option? print check_deb('unstable', f) elif f.endswith(".dsc"): print check_dsc('unstable', f) else: utils.fubar("Unrecognised file type: '%s'." % (f)) finally: print output_package_relations() if not Options["Html-Output"]: # Reset stdout here so future less invocations aren't FUBAR less_fd.close() less_process.wait() sys.stdout = stdout_fd except IOError as e: if errno.errorcode[e.errno] == 'EPIPE': utils.warn("[examine-package] Caught EPIPE; skipping.") pass else: raise except KeyboardInterrupt: utils.warn("[examine-package] Caught C-c; skipping.") pass
def check(self, upload): changes = upload.changes # Only check sourceful uploads. if changes.source is None: return True # Only check uploads to unstable or experimental. if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions: return True cnf = Config() if 'Dinstall::LintianTags' not in cnf: return True tagfile = cnf['Dinstall::LintianTags'] with open(tagfile, 'r') as sourcefile: sourcecontent = sourcefile.read() try: lintiantags = yaml.safe_load(sourcecontent)['lintian'] except yaml.YAMLError as msg: raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg)) fd, temp_filename = utils.temp_filename(mode=0o644) temptagfile = os.fdopen(fd, 'w') for tags in lintiantags.itervalues(): for tag in tags: print >>temptagfile, tag temptagfile.close() changespath = os.path.join(upload.directory, changes.filename) try: cmd = [] result = 0 user = cnf.get('Dinstall::UnprivUser') or None if user is not None: cmd.extend(['sudo', '-H', '-u', user]) cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath]) output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: result = e.returncode output = e.output finally: os.unlink(temp_filename) if result == 2: utils.warn("lintian failed for %s [return code: %s]." % \ (changespath, result)) utils.warn(utils.prefix_multi_line_string(output, \ " [possible output:] ")) parsed_tags = lintian.parse_lintian_output(output) rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags)) if len(rejects) != 0: raise Reject('\n'.join(rejects)) return True
def get_pkg(package, version, architecture, session):
    """Look up a source ('source') or binary package row by name, version
    and architecture; warn and return None when nothing matches."""
    if architecture == 'source':
        query = session.query(DBSource).filter_by(source=package, version=version) \
            .join(DBSource.poolfile)
    else:
        query = session.query(DBBinary).filter_by(package=package, version=version) \
            .join(DBBinary.architecture) \
            .filter(Architecture.arch_string.in_([architecture, 'all'])) \
            .join(DBBinary.poolfile)

    result = query.first()
    if result is None:
        utils.warn("Could not find {0}_{1}_{2}.".format(package, version, architecture))
    return result
def check_changes (changes_filename): try: changes = utils.parse_changes (changes_filename) except ChangesUnicodeError: utils.warn("Encoding problem with changes file %s" % (changes_filename)) print display_changes(changes['distribution'], changes_filename) files = utils.build_file_list(changes) for f in files.keys(): if f.endswith(".deb") or f.endswith(".udeb"): print check_deb(changes['distribution'], f) if f.endswith(".dsc"): print check_dsc(changes['distribution'], f)
def get_transitions(self):
    """Load the release transitions YAML file.

    Returns the parsed transitions mapping, or None if the file is not
    configured, does not exist, or cannot be parsed.
    """
    cnf = Config()
    path = cnf.get('Dinstall::ReleaseTransitions', '')
    if path == '' or not os.path.exists(path):
        return None

    # Use a context manager so the handle is closed promptly; the old
    # file(path, 'r').read() leaked it until garbage collection.
    with open(path, 'r') as fd:
        contents = fd.read()
    try:
        transitions = yaml.safe_load(contents)
        return transitions
    except yaml.YAMLError as msg:
        utils.warn('Not checking transitions, the transitions file is broken: {0}'.format(msg))

    return None
def edit_new(overrides, upload, session):
    """Offer the current override data in $EDITOR and parse the edited
    result back into a list of override dicts."""
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    print_new(upload, overrides, indexed=0, session=session, file=temp_file)
    temp_file.close()
    # Spawn an editor on that file
    editor = os.environ.get("EDITOR", "vi")
    result = os.system("%s %s" % (editor, temp_filename))
    if result != 0:
        utils.fubar("%s invocation failed for %s." % (editor, temp_filename), result)
    # Read the edited data back in
    temp_file = utils.open_file(temp_filename)
    lines = temp_file.readlines()
    temp_file.close()
    os.unlink(temp_filename)

    # Index the known overrides by (type, package) for quick lookup.
    overrides_map = dict([((o['type'], o['package']), o) for o in overrides])
    new_overrides = []
    # Parse the new data
    for line in lines:
        line = line.strip()
        if not line or line.startswith('#'):
            continue
        fields = line.split()
        # Pad the list if necessary
        fields[len(fields):3] = [None] * (3 - len(fields))
        (pkg, priority, section) = fields[:3]
        if ':' in pkg:
            type, pkg = pkg.split(':', 1)
        else:
            type = 'deb'
        o = overrides_map.get((type, pkg), None)
        if o is None:
            utils.warn("Ignoring unknown package '%s'" % (pkg))
            continue
        if section.find('/') != -1:
            component = section.split('/', 1)[0]
        else:
            component = 'main'
        new_overrides.append(dict(
                package=pkg,
                type=type,
                section=section,
                component=component,
                priority=priority,
                included=o['included'],
                ))
    return new_overrides
def flush_orphans():
    # Move to the morgue every file in the current (Incoming) directory that
    # is old enough and not referenced by any .changes or .dsc file present.
    all_files = {}
    changes_files = []

    Logger.log(["check Incoming for old orphaned files", os.getcwd()])
    # Build up the list of all files in the directory
    for i in os.listdir('.'):
        if os.path.isfile(i):
            all_files[i] = 1
            if i.endswith(".changes"):
                changes_files.append(i)

    # Process all .changes and .dsc files.
    for changes_filename in changes_files:
        try:
            changes = utils.parse_changes(changes_filename)
            files = utils.build_file_list(changes)
        except:
            # Broken .changes: skip it; it will still be removed on age alone.
            utils.warn("error processing '%s'; skipping it. [Got %s]" % (changes_filename, sys.exc_info()[0]))
            continue

        dsc_files = {}
        for f in files.keys():
            if f.endswith(".dsc"):
                try:
                    dsc = utils.parse_changes(f, dsc_file=1)
                    dsc_files = utils.build_file_list(dsc, is_a_dsc=1)
                except:
                    utils.warn("error processing '%s'; skipping it. [Got %s]" % (f, sys.exc_info()[0]))
                    continue

        # Ensure all the files we've seen aren't deleted
        keys = []
        for i in (files.keys(), dsc_files.keys(), [changes_filename]):
            keys.extend(i)
        for key in keys:
            if all_files.has_key(key):
                if Options["Verbose"]:
                    print "Skipping, has parents, '%s'." % (key)
                del all_files[key]

    # Anything left at this stage is not referenced by a .changes (or
    # a .dsc) and should be deleted if old enough.
    for f in all_files.keys():
        if os.stat(f)[stat.ST_MTIME] < delete_date:
            remove('Incoming', f)
        else:
            if Options["Verbose"]:
                print "Skipping, too new, '%s'." % (os.path.basename(f))
def __init__(self, num_threads):
    """Create one ChangesGenerator plus num_threads ImportThreads, all
    sharing a single OneAtATime queue, and start them."""
    self.queue = OneAtATime()
    importers = [ImportThread(self, self.queue) for _ in range(num_threads)]
    self.threads = [ChangesGenerator(self, self.queue)] + importers
    try:
        for thread in self.threads:
            thread.start()
    except KeyboardInterrupt:
        print("Caught C-c; terminating.")
        warn("Caught C-c; terminating.")
        self.plsDie()
def check_files_not_symlinks(): """ Check files in the database aren't symlinks """ print "Building list of database files... ", before = time.time() q = DBConn().session().query(PoolFile).filter(PoolFile.filename.like('.dsc$')) for pf in q.all(): filename = os.path.abspath(os.path.join(pf.location.path, pf.filename)) if os.access(filename, os.R_OK) == 0: utils.warn("%s: doesn't exist." % (filename)) else: if os.path.islink(filename): utils.warn("%s: is a symlink." % (filename))
def remove (from_dir, f): fname = os.path.basename(f) if os.access(f, os.R_OK): Logger.log(["move file to morgue", from_dir, fname, del_dir]) if Options["Verbose"]: print "Removing '%s' (to '%s')." % (fname, del_dir) if Options["No-Action"]: return dest_filename = os.path.join(del_dir, fname) # If the destination file exists; try to find another filename to use if os.path.exists(dest_filename): dest_filename = utils.find_next_free(dest_filename, 10) Logger.log(["change destination file name", os.path.basename(dest_filename)]) utils.move(f, dest_filename, 0o660) else: Logger.log(["skipping file because of permission problem", fname]) utils.warn("skipping '%s', permission denied." % fname)
def run(self): cnf = Config() count = 1 dirs = [] dirs.append(cnf['Dir::Done']) for queue_name in [ "byhand", "new", "proposedupdates", "oldproposedupdates" ]: queue = get_policy_queue(queue_name) if queue: dirs.append(os.path.abspath(queue.path)) else: warn("Could not find queue %s in database" % queue_name) for checkdir in dirs: if os.path.exists(checkdir): print "Looking into %s" % (checkdir) for dirpath, dirnames, filenames in os.walk(checkdir, topdown=True): if not filenames: # Empty directory (or only subdirectories), next continue for changesfile in filenames: try: if not changesfile.endswith(".changes"): # Only interested in changes files. continue count += 1 if not get_dbchange(changesfile, self.session): to_import = ChangesToImport(dirpath, changesfile, count) if self.die: return self.queue.enqueue(to_import) except KeyboardInterrupt: print("got Ctrl-c in enqueue thread. terminating") self.parent.plsDie() sys.exit(1) self.queue.enqueue(EndOfChanges())
def get_id(package, version, architecture, session):
    """Return the database id of the given source or binary package, or
    None (with a warning) on no match or an ambiguous match."""
    if architecture == "source":
        q = session.execute("SELECT id FROM source WHERE source = :package AND version = :version",
                            {'package': package, 'version': version})
    else:
        q = session.execute("""SELECT b.id FROM binaries b, architecture a WHERE b.package = :package AND b.version = :version AND (a.arch_string = :arch OR a.arch_string = 'all') AND b.architecture = a.id""",
                            {'package': package, 'version': version, 'arch': architecture})

    rows = q.fetchall()
    if not rows:
        utils.warn("Couldn't find '%s_%s_%s'." % (package, version, architecture))
        return None
    if len(rows) > 1:
        utils.warn("Found more than one match for '%s_%s_%s'." % (package, version, architecture))
        return None
    return rows[0][0]
def version_checks(package, architecture, target_suite, new_version, session, force=False):
    """Enforce MustBeNewerThan/MustBeOlderThan version constraints for an
    upload of package/new_version targeted at target_suite.

    Warns about every violation and aborts via utils.fubar unless force
    is set, in which case it only warns.
    """
    if architecture == "source":
        suite_version_list = get_suite_version_by_source(package, session)
    else:
        suite_version_list = get_suite_version_by_package(package, architecture, session)

    must_be_newer_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeNewerThan")]
    must_be_older_than = [vc.reference.suite_name for vc in get_version_checks(target_suite, "MustBeOlderThan")]

    # Must be newer than an existing version in target_suite
    if target_suite not in must_be_newer_than:
        must_be_newer_than.append(target_suite)

    violations = False

    for suite, version in suite_version_list:
        # version_compare returns a negative/zero/positive int of arbitrary
        # magnitude, so comparisons must be against 0 ("< 1" / "> -1");
        # the old "> 1" silently missed equal and slightly-newer versions.
        vercmp = apt_pkg.version_compare(new_version, version)
        if suite in must_be_newer_than and vercmp < 1:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* newer than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True
        if suite in must_be_older_than and vercmp > -1:
            utils.warn("%s (%s): version check violated: %s targeted at %s is *not* older than %s in %s" % (package, architecture, new_version, target_suite, version, suite))
            violations = True

    if violations:
        if force:
            utils.warn("Continuing anyway (forced)...")
        else:
            utils.fubar("Aborting. Version checks violated and not forced.")
def main():
    """Entry point for 'dak stats': dispatch to the requested statistics mode."""
    global Cnf

    Cnf = utils.get_conf()
    Arguments = [('h', "help", "Stats::Options::Help")]
    for opt in ["help"]:
        key = "Stats::Options::%s" % opt
        if not Cnf.has_key(key):
            Cnf[key] = ""

    args = apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Stats::Options")
    if Options["Help"]:
        usage()

    if len(args) < 1:
        utils.warn("dak stats requires a MODE argument")
        usage(1)
    elif len(args) > 1:
        utils.warn("dak stats accepts only one MODE argument")
        usage(1)
    mode = args[0].lower()

    # Map each known mode to its handler.
    handlers = {
        "arch-space": per_arch_space_use,
        "pkg-nums": number_of_packages,
        "daily-install": daily_install_stats,
    }
    handler = handlers.get(mode)
    if handler is None:
        utils.warn("unknown mode '%s'" % (mode))
        usage(1)
    else:
        handler()
def check_pkg(upload, upload_copy, session):
    # Page an examination of a policy-queue upload through less: the
    # .changes, the .dsc (if sourceful) and every binary whose override is
    # missing.  stdout is temporarily redirected into the less pipe.
    missing = []
    save_stdout = sys.stdout
    changes = os.path.join(upload_copy.directory, upload.changes.changesname)
    suite_name = upload.target_suite.suite_name
    handler = PolicyQueueUploadHandler(upload, session)
    missing = [(m['type'], m["package"]) for m in handler.missing_overrides(hints=missing)]

    # -R: pass raw control characters through so colours survive paging.
    less_cmd = ("less", "-R", "-")
    less_process = daklib.daksubprocess.Popen(less_cmd, bufsize=0, stdin=subprocess.PIPE)
    try:
        sys.stdout = less_process.stdin
        print examine_package.display_changes(suite_name, changes)

        source = upload.source
        if source is not None:
            source_file = os.path.join(upload_copy.directory, os.path.basename(source.poolfile.filename))
            print examine_package.check_dsc(suite_name, source_file)

        for binary in upload.binaries:
            binary_file = os.path.join(upload_copy.directory, os.path.basename(binary.poolfile.filename))
            examined = examine_package.check_deb(suite_name, binary_file)
            # We always need to call check_deb to display package relations for every binary,
            # but we print its output only if new overrides are being added.
            if ("deb", binary.package) in missing:
                print examined

        print examine_package.output_package_relations()
        less_process.stdin.close()
    except IOError as e:
        if e.errno == errno.EPIPE:
            # User quit less before we finished writing; fine.
            utils.warn("[examine_package] Caught EPIPE; skipping.")
        else:
            raise
    except KeyboardInterrupt:
        utils.warn("[examine_package] Caught C-c; skipping.")
    finally:
        less_process.wait()
        sys.stdout = save_stdout
def edit_new(new, upload):
    """Offer the NEW override data in $EDITOR and apply the edited
    section/priority values back onto the upload."""
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    print_new(new, upload, indexed=0, file=temp_file)
    temp_file.close()
    # Spawn an editor on that file
    editor = os.environ.get("EDITOR", "vi")
    result = os.system("%s %s" % (editor, temp_filename))
    if result != 0:
        utils.fubar("%s invocation failed for %s." % (editor, temp_filename), result)
    # Read the edited data back in
    temp_file = utils.open_file(temp_filename)
    lines = temp_file.readlines()
    temp_file.close()
    os.unlink(temp_filename)
    # Parse the new data
    for line in lines:
        line = line.strip()
        if line == "":
            continue
        fields = line.split()
        # Pad the list if necessary
        fields[len(fields):3] = [None] * (3 - len(fields))
        (pkg, priority, section) = fields[:3]
        if not new.has_key(pkg):
            utils.warn("Ignoring unknown package '%s'" % (pkg))
            continue
        # Strip off any invalid markers, print_new will readd them.
        if section.endswith("[!]"):
            section = section[:-3]
        if priority.endswith("[!]"):
            priority = priority[:-3]
        for f in new[pkg]["files"]:
            upload.pkg.files[f]["section"] = section
            upload.pkg.files[f]["priority"] = priority
        new[pkg]["section"] = section
        new[pkg]["priority"] = priority
def check_signature(sig_filename, data_filename=""):
    """Verify a GPG signature with gpgv against the hardcoded keyrings.

    Returns the signing fingerprint, or None if the signature could not
    be verified or its status output could not be parsed.
    """
    keyrings = [
        "/home/joerg/keyring/keyrings/debian-keyring.gpg",
        "/home/joerg/keyring/keyrings/debian-maintainers.gpg",
        "/home/joerg/keyring/keyrings/debian-role-keys.gpg",
        "/home/joerg/keyring/keyrings/emeritus-keyring.pgp",
        "/home/joerg/keyring/keyrings/emeritus-keyring.gpg",
        "/home/joerg/keyring/keyrings/removed-keys.gpg",
        "/home/joerg/keyring/keyrings/removed-keys.pgp",
    ]
    keyringargs = " ".join(["--keyring %s" % x for x in keyrings])

    # Build the command line: gpgv reports machine-readable results on the
    # status fd, which we capture via a pipe.
    status_read, status_write = os.pipe()
    cmd = "gpgv --status-fd %s %s %s" % (status_write, keyringargs, sig_filename)

    # Invoke gpgv on the file
    (output, status, exit_status) = gpgv_get_status_output(cmd, status_read, status_write)

    # Process the status-fd output
    (keywords, internal_error) = process_gpgv_output(status)

    # If we failed to parse the status-fd output, let's just whine and bail now
    if internal_error:
        warn("Couldn't parse signature")
        return None

    # usually one would check for bad things here. We, however, do not care.

    # Next check gpgv exited with a zero return code
    if exit_status:
        warn("Couldn't parse signature")
        return None

    # Sanity check the good stuff we expect: VALIDSIG carries the fingerprint
    # as its first argument.
    valid = keywords.get("VALIDSIG")
    if not valid:
        warn("Couldn't parse signature")
        return None
    return valid[0]
def main():
    """Initial setup of an archive."""
    global Cnf

    Cnf = utils.get_conf()
    arguments = [('h', "help", "Init-Dirs::Options::Help")]
    for opt in ["help"]:
        key = "Init-Dirs::Options::%s" % opt
        if not Cnf.has_key(key):
            Cnf[key] = ""

    d = DBConn()

    arguments = apt_pkg.parse_commandline(Cnf, arguments, sys.argv)

    options = Cnf.subtree("Init-Dirs::Options")
    if options["Help"]:
        usage()
    elif arguments:
        utils.warn("dak init-dirs takes no arguments.")
        usage(exit_code=1)

    create_directories()
def check_pkg(upload):
    # Page an examination of a NEW upload through less: the .changes plus
    # the contents of every file flagged "new" (deb/dsc).  stdout is
    # temporarily redirected into the less pipe.
    save_stdout = sys.stdout
    try:
        # Unbuffered write pipe into less (-R keeps colour escapes intact).
        sys.stdout = os.popen("less -R -", 'w', 0)
        changes = utils.parse_changes(upload.pkg.changes_file)
        print examine_package.display_changes(changes['distribution'], upload.pkg.changes_file)
        files = upload.pkg.files
        for f in files.keys():
            if files[f].has_key("new"):
                ftype = files[f]["type"]
                if ftype == "deb":
                    print examine_package.check_deb(changes['distribution'], f)
                elif ftype == "dsc":
                    print examine_package.check_dsc(changes['distribution'], f)
        print examine_package.output_package_relations()
    except IOError as e:
        if e.errno == errno.EPIPE:
            # User quit less before we finished writing; fine.
            utils.warn("[examine_package] Caught EPIPE; skipping.")
        else:
            # Restore stdout before propagating unexpected I/O errors.
            sys.stdout = save_stdout
            raise
    except KeyboardInterrupt:
        utils.warn("[examine_package] Caught C-c; skipping.")
    sys.stdout = save_stdout
def main():
    """Archive files in Dir::Done that are at least 30 days old into
    YYYY/MM/DD subdirectories named after their mtime."""
    Cnf = utils.get_conf()
    count = 0

    # Only move files older than 30 days.  Without this offset every file
    # (mtime <= "now") was moved immediately, defeating the grace period.
    move_date = int(time.time()) - (30 * 86400)

    os.chdir(Cnf["Dir::Done"])
    files = glob.glob("%s/*" % (Cnf["Dir::Done"]))

    for filename in files:
        if os.path.isfile(filename):
            filemtime = os.stat(filename)[stat.ST_MTIME]
            if filemtime > move_date:
                # Still within the grace period - leave it in place.
                continue
            mtime = time.gmtime(filemtime)
            dirname = time.strftime("%Y/%m/%d", mtime)
            if not os.path.exists(dirname):
                print("Creating: %s" % (dirname))
                os.makedirs(dirname)
            dest = dirname + '/' + os.path.basename(filename)
            if os.path.exists(dest):
                utils.warn("%s already exists." % (dest))
                continue
            print("Move: %s -> %s" % (filename, dest))
            os.rename(filename, dest)
            count = count + 1

    print("Moved %d files." % (count))
def check_checksums(): """ Validate all files """ print "Getting file information from database..." q = DBConn().session().query(PoolFile) print "Checking file checksums & sizes..." for f in q: filename = f.fullpath try: fi = utils.open_file(filename) except: utils.warn("can't open '%s'." % (filename)) continue size = os.stat(filename)[stat.ST_SIZE] if size != f.filesize: utils.warn("**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, size, f.filesize)) md5sum = apt_pkg.md5sum(fi) if md5sum != f.md5sum: utils.warn("**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, md5sum, f.md5sum)) fi.seek(0) sha1sum = apt_pkg.sha1sum(fi) if sha1sum != f.sha1sum: utils.warn("**WARNING** sha1sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, sha1sum, f.sha1sum)) fi.seek(0) sha256sum = apt_pkg.sha256sum(fi) if sha256sum != f.sha256sum: utils.warn("**WARNING** sha256sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, sha256sum, f.sha256sum)) print "Done."
def remove(session, reason, suites, removals, whoami=None, partial=False, components=None, done_bugs=None, date=None, carbon_copy=None, close_related_bugs=False):
    """Batch remove a number of packages.

    Removes the given source/binary package associations from the given
    suites, logs the removal to the plain and 822-style removal logs, and
    (if a bug server is configured) sends bug-closing mails.

    @type session: SQLA Session
    @param session: The database session in use

    @type reason: string
    @param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")

    @type suites: list
    @param suites: A list of the suite names in which the removal should occur

    @type removals: list
    @param removals: A list of the removals.  Each element should be a tuple (or list) of at least the following
        for 4 items from the database (in order): package, version, architecture, (database) id.
        For source packages, the "architecture" should be set to "source".

    @type partial: bool
    @param partial: Whether the removal is "partial" (e.g. architecture specific).

    @type components: list
    @param components: List of components involved in a partial removal.  Can be an empty list to not restrict the
        removal to any components.

    @type whoami: string
    @param whoami: The person (or entity) doing the removal.  Defaults to utils.whoami()

    @type date: string
    @param date: The date of the removal. Defaults to commands.getoutput("date -R")

    @type done_bugs: list
    @param done_bugs: A list of bugs to be closed when doing this removal.

    @type close_related_bugs: bool
    @param close_related_bugs: Whether bugs related to the package being removed should be closed as well.
        NB: Not implemented for more than one suite.

    @type carbon_copy: list
    @param carbon_copy: A list of mail addresses to CC when doing removals.  NB: all items are taken "as-is" unlike
        "dak rm".

    @rtype: None
    @return: Nothing
    """
    # Generate the summary of what's to be removed
    d = {}
    summary = ""
    sources = []
    binaries = []
    whitelists = []
    versions = []
    newest_source = ''
    suite_ids_list = []
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    con_components = ''

    #######################################################################################################

    # Fail early on nonsensical requests before touching logs or the DB.
    if not reason:
        raise ValueError("Empty removal reason not permitted")
    if not removals:
        raise ValueError("Nothing to remove!?")
    if not suites:
        raise ValueError("Removals without a suite!?")

    if whoami is None:
        whoami = utils.whoami()

    if date is None:
        date = commands.getoutput("date -R")

    # For a partial removal restricted to components, build the extra SQL
    # condition used on the override DELETE below.
    if partial and components:
        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND component IN (%s)" % ", ".join([str(i) for i in component_ids_list])

    # Group removals as package -> version -> [architectures].
    for i in removals:
        package = i[0]
        version = i[1]
        architecture = i[2]
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    # Render the human-readable summary table and track the newest version
    # seen (used later as __VERSION__ when closing related bugs).
    for package in sorted(d):
        versions = sorted(d[package], key=functools.cmp_to_key(apt_pkg.version_compare))
        for version in versions:
            d[package][version].sort(key=utils.ArchKey)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
            if apt_pkg.version_compare(version, newest_source) > 0:
                newest_source = version

    # Re-parse the summary text to split it into source and binary lists.
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    element[2] = sub("source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    # Unknown suite names are silently skipped here.
    for suite in suites:
        s = get_suite(suite, session=session)
        if s is not None:
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    #######################################################################################################

    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    # Both logs are held under an exclusive lock for the whole removal so
    # concurrent removals don't interleave their entries.
    with utils.open_file(log_filename, "a") as logfile, utils.open_file(log822_filename, "a") as logfile822:
        fcntl.lockf(logfile, fcntl.LOCK_EX)
        fcntl.lockf(logfile822, fcntl.LOCK_EX)

        logfile.write("=========================================================================\n")
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
        if done_bugs:
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        # RFC822-style log: one stanza per removal.
        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

        if sources:
            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

        if binaries:
            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        logfile822.write("Reason: %s\n" % reason.replace('\n', '\n '))
        if done_bugs:
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

        # Perform the actual database removal.
        for i in removals:
            package = i[0]
            architecture = i[2]
            package_id = i[3]
            for suite_id in suite_ids_list:
                if architecture == "source":
                    session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                else:
                    session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                # Delete from the override file
                if not partial:
                    if architecture == "source":
                        type_id = dsc_type_id
                    else:
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (con_components),
                                    {'package': package, 'typeid': type_id, 'suiteid': suite_id})

        session.commit()
        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn("Cannot send mail to BugServer as Dinstall::BugServer is not configured")

            logfile.write("=========================================================================\n")
            logfile822.write("\n")
            return

        # read common subst variables for all bug closure mails
        Subst_common = {}
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
        if carbon_copy:
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
        if done_bugs:
            # NOTE(review): this aliases (does not copy) Subst_common, so the
            # keys set below leak into the close_related_bugs branch too.
            Subst_close_rm = Subst_common
            bcc = []
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
            if bcc:
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, reason)
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close-with-related")
                else:
                    mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close")
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            Subst_close_other = Subst_common
            bcc = []
            wnpp = utils.parse_wnpp_bug_file()
            newest_source = re_bin_only_nmu.sub('', newest_source)
            # Only supported when all removals belong to a single source package.
            if len(set(s.split("_", 1)[0] for s in sources)) == 1:
                # NOTE(review): 'source' here is the leaked loop variable from
                # the "Sources:" logging loop above; this assumes 'sources' is
                # non-empty — verify.
                source_pkg = source.split("_", 1)[0]
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs for multiple source packages is not supported. Please do it yourself.")
            if newest_source != '':
                Subst_close_other["__VERSION__"] = newest_source
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("No versions can be found. Close bugs yourself.")
            if bcc:
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # at this point, I just assume, that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0]
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg
            # Collect the open/forwarded bugs of the source package, dropping
            # bugs that are merged into one we already have.
            merged_bugs = set()
            other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open', 'status', 'forwarded')
            if other_bugs:
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bug in bts.get_status(bugno):
                            for merged in bug.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bug in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs, filtering that out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(Subst_close_other, cnf["Dir::Templates"] + "/rm.bug-close-related")
            # Only send if there is actually at least one bug to close.
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write("=========================================================================\n")
        logfile822.write("\n")
def main(): cnf = Config() Arguments = [ ('n', "no-action", "Import-Users-From-Passwd::Options::No-Action"), ('q', "quiet", "Import-Users-From-Passwd::Options::Quiet"), ('v', "verbose", "Import-Users-From-Passwd::Options::Verbose"), ('h', "help", "Import-Users-From-Passwd::Options::Help") ] for i in ["no-action", "quiet", "verbose", "help"]: if not cnf.has_key("Import-Users-From-Passwd::Options::%s" % (i)): cnf["Import-Users-From-Passwd::Options::%s" % (i)] = "" arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) Options = cnf.subtree("Import-Users-From-Passwd::Options") if Options["Help"]: usage() elif arguments: utils.warn( "dak import-users-from-passwd takes no non-option arguments.") usage(1) session = DBConn().session() valid_gid = int(cnf.get("Import-Users-From-Passwd::ValidGID", "")) passwd_unames = {} for entry in pwd.getpwall(): uname = entry[0] gid = entry[3] if valid_gid and gid != valid_gid: if Options["Verbose"]: print "Skipping %s (GID %s != Valid GID %s)." % (uname, gid, valid_gid) continue passwd_unames[uname] = "" postgres_unames = {} q = session.execute("SELECT usename FROM pg_user") for i in q.fetchall(): uname = i[0] postgres_unames[uname] = "" known_postgres_unames = {} for i in cnf.get("Import-Users-From-Passwd::KnownPostgres", "").split(","): uname = i.strip() known_postgres_unames[uname] = "" keys = postgres_unames.keys() keys.sort() for uname in keys: if not passwd_unames.has_key( uname) and not known_postgres_unames.has_key(uname): print "I: Deleting %s from Postgres, no longer in passwd or list of known Postgres users" % ( uname) q = session.execute('DROP USER "%s"' % (uname)) keys = passwd_unames.keys() keys.sort() safe_name = re.compile('^[A-Za-z0-9]+$') for uname in keys: if not postgres_unames.has_key(uname): if not Options["Quiet"]: print "Creating %s user in Postgres." 
% (uname) if not Options["No-Action"]: if safe_name.match(uname): # NB: I never figured out how to use a bind parameter for this query # XXX: Fix this as it looks like a potential SQL injection attack to me # (hence the safe_name match we do) try: q = session.execute('CREATE USER "%s"' % (uname)) session.commit() except Exception as e: utils.warn("Could not create user %s (%s)" % (uname, str(e))) session.rollback() else: print "NOT CREATING USER %s. Doesn't match safety regex" % uname session.commit()
def set_suite(file, suite, transaction, britney=False, force=False):
    """Reconcile the contents of a suite with a control file.

    Reads "package version architecture" lines from C{file} (the name
    shadows the builtin, kept for interface compatibility), then adds any
    listed package missing from the suite and removes any suite member not
    listed.  Commits at the end; if C{britney} is set, a britney changelog
    is generated afterwards.
    """
    session = transaction.session
    suite_id = suite.suite_id
    lines = file.readlines()

    # Our session is already in a transaction

    # Build up a dictionary of what is currently in the suite
    # Keyed on (package, version, arch_string); value is the association id
    # used for the DELETEs below.
    current = {}
    q = session.execute("""SELECT b.package, b.version, a.arch_string, ba.id
                             FROM binaries b, bin_associations ba, architecture a
                            WHERE ba.suite = :suiteid
                              AND ba.bin = b.id AND b.architecture = a.id""", {'suiteid': suite_id})
    for i in q:
        key = i[:3]
        current[key] = i[3]
    # Source packages use the literal 'source' as their "architecture".
    q = session.execute("""SELECT s.source, s.version, 'source', sa.id
                             FROM source s, src_associations sa
                            WHERE sa.suite = :suiteid
                              AND sa.source = s.id""", {'suiteid': suite_id})
    for i in q:
        key = i[:3]
        current[key] = i[3]

    # Build up a dictionary of what should be in the suite
    desired = set()
    for line in lines:
        split_line = line.strip().split()
        if len(split_line) != 3:
            # Malformed lines are reported and skipped, not fatal.
            utils.warn("'%s' does not break into 'package version architecture'." % (line[:-1]))
            continue
        desired.add(tuple(split_line))

    # Check to see which packages need added and add them
    for key in sorted(desired, key=functools.cmp_to_key(cmp_package_version)):
        if key not in current:
            (package, version, architecture) = key
            # May refuse the copy (depending on 'force'); get_pkg returning
            # None (package unknown) just skips the entry.
            version_checks(package, architecture, suite.suite_name, version, session, force)
            pkg = get_pkg(package, version, architecture, session)
            if pkg is None:
                continue

            component = pkg.poolfile.component
            if architecture == "source":
                transaction.copy_source(pkg, suite, component)
            else:
                transaction.copy_binary(pkg, suite, component)

            Logger.log(["added", suite.suite_name, " ".join(key)])

    # Check to see which packages need removed and remove them
    for key, pkid in current.iteritems():
        if key not in desired:
            (package, version, architecture) = key
            if architecture == "source":
                session.execute("""DELETE FROM src_associations WHERE id = :pkid""", {'pkid': pkid})
            else:
                session.execute("""DELETE FROM bin_associations WHERE id = :pkid""", {'pkid': pkid})
            Logger.log(["removed", suite.suite_name, " ".join(key), pkid])

    session.commit()

    if britney:
        britney_changelog(current, suite, session)
def main():
    """Entry point for 'dak queue-report'.

    Parses options, works out which policy queues to report on and where
    the output should go, then renders each queue via process_queue().
    """
    global Cnf

    Cnf = utils.get_conf()
    arg_spec = [('h', "help", "Queue-Report::Options::Help"),
                ('n', "new", "Queue-Report::Options::New"),
                ('8', '822', "Queue-Report::Options::822"),
                ('s', "sort", "Queue-Report::Options::Sort", "HasArg"),
                ('a', "age", "Queue-Report::Options::Age", "HasArg"),
                ('r', "rrd", "Queue-Report::Options::Rrd", "HasArg"),
                ('d', "directories", "Queue-Report::Options::Directories", "HasArg")]

    # Make sure the boolean options exist so the subtree lookup can't fail.
    for opt in ["help"]:
        key = "Queue-Report::Options::%s" % opt
        if key not in Cnf:
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, arg_spec, sys.argv)

    options = Cnf.subtree("Queue-Report::Options")
    if options["Help"]:
        usage()

    if "Queue-Report::Options::New" in Cnf:
        header()

    # Queue selection: command line beats configuration beats the default.
    if "Queue-Report::Options::Directories" in Cnf:
        queue_names = Cnf["Queue-Report::Options::Directories"].split(",")
    elif "Queue-Report::Directories" in Cnf:
        queue_names = Cnf.value_list("Queue-Report::Directories")
    else:
        queue_names = ["byhand", "new"]

    # RRD output directory: option beats configuration beats none.
    if "Queue-Report::Options::Rrd" in Cnf:
        rrd_dir = Cnf["Queue-Report::Options::Rrd"]
    elif "Dir::Rrd" in Cnf:
        rrd_dir = Cnf["Dir::Rrd"]
    else:
        rrd_dir = None

    report_fh = None
    if "Queue-Report::Options::822" in Cnf:
        # 822 output goes to the configured location, falling back to stdout.
        report_fh = sys.stdout
        location = Cnf.get("Queue-Report::ReportLocations::822Location")
        if location:
            report_fh = open(location, "w")

    session = DBConn().session()

    for name in queue_names:
        queue = session.query(PolicyQueue).filter_by(queue_name=name).first()
        if queue is None:
            utils.warn("Cannot find queue %s" % name)
        else:
            process_queue(queue, report_fh, rrd_dir)

    if "Queue-Report::Options::822" in Cnf:
        report_fh.close()

    if "Queue-Report::Options::New" in Cnf:
        footer()
def main():
    """Entry point for 'dak override'.

    With only a package name, prints the package's current section and
    priority in the selected suite.  With an extra section and/or priority
    argument, updates the override table accordingly and optionally closes
    bugs given via --done.
    """
    cnf = Config()

    Arguments = [('h', "help", "Override::Options::Help"),
                 ('c', "check", "Override::Options::Check"),
                 ('d', "done", "Override::Options::Done", "HasArg"),
                 ('n', "no-action", "Override::Options::No-Action"),
                 ('s', "suite", "Override::Options::Suite", "HasArg"),
                 ]
    # Ensure option keys exist; suite defaults to unstable.
    for i in ["help", "check", "no-action"]:
        key = "Override::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    if "Override::Options::Suite" not in cnf:
        cnf["Override::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Override::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    if not arguments:
        utils.fubar("package name is a required argument.")

    package = arguments.pop(0)
    suite_name = Options["Suite"]
    if arguments and len(arguments) > 2:
        utils.fubar("Too many arguments")

    suite = get_suite(suite_name, session)
    if suite is None:
        utils.fubar("Unknown suite '{0}'".format(suite_name))

    if arguments and len(arguments) == 1:
        # Determine if the argument is a priority or a section...
        # One COUNT per table; exactly one of them should match.
        arg = arguments.pop()
        q = session.execute("""
        SELECT ( SELECT COUNT(*) FROM section WHERE section = :arg ) AS secs,
               ( SELECT COUNT(*) FROM priority WHERE priority = :arg ) AS prios
               """, {'arg': arg})
        r = q.fetchall()
        if r[0][0] == 1:
            # It is a section; keep priority unchanged (".").
            arguments = (arg, ".")
        elif r[0][1] == 1:
            # It is a priority; keep section unchanged (".").
            arguments = (".", arg)
        else:
            utils.fubar("%s is not a valid section or priority" % (arg))

    # Retrieve current section/priority...
    oldsection, oldsourcesection, oldpriority = None, None, None
    # Two passes: '=' dsc matches the source override, '!=' dsc the binary.
    for packagetype in ['source', 'binary']:
        eqdsc = '!='
        if packagetype == 'source':
            eqdsc = '='
        q = session.execute("""
    SELECT priority.priority AS prio, section.section AS sect, override_type.type AS type
      FROM override, priority, section, suite, override_type
     WHERE override.priority = priority.id
       AND override.type = override_type.id
       AND override_type.type %s 'dsc'
       AND override.section = section.id
       AND override.package = :package
       AND override.suite = suite.id
       AND suite.suite_name = :suite_name
        """ % (eqdsc), {'package': package, 'suite_name': suite_name})

        if q.rowcount == 0:
            continue
        if q.rowcount > 1:
            utils.fubar("%s is ambiguous. Matches %d packages" % (package, q.rowcount))

        r = q.fetchone()
        if packagetype == 'binary':
            oldsection = r[1]
            oldpriority = r[0]
        else:
            oldsourcesection = r[1]
            oldpriority = 'source'

    if not oldpriority and not oldsourcesection:
        utils.fubar("Unable to find package %s" % (package))

    if oldsection and oldsourcesection and oldsection != oldsourcesection:
        # When setting overrides, both source & binary will become the same section
        utils.warn("Source is in section '%s' instead of '%s'" % (oldsourcesection, oldsection))

    if not oldsection:
        oldsection = oldsourcesection

    if not arguments:
        # Query-only mode: report and stop.
        print "%s is in section '%s' at priority '%s'" % (package, oldsection, oldpriority)
        sys.exit(0)

    # At this point, we have a new section and priority... check they're valid...
    newsection, newpriority = arguments

    # "." means "keep the current value".
    if newsection == ".":
        newsection = oldsection
    if newpriority == ".":
        newpriority = oldpriority

    s = get_section(newsection, session)
    if s is None:
        utils.fubar("Supplied section %s is invalid" % (newsection))
    newsecid = s.section_id

    p = get_priority(newpriority, session)
    if p is None:
        utils.fubar("Supplied priority %s is invalid" % (newpriority))
    newprioid = p.priority_id

    if newpriority == oldpriority and newsection == oldsection:
        print "I: Doing nothing"
        sys.exit(0)

    if oldpriority == 'source' and newpriority != 'source':
        utils.fubar("Trying to change priority of a source-only package")

    if Options["Check"]:
        print "WARNING: Check option is deprecated by Debian Policy 4.0.1"

    # If we're in no-action mode
    if Options["No-Action"]:
        if newpriority != oldpriority:
            print "I: Would change priority from %s to %s" % (oldpriority, newpriority)
        if newsection != oldsection:
            print "I: Would change section from %s to %s" % (oldsection, newsection)
        if "Done" in Options:
            print "I: Would also close bug(s): %s" % (Options["Done"])
        sys.exit(0)

    if newpriority != oldpriority:
        print "I: Will change priority from %s to %s" % (oldpriority, newpriority)

    if newsection != oldsection:
        print "I: Will change section from %s to %s" % (oldsection, newsection)

    if "Done" not in Options:
        pass
        #utils.warn("No bugs to close have been specified. Noone will know you have done this.")
    else:
        print "I: Will close bug(s): %s" % (Options["Done"])

    # Interactive confirmation before touching the database.
    game_over()

    Logger = daklog.Logger("override")

    dsc_otype_id = get_override_type('dsc').overridetype_id

    # We're already in a transaction
    # We're in "do it" mode, we have something to do... do it
    if newpriority != oldpriority:
        # Priority changes never apply to the dsc (source) override.
        session.execute("""
        UPDATE override
           SET priority = :newprioid
         WHERE package = :package
           AND override.type != :otypedsc
           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
           {'newprioid': newprioid, 'package': package, 'otypedsc': dsc_otype_id, 'suite_name': suite_name})
        Logger.log(["changed priority", package, oldpriority, newpriority])

    if newsection != oldsection:
        # Section changes apply to all override types (source and binary).
        q = session.execute("""
        UPDATE override
           SET section = :newsecid
         WHERE package = :package
           AND suite = (SELECT id FROM suite WHERE suite_name = :suite_name)""",
           {'newsecid': newsecid, 'package': package, 'suite_name': suite_name})
        Logger.log(["changed section", package, oldsection, newsection])

    session.commit()

    if "Done" in Options:
        if "Dinstall::BugServer" not in cnf:
            utils.warn("Asked to send Done message but Dinstall::BugServer is not configured")
            Logger.close()
            return

        # Build the substitution dictionary for the bug-closing template.
        Subst = {}
        Subst["__OVERRIDE_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        bcc = []
        if cnf.find("Dinstall::Bcc") != "":
            bcc.append(cnf["Dinstall::Bcc"])
        if bcc:
            Subst["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst["__BCC__"] = "X-Filler: 42"
        if "Dinstall::PackagesServer" in cnf:
            Subst["__CC__"] = "Cc: " + package + "@" + cnf["Dinstall::PackagesServer"] + "\nX-DAK: dak override"
        else:
            Subst["__CC__"] = "X-DAK: dak override"
        Subst["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst["__WHOAMI__"] = utils.whoami()
        Subst["__SOURCE__"] = package

        summary = "Concerning package %s...\n" % (package)
        summary += "Operating on the %s suite\n" % (suite_name)
        if newpriority != oldpriority:
            summary += "Changed priority from %s to %s\n" % (oldpriority, newpriority)
        if newsection != oldsection:
            summary += "Changed section from %s to %s\n" % (oldsection, newsection)
        Subst["__SUMMARY__"] = summary

        # One mail per bug listed in --done.
        template = os.path.join(cnf["Dir::Templates"], "override.bug-close")
        for bug in utils.split_args(Options["Done"]):
            Subst["__BUG_NUMBER__"] = bug
            mail_message = utils.TemplateSubst(Subst, template)
            utils.send_mail(mail_message)
            Logger.log(["closed bug", bug])

    Logger.close()
def do_new(upload, upload_copy, handler, session):
    """Interactively process one upload in the NEW queue.

    Repeatedly shows the upload's missing overrides and byhand state and
    prompts the operator for an action (accept, reject, edit overrides,
    notes, prod, skip, quit, ...) until the upload is dealt with or
    skipped.  Uses the module-level Options and Logger globals.
    """
    cnf = Config()

    run_user_inspect_command(upload, upload_copy)

    # The main NEW processing loop
    done = False
    missing = []
    while not done:
        queuedir = upload.policy_queue.path
        byhand = upload.byhand

        # Previous answers are passed back in as hints.
        missing = handler.missing_overrides(hints=missing)
        broken = not check_valid(missing, session)

        changesname = os.path.basename(upload.changes.changesname)

        # Banner with the basic facts of the upload.
        print
        print changesname
        print "-" * len(changesname)
        print
        print " Target: {0}".format(upload.target_suite.suite_name)
        print " Changed-By: {0}".format(upload.changes.changedby)
        print " Date: {0}".format(upload.changes.date)
        print

        #if len(byhand) == 0 and len(missing) == 0:
        #    break

        if missing:
            print "NEW\n"

        # 'extra' was replaced by 'optional'; normalise silently.
        for package in missing:
            if package["type"] == "deb" and package["priority"] == "extra":
                package["priority"] = "optional"

        answer = "XXX"
        if Options["No-Action"] or Options["Automatic"]:
            answer = 'S'

        note = print_new(upload, missing, indexed=0, session=session)
        prompt = ""

        # Byhand components must be processed out-of-band first.
        has_unprocessed_byhand = False
        for f in byhand:
            path = os.path.join(queuedir, f.filename)
            if not f.processed and os.path.exists(path):
                print "W: {0} still present; please process byhand components and try again".format(f.filename)
                has_unprocessed_byhand = True

        # Accepting is only offered once byhand/broken/note blockers are gone.
        if not has_unprocessed_byhand and not broken and not note:
            if len(missing) == 0:
                prompt = "Accept, "
                answer = 'A'
            else:
                prompt = "Add overrides, "
        if broken:
            print "W: [!] marked entries must be fixed before package can be processed."
        if note:
            print "W: note must be removed before package can be processed."
            prompt += "RemOve all notes, Remove note, "

        prompt += "Edit overrides, Check, Manual reject, Note edit, Prod, [S]kip, Quit ?"

        # Keep prompting until the answer is one of the offered letters;
        # empty input picks the [bracketed] default from the prompt.
        while prompt.find(answer) == -1:
            answer = utils.our_raw_input(prompt)
            m = re_default_answer.search(prompt)
            if answer == "":
                answer = m.group(1)
            answer = answer[:1].upper()

        # Trainees may only look, not touch.
        if answer in ('A', 'E', 'M', 'O', 'R') and Options["Trainee"]:
            utils.warn("Trainees can't do that")
            continue

        if answer == 'A' and not Options["Trainee"]:
            # Accept: add the overrides, optionally notify, and accept.
            add_overrides(missing, upload.target_suite, session)
            if Config().find_b("Dinstall::BXANotify"):
                do_bxa_notification(missing, upload, session)
            handler.accept()
            done = True
            Logger.log(["NEW ACCEPT", upload.changes.changesname])
        elif answer == 'C':
            check_pkg(upload, upload_copy, session)
        elif answer == 'E' and not Options["Trainee"]:
            missing = edit_overrides(missing, upload, session)
        elif answer == 'M' and not Options["Trainee"]:
            # Manual reject: seed the editor with option text plus any
            # existing NEW comments.
            reason = Options.get('Manual-Reject', '') + "\n"
            reason = reason + "\n\n=====\n\n".join([n.comment for n in get_new_comments(upload.policy_queue, upload.changes.source, session=session)])
            reason = get_reject_reason(reason)
            if reason is not None:
                Logger.log(["NEW REJECT", upload.changes.changesname])
                handler.reject(reason)
                done = True
        elif answer == 'N':
            # Note edit; a 0 return means "quit the whole run".
            if edit_note(get_new_comments(upload.policy_queue, upload.changes.source, session=session),
                         upload, session, bool(Options["Trainee"])) == 0:
                end()
                sys.exit(0)
        elif answer == 'P' and not Options["Trainee"]:
            if prod_maintainer(get_new_comments(upload.policy_queue, upload.changes.source, session=session),
                               upload) == 0:
                end()
                sys.exit(0)
            Logger.log(["NEW PROD", upload.changes.changesname])
        elif answer == 'R' and not Options["Trainee"]:
            # Remove the note for this specific version only.
            confirm = utils.our_raw_input("Really clear note (y/N)? ").lower()
            if confirm == "y":
                for c in get_new_comments(upload.policy_queue, upload.changes.source, upload.changes.version, session=session):
                    session.delete(c)
                session.commit()
        elif answer == 'O' and not Options["Trainee"]:
            # Remove all notes for this source package.
            confirm = utils.our_raw_input("Really clear all notes (y/N)? ").lower()
            if confirm == "y":
                for c in get_new_comments(upload.policy_queue, upload.changes.source, session=session):
                    session.delete(c)
                session.commit()
        elif answer == 'S':
            done = True
        elif answer == 'Q':
            end()
            sys.exit(0)

    if handler.get_action():
        print "PENDING %s\n" % handler.get_action()
def main():
    """Entry point for 'dak control-overrides'.

    Parses options, ensures exactly one of add/list/set/change was
    requested, and either lists the overrides or feeds the given files
    (or stdin) through process_file().
    """
    global Logger

    cnf = Config()
    arg_spec = [('a', "add", "Control-Overrides::Options::Add"),
                ('c', "component", "Control-Overrides::Options::Component", "HasArg"),
                ('h', "help", "Control-Overrides::Options::Help"),
                ('l', "list", "Control-Overrides::Options::List"),
                ('q', "quiet", "Control-Overrides::Options::Quiet"),
                ('s', "suite", "Control-Overrides::Options::Suite", "HasArg"),
                ('S', "set", "Control-Overrides::Options::Set"),
                ('C', "change", "Control-Overrides::Options::Change"),
                ('n', "no-action", "Control-Overrides::Options::No-Action"),
                ('t', "type", "Control-Overrides::Options::Type", "HasArg")]

    # Fill in defaults for any option not already configured.
    for flag in ["add", "help", "list", "quiet", "set", "change", "no-action"]:
        key = "Control-Overrides::Options::%s" % flag
        if not cnf.has_key(key):
            cnf[key] = ""
    for key, default in [("Control-Overrides::Options::Component", "main"),
                         ("Control-Overrides::Options::Suite", "unstable"),
                         ("Control-Overrides::Options::Type", "deb")]:
        if not cnf.has_key(key):
            cnf[key] = default

    file_list = apt_pkg.parse_commandline(cnf.Cnf, arg_spec, sys.argv)

    if cnf["Control-Overrides::Options::Help"]:
        usage()

    session = DBConn().session()

    # Exactly one action may be selected.
    mode = None
    for candidate in ["add", "list", "set", "change"]:
        if cnf["Control-Overrides::Options::%s" % candidate]:
            if mode:
                utils.fubar("Can not perform more than one action at once.")
            mode = candidate

    if mode is None:
        utils.fubar("No action specified.")

    suite = cnf["Control-Overrides::Options::Suite"]
    component = cnf["Control-Overrides::Options::Component"]
    otype = cnf["Control-Overrides::Options::Type"]

    if mode == "list":
        list_overrides(suite, component, otype, session)
    else:
        if get_suite(suite).untouchable:
            utils.fubar("%s: suite is untouchable" % suite)

        action = not cnf["Control-Overrides::Options::No-Action"]
        if not action:
            utils.warn("In No-Action Mode")

        Logger = daklog.Logger("control-overrides", mode)
        if file_list:
            for filename in file_list:
                process_file(utils.open_file(filename), suite, component,
                             otype, mode, action, session)
        else:
            process_file(sys.stdin, suite, component, otype, mode, action,
                         session)
        Logger.close()
def process_file(file, suite, component, otype, mode, action, session):
    """Apply an override file to the database.

    Reads override lines from C{file} (name shadows the builtin; kept for
    interface compatibility) and, depending on C{mode} ("add", "set" or
    "change"), inserts, updates and/or deletes override rows for the given
    suite/component/type.  With C{action} false, everything is computed and
    logged but no SQL is executed.
    """
    cnf = Config()

    s = get_suite(suite, session=session)
    if s is None:
        utils.fubar("Suite '%s' not recognised." % (suite))
    suite_id = s.suite_id

    c = get_component(component, session=session)
    if c is None:
        utils.fubar("Component '%s' not recognised." % (component))
    component_id = c.component_id

    o = get_override_type(otype)
    if o is None:
        utils.fubar("Type '%s' not recognised. (Valid types are deb, udeb and dsc.)" % (otype))
    type_id = o.overridetype_id

    # --set is done mostly internal for performance reasons; most
    # invocations of --set will be updates and making people wait 2-3
    # minutes while 6000 select+inserts are run needlessly isn't cool.

    original = {}
    new = {}
    c_skipped = 0
    c_added = 0
    c_updated = 0
    c_removed = 0
    c_error = 0

    # Snapshot the current overrides: package -> (priority_id, section_id,
    # maintainer, priority_name, section_name).
    q = session.execute("""SELECT o.package, o.priority, o.section, o.maintainer, p.priority, s.section
                           FROM override o, priority p, section s
                           WHERE o.suite = :suiteid AND o.component = :componentid AND o.type = :typeid
                             and o.priority = p.id and o.section = s.id""",
                        {'suiteid': suite_id, 'componentid': component_id, 'typeid': type_id})
    for i in q.fetchall():
        original[i[0]] = i[1:]

    start_time = time.time()

    # Name -> id lookup tables, also used to validate input lines.
    section_cache = get_sections(session)
    priority_cache = get_priorities(session)

    # Our session is already in a transaction

    for line in file.readlines():
        # Strip comments and surrounding whitespace; skip blank lines.
        line = re_comments.sub('', line).strip()
        if line == "":
            continue

        maintainer_override = None
        if otype == "dsc":
            # dsc lines: package section [maintainer-override]
            split_line = line.split(None, 2)
            if len(split_line) == 2:
                (package, section) = split_line
            elif len(split_line) == 3:
                (package, section, maintainer_override) = split_line
            else:
                utils.warn("'%s' does not break into 'package section [maintainer-override]'." % (line))
                c_error += 1
                continue
            priority = "source"
        else:  # binary or udeb
            # binary lines: package priority section [maintainer-override]
            split_line = line.split(None, 3)
            if len(split_line) == 3:
                (package, priority, section) = split_line
            elif len(split_line) == 4:
                (package, priority, section, maintainer_override) = split_line
            else:
                utils.warn("'%s' does not break into 'package priority section [maintainer-override]'." % (line))
                c_error += 1
                continue

        if not section_cache.has_key(section):
            utils.warn("'%s' is not a valid section. ['%s' in suite %s, component %s]." % (section, package, suite, component))
            c_error += 1
            continue
        section_id = section_cache[section]

        if not priority_cache.has_key(priority):
            utils.warn("'%s' is not a valid priority. ['%s' in suite %s, component %s]." % (priority, package, suite, component))
            c_error += 1
            continue
        priority_id = priority_cache[priority]

        # First occurrence of a package wins; duplicates are errors.
        if new.has_key(package):
            utils.warn("Can't insert duplicate entry for '%s'; ignoring all but the first. [suite %s, component %s]" % (package, suite, component))
            c_error += 1
            continue
        new[package] = ""

        if original.has_key(package):
            (old_priority_id, old_section_id, old_maintainer_override, old_priority, old_section) = original[package]
            # NOTE: relies on 'and' binding tighter than 'or', i.e. this is
            # mode == "add" OR (everything unchanged).
            if mode == "add" or old_priority_id == priority_id and \
               old_section_id == section_id and \
               old_maintainer_override == maintainer_override:
                # If it's unchanged or we're in 'add only' mode, ignore it
                c_skipped += 1
                continue
            else:
                # If it's changed, delete the old one so we can
                # reinsert it with the new information
                c_updated += 1
                if action:
                    session.execute("""DELETE FROM override WHERE suite = :suite AND component = :component
                                       AND package = :package AND type = :typeid""",
                                    {'suite': suite_id, 'component': component_id, 'package': package, 'typeid': type_id})

                # Log changes
                if old_priority_id != priority_id:
                    Logger.log(["changed priority", package, old_priority, priority])
                if old_section_id != section_id:
                    Logger.log(["changed section", package, old_section, section])
                if old_maintainer_override != maintainer_override:
                    Logger.log(["changed maintainer override", package, old_maintainer_override, maintainer_override])
                update_p = 1
        elif mode == "change":
            # Ignore additions in 'change only' mode
            c_skipped += 1
            continue
        else:
            c_added += 1
            update_p = 0

        if action:
            # Normalise an empty maintainer override to NULL.
            if not maintainer_override:
                m_o = None
            else:
                m_o = maintainer_override
            session.execute("""INSERT INTO override (suite, component, type, package,
                                                     priority, section, maintainer)
                                     VALUES (:suiteid, :componentid, :typeid,
                                             :package, :priorityid, :sectionid,
                                             :maintainer)""",
                            {'suiteid': suite_id, 'componentid': component_id, 'typeid': type_id,
                             'package': package, 'priorityid': priority_id, 'sectionid': section_id,
                             'maintainer': m_o})

        if not update_p:
            Logger.log(["new override", suite, component, otype, package, priority, section, maintainer_override])

    if mode == "set":
        # Delete any packages which were removed
        for package in original.keys():
            if not new.has_key(package):
                if action:
                    session.execute("""DELETE FROM override WHERE suite = :suiteid AND component = :componentid
                                       AND package = :package AND type = :typeid""",
                                    {'suiteid': suite_id, 'componentid': component_id, 'package': package, 'typeid': type_id})
                c_removed += 1
                Logger.log(["removed override", suite, component, otype, package])

    if action:
        session.commit()

    if not cnf["Control-Overrides::Options::Quiet"]:
        print "Done in %d seconds. [Updated = %d, Added = %d, Removed = %d, Skipped = %d, Errors = %d]" % (int(time.time() - start_time),
                                                                                                          c_updated, c_added, c_removed, c_skipped, c_error)

    Logger.log(["set complete", c_updated, c_added, c_removed, c_skipped, c_error])
def process(osuite, affected_suites, originosuite, component, otype, session):
    """Reconcile override entries for one (suite, component, type).

    Three passes, all inside the session's existing transaction:
      1. mark overrides whose package still exists in ``affected_suites``,
         and delete unused ones older than 14 days (skipping blacklisted
         packages currently in incoming);
      2. for dsc overrides only, create missing source overrides based on
         the binary overrides of the same suite/component;
      3. if ``originosuite`` is given, copy overrides missing here from
         that suite, and sync priority/section/maintainer of existing ones.

    ``otype`` is one of "deb", "udeb" or "dsc".  DML statements are skipped
    under --no-action, but the trailing commit always runs.
    """
    global Logger, Options, sections, priorities

    # Resolve all textual arguments to database ids up front; any unknown
    # name is fatal.
    o = get_suite(osuite, session)
    if o is None:
        utils.fubar("Suite '%s' not recognised." % (osuite))
    osuite_id = o.suite_id

    originosuite_id = None
    if originosuite:
        oo = get_suite(originosuite, session)
        if oo is None:
            utils.fubar("Suite '%s' not recognised." % (originosuite))
        originosuite_id = oo.suite_id

    c = get_component(component, session)
    if c is None:
        utils.fubar("Component '%s' not recognised." % (component))
    component_id = c.component_id

    ot = get_override_type(otype, session)
    if ot is None:
        utils.fubar(
            "Type '%s' not recognised. (Valid types are deb, udeb and dsc)" %
            (otype))
    type_id = ot.overridetype_id
    dsc_type_id = get_override_type("dsc", session).overridetype_id

    source_priority_id = get_priority("source", session).priority_id

    if otype == "deb" or otype == "udeb":
        # packages maps binary package name -> "seen an override" flag (0/1).
        packages = {}
        # TODO: Fix to use placeholders (check how to with arrays)
        q = session.execute(
            """
SELECT b.package
  FROM binaries b
  JOIN bin_associations ba ON b.id = ba.bin
  JOIN suite ON ba.suite = suite.id
  JOIN files_archive_map af ON b.file = af.file_id
       AND suite.archive_id = af.archive_id
 WHERE b.type = :otype
   AND ba.suite IN (%s)
   AND af.component_id = :component_id
""" % (",".join([str(i) for i in affected_suites])), {
                'otype': otype,
                'component_id': component_id
            })
        for i in q.fetchall():
            packages[i[0]] = 0

    # src_packages maps source package name -> "seen an override" flag (0/1).
    src_packages = {}
    q = session.execute(
        """
SELECT s.source
  FROM source s
  JOIN src_associations sa ON s.id = sa.source
  JOIN suite ON sa.suite = suite.id
  JOIN files_archive_map af ON s.file = af.file_id
       AND suite.archive_id = af.archive_id
 WHERE sa.suite IN (%s)
   AND af.component_id = :component_id
""" % (",".join([str(i) for i in affected_suites])),
        {'component_id': component_id})
    for i in q.fetchall():
        src_packages[i[0]] = 0

    # -----------
    # Drop unused overrides

    q = session.execute(
        """SELECT package, priority, section, maintainer
             FROM override
            WHERE suite = :suite_id
              AND component = :component_id
              AND type = :type_id""", {
            'suite_id': osuite_id,
            'component_id': component_id,
            'type_id': type_id
        })
    # We're already within a transaction
    if otype == "dsc":
        for i in q.fetchall():
            package = i[0]
            if package in src_packages:
                src_packages[package] = 1
            else:
                if package in blacklist:
                    utils.warn("%s in incoming, not touching" % package)
                    continue
                Logger.log([
                    "removing unused override", osuite, component, otype,
                    package, priorities[i[1]], sections[i[2]], i[3]
                ])
                if not Options["No-Action"]:
                    # Only prune overrides that have been unused for 14 days,
                    # so freshly added ones survive a transient mismatch.
                    session.execute(
                        """DELETE FROM override
                            WHERE package = :package
                              AND suite = :suite_id
                              AND component = :component_id
                              AND type = :type_id
                              AND created < now() - interval '14 days'""", {
                            'package': package,
                            'suite_id': osuite_id,
                            'component_id': component_id,
                            'type_id': type_id
                        })
        # create source overrides based on binary overrides, as source
        # overrides not always get created
        q = session.execute(
            """SELECT package, priority, section, maintainer
                 FROM override
                WHERE suite = :suite_id
                  AND component = :component_id""", {
                'suite_id': osuite_id,
                'component_id': component_id
            })
        for i in q.fetchall():
            package = i[0]
            if package not in src_packages or src_packages[package]:
                continue
            src_packages[package] = 1
            Logger.log([
                "add missing override", osuite, component, otype, package,
                "source", sections[i[2]], i[3]
            ])
            if not Options["No-Action"]:
                # Source overrides always use priority "source"; section and
                # maintainer are inherited from the binary override row.
                session.execute(
                    """INSERT INTO override (package, suite, component,
                                             priority, section, type,
                                             maintainer)
                       VALUES (:package, :suite_id, :component_id,
                               :priority_id, :section_id, :type_id,
                               :maintainer)""", {
                        'package': package,
                        'suite_id': osuite_id,
                        'component_id': component_id,
                        'priority_id': source_priority_id,
                        'section_id': i[2],
                        'type_id': dsc_type_id,
                        'maintainer': i[3]
                    })
        # Check whether originosuite has an override for us we can
        # copy
        if originosuite:
            # LEFT JOIN: target.* columns are NULL when this suite has no
            # override for the package yet.
            q = session.execute(
                """SELECT origin.package, origin.priority, origin.section,
                          origin.maintainer, target.priority, target.section,
                          target.maintainer
                     FROM override origin
                LEFT JOIN override target ON (origin.package = target.package
                                              AND target.suite = :suite_id
                                              AND origin.component = target.component
                                              AND origin.type = target.type)
                    WHERE origin.suite = :originsuite_id
                      AND origin.component = :component_id
                      AND origin.type = :type_id""", {
                    'suite_id': osuite_id,
                    'originsuite_id': originosuite_id,
                    'component_id': component_id,
                    'type_id': type_id
                })
            for i in q.fetchall():
                package = i[0]
                if package not in src_packages or src_packages[package]:
                    # Override already present here: sync it with the origin
                    # suite if the two differ.
                    if i[4] and (i[1] != i[4] or i[2] != i[5]
                                 or i[3] != i[6]):
                        Logger.log([
                            "syncing override", osuite, component, otype,
                            package, "source", sections[i[5]], i[6], "source",
                            sections[i[2]], i[3]
                        ])
                        if not Options["No-Action"]:
                            session.execute(
                                """UPDATE override
                                      SET priority = :priority,
                                          section = :section,
                                          maintainer = :maintainer
                                    WHERE package = :package
                                      AND suite = :suite_id
                                      AND component = :component_id
                                      AND type = :type_id""", {
                                    'priority': i[1],
                                    'section': i[2],
                                    'maintainer': i[3],
                                    'package': package,
                                    'suite_id': osuite_id,
                                    'component_id': component_id,
                                    'type_id': dsc_type_id
                                })
                    continue
                # we can copy
                src_packages[package] = 1
                Logger.log([
                    "copying missing override", osuite, component, otype,
                    package, "source", sections[i[2]], i[3]
                ])
                if not Options["No-Action"]:
                    session.execute(
                        """INSERT INTO override (package, suite, component,
                                                 priority, section, type,
                                                 maintainer)
                           VALUES (:package, :suite_id, :component_id,
                                   :priority_id, :section_id, :type_id,
                                   :maintainer)""", {
                            'package': package,
                            'suite_id': osuite_id,
                            'component_id': component_id,
                            'priority_id': source_priority_id,
                            'section_id': i[2],
                            'type_id': dsc_type_id,
                            'maintainer': i[3]
                        })

        for package, hasoverride in list(src_packages.items()):
            if not hasoverride:
                utils.warn("%s has no override!" % package)

    else:  # binary override
        for i in q.fetchall():
            package = i[0]
            if package in packages:
                packages[package] = 1
            else:
                if package in blacklist:
                    utils.warn("%s in incoming, not touching" % package)
                    continue
                Logger.log([
                    "removing unused override", osuite, component, otype,
                    package, priorities[i[1]], sections[i[2]], i[3]
                ])
                if not Options["No-Action"]:
                    session.execute(
                        """DELETE FROM override
                            WHERE package = :package
                              AND suite = :suite_id
                              AND component = :component_id
                              AND type = :type_id
                              AND created < now() - interval '14 days'""", {
                            'package': package,
                            'suite_id': osuite_id,
                            'component_id': component_id,
                            'type_id': type_id
                        })

        # Check whether originosuite has an override for us we can
        # copy
        if originosuite:
            q = session.execute(
                """SELECT origin.package, origin.priority, origin.section,
                          origin.maintainer, target.priority, target.section,
                          target.maintainer
                     FROM override origin
                LEFT JOIN override target ON (origin.package = target.package
                                              AND target.suite = :suite_id
                                              AND origin.component = target.component
                                              AND origin.type = target.type)
                    WHERE origin.suite = :originsuite_id
                      AND origin.component = :component_id
                      AND origin.type = :type_id""", {
                    'suite_id': osuite_id,
                    'originsuite_id': originosuite_id,
                    'component_id': component_id,
                    'type_id': type_id
                })
            for i in q.fetchall():
                package = i[0]
                if package not in packages or packages[package]:
                    # Existing override: sync with origin suite if different.
                    if i[4] and (i[1] != i[4] or i[2] != i[5]
                                 or i[3] != i[6]):
                        Logger.log([
                            "syncing override", osuite, component, otype,
                            package, priorities[i[4]], sections[i[5]], i[6],
                            priorities[i[1]], sections[i[2]], i[3]
                        ])
                        if not Options["No-Action"]:
                            session.execute(
                                """UPDATE override
                                      SET priority = :priority_id,
                                          section = :section_id,
                                          maintainer = :maintainer
                                    WHERE package = :package
                                      AND suite = :suite_id
                                      AND component = :component_id
                                      AND type = :type_id""", {
                                    'priority_id': i[1],
                                    'section_id': i[2],
                                    'maintainer': i[3],
                                    'package': package,
                                    'suite_id': osuite_id,
                                    'component_id': component_id,
                                    'type_id': type_id
                                })
                    continue
                # we can copy
                packages[package] = 1
                Logger.log([
                    "copying missing override", osuite, component, otype,
                    package, priorities[i[1]], sections[i[2]], i[3]
                ])
                if not Options["No-Action"]:
                    session.execute(
                        """INSERT INTO override (package, suite, component,
                                                 priority, section, type,
                                                 maintainer)
                           VALUES (:package, :suite_id, :component_id,
                                   :priority_id, :section_id, :type_id,
                                   :maintainer)""", {
                            'package': package,
                            'suite_id': osuite_id,
                            'component_id': component_id,
                            'priority_id': i[1],
                            'section_id': i[2],
                            'type_id': type_id,
                            'maintainer': i[3]
                        })

        for package, hasoverride in list(packages.items()):
            if not hasoverride:
                utils.warn("%s has no override!" % package)

    session.commit()
    sys.stdout.flush()
def check(self, upload): changes = upload.changes # Only check sourceful uploads. if changes.source is None: return True # Only check uploads to unstable or experimental. if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions: return True cnf = Config() if 'Dinstall::LintianTags' not in cnf: return True tagfile = cnf['Dinstall::LintianTags'] with open(tagfile, 'r') as sourcefile: sourcecontent = sourcefile.read() try: lintiantags = yaml.safe_load(sourcecontent)['lintian'] except yaml.YAMLError as msg: raise Exception( 'Could not read lintian tags file {0}, YAML error: {1}'.format( tagfile, msg)) fd, temp_filename = utils.temp_filename(mode=0o644) temptagfile = os.fdopen(fd, 'w') for tags in lintiantags.itervalues(): for tag in tags: print >> temptagfile, tag temptagfile.close() changespath = os.path.join(upload.directory, changes.filename) try: cmd = [] result = 0 user = cnf.get('Dinstall::UnprivUser') or None if user is not None: cmd.extend(['sudo', '-H', '-u', user]) cmd.extend([ '/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath ]) output = daklib.daksubprocess.check_output( cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: result = e.returncode output = e.output finally: os.unlink(temp_filename) if result == 2: utils.warn("lintian failed for %s [return code: %s]." % \ (changespath, result)) utils.warn(utils.prefix_multi_line_string(output, \ " [possible output:] ")) parsed_tags = lintian.parse_lintian_output(output) rejects = list( lintian.generate_reject_messages(parsed_tags, lintiantags)) if len(rejects) != 0: raise Reject('\n'.join(rejects)) return True
def main():
    """Entry point for 'dak process-upload'.

    Parses options, takes the process-upload lock (unless --no-action),
    resolves the list of .changes files (from -d/--directory or the command
    line), processes them and prints/loggs accept/reject summary counts.
    """
    global Options, Logger

    cnf = Config()
    summarystats = SummaryStats()

    Arguments = [('a', "automatic", "Dinstall::Options::Automatic"),
                 ('h', "help", "Dinstall::Options::Help"),
                 ('n', "no-action", "Dinstall::Options::No-Action"),
                 ('p', "no-lock", "Dinstall::Options::No-Lock"),
                 ('s', "no-mail", "Dinstall::Options::No-Mail"),
                 ('d', "directory", "Dinstall::Options::Directory", "HasArg")]

    # Make sure every option key exists so later lookups can't KeyError.
    for i in [
            "automatic", "help", "no-action", "no-lock", "no-mail", "version",
            "directory"
    ]:
        key = "Dinstall::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    changes_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Dinstall::Options")

    if Options["Help"]:
        usage()

    # -n/--dry-run invalidates some other options which would involve things happening
    if Options["No-Action"]:
        Options["Automatic"] = ""

    # Obtain lock if not in no-action mode and initialize the log
    if not Options["No-Action"]:
        lock_fd = os.open(
            os.path.join(cnf["Dir::Lock"], 'process-upload.lock'),
            os.O_RDWR | os.O_CREAT)
        try:
            # Non-blocking: fail fast if another instance holds the lock.
            fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError as e:
            if e.errno in (errno.EACCES, errno.EAGAIN):
                utils.fubar(
                    "Couldn't obtain lock; assuming another 'dak process-upload' is already running."
                )
            else:
                raise

    # Initialise UrgencyLog() - it will deal with the case where we don't
    # want to log urgencies
    urgencylog = UrgencyLog()

    Logger = daklog.Logger("process-upload", Options["No-Action"])

    # If we have a directory flag, use it to find our files
    if cnf["Dinstall::Options::Directory"] != "":
        # Note that we clobber the list of files we were given in this case
        # so warn if the user has done both
        if len(changes_files) > 0:
            utils.warn(
                "Directory provided so ignoring files given on command line")

        changes_files = utils.get_changes_files(
            cnf["Dinstall::Options::Directory"])
        Logger.log([
            "Using changes files from directory",
            cnf["Dinstall::Options::Directory"],
            len(changes_files)
        ])
    elif not len(changes_files) > 0:
        utils.fubar("No changes files given and no directory specified")
    else:
        Logger.log(
            ["Using changes files from command-line", len(changes_files)])

    process_changes(changes_files)

    if summarystats.accept_count:
        sets = "set"
        if summarystats.accept_count > 1:
            sets = "sets"
        print("Installed %d package %s, %s." %
              (summarystats.accept_count, sets,
               utils.size_type(int(summarystats.accept_bytes))))
        Logger.log(
            ["total", summarystats.accept_count, summarystats.accept_bytes])

    if summarystats.reject_count:
        sets = "set"
        if summarystats.reject_count > 1:
            sets = "sets"
        print("Rejected %d package %s." % (summarystats.reject_count, sets))
        Logger.log(["rejected", summarystats.reject_count])

    if not Options["No-Action"]:
        urgencylog.close()

    Logger.close()
def do_update(self): print "Adding known_changes table" try: c = self.db.cursor() c.execute(""" CREATE TABLE known_changes ( id SERIAL PRIMARY KEY, changesname TEXT NOT NULL, seen TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), source TEXT NOT NULL, binaries TEXT NOT NULL, architecture TEXT NOT NULL, version TEXT NOT NULL, distribution TEXT NOT NULL, urgency TEXT NOT NULL, maintainer TEXT NOT NULL, fingerprint TEXT NOT NULL, changedby TEXT NOT NULL, date TEXT NOT NULL, UNIQUE (changesname) ) """) c.execute("CREATE INDEX changesname_ind ON known_changes(changesname)") c.execute("CREATE INDEX changestimestamp_ind ON known_changes(seen)") c.execute("CREATE INDEX changessource_ind ON known_changes(source)") c.execute( "CREATE INDEX changesdistribution_ind ON known_changes(distribution)" ) c.execute("CREATE INDEX changesurgency_ind ON known_changes(urgency)") c.execute("GRANT ALL ON known_changes TO ftpmaster;") c.execute("GRANT SELECT ON known_changes TO public;") c.execute("UPDATE config SET value = '18' WHERE name = 'db_revision'") self.db.commit() print "Done. Now looking for old changes files" count = 0 failure = 0 cnf = Config() for directory in [ "Accepted", "Byhand", "Done", "New", "ProposedUpdates", "OldProposedUpdates" ]: checkdir = cnf["Dir::Queue::%s" % (directory)] if os.path.exists(checkdir): print "Looking into %s" % (checkdir) for filename in os.listdir(checkdir): if not filename.endswith(".changes"): # Only interested in changes files. continue try: count += 1 print "Directory %s, file %7d, failures %3d. (%s)" % ( directory, count, failure, filename) changes = Changes() changes.changes_file = filename changesfile = os.path.join(checkdir, filename) changes.changes = parse_changes(changesfile, signing_rules=-1) changes.changes["fingerprint"] = check_signature( changesfile) changes.add_known_changes(directory) except InvalidDscError as line: warn("syntax error in .dsc file '%s', line %s." 
% (f, line)) failure += 1 except ChangesUnicodeError: warn( "found invalid changes file, not properly utf-8 encoded" ) failure += 1 except psycopg2.ProgrammingError as msg: self.db.rollback() raise DBUpdateError( "Unable to apply knownchanges update 18, rollback issued. Error message : %s" % (str(msg)))
def main():
    """Entry point for 'dak control-suite'.

    Parses options, determines the single requested action (add/list/
    remove/set) and its suite, then either lists the suite contents or
    applies the change inside an ArchiveTransaction via process_file().
    """
    global Logger

    cnf = Config()

    Arguments = [('a', "add", "Control-Suite::Options::Add", "HasArg"),
                 ('b', "britney", "Control-Suite::Options::Britney"),
                 ('f', 'force', 'Control-Suite::Options::Force'),
                 ('h', "help", "Control-Suite::Options::Help"),
                 ('l', "list", "Control-Suite::Options::List", "HasArg"),
                 ('r', "remove", "Control-Suite::Options::Remove", "HasArg"),
                 ('s', "set", "Control-Suite::Options::Set", "HasArg")]

    # Default every option key so later lookups can't fail.
    for i in ["add", "britney", "help", "list", "remove", "set", "version"]:
        key = "Control-Suite::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    try:
        file_list = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    except SystemError as e:
        print("%s\n" % e)
        usage(1)
    Options = cnf.subtree("Control-Suite::Options")

    if Options["Help"]:
        usage()

    force = "Force" in Options and Options["Force"]

    action = None

    # Exactly one of add/list/remove/set may be given; its argument is
    # the suite name to operate on.
    for i in ("add", "list", "remove", "set"):
        if cnf["Control-Suite::Options::%s" % (i)] != "":
            suite_name = cnf["Control-Suite::Options::%s" % (i)]

            if action:
                utils.fubar("Can only perform one action at a time.")

            action = i

    # Need an action...
    if action is None:
        utils.fubar("No action specified.")

    britney = False
    if action == "set" and cnf["Control-Suite::Options::Britney"]:
        britney = True

    if action == "list":
        # Read-only path: no logger, no archive transaction needed.
        session = DBConn().session()
        suite = session.query(Suite).filter_by(suite_name=suite_name).one()
        get_list(suite, session)
    else:
        Logger = daklog.Logger("control-suite")

        with ArchiveTransaction() as transaction:
            session = transaction.session
            suite = session.query(Suite).filter_by(suite_name=suite_name).one()

            # Suites with allowcsset=FALSE refuse "set" unless --force.
            if action == "set" and not suite.allowcsset:
                if force:
                    utils.warn(
                        "Would not normally allow setting suite {0} (allowcsset is FALSE), but --force used"
                        .format(suite_name))
                else:
                    utils.fubar(
                        "Will not reset suite {0} due to its database configuration (allowcsset is FALSE)"
                        .format(suite_name))

            if file_list:
                for f in file_list:
                    process_file(open(f), suite, action, transaction, britney,
                                 force)
            else:
                # No files given: read the package list from stdin.
                process_file(sys.stdin, suite, action, transaction, britney,
                             force)

        Logger.close()
def set_suite(file, suite, transaction, britney=False, force=False):
    """Make ``suite`` contain exactly the packages listed in ``file``.

    Each input line is "package version architecture".  Packages in the
    list but not in the suite are copied in (and queued for propagation to
    the propup suites when newer); packages in the suite but not in the
    list are removed.  Commits at the end; optionally writes a britney
    changelog afterwards.
    """
    session = transaction.session
    suite_id = suite.suite_id
    lines = file.readlines()
    # Additions are mirrored into any copy queues attached to the suite.
    suites = [suite] + [q.suite for q in suite.copy_queues]
    propup_suites = get_propup_suites(suite, session)

    # Our session is already in a transaction

    def get_binary_q(suite_id):
        # All binaries in the suite, oldest version first.
        return session.execute(
            """SELECT b.package, b.version, a.arch_string, ba.id
                 FROM binaries b, bin_associations ba, architecture a
                WHERE ba.suite = :suiteid
                  AND ba.bin = b.id
                  AND b.architecture = a.id
                ORDER BY b.version ASC""", {'suiteid': suite_id})

    def get_source_q(suite_id):
        # All sources in the suite, oldest version first; 'source' stands
        # in for the architecture column.
        return session.execute(
            """SELECT s.source, s.version, 'source', sa.id
                 FROM source s, src_associations sa
                WHERE sa.suite = :suiteid
                  AND sa.source = s.id
                ORDER BY s.version ASC""", {'suiteid': suite_id})

    # Build up a dictionary of what is currently in the suite:
    # (package, version, arch) -> association id.
    current = {}

    q = get_binary_q(suite_id)
    for i in q:
        key = i[:3]
        current[key] = i[3]

    q = get_source_q(suite_id)
    for i in q:
        key = i[:3]
        current[key] = i[3]

    # Build a dictionary of what's currently in the propup suites:
    # (package, arch) -> newest version.
    psuites_current = {}
    propups_needed = {}
    for p_s in propup_suites:
        propups_needed[p_s.suite_id] = set()
        psuites_current[p_s.suite_id] = {}
        q = get_binary_q(p_s.suite_id)
        for i in q:
            key = (i[0], i[2])
            # the query is sorted, so we only keep the newest version
            psuites_current[p_s.suite_id][key] = i[1]
        q = get_source_q(p_s.suite_id)
        for i in q:
            key = (i[0], i[2])
            # the query is sorted, so we only keep the newest version
            psuites_current[p_s.suite_id][key] = i[1]

    # Build up a dictionary of what should be in the suite
    desired = set()
    for line in lines:
        split_line = line.strip().split()
        if len(split_line) != 3:
            utils.warn(
                "'%s' does not break into 'package version architecture'." %
                (line[:-1]))
            continue
        desired.add(tuple(split_line))

    # Check to see which packages need added and add them
    for key in sorted(desired, key=functools.cmp_to_key(cmp_package_version)):
        if key not in current:
            (package, version, architecture) = key
            version_checks(package, architecture, suite.suite_name, version,
                           session, force)
            pkg = get_pkg(package, version, architecture, session)
            if pkg is None:
                continue

            copy_to_suites(transaction, pkg, suites)
            Logger.log(["added", suite.suite_name, " ".join(key)])

            # Record any propagation this addition makes necessary.
            check_propups(pkg, psuites_current, propups_needed)

    # Check to see which packages need removed and remove them
    for key, pkid in six.iteritems(current):
        if key not in desired:
            (package, version, architecture) = key
            if architecture == "source":
                session.execute(
                    """DELETE FROM src_associations WHERE id = :pkid""",
                    {'pkid': pkid})
            else:
                session.execute(
                    """DELETE FROM bin_associations WHERE id = :pkid""",
                    {'pkid': pkid})
            Logger.log(["removed", suite.suite_name, " ".join(key), pkid])

    # Propagate the queued packages up into their propup suites.
    for p_s in propup_suites:
        for pkg in propups_needed[p_s.suite_id]:
            copy_to_suites(transaction, pkg, [p_s])
            info = (pkg.name, pkg.version, pkg.arch_string)
            Logger.log(["propup", p_s.suite_name, " ".join(info)])

    session.commit()

    if britney:
        britney_changelog(current, suite, session)
def main(): global Cnf, db_files, waste, excluded # Cnf = utils.get_conf() Arguments = [('h', "help", "Examine-Package::Options::Help"), ('H', "html-output", "Examine-Package::Options::Html-Output"), ] for i in ["Help", "Html-Output", "partial-html"]: key = "Examine-Package::Options::%s" % i if key not in Cnf: Cnf[key] = "" args = apt_pkg.parse_commandline(Cnf, Arguments, sys.argv) Options = Cnf.subtree("Examine-Package::Options") if Options["Help"]: usage() if Options["Html-Output"]: global use_html use_html = True stdout_fd = sys.stdout for f in args: try: if not Options["Html-Output"]: # Pipe output for each argument through less less_cmd = ("less", "-R", "-") less_process = daklib.daksubprocess.Popen(less_cmd, stdin=subprocess.PIPE, bufsize=0) less_fd = less_process.stdin # -R added to display raw control chars for colour sys.stdout = less_fd try: if f.endswith(".changes"): check_changes(f) elif f.endswith(".deb") or f.endswith(".udeb"): # default to unstable when we don't have a .changes file # perhaps this should be a command line option? print(check_deb('unstable', f)) elif f.endswith(".dsc"): print(check_dsc('unstable', f)) else: utils.fubar("Unrecognised file type: '%s'." % (f)) finally: print(output_package_relations()) if not Options["Html-Output"]: # Reset stdout here so future less invocations aren't FUBAR less_fd.close() less_process.wait() sys.stdout = stdout_fd except IOError as e: if e.errno == errno.EPIPE: utils.warn("[examine-package] Caught EPIPE; skipping.") pass else: raise except KeyboardInterrupt: utils.warn("[examine-package] Caught C-c; skipping.") pass
def main():
    """Entry point for 'dak cruft-report'.

    Selects a set of cruft checks based on --mode (daily/full/bdo), scans
    the suite's Sources and Packages index files to build package maps,
    and prints the resulting reports (NBS, unbuilt binaries, anais, nfu,
    newer-version, ...).
    """
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    Arguments = [('h', "help", "Cruft-Report::Options::Help"),
                 ('m', "mode", "Cruft-Report::Options::Mode", "HasArg"),
                 ('R', "rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s', "suite", "Cruft-Report::Options::Suite", "HasArg"),
                 ('w', "wanna-build-dump",
                  "Cruft-Report::Options::Wanna-Build-Dump", "HasArg")]
    for i in ["help", "Rdep-Check"]:
        key = "Cruft-Report::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    # Defaults; -s/-m/-w on the command line override these below.
    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite",
                                                  "unstable")

    if "Cruft-Report::Options::Mode" not in cnf:
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if "Cruft-Report::Options::Wanna-Build-Dump" not in cnf:
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
    if Options["Mode"] == "daily":
        checks = [
            "nbs", "nviu", "nvit", "obsolete source", "outdated non-free",
            "nfu", "nbs metadata"
        ]
    elif Options["Mode"] == "full":
        checks = [
            "nbs", "nviu", "nvit", "obsolete source", "outdated non-free",
            "nfu", "nbs metadata", "dubious nbs", "bnb", "bms", "anais"
        ]
    elif Options["Mode"] == "bdo":
        checks = ["nbs", "obsolete source"]
    else:
        utils.warn(
            "%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood."
            % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    # Index maps filled while scanning Sources/Packages files below.
    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}

    anais_output = ""

    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())

    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "nbs metadata" in checks:
        reportNBSMetadata(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources" % (suite.archive.path,
                                                      suite_name, component)
        filename = utils.find_possibly_compressed_file(filename)
        with apt_pkg.TagFile(filename) as Sources:
            while Sources.step():
                source = Sources.section.find('Package')
                source_version = Sources.section.find('Version')
                architecture = Sources.section.find('Architecture')
                binaries = Sources.section.find('Binary')
                binaries_list = [i.strip() for i in binaries.split(',')]

                if "bnb" in checks:
                    # Check for binaries not built on any architecture.
                    for binary in binaries_list:
                        if binary not in bins_in_suite:
                            bin_not_built.setdefault(source, {})
                            bin_not_built[source][binary] = ""

                if "anais" in checks:
                    anais_output += do_anais(architecture, binaries_list,
                                             source, session)

                # build indices for checking "no source" later
                source_index = component + '/' + source
                src_pkgs[source] = source_index
                for binary in binaries_list:
                    bin_pkgs[binary] = source
                source_binaries[source] = binaries
                source_versions[source] = source_version

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "experimental":
        # d-i ships its own Packages file alongside the regular components.
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [
            a.arch_string for a in get_suite_architectures(
                suite_name, skipsrc=True, skipall=True, session=session)
        ]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match(
                    "kfreebsd", architecture):
                continue

            if "nfu" in checks:
                nfu_packages.setdefault(architecture, [])
                nfu_entries = parse_nfu(architecture)

            filename = "%s/dists/%s/%s/binary-%s/Packages" % (
                suite.archive.path, suite_name, component, architecture)
            filename = utils.find_possibly_compressed_file(filename)
            with apt_pkg.TagFile(filename) as Packages:
                while Packages.step():
                    package = Packages.section.find('Package')
                    source = Packages.section.find('Source', "")
                    version = Packages.section.find('Version')
                    if source == "":
                        source = package
                    # Keep only the newest version seen for each binary.
                    if package in bin2source and \
                           apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                        bin2source[package]["version"] = version
                        bin2source[package]["source"] = source
                    else:
                        bin2source[package] = {}
                        bin2source[package]["version"] = version
                        bin2source[package]["source"] = source
                    if source.find("(") != -1:
                        # "Source: src (ver)" form — split it apart.
                        m = re_extract_src_version.match(source)
                        source = m.group(1)
                        version = m.group(2)
                    if package not in bin_pkgs:
                        # Binary with no source in the suite: NBS candidate.
                        nbs.setdefault(source, {})
                        nbs[source].setdefault(package, {})
                        nbs[source][package][version] = ""
                    else:
                        if "nfu" in checks:
                            if package in nfu_entries and \
                                   version != source_versions[source]:
                                # only suggest to remove out-of-date packages
                                nfu_packages[architecture].append(
                                    (package, version,
                                     source_versions[source]))

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
    dubious_nbs = {}
    version_sort_key = functools.cmp_to_key(apt_pkg.version_compare)
    for source in nbs:
        for package in nbs[source]:
            latest_version = max(nbs[source][package], key=version_sort_key)
            source_version = source_versions.get(source, "0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package,
                        suite_id, session)

    if "nviu" in checks:
        do_newer_version('unstable', 'experimental', 'NVIU', session)

    if "nvit" in checks:
        do_newer_version('testing', 'testing-proposed-updates', 'NVIT',
                         session)

    ###

    if Options["Mode"] == "full":
        print("=" * 75)
        print()

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print("Unbuilt binary packages")
        print("-----------------------")
        print()
        for source in sorted(bin_not_built):
            binaries = sorted(bin_not_built[source])
            print(" o %s: %s" % (source, ", ".join(binaries)))
        print()

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print("Architecture Not Allowed In Source")
        print("----------------------------------")
        print(anais_output)
        print()

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)
def main():
    """
    Prepare the work to be done, do basic checks.

    Validates that the transitions file and temp directory are configured
    and exist, handles --import while still privileged, then parses the
    transitions and dispatches to --edit / --check or simply prints them.
    Always terminates via sys.exit().

    @attention: This function may run B{within sudo}
    """
    global Cnf

    #####################################
    #### This can run within sudo !! ####
    #####################################
    init()

    # Check if there is a file defined (and existant)
    transpath = Cnf.get("Dinstall::ReleaseTransitions", "")
    if transpath == "":
        utils.warn("Dinstall::ReleaseTransitions not defined")
        sys.exit(1)
    if not os.path.exists(transpath):
        utils.warn("ReleaseTransitions file, %s, not found." %
                   (Cnf["Dinstall::ReleaseTransitions"]))
        sys.exit(1)
    # Also check if our temp directory is defined and existant
    temppath = Cnf.get("Dir::TempPath", "")
    if temppath == "":
        utils.warn("Dir::TempPath not defined")
        sys.exit(1)
    if not os.path.exists(temppath):
        utils.warn("Temporary path %s not found." % (Cnf["Dir::TempPath"]))
        sys.exit(1)

    if Options["import"]:
        # Import must happen before privileges are dropped; exit code 2 on
        # a malformed transitions file, 0 on success.
        try:
            write_transitions_from_file(Options["import"])
        except TransitionsError as m:
            print(m)
            sys.exit(2)
        sys.exit(0)

    ##############################################
    #### Up to here it can run within sudo !! ####
    ##############################################

    # Parse the yaml file
    transitions = load_transitions(transpath)
    if transitions is None:
        # Something very broken with the transitions, exit
        utils.warn("Could not parse existing transitions file. Aborting.")
        sys.exit(2)

    if Options["edit"]:
        # Let's edit the transitions file
        edit_transitions()
    elif Options["check"]:
        # Check and remove outdated transitions
        check_transitions(transitions)
    else:
        # Output information about the currently defined transitions.
        print("Currently defined transitions:")
        transition_info(transitions)

    sys.exit(0)
def process_file(file, suite, action, transaction, britney=False, force=False):
    """Apply an add/remove/set action to ``suite`` from a package list file.

    Each input line is "package version architecture"; malformed lines are
    warned about and skipped.  "set" is delegated to set_suite(); "add"
    copies packages (and their copy-queue mirrors) into the suite, "remove"
    deletes the suite association.  Commits at the end.
    """
    session = transaction.session

    if action == "set":
        set_suite(file, suite, transaction, britney, force)
        return

    suite_id = suite.suite_id

    # Additions are mirrored into any copy queues attached to the suite.
    suites = [suite] + [q.suite for q in suite.copy_queues]
    extra_archives = [suite.archive]

    request = []

    # Our session is already in a transaction
    for line in file:
        split_line = line.strip().split()
        if len(split_line) != 3:
            utils.warn(
                "'%s' does not break into 'package version architecture'." %
                (line[:-1]))
            continue
        request.append(split_line)

    request.sort(key=functools.cmp_to_key(cmp_package_version))

    for package, version, architecture in request:
        pkg = get_pkg(package, version, architecture, session)
        if pkg is None:
            continue
        if architecture == 'source':
            pkid = pkg.source_id
        else:
            pkid = pkg.binary_id
        component = pkg.poolfile.component

        # Do version checks when adding packages
        if action == "add":
            version_checks(package, architecture, suite.suite_name, version,
                           session, force)

        if architecture == "source":
            # Find the existing association ID, if any
            q = session.execute(
                """SELECT id FROM src_associations
                    WHERE suite = :suiteid and source = :pkid""", {
                    'suiteid': suite_id,
                    'pkid': pkid
                })
            ql = q.fetchall()
            if len(ql) < 1:
                association_id = None
            else:
                association_id = ql[0][0]

            # Take action
            if action == "add":
                if association_id:
                    utils.warn(
                        "'%s_%s_%s' already exists in suite %s." %
                        (package, version, architecture, suite.suite_name))
                    continue
                else:
                    for s in suites:
                        transaction.copy_source(pkg, s, component)
                    Logger.log([
                        "added", package, version, architecture,
                        suite.suite_name, pkid
                    ])
            elif action == "remove":
                if association_id is None:
                    utils.warn("'%s_%s_%s' doesn't exist in suite %s." %
                               (package, version, architecture, suite))
                    continue
                else:
                    session.execute(
                        """DELETE FROM src_associations WHERE id = :pkid""",
                        {'pkid': association_id})
                    Logger.log([
                        "removed", package, version, architecture,
                        suite.suite_name, pkid
                    ])

        else:
            # Find the existing associations ID, if any
            q = session.execute(
                """SELECT id FROM bin_associations
                    WHERE suite = :suiteid and bin = :pkid""", {
                    'suiteid': suite_id,
                    'pkid': pkid
                })
            ql = q.fetchall()
            if len(ql) < 1:
                association_id = None
            else:
                association_id = ql[0][0]

            # Take action
            if action == "add":
                if association_id:
                    utils.warn("'%s_%s_%s' already exists in suite %s." %
                               (package, version, architecture, suite))
                    continue
                else:
                    for s in suites:
                        transaction.copy_binary(
                            pkg, s, component, extra_archives=extra_archives)
                    Logger.log([
                        "added", package, version, architecture,
                        suite.suite_name, pkid
                    ])
            elif action == "remove":
                if association_id is None:
                    utils.warn("'%s_%s_%s' doesn't exist in suite %s." %
                               (package, version, architecture, suite))
                    continue
                else:
                    session.execute(
                        """DELETE FROM bin_associations WHERE id = :pkid""",
                        {'pkid': association_id})
                    Logger.log([
                        "removed", package, version, architecture,
                        suite.suite_name, pkid
                    ])

    session.commit()