def parse_nfu(architecture): cnf = Config() # utils/hpodder_1.1.5.0: Not-For-Us [optional:out-of-date] r = re.compile(r"^\w+/([^_]+)_.*: Not-For-Us") ret = set() filename = "%s/%s-all.txt" % (cnf["Cruft-Report::Options::Wanna-Build-Dump"], architecture) # Not all architectures may have a wanna-build dump, so we want to ignore missin # files if os.path.exists(filename): f = utils.open_file(filename) for line in f: if line[0] == ' ': continue m = r.match(line) if m: ret.add(m.group(1)) f.close() else: utils.warn("No wanna-build dump file for architecture %s" % architecture) return ret
def daily_install_stats():
    """Print, per day, the number of installed packages and installed size (MiB).

    Parses the pipe-separated log file "2001-11" (hard-coded): field 0 is a
    timestamp (YYYYMMDD prefix used as the day key), field 1 the program,
    field 2 the action and field 5 the size in bytes for "installed" lines.
    """
    stats = {}
    f = utils.open_file("2001-11")
    for line in f.readlines():
        split = line.strip().split('|')
        program = split[1]
        # Only katie/process-accepted entries are of interest.
        if program != "katie" and program != "process-accepted":
            continue
        action = split[2]
        if action != "installing changes" and action != "installed":
            continue
        date = split[0][:8]  # YYYYMMDD
        if not stats.has_key(date):
            stats[date] = {}
            stats[date]["packages"] = 0
            stats[date]["size"] = 0.0
        if action == "installing changes":
            stats[date]["packages"] += 1
        elif action == "installed":
            stats[date]["size"] += float(split[5])
    dates = stats.keys()
    dates.sort()
    for date in dates:
        packages = stats[date]["packages"]
        # Bytes -> whole MiB.
        size = int(stats[date]["size"] / 1024.0 / 1024.0)
        print "%s %s %s" % (date, packages, size)
def daily_install_stats():
    """Print, per day, the number of installed packages and installed size (MiB).

    Parses the pipe-separated log file "2001-11" (hard-coded): field 0 is a
    timestamp (YYYYMMDD prefix used as the day key), field 1 the program,
    field 2 the action and field 5 the size in bytes for "installed" lines.
    """
    stats = {}
    f = utils.open_file("2001-11")
    for line in f.readlines():
        split = line.strip().split('|')
        program = split[1]
        # Only katie/process-accepted entries are of interest.
        if program != "katie" and program != "process-accepted":
            continue
        action = split[2]
        if action != "installing changes" and action != "installed":
            continue
        date = split[0][:8]  # YYYYMMDD
        if date not in stats:
            stats[date] = {}
            stats[date]["packages"] = 0
            stats[date]["size"] = 0.0
        if action == "installing changes":
            stats[date]["packages"] += 1
        elif action == "installed":
            stats[date]["size"] += float(split[5])
    # Bug fix: dict.keys() returns a view in Python 3 and has no .sort();
    # iterate the keys in sorted order instead.
    for date in sorted(stats):
        packages = stats[date]["packages"]
        # Bytes -> whole MiB.
        size = int(stats[date]["size"] / 1024.0 / 1024.0)
        print("%s %s %s" % (date, packages, size))
def check_timestamps(): """ Check all files for timestamps in the future; common from hardware (e.g. alpha) which have far-future dates as their default dates. """ global current_file q = DBConn().session().query(PoolFile).filter(PoolFile.filename.like('.deb$')) db_files.clear() count = 0 for pf in q.all(): filename = os.path.abspath(os.path.join(pf.location.path, pf.filename)) if os.access(filename, os.R_OK): f = utils.open_file(filename) current_file = filename sys.stderr.write("Processing %s.\n" % (filename)) apt_inst.debExtract(f, Ent, "control.tar.gz") f.seek(0) apt_inst.debExtract(f, Ent, "data.tar.gz") count += 1 print "Checked %d files (out of %d)." % (count, len(db_files.keys()))
def validate_packages(suite, component, architecture):
    """
    Ensure files mentioned in Packages exist

    Decompresses dists/<suite>/<component>/binary-<architecture>/Packages.gz
    to a temporary file and warns about every Filename entry that does not
    exist under Dir::Root.
    """
    cnf = Config()
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" \
               % (cnf["Dir::Root"], suite, component, architecture)
    print "Processing %s..." % (filename)
    # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
    (fd, temp_filename) = utils.temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
    if (result != 0):
        sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
        sys.exit(result)
    packages = utils.open_file(temp_filename)
    Packages = apt_pkg.TagFile(packages)
    while Packages.step():
        filename = "%s/%s" % (cnf["Dir::Root"], Packages.section.find('Filename'))
        if not os.path.exists(filename):
            print "W: %s missing." % (filename)
    packages.close()
    os.unlink(temp_filename)
def get_upload_data(changesfn):
    """Collect display data for one deferred upload.

    Returns a tuple (sort key in seconds, changes file name, remaining-delay
    string, uploader name, closed bug numbers, signing fingerprint, parsed
    changes, delay in days) for the .changes file at *changesfn*.
    """
    # Bug fix: the builtin file() was removed in Python 3 (the rest of this
    # block is already Python 3: "in Cnf", 0o644, with-statement); use open().
    achanges = deb822.Changes(open(changesfn))
    changesname = os.path.basename(changesfn)
    delay = os.path.basename(os.path.dirname(changesfn))  # e.g. "5-day"
    m = re.match(r'([0-9]+)-day', delay)
    if m:
        delaydays = int(m.group(1))
        # Seconds until the upload leaves the deferred queue (0 if overdue).
        remainingtime = (delaydays > 0) * max(0, 24 * 60 * 60 + os.stat(changesfn).st_mtime - time.time())
        delay = "%d days %02d:%02d" % (max(delaydays - 1, 0), int(remainingtime / 3600), int(remainingtime / 60) % 60)
    else:
        delaydays = 0
        remainingtime = 0
    uploader = achanges.get('changed-by')
    uploader = re.sub(r'^\s*(\S.*)\s+<.*>', r'\1', uploader)  # strip email part
    with utils.open_file(changesfn) as f:
        fingerprint = SignedFile(f.read(), keyrings=get_active_keyring_paths(), require_signature=False).fingerprint
    if "Show-Deferred::LinkPath" in Cnf:
        isnew = False
        suites = get_suites_source_in(achanges['source'])
        if 'unstable' not in suites and 'experimental' not in suites:
            isnew = True
        if not isnew:
            # we don't link .changes because we don't want other people to
            # upload it with the existing signature.
            for afn in map(lambda x: x['name'], achanges['files']):
                lfn = os.path.join(Cnf["Show-Deferred::LinkPath"], afn)
                qfn = os.path.join(os.path.dirname(changesfn), afn)
                if os.path.islink(lfn):
                    os.unlink(lfn)
                if os.path.exists(qfn):
                    os.symlink(qfn, lfn)
                    os.chmod(qfn, 0o644)
    return (max(delaydays - 1, 0) * 24 * 60 * 60 + remainingtime, changesname, delay, uploader,
            achanges.get('closes', '').split(), fingerprint, achanges, delaydays)
def parse_nfu(architecture):
    """Return the set of source package names marked Not-For-Us on *architecture*.

    Reads the wanna-build dump ``<Wanna-Build-Dump>/<architecture>-all.txt``
    and collects the source name from every "Not-For-Us" entry.
    """
    cnf = Config()
    # Example dump line: utils/hpodder_1.1.5.0: Not-For-Us [optional:out-of-date]
    # Bug fix: use a raw string for the pattern — "\w" in a plain string is an
    # invalid escape sequence (DeprecationWarning, then SyntaxError in newer
    # Python); this also matches the raw-string form used elsewhere in the file.
    r = re.compile(r"^\w+/([^_]+)_.*: Not-For-Us")
    ret = set()
    filename = "%s/%s-all.txt" % (cnf["Cruft-Report::Options::Wanna-Build-Dump"], architecture)
    # Not all architectures may have a wanna-build dump, so we want to ignore missing
    # files
    if os.path.exists(filename):
        f = utils.open_file(filename)
        for line in f:
            # Continuation lines are indented; only headers start a record.
            if line[0] == ' ':
                continue
            m = r.match(line)
            if m:
                ret.add(m.group(1))
        f.close()
    else:
        utils.warn("No wanna-build dump file for architecture %s" % architecture)
    return ret
def check_timestamps(): """ Check all files for timestamps in the future; common from hardware (e.g. alpha) which have far-future dates as their default dates. """ global current_file q = DBConn().session().query(PoolFile).filter( PoolFile.filename.like('.deb$')) db_files.clear() count = 0 for pf in q.all(): filename = os.path.abspath(os.path.join(pf.location.path, pf.filename)) if os.access(filename, os.R_OK): f = utils.open_file(filename) current_file = filename sys.stderr.write("Processing %s.\n" % (filename)) apt_inst.debExtract(f, Ent, "control.tar.gz") f.seek(0) apt_inst.debExtract(f, Ent, "data.tar.gz") count += 1 print "Checked %d files (out of %d)." % (count, len(db_files.keys()))
def read_control (filename):
    """Extract and colourise selected control fields from a .deb file.

    Returns (control, control_keys, section, depends, recommends, arch,
    maintainer) where section/arch/maintainer are colour-marked strings and
    depends/recommends are parsed dependency lists.
    """
    recommends = []
    depends = []
    section = ''
    maintainer = ''
    arch = ''
    deb_file = utils.open_file(filename)
    try:
        extracts = utils.deb_extract_control(deb_file)
        control = apt_pkg.TagSection(extracts)
    except:
        # Report, close the handle and re-raise — the caller decides how to
        # handle an unparseable package.
        print formatted_text("can't parse control info")
        deb_file.close()
        raise
    deb_file.close()
    control_keys = control.keys()
    if "Depends" in control:
        depends_str = control["Depends"]
        # create list of dependancy lists
        depends = split_depends(depends_str)
    if "Recommends" in control:
        recommends_str = control["Recommends"]
        recommends = split_depends(recommends_str)
    if "Section" in control:
        section_str = control["Section"]
        c_match = re_contrib.search(section_str)
        nf_match = re_nonfree.search(section_str)
        if c_match :
            # contrib colour
            section = colour_output(section_str, 'contrib')
        elif nf_match :
            # non-free colour
            section = colour_output(section_str, 'nonfree')
        else :
            # main
            section = colour_output(section_str, 'main')
    if "Architecture" in control:
        arch_str = control["Architecture"]
        arch = colour_output(arch_str, 'arch')
    if "Maintainer" in control:
        maintainer = control["Maintainer"]
        localhost = re_localhost.search(maintainer)
        if localhost:
            #highlight bad email
            maintainer = colour_output(maintainer, 'maintainer')
        else:
            maintainer = escape_if_needed(maintainer)
    return (control, control_keys, section, depends, recommends, arch, maintainer)
def do_bxa_notification(upload):
    """Mail a BXA notification listing the binary packages in *upload*.

    Builds a Package/Description summary from the control data of every .deb
    in the upload, substitutes it into the process-new.bxa_notification
    template and sends the result.
    """
    parts = []
    for name, entry in upload.pkg.files.items():
        if entry["type"] != "deb":
            continue
        control = apt_pkg.TagSection(utils.deb_extract_control(utils.open_file(name)))
        parts.append("\n")
        parts.append("Package: %s\n" % (control.find("Package")))
        parts.append("Description: %s\n" % (control.find("Description")))
    upload.Subst["__BINARY_DESCRIPTIONS__"] = "".join(parts)
    template = Config()["Dir::Templates"] + "/process-new.bxa_notification"
    bxa_mail = utils.TemplateSubst(upload.Subst, template)
    utils.send_mail(bxa_mail)
def edit_new(overrides, upload, session):
    """Let the operator edit proposed overrides in $EDITOR and return the result.

    Writes the current override list to a temp file, spawns an editor on it,
    then parses the edited lines back into override dicts.  Unknown packages
    are warned about and dropped.
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    print_new(upload, overrides, indexed=0, session=session, file=temp_file)
    temp_file.close()
    # Spawn an editor on that file
    editor = os.environ.get("EDITOR", "vi")
    result = os.system("%s %s" % (editor, temp_filename))
    if result != 0:
        utils.fubar("%s invocation failed for %s." % (editor, temp_filename), result)
    # Read the edited data back in
    temp_file = utils.open_file(temp_filename)
    lines = temp_file.readlines()
    temp_file.close()
    os.unlink(temp_filename)
    # Index existing overrides by (type, package) for quick lookup.
    overrides_map = dict([((o['type'], o['package']), o) for o in overrides])
    new_overrides = []
    # Parse the new data
    for line in lines:
        line = line.strip()
        if line == "" or line[0] == '#':  # skip blanks and comments
            continue
        s = line.split()
        # Pad the list if necessary (missing priority/section become None)
        s[len(s):3] = [None] * (3 - len(s))
        (pkg, priority, section) = s[:3]
        # "udeb:foo" selects a non-default override type.
        if pkg.find(':') != -1:
            type, pkg = pkg.split(':', 1)
        else:
            type = 'deb'
        o = overrides_map.get((type, pkg), None)
        if o is None:
            utils.warn("Ignoring unknown package '%s'" % (pkg))
        else:
            # Component is the part of the section before '/', if any.
            if section.find('/') != -1:
                component = section.split('/', 1)[0]
            else:
                component = 'main'
            new_overrides.append(
                dict(
                    package=pkg,
                    type=type,
                    section=section,
                    component=component,
                    priority=priority,
                    included=o['included'],
                ))
    return new_overrides
def main():
    """Write override files for every suite flagged for override processing.

    For each (suite, component, override type) combination this writes
    override.<codename>.<component><suffix> under Dir::Override.
    """
    cnf = Config()
    Arguments = [('h', "help", "Make-Overrides::Options::Help")]
    for i in ["help"]:
        key = "Make-Overrides::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Overrides::Options")
    if Options["Help"]:
        usage()
    d = DBConn()
    session = d.session()
    for suite in session.query(Suite).filter(
            Suite.overrideprocess == True):  # noqa:E712
        if suite.untouchable:
            print("Skipping %s as it is marked as untouchable" % suite.suite_name)
            continue
        sys.stderr.write("Processing %s...\n" % (suite.suite_name))
        # Prefer the dedicated override codename when one is configured.
        override_suite = suite.overridecodename or suite.codename
        for component in session.query(Component).all():
            for otype in session.query(OverrideType).all():
                otype_name = otype.overridetype
                cname = component.component_name
                # TODO: Stick suffix info in database (or get rid of it)
                if otype_name == "deb":
                    suffix = ""
                elif otype_name == "udeb":
                    if cname == "contrib":
                        continue  # Ick2 — no contrib udebs
                    suffix = ".debian-installer"
                elif otype_name == "dsc":
                    suffix = ".src"
                else:
                    utils.fubar("Don't understand OverrideType %s" % otype.overridetype)
                # Components like "non-free/firmware" must not create subdirs.
                cname = cname.replace('/', '_')
                filename = os.path.join(
                    cnf["Dir::Override"],
                    "override.%s.%s%s" % (override_suite, cname, suffix))
                output_file = utils.open_file(filename, 'w')
                do_list(output_file, suite, component, otype, session)
                output_file.close()
def edit_new (overrides, upload, session):
    """Let the operator edit proposed overrides in $EDITOR and return the result.

    Writes the current override list to a temp file, spawns an editor on it,
    then parses the edited lines back into override dicts.  Unknown packages
    are warned about and dropped.
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    print_new (upload, overrides, indexed=0, session=session, file=temp_file)
    temp_file.close()
    # Spawn an editor on that file
    editor = os.environ.get("EDITOR","vi")
    result = os.system("%s %s" % (editor, temp_filename))
    if result != 0:
        utils.fubar ("%s invocation failed for %s." % (editor, temp_filename), result)
    # Read the edited data back in
    temp_file = utils.open_file(temp_filename)
    lines = temp_file.readlines()
    temp_file.close()
    os.unlink(temp_filename)
    # Index existing overrides by (type, package) for quick lookup.
    overrides_map = dict([ ((o['type'], o['package']), o) for o in overrides ])
    new_overrides = []
    # Parse the new data
    for line in lines:
        line = line.strip()
        if line == "" or line[0] == '#':  # skip blanks and comments
            continue
        s = line.split()
        # Pad the list if necessary (missing priority/section become None)
        s[len(s):3] = [None] * (3-len(s))
        (pkg, priority, section) = s[:3]
        # "udeb:foo" selects a non-default override type.
        if pkg.find(':') != -1:
            type, pkg = pkg.split(':', 1)
        else:
            type = 'deb'
        o = overrides_map.get((type, pkg), None)
        if o is None:
            utils.warn("Ignoring unknown package '%s'" % (pkg))
        else:
            # Component is the part of the section before '/', if any.
            if section.find('/') != -1:
                component = section.split('/', 1)[0]
            else:
                component = 'main'
            new_overrides.append(dict(
                package=pkg,
                type=type,
                section=section,
                component=component,
                priority=priority,
                included=o['included'],
                ))
    return new_overrides
def get_upload_data(changesfn):
    """Collect display data for one deferred upload.

    Returns a tuple (sort key in seconds, changes file name, remaining-delay
    string, uploader name, closed bug numbers, signing fingerprint, parsed
    changes, delay in days) for the .changes file at *changesfn*.
    """
    achanges = deb822.Changes(file(changesfn))
    changesname = os.path.basename(changesfn)
    delay = os.path.basename(os.path.dirname(changesfn))  # e.g. "5-day"
    m = re.match(r'([0-9]+)-day', delay)
    if m:
        delaydays = int(m.group(1))
        # Seconds until the upload leaves the deferred queue (0 if overdue).
        remainingtime = (delaydays > 0) * max(
            0, 24 * 60 * 60 + os.stat(changesfn).st_mtime - time.time())
        delay = "%d days %02d:%02d" % (max(
            delaydays - 1, 0), int(
                remainingtime / 3600), int(remainingtime / 60) % 60)
    else:
        delaydays = 0
        remainingtime = 0
    uploader = achanges.get('changed-by')
    uploader = re.sub(r'^\s*(\S.*)\s+<.*>', r'\1', uploader)  # strip email part
    with utils.open_file(changesfn) as f:
        fingerprint = SignedFile(f.read(),
                                 keyrings=get_active_keyring_paths(),
                                 require_signature=False).fingerprint
    if Cnf.has_key("Show-Deferred::LinkPath"):
        # An upload counts as NEW if neither its source nor any of its
        # binaries are already present in unstable/experimental.
        isnew = 0
        suites = get_suites_source_in(achanges['source'])
        if 'unstable' not in suites and 'experimental' not in suites:
            isnew = 1
        for b in achanges['binary'].split():
            suites = get_suites_binary_in(b)
            if 'unstable' not in suites and 'experimental' not in suites:
                isnew = 1
        if not isnew:
            # we don't link .changes because we don't want other people to
            # upload it with the existing signature.
            for afn in map(lambda x: x['name'], achanges['files']):
                lfn = os.path.join(Cnf["Show-Deferred::LinkPath"], afn)
                qfn = os.path.join(os.path.dirname(changesfn), afn)
                if os.path.islink(lfn):
                    os.unlink(lfn)
                if os.path.exists(qfn):
                    os.symlink(qfn, lfn)
                    os.chmod(qfn, 0o644)
    return (max(delaydays - 1, 0) * 24 * 60 * 60 + remainingtime, changesname,
            delay, uploader, achanges.get('closes', '').split(), fingerprint,
            achanges, delaydays)
def main():
    """Write override files for every suite flagged for override processing.

    For each (suite, component, override type) combination this writes
    override.<codename>.<component><suffix> under Dir::Override.
    """
    cnf = Config()
    Arguments = [('h', "help", "Make-Overrides::Options::Help")]
    for i in ["help"]:
        key = "Make-Overrides::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Overrides::Options")
    if Options["Help"]:
        usage()
    d = DBConn()
    session = d.session()
    for suite in session.query(Suite).filter(Suite.overrideprocess == True):  # noqa:E712
        if suite.untouchable:
            print("Skipping %s as it is marked as untouchable" % suite.suite_name)
            continue
        print("Processing %s..." % (suite.suite_name), file=sys.stderr)
        # Prefer the dedicated override codename when one is configured.
        override_suite = suite.overridecodename or suite.codename
        for component in session.query(Component).all():
            for otype in session.query(OverrideType).all():
                otype_name = otype.overridetype
                cname = component.component_name
                # TODO: Stick suffix info in database (or get rid of it)
                if otype_name == "deb":
                    suffix = ""
                elif otype_name == "udeb":
                    if cname == "contrib":
                        continue  # Ick2 — no contrib udebs
                    suffix = ".debian-installer"
                elif otype_name == "dsc":
                    suffix = ".src"
                else:
                    utils.fubar("Don't understand OverrideType %s" % otype.overridetype)
                # Components like "non-free/firmware" must not create subdirs.
                cname = cname.replace('/', '_')
                filename = os.path.join(cnf["Dir::Override"],
                                        "override.%s.%s%s" % (override_suite, cname, suffix))
                output_file = utils.open_file(filename, 'w')
                do_list(output_file, suite, component, otype, session)
                output_file.close()
def fix_checksums():
    """
    Update missing checksums

    Walks every PoolFile, verifies size and md5sum against the database and
    backfills sha1sum/sha256sum where they are NULL.  Commits at the end.
    """
    print "Getting file information from database..."
    session = DBConn().session()
    q = session.query(PoolFile)
    print "Checking file checksums & sizes..."
    for f in q:
        filename = f.fullpath
        try:
            fi = utils.open_file(filename)
        except:
            utils.warn("can't open '%s'." % (filename))
            continue
        size = os.stat(filename)[stat.ST_SIZE]
        if size != f.filesize:
            utils.warn(
                "**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])."
                % (filename, size, f.filesize))
        md5sum = apt_pkg.md5sum(fi)
        if md5sum != f.md5sum:
            utils.warn(
                "**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])."
                % (filename, md5sum, f.md5sum))
            # NOTE(review): on md5 mismatch the sha backfill below is skipped —
            # presumably deliberate (don't record checksums of a corrupt file);
            # confirm against the size-mismatch branch, which does not skip.
            continue
        fi.seek(0)
        sha1sum = apt_pkg.sha1sum(fi)
        if f.sha1sum is None:
            f.sha1sum = sha1sum
            print "Added missing sha1 checksum for {0}".format(f.filename)
        fi.seek(0)
        sha256sum = apt_pkg.sha256sum(fi)
        if f.sha256sum is None:
            f.sha256sum = sha256sum
            print "Added missing sha256 checksum for {0}".format(f.filename)
    session.commit()
    print "Done."
def validate_sources(suite, component):
    """
    Ensure files mentioned in Sources exist

    Decompresses dists/<suite>/<component>/source/Sources.gz to a temporary
    file, then checks every file listed in each Files: stanza.  For missing
    files in "potato" directories it additionally checks the pool and prints
    the symlink that would repair the archive (the actual symlinking is
    commented out).
    """
    cnf = Config()
    filename = "%s/dists/%s/%s/source/Sources.gz" % (cnf["Dir::Root"], suite, component)
    print "Processing %s..." % (filename)
    # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
    (fd, temp_filename) = utils.temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
    if (result != 0):
        sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
        sys.exit(result)
    sources = utils.open_file(temp_filename)
    Sources = apt_pkg.TagFile(sources)
    while Sources.step():
        source = Sources.section.find('Package')
        directory = Sources.section.find('Directory')
        files = Sources.section.find('Files')
        for i in files.split('\n'):
            (md5, size, name) = i.split()
            filename = "%s/%s/%s" % (cnf["Dir::Root"], directory, name)
            if not os.path.exists(filename):
                if directory.find("potato") == -1:
                    print "W: %s missing." % (filename)
                else:
                    # Legacy potato layout: the file may live in the pool.
                    pool_location = utils.poolify(source, component)
                    pool_filename = "%s/%s/%s" % (cnf["Dir::Pool"], pool_location, name)
                    if not os.path.exists(pool_filename):
                        print "E: %s missing (%s)." % (filename, pool_filename)
                    else:
                        # Create symlink
                        pool_filename = os.path.normpath(pool_filename)
                        filename = os.path.normpath(filename)
                        src = utils.clean_symlink(pool_filename, filename, cnf["Dir::Root"])
                        print "Symlinking: %s -> %s" % (filename, src)
                        #os.symlink(src, filename)
    sources.close()
    os.unlink(temp_filename)
def check_checksums():
    """
    Validate all files

    Compares size, md5, sha1 and sha256 of every PoolFile on disk against the
    database, warning on any mismatch.  Read-only: nothing is fixed here.
    """
    print "Getting file information from database..."
    q = DBConn().session().query(PoolFile)
    print "Checking file checksums & sizes..."
    for f in q:
        filename = f.fullpath
        try:
            fi = utils.open_file(filename)
        except:
            utils.warn("can't open '%s'." % (filename))
            continue
        size = os.stat(filename)[stat.ST_SIZE]
        if size != f.filesize:
            utils.warn(
                "**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])."
                % (filename, size, f.filesize))
        md5sum = apt_pkg.md5sum(fi)
        if md5sum != f.md5sum:
            utils.warn(
                "**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])."
                % (filename, md5sum, f.md5sum))
        # Rewind before each digest pass — the hashers consume the stream.
        fi.seek(0)
        sha1sum = apt_pkg.sha1sum(fi)
        if sha1sum != f.sha1sum:
            utils.warn(
                "**WARNING** sha1sum mismatch for '%s' ('%s' [current] vs. '%s' [db])."
                % (filename, sha1sum, f.sha1sum))
        fi.seek(0)
        sha256sum = apt_pkg.sha256sum(fi)
        if sha256sum != f.sha256sum:
            utils.warn(
                "**WARNING** sha256sum mismatch for '%s' ('%s' [current] vs. '%s' [db])."
                % (filename, sha256sum, f.sha256sum))
    print "Done."
def read_changes_or_dsc(suite, filename, session=None):
    """Parse a .changes/.dsc file and return its fields as formatted text.

    Dependency fields are expanded with create_depends_string(), selected
    fields are colour-marked, and the fields are re-emitted in the order they
    appear in the file (checksums fields are dropped).
    """
    dsc = {}
    dsc_file = utils.open_file(filename)
    # Bug fix: the original leaked dsc_file when parse_changes raised (the
    # early return skipped close()); close in a finally instead.  Also catch
    # Exception rather than a bare except, so KeyboardInterrupt still works.
    try:
        dsc = utils.parse_changes(filename, dsc_file=1)
    except Exception:
        return formatted_text("can't parse .dsc control info")
    finally:
        dsc_file.close()
    filecontents = strip_pgp_signature(filename)
    # Remember the original field order for re-emission below.
    keysinorder = []
    for l in filecontents.split('\n'):
        m = re.match(r'([-a-zA-Z0-9]*):', l)
        if m:
            keysinorder.append(m.group(1))
    for k in dsc.keys():
        if k in ("build-depends", "build-depends-indep"):
            dsc[k] = create_depends_string(suite, split_depends(dsc[k]), session)
        elif k == "architecture":
            if (dsc["architecture"] != "any"):
                dsc['architecture'] = colour_output(dsc["architecture"], 'arch')
        elif k == "distribution":
            # Highlight unusual target distributions.
            if dsc["distribution"] not in ('unstable', 'experimental'):
                dsc['distribution'] = colour_output(dsc["distribution"], 'distro')
        elif k in ("files", "changes", "description"):
            if use_html:
                dsc[k] = formatted_text(dsc[k], strip=True)
            else:
                dsc[k] = ('\n' + '\n'.join(
                    map(lambda x: ' ' + x, dsc[k].split('\n')))).rstrip()
        else:
            dsc[k] = escape_if_needed(dsc[k])
    keysinorder = filter(lambda x: not x.lower().startswith('checksums-'),
                         keysinorder)
    filecontents = '\n'.join(
        map(lambda x: format_field(x, dsc[x.lower()]), keysinorder)) + '\n'
    return filecontents
def fix_checksums(): """ Update missing checksums """ print "Getting file information from database..." session = DBConn().session(); q = session.query(PoolFile) print "Checking file checksums & sizes..." for f in q: filename = f.fullpath try: fi = utils.open_file(filename) except: utils.warn("can't open '%s'." % (filename)) continue size = os.stat(filename)[stat.ST_SIZE] if size != f.filesize: utils.warn("**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, size, f.filesize)) md5sum = apt_pkg.md5sum(fi) if md5sum != f.md5sum: utils.warn("**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, md5sum, f.md5sum)) continue; fi.seek(0) sha1sum = apt_pkg.sha1sum(fi) if f.sha1sum is None: f.sha1sum = sha1sum print "Added missing sha1 checksum for {0}".format(f.filename) fi.seek(0) sha256sum = apt_pkg.sha256sum(fi) if f.sha256sum is None: f.sha256sum = sha256sum print "Added missing sha256 checksum for {0}".format(f.filename) session.commit() print "Done."
def listPath(suite, component, architecture = None, type = None, incremental_mode = False):
    """returns full path to the list file

    Opens <Dir::Lists>/<suite>_<component>_<suffix>.list in append mode and
    returns (file, timestamp).  In incremental mode the existing contents are
    kept and timestamp is the file's mtime; otherwise the file is truncated
    and timestamp is None.  *type* selects the suffix ('deb' or 'udeb') when
    an architecture is given; without one the suffix is "source".
    """
    suffixMap = { 'deb': "binary-",
                  'udeb': "debian-installer_binary-" }
    if architecture:
        suffix = suffixMap[type] + architecture.arch_string
    else:
        suffix = "source"
    filename = "%s_%s_%s.list" % \
        (suite.suite_name, component.component_name, suffix)
    pathname = os.path.join(Config()["Dir::Lists"], filename)
    # Open for append so an existing file's mtime can be read before deciding
    # whether to keep or truncate its contents.
    file = utils.open_file(pathname, "a")
    timestamp = None
    if incremental_mode:
        timestamp = os.fstat(file.fileno())[stat.ST_MTIME]
    else:
        file.seek(0)
        file.truncate()
    return (file, timestamp)
def validate_packages(suite, component, architecture):
    """
    Ensure files mentioned in Packages exist

    Decompresses dists/<suite>/<component>/binary-<architecture>/Packages.gz
    to a temporary file and warns about every Filename entry that does not
    exist under Dir::Root.  (Older apt_pkg API: ParseTagFile/Step/Find.)
    """
    filename = "%s/dists/%s/%s/binary-%s/Packages.gz" \
               % (Cnf["Dir::Root"], suite, component, architecture)
    print "Processing %s..." % (filename)
    # apt_pkg.ParseTagFile needs a real file handle and can't handle a GzipFile instance...
    (fd, temp_filename) = utils.temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
    if (result != 0):
        sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
        sys.exit(result)
    packages = utils.open_file(temp_filename)
    Packages = apt_pkg.ParseTagFile(packages)
    while Packages.Step():
        filename = "%s/%s" % (Cnf["Dir::Root"], Packages.Section.Find('Filename'))
        if not os.path.exists(filename):
            print "W: %s missing." % (filename)
    packages.close()
    os.unlink(temp_filename)
def validate_sources(suite, component):
    """
    Ensure files mentioned in Sources exist

    Decompresses dists/<suite>/<component>/source/Sources.gz to a temporary
    file, then checks every file listed in each Files: stanza.  For missing
    files in "potato" directories it additionally checks the pool and prints
    the symlink that would repair the archive (the actual symlinking is
    commented out).
    """
    cnf = Config()
    filename = "%s/dists/%s/%s/source/Sources.gz" % (cnf["Dir::Root"], suite, component)
    print "Processing %s..." % (filename)
    # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
    (fd, temp_filename) = utils.temp_filename()
    (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
    if (result != 0):
        sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
        sys.exit(result)
    sources = utils.open_file(temp_filename)
    Sources = apt_pkg.TagFile(sources)
    while Sources.step():
        source = Sources.section.find('Package')
        directory = Sources.section.find('Directory')
        files = Sources.section.find('Files')
        for i in files.split('\n'):
            (md5, size, name) = i.split()
            filename = "%s/%s/%s" % (cnf["Dir::Root"], directory, name)
            if not os.path.exists(filename):
                if directory.find("potato") == -1:
                    print "W: %s missing." % (filename)
                else:
                    # Legacy potato layout: the file may live in the pool.
                    pool_location = utils.poolify (source, component)
                    pool_filename = "%s/%s/%s" % (cnf["Dir::Pool"], pool_location, name)
                    if not os.path.exists(pool_filename):
                        print "E: %s missing (%s)." % (filename, pool_filename)
                    else:
                        # Create symlink
                        pool_filename = os.path.normpath(pool_filename)
                        filename = os.path.normpath(filename)
                        src = utils.clean_symlink(pool_filename, filename, cnf["Dir::Root"])
                        print "Symlinking: %s -> %s" % (filename, src)
                        #os.symlink(src, filename)
    sources.close()
    os.unlink(temp_filename)
def listPath(suite, component, architecture=None, type=None, incremental_mode=False):
    """returns full path to the list file

    Opens <Dir::Lists>/<suite>_<component>_<suffix>.list in append mode and
    returns (file, timestamp).  In incremental mode the existing contents are
    kept and timestamp is the file's mtime; otherwise the file is truncated
    and timestamp is None.
    """
    suffix_map = {'deb': "binary-", 'udeb': "debian-installer_binary-"}
    # Without an architecture the list covers sources.
    suffix = suffix_map[type] + architecture.arch_string if architecture else "source"
    basename = "%s_%s_%s.list" % (suite.suite_name, component.component_name, suffix)
    pathname = os.path.join(Config()["Dir::Lists"], basename)
    # Append mode lets us inspect an existing file's mtime before deciding
    # whether to keep or discard its contents.
    list_file = utils.open_file(pathname, "a")
    timestamp = None
    if incremental_mode:
        timestamp = os.fstat(list_file.fileno())[stat.ST_MTIME]
    else:
        list_file.seek(0)
        list_file.truncate()
    return (list_file, timestamp)
def check_files():
    """
    Prepare the dictionary of existing filenames, then walk through the archive
    pool/ directory to compare it.

    First pass: record every database file in db_files and report entries
    whose on-disk file is unreadable/missing.  Then load the unreferenced
    override list into the module-global `excluded`, and walk pool/ with
    process_dir to report files on disk that the database does not know about.
    """
    global db_files
    cnf = Config()
    print "Building list of database files..."
    q = DBConn().session().query(PoolFile).join(Location).order_by('path', 'location')
    print "Missing files:"
    db_files.clear()
    for f in q.all():
        filename = os.path.abspath(os.path.join(f.location.path, f.filename))
        db_files[filename] = ""
        if os.access(filename, os.R_OK) == 0:
            if f.last_used:
                print "(last used: %s) %s" % (f.last_used, filename)
            else:
                print "%s" % (filename)
    filename = os.path.join(cnf["Dir::Override"], 'override.unreferenced')
    if os.path.exists(filename):
        f = utils.open_file(filename)
        for filename in f.readlines():
            filename = filename[:-1]  # strip trailing newline
            excluded[filename] = ""
    print "Existent files not in db:"
    # process_dir fills the module-global `waste` as it walks.
    os.path.walk(os.path.join(cnf["Dir::Root"], 'pool/'), process_dir, None)
    print
    print "%s wasted..." % (utils.size_type(waste))
def edit_new (new, upload):
    """Let the operator edit section/priority of NEW packages in $EDITOR.

    Writes the current NEW summary to a temp file, spawns an editor, then
    parses the edited lines back, updating both the `new` dict and the
    upload's per-file section/priority in place.
    """
    # Write the current data to a temporary file
    (fd, temp_filename) = utils.temp_filename()
    temp_file = os.fdopen(fd, 'w')
    print_new (new, upload, indexed=0, file=temp_file)
    temp_file.close()
    # Spawn an editor on that file
    editor = os.environ.get("EDITOR","vi")
    result = os.system("%s %s" % (editor, temp_filename))
    if result != 0:
        utils.fubar ("%s invocation failed for %s." % (editor, temp_filename), result)
    # Read the edited data back in
    temp_file = utils.open_file(temp_filename)
    lines = temp_file.readlines()
    temp_file.close()
    os.unlink(temp_filename)
    # Parse the new data
    for line in lines:
        line = line.strip()
        if line == "":
            continue
        s = line.split()
        # Pad the list if necessary (missing priority/section become None)
        s[len(s):3] = [None] * (3-len(s))
        (pkg, priority, section) = s[:3]
        if not new.has_key(pkg):
            utils.warn("Ignoring unknown package '%s'" % (pkg))
        else:
            # Strip off any invalid markers, print_new will readd them.
            if section.endswith("[!]"):
                section = section[:-3]
            if priority.endswith("[!]"):
                priority = priority[:-3]
            # Propagate the edit to every file of the package.
            for f in new[pkg]["files"]:
                upload.pkg.files[f]["section"] = section
                upload.pkg.files[f]["priority"] = priority
            new[pkg]["section"] = section
            new[pkg]["priority"] = priority
def read_changes_or_dsc (suite, filename, session = None):
    """Parse a .changes/.dsc file and return its fields as formatted text.

    Dependency fields are expanded with create_depends_string(), selected
    fields are colour-marked, and the fields are re-emitted in the order they
    appear in the file (checksums fields are dropped).
    """
    dsc = {}
    dsc_file = utils.open_file(filename)
    try:
        dsc = utils.parse_changes(filename, dsc_file=1)
    except:
        # NOTE(review): this early return leaves dsc_file open — confirm
        # whether the leak is acceptable here.
        return formatted_text("can't parse .dsc control info")
    dsc_file.close()
    filecontents = strip_pgp_signature(filename)
    # Remember the original field order for re-emission below.
    keysinorder = []
    for l in filecontents.split('\n'):
        m = re.match(r'([-a-zA-Z0-9]*):', l)
        if m:
            keysinorder.append(m.group(1))
    for k in dsc.keys():
        if k in ("build-depends","build-depends-indep"):
            dsc[k] = create_depends_string(suite, split_depends(dsc[k]), session)
        elif k == "architecture":
            if (dsc["architecture"] != "any"):
                dsc['architecture'] = colour_output(dsc["architecture"], 'arch')
        elif k == "distribution":
            # Highlight unusual target distributions.
            if dsc["distribution"] not in ('unstable', 'experimental'):
                dsc['distribution'] = colour_output(dsc["distribution"], 'distro')
        elif k in ("files","changes","description"):
            if use_html:
                dsc[k] = formatted_text(dsc[k], strip=True)
            else:
                dsc[k] = ('\n'+'\n'.join(map(lambda x: ' '+x, dsc[k].split('\n')))).rstrip()
        else:
            dsc[k] = escape_if_needed(dsc[k])
    keysinorder = filter(lambda x: not x.lower().startswith('checksums-'), keysinorder)
    filecontents = '\n'.join(map(lambda x: format_field(x,dsc[x.lower()]), keysinorder))+'\n'
    return filecontents
def check_checksums():
    """
    Validate all files

    Compares size, md5, sha1 and sha256 of every PoolFile on disk against the
    database, warning on any mismatch.  Read-only: nothing is fixed here.
    """
    print "Getting file information from database..."
    q = DBConn().session().query(PoolFile)
    print "Checking file checksums & sizes..."
    for f in q:
        filename = f.fullpath
        try:
            fi = utils.open_file(filename)
        except:
            utils.warn("can't open '%s'." % (filename))
            continue
        size = os.stat(filename)[stat.ST_SIZE]
        if size != f.filesize:
            utils.warn("**WARNING** size mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, size, f.filesize))
        md5sum = apt_pkg.md5sum(fi)
        if md5sum != f.md5sum:
            utils.warn("**WARNING** md5sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, md5sum, f.md5sum))
        # Rewind before each digest pass — the hashers consume the stream.
        fi.seek(0)
        sha1sum = apt_pkg.sha1sum(fi)
        if sha1sum != f.sha1sum:
            utils.warn("**WARNING** sha1sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, sha1sum, f.sha1sum))
        fi.seek(0)
        sha256sum = apt_pkg.sha256sum(fi)
        if sha256sum != f.sha256sum:
            utils.warn("**WARNING** sha256sum mismatch for '%s' ('%s' [current] vs. '%s' [db])." % (filename, sha256sum, f.sha256sum))
    print "Done."
def main ():
    """Entry point for 'dak rm' (older, Python-2 style variant).

    Parses command-line options, sanity-checks them, selects the packages
    to remove from the database, shows an interactive summary and finally
    delegates the actual removal to remove().
    """
    global Options

    cnf = Config()

    # Option table for apt_pkg.parse_commandline: (short, long, config key[, arg]).
    Arguments = [('h',"help","Rm::Options::Help"),
                 ('A','no-arch-all-rdeps','Rm::Options::NoArchAllRdeps'),
                 ('a',"architecture","Rm::Options::Architecture", "HasArg"),
                 ('b',"binary", "Rm::Options::Binary"),
                 ('B',"binary-only", "Rm::Options::Binary-Only"),
                 ('c',"component", "Rm::Options::Component", "HasArg"),
                 ('C',"carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"), # Bugs to Cc
                 ('d',"done","Rm::Options::Done", "HasArg"), # Bugs fixed
                 ('D',"do-close","Rm::Options::Do-Close"),
                 ('R',"rdep-check", "Rm::Options::Rdep-Check"),
                 ('m',"reason", "Rm::Options::Reason", "HasArg"), # Hysterical raisins; -m is old-dinstall option for rejection reason
                 ('n',"no-action","Rm::Options::No-Action"),
                 ('p',"partial", "Rm::Options::Partial"),
                 ('s',"suite","Rm::Options::Suite", "HasArg"),
                 ('S',"source-only", "Rm::Options::Source-Only"),
                 ]

    # Ensure every option key exists in the config so later lookups can't fail.
    for i in [ 'NoArchAllRdeps', "architecture", "binary", "binary-only", "carbon-copy",
               "component", "done", "help", "no-action", "partial", "rdep-check", "reason",
               "source-only", "Do-Close" ]:
        if not cnf.has_key("Rm::Options::%s" % (i)):
            cnf["Rm::Options::%s" % (i)] = ""
    if not cnf.has_key("Rm::Options::Suite"):
        cnf["Rm::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Rm::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    # Sanity check options
    if not arguments:
        utils.fubar("need at least one package name as an argument.")
    if Options["Architecture"] and Options["Source-Only"]:
        utils.fubar("can't use -a/--architecture and -S/--source-only options simultaneously.")
    if ((Options["Binary"] and Options["Source-Only"])
            or (Options["Binary"] and Options["Binary-Only"])
            or (Options["Binary-Only"] and Options["Source-Only"])):
        utils.fubar("Only one of -b/--binary, -B/--binary-only and -S/--source-only can be used.")
    if Options.has_key("Carbon-Copy") and not Options.has_key("Done"):
        utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.")
    if Options["Architecture"] and not Options["Partial"]:
        utils.warn("-a/--architecture implies -p/--partial.")
        Options["Partial"] = "true"
    if Options["Do-Close"] and not Options["Done"]:
        utils.fubar("No.")
    if (Options["Do-Close"]
            and (Options["Binary"] or Options["Binary-Only"] or Options["Source-Only"])):
        utils.fubar("No.")

    # Force the admin to tell someone if we're not doing a 'dak
    # cruft-report' inspired removal (or closing a bug, which counts
    # as telling someone).
    if not Options["No-Action"] and not Options["Carbon-Copy"] \
       and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1:
        utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")

    # Process -C/--carbon-copy
    #
    # Accept 3 types of arguments (space separated):
    #  1) a number - assumed to be a bug number, i.e. [email protected]
    #  2) the keyword 'package' - cc's [email protected] for every argument
    #  3) contains a '@' - assumed to be an email address, used unmodified
    #
    carbon_copy = []
    for copy_to in utils.split_args(Options.get("Carbon-Copy")):
        if copy_to.isdigit():
            if cnf.has_key("Dinstall::BugServer"):
                carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
            else:
                utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to)
        elif copy_to == 'package':
            for package in arguments:
                if cnf.has_key("Dinstall::PackagesServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"])
                if cnf.has_key("Dinstall::TrackingServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::TrackingServer"])
        elif '@' in copy_to:
            carbon_copy.append(copy_to)
        else:
            utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to))

    # Build the SQL fragment that restricts the query to the named packages.
    if Options["Binary"]:
        field = "b.package"
    else:
        field = "s.source"
    con_packages = "AND %s IN (%s)" % (field, ", ".join([ repr(i) for i in arguments ]))

    (con_suites, con_architectures, con_components, check_source) = \
        utils.parse_args(Options)

    # Additional suite checks
    suite_ids_list = []
    whitelists = []
    suites = utils.split_args(Options["Suite"])
    suites_list = utils.join_with_commas_and(suites)
    if not Options["No-Action"]:
        for suite in suites:
            s = get_suite(suite, session=session)
            if s is not None:
                suite_ids_list.append(s.suite_id)
                whitelists.append(s.mail_whitelist)
            # Extra confirmation prompts for the dangerous suites.
            if suite in ("oldstable", "stable"):
                print "**WARNING** About to remove from the (old)stable suite!"
                print "This should only be done just prior to a (point) release and not at"
                print "any other time."
                game_over()
            elif suite == "testing":
                print "**WARNING About to remove from the testing suite!"
                print "There's no need to do this normally as removals from unstable will"
                print "propogate to testing automagically."
                game_over()

    # Additional architecture checks
    if Options["Architecture"] and check_source:
        utils.warn("'source' in -a/--argument makes no sense and is ignored.")

    # Don't do dependency checks on multiple suites
    if Options["Rdep-Check"] and len(suites) > 1:
        utils.fubar("Reverse dependency check on multiple suites is not implemented.")

    to_remove = []
    maintainers = {}

    # We have 3 modes of package selection: binary, source-only, binary-only
    # and source+binary.

    # XXX: TODO: This all needs converting to use placeholders or the object
    #            API. It's an SQL injection dream at the moment

    if Options["Binary"]:
        # Removal by binary package name
        q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, files_archive_map af, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures))
        to_remove.extend(q)
    else:
        # Source-only
        if not Options["Binary-Only"]:
            q = session.execute("SELECT s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, archive, files f, files_archive_map af, component c WHERE sa.source = s.id AND sa.suite = su.id AND archive.id = su.archive_id AND s.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s" % (con_packages, con_suites, con_components))
            to_remove.extend(q)
        if not Options["Source-Only"]:
            # Source + Binary
            q = session.execute("""
                    SELECT b.package, b.version, a.arch_string, b.id, b.maintainer
                    FROM binaries b
                         JOIN bin_associations ba ON b.id = ba.bin
                         JOIN architecture a ON b.architecture = a.id
                         JOIN suite su ON ba.suite = su.id
                         JOIN archive ON archive.id = su.archive_id
                         JOIN files_archive_map af ON b.file = af.file_id AND af.archive_id = archive.id
                         JOIN component c ON af.component_id = c.id
                         JOIN source s ON b.source = s.id
                         JOIN src_associations sa ON s.id = sa.source AND sa.suite = su.id
                    WHERE TRUE %s %s %s %s""" % (con_packages, con_suites, con_components, con_architectures))
            to_remove.extend(q)

    if not to_remove:
        print "Nothing to do."
        sys.exit(0)

    # If we don't have a reason; spawn an editor so the user can add one
    # Write the rejection email out as the <foo>.reason file
    if not Options["Reason"] and not Options["No-Action"]:
        (fd, temp_filename) = utils.temp_filename()
        editor = os.environ.get("EDITOR","vi")
        result = os.system("%s %s" % (editor, temp_filename))
        if result != 0:
            utils.fubar ("vi invocation failed for `%s'!" % (temp_filename), result)
        temp_file = utils.open_file(temp_filename)
        for line in temp_file.readlines():
            Options["Reason"] += line
        temp_file.close()
        os.unlink(temp_filename)

    # Generate the summary of what's to be removed
    # d maps package -> version -> [architectures]
    d = {}
    for i in to_remove:
        package = i[0]
        version = i[1]
        architecture = i[2]
        maintainer = i[4]
        maintainers[maintainer] = ""
        if not d.has_key(package):
            d[package] = {}
        if not d[package].has_key(version):
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    maintainer_list = []
    for maintainer_id in maintainers.keys():
        maintainer_list.append(get_maintainer(maintainer_id).name)

    summary = ""
    removals = d.keys()
    removals.sort()
    for package in removals:
        versions = d[package].keys()
        # Python-2 style cmp-based sorts (list.sort(cmpfunc)).
        versions.sort(apt_pkg.version_compare)
        for version in versions:
            d[package][version].sort(utils.arch_compare_sw)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))

    print "Will remove the following packages from %s:" % (suites_list)
    print
    print summary
    print "Maintainer: %s" % ", ".join(maintainer_list)
    if Options["Done"]:
        print "Will also close bugs: "+Options["Done"]
    if carbon_copy:
        print "Will also send CCs to: " + ", ".join(carbon_copy)
    if Options["Do-Close"]:
        print "Will also close associated bug reports."
    print
    print "------------------- Reason -------------------"
    print Options["Reason"]
    print "----------------------------------------------"
    print

    if Options["Rdep-Check"]:
        arches = utils.split_args(Options["Architecture"])
        include_arch_all = Options['NoArchAllRdeps'] == ''
        reverse_depends_check(removals, suites[0], arches, session, include_arch_all=include_arch_all)

    # If -n/--no-action, drop out here
    if Options["No-Action"]:
        sys.exit(0)

    print "Going to remove the packages now."
    game_over()

    # Do the actual deletion
    print "Deleting...",
    sys.stdout.flush()

    try:
        bugs = utils.split_args(Options["Done"])
        remove(session, Options["Reason"], suites, to_remove, partial=Options["Partial"],
               components=utils.split_args(Options["Component"]), done_bugs=bugs,
               carbon_copy=carbon_copy, close_related_bugs=Options["Do-Close"]
               )
    except ValueError as ex:
        utils.fubar(ex.message)
    else:
        print "done."
def britney_changelog(packages, suite, session):
    """Write the britney changelog summary file for a suite.

    Compares the source versions currently in *suite* against the versions
    recorded in *packages* (keyed by tuples whose first three elements are
    package, version, architecture) and writes the changelog entries of
    every newer source, plus a REMOVED line for each vanished source, to
    the changelog file configured for the suite.  Returns nothing; if the
    suite has no changelog file configured, it is a no-op.
    """
    old = {}
    current = {}
    Cnf = utils.get_conf()

    try:
        q = session.execute("SELECT changelog FROM suite WHERE id = :suiteid",
                            {'suiteid': suite.suite_id})
        brit_file = q.fetchone()[0]
    except Exception:
        # Suite has no changelog row (fetchone() -> None) or the query
        # failed; treat both as "no changelog configured".  (Was a bare
        # 'except:', which would also have swallowed SystemExit et al.)
        brit_file = None

    if brit_file:
        brit_file = os.path.join(Cnf['Dir::Root'], brit_file)
    else:
        return

    q = session.execute("""SELECT s.source, s.version, sa.id
                             FROM source s, src_associations sa
                            WHERE sa.suite = :suiteid
                              AND sa.source = s.id""", {'suiteid': suite.suite_id})

    for p in q.fetchall():
        current[p[0]] = p[1]
    for p in packages.keys():
        if p[2] == "source":
            old[p[0]] = p[1]

    # new maps source -> [current version, previous version (or 0 if new)]
    new = {}
    for p in current.keys():
        if p in old.keys():
            if apt_pkg.version_compare(current[p], old[p]) > 0:
                new[p] = [current[p], old[p]]
        else:
            new[p] = [current[p], 0]

    # XXX: query is built by string interpolation - SQL injection risk if
    #      source names/versions are not trusted; should use bind parameters.
    query = "SELECT source, changelog FROM changelogs WHERE"
    for p in new.keys():
        query += " source = '%s' AND version > '%s' AND version <= '%s'" \
                 % (p, new[p][1], new[p][0])
        query += " AND architecture LIKE '%source%' AND distribution in \
                  ('unstable', 'experimental', 'testing-proposed-updates') OR"
    query += " False ORDER BY source, version DESC"
    q = session.execute(query)

    pu = None
    brit = utils.open_file(brit_file, 'w')

    # Blank line between the entries of different source packages.
    for u in q:
        if pu and pu != u[0]:
            brit.write("\n")
        brit.write("%s\n" % u[1])
        pu = u[0]
    if q.rowcount:
        brit.write("\n\n\n")

    # Sources that were in the old set but are gone from the suite now.
    for p in list(set(old.keys()).difference(current.keys())):
        brit.write("REMOVED: %s %s\n" % (p, old[p]))

    brit.flush()
    brit.close()
def main():
    """Entry point for 'dak rm' (Python 3 variant).

    Parses command-line options, sanity-checks them, selects the packages
    to remove from the database, shows an interactive summary and finally
    delegates the actual removal to remove().
    """
    global Options

    cnf = Config()

    # Option table for apt_pkg.parse_commandline: (short, long, config key[, arg]).
    Arguments = [
        ('h', "help", "Rm::Options::Help"),
        ('A', 'no-arch-all-rdeps', 'Rm::Options::NoArchAllRdeps'),
        ('a', "architecture", "Rm::Options::Architecture", "HasArg"),
        ('b', "binary", "Rm::Options::Binary"),
        ('B', "binary-only", "Rm::Options::Binary-Only"),
        ('c', "component", "Rm::Options::Component", "HasArg"),
        ('C', "carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"),  # Bugs to Cc
        ('d', "done", "Rm::Options::Done", "HasArg"),  # Bugs fixed
        ('D', "do-close", "Rm::Options::Do-Close"),
        ('R', "rdep-check", "Rm::Options::Rdep-Check"),
        ('m', "reason", "Rm::Options::Reason", "HasArg"
         ),  # Hysterical raisins; -m is old-dinstall option for rejection reason
        ('n', "no-action", "Rm::Options::No-Action"),
        ('p', "partial", "Rm::Options::Partial"),
        ('s', "suite", "Rm::Options::Suite", "HasArg"),
        ('S', "source-only", "Rm::Options::Source-Only"),
    ]

    # Ensure every option key exists in the config so later lookups can't fail.
    for i in [
            'NoArchAllRdeps', "architecture", "binary", "binary-only",
            "carbon-copy", "component", "done", "help", "no-action", "partial",
            "rdep-check", "reason", "source-only", "Do-Close"
    ]:
        key = "Rm::Options::%s" % (i)
        if key not in cnf:
            cnf[key] = ""
    if "Rm::Options::Suite" not in cnf:
        cnf["Rm::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Rm::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    # Sanity check options
    if not arguments:
        utils.fubar("need at least one package name as an argument.")
    if Options["Architecture"] and Options["Source-Only"]:
        utils.fubar(
            "can't use -a/--architecture and -S/--source-only options simultaneously."
        )
    if ((Options["Binary"] and Options["Source-Only"])
            or (Options["Binary"] and Options["Binary-Only"])
            or (Options["Binary-Only"] and Options["Source-Only"])):
        utils.fubar(
            "Only one of -b/--binary, -B/--binary-only and -S/--source-only can be used."
        )
    # NOTE(review): condition differs from the older variant in this file
    # (which required -d whenever -C was given); confirm this is intended.
    if "Carbon-Copy" not in Options and "Done" not in Options:
        utils.fubar(
            "can't use -C/--carbon-copy without also using -d/--done option.")
    if Options["Architecture"] and not Options["Partial"]:
        utils.warn("-a/--architecture implies -p/--partial.")
        Options["Partial"] = "true"
    if Options["Do-Close"] and not Options["Done"]:
        utils.fubar("No.")
    if (Options["Do-Close"]
            and (Options["Binary"] or Options["Binary-Only"]
                 or Options["Source-Only"])):
        utils.fubar("No.")

    # Force the admin to tell someone if we're not doing a 'dak
    # cruft-report' inspired removal (or closing a bug, which counts
    # as telling someone).
    if not Options["No-Action"] and not Options["Carbon-Copy"] \
       and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1:
        utils.fubar(
            "Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal."
        )

    # Build the SQL fragment that restricts the query to the named packages.
    if Options["Binary"]:
        field = "b.package"
    else:
        field = "s.source"
    con_packages = "AND %s IN (%s)" % (field, ", ".join(
        [repr(i) for i in arguments]))

    (con_suites, con_architectures, con_components, check_source) = \
        utils.parse_args(Options)

    # Additional suite checks
    suite_ids_list = []
    whitelists = []
    suites = utils.split_args(Options["Suite"])
    suites_list = utils.join_with_commas_and(suites)
    if not Options["No-Action"]:
        for suite in suites:
            s = get_suite(suite, session=session)
            if s is not None:
                suite_ids_list.append(s.suite_id)
                whitelists.append(s.mail_whitelist)
            # Extra confirmation prompts for the dangerous suites.
            if suite in ("oldstable", "stable"):
                print(
                    "**WARNING** About to remove from the (old)stable suite!")
                print(
                    "This should only be done just prior to a (point) release and not at"
                )
                print("any other time.")
                game_over()
            elif suite == "testing":
                print("**WARNING About to remove from the testing suite!")
                print(
                    "There's no need to do this normally as removals from unstable will"
                )
                print("propogate to testing automagically.")
                game_over()

    # Additional architecture checks
    if Options["Architecture"] and check_source:
        utils.warn("'source' in -a/--argument makes no sense and is ignored.")

    # Don't do dependency checks on multiple suites
    if Options["Rdep-Check"] and len(suites) > 1:
        utils.fubar(
            "Reverse dependency check on multiple suites is not implemented.")

    to_remove = []
    maintainers = {}

    # We have 3 modes of package selection: binary, source-only, binary-only
    # and source+binary.

    # XXX: TODO: This all needs converting to use placeholders or the object
    #            API. It's an SQL injection dream at the moment

    if Options["Binary"]:
        # Removal by binary package name
        q = session.execute("""
                SELECT b.package, b.version, a.arch_string, b.id, b.maintainer, s.source
                FROM binaries b
                     JOIN source s ON s.id = b.source
                     JOIN bin_associations ba ON ba.bin = b.id
                     JOIN architecture a ON a.id = b.architecture
                     JOIN suite su ON su.id = ba.suite
                     JOIN files f ON f.id = b.file
                     JOIN files_archive_map af ON af.file_id = f.id AND af.archive_id = su.archive_id
                     JOIN component c ON c.id = af.component_id
                WHERE TRUE %s %s %s %s
        """ % (con_packages, con_suites, con_components, con_architectures))
        to_remove.extend(q)
    else:
        # Source-only
        if not Options["Binary-Only"]:
            q = session.execute("""
                    SELECT s.source, s.version, 'source', s.id, s.maintainer, s.source
                    FROM source s
                         JOIN src_associations sa ON sa.source = s.id
                         JOIN suite su ON su.id = sa.suite
                         JOIN archive ON archive.id = su.archive_id
                         JOIN files f ON f.id = s.file
                         JOIN files_archive_map af ON af.file_id = f.id AND af.archive_id = su.archive_id
                         JOIN component c ON c.id = af.component_id
                    WHERE TRUE %s %s %s
            """ % (con_packages, con_suites, con_components))
            to_remove.extend(q)
        if not Options["Source-Only"]:
            # Source + Binary
            q = session.execute(
                """
                    SELECT b.package, b.version, a.arch_string, b.id, b.maintainer, s.source
                    FROM binaries b
                         JOIN bin_associations ba ON b.id = ba.bin
                         JOIN architecture a ON b.architecture = a.id
                         JOIN suite su ON ba.suite = su.id
                         JOIN archive ON archive.id = su.archive_id
                         JOIN files_archive_map af ON b.file = af.file_id AND af.archive_id = archive.id
                         JOIN component c ON af.component_id = c.id
                         JOIN source s ON b.source = s.id
                         JOIN src_associations sa ON s.id = sa.source AND sa.suite = su.id
                    WHERE TRUE %s %s %s %s""" %
                (con_packages, con_suites, con_components, con_architectures))
            to_remove.extend(q)

    if not to_remove:
        print("Nothing to do.")
        sys.exit(0)

    # Process -C/--carbon-copy
    #
    # Accept 3 types of arguments (space separated):
    #  1) a number - assumed to be a bug number, i.e. [email protected]
    #  2) the keyword 'package' - cc's [email protected] for every argument
    #  3) contains a '@' - assumed to be an email address, used unmodified
    #
    carbon_copy = []
    for copy_to in utils.split_args(Options.get("Carbon-Copy")):
        if copy_to.isdigit():
            if "Dinstall::BugServer" in cnf:
                carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
            else:
                utils.fubar(
                    "Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured"
                    % copy_to)
        elif copy_to == 'package':
            # column 5 of every row selected above is the source package name
            for package in set([s[5] for s in to_remove]):
                if "Dinstall::PackagesServer" in cnf:
                    carbon_copy.append(package + "@" +
                                       cnf["Dinstall::PackagesServer"])
        elif '@' in copy_to:
            carbon_copy.append(copy_to)
        else:
            utils.fubar(
                "Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address."
                % (copy_to))

    # If we don't have a reason; spawn an editor so the user can add one
    # Write the rejection email out as the <foo>.reason file
    if not Options["Reason"] and not Options["No-Action"]:
        (fd, temp_filename) = utils.temp_filename()
        editor = os.environ.get("EDITOR", "vi")
        result = os.system("%s %s" % (editor, temp_filename))
        if result != 0:
            utils.fubar("vi invocation failed for `%s'!" % (temp_filename),
                        result)
        temp_file = utils.open_file(temp_filename)
        for line in temp_file.readlines():
            Options["Reason"] += line
        temp_file.close()
        os.unlink(temp_filename)

    # Generate the summary of what's to be removed
    # d maps package -> version -> [architectures]
    d = {}
    for i in to_remove:
        package = i[0]
        version = i[1]
        architecture = i[2]
        maintainer = i[4]
        maintainers[maintainer] = ""
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    maintainer_list = []
    for maintainer_id in maintainers.keys():
        maintainer_list.append(get_maintainer(maintainer_id).name)

    summary = ""
    # BUGFIX: dict views have no .sort() on Python 3 - the previous
    # "removals = d.keys(); removals.sort()" (and the same pattern for
    # versions below) raised AttributeError.  Use sorted() instead.
    removals = sorted(d.keys())
    for package in removals:
        versions = sorted(d[package].keys(),
                          key=functools.cmp_to_key(apt_pkg.version_compare))
        for version in versions:
            d[package][version].sort(key=utils.ArchKey)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(
                d[package][version]))

    print("Will remove the following packages from %s:" % (suites_list))
    print()
    print(summary)
    print("Maintainer: %s" % ", ".join(maintainer_list))
    if Options["Done"]:
        print("Will also close bugs: " + Options["Done"])
    if carbon_copy:
        print("Will also send CCs to: " + ", ".join(carbon_copy))
    if Options["Do-Close"]:
        print("Will also close associated bug reports.")
    print()
    print("------------------- Reason -------------------")
    print(Options["Reason"])
    print("----------------------------------------------")
    print()

    if Options["Rdep-Check"]:
        arches = utils.split_args(Options["Architecture"])
        include_arch_all = Options['NoArchAllRdeps'] == ''
        reverse_depends_check(removals,
                              suites[0],
                              arches,
                              session,
                              include_arch_all=include_arch_all)

    # If -n/--no-action, drop out here
    if Options["No-Action"]:
        sys.exit(0)

    print("Going to remove the packages now.")
    game_over()

    # Do the actual deletion
    print("Deleting...", end=' ')
    sys.stdout.flush()

    try:
        bugs = utils.split_args(Options["Done"])
        remove(session,
               Options["Reason"],
               suites,
               to_remove,
               partial=Options["Partial"],
               components=utils.split_args(Options["Component"]),
               done_bugs=bugs,
               carbon_copy=carbon_copy,
               close_related_bugs=Options["Do-Close"])
    except ValueError as ex:
        # BUGFIX: BaseException.message does not exist on Python 3.
        utils.fubar(str(ex))
    else:
        print("done.")
def main():
    """Entry point for 'dak cruft-report' (Python 2 style variant).

    Scans the Sources/Packages indices of the selected suite and reports
    various classes of cruft (NBS, NVIU, unbuilt binaries, Not-For-Us,
    obsolete sources, ...) depending on the selected mode.
    """
    global suite, suite_id, source_binaries, source_versions

    cnf = Config()

    # Option table for apt_pkg.parse_commandline: (short, long, config key[, arg]).
    Arguments = [('h', "help", "Cruft-Report::Options::Help"),
                 ('m', "mode", "Cruft-Report::Options::Mode", "HasArg"),
                 ('R', "rdep-check", "Cruft-Report::Options::Rdep-Check"),
                 ('s', "suite", "Cruft-Report::Options::Suite", "HasArg"),
                 ('w', "wanna-build-dump", "Cruft-Report::Options::Wanna-Build-Dump", "HasArg")]
    for i in ["help", "Rdep-Check"]:
        if not cnf.has_key("Cruft-Report::Options::%s" % (i)):
            cnf["Cruft-Report::Options::%s" % (i)] = ""

    cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable")

    if not cnf.has_key("Cruft-Report::Options::Mode"):
        cnf["Cruft-Report::Options::Mode"] = "daily"

    if not cnf.has_key("Cruft-Report::Options::Wanna-Build-Dump"):
        cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu"

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    Options = cnf.subtree("Cruft-Report::Options")
    if Options["Help"]:
        usage()

    if Options["Rdep-Check"]:
        rdeps = True
    else:
        rdeps = False

    # Set up checks based on mode
    if Options["Mode"] == "daily":
        checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu" ]
    elif Options["Mode"] == "full":
        checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu", "dubious nbs", "bnb", "bms", "anais" ]
    elif Options["Mode"] == "bdo":
        checks = ["nbs", "obsolete source"]
    else:
        utils.warn("%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood." % (Options["Mode"]))
        usage(1)

    session = DBConn().session()

    # Index dicts shared by the checks below.
    bin_pkgs = {}
    src_pkgs = {}
    bin2source = {}
    bins_in_suite = {}
    nbs = {}
    source_versions = {}

    anais_output = ""

    nfu_packages = {}

    suite = get_suite(Options["Suite"].lower(), session)
    if not suite:
        utils.fubar("Cannot find suite %s" % Options["Suite"].lower())
    suite_id = suite.suite_id
    suite_name = suite.suite_name.lower()

    if "obsolete source" in checks:
        report_obsolete_source(suite_name, session)

    if "nbs" in checks:
        reportAllNBS(suite_name, suite_id, session, rdeps)

    if "outdated non-free" in checks:
        report_outdated_nonfree(suite_name, session, rdeps)

    bin_not_built = {}

    if "bnb" in checks:
        bins_in_suite = get_suite_binaries(suite, session)

    # Checks based on the Sources files
    components = get_component_names(session)
    for component in components:
        filename = "%s/dists/%s/%s/source/Sources.gz" % (suite.archive.path, suite_name, component)
        # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance...
        (fd, temp_filename) = utils.temp_filename()
        (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename))
        if (result != 0):
            sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
            sys.exit(result)
        sources = utils.open_file(temp_filename)
        Sources = apt_pkg.TagFile(sources)
        while Sources.step():
            source = Sources.section.find('Package')
            source_version = Sources.section.find('Version')
            architecture = Sources.section.find('Architecture')
            binaries = Sources.section.find('Binary')
            binaries_list = [i.strip() for i in binaries.split(',')]

            if "bnb" in checks:
                # Check for binaries not built on any architecture.
                for binary in binaries_list:
                    if not bins_in_suite.has_key(binary):
                        bin_not_built.setdefault(source, {})
                        bin_not_built[source][binary] = ""

            if "anais" in checks:
                anais_output += do_anais(architecture, binaries_list, source, session)

            # build indices for checking "no source" later
            source_index = component + '/' + source
            src_pkgs[source] = source_index
            for binary in binaries_list:
                bin_pkgs[binary] = source
            # NOTE(review): source_binaries is a global that is not
            # (re)initialised in this function - presumably a module-level
            # dict; verify against the module top level.
            source_binaries[source] = binaries
            source_versions[source] = source_version

        sources.close()
        os.unlink(temp_filename)

    # Checks based on the Packages files
    check_components = components[:]
    if suite_name != "staging":
        check_components.append('main/debian-installer')

    for component in check_components:
        architectures = [ a.arch_string for a in get_suite_architectures(
            suite_name, skipsrc=True, skipall=True, session=session) ]
        for architecture in architectures:
            if component == 'main/debian-installer' and re.match("kfreebsd", architecture):
                continue
            filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (
                suite.archive.path, suite_name, component, architecture)

            # apt_pkg.TagFile needs a real file handle
            (fd, temp_filename) = utils.temp_filename()
            (result, output) = commands.getstatusoutput(
                "gunzip -c %s > %s" % (filename, temp_filename))
            if (result != 0):
                sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output))
                sys.exit(result)

            if "nfu" in checks:
                nfu_packages.setdefault(architecture, [])
                nfu_entries = parse_nfu(architecture)

            packages = utils.open_file(temp_filename)
            Packages = apt_pkg.TagFile(packages)
            while Packages.step():
                package = Packages.section.find('Package')
                source = Packages.section.find('Source', "")
                version = Packages.section.find('Version')
                if source == "":
                    source = package
                # Keep the highest version seen for each binary package.
                if bin2source.has_key(package) and \
                       apt_pkg.version_compare(version, bin2source[package]["version"]) > 0:
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                else:
                    bin2source[package] = {}
                    bin2source[package]["version"] = version
                    bin2source[package]["source"] = source
                # "Source: name (version)" - split into name and version.
                if source.find("(") != -1:
                    m = re_extract_src_version.match(source)
                    source = m.group(1)
                    version = m.group(2)
                if not bin_pkgs.has_key(package):
                    # Binary no longer built from any source: NBS candidate.
                    nbs.setdefault(source, {})
                    nbs[source].setdefault(package, {})
                    nbs[source][package][version] = ""
                else:
                    if "nfu" in checks:
                        # only suggest to remove out-of-date packages
                        if package in nfu_entries and \
                               version != source_versions[source]:
                            nfu_packages[architecture].append((package, version, source_versions[source]))

            packages.close()
            os.unlink(temp_filename)

    # Distinguish dubious (version numbers match) and 'real' NBS (they don't)
    dubious_nbs = {}
    for source in nbs.keys():
        for package in nbs[source].keys():
            versions = nbs[source][package].keys()
            versions.sort(apt_pkg.version_compare)
            latest_version = versions.pop()
            source_version = source_versions.get(source, "0")
            if apt_pkg.version_compare(latest_version, source_version) == 0:
                add_nbs(dubious_nbs, source, latest_version, package, suite_id, session)

    if "nviu" in checks:
        do_newer_version('chromodoris', 'staging', 'NVIU', session)

    # FIXME: Not used in Tanglu
    #if "nvit" in checks:
    #    do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session)

    ###

    if Options["Mode"] == "full":
        print "=" * 75
        print

    if "nfu" in checks:
        do_nfu(nfu_packages)

    if "bnb" in checks:
        print "Unbuilt binary packages"
        print "-----------------------"
        print
        keys = bin_not_built.keys()
        keys.sort()
        for source in keys:
            binaries = bin_not_built[source].keys()
            binaries.sort()
            print " o %s: %s" % (source, ", ".join(binaries))
        print

    if "bms" in checks:
        report_multiple_source(suite)

    if "anais" in checks:
        print "Architecture Not Allowed In Source"
        print "----------------------------------"
        print anais_output
        print

    if "dubious nbs" in checks:
        do_dubious_nbs(dubious_nbs)
def remove(session, reason, suites, removals,
           whoami=None, partial=False, components=None, done_bugs=None, date=None,
           carbon_copy=None, close_related_bugs=False):
    """Batch remove a number of packages

    Deletes the given removals from the given suites (and, for full
    removals, from the override table), writes the removal to the plain
    and 822-style log files and optionally closes bugs by mail.

    @type session: SQLA Session
    @param session: The database session in use

    @type reason: string
    @param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")

    @type suites: list
    @param suites: A list of the suite names in which the removal should occur

    @type removals: list
    @param removals: A list of the removals.  Each element should be a tuple (or list) of at least the following
        for 4 items from the database (in order): package, version, architecture, (database) id.
        For source packages, the "architecture" should be set to "source".

    @type partial: bool
    @param partial: Whether the removal is "partial" (e.g. architecture specific).

    @type components: list
    @param components: List of components involved in a partial removal.  Can be an empty list to not restrict the
        removal to any components.

    @type whoami: string
    @param whoami: The person (or entity) doing the removal.  Defaults to utils.whoami()

    @type date: string
    @param date: The date of the removal. Defaults to commands.getoutput("date -R")

    @type done_bugs: list
    @param done_bugs: A list of bugs to be closed when doing this removal.

    @type close_related_bugs: bool
    @param close_related_bugs: Whether bugs related to the package being removed should be closed as well.  NB: Not implemented
        for more than one suite.

    @type carbon_copy: list
    @param carbon_copy: A list of mail addresses to CC when doing removals.  NB: all items are taken "as-is" unlike
        "dak rm".

    @rtype: None
    @return: Nothing
    """
    # Generate the summary of what's to be removed
    d = {}                      # package -> version -> [architectures]
    summary = ""
    sources = []
    binaries = []
    whitelists = []
    versions = []
    suite_ids_list = []
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    con_components = ''

    #######################################################################################################

    if not reason:
        raise ValueError("Empty removal reason not permitted")
    if not removals:
        raise ValueError("Nothing to remove!?")
    if not suites:
        raise ValueError("Removals without a suite!?")
    if whoami is None:
        whoami = utils.whoami()
    if date is None:
        date = commands.getoutput("date -R")
    if partial and components:
        # Restrict the override deletion to the named components.
        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND component IN (%s)" % ", ".join([str(i) for i in component_ids_list])

    for i in removals:
        package = i[0]
        version = i[1]
        architecture = i[2]
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    for package in sorted(d):
        versions = sorted(d[package], cmp=apt_pkg.version_compare)
        for version in versions:
            d[package][version].sort(utils.arch_compare_sw)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))

    # Re-parse the rendered summary table to split it into source and
    # binary removal lists for the 822 log.
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    element[2] = sub("source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    for suite in suites:
        s = get_suite(suite, session=session)
        if s is not None:
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    #######################################################################################################
    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    with utils.open_file(log_filename, "a") as logfile, utils.open_file(log822_filename, "a") as logfile822:
        # Serialise concurrent removals against both log files.
        fcntl.lockf(logfile, fcntl.LOCK_EX)
        fcntl.lockf(logfile822, fcntl.LOCK_EX)

        logfile.write("=========================================================================\n")
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
        if done_bugs:
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

        if sources:
            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

        if binaries:
            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        logfile822.write("Reason: %s\n" % reason.replace('\n', '\n '))
        if done_bugs:
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

        # Drop the suite associations (and, for full removals, the overrides).
        for i in removals:
            package = i[0]
            architecture = i[2]
            package_id = i[3]
            for suite_id in suite_ids_list:
                if architecture == "source":
                    session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                else:
                    session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                # Delete from the override file
                if not partial:
                    if architecture == "source":
                        type_id = dsc_type_id
                    else:
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (con_components), {'package': package, 'typeid': type_id, 'suiteid': suite_id})

        session.commit()
        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn("Cannot send mail to BugServer as Dinstall::BugServer is not configured")
            logfile.write("=========================================================================\n")
            logfile822.write("\n")
            return

        # read common subst variables for all bug closure mails
        Subst_common = {}
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
        if carbon_copy:
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
        if done_bugs:
            # NOTE: aliases Subst_common rather than copying it - later
            # mutations are visible through both names.
            Subst_close_rm = Subst_common
            bcc = []
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
            if bcc:
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, reason)
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close-with-related")
                else:
                    mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            Subst_close_other = Subst_common
            bcc = []
            wnpp = utils.parse_wnpp_bug_file()
            # Strip binNMU suffixes before deciding whether all removals
            # share a single version.
            versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
            if len(versions) == 1:
                Subst_close_other["__VERSION__"] = versions[0]
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs with multiple package versions is not supported. Do it yourself.")
            if bcc:
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # at this point, I just assume, that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0]
            if len(sources) == 1:
                # NOTE(review): relies on 'source' being the leftover loop
                # variable from the 822-logging loop above (equal to
                # sources[0] when len(sources) == 1, but only if that loop
                # ran); sources[0] would be the robust spelling - verify.
                source_pkg = source.split("_", 1)[0]
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs for multiple source packages is not supported. Please do it yourself.")
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg
            merged_bugs = set()
            other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open', 'status', 'forwarded')
            if other_bugs:
                # Skip bugs we have already seen as a merge partner so each
                # merged group is only closed once.
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bug in bts.get_status(bugno):
                            for merged in bug.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bug in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs, filtering that out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(Subst_close_other, cnf["Dir::Templates"]+"/rm.bug-close-related")
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write("=========================================================================\n")
        logfile822.write("\n")
def main():
    """Entry point for dak control-overrides.

    Parses the command line, fills in option defaults, then dispatches to
    exactly one of the add/list/set/change actions.
    """
    global Logger

    cnf = Config()

    Arguments = [('a', "add", "Control-Overrides::Options::Add"),
                 ('c', "component", "Control-Overrides::Options::Component", "HasArg"),
                 ('h', "help", "Control-Overrides::Options::Help"),
                 ('l', "list", "Control-Overrides::Options::List"),
                 ('q', "quiet", "Control-Overrides::Options::Quiet"),
                 ('s', "suite", "Control-Overrides::Options::Suite", "HasArg"),
                 ('S', "set", "Control-Overrides::Options::Set"),
                 ('C', "change", "Control-Overrides::Options::Change"),
                 ('n', "no-action", "Control-Overrides::Options::No-Action"),
                 ('t', "type", "Control-Overrides::Options::Type", "HasArg")]

    # Give every flag option an empty default so later lookups never fail.
    for opt in ("add", "help", "list", "quiet", "set", "change", "no-action"):
        opt_key = "Control-Overrides::Options::%s" % opt
        if opt_key not in cnf:
            cnf[opt_key] = ""
    # Defaults for the value-carrying options.
    for opt_key, default in (("Control-Overrides::Options::Component", "main"),
                             ("Control-Overrides::Options::Suite", "unstable"),
                             ("Control-Overrides::Options::Type", "deb")):
        if opt_key not in cnf:
            cnf[opt_key] = default

    file_list = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)

    if cnf["Control-Overrides::Options::Help"]:
        usage()

    session = DBConn().session()

    # Exactly one of add/list/set/change may be requested.
    mode = None
    for candidate in ("add", "list", "set", "change"):
        if not cnf["Control-Overrides::Options::%s" % (candidate)]:
            continue
        if mode:
            utils.fubar("Can not perform more than one action at once.")
        mode = candidate

    # Need an action...
    if mode is None:
        utils.fubar("No action specified.")

    suite = cnf["Control-Overrides::Options::Suite"]
    component = cnf["Control-Overrides::Options::Component"]
    otype = cnf["Control-Overrides::Options::Type"]

    if mode == "list":
        list_overrides(suite, component, otype, session)
        return

    if get_suite(suite).untouchable:
        utils.fubar("%s: suite is untouchable" % suite)

    # No-Action mode parses input but performs no database changes.
    action = True
    if cnf["Control-Overrides::Options::No-Action"]:
        utils.warn("In No-Action Mode")
        action = False

    Logger = daklog.Logger("control-overrides", mode)
    if file_list:
        for path in file_list:
            process_file(utils.open_file(path), suite, component, otype, mode, action, session)
    else:
        process_file(sys.stdin, suite, component, otype, mode, action, session)
    Logger.close()
def main (): global suite, suite_id, source_binaries, source_versions cnf = Config() Arguments = [('h',"help","Cruft-Report::Options::Help"), ('m',"mode","Cruft-Report::Options::Mode", "HasArg"), ('R',"rdep-check", "Cruft-Report::Options::Rdep-Check"), ('s',"suite","Cruft-Report::Options::Suite","HasArg"), ('w',"wanna-build-dump","Cruft-Report::Options::Wanna-Build-Dump","HasArg")] for i in [ "help", "Rdep-Check" ]: if not cnf.has_key("Cruft-Report::Options::%s" % (i)): cnf["Cruft-Report::Options::%s" % (i)] = "" cnf["Cruft-Report::Options::Suite"] = cnf.get("Dinstall::DefaultSuite", "unstable") if not cnf.has_key("Cruft-Report::Options::Mode"): cnf["Cruft-Report::Options::Mode"] = "daily" if not cnf.has_key("Cruft-Report::Options::Wanna-Build-Dump"): cnf["Cruft-Report::Options::Wanna-Build-Dump"] = "/srv/ftp-master.debian.org/scripts/nfu" apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv) Options = cnf.subtree("Cruft-Report::Options") if Options["Help"]: usage() if Options["Rdep-Check"]: rdeps = True else: rdeps = False # Set up checks based on mode if Options["Mode"] == "daily": checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu" ] elif Options["Mode"] == "full": checks = [ "nbs", "nviu", "nvit", "obsolete source", "outdated non-free", "nfu", "dubious nbs", "bnb", "bms", "anais" ] elif Options["Mode"] == "bdo": checks = [ "nbs", "obsolete source" ] else: utils.warn("%s is not a recognised mode - only 'full', 'daily' or 'bdo' are understood." 
% (Options["Mode"])) usage(1) session = DBConn().session() bin_pkgs = {} src_pkgs = {} bin2source = {} bins_in_suite = {} nbs = {} source_versions = {} anais_output = "" nfu_packages = {} suite = get_suite(Options["Suite"].lower(), session) if not suite: utils.fubar("Cannot find suite %s" % Options["Suite"].lower()) suite_id = suite.suite_id suite_name = suite.suite_name.lower() if "obsolete source" in checks: report_obsolete_source(suite_name, session) if "nbs" in checks: reportAllNBS(suite_name, suite_id, session, rdeps) if "outdated non-free" in checks: report_outdated_nonfree(suite_name, session, rdeps) bin_not_built = {} if "bnb" in checks: bins_in_suite = get_suite_binaries(suite, session) # Checks based on the Sources files components = get_component_names(session) for component in components: filename = "%s/dists/%s/%s/source/Sources.gz" % (suite.archive.path, suite_name, component) # apt_pkg.TagFile needs a real file handle and can't handle a GzipFile instance... (fd, temp_filename) = utils.temp_filename() (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename)) if (result != 0): sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output)) sys.exit(result) sources = utils.open_file(temp_filename) Sources = apt_pkg.TagFile(sources) while Sources.step(): source = Sources.section.find('Package') source_version = Sources.section.find('Version') architecture = Sources.section.find('Architecture') binaries = Sources.section.find('Binary') binaries_list = [ i.strip() for i in binaries.split(',') ] if "bnb" in checks: # Check for binaries not built on any architecture. 
for binary in binaries_list: if not bins_in_suite.has_key(binary): bin_not_built.setdefault(source, {}) bin_not_built[source][binary] = "" if "anais" in checks: anais_output += do_anais(architecture, binaries_list, source, session) # build indices for checking "no source" later source_index = component + '/' + source src_pkgs[source] = source_index for binary in binaries_list: bin_pkgs[binary] = source source_binaries[source] = binaries source_versions[source] = source_version sources.close() os.unlink(temp_filename) # Checks based on the Packages files check_components = components[:] if suite_name != "experimental": check_components.append('main/debian-installer'); for component in check_components: architectures = [ a.arch_string for a in get_suite_architectures(suite_name, skipsrc=True, skipall=True, session=session) ] for architecture in architectures: if component == 'main/debian-installer' and re.match("kfreebsd", architecture): continue filename = "%s/dists/%s/%s/binary-%s/Packages.gz" % (suite.archive.path, suite_name, component, architecture) # apt_pkg.TagFile needs a real file handle (fd, temp_filename) = utils.temp_filename() (result, output) = commands.getstatusoutput("gunzip -c %s > %s" % (filename, temp_filename)) if (result != 0): sys.stderr.write("Gunzip invocation failed!\n%s\n" % (output)) sys.exit(result) if "nfu" in checks: nfu_packages.setdefault(architecture,[]) nfu_entries = parse_nfu(architecture) packages = utils.open_file(temp_filename) Packages = apt_pkg.TagFile(packages) while Packages.step(): package = Packages.section.find('Package') source = Packages.section.find('Source', "") version = Packages.section.find('Version') if source == "": source = package if bin2source.has_key(package) and \ apt_pkg.version_compare(version, bin2source[package]["version"]) > 0: bin2source[package]["version"] = version bin2source[package]["source"] = source else: bin2source[package] = {} bin2source[package]["version"] = version 
bin2source[package]["source"] = source if source.find("(") != -1: m = re_extract_src_version.match(source) source = m.group(1) version = m.group(2) if not bin_pkgs.has_key(package): nbs.setdefault(source,{}) nbs[source].setdefault(package, {}) nbs[source][package][version] = "" else: if "nfu" in checks: if package in nfu_entries and \ version != source_versions[source]: # only suggest to remove out-of-date packages nfu_packages[architecture].append((package,version,source_versions[source])) packages.close() os.unlink(temp_filename) # Distinguish dubious (version numbers match) and 'real' NBS (they don't) dubious_nbs = {} for source in nbs.keys(): for package in nbs[source].keys(): versions = nbs[source][package].keys() versions.sort(apt_pkg.version_compare) latest_version = versions.pop() source_version = source_versions.get(source,"0") if apt_pkg.version_compare(latest_version, source_version) == 0: add_nbs(dubious_nbs, source, latest_version, package, suite_id, session) if "nviu" in checks: do_newer_version('unstable', 'experimental', 'NVIU', session) if "nvit" in checks: do_newer_version('testing', 'testing-proposed-updates', 'NVIT', session) ### if Options["Mode"] == "full": print "="*75 print if "nfu" in checks: do_nfu(nfu_packages) if "bnb" in checks: print "Unbuilt binary packages" print "-----------------------" print keys = bin_not_built.keys() keys.sort() for source in keys: binaries = bin_not_built[source].keys() binaries.sort() print " o %s: %s" % (source, ", ".join(binaries)) print if "bms" in checks: report_multiple_source(suite) if "anais" in checks: print "Architecture Not Allowed In Source" print "----------------------------------" print anais_output print if "dubious nbs" in checks: do_dubious_nbs(dubious_nbs)
def strip_pgp_signature (filename):
    """Return the contents of *filename* with any PGP signature stripped off."""
    with utils.open_file(filename) as handle:
        raw = handle.read()
    # require_signature=False: unsigned input is passed through unchanged.
    return SignedFile(raw, keyrings=(), require_signature=False).contents
def read_control (filename):
    """Extract and pre-format the control information of a .deb.

    Opens *filename*, parses its control member and returns a tuple
    (control, control_keys, section, predepends, depends, recommends,
    arch, maintainer) where the section/arch/maintainer strings are
    already colourised for display and the dependency fields are split
    into lists via split_depends().

    Raises whatever the control parser raises if the file cannot be
    parsed (after printing a diagnostic).
    """
    recommends = []
    predepends = []
    depends = []
    section = ''
    maintainer = ''
    arch = ''

    deb_file = utils.open_file(filename)
    try:
        extracts = utils.deb_extract_control(deb_file)
        control = apt_pkg.TagSection(extracts)
    except Exception:
        # Was a bare "except:" which would also swallow KeyboardInterrupt;
        # narrowed to Exception.  The error is still propagated.
        print(formatted_text("can't parse control info"))
        raise
    finally:
        # Always release the file handle, on success and failure alike.
        deb_file.close()

    control_keys = control.keys()

    if "Pre-Depends" in control:
        predepends_str = control["Pre-Depends"]
        predepends = split_depends(predepends_str)

    if "Depends" in control:
        depends_str = control["Depends"]
        # create list of dependancy lists
        depends = split_depends(depends_str)

    if "Recommends" in control:
        recommends_str = control["Recommends"]
        recommends = split_depends(recommends_str)

    if "Section" in control:
        section_str = control["Section"]
        c_match = re_contrib.search(section_str)
        nf_match = re_nonfree.search(section_str)
        if c_match :
            # contrib colour
            section = colour_output(section_str, 'contrib')
        elif nf_match :
            # non-free colour
            section = colour_output(section_str, 'nonfree')
        else :
            # main
            section = colour_output(section_str, 'main')
    if "Architecture" in control:
        arch_str = control["Architecture"]
        arch = colour_output(arch_str, 'arch')

    if "Maintainer" in control:
        maintainer = control["Maintainer"]
        localhost = re_localhost.search(maintainer)
        if localhost:
            #highlight bad email
            maintainer = colour_output(maintainer, 'maintainer')
        else:
            maintainer = escape_if_needed(maintainer)

    return (control, control_keys, section, predepends, depends, recommends, arch, maintainer)
def main():
    """Entry point for dak make-maintainers (archive-aware variant).

    Builds per-package Maintainer and Uploader indices for the newest
    version of every source (and, unless -s is given, binary) package in
    the selected archive, then either prints them (-p) or writes the
    'Maintainers' and 'Uploaders' files in the current directory.
    """
    cnf = Config()

    Arguments = [('h',"help","Make-Maintainers::Options::Help"),
                 ('a',"archive","Make-Maintainers::Options::Archive",'HasArg'),
                 ('s',"source","Make-Maintainers::Options::Source"),
                 ('p',"print","Make-Maintainers::Options::Print")]
    # has_key() is deprecated (removed in Python 3); use the "in" operator.
    for i in ["Help", "Source", "Print" ]:
        if "Make-Maintainers::Options::%s" % (i) not in cnf:
            cnf["Make-Maintainers::Options::%s" % (i)] = ""

    extra_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Maintainers::Options")

    # --archive is mandatory.
    if Options["Help"] or not Options.get('Archive'):
        usage()

    Logger = daklog.Logger('make-maintainers')
    session = DBConn().session()

    archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()

    # dictionary packages to maintainer names
    maintainers = dict()
    # dictionary packages to list of uploader names
    uploaders = dict()

    # "distinct on ... order by version desc" picks the newest version of
    # each source package present in any suite of the archive.
    source_query = session.query(DBSource).from_statement('''
        select distinct on (source.source) source.* from source
            join src_associations sa on source.id = sa.source
            join suite on sa.suite = suite.id
            where suite.archive_id = :archive_id
            order by source.source, source.version desc''') \
        .params(archive_id=archive.archive_id)

    binary_query = session.query(DBBinary).from_statement('''
        select distinct on (binaries.package) binaries.* from binaries
            join bin_associations ba on binaries.id = ba.bin
            join suite on ba.suite = suite.id
            where suite.archive_id = :archive_id
            order by binaries.package, binaries.version desc''') \
        .params(archive_id=archive.archive_id)

    Logger.log(['sources'])
    for source in source_query:
        maintainers[source.source] = source.maintainer.name
        uploaders[source.source] = uploader_list(source)

    if not Options["Source"]:
        Logger.log(['binaries'])
        for binary in binary_query:
            # Source entries take precedence over binary ones.
            if binary.package not in maintainers:
                maintainers[binary.package] = binary.maintainer.name
                uploaders[binary.package] = uploader_list(binary.source)

    Logger.log(['files'])
    # Process any additional Maintainer files (e.g. from pseudo
    # packages)
    for filename in extra_files:
        extrafile = utils.open_file(filename)
        for line in extrafile.readlines():
            line = re_comments.sub('', line).strip()
            if line == "":
                continue
            (package, maintainer) = line.split(None, 1)
            maintainers[package] = maintainer
            uploaders[package] = [maintainer]
        # Fix: the handle was previously leaked.
        extrafile.close()

    if Options["Print"]:
        for package in sorted(maintainers):
            sys.stdout.write(format(package, maintainers[package]))
    else:
        # "with" guarantees the output files are flushed and closed even
        # if a write fails part-way through.
        with open('Maintainers', 'w') as maintainer_file, open('Uploaders', 'w') as uploader_file:
            for package in sorted(uploaders):
                maintainer_file.write(format(package, maintainers[package]))
                for uploader in uploaders[package]:
                    uploader_file.write(format(package, uploader))

    Logger.close()
def remove(session, reason, suites, removals,
           whoami=None, partial=False, components=None, done_bugs=None, date=None,
           carbon_copy=None, close_related_bugs=False):
    """Batch remove a number of packages

    Removes the given source/binary packages from the given suites,
    updates the override table (for non-partial removals), writes the
    removal log files and (if a bug server is configured) sends the bug
    closing mails.

    @type session: SQLA Session
    @param session: The database session in use

    @type reason: string
    @param reason: The reason for the removal (e.g. "[auto-cruft] NBS (no longer built by <source>)")

    @type suites: list
    @param suites: A list of the suite names in which the removal should occur

    @type removals: list
    @param removals: A list of the removals.  Each element should be a tuple (or list) of at least the following
        for 4 items from the database (in order): package, version, architecture, (database) id.
        For source packages, the "architecture" should be set to "source".

    @type partial: bool
    @param partial: Whether the removal is "partial" (e.g. architecture specific).

    @type components: list
    @param components: List of components involved in a partial removal.  Can be an empty list to not restrict the
        removal to any components.

    @type whoami: string
    @param whoami: The person (or entity) doing the removal.  Defaults to utils.whoami()

    @type date: string
    @param date: The date of the removal. Defaults to commands.getoutput("date -R")

    @type done_bugs: list
    @param done_bugs: A list of bugs to be closed when doing this removal.

    @type close_related_bugs: bool
    @param close_related_bugs: Whether bugs related to the package being removed should be closed as well.
        NB: Not implemented for more than one suite.

    @type carbon_copy: list
    @param carbon_copy: A list of mail addresses to CC when doing removals.  NB: all items are taken "as-is" unlike
        "dak rm".

    @rtype: None
    @return: Nothing
    """
    # Generate the summary of what's to be removed
    d = {}
    summary = ""
    sources = []
    binaries = []
    whitelists = []
    versions = []
    suite_ids_list = []
    suites_list = utils.join_with_commas_and(suites)
    cnf = utils.get_conf()
    con_components = ''

    #######################################################################################################

    # Sanity-check the arguments before touching anything.
    if not reason:
        raise ValueError("Empty removal reason not permitted")
    if not removals:
        raise ValueError("Nothing to remove!?")
    if not suites:
        raise ValueError("Removals without a suite!?")

    if whoami is None:
        whoami = utils.whoami()

    if date is None:
        date = commands.getoutput("date -R")

    # For a partial removal restricted to components, build the SQL
    # fragment used later against the override table.
    if partial and components:
        component_ids_list = []
        for componentname in components:
            component = get_component(componentname, session=session)
            if component is None:
                raise ValueError("component '%s' not recognised." % componentname)
            else:
                component_ids_list.append(component.component_id)
        if component_ids_list:
            con_components = "AND component IN (%s)" % ", ".join([str(i) for i in component_ids_list])

    # d maps package -> version -> list of architectures.
    for i in removals:
        package = i[0]
        version = i[1]
        architecture = i[2]
        if package not in d:
            d[package] = {}
        if version not in d[package]:
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    # Render the "package | version | arch, arch" summary table.
    for package in sorted(d):
        versions = sorted(d[package], cmp=apt_pkg.version_compare)
        for version in versions:
            d[package][version].sort(utils.arch_compare_sw)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))

    # Re-parse the summary text to split the removals into "source" and
    # "binary" entries for the 822 log.
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    element[2] = sub("source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    # Unknown suite names are silently skipped here; verify that is intended.
    for suite in suites:
        s = get_suite(suite, session=session)
        if s is not None:
            suite_ids_list.append(s.suite_id)
            whitelists.append(s.mail_whitelist)

    #######################################################################################################
    log_filename = cnf["Rm::LogFile"]
    log822_filename = cnf["Rm::LogFile822"]
    # Both log files are locked exclusively for the whole removal so
    # concurrent removals do not interleave their entries.
    with utils.open_file(log_filename, "a") as logfile, utils.open_file(log822_filename, "a") as logfile822:
        fcntl.lockf(logfile, fcntl.LOCK_EX)
        fcntl.lockf(logfile822, fcntl.LOCK_EX)

        logfile.write("=========================================================================\n")
        logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
        logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
        if done_bugs:
            logfile.write("Closed bugs: %s\n" % (", ".join(done_bugs)))
        logfile.write("\n------------------- Reason -------------------\n%s\n" % reason)
        logfile.write("----------------------------------------------\n")

        logfile822.write("Date: %s\n" % date)
        logfile822.write("Ftpmaster: %s\n" % whoami)
        logfile822.write("Suite: %s\n" % suites_list)

        if sources:
            logfile822.write("Sources:\n")
            for source in sources:
                logfile822.write(" %s\n" % source)

        if binaries:
            logfile822.write("Binaries:\n")
            for binary in binaries:
                logfile822.write(" %s\n" % binary)

        logfile822.write("Reason: %s\n" % reason.replace('\n', '\n '))
        if done_bugs:
            logfile822.write("Bug: %s\n" % (", ".join(done_bugs)))

        # Perform the actual database removal.
        for i in removals:
            package = i[0]
            architecture = i[2]
            package_id = i[3]
            for suite_id in suite_ids_list:
                if architecture == "source":
                    session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                else:
                    session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                    {'packageid': package_id, 'suiteid': suite_id})
                # Delete from the override file
                if not partial:
                    if architecture == "source":
                        type_id = dsc_type_id
                    else:
                        type_id = deb_type_id
                    # TODO: Fix this properly to remove the remaining non-bind argument
                    session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (con_components),
                                    {'package': package, 'typeid': type_id, 'suiteid': suite_id})

        session.commit()
        # ### REMOVAL COMPLETE - send mail time ### #

        # If we don't have a Bug server configured, we're done
        if "Dinstall::BugServer" not in cnf:
            if done_bugs or close_related_bugs:
                utils.warn("Cannot send mail to BugServer as Dinstall::BugServer is not configured")

            logfile.write("=========================================================================\n")
            logfile822.write("\n")
            return

        # read common subst variables for all bug closure mails
        Subst_common = {}
        Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
        Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
        Subst_common["__CC__"] = "X-DAK: dak rm"
        if carbon_copy:
            Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
        Subst_common["__SUITE_LIST__"] = suites_list
        Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
        Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
        Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
        Subst_common["__WHOAMI__"] = whoami

        # Send the bug closing messages
        if done_bugs:
            # NOTE(review): this is an alias, not a copy -- updates to
            # Subst_close_rm also mutate Subst_common (and are therefore
            # visible to the close_related_bugs branch below).  Confirm
            # that is intended.
            Subst_close_rm = Subst_common
            bcc = []
            if cnf.find("Dinstall::Bcc") != "":
                bcc.append(cnf["Dinstall::Bcc"])
            if cnf.find("Rm::Bcc") != "":
                bcc.append(cnf["Rm::Bcc"])
            if bcc:
                Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_rm["__BCC__"] = "X-Filler: 42"
            summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, reason)
            summarymail += "----------------------------------------------\n"
            Subst_close_rm["__SUMMARY__"] = summarymail

            for bug in done_bugs:
                Subst_close_rm["__BUG_NUMBER__"] = bug
                if close_related_bugs:
                    mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close-with-related")
                else:
                    mail_message = utils.TemplateSubst(Subst_close_rm,cnf["Dir::Templates"]+"/rm.bug-close")
                utils.send_mail(mail_message, whitelists=whitelists)

        # close associated bug reports
        if close_related_bugs:
            # NOTE(review): alias again -- see Subst_close_rm above.
            Subst_close_other = Subst_common
            bcc = []
            wnpp = utils.parse_wnpp_bug_file()
            # Strip binNMU suffixes before comparing versions.
            versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
            if len(versions) == 1:
                Subst_close_other["__VERSION__"] = versions[0]
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs with multiple package versions is not supported. Do it yourself.")
            if bcc:
                Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
            else:
                Subst_close_other["__BCC__"] = "X-Filler: 42"
            # at this point, I just assume, that the first closed bug gives
            # some useful information on why the package got removed
            Subst_close_other["__BUG_NUMBER__"] = done_bugs[0]
            if len(sources) == 1:
                # NOTE(review): "source" here is the leaked loop variable
                # from the logfile822 "Sources:" loop above, not a fresh
                # lookup; with exactly one source it equals sources[0],
                # but sources[0] would be clearer/safer.
                source_pkg = source.split("_", 1)[0]
            else:
                logfile.write("=========================================================================\n")
                logfile822.write("\n")
                raise ValueError("Closing bugs for multiple source packages is not supported. Please do it yourself.")
            Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
            Subst_close_other["__SOURCE__"] = source_pkg
            merged_bugs = set()
            other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open', 'status', 'forwarded')
            if other_bugs:
                # NOTE(review): other_bugs is mutated (remove) while being
                # iterated, which can skip elements -- confirm behaviour.
                for bugno in other_bugs:
                    if bugno not in merged_bugs:
                        for bug in bts.get_status(bugno):
                            for merged in bug.mergedwith:
                                other_bugs.remove(merged)
                                merged_bugs.add(merged)
                logfile.write("Also closing bug(s):")
                logfile822.write("Also-Bugs:")
                for bug in other_bugs:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")
            if source_pkg in wnpp:
                logfile.write("Also closing WNPP bug(s):")
                logfile822.write("Also-WNPP:")
                for bug in wnpp[source_pkg]:
                    # the wnpp-rm file we parse also contains our removal
                    # bugs, filtering that out
                    if bug != Subst_close_other["__BUG_NUMBER__"]:
                        Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                        logfile.write(" " + str(bug))
                        logfile822.write(" " + str(bug))
                logfile.write("\n")
                logfile822.write("\n")

            mail_message = utils.TemplateSubst(Subst_close_other, cnf["Dir::Templates"]+"/rm.bug-close-related")
            # Only send if there is actually at least one related bug.
            if Subst_close_other["__BUG_NUMBER_ALSO__"]:
                utils.send_mail(mail_message)

        logfile.write("=========================================================================\n")
        logfile822.write("\n")
def main():
    """Entry point for dak add-user.

    Looks up the given key (-k) in the active keyrings via gpg, extracts
    fingerprint, uid and mail addresses, and -- after interactive
    confirmation -- creates the uid in the database, optionally extends
    the mail whitelist and mails the new user.
    """
    global Cnf
    keyrings = None

    Cnf = utils.get_conf()

    Arguments = [('h', "help", "Add-User::Options::Help"),
                 ('k', "key", "Add-User::Options::Key", "HasArg"),
                 ('u', "user", "Add-User::Options::User", "HasArg"),
                 ]

    # Make sure flag options have a default so lookups never fail.
    for i in ["help"]:
        key = "Add-User::Options::%s" % i
        if key not in Cnf:
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Add-User::Options")
    if Options["help"]:
        usage()

    session = DBConn().session()

    if not keyrings:
        keyrings = get_active_keyring_paths()

    # Ask gpg for the machine-readable (--with-colons) description of the key.
    cmd = ["gpg", "--with-colons", "--no-secmem-warning",
           "--no-auto-check-trustdb", "--with-fingerprint",
           "--no-default-keyring"]
    cmd.extend(utils.gpg_keyring_args(keyrings).split())
    cmd.extend(["--list-key", "--", Cnf["Add-User::Options::Key"]])

    # NOTE(review): under Python 3 check_output returns bytes, which the
    # str regexes below would reject -- confirm this runs under Python 2
    # or decode the output.
    output = subprocess.check_output(cmd).rstrip()
    m = re_gpg_fingerprint_colon.search(output)
    if not m:
        print(output)
        utils.fubar("0x%s: (1) No fingerprint found in gpg output but it returned 0?\n%s"
                    % (Cnf["Add-User::Options::Key"],
                       utils.prefix_multi_line_string(output, " [GPG output:] ")))
    primary_key = m.group(1)
    primary_key = primary_key.replace(" ", "")

    uid = ""
    # -u overrides the uid/name parsed from the key.
    if "Add-User::Options::User" in Cnf and Cnf["Add-User::Options::User"]:
        uid = Cnf["Add-User::Options::User"]
        name = Cnf["Add-User::Options::User"]
    else:
        u = re_user_address.search(output)
        if not u:
            print(output)
            utils.fubar("0x%s: (2) No userid found in gpg output but it returned 0?\n%s"
                        % (Cnf["Add-User::Options::Key"],
                           utils.prefix_multi_line_string(output, " [GPG output:] ")))
        uid = u.group(1)
        n = re_user_name.search(output)
        name = n.group(1)

    # Look for all email addresses on the key.
    emails = []
    for line in output.split('\n'):
        e = re_user_mails.search(line)
        if not e:
            continue
        emails.append(e.group(2))

    print("0x%s -> %s <%s> -> %s -> %s"
          % (Cnf["Add-User::Options::Key"], name, emails[0], uid, primary_key))

    prompt = "Add user %s with above data (y/N) ? " % (uid)
    yn = utils.our_raw_input(prompt).lower()

    if yn == "y":
        # Create an account for the user?
        summary = ""

        # Now add user to the database.
        # Note that we provide a session, so we're responsible for committing
        uidobj = get_or_set_uid(uid, session=session)
        uid_id = uidobj.uid_id
        session.commit()

        # Lets add user to the email-whitelist file if its configured.
        if "Dinstall::MailWhiteList" in Cnf and Cnf["Dinstall::MailWhiteList"] != "":
            f = utils.open_file(Cnf["Dinstall::MailWhiteList"], "a")
            for mail in emails:
                f.write(mail + '\n')
            f.close()

        print("Added:\nUid:\t %s (ID: %s)\nMaint:\t %s\nFP:\t %s"
              % (uid, uid_id, name, primary_key))

        # Should we send mail to the newly added user?
        if Cnf.find_b("Add-User::SendEmail"):
            mail = name + "<" + emails[0] + ">"
            Subst = {}
            Subst["__NEW_MAINTAINER__"] = mail
            Subst["__UID__"] = uid
            Subst["__KEYID__"] = Cnf["Add-User::Options::Key"]
            Subst["__PRIMARY_KEY__"] = primary_key
            Subst["__FROM_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
            Subst["__HOSTNAME__"] = Cnf["Dinstall::MyHost"]
            Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
            Subst["__SUMMARY__"] = summary
            new_add_message = utils.TemplateSubst(Subst, Cnf["Dir::Templates"] + "/add-user.added")
            utils.send_mail(new_add_message)
    else:
        uid = None
def main():
    """Entry point for dak make-maintainers (simple, archive-unaware variant).

    Builds per-package Maintainer and Uploader indices for the newest
    version of every source and binary package, merges in any extra
    Maintainer files given on the command line, and writes the
    'Maintainers' and 'Uploaders' files in the current directory.
    """
    cnf = Config()

    Arguments = [('h',"help","Make-Maintainers::Options::Help")]
    # has_key() is deprecated (removed in Python 3); use the "in" operator.
    if "Make-Maintainers::Options::Help" not in cnf:
        cnf["Make-Maintainers::Options::Help"] = ""

    extra_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Maintainers::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('make-maintainers')
    session = DBConn().session()

    # dictionary packages to maintainer names
    maintainers = dict()
    # dictionary packages to list of uploader names
    uploaders = dict()

    # "distinct on ... order by version desc" picks the newest version of
    # each package.
    source_query = session.query(DBSource).from_statement('''
        select distinct on (source) * from source
            order by source, version desc''')

    binary_query = session.query(DBBinary).from_statement('''
        select distinct on (package) * from binaries
            order by package, version desc''')

    Logger.log(['sources'])
    for source in source_query:
        maintainers[source.source] = source.maintainer.name
        uploaders[source.source] = uploader_list(source)

    Logger.log(['binaries'])
    for binary in binary_query:
        # Source entries take precedence over binary ones.
        if binary.package not in maintainers:
            maintainers[binary.package] = binary.maintainer.name
            uploaders[binary.package] = uploader_list(binary.source)

    Logger.log(['files'])
    # Process any additional Maintainer files (e.g. from pseudo
    # packages)
    for filename in extra_files:
        extrafile = utils.open_file(filename)
        for line in extrafile.readlines():
            line = re_comments.sub('', line).strip()
            if line == "":
                continue
            (package, maintainer) = line.split(None, 1)
            maintainers[package] = maintainer
            uploaders[package] = [maintainer]
        # Fix: the handle was previously leaked.
        extrafile.close()

    # "with" guarantees the output files are flushed and closed even if a
    # write fails part-way through.
    with open('Maintainers', 'w') as maintainer_file, open('Uploaders', 'w') as uploader_file:
        for package in sorted(uploaders):
            maintainer_file.write(format(package, maintainers[package]))
            for uploader in uploaders[package]:
                uploader_file.write(format(package, uploader))

    Logger.close()
def main():
    """Entry point for 'dak control-suite': add/list/remove/set suite contents."""
    global Logger

    cnf = Config()

    Arguments = [('a', "add", "Control-Suite::Options::Add", "HasArg"),
                 ('b', "britney", "Control-Suite::Options::Britney"),
                 ('f', 'force', 'Control-Suite::Options::Force'),
                 ('h', "help", "Control-Suite::Options::Help"),
                 ('l', "list", "Control-Suite::Options::List", "HasArg"),
                 ('r', "remove", "Control-Suite::Options::Remove", "HasArg"),
                 ('s', "set", "Control-Suite::Options::Set", "HasArg")]

    # Pre-seed every option key so the config lookups below cannot fail.
    for opt in ("add", "britney", "help", "list", "remove", "set", "version"):
        cfg_key = "Control-Suite::Options::%s" % opt
        if cfg_key not in cnf:
            cnf[cfg_key] = ""

    try:
        file_list = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    except SystemError as err:
        print("%s\n" % err)
        usage(1)

    Options = cnf.subtree("Control-Suite::Options")

    if Options["Help"]:
        usage()

    force = "Force" in Options and Options["Force"]

    # Exactly one of add/list/remove/set may be requested; whichever it
    # is also carries the suite name as its argument.
    action = None
    for candidate in ("add", "list", "remove", "set"):
        value = cnf["Control-Suite::Options::%s" % (candidate)]
        if value == "":
            continue
        suite_name = value
        if action:
            utils.fubar("Can only perform one action at a time.")
        action = candidate

    if action is None:
        utils.fubar("No action specified.")

    # Britney-format input is only honoured for 'set'.
    britney = bool(action == "set" and cnf["Control-Suite::Options::Britney"])

    if action == "list":
        # Read-only path: plain session, no logger or archive transaction.
        session = DBConn().session()
        suite = session.query(Suite).filter_by(suite_name=suite_name).one()
        get_list(suite, session)
    else:
        Logger = daklog.Logger("control-suite")
        with ArchiveTransaction() as transaction:
            session = transaction.session
            suite = session.query(Suite).filter_by(suite_name=suite_name).one()

            # Suites normally have to opt in to 'set' via allowcsset.
            if action == "set" and not suite.allowcsset:
                if force:
                    utils.warn("Would not normally allow setting suite {0} (allowcsset is FALSE), but --force used".format(suite_name))
                else:
                    utils.fubar("Will not reset suite {0} due to its database configuration (allowcsset is FALSE)".format(suite_name))

            if not file_list:
                process_file(sys.stdin, suite, action, transaction, britney, force)
            else:
                for path in file_list:
                    process_file(utils.open_file(path), suite, action, transaction, britney, force)
        Logger.close()
def main():
    """Entry point for 'dak add-user'.

    Looks up the key given with -k/--key in the active keyrings via gpg,
    extracts fingerprint, uid, name and all email addresses from the
    colon-delimited listing, and -- after interactive confirmation --
    creates the uid in the database, optionally appends the addresses to
    the mail whitelist and mails the new user.
    """
    global Cnf
    keyrings = None

    Cnf = utils.get_conf()

    Arguments = [('h', "help", "Add-User::Options::Help"),
                 ('k', "key", "Add-User::Options::Key", "HasArg"),
                 ('u', "user", "Add-User::Options::User", "HasArg"),
                 ]

    # Pre-seed option keys so the lookups below cannot fail.
    for i in ["help"]:
        key = "Add-User::Options::%s" % i
        if key not in Cnf:
            Cnf[key] = ""

    apt_pkg.parse_commandline(Cnf, Arguments, sys.argv)

    Options = Cnf.subtree("Add-User::Options")
    # apt_pkg configuration lookups are case-insensitive, so "help"
    # matches the "Help" key registered in Arguments.
    if Options["help"]:
        usage()

    session = DBConn().session()

    if not keyrings:
        keyrings = get_active_keyring_paths()

    # List the requested key from the active keyrings in the stable,
    # machine-parseable --with-colons format.
    cmd = ["gpg", "--with-colons", "--no-secmem-warning",
           "--no-auto-check-trustdb", "--with-fingerprint",
           "--no-default-keyring"]
    cmd.extend(utils.gpg_keyring_args(keyrings).split())
    cmd.extend(["--list-key", "--", Cnf["Add-User::Options::Key"]])
    # NOTE(review): check_output() returns bytes on Python 3; the regex
    # searches and split('\n') below assume str -- presumably the re_*
    # patterns/environment handle this; verify before touching.
    output = subprocess.check_output(cmd).rstrip()
    m = re_gpg_fingerprint_colon.search(output)
    if not m:
        print(output)
        utils.fubar("0x%s: (1) No fingerprint found in gpg output but it returned 0?\n%s"
                    % (Cnf["Add-User::Options::Key"],
                       utils.prefix_multi_line_string(output, " [GPG output:] ")))
    primary_key = m.group(1)
    primary_key = primary_key.replace(" ", "")

    uid = ""
    # An explicit -u/--user overrides whatever the key's uid says.
    if "Add-User::Options::User" in Cnf and Cnf["Add-User::Options::User"]:
        uid = Cnf["Add-User::Options::User"]
        name = Cnf["Add-User::Options::User"]
    else:
        u = re_user_address.search(output)
        if not u:
            print(output)
            utils.fubar("0x%s: (2) No userid found in gpg output but it returned 0?\n%s"
                        % (Cnf["Add-User::Options::Key"],
                           utils.prefix_multi_line_string(output, " [GPG output:] ")))
        uid = u.group(1)
        n = re_user_name.search(output)
        name = n.group(1)

    # Look for all email addresses on the key.
    emails = []
    for line in output.split('\n'):
        e = re_user_mails.search(line)
        if not e:
            continue
        emails.append(e.group(2))

    print("0x%s -> %s <%s> -> %s -> %s"
          % (Cnf["Add-User::Options::Key"], name, emails[0], uid, primary_key))

    prompt = "Add user %s with above data (y/N) ? " % (uid)
    yn = utils.our_raw_input(prompt).lower()

    if yn == "y":
        # Create an account for the user?
        summary = ""

        # Now add user to the database.
        # Note that we provide a session, so we're responsible for committing
        uidobj = get_or_set_uid(uid, session=session)
        uid_id = uidobj.uid_id
        session.commit()

        # Lets add user to the email-whitelist file if its configured.
        if "Dinstall::MailWhiteList" in Cnf and Cnf["Dinstall::MailWhiteList"] != "":
            f = utils.open_file(Cnf["Dinstall::MailWhiteList"], "a")
            for mail in emails:
                f.write(mail + '\n')
            f.close()

        print("Added:\nUid:\t %s (ID: %s)\nMaint:\t %s\nFP:\t %s"
              % (uid, uid_id, name, primary_key))

        # Should we send mail to the newly added user?
        if Cnf.find_b("Add-User::SendEmail"):
            mail = name + "<" + emails[0] + ">"
            Subst = {}
            Subst["__NEW_MAINTAINER__"] = mail
            Subst["__UID__"] = uid
            Subst["__KEYID__"] = Cnf["Add-User::Options::Key"]
            Subst["__PRIMARY_KEY__"] = primary_key
            Subst["__FROM_ADDRESS__"] = Cnf["Dinstall::MyEmailAddress"]
            Subst["__ADMIN_ADDRESS__"] = Cnf["Dinstall::MyAdminAddress"]
            Subst["__HOSTNAME__"] = Cnf["Dinstall::MyHost"]
            Subst["__DISTRO__"] = Cnf["Dinstall::MyDistribution"]
            # summary is currently always empty; kept for the template.
            Subst["__SUMMARY__"] = summary
            new_add_message = utils.TemplateSubst(Subst, Cnf["Dir::Templates"] + "/add-user.added")
            utils.send_mail(new_add_message)
    else:
        uid = None
def main():
    """Entry point for 'dak make-maintainers' (SQL variant).

    Builds package -> maintainer and package -> uploaders maps for one
    archive with a single SQL query, merges in any extra Maintainer
    files from the command line, then either prints the maintainer list
    (-p) or writes 'Maintainers' and 'Uploaders' files.
    """
    cnf = Config()

    Arguments = [('h', "help", "Make-Maintainers::Options::Help"),
                 ('a', "archive", "Make-Maintainers::Options::Archive", 'HasArg'),
                 ('s', "source", "Make-Maintainers::Options::Source"),
                 ('p', "print", "Make-Maintainers::Options::Print")]
    # Pre-seed option keys so the lookups below cannot fail.
    for i in ["Help", "Source", "Print"]:
        key = "Make-Maintainers::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    extra_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Maintainers::Options")

    # -a/--archive is mandatory.
    if Options["Help"] or not Options.get('Archive'):
        usage()

    Logger = daklog.Logger('make-maintainers')
    session = DBConn().session()
    archive = session.query(Archive).filter_by(archive_name=Options['Archive']).one()

    # dictionary packages to maintainer names
    maintainers = dict()
    # dictionary packages to list of uploader names
    uploaders = dict()

    # One query does all the work: the inner UNION collects sources and
    # (unless --source) binaries, DISTINCT ON keeps only the newest
    # version per package, and the window function aggregates all
    # uploader names for the owning source next to each row.
    query = session.execute(text('''
        SELECT
            bs.package,
            bs.name AS maintainer,
            array_agg(mu.name) OVER (
                PARTITION BY bs.source, bs.id
                ORDER BY mu.name
                ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
            ) AS uploaders
        FROM (
            SELECT DISTINCT ON (package) * FROM (
                SELECT s.id AS source, 0 AS id, s.source AS package, s.version, m.name
                    FROM source AS s
                    INNER JOIN maintainer AS m ON s.maintainer = m.id
                    INNER JOIN src_associations AS sa ON s.id = sa.source
                    INNER JOIN suite on sa.suite = suite.id
                    WHERE suite.archive_id = :archive_id
                UNION SELECT b.source, b.id, b.package, b.version, m.name
                    FROM binaries AS b
                    INNER JOIN maintainer AS m ON b.maintainer = m.id
                    INNER JOIN bin_associations AS ba ON b.id = ba.bin
                    INNER JOIN suite on ba.suite = suite.id
                    WHERE NOT :source_only AND suite.archive_id = :archive_id
                ) AS bs
                ORDER BY package, version desc
            ) AS bs
        LEFT OUTER JOIN
            -- find all uploaders for a given source
            src_uploaders AS su ON bs.source = su.source
        LEFT OUTER JOIN maintainer AS mu ON su.maintainer = mu.id
        ''').params(
        archive_id=archive.archive_id,
        source_only="True" if Options["Source"] else "False"
    ))

    Logger.log(['database'])
    for entry in query:
        maintainers[entry['package']] = entry['maintainer']
        # The LEFT JOIN yields [None] for packages without uploaders.
        if all(x is None for x in entry['uploaders']):
            uploaders[entry['package']] = ['']
        else:
            uploaders[entry['package']] = entry['uploaders']

    Logger.log(['files'])
    # Process any additional Maintainer files (e.g. from pseudo
    # packages)
    for filename in extra_files:
        extrafile = utils.open_file(filename)
        for line in extrafile.readlines():
            line = re_comments.sub('', line).strip()
            if line == "":
                continue
            (package, maintainer) = line.split(None, 1)
            maintainers[package] = maintainer
            uploaders[package] = [maintainer]

    if Options["Print"]:
        # format() here is the project helper, not the builtin; its
        # output already ends with a newline, hence end=''.
        for package in sorted(maintainers):
            print(format(package, maintainers[package]), end='')
    else:
        maintainer_file = open('Maintainers', 'w')
        uploader_file = open('Uploaders', 'w')
        for package in sorted(uploaders):
            maintainer_file.write(format(package, maintainers[package]))
            for uploader in uploaders[package]:
                uploader_file.write(format(package, uploader))
        uploader_file.close()
        maintainer_file.close()
    Logger.close()
def main():
    """Entry point for 'dak make-maintainers' (stdout-writing variant).

    Builds package -> maintainer and package -> uploaders maps for one
    archive with a single SQL query, merges in any extra Maintainer
    files from the command line, then either writes the maintainer list
    to stdout (-p) or writes 'Maintainers' and 'Uploaders' files.
    """
    cnf = Config()

    Arguments = [('h', "help", "Make-Maintainers::Options::Help"),
                 ('a', "archive", "Make-Maintainers::Options::Archive", 'HasArg'),
                 ('s', "source", "Make-Maintainers::Options::Source"),
                 ('p', "print", "Make-Maintainers::Options::Print")]
    # Pre-seed option keys so the lookups below cannot fail.
    for i in ["Help", "Source", "Print"]:
        key = "Make-Maintainers::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    extra_files = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Make-Maintainers::Options")

    # -a/--archive is mandatory.
    if Options["Help"] or not Options.get('Archive'):
        usage()

    Logger = daklog.Logger('make-maintainers')
    session = DBConn().session()
    archive = session.query(Archive).filter_by(
        archive_name=Options['Archive']).one()

    # dictionary packages to maintainer names
    maintainers = dict()
    # dictionary packages to list of uploader names
    uploaders = dict()

    # One query does all the work: the inner UNION collects sources and
    # (unless --source) binaries, DISTINCT ON keeps only the newest
    # version per package, and the window function aggregates all
    # uploader names for the owning source next to each row.
    query = session.execute(
        text('''
        SELECT
            bs.package,
            bs.name AS maintainer,
            array_agg(mu.name) OVER (
                PARTITION BY bs.source, bs.id
                ORDER BY mu.name
                ROWS BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING
            ) AS uploaders
        FROM (
            SELECT DISTINCT ON (package) * FROM (
                SELECT s.id AS source, 0 AS id, s.source AS package, s.version, m.name
                    FROM source AS s
                    INNER JOIN maintainer AS m ON s.maintainer = m.id
                    INNER JOIN src_associations AS sa ON s.id = sa.source
                    INNER JOIN suite on sa.suite = suite.id
                    WHERE suite.archive_id = :archive_id
                UNION SELECT b.source, b.id, b.package, b.version, m.name
                    FROM binaries AS b
                    INNER JOIN maintainer AS m ON b.maintainer = m.id
                    INNER JOIN bin_associations AS ba ON b.id = ba.bin
                    INNER JOIN suite on ba.suite = suite.id
                    WHERE NOT :source_only AND suite.archive_id = :archive_id
                ) AS bs
                ORDER BY package, version desc
            ) AS bs
        LEFT OUTER JOIN
            -- find all uploaders for a given source
            src_uploaders AS su ON bs.source = su.source
        LEFT OUTER JOIN maintainer AS mu ON su.maintainer = mu.id
        ''').params(archive_id=archive.archive_id,
                    source_only="True" if Options["Source"] else "False"))

    Logger.log(['database'])
    for entry in query:
        maintainers[entry['package']] = entry['maintainer']
        # The LEFT JOIN yields [None] for packages without uploaders.
        if all(x is None for x in entry['uploaders']):
            uploaders[entry['package']] = ['']
        else:
            uploaders[entry['package']] = entry['uploaders']

    Logger.log(['files'])
    # Process any additional Maintainer files (e.g. from pseudo
    # packages)
    for filename in extra_files:
        extrafile = utils.open_file(filename)
        for line in extrafile.readlines():
            line = re_comments.sub('', line).strip()
            if line == "":
                continue
            (package, maintainer) = line.split(None, 1)
            maintainers[package] = maintainer
            uploaders[package] = [maintainer]

    if Options["Print"]:
        # format() here is the project helper, not the builtin; its
        # output already includes the trailing newline.
        for package in sorted(maintainers):
            sys.stdout.write(format(package, maintainers[package]))
    else:
        maintainer_file = open('Maintainers', 'w')
        uploader_file = open('Uploaders', 'w')
        for package in sorted(uploaders):
            maintainer_file.write(format(package, maintainers[package]))
            for uploader in uploaders[package]:
                uploader_file.write(format(package, uploader))
        uploader_file.close()
        maintainer_file.close()
    Logger.close()
def main():
    """Entry point for 'dak control-suite': add/list/remove/set suite contents.

    Modernized for consistency with the rest of the file: Python-2-only
    has_key()/print statements replaced by `in` and print(), identity
    comparison with None, stray semicolon dropped.  Logic is unchanged.
    """
    global Logger

    cnf = Config()

    Arguments = [('a', "add", "Control-Suite::Options::Add", "HasArg"),
                 ('b', "britney", "Control-Suite::Options::Britney"),
                 ('f', 'force', 'Control-Suite::Options::Force'),
                 ('h', "help", "Control-Suite::Options::Help"),
                 ('l', "list", "Control-Suite::Options::List", "HasArg"),
                 ('r', "remove", "Control-Suite::Options::Remove", "HasArg"),
                 ('s', "set", "Control-Suite::Options::Set", "HasArg")]

    # Pre-seed every option key so the config lookups below cannot fail.
    for i in ["add", "britney", "help", "list", "remove", "set", "version"]:
        if "Control-Suite::Options::%s" % (i) not in cnf:
            cnf["Control-Suite::Options::%s" % (i)] = ""

    try:
        file_list = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    except SystemError as e:
        print("%s\n" % e)
        usage(1)

    Options = cnf.subtree("Control-Suite::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    force = "Force" in Options and Options["Force"]

    # Exactly one of add/list/remove/set may be requested; whichever it
    # is also names the suite to act on.
    action = None

    for i in ("add", "list", "remove", "set"):
        if cnf["Control-Suite::Options::%s" % (i)] != "":
            suite_name = cnf["Control-Suite::Options::%s" % (i)]
            suite = get_suite(suite_name, session=session)
            if suite is None:
                utils.fubar("Unknown suite '%s'." % (suite_name))
            else:
                if action:
                    utils.fubar("Can only perform one action at a time.")
                action = i

    # Safety/Sanity check: suites must opt in to 'set' via allowcsset.
    if action == "set" and (not suite.allowcsset):
        if force:
            utils.warn("Would not normally allow setting suite %s (allowsetcs is FALSE), but --force used" % (suite_name))
        else:
            utils.fubar("Will not reset suite %s due to its database configuration (allowsetcs is FALSE)" % (suite_name))

    # Need an action...
    if action is None:
        utils.fubar("No action specified.")

    # Britney-format input is only honoured for 'set'.
    britney = False
    if action == "set" and cnf["Control-Suite::Options::Britney"]:
        britney = True

    if action == "list":
        get_list(suite, session)
    else:
        Logger = daklog.Logger("control-suite")
        if file_list:
            for f in file_list:
                process_file(utils.open_file(f), suite, action, session, britney, force)
        else:
            process_file(sys.stdin, suite, action, session, britney, force)
        Logger.close()
def main():
    """Entry point for 'dak rm': remove packages from one or more suites.

    Parses options, selects the matching source/binary rows, shows a
    summary and asks for confirmation, deletes the suite associations
    (and override entries unless --partial), writes plain-text and
    RFC822-style log files, and finally mails the BTS to close bugs
    (-d/--done) and, with -D/--do-close, bugs filed against the removed
    source package itself.

    Python-2-era code (print statements, has_key, commands module); the
    package selection builds SQL by string interpolation -- see the XXX
    below.
    """
    global Options

    cnf = Config()

    Arguments = [('h', "help", "Rm::Options::Help"),
                 ('a', "architecture", "Rm::Options::Architecture", "HasArg"),
                 ('b', "binary", "Rm::Options::Binary"),
                 ('B', "binary-only", "Rm::Options::Binary-Only"),
                 ('c', "component", "Rm::Options::Component", "HasArg"),
                 ('C', "carbon-copy", "Rm::Options::Carbon-Copy", "HasArg"), # Bugs to Cc
                 ('d', "done", "Rm::Options::Done", "HasArg"), # Bugs fixed
                 ('D', "do-close", "Rm::Options::Do-Close"),
                 ('R', "rdep-check", "Rm::Options::Rdep-Check"),
                 ('m', "reason", "Rm::Options::Reason", "HasArg"), # Hysterical raisins; -m is old-dinstall option for rejection reason
                 ('n', "no-action", "Rm::Options::No-Action"),
                 ('p', "partial", "Rm::Options::Partial"),
                 ('s', "suite", "Rm::Options::Suite", "HasArg"),
                 ('S', "source-only", "Rm::Options::Source-Only"),
                 ]

    # Pre-seed option keys so the lookups below cannot fail; the suite
    # defaults to unstable.
    for i in ["architecture", "binary", "binary-only", "carbon-copy", "component",
              "done", "help", "no-action", "partial", "rdep-check", "reason",
              "source-only", "Do-Close"]:
        if not cnf.has_key("Rm::Options::%s" % (i)):
            cnf["Rm::Options::%s" % (i)] = ""
    if not cnf.has_key("Rm::Options::Suite"):
        cnf["Rm::Options::Suite"] = "unstable"

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Rm::Options")

    if Options["Help"]:
        usage()

    session = DBConn().session()

    # Sanity check options
    if not arguments:
        utils.fubar("need at least one package name as an argument.")
    if Options["Architecture"] and Options["Source-Only"]:
        utils.fubar("can't use -a/--architecture and -S/--source-only options simultaneously.")
    if ((Options["Binary"] and Options["Source-Only"])
            or (Options["Binary"] and Options["Binary-Only"])
            or (Options["Binary-Only"] and Options["Source-Only"])):
        utils.fubar("Only one of -b/--binary, -B/--binary-only and -S/--source-only can be used.")
    if Options.has_key("Carbon-Copy") and not Options.has_key("Done"):
        utils.fubar("can't use -C/--carbon-copy without also using -d/--done option.")
    if Options["Architecture"] and not Options["Partial"]:
        utils.warn("-a/--architecture implies -p/--partial.")
        Options["Partial"] = "true"
    if Options["Do-Close"] and not Options["Done"]:
        utils.fubar("No.")
    if (Options["Do-Close"]
            and (Options["Binary"] or Options["Binary-Only"] or Options["Source-Only"])):
        utils.fubar("No.")

    # Force the admin to tell someone if we're not doing a 'dak
    # cruft-report' inspired removal (or closing a bug, which counts
    # as telling someone).
    if not Options["No-Action"] and not Options["Carbon-Copy"] \
       and not Options["Done"] and Options["Reason"].find("[auto-cruft]") == -1:
        utils.fubar("Need a -C/--carbon-copy if not closing a bug and not doing a cruft removal.")

    # Process -C/--carbon-copy
    #
    # Accept 3 types of arguments (space separated):
    #  1) a number - assumed to be a bug number, i.e. [email protected]
    #  2) the keyword 'package' - cc's [email protected] for every argument
    #  3) contains a '@' - assumed to be an email address, used unmofidied
    #
    carbon_copy = []
    for copy_to in utils.split_args(Options.get("Carbon-Copy")):
        if copy_to.isdigit():
            if cnf.has_key("Dinstall::BugServer"):
                carbon_copy.append(copy_to + "@" + cnf["Dinstall::BugServer"])
            else:
                utils.fubar("Asked to send mail to #%s in BTS but Dinstall::BugServer is not configured" % copy_to)
        elif copy_to == 'package':
            for package in arguments:
                if cnf.has_key("Dinstall::PackagesServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::PackagesServer"])
                if cnf.has_key("Dinstall::TrackingServer"):
                    carbon_copy.append(package + "@" + cnf["Dinstall::TrackingServer"])
        elif '@' in copy_to:
            carbon_copy.append(copy_to)
        else:
            utils.fubar("Invalid -C/--carbon-copy argument '%s'; not a bug number, 'package' or email address." % (copy_to))

    # Build the package-name restriction for the SQL below.
    if Options["Binary"]:
        field = "b.package"
    else:
        field = "s.source"
    con_packages = "AND %s IN (%s)" % (field, ", ".join([repr(i) for i in arguments]))

    (con_suites, con_architectures, con_components, check_source) = \
        utils.parse_args(Options)

    # Additional suite checks
    suite_ids_list = []
    whitelists = []
    suites = utils.split_args(Options["Suite"])
    suites_list = utils.join_with_commas_and(suites)
    if not Options["No-Action"]:
        for suite in suites:
            s = get_suite(suite, session=session)
            if s is not None:
                suite_ids_list.append(s.suite_id)
                whitelists.append(s.mail_whitelist)
            # Extra confirmation for the suites where a mistake hurts most.
            if suite in ("oldstable", "stable"):
                print "**WARNING** About to remove from the (old)stable suite!"
                print "This should only be done just prior to a (point) release and not at"
                print "any other time."
                game_over()
            elif suite == "testing":
                print "**WARNING About to remove from the testing suite!"
                print "There's no need to do this normally as removals from unstable will"
                print "propogate to testing automagically."
                game_over()

    # Additional architecture checks
    if Options["Architecture"] and check_source:
        utils.warn("'source' in -a/--argument makes no sense and is ignored.")

    # Additional component processing
    over_con_components = con_components.replace("c.id", "component")

    # Don't do dependency checks on multiple suites
    if Options["Rdep-Check"] and len(suites) > 1:
        utils.fubar("Reverse dependency check on multiple suites is not implemented.")

    to_remove = []
    maintainers = {}

    # We have 3 modes of package selection: binary, source-only, binary-only
    # and source+binary.

    # XXX: TODO: This all needs converting to use placeholders or the object
    #            API. It's an SQL injection dream at the moment

    if Options["Binary"]:
        # Removal by binary package name
        q = session.execute("SELECT b.package, b.version, a.arch_string, b.id, b.maintainer FROM binaries b, bin_associations ba, architecture a, suite su, files f, files_archive_map af, component c WHERE ba.bin = b.id AND ba.suite = su.id AND b.architecture = a.id AND b.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s %s" % (con_packages, con_suites, con_components, con_architectures))
        to_remove.extend(q)
    else:
        # Source-only
        if not Options["Binary-Only"]:
            q = session.execute("SELECT s.source, s.version, 'source', s.id, s.maintainer FROM source s, src_associations sa, suite su, archive, files f, files_archive_map af, component c WHERE sa.source = s.id AND sa.suite = su.id AND archive.id = su.archive_id AND s.file = f.id AND af.file_id = f.id AND af.archive_id = su.archive_id AND af.component_id = c.id %s %s %s" % (con_packages, con_suites, con_components))
            to_remove.extend(q)
        if not Options["Source-Only"]:
            # Source + Binary
            q = session.execute("""
                    SELECT b.package, b.version, a.arch_string, b.id, b.maintainer
                    FROM binaries b
                         JOIN bin_associations ba ON b.id = ba.bin
                         JOIN architecture a ON b.architecture = a.id
                         JOIN suite su ON ba.suite = su.id
                         JOIN archive ON archive.id = su.archive_id
                         JOIN files_archive_map af ON b.file = af.file_id AND af.archive_id = archive.id
                         JOIN component c ON af.component_id = c.id
                         JOIN source s ON b.source = s.id
                         JOIN src_associations sa ON s.id = sa.source AND sa.suite = su.id
                    WHERE TRUE %s %s %s %s""" % (con_packages, con_suites, con_components, con_architectures))
            to_remove.extend(q)

    if not to_remove:
        print "Nothing to do."
        sys.exit(0)

    # If we don't have a reason; spawn an editor so the user can add one
    # Write the rejection email out as the <foo>.reason file
    if not Options["Reason"] and not Options["No-Action"]:
        (fd, temp_filename) = utils.temp_filename()
        editor = os.environ.get("EDITOR", "vi")
        result = os.system("%s %s" % (editor, temp_filename))
        if result != 0:
            utils.fubar("vi invocation failed for `%s'!" % (temp_filename), result)
        temp_file = utils.open_file(temp_filename)
        for line in temp_file.readlines():
            Options["Reason"] += line
        temp_file.close()
        os.unlink(temp_filename)

    # Generate the summary of what's to be removed
    # d: package -> version -> [architectures]
    d = {}
    for i in to_remove:
        package = i[0]
        version = i[1]
        architecture = i[2]
        maintainer = i[4]
        maintainers[maintainer] = ""
        if not d.has_key(package):
            d[package] = {}
        if not d[package].has_key(version):
            d[package][version] = []
        if architecture not in d[package][version]:
            d[package][version].append(architecture)

    maintainer_list = []
    for maintainer_id in maintainers.keys():
        maintainer_list.append(get_maintainer(maintainer_id).name)
    summary = ""
    removals = d.keys()
    removals.sort()
    versions = []
    for package in removals:
        versions = d[package].keys()
        versions.sort(apt_pkg.version_compare)
        for version in versions:
            d[package][version].sort(utils.arch_compare_sw)
            summary += "%10s | %10s | %s\n" % (package, version, ", ".join(d[package][version]))
    print "Will remove the following packages from %s:" % (suites_list)
    print
    print summary
    print "Maintainer: %s" % ", ".join(maintainer_list)
    if Options["Done"]:
        print "Will also close bugs: " + Options["Done"]
    if carbon_copy:
        print "Will also send CCs to: " + ", ".join(carbon_copy)
    if Options["Do-Close"]:
        print "Will also close associated bug reports."
    print
    print "------------------- Reason -------------------"
    print Options["Reason"]
    print "----------------------------------------------"
    print

    if Options["Rdep-Check"]:
        arches = utils.split_args(Options["Architecture"])
        reverse_depends_check(removals, suites[0], arches, session)

    # If -n/--no-action, drop out here
    if Options["No-Action"]:
        sys.exit(0)

    print "Going to remove the packages now."
    game_over()

    whoami = utils.whoami()
    # Python-2 'commands' module; RFC 2822 date from the shell.
    date = commands.getoutput('date -R')

    # Log first; if it all falls apart I want a record that we at least tried.
    logfile = utils.open_file(cnf["Rm::LogFile"], 'a')
    logfile.write("=========================================================================\n")
    logfile.write("[Date: %s] [ftpmaster: %s]\n" % (date, whoami))
    logfile.write("Removed the following packages from %s:\n\n%s" % (suites_list, summary))
    if Options["Done"]:
        logfile.write("Closed bugs: %s\n" % (Options["Done"]))
    logfile.write("\n------------------- Reason -------------------\n%s\n" % (Options["Reason"]))
    logfile.write("----------------------------------------------\n")

    # Do the same in rfc822 format
    logfile822 = utils.open_file(cnf["Rm::LogFile822"], 'a')
    logfile822.write("Date: %s\n" % date)
    logfile822.write("Ftpmaster: %s\n" % whoami)
    logfile822.write("Suite: %s\n" % suites_list)
    sources = []
    binaries = []
    # Re-parse the human-readable summary table to split sources from
    # binaries for the 822 log.
    for package in summary.split("\n"):
        for row in package.split("\n"):
            element = row.split("|")
            if len(element) == 3:
                if element[2].find("source") > 0:
                    sources.append("%s_%s" % tuple(elem.strip(" ") for elem in element[:2]))
                    # 'sub' is presumably re.sub imported at module level
                    # -- strips the 'source' token out of the arch column.
                    element[2] = sub("source\s?,?", "", element[2]).strip(" ")
                if element[2]:
                    binaries.append("%s_%s [%s]" % tuple(elem.strip(" ") for elem in element))
    if sources:
        logfile822.write("Sources:\n")
        for source in sources:
            logfile822.write(" %s\n" % source)
    if binaries:
        logfile822.write("Binaries:\n")
        for binary in binaries:
            logfile822.write(" %s\n" % binary)
    logfile822.write("Reason: %s\n" % Options["Reason"].replace('\n', '\n '))
    if Options["Done"]:
        logfile822.write("Bug: %s\n" % Options["Done"])

    dsc_type_id = get_override_type('dsc', session).overridetype_id
    deb_type_id = get_override_type('deb', session).overridetype_id

    # Do the actual deletion
    print "Deleting...",
    sys.stdout.flush()

    for i in to_remove:
        package = i[0]
        architecture = i[2]
        package_id = i[3]
        for suite_id in suite_ids_list:
            if architecture == "source":
                session.execute("DELETE FROM src_associations WHERE source = :packageid AND suite = :suiteid",
                                {'packageid': package_id, 'suiteid': suite_id})
                #print "DELETE FROM src_associations WHERE source = %s AND suite = %s" % (package_id, suite_id)
            else:
                session.execute("DELETE FROM bin_associations WHERE bin = :packageid AND suite = :suiteid",
                                {'packageid': package_id, 'suiteid': suite_id})
                #print "DELETE FROM bin_associations WHERE bin = %s AND suite = %s" % (package_id, suite_id)
            # Delete from the override file
            if not Options["Partial"]:
                if architecture == "source":
                    type_id = dsc_type_id
                else:
                    type_id = deb_type_id
                # TODO: Again, fix this properly to remove the remaining non-bind argument
                session.execute("DELETE FROM override WHERE package = :package AND type = :typeid AND suite = :suiteid %s" % (over_con_components),
                                {'package': package, 'typeid': type_id, 'suiteid': suite_id})

    session.commit()
    print "done."

    # If we don't have a Bug server configured, we're done
    if not cnf.has_key("Dinstall::BugServer"):
        if Options["Done"] or Options["Do-Close"]:
            print "Cannot send mail to BugServer as Dinstall::BugServer is not configured"
        logfile.write("=========================================================================\n")
        logfile.close()
        logfile822.write("\n")
        logfile822.close()
        return

    # read common subst variables for all bug closure mails
    Subst_common = {}
    Subst_common["__RM_ADDRESS__"] = cnf["Dinstall::MyEmailAddress"]
    Subst_common["__BUG_SERVER__"] = cnf["Dinstall::BugServer"]
    Subst_common["__CC__"] = "X-DAK: dak rm"
    if carbon_copy:
        Subst_common["__CC__"] += "\nCc: " + ", ".join(carbon_copy)
    Subst_common["__SUITE_LIST__"] = suites_list
    Subst_common["__SUBJECT__"] = "Removed package(s) from %s" % (suites_list)
    Subst_common["__ADMIN_ADDRESS__"] = cnf["Dinstall::MyAdminAddress"]
    Subst_common["__DISTRO__"] = cnf["Dinstall::MyDistribution"]
    Subst_common["__WHOAMI__"] = whoami

    # Send the bug closing messages
    if Options["Done"]:
        # NOTE(review): this aliases (does not copy) Subst_common, so the
        # keys set below leak into the Do-Close block's Subst_close_other
        # too -- verify that is intended before changing.
        Subst_close_rm = Subst_common
        bcc = []
        if cnf.find("Dinstall::Bcc") != "":
            bcc.append(cnf["Dinstall::Bcc"])
        if cnf.find("Rm::Bcc") != "":
            bcc.append(cnf["Rm::Bcc"])
        if bcc:
            Subst_close_rm["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst_close_rm["__BCC__"] = "X-Filler: 42"
        summarymail = "%s\n------------------- Reason -------------------\n%s\n" % (summary, Options["Reason"])
        summarymail += "----------------------------------------------\n"
        Subst_close_rm["__SUMMARY__"] = summarymail
        for bug in utils.split_args(Options["Done"]):
            Subst_close_rm["__BUG_NUMBER__"] = bug
            if Options["Do-Close"]:
                mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close-with-related")
            else:
                mail_message = utils.TemplateSubst(Subst_close_rm, cnf["Dir::Templates"] + "/rm.bug-close")
            utils.send_mail(mail_message, whitelists=whitelists)

    # close associated bug reports
    if Options["Do-Close"]:
        Subst_close_other = Subst_common
        bcc = []
        wnpp = utils.parse_wnpp_bug_file()
        # NOTE(review): 'versions' here is the leftover loop variable from
        # the summary generation above (the versions of the *last* package
        # iterated), not an explicit collection -- looks fragile; verify.
        versions = list(set([re_bin_only_nmu.sub('', v) for v in versions]))
        if len(versions) == 1:
            Subst_close_other["__VERSION__"] = versions[0]
        else:
            utils.fubar("Closing bugs with multiple package versions is not supported.  Do it yourself.")
        if bcc:
            Subst_close_other["__BCC__"] = "Bcc: " + ", ".join(bcc)
        else:
            Subst_close_other["__BCC__"] = "X-Filler: 42"
        # at this point, I just assume, that the first closed bug gives
        # some useful information on why the package got removed
        Subst_close_other["__BUG_NUMBER__"] = utils.split_args(Options["Done"])[0]
        if len(sources) == 1:
            # NOTE(review): 'source' is the leftover variable from the
            # logfile822 loop above, not sources[0] -- presumably the same
            # value when len(sources) == 1, but verify.
            source_pkg = source.split("_", 1)[0]
        else:
            utils.fubar("Closing bugs for multiple source packages is not supported.  Do it yourself.")
        Subst_close_other["__BUG_NUMBER_ALSO__"] = ""
        Subst_close_other["__SOURCE__"] = source_pkg

        # Collect the other open/forwarded bugs of this source, skipping
        # bugs merged with ones already seen so each is closed only once.
        merged_bugs = set()
        other_bugs = bts.get_bugs('src', source_pkg, 'status', 'open', 'status', 'forwarded')
        if other_bugs:
            for bugno in other_bugs:
                if bugno not in merged_bugs:
                    for bug in bts.get_status(bugno):
                        for merged in bug.mergedwith:
                            other_bugs.remove(merged)
                            merged_bugs.add(merged)
            logfile.write("Also closing bug(s):")
            logfile822.write("Also-Bugs:")
            for bug in other_bugs:
                Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                logfile.write(" " + str(bug))
                logfile822.write(" " + str(bug))
            logfile.write("\n")
            logfile822.write("\n")

        if source_pkg in wnpp.keys():
            logfile.write("Also closing WNPP bug(s):")
            logfile822.write("Also-WNPP:")
            for bug in wnpp[source_pkg]:
                # the wnpp-rm file we parse also contains our removal
                # bugs, filtering that out
                if bug != Subst_close_other["__BUG_NUMBER__"]:
                    Subst_close_other["__BUG_NUMBER_ALSO__"] += str(bug) + "-done@" + cnf["Dinstall::BugServer"] + ","
                    logfile.write(" " + str(bug))
                    logfile822.write(" " + str(bug))
            logfile.write("\n")
            logfile822.write("\n")

        mail_message = utils.TemplateSubst(Subst_close_other, cnf["Dir::Templates"] + "/rm.bug-close-related")
        if Subst_close_other["__BUG_NUMBER_ALSO__"]:
            utils.send_mail(mail_message)

    logfile.write("=========================================================================\n")
    logfile.close()
    logfile822.write("\n")
    logfile822.close()
def main():
    """Entry point for 'dak control-suite'.

    Parses the command line, determines which single action (add, list,
    remove, set) was requested, and dispatches either to get_list() for
    the read-only case or to process_file() inside an ArchiveTransaction
    for the modifying actions.
    """
    global Logger

    cnf = Config()

    Arguments = [('a', "add", "Control-Suite::Options::Add", "HasArg"),
                 ('b', "britney", "Control-Suite::Options::Britney"),
                 ('f', 'force', 'Control-Suite::Options::Force'),
                 ('h', "help", "Control-Suite::Options::Help"),
                 ('l', "list", "Control-Suite::Options::List", "HasArg"),
                 ('r', "remove", "Control-Suite::Options::Remove", "HasArg"),
                 ('s', "set", "Control-Suite::Options::Set", "HasArg")]

    # Pre-seed every option key so the config lookups below cannot fail.
    for i in ["add", "britney", "help", "list", "remove", "set", "version"]:
        key = "Control-Suite::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    try:
        file_list = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    except SystemError as e:
        print("%s\n" % e)
        usage(1)

    Options = cnf.subtree("Control-Suite::Options")

    if Options["Help"]:
        usage()

    force = "Force" in Options and Options["Force"]

    # Exactly one of add/list/remove/set may be requested; whichever it
    # is also carries the suite name as its argument.
    action = None

    for i in ("add", "list", "remove", "set"):
        if cnf["Control-Suite::Options::%s" % (i)] != "":
            suite_name = cnf["Control-Suite::Options::%s" % (i)]
            if action:
                utils.fubar("Can only perform one action at a time.")
            action = i

    # Need an action...
    if action is None:
        utils.fubar("No action specified.")

    # Britney-format input is only honoured for 'set'.
    britney = False
    if action == "set" and cnf["Control-Suite::Options::Britney"]:
        britney = True

    if action == "list":
        # Read-only path: plain session, no logger or transaction.
        session = DBConn().session()
        suite = session.query(Suite).filter_by(suite_name=suite_name).one()
        get_list(suite, session)
    else:
        Logger = daklog.Logger("control-suite")
        with ArchiveTransaction() as transaction:
            session = transaction.session
            suite = session.query(Suite).filter_by(suite_name=suite_name).one()
            # Suites must opt in to 'set' via allowcsset; --force overrides.
            if action == "set" and not suite.allowcsset:
                if force:
                    utils.warn("Would not normally allow setting suite {0} (allowcsset is FALSE), but --force used".format(suite_name))
                else:
                    utils.fubar("Will not reset suite {0} due to its database configuration (allowcsset is FALSE)".format(suite_name))
            if file_list:
                for f in file_list:
                    process_file(utils.open_file(f), suite, action, transaction, britney, force)
            else:
                process_file(sys.stdin, suite, action, transaction, britney, force)
        Logger.close()
def britney_changelog(packages, suite, session):
    """Write the britney changelog export file for *suite*.

    Looks up the suite's configured changelog path; if none, returns
    silently.  Otherwise compares the suite's current source versions
    against *packages* (whose keys index name/version/arch at positions
    0/1/2), writes the changelog entries of every newer version to the
    export file, and appends REMOVED markers for sources that vanished.
    """
    old = {}
    current = {}
    Cnf = utils.get_conf()

    try:
        q = session.execute("SELECT changelog FROM suite WHERE id = :suiteid",
                            {'suiteid': suite.suite_id})
        brit_file = q.fetchone()[0]
    except Exception:
        # Was a bare `except:`, which would also swallow
        # KeyboardInterrupt/SystemExit; a missing/NULL changelog path is
        # the only condition we want to treat as "no export configured".
        brit_file = None

    if brit_file:
        brit_file = os.path.join(Cnf['Dir::Root'], brit_file)
    else:
        return

    q = session.execute("""SELECT s.source, s.version, sa.id
                             FROM source s, src_associations sa
                            WHERE sa.suite = :suiteid
                              AND sa.source = s.id""", {'suiteid': suite.suite_id})

    # current: source name -> version now in the suite
    for p in q.fetchall():
        current[p[0]] = p[1]
    # old: source name -> previous version, taken from the package set
    for p in packages.keys():
        if p[2] == "source":
            old[p[0]] = p[1]

    # new: source name -> [current version, previous version or 0]
    new = {}
    for p in current.keys():
        if p in old:
            if apt_pkg.version_compare(current[p], old[p]) > 0:
                new[p] = [current[p], old[p]]
        else:
            new[p] = [current[p], 0]

    # XXX: string-built SQL. The interpolated values come from the dak
    # database itself rather than user input, but this should still be
    # converted to bound parameters.
    query = "SELECT source, changelog FROM changelogs WHERE"
    for p in new.keys():
        query += " source = '%s' AND version > '%s' AND version <= '%s'" \
                 % (p, new[p][1], new[p][0])
        query += " AND architecture LIKE '%source%' AND distribution in \
                  ('unstable', 'experimental', 'testing-proposed-updates') OR"
    # Trailing "False" absorbs the dangling OR from the loop above.
    query += " False ORDER BY source, version DESC"
    q = session.execute(query)

    pu = None
    brit = utils.open_file(brit_file, 'w')

    # Blank line between the changelog runs of different sources.
    for u in q:
        if pu and pu != u[0]:
            brit.write("\n")
        brit.write("%s\n" % u[1])
        pu = u[0]
    if q.rowcount:
        brit.write("\n\n\n")

    # Sources that were present before but are gone now.
    for p in list(set(old.keys()).difference(current.keys())):
        brit.write("REMOVED: %s %s\n" % (p, old[p]))

    brit.flush()
    brit.close()