def main():
    """Entry point for the external-overrides command (import/copy)."""
    cnf = Config()
    arg_spec = [('h', "help", "External-Overrides::Options::Help"),
                ('f', 'force', 'External-Overrides::Options::Force')]
    args = apt_pkg.parse_commandline(cnf.Cnf, arg_spec, sys.argv)

    try:
        Options = cnf.subtree("External-Overrides::Options")
    except KeyError:
        Options = {}

    if "Help" in Options:
        usage()

    # --force is honoured only when the option is present *and* truthy.
    force = bool("Force" in Options and Options["Force"])

    logger = daklog.Logger('external-overrides')

    command = args[0]
    if command in ('import', 'i'):
        external_overrides_import(args[1], args[2], args[3], sys.stdin, force)
    elif command in ('copy', 'c'):
        external_overrides_copy(args[1], args[2], force)
    else:
        print("E: Unknown commands.")
def action_sync_package(self, fingerprint, section, session):
    """Handle a 'sync' command section: sync the listed packages from the
    given Debian suite/component into staging via the external
    sync-debian-package helper, appending a status line per package to
    self.result.

    Raises CommandError if the signing key is not in an admin keyring or
    the section lacks the required Packages/Suite/Component fields.
    """
    cnf = Config()

    allowed_keyrings = cnf.value_list('Command::Sync::AdminKeyrings')
    if fingerprint.keyring.keyring_name not in allowed_keyrings:
        raise CommandError('Key {0} is not allowed to sync Debian packages.'.format(fingerprint.fingerprint))

    if 'Packages' not in section or 'Suite' not in section or 'Component' not in section:
        raise CommandError('Invalid commands: Section is missing.')

    packages_str = section['Packages']
    suite = section['Suite']
    component = section['Component']

    # The Packages field may name one package or a space-separated list.
    packages = packages_str.split(" ") if " " in packages_str else [packages_str]

    for pkg in packages:
        p = subprocess.Popen(["sync-debian-package", "-i", suite, "staging", component, pkg],
                             stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT)
        output = p.communicate()
        # BUG FIX: was 'p.returncode is not 0' — an identity comparison
        # against an int literal; use a value comparison.
        if p.returncode != 0:
            self.result.append("Failed syncing: {0} from {1} ({2})".format(pkg, suite, component))
            out_str = ""
            # communicate() returns (stdout, stderr); either may be None.
            if output[0] is not None:
                out_str = output[0]
            if output[1] is not None:
                out_str += output[1]
            self.result.append(" - Error: {0}".format(out_str))
        else:
            self.result.append("Synced package: {0} from {1} ({2})".format(pkg, suite, component))
def sign_release_dir(suite, dirname):
    """Sign the Release file in 'dirname', producing a detached
    Release.gpg and an inline-signed InRelease file.

    Signing only happens when a signing keyring or homedir is configured
    under Dinstall::.
    """
    cnf = Config()
    if 'Dinstall::SigningKeyring' in cnf or 'Dinstall::SigningHomedir' in cnf:
        args = {
            'keyids': suite.signingkeys or [],
            'pubring': cnf.get('Dinstall::SigningPubKeyring') or None,
            'secring': cnf.get('Dinstall::SigningKeyring') or None,
            'homedir': cnf.get('Dinstall::SigningHomedir') or None,
            'passphrase_file': cnf.get('Dinstall::SigningPassphraseFile') or None,
        }

        relname = os.path.join(dirname, 'Release')

        # Remove any stale signatures before writing fresh ones.
        dest = os.path.join(dirname, 'Release.gpg')
        if os.path.exists(dest):
            os.unlink(dest)
        inlinedest = os.path.join(dirname, 'InRelease')
        if os.path.exists(inlinedest):
            os.unlink(inlinedest)

        with open(relname, 'r') as stdin:
            with open(dest, 'w') as stdout:
                daklib.gpg.sign(stdin, stdout, inline=False, **args)
            # Rewind so the same input can be inline-signed as InRelease.
            stdin.seek(0)
            with open(inlinedest, 'w') as stdout:
                daklib.gpg.sign(stdin, stdout, inline=True, **args)
def init (self):
    """Parse command-line options, take the dinstall lock and run the
    database update."""
    cnf = Config()
    arguments = [('h', "help", "Update-DB::Options::Help")]
    for i in [ "help" ]:
        key = "Update-DB::Options::%s" % (i)
        # CHANGED: use 'in' instead of the deprecated dict.has_key(),
        # matching the newer variant of this initializer.
        if key not in cnf:
            cnf[key] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    options = cnf.subtree("Update-DB::Options")
    if options["Help"]:
        self.usage()
    elif arguments:
        utils.warn("dak update-db takes no arguments.")
        self.usage(exit_code=1)

    # Take the dinstall lock so we do not race a running archive run.
    try:
        if os.path.isdir(cnf["Dir::Lock"]):
            lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'dinstall.lock'),
                              os.O_RDWR | os.O_CREAT)
            fcntl.lockf(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        else:
            utils.warn("Lock directory doesn't exist yet - not locking")
    except IOError as e:
        # CHANGED: compare errno values directly instead of going through
        # the errno.errorcode name table, which is indirect and fragile.
        if e.errno in (errno.EACCES, errno.EAGAIN):
            utils.fubar("Couldn't obtain lock; assuming another 'dak process-unchecked' is already running.")

    self.update_db()
def sign_release_dir(suite, dirname):
    """Sign dirname/Release with gpg: one detached signature per signing
    key appended to Release.gpg, plus a clearsigned InRelease made with
    the first key only."""
    cnf = Config()

    if cnf.has_key("Dinstall::SigningKeyring"):
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        arguments = "--no-options --batch --no-tty --armour"

        relname = os.path.join(dirname, 'Release')

        # Remove stale signatures before re-signing.
        dest = os.path.join(dirname, 'Release.gpg')
        if os.path.exists(dest):
            os.unlink(dest)
        inlinedest = os.path.join(dirname, 'InRelease')
        if os.path.exists(inlinedest):
            os.unlink(inlinedest)

        # We can only use one key for inline signing so use the first one in
        # the array for consistency
        firstkey = True

        # BUG FIX: suite.signingkeys may be None; iterating it directly
        # raises TypeError (the sibling variant of this helper already
        # guards with 'or []').
        for keyid in suite.signingkeys or []:
            defkeyid = "--default-key %s" % keyid

            os.system("gpg %s %s %s --detach-sign <%s >>%s" %
                      (keyring, defkeyid, arguments, relname, dest))

            if firstkey:
                os.system("gpg %s %s %s --clearsign <%s >>%s" %
                          (keyring, defkeyid, arguments, relname, inlinedest))
                firstkey = False
def do_update(self):
    """
    Add missing PrimaryMirror field to archive table
    """
    print __doc__
    try:
        cnf = Config()

        c = self.db.cursor()

        # New column, populated below from Archive::<name>::PrimaryMirror
        # configuration entries.
        c.execute("ALTER TABLE archive ADD COLUMN primary_mirror TEXT")
        c.execute("SELECT id, name FROM archive")

        query = "UPDATE archive SET primary_mirror = %s WHERE id = %s"
        for a_id, a_name in c.fetchall():
            if cnf.has_key('Archive::%s::PrimaryMirror' % a_name):
                primloc = cnf['Archive::%s::PrimaryMirror' % a_name]
                print "Setting archive %s PrimaryMirror to %s" % (a_name, primloc)
                c.execute(query, [primloc, a_id])

        # Bump the schema revision and commit the whole update atomically.
        c.execute("UPDATE config SET value = '63' WHERE name = 'db_revision'")
        self.db.commit()

    except psycopg2.ProgrammingError as msg:
        # Roll back so a failed migration leaves the schema untouched.
        self.db.rollback()
        raise DBUpdateError('Unable to apply sick update 63, rollback issued. Error message : %s' % (str(msg)))
def main():
    """Entry point for 'dak dominate': drop obsolete package versions."""
    global Options, Logger

    cnf = Config()
    Arguments = [('h', "help", "Obsolete::Options::Help"),
                 ('s', "suite", "Obsolete::Options::Suite", "HasArg"),
                 ('n', "no-action", "Obsolete::Options::No-Action"),
                 ('f', "force", "Obsolete::Options::Force")]
    for flag in ('Help', 'No-Action', 'Force'):
        cnf['Obsolete::Options::%s' % flag] = ''
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Obsolete::Options")

    if Options['Help']:
        usage()

    # With no --suite, operate on every suite known to the database.
    if 'Suite' not in Options:
        query_suites = DBConn().session().query(Suite)
        suites = [suite.suite_name for suite in query_suites.all()]
        cnf['Obsolete::Options::Suite'] = ','.join(suites)

    Logger = daklog.Logger("dominate")
    session = DBConn().session()
    for suite_name in utils.split_args(Options['Suite']):
        suite = session.query(Suite).filter_by(suite_name=suite_name).one()
        # Untouchable suites are skipped unless --force was given.
        if not suite.untouchable or Options['Force']:
            doDaDoDa(suite.suite_id, session)

    if Options['No-Action']:
        session.rollback()
    else:
        session.commit()
    Logger.close()
def main():
    """Entry point: parse options, configure logging and import new files."""
    cnf = Config()

    arguments = [('h', "help", "%s::%s" % (options_prefix, "Help")),
                 ('q', "quiet", "%s::%s" % (options_prefix, "Quiet")),
                 ('v', "verbose", "%s::%s" % (options_prefix, "Verbose")),
                 ]

    args = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    # FIX: dropped the unused 'num_threads' local — this command takes no
    # concurrency option.
    if len(args) > 0:
        usage(1)

    if cnf.has_key("%s::%s" % (options_prefix, "Help")):
        usage(0)

    # Map --quiet/--verbose onto the logging level (default INFO).
    level = logging.INFO
    if cnf.has_key("%s::%s" % (options_prefix, "Quiet")):
        level = logging.ERROR
    elif cnf.has_key("%s::%s" % (options_prefix, "Verbose")):
        level = logging.DEBUG

    logging.basicConfig(level=level,
                        format='%(asctime)s %(levelname)s %(message)s',
                        stream=sys.stderr)

    ImportNewFiles()
def init(self):
    """Parse options, take the daily lock and run the database update."""
    cnf = Config()
    arguments = [('h', "help", "Update-DB::Options::Help"),
                 ("y", "yes", "Update-DB::Options::Yes")]
    for opt in ["help"]:
        name = "Update-DB::Options::%s" % opt
        if name not in cnf:
            cnf[name] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    options = cnf.subtree("Update-DB::Options")
    if options["Help"]:
        self.usage()
    elif arguments:
        utils.warn("dak update-db takes no arguments.")
        self.usage(exit_code=1)

    # Hold the archive's daily lock so we never race a running dinstall.
    try:
        if not os.path.isdir(cnf["Dir::Lock"]):
            utils.warn("Lock directory doesn't exist yet - not locking")
        else:
            lock_fd = os.open(os.path.join(cnf["Dir::Lock"], 'daily.lock'),
                              os.O_RDONLY | os.O_CREAT)
            fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError as e:
        if e.errno in (errno.EACCES, errno.EAGAIN):
            utils.fubar("Couldn't obtain lock, looks like archive is doing something, try again later.")

    self.update_db()
def clean_byhash(now_date, session):
    """Delete by-hash index files whose grace period (the owning
    archive's 'stayofexecution') has expired since they became
    unreferenced, both from the database and from disk."""
    cnf = Config()
    suite_suffix = cnf.find("Dinstall::SuiteSuffix", "")

    Logger.log(["Cleaning out unused by-hash files..."])

    # Delete expired rows and get back enough information to unlink the
    # corresponding files on disk.
    q = session.execute("""
      DELETE FROM hashfile h
      USING suite s, archive a
      WHERE s.id = h.suite_id
        AND a.id = s.archive_id
        AND h.unreferenced + a.stayofexecution < CURRENT_TIMESTAMP
      RETURNING a.path, s.suite_name, h.path""")
    count = q.rowcount

    if not Options["No-Action"]:
        for base, suite, path in q:
            filename = os.path.join(base, 'dists', suite, suite_suffix, path)
            try:
                os.unlink(filename)
            except OSError as exc:
                # A missing file is logged but not fatal — the database
                # row is gone either way.
                if exc.errno != errno.ENOENT:
                    raise
                Logger.log(['database referred to non-existing file', filename])
            else:
                Logger.log(['delete hashfile', suite, path])
        session.commit()

    if count > 0:
        Logger.log(["total", count])
def action_dm_migrate(self, fingerprint, section, session):
    """Handle a 'dm-migrate' command section: move every per-source DM
    upload permission from one fingerprint (From) to another (To)."""
    self._action_dm_admin_common(fingerprint, section, session)
    cnf = Config()
    acl_name = cnf.get('Command::DM::ACL', 'dm')
    acl = session.query(ACL).filter_by(name=acl_name).one()

    # Fingerprints may be written with spaces; strip them (Python 2 form
    # of str.translate).
    fpr_hash_from = section['From'].translate(None, ' ')
    fpr_from = session.query(Fingerprint).filter_by(fingerprint=fpr_hash_from).first()
    if fpr_from is None:
        self.result.append('Unknown fingerprint (From): {0}\nNo action taken.'.format(fpr_hash_from))
        return

    fpr_hash_to = section['To'].translate(None, ' ')
    fpr_to = session.query(Fingerprint).filter_by(fingerprint=fpr_hash_to).first()
    if fpr_to is None:
        self.result.append('Unknown fingerprint (To): {0}\nNo action taken.'.format(fpr_hash_to))
        return
    # The target key must already be in one of the configured DM keyrings.
    if fpr_to.keyring is None or fpr_to.keyring.keyring_name not in cnf.value_list('Command::DM::Keyrings'):
        self.result.append('Key (To) {0} is not in DM keyring.\nNo action taken.'.format(fpr_to.fingerprint))
        return

    self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to)])

    # Re-point every ACL entry of the old key at the new one, logging
    # each migrated source.
    sources = []
    for entry in session.query(ACLPerSource).filter_by(acl=acl, fingerprint=fpr_from):
        self.log.log(['dm-migrate', 'from={0}'.format(fpr_hash_from), 'to={0}'.format(fpr_hash_to), 'source={0}'.format(entry.source)])
        entry.fingerprint = fpr_to
        sources.append(entry.source)

    self.result.append('Migrated {0} to {1}.\n{2} acl entries changed: {3}'.format(fpr_hash_from, fpr_hash_to, len(sources), ", ".join(sources)))

    session.commit()
def init(session):
    """Parse command-line options and return the query of policy-queue
    uploads to show, optionally restricted to given .changes names."""
    global cnf, Options

    cnf = Config()

    Arguments = [
        ("h", "help", "Show-New::Options::Help"),
        ("p", "html-path", "Show-New::HTMLPath", "HasArg"),
        ("q", "queue", "Show-New::Options::Queue", "HasArg"),
    ]

    for i in ["help"]:
        if not cnf.has_key("Show-New::Options::%s" % (i)):
            cnf["Show-New::Options::%s" % (i)] = ""

    changesnames = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Show-New::Options")

    # NOTE(review): lowercase "help" vs the "Help" key set above relies on
    # apt_pkg configuration lookups being case-insensitive — confirm.
    if Options["help"]:
        usage()

    # Default to the 'new' queue; --queue accepts a comma-separated list.
    queue_names = Options.find("Queue", "new").split(",")

    uploads = (
        session.query(PolicyQueueUpload)
        .join(PolicyQueueUpload.policy_queue)
        .filter(PolicyQueue.queue_name.in_(queue_names))
        .join(PolicyQueueUpload.changes)
        .order_by(DBChange.source)
    )

    if len(changesnames) > 0:
        uploads = uploads.filter(DBChange.changesname.in_(changesnames))

    return uploads
def sign_release_dir(suite, dirname):
    """Sign dirname/Release with gpg: a detached Release.gpg and a
    clearsigned InRelease, signing with all of the suite's keys in a
    single gpg invocation."""
    cnf = Config()

    if cnf.has_key("Dinstall::SigningKeyring"):
        keyring = "--secret-keyring \"%s\"" % cnf["Dinstall::SigningKeyring"]
        if cnf.has_key("Dinstall::SigningPubKeyring"):
            keyring += " --keyring \"%s\"" % cnf["Dinstall::SigningPubKeyring"]

        arguments = "--no-options --batch --no-tty --armour --personal-digest-preferences=SHA256"

        relname = os.path.join(dirname, 'Release')

        # Remove stale signature files before re-signing.
        dest = os.path.join(dirname, 'Release.gpg')
        if os.path.exists(dest):
            os.unlink(dest)
        inlinedest = os.path.join(dirname, 'InRelease')
        if os.path.exists(inlinedest):
            os.unlink(inlinedest)

        # Accumulate one --local-user per signing key so a single gpg run
        # produces signatures from all of them (None-safe via 'or []').
        defkeyid = ""
        for keyid in suite.signingkeys or []:
            defkeyid += "--local-user %s " % keyid

        os.system("gpg %s %s %s --detach-sign <%s >>%s" %
                  (keyring, defkeyid, arguments, relname, dest))
        os.system("gpg %s %s %s --clearsign <%s >>%s" %
                  (keyring, defkeyid, arguments, relname, inlinedest))
def _do_bts_versiontracking(self):
    """Write BTS version-tracking files into Dir::BTSVersionTrack (if
    configured): '<changes>.versions' holding the changelog version
    history and '<changes>.debinfo' holding the binary -> source mapping.
    """
    cnf = Config()
    fs = self.transaction.fs

    btsdir = cnf.get('Dir::BTSVersionTrack')
    if btsdir is None or btsdir == '':
        return

    # Strip the '.changes' suffix (8 chars) for the output base name.
    base = os.path.join(btsdir, self.changes.filename[:-8])

    # version history
    sourcedir = self.unpacked_source()
    if sourcedir is not None:
        # FIX: open the changelog with a with-statement so the handle is
        # closed even if reading or writing raises.
        with open(os.path.join(sourcedir, 'debian', 'changelog'), 'r') as fh:
            versions = fs.create("{0}.versions".format(base), mode=0o644)
            for line in fh.readlines():
                if re_changelog_versions.match(line):
                    versions.write(line)
            versions.close()

    # binary -> source mapping
    debinfo = fs.create("{0}.debinfo".format(base), mode=0o644)
    for binary in self.changes.binaries:
        control = binary.control
        source_package, source_version = binary.source
        line = " ".join([control['Package'], control['Version'], control['Architecture'], source_package, source_version])
        print >>debinfo, line
    debinfo.close()
def check(self, upload):
    """Reject uploads whose .deb members carry timestamps too far in the
    future or before the configured past cutoff year."""
    cnf = Config()

    # Allowed clock skew into the future, in seconds (default 24h).
    future_cutoff = time.time() + cnf.find_i('Dinstall::FutureTimeTravelGrace', 24*3600)
    # Anything older than this year is considered bogus (default 1975).
    past_cutoff = time.mktime(time.strptime(cnf.find('Dinstall::PastCutoffYear', '1975'), '%Y'))

    class TarTime(object):
        """Collects out-of-range member timestamps during tar traversal."""
        def __init__(self):
            self.future_files = dict()
            self.past_files = dict()

        def callback(self, member, data):
            if member.mtime > future_cutoff:
                self.future_files[member.name] = member.mtime
            elif member.mtime < past_cutoff:
                self.past_files[member.name] = member.mtime

    def format_reason(filename, direction, files):
        # Build a rejection message listing each offending file with its
        # human-readable timestamp.
        reason = "{0}: has {1} file(s) with a timestamp too far in the {2}:\n".format(filename, len(files), direction)
        for fn, ts in files.iteritems():
            reason += " {0} ({1})".format(fn, time.ctime(ts))
        return reason

    for binary in upload.changes.binaries:
        filename = binary.hashed_file.filename
        path = os.path.join(upload.directory, filename)
        deb = apt_inst.DebFile(path)
        tar = TarTime()
        # NOTE(review): only the control member is scanned here, not the
        # data tarball — confirm this is intentional.
        deb.control.go(tar.callback)
        if tar.future_files:
            raise Reject(format_reason(filename, 'future', tar.future_files))
        if tar.past_files:
            raise Reject(format_reason(filename, 'past', tar.past_files))
def check_upload_for_external_signature_request(session, target_suite, suite, binary):
    """Request an external signature for 'binary' when the
    External-Signature-Requests configuration covers this
    source/package/suite/architecture combination."""
    if 'External-Signature-Requests' not in Config():
        return
    config = Config().subtree('External-Signature-Requests')

    config_sources = config.subtree('Sources')

    source = binary.source
    if source.source not in config_sources:
        return
    src_config = config_sources.subtree(source.source)

    if binary.package not in src_config.value_list('Packages'):
        return

    # Per-source Suites/Architectures override the global defaults.
    suites = (src_config.value_list('Suites')
              if 'Suites' in src_config
              else config.value_list('Default-Suites'))
    if target_suite.suite_name not in suites:
        return

    archs = (src_config.value_list('Architectures')
             if 'Architectures' in src_config
             else config.value_list('Default-Architectures'))
    if binary.architecture.arch_string not in archs:
        return

    add_external_signature_request(session, target_suite, suite, binary)
def main():
    """Entry point for 'dak archive-dedup-pool'."""
    global Options, Logger

    cnf = Config()
    session = DBConn().session()

    arg_spec = [('h', "help", "Archive-Dedup-Pool::Options::Help")]
    apt_pkg.parse_commandline(cnf.Cnf, arg_spec, sys.argv)

    # Make sure the Help option exists even if unset on the command line.
    for opt in ["help"]:
        name = "Archive-Dedup-Pool::Options::%s" % opt
        if name not in cnf:
            cnf[name] = ""

    Options = cnf.subtree("Archive-Dedup-Pool::Options")
    if Options["Help"]:
        usage()

    Logger = daklog.Logger("archive-dedup-pool")
    dedup(session)
    Logger.close()
def main(argv=None):
    """Export uploads sitting in a policy queue into a target directory."""
    argv = sys.argv if argv is None else argv

    arg_spec = [('h', 'help', 'Export::Options::Help'),
                ('a', 'all', 'Export::Options::All'),
                ('c', 'copy', 'Export::Options::Copy'),
                ('d', 'directory', 'Export::Options::Directory', 'HasArg'),
                ('q', 'queue', 'Export::Options::Queue', 'HasArg')]

    cnf = Config()
    wanted_sources = apt_pkg.parse_commandline(cnf.Cnf, arg_spec, argv)
    opts = cnf.subtree('Export::Options')

    if 'Help' in opts or 'Queue' not in opts:
        usage()
        sys.exit(0)

    session = DBConn().session()

    queue = session.query(PolicyQueue).filter_by(queue_name=opts['Queue']).first()
    if queue is None:
        print("Unknown queue '{0}'".format(opts['Queue']))
        sys.exit(1)

    uploads = session.query(PolicyQueueUpload).filter_by(policy_queue=queue)
    if 'All' not in opts:
        # Without --all, restrict to the source names given as
        # positional arguments.
        uploads = uploads.filter(DBChange.source.in_(wanted_sources))

    target_dir = opts.get('Directory', '.')
    # Symlink into place unless --copy was requested.
    use_symlink = 'Copy' not in opts

    for item in uploads:
        UploadCopy(item).export(target_dir, symlink=use_symlink, ignore_existing=True)
def main():
    """Entry point: parse options, configure logging/concurrency and
    import known changes."""
    cnf = Config()

    arguments = [('h', "help", "%s::%s" % (options_prefix, "Help")),
                 ('j', "concurrency", "%s::%s" % (options_prefix, "Concurrency"), "HasArg"),
                 ('q', "quiet", "%s::%s" % (options_prefix, "Quiet")),
                 ('v', "verbose", "%s::%s" % (options_prefix, "Verbose")),
                 ]

    args = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)

    num_threads = 1

    if len(args) > 0:
        usage()

    if cnf.has_key("%s::%s" % (options_prefix, "Help")):
        usage()

    # Map --quiet/--verbose onto the logging level (default INFO).
    level = logging.INFO
    if cnf.has_key("%s::%s" % (options_prefix, "Quiet")):
        level = logging.ERROR
    elif cnf.has_key("%s::%s" % (options_prefix, "Verbose")):
        level = logging.DEBUG

    logging.basicConfig(level=level,
                        format='%(asctime)s %(levelname)s %(message)s',
                        stream=sys.stderr)

    # FIX: reuse the existing 'cnf' instead of constructing two fresh
    # Config() objects just for the concurrency lookup.
    if cnf.has_key("%s::%s" % (options_prefix, "Concurrency")):
        num_threads = int(cnf["%s::%s" % (options_prefix, "Concurrency")])

    ImportKnownChanges(num_threads)
def main():
    """Entry point for 'dak copy-installer'."""
    cnf = Config()

    option_spec = [
        ('h', "help", "Copy-Installer::Options::Help"),
        ('s', "source", "Copy-Installer::Options::Source", "HasArg"),
        ('d', "destination", "Copy-Installer::Options::Destination", "HasArg"),
        ('n', "no-action", "Copy-Installer::Options::No-Action"),
    ]
    for name in ["help", "source", "destination", "no-action"]:
        full_key = "Copy-Installer::Options::%s" % name
        if full_key not in cnf:
            cnf[full_key] = ""

    extra_arguments = apt_pkg.parse_commandline(cnf.Cnf, option_spec, sys.argv)
    Options = cnf.subtree("Copy-Installer::Options")

    if Options["Help"]:
        usage()
    # Exactly one positional argument (the installer version) is required.
    if len(extra_arguments) != 1:
        usage(1)

    # Build keyword arguments for the copier; source/dest only when given.
    initializer = {"version": extra_arguments[0]}
    if Options["Source"] != "":
        initializer["source"] = Options["Source"]
    if Options["Destination"] != "":
        initializer["dest"] = Options["Destination"]

    copier = InstallerCopier(**initializer)
    print(copier.get_message())

    if Options["No-Action"]:
        print('Do nothing because --no-action has been set.')
    else:
        copier.do_copy()
        print('Installer has been copied successfully.')
def do_pkg(changes_full_path, session):
    """Process one .changes file through the NEW/BYHAND queue: re-run the
    checks, then hand off to do_byhand/do_new/new_accept as appropriate."""
    changes_dir = os.path.dirname(changes_full_path)
    changes_file = os.path.basename(changes_full_path)

    u = Upload()
    u.pkg.changes_file = changes_file
    (u.pkg.changes["fingerprint"], rejects) = utils.check_signature(changes_file)
    u.load_changes(changes_file)
    u.pkg.directory = changes_dir
    u.update_subst()
    u.logger = Logger
    origchanges = os.path.abspath(u.pkg.changes_file)

    # Try to get an included dsc
    dsc = None
    (status, _) = u.load_dsc()
    if status:
        dsc = u.pkg.dsc

    cnf = Config()
    bcc = "X-DAK: dak process-new"
    if cnf.has_key("Dinstall::Bcc"):
        u.Subst["__BCC__"] = bcc + "\nBcc: %s" % (cnf["Dinstall::Bcc"])
    else:
        u.Subst["__BCC__"] = bcc

    files = u.pkg.files
    u.check_distributions()
    # Re-run the per-file checks against the database: binaries and
    # source files have different check paths.
    for deb_filename, f in files.items():
        if deb_filename.endswith(".udeb") or deb_filename.endswith(".deb"):
            u.binary_file_checks(deb_filename, session)
            u.check_binary_against_db(deb_filename, session)
        else:
            u.source_file_checks(deb_filename, session)
            u.check_source_against_db(deb_filename, session)
    u.pkg.changes["suite"] = copy.copy(u.pkg.changes["distribution"])

    try:
        # Per-source lock so two operators cannot process the same
        # package at the same time.
        with lock_package(u.pkg.changes["source"]):
            with clean_holding(u.pkg):
                if not recheck(u, session):
                    return

                new, byhand = determine_new(u.pkg.changes_file, u.pkg.changes, files, dsc=dsc, session=session)
                if byhand:
                    do_byhand(u, session)
                elif new:
                    do_new(u, session)
                else:
                    try:
                        # Direct accept requires the daily lock to be free.
                        check_daily_lock()
                        new_accept(u, Options["No-Action"], session)
                    except CantGetLockError:
                        print "Hello? Operator! Give me the number for 911!"
                        print "Dinstall in the locked area, cant process packages, come back later"
    except AlreadyLockedError as e:
        print "Seems to be locked by %s already, skipping..." % (e)
def main():
    """Entry point for 'dak ls': list package versions across suites."""
    cnf = Config()

    Arguments = [('a', "architecture", "Ls::Options::Architecture", "HasArg"),
                 ('b', "binarytype", "Ls::Options::BinaryType", "HasArg"),
                 ('c', "component", "Ls::Options::Component", "HasArg"),
                 ('f', "format", "Ls::Options::Format", "HasArg"),
                 ('g', "greaterorequal", "Ls::Options::GreaterOrEqual"),
                 ('G', "greaterthan", "Ls::Options::GreaterThan"),
                 ('r', "regex", "Ls::Options::Regex"),
                 ('s', "suite", "Ls::Options::Suite", "HasArg"),
                 ('S', "source-and-binary", "Ls::Options::Source-And-Binary"),
                 ('h', "help", "Ls::Options::Help")]
    for opt in ["architecture", "binarytype", "component", "format",
                "greaterorequal", "greaterthan", "regex", "suite",
                "source-and-binary", "help"]:
        opt_key = "Ls::Options::%s" % opt
        if opt_key not in cnf:
            cnf[opt_key] = ""

    packages = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Ls::Options")

    if Options["Help"]:
        usage()
    if not packages:
        utils.fubar("need at least one package name as an argument.")

    # Handle buildd maintenance helper options
    if Options["GreaterOrEqual"] or Options["GreaterThan"]:
        if Options["GreaterOrEqual"] and Options["GreaterThan"]:
            utils.fubar("-g/--greaterorequal and -G/--greaterthan are mutually exclusive.")
        if not Options["Suite"]:
            Options["Suite"] = "unstable"

    kwargs = dict()

    # Plain boolean flags.
    if Options["Regex"]:
        kwargs['regex'] = True
    if Options["Source-And-Binary"]:
        kwargs['source_and_binary'] = True

    # Options that hold argument lists (comma/space separated).
    for opt_name, kw_name in (("Suite", 'suites'),
                              ("Architecture", 'architectures'),
                              ("BinaryType", 'binary_types'),
                              ("Component", 'components')):
        if Options[opt_name]:
            kwargs[kw_name] = utils.split_args(Options[opt_name])

    if Options['Format']:
        kwargs['format'] = Options['Format']
    if Options['GreaterOrEqual']:
        kwargs['highest'] = '>='
    elif Options['GreaterThan']:
        kwargs['highest'] = '>>'

    for line in list_packages(packages, **kwargs):
        print(line)
def do_update(self):
    """Migrate to queue-table-based policy queues (schema revision 20)."""
    print "Updating use of queue table"

    try:
        c = self.db.cursor()

        cnf = Config()

        print "Adding path to queue table"
        c.execute("ALTER TABLE queue ADD COLUMN path TEXT")

        # Fill each existing queue's path from its Dir::Queue entry and
        # remember which queues we have seen (by lowercase name).
        c.execute("SELECT * FROM queue")
        rows = c.fetchall()
        seenqueues = {}
        for row in rows:
            dir = cnf["Dir::Queue::%s" % row[1]].rstrip('/')
            seenqueues[row[1].lower()] = 1
            print "Setting %s queue to use path %s" % (row[1], dir)
            c.execute("UPDATE queue SET path = %s WHERE id = %s", (dir, row[0]))

        print "Adding missing queues to the queue table"
        for q in cnf.subtree("Dir::Queue").keys():
            qname = q.lower()
            if qname in seenqueues.keys():
                continue
            # These directories are internal and not real policy queues.
            if qname in ["done", "holding", "reject", "newstage", "btsversiontrack"]:
                print "Skipping queue %s" % qname
                continue
            pth = cnf["Dir::Queue::%s" % qname].rstrip('/')
            if not os.path.exists(pth):
                print "Skipping %s as %s does not exist" % (qname, pth)
                continue

            print "Adding %s queue with path %s" % (qname, pth)
            c.execute("INSERT INTO queue (queue_name, path) VALUES (%s, %s)", (qname, pth))
            seenqueues[qname] = 1

        print "Adding queue and approved_for columns to known_changes"
        c.execute("ALTER TABLE known_changes ADD COLUMN in_queue INT4 REFERENCES queue(id) DEFAULT NULL")
        c.execute("ALTER TABLE known_changes ADD COLUMN approved_for INT4 REFERENCES queue(id) DEFAULT NULL")

        print "Adding policy queue column to suite table"
        c.execute("ALTER TABLE suite DROP COLUMN policy_engine")
        c.execute("ALTER TABLE suite ADD COLUMN policy_queue_id INT4 REFERENCES queue(id) DEFAULT NULL")

        # Handle some of our common cases automatically
        if seenqueues.has_key('proposedupdates'):
            c.execute("""UPDATE suite SET policy_queue_id = (SELECT id FROM queue WHERE queue_name = 'proposedupdates')
                          WHERE suite_name = 'proposed-updates'""")

        if seenqueues.has_key('oldproposedupdates'):
            c.execute("""UPDATE suite SET policy_queue_id = (SELECT id FROM queue WHERE queue_name = 'oldproposedupdates')
                          WHERE suite_name = 'oldstable-proposed-updates'""")

        print "Committing"
        c.execute("""UPDATE config
                     SET value = '20' WHERE name = 'db_revision'""")
        self.db.commit()

    except psycopg2.InternalError as msg:
        # Roll back so a failed migration leaves the schema untouched.
        self.db.rollback()
        raise DBUpdateError("Unable to apply debversion update 20, rollback issued. Error message : %s" % (str(msg)))
def check(self, upload): changes = upload.changes # Only check sourceful uploads. if changes.source is None: return True # Only check uploads to unstable or experimental. if 'unstable' not in changes.distributions and 'experimental' not in changes.distributions: return True cnf = Config() if 'Dinstall::LintianTags' not in cnf: return True tagfile = cnf['Dinstall::LintianTags'] with open(tagfile, 'r') as sourcefile: sourcecontent = sourcefile.read() try: lintiantags = yaml.safe_load(sourcecontent)['lintian'] except yaml.YAMLError as msg: raise Exception('Could not read lintian tags file {0}, YAML error: {1}'.format(tagfile, msg)) fd, temp_filename = utils.temp_filename(mode=0o644) temptagfile = os.fdopen(fd, 'w') for tags in lintiantags.itervalues(): for tag in tags: print >>temptagfile, tag temptagfile.close() changespath = os.path.join(upload.directory, changes.filename) try: cmd = [] result = 0 user = cnf.get('Dinstall::UnprivUser') or None if user is not None: cmd.extend(['sudo', '-H', '-u', user]) cmd.extend(['/usr/bin/lintian', '--show-overrides', '--tags-from-file', temp_filename, changespath]) output = daklib.daksubprocess.check_output(cmd, stderr=subprocess.STDOUT) except subprocess.CalledProcessError as e: result = e.returncode output = e.output finally: os.unlink(temp_filename) if result == 2: utils.warn("lintian failed for %s [return code: %s]." % \ (changespath, result)) utils.warn(utils.prefix_multi_line_string(output, \ " [possible output:] ")) parsed_tags = lintian.parse_lintian_output(output) rejects = list(lintian.generate_reject_messages(parsed_tags, lintiantags)) if len(rejects) != 0: raise Reject('\n'.join(rejects)) return True
def main(argv=None):
    """Export every pool file of a suite into a flat target directory,
    either copied or symlinked (absolutely or, with --relative,
    relatively)."""
    if argv is None:
        argv = sys.argv

    arguments = [('h', 'help', 'Export::Options::Help'),
                 ('c', 'copy', 'Export::Options::Copy'),
                 ('d', 'directory', 'Export::Options::Directory', 'HasArg'),
                 ('r', 'relative', 'Export::Options::Relative'),
                 ('s', 'suite', 'Export::Options::Suite', 'HasArg')]

    cnf = Config()
    apt_pkg.parse_commandline(cnf.Cnf, arguments, argv)
    options = cnf.subtree('Export::Options')

    if 'Help' in options or 'Suite' not in options:
        usage()
        sys.exit(0)

    session = DBConn().session()

    suite = session.query(Suite).filter_by(suite_name=options['Suite']).first()
    if suite is None:
        print "Unknown suite '{0}'".format(options['Suite'])
        sys.exit(1)

    directory = options.get('Directory')
    if not directory:
        print "No target directory."
        sys.exit(1)

    symlink = 'Copy' not in options
    relative = 'Relative' in options

    # Relative links only make sense when symlinking, not copying.
    if relative and not symlink:
        print "E: --relative and --copy cannot be used together."
        sys.exit(1)

    # Collect the pool files for all binaries and all source files.
    binaries = suite.binaries
    sources = suite.sources

    files = []
    files.extend([ b.poolfile for b in binaries ])
    for s in sources:
        files.extend([ ds.poolfile for ds in s.srcfiles ])

    with FilesystemTransaction() as fs:
        for f in files:
            # Locate the file within this suite's archive.
            af = session.query(ArchiveFile) \
                        .join(ArchiveFile.component).join(ArchiveFile.file) \
                        .filter(ArchiveFile.archive == suite.archive) \
                        .filter(ArchiveFile.file == f).first()
            src = af.path
            if relative:
                src = os.path.relpath(src, directory)
            dst = os.path.join(directory, f.basename)
            if not os.path.exists(dst):
                fs.copy(src, dst, symlink=symlink)
        fs.commit()
def prepare(self):
    """prepare upload for further processing

    This copies the files involved to a temporary directory.  If you use
    this method directly, you have to remove the directory given by the
    C{directory} attribute later on your own.

    Instead of using the method directly, you can also use a with-statement::

       with ArchiveUpload(...) as upload:
          ...

    This will automatically handle any required cleanup.
    """
    assert self.directory is None
    assert self.original_changes.valid_signature

    cnf = Config()
    session = self.transaction.session

    # Work in a group-accessible temp directory so unprivileged helpers
    # (configured via Dinstall::UnprivGroup) can read the files.
    group = cnf.get('Dinstall::UnprivGroup') or None
    self.directory = utils.temp_dirname(parent=cnf.get('Dir::TempPath'),
                                        mode=0o2750, group=group)
    with FilesystemTransaction() as fs:
        # Copy the .changes itself first, then re-parse it from the copy.
        src = os.path.join(self.original_directory, self.original_changes.filename)
        dst = os.path.join(self.directory, self.original_changes.filename)
        fs.copy(src, dst, mode=0o640)

        self.changes = upload.Changes(self.directory, self.original_changes.filename, self.keyrings)

        # Copy every file referenced by the .changes that is present next
        # to it; missing files are simply skipped here.
        for f in self.changes.files.itervalues():
            src = os.path.join(self.original_directory, f.filename)
            dst = os.path.join(self.directory, f.filename)
            if not os.path.exists(src):
                continue
            fs.copy(src, dst, mode=0o640)

        source = None
        try:
            source = self.changes.source
        except Exception:
            # Do not raise an exception here if the .dsc is invalid.
            pass

        if source is not None:
            # Source files not shipped in the upload may already live in
            # the archive pool; fetch them from there.
            for f in source.files.itervalues():
                src = os.path.join(self.original_directory, f.filename)
                dst = os.path.join(self.directory, f.filename)
                if not os.path.exists(dst):
                    try:
                        db_file = self.transaction.get_file(f, source.dsc['Source'], check_hashes=False)
                        db_archive_file = session.query(ArchiveFile).filter_by(file=db_file).first()
                        fs.copy(db_archive_file.path, dst, mode=0o640)
                    except KeyError:
                        # Ignore if get_file could not find it.  Upload will
                        # probably be rejected later.
                        pass
def main(argv=None):
    """Export every pool file of a suite into a flat target directory,
    either copied or symlinked."""
    if argv is None:
        argv = sys.argv

    arguments = [('h', 'help', 'Export::Options::Help'),
                 ('c', 'copy', 'Export::Options::Copy'),
                 ('d', 'directory', 'Export::Options::Directory', 'HasArg'),
                 ('s', 'suite', 'Export::Options::Suite', 'HasArg')]

    cnf = Config()
    apt_pkg.parse_commandline(cnf.Cnf, arguments, argv)
    options = cnf.subtree('Export::Options')

    if 'Help' in options or 'Suite' not in options:
        usage()
        sys.exit(0)

    session = DBConn().session()

    suite = session.query(Suite).filter_by(suite_name=options['Suite']).first()
    if suite is None:
        print "Unknown suite '{0}'".format(options['Suite'])
        sys.exit(1)

    directory = options.get('Directory')
    if not directory:
        print "No target directory."
        sys.exit(1)

    # Symlink into place unless --copy was requested.
    symlink = 'Copy' not in options

    # Collect the pool files for all binaries and all source files.
    binaries = suite.binaries
    sources = suite.sources

    files = []
    files.extend([ b.poolfile for b in binaries ])
    for s in sources:
        files.extend([ ds.poolfile for ds in s.srcfiles ])

    with FilesystemTransaction() as fs:
        for f in files:
            af = session.query(ArchiveFile) \
                        .join(ArchiveFile.component).join(ArchiveFile.file) \
                        .filter(ArchiveFile.archive == suite.archive) \
                        .filter(ArchiveFile.file == f).first()
            # XXX: Remove later.  There was a bug that caused only the *.dsc to
            # be installed in build queues and we do not want to break them.
            # The bug was fixed in 55d2c7e6e2418518704623246021021e05b90e58
            # on 2012-11-04
            if af is None:
                af = session.query(ArchiveFile) \
                            .join(ArchiveFile.component).join(ArchiveFile.file) \
                            .filter(ArchiveFile.file == f).first()
            dst = os.path.join(directory, f.basename)
            if not os.path.exists(dst):
                fs.copy(af.path, dst, symlink=symlink)
        fs.commit()
def __init__(self, suitename, component):
    """Initializer.

    'suitename' and 'component' identify the suite/component pair this
    object works on; remaining settings come from configuration.
    """
    self._suite_name = suitename
    self._component = component

    cnf = Config()
    # Packages shipping icon themes, from DEP11::IconThemePackages.
    self._icon_theme_packages = cnf.value_list('DEP11::IconThemePackages')
    self._pool_dir = cnf["Dir::Pool"]
    # BUG FIX: '(".png")' is just the string ".png" — a parenthesized
    # expression needs a trailing comma to be a 1-tuple.
    self._allowed_exts = (".png",)
def announce_accept(upload):
    """
    Announce an upload.

    @type upload: L{daklib.upload.Source} or L{daklib.upload.Binary}
    @param upload: upload to handle
    """
    cnf = Config()

    subst = _subst_for_upload(upload)
    whitelists = _whitelists(upload)

    # True when at least one target suite is a real suite (no policy
    # queue) or the upload just left a policy queue.
    accepted_to_real_suite = any(suite.policy_queue is None or suite in upload.from_policy_suites for suite in upload.suites)

    suite_names = []
    for suite in upload.suites:
        if suite.policy_queue:
            # Show "suite->queue" for uploads routed through a policy queue.
            suite_names.append("{0}->{1}".format(suite.suite_name, suite.policy_queue.queue_name))
        else:
            suite_names.append(suite.suite_name)
    suite_names.extend(suite.suite_name for suite in upload.from_policy_suites)
    subst['__SUITE__'] = ', '.join(suite_names) or '(none)'

    message = TemplateSubst(subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.accepted'))
    send_mail(message, whitelists=whitelists)

    if accepted_to_real_suite and upload.sourceful:
        # send mail to announce lists and tracking server
        announce = set()
        for suite in upload.suites:
            if suite.policy_queue is None or suite in upload.from_policy_suites:
                announce.update(suite.announce or [])

        announce_list_address = ", ".join(announce)

        # according to #890944 this email shall be sent to dispatch@<TrackingServer> to avoid
        # bouncing emails
        # the package email alias is not yet created shortly after accepting the package
        tracker = cnf.get('Dinstall::TrackingServer')
        if tracker:
            announce_list_address = "{0}\nBcc: dispatch@{1}".format(announce_list_address, tracker)

        if len(announce_list_address) != 0:
            my_subst = subst.copy()
            my_subst['__ANNOUNCE_LIST_ADDRESS__'] = announce_list_address

            message = TemplateSubst(my_subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.announce'))
            send_mail(message, whitelists=whitelists)

    # Close bugs listed in the .changes; a per-suite close_bugs setting
    # overrides the global Dinstall::CloseBugs default.
    close_bugs_default = cnf.find_b('Dinstall::CloseBugs')
    close_bugs = any(s.close_bugs if s.close_bugs is not None else close_bugs_default for s in upload.suites)
    if accepted_to_real_suite and upload.sourceful and close_bugs:
        for bug in upload.bugs:
            my_subst = subst.copy()
            my_subst['__BUG_NUMBER__'] = str(bug)

            message = TemplateSubst(my_subst, os.path.join(cnf['Dir::Templates'], 'process-unchecked.bug-close'))
            send_mail(message, whitelists=whitelists)
def _action_dm_admin_common(self, fingerprint, section, session):
    """Shared ACL check for DM admin commands: make sure the archive is
    configured for them and the signing key is an authorized admin key."""
    cnf = Config()

    # Both config entries must be present before any DM admin command
    # can be evaluated at all.
    not_configured = ("Command::DM-Admin::AdminFingerprints" not in cnf
                      or "Command::DM::ACL" not in cnf)
    if not_configured:
        raise CommandError("DM admin command is not configured for this archive.")

    admin_fprs = cnf.value_list("Command::DM-Admin::AdminFingerprints")
    if fingerprint.fingerprint not in admin_fprs:
        raise CommandError("Key {0} is not allowed to admin DM".format(fingerprint.fingerprint))
def main():
    """Entry point for `dak update-suite`: pull newer packages from an
    origin suite into a target suite inside one archive transaction."""
    from daklib.config import Config
    config = Config()

    import apt_pkg
    arguments = [
        ('h', 'help', 'Update-Suite::Options::Help'),
        ('n', 'no-act', 'Update-Suite::options::NoAct'),
    ]
    argv = apt_pkg.parse_commandline(config.Cnf, arguments, sys.argv)

    try:
        options = config.subtree("Update-Suite::Options")
    except KeyError:
        options = {}

    # Exactly two positional arguments are required: origin and target.
    if 'Help' in options or len(argv) != 2:
        usage()

    origin_name = argv[0]
    target_name = argv[1]
    dry_run = True if 'NoAct' in options else False

    with ArchiveTransaction() as transaction:
        session = transaction.session

        try:
            origin = session.query(Suite).filter_by(
                suite_name=origin_name).one()
        except NoResultFound:
            daklib.utils.fubar(
                "Origin suite '{0}' is unknown.".format(origin_name))
        try:
            target = session.query(Suite).filter_by(
                suite_name=target_name).one()
        except NoResultFound:
            daklib.utils.fubar(
                "Target suite '{0}' is unknown.".format(target_name))

        su = SuiteUpdater(transaction, origin, target, dry_run=dry_run)
        su.update_suite()

        # In dry-run mode roll everything back so nothing is persisted.
        if dry_run:
            transaction.rollback()
        else:
            transaction.commit()
def make_icon_tar(suitename, component):
    ''' icons-%(component)_%(size).tar.gz of each Component. '''
    cnf = Config()

    for size in cnf.value_list('DEP11::IconSizes'):
        # All exported icons of this size for the suite/component.
        icon_glob = os.path.join(cnf["Dir::MetaInfo"], suitename, component,
                                 "*", "icons", size, "*.*")
        dest_dir = os.path.join(cnf["Dir::Root"], "dists", suitename, component)
        tarball_path = os.path.join(dest_dir,
                                    "icons-%s_%s.tar.gz" % (component, size))

        archive = tarfile.open(tarball_path, "w:gz")
        for icon_path in glob.glob(icon_glob):
            archive.add(icon_path, arcname=os.path.basename(icon_path))
        archive.close()
def do_pkg(upload, session):
    """Process one NEW upload: lock the package, copy the upload to a
    temporary location and run the interactive NEW handling."""
    # Try to get an included dsc
    dsc = upload.source

    cnf = Config()
    group = cnf.get('Dinstall::UnprivGroup') or None

    try:
        with lock_package(upload.changes.source), \
                UploadCopy(upload, group=group) as upload_copy:
            handler = PolicyQueueUploadHandler(upload, session)
            # A pending action means the upload's fate was already decided
            # elsewhere; just report it and skip.
            if handler.get_action() is not None:
                print("PENDING %s\n" % handler.get_action())
                return

            do_new(upload, upload_copy, handler, session)
    except AlreadyLockedError as e:
        print("Seems to be locked by %s already, skipping..." % (e))
def main():
    """Entry point for `dak contents`: scan source/binary packages or
    generate Contents-* files, depending on the sub-command."""
    cnf = Config()
    # Pre-seed the option keys so the subtree() lookups below never fail.
    cnf['Contents::Options::Help'] = ''
    cnf['Contents::Options::Suite'] = ''
    cnf['Contents::Options::Component'] = ''
    cnf['Contents::Options::Limit'] = ''
    cnf['Contents::Options::Force'] = ''
    arguments = [
        ('h', "help", 'Contents::Options::Help'),
        ('a', 'archive', 'Contents::Options::Archive', 'HasArg'),
        ('s', "suite", 'Contents::Options::Suite', "HasArg"),
        ('c', "component", 'Contents::Options::Component', "HasArg"),
        ('l', "limit", 'Contents::Options::Limit', "HasArg"),
        ('f', "force", 'Contents::Options::Force'),
    ]
    args = apt_pkg.parse_commandline(cnf.Cnf, arguments, sys.argv)
    options = cnf.subtree('Contents::Options')

    # Exactly one sub-command is expected.
    if (len(args) != 1) or options['Help']:
        usage()

    limit = None
    if len(options['Limit']) > 0:
        limit = int(options['Limit'])

    if args[0] == 'scan-source':
        source_scan_all(cnf, limit)
        return

    if args[0] == 'scan-binary':
        binary_scan_all(cnf, limit)
        return

    archive_names = utils.split_args(options['Archive'])
    suite_names = utils.split_args(options['Suite'])
    component_names = utils.split_args(options['Component'])

    force = bool(options['Force'])

    if args[0] == 'generate':
        write_all(cnf, archive_names, suite_names, component_names, force)
        return

    # Unknown sub-command: fall through to usage.
    usage()
def main():
    """Entry point for `dak manage-debug-suites`: clean obsolete packages
    out of the named debug suites."""
    global Options, Logger

    cnf = Config()

    # Make sure all expected option keys exist.
    for i in ["Help", "No-Action", "All"]:
        key = "Manage-Debug-Suites::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""

    Arguments = [('h', "help", "Manage-Debug-Suites::Options::Help"),
                 ('n', "no-action", "Manage-Debug-Suites::Options::No-Action"),
                 ('a', "all", "Manage-Debug-Suites::Options::All")]

    debug_suite_names = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Manage-Debug-Suites::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('manage-debug-suites', Options['No-Action'])

    with ArchiveTransaction() as transaction:
        session = transaction.session
        if Options['All']:
            # -a and explicit suite names are mutually exclusive.
            if len(debug_suite_names) != 0:
                print("E: Cannot use both -a and a queue name")
                sys.exit(1)
            raise Exception("Not yet implemented.")
        else:
            debug_suites = session.query(Suite).filter(
                Suite.suite_name.in_(debug_suite_names))

        for debug_suite in debug_suites:
            Logger.log(
                ['cleaning debug suite {0}'.format(debug_suite.suite_name)])
            clean(debug_suite, transaction)

        # Persist only when not in no-action mode.
        if not Options['No-Action']:
            transaction.commit()
        else:
            transaction.rollback()

    Logger.close()
def do_lintian(filename):
    """Run lintian over *filename* (optionally via sudo as the configured
    unprivileged user) and return the captured, escaped output."""
    cnf = Config()

    cmd = []

    # Drop privileges through sudo when an unprivileged user is configured.
    unpriv_user = cnf.get('Dinstall::UnprivUser') or None
    if unpriv_user is not None:
        cmd += ['sudo', '-H', '-u', unpriv_user]

    color = 'html' if use_html else 'always'

    cmd += ['lintian', '--show-overrides', '--color', color, "--", filename]

    try:
        return do_command(cmd, escaped=True)
    except OSError as e:
        return (colour_output("Running lintian failed: %s" % (e), "error"))
def check(self, upload):
    """Policy checks for source-only uploads.

    Returns True when the upload is acceptable; raises Reject otherwise.
    """
    if not self.is_source_only_upload(upload):
        return True

    allow_source_only_uploads = Config().find_b(
        'Dinstall::AllowSourceOnlyUploads')
    allow_source_only_uploads_without_package_list = Config().find_b(
        'Dinstall::AllowSourceOnlyUploadsWithoutPackageList')
    allow_source_only_new = Config().find_b('Dinstall::AllowSourceOnlyNew')
    allow_no_arch_indep_uploads = Config().find_b(
        'Dinstall::AllowNoArchIndepUploads', True)
    changes = upload.changes

    if not allow_source_only_uploads:
        raise Reject('Source-only uploads are not allowed.')
    # A Package-List with architectures is required so the buildds know
    # which binaries the source will produce (dpkg >= 1.17.7 adds it).
    if not allow_source_only_uploads_without_package_list \
            and changes.source.package_list.fallback:
        raise Reject(
            'Source-only uploads are only allowed if a Package-List field that also list architectures is included in the source package. dpkg (>= 1.17.7) includes this information.'
        )
    if not allow_source_only_new and upload.new:
        raise Reject('Source-only uploads to NEW are not allowed.')

    if 'all' not in changes.architectures and changes.source.package_list.has_arch_indep_packages(
    ):
        if not allow_no_arch_indep_uploads:
            raise Reject(
                'Uploads must include architecture-independent packages.')
        # These (historical) suites do not build arch:all packages, so
        # the uploader must ship the arch-indep binaries themselves.
        for suite in ('oldstable', 'oldstable-proposed-updates',
                      'oldstable-security', 'jessie',
                      'jessie-proposed-updates', 'jessie-security',
                      'oldstable-backports', 'oldstable-backports-sloppy',
                      'jessie-backports', 'jessie-backports-sloppy',
                      'oldoldstable', 'oldoldstable-security', 'wheezy',
                      'wheezy-security', 'oldoldstable-backports',
                      'oldoldstable-backports-sloppy', 'wheezy-backports',
                      'wheezy-backports-sloppy'):
            if suite in changes.distributions:
                raise Reject(
                    'Suite {} is not configured to build arch:all packages. Please include them in your upload'
                    .format(suite))

    return True
def make_icon_tar(suitename, component):
    ''' icons-%(component)_%(size).tar.gz of each Component. '''
    conf = Config()

    # All exported 64px icons for this suite/component.
    pattern = os.path.join(conf["Dir::MetaInfo"], suitename, component,
                           "*", "icons", "*.*")
    dest_dir = os.path.join(conf["Dir::Root"], "dists", suitename, component)
    tarball = os.path.join(dest_dir, "icons-%s_64px.tar.gz" % (component))

    archive = tarfile.open(tarball, "w:gz")
    for icon_file in glob.glob(pattern):
        archive.add(icon_file, arcname=os.path.basename(icon_file))
    archive.close()
def action_sync_package(self, fingerprint, section, session):
    """Handle a signed "sync package" command.

    Verifies the signer's keyring is an authorized admin keyring, then
    invokes synchrotron to sync the requested package(s) from the given
    suite/component into the staging suite.

    @param fingerprint: fingerprint object of the command's signer
    @param section: parsed command section (Packages/Suite/Component)
    @param session: database session (unused here; part of the interface)
    """
    cnf = Config()

    allowed_keyrings = cnf.value_list('Command::Sync::AdminKeyrings')
    if fingerprint.keyring.keyring_name not in allowed_keyrings:
        raise CommandError(
            'Key {0} is not allowed to sync Debian packages.'.format(
                fingerprint.fingerprint))

    if 'Packages' not in section or 'Suite' not in section or 'Component' not in section:
        raise CommandError('Invalid commands: Section is missing.')

    packages_str = section['Packages']
    suite = section['Suite']
    component = section['Component']

    # Multiple packages may be given, space-separated.
    if " " in packages_str:
        packages = packages_str.split(" ")
    else:
        packages = [packages_str]

    # we always sync to the staging suite of Tanglu
    sync_cmd = ["synchrotron", "-i", suite, "staging", component]
    # add to-be-synced packages to the parameter list
    sync_cmd.extend(packages)

    p = subprocess.Popen(sync_cmd,
                         stdout=subprocess.PIPE,
                         stdin=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    output = p.communicate()
    # Bug fix: `returncode is not 0` compared object identity, which is a
    # CPython implementation detail for small ints; compare by value.
    if p.returncode != 0:
        self.result.append("Failed syncing: {0} from {1} ({2})".format(
            packages_str, suite, component))
        out_str = ""
        # Bug fix: None comparisons use `is (not)`, never `!=`.
        if output[0] is not None:
            out_str = output[0]
        if output[1] is not None:
            out_str += output[1]
        self.result.append(" - Error: {0}".format(out_str))
    else:
        self.result.append("Synced package(s): {0} from {1} ({2})".format(
            packages_str, suite, component))
def __init__(self, dscfilename, tmpbasedir=None):
    '''
    The dscfilename is a name of a DSC file that will be extracted.
    '''
    if tmpbasedir:
        basedir = tmpbasedir
    else:
        basedir = Config()['Dir::TempPath']
    workdir = mkdtemp(dir=basedir)
    self.root_directory = os.path.join(workdir, 'root')
    # Unpack quietly, without copying the orig tarball and without
    # signature/checksum verification.
    unpack_cmd = ('dpkg-source', '--no-copy', '--no-check', '-q',
                  '-x', dscfilename, self.root_directory)
    daklib.daksubprocess.check_call(unpack_cmd)
def main():
    """Entry point for `dak dominate`: list or delete obsolete package
    associations in all (non-policy) suites."""
    global Options, Logger

    cnf = Config()
    Arguments = [('h', "help", "Obsolete::Options::Help"),
                 ('s', "suite", "Obsolete::Options::Suite", "HasArg"),
                 ('n', "no-action", "Obsolete::Options::No-Action"),
                 ('f', "force", "Obsolete::Options::Force")]
    cnf['Obsolete::Options::Help'] = ''
    cnf['Obsolete::Options::No-Action'] = ''
    cnf['Obsolete::Options::Force'] = ''
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Obsolete::Options")
    if Options['Help']:
        usage()

    if not Options['No-Action']:
        Logger = daklog.Logger("dominate")
    session = DBConn().session()

    # Exclude policy queues: obsolete packages must not be removed from
    # them.  Untouchable suites are skipped unless --force is given.
    suites_query = (session.query(Suite).order_by(
        Suite.suite_name).filter(~exists().where(
            Suite.suite_id == PolicyQueue.suite_id)))
    if 'Suite' in Options:
        suites_query = suites_query.filter(
            Suite.suite_name.in_(utils.split_args(Options['Suite'])))
    if not Options['Force']:
        suites_query = suites_query.filter_by(untouchable=False)
    suites = suites_query.all()

    assocs = list(retrieve_associations(suites, session))

    if Options['No-Action']:
        # Dry run: print what would be deleted, then roll back.
        headers = ('source package', 'source version', 'package', 'version',
                   'arch', 'suite', 'id')
        print(tabulate(assocs, headers, tablefmt="orgtbl"))
        session.rollback()
    else:
        delete_associations(assocs, session)
        session.commit()

    if Logger:
        Logger.close()
def suite_release_path(self):
    """
    Absolute path where Release files are physically stored.

    This should be a path that sorts after the dists/ directory.
    """
    cnf = Config()
    suffix = utils.suite_suffix(self.suite.suite_name)
    return os.path.join(self.suite.archive.path, 'zzz-dists',
                        self.suite.suite_name, suffix)
def accept(directory, upload):
    """Install an accepted upload into the archive and do the follow-up
    bookkeeping: buildinfos, urgency log, announcements, moving the
    .changes to the done directory, and summary statistics."""
    cnf = Config()

    Logger.log(['ACCEPT', upload.changes.filename])
    print("ACCEPT")

    upload.install()
    utils.process_buildinfos(upload.directory, upload.changes.buildinfo_files,
                             upload.transaction.fs, Logger)

    # True when at least one final suite is a real suite (no policy queue).
    accepted_to_real_suite = any(suite.policy_queue is None
                                 for suite in upload.final_suites)
    sourceful_upload = upload.changes.sourceful

    control = upload.changes.changes
    if sourceful_upload and not Options['No-Action']:
        urgency = control.get('Urgency')
        # As per policy 5.6.17, the urgency can be followed by a space and a
        # comment. Extract only the urgency from the string.
        if ' ' in urgency:
            urgency, comment = urgency.split(' ', 1)
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(control['Source'], control['Version'], urgency)

    pu = get_processed_upload(upload)
    daklib.announce.announce_accept(pu)

    # Move .changes to done, but only for uploads that were accepted to a
    # real suite. process-policy will handle this for uploads to queues.
    if accepted_to_real_suite:
        src = os.path.join(upload.directory, upload.changes.filename)

        now = datetime.datetime.now()
        donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
        dst = os.path.join(donedir, upload.changes.filename)
        dst = utils.find_next_free(dst)

        upload.transaction.fs.copy(src, dst, mode=0o644)

    SummaryStats().accept_count += 1
    SummaryStats().accept_bytes += upload.changes.bytes
def check(self, upload):
    """Clear the NEW flag for uploads signed by one of the configured
    'Dinstall::NewOverrideKeys' fingerprints; always returns True."""
    if not upload.new:
        return True

    override_keys = Config().value_list('Dinstall::NewOverrideKeys')
    if upload.changes.primary_fingerprint in override_keys:
        upload.new = False

    return True
def main():
    """Entry point for `dak clean-queues`: remove orphaned and old files
    from the incoming and reject directories."""
    global Options, Logger

    cnf = Config()

    # Ensure all option keys exist; Days defaults to two weeks.
    for i in ["Help", "Incoming", "No-Action", "Verbose"]:
        key = "Clean-Queues::Options::%s" % i
        if key not in cnf:
            cnf[key] = ""
    if "Clean-Queues::Options::Days" not in cnf:
        cnf["Clean-Queues::Options::Days"] = "14"

    Arguments = [('h', "help", "Clean-Queues::Options::Help"),
                 ('d', "days", "Clean-Queues::Options::Days", "IntLevel"),
                 ('i', "incoming", "Clean-Queues::Options::Incoming", "HasArg"),
                 ('n', "no-action", "Clean-Queues::Options::No-Action"),
                 ('v', "verbose", "Clean-Queues::Options::Verbose")]

    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Clean-Queues::Options")

    if Options["Help"]:
        usage()

    Logger = daklog.Logger('clean-queues', Options['No-Action'])

    init(cnf)

    if Options["Verbose"]:
        print("Processing incoming...")
    flush_orphans()

    # Only clean the reject directory if it actually exists.
    reject = cnf["Dir::Reject"]
    if os.path.exists(reject) and os.path.isdir(reject):
        if Options["Verbose"]:
            print("Processing reject directory...")
        # NOTE: flush_old() operates on the current working directory.
        os.chdir(reject)
        flush_old()

    Logger.close()
def _subst_for_upload(upload):
    """Build the template substitution dictionary for mails about *upload*.

    @type upload: L{daklib.upload.Source} or L{daklib.upload.Binary}
    @return: dict mapping __PLACEHOLDER__ names to values
    """
    cnf = Config()

    # Fall back to the archive's own address when maintainer/changed-by
    # are not set on the upload.
    maintainer = upload.maintainer or cnf['Dinstall::MyEmailAddress']
    changed_by = upload.changed_by or maintainer
    if upload.sourceful:
        maintainer_to = mail_addresses_for_upload(maintainer, changed_by,
                                                  upload.fingerprint)
    else:
        maintainer_to = mail_addresses_for_upload(maintainer, maintainer,
                                                  upload.fingerprint)

    bcc = 'X-DAK: dak {0}'.format(upload.program)
    if 'Dinstall::Bcc' in cnf:
        bcc = '{0}\nBcc: {1}'.format(bcc, cnf['Dinstall::Bcc'])

    subst = {
        '__DISTRO__': cnf['Dinstall::MyDistribution'],
        '__BUG_SERVER__': cnf.get('Dinstall::BugServer'),
        '__ADMIN_ADDRESS__': cnf['Dinstall::MyAdminAddress'],
        '__DAK_ADDRESS__': cnf['Dinstall::MyEmailAddress'],
        '__REJECTOR_ADDRESS__': cnf['Dinstall::MyEmailAddress'],
        '__MANUAL_REJECT_MESSAGE__': '',
        '__BCC__': bcc,
        '__MAINTAINER__': changed_by,
        '__MAINTAINER_FROM__': fix_maintainer(changed_by)[1],
        '__MAINTAINER_TO__': ', '.join(maintainer_to),
        '__CHANGES_FILENAME__': upload.changes_filename,
        '__FILE_CONTENTS__': upload.changes,
        '__SOURCE__': upload.source,
        '__VERSION__': upload.version,
        '__ARCHITECTURE__': upload.architecture,
        '__WARNINGS__': '\n'.join(upload.warnings),
    }

    # An override redirects all maintainer mail to one fixed address.
    override_maintainer = cnf.get('Dinstall::OverrideMaintainer')
    if override_maintainer:
        subst['__MAINTAINER_FROM__'] = subst[
            '__MAINTAINER_TO__'] = override_maintainer

    return subst
def determine_target(u):
    """Return the name of the first statically-handled queue whose
    predicate claims upload *u*, or None when no queue matches."""
    cnf = Config()

    # Statically handled queues
    for queue_name in ["autobyhand", "byhand", "new", "unembargoed", "embargoed"]:
        if QueueInfo[queue_name]["is"](u):
            return queue_name

    return None
def do_update(self):
    """
    Add support for Description-md5
    """
    print __doc__
    try:
        cnf = Config()

        c = self.db.cursor()

        # Helper function to backfill Description-md5 for binaries that
        # only carry a plain Description.  The md5 is computed over the
        # description text plus a trailing newline.
        c.execute(
            """CREATE OR REPLACE FUNCTION public.add_missing_description_md5()
  RETURNS VOID
  VOLATILE
  LANGUAGE plpgsql
AS $function$
DECLARE
  description_key_id metadata_keys.key_id%TYPE;
  description_md5_key_id metadata_keys.key_id%TYPE;
BEGIN
  SELECT key_id INTO STRICT description_key_id FROM metadata_keys WHERE key='Description';
  SELECT key_id INTO description_md5_key_id FROM metadata_keys WHERE key='Description-md5';
  IF NOT FOUND THEN
    INSERT INTO metadata_keys (key) VALUES ('Description-md5') RETURNING key_id INTO description_md5_key_id;
  END IF;
  INSERT INTO binaries_metadata (bin_id, key_id, value)
    SELECT
      bm.bin_id AS bin_id,
      description_md5_key_id AS key_id,
      MD5(bm.value || E'\n') AS value
    FROM binaries_metadata AS bm
    WHERE
      bm.key_id = description_key_id
      AND NOT EXISTS (SELECT 1 FROM binaries_metadata AS bm2 WHERE bm.bin_id = bm2.bin_id AND bm2.key_id = description_md5_key_id);
END;
$function$""")

        # Per-suite switch whether Packages files keep long descriptions.
        c.execute(
            "ALTER TABLE suite ADD COLUMN include_long_description BOOLEAN NOT NULL DEFAULT 't'"
        )

        c.execute("UPDATE config SET value = '69' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError, msg:
        # Abort the transaction and re-raise as a dak-specific error.
        self.db.rollback()
        raise DBUpdateError(
            'Unable to apply sick update 69, rollback issued. Error message : %s'
            % (str(msg)))
def main():
    """Entry point for `dak generate-metadata`: extract DEP-11 AppStream
    metadata for one suite and write Components-<arch> files."""
    cnf = Config()

    Arguments = [
        ('h', "help", "DEP11::Options::Help"),
        ('e', "expire", "DEP11::Options::ExpireCache"),
        ('s', "suite", "DEP11::Options::Suite", "HasArg"),
    ]
    # Pre-seed the option keys so later lookups never fail.
    for i in ["help", "suite", "ExpireCache"]:
        if not cnf.has_key("DEP11::Options::%s" % (i)):
            cnf["DEP11::Options::%s" % (i)] = ""

    arguments = apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("DEP11::Options")
    if Options["Help"]:
        usage()
        return

    suitename = Options["Suite"]
    if not suitename:
        print("You need to specify a suite!")
        return

    logger = daklog.Logger('generate-metadata')

    from daklib.dbconn import Component, DBConn, get_suite, Suite
    session = DBConn().session()
    suite = get_suite(suitename.lower(), session)

    # Optionally throw away cached DEP-11 data before re-processing.
    if Options["ExpireCache"]:
        expire_dep11_data_cache(session, suitename)

    process_suite(session, suite, logger)
    # export database content as Components-<arch>.xz YAML documents
    write_component_files(suite)

    # we're done
    logger.close()
def announce_new(upload):
    """
    Announce an upload going to NEW.

    @type upload: L{daklib.upload.Source} or L{daklib.upload.Binary}
    @param upload: upload to handle
    """
    cnf = Config()
    substitutions = _subst_for_upload(upload)
    recipients_whitelists = _whitelists(upload)

    template = os.path.join(cnf['Dir::Templates'], 'process-unchecked.new')
    mail_body = TemplateSubst(substitutions, template)
    send_mail(mail_body, whitelists=recipients_whitelists)
def do_update(self):
    """
    Add suite options for overrides and control-suite to DB
    """
    print(__doc__)
    try:
        cnf = Config()

        c = self.db.cursor()

        # Per-suite switches for check-overrides.
        c.execute("ALTER TABLE suite ADD COLUMN overrideprocess BOOLEAN NOT NULL DEFAULT FALSE")
        c.execute("COMMENT ON COLUMN suite.overrideprocess IS %s", ['If true, check-overrides will process the suite by default'])
        c.execute("ALTER TABLE suite ADD COLUMN overrideorigin TEXT DEFAULT NULL")
        # Bug fix: this comment describes overrideorigin but was attached
        # to suite.overrideprocess, clobbering the comment set just above.
        c.execute("COMMENT ON COLUMN suite.overrideorigin IS %s", ['If NOT NULL, check-overrides will take missing overrides from the named suite'])

        # Migrate config file values into database
        if "Check-Overrides::OverrideSuites" in cnf:
            for suitename in cnf.subtree("Check-Overrides::OverrideSuites").list():
                if cnf.get("Check-Overrides::OverrideSuites::%s::Process" % suitename, "0") == "1":
                    print("Marking %s to have overrides processed automatically" % suitename.lower())
                    c.execute("UPDATE suite SET overrideprocess = TRUE WHERE suite_name = %s", [suitename.lower()])

                originsuite = cnf.get("Check-Overrides::OverrideSuites::%s::OriginSuite" % suitename, '')
                if originsuite != '':
                    print("Setting %s to use %s as origin for overrides" % (suitename.lower(), originsuite.lower()))
                    c.execute("UPDATE suite SET overrideorigin = %s WHERE suite_name = %s", [originsuite.lower(), suitename.lower()])

        # Whether control-suite --set may be used without forcing.
        c.execute("ALTER TABLE suite ADD COLUMN allowcsset BOOLEAN NOT NULL DEFAULT FALSE")
        c.execute("COMMENT ON COLUMN suite.allowcsset IS %s", ['Allow control-suite to be used with the --set option without forcing'])

        # Import historical hard-coded values
        c.execute("UPDATE suite SET allowcsset = TRUE WHERE suite_name IN ('testing', 'squeeze-updates')")

        c.execute("UPDATE config SET value = '70' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError('Unable to apply sick update 70, rollback issued. Error message : %s' % (str(msg)))
def accept(directory, upload):
    """Install an accepted upload into the archive and do the follow-up
    bookkeeping: buildinfos, urgency log, announcements, moving the
    .changes to the done directory, and summary statistics."""
    cnf = Config()

    Logger.log(['ACCEPT', upload.changes.filename])
    print "ACCEPT"

    upload.install()
    process_buildinfos(upload)

    # True when at least one final suite is a real suite (no policy queue).
    accepted_to_real_suite = False
    for suite in upload.final_suites:
        accepted_to_real_suite = accepted_to_real_suite or suite.policy_queue is None

    sourceful_upload = 'source' in upload.changes.architectures

    control = upload.changes.changes
    if sourceful_upload and not Options['No-Action']:
        urgency = control.get('Urgency')
        # Fall back to the default when the .changes urgency is unknown.
        if urgency not in cnf.value_list('Urgency::Valid'):
            urgency = cnf['Urgency::Default']
        UrgencyLog().log(control['Source'], control['Version'], urgency)

    pu = get_processed_upload(upload)
    daklib.announce.announce_accept(pu)

    # Move .changes to done, but only for uploads that were accepted to a
    # real suite. process-policy will handle this for uploads to queues.
    if accepted_to_real_suite:
        src = os.path.join(upload.directory, upload.changes.filename)

        now = datetime.datetime.now()
        donedir = os.path.join(cnf['Dir::Done'], now.strftime('%Y/%m/%d'))
        dst = os.path.join(donedir, upload.changes.filename)
        dst = utils.find_next_free(dst)

        upload.transaction.fs.copy(src, dst, mode=0o644)

    SummaryStats().accept_count += 1
    SummaryStats().accept_bytes += upload.changes.bytes
def do_update(self):
    """Insert the 'unprivgroup' config entry and bump db_revision to 86."""
    print(__doc__)
    try:
        cnf = Config()
        cursor = self.db.cursor()

        cursor.execute("INSERT INTO config (name, value) VALUES('unprivgroup', 'dak-unpriv')")

        cursor.execute("UPDATE config SET value = '86' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        # Abort the transaction and surface a dak-specific error.
        self.db.rollback()
        raise DBUpdateError('Unable to apply sick update 86, rollback issued. Error message: {0}'.format(msg))
def sign_release_dir(suite, dirname):
    """Create a detached signature (Release.gpg) and an inline-signed copy
    (InRelease) of the Release file in *dirname*, using the keys
    configured for *suite*."""
    cnf = Config()

    if 'Dinstall::SigningKeyring' in cnf or 'Dinstall::SigningHomedir' in cnf:
        # Base gpg invocation; prefer SHA256 digests.
        arguments = [
            '/usr/bin/gpg', '--no-options', '--no-tty', '--batch',
            '--armour', '--personal-digest-preferences', 'SHA256',
        ]
        if 'Dinstall::SigningHomedir' in cnf:
            arguments.extend(['--homedir', cnf['Dinstall::SigningHomedir']])
        if 'Dinstall::SigningPassphraseFile' in cnf:
            # Loopback pinentry lets gpg read the passphrase from a file.
            arguments.extend([
                '--pinentry-mode', 'loopback', '--passphrase-file',
                cnf['Dinstall::SigningPassphraseFile']
            ])
        if 'Dinstall::SigningKeyring' in cnf:
            arguments.extend(
                ['--secret-keyring', cnf['Dinstall::SigningKeyring']])
        if 'Dinstall::SigningPubKeyring' in cnf:
            arguments.extend(['--keyring', cnf['Dinstall::SigningPubKeyring']])

        relname = os.path.join(dirname, 'Release')

        # Remove stale signatures before re-signing.
        dest = os.path.join(dirname, 'Release.gpg')
        if os.path.exists(dest):
            os.unlink(dest)

        inlinedest = os.path.join(dirname, 'InRelease')
        if os.path.exists(inlinedest):
            os.unlink(inlinedest)

        # Sign with every key configured for the suite.
        for keyid in suite.signingkeys or []:
            arguments.extend(['--local-user', keyid])

        with open(relname, 'r') as stdin:
            with open(dest, 'w') as stdout:
                arguments_sign = arguments + ['--detach-sign']
                subprocess.check_call(arguments_sign, stdin=stdin, stdout=stdout)

            # Rewind Release so it can be read again for the inline copy.
            stdin.seek(0)
            with open(inlinedest, 'w') as stdout:
                arguments_sign = arguments + ['--clearsign']
                subprocess.check_call(arguments_sign, stdin=stdin, stdout=stdout)
def main():
    """Entry point for `dak dominate` (per-suite variant): remove obsolete
    packages suite by suite, skipping policy queues."""
    global Options, Logger

    cnf = Config()
    Arguments = [('h', "help", "Obsolete::Options::Help"),
                 ('s', "suite", "Obsolete::Options::Suite", "HasArg"),
                 ('n', "no-action", "Obsolete::Options::No-Action"),
                 ('f', "force", "Obsolete::Options::Force")]
    cnf['Obsolete::Options::Help'] = ''
    cnf['Obsolete::Options::No-Action'] = ''
    cnf['Obsolete::Options::Force'] = ''
    apt_pkg.parse_commandline(cnf.Cnf, Arguments, sys.argv)
    Options = cnf.subtree("Obsolete::Options")
    if Options['Help']:
        usage()

    # Default to all known suites when none was given.
    if 'Suite' not in Options:
        query_suites = DBConn().session().query(Suite)
        suites = [suite.suite_name for suite in query_suites]
        cnf['Obsolete::Options::Suite'] = str(','.join(suites))

    if not Options['No-Action']:
        Logger = daklog.Logger("dominate")
    session = DBConn().session()

    for suite_name in utils.split_args(Options['Suite']):
        suite = session.query(Suite).filter_by(suite_name = suite_name).one()

        # Skip policy queues. We don't want to remove obsolete packages from those.
        policy_queue = session.query(PolicyQueue).filter_by(suite=suite).first()
        if policy_queue is not None:
            continue

        # Untouchable suites are only processed with --force.
        if not suite.untouchable or Options['Force']:
            doDaDoDa(suite.suite_id, session)

    if Options['No-Action']:
        session.rollback()
    else:
        session.commit()

    if Logger:
        Logger.close()
def is_autobyhand(u):
    """Return true-ish when every BYHAND file in upload *u* matches an
    AutomaticByHandPackages configuration entry (and can therefore be
    processed without manual intervention)."""
    cnf = Config()

    all_auto = 1
    any_auto = 0
    for f in u.pkg.files.keys():
        if u.pkg.files[f].has_key("byhand"):
            any_auto = 1

            # filename is of form "PKG_VER_ARCH.EXT" where PKG, VER and ARCH
            # don't contain underscores, and ARCH doesn't contain dots.
            # further VER matches the .changes Version:, and ARCH should be in
            # the .changes Architecture: list.
            if f.count("_") < 2:
                all_auto = 0
                continue

            (pckg, ver, archext) = f.split("_", 2)
            if archext.count(".") < 1 or u.pkg.changes["version"] != ver:
                all_auto = 0
                continue

            # The package must be configured for automatic handling and
            # belong to the expected source package.
            ABH = cnf.subtree("AutomaticByHandPackages")
            if not ABH.has_key(pckg) or \
               ABH["%s::Source" % (pckg)] != u.pkg.changes["source"]:
                print "not match %s %s" % (pckg, u.pkg.changes["source"])
                all_auto = 0
                continue

            (arch, ext) = archext.split(".", 1)
            if arch not in u.pkg.changes["architecture"]:
                all_auto = 0
                continue

            # Remember how to handle this file later on.
            u.pkg.files[f]["byhand-arch"] = arch
            u.pkg.files[f]["byhand-script"] = ABH["%s::Script" % (pckg)]

    # True only if there was at least one BYHAND file and all of them
    # could be matched automatically.
    return any_auto and all_auto
def do_update(self):
    """Create the public 'world' schema with a read-only files view and
    bump db_revision to 79."""
    print __doc__
    try:
        cnf = Config()

        c = self.db.cursor()

        c.execute("CREATE SCHEMA world")
        c.execute("GRANT USAGE ON SCHEMA world TO PUBLIC")
        # Future objects in the schema: read-only for everyone, full
        # access for ftpmaster.
        c.execute(
            "ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT SELECT ON TABLES TO PUBLIC"
        )
        c.execute(
            "ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT ALL ON TABLES TO ftpmaster"
        )
        c.execute(
            "ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT SELECT ON SEQUENCES TO PUBLIC"
        )
        c.execute(
            "ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT ALL ON SEQUENCES TO ftpmaster"
        )
        c.execute(
            "ALTER DEFAULT PRIVILEGES IN SCHEMA world GRANT ALL ON FUNCTIONS TO ftpmaster"
        )
        # NOTE(review): the scalar subquery below uses `=` with
        # `WHERE name IN (three names)` — it errors if more than one of
        # those archives exists in the archive table; confirm intent.
        c.execute("""
            CREATE OR REPLACE VIEW world."files-1" AS
              SELECT
                files.id AS id,
                component.name || '/' || files.filename AS filename,
                files.size AS size,
                files.md5sum AS md5sum,
                files.sha1sum AS sha1sum,
                files.sha256sum AS sha256sum,
                files.last_used AS last_used,
                files.created AS created,
                files.modified AS modified
              FROM files
              JOIN files_archive_map fam ON files.id = fam.file_id
              JOIN component ON fam.component_id = component.id
              WHERE fam.archive_id = (SELECT id FROM archive WHERE name IN ('backports', 'ftp-master', 'security'))
            """)

        c.execute("UPDATE config SET value = '79' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError(
            'Unable to apply sick update 79, rollback issued. Error message: {0}'
            .format(msg))
def do_update(self):
    """Grant access to the policy_queue_byhand_file id sequence and bump
    db_revision to 81."""
    print(__doc__)
    try:
        cnf = Config()
        cursor = self.db.cursor()

        # ftpmaster needs full sequence access; everyone else read-only.
        cursor.execute("GRANT SELECT, UPDATE, USAGE ON policy_queue_byhand_file_id_seq TO ftpmaster")
        cursor.execute("GRANT SELECT ON policy_queue_byhand_file_id_seq TO public")

        cursor.execute("UPDATE config SET value = '81' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError('Unable to apply sick update 81, rollback issued. Error message: {0}'.format(msg))
def do_update(self):
    """Add archive.stayofexecution (deletion grace period) and bump
    db_revision to 77."""
    print(__doc__)
    try:
        cnf = Config()
        cursor = self.db.cursor()

        # The default comes from the legacy Clean-Suites::StayOfExecution
        # setting (presumably seconds; 129600 = 36h — interpreted by
        # PostgreSQL when cast to INTERVAL).
        grace_period = cnf.get('Clean-Suites::StayOfExecution', '129600')
        cursor.execute(
            "ALTER TABLE archive ADD COLUMN stayofexecution INTERVAL NOT NULL DEFAULT %s",
            (grace_period, ))
        # Queue archives get no grace period at all.
        cursor.execute(
            "UPDATE archive SET stayofexecution='0' WHERE name IN ('new', 'policy', 'build-queues')"
        )

        cursor.execute("UPDATE config SET value = '77' WHERE name = 'db_revision'")
        self.db.commit()
    except psycopg2.ProgrammingError as msg:
        self.db.rollback()
        raise DBUpdateError(
            'Unable to apply sick update 77, rollback issued. Error message: {0}'
            .format(msg))