def check_distribution_matches(changes, profile, interface):
    """
    The ``suite-mismatch`` checker is a stock dput checker that checks
    packages intended for upload for matching Distribution and last
    Changelog target.

    Profile key: none

    This checker simply verifies that the Changes' Distribution key matches
    the last changelog target. If the mixup is between experimental and
    unstable, it'll remind you to pass ``-c unstable -d experimental`` to
    sbuild.
    """
    changelog_distribution = changes.get("Changes").split()[2].strip(';')
    intent = changelog_distribution.strip()
    actual = changes.get("Distribution").strip()
    if intent != actual:
        logger.info("Upload is targeting %s but the changes will hit %s" % (
            intent, actual))
        err = "Upload is targeting `%s', but the changes will hit `%s'." % (
            intent, actual
        )
        if intent == 'experimental' and (
            actual == 'unstable' or actual == 'sid'
        ):
            err += \
                "\nLooks like you forgot -d experimental when invoking sbuild."
        raise SuiteMismatchError(err)
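
# Illustrative sketch (not part of dput-ng): how the checker above derives
# the changelog target. The first line of a .changes "Changes" field looks
# like "<source> (<version>) <distribution>; urgency=<level>", so the third
# whitespace-separated token (minus the trailing ';') is the target suite.
# The sample values below are made up for demonstration.
def _example_suite_mismatch():
    changes_field = "hello (2.10-1) experimental; urgency=medium"
    distribution_field = "unstable"
    intent = changes_field.split()[2].strip(';')    # -> "experimental"
    actual = distribution_field.strip()             # -> "unstable"
    # check_distribution_matches() would raise SuiteMismatchError here and,
    # since intent is experimental while actual is unstable, suggest passing
    # "-d experimental" to sbuild.
    return intent != actual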
def initialize(self, **kwargs):
    """
    See :meth:`dput.uploader.AbstractUploader.initialize`
    """
    fqdn = self._config['fqdn']
    incoming = self._config['incoming']

    ssh_options = []
    if "ssh_options" in self._config:
        ssh_options.extend(self._config['ssh_options'])
    if 'port' in self._config:
        ssh_options.append("-oPort=%d" % self._config['port'])

    username = None
    if 'login' in self._config and self._config['login'] != "*":
        username = self._config['login']

    if incoming.startswith('~/'):
        logger.warning("SFTP does not support ~/path, continuing with "
                       "relative directory name instead.")
        incoming = incoming[2:]

    if username:
        logger.info("Logging into host %s as %s" % (fqdn, username))
    else:
        logger.info("Logging into host %s" % fqdn)

    self._sftp = Sftp(servername=fqdn, username=username,
                      ssh_options=ssh_options)
    self.incoming = incoming
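
# Illustrative sketch (not part of dput-ng): the profile keys read by
# initialize() above, shown as a plain dict. The host name and paths are
# made-up examples; only keys referenced in the code ('fqdn', 'incoming',
# 'login', 'port', 'ssh_options') are shown.
EXAMPLE_SSH_PROFILE = {
    "fqdn": "upload.example.org",
    "incoming": "/srv/upload/incoming",
    "login": "uploader",            # "*" means "do not force a username"
    "port": 2222,                   # becomes the "-oPort=2222" ssh option
    "ssh_options": ["-oConnectTimeout=10"],
}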
def upload_commands_file(filename, upload_filename, profile, args):
    with uploader(profile['method'], profile, simulate=args.simulate) as obj:
        logger.info("Uploading %s to %s" % (
            upload_filename,
            profile['name']
        ))
        obj.upload_file(filename, upload_filename=upload_filename)
def lintian(changes, profile, interface):
    """
    The ``lintian`` checker is a stock dput checker that checks packages
    intended for upload for common mistakes, using the static checking
    tool, `lintian <http://lintian.debian.org/>`_.

    Profile key: ``lintian``

    Example profile::

        {
            "run_lintian": true,
            "lintian": {
            }
        }

    No keys are currently supported, but there are plans to set custom
    ignore lists, etc.
    """
    if "run_lintian" in profile:
        logger.warning("Setting 'run_lintian' is deprecated. "
                       "Please configure the lintian checker instead.")
        if not profile['run_lintian']:  # XXX: Broken. Fixme.
            logger.info("skipping lintian checking, enable with "
                        "run_lintian = 1 in your dput.cf")
            return

    tags = lint(
        changes._absfile,
        pedantic=True,
        info=True,
        experimental=True
    )

    sorted_tags = {}
    for tag in tags:
        if not tag['severity'] in sorted_tags:
            sorted_tags[tag['severity']] = {}
        if tag['tag'] not in sorted_tags[tag['severity']]:
            sorted_tags[tag['severity']][tag['tag']] = tag
    tags = sorted_tags

    # XXX: Make this configurable
    if "E" not in tags:
        return

    for tag in set(tags["E"]):
        print " - %s: %s" % (tags["E"][tag]['severity'], tag)

    inp = interface.boolean('Lintian Checker',
                            'Upload despite Lintian finding issues?',
                            default=BUTTON_NO)
    if not inp:
        raise LintianHookException(
            "User didn't own up to the Lintian issues"
        )
    else:
        logger.warning("Uploading with outstanding Lintian issues.")
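
# Illustrative sketch (not part of dput-ng): the severity/tag grouping the
# checker above builds before deciding whether to prompt. The lintian
# results below are a made-up sample; only entries with severity "E"
# trigger the interactive question.
def _example_group_lintian_tags():
    tags = [
        {"severity": "E", "tag": "no-copyright-file"},
        {"severity": "W", "tag": "binary-without-manpage"},
        {"severity": "E", "tag": "no-copyright-file"},  # duplicates collapse
    ]
    sorted_tags = {}
    for tag in tags:
        sorted_tags.setdefault(tag['severity'], {})
        sorted_tags[tag['severity']].setdefault(tag['tag'], tag)
    # -> {"E": {"no-copyright-file": {...}},
    #     "W": {"binary-without-manpage": {...}}}
    return sorted_tags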
def check_gpg_signature(changes, profile, interface):
    """
    The ``gpg`` checker is a stock dput checker that checks packages
    intended for upload for a GPG signature.

    Profile key: ``gpg``

    Example profile::

        {
            "allowed_keys": [
                "8F049AD82C92066C7352D28A7B585B30807C2A87",
                "B7982329"
            ]
        }

    ``allowed_keys`` is an optional entry which contains all the keys that
    may upload to this host. This can come in handy if you use more than
    one key to upload to more than one host. Use any length of the last N
    chars of the fingerprint.
    """
    if "allow_unsigned_uploads" in profile:
        if profile['allow_unsigned_uploads']:
            logger.info("Not checking GPG signature due to "
                        "allow_unsigned_uploads being set.")
            return

    gpg = {}
    if 'gpg' in profile:
        gpg = profile['gpg']

    try:
        key = changes.validate_signature()
        if 'allowed_keys' in gpg:
            allowed_keys = gpg['allowed_keys']

            found = False
            for k in allowed_keys:
                if k == key[-len(k):]:
                    logger.info("Key %s is trusted to upload to this host." % (
                        k
                    ))
                    found = True

            if not found:
                raise GPGCheckerError("Key %s is not in %s" % (
                    key,
                    allowed_keys
                ))

    except ChangesFileException as e:
        raise GPGCheckerError(
            "No valid signature on %s: %s" % (changes.get_filename(), e)
        )
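
# Illustrative sketch (not part of dput-ng): the suffix match used for
# 'allowed_keys' above -- an entry is accepted when it equals the last
# len(entry) characters of the signing key's fingerprint. The fingerprint
# is the one from the docstring example.
def _example_allowed_key_match():
    key = "8F049AD82C92066C7352D28A7B585B30807C2A87"
    allowed_keys = ["B7982329", "807C2A87"]
    return [k for k in allowed_keys if k == key[-len(k):]]  # -> ["807C2A87"]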
def validate(self, check_hash="sha1", check_signature=True):
    """
    See :meth:`validate_checksums` for ``check_hash``, and
    :meth:`validate_signature` if ``check_signature`` is True.
    """
    self.validate_checksums(check_hash)
    if check_signature:
        self.validate_signature(check_signature)
    else:
        logger.info("Not checking signature")
def run_hook(name, hook, changes, profile):
    """
    Run a hook (by the name of ``hook``) against the changes file (by the
    name of ``changes``), with the upload profile (named ``profile``).

    args:
        ``hook`` (str) string of the hook (which is the name of the JSON
            file which contains the hook def)

        ``changes`` (:class:`dput.changes.Changes`) changes file that the
            hook should be run against.

        ``profile`` (dict) dictionary of the profile that will help guide
            the hook's runtime.
    """
    logger.info("running %s: %s" % (name, hook['description']))
    return run_func_by_name('hooks', name, changes, profile)
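
# Illustrative sketch (not part of dput-ng): a minimal hook definition as
# consumed by run_hook() above. Only the 'description' key is read here;
# whatever else a real hook's JSON file contains is resolved through
# run_func_by_name.
EXAMPLE_HOOK = {
    "description": "check the package with lintian",
}
# run_hook("lintian", EXAMPLE_HOOK, changes, profile) would log
# "running lintian: check the package with lintian" before dispatching.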
def validate(self, args):
    # TODO: argument can be either a path or a base name, but then the
    #       user most likely wants to add --searchdirs
    file_list = []

    if args.file:
        for argument in args.file:
            if argument.endswith("changes"):
                # force searchdirs
                args.searchdirs = True
                changes_file = Changes(filename=argument)
                file_list += changes_file.get_files()
                file_list.append(changes_file.get_filename())
                logger.info("Expanding package list for removals to: %s" %
                            reduce(lambda x, xs: xs + ", " + x, file_list))
        args.file = file_list
    else:
        raise RmCommandError("No file to be removed supplied?")
def validate(self, args):
    if args.force:
        return
    if not os.path.exists(DM_KEYRING):
        raise DmCommandError(
            "To manage DM permissions, the `debian-keyring' "
            "keyring package must be installed. "
            "File %s does not exist" % (DM_KEYRING)
        )
        return

    # I HATE embedded functions. But OTOH this function is not usable
    # somewhere else, so...
    def pretty_print_list(tuples):
        fingerprints = ""
        for entry in tuples:
            fingerprints += "\n- %s (%s)" % entry
        return fingerprints

    # TODO: Validate input. Packages must exist (i.e. be not NEW)

    (out, err, exit_status) = run_command([
        "gpg", "--no-options", "--no-auto-check-trustdb",
        "--no-default-keyring", "--list-key", "--with-colons",
        "--fingerprint", "--keyring", DM_KEYRING, args.dm
    ])

    if exit_status != 0:
        logger.warning("")
        logger.warning("There was an error looking up the DM's key")
        logger.warning("")
        logger.warning(" dput-ng uses the DM keyring in /usr/share/keyrings/")
        logger.warning(" as the keyring to pull full fingerprints from.")
        logger.warning("")
        logger.warning(" Please ensure your keyring is up to date:")
        logger.warning("")
        logger.warning("   sudo apt-get install debian-keyring")
        logger.warning("")
        logger.warning(" Or, if you can not get the keyring, you may use their")
        logger.warning(" full fingerprint (without spaces) and pass the --force")
        logger.warning(" argument in. This goes to dak directly, so try to")
        logger.warning(" pay attention to formatting.")
        logger.warning("")
        logger.warning("")
        raise DmCommandError("DM fingerprint lookup "
                             "for argument %s failed. "
                             "GnuPG returned error: %s" % (args.dm, err))

    possible_fingerprints = []
    current_uid = None
    next_line_contains_fpr = False
    gpg_out = out.split("\n")
    for line in gpg_out:
        if next_line_contains_fpr:
            assert(line.startswith("fpr"))
            parsed_fingerprint = line.split(":")
            # fpr:::::::::CACE80AE01512F9AE8AB80D61C01F443C9C93C5A:
            possible_fingerprints.append((current_uid,
                                          parsed_fingerprint[9],))
            next_line_contains_fpr = False
            continue

        elif not line.startswith("pub"):
            continue

        else:
            # will give a line like:
            # pub:-:4096:1:7B585B30807C2A87:2011-08-18:::-:
            #   Paul Tagliamonte <*****@*****.**>::scESC:
            # without the newline
            parsed_fingerprint = line.split(":")
            current_uid = parsed_fingerprint[9]
            next_line_contains_fpr = True

    if len(possible_fingerprints) > 1:
        raise DmCommandError("DM argument `%s' is ambiguous. "
                             "Possible choices:\n%s" %
                             (args.dm,
                              pretty_print_list(possible_fingerprints)))

    possible_fingerprints = possible_fingerprints[0]
    logger.info("Picking DM %s with fingerprint %s" % possible_fingerprints)
    args.dm = possible_fingerprints[1]
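
# Illustrative sketch (not part of dput-ng): parsing the colon-delimited
# output that validate() above asks GnuPG for. A 'pub' record carries the
# uid in field 10, and the following 'fpr' record carries the full
# fingerprint in field 10. The two records below are a made-up sample in
# the same shape as the comments in the code above.
def _example_parse_colon_output():
    out = (
        "pub:-:4096:1:7B585B30807C2A87:2011-08-18:::-:"
        "Paul Tagliamonte <*****@*****.**>::scESC:\n"
        "fpr:::::::::CACE80AE01512F9AE8AB80D61C01F443C9C93C5A:\n"
    )
    results = []
    current_uid = None
    for line in out.split("\n"):
        fields = line.split(":")
        if line.startswith("pub"):
            current_uid = fields[9]
        elif line.startswith("fpr"):
            results.append((current_uid, fields[9]))
    return results  # list of (uid, fingerprint) tuples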
def invoke_dput(changes, args):
    """
    .. warning::
        This method may change names. Please use it via :func:`dput.upload`.
        Also, please don't depend on args; that's likely to change shortly.

    Given a changes file ``changes``, and arguments to dput ``args``,
    upload a package to the archive that makes sense.
    """
    profile = dput.profile.load_profile(args.host)
    check_modules(profile)

    fqdn = None
    if "fqdn" in profile:
        fqdn = profile['fqdn']
    else:
        fqdn = profile['name']

    logfile = determine_logfile(changes, profile, args)
    tmp_logfile = tempfile.NamedTemporaryFile()
    if should_write_logfile(args):
        full_upload_log = profile["full_upload_log"]
        if args.full_upload_log:
            full_upload_log = args.full_upload_log
        _write_upload_log(tmp_logfile.name, full_upload_log)

    if args.delayed:
        make_delayed_upload(profile, args.delayed)

    if args.simulate:
        logger.warning("Not uploading for real - dry run")

    if args.passive:
        force_passive_ftp_upload(profile)

    logger.info("Uploading %s using %s to %s (host: %s; directory: %s)" % (
        changes.get_package_name(),
        profile['method'],
        profile['name'],
        fqdn,
        profile['incoming']
    ))

    if 'hooks' in profile:
        run_pre_hooks(changes, profile)
    else:
        logger.trace(profile)
        logger.warning("No hooks defined in the profile. "
                       "Not checking upload.")

    # check only is a special case of -s
    if args.check_only:
        args.simulate = 1

    with uploader(profile['method'], profile,
                  simulate=args.simulate) as obj:

        if args.check_only:
            logger.info("Package %s passes all checks" % (
                changes.get_package_name()
            ))
            return

        if args.no_upload_log:
            logger.info("Not writing upload log upon request")

        files = changes.get_files() + [changes.get_changes_file()]
        for path in files:
            logger.info("Uploading %s%s" % (
                os.path.basename(path),
                " (simulation)" if args.simulate else ""
            ))

            if not args.simulate:
                obj.upload_file(path)

        if args.simulate:
            return

        if 'hooks' in profile:
            run_post_hooks(changes, profile)
        else:
            logger.trace(profile)
            logger.warning("No hooks defined in the profile. "
                           "Not post-processing upload.")

    if should_write_logfile(args):
        tmp_logfile.flush()
        shutil.copy(tmp_logfile.name, logfile)
        # print(tmp_logfile.name)
        tmp_logfile.close()
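
# Illustrative sketch (not part of dput-ng): the argparse attributes that
# invoke_dput() above reads. The values are made-up defaults; the real
# namespace is built by dput's own argument parser.
import argparse

EXAMPLE_DPUT_ARGS = argparse.Namespace(
    host="ftp-master",        # profile name passed to load_profile()
    simulate=False,           # dry run, nothing is uploaded
    check_only=False,         # run the checks, then stop (special case of -s)
    delayed=None,             # delayed-queue upload, if any
    passive=False,            # force passive FTP
    full_upload_log=False,    # override the profile's full_upload_log
    no_upload_log=False,      # skip writing the upload log
)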
def invoke_dcut(args):
    profile = dput.profile.load_profile(args.host)

    fqdn = None
    if "fqdn" in profile:
        fqdn = profile["fqdn"]

    if not "allow_dcut" in profile or not profile["allow_dcut"]:
        raise UploadException(
            "Profile %s does not allow command file uploads. "
            "Please set allow_dcut=1 to allow such uploads" % (
                profile["name"]
            )
        )

    logger.info("Uploading commands file to %s (incoming: %s)" % (
        fqdn or profile["name"],
        profile["incoming"]
    ))

    if args.simulate:
        logger.warning("Not uploading for real - dry run")

    command = args.command
    assert issubclass(type(command), AbstractCommand)
    command.validate(args)

    if args.passive:
        force_passive_ftp_upload(profile)

    upload_path = None
    fh = None
    upload_filename = command.generate_commands_name(profile)
    try:
        if command.cmd_name == "upload":
            logger.debug("Uploading file %s as is to %s" % (
                args.upload_file, profile["name"]
            ))
            if not os.access(args.upload_file, os.R_OK):
                raise DcutError("Cannot access %s: No such file" % (
                    args.upload_file
                ))
            upload_path = args.upload_file
        else:
            fh = tempfile.NamedTemporaryFile(mode="w+r", delete=False)
            (name, email) = write_header(fh, profile, args)
            command.produce(fh, args)
            fh.flush()
            # print fh.name
            fh.close()

            signing_key = None
            if "default_keyid" in profile:
                signing_key = profile["default_keyid"]
            if args.keyid:
                signing_key = args.keyid

            sign_file(fh.name, signing_key, profile, name, email)
            upload_path = fh.name

        if not args.simulate and not args.output:
            upload_commands_file(upload_path, upload_filename, profile, args)
        elif args.output and not args.simulate:
            if os.access(args.output, os.R_OK):
                logger.error("Not writing %s: File already exists" % (
                    args.output
                ))
                # ... but intentionally do nothing
                # TODO: or raise exception?
                return
            shutil.move(fh.name, args.output)
        elif args.simulate:
            pass
        else:
            # we should *never* come here
            assert False

    finally:
        if fh and os.access(fh.name, os.R_OK):
            os.unlink(fh.name)
def initialize(self, **kwargs):
    """
    See :meth:`dput.uploader.AbstractUploader.initialize`
    """
    fqdn = self._config['fqdn']
    incoming = self._config['incoming']

    self.sftp_config = {}
    if "sftp" in self._config:
        self.sftp_config = self._config['sftp']

    self.putargs = {'confirm': False}
    if "confirm_upload" in self.sftp_config:
        self.putargs['confirm'] = self.sftp_config['confirm_upload']

    if incoming.startswith('~/'):
        logger.warning("SFTP does not support ~/path, continuing with "
                       "relative directory name instead.")
        incoming = incoming[2:]
    # elif incoming.startswith('~') and not self.host_is_launchpad:
    #     raise SftpUploadException("SFTP doesn't support ~path. "
    #                               "if you need $HOME paths, use SCP.")
    # XXX: What to do here?? - PRT

    ssh_kwargs = {
        "port": 22,
        "compress": True
    }

    # XXX: Timeout override

    if 'port' in self._config:
        ssh_kwargs['port'] = self._config['port']

    if 'scp_compress' in self._config:
        ssh_kwargs['compress'] = self._config['scp_compress']

    config = paramiko.SSHConfig()
    if os.path.exists('/etc/ssh/ssh_config'):
        config.parse(open('/etc/ssh/ssh_config'))
    if os.path.exists(os.path.expanduser('~/.ssh/config')):
        config.parse(open(os.path.expanduser('~/.ssh/config')))
    o = config.lookup(fqdn)

    user = find_username(self._config)
    if "user" in o:
        user = o['user']
    ssh_kwargs['username'] = user

    if 'identityfile' in o:
        pkey = os.path.expanduser(o['identityfile'])
        ssh_kwargs['key_filename'] = pkey

    logger.info("Logging into host %s as %s" % (fqdn, user))
    self._sshclient = paramiko.SSHClient()
    if 'globalknownhostsfile' in o:
        for gkhf in o['globalknownhostsfile'].split():
            if os.path.isfile(gkhf):
                self._sshclient.load_system_host_keys(gkhf)
    else:
        files = [
            "/etc/ssh/ssh_known_hosts",
            "/etc/ssh/ssh_known_hosts2"
        ]
        for fpath in files:
            if os.path.isfile(fpath):
                self._sshclient.load_system_host_keys(fpath)

    if 'userknownhostsfile' in o:
        for u in o['userknownhostsfile'].split():
            # actually, ssh supports a bit more than ~/,
            # but that would be a task for paramiko...
            ukhf = os.path.expanduser(u)
            if os.path.isfile(ukhf):
                self._sshclient.load_host_keys(ukhf)
    else:
        for u in ['~/.ssh/known_hosts2', '~/.ssh/known_hosts']:
            ukhf = os.path.expanduser(u)
            if os.path.isfile(ukhf):
                # Ideally, that should be load_host_keys, so that the
                # known_hosts file can be written again. But paramiko can
                # destroy the contents or parts of it, so no writing by
                # using load_system_host_keys here, too:
                self._sshclient.load_system_host_keys(ukhf)

    self._sshclient.set_missing_host_key_policy(AskToAccept(self))
    self._auth(fqdn, ssh_kwargs)
    try:
        self._sftp = self._sshclient.open_sftp()
    except paramiko.SSHException as e:
        raise SftpUploadException(
            "Error opening SFTP channel to %s (perhaps sftp is "
            "disabled there?): %s" % (
                fqdn,
                repr(e)
            )
        )
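
# Illustrative sketch (not part of dput-ng): the paramiko SSHConfig lookup
# used by initialize() above. Parsing a ~/.ssh/config style block and
# looking up a host yields a dict whose keys ('user', 'port', ...) are then
# folded into the connection arguments. The config text is made up.
def _example_ssh_config_lookup():
    import StringIO
    import paramiko

    config = paramiko.SSHConfig()
    config.parse(StringIO.StringIO(
        "Host upload.example.org\n"
        "    User uploader\n"
        "    Port 2222\n"
    ))
    o = config.lookup("upload.example.org")
    return o.get("user"), o.get("port")  # -> ("uploader", "2222")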