Example #1
def write_header(fh, profile, args):

    email_address = os.environ.get("DEBEMAIL", None)
    if email_address is None:
        email_address = os.environ.get("EMAIL", None)

    name = os.environ.get("DEBFULLNAME", None)

    if not name:
        pwd_entry = pwd.getpwnam(os.getlogin())
        # The GECOS field is comma-separated; the full name is its first part.
        gecos_name = pwd_entry.pw_gecos.split(",", 1)
        if gecos_name[0]:
            name = gecos_name[0]
    if not email_address:
        email_address = socket.getfqdn(socket.gethostname())

    if args.maintainer:
        (name, email_address) = email.utils.parseaddr(args.maintainer)

    logger.debug("Using %s <%s> as uploader identity" % (name, email_address))

    if not name or not email_address:
        raise DcutError(
            "Your name or email could not be retrieved."
            "Please set DEBEMAIL and DEBFULLNAME or provide"
            " a full identity through --maintainer"
        )

    fh.write("Archive: %s\n" % (profile["fqdn"]))
    fh.write("Uploader: %s <%s>\n" % (name, email_address))
    return (name, email_address)
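A minimal, self-contained sketch of the same lookup precedence (DEBEMAIL falls back to EMAIL, DEBFULLNAME falls back to the passwd GECOS field); guess_identity is an illustrative name, not part of dput's API.

import os
import pwd
import socket

def guess_identity():
    # DEBEMAIL wins over EMAIL; DEBFULLNAME wins over the GECOS field.
    email_address = os.environ.get("DEBEMAIL") or os.environ.get("EMAIL")
    name = os.environ.get("DEBFULLNAME")
    if not name:
        # GECOS is comma-separated; the full name is its first part.
        name = pwd.getpwuid(os.getuid()).pw_gecos.split(",", 1)[0] or None
    if not email_address:
        email_address = socket.getfqdn(socket.gethostname())
    return (name, email_address)

print("%s <%s>" % guess_identity())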
Example #2
    def preload(self, configs):
        """
        See :meth:`dput.config.AbstractConfig.preload`
        """
        parser = configparser.ConfigParser()
        if configs is None:
            configs = dput.core.DPUT_CONFIG_LOCATIONS

        for config in configs:
            if not os.access(config, os.R_OK):
                logger.debug("Skipping file %s: Not accessible" % (
                    config
                ))
                continue
            try:
                logger.trace("Parsing %s" % (config))
                # read_file() replaces the deprecated readfp(); the context
                # manager closes the handle deterministically.
                with open(config, 'r') as fp:
                    parser.read_file(fp)
            except IOError as e:
                logger.warning("Skipping file %s: %s" % (
                    config,
                    e
                ))
                continue
            except configparser.ParsingError as e:
                raise DputConfigurationError("Error parsing file %s: %s" % (
                    config,
                    e
                ))
        self.parser = parser
        self.configs = configs
        self.defaults = self._translate_strs(self.get_config("DEFAULT"))
        self.parser.remove_section("DEFAULT")
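As an aside, ConfigParser.read() already tolerates missing files; a sketch of the same layered parse with it (paths illustrative). The original loops manually so it can log each skipped or unparsable file.

import configparser
import os

parser = configparser.ConfigParser()
# read() silently skips files it cannot open and returns the ones it parsed.
parsed = parser.read(["/etc/dput.cf", os.path.expanduser("~/.dput.cf")])
print(parsed)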
Example #3
def determine_logfile(changes, conf, args):
    """
    Figure out what logfile to write to. This is mostly an internal
    implementation. Returns the file to log to, given a changes and
    profile.
    """
    # dak requires '<package>_<version>_<[a-zA-Z0-9+-]+>.changes'

    # XXX: Correct --force behavior
    logfile = changes.get_changes_file()  # XXX: Check for existing one
    xtn = ".changes"
    if logfile.endswith(xtn):
        logfile = "%s.%s.upload" % (logfile[:-len(xtn)], conf['name'])
    else:
        raise UploadException("File %s does not look like a .changes file" % (
            changes.get_filename()
        ))

    if (
        os.access(logfile, os.R_OK) and
        os.stat(logfile).st_size > 0 and
        not args.force
    ):

        raise UploadException("""Package %s was already uploaded to %s
If you want to upload nonetheless, use --force or remove %s""" % (
            changes.get_package_name(),
            conf['name'],
            logfile
        ))

    logger.debug("Writing log to %s" % (logfile))
    return logfile
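A worked example of the rename rule above, with hypothetical values: a profile named ftp-master turns foo_1.0_amd64.changes into foo_1.0_amd64.ftp-master.upload.

changes_file = "foo_1.0_amd64.changes"  # hypothetical
profile_name = "ftp-master"             # hypothetical
xtn = ".changes"
logfile = "%s.%s.upload" % (changes_file[:-len(xtn)], profile_name)
print(logfile)  # foo_1.0_amd64.ftp-master.upload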
Example #4
    def _auth(self, fqdn, ssh_kwargs, _first=0):
        if _first == 3:
            raise SftpUploadException("Failed to authenticate")
        try:
            self._sshclient.connect(fqdn, **ssh_kwargs)
            logger.debug("Logged in!")
        except socket.error as e:
            raise SftpUploadException("SFTP error uploading to %s: %s" % (
                fqdn,
                repr(e)
            ))
        except paramiko.AuthenticationException:
            logger.warning("Failed to auth. Prompting for a login pair.")
            # XXX: Ask for pw only
            user = self.interface.question('please login', 'Username')
            pw = self.interface.password(None, "Password")

            if user is not None:
                ssh_kwargs['username'] = user
            ssh_kwargs['password'] = pw
            self._auth(fqdn, ssh_kwargs, _first=_first + 1)
        except paramiko.SSHException as e:
            raise SftpUploadException("SFTP error uploading to %s: %s" % (
                fqdn,
                repr(e)
            ))
Example #5
def sign_file(filename, keyid=None, profile=None, name=None, email=None):
    logger.debug(
        "Signing file %s - signature hints are key: %s, name: %s, email: %s"
        % (filename, keyid, name, email)
    )

    gpg_path = "gpg"
    if keyid:
        identity_hint = keyid
    else:
        # Hard to see here, but name and email are guaranteed to be set in
        # write_header()
        identity_hint = ""
        if name:
            identity_hint = name
        if email:
            identity_hint += " <%s>" % (email)

    logger.trace("GPG identity hint: %s" % (identity_hint))

    (gpg_output, gpg_output_stderr, exit_status) = run_command(
        [gpg_path, "--default-key", identity_hint, "--status-fd", "1", "--sign", "--armor", "--clearsign", filename]
    )

    if exit_status == -1:
        raise DcutError("Unknown problem while making cleartext signature")

    if exit_status != 0:
        raise DcutError(
            "Failed to make cleartext signature to commands file:\n%s"
            % (gpg_output_stderr))

    if "[GNUPG:] SIG_CREATED" not in gpg_output:
        raise DcutError("Failed to make cleartext signature:\n%s" % (gpg_output_stderr))

    os.unlink(filename)
    shutil.move("%s.asc" % (filename), filename)
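run_command is dput's own wrapper; for comparison, a stand-alone sketch of the same gpg invocation and status check using only the standard library (assumes gpg on PATH and the same flags as above).

import subprocess

def clearsign(filename, identity_hint):
    # --status-fd 1 sends machine-readable status lines to stdout.
    proc = subprocess.run(
        ["gpg", "--default-key", identity_hint, "--status-fd", "1",
         "--sign", "--armor", "--clearsign", filename],
        capture_output=True, text=True)
    if "[GNUPG:] SIG_CREATED" not in proc.stdout:
        raise RuntimeError("clearsign failed:\n%s" % proc.stderr)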
Example #6
def check_debs_in_upload(changes, profile, interface):
    """
    The ``check-debs`` checker is a stock dput checker that checks packages
    intended for upload for .deb packages.

    Profile key: ``check-debs``

    Example profile::

        {
            "skip": false,
            "enforce": "debs"
        }

    ``skip``    controls whether the checker should drop out without checking
                anything at all.

    ``enforce`` controls what we check for. Valid values are
                "debs" or "source". A nonsense value causes the
                checker to warn and skip the check.
    """
    debs = {}
    if 'check-debs' in profile:
        debs = profile['check-debs']

    if 'skip' in debs and debs['skip']:
        logger.debug("Skipping deb checker.")
        return

    enforce_debs = True
    if 'enforce' in debs:
        model = debs['enforce']
        if model == 'debs':
            enforce_debs = True
        elif model == 'source':
            enforce_debs = False
        else:
            logger.warning("Garbage value for check-debs/enforce - is %s,"
                           " valid values are `debs` and `source`. Skipping"
                           " checks." % (model))
            return
    else:
        logger.warning("No `enforce` key in check-debs. Skipping checks.")
        return

    has_debs = False
    for fil in changes.get_files():
        if fil.endswith(('.deb', '.udeb')):
            has_debs = True
            break

    if enforce_debs and not has_debs:
        raise BinaryUploadError(
            "There are no .debs in this upload, and we're enforcing them."
        )
    if not enforce_debs and has_debs:
        raise BinaryUploadError(
            "There are .debs in this upload, and we're enforcing that "
            "there be none."
        )
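The per-file suffix scan reduces to a one-liner with any(); a sketch over a hypothetical file list:

files = ["foo_1.0_amd64.deb", "foo_1.0.dsc"]  # hypothetical upload contents
has_debs = any(f.endswith((".deb", ".udeb")) for f in files)
print(has_debs)  # True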
Example #7
def force_passive_ftp_upload(conf):
    """
    Force FTP to use passive mode.

    Mangles the supplied configuration object
    """
    logger.debug("overriding configuration to force FTP passive mode")
    conf['passive_ftp'] = True
Example #8
    def _auth(self, fqdn, ssh_kwargs, _first=0):
        if _first == 3:
            raise SftpUploadException("Failed to authenticate")
        try:
            self._sshclient.connect(fqdn, **ssh_kwargs)
            logger.debug("Logged in!")
        except socket.error as e:
            raise SftpUploadException("SFTP error uploading to %s: %s" % (
                fqdn,
                repr(e)
            ))
Example #9
    def initialize(self, **kwargs):
        """
        See :meth:`dput.uploader.AbstractUploader.initialize`
        """
        login = find_username(self._config)
        self._scp_base = ["scp", "-p", "-C"]
        # XXX: Timeout?
        if 'port' in self._config:
            self._scp_base += ("-P", "%s" % self._config['port'])
        self._scp_host = "%s@%s" % (login, self._config['fqdn'])
        logger.debug("Using scp to upload to %s" % (self._scp_host))
        logger.warning("SCP is deprecated. Please consider upgrading to SFTP.")
Example #10
    def upload_file(self, filename, upload_filename=None):
        """
        See :meth:`dput.uploader.AbstractUploader.upload_file`
        """

        if not upload_filename:
            upload_filename = os.path.basename(filename)

        upload_filename = os.path.join(self.incoming, upload_filename)
        logger.debug("Writing to: %s" % (upload_filename))

        # put() takes (localpath, remotepath); the local file goes first.
        self._sftp.put(filename, upload_filename)
Example #11
def make_delayed_upload(conf, delayed_days):
    """
    DELAYED uploads to ftp-master simply use another incoming directory
    instead of the default, which is easy enough to implement.

    Mangles the supplied configuration object
    """
    incoming_directory = os.path.join(
        conf['incoming'],
        "DELAYED",
        "%d-day" % (delayed_days)
    )
    logger.debug("overriding upload directory to %s" % (incoming_directory))
    conf['incoming'] = incoming_directory
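A worked example of the path mangling, with a hypothetical incoming directory:

import os

conf = {"incoming": "/srv/upload/incoming"}  # hypothetical
delayed_days = 3
print(os.path.join(conf["incoming"], "DELAYED", "%d-day" % delayed_days))
# /srv/upload/incoming/DELAYED/3-day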
Example #12
    def validate_checksums(self, check_hash="sha1"):
        """
        Validate checksums for a package, using ``check_hash``'s type
        to validate the package.

        Valid ``check_hash`` types:

            * sha1
            * sha256
            * md5
            * md5sum
        """
        logger.debug("validating %s checksums" % (check_hash))

        for filename in self.get_files():
            if check_hash == "sha1":
                hash_type = hashlib.sha1()
                checksums = self.get("Checksums-Sha1")
                field_name = "sha1"
            elif check_hash == "sha256":
                hash_type = hashlib.sha256()
                checksums = self.get("Checksums-Sha256")
                field_name = "sha256"
            elif check_hash in ("md5", "md5sum"):
                hash_type = hashlib.md5()
                checksums = self.get("Files")
                field_name = "md5sum"
            else:
                # Guard against unknown types; without this, hash_type would
                # be undefined below.
                raise ChangesFileException(
                    "Unknown check_hash type: %s" % (check_hash))

            for changed_files in checksums:
                if changed_files["name"] == os.path.basename(filename):
                    break
            else:
                # A bare assert on a non-empty string can never fail; raise
                # explicitly instead.
                raise AssertionError(
                    "get_files() returns different files than Files: knows?!")

            with open(filename, "rb") as fc:
                while True:
                    chunk = fc.read(131072)
                    if not chunk:
                        break
                    hash_type.update(chunk)

            if hash_type.hexdigest() != changed_files[field_name]:
                raise ChangesFileException(
                    "Checksum mismatch for file %s: %s != %s"
                    % (filename, hash_type.hexdigest(), changed_files[field_name])
                )
            else:
                logger.trace("%s Checksum for file %s matches" % (field_name, filename))
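The chunked digest loop can be factored out; a self-contained sketch using hashlib.new() so the algorithm name passes straight through (file_digest is an illustrative name):

import hashlib

def file_digest(path, algorithm="sha1", chunk_size=131072):
    h = hashlib.new(algorithm)
    with open(path, "rb") as fh:
        for chunk in iter(lambda: fh.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()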
Example #13
def check_allowed_distribution(changes, profile, interface):
    """
    The ``allowed-distribution`` checker is a stock dput checker that checks
    packages intended for upload for a valid upload distribution.

    Profile key: none

    Example profile::

        {
            ...
            "allowed_distributions": "(?!UNRELEASED)",
            "distributions": ["unstable", "testing"],
            "disallowed_distributions": []
            ...
        }

    The allowed_distributions key is in Python ``re`` syntax.
    """
    allowed_block = profile.get("allowed-distribution", {})
    suite = changes["Distribution"]
    if "allowed_distributions" in profile:
        srgx = profile["allowed_distributions"]
        if re.match(srgx, suite) is None:
            logger.debug("Distribution does not %s match '%s'" % (suite, profile["allowed_distributions"]))
            raise BadDistributionError("'%s' doesn't match '%s'" % (suite, srgx))

    if "distributions" in profile:
        allowed_dists = profile["distributions"]
        if suite not in allowed_dists.split(","):
            raise BadDistributionError("'%s' doesn't contain distribution '%s'" % (suite, profile["distributions"]))

    if "disallowed_distributions" in profile:
        disallowed_dists = profile["disallowed_distributions"]
        if suite in disallowed_dists:
            raise BadDistributionError("'%s' is in '%s'" % (suite, disallowed_dists))

    if "codenames" in profile and profile["codenames"]:
        codenames = load_config("codenames", profile["codenames"])
        blocks = allowed_block.get("codename-groups", [])
        if blocks:
            failed = True
            for block in blocks:
                names = codenames.get(block, [])
                if suite in names:
                    failed = False
                    break

            if failed:
                raise BadDistributionError("`%s' not in the codename group" % (suite))
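The allowed_distributions pattern from the docstring is a negative lookahead, so re.match() fails exactly for the excluded suite; in isolation:

import re

srgx = "(?!UNRELEASED)"
for suite in ("unstable", "UNRELEASED"):
    print(suite, bool(re.match(srgx, suite)))
# unstable True
# UNRELEASED False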
Example #14
def load_commands():
    commands = []
    for command in find_commands():
        logger.debug("importing command: %s" % (command))
        try:
            # XXX: Stubbed the profile for now. That ignores any user choice
            #      on the profile.
            #      Reason being that the profile and the argument parser is a
            #      transitive circular dependency. That should be fixed at some
            #      point.
            with get_obj_by_name("commands", command, {}) as (obj, interface):
                commands.append(obj(interface))
        except NoSuchConfigError:
            raise DputConfigurationError("No such command: `%s'" % (command))

    return commands
Example #15
    def get_config(self, name):
        """
        See :meth:`dput.config.AbstractConfig.get_config`
        """
        logger.trace("Loading entry %s" % (name))
        ret = self.defaults.copy()
        for config in self.configs:
            obj = config.get_config(name)
            logger.trace(obj)
            ret.update(obj)
            ret = _config_cleanup(ret)
            logger.trace('Rewrote to:')
            # Log the rewritten dict, not the stale pre-cleanup object.
            logger.trace(ret)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Got configuration: %s" % (name))
            for key in ret:
                logger.debug("\t%s: %s" % (key, ret[key]))

        validate_object('config', ret, 'profiles/%s' % (name))
        return ret
Example #16
    def upload_file(self, filename, upload_filename=None):
        """
        See :meth:`dput.uploader.AbstractUploader.upload_file`
        """

        if not upload_filename:
            upload_filename = os.path.basename(filename)

        upload_filename = os.path.join(self.incoming, upload_filename)
        logger.debug("Writing to: %s" % (upload_filename))

        try:
            self._sftp.put(filename, upload_filename, **self.putargs)
        except IOError as e:
            if e.errno == 13:  # EACCES: permission denied
                self.upload_write_error(e)
            else:
                raise SftpUploadException("Could not upload file %s: %s" % (
                    filename,
                    e
                ))
Example #17
def check_allowed_distribution(changes, profile, interface):
    """
    The ``allowed-distribution`` checker is a stock dput checker that checks
    packages intended for upload for a valid upload distribution.

    Profile key: none

    Example profile::

        {
            ...
            "allowed_distributions": "(?!UNRELEASED)",
            "distributions": ["unstable", "testing"]
            ...
        }

    The allowed_distributions key is in Python ``re`` syntax.
    """
    suite = changes['Distribution']
    if 'allowed_distributions' in profile:
        srgx = profile['allowed_distributions']
        if re.match(srgx, suite) is None:
            logger.debug("Distribution does not %s match '%s'" % (
                suite,
                profile['allowed_distributions']
            ))
            raise BadDistributionError("'%s' doesn't match '%s'" % (
                suite,
                srgx
            ))

    if 'distributions' in profile:
        allowed_dists = profile['distributions']
        if suite not in allowed_dists.split(","):
            raise BadDistributionError(
                "'%s' doesn't contain distribution '%s'" % (
                    profile['distributions'],
                    suite
                ))
Example #18
def get_obj(cls, checker_method):  # checker_method is a bad name.
    """
    Get an object by plugin def (``checker_method``) in class ``cls`` (such
    as ``hooks``).
    """
    logger.trace("Attempting to resolve %s %s" % (cls, checker_method))
    try:
        config = load_config(cls, checker_method)
        validate_object('plugin', config, "%s/%s" % (cls, checker_method))

        if config is None or config == {}:
            raise NoSuchConfigError("No such config")
    except NoSuchConfigError:
        logger.debug("failed to resolve config %s" % (checker_method))
        return None
    path = config['path']
    logger.trace("loading %s %s" % (cls, path))
    try:
        return load_obj(path)
    except ImportError as e:
        logger.warning("failed to resolve path %s: %s" % (path, e))
        return None
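load_obj is dput's dotted-path resolver; a minimal sketch of how such a helper is commonly written with importlib, assuming the last dotted component names the object (the real helper may differ):

import importlib

def load_obj(path):
    # "pkg.module.attr" -> import pkg.module, then fetch attr from it.
    module_name, _, obj_name = path.rpartition(".")
    return getattr(importlib.import_module(module_name), obj_name)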
Example #19
    def upload_file(self, filename, upload_filename=None):
        """
        See :meth:`dput.uploader.AbstractUploader.upload_file`
        """
        # Note: the upload_filename argument is ignored; the remote name is
        # always derived from the local basename.
        upload_filename = self._baseurl + os.path.basename(filename)
        logger.debug("Upload to %s" % (upload_filename))

        (mime_type, _) = mimetypes.guess_type(filename)
        fh = open(filename, 'rb')
        mmaped_fh = mmap.mmap(fh.fileno(), 0, access=mmap.ACCESS_READ)
        # urllib.request replaces the Python 2-only urllib2; Request accepts
        # the HTTP method directly.
        req = urllib.request.Request(
            url=upload_filename, data=mmaped_fh, method='PUT')
        req.add_header("Content-Type", mime_type)

        try:
            urllib.request.urlopen(req)
        except urllib.error.HTTPError as e:
            if e.code == 403:
                self.upload_write_error(e)
            else:
                raise HttpUploadException(e)
        finally:
            mmaped_fh.close()
            fh.close()
Example #20
def validate_object(schema, obj, name):
    sobj = None
    for root in dput.core.SCHEMA_DIRS:
        if sobj is not None:
            logger.debug("Skipping %s" % (root))
            continue

        logger.debug("Loading schema %s from %s" % (schema, root))
        spath = "%s/%s.json" % (
            root,
            schema
        )
        try:
            if os.path.exists(spath):
                with open(spath, 'r') as fp:
                    sobj = json.load(fp)
            else:
                logger.debug("No such config: %s" % (spath))
        except ValueError as e:
            raise DputConfigurationError("syntax error in %s: %s" % (
                spath,
                e
            ))

    if sobj is None:
        logger.critical("Schema not found: %s" % (schema))
        raise DputConfigurationError("No such schema: %s" % (schema))

    try:
        import validictory
        validictory.validate(obj, sobj)
    except ImportError:
        pass
    except validictory.validator.ValidationError as e:
        err = str(e)
        error = "Error with config file %s - %s" % (
            name,
            err
        )
        ex = InvalidConfigError(error)
        ex.obj = obj
        ex.root = e
        ex.config_name = name
        ex.sdir = dput.core.SCHEMA_DIRS
        ex.schema = schema
        raise ex
Example #21
    def initialize(self, **kwargs):
        """
        See :meth:`dput.uploader.AbstractUploader.initialize`
        """
        logger.debug("Logging into %s as %s" % (
            self._config["fqdn"],
            self._config["login"]
        ))

        conf = self._config.get('ftp', {})
        timeout = conf.get('timeout', 10)

        try:
            self._ftp = ftplib.FTP(
                self._config["fqdn"],
                self._config["login"],
                None,
                timeout=timeout
            )
        except Exception as e:
            raise FtpUploadException(
                "Could not establish FTP connection to %s: %s" % (
                    self._config['fqdn'],
                    e
                )
            )

        if self._config["passive_ftp"] or kwargs['passive_mode']:
            logger.debug("Enable PASV mode")
            self._ftp.set_pasv(True)
        if self._config["incoming"]:
            logger.debug("Change directory to %s" % (
                self._config["incoming"]
            ))
            try:
                self._ftp.cwd(self._config["incoming"])
            except ftplib.error_perm as e:
                raise FtpUploadException(
                    "Could not change directory to %s: %s" % (
                        self._config["incoming"],
                        e
                    )
                )
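The same connect-then-cwd sequence against a hypothetical host, reduced to bare ftplib; passing a user to the FTP() constructor logs in immediately:

import ftplib

ftp = ftplib.FTP("ftp.example.org", "anonymous", timeout=10)  # hypothetical
ftp.set_pasv(True)
ftp.cwd("/incoming")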
Example #22
def invoke_dcut(args):
    profile = dput.profile.load_profile(args.host)

    fqdn = None
    if "fqdn" in profile:
        fqdn = profile["fqdn"]

    if not "allow_dcut" in profile or not profile["allow_dcut"]:
        raise UploadException(
            "Profile %s does not allow command file uploads" "Please set allow_dcut=1 to allow such uploads"
        )

    logger.info("Uploading commands file to %s (incoming: %s)" % (fqdn or profile["name"], profile["incoming"]))

    if args.simulate:
        logger.warning("Not uploading for real - dry run")

    command = args.command
    assert issubclass(type(command), AbstractCommand)
    command.validate(args)

    if args.passive:
        force_passive_ftp_upload(profile)

    upload_path = None
    fh = None
    upload_filename = command.generate_commands_name(profile)
    try:
        if command.cmd_name == "upload":
            logger.debug("Uploading file %s as is to %s" % (args.upload_file, profile["name"]))
            if not os.access(args.upload_file, os.R_OK):
                raise DcutError("Cannot access %s: No such file" % (args.upload_file))
            upload_path = args.upload_file
        else:
            # "w+r" is not a valid mode; "w+" opens for reading and writing.
            fh = tempfile.NamedTemporaryFile(mode="w+", delete=False)
            (name, email) = write_header(fh, profile, args)
            command.produce(fh, args)
            fh.flush()
            # print fh.name
            fh.close()

            signing_key = None
            if "default_keyid" in profile:
                signing_key = profile["default_keyid"]
            if args.keyid:
                signing_key = args.keyid

            sign_file(fh.name, signing_key, profile, name, email)
            upload_path = fh.name

        if not args.simulate and not args.output:
            upload_commands_file(upload_path, upload_filename, profile, args)
        elif args.output and not args.simulate:
            if os.access(args.output, os.R_OK):
                logger.error("Not writing %s: File already exists" % (args.output))
                # ... but intentionally do nothing
                # TODO: or raise exception?
                return
            shutil.move(fh.name, args.output)
        elif args.simulate:
            pass
        else:
            # we should *never* come here
            assert False

    finally:
        if fh and os.access(fh.name, os.R_OK):
            os.unlink(fh.name)
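The temporary-file dance above (write, close, hand the name to a signer, unlink in finally) as a stand-alone sketch:

import os
import tempfile

fh = tempfile.NamedTemporaryFile(mode="w+", delete=False)
try:
    fh.write("Archive: example\n")  # hypothetical contents
    fh.close()
    # ... sign and upload fh.name here ...
finally:
    if os.path.exists(fh.name):
        os.unlink(fh.name)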
Example #23
def check_archive_integrity(changes, profile, interface):
    """
    The ``source`` checker is a stock dput checker that checks packages
    intended for upload for source attached.

    Profile key: none

    .. warning::
        This is all magic and pre-beta. Please don't rely on it.

    This simply checks, based on Debian policy rules, if the upload ought to
    have source attached.
    """

    package_version = changes.get("Version")
    package_name = changes.get("Source")
    package_distribution = changes.get("Distribution")
    dsc = parse_dsc_file(filename=changes.get_dsc())
    orig_tarballs = {}
    # technically this will also contain .debian.tar.gz or .diff.gz stuff.
    # We don't care.
    for files in dsc["Files"]:
        orig_tarballs[files['name']] = files['md5sum']

    (previous_version, previous_checksums) = _find_previous_upload(
        package_name,
        package_distribution
    )

    if previous_version:
        (p_ev, p_uv, p_dv) = _parse_version(previous_version)
        (c_ev, c_uv, c_dv) = _parse_version(package_version)

        logger.trace("Parsing versions: (old/new) %s/%s; debian: %s/%s" % (
            p_uv,
            c_uv,
            p_dv,
            c_dv
        ))

        if p_ev == c_ev and p_uv == c_uv:
            logger.trace("Upload %s/%s appears to be a Debian revision only" %
                         (package_name, package_version))
            for checksum in previous_checksums:
                if checksum in orig_tarballs:
                    logger.debug("Checking %s: %s == %s" % (
                        checksum,
                        previous_checksums[checksum],
                        orig_tarballs[checksum]
                    ))
                    if previous_checksums[checksum] != orig_tarballs[checksum]:
                        raise SourceMissingError(
                            "MD5 checksum for a Debian version only "
                            "upload for package %s/%s does not match the "
                            "archive's checksum: %s != %s" % (
                                package_name,
                                package_version,
                                previous_checksums[checksum],
                                orig_tarballs[checksum]
                            )
                        )
                else:
                    logger.debug("Checking %s: new orig stuff? %s" % (
                        checksum,
                        checksum  # XXX: This is wrong?
                    ))
                    raise SourceMissingError(
                        "Package %s/%s introduces new upstream changes: %s" % (
                            package_name,
                            package_version,
                            checksum
                        )
                    )
        else:
            logger.debug("Not checking archive integrity. "
                         "Upload %s/%s is packaging a new upstream version" %
                         (package_name, package_version))

        #TODO: It may be also well possible to find out if the new upload has
        #      a higher number than the previous. But that either needs a
        #      Python version parser, or a call to dpkg --compare-versions

    else:
        logger.debug(
            "Upload appears to be native, or packaging a new upstream version."
        )

    raise Exception("Intentional Barrier")  # pre-beta: checker always aborts
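The TODO about version ordering can be outsourced to dpkg, as the comment suggests; a sketch using subprocess (dpkg --compare-versions exits 0 when the relation holds):

import subprocess

def dpkg_version_lt(a, b):
    return subprocess.run(
        ["dpkg", "--compare-versions", a, "lt", b]).returncode == 0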
Example #24
    def initialize(self, **kwargs):
        """
        See :meth:`dput.uploader.AbstractUploader.initialize`
        """
        fqdn = self._config['fqdn']
        incoming = self._config['incoming']

        self.sftp_config = {}
        if "sftp" in self._config:
            self.sftp_config = self._config['sftp']

        self.putargs = {'confirm': False}

        if "confirm_upload" in self.sftp_config:
            self.putargs['confirm'] = self.sftp_config['confirm_upload']

        if incoming.startswith('~/'):
            logger.warning("SFTP does not support ~/path, continuing with"
                           "relative directory name instead.")
            incoming = incoming[2:]
        # elif incoming.startswith('~') and not self.host_is_launchpad:
        #    raise SftpUploadException("SFTP doesn't support ~path. "
        #                              "if you need $HOME paths, use SCP.")
        #  XXX: What to do here?? - PRT

        ssh_kwargs = {
            "port": 22,
            "compress": True
        }

        # XXX: Timeout override

        if 'port' in self._config:
            ssh_kwargs['port'] = self._config['port']

        if 'scp_compress' in self._config:
            ssh_kwargs['compress'] = self._config['scp_compress']

        config = paramiko.SSHConfig()
        if os.path.exists('/etc/ssh/ssh_config'):
            config.parse(open('/etc/ssh/ssh_config'))
        if os.path.exists(os.path.expanduser('~/.ssh/config')):
            config.parse(open(os.path.expanduser('~/.ssh/config')))
        o = config.lookup(fqdn)

        user = find_username(self._config)
        if "user" in o:
            user = o['user']

        ssh_kwargs['username'] = user

        if 'identityfile' in o:
            if check_paramiko_version((1, 10)):
                # Starting with paramiko 1.10 identityfile is always a list.
                pkey = [os.path.expanduser(path) for path in o['identityfile']]
            else:
                pkey = os.path.expanduser(o['identityfile'])
            ssh_kwargs['key_filename'] = pkey

        logger.info("Logging into host %s as %s" % (fqdn, user))
        self._sshclient = paramiko.SSHClient()
        if 'globalknownhostsfile' in o:
            for gkhf in o['globalknownhostsfile'].split():
                if os.path.isfile(gkhf):
                    self._sshclient.load_system_host_keys(gkhf)
        else:
            files = [
                "/etc/ssh/ssh_known_hosts",
                "/etc/ssh/ssh_known_hosts2"
            ]
            for fpath in files:
                if os.path.isfile(fpath):
                    self._sshclient.load_system_host_keys(fpath)

        if 'userknownhostsfile' in o:
            for u in o['userknownhostsfile'].split():
                # actually, ssh supports a bit more than ~/,
                # but that would be a task for paramiko...
                ukhf = os.path.expanduser(u)
                if os.path.isfile(ukhf):
                    self._sshclient.load_host_keys(ukhf)
        else:
            for u in ['~/.ssh/known_hosts2', '~/.ssh/known_hosts']:
                ukhf = os.path.expanduser(u)
                if os.path.isfile(ukhf):
                    # Ideally, that should be load_host_keys,
                    # so that the known_hosts file can be written
                    # again. But paramiko can destroy the contents
                    # or parts of it, so no writing by using
                    # load_system_host_keys here, too:
                    self._sshclient.load_system_host_keys(ukhf)
        self._sshclient.set_missing_host_key_policy(AskToAccept(self))
        self._auth(fqdn, ssh_kwargs)
        try:
            self._sftp = self._sshclient.open_sftp()
        except paramiko.SSHException as e:
            raise SftpUploadException(
                "Error opening SFTP channel to %s (perhaps sftp is "
                "disabled there?): %s" % (
                    fqdn,
                    repr(e)
                )
            )

        # logger.debug("Changing directory to %s" % (incoming))
        # self._sftp.chdir(incoming)
        try:
            self._sftp.stat(incoming)
        except IOError as e:
            # launchpad does not support any operations to check if a directory
            # exists. stat will fail with an IOError with errno equal to None.
            if e.errno is None:
                logger.debug(
                    "Failed to stat incoming directory %s on %s. This should "
                    "only happen on launchpad." % (
                        incoming,
                        fqdn
                    )
                )
            else:
                raise SftpUploadException(
                    "Failed to stat incoming directory %s on %s: %s" % (
                        incoming,
                        fqdn,
                        e.strerror
                    )
                )
        except paramiko.SSHException as e:
            raise SftpUploadException("SFTP error uploading to %s: %s" % (
                fqdn,
                repr(e)
            ))
        self.incoming = incoming
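The ssh_config lookup in isolation: SSHConfig.lookup() returns a dict-like object whose keys ("user", "identityfile", ...) drive the connection settings above. A sketch with a hypothetical host:

import os
import paramiko

config = paramiko.SSHConfig()
path = os.path.expanduser("~/.ssh/config")
if os.path.exists(path):
    with open(path) as fh:
        config.parse(fh)
print(config.lookup("upload.example.org").get("user"))  # hypothetical host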
Example #25
def load_config(config_class, config_name,
                default=None, configs=None, config_cleanup=True):
    """
    Load any dput configuration given a ``config_class`` (such as
    ``hooks``), and a ``config_name`` (such as
    ``lintian`` or ``tweet``).

    Optional kwargs:

        ``default`` is a default to return, in case the config file
        isn't found. If this isn't provided, this function will
        raise a :class:`dput.exceptions.NoSuchConfigError`.

        ``configs`` is a list of config files to check. When this
        isn't provided, we check dput.core.CONFIG_LOCATIONS.
    """

    logger.debug("Loading configuration: %s %s" % (
        config_class,
        config_name
    ))
    roots = []
    ret = {}
    found = False
    template_path = "%s/%s/%s.json"
    locations = configs or dput.core.CONFIG_LOCATIONS
    for config in locations:
        logger.trace("Checking for configuration: %s" % (config))
        path = template_path % (
            config,
            config_class,
            config_name
        )
        logger.trace("Checking - %s" % (path))
        try:
            if os.path.exists(path):
                found = True
                roots.append(path)
                ret.update(json.load(open(path, 'r')))
        except ValueError as e:
            raise DputConfigurationError("syntax error in %s: %s" % (
                path, e
            ))

    if not found:
        if default is not None:
            return default

        raise NoSuchConfigError("No such config: %s/%s" % (
            config_class,
            config_name
        ))

    if 'meta' in ret and (
        config_class != 'metas' or
        ret['meta'] != config_name
    ):
        metainfo = load_config(
            "metas",
            ret['meta'],
            default={}
        )  # configs=configs)
        # Erm, is this right? For some reason, I don't think it is. Meta
        # handling is a hemorrhoid in my ass. F**k it, it works. Ship it.
        #   -- PRT
        for key in metainfo:
            if key not in ret:
                ret[key] = metainfo[key]
            else:
                logger.trace("Ignoring key %s for %s (%s)" % (
                    key,
                    ret['meta'],
                    metainfo[key]
                ))

    obj = ret
    if config_cleanup:
        obj = _config_cleanup(ret)

    if obj != {}:
        return obj

    if default is not None:
        return default

    logger.debug("Failed to load configuration %s" % (config_name))

    nsce = NoSuchConfigError("No such configuration: '%s' in class '%s'" % (
        config_name,
        config_class
    ))

    nsce.config_class = config_class
    nsce.config_name = config_name
    nsce.checked = dput.core.CONFIG_LOCATIONS
    raise nsce
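The layering strategy in load_config reduces to dict.update() over each config root in order, later roots overriding earlier ones; a minimal sketch with hypothetical paths:

import json
import os

ret = {}
for root in ("/usr/share/dput-ng", os.path.expanduser("~/.dput.d")):  # hypothetical
    path = "%s/%s/%s.json" % (root, "hooks", "lintian")
    if os.path.exists(path):
        with open(path) as fp:
            ret.update(json.load(fp))
print(ret)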