Example #1
def generate_dak_commands_name(profile):
    # for debianqueued: $login-$timestamp.commands
    # for dak: $login-$timestamp.dak-commands
    the_file = "%s-%s.dak-commands" % (get_local_username(), int(time.time()))
    # XXX: override w/ DEBEMAIL (if DEBEMAIL is @debian.org?)
    logger.trace("Commands file will be named %s" % (the_file))
    return the_file
Example #2
def get_obj_by_name(cls, name, profile):
    """
    Look up the object registered as ``name`` in class ``cls``, resolve the
    configured user interface, and yield the pair ``(obj, interface)``.
    """
    logger.trace("running %s: %s" % (cls, name))
    obj = get_obj(cls, name)
    if obj is None:
        raise DputConfigurationError("No such obj: `%s'" % (
            name
        ))

    interface = 'cli'
    if 'interface' in profile:
        interface = profile['interface']
    logger.trace("Using interface %s" % (interface))
    interface_obj = get_obj('interfaces', interface)
    if interface_obj is None:
        raise DputConfigurationError("No such interface: `%s'" % (
            interface
        ))
    interface = interface_obj()
    interface.initialize()

    try:
        yield (obj, interface)
    finally:
        interface.shutdown()
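The yield makes this a context-manager factory; a minimal usage sketch, assuming it is decorated with @contextlib.contextmanager elsewhere and that a 'lintian' hook is configured (both are assumptions, not shown above):

profile = {'interface': 'cli'}
with get_obj_by_name('hooks', 'lintian', profile) as (obj, interface):
    # obj is the resolved plugin callable; interface is the initialized UI
    print(obj, interface)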
Example #3
def check_protected_distributions(changes, profile, interface):
    """
    The ``protected distributions`` checker is a stock dput checker that makes
    sure that users uploading to a special-care archive
    (testing-proposed-updates, stable-security, etc.) really followed the
    archive policies for it.

    Profile key: none

    """
    # XXX: This check does not contain code names yet. We need a global way
    #      to retrieve and share current code names.
    suite = changes["Distribution"]
    query_user = False
    release_team_suites = ["testing-proposed-updates", "proposed-updates", "stable", "testing"]
    if suite in release_team_suites:
        msg = "Are you sure to upload to %s? Did you coordinate with the " "Release Team before your upload?" % (suite)
        error_msg = "Aborting upload to Release Team managed suite upon " "request"
        query_user = True
    security_team_suites = ["stable-security", "oldstable-security", "testing-security"]
    if suite in security_team_suites:
        msg = "Are you sure to upload to %s? Did you coordinate with the " "Security Team before your upload?" % (suite)
        error_msg = "Aborting upload to Security Team managed suite upon " "request"
        query_user = True

    if query_user:
        logger.trace("Querying the user for input. The upload targets a " "protected distribution")
        if not interface.boolean("Protected Checker", msg, default=BUTTON_NO):
            raise BadDistributionError(error_msg)
        else:
            logger.warning("Uploading with explicit confirmation by the user")
    else:
        logger.trace("Nothing to do for checker protected_distributions")
Example #4
    def preload(self, configs):
        """
        See :meth:`dput.config.AbstractConfig.preload`
        """
        parser = configparser.ConfigParser()
        if configs is None:
            configs = dput.core.DPUT_CONFIG_LOCATIONS

        for config in configs:
            if not os.access(config, os.R_OK):
                logger.debug("Skipping file %s: Not accessible" % (
                    config
                ))
                continue
            try:
                logger.trace("Parsing %s" % (config))
                with open(config, 'r') as fh:
                    parser.read_file(fh)
            except IOError as e:
                logger.warning("Skipping file %s: %s" % (
                    config,
                    e
                ))
                continue
            except configparser.ParsingError as e:
                raise DputConfigurationError("Error parsing file %s: %s" % (
                    config,
                    e
                ))
        self.parser = parser
        self.configs = configs
        self.defaults = self._translate_strs(self.get_config("DEFAULT"))
        self.parser.remove_section("DEFAULT")
Example #5
def sign_file(filename, keyid=None, profile=None, name=None, email=None):
    logger.debug(
        "Signing file %s - signature hints are key: %s, " "name: %s, email: %s" % (filename, keyid, name, email)
    )

    gpg_path = "gpg"
    if keyid:
        identity_hint = keyid
    else:
        # hard to see here, but name and email are guaranteed to be set in
        # write_header()
        if name:
            identity_hint = name
        if email:
            identity_hint += " <%s>" % (email)

    logger.trace("GPG identity hint: %s" % (identity_hint))

    (gpg_output, gpg_output_stderr, exit_status) = run_command(
        [gpg_path, "--default-key", identity_hint, "--status-fd", "1", "--sign", "--armor", "--clearsign", filename]
    )

    if exit_status == -1:
        raise DcutError("Unknown problem while making cleartext signature")

    if exit_status != 0:
        raise DcutError("Failed to make cleartext signature " "to commands file:\n%s" % (gpg_output_stderr))

    if "[GNUPG:] SIG_CREATED" not in gpg_output:
        raise DcutError("Failed to make cleartext signature:\n%s" % (gpg_output_stderr))

    os.unlink(filename)
    shutil.move("%s.asc" % (filename), filename)
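A hedged usage sketch; the filename and signing identity below are made up, and on success the file is replaced in place by its clear-signed version:

# Hypothetical call: clear-sign a freshly generated commands file.
sign_file("jane-1700000000.commands",
          keyid="0xDEADBEEF",
          name="Jane Developer",
          email="jane@example.org")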
Example #6
    def boolean(self, title, message, question_type=BUTTON_YES_NO,
                default=None):
        """
        See :meth:`dput.interface.AbstractInterface.boolean`
        """
        super(CLInterface, self).boolean(title, message, question_type)

        choices = ""
        question_len = len(question_type)
        for question in question_type:
            button_name = self.button_to_str(question)
            if question == default:
                button_name = button_name.upper()
            choices += button_name
            question_len -= 1
            if question_len:
                choices += ", "
        user_input = None
        while not user_input:
            user_input = self.question(title, "%s [%s]" % (message, choices))
            user_input = self.str_to_button(user_input, default)
        logger.trace("translated user input '%s'" % (user_input))
        if user_input in (BUTTON_OK, BUTTON_YES):
            return True
        return False
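A short illustrative call, assuming an initialized interface object and the BUTTON_* constants from the same module (the title and message are made up):

# Returns True only for a yes/ok answer; the uppercase choice marks the default.
if interface.boolean("Protected Checker",
                     "Upload to stable anyway?",
                     question_type=BUTTON_YES_NO,
                     default=BUTTON_NO):
    print("user confirmed the upload")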
Example #7
    def __init__(self, profile):
        self._config = profile
        interface = 'cli'
        if 'interface' in profile:
            interface = profile['interface']
        logger.trace("Using interface %s" % (interface))
        interface_obj = get_obj('interfaces', interface)
        if interface_obj is None:
            raise DputConfigurationError("No such interface: `%s'" % (
                interface
            ))
        self.interface = interface_obj()
        self.interface.initialize()
Example #8
    def validate_checksums(self, check_hash="sha1"):
        """
        Validate checksums for a package, using ``check_hash``'s type
        to validate the package.

        Valid ``check_hash`` types:

            * sha1
            * sha256
            * md5
            * md5sum
        """
        logger.debug("validating %s checksums" % (check_hash))

        for filename in self.get_files():
            if check_hash == "sha1":
                hash_type = hashlib.sha1()
                checksums = self.get("Checksums-Sha1")
                field_name = "sha1"
            elif check_hash == "sha256":
                hash_type = hashlib.sha256()
                checksums = self.get("Checksums-Sha256")
                field_name = "sha256"
            elif check_hash == "md5":
                hash_type = hashlib.md5()
                checksums = self.get("Files")
                field_name = "md5sum"

            for changed_files in checksums:
                if changed_files["name"] == os.path.basename(filename):
                    break
            else:
                assert "get_files() returns different files than Files: knows?!"

            with open(filename, "rb") as fc:
                while True:
                    chunk = fc.read(131072)
                    if not chunk:
                        break
                    hash_type.update(chunk)

            if not hash_type.hexdigest() == changed_files[field_name]:
                raise ChangesFileException(
                    "Checksum mismatch for file %s: %s != %s"
                    % (filename, hash_type.hexdigest(), changed_files[field_name])
                )
            else:
                logger.trace("%s Checksum for file %s matches" % (field_name, filename))
Example #9
def load_obj(obj_path):
    """
    Dynamically load an object (class, method, etc) by name (such as
    `dput.core.ClassName`), and return that object to work with. This is
    useful for loading modules on the fly, without them being all loaded at
    once, or even in the same package.

    Call this routine with a dotted path (at least one dot in it) -- it imports
    the module part (such as dput.core) and uses getattr to fetch the final
    attribute, similar to how `from` works.
    """
    dput.core.mangle_sys()
    logger.trace("Loading object: %s" % (obj_path))
    module, obj = obj_path.rsplit(".", 1)
    mod = importlib.import_module(module)
    fltr = getattr(mod, obj)
    return fltr
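A small usage example in the spirit of the docstring; the dotted path below points at the standard library purely for illustration:

# Resolve hashlib.sha1 by its dotted name and use the returned object.
sha1_factory = load_obj("hashlib.sha1")
print(sha1_factory(b"hello").hexdigest())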
Example #10
    def get_config(self, name):
        """
        See :meth:`dput.config.AbstractConfig.get_config`
        """
        kwargs = {
            "default": {}
        }

        configs = self.configs
        if configs is not None:
            kwargs['configs'] = configs

        kwargs['config_cleanup'] = False

        profile = load_config(
            'profiles',
            name,
            **kwargs
        )
        logger.trace("name: %s - %s / %s" % (name, profile, kwargs))
        repls = self.replacements
        for thing in profile:
            val = profile[thing]
            if not isinstance(val, _basestr_type):
                continue
            for repl in repls:
                if repl in val:
                    val = val.replace("%%(%s)s" % (repl), repls[repl])
            profile[thing] = val

        ret = {}
        ret.update(profile)
        ret['name'] = name

        for key in ret:
            val = ret[key]
            if isinstance(val, _basestr_type):
                if "%(" in val and ")s" in val:
                    raise DputConfigurationError(
                        "Half-converted block: %s --> %s" % (
                            key,
                            val
                        )
                    )
        return ret
Example #11
def get_obj(cls, checker_method):  # checker_method is a bad name.
    """
    Get an object by plugin def (``checker_method``) in class ``cls`` (such
    as ``hooks``).
    """
    logger.trace("Attempting to resolve %s %s" % (cls, checker_method))
    try:
        config = load_config(cls, checker_method)
        validate_object('plugin', config, "%s/%s" % (cls, checker_method))

        if config is None or config == {}:
            raise NoSuchConfigError("No such config")
    except NoSuchConfigError:
        logger.debug("failed to resolve config %s" % (checker_method))
        return None
    path = config['path']
    logger.trace("loading %s %s" % (cls, path))
    try:
        return load_obj(path)
    except ImportError as e:
        logger.warning("failed to resolve path %s: %s" % (path, e))
        return None
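A hedged lookup sketch; whether a 'lintian' hook is actually configured depends on the local plugin definitions:

hook = get_obj('hooks', 'lintian')
if hook is None:
    logger.warning("lintian hook not available")
else:
    logger.debug("resolved hook: %s" % (hook))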
Example #12
    def get_config(self, name):
        """
        See :meth:`dput.config.AbstractConfig.get_config`
        """
        logger.trace("Loading entry %s" % (name))
        ret = self.defaults.copy()
        for config in self.configs:
            obj = config.get_config(name)
            logger.trace(obj)
            ret.update(obj)
            ret = _config_cleanup(ret)
            logger.trace('Rewrote to:')
            logger.trace(ret)
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Got configuration: %s" % (name))
            for key in ret:
                logger.debug("\t%s: %s" % (key, ret[key]))

        validate_object('config', ret, 'profiles/%s' % (name))
        return ret
Example #13
def check_archive_integrity(changes, profile, interface):
    """
    The ``source`` checker is a stock dput checker that checks whether packages
    intended for upload have their source attached.

    Profile key: none

    .. warning::
        This is all magic and pre-beta. Please don't rely on it.

    This simply checks, based on Debian policy rules, whether the upload ought
    to have source attached.
    """

    package_version = changes.get("Version")
    package_name = changes.get("Source")
    package_distribution = changes.get("Distribution")
    dsc = parse_dsc_file(filename=changes.get_dsc())
    orig_tarballs = {}
    # technically this will also contain .debian.tar.gz or .diff.gz stuff.
    # We don't care.
    for files in dsc["Files"]:
        orig_tarballs[files['name']] = files['md5sum']

    (previous_version, previous_checksums) = _find_previous_upload(
        package_name,
        package_distribution
    )

    if previous_version:
        (p_ev, p_uv, p_dv) = _parse_version(previous_version)
        (c_ev, c_uv, c_dv) = _parse_version(package_version)

        logger.trace("Parsing versions: (old/new) %s/%s; debian: %s/%s" % (
            p_uv,
            c_uv,
            p_dv,
            c_dv
        ))

        if p_ev == c_ev and p_uv == c_uv:
            logger.trace("Upload %s/%s appears to be a Debian revision only" %
                         (package_name, package_version))
            for checksum in previous_checksums:
                if checksum in orig_tarballs:
                    logger.debug("Checking %s: %s == %s" % (
                        checksum,
                        previous_checksums[checksum],
                        orig_tarballs[checksum]
                    ))
                    if previous_checksums[checksum] != orig_tarballs[checksum]:
                        raise SourceMissingError(
                            "MD5 checksum for a Debian version only "
                            "upload for package %s/%s does not match the "
                            "archive's checksum: %s != %s" % (
                                package_name,
                                package_version,
                                previous_checksums[checksum],
                                orig_tarballs[checksum]
                            )
                        )
                else:
                    logger.debug("Checking %s: new orig stuff? %s" % (
                        checksum,
                        checksum  # XXX: This is wrong?
                    ))
                    raise SourceMissingError(
                        "Package %s/%s introduces new upstream changes: %s" % (
                            package_name,
                            package_version,
                            checksum
                        )
                    )
        else:
            logger.debug("Not checking archive integrity. "
                         "Upload %s/%s is packaging a new upstream version" %
                         (package_name, package_version))

        #TODO: It may well be possible to find out whether the new upload has
        #      a higher version number than the previous one. But that needs
        #      either a Python version parser or a call to dpkg --compare-versions

    else:
        logger.debug(
            "Upload appears to be native, or packaging a new upstream version."
        )

    raise Exception("Intentional Barrier")
Example #14
def load_config(config_class, config_name,
                default=None, configs=None, config_cleanup=True):
    """
    Load any dput configuration given a ``config_class`` (such as
    ``hooks``), and a ``config_name`` (such as
    ``lintian`` or ``tweet``).

    Optional kwargs:

        ``default`` is a default to return, in case the config file
        isn't found. If this isn't provided, this function will
        raise a :class:`dput.exceptions.NoSuchConfigError`.

        ``configs`` is a list of config files to check. When this
        isn't provided, we check dput.core.CONFIG_LOCATIONS.
    """

    logger.debug("Loading configuration: %s %s" % (
        config_class,
        config_name
    ))
    roots = []
    ret = {}
    found = False
    template_path = "%s/%s/%s.json"
    locations = configs or dput.core.CONFIG_LOCATIONS
    for config in locations:
        logger.trace("Checking for configuration: %s" % (config))
        path = template_path % (
            config,
            config_class,
            config_name
        )
        logger.trace("Checking - %s" % (path))
        try:
            if os.path.exists(path):
                found = True
                roots.append(path)
                ret.update(json.load(open(path, 'r')))
        except ValueError as e:
            raise DputConfigurationError("syntax error in %s: %s" % (
                path, e
            ))

    if not found:
        if default is not None:
            return default

        raise NoSuchConfigError("No such config: %s/%s" % (
            config_class,
            config_name
        ))

    if 'meta' in ret and (
        config_class != 'metas' or
        ret['meta'] != config_name
    ):
        metainfo = load_config(
            "metas",
            ret['meta'],
            default={}
        )  # configs=configs)
        # Erm, is this right? For some reason, I don't think it is. Meta
        # handling is a hemorrhoid in my ass. F**k it, it works. Ship it.
        #   -- PRT
        for key in metainfo:
            if key not in ret:
                ret[key] = metainfo[key]
            else:
                logger.trace("Ignoring key %s for %s (%s)" % (
                    key,
                    ret['meta'],
                    metainfo[key]
                ))

    obj = ret
    if config_cleanup:
        obj = _config_cleanup(ret)

    if obj != {}:
        return obj

    if default is not None:
        return default

    logger.debug("Failed to load configuration %s" % (config_name))

    nsce = NoSuchConfigError("No such configuration: '%s' in class '%s'" % (
        config_name,
        config_class
    ))

    nsce.config_class = config_class
    nsce.config_name = config_name
    nsce.checked = dput.core.CONFIG_LOCATIONS
    raise nsce
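A minimal usage sketch; the 'lintian' hook name is illustrative, and passing default={} avoids the NoSuchConfigError path shown above:

# Load the JSON definition of a hook, falling back to an empty dict.
conf = load_config('hooks', 'lintian', default={})
logger.debug("hook path: %s" % (conf.get('path')))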
Example #15
def generate_debianqueued_commands_name(profile):
    # for debianqueued: $login-$timestamp.commands
    # for dak: $login-$timestamp.dak-commands
    the_file = "%s-%s.commands" % (os.getlogin(), int(time.time()))
    logger.trace("Commands file will be named %s" % (the_file))
    return the_file
Example #16
def invoke_dput(changes, args):
    """
    .. warning::
       This method may change names. Please use it via :func:`dput.upload`.
       Also, please don't depend on ``args``; that's likely to change shortly.

    Given a changes file ``changes``, and arguments to dput ``args``,
    upload a package to the archive that makes sense.

    """
    profile = dput.profile.load_profile(args.host)
    check_modules(profile)

    fqdn = None
    if "fqdn" in profile:
        fqdn = profile['fqdn']
    else:
        fqdn = profile['name']

    logfile = determine_logfile(changes, profile, args)
    tmp_logfile = tempfile.NamedTemporaryFile()
    if should_write_logfile(args):
        full_upload_log = profile["full_upload_log"]
        if args.full_upload_log:
            full_upload_log = args.full_upload_log
        _write_upload_log(tmp_logfile.name, full_upload_log)

    if args.delayed:
        make_delayed_upload(profile, args.delayed)

    if args.simulate:
        logger.warning("Not uploading for real - dry run")

    if args.passive:
        force_passive_ftp_upload(profile)

    logger.info("Uploading %s using %s to %s (host: %s; directory: %s)" % (
        changes.get_package_name(),
        profile['method'],
        profile['name'],
        fqdn,
        profile['incoming']
    ))

    if 'hooks' in profile:
        run_pre_hooks(changes, profile)
    else:
        logger.trace(profile)
        logger.warning("No hooks defined in the profile. "
                       "Not checking upload.")

    # check only is a special case of -s
    if args.check_only:
        args.simulate = 1

    with uploader(profile['method'], profile,
                  simulate=args.simulate) as obj:

        if args.check_only:
            logger.info("Package %s passes all checks" % (
                changes.get_package_name()
            ))
            return

        if args.no_upload_log:
            logger.info("Not writing upload log upon request")

        files = changes.get_files() + [changes.get_changes_file()]
        for path in files:
            logger.info("Uploading %s%s" % (
                os.path.basename(path),
                " (simulation)" if args.simulate else ""
            ))

            if not args.simulate:
                obj.upload_file(path)

        if args.simulate:
            return

        if 'hooks' in profile:
            run_post_hooks(changes, profile)
        else:
            logger.trace(profile)
            logger.warning("No hooks defined in the profile. "
                           "Not post-processing upload.")
    if should_write_logfile(args):
        tmp_logfile.flush()
        shutil.copy(tmp_logfile.name, logfile)
        #print(tmp_logfile.name)
        tmp_logfile.close()
Example #17
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""
Implementation of the interface to run a hook.
"""

from dput.util import obj_docs, run_func_by_name, load_config, validate_object
from dput.core import logger
try:
    import clojure.main  # NOQA
except ImportError:
    logger.trace("No clojure support :(")


def hook_docs(hook):
    return obj_docs('hooks', hook)


def get_hooks(profile):
    for hook in profile['hooks']:
        conf = load_config('hooks', hook)
        validate_object('plugin', conf, 'hooks/%s' % (hook))
        yield (hook, conf)


def run_pre_hooks(changes, profile):
    for name, hook in get_hooks(profile):