Example no. 1
0
def target(cls, minion_instance, opts, data, connected):
    """
    Handle targeting of the minion.

    Calling _thread_multi_return or _thread_return
    depending on a single or multiple commands.
    """
    log.debug(
        "Deltaproxy minion_instance %s(ID: %s). Target: %s",
        minion_instance,
        minion_instance.opts["id"],
        opts["id"],
    )

    # Lazily attach the payload serializer the first time this
    # instance is targeted.
    if not hasattr(minion_instance, "serial"):
        minion_instance.serial = salt.payload.Serial(opts)

    # Likewise, create the job-process directory on first use.
    if not hasattr(minion_instance, "proc_dir"):
        uid = salt.utils.user.get_uid(user=opts.get("user", None))
        minion_instance.proc_dir = salt.minion.get_proc_dir(opts["cachedir"],
                                                            uid=uid)

    with tornado.stack_context.StackContext(minion_instance.ctx):
        # A tuple/list of function names means a compound (multi) call.
        if isinstance(data["fun"], (tuple, list)):
            ProxyMinion._thread_multi_return(minion_instance, opts, data)
        else:
            ProxyMinion._thread_return(minion_instance, opts, data)
Example no. 2
0
File: proxy.py  Project: jodok/salt
def target(cls, minion_instance, opts, data, connected):
    """
    Handle targeting of the minion.

    Lazily constructs and bootstraps the proxy minion instance
    (loading modules, the proxy module, and utils) on first use,
    then dispatches to _thread_multi_return or _thread_return
    depending on whether a single or multiple commands were sent.
    """
    if not minion_instance:
        minion_instance = cls(opts)
        minion_instance.connected = connected
        if not hasattr(minion_instance, 'functions'):
            # Need to load the modules so they get all the dunder variables
            functions, returners, function_errors, executors = (
                minion_instance._load_modules(grains=opts['grains'])
            )
            minion_instance.functions = functions
            minion_instance.returners = returners
            minion_instance.function_errors = function_errors
            minion_instance.executors = executors

            # Pull in the utils
            minion_instance.utils = salt.loader.utils(minion_instance.opts)

            # Then load the proxy module
            minion_instance.proxy = salt.loader.proxy(minion_instance.opts, utils=minion_instance.utils)

            # And re-load the modules so the __proxy__ variable gets injected
            functions, returners, function_errors, executors = (
                minion_instance._load_modules(grains=opts['grains'])
            )
            minion_instance.functions = functions
            minion_instance.returners = returners
            minion_instance.function_errors = function_errors
            minion_instance.executors = executors

            minion_instance.functions.pack['__proxy__'] = minion_instance.proxy
            minion_instance.proxy.pack['__salt__'] = minion_instance.functions
            minion_instance.proxy.pack['__ret__'] = minion_instance.returners
            minion_instance.proxy.pack['__pillar__'] = minion_instance.opts['pillar']

            # Reload utils as well (chicken and egg, __utils__ needs __proxy__ and __proxy__ needs __utils__)
            minion_instance.utils = salt.loader.utils(minion_instance.opts, proxy=minion_instance.proxy)
            minion_instance.proxy.pack['__utils__'] = minion_instance.utils

            # Reload all modules so all dunder variables are injected
            minion_instance.proxy.reload_modules()

            fq_proxyname = opts['proxy']['proxytype']

            # Fall back to an empty executor list when the proxy module
            # does not define module_executors.
            minion_instance.module_executors = minion_instance.proxy.get('{0}.module_executors'.format(fq_proxyname), lambda: [])()

            proxy_init_fn = minion_instance.proxy[fq_proxyname + '.init']
            proxy_init_fn(opts)
        if not hasattr(minion_instance, 'serial'):
            minion_instance.serial = salt.payload.Serial(opts)
        if not hasattr(minion_instance, 'proc_dir'):
            uid = salt.utils.user.get_uid(user=opts.get('user', None))
            minion_instance.proc_dir = (
                salt.minion.get_proc_dir(opts['cachedir'], uid=uid)
            )

    with tornado.stack_context.StackContext(minion_instance.ctx):
        # A tuple/list of function names means a compound (multi) call.
        if isinstance(data['fun'], (tuple, list)):
            ProxyMinion._thread_multi_return(minion_instance, opts, data)
        else:
            ProxyMinion._thread_return(minion_instance, opts, data)
Example no. 3
0
def target(cls, minion_instance, opts, data, connected):
    """
    Handle targeting of the minion.

    Calling _thread_multi_return or _thread_return
    depending on a single or multiple commands.
    """
    if not minion_instance:
        minion_instance = cls(opts)
        minion_instance.connected = connected
        if not hasattr(minion_instance, "functions"):
            # Need to load the modules so they get all the dunder variables
            (
                functions,
                returners,
                function_errors,
                executors,
            ) = minion_instance._load_modules(grains=opts["grains"])
            minion_instance.functions = functions
            minion_instance.returners = returners
            minion_instance.function_errors = function_errors
            minion_instance.executors = executors

            # Pull in the utils
            minion_instance.utils = salt.loader.utils(minion_instance.opts)

            # Then load the proxy module
            minion_instance.proxy = salt.loader.proxy(
                minion_instance.opts, utils=minion_instance.utils)

            # And re-load the modules so the __proxy__ variable gets injected
            (
                functions,
                returners,
                function_errors,
                executors,
            ) = minion_instance._load_modules(grains=opts["grains"])
            minion_instance.functions = functions
            minion_instance.returners = returners
            minion_instance.function_errors = function_errors
            minion_instance.executors = executors

            minion_instance.functions.pack["__proxy__"] = minion_instance.proxy
            minion_instance.proxy.pack["__salt__"] = minion_instance.functions
            minion_instance.proxy.pack["__ret__"] = minion_instance.returners
            minion_instance.proxy.pack["__pillar__"] = minion_instance.opts[
                "pillar"]

            # Reload utils as well (chicken and egg, __utils__ needs __proxy__ and __proxy__ needs __utils__)
            minion_instance.utils = salt.loader.utils(
                minion_instance.opts, proxy=minion_instance.proxy)
            minion_instance.proxy.pack["__utils__"] = minion_instance.utils

            # Reload all modules so all dunder variables are injected
            minion_instance.proxy.reload_modules()

            fq_proxyname = opts["proxy"]["proxytype"]

            # Fall back to an empty executor list when the proxy module
            # does not define module_executors.
            minion_instance.module_executors = minion_instance.proxy.get(
                "{}.module_executors".format(fq_proxyname), lambda: [])()

            proxy_init_fn = minion_instance.proxy[fq_proxyname + ".init"]
            proxy_init_fn(opts)
        if not hasattr(minion_instance, "proc_dir"):
            uid = salt.utils.user.get_uid(user=opts.get("user", None))
            minion_instance.proc_dir = salt.minion.get_proc_dir(
                opts["cachedir"], uid=uid)

    with salt.ext.tornado.stack_context.StackContext(minion_instance.ctx):
        # A tuple/list of function names means a compound (multi) call.
        if isinstance(data["fun"], (tuple, list)):
            ProxyMinion._thread_multi_return(minion_instance, opts, data)
        else:
            ProxyMinion._thread_return(minion_instance, opts, data)
Example no. 4
0
def post_master_init(self, master):
    """
    Function to finish init after a deltaproxy proxy
    minion has finished connecting to a master.

    This is primarily loading modules, pillars, etc. (since they need
    to know which master they connected to).  It bootstraps the control
    proxy itself, then builds one ProxyMinion per sub-proxy id listed in
    ``opts['proxy']['ids']``.
    """

    if self.connected:
        self.opts["pillar"] = yield salt.pillar.get_async_pillar(
            self.opts,
            self.opts["grains"],
            self.opts["id"],
            saltenv=self.opts["saltenv"],
            pillarenv=self.opts.get("pillarenv"),
        ).compile_pillar()

        # Ensure that the value of master is the one we passed in.
        # if pillar_opts is enabled then master could be overwritten
        # when compile_pillar is run.
        self.opts["master"] = master

        tag = "salt/deltaproxy/start"
        self._fire_master(tag=tag)

    if "proxy" not in self.opts["pillar"] and "proxy" not in self.opts:
        errmsg = (
            "No proxy key found in pillar or opts for id {}. Check your pillar/opts "
            "configuration and contents.  Salt-proxy aborted.".format(
                self.opts["id"]))
        log.error(errmsg)
        self._running = False
        raise SaltSystemExit(code=-1, msg=errmsg)

    if "proxy" not in self.opts:
        self.opts["proxy"] = self.opts["pillar"]["proxy"]

    self.opts = salt.utils.dictupdate.merge(
        self.opts,
        self.opts["pillar"],
        strategy=self.opts.get("proxy_merge_pillar_in_opts_strategy"),
        merge_lists=self.opts.get("proxy_deep_merge_pillar_in_opts", False),
    )

    if self.opts.get("proxy_mines_pillar"):
        # Even when not required, some details such as mine configuration
        # should be merged anyway whenever possible.
        if "mine_interval" in self.opts["pillar"]:
            self.opts["mine_interval"] = self.opts["pillar"]["mine_interval"]
        if "mine_functions" in self.opts["pillar"]:
            general_proxy_mines = self.opts.get("mine_functions", [])
            specific_proxy_mines = self.opts["pillar"]["mine_functions"]
            try:
                self.opts[
                    "mine_functions"] = general_proxy_mines + specific_proxy_mines
            except TypeError:
                # opts and pillar disagree on the mine_functions container
                # type (e.g. list vs dict), so they cannot be concatenated.
                log.error(
                    "Unable to merge mine functions from the pillar in the opts, for proxy %s",
                    self.opts["id"],
                )

    fq_proxyname = self.opts["proxy"]["proxytype"]

    # Need to load the modules so they get all the dunder variables
    (
        self.functions,
        self.returners,
        self.function_errors,
        self.executors,
    ) = self._load_modules()

    # we can then sync any proxymodules down from the master
    # we do a sync_all here in case proxy code was installed by
    # SPM or was manually placed in /srv/salt/_modules etc.
    self.functions["saltutil.sync_all"](saltenv=self.opts["saltenv"])

    # Pull in the utils
    self.utils = salt.loader.utils(self.opts)

    # Then load the proxy module
    self.proxy = salt.loader.proxy(self.opts, utils=self.utils)

    # And re-load the modules so the __proxy__ variable gets injected
    (
        self.functions,
        self.returners,
        self.function_errors,
        self.executors,
    ) = self._load_modules()
    self.functions.pack["__proxy__"] = self.proxy
    self.proxy.pack["__salt__"] = self.functions
    self.proxy.pack["__ret__"] = self.returners
    self.proxy.pack["__pillar__"] = self.opts["pillar"]

    # Reload utils as well (chicken and egg, __utils__ needs __proxy__ and __proxy__ needs __utils__)
    self.utils = salt.loader.utils(self.opts, proxy=self.proxy)
    self.proxy.pack["__utils__"] = self.utils

    # Reload all modules so all dunder variables are injected
    self.proxy.reload_modules()

    # Start engines here instead of in the Minion superclass __init__
    # This is because we need to inject the __proxy__ variable but
    # it is not setup until now.
    self.io_loop.spawn_callback(salt.engines.start_engines,
                                self.opts,
                                self.process_manager,
                                proxy=self.proxy)

    proxy_init_func_name = "{}.init".format(fq_proxyname)
    proxy_shutdown_func_name = "{}.shutdown".format(fq_proxyname)
    if (proxy_init_func_name not in self.proxy
            or proxy_shutdown_func_name not in self.proxy):
        errmsg = (
            "Proxymodule {} is missing an init() or a shutdown() or both. "
            "Check your proxymodule.  Salt-proxy aborted.".format(fq_proxyname)
        )
        log.error(errmsg)
        self._running = False
        raise SaltSystemExit(code=-1, msg=errmsg)

    # Fall back to an empty executor list when the proxy module does not
    # define module_executors.
    self.module_executors = self.proxy.get(
        "{}.module_executors".format(fq_proxyname), lambda: [])()
    proxy_init_fn = self.proxy[proxy_init_func_name]
    proxy_init_fn(self.opts)

    self.opts["grains"] = salt.loader.grains(self.opts, proxy=self.proxy)

    self.mod_opts = self._prep_mod_opts()
    self.matchers = salt.loader.matchers(self.opts)
    self.beacons = salt.beacons.Beacon(self.opts, self.functions)
    uid = salt.utils.user.get_uid(user=self.opts.get("user", None))
    self.proc_dir = salt.minion.get_proc_dir(self.opts["cachedir"], uid=uid)

    if self.connected and self.opts["pillar"]:
        # The pillar has changed due to the connection to the master.
        # Reload the functions so that they can use the new pillar data.
        (
            self.functions,
            self.returners,
            self.function_errors,
            self.executors,
        ) = self._load_modules()
        if hasattr(self, "schedule"):
            self.schedule.functions = self.functions
            self.schedule.returners = self.returners

    if not hasattr(self, "schedule"):
        self.schedule = salt.utils.schedule.Schedule(
            self.opts,
            self.functions,
            self.returners,
            cleanup=[salt.minion.master_event(type="alive")],
            proxy=self.proxy,
            _subprocess_list=self.subprocess_list,
        )

    # add default scheduling jobs to the minions scheduler
    if self.opts["mine_enabled"] and "mine.update" in self.functions:
        self.schedule.add_job(
            {
                "__mine_interval": {
                    "function": "mine.update",
                    "minutes": self.opts["mine_interval"],
                    "jid_include": True,
                    "maxrunning": 2,
                    "run_on_start": True,
                    "return_job": self.opts.get("mine_return_job", False),
                }
            },
            persist=True,
        )
        log.info("Added mine.update to scheduler")
    else:
        self.schedule.delete_job("__mine_interval", persist=True)

    # add master_alive job if enabled
    if self.opts["transport"] != "tcp" and self.opts[
            "master_alive_interval"] > 0:
        self.schedule.add_job(
            {
                salt.minion.master_event(type="alive",
                                         master=self.opts["master"]): {
                    "function": "status.master",
                    "seconds": self.opts["master_alive_interval"],
                    "jid_include": True,
                    "maxrunning": 1,
                    "return_job": False,
                    "kwargs": {
                        "master": self.opts["master"],
                        "connected": True
                    },
                }
            },
            persist=True,
        )
        if (self.opts["master_failback"] and "master_list" in self.opts
                and self.opts["master"] != self.opts["master_list"][0]):
            self.schedule.add_job(
                {
                    salt.minion.master_event(type="failback"): {
                        "function": "status.ping_master",
                        "seconds": self.opts["master_failback_interval"],
                        "jid_include": True,
                        "maxrunning": 1,
                        "return_job": False,
                        "kwargs": {
                            "master": self.opts["master_list"][0]
                        },
                    }
                },
                persist=True,
            )
        else:
            self.schedule.delete_job(salt.minion.master_event(type="failback"),
                                     persist=True)
    else:
        self.schedule.delete_job(
            salt.minion.master_event(type="alive", master=self.opts["master"]),
            persist=True,
        )
        self.schedule.delete_job(salt.minion.master_event(type="failback"),
                                 persist=True)

    # proxy keepalive
    proxy_alive_fn = fq_proxyname + ".alive"
    if (proxy_alive_fn in self.proxy
            and "status.proxy_reconnect" in self.functions
            and self.opts.get("proxy_keep_alive", True)):
        # if `proxy_keep_alive` is either not specified, either set to False does not retry reconnecting
        self.schedule.add_job(
            {
                "__proxy_keepalive": {
                    "function": "status.proxy_reconnect",
                    "minutes":
                    self.opts.get("proxy_keep_alive_interval",
                                  1),  # by default, check once per minute
                    "jid_include": True,
                    "maxrunning": 1,
                    "return_job": False,
                    "kwargs": {
                        "proxy_name": fq_proxyname
                    },
                }
            },
            persist=True,
        )
        self.schedule.enable_schedule()
    else:
        self.schedule.delete_job("__proxy_keepalive", persist=True)

    #  Sync the grains here so the proxy can communicate them to the master
    self.functions["saltutil.sync_grains"](saltenv="base")
    self.grains_cache = self.opts["grains"]
    # Now setup the deltaproxies
    self.deltaproxy = {}
    self.deltaproxy_opts = {}
    self.deltaproxy_objs = {}
    self.proxy_grains = {}
    self.proxy_pillar = {}
    self.proxy_context = {}
    self.add_periodic_callback("cleanup", self.cleanup_subprocesses)
    for _id in self.opts["proxy"].get("ids", []):
        control_id = self.opts["id"]
        # Each sub-proxy gets its own copy of the control proxy's opts,
        # re-keyed to the sub-proxy id.
        proxyopts = self.opts.copy()
        proxyopts["id"] = _id

        proxyopts = salt.config.proxy_config(self.opts["conf_file"],
                                             defaults=proxyopts,
                                             minion_id=_id)
        proxyopts["id"] = proxyopts["proxyid"] = _id

        proxyopts["subproxy"] = True

        self.proxy_context[_id] = {"proxy_id": _id}

        # We need grains first to be able to load pillar, which is where we keep the proxy
        # configurations
        self.proxy_grains[_id] = salt.loader.grains(
            proxyopts, proxy=self.proxy, context=self.proxy_context[_id])
        self.proxy_pillar[_id] = yield salt.pillar.get_async_pillar(
            proxyopts,
            self.proxy_grains[_id],
            _id,
            saltenv=proxyopts["saltenv"],
            pillarenv=proxyopts.get("pillarenv"),
        ).compile_pillar()

        proxyopts["proxy"] = self.proxy_pillar[_id].get("proxy", {})
        if not proxyopts["proxy"]:
            log.warning(
                "Pillar data for proxy minion %s could not be loaded, skipping.",
                _id)
            continue

        # Remove ids
        proxyopts["proxy"].pop("ids", None)

        proxyopts["pillar"] = self.proxy_pillar[_id]
        proxyopts["grains"] = self.proxy_grains[_id]

        proxyopts["hash_id"] = self.opts["id"]

        _proxy_minion = ProxyMinion(proxyopts)
        _proxy_minion.proc_dir = salt.minion.get_proc_dir(
            proxyopts["cachedir"], uid=uid)

        _proxy_minion.proxy = salt.loader.proxy(
            proxyopts, utils=self.utils, context=self.proxy_context[_id])
        _proxy_minion.subprocess_list = self.subprocess_list

        # And load the modules
        (
            _proxy_minion.functions,
            _proxy_minion.returners,
            _proxy_minion.function_errors,
            _proxy_minion.executors,
        ) = _proxy_minion._load_modules(opts=proxyopts,
                                        grains=proxyopts["grains"],
                                        context=self.proxy_context[_id])

        # we can then sync any proxymodules down from the master
        # we do a sync_all here in case proxy code was installed by
        # SPM or was manually placed in /srv/salt/_modules etc.
        _proxy_minion.functions["saltutil.sync_all"](
            saltenv=self.opts["saltenv"])

        # And re-load the modules so the __proxy__ variable gets injected
        (
            _proxy_minion.functions,
            _proxy_minion.returners,
            _proxy_minion.function_errors,
            _proxy_minion.executors,
        ) = _proxy_minion._load_modules(opts=proxyopts,
                                        grains=proxyopts["grains"],
                                        context=self.proxy_context[_id])

        _proxy_minion.functions.pack["__proxy__"] = _proxy_minion.proxy
        _proxy_minion.proxy.pack["__salt__"] = _proxy_minion.functions
        _proxy_minion.proxy.pack["__ret__"] = _proxy_minion.returners
        _proxy_minion.proxy.pack["__pillar__"] = proxyopts["pillar"]
        _proxy_minion.proxy.pack["__grains__"] = proxyopts["grains"]

        # Reload utils as well (chicken and egg, __utils__ needs __proxy__ and __proxy__ needs __utils__)
        _proxy_minion.proxy.utils = salt.loader.utils(
            proxyopts,
            proxy=_proxy_minion.proxy,
            context=self.proxy_context[_id])

        _proxy_minion.proxy.pack["__utils__"] = _proxy_minion.proxy.utils

        # Reload all modules so all dunder variables are injected
        _proxy_minion.proxy.reload_modules()

        _proxy_minion.connected = True

        _fq_proxyname = proxyopts["proxy"]["proxytype"]

        proxy_init_fn = _proxy_minion.proxy[_fq_proxyname + ".init"]
        try:
            proxy_init_fn(proxyopts)
        except Exception as exc:  # pylint: disable=broad-except
            # A failing sub-proxy must not take down the control proxy or
            # the remaining sub-proxies; log and move on.
            log.error(
                "An exception occurred during the initialization of minion %s: %s",
                _id,
                exc,
                exc_info=True,
            )
            continue

        # Reload the grains
        self.proxy_grains[_id] = salt.loader.grains(
            proxyopts,
            proxy=_proxy_minion.proxy,
            context=self.proxy_context[_id])
        proxyopts["grains"] = self.proxy_grains[_id]

        if not hasattr(_proxy_minion, "schedule"):
            _proxy_minion.schedule = salt.utils.schedule.Schedule(
                proxyopts,
                _proxy_minion.functions,
                _proxy_minion.returners,
                cleanup=[salt.minion.master_event(type="alive")],
                proxy=_proxy_minion.proxy,
                new_instance=True,
                _subprocess_list=_proxy_minion.subprocess_list,
            )

        self.deltaproxy_objs[_id] = _proxy_minion
        self.deltaproxy_opts[_id] = copy.deepcopy(proxyopts)

        # proxy keepalive
        _proxy_alive_fn = _fq_proxyname + ".alive"
        if (_proxy_alive_fn in _proxy_minion.proxy and "status.proxy_reconnect"
                in self.deltaproxy_objs[_id].functions
                and proxyopts.get("proxy_keep_alive", True)):
            # if `proxy_keep_alive` is either not specified, either set to False does not retry reconnecting
            _proxy_minion.schedule.add_job(
                {
                    "__proxy_keepalive": {
                        "function": "status.proxy_reconnect",
                        "minutes":
                        proxyopts.get("proxy_keep_alive_interval",
                                      1),  # by default, check once per minute
                        "jid_include": True,
                        "maxrunning": 1,
                        "return_job": False,
                        "kwargs": {
                            "proxy_name": _fq_proxyname
                        },
                    }
                },
                persist=True,
            )
            _proxy_minion.schedule.enable_schedule()
        else:
            _proxy_minion.schedule.delete_job("__proxy_keepalive",
                                              persist=True)

    self.ready = True