def process_role_internal(self, host, policy, role, local=True, sender=None):
    """
    Run one role against one host. When local=False this executes on the
    remote side and first wires up callbacks that stream events back to the
    controller over `sender`.
    """
    if not local:
        # BUG FIX: the original imported a misspelled name
        # (EventStreamStreamCallbacks) while instantiating EventStreamCallbacks
        # below, which raised ImportError on every remote execution.
        from opsmop.callbacks.event_stream import EventStreamCallbacks
        from opsmop.callbacks.common import CommonCallbacks
        Context.set_callbacks([ EventStreamCallbacks(sender=sender), CommonCallbacks() ])
    role.pre()
    # set up the variable scope - this is done later by walk_handlers for lower-level objects in the tree
    policy.attach_child_scope_for(role)
    # tell the callbacks we are in validate mode - this may alter or quiet their output
    Callbacks.on_validate()
    # always validate the role in every mode (VALIDATE, CHECK, or APPLY)
    self.validate_role(role)
    # skip the role if we need to
    if not role.conditions_true():
        Callbacks.on_skipped(role)
        return
    # process the tree for real for non-validate modes
    if not Context.is_validate():
        self.execute_role_resources(host, role)
        self.execute_role_handlers(host, role)
    # run any user hooks
    role.post()
def execute_resource(self, host, resource, handlers=False):
    """
    Dispatch the plan/apply interchange for a single resource in the tree.
    Invoked recursively (via walk_children) for every resource.
    """
    assert host is not None

    # collections are containers, not actionable leaves - nothing to do here
    if issubclass(type(resource), Collection):
        return

    # in handler mode, a handler only runs if one of its handles was signaled
    if handlers and not Context().has_seen_any_signal(host, resource.all_handles()):
        Callbacks().on_skipped(resource, is_handler=handlers)
        return

    # announce the resource so callbacks can display progress information
    Callbacks().on_resource(resource, handlers)

    # always plan; apply for real only in apply mode, otherwise simulate
    # (check mode assumes the plan was executed)
    provider = self.do_plan(resource)
    assert provider is not None
    if not Context().is_apply():
        # is_check
        self.do_simulate(host, provider)
    else:
        self.do_apply(host, provider, handlers)

    # if anything has changed, let the callbacks know about it
    self.signal_changes(host=host, provider=provider, resource=resource)
def process_role_for_host(self, host, policy, role, router=None):
    """
    Run a role against a single host: directly for localhost, or
    asynchronously over a mitogen SSH (optionally sudo) connection for
    remote hosts.
    """
    Context.set_host(host)
    if host.name == "127.0.0.1":
        self.process_role_internal(host, policy, role, local=True)
        return
    context = self.get_remote_context(host, role)
    # BUG FIX: removed a leftover debug print of the SSH context - it wrote
    # the connection password (and sudo password) to stdout.
    remote = router.ssh(hostname=context['hostname'], check_host_keys=context['check_host_keys'], username=context['username'], password=context['password'])
    if context['sudo']:
        # escalate through sudo, tunneled via the SSH connection
        final = router.sudo(username=context['sudo_username'], password=context['sudo_password'], via=remote)
    else:
        final = remote
    receiver = mitogen.core.Receiver(router)
    global MITOGEN_SELECT
    MITOGEN_SELECT.add(receiver)
    sender = receiver.to_sender()
    def remote_fn(sender):
        # executed on the remote side; streams events back via sender
        return self.process_role_remote(host, policy, role, local=False, sender=sender)
    call_recv = final.call_async(remote_fn, sender)
    MITOGEN_SELECT.add(call_recv)
def on_complete(self, host, evt):
    """
    Fired when a host finishes its run: gives the role a chance to run its
    after_contact hook, recording (but not propagating) any failure it
    raises, then prints the COMPLETE banner.
    """
    try:
        Context().role().after_contact(host)
    except Exception as exc:
        print(str(exc))
        # remember the failure so the summary can report it later
        Context().record_host_failure(host, exc)
    self.info(host, 'COMPLETE', sep='=', foreground=Fore.GREEN)
def __init__(self, policies, local_host=None, tags=None, push=False, extra_vars=None, limit_groups=None, limit_hosts=None, relative_root=None):
    """
    The Executor runs a list of policies in either CHECK, APPLY, or VALIDATE modes.

    :param policies: list of Policy objects to run
    :param local_host: Host object representing the control machine (defaults to 127.0.0.1)
    :param tags: optional tags limiting which resources run
    :param push: True to run in push (remote) mode
    :param extra_vars: command-line variable overrides
    :param limit_groups: restrict push mode to these inventory groups
    :param limit_hosts: restrict push mode to these hosts
    :param relative_root: base directory for relative file references
    """
    # raise a real exception rather than assert: asserts vanish under `python -O`
    if not isinstance(policies, list):
        raise TypeError("policies must be a list, got %s" % type(policies).__name__)
    self._policies = policies
    self._tags = tags
    self._push = push
    self._limit_groups = limit_groups
    self._limit_hosts = limit_hosts
    if local_host is None:
        local_host = Host("127.0.0.1")
    self._local_host = local_host
    # extra_vars / relative_root are shared globally through Context
    Context().set_extra_vars(extra_vars)
    Context().set_relative_root(relative_root)
    self.connection_manager = None
def run_all_policies(self, mode=None):
    """
    Runs every configured policy in the requested mode.
    """
    # mode is shared globally through the Context
    Context.set_mode(mode)
    for current_policy in self._policies:
        self.run_policy(policy=current_policy)
def scope(self):
    """
    Gets the scope object for this resource, which is assigned while
    executing the resource. Without a scope object it is impossible to
    compute variables for use in template and conditional evaluation, so
    one is lazily attached from the currently-active role if missing.
    NOTE(review): the docstring previously referenced an assert that no
    longer exists; attach_child_scope_for is presumed to set self._scope
    as a side effect - confirm.
    """
    if self._scope is None:
        # lazily attach a scope from the active role the first time it is needed
        role = Context().role()
        role.attach_child_scope_for(self)
    return self._scope
def process_summary(self, hosts):
    """
    End-of-run summary: reports which hosts changed and which failed,
    raising OpsMopStop when any host recorded a failure.
    """
    failures = Context().host_failures()
    failed_hosts = list(failures.keys())
    # hosts with at least one recorded action are the ones that changed
    changed_hosts = [h for h in hosts if h.actions()]
    if changed_hosts:
        # BUG FIX: previously passed the full host list instead of the
        # changed subset computed just above
        ReplayCallbacks().on_host_changed_list(changed_hosts)
    if failed_hosts:
        ReplayCallbacks().on_terminate_with_host_list(failed_hosts)
        raise OpsMopStop()
def template_context(self):
    """
    Get the full Jinja2 context available for templating at this resource.
    This includes facts + variables.
    """
    ctx = Context()
    # start from the resource's own variables, then layer on globals,
    # facts, and finally --extra-vars (later layers win)
    results = self.get_variables()
    for layer in (ctx.globals(), self.fact_context(), ctx.extra_vars()):
        results.update(layer)
    return results
def signal_changes(self, host=None, provider=None, resource=None):
    """
    Record any events the resource signaled while being processed and
    notify the callbacks, but only when the provider reported a change.
    """
    assert host is not None
    if provider.has_changed() and resource.signals:
        # remember every event signaled while processing this role
        Context.add_signal(host, resource.signals)
        # tell the callbacks that a signal occurred
        Callbacks.on_signaled(resource, resource.signals)
def same_contents(self, dest, src, remote=False):
    """
    Return True when dest exists and its checksum matches src's.

    :param dest: path on the local filesystem
    :param src: source path; resolved against the relative root when remote
    :param remote: True to fetch src's checksum from the fileserver context
    """
    if not self.exists(dest):
        return False
    # BUG FIX: removed an unused `hashlib.sha256()` instance that was
    # allocated here and never touched
    c1 = self.checksum(dest)
    if not remote:
        c2 = self.checksum(src)
    else:
        # FIXME: this is slightly duplicated with provider code
        if not src.startswith('/'):
            src = os.path.join(Context().relative_root(), src)
        c2 = Context().get_checksum(src)
    return (c1 == c2)
def copy_file(self, src, dest):
    """
    Copy a file in local mode, or download from the fileserver in push mode.
    """
    caller = Context().caller()
    if caller:
        # push mode: pull the file over mitogen's file service
        if not src.startswith('/'):
            src = os.path.join(Context().relative_root(), src)
        # BUG FIX: the destination handle was never closed; use a context
        # manager so it is released even when the transfer raises
        with open(dest, "wb", buffering=0) as bio:
            ok, metadata = mitogen.service.FileService.get(caller, src, bio)
            if not ok:
                raise Exception("file transfer failed")
    else:
        # local mode: plain filesystem copy preserving metadata
        shutil.copy2(src, dest)
def validate_role(self, role):
    """
    Validates inputs for one role. Resources and handlers must be walked
    separately; validate() raises when problems are found.
    """
    def validate(resource):
        # BUG FIX: previously returned the bound method object itself
        # instead of invoking it, so validation never actually ran
        return resource.validate()
    # temporarily force VALIDATE mode, restoring the previous mode afterward
    original_mode = Context.mode()
    Context.set_mode(VALIDATE)
    role.walk_children(items=role.get_children('resources'), which='resources', fn=validate, tags=self._tags)
    role.walk_children(items=role.get_children('handlers'), which='handlers', fn=validate)
    if original_mode:
        Context.set_mode(original_mode)
def on_command_result(self, provider, result):
    """
    Handle intermediate command results. Fatal, non-primary results are
    recorded as host failures and raised immediately; primary results are
    left to the Executor so failed_when/ignore_errors can take effect.
    """
    is_intermediate_failure = (not result.primary) and result.fatal
    if not is_intermediate_failure:
        return
    err = CommandError(provider, "command failed", result)
    self.record_host_failure(Context().host(), err)
    raise err
def remote_fn(caller, params, sender): """ This is the remote function used for mitogen calls """ # FIXME: REFACTOR: we have a bit of a growing inconsistency between what is a constructor parameter and what is passed around in Context. # we should change this to have context objects that have more meat, but also get passed around versus acting globally, and smaller # function signatures across the board. import dill from opsmop.core.executor import Executor params = dill.loads(zlib.decompress(params)) host = params['host'] policy = params['policy'] role = params['role'] mode = params['mode'] tags = params['tags'] checksums = params['checksums'] relative_root = params['relative_root'] hostvars = params['hostvars'] extra_vars = params['extra_vars'] Context().set_mode(mode) Context().set_caller(caller) assert relative_root is not None Context().set_checksums(checksums) Context().update_globals(hostvars) policy.roles = Roles(role) Callbacks().set_callbacks([ EventStreamCallbacks(sender=sender), LocalCliCallbacks(), CommonCallbacks() ]) executor = Executor([policy], local_host=host, push=False, tags=params['tags'], extra_vars=extra_vars, relative_root=relative_root) # remove single_role # FIXME: care about mode executor.apply()
def slurp(self, src, remote=False):
    """
    Read a file into memory, use the fileserver if in push mode, otherwise
    just use the filesystem.
    """
    caller = Context().caller()
    if not (caller and remote):
        # plain local filesystem read
        fd = open(src)
        data = fd.read()
        fd.close()
        return data
    # push mode: fetch the file via mitogen's file service into a buffer
    if not src.startswith('/'):
        src = os.path.join(Context().relative_root(), src)
    bio = io.BytesIO()
    ok, metadata = mitogen.service.FileService.get(caller, src, bio)
    data = bio.getvalue().decode('utf-8')
    bio.close()
    return data
def remotify_role(self, host, policy, role, mode):
    """
    Dispatch one role to one remote host asynchronously over mitogen.

    Returns True when the call was dispatched (or deliberately skipped via
    should_contact), False when before_contact failed, and None when the
    host is excluded by limits. NOTE(review): the mixed None/True/False
    returns look accidental - confirm callers before normalizing.
    """
    if self.should_exclude_from_limits(host):
        return
    try:
        if not role.should_contact(host):
            # the role opted out of this host entirely
            Callbacks().on_skipped(role)
            return True
        else:
            # user hook that runs before any connection is made
            role.before_contact(host)
    except Exception as e:
        print(str(e))
        Context().record_host_failure(host, e)
        return False
    target_host = self.actual_host(role, host)
    target_host.reset_actions()
    import dill
    conn = self.connect(host, role)
    # per-host receiver for streamed events
    receiver = mitogen.core.Receiver(self.router)
    self.events_select.add(receiver)
    sender = self.status_recv.to_sender()
    # bundle everything the remote side needs; see remote_fn for unpacking
    params = dict(host=target_host, policy=policy, role=role, mode=mode, relative_root=Context().relative_root(), tags=self.tags, checksums=self.checksums, hostvars=host.all_variables(), extra_vars=Context().extra_vars())
    # dill handles the rich object graph; zlib keeps the payload small on the wire
    params = zlib.compress(dill.dumps(params), level=9)
    # fire and forget - results are collected later through calls_sel
    call_recv = conn.call_async(remote_fn, self.myself, params, sender)
    self.calls_sel.add(call_recv)
    return True
def on_taken_actions(self, provider, actions_taken):
    """
    Post-apply sanity check: every action the provider planned must have
    been taken, and nothing extra. Raises ProviderError on any mismatch.
    """
    if provider.skip_plan_stage():
        return
    # compare as sorted strings so ordering differences don't matter
    taken = sorted(str(action) for action in provider.actions_taken)
    planned = sorted(str(action) for action in provider.actions_planned)
    if taken == planned:
        return
    err = ProviderError(provider, "actions taken (%s) do not equal planned (%s)" % (taken, planned))
    self.record_host_failure(Context().host(), err)
    raise err
def run_all_policies(self, mode=None):
    """
    Runs all policies in the specified mode; in push mode each policy gets
    a fresh ConnectionManager honoring the configured limits.
    """
    Context().set_mode(mode)
    for current_policy in self._policies:
        if self._push:
            # rebuilt per policy so connection state does not carry over
            self.connection_manager = ConnectionManager(current_policy, limit_groups=self._limit_groups, limit_hosts=self._limit_hosts)
        self.run_policy(policy=current_policy)
def run(self):
    """
    Plan this resource and either apply it for real or simulate it,
    copying the provider's result fields back onto the resource.
    """
    Callbacks().on_resource(self)
    provider = self.do_plan()
    # apply for real only in apply mode, otherwise simulate
    result = self.do_apply(provider) if Context().is_apply() else self.do_simulate(provider)
    # copy over results
    self.changed = result.changed
    self.data = result.data
    self.rc = result.rc
def process_local_role(self, policy=None, role=None):
    """
    Run one role against the local host, funneling any uncaught exception
    through the configured fatal-error callbacks.
    """
    local = self._local_host
    Context().set_host(local)
    policy.attach_child_scope_for(role)
    try:
        role.main()
    except Exception as exc:
        # process *any* uncaught exceptions through the configured exception
        # handlers - resources where failed_when / ignore_errors was not
        # used, as well as arbitrary python exceptions
        Callbacks().on_fatal(exc, traceback.format_exc())
def should_replace_using_file(self):
    """
    for from_file, should we write the file?
    """
    if not FileTests.exists(self.name):
        # nothing on disk yet, so we must write it
        return True
    if not self.overwrite:
        return False
    # in push mode (a caller is set) checksums come from the fileserver
    remote = bool(Context().caller())
    return not FileTests.same_contents(self.name, self.from_file, remote=remote)
def event_loop(self):
    """
    Main controller loop for push mode: multiplexes status messages and
    completed remote-call results until every dispatched call finishes,
    then tears down the mitogen plumbing.
    """
    # one select over both the status receiver and the set of pending calls
    both_sel = mitogen.select.Select([self.status_recv, self.calls_sel], oneshot=False)
    try:
        # calls_sel stays truthy while any remote call is still outstanding
        while self.calls_sel:
            try:
                msg = both_sel.get(timeout=60.0)
            except mitogen.core.TimeoutError:
                print("No update in 60 seconds, something's broke?")
                raise Exception("boom")
            host = self.hosts_by_context[msg.src_id]
            if msg.receiver is self.status_recv:
                # https://mitogen.readthedocs.io/en/stable/api.html#mitogen.core.Message.receiver
                # handle a status update
                try:
                    response = msg.unpickle()
                except Exception as e:
                    # NOTE(review): if unpickle fails, `response` is unbound
                    # when dereferenced just below - likely needs a continue
                    self.replay_callbacks.on_failed_host(host, e)
                event = response['evt']
                # dispatch to on_<event> when the replay callbacks define it,
                # falling back to on_default otherwise
                cb_func = getattr(self.replay_callbacks, "on_%s" % event, None)
                if cb_func:
                    cb_func(host, response)
                else:
                    self.replay_callbacks.on_default(host, response)
            elif msg.receiver is self.calls_sel:
                # handle a function call result.
                try:
                    msg.unpickle()
                    # all done for host
                except mitogen.core.CallError as e:
                    Context().record_host_failure(host, e)
                    if 'opsmop.core.errors' in str(e):
                        # callbacks should have already eaten it
                        pass
                    else:
                        raise e
    finally:
        # always shut down the selects, receiver, and pool, even on errors
        both_sel.close()
        self.calls_sel.close()
        self.status_recv.close()
        self.pool.stop(join=True)
def template_context(self):
    """
    Get the full Jinja2 context available for templating at this resource.
    This includes facts + variables.

    Precedence (first = LOWEST priority; later layers win):
      1. context globals (e.g. inventory variables in push mode - a bit of
         a misnomer presently)
      2. loose variables on the resource itself, including role parameters
      3. facts
      4. regular python variables inside the method scope
      5. --extra-vars specified on the command line

    This order is a first pass; if there are concerns/questions, please ask!
    """
    # FIXME: verify inventory variables hop over when doing local ops in push mode
    ctx = Context()
    layers = (ctx.globals(), self.get_variables(), self.fact_context(), ctx.scope_variables(), ctx.extra_vars())
    results = dict()
    for layer in layers:
        results.update(layer)
    return results
def run_policy(self, policy=None):
    """
    Runs one specific policy in VALIDATE, CHECK, or APPLY mode.
    """
    # assign a new top scope to the policy object.
    policy.init_scope()
    for role in policy.get_roles().items:
        Context().set_role(role)
        # push mode dispatches remotely; otherwise run on this machine
        if self._push:
            self.process_remote_role(policy, role)
        else:
            self.process_local_role(policy, role)
    Callbacks().on_complete(policy)
def process_local_role(self, policy=None, role=None):
    """
    Run a single role locally: validate it in every mode, then walk its
    resources and handlers unless we are only validating or the role's
    conditions fail.
    """
    host = self._local_host
    Context().set_host(host)
    role.pre()
    # scope attachment for lower-level tree objects happens later via walk_handlers
    policy.attach_child_scope_for(role)
    # callbacks may alter or quiet their output during validation
    Callbacks().on_validate()
    # validation happens in every mode (VALIDATE, CHECK, or APPLY)
    self.validate_role(role)
    if not role.conditions_true():
        # role conditions failed - skip it entirely (no post hook)
        Callbacks().on_skipped(role)
        return
    # CHECK/APPLY modes actually walk the tree; VALIDATE stops at validation
    if not Context().is_validate():
        self.execute_role_resources(host, role)
        self.execute_role_handlers(host, role)
    # run any user hooks
    role.post()
def run_all_policies(self, mode):
    """
    Runs all policies in the specified mode, returning the per-policy
    Context objects (which hold state such as signalled events).
    """
    contexts = []
    for policy in self._policies:
        # a fresh context per policy so signalled-event state is cleared
        # between executions
        ctx = Context(mode=mode, callbacks=self._callbacks)
        # actual running of the policy here:
        self.run_policy(policy=policy, context=ctx, mode=mode)
        contexts.append(ctx)
    return contexts
def prepare_for_role(self, role):
    """
    Set up the mitogen plumbing (file service, pool, selects, receivers)
    needed to run one role in push mode, and register fileserving paths.
    """
    router = self.router
    self.file_service = mitogen.service.FileService(router)
    self.pool = mitogen.service.Pool(router, services=[self.file_service])
    self.events_select = mitogen.select.Select(oneshot=False)
    self.replay_callbacks = ReplayCallbacks()
    self.calls_sel = mitogen.select.Select()
    self.status_recv = mitogen.core.Receiver(router)
    self.myself = router.myself()
    # role-level fileserving paths win; fall back to the policy's
    paths = role.allow_fileserving_paths()
    if paths is None:
        paths = self.policy.allow_fileserving_paths()
    for path in paths:
        # '.' means the project's relative root
        if path == '.':
            path = Context().relative_root()
        self.register_files(path)
def on_do(self, provider, action):
    """
    In apply mode, show each action as it is performed.
    """
    if not Context.is_apply():
        return
    self.i3("do: %s" % action.do)
def on_needs(self, provider, action):
    """
    In check mode, show what the provider would need to do (unless the
    provider skips the plan stage entirely).
    """
    if provider.skip_plan_stage():
        return
    if not Context.is_check():
        return
    self.i3("needs: %s" % action.do)