def as_ecs_autoscaling(docker_service_autoscaling):
    """Validate and normalize an ECS service autoscaling policy.

    :param docker_service_autoscaling: user-supplied autoscaling overrides;
        falsy input returns the defaults unchanged
    :return: the merged policy dict with ``enabled`` forced to True and an
        integer ``target_value``
    :raises AnsibleFilterError: on an unknown ``service_metric`` or a
        non-integer / out-of-range ``target_value``
    """
    if not docker_service_autoscaling:
        return _app_scaling_defaults
    # User settings win over the defaults.
    autoscaling_policy = combine(_app_scaling_defaults, docker_service_autoscaling)
    service_metric = autoscaling_policy.get('service_metric')
    if service_metric not in _ecs_service_metrics:
        raise AnsibleFilterError(f"scaling policy must have a service_metric in set {_ecs_service_metrics}")
    # str.isdigit rejects a leading '-' as well as non-numeric text, so only
    # non-negative integers (in any representation str() produces) pass.
    target_value = str(autoscaling_policy.get('target_value', ''))
    if not target_value.isdigit():
        raise AnsibleFilterError("scaling policy must have a target_value as a positive integer")
    target_value = int(target_value)
    autoscaling_policy['target_value'] = target_value
    autoscaling_policy['enabled'] = True
    if target_value < 10:
        raise AnsibleFilterError("scaling policy target_value should be >= 10")
    # Request-count scaling has no upper bound; utilization metrics are
    # percentages and must stay within 10..90.
    if service_metric != 'ALBRequestCountPerTarget' and target_value > 90:
        raise AnsibleFilterError("Utilization scaling policy target_value should be between 10 and 90")
    return autoscaling_policy
def as_ecs_service(docker_service):
    """Apply container defaults and validate a list of ECS container definitions.

    Rules enforced:
      * exactly one container has ``expose`` truthy,
      * exactly one container uses hostPort 9000,
      * no two containers share a port,
      * every container has a unique, non-None name.

    :param docker_service: iterable of container definition dicts
    :return: list of container dicts merged on top of ``_container_defaults``
    :raises AnsibleFilterError: when any rule above is violated
    """
    ecs_service = [combine(_container_defaults, container) for container in docker_service]
    if sum(1 for c in ecs_service if c["expose"]) != 1:
        raise AnsibleFilterError("you must have exactly one exposed service, set 'expose: false' on others")
    port_counts = Counter(container['port'] for container in ecs_service)
    if port_counts.get(9000, 0) != 1:
        raise AnsibleFilterError("ecs_service should expose exactly one container with hostPort 9000")
    for port, count in port_counts.items():
        if count > 1:
            raise AnsibleFilterError(f"ecs_service should not have multiple containers on the same port ({port})")
    # Fixed message typo ("hava name" -> "have a name").
    if any(container.get('name') is None for container in ecs_service):
        raise AnsibleFilterError("all ecs containers should have a name")
    name_counts = Counter(container['name'] for container in ecs_service)
    for name, count in name_counts.items():
        if count > 1:
            raise AnsibleFilterError(f"ecs_service should not have multiple containers with the same name ({name})")
    return ecs_service
def table_join(item, lookup_table, key_name='id', collection=False):
    """Merge *item* with its matching entry (or entries) from a lookup table.

    :param item: the dict to transform
    :param lookup_table: the table searched for the key
    :param key_name: the key taken from *item* for the lookup
    :param collection: whether the joined elements form a collection
    :return: the merged dict (or list of dicts when *collection* is set);
        *item* is returned untouched when the key is absent
    """
    # Nothing to join on - hand the item back unchanged.
    if key_name not in item:
        return item
    found = table_lookup(item, lookup_table, key_name)
    if collection:
        return [dict(combine(entry, item)) for entry in found]
    return dict(combine(found, item))
def reverse_combine(a, b):
    """Recursively overlay dict ``a`` on top of dict ``b``.

    This is ansible's ``combine`` with the operands swapped, so the first
    argument carries the winning values.

    Args:
        a (dict): the values to merge in
        b (dict): the base dict that receives the values

    Returns:
        dict: ``b`` with the values of ``a`` merged over it
    """
    merged = combine(b, a, recursive=True)
    return merged
def run_module():
    """Ansible module entry point: merge or replace a Drupal config object via drush.

    Reads the current config with drush, computes the new config (recursive
    merge or outright replacement), and writes it back only when the YAML
    serialization actually differs.  Exits through module.exit_json /
    module.fail_json in every path.
    """
    # define available arguments/parameters a user can pass to the module
    module_args = dict(
        id=dict(type='str', required=True),
        config=dict(type='dict', required=True),
        merge=dict(type='bool', required=False, default=True),
        root=dict(type='str', required=False, default=os.getcwd()),
        drush_path=dict(type='str', required=False, default='drush')
    )
    # Seed the result so fail_json can always report old_config/config keys.
    result = dict(
        changed=False,
        old_config='',
        config=''
    )
    module = AnsibleModule(
        argument_spec=module_args,
        supports_check_mode=True
    )
    try:
        # orig_data keeps the raw config; clean_data is the comparable form.
        orig_data, clean_data = _drush_get(module)
    except DrushException:
        module.fail_json(msg=traceback.format_exc(), **result)
    result['old_config'] = orig_data
    # Check mode stops after the read: no diff is computed, changed stays False.
    if module.check_mode:
        module.exit_json(**result)
    if module.params['merge']:
        new_data = combine(clean_data, module.params['config'], recursive=True)
    else:
        new_data = module.params['config']
    dump_args = {
        'default_flow_style': False
    }
    # Compare via YAML serialization so key ordering/formatting noise is uniform.
    new_str = dump(new_data, **dump_args)
    old_str = dump(clean_data, **dump_args)
    if new_str == old_str:
        result['config'] = orig_data
        module.exit_json(**result)
    result['changed'] = True
    new_data = _restore_strip_config(new_data, orig_data)
    result['config'] = new_data
    try:
        _drush_set(module, new_data)
    except DrushException:
        # NOTE(review): formats sys.exc_info() instead of traceback.format_exc()
        # used above - inconsistent error style; confirm before unifying.
        module.fail_json(msg='{0}\n{1}\n{2}\n'.format(*sys.exc_info()), **result)
    module.exit_json(**result)
def rcombine(*terms, **kwargs):
    """Combine dictionaries with reversed precedence (earlier terms win).

    Each term may be a mapping or a list of mappings (a list is combined
    first with the same kwargs).  ``recursive`` is the only keyword argument
    accepted; when set, nested dicts are deep-merged.

    :raises AnsibleFilterError: on an unexpected keyword or non-dict term
    """
    recursive = kwargs.get('recursive', False)
    if set(kwargs) - {'recursive'}:
        raise AnsibleFilterError(
            "'recursive' is the only valid keyword argument")
    normalized = []
    for term in terms:
        if isinstance(term, list):
            # A list of dicts is collapsed into one dict up front.
            normalized.append(combine(*term, **kwargs))
        elif isinstance(term, MutableMapping):
            normalized.append(term)
        else:
            raise AnsibleFilterError("|combine expects dictionaries, got " + repr(term))
    # Reversed order makes the first term the highest-precedence layer.
    ordered = list(reversed(normalized))
    if recursive:
        return reduce(merge_hash, ordered)
    return dict(itertools.chain.from_iterable(map(iteritems, ordered)))
def run(self, tmp=None, task_vars=None):
    """Render a chain of Jinja2 templates and combine their YAML output into facts.

    Action plugin entry point.  Task arguments consumed:
      templates (required list of {template, options{list_merge, strip_empty_keys}}),
      root_key, dest, template_output, debug, remove_avd_switch_facts,
      cprofile_file.
    Returns the standard action result dict with 'ansible_facts' populated.
    """
    if task_vars is None:
        task_vars = {}
    result = super().run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    root_key = ""
    if self._task.args:
        # Optional cProfile run of this plugin; stats are dumped to the file at the end.
        cprofile_file = self._task.args.get("cprofile_file")
        if cprofile_file:
            profiler = cProfile.Profile()
            profiler.enable()
        if "root_key" in self._task.args:
            n = self._task.args.get("root_key")
            # root_key may itself be a Jinja2 expression; template it first.
            n = self._templar.template(n)
            if not isidentifier(n):
                raise AnsibleActionFail(
                    f"The argument 'root_key' value of '{n}' is not valid. Keys must start with a letter or underscore character, \
and contain only letters, numbers and underscores."
                )
            root_key = n
        if "templates" in self._task.args:
            t = self._task.args.get("templates")
            if isinstance(t, list):
                template_list = t
            else:
                raise AnsibleActionFail(
                    "The argument 'templates' is not a list")
        else:
            raise AnsibleActionFail("The argument 'templates' must be set")
        dest = self._task.args.get("dest", False)
        template_output = self._task.args.get("template_output", False)
        debug = self._task.args.get("debug", False)
        remove_avd_switch_facts = self._task.args.get(
            "remove_avd_switch_facts", False)
    else:
        raise AnsibleActionFail("The argument 'templates' must be set")
    output = {}
    template_lookup_module = lookup_loader.get('ansible.builtin.template',
                                               loader=self._loader,
                                               templar=self._templar)
    template_vars = task_vars
    # If the argument 'debug' is set, a 'avd_yaml_templates_to_facts_debug' list will be added to the output.
    # This list contains timestamps from every step for every template. This is useful for identifying slow templates.
    # Here we pull in the list from any previous tasks, so we can just add to the list.
    if debug:
        avd_yaml_templates_to_facts_debug = template_vars.get(
            'avd_yaml_templates_to_facts_debug', [])
    for template_item in template_list:
        if debug:
            # NOTE(review): debug_item aliases template_item, so timestamps are
            # written into the task's own template data - presumably intentional.
            debug_item = template_item
            debug_item['timestamps'] = {"starting": datetime.now()}
        template = template_item.get('template')
        if not template:
            raise AnsibleActionFail("Invalid template data")
        # Per-template merge/cleanup options.
        template_options = template_item.get('options', {})
        list_merge = template_options.get('list_merge', 'append')
        strip_empty_keys = template_options.get('strip_empty_keys', True)
        # If the argument 'root_key' is set, output will be assigned to this variable. If not set, the output will be set as "root" variables.
        # Here we combine the previous output with the input task_vars, to be able to use variables generated by the previous template in the next.
        if root_key:
            template_vars[root_key] = output
        else:
            template_vars = combine(task_vars, output, recursive=True)
        if debug:
            debug_item['timestamps']['run_template'] = datetime.now()
        # Here we parse the template, expecting the result to be a YAML formatted string
        template_result = template_lookup_module.run([template], template_vars)
        if debug:
            debug_item['timestamps']['load_yaml'] = datetime.now()
        # Load data from the template result.
        template_result_data = yaml.safe_load(template_result[0])
        # If the argument 'strip_empty_keys' is set, remove keys with value of null / None from the resulting dict (recursively).
        if strip_empty_keys:
            if debug:
                debug_item['timestamps'][
                    'strip_empty_keys'] = datetime.now()
            template_result_data = strip_null_from_data(
                template_result_data)
        # If there is any data produced by the template, combine it on top of previous output.
        if template_result_data:
            if debug:
                debug_item['timestamps']['combine_data'] = datetime.now()
            output = combine(output, template_result_data, recursive=True, list_merge=list_merge)
        if debug:
            debug_item['timestamps']['done'] = datetime.now()
            avd_yaml_templates_to_facts_debug.append(debug_item)
    # If the argument 'template_output' is set, run the output data through another jinja2 rendering.
    # This is to resolve any input values with inline jinja using variables/facts set by the input templates.
    if template_output:
        if debug:
            debug_item = {
                'action': 'template_output',
                'timestamps': {
                    'combine_data': datetime.now()
                }
            }
        if root_key:
            template_vars[root_key] = output
        else:
            template_vars = combine(task_vars, output, recursive=True)
        if debug:
            debug_item['timestamps']['templating'] = datetime.now()
        self._templar.available_variables = template_vars
        output = self._templar.template(output)
        if debug:
            debug_item['timestamps']['done'] = datetime.now()
            avd_yaml_templates_to_facts_debug.append(debug_item)
    # If the argument 'dest' is set, write the output data to a file.
    if dest:
        if debug:
            debug_item = {
                'action': 'dest',
                'dest': dest,
                'timestamps': {
                    'write_file': datetime.now()
                }
            }
        # Depending on the file suffix of 'dest' (default: 'json') we will format the data to yaml or just write the output data directly.
        # The Copy module used in 'write_file' will convert the output data to json automatically.
        if dest.split('.')[-1] in ["yml", "yaml"]:
            write_file_result = self.write_file(
                yaml.dump(output, indent=2, sort_keys=False, width=130),
                task_vars)
        else:
            write_file_result = self.write_file(output, task_vars)
        # Overwrite result with the result from the copy operation (setting 'changed' flag accordingly)
        result.update(write_file_result)
        if debug:
            debug_item['timestamps']['done'] = datetime.now()
            avd_yaml_templates_to_facts_debug.append(debug_item)
    # If 'dest' is not set, hardcode 'changed' to true, since we don't know if something changed and later tasks may depend on this.
    else:
        result['changed'] = True
    if debug:
        output[
            'avd_yaml_templates_to_facts_debug'] = avd_yaml_templates_to_facts_debug
    # If the argument 'root_key' is set, output will be assigned to this variable. If not set, the output will be set as "root" variables.
    if root_key:
        result['ansible_facts'] = {root_key: output}
    else:
        result['ansible_facts'] = output
    if remove_avd_switch_facts:
        # NOTE(review): set to None rather than deleted - presumably so the
        # existing host fact gets overwritten; confirm.
        result['ansible_facts']['avd_switch_facts'] = None
    if cprofile_file:
        profiler.disable()
        stats = pstats.Stats(profiler).sort_stats('cumtime')
        stats.dump_stats(cprofile_file)
    return result
def run(self, tmp=None, task_vars=None):
    """Run the action module.

    Gathers package/system facts, builds the list of package specs to manage
    (resolving per-package virtualenv settings against the plugin-level
    defaults), and returns an action result carrying the computed
    'ansible_facts'.  The remote tmp path is always cleaned up.
    """
    super(ActionModule, self).run(tmp, task_vars)
    self.__tmp = tmp
    self.__task_vars = task_vars
    self.__changed = False
    self.__debug_info = dict()
    try:
        # Collect host/package facts used by the helpers below.
        self._gather()
        packages_to_manage = list()
        packages = self._get_packages_to_manage()
        for package in packages:
            name = package["name"]
            state = package["state"]
            # Per-package overrides fall back to the plugin-level defaults.
            virtualenv = package.get(
                "virtualenv", self.__packages_python_virtualenv)
            virtualenv_command = package.get(
                "virtualenv_command",
                self.__packages_python_virtualenv_command)
            virtualenv_python = package.get(
                "virtualenv_python",
                self.__packages_python_virtualenv_python)
            virtualenv_site_packages = package.get(
                "virtualenv_site_packages",
                self.__packages_python_virtualenv_site_packages)
            extra_args = package.get(
                "extra_args", self.__packages_python_extra_args)
            # None means the package needs no action and is skipped.
            package_dict = self._get_package_spec(name, state, virtualenv,
                                                  virtualenv_command,
                                                  virtualenv_python,
                                                  virtualenv_site_packages,
                                                  extra_args)
            if package_dict is not None:
                packages_to_manage = packages_to_manage + [package_dict]
        action_result = dict(changed=self.__changed,
                             packages=packages_to_manage)
        ansible_facts = dict(
            _packages_capabilities_present=self.__capabilities_present,
            _packages_groups_present=self.__groups_present,
            packages=self.__package_facts
        )
        if self.__family == "os":
            ansible_facts["_packages_os_managed"] = \
                self.__packages_os_managed \
                + packages_to_manage
            action_result["module"] = self.__package_module
        if self.__family == "python":
            ansible_facts["_packages_python_managed"] = \
                self.__packages_python_managed \
                + packages_to_manage
            ansible_facts["_packages_python_present"] = \
                self.__packages_python_present
            ansible_facts["_packages_virtualenv_exists"] = \
                self.__packages_virtualenv_exists
            # NOTE(review): this fact key has a double leading underscore,
            # unlike the sibling '_packages_*' keys - looks like a typo, but
            # consumers may depend on it; confirm before renaming.
            ansible_facts["__packages_virtualenv_needs_upgrade"] = \
                self.__packages_virtualenv_needs_upgrade
            ansible_facts["_packages_python_virtualenv"] = \
                self.__packages_python_virtualenv
            ansible_facts["packages_python_virtualenv_dir"] = \
                "{path}/".format(path=self.__packages_python_virtualenv)
            ansible_facts["packages_python_bin_dir"] = \
                "{path}/bin/".format(
                    path=self.__packages_python_virtualenv)
            # Point the interpreter at the virtualenv for subsequent tasks.
            action = self._action(
                action="set_fact",
                args=dict(
                    ansible_python_interpreter=self.__python_interpreter))
            action.run(task_vars=self.__task_vars)
            action_result["python_interpreter"] = self.__python_interpreter
        if self.__ansible_facts_gathered:
            # Preserve previously gathered facts; new facts take precedence.
            ansible_facts = combine(self.__ansible_facts, ansible_facts)
        if self.__packages_debug:
            action_result["debug"] = self.__debug_info
        action_result["ansible_facts"] = ansible_facts
    finally:
        # Always clean up the remote temporary directory.
        self._remove_tmp_path(self._connection._shell.tmpdir)
    return action_result
def run(self, tmp=None, task_vars=None):
    """Render a list of Jinja2 templates and combine their YAML output into facts.

    Action plugin entry point.  Task arguments consumed: 'templates'
    (required list of {template, options{list_merge, strip_empty_keys}})
    and optional 'root_key' under which the combined output is nested.
    Returns the standard action result dict with 'ansible_facts' set.
    """
    if task_vars is None:
        task_vars = dict()
    result = super().run(tmp, task_vars)
    del tmp  # tmp no longer has any effect
    root_key = ""
    if self._task.args:
        if "root_key" in self._task.args:
            n = self._task.args.get("root_key")
            # root_key may itself be a Jinja2 expression; template it first.
            n = self._templar.template(n)
            if not isidentifier(n):
                raise AnsibleActionFail("The argument 'root_key' value of '%s' is not valid. Keys must start with a letter or underscore character, "
                                        "and contain only letters, numbers and underscores." % n)
            root_key = n
        if "templates" in self._task.args:
            t = self._task.args.get("templates")
            if isinstance(t, list):
                template_list = t
            else:
                raise AnsibleActionFail("The argument 'templates' is not a list")
        else:
            raise AnsibleActionFail("The argument 'templates' must be set")
    else:
        raise AnsibleActionFail("The argument 'templates' must be set")
    output = dict()
    template_lookup_module = TemplateLookupModule(loader=self._loader, templar=self._templar)
    template_vars = task_vars
    for template_item in template_list:
        template = template_item.get('template')
        if not template:
            raise AnsibleActionFail("Invalid template data")
        # Per-template merge/cleanup options.
        template_options = template_item.get('options', {})
        list_merge = template_options.get('list_merge', 'append')
        strip_empty_keys = template_options.get('strip_empty_keys', True)
        # Expose the output of earlier templates to the next one: either
        # nested under root_key, or merged over the incoming task_vars.
        if root_key:
            template_vars[root_key] = output
        else:
            template_vars = combine(task_vars, output, recursive=True)
        # Render the template; the result is expected to be a YAML string.
        template_output = template_lookup_module.run([template], template_vars)
        template_output_data = yaml.safe_load(template_output[0])
        # Optionally drop keys whose value is null/None (recursively).
        if strip_empty_keys:
            template_output_data = strip_null_from_data(template_output_data)
        if template_output_data:
            output = combine(output, template_output_data, recursive=True, list_merge=list_merge)
    # Nest the combined output under root_key when given, else set as root facts.
    if root_key:
        result['ansible_facts'] = {root_key: output}
    else:
        result['ansible_facts'] = output
    return result