def check_valid_modification(module, values, modifiable_params):
    """Validate requested ElastiCache parameter modifications.

    Args:
        module: AnsibleModule instance, used to fail fast on invalid input.
        values (dict): parameter name -> requested new value.
        modifiable_params (dict): parameter name -> (allowed values, datatype
            name, current value) describing the cache parameter group.

    Returns:
        bool: True if at least one requested value differs from the current one.
    """
    # Map the datatype names reported by the API onto python types.
    # Hoisted out of the loop: the original rebuilt this dict per parameter.
    str_to_type = {"integer": int, "string": text_type}

    changed_with_update = False

    for parameter, new_value in values.items():
        # check valid modifiable parameters
        if parameter not in modifiable_params:
            module.fail_json(msg="%s is not a modifiable parameter. Valid parameters to modify are: %s." % (parameter, modifiable_params.keys()))

        # check allowed datatype for modified parameters
        if not isinstance(new_value, str_to_type[modifiable_params[parameter][1]]):
            module.fail_json(msg="%s (type %s) is not an allowed value for the parameter %s. Expected a type %s." %
                             (new_value, type(new_value), parameter, modifiable_params[parameter][1]))

        # check allowed values for modifiable parameters
        # (ints skip the allowed-values list; presumably range-checked
        # server-side — TODO confirm)
        if text_type(new_value) not in modifiable_params[parameter][0] and not isinstance(new_value, int):
            module.fail_json(msg="%s is not an allowed value for the parameter %s. Valid parameters are: %s." %
                                 (new_value, parameter, modifiable_params[parameter][0]))

        # check if a new value is different from current value
        if text_type(new_value) != modifiable_params[parameter][2]:
            changed_with_update = True

    return changed_with_update
예제 #2
0
    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.

        :arg data: string (possibly an AnsibleUnicode subclass) to parse
        :kwarg file_name: name used when reporting parse errors
        :kwarg show_content: whether error output may include the raw data
        :returns: the parsed python datastructure (or None on handled error)
        '''
        new_data = None
        try:
            # we first try to load this data as JSON
            new_data = json.loads(data)
        except Exception:
            # Narrowed from a bare ``except:`` which also swallowed
            # SystemExit/KeyboardInterrupt.  Must not be JSON; let YAML try.
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data
예제 #3
0
    def _normalize_to_cf(self, value):
        """
        Return value with all python datetime and Data objects converted
        to their CoreFoundation equivalent. Python strings are converted
        to unicode.

        If value contains a type not supported by the .plist format,
        a TypeError will be raised.
        """
        if isinstance(value, dict):
            # convert values in place; keys are left untouched
            for key, item in value.items():
                value[key] = self._normalize_to_cf(item)
        elif isinstance(value, (list, tuple)):
            # note: tuples come back as lists
            value = [self._normalize_to_cf(item) for item in value]
        elif isinstance(value, datetime.datetime):
            value = self._datetime_to_cfdate(value)
        elif isinstance(value, Data):
            # Data wraps raw bytes; re-wrap them as a CFData object
            value = value.binary
            value = CoreFoundation.CFDataCreate(None, value, len(value))
        elif isinstance(value, binary_type):
            # bytes must be valid UTF-8 to be representable as a plist string
            try:
                value = text_type(value, 'utf-8')
            except UnicodeDecodeError:
                raise TypeError(
                    'Invalid string {0} of value `{1}` is unsupported.'
                    .format(type(value), repr(value))
                )
        elif (value is not None and
                not isinstance(value, integer_types) and
                not isinstance(value, (bool, float, text_type))):
            # None, bool, int, float and text pass through unchanged;
            # anything else cannot be represented in a plist
            raise TypeError('{0} of value `{1}` is unsupported.'.format(
                type(value), repr(value)
            ))
        return value
def reset(module, conn, name, values):
    """ Reset ElastiCache parameter group if the current information is different from the new information.

    Args:
        module: AnsibleModule instance used for error reporting.
        conn: boto3 ElastiCache client.
        name (str): cache parameter group name.
        values (dict): parameter name -> value; empty/None resets everything.

    Returns:
        tuple: (API response, bool changed flag).
    """
    # snapshot of the group's modifiable parameters before the reset, used
    # below to detect whether anything actually changed
    old_parameters_dict = make_current_modifiable_param_dict(module, conn, name)

    # determine whether to reset all or specific parameters
    # (the original initialized format_parameters twice; once is enough)
    format_parameters = []
    if values:
        all_parameters = False
        for key, value in values.items():
            format_parameters.append({'ParameterName': key, 'ParameterValue': text_type(value)})
    else:
        all_parameters = True

    try:
        response = conn.reset_cache_parameter_group(CacheParameterGroupName=name, ParameterNameValues=format_parameters, ResetAllParameters=all_parameters)
    except boto.exception.BotoServerError:
        # the formatted traceback carries the details; no need to bind ``e``
        module.fail_json(msg="Unable to reset cache parameter group.", exception=traceback.format_exc())

    # determine changed
    new_parameters_dict = make_current_modifiable_param_dict(module, conn, name)
    changed = check_changed_parameter_values(values, old_parameters_dict, new_parameters_dict)

    return response, changed
예제 #5
0
파일: core.py 프로젝트: sshyran/ansible
def from_yaml_all(data):
    """Parse a multi-document YAML string; non-string input is passed through."""
    if not isinstance(data, string_types):
        return data
    # to_text() guarantees a text object; the extra text_type() call strips
    # any custom string wrapper class so that CSafeLoader can read the data.
    text = to_text(data, errors='surrogate_or_strict')
    return yaml_load_all(text_type(text))
예제 #6
0
 def env_prefix(self, **kwargs):
     """Build a fish-shell prefix exporting the merged environment."""
     merged = self.env.copy()
     merged.update(kwargs)
     commands = []
     for name, val in merged.items():
         # 'set -lx' defines a local, exported variable
         commands.append('set -lx %s %s;' % (name, shlex_quote(text_type(val))))
     return ' '.join(commands)
예제 #7
0
    def _get_magic_variables(self, play, host, task, include_hostvars,
                             include_delegate_to):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.

        Any of play/host/task may be None; each only contributes its own
        variables when present.  include_delegate_to is accepted for
        interface compatibility but not used here.
        '''

        variables = {}
        # always available, independent of play/host/task
        variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
        variables['ansible_playbook_python'] = sys.executable

        if host:
            # host already provides some magic vars via host.get_vars()
            if self._inventory:
                variables['groups'] = self._inventory.get_groups_dict()
        if play:
            variables['role_names'] = [r._role_name for r in play.roles]

        if task:
            # role_* vars only exist for tasks that belong to a role
            if task._role:
                variables['role_name'] = task._role.get_name()
                variables['role_path'] = task._role._role_path
                variables['role_uuid'] = text_type(task._role._uuid)

        if self._inventory is not None:
            if play:
                # a templated hosts: pattern cannot be resolved yet, so fall
                # back to matching all hosts
                templar = Templar(loader=self._loader)
                if templar.is_template(play.hosts):
                    pattern = 'all'
                else:
                    pattern = play.hosts or 'all'
                # add the list of hosts in the play, as adjusted for limit/filters
                variables['ansible_play_hosts_all'] = [
                    x.name for x in self._inventory.get_hosts(
                        pattern=pattern, ignore_restrictions=True)
                ]
                variables['ansible_play_hosts'] = [
                    x for x in variables['ansible_play_hosts_all']
                    if x not in play._removed_hosts
                ]
                variables['ansible_play_batch'] = [
                    x.name for x in self._inventory.get_hosts()
                    if x.name not in play._removed_hosts
                ]

                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
                # however this would take work in the templating engine, so for now we'll add both
                variables['play_hosts'] = variables['ansible_play_batch']

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        # Set options vars
        for option, option_value in iteritems(self._options_vars):
            variables[option] = option_value

        if self._hostvars is not None and include_hostvars:
            variables['hostvars'] = self._hostvars

        return variables
예제 #8
0
 def env_prefix(self, **kwargs):
     """Build an ``env`` command line; None-valued variables are unset with -u."""
     # All the -u options must come before the assignments.
     unsets = ['-u %s' % name for name, val in kwargs.items() if val is None]
     assigns = ['%s=%s' % (name, shlex_quote(text_type(val)))
                for name, val in kwargs.items() if val is not None]
     return 'env %s' % ' '.join(unsets + assigns)
예제 #9
0
    def install_plugin(self):
        """Install the plugin when it is not present; record the action."""
        if self.existing_plugin:
            return
        if not self.check_mode:
            try:
                self.existing_plugin = self.dclient.plugins.install(self.parameters.plugin_name, None)
            except APIError as exc:
                self.client.fail(text_type(exc))
        self.results['actions'].append("Installed plugin %s" % self.parameters.plugin_name)
        self.results['changed'] = True
예제 #10
0
    def remove_volume(self):
        """Remove the volume when it exists; record the action taken."""
        if not self.existing_volume:
            return
        if not self.check_mode:
            try:
                self.client.remove_volume(self.parameters.volume_name)
            except APIError as exc:
                self.client.fail(text_type(exc))
        self.results['actions'].append("Removed volume %s" % self.parameters.volume_name)
        self.results['changed'] = True
예제 #11
0
    def remove_volume(self):
        """Delete the named docker volume if present, honouring check mode."""
        if not self.existing_volume:
            return
        name = self.parameters.volume_name
        if not self.check_mode:
            try:
                self.client.remove_volume(name)
            except APIError as err:
                self.client.fail(text_type(err))
        self.results['actions'].append("Removed volume %s" % name)
        self.results['changed'] = True
예제 #12
0
 def env_prefix(self, **kwargs):
     """Build a fish-shell prefix: unset None-valued vars, export the rest.

     NOTE(review): the original also merged ``self.env`` into a local dict
     that was never read afterwards (only ``kwargs`` is iterated below);
     that dead code has been removed.  Confirm ``self.env`` was not meant
     to be included — a sibling implementation iterates the merged dict.
     """
     commands = []
     for name, val in kwargs.items():
         if val is None:
             # 'set -e' erases the variable
             commands.append('set -e %s;' % name)
         else:
             commands.append('set -lx %s %s;' % (name, shlex_quote(text_type(val))))
     return ' '.join(commands)
예제 #13
0
    def remove_plugin(self):
        """Remove an installed plugin (optionally forced); record the action."""
        if not self.existing_plugin:
            return
        if not self.check_mode:
            try:
                self.existing_plugin.remove(self.parameters.force_remove)
            except APIError as exc:
                self.client.fail(text_type(exc))
        self.results['actions'].append("Removed plugin %s" % self.parameters.plugin_name)
        self.results['changed'] = True
예제 #14
0
 def enable(self):
     """Enable the plugin, installing it first when necessary.

     The original duplicated the enable-and-record logic in both the
     already-installed and freshly-installed branches; the two copies are
     merged here without changing behaviour.
     """
     timeout = self.parameters.enable_timeout
     if self.existing_plugin:
         if self.existing_plugin.enabled:
             # already enabled: nothing to do, nothing to record
             return
     else:
         # install first (records its own action / changed flag)
         self.install_plugin()
     if not self.check_mode:
         try:
             self.existing_plugin.enable(timeout)
         except APIError as e:
             self.client.fail(text_type(e))
     self.results['actions'].append("Enabled plugin %s" % self.parameters.plugin_name)
     self.results['changed'] = True
예제 #15
0
    def get_existing_volume(self):
        """Return the docker volume matching the configured name, or None."""
        try:
            volumes = self.client.volumes()
        except APIError as exc:
            self.client.fail(text_type(exc))

        wanted = self.parameters.volume_name
        return next((vol for vol in volumes[u'Volumes'] if vol['Name'] == wanted), None)
예제 #16
0
 def _cast_value(self, value):
     """Cast a raw string value to bool, float, int or text.

     Bug fix: the original float pattern required a literal 'd' after the
     decimal point, so values such as "1.5" were never cast to float and
     fell through to the text branch.
     """
     if value in BOOLEANS_TRUE:
         return True
     elif value in BOOLEANS_FALSE:
         return False
     elif re.match(r'^\d+\.\d+$', value):
         return float(value)
     elif re.match(r'^\d+$', value):
         return int(value)
     else:
         return text_type(value)
예제 #17
0
    def get_existing_volume(self):
        """Look up the configured volume by name; None when absent."""
        try:
            listing = self.client.volumes()
        except APIError as exc:
            self.client.fail(text_type(exc))

        for candidate in listing[u'Volumes']:
            if candidate['Name'] == self.parameters.volume_name:
                return candidate
        return None
예제 #18
0
 def _cast_value(self, value):
     """Cast a raw string value to bool, float, int or text.

     Bug fix: the float pattern previously expected a literal 'd' after the
     decimal point, so strings such as "2.5" fell through to text.
     """
     if value in BOOLEANS_TRUE:
         return True
     elif value in BOOLEANS_FALSE:
         return False
     elif re.match(r"^\d+\.\d+$", value):
         return float(value)
     elif re.match(r"^\d+$", value):
         return int(value)
     else:
         return text_type(value)
예제 #19
0
 def disable(self):
     """Disable the plugin when it is enabled; fail if it does not exist."""
     if not self.existing_plugin:
         self.fail("Plugin not found: Plugin does not exist.")
         return
     if not self.existing_plugin.enabled:
         # already disabled: nothing to do
         return
     if not self.check_mode:
         try:
             self.existing_plugin.disable()
         except APIError as e:
             self.client.fail(text_type(e))
     self.results['actions'].append("Disable plugin %s" % self.parameters.plugin_name)
     self.results['changed'] = True
def modify(module, conn, name, values):
    """ Modify ElastiCache parameter group to reflect the new information if it differs from the current. """
    # reshape the requested parameters into the structure the API expects
    format_parameters = [
        {'ParameterName': key, 'ParameterValue': text_type(val)}
        for key, val in values.items()
    ]
    try:
        response = conn.modify_cache_parameter_group(CacheParameterGroupName=name, ParameterNameValues=format_parameters)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg="Unable to modify cache parameter group.", exception=traceback.format_exc())
    return response
예제 #21
0
    def load(self, data, file_name='<string>', show_content=True):
        '''
        Creates a python datastructure from the given data, which can be either
        a JSON or YAML string.

        Cleanups versus the original: the unconditional unwrap of
        AnsibleUnicode into ``in_data`` at the top was dead code (it was
        recomputed identically inside the except branch before any use, and
        ``json.loads`` reads ``data`` directly), so it has been removed; the
        bare ``except:`` has been narrowed to ``except Exception:``.
        '''
        new_data = None

        try:
            # we first try to load this data as JSON (YAML would accept it
            # too, as JSON is a subset, but json.loads is stricter)
            new_data = json.loads(data)
        except Exception:
            # must not be JSON, let the YAML parser try
            if isinstance(data, AnsibleUnicode):
                # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
                # they are unable to cope with our subclass.
                # Unwrap and re-wrap the unicode so we can keep track of line
                # numbers
                in_data = text_type(data)
            else:
                in_data = data
            try:
                new_data = self._safe_load(in_data, file_name=file_name)
            except YAMLError as yaml_exc:
                self._handle_error(yaml_exc, file_name, show_content)

            if isinstance(data, AnsibleUnicode):
                new_data = AnsibleUnicode(new_data)
                new_data.ansible_pos = data.ansible_pos

        return new_data
예제 #22
0
 def update_plugin(self):
     """Reconfigure an existing plugin when its settings differ."""
     if not self.existing_plugin:
         self.fail("Cannot update the plugin: Plugin does not exist")
         return
     differences = self.has_different_config()
     if differences.empty:
         # configuration already matches: nothing to do
         return
     if not self.check_mode:
         try:
             self.existing_plugin.configure(prepare_options(self.parameters.plugin_options))
         except APIError as e:
             self.client.fail(text_type(e))
     self.results['actions'].append("Updated plugin %s settings" % self.parameters.plugin_name)
     self.results['changed'] = True
예제 #23
0
    def get_existing_plugin(self):
        """Fetch the plugin object by name; None when it is not installed."""
        name = self.parameters.plugin_name
        try:
            # the client returns the plugin (or None); NotFound simply means
            # it is absent, so both paths collapse to "return what we got"
            return self.dclient.plugins.get(name)
        except NotFound:
            return None
        except APIError as e:
            self.client.fail(text_type(e))
예제 #24
0
    def create_volume(self):
        """Create the volume when absent and remember the inspected result."""
        if self.existing_volume:
            return
        if not self.check_mode:
            try:
                resp = self.client.create_volume(self.parameters.volume_name,
                                                 driver=self.parameters.driver,
                                                 driver_opts=self.parameters.driver_options,
                                                 labels=self.parameters.labels)
                # re-inspect so existing_volume carries the server's view
                self.existing_volume = self.client.inspect_volume(resp['Name'])
            except APIError as exc:
                self.client.fail(text_type(exc))
        self.results['actions'].append("Created volume %s with driver %s" % (self.parameters.volume_name, self.parameters.driver))
        self.results['changed'] = True
예제 #25
0
    def create_volume(self):
        """Create the configured volume if it does not already exist."""
        if self.existing_volume:
            return
        if not self.check_mode:
            try:
                created = self.client.create_volume(
                    self.parameters.volume_name,
                    driver=self.parameters.driver,
                    driver_opts=self.parameters.driver_options,
                    labels=self.parameters.labels)
                self.existing_volume = self.client.inspect_volume(created['Name'])
            except APIError as err:
                self.client.fail(text_type(err))
        self.results['actions'].append("Created volume %s with driver %s" % (self.parameters.volume_name, self.parameters.driver))
        self.results['changed'] = True
예제 #26
0
def _pretty_represent_str(self, data):
    """Represent a string node, using block style ('|') for multi-line values."""
    data = text_type(data)
    # start from the representer's default and upgrade to block style only
    # when the content warrants it
    style = self.default_style
    if _should_use_block(data):
        style = '|'
        if self._lossy:
            data = _munge_data_for_lossy_yaml(data)

    node = yaml.representer.ScalarNode('tag:yaml.org,2002:str', data, style=style)
    if self.alias_key is not None:
        # register for anchor/alias reuse, mirroring PyYAML's own representers
        self.represented_objects[self.alias_key] = node
    return node
예제 #27
0
 def _normalize_to_python(self, value):
     """
     Return value with all Foundation types converted to their python
     equivalent.  The order of the isinstance checks is significant:
     Foundation containers are matched (together with their python
     counterparts) before the scalar types.
     """
     if isinstance(value, (Foundation.NSMutableDictionary, dict)):
         value = dict(value)
         for k in value:
             value[k] = self._normalize_to_python(value[k])
     elif isinstance(value, (Foundation.NSMutableArray, list, tuple)):
         value = [self._normalize_to_python(element) for element in value]
     elif isinstance(value, Foundation.NSDate):
         value = string_to_datetime(text_type(value))
     elif isinstance(value, Foundation.NSMutableData):
         value = Data(value.base64Encoding())
     return value
예제 #28
0
 def _get_obj_info(self, obj, depth=99, seen=None):
     '''
     Recursively build a data structure for the given pSphere object (depth
     only applies to ManagedObject instances).

     Cleanups: the list accumulator previously named ``l`` (easily confused
     with ``1``) is renamed, and the unused exception binding is dropped.
     The deliberate best-effort swallow of errors while walking attributes
     is kept, since attribute access on these objects can raise.
     '''
     seen = seen or set()
     if isinstance(obj, ManagedObject):
         try:
             obj_unicode = text_type(getattr(obj, 'name'))
         except AttributeError:
             obj_unicode = ()
         if obj in seen:
             # cycle: represent the revisited object by its name only
             return obj_unicode
         seen.add(obj)
         if depth <= 0:
             return obj_unicode
         d = {}
         for attr in dir(obj):
             if attr.startswith('_'):
                 continue
             try:
                 val = getattr(obj, attr)
                 obj_info = self._get_obj_info(val, depth - 1, seen)
                 if obj_info != ():
                     d[attr] = obj_info
             except Exception:
                 # best-effort: skip attributes that fail to resolve
                 pass
         return d
     elif isinstance(obj, SudsObject):
         d = {}
         for key, val in iter(obj):
             obj_info = self._get_obj_info(val, depth, seen)
             if obj_info != ():
                 d[key] = obj_info
         return d
     elif isinstance(obj, (list, tuple)):
         items = []
         for val in iter(obj):
             obj_info = self._get_obj_info(val, depth, seen)
             if obj_info != ():
                 items.append(obj_info)
         return items
     elif isinstance(obj, (type(None), bool, float) + string_types +
                     integer_types):
         return obj
     else:
         # unknown type: the empty tuple means "omit from output"
         return ()
예제 #29
0
파일: manager.py 프로젝트: ernstp/ansible
    def _get_magic_variables(self, play, host, task, include_hostvars, include_delegate_to):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.

        play/host/task may each be None; host and include_delegate_to are
        accepted for interface compatibility but not used in this variant.
        '''

        variables = {}
        # always available, independent of play/host/task
        variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
        variables['ansible_playbook_python'] = sys.executable

        if play:
            variables['role_names'] = [r._role_name for r in play.roles]

        if task:
            # role_* vars only exist for tasks that belong to a role
            if task._role:
                variables['role_name'] = task._role.get_name()
                variables['role_path'] = task._role._role_path
                variables['role_uuid'] = text_type(task._role._uuid)

        if self._inventory is not None:
            variables['groups'] = self._inventory.get_groups_dict()
            if play:
                # a templated hosts: pattern cannot be resolved yet, so fall
                # back to matching all hosts
                templar = Templar(loader=self._loader)
                if templar.is_template(play.hosts):
                    pattern = 'all'
                else:
                    pattern = play.hosts or 'all'
                # add the list of hosts in the play, as adjusted for limit/filters
                variables['ansible_play_hosts_all'] = [x.name for x in self._inventory.get_hosts(pattern=pattern, ignore_restrictions=True)]
                variables['ansible_play_hosts'] = [x for x in variables['ansible_play_hosts_all'] if x not in play._removed_hosts]
                variables['ansible_play_batch'] = [x.name for x in self._inventory.get_hosts() if x.name not in play._removed_hosts]

                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
                # however this would take work in the templating engine, so for now we'll add both
                variables['play_hosts'] = variables['ansible_play_batch']

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        # Set options vars
        for option, option_value in iteritems(self._options_vars):
            variables[option] = option_value

        if self._hostvars is not None and include_hostvars:
            variables['hostvars'] = self._hostvars

        return variables
예제 #30
0
def main():
    # Entry point for a junos configuration-load module: push the config in
    # ``src`` onto the device using the requested merge strategy.

    argument_spec = dict(
        src=dict(required=True, type='path'),
        confirm=dict(default=0, type='int'),
        comment=dict(default=DEFAULT_COMMENT),
        action=dict(default='merge', choices=['merge', 'overwrite',
                                              'replace']),
        config_format=dict(choices=['text', 'set', 'xml']),
        backup=dict(default=False, type='bool'),
    )

    # add the shared junos connection arguments
    argument_spec.update(junos_argument_spec)

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    check_transport(module)

    warnings = list()
    check_args(module, warnings)

    result = {'changed': False, 'warnings': warnings}

    # NOTE(review): ``comment`` and ``confirm`` are read here but never
    # passed to load() below — confirm whether that is intentional.
    comment = module.params['comment']
    confirm = module.params['confirm']
    # in check mode we stage the load but do not commit it
    commit = not module.check_mode
    action = module.params['action']
    src = module.params['src']
    fmt = module.params['config_format']

    if action == 'overwrite' and fmt == 'set':
        module.fail_json(msg="overwrite cannot be used when format is "
                         "set per junos-pyez documentation")

    if module.params['backup']:
        # stash the pre-change configuration so callers can retrieve it
        result['__backup__'] = text_type(get_configuration(module))

    # a non-empty diff means the device configuration changed
    diff = load(module, src, action=action, commit=commit, format=fmt)
    if diff:
        result['changed'] = True
        if module._diff:
            result['diff'] = {'prepared': diff}

    module.exit_json(**result)
def set_proxy_facts(facts):
    """ Set global proxy facts

        Args:
            facts(dict): existing facts
        Returns:
            facts(dict): Updated facts with missing values
    """
    if 'common' in facts:
        common = facts['common']
        # only touch no_proxy handling when any proxy setting is present
        if 'http_proxy' in common or 'https_proxy' in common or 'no_proxy' in common:
            # normalize no_proxy to a list so we can extend it below
            if 'no_proxy' in common and isinstance(common['no_proxy'], string_types):
                common['no_proxy'] = common['no_proxy'].split(",")
            elif 'no_proxy' not in common:
                common['no_proxy'] = []

            # See https://bugzilla.redhat.com/show_bug.cgi?id=1466783
            # masters behind a proxy need to connect to etcd via IP
            if 'no_proxy_etcd_host_ips' in common:
                if isinstance(common['no_proxy_etcd_host_ips'], string_types):
                    common['no_proxy'].extend(common['no_proxy_etcd_host_ips'].split(','))

            # Master IPs should be added to no proxy lists to make liveness probes to pass
            if 'no_proxy_master_ips' in common:
                if isinstance(common['no_proxy_master_ips'], string_types):
                    common['no_proxy'].extend(common['no_proxy_master_ips'].split(','))

            # optionally include internal hostnames generated elsewhere
            if 'generate_no_proxy_hosts' in common and safe_get_bool(common['generate_no_proxy_hosts']):
                if 'no_proxy_internal_hostnames' in common:
                    common['no_proxy'].extend(common['no_proxy_internal_hostnames'].split(','))
            # TODO: This is Azure specific and should be scoped out to only Azure installs
            common['no_proxy'].append('169.254.169.254')
            # We always add local dns domain and ourselves no matter what
            # (the [1] entry of the portal network is the kube service VIP)
            kube_svc_ip = str(ipaddress.ip_network(text_type(common['portal_net']))[1])
            common['no_proxy'].append(kube_svc_ip)
            common['no_proxy'].append('.' + common['dns_domain'])
            common['no_proxy'].append('.svc')
            common['no_proxy'].append(common['hostname'])
            # collapse back into the comma-separated string form
            common['no_proxy'] = ','.join(sort_unique(common['no_proxy']))
        facts['common'] = common

    return facts
예제 #32
0
def from_yaml(data,
              file_name='<string>',
              show_content=True,
              vault_secrets=None):
    '''
    Creates a python datastructure from the given data, which can be either
    a JSON or YAML string.

    :arg data: string (possibly an AnsibleUnicode subclass) to parse
    :kwarg file_name: name used when reporting parse errors
    :kwarg show_content: whether error output may include the raw data
    :kwarg vault_secrets: secrets passed through to the YAML loader
    :returns: the parsed python datastructure (or None on handled error)
    '''
    new_data = None

    if isinstance(data, AnsibleUnicode):
        # The PyYAML's libyaml bindings use PyUnicode_CheckExact so
        # they are unable to cope with our subclass.
        # Unwrap and re-wrap the unicode so we can keep track of line
        # numbers
        # Note: Cannot use to_text() because AnsibleUnicode is a subclass of the text_type.
        # Should not have to worry about tracebacks because python's text constructors (unicode() on
        # python2 and str() on python3) can handle a subtype of themselves.
        in_data = text_type(data)
    else:
        in_data = data

    try:
        # we first try to load this data as JSON.  Fixes issues with extra vars json strings not
        # being parsed correctly by the yaml parser
        new_data = json.loads(in_data)
    except Exception:
        # must not be JSON, let the rest try
        try:
            new_data = _safe_load(in_data,
                                  file_name=file_name,
                                  vault_secrets=vault_secrets)
        except YAMLError as yaml_exc:
            _handle_error(yaml_exc, file_name, show_content)

        # re-wrap so callers keep line/column tracking on the result
        if isinstance(data, AnsibleUnicode):
            new_data = AnsibleUnicode(new_data)
            new_data.ansible_pos = data.ansible_pos

    return new_data
예제 #33
0
파일: toml.py 프로젝트: alexey74/ansible
def convert_yaml_objects_to_native(obj):
    """Recursively cast Ansible YAML wrapper objects to native Python types.

    Older versions of the ``toml`` python library don't have a pluggable
    way to tell the encoder about custom types, so objects must be reduced
    to native types before encoding.  Only used on ``toml<0.10.0`` where
    ``toml.TomlEncoder`` is missing.

    Rather than checking for the ``ansible.parsing.yaml.objects`` types
    directly, this checks the builtin types they inherit from, which is
    more flexible.
    """
    if isinstance(obj, dict):
        # Rebuild the mapping with cleansed keys left as-is and values
        # converted recursively.
        return {key: convert_yaml_objects_to_native(value)
                for key, value in obj.items()}
    if isinstance(obj, list):
        return [convert_yaml_objects_to_native(item) for item in obj]
    if isinstance(obj, text_type):
        # Re-construct as the plain text type, dropping any subclass.
        return text_type(obj)
    return obj
예제 #34
0
    def _get_magic_variables(self,
                             play,
                             host,
                             task,
                             include_hostvars,
                             include_delegate_to,
                             _hosts=None,
                             _hosts_all=None):
        '''
        Returns a dictionary of so-called "magic" variables in Ansible,
        which are special variables we set internally for use.

        :arg play: current Play object, or None; contributes role/host vars.
        :arg host: current host. NOTE(review): unused in this body — kept,
            presumably, for signature compatibility with callers.
        :arg task: current Task object, or None; contributes role_* vars.
        :arg include_hostvars: when truthy and hostvars exist, adds
            'hostvars' to the result.
        :arg include_delegate_to: NOTE(review): also unused in this body.
        :kwarg _hosts: precomputed host-name list to avoid re-querying the
            inventory (optimization for batched callers).
        :kwarg _hosts_all: precomputed unrestricted host-name list, same idea.
        '''

        variables = {}
        variables['playbook_dir'] = os.path.abspath(self._loader.get_basedir())
        variables['ansible_playbook_python'] = sys.executable
        variables['ansible_config_file'] = C.CONFIG_FILE

        if play:
            # This is a list of all role names of all dependencies for all roles for this play
            # (set comprehension de-duplicates before converting to a list)
            dependency_role_names = list({
                d.get_name()
                for r in play.roles for d in r.get_all_dependencies()
            })
            # This is a list of all role names of all roles for this play
            play_role_names = [r.get_name() for r in play.roles]

            # ansible_role_names includes all role names, dependent or directly referenced by the play
            variables['ansible_role_names'] = list(
                set(dependency_role_names + play_role_names))
            # ansible_play_role_names includes the names of all roles directly referenced by this play
            # roles that are implicitly referenced via dependencies are not listed.
            variables['ansible_play_role_names'] = play_role_names
            # ansible_dependent_role_names includes the names of all roles that are referenced via dependencies
            # dependencies that are also explicitly named as roles are included in this list
            variables['ansible_dependent_role_names'] = dependency_role_names

            # DEPRECATED: role_names should be deprecated in favor of ansible_role_names or ansible_play_role_names
            variables['role_names'] = variables['ansible_play_role_names']

            variables['ansible_play_name'] = play.get_name()

        if task:
            if task._role:
                variables['role_name'] = task._role.get_name(
                    include_role_fqcn=False)
                variables['role_path'] = task._role._role_path
                variables['role_uuid'] = text_type(task._role._uuid)
                variables[
                    'ansible_collection_name'] = task._role._role_collection
                variables['ansible_role_name'] = task._role.get_name()

        if self._inventory is not None:
            variables['groups'] = self._inventory.get_groups_dict()
            if play:
                templar = Templar(loader=self._loader)
                # An untemplated 'hosts' can't be resolved yet, so fall back
                # to matching every host.
                if not play.finalized and templar.is_template(play.hosts):
                    pattern = 'all'
                else:
                    pattern = play.hosts or 'all'
                # add the list of hosts in the play, as adjusted for limit/filters
                if not _hosts_all:
                    _hosts_all = [
                        h.name for h in self._inventory.get_hosts(
                            pattern=pattern, ignore_restrictions=True)
                    ]
                if not _hosts:
                    _hosts = [h.name for h in self._inventory.get_hosts()]

                # Copy so callers mutating the result can't corrupt _hosts_all.
                variables['ansible_play_hosts_all'] = _hosts_all[:]
                variables['ansible_play_hosts'] = [
                    x for x in variables['ansible_play_hosts_all']
                    if x not in play._removed_hosts
                ]
                variables['ansible_play_batch'] = [
                    x for x in _hosts if x not in play._removed_hosts
                ]

                # DEPRECATED: play_hosts should be deprecated in favor of ansible_play_batch,
                # however this would take work in the templating engine, so for now we'll add both
                variables['play_hosts'] = variables['ansible_play_batch']

        # the 'omit' value allows params to be left out if the variable they are based on is undefined
        variables['omit'] = self._omit_token
        # Set options vars
        for option, option_value in self._options_vars.items():
            variables[option] = option_value

        if self._hostvars is not None and include_hostvars:
            variables['hostvars'] = self._hostvars

        return variables
예제 #35
0
    def template(self, variable, convert_bare=False, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None,
                 convert_data=True, static_vars=None, cache=True, disable_lookups=False):
        '''
        Templates (possibly recursively) any given data as input. If convert_bare is
        set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
        before being sent through the template engine.

        Strings are rendered through jinja2 (with a result cache); sequences
        and mappings are templated element-by-element; anything else is
        returned untouched.  Variables carrying the __UNSAFE__ marker are
        never templated.  static_vars names mapping keys whose values must be
        passed through untemplated.

        NOTE(review): the recursive calls for sequences/mappings do not
        forward escape_backslashes, convert_data, static_vars or cache —
        confirm this asymmetry is intended.
        '''
        static_vars = [''] if static_vars is None else static_vars

        # Don't template unsafe variables, just return them.
        if hasattr(variable, '__UNSAFE__'):
            return variable

        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors

        try:
            if convert_bare:
                variable = self._convert_bare_variable(variable)

            if isinstance(variable, string_types):
                result = variable

                if self.is_possibly_template(variable):
                    # Check to see if the string we are trying to render is just referencing a single
                    # var.  In this case we don't want to accidentally change the type of the variable
                    # to a string by using the jinja template renderer. We just want to pass it.
                    only_one = self.SINGLE_VAR.match(variable)
                    if only_one:
                        var_name = only_one.group(1)
                        if var_name in self._available_variables:
                            resolved_val = self._available_variables[var_name]
                            if isinstance(resolved_val, NON_TEMPLATED_TYPES):
                                return resolved_val
                            elif resolved_val is None:
                                return C.DEFAULT_NULL_REPRESENTATION

                    # Using a cache in order to prevent template calls with already templated variables
                    # (key = hash of the template text + hash of the rendering options)
                    sha1_hash = None
                    if cache:
                        variable_hash = sha1(text_type(variable).encode('utf-8'))
                        options_hash = sha1(
                            (
                                text_type(preserve_trailing_newlines) +
                                text_type(escape_backslashes) +
                                text_type(fail_on_undefined) +
                                text_type(overrides)
                            ).encode('utf-8')
                        )
                        sha1_hash = variable_hash.hexdigest() + options_hash.hexdigest()
                    if cache and sha1_hash in self._cached_result:
                        result = self._cached_result[sha1_hash]
                    else:
                        result = self.do_template(
                            variable,
                            preserve_trailing_newlines=preserve_trailing_newlines,
                            escape_backslashes=escape_backslashes,
                            fail_on_undefined=fail_on_undefined,
                            overrides=overrides,
                            disable_lookups=disable_lookups,
                        )

                        # With jinja2-native the renderer already returns real
                        # python types, so the safe_eval conversion below is
                        # only needed for the classic string renderer.
                        if not USE_JINJA2_NATIVE:
                            unsafe = hasattr(result, '__UNSAFE__')
                            if convert_data and not self._no_type_regex.match(variable):
                                # if this looks like a dictionary or list, convert it to such using the safe_eval method
                                if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
                                        result.startswith("[") or result in ("True", "False"):
                                    eval_results = safe_eval(result, include_exceptions=True)
                                    if eval_results[1] is None:
                                        result = eval_results[0]
                                        if unsafe:
                                            # safe_eval strips the unsafe marker; restore it.
                                            result = wrap_var(result)
                                    else:
                                        # FIXME: if the safe_eval raised an error, should we do something with it?
                                        pass

                        # we only cache in the case where we have a single variable
                        # name, to make sure we're not putting things which may otherwise
                        # be dynamic in the cache (filters, lookups, etc.)
                        if cache and only_one:
                            self._cached_result[sha1_hash] = result

                return result

            elif is_sequence(variable):
                return [self.template(
                    v,
                    preserve_trailing_newlines=preserve_trailing_newlines,
                    fail_on_undefined=fail_on_undefined,
                    overrides=overrides,
                    disable_lookups=disable_lookups,
                ) for v in variable]
            elif isinstance(variable, Mapping):
                d = {}
                # we don't use iteritems() here to avoid problems if the underlying dict
                # changes sizes due to the templating, which can happen with hostvars
                for k in variable.keys():
                    if k not in static_vars:
                        d[k] = self.template(
                            variable[k],
                            preserve_trailing_newlines=preserve_trailing_newlines,
                            fail_on_undefined=fail_on_undefined,
                            overrides=overrides,
                            disable_lookups=disable_lookups,
                        )
                    else:
                        d[k] = variable[k]
                return d
            else:
                return variable

        except AnsibleFilterError:
            # Filter errors are fatal only when configured to be; otherwise
            # hand back the input unmodified.
            if self._fail_on_filter_errors:
                raise
            else:
                return variable
예제 #36
0
def get_aws_connection_info(module, boto3=False):
    """Build AWS connection settings from module args and the environment.

    Module parameters are consulted first; the usual AWS/EC2 environment
    variables second; for the region only, boto/botocore configuration is
    the final fallback.

    :arg module: AnsibleModule-like object whose params may contain
        ec2_url, aws_access_key, aws_secret_key, security_token, region,
        profile and validate_certs.
    :kwarg boto3: when True, shape the returned params for boto3 clients
        (aws_session_token / verify / profile_name); otherwise shape them
        for boto2 (security_token / validate_certs).
    :returns: tuple of (region, ec2_url, boto_params)
    """

    ec2_url = module.params.get('ec2_url')
    access_key = module.params.get('aws_access_key')
    secret_key = module.params.get('aws_secret_key')
    security_token = module.params.get('security_token')
    region = module.params.get('region')
    profile_name = module.params.get('profile')
    validate_certs = module.params.get('validate_certs')

    # os.environ.get() truthiness (rather than "'X' in os.environ") is used
    # throughout so that environment variables set to an empty string are
    # treated the same as unset ones, matching the "in case ... came in as
    # empty string" intent below.
    if not ec2_url:
        if os.environ.get('AWS_URL'):
            ec2_url = os.environ['AWS_URL']
        elif os.environ.get('EC2_URL'):
            ec2_url = os.environ['EC2_URL']

    if not access_key:
        if os.environ.get('AWS_ACCESS_KEY_ID'):
            access_key = os.environ['AWS_ACCESS_KEY_ID']
        elif os.environ.get('AWS_ACCESS_KEY'):
            access_key = os.environ['AWS_ACCESS_KEY']
        elif os.environ.get('EC2_ACCESS_KEY'):
            access_key = os.environ['EC2_ACCESS_KEY']
        else:
            # in case access_key came in as empty string
            access_key = None

    if not secret_key:
        if os.environ.get('AWS_SECRET_ACCESS_KEY'):
            secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
        elif os.environ.get('AWS_SECRET_KEY'):
            secret_key = os.environ['AWS_SECRET_KEY']
        elif os.environ.get('EC2_SECRET_KEY'):
            secret_key = os.environ['EC2_SECRET_KEY']
        else:
            # in case secret_key came in as empty string
            secret_key = None

    if not region:
        if os.environ.get('AWS_REGION'):
            region = os.environ['AWS_REGION']
        elif os.environ.get('AWS_DEFAULT_REGION'):
            region = os.environ['AWS_DEFAULT_REGION']
        elif os.environ.get('EC2_REGION'):
            region = os.environ['EC2_REGION']
        else:
            if not boto3:
                # boto.config.get returns None if config not found
                region = boto.config.get('Boto', 'aws_region')
                if not region:
                    region = boto.config.get('Boto', 'ec2_region')
            elif HAS_BOTO3:
                # here we don't need to make an additional call, will default to 'us-east-1' if the below evaluates to None.
                region = botocore.session.get_session().get_config_variable('region')
            else:
                module.fail_json(msg="Boto3 is required for this module. Please install boto3 and try again")

    if not security_token:
        if os.environ.get('AWS_SECURITY_TOKEN'):
            security_token = os.environ['AWS_SECURITY_TOKEN']
        elif os.environ.get('AWS_SESSION_TOKEN'):
            security_token = os.environ['AWS_SESSION_TOKEN']
        elif os.environ.get('EC2_SECURITY_TOKEN'):
            security_token = os.environ['EC2_SECURITY_TOKEN']
        else:
            # in case security_token came in as empty string
            security_token = None

    if HAS_BOTO3 and boto3:
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           aws_session_token=security_token)
        boto_params['verify'] = validate_certs

        if profile_name:
            boto_params['profile_name'] = profile_name

    else:
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           security_token=security_token)

        # only set profile_name if passed as an argument
        if profile_name:
            boto_params['profile_name'] = profile_name

        boto_params['validate_certs'] = validate_certs

    # boto/botocore expect text, not bytes, for every parameter value.
    for param, value in boto_params.items():
        if isinstance(value, binary_type):
            boto_params[param] = text_type(value, 'utf-8', 'strict')

    return region, ec2_url, boto_params
예제 #37
0
    def check_node(self):
        """Gather diagnostic information on a node and perform connectivity
        checks on pods and services.

        Failures are accumulated via register_failure() rather than raised;
        when no node name or hostsubnet can be determined the check aborts
        early.
        """
        node_name = self.get_var('openshift', 'node', 'nodename', default=None)
        if not node_name:
            self.register_failure('Could not determine node name.')
            return

        # The "openvswitch" container uses the host netnamespace, but the host
        # file system may not have the ovs-appctl and ovs-ofctl binaries, which
        # we use for some diagnostics.  Thus we run these binaries inside the
        # container, and to that end, we need to determine its container id.
        exec_in_ovs_container = self.get_container_exec_command('openvswitch',
                                                                'openshift-sdn')

        if self.want_full_results:
            # Best-effort log/diagnostic collection; a single failure here
            # aborts the remaining collection but not the connectivity checks.
            try:
                service_prefix = self.get_var('openshift_service_type')
                if self._templar is not None:
                    service_prefix = self._templar.template(service_prefix)
                self.save_service_logs('%s-node' % service_prefix)

                if self.get_var('openshift_use_crio', default=False):
                    self.save_command_output('crio-unit-file',
                                             ['/bin/systemctl',
                                              'cat', 'crio.service'])
                    self.save_command_output('crio-ps', ['/bin/crictl', 'ps'])

                if not self.get_var('openshift_use_crio_only', default=False):
                    self.save_command_output('docker-unit-file',
                                             ['/bin/systemctl',
                                              'cat', 'docker.service'])
                    self.save_command_output('docker-ps', ['/bin/docker', 'ps'])

                self.save_command_output('flows', exec_in_ovs_container +
                                         ['/bin/ovs-ofctl', '-O', 'OpenFlow13',
                                          'dump-flows', 'br0'])
                self.save_command_output('ovs-show', exec_in_ovs_container +
                                         ['/bin/ovs-ofctl', '-O', 'OpenFlow13',
                                          'show', 'br0'])

                self.save_command_output('tc-qdisc',
                                         ['/sbin/tc', 'qdisc', 'show'])
                self.save_command_output('tc-class',
                                         ['/sbin/tc', 'class', 'show'])
                self.save_command_output('tc-filter',
                                         ['/sbin/tc', 'filter', 'show'])
            except OpenShiftCheckException as exc:
                self.register_failure(exc)

        # Map node name -> subnet CIDR for every hostsubnet resource.
        subnets = {hostsubnet['metadata']['name']: hostsubnet['subnet']
                   for hostsubnet in self.get_resource('hostsubnets')}

        subnet = subnets.get(node_name, None)
        if subnet is None:
            self.register_failure('Node %s has no hostsubnet.' % node_name)
            return
        # ipaddress requires text (not bytes) input on python2.
        subnet = six.text_type(subnet)
        # [1] selects the first host address of the subnet — presumably the
        # node's SDN address; confirm against the SDN layout.
        address = ipaddress.ip_network(subnet)[1]

        for remote_node in self.get_resource('nodes'):
            remote_node_name = remote_node['metadata']['name']
            if remote_node_name == node_name:
                continue

            remote_subnet = subnets.get(remote_node_name, None)
            if remote_subnet is None:
                continue
            remote_subnet = six.text_type(remote_subnet)
            remote_address = ipaddress.ip_network(remote_subnet)[1]

            # Simulate the flow through OVS from this node to the remote node.
            self.save_command_output(
                'trace_node_%s_to_node_%s' % (node_name, remote_node_name),
                exec_in_ovs_container +
                ['/bin/ovs-appctl', 'ofproto/trace', 'br0',
                 'in_port=2,reg0=0,ip,nw_src=%s,nw_dst=%s' %
                 (address, remote_address)])

            try:
                self.save_command_output('ping_node_%s_to_node_%s' %
                                         (node_name, remote_node_name),
                                         ['/bin/ping', '-c', '1', '-W', '2',
                                          str(remote_address)])
            # NOTE(review): exc is unused; a friendlier message is reported
            # in its place.
            except OpenShiftCheckException as exc:
                self.register_failure('Node %s cannot ping node %s.' %
                                      (node_name, remote_node_name))
예제 #38
0
File: fish.py  Project: ernstp/ansible
 def env_prefix(self, **kwargs):
     """Build a fish-shell command prefix that exports the plugin
     environment (merged with any keyword overrides) as local variables,
     e.g. "set -lx FOO bar; set -lx BAZ qux;"."""
     merged = self.env.copy()
     merged.update(kwargs)
     clauses = ['set -lx %s %s;' % (name, shlex_quote(text_type(value)))
                for name, value in merged.items()]
     return ' '.join(clauses)
예제 #39
0
File: __init__.py  Project: ernstp/ansible
    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=True, wrap_async=False):
        '''
        Transfer and run a module along with its arguments.

        Builds the module payload, optionally transfers it (and an args
        file) to a remote temp dir when pipelining is not possible, and —
        when wrap_async is set — wraps execution in the async_wrapper
        module.

        NOTE(review): this snippet is truncated in the source; the function
        body continues past the last visible line.
        '''
        if task_vars is None:
            task_vars = dict()

        remote_module_path = None
        args_file_path = None
        remote_files = []

        # if a module name was not specified for this execution, use the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        self._update_module_args(module_name, module_args, task_vars)

        # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
        (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
        display.vvv("Using module file %s" % module_path)
        if not shebang and module_style != 'binary':
            raise AnsibleError("module (%s) is missing interpreter line" % module_name)

        if not self._is_pipelining_enabled(module_style, wrap_async):

            # we might need remote tmp dir
            if not tmp or 'tmp' not in tmp:
                tmp = self._make_tmp_path()

            remote_module_filename = self._connection._shell.get_remote_filename(module_path)
            remote_module_path = self._connection._shell.join_path(tmp, remote_module_filename)

        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a temp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmp, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote %s" % remote_module_path)
            if module_style == 'binary':
                self._transfer_file(module_path, remote_module_path)
            else:
                self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style in ('non_native_want_json', 'binary'):
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        if tmp and remote_module_path:
            remote_files = [tmp, remote_module_path]

        if args_file_path:
            remote_files.append(args_file_path)

        sudoable = True
        in_data = None
        cmd = ""

        if wrap_async:
            # configure, upload, and chmod the async_wrapper module
            (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(),
                                                                                                         task_vars=task_vars)
            async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
            remote_async_module_path = self._connection._shell.join_path(tmp, async_module_remote_filename)
            self._transfer_data(remote_async_module_path, async_module_data)
            remote_files.append(remote_async_module_path)

            # NOTE(review): '.async' is a reserved keyword from Python 3.7 on;
            # later versions of this code renamed the attribute to async_val.
            async_limit = self._task.async
예제 #40
0
    def _execute_module(self, module_name=None, module_args=None, tmp=None, task_vars=None, persist_files=False, delete_remote_tmp=None, wrap_async=False):
        '''
        Transfer and run a module along with its arguments.

        :kwarg module_name: module to run; defaults to the task's action.
        :kwarg module_args: arguments for the module; defaults to the
            task's args.
        :kwarg tmp: DEPRECATED — no longer honored; a warning is emitted.
        :kwarg task_vars: variables available during module configuration.
        :kwarg persist_files: NOTE(review): unused in this body — kept,
            presumably, for signature compatibility.
        :kwarg delete_remote_tmp: DEPRECATED — no longer honored; a warning
            is emitted.
        :kwarg wrap_async: when True (and the connection does not always
            pipeline), run the module through async_wrapper.
        :returns: the parsed module result dict, with internal keys removed
            and stdout_lines/stderr_lines filled in.
        :raises AnsibleError: if a non-binary module has no interpreter line.
        '''
        if tmp is not None:
            display.warning('_execute_module no longer honors the tmp parameter. Action plugins'
                            ' should set self._connection._shell.tmpdir to share the tmpdir')
        del tmp  # No longer used
        if delete_remote_tmp is not None:
            display.warning('_execute_module no longer honors the delete_remote_tmp parameter.'
                            ' Action plugins should check self._connection._shell.tmpdir to'
                            ' see if a tmpdir existed before they were called to determine'
                            ' if they are responsible for removing it.')
        del delete_remote_tmp  # No longer used

        if task_vars is None:
            task_vars = dict()

        # if a module name was not specified for this execution, use the action from the task
        if module_name is None:
            module_name = self._task.action
        if module_args is None:
            module_args = self._task.args

        self._update_module_args(module_name, module_args, task_vars)

        # FUTURE: refactor this along with module build process to better encapsulate "smart wrapper" functionality
        (module_style, shebang, module_data, module_path) = self._configure_module(module_name=module_name, module_args=module_args, task_vars=task_vars)
        display.vvv("Using module file %s" % module_path)
        if not shebang and module_style != 'binary':
            raise AnsibleError("module (%s) is missing interpreter line" % module_name)

        tmpdir = self._connection._shell.tmpdir
        remote_module_path = None

        if not self._is_pipelining_enabled(module_style, wrap_async):
            # we might need remote tmp dir
            if tmpdir is None:
                self._make_tmp_path()
                tmpdir = self._connection._shell.tmpdir

            remote_module_filename = self._connection._shell.get_remote_filename(module_path)
            remote_module_path = self._connection._shell.join_path(tmpdir, remote_module_filename)

        args_file_path = None
        if module_style in ('old', 'non_native_want_json', 'binary'):
            # we'll also need a tmp file to hold our module arguments
            args_file_path = self._connection._shell.join_path(tmpdir, 'args')

        if remote_module_path or module_style != 'new':
            display.debug("transferring module to remote %s" % remote_module_path)
            if module_style == 'binary':
                self._transfer_file(module_path, remote_module_path)
            else:
                self._transfer_data(remote_module_path, module_data)
            if module_style == 'old':
                # we need to dump the module args to a k=v string in a file on
                # the remote system, which can be read and parsed by the module
                args_data = ""
                for k, v in iteritems(module_args):
                    args_data += '%s=%s ' % (k, shlex_quote(text_type(v)))
                self._transfer_data(args_file_path, args_data)
            elif module_style in ('non_native_want_json', 'binary'):
                self._transfer_data(args_file_path, json.dumps(module_args))
            display.debug("done transferring module to remote")

        environment_string = self._compute_environment_string()

        remote_files = []
        if tmpdir and remote_module_path:
            remote_files = [tmpdir, remote_module_path]

        if args_file_path:
            remote_files.append(args_file_path)

        sudoable = True
        in_data = None
        cmd = ""

        if wrap_async and not self._connection.always_pipeline_modules:
            # configure, upload, and chmod the async_wrapper module
            (async_module_style, shebang, async_module_data, async_module_path) = self._configure_module(module_name='async_wrapper', module_args=dict(),
                                                                                                         task_vars=task_vars)
            async_module_remote_filename = self._connection._shell.get_remote_filename(async_module_path)
            remote_async_module_path = self._connection._shell.join_path(tmpdir, async_module_remote_filename)
            self._transfer_data(remote_async_module_path, async_module_data)
            remote_files.append(remote_async_module_path)

            async_limit = self._task.async_val
            async_jid = str(random.randint(0, 999999999999))

            # call the interpreter for async_wrapper directly
            # this permits use of a script for an interpreter on non-Linux platforms
            # TODO: re-implement async_wrapper as a regular module to avoid this special case
            interpreter = shebang.replace('#!', '').strip()
            async_cmd = [interpreter, remote_async_module_path, async_jid, async_limit, remote_module_path]

            if environment_string:
                async_cmd.insert(0, environment_string)

            if args_file_path:
                async_cmd.append(args_file_path)
            else:
                # maintain a fixed number of positional parameters for async_wrapper
                async_cmd.append('_')

            if not self._should_remove_tmp_path(tmpdir):
                async_cmd.append("-preserve_tmp")

            cmd = " ".join(to_text(x) for x in async_cmd)

        else:

            if self._is_pipelining_enabled(module_style):
                in_data = module_data
            else:
                cmd = remote_module_path

            cmd = self._connection._shell.build_module_command(environment_string, shebang, cmd, arg_path=args_file_path).strip()

        # Fix permissions of the tmpdir path and tmpdir files. This should be called after all
        # files have been transferred.
        if remote_files:
            # remove none/empty
            remote_files = [x for x in remote_files if x]
            self._fixup_perms2(remote_files, self._play_context.remote_user)

        # actually execute
        res = self._low_level_execute_command(cmd, sudoable=sudoable, in_data=in_data)

        # parse the main result
        data = self._parse_returned_data(res)

        # NOTE: INTERNAL KEYS ONLY ACCESSIBLE HERE
        # get internal info before cleaning
        if data.pop("_ansible_suppress_tmpdir_delete", False):
            self._cleanup_remote_tmp = False

        # remove internal keys
        remove_internal_keys(data)

        if wrap_async:
            # async_wrapper will clean up its tmpdir on its own so we want the controller side to
            # forget about it now
            self._connection._shell.tmpdir = None

            # FIXME: for backwards compat, figure out if still makes sense
            data['changed'] = True

        # pre-split stdout/stderr into lines if needed
        if 'stdout' in data and 'stdout_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stdout', None) or u''
            data['stdout_lines'] = txt.splitlines()
        if 'stderr' in data and 'stderr_lines' not in data:
            # if the value is 'False', a default won't catch it.
            txt = data.get('stderr', None) or u''
            data['stderr_lines'] = txt.splitlines()

        display.debug("done with _execute_module (%s, %s)" % (module_name, module_args))
        return data
예제 #41
0
File: ec2.py  Project: awiddersheim/ansible
def _credential_from_env_or_boto(env_vars, boto_config_key):
    """Return the first truthy value among *env_vars*, then the boto v2
    config ('Credentials' section first, then 'default'), else None.

    Mirrors the original lookup order exactly: environment variables are
    checked with a truthiness test, so an empty-string env var is skipped.
    """
    for env_var in env_vars:
        if os.environ.get(env_var):
            return os.environ[env_var]
    if HAS_BOTO:
        for section in ('Credentials', 'default'):
            value = boto.config.get(section, boto_config_key)
            if value:
                return value
    # Also normalizes an empty string passed via module params to None.
    return None


def get_aws_connection_info(module, boto3=False):
    """Resolve AWS connection settings for a module.

    Precedence for each setting: module parameters, then environment
    variables, then (for boto v2) the boto config file.

    :param module: AnsibleModule instance; its params are consulted and
        ``fail_json`` is called if a required SDK is missing.
    :param boto3: when True, build connection kwargs in boto3 style
        (``aws_session_token`` / ``verify``) and resolve the default
        region via botocore.
    :returns: tuple ``(region, ec2_url, boto_params)`` where
        ``boto_params`` is a kwargs dict for the chosen SDK.
    """
    ec2_url = module.params.get('ec2_url')
    access_key = module.params.get('aws_access_key')
    secret_key = module.params.get('aws_secret_key')
    security_token = module.params.get('security_token')
    region = module.params.get('region')
    profile_name = module.params.get('profile')
    validate_certs = module.params.get('validate_certs')

    # URL uses a membership test (unlike the credentials below), so an
    # explicitly-set empty env var is honored here.
    if not ec2_url:
        if 'AWS_URL' in os.environ:
            ec2_url = os.environ['AWS_URL']
        elif 'EC2_URL' in os.environ:
            ec2_url = os.environ['EC2_URL']

    if not access_key:
        access_key = _credential_from_env_or_boto(
            ('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY', 'EC2_ACCESS_KEY'),
            'aws_access_key_id')

    if not secret_key:
        secret_key = _credential_from_env_or_boto(
            ('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_KEY', 'EC2_SECRET_KEY'),
            'aws_secret_access_key')

    if not region:
        if 'AWS_REGION' in os.environ:
            region = os.environ['AWS_REGION']
        elif 'AWS_DEFAULT_REGION' in os.environ:
            region = os.environ['AWS_DEFAULT_REGION']
        elif 'EC2_REGION' in os.environ:
            region = os.environ['EC2_REGION']
        else:
            if not boto3:
                if HAS_BOTO:
                    # boto.config.get returns None if config not found
                    region = boto.config.get('Boto', 'aws_region')
                    if not region:
                        region = boto.config.get('Boto', 'ec2_region')
                else:
                    module.fail_json(msg="boto is required for this module. Please install boto and try again")
            elif HAS_BOTO3:
                # here we don't need to make an additional call, will default to 'us-east-1' if the below evaluates to None.
                try:
                    region = botocore.session.Session(profile=profile_name).get_config_variable('region')
                except botocore.exceptions.ProfileNotFound:
                    # Unknown profile: leave region unset and let botocore default it later.
                    pass
            else:
                module.fail_json(msg="Boto3 is required for this module. Please install boto3 and try again")

    if not security_token:
        security_token = _credential_from_env_or_boto(
            ('AWS_SECURITY_TOKEN', 'AWS_SESSION_TOKEN', 'EC2_SECURITY_TOKEN'),
            'aws_security_token')

    if HAS_BOTO3 and boto3:
        # boto3-style kwargs: session token / verify naming.
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           aws_session_token=security_token)
        boto_params['verify'] = validate_certs

        if profile_name:
            boto_params['profile_name'] = profile_name

    else:
        # boto v2-style kwargs: security_token / validate_certs naming.
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           security_token=security_token)

        # only set profile_name if passed as an argument
        if profile_name:
            boto_params['profile_name'] = profile_name

        boto_params['validate_certs'] = validate_certs

    # The SDKs expect text, not bytes, for every connection kwarg.
    for param, value in boto_params.items():
        if isinstance(value, binary_type):
            boto_params[param] = text_type(value, 'utf-8', 'strict')

    return region, ec2_url, boto_params
Example #42
0
 def env_prefix(**args):
     """Render the given keyword arguments as a shell-safe ``KEY=value``
     prefix string, with each value quoted for the shell."""
     pairs = []
     for key, value in args.items():
         pairs.append('%s=%s' % (key, shlex_quote(text_type(value))))
     return ' '.join(pairs)
Example #43
0
    def template(self, variable, convert_bare=False, preserve_trailing_newlines=True, escape_backslashes=True, fail_on_undefined=None, overrides=None,
                 convert_data=True, static_vars=None, cache=True, bare_deprecated=True, disable_lookups=False):
        '''
        Templates (possibly recursively) any given data as input. If convert_bare is
        set to True, the given data will be wrapped as a jinja2 variable ('{{foo}}')
        before being sent through the template engine.

        Strings are rendered through Jinja2 (with result caching), while lists,
        tuples, and mappings are templated element-by-element via recursion.
        Keys listed in ``static_vars`` are copied through untouched, and any
        value marked unsafe (``__UNSAFE__``) is returned as-is without
        templating.
        '''
        static_vars = [''] if static_vars is None else static_vars

        # Don't template unsafe variables, just return them.
        if hasattr(variable, '__UNSAFE__'):
            return variable

        # Fall back to the instance-wide setting when the caller did not say.
        if fail_on_undefined is None:
            fail_on_undefined = self._fail_on_undefined_errors

        try:
            if convert_bare:
                # Wrap a bare variable name (e.g. 'foo') as '{{foo}}' first.
                variable = self._convert_bare_variable(variable, bare_deprecated=bare_deprecated)

            if isinstance(variable, string_types):
                result = variable

                if self._contains_vars(variable):
                    # Check to see if the string we are trying to render is just referencing a single
                    # var.  In this case we don't want to accidentally change the type of the variable
                    # to a string by using the jinja template renderer. We just want to pass it.
                    only_one = self.SINGLE_VAR.match(variable)
                    if only_one:
                        var_name = only_one.group(1)
                        if var_name in self._available_variables:
                            resolved_val = self._available_variables[var_name]
                            if isinstance(resolved_val, NON_TEMPLATED_TYPES):
                                # Non-templatable types pass through with their type preserved.
                                return resolved_val
                            elif resolved_val is None:
                                return C.DEFAULT_NULL_REPRESENTATION

                    # Using a cache in order to prevent template calls with already templated variables
                    sha1_hash = None
                    if cache:
                        # The cache key covers both the input string and the
                        # rendering options, so differing options never share
                        # a cached result.
                        variable_hash = sha1(text_type(variable).encode('utf-8'))
                        options_hash = sha1(
                            (
                                text_type(preserve_trailing_newlines) +
                                text_type(escape_backslashes) +
                                text_type(fail_on_undefined) +
                                text_type(overrides)
                            ).encode('utf-8')
                        )
                        sha1_hash = variable_hash.hexdigest() + options_hash.hexdigest()
                    if cache and sha1_hash in self._cached_result:
                        result = self._cached_result[sha1_hash]
                    else:
                        result = self.do_template(
                            variable,
                            preserve_trailing_newlines=preserve_trailing_newlines,
                            escape_backslashes=escape_backslashes,
                            fail_on_undefined=fail_on_undefined,
                            overrides=overrides,
                            disable_lookups=disable_lookups,
                        )

                        # Remember unsafe status before safe_eval may replace
                        # `result` with a plain container, so we can re-wrap it.
                        unsafe = hasattr(result, '__UNSAFE__')
                        if convert_data and not self._no_type_regex.match(variable):
                            # if this looks like a dictionary or list, convert it to such using the safe_eval method
                            if (result.startswith("{") and not result.startswith(self.environment.variable_start_string)) or \
                                    result.startswith("[") or result in ("True", "False"):
                                eval_results = safe_eval(result, locals=self._available_variables, include_exceptions=True)
                                if eval_results[1] is None:
                                    result = eval_results[0]
                                    if unsafe:
                                        result = wrap_var(result)
                                else:
                                    # FIXME: if the safe_eval raised an error, should we do something with it?
                                    pass

                        # we only cache in the case where we have a single variable
                        # name, to make sure we're not putting things which may otherwise
                        # be dynamic in the cache (filters, lookups, etc.)
                        if cache:
                            self._cached_result[sha1_hash] = result

                return result

            elif isinstance(variable, (list, tuple)):
                # Recurse into sequence elements; note the result is always a
                # list, even when a tuple was passed in.
                return [self.template(
                    v,
                    preserve_trailing_newlines=preserve_trailing_newlines,
                    fail_on_undefined=fail_on_undefined,
                    overrides=overrides,
                    disable_lookups=disable_lookups,
                ) for v in variable]
            elif isinstance(variable, (dict, Mapping)):
                d = {}
                # we don't use iteritems() here to avoid problems if the underlying dict
                # changes sizes due to the templating, which can happen with hostvars
                for k in variable.keys():
                    if k not in static_vars:
                        d[k] = self.template(
                            variable[k],
                            preserve_trailing_newlines=preserve_trailing_newlines,
                            fail_on_undefined=fail_on_undefined,
                            overrides=overrides,
                            disable_lookups=disable_lookups,
                        )
                    else:
                        # static_vars keys are intentionally left unrendered.
                        d[k] = variable[k]
                return d
            else:
                # Non-string, non-container values are returned untouched.
                return variable

        except AnsibleFilterError:
            # Filter errors are fatal only when configured; otherwise the
            # original (untemplated) value is returned unchanged.
            if self._fail_on_filter_errors:
                raise
            else:
                return variable