Example #1
    def __init__(self, module):
        self.module = module

        self.api_key = self.module.params.get('api_key')
        self.api_secret = self.module.params.get('api_secret')
        if not (self.api_key and self.api_secret):
            try:
                region = self.module.params.get('api_region')
                config = self.read_config(ini_group=region)
                self.api_key = config['key']
                self.api_secret = config['secret']
            except Exception as e:
                self.module.fail_json(msg="Error while processing config: %s" %
                                      to_native(e))

        self.headers = {
            'X-DNS-Token': "%s:%s" % (self.api_key, self.api_secret),
            'Content-Type': 'application/json',
            'Accept': 'application/json',
        }
        self.result = {
            'changed': False,
            'diff': {
                'before': {},
                'after': {},
            }
        }
Example #2
def check_required_arguments(argument_spec, module_parameters):
    """Check all paramaters in argument_spec and return a list of parameters
    that are required but not present in module_parameters

    Raises TypeError if the check fails

    :arg argument_spec: Argument spec dicitionary containing all parameters
        and their specification
    :arg module_paramaters: Dictionary of module parameters

    :returns: Empty list or raises TypeError if the check fails.
    """

    missing = []
    if argument_spec is None:
        return missing

    for (k, v) in argument_spec.items():
        required = v.get('required', False)
        if required and k not in module_parameters:
            missing.append(k)

    if missing:
        msg = "missing required arguments: %s" % ", ".join(missing)
        raise TypeError(to_native(msg))

    return missing
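
A minimal usage sketch (assuming this helper is importable from ansible.module_utils.common.validation, where it lives in recent Ansible releases; the argument_spec and parameter values below are made up purely for illustration):

from ansible.module_utils.common.validation import check_required_arguments

argument_spec = {
    'name': {'type': 'str', 'required': True},
    'state': {'type': 'str', 'default': 'present'},
}

# All required parameters present: returns an empty list.
print(check_required_arguments(argument_spec, {'name': 'web01'}))  # []

# 'name' missing: raises TypeError("missing required arguments: name").
try:
    check_required_arguments(argument_spec, {'state': 'absent'})
except TypeError as exc:
    print(exc)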
Example #3
def parse_cli_textfsm(value, template):
    if not HAS_TEXTFSM:
        raise AnsibleError(
            'parse_cli_textfsm filter requires TextFSM library to be installed'
        )

    if not isinstance(value, string_types):
        raise AnsibleError(
            "parse_cli_textfsm input should be a string, but was given a input of %s"
            % (type(value)))

    if not os.path.exists(template):
        raise AnsibleError('unable to locate parse_cli_textfsm template: %s' %
                           template)

    try:
        template = open(template)
    except IOError as exc:
        raise AnsibleError(to_native(exc))

    re_table = textfsm.TextFSM(template)
    fsm_results = re_table.ParseText(value)

    results = list()
    for item in fsm_results:
        results.append(dict(zip(re_table.header, item)))

    return results
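
The filter boils down to running TextFSM over the raw CLI text and zipping each result row against the template header. A standalone sketch of that core step (requires the third-party textfsm package; the template and CLI output below are invented for illustration):

from io import StringIO

import textfsm

# Hypothetical two-column template: interface name and status.
TEMPLATE = StringIO(
    "Value INTERFACE (\\S+)\n"
    "Value STATUS (\\S+)\n"
    "\n"
    "Start\n"
    "  ^${INTERFACE}\\s+${STATUS} -> Record\n"
)
CLI_OUTPUT = "eth0 up\neth1 down\n"

re_table = textfsm.TextFSM(TEMPLATE)
rows = re_table.ParseText(CLI_OUTPUT)
print([dict(zip(re_table.header, row)) for row in rows])
# [{'INTERFACE': 'eth0', 'STATUS': 'up'}, {'INTERFACE': 'eth1', 'STATUS': 'down'}]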
Example #4
    def get_service_inspect(self, service_id, skip_missing=False):
        """
        Returns Swarm service info for a single service, as given by the
        'docker service inspect' command

        :param service_id: service ID or name
        :param skip_missing: if True, return None instead of failing the task
        :return:
            Single service information structure
        """
        try:
            service_info = self.inspect_service(service_id)
        except NotFound as exc:
            if skip_missing is False:
                self.fail("Error while reading from Swarm manager: %s" %
                          to_native(exc))
            else:
                return None
        except APIError as exc:
            if exc.status_code == 503:
                self.fail(
                    "Cannot inspect service: To inspect service execute module on Swarm Manager"
                )
            self.fail("Error inspecting swarm service: %s" % exc)
        except Exception as exc:
            self.fail("Error inspecting swarm service: %s" % exc)

        json_str = json.dumps(service_info, ensure_ascii=False)
        service_info = json.loads(json_str)
        return service_info
Example #5
def do_load_resource(module, collection, name):
    """
    Create a new object (collection.item) by loading a datastructure directly
    """

    try:
        item = find_collection_item(collection, name, '')
    except Exception:
        module.fail_json(
            msg="An error occurred while running 'find_collection_item'")

    if item.exists:
        module.exit_json(changed=False,
                         name=item.name,
                         id=item.id,
                         value=item.value)

    # If not in check mode, apply the changes
    if not module.check_mode:
        try:
            item.datum = module.params['content']
            item.write()
        except Exception as e:
            module.fail_json(msg="Unable to write item content : %r" %
                             to_native(e))

    module.exit_json(changed=True,
                     name=item.name,
                     id=item.id,
                     value=item.value)
Example #6
def check_required_together(terms, module_parameters):
    """Check each list of terms to ensure every parameter in each list exists
    in the given module parameters

    Accepts a list of lists or tuples

    :arg terms: List of lists of terms to check. Each list should include
        parameters that are all required when at least one is specified
        in the module_parameters.
    :arg module_parameters: Dictionary of module parameters

    :returns: Empty list or raises TypeError if the check fails.
    """

    results = []
    if terms is None:
        return results

    for term in terms:
        counts = [count_terms(field, module_parameters) for field in term]
        non_zero = [c for c in counts if c > 0]
        if len(non_zero) > 0:
            if 0 in counts:
                results.append(term)
    if results:
        for term in results:
            msg = "parameters are required together: %s" % ', '.join(term)
            raise TypeError(to_native(msg))

    return results
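
A minimal usage sketch (assuming the helper is importable from ansible.module_utils.common.validation; the parameter names are hypothetical):

from ansible.module_utils.common.validation import check_required_together

terms = [['username', 'password']]

# Neither parameter of the pair is set, so there is nothing to enforce.
print(check_required_together(terms, {'host': 'example.com'}))  # []

# Only one of the pair is set: raises
# TypeError("parameters are required together: username, password").
try:
    check_required_together(terms, {'username': 'admin'})
except TypeError as exc:
    print(exc)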
Example #7
def check_missing_parameters(module_parameters, required_parameters=None):
    """This is for checking for required params when we can not check via
    argspec because we need more information than is simply given in the argspec.

    Raises TypeError if any required parameters are missing

    :arg module_paramaters: Dictionary of module parameters
    :arg required_parameters: List of parameters to look for in the given module
        parameters

    :returns: Empty list or raises TypeError if the check fails.
    """
    missing_params = []
    if required_parameters is None:
        return missing_params

    for param in required_parameters:
        if not module_parameters.get(param):
            missing_params.append(param)

    if missing_params:
        msg = "missing required arguments: %s" % ', '.join(missing_params)
        raise TypeError(to_native(msg))

    return missing_params
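
A minimal usage sketch (assuming the helper is importable from ansible.module_utils.common.validation). Note that the check uses parameters.get(param) truthiness, so a parameter that is present but None or empty also counts as missing:

from ansible.module_utils.common.validation import check_missing_parameters

# Hypothetical parameters, purely for illustration.
params = {'path': '/tmp/demo', 'mode': None}

print(check_missing_parameters(params, ['path']))  # []

# 'mode' is present but None, so it is reported as missing:
# raises TypeError("missing required arguments: mode").
try:
    check_missing_parameters(params, ['path', 'mode'])
except TypeError as exc:
    print(exc)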
Example #8
    def run(self, terms, variables=None, **kwargs):

        if variables is None:
            raise AnsibleError('No variables available to search')

        # no options, yet
        # self.set_options(direct=kwargs)

        ret = []
        variable_names = list(variables.keys())
        for term in terms:

            if not isinstance(term, string_types):
                raise AnsibleError(
                    'Invalid setting identifier, "%s" is not a string, it is a %s'
                    % (term, type(term)))

            try:
                name = re.compile(term)
            except Exception as e:
                raise AnsibleError(
                    'Unable to use "%s" as a search parameter: %s' %
                    (term, to_native(e)))

            for varname in variable_names:
                if name.search(varname):
                    ret.append(varname)

        return ret
Example #9
    def __init__(self, *args, **kwargs):
        connect_on_load = kwargs.pop('connect_on_load', True)

        argument_spec = NET_TRANSPORT_ARGS.copy()
        argument_spec['transport']['choices'] = NET_CONNECTIONS.keys()
        argument_spec.update(NET_CONNECTION_ARGS.copy())

        if kwargs.get('argument_spec'):
            argument_spec.update(kwargs['argument_spec'])
        kwargs['argument_spec'] = argument_spec

        super(NetworkModule, self).__init__(*args, **kwargs)

        self.connection = None
        self._cli = None
        self._config = None

        try:
            transport = self.params['transport'] or '__default__'
            cls = NET_CONNECTIONS[transport]
            self.connection = cls()
        except KeyError:
            self.fail_json(
                msg='Unknown transport or no default transport specified')
        except (TypeError, NetworkError) as exc:
            self.fail_json(msg=to_native(exc),
                           exception=traceback.format_exc())

        if connect_on_load:
            self.connect()
Example #10
    def parse(self, inventory, loader, host_list, cache=True):
        ''' parses the inventory file '''

        super(InventoryModule, self).parse(inventory, loader, host_list)

        try:
            for h in host_list.split(','):
                h = h.strip()
                if h:
                    try:
                        (hostnames, port) = self._expand_hostpattern(h)
                    except AnsibleError as e:
                        self.display.vvv(
                            "Unable to parse address from hostname, leaving unchanged: %s"
                            % to_text(e))
                        hostnames = [h]
                        port = None

                    for host in hostnames:
                        if host not in self.inventory.hosts:
                            self.inventory.add_host(host,
                                                    group='ungrouped',
                                                    port=port)
        except Exception as e:
            raise AnsibleParserError(
                "Invalid data from string, could not parse: %s" % to_native(e))
Example #11
def _fetch_information(token, url):
    results = []
    paginated_url = url
    while True:
        try:
            response = open_url(paginated_url,
                                headers={
                                    'X-Auth-Token': token,
                                    'Content-type': 'application/json'
                                })
        except Exception as e:
            raise AnsibleError("Error while fetching %s: %s" %
                               (url, to_native(e)))
        try:
            raw_json = json.loads(response.read())
        except ValueError:
            raise AnsibleError("Incorrect JSON payload")

        try:
            results.extend(raw_json["servers"])
        except KeyError:
            raise AnsibleError(
                "Incorrect format from the Scaleway API response")

        link = response.headers['Link']
        if not link:
            return results
        relations = parse_pagination_link(link)
        if 'next' not in relations:
            return results
        paginated_url = urllib_parse.urljoin(paginated_url, relations['next'])
Example #12
    def run(self, terms, variables, **kwargs):

        self.set_options(var_options=variables, direct=kwargs)
        boto_credentials = self._get_credentials()

        region = self.get_option('region')
        client = _boto3_conn(region, boto_credentials)

        secrets = []
        for term in terms:
            params = {}
            params['SecretId'] = term
            if kwargs.get('version_id'):
                params['VersionId'] = kwargs.get('version_id')
            if kwargs.get('version_stage'):
                params['VersionStage'] = kwargs.get('version_stage')

            try:
                response = client.get_secret_value(**params)
                if 'SecretBinary' in response:
                    secrets.append(response['SecretBinary'])
                if 'SecretString' in response:
                    secrets.append(response['SecretString'])
            except (botocore.exceptions.ClientError,
                    botocore.exceptions.BotoCoreError) as e:
                raise AnsibleError("Failed to retrieve secret: %s" %
                                   to_native(e))

        if kwargs.get('join'):
            joined_secret = []
            joined_secret.append(''.join(secrets))
            return joined_secret
        else:
            return secrets
Example #13
    def put_file(self, in_path, out_path):
        """ Place a local file located in 'in_path' inside container at 'out_path' """
        super(Connection, self).put_file(in_path, out_path)
        display.vvv("PUT %s TO %s" % (in_path, out_path),
                    host=self._container_id)
        if not self._mount_point:
            rc, stdout, stderr = self._podman(
                "cp", [in_path, self._container_id + ":" + out_path],
                use_container_id=False)
            if rc != 0:
                if 'cannot copy into running rootless container with pause set' in to_native(
                        stderr):
                    rc, stdout, stderr = self._podman("cp", [
                        "--pause=false", in_path,
                        self._container_id + ":" + out_path
                    ],
                                                      use_container_id=False)
                    if rc != 0:
                        raise AnsibleError(
                            "Failed to copy file from %s to %s in container %s\n%s"
                            % (in_path, out_path, self._container_id, stderr))
        else:
            real_out_path = self._mount_point + to_bytes(
                out_path, errors='surrogate_or_strict')
            shutil.copyfile(
                to_bytes(in_path, errors='surrogate_or_strict'),
                to_bytes(real_out_path, errors='surrogate_or_strict'))
Example #14
def ansible_dict_to_boto3_tag_list(tags_dict,
                                   tag_name_key_name='Key',
                                   tag_value_key_name='Value'):
    """ Convert a flat dict of key:value pairs representing AWS resource tags to a boto3 list of dicts
    Args:
        tags_dict (dict): Dict representing AWS resource tags.
        tag_name_key_name (str): Value to use as the key for all tag keys (useful because boto3 doesn't always use "Key")
        tag_value_key_name (str): Value to use as the key for all tag values (useful because boto3 doesn't always use "Value")
    Basic Usage:
        >>> tags_dict = {'MyTagKey': 'MyTagValue'}
        >>> ansible_dict_to_boto3_tag_list(tags_dict)
        [
            {
                'Key': 'MyTagKey',
                'Value': 'MyTagValue'
            }
        ]
    Returns:
        List: List of dicts containing tag keys and values
        [
            {
                'Key': 'MyTagKey',
                'Value': 'MyTagValue'
            }
        ]
    """

    tags_list = []
    for k, v in tags_dict.items():
        tags_list.append({
            tag_name_key_name: k,
            tag_value_key_name: to_native(v)
        })

    return tags_list
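
A minimal usage sketch (assuming the helper is importable; older Ansible releases ship it in ansible.module_utils.ec2, newer setups in the amazon.aws collection's module_utils):

from ansible.module_utils.ec2 import ansible_dict_to_boto3_tag_list

tags = {'Name': 'web01', 'env': 'prod'}

# Default 'Key'/'Value' names, as most boto3 tag APIs expect:
print(ansible_dict_to_boto3_tag_list(tags))
# [{'Key': 'Name', 'Value': 'web01'}, {'Key': 'env', 'Value': 'prod'}]

# Some AWS APIs use different dict keys for tag names and values:
print(ansible_dict_to_boto3_tag_list(tags,
                                     tag_name_key_name='TagKey',
                                     tag_value_key_name='TagValue'))
# [{'TagKey': 'Name', 'TagValue': 'web01'}, {'TagKey': 'env', 'TagValue': 'prod'}]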
Example #15
    def exec_command(self, cmd, in_data=None, sudoable=True):
        """Execute command."""
        super(Connection, self).exec_command(cmd,
                                             in_data=in_data,
                                             sudoable=sudoable)

        stdout = self.create_temporary_file_in_guest(suffix=".stdout")
        stderr = self.create_temporary_file_in_guest(suffix=".stderr")

        guest_program_spec = self._get_guest_program_spec(cmd, stdout, stderr)

        try:
            pid = self.processManager.StartProgramInGuest(
                vm=self.vm, auth=self.vm_auth, spec=guest_program_spec)
        except vim.fault.NoPermission as e:
            raise AnsibleError("No Permission Error: %s %s" %
                               (to_native(e.msg), to_native(e.privilegeId)))
        except vim.fault.FileNotFound as e:
            raise AnsibleError("StartProgramInGuest Error: %s" %
                               to_native(e.msg))
        except vmodl.fault.SystemError as e:
            if e.reason == 'vix error codes = (3016, 0).\n':
                raise AnsibleConnectionFailure(
                    "Connection failed, is the vm currently rebooting? Reason: %s"
                    % (to_native(e.reason)))
            else:
                raise AnsibleConnectionFailure("Connection failed. Reason %s" %
                                               (to_native(e.reason)))
        except vim.fault.GuestOperationsUnavailable:
            raise AnsibleConnectionFailure(
                "Cannot connect to guest. Native error: GuestOperationsUnavailable"
            )

        pid_info = self._get_pid_info(pid)

        while pid_info.endTime is None:
            sleep(self.get_option("exec_command_sleep_interval"))
            pid_info = self._get_pid_info(pid)

        stdout_response = self._fetch_file_from_vm(stdout)
        self.delete_file_in_guest(stdout)

        stderr_response = self._fetch_file_from_vm(stderr)
        self.delete_file_in_guest(stderr)

        return pid_info.exitCode, stdout_response.text, stderr_response.text
Example #16
    def disconnect(self):
        try:
            if self.connected:
                self.connection.disconnect()
            self.log('disconnected from %s' % self.params['host'])
        except NetworkError as exc:
            self.fail_json(msg=to_native(exc),
                           exception=traceback.format_exc())
Example #17
    def playbook_on_stats(self, stats):
        if self.aws_resource_actions:
            self.aws_resource_actions = sorted(
                list(
                    to_native(action)
                    for action in set(self.aws_resource_actions)))
            self._display.display("AWS ACTIONS: {0}".format(
                self.aws_resource_actions))
Example #18
    def process_playbook_values(self):
        ''' Get playbook values and perform input validation '''
        argument_spec = dict(
            vrf=dict(type='str', default='management'),
            connect_ssh_port=dict(type='int', default=22),
            file_system=dict(type='str', default='bootflash:'),
            file_pull=dict(type='bool', default=False),
            file_pull_timeout=dict(type='int', default=300),
            file_pull_compact=dict(type='bool', default=False),
            file_pull_kstack=dict(type='bool', default=False),
            local_file=dict(type='path'),
            local_file_directory=dict(type='path'),
            remote_file=dict(type='path'),
            remote_scp_server=dict(type='str'),
            remote_scp_server_user=dict(type='str'),
            remote_scp_server_password=dict(no_log=True),
        )

        playvals = {}
        # Process key value pairs from playbook task
        for key in argument_spec.keys():
            playvals[key] = self._task.args.get(key, argument_spec[key].get('default'))
            if playvals[key] is None:
                continue

            option_type = argument_spec[key].get('type', 'str')
            try:
                if option_type == 'str':
                    playvals[key] = validation.check_type_str(playvals[key])
                elif option_type == 'int':
                    playvals[key] = validation.check_type_int(playvals[key])
                elif option_type == 'bool':
                    playvals[key] = validation.check_type_bool(playvals[key])
                elif option_type == 'path':
                    playvals[key] = validation.check_type_path(playvals[key])
                else:
                    raise AnsibleError('Unrecognized type <{0}> for playbook parameter <{1}>'.format(option_type, key))

            except (TypeError, ValueError) as e:
                raise AnsibleError("argument %s is of type %s and we were unable to convert to %s: %s"
                                   % (key, type(playvals[key]), option_type, to_native(e)))

        # Validate playbook dependencies
        if playvals['file_pull']:
            if playvals.get('remote_file') is None:
                raise AnsibleError('Playbook parameter <remote_file> required when <file_pull> is True')
            if playvals.get('remote_scp_server') is None:
                raise AnsibleError('Playbook parameter <remote_scp_server> required when <file_pull> is True')

        if playvals['remote_scp_server'] or \
           playvals['remote_scp_server_user']:

            if None in (playvals['remote_scp_server'],
                        playvals['remote_scp_server_user']):
                params = '<remote_scp_server>, <remote_scp_server_user>'
                raise AnsibleError('Playbook parameters {0} must be set together'.format(params))

        return playvals
Example #19
    def _connect(self):
        if not HAS_PYPSRP:
            raise AnsibleError("pypsrp or dependencies are not installed: %s"
                               % to_native(PYPSRP_IMP_ERR))
        super(Connection, self)._connect()
        self._build_kwargs()
        display.vvv("ESTABLISH PSRP CONNECTION FOR USER: %s ON PORT %s TO %s" %
                    (self._psrp_user, self._psrp_port, self._psrp_host),
                    host=self._psrp_host)

        if not self.runspace:
            connection = WSMan(**self._psrp_conn_kwargs)

            # create our pseudo host to capture the exit code and host output
            host_ui = PSHostUserInterface()
            self.host = PSHost(None, None, False, "Ansible PSRP Host", None,
                               host_ui, None)

            self.runspace = RunspacePool(
                connection, host=self.host,
                configuration_name=self._psrp_configuration_name
            )
            display.vvvvv(
                "PSRP OPEN RUNSPACE: auth=%s configuration=%s endpoint=%s" %
                (self._psrp_auth, self._psrp_configuration_name,
                 connection.transport.endpoint), host=self._psrp_host
            )
            try:
                self.runspace.open()
            except AuthenticationError as e:
                raise AnsibleConnectionFailure("failed to authenticate with "
                                               "the server: %s" % to_native(e))
            except WinRMError as e:
                raise AnsibleConnectionFailure(
                    "psrp connection failure during runspace open: %s"
                    % to_native(e)
                )
            except (ConnectionError, ConnectTimeout) as e:
                raise AnsibleConnectionFailure(
                    "Failed to connect to the host via PSRP: %s"
                    % to_native(e)
                )

            self._connected = True
        return self
Example #20
def inversepower(x, base=2):
    try:
        if base == 2:
            return math.sqrt(x)
        else:
            return math.pow(x, 1.0 / float(base))
    except (ValueError, TypeError) as e:
        raise AnsibleFilterError('root() can only be used on numbers: %s' %
                                 to_native(e))
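
A minimal usage sketch (assuming the function is importable from ansible.plugins.filter.mathstuff, where it backs the root Jinja2 filter):

from ansible.plugins.filter.mathstuff import inversepower

print(inversepower(16))      # default base=2, i.e. math.sqrt -> 4.0
print(inversepower(27, 3))   # cube root -> approximately 3.0

# Non-numeric input raises
# AnsibleFilterError("root() can only be used on numbers: ...").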
Example #21
def logarithm(x, base=math.e):
    try:
        if base == 10:
            return math.log10(x)
        else:
            return math.log(x, base)
    except TypeError as e:
        raise AnsibleFilterError('log() can only be used on numbers: %s' %
                                 to_native(e))
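
A minimal usage sketch (assuming the function is importable from ansible.plugins.filter.mathstuff, where it backs the log Jinja2 filter):

import math

from ansible.plugins.filter.mathstuff import logarithm

print(logarithm(math.e))    # natural logarithm by default -> 1.0
print(logarithm(100, 10))   # base 10 uses math.log10 -> 2.0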
Example #22
    def parse(self, inventory, loader, path, cache=True):

        try:
            self._vbox_path = get_bin_path(self.VBOX, True)
        except ValueError as e:
            raise AnsibleParserError(e)

        super(InventoryModule, self).parse(inventory, loader, path)

        cache_key = self.get_cache_key(path)

        config_data = self._read_config_data(path)

        # set _options from config data
        self._consume_options(config_data)

        source_data = None
        if cache:
            cache = self.get_option('cache')

        update_cache = False
        if cache:
            try:
                source_data = self._cache[cache_key]
            except KeyError:
                update_cache = True

        if not source_data:
            b_pwfile = to_bytes(self.get_option('settings_password_file'),
                                errors='surrogate_or_strict',
                                nonstring='passthru')
            running = self.get_option('running_only')

            # start getting data
            cmd = [self._vbox_path, b'list', b'-l']
            if running:
                cmd.append(b'runningvms')
            else:
                cmd.append(b'vms')

            if b_pwfile and os.path.exists(b_pwfile):
                cmd.append(b'--settingspwfile')
                cmd.append(b_pwfile)

            try:
                p = Popen(cmd, stdout=PIPE)
            except Exception as e:
                raise AnsibleParserError(to_native(e))

            source_data = p.stdout.read().splitlines()

        using_current_cache = cache and not update_cache
        cacheable_results = self._populate_from_source(source_data,
                                                       using_current_cache)

        if update_cache:
            self._cache[cache_key] = cacheable_results
Example #23
def removed_module(
    removed_in,
    msg='This module has been removed. The module documentation for'
    ' Ansible-%(version)s may contain hints for porting'):
    """
    Returns module failure along with a message about the module being removed

    :arg removed_in: The version that the module was removed in
    :kwarg msg: Message to use in the module's failure message. The default says that the module
        has been removed and what version of the Ansible documentation to search for porting help.

    Remove the actual code and instead have boilerplate like this::

        from ansible.module_utils.common.removed import removed_module

        if __name__ == '__main__':
            removed_module("2.4")
    """
    results = {'failed': True}

    # Convert numbers into strings
    removed_in = to_native(removed_in)

    version = removed_in.split('.')
    try:
        numeric_minor = int(version[-1])
    except Exception:
        last_version = None
    else:
        version = version[:-1]
        version.append(to_native(numeric_minor - 1))
        last_version = '.'.join(version)

    if last_version is None:
        results['warnings'] = [
            'removed modules should specify the version they were removed in'
        ]
        results['msg'] = 'This module has been removed'
    else:
        results['msg'] = msg % {'version': last_version}

    print('\n{0}\n'.format(json.dumps(results)))
    sys.exit(1)
Example #24
    def _get_connection(self, credentials, region='us-east-1'):
        try:
            connection = boto3.session.Session(
                profile_name=self.boto_profile).client('ec2', region,
                                                       **credentials)
        except (botocore.exceptions.ProfileNotFound,
                botocore.exceptions.PartialCredentialsError) as e:
            if self.boto_profile:
                try:
                    connection = boto3.session.Session(
                        profile_name=self.boto_profile).client('ec2', region)
                except (botocore.exceptions.ProfileNotFound,
                        botocore.exceptions.PartialCredentialsError) as e:
                    raise AnsibleError("Insufficient credentials found: %s" %
                                       to_native(e))
            else:
                raise AnsibleError("Insufficient credentials found: %s" %
                                   to_native(e))
        return connection
Example #25
    def wrapper(f, *args, **kwargs):
        try:
            results = f(*args, **kwargs)
            if 'DBInstances' in results:
                results = results['DBInstances']
            else:
                results = results['DBClusters']
            _add_tags_for_hosts(connection, results, strict)
        except is_boto3_error_code('AccessDenied') as e:  # pylint: disable=duplicate-except
            if not strict:
                results = []
            else:
                raise AnsibleError("Failed to query RDS: {0}".format(
                    to_native(e)))
        except (botocore.exceptions.BotoCoreError,
                botocore.exceptions.ClientError) as e:  # pylint: disable=duplicate-except
            raise AnsibleError("Failed to query RDS: {0}".format(
                to_native(e)))
        return results
Example #26
    def _populate(self):
        daemon_env = self.get_option('daemon_env')
        try:
            for self.node in self._get_machine_names():
                self.node_attrs = self._inspect_docker_machine_host(self.node)
                if not self.node_attrs:
                    continue

                machine_name = self.node_attrs['Driver']['MachineName']

                # query `docker-machine env` to obtain remote Docker daemon connection settings in the form of commands
                # that could be used to set environment variables to influence a local Docker client:
                if daemon_env == 'skip':
                    env_var_tuples = []
                else:
                    env_var_tuples = self._get_docker_daemon_variables(machine_name)
                    if self._should_skip_host(machine_name, env_var_tuples, daemon_env):
                        continue

                # add an entry in the inventory for this host
                self.inventory.add_host(machine_name)

                # set standard Ansible remote host connection settings to details captured from `docker-machine`
                # see: https://docs.ansible.com/ansible/latest/user_guide/intro_inventory.html
                self.inventory.set_variable(machine_name, 'ansible_host', self.node_attrs['Driver']['IPAddress'])
                self.inventory.set_variable(machine_name, 'ansible_port', self.node_attrs['Driver']['SSHPort'])
                self.inventory.set_variable(machine_name, 'ansible_user', self.node_attrs['Driver']['SSHUser'])
                self.inventory.set_variable(machine_name, 'ansible_ssh_private_key_file', self.node_attrs['Driver']['SSHKeyPath'])

                # set variables based on Docker Machine tags
                tags = self.node_attrs['Driver'].get('Tags') or ''
                self.inventory.set_variable(machine_name, 'dm_tags', tags)

                # set variables based on Docker Machine env variables
                for kv in env_var_tuples:
                    self.inventory.set_variable(machine_name, 'dm_{0}'.format(kv[0]), kv[1])

                if self.get_option('verbose_output'):
                    self.inventory.set_variable(machine_name, 'docker_machine_node_attributes', self.node_attrs)

                # Use constructed if applicable
                strict = self.get_option('strict')

                # Composed variables
                self._set_composite_vars(self.get_option('compose'), self.node_attrs, machine_name, strict=strict)

                # Complex groups based on jinja2 conditionals, hosts that meet the conditional are added to group
                self._add_host_to_composed_groups(self.get_option('groups'), self.node_attrs, machine_name, strict=strict)

                # Create groups based on variable values and add the corresponding hosts to it
                self._add_host_to_keyed_groups(self.get_option('keyed_groups'), self.node_attrs, machine_name, strict=strict)

        except Exception as e:
            raise AnsibleError('Unable to fetch hosts from Docker Machine, this was the original exception: %s' %
                               to_native(e), orig_exc=e)
Example #27
    def create_temporary_file_in_guest(self, prefix="", suffix=""):
        """Create a temporary file in the VM."""
        try:
            return self.fileManager.CreateTemporaryFileInGuest(
                vm=self.vm, auth=self.vm_auth, prefix=prefix, suffix=suffix)
        except vim.fault.NoPermission as e:
            raise AnsibleError("No Permission Error: %s %s" %
                               (to_native(e.msg), to_native(e.privilegeId)))
        except vmodl.fault.SystemError as e:
            if e.reason == 'vix error codes = (3016, 0).\n':
                raise AnsibleConnectionFailure(
                    "Connection failed, is the vm currently rebooting? Reason: %s"
                    % (to_native(e.reason)))
            else:
                raise AnsibleConnectionFailure("Connection failed. Reason %s" %
                                               (to_native(e.reason)))
        except vim.fault.GuestOperationsUnavailable:
            raise AnsibleConnectionFailure(
                "Cannot connect to guest. Native error: GuestOperationsUnavailable"
            )
Example #28
    def run(self, terms, inject=None, **kwargs):

        ret = terms
        if terms:
            try:
                ret = [random.choice(terms)]
            except Exception as e:
                raise AnsibleError("Unable to choose random term: %s" %
                                   to_native(e))

        return ret
Example #29
    def grafana_list_dashboards(self):
        # define http headers
        headers = self.grafana_headers()

        dashboard_list = []
        try:
            if self.search:
                r = open_url('%s/api/search?query=%s' % (self.grafana_url, self.search), headers=headers, method='GET')
            else:
                r = open_url('%s/api/search/' % self.grafana_url, headers=headers, method='GET')
        except HTTPError as e:
            raise GrafanaAPIException('Unable to search dashboards : %s' % to_native(e))
        if r.getcode() == 200:
            try:
                dashboard_list = json.loads(r.read())
            except Exception as e:
                raise GrafanaAPIException('Unable to parse json list %s' % to_native(e))
        else:
            raise GrafanaAPIException('Unable to list grafana dashboards : %s' % str(r.getcode()))

        return dashboard_list
Example #30
    def parse(self, lines, comment_tokens=None):
        toplevel = re.compile(r'\S')
        childline = re.compile(r'^\s*(.+)$')
        entry_reg = re.compile(r'([{};])')

        ancestors = list()
        config = list()

        indents = [0]

        for linenum, line in enumerate(to_native(lines, errors='surrogate_or_strict').split('\n')):
            text = entry_reg.sub('', line).strip()

            cfg = ConfigLine(line)

            if not text or ignore_line(text, comment_tokens):
                continue

            # handle top level commands
            if toplevel.match(line):
                ancestors = [cfg]
                indents = [0]

            # handle sub level commands
            else:
                match = childline.match(line)
                line_indent = match.start(1)

                if line_indent < indents[-1]:
                    while indents[-1] > line_indent:
                        indents.pop()

                if line_indent > indents[-1]:
                    indents.append(line_indent)

                curlevel = len(indents) - 1
                parent_level = curlevel - 1

                cfg._parents = ancestors[:curlevel]

                if curlevel > len(ancestors):
                    config.append(cfg)
                    continue

                for i in range(curlevel, len(ancestors)):
                    ancestors.pop()

                ancestors.append(cfg)
                ancestors[parent_level].add_child(cfg)

            config.append(cfg)

        return config