def check_lib(self):
    """Abort via ``fail_json`` unless both client libraries imported.

    ``requests`` and ``influxdb`` are probed at import time; the captured
    tracebacks are attached so users can see why the import failed.
    """
    for present, name, trace in (
            (HAS_REQUESTS, 'requests', REQUESTS_IMP_ERR),
            (HAS_INFLUXDB, 'influxdb', INFLUXDB_IMP_ERR)):
        if not present:
            self.module.fail_json(msg=missing_required_lib(name), exception=trace)
def __init__(self, *args, **kwargs):
    """Initialize the module, injecting this class's argspec and verifying
    that the ``openshift`` and ``PyYAML`` libraries are importable.
    """
    # Force the subclass-provided argspec; callers cannot override it.
    kwargs['argument_spec'] = self.argspec
    AnsibleModule.__init__(self, *args, **kwargs)
    # The openshift client is mandatory; include both the captured import
    # traceback and the original exception text in the failure.
    if not HAS_K8S_MODULE_HELPER:
        self.fail_json(msg=missing_required_lib('openshift'), exception=K8S_IMP_ERR,
                       error=to_native(k8s_import_exception))
    # Safe to read only after the HAS_K8S_MODULE_HELPER guard above.
    self.openshift_version = openshift.__version__
    if not HAS_YAML:
        self.fail_json(msg=missing_required_lib("PyYAML"), exception=YAML_IMP_ERR)
def __init__(self, module, represent):
    """Store the Ansible module, seed the common result skeleton and
    build the hcloud client (fails if the library is missing).

    :param module: the AnsibleModule driving this resource
    :param represent: result key under which the resource is reported
    """
    self.module = module
    self.represent = represent
    # Nothing changed yet; the resource slot starts out empty.
    result = dict(changed=False)
    result[represent] = None
    self.result = result
    if not HAS_HCLOUD:
        module.fail_json(msg=missing_required_lib("hcloud-python"))
    self._build_client()
def ECSClient(entrust_api_user=None, entrust_api_key=None, entrust_api_cert=None,
              entrust_api_cert_key=None, entrust_api_specification_path=None):
    """Create an ECS client.

    Raises SessionConfigurationException when PyYAML is unavailable, since
    the API specification is a YAML document.
    """
    if not YAML_FOUND:
        raise SessionConfigurationException(missing_required_lib("PyYAML"), exception=YAML_IMP_ERR)

    # Default to the hosted 2.1.0 specification when none was supplied.
    if entrust_api_specification_path is None:
        entrust_api_specification_path = "https://cloud.entrust.net/EntrustCloud/documentation/cms-api-2.1.0.yaml"

    # Not functionally necessary with current uses of this module_util,
    # but better to be explicit for future use cases.
    entrust_api_user, entrust_api_key, entrust_api_cert_key, entrust_api_specification_path = (
        to_text(entrust_api_user),
        to_text(entrust_api_key),
        to_text(entrust_api_cert_key),
        to_text(entrust_api_specification_path),
    )

    session_kwargs = dict(
        entrust_api_user=entrust_api_user,
        entrust_api_key=entrust_api_key,
        entrust_api_cert=entrust_api_cert,
        entrust_api_cert_key=entrust_api_cert_key,
        entrust_api_specification_path=entrust_api_specification_path,
    )
    return ECSSession("ecs", **session_kwargs).client()
def _connect(self, check_vm_credentials=True):
    """Verify required libraries, then establish the vSphere/VM session.

    :param check_vm_credentials: forwarded to ``_establish_vm``
    """
    # Both libraries must be importable before any connection work.
    for present, name, trace in ((HAS_REQUESTS, 'requests', REQUESTS_IMP_ERR),
                                 (HAS_PYVMOMI, 'PyVmomi', PYVMOMI_IMP_ERR)):
        if not present:
            raise AnsibleError("%s : %s" % (missing_required_lib(name), trace))
    super(Connection, self)._connect()
    # Only build the connection once; subsequent calls are no-ops.
    if not self.connected:
        self._establish_connection()
        self._establish_vm(check_vm_credentials=check_vm_credentials)
        self._connected = True
def set_crypto_backend(module):
    '''
    Sets which crypto backend to use (default: auto detection).

    Does not care whether a new enough cryptography is available or not. Must
    be called before any real stuff is done which might evaluate
    ``HAS_CURRENT_CRYPTOGRAPHY``.
    '''
    global HAS_CURRENT_CRYPTOGRAPHY

    # Choose backend; 'auto' keeps whatever the import-time detection decided.
    backend = module.params['select_crypto_backend']
    if backend == 'openssl':
        HAS_CURRENT_CRYPTOGRAPHY = False
    elif backend == 'cryptography':
        # Touch the module to prove it actually imported.
        try:
            cryptography.__version__
        except Exception as dummy:
            module.fail_json(msg=missing_required_lib('cryptography'))
        HAS_CURRENT_CRYPTOGRAPHY = True
    elif backend != 'auto':
        module.fail_json(msg='Unknown crypto backend "{0}"!'.format(backend))

    # Inform about choices
    if HAS_CURRENT_CRYPTOGRAPHY:
        module.debug('Using cryptography backend (library version {0})'.format(CRYPTOGRAPHY_VERSION))
        return 'cryptography'
    module.debug('Using OpenSSL binary backend')
    return 'openssl'
def __init__(self, *args, **kwargs):
    """Set up the connection plugin; requires boto3 and tunes the
    transport for PowerShell-family shells (Windows targets)."""
    if not HAS_BOTO_3:
        raise AnsibleError('{0}: {1}'.format(missing_required_lib("boto3"), HAS_BOTO_3_ERROR))

    super(Connection, self).__init__(*args, **kwargs)
    self.host = self._play_context.remote_addr

    shell_family = getattr(self._shell, "SHELL_FAMILY", '')
    if shell_family == 'powershell':
        # Windows/PowerShell target: enable native async + pipelining and
        # prefer PowerShell module implementations.
        self.delegate = None
        self.has_native_async = True
        self.always_pipeline_modules = True
        self.module_implementation_preferences = ('.ps1', '.exe', '')
        self.protocol = None
        self.shell_id = None
        self._shell_type = 'powershell'
        self.is_windows = True
def __init__(self, module): """ Create a new DimensionDataModule. Will fail if Apache libcloud is not present. :param module: The underlying Ansible module. :type module: AnsibleModule """ self.module = module if not HAS_LIBCLOUD: self.module.fail_json(msg=missing_required_lib('libcloud'), exception=LIBCLOUD_IMP_ERR) # Credentials are common to all Dimension Data modules. credentials = self.get_credentials() self.user_id = credentials['user_id'] self.key = credentials['key'] # Region and location are common to all Dimension Data modules. region = self.module.params['region'] self.region = 'dd-{0}'.format(region) self.location = self.module.params['location'] libcloud.security.VERIFY_SSL_CERT = self.module.params[ 'validate_certs'] self.driver = get_driver(Provider.DIMENSIONDATA)(self.user_id, self.key, region=self.region) # Determine the MCP API version (this depends on the target datacenter). self.mcp_version = self.get_mcp_version(self.location) # Optional "wait-for-completion" arguments if 'wait' in self.module.params: self.wait = self.module.params['wait'] self.wait_time = self.module.params['wait_time'] self.wait_poll_interval = self.module.params['wait_poll_interval'] else: self.wait = False self.wait_time = 0 self.wait_poll_interval = 0
def gitlabAuthentication(module):
    """Return an authenticated python-gitlab ``Gitlab`` instance built
    from the module's api_* parameters; calls ``fail_json`` on any
    authentication or connection error.
    """
    gitlab_url = module.params['api_url']
    validate_certs = module.params['validate_certs']
    gitlab_user = module.params['api_username']
    gitlab_password = module.params['api_password']
    gitlab_token = module.params['api_token']

    if not HAS_GITLAB_PACKAGE:
        module.fail_json(msg=missing_required_lib("python-gitlab"), exception=GITLAB_IMP_ERR)

    try:
        # python-gitlab library remove support for username/password authentication since 1.13.0
        # Changelog : https://github.com/python-gitlab/python-gitlab/releases/tag/v1.13.0
        # This condition allow to still support older version of the python-gitlab library
        if StrictVersion(gitlab.__version__) < StrictVersion("1.13.0"):
            gitlab_instance = gitlab.Gitlab(url=gitlab_url, ssl_verify=validate_certs, email=gitlab_user,
                                            password=gitlab_password, private_token=gitlab_token, api_version=4)
        else:
            gitlab_instance = gitlab.Gitlab(url=gitlab_url, ssl_verify=validate_certs,
                                            private_token=gitlab_token, api_version=4)
        # Verify the credentials by performing an explicit auth call.
        gitlab_instance.auth()
    except (gitlab.exceptions.GitlabAuthenticationError, gitlab.exceptions.GitlabGetError) as e:
        module.fail_json(msg="Failed to connect to GitLab server: %s" % to_native(e))
    except (gitlab.exceptions.GitlabHttpError) as e:
        module.fail_json(msg="Failed to connect to GitLab server: %s. \
            GitLab remove Session API now that private tokens are removed from user API endpoints since version 10.2." % to_native(e))

    return gitlab_instance
def __init__(self, argument_spec, **kwargs):
    """Initialize the Tower module: merge the common tower_* arguments
    into the caller's argspec, register the tower_config_file mutual
    exclusions, and verify the ansible-tower-cli library is present.
    """
    args = dict(
        tower_host=dict(),
        tower_username=dict(),
        tower_password=dict(no_log=True),
        validate_certs=dict(type='bool', aliases=['tower_verify_ssl']),
        tower_config_file=dict(type='path'),
    )
    args.update(argument_spec)

    mutually_exclusive = kwargs.get('mutually_exclusive', [])
    # BUG FIX: the original code did
    #   kwargs['mutually_exclusive'] = mutually_exclusive.extend((...))
    # but list.extend() returns None, so the kwarg was silently set to
    # None and every mutual-exclusion rule was discarded. Extend first,
    # then store the list itself.
    mutually_exclusive.extend((
        ('tower_config_file', 'tower_host'),
        ('tower_config_file', 'tower_username'),
        ('tower_config_file', 'tower_password'),
        ('tower_config_file', 'validate_certs'),
    ))
    kwargs['mutually_exclusive'] = mutually_exclusive

    super(TowerModule, self).__init__(argument_spec=args, **kwargs)

    if not HAS_TOWER_CLI:
        self.fail_json(msg=missing_required_lib('ansible-tower-cli'),
                       exception=TOWER_CLI_IMP_ERR)
def __init__(self, argument_spec=None, supports_check_mode=False, mutually_exclusive=None,
             required_together=None, required_if=None, min_docker_version=MIN_DOCKER_VERSION,
             min_docker_api_version=None, option_minimal_versions=None,
             option_minimal_versions_ignore_params=None, fail_results=None):
    """Build the AnsibleModule wrapper around the Docker SDK client.

    Merges the Docker common argspec/constraints into the caller's,
    validates that a compatible Docker SDK for Python is installed, then
    connects and checks the daemon's API version.
    """
    # Modules can put information in here which will always be returned
    # in case client.fail() is called.
    self.fail_results = fail_results or {}

    # Merge the shared Docker arguments/constraints with the module's own.
    merged_arg_spec = dict()
    merged_arg_spec.update(DOCKER_COMMON_ARGS)
    if argument_spec:
        merged_arg_spec.update(argument_spec)
    self.arg_spec = merged_arg_spec

    mutually_exclusive_params = []
    mutually_exclusive_params += DOCKER_MUTUALLY_EXCLUSIVE
    if mutually_exclusive:
        mutually_exclusive_params += mutually_exclusive

    required_together_params = []
    required_together_params += DOCKER_REQUIRED_TOGETHER
    if required_together:
        required_together_params += required_together

    self.module = AnsibleModule(
        argument_spec=merged_arg_spec,
        supports_check_mode=supports_check_mode,
        mutually_exclusive=mutually_exclusive_params,
        required_together=required_together_params,
        required_if=required_if)

    # Requiring SDK >= 2.0.0 implies the new "docker" package (docker-py
    # stopped at 1.x), which changes the guidance messages below.
    NEEDS_DOCKER_PY2 = (LooseVersion(min_docker_version) >= LooseVersion('2.0.0'))

    self.docker_py_version = LooseVersion(docker_version)

    # docker-py and docker share the "docker" namespace; having both
    # installed corrupts the installation, so refuse to continue.
    if HAS_DOCKER_MODELS and HAS_DOCKER_SSLADAPTER:
        self.fail("Cannot have both the docker-py and docker python modules (old and new version of Docker "
                  "SDK for Python) installed together as they use the same namespace and cause a corrupt "
                  "installation. Please uninstall both packages, and re-install only the docker-py or docker "
                  "python module (for %s's Python %s). It is recommended to install the docker module if no "
                  "support for Python 2.6 is required. Please note that simply uninstalling one of the modules "
                  "can leave the other module in a broken state."
                  % (platform.node(), sys.executable))

    if not HAS_DOCKER_PY:
        # Tailor the install hint to which SDK generations are acceptable.
        if NEEDS_DOCKER_PY2:
            msg = missing_required_lib("Docker SDK for Python: docker")
            msg = msg + ", for example via `pip install docker`. The error was: %s"
        else:
            msg = missing_required_lib(
                "Docker SDK for Python: docker (Python >= 2.7) or docker-py (Python 2.6)"
            )
            msg = msg + ", for example via `pip install docker` or `pip install docker-py` (Python 2.6). The error was: %s"
        self.fail(msg % HAS_DOCKER_ERROR)

    if self.docker_py_version < LooseVersion(min_docker_version):
        msg = "Error: Docker SDK for Python version is %s (%s's Python %s). Minimum version required is %s."
        if not NEEDS_DOCKER_PY2:
            # The minimal required version is < 2.0 (and the current version as well).
            # Advertise docker (instead of docker-py) for non-Python-2.6 users.
            msg += DOCKERPYUPGRADE_RECOMMEND_DOCKER
        elif docker_version < LooseVersion('2.0'):
            msg += DOCKERPYUPGRADE_SWITCH_TO_DOCKER
        else:
            msg += DOCKERPYUPGRADE_UPGRADE_DOCKER
        self.fail(msg % (docker_version, platform.node(), sys.executable, min_docker_version))

    self.debug = self.module.params.get('debug')
    self.check_mode = self.module.check_mode
    self._connect_params = get_connect_params(self.auth_params, fail_function=self.fail)

    try:
        super(AnsibleDockerClient, self).__init__(**self._connect_params)
        # Ask the daemon which API version it speaks.
        self.docker_api_version_str = self.version()['ApiVersion']
    except APIError as exc:
        self.fail("Docker API error: %s" % exc)
    except Exception as exc:
        self.fail("Error connecting: %s" % exc)

    self.docker_api_version = LooseVersion(self.docker_api_version_str)
    if min_docker_api_version is not None:
        if self.docker_api_version < LooseVersion(min_docker_api_version):
            self.fail(
                'Docker API version is %s. Minimum version required is %s.'
                % (self.docker_api_version_str, min_docker_api_version))

    # Per-option version constraints (SDK/API) are checked separately.
    if option_minimal_versions is not None:
        self._get_minimal_versions(option_minimal_versions, option_minimal_versions_ignore_params)
def is_pyxcli_installed(module):
    """Fail the module unless the ``pyxcli`` package imported successfully."""
    if PYXCLI_INSTALLED:
        return
    module.fail_json(msg=missing_required_lib('pyxcli'), exception=PYXCLI_IMP_ERR)
def run(self, terms, variables=None, **kwargs):
    """Look up the LAPS password for each host in ``terms`` via LDAP.

    Builds and validates the connection options, establishes an
    (optionally TLS-protected) LDAP connection with simple or GSSAPI
    auth, then queries each server's password attribute.
    """
    if not HAS_LDAP:
        msg = missing_required_lib("python-ldap", url="https://pypi.org/project/python-ldap/")
        msg += ". Import Error: %s" % LDAP_IMP_ERR
        raise AnsibleLookupError(msg)

    # Load the variables and direct args into the lookup options
    self.set_options(var_options=variables, direct=kwargs)
    domain = self.get_option('domain')
    port = self.get_option('port')
    scheme = self.get_option('scheme')
    start_tls = self.get_option('start_tls')
    validate_certs = self.get_option('validate_certs')
    cacert_file = self.get_option('ca_cert')
    search_base = self.get_option('search_base')
    username = self.get_option('username')
    password = self.get_option('password')
    auth = self.get_option('auth')
    allow_plaintext = self.get_option('allow_plaintext')

    # Validate and set input values
    # https://www.openldap.org/lists/openldap-software/200202/msg00456.html
    validate_certs_map = {
        'never': ldap.OPT_X_TLS_NEVER,
        'allow': ldap.OPT_X_TLS_ALLOW,
        'try': ldap.OPT_X_TLS_TRY,
        'demand': ldap.OPT_X_TLS_DEMAND,  # Same as OPT_X_TLS_HARD
    }
    validate_certs_value = validate_certs_map.get(validate_certs, None)
    if validate_certs_value is None:
        valid_keys = list(validate_certs_map.keys())
        valid_keys.sort()
        raise AnsibleLookupError("Invalid validate_certs value '%s': valid values are '%s'"
                                 % (validate_certs, "', '".join(valid_keys)))

    if auth not in ['gssapi', 'simple']:
        raise AnsibleLookupError("Invalid auth value '%s': expecting either 'gssapi', or 'simple'" % auth)
    elif auth == 'gssapi':
        # GSSAPI needs SASL support compiled into the local LDAP library
        # and credentials acquired out-of-band (kinit).
        if not ldap.SASL_AVAIL:
            raise AnsibleLookupError("Cannot use auth=gssapi when SASL is not configured with the local LDAP "
                                     "install")
        if username or password:
            raise AnsibleLookupError("Explicit credentials are not supported when auth='gssapi'. Call kinit "
                                     "outside of Ansible")
    elif auth == 'simple' and not (username and password):
        raise AnsibleLookupError("The username and password values are required when auth=simple")

    if ldapurl.isLDAPUrl(domain):
        ldap_url = ldapurl.LDAPUrl(ldapUrl=domain)
    else:
        # Default ports: 389 for ldap, 636 otherwise (ldaps).
        port = port if port else 389 if scheme == 'ldap' else 636
        ldap_url = ldapurl.LDAPUrl(hostport="%s:%d" % (domain, port), urlscheme=scheme)

    # We have encryption if using LDAPS, or StartTLS is used, or we auth with SASL/GSSAPI
    encrypted = ldap_url.urlscheme == 'ldaps' or start_tls or auth == 'gssapi'
    if not encrypted and not allow_plaintext:
        raise AnsibleLookupError("Current configuration will result in plaintext traffic exposing credentials. "
                                 "Set auth=gssapi, scheme=ldaps, start_tls=True, or allow_plaintext=True to "
                                 "continue")

    if ldap_url.urlscheme == 'ldaps' or start_tls:
        # We cannot use conn.set_option as OPT_X_TLS_NEWCTX (required to use the new context) is not supported on
        # older distros like EL7. Setting it on the ldap object works instead
        if not ldap.TLS_AVAIL:
            raise AnsibleLookupError("Cannot use TLS as the local LDAP installed has not been configured to support it")

        ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, validate_certs_value)
        if cacert_file:
            cacert_path = os.path.expanduser(os.path.expandvars(cacert_file))
            if not os.path.exists(to_bytes(cacert_path)):
                raise AnsibleLookupError("The cacert_file specified '%s' does not exist" % to_native(cacert_path))

            try:
                # While this is a path, python-ldap expects a str/unicode and not bytes
                ldap.set_option(ldap.OPT_X_TLS_CACERTFILE, to_text(cacert_path))
            except ValueError:
                # https://keathmilligan.net/python-ldap-and-macos/
                raise AnsibleLookupError("Failed to set path to cacert file, this is a known issue with older "
                                         "OpenLDAP libraries on the host. Update OpenLDAP and reinstall "
                                         "python-ldap to continue")

    conn_url = ldap_url.initializeUrl()
    conn = ldap.initialize(conn_url, bytes_mode=False)
    conn.set_option(ldap.OPT_PROTOCOL_VERSION, 3)
    conn.set_option(ldap.OPT_REFERRALS, 0)  # Allow us to search from the base

    # Make sure we run StartTLS before doing the bind to protect the credentials
    if start_tls:
        try:
            conn.start_tls_s()
        except ldap.LDAPError as err:
            raise AnsibleLookupError("Failed to send StartTLS to LDAP host '%s': %s"
                                     % (conn_url, to_native(err)))

    if auth == 'simple':
        try:
            conn.bind_s(to_text(username), to_text(password))
        except ldap.LDAPError as err:
            raise AnsibleLookupError("Failed to simple bind against LDAP host '%s': %s"
                                     % (conn_url, to_native(err)))
    else:
        try:
            conn.sasl_gssapi_bind_s()
        except ldap.AUTH_UNKNOWN as err:
            # The SASL GSSAPI binding is not installed, e.g. cyrus-sasl-gssapi. Give a better error message than
            # what python-ldap provides
            raise AnsibleLookupError("Failed to do a sasl bind against LDAP host '%s', the GSSAPI mech is not "
                                     "installed: %s" % (conn_url, to_native(err)))
        except ldap.LDAPError as err:
            raise AnsibleLookupError("Failed to do a sasl bind against LDAP host '%s': %s"
                                     % (conn_url, to_native(err)))

    try:
        # When no search base was given, fall back to the server's
        # advertised default naming context.
        if not search_base:
            root_dse = conn.read_rootdse_s()
            search_base = root_dse['defaultNamingContext'][0]

        ret = []
        # TODO: change method to search for all servers in 1 request instead of multiple requests
        for server in terms:
            ret.append(get_laps_password(conn, server, search_base))
    finally:
        conn.unbind_s()

    return ret
def _validate(self):
    """Fail the module when the ``keystoneauth1`` library is unavailable."""
    if HAS_THIRD_LIBRARIES:
        return
    self.module.fail_json(
        msg=missing_required_lib('keystoneauth1'),
        exception=THIRD_LIBRARIES_IMP_ERR)
def check_required_library(self):
    """Fail the module when the ``requests`` library is unavailable."""
    if HAS_REQUESTS:
        return
    self.module.fail_json(msg=missing_required_lib('requests'))
def __init__(self, k8s_kind=None, *args, **kwargs):
    """Initialize the raw K8s module: validate openshift feature/version
    requirements and normalize ``resource_definition``/``src`` input into
    ``self.resource_definitions``.
    """
    self.client = None
    self.warnings = []

    mutually_exclusive = [
        ('resource_definition', 'src'),
        ('merge_type', 'apply'),
    ]

    KubernetesAnsibleModule.__init__(self, *args,
                                     mutually_exclusive=mutually_exclusive,
                                     supports_check_mode=True,
                                     **kwargs)
    # Explicit kind argument wins over the module parameter.
    self.kind = k8s_kind or self.params.get('kind')
    self.api_version = self.params.get('api_version')
    self.name = self.params.get('name')
    self.namespace = self.params.get('namespace')
    resource_definition = self.params.get('resource_definition')
    validate = self.params.get('validate')
    # Each optional feature below needs a minimum openshift client version.
    if validate:
        if LooseVersion(self.openshift_version) < LooseVersion("0.8.0"):
            self.fail_json(msg="openshift >= 0.8.0 is required for validate")
    self.append_hash = self.params.get('append_hash')
    if self.append_hash:
        if not HAS_K8S_CONFIG_HASH:
            self.fail_json(msg=missing_required_lib("openshift >= 0.7.2", reason="for append_hash"),
                           exception=K8S_CONFIG_HASH_IMP_ERR)
    if self.params['merge_type']:
        if LooseVersion(self.openshift_version) < LooseVersion("0.6.2"):
            self.fail_json(msg=missing_required_lib("openshift >= 0.6.2", reason="for merge_type"))
    self.apply = self.params.get('apply', False)
    if self.apply:
        if not HAS_K8S_APPLY:
            self.fail_json(msg=missing_required_lib("openshift >= 0.9.2", reason="for apply"))

    if resource_definition:
        # Accept a YAML string (possibly multi-document), a list of
        # definitions, or a single definition dict.
        if isinstance(resource_definition, string_types):
            try:
                self.resource_definitions = yaml.safe_load_all(resource_definition)
            except (IOError, yaml.YAMLError) as exc:
                self.fail(msg="Error loading resource_definition: {0}".format(exc))
        elif isinstance(resource_definition, list):
            self.resource_definitions = resource_definition
        else:
            self.resource_definitions = [resource_definition]
    src = self.params.get('src')
    if src:
        self.resource_definitions = self.load_resource_definitions(src)
    try:
        # Drop empty documents; a generator (from safe_load_all) supports
        # this too, while other shapes raise AttributeError and are kept.
        self.resource_definitions = [item for item in self.resource_definitions if item]
    except AttributeError:
        pass

    if not resource_definition and not src:
        # Build an implicit single definition from the scalar parameters.
        implicit_definition = dict(kind=self.kind,
                                   apiVersion=self.api_version,
                                   metadata=dict(name=self.name))
        if self.namespace:
            implicit_definition['metadata']['namespace'] = self.namespace
        self.resource_definitions = [implicit_definition]
def get_aws_connection_info(module, boto3=False):
    """Resolve AWS endpoint, region and credentials.

    Precedence per value: module parameter, then environment variables,
    then (boto only) the boto config file. Returns a tuple of
    ``(region, ec2_url, boto_params)`` where ``boto_params`` is shaped for
    either boto3 or boto depending on the ``boto3`` flag.
    """
    # Check module args for credentials, then check environment vars
    # access_key
    ec2_url = module.params.get('ec2_url')
    access_key = module.params.get('aws_access_key')
    secret_key = module.params.get('aws_secret_key')
    security_token = module.params.get('security_token')
    region = module.params.get('region')
    profile_name = module.params.get('profile')
    validate_certs = module.params.get('validate_certs')

    if not ec2_url:
        if 'AWS_URL' in os.environ:
            ec2_url = os.environ['AWS_URL']
        elif 'EC2_URL' in os.environ:
            ec2_url = os.environ['EC2_URL']

    if not access_key:
        if os.environ.get('AWS_ACCESS_KEY_ID'):
            access_key = os.environ['AWS_ACCESS_KEY_ID']
        elif os.environ.get('AWS_ACCESS_KEY'):
            access_key = os.environ['AWS_ACCESS_KEY']
        elif os.environ.get('EC2_ACCESS_KEY'):
            access_key = os.environ['EC2_ACCESS_KEY']
        elif HAS_BOTO and boto.config.get('Credentials', 'aws_access_key_id'):
            access_key = boto.config.get('Credentials', 'aws_access_key_id')
        elif HAS_BOTO and boto.config.get('default', 'aws_access_key_id'):
            access_key = boto.config.get('default', 'aws_access_key_id')
        else:
            # in case access_key came in as empty string
            access_key = None

    if not secret_key:
        if os.environ.get('AWS_SECRET_ACCESS_KEY'):
            secret_key = os.environ['AWS_SECRET_ACCESS_KEY']
        elif os.environ.get('AWS_SECRET_KEY'):
            secret_key = os.environ['AWS_SECRET_KEY']
        elif os.environ.get('EC2_SECRET_KEY'):
            secret_key = os.environ['EC2_SECRET_KEY']
        elif HAS_BOTO and boto.config.get('Credentials', 'aws_secret_access_key'):
            secret_key = boto.config.get('Credentials', 'aws_secret_access_key')
        elif HAS_BOTO and boto.config.get('default', 'aws_secret_access_key'):
            secret_key = boto.config.get('default', 'aws_secret_access_key')
        else:
            # in case secret_key came in as empty string
            secret_key = None

    if not region:
        if 'AWS_REGION' in os.environ:
            region = os.environ['AWS_REGION']
        elif 'AWS_DEFAULT_REGION' in os.environ:
            region = os.environ['AWS_DEFAULT_REGION']
        elif 'EC2_REGION' in os.environ:
            region = os.environ['EC2_REGION']
        else:
            if not boto3:
                if HAS_BOTO:
                    # boto.config.get returns None if config not found
                    region = boto.config.get('Boto', 'aws_region')
                    if not region:
                        region = boto.config.get('Boto', 'ec2_region')
                else:
                    module.fail_json(msg=missing_required_lib('boto'), exception=BOTO_IMP_ERR)
            elif HAS_BOTO3:
                # here we don't need to make an additional call, will default to 'us-east-1' if the below evaluates to None.
                try:
                    region = botocore.session.Session(profile=profile_name).get_config_variable('region')
                except botocore.exceptions.ProfileNotFound as e:
                    pass
            else:
                module.fail_json(msg=missing_required_lib('boto3'), exception=BOTO3_IMP_ERR)

    if not security_token:
        if os.environ.get('AWS_SECURITY_TOKEN'):
            security_token = os.environ['AWS_SECURITY_TOKEN']
        elif os.environ.get('AWS_SESSION_TOKEN'):
            security_token = os.environ['AWS_SESSION_TOKEN']
        elif os.environ.get('EC2_SECURITY_TOKEN'):
            security_token = os.environ['EC2_SECURITY_TOKEN']
        elif HAS_BOTO and boto.config.get('Credentials', 'aws_security_token'):
            security_token = boto.config.get('Credentials', 'aws_security_token')
        elif HAS_BOTO and boto.config.get('default', 'aws_security_token'):
            security_token = boto.config.get('default', 'aws_security_token')
        else:
            # in case secret_token came in as empty string
            security_token = None

    if HAS_BOTO3 and boto3:
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           aws_session_token=security_token)
        boto_params['verify'] = validate_certs

        if profile_name:
            # NOTE(review): rebuilding the dict here drops the 'verify'
            # key, so validate_certs appears to be ignored when a profile
            # is used — confirm whether that is intended.
            boto_params = dict(aws_access_key_id=None, aws_secret_access_key=None, aws_session_token=None)
            boto_params['profile_name'] = profile_name
    else:
        boto_params = dict(aws_access_key_id=access_key,
                           aws_secret_access_key=secret_key,
                           security_token=security_token)

        # only set profile_name if passed as an argument
        if profile_name:
            boto_params['profile_name'] = profile_name

        boto_params['validate_certs'] = validate_certs

    # Normalize any byte-string values to text for the client libraries.
    for param, value in boto_params.items():
        if isinstance(value, binary_type):
            boto_params[param] = text_type(value, 'utf-8', 'strict')

    return region, ec2_url, boto_params
def _connect(self):
    """Open the NETCONF session over SSH using ncclient.

    Returns a (return_code, stdout, stderr)-style tuple: ``0`` plus the
    session id on success, ``1`` with b'not connected' otherwise.
    """
    if not HAS_NCCLIENT:
        raise AnsibleError("%s: %s" % (missing_required_lib("ncclient"), to_native(NCCLIENT_IMP_ERR)))

    self.queue_message('log', 'ssh connection done, starting ncclient')

    # When a password is supplied, don't let the SSH agent override it.
    allow_agent = True
    if self._play_context.password is not None:
        allow_agent = False
    setattr(self._play_context, 'allow_agent', allow_agent)

    self.key_filename = self._play_context.private_key_file or self.get_option('private_key_file')
    if self.key_filename:
        self.key_filename = str(os.path.expanduser(self.key_filename))

    # netconf_ssh_config may be a path or a boolean-like string;
    # normalize booleans (True -> use default config, False -> None).
    self._ssh_config = self.get_option('netconf_ssh_config')
    if self._ssh_config in BOOLEANS_TRUE:
        self._ssh_config = True
    elif self._ssh_config in BOOLEANS_FALSE:
        self._ssh_config = None

    # Try to guess the network_os if the network_os is set to auto
    if self._network_os == 'auto':
        for cls in netconf_loader.all(class_only=True):
            network_os = cls.guess_network_os(self)
            if network_os:
                self.queue_message('vvv', 'discovered network_os %s' % network_os)
                self._network_os = network_os

    # If we have tried to detect the network_os but were unable to i.e. network_os is still 'auto'
    # then use default as the network_os
    if self._network_os == 'auto':
        # Network os not discovered. Set it to default
        self.queue_message('vvv', 'Unable to discover network_os. Falling back to default.')
        self._network_os = 'default'
    try:
        ncclient_device_handler = self.netconf.get_option('ncclient_device_handler')
    except KeyError:
        ncclient_device_handler = 'default'
    self.queue_message('vvv', 'identified ncclient device handler: %s.' % ncclient_device_handler)
    device_params = {'name': ncclient_device_handler}

    try:
        port = self._play_context.port or 830
        self.queue_message(
            'vvv',
            "ESTABLISH NETCONF SSH CONNECTION FOR USER: %s on PORT %s TO %s WITH SSH_CONFIG = %s"
            % (self._play_context.remote_user, port, self._play_context.remote_addr, self._ssh_config))
        self._manager = manager.connect(
            host=self._play_context.remote_addr,
            port=port,
            username=self._play_context.remote_user,
            password=self._play_context.password,
            key_filename=self.key_filename,
            hostkey_verify=self.get_option('host_key_checking'),
            look_for_keys=self.get_option('look_for_keys'),
            device_params=device_params,
            allow_agent=self._play_context.allow_agent,
            timeout=self.get_option('persistent_connect_timeout'),
            ssh_config=self._ssh_config)
        # Apply the command timeout to the established manager.
        self._manager._timeout = self.get_option('persistent_command_timeout')
    except SSHUnknownHostError as exc:
        raise AnsibleConnectionFailure(to_native(exc))
    except ImportError:
        # ncclient raises ImportError for unsupported device handlers.
        raise AnsibleError(
            "connection=netconf is not supported on {0}".format(self._network_os))

    if not self._manager.connected:
        return 1, b'', b'not connected'

    self.queue_message('log', 'ncclient manager object created successfully')

    self._connected = True

    super(Connection, self)._connect()

    return 0, to_bytes(self._manager.session_id, errors='surrogate_or_strict'), b''
def parse(self, inventory, loader, path, cache=True):
    """Populate the inventory from GCE instances, using the plugin cache
    when available and refreshing it when stale or disabled.
    """
    if not HAS_GOOGLE_LIBRARIES:
        raise AnsibleParserError(
            "gce inventory plugin cannot start: %s" % missing_required_lib("google-auth")
        )

    super(InventoryModule, self).parse(inventory, loader, path)

    config_data = {}
    config_data = self._read_config_data(path)

    if self.get_option("use_contrib_script_compatible_sanitization"):
        self._sanitize_group_name = (
            self._legacy_script_compatible_group_sanitization
        )

    # setup parameters as expected by 'fake module class' to reuse module_utils w/o changing the API
    params = {
        "filters": self.get_option("filters"),
        "projects": self.get_option("projects"),
        "scopes": self.get_option("scopes"),
        "zones": self.get_option("zones"),
        "auth_kind": self.get_option("auth_kind"),
        "service_account_file": self.get_option("service_account_file"),
        "service_account_contents": self.get_option("service_account_contents"),
        "service_account_email": self.get_option("service_account_email"),
    }

    self.fake_module = GcpMockModule(params)
    self.auth_session = GcpSession(self.fake_module, "compute")

    query = self._get_query_options(params["filters"])

    if self.get_option("retrieve_image_info"):
        project_disks = self._get_project_disks(config_data, query)
    else:
        project_disks = None

    # Cache logic
    if cache:
        # Rebind 'cache' to the user-configured cache setting.
        cache = self.get_option("cache")
        cache_key = self.get_cache_key(path)
    else:
        cache_key = None

    cache_needs_update = False
    if cache:
        try:
            results = self._cache[cache_key]
            for project in results:
                for zone in results[project]:
                    self._add_hosts(
                        results[project][zone],
                        config_data,
                        False,
                        project_disks=project_disks,
                    )
        except KeyError:
            # Cache miss: fall through to a live fetch below.
            cache_needs_update = True

    if not cache or cache_needs_update:
        cached_data = {}
        for project in params["projects"]:
            cached_data[project] = {}
            params["project"] = project
            zones = params["zones"]
            # Fetch all instances
            link = self._instances % project
            resp = self.fetch_list(params, link, query)
            for key, value in resp.items():
                # Response keys look like "zones/<zone>"; strip the prefix.
                zone = key[6:]
                if not zones or zone in zones:
                    self._add_hosts(value, config_data, project_disks=project_disks)
                    cached_data[project][zone] = value

    if cache_needs_update:
        self._cache[cache_key] = cached_data
def check_client(module):
    """Fail the module unless the ``manageiq-client`` library imported."""
    if HAS_CLIENT:
        return
    module.fail_json(msg=missing_required_lib('manageiq-client'), exception=CLIENT_IMP_ERR)
def check_lib(self):
    """Fail the module unless the ``heroku3`` library imported."""
    if HAS_HEROKU:
        return
    self.module.fail_json(msg=missing_required_lib('heroku3'), exception=HEROKU_IMP_ERR)
def __init__(self, module):
    """Common CloudStack module setup: requires the ``cs`` client library,
    seeds the result/diff skeleton, the return-key mappings and the lazily
    resolved entity caches."""
    if not HAS_LIB_CS:
        module.fail_json(msg=missing_required_lib('cs'), exception=CS_IMP_ERR)

    self.result = {
        'changed': False,
        'diff': {
            'before': dict(),
            'after': dict()
        }
    }

    # Common returns, will be merged with self.returns
    # search_for_key: replace_with_key
    self.common_returns = {
        'id': 'id',
        'name': 'name',
        'created': 'created',
        'zonename': 'zone',
        'state': 'state',
        'project': 'project',
        'account': 'account',
        'domain': 'domain',
        'displaytext': 'display_text',
        'displayname': 'display_name',
        'description': 'description',
    }

    # Init returns dict for use in subclasses
    self.returns = {}
    # these values will be casted to int
    self.returns_to_int = {}
    # these keys will be compared case sensitive in self.has_changed()
    self.case_sensitive_keys = [
        'id',
        'displaytext',
        'displayname',
        'description',
    ]

    self.module = module
    self._cs = None

    # Helper for VPCs
    self._vpc_networks_ids = None

    # Lazily-resolved entities; all start out unresolved.
    for attr in ('domain', 'account', 'project', 'ip_address', 'network',
                 'physical_network', 'vpc', 'zone', 'vm', 'vm_default_nic',
                 'os_type', 'hypervisor', 'capabilities', 'network_acl'):
        setattr(self, attr, None)