def check_compatibility(module, client):
    """Check the compatibility between the driver and the database.

    See: https://docs.mongodb.com/ecosystem/drivers/driver-compatibility-reference/#python-driver-compatibility

    Args:
        module: Ansible module.
        client (cursor): Mongodb cursor on admin database.
    """
    loose_srv_version = LooseVersion(client.server_info()['version'])
    loose_driver_version = LooseVersion(PyMongoVersion)

    # Check the newest server/driver pairing first; each branch pairs a minimum
    # server version with the minimum pymongo version it requires.
    if loose_srv_version >= LooseVersion('3.2') and loose_driver_version < LooseVersion('3.2'):
        module.fail_json(msg=' (Note: you must use pymongo 3.2+ with MongoDB >= 3.2)')
    elif loose_srv_version >= LooseVersion('3.0') and loose_driver_version <= LooseVersion('2.8'):
        module.fail_json(msg=' (Note: you must use pymongo 2.8+ with MongoDB 3.0)')
    elif loose_srv_version >= LooseVersion('2.6') and loose_driver_version <= LooseVersion('2.7'):
        module.fail_json(msg=' (Note: you must use pymongo 2.7+ with MongoDB 2.6)')
    # Fix: reuse the already-parsed driver version instead of re-parsing
    # PyMongoVersion (was: LooseVersion(PyMongoVersion) <= LooseVersion('2.5')),
    # keeping this branch consistent with the ones above.
    elif loose_driver_version <= LooseVersion('2.5'):
        module.fail_json(msg=' (Note: you must be on mongodb 2.4+ and pymongo 2.5+ to use the roles param)')
def get_all(self):
    """Return the complete context as a dict including the exported variables.

    For optimizations reasons this might not return an actual copy so be
    careful with using it.

    This is to prevent from running ``AnsibleJ2Vars`` through dict():

    ``dict(self.parent, **self.vars)``

    In Ansible this means that ALL variables would be templated in the
    process of re-creating the parent because ``AnsibleJ2Vars`` templates
    each variable in its ``__getitem__`` method. Instead we re-create the
    parent via ``AnsibleJ2Vars.add_locals`` that creates a new
    ``AnsibleJ2Vars`` copy without templating each variable.

    This will prevent unnecessarily templating unused variables in cases
    like setting a local variable and passing it to {% include %}
    in a template.

    Also see ``AnsibleJ2Template``and
    https://github.com/pallets/jinja/commit/d67f0fd4cc2a4af08f51f4466150d49da7798729
    """
    # The shortcut returns (handing back parent/vars uncopied) are only taken
    # on Jinja2 >= 2.9; older Jinja2 always falls through to the merge below.
    if LooseVersion(j2_version) >= LooseVersion('2.9'):
        if not self.vars:
            return self.parent
        if not self.parent:
            return self.vars

    if isinstance(self.parent, AnsibleJ2Vars):
        # Re-create the parent without templating each variable (see docstring).
        return self.parent.add_locals(self.vars)
    else:
        # can this happen in Ansible?
        return dict(self.parent, **self.vars)
def ensure_required_libs(module):
    """Fail the module when the mandatory Python libraries are unavailable."""
    if not HAS_PSYCOPG2:
        module.fail_json(msg=missing_required_lib('psycopg2'))

    # The ca_cert option needs psycopg2 >= 2.4.3; only check the version
    # when the option is actually in use.
    wants_ca_cert = module.params.get('ca_cert')
    if wants_ca_cert and LooseVersion(psycopg2.__version__) < LooseVersion('2.4.3'):
        module.fail_json(msg='psycopg2 must be at least 2.4.3 in order to use the ca_cert parameter')
def boto3_at_least(self, desired):
    """Check if the available boto3 version is greater than or equal to a desired version.

    Usage:
        if module.params.get('assign_ipv6_address') and not module.boto3_at_least('1.4.4'):
            # conditionally fail on old boto3 versions if a specific feature is not supported
            module.fail_json(msg="Boto3 can't deal with EC2 IPv6 addresses before version 1.4.4.")
    """
    installed = LooseVersion(self._gather_versions()['boto3_version'])
    return installed >= LooseVersion(desired)
def _store_password(ht, username, password):
    """Set *username*'s password on the htpasswd file object *ht*.

    Handles both passlib APIs: newer passlib exposes ``set_password`` while
    older versions use ``update``. Extracted because this fallback was
    duplicated in both branches of ``present``.
    """
    if getattr(ht, 'set_password', None):
        ht.set_password(username, password)
    else:
        ht.update(username, password)


def present(dest, username, password, crypt_scheme, create, check_mode):
    """ Ensures user is present

    Returns (msg, changed) """
    if crypt_scheme in apache_hashes:
        context = htpasswd_context
    else:
        context = CryptContext(schemes=[crypt_scheme] + apache_hashes)
    if not os.path.exists(dest):
        if not create:
            raise ValueError('Destination %s does not exist' % dest)
        if check_mode:
            return ("Create %s" % dest, True)
        create_missing_directories(dest)
        # passlib 1.6 changed the HtpasswdFile constructor keywords
        # (new/default_scheme vs autoload/default).
        if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
            ht = HtpasswdFile(dest, new=True, default_scheme=crypt_scheme, context=context)
        else:
            ht = HtpasswdFile(dest, autoload=False, default=crypt_scheme, context=context)
        _store_password(ht, username, password)
        ht.save()
        return ("Created %s and added %s" % (dest, username), True)
    else:
        if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
            ht = HtpasswdFile(dest, new=False, default_scheme=crypt_scheme, context=context)
        else:
            ht = HtpasswdFile(dest, default=crypt_scheme, context=context)

        # check_password (newer passlib) vs verify (older passlib)
        found = None
        if getattr(ht, 'check_password', None):
            found = ht.check_password(username, password)
        else:
            found = ht.verify(username, password)

        if found:
            return ("%s already present" % username, False)
        else:
            if not check_mode:
                _store_password(ht, username, password)
                ht.save()
            return ("Add/update %s" % username, True)
def is_satisfied_by(self, version_to_test):
    """Return True when *version_to_test* satisfies this requirement's version spec."""
    if not self._plain_package:
        return False
    try:
        return self._requirement.specifier.contains(version_to_test, prereleases=True)
    except AttributeError:
        # old setuptools has no specifier, do fallback
        candidate = LooseVersion(version_to_test)
        for op, ver in self._requirement.specs:
            if not op_dict[op](candidate, LooseVersion(ver)):
                return False
        return True
def connect_to_db(module, conn_params, autocommit=False, fail_on_conn=True):
    """Connect to a PostgreSQL database.

    Return psycopg2 connection object.

    Args:
        module (AnsibleModule) -- object of ansible.module_utils.basic.AnsibleModule class
        conn_params (dict) -- dictionary with connection parameters

    Kwargs:
        autocommit (bool) -- commit automatically (default False)
        fail_on_conn (bool) -- fail if connection failed or just warn and return None (default True)
    """
    ensure_required_libs(module)

    db_connection = None
    try:
        db_connection = psycopg2.connect(**conn_params)
        if autocommit:
            # set_session(autocommit=...) exists only in psycopg2 >= 2.4.2;
            # older releases fall back to the isolation-level API.
            if LooseVersion(psycopg2.__version__) >= LooseVersion('2.4.2'):
                db_connection.set_session(autocommit=True)
            else:
                db_connection.set_isolation_level(psycopg2.extensions.ISOLATION_LEVEL_AUTOCOMMIT)

        # Switch role, if specified:
        if module.params.get('session_role'):
            cursor = db_connection.cursor(cursor_factory=psycopg2.extras.DictCursor)
            try:
                cursor.execute('SET ROLE "%s"' % module.params['session_role'])
            except Exception as e:
                module.fail_json(msg="Could not switch role: %s" % to_native(e))
            finally:
                # Always release the cursor, even when SET ROLE failed.
                cursor.close()

    except TypeError as e:
        # 'sslrootcert' in the TypeError text is taken to mean the connect()
        # kwargs included sslrootcert but the server stack is too old for it.
        if 'sslrootcert' in e.args[0]:
            module.fail_json(msg='Postgresql server must be at least '
                                 'version 8.4 to support sslrootcert')

        if fail_on_conn:
            module.fail_json(msg="unable to connect to database: %s" % to_native(e))
        else:
            module.warn("PostgreSQL server is unavailable: %s" % to_native(e))
            db_connection = None

    except Exception as e:
        if fail_on_conn:
            module.fail_json(msg="unable to connect to database: %s" % to_native(e))
        else:
            # Best-effort mode: report the problem and return None.
            module.warn("PostgreSQL server is unavailable: %s" % to_native(e))
            db_connection = None

    return db_connection
def botocore_at_least(self, desired):
    """Check if the available botocore version is greater than or equal to a desired version.

    Usage:
        if not module.botocore_at_least('1.2.3'):
            module.fail_json(msg='The Serverless Elastic Load Compute Service is not in botocore before v1.2.3')
        if not module.botocore_at_least('1.5.3'):
            module.warn('Botocore did not include waiters for Service X before 1.5.3. '
                        'To wait until Service X resources are fully available, update botocore.')
    """
    installed = LooseVersion(self._gather_versions()['botocore_version'])
    return installed >= LooseVersion(desired)
def _get_minimal_versions(self, option_minimal_versions, ignore_params=None):
    """Record, per module option, whether the installed Docker SDK for Python /
    Docker API versions support it, and fail when an unsupported option is used.

    option_minimal_versions -- dict mapping option names to requirement dicts
        (keys seen here: 'docker_py_version', 'docker_api_version',
        'detect_usage', 'usage_msg'); results stored in
        self.option_minimal_versions with a computed 'supported' flag.
    ignore_params -- option names to leave out of the bookkeeping entirely.
    """
    self.option_minimal_versions = dict()
    for option in self.module.argument_spec:
        if ignore_params is not None:
            if option in ignore_params:
                continue
        self.option_minimal_versions[option] = dict()
    self.option_minimal_versions.update(option_minimal_versions)

    for option, data in self.option_minimal_versions.items():
        # Test whether option is supported, and store result
        support_docker_py = True
        support_docker_api = True
        if 'docker_py_version' in data:
            support_docker_py = self.docker_py_version >= LooseVersion(data['docker_py_version'])
        if 'docker_api_version' in data:
            support_docker_api = self.docker_api_version >= LooseVersion(data['docker_api_version'])
        data['supported'] = support_docker_py and support_docker_api
        # Fail if option is not supported but used
        if not data['supported']:
            # Test whether option is specified
            if 'detect_usage' in data:
                used = data['detect_usage'](self)
            else:
                used = self.module.params.get(option) is not None
                # An option left at its declared default does not count as "used".
                if used and 'default' in self.module.argument_spec[option]:
                    used = self.module.params[option] != self.module.argument_spec[option]['default']
            if used:
                # If the option is used, compose error message.
                if 'usage_msg' in data:
                    usg = data['usage_msg']
                else:
                    usg = 'set %s option' % (option, )
                if not support_docker_api:
                    msg = 'Docker API version is %s. Minimum version required is %s to %s.'
                    msg = msg % (self.docker_api_version_str, data['docker_api_version'], usg)
                elif not support_docker_py:
                    msg = "Docker SDK for Python version is %s (%s's Python %s). Minimum version required is %s to %s. "
                    # Pick the upgrade recommendation matching the required and
                    # installed SDK generations (docker-py vs docker).
                    if LooseVersion(data['docker_py_version']) < LooseVersion('2.0.0'):
                        msg += DOCKERPYUPGRADE_RECOMMEND_DOCKER
                    elif self.docker_py_version < LooseVersion('2.0.0'):
                        msg += DOCKERPYUPGRADE_SWITCH_TO_DOCKER
                    else:
                        msg += DOCKERPYUPGRADE_UPGRADE_DOCKER
                    msg = msg % (docker_version, platform.node(), sys.executable, data['docker_py_version'], usg)
                else:
                    # should not happen
                    msg = 'Cannot %s with your configuration.' % (usg, )
                self.fail(msg)
def _parse_version_match(self, match, attribute):
    """Store the version captured by *match* on *attribute*, keeping the highest one seen."""
    version_text = to_text(match.group(1)).rstrip()

    # PowerShell cannot cast a string of "1" to Version, it must have at
    # least the major.minor for it to be valid so we append 0
    if match.group(2) is None:
        version_text = "%s.0" % version_text

    current = getattr(self, attribute, None)
    # First sighting wins outright; afterwards only a strictly newer version replaces it.
    if current is None or LooseVersion(version_text) > LooseVersion(current):
        setattr(self, attribute, version_text)
def absent(dest, username, check_mode):
    """ Ensures user is absent

    Returns (msg, changed) """
    # The HtpasswdFile constructor signature differs between passlib releases.
    if LooseVersion(passlib.__version__) >= LooseVersion('1.6'):
        ht = HtpasswdFile(dest, new=False)
    else:
        ht = HtpasswdFile(dest)

    if username not in ht.users():
        return ("%s not present" % username, False)

    # Only mutate the file outside of check mode.
    if not check_mode:
        ht.delete(username)
        ht.save()
    return ("Remove %s" % username, True)
def removal_version(value, is_ansible, current_version=None, is_tombstone=False):
    """Validate a removal version string.

    value -- the version string to validate.
    is_ansible -- True when validating ansible-base versions (LooseVersion
        semantics), False for collections (semantic versioning).
    current_version -- when given, the removal version is additionally checked
        against it (direction depends on is_tombstone).
    is_tombstone -- tombstones must not be after current_version; deprecations
        must be after it.

    Returns the value unchanged; raises Invalid on any violation.
    """
    msg = (
        'Removal version must be a string' if is_ansible else
        'Removal version must be a semantic version (https://semver.org/)'
    )

    if not isinstance(value, string_types):
        raise Invalid(msg)

    try:
        if is_ansible:
            # StrictVersion.parse is used purely for validation here.
            version = StrictVersion()
            version.parse(value)
            version = LooseVersion(value)  # We're storing Ansible's version as a LooseVersion
        else:
            version = SemanticVersion()
            version.parse(value)
            if version.major != 0 and (version.minor != 0 or version.patch != 0):
                raise Invalid('removal_version (%r) must be a major release, not a minor or patch release '
                              '(see specification at https://semver.org/)' % (value, ))

        if current_version is not None:
            if is_tombstone:
                # For a tombstone, the removal version must not be in the future
                if version > current_version:
                    raise Invalid('The tombstone removal_version (%r) must not be after the '
                                  'current version (%s)' % (value, current_version))
            else:
                # For a deprecation, the removal version must be in the future
                if version <= current_version:
                    raise Invalid('The deprecation removal_version (%r) must be after the '
                                  'current version (%s)' % (value, current_version))
    except ValueError:
        # Raised by the version parsers on malformed input.
        raise Invalid(msg)

    return value
def generate_page(pb_keywords, template_dir):
    """Render the playbook-keywords documentation page from its Jinja2 template."""
    env = Environment(
        loader=FileSystemLoader(template_dir),
        trim_blocks=True,
    )
    rendered = env.get_template(TEMPLATE_FILE).render({
        'pb_keywords': pb_keywords,
        'playbook_class_names': PLAYBOOK_CLASS_NAMES,
    })

    if LooseVersion(jinja2.__version__) < LooseVersion('2.10'):
        # jinja2 < 2.10's indent filter indents blank lines.  Cleanup
        rendered = re.sub(' +\n', '\n', rendered)

    return rendered
def main():
    """Entrypoint to the script

    Checks every tracked bundled library against pypi and prints ERROR /
    UPDATE lines for anything out of date or mis-tracked.
    """
    paths = sys.argv[1:] or sys.stdin.read().splitlines()

    bundled_libs = get_bundled_libs(paths)
    files_with_bundled_metadata = get_files_with_bundled_metadata(paths)

    # Files that carry _BUNDLED_METADATA but are not tracked by this test.
    for filename in files_with_bundled_metadata.difference(bundled_libs):
        if filename.startswith('test/support/'):
            continue  # bundled support code does not need to be updated or tracked

        print('{0}: ERROR: File contains _BUNDLED_METADATA but needs to be added to'
              ' test/sanity/code-smell/update-bundled.py'.format(filename))

    for filename in bundled_libs:
        try:
            metadata = get_bundled_metadata(filename)
        except ValueError as e:
            print('{0}: ERROR: {1}'.format(filename, e))
            continue
        except (IOError, OSError) as e:
            if e.errno == 2:
                print('{0}: ERROR: {1}. Perhaps the bundled library has been removed'
                      ' or moved and the bundled library test needs to be modified as'
                      ' well?'.format(filename, e))
                # Fix: without this `continue`, execution fell through to the
                # `metadata` check below with `metadata` unbound -> NameError.
                continue
            # Fix: unexpected OS errors were silently swallowed (and then
            # crashed on the unbound `metadata`); surface them instead.
            raise

        if metadata is None:
            continue

        pypi_fh = open_url('https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name']))
        pypi_data = json.loads(pypi_fh.read().decode('utf-8'))

        constraints = metadata.get('version_constraints', None)
        latest_version = get_latest_applicable_version(pypi_data, constraints)

        if LooseVersion(metadata['version']) < LooseVersion(latest_version):
            print('{0}: UPDATE {1} from {2} to {3} {4}'.format(
                filename, metadata['pypi_name'], metadata['version'], latest_version,
                'https://pypi.org/pypi/{0}/json'.format(metadata['pypi_name'])))
def get_latest_applicable_version(pypi_data, constraints=None):
    """Get the latest pypi version of the package that we allow

    :arg pypi_data: Pypi information about the data as returned by
        ``https://pypi.org/pypi/{pkg_name}/json``
    :kwarg constraints: version constraints on what we're allowed to use as specified by
        the bundled metadata
    :returns: The most recent version on pypi that are allowed by ``constraints``
    """
    # Without constraints, pypi's own notion of the current release wins.
    if not constraints:
        return pypi_data['info']['version']

    version_specification = packaging.specifiers.SpecifierSet(constraints)
    latest_version = "0"
    for candidate in pypi_data['releases']:
        if candidate in version_specification and LooseVersion(candidate) > LooseVersion(latest_version):
            latest_version = candidate
    return latest_version
def _version_fuzzy_match(version, version_map): # try exact match first res = version_map.get(version) if res: return res sorted_looseversions = sorted([LooseVersion(v) for v in version_map.keys()]) find_looseversion = LooseVersion(version) # slot match; return nearest previous version we're newer than kpos = bisect.bisect(sorted_looseversions, find_looseversion) if kpos == 0: # older than everything in the list, return the oldest version # TODO: warning-worthy? return version_map.get(sorted_looseversions[0].vstring) # TODO: is "past the end of the list" warning-worthy too (at least if it's not a major version match)? # return the next-oldest entry that we're newer than... return version_map.get(sorted_looseversions[kpos - 1].vstring)
def inspect_distribution(self, image, **kwargs):
    '''
    Get image digest by directly calling the Docker API when running Docker SDK < 4.0.0
    since prior versions did not support accessing private repositories.
    '''
    if self.docker_py_version < LooseVersion('4.0.0'):
        # Resolve which registry the image lives on and build the auth header
        # for it; only take the manual path when credentials are configured.
        registry = auth.resolve_repository_name(image)[0]
        header = auth.get_config_header(self, registry)
        if header:
            return self._result(self._get(
                self._url('/distribution/{0}/json', image),
                headers={'X-Registry-Auth': header}
            ), json=True)
    # SDK >= 4.0.0, or no auth header available: defer to the SDK implementation.
    return super(AnsibleDockerClient, self).inspect_distribution(image, **kwargs)
def meets_requirements(version, requirements):
    # type: (str, str) -> bool
    """Verify if a given version satisfies all the requirements.

    Supported version identifiers are:
      * '=='
      * '!='
      * '>'
      * '>='
      * '<'
      * '<='
      * '*'

    Each requirement is delimited by ','.
    """
    op_map = {
        '!=': operator.ne,
        '==': operator.eq,
        '=': operator.eq,
        '>=': operator.ge,
        '>': operator.gt,
        '<=': operator.le,
        '<': operator.lt,
    }

    for req in requirements.split(','):
        # A two-character operator always has '=' in the second position.
        op_pos = 2 if len(req) > 1 and req[1] == '=' else 1
        op = op_map.get(req[:op_pos])
        if not op:
            # Bare version string: treat the whole token as '==<version>'.
            op = operator.eq
            requirement = req
        else:
            requirement = req[op_pos:]

        # Wildcards on either side satisfy this clause trivially.
        if requirement == '*' or version == '*':
            continue

        wanted = SemanticVersion.from_loose_version(LooseVersion(requirement))
        if not op(SemanticVersion(version), wanted):
            # One failed clause is enough to reject the version.
            return False

    return True
def _check_version(self, node, version, collection_name):
    """Validate a deprecation 'version' argument and emit the matching
    pylint messages for invalid or already-passed versions."""
    if not isinstance(version, (str, float)):
        # Wrong type entirely: report and stop.
        if collection_name == 'ansible.builtin':
            symbol = 'ansible-invalid-deprecated-version'
        else:
            symbol = 'collection-invalid-deprecated-version'
        self.add_message(symbol, node=node, args=(version, ))
        return

    version_no = str(version)

    if collection_name == 'ansible.builtin':
        # Ansible-base
        try:
            if not version_no:
                raise ValueError('Version string should not be empty')
            loose_version = LooseVersion(str(version_no))
            # Deprecation version already reached by the running Ansible.
            if ANSIBLE_VERSION >= loose_version:
                self.add_message('ansible-deprecated-version', node=node, args=(version, ))
        except ValueError:
            self.add_message('ansible-invalid-deprecated-version', node=node, args=(version, ))
    elif collection_name:
        # Collections
        try:
            if not version_no:
                raise ValueError('Version string should not be empty')
            semantic_version = SemanticVersion(version_no)
            if collection_name == self.collection_name and self.collection_version is not None:
                # Deprecation version already reached by this collection.
                if self.collection_version >= semantic_version:
                    self.add_message('collection-deprecated-version', node=node, args=(version, ))
            # Removals must land on a major release boundary (x.0.0).
            if semantic_version.major != 0 and (semantic_version.minor != 0 or semantic_version.patch != 0):
                self.add_message('removal-version-must-be-major', node=node, args=(version, ))
        except ValueError:
            self.add_message('collection-invalid-deprecated-version', node=node, args=(version, ))
def collect(self, module=None, collected_facts=None):
    """Detect the host's service manager and return {'service_mgr': name}.

    Tries pid-1 inspection first, then falls back to per-OS heuristics,
    and finally to the generic 'service' value.
    """
    facts_dict = {}

    if not module:
        return facts_dict

    collected_facts = collected_facts or {}
    service_mgr_name = None

    # TODO: detect more custom init setups like bootscripts, dmd, s6, Epoch, etc
    # also other OSs other than linux might need to check across several possible candidates

    # Mapping of proc_1 values to more useful names
    proc_1_map = {
        'procd': 'openwrt_init',
        'runit-init': 'runit',
        'svscan': 'svc',
        'openrc-init': 'openrc',
    }

    # try various forms of querying pid 1
    proc_1 = get_file_content('/proc/1/comm')
    if proc_1 is None:
        # FIXME: return code isnt checked
        # FIXME: if stdout is empty string, odd things
        # FIXME: other code seems to think we could get proc_1 == None past this point
        rc, proc_1, err = module.run_command("ps -p 1 -o comm|tail -n 1", use_unsafe_shell=True)

        # If the output of the command starts with what looks like a PID, then the 'ps' command
        # probably didn't work the way we wanted, probably because it's busybox
        if re.match(r' *[0-9]+ ', proc_1):
            proc_1 = None

    # The ps command above may return "COMMAND" if the user cannot read /proc, e.g. with grsecurity
    if proc_1 == "COMMAND\n":
        proc_1 = None

    # FIXME: empty string proc_1 staus empty string
    if proc_1 is not None:
        proc_1 = os.path.basename(proc_1)
        proc_1 = to_native(proc_1)
        proc_1 = proc_1.strip()

    if proc_1 is not None and (proc_1 == 'init' or proc_1.endswith('sh')):
        # many systems return init, so this cannot be trusted, if it ends in 'sh' it probalby is a shell in a container
        proc_1 = None

    # if not init/None it should be an identifiable or custom init, so we are done!
    if proc_1 is not None:
        # Lookup proc_1 value in map and use proc_1 value itself if no match
        # FIXME: empty string still falls through
        service_mgr_name = proc_1_map.get(proc_1, proc_1)

    # FIXME: replace with a system->service_mgr_name map?
    # start with the easy ones
    elif collected_facts.get('ansible_distribution', None) == 'MacOSX':
        # FIXME: find way to query executable, version matching is not ideal
        if LooseVersion(platform.mac_ver()[0]) >= LooseVersion('10.4'):
            service_mgr_name = 'launchd'
        else:
            service_mgr_name = 'systemstarter'
    elif 'BSD' in collected_facts.get('ansible_system', '') or collected_facts.get('ansible_system') in ['Bitrig', 'DragonFly']:
        # FIXME: we might want to break out to individual BSDs or 'rc'
        service_mgr_name = 'bsdinit'
    elif collected_facts.get('ansible_system') == 'AIX':
        service_mgr_name = 'src'
    elif collected_facts.get('ansible_system') == 'SunOS':
        service_mgr_name = 'smf'
    elif collected_facts.get('ansible_distribution') == 'OpenWrt':
        service_mgr_name = 'openwrt_init'
    elif collected_facts.get('ansible_system') == 'Linux':
        # Probe Linux init systems from most to least specific.
        # FIXME: mv is_systemd_managed
        if self.is_systemd_managed(module=module):
            service_mgr_name = 'systemd'
        elif module.get_bin_path('initctl') and os.path.exists("/etc/init/"):
            service_mgr_name = 'upstart'
        elif os.path.exists('/sbin/openrc'):
            service_mgr_name = 'openrc'
        elif self.is_systemd_managed_offline(module=module):
            service_mgr_name = 'systemd'
        elif os.path.exists('/etc/init.d/'):
            service_mgr_name = 'sysvinit'

    if not service_mgr_name:
        # if we cannot detect, fallback to generic 'service'
        service_mgr_name = 'service'

    facts_dict['service_mgr'] = service_mgr_name
    return facts_dict
def _connect_uncached(self):
    ''' activates the connection object

    Builds a paramiko SSHClient from the play context options (host keys,
    proxy command, key file, timeouts) and returns the connected client.
    Raises AnsibleError / AnsibleConnectionFailure / AnsibleAuthenticationFailure
    when the connection cannot be established.
    '''

    if paramiko is None:
        raise AnsibleError("paramiko is not installed: %s" % to_native(PARAMIKO_IMPORT_ERR))

    port = self._play_context.port or 22
    display.vvv("ESTABLISH PARAMIKO SSH CONNECTION FOR USER: %s on PORT %s TO %s" % (self._play_context.remote_user, port, self._play_context.remote_addr),
                host=self._play_context.remote_addr)

    ssh = paramiko.SSHClient()

    # override paramiko's default logger name
    if self._log_channel is not None:
        ssh.set_log_channel(self._log_channel)

    self.keyfile = os.path.expanduser("~/.ssh/known_hosts")

    if self.get_option('host_key_checking'):
        for ssh_known_hosts in ("/etc/ssh/ssh_known_hosts", "/etc/openssh/ssh_known_hosts"):
            try:
                # TODO: check if we need to look at several possible locations, possible for loop
                ssh.load_system_host_keys(ssh_known_hosts)
                break
            except IOError:
                pass  # file was not found, but not required to function
        # Also load the user's default known-hosts locations.
        ssh.load_system_host_keys()

    ssh_connect_kwargs = self._parse_proxy_command(port)

    ssh.set_missing_host_key_policy(MyAddPolicy(self._new_stdin, self))

    conn_password = self.get_option('password') or self._play_context.password

    # When an explicit password is supplied, do not let the SSH agent override it.
    allow_agent = True
    if conn_password is not None:
        allow_agent = False

    try:
        key_filename = None
        if self._play_context.private_key_file:
            key_filename = os.path.expanduser(self._play_context.private_key_file)

        # paramiko 2.2 introduced auth_timeout parameter
        if LooseVersion(paramiko.__version__) >= LooseVersion('2.2.0'):
            ssh_connect_kwargs['auth_timeout'] = self._play_context.timeout

        # paramiko 1.15 introduced banner timeout parameter
        if LooseVersion(paramiko.__version__) >= LooseVersion('1.15.0'):
            ssh_connect_kwargs['banner_timeout'] = self.get_option('banner_timeout')

        ssh.connect(
            self._play_context.remote_addr.lower(),
            username=self._play_context.remote_user,
            allow_agent=allow_agent,
            look_for_keys=self.get_option('look_for_keys'),
            key_filename=key_filename,
            password=conn_password,
            timeout=self._play_context.timeout,
            port=port,
            **ssh_connect_kwargs
        )
    except paramiko.ssh_exception.BadHostKeyException as e:
        raise AnsibleConnectionFailure('host key mismatch for %s' % e.hostname)
    except paramiko.ssh_exception.AuthenticationException as e:
        msg = 'Failed to authenticate: {0}'.format(to_text(e))
        raise AnsibleAuthenticationFailure(msg)
    except Exception as e:
        # Classify remaining failures by inspecting the exception text.
        msg = to_text(e)
        if u"PID check failed" in msg:
            raise AnsibleError("paramiko version issue, please upgrade paramiko on the machine running ansible")
        elif u"Private key file is encrypted" in msg:
            msg = 'ssh %s@%s:%s : %s\nTo connect as a different user, use -u <username>.' % (
                self._play_context.remote_user, self._play_context.remote_addr, port, msg)
            raise AnsibleConnectionFailure(msg)
        else:
            raise AnsibleConnectionFailure(msg)

    return ssh
import sys # Used for determining if the system is running a new enough python version # and should only restrict on our documented minimum versions if sys.version_info < (3, 8): raise SystemExit( 'ERROR: Ansible requires Python 3.8 or newer on the controller. ' 'Current version: %s' % ''.join(sys.version.splitlines())) from importlib.metadata import version from ansible.module_utils.compat.version import LooseVersion # Used for determining if the system is running a new enough Jinja2 version # and should only restrict on our documented minimum versions jinja2_version = version('jinja2') if jinja2_version < LooseVersion('3.0'): raise SystemExit( 'ERROR: Ansible requires Jinja2 3.0 or newer on the controller. ' 'Current version: %s' % jinja2_version) import errno import getpass import os import subprocess import traceback from abc import ABC, abstractmethod from pathlib import Path try: from ansible import constants as C from ansible.utils.display import Display, initialize_locale
def get_ansible_version():
    """Return current ansible-core version"""
    from ansible.release import __version__

    # Keep only the major.minor.patch components of the version string.
    major_minor_patch = __version__.split('.')[:3]
    return LooseVersion('.'.join(major_minor_patch))
def install(self):
    """Fetch this role (from scm, a local file, a URL, or the Galaxy API)
    and extract it into self.path.

    Returns True on success, False when no archive could be obtained.
    Raises AnsibleError on lookup, version, archive, or filesystem problems.
    """
    if self.scm:
        # create tar file from scm url
        tmp_file = RoleRequirement.scm_archive_role(keep_scm_meta=context.CLIARGS['keep_scm_meta'], **self.spec)
    elif self.src:
        if os.path.isfile(self.src):
            # installing a local tar.gz
            tmp_file = self.src
        elif '://' in self.src:
            role_data = self.src
            tmp_file = self.fetch(role_data)
        else:
            # Resolve the role through the Galaxy API.
            role_data = self.api.lookup_role_by_name(self.src)
            if not role_data:
                raise AnsibleError("- sorry, %s was not found on %s." % (self.src, self.api.api_server))

            if role_data.get('role_type') == 'APP':
                # Container Role
                display.warning("%s is a Container App role, and should only be installed using Ansible "
                                "Container" % self.name)

            role_versions = self.api.fetch_role_related('versions', role_data['id'])
            if not self.version:
                # convert the version names to LooseVersion objects
                # and sort them to get the latest version. If there
                # are no versions in the list, we'll grab the head
                # of the master branch
                if len(role_versions) > 0:
                    loose_versions = [LooseVersion(a.get('name', None)) for a in role_versions]
                    try:
                        loose_versions.sort()
                    except TypeError:
                        raise AnsibleError(
                            'Unable to compare role versions (%s) to determine the most recent version due to incompatible version formats. '
                            'Please contact the role author to resolve versioning conflicts, or specify an explicit role version to '
                            'install.' % ', '.join([v.vstring for v in loose_versions])
                        )
                    self.version = to_text(loose_versions[-1])
                elif role_data.get('github_branch', None):
                    self.version = role_data['github_branch']
                else:
                    self.version = 'master'
            elif self.version != 'master':
                # An explicit version must exist in the published version list.
                if role_versions and to_text(self.version) not in [a.get('name', None) for a in role_versions]:
                    raise AnsibleError("- the specified version (%s) of %s was not found in the list of available versions (%s)."
                                       % (self.version, self.name, role_versions))

            # check if there's a source link/url for our role_version
            for role_version in role_versions:
                if role_version['name'] == self.version and 'source' in role_version:
                    self.src = role_version['source']
                if role_version['name'] == self.version and 'download_url' in role_version:
                    self.download_url = role_version['download_url']

            tmp_file = self.fetch(role_data)

    else:
        raise AnsibleError("No valid role data found")

    if tmp_file:

        display.debug("installing from %s" % tmp_file)

        if not tarfile.is_tarfile(tmp_file):
            raise AnsibleError("the downloaded file does not appear to be a valid tar archive.")
        else:
            role_tar_file = tarfile.open(tmp_file, "r")
            # verify the role's meta file
            meta_file = None
            members = role_tar_file.getmembers()
            # next find the metadata file
            for member in members:
                for meta_main in self.META_MAIN:
                    if meta_main in member.name:
                        # Look for parent of meta/main.yml
                        # Due to possibility of sub roles each containing meta/main.yml
                        # look for shortest length parent
                        meta_parent_dir = os.path.dirname(os.path.dirname(member.name))
                        if not meta_file:
                            archive_parent_dir = meta_parent_dir
                            meta_file = member
                        else:
                            if len(meta_parent_dir) < len(archive_parent_dir):
                                archive_parent_dir = meta_parent_dir
                                meta_file = member
            if not meta_file:
                raise AnsibleError("this role does not appear to have a meta/main.yml file.")
            else:
                try:
                    self._metadata = yaml_load(role_tar_file.extractfile(meta_file))
                except Exception:
                    raise AnsibleError("this role does not appear to have a valid meta/main.yml file.")

            paths = self.paths
            if self.path != paths[0]:
                # path can be passed though __init__
                # FIXME should this be done in __init__?
                paths[:0] = self.path
            paths_len = len(paths)
            # Try each candidate roles path until one is writable.
            for idx, path in enumerate(paths):
                self.path = path
                display.display("- extracting %s to %s" % (self.name, self.path))
                try:
                    if os.path.exists(self.path):
                        if not os.path.isdir(self.path):
                            raise AnsibleError("the specified roles path exists and is not a directory.")
                        elif not context.CLIARGS.get("force", False):
                            raise AnsibleError("the specified role %s appears to already exist. Use --force to replace it." % self.name)
                        else:
                            # using --force, remove the old path
                            if not self.remove():
                                raise AnsibleError("%s doesn't appear to contain a role.\n please remove this directory manually if you really "
                                                   "want to put the role here." % self.path)
                    else:
                        os.makedirs(self.path)

                    # We strip off any higher-level directories for all of the files
                    # contained within the tar file here. The default is 'github_repo-target'.
                    # Gerrit instances, on the other hand, does not have a parent directory at all.
                    for member in members:
                        # we only extract files, and remove any relative path
                        # bits that might be in the file for security purposes
                        # and drop any containing directory, as mentioned above
                        if member.isreg() or member.issym():
                            n_member_name = to_native(member.name)
                            n_archive_parent_dir = to_native(archive_parent_dir)
                            n_parts = n_member_name.replace(n_archive_parent_dir, "", 1).split(os.sep)
                            n_final_parts = []
                            for n_part in n_parts:
                                # TODO if the condition triggers it produces a broken installation.
                                # It will create the parent directory as an empty file and will
                                # explode if the directory contains valid files.
                                # Leaving this as is since the whole module needs a rewrite.
                                if n_part != '..' and not n_part.startswith('~') and '$' not in n_part:
                                    n_final_parts.append(n_part)
                            member.name = os.path.join(*n_final_parts)
                            role_tar_file.extract(member, to_native(self.path))

                    # write out the install info file for later use
                    self._write_galaxy_install_info()
                    break
                except OSError as e:
                    # Permission denied on a non-final candidate path: try the next one.
                    if e.errno == errno.EACCES and idx < paths_len - 1:
                        continue
                    raise AnsibleError("Could not update files in %s: %s" % (self.path, to_native(e)))

            # return the parsed yaml metadata
            display.display("- %s was installed successfully" % str(self))
            if not (self.src and os.path.isfile(self.src)):
                try:
                    os.unlink(tmp_file)
                except (OSError, IOError) as e:
                    display.warning(u"Unable to remove tmp file (%s): %s" % (tmp_file, to_text(e)))
            return True

    return False
def has_option_password_from_stdin(self):
    """Return True when the installed svn client is at least 1.10.0 (the
    version this check gates the password-from-stdin behaviour on)."""
    dummy_rc, version_out, dummy_err = self.module.run_command(
        [self.svn_path, '--version', '--quiet'], check_rc=True)
    return LooseVersion(version_out) >= LooseVersion('1.10.0')
# Minimum 'cryptography' library version this module is written against.
MINIMAL_CRYPTOGRAPHY_VERSION = '1.2'

# Import traceback captured when 'cryptography' is unavailable (None on success),
# kept so the module can report why the dependency check failed.
CRYPTOGRAPHY_IMP_ERR = None
try:
    import cryptography
    from cryptography import x509
    from cryptography.hazmat.backends import default_backend
    from cryptography.hazmat.primitives.serialization import Encoding
    from cryptography.x509 import (
        CertificateRevocationListBuilder,
        RevokedCertificateBuilder,
        NameAttribute,
        Name,
    )
    CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__)
except ImportError:
    CRYPTOGRAPHY_IMP_ERR = traceback.format_exc()
    CRYPTOGRAPHY_FOUND = False
else:
    CRYPTOGRAPHY_FOUND = True

# strftime format producing UTC timestamps of the form 20210101000000Z.
TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ"


class CRLError(crypto_utils.OpenSSLObjectError):
    # Error subclass raised by this module's CRL handling.
    pass


# NOTE(review): the body of this class continues beyond this chunk.
class CRL(crypto_utils.OpenSSLObject):
def main():
    """Entry point for the iptables module.

    Declares the module's argument spec, then performs exactly one of three
    mutually exclusive operations: flush a table/chain, set a chain policy,
    or ensure a single rule is present/absent.
    """
    module = AnsibleModule(
        supports_check_mode=True,
        argument_spec=dict(
            table=dict(type='str', default='filter', choices=['filter', 'nat', 'mangle', 'raw', 'security']),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            action=dict(type='str', default='append', choices=['append', 'insert']),
            ip_version=dict(type='str', default='ipv4', choices=['ipv4', 'ipv6']),
            chain=dict(type='str'),
            rule_num=dict(type='str'),
            protocol=dict(type='str'),
            wait=dict(type='str'),
            source=dict(type='str'),
            to_source=dict(type='str'),
            destination=dict(type='str'),
            to_destination=dict(type='str'),
            match=dict(type='list', elements='str', default=[]),
            tcp_flags=dict(type='dict',
                           options=dict(
                               flags=dict(type='list', elements='str'),
                               flags_set=dict(type='list', elements='str'))
                           ),
            jump=dict(type='str'),
            gateway=dict(type='str'),
            log_prefix=dict(type='str'),
            log_level=dict(type='str',
                           choices=['0', '1', '2', '3', '4', '5', '6', '7',
                                    'emerg', 'alert', 'crit', 'error',
                                    'warning', 'notice', 'info', 'debug'],
                           default=None,
                           ),
            goto=dict(type='str'),
            in_interface=dict(type='str'),
            out_interface=dict(type='str'),
            fragment=dict(type='str'),
            set_counters=dict(type='str'),
            source_port=dict(type='str'),
            destination_port=dict(type='str'),
            destination_ports=dict(type='list', elements='str', default=[]),
            to_ports=dict(type='str'),
            set_dscp_mark=dict(type='str'),
            set_dscp_mark_class=dict(type='str'),
            comment=dict(type='str'),
            ctstate=dict(type='list', elements='str', default=[]),
            src_range=dict(type='str'),
            dst_range=dict(type='str'),
            match_set=dict(type='str'),
            match_set_flags=dict(type='str', choices=['src', 'dst', 'src,dst', 'dst,src']),
            limit=dict(type='str'),
            limit_burst=dict(type='str'),
            uid_owner=dict(type='str'),
            gid_owner=dict(type='str'),
            reject_with=dict(type='str'),
            icmp_type=dict(type='str'),
            syn=dict(type='str', default='ignore', choices=['ignore', 'match', 'negate']),
            flush=dict(type='bool', default=False),
            policy=dict(type='str', choices=['ACCEPT', 'DROP', 'QUEUE', 'RETURN']),
        ),
        mutually_exclusive=(
            ['set_dscp_mark', 'set_dscp_mark_class'],
            ['flush', 'policy'],
        ),
        # TEE requires a gateway to copy packets to (case differs by platform).
        required_if=[
            ['jump', 'TEE', ['gateway']],
            ['jump', 'tee', ['gateway']],
        ]
    )
    # Result skeleton returned via exit_json; 'rule' is the rendered
    # iptables argument string built from the module parameters.
    args = dict(
        changed=False,
        failed=False,
        ip_version=module.params['ip_version'],
        table=module.params['table'],
        chain=module.params['chain'],
        flush=module.params['flush'],
        rule=' '.join(construct_rule(module.params)),
        state=module.params['state'],
    )

    ip_version = module.params['ip_version']
    # Resolve the iptables/ip6tables binary for the requested IP version.
    iptables_path = module.get_bin_path(BINS[ip_version], True)

    # Check if chain option is required: chain is mandatory unless flushing.
    if args['flush'] is False and args['chain'] is None:
        module.fail_json(msg="Either chain or flush parameter must be specified.")

    # Logging options imply the LOG target; reject any conflicting jump.
    if module.params.get('log_prefix', None) or module.params.get('log_level', None):
        if module.params['jump'] is None:
            module.params['jump'] = 'LOG'
        elif module.params['jump'] != 'LOG':
            module.fail_json(msg="Logging options can only be used with the LOG jump target.")

    # Check if wait option is supported by the installed iptables version.
    iptables_version = LooseVersion(get_iptables_version(iptables_path, module))

    if iptables_version >= LooseVersion(IPTABLES_WAIT_SUPPORT_ADDED):
        if iptables_version < LooseVersion(IPTABLES_WAIT_WITH_SECONDS_SUPPORT_ADDED):
            # -w is supported but without a seconds argument: blank it out.
            module.params['wait'] = ''
    else:
        # -w not supported at all: drop the option entirely.
        module.params['wait'] = None

    # Flush the table (check mode only reports the change).
    if args['flush'] is True:
        args['changed'] = True
        if not module.check_mode:
            flush_table(iptables_path, module, module.params)

    # Set the chain policy, comparing against the current one first.
    elif module.params['policy']:
        current_policy = get_chain_policy(iptables_path, module, module.params)
        if not current_policy:
            module.fail_json(msg='Can\'t detect current policy')

        changed = current_policy != module.params['policy']
        args['changed'] = changed
        if changed and not module.check_mode:
            set_chain_policy(iptables_path, module, module.params)

    # Otherwise ensure the rule is present/absent as requested.
    else:
        insert = (module.params['action'] == 'insert')
        rule_is_present = check_present(iptables_path, module, module.params)
        should_be_present = (args['state'] == 'present')

        # Check if target is up to date
        args['changed'] = (rule_is_present != should_be_present)
        if args['changed'] is False:
            # Target is already up to date
            module.exit_json(**args)

        # Check only; don't modify
        if not module.check_mode:
            if should_be_present:
                if insert:
                    insert_rule(iptables_path, module, module.params)
                else:
                    append_rule(iptables_path, module, module.params)
            else:
                remove_rule(iptables_path, module, module.params)

    module.exit_json(**args)
user: ansible-tester password: secure validate_certs: False ''' from ansible.module_utils.compat.version import LooseVersion from ansible.errors import AnsibleError from ansible.module_utils._text import to_bytes, to_native, to_text from ansible.module_utils.common._collections_compat import MutableMapping from ansible.plugins.inventory import BaseInventoryPlugin, Cacheable, to_safe_group_name, Constructable # 3rd party imports try: import requests if LooseVersion(requests.__version__) < LooseVersion('1.1.0'): raise ImportError except ImportError: raise AnsibleError( 'This script requires python-requests 1.1 as a minimum version') from requests.auth import HTTPBasicAuth class InventoryModule(BaseInventoryPlugin, Cacheable, Constructable): ''' Host inventory parser for ansible using foreman as source. ''' NAME = 'foreman' def __init__(self):
def __init__(self, string):
    """Initialize from a version string.

    The base class receives the whitespace-stripped form, while the parsed
    ``ver_obj`` is built from the raw input string.
    """
    stripped = string.strip()
    super().__init__(stripped)
    self.ver_obj = LooseVersion(string)
"Display.deprecated or AnsibleModule.deprecate", "ansible-deprecated-both-version-and-date", "Only one of version and date must be specified", { 'minversion': (2, 6) }), 'E9511': ("Removal version (%r) must be a major release, not a minor or " "patch release (see the specification at https://semver.org/)", "removal-version-must-be-major", "Used when a call to Display.deprecated or " "AnsibleModule.deprecate for a collection specifies a version " "which is not of the form x.0.0", { 'minversion': (2, 6) }), } ANSIBLE_VERSION = LooseVersion('.'.join(ansible_version_raw.split('.')[:3])) def _get_expr_name(node): """Funciton to get either ``attrname`` or ``name`` from ``node.func.expr`` Created specifically for the case of ``display.deprecated`` or ``self._display.deprecated`` """ try: return node.func.expr.attrname except AttributeError: # If this fails too, we'll let it raise, the caller should catch it return node.func.expr.name def parse_isodate(value):