def _git_version():
    """Return the version of the locally installed git binary.

    Runs ``git --version`` and parses the last whitespace-separated token
    of its output. Returns ``False`` when git is not installed or emits no
    output, otherwise a ``LooseVersion`` suitable for comparisons.
    """
    try:
        proc = subprocess.Popen(
            ['git', '--version'],
            shell=False,
            # Redirected std handles and close_fds don't mix on Windows
            close_fds=not salt.utils.platform.is_windows(),
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE)
        output = proc.communicate()[0]
    except OSError:
        # git executable could not be launched at all
        return False
    if not output:
        log.debug('Git not installed')
        return False
    version_str = output.strip().split()[-1]
    if six.PY3:
        # communicate() returned bytes on PY3; decode using the detected
        # system encoding
        version_str = version_str.decode(__salt_system_encoding__)
    log.debug('Detected git version: %s', version_str)
    return LooseVersion(version_str)
def test_pyOpenSSL_version_destructive(self):
    '''
    Test extension logic with different pyOpenSSL versions
    '''
    pillarval = {'csr': {'extendedKeyUsage': 'serverAuth'}}
    mock_pgt = MagicMock(return_value=pillarval)
    # Work inside a throw-away CA directory; removed again in the finally
    # blocks below so the destructive test leaves no residue.
    ca_path = tempfile.mkdtemp(dir=integration.SYS_TMP_DIR)
    ca_name = 'test_ca'
    certp = '{0}/{1}/{2}_ca_cert.crt'.format(ca_path, ca_name, ca_name)
    certk = '{0}/{1}/{2}_ca_cert.key'.format(ca_path, ca_name, ca_name)
    ret = 'Created Private Key: "{0}." Created CA "{1}": "{2}."'.format(
        certk, ca_name, certp)
    mock_opt = MagicMock(return_value=ca_path)
    mock_ret = MagicMock(return_value=0)
    # Phase 1: create_ca must succeed regardless of whether the patched
    # pyOpenSSL version supports X509 extensions.
    try:
        with patch.dict(tls.__salt__, {
                'config.option': mock_opt,
                'cmd.retcode': mock_ret}):
            with patch.dict(tls.__opts__, {
                    'hash_type': 'sha256',
                    'cachedir': ca_path}):
                with patch.dict(_TLS_TEST_DATA['create_ca'],
                                {'replace': True}):
                    # pyOpenSSL too old for extensions
                    with patch.dict(tls.__dict__, {
                            'OpenSSL_version': LooseVersion('0.1.1'),
                            'X509_EXT_ENABLED': False}):
                        self.assertEqual(
                            tls.create_ca(
                                ca_name,
                                days=365,
                                fixmode=False,
                                **_TLS_TEST_DATA['create_ca']),
                            ret)
                    # pyOpenSSL 0.14.x: extensions enabled
                    with patch.dict(tls.__dict__, {
                            'OpenSSL_version': LooseVersion('0.14.1'),
                            'X509_EXT_ENABLED': True}):
                        self.assertEqual(
                            tls.create_ca(
                                ca_name,
                                days=365,
                                fixmode=False,
                                **_TLS_TEST_DATA['create_ca']),
                            ret)
                    # pyOpenSSL 0.15.x: extensions enabled
                    with patch.dict(tls.__dict__, {
                            'OpenSSL_version': LooseVersion('0.15.1'),
                            'X509_EXT_ENABLED': True}):
                        self.assertEqual(
                            tls.create_ca(
                                ca_name,
                                days=365,
                                fixmode=False,
                                **_TLS_TEST_DATA['create_ca']),
                            ret)
    finally:
        if os.path.isdir(ca_path):
            shutil.rmtree(ca_path)
    # Phase 2: create_csr with a subjectAltName, which requires extension
    # support — must raise ValueError on old pyOpenSSL, succeed otherwise.
    try:
        certp = '{0}/{1}/certs/{2}.csr'.format(
            ca_path, ca_name, _TLS_TEST_DATA['create_ca']['CN'])
        certk = '{0}/{1}/certs/{2}.key'.format(
            ca_path, ca_name, _TLS_TEST_DATA['create_ca']['CN'])
        ret = ('Created Private Key: "{0}." '
               'Created CSR for "{1}": "{2}."').format(
                   certk, _TLS_TEST_DATA['create_ca']['CN'], certp)
        with patch.dict(tls.__salt__, {
                'config.option': mock_opt,
                'cmd.retcode': mock_ret,
                'pillar.get': mock_pgt}):
            with patch.dict(tls.__opts__, {
                    'hash_type': 'sha256',
                    'cachedir': ca_path}):
                with patch.dict(_TLS_TEST_DATA['create_ca'], {
                        'subjectAltName': 'DNS:foo.bar',
                        'replace': True}):
                    # No extension support: subjectAltName must be rejected
                    with patch.dict(tls.__dict__, {
                            'OpenSSL_version': LooseVersion('0.1.1'),
                            'X509_EXT_ENABLED': False}):
                        tls.create_ca(ca_name)
                        tls.create_csr(ca_name)
                        self.assertRaises(
                            ValueError,
                            tls.create_csr,
                            ca_name,
                            **_TLS_TEST_DATA['create_ca'])
                    with patch.dict(tls.__dict__, {
                            'OpenSSL_version': LooseVersion('0.14.1'),
                            'X509_EXT_ENABLED': True}):
                        tls.create_ca(ca_name)
                        tls.create_csr(ca_name)
                        self.assertEqual(
                            tls.create_csr(
                                ca_name,
                                **_TLS_TEST_DATA['create_ca']),
                            ret)
                    with patch.dict(tls.__dict__, {
                            'OpenSSL_version': LooseVersion('0.15.1'),
                            'X509_EXT_ENABLED': True}):
                        tls.create_ca(ca_name)
                        tls.create_csr(ca_name)
                        self.assertEqual(
                            tls.create_csr(
                                ca_name,
                                **_TLS_TEST_DATA['create_ca']),
                            ret)
    finally:
        if os.path.isdir(ca_path):
            shutil.rmtree(ca_path)
import salt.fileserver import salt.utils.files import salt.utils.gzip_util import salt.utils.hashutils import salt.utils.json import salt.utils.path import salt.utils.stringutils # Import third party libs from salt.ext import six from salt.utils.versions import LooseVersion try: import azure.storage if LooseVersion(azure.storage.__version__) < LooseVersion("0.20.0"): raise ImportError("azure.storage.__version__ must be >= 0.20.0") HAS_AZURE = True except (ImportError, AttributeError): HAS_AZURE = False __virtualname__ = "azurefs" log = logging.getLogger() def __virtual__(): """ Only load if defined in fileserver_backend and azure.storage is present """ if __virtualname__ not in __opts__["fileserver_backend"]:
VM_NAME = 'kings_landing' DUMMY_TOKEN = { 'refresh_token': None, 'client_id': 'dany123', 'client_secret': 'lalalalalalala', 'grant_type': 'refresh_token' } # Use certifi if installed try: if HAS_LIBCLOUD: # This work-around for Issue #32743 is no longer needed for libcloud >= 1.4.0. # However, older versions of libcloud must still be supported with this work-around. # This work-around can be removed when the required minimum version of libcloud is # 2.0.0 (See PR #40837 - which is implemented in Salt Oxygen). if LooseVersion(libcloud.__version__) < LooseVersion('1.4.0'): import certifi libcloud.security.CA_CERTS_PATH.append(certifi.where()) except ImportError: pass @skipIf(NO_MOCK, NO_MOCK_REASON) class GCETestCase(TestCase, LoaderModuleMockMixin): ''' Unit TestCase for salt.cloud.clouds.gce module. ''' def setup_loader_modules(self): return { gce: { '__active_provider_name__': '',
from salt.utils.decorators.jinja import jinja_filter, jinja_global, jinja_test from salt.utils.odict import OrderedDict from salt.utils.versions import LooseVersion try: from collections.abc import Hashable except ImportError: # pylint: disable=no-name-in-module from collections import Hashable log = logging.getLogger(__name__) __all__ = ["SaltCacheLoader", "SerializerExtension"] GLOBAL_UUID = uuid.UUID("91633EBF-1C86-5E33-935A-28061F4B480E") JINJA_VERSION = LooseVersion(jinja2.__version__) class SaltCacheLoader(BaseLoader): """ A special jinja Template Loader for salt. Requested templates are always fetched from the server to guarantee that the file is up to date. Templates are cached like regular salt states and only loaded once per loader instance. """ _cached_pillar_client = None _cached_client = None @classmethod
def version_compatible(version):
    """
    Checks profitbricks version
    """
    required = LooseVersion(version)
    available = LooseVersion(profitbricks.API_VERSION)
    # True when the installed profitbricks API is at least `version`
    return available >= required
def test_pyOpenSSL_version_destructive(self, ca_path):
    """
    Test extension logic with different pyOpenSSL versions
    """
    # ca_path is a fixture-provided scratch directory; cleanup is handled
    # by the fixture, so no try/finally is needed here.
    pillarval = {"csr": {"extendedKeyUsage": "serverAuth"}}
    mock_pgt = MagicMock(return_value=pillarval)
    ca_name = "test_ca"
    certp = "{0}/{1}/{1}_ca_cert.crt".format(ca_path, ca_name)
    certk = "{0}/{1}/{1}_ca_cert.key".format(ca_path, ca_name)
    ret = 'Created Private Key: "{0}." Created CA "{1}": "{2}."'.format(
        certk, ca_name, certp)
    mock_opt = MagicMock(return_value=ca_path)
    mock_ret = MagicMock(return_value=0)
    # Phase 1: create_ca must succeed regardless of X509 extension support
    with patch.dict(tls.__salt__, {
            "config.option": mock_opt,
            "cmd.retcode": mock_ret}):
        with patch.dict(tls.__opts__, {
                "hash_type": "sha256",
                "cachedir": ca_path}):
            with patch.dict(_TLS_TEST_DATA["create_ca"], {"replace": True}):
                # pyOpenSSL too old for extensions
                with patch.dict(
                        tls.__dict__,
                        {
                            "OpenSSL_version": LooseVersion("0.1.1"),
                            "X509_EXT_ENABLED": False,
                        },
                ):
                    self.assertEqual(
                        tls.create_ca(ca_name,
                                      days=365,
                                      fixmode=False,
                                      **_TLS_TEST_DATA["create_ca"]),
                        ret,
                    )
                # pyOpenSSL 0.14.x: extensions enabled
                with patch.dict(
                        tls.__dict__,
                        {
                            "OpenSSL_version": LooseVersion("0.14.1"),
                            "X509_EXT_ENABLED": True,
                        },
                ):
                    self.assertEqual(
                        tls.create_ca(ca_name,
                                      days=365,
                                      fixmode=False,
                                      **_TLS_TEST_DATA["create_ca"]),
                        ret,
                    )
                # pyOpenSSL 0.15.x: extensions enabled
                with patch.dict(
                        tls.__dict__,
                        {
                            "OpenSSL_version": LooseVersion("0.15.1"),
                            "X509_EXT_ENABLED": True,
                        },
                ):
                    self.assertEqual(
                        tls.create_ca(ca_name,
                                      days=365,
                                      fixmode=False,
                                      **_TLS_TEST_DATA["create_ca"]),
                        ret,
                    )
    # Phase 2: create_csr with a subjectAltName, which needs extension
    # support — must raise ValueError on old pyOpenSSL, succeed otherwise.
    certp = "{0}/{1}/certs/{2}.csr".format(
        ca_path, ca_name, _TLS_TEST_DATA["create_ca"]["CN"])
    certk = "{0}/{1}/certs/{2}.key".format(
        ca_path, ca_name, _TLS_TEST_DATA["create_ca"]["CN"])
    ret = ('Created Private Key: "{0}." '
           'Created CSR for "{1}": "{2}."').format(
               certk, _TLS_TEST_DATA["create_ca"]["CN"], certp)
    with patch.dict(
            tls.__salt__,
            {
                "config.option": mock_opt,
                "cmd.retcode": mock_ret,
                "pillar.get": mock_pgt,
            },
    ):
        with patch.dict(tls.__opts__, {
                "hash_type": "sha256",
                "cachedir": ca_path}):
            with patch.dict(
                    _TLS_TEST_DATA["create_ca"],
                    {
                        "subjectAltName": "DNS:foo.bar",
                        "replace": True
                    },
            ):
                # No extension support: subjectAltName must be rejected
                with patch.dict(
                        tls.__dict__,
                        {
                            "OpenSSL_version": LooseVersion("0.1.1"),
                            "X509_EXT_ENABLED": False,
                        },
                ):
                    tls.create_ca(ca_name)
                    tls.create_csr(ca_name)
                    self.assertRaises(ValueError,
                                      tls.create_csr,
                                      ca_name,
                                      **_TLS_TEST_DATA["create_ca"])
                with patch.dict(
                        tls.__dict__,
                        {
                            "OpenSSL_version": LooseVersion("0.14.1"),
                            "X509_EXT_ENABLED": True,
                        },
                ):
                    tls.create_ca(ca_name)
                    tls.create_csr(ca_name)
                    self.assertEqual(
                        tls.create_csr(ca_name,
                                       **_TLS_TEST_DATA["create_ca"]),
                        ret)
                with patch.dict(
                        tls.__dict__,
                        {
                            "OpenSSL_version": LooseVersion("0.15.1"),
                            "X509_EXT_ENABLED": True,
                        },
                ):
                    tls.create_ca(ca_name)
                    tls.create_csr(ca_name)
                    self.assertEqual(
                        tls.create_csr(ca_name,
                                       **_TLS_TEST_DATA["create_ca"]),
                        ret)
True, 'Cancellable': False, 'UpdateStatus': 'ELIGIBLE', 'Description': ('A newer release R20190418 is available. This release ' 'will be automatically deployed after somedate'), 'AutomatedUpdateDate': None } } @skipIf(HAS_BOTO3 is False, 'The boto module must be installed.') @skipIf( LooseVersion(boto3.__version__) < LooseVersion(REQUIRED_BOTO3_VERSION), 'The boto3 module must be greater or equal to version {}'.format( REQUIRED_BOTO3_VERSION)) class Boto3ElasticsearchTestCase(TestCase, LoaderModuleMockMixin): ''' TestCase for salt.modules.boto3_elasticsearch module ''' conn = None def setup_loader_modules(self): self.opts = salt.config.DEFAULT_MINION_OPTS.copy() utils = salt.loader.utils( self.opts, whitelist=['boto3', 'args', 'systemd', 'path', 'platform'], context={}) return {boto3_elasticsearch: {'__utils__': utils}}
def info(*packages, **kwargs):
    '''
    Return a detailed package(s) summary information.
    If no packages specified, all packages will be returned.

    :param packages:

    :param attr:
        Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.

        Valid attributes are:
            version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
            build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.

    :param all_versions:
        Return information for all installed versions of the packages

    :return:

    CLI example:

    .. code-block:: bash

        salt '*' lowpkg.info apache2 bash
        salt '*' lowpkg.info apache2 bash attr=version
        salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size
        salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size all_versions=True
    '''
    all_versions = kwargs.get('all_versions', False)
    # LONGSIZE is not a valid tag for all versions of rpm. If LONGSIZE isn't
    # available, then we can just use SIZE for older versions. See Issue #31366.
    rpm_tags = __salt__['cmd.run_stdout'](
        ['rpm', '--querytags'],
        python_shell=False).splitlines()
    if 'LONGSIZE' in rpm_tags:
        size_tag = '%{LONGSIZE}'
    else:
        size_tag = '%{SIZE}'

    # Query either the named packages or every installed package
    cmd = packages and "rpm -q {0}".format(' '.join(packages)) or "rpm -qa"

    # Construct query format
    # Each value is an rpm --queryformat fragment that labels its own output
    # line so the parser below can split on "key: value".
    attr_map = {
        "name": "name: %{NAME}\\n",
        "relocations": "relocations: %|PREFIXES?{[%{PREFIXES} ]}:{(not relocatable)}|\\n",
        "version": "version: %{VERSION}\\n",
        "vendor": "vendor: %{VENDOR}\\n",
        "release": "release: %{RELEASE}\\n",
        "epoch": "%|EPOCH?{epoch: %{EPOCH}\\n}|",
        "build_date_time_t": "build_date_time_t: %{BUILDTIME}\\n",
        "build_date": "build_date: %{BUILDTIME}\\n",
        "install_date_time_t": "install_date_time_t: %|INSTALLTIME?{%{INSTALLTIME}}:{(not installed)}|\\n",
        "install_date": "install_date: %|INSTALLTIME?{%{INSTALLTIME}}:{(not installed)}|\\n",
        "build_host": "build_host: %{BUILDHOST}\\n",
        "group": "group: %{GROUP}\\n",
        "source_rpm": "source_rpm: %{SOURCERPM}\\n",
        "size": "size: " + size_tag + "\\n",
        "arch": "arch: %{ARCH}\\n",
        "license": "%|LICENSE?{license: %{LICENSE}\\n}|",
        "signature": "signature: %|DSAHEADER?{%{DSAHEADER:pgpsig}}:{%|RSAHEADER?{%{RSAHEADER:pgpsig}}:"
                     "{%|SIGGPG?{%{SIGGPG:pgpsig}}:{%|SIGPGP?{%{SIGPGP:pgpsig}}:{(none)}|}|}|}|\\n",
        "packager": "%|PACKAGER?{packager: %{PACKAGER}\\n}|",
        "url": "%|URL?{url: %{URL}\\n}|",
        "summary": "summary: %{SUMMARY}\\n",
        "description": "description:\\n%{DESCRIPTION}\\n",
        "edition": "edition: %|EPOCH?{%{EPOCH}:}|%{VERSION}-%{RELEASE}\\n",
    }

    attr = kwargs.get('attr', None) and kwargs['attr'].split(",") or None
    query = list()
    if attr:
        # Only keep recognized attributes; 'description' is handled last so
        # its multi-line output terminates each record.
        for attr_k in attr:
            if attr_k in attr_map and attr_k != 'description':
                query.append(attr_map[attr_k])
        if not query:
            raise CommandExecutionError('No valid attributes found.')
        # 'name' and 'edition' are always required internally (keying and
        # version sorting below), even if the caller did not ask for them.
        if 'name' not in attr:
            attr.append('name')
            query.append(attr_map['name'])
        if 'edition' not in attr:
            attr.append('edition')
            query.append(attr_map['edition'])
    else:
        for attr_k, attr_v in six.iteritems(attr_map):
            if attr_k != 'description':
                query.append(attr_v)
    if attr and 'description' in attr or not attr:
        query.append(attr_map['description'])
    # Record separator between packages
    query.append("-----\\n")

    # TZ=UTC so build/install times are consistent across minions
    call = __salt__['cmd.run_all'](
        cmd + (" --queryformat '{0}'".format(''.join(query))),
        output_loglevel='trace',
        env={'TZ': 'UTC'},
        clean_env=True)
    if call['retcode'] != 0:
        comment = ''
        if 'stderr' in call:
            comment += (call['stderr'] or call['stdout'])
        raise CommandExecutionError(comment)
    elif 'error' in call['stderr']:
        raise CommandExecutionError(call['stderr'])
    else:
        out = call['stdout']

    _ret = list()
    # Split the combined output back into per-package records
    for pkg_info in re.split(r"----*", out):
        pkg_info = pkg_info.strip()
        if not pkg_info:
            continue
        pkg_info = pkg_info.split(os.linesep)
        # Drop a trailing "distribution" line some rpm builds emit
        if pkg_info[-1].lower().startswith('distribution'):
            pkg_info = pkg_info[:-1]

        pkg_data = dict()
        pkg_name = None
        descr_marker = False
        descr = list()
        for line in pkg_info:
            # Everything after the 'description' key belongs to the
            # free-form description text
            if descr_marker:
                descr.append(line)
                continue
            line = [item.strip() for item in line.split(':', 1)]
            if len(line) != 2:
                continue
            key, value = line
            if key == 'description':
                descr_marker = True
                continue
            if key == 'name':
                pkg_name = value

            # Convert Unix ticks into ISO time format
            if key in ['build_date', 'install_date']:
                try:
                    pkg_data[key] = datetime.datetime.utcfromtimestamp(
                        int(value)).isoformat() + "Z"
                except ValueError:
                    log.warning('Could not convert "%s" into Unix time', value)
                continue

            # Convert Unix ticks into an Integer
            if key in ['build_date_time_t', 'install_date_time_t']:
                try:
                    pkg_data[key] = int(value)
                except ValueError:
                    log.warning('Could not convert "%s" into Unix time', value)
                continue
            if key not in ['description', 'name'] and value:
                pkg_data[key] = value
        if attr and 'description' in attr or not attr:
            pkg_data['description'] = os.linesep.join(descr)
        if pkg_name:
            pkg_data['name'] = pkg_name
        _ret.append(pkg_data)

    # Force-sort package data by version,
    # pick only latest versions
    # (in case multiple packages installed, e.g. kernel)
    ret = dict()
    for pkg_data in reversed(
            sorted(_ret, key=lambda x: LooseVersion(x['edition']))):
        pkg_name = pkg_data.pop('name')
        # Filter out GPG public keys packages
        if pkg_name.startswith('gpg-pubkey'):
            continue
        if pkg_name not in ret:
            if all_versions:
                ret[pkg_name] = [pkg_data.copy()]
            else:
                ret[pkg_name] = pkg_data.copy()
                # 'edition' was only needed for sorting above
                del ret[pkg_name]['edition']
        elif all_versions:
            ret[pkg_name].append(pkg_data.copy())

    return ret
def test_cert_info(self):
    """
    Test cert info
    """
    self.maxDiff = None
    with patch("os.path.exists", MagicMock(return_value=True)), patch(
            "salt.modules.tls.maybe_fix_ssl_version",
            MagicMock(return_value=True)):
        ca_path = "/tmp/test_tls"
        ca_name = "test_ca"
        certp = "{0}/{1}/{1}_ca_cert.crt".format(ca_path, ca_name)
        # Expected metadata for the fixture certificate in _TLS_TEST_DATA
        ret = {
            "not_after": 1462379961,
            "signature_algorithm": "sha256WithRSAEncryption",
            "extensions": None,
            "fingerprint": ("96:72:B3:0A:1D:34:37:05:75:57:44:7E:08:81:A7:09:"
                            "0C:E1:8F:5F:4D:0C:49:CE:5B:D2:6B:45:D3:4D:FF:31"),
            "serial_number": 284092004844685647925744086791559203700,
            "subject": {
                "C": "US",
                "CN": "localhost",
                "L": "Salt Lake City",
                "O": "SaltStack",
                "ST": "Utah",
                "emailAddress": "*****@*****.**",
            },
            "not_before": 1430843961,
            "issuer": {
                "C": "US",
                "CN": "localhost",
                "L": "Salt Lake City",
                "O": "SaltStack",
                "ST": "Utah",
                "emailAddress": "*****@*****.**",
            },
        }

        def ignore_extensions(data):
            """
            Ignore extensions pending a resolution of issue 24338
            """
            if "extensions" in data.keys():
                data["extensions"] = None
            return data

        # older pyopenssl versions don't have extensions or
        # signature_algorithms
        def remove_not_in_result(source, reference):
            # Drop keys from the expected dict that the installed pyOpenSSL
            # could not have produced
            if "signature_algorithm" not in reference:
                del source["signature_algorithm"]
            if "extensions" not in reference:
                del source["extensions"]

        with patch("salt.utils.files.fopen",
                   mock_open(read_data=_TLS_TEST_DATA["ca_cert"])):
            try:
                result = ignore_extensions(tls.cert_info(certp))
            except AttributeError as err:
                # PyOpenSSL version 16.0.0 has an upstream bug in it where a call is made
                # in OpenSSL/crypto.py in the get_signature_algorithm function referencing
                # the cert_info attribute, which doesn't exist. This was fixed in subsequent
                # releases of PyOpenSSL with https://github.com/pyca/pyopenssl/pull/476
                if ("'_cffi_backend.CDataGCP' object has no attribute 'cert_info'"
                        == six.text_type(err)):
                    log.exception(err)
                    self.skipTest(
                        "Encountered an upstream error with PyOpenSSL: {0}"
                        .format(err))
                if ("'_cffi_backend.CDataGCP' object has no attribute 'object'"
                        == str(err)):
                    log.exception(err)
                    self.skipTest(
                        "Encountered an upstream error with PyOpenSSL: {0}"
                        .format(err))
                # python-openssl version 0.14, when installed with the "junos-eznc" pip
                # package, causes an error on this test. Newer versions of PyOpenSSL do not have
                # this issue. If 0.14 is installed and we hit this error, skip the test.
                if LooseVersion(
                        OpenSSL.__version__) == LooseVersion("0.14"):
                    log.exception(err)
                    self.skipTest(
                        "Encountered a package conflict. OpenSSL version 0.14 cannot be used with "
                        'the "junos-eznc" pip package on this test. Skipping.'
                    )
                # Unknown AttributeError: fall through with an empty result
                # so the comparison below fails informatively
                result = {}
        remove_not_in_result(ret, result)
        self.assertEqual(result, ret)
def test_cmp(v1, v2, wanted): res = LooseVersion(v1)._cmp(LooseVersion(v2)) assert res == wanted, "cmp({}, {}) should be {}, got {}".format( v1, v2, wanted, res)
DUMMY_TOKEN = { "refresh_token": None, "client_id": "dany123", "client_secret": "lalalalalalala", "grant_type": "refresh_token", } # Use certifi if installed try: if HAS_LIBCLOUD: # This work-around for Issue #32743 is no longer needed for libcloud >= # 1.4.0. However, older versions of libcloud must still be supported # with this work-around. This work-around can be removed when the # required minimum version of libcloud is 2.0.0 (See PR #40837 - which # is implemented in Salt 2018.3.0). if LooseVersion(libcloud.__version__) < LooseVersion("1.4.0"): import certifi libcloud.security.CA_CERTS_PATH.append(certifi.where()) except ImportError: pass class DummyGCEConn(object): def __init__(self): self.create_node = MagicMock() def __getattr__(self, attr): if attr != "create_node": # Return back the first thing passed in (i.e. don't call out to get # the override value).
def test_worktree_add_rm(self):
    '''
    This tests git.worktree_add, git.is_worktree, git.worktree_rm, and
    git.worktree_prune. Tests for 'git worktree list' are covered in
    tests.unit.modules.git_test.
    '''
    # We don't need to enclose this comparison in a try/except, since the
    # decorator would skip this test if git is not installed and we'd never
    # get here in the first place.
    # git changed the "worktree add" output wording in 2.6.0
    if _git_version() >= LooseVersion('2.6.0'):
        worktree_add_prefix = 'Preparing '
    else:
        worktree_add_prefix = 'Enter '
    worktree_path = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
    worktree_basename = os.path.basename(worktree_path)
    worktree_path2 = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
    worktree_basename2 = os.path.basename(worktree_path2)
    # Even though this is Windows, git commands return a unix style path
    if salt.utils.platform.is_windows():
        worktree_path = worktree_path.replace('\\', '/')
        worktree_path2 = worktree_path2.replace('\\', '/')
    # Add the worktrees
    ret = self.run_function(
        'git.worktree_add', [self.repo, worktree_path],
    )
    self.assertTrue(worktree_add_prefix in ret)
    self.assertTrue(worktree_basename in ret)
    ret = self.run_function(
        'git.worktree_add', [self.repo, worktree_path2]
    )
    self.assertTrue(worktree_add_prefix in ret)
    self.assertTrue(worktree_basename2 in ret)
    # Check if this new path is a worktree
    self.assertTrue(self.run_function('git.is_worktree', [worktree_path]))
    # Check if the main repo is a worktree
    self.assertFalse(self.run_function('git.is_worktree', [self.repo]))
    # Check if a non-repo directory is a worktree
    empty_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP)
    self.assertFalse(self.run_function('git.is_worktree', [empty_dir]))
    shutil.rmtree(empty_dir)
    # Remove the first worktree
    self.assertTrue(self.run_function('git.worktree_rm', [worktree_path]))
    # Prune the worktrees
    prune_message = (
        'Removing worktrees/{0}: gitdir file points to non-existent '
        'location'.format(worktree_basename)
    )
    # Test dry run output. It should match the same output we get when we
    # actually prune the worktrees.
    result = self.run_function('git.worktree_prune',
                               [self.repo],
                               dry_run=True)
    self.assertEqual(result, prune_message)
    # Test pruning for real, and make sure the output is the same
    self.assertEqual(
        self.run_function('git.worktree_prune', [self.repo]),
        prune_message
    )
def render_jinja_tmpl(tmplstr, context, tmplpath=None):
    """
    Render a Jinja template string with the supplied context.

    Builds a Jinja2 Environment configured from ``context['opts']``
    (extensions, trim/lstrip options, custom salt filters/tests/globals),
    renders ``tmplstr``, and maps Jinja exceptions onto SaltRenderError.

    :param tmplstr: the template source (str or bytes; bytes are decoded
        with SLS_ENCODING)
    :param context: render context; must contain 'opts' and 'saltenv'
    :param tmplpath: on-disk path of the template, used for a filesystem
        loader when no saltenv is given
    :raises SaltRenderError: on undefined variables, syntax errors, or any
        other failure during rendering
    """
    opts = context["opts"]
    saltenv = context["saltenv"]
    loader = None
    newline = False
    file_client = context.get("fileclient", None)

    if tmplstr and not isinstance(tmplstr, str):
        # https://jinja.palletsprojects.com/en/2.11.x/api/#unicode
        tmplstr = tmplstr.decode(SLS_ENCODING)

    # Remember a trailing newline so it can be re-appended after rendering
    # (Jinja strips it; see workaround at the bottom)
    if tmplstr.endswith(os.linesep):
        newline = os.linesep
    elif tmplstr.endswith("\n"):
        newline = "\n"

    if not saltenv:
        if tmplpath:
            loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath))
    else:
        # Fetch includes/imports through the salt fileserver
        loader = salt.utils.jinja.SaltCacheLoader(
            opts,
            saltenv,
            pillar_rend=context.get("_pillar_rend", False),
            _file_client=file_client,
        )

    env_args = {"extensions": [], "loader": loader}

    # hasattr probes keep compatibility across jinja2 versions where these
    # extensions may not exist
    if hasattr(jinja2.ext, "with_"):
        env_args["extensions"].append("jinja2.ext.with_")
    if hasattr(jinja2.ext, "do"):
        env_args["extensions"].append("jinja2.ext.do")
    if hasattr(jinja2.ext, "loopcontrols"):
        env_args["extensions"].append("jinja2.ext.loopcontrols")
    env_args["extensions"].append(salt.utils.jinja.SerializerExtension)

    opt_jinja_env = opts.get("jinja_env", {})
    opt_jinja_sls_env = opts.get("jinja_sls_env", {})

    opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {}
    opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {}

    # Pass through trim_blocks and lstrip_blocks Jinja parameters
    # trim_blocks removes newlines around Jinja blocks
    # lstrip_blocks strips tabs and spaces from the beginning of
    # line to the start of a block.
    if opts.get("jinja_trim_blocks", False):
        log.debug("Jinja2 trim_blocks is enabled")
        log.warning(
            "jinja_trim_blocks is deprecated and will be removed in a future release, please use jinja_env and/or jinja_sls_env instead"
        )
        opt_jinja_env["trim_blocks"] = True
        opt_jinja_sls_env["trim_blocks"] = True
    if opts.get("jinja_lstrip_blocks", False):
        log.debug("Jinja2 lstrip_blocks is enabled")
        log.warning(
            "jinja_lstrip_blocks is deprecated and will be removed in a future release, please use jinja_env and/or jinja_sls_env instead"
        )
        opt_jinja_env["lstrip_blocks"] = True
        opt_jinja_sls_env["lstrip_blocks"] = True

    def opt_jinja_env_helper(opts, optname):
        # Copy recognized (i.e. present in jinja2.defaults) options into
        # the Environment kwargs; warn about unknown keys
        for k, v in opts.items():
            k = k.lower()
            if hasattr(jinja2.defaults, k.upper()):
                log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname)
                env_args[k] = v
            else:
                log.warning("Jinja2 environment %s is not recognized", k)

    # SLS renders get their own (jinja_sls_env) option set
    if "sls" in context and context["sls"] != "":
        opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env")
    else:
        opt_jinja_env_helper(opt_jinja_env, "jinja_env")

    if opts.get("allow_undefined", False):
        jinja_env = jinja2.Environment(**env_args)
    else:
        # Undefined variables raise instead of rendering as empty strings
        jinja_env = jinja2.Environment(undefined=jinja2.StrictUndefined, **env_args)

    tojson_filter = jinja_env.filters.get("tojson")
    indent_filter = jinja_env.filters.get("indent")
    jinja_env.tests.update(JinjaTest.salt_jinja_tests)
    jinja_env.filters.update(JinjaFilter.salt_jinja_filters)
    if tojson_filter is not None:
        # Use the existing tojson filter, if present (jinja2 >= 2.9)
        jinja_env.filters["tojson"] = tojson_filter
    if salt.utils.jinja.JINJA_VERSION >= LooseVersion("2.11"):
        # Use the existing indent filter on Jinja versions where it's not broken
        jinja_env.filters["indent"] = indent_filter
    jinja_env.globals.update(JinjaGlobal.salt_jinja_globals)

    # globals
    jinja_env.globals["odict"] = OrderedDict
    jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context

    jinja_env.tests["list"] = salt.utils.data.is_list

    # Normalize all string context values to unicode before rendering
    decoded_context = {}
    for key, value in context.items():
        if not isinstance(value, str):
            decoded_context[key] = value
            continue
        try:
            decoded_context[key] = salt.utils.stringutils.to_unicode(
                value, encoding=SLS_ENCODING
            )
        except UnicodeDecodeError as ex:
            log.debug(
                "Failed to decode using default encoding (%s), trying system encoding",
                SLS_ENCODING,
            )
            decoded_context[key] = salt.utils.data.decode(value)

    try:
        template = jinja_env.from_string(tmplstr)
        template.globals.update(decoded_context)
        output = template.render(**decoded_context)
    except jinja2.exceptions.UndefinedError as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        out = _get_jinja_error(trace, context=decoded_context)[1]
        tmplstr = ""
        # Don't include the line number, since it is misreported
        # https://github.com/mitsuhiko/jinja2/issues/276
        raise SaltRenderError("Jinja variable {}{}".format(exc, out), buf=tmplstr)
    except (
        jinja2.exceptions.TemplateRuntimeError,
        jinja2.exceptions.TemplateSyntaxError,
    ) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        raise SaltRenderError(
            "Jinja syntax error: {}{}".format(exc, out), line, tmplstr
        )
    except (SaltInvocationError, CommandExecutionError) as exc:
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        raise SaltRenderError(
            "Problem running salt function in Jinja template: {}{}".format(exc, out),
            line,
            tmplstr,
        )
    except Exception as exc:  # pylint: disable=broad-except
        tracestr = traceback.format_exc()
        trace = traceback.extract_tb(sys.exc_info()[2])
        line, out = _get_jinja_error(trace, context=decoded_context)
        if not line:
            tmplstr = ""
        else:
            # Append the traceback so the failing template line is visible
            tmplstr += "\n{}".format(tracestr)
        log.debug("Jinja Error")
        log.debug("Exception:", exc_info=True)
        log.debug("Out: %s", out)
        log.debug("Line: %s", line)
        log.debug("TmplStr: %s", tmplstr)
        log.debug("TraceStr: %s", tracestr)
        raise SaltRenderError(
            "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr
        )

    # Workaround a bug in Jinja that removes the final newline
    # (https://github.com/mitsuhiko/jinja2/issues/75)
    if newline:
        output += newline

    return output
class NpmStateTest(ModuleCase, SaltReturnAssertsMixin):
    """
    Destructive integration tests for the npm state module.
    Each test requires network access and installs/removes real packages.
    """

    @requires_network()
    @destructiveTest
    def test_npm_installed_removed(self):
        '''
        Basic test to determine if NPM module was successfully installed and
        removed.
        '''
        ret = self.run_state('npm.installed',
                             name='[email protected]',
                             registry="http://registry.npmjs.org/")
        self.assertSaltTrueReturn(ret)
        ret = self.run_state('npm.removed', name='pm2')
        self.assertSaltTrueReturn(ret)

    @requires_network()
    @destructiveTest
    def test_npm_install_url_referenced_package(self):
        '''
        Determine if URL-referenced NPM module can be successfully installed.
        '''
        # npm >= MAX_NPM_VERSION needs a writable dedicated install dir and
        # a non-root user; otherwise install into the default location
        if LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION):
            user = os.environ.get('SUDO_USER', 'root')
            npm_dir = os.path.join(RUNTIME_VARS.TMP, 'git-install-npm')
            self.run_state('file.directory',
                           name=npm_dir,
                           user=user,
                           dir_mode='755')
        else:
            user = None
            npm_dir = None
        ret = self.run_state('npm.installed',
                             name='request/request#v2.81.1',
                             runas=user,
                             dir=npm_dir,
                             registry="http://registry.npmjs.org/")
        self.assertSaltTrueReturn(ret)
        ret = self.run_state('npm.removed',
                             name='git://github.com/request/request',
                             runas=user,
                             dir=npm_dir)
        self.assertSaltTrueReturn(ret)
        if npm_dir is not None:
            self.run_state('file.absent', name=npm_dir)

    @requires_network()
    @destructiveTest
    def test_npm_installed_pkgs(self):
        '''
        Basic test to determine if NPM module successfully installs multiple
        packages.
        '''
        ret = self.run_state('npm.installed',
                             name=None,
                             pkgs=['[email protected]', '[email protected]'],
                             registry="http://registry.npmjs.org/")
        self.assertSaltTrueReturn(ret)

    @skipIf(
        salt.utils.which('npm') and LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION),
        'Skip with npm >= 5.0.0 until #41770 is fixed')
    @destructiveTest
    def test_npm_cache_clean(self):
        '''
        Basic test to determine if NPM successfully cleans its cached
        packages.
        '''
        ret = self.run_state('npm.cache_cleaned', name=None, force=True)
        self.assertSaltTrueReturn(ret)
def upgraded(
    name,
    elasticsearch_version,
    blocking=True,
    region=None,
    keyid=None,
    key=None,
    profile=None,
):
    """
    Ensures the Elasticsearch domain specified runs on the specified version
    of elasticsearch. Only upgrades are possible as downgrades require a
    manual snapshot and an S3 bucket to store them in.

    Note that this operation is blocking until the upgrade is complete.

    :param str name: The name of the Elasticsearch domain to upgrade.
    :param str elasticsearch_version: String of format X.Y to specify version for
        the Elasticsearch domain eg. "1.5" or "2.3".

    .. versionadded:: Natrium

    Example:

    .. code-block:: yaml

        Upgrade Elasticsearch Domain:
          boto3_elasticsearch.upgraded:
          - name: my_domain
          - elasticsearch_version: '7.2'
          - region: eu-west-1
    """
    # result starts as the sentinel "oops"; isinstance(..., bool) checks
    # below detect whether a definitive outcome has been reached yet
    ret = {"name": name, "result": "oops", "comment": [], "changes": {}}
    current_domain = None
    res = __salt__["boto3_elasticsearch.describe_elasticsearch_domain"](
        name, region=region, keyid=keyid, key=key, profile=profile)
    if not res["result"]:
        ret["result"] = False
        if "ResourceNotFoundException" in res["error"]:
            ret["comment"].append(
                'The Elasticsearch domain "{}" does not exist.'
                "".format(name))
        else:
            ret["comment"].append(res["error"])
    else:
        current_domain = res["response"]
        current_version = current_domain["ElasticsearchVersion"]
        if elasticsearch_version and current_version == elasticsearch_version:
            # Nothing to do, already at the requested version
            ret["result"] = True
            ret["comment"].append('The Elasticsearch domain "{}" is already '
                                  "at the desired version {}"
                                  "".format(name, elasticsearch_version))
        elif LooseVersion(elasticsearch_version) < LooseVersion(
                current_version):
            # Downgrades require a manual snapshot; refuse
            ret["result"] = False
            ret["comment"].append(
                'Elasticsearch domain "{}" cannot be downgraded '
                'to version "{}".'
                "".format(name, elasticsearch_version))
    if isinstance(ret["result"], bool):
        return ret
    log.debug("%s :upgraded: Check upgrade in progress", __name__)
    # Check if an upgrade is already in progress
    res = __salt__["boto3_elasticsearch.get_upgrade_status"](name,
                                                             region=region,
                                                             keyid=keyid,
                                                             key=key,
                                                             profile=profile)
    if "error" in res:
        ret["result"] = False
        ret["comment"].append("Error determining current upgrade status "
                              'of domain "{}": {}'.format(name, res["error"]))
        return ret
    if res["response"].get("StepStatus") == "IN_PROGRESS":
        if blocking:
            # An upgrade is already in progress, wait for it to complete
            res2 = __salt__["boto3_elasticsearch.wait_for_upgrade"](
                name, region=region, keyid=keyid, key=key, profile=profile)
            if "error" in res2:
                ret["result"] = False
                ret["comment"].append("Error waiting for upgrade of domain "
                                      '"{}" to complete: {}'
                                      "".format(name, res2["error"]))
            elif (res2["response"].get("UpgradeName",
                                       "").endswith(elasticsearch_version)):
                # The in-flight upgrade was already targeting our version
                ret["result"] = True
                ret["comment"].append('Elasticsearch Domain "{}" is '
                                      'already at version "{}".'
                                      "".format(name, elasticsearch_version))
        else:
            # We are not going to wait for it to complete, so bail.
            ret["result"] = True
            ret["comment"].append(
                'An upgrade of Elasticsearch domain "{}" '
                "is already underway: {}"
                "".format(name, res["response"].get("UpgradeName")))
    if isinstance(ret["result"], bool):
        return ret
    log.debug("%s :upgraded: Check upgrade eligibility", __name__)
    # Check if the domain is eligible for an upgrade
    res = __salt__["boto3_elasticsearch.check_upgrade_eligibility"](
        name,
        elasticsearch_version,
        region=region,
        keyid=keyid,
        key=key,
        profile=profile,
    )
    if "error" in res:
        ret["result"] = False
        ret["comment"].append("Error checking upgrade eligibility for "
                              'domain "{}": {}'.format(name, res["error"]))
    elif not res["response"]:
        ret["result"] = False
        ret["comment"].append(
            'The Elasticsearch Domain "{}" is not eligible to '
            "be upgraded to version {}."
            "".format(name, elasticsearch_version))
    else:
        log.debug("%s :upgraded: Start the upgrade", __name__)
        # Start the upgrade
        if __opts__["test"]:
            ret["result"] = None
            # BUGFIX: the domain name was never interpolated into this
            # message; the raw "{}" placeholder leaked into the comment.
            ret["comment"].append(
                'The Elasticsearch version for domain "{}" would have been upgraded.'
                "".format(name))
            ret["changes"] = {
                "old": current_domain["ElasticsearchVersion"],
                "new": elasticsearch_version,
            }
        else:
            res = __salt__["boto3_elasticsearch.upgrade_elasticsearch_domain"](
                name,
                elasticsearch_version,
                blocking=blocking,
                region=region,
                keyid=keyid,
                key=key,
                profile=profile,
            )
            if "error" in res:
                ret["result"] = False
                ret["comment"].append(
                    'Error upgrading Elasticsearch domain "{}": {}'
                    "".format(name, res["error"]))
            else:
                ret["result"] = True
                ret["comment"].append('The Elasticsearch domain "{}" has been '
                                      "upgraded to version {}."
                                      "".format(name, elasticsearch_version))
                ret["changes"] = {
                    "old": current_domain["ElasticsearchVersion"],
                    "new": elasticsearch_version,
                }
    ret = _check_return_value(ret)
    return ret
def test_cert_info(self):
    '''
    Test that tls.cert_info parses a PEM certificate into the expected
    metadata dict (subject, issuer, validity timestamps, fingerprint, ...).

    Fields not provided by older pyOpenSSL versions (``extensions``,
    ``signature_algorithm``) are stripped from the expected dict before
    comparison, and known upstream pyOpenSSL bugs cause a skip instead
    of a failure.
    '''
    # Show full diffs on assertion failure; the expected dict is large.
    self.maxDiff = None
    # Patch filesystem existence and SSL-version fixup so no real cert
    # file or OpenSSL context negotiation is needed.
    with patch('os.path.exists', MagicMock(return_value=True)), \
            patch('salt.modules.tls.maybe_fix_ssl_version',
                  MagicMock(return_value=True)):
        ca_path = '/tmp/test_tls'
        ca_name = 'test_ca'
        certp = '{0}/{1}/{2}_ca_cert.crt'.format(ca_path,
                                                 ca_name,
                                                 ca_name)
        # Expected parse result for the static test certificate in
        # _TLS_TEST_DATA['ca_cert'].
        ret = {
            'not_after': 1462379961,
            'signature_algorithm': 'sha256WithRSAEncryption',
            'extensions': None,
            'fingerprint': ('96:72:B3:0A:1D:34:37:05:75:57:44:7E:08:81:A7:09:'
                            '0C:E1:8F:5F:4D:0C:49:CE:5B:D2:6B:45:D3:4D:FF:31'),
            'serial_number': 284092004844685647925744086791559203700,
            'subject': {
                'C': 'US',
                'CN': 'localhost',
                'L': 'Salt Lake City',
                'O': 'SaltStack',
                'ST': 'Utah',
                'emailAddress': '*****@*****.**'
            },
            'not_before': 1430843961,
            'issuer': {
                'C': 'US',
                'CN': 'localhost',
                'L': 'Salt Lake City',
                'O': 'SaltStack',
                'ST': 'Utah',
                'emailAddress': '*****@*****.**'
            }
        }

        def ignore_extensions(data):
            '''
            Ignore extensions pending a resolution of issue 24338
            '''
            if 'extensions' in data.keys():
                data['extensions'] = None
            return data

        # older pyopenssl versions don't have extensions or
        # signature_algorithms; drop those keys from the expected dict
        # so the comparison only covers what this pyOpenSSL can produce
        def remove_not_in_result(source, reference):
            if 'signature_algorithm' not in reference:
                del source['signature_algorithm']
            if 'extensions' not in reference:
                del source['extensions']

        # Feed the static PEM data to cert_info via a mocked fopen.
        with patch('salt.utils.files.fopen',
                   mock_open(read_data=_TLS_TEST_DATA['ca_cert'])):
            try:
                result = ignore_extensions(tls.cert_info(certp))
            except AttributeError as err:
                # PyOpenSSL version 16.0.0 has an upstream bug in it where a
                # call is made in OpenSSL/crypto.py in the
                # get_signature_algorithm function referencing the cert_info
                # attribute, which doesn't exist. This was fixed in subsequent
                # releases of PyOpenSSL with
                # https://github.com/pyca/pyopenssl/pull/476
                if '\'_cffi_backend.CDataGCP\' object has no attribute \'cert_info\'' == six.text_type(err):
                    log.exception(err)
                    self.skipTest(
                        'Encountered an upstream error with PyOpenSSL: {0}'.format(err))
                if '\'_cffi_backend.CDataGCP\' object has no attribute \'object\'' == str(err):
                    log.exception(err)
                    self.skipTest(
                        'Encountered an upstream error with PyOpenSSL: {0}'.format(err))
                # python-openssl version 0.14, when installed with the
                # "junos-eznc" pip package, causes an error on this test.
                # Newer versions of PyOpenSSL do not have this issue.
                # If 0.14 is installed and we hit this error, skip the test.
                if LooseVersion(OpenSSL.__version__) == LooseVersion('0.14'):
                    log.exception(err)
                    self.skipTest(
                        'Encountered a package conflict. OpenSSL version 0.14 cannot be used with '
                        'the "junos-eznc" pip package on this test. Skipping.')
                # Unrecognized AttributeError: fall through with an empty
                # result so the assertEqual below reports the failure.
                result = {}
            remove_not_in_result(ret, result)
            self.assertEqual(result, ret)
def info(*packages, **kwargs):
    """
    Return a detailed package(s) summary information.
    If no packages specified, all packages will be returned.

    :param packages:

    :param attr:
        Comma-separated package attributes. If no 'attr' is specified, all available attributes returned.

        Valid attributes are:
            version, vendor, release, build_date, build_date_time_t, install_date, install_date_time_t,
            build_host, group, source_rpm, arch, epoch, size, license, signature, packager, url, summary, description.

    :param all_versions:
        Return information for all installed versions of the packages

    :param root:
        use root as top level directory (default: "/")

    :return:

    CLI Example:

    .. code-block:: bash

        salt '*' lowpkg.info apache2 bash
        salt '*' lowpkg.info apache2 bash attr=version
        salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size
        salt '*' lowpkg.info apache2 bash attr=version,build_date_iso,size all_versions=True
    """
    all_versions = kwargs.get("all_versions", False)
    # LONGSIZE is not a valid tag for all versions of rpm. If LONGSIZE isn't
    # available, then we can just use SIZE for older versions. See Issue #31366.
    rpm_tags = __salt__["cmd.run_stdout"](
        ["rpm", "--querytags"], python_shell=False
    ).splitlines()
    if "LONGSIZE" in rpm_tags:
        size_tag = "%{LONGSIZE}"
    else:
        size_tag = "%{SIZE}"

    cmd = ["rpm"]
    if kwargs.get("root"):
        cmd.extend(["--root", kwargs["root"]])
    if packages:
        # Query only the named packages.
        cmd.append("-q")
        cmd.extend(packages)
    else:
        # No packages given: query all installed packages.
        cmd.append("-qa")

    # Construct query format: each attribute maps to an rpm --queryformat
    # fragment emitting "key: value\n" lines that are parsed back below.
    attr_map = {
        "name": "name: %{NAME}\\n",
        "relocations": "relocations: %|PREFIXES?{[%{PREFIXES} ]}:{(not relocatable)}|\\n",
        "version": "version: %{VERSION}\\n",
        "vendor": "vendor: %{VENDOR}\\n",
        "release": "release: %{RELEASE}\\n",
        "epoch": "%|EPOCH?{epoch: %{EPOCH}\\n}|",
        "build_date_time_t": "build_date_time_t: %{BUILDTIME}\\n",
        "build_date": "build_date: %{BUILDTIME}\\n",
        "install_date_time_t": "install_date_time_t: %|INSTALLTIME?{%{INSTALLTIME}}:{(not installed)}|\\n",
        "install_date": "install_date: %|INSTALLTIME?{%{INSTALLTIME}}:{(not installed)}|\\n",
        "build_host": "build_host: %{BUILDHOST}\\n",
        "group": "group: %{GROUP}\\n",
        "source_rpm": "source_rpm: %{SOURCERPM}\\n",
        "size": "size: " + size_tag + "\\n",
        "arch": "arch: %{ARCH}\\n",
        "license": "%|LICENSE?{license: %{LICENSE}\\n}|",
        "signature": "signature: %|DSAHEADER?{%{DSAHEADER:pgpsig}}:{%|RSAHEADER?{%{RSAHEADER:pgpsig}}:"
                     "{%|SIGGPG?{%{SIGGPG:pgpsig}}:{%|SIGPGP?{%{SIGPGP:pgpsig}}:{(none)}|}|}|}|\\n",
        "packager": "%|PACKAGER?{packager: %{PACKAGER}\\n}|",
        "url": "%|URL?{url: %{URL}\\n}|",
        "summary": "summary: %{SUMMARY}\\n",
        "description": "description:\\n%{DESCRIPTION}\\n",
        "edition": "edition: %|EPOCH?{%{EPOCH}:}|%{VERSION}-%{RELEASE}\\n",
    }

    attr = kwargs.get("attr", None) and kwargs["attr"].split(",") or None
    query = list()
    if attr:
        # Only include requested attributes; 'description' is handled
        # separately below because it is multi-line.
        for attr_k in attr:
            if attr_k in attr_map and attr_k != "description":
                query.append(attr_map[attr_k])
        if not query:
            raise CommandExecutionError("No valid attributes found.")
        # 'name' and 'edition' are always needed internally: name keys the
        # result dict, edition is used for version sorting.
        if "name" not in attr:
            attr.append("name")
            query.append(attr_map["name"])
        if "edition" not in attr:
            attr.append("edition")
            query.append(attr_map["edition"])
    else:
        # No attr filter: emit every attribute except description.
        for attr_k, attr_v in attr_map.items():
            if attr_k != "description":
                query.append(attr_v)
    # Description goes last so its multi-line body doesn't interleave with
    # the "key: value" lines; parsed by the descr_marker logic below.
    if attr and "description" in attr or not attr:
        query.append(attr_map["description"])
    # Per-package record separator for the re.split() below.
    query.append("-----\\n")

    cmd = " ".join(cmd)
    # TZ=UTC so BUILDTIME/INSTALLTIME ticks convert consistently.
    call = __salt__["cmd.run_all"](
        cmd + (" --queryformat '{}'".format("".join(query))),
        output_loglevel="trace",
        env={"TZ": "UTC"},
        clean_env=True,
    )
    if call["retcode"] != 0:
        comment = ""
        if "stderr" in call:
            comment += call["stderr"] or call["stdout"]
        raise CommandExecutionError(comment)
    elif "error" in call["stderr"]:
        raise CommandExecutionError(call["stderr"])
    else:
        out = call["stdout"]

    _ret = list()
    # Split the combined output back into per-package records.
    for pkg_info in re.split(r"----*", out):
        pkg_info = pkg_info.strip()
        if not pkg_info:
            continue
        pkg_info = pkg_info.split(os.linesep)
        # Drop a trailing "distribution" line some rpm versions append.
        if pkg_info[-1].lower().startswith("distribution"):
            pkg_info = pkg_info[:-1]

        pkg_data = dict()
        pkg_name = None
        # Once the "description" key is seen, every remaining line belongs
        # to the (multi-line) description body.
        descr_marker = False
        descr = list()
        for line in pkg_info:
            if descr_marker:
                descr.append(line)
                continue
            line = [item.strip() for item in line.split(":", 1)]
            if len(line) != 2:
                continue
            key, value = line
            if key == "description":
                descr_marker = True
                continue
            if key == "name":
                pkg_name = value

            # Convert Unix ticks into ISO time format
            if key in ["build_date", "install_date"]:
                try:
                    pkg_data[key] = (
                        datetime.datetime.utcfromtimestamp(int(value)).isoformat()
                        + "Z"
                    )
                except ValueError:
                    log.warning('Could not convert "%s" into Unix time', value)
                continue

            # Convert Unix ticks into an Integer
            if key in ["build_date_time_t", "install_date_time_t"]:
                try:
                    pkg_data[key] = int(value)
                except ValueError:
                    log.warning('Could not convert "%s" into Unix time', value)
                continue
            if key not in ["description", "name"] and value:
                pkg_data[key] = value
        if attr and "description" in attr or not attr:
            pkg_data["description"] = os.linesep.join(descr)
        if pkg_name:
            pkg_data["name"] = pkg_name
            _ret.append(pkg_data)

    # Force-sort package data by version,
    # pick only latest versions
    # (in case multiple packages installed, e.g. kernel)
    ret = dict()
    for pkg_data in reversed(sorted(_ret, key=lambda x: LooseVersion(x["edition"]))):
        pkg_name = pkg_data.pop("name")
        # Filter out GPG public keys packages
        if pkg_name.startswith("gpg-pubkey"):
            continue
        if pkg_name not in ret:
            if all_versions:
                ret[pkg_name] = [pkg_data.copy()]
            else:
                ret[pkg_name] = pkg_data.copy()
                # edition was only needed for sorting; hide it unless the
                # caller asked for all versions.
                del ret[pkg_name]["edition"]
        elif all_versions:
            ret[pkg_name].append(pkg_data.copy())

    return ret
# Import Salt Testing libs from tests.support.case import ModuleCase from tests.support.unit import skipIf from tests.support.paths import TMP_STATE_TREE from tests.support.helpers import requires_network # Import salt libs from salt.utils.versions import LooseVersion GIT_PYTHON = '0.3.2' HAS_GIT_PYTHON = False try: import git if LooseVersion(git.__version__) >= LooseVersion(GIT_PYTHON): HAS_GIT_PYTHON = True except ImportError: pass class PillarModuleTest(ModuleCase): ''' Validate the pillar module ''' def test_data(self): ''' pillar.data ''' grains = self.run_function('grains.items') pillar = self.run_function('pillar.data')
import os import shutil # Import salt libs import salt.fileserver import salt.utils.files import salt.utils.gzip_util import salt.utils.hashutils import salt.utils.json import salt.utils.path import salt.utils.stringutils from salt.utils.versions import LooseVersion try: import azure.storage if LooseVersion(azure.storage.__version__) < LooseVersion('0.20.0'): raise ImportError('azure.storage.__version__ must be >= 0.20.0') HAS_AZURE = True except (ImportError, AttributeError): HAS_AZURE = False # Import third party libs from salt.ext import six __virtualname__ = 'azurefs' log = logging.getLogger() def __virtual__():
def upgraded(
        name,
        elasticsearch_version,
        blocking=True,
        region=None,
        keyid=None,
        key=None,
        profile=None):
    '''
    Ensures the Elasticsearch domain specified runs on the specified version of
    elasticsearch. Only upgrades are possible as downgrades require a manual snapshot
    and an S3 bucket to store them in.

    Note that this operation is blocking until the upgrade is complete.

    :param str name: The name of the Elasticsearch domain to upgrade.
    :param str elasticsearch_version: String of format X.Y to specify version for
        the Elasticsearch domain eg. "1.5" or "2.3".
    :param bool blocking: Whether to wait for an (already running or newly
        started) upgrade to complete. Default: ``True``.

    .. versionadded:: Natrium

    Example:

    .. code-block:: yaml

        Upgrade Elasticsearch Domain:
          boto3_elasticsearch.upgraded:
          - name: my_domain
          - elasticsearch_version: '7.2'
          - region: eu-west-1
    '''
    # 'oops' is a sentinel result meaning "not yet decided"; every early-exit
    # check below only returns once result has become a real bool, and
    # _check_return_value normalizes whatever is left at the end.
    ret = {'name': name, 'result': 'oops', 'comment': [], 'changes': {}}
    current_domain = None
    res = __salt__['boto3_elasticsearch.describe_elasticsearch_domain'](
        name, region=region, keyid=keyid, key=key, profile=profile)
    if not res['result']:
        ret['result'] = False
        if 'ResourceNotFoundException' in res['error']:
            ret['comment'].append('The Elasticsearch domain "{}" does not exist.'
                                  ''.format(name))
        else:
            ret['comment'].append(res['error'])
    else:
        current_domain = res['response']
        current_version = current_domain['ElasticsearchVersion']
        if elasticsearch_version and current_version == elasticsearch_version:
            # Nothing to do, already at the requested version.
            ret['result'] = True
            ret['comment'].append('The Elasticsearch domain "{}" is already '
                                  'at the desired version {}'
                                  ''.format(name, elasticsearch_version))
        elif LooseVersion(elasticsearch_version) < LooseVersion(current_version):
            # Downgrades are not supported by the AWS API.
            ret['result'] = False
            ret['comment'].append('Elasticsearch domain "{}" cannot be downgraded '
                                  'to version "{}".'
                                  ''.format(name, elasticsearch_version))
    if isinstance(ret['result'], bool):
        return ret
    log.debug('%s :upgraded: Check upgrade in progress', __name__)
    # Check if an upgrade is already in progress
    res = __salt__['boto3_elasticsearch.get_upgrade_status'](
        name, region=region, keyid=keyid, key=key, profile=profile)
    if 'error' in res:
        ret['result'] = False
        ret['comment'].append('Error determining current upgrade status '
                              'of domain "{}": {}'.format(name, res['error']))
        return ret
    if res['response'].get('StepStatus') == 'IN_PROGRESS':
        if blocking:
            # An upgrade is already in progress, wait for it to complete
            res2 = __salt__['boto3_elasticsearch.wait_for_upgrade'](
                name, region=region, keyid=keyid, key=key, profile=profile)
            if 'error' in res2:
                ret['result'] = False
                ret['comment'].append('Error waiting for upgrade of domain '
                                      '"{}" to complete: {}'
                                      ''.format(name, res2['error']))
            elif res2['response'].get('UpgradeName', '').endswith(elasticsearch_version):
                # The in-progress upgrade already targets the requested
                # version; treat it as success.
                ret['result'] = True
                ret['comment'].append('Elasticsearch Domain "{}" is '
                                      'already at version "{}".'
                                      ''.format(name, elasticsearch_version))
        else:
            # We are not going to wait for it to complete, so bail.
            ret['result'] = True
            ret['comment'].append('An upgrade of Elasticsearch domain "{}" '
                                  'is already underway: {}'
                                  ''.format(name, res['response'].get('UpgradeName')))
    if isinstance(ret['result'], bool):
        return ret
    log.debug('%s :upgraded: Check upgrade eligibility', __name__)
    # Check if the domain is eligible for an upgrade
    res = __salt__['boto3_elasticsearch.check_upgrade_eligibility'](
        name, elasticsearch_version, region=region, keyid=keyid, key=key, profile=profile)
    if 'error' in res:
        ret['result'] = False
        ret['comment'].append('Error checking upgrade eligibility for '
                              'domain "{}": {}'.format(name, res['error']))
    elif not res['response']:
        ret['result'] = False
        ret['comment'].append('The Elasticsearch Domain "{}" is not eligible to '
                              'be upgraded to version {}.'
                              ''.format(name, elasticsearch_version))
    else:
        log.debug('%s :upgraded: Start the upgrade', __name__)
        # Start the upgrade
        if __opts__['test']:
            ret['result'] = None
            # Bug fix: the original appended the raw template string without
            # .format(name), so the literal "{}" placeholder was shown to the
            # user in test mode.
            ret['comment'].append('The Elasticsearch version for domain "{}" '
                                  'would have been upgraded.'.format(name))
            ret['changes'] = {'old': current_domain['ElasticsearchVersion'],
                              'new': elasticsearch_version}
        else:
            res = __salt__['boto3_elasticsearch.upgrade_elasticsearch_domain'](
                name, elasticsearch_version, blocking=blocking,
                region=region, keyid=keyid, key=key, profile=profile)
            if 'error' in res:
                ret['result'] = False
                ret['comment'].append('Error upgrading Elasticsearch domain "{}": {}'
                                      ''.format(name, res['error']))
            else:
                ret['result'] = True
                ret['comment'].append('The Elasticsearch domain "{}" has been '
                                      'upgraded to version {}.'
                                      ''.format(name, elasticsearch_version))
                ret['changes'] = {'old': current_domain['ElasticsearchVersion'],
                                  'new': elasticsearch_version}
    ret = _check_return_value(ret)
    return ret