def check_minion_cache(self, preserve_minions=None):
    '''
    Check the minion cache to make sure that old minion data is cleared

    Optionally, pass in a list of minions which should have their caches
    preserved. To preserve all caches, set __opts__['preserve_minion_cache']
    '''
    if preserve_minions is None:
        preserve_minions = []
    keys = self.list_keys()
    minions = []
    for key, val in six.iteritems(keys):
        minions.extend(val)

    if not self.opts.get('preserve_minion_cache', False) or not preserve_minions:
        m_cache = os.path.join(self.opts['cachedir'], self.ACC)
        if os.path.isdir(m_cache):
            for minion in os.listdir(m_cache):
                if minion not in minions and minion not in preserve_minions:
                    shutil.rmtree(os.path.join(m_cache, minion))
        cache = salt.cache.Cache(self.opts)
        clist = cache.list(self.ACC)
        if clist:
            for minion in cache.list(self.ACC):
                if minion not in minions and minion not in preserve_minions:
                    cache.flush('{0}/{1}'.format(self.ACC, minion))
def check_minion_cache(self, preserve_minions=False):
    '''
    Check the minion cache to make sure that old minion data is cleared
    '''
    if not preserve_minions:
        # Normalise the default (False) to an empty list so the membership
        # checks below do not fail on a non-iterable value.
        preserve_minions = []
    keys = self.list_keys()
    minions = []
    for key, val in six.iteritems(keys):
        minions.extend(val)

    m_cache = os.path.join(self.opts['cachedir'], 'minions')
    if os.path.isdir(m_cache):
        for minion in os.listdir(m_cache):
            if minion not in minions:
                shutil.rmtree(os.path.join(m_cache, minion))
    cache = salt.cache.Cache(self.opts)
    clist = cache.list(self.ACC)
    if clist:
        for minion in cache.list(self.ACC):
            if minion not in minions and minion not in preserve_minions:
                cache.flush('{0}/{1}'.format(self.ACC, minion))

    kind = self.opts.get('__role', '')  # application kind
    if kind not in salt.utils.kinds.APPL_KINDS:
        emsg = ("Invalid application kind = '{0}'.".format(kind))
        log.error(emsg + '\n')
        raise ValueError(emsg)
    role = self.opts.get('id', '')
    if not role:
        emsg = ("Invalid id.")
        log.error(emsg + "\n")
        raise ValueError(emsg)
    name = "{0}_{1}".format(role, kind)
    road_cache = os.path.join(self.opts['cachedir'], 'raet', name, 'remote')
    if os.path.isdir(road_cache):
        for road in os.listdir(road_cache):
            root, ext = os.path.splitext(road)
            if ext not in ['.json', '.msgpack']:
                continue
            prefix, sep, name = root.partition('.')
            if not name or prefix != 'estate':
                continue
            path = os.path.join(road_cache, road)
            with salt.utils.fopen(path, 'rb') as fp_:
                if ext == '.json':
                    data = json.load(fp_)
                elif ext == '.msgpack':
                    data = msgpack.load(fp_)
                if data['role'] not in minions:
                    os.remove(path)
def check_minion_cache(self, preserve_minions=None):
    '''
    Check the minion cache to make sure that old minion data is cleared

    Optionally, pass in a list of minions which should have their caches
    preserved. To preserve all caches, set __opts__['preserve_minion_cache']
    '''
    if preserve_minions is None:
        preserve_minions = []
    keys = self.list_keys()
    minions = []
    for key, val in six.iteritems(keys):
        minions.extend(val)

    if not self.opts.get('preserve_minion_cache', False):
        m_cache = os.path.join(self.opts['cachedir'], self.ACC)
        if os.path.isdir(m_cache):
            for minion in os.listdir(m_cache):
                if minion not in minions and minion not in preserve_minions:
                    try:
                        shutil.rmtree(os.path.join(m_cache, minion))
                    except (OSError, IOError) as ex:
                        log.warning(
                            'Key: Delete cache for %s got OSError/IOError: %s \n',
                            minion, ex)
                        continue
        cache = salt.cache.factory(self.opts)
        clist = cache.list(self.ACC)
        if clist:
            for minion in clist:
                if minion not in minions and minion not in preserve_minions:
                    cache.flush('{0}/{1}'.format(self.ACC, minion))
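# A minimal usage sketch (not part of the Salt source above), assuming a master
# opts dict with 'cachedir' set and the default 'localfs' cache driver. It only
# uses calls that appear above: salt.cache.factory(), cache.list() and
# cache.flush(). The bank name 'minions' mirrors self.ACC in the methods above.
import salt.cache


def flush_stale_minion_cache(opts, valid_minions, bank='minions'):
    '''
    Remove cached data for any minion ID not present in ``valid_minions``.
    '''
    cache = salt.cache.factory(opts)
    for minion in cache.list(bank):
        if minion not in valid_minions:
            cache.flush('{0}/{1}'.format(bank, minion))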
def list_domain_cache():
    '''
    List domains that have been cached

    CLI Example:

    .. code-block:: bash

        salt-run venafi.list_domain_cache
    '''
    cache = salt.cache.Cache(__opts__, syspaths.CACHE_DIR)
    return cache.list('venafi/domains')
def list_domain_cache():
    """
    List domains that have been cached

    CLI Example:

    .. code-block:: bash

        salt-run digicert.list_domain_cache
    """
    cache = salt.cache.Cache(__opts__, syspaths.CACHE_DIR)
    return cache.list("digicert/domains")
def ext_pillar(minion_id, pillar, conf):
    """
    Return an existing set of certificates
    """
    cache = salt.cache.Cache(__opts__, syspaths.CACHE_DIR)
    ret = {}
    for dns_name in cache.list("venafi/domains"):
        data = cache.fetch("venafi/domains", dns_name)
        if data["minion_id"] == minion_id:
            ret[dns_name] = data
    return {"venafi": ret}
def ext_pillar(minion_id, pillar, conf):
    '''
    Return an existing set of certificates
    '''
    cache = salt.cache.Cache(__opts__, syspaths.CACHE_DIR)
    ret = {}
    for dns_name in cache.list('venafi/domains'):
        data = cache.fetch('venafi/domains', dns_name)
        if data['minion_id'] == minion_id:
            ret[dns_name] = data
    return {'venafi': ret}
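# Hedged sketch of the producer side for the 'venafi/domains' bank consumed by
# ext_pillar() above. The payload shape beyond 'minion_id' is an assumption;
# the lookup above only requires that each cached entry carries 'minion_id'.
import salt.cache
import salt.syspaths as syspaths


def _cache_domain(opts, dns_name, minion_id, cert_data):
    # cert_data is a hypothetical dict of certificate fields for dns_name
    cache = salt.cache.Cache(opts, syspaths.CACHE_DIR)
    data = dict(cert_data, minion_id=minion_id)
    cache.store('venafi/domains', dns_name, data)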
def list_(bank, cachedir=None):
    '''
    Lists entries stored in the specified bank.

    CLI Example:

    .. code-block:: bash

        salt-run cache.list cloud/active/ec2/myec2 cachedir=/var/cache/salt/
    '''
    if cachedir is None:
        cachedir = __opts__['cachedir']
    try:
        cache = salt.cache.Cache(__opts__, cachedir=cachedir)
    except TypeError:
        # Fall back to the default cachedir if Cache() rejects the kwarg
        cache = salt.cache.Cache(__opts__)
    return cache.list(bank)
def get_minion_data(minion, opts):
    '''
    Get the grains/pillar for a specific minion.  If minion is None, it
    will return the grains/pillar for the first minion it finds.

    Return value is a tuple of the minion ID, grains, and pillar
    '''
    grains = None
    pillar = None
    if opts.get('minion_data_cache', False):
        cache = salt.cache.Cache(opts)
        if minion is None:
            for id_ in cache.list('minions'):
                data = cache.fetch('minions/{0}'.format(id_), 'data')
                if data is None:
                    continue
        else:
            data = cache.fetch('minions/{0}'.format(minion), 'data')
        if data is not None:
            grains = data['grains']
            pillar = data['pillar']
    return minion if minion else None, grains, pillar
def get_minion_data(minion, opts):
    '''
    Get the grains/pillar for a specific minion.  If minion is None, it
    will return the grains/pillar for the first minion it finds.

    Return value is a tuple of the minion ID, grains, and pillar
    '''
    grains = None
    pillar = None
    if opts.get('minion_data_cache', False):
        cache = salt.cache.factory(opts)
        if minion is None:
            for id_ in cache.list('minions'):
                data = cache.fetch('minions/{0}'.format(id_), 'data')
                if data is None:
                    continue
        else:
            data = cache.fetch('minions/{0}'.format(minion), 'data')
        if data is not None:
            grains = data.get('grains', None)
            pillar = data.get('pillar', None)
    return minion if minion else None, grains, pillar
def get_minion_data(minion, opts):
    """
    Get the grains/pillar for a specific minion.  If minion is None, it
    will return the grains/pillar for the first minion it finds.

    Return value is a tuple of the minion ID, grains, and pillar
    """
    grains = None
    pillar = None
    if opts.get("minion_data_cache", False):
        cache = salt.cache.factory(opts)
        if minion is None:
            for id_ in cache.list("minions"):
                data = cache.fetch("minions/{}".format(id_), "data")
                if data is None:
                    continue
        else:
            data = cache.fetch("minions/{}".format(minion), "data")
        if data is not None:
            grains = data.get("grains", None)
            pillar = data.get("pillar", None)
    return minion if minion else None, grains, pillar
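# Usage sketch for get_minion_data() above. salt.config.master_config() is the
# standard config loader; "web01" is a hypothetical minion ID, and data is only
# returned when the master has minion_data_cache enabled and a cache entry
# exists for that minion.
import salt.config

opts = salt.config.master_config("/etc/salt/master")
minion_id, grains, pillar = get_minion_data("web01", opts)
if grains is not None:
    print(grains.get("os"))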
def run_common_cache_tests(subtests, cache):
    bank = "fnord/kevin/stuart"
    # ^^^^ This bank can be just fnord, or fnord/foo, or any mildly reasonable
    # or possibly unreasonably nested names.
    #
    # No. Seriously. Try import string; bank = '/'.join(string.ascii_letters)
    # - it works!
    # import string; bank = "/".join(string.ascii_letters)
    good_key = "roscivs"
    bad_key = "monkey"

    with subtests.test("non-existent bank should be empty on cache start"):
        assert not cache.contains(bank=bank)
        assert cache.list(bank=bank) == []

    with subtests.test("after storing key in bank it should be in cache list"):
        cache.store(bank=bank, key=good_key, data=b"\x01\x04\x05fnordy data")
        assert cache.list(bank) == [good_key]

    with subtests.test("after storing value, it should be fetchable"):
        expected_data = "trombone pleasantry"
        cache.store(bank=bank, key=good_key, data=expected_data)
        assert cache.fetch(bank=bank, key=good_key) == expected_data

    with subtests.test("bad key should still be absent from cache"):
        assert cache.fetch(bank=bank, key=bad_key) == {}

    with subtests.test("storing new value should update it"):
        # Double check that the data was still the old stuff
        old_data = expected_data
        assert cache.fetch(bank=bank, key=good_key) == old_data

        new_data = "stromboli"
        cache.store(bank=bank, key=good_key, data=new_data)
        assert cache.fetch(bank=bank, key=good_key) == new_data

    with subtests.test("storing complex object works"):
        new_thing = {
            "some": "data",
            42: "wheee",
            "some other": {"sub": {"objects": "here"}},
        }

        cache.store(bank=bank, key=good_key, data=new_thing)
        actual_thing = cache.fetch(bank=bank, key=good_key)
        if isinstance(cache, salt.cache.MemCache):
            # MemCache should actually store the object - everything else
            # should create a copy of it.
            assert actual_thing is new_thing
        else:
            assert actual_thing is not new_thing
        assert actual_thing == new_thing

    with subtests.test("contains returns true if key in bank"):
        assert cache.contains(bank=bank, key=good_key)

    with subtests.test("contains returns true if bank exists and key is None"):
        assert cache.contains(bank=bank, key=None)

    with subtests.test(
        "contains returns False when bank not in cache and/or key not in bank"
    ):
        assert not cache.contains(bank=bank, key=bad_key)
        assert not cache.contains(bank="nonexistent", key=good_key)
        assert not cache.contains(bank="nonexistent", key=bad_key)
        assert not cache.contains(bank="nonexistent", key=None)

    with subtests.test("flushing nonexistent key should not remove other keys"):
        cache.flush(bank=bank, key=bad_key)
        assert cache.contains(bank=bank, key=good_key)

    with subtests.test(
        "flushing existing key should not remove bank if no more keys exist"
    ):
        pytest.skip(
            "This is impossible with redis. Should we make localfs behave the same way?"
        )
        cache.flush(bank=bank, key=good_key)
        assert cache.contains(bank=bank)
        assert cache.list(bank=bank) == []

    with subtests.test(
        "after existing key is flushed updated should not return a timestamp for that key"
    ):
        cache.store(bank=bank, key=good_key, data="fnord")
        cache.flush(bank=bank, key=good_key)
        timestamp = cache.updated(bank=bank, key=good_key)
        assert timestamp is None

    with subtests.test(
        "after flushing bank containing a good key, updated should not return a timestamp for that key"
    ):
        cache.store(bank=bank, key=good_key, data="fnord")
        cache.flush(bank=bank, key=None)
        timestamp = cache.updated(bank=bank, key=good_key)
        assert timestamp is None

    with subtests.test("flushing bank with None as key should remove bank"):
        cache.flush(bank=bank, key=None)
        assert not cache.contains(bank=bank)

    with subtests.test("Exception should happen when flushing None bank"):
        # This bit is maybe an accidental API, but currently there is no
        # protection at least with the localfs cache when bank is None. If
        # bank is None we try to `os.path.normpath` the bank, which explodes
        # and is at least the current behavior. If we want to change that
        # this test should change. Or be removed altogether.
        # TODO: this should actually not raise. Not sure if there's a test
        # that we can do here... or just call the code which will fail if
        # there's actually an exception. -W. Werner, 2021-09-28
        pytest.skip(
            "Skipping for now - etcd, redis, and mysql do not raise. Should ensure all backends behave consistently"
        )
        with pytest.raises(Exception):
            cache.flush(bank=None, key=None)

    with subtests.test("Updated for non-existent key should return None"):
        timestamp = cache.updated(bank="nonexistent", key="whatever")
        assert timestamp is None

    with subtests.test("Updated for key should return a reasonable time"):
        before_storage = int(time.time())
        cache.store(bank="fnord", key="updated test part 2", data="fnord")
        after_storage = int(time.time())

        timestamp = cache.updated(bank="fnord", key="updated test part 2")

        assert before_storage <= timestamp <= after_storage

    with subtests.test(
        "If the module raises SaltCacheError then it should make it out of updated"
    ):
        with patch.dict(
            cache.modules._dict,
            {"{}.updated".format(cache.driver): MagicMock(side_effect=SaltCacheError)},
        ), pytest.raises(SaltCacheError):
            cache.updated(bank="kaboom", key="oops")

    with subtests.test(
        "cache.cache right after a value is cached should not update the cache"
    ):
        expected_value = "some cool value yo"
        cache.store(bank=bank, key=good_key, data=expected_value)
        result = cache.cache(
            bank=bank,
            key=good_key,
            fun=lambda **kwargs: "bad bad value no good",
            value="some other value?",
            loop_fun=lambda x: "super very no good bad",
        )
        fetch_result = cache.fetch(bank=bank, key=good_key)

        assert result == fetch_result == expected_value

    with subtests.test(
        "cache.cache should update the value with the result of fun when value was updated longer than expiration",
    ), patch(
        "salt.cache.Cache.updated",
        return_value=42,  # Dec 31, 1969... time to update the cache!
        autospec=True,
    ):
        expected_value = "this is the return value woo woo woo"
        cache.store(bank=bank, key=good_key, data="not this value")
        cache_result = cache.cache(
            bank=bank, key=good_key, fun=lambda *args, **kwargs: expected_value
        )
        fetch_result = cache.fetch(bank=bank, key=good_key)

        assert cache_result == fetch_result == expected_value

    with subtests.test(
        "cache.cache should update the value with all of the outputs from loop_fun if loop_fun was provided",
    ), patch(
        "salt.cache.Cache.updated",
        return_value=42,
        autospec=True,
    ):
        expected_value = "SOME HUGE STRING OKAY?"
        cache.store(bank=bank, key=good_key, data="nope, not me")
        cache_result = cache.cache(
            bank=bank,
            key=good_key,
            fun=lambda **kwargs: "some huge string okay?",
            loop_fun=str.upper,
        )
        fetch_result = cache.fetch(bank=bank, key=good_key)

        assert cache_result == fetch_result
        assert "".join(fetch_result) == expected_value

    with subtests.test(
        "cache.cache should update the value if the stored value is empty but present and expiry is way in the future"
    ), patch(
        "salt.cache.Cache.updated",
        return_value=time.time() * 2,
        autospec=True,
    ):
        # Unclear if this was intended behavior: currently any falsey data will
        # be updated by cache.cache. If this is incorrect, this test should
        # be updated or removed.
        expected_data = "some random string whatever"
        for empty in ("", (), [], {}, 0, 0.0, False, None):
            with subtests.test(empty=empty):
                cache.store(
                    bank=bank, key=good_key, data=empty
                )  # empty chairs and empty data
                cache_result = cache.cache(
                    bank=bank, key=good_key, fun=lambda **kwargs: expected_data
                )
                fetch_result = cache.fetch(bank=bank, key=good_key)

                assert cache_result == fetch_result == expected_data

    with subtests.test("cache.cache should store a value if it does not exist"):
        expected_result = "some result plz"
        cache.flush(bank=bank, key=None)
        assert cache.fetch(bank=bank, key=good_key) == {}

        cache_result = cache.cache(
            bank=bank, key=good_key, fun=lambda **kwargs: expected_result
        )
        fetch_result = cache.fetch(bank=bank, key=good_key)

        assert cache_result == fetch_result
        assert fetch_result == expected_result
        assert cache_result == fetch_result == expected_result
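# Hedged sketch of a pytest fixture that could drive run_common_cache_tests()
# with the default localfs driver. The fixture and test names are assumptions,
# and a real functional test would normally build a fuller master opts dict
# (the loader behind salt.cache.factory() may expect more keys than shown).
import pytest
import salt.cache


@pytest.fixture
def cache(tmp_path, master_opts):
    # master_opts is assumed to come from the test suite's config fixtures;
    # only the cache driver and cache directory are overridden here.
    opts = dict(master_opts, cache="localfs", cachedir=str(tmp_path))
    return salt.cache.factory(opts)


def test_caching(subtests, cache):
    run_common_cache_tests(subtests, cache)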
def load_cache(pool, __runner__, opts, tgt, tgt_type=None):
    '''
    Load the Pillar and Grain cache, as required, and merge the cached Grains
    and Pillar into the Roster data.
    '''
    if opts.get('grains'):
        for device, device_opts in six.iteritems(pool):
            if 'minion_opts' not in device_opts:
                device_opts['minion_opts'] = {}
            if 'grains' not in device_opts['minion_opts']:
                device_opts['minion_opts']['grains'] = {}
            device_opts['minion_opts']['grains'] = salt.utils.dictupdate.merge(
                opts['grains'],
                device_opts['minion_opts']['grains'],
                merge_lists=True,
            )
    if tgt_type in ('glob', 'pcre', 'list'):
        # When the target type is glob, pcre, or list, we don't require grains
        # or pillar loaded from the cache, because the targeting won't depend
        # on those.
        return pool
    if not opts.get('use_cached_grains', True) and not opts.get(
            'use_cached_pillar', True):
        return pool
    # NOTE: It wouldn't be feasible to use the cache.grains or cache.pillar
    # Runners as they rely on fetching data from the Master, for Minions that
    # are accepted. What we're doing here is reading straight from the cache.
    log.debug('Loading cached and merging into the Roster data')
    cache = salt.cache.factory(opts)
    cache_pool = cache.list('minions')
    for device in cache_pool:
        if device not in pool:
            log.trace('%s has cache, but is not in the Roster pool', device)
            continue
        if 'minion_opts' not in pool[device]:
            pool[device]['minion_opts'] = {'grains': {}, 'pillar': {}}
        cache_key = 'minions/{}/data'.format(device)
        if opts.get('target_use_cached_grains', True) and tgt_type in (
            'compound',
            'grain',
            'grain_pcre',
            'nodegroup',
        ):
            log.debug('Fetching cached Grains for %s', device)
            cached_grains = cache.fetch(cache_key, 'grains')
            if cached_grains:
                pool[device]['minion_opts']['grains'] = salt.utils.dictupdate.merge(
                    cached_grains,
                    pool[device]['minion_opts'].get('grains', {}),
                    merge_lists=True,
                )
        if opts.get('target_use_cached_pillar', True) and tgt_type in (
            'compound',
            'pillar',
            'pillar_pcre',
            'pillar_target',
            'nodegroup',
        ):
            log.debug('Fetching cached Pillar for %s', device)
            cached_pillar = cache.fetch(cache_key, 'pillar')
            if cached_pillar:
                pool[device]['minion_opts']['pillar'] = salt.utils.dictupdate.merge(
                    cached_pillar,
                    pool[device]['minion_opts'].get('pillar', {}),
                    merge_lists=True,
                )
    log.debug('The device pool with the cached data')
    log.debug(pool)
    return pool
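# Usage sketch for load_cache() above. The pool layout and the 'router1' device
# are hypothetical; __runner__ and __opts__ are the dunders normally injected
# into a roster module (__runner__ is not used by the code shown above).
pool = {
    'router1': {'minion_opts': {'grains': {}, 'pillar': {}}},
}
pool = load_cache(pool, __runner__, __opts__, tgt='os:junos', tgt_type='grain')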