def set_cache(
    cache,
    key,
    value,
    value_update=False,
    extend=False,
):
    """Set a cached item and return the value now stored under `key`.

    :param cache: Cached access object.
    :type cache: Object
    :param key: Key for the cached item.
    :type key: String
    :param value: Value for the cached item.
    :type value: ANY
    :param value_update: Instructs the method to update a Dictionary
                         with another dictionary.
    :type value_update: Boolean
    :param extend: Enable|Disable Extend a map
    :type extend: Boolean
    :returns: ANY -- the value held in the cache under `key`.
    """
    if value_update:
        # Merge the currently stored mapping with the new one so the
        # write below replaces the key with the combined result.
        orig = cache.get(key, default=dict())
        value = utils.merge_dict(orig, value, extend=extend)

    # setdefault stores `value` when the key is absent and always returns
    # the value now held under `key`. A second setdefault call with the
    # same key can only return that same object, so the original "retry
    # when falsy" branch was redundant and has been removed.
    return cache.setdefault(key, value)
def test_merge_dict_extend(self):
    """Verify merge_dict deep-merges every container type by default."""
    base = {
        "dict": {"a": "test", "b": {"int1": 1}},
        "list": ["a"],
        "str": "a",
        "int": 1,
        "tuple": ("a",),
        "set": {"a"},
    }
    overlay = {
        "dict": {"b": {"int2": 2}, "c": "test2"},
        "list": ["b"],
        "key": "value",
        "tuple": ("b",),
        "set": {"b"},
    }
    expected = {
        "dict": {"a": "test", "b": {"int1": 1, "int2": 2}, "c": "test2"},
        "int": 1,
        "key": "value",
        "list": ["a", "b"],
        "set": {"a", "b"},
        "str": "a",
        "tuple": ("a", "b"),
    }
    self.assertEqual(utils.merge_dict(base=base, new=overlay), expected)
def client(self, cache, job):
    """Run cache echo command operation.

    Loads every JSON file under `config_path` matching `config_pattern`,
    merges the result with `config_overrides` (overrides win), and stores
    the merged mapping in the cache under the "configs" key. When the
    path does not exist the step is skipped and an empty dict is used.

    :param cache: Caching object used to template items within a command.
    :type cache: Object
    :param job: Information containing the original job specification.
    :type job: Dictionary
    :returns: tuple
    """
    # Set parameters
    config_path = job["config_path"]
    config_pattern = job["config_pattern"]
    config_overrides = job["config_overrides"]

    if os.path.exists(config_path):
        # Generate dict from JSON files that match the search pattern,
        # keyed on the file name without its extension.
        matched_configs = glob.glob(
            os.path.join(config_path, config_pattern)
        )
        config_dict = {}
        for mc in matched_configs:
            name = os.path.splitext(os.path.basename(mc))[0]
            config = json.loads(self._slurp(mc))
            self.log.debug(
                "Job [ %s ] Config found for %s: %s",
                job["job_id"],
                name,
                config,
            )
            # Direct assignment instead of update({...}): same effect,
            # no throwaway dict.
            config_dict[name] = config

        # Merge the config dict with given overrides
        configs = utils.merge_dict(config_dict, config_overrides)
        self.set_cache(
            cache=cache,
            key="configs",
            value=configs,
            value_update=False,
        )
    else:
        # Fixed log grammar: "does not exists" -> "does not exist".
        self.log.debug(
            "Job [ %s ] %s does not exist, skipping step",
            job["job_id"],
            config_path,
        )
        configs = dict()

    return configs, None, True, None
def test_merge_dict_no_extend(self):
    """With extend disabled, new values replace base values wholesale."""
    base = {
        "dict": {"a": "test", "b": {"int1": 1}},
        "list": ["a"],
        "str": "a",
        "int": 1,
    }
    overlay = {
        "dict": {"b": {"int2": 2}, "c": "test2"},
        "list": ["b"],
        "key": "value",
    }
    expected = {
        "dict": {"b": {"int2": 2}, "c": "test2"},
        "int": 1,
        "key": "value",
        "list": ["b"],
        "str": "a",
    }
    result = utils.merge_dict(base=base, new=overlay, extend=False)
    self.assertEqual(result, expected)
def test_merge_dict_list_no_extend(self):
    """With extend disabled a new list replaces the base list."""
    original = ["a"]
    replacement = ["b"]
    result = utils.merge_dict(base=original, new=replacement, extend=False)
    self.assertEqual(result, ["b"])
def bootstrap_cluster(self, run_indicator=None):
    """Run a cluster wide bootstrap using a catalog file.

    Cluster bootstrap requires a catalog file to run. Catalogs are
    broken up into two sections, `directord_server` and
    `directord_client`. All servers are processed serially and first.
    All clients are processing in parallel using a maximum of the
    threads argument.

    :param run_indicator: Enable | disable the run indicator
    :type run_indicator: Boolean
    :returns: Tuple
    """
    q = self.get_queue()
    catalog = dict()
    if not self.catalog:
        raise SystemExit("No catalog was defined.")

    # Fold every loaded catalog file into a single catalog mapping.
    for c in self.catalog:
        utils.merge_dict(base=catalog, new=yaml.safe_load(c))

    if run_indicator is None:
        # No explicit choice made by the caller; fall back to debug flag.
        run_indicator = self.debug

    with directord.Spinner(run=run_indicator, queue=q) as indicator:
        self.indicator = indicator
        directord_server = catalog.get("directord_server")
        if directord_server:
            self.log.debug("Loading server information")
            # Servers are bootstrapped serially, one entry at a time; the
            # current entry is also recorded under "directord_bootstrap".
            for s in self.bootstrap_catalog_entry(
                entry=directord_server, required_entries=["targets"]
            ):
                s["key_file"] = self.key_file
                catalog["directord_bootstrap"] = s
                self.bootstrap_run(job_def=s, catalog=catalog)

        directord_clients = catalog.get("directord_clients")
        if directord_clients:
            self.log.debug("Loading client information")
            # Client entries are queued here and consumed in parallel by
            # the worker threads created below.
            for c in self.bootstrap_catalog_entry(entry=directord_clients):
                c["key_file"] = self.key_file
                q.put(c)

        threads = list()
        for _ in range(self.threads):
            threads.append(
                (
                    self.thread(
                        target=self.bootstrap_q_processor,
                        args=(q, catalog),
                    ),
                    True,
                )
            )
        else:
            # NOTE(review): for/else with no break in the loop body means
            # this else clause always runs once the loop completes -- it
            # behaves like a plain statement after the loop. Presumably
            # intentional shorthand; confirm before restructuring.
            self.run_threads(threads=threads)

        # Drain the return queue non-blockingly; get_nowait raises once
        # the queue is empty, which is treated as best-effort and ignored.
        targets = set()
        while not self.return_queue.empty():
            try:
                targets.add(self.return_queue.get_nowait())
            except Exception:
                pass

    return tuple(sorted(targets))