def test_topological_sort(self) -> None:
    """
    Test that topological sort works properly.
    """
    # tuple convention: (outgoing, incoming)
    graph = {
        "1": (["4"], []),
        "2": (["4"], []),
        "3": (["5", "6"], []),
        "4": (["7", "5"], ["1", "2"]),
        "5": (["8"], ["4", "3"]),
        "6": ([], ["3"]),
        "7": (["8"], ["4"]),
        "8": ([], ["7", "5"])
    }  # type: Dict[str, Tuple[List[str], List[str]]]
    self.assertEqual(topological_sort(graph, ["1", "2", "3"]),
                     ["1", "2", "3", "4", "6", "7", "5", "8"])
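# A minimal sketch of how topological_sort might be implemented (Kahn's
# algorithm with a FIFO queue), assuming the (outgoing, incoming) tuple
# convention used in the test above. The real implementation may differ;
# topological_sort_sketch is a hypothetical name, not part of the module.
from collections import deque
from typing import Dict, List, Tuple


def topological_sort_sketch(graph: Dict[str, Tuple[List[str], List[str]]],
                            starting_nodes: List[str]) -> List[str]:
    # Number of unvisited predecessors for each node.
    indegree = {node: len(edges[1]) for node, edges in graph.items()}
    queue = deque(starting_nodes)
    output = []  # type: List[str]
    while queue:
        node = queue.popleft()
        output.append(node)
        # Visiting a node removes its outgoing edges; successors whose
        # predecessors are all visited become ready.
        for succ in graph[node][0]:
            indegree[succ] -= 1
            if indegree[succ] == 0:
                queue.append(succ)
    return output
# With the graph from the test, this sketch reproduces the expected order
# ["1", "2", "3", "4", "6", "7", "5", "8"].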
def combine_configs(configs: Iterable[dict]) -> dict:
    """
    Combine the given list of *unpacked* configs into a single config.
    Later configs in the list will override the earlier configs.

    :param configs: List of configs.
    :return: A loaded config dictionary.
    """
    expanded_config_reduce = reduce(update_and_expand_meta, configs, {})  # type: dict
    expanded_config = deepdict(expanded_config_reduce)  # type: dict
    expanded_config_orig = deepdict(expanded_config)  # type: dict

    # Now, we need to handle lazy* metas.
    lazy_metas = {}
    meta_dict_keys = list(expanded_config.keys())
    meta_keys = list(filter(lambda k: k.endswith("_meta"), meta_dict_keys))

    # Graph to keep track of which lazy settings depend on others.
    # key1 -> key2 means key2 depends on key1
    graph = {}  # type: Dict[str, Tuple[List[str], List[str]]]

    meta_len = len("_meta")
    for meta_key in meta_keys:
        setting = meta_key[:-meta_len]  # type: str
        lazy_meta_type = expanded_config[meta_key]  # type: str
        assert lazy_meta_type.startswith("lazy"), "Should have only lazy metas left now"

        # Create lazy_metas without the lazy part.
        # e.g. what used to be a lazysubst just becomes a plain subst since everything is fully resolved now.
        meta_type = lazy_meta_type[len("lazy"):]
        lazy_metas[meta_key] = meta_type
        lazy_metas[setting] = expanded_config[setting]  # copy over the template too

        # Build the graph of which lazy settings depend on what.
        # Always ensure that this lazy setting's node exists even if it has no dependencies.
        if setting not in graph:
            graph[setting] = ([], [])
        for target_var in get_meta_directives()[meta_type].target_settings(setting, expanded_config[setting]):
            # Make sure the order in which we delete doesn't affect this
            # search, since expanded_config might have some deleted stuff.
            if target_var + "_meta" in expanded_config_orig:
                # Add a dependency for target -> this setting
                if target_var not in graph:
                    graph[target_var] = ([], [])
                graph[target_var][0].append(setting)
                graph[setting][1].append(target_var)
            else:
                # The target setting that this depends on is not a lazy setting.
                pass

        # Delete from expanded_config
        del expanded_config[meta_key]
        del expanded_config[setting]

    if len(graph) > 0:
        # Find all the starting nodes (no incoming edges).
        starting_nodes = list(map(lambda key_val: key_val[0],
                                  filter(lambda key_val: len(key_val[1][1]) == 0, graph.items())))
        # Sort starting nodes for determinism.
        starting_nodes = sorted(starting_nodes)

        if len(starting_nodes) == 0:
            raise ValueError("There appears to be a loop of lazy settings")

        # List of settings to expand first according to topological sort.
        settings_ordered = topological_sort(graph, starting_nodes)  # type: List[str]

        def combine_meta(config_dict: dict, meta_setting: str) -> dict:
            # Merge in the metas in the given order.
            return update_and_expand_meta(config_dict, {
                meta_setting: lazy_metas[meta_setting],
                meta_setting + "_meta": lazy_metas[meta_setting + "_meta"]
            })

        final_dict = reduce(combine_meta, settings_ordered, expanded_config)  # type: dict
    else:
        final_dict = deepdict(expanded_config)

    # Remove any temporary keys.
    for key in HammerDatabase.internal_keys():
        if key in final_dict:
            del final_dict[key]

    return final_dict
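# A hedged usage sketch (hypothetical keys and values), assuming "lazysubst"
# behaves like the subst meta with ${...} references whose resolution is
# deferred until all configs have been combined. Not part of the real API;
# _combine_configs_example is an illustrative helper only.
def _combine_configs_example() -> None:
    combined = combine_configs([
        {"foo.flash": "yes"},
        {"foo.pipeline": "${foo.flash}board",
         "foo.pipeline_meta": "lazysubst"},
    ])
    # The lazy substitution is resolved only after both configs are merged,
    # so "foo.pipeline" should expand against the final value of "foo.flash".
    assert combined["foo.pipeline"] == "yesboard"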