def combine_configs(configs: Iterable[dict]) -> dict:
    """
    Combine the given list of *unpacked* configs into a single config.
    Later configs in the list will override the earlier configs.

    dynamic* meta directives (e.g. dynamicsubst) are deferred during the
    initial reduce and expanded here in a second pass, once all non-dynamic
    settings have been bound.

    :param configs: List of configs.
    :return: A loaded config dictionary.
    """
    # First pass: fold all configs together, expanding non-dynamic metas.
    expanded_config_reduce = reduce(update_and_expand_meta, configs, {})  # type: dict
    expanded_config = deepdict(expanded_config_reduce)  # type: dict
    # Pristine copy used for lookups below, since expanded_config gets keys deleted.
    expanded_config_orig = deepdict(expanded_config)  # type: dict

    # Now, we need to handle dynamic* metas.
    dynamic_metas = {}
    meta_dict_keys = list(expanded_config.keys())
    meta_keys = list(filter(lambda k: k.endswith("_meta"), meta_dict_keys))

    meta_len = len("_meta")
    for meta_key in meta_keys:
        setting = meta_key[:-meta_len]  # type: str
        meta_type = expanded_config[meta_key]  # type: str
        assert meta_type.startswith("dynamic"), "Should have only dynamic metas left now"
        # Create dynamic_metas without the dynamic part.
        # e.g. what used to be a dynamicsubst just becomes a plain subst since everything is fully resolved now.
        dynamic_metas[meta_key] = meta_type[len("dynamic"):]
        dynamic_metas[setting] = expanded_config[setting]  # copy over the template too

        # Just check that we don't reference any other dynamicsubst variables for now.
        # We can always go to a DAG tree later if need be.
        if meta_type == "dynamicsubst":
            matches = re.finditer(__VARIABLE_EXPANSION_REGEX, expanded_config[setting], re.DOTALL)
            for match in matches:
                target_var = match.group(1)
                # Ensure that the target variable isn't also a dynamicsubst variable.
                # Check against the pristine copy so that the order in which we
                # delete keys below doesn't affect this search.
                if target_var + "_meta" in expanded_config_orig:
                    raise ValueError("dynamicsubst variable referencing another dynamic variable not supported yet")

        # Delete from expanded_config
        del expanded_config[meta_key]
        del expanded_config[setting]

    # Second pass: apply the (now non-dynamic) metas on top of the resolved config.
    final_dict = update_and_expand_meta(expanded_config, dynamic_metas)

    # Remove the temporary key used for path metas.
    if CONFIG_PATH_KEY in final_dict:
        del final_dict[CONFIG_PATH_KEY]

    return final_dict
def add_drc_lvs_decks(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict augmented with dummy DRC and LVS rule decks.

    :param in_dict: Input tech schema.
    :return: Tech schema with "drc decks" and "lvs decks" entries added.
    """
    # (tool, deck) pairs shared by both the DRC and LVS deck lists.
    deck_specs = [
        ("hammer", "a_nail"),
        ("chisel", "some_wood"),
        ("hammer", "head_shark"),
    ]
    result = deepdict(in_dict)
    for kind in ("drc", "lvs"):
        result["{0} decks".format(kind)] = [
            {
                "tool name": tool,
                "deck name": deck,
                "path": "/path/to/{0}/{1}.{2}.rules".format(tool, deck, kind)
            }
            for tool, deck in deck_specs
        ]
    return result
def add_special_cells(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict with a list of dummy special cells installed.

    :param in_dict: Input tech schema.
    :return: Tech schema with a "special_cells" entry added.
    """
    result = deepdict(in_dict)
    result["special_cells"] = [
        {"name": "cell1", "cell_type": "tiehicell"},
        {"name": "cell2", "cell_type": "tiehicell", "size": 1.5},
        {"name": "cell3", "cell_type": "iofiller", "size": 0.5},
        {"name": "cell4", "cell_type": "stdfiller"},
        {"name": "cell5", "cell_type": "endcap"},
    ]
    return result
def add_named_library(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict whose library list gains a named
    milkyway-techfile library.

    :param in_dict: Input tech schema.
    :return: Tech schema with one extra library appended.
    """
    result = deepdict(in_dict)
    extra_lib = {
        "name": "abcdef",
        "milkyway techfile": "test/abcdef.tf"
    }
    result["libraries"] = result["libraries"] + [extra_lib]
    return result
def export_config_outputs(self) -> Dict[str, Any]:
    """
    Export this place-and-route tool's outputs on top of the parent
    class's exported outputs.

    :return: Dict of output settings under the "par.outputs" namespace.
    """
    result = deepdict(super().export_config_outputs())
    # ILMs are serialized via their to_setting() representation and appended
    # (rather than replaced) when merged, hence the *_meta key.
    result["par.outputs.output_ilms"] = [ilm.to_setting() for ilm in self.output_ilms]
    result["par.outputs.output_ilms_meta"] = "append"
    result["par.outputs.output_gds"] = str(self.output_gds)
    result["par.outputs.output_netlist"] = str(self.output_netlist)
    result["par.outputs.power_nets"] = list(self.power_nets)
    result["par.outputs.ground_nets"] = list(self.ground_nets)
    result["par.outputs.hcells_list"] = list(self.hcells_list)
    return result
def add_duplicates(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict with two differently-named libraries that point
    at the same GDS file (for duplicate-detection tests).

    :param in_dict: Input tech schema.
    :return: Tech schema with two extra libraries appended.
    """
    result = deepdict(in_dict)
    for lib_name in ("abcdef", "abcdef2"):
        result["libraries"].append({
            "name": lib_name,
            "gds file": "test/abcdef.gds"
        })
    return result
def add_hier(d: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of d configured for a manual hierarchical flow rooted at
    top_module, with a dummy placement constraint for every module and
    per-module setting overrides for mod1 and m2s1.

    :param d: Base project config.
    :return: Config with vlsi.inputs.hierarchical.* settings filled in.
    """
    result = deepdict(d)
    # One shared dummy placement constraint, serialized to its dict form.
    dummy_placement = PlacementConstraint(
        path="dummy",
        type=PlacementConstraintType.Dummy,
        x=Decimal("0"),
        y=Decimal("0"),
        width=Decimal("10"),
        height=Decimal("10"),
        master=None,
        create_physical=None,
        orientation=None,
        margins=None,
        top_layer=None,
        layers=None,
        obs_types=None).to_dict()
    result.update({
        "vlsi.inputs.default_output_load": 1,
        "vlsi.inputs.hierarchical.top_module": top_module,
        "vlsi.inputs.hierarchical.flat": "hierarchical",
        "vlsi.inputs.hierarchical.config_source": "manual",
        "vlsi.inputs.hierarchical.manual_modules": [{
            "mod1": ["m1s1", "m1s2"],
            "mod2": ["m2s1"],
            top_module: ["mod1", "mod2"]
        }],
        # Every module in the hierarchy gets the same dummy placement.
        "vlsi.inputs.hierarchical.manual_placement_constraints": [
            {module: [dummy_placement]}
            for module in ("mod1", "mod2", "m1s1", "m1s2", "m2s1", top_module)
        ],
        "vlsi.inputs.hierarchical.constraints": [
            {"mod1": [{"vlsi.inputs.default_output_load": 2}]},
            {"m2s1": [{"vlsi.inputs.default_output_load": 3}]}
        ]
    })
    return result
def get_full_config(driver: HammerDriver, output: dict) -> dict:
    """
    Get the full configuration by combining the project config from the
    driver with the given output dict (i.e. it contains only
    "synthesis.output.blah") that we want to combine with the project config.

    :param driver: HammerDriver that has the full project config.
    :param output: Output dict containing specific settings we want to add
                   to the full project config.
    :return: Full project config combined with the output dict.
    :raises ValueError: If output is not a valid output-only config.
    """
    is_complete_key = "vlsi.builtins.is_complete"
    # An output-only fragment must carry the marker key, set to a falsy value.
    if is_complete_key not in output:
        raise ValueError(
            "Output-only config does not appear to be output only")
    if bool(output[is_complete_key]):
        raise ValueError("Output-only config claims it is complete")

    merged = deepdict(driver.project_config)
    merged.update(deepdict(output))
    # Merged configs are always complete, so drop the marker.
    if is_complete_key in merged:
        del merged[is_complete_key]
    return merged
def add_hier(d: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of d with minimal manual hierarchical settings rooted at
    top_module and no placement constraints or per-module overrides.

    :param d: Base project config.
    :return: Config with vlsi.inputs.hierarchical.* settings filled in.
    """
    result = deepdict(d)
    result.update({
        "vlsi.inputs.default_output_load": 1,
        "vlsi.inputs.hierarchical.top_module": top_module,
        "vlsi.inputs.hierarchical.flat": "hierarchical",
        "vlsi.inputs.hierarchical.config_source": "manual",
        "vlsi.inputs.hierarchical.manual_modules": [{
            "mod1": ["m1s1", "m1s2"],
            "mod2": ["m2s1"],
            top_module: ["mod1", "mod2"]
        }],
        "vlsi.inputs.hierarchical.manual_placement_constraints": [],
        "vlsi.inputs.hierarchical.constraints": []
    })
    return result
def add_tarballs(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Helper method to take an input .tech.json and transform it for tarball
    tests: the source installs are replaced by a single tarball, and the
    libraries are replaced by a single library that lives inside it.

    :param in_dict: Input tech schema
    :return: Output tech schema for tarball tests
    """
    result = deepdict(in_dict)
    # Installs and tarballs are mutually exclusive in this test scenario.
    result.pop("installs")
    result["tarballs"] = [{
        "path": "foobar.tar.gz",
        "homepage": "http://www.example.com/tarballs",
        "base var": "technology.dummy28.tarball_dir"
    }]
    result["libraries"] = [{
        "name": "abcdef",
        "gds file": "foobar.tar.gz/test.gds"
    }]
    return result
def add_lib_with_lef(d: Dict[str, Any]) -> Dict[str, Any]:
    # Write a minimal vendor LEF file into tech_dir so that the library
    # appended below points at a file that actually exists on disk.
    # NOTE(review): tech_dir is a name from the enclosing scope — presumably
    # a test-fixture directory; confirm against the caller.
    with open(os.path.join(tech_dir, 'my_vendor_lib.lef'), 'w') as f:
        f.write("""VERSION 5.8 ;
BUSBITCHARS "[]" ;
DIVIDERCHAR "/" ;
MACRO my_awesome_macro
CLASS BLOCK ;
ORIGIN -0.435 607.525 ;
FOREIGN my_awesome_macro 0.435 -607.525 ;
SIZE 810.522 BY 607.525 ;
SYMMETRY X Y R90 ;
END my_awesome_macro
END LIBRARY
""")
    # Return a copy of d with the new LEF-bearing library registered.
    r = deepdict(d)
    r['libraries'].append({
        'name': 'my_vendor_lib',
        'lef file': 'test/my_vendor_lib.lef'
    })
    return r
def env_vars(self) -> Dict[str, str]:
    """Environment variables for this tool: the parent's plus extras (none yet)."""
    extra_vars = {}  # TODO: stuffs
    result = deepdict(super().env_vars)
    result.update(extra_vars)
    return result
def combine_configs(configs: Iterable[dict]) -> dict:
    """
    Combine the given list of *unpacked* configs into a single config.
    Later configs in the list will override the earlier configs.

    lazy* meta directives are deferred during the initial reduce; here they
    are expanded last, in topological order of their inter-dependencies.

    :param configs: List of configs.
    :return: A loaded config dictionary.
    :raises ValueError: If the lazy settings form a dependency cycle.
    """
    # First pass: fold all configs together, expanding non-lazy metas.
    expanded_config_reduce = reduce(update_and_expand_meta, configs, {})  # type: dict
    expanded_config = deepdict(expanded_config_reduce)  # type: dict
    # Pristine copy used for lookups below, since expanded_config gets keys deleted.
    expanded_config_orig = deepdict(expanded_config)  # type: dict

    # Now, we need to handle lazy* metas.
    lazy_metas = {}
    meta_dict_keys = list(expanded_config.keys())
    meta_keys = list(filter(lambda k: k.endswith("_meta"), meta_dict_keys))

    # Graph to keep track of which lazy settings depend on others.
    # key1 -> key2 means key2 depends on key1
    # Each node maps to (outgoing edges, incoming edges).
    graph = {}  # type: Dict[str, Tuple[List[str], List[str]]]

    meta_len = len("_meta")
    for meta_key in meta_keys:
        setting = meta_key[:-meta_len]  # type: str
        lazy_meta_type = expanded_config[meta_key]  # type: str
        assert lazy_meta_type.startswith(
            "lazy"), "Should have only lazy metas left now"
        # Create lazy_metas without the lazy part.
        # e.g. what used to be a lazysubst just becomes a plain subst since everything is fully resolved now.
        meta_type = lazy_meta_type[len("lazy"):]
        lazy_metas[meta_key] = meta_type
        lazy_metas[setting] = expanded_config[
            setting]  # copy over the template too

        # Build the graph of which lazy settings depend on what.
        # Always ensure that this lazy setting's node exists even if it has no dependencies.
        if setting not in graph:
            graph[setting] = ([], [])
        for target_var in get_meta_directives()[meta_type].target_settings(
                setting, expanded_config[setting]):
            # Make sure the order in which we delete doesn't affect this
            # search, since expanded_config might have some deleted stuff.
            if target_var + "_meta" in expanded_config_orig:
                # Add a dependency for target -> this setting
                if target_var not in graph:
                    graph[target_var] = ([], [])
                graph[target_var][0].append(setting)
                graph[setting][1].append(target_var)
            else:
                # The target setting that this depends on is not a lazy setting.
                pass

        # Delete from expanded_config
        del expanded_config[meta_key]
        del expanded_config[setting]

    if len(graph) > 0:
        # Find all the starting nodes (no incoming edges).
        starting_nodes = list(
            map(lambda key_val: key_val[0],
                filter(lambda key_val: len(key_val[1][1]) == 0,
                       graph.items())))
        # Sort starting nodes for determinism.
        starting_nodes = sorted(starting_nodes)

        if len(starting_nodes) == 0:
            raise ValueError("There appears to be a loop of lazy settings")

        # List of settings to expand first according to topological sort.
        settings_ordered = topological_sort(graph, starting_nodes)  # type: List[str]

        def combine_meta(config_dict: dict, meta_setting: str) -> dict:
            # Merge in the metas in the given order.
            return update_and_expand_meta(
                config_dict, {
                    meta_setting: lazy_metas[meta_setting],
                    meta_setting + "_meta": lazy_metas[meta_setting + "_meta"]
                })

        final_dict = reduce(combine_meta, settings_ordered, expanded_config)  # type: dict
    else:
        # No lazy settings at all — nothing left to expand.
        final_dict = deepdict(expanded_config)

    # Remove any temporary keys.
    for key in HammerDatabase.internal_keys():
        if key in final_dict:
            del final_dict[key]

    return final_dict
def update_and_expand_meta(config_dict: dict, meta_dict: dict) -> dict:
    """
    Expand the meta directives for the given config dict and return a new
    dictionary containing the updated settings with respect to the base config_dict.

    Non-lazy directives are executed immediately; lazy* directives are
    stored into the result untouched for later expansion (see
    combine_configs), with special handling when a lazy setting references
    itself. dynamic* directives are rejected (renamed to lazy* in issue #134).

    :param config_dict: Base config.
    :param meta_dict: Dictionary with potentially new meta directives.
    :return: New dictionary with meta_dict updating config_dict.
    :raises ValueError: On malformed or unsupported meta directives.
    """
    assert isinstance(config_dict, dict)
    assert isinstance(meta_dict, dict)

    newdict = deepdict(config_dict)

    # Find meta directives.
    meta_dict = deepdict(meta_dict)  # create a copy so we can remove items.
    meta_dict_keys = list(meta_dict.keys())
    meta_keys = filter(lambda k: k.endswith("_meta"), meta_dict_keys)

    # Deal with meta directives.
    meta_len = len("_meta")
    for meta_key in meta_keys:
        setting = meta_key[:-meta_len]
        # A *_meta value may be a single directive or an ordered list of them.
        meta_type_from_dict = meta_dict[
            meta_key]  # type: Union[str, List[str]]
        meta_directives = []  # type: List[str]
        if isinstance(meta_type_from_dict, str):
            meta_directives = [meta_type_from_dict]
        else:
            if not isinstance(meta_type_from_dict, list):
                raise ValueError(
                    "A meta directive must either be a string or a list of strings"
                )
            meta_directives = meta_type_from_dict

        # Process each meta type in order.
        seen_lazy = False  # type: bool
        for meta_type in meta_directives:
            if not isinstance(meta_type, str):
                raise TypeError("meta_type was not a string: " +
                                repr(meta_type))

            # dynamic* is the pre-#134 spelling of lazy*; reject with guidance.
            if meta_type.startswith("dynamic"):
                raise ValueError(
                    "Found meta type {meta_type}. "
                    "Dynamic meta directives were renamed to lazy meta directives after issue #134. "
                    "Please change your metas from dynamic* to lazy*".format(
                        meta_type=meta_type))
            # If it's a lazy meta, skip it for now since they are lazily
            # processed at the very end.
            if meta_type.startswith("lazy"):
                lazy_base_meta_type = meta_type[len("lazy"):]
                if lazy_base_meta_type not in get_meta_directives():
                    raise ValueError(
                        "The type of lazy meta variable %s is not supported (%s)"
                        % (meta_key, meta_type))

                if seen_lazy:
                    raise ValueError(
                        "Multiple lazy directives in a single directive array not supported yet"
                    )
                else:
                    seen_lazy = True

                update_dict = {}  # type: dict

                # Check if this lazy meta references itself by checking if any of its targets is itself.
                targets = get_meta_directives(
                )[lazy_base_meta_type].target_settings(setting,
                                                       meta_dict[setting])
                if len(list(filter(lambda x: x == setting, targets))) > 0:
                    # If it does, rename this lazy meta to reference a new base.
                    # e.g. if a (dict 2) -> a (dict 1), rename "a (dict 1)" to a_1.
                    next_index = _get_next_free_index(newdict)
                    new_base_setting = "{setting}_{index}".format(
                        setting=setting, index=next_index)
                    new_value_meta = get_meta_directives(
                    )[lazy_base_meta_type].rename_target(
                        setting, meta_dict[setting], setting,
                        new_base_setting)  # type: Optional[Tuple[Any, str]]
                    if new_value_meta is None:
                        raise ValueError(
                            "Failed to rename lazy setting which depends on itself ({})"
                            .format(setting))
                    else:
                        new_value, new_meta = new_value_meta

                    # Rename base setting to new_base_setting, and add the new setting.
                    update_dict.update({
                        new_base_setting: newdict[setting],
                        setting: new_value,
                        setting + "_meta": "lazy" + new_meta  # these are lazy metas
                    })
                    # Carry along the old base setting's meta, if any.
                    if setting + "_meta" in newdict:
                        update_dict.update({
                            new_base_setting + "_meta":
                            newdict[setting + "_meta"]
                        })
                else:
                    # Store it into newdict and skip processing now.
                    update_dict.update({
                        setting: meta_dict[setting],
                        setting + "_meta": meta_type
                    })

                newdict.update(update_dict)
                continue
            else:
                # Lazy directives must come last in a directive list.
                if seen_lazy:
                    raise ValueError(
                        "Cannot use a non-lazy meta directive after a lazy one"
                    )

            try:
                meta_func = get_meta_directives()[meta_type].action
            except KeyError:
                raise ValueError(
                    "The type of meta variable %s is not supported (%s)" %
                    (meta_key, meta_type))
            meta_func(
                newdict, setting, meta_dict[setting],
                MetaDirectiveParams(
                    meta_path=meta_dict.get(_CONFIG_PATH_KEY, "unspecified")))
            # Update meta_dict if there are multiple meta directives.
            meta_dict[setting] = newdict[setting]

        del meta_dict[meta_key]
        del meta_dict[setting]

    newdict.update(deepdict(meta_dict))  # Update everything else.

    return newdict
def add_stackup(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict that carries the module-level test stackup.

    :param in_dict: Input tech schema.
    :return: Tech schema with a "stackups" entry added.
    """
    result = deepdict(in_dict)
    result["stackups"] = [test_stackup]
    return result
def env_vars(self) -> Dict[str, str]:
    """Expose the parent tool's environment variables unchanged (as a copy)."""
    return deepdict(super().env_vars)
def export_config_outputs(self) -> Dict[str, Any]:
    """
    Export the parent class's outputs plus this tool's output ILMs
    (serialized via their to_setting() representation).
    """
    result = deepdict(super().export_config_outputs())
    result["par.outputs.output_ilms"] = [ilm.to_setting() for ilm in self.output_ilms]
    return result
def add_dont_use_list(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict with a dummy don't-use cell list.

    :param in_dict: Input tech schema.
    :return: Tech schema with a "dont use list" entry added.
    """
    result = deepdict(in_dict)
    result["dont use list"] = ["cell1", "cell2"]
    return result
def add_gds_map(in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict pointing at a dummy GDS map file.

    :param in_dict: Input tech schema.
    :return: Tech schema with a "gds map file" entry added.
    """
    result = deepdict(in_dict)
    result["gds map file"] = "test/gds_map_file"
    return result
def update_dict(old: dict, new: dict) -> dict:
    """
    Return a fresh dict: a deep copy of old overlaid with the entries of new.
    Neither input is mutated.
    """
    merged = deepdict(old)
    for key, value in new.items():
        merged[key] = value
    return merged
def add_gds_map(d: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of d pointing at a dummy GDS map file.

    :param d: Input tech schema.
    :return: Tech schema with a "gds map file" entry added.
    """
    out = deepdict(d)
    out["gds map file"] = "test/gds_map_file"
    return out
def update_and_expand_meta(config_dict: dict, meta_dict: dict) -> dict:
    """
    Expand the meta directives for the given config dict and return a new
    dictionary containing the updated settings with respect to the base config_dict.

    Non-dynamic directives (append, subst, transclude, json2list,
    prependlocal) are executed immediately; dynamic* directives are stored
    into the result untouched for later expansion (see combine_configs).

    :param config_dict: Base config.
    :param meta_dict: Dictionary with potentially new meta directives.
    :return: New dictionary with meta_dict updating config_dict.
    :raises ValueError: On malformed or unsupported meta directives.
    """
    def perform_subst(value: Union[str, List[str]]) -> Union[str, List[str]]:
        """
        Perform substitutions for the given value.
        If value is a string, perform substitutions in the string. If value is
        a list, then perform substitutions in every string in the list.

        :param value: String or list
        :return: String or list but with everything substituted.
        """
        def subst_str(input_str: str) -> str:
            """Substitute ${...} references with their values from config_dict."""
            return re.sub(__VARIABLE_EXPANSION_REGEX,
                          lambda x: config_dict[x.group(1)], input_str)

        newval = ""  # type: Union[str, List[str]]

        if isinstance(value, list):
            newval = list(map(subst_str, value))
        else:
            newval = subst_str(value)
        return newval

    # Helper functions to implement each meta directive.
    def meta_append(config_dict: dict, key: str, value: Any) -> None:
        """Append the list `value` to the (list-valued) setting `key`."""
        if key not in config_dict:
            config_dict[key] = []
        if not isinstance(config_dict[key], list):
            raise ValueError("Trying to append to non-list setting %s" % (key))
        if not isinstance(value, list):
            raise ValueError("Trying to append to list %s with non-list %s" %
                             (key, str(value)))
        config_dict[key] += value

    def meta_subst(config_dict: dict, key: str, value: Any) -> None:
        """Substitute ${...} variables in `value` and store the result."""
        config_dict[key] = perform_subst(value)

    def meta_transclude(config_dict: dict, key: str, value: Any) -> None:
        """Transclude the contents of the file pointed to by value."""
        assert isinstance(value, str), "Path to file for transclusion must be a string"
        with open(value, "r") as f:
            file_contents = str(f.read())
        config_dict[key] = file_contents

    def meta_json2list(config_dict: dict, key: str, value: Any) -> None:
        """Turn the value of the key (JSON list) into a list."""
        assert isinstance(value, str), "json2list requires a JSON string that is a list"
        parsed = json.loads(value)
        assert isinstance(parsed, list), "json2list requires a JSON string that is a list"
        config_dict[key] = parsed

    def make_meta_dynamic(dynamic_meta: str) -> Callable[[dict, str, Any], None]:
        """
        Create a meta_dynamicFOO function.

        :param dynamic_meta: Dynamic meta type e.g. "dynamicsubst"
        :return: A function for meta_directive_functions.
        """
        def meta_dynamic(config_dict: dict, key: str, value: Any) -> None:
            # Do nothing at this stage, since we need to deal with dynamicsubst only after
            # everything has been bound.
            config_dict[key] = value
            config_dict[key + "_meta"] = dynamic_meta
        return meta_dynamic

    def meta_prependlocal(config_dict: dict, key: str, value) -> None:
        """Prepend the local path of the config dict."""
        config_dict[key] = os.path.join(meta_dict[CONFIG_PATH_KEY], str(value))

    # Lookup table of meta functions.
    meta_directive_functions = {
        'append': meta_append,
        'subst': meta_subst,
        'dynamicsubst': make_meta_dynamic('dynamicsubst'),
        'transclude': meta_transclude,
        'dynamictransclude': make_meta_dynamic('dynamictransclude'),
        'json2list': meta_json2list,
        'dynamicjson2list': make_meta_dynamic('dynamicjson2list'),
        'prependlocal': meta_prependlocal
    }  # type: Dict[str, Callable[[dict, str, Any], None]]

    newdict = deepdict(config_dict)

    # Find meta directives.
    assert isinstance(meta_dict, dict)
    meta_dict = deepdict(meta_dict)  # create a copy so we can remove items.
    meta_dict_keys = list(meta_dict.keys())
    meta_keys = filter(lambda k: k.endswith("_meta"), meta_dict_keys)

    # Deal with meta directives.
    meta_len = len("_meta")
    for meta_key in meta_keys:
        setting = meta_key[:-meta_len]
        # A *_meta value may be a single directive or an ordered list of them.
        meta_type_from_dict = meta_dict[meta_key]  # type: Union[str, List[str]]
        meta_directives = []  # type: List[str]
        if isinstance(meta_type_from_dict, str):
            meta_directives = [meta_type_from_dict]
        else:
            assert isinstance(meta_type_from_dict, List)
            meta_directives = meta_type_from_dict

        # Process each meta type in order.
        for meta_type in meta_directives:
            if not isinstance(meta_type, str):
                raise TypeError("meta_type was not a string: " + repr(meta_type))
            try:
                meta_func = meta_directive_functions[meta_type]
            except KeyError:
                raise ValueError("The type of meta variable %s is not supported (%s)" % (meta_key, meta_type))
            meta_func(newdict, setting, meta_dict[setting])
            # Update meta_dict if there are multiple meta directives.
            meta_dict[setting] = newdict[setting]

        del meta_dict[meta_key]
        del meta_dict[setting]

    newdict.update(deepdict(meta_dict))  # Update everything else.
    return newdict
def export_config_outputs(self) -> Dict[str, Any]:
    """
    Export the parent class's outputs plus this synthesis tool's output
    files, input files, and top module.
    """
    result = deepdict(super().export_config_outputs())
    result.update({
        "synthesis.outputs.output_files": self.output_files,
        "synthesis.inputs.input_files": self.input_files,
        "synthesis.inputs.top_module": self.top_module,
    })
    return result
def add_macro_sizes(d: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of d carrying the extra macro size entry
    (module-level my_size, serialized via to_setting()).
    """
    result = deepdict(d)
    result["vlsi.technology.extra_macro_sizes"] = [my_size.to_setting()]
    return result
def add_physical_only_cells_list(
        in_dict: Dict[str, Any]) -> Dict[str, Any]:
    """
    Return a copy of in_dict with a dummy physical-only cells list.

    :param in_dict: Input tech schema.
    :return: Tech schema with a "physical only cells list" entry added.
    """
    result = deepdict(in_dict)
    result["physical only cells list"] = ["cell1", "cell2"]
    return result