def test_lsf_submit(self) -> None:
    """
    Test that an LSF submission produces the desired output
    """
    with self.create_context("lsf") as c:
        cmd = c.submit_command
        assert isinstance(cmd, hammer_vlsi.HammerLSFSubmitCommand)
        lines = cmd.submit(c.echo_command, c.env, c.logger).splitlines()

        settings = cmd.settings
        self.assertEqual(lines[0], "BLOCKING is: 1")
        self.assertEqual(lines[1], "QUEUE is: %s" % get_or_else(settings.queue, ""))
        self.assertEqual(lines[2], "NUMCPU is: %d" % get_or_else(settings.num_cpus, 0))
        self.assertEqual(lines[3], "OUTPUT is: %s" % get_or_else(settings.log_file, ""))

        extra = settings.extra_args
        # The test fixture is expected to always pass a -R resource string.
        if "-R" not in extra:
            raise NotImplementedError("You forgot to test the extra_args!")
        self.assertEqual(lines[4], "RESOURCE is: %s" % extra[extra.index("-R") + 1])
        # The resource line shifts the remaining output down by one.
        self.assertEqual(lines[5], "COMMAND is: %s" % ' '.join(c.echo_command))
        self.assertEqual(lines[6], ' '.join(c.echo_command_args))
def action(driver: HammerDriver, append_error_func: Callable[[str], None]) -> Optional[dict]:
    """Run the closed-over action_type (syn or par) on the driver and return its output dict."""
    if pre_action_func is not None:
        pre_action_func(driver)

    if action_type in ("synthesis", "syn"):
        # If the driver didn't successfully load, return None.
        if not driver.load_synthesis_tool(get_or_else(self.syn_rundir, "")):
            return None
        post_load_func_checked(driver)
        success, output = driver.run_synthesis(extra_hooks)
        post_run_func_checked(driver)
    elif action_type == "par":
        if not driver.load_par_tool(get_or_else(self.par_rundir, "")):
            return None
        post_load_func_checked(driver)
        success, output = driver.run_par(extra_hooks)
        post_run_func_checked(driver)
    else:
        raise ValueError("Invalid action_type = " + str(action_type))
    # TODO: detect errors
    return output
def action(driver: HammerDriver, append_error_func: Callable[[str], None]) -> Optional[dict]:
    # Closure-based action runner: dispatches on the enclosing scope's
    # action_type, loads the matching tool, runs it, and returns the tool's
    # output config dict (None if loading or the run fails).
    if pre_action_func is not None:
        pre_action_func(driver)
    # If the driver didn't successfully load, return None.
    if action_type == "synthesis" or action_type == "syn":
        if not driver.load_synthesis_tool(get_or_else(self.syn_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        assert driver.syn_tool is not None, "load_synthesis_tool was successful"
        success, output = driver.run_synthesis(extra_hooks)
        if not success:
            driver.log.error("Synthesis tool did not succeed")
            return None
        # Persist both the raw tool outputs and the fully-merged config.
        dump_config_to_json_file(os.path.join(driver.syn_tool.run_dir, "syn-output.json"), output)
        dump_config_to_json_file(os.path.join(driver.syn_tool.run_dir, "syn-output-full.json"),
                                 self.get_full_config(driver, output))
        post_run_func_checked(driver)
    elif action_type == "par":
        if not driver.load_par_tool(get_or_else(self.par_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        assert driver.par_tool is not None, "load_par_tool was successful"
        success, output = driver.run_par(extra_hooks)
        if not success:
            driver.log.error("Place-and-route tool did not succeed")
            return None
        dump_config_to_json_file(os.path.join(driver.par_tool.run_dir, "par-output.json"), output)
        dump_config_to_json_file(os.path.join(driver.par_tool.run_dir, "par-output-full.json"),
                                 self.get_full_config(driver, output))
        post_run_func_checked(driver)
    elif action_type == "drc":
        if not driver.load_drc_tool(get_or_else(self.drc_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        # NOTE(review): unlike syn/par, success is not checked for drc/lvs/sram_generator.
        success, output = driver.run_drc(extra_hooks)
        post_run_func_checked(driver)
    elif action_type == "lvs":
        if not driver.load_lvs_tool(get_or_else(self.lvs_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        success, output = driver.run_lvs(extra_hooks)
        post_run_func_checked(driver)
    elif action_type == "sram_generator":
        if not driver.load_sram_generator_tool(get_or_else(self.sram_generator_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        success, output = driver.run_sram_generator(extra_hooks)
        post_run_func_checked(driver)
    else:
        raise ValueError("Invalid action_type = " + str(action_type))
    # TODO: detect errors
    return output
def get_physical_only_cells(self) -> List[str]:
    """
    Get a list of physical only cells in accordance with settings in the Hammer IR.
    Return a list of cells which are physical only.
    :return: A list of physical only cells.
    """
    # Mode can be auto, manual, or append
    mode = str(self.get_setting("par.inputs.physical_only_cells_mode"))  # type: str
    # Only consulted in manual and append modes.
    from_settings = self.get_setting("par.inputs.physical_only_cells_list")  # type: List[str]
    assert isinstance(from_settings, list), "par.inputs.physical_only_cells_list must be a list"
    # Only consulted in auto and append modes.
    from_tech = get_or_else(self.technology.physical_only_cells_list, [])  # type: List[str]

    if mode == "manual":
        return from_settings
    if mode == "append":
        return from_tech + from_settings
    if mode != "auto":
        self.logger.error(
            "Invalid physical_only_cells_mode {mode}. Using auto physical only cells list.".format(mode=mode))
    # auto (and the invalid-mode fallback) use the technology-supplied list.
    return from_tech
def from_setting(settings: Dict[str, Any]) -> "HammerLSFSettings":
    """
    Create a HammerLSFSettings from a raw settings dictionary.

    :param settings: Dict containing the mandatory key "bsub_binary" and the
                     optional keys "num_cpus", "queue", "log_file" and
                     "extra_args" (each of which may also be present but None).
    :return: Populated HammerLSFSettings.
    :raises ValueError: If settings is not a dict or bsub_binary is missing.
    """
    if not isinstance(settings, dict):
        raise ValueError("Must be a dictionary")
    try:
        bsub_binary = settings["bsub_binary"]
    except KeyError:
        raise ValueError("Missing mandatory key bsub_binary for LSF settings.")
    # Optional keys default to None when absent.
    num_cpus = settings.get("num_cpus", None)
    queue = settings.get("queue", None)
    log_file = settings.get("log_file", None)
    return HammerLSFSettings(
        bsub_binary=bsub_binary,
        num_cpus=num_cpus,
        queue=queue,
        log_file=log_file,
        # Bug fix: the original indexed settings["extra_args"] directly, raising
        # KeyError when the key was absent even though every other optional key
        # was guarded. Treat a missing or None value as an empty argument list.
        extra_args=get_or_else(settings.get("extra_args", None), [])
    )
def get_dont_use_list(self) -> List[str]:
    """
    Get a "don't use" list in accordance with settings in the Hammer IR.
    Return a list of cells to mark as "don't use".
    :return: A list of cells to avoid using.
    """
    # Mode can be auto, manual, or append
    mode = str(self.get_setting("vlsi.inputs.dont_use_mode"))  # type: str
    # Only consulted in manual and append modes.
    user_list = self.get_setting("vlsi.inputs.dont_use_list")  # type: List[str]
    assert isinstance(user_list, list), "vlsi.inputs.dont_use_list must be a list"
    # Only consulted in auto and append modes.
    tech_list = get_or_else(self.technology.dont_use_list, [])  # type: List[str]

    if mode == "manual":
        return user_list
    if mode == "append":
        return tech_list + user_list
    if mode != "auto":
        self.logger.error(
            "Invalid dont_use_mode {mode}. Using auto dont use list.".format(mode=mode))
    # auto (and the invalid-mode fallback) use the technology-supplied list.
    return tech_list
def __init__(self, value: str, prefix: Optional[str] = None) -> None:
    """
    Create a value from parsing the given string.
    :param value: Value encoded in the given string.
    :param prefix: If value does not have a prefix (e.g. "0.25"), then use
                   the given prefix, or the default prefix defined by the
                   class if one is not specified.
    """
    import re
    fallback_prefix = get_or_else(prefix, self.default_prefix)
    # Capture the numeric part and any SI prefix immediately before the unit suffix.
    pattern = r"^(-?[\d.]+) *(.*){}$".format(re.escape(self.unit))
    parsed = re.search(pattern, value)
    if parsed is not None:
        num = parsed.group(1)
        self._value_prefix = parsed.group(2)
    else:
        # No unit suffix present: the whole string must be a bare number.
        try:
            num = str(float(value))
            self._value_prefix = fallback_prefix
        except ValueError:
            raise ValueError("Malformed {type} value {value}".format(
                type=self.unit_type, value=value))
    # Reject multiple decimal points and multi-character prefixes.
    if num.count('.') > 1 or len(self._value_prefix) > 1:
        raise ValueError("Malformed {type} value {value}".format(
            type=self.unit_type, value=value))
    if self._value_prefix not in self._prefix_table:
        raise ValueError("Bad prefix for {value}".format(value=value))
    self._value = float(num)  # type: float
    # Preserve the prefix too to preserve precision
    self._prefix = self._prefix_table[self._value_prefix]  # type: float
def get_additional_lvs_text(self) -> str:
    """ Get the additional custom LVS command text to add after the
    boilerplate commands at the top of the LVS run file. """
    # Mode can be auto, manual, append, or prepend
    mode = str(self.get_setting("lvs.inputs.additional_lvs_text_mode"))
    # Only used in manual, append, and prepend modes.
    manual_text = str(self.get_setting("lvs.inputs.additional_lvs_text"))
    # Only used in auto, append, and prepend modes.
    tech_text = get_or_else(self.technology.additional_lvs_text, "")  # type: str

    # Map each valid mode to its resulting text; auto uses the tech text alone.
    choices = {
        "auto": tech_text,
        "manual": manual_text,
        "append": tech_text + manual_text,
        "prepend": manual_text + tech_text
    }
    if mode not in choices:
        self.logger.error(
            "Invalid additional_lvs_text_mode {mode}. Using auto.".format(mode=mode))
        return tech_text
    return choices[mode]
def __init__(self, value: str, prefix: Optional[str] = None) -> None:
    """
    Create a value from parsing the given string.
    :param value: Value encoded in the given string.
    :param prefix: If value does not have a prefix (e.g. "0.25"), then use the given prefix,
                   or the default prefix defined by the class if one is not specified.
    """
    import re

    def malformed() -> ValueError:
        # Single place to construct the parse-failure error.
        return ValueError("Malformed {type} value {value}".format(type=self.unit_type, value=value))

    default_prefix = get_or_else(prefix, self.default_prefix)
    match = re.search(r"^(-?[\d.]+) *(.*){}$".format(re.escape(self.unit)), value)
    if match is not None:
        num, value_prefix = match.group(1), match.group(2)
    else:
        # No unit suffix found: the entire string must parse as a number.
        try:
            num = str(float(value))
            value_prefix = default_prefix
        except ValueError:
            raise malformed()
    # Reject multiple decimal points and multi-character prefixes.
    if num.count('.') > 1 or len(value_prefix) > 1:
        raise malformed()
    if value_prefix not in self._prefix_table:
        raise ValueError("Bad prefix for {value}".format(value=value))
    self._value = float(num)  # type: float
    # Preserve the prefix too to preserve precision
    self._prefix = self._prefix_table[value_prefix]  # type: float
def store_into_library(self) -> Library:
    """
    Store the prefix into extra_prefixes of the library, and return a new copy.
    :return: A copy of the library in this ExtraPrefix with the prefix stored in
             extra_prefixes, if one exists.
    """
    new_lib = copy_library(self.library)  # type: Library
    # A None prefix becomes an empty extra_prefixes list.
    prefixes = get_or_else(optional_map(self.prefix, lambda p: [p]), [])  # type: List[LibraryPrefix]
    new_lib.extra_prefixes = prefixes  # type: ignore
    return new_lib
def create(pins: str, side: Optional[str] = None, layers: Optional[List[str]] = None,
           preplaced: Optional[bool] = None, location: Optional[Tuple[float, float]] = None,
           width: Optional[float] = None, depth: Optional[float] = None) -> "PinAssignment":
    """
    Static method that works around the fact that mypy gets very confused at the
    custom constructor above that defines default arguments.
    """
    # preplaced is the only argument that needs normalization: None means False.
    effective_preplaced = get_or_else(preplaced, False)
    return PinAssignment(pins, side, layers, effective_preplaced, location, width, depth)
def store_into_library(self) -> Library:
    """
    Store the prefix into extra_prefixes of the library, and return a new copy.
    :return: A copy of the library in this ExtraPrefix with the prefix stored in
             extra_prefixes, if one exists.
    """
    result = copy_library(self.library)  # type: Library
    # Wrap the prefix in a one-element list, or use an empty list when unset.
    if self.prefix is not None:
        result.extra_prefixes = [self.prefix]  # type: ignore
    else:
        result.extra_prefixes = []  # type: ignore
    return result
def __new__(cls, pins: str, side: Optional[str] = None, layers: Optional[List[str]] = None,
            preplaced: Optional[bool] = None, location: Optional[Tuple[float, float]] = None,
            width: Optional[float] = None, depth: Optional[float] = None) -> "PinAssignment":
    # Normalize preplaced before delegating: a None argument means "not preplaced".
    effective_preplaced = get_or_else(preplaced, False)
    return super().__new__(cls, pins, side, layers, effective_preplaced,
                           location, width, depth)
def test_lsf_submit(self) -> None:
    """
    Test that an LSF submission produces the desired output
    """
    with self.create_context("lsf") as c:
        cmd = c.submit_command
        assert isinstance(cmd, hammer_vlsi.HammerLSFSubmitCommand)
        output = cmd.submit(c.echo_command, c.env, c.logger).splitlines()

        # Check the fixed header lines first.
        header = [
            "BLOCKING is: 1",
            "QUEUE is: %s" % get_or_else(cmd.settings.queue, ""),
            "NUMCPU is: %d" % get_or_else(cmd.settings.num_cpus, 0),
            "OUTPUT is: %s" % get_or_else(cmd.settings.log_file, "")
        ]
        for idx, want in enumerate(header):
            self.assertEqual(output[idx], want)

        # The test fixture must always provide a -R resource string.
        extra = cmd.settings.extra_args
        if "-R" in extra:
            self.assertEqual(output[4], "RESOURCE is: %s" % extra[extra.index("-R") + 1])
        else:
            raise NotImplementedError("You forgot to test the extra_args!")
        self.assertEqual(output[5], "COMMAND is: %s" % ' '.join(c.echo_command))
        self.assertEqual(output[6], ' '.join(c.echo_command_args))
def prepend_dir_path(self, path: str, lib: Optional[Library] = None) -> str:
    """
    Prepend the appropriate path (either from tarballs or installs) to the given library item.
    e.g. if the path argument is "foo/bar" and we have a prefix that defines foo as
    "/usr/share/foo", then this will return "/usr/share/foo/bar".

    :param path: Path to which we should prepend
    :param lib: (optional) Library which produced this path. Used to look for additional prefixes.
    """
    assert len(path) > 0, "path must not be empty"
    # Absolute paths pass through untouched.
    if path[0] == "/":
        return path

    components = path.split(os.path.sep)
    base_path = components[0]
    rest_of_path = components[1:]

    installs = self.config.installs if self.config.installs is not None else []
    tarballs = self.config.tarballs if self.config.tarballs is not None else []
    matching_installs = [i for i in installs if i.path == base_path]
    matching_tarballs = [t for t in tarballs if t.path == base_path]

    # Some extra typing junk because Library is a dynamically-generated class...
    get_extra_prefixes = lambda l: l.extra_prefixes  # type: Callable[[Any], List[LibraryPrefix]]
    extra_prefixes = get_or_else(optional_map(lib, get_extra_prefixes), [])  # type: List[LibraryPrefix]
    matching_extra_prefixes = [p for p in extra_prefixes if p.prefix == base_path]

    # Exactly one source (install, tarball, or extra prefix) must claim base_path.
    matches = len(matching_installs) + len(matching_tarballs) + len(matching_extra_prefixes)
    if matches < 1:
        raise ValueError("Path {0} did not match any tarballs or installs".format(path))
    if matches > 1:
        raise ValueError("Path {0} matched more than one tarball or install".format(path))

    if matching_installs:
        install = matching_installs[0]
        # An empty base_var means the install is relative to the technology path.
        base = self.path if install.base_var == "" else self.get_setting(install.base_var)
        return os.path.join(*([base] + rest_of_path))
    if matching_tarballs:
        return os.path.join(self.extracted_tarballs_dir, path)
    return matching_extra_prefixes[0].prepend(os.path.join(*rest_of_path))
def new(
        tag: str, description: str, is_file: bool,
        paths_func: PathsFunctionType,
        extraction_func: Optional[ExtractionFunctionType] = None,
        filter_func: Optional[Callable[["Library"], bool]] = None,
        sort_func: Optional[Callable[["Library"], Union[Number, str, tuple]]] = None,
        extra_post_filter_funcs: Optional[List[Callable[[List[str]], List[str]]]] = None) -> "LibraryFilter":
    """Convenience "constructor" with some default arguments."""
    # Validate the supplied callables up front.
    check_paths_func(paths_func)
    if extraction_func is not None:
        check_extraction_func(extraction_func)
    if filter_func is not None:
        check_filter_func(filter_func)
    # Normalize a missing post-filter list to a fresh empty list.
    post_filters = list(get_or_else(extra_post_filter_funcs, []))
    return LibraryFilter(tag, description, is_file, paths_func,
                         extraction_func, filter_func, sort_func, post_filters)
def new(
        tag: str, description: str, is_file: bool,
        paths_func: PathsFunctionType,
        extraction_func: Optional[ExtractionFunctionType] = None,
        filter_func: Optional[Callable[["Library"], bool]] = None,
        sort_func: Optional[Callable[["Library"], Union[Number, str, tuple]]] = None,
        extra_post_filter_funcs: Optional[List[Callable[[List[str]], List[str]]]] = None
) -> "LibraryFilter":
    """Convenience "constructor" with some default arguments."""
    check_paths_func(paths_func)
    if extraction_func is not None:
        check_extraction_func(extraction_func)
    if filter_func is not None:
        check_filter_func(filter_func)
    # Copy the caller's post-filter list (or start fresh) so the stored list
    # is never shared with the caller.
    if extra_post_filter_funcs is None:
        post_filter_funcs = []  # type: List[Callable[[List[str]], List[str]]]
    else:
        post_filter_funcs = list(extra_post_filter_funcs)
    return LibraryFilter(tag, description, is_file, paths_func,
                         extraction_func, filter_func, sort_func, post_filter_funcs)
def par_post_run(d: HammerDriver) -> None:
    # Run the shared post_run hook in the par run directory ("" when unset).
    rundir = get_or_else(self.par_rundir, "")
    post_run(d, rundir)
def syn_post_run(d: HammerDriver) -> None:
    # Run the shared post_run hook in the synthesis run directory ("" when unset).
    rundir = get_or_else(self.syn_rundir, "")
    post_run(d, rundir)
def from_dict(raw_assign: Dict[str, Any], semi_auto: bool = True) -> "PinAssignment":
    """
    Build a PinAssignment from its raw dict representation, validating each
    field and enforcing semi_auto-only features.

    :param raw_assign: Raw dict (e.g. from parsed config) with at least a "pins" key.
    :param semi_auto: Whether semi-auto-only features (internal side, location,
                      width, depth) are allowed.
    :return: Validated PinAssignment.
    :raises PinAssignmentError: On an invalid side or missing layers/side.
    :raises PinAssignmentSemiAutoError: When a semi-auto feature is used with semi_auto=False.
    :raises PinAssignmentPreplacedError: When a preplaced pin also carries placement fields.
    """
    pins = str(raw_assign["pins"])  # type: str

    side = None  # type: Optional[str]
    if "side" in raw_assign:
        raw_side = raw_assign["side"]  # type: str
        assert isinstance(raw_side, str), "side must be a str"
        if raw_side in ("top", "bottom", "right", "left", "internal"):
            side = raw_side
            if side == "internal" and not semi_auto:
                raise PinAssignmentSemiAutoError("side set to internal")
        else:
            raise PinAssignmentError(
                "Pins {p} have invalid side {s}. Assuming pins will be handled by CAD tool."
                .format(p=pins, s=raw_side))

    preplaced = raw_assign.get("preplaced", False)  # type: bool
    assert isinstance(preplaced, bool), "preplaced must be a bool"

    location = None  # type: Optional[Tuple[float, float]]
    if "location" in raw_assign:
        location_raw = raw_assign["location"]  # type: Union[List[float], Tuple[float, float]]
        assert len(location_raw) == 2, "location must be a Optional[Tuple[float, float]]"
        assert isinstance(location_raw[0], float), "location must be a Optional[Tuple[float, float]]"
        assert isinstance(location_raw[1], float), "location must be a Optional[Tuple[float, float]]"
        location = (location_raw[0], location_raw[1])
    if not semi_auto and location is not None:
        raise PinAssignmentSemiAutoError("location requires semi_auto")

    width = raw_assign.get("width", None)  # type: Optional[float]
    if width is not None:
        assert isinstance(width, float), "width must be a float"
    if not semi_auto and width is not None:
        raise PinAssignmentSemiAutoError("width requires semi_auto")

    depth = raw_assign.get("depth", None)  # type: Optional[float]
    if depth is not None:
        assert isinstance(depth, float), "depth must be a float"
    if not semi_auto and depth is not None:
        raise PinAssignmentSemiAutoError("depth requires semi_auto")

    layers = None  # type: Optional[List[str]]
    if "layers" in raw_assign:
        raw_layers = raw_assign["layers"]  # type: List[str]
        assert isinstance(raw_layers, list), "layers must be a List[str]"
        # Bug fix: iterate the actual list, not the literal string "layers",
        # so non-str entries are actually caught by the assertion.
        for layer in raw_layers:
            assert isinstance(layer, str), "layers must be a List[str]"
        layers = raw_layers

    if preplaced:
        # Preplaced pins must not carry any placement information.
        should_be_none = reduce(
            operator.and_,
            map(lambda x: x is None, [side, location, width, depth]))
        if len(get_or_else(layers, cast(List[str], []))) > 0 or not should_be_none:
            raise PinAssignmentPreplacedError(
                PinAssignment(pins=pins, side=None, layers=[], preplaced=preplaced,
                              location=None, width=None, depth=None))
    else:
        if len(get_or_else(layers, cast(List[str], []))) == 0 or side is None:
            raise PinAssignmentError(
                "Pins {p} assigned without layers or side. Assuming pins will be handled by CAD tool."
                .format(p=pins))
    return PinAssignment(pins=pins, side=side, layers=layers, preplaced=preplaced,
                         location=location, width=width, depth=depth)
def process_library_filter(self, filt: LibraryFilter, pre_filts: List[Callable[[Library], bool]], output_func: Callable[[str, LibraryFilter], List[str]], must_exist: bool = True, uniquify: bool = True) -> List[str]: """ Process the given library filter and return a list of items from that library filter with any extra post-processing. - Get a list of lib items - Run any extra_post_filter_funcs (if needed) - For every lib item in each lib items, run output_func :param filt: LibraryFilter to check against all libraries. :param pre_filts: List of functions with which to pre-filter the libraries. Each function must return true in order for this library to be used. :param output_func: Function which processes the outputs, taking in the filtered lib and the library filter which generated it. :param must_exist: Must each library item actually exist? Default: True (yes, they must exist) :param uniquify: Must uniqify the list of output files. Default: True :return: Resultant items from the filter and post-processed. (e.g. --timing foo.db --timing bar.db) """ # First, filter the list of available libraries with pre_filts and the library itself. lib_filters = pre_filts + get_or_else(optional_map(filt.filter_func, lambda x: [x]), []) filtered_libs = list(reduce_named( sequence=lib_filters, initial=self.get_available_libraries(), function=lambda libs, func: filter(func, libs) )) # type: List[Library] # Next, sort the list of libraries if a sort function exists. if filt.sort_func is not None: filtered_libs = sorted(filtered_libs, key=filt.sort_func) # Next, extract paths and prepend them to get the real paths. def get_and_prepend_path(lib: Library) -> Tuple[Library, List[str]]: paths = filt.paths_func(lib) full_paths = list(map(lambda path: self.prepend_dir_path(path, lib), paths)) return lib, full_paths libs_and_paths = list(map(get_and_prepend_path, filtered_libs)) # type: List[Tuple[Library, List[str]]] # Existence checks for paths. 
def check_lib_and_paths(inp: Tuple[Library, List[str]]) -> Tuple[Library, List[str]]: lib = inp[0] # type: Library paths = inp[1] # type: List[str] existence_check_func = self.make_check_isfile(filt.description) if filt.is_file else self.make_check_isdir( filt.description) paths = list(map(existence_check_func, paths)) return lib, paths if must_exist: libs_and_paths = list(map(check_lib_and_paths, libs_and_paths)) # Now call the extraction function to get a final list of strings. # If no extraction function was specified, use the identity extraction # function. def identity_extraction_func(lib: "Library", paths: List[str]) -> List[str]: return paths extraction_func = get_or_else(filt.extraction_func, identity_extraction_func) output_list = reduce_list_str(add_lists, list(map(lambda t: extraction_func(t[0], t[1]), libs_and_paths)), []) # type: List[str] # Quickly check that it is actually a List[str]. if not isinstance(output_list, List): raise TypeError("output_list is not a List[str], but a " + str(type(output_list))) for i in output_list: if not isinstance(i, str): raise TypeError("output_list is a List but not a List[str]") # Uniquify results. # TODO: think about whether this really belongs here and whether we always need to uniquify. # This is here to get stuff working since some CAD tools dislike duplicated arguments (e.g. duplicated stdcell # lib, etc). if uniquify: in_place_unique(output_list) # Apply any list-level functions. after_post_filter = reduce_named( sequence=filt.extra_post_filter_funcs, initial=output_list, function=lambda libs, func: func(list(libs)), ) # Finally, apply any output functions. # e.g. turning foo.db into ["--timing", "foo.db"]. after_output_functions = list(map(lambda item: output_func(item, filt), after_post_filter)) # Concatenate lists of List[str] together. return reduce_list_str(add_lists, after_output_functions, [])
def test_get_or_else(self) -> None:
    # None falls back to the default; non-None values pass through untouched.
    cases = [
        (None, "default", "default"),
        (None, "", ""),
        ("Hello World", "default", "Hello World"),
        ("Hello World", "", "Hello World")
    ]
    for optional, default, expected in cases:
        self.assertEqual(get_or_else(optional, default), expected)
def lvs_post_run(d: HammerDriver) -> None:
    # Run the shared post_run hook in the LVS run directory ("" when unset).
    rundir = get_or_else(self.lvs_rundir, "")
    post_run(d, rundir)
def prepend_dir_path(self, path: str, lib: Optional[Library] = None) -> str:
    """
    Prepend the appropriate path (either from tarballs or installs) to the given library item.
    e.g. if the path argument is "foo/bar" and we have a prefix that defines foo as
    "/usr/share/foo", then this will return "/usr/share/foo/bar".

    :param path: Path to which we should prepend
    :param lib: (optional) Library which produced this path. Used to look for additional prefixes.
    """
    assert len(path) > 0, "path must not be empty"

    # If the path is an absolute path, return it as-is.
    if path[0] == "/":
        return path

    base_path = path.split(os.path.sep)[0]
    rest_of_path = path.split(os.path.sep)[1:]

    if self.config.installs is not None:
        matching_installs = list(
            filter(lambda install: install.path == base_path,
                   self.config.installs))
    else:
        matching_installs = []

    if self.config.tarballs is not None:
        matching_tarballs = list(
            filter(lambda tarball: tarball.path == base_path,
                   self.config.tarballs))
    else:
        matching_tarballs = []

    # Some extra typing junk because Library is a dynamically-generated class...
    get_extra_prefixes = lambda l: l.extra_prefixes  # type: Callable[[Any], List[LibraryPrefix]]
    extra_prefixes = get_or_else(optional_map(lib, get_extra_prefixes), [])  # type: List[LibraryPrefix]
    matching_extra_prefixes = list(
        filter(lambda p: p.prefix == base_path, extra_prefixes))

    # Exactly one source (install, tarball, or extra prefix) must claim base_path.
    matches = len(matching_installs) + len(matching_tarballs) + len(
        matching_extra_prefixes)
    if matches < 1:
        raise ValueError(
            "Path {0} did not match any tarballs or installs".format(path))
    elif matches > 1:
        raise ValueError(
            "Path {0} matched more than one tarball or install".format(
                path))
    else:
        if len(matching_installs) == 1:
            install = matching_installs[0]
            if install.base_var == "":
                # No base_var: installs whose path names the cache dir resolve
                # to the cache directory; otherwise fall back to the technology path.
                if install.path == os.path.basename(
                        self.cache_dir
                ):  # default is tech-<techname>-cache
                    base = self.cache_dir
                else:
                    base = self.path
            else:
                base = self.get_setting(install.base_var)
            return os.path.join(*([base] + rest_of_path))
        elif len(matching_tarballs) == 1:
            # Tarballs keep the full original path under the extraction dir.
            return os.path.join(self.extracted_tarballs_dir, path)
        else:
            matched = matching_extra_prefixes[0]
            return matched.prepend(os.path.join(*rest_of_path))
def from_dict(raw_assign: Dict[str, Any], semi_auto: bool = True) -> "PinAssignment":
    """
    Build a PinAssignment from its raw dict representation, validating each
    field and enforcing semi_auto-only features.

    :param raw_assign: Raw dict (e.g. from parsed config) with at least a "pins" key.
    :param semi_auto: Whether semi-auto-only features (internal side, location,
                      width, depth) are allowed.
    :return: Validated PinAssignment.
    :raises PinAssignmentError: On an invalid side or missing layers/side.
    :raises PinAssignmentSemiAutoError: When a semi-auto feature is used with semi_auto=False.
    :raises PinAssignmentPreplacedError: When a preplaced pin also carries placement fields.
    """
    pins = str(raw_assign["pins"])  # type: str

    side = None  # type: Optional[str]
    if "side" in raw_assign:
        raw_side = raw_assign["side"]  # type: str
        assert isinstance(raw_side, str), "side must be a str"
        if raw_side in ("top", "bottom", "right", "left", "internal"):
            side = raw_side
            if side == "internal" and not semi_auto:
                raise PinAssignmentSemiAutoError("side set to internal")
        else:
            raise PinAssignmentError(
                "Pins {p} have invalid side {s}. Assuming pins will be handled by CAD tool.".format(p=pins, s=raw_side))

    preplaced = raw_assign.get("preplaced", False)  # type: bool
    assert isinstance(preplaced, bool), "preplaced must be a bool"

    location = None  # type: Optional[Tuple[float, float]]
    if "location" in raw_assign:
        location_raw = raw_assign["location"]  # type: Union[List[float], Tuple[float, float]]
        assert len(location_raw) == 2, "location must be a Optional[Tuple[float, float]]"
        assert isinstance(location_raw[0], float), "location must be a Optional[Tuple[float, float]]"
        assert isinstance(location_raw[1], float), "location must be a Optional[Tuple[float, float]]"
        location = (location_raw[0], location_raw[1])
    if not semi_auto and location is not None:
        raise PinAssignmentSemiAutoError("location requires semi_auto")

    width = raw_assign.get("width", None)  # type: Optional[float]
    if width is not None:
        assert isinstance(width, float), "width must be a float"
    if not semi_auto and width is not None:
        raise PinAssignmentSemiAutoError("width requires semi_auto")

    depth = raw_assign.get("depth", None)  # type: Optional[float]
    if depth is not None:
        assert isinstance(depth, float), "depth must be a float"
    if not semi_auto and depth is not None:
        raise PinAssignmentSemiAutoError("depth requires semi_auto")

    layers = None  # type: Optional[List[str]]
    if "layers" in raw_assign:
        raw_layers = raw_assign["layers"]  # type: List[str]
        assert isinstance(raw_layers, list), "layers must be a List[str]"
        # Bug fix: the original iterated the literal string "layers" (i.e. its
        # characters), so the per-element isinstance check never inspected the
        # actual list. Iterate raw_layers instead.
        for layer in raw_layers:
            assert isinstance(layer, str), "layers must be a List[str]"
        layers = raw_layers

    if preplaced:
        # Preplaced pins must not carry any placement information.
        should_be_none = reduce(operator.and_, map(lambda x: x is None, [side, location, width, depth]))
        if len(get_or_else(layers, cast(List[str], []))) > 0 or not should_be_none:
            raise PinAssignmentPreplacedError(
                PinAssignment(pins=pins, side=None, layers=[], preplaced=preplaced,
                              location=None, width=None, depth=None))
    else:
        if len(get_or_else(layers, cast(List[str], []))) == 0 or side is None:
            raise PinAssignmentError(
                "Pins {p} assigned without layers or side. Assuming pins will be handled by CAD tool.".format(
                    p=pins))
    return PinAssignment(
        pins=pins,
        side=side,
        layers=layers,
        preplaced=preplaced,
        location=location,
        width=width,
        depth=depth
    )
def action(driver: HammerDriver, append_error_func: Callable[[str], None]) -> Optional[dict]:
    # Closure-based action runner: dispatches on the enclosing scope's
    # action_type, loads the matching tool, runs it, and returns the tool's
    # output config dict (None if loading or the run fails).
    if pre_action_func is not None:
        pre_action_func(driver)
    # If the driver didn't successfully load, return None.
    if action_type == "synthesis" or action_type == "syn":
        if not driver.load_synthesis_tool(
                get_or_else(self.syn_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        assert driver.syn_tool is not None, "load_synthesis_tool was successful"
        success, output = driver.run_synthesis(extra_hooks)
        if not success:
            driver.log.error("Synthesis tool did not succeed")
            return None
        # Persist both the raw tool outputs and the fully-merged config.
        dump_config_to_json_file(
            os.path.join(driver.syn_tool.run_dir, "syn-output.json"),
            output)
        dump_config_to_json_file(
            os.path.join(driver.syn_tool.run_dir, "syn-output-full.json"),
            self.get_full_config(driver, output))
        post_run_func_checked(driver)
    elif action_type == "par":
        if not driver.load_par_tool(get_or_else(self.par_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        assert driver.par_tool is not None, "load_par_tool was successful"
        success, output = driver.run_par(extra_hooks)
        if not success:
            driver.log.error("Place-and-route tool did not succeed")
            return None
        dump_config_to_json_file(
            os.path.join(driver.par_tool.run_dir, "par-output.json"),
            output)
        dump_config_to_json_file(
            os.path.join(driver.par_tool.run_dir, "par-output-full.json"),
            self.get_full_config(driver, output))
        post_run_func_checked(driver)
    elif action_type == "drc":
        if not driver.load_drc_tool(get_or_else(self.drc_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        # NOTE(review): unlike syn/par, success is not checked for drc/lvs/sram_generator.
        success, output = driver.run_drc(extra_hooks)
        post_run_func_checked(driver)
    elif action_type == "lvs":
        if not driver.load_lvs_tool(get_or_else(self.lvs_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        success, output = driver.run_lvs(extra_hooks)
        post_run_func_checked(driver)
    elif action_type == "sram_generator":
        if not driver.load_sram_generator_tool(
                get_or_else(self.sram_generator_rundir, "")):
            return None
        else:
            post_load_func_checked(driver)
        success, output = driver.run_sram_generator(extra_hooks)
        post_run_func_checked(driver)
    else:
        raise ValueError("Invalid action_type = " + str(action_type))
    # TODO: detect errors
    return output
def drc_post_run(d: HammerDriver) -> None:
    # Run the shared post_run hook in the DRC run directory ("" when unset).
    rundir = get_or_else(self.drc_rundir, "")
    post_run(d, rundir)
def process_library_filter(self, filt: LibraryFilter, pre_filts: List[Callable[[Library], bool]], output_func: Callable[[str, LibraryFilter], List[str]], must_exist: bool = True, uniquify: bool = True) -> List[str]: """ Process the given library filter and return a list of items from that library filter with any extra post-processing. - Get a list of lib items - Run any extra_post_filter_funcs (if needed) - For every lib item in each lib items, run output_func :param filt: LibraryFilter to check against all libraries. :param pre_filts: List of functions with which to pre-filter the libraries. Each function must return true in order for this library to be used. :param output_func: Function which processes the outputs, taking in the filtered lib and the library filter which generated it. :param must_exist: Must each library item actually exist? Default: True (yes, they must exist) :param uniquify: Must uniqify the list of output files. Default: True :return: Resultant items from the filter and post-processed. (e.g. --timing foo.db --timing bar.db) """ # First, filter the list of available libraries with pre_filts and the library itself. lib_filters = pre_filts + get_or_else( optional_map(filt.filter_func, lambda x: [x]), []) filtered_libs = list( reduce_named(sequence=lib_filters, initial=self.get_available_libraries(), function=lambda libs, func: filter(func, libs)) ) # type: List[Library] # Next, sort the list of libraries if a sort function exists. if filt.sort_func is not None: filtered_libs = sorted(filtered_libs, key=filt.sort_func) # Next, extract paths and prepend them to get the real paths. def get_and_prepend_path(lib: Library) -> Tuple[Library, List[str]]: paths = filt.paths_func(lib) full_paths = list( map(lambda path: self.prepend_dir_path(path, lib), paths)) return lib, full_paths libs_and_paths = list( map(get_and_prepend_path, filtered_libs)) # type: List[Tuple[Library, List[str]]] # Existence checks for paths. 
def check_lib_and_paths( inp: Tuple[Library, List[str]]) -> Tuple[Library, List[str]]: lib = inp[0] # type: Library paths = inp[1] # type: List[str] existence_check_func = self.make_check_isfile( filt.description) if filt.is_file else self.make_check_isdir( filt.description) paths = list(map(existence_check_func, paths)) return lib, paths if must_exist: libs_and_paths = list(map(check_lib_and_paths, libs_and_paths)) # Now call the extraction function to get a final list of strings. # If no extraction function was specified, use the identity extraction # function. def identity_extraction_func(lib: "Library", paths: List[str]) -> List[str]: return paths extraction_func = get_or_else(filt.extraction_func, identity_extraction_func) output_list = reduce_list_str( add_lists, list(map(lambda t: extraction_func(t[0], t[1]), libs_and_paths)), []) # type: List[str] # Quickly check that it is actually a List[str]. if not isinstance(output_list, List): raise TypeError("output_list is not a List[str], but a " + str(type(output_list))) for i in output_list: if not isinstance(i, str): raise TypeError("output_list is a List but not a List[str]") # Uniquify results. # TODO: think about whether this really belongs here and whether we always need to uniquify. # This is here to get stuff working since some CAD tools dislike duplicated arguments (e.g. duplicated stdcell # lib, etc). if uniquify: in_place_unique(output_list) # Apply any list-level functions. after_post_filter = reduce_named( sequence=filt.extra_post_filter_funcs, initial=output_list, function=lambda libs, func: func(list(libs)), ) # Finally, apply any output functions. # e.g. turning foo.db into ["--timing", "foo.db"]. after_output_functions = list( map(lambda item: output_func(item, filt), after_post_filter)) # Concatenate lists of List[str] together. return reduce_list_str(add_lists, after_output_functions, [])