def get_gds_map_file(self) -> Optional[str]:
    """
    Get a GDS map in accordance with settings in the Hammer IR.
    Return a fully-resolved (i.e. already prepended path) path to the GDS map or None if none was specified.

    :return: Fully-resolved path to GDS map file or None.
    """
    # Mode can be auto, empty, or manual
    gds_map_mode = str(self.get_setting("par.inputs.gds_map_mode"))  # type: str

    # gds_map_file will only be used in manual mode.
    # Not including the map_file flag includes all layers but with no specific layer numbers.
    # Read the setting once instead of twice: the original looked it up both for the
    # None check and again for the str() conversion.
    manual_map_file_raw = self.get_setting("par.inputs.gds_map_file")
    manual_map_file = str(manual_map_file_raw) if manual_map_file_raw is not None else None  # type: Optional[str]

    # tech_map_file will only be used in auto mode.
    tech_map_file_raw = self.technology.config.gds_map_file  # type: ignore
    if tech_map_file_raw is not None:
        # Prepend the technology directory path so the result is fully resolved.
        tech_map_file = self.technology.prepend_dir_path(str(tech_map_file_raw))  # type: Optional[str]
    else:
        tech_map_file = None

    if gds_map_mode == "auto":
        map_file = tech_map_file
    elif gds_map_mode == "manual":
        map_file = manual_map_file
    elif gds_map_mode == "empty":
        map_file = None
    else:
        # Unknown mode: warn loudly but fall back to the auto behaviour.
        self.logger.error(
            "Invalid gds_map_mode {mode}. Using auto gds map.".format(mode=gds_map_mode))
        map_file = tech_map_file

    return map_file
def get_gds_map_file(self) -> Optional[str]:
    """
    Get a GDS map in accordance with settings in the Hammer IR.
    Return a fully-resolved (i.e. already prepended path) path to the GDS map or None if none was specified.

    :return: Fully-resolved path to GDS map file or None.
    """
    # Mode can be auto, empty, or manual.
    gds_map_mode = str(self.get_setting("par.inputs.gds_map_mode"))  # type: str

    # Manual map file: read the setting once (previously it was looked up twice —
    # once for the None check and once for the conversion).
    # Not including the map_file flag includes all layers but with no specific layer numbers.
    raw_manual = self.get_setting("par.inputs.gds_map_file")
    manual_map_file = None if raw_manual is None else str(raw_manual)  # type: Optional[str]

    # Technology-supplied map file (auto mode), fully resolved against the tech directory.
    raw_tech = self.technology.config.gds_map_file  # type: ignore
    tech_map_file = None if raw_tech is None else self.technology.prepend_dir_path(str(raw_tech))  # type: Optional[str]

    if gds_map_mode == "manual":
        return manual_map_file
    if gds_map_mode == "empty":
        return None
    if gds_map_mode != "auto":
        # Unknown mode: report it, then fall through to the auto behaviour.
        self.logger.error("Invalid gds_map_mode {mode}. Using auto gds map.".format(mode=gds_map_mode))
    return tech_map_file
def store_into_library(self) -> Library:
    """
    Store the prefix into extra_prefixes of the library, and return a new copy.

    :return: A copy of the library in this ExtraPrefix with the prefix stored
             in extra_prefixes, if one exists.
    """
    new_lib = copy_library(self.library)  # type: Library
    # A present prefix becomes a one-element list; an absent one yields [].
    if self.prefix is not None:
        prefixes = [self.prefix]  # type: List[LibraryPrefix]
    else:
        prefixes = []
    new_lib.extra_prefixes = prefixes  # type: ignore
    return new_lib
def store_into_library(self) -> Library:
    """
    Store the prefix into extra_prefixes of the library, and return a new copy.

    :return: A copy of the library in this ExtraPrefix with the prefix stored
             in extra_prefixes, if one exists.
    """
    # NOTE(review): copy semantics depend on copy_library (defined elsewhere) —
    # presumably it returns an independent copy so the stored library is not mutated.
    lib_copied = copy_library(self.library)  # type: Library
    # optional_map turns a present prefix into a one-element list; get_or_else
    # substitutes [] when self.prefix is None.
    extra_prefixes = get_or_else(optional_map(self.prefix, lambda p: [p]), [])  # type: List[PathPrefix]
    lib_copied.extra_prefixes = extra_prefixes  # type: ignore
    return lib_copied
def test_optional_map(self) -> None:
    """Check that optional_map applies the function only to non-None values."""
    def int_to_tagged_str(x):
        return str(x) + "_str"

    def str_to_scaled_int(x):
        return int(x) * 10

    # None short-circuits: the function must not be applied.
    self.assertEqual(optional_map(None, int_to_tagged_str), None)
    # Non-None values, including falsy ones like 0, are mapped.
    self.assertEqual(optional_map(10, int_to_tagged_str), "10_str")
    self.assertEqual(optional_map(0, int_to_tagged_str), "0_str")
    self.assertEqual(optional_map(None, str_to_scaled_int), None)
    self.assertEqual(optional_map("88", str_to_scaled_int), 880)
    # The mapped result must be an int, not the string "880".
    self.assertNotEqual(optional_map("88", str_to_scaled_int), "880")
    self.assertEqual(optional_map("42", str_to_scaled_int), 420)
def test_optional_map(self) -> None:
    """Test that optional_map returns None for None inputs and f(x) otherwise."""
    # Two sample transformations to exercise optional_map with different types.
    num_to_str = lambda x: str(x) + "_str"
    str_to_num = lambda x: int(x) * 10
    # None short-circuits: the function must not be applied.
    self.assertEqual(optional_map(None, num_to_str), None)
    self.assertEqual(optional_map(10, num_to_str), "10_str")
    # 0 is falsy but not None, so it must still be mapped.
    self.assertEqual(optional_map(0, num_to_str), "0_str")
    self.assertEqual(optional_map(None, str_to_num), None)
    self.assertEqual(optional_map("88", str_to_num), 880)
    # The mapped result must be an int, not the string "880".
    self.assertNotEqual(optional_map("88", str_to_num), "880")
    self.assertEqual(optional_map("42", str_to_num), 420)
def prepend_dir_path(self, path: str, lib: Optional[Library] = None) -> str:
    """
    Prepend the appropriate path (either from tarballs or installs) to the
    given library item.
    e.g. if the path argument is "foo/bar" and we have a prefix that defines
    foo as "/usr/share/foo", then this will return "/usr/share/foo/bar".

    :param path: Path to which we should prepend
    :param lib: (optional) Library which produced this path. Used to look for
                additional prefixes.
    :return: Fully-resolved path.
    :raises ValueError: If the path's first component matches zero or more than
                        one install/tarball/extra prefix.
    """
    assert len(path) > 0, "path must not be empty"

    # If the path is an absolute path, return it as-is.
    if path[0] == "/":
        return path

    # The first component selects the prefix; the rest is joined onto it.
    base_path = path.split(os.path.sep)[0]
    rest_of_path = path.split(os.path.sep)[1:]

    # Collect installs whose declared path matches the first component.
    if self.config.installs is not None:
        matching_installs = list(filter(lambda install: install.path == base_path, self.config.installs))
    else:
        matching_installs = []

    # Likewise for tarballs.
    if self.config.tarballs is not None:
        matching_tarballs = list(filter(lambda tarball: tarball.path == base_path, self.config.tarballs))
    else:
        matching_tarballs = []

    # Some extra typing junk because Library is a dynamically-generated class...
    get_extra_prefixes = lambda l: l.extra_prefixes  # type: Callable[[Any], List[LibraryPrefix]]
    # If a library was supplied, its extra_prefixes may also resolve the path.
    extra_prefixes = get_or_else(optional_map(lib, get_extra_prefixes), [])  # type: List[LibraryPrefix]
    matching_extra_prefixes = list(filter(lambda p: p.prefix == base_path, extra_prefixes))

    # Exactly one source must claim the prefix; anything else is ambiguous or unresolvable.
    matches = len(matching_installs) + len(matching_tarballs) + len(matching_extra_prefixes)
    if matches < 1:
        raise ValueError("Path {0} did not match any tarballs or installs".format(path))
    elif matches > 1:
        raise ValueError("Path {0} matched more than one tarball or install".format(path))
    else:
        if len(matching_installs) == 1:
            install = matching_installs[0]
            if install.base_var == "":
                # No base_var means the install is rooted at this technology's path.
                base = self.path
            else:
                # Otherwise the base directory comes from the named setting.
                base = self.get_setting(install.base_var)
            return os.path.join(*([base] + rest_of_path))
        elif len(matching_tarballs) == 1:
            # Tarball contents live under the extracted-tarballs directory,
            # keyed by the full original path.
            return os.path.join(self.extracted_tarballs_dir, path)
        else:
            # Exactly one extra prefix matched; let it build the final path.
            matched = matching_extra_prefixes[0]
            return matched.prepend(os.path.join(*rest_of_path))
def get_timing_libs(self, corner: Optional[MMMCCorner] = None) -> str:
    """
    Helper function to get the list of ASCII timing .lib files in space
    separated format. Note that Cadence tools support ECSM, so we can use the
    ECSM-based filter.

    :param corner: Optional corner to consider. If supplied, this will use
                   filter_for_mmmc to select libraries that match a given
                   corner (voltage/temperature).
    :return: List of lib files separated by spaces
    """
    # With a corner, narrow the libraries to those matching its voltage/temp;
    # without one, apply no extra pre-filtering.
    if corner is None:
        pre_filters = None  # type: Optional[List[Callable[[hammer_tech.Library], bool]]]
    else:
        pre_filters = [self.filter_for_mmmc(voltage=corner.voltage, temp=corner.temp)]

    timing_lib_paths = self.technology.read_libs(
        [hammer_tech.filters.timing_lib_with_ecsm_filter],
        hammer_tech.HammerTechnologyUtils.to_plain_item,
        extra_pre_filters=pre_filters)
    return " ".join(timing_lib_paths)
def process_library_filter(self, filt: LibraryFilter, pre_filts: List[Callable[[Library], bool]], output_func: Callable[[str, LibraryFilter], List[str]], must_exist: bool = True, uniquify: bool = True) -> List[str]: """ Process the given library filter and return a list of items from that library filter with any extra post-processing. - Get a list of lib items - Run any extra_post_filter_funcs (if needed) - For every lib item in each lib items, run output_func :param filt: LibraryFilter to check against all libraries. :param pre_filts: List of functions with which to pre-filter the libraries. Each function must return true in order for this library to be used. :param output_func: Function which processes the outputs, taking in the filtered lib and the library filter which generated it. :param must_exist: Must each library item actually exist? Default: True (yes, they must exist) :param uniquify: Must uniqify the list of output files. Default: True :return: Resultant items from the filter and post-processed. (e.g. --timing foo.db --timing bar.db) """ # First, filter the list of available libraries with pre_filts and the library itself. lib_filters = pre_filts + get_or_else( optional_map(filt.filter_func, lambda x: [x]), []) filtered_libs = list( reduce_named(sequence=lib_filters, initial=self.get_available_libraries(), function=lambda libs, func: filter(func, libs)) ) # type: List[Library] # Next, sort the list of libraries if a sort function exists. if filt.sort_func is not None: filtered_libs = sorted(filtered_libs, key=filt.sort_func) # Next, extract paths and prepend them to get the real paths. def get_and_prepend_path(lib: Library) -> Tuple[Library, List[str]]: paths = filt.paths_func(lib) full_paths = list( map(lambda path: self.prepend_dir_path(path, lib), paths)) return lib, full_paths libs_and_paths = list( map(get_and_prepend_path, filtered_libs)) # type: List[Tuple[Library, List[str]]] # Existence checks for paths. 
def check_lib_and_paths( inp: Tuple[Library, List[str]]) -> Tuple[Library, List[str]]: lib = inp[0] # type: Library paths = inp[1] # type: List[str] existence_check_func = self.make_check_isfile( filt.description) if filt.is_file else self.make_check_isdir( filt.description) paths = list(map(existence_check_func, paths)) return lib, paths if must_exist: libs_and_paths = list(map(check_lib_and_paths, libs_and_paths)) # Now call the extraction function to get a final list of strings. # If no extraction function was specified, use the identity extraction # function. def identity_extraction_func(lib: "Library", paths: List[str]) -> List[str]: return paths extraction_func = get_or_else(filt.extraction_func, identity_extraction_func) output_list = reduce_list_str( add_lists, list(map(lambda t: extraction_func(t[0], t[1]), libs_and_paths)), []) # type: List[str] # Quickly check that it is actually a List[str]. if not isinstance(output_list, List): raise TypeError("output_list is not a List[str], but a " + str(type(output_list))) for i in output_list: if not isinstance(i, str): raise TypeError("output_list is a List but not a List[str]") # Uniquify results. # TODO: think about whether this really belongs here and whether we always need to uniquify. # This is here to get stuff working since some CAD tools dislike duplicated arguments (e.g. duplicated stdcell # lib, etc). if uniquify: in_place_unique(output_list) # Apply any list-level functions. after_post_filter = reduce_named( sequence=filt.extra_post_filter_funcs, initial=output_list, function=lambda libs, func: func(list(libs)), ) # Finally, apply any output functions. # e.g. turning foo.db into ["--timing", "foo.db"]. after_output_functions = list( map(lambda item: output_func(item, filt), after_post_filter)) # Concatenate lists of List[str] together. return reduce_list_str(add_lists, after_output_functions, [])
def prepend_dir_path(self, path: str, lib: Optional[Library] = None) -> str:
    """
    Prepend the appropriate path (either from tarballs or installs) to the
    given library item.
    e.g. if the path argument is "foo/bar" and we have a prefix that defines
    foo as "/usr/share/foo", then this will return "/usr/share/foo/bar".

    :param path: Path to which we should prepend
    :param lib: (optional) Library which produced this path. Used to look for
                additional prefixes.
    :return: Fully-resolved path.
    :raises ValueError: If the path's first component matches zero or more than
                        one install/tarball/extra prefix.
    """
    assert len(path) > 0, "path must not be empty"

    # If the path is an absolute path, return it as-is.
    if path[0] == "/":
        return path

    # The first component selects the prefix; the rest is joined onto it.
    base_path = path.split(os.path.sep)[0]
    rest_of_path = path.split(os.path.sep)[1:]

    # Collect installs whose declared path matches the first component.
    if self.config.installs is not None:
        matching_installs = list(
            filter(lambda install: install.path == base_path,
                   self.config.installs))
    else:
        matching_installs = []

    # Likewise for tarballs.
    if self.config.tarballs is not None:
        matching_tarballs = list(
            filter(lambda tarball: tarball.path == base_path,
                   self.config.tarballs))
    else:
        matching_tarballs = []

    # Some extra typing junk because Library is a dynamically-generated class...
    get_extra_prefixes = lambda l: l.extra_prefixes  # type: Callable[[Any], List[LibraryPrefix]]
    # If a library was supplied, its extra_prefixes may also resolve the path.
    extra_prefixes = get_or_else(optional_map(lib, get_extra_prefixes), [])  # type: List[LibraryPrefix]
    matching_extra_prefixes = list(
        filter(lambda p: p.prefix == base_path, extra_prefixes))

    # Exactly one source must claim the prefix; anything else is ambiguous or unresolvable.
    matches = len(matching_installs) + len(matching_tarballs) + len(
        matching_extra_prefixes)
    if matches < 1:
        raise ValueError(
            "Path {0} did not match any tarballs or installs".format(path))
    elif matches > 1:
        raise ValueError(
            "Path {0} matched more than one tarball or install".format(
                path))
    else:
        if len(matching_installs) == 1:
            install = matching_installs[0]
            if install.base_var == "":
                # With no base_var, an install named like the cache directory
                # resolves into the cache; everything else is rooted at this
                # technology's path.
                if install.path == os.path.basename(
                        self.cache_dir
                ):  # default is tech-<techname>-cache
                    base = self.cache_dir
                else:
                    base = self.path
            else:
                # Otherwise the base directory comes from the named setting.
                base = self.get_setting(install.base_var)
            return os.path.join(*([base] + rest_of_path))
        elif len(matching_tarballs) == 1:
            # Tarball contents live under the extracted-tarballs directory,
            # keyed by the full original path.
            return os.path.join(self.extracted_tarballs_dir, path)
        else:
            # Exactly one extra prefix matched; let it build the final path.
            matched = matching_extra_prefixes[0]
            return matched.prepend(os.path.join(*rest_of_path))
def process_library_filter(self, filt: LibraryFilter, pre_filts: List[Callable[[Library], bool]], output_func: Callable[[str, LibraryFilter], List[str]], must_exist: bool = True, uniquify: bool = True) -> List[str]: """ Process the given library filter and return a list of items from that library filter with any extra post-processing. - Get a list of lib items - Run any extra_post_filter_funcs (if needed) - For every lib item in each lib items, run output_func :param filt: LibraryFilter to check against all libraries. :param pre_filts: List of functions with which to pre-filter the libraries. Each function must return true in order for this library to be used. :param output_func: Function which processes the outputs, taking in the filtered lib and the library filter which generated it. :param must_exist: Must each library item actually exist? Default: True (yes, they must exist) :param uniquify: Must uniqify the list of output files. Default: True :return: Resultant items from the filter and post-processed. (e.g. --timing foo.db --timing bar.db) """ # First, filter the list of available libraries with pre_filts and the library itself. lib_filters = pre_filts + get_or_else(optional_map(filt.filter_func, lambda x: [x]), []) filtered_libs = list(reduce_named( sequence=lib_filters, initial=self.get_available_libraries(), function=lambda libs, func: filter(func, libs) )) # type: List[Library] # Next, sort the list of libraries if a sort function exists. if filt.sort_func is not None: filtered_libs = sorted(filtered_libs, key=filt.sort_func) # Next, extract paths and prepend them to get the real paths. def get_and_prepend_path(lib: Library) -> Tuple[Library, List[str]]: paths = filt.paths_func(lib) full_paths = list(map(lambda path: self.prepend_dir_path(path, lib), paths)) return lib, full_paths libs_and_paths = list(map(get_and_prepend_path, filtered_libs)) # type: List[Tuple[Library, List[str]]] # Existence checks for paths. 
def check_lib_and_paths(inp: Tuple[Library, List[str]]) -> Tuple[Library, List[str]]: lib = inp[0] # type: Library paths = inp[1] # type: List[str] existence_check_func = self.make_check_isfile(filt.description) if filt.is_file else self.make_check_isdir( filt.description) paths = list(map(existence_check_func, paths)) return lib, paths if must_exist: libs_and_paths = list(map(check_lib_and_paths, libs_and_paths)) # Now call the extraction function to get a final list of strings. # If no extraction function was specified, use the identity extraction # function. def identity_extraction_func(lib: "Library", paths: List[str]) -> List[str]: return paths extraction_func = get_or_else(filt.extraction_func, identity_extraction_func) output_list = reduce_list_str(add_lists, list(map(lambda t: extraction_func(t[0], t[1]), libs_and_paths)), []) # type: List[str] # Quickly check that it is actually a List[str]. if not isinstance(output_list, List): raise TypeError("output_list is not a List[str], but a " + str(type(output_list))) for i in output_list: if not isinstance(i, str): raise TypeError("output_list is a List but not a List[str]") # Uniquify results. # TODO: think about whether this really belongs here and whether we always need to uniquify. # This is here to get stuff working since some CAD tools dislike duplicated arguments (e.g. duplicated stdcell # lib, etc). if uniquify: in_place_unique(output_list) # Apply any list-level functions. after_post_filter = reduce_named( sequence=filt.extra_post_filter_funcs, initial=output_list, function=lambda libs, func: func(list(libs)), ) # Finally, apply any output functions. # e.g. turning foo.db into ["--timing", "foo.db"]. after_output_functions = list(map(lambda item: output_func(item, filt), after_post_filter)) # Concatenate lists of List[str] together. return reduce_list_str(add_lists, after_output_functions, [])