def get_object_from_module(clsname, module):
    """Load the python file *module* and return the class or function
    named *clsname* defined in it.

    :param clsname: Name of the class/function to look up.
    :param module: Path to the python source file to load.
    :raise MicroprobeArchitectureDefinitionError: if the module file can
        not be loaded or *clsname* is not defined in it.
    """
    # Bug fix: imp.load_source was deprecated since Python 3.4 and removed
    # in Python 3.12. Use the importlib machinery (same observable result:
    # the file is executed and registered under the fixed module name).
    import importlib.util

    modname = _fix_importname(module + clsname)
    try:
        spec = importlib.util.spec_from_file_location(modname, module)
        if spec is None or spec.loader is None:
            raise IOError("Cannot load '%s'" % module)
        loaded = importlib.util.module_from_spec(spec)
        spec.loader.exec_module(loaded)
    except (IOError, OSError):
        # IOError is an alias of OSError in Python 3; keep both for clarity.
        raise MicroprobeArchitectureDefinitionError(
            "Module '%s' not found" % module
        )

    # Only classes and functions are valid definition objects.
    obj = getattr(loaded, clsname, None)
    if inspect.isclass(obj) or inspect.isfunction(obj):
        return obj

    raise MicroprobeArchitectureDefinitionError(
        "Class '%s' not found in "
        "module '%s'" % (
            clsname, loaded
        )
    )
def import_definition(cls, filenames, regtypes):
    """Read register definitions from *filenames* and build them via *cls*.

    :param cls: Register class/factory used to instantiate each register.
    :param filenames: YAML definition files to read.
    :param regtypes: Mapping of register type name to register type object.
    :raise MicroprobeArchitectureDefinitionError: on unknown register
        types or duplicated register names.
    """
    LOG.debug("Start")
    registers = {}
    for fname in filenames:
        definitions = read_yaml(fname, SCHEMA)
        if definitions is None:
            continue
        for definition in definitions:
            rname = definition["Name"]
            rdescr = definition.get("Description", "No description")
            rtype = definition["Type"]
            rrepr = definition["Representation"]
            rcodi = definition.get("Codification",
                                   definition["Representation"])
            repeat = definition.get("Repeat", None)

            if rtype not in regtypes:
                raise MicroprobeArchitectureDefinitionError(
                    "Unknown register type in definition of "
                    "register '%s' in file '%s'" % (rname, fname)
                )

            # Without a Repeat clause the expansion loop runs exactly once
            # and the replacement of "0" by "0" is a no-op, which keeps the
            # repeated and non-repeated paths uniform.
            first, last, token = 0, 0, "0"
            if repeat:
                first = repeat["From"]
                token = "%s" % first
                last = repeat["To"]

            for idx in range(first, last + 1):
                sub = "%d" % idx
                cname = rname.replace(token, sub)
                register = cls(
                    cname,
                    rdescr.replace(token, sub),
                    regtypes[rtype],
                    rrepr.replace(token, sub),
                    rcodi.replace(token, sub),
                )
                if cname in registers:
                    raise MicroprobeArchitectureDefinitionError(
                        "Duplicated register definition of '%s' found"
                        " in '%s'" % (cname, fname)
                    )
                LOG.debug(register)
                registers[cname] = register
    LOG.debug("End")
    return registers
def _caches_from_elements(elements):
    """Build cache model objects for every element flagged as a cache.

    :param elements: Mapping of element name to microarchitecture element.
    :return: List of cache objects (only N-way set associative caches
        are currently supported).
    :raise MicroprobeArchitectureDefinitionError: on missing or
        incomplete cache-related type properties.
    """
    LOG.debug("Start")
    caches = []
    for element in elements.values():
        LOG.debug("Checking: '%s'", element)
        try:
            if not element.type.data_cache and \
                    not element.type.instruction_cache:
                continue
        except Exception:
            raise MicroprobeArchitectureDefinitionError(
                "Microarchitecture "
                "definition requires the definition of the "
                "'data_cache' and 'instruction_cache' properties "
                "for the element types.")
        LOG.debug("Cache element found:'%s'", element)
        try:
            size = element.type.cache_size * 1024
            line_size = element.type.cache_linesize
            level = element.type.cache_level
            address_size = element.type.cache_address_size
        except AttributeError as exc:
            # Bug fix: Python 3 AttributeError has no '.message'; extract
            # the missing attribute name from str(exc) instead
            # ("... has no attribute 'name'").
            raise MicroprobeArchitectureDefinitionError(
                "Element '%s' defined as a cache, but required "
                "property '%s' not specified in its type "
                "'%s'" % (element, str(exc).split("'")[3], element.type))

        # Figuring out the cache type based on element attributes
        # Right now, we only implement N-way set associative
        ways = getattr(element.type, "cache_ways", None)
        if ways is not None:
            new_cache = SetAssociativeCache(element, size, level, line_size,
                                            address_size,
                                            element.type.data_cache,
                                            element.type.instruction_cache,
                                            ways)
            caches.append(new_cache)
        else:
            # Bug fix: the '%s' placeholder was never substituted and
            # "on of" was a typo for "one of".
            raise MicroprobeArchitectureDefinitionError(
                "Element '%s' defined "
                "as a cache, but cache type can not be "
                "determined. Please specify one of the "
                "following properties: ['cache_ways']" % element)
    LOG.debug("End")
    return caches
def import_definition(cls, filenames, operands):
    """Read instruction field definitions and instantiate them via *cls*.

    :param cls: Instruction field class/factory.
    :param filenames: YAML definition files to read.
    :param operands: Mapping of operand name to operand object.
    :raise MicroprobeArchitectureDefinitionError: on unknown operands or
        duplicated field names.
    """
    LOG.debug("Start")
    fields = {}
    seen_shapes = {}
    for fname in filenames:
        definitions = read_yaml(fname, SCHEMA)
        if definitions is None:
            continue
        for definition in definitions:
            fieldname = definition["Name"]
            fdescr = definition.get("Description", "No description")
            fsize = definition["Size"]
            fshow = definition.get("Show", False)
            fio = definition.get("IO", "?")
            operand_def = definition.get("Operand", "Zero")

            # Fields that only differ in name are usually redundant
            # definitions; warn so the author can double-check.
            shape = (fsize, fshow, fio, operand_def)
            if shape in seen_shapes:
                LOG.warning(
                    "Similar definition of instruction field: '%s' and"
                    " '%s'. Check if definition needed.", fieldname,
                    seen_shapes[shape])
            else:
                seen_shapes[shape] = fieldname

            try:
                operand = operands[operand_def]
            except KeyError:
                raise MicroprobeArchitectureDefinitionError(
                    "Unknown operand "
                    "defined in instruction"
                    " field '%s' in '%s'." % (fieldname, fname))

            field = cls(fieldname, fdescr, fsize, fshow, fio, operand)
            if fieldname in fields:
                raise MicroprobeArchitectureDefinitionError(
                    "Duplicated "
                    "definition "
                    "of instruction field"
                    " '%s' found in '%s'" % (fieldname, fname))
            LOG.debug(field)
            fields[fieldname] = field
    LOG.debug("End")
    return fields
def import_operand_definition(
        defdict, yaml, key, base_module, regs, force=False
):
    """Import the operand definitions referenced by *key*.

    Results are cached on disk; the cache is reused unless any of the
    involved files changed or *force* is set.

    :param defdict: Parsed architecture definition dictionary.
    :param yaml: Path to the main YAML definition file.
    :param key: Entry of *defdict* describing the operands.
    :param base_module: Module implementing ``import_definition``.
    :param regs: Register definitions required by the importer.
    :param force: Skip the cache and re-import when True.
    :return: Tuple of (imported data, whether a re-import was needed).
    :raise MicroprobeArchitectureDefinitionError: on a missing or
        malformed *key* entry.
    """
    try:
        entry = defdict[key]
    except KeyError:
        raise MicroprobeArchitectureDefinitionError(
            "'%s' key in %s "
            "file missing or not defined "
            "correctly." % (key, yaml)
        )

    cache_filename = cache_file("%s.Operand" % (yaml))
    needs_import = update_cache_needed(
        [yaml] + entry["YAML"], cachefile=cache_filename
    ) or force
    entry["YAML_inherits"] = entry.get("YAML_inherits", [])

    if not needs_import:
        LOG.debug("Reading cache contents for Operand")
        try:
            return read_cache_data(cache_filename), needs_import
        except ImportError:
            LOG.exception("Unable to read cache contents for Operand")
        except MicroprobeCacheError:
            LOG.debug("Cache error when reading cache contents for Operand")

    try:
        data = base_module.import_definition(
            entry["YAML"], entry["YAML_inherits"], regs
        )
    except KeyError:
        raise MicroprobeArchitectureDefinitionError(
            "'%s' key in %s "
            "file missing or not defined "
            "correctly." % (key, yaml)
        )

    try:
        write_cache_data(cache_filename, data)
    except MicroprobeCacheError:
        LOG.debug("Cache error when writing cache contents for Operand")

    return data, needs_import
def import_definition(defdict, yaml, key, base_module, args, force=False):
    """Import the definition class referenced by *key*, with caching.

    The definition class is loaded from the module declared in the
    entry; the on-disk cache is reused unless any tracked file changed
    or *force* is set.

    :param defdict: Parsed architecture definition dictionary.
    :param yaml: Path to the main YAML definition file.
    :param key: Entry of *defdict* to import.
    :param base_module: Module implementing ``import_definition``.
    :param args: Extra arguments forwarded to the importer.
    :param force: Skip the cache and re-import when True.
    :return: Tuple of (imported data, whether a re-import was needed).
    :raise MicroprobeArchitectureDefinitionError: on a missing or
        malformed *key* entry.
    """
    try:
        entry = defdict[key]
        cls = get_object_from_module(entry["Class"], entry["Module"])
    except KeyError:
        raise MicroprobeArchitectureDefinitionError(
            "'%s' key in %s "
            "file missing or not defined "
            "correctly." % (key, yaml)
        )

    tracked_files = [yaml, entry["Module"]] + entry["YAML"]
    if issubclass(cls, PropertyHolder):
        # Property files are implicit dependencies of the definition.
        for cfile in entry["YAML"]:
            tracked_files.extend(list_property_files(cfile))

    cache_filename = cache_file("%s.%s" % (yaml, cls.__name__))
    needs_import = update_cache_needed(
        tracked_files, cachefile=cache_filename
    ) or force

    if not needs_import:
        LOG.debug("Reading cache contents for %s", cls.__name__)
        try:
            return read_cache_data(cache_filename), needs_import
        except ImportError:
            LOG.exception("Unable to read cache contents for %s",
                          cls.__name__)
        except MicroprobeCacheError:
            LOG.debug("Cache error when reading class %s", cls.__name__)

    try:
        data = base_module.import_definition(cls, entry["YAML"], args)
    except KeyError:
        raise MicroprobeArchitectureDefinitionError(
            "'%s' key in %s "
            "missing the YAML attribute." % (key, yaml)
        )

    try:
        write_cache_data(cache_filename, data)
    except MicroprobeCacheError:
        LOG.debug("Cache error when writing class %s", cls.__name__)

    return data, needs_import
def import_classes_from(modules):
    """Import all Generator subclasses found in *modules*.

    :param modules: Iterable of module names to scan.
    :return: List of the imported generator classes.
    :raise MicroprobeArchitectureDefinitionError: if two modules define
        a generator with the same class name.
    """
    LOG.info("Start")
    classes = {}
    for module_str in modules:
        for cls in find_subclasses(module_str, Generator):
            name = cls.__name__
            if name in classes:
                # Bug fix: the message arguments were previously passed as
                # extra exception args instead of being %-formatted into
                # the message string.
                raise MicroprobeArchitectureDefinitionError(
                    "Duplicated "
                    "definition"
                    " of Generator '%s' "
                    "in module '%s'" % (name, module_str))
            LOG.info("%s generator imported", name)
            classes[name] = cls
    if len(classes) == 0:
        LOG.warning("No generators imported.")
    LOG.info("End")
    return list(classes.values())
def __init__(self, fname, descr, fields, assembly):
    """Create a POWER instruction format and validate its length.

    :param fname: Format name.
    :param descr: Format description.
    :param fields: Instruction fields composing the format.
    :param assembly: Assembly string of the format.
    :raise MicroprobeArchitectureDefinitionError: if the resulting
        format is not exactly 4 bytes long.
    """
    super(PowerInstructionFormat, self).__init__(fname, descr, fields,
                                                 assembly)
    # This subclass only accepts 4-byte instruction formats.
    if self.length != 4:
        raise MicroprobeArchitectureDefinitionError(
            "Instruction format '%s' length: %d is not 4 bytes " %
            (self.name, self.length))
def _get_value(cfield, base):
    """Return the representation of the argument whose operand type
    matches sub-field *cfield* (looked up in the enclosing scope's
    *args* and *self._operand_descriptors*)."""
    try:
        # Keep the descriptor lookup inside the comprehension so that a
        # missing *cfield* raises KeyError exactly as before (only when
        # at least one argument is inspected).
        candidates = [
            arg for arg in args
            if arg.descriptor.type.name ==
            self._operand_descriptors[cfield].type.name
        ]
        value = candidates[0].value
        return self._operand_descriptors[cfield].type.representation(
            value)
    except KeyError:
        raise MicroprobeArchitectureDefinitionError(
            "Unable to find sub-field '%s' for the '%s' field" %
            (cfield, base))
def _compute_length(self):
    """Compute the format length in bytes from its field bit sizes.

    Logs the offending field sizes and raises
    MicroprobeArchitectureDefinitionError when the total is not a whole
    number of bytes.
    """
    total_bits = sum(field.size for field in self._fields)
    if total_bits % 8:
        LOG.error("%s", self)
        LOG.error("\tTotal length: %d", total_bits)
        for field in self._fields:
            LOG.error("\t\t - %s: %d", field.name, field.size)
        raise MicroprobeArchitectureDefinitionError(
            "Instruction format"
            " '%s' length is not multiple of a byte" % self.name)
    self._length = total_bits // 8
def import_definition(cls, filenames, dummy):
    """Read register type definitions and instantiate them via *cls*.

    :param cls: Register type class/factory.
    :param filenames: YAML definition files to read.
    :param dummy: Unused (kept for importer interface uniformity).
    :raise MicroprobeArchitectureDefinitionError: on duplicated register
        type names.
    """
    LOG.debug("Start")
    regtypes = {}
    seen_shapes = {}
    for fname in filenames:
        for definition in read_yaml(fname, SCHEMA):
            tname = definition["Name"]
            tsize = definition["Size"]
            tdescr = definition.get("Description", "No description")
            used4addr = definition.get("AddressArithmetic", False)
            used4float = definition.get("FloatArithmetic", False)
            used4vector = definition.get("VectorArithmetic", False)
            regtype = cls(tname, tdescr, tsize, used4addr, used4float,
                          used4vector)

            # Types that only differ in name are usually redundant
            # definitions; warn so the author can double-check.
            shape = (tsize, used4addr, used4float, used4vector)
            if shape in seen_shapes:
                LOG.warning(
                    "Similar definition of register types: '%s' and"
                    " '%s'. Check if definition needed.", tname,
                    seen_shapes[shape])
            else:
                seen_shapes[shape] = tname

            LOG.debug(regtype)
            if tname in regtypes:
                raise MicroprobeArchitectureDefinitionError(
                    "Duplicated "
                    "definition of register type '%s' "
                    "found in '%s'" % (tname, fname))
            regtypes[tname] = regtype
    LOG.debug("End")
    return regtypes
def cache_hierarchy_from_elements(elements):
    """Build the cache hierarchy from the microarchitecture elements.

    :param elements: Mapping of element name to element object.
    :return: CacheHierarchy built from the cache elements found.
    :raise MicroprobeArchitectureDefinitionError: if no cache element
        is present in *elements*.
    """
    caches = _caches_from_elements(elements)
    if not caches:
        raise MicroprobeArchitectureDefinitionError(
            "Expecting cache hierarchy"
            " elements in the microarchitecture"
            " description, but none found.")
    return CacheHierarchy(caches)
def import_cls_definition(isadef, yaml, key, base_module):
    """Import the generator classes referenced by *key* of the ISA
    definition.

    :param isadef: Parsed ISA definition dictionary.
    :param yaml: Path of the YAML file (used in error reporting).
    :param key: Entry of *isadef* listing the modules to scan.
    :param base_module: Module providing ``import_classes_from``.
    :raise MicroprobeArchitectureDefinitionError: on a missing or
        malformed *key* entry.
    """
    try:
        modules = isadef[key]["Modules"]
        return base_module.import_classes_from(modules)
    except KeyError:
        raise MicroprobeArchitectureDefinitionError(
            "'%s' key in %s "
            "file missing or not defined "
            "correctly." % (key, yaml)
        )
def __init__(self, name, descr, values, rep):
    """Create an operand defined by an explicit set of values.

    :param name: Operand name.
    :param descr: Operand description.
    :param values: Allowed operand values.
    :param rep: Optional per-value representations (same length as
        *values*), or None.
    :raise MicroprobeArchitectureDefinitionError: if *rep* is given but
        its length does not match *values*.
    """
    super(OperandValueSet, self).__init__(name, descr)
    # TODO: add input value checking
    self._values = values
    self._imm = True
    self._rep = None
    if rep is None:
        return
    if len(rep) != len(values):
        raise MicroprobeArchitectureDefinitionError(
            "Values and representation of operand definition "
            "'%s' do not have the same length." % name)
    self._rep = dict(zip(values, rep))
def __init__(self, fname, descr, fsize, fshow, fio, foperand):
    """Create a generic instruction field.

    :param fname: Field name.
    :param descr: Field description.
    :param fsize: Field size.
    :param fshow: Whether the field is shown.
    :param fio: Default IO definition of the field.
    :param foperand: Operand associated with the field.
    :raise MicroprobeArchitectureDefinitionError: if *fio* is not one
        of the valid IO values.
    """
    super(GenericInstructionField, self).__init__()
    self._fname, self._fdescr = fname, descr
    self._fsize, self._fshow = fsize, fshow
    self._fio, self._foperand = fio, foperand
    if fio not in self._valid_fio_values:
        raise MicroprobeArchitectureDefinitionError("Invalid default IO "
                                                    "definition for field "
                                                    "%s" % fname)
def import_properties(filename, objects):
    """Import the property files associated with *filename* and register
    the properties on the matching entries of *objects*.

    Properties are read from the sibling directory obtained by replacing
    the trailing ``.yaml`` of *filename* with ``_props``. Each property
    definition either provides a single ``Value`` applied to every
    object, or a ``Values`` mapping (plus optional ``Default``) applied
    per object key.

    :param filename: Path of the main YAML definition file.
    :param objects: Mapping of object key to object; each object must
        support ``register_property``.
    :raise MicroprobeArchitectureDefinitionError: if an object has no
        value and no default is defined.
    """
    LOG.info("Start importing object properties")
    # Replace the LAST occurrence of '.yaml' by '_props' (strings are
    # reversed, so replace(..., 1) works from the end).
    dirname = filename[::-1].replace('lmay.', 'sporp_', 1)[::-1]
    if not os.path.isdir(dirname):
        return
    for filename in os.listdir(dirname):
        # Skip hidden files and cache artifacts; only read YAML files.
        if filename.startswith(".") or filename.endswith(".cache"):
            continue
        if not filename.endswith(".yaml"):
            continue
        property_definitions = read_yaml(
            os.path.join(
                dirname, filename
            ), SCHEMA
        )
        for property_def in property_definitions:
            # Cache of Property instances shared by objects with the
            # same value (RejectingDict forbids accidental overwrite).
            property_objs = RejectingDict()
            property_name = property_def["Name"]
            property_description = property_def.get(
                "Description", "No description"
            )
            property_override = property_def.get("Override", False)
            # "__unset__" sentinel distinguishes 'no Default key' from a
            # Default explicitly set to a falsy value.
            property_default_value = property_def.get("Default", "__unset__")
            property_values = property_def.get("Values", {})
            property_class = Property
            LOG.debug(
                "Importing property '%s - %s'", property_name,
                property_description
            )
            if "Value" in property_def:
                # Single value: the same property instance is registered
                # on every object.
                property_value = property_def["Value"]
                default_property = property_class(
                    property_name, property_description, property_value
                )
                for obj in objects.values():
                    obj.register_property(
                        default_property, force=property_override
                    )
                LOG.debug("Single value property")
                continue
            if (
                property_default_value != "__unset__"
                and property_default_value != "NO_DEFAULT"
            ):
                default_property = property_class(
                    property_name, property_description,
                    property_default_value, default=True
                )
                LOG.debug("Default value: %s", property_default_value)
            else:
                default_property = None
                LOG.debug("Default value: No default value set")
            for key, obj in objects.items():
                if key not in property_values:
                    # "NO_DEFAULT" means: silently skip objects without
                    # an explicit value.
                    if property_default_value == "NO_DEFAULT":
                        continue
                    if default_property is None:
                        raise MicroprobeArchitectureDefinitionError(
                            "Wrong property '%s' definition in file '%s'. "
                            "Value for '%s' is not provided and a default "
                            "value is not defined" % (
                                property_name, filename, key
                            )
                        )
                    obj.register_property(
                        default_property, force=property_override
                    )
                else:
                    property_value = property_values[key]
                    # Consume the entry so leftovers can be reported as
                    # unmatched keys below.
                    del property_values[key]
                    property_value_key = property_value
                    if isinstance(property_value, list):
                        # Lists are unhashable; use their string form as
                        # the sharing-cache key.
                        property_value_key = str(property_value)
                    if property_value_key in property_objs:
                        obj.register_property(
                            property_objs[property_value_key],
                            force=property_override
                        )
                    else:
                        new_property = property_class(
                            property_name, property_description,
                            property_value, default=False
                        )
                        obj.register_property(
                            new_property, force=property_override
                        )
                        property_objs[property_value_key] = new_property
            # Any value left in property_values references an unknown
            # object key.
            for key, value in property_values.items():
                LOG.warning(
                    "'%s' not found. Property '%s' not set to '%s'", key,
                    property_name, value
                )
            LOG.info("Property '%s' imported", property_name)
    LOG.info("End importing object properties")
def __init__(self, caches):
    """Build the per-element data and instruction cache hierarchies.

    Starting from the first-level caches, each deeper level is attached
    to every hierarchy, choosing among the candidate caches the one
    whose element is topologically closest (deepest common ancestor) to
    the hierarchy's first-level element.

    :param caches: Iterable of cache objects exposing ``level``,
        ``contains_data``, ``contains_instructions`` and ``element``.
    :raise MicroprobeArchitectureDefinitionError: if no first level
        data cache or no first level instruction cache is defined.
    """
    first_data_levels = [
        cache for cache in caches
        if cache.level == 1 and cache.contains_data
    ]
    first_ins_levels = [
        cache for cache in caches
        if cache.level == 1 and cache.contains_instructions
    ]

    if len(first_ins_levels) == 0:
        # Bug fix: adjacent string literals previously concatenated
        # without separating spaces ("cacheshould", "instructioncache").
        raise MicroprobeArchitectureDefinitionError(
            "At least one cache "
            "should be defined as first level instruction "
            "cache.")

    if len(first_data_levels) == 0:
        raise MicroprobeArchitectureDefinitionError(
            "At least one cache "
            "should be defined as first level data cache")

    data_levels = {}
    for cache in first_data_levels:
        data_levels[cache.element] = [cache]

    ins_levels = {}
    for cache in first_ins_levels:
        ins_levels[cache.element] = [cache]

    current_level = 2
    next_data_levels = [
        cache for cache in caches
        if cache.level == current_level and cache.contains_data
    ]
    next_ins_levels = [
        cache for cache in caches
        if cache.level == current_level and cache.contains_instructions
    ]

    while len(next_data_levels + next_ins_levels) > 0:

        if len(next_data_levels) > 0:
            for element in data_levels:
                data_level = data_levels[element][-1]
                assert data_level.level == (current_level - 1)
                # Candidate whose element shares the deepest common
                # ancestor with this hierarchy's element wins.
                new_level = sorted(next_data_levels,
                                   key=lambda x, elem=element: x.element.
                                   closest_common_element(elem).depth)[-1]
                data_levels[element].append(new_level)

        if len(next_ins_levels) > 0:
            for element in ins_levels:
                ins_level = ins_levels[element][-1]
                assert ins_level.level == (current_level - 1)

                def my_key(elem):
                    """Sort key: depth of the closest common element."""
                    return elem.element.closest_common_element(
                        element).depth

                new_level = sorted(next_ins_levels, key=my_key)[-1]
                ins_levels[element].append(new_level)

        current_level += 1
        next_data_levels = [
            cache for cache in caches
            if cache.level == current_level and cache.contains_data
        ]
        next_ins_levels = [
            cache for cache in caches
            if cache.level == current_level and cache.contains_instructions
        ]

    self._data_levels = data_levels
    self._ins_levels = ins_levels
def import_definition(filenames, registers):
    """Read operand definitions from *filenames* and build the matching
    operand objects.

    Each YAML element is dispatched on its keys: ``Registers`` (register
    set operand), ``Min``/``Max`` (immediate range), ``Values`` (value
    set), ``Value`` (constant), ``Register`` (constant register) or
    ``Relative`` (instruction-address-relative operand).

    :param filenames: YAML definition files to read.
    :param registers: Mapping of register name to register object.
    :raise MicroprobeArchitectureDefinitionError: on unknown registers,
        unsupported operand definitions or duplicated operand names.
    """
    LOG.debug("Start")
    operands = {}
    # Maps a tuple summarizing the operand definition to the first name
    # that used it, so near-identical definitions can be warned about.
    operands_duplicated = {}
    register_types = tuple([reg.type.name for reg in registers.values()])
    for filename in filenames:
        ope_data = read_yaml(filename, SCHEMA)
        if ope_data is None:
            continue
        for elem in ope_data:
            name = elem["Name"]
            descr = elem.get("Description", "No description")
            override = elem.get("Override", False)
            key = []
            # The try block spans the whole dispatch: any KeyError below
            # (missing register name, missing mandatory field) is
            # reported uniformly as an unknown-register error.
            try:
                if "Registers" in elem:
                    regnames = elem["Registers"]
                    if isinstance(regnames, list):
                        # A single entry naming a register TYPE expands
                        # to every register of that type.
                        if len(regnames) == 1 and \
                                regnames[0] in register_types:
                            regs = [
                                reg for reg in registers.values()
                                if reg.type.name == regnames[0]
                            ]
                        else:
                            regs = [
                                registers[regname]
                                for regname in natural_sort(regnames)
                            ]
                        key.append(tuple(regnames))
                    else:
                        # Mapping form: register -> list of related
                        # registers.
                        regs = OrderedDict()
                        for regname in natural_sort(regnames):
                            regs[registers[regname]] = []
                            for regname2 in regnames[regname]:
                                regs[registers[regname]].append(
                                    registers[regname2])
                        key.append(
                            tuple([(k, tuple(v))
                                   for k, v in regnames.items()]))
                    address_base = elem.get("AddressBase", False)
                    address_index = elem.get("AddressIndex", False)
                    floating_point = elem.get("FloatingPoint", None)
                    vector = elem.get("Vector", None)
                    key.append(address_base)
                    key.append(address_index)
                    key.append(floating_point)
                    key.append(vector)
                    # Filter out Register without
                    # representation (N/A)
                    #
                    # These are pseudo registers used in
                    # simulation/emulation environment.
                    # They are not architected registers.
                    # NOTE(review): when regs is an OrderedDict this
                    # comprehension keeps only its keys (the related
                    # registers are dropped) — confirm this is intended.
                    regs = [reg for reg in regs
                            if reg.representation != 'N/A']
                    operand = OperandReg(name, descr, regs, address_base,
                                         address_index, floating_point,
                                         vector)
                elif "Min" in elem and "Max" in elem:
                    # Immediate range operand.
                    minval = elem["Min"]
                    maxval = elem["Max"]
                    step = elem.get("Step", 1)
                    novalues = elem.get("Except", [])
                    address_index = elem.get("AddressIndex", False)
                    shift = elem.get("Shift", 0)
                    add = elem.get("Add", 0)
                    key.append(minval)
                    key.append(maxval)
                    key.append(step)
                    key.append(tuple(novalues))
                    key.append(address_index)
                    key.append(shift)
                    key.append(add)
                    operand = OperandImmRange(name, descr, minval, maxval,
                                              step, address_index, shift,
                                              novalues, add)
                elif "Values" in elem:
                    # Explicit set of allowed values.
                    values = tuple(elem["Values"])
                    key.append(tuple(values))
                    operand = OperandValueSet(name, descr, values)
                elif "Value" in elem:
                    # Single constant value.
                    value = elem["Value"]
                    key.append(value)
                    operand = OperandConst(name, descr, value)
                elif "Register" in elem:
                    # Operand fixed to one register.
                    reg = registers[elem["Register"]]
                    address_base = elem.get("AddressBase", False)
                    address_index = elem.get("AddressIndex", False)
                    floating_point = elem.get("FloatingPoint", False)
                    vector = elem.get("Vector", False)
                    key.append(elem["Register"])
                    key.append(address_base)
                    key.append(address_index)
                    key.append(floating_point)
                    key.append(vector)
                    operand = OperandConstReg(name, descr, reg, address_base,
                                              address_index, floating_point,
                                              vector)
                elif "Relative" in elem:
                    # Instruction-address-relative displacement operand.
                    mindispl = elem["MinDisplacement"]
                    maxdispl = elem["MaxDisplacement"]
                    relative = elem["Relative"]
                    shift = elem.get("Shift", 0)
                    except_ranges = elem.get("ExceptRange", [])
                    key.append(mindispl)
                    key.append(maxdispl)
                    key.append(shift)
                    key.append(tuple([tuple(elem)
                                      for elem in except_ranges]))
                    operand = InstructionAddressRelativeOperand(
                        name, descr, maxdispl, mindispl, shift,
                        except_ranges, relative)
                else:
                    raise MicroprobeArchitectureDefinitionError(
                        "Operand definition '%s' in '%s' not supported" %
                        (name, filename))
                tkey = tuple(key)
                if tkey in operands_duplicated:
                    LOG.warning(
                        "Similar definition of operands: '%s' and"
                        " '%s'. Check if definition needed.", name,
                        operands_duplicated[tkey])
                else:
                    operands_duplicated[tkey] = name
            except KeyError as exception:
                raise MicroprobeArchitectureDefinitionError(
                    "Definition"
                    " of operand '%s' "
                    "uses an unknown "
                    "register in '%s'"
                    "\nMissing defini"
                    "tion of: %s" % (name, filename, exception))
            # 'Override: True' allows a later file to redefine an operand.
            if name in operands and not override:
                raise MicroprobeArchitectureDefinitionError(
                    "Duplicated definition of operand '%s' found in '%s'" %
                    (name, filename))
            LOG.debug(operand)
            operands[name] = operand
    LOG.debug("End")
    return operands
def import_definition(cls, filenames, ifields):
    """Read instruction format definitions and instantiate them via *cls*.

    :param cls: Instruction format class/factory.
    :param filenames: YAML definition files to read.
    :param ifields: Mapping of instruction field name to field object.
    :raise MicroprobeArchitectureDefinitionError: on duplicated fields,
        unknown fields or duplicated format names.
    """
    LOG.debug("Start")
    formats = {}
    seen_shapes = {}
    for fname in filenames:
        definitions = read_yaml(fname, SCHEMA)
        if definitions is None:
            continue
        for definition in definitions:
            formatname = definition["Name"]
            fdescr = definition.get("Description", "No description")
            assembly = definition["Assembly"]

            # TODO: Document the convention
            # Fields prefixed "0_" may repeat; the remaining field names
            # must be unique within the format.
            nonzero = [field for field in definition["Fields"]
                       if not field.startswith("0_")]

            shape = (tuple(definition["Fields"]), assembly)
            if shape in seen_shapes:
                LOG.warning(
                    "Similar definition of instruction format: '%s' "
                    "and '%s'. Check if definition needed.", formatname,
                    seen_shapes[shape])
            else:
                seen_shapes[shape] = formatname

            if len(nonzero) != len(set(nonzero)):
                raise MicroprobeArchitectureDefinitionError(
                    "Definition of "
                    "instruction format"
                    " '%s' found in '%s'"
                    " contains duplicated"
                    " fields." % (formatname, fname))

            try:
                fields = [ifields[fieldname]
                          for fieldname in definition["Fields"]]
            except KeyError as missing:
                raise MicroprobeArchitectureDefinitionError(
                    "Unknown field %s "
                    "definition in "
                    "instruction format"
                    " '%s' found in '%s'." % (missing, formatname, fname))

            iformat = cls(formatname, fdescr, fields, assembly)
            if formatname in formats:
                raise MicroprobeArchitectureDefinitionError("Duplicated "
                                                            "definition "
                                                            "of instruction "
                                                            "format "
                                                            "'%s' found "
                                                            "in '%s'" %
                                                            (formatname,
                                                             fname))
            LOG.debug(iformat)
            formats[formatname] = iformat
    LOG.debug("End")
    return formats
def import_definition(cls, filenames, element_types):
    """Read microarchitecture element definitions, build the element
    tree and return a mapping of full element name to element.

    After reading the elements and their sub-element lists, elements
    referenced by more than one parent are duplicated so that the final
    structure is a tree with a single top element; properties are then
    imported for every element.

    :param cls: Element class/factory.
    :param filenames: YAML definition files to read.
    :param element_types: Mapping of type name to element type object.
    :raise MicroprobeArchitectureDefinitionError: on unknown types,
        duplicated or undefined elements, or a malformed hierarchy.
    """
    LOG.debug("Start importing microarchitecture elements")
    elements = RejectingDict()
    elements_subelements = RejectingDict()
    for filename in filenames:
        element_data = read_yaml(filename, SCHEMA)
        if element_data is None:
            continue
        for elem in element_data:
            name = elem["Name"]
            # NOTE(review): 'parent' is read but never used below; the
            # actual parent links are derived from Subelements.
            parent = elem.get("Parent", None)
            subelements = elem.get("Subelements", [])
            repeat = elem.get("Repeat", None)
            # Without Repeat, the expansion loop runs once and replacing
            # "0" by "0" is a no-op.
            rfrom = 0
            rto = 0
            replace = "0"
            try:
                elem_type = element_types[elem["Type"]]
            except KeyError:
                raise MicroprobeArchitectureDefinitionError(
                    "Unknown "
                    "microarchitecture element type in "
                    "microarchitecture element definition "
                    " '%s' found in '%s'" % (name, filename))
            descr = elem.get("Description", elem_type.description)
            if repeat:
                rfrom = repeat["From"]
                replace = "%s" % rfrom
                rto = repeat["To"]
            for index in range(rfrom, rto + 1):
                cname = name.replace(replace, "%d" % index)
                cdescr = descr.replace(replace, "%d" % index)
                element = cls(cname, cdescr, elem_type)
                try:
                    # RejectingDict raises ValueError on key reuse.
                    elements[cname] = element
                    elements_subelements[cname] = subelements
                except ValueError:
                    raise MicroprobeArchitectureDefinitionError(
                        "Duplicated microarchitecture element "
                        "definition '%s' found in '%s'" %
                        (name, filename))
                LOG.debug(element)

    for filename in filenames:
        import_properties(filename, elements)

    # Resolve sub-element names into element instances.
    for elem, subelements in elements_subelements.items():
        try:
            subelements_instances = [elements[item] for item in subelements]
        except KeyError as exc:
            raise MicroprobeArchitectureDefinitionError(
                "Undefined sub-element '%s' in element "
                "definition '%s'. Check following "
                "files: %s" % (exc, elem, filenames))
        elements[elem].set_subelements(subelements_instances)

    element_list = list(elements.values())
    fixing_hierarchy = True
    LOG.info("Start building element hierarchy...")
    fix_pass = 0
    # Duplicate any element that has several parents until the structure
    # is a proper tree.
    # NOTE(review): element_list is appended to and removed from while
    # being iterated; correctness relies on re-running the outer while
    # loop until no fix is needed — confirm before restructuring.
    while fixing_hierarchy:
        fix_pass += 1
        LOG.debug("Start building element hierarchy... pass %d", fix_pass)
        fixing_hierarchy = False
        for element in element_list:
            parents = [
                item for item in element_list
                if element in item.subelements
            ]
            if len(parents) > 1:
                # needs duplication
                LOG.debug("Element %s has %d parents", element,
                          len(parents))
                for parent in sorted(parents):
                    LOG.debug("Duplicating for parent: %s", parent)
                    # Create a new copy
                    new_element = cls(element.name, element.description,
                                      element.type)
                    new_element.set_subelements(element.subelements)
                    element_list.append(new_element)
                    # Update parent to point to the new copy
                    new_subelements = parent.subelements
                    new_subelements.remove(element)
                    new_subelements.append(new_element)
                    parent.set_subelements(new_subelements)
                    fixing_hierarchy = True
                element_list.remove(element)
    LOG.info("Finish building element hierarchy")

    # Check correctness of the structure and set parents
    LOG.info("Checking element hierarchy...")
    top_element = None
    for element in element_list:
        parents = [
            item for item in element_list
            if element in item.subelements
        ]
        if len(parents) > 1:
            raise MicroprobeArchitectureDefinitionError(
                "Wrong hierarchy of microarchitecture "
                "elements. The definition of element"
                " '%s' has multiple parents: '%s'." %
                (element, [str(elem) for elem in parents]))
        elif len(parents) == 0:
            # Exactly one parentless element is allowed: the top.
            if top_element is not None:
                raise MicroprobeArchitectureDefinitionError(
                    "Wrong hierarchy of microarchitecture "
                    "elements. There are at least two top "
                    "elements: '%s' and '%s'. Define a single "
                    "parent element for all the hierarchy." %
                    (element, top_element))
            top_element = element
        else:
            element.set_parent_element(parents[0])

    if top_element is None:
        raise MicroprobeArchitectureDefinitionError(
            "Wrong hierarchy of microarchitecture "
            "elements. There is not a top element."
            " Define a single parent element for all "
            "the hierarchy.")
    LOG.info("Element hierarchy correct")

    elem_dict = dict([(element.full_name, element)
                      for element in element_list])

    # Re-import properties keyed by full name (duplicated elements
    # included).
    for filename in filenames:
        import_properties(filename, elem_dict)

    LOG.info("End importing elements")
    return elem_dict