# Imports assumed by the validation helpers below; the remaining helpers
# (check_keys, check_target, Flash, Target, TargetType, IpBlock, the
# *_required / *_optional key tables, etc.) are provided elsewhere in this
# module or by reggen.
import logging as log
import re
from collections import OrderedDict
from typing import Dict, List


def check_clock_groups(top):

    # default empty assignment
    if "groups" not in top['clocks']:
        top['clocks']['groups'] = []

    error = 0
    for group in top['clocks']['groups']:
        error = check_keys(group, clock_groups_required, clock_groups_optional,
                           clock_groups_added, "Clock Groups")

        # Check sw_cg values are valid
        if group['sw_cg'] not in ['yes', 'no', 'hint']:
            log.error("Incorrect attribute for sw_cg: {}".format(
                group['sw_cg']))
            error += 1

        # Check combination of src and sw_cg is valid
        if group['src'] == 'yes' and group['sw_cg'] != 'no':
            log.error("Invalid combination of src and sw_cg: {} and {}".format(
                group['src'], group['sw_cg']))
            error += 1

        # Check combination of sw_cg and unique is valid
        unique = group['unique'] if 'unique' in group else 'no'
        if group['sw_cg'] == 'no' and unique != 'no':
            log.error(
                "Incorrect attribute combination. When sw_cg is no, unique must be no"
            )
            error += 1

        if error:
            break

    return error
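# Illustrative sketch only (not part of the validator): a clock group entry of
# the shape check_clock_groups() inspects. Key names beyond src / sw_cg /
# unique are hypothetical; the authoritative key sets are
# clock_groups_required / clock_groups_optional / clock_groups_added.
_example_clock_group = {
    'name': 'example_group',  # hypothetical group name
    'src': 'no',              # when 'yes', sw_cg must be 'no'
    'sw_cg': 'hint',          # must be one of 'yes', 'no', 'hint'
    'unique': 'no',           # must be 'no' whenever sw_cg is 'no'
}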
def check_pad(top: Dict, pad: Dict, known_pad_names: Dict,
              valid_connections: List[str], prefix: str) -> int:
    error = 0
    error += check_keys(pad, pad_required, pad_optional, pad_added, prefix)

    # check name uniqueness
    if pad['name'] in known_pad_names:
        log.warning('Pad name {} is not unique'.format(pad['name']))
        error += 1
    known_pad_names[pad['name']] = 1

    if not is_valid_pad_type(pad['type']):
        log.warning('Unknown pad type {}'.format(pad['type']))
        error += 1

    if pad['bank'] not in top['pinout']['banks']:
        log.warning('Unknown IO power bank {}'.format(pad['bank']))
        error += 1

    if pad['connection'] not in valid_connections:
        log.warning('Connection type {} of pad {} is invalid'.format(
            pad['connection'], pad['name']))
        error += 1

    return error
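# Illustrative usage sketch only (not part of the validator): how check_pad()
# might be driven. The pad field values and the 'Example' prefix are
# hypothetical; the authoritative key sets are pad_required / pad_optional.
def _example_check_pad_usage(top):
    known_pad_names = {}
    example_pad = {
        'name': 'IOA0',         # must be unique across the pinout
        'type': 'BidirStd',     # checked by is_valid_pad_type()
        'bank': 'VIOA',         # must be one of top['pinout']['banks']
        'connection': 'muxed',  # must be in the valid_connections list below
    }
    return check_pad(top, example_pad, known_pad_names,
                     ['direct', 'manual', 'muxed'], 'Example')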
def validate_top(top, ipobjs, xbarobjs):
    # return as it is for now
    error = check_keys(top, top_required, top_optional, top_added, "top")

    if error != 0:
        log.error("Top HJSON has top level errors. Aborting")
        return top, error

    component = top['name']

    # MODULE check
    err, ip_idxs = check_target(top, ipobjs, Target(TargetType.MODULE))
    error += err

    # XBAR check
    err, xbar_idxs = check_target(top, xbarobjs, Target(TargetType.XBAR))
    error += err

    # MEMORY check
    error += check_flash(top)

    # Power domain check
    error += check_power_domains(top)

    # Clock / Reset check
    error += check_clocks_resets(top, ipobjs, ip_idxs, xbarobjs, xbar_idxs)

    # Clock group check
    error += check_clock_groups(top)

    # RV_PLIC check

    # PINMUX & PADS check
    if "padctrl" not in top:
        log.warning("padctrl field doesn't exist in top. Skipping pads "
                    "generation. Top input/output are directly connected from "
                    "peripherals.")
    # Pads configuration check
    else:
        error += check_padctrl(top, component)

    if "pinmux" not in top:
        log.warning("Top {} has no 'pinmux' field. Please consider specifying "
                    "pinmux and pads configuration".format(component))
        top["pinmux"] = OrderedDict()
    # checking pinmux after pads as dio connects to PAD
    error += check_pinmux(top, component)

    return top, error
def check_clocks_resets(top, ipobjs, ip_idxs, xbarobjs, xbar_idxs):

    # check clock fields are all there
    for src in top['clocks']['srcs']:
        check_keys(src, clock_srcs_required, {}, {}, "Clock source")

    # all defined clock/reset nets
    reset_nets = [reset['name'] for reset in top['resets']]
    clock_srcs = [clock['name'] for clock in top['clocks']['srcs']]

    error = 0
    # Check clock/reset port connection for all IPs
    for ipcfg in top['module']:
        ipcfg_name = ipcfg['name'].lower()
        log.info("Checking clock/resets for %s" % ipcfg_name)
        error += validate_reset(ipcfg, ipobjs[ip_idxs[ipcfg_name]], reset_nets)
        error += validate_clock(ipcfg, ipobjs[ip_idxs[ipcfg_name]], clock_srcs)

        if error:
            log.error("module clock/reset checking failed")
            break

    # Check clock/reset port connection for all xbars
    for xbarcfg in top['xbar']:
        xbarcfg_name = xbarcfg['name'].lower()
        log.info("Checking clock/resets for xbar %s" % xbarcfg_name)
        error += validate_reset(xbarcfg, xbarobjs[xbar_idxs[xbarcfg_name]],
                                reset_nets, "xbar")
        error += validate_clock(xbarcfg, xbarobjs[xbar_idxs[xbarcfg_name]],
                                clock_srcs, "xbar")

        if error:
            log.error("xbar clock/reset checking failed")
            break

    return error
def check_flash(top):
    error = 0

    for mem in top['memory']:
        if mem['type'] == "eflash":
            error = check_keys(mem, eflash_required, eflash_optional,
                               eflash_added, "Eflash")

            flash = Flash(mem)
            error += 1 if not flash.check_values() else 0

            if error:
                log.error("Flash check failed")
            else:
                flash.populate(mem)

    return error
def validate_top(top, ipobjs, xbarobjs):
    # return as it is for now
    error = check_keys(top, top_required, top_optional, top_added, "top")

    if error != 0:
        log.error("Top HJSON has top level errors. Aborting")
        return top, error

    component = top['name']

    # Check module instantiations
    error += check_modules(top, component)

    # MODULE check
    err, ip_idxs = check_target(top, ipobjs, Target(TargetType.MODULE))
    error += err

    # XBAR check
    err, xbar_idxs = check_target(top, xbarobjs, Target(TargetType.XBAR))
    error += err

    # MEMORY check
    error += check_flash(top)

    # Power domain check
    error += check_power_domains(top)

    # Clock / Reset check
    error += check_clocks_resets(top, ipobjs, ip_idxs, xbarobjs, xbar_idxs)

    # Clock group check
    error += check_clock_groups(top)

    # RV_PLIC check

    # Pinout, pinmux and target checks
    # Note that these checks must happen in this order, as
    # the pinmux and target configs depend on the pinout.
    error += check_pinout(top, component)
    error += check_pinmux(top, component)
    error += check_implementation_targets(top, component)

    return top, error
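# Illustrative usage sketch only (not part of the tool flow), showing how
# validate_top() might be driven: it takes the parsed top-level HJSON dict
# plus the IP and crossbar object lists that check_target() indexes by
# lowercased instance name, and returns the (possibly annotated) top dict
# together with an error count. The function and variable names below are
# hypothetical.
def _example_validate(topcfg, ip_objects, xbar_objects):
    topcfg, error = validate_top(topcfg, ip_objects, xbar_objects)
    if error != 0:
        raise SystemExit("top configuration is invalid")
    return topcfg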
def validate_reset(top, inst, reset_nets, prefix=""):
    # Gather inst port list
    error = 0

    # Handle either an IpBlock (generated by reggen) or an OrderedDict
    # (generated by topgen for a crossbar)
    if isinstance(inst, IpBlock):
        name = inst.name
        reset_signals = inst.clocking.reset_signals()
    else:
        name = inst['name']
        reset_signals = ([inst.get('reset_primary', 'rst_ni')] +
                         inst.get('other_reset_list', []))

    log.info("%s %s resets are %s" % (prefix, name, reset_signals))

    # Check that reset connections are properly formatted.
    # There are two options:
    # the reset connection for a particular port is either a str or a dict.
    # If the value is a string, the module can only have ONE domain.
    # If the value is a dict, it must have the keys name / domain, and the
    # value of domain must match that defined for the module.
    for port, reset in top["reset_connections"].items():
        if isinstance(reset, str):
            top["reset_connections"][port] = {}
            top["reset_connections"][port]['name'] = reset

            if len(top["domain"]) > 1:
                raise ValueError(f"{top['name']} reset connection {reset} "
                                 "has no assigned domain")
            else:
                top["reset_connections"][port]['domain'] = top["domain"][0]

        if isinstance(reset, dict):
            error += check_keys(reset, reset_connection_required,
                                reset_connection_optional,
                                reset_connection_added,
                                'dict structure for reset connections')

            if reset['domain'] not in top["domain"]:
                error += 1
                log.error(
                    f"domain {reset['domain']} defined for reset {reset['name']} "
                    f"is not a domain of {top['name']}")

    # Check that the reset connections are fully populated
    if len(top['reset_connections']) != len(reset_signals):
        error += 1
        log.error("%s %s mismatched number of reset ports and nets" %
                  (prefix, name))

    missing_port = [
        port for port in top['reset_connections'].keys()
        if port not in reset_signals
    ]

    if missing_port:
        error += 1
        log.error("%s %s Following reset ports do not exist:" %
                  (prefix, name))
        for port in missing_port:
            log.error("%s" % port)

    missing_net = [
        net['name'] for net in top['reset_connections'].values()
        if net['name'] not in reset_nets
    ]

    if missing_net:
        error += 1
        log.error("%s %s Following reset nets do not exist:" %
                  (prefix, name))
        for net in missing_net:
            log.error("%s" % net)

    return error
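# Illustrative sketch only: the two accepted shapes of a reset_connections
# entry that validate_reset() normalizes. A bare string is expanded to a dict
# and is only legal for single-domain modules; a dict must already carry the
# name / domain keys (per reset_connection_required) and the domain must be
# one of the module's declared domains. The port, net and domain names below
# are hypothetical.
_example_reset_connections = {
    'rst_ni': 'sys',                   # string form: net name only
    'rst_aon_ni': {'name': 'sys_aon',  # dict form: explicit net name ...
                   'domain': '0'},     # ... plus a matching domain
}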
def check_implementation_targets(top: Dict, prefix: str) -> int:
    error = 0
    known_names = {}
    for target in top['targets']:
        error += check_keys(target, target_required, target_optional,
                            target_added, prefix + ' Targets')

        # check name uniqueness
        if target['name'] in known_names:
            log.warning('Target name {} is not unique'.format(target['name']))
            error += 1
        known_names[target['name']] = 1

        error += check_keys(target['pinmux'], target_pinmux_required,
                            target_pinmux_optional, target_pinmux_added,
                            prefix + ' Target pinmux')

        error += check_keys(target['pinout'], target_pinout_required,
                            target_pinout_optional, target_pinout_added,
                            prefix + ' Target pinout')

        # Check special pad signals
        known_entry_names = {}
        for entry in target['pinmux']['special_signals']:
            error += check_keys(entry, special_sig_required,
                                special_sig_optional, special_sig_added,
                                prefix + ' Special signal')

            # check name uniqueness
            if entry['name'] in known_entry_names:
                log.warning('Special pad name {} is not unique'.format(
                    entry['name']))
                error += 1
            known_entry_names[entry['name']] = 1

            # The pad key needs to refer to a valid pad name.
            is_muxed = False
            for pad in top['pinout']['pads']:
                if entry['pad'] == pad['name']:
                    is_muxed = pad['connection'] == 'muxed'
                    break
            else:
                log.warning('Unknown pad {}'.format(entry['pad']))
                error += 1

            if not is_muxed:
                # If this is not a muxed pad, we need to make sure this refers
                # to a DIO that is NOT a manual pad.
                for sig in top['pinmux']['signals']:
                    if entry['pad'] == sig['pad']:
                        break
                else:
                    log.warning(
                        'Special pad {} cannot refer to a manual pad'.format(
                            entry['pad']))
                    error += 1

        # Check pads to remove and stub out
        for entry in target['pinout']['remove_pads']:
            # The pad key needs to refer to a valid pad name.
            for pad in top['pinout']['pads']:
                if entry == pad['name']:
                    break
            else:
                log.warning('Unknown pad {}'.format(entry))
                error += 1

        # Check pads to add
        known_pad_names = {}
        for pad in top['pinout']['pads']:
            known_pad_names.update({pad['name']: 1})

        for pad in target['pinout']['add_pads']:
            error += check_pad(top, pad, known_pad_names, ['manual'],
                               prefix + ' Additional Pad')

    return error
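# Illustrative sketch only: the nested target structure that
# check_implementation_targets() walks. All names are hypothetical; the
# authoritative key sets are target_required / target_pinmux_required /
# target_pinout_required / special_sig_required.
_example_target = {
    'name': 'example_board',
    'pinmux': {
        'special_signals': [
            # 'pad' must name an existing pad; if that pad is not 'muxed',
            # it must correspond to a DIO signal (not a manual pad).
            {'name': 'example_sig', 'pad': 'IOA0'},
        ],
    },
    'pinout': {
        'remove_pads': ['IOA1'],  # must name existing pads
        'add_pads': [
            # checked via check_pad() with valid_connections == ['manual']
            {'name': 'EXAMPLE_PAD', 'type': 'BidirStd',
             'bank': 'VIOA', 'connection': 'manual'},
        ],
    },
}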
def check_pinmux(top: Dict, prefix: str) -> int:
    error = check_keys(top['pinmux'], pinmux_required, pinmux_optional,
                       pinmux_added, prefix + ' Pinmux')

    # This is used for the direct connection accounting below,
    # where we tick off already connected direct pads.
    known_direct_pads = {}
    direct_pad_attr = {}
    for pad in top['pinout']['pads']:
        if pad['connection'] == 'direct':
            known_direct_pads[pad['name']] = 1
            direct_pad_attr[pad['name']] = pad['type']

    # Note: the actual signal crosscheck is deferred until the merge stage,
    # since we have no idea at this point which IOs comportable IPs expose.
    for sig in top['pinmux']['signals']:
        error += check_keys(sig, pinmux_sig_required, pinmux_sig_optional,
                            pinmux_sig_added, prefix + ' Pinmux signal')

        if sig['connection'] not in ['direct', 'manual', 'muxed']:
            log.warning('Invalid connection type {}'.format(sig['connection']))
            error += 1

        # The pad needs to refer to a valid pad name in the pinout that is of
        # connection type "direct". We tick off all direct pads that have been
        # referenced in order to make sure there are no double connections
        # and no unconnected direct pads.
        padname = sig.setdefault('pad', '')
        padattr = ''
        if padname != '':
            if padname in known_direct_pads:
                if known_direct_pads[padname] == 1:
                    known_direct_pads[padname] = 0
                    padattr = direct_pad_attr[padname]
                else:
                    log.warning('Direct pad {} is already connected'.format(
                        padname))
                    error += 1
            else:
                log.warning('Unknown direct pad {}'.format(padname))
                error += 1

        # Check port naming scheme.
        port = sig.setdefault('port', '')
        pattern = r'^[a-zA-Z0-9_]*(\[[0-9]*\]){0,1}$'
        matches = re.match(pattern, port)
        if matches is None:
            log.warning('Port name {} has wrong format'.format(port))
            error += 1

        # Check that only direct connections have pad keys
        if sig['connection'] == 'direct':
            if sig.setdefault('attr', '') != '':
                log.warning(
                    'Direct connection of instance {} port {} '
                    'must not have an associated pad attribute field'.format(
                        sig['instance'], sig['port']))
                error += 1
            # Since the signal is directly connected, we can automatically
            # infer the pad type needed to instantiate the correct attribute
            # CSR WARL module inside the pinmux.
            sig['attr'] = padattr
            if padname == '':
                log.warning(
                    'Instance {} port {} connection is of direct type '
                    'and therefore must have an associated pad name.'.format(
                        sig['instance'], sig['port']))
                error += 1
            if port == '':
                log.warning(
                    'Instance {} port {} connection is of direct type '
                    'and therefore must have an associated port name.'.format(
                        sig['instance'], sig['port']))
                error += 1
        elif sig['connection'] == 'muxed':
            # Muxed signals do not have a corresponding pad and attribute CSR,
            # since they first go through the pinmux matrix.
            if sig.setdefault('attr', '') != '':
                log.warning(
                    'Muxed connection of instance {} port {} '
                    'must not have an associated pad attribute field'.format(
                        sig['instance'], sig['port']))
                error += 1
            if padname != '':
                log.warning('Muxed connection of instance {} port {} '
                            'must not have an associated pad'.format(
                                sig['instance'], sig['port']))
                error += 1
        elif sig['connection'] == 'manual':
            # The pad attr key is only allowed in the manual case,
            # as there is no way to infer the pad type automatically.
            sig.setdefault('attr', 'BidirStd')
            if padname != '':
                log.warning('Manual connection of instance {} port {} '
                            'must not have an associated pad'.format(
                                sig['instance'], sig['port']))
                error += 1

    # At this point, all direct pads should have been ticked off.
    for key, val in known_direct_pads.items():
        if val == 1:
            log.warning('Direct pad {} has not been connected'.format(key))
            error += 1

    return error
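# Illustrative sketch only: three pinmux signal entries covering the
# connection types that check_pinmux() distinguishes. Instance, port and pad
# names are hypothetical; required keys come from pinmux_sig_required.
_example_pinmux_signals = [
    # direct: needs a pad (of connection type 'direct' in the pinout) and a
    # port; attr is inferred from the pad type and must not be preset
    {'instance': 'spi_host0', 'port': 'sck', 'connection': 'direct',
     'pad': 'SPI_HOST_CLK'},
    # muxed: routed through the pinmux matrix, so no pad and no attr
    {'instance': 'uart0', 'port': 'tx', 'connection': 'muxed'},
    # manual: no pad, but an explicit attr is allowed (defaults to BidirStd)
    {'instance': 'sensor_ctrl', 'port': 'ast_debug[0]',
     'connection': 'manual', 'attr': 'BidirStd'},
]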
def check_padctrl(top, prefix):
    error = check_keys(top["padctrl"], padctrl_required, padctrl_optional,
                       padctrl_added, prefix + " PadControl")
    return error
def check_clocks_resets(top, ipobjs, ip_idxs, xbarobjs, xbar_idxs):

    error = 0

    # there should only be one each of pwrmgr/clkmgr/rstmgr
    pwrmgrs = [m for m in top['module'] if m['type'] == 'pwrmgr']
    clkmgrs = [m for m in top['module'] if m['type'] == 'clkmgr']
    rstmgrs = [m for m in top['module'] if m['type'] == 'rstmgr']

    if len(pwrmgrs) != 1 or len(clkmgrs) != 1 or len(rstmgrs) != 1:
        log.error("Incorrect number of pwrmgr/clkmgr/rstmgr")
        error += 1

    # check clock fields are all there
    ext_srcs = []
    for src in top['clocks']['srcs']:
        check_keys(src, clock_srcs_required, clock_srcs_optional, {},
                   "Clock source")
        ext_srcs.append(src['name'])

    # check derived clock sources
    log.info("Collected clocks are {}".format(ext_srcs))
    for src in top['clocks']['derived_srcs']:
        check_keys(src, derived_clock_srcs_required, {}, {}, "Derived clocks")
        if src['src'] not in ext_srcs:
            error += 1
            log.error("{} is not a valid src for {}".format(
                src['src'], src['name']))

    # all defined clock/reset nets
    reset_nets = [reset['name'] for reset in top['resets']['nodes']]
    clock_srcs = [
        clock['name']
        for clock in top['clocks']['srcs'] + top['clocks']['derived_srcs']
    ]

    # Check clock/reset port connection for all IPs
    for ipcfg in top['module']:
        ipcfg_name = ipcfg['name'].lower()
        log.info("Checking clock/resets for %s" % ipcfg_name)
        error += validate_reset(ipcfg, ipobjs[ip_idxs[ipcfg_name]], reset_nets)
        error += validate_clock(ipcfg, ipobjs[ip_idxs[ipcfg_name]], clock_srcs)

        if error:
            log.error("module clock/reset checking failed")
            break

    # Check clock/reset port connection for all xbars
    for xbarcfg in top['xbar']:
        xbarcfg_name = xbarcfg['name'].lower()
        log.info("Checking clock/resets for xbar %s" % xbarcfg_name)
        error += validate_reset(xbarcfg, xbarobjs[xbar_idxs[xbarcfg_name]],
                                reset_nets, "xbar")
        error += validate_clock(xbarcfg, xbarobjs[xbar_idxs[xbarcfg_name]],
                                clock_srcs, "xbar")

        if error:
            log.error("xbar clock/reset checking failed")
            break

    return error
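# Illustrative sketch only: the clock source lists consumed by this newer
# check_clocks_resets(). Each derived source must name an existing entry of
# top['clocks']['srcs'] via its 'src' key, and reset nets now live under
# top['resets']['nodes']. All names are hypothetical; the full key sets are
# clock_srcs_required / clock_srcs_optional / derived_clock_srcs_required.
_example_clocks = {
    'srcs': [
        {'name': 'main'},
        {'name': 'io'},
    ],
    'derived_srcs': [
        {'name': 'io_div4', 'src': 'io'},  # 'src' must match a srcs entry
    ],
}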