def _validate_scrambling(scr):
    '''Validate the scrambling configuration entry.'''
    # Size defaults are given in bytes; normalize all of them to integers.
    for size_key, default in (("key_size", "16"), ("iv_size", "8"),
                              ("cnst_size", "16")):
        scr.setdefault(size_key, default)
        scr[size_key] = check_int(scr[size_key])

    if "keys" not in scr:
        log.error("Missing key configuration.")
        exit(1)
    if "digests" not in scr:
        log.error("Missing digest configuration.")
        exit(1)

    # Resolve "<random>" placeholders for the scrambling keys.
    for key_cfg in scr["keys"]:
        key_cfg.setdefault("name", "unknown_key_name")
        key_cfg.setdefault("value", "<random>")
        random_or_hexvalue(key_cfg, "value", scr["key_size"] * 8)

    # Resolve "<random>" placeholders for the digest IVs and constants.
    for dig_cfg in scr["digests"]:
        dig_cfg.setdefault("name", "unknown_key_name")
        dig_cfg.setdefault("iv_value", "<random>")
        dig_cfg.setdefault("cnst_value", "<random>")
        random_or_hexvalue(dig_cfg, "iv_value", scr["iv_size"] * 8)
        random_or_hexvalue(dig_cfg, "cnst_value", scr["cnst_size"] * 8)
def _validate_otp(otp):
    '''Validate the OTP macro configuration entry.'''
    otp.setdefault("depth", "1024")
    otp.setdefault("width", "2")
    depth = check_int(otp["depth"])
    width = check_int(otp["width"])
    otp["depth"] = depth
    otp["width"] = width
    # Total OTP size in bytes.
    otp["size"] = depth * width
    # Derived address widths for native-word and byte addressing.
    otp["addr_width"] = ceil(log2(depth))
    otp["byte_addr_width"] = ceil(log2(otp["size"]))
def _validate_item(item):
    '''Validates an item within a partition'''
    for field, default in (("name", "unknown_name"), ("size", "0"),
                           ("isdigest", "false")):
        item.setdefault(field, default)
    # Make sure the size has integer type.
    size = check_int(item["size"])
    item["size"] = size
    # Generate random constant to be used when the partition has not been
    # initialized yet or when it is in error state.
    random_or_hexvalue(item, "inv_default", size * 8)
def _validate_tokens(config):
    '''Validates and hashes the tokens.

    Normalizes 'token_size' (bits), resolves "<random>" token values, and
    appends a '<name>Hashed' cSHAKE128 digest entry for every token in
    config['tokens'].
    '''
    config.setdefault('token_size', 128)
    config['token_size'] = check_int(config['token_size'])
    # Tokens are serialized byte-wise below, so the size must be byte
    # aligned. BUG FIX: this check runs before the token loop, so the old
    # message referenced an unbound name `token` and raised NameError
    # instead of the intended ValueError.
    if config['token_size'] % 8:
        raise ValueError('Token size {} must be byte aligned'.format(
            config['token_size']))
    num_bytes = config['token_size'] // 8

    hashed_tokens = []
    for token in config['tokens']:
        # Resolve "<random>" placeholders to concrete integer values.
        random_or_hexvalue(token, 'value', config['token_size'])
        hashed_token = OrderedDict()
        hashed_token['name'] = token['name'] + 'Hashed'
        data = token['value'].to_bytes(num_bytes, byteorder='big')
        # Custom string chosen for life cycle KMAC App interface
        custom = 'LC_CTRL'.encode('UTF-8')
        hashobj = cSHAKE128.new(data=data, custom=custom)
        hashed_token['value'] = int.from_bytes(hashobj.read(num_bytes),
                                               byteorder='big')
        hashed_tokens.append(hashed_token)

    config['tokens'] += hashed_tokens
def __init__(self, config):
    '''Validates the OTP memory map configuration dict and stores it.'''
    log.info('')
    log.info('Parse and translate OTP memory map.')
    log.info('')
    if "seed" not in config:
        raise RuntimeError("Missing seed in configuration.")
    config["seed"] = check_int(config["seed"])
    # Initialize RNG.
    # The user seed is offset by a module-level diversification constant
    # before seeding the RNG.
    random.seed(OTP_SEED_DIVERSIFIER + int(config['seed']))
    log.info('Seed: {0:x}'.format(config['seed']))
    log.info('')
    # All three top-level sections are mandatory.
    if "otp" not in config:
        raise RuntimeError("Missing otp configuration.")
    if "scrambling" not in config:
        raise RuntimeError("Missing scrambling configuration.")
    if "partitions" not in config:
        raise RuntimeError("Missing partition configuration.")
    # Validate OTP info.
    _validate_otp(config["otp"])
    # Validate scrambling info.
    _validate_scrambling(config["scrambling"])
    # Validate memory map. The returned partition/item index dict is kept
    # for later lookups.
    self.part_dict = _validate_mmap(config)
    self.config = config
    log.info('')
    log.info('Successfully parsed and translated OTP memory map.')
    log.info('')
def _validate_item(item):
    '''Validates an item within a partition'''
    item.setdefault("name", "unknown_name")
    item.setdefault("size", "0")
    item.setdefault("isdigest", "false")
    item.setdefault("ismubi", "false")

    # Coerce raw entries to their proper types.
    item["isdigest"] = check_bool(item["isdigest"])
    item["ismubi"] = check_bool(item["ismubi"])
    item["size"] = check_int(item["size"])
    item_width = item["size"] * 8

    if not item["ismubi"]:
        # Non-mubi item: generate a random constant to be used when the
        # partition has not been initialized yet or is in error state.
        random_or_hexvalue(item, "inv_default", item_width)
        return

    # Mubi item: the width must be a legal mubi width, and the default is
    # the mubi encoding of a boolean.
    if not is_width_valid(item_width):
        raise RuntimeError("Mubi value {} has invalid width".format(
            item["name"]))
    item.setdefault("inv_default", "false")
    item["inv_default"] = mubi_value_as_int(check_bool(item["inv_default"]),
                                            item_width)
def create_mmap_table(self):
    '''Render the OTP memory map as a markdown (pipe) table.'''
    header = [
        "Index", "Partition", "Size [B]", "Access Granule", "Item",
        "Byte Address", "Size [B]"
    ]
    rows = [header]
    for part_idx, part in enumerate(self.config["partitions"]):
        for item_idx, item in enumerate(part["items"]):
            # Digest items are always accessed with 64bit granularity and
            # link to their register description; other items inherit the
            # partition granule (64bit when scrambled, else 32bit).
            if check_bool(item["isdigest"]):
                granule = "64bit"
                name = "[{}](#Reg_{}_0)".format(item["name"],
                                                item["name"].lower())
            else:
                granule = "64bit" if check_bool(part["secret"]) else "32bit"
                name = item["name"]
            # Partition-level columns appear only on the first item row.
            if item_idx == 0:
                row = [str(part_idx), part["name"], str(part["size"]), granule]
            else:
                row = ["", "", "", granule]
            row += [
                name, "0x{:03X}".format(check_int(item["offset"])),
                str(item["size"])
            ]
            rows.append(row)
    return tabulate(rows,
                    headers="firstrow",
                    tablefmt="pipe",
                    colalign=("center", ) * len(header))
def _validate_constraints(config):
    '''Validates Hamming weight and distance constraints'''
    for field in ('min_hw', 'max_hw', 'min_hd'):
        config.setdefault(field, 0)
        config[field] = check_int(config[field])

    total_width = config['secded']['data_width'] + config['secded']['ecc_width']

    # The weight window must lie inside the codeword width and be non-empty.
    hw_consistent = (config['min_hw'] < total_width and
                     config['max_hw'] <= total_width and
                     config['min_hw'] < config['max_hw'])
    if not hw_consistent:
        raise RuntimeError('Hamming weight constraints are inconsistent.')

    # The weight window must be wide enough to admit the minimum distance.
    if config['max_hw'] - config['min_hw'] + 1 < config['min_hd']:
        raise RuntimeError('Hamming distance constraint is inconsistent.')
def __init__(self, lc_state_config, otp_mmap_config, img_config, data_perm):
    '''Validates the image configuration and merges it into the memory map.'''
    # Initialize memory map
    super().__init__(otp_mmap_config)
    # Initialize the LC state and OTP memory map objects first, since
    # validation and image generation depends on them
    self.lc_state = LcStEnc(lc_state_config)
    # Validate memory image configuration
    log.info('')
    log.info('Parse OTP image specification.')
    # Encryption smoke test with known test vector
    enc_test = _present_64bit_encrypt(
        0x0123456789abcdef, 0x0123456789abcdef0123456789abcdef)
    assert enc_test == 0x0e9d28685e671dd6, \
        'Encryption module test failed'
    # The OTP native word width (in bits) must match the SECDED payload
    # width declared in the life cycle state config.
    otp_width = self.config['otp']['width'] * 8
    secded_width = self.lc_state.config['secded']['data_width']
    if otp_width != secded_width:
        raise RuntimeError('OTP width and SECDED data width must be equal')
    if 'seed' not in img_config:
        raise RuntimeError('Missing seed in configuration.')
    img_config['seed'] = check_int(img_config['seed'])
    log.info('Seed: {0:x}'.format(img_config['seed']))
    log.info('')
    # Re-initialize with seed to make results reproducible.
    random.seed(OTP_IMG_SEED_DIVERSIFIER + img_config['seed'])
    if 'partitions' not in img_config:
        raise RuntimeError('Missing partitions key in configuration.')
    # Merge the user-supplied partition and item values into the map.
    for part in img_config['partitions']:
        self.merge_part_data(part)
        log.info('Adding values to {} partition.'.format(part['name']))
        for item in part['items']:
            self.merge_item_data(part, item)
    # Key accounting: flag any unrecognized top-level keys.
    img_config_check = img_config.copy()
    del img_config_check['seed']
    del img_config_check['partitions']
    _check_unused_keys(img_config_check, 'in image config')
    log.info('')
    log.info('Parsing OTP image successfully completed.')
    self.validate_data_perm(data_perm)
def _validate_secded(config):
    '''Validate SECDED configuration'''
    secded = config['secded']
    secded.setdefault('data_width', 0)
    secded.setdefault('ecc_width', 0)
    secded.setdefault('ecc_matrix', [[]])
    secded['data_width'] = check_int(secded['data_width'])
    secded['ecc_width'] = check_int(secded['ecc_width'])
    total_width = secded['data_width'] + secded['ecc_width']

    if secded['data_width'] % 8:
        raise RuntimeError('SECDED data width must be a multiple of 8')
    if secded['ecc_width'] != len(secded['ecc_matrix']):
        raise RuntimeError('ECC matrix does not have correct number of rows')

    # Normalize every fanin index in the parity matrix and range-check it
    # against the total codeword width.
    log.info('SECDED Matrix:')
    for row_idx, fanin in enumerate(secded['ecc_matrix']):
        log.info('ECC Bit {} Fanin: {}'.format(row_idx, fanin))
        for col_idx, bit_pos in enumerate(fanin):
            bit_pos = check_int(bit_pos)
            if not (0 <= bit_pos < total_width):
                raise RuntimeError('ECC bit position is out of bounds')
            secded['ecc_matrix'][row_idx][col_idx] = bit_pos
def _validate_tokens(config):
    '''Validates and hashes the tokens'''
    config.setdefault('token_size', 128)
    config['token_size'] = check_int(config['token_size'])

    hashed = []
    for token in config['tokens']:
        # Resolve "<random>" placeholders to concrete values.
        random_or_hexvalue(token, 'value', config['token_size'])
        entry = OrderedDict()
        entry['name'] = token['name'] + 'Hashed'
        # TODO: plug in PRESENT-based hashing algo or KMAC
        entry['value'] = token['value']
        hashed.append(entry)

    config['tokens'] += hashed
def __init__(self, config):
    '''The constructor validates the configuration dict.'''
    log.info('')
    log.info('Generate life cycle state')
    log.info('')
    # Check that all mandatory top-level sections are present.
    if 'seed' not in config:
        raise RuntimeError('Missing seed in configuration')
    if 'secded' not in config:
        raise RuntimeError('Missing secded configuration')
    if 'tokens' not in config:
        raise RuntimeError('Missing token configuration')
    for typ in LC_STATE_TYPES.keys():
        if typ not in config:
            raise RuntimeError('Missing {} definition'.format(typ))
    config['seed'] = check_int(config['seed'])
    log.info('Seed: {0:x}'.format(config['seed']))
    log.info('')
    # Re-initialize with seed to make results reproducible.
    random.seed(LC_SEED_DIVERSIFIER + int(config['seed']))
    # Each _validate_* / _generate_* helper below normalizes and checks one
    # section of the configuration dict in place, in dependency order.
    log.info('Checking SECDED.')
    _validate_secded(config)
    log.info('')
    log.info('Checking Hamming weight and distance constraints.')
    _validate_constraints(config)
    log.info('')
    log.info('Hashing tokens.')
    _validate_tokens(config)
    log.info('')
    log.info('Checking state declarations.')
    _validate_state_declarations(config)
    log.info('')
    log.info('Generate incremental word encodings.')
    _generate_words(config)
    self.config = config
    log.info('')
    log.info('Successfully generated life cycle state.')
    log.info('')
def __init__(self, config):
    '''Validates the OTP memory map configuration dict and stores it.'''
    log.info('')
    log.info('Parse and translate OTP memory map.')
    log.info('')
    if "seed" not in config:
        log.error("Missing seed in configuration.")
        exit(1)
    config["seed"] = check_int(config["seed"])
    # Initialize RNG.
    # NOTE(review): the seed is used undiversified here — other variants in
    # this file add a diversification constant; confirm this is intended.
    random.seed(int(config['seed']))
    # All three top-level sections are mandatory.
    if "otp" not in config:
        log.error("Missing otp configuration.")
        exit(1)
    if "scrambling" not in config:
        log.error("Missing scrambling configuration.")
        exit(1)
    if "partitions" not in config:
        log.error("Missing partition configuration.")
        exit(1)
    # Validate OTP info.
    _validate_otp(config["otp"])
    # Validate scrambling info.
    _validate_scrambling(config["scrambling"])
    # Validate memory map.
    _validate_mmap(config)
    self.config = config
    log.info('')
    log.info('Successfully parsed and translated OTP memory map.')
    log.info('')
def merge_part_data(self, part):
    '''This validates and merges the partition data into the memory map dict'''
    part.setdefault('items', [])
    if not isinstance(part['items'], list):
        raise RuntimeError('the "items" key must contain a list')
    # Check if partition name exists in memory map
    part.setdefault('name', 'unknown_name')
    mmap_part = self.get_part(part['name'])
    if mmap_part is None:
        raise RuntimeError('Partition {} does not exist'.format(
            part['name']))
    # Only partitions with a hardware digest can be locked.
    part.setdefault('lock', 'false')
    part['lock'] = check_bool(part['lock'])
    if part['lock'] and not \
       mmap_part['hw_digest']:
        raise RuntimeError(
            'Partition {} does not contain a hardware digest'.format(
                part['name']))
    # Augment memory map datastructure with lock bit.
    mmap_part['lock'] = part['lock']
    if part['name'] == 'LIFE_CYCLE':
        # The life cycle partition is special: its contents are fully
        # derived from the (state, count) pair and cannot be overridden
        # item by item.
        part.setdefault('state', 'RAW')
        part.setdefault('count', 0)
        part['count'] = check_int(part['count'])
        if len(part['items']) > 0:
            raise RuntimeError(
                'Life cycle items cannot directly be overridden')
        if part['lock']:
            raise RuntimeError('Life cycle partition cannot be locked')
        if part['count'] == 0 and part['state'] != "RAW":
            raise RuntimeError(
                'Life cycle transition counter can only be zero in the RAW state'
            )
        # Augment life cycle partition with correct life cycle encoding
        state = self.lc_state.encode('lc_state', str(part['state']))
        count = self.lc_state.encode('lc_cnt', str(part['count']))
        part['items'] = [{
            'name': 'LC_STATE',
            'value': '0x{:X}'.format(state)
        }, {
            'name': 'LC_TRANSITION_CNT',
            'value': '0x{:X}'.format(count)
        }]
        # Key accounting
        part_check = part.copy()
        del part_check['state']
        del part_check['count']
    else:
        # Key accounting
        part_check = part.copy()
    if len(part['items']) == 0:
        log.warning("Partition does not contain any items.")
    # Key accounting: any key not consumed above is unrecognized.
    del part_check['items']
    del part_check['name']
    del part_check['lock']
    _check_unused_keys(part_check, "in partition {}".format(part['name']))
def _validate_mmap(config):
    '''Validate the memory map configuration.

    Walks all partitions, assigns item offsets, appends digest items where
    requested, and checks that everything fits into the OTP macro.
    '''
    # Get valid key names.
    key_names = []
    for key in config["scrambling"]["keys"]:
        key_names.append(key["name"])

    offset = 0
    num_part = 0
    for part in config["partitions"]:
        num_part += 1
        _validate_part(part, offset, key_names)

        # Loop over items within a partition
        for item in part["items"]:
            _validate_item(item, offset)
            log.info("> Item {} at offset {} with size {}".format(
                item["name"], offset, item["size"]))
            offset += check_int(item["size"])

        # Place digest at the end of a partition.
        if part["sw_digest"] or part["hw_digest"]:
            part["items"].append({
                "name": part["name"] + DIGEST_SUFFIX,
                "size": DIGEST_SIZE,
                "offset": check_int(part["offset"]) +
                check_int(part["size"]) - DIGEST_SIZE,
                "isdigest": "True",
                "inv_default": "<random>"
            })
            # Randomize the digest default.
            random_or_hexvalue(part["items"][-1], "inv_default",
                               DIGEST_SIZE * 8)
            log.info("> Adding digest {} at offset {} with size {}".format(
                part["name"] + DIGEST_SUFFIX, offset, DIGEST_SIZE))
            offset += DIGEST_SIZE

        # check offsets and size
        if offset > check_int(part["offset"]) + check_int(part["size"]):
            log.error("Not enough space in partitition "
                      "{} to accommodate all items. Bytes available "
                      "= {}, bytes requested = {}".format(
                          part["name"], part["size"],
                          offset - part["offset"]))
            exit(1)
        offset = check_int(part["offset"]) + check_int(part["size"])

    if offset > config["otp"]["size"]:
        # BUG FIX: the values were previously passed as extra log.error()
        # arguments with "{}"-style placeholders, which the logging module
        # does not substitute (it uses %-formatting). Format explicitly so
        # the message actually shows the sizes.
        log.error("OTP is not big enough to store all partitions. "
                  "Bytes available {}, bytes required {}".format(
                      config["otp"]["size"], offset))
        exit(1)

    log.info("Total number of partitions: {}".format(num_part))
    log.info("Bytes available in OTP: {}".format(config["otp"]["size"]))
    log.info("Bytes required for partitions: {}".format(offset))
def __init__(self, config):
    '''The constructor validates the configuration dict.'''
    log.info('')
    log.info('Generate life cycle state')
    log.info('')
    # Check that all mandatory top-level sections are present.
    if 'seed' not in config:
        log.error('Missing seed in configuration')
        exit(1)
    if 'secded' not in config:
        log.error('Missing secded configuration')
        exit(1)
    if 'tokens' not in config:
        log.error('Missing token configuration')
        exit(1)
    # Fill in defaults for all optional keys.
    config['secded'].setdefault('data_width', 0)
    config['secded'].setdefault('ecc_width', 0)
    config['secded'].setdefault('ecc_matrix', [[]])
    config.setdefault('num_ab_words', 0)
    config.setdefault('num_cd_words', 0)
    config.setdefault('num_ef_words', 0)
    config.setdefault('min_hw', 0)
    config.setdefault('max_hw', 0)
    config.setdefault('min_hd', 0)
    config.setdefault('token_size', 128)
    config['seed'] = check_int(config['seed'])
    log.info('Seed: {0:x}'.format(config['seed']))
    log.info('')
    # Re-initialize with seed to make results reproducible.
    random.seed(LC_SEED_DIVERSIFIER + int(config['seed']))
    # Coerce all numeric configuration entries to integers.
    config['secded']['data_width'] = check_int(
        config['secded']['data_width'])
    config['secded']['ecc_width'] = check_int(
        config['secded']['ecc_width'])
    total_width = config['secded']['data_width'] + config['secded'][
        'ecc_width']
    config['num_ab_words'] = check_int(config['num_ab_words'])
    config['num_cd_words'] = check_int(config['num_cd_words'])
    config['num_ef_words'] = check_int(config['num_ef_words'])
    config['min_hw'] = check_int(config['min_hw'])
    config['max_hw'] = check_int(config['max_hw'])
    config['min_hd'] = check_int(config['min_hd'])
    config['token_size'] = check_int(config['token_size'])
    # NOTE(review): total_width is recomputed here with the same value as
    # above — appears redundant.
    total_width = config['secded']['data_width'] + config['secded'][
        'ecc_width']
    # The Hamming weight window must lie inside the codeword width and be
    # wide enough to admit the minimum Hamming distance.
    if config['min_hw'] >= total_width or \
       config['max_hw'] > total_width or \
       config['min_hw'] >= config['max_hw']:
        log.error('Hamming weight constraints are inconsistent.')
        exit(1)
    if config['max_hw'] - config['min_hw'] + 1 < config['min_hd']:
        log.error('Hamming distance constraint is inconsistent.')
        exit(1)
    # Validate the SECDED parity matrix dimensions and entries.
    if config['secded']['ecc_width'] != len(
            config['secded']['ecc_matrix']):
        log.error('ECC matrix does not have correct number of rows')
        exit(1)
    log.info('SECDED Matrix:')
    for i, l in enumerate(config['secded']['ecc_matrix']):
        log.info('ECC Bit {} Fanin: {}'.format(i, l))
        for j, e in enumerate(l):
            e = check_int(e)
            if e < 0 or e >= total_width:
                log.error('ECC bit position is out of bounds')
                exit(1)
            config['secded']['ecc_matrix'][i][j] = e
    log.info('')
    # Append a '<name>Hashed' entry for every token.
    hashed_tokens = []
    for token in config['tokens']:
        random_or_hexvalue(token, 'value', config['token_size'])
        hashed_token = OrderedDict()
        hashed_token['name'] = token['name'] + 'Hashed'
        # TODO: plug in PRESENT-based hashing algo or KMAC
        hashed_token['value'] = token['value']
        hashed_tokens.append(hashed_token)
    config['tokens'] += hashed_tokens
    self.config = config
    # Generate new encoding words
    word_types = ['ab_words', 'cd_words', 'ef_words']
    existing_words = []
    for w in word_types:
        # NOTE(review): self.gen is not initialized in this constructor —
        # presumably set up as a class attribute elsewhere; verify.
        while len(self.gen[w]) < self.config['num_' + w]:
            new_word = _get_new_state_word_pair(self.config, existing_words)
            self.gen[w].append(new_word)
    # Validate words (this must not fail at this point).
    _validate_words(self.config, existing_words)
    # Print out HD histogram
    self.gen['stats'] = hd_histogram(existing_words)
    log.info('')
    log.info('Hamming distance histogram:')
    log.info('')
    for bar in self.gen['stats']["bars"]:
        log.info(bar)
    log.info('')
    log.info('Minimum HD: {}'.format(self.gen['stats']['min_hd']))
    log.info('Maximum HD: {}'.format(self.gen['stats']['max_hd']))
    log.info('Minimum HW: {}'.format(self.gen['stats']['min_hw']))
    log.info('Maximum HW: {}'.format(self.gen['stats']['max_hw']))
    log.info('')
    log.info('Successfully generated life cycle state.')
    log.info('')
def _validate_mmap(config):
    '''Validate the memory map configuration.

    Validates all partitions, distributes unallocated space, assigns
    partition/item offsets, appends digest items where requested, and
    returns a partition/item index dict for fast lookups.
    '''
    # Get valid key names.
    key_names = []
    for key in config["scrambling"]["keys"]:
        key_names.append(key["name"])

    if not isinstance(config['partitions'], list):
        raise RuntimeError('the "partitions" key must contain a list')

    # validate inputs before use
    allocated = 0
    for part in config["partitions"]:
        _validate_part(part, key_names)
        allocated += part['size']

    # distribute unallocated bits
    _dist_unused(config, allocated)

    # Determine offsets and generation dicts
    offset = 0
    part_dict = {}
    for j, part in enumerate(config["partitions"]):
        if part['name'] in part_dict:
            raise RuntimeError('Partition name {} is not unique'.format(
                part['name']))
        part['offset'] = offset
        if check_int(part['offset']) % SCRAMBLE_BLOCK_WIDTH:
            raise RuntimeError(
                "Partition {} offset must be 64bit aligned".format(
                    part['name']))
        log.info("Partition {} at offset {} size {}".format(
            part["name"], part["offset"], part["size"]))

        # Loop over items within a partition
        item_dict = {}
        for k, item in enumerate(part["items"]):
            if item['name'] in item_dict:
                raise RuntimeError('Item name {} is not unique'.format(
                    item['name']))
            item['offset'] = offset
            log.info("> Item {} at offset {} with size {}".format(
                item["name"], offset, item["size"]))
            offset += check_int(item["size"])
            item_dict[item['name']] = k

        # Place digest at the end of a partition.
        if part["sw_digest"] or part["hw_digest"]:
            digest_name = part["name"] + DIGEST_SUFFIX
            if digest_name in item_dict:
                raise RuntimeError(
                    'Digest name {} is not unique'.format(digest_name))
            item_dict[digest_name] = len(part["items"])
            part["items"].append({
                "name": digest_name,
                "size": DIGEST_SIZE,
                "offset": check_int(part["offset"]) +
                check_int(part["size"]) - DIGEST_SIZE,
                "ismubi": False,
                "isdigest": True,
                "inv_default": "<random>"
            })
            # Randomize the digest default.
            random_or_hexvalue(part["items"][-1], "inv_default",
                               DIGEST_SIZE * 8)
            # We always place the digest into the last 64bit word
            # of a partition.
            canonical_offset = (check_int(part["offset"]) +
                                check_int(part["size"]) - DIGEST_SIZE)
            if offset > canonical_offset:
                raise RuntimeError(
                    "Not enough space in partitition "
                    "{} to accommodate a digest. Bytes available "
                    "= {}, bytes allocated to items = {}".format(
                        part["name"], part["size"],
                        offset - part["offset"]))
            offset = canonical_offset
            log.info("> Adding digest {} at offset {} with size {}".format(
                digest_name, offset, DIGEST_SIZE))
            offset += DIGEST_SIZE

        # check offsets and size
        if offset > check_int(part["offset"]) + check_int(part["size"]):
            raise RuntimeError("Not enough space in partitition "
                               "{} to accommodate all items. Bytes available "
                               "= {}, bytes allocated to items = {}".format(
                                   part["name"], part["size"],
                                   offset - part["offset"]))
        offset = check_int(part["offset"]) + check_int(part["size"])
        part_dict.setdefault(part['name'], {'index': j, 'items': item_dict})

    if offset > config["otp"]["size"]:
        # BUG FIX: the sizes were previously passed as extra RuntimeError
        # arguments with "{}"-style placeholders, which are never
        # substituted into the message. Format explicitly.
        raise RuntimeError(
            "OTP is not big enough to store all partitions. "
            "Bytes available {}, bytes required {}".format(
                config["otp"]["size"], offset))

    log.info("Total number of partitions: {}".format(len(
        config["partitions"])))
    log.info("Bytes available in OTP: {}".format(config["otp"]["size"]))
    log.info("Bytes required for partitions: {}".format(offset))

    # return the partition/item index dict
    return part_dict
def _validate_part(part, offset, key_names):
    '''Validates a partition within the OTP memory map'''
    part.setdefault("offset", offset)
    part.setdefault("name", "unknown_name")
    part.setdefault("variant", "Unbuffered")
    part.setdefault("size", "0")
    part.setdefault("secret", "false")
    part.setdefault("sw_digest", "false")
    part.setdefault("hw_digest", "false")
    part.setdefault("write_lock", "none")
    part.setdefault("read_lock", "none")
    part.setdefault("key_sel", "NoKey")
    log.info("Partition {} at offset {} with size {}".format(
        part["name"], part["offset"], part["size"]))

    # Make sure these are boolean types (simplifies the mako templates)
    part["secret"] = check_bool(part["secret"])
    part["sw_digest"] = check_bool(part["sw_digest"])
    part["hw_digest"] = check_bool(part["hw_digest"])
    # NOTE(review): "bkout_type" has no setdefault above, so a partition
    # without this key raises KeyError here — confirm callers always set it.
    part["bkout_type"] = check_bool(part["bkout_type"])

    # Make sure this has integer type.
    part["size"] = check_int(part["size"])

    # basic checks
    if part["variant"] not in ["Unbuffered", "Buffered", "LifeCycle"]:
        log.error("Invalid partition type {}".format(part["variant"]))
        exit(1)
    if part["key_sel"] not in (["NoKey"] + key_names):
        log.error("Invalid key sel {}".format(part["key_sel"]))
        exit(1)
    if check_bool(part["secret"]) and part["key_sel"] == "NoKey":
        log.error(
            "A secret partition needs a key select value other than NoKey")
        exit(1)
    if part["write_lock"].lower() not in ["digest", "csr", "none"]:
        log.error("Invalid value for write_lock")
        exit(1)
    if part["read_lock"].lower() not in ["digest", "csr", "none"]:
        log.error("Invalid value for read_lock")
        exit(1)
    if part["sw_digest"] and part["hw_digest"]:
        log.error(
            "Partition cannot support both a SW and a HW digest at the same time."
        )
        exit(1)
    if part["variant"] == "Unbuffered" and not part["sw_digest"]:
        log.error(
            "Unbuffered partitions without digest are not supported at the moment."
        )
        exit(1)
    # Digest-based locking only makes sense if a digest exists.
    if not part["sw_digest"] and not part["hw_digest"]:
        if part["write_lock"].lower() == "digest" or part["read_lock"].lower(
        ) == "digest":
            log.error(
                "A partition can only be write/read lockable if it has a hw or sw digest."
            )
            exit(1)
    # Partitions are handled in 64bit blocks, hence the alignment checks.
    if check_int(part["offset"]) % 8:
        log.error("Partition offset must be 64bit aligned")
        exit(1)
    if check_int(part["size"]) % 8:
        log.error("Partition size must be 64bit aligned")
        exit(1)
    if len(part["items"]) == 0:
        log.warning("Partition does not contain any items.")
def _validate_part(part, key_names):
    '''Validates a partition within the OTP memory map'''
    part.setdefault("name", "unknown_name")
    part.setdefault("variant", "Unbuffered")
    part.setdefault("secret", False)
    part.setdefault("sw_digest", False)
    part.setdefault("hw_digest", False)
    part.setdefault("write_lock", "none")
    part.setdefault("read_lock", "none")
    part.setdefault("key_sel", "NoKey")
    part.setdefault("absorb", False)
    log.info("Validating partition {}".format(part["name"]))

    # Make sure these are boolean types (simplifies the mako templates)
    part["secret"] = check_bool(part["secret"])
    part["sw_digest"] = check_bool(part["sw_digest"])
    part["hw_digest"] = check_bool(part["hw_digest"])
    # NOTE(review): "bkout_type" and "ecc_fatal" have no setdefault above,
    # so partitions lacking these keys raise KeyError here — confirm
    # callers always provide them.
    part["bkout_type"] = check_bool(part["bkout_type"])
    part["ecc_fatal"] = check_bool(part["ecc_fatal"])

    # basic checks
    if part["variant"] not in ["Unbuffered", "Buffered", "LifeCycle"]:
        raise RuntimeError("Invalid partition type {}".format(part["variant"]))
    if part["key_sel"] not in (["NoKey"] + key_names):
        raise RuntimeError("Invalid key sel {}".format(part["key_sel"]))
    if check_bool(part["secret"]) and part["key_sel"] == "NoKey":
        raise RuntimeError(
            "A secret partition needs a key select value other than NoKey")
    if part["write_lock"].lower() not in ["digest", "csr", "none"]:
        raise RuntimeError("Invalid value for write_lock")
    if part["read_lock"].lower() not in ["digest", "csr", "none"]:
        raise RuntimeError("Invalid value for read_lock")
    if part["sw_digest"] and part["hw_digest"]:
        raise RuntimeError(
            "Partition cannot support both a SW and a HW digest at the same time."
        )
    if part["variant"] == "Unbuffered" and not part["sw_digest"]:
        raise RuntimeError(
            "Unbuffered partitions without digest are not supported at the moment."
        )
    # Digest-based locking only makes sense if a digest exists.
    if not part["sw_digest"] and not part["hw_digest"]:
        if part["write_lock"].lower() == "digest" or part["read_lock"].lower(
        ) == "digest":
            raise RuntimeError(
                "A partition can only be write/read lockable if it has a hw or sw digest."
            )
    if not isinstance(part['items'], list):
        raise RuntimeError('the "items" key must contain a list')
    if len(part["items"]) == 0:
        log.warning("Partition does not contain any items.")

    # validate items and calculate partition size if necessary
    size = 0
    for item in part["items"]:
        _validate_item(item)
        size += item["size"]

    # if size not previously defined, set it
    if "size" not in part:
        part["size"] = _calc_size(part, size)

    # Make sure this has integer type.
    part["size"] = check_int(part["size"])

    # Make sure partition size is aligned.
    if part["size"] % SCRAMBLE_BLOCK_WIDTH:
        raise RuntimeError("Partition size must be 64bit aligned")