Example #1
0
    def _get_config_path(self, chipset_dir):
        """Returns the config found in the chipset dir matching
        the naming conventions. If the config file is not found
        in the dir, None is returned.

        :param str chipset_dir: The directory in which to look for config path.
        :returns: config_file
        :rtype: (str)
        """
        config = None
        # The dir name is expected to match the chipset tag stored in the file.
        chipset_from_dir_name = os.path.basename(chipset_dir)

        for entry in os.listdir(chipset_dir):
            path = c_path.join(chipset_dir, entry)
            if c_path.validate_file(path) and re.match(defines.XML_NAME_REGEX, entry):
                # Extract the chipset from the file
                try:
                    chipset_from_file = ConfigParser.get_chipset_from_file(path)
                except Exception as e:
                    logger.warning('Skipping file: ' + entry + '\n'
                                   '    ' + 'Failed to load the file: ' + str(e))
                    continue

                # Check the naming conventions
                # NOTE(review): if multiple files match, the last one scanned
                # wins — confirm this is intended.
                if chipset_from_file == chipset_from_dir_name:
                    config = path
                else:
                    logger.warning('Skipping file: ' + entry + '\n'
                                   '    ' + 'Chipset from file: "' + chipset_from_file + '" does not match chipset from dir name: "' + chipset_from_dir_name + '"')
            else:
                # Fixed typo in log message: "patters" -> "patterns"
                logger.debug2('Skipping file: ' + entry + '\n'
                              '    ' + 'Name does not match any of the naming convention patterns')
        logger.debug2('Config path found for chipset_dir: ' + chipset_dir + '\n'
                      '    ' + 'config: ' + str(config))
        return config
    def _get_config_path(self, chipset_dir):
        """Returns the config found in the chipset dir matching
        the naming conventions. If the config file is not found
        in the dir, None is returned.

        :param str chipset_dir: The directory in which to look for config path.
        :returns: config_file
        :rtype: (str)
        """
        config = None
        # The dir name is expected to match the chipset tag stored in the file.
        chipset_from_dir_name = os.path.basename(chipset_dir)

        for entry in os.listdir(chipset_dir):
            path = c_path.join(chipset_dir, entry)
            if c_path.validate_file(path) and entry.endswith(defines.XML_NAME_ENDING):
                # Extract the chipset from the file
                try:
                    chipset_from_file = ConfigParser.get_chipset_from_file(path)
                except Exception as e:
                    logger.warning('Skipping file: ' + entry + '\n'
                                   '    ' + 'Failed to load the file: ' + str(e))
                    continue

                # Check the naming conventions
                # NOTE(review): if multiple files match, the last one scanned
                # wins — confirm this is intended.
                if chipset_from_file == chipset_from_dir_name:
                    config = path
                else:
                    logger.warning('Skipping file: ' + entry + '\n'
                                   '    ' + 'Chipset from file: "' + chipset_from_file + '" does not match chipset from dir name: "' + chipset_from_dir_name + '"')
            else:
                # Fixed typo in log message: "patters" -> "patterns"
                logger.debug2('Skipping file: ' + entry + '\n'
                              '    ' + 'Name does not match any of the naming convention patterns')
        logger.debug2('Config path found for chipset_dir: ' + chipset_dir + '\n'
                      '    ' + 'config: ' + str(config))
        return config
Example #3
0
    def validate(self, args):
        """Validates an MBN file against the values provided in args.

        When no header_length is given, only checks that the file carries a
        valid MBN header. Otherwise parses the header and compares selected
        fields against the configuration, logging a warning per mismatch and
        a final pass/fail summary.

        :param args: namespace carrying input_file, header_length,
            header_version and the image_* attributes compared below.
        """
        # Context manager guarantees the handle is released even if read()
        # raises (the original open/read/close leaked on error and shadowed
        # the builtin name `file`).
        with open(args.input_file, "rb") as input_file:
            file_data = input_file.read()

        # if validate is given no arguments, then we just need to check if the file has an mbn header. that's it
        if self.is_mbn_file(file_data, args.input_file) and args.header_length is None:
            logger.info("\"{0}\" appears to have a valid MBN header".format(args.input_file))
            return

        mbn_handler = ParseGenMbn(data=file_data, header_size=args.header_length, version=MBN_HDR_VERSION_3)

        things_to_check = ["image_id", "image_dest_ptr", "image_src"]
        errors_found = 0

        # Compare each configured field with the field parsed from the image.
        for thing in things_to_check:
            if getattr(args, thing) != getattr(mbn_handler.header, thing):
                errors_found += 1
                logger.warning("{0} value: {1} provided by configuration does not match {0} value: {2} from given mbn file".format(
                                                                                                                          thing,
                                                                                                                          getattr(args, thing),
                                                                                                                          getattr(mbn_handler.header, thing)))

        # check header_version, AKA flash_parti_ver, separately because it doesn't exist in 80 byte headers
        if args.header_length == 40:
            if args.header_version != mbn_handler.header.flash_parti_ver:
                errors_found += 1
                logger.warning("header_version value: {0} provided by configuration does not match header_version value: {1} from given mbn file".format(
                                                                                                                        args.header_version,
                                                                                                                        mbn_handler.header.flash_parti_ver))
        logger.info("MBN file {0} has {1}passed validation".format(args.input_file, "not " if errors_found > 0 else ""))
Example #4
0
    def generate_cert_request(self,
                              cert_type,
                              cert_info,
                              subject_params,
                              self_sign=False,
                              key_exp=None,
                              key_size=None,
                              padding=PAD_PKCS):
        """Creates a certificate request (optionally self-signed) for cert_type.

        Generates a new key pair when cert_info has no private key yet; the
        new keys are stored back into cert_info.

        :param str cert_type: label used for logging only.
        :param cert_info: object carrying priv_key/pub_key, updated in place.
        :param subject_params: certificate subject fields.
        :param bool self_sign: whether to self-sign the certificate.
        :param key_exp: key exponent, used only when a key pair is generated.
        :param key_size: key size, used only when a key pair is generated.
        :param padding: signature padding scheme, defaults to PAD_PKCS.
        :returns: tuple of (cert, priv_key, pub_key)
        """
        # Ensure that the number of SHA bits is equal to or exceeds the number of ECDSA bits
        if self.using_ecdsa:
            # ''.join keeps this working on both Python 2 (filter returns a
            # str) and Python 3 (filter returns an iterator that int()
            # cannot consume directly).
            num_sha_bits = int(''.join(filter(str.isdigit, self.hash_algo)))
            num_ecdsa_bits = ecdsa_functions.curve_size_map[self.signing_attributes.ecdsa_curve]
            if num_sha_bits < num_ecdsa_bits:
                logger.warning("The number of SHA bits is less than the number of ECDSA bits.\n"
                               "SHA is set to {0} bits and ECDSA is set to {1} bits.".format(num_sha_bits, num_ecdsa_bits))

        # Create a new key if there isn't one
        if cert_info.priv_key is None:
            logger.info('Generating new private/public key pair for ' + cert_type)
            cert_info.priv_key, cert_info.pub_key = self.generate_key_pair(key_exp, key_size)

        logger.info('Creating certificate request for ' + cert_type)

        cert = cert_functions.create_cert(cert_info.priv_key,
                                          subject_params=subject_params,
                                          config=self.openssl_info.openssl_config,
                                          hash_algo=self.hash_algo,
                                          serial=self.CERT_SERIAL,
                                          days=self.CERT_DAYS,
                                          self_sign=self_sign,
                                          padding=padding,
                                          pad_salt_len=self.PAD_PSS_SALT_1,
                                          pad_hash_algo=self.hash_algo)
        return cert, cert_info.priv_key, cert_info.pub_key
Example #5
0
 def run(self, error):
     """Applies the configured failure policy to the given error message.

     A WARN policy logs the message as a warning; an ERROR policy raises
     RuntimeError carrying the message. Any other policy value is a no-op.
     """
     policy = self.policy
     if policy == self.WARN:
         # Imported lazily, matching the original, to avoid a module-level
         # dependency on the logging package.
         from sectools.common.utils.c_logging import logger
         logger.warning(error)
     elif policy == self.ERROR:
         raise RuntimeError(error)
    def _extract_hash_segment_algorithm(self, hash_table_size=0):
        """Determines which hash algorithm produced the extracted hash table.

        Derives the per-entry hash size from the total table size and the
        entry count, maps it to an algorithm name, and logs a mismatch
        warning against the configured algorithm when validating or when
        the image is encrypted.

        :param int hash_table_size: total hash table size in bytes;
            0 (the default) means there is nothing to extract.
        """
        if hash_table_size > 0:
            hash_count = self.get_hash_table(get_hash_count=True)
            # Floor division keeps hash_size an int on Python 3 as well
            # (true division would produce a float that can never match the
            # integer keys of HASH_SIZE_TO_ALGO_MAP); identical on Python 2.
            # add 2: +1 for Hash Table Entry 0 (ELF + Program Header). +1 for Hash Table Entry 1 (Dummy Entry)
            hash_size = hash_table_size // (hash_count + 2)
            logger.debug("Number of hash entries: " + str(hash_count))
            logger.debug("Hash table size: " + str(hash_table_size))
            logger.debug("Hash size: " + str(hash_size))

            if hash_size in SecParseGenElf.HASH_SIZE_TO_ALGO_MAP:
                self._mbn_parsegen.extracted_segment_hash_algorithm = SecParseGenElf.HASH_SIZE_TO_ALGO_MAP[hash_size]
            else:
                self._mbn_parsegen.extracted_segment_hash_algorithm = ParseGenMbn.UNKNOWN_ALGORITHM

            logger.debug("Determined hash table algorithm: " + self._mbn_parsegen.extracted_segment_hash_algorithm)

            # Determine if parsegen was created during validation (-a) and print error if hash algorithm mismatch is present
            extracted_segment_hash_algorithm = self._mbn_parsegen.extracted_segment_hash_algorithm
            segment_hash_algorithm = self.segment_hash_algorithm if self.segment_hash_algorithm is not None else "sha256"
            if extracted_segment_hash_algorithm != segment_hash_algorithm and (self.validating or self.is_encrypted()):
                from sectools.common.utils.c_misc import create_mismatch_table
                errstr = list()
                mismatches = list()
                mismatches.append(("Hash Segment Algorithm", extracted_segment_hash_algorithm, segment_hash_algorithm))
                create_mismatch_table(mismatches, errstr, data_type_to_compare="Attribute", image_region="Hash Segment")
                logger.warning('Following validations failed for the image:\n       ' +
                               '\n       '.join([(str(i + 1) + '. ' + e) for i, e in enumerate(errstr)]))
Example #7
0
    def get_data(self, integrity_check=None, sign=None):
        """Returns the image data, adding a hash segment when needed.

        :param integrity_check: overrides self.integrity_check when not None.
        :param sign: overrides self.sign when not None.
        :returns: raw data produced by the underlying ELF parsegen; when
            integrity_check or sign is in effect, the data includes a
            freshly generated, padded hash segment.
        :raises RuntimeError: if the generated hash segment does not match
            the estimated size.
        """
        # Resolve the operation
        integrity_check = self.integrity_check if integrity_check is None else integrity_check
        sign = self.sign if sign is None else sign
        # Signing implies an integrity check.
        integrity_check = True if (integrity_check or sign) else False

        # Allow base to do any checks
        SecParseGenBase.get_data(self, integrity_check, sign)

        if not (integrity_check or sign):
            data = self._elf_parsegen.get_data()
        else:
            # Add the header and hash segment
            prog_phdr_entry, hash_phdr_entry, hash_segment_size =\
                self._add_phdr_and_hash_segs(sign)

            try:
                # Generate the hash segment now
                hash_segment = self.get_hash_segment(sign)

                # Pad up to the size that was estimated above.
                hash_segment += PAD_BYTE_1 * self._hash_padding_size

                # Check here for sizes mismatching just in case
                if len(hash_segment) != hash_segment_size:
                    raise RuntimeError(
                        'Estimated hash table size was wrong. Estimate - ' +
                        str(hash_segment_size) + ', Actual - ' +
                        str(len(hash_segment)))

                # Re-add the hash segment, this time with the real data
                self._elf_parsegen.remove_segment(hash_phdr_entry)
                self._elf_parsegen.remove_segment(prog_phdr_entry)
                self._elf_parsegen.add_segment(prog_phdr_entry,
                                               '',
                                               toalign=self._align)
                self._elf_parsegen.add_segment(hash_phdr_entry,
                                               hash_segment,
                                               index=1,
                                               toalign=self._align)

                try:
                    # Get the elf data
                    data = self._elf_parsegen.get_data()
                finally:
                    # NOTE(review): this inner try/finally is a no-op;
                    # presumably cleanup once lived here — confirm.
                    pass
            finally:
                # Cleanup is best-effort: removal failures are logged but
                # not raised, so any original exception still propagates.
                # Remove the prog
                try:
                    self._elf_parsegen.remove_segment(prog_phdr_entry)
                except Exception as e:
                    logger.warning(str(e))

                # Remove the hash
                try:
                    self._elf_parsegen.remove_segment(hash_phdr_entry)
                except Exception as e:
                    logger.warning(str(e))
        return data
def validate_args(arguments, err_strings, init_logging):
    """Validates builder arguments, optionally enabling file logging first.

    :param arguments: parsed argument namespace (target_base_dir, source,
        sign_id, jtag_id, soc_hw_version, qti_sign, relocatable, and
        optionally config_type / config / target_image_type_filter).
    :param list err_strings: indexed error messages. Usage observed here:
        0 missing target_base_dir, 1 missing source, 2 missing sign_id,
        3 both jtag_id and soc_hw_version supplied, 4 config file invalid,
        5 qti_sign not boolean, 6 relocatable not boolean,
        7 invalid target_image_type_filter, 8 unknown config_type.
    :param bool init_logging: when True, enables file logging under
        arguments.target_base_dir before validation.
    :returns: result of error_list_to_str(err) summarizing all failures.
    :raises RuntimeError: if the logging directory cannot be created, or
        (when init_logging) target_base_dir is unset.
    """
    # configure logger to log to filesystem
    if init_logging:
        if arguments.target_base_dir:
            folder = c_path.normalize(arguments.target_base_dir)
            try:
                c_path.create_dir(folder)
            except Exception as e:
                raise RuntimeError('Unable to create directory for logging: ' +
                                   folder + '\n' + 'Error: ' + str(e))
            # Log file name includes the sign_id when one was provided.
            if arguments.sign_id:
                logging_file_name = SECTOOLS_BUILDER_TOOL_NAME.replace(
                    " ", "_") + "_" + arguments.sign_id
            else:
                logging_file_name = SECTOOLS_BUILDER_TOOL_NAME.replace(
                    " ", "_")
            logger.enable_file_logging(logging_file_name,
                                       num_logs=1,
                                       log_dir=folder)
        else:
            raise RuntimeError(err_strings[0])

    err = []
    # validate that the required fields were provided
    if not arguments.target_base_dir:
        err.append(err_strings[0])
    if not arguments.source:
        err.append(err_strings[1])
    if not arguments.sign_id:
        err.append(err_strings[2])
    # jtag_id and soc_hw_version are mutually exclusive.
    if arguments.jtag_id and arguments.soc_hw_version:
        err.append(err_strings[3])
    # config_type (new api) takes precedence over config (deprecated api).
    if hasattr(arguments, "config_type") and arguments.config_type is not None:
        logger.info("Sectools Builder config was provided using new api")
        config = c_path.normalize(arguments.config_type)
        if config not in CONFIGS:
            err.append(err_strings[8])
    else:
        logger.info(
            "Sectools Builder config was provided using deprecated api")
        config = c_path.normalize(arguments.config)
        # A non-standard config is allowed here (warning only), unlike the
        # new-api branch above where it is an error.
        if config not in CONFIGS:
            logger.warning(
                "Sectools Builder received custom Secimage config file")
    sectools_builder_core.validate_file(config, err_strings[4], err)
    validate_bool(arguments.qti_sign, err_strings[5], err)
    validate_bool(arguments.relocatable, err_strings[6], err)
    if hasattr(
            arguments, "target_image_type_filter"
    ) and arguments.target_image_type_filter is not None and arguments.target_image_type_filter not in [
            sectools_builder_core.INTEGRITY, sectools_builder_core.SIGN,
            sectools_builder_core.ENCRYPT,
            sectools_builder_core.SIGN_AND_ENCRYPT
    ]:
        err.append(err_strings[7])
    return error_list_to_str(err)
Example #9
0
def config(path):
    """(obj) object of :class:`ConfigParser` generated by loading the config
    file at *path*. Returns None when loading fails (a warning is logged).
    """
    # Initialize to None so a parse failure returns None instead of raising
    # UnboundLocalError at the return statement (bug in the original).
    config = None
    try:
        config = ConfigParser(path)
    except Exception as e:
        logger.warning('Failed to load config: ' + path + '\n'
                       '    ' + 'Error: ' + str(e))
    return config
    def sha_hash(self, data, sha_algo=None):
        """Hashes *data* with *sha_algo* and returns the digest as raw bytes.

        :raises RuntimeError: when data is empty.

        NOTE(review): when sha_algo is not a key of utils.hash_algos_map the
        warning claims a SHA-256 default, but the original value is still
        passed through to utils.hash unchanged — confirm utils.hash defaults.
        """
        if not data:
            raise RuntimeError('No data to hash')
        if sha_algo not in utils.hash_algos_map.keys():
            logger.warning('No SHA algo specified. Defaulting to SHA-256')

        digest_hex = utils.hash(data, sha_algo)
        logger.debug2("H(code image) : " + digest_hex)
        return binascii.a2b_hex(digest_hex)
Example #11
0
    def discover(self):
        """Searches for the openssl binary in:

        #. The environment using the openssl tag
        #. Prepackaged binary folder
        #. Current path
        #. System path

        :returns str: Path to the openssl binary, or ModuleNotFound if no
            usable binary was located.
        """
        module_name = BINARY_NAME.title()
        filenames = bin_names(BINARY_NAME)
        module = ModuleNotFound
        logger.debug2('module_name: ' + str(module_name) + ', filenames: ' + str(filenames))

        for filename in filenames:
            # Using the environment
            if OPENSSL_ENV_DIR_TAG in os.environ:
                logger.debug2(str(OPENSSL_ENV_DIR_TAG) + ' tag found in environment')
                env_module = c_path.join(os.environ[OPENSSL_ENV_DIR_TAG], filename)
                logger.debug2('Looking for: ' + str(env_module))
                # Reject candidates that are missing or the wrong version.
                if not c_path.validate_file(env_module):
                    logger.warning(module_name + ': File from environment does not exist at - ' + env_module)
                elif not self.is_supported_version(env_module):
                    logger.warning(module_name + ': File from environment is not the correct version - ' + env_module)
                else:
                    module = env_module
                    logger.debug2(module_name + ': Found from environment at - ' + env_module)
                    break

            # Searching in prepacked dir, current dir and system paths
            else:
                folder = packaged_bin_folder
                logger.debug2('Looking for: ' + str(filename) + ' in folder: ' + str(folder))
                for module_found in c_path.which_generator(filename, paths=[folder], find_one=False):
                    if not self.is_supported_version(module_found):
                        logger.debug2('Incorrect version: ' + str(module_found))
                        continue
                    module = module_found
                    # Export the packaged openssl config file, when present,
                    # so the chosen binary picks it up.
                    conf_file = c_path.join(folder, OPENSSL_CONF_FILE)
                    if c_path.validate_file(conf_file):
                        os.environ[OPENSSL_ENV_CONF_TAG] = conf_file
                    logger.debug2(module_name + ': Found at - ' + module)
                    break
                # Check if module is found
                if module != ModuleNotFound:
                    break
        else:
            # for-else: runs only when the loop was not broken out of,
            # i.e. no usable binary was found for any candidate filename.
            logger.error(module_name + ': Not Found')

        # Log if permissions are not correct
        if module != ModuleNotFound and not os.access(module, os.X_OK):
            logger.error(module_name + ': Cannot execute. Missing execution permission.')

        return module
def gen_rsa_key_pair(key_size_in_bits, key_exponent, priv_key_output_file,
                     pub_key_output_file):
    """ Generate RSA Key pair

        input:

        key_size_in_bits: Size of the key in bits.
        key_exponent: [3, 65537]
                      Exponent used in key calculation.
        priv_key_output_file: File name for storing private key
        pub_key_output_file: File name for storing public key

        output:

        returned value: {"public_key": [Generated public key],
                         "private_key": [Generated private key] }
                         Dictionary holding the values of public and private keys

        NOTE(review): in this visible portion the generated key is never
        written to pk_file, pub_key_output_file is unused, and the promised
        dictionary is never returned — confirm against the full source.
    """

    logger.debug("the openssl_binary is:{0}".format(openssl_binary_path))
    # Map the supported exponents onto openssl genrsa flags; anything else
    # falls back to 65537 (-f4) with a warning.
    if key_exponent == 3:
        exponent_str = "-3"
    elif key_exponent == 65537:
        exponent_str = "-f4"
    else:
        logger.warning(
            "Exponent value supplied is INVALID! going with default exponent of 65537."
        )
        exponent_str = "-f4"

    key_size_str = str(key_size_in_bits)

    if priv_key_output_file is not None:
        pk_file = open(priv_key_output_file, "wb")
    else:
        pk_file = tempfile.NamedTemporaryFile(delete=False)
        logger.debug(
            "No output file specified for private key storage, so creating temp file: "
            + pk_file.name)

    try:
        private_key = utility_functions.system_command_logged(
            [openssl_binary_path, "genrsa", exponent_str, key_size_str],
            stderr_to_temp=True)
    # "except E as e" replaces the Python-2-only "except E, e" syntax; it is
    # valid on Python 2.6+ and Python 3.
    except subprocess.CalledProcessError as e:
        # Close before unlinking so removal also succeeds on Windows and the
        # handle is not leaked.
        pk_file.close()
        os.unlink(pk_file.name)
        logger.critical(
            "gen_rsa_key_pair: OPENSSL Errored out on generation of RSA key.")
        logger.critical(
            "call to OpenSSL binary returned an error!: retval = " +
            str(e.returncode) + " Command = " + str(e.cmd))
        raise RuntimeError(
            "call to OpenSSL binary returned an error!: retval = " +
            str(e.returncode) + " Command = " + str(e.cmd))
Example #13
0
        def _decode_binary_blob(self, binary_blob, validating):
            """Decodes the packed L2 Associated Data blob into fields on self.

            :param binary_blob: packed blob; must be exactly
                self.SPEC_SIZE_BYTES long.
            :param bool validating: when True, a sw_id mismatch raises; when
                False it is only logged (the value will be overwritten).
            :raises RuntimeError: on wrong blob size, version mismatch, or
                (when validating) sw_id mismatch.
            """
            if len(binary_blob) != self.SPEC_SIZE_BYTES:
                raise RuntimeError(
                    "L2 Associated Data blob is of the wrong size")

            # Big-endian 16-bit size field at the start of the blob.
            offset = 0
            end = self.L2_ASSOCIATED_DATA_SIZE_FLD_LEN_BYTES
            self.l2_associated_data_size, = struct.unpack(
                ">H", binary_blob[offset:end])

            # Skip two reserved bytes, then read the version, key-ladder
            # length, and one more reserved byte as four single bytes.
            offset = end + EncryptionParamsSectionBody_1_0_L2_1_0.RSVD_BYTE_LEN_BYTES * 2
            end = offset + self.MAJOR_VERSION_FLD_LEN_BYTES + \
                  self.MINOR_VERSION_FLD_LEN_BYTES + \
                  self.KEY_LADDER_LEN_FLD_LEN_BYTES + \
                  EncryptionParamsSectionBody_1_0_L2_1_0.RSVD_BYTE_LEN_BYTES

            major_version, minor_version, self.key_ladder_length, tmp = struct.unpack(
                "=BBBB", binary_blob[offset:end])
            offset = end

            if (major_version, minor_version) != (self.MAJOR_VERSION,
                                                  self.MINOR_VERSION):
                raise RuntimeError((
                    "Encryption Parameters L2 Associated Data version \"{0}.{1}\" does not match "
                    "expected version \"{2}.{3}\"\n       Ensure that the correct selected_encryptor value is "
                    "set.".format(major_version, minor_version,
                                  self.MAJOR_VERSION, self.MINOR_VERSION)))

            # log2 of the decoded value recovers the id — assumes
            # _decode_image_id returns a power-of-two bitmask; TODO confirm.
            image_id = int(
                round(math.log(self._decode_image_id(binary_blob[offset:]),
                               2)))
            if image_id != self.image_id:
                if validating:
                    errstr = list()
                    mismatches = list()
                    mismatches.append(
                        ("sw_id", "0x%X" % image_id, "0x%X" % self.image_id))
                    create_mismatch_table(mismatches,
                                          errstr,
                                          operation="encryption",
                                          data_type_to_compare="Attribute",
                                          image_region="Encryption Parameters")
                    raise RuntimeError(
                        'Following validations failed for the image:\n       '
                        + '\n       '.join([(str(i + 1) + '. ' + e)
                                            for i, e in enumerate(errstr)]))
                else:
                    logger.warning(
                        ("Extracted Encryption Parameters sw_id"
                         " value \"{0}\" does not match config value "
                         "\"{1}\"\n\t Encryption Parameters sw_id value "
                         "will be updated with value \"{1}\"".format(
                             hex(image_id), hex(self.image_id))))
    def reset_workspace(self, chipset=None, oem=True, qc=True, ui=True, user=True):
        """Resets the :meth:`workspace` for the chipset that is provided.
        :meth:`config_dir` is searched for the files for the chipset. If the
        files are found in the config_dir, they are copied to the workspace,
        otherwise an exception is raised.

        :param str chipset: The chipset to reset in the workspace.
        :param bool oem: If the oem file should be updated.
        :param bool qc: If the qc file should be updated.
        :param bool ui: If the ui file should be updated.
        :param bool user: If the user file should be updated.
        :raise RuntimeError: If files for the chipset are not found in the
            the config_dir

        NOTE(review): each copy below runs when its flag is False
        (``if not oem:`` etc.), which contradicts the parameter docs above —
        with all defaults True nothing is copied. Confirm intended polarity.
        """
        # Fall back to the instance's current chipset.
        if chipset is None:
            chipset = self.chipset

        # Look for the config files in the config_dir
        try:
            template_oem, template_qc, template_ui, template_user = self._config_dir_obj.get_chipset_config_paths(chipset)
        except Exception as e:
            logger.warning('Template config files not found for copying to workspace for chipset: ' + chipset + '\n'
                           '    ' + 'Error: ' + str(e))
            raise

        # Create the directory in the workspace for this chipset
        self._workspace_dir_obj.create_chipset_dir(chipset)
        workspace_chipset_dir = self._workspace_dir_obj.get_chipset_dir(chipset)

        if not oem:
            # Copy the OEM template file
            logger.info('Copying template OEM config file to workspace for chipset: ' + chipset)
            shutil.copy(template_oem, workspace_chipset_dir)
            logger.note('OEM config file created at: ' + workspace_chipset_dir)

        if not qc:
            # Copy the QC template file
            logger.info('Copying template QC config file to workspace for chipset: ' + chipset)
            shutil.copy(template_qc, workspace_chipset_dir)
            logger.note('QC config file created at: ' + workspace_chipset_dir)

        if not ui:
            # Copy the UI template file
            logger.info('Copying template UI config file to workspace for chipset: ' + chipset)
            shutil.copy(template_ui, workspace_chipset_dir)
            logger.note('UI config file created at: ' + workspace_chipset_dir)

        if not user:
            # Copy the USER template file
            logger.info('Copying template USER config file to workspace for chipset: ' + chipset)
            shutil.copy(template_user, workspace_chipset_dir)
            logger.note('USER config file created at: ' + workspace_chipset_dir)
Example #15
0
    def __init__(
        self,
        data,
        debug_dir=None,
        debug_prefix=None,
        debug_suffix=None,
    ):
        """Parses raw ELF *data* into header, program headers and segments,
        dumping each stage as debug files when debug_dir is set.

        :param data: raw ELF image bytes.
        :param debug_dir: directory for debug dumps; None disables dumping.
        :param debug_prefix: filename prefix for the debug dumps.
        :param debug_suffix: filename suffix for the debug dumps.
        """
        # Public properties
        self.debug_dir = debug_dir
        self.debug_prefix = debug_prefix
        self.debug_suffix = debug_suffix

        # Store the original image
        self.store_debug_data(df.FILE_DATA_IN, data)
        """
        Extract the various segments/sections of the data:
        1. Elf header
        2. Prog headers
        3. Bin
        """
        # Extract the header
        self.ehdr = extract_ehdr(data)
        self.store_debug_data(df.FILE_HDR_IN, self.ehdr.pack())
        self.store_debug_data(df.FILE_HDR_IN_REPR,
                              repr(self.ehdr),
                              suffix=df.FILE_HDR_IN_REPR_SUFFIX)

        # Extract the program headers
        self.phdrs = extract_phdrs(data, self.ehdr)
        self.store_debug_data(df.FILE_PHDR_IN, pack_phdrs(self.phdrs))
        self.store_debug_data(df.FILE_PHDR_IN_REPR,
                              repr_phdrs(self.phdrs),
                              suffix=df.FILE_PHDR_IN_REPR_SUFFIX)

        # Dump the individual segments
        self.segments = extract_segments(data, self.phdrs)
        for idx, phdr in enumerate(self.phdrs):
            length = len(self.segments[phdr])
            is_load = self._segment_to_put(phdr)
            # Flag suspiciously small (<= 16 byte) segments; loadable ones
            # of non-zero size are escalated to a warning.
            if length >= 0 and length <= 16:
                logger.debug(('' if is_load else 'Non-') +
                             'Loadable segment - ' + str(idx + 1) +
                             ' is of size: ' + str(length))
                # NOTE(review): this branch requires is_load, so the 'Non-'
                # prefix in the warning below can never be emitted.
                if is_load and (length > 0 and length <= 16):
                    logger.warning(('' if is_load else 'Non-') +
                                   'Loadable segment - ' + str(idx + 1) +
                                   ' is of size: ' + str(length))
            self.store_debug_data(df.FILE_SEGMENT_IN.format(idx),
                                  self.segments[phdr])

        # Zero out the sections for now
        zero_out_sections(self.ehdr)
 def configs(self):
     """(list[obj]) List of objects of :class:`ConfigParser` generated by
     loading the config files in the config_dir. Files that fail to load
     are skipped with a warning.
     """
     loaded = []
     for config_path in self.config_paths:
         logger.debug2('Loading config: ' + config_path)
         try:
             loaded.append(ConfigParser(config_path))
         except Exception as e:
             logger.warning('Failed to load config: ' + config_path + '\n'
                            '    ' + 'Error: ' + str(e))
     return loaded
Example #17
0
 def configs(self):
     """(list[obj]) List of objects of :class:`ConfigParser` generated by
     loading the config files in the config_dir. Unloadable files are
     logged and skipped.
     """
     parsed = []
     for p in self.config_paths:
         logger.debug2('Loading config: ' + p)
         try:
             parser = ConfigParser(p)
         except Exception as e:
             logger.warning('Failed to load config: ' + p + '\n'
                            '    ' + 'Error: ' + str(e))
             continue
         parsed.append(parser)
     return parsed
Example #18
0
def extract_encryption_params(data, header, pad_max_sig_size=0, pad_max_cert_chain_size=0,
                              pad_max_encr_params_size=0):
    """Split the trailing encryption params off the end of *data*.

    The encryption params (if present) sit after the code, signature(s) and
    cert chain(s) described by *header*. Handles plain OEM signing as well as
    QTI double-signing, including layouts padded out to pre-allocated maximum
    sizes (the ``pad_max_*`` arguments).

    :param data: Image payload: code + signing data + optional encryption params.
    :param obj header: Header exposing code_size, sig_size, cert_chain_size and,
        when supports_qti_signing() is true, sig_size_qti / cert_chain_size_qti.
    :param int pad_max_sig_size: Maximum signature size used for padding.
    :param int pad_max_cert_chain_size: Maximum cert chain size used for padding.
    :param int pad_max_encr_params_size: Maximum encryption params size used for padding.
    :returns: Tuple (data, seg) from _extract_segment - the payload with the
        trailing segment removed, and the extracted encryption params (possibly
        empty when the trailer turned out to be padding only).
    :raises RuntimeError: If the trailing size is inconsistent with the signing
        state, or the expected padding bytes are invalid.
    """
    sig_size, cert_chain_size = header.sig_size, header.cert_chain_size
    # Everything past code + OEM signing data is the candidate trailer.
    offset = header.code_size + sig_size + cert_chain_size
    size = len(data) - offset
    if header.supports_qti_signing():
        # QTI signing data also precedes the encryption params trailer.
        sig_size_qti, cert_chain_size_qti = header.sig_size_qti, header.cert_chain_size_qti
        offset += sig_size_qti + cert_chain_size_qti
        size = len(data) - offset

        # QTI & OEM are zero, then encryption params must be zero
        if (sig_size, cert_chain_size, sig_size_qti, cert_chain_size_qti) == (0, 0, 0, 0) and size != 0:
            raise RuntimeError('Encryption params must not exist for unsigned file.')

        # QTI is zero or OEM is zero
        elif (0, 0) in [(sig_size, cert_chain_size), (sig_size_qti, cert_chain_size_qti)]:
            # Exclusive use case
            if size in [0, pad_max_encr_params_size]:
                pass
            # Double sign - 1st stage
            elif size == pad_max_sig_size + pad_max_cert_chain_size + pad_max_encr_params_size:
                # Verify padding
                _, seg = _extract_segment(data, offset, size)
                if seg != PAD_BYTE_1 * size:
                    logger.warning('Invalid padding data in file')
                # Trailer is reserved padding, not real encryption params.
                size = 0
            else:
                raise RuntimeError('Invalid size for encryption params: ' + str(size))

        # Double signed image
        else:
            # Second sign did not use max signature size
            potential_padding_size = pad_max_sig_size - sig_size
            if potential_padding_size and (size == pad_max_encr_params_size + potential_padding_size):
                # Verify padding
                _, seg = _extract_segment(data, offset + pad_max_encr_params_size, potential_padding_size)
                if seg != PAD_BYTE_1 * potential_padding_size:
                    raise RuntimeError('Invalid padding data in file')
                size -= potential_padding_size
            elif size != pad_max_encr_params_size:
                raise RuntimeError('Invalid size for encryption params: ' + str(size))

            # Check if there's padding
            _, seg = _extract_segment(data, offset, size)
            if seg == PAD_BYTE_1 * size:
                size = 0

    # Extract encryption params
    data, seg = _extract_segment(data, offset, size)
    return data, seg
    def dump_debug_data_model(self, data_model, path):
        """Dumps the data model related debug data into the output directory.

        :param obj data_model: Data model to dump debug info for
        :param str path: Directory under which the debug dir is created.
        """
        if not self.debug:
            return

        # Best-effort: debug dumping must never break the main flow.
        try:
            dump_dir = c_path.join(path, defines.DEST_DEBUG_DIR)
            c_path.create_dir(dump_dir)
            target = c_path.join(
                dump_dir, defines.DEST_DEBUG_FILE_DATA_MODEL_REPR.format(self.TOOL_NAME))
            store_data_to_file(target, repr(data_model))
            timestamp = datetime.datetime.now().strftime('%c')
            logger.info('Dumped debug data model repr at: ' + target + ', date & time: ' + timestamp)
        except Exception as e:
            logger.warning('Failed to store debug logs: ' + str(e))
    def dump_debug_secdat(self, secdat_parser, path):
        """Dumps the secdat related debug data into the output directory.

        :param obj secdat_parser: Parser to dump debug info for
        :param str path: Directory under which the debug dir is created.
        """
        if not self.debug:
            return

        # Best-effort: failures here are logged, never raised.
        try:
            out_dir = c_path.join(path, defines.DEST_DEBUG_DIR)
            c_path.create_dir(out_dir)
            repr_log = c_path.join(out_dir, defines.DEST_DEBUG_FILE_SECDAT_REPR)
            store_data_to_file(repr_log, repr(secdat_parser))
            logger.info('Dumped debug secdat repr at: ' + repr_log +
                        ', date & time: ' + datetime.datetime.now().strftime('%c'))
        except Exception as e:
            logger.warning('Failed to store debug logs: ' + str(e))
    def sha_hash(self, data, sha_algo=None):
        """Return the binary SHA digest of *data*.

        :param data: Bytes to hash; must be non-empty.
        :param str sha_algo: One of 'sha1', 'sha256', 'sha384'. Any other value
            (including None) falls back to SHA-256 with a warning.
        :returns: Binary digest of the data.
        :rtype: bytes
        :raises RuntimeError: If data is empty.
        """
        if not data:
            raise RuntimeError('No data to hash')

        # Dispatch table replaces the if/elif chain and is trivially extensible.
        sha_algos = {
            'sha1': hashlib.sha1,
            'sha256': hashlib.sha256,
            'sha384': hashlib.sha384,
        }
        hashlib_calc = sha_algos.get(sha_algo)
        if hashlib_calc is None:
            logger.warning('No SHA algo specified. Defaulting to SHA-256')
            hashlib_calc = hashlib.sha256

        msg = hashlib_calc(data).hexdigest()
        msg_bin = binascii.a2b_hex(msg)
        logger.debug2("H(code image) : " + msg)

        return msg_bin
Exemple #22
0
    def create_debug_dir(self, output_dir):
        """Creates the debug directory structure.

        :param str output_dir: Directory under which the debug dir is created.
        """
        if not self.debug:
            return
        if not output_dir:
            logger.note('Output directory not available to dump debug logs')
            return

        # Inner try tags creation failures with a specific message; the outer
        # except turns any failure into a warning instead of an exception.
        try:
            target = c_path.join(output_dir, defines.DEST_DEBUG_DIR)
            try:
                c_path.create_dir(target)
            except Exception as create_err:
                raise RuntimeError('Failed to create debug dir: ' + str(create_err))
        except Exception as err:
            logger.warning('Failed to store debug logs: ' + str(err))
Exemple #23
0
        def _decode_binary_blob(self, binary_blob, validating):
            """Decode a packed L2 Associated Data blob into this object's fields.

            :param binary_blob: Packed blob; must be exactly SPEC_SIZE_BYTES long.
            :param bool validating: If True, an image-ID mismatch is reported as a
                validation error via logger.error; otherwise it is only warned about.
            :raises RuntimeError: On wrong blob size, version mismatch against the
                configured major/minor version, or an unsupported version.
            """
            if len(binary_blob) != EncryptionParamsSectionBody.L2AssociatedData.SPEC_SIZE_BYTES:
                raise RuntimeError("L2 Associated Data blob is of the wrong size")

            # Size field is big-endian u16 at the start of the blob.
            string_offset = 0
            string_end = EncryptionParamsSectionBody.L2AssociatedData.L2_ASSOCIATED_DATA_SIZE_FLD_LEN_BYTES
            self.l2_associated_data_size, = struct.unpack(">H", binary_blob[string_offset:string_end])

            # Skip two reserved bytes, then read version/key-ladder fields as 4 bytes.
            string_offset = string_end + EncryptionParamsSectionBody.RSVD_BYTE_LEN_BYTES * 2
            string_end = string_offset + EncryptionParamsSectionBody.L2AssociatedData.MAJOR_VERSION_FLD_LEN_BYTES + \
                         EncryptionParamsSectionBody.L2AssociatedData.MINOR_VERSION_FLD_LEN_BYTES + \
                         EncryptionParamsSectionBody.L2AssociatedData.KEY_LADDER_LEN_FLD_LEN_BYTES + \
                         EncryptionParamsSectionBody.RSVD_BYTE_LEN_BYTES

            major_version, minor_version, self.key_ladder_length, tmp = struct.unpack("=BBBB", binary_blob[string_offset:string_end])
            string_offset = string_end

            if (major_version, minor_version) != (self.major_version, self.minor_version):
                raise RuntimeError(("Encryption Parameters L2 Associated Data version \"{0}.{1}\" does not match expected version \"{2}.{3}\""
                                    "\n       Ensure that the correct encryptor value is set.").format(major_version, minor_version, self.major_version, self.minor_version))

            # The image-ID bitmap width depends on the blob version: v1.0 uses a
            # 32-bit field, v1.1 uses a 128-bit field split into two u64 halves.
            if (major_version, minor_version) == (EncryptionParamsSectionBody.L2AssociatedData.MAJOR_VERSION_FLD_VAL_1, EncryptionParamsSectionBody.L2AssociatedData.MINOR_VERSION_FLD_VAL_0):
                string_end = string_offset + EncryptionParamsSectionBody.L2AssociatedData.IMAGE_ID_BITMAP_FLD_VERSION_1_0_LEN_BYTES
                image_id_bitmap, = struct.unpack("=I", binary_blob[string_offset:string_end])
            elif (major_version, minor_version) == (EncryptionParamsSectionBody.L2AssociatedData.MAJOR_VERSION_FLD_VAL_1, EncryptionParamsSectionBody.L2AssociatedData.MINOR_VERSION_FLD_VAL_1):
                string_end = string_offset + EncryptionParamsSectionBody.L2AssociatedData.IMAGE_ID_BITMAP_FLD_VERSION_1_1_LEN_BYTES
                image_id_bitmap_upper, image_id_bitmap_lower = struct.unpack("=QQ", binary_blob[string_offset:string_end])
                # NOTE(review): the first-unpacked u64 is treated as the low half and
                # the second as the high half - confirm against the encoder's layout.
                image_id_bitmap = image_id_bitmap_lower * (2 ** 64) + image_id_bitmap_upper
            else:
                raise RuntimeError("Configured Encryption Parameters L2 Associated Data version \"{0}.{1}\" is invalid.".format(self.major_version, self.minor_version))

            # The bitmap is expected to have a single bit set; log2 recovers the ID.
            image_id = int(math.log(image_id_bitmap, 2))
            if image_id != self.image_id:
                if validating:
                    errstr = list()
                    mismatches = list()
                    mismatches.append((EncryptionParamsSectionBody.L2AssociatedData.IMAGE_ID, "0x%X" % image_id, "0x%X" % self.image_id))
                    create_mismatch_table(mismatches, errstr, operation="encryption", data_type_to_compare="Attribute", image_region="Encryption Parameters")
                    logger.error('Following validations failed for the image:\n       ' +
                                 '\n       '.join([(str(i + 1) + '. ' + e) for i, e in enumerate(errstr)]))
                else:
                    logger.warning(("Extracted Encryption Parameters " + EncryptionParamsSectionBody.L2AssociatedData.IMAGE_ID + " value \"{0}\" does not match config value \"{1}\""
                                   "\n\t Encryption Parameters " + EncryptionParamsSectionBody.L2AssociatedData.IMAGE_ID + " value will be updated with value \"{1}\"").format(hex(image_id), hex(self.image_id)))
    def __init__(self, data,
                 debug_dir=None,
                 debug_prefix=None,
                 debug_suffix=None,
                 ):
        """Parse an ELF image, storing headers and segments, and dump debug data.

        :param data: Binary contents of the ELF file to parse.
        :param str debug_dir: Directory to dump debug files into (None disables dumping).
        :param str debug_prefix: Prefix for the names of the debug files.
        :param str debug_suffix: Suffix for the names of the debug files.
        """
        # Public properties
        self.debug_dir = debug_dir
        self.debug_prefix = debug_prefix
        self.debug_suffix = debug_suffix

        # Store the original image
        self.store_debug_data(df.FILE_DATA_IN, data)

        # Extract the various segments/sections of the data:
        # 1. Elf header
        # 2. Prog headers
        # 3. Bin

        # Extract the header
        self.ehdr = extract_ehdr(data)
        self.store_debug_data(df.FILE_HDR_IN, self.ehdr.pack())
        self.store_debug_data(df.FILE_HDR_IN_REPR, repr(self.ehdr), suffix=df.FILE_HDR_IN_REPR_SUFFIX)

        # Extract the program headers
        self.phdrs = extract_phdrs(data, self.ehdr)
        self.store_debug_data(df.FILE_PHDR_IN, pack_phdrs(self.phdrs))
        self.store_debug_data(df.FILE_PHDR_IN_REPR, repr_phdrs(self.phdrs), suffix=df.FILE_PHDR_IN_REPR_SUFFIX)

        # Dump the individual segments
        self.segments = extract_segments(data, self.phdrs)
        for idx, phdr in enumerate(self.phdrs):
            length = len(self.segments[phdr])
            is_load = self._segment_to_put(phdr)
            # len() is never negative, so only the upper bound needs checking.
            if length <= 16:
                logger.debug(('' if is_load else 'Non-') + 'Loadable segment - ' + str(idx + 1) + ' is of size: '  + str(length))
                # Warn only for loadable segments that are non-empty yet suspiciously small.
                if is_load and length > 0:
                    logger.warning(('' if is_load else 'Non-') + 'Loadable segment - ' + str(idx + 1) + ' is of size: '  + str(length))
            self.store_debug_data(df.FILE_SEGMENT_IN.format(idx), self.segments[phdr])

        # Zero out the sections for now
        zero_out_sections(self.ehdr)
def generate_hash(hashing_algorithm, file_to_hash):
    """ Function to generate hashes of input file using the standard hashing algorithm specified

        input:  hashing_algorithm: ["SHA1"|"SHA256"]
                file_to_hash: The file to calculate the hash of.

        output: string representing hash of file_to_hash

        raises: RuntimeError if the openssl invocation fails.
    """
    # openssl expects the algorithm as a "-<algo>" flag; unknown values fall back to SHA256.
    if hashing_algorithm.lower() not in ["sha1", "sha256"]:
        logger.warning("The algorithm specified is invalid! Using SHA256 as default.")
        hashing_algorithm = "-sha256"
    else:
        hashing_algorithm = "-" + hashing_algorithm

    try:
        # openssl dgst prints "<algo>(file)= <hash>"; keep the part after "= ".
        generated_hash = utility_functions.system_command_logged(
            [openssl_binary_path, "dgst", hashing_algorithm, file_to_hash]).rstrip().split("= ")[1]
    except Exception as e:
        # The original bare except swallowed the error and then hit a NameError
        # on the unbound return value; fail loudly instead.
        logger.critical("generate_hash: OPENSSL Hash generation failed")
        raise RuntimeError("generate_hash: OPENSSL Hash generation failed: " + str(e))
    return generated_hash
Exemple #26
0
    def run_cmd(self, cmd):
        """Run *cmd* (a meta_lib_shim.py invocation) and return its JSON payload.

        :param list cmd: Command line to execute via subprocess.
        :returns: Parsed JSON object from the line following the result token,
            or None if the output could not be parsed.
        :raises RuntimeError: If no python2 executable is configured.
        """
        if self.py2_exe is None:
            raise RuntimeError(
                r"To access metabuild info running under python3, a python2 executable "
                r"is currently required. If you need this functionality please "
                r"ensure python2 is installed")

        # shell command to meta_lib_shim.py
        response = ensure_str(subprocess.check_output(cmd)).splitlines()

        # look for 'Result >>>' line, the line after contains the json we want
        resp_obj = None
        try:
            token_pos = response.index(RESULT_TOKEN)
            resp_obj = json.loads(response[token_pos+1])
        except (IndexError, ValueError) as e:
            # ValueError: token missing (.index) or malformed JSON (json.loads).
            # IndexError: token was the last line, so no JSON line follows it.
            # (The previous KeyError clause could never fire here.)
            logger.warning("Bad return from meta_lib_shim: {}".format(str(e)))
            logger.warning("--> Response was: {}".format(response))

        return resp_obj
    def get_image_info_from_meta(self, meta_info):
        """ Get the image info from the meta build for all applicable sign ID.

        The image info includes:
          sign_id, chipset, image_src_path, image_dest_path and meta status.

        If an image is found or resolved, the meta status is SUCCESS. If not found,
        then meta status is NOT_FOUND. Otherwise, status is UNKNOWN. If unknown,
        a warning is logged, and the search continues.

        All the images defined in the secimage XML file image list is examined.
        Those that do not have meta build location tag is skipped.
        """
        for sign_id in self.img_config_parser.sign_id_list:
            image = self.img_config_parser.get_config_for_sign_id(sign_id)

            # Only images with a meta build location participate.
            if not image.meta_build_location:
                continue

            try:
                resolved = self.resolve_image_path(meta_info, image, sign_id)
                dir_name = os.path.dirname(resolved)
                file_name = os.path.basename(resolved)

                src = ImagePath()
                src.image_dir_base = dir_name
                src.image_dir_ext = ''
                src.image_name = file_name

                dest = DestImagePath()
                dest.image_dir_base = dir_name
                dest.image_dir_ext = ''
                dest.image_name = file_name

            except MetaError as e:
                # Known meta failure: surface its error code and move on.
                logger.warning(str(e))
                yield (sign_id, self.chipset, None, None, e.err_code)
                continue

            except Exception as e:
                # Unknown failure: warn and keep searching.
                logger.warning('Metalib error: ' + str(e))
                continue

            yield (sign_id, self.chipset, src, dest, MetaError.SUCCESS)
def gen_rsa_key_pair(key_size_in_bits, key_exponent, priv_key_output_file, pub_key_output_file):
    """ Generate RSA Key pair

        input:

        key_size_in_bits: Size of the key in bits.
        key_exponent: [3, 65537]
                      Exponent used in key calculation.
        priv_key_output_file: File name for storing private key
        pub_key_output_file: File name for storing public key

        output:

        returned value: {"public_key": [Generated public key],
                         "private_key": [Generated private key] }
                         Dictionary holding the values of public and private keys
    """

    logger.debug("the openssl_binary is:{0}".format(openssl_binary_path))
    # Map the supported exponents to openssl genrsa flags; anything else
    # falls back to 65537 (-f4) with a warning.
    if key_exponent == 3:
        exponent_str = "-3"
    elif key_exponent == 65537:
        exponent_str = "-f4"
    else:
        logger.warning("Exponent value supplied is INVALID! going with default exponent of 65537.")
        exponent_str = "-f4"

    key_size_str = str(key_size_in_bits)

    if priv_key_output_file is not None:
        pk_file = open(priv_key_output_file, "wb")
    else:
        # delete=False so the key file survives after the handle is closed.
        pk_file = tempfile.NamedTemporaryFile(delete=False)
        logger.debug("No output file specified for private key storage, so creating temp file: " + pk_file.name)

    try:
        private_key = utility_functions.system_command_logged([openssl_binary_path, "genrsa", exponent_str, key_size_str], stderr_to_temp=True)
    except subprocess.CalledProcessError as e:
        # Was Python-2-only 'except X, e' syntax (SyntaxError under Python 3);
        # 'as e' is valid on both Python 2.6+ and 3.
        logger.critical("gen_rsa_key_pair: OPENSSL Errored out on generation of RSA key.")
        logger.critical("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))
        raise RuntimeError("call to OpenSSL binary returned an error!: retval = " + str(e.returncode) + " Command = " + str(e.cmd))
Exemple #29
0
    def configs(self):
        """(list[obj]) List of objects of :class:`ConfigParser` generated by
        loading the config files in the config_dir.
        """
        # NOTE(review): this block was corrupted in the source (text fused into
        # "'user: '******'Failed to load configs.'", losing the try/except).
        # Reconstructed from the sibling configs() implementations in this file;
        # confirm the ConfigParser call signature against the original.
        configs = []
        for oem, qc, ui, user in self.config_paths:
            logger.debug2('Loading configs: ' + '\n'
                          '    ' + 'oem: ' + str(oem) + '\n'
                          '    ' + 'qc: ' + str(qc) + '\n'
                          '    ' + 'ui: ' + str(ui) + '\n'
                          '    ' + 'user: ' + str(user))
            try:
                configs.append(ConfigParser(oem, qc, ui, user))
            except Exception as e:
                logger.warning('Failed to load configs.' + '\n'
                               '    ' + 'Error: ' + str(e))
        return configs
Exemple #30
0
    def validate_cert_chain(self, cert, ca_cert_chain):
        """Verify *cert* against *ca_cert_chain* using openssl.

        Returns True when openssl reports the cert as OK, False on any
        RuntimeError from the verification pipeline. Temp files are always
        cleaned up.
        """
        tmp_chain, tmp_cert = None, None
        try:
            tmp_chain = c_path.create_tmp_file(ca_cert_chain)
            tmp_cert = c_path.create_tmp_file(cert)

            # Verify at the cert's own start-of-validity instant; warn if the
            # local clock has not yet reached it.
            valid_from, _ = self.get_validity(self.get_text(cert))
            attime = calendar.timegm(valid_from)
            lag = attime - int(time.time())
            if lag > 0:
                logger.warning('Local machine\'s time is slow by at least ' + str(lag) + ' seconds.')

            output = run_command([self.openssl, 'verify',
                                  '-attime', str(attime),
                                  '-CAfile', tmp_chain, tmp_cert],
                                 ret_binary=False)
            return (tmp_cert + ': OK') in output
        except RuntimeError:
            return False
        finally:
            c_path.remove_tmp_file(tmp_chain)
            c_path.remove_tmp_file(tmp_cert)
    def _validate_properties(self):
        """Validate this image's signing attributes.

        Enforces authority-permission rules per secboot version, emits warnings
        for attribute values discouraged on secboot 2.0/3.0 double-sign
        chipsets, and clears max_num_root_certs when the target is not an
        MRC v2.0 double-sign target.

        :raises RuntimeError: If authority permissions are inconsistent with
            the configured secboot version.
        """
        # oem_id is a hex string; flag (via policy) the reserved value 0.
        if not POLICY_OEM_ID_0.is_ignore():
            if int(self.signing_attributes.oem_id, 16) == 0:
                POLICY_OEM_ID_0.run('OEM ID is set to 0 for sign_id "' +
                                    str(self.sign_id) + '"')

        # Secboot v1 requires oem permissions only
        if self.signing_attributes.secboot_version == defines.SECBOOT_VERSION_1_0:
            if not self.signing_attributes.oem_sign:
                # oem_sign cannot be false for secboot v1
                raise RuntimeError(
                    'OEM operations cannot be disabled for a secboot version '
                    + str(defines.SECBOOT_VERSION_1_0) + ' image.')
            if self.signing_attributes.qti_sign:
                # qti_sign cannot be true for secboot v1
                raise RuntimeError(
                    'Cannot perform QTI exclusive operations on a secboot version '
                    + str(defines.SECBOOT_VERSION_1_0) + ' image.')
        # If all authority permissions are disabled, throw error
        elif not self.signing_attributes.qti_sign and not self.signing_attributes.oem_sign:
            raise RuntimeError(
                'Cannot perform image operations because OEM and QTI operations are disabled for image.'
            )

        # Raise warnings for Secboot v2.1 and greater chipsets
        if self.chipset in SECBOOT_2_0_DOUBLE_SIGN_CHIPSETS + SECBOOT_3_0_DOUBLE_SIGN_CHIPSETS:
            # rot_en is a hex string; warn when root-of-trust enable is set.
            if self.signing_attributes.rot_en is not None and int(
                    self.signing_attributes.rot_en, 16) == 1:
                logger.warning(
                    "rot_en should not be set to \"{0}\" for chipset \"{1}\"".
                    format(self.signing_attributes.rot_en, self.chipset))
            # SHA-1, exponent 3 and RSA-PKCS padding are all discouraged here.
            if self.signing_attributes.hash_algorithm == "sha1":
                logger.warning(
                    "hash_algorithm should not be set to \"{0}\" for chipset \"{1}\""
                    .format(self.signing_attributes.hash_algorithm,
                            self.chipset))
            if self.signing_attributes.exponent == 3:
                logger.warning(
                    "exponent should not be set to \"{0}\" for chipset \"{1}\""
                    .format(self.signing_attributes.exponent, self.chipset))
            if self.signing_attributes.rsa_padding is None or self.signing_attributes.rsa_padding == "pkcs":
                logger.warning(
                    "rsa_padding should not be set to RSAPKCS for chipset \"{0}\""
                    .format(self.chipset))

        # Ignore max_num_root_certs value if target is not MRC v2.0 double sign target
        if not (self.chipset in MRC_2_0_CHIPSETS
                and self.signing_attributes.qti_sign
                and self.signing_attributes.oem_sign):
            self.signing_attributes.max_num_root_certs = None
Exemple #32
0
def evaluate_serial_binding_attributes(authority, imageinfo):
    """Check which attributes *authority* serial-bound for this image.

    Logs the bound serial numbers, or raises if the per-attribute bindings
    have no serial number in common (the image would authenticate nowhere).
    """
    if authority not in imageinfo.get_serial_bound_notes():
        return
    notes = imageinfo.get_serial_bound_notes()[authority]
    logger.debug("image signing authority: " + authority)
    if not notes:
        # Empty notes means no serial restriction at all.
        logger.debug(
            "{authority} bound the image to all serial numbers.".format(
                authority=authority))
        return

    for attr_name, attr_value in notes.items():
        logger.debug("%s is serial-bound to %s" % (attr_name, attr_value))

    # Only serials accepted by EVERY bound attribute will authenticate.
    common = set.intersection(*[set(x) for x in notes.values()])
    intersection = sorted(list(common))
    if not intersection:
        raise RuntimeError(
            "{authority} incorrectly bound the image to serial numbers "
            "in such a way that it will not authenticate on any device.".
            format(authority=authority))
    logger.warning(
        "{authority} bound the image to serial numbers: {serial_numbers}"
        .format(authority=authority,
                serial_numbers=", ".join(intersection)))
    def get_data(self, integrity_check=None, sign=None, encrypt=None, get_hash_segment=False):
        """Return the packed image data (or just its hash segment).

        :param bool integrity_check: Override for self.integrity_check; forced
            True whenever sign or encrypt is requested.
        :param bool sign: Override for self.sign.
        :param bool encrypt: Override for self.encrypt.
        :param bool get_hash_segment: If True, return only the generated hash
            segment instead of the full image data.
        :returns: The packed ELF data, or the hash segment bytes when
            get_hash_segment is True (None if no hash segment was generated).
        :raises RuntimeError: If the generated hash segment does not match the
            estimated size.
        """
        # Resolve the operation
        integrity_check = self.integrity_check if integrity_check is None else integrity_check
        sign = self.sign if sign is None else sign
        encrypt = self.encrypt if encrypt is None else encrypt
        integrity_check = True if (integrity_check or sign or encrypt) else False

        # Allow base to do any checks
        SecParseGenBase.get_data(self, integrity_check, sign, encrypt)

        hash_segment = None

        if not (integrity_check or sign or encrypt):
            data = self._elf_parsegen.get_data()
        else:
            # Add the header and hash segment
            prog_phdr_entry, hash_phdr_entry, hash_segment_size = self._add_phdr_and_hash_segs(integrity_check, sign, encrypt)

            try:
                # Generate the hash segment now
                hash_segment = self.get_hash_segment(integrity_check, sign, encrypt)

                # Check here for sizes mismatching just in case
                if len(hash_segment) != hash_segment_size:
                    raise RuntimeError('Estimated hash table size was wrong. Estimate - ' +
                                       str(hash_segment_size) + ', Actual - ' + str(len(hash_segment)))

                # Re-add the hash segment, this time with the real data
                self._elf_parsegen.remove_segment(hash_phdr_entry)
                self._elf_parsegen.remove_segment(prog_phdr_entry)
                self._elf_parsegen.add_segment(prog_phdr_entry, '')
                self._elf_parsegen.add_segment(hash_phdr_entry, hash_segment, index=1)

                # If encrypting, change the process segment data
                if encrypt:
                    parsegen_updater = ParseGenEncDec(self.store_debug_data, self.encrypt_segment)
                    parsegen_updater.update_parsegen(self.encdec.get_segment_num_scheme(), self._elf_parsegen)
                try:
                    # Get the elf data
                    data = self._elf_parsegen.get_data()
                finally:
                    # Restore the process segment data
                    if encrypt:
                        try: parsegen_updater.revert_parsegen(self._elf_parsegen)
                        except Exception as e: logger.warning(str(e))
            finally:
                # Remove the prog and hash segments so the parsegen is left in
                # its original state even if packing failed; cleanup errors are
                # only warned about, never raised.

                # Remove the prog
                try: self._elf_parsegen.remove_segment(prog_phdr_entry)
                except Exception as e: logger.warning(str(e))

                # Remove the hash
                try: self._elf_parsegen.remove_segment(hash_phdr_entry)
                except Exception as e: logger.warning(str(e))
        if get_hash_segment:
            return hash_segment
        return data
Exemple #34
0
    def validate(self, images, *args):
        assert (isinstance(images, complex_images_list))

        image_list = images.get_image()
        retval = [True]
        errors = []

        def add_error(sign_id, error):
            retval[0] = False
            errors.append("\nsign_id={0}: ".format(sign_id) + error)

        def validate_hex_list(sign_id, hex_list, max_num_allowed, list_name,
                              item_name, length_of_item):
            if len(hex_list) > max_num_allowed:
                add_error(
                    sign_id,
                    "{0} provided {1}s exceeds allowed maximum of {2}".format(
                        len(hex_list), item_name, max_num_allowed))

            for val in hex_list:
                try:
                    if val[:2] != "0x" or len(val) != length_of_item or (int(
                            val, 16) == 0):
                        raise ValueError("malformed")
                    int(val, 16)
                except ValueError:
                    add_error(
                        sign_id,
                        "Provided {0} value \"{1}\" is not a valid {2}".format(
                            list_name, val, item_name))

            duplicates = get_duplicates(hex_list)
            if len(duplicates) > 0:
                if len(duplicates) == 1:
                    error_str = "{0} contains a duplicate of the following value: {1}".format(
                        list_name, ", ".join(duplicates))
                else:
                    error_str = "{0} contains duplicates of the following values: ".format(
                        list_name, ", ".join(duplicates))
                add_error(sign_id, error_str)

        # expect args[0] to be instance of signing
        # the following default signing attributes are checked if signing is TCG
        assert (isinstance(args[0], complex_general_properties))
        general_properties = args[0]
        default_sw_id = general_properties.get_sw_id()
        default_app_id = general_properties.get_app_id()
        default_crash_dump = general_properties.get_crash_dump()
        default_msm_part = general_properties.get_msm_part()
        default_soc_hw_version = general_properties.get_soc_hw_version()
        default_soc_vers = general_properties.get_soc_vers()
        default_mask_soc_hw_version = general_properties.get_mask_soc_hw_version(
        )
        default_in_use_soc_hw_version = general_properties.get_in_use_soc_hw_version(
        )
        default_use_serial_number_in_signing = general_properties.get_use_serial_number_in_signing(
        )
        default_serial_number = general_properties.get_serial_number()
        default_oem_id = general_properties.get_oem_id()
        default_model_id = general_properties.get_model_id()
        default_debug = general_properties.get_debug()
        default_multi_serial_numbers = general_properties.get_multi_serial_numbers(
        ).get_serial() if general_properties.get_multi_serial_numbers(
        ) is not None else []
        default_oid = general_properties.get_object_id()
        default_oid_min = default_oid.min if default_oid is not None else None
        default_oid_max = default_oid.max if default_oid is not None else None
        default_hash_algorithm = general_properties.get_hash_algorithm()
        default_hmac = general_properties.get_hmac()
        default_rsa_padding = general_properties.get_rsa_padding()
        default_num_root_certs = general_properties.get_num_root_certs()
        default_max_num_root_certs = general_properties.get_max_num_root_certs(
        )
        default_mrc_index = general_properties.get_mrc_index()
        default_secboot_version = general_properties.get_secboot_version()
        default_dsa_type = general_properties.get_dsa_type()
        default_ecdsa_curve = general_properties.get_ecdsa_curve()
        default_key_size = general_properties.get_key_size()
        default_exponent = general_properties.get_exponent()
        default_client_id = general_properties.get_client_id()
        default_lib_id = general_properties.get_lib_id()
        default_UIE_server_cert_path = general_properties.get_UIE_server_cert_path(
        )

        assert (isinstance(args[1], complex_metadata))
        metadata = args[1]
        chipset = metadata.get_chipset()

        for image in image_list:
            sign_id = image.get_sign_id()
            image_type = image.get_image_type()
            overrides = image.get_general_properties_overrides()

            sw_id = overrides.get_sw_id() if overrides.get_sw_id(
            ) is not None else default_sw_id
            app_id = overrides.get_app_id() if overrides.get_app_id(
            ) is not None else default_app_id
            crash_dump = overrides.get_crash_dump(
            ) if overrides.get_crash_dump() is not None else default_crash_dump
            msm_part = overrides.get_msm_part() if overrides.get_msm_part(
            ) is not None else default_msm_part
            soc_hw_version = overrides.get_soc_hw_version(
            ) if overrides.get_soc_hw_version(
            ) is not None else default_soc_hw_version
            soc_vers = overrides.get_soc_vers() if overrides.get_soc_vers(
            ) is not None else default_soc_vers
            mask_soc_hw_version = overrides.get_mask_soc_hw_version(
            ) if overrides.get_mask_soc_hw_version(
            ) is not None else default_mask_soc_hw_version
            in_use_soc_hw_version = overrides.get_in_use_soc_hw_version(
            ) if overrides.get_in_use_soc_hw_version(
            ) is not None else default_in_use_soc_hw_version
            use_serial_number_in_signing = overrides.get_use_serial_number_in_signing(
            ) if overrides.get_use_serial_number_in_signing(
            ) is not None else default_use_serial_number_in_signing
            serial_number = overrides.get_serial_number(
            ) if overrides.get_serial_number(
            ) is not None else default_serial_number
            oem_id = overrides.get_oem_id() if overrides.get_oem_id(
            ) is not None else default_oem_id
            model_id = overrides.get_model_id() if overrides.get_model_id(
            ) is not None else default_model_id
            debug = overrides.get_debug() if overrides.get_debug(
            ) is not None else default_debug
            multi_serial_numbers = list(overrides.get_multi_serial_numbers(
            ).get_serial() if overrides.get_multi_serial_numbers(
            ) is not None else default_multi_serial_numbers)
            override_oid = overrides.get_object_id(
            ) if overrides.get_object_id() is not None else default_oid
            oid_min = override_oid.min if override_oid is not None else default_oid_min
            oid_max = override_oid.max if override_oid is not None else default_oid_max
            hmac = overrides.get_hmac() if overrides.get_hmac(
            ) is not None else default_hmac
            hmac = hmac or hmac is None
            rsa_padding = overrides.get_rsa_padding(
            ) if overrides.get_rsa_padding(
            ) is not None else default_rsa_padding
            hash_algorithm = overrides.get_hash_algorithm(
            ) if overrides.get_hash_algorithm(
            ) is not None else default_hash_algorithm
            num_root_certs = overrides.get_num_root_certs(
            ) if overrides.get_num_root_certs(
            ) is not None else default_num_root_certs
            max_num_root_certs = overrides.get_max_num_root_certs(
            ) if overrides.get_max_num_root_certs(
            ) is not None else default_max_num_root_certs
            mrc_index = overrides.get_mrc_index() if overrides.get_mrc_index(
            ) is not None else default_mrc_index
            secboot_version = overrides.get_secboot_version(
            ) if overrides.get_secboot_version(
            ) is not None else default_secboot_version
            dsa_type = overrides.get_dsa_type() if overrides.get_dsa_type(
            ) is not None else default_dsa_type
            ecdsa_curve = overrides.get_ecdsa_curve(
            ) if overrides.get_ecdsa_curve(
            ) is not None else default_ecdsa_curve
            key_size = overrides.get_key_size() if overrides.get_key_size(
            ) is not None else default_key_size
            exponent = overrides.get_exponent() if overrides.get_exponent(
            ) is not None else default_exponent
            client_id = overrides.get_client_id() if overrides.get_client_id(
            ) is not None else default_client_id
            lib_id = overrides.get_lib_id() if overrides.get_lib_id(
            ) is not None else default_lib_id
            UIE_server_cert_path = overrides.get_UIE_server_cert_path(
            ) if overrides.get_UIE_server_cert_path(
            ) is not None else default_UIE_server_cert_path

            # Secboot version checking
            if secboot_version is None or (
                    image_type in SECBOOT_1_IMAGE_TYPES
                    and secboot_version != SECBOOT_VERSION_1_0):
                # general properties update occurs in imageinfo's _sanitize_general_properties (like for all other config value updates)
                secboot_version = SECBOOT_VERSION_1_0
            if num_root_certs > 1 and secboot_version != SECBOOT_VERSION_1_0 and chipset not in (
                    SECBOOT_2_0_CHIPSETS + SECBOOT_3_0_CHIPSETS):
                # Multiple root certs not allowed with secboot 2.0 and greater multi-party signing
                add_error(
                    sign_id,
                    "Multiple root certs are not supported for secboot {0} chipset {1}."
                    .format(str(secboot_version), chipset))

            # Default hash algorithm to sha256
            if hash_algorithm is None:
                hash_algorithm = "sha256"

            # Backward compatibility: dsa_type is optional, so if it does not exist assume RSA
            if dsa_type is None:
                dsa_type = "rsa"

            # Manually check that ecdsa / rsa arguments exist when the corresponding
            if dsa_type == "rsa":
                if exponent is None or key_size is None:
                    add_error(
                        sign_id,
                        "You must specify exponent and key size when using RSA."
                    )
            if dsa_type == "ecdsa":
                general_properties.exponent = None
                general_properties.key_size = None
                if ecdsa_curve is None:
                    add_error(
                        sign_id,
                        "You must specify the ecdsa curve when using ECDSA.")

                # Check that ecdsa value is only used with supported chipset
                if chipset not in ECDSA_CHIPSETS:
                    add_error(
                        sign_id,
                        "ECDSA is not supported for chipset {0}.".format(
                            chipset))
            else:
                # RSAPSS requires SHA256 or SHA384
                if rsa_padding == "pss":
                    if hash_algorithm not in ["sha256", "sha384"]:
                        add_error(
                            sign_id,
                            "RSAPSS requires SHA256 or SHA384 hash. hash_algorithm={0}."
                            .format(hash_algorithm))
                    if hmac:
                        add_error(
                            sign_id,
                            "RSAPSS cannot be used with HMAC. hmac must be set as False. hmac={0}."
                            .format(hmac))
                else:
                    if not hmac:
                        add_error(sign_id, "RSAPKCS should be used with HMAC.")

            if secboot_version == SECBOOT_VERSION_3_0:
                # Secboot 3 only support sha256 and sha384
                if hash_algorithm not in ["sha256", "sha384"]:
                    add_error(
                        sign_id,
                        "Secboot {0} requires SHA256 or SHA384 hash.".format(
                            secboot_version))
                # Secboot 3 HMAC has restriction due to soc_hw_version and other values being removed from MBN header
                if hmac:
                    if mask_soc_hw_version is not None:
                        add_error(
                            sign_id,
                            "Secboot {0} requires that mask_soc_hw_version not be set when HMAC is set."
                            .format(secboot_version))
                    if in_use_soc_hw_version == 1:
                        add_error(
                            sign_id,
                            "Secboot {0} requires in_use_soc_hw_version to be set to 0 when HMAC is set."
                            .format(secboot_version))
                    if use_serial_number_in_signing == 1:
                        if serial_number is None or not multi_serial_numbers:
                            add_error(
                                sign_id,
                                "Secboot {0} requires that serial_number and first serial in multi_serial_numbers match when HMAC and use_serial_number_in_signing are set."
                                .format(secboot_version))
                        if serial_number is not None and multi_serial_numbers and serial_number != multi_serial_numbers[
                                0]:
                            add_error(
                                sign_id,
                                "Secboot {0} requires that serial_number and first serial in multi_serial_numbers match when HMAC and use_serial_number_in_signing are set.\n\t"
                                "serial_number={1} serial={2}".format(
                                    secboot_version, serial_number,
                                    multi_serial_numbers[0]))

            # TZ apps rule, must have app_id set
            if (int(sw_id, 16) & 0xFFFFFFFF) == 0xC:
                if app_id is None or int(app_id, 16) == 0:
                    add_error(
                        sign_id,
                        "app_id is not set for TZ apps: sw_id={0}.".format(
                            sw_id))
            #TODO: app_id is image specific, current rule checking will cover all the images in the config file
            """
            # other than tz, app_id must not be present
            else:
                if app_id is not None:
                    retval = False
                    error_str += '\n app_id is set for Non-TZ image: sign_id=%s, sw_id=%s, app_id=%s' % (sign_id, sw_id, app_id)
            """

            # crash_dump rule, LSB 32bits must not be greater than 1
            if crash_dump is not None and (int(crash_dump, 16)
                                           & 0xFFFFFFFF) > 1:
                add_error(
                    sign_id,
                    "crash_dump 32bit LSB must be 0 or 1: crash_dump={0}.".
                    format(crash_dump))

            if secboot_version == SECBOOT_VERSION_3_0:
                # msm_part must be provided for secboot 3 images
                if msm_part is None:
                    add_error(
                        sign_id,
                        "MSM_PART must be used to sign Secboot {0} images but MSM_PART is missing."
                        .format(secboot_version))

                # Make sure soc_vers is provided when in_use_soc_hw_version is set to 1
                if in_use_soc_hw_version == 1 and soc_vers is None:
                    add_error(
                        sign_id,
                        "IN_USE_SOC_HW_VERSION specifies SOC_VERS is used but SOC_VERS tag is missing."
                    )
            else:
                # At least one of MSM_PART or SOC_HW_VERSION must be specified
                if msm_part is None and soc_hw_version is None:
                    add_error(
                        sign_id,
                        "MSM_PART and SOC_HW_VERSION are missing from config. At least one must exist."
                    )

                # Only use Family and Device number for soc_hw_version:
                if soc_hw_version is not None and len(soc_hw_version) != 10:
                    add_error(
                        sign_id,
                        "soc_hw_version value {0} is not valid. Value must start with 0x and be 8 bytes."
                        .format(soc_hw_version))

                # Check if any sign_ids in SOC-chipset need to sign with JTAG ID
                if (soc_hw_version
                        is not None) and (chipset in JTAGID_SIGN_IDS) and (
                            sign_id in JTAGID_SIGN_IDS[chipset]) and (msm_part
                                                                      is None):
                    add_error(
                        sign_id,
                        "MSM_PART must be used to sign this image but MSM_PART is missing."
                    )

                # Assure in_use_soc_hw_version exists if both msm_part and soc_hw_version are given
                if soc_hw_version is not None and msm_part is not None and in_use_soc_hw_version is None:
                    add_error(
                        sign_id,
                        "IN_USE_SOC_HW_VERSION must exist to chose between MSM_PART and SOC_HW_VERSION."
                    )

                # in_use_soc_hw_version must exist with soc_hw_version
                if soc_hw_version is not None and in_use_soc_hw_version is None:
                    add_error(
                        sign_id,
                        "IN_USE_SOC_HW_VERSION must be set when using SOC_HW_VERSION."
                    )

                # Make sure in_use_soc_hw_version's specification exists
                if in_use_soc_hw_version == 1 and soc_hw_version is None:
                    add_error(
                        sign_id,
                        "IN_USE_SOC_HW_VERSION specifies SOC_HW_VERSION is used but SOC_HW_VERSION tag is missing."
                    )
                elif in_use_soc_hw_version == 0 and msm_part is None:
                    add_error(
                        sign_id,
                        "IN_USE_SOC_HW_VERSION specifies SOC_HW_VERSION is NOT used but MSM_PART tag is missing."
                    )

                # mask_soc_hw_version may only exist if soc_hw_version exists
                if soc_hw_version is None:
                    if mask_soc_hw_version is not None:
                        add_error(
                            sign_id,
                            "MASK_SOC_HW_VERSION can not exist without the SOC_HW_VERSION tag."
                        )

                if mask_soc_hw_version is not None and in_use_soc_hw_version == 1 and self.mask_warning is True:
                    logger.warning(
                        "The mask_soc_hw_version field is set and will mask the soc_hw_version during signing. Please ensure this is the desired result."
                    )
                    self.mask_warning = False

            # Validate soc_vers
            if soc_vers is not None:
                soc_vers_list = soc_vers.split()
                validate_hex_list(sign_id, soc_vers_list,
                                  MAX_NUM_SOC_VERS_MAP[secboot_version],
                                  "soc_vers", "soc_ver", 6)

            # use_serial_number_in_signing rule: serial number must be set
            if use_serial_number_in_signing == 1:
                if serial_number is None or int(serial_number, 16) == 0:
                    add_error(
                        sign_id,
                        "serial_number must be set when use_serial_number_in_signing is enabled."
                    )
                elif len(serial_number) > 10:
                    add_error(
                        "sign_id={0}: serial_number value must be 10 characters or less in length"
                        .format(sign_id))

            # Ensure MRC targets are properly configured to avoid OU field conflicts
            misconfigured_mrc_chipsets = get_duplicates(MRC_1_0_CHIPSETS +
                                                        MRC_2_0_CHIPSETS)
            if misconfigured_mrc_chipsets:
                raise RuntimeError(
                    "MRC 1.0 and MRC 2.0 chipsets must be mutually exclusive. The following chipsets are configured incorrectly: {0}."
                    .format(", ".join(misconfigured_mrc_chipsets)))

            # Validate MRC cert values
            if chipset in MRC_2_0_CHIPSETS and chipset in SECBOOT_2_0_DOUBLE_SIGN_CHIPSETS + SECBOOT_3_0_DOUBLE_SIGN_CHIPSETS and max_num_root_certs is None:
                add_error(
                    sign_id,
                    "max_num_root_certs must be provided for chipset \"{0}\".".
                    format(chipset))

            # MRC 1.0 support up to 4 roots, MRC 2.0 supports 1 or 4 roots
            if chipset in MRC_1_0_CHIPSETS + MRC_2_0_CHIPSETS:
                if max_num_root_certs is not None and num_root_certs > max_num_root_certs:
                    add_error(
                        sign_id,
                        "num_root_certs must be less than or equal to max_num_root_certs: num_root_certs={0}, max_num_root_certs={1}."
                        .format(num_root_certs, max_num_root_certs))
                if num_root_certs not in [1, 2, 3, 4]:
                    add_error(sign_id,
                              "num_root_certs must be in range [1-4].")
            # Legacy MRC supports up to 16 roots
            elif num_root_certs < 1 or num_root_certs > 16:
                add_error(sign_id, "num_root_certs must be in range [1-16].")

            if num_root_certs > 1 and mrc_index is None:
                add_error(
                    sign_id,
                    "mrc_index must be provided when num_root_certs is greater than 1."
                )

            if mrc_index is not None and mrc_index >= num_root_certs:
                add_error(
                    sign_id,
                    "Index out of range: mrc_index={0}, num_root_certs={1}.".
                    format(mrc_index, num_root_certs))

            # Format and validate debug
            if debug is not None:
                if len(debug) > 18:
                    add_error(
                        sign_id,
                        "Debug value must be 18 characters or less in length.")
                elif secboot_version != SECBOOT_VERSION_3_0 and len(
                        debug) < 18:
                    padding_len = 18 - len(debug)
                    debug = debug[:2] + "0" * padding_len + debug[2:]

            # Validate the multi_serial_numbers
            if len(multi_serial_numbers) > 0:
                if secboot_version == SECBOOT_VERSION_2_0:
                    if debug is None:
                        add_error(
                            sign_id,
                            "Debug serials were provides but debug field was not provided."
                        )
                    elif debug[:-8] in multi_serial_numbers:
                        add_error(
                            sign_id,
                            "Duplicate serial value of {0} in debug and multi_serial_numbers."
                            .format(debug[:-8]))
                    elif int(debug[:-8], 16) != 0:
                        multi_serial_numbers.append(debug[:-8])
                validate_hex_list(sign_id, multi_serial_numbers,
                                  MAX_NUM_SERIALS_MAP[secboot_version],
                                  "multi_serial_numbers", "serial", 10)

            # TCG rules
            if oid_min is not None and oid_max is None:
                add_error(
                    sign_id, "{0} min is set, must also set max.".format(
                        default_oid.name))
            elif oid_max is not None and oid_min is None:
                add_error(
                    sign_id, "{0} max is set, must also set min.".format(
                        default_oid.name))
            elif default_oid is not None:
                # Move the min > max checking to signer. It should be validated after valid 32
                # bit integer is checked. Otherwise, int() conversion will throw an exception

                oid_min_config_str = oid_min
                oid_max_config_str = oid_max
                if oid_min_config_str and oid_max_config_str:
                    oid_min = int(oid_min,
                                  16) if "0x" in oid_min else int(oid_min)
                    oid_max = int(oid_max,
                                  16) if "0x" in oid_max else int(oid_max)

                    if oid_min > oid_max:
                        add_error(
                            sign_id,
                            "{0} min must be less than max, min={1} max={2}.".
                            format(default_oid.name, oid_min_config_str,
                                   oid_max_config_str))
                if int(sw_id, 16) != 0:
                    add_error(
                        sign_id,
                        "For {0}, sw_id must be 0, sw_id = {1}.".format(
                            default_oid.name, sw_id))
                if int(msm_part, 16) != 0:
                    add_error(
                        sign_id,
                        "For {0}, msm_part must be 0, msm_part = {1}.".format(
                            default_oid.name, msm_part))
                if int(oem_id, 16) != 0:
                    add_error(
                        sign_id,
                        "For {0}, oem_id must be 0, oem_id = {1}.".format(
                            default_oid.name, oem_id))
                if int(model_id, 16) != 0:
                    add_error(
                        sign_id,
                        "For {0}, model_id must be 0, model_id = {1}.".format(
                            default_oid.name, model_id))
                if int(debug, 16) != 2:
                    add_error(
                        sign_id,
                        "For {0}, debug must be 2, debug = {1}.".format(
                            default_oid.name, debug))

            if client_id is not None and int(client_id, 16) == 0:
                add_error(
                    sign_id,
                    "client_id must be a non-zero value, client_id={0}.".
                    format(client_id))
            if lib_id is not None and int(lib_id, 16) == 0:
                add_error(
                    sign_id,
                    "lib_id must be a non-zero value, lib_id={0}.".format(
                        lib_id))

            if UIE_server_cert_path and c_path.validate_file(
                    UIE_server_cert_path) is False:
                add_error(
                    sign_id,
                    "UIE_server_cert_path is invalid, path={0}".format(
                        UIE_server_cert_path))

        return retval[0], "".join(remove_duplicates(errors))
    def get_image_info_from_meta(self, meta_info):
        """Yields image info for each sign id resolved via the meta build.

        For every sign id in the image config parser, the image's
        meta_build_location is resolved into a concrete path: any leading
        ``$(key:value, ...)`` tag expression is expanded against *meta_info*,
        then the meta path placeholder is substituted with the meta build
        path. Queries into *meta_info* are cached so the same build / attr /
        file type is looked up at most once.

        :param meta_info: meta build info object; must provide
            get_build_path, get_file_vars and get_files.
        :returns: generator of (sign_id, chipset, image_src_path,
            image_dest_path) tuples. Sign ids whose image cannot be
            resolved are logged and skipped.
        """
        # Cache dicts - avoid re-querying meta_info for repeated tags
        build_paths = dict()
        attrs = dict()
        file_types = dict()

        for sign_id in self.img_config_parser.sign_id_list:
            try:
                logger.debug('Searching metabuild for ' + sign_id)
                image = self.img_config_parser.get_config_for_sign_id(sign_id)
                image_path = image.meta_build_location

                # Replace any tags of the form $(key:value, ...)
                # Raw string: '\$' and '\(' are invalid escapes in a plain
                # string literal and warn on modern Python.
                re_match = re.match(r'\$\((.*?)\)', image_path)
                if re_match:
                    tags = re_match.group(1)
                    tags_dict = {}
                    for tag in tags.split(','):
                        tag = tag.strip().split(':')
                        tags_dict[tag[0]] = tag[1]

                    replacement = None
                    if self.TAG_BUILD_PATH in tags_dict:
                        # Tag names a build whose root path is the replacement
                        build = tags_dict[self.TAG_BUILD_PATH]
                        paths_data = build_paths.get(build, None)
                        if paths_data is None:
                            paths_data = meta_info.get_build_path(build)
                            build_paths[build] = paths_data
                        replacement = paths_data

                    elif self.TAG_ATTR in tags_dict:
                        # Tag names an attribute variable for a file type
                        attr = tags_dict[self.TAG_ATTR]
                        file_type = tags_dict[self.TAG_FILE_TYPE]
                        paths_data = attrs.get((attr, file_type), None)
                        if paths_data is None:
                            paths_data = meta_info.get_file_vars(attr, file_type)
                            attrs[(attr, file_type)] = paths_data
                        if tags_dict[self.TAG_VAR] in paths_data:
                            replacement = paths_data[tags_dict[self.TAG_VAR]][0]

                    elif self.TAG_FILE_TYPE in tags_dict:
                        # Tag names a file type; pick the file whose name
                        # matches the image name (case-insensitive)
                        file_type = tags_dict[self.TAG_FILE_TYPE]
                        paths_data = file_types.get(file_type, None)
                        if paths_data is None:
                            paths_data = meta_info.get_files(file_type)
                            file_types[file_type] = paths_data
                        if paths_data:
                            for each_path in paths_data:
                                if each_path.lower().endswith(image.name.lower()):
                                    replacement = each_path
                                    break
                    else:
                        raise RuntimeError('Unknown image type')

                    if replacement:
                        image_path = image_path.replace(re_match.group(0), replacement)
                    else:
                        logger.warning('File not found in meta build: ' + sign_id)
                        continue

                image_path = image_path.replace(self.REPL_META_PATH, self._meta_build_path)

                image_src_path = ImagePath()
                image_dest_path = DestImagePath()

                image_src_path.image_dir_base = os.path.dirname(image_path)
                image_src_path.image_dir_ext = ''
                image_src_path.image_name = os.path.basename(image_path)
                image_dest_path.image_dir_base = os.path.dirname(image_path)
                image_dest_path.image_dir_ext = ''
                image_dest_path.image_name = os.path.basename(image_path)

            except Exception as e:
                # Best effort per sign id: log the failure and keep going
                logger.error(str(e))
                continue

            yield (sign_id, self.chipset, image_src_path, image_dest_path)
    def get_data(self, integrity_check=None, sign=None, encrypt=None):
        """Returns the image data after applying the requested operations.

        :param bool integrity_check: add the hash segment; defaults to
            self.integrity_check. Forced True when sign or encrypt is set.
        :param bool sign: sign the image; defaults to self.sign.
        :param bool encrypt: encrypt the image; defaults to self.encrypt.
        :returns: the (possibly hashed/signed/encrypted) ELF image data.
        :raises RuntimeError: if the generated hash segment's size does not
            match the estimated size.
        """
        # Resolve the operation, falling back to the instance defaults
        integrity_check = self.integrity_check if integrity_check is None else integrity_check
        sign = self.sign if sign is None else sign
        encrypt = self.encrypt if encrypt is None else encrypt
        # Signing or encrypting implies an integrity check
        integrity_check = True if (integrity_check or sign or encrypt) else False

        # Allow base to do any checks
        SecParseGenBase.get_data(self, integrity_check, sign, encrypt)

        if not (integrity_check or sign or encrypt):
            data = self._elf_parsegen.get_data()
        else:
            # Get the size of the hash segment
            hash_segment_size = self._compute_hash_segment_size(integrity_check, sign, encrypt)
            hash_segment_addr = self._compute_hash_address(self._elf_parsegen)

            # Add the prog & hash entries in phdrs
            # The data in the prog and hash at this time maybe dummy data
            phdr_class = self._elf_parsegen.get_new_phdr_entry()
            prog_phdr_entry = self._get_prog_phdr_entry(phdr_class(),
                                                        self._elf_parsegen.ehdr)
            hash_phdr_entry = self._get_hash_phdr_entry(phdr_class(),
                                                        hash_segment_size,
                                                        hash_segment_addr)
            self._elf_parsegen.add_segment(hash_phdr_entry, PAD_BYTE_1 * hash_segment_size)
            self._elf_parsegen.add_segment(prog_phdr_entry, '')

            # Update the dest pointer for the mbn
            self._mbn_parsegen.header.image_dest_ptr = hash_segment_addr + self._mbn_parsegen.header.get_size()

            error = None
            try:
                # Generate the hash segment now
                hash_segment = self.get_hash_segment(integrity_check, sign, encrypt)

                # Check here for sizes mismatching just in case
                if len(hash_segment) != hash_segment_size:
                    raise RuntimeError('Estimating the size of the hash table was wrong.')

                # Re-add the hash segment, this time with the real data
                self._elf_parsegen.remove_segment(hash_phdr_entry)
                self._elf_parsegen.remove_segment(prog_phdr_entry)
                self._elf_parsegen.add_segment(hash_phdr_entry, hash_segment)
                self._elf_parsegen.add_segment(prog_phdr_entry, '')

                # If encrypting, change the process segment data
                if encrypt:
                    orig_process_data = self._elf_parsegen._process_segment_data
                    self._elf_parsegen._process_segment_data = self._int_process_data
                    self._int_process_data_segment_number = 0

                try:
                    # Get the elf data
                    data = self._elf_parsegen.get_data()
                except Exception as e:
                    error = e
                finally:
                    # Restore the process segment data
                    if encrypt:
                        try:
                            self._elf_parsegen._process_segment_data = orig_process_data
                        except Exception as tmp_e:
                            logger.warning(str(tmp_e))

                    # Raise error if needed
                    if error is not None:
                        raise error

            except Exception as e:
                error = e
            finally:
                # Remove the prog (best effort - keep cleaning up on failure)
                try:
                    self._elf_parsegen.remove_segment(prog_phdr_entry)
                except Exception as tmp_e:
                    logger.warning(str(tmp_e))

                # Remove the hash
                # Bug fix: this handler previously bound the exception to
                # 'e' but logged 'tmp_e', which is unbound here in Python 3
                # (the earlier except target is cleared on block exit), so
                # the cleanup path raised NameError instead of warning.
                try:
                    self._elf_parsegen.remove_segment(hash_phdr_entry)
                except Exception as tmp_e:
                    logger.warning(str(tmp_e))

                # Raise error if needed
                if error is not None:
                    raise error

        return data