def AddDtbo(output_zip):
    """Adds the DTBO image.

  Uses the image under IMAGES/ if it already exists. Otherwise looks for the
  image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
  """
    img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "dtbo.img")
    if os.path.exists(img.name):
        logger.info("dtbo.img already exists; no need to rebuild...")
        return img.name

    dtbo_prebuilt_path = os.path.join(OPTIONS.input_tmp, "PREBUILT_IMAGES",
                                      "dtbo.img")
    assert os.path.exists(dtbo_prebuilt_path)
    shutil.copy(dtbo_prebuilt_path, img.name)

    # AVB-sign the image as needed.
    if OPTIONS.info_dict.get("avb_enable") == "true":
        avbtool = OPTIONS.info_dict["avb_avbtool"]
        part_size = OPTIONS.info_dict["dtbo_size"]
        # The AVB hash footer will be replaced if already present.
        cmd = [
            avbtool, "add_hash_footer", "--image", img.name,
            "--partition_size",
            str(part_size), "--partition_name", "dtbo"
        ]
        common.AppendAVBSigningArgs(cmd, "dtbo")
        args = OPTIONS.info_dict.get("avb_dtbo_add_hash_footer_args")
        if args and args.strip():
            cmd.extend(shlex.split(args))
        common.RunAndCheckOutput(cmd)

    img.Write()
    return img.name
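
For reference, a minimal standalone sketch of the avbtool command assembled above, assuming avbtool is on PATH; the image path, partition size, and key are placeholders, and the --algorithm/--key flags stand in for what common.AppendAVBSigningArgs would typically add from OPTIONS.info_dict.

import subprocess

# Illustrative only: paths, size, and key below are made-up placeholders.
subprocess.check_call([
    'avbtool', 'add_hash_footer',
    '--image', '/tmp/dtbo.img',
    '--partition_size', '8388608',
    '--partition_name', 'dtbo',
    '--algorithm', 'SHA256_RSA4096',
    '--key', '/path/to/avb_key.pem',
])
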
Example #2
def AddOdm(output_zip):
    """Turn the contents of ODM into an odm image and store it in output_zip."""

    img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "odm.img")
    if os.path.exists(img.name):
        logger.info("odm.img already exists; no need to rebuild...")

        # AVB-sign the image as needed.
        if OPTIONS.info_dict.get("avb_enable") == "true":
            logger.info("updating avb hash for prebuilt odm.img...")
            avbtool = OPTIONS.info_dict["avb_avbtool"]
            # The AVB hashtree footer will be replaced if already present.
            cmd = [
                avbtool, "add_hashtree_footer", "--image", img.name,
                "--partition_name", "odm"
            ]
            common.AppendAVBSigningArgs(cmd, "odm")
            args = OPTIONS.info_dict.get("avb_odm_add_hash_footer_args")
            if args and args.strip():
                cmd.extend(shlex.split(args))
            common.RunAndCheckOutput(cmd)

        return img.name

    block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "odm.map")
    CreateImage(OPTIONS.input_tmp,
                OPTIONS.info_dict,
                "odm",
                img,
                block_list=block_list)
    return img.name
def SignApexPayload(avbtool,
                    payload_file,
                    payload_key_path,
                    payload_key_name,
                    algorithm,
                    salt,
                    no_hashtree,
                    signing_args=None):
    """Signs a given payload_file with the payload key."""
    # Add the new footer. Old footer, if any, will be replaced by avbtool.
    cmd = [
        avbtool, 'add_hashtree_footer', '--do_not_generate_fec', '--algorithm',
        algorithm, '--key', payload_key_path, '--prop',
        'apex.key:{}'.format(payload_key_name), '--image', payload_file,
        '--salt', salt
    ]
    if no_hashtree:
        cmd.append('--no_hashtree')
    if signing_args:
        cmd.extend(shlex.split(signing_args))

    try:
        common.RunAndCheckOutput(cmd)
    except common.ExternalError as e:
        raise ApexSigningError(
            'Failed to sign APEX payload {} with {}:\n{}'.format(
                payload_file, payload_key_path, e))

    # Verify the signed payload image with specified public key.
    logger.info('Verifying %s', payload_file)
    VerifyApexPayload(avbtool, payload_file, payload_key_path, no_hashtree)
    def ExtractApexPayloadAndSignApks(self, apk_entries, apk_keys):
        """Extracts the payload image and signs the containing apk files."""
        payload_dir = common.MakeTempDir()
        extract_cmd = ['deapexer', 'extract', self.apex_path, payload_dir]
        common.RunAndCheckOutput(extract_cmd)

        has_signed_apk = False
        for entry in apk_entries:
            apk_path = os.path.join(payload_dir, entry)
            assert os.path.exists(apk_path)

            key_name = apk_keys.get(os.path.basename(entry))
            if key_name in common.SPECIAL_CERT_STRINGS:
                logger.info('Not signing: %s due to special cert string',
                            apk_path)
                continue

            logger.info('Signing apk file %s in apex %s', apk_path,
                        self.apex_path)
            # Rename the unsigned apk and overwrite the original apk path with the
            # signed apk file.
            unsigned_apk = common.MakeTempFile()
            os.rename(apk_path, unsigned_apk)
            common.SignFile(
                unsigned_apk,
                apk_path,
                key_name,
                self.key_passwords,
                codename_to_api_level_map=self.codename_to_api_level_map)
            has_signed_apk = True
        return payload_dir, has_signed_apk
Example #5
def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
                   padding_size):
    cmd = [
        "fec", "-e", "-p",
        str(padding_size), sparse_image_path, verity_path, verity_fec_path
    ]
    common.RunAndCheckOutput(cmd)
Example #6
  def test_Build_SquashFs(self):
    verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
    verity_image_builder.CalculateMaxImageSize()

    # Build the sparse image with verity metadata.
    input_dir = common.MakeTempDir()
    image = common.MakeTempFile(suffix='.img')
    cmd = ['mksquashfsimage.sh', input_dir, image, '-s']
    common.RunAndCheckOutput(cmd)
    verity_image_builder.PadSparseImage(image)
    verity_image_builder.Build(image)

    # Verify the verity metadata.
    cmd = ["verity_verifier", image, '-mincrypt',
           os.path.join(get_testdata_dir(), 'testkey_mincrypt')]
    common.RunAndCheckOutput(cmd)
Example #7
def GetFilesystemCharacteristics(image_path, sparse_image=True):
    """Returns various filesystem characteristics of "image_path".

  Args:
    image_path: The file to analyze.
    sparse_image: Whether the image is sparse.

  Returns:
    The characteristics dictionary.
  """
    unsparse_image_path = image_path
    if sparse_image:
        unsparse_image_path = UnsparseImage(image_path, replace=False)

    cmd = ["tune2fs", "-l", unsparse_image_path]
    try:
        output = common.RunAndCheckOutput(cmd, verbose=False)
    finally:
        if sparse_image:
            os.remove(unsparse_image_path)
    fs_dict = {}
    for line in output.splitlines():
        fields = line.split(":")
        if len(fields) == 2:
            fs_dict[fields[0].strip()] = fields[1].strip()
    return fs_dict
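
For reference, a minimal sketch of the same "key: value" parsing applied to hypothetical tune2fs -l style output; the sample lines below are made up.

# Illustrative only: parse made-up `tune2fs -l` style output into a dict.
sample_output = """Filesystem volume name:   <none>
Block count:              65536
Block size:               4096
Free blocks:              12345"""

fs_dict = {}
for line in sample_output.splitlines():
    fields = line.split(":")
    if len(fields) == 2:
        fs_dict[fields[0].strip()] = fields[1].strip()

print(fs_dict["Block count"])  # -> '65536'
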
def AddPartitionTable(output_zip):
    """Create a partition table image and store it in output_zip."""

    img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES",
                     "partition-table.img")
    bpt = OutputFile(output_zip, OPTIONS.input_tmp, "META",
                     "partition-table.bpt")

    # Use BPTTOOL from the environment, or "bpttool" if empty or not set.
    bpttool = os.getenv("BPTTOOL") or "bpttool"
    cmd = [
        bpttool, "make_table", "--output_json", bpt.name, "--output_gpt",
        img.name
    ]
    input_files_str = OPTIONS.info_dict["board_bpt_input_files"]
    input_files = input_files_str.split(" ")
    for i in input_files:
        cmd.extend(["--input", i])
    disk_size = OPTIONS.info_dict.get("board_bpt_disk_size")
    if disk_size:
        cmd.extend(["--disk_size", disk_size])
    args = OPTIONS.info_dict.get("board_bpt_make_table_args")
    if args:
        cmd.extend(shlex.split(args))
    common.RunAndCheckOutput(cmd)

    img.Write()
    bpt.Write()
Example #9
def BuildVerityTree(sparse_image_path, verity_image_path):
    cmd = [
        "build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
        verity_image_path
    ]
    output = common.RunAndCheckOutput(cmd)
    root, salt = output.split()
    return root, salt
Example #10
def files_from_path(target_path, extra_args=None):
  """Gets files under the given path and returns them as a sorted list."""
  find_command = ['find', target_path] + (extra_args or [])
  find_process = common.Run(
      find_command, stdout=subprocess.PIPE, verbose=False)
  return common.RunAndCheckOutput(['sort'],
                                  stdin=find_process.stdout,
                                  verbose=False)
Example #11
    def _BuildAndVerify(prop, verify_key):
        verity_image_builder = CreateVerityImageBuilder(prop)
        image_size = verity_image_builder.CalculateMaxImageSize()

        # Build the sparse image with verity metadata.
        input_dir = common.MakeTempDir()
        image = common.MakeTempFile(suffix='.img')
        cmd = [
            'mkuserimg_mke2fs', input_dir, image, 'ext4', '/system',
            str(image_size), '-j', '0', '-s'
        ]
        common.RunAndCheckOutput(cmd)
        verity_image_builder.Build(image)

        # Verify the verity metadata.
        cmd = ['verity_verifier', image, '-mincrypt', verify_key]
        common.RunAndCheckOutput(cmd)
Example #12
def AddCareMapForAbOta(output_zip, ab_partitions, image_paths):
  """Generates and adds care_map.pb for a/b partition that has care_map.

  Args:
    output_zip: The output zip file (needs to be already open), or None to
        write care_map.pb to OPTIONS.input_tmp/.
    ab_partitions: The list of A/B partitions.
    image_paths: A map from the partition name to the image path.
  """
  care_map_list = []
  for partition in ab_partitions:
    partition = partition.strip()
    if partition not in common.PARTITIONS_WITH_CARE_MAP:
      continue

    verity_block_device = "{}_verity_block_device".format(partition)
    avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
    if (verity_block_device in OPTIONS.info_dict or
        OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
      image_path = image_paths[partition]
      assert os.path.exists(image_path)
      care_map_list += GetCareMap(partition, image_path)

      # adds fingerprint field to the care_map
      build_props = OPTIONS.info_dict.get(partition + ".build.prop", {})
      prop_name_list = ["ro.{}.build.fingerprint".format(partition),
                        "ro.{}.build.thumbprint".format(partition)]

      present_props = [x for x in prop_name_list if x in build_props]
      if not present_props:
        logger.warning("fingerprint is not present for partition %s", partition)
        property_id, fingerprint = "unknown", "unknown"
      else:
        property_id = present_props[0]
        fingerprint = build_props[property_id]
      care_map_list += [property_id, fingerprint]

  if not care_map_list:
    return

  # Converts the list into proto buf message by calling care_map_generator; and
  # writes the result to a temp file.
  temp_care_map_text = common.MakeTempFile(prefix="caremap_text-",
                                           suffix=".txt")
  with open(temp_care_map_text, 'w') as text_file:
    text_file.write('\n'.join(care_map_list))

  temp_care_map = common.MakeTempFile(prefix="caremap-", suffix=".pb")
  care_map_gen_cmd = ["care_map_generator", temp_care_map_text, temp_care_map]
  common.RunAndCheckOutput(care_map_gen_cmd)

  care_map_path = "META/care_map.pb"
  if output_zip and care_map_path not in output_zip.namelist():
    common.ZipWrite(output_zip, temp_care_map, arcname=care_map_path)
  else:
    shutil.copy(temp_care_map, os.path.join(OPTIONS.input_tmp, care_map_path))
    if output_zip:
      OPTIONS.replace_updated_files_list.append(care_map_path)
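
To illustrate the flat text format handed to care_map_generator (the plain-text form checked by _verifyCareMap in Example #21 below): for each partition, the list holds the name and block ranges from GetCareMap, followed by the fingerprint property id and its value. A made-up example:

# Illustrative only: made-up ranges and fingerprint for one partition.
care_map_list = [
    'system', '0-5 10-20',                        # name + ranges, as from GetCareMap
    'ro.system.build.fingerprint',                # property_id
    'generic/aosp_arm64/generic:eng/test-keys',   # fingerprint
]
print('\n'.join(care_map_list))
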
Example #13
def SignApex(apex_data,
             payload_key,
             container_key,
             container_pw,
             codename_to_api_level_map,
             signing_args=None):
    """Signs the current APEX with the given payload/container keys.

  Args:
    apex_data: Raw APEX data.
    payload_key: The path to payload signing key (w/o extension).
    container_key: The path to container signing key (w/o extension).
    container_pw: The matching password of the container_key, or None.
    codename_to_api_level_map: A dict that maps from codename to API level.
    signing_args: Additional args to be passed to the payload signer.

  Returns:
    (signed_apex, payload_key_name): signed_apex is the path to the signed APEX
        file; payload_key_name is a str of the payload signing key name (e.g.
        com.android.tzdata).
  """
    apex_file = common.MakeTempFile(prefix='apex-', suffix='.apex')
    with open(apex_file, 'wb') as apex_fp:
        apex_fp.write(apex_data)

    APEX_PAYLOAD_IMAGE = 'apex_payload.img'

    # Signing an APEX is a two step process.
    # 1. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given payload_key.
    payload_dir = common.MakeTempDir(prefix='apex-payload-')
    with zipfile.ZipFile(apex_file) as apex_fd:
        payload_file = apex_fd.extract(APEX_PAYLOAD_IMAGE, payload_dir)

    payload_info = apex_utils.ParseApexPayloadInfo(payload_file)
    apex_utils.SignApexPayload(payload_file, payload_key,
                               payload_info['apex.key'],
                               payload_info['Algorithm'], payload_info['Salt'],
                               signing_args)

    common.ZipDelete(apex_file, APEX_PAYLOAD_IMAGE)
    apex_zip = zipfile.ZipFile(apex_file, 'a')
    common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
    common.ZipClose(apex_zip)

    # 2. Sign the overall APEX container with container_key.
    signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
    common.SignFile(apex_file,
                    signed_apex,
                    container_key,
                    container_pw,
                    codename_to_api_level_map=codename_to_api_level_map)

    signed_and_aligned_apex = common.MakeTempFile(prefix='apex-container-',
                                                  suffix='.apex')
    common.RunAndCheckOutput(
        ['zipalign', '-f', '4096', signed_apex, signed_and_aligned_apex])

    return (signed_and_aligned_apex, payload_info['apex.key'])
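
If desired, the 4096-byte alignment of the result can be double-checked with zipalign's check mode (-c); a small sketch with a helper name of our own, assuming zipalign is on PATH.

import subprocess

def check_apex_alignment(apex_path, align='4096'):
    # `zipalign -c` verifies alignment without rewriting the archive.
    subprocess.check_call(['zipalign', '-c', '-v', align, apex_path])
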
Example #14
    def RepackApexPayload(self, payload_dir, payload_key, signing_args=None):
        """Rebuilds the apex file with the updated payload directory."""
        apex_dir = common.MakeTempDir()
        # Extract the apex file and reuse its meta files as repack parameters.
        common.UnzipToDir(self.apex_path, apex_dir)
        arguments_dict = {
            'manifest': os.path.join(apex_dir, 'apex_manifest.pb'),
            'build_info': os.path.join(apex_dir, 'apex_build_info.pb'),
            'key': payload_key,
        }
        for filename in arguments_dict.values():
            assert os.path.exists(filename), 'file {} not found'.format(
                filename)

        # The repack process will add back these files later in the payload image.
        for name in ['apex_manifest.pb', 'apex_manifest.json', 'lost+found']:
            path = os.path.join(payload_dir, name)
            if os.path.isfile(path):
                os.remove(path)
            elif os.path.isdir(path):
                shutil.rmtree(path)

        # TODO(xunchang) the signing process can be improved by using
        # '--unsigned_payload_only'. But we need to parse the vbmeta earlier for
        # the signing arguments, e.g. algorithm, salt, etc.
        payload_img = os.path.join(apex_dir, APEX_PAYLOAD_IMAGE)
        generate_image_cmd = [
            'apexer', '--force', '--payload_only', '--do_not_check_keyname',
            '--apexer_tool_path',
            os.getenv('PATH')
        ]
        for key, val in arguments_dict.items():
            generate_image_cmd.extend(['--' + key, val])

        # Add quotes around signing_args, as we will pass
        # --signing_args "--signing_helper_with_files=%path" to apexer.
        if signing_args:
            generate_image_cmd.extend(
                ['--signing_args', '"{}"'.format(signing_args)])

        # optional arguments for apex repacking
        manifest_json = os.path.join(apex_dir, 'apex_manifest.json')
        if os.path.exists(manifest_json):
            generate_image_cmd.extend(['--manifest_json', manifest_json])
        generate_image_cmd.extend([payload_dir, payload_img])
        if OPTIONS.verbose:
            generate_image_cmd.append('-v')
        common.RunAndCheckOutput(generate_image_cmd)

        # Add the payload image back to the apex file.
        common.ZipDelete(self.apex_path, APEX_PAYLOAD_IMAGE)
        with zipfile.ZipFile(self.apex_path, 'a',
                             allowZip64=True) as output_apex:
            common.ZipWrite(output_apex,
                            payload_img,
                            APEX_PAYLOAD_IMAGE,
                            compress_type=zipfile.ZIP_STORED)
        return self.apex_path
def AddSuperSplit(output_zip):
    """Create split super_*.img and store it in output_zip."""
    def GetPartitionSizeFromImage(img):
        try:
            simg = sparse_img.SparseImage(img)
            return simg.blocksize * simg.total_blocks
        except ValueError:
            return os.path.getsize(img)

    def TransformPartitionArg(arg):
        lst = arg.split(':')
        # Because of --auto-slot-suffixing for A/B, there is no need to remove the suffix.
        name = lst[0]
        if name + '_size' in OPTIONS.info_dict:
            size = str(OPTIONS.info_dict[name + '_size'])
            logger.info("Using %s_size = %s", name, size)
        else:
            size = str(
                GetPartitionSizeFromImage(
                    os.path.join(OPTIONS.input_tmp, "IMAGES",
                                 '{}.img'.format(name))))
            logger.info("Using size of prebuilt %s = %s", name, size)
        lst[2] = size
        return ':'.join(lst)

    def GetLpmakeArgsWithSizes():
        lpmake_args = shlex.split(OPTIONS.info_dict['lpmake_args'].strip())

        for i, arg in enumerate(lpmake_args):
            if arg == '--partition':
                assert i + 1 < len(lpmake_args), \
                  'lpmake_args has --partition without value'
                lpmake_args[i + 1] = TransformPartitionArg(lpmake_args[i + 1])

        return lpmake_args

    outdir = OutputFile(output_zip, OPTIONS.input_tmp, "OTA", "")
    cmd = [OPTIONS.info_dict['lpmake']]
    cmd += GetLpmakeArgsWithSizes()

    source = OPTIONS.info_dict.get('dynamic_partition_list', '').strip()
    if source:
        cmd.append('--sparse')
        for name in shlex.split(source):
            img = os.path.join(OPTIONS.input_tmp, "IMAGES",
                               '{}.img'.format(name))
            # Because of --auto-slot-suffixing for A/B, there is no need to add the suffix.
            cmd += ['--image', '{}={}'.format(name, img)]

    cmd += ['--output', outdir.name]

    common.RunAndCheckOutput(cmd)

    for dev in OPTIONS.info_dict['super_block_devices'].strip().split():
        img = OutputFile(output_zip, OPTIONS.input_tmp, "OTA",
                         "super_" + dev + ".img")
        img.Write()
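
For illustration, TransformPartitionArg only rewrites the size field (index 2) of an lpmake --partition value of the form name:attrs:size[:group]; a standalone sketch with made-up values.

def rewrite_partition_size(partition_arg, new_size):
    # Replace the size field of a name:attrs:size[:group] value.
    lst = partition_arg.split(':')
    lst[2] = str(new_size)
    return ':'.join(lst)

# Made-up example:
print(rewrite_partition_size('system:readonly:0:group_basic', 2147483648))
# -> system:readonly:2147483648:group_basic
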
Example #16
    def _CreateSimg(self, raw_data):  # pylint: disable=no-self-use
        output_file = common.MakeTempFile()
        raw_image = common.MakeTempFile()
        with open(raw_image, 'wb') as f:
            f.write(raw_data)

        cmd = ["img2simg", raw_image, output_file, '4096']
        common.RunAndCheckOutput(cmd)
        return output_file
Example #17
def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
             apk_keys, codename_to_api_level_map,
             no_hashtree, signing_args=None):
  """Signs the current APEX with the given payload/container keys.

  Args:
    apex_data: Raw APEX data.
    payload_key: The path to payload signing key (w/ extension).
    container_key: The path to container signing key (w/o extension).
    container_pw: The matching password of the container_key, or None.
    apk_keys: A dict that holds the signing keys for apk files.
    codename_to_api_level_map: A dict that maps from codename to API level.
    no_hashtree: Don't include hashtree in the signed APEX.
    signing_args: Additional args to be passed to the payload signer.

  Returns:
    The path to the signed APEX file.
  """
  apex_file = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
  with open(apex_file, 'wb') as output_fp:
    output_fp.write(apex_data)

  debugfs_path = os.path.join(OPTIONS.search_path, 'bin', 'debugfs_static')
  cmd = ['deapexer', '--debugfs_path', debugfs_path,
         'info', '--print-type', apex_file]

  try:
    apex_type = common.RunAndCheckOutput(cmd).strip()
    if apex_type == 'UNCOMPRESSED':
      return SignUncompressedApex(
          avbtool,
          apex_file,
          payload_key=payload_key,
          container_key=container_key,
          container_pw=None,
          codename_to_api_level_map=codename_to_api_level_map,
          no_hashtree=no_hashtree,
          apk_keys=apk_keys,
          signing_args=signing_args)
    elif apex_type == 'COMPRESSED':
      return SignCompressedApex(
          avbtool,
          apex_file,
          payload_key=payload_key,
          container_key=container_key,
          container_pw=None,
          codename_to_api_level_map=codename_to_api_level_map,
          no_hashtree=no_hashtree,
          apk_keys=apk_keys,
          signing_args=signing_args)
    else:
      # TODO(b/172912232): support signing compressed apex
      raise ApexInfoError('Unsupported apex type {}'.format(apex_type))

  except common.ExternalError as e:
    raise ApexInfoError(
        'Failed to get type for {}:\n{}'.format(apex_file, e))
Example #18
def AddVendor(output_zip, recovery_img=None, boot_img=None):
    """Turn the contents of VENDOR into a vendor image and store in it
  output_zip."""

    img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.img")
    if os.path.exists(img.name):
        logger.info("vendor.img already exists; no need to rebuild...")

        # AVB-sign the image as needed.
        if OPTIONS.info_dict.get("avb_enable") == "true":
            logger.info("updating avb hash for prebuilt vendor.img...")
            avbtool = OPTIONS.info_dict["avb_avbtool"]
            # The AVB hashtree footer will be replaced if already present.
            cmd = [
                avbtool, "add_hashtree_footer", "--image", img.name,
                "--partition_name", "vendor"
            ]
            common.AppendAVBSigningArgs(cmd, "vendor")
            args = OPTIONS.info_dict.get("avb_vendor_add_hash_footer_args")
            if args and args.strip():
                cmd.extend(shlex.split(args))
            common.RunAndCheckOutput(cmd)

        return img.name

    def output_sink(fn, data):
        output_file = os.path.join(OPTIONS.input_tmp, "VENDOR", fn)
        with open(output_file, "w") as ofile:
            ofile.write(data)

        if output_zip:
            arc_name = "VENDOR/" + fn
            if arc_name in output_zip.namelist():
                OPTIONS.replace_updated_files_list.append(arc_name)
            else:
                common.ZipWrite(output_zip, output_file, arc_name)

    board_uses_vendorimage = OPTIONS.info_dict.get(
        "board_uses_vendorimage") == "true"

    if (OPTIONS.rebuild_recovery and board_uses_vendorimage
            and recovery_img is not None and boot_img is not None):
        logger.info("Building new recovery patch on vendor")
        common.MakeRecoveryPatch(OPTIONS.input_tmp,
                                 output_sink,
                                 recovery_img,
                                 boot_img,
                                 info_dict=OPTIONS.info_dict)

    block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES",
                            "vendor.map")
    CreateImage(OPTIONS.input_tmp,
                OPTIONS.info_dict,
                "vendor",
                img,
                block_list=block_list)
    return img.name
Example #19
def _GetMaximumSignatureSizeInBytes(signing_key):
  out_signature_size_file = common.MakeTempFile("signature_size")
  cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
      out_signature_size_file), "--private_key={}".format(signing_key)]
  common.RunAndCheckOutput(cmd)
  with open(out_signature_size_file) as f:
    signature_size = f.read().rstrip()
  logger.info("%s outputs the maximum signature size: %s", cmd[0],
              signature_size)
  return int(signature_size)
Example #20
def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
                        block_device, signer_path, key, signer_args,
                        verity_disable):
  cmd = ["build_verity_metadata", "build", str(image_size),
         verity_metadata_path, root_hash, salt, block_device, signer_path, key]
  if signer_args:
    cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
  if verity_disable:
    cmd.append("--verity_disable")
  common.RunAndCheckOutput(cmd)
Example #21
  def _verifyCareMap(self, expected, file_name):
    """Parses the care_map.pb; and checks the content in plain text."""
    text_file = common.MakeTempFile(prefix="caremap-", suffix=".txt")

    # Calls an external binary to convert the proto message.
    cmd = ["care_map_generator", "--parse_proto", file_name, text_file]
    common.RunAndCheckOutput(cmd)

    with open(text_file) as verify_fp:
      plain_text = verify_fp.read()
    self.assertEqual('\n'.join(expected), plain_text)
Example #22
def VerifyApexPayload(avbtool, payload_file, payload_key):
    """Verifies the APEX payload signature with the given key."""
    cmd = [
        avbtool, 'verify_image', '--image', payload_file, '--key', payload_key
    ]
    try:
        common.RunAndCheckOutput(cmd)
    except common.ExternalError as e:
        raise ApexSigningError(
            'Failed to validate payload signing for {} with {}:\n{}'.format(
                payload_file, payload_key, e))
def AddSuperEmpty(output_zip):
    """Create a super_empty.img and store it in output_zip."""

    img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES",
                     "super_empty.img")
    cmd = [OPTIONS.info_dict['lpmake']]
    cmd += shlex.split(OPTIONS.info_dict['lpmake_args'].strip())
    cmd += ['--output', img.name]
    common.RunAndCheckOutput(cmd)

    img.Write()
Example #24
    def RepackApexPayload(self, payload_dir, payload_key, payload_public_key):
        """Rebuilds the apex file with the updated payload directory."""
        apex_dir = common.MakeTempDir()
        # Extract the apex file and reuse its meta files as repack parameters.
        common.UnzipToDir(self.apex_path, apex_dir)

        android_jar_path = common.OPTIONS.android_jar_path
        if not android_jar_path:
            android_jar_path = os.path.join(
                os.environ.get('ANDROID_BUILD_TOP', ''), 'prebuilts', 'sdk',
                'current', 'public', 'android.jar')
            logger.warning(
                'android_jar_path not found in options, falling back to'
                ' use %s', android_jar_path)

        arguments_dict = {
            'manifest': os.path.join(apex_dir, 'apex_manifest.pb'),
            'build_info': os.path.join(apex_dir, 'apex_build_info.pb'),
            'assets_dir': os.path.join(apex_dir, 'assets'),
            'android_jar_path': android_jar_path,
            'key': payload_key,
            'pubkey': payload_public_key,
        }
        for filename in arguments_dict.values():
            assert os.path.exists(filename), 'file {} not found'.format(
                filename)

        # The repack process will add back these files later in the payload image.
        for name in ['apex_manifest.pb', 'apex_manifest.json', 'lost+found']:
            path = os.path.join(payload_dir, name)
            if os.path.isfile(path):
                os.remove(path)
            elif os.path.isdir(path):
                shutil.rmtree(path)

        repacked_apex = common.MakeTempFile(suffix='.apex')
        repack_cmd = [
            'apexer', '--force', '--include_build_info',
            '--do_not_check_keyname', '--apexer_tool_path',
            os.getenv('PATH')
        ]
        for key, val in arguments_dict.items():
            repack_cmd.append('--' + key)
            repack_cmd.append(val)
        manifest_json = os.path.join(apex_dir, 'apex_manifest.json')
        if os.path.exists(manifest_json):
            repack_cmd.append('--manifest_json')
            repack_cmd.append(manifest_json)
        repack_cmd.append(payload_dir)
        repack_cmd.append(repacked_apex)
        common.RunAndCheckOutput(repack_cmd)

        return repacked_apex
Example #25
def GetDiskUsage(path):
    """Returns the number of bytes that "path" occupies on host.

  Args:
    path: The directory or file to calculate size on.

  Returns:
    The number of bytes based on a 1K block_size.
  """
    cmd = ["du", "-k", "-s", path]
    output = common.RunAndCheckOutput(cmd, verbose=False)
    return int(output.split()[0]) * 1024
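
As a rough cross-check (not an exact equivalent, since du reports allocated 1K blocks rather than apparent sizes), the apparent size of a tree can be computed in pure Python:

import os

def apparent_size(path):
    # Sum of file sizes under `path`; unlike `du -k -s`, this ignores
    # filesystem block allocation, sparse files, and hard links.
    if os.path.isfile(path):
        return os.path.getsize(path)
    total = 0
    for root, _, files in os.walk(path):
        for name in files:
            file_path = os.path.join(root, name)
            if not os.path.islink(file_path):
                total += os.path.getsize(file_path)
    return total
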
Example #26
def GetInodeUsage(path):
    """Returns the number of inodes that "path" occupies on host.

  Args:
    path: The directory or file to calculate inode number on.

  Returns:
    The number of inodes used.
  """
    cmd = ["find", path, "-print"]
    output = common.RunAndCheckOutput(cmd, verbose=False)
    # TODO(b/122328872) Fix estimation algorithm to not need the multiplier.
    return output.count('\n') * 2
def CertUsesSha256(cert):
    """Check if the cert uses SHA-256 hashing algorithm."""

    cmd = ['openssl', 'x509', '-text', '-noout', '-in', cert]
    cert_dump = common.RunAndCheckOutput(cmd, stdout=subprocess.PIPE)

    algorithm = re.search(r'Signature Algorithm: ([a-zA-Z0-9]+)', cert_dump)
    assert algorithm, "Failed to identify the signature algorithm."

    assert not algorithm.group(1).startswith('ecdsa'), (
        'This script doesn\'t support verifying ECDSA signed package yet.')

    return algorithm.group(1).startswith('sha256')
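
For context, the same regex applied to a hypothetical excerpt of openssl x509 -text output; the certificate fields below are made up.

import re

# Hypothetical excerpt of `openssl x509 -text -noout` output.
sample_dump = """
    Signature Algorithm: sha256WithRSAEncryption
        Issuer: C=US, O=Example
"""
match = re.search(r'Signature Algorithm: ([a-zA-Z0-9]+)', sample_dump)
print(match.group(1))                       # sha256WithRSAEncryption
print(match.group(1).startswith('sha256'))  # True
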
Example #28
def GetInodeUsage(path):
    """Returns the number of inodes that "path" occupies on host.

  Args:
    path: The directory or file to calculate inode number on.

  Returns:
    The number of inodes used.
  """
    cmd = ["find", path, "-print"]
    output = common.RunAndCheckOutput(cmd, verbose=False)
    # Increase by 25% because the file and directory count alone is not the whole picture.
    return output.count('\n') * 30 // 24
Example #29
def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
    """Appends the unsparse image to the given sparse image.

  Args:
    sparse_image_path: the path to the (sparse) image
    unsparse_image_path: the path to the (unsparse) image
    error_message: the message to use when raising BuildVerityImageError

  Raises:
    BuildVerityImageError: On error.
  """
    cmd = ["append2simg", sparse_image_path, unsparse_image_path]
    try:
        common.RunAndCheckOutput(cmd)
    except Exception:
        raise BuildVerityImageError(error_message)
Example #30
  def test_Build(self):
    prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
    verity_image_builder = CreateVerityImageBuilder(prop_dict)
    self.assertIsNotNone(verity_image_builder)
    self.assertEqual(2, verity_image_builder.version)

    input_dir = common.MakeTempDir()
    image_dir = common.MakeTempDir()
    system_image = os.path.join(image_dir, 'system.img')
    system_image_size = verity_image_builder.CalculateMaxImageSize()
    cmd = ['mkuserimg_mke2fs', input_dir, system_image, 'ext4', '/system',
           str(system_image_size), '-j', '0', '-s']
    common.RunAndCheckOutput(cmd)
    verity_image_builder.Build(system_image)

    # Additionally make vbmeta image so that we can verify with avbtool.
    vbmeta_image = os.path.join(image_dir, 'vbmeta.img')
    cmd = ['avbtool', 'make_vbmeta_image', '--include_descriptors_from_image',
           system_image, '--output', vbmeta_image]
    common.RunAndCheckOutput(cmd)

    # Verify the verity metadata.
    cmd = ['avbtool', 'verify_image', '--image', vbmeta_image]
    common.RunAndCheckOutput(cmd)