Example #1
from datetime import datetime

def get_json_signed(asn_metadata):
    json_signed = {
        '_type': 'Root',
        'compression_algorithms': ['gz'],
        'consistent_snapshot': False
    }

    asn_signed = asn_metadata['signed']
    json_signed['expires'] = datetime.utcfromtimestamp(
        asn_signed['expires']).isoformat() + 'Z'
    json_signed['version'] = int(asn_signed['version'])

    rootMetadata = asn_signed['body']['rootMetadata']

    # TODO: <~> Remove this hardcoding. This has to be TUF-compliant; it
    # can't assume no Targets delegations.
    assert rootMetadata['numberOfKeys'] == 4
    keys = rootMetadata['keys']
    json_keys = {}
    for i in range(4):
        publicKey = keys[i]
        publicKeyid = hex_from_octetstring(publicKey['publicKeyid'])
        # Only ed25519 keys allowed for now.
        publicKeyType = int(publicKey['publicKeyType'])
        assert publicKeyType == 1
        publicKeyType = 'ed25519'
        publicKeyValue = hex_from_octetstring(publicKey['publicKeyValue'])
        json_keys[publicKeyid] = {
            # TODO: <~> This was hard-coded. Fix it.
            'keyid_hash_algorithms': ['sha256', 'sha512'],
            'keytype': publicKeyType,
            'keyval': {
                'public': publicKeyValue
            }
        }
    json_signed['keys'] = json_keys

    assert rootMetadata['numberOfRoles'] == 4
    roles = rootMetadata['roles']
    json_roles = {}
    # Quick workaround for now.
    roletype_to_rolename = {
        0: 'root',
        1: 'targets',
        2: 'snapshot',
        3: 'timestamp'
    }
    for i in range(4):
        topLevelRole = roles[i]
        rolename = roletype_to_rolename[int(topLevelRole['role'])]
        assert topLevelRole['numberOfKeyids'] == 1
        keyid = hex_from_octetstring(topLevelRole['keyids'][0])
        keyids = [keyid]
        threshold = int(topLevelRole['threshold'])
        assert threshold == 1
        json_roles[rolename] = {'keyids': keyids, 'threshold': threshold}
    json_signed['roles'] = json_roles

    return json_signed
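Every converter in these examples leans on a hex_from_octetstring() helper that is not shown. A minimal sketch of what it presumably does, assuming the input is a pyasn1 univ.OctetString (this exact implementation is an assumption, not the project's code):

import binascii

def hex_from_octetstring(octetstring):
    # Hypothetical sketch: render the OctetString's raw bytes as a lowercase
    # hex string, the form in which keyids and digests appear in TUF's JSON.
    return binascii.hexlify(octetstring.asOctets()).decode('utf-8')

The round-trip test in Example #4 below is consistent with this behavior: an OctetString built from '5f1a1354' converts back to the same hex string.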
Example #2
def set_json_targets(json_signed, targetsMetadata):
    numberOfTargets = int(targetsMetadata['numberOfTargets'])
    targets = targetsMetadata['targets']
    json_targets = {}

    for i in range(numberOfTargets):
        targetAndCustom = targets[i]

        target = targetAndCustom['target']
        filename = str(target['filename'])
        filemeta = {'length': int(target['length'])}

        numberOfHashes = int(target['numberOfHashes'])
        # Quick workaround for now.
        hashenum_to_hashfunction = {1: 'sha256', 3: 'sha512'}
        hashes = target['hashes']
        json_hashes = {}
        for j in range(numberOfHashes):
            asn_hash = hashes[j]  # renamed to avoid shadowing built-in hash()
            hash_function = hashenum_to_hashfunction[int(asn_hash['function'])]
            hash_value = hex_from_octetstring(asn_hash['digest'])
            json_hashes[hash_function] = hash_value
        filemeta['hashes'] = json_hashes

        # Optional bit.
        custom = targetAndCustom['custom']
        if custom:
            json_custom = {'ecu_serial': str(custom['ecuIdentifier'])}
            filemeta['custom'] = json_custom

        json_targets[filename] = filemeta

    json_signed['targets'] = json_targets
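For a single target carrying one sha256 hash and an ECU identifier, the entry written into json_signed['targets'] would look roughly like this (every value below is a made-up placeholder, shown only to illustrate the shape):

# Illustrative output shape; filename, length, digest, and serial are
# hypothetical placeholders, not real values.
{
    'firmware.img': {
        'length': 1024,
        'hashes': {
            'sha256': '5f1a1354aabbccdd'
        },
        'custom': {
            'ecu_serial': 'ecu1234'
        }
    }
}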
Example #3
from datetime import datetime

def get_json_signed(asn_metadata):
  json_signed = {
    '_type': 'Timestamp'
  }

  asn_signed = asn_metadata['signed']
  json_signed['expires'] = datetime.utcfromtimestamp(
    asn_signed['expires']).isoformat() + 'Z'
  json_signed['version'] = int(asn_signed['version'])

  timestampMetadata = asn_signed['body']['timestampMetadata']
  filename = str(timestampMetadata['filename'])
  # TODO: Remove hardcoded hash assumptions here.
  sha256 = hex_from_octetstring(timestampMetadata['hashes'][0]['digest'])
  json_signed['meta'] = {
    filename : {
      'hashes': {
        'sha256': sha256
      },
      'length': int(timestampMetadata['length']),
      'version': int(timestampMetadata['version'])
    }
  }

  return json_signed
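The TODO above flags that this assumes exactly one hash and that it is sha256. A generic translation would mirror the hash loop from Example #2; here is a sketch, assuming the ASN.1 Timestamp definition carries a numberOfHashes count alongside 'hashes', as the other sequences in these examples do:

# Sketch: translate every listed hash instead of assuming a single sha256.
hashenum_to_hashfunction = {1: 'sha256', 3: 'sha512'}  # same workaround mapping
json_hashes = {}
for j in range(int(timestampMetadata['numberOfHashes'])):
    asn_hash = timestampMetadata['hashes'][j]
    hash_function = hashenum_to_hashfunction[int(asn_hash['function'])]
    json_hashes[hash_function] = hex_from_octetstring(asn_hash['digest'])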
Example #4
    def test_hex_from_octetstring(self):
        # Round-trip check: build a pyasn1 OctetString from a known hex
        # string, convert it back, and confirm the hex value survives intact.
        original_hex_str = '5f1a1354'

        octet_str = metadata_asn1_spec.OctetString(hexValue=original_hex_str)

        hex_str = hex_from_octetstring(octet_str)

        self.assertEqual(original_hex_str, hex_str)
Example #5
def set_json_keys(json_signed, delegations):
    numberOfKeys = int(delegations['numberOfKeys'])
    keys = delegations['keys']
    json_keys = {}

    for i in range(numberOfKeys):
        key = keys[i]
        keyid = hex_from_octetstring(key['publicKeyid'])
        keytype = int(key['publicKeyType'])
        # FIXME: Only ed25519 keys allowed for now.
        assert keytype == 1
        keytype = 'ed25519'
        keyval = hex_from_octetstring(key['publicKeyValue'])
        json_keys[keyid] = {
            "keyid_hash_algorithms": ["sha256", "sha512"],
            "keytype": keytype,
            "keyval": {
                "public": keyval
            }
        }

    return json_keys
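Despite its set_ prefix, this variant returns the converted dictionary rather than mutating json_signed, so the caller assigns the result. A hypothetical call site (the surrounding delegations structure is an assumption based on TUF's Targets format):

# Hypothetical caller: attach the converted keys to a Targets role's
# delegations section; the 'roles' list would be filled in separately.
json_signed['delegations'] = {
    'keys': set_json_keys(json_signed, delegations),
    'roles': []
}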
Example #6
from datetime import datetime

def get_json_signed(asn_metadata):
    """
    Given an ASN.1 object conforming to the new ASN.1 metadata definitions
    derived from Snapshot*.asn1, return a Python dictionary containing the
    same information, conformant to TUF's standard data specification for
    Snapshot metadata (tuf.formats.SNAPSHOT_SCHEMA).

    TUF internally does not use the ASN.1, converting it in and out of the
    standard Python dictionary formats defined in tuf.formats.
    """
    pydict_signed = {}

    # TODO: Normalize this function's interface: the asn_metadata given is
    # actually both 'signed' and 'signatures', which is strange since the
    # get_asn_signed function takes only the contents of the 'signed' entry, and
    # this function only returns the contents of a corresponding 'signed' entry.
    # (It is confusingly inconsistent to take the full object, return a converted
    # partial object, and have parallel naming and placement with a function that
    # takes and returns a partial object.)
    # This change has to percolate across all modules, however.
    # This should be the argument instead of asn_metadata.
    asn_signed = asn_metadata['signed']

    # Should check this from the ASN.1, but... the ASN.1 definitions don't
    # actually USE a type, so I'm basing the encoded type entirely on the
    # filename. This is bad, I think. Could it be a security issue to not sign
    # the metadata type in there? The metadata types are pretty distinct,
    # but... it's still best to fix this at some point.
    pydict_signed['_type'] = 'Snapshot'

    pydict_signed['expires'] = datetime.utcfromtimestamp(
        asn_signed['expires']).isoformat() + 'Z'

    pydict_signed['version'] = int(asn_signed['version'])

    # Next, extract the fileinfo for each role file described in the ASN.1
    # Snapshot metadata.

    snapshot_metadata = asn_signed['body']['snapshotMetadata']

    number_of_target_role_files = int(
        snapshot_metadata['numberOfTargetRoleFiles'])
    asn_target_fileinfos = snapshot_metadata['targetRoleFileInfos']

    pydict_fileinfos = {}

    # Copy the Targets and delegated roles fileinfos:
    for i in range(number_of_target_role_files):
        asn_role_fileinfo = asn_target_fileinfos[i]
        filename = str(asn_role_fileinfo['filename'])
        pydict_fileinfos[filename] = {
            'version': int(asn_role_fileinfo['version'])
        }

    # Add in the Root role fileinfo:
    # In the Python dictionary format for Snapshot metadata, these all exist in
    # one dictionary.
    filename = str(snapshot_metadata['rootRoleFileInfo']['filename'])
    version = int(snapshot_metadata['rootRoleFileInfo']['version'])
    length = int(snapshot_metadata['rootRoleFileInfo']['length'])

    if filename in pydict_fileinfos:
        raise tuf.Error(
            'ASN1 Conversion failure for Snapshot role: duplicate fileinfo '
            'entries detected: filename ' + str(filename) + ' identified both '
            'as Root role and Targets role in Snapshot metadata.')

    # Populate the hashes in the fileinfo describing the Root role.
    hashes = {}
    num_hashes = int(snapshot_metadata['rootRoleFileInfo']['numberOfHashes'])
    for i in range(num_hashes):
        asn_hash_info = snapshot_metadata['rootRoleFileInfo']['hashes'][i]

        # This is how we'd extract the name of the hash function from the
        # enumeration (namedValues) that is in the class (HashFunction), indexed by
        # the underlying "value" of asn_hash_info. The [0] at the end selects
        # the string description from a 2-tuple of e.g. ('sha256', 1), where 1 is
        # the value in the enum.
        # TODO: Should probably make this its own function. The following should
        # work:
        #   def translate_pyasn_enum_to_value(asn_enum_value):
        #     return asn_enum_value.namedValues[asn_enum_value][0]
        #
        hashtype = asn_hash_info['function'].namedValues[
            asn_hash_info['function']]
        hashval = hex_from_octetstring(asn_hash_info['digest'])

        hashes[hashtype] = hashval

    # Finally, add all the information gathered about the Root role.
    pydict_fileinfos[filename] = {
        'version': version,
        'length': length,
        'hashes': hashes
    }

    pydict_signed['meta'] = pydict_fileinfos

    return pydict_signed
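The helper sketched in the TODO comment above could be pulled out as follows, following the working lookup this function already uses (the extra [0] shown in the comment applies to older pyasn1 releases, where namedValues entries were (name, value) tuples):

def translate_pyasn1_enum_to_value(asn_enum_value):
    # namedValues maps enum names to underlying values; indexing it with the
    # value recovers the name, e.g. HashFunction value 1 -> 'sha256'.
    return asn_enum_value.namedValues[asn_enum_value]

With it, the hashtype lookup above becomes translate_pyasn1_enum_to_value(asn_hash_info['function']).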
Example #7
def convert_signed_der_to_dersigned_json(der_data):
    """
  Convert the given der_data to a Python dictionary representation consistent
  with TUF's typical JSON encoding.

  The 'signed' portion will be a JSON-style (essentially Python dict)
  translation of the der data's 'signed' portion. Likewise for the 'signatures'
  portion. The result will be a dict containing a 'signatures' section that has
  signatures over not what is in the 'signed' section, but rather over a
  different format and encoding of what is in the 'signed' section. Please take
  care.

  """

    I_TO_PRINT = (TO_PRINT + uptane.YELLOW +
                  '[convert_signed_der_to_dersigned_json(der_data)]: ' +
                  uptane.ENDCOLORS)
    # TODO: Print to be deleted
    print('%s %s %s' % (
        I_TO_PRINT, 'Converting signed der to dersigned json. der_data:', '?'))
    # TODO: Until here

    if not PYASN1_EXISTS:
        raise tuf.Error(
            'Request was made to load a DER file, but the required '
            'pyasn1 library failed to import.')

    # "_signed" here refers to the portion of the metadata that will be signed.
    # The metadata is divided into "signed" and "signature" portions. The
    # signatures are signatures over the "signed" portion. "json_signed" below
    # is actually not signed - it is simply the portion that will be put into
    # the "signed" section - the portion to be signed. The nomenclature is
    # unfortunate....
    # Note that decode() returns a tuple: (pyasn1_object, remaining_input)
    # We don't expect any remaining input (TODO: Consider testing it?) and
    # are only interested in the pyasn1 object decoded from the DER.
    asn_metadata = p_der_decoder.decode(
        der_data, asn1Spec=metadata_asn1_spec.Metadata())[0]

    # asn_metadata here now has three components, indexed by integers 0, 1, 2:
    # 0 is the signed component (Signed())
    # 1 is the numberOfSignatures component (Length())
    # 2 is the signatures component (Signatures())

    asn_signed_metadata = asn_metadata[0]

    # TODO: The 'signed' component here should probably already be DER, since
    # that is what the signature is over. Because this would entail some
    # changes to the ASN.1 data specifications in metadata_asn1_definitions.py,
    # I'm not doing this yet (though I expect to).
    # So, for the time being, if we wanted to check the signature, we'd have to
    # encode this thing into DER again.
    # der_signed_metadata = p_der_encoder.encode(asn_signed)

    # Now we have to figure out what type of metadata the ASN.1 metadata is
    # so that we can use the appropriate spec to convert it back to JSON.

    # (Even though this takes asn_metadata, it only uses asn_metadata[0],
    # asn_signed_metadata....)
    asn_type_data = asn_signed_metadata[0]  # This is the RoleType info, a class.

    # This is how we'd extract the name of the type from the enumeration that is
    # in the class (namedValues), indexed by the underlying "value" of
    # asn_type_data.
    # We call lower() on it because I don't care about the casing, which has
    # varied somewhat in TUF history, and I don't want casing to ruin this
    # detection.
    metadata_type = asn_type_data.namedValues[asn_type_data._value].lower()

    # Make sure it's a supported type of metadata for ASN.1 to Python dict
    # translation. (Throw an exception if not.)
    _ensure_valid_metadata_type_for_asn1(metadata_type)

    # Handle for the corresponding module.
    relevant_asn_module = SUPPORTED_ASN1_METADATA_MODULES[metadata_type]

    # Convert into the basic Python dict we use in the JSON encoding.
    json_signed = relevant_asn_module.get_json_signed(asn_metadata)

    # Extract the signatures from the ASN.1 representation.
    asn_signatures = asn_metadata[2]
    json_signatures = []

    for asn_signature in asn_signatures:
        json_signatures.append({
            'keyid': hex_from_octetstring(asn_signature['keyid']),
            # TODO: See if it's possible to tweak the definition of 'method' so
            # that str(method) returns what we want rather than the enum, so
            # that we don't have to make this weird enum-translation call.
            'method': asn_signature['method'].namedValues[
                asn_signature['method']._value],
            'sig': hex_from_octetstring(asn_signature['value'])
        })

    # TODO: Print to be deleted
    print('%s %s' % (
        I_TO_PRINT,
        "Returning dictionary with fields 'signatures' and 'signed'"))
    # TODO: Until here

    return {'signatures': json_signatures, 'signed': json_signed}
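A typical call site might look like this (a hypothetical sketch; the filename and the JSON serialization step are assumptions, not part of the module):

import json

# Hypothetical usage: read DER-encoded metadata from disk, convert it to the
# TUF-style dictionary, and dump it as JSON for inspection.
with open('timestamp.der', 'rb') as der_file:
    der_data = der_file.read()

metadata_dict = convert_signed_der_to_dersigned_json(der_data)
print(json.dumps(metadata_dict, indent=2, sort_keys=True))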