Example #1
def keyed_arbitrary_package_attack(target_filepath):
  """
  Add a new, malicious target to the Image Repository and sign malicious
  metadata with the valid Image Repository timestamp, snapshot, and targets
  keys.

  This attack is described in README.md, section 3.5.
  """
  print(LOG_PREFIX + 'ATTACK: keyed_arbitrary_package_attack on '
      'target_filepath ' + repr(target_filepath))


  # TODO: Back up the image and then restore it in the undo function instead of
  # hard-coding the contents it's changed back to in the undo function.
  # That would require that we pick a temp file location.

  # Determine the location the specified file would occupy in the repository.
  target_full_path = os.path.join(
      repo._repository_directory, 'targets', target_filepath)

  # Make sure it exists in the repository, or else abort this attack, which is
  # written to work on an existing target only.
  if not os.path.exists(target_full_path):
    raise uptane.Error('Unable to attack: expected given image filename, ' +
        repr(target_filepath) + ', to exist, but it does not.')

  # TODO: Check to make sure the given file exists in the repository as well.
  # We should be attacking a file that's already in the repo.
  # TODO: Consider adding other edge case checks (interrupted things, attack
  # already in progress, etc.)

  # Replace the given target with a malicious version.
  add_target_and_write_to_live(target_filepath, file_content='evil content')

  print(LOG_PREFIX + 'COMPLETED ATTACK')
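
A hedged sketch of the undo approach the TODO above describes: back the image up to a temp file before replacing it, then restore from that file instead of hard-coding the contents. The undo function name is hypothetical, and it assumes the same module-level repo and add_target_and_write_to_live helpers used above (and that the helper accepts bytes content).

import os
import shutil
import tempfile

def backup_image_before_attack(target_filepath):
  # Save the original image to a temp file so the undo function can restore it.
  target_full_path = os.path.join(
      repo._repository_directory, 'targets', target_filepath)
  backup_fd, backup_path = tempfile.mkstemp(prefix='image_backup_')
  os.close(backup_fd)
  shutil.copy2(target_full_path, backup_path)
  return backup_path

def undo_keyed_arbitrary_package_attack(target_filepath, backup_path):
  # Restore the backed-up image contents rather than hard-coded contents.
  with open(backup_path, 'rb') as f:
    original_content = f.read()
  add_target_and_write_to_live(target_filepath, file_content=original_content)
  os.remove(backup_path)
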
Example #2
File: secondary.py  Project: Joan95/TFM
    def _expand_metadata_archive(self, metadata_archive_fname):
        """
    Given the filename of an archive of metadata files validated and zipped by
    primary.py, unzip it into the contained metadata files, to be used as a
    local repository and validated by this Secondary.

    Note that attacks are possible against zip files. The particulars of the
    distribution of these metadata files from Primary to Secondary will vary
    greatly based on one's implementation and setup, so this is offered for
    instruction. The mechanism employed in particular should not obviate the
    protections provided by Uptane and TUF. It should time out rather than be
    susceptible to slow retrieval, and not introduce vulnerabilities in the
    face of a malicious Primary.
    """

        I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[Secondary._expand_metadata_archive(self, metadata_archive_fname)]: ' + uptane.ENDCOLORS
        #TODO: Print to be deleted
        print(
            str('%s %s %s' %
                (I_TO_PRINT, 'Processing metadata for metadata_archive_fname:',
                 metadata_archive_fname)))
        #TODO: Until here

        tuf.formats.RELPATH_SCHEMA.check_match(metadata_archive_fname)
        if not os.path.exists(metadata_archive_fname):
            raise uptane.Error('Indicated metadata archive does not exist. '
                               'Filename: ' + repr(metadata_archive_fname))

        z = zipfile.ZipFile(metadata_archive_fname)

        z.extractall(os.path.join(self.full_client_dir, 'unverified'))
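
The docstring above warns that attacks are possible against zip files. A minimal sketch of a more defensive extraction, with a path-traversal check and a cap on declared uncompressed size; the function name and the 10 MB cap are illustrative, not part of the reference code.

import os
import zipfile

import uptane

MAX_METADATA_ARCHIVE_SIZE = 10 * 1024 * 1024  # Illustrative cap (10 MB).

def extract_metadata_archive_defensively(metadata_archive_fname, extract_dir):
    real_extract_dir = os.path.realpath(extract_dir)
    total_declared_size = 0

    with zipfile.ZipFile(metadata_archive_fname) as z:
        for info in z.infolist():
            # Reject path traversal: every member must land inside extract_dir.
            dest = os.path.realpath(os.path.join(extract_dir, info.filename))
            if not dest.startswith(real_extract_dir + os.sep):
                raise uptane.Error('Archive member escapes the extraction '
                                   'directory: ' + repr(info.filename))

            # Reject zip bombs: cap the total declared uncompressed size.
            total_declared_size += info.file_size
            if total_declared_size > MAX_METADATA_ARCHIVE_SIZE:
                raise uptane.Error('Archive declares too much uncompressed '
                                   'data; refusing to extract.')

        z.extractall(extract_dir)
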
Example #3
def ensure_valid_metadata_type_for_asn1(metadata_type):
  if metadata_type not in SUPPORTED_ASN1_METADATA_MODULES:
    # TODO: Choose/make better exception class.
    raise uptane.Error('This is not one of the metadata types configured for '
        'translation from JSON to DER-encoded ASN1. Type of given metadata: ' +
        repr(metadata_type) + '; types accepted: ' +
        repr(list(SUPPORTED_ASN1_METADATA_MODULES)))
Example #4
File: demo_director.py  Project: Joan95/TFM
def backup_repositories(vin=None):
  """
  <Purpose>
    Back up the last-written state (contents of the 'metadata.staged'
    directories in each repository).

    Metadata is copied from '{repo_dir}/metadata.staged' to
    '{repo_dir}/metadata.backup'.

  <Arguments>
    vin (optional)
      If not provided, all known vehicle repositories will be backed up.
      You may also provide a single VIN (string) indicating one vehicle
      repository to back up.

  <Exceptions>
    uptane.Error if backup already exists

  <Side Effects>
    None.

  <Returns>
    None.
  """

  I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[backup_repositories(vin)]: ' + ENDCOLORS
  #TODO: Print to be deleted
  print(str('%s %s %s %s' % (I_TO_PRINT, 'Backing up repositories for vin:', vin, "Metadata is copied from '{repo_dir}/metadata.staged' to '{repo_dir}/metadata.backup'")))
  #TODO: Until here


  if vin is None:
    repos_to_backup = director_service_instance.vehicle_repositories.keys()
  else:
    repos_to_backup = [vin]

  for vin in repos_to_backup:
    repo = director_service_instance.vehicle_repositories[vin]
    repo_dir = repo._repository_directory

    if os.path.exists(os.path.join(repo_dir, 'metadata.backup')):
      raise uptane.Error('Backup already exists for repository ' +
          repr(repo_dir) + '; please delete or restore this backup before '
          'trying to backup again.')

    print(LOG_PREFIX + ' Backing up ' +
        os.path.join(repo_dir, 'metadata.staged'))
    shutil.copytree(os.path.join(repo_dir, 'metadata.staged'),
        os.path.join(repo_dir, 'metadata.backup'))
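
A usage sketch pairing this with restore_repositories (Example #16) around an attack demo; the VIN and the attack function are hypothetical placeholders.

# Back up one vehicle repository, run an attack demo, then restore.
backup_repositories(vin='democar')        # Hypothetical VIN.
try:
    run_some_attack_demo()                # Hypothetical attack function.
finally:
    restore_repositories(vin='democar')   # Moves metadata.backup back into place.
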
Example #5
def get_signed_time_der(nonces):
  """
  Same as get_signed_time, but converts the resulting Python dictionary into
  an ASN.1 representation, encodes it as DER (Distinguished Encoding Rules),
  replaces the signature with a signature over the hash of the DER encoding of
  the 'signed' portion of the data (the time and nonces).
  """
  if not PYASN1_EXISTS:
    raise uptane.Error('This Timeserver does not support DER: pyasn1 is not '
        'installed.')
  time_attestation = get_time(nonces)

  signable_time_attestation = tuf.formats.make_signable(time_attestation)
  uptane.formats.SIGNABLE_TIMESERVER_ATTESTATION_SCHEMA.check_match(
      signable_time_attestation)

  # Convert it, re-signing over the hash of the DER encoding of the attestation.
  der_attestation = asn1_codec.convert_signed_metadata_to_der(
      signable_time_attestation, DATATYPE_TIME_ATTESTATION,
      private_key=timeserver_key, resign=True)


  return der_attestation
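
On the receiving side, verification would mirror this scheme: the signature is over the SHA-256 hash of the DER encoding of the 'signed' portion. A minimal sketch, assuming the same asn1_codec module and DATATYPE_TIME_ATTESTATION constant used above, a der_attestation produced by this function, and an already-imported timeserver_public_key:

import hashlib

import tuf.keys

# Decode the DER attestation back into a JSON-compatible dict (Example #9).
signable = asn1_codec.convert_signed_der_to_dersigned_json(
    der_attestation, DATATYPE_TIME_ATTESTATION)

# Re-encode only the 'signed' portion as DER, matching what was signed.
der_signed = asn1_codec.convert_signed_metadata_to_der(
    signable, DATATYPE_TIME_ATTESTATION, only_signed=True)
hash_of_der = hashlib.sha256(der_signed).digest()

# True if the timeserver's signature over that hash is valid.
valid = tuf.keys.verify_signature(
    timeserver_public_key, signable['signatures'][0], hash_of_der)
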
Example #6
File: secondary.py  Project: Joan95/TFM
    def validate_image(self, image_fname):
        """
    Determines if the image with filename provided matches the expected file
    properties, based on the metadata we have previously validated (with
    fully_validate_metadata, stored in self.validated_targets_for_this_ecu). If
    this method completes without raising an exception, the image file is
    valid.

    <Arguments>

      image_fname
        This is the filename of the image file to validate. It is expected
        to match the filepath in the target file info (except without any
        leading '/' character). It should, therefore, not include any
        directory names except what is required to specify it within the
        target namespace.
        This file is expected to exist in the client directory
        (self.full_client_dir), in a subdirectory called 'unverified_targets'.

    <Exceptions>

      uptane.Error
        if the given filename does not match a filepath in the list of
        validated targets for this ECU (that is, the target(s) for which we
        have received validated instructions from the Director addressed to
        this ECU to install, and for which target info (file size and hashes)
        has been retrieved and fully validated)

      tuf.DownloadLengthMismatchError
        if the file does not have the expected length based on validated
        target info.

      tuf.BadHashError
        if the file does not have the expected hash based on validated target
        info

      tuf.FormatError
        if the given image_fname is not a path.

    <Returns>
      None.

    <Side-Effects>
      None.
    """
        tuf.formats.PATH_SCHEMA.check_match(image_fname)

        full_image_fname = os.path.join(self.full_client_dir,
                                        'unverified_targets', image_fname)

        # Get target info by looking up fname (filepath).

        relevant_targetinfo = None

        for targetinfo in self.validated_targets_for_this_ecu:
            filepath = targetinfo['filepath']
            if filepath[0] == '/':
                filepath = filepath[1:]
            if filepath == image_fname:
                relevant_targetinfo = targetinfo

        if relevant_targetinfo is None:
            # TODO: Consider a more specific error class.
            raise uptane.Error(
                'Unable to find validated target info for the given '
                'filename: ' + repr(image_fname) + '. Either metadata was not '
                'successfully updated, or the Primary is providing the wrong image '
                'file, or there was a very unlikely update to data on the Primary '
                'that had updated metadata but not yet updated images (The window '
                'for this is extremely small between two individually-atomic '
                'renames), or there has been a programming error....')

        # Check file length against trusted target info.
        with open(full_image_fname, 'rb') as fobj:
            tuf.client.updater.hard_check_file_length(
                fobj, relevant_targetinfo['fileinfo']['length'])

        # Check file hashes against trusted target info.
        with open(full_image_fname, 'rb') as fobj:
            tuf.client.updater.check_hashes(
                fobj,  # FIX
                relevant_targetinfo['fileinfo']['hashes'],
                reset_fpointer=True)  # Important for multiple hashes

        # If no error has been raised at this point, the image file is fully
        # validated and we can return.
        log.debug('Delivered target file has been fully validated: ' +
                  repr(full_image_fname))
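
A usage sketch catching the exceptions listed in the docstring; the secondary instance and the image filename are hypothetical.

import tuf
import uptane

try:
    secondary.validate_image('firmware.img')   # Hypothetical filename.
except uptane.Error as e:
    print('No validated target info for this image: ' + repr(e))
except (tuf.DownloadLengthMismatchError, tuf.BadHashError) as e:
    print('Image does not match validated target info: ' + repr(e))
else:
    print('Image fully validated; safe to install.')
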
Example #7
File: secondary.py  Project: Joan95/TFM
    def __init__(self,
                 full_client_dir,
                 director_repo_name,
                 vin,
                 ecu_serial,
                 ecu_key,
                 time,
                 timeserver_public_key,
                 firmware_fileinfo=None,
                 director_public_key=None,
                 partial_verifying=False):
        """
    <Purpose>
      Constructor for class Secondary

    <Arguments>

      full_client_dir       See class docstring above.

      director_repo_name    See class docstring above.

      vin                   See class docstring above.

      ecu_serial            See class docstring above.

      ecu_key               See class docstring above.

      timeserver_public_key See class docstring above.

      director_public_key   See class docstring above. (optional)

      partial_verifying     See class docstring above. (optional)

      time
        An initial time to set the Secondary's "clock" to, conforming to
        tuf.formats.ISO8601_DATETIME_SCHEMA.

      firmware_fileinfo (optional)
        See class docstring above. As provided here, this is the initial
        value, which will be provided in ECU Manifests generated for the
        Director's consumption until the firmware is updated.


    <Exceptions>

      tuf.FormatError
        if the arguments are not correctly formatted

      uptane.Error
        if arguments partial_verifying and director_public_key are inconsistent
          (partial_verifying True requires a director_public_key, and
           partial_verifying False requires no director_public_key)
        if director_repo_name is not a known repository based on the
        map/pinning file (pinned.json)

    <Side Effects>
      None.
    """

        I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[Secondary.__init__()]: ' + uptane.ENDCOLORS
        #TODO: Print to be deleted
        print(
            str('%s %s' % (
                I_TO_PRINT,
                'This class contains the necessary code to perform Uptane validation of images and metadata, and core functionality supporting distribution of metadata and images to Secondary ECUs, combining ECU Manifests into a Vehicle Manifest and signing it, combining tokens for a Timeserver request, validating the response, etc.'
            )))
        #TODO: Until here

        # Check arguments:
        tuf.formats.PATH_SCHEMA.check_match(full_client_dir)
        tuf.formats.PATH_SCHEMA.check_match(director_repo_name)
        uptane.formats.VIN_SCHEMA.check_match(vin)
        uptane.formats.ECU_SERIAL_SCHEMA.check_match(ecu_serial)
        tuf.formats.ISO8601_DATETIME_SCHEMA.check_match(time)
        tuf.formats.ANYKEY_SCHEMA.check_match(timeserver_public_key)
        tuf.formats.ANYKEY_SCHEMA.check_match(ecu_key)
        if director_public_key is not None:
            tuf.formats.ANYKEY_SCHEMA.check_match(director_public_key)

        self.director_repo_name = director_repo_name
        self.ecu_key = ecu_key
        self.vin = vin
        self.ecu_serial = ecu_serial
        self.full_client_dir = full_client_dir
        self.director_proxy = None
        self.timeserver_public_key = timeserver_public_key
        self.director_public_key = director_public_key
        self.partial_verifying = partial_verifying
        self.firmware_fileinfo = firmware_fileinfo

        if not self.partial_verifying and self.director_public_key is not None:
            raise uptane.Error(
                'Secondary not set as partial verifying, but a director '  # TODO: Choose error class.
                'key was still provided. Full verification secondaries employ the '
                'normal TUF verifications rooted at root metadata files.')

        elif self.partial_verifying and self.director_public_key is None:
            raise uptane.Error(
                'Secondary set as partial verifying, but a director '
                'key was not provided. Partial verification Secondaries validate '
                'only the Director\'s Targets role file.')

        # Create a TAP-4-compliant updater object. This will read pinned.json
        # and create single-repository updaters within it to handle connections to
        # each repository.
        self.updater = tuf.client.updater.Updater('updater')

        if director_repo_name not in self.updater.pinned_metadata[
                'repositories']:
            raise uptane.Error(
                'Given name for the Director repository is not a '
                'known repository, according to the pinned metadata from pinned.json'
            )

        # We load the given time twice for simplicity in later code.
        self.all_valid_timeserver_times = [time, time]

        self.last_nonce_sent = None
        self.nonce_next = self._create_nonce()
        self.validated_targets_for_this_ecu = []
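
A hedged construction sketch matching the argument checks above. Every value is an illustrative placeholder; the key-import helpers follow the tuf.repository_tool import_*_private_key naming mentioned in these examples.

import tuf.repository_tool as rt

ecu_key = rt.import_ed25519_privatekey_from_file(      # Placeholder key file.
    'keys/secondary', password='pw')
timeserver_public_key = rt.import_ed25519_publickey_from_file(
    'keys/timeserver.pub')                             # Placeholder key file.

secondary = Secondary(
    full_client_dir='/path/to/client_dir',
    director_repo_name='director',
    vin='democar',
    ecu_serial='ECU1234',
    ecu_key=ecu_key,
    time='2017-01-01T00:00:00Z',   # tuf.formats.ISO8601_DATETIME_SCHEMA
    timeserver_public_key=timeserver_public_key)

# Full verification mode: director_public_key is left as None, satisfying the
# consistency check between partial_verifying and director_public_key above.
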
Example #8
def get_metadata_for_ecu(ecu_serial, force_partial_verification=False):
    """
  Provides the current metadata a Secondary will need to validate updates.
  This takes two forms:
  - For Full Verification Secondaries (the norm):
      Send a zip archive of the most recent consistent set of the Primary's
      client metadata directory, containing the current, consistent metadata
      from all repositories used.
  - For Partial Verification Secondaries:
      Send the Director's Targets role file.
  <Arguments>
    ecu_serial
        the serial of the (Secondary) ECU for which to retrieve metadata
    force_partial_verification (optional: default False (Full))
        If True, provides the partial metadata (the Director's Targets role
        file), else provides the full metadata archive.
        Which metadata is provided (full vs. partial) is determined entirely
        by force_partial_verification.
        # TODO: Rename force_partial_verification to partial_verification once
        # the other branches that call it have been merged.
  <Exceptions>
    uptane.Error if there is no metadata to distribute
  """

    I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[get_metadata_for_ecu()]: ' + uptane.ENDCOLORS
    #TODO: Print to be deleted
    print(
        str('%s %s %s' %
            (I_TO_PRINT, 'Getting metadata for ecu with ecu_serial:',
             ecu_serial)))
    #TODO: Until here

    # Ensure serial is correct format & registered
    primary_ecu._check_ecu_serial(ecu_serial)

    # The filename of the file to return.
    fname = None

    if force_partial_verification:
        fname = primary_ecu.get_partial_metadata_fname()

    else:
        # Note that in Python 2.7.4 and later, unzipping should prevent files from
        # being created outside of the target extraction directory. There are other
        # security concerns (such as zip bombs). The security of archive use in
        # your environment should be carefully considered.
        fname = primary_ecu.get_full_metadata_archive_fname()

    if not os.path.exists(fname):
        raise uptane.Error(
            'Primary has no metadata to distribute to Secondary "' +
            ecu_serial + '". Missing filename: "' + fname +
            '". Currently operating in ' +
            ('Partial' if force_partial_verification else 'Full') +
            ' Verification Mode')

    print('Distributing metadata file ' + fname + ' to ECU ' +
          repr(ecu_serial))

    binary_data = xmlrpc_client.Binary(open(fname, 'rb').read())

    #TODO: Print to be deleted
    print(str('%s %s' % (I_TO_PRINT, 'Returning binary_data')))
    #TODO: Until here

    return binary_data
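
On the Secondary's side, retrieving this archive over XML-RPC might look like the sketch below; the Primary's host, port, and the ECU serial are hypothetical.

from six.moves import xmlrpc_client   # Same module used as xmlrpc_client above.

# Hypothetical address of the Primary's XML-RPC server.
server = xmlrpc_client.ServerProxy('http://localhost:30701')

binary_data = server.get_metadata_for_ecu('ECU1234')   # Hypothetical serial.

# The Binary wrapper carries the raw archive bytes in .data; write them to a
# file that can then be passed to _expand_metadata_archive (Example #2).
with open('metadata_archive.zip', 'wb') as f:
    f.write(binary_data.data)
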
Example #9
def convert_signed_der_to_dersigned_json(der_data, datatype):
  """
  Convert the given der_data to a Python dictionary representation consistent
  with Uptane's typical JSON encoding.

  The 'signed' portion will be a JSON-compatible Python dict translation
  of der_data's 'signed' portion. Likewise for the 'signatures'
  portion. The result will be a dict containing a 'signatures' section that has
  signatures over not what is in the 'signed' section, but rather over a
  different format and encoding of what is in the 'signed' section. Please take
  care.

  <Arguments>
    der_data:
      # TODO: FILL IN

    datatype:
      String chosen from SUPPORTED_ASN1_METADATA_MODULES.
      Specifies the type of data provided in der_data, whether a Time
      Attestation, ECU Manifest, or Vehicle Manifest. This is used to determine
      the module to use for the conversion.

      If the metadata contained a metadata type indicator (the way that
      DER TUF metadata does), and if we could also capture this in an ASN.1
      specification that flexibly supports each possible metadata type (the
      way that the Metadata specification does in TUF ASN.1), then this would
      not be necessary....
      # TODO: Try to find some way to add the type to the metadata and cover
      # these requirements above.

  <Returns>
    A JSON-compatible Python dictionary representing the data from der_data,
    including signatures that are still over the DER data.

  <Exceptions>
    tuf.FormatError
      If der_data does not seem to be valid DER data (regardless of the type).

    uptane.Error
      If datatype is not a data type that Uptane supports converting into
      ASN.1/DER.

    uptane.FailedToDecodeASN1DER
      If der_data cannot be decoded as the given datatype (if pyasn1 raises an
      error in the decode process).
  """

  if not PYASN1_EXISTS:
    # This error message is provided in order to be helpful; behavior is not
    # prescribed when a dependency is missing, so this clause is not tested
    # (which would entail tests running after a separate installation with
    # missing dependencies), so this clause is not included in coverage
    # metrics.
    raise uptane.Error( # pragma: no cover
        'Request was made to load a DER file, but the required '
        'pyasn1 library failed to import.')

  uptane.formats.DER_DATA_SCHEMA.check_match(der_data)

  # Make sure it's a supported type of metadata for ASN.1 to Python dict
  # translation. (Throw an exception if not.)
  ensure_valid_metadata_type_for_asn1(datatype)


  # "_signed" here refers to the portion of the metadata that will be signed.
  # The metadata is divided into "signed" and "signature" portions. The
  # signatures are signatures over the "signed" portion. "json_signed" below
  # is actually not signed - it is simply the portion that will be put into
  # the "signed" section - the portion to be signed. The nomenclature is
  # unfortunate....
  # Note that decode() returns a tuple: (pyasn1_object, remaining_input)
  # We don't expect any remaining input (TODO: Consider testing it?) and
  # are only interested in the pyasn1 object decoded from the DER.

  # TODO: Determine type of metadata here first, so that you can choose the
  # correct class from asn1_spec.
  # This object will be used by the decoder for its structure, to determine
  # how to decode the DER object.
  # I can't seem to figure out why I need to do this this way.
  # Why can't I just use Metadata() by adding TokensAndTimestamp as an optional
  # component of SignedBody()? Anyway, this seems to work.......
  # Handle for the corresponding module.
  relevant_asn_module = SUPPORTED_ASN1_METADATA_MODULES[datatype]
  if datatype == DATATYPE_TIME_ATTESTATION:
    exemplar_object = asn1_spec.TokensAndTimestampSignable()
  elif datatype == DATATYPE_ECU_MANIFEST:
    exemplar_object = asn1_spec.ECUVersionManifest()
  elif datatype == DATATYPE_VEHICLE_MANIFEST:
    exemplar_object = asn1_spec.VehicleVersionManifest()

  # TODO: Determine if there are any other error types to add to the except
  # clause below to cover whatever errors we expect pyasn1 to raise when trying
  # to convert data. That error class covers ValueConstraintError and
  # SubstrateUnderrunError, but I'm not sure if pyasn1 wouldn't raise other
  # errors....
  try:
    asn_metadata = p_der_decoder.decode(der_data, asn1Spec=exemplar_object)[0]
  except pyasn1.error.PyAsn1Error as e:
    raise uptane.FailedToDecodeASN1DER('Unable to decode the provided '
        'der_data as datatype ' + repr(datatype) + '. The pyasn1-raised error '
        'follows: ' + repr(e))

  # asn_metadata here now has three components, indexed by integer 0, 1, 2.
  # 0 is the signed component (Signed())
  # 1 is the numberOfSignatures component (Length())
  # 2 is the signatures component (Signatures())

  asn_signed_metadata = asn_metadata[0]

  # TODO: The 'signed' component here should probably already be DER, since
  # that is what the signature is over. Because this would entail some
  # changes to the ASN.1 data specifications in metadataverificationmodule.py,
  # I'm not doing this yet (though I expect to).
  # So, for the time being, if we wanted to check the signature, we'd have to
  # encode this thing into DER again.
  # der_signed_metadata = p_der_encoder.encode(asn_signed)


  # Now we have to figure out what type of metadata the ASN.1 metadata is
  # so that we can use the appropriate spec to convert it back to JSON.

  # # (Even though this takes asn_metadata, it only uses asn_metadata[0],
  # # asn_signed_metadata....)
  # asn_type_data = asn_signed_metadata[0] # This is the RoleType info, a class.

  # # This is how we'd extract the name of the type from the enumeration that is
  # # in the class (namedValues), indexed by the underlying "value" of
  # # asn_type_data.
  # # We call lower() on it because I don't care about the casing, which has
  # # varied somewhat in TUF history, and I don't want casing to ruin this
  # # detection.
  # metadata_type = asn_type_data.namedValues[asn_type_data._value][0].lower()


  # Convert into the basic Python dict we use in the JSON encoding.
  json_signed = relevant_asn_module.get_json_signed(asn_metadata)

  # Extract the signatures from the ASN.1 representation.
  asn_signatures = asn_metadata[2]
  json_signatures = convert_signatures_to_json(asn_signatures)

  return {'signatures': json_signatures, 'signed': json_signed}
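
A round-trip usage sketch combining this with convert_signed_metadata_to_der (Example #10); signable_ecu_manifest is assumed to be a dict conforming to SIGNABLE_ECU_VERSION_MANIFEST_SCHEMA.

# Encode a signable ECU manifest as DER, then decode it back.
der_data = convert_signed_metadata_to_der(
    signable_ecu_manifest, DATATYPE_ECU_MANIFEST)

roundtripped = convert_signed_der_to_dersigned_json(
    der_data, DATATYPE_ECU_MANIFEST)

# 'signed' is a JSON-compatible dict again, but per the docstring above the
# 'signatures' are still over the DER encoding, so check them in 'der' mode
# (see verify_signature_over_metadata, Example #12).
print(roundtripped['signed'])
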
Example #10
def convert_signed_metadata_to_der(signed_metadata, datatype,
    private_key=None, resign=False, only_signed=False):
  """
  Normal behavior ("resign" (re-sign) parameter being False) converts the
  basic Python dictionary format of signed_metadata provided into ASN.1 and
  encodes it as DER, returning the resulting DER encoding of the given metadata.

  "_signed" here refers to the portion of the metadata that will be signed.
  The metadata is divided into "signed" and "signature" portions. The
  signatures are signatures over the "signed" portion. "json_signed" below
  is actually not signed - it is simply the portion that will be put into
  the "signed" section - the portion to be signed. The nomenclature is
  unfortunate....
  TODO: Better variable and function naming.

  <Arguments>
    signed_metadata
      Metadata (time attestation or ecu manifest, for example), and signature(s)
      over it.
      A dictionary with keys 'signed' and 'signatures'.
      signed_metadata must conform to one of the following:
          SIGNABLE_TIMESERVER_ATTESTATION_SCHEMA
          SIGNABLE_VEHICLE_VERSION_MANIFEST_SCHEMA
          SIGNABLE_ECU_VERSION_MANIFEST_SCHEMA

      Each of the above also conforms to tuf.formats.SIGNABLE_SCHEMA.

    datatype:
      String chosen from SUPPORTED_ASN1_METADATA_MODULES.
      Specifies the type of data provided in signed_metadata, whether a Time
      Attestation, ECU Manifest, or Vehicle Manifest. This is used to determine
      the module to use for the conversion.

    resign
      ("re-sign"). Normally False, resulting in the signatures in
      signed_metadata being formatted as ASN.1 and encoded as DER, but otherwise
      preserved (for example, they may still be signatures over JSON - the
      signature values themselves are unchanged).
      If resign is instead True, any signatures provided are
      discarded, and a new signature is generated. This new signature will be
      over the DER encoding of the data provided in signed_metadata['signed'].
      In other words, 'signed' will first be converted into ASN.1 and then
      encoded as DER, and a signature will be made using the given private_key,
      over that DER encoding.
      If the given signatures are already over DER encoding before reaching
      this point (as may happen in the current design), then you will not
      need this to be True.
      NOTE that if given a vehicle manifest and told to re-sign, this function
      will only re-sign the vehicle manifest itself - it will not try to re-sign
      every ECU Manifest contained in it. (Those would presumably be signed
      by other keys.)

    private_key
      This should be left out (None) unless resign is True, in which case
      private_key must conform to tuf.formats.ANYKEY_SCHEMA, containing a
      private key, specifically. It will be used to re-sign the metadata
      provided in signed_metadata['signed'].
      Such a key can be imported, for example, through the
      tuf.repository_tool.import_*_private_key() functions.

    only_signed
      Default False. If this is set to True, instead of returning the DER
      encoding of the full {'signed': {"abc..."}, 'signatures': [{"xyz..."}]}
      object, the DER encoding of only the 'signed' entry will be returned
      {"abc..."}.

  <Returns>
    By default (only_signed=False, resign=False), the returned value is the DER
    encoding of the full signed_metadata dictionary.

    If only_signed is True, the returned value is the DER encoding of only the
    'signed' entry in the signed_metadata dictionary.

    Otherwise, if resign is True, the returned value is the DER encoding of the
    full signed_metadata dictionary, but with the 'signatures' entry
    discarded and rebuilt anew with a new signature over the DER ENCODING of the
    'signed' entry in the signed_metadata dictionary.

  """
  # Make sure that if and only if the re-sign ('resign') parameter is True, a
  # private_key has been provided.
  tuf.formats.BOOLEAN_SCHEMA.check_match(resign)
  if resign != (private_key is not None):
    raise uptane.Error('Inconsistent arguments: a private key should be '
        'provided to convert_signed_json_to_signed_der if and only if the '
        'resign argument is True.')

  if only_signed and resign:
    raise uptane.Error('Inconsistent arguments: request to re-sign metadata '
        'in a new encoding and then throw those same new signatures away.')


  if private_key is not None:
    tuf.formats.ANYKEY_SCHEMA.check_match(private_key)
    # TODO: Note that this does not confirm that it is specifically a private key.
    # Consider checking that. (Best way is to have an additional SCHEMA in
    # tuf.formats and use that.)

  tuf.formats.SIGNABLE_SCHEMA.check_match(signed_metadata)
  uptane.formats.ANY_SIGNABLE_UPTANE_METADATA_SCHEMA.check_match(
      signed_metadata)

  json_signed = signed_metadata['signed']

  # # Force lowercase for metadata type because some TUF versions have been
  # # inconsistent in the casing of metadata types ('targets' vs 'Targets').
  # metadata_type = json_signed['_type'].lower()

  # Ensure that the type is one of the supported metadata types, for which
  # a module exists that translates it to and from an ASN.1 format.
  ensure_valid_metadata_type_for_asn1(datatype)

  # Handle for the corresponding module.
  relevant_asn_module = SUPPORTED_ASN1_METADATA_MODULES[datatype]

  asn_signed = relevant_asn_module.get_asn_signed(json_signed)

  if only_signed:
    # If the caller doesn't want any signatures included in the returned
    # DER object, then we need go no further and may encode what we already
    # have, which is the 'signed' component, the core metadata itself.
    der_signed = p_der_encoder.encode(asn_signed)
    return der_signed

  # Otherwise, we're to produce the full signable object (signed + signatures).
  # Either we will be retaining existing signatures or re-signing.


  if resign:

    # Encode the ASN.1 as DER first using pyasn1.
    # TODO: Determine if there are any other error types to add to the except
    # clause below to cover whatever errors we expect pyasn1 to raise when
    # trying to encode data. That error class covers ValueConstraintError and
    # SubstrateUnderrunError, but I'm not sure if pyasn1 wouldn't raise other
    # errors....
    try:
      der_signed = p_der_encoder.encode(asn_signed)
    except pyasn1.error.PyAsn1Error as e:
      raise uptane.FailedToEncodeASN1DER('Unable to encode the provided '
          'der_data as datatype ' + repr(datatype) + '. The pyasn1-raised '
          'error follows: ' + repr(e))


    # This hashing is redundant and temporary. Eventually, the hash will
    # consistently be performed in securesystemslib/keys.py in the
    # create_signature() function, so we shouldn't be taking a hash here.
    # For the time being, I do this so that it always uses a hash even for ed25519
    # and also so that the canonicalization that is currently called by
    # create_signature() doesn't choke on the DER I want to sign.
    hash_of_der = hashlib.sha256(der_signed).digest()

    # Now sign the metadata. (This signs a cryptographic hash of the metadata.)
    # The returned value is a basic Python dict writable into JSON.
    # This is a signature over the hash of the DER encoding.
    # Tell keys.create_signature that the data we're providing is not JSON so
    # that it doesn't try to canonicalize it (and wrap the hash in double
    # quotes).
    pydict_signatures = [tuf.keys.create_signature(private_key, hash_of_der)]

  else:
    pydict_signatures = signed_metadata['signatures']

  asn_signatures_list = convert_signatures_to_asn(pydict_signatures)


  # Now construct an ASN.1 representation of the signed/signatures-encapsulated
  # metadata, populating it.
  if datatype == DATATYPE_TIME_ATTESTATION:
    metadata = asn1_spec.TokensAndTimestampSignable()
  elif datatype == DATATYPE_ECU_MANIFEST:
    metadata = asn1_spec.ECUVersionManifest()
  elif datatype == DATATYPE_VEHICLE_MANIFEST:
    metadata = asn1_spec.VehicleVersionManifest()
  metadata['signed'] = asn_signed #considering using der_signed instead - requires changes
  metadata['signatures'] = asn_signatures_list # TODO: Support multiple sigs, or integrate with TUF.
  metadata['numberOfSignatures'] = len(asn_signatures_list)

  # Encode our new (py)ASN.1 object as DER (Distinguished Encoding Rules).
  return p_der_encoder.encode(metadata)
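
A usage sketch of the resign path and its argument-consistency rule (a private_key if and only if resign is True); the key file is a hypothetical placeholder.

import tuf.repository_tool as rt

private_key = rt.import_ed25519_privatekey_from_file(  # Placeholder key file.
    'keys/mykey', password='pw')

# resign=True discards any signatures in the input and produces a new one
# over the DER encoding of the 'signed' portion.
der_data = convert_signed_metadata_to_der(
    signable_ecu_manifest, DATATYPE_ECU_MANIFEST,
    private_key=private_key, resign=True)

# Either of the following inconsistent calls raises uptane.Error:
#   convert_signed_metadata_to_der(m, DATATYPE_ECU_MANIFEST, resign=True)
#   convert_signed_metadata_to_der(m, DATATYPE_ECU_MANIFEST,
#                                  private_key=private_key)   # resign False
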
Example #11
File: secondary.py  Project: eacain/uptane
    def __init__(self,
                 full_client_dir,
                 pinning_filename,
                 vin,
                 ecu_serial,
                 fname_root_from_mainrepo,
                 fname_root_from_directorrepo,
                 ecu_key,
                 time,
                 timeserver_public_key,
                 firmware_fileinfo=None,
                 director_public_key=None,
                 partial_verifying=False):

        # Check arguments:
        tuf.formats.PATH_SCHEMA.check_match(full_client_dir)
        tuf.formats.PATH_SCHEMA.check_match(pinning_filename)
        tuf.formats.PATH_SCHEMA.check_match(fname_root_from_mainrepo)
        tuf.formats.PATH_SCHEMA.check_match(fname_root_from_directorrepo)
        uptane.formats.VIN_SCHEMA.check_match(vin)
        uptane.formats.ECU_SERIAL_SCHEMA.check_match(ecu_serial)
        tuf.formats.ISO8601_DATETIME_SCHEMA.check_match(time)
        for key in [timeserver_public_key, director_public_key]:
            if key is not None:
                tuf.formats.ANYKEY_SCHEMA.check_match(key)

        self.ecu_key = ecu_key
        self.vin = vin
        self.ecu_serial = ecu_serial
        self.full_client_dir = full_client_dir
        self.director_proxy = None
        self.most_recent_timeserver_time = time
        self.previous_timeserver_time = time
        self.timeserver_public_key = timeserver_public_key
        self.director_public_key = director_public_key
        self.partial_verifying = partial_verifying
        self.attacks_detected = ''
        self.firmware_fileinfo = firmware_fileinfo

        if not self.partial_verifying and self.director_public_key is not None:
            raise uptane.Error(
                'Secondary not set as partial verifying, but a director '  # TODO: Choose error class.
                'key was still provided. Full verification secondaries employ the '
                'normal TUF verifications rooted at root metadata files.')

        CLIENT_METADATA_DIR_MAINREPO_CURRENT = os.path.join(
            self.full_client_dir, 'metadata', 'mainrepo', 'current')
        CLIENT_METADATA_DIR_MAINREPO_PREVIOUS = os.path.join(
            self.full_client_dir, 'metadata', 'mainrepo', 'previous')
        CLIENT_METADATA_DIR_DIRECTOR_CURRENT = os.path.join(
            self.full_client_dir, 'metadata', 'director', 'current')
        CLIENT_METADATA_DIR_DIRECTOR_PREVIOUS = os.path.join(
            self.full_client_dir, 'metadata', 'director', 'previous')

        # Note that the hosts and ports for the repositories are drawn from
        # pinned.json now. The services (timeserver and the director's
        # submit-manifest service) are still addressed here, though, currently
        # by pulling the constants from their modules directly
        # e.g. timeserver.TIMESERVER_PORT and director.DIRECTOR_SERVER_PORT).
        # Note that despite the vague name, the latter is not the director
        # repository, but a service that receives manifests.

        # Set up the TUF client directories for the two repositories.
        if os.path.exists(self.full_client_dir):
            shutil.rmtree(self.full_client_dir)

        for d in [
                CLIENT_METADATA_DIR_MAINREPO_CURRENT,
                CLIENT_METADATA_DIR_MAINREPO_PREVIOUS,
                CLIENT_METADATA_DIR_DIRECTOR_CURRENT,
                CLIENT_METADATA_DIR_DIRECTOR_PREVIOUS
        ]:
            os.makedirs(d)

        # Get the root.json file from the mainrepo (would come with this client).
        shutil.copyfile(
            fname_root_from_mainrepo,
            os.path.join(CLIENT_METADATA_DIR_MAINREPO_CURRENT, 'root.json'))

        # Get the root.json file from the director repo (would come with this client).
        shutil.copyfile(
            fname_root_from_directorrepo,
            os.path.join(CLIENT_METADATA_DIR_DIRECTOR_CURRENT, 'root.json'))

        # Add a pinned.json to this client (softlink it from a saved copy).
        os.symlink(
            pinning_filename,
            os.path.join(self.full_client_dir, 'metadata', 'pinned.json'))

        # Configure tuf with the client's metadata directories (where it stores the
        # metadata it has collected from each repository, in subdirectories).
        tuf.conf.repository_directory = self.full_client_dir  # This setting should probably be called client_directory instead, post-TAP4.

        # Create a TAP-4-compliant updater object. This will read pinned.json
        # and create single-repository updaters within it to handle connections to
        # each repository.
        self.updater = tuf.client.updater.Updater('updater')

        self.nonce_sent = None
        self.nonce_next = self._create_nonce()
Example #12
def verify_signature_over_metadata(key_dict,
                                   signature,
                                   data,
                                   datatype,
                                   metadata_format=tuf.conf.METADATA_FORMAT):
    """
  <Purpose>
    Determine whether the private key belonging to 'key_dict' produced
    'signature'. tuf.keys.verify_signature() will use the public key found in
    'key_dict', the 'method' and 'sig' objects contained in 'signature',
    and 'data' to complete the verification.

    Higher level function that wraps tuf.keys.verify_signature, and works
    specifically with Time Attestations, ECU Manifests, and Vehicle Manifests
    that will be in JSON or ASN.1/DER format.

    Almost exactly identical to the function simultaneously added to TUF,
    tuf.sig.verify_signature_over_metadata(). Requires datatype.
    Must differ in Uptane simply because it is not possible to convert
    Uptane-specific metadata (Time Attestations, ECU Manifests, and Vehicle
    Manifests) to or from ASN.1/DER without knowing which of those three
    types of metadata you're dealing with, and this conversion is required for
    signing and verifying signatures.

    See tuf.keys.verify_signature for lower level details.

  <Arguments>
    key_dict:
      A dictionary containing the TUF keys and other identifying information.
      If 'key_dict' is an RSA key, it has the form:

      {'keytype': 'rsa',
       'keyid': 'f30a0870d026980100c0573bd557394f8c1bbd6...',
       'keyval': {'public': '-----BEGIN RSA PUBLIC KEY----- ...',
                  'private': '-----BEGIN RSA PRIVATE KEY----- ...'}}

      The public and private keys are strings in PEM format.

    signature:
      The signature dictionary produced by one of the key generation functions.
      'signature' has the form:

      {'keyid': 'f30a0870d026980100c0573bd557394f8c1bbd6...',
       'method': 'method',
       'sig': sig}.

      Conformant to 'tuf.formats.SIGNATURE_SCHEMA'.

    data:
      Data object over which the validity of the provided signature will be
      checked by verify_signature().

      Acceptable format depends somewhat on tuf.conf.METADATA_FORMAT, or, if
      the optional argument is provided, metadata_format.

      This will be converted into a bytes object and passed down to
      tuf.keys.verify_signature().

      In 'der' mode:
        'data' is expected to be a dictionary compliant with
        uptane.formats.ANY_SIGNABLE_UPTANE_METADATA_SCHEMA. ASN.1/DER
        conversion requires strictly defined formats.

      In 'json' mode:
        'data' can be any data that can be processed by
        tuf.formats.encode_canonical(data). This function is generally intended
        to verify signatures over Uptane metadata
        (uptane.formats.ANY_SIGNABLE_UPTANE_METADATA_SCHEMA), but can be used
        more broadly when in 'json' mode.

    metadata_format: (optional; default based on tuf.conf.METADATA_FORMAT)

      If 'json', treats data as a JSON-friendly Python dictionary to be turned
      into a canonical JSON string and then encoded as utf-8 before checking
      against the signature. When operating TUF with DER metadata but checking
      the signature on some piece of JSON for some reason, this should be
      manually set to 'json'. The purpose of this canonicalization is to
      produce repeatable signatures across different platforms and Python key
      dictionaries (avoiding things like different signatures over the same
      dictionary).

      If 'der', the data will be converted into ASN.1, encoded as DER,
      and hashed. The signature is then checked against that hash.

  <Exceptions>
    tuf.FormatError, raised if either 'key_dict' or 'signature' are improperly
    formatted.

    tuf.UnsupportedLibraryError, if an unsupported or unavailable library is
    detected.

    tuf.UnknownMethodError, raised if the signing method used by 'signature'
    is not supported.

    uptane.Error, if tuf.conf.METADATA_FORMAT is neither 'json' nor 'der'.

  <Side Effects>
    The cryptography library specified in 'tuf.conf' is called to do the actual
    verification. When in 'der' mode, argument data is converted into ASN.1/DER
    in order to verify it. (Argument object is unchanged.)

  <Returns>
    Boolean.  True if the signature is valid, False otherwise.
  """

    tuf.formats.ANYKEY_SCHEMA.check_match(key_dict)
    tuf.formats.SIGNATURE_SCHEMA.check_match(signature)
    # TODO: Check format of data, based on metadata_format.
    # TODO: Consider checking metadata_format redundantly. It's checked below.

    if metadata_format == 'json':
        data = tuf.formats.encode_canonical(data).encode('utf-8')

    elif metadata_format == 'der':

        # TODO: Have convert_signed_metadata_to_der take just the 'signed' element
        # so we don't have to do this silly wrapping in an empty signable.
        data = asn1_codec.convert_signed_metadata_to_der(
            {
                'signed': data,
                'signatures': []
            }, datatype, only_signed=True)
        data = hashlib.sha256(data).digest()

    else:  # pragma: no cover
        raise uptane.Error('Unsupported metadata format: ' +
                           repr(metadata_format) +
                           '; the supported formats are: "der" and "json".')

    return tuf.keys.verify_signature(key_dict, signature, data)
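
The docstring's point about canonicalization (repeatable signatures across platforms and key dictionaries) can be shown directly: dicts with different insertion orders canonicalize to identical bytes. A minimal sketch using the same tuf.formats.encode_canonical call as the 'json' branch above:

import tuf.formats

a = {'nonce': 1, 'time': '2017-01-01T00:00:00Z'}
b = {'time': '2017-01-01T00:00:00Z', 'nonce': 1}

# Canonical JSON sorts keys and normalizes whitespace, so both dicts encode
# to the same bytes -- and therefore yield the same signature.
assert (tuf.formats.encode_canonical(a).encode('utf-8') ==
        tuf.formats.encode_canonical(b).encode('utf-8'))
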
Example #13
def sign_signable(signable,
                  keys_to_sign_with,
                  datatype,
                  metadata_format=tuf.conf.METADATA_FORMAT):
    """
  <Purpose>
    Signs the given signable (e.g. an ECU manifest) with all the given keys.

    Wraps sign_over_metadata such that multiple signatures can be generated,
    and places them all in the 'signatures' field of the given signable.

    Also does some additional argument validation.


  <Arguments>

    signable:
      An object with a 'signed' dictionary and a 'signatures' list:
      conforms to tuf.formats.SIGNABLE_SCHEMA
      This may already include signatures, in which case signatures are added.
      Signatures from the same key (that is, two signatures listing the same
      keyid) will never be produced with this function (whether because a
      key is provided twice in keys_to_sign_with, or because a key in
      keys_to_sign_with has already signed this signable).

    keys_to_sign_with:
      A list whose elements must conform to tuf.formats.ANYKEY_SCHEMA.

    datatype:
      The type of data signable['signed'] represents.
      Must be in uptane.encoding.asn1_codec.SUPPORTED_ASN1_METADATA_MODULES.
      Specifies the type of data provided in signable['signed'], whether a Time
      Attestation, ECU Manifest, or Vehicle Manifest.

      'datatype' is used to determine the module to use for the conversion to
      ASN.1/DER, if the metadata format is 'der'. When 'der' is the metadata
      format, we need to convert to ASN.1/DER first, and conversion to
      ASN.1/DER varies by type. 'datatype' doesn't matter if signing is
      occurring over JSON.

      If the metadata contained a metadata type indicator (the way that
      DER TUF metadata does), and if we could also capture this in an ASN.1
      specification that flexibly supports each possible metadata type (the
      way that the Metadata specification does in TUF ASN.1), then this would
      not be necessary....
      # TODO: Try to find some way to add the type to the metadata and cover
      # these requirements above.

    metadata_format: (optional; default tuf.conf.METADATA_FORMAT)
      'json' or 'der'. Determines what the signature will be over.
      Should generally be left to the default except when testing different
      encodings or otherwise intentionally signing a different format.


  <Exceptions>
    tuf.FormatError if the provided key is not the correct format or lacks a
    private element.

    uptane.Error if the key type is not in the SUPPORTED_KEY_TYPES for Uptane
    or tuf.conf.METADATA_FORMAT is neither 'json' nor 'der'.

  <Side Effects>
    Adds a signature to the provided signable.

  <Returns>
    None. Note that the provided object, 'signable', is modified in place.


  """

    # The below was partially modeled after tuf.repository_lib.sign_metadata()

    for signing_key in keys_to_sign_with:

        tuf.formats.ANYKEY_SCHEMA.check_match(signing_key)

        # Populate a list of the keyids that have already signed, to prevent
        # duplicate signatures.
        keyids_that_already_signed = []
        for sig in signable['signatures']:
            if sig['keyid'] not in keyids_that_already_signed:
                keyids_that_already_signed.append(sig['keyid'])

        # If we already have a signature with this keyid, skip.
        if signing_key['keyid'] in keyids_that_already_signed:
            uptane.logger.debug('Skipping signing by key with keyid ' +
                                repr(signing_key['keyid']) +
                                ' because there is already a signature '
                                'using that keyid.')
            continue

        # If the given key was public, raise a FormatError.
        if 'private' not in signing_key['keyval']:
            raise tuf.FormatError(
                'One of the given keys lacks a private key value, '
                'and so cannot be used for signing: ' + repr(signing_key))

        # We should already be guaranteed to have a supported key type due to
        # the ANYKEY_SCHEMA.check_match call above. Defensive programming.
        if signing_key[
                'keytype'] not in SUPPORTED_KEY_TYPES:  # pragma: no cover
            raise uptane.Error('Unsupported key type: ' +
                               repr(signing_key['keytype']))

        # Else, all is well. Sign the signable with the given key, adding that
        # signature to the signatures list in the signable. Add the key used to the
        # list of keys that have already signed and continue to the next key.
        signable['signatures'].append(
            sign_over_metadata(signing_key,
                               signable['signed'],
                               datatype,
                               metadata_format=metadata_format))
        keyids_that_already_signed.append(signing_key['keyid'])

    uptane.formats.ANY_SIGNABLE_UPTANE_METADATA_SCHEMA.check_match(signable)
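
A usage sketch of the duplicate-keyid behavior described above; the key file is a placeholder, and ecu_manifest is assumed to conform to the ECU Version Manifest schema.

import tuf.formats
import tuf.repository_tool as rt

key = rt.import_ed25519_privatekey_from_file(   # Placeholder key file.
    'keys/ecu_key', password='pw')

# Wrap the manifest in a signable: {'signed': ..., 'signatures': []}.
signable = tuf.formats.make_signable(ecu_manifest)

# Passing the same key twice produces only one signature; the second pass is
# skipped because that keyid has already signed.
sign_signable(signable, [key, key], asn1_codec.DATATYPE_ECU_MANIFEST)
assert len(signable['signatures']) == 1
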
Example #14
def sign_over_metadata(key_dict,
                       data,
                       datatype,
                       metadata_format=tuf.conf.METADATA_FORMAT):
    """
  <Purpose>
    Given a key and data, returns a signature over that data.

    Higher level function that wraps tuf.keys.create_signature, and works
    specifically with Time Attestations, ECU Manifests, and Vehicle Manifests
    that will be in JSON or ASN.1/DER format.

    Almost exactly identical to the function simultaneously added to TUF,
    tuf.sig.sign_over_metadata(). Requires datatype, and operates on
    Uptane-specific metadata (see 'datatype' argument below)

    Must differ in Uptane simply because it is not possible to convert
    Uptane-specific metadata (Time Attestations, ECU Manifests, and Vehicle
    Manifests) to or from ASN.1/DER without knowing which of those three
    types of metadata you're dealing with, and this conversion is required for
    signing and verifying signatures.

    See tuf.keys.create_signature for lower level details.

  <Arguments>
    key_dict:
      A dictionary containing the TUF keys.  An example RSA key dict has the
      form:

      {'keytype': 'rsa',
       'keyid': 'f30a0870d026980100c0573bd557394f8c1bbd6...',
       'keyval': {'public': '-----BEGIN RSA PUBLIC KEY----- ...',
                  'private': '-----BEGIN RSA PRIVATE KEY----- ...'}}

      The public and private keys are strings in PEM format.

    data:
      Data object used by create_signature() to generate the signature.
      Acceptable format depends somewhat on tuf.conf.METADATA_FORMAT, or, if
      the optional argument is provided, metadata_format.

      This will be converted into a bytes object and passed down to
      tuf.keys.create_signature().

      In 'der' mode:
        'data' is expected to be a dictionary compliant with
        uptane.formats.ANY_UPTANE_METADATA_SCHEMA. ASN.1/DER
        conversion requires strictly defined formats.

      In 'json' mode:
        'data' can be any data that can be processed by
        tuf.formats.encode_canonical(data). This function is generally
        intended to sign metadata (tuf.formats.ANYROLE_SCHEMA), but can be
        used more broadly.

    datatype:
      The type of data that 'data' represents.
      Must be in uptane.encoding.asn1_codec.SUPPORTED_ASN1_METADATA_MODULES.
      Specifies whether 'data' is a Time Attestation, ECU Manifest, or
      Vehicle Manifest.

      'datatype' is used to determine the module to use for the conversion to
      ASN.1/DER, if the metadata format is 'der'. When 'der' is the metadata
      format, we need to convert to ASN.1/DER first, and conversion to
      ASN.1/DER varies by type. 'datatype' doesn't matter if signing is
      occurring over JSON.

      If the metadata contained a metadata type indicator (the way that
      DER TUF metadata does), and if we could also capture this in an ASN.1
      specification that flexibly supports each possible metadata type (the
      way that the Metadata specification does in TUF ASN.1), then this would
      not be necessary....
      # TODO: Try to find some way to add the type to the metadata and cover
      # these requirements above.

    metadata_format: (optional; default based on tuf.conf.METADATA_FORMAT)

      If 'json', treats data as a JSON-friendly Python dictionary to be turned
      into a canonical JSON string and then encoded as utf-8 before signing.
      When operating TUF with DER metadata but checking the signature on some
      piece of JSON for some reason, this should be manually set to 'json'. The
      purpose of this canonicalization is to produce repeatable signatures
      across different platforms and Python key dictionaries (avoiding things
      like different signatures over the same dictionary).

      If 'der', the data will be converted into ASN.1, encoded as DER,
      and hashed. The signature is then made over that hash.

  <Exceptions>
    tuf.FormatError, if 'key_dict' is improperly formatted.

    tuf.UnsupportedLibraryError, if an unsupported or unavailable cryptography
    library is chosen.

    uptane.Error, if the given metadata format is not 'json' or 'der' or if
    the given datatype is not one of the accepted Uptane data types for
    conversion (defined in constants asn1_codec.DATATYPE_*)

    TypeError, if 'key_dict' contains an invalid keytype.

  <Side Effects>
    The cryptography library specified in 'tuf.conf' is called to do the actual
    verification. When in 'der' mode, argument data is converted into ASN.1/DER
    in order to verify it. (Argument object is unchanged.)

  <Returns>
    A signature dictionary conformant to 'tuf.format.SIGNATURE_SCHEMA'. e.g.:
    {'keyid': 'f30a0870d026980100c0573bd557394f8c1bbd6...',
     'method': '...',
     'sig': '...'}.

  """

    tuf.formats.ANYKEY_SCHEMA.check_match(key_dict)

    if datatype not in asn1_codec.SUPPORTED_ASN1_METADATA_MODULES:
        raise uptane.Error('Datatype ' + repr(datatype) +
                           ' is not a supported '
                           'Uptane metadata type. The options are: ' +
                           repr(asn1_codec.SUPPORTED_ASN1_METADATA_MODULES))

    # TODO: Check format of data, based on metadata_format.
    # TODO: Consider checking metadata_format redundantly. It's checked below.

    if metadata_format == 'json':
        data = tuf.formats.encode_canonical(data).encode('utf-8')

    elif metadata_format == 'der':
        uptane.formats.ANY_UPTANE_METADATA_SCHEMA.check_match(data)

        data = asn1_codec.convert_signed_metadata_to_der(
            {
                'signed': data,
                'signatures': []
            }, datatype, only_signed=True)
        data = hashlib.sha256(data).digest()

    else:  # pragma: no cover
        raise uptane.Error('Unsupported metadata format: ' +
                           repr(metadata_format) +
                           '; the supported formats are: "der" and "json".')

    return tuf.keys.create_signature(key_dict, data)
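
A sign-then-verify round trip combining this with verify_signature_over_metadata (Example #12); the key file and ecu_manifest_signed (the 'signed' portion of an ECU manifest) are placeholders, and both calls must use the same metadata_format.

import tuf.repository_tool as rt

key = rt.import_ed25519_privatekey_from_file(   # Placeholder key file.
    'keys/ecu_key', password='pw')

signature = sign_over_metadata(
    key, ecu_manifest_signed, asn1_codec.DATATYPE_ECU_MANIFEST,
    metadata_format='der')

# Verification recomputes the hash of the DER encoding and checks the
# signature against it; True means the signature is valid.
assert verify_signature_over_metadata(
    key, signature, ecu_manifest_signed,
    asn1_codec.DATATYPE_ECU_MANIFEST, metadata_format='der')
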
Example #15
import sys
from unittest import defaultTestLoader, TextTestRunner

import uptane

# Here, we can override the value of tuf.conf.METADATA_FORMAT for all tests we
# run. It can have values 'json' or 'der', and will change the TUF & Uptane
# configuration to cause TUF & Uptane to use this metadata format for the tests
# in this module (by setting tuf.conf.METADATA_FORMAT). When running these
# tests, it can be set by providing the argument 'json' or 'der' when calling
# this module:
# e.g.  python tests/runtests.py json
# or    python tests/runtests.py der
# Running this module without an argument will use the default format for
# Uptane, set in uptane/__init__.py to 'der'.
if len(sys.argv) > 2:
    raise uptane.Error(
        'More arguments provided to runtests than allowed. Only '
        '0 or 1 command line arguments are supported. If provided, the sole '
        'command line argument for this test module is the metadata format to be '
        'used, "json" or "der".')
elif len(sys.argv) == 2:
    if sys.argv[1] in ['json', 'der']:
        uptane.tuf.conf.METADATA_FORMAT = sys.argv[1]
        print('Metadata Format set to ' + repr(sys.argv[1]))
    else:
        raise uptane.Error('Command-line argument not understood. Only '
                           '"json" or "der" are allowed. Received: ' +
                           repr(sys.argv[1]))

suite = defaultTestLoader.discover(start_dir="tests")
result = TextTestRunner(verbosity=2).run(suite)
sys.exit(0 if result.wasSuccessful() else 1)
Example #16
File: demo_director.py  Project: Joan95/TFM
def restore_repositories(vin=None):
  """
  <Purpose>
    Restore the last backup of each Director repository.

    Metadata is copied from '{repo_dir}/metadata.backup' to
    '{repo_dir}/metadata.staged' and '{repo_dir}/metadata'

  <Arguments>
    vin (optional)
      If not provided, all known vehicle repositories will be restored to their
      backed-up state. You may also provide a single VIN (string) indicating
      one vehicle to restore from backup.

  <Exceptions>
    uptane.Error if backup does not exist

  <Side Effects>
    None.

  <Returns>
    None.
  """

  I_TO_PRINT = TO_PRINT + uptane.YELLOW + '[restore_repositories(vin)]: ' + ENDCOLORS
  #TODO: Print to be deleted
  print(str('%s %s %s' % (I_TO_PRINT, 'Restoring repositories for vin:', vin)))
  #TODO: Until here

  if vin is None:
    repos_to_restore = director_service_instance.vehicle_repositories.keys()
  else:
    repos_to_restore = [vin]

  for vin in repos_to_restore:

    repo_dir = director_service_instance.vehicle_repositories[
        vin]._repository_directory

    # Copy the backup metadata to the metadata.staged and live directories.  The
    # backup metadata should already exist if
    # sign_with_compromised_keys_attack() was called.

    if not os.path.exists(os.path.join(repo_dir, 'metadata.backup')):
      raise uptane.Error('Unable to restore backup of ' + repr(repo_dir) +
          '; no backup exists.')

    # Empty the existing (old) live metadata directory (relatively fast).
    print(LOG_PREFIX + 'Deleting ' + os.path.join(repo_dir, 'metadata.staged'))
    if os.path.exists(os.path.join(repo_dir, 'metadata.staged')):
      shutil.rmtree(os.path.join(repo_dir, 'metadata.staged'))

    # Atomically move the new metadata into place.
    print(LOG_PREFIX + 'Moving backup to ' +
        os.path.join(repo_dir, 'metadata.staged'))
    os.rename(os.path.join(repo_dir, 'metadata.backup'),
        os.path.join(repo_dir, 'metadata.staged'))

    # Re-load the repository from the restored metadata.staged directory.
    print(LOG_PREFIX + 'Reloading repository from backup ' + repo_dir)
    director_service_instance.vehicle_repositories[vin] = rt.load_repository(
        repo_dir)

    # Load the new signing keys to write metadata. The root key is unchanged,
    # but must be reloaded because load_repository() was called.
    valid_root_private_key = demo.import_private_key('directorroot')
    director_service_instance.vehicle_repositories[vin].root.load_signing_key(
        valid_root_private_key)

    # Copy the staged metadata to a temp directory, which we'll move into place
    # atomically in a moment.
    shutil.copytree(os.path.join(repo_dir, 'metadata.staged'),
        os.path.join(repo_dir, 'metadata.livetemp'))

    # Empty the existing (old) live metadata directory (relatively fast).
    print(LOG_PREFIX + 'Deleting live hosted dir: ' +
        os.path.join(repo_dir, 'metadata'))
    if os.path.exists(os.path.join(repo_dir, 'metadata')):
      shutil.rmtree(os.path.join(repo_dir, 'metadata'))

    # Atomically move the new metadata into place in the hosted directory.
    os.rename(os.path.join(repo_dir, 'metadata.livetemp'),
        os.path.join(repo_dir, 'metadata'))
    print(LOG_PREFIX + 'Repository ' + repo_dir + ' restored and hosted.')