Example #1
def load_json_string(data):
    """
  <Purpose>
    Deserialize 'data' (JSON string) to a Python object.

  <Arguments>
    data:
      A JSON string.
  
  <Exceptions>
    tuf.Error, if 'data' cannot be deserialized to a Python object.

  <Side Effects>
    None.

  <Returns>
    Deserialized object.  For example, a dictionary.
  """

    deserialized_object = None

    try:
        deserialized_object = json.loads(data)

    except TypeError as e:
        raise tuf.Error('Invalid JSON string. Error reads: {' + repr(e) +
                        '}. Data provided: ' + repr(data))

    except ValueError:
        message = 'Cannot deserialize to a Python object: ' + repr(data)
        raise tuf.Error(message)

    else:
        return deserialized_object
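A minimal usage sketch (not part of the original example): it assumes the load_json_string() above and a 'tuf' package defining tuf.Error are importable, and the JSON payload is purely illustrative.

import tuf

try:
    metadata = load_json_string('{"_type": "Targets", "version": 1}')
except tuf.Error as e:
    print('Deserialization failed: ' + repr(e))
else:
    print(metadata['version'])  # prints: 1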
Example #2
    def decompress_temp_file_object(self, compression):
        """
    <Purpose>
      Decompress a compressed temp file object.  Decompression is performed on
      a temp file object after a compressed file has been downloaded.  For
      instance, if a compressed version of some metadata file in the repository
      is downloaded, the temp file containing the compressed metadata file will
      be decompressed using this function.  Note that after calling this
      method, write() can no longer be called.

                            meta.txt.gz
                               |...[download]
                        temporary_file (containing meta.txt.gz)
                        /             \
               temporary_file          _orig_file
          containing meta.txt          containing meta.txt.gz
          (decompressed data)

    <Arguments>
      compression:
        A string indicating the type of compression that was used to compress
        a file.  Only gzip is allowed.

    <Exceptions>
      tuf.FormatError: If 'compression' is improperly formatted.

      tuf.Error: If an invalid compression is given.

      tuf.DecompressionError: If the compression failed for any reason.

    <Side Effects>
      'self._orig_file' is used to store the original data of 'temporary_file'.

    <Return>
      None.

    """

        # Does 'compression' have the correct format?
        # Raise 'tuf.FormatError' if there is a mismatch.
        tuf.formats.NAME_SCHEMA.check_match(compression)

        if self._orig_file is not None:
            raise tuf.Error('Can only set compression on a TempFile once.')

        if compression != 'gzip':
            raise tuf.Error('Only gzip compression is supported.')

        self.seek(0)
        self._compression = compression
        self._orig_file = self.temporary_file

        try:
            self.temporary_file = gzip.GzipFile(fileobj=self.temporary_file,
                                                mode='rb')
        except Exception as exception:
            raise tuf.DecompressionError(exception)
Example #3
def push(config_filepath):
    """
  <Purpose>
    Perform a push/transfer of target files to a host.  The configuration file
    'config_filepath' provides the required settings needed by the transfer
    command.  In the case of an 'scp' configuration file, the configuration
    file would contain 'host', 'user', 'identity file', and 'remote directory'
    entries.
     
  <Arguments>
    config_filepath:
      The push configuration file (i.e., 'push.cfg').
      
  <Exceptions>
    tuf.FormatError, if any of the arguments are incorrectly formatted.

    tuf.Error, if there was an error while processing the push.

  <Side Effects>
    The 'config_filepath' file is read and its contents stored, the files
    in the targets directory (specified in the config file) are copied,
    and the copied targets are transferred to a specified host.

  <Returns>
    None.
  
  """

    # Do the arguments have the correct format?
    # Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.PATH_SCHEMA.check_match(config_filepath)

    # Is the path to the configuration file valid?
    if not os.path.isfile(config_filepath):
        message = 'The configuration file path is invalid.'
        raise tuf.Error(message)
    config_filepath = os.path.abspath(config_filepath)

    # Retrieve the push configuration settings required by the transfer
    # modules.  Raise ('tuf.FormatError', 'tuf.Error') if a valid
    # configuration file cannot be retrieved.
    config_dict = tuf.pushtools.pushtoolslib.read_config_file(
        config_filepath, 'push')

    # Extract the transfer module identified in the configuration file.
    transfer_module = config_dict['general']['transfer_module']

    # 'scp' is the only transfer module currently supported.  Perform
    # an scp-transfer of the targets located in the targets directory as
    # listed in the configuration file.
    if transfer_module == 'scp':
        tuf.pushtools.transfer.scp.transfer(config_dict)
    else:
        message = 'Cannot perform a transfer using ' + repr(transfer_module)
        raise tuf.Error(message)
Example #4
File: util.py Project: muri11as/tuf
def load_json_file(filepath):
    """
  <Purpose>
    Deserialize a JSON object from a file containing the object.

  <Arguments>
    filepath:
      Absolute path of JSON file.

  <Exceptions>
    tuf.FormatError: If 'filepath' is improperly formatted.

    tuf.Error: If 'filepath' could not be opened.

  <Side Effects>
    None.

  <Return>
    Deserialized object.  For example, a dictionary.

  """

    # Making sure that the format of 'filepath' is a path string.
    # tuf.FormatError is raised on incorrect format.
    tuf.formats.PATH_SCHEMA.check_match(filepath)

    try:
        fileobject = open(filepath)
    except IOError as err:
        raise tuf.Error(err)
Example #5
def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL):
    """
  <Purpose>
    Allow the default log level for console messages to be overridden.

  <Arguments>
    log_level:
      The log level to set for the console handler.
      'log_level' examples: logging.INFO; logging.CRITICAL.
      
  <Exceptions>
    tuf.Error, if the 'log.py' console handler has not been set yet with
    add_console_handler().

  <Side Effects>
    Overrides the logging level for the console handler.

  <Returns>
    None.

  """

    # Does 'log_level' have the correct format?
    # Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.LOGLEVEL_SCHEMA.check_match(log_level)

    # Assign to the global console_handler object.
    global console_handler

    if console_handler is not None:
        console_handler.setLevel(log_level)
    else:
        message = 'The console handler has not been set with add_console_handler().'
        raise tuf.Error(message)
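A hedged usage sketch: assuming this function lives in tuf.log alongside add_console_handler() (which the docstring says must be called first), a caller might raise console verbosity like so.

import logging
import tuf.log

tuf.log.add_console_handler()                 # required first, per the docstring
tuf.log.set_console_log_level(logging.DEBUG)  # console now shows DEBUG and above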
Example #6
File: util.py Project: Joan95/TFM
def load_file(filepath):
    """
  Loads the given DER or JSON file into TUF's standard Python dictionary
  format (return value conforms with tuf.formats.SIGNABLE_SCHEMA, with the
  value of 'signed' conforming to tuf.formats.ANYROLE_SCHEMA).

  A simple wrapper for load_der_file and load_json_file. Please see comments in
  those functions.
  """

    if filepath.endswith('.der'):
        return load_der_file(filepath)

    elif filepath.endswith('.json'):
        return load_json_file(filepath)

    else:
        raise tuf.Error(
            'The provided file does not have a supported extension: '
            '.der or .json. Filepath: ' + repr(filepath))
Example #7
File: asn1_codec.py Project: ngasoft/tuf
def _ensure_valid_metadata_type_for_asn1(metadata_type):
  if metadata_type not in SUPPORTED_ASN1_METADATA_MODULES:
    # TODO: Choose/make better exception class.
    raise tuf.Error('This is not one of the metadata types configured for '
        'translation from JSON to DER-encoded ASN1. Type of given metadata: ' +
        repr(metadata_type) + '; types accepted: ' +
        repr(list(SUPPORTED_ASN1_METADATA_MODULES)))
Example #8
def load_string(data):
    """
  Loads the given DER or JSON data into TUF's standard Python dictionary
  format (return value conforms with tuf.formats.SIGNABLE_SCHEMA, with the
  value of 'signed' conforming to tuf.formats.ANYROLE_SCHEMA).

  In DER mode, takes bytes (encoded ASN.1/DER data).
  In JSON mode, takes a string (already decoded).


  Here are the constraints leading to this unusual coding:
    - Keys are always loaded from JSON, not DER, by calling load_json_string
      directly.
    - DER can't be decoded into a string from bytes
    - It is preferable not to have DER vs JSON conditionals in every piece of
      code that loads metadata by calling load_string. (It is preferable for
      load_string to do it.)

  # TODO: Consider renaming this 'deserialize', since it may deal with 'strings'
    or 'bytes' (making the existing name misleading).

  A simple wrapper for load_der_string and load_json_string. Please see
  comments in those functions.
  """
    if tuf.conf.METADATA_FORMAT == 'der':
        return load_der_string(data)

    elif tuf.conf.METADATA_FORMAT == 'json':
        return load_json_string(data.decode('utf-8'))

    else:
        raise tuf.Error(
            'tuf.util.load_string() only supports DER or JSON, but '
            'tuf.conf.METADATA_FORMAT is set to neither. It is instead set to: '
            + repr(tuf.conf.METADATA_FORMAT))
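To make the mode switch concrete, here is a small sketch; it assumes tuf.conf and the load_string() above are importable, and the byte string is illustrative JSON rather than real DER data.

import tuf.conf

tuf.conf.METADATA_FORMAT = 'json'
signable = load_string(b'{"signatures": [], "signed": {}}')
print(signable)  # {'signatures': [], 'signed': {}}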
Example #9
File: keydb.py Project: smith325/tuf
def add_rsakey(rsakey_dict, keyid=None):
    """
  <Purpose>
    Add 'rsakey_dict' to the key database while avoiding duplicates.
    If keyid is provided, verify it is the correct keyid for 'rsakey_dict'
    and raise an exception if it is not.
  
  <Arguments>
    rsakey_dict:
      A dictionary conformant to 'tuf.formats.RSAKEY_SCHEMA'.
      It has the form:
      {'keytype': 'rsa',
       'keyid': keyid,
       'keyval': {'public': '-----BEGIN RSA PUBLIC KEY----- ...',
                  'private': '-----BEGIN RSA PRIVATE KEY----- ...'}}
    
    keyid:
      An object conformant to 'KEYID_SCHEMA'.  It is used as an identifier
      for RSA keys.

  <Exceptions>
    tuf.FormatError, if 'rsakey_dict' or 'keyid' does not have the 
    correct format.

    tuf.Error, if 'keyid' does not match the keyid for 'rsakey_dict'.

    tuf.KeyAlreadyExistsError, if 'rsakey_dict' is found in the key database.

  <Side Effects>
    The keydb key database is modified.

  <Returns>
    None.

  """

    # Does 'rsakey_dict' have the correct format?
    # This check will ensure 'rsakey_dict' has the appropriate number of objects
    # and object types, and that all dict keys are properly named.
    # Raise 'tuf.FormatError' if the check fails.
    tuf.formats.RSAKEY_SCHEMA.check_match(rsakey_dict)

    # Does 'keyid' have the correct format?
    if keyid is not None:
        # Raise 'tuf.FormatError' if the check fails.
        tuf.formats.KEYID_SCHEMA.check_match(keyid)

        # Check if the keyid found in 'rsakey_dict' matches 'keyid'.
        if keyid != rsakey_dict['keyid']:
            raise tuf.Error('Incorrect keyid ' + rsakey_dict['keyid'] +
                            ' expected ' + keyid)

    # Check if the keyid belonging to 'rsakey_dict' is not already
    # available in the key database before returning.
    keyid = rsakey_dict['keyid']
    if keyid in _keydb_dict:
        raise tuf.KeyAlreadyExistsError('Key: ' + keyid)

    _keydb_dict[keyid] = rsakey_dict
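An illustrative call with placeholder key material; whether these exact placeholder values satisfy tuf.formats.RSAKEY_SCHEMA depends on the tuf version at hand, so treat this as a sketch rather than a guaranteed-valid key.

rsakey = {'keytype': 'rsa',
          'keyid': 'a' * 64,  # placeholder: a 64-character hex keyid
          'keyval': {'public': '-----BEGIN RSA PUBLIC KEY----- ...',
                     'private': ''}}

add_rsakey(rsakey, keyid='a' * 64)  # stored under rsakey['keyid']
add_rsakey(rsakey)                  # raises tuf.KeyAlreadyExistsError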
Example #10
File: util.py Project: muri11as/tuf
    def _default_temporary_directory(self, prefix):
        """__init__ helper."""
        try:
            self.temporary_file = tempfile.TemporaryFile(prefix=prefix)
        except OSError as err:
            # tempfile.TemporaryFile() defaults to the system temp directory.
            logger.critical('Temp file in ' + tempfile.gettempdir() +
                            ' failed: ' + repr(err))
            raise tuf.Error(err)
Example #11
def _process_copied_push(pushpath, metadata_directory, targets_directory,
                         backup_directory):
    """
  <Purpose>
    Helper function for _process_new_push().
    
    This does the actual work of copying pushpath to a temp directory,
    checking the metadata and targets, and copying the files to the
    repository on success. The push is valid and successfully processed
    if no exception is raised.
  
  <Arguments>
    pushpath:
      The push directory currently being processed (i.e., the 'processing'
      directory on the developer's pushroot)
    
    metadata_directory:
      The directory where the repository's metadata files (e.g., 'targets.txt',
      'root.txt') are stored.

    targets_directory:
      The directory where the repository's target files are stored.

    backup_directory:
      The directory where the pushed directories are saved after a 
      successful 'receive'.

  <Exceptions>
    tuf.Error, if there is an error processing the push.
  
  <Side Effects>
    The repository is updated if the push is successful.

  <Returns>
    None.

  """

    # The push's timestamp directory name (e.g., '1348449811.39')
    pushname = os.path.basename(pushpath)

    # Copy the contents of pushpath to a temp directory. We don't want the
    # user modifying the files we work with.  The temp directory is only
    # accessible by the calling process.
    temporary_directory = tempfile.mkdtemp()
    push_temporary_directory = os.path.join(temporary_directory, 'push')
    shutil.copytree(pushpath, push_temporary_directory)

    # Read the 'root' metadata of the current repository.  'root.txt'
    # is needed to authorize the 'targets' metadata file.
    root_metadatapath = os.path.join(metadata_directory, 'root.txt')
    root_signable = tuf.util.load_json_file(root_metadatapath)

    # Ensure 'root_signable' is properly formatted.
    try:
        tuf.formats.check_signable_object_format(root_signable)
    except tuf.FormatError:
        raise tuf.Error('The repository contains an invalid "root.txt".')
Example #12
    def _default_temporary_directory(self, prefix):
        """__init__ helper."""
        try:
            self.temporary_file = tempfile.NamedTemporaryFile(prefix=prefix)

        except OSError as err:  # pragma: no cover
            logger.critical('Cannot create a system temporary directory: ' +
                            repr(err))
            raise tuf.Error(err)
Example #13
File: util.py Project: Joan95/TFM
def get_file_details(filepath, hash_algorithms=['sha256']):
    """
  <Purpose>
    To get file's length and hash information.  The hash is computed using the
    sha256 algorithm.  This function is used in the signerlib.py and updater.py
    modules.

  <Arguments>
    filepath:
      Absolute file path of a file.

    hash_algorithms:
      A list of hash algorithm names, conformant to
      'tuf.formats.HASHALGORITHMS_SCHEMA' (e.g., ['sha256']).

  <Exceptions>
    tuf.FormatError: If hash of the file does not match HASHDICT_SCHEMA.

    tuf.Error: If 'filepath' does not exist.

  <Returns>
    A tuple (length, hashes) describing 'filepath'.
  """

    # Making sure that the format of 'filepath' is a path string.
    # 'tuf.FormatError' is raised on incorrect format.
    tuf.formats.PATH_SCHEMA.check_match(filepath)
    tuf.formats.HASHALGORITHMS_SCHEMA.check_match(hash_algorithms)

    # The returned file hashes of 'filepath'.
    file_hashes = {}

    # Does the path exist?
    if not os.path.exists(filepath):
        raise tuf.Error('Path ' + repr(filepath) + ' does not exist.')
    filepath = os.path.abspath(filepath)

    # Obtaining length of the file.
    file_length = os.path.getsize(filepath)

    # Obtaining hash of the file.
    for algorithm in hash_algorithms:
        digest_object = tuf.hash.digest_filename(filepath, algorithm)
        file_hashes.update({algorithm: digest_object.hexdigest()})

    # Performing a format check to ensure 'file_hashes' corresponds to
    # HASHDICT_SCHEMA.  Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.HASHDICT_SCHEMA.check_match(file_hashes)

    return file_length, file_hashes
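Typical use, assuming the file exists; the path is hypothetical.

length, hashes = get_file_details('/var/repo/targets/file1.txt',
                                  hash_algorithms=['sha256', 'sha512'])
print(length)            # file size in bytes
print(hashes['sha256'])  # hex digest string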
Example #14
File: util.py Project: Joan95/TFM
def load_json_string(data):
    """
  <Purpose>
    Deserialize 'data' (JSON string) to a Python object.

  <Arguments>
    data:
      A JSON string.

  <Exceptions>
    tuf.Error, if 'data' cannot be deserialized to a Python object.

  <Side Effects>
    None.

  <Returns>
    Deserialized object.  For example, a dictionary.
  """

    deserialized_object = None

    try:
        deserialized_object = json.loads(data)

    except TypeError as e:
        raise tuf.Error('Invalid JSON string. Error reads: {' + repr(e) +
                        '}. Data provided: ' + repr(data))

    except ValueError:
        message = 'Cannot deserialize to a Python object: ' + repr(data)
        raise tuf.Error(message)

    else:
        return deserialized_object
Example #15
def load_json_file(filepath):
    """
  <Purpose>
    Deserialize a JSON object from a file containing the object.

  <Arguments>
    filepath:
      Absolute path of JSON file.

  <Exceptions>
    tuf.FormatError: If 'filepath' is improperly formatted.

    tuf.Error: If 'filepath' cannot be deserialized to a Python object.

    IOError in case of runtime IO exceptions.

  <Side Effects>
    None.

  <Return>
    Deserialized object.  For example, a dictionary.
  """

    # Making sure that the format of 'filepath' is a path string.
    # tuf.FormatError is raised on incorrect format.
    tuf.formats.PATH_SCHEMA.check_match(filepath)

    deserialized_object = None

    # The file is most likely gzipped.
    if filepath.endswith('.gz'):
        logger.debug('gzip.open(' + str(filepath) + ')')
        fileobject = six.StringIO(gzip.open(filepath).read().decode('utf-8'))

    else:
        logger.debug('open(' + str(filepath) + ')')
        fileobject = open(filepath)

    try:
        deserialized_object = json.load(fileobject)

    except (ValueError, TypeError):
        raise tuf.Error('Cannot deserialize to a Python object: ' +
                        repr(filepath))

    else:
        return deserialized_object

    finally:
        fileobject.close()
Example #16
File: util.py Project: Joan95/TFM
    def _default_temporary_directory(self, prefix):
        """__init__ helper."""

        try:
            self.temporary_file = tempfile.NamedTemporaryFile(prefix=prefix)

        except OSError as err:  # pragma: no cover
            logger.critical('Cannot create a system temporary directory: ' +
                            repr(err))
            raise tuf.Error(err)
Example #17
File: signerlib.py Project: smith325/tuf
def get_metadata_file_info(filename):
    """
  <Purpose>
    Retrieve the file information for 'filename'.  The object returned
    conforms to 'tuf.formats.FILEINFO_SCHEMA'.  The information
    generated for 'filename' is stored in metadata files like 'targets.txt'.
    The fileinfo object returned has the form:
    fileinfo = {'length': 1024,
                'hashes': {'sha256': 1233dfba312, ...},
                'custom': {...}}

  <Arguments>
    filename:
      The metadata file whose file information is needed.

  <Exceptions>
    tuf.FormatError, if 'filename' is improperly formatted.

    tuf.Error, if 'filename' doesn't exist.

  <Side Effects>
    The file is opened and information about the file is generated,
    such as file size and its hash.

  <Returns>
    A dictionary conformant to 'tuf.formats.FILEINFO_SCHEMA'.  This
    dictionary contains the length, hashes, and custom data about
    the 'filename' metadata file.

  """

    # Does 'filename' have the correct format?
    # Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.PATH_SCHEMA.check_match(filename)

    if not os.path.isfile(filename):
        message = repr(filename) + ' is not a file.'
        raise tuf.Error(message)

    # Note: 'filehashes' is a dictionary of the form
    # {'sha256': 1233dfba312, ...}.  'custom' is an optional
    # dictionary that a client might define to include additional
    # file information, such as the file's author, version/revision
    # numbers, etc.
    filesize, filehashes = tuf.util.get_file_details(filename)
    custom = None

    return tuf.formats.make_fileinfo(filesize, filehashes, custom)
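A quick hypothetical call; the metadata file must exist, and the returned dictionary conforms to tuf.formats.FILEINFO_SCHEMA.

fileinfo = get_metadata_file_info('/repo/metadata/targets.txt')  # hypothetical path
print(fileinfo['length'], sorted(fileinfo['hashes'].keys()))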
Example #18
def verify(signable, role):
    """
  <Purpose> 
    Verify whether the authorized signatures of 'signable' meet the minimum
    required by 'role'.  Authorized signatures are those with valid keys
    associated with 'role'.  'signable' must conform to SIGNABLE_SCHEMA
    and 'role' must not equal 'None' or be less than zero.

  <Arguments>
    signable:
      A dictionary containing a list of signatures and a 'signed' identifier.
      signable = {'signed':, 'signatures': [{'keyid':, 'method':, 'sig':}]}

    role:
      TUF role (e.g., 'root', 'targets', 'release').

  <Exceptions>
    tuf.UnknownRoleError, if 'role' is not recognized.

    tuf.FormatError, if 'signable' is not formatted correctly.

    tuf.Error, if an invalid threshold is encountered.

  <Side Effects>
    tuf.sig.get_signature_status() called.  Any exceptions thrown by
    get_signature_status() will be caught here and re-raised.

  <Returns>
    Boolean.  True if the number of good signatures >= the role's threshold,
    False otherwise.

  """

    # Retrieve the signature status.  tuf.sig.get_signature_status() raises
    # tuf.UnknownRoleError
    # tuf.FormatError
    status = get_signature_status(signable, role)

    # Retrieve the role's threshold and the authorized keys of 'status'
    threshold = status['threshold']
    good_sigs = status['good_sigs']

    # Does 'status' have the required threshold of signatures?
    # First check for invalid threshold values before returning result.
    if threshold is None or threshold <= 0:
        raise tuf.Error("Invalid threshold: " + str(threshold))

    return len(good_sigs) >= threshold
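A sketch of the calling pattern, assuming role metadata and keys have already been loaded into tuf.roledb/tuf.keydb and that 'signable' came from, e.g., tuf.util.load_json_file(); the path is hypothetical.

import tuf.util

signable = tuf.util.load_json_file('/repo/metadata/targets.txt')
if verify(signable, 'targets'):
    print('threshold met: metadata is sufficiently signed')
else:
    print('not enough good signatures yet')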
Example #19
File: util.py Project: Joan95/TFM
def load_der_string(data):
    """
  <Purpose>
    Deserialize 'data' (DER string) to a Python object. This supports only
    what tuf.asn1_codec supports, which at the time of this writing is
    signed role metadata (timestamp, snapshot, root, or targets) converted into
    ASN.1 and then encoded as DER.

  <Arguments>
    data:
      A DER string, as would be output by e.g.
      asn1_codec.convert_signed_metadata_to_der()

  <Exceptions>
    tuf.Error, if 'data' cannot be deserialized to a Python object.

  <Side Effects>
    None.

  <Returns>
    A Python dictionary deserialized from the DER data provided, in TUF's
    standard format, conforming to tuf.formats.SIGNABLE_SCHEMA, where the
    'signed' entry matches tuf.formats.ANYROLE_SCHEMA (though conversion of the
    Mirrors role is not supported).

    The signatures contained in the returned dictionary (the 'signatures'
    entry), if any, will have been unchanged. If, for example, the signatures
    were over a DER object, they will remain that way, even though the 'signed'
    portion will no longer be in DER.
  """

    try:
        return asn1_codec.convert_signed_der_to_dersigned_json(data)
    except Exception as e:
        raise tuf.Error(
            'An exception was encountered in an attempt to convert '
            'the given data from DER to a Python dictionary containing role '
            'metadata. The exception reads: ' + repr(e))
Example #20
File: asn1_codec.py Project: Joan95/TFM
def _ensure_valid_metadata_type_for_asn1(metadata_type):

    if metadata_type not in SUPPORTED_ASN1_METADATA_MODULES:
        # TODO: Choose/make better exception class.
        raise tuf.Error(
            'This is not one of the metadata types configured for '
            'translation from JSON to DER-encoded ASN1. Type of given metadata: '
            + repr(metadata_type) + '; types accepted: ' +
            repr(list(SUPPORTED_ASN1_METADATA_MODULES)))
Example #21
def load_file(filepath):
    """
  Loads the given DER or JSON file into TUF's standard Python dictionary
  format (return value conforms with tuf.formats.SIGNABLE_SCHEMA, with the
  value of 'signed' conforming to tuf.formats.ANYROLE_SCHEMA).

  A simple wrapper for load_der_file and load_json_file. Please see comments in
  those functions.
  """

    if filepath.endswith('.der'):
        return load_der_file(filepath)

    elif filepath.endswith('.json'):
        return load_json_file(filepath)

    else:
        raise tuf.Error(
            'The provided file does not have a supported extension: '
            '.der or .json. Filepath: ' + repr(filepath))
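The dispatch is purely by file extension, as this sketch shows; the paths are hypothetical and the files must exist for the calls to succeed.

root = load_file('/repo/metadata/root.json')     # routed to load_json_file()
der_root = load_file('/repo/metadata/root.der')  # routed to load_der_file()
load_file('/repo/metadata/root.txt')             # raises tuf.Error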
Example #22
File: util.py Project: muri11as/tuf
def get_file_details(filepath):
    """
  <Purpose>
    To get file's length and hash information.  The hash is computed using the
    sha256 algorithm.  This function is used in the signerlib.py and updater.py
    modules.

  <Arguments>
    filepath:
      Absolute file path of a file.

  <Exceptions>
    tuf.FormatError: If hash of the file does not match HASHDICT_SCHEMA.

    tuf.Error: If 'filepath' does not exist. 

  <Returns>
    A tuple (length, hashes) describing 'filepath'.

  """
    # Making sure that the format of 'filepath' is a path string.
    # 'tuf.FormatError' is raised on incorrect format.
    tuf.formats.PATH_SCHEMA.check_match(filepath)

    # Does the path exist?
    if not os.path.exists(filepath):
        raise tuf.Error('Path ' + repr(filepath) + ' does not exist.')
    filepath = os.path.abspath(filepath)

    # Obtaining length of the file.
    file_length = os.path.getsize(filepath)

    # Obtaining hash of the file.
    digest_object = tuf.hash.digest_filename(filepath, algorithm='sha256')
    file_hash = {'sha256': digest_object.hexdigest()}

    # Performing a format check to ensure 'file_hash' corresponds to
    # HASHDICT_SCHEMA.  Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.HASHDICT_SCHEMA.check_match(file_hash)

    return file_length, file_hash
Example #23
File: formats.py Project: smith325/tuf
    def make_metadata(version,
                      expiration_date,
                      filedict=None,
                      delegations=None):
        if filedict is None and delegations is None:
            raise tuf.Error(
                'We don\'t allow completely empty targets metadata.')

        result = {'_type': 'Targets'}
        result['version'] = version
        result['expires'] = expiration_date
        if filedict is not None:
            result['targets'] = filedict
        if delegations is not None:
            result['delegations'] = delegations

        # Is 'result' a Targets metadata file?
        # Raise 'tuf.FormatError' if not.
        TARGETS_SCHEMA.check_match(result)

        return result
Example #24
  def add_verification_key(self, key):
    """
      <Purpose>
        Functions as a thin wrapper for the project._targets call of the
        same name.  This wrapper exists only for usability purposes.

      <Arguments>
        key:
          The role key to be added, conformant to 'tuf.formats.ANYKEY_SCHEMA'.
          Adding a public key to a role means that its corresponding private
          key must generate and add its signature to the role.

      <Exceptions>
        tuf.FormatError, if the 'key' argument is improperly formatted.

        tuf.Error, if the project already contains a key.

      <Side Effects>
        The role's entries in 'tuf.keydb.py' and 'tuf.roledb.py' are updated.

      <Returns>
        None
    """

    # Verify that this role does not already contain a key.  The parent project
    # role is restricted to one key.  Any of its delegated roles may have
    # more than one key.
    # TODO: Add condition check for the requirement stated above.
    if len(self.keys) > 0:
      raise tuf.Error("This project already contains a key.")

    try:
      super(Project, self).add_verification_key(key)
    
    except tuf.FormatError:
      raise
Example #25
def hex_from_octetstring(octetstring):
  """
  Convert a pyasn1 OctetString object into a hex string.
  Example return:   '4b394ae2'
  Raises Error() if an individual octet's supposed integer value is out of
  range (0 <= x <= 255).
  """
  octets = octetstring.asNumbers()
  hex_string = ''

  for x in octets:
    if x < 0 or x > 255:
      raise tuf.Error('Unable to generate hex string from OctetString: integer '
          'value of octet provided is not in range: ' + str(x))
    hex_string += '%.2x' % x

  # Make sure that the resulting value is a valid hex string.
  tuf.formats.HEX_SCHEMA.check_match(hex_string)

  return hex_string
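A small self-check, assuming pyasn1 is installed; OctetString.asNumbers() yields each octet's integer value, so a round trip through this function should reproduce the hex string.

from pyasn1.type import univ

octets = univ.OctetString(hexValue='4b394ae2')
print(hex_from_octetstring(octets))  # '4b394ae2'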
Example #26
File: signerlib.py Project: smith325/tuf
def check_directory(directory):
    """
  <Purpose>
    Ensure 'directory' is valid and it exists.  This is not a security check,
    but a way for the caller to determine the cause of an invalid directory
    provided by the user.  If the directory argument is valid, it is returned
    normalized and as an absolute path.

  <Arguments>
    directory:
      The directory to check.

  <Exceptions>
    tuf.Error, if 'directory' could not be validated.

    tuf.FormatError, if 'directory' is not properly formatted.

  <Side Effects>
    None.

  <Returns>
    The normalized absolutized path of 'directory'.

  """

    # Does 'directory' have the correct format?
    # Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.PATH_SCHEMA.check_match(directory)

    # Check if the directory exists.
    if not os.path.isdir(directory):
        raise tuf.Error(repr(directory) + ' directory does not exist')

    directory = os.path.abspath(directory)

    return directory
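Example behavior, assuming tuf.formats is importable; the current directory always exists, while the second call is expected to raise.

print(check_directory('.'))      # e.g., '/home/user/project' (absolute path)
check_directory('/no/such/dir')  # raises tuf.Error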
Example #27
def transfer(scp_config_dict):
    """
  <Purpose>
    Create a local temporary directory with an added 'info' file used to
    communicate additional information to the repository. This directory
    will be transferred to the repository.
    
  <Arguments>
    scp_config_dict:
      The dict containing the options to use with the SCP command.

  <Exceptions>
    tuf.FormatError, if the arguments are improperly formatted.

    tuf.Error, if the transfer failed. 

  <Side Effects>
    Files specified in 'push.cfg' will be transferred to a host using
    'scp'.
  
  <Returns>
    None.
  
  """

    # Do the arguments have the correct format?
    # Raise 'tuf.FormatError' if there is a mismatch.
    tuf.formats.SCPCONFIG_SCHEMA.check_match(scp_config_dict)

    # Extract the required 'scp' entries.  If an entry contains
    # a path argument, Tilde Expansions or user home symbols
    # are converted.
    host = scp_config_dict['scp']['host']
    user = scp_config_dict['scp']['user']

    # The SCP command accepts an optional path to an SSH private key file.
    identity_file = scp_config_dict['scp']['identity_file']
    identity_file = os.path.expanduser(identity_file)

    # The directory on the host the target files will be pushed to.
    remote_directory = scp_config_dict['scp'].get('remote_directory', '.')
    remote_directory = os.path.expanduser(remote_directory)

    # The 'targets.txt' metadata file to be pushed to the host.
    metadata_path = scp_config_dict['general']['metadata_path']
    metadata_path = os.path.expanduser(metadata_path)

    # The local targets directory containing the target to be pushed.
    targets_directory = scp_config_dict['general']['targets_directory']
    targets_directory = os.path.expanduser(targets_directory)

    basecommand = ['scp']
    if identity_file:
        basecommand.extend(['-i', identity_file])

    # Build the destination.
    # Example: 'user@localhost:~/pushes/1273704893.55'
    timestamp = time.time()
    destination = ''
    if user:
        destination = destination + user + '@'
    destination = destination + host + ':' + remote_directory + '/' + str(
        timestamp)

    temporary_directory = tempfile.mkdtemp()
    try:
        # Make sure the temp directory is world-readable, as the permissions
        # get carried over in the scp'ing.
        os.chmod(temporary_directory, 0o755)

        # Create a file that tells the repository the name of the targets
        # metadata file. For delegation, this will be the only way the
        # repository knows the full role name.
        file_object = open(os.path.join(temporary_directory, 'info'), 'w')
        file_object.write('metadata=' + metadata_path + '\n')
        file_object.close()

        # Copy the targets metadata.
        basename = os.path.basename(metadata_path)
        shutil.copy(metadata_path, os.path.join(temporary_directory, basename))

        # Create a directory that all target files will be put in before
        # being transferred.
        temporary_targets_directory = os.path.join(temporary_directory,
                                                   'targets')

        # Copy all the targets into the correct directory structure.
        shutil.copytree(targets_directory, temporary_targets_directory)

        # This will create the 'timestamp' directory on the remote host.  The
        # 'timestamp' directory will contain the 'info' file, targets metadata,
        # and the targets directory being pushed.
        command = basecommand[:]
        # Add the recursive option, which will add the full contents of
        # 'temporary_directory'
        command.append('-r')
        command.append(temporary_directory)
        command.append(destination)
        # Example 'command':
        # ['scp', '-i', '/home/user/.ssh/id_dsa', '-r', '/tmp/tmpmxWxLS',
        #  'user@host:~/pushes/1348349228.4']
        print('Running command: ' + ' '.join(command))

        # 'subprocess.CalledProcessError' raised on scp command failure.
        # Catch the exception and raise 'tuf.Error'.
        # For important security information on 'subprocess',
        # See http://docs.python.org/library/subprocess.html
        try:
            subprocess.check_call(command)
        except subprocess.CalledProcessError:
            message = 'scp.transfer failed.'
            raise tuf.Error(message)
    finally:
        shutil.rmtree(temporary_directory)
Example #28
def _generate_and_write_metadata(rolename, metadata_filename, write_partial,
                                 targets_directory, metadata_directory,
                                 filenames=None,
                                 prefix=''):
  """
    Non-public function that can generate and write the metadata of the
    specified 'rolename'.  It also increments version numbers if:
    
    1.  write_partial==True and the metadata is the first to be written.
              
    2.  write_partial==False (i.e., write()), the metadata was not loaded as
        partially written, and a write_partial is not needed.
  """

  metadata = None 
  
  # Retrieve the roleinfo of 'rolename' to extract the needed metadata
  # attributes, such as version number, expiration, etc.
  roleinfo = tuf.roledb.get_roleinfo(rolename) 

  metadata = generate_targets_metadata(targets_directory,
                                       roleinfo['paths'],
                                       roleinfo['version'],
                                       roleinfo['expires'],
                                       roleinfo['delegations'],
                                       False) 

  # Prepend the prefix to the project's filepath to avoid signature errors in
  # upstream.
  for element in list(metadata['targets']):
    junk_path, relative_target = os.path.split(element)
    prefixed_path = os.path.join(prefix, relative_target)
    metadata['targets'][prefixed_path] = metadata['targets'][element]
    if prefix != '':
      del(metadata['targets'][element])

  signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                           metadata_filename)

  # Check if the version number of 'rolename' may be automatically incremented,
  # depending on whether partial metadata is loaded or whether the metadata is
  # written with write() / write_partial(). 
  # Increment the version number if this is the first partial write.
  if write_partial:
    temp_signable = sign_metadata(metadata, [], metadata_filename)
    temp_signable['signatures'].extend(roleinfo['signatures'])
    status = tuf.sig.get_signature_status(temp_signable, rolename)
    if len(status['good_sigs']) == 0:
      metadata['version'] = metadata['version'] + 1
      signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                               metadata_filename)
  
  # non-partial write()
  else:
    if tuf.sig.verify(signable, rolename): #and not roleinfo['partial_loaded']:
      metadata['version'] = metadata['version'] + 1
      signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                               metadata_filename)

  # Write the metadata to file if it contains a threshold of signatures.
  signable['signatures'].extend(roleinfo['signatures']) 
  
  if tuf.sig.verify(signable, rolename) or write_partial:
    _remove_invalid_and_duplicate_signatures(signable)
    compressions = roleinfo['compressions']
    filename = write_metadata_file(signable, metadata_filename, compressions,
                                   False)
    
  # 'signable' contains an invalid threshold of signatures. 
  else:
    message = 'Not enough signatures for ' + repr(metadata_filename)
    raise tuf.Error(message, signable)

  return signable, filename 
Example #29
  pass

# Python <=2.4 does not have the hashlib module by default.
# Let's try importing hashlib and adding it to our supported list.
try:
  import hashlib
  _supported_libraries.append('hashlib')
except ImportError:
  logger.debug('Hashlib could not be imported.  '
               'Supported libraries: ' + str(_SUPPORTED_LIB_LIST))

# Were we able to import any hash libraries?
if not _supported_libraries:
  # This is fatal, we'll have no way of generating hashes.
  raise tuf.Error('Unable to import a hash library from the '
                  'following supported list: ' + str(_SUPPORTED_LIB_LIST))


_DEFAULT_HASH_ALGORITHM = 'sha256'
_DEFAULT_HASH_LIBRARY = 'hashlib'





def digest(algorithm=_DEFAULT_HASH_ALGORITHM, 
           hash_library=_DEFAULT_HASH_LIBRARY):
  """
  <Purpose>
    Provide the caller with the ability to create
    digest objects without having to worry about hash
Example #30
File: asn1_codec.py Project: Joan95/TFM
def convert_signed_der_to_dersigned_json(der_data):
    """
  Convert the given der_data to a Python dictionary representation consistent
  with TUF's typical JSON encoding.

  The 'signed' portion will be a JSON-style (essentially Python dict)
  translation of the der data's 'signed' portion. Likewise for the 'signatures'
  portion. The result will be a dict containing a 'signatures' section that has
  signatures over not what is in the 'signed' section, but rather over a
  different format and encoding of what is in the 'signed' section. Please take
  care.

  """

    if not PYASN1_EXISTS:
        raise tuf.Error(
            'Request was made to load a DER file, but the required '
            'pyasn1 library failed to import.')

    # "_signed" here refers to the portion of the metadata that will be signed.
    # The metadata is divided into "signed" and "signature" portions. The
    # signatures are signatures over the "signed" portion. "json_signed" below
    # is actually not signed - it is simply the portion that will be put into
    # the "signed" section - the portion to be signed. The nomenclature is
    # unfortunate....
    # Note that decode() returns a tuple: (pyasn1_object, remaining_input)
    # We don't expect any remaining input (TODO: Consider testing it?) and
    # are only interested in the pyasn1 object decoded from the DER.
    asn_metadata = p_der_decoder.decode(
        der_data, asn1Spec=metadata_asn1_spec.Metadata())[0]

    # asn_metadata here now has three components, indexed by integer 0, 1, 2.
    # 0 is the signed component (Signed())
    # 1 is the numberOfSignatures component (Length())
    # 2 is the signatures component (Signatures())

    asn_signed_metadata = asn_metadata[0]

    # TODO: The 'signed' component here should probably already be DER, since
    # that is what the signature is over. Because this would entail some
    # changes to the ASN.1 data specifications in metadata_asn1_definitions.py,
    # I'm not doing this yet (though I expect to).
    # So, for the time being, if we wanted to check the signature, we'd have to
    # encode this thing into DER again.
    # der_signed_metadata = p_der_encoder.encode(asn_signed)

    # Now we have to figure out what type of metadata the ASN.1 metadata is
    # so that we can use the appropriate spec to convert it back to JSON.

    # (Even though this takes asn_metadata, it only uses asn_metadata[0],
    # asn_signed_metadata....)
    asn_type_data = asn_signed_metadata[0]  # This is the RoleType info, a class.

    # This is how we'd extract the name of the type from the enumeration that is
    # in the class (namedValues), indexed by the underlying "value" of
    # asn_type_data.
    # We call lower() on it because I don't care about the casing, which has
    # varied somewhat in TUF history, and I don't want casing to ruin this
    # detection.
    metadata_type = asn_type_data.namedValues[asn_type_data._value].lower()

    # Make sure it's a supported type of metadata for ASN.1 to Python dict
    # translation. (Throw an exception if not.)
    _ensure_valid_metadata_type_for_asn1(metadata_type)

    # Handle for the corresponding module.
    relevant_asn_module = SUPPORTED_ASN1_METADATA_MODULES[metadata_type]

    # Convert into the basic Python dict we use in the JSON encoding.
    json_signed = relevant_asn_module.get_json_signed(asn_metadata)

    # Extract the signatures from the ASN.1 representation.
    asn_signatures = asn_metadata[2]
    json_signatures = []

    for asn_signature in asn_signatures:
        json_signatures.append({
            'keyid':
            hex_from_octetstring(asn_signature['keyid']),
            # TODO: See if it's possible to tweak the definition of 'method' so
            # that str(method) returns what we want here rather than the enum, so
            # that we don't have to make this weird enum translation call?
            'method':
            asn_signature['method'].namedValues[
                asn_signature['method']._value],
            'sig':
            hex_from_octetstring(asn_signature['value'])
        })


    return {'signatures': json_signatures, 'signed': json_signed}