def load_json_string(data):
  """
  <Purpose>
    Deserialize 'data' (JSON string) to a Python object.

  <Arguments>
    data:
      A JSON string.

  <Exceptions>
    ssl_crypto.Error, if 'data' cannot be deserialized to a Python object.

  <Side Effects>
    None.

  <Returns>
    Deserialized object.  For example, a dictionary.
  """

  deserialized_object = None

  try:
    deserialized_object = json.loads(data)

  except TypeError:
    message = 'Invalid JSON string: ' + repr(data)
    raise ssl_crypto.Error(message)

  except ValueError:
    message = 'Cannot deserialize to a Python object: ' + repr(data)
    raise ssl_crypto.Error(message)

  else:
    return deserialized_object
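# Example usage of load_json_string() -- a sketch, not part of the original
# module.  It assumes 'json' and 'ssl_crypto' are importable here:
def _example_load_json_string():
  metadata = load_json_string('{"version": 1}')
  assert metadata == {'version': 1}

  try:
    load_json_string('not valid json')
  except ssl_crypto.Error:
    # Both TypeError and ValueError are re-raised as ssl_crypto.Error, so
    # callers only need to handle a single exception type.
    pass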
def set_console_log_level(log_level=_DEFAULT_CONSOLE_LOG_LEVEL):
  """
  <Purpose>
    Allow the default log level for console messages to be overridden.  If
    'log_level' is not provided, log level defaults to 'logging.INFO'.

  <Arguments>
    log_level:
      The log level to set for the console handler.
      'log_level' examples:  logging.INFO; logging.CRITICAL.

  <Exceptions>
    ssl_crypto.Error, if the 'log.py' console handler has not been set yet
    with add_console_handler().

  <Side Effects>
    Overrides the logging level for the console handler.

  <Returns>
    None.
  """

  # Does 'log_level' have the correct format?
  # Raise 'ssl_crypto.FormatError' if there is a mismatch.
  ssl_crypto.formats.LOGLEVEL_SCHEMA.check_match(log_level)

  # Assign to the global console_handler object.
  global console_handler

  if console_handler is not None:
    console_handler.setLevel(log_level)

  else:
    message = 'The console handler has not been set with add_console_handler().'
    raise ssl_crypto.Error(message)
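# Example usage of set_console_log_level() -- a sketch.  It assumes a console
# handler was previously attached with add_console_handler():
def _example_set_console_log_level():
  import logging

  # Only log warnings and above to the console from this point on.
  set_console_log_level(logging.WARNING)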
def add_key(key_dict, keyid=None):
  """
  <Purpose>
    Add 'key_dict' to the key database while avoiding duplicates.
    If 'keyid' is provided, verify it is the correct keyid for 'key_dict'
    and raise an exception if it is not.

  <Arguments>
    key_dict:
      A dictionary conformant to 'ssl_crypto.formats.ANYKEY_SCHEMA'.
      It has the form:

      {'keytype': 'rsa',
       'keyid': keyid,
       'keyval': {'public': '-----BEGIN RSA PUBLIC KEY----- ...',
                  'private': '-----BEGIN RSA PRIVATE KEY----- ...'}}

    keyid:
      An object conformant to 'KEYID_SCHEMA'.  It is used as an identifier
      for RSA keys.

  <Exceptions>
    ssl_crypto.FormatError, if 'key_dict' or 'keyid' does not have the
    correct format.

    ssl_crypto.Error, if 'keyid' does not match the keyid for 'key_dict'.

    ssl_crypto.KeyAlreadyExistsError, if 'key_dict' is found in the key
    database.

  <Side Effects>
    The keydb key database is modified.

  <Returns>
    None.
  """

  # Does 'key_dict' have the correct format?
  # This check will ensure 'key_dict' has the appropriate number of objects
  # and object types, and that all dict keys are properly named.
  # Raise 'ssl_crypto.FormatError' if the check fails.
  ssl_crypto.formats.ANYKEY_SCHEMA.check_match(key_dict)

  # Does 'keyid' have the correct format?
  if keyid is not None:
    # Raise 'ssl_crypto.FormatError' if the check fails.
    ssl_crypto.formats.KEYID_SCHEMA.check_match(keyid)

    # Check if the keyid found in 'key_dict' matches 'keyid'.
    if keyid != key_dict['keyid']:
      raise ssl_crypto.Error('Incorrect keyid ' + key_dict['keyid'] +
                             ', expected ' + keyid)

  # Check that the keyid belonging to 'key_dict' is not already in the key
  # database before adding it.
  keyid = key_dict['keyid']
  if keyid in _keydb_dict:
    raise ssl_crypto.KeyAlreadyExistsError('Key: ' + keyid)

  _keydb_dict[keyid] = copy.deepcopy(key_dict)
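# Example usage of add_key() -- a sketch.  'rsa_key' is a hypothetical dict
# conformant to ANYKEY_SCHEMA (a real key would come from the project's
# key-generation routines):
def _example_add_key(rsa_key):
  # Passing an explicit keyid guards against mismatched identifiers.
  add_key(rsa_key, keyid=rsa_key['keyid'])

  try:
    add_key(rsa_key)
  except ssl_crypto.KeyAlreadyExistsError:
    pass  # The key was already added above.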
def _default_temporary_directory(self, prefix):
  """__init__ helper."""

  try:
    self.temporary_file = tempfile.NamedTemporaryFile(prefix=prefix)

  except OSError as err: # pragma: no cover
    logger.critical('Cannot create a system temporary directory: ' + repr(err))
    raise ssl_crypto.Error(err)
def load_json_file(filepath):
  """
  <Purpose>
    Deserialize a JSON object from a file containing the object.

  <Arguments>
    filepath:
      Absolute path of JSON file.

  <Exceptions>
    ssl_crypto.FormatError: If 'filepath' is improperly formatted.

    ssl_crypto.Error: If 'filepath' cannot be deserialized to a Python
    object.

    IOError in case of runtime IO exceptions.

  <Side Effects>
    None.

  <Return>
    Deserialized object.  For example, a dictionary.
  """

  # Making sure that the format of 'filepath' is a path string.
  # ssl_crypto.FormatError is raised on incorrect format.
  ssl_crypto.formats.PATH_SCHEMA.check_match(filepath)

  deserialized_object = None

  # The file is most likely gzipped.
  if filepath.endswith('.gz'):
    logger.debug('gzip.open(' + str(filepath) + ')')
    fileobject = six.StringIO(gzip.open(filepath).read().decode('utf-8'))

  else:
    logger.debug('open(' + str(filepath) + ')')
    fileobject = open(filepath)

  try:
    deserialized_object = json.load(fileobject)

  except (ValueError, TypeError):
    raise ssl_crypto.Error('Cannot deserialize to a Python object: ' +
                           repr(filepath))

  else:
    return deserialized_object

  finally:
    fileobject.close()
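# Example usage of load_json_file() -- a sketch; the paths are hypothetical.
# Plain and gzip-compressed JSON files are handled by the same call:
def _example_load_json_file():
  root_metadata = load_json_file('/path/to/metadata/root.json')
  snapshot_metadata = load_json_file('/path/to/metadata/snapshot.json.gz')
  return root_metadata, snapshot_metadata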
def get_file_details(filepath, hash_algorithms=['sha256']):
  """
  <Purpose>
    Get the length and hash information of a file.  By default the hash is
    computed with the sha256 algorithm.  This function is used in the
    signerlib.py and updater.py modules.

  <Arguments>
    filepath:
      Absolute file path of a file.

    hash_algorithms:
      A list of hash algorithms used to compute the file's hashes,
      conformant to 'ssl_crypto.formats.HASHALGORITHMS_SCHEMA'.  Defaults
      to ['sha256'].

  <Exceptions>
    ssl_crypto.FormatError: If the hashes of the file do not match
    HASHDICT_SCHEMA.

    ssl_crypto.Error: If 'filepath' does not exist.

  <Returns>
    A tuple (length, hashes) describing 'filepath'.
  """

  # Making sure that the format of 'filepath' is a path string.
  # 'ssl_crypto.FormatError' is raised on incorrect format.
  ssl_crypto.formats.PATH_SCHEMA.check_match(filepath)
  ssl_crypto.formats.HASHALGORITHMS_SCHEMA.check_match(hash_algorithms)

  # The returned file hashes of 'filepath'.
  file_hashes = {}

  # Does the path exist?
  if not os.path.exists(filepath):
    raise ssl_crypto.Error('Path ' + repr(filepath) + ' does not exist.')

  filepath = os.path.abspath(filepath)

  # Obtaining length of the file.
  file_length = os.path.getsize(filepath)

  # Obtaining hash of the file.
  for algorithm in hash_algorithms:
    digest_object = ssl_crypto.hash.digest_filename(filepath, algorithm)
    file_hashes.update({algorithm: digest_object.hexdigest()})

  # Performing a format check to ensure 'file_hashes' conforms to
  # HASHDICT_SCHEMA.  Raise 'ssl_crypto.FormatError' if there is a mismatch.
  ssl_crypto.formats.HASHDICT_SCHEMA.check_match(file_hashes)

  return file_length, file_hashes
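# Example usage of get_file_details() -- a sketch; the path is hypothetical.
# Multiple algorithms may be requested, yielding one digest per algorithm:
def _example_get_file_details():
  length, hashes = get_file_details('/path/to/file.txt',
                                    hash_algorithms=['sha256', 'sha512'])
  # 'length' is the file size in bytes; 'hashes' maps each algorithm name
  # to its hex digest, e.g. {'sha256': '...', 'sha512': '...'}.
  return length, hashes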
def verify(signable, role):
  """
  <Purpose>
    Verify whether the authorized signatures of 'signable' meet the minimum
    required by 'role'.  Authorized signatures are those with valid keys
    associated with 'role'.  'signable' must conform to SIGNABLE_SCHEMA, and
    the role's threshold must not be 'None' or less than or equal to zero.

  <Arguments>
    signable:
      A dictionary containing a list of signatures and a 'signed'
      identifier.
      signable = {'signed':, 'signatures': [{'keyid':, 'method':, 'sig':}]}

    role:
      TUF role (e.g., 'root', 'targets', 'snapshot').

  <Exceptions>
    ssl_crypto.UnknownRoleError, if 'role' is not recognized.

    ssl_crypto.FormatError, if 'signable' is not formatted correctly.

    ssl_crypto.Error, if an invalid threshold is encountered.

  <Side Effects>
    ssl_crypto.sig.get_signature_status() called.  Any exceptions thrown by
    get_signature_status() will be caught here and re-raised.

  <Returns>
    Boolean.  True if the number of good signatures >= the role's
    threshold, False otherwise.
  """

  # Retrieve the signature status.  ssl_crypto.sig.get_signature_status()
  # raises:
  # ssl_crypto.UnknownRoleError
  # ssl_crypto.FormatError
  status = get_signature_status(signable, role)

  # Retrieve the role's threshold and the authorized keys of 'status'.
  threshold = status['threshold']
  good_sigs = status['good_sigs']

  # Does 'status' have the required threshold of signatures?
  # First check for invalid threshold values before returning result.
  if threshold is None or threshold <= 0:
    raise ssl_crypto.Error('Invalid threshold: ' + str(threshold))

  return len(good_sigs) >= threshold
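# Example usage of verify() -- a sketch.  'signable' is a hypothetical dict
# conformant to SIGNABLE_SCHEMA, and the 'root' role is assumed to be
# loaded in the role database:
def _example_verify(signable):
  if verify(signable, 'root'):
    logger.info('Threshold of valid root signatures reached.')

  else:
    logger.warning('Not enough valid signatures for root.')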
def make_metadata(version, expiration_date, filedict=None, delegations=None):
  if filedict is None and delegations is None:
    raise ssl_crypto.Error('We don\'t allow completely empty targets metadata.')

  result = {'_type': 'Targets'}
  result['version'] = version
  result['expires'] = expiration_date
  result['targets'] = {}
  if filedict is not None:
    result['targets'] = filedict
  if delegations is not None:
    result['delegations'] = delegations

  # Is 'result' a Targets metadata file?
  # Raise 'ssl_crypto.FormatError' if not.
  TARGETS_SCHEMA.check_match(result)

  return result
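# Example usage of make_metadata() -- a sketch.  The filedict below is
# hypothetical; real entries must conform to the targets filedict format:
def _example_make_metadata():
  filedict = {'file.txt': {'length': 1024,
                           'hashes': {'sha256': 'ab12...'}}}
  return make_metadata(version=1,
                       expiration_date='2030-01-01T00:00:00Z',
                       filedict=filedict)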
def add_verification_key(self, key):
  """
  <Purpose>
    Function as a thin wrapper call for the project._targets call with the
    same name.  This wrapper is only for usability purposes.

  <Arguments>
    key:
      The role key to be added, conformant to
      'ssl_crypto.formats.ANYKEY_SCHEMA'.  Adding a public key to a role
      means that its corresponding private key must generate and add its
      signature to the role.

  <Exceptions>
    ssl_crypto.FormatError, if the 'key' argument is improperly formatted.

    ssl_crypto.Error, if the project already contains a key.

  <Side Effects>
    The role's entries in 'ssl_crypto.keydb.py' and 'ssl_crypto.roledb.py'
    are updated.

  <Returns>
    None.
  """

  # Verify that this role does not already contain a key.  The parent
  # project role is restricted to one key.  Any of its delegated roles may
  # have more than one key.
  # TODO: Add condition check for the requirement stated above.
  if len(self.keys) > 0:
    raise ssl_crypto.Error('This project already contains a key.')

  # ssl_crypto.FormatError is raised by the superclass method if 'key' is
  # improperly formatted.
  super(Project, self).add_verification_key(key)
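# Example usage of Project.add_verification_key() -- a sketch.  'project'
# is assumed to be a Project instance and 'public_key' a dict conformant to
# ANYKEY_SCHEMA:
def _example_add_verification_key(project, public_key):
  project.add_verification_key(public_key)

  # A second call raises ssl_crypto.Error: the top-level project role is
  # restricted to a single key.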
def decompress_temp_file_object(self, compression):
  """
  <Purpose>
    To decompress a compressed temp file object.  Decompression is
    performed on a temp file object that is compressed; this occurs after
    downloading a compressed file.  For instance, if a compressed version
    of some meta file in the repository is downloaded, the temp file
    containing the compressed meta file will be decompressed using this
    function.  Note that after calling this method, write() can no longer
    be called.

                meta.json.gz
                   |...[download]
             temporary_file (containing meta.json.gz)
              /                        \
      temporary_file                _orig_file
      containing meta.json          containing meta.json.gz
      (decompressed data)

  <Arguments>
    compression:
      A string indicating the type of compression that was used to compress
      a file.  Only gzip is allowed.

  <Exceptions>
    ssl_crypto.FormatError: If 'compression' is improperly formatted.

    ssl_crypto.Error: If an invalid compression is given.

    ssl_crypto.DecompressionError: If the compression failed for any reason.

  <Side Effects>
    'self._orig_file' is used to store the original data of
    'temporary_file'.

  <Return>
    None.
  """

  # Does 'compression' have the correct format?
  # Raise 'ssl_crypto.FormatError' if there is a mismatch.
  ssl_crypto.formats.NAME_SCHEMA.check_match(compression)

  if self._orig_file is not None:
    raise ssl_crypto.Error('Can only set compression on a TempFile once.')

  if compression != 'gzip':
    raise ssl_crypto.Error('Only gzip compression is supported.')

  self.seek(0)
  self._compression = compression
  self._orig_file = self.temporary_file

  try:
    gzip_file_object = gzip.GzipFile(fileobj=self.temporary_file, mode='rb')
    uncompressed_content = gzip_file_object.read()
    self.temporary_file = tempfile.NamedTemporaryFile()
    self.temporary_file.write(uncompressed_content)
    self.flush()

  except Exception as exception:
    raise ssl_crypto.DecompressionError(exception)
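# Example usage of decompress_temp_file_object() -- a sketch.  'temp_file'
# is assumed to be an instance of this class whose contents were downloaded
# as gzip-compressed metadata:
def _example_decompress(temp_file):
  temp_file.decompress_temp_file_object('gzip')
  # After this call, reads return the decompressed bytes, and write() must
  # no longer be called on 'temp_file'.
  return temp_file.read()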
def get_list_of_mirrors(file_type, file_path, mirrors_dict):
  """
  <Purpose>
    Get a list of mirror urls from a mirrors dictionary, provided the type
    and the path of the file with respect to the base url.

  <Arguments>
    file_type:
      Type of data needed for download; must correspond to one of the
      strings in the list ['meta', 'target'].  'meta' for metadata file
      type or 'target' for target file type.  It should correspond to
      NAME_SCHEMA format.

    file_path:
      A relative path to the file that corresponds to RELPATH_SCHEMA
      format.  Ex: 'http://url_prefix/targets_path/file_path'

    mirrors_dict:
      A mirrors_dict object that corresponds to MIRRORDICT_SCHEMA, where
      keys are strings and values are MIRROR_SCHEMA.  An example format of
      MIRROR_SCHEMA:

      {'url_prefix': 'http://localhost:8001',
       'metadata_path': 'metadata/',
       'targets_path': 'targets/',
       'confined_target_dirs': ['targets/snapshot1/', ...],
       'custom': {...}}

      The 'custom' field is optional.

  <Exceptions>
    ssl_crypto.Error, on unsupported 'file_type'.

    ssl_crypto.FormatError, on bad argument.

  <Return>
    List of mirror urls corresponding to the file_type and file_path.  If
    no match is found, an empty list is returned.
  """

  # Checking if all the arguments have appropriate format.
  ssl_crypto.formats.RELPATH_SCHEMA.check_match(file_path)
  ssl_crypto.formats.MIRRORDICT_SCHEMA.check_match(mirrors_dict)
  ssl_crypto.formats.NAME_SCHEMA.check_match(file_type)

  # Verify 'file_type' is supported.
  if file_type not in _SUPPORTED_FILE_TYPES:
    message = ('Invalid file_type argument.  Supported file types: ' +
               repr(_SUPPORTED_FILE_TYPES))
    raise ssl_crypto.Error(message)

  # Reference to 'ssl_crypto.util.file_in_confined_directories()' (improve
  # readability).  This function checks whether a mirror should serve a
  # file to the client.  A client may be confined to certain paths on a
  # repository mirror when fetching target files.  This field may be set by
  # the client when the repository mirror is added to the
  # 'ssl_crypto.client.updater.Updater' object.
  in_confined_directory = ssl_crypto.util.file_in_confined_directories

  list_of_mirrors = []
  for mirror_name, mirror_info in six.iteritems(mirrors_dict):
    if file_type == 'meta':
      base = mirror_info['url_prefix'] + '/' + mirror_info['metadata_path']

    # 'file_type' == 'target'.  'file_type' should have been verified to
    # contain a supported string value above (either 'meta' or 'target').
    else:
      targets_path = mirror_info['targets_path']
      full_filepath = os.path.join(targets_path, file_path)
      if not in_confined_directory(full_filepath,
                                   mirror_info['confined_target_dirs']):
        continue

      base = mirror_info['url_prefix'] + '/' + mirror_info['targets_path']

    # urllib.quote(string) replaces special characters in string using the
    # %xx escape.  This is done to avoid parsing issues of the URL on the
    # server side.  Do *NOT* pass URLs with Unicode characters without
    # first encoding the URL as UTF-8.  We need a long-term solution with
    # #61.  http://bugs.python.org/issue1712522
    # Quote into a separate variable so that 'file_path' is not re-quoted
    # (and thus double-escaped) on subsequent loop iterations.
    quoted_path = six.moves.urllib.parse.quote(file_path)
    url = base + '/' + quoted_path.lstrip(os.sep)

    list_of_mirrors.append(url)

  return list_of_mirrors
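# Example usage of get_list_of_mirrors() -- a sketch with a hypothetical
# single-mirror configuration:
def _example_get_list_of_mirrors():
  mirrors = {'mirror1': {'url_prefix': 'http://localhost:8001',
                         'metadata_path': 'metadata',
                         'targets_path': 'targets',
                         'confined_target_dirs': ['']}}

  # Returns ['http://localhost:8001/metadata/root.json'].
  return get_list_of_mirrors('meta', 'root.json', mirrors)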
def _generate_and_write_metadata(rolename, metadata_filename, write_partial,
                                 targets_directory, metadata_directory,
                                 filenames=None, prefix=''):
  """
  Non-public function that can generate and write the metadata of the
  specified 'rolename'.  It also increments version numbers if:

  1.  write_partial==True and the metadata is the first to be written.

  2.  write_partial==False (i.e., write()), the metadata was not loaded as
      partially written, and a write_partial is not needed.
  """

  metadata = None

  # Retrieve the roleinfo of 'rolename' to extract the needed metadata
  # attributes, such as version number, expiration, etc.
  roleinfo = ssl_crypto.roledb.get_roleinfo(rolename)

  metadata = generate_targets_metadata(targets_directory, roleinfo['paths'],
                                       roleinfo['version'],
                                       roleinfo['expires'],
                                       roleinfo['delegations'], False)

  # Prepend the prefix to the project's filepath to avoid signature errors
  # in upstream.
  for element in list(metadata['targets']):
    junk_path, relative_target = os.path.split(element)
    prefixed_path = os.path.join(prefix, relative_target)
    metadata['targets'][prefixed_path] = metadata['targets'][element]
    if prefix != '':
      del metadata['targets'][element]

  signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                           metadata_filename)

  # Check if the version number of 'rolename' may be automatically
  # incremented, depending on whether partial metadata is loaded or if the
  # metadata is written with write() / write_partial().
  # Increment the version number if this is the first partial write.
  if write_partial:
    temp_signable = sign_metadata(metadata, [], metadata_filename)
    temp_signable['signatures'].extend(roleinfo['signatures'])
    status = ssl_crypto.sig.get_signature_status(temp_signable, rolename)
    if len(status['good_sigs']) == 0:
      metadata['version'] = metadata['version'] + 1
      signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                               metadata_filename)

  # non-partial write()
  else:
    if ssl_crypto.sig.verify(signable, rolename): #and not roleinfo['partial_loaded']:
      metadata['version'] = metadata['version'] + 1
      signable = sign_metadata(metadata, roleinfo['signing_keyids'],
                               metadata_filename)

  # Write the metadata to file if it contains a threshold of signatures.
  signable['signatures'].extend(roleinfo['signatures'])

  if ssl_crypto.sig.verify(signable, rolename) or write_partial:
    _remove_invalid_and_duplicate_signatures(signable)
    compressions = roleinfo['compressions']
    filename = write_metadata_file(signable, metadata_filename,
                                   metadata['version'], compressions, False)

  # 'signable' contains an invalid threshold of signatures.
  else:
    message = 'Not enough signatures for ' + repr(metadata_filename)
    raise ssl_crypto.Error(message, signable)

  return signable, filename
# Python <=2.4 does not have the hashlib module by default.
# Let's try importing hashlib and adding it to our supported list.
try:
  import hashlib
  _supported_libraries.append('hashlib')

except ImportError: # pragma: no cover
  logger.debug('Hashlib could not be imported.  '
               'Supported libraries: ' + str(_SUPPORTED_LIB_LIST))
  pass

# Were we able to import any hash libraries?
if not _supported_libraries: # pragma: no cover
  # This is fatal, we'll have no way of generating hashes.
  raise ssl_crypto.Error('Unable to import a hash library from the '
                         'following supported list: ' + str(_SUPPORTED_LIB_LIST))


_DEFAULT_HASH_ALGORITHM = 'sha256'
_DEFAULT_HASH_LIBRARY = 'hashlib'


def digest(algorithm=_DEFAULT_HASH_ALGORITHM,
           hash_library=_DEFAULT_HASH_LIBRARY):
  """
  <Purpose>
    Provide the caller with the ability to create digest objects without
    having to worry about hash library availability or which library to
    use.  The caller also has the option of specifying which hash algorithm
    and/or library to use.
  """
def add_role(rolename, roleinfo, require_parent=True):
  """
  <Purpose>
    Add to the role database the 'roleinfo' associated with 'rolename'.

  <Arguments>
    rolename:
      An object representing the role's name, conformant to
      'ROLENAME_SCHEMA' (e.g., 'root', 'snapshot', 'timestamp').

    roleinfo:
      An object representing the role associated with 'rolename',
      conformant to ROLEDB_SCHEMA.  'roleinfo' has the form:

      {'keyids': ['34345df32093bd12...'],
       'threshold': 1,
       'signatures': ['ab23dfc32'],
       'paths': ['path/to/target1', 'path/to/target2', ...],
       'path_hash_prefixes': ['a324fcd...', ...],
       'delegations': {'keys': {...}, 'roles': [...]}}

      The 'paths', 'path_hash_prefixes', and 'delegations' dict keys are
      optional.

      The 'targets' role has an additional 'paths' key.  Its value is a
      list of strings representing the path of the target file(s).

    require_parent:
      A boolean indicating whether to check for a delegating role.
      add_role() will raise an exception if this parent role does not
      exist.

  <Exceptions>
    ssl_crypto.FormatError, if 'rolename' or 'roleinfo' does not have the
    correct object format.

    ssl_crypto.RoleAlreadyExistsError, if 'rolename' has already been
    added.

    ssl_crypto.InvalidNameError, if 'rolename' is improperly formatted.

  <Side Effects>
    The role database is modified.

  <Returns>
    None.
  """

  # Does 'rolename' have the correct object format?
  # This check will ensure 'rolename' has the appropriate number of objects
  # and object types, and that all dict keys are properly named.
  ssl_crypto.formats.ROLENAME_SCHEMA.check_match(rolename)

  # Does 'roleinfo' have the correct object format?
  ssl_crypto.formats.ROLEDB_SCHEMA.check_match(roleinfo)

  # Does 'require_parent' have the correct format?
  ssl_crypto.formats.BOOLEAN_SCHEMA.check_match(require_parent)

  # Raises ssl_crypto.InvalidNameError.
  _validate_rolename(rolename)

  if rolename in _roledb_dict:
    raise ssl_crypto.RoleAlreadyExistsError('Role already exists: ' + rolename)

  # Make sure that the delegating role exists.  This should be just a
  # sanity check and not a security measure.
  if require_parent and '/' in rolename:
    # Get parent role.  'a/b/c/d' --> 'a/b/c'.
    parent_role = '/'.join(rolename.split('/')[:-1])

    if parent_role not in _roledb_dict:
      raise ssl_crypto.Error('Parent role does not exist: ' + parent_role)

  _roledb_dict[rolename] = copy.deepcopy(roleinfo)
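# Example usage of add_role() -- a sketch.  The roleinfo below is minimal
# and hypothetical; a real roleinfo must conform to ROLEDB_SCHEMA:
def _example_add_role():
  roleinfo = {'keyids': ['34345df32093bd12...'], 'threshold': 1}
  add_role('targets', roleinfo)

  # Delegated role names include the delegating parent, which must already
  # exist when require_parent is True (the default):
  add_role('targets/unclaimed', roleinfo)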