def update():
    """Refresh TUF metadata from the configured mirror and download any
    updated target files into ``./targets``.

    Requires the ``tuf`` package to be imported at module level and a local
    ``metadata`` directory (containing ``current`` and ``previous``) under
    the repository directory.  Failed downloads are skipped best-effort.
    """
    # Set the local repository directory containing the metadata files.
    # BUG FIX: the path must be a raw string -- the original
    # 'C:\Users\...' contains '\U' and '\D', which are invalid escape
    # sequences (a SyntaxError on Python 3).
    tuf.conf.repository_directory = (
        r'C:\Users\Sarah\Documents\GitHub\TUFeverywhere\Project')

    repository_mirrors = {'mirror1': {'url_prefix': 'http://172.16.42.2:8001',
                                      'metadata_path': 'metadata',
                                      'targets_path': 'targets',
                                      'confined_target_dirs': ['']}}

    # Create the Updater object using the updater name 'tuf-example'
    # and the repository mirrors defined above.
    updater = tuf.client.updater.Updater('tuf-example', repository_mirrors)

    # Set the local destination directory to save the target files.
    destination_directory = './targets'

    # Refresh the repository's top-level roles, store the target information
    # for all the targets tracked, and determine which of these targets have
    # been updated.
    updater.refresh()
    all_targets = updater.all_targets()
    updated_targets = updater.updated_targets(all_targets,
                                              destination_directory)

    # Download each of these updated targets and save them locally.
    for target in updated_targets:
        try:
            updater.download_target(target, destination_directory)
        # BUG FIX: 'except tuf.DownloadError, e' is Python 2-only syntax;
        # the bound name was unused anyway, so drop it.
        except tuf.DownloadError:
            # Best-effort: a single failed download should not abort the
            # whole update.
            pass
def list_targets(self):
    """
    Return the names of all the targets defined in the local copy of
    the TUF metadata.

    :returns: List of target names
    :rtype: list
    """
    import tuf.client.updater
    tuf_updater = tuf.client.updater.Updater('repository',
                                             self._repository_mirrors)
    names = []
    for target in tuf_updater.all_targets():
        # Drop the leading '/' that TUF prefixes onto every filepath.
        names.append(target['filepath'][1:])
    return names
def list_targets(self):
    """
    Return the names of all the targets defined in the local copy of
    the TUF metadata.

    :returns: List of target names
    :rtype: list
    """
    import tuf.client.updater
    tuf_updater = tuf.client.updater.Updater('updater',
                                             self._repository_mirrors)
    # Each TUF filepath starts with '/'; strip it to get the bare name.
    return [entry['filepath'][1:] for entry in tuf_updater.all_targets()]
def run(self):
    """Check for updates in a loop, sleeping ``self.delay`` seconds
    between attempts.

    On a successful update the downloaded targets are moved from
    ``self.dest_path`` to ``self.update_path``, the
    ``catalog.UPDATER_NEW_UPDATES`` event is emitted with a
    comma-separated list of updated file paths, and the loop exits.
    Unexpected errors are logged and the loop retries.
    """
    # Nothing to do without configured mirrors.
    if not self.mirrors:
        return
    while True:
        try:
            tuf.conf.repository_directory = os.path.join(self.bundle_path,
                                                         'repo')
            updater = tuf.client.updater.Updater('leap-updater',
                                                 self.mirrors)
            updater.refresh()
            targets = updater.all_targets()
            updated_targets = updater.updated_targets(targets,
                                                      self.source_path)
            if updated_targets:
                logger.info("There is updates needed. Start downloading "
                            "updates.")
            for target in updated_targets:
                updater.download_target(target, self.dest_path)
                self._set_permissions(target)
            if os.path.isdir(self.dest_path):
                if os.path.isdir(self.update_path):
                    # Replace any previously staged update.
                    shutil.rmtree(self.update_path)
                shutil.move(self.dest_path, self.update_path)
                filepath = sorted([f['filepath'] for f in updated_targets])
                emit(catalog.UPDATER_NEW_UPDATES, ", ".join(filepath))
                logger.info("Updates ready: %s" % (filepath,))
                return
        except NotImplementedError as e:
            # BUG FIX: the original caught ``NotImplemented`` -- that is a
            # constant (the rich-comparison sentinel), not an exception
            # class, so the handler would raise TypeError instead of
            # matching anything.
            logger.error("NotImplemented: %s" % (e,))
            return
        except Exception as e:
            logger.error("An unexpected error has occurred while "
                         "updating: %s" % (e,))
        finally:
            time.sleep(self.delay)
def run(self): """ Check for updates periodically """ if not self.mirrors: return while True: try: tuf.conf.repository_directory = os.path.join( self.bundle_path, 'repo') updater = tuf.client.updater.Updater('leap-updater', self.mirrors) updater.refresh() targets = updater.all_targets() updated_targets = updater.updated_targets( targets, self.source_path) if updated_targets: print "There is updates needed. Start downloading updates." for target in updated_targets: updater.download_target(target, self.dest_path) self._set_permissions(target) if os.path.isdir(self.dest_path): if os.path.isdir(self.update_path): shutil.rmtree(self.update_path) shutil.move(self.dest_path, self.update_path) filepath = sorted([f['filepath'] for f in updated_targets]) signal(proto.UPDATER_NEW_UPDATES, content=", ".join(filepath)) print "Updates ready: ", filepath return except NotImplemented as e: print "NotImplemented: ", e return except Exception as e: print "ERROR:", e finally: time.sleep(self.delay)
def update_client(repository_mirror):
    """
    <Purpose>
      Perform an update of the metadata and target files located at
      'repository_mirror'.  Target files are saved to the 'targets' directory
      in the current working directory.  The current directory must already
      include a 'metadata' directory, which in turn must contain the 'current'
      and 'previous' directories.  At a minimum, these two directories require
      the 'root.json' metadata file.

    <Arguments>
      repository_mirror:
        The URL to the repository mirror hosting the metadata and target
        files.  E.g., 'http://localhost:8001'

    <Exceptions>
      tuf.RepositoryError, if 'repository_mirror' is improperly formatted.

    <Side Effects>
      Connects to a repository mirror and updates the metadata files and any
      target files.  Obsolete targets are also removed locally.

    <Returns>
      None.
    """
    # Does 'repository_mirror' have the correct format?
    try:
        tuf.formats.URL_SCHEMA.check_match(repository_mirror)
    except tuf.FormatError as e:
        message = 'The repository mirror supplied is invalid.'
        # IMPROVEMENT: chain the original FormatError so the root cause is
        # preserved in the traceback instead of being discarded.
        raise tuf.RepositoryError(message) from e

    # Set the local repository directory containing all of the metadata files.
    tuf.conf.repository_directory = '.'

    # Set the repository mirrors.  This dictionary is needed by the Updater
    # class of updater.py.
    repository_mirrors = {'mirror': {'url_prefix': repository_mirror,
                                     'metadata_path': 'metadata',
                                     'targets_path': 'targets',
                                     'confined_target_dirs': ['']}}

    # Create the repository object using the repository name 'repository'
    # and the repository mirrors defined above.
    updater = tuf.client.updater.Updater('repository', repository_mirrors)

    # The local destination directory to save the target files.
    destination_directory = './targets'

    # Refresh the repository's top-level roles, store the target information
    # for all the targets tracked, and determine which of these targets have
    # been updated.
    updater.refresh()
    all_targets = updater.all_targets()
    updated_targets = updater.updated_targets(all_targets,
                                              destination_directory)

    # Download each of these updated targets and save them locally.
    for target in updated_targets:
        try:
            updater.download_target(target, destination_directory)
        # IMPROVEMENT: the bound exception was unused; drop the binding.
        except tuf.DownloadError:
            # Best-effort: one failed target should not abort the update.
            pass

    # Remove any files from the destination directory that are no longer
    # being tracked.
    updater.remove_obsolete_targets(destination_directory)
'metadata_path': 'metadata', 'targets_path': 'targets', 'confined_target_dirs': ['']}} # Create the Upater object using the updater name 'tuf-example' # and the repository mirrors defined above. updater = tuf.client.updater.Updater('tuf-example', repository_mirrors) # Set the local destination directory to save the target files. destination_directory = './targets' # Refresh the repository's top-level roles, store the target information for # all the targets tracked, and determine which of these targets have been # updated. updater.refresh() all_targets = updater.all_targets() updated_targets = updater.updated_targets(all_targets, destination_directory) # Download each of these updated targets and save them locally. for target in updated_targets: try: updater.download_target(target, destination_directory) except tuf.DownloadError, e: pass # Remove any files from the destination directory that are no longer being # tracked. updater.remove_obsolete_targets(destination_directory) """
def pull_metadata(self, root_public_key=None, progress=None):
    """
    Download TUF metadata from the repository.

    The metadata is checked for expiry and verified against the root
    public key for the repository.

    You only need to supply the root public key once, and you should
    obtain it from the person who uploaded the metadata.

    Target data is not downloaded - use :meth:`pull_target` for that.

    :param root_public_key: PEM-encoded root public key. Obtain this from
        the repository's owner, who generates the key using
        :meth:`DTufMaster.create_root_key` on the repository master.
    :type root_public_key: str

    :param progress: Optional function to call as the download progresses.
        The function will be called with the hash of the metadata currently
        being download, the blob just read from the repository and the
        total size of the metadata.
    :type progress: function(dgst, chunk, total)

    :returns: List of targets which have been updated since you last
        downloaded them (using :meth:`pull_target`).
    :rtype: list
    """
    # Imports are function-local to keep TUF's module-level state out of
    # processes that never pull metadata.
    import tuf.keydb
    import tuf.roledb
    import tuf.client.updater
    import tuf.formats
    import tuf.sig
    from tuf.exceptions import BadSignatureError
    # Module-level hook read by the download callback to report progress.
    # pylint: disable=global-statement
    global _updater_progress
    _updater_progress = progress
    # TUF expects 'metadata/current' and 'metadata/previous' to exist
    # before the Updater is constructed; create them idempotently.
    for d in ['current', 'previous']:
        try:
            makedirs(path.join(self._copy_repo_dir, 'metadata', d))
        except OSError as exception:
            import errno
            # Already-existing directories are fine; re-raise anything else.
            if exception.errno != errno.EEXIST:
                raise
    # If root public key was passed, we shouldn't rely on the current
    # version but instead retrieve a new one and verify it using
    # the public key.
    if root_public_key:
        # Resolve the blob digest that the 'root.json' alias points at.
        dgst = self._dxf.get_alias('root.json')[0]
        temp_file = securesystemslib.util.TempFile()
        try:
            it, size = self._dxf.pull_blob(dgst, size=True)
            _write_with_progress(it, dgst, size, temp_file, progress)
            metadata = temp_file.read()
            metadata_signable = json.loads(metadata.decode('utf-8'))
            # Structural validation before any cryptographic checks.
            tuf.formats.check_signable_object_format(metadata_signable)
            # Reuse the Updater's private expiry check without building an
            # Updater instance (hence the unbound call with None as self).
            # pylint: disable=protected-access
            f = tuf.client.updater.Updater._ensure_not_expired
            f = getattr(f, '__func__', f)
            f(None, metadata_signable['signed'], 'root')
            # This metadata is claiming to be root.json
            # Get the keyid of the signature and use it to add the root
            # public key to the keydb. Thus when we verify the signature
            # the root public key will be used for verification.
            keyid = metadata_signable['signatures'][0]['keyid']
            tuf.keydb.add_key(
                {
                    'keytype': 'rsa',
                    'scheme':
                        metadata_signable['signed']['keys'][keyid]['scheme'],
                    'keyid': keyid,
                    'keyval': {'public': root_public_key}
                }, keyid)
            tuf.roledb.add_role('root', {
                'keyids': [keyid],
                'threshold': 1
            })
            # Verify the signature with the key registered above.
            if not tuf.sig.verify(metadata_signable, 'root'):
                raise BadSignatureError('root')
            # Only a verified root.json is promoted into 'current'.
            temp_file.move(
                path.join(self._copy_repo_dir, 'metadata', 'current',
                          'root.json'))
        except:
            # Deliberate bare except: clean up the temp file, then
            # re-raise the original exception unchanged.
            temp_file.close_temp_file()
            raise
    updater = tuf.client.updater.Updater('repository',
                                         self._repository_mirrors)
    # refresh(False): do not download referenced targets, metadata only.
    updater.refresh(False)
    targets = updater.all_targets()
    updated_targets = updater.updated_targets(targets,
                                              self._copy_targets_dir)
    if path.isdir(self._copy_targets_dir):
        # Build a lookup of tracked target names ('/'-prefix stripped) and
        # delete local files that are no longer tracked by the metadata.
        targets = dict([(t['filepath'][1:], True) for t in targets])
        for t in listdir(self._copy_targets_dir):
            if t not in targets:
                remove(path.join(self._copy_targets_dir, t))
    # Return the names of targets whose local copies are stale.
    return [t['filepath'][1:] for t in updated_targets]
'confined_target_dirs': [''] } } # Create the repository object using the repository name 'repository' # and the repository mirrors defined above. updater = tuf.client.updater.Updater('repository', repository_mirrors) # The local destination directory to save the target files. destination_directory = './targets' # Refresh the repository's top-level roles, store the target information for # all the targets tracked, and determine which of these targets have been # updated. updater.refresh() all_targets = updater.all_targets() updated_targets = updater.updated_targets(all_targets, destination_directory) # Download each of these updated targets and save them locally. for target in updated_targets: try: updater.download_target(target, destination_directory) except tuf.DownloadError, e: pass # Remove any files from the destination directory that are no longer being # tracked. updater.remove_obsolete_targets(destination_directory)
def pull_metadata(self, root_public_key=None, progress=None):
    """
    Download TUF metadata from the repository.

    The metadata is checked for expiry and verified against the root
    public key for the repository.

    You only need to supply the root public key once, and you should
    obtain it from the person who uploaded the metadata.

    Target data is not downloaded - use :meth:`pull_target` for that.

    :param root_public_key: PEM-encoded root public key. Obtain this from
        the repository's owner, who generates the key using
        :meth:`DTufMaster.create_root_key` on the repository master.
    :type root_public_key: str

    :param progress: Optional function to call as the download progresses.
        The function will be called with the hash of the metadata currently
        being download, the blob just read from the repository and the
        total size of the metadata.
    :type progress: function(dgst, chunk, total)

    :returns: List of targets which have been updated since you last
        downloaded them (using :meth:`pull_target`).
    :rtype: list
    """
    # Imports are function-local to keep TUF's module-level state out of
    # processes that never pull metadata.
    import tuf.keydb
    import tuf.roledb
    import tuf.client.updater
    import tuf.util
    import tuf.formats
    import tuf.sig
    from tuf import BadSignatureError
    # Module-level hook read by the download callback to report progress.
    # pylint: disable=global-statement
    global _updater_progress
    _updater_progress = progress
    # TUF expects 'metadata/current' and 'metadata/previous' to exist
    # before the Updater is constructed; create them idempotently.
    for d in ['current', 'previous']:
        try:
            makedirs(path.join(self._copy_repo_dir, 'metadata', d))
        except OSError as exception:
            import errno
            # Already-existing directories are fine; re-raise anything else.
            if exception.errno != errno.EEXIST:
                raise
    # If root public key was passed, we shouldn't rely on the current
    # version but instead retrieve a new one and verify it using
    # the public key.
    if root_public_key:
        # Resolve the blob digest that the 'root.json' alias points at.
        dgst = self._dxf.get_alias('root.json')[0]
        temp_file = tuf.util.TempFile()
        try:
            it, size = self._dxf.pull_blob(dgst, size=True)
            _write_with_progress(it, dgst, size, temp_file, progress)
            metadata = temp_file.read()
            metadata_signable = json.loads(metadata.decode('utf-8'))
            # Structural validation before any cryptographic checks.
            tuf.formats.check_signable_object_format(metadata_signable)
            # Reuse the Updater's private expiry check without building an
            # Updater instance (hence the unbound call with None as self).
            # pylint: disable=protected-access
            f = tuf.client.updater.Updater._ensure_not_expired
            f = getattr(f, '__func__', f)
            f(None, metadata_signable['signed'], 'root')
            # This metadata is claiming to be root.json
            # Get the keyid of the signature and use it to add the root
            # public key to the keydb. Thus when we verify the signature
            # the root public key will be used for verification.
            keyid = metadata_signable['signatures'][0]['keyid']
            tuf.keydb.add_key({
                'keytype': 'rsa',
                'keyid': keyid,
                'keyval': {'public': root_public_key}
            }, keyid)
            tuf.roledb.add_role('root', {
                'keyids': [keyid],
                'threshold': 1
            })
            # Verify the signature with the key registered above.
            if not tuf.sig.verify(metadata_signable, 'root'):
                raise BadSignatureError('root')
            # Only a verified root.json is promoted into 'current'.
            temp_file.move(path.join(self._copy_repo_dir,
                                     'metadata',
                                     'current',
                                     'root.json'))
        except:
            # Deliberate bare except: clean up the temp file, then
            # re-raise the original exception unchanged.
            temp_file.close_temp_file()
            raise
    updater = tuf.client.updater.Updater('updater',
                                         self._repository_mirrors)
    # refresh(False): do not download referenced targets, metadata only.
    updater.refresh(False)
    targets = updater.all_targets()
    updated_targets = updater.updated_targets(
        targets, self._copy_targets_dir)
    if path.isdir(self._copy_targets_dir):
        # Build a lookup of tracked target names ('/'-prefix stripped) and
        # delete local files that are no longer tracked by the metadata.
        # pylint: disable=redefined-variable-type
        targets = dict([(t['filepath'][1:], True) for t in targets])
        for t in listdir(self._copy_targets_dir):
            if t not in targets:
                remove(path.join(self._copy_targets_dir, t))
    # Return the names of targets whose local copies are stale.
    return [t['filepath'][1:] for t in updated_targets]
def update_client(repository_mirror):
    """Synchronize local metadata and target files from a TUF mirror.

    Target files are written to the 'targets' directory in the current
    working directory, and targets no longer tracked by the metadata are
    deleted locally.  The current directory must already contain a
    'metadata' directory with 'current' and 'previous' subdirectories,
    each holding at least 'root.json'.

    :param repository_mirror: URL of the mirror hosting the metadata and
        target files, e.g. 'http://localhost:8001'.
    :raises tuf.RepositoryError: if 'repository_mirror' is improperly
        formatted.
    :returns: None.
    """
    # Validate the mirror URL format before touching anything on disk.
    try:
        tuf.formats.URL_SCHEMA.check_match(repository_mirror)
    except tuf.FormatError:
        raise tuf.RepositoryError('The repository mirror supplied is invalid.')

    # All metadata lives under the current working directory.
    tuf.conf.repository_directory = '.'

    # Mirror configuration consumed by the Updater class.
    mirror_config = {
        'mirror': {
            'url_prefix': repository_mirror,
            'metadata_path': 'repository',
            'targets_path': 'repository/targets',
            'confined_target_dirs': [''],
        },
    }

    # Build the client for the repository named 'repository'.
    updater = tuf.client.updater.Updater('repository', mirror_config)

    # Local directory that receives downloaded target files.
    dest_dir = './targets'

    # Refresh the top-level roles, then work out which tracked targets
    # differ from what we have locally.
    updater.refresh()
    stale_targets = updater.updated_targets(updater.all_targets(), dest_dir)

    # Fetch each out-of-date target; a failed download is skipped rather
    # than aborting the whole update.
    for stale in stale_targets:
        try:
            updater.download_target(stale, dest_dir)
        except tuf.DownloadError:
            continue

    # Finally, purge local files the metadata no longer tracks.
    updater.remove_obsolete_targets(dest_dir)