def del_target(self, target):
    """
    Delete a target (data) from the repository and update the local TUF
    metadata. The metadata isn't updated on the registry until you call
    :meth:`push_metadata`. Note that the registry doesn't support deletes
    yet so expect an error.

    :param target: The name you gave to the data when it was uploaded using :meth:`push_target`.
    :type target: str
    """
    from tuf.repository_tool import Repository
    # Read the target's manifest so we know which blobs it points at.
    manifest_path = path.join(self._master_targets_dir, target)
    with open(manifest_path, 'rb') as manifest_file:
        manifest_bytes = manifest_file.read()
    manifest_digest = hash_bytes(manifest_bytes)
    # Remove the target manifest file itself.
    remove(manifest_path)
    # Remove any consistent-snapshot links that refer to this target
    # (filenames are the target name prefixed with a digest).
    for link in Repository.get_filepaths_in_directory(self._master_targets_dir):
        _, link_name = path.split(link)
        if link_name[_skip_consistent_target_digest(link_name):] == target:
            remove(link)
    # Delete every blob the manifest references, then the manifest blob.
    for blob_digest in self._dxf.get_alias(manifest=manifest_bytes.decode('utf-8')):
        self._dxf.del_blob(blob_digest)
    self._dxf.del_blob(manifest_digest)
def del_target(self, target):
    """
    Delete a target (data) from the repository and update the local TUF
    metadata. The metadata isn't updated on the registry until you call
    :meth:`push_metadata`. Note that the registry doesn't support deletes
    yet so expect an error.

    :param target: The name you gave to the data when it was uploaded using :meth:`push_target`.
    :type target: str
    """
    from tuf.repository_tool import Repository
    # Load the manifest for this target and remember its digest before
    # deleting anything.
    target_manifest = path.join(self._master_targets_dir, target)
    with open(target_manifest, 'rb') as fobj:
        content = fobj.read()
    content_dgst = hash_bytes(content)
    remove(target_manifest)
    # Drop every consistent-snapshot link whose (digest-stripped) name
    # matches this target.
    skip = _skip_consistent_target_digest
    for entry in Repository.get_filepaths_in_directory(self._master_targets_dir):
        name = path.split(entry)[1]
        if name[skip(name):] == target:
            remove(entry)
    # Delete the blobs listed in the manifest, followed by the manifest
    # blob itself.
    for dgst in self._dxf.get_alias(manifest=content.decode('utf-8')):
        self._dxf.del_blob(dgst)
    self._dxf.del_blob(content_dgst)
def push_metadata(self,
                  targets_key_password=None,
                  snapshot_key_password=None,
                  timestamp_key_password=None,
                  progress=None):
    """
    Upload local TUF metadata to the repository.

    The TUF metadata consists of a list of targets (which were uploaded by
    :meth:`push_target`), a snapshot of the state of the metadata (list of
    hashes), a timestamp and a list of public keys.

    This function signs the metadata except for the list of public keys,
    so you'll need to supply the password to the respective private keys.

    The list of public keys was signed (along with the rest of the metadata)
    with the root private key when you called :meth:`create_metadata`
    (or :meth:`reset_keys`).

    :param targets_key_password: Password to use for decrypting the TUF targets private key. You'll be prompted for one if you don't supply it.
    :type targets_key_password: str

    :param snapshot_key_password: Password to use for decrypting the TUF snapshot private key. You'll be prompted for one if you don't supply it.
    :type snapshot_key_password: str

    :param timestamp_key_password: Password to use for decrypting the TUF timestamp private key. You'll be prompted for one if you don't supply it.
    :type timestamp_key_password: str

    :param progress: Optional function to call as the upload progresses. The function will be called with the hash of the content of the file currently being uploaded, the blob just read from the file and the total size of the file.
    :type progress: function(dgst, chunk, total)
    """
    from tuf.repository_tool import load_repository, \
                                    Repository, \
                                    import_rsa_privatekey_from_file

    def _load_signing_key(role, name, key_file, password):
        # The TUF tool prompts on stdin when password is None, so tell the
        # user which key the prompt is for.
        if password is None:
            print('importing {} key...'.format(name))
        role.load_signing_key(
            import_rsa_privatekey_from_file(key_file, password))

    # Load repository object
    repository = load_repository(self._master_repo_dir)
    # pylint: disable=no-member

    # Replace the target list with exactly what's on disk, stripping the
    # consistent-snapshot digest prefix from each filename.
    repository.targets.clear_targets()
    repository.targets.add_targets([
        _strip_consistent_target_digest(f)
        for f in Repository.get_filepaths_in_directory(self._master_targets_dir)])

    # Update expirations
    repository.targets.expiration = datetime.now() + self._targets_lifetime
    repository.snapshot.expiration = datetime.now() + self._snapshot_lifetime
    repository.timestamp.expiration = datetime.now() + self._timestamp_lifetime

    # Load and attach the signing key for each role.
    _load_signing_key(repository.targets, 'targets',
                      self._targets_key_file, targets_key_password)
    _load_signing_key(repository.snapshot, 'snapshot',
                      self._snapshot_key_file, snapshot_key_password)
    _load_signing_key(repository.timestamp, 'timestamp',
                      self._timestamp_key_file, timestamp_key_password)

    # Write signed metadata with consistent-snapshot (version-prefixed)
    # filenames into the staged directory.
    repository.write('targets', consistent_snapshot=True)
    repository.write('snapshot', consistent_snapshot=True)
    repository.write('timestamp', consistent_snapshot=True)

    # Upload root.json and timestamp.json without version prefix so clients
    # can find them without knowing the current version numbers.
    for f in ['root.json', 'timestamp.json']:
        dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                   progress)
        self._dxf.set_alias(f, dgst)

    # Upload consistent snapshot versions of current metadata files...
    # first load timestamp.json
    with open(path.join(self._master_staged_dir, 'timestamp.json'), 'rb') as f:
        timestamp = json.loads(f.read().decode('utf-8'))
    # the timestamp's own version number is the timestamp prefix
    files = ['{}.timestamp.json'.format(timestamp['signed']['version'])]
    # the timestamp metadata records the snapshot version -> snapshot prefix
    snapshot_cs = '{}.snapshot.json'.format(
        timestamp['signed']['meta']['snapshot.json']['version'])
    files.append(snapshot_cs)
    # load the version-prefixed snapshot.json
    with open(path.join(self._master_staged_dir, snapshot_cs), 'rb') as f:
        snapshot = json.loads(f.read().decode('utf-8'))
    # the snapshot metadata records targets and root versions -> their prefixes
    files.append('{}.targets.json'.format(
        snapshot['signed']['meta']['targets.json']['version']))
    files.append('{}.root.json'.format(
        snapshot['signed']['meta']['root.json']['version']))
    # Upload the version-prefixed metadata files, aliased by filename.
    for f in files:
        dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                   progress)
        self._dxf.set_alias(f, dgst)
def push_metadata(self, targets_key_password=None, snapshot_key_password=None, timestamp_key_password=None, progress=None): """ Upload local TUF metadata to the repository. The TUF metadata consists of a list of targets (which were uploaded by :meth:`push_target`), a snapshot of the state of the metadata (list of hashes), a timestamp and a list of public keys. This function signs the metadata except for the list of public keys, so you'll need to supply the password to the respective private keys. The list of public keys was signed (along with the rest of the metadata) with the root private key when you called :meth:`create_metadata` (or :meth:`reset_keys`). :param targets_key_password: Password to use for decrypting the TUF targets private key. You'll be prompted for one if you don't supply it. :type password: str :param snapshot_key_password: Password to use for decrypting the TUF snapshot private key. You'll be prompted for one if you don't supply it. :type password: str :param timestamp_key_password: Password to use for decrypting the TUF timestamp private key. You'll be prompted for one if you don't supply it. :type password: str :param progress: Optional function to call as the upload progresses. The function will be called with the hash of the content of the file currently being uploaded, the blob just read from the file and the total size of the file. 
:type progress: function(dgst, chunk, total) """ from tuf.repository_tool import load_repository, \ Repository, \ import_rsa_privatekey_from_file # Load repository object repository = load_repository(self._master_repo_dir) # pylint: disable=no-member # Update targets repository.targets.clear_targets() repository.targets.add_targets([ _strip_consistent_target_digest(f) for f in Repository.get_filepaths_in_directory(self._master_targets_dir)]) # Update expirations repository.targets.expiration = datetime.now() + self._targets_lifetime repository.snapshot.expiration = datetime.now() + self._snapshot_lifetime repository.timestamp.expiration = datetime.now() + self._timestamp_lifetime # Load targets key if targets_key_password is None: print('importing targets key...') private_targets_key = import_rsa_privatekey_from_file( self._targets_key_file, targets_key_password) repository.targets.load_signing_key(private_targets_key) # Load snapshot key if snapshot_key_password is None: print('importing snapshot key...') private_snapshot_key = import_rsa_privatekey_from_file( self._snapshot_key_file, snapshot_key_password) repository.snapshot.load_signing_key(private_snapshot_key) # Load timestamp key if timestamp_key_password is None: print('importing timestamp key...') private_timestamp_key = import_rsa_privatekey_from_file( self._timestamp_key_file, timestamp_key_password) repository.timestamp.load_signing_key(private_timestamp_key) # Update metadata repository.write(consistent_snapshot=True) # Upload root.json and timestamp.json without hash prefix for f in ['root.json', 'timestamp.json']: dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f), progress) self._dxf.set_alias(f, dgst) # Upload consistent snapshot versions of current metadata files... 
# first load timestamp.json with open(path.join(self._master_staged_dir, 'timestamp.json'), 'rb') as f: timestamp_data = f.read() # hash of content is timestamp prefix timestamp_cs = hash_bytes(timestamp_data) + '.timestamp.json' files = [timestamp_cs] # parse timestamp data timestamp = json.loads(timestamp_data.decode('utf-8')) # get snapshot prefix snapshot_cs = timestamp['signed']['meta']['snapshot.json']['hashes']['sha256'] + '.snapshot.json' files.append(snapshot_cs) # load prefixed snapshot.json with open(path.join(self._master_staged_dir, snapshot_cs), 'rb') as f: snapshot_data = f.read() # parse snapshot data snapshot = json.loads(snapshot_data.decode('utf-8')) # get targets and root prefixes targets_cs = snapshot['signed']['meta']['targets.json']['hashes']['sha256'] + '.targets.json' files.append(targets_cs) root_cs = snapshot['signed']['meta']['root.json']['hashes']['sha256'] + '.root.json' files.append(root_cs) # Upload metadata for f in files: dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f), progress)