Example #1
0
    def del_target(self, target):
        """
        Delete a target (data) from the repository and update the local TUF
        metadata.

        The metadata isn't updated on the registry until you call
        :meth:`push_metadata`.

        Note that the registry doesn't support deletes yet so expect an error.

        :param target: The name you gave to the data when it was uploaded using :meth:`push_target`.
        :type target: str
        """
        # Imported lazily so the TUF dependency is only loaded when needed.
        from tuf.repository_tool import Repository
        # read target manifest (kept locally under the master targets dir)
        manifest_filename = path.join(self._master_targets_dir, target)
        with open(manifest_filename, 'rb') as f:
            manifest = f.read()
        # digest of the manifest content — used below to delete the manifest
        # blob from the registry
        manifest_dgst = hash_bytes(manifest)
        # remove target manifest
        remove(manifest_filename)
        # remove consistent snapshot links for target; presumably these are
        # copies whose filename carries a digest prefix before the target
        # name (see _skip_consistent_target_digest) — TODO confirm naming
        for f in Repository.get_filepaths_in_directory(
                self._master_targets_dir):
            _, basename = path.split(f)
            # slicing past the digest prefix leaves the bare target name
            if basename[_skip_consistent_target_digest(basename):] == target:
                remove(f)
        # delete blobs manifest points to
        for dgst in self._dxf.get_alias(manifest=manifest.decode('utf-8')):
            self._dxf.del_blob(dgst)
        # delete manifest blob
        self._dxf.del_blob(manifest_dgst)
Example #2
0
    def del_target(self, target):
        """
        Remove a previously uploaded target from the repository and bring
        the local TUF metadata in line with the removal.

        The registry itself is only touched for blob deletion here; the
        metadata is not pushed until :meth:`push_metadata` is called.

        Note that the registry doesn't support deletes yet so expect an error.

        :param target: Name the data was uploaded under via :meth:`push_target`.
        :type target: str
        """
        from tuf.repository_tool import Repository
        # Load the target's manifest so its referenced blobs can be found.
        manifest_path = path.join(self._master_targets_dir, target)
        with open(manifest_path, 'rb') as manifest_file:
            manifest_bytes = manifest_file.read()
        manifest_digest = hash_bytes(manifest_bytes)
        # Drop the local manifest file itself.
        remove(manifest_path)
        # Drop any consistent-snapshot links (digest-prefixed copies) too.
        for filepath in Repository.get_filepaths_in_directory(self._master_targets_dir):
            name = path.split(filepath)[1]
            if name[_skip_consistent_target_digest(name):] != target:
                continue
            remove(filepath)
        # Delete every blob the manifest references, then the manifest blob.
        for blob_digest in self._dxf.get_alias(manifest=manifest_bytes.decode('utf-8')):
            self._dxf.del_blob(blob_digest)
        self._dxf.del_blob(manifest_digest)
Example #3
0
  def update(self, src_dir, push_alias):
    """
    Packs and uploads the contents of src_dir as a layer and injects the layer into the image.
    The new layer version is stored under the tag push_alias.

    :param src_dir: directory whose contents become the new layer
    :param push_alias: tag the refreshed image manifest is pushed under
    :raises RuntimeError: if the tar or gzip shell command fails
    """
    # Make sure the image already carries the CVMFS bootstrap layer.
    if not self.fat_manifest.is_cvmfs_prepared():
      print("Preparing image for CVMFS injection...")
      self.setup(push_alias)
    with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
      print("Bundling file into tar...")
      # NOTE(review): src_dir is spliced into a shell command unquoted;
      # paths containing spaces or shell metacharacters will break.
      _, error = exec_bash("tar --xattrs -C "+src_dir+" -cvf "+tmp_file.name+" .")
      if error:
        raise RuntimeError("Failed to tar with error " + str(error))
      # Digest of the uncompressed tar, recorded in the fat manifest below.
      tar_digest = hash_file(tmp_file.name)
      print("Bundling tar into gz...")
      gz_dest = tmp_file.name+".gz"
      # gzip replaces tmp_file.name with tmp_file.name + ".gz".
      _, error = exec_bash("gzip "+tmp_file.name)
      if error:
        # Fixed: this branch previously claimed the *tar* step failed.
        raise RuntimeError("Failed to gzip with error " + str(error))
      print("Uploading...")
      gz_digest = self.dxfObject.push_blob(gz_dest)
      os.unlink(gz_dest)
    print("Refreshing manifests...")
    # Swap the old compressed layer for the new one in both manifests.
    old_gz_digest = self.fat_manifest.get_gz_digest()
    layer_size = self.dxfObject.blob_size(gz_digest)
    self.fat_manifest.inject(tar_digest, gz_digest)
    fat_man_json = self.fat_manifest.as_JSON()
    manifest_digest = hash_bytes(bytes(fat_man_json, 'utf-8'))
    self.dxfObject.push_blob(data=fat_man_json, digest=manifest_digest)
    manifest_size = self.dxfObject.blob_size(manifest_digest)

    self.image_manifest.inject(old_gz_digest, gz_digest, layer_size, manifest_digest, manifest_size)

    image_man_json = self.image_manifest.as_JSON()
    self.dxfObject.set_manifest(push_alias, image_man_json)
Example #4
0
  def setup(self, push_alias):
    """
    Prepare an image for CVMFS layer injection and publish the result
    under the tag push_alias.
    """
    # Build and upload the initial injection tarball.
    tar_digest, gz_digest = self._build_init_tar()
    gz_size = self.dxfObject.blob_size(gz_digest)
    # Record the new layer in the fat (config) manifest and push it as a blob.
    self.fat_manifest.init_cvmfs_layer(tar_digest, gz_digest)
    config_json = self.fat_manifest.as_JSON()
    config_digest = hash_bytes(bytes(config_json, 'utf-8'))
    self.dxfObject.push_blob(data=config_json, digest=config_digest)
    config_size = self.dxfObject.blob_size(config_digest)
    # Reference layer and config from the image manifest, then publish it.
    self.image_manifest.init_cvmfs_layer(gz_digest, gz_size, config_digest, config_size)
    self.dxfObject.set_manifest(push_alias, self.image_manifest.as_JSON())
Example #5
0
    def setup(self, push_alias):
        """
        Set an image up for CVMFS layer injection and publish the result
        under the tag push_alias.
        """
        # Build and upload the initial injection tarball; returns digests of
        # the uncompressed tar and its gzipped (uploaded) form.
        tar_digest, gz_digest = self._build_init_tar()
        layer_size = self.dxfObject.blob_size(gz_digest)
        # Record the new layer in the fat manifest, then push the fat
        # manifest itself as a blob so the image manifest can reference it.
        self.fat_manifest.init_cvmfs_layer(tar_digest, gz_digest)
        fat_man_json = self.fat_manifest.as_JSON()
        manifest_digest = hash_bytes(bytes(fat_man_json, 'utf-8'))
        self.dxfObject.push_blob(data=fat_man_json, digest=manifest_digest)
        manifest_size = self.dxfObject.blob_size(manifest_digest)
        # Wire the layer and fat-manifest references into the image manifest.
        self.image_manifest.init_cvmfs_layer(gz_digest, layer_size,
                                             manifest_digest, manifest_size)

        # Publish the updated image manifest under the requested tag.
        image_man_json = self.image_manifest.as_JSON()
        self.dxfObject.set_manifest(push_alias, image_man_json)
Example #6
0
    def update(self, src_dir, push_alias):
        """
        Packs and uploads the contents of src_dir as a layer and injects
        the layer into the image. The new layer version is stored under
        the tag push_alias.

        :param src_dir: directory whose contents become the new layer
        :param push_alias: tag the refreshed image manifest is pushed under
        :raises RuntimeError: if the tar or gzip shell command fails
        """
        # Make sure the image already carries the CVMFS bootstrap layer.
        if not self.fat_manifest.is_cvmfs_prepared():
            print("Preparing image for CVMFS injection...")
            self.setup(push_alias)
        with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
            print("Bundling file into tar...")
            # NOTE(review): src_dir is spliced into a shell command unquoted;
            # paths containing spaces or shell metacharacters will break.
            _, error = exec_bash("tar --xattrs -C " + src_dir + " -cvf " +
                                 tmp_file.name + " .")
            if error:
                raise RuntimeError("Failed to tar with error " + str(error))
            # Digest of the uncompressed tar, recorded in the fat manifest.
            tar_digest = hash_file(tmp_file.name)
            print("Bundling tar into gz...")
            gz_dest = tmp_file.name + ".gz"
            # gzip replaces tmp_file.name with tmp_file.name + ".gz".
            _, error = exec_bash("gzip " + tmp_file.name)
            if error:
                # Fixed: this branch previously claimed the *tar* step failed.
                raise RuntimeError("Failed to gzip with error " + str(error))
            print("Uploading...")
            gz_digest = self.dxfObject.push_blob(gz_dest)
            os.unlink(gz_dest)
        print("Refreshing manifests...")
        # Swap the old compressed layer for the new one in both manifests.
        old_gz_digest = self.fat_manifest.get_gz_digest()
        layer_size = self.dxfObject.blob_size(gz_digest)
        self.fat_manifest.inject(tar_digest, gz_digest)
        fat_man_json = self.fat_manifest.as_JSON()
        manifest_digest = hash_bytes(bytes(fat_man_json, 'utf-8'))
        self.dxfObject.push_blob(data=fat_man_json, digest=manifest_digest)
        manifest_size = self.dxfObject.blob_size(manifest_digest)

        self.image_manifest.inject(old_gz_digest, gz_digest, layer_size,
                                   manifest_digest, manifest_size)

        image_man_json = self.image_manifest.as_JSON()
        self.dxfObject.set_manifest(push_alias, image_man_json)
Example #7
0
    def push_metadata(self,
                      targets_key_password=None,
                      snapshot_key_password=None,
                      timestamp_key_password=None,
                      progress=None):
        """
        Upload local TUF metadata to the repository.

        The TUF metadata consists of a list of targets (which were uploaded by
        :meth:`push_target`), a snapshot of the state of the metadata (list of
        hashes), a timestamp and a list of public keys.

        This function signs the metadata except for the list of public keys,
        so you'll need to supply the password to the respective private keys.

        The list of public keys was signed (along with the rest of the metadata)
        with the root private key when you called :meth:`create_metadata`
        (or :meth:`reset_keys`).

        :param targets_key_password: Password to use for decrypting the TUF targets private key. You'll be prompted for one if you don't supply it.
        :type password: str

        :param snapshot_key_password: Password to use for decrypting the TUF snapshot private key. You'll be prompted for one if you don't supply it.
        :type password: str

        :param timestamp_key_password: Password to use for decrypting the TUF timestamp private key. You'll be prompted for one if you don't supply it.
        :type password: str

        :param progress: Optional function to call as the upload progresses. The function will be called with the hash of the content of the file currently being uploaded, the blob just read from the file and the total size of the file.
        :type progress: function(dgst, chunk, total)
        """
        from tuf.repository_tool import load_repository, \
                                        Repository, \
                                        import_rsa_privatekey_from_file
        # Load repository object
        repository = load_repository(self._master_repo_dir)
        #  pylint: disable=no-member

        # Update targets
        repository.targets.clear_targets()
        repository.targets.add_targets([
            _strip_consistent_target_digest(f)
            for f in Repository.get_filepaths_in_directory(self._master_targets_dir)])

        # Update expirations
        repository.targets.expiration = datetime.now() + self._targets_lifetime
        repository.snapshot.expiration = datetime.now() + self._snapshot_lifetime
        repository.timestamp.expiration = datetime.now() + self._timestamp_lifetime

        # Load targets key
        if targets_key_password is None:
            print('importing targets key...')
        private_targets_key = import_rsa_privatekey_from_file(
            self._targets_key_file,
            targets_key_password)
        repository.targets.load_signing_key(private_targets_key)

        # Load snapshot key
        if snapshot_key_password is None:
            print('importing snapshot key...')
        private_snapshot_key = import_rsa_privatekey_from_file(
            self._snapshot_key_file,
            snapshot_key_password)
        repository.snapshot.load_signing_key(private_snapshot_key)

        # Load timestamp key
        if timestamp_key_password is None:
            print('importing timestamp key...')
        private_timestamp_key = import_rsa_privatekey_from_file(
            self._timestamp_key_file,
            timestamp_key_password)
        repository.timestamp.load_signing_key(private_timestamp_key)

        # Update metadata
        repository.write(consistent_snapshot=True)

        # Upload root.json and timestamp.json without hash prefix
        for f in ['root.json', 'timestamp.json']:
            dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                       progress)
            self._dxf.set_alias(f, dgst)

        # Upload consistent snapshot versions of current metadata files...
        # first load timestamp.json
        with open(path.join(self._master_staged_dir, 'timestamp.json'), 'rb') as f:
            timestamp_data = f.read()
        # hash of content is timestamp prefix
        timestamp_cs = hash_bytes(timestamp_data) + '.timestamp.json'
        files = [timestamp_cs]
        # parse timestamp data
        timestamp = json.loads(timestamp_data.decode('utf-8'))
        # get snapshot prefix
        snapshot_cs = timestamp['signed']['meta']['snapshot.json']['hashes']['sha256'] + '.snapshot.json'
        files.append(snapshot_cs)
        # load prefixed snapshot.json
        with open(path.join(self._master_staged_dir, snapshot_cs), 'rb') as f:
            snapshot_data = f.read()
        # parse snapshot data
        snapshot = json.loads(snapshot_data.decode('utf-8'))
        # get targets and root prefixes
        targets_cs = snapshot['signed']['meta']['targets.json']['hashes']['sha256'] + '.targets.json'
        files.append(targets_cs)
        root_cs = snapshot['signed']['meta']['root.json']['hashes']['sha256'] + '.root.json'
        files.append(root_cs)
        # Upload metadata
        for f in files:
            dgst = self._dxf.push_blob(path.join(self._master_staged_dir, f),
                                       progress)