Example 1
    def commit_to_blob(self, app_config, expected_digest=None):
        """ Commits the blob upload to a blob under the repository. The resulting blob will be marked
            to not be GCed for some period of time (as configured by `committed_blob_expiration`).

            If expected_digest is specified, the content digest of the data uploaded for the blob is
            compared to that given and, if it does not match, a BlobDigestMismatchException is
            raised. The digest given must be of type `Digest` and not a string. """
        # Compare the content digest.
        if expected_digest is not None:
            self._validate_digest(expected_digest)

        # Finalize the storage.
        storage_already_existed = self._finalize_blob_storage(app_config)

        # Convert the upload to a blob.
        computed_digest_str = digest_tools.sha256_digest_from_hashlib(
            self.blob_upload.sha_state)

        with db_transaction():
            blob = registry_model.commit_blob_upload(
                self.blob_upload, computed_digest_str,
                self.settings.committed_blob_expiration)
            if blob is None:
                return None

            # Save torrent hash information (if available).
            if self.blob_upload.piece_sha_state is not None and not storage_already_existed:
                piece_bytes = (self.blob_upload.piece_hashes +
                               self.blob_upload.piece_sha_state.digest())
                registry_model.set_torrent_info(
                    blob, self.settings.bittorrent_piece_size, piece_bytes)

        self.committed_blob = blob
        return blob
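
A minimal sketch of how a caller might finalize an upload with this method. The `uploader` object, the already-parsed `expected_digest`, and the availability of BlobDigestMismatchException are assumptions for illustration and are not taken from the snippet above.

    # Hypothetical caller: `uploader` is an object exposing commit_to_blob, and
    # expected_digest has already been parsed into a `Digest` instance.
    # BlobDigestMismatchException is assumed importable from the same module
    # that defines commit_to_blob.
    def finish_upload(uploader, app_config, expected_digest):
        try:
            blob = uploader.commit_to_blob(app_config, expected_digest=expected_digest)
        except BlobDigestMismatchException:
            # The uploaded content did not hash to the digest the client supplied.
            return None

        # commit_to_blob returns None when the upload could not be converted
        # into a committed blob.
        return blob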
Example 2
    def _store_metadata_and_cleanup():
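        # Persist the BitTorrent piece hashes for the derived image's blob and record the
        # derived image's final size; skip all writes when the registry is in read-only mode.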
        if is_readonly:
            return

        with database.UseThenDisconnect(app.config):
            registry_model.set_torrent_info(
                derived_image.blob, app.config["BITTORRENT_PIECE_SIZE"], hasher.final_piece_hashes()
            )
            registry_model.set_derived_image_size(derived_image, hasher.hashed_bytes)