def head_image_layer(namespace, repository, image_id, headers):
    """Handle a HEAD request for a legacy image layer.

    Verifies read access (or public visibility), confirms the legacy image
    and its blob exist, and responds with the supplied headers plus an
    Accept-Ranges header when storage supports resumable downloads. The
    response carries no body.
    """
    permission = ReadRepositoryPermission(namespace, repository)
    repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter="image")

    logger.debug("Checking repo permissions")
    # Guard clause: reject unless the caller can read or the repo is public.
    if not permission.can() and (repository_ref is None or not repository_ref.is_public):
        abort(403)

    if repository_ref is None:
        abort(404)

    logger.debug("Looking up placement locations")
    image = registry_model.get_legacy_image(repository_ref, image_id, include_blob=True)
    if image is None:
        logger.debug("Could not find any blob placement locations")
        abort(404, "Image %(image_id)s not found", issue="unknown-image", image_id=image_id)

    # Advertise resumable downloads when the storage engine supports them.
    range_headers = {}
    if store.get_supports_resumable_downloads(image.blob.placements):
        logger.debug("Storage supports resumable downloads")
        range_headers["Accept-Ranges"] = "bytes"

    response = make_response("")
    response.headers.extend(headers)
    response.headers.extend(range_headers)
    return response
def check_blob_exists(namespace_name, repo_name, digest):
    """Answer a blob existence check (HEAD-style) with headers only.

    Raises BlobUnknown when the digest does not resolve to a blob in the
    given repository.
    """
    # Resolve the blob through the model cache.
    blob = registry_model.get_cached_repo_blob(model_cache, namespace_name, repo_name, digest)
    if blob is None:
        raise BlobUnknown()

    # Assemble the response headers describing the blob.
    response_headers = {
        "Docker-Content-Digest": digest,
        "Content-Length": blob.compressed_size,
        "Content-Type": BLOB_CONTENT_TYPE,
    }

    # Advertise range-request support when storage allows resumable downloads.
    if storage.get_supports_resumable_downloads(blob.placements):
        response_headers["Accept-Ranges"] = "bytes"

    # Headers only — no body for an existence check.
    return Response(headers=response_headers)
def download_blob(namespace_name, repo_name, digest):
    """Serve the contents of the blob with the given digest.

    Prefers a redirect to a direct-download URL when the storage engine can
    issue one; otherwise streams the blob bytes with the database connection
    closed for the duration. Raises BlobUnknown for a missing blob.
    """
    # Resolve the blob through the model cache.
    blob = registry_model.get_cached_repo_blob(model_cache, namespace_name, repo_name, digest)
    if blob is None:
        raise BlobUnknown()

    # Base response headers shared by both the redirect and streaming paths.
    response_headers = {"Docker-Content-Digest": digest}

    # Advertise range-request support when storage allows resumable downloads.
    if storage.get_supports_resumable_downloads(blob.placements):
        response_headers["Accept-Ranges"] = "bytes"

    image_pulled_bytes.labels("v2").inc(blob.compressed_size)

    # Prefer a direct-download redirect when storage can provide one.
    blob_path = blob.storage_path
    logger.debug("Looking up the direct download URL for path: %s", blob_path)
    download_url = storage.get_direct_download_url(blob.placements, blob_path, get_request_ip())
    if download_url:
        logger.debug("Returning direct download URL")
        redirect_response = redirect(download_url)
        redirect_response.headers.extend(response_headers)
        return redirect_response

    # Fall back to streaming the blob ourselves; release the DB connection
    # first since the transfer can take a long time.
    logger.debug("Closing database connection before streaming layer data")
    response_headers["Content-Length"] = blob.compressed_size
    response_headers["Content-Type"] = BLOB_CONTENT_TYPE

    with database.CloseForLongOperation(app.config):
        # Stream the response to the client.
        return Response(
            storage.stream_read(blob.placements, blob_path),
            headers=response_headers,
        )
def download_blob(namespace_name, repo_name, digest):
    """Serve the contents of the blob with the given digest.

    Redirects to a direct-download URL when the storage engine supports it;
    otherwise streams the blob bytes with the database connection closed for
    the duration of the transfer. Raises BlobUnknown for a missing blob.
    """
    # Find the blob.
    blob = registry_model.get_cached_repo_blob(model_cache, namespace_name, repo_name, digest)
    if blob is None:
        raise BlobUnknown()

    # Build the response headers.
    headers = {'Docker-Content-Digest': digest}

    # If our storage supports range requests, let the client know.
    if storage.get_supports_resumable_downloads(blob.placements):
        headers['Accept-Ranges'] = 'bytes'

    metric_queue.pull_byte_count.Inc(blob.compressed_size, labelvalues=['v2'])

    # Short-circuit by redirecting if the storage supports it.
    path = blob.storage_path
    logger.debug('Looking up the direct download URL for path: %s', path)
    direct_download_url = storage.get_direct_download_url(
        blob.placements, path, get_request_ip())
    if direct_download_url:
        logger.debug('Returning direct download URL')
        resp = redirect(direct_download_url)
        resp.headers.extend(headers)
        return resp

    # Close the database connection before we stream the download.
    logger.debug('Closing database connection before streaming layer data')

    # BUG FIX: the original passed `headers=headers.update({...})` to Response,
    # but dict.update() returns None — so the streamed response carried NO
    # headers at all (losing Docker-Content-Digest, Content-Length,
    # Content-Type, and Accept-Ranges). Mutate the dict first, then pass it.
    headers.update({
        'Content-Length': blob.compressed_size,
        'Content-Type': BLOB_CONTENT_TYPE,
    })

    with database.CloseForLongOperation(app.config):
        # Stream the response to the client.
        return Response(
            storage.stream_read(blob.placements, path),
            headers=headers,
        )