def _init_db_path_sqlite(tmpdir_factory):
    """
    Build a throwaway SQLite database for tests.

    Creates an empty SQLite file in a fresh temp directory, points the app
    config and the peewee proxy at it, creates the schema, seeds it with test
    data, and then closes the connection so the caller receives a fully
    populated, unopened database file.

    Returns the path to the SQLite file as a string.
    """
    db_file = str(tmpdir_factory.mktemp("data").join("test.db"))
    db_uri = "sqlite:///{0}".format(db_file)

    test_config = {
        "TESTING": True,
        "DEBUG": True,
        "SECRET_KEY": "superdupersecret!!!1",
        "DATABASE_SECRET_KEY": "anothercrazykey!",
        "DB_URI": db_uri,
    }

    os.environ["DB_URI"] = str(db_uri)
    db.initialize(SqliteDatabase(db_file))
    application.config.update(test_config)
    application.config.update({"DB_URI": db_uri})

    initialize_database()

    # Enforce referential integrity and UTF-8 storage for the test database.
    db.obj.execute_sql("PRAGMA foreign_keys = ON;")
    db.obj.execute_sql('PRAGMA encoding="UTF-8";')

    populate_database()

    # Release the connection so the populated file can be reopened cleanly.
    close_db_filter(None)
    return str(db_file)
def get_image_layer(namespace, repository, image_id, headers):
    """
    Serve the layer blob for a V1 image.

    Redirects to a direct download URL when the storage engine supports it;
    otherwise closes the DB handle and streams the blob bytes in the response.

    Aborts with 403 when the caller lacks read access to a private repository,
    and with 404 when the repository, image, or blob data cannot be found.
    """
    permission = ReadRepositoryPermission(namespace, repository)
    repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter="image")

    logger.debug("Checking repo permissions")
    if permission.can() or (repository_ref is not None and repository_ref.is_public):
        # A public lookup can still fail to find the repository itself.
        if repository_ref is None:
            abort(404)

        legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_blob=True)
        if legacy_image is None:
            abort(404, "Image %(image_id)s not found", issue="unknown-image", image_id=image_id)

        path = legacy_image.blob.storage_path

        # FIX: guard against a missing compressed size so the metric increment
        # cannot raise; mirrors the size check used for derived images in
        # _repo_verb.
        if legacy_image.blob.compressed_size:
            image_pulled_bytes.labels("v1").inc(legacy_image.blob.compressed_size)

        try:
            logger.debug("Looking up the direct download URL for path: %s", path)
            direct_download_url = store.get_direct_download_url(
                legacy_image.blob.placements, path, get_request_ip()
            )
            if direct_download_url:
                logger.debug("Returning direct download URL")
                resp = redirect(direct_download_url)
                return resp

            # Close the database handle here for this process before we send the long download.
            database.close_db_filter(None)

            logger.debug("Streaming layer data")
            return Response(store.stream_read(legacy_image.blob.placements, path), headers=headers)
        except (IOError, AttributeError):
            # Storage-level failure while reading the blob; treat as missing.
            logger.exception("Image layer data not found")
            abort(404, "Image %(image_id)s not found", issue="unknown-image", image_id=image_id)

    abort(403)
def break_database():
    """
    Point the application at a nonexistent SQLite database.

    Test helper that simulates a database outage: the current connection is
    closed and the database layer is reconfigured with an invalid URI so that
    any subsequent query fails.
    """
    # Drop whatever connection is currently open.
    close_db_filter(None)

    # Rebuild the database configuration around an unreachable URI.
    broken_config = copy.copy(app.config)
    broken_config["DB_URI"] = "sqlite:///not/a/valid/database"
    configure(broken_config)

    return "OK"
def set_config_key(config_key, value):
    """
    Override a single app config value at runtime (test helper).

    Writes the new value onto both the local and the freshly imported app
    config objects, tears down any open database connection, and re-runs
    database configuration so the new setting takes effect.

    Returns a JSON response containing the previous value under "old_value".
    """
    from app import app as current_app

    old_value = app.config.get(config_key)

    # Apply the override to both config objects so every reader sees it.
    for target_app in (app, current_app):
        target_app.config[config_key] = value

    # Close any existing connection.
    close_db_filter(None)

    # Reload the database config.
    configure(app.config)
    return jsonify({"old_value": old_value})
def get_image_layer(namespace, repository, image_id, headers):
    """
    Serve the layer blob for a V1 image (legacy metric_queue variant).

    Redirects to a direct download URL when the storage engine supports it;
    otherwise closes the DB handle and streams the blob bytes directly.

    Aborts with 403 when the caller lacks read access to a private repository,
    and with 404 when the repository, image, or blob data cannot be found.
    """
    permission = ReadRepositoryPermission(namespace, repository)
    repository_ref = registry_model.lookup_repository(namespace, repository, kind_filter='image')

    logger.debug('Checking repo permissions')
    if permission.can() or (repository_ref is not None and repository_ref.is_public):
        # A public lookup can still fail to find the repository itself.
        if repository_ref is None:
            abort(404)

        legacy_image = registry_model.get_legacy_image(repository_ref, image_id, include_blob=True)
        if legacy_image is None:
            abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

        path = legacy_image.blob.storage_path
        # Record the pulled byte count for the v1 protocol before serving.
        metric_queue.pull_byte_count.Inc(legacy_image.blob.compressed_size, labelvalues=['v1'])

        try:
            logger.debug('Looking up the direct download URL for path: %s', path)
            direct_download_url = store.get_direct_download_url(
                legacy_image.blob.placements, path, get_request_ip())
            if direct_download_url:
                logger.debug('Returning direct download URL')
                resp = redirect(direct_download_url)
                return resp

            # Close the database handle here for this process before we send the long download.
            database.close_db_filter(None)

            logger.debug('Streaming layer data')
            return Response(store.stream_read(legacy_image.blob.placements, path), headers=headers)
        except (IOError, AttributeError):
            # Storage-level failure while reading the blob; treat as missing.
            logger.exception('Image layer data not found')
            abort(404, 'Image %(image_id)s not found', issue='unknown-image', image_id=image_id)

    abort(403)
def reload_app(server_hostname):
    """
    Reset application state around a new test server hostname.

    Closes any open database connection, rewrites SERVER_HOSTNAME with the
    scheme prefix stripped, reconfigures the database, and installs an
    anonymous identity so unauthenticated calls do not raise.
    """
    # Drop any connection held over from the previous configuration.
    close_db_filter(None)

    # Strip the scheme before storing the hostname, then reload the DB config.
    scheme_prefix_len = len("http://")
    app.config["SERVER_HOSTNAME"] = server_hostname[scheme_prefix_len:]
    configure(app.config)

    # Anonymous requests need an identity present on the request globals.
    g.identity = Identity(None, "none")

    if os.environ.get("DEBUGLOG") == "true":
        logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)

    return "OK"
def reload_app(server_hostname):
    """
    Reset application state around a new test server hostname (legacy variant).

    Closes any open database connection, rewrites SERVER_HOSTNAME with the
    scheme prefix stripped, reconfigures the database, reseeds the RNG after
    the process split, and installs an anonymous identity so unauthenticated
    calls do not raise.
    """
    # Close any existing connection.
    close_db_filter(None)

    # Reload the database config.
    app.config['SERVER_HOSTNAME'] = server_hostname[len('http://'):]
    configure(app.config)

    # Reload random after the process split, as it cannot be used uninitialized across forks.
    # NOTE(review): presumably PyCrypto's Crypto.Random — verify the import site.
    Random.atfork()

    # Required for anonymous calls to not exception.
    g.identity = Identity(None, 'none')

    if os.environ.get('DEBUGLOG') == 'true':
        logging.config.fileConfig(logfile_path(debug=True), disable_existing_loggers=False)

    return 'OK'
def _cleanup():
    """Release this process's database connection after a child process exits."""
    # Close any existing DB connection once the process has exited.
    database.close_db_filter(None)
def _repo_verb(
    namespace, repository, tag_name, verb, formatter, sign=False, checker=None, **kwargs
):
    """
    Shared implementation for repository "verb" endpoints (e.g. derived-image
    downloads such as squash/convert).

    Verifies access to the tag, looks up (or creates) the derived image for
    the verb, and either serves the cached result from storage or builds it
    on the fly, streaming it simultaneously to the client and — unless the
    registry is read-only — to storage and an optional signing process.

    Aborts: 404 when the repository is missing, 406 for torrent requests
    (no longer supported), 400 when the derived image cannot be created.
    """
    # Verify that the image exists and that we have access to it.
    logger.debug(
        "Verifying repo verb %s for repository %s/%s with user %s with mimetype %s",
        verb,
        namespace,
        repository,
        get_authenticated_user(),
        request.accept_mimetypes.best,
    )
    tag, manifest, schema1_manifest = _verify_repo_verb(
        storage, namespace, repository, tag_name, verb, checker
    )

    # Load the repository for later.
    repo = model.repository.get_repository(namespace, repository)
    if repo is None:
        abort(404)

    # Check for torrent, which is no longer supported.
    if request.accept_mimetypes.best == "application/x-bittorrent":
        abort(406)

    # Log the action.
    track_and_log("repo_verb", wrap_repository(repo), tag=tag.name, verb=verb, **kwargs)

    is_readonly = app.config.get("REGISTRY_STATE", "normal") == "readonly"

    # Lookup/create the derived image for the verb and repo image.
    # In read-only mode we can only look up an existing derived image.
    if is_readonly:
        derived_image = registry_model.lookup_derived_image(
            manifest, verb, storage, varying_metadata={"tag": tag.name}, include_placements=True
        )
    else:
        derived_image = registry_model.lookup_or_create_derived_image(
            manifest,
            verb,
            storage.preferred_locations[0],
            storage,
            varying_metadata={"tag": tag.name},
            include_placements=True,
        )
        if derived_image is None:
            logger.error("Could not create or lookup a derived image for manifest %s", manifest)
            abort(400)

    # Fast path: the derived image is already fully built and stored.
    if derived_image is not None and not derived_image.blob.uploading:
        logger.debug("Derived %s image %s exists in storage", verb, derived_image)
        is_head_request = request.method == "HEAD"

        # Guard the metric: compressed_size may be unset for some blobs.
        if derived_image.blob.compressed_size:
            image_pulled_bytes.labels("verbs").inc(derived_image.blob.compressed_size)

        download_url = storage.get_direct_download_url(
            derived_image.blob.placements, derived_image.blob.storage_path, head=is_head_request
        )
        if download_url:
            logger.debug("Redirecting to download URL for derived %s image %s", verb, derived_image)
            return redirect(download_url)

        # Close the database handle here for this process before we send the long download.
        database.close_db_filter(None)

        logger.debug("Sending cached derived %s image %s", verb, derived_image)
        return send_file(
            storage.stream_read_file(
                derived_image.blob.placements, derived_image.blob.storage_path
            ),
            mimetype=LAYER_MIMETYPE,
        )

    # Slow path: build the derived image now, streaming it as it is produced.
    logger.debug("Building and returning derived %s image", verb)
    hasher = SimpleHasher()

    # Close the database connection before any process forking occurs. This is important because
    # the Postgres driver does not react kindly to forking, so we need to make sure it is closed
    # so that each process will get its own unique connection.
    database.close_db_filter(None)

    def _cleanup():
        # Close any existing DB connection once the process has exited.
        database.close_db_filter(None)

    def _store_metadata_and_cleanup():
        # Invoked after the build finishes; records the derived image's size
        # (skipped in read-only mode, where nothing may be written).
        if is_readonly:
            return

        with database.UseThenDisconnect(app.config):
            registry_model.set_derived_image_size(derived_image, hasher.hashed_bytes)

    # Create a queue process to generate the data. The queue files will read from the process
    # and send the results to the client and storage.
    unique_id = (
        derived_image.unique_id
        if derived_image is not None
        else hashlib.sha256(("%s:%s" % (verb, uuid.uuid4())).encode("utf-8")).hexdigest()
    )
    handlers = [hasher.update]
    reporter = VerbReporter(verb)
    args = (formatter, tag, schema1_manifest, unique_id, handlers, reporter)
    queue_process = QueueProcess(
        _open_stream,
        8 * 1024,
        10 * 1024 * 1024,  # 8K/10M chunk/max
        args,
        finished=_store_metadata_and_cleanup,
    )

    client_queue_file = QueueFile(
        queue_process.create_queue(), "client", timeout=QUEUE_FILE_TIMEOUT
    )

    if not is_readonly:
        storage_queue_file = QueueFile(
            queue_process.create_queue(), "storage", timeout=QUEUE_FILE_TIMEOUT
        )

        # If signing is required, add a QueueFile for signing the image as we stream it out.
        signing_queue_file = None
        if sign and signer.name:
            signing_queue_file = QueueFile(
                queue_process.create_queue(), "signing", timeout=QUEUE_FILE_TIMEOUT
            )

    # Start building.
    queue_process.run()

    # Start the storage saving.
    if not is_readonly:
        storage_args = (verb, derived_image, storage_queue_file, namespace, repository, tag_name)
        QueueProcess.run_process(_write_derived_image_to_storage, storage_args, finished=_cleanup)

        if sign and signer.name:
            signing_args = (verb, derived_image, signing_queue_file)
            QueueProcess.run_process(_sign_derived_image, signing_args, finished=_cleanup)

    # Close the database handle here for this process before we send the long download.
    database.close_db_filter(None)

    # Return the client's data.
    return send_file(client_queue_file, mimetype=LAYER_MIMETYPE)
def _repo_verb(namespace, repository, tag_name, verb, formatter, sign=False, checker=None,
               **kwargs):
    """
    Shared implementation for repository "verb" endpoints (legacy variant with
    torrent support and metric_queue metrics).

    Verifies access to the tag, looks up (or creates) the derived image for
    the verb, and either serves the cached result from storage or builds it
    on the fly, streaming it simultaneously to the client and — unless the
    registry is read-only — to storage and an optional signing process.
    Torrent requests are answered via _torrent_repo_verb.
    """
    # Verify that the image exists and that we have access to it.
    logger.debug(
        'Verifying repo verb %s for repository %s/%s with user %s with mimetype %s',
        verb, namespace, repository, get_authenticated_user(), request.accept_mimetypes.best)

    tag, manifest, schema1_manifest = _verify_repo_verb(
        storage, namespace, repository, tag_name, verb, checker)

    # Load the repository for later.
    repo = model.repository.get_repository(namespace, repository)
    if repo is None:
        abort(404)

    # Check for torrent. If found, we return a torrent for the repo verb image (if the derived
    # image already exists).
    if request.accept_mimetypes.best == 'application/x-bittorrent':
        metric_queue.repository_pull.Inc(
            labelvalues=[namespace, repository, verb + '+torrent', True])
        return _torrent_repo_verb(repo, tag, manifest, verb, **kwargs)

    # Log the action.
    track_and_log('repo_verb', wrap_repository(repo), tag=tag.name, verb=verb, **kwargs)
    metric_queue.repository_pull.Inc(
        labelvalues=[namespace, repository, verb, True])

    is_readonly = app.config.get('REGISTRY_STATE', 'normal') == 'readonly'

    # Lookup/create the derived image for the verb and repo image.
    # In read-only mode we can only look up an existing derived image.
    if is_readonly:
        derived_image = registry_model.lookup_derived_image(
            manifest, verb, storage, varying_metadata={'tag': tag.name},
            include_placements=True)
    else:
        derived_image = registry_model.lookup_or_create_derived_image(
            manifest, verb, storage.preferred_locations[0], storage,
            varying_metadata={'tag': tag.name},
            include_placements=True)
        if derived_image is None:
            logger.error(
                'Could not create or lookup a derived image for manifest %s', manifest)
            abort(400)

    # Fast path: the derived image is already fully built and stored.
    if derived_image is not None and not derived_image.blob.uploading:
        logger.debug('Derived %s image %s exists in storage', verb, derived_image)
        is_head_request = request.method == 'HEAD'

        metric_queue.pull_byte_count.Inc(derived_image.blob.compressed_size, labelvalues=[verb])

        download_url = storage.get_direct_download_url(
            derived_image.blob.placements, derived_image.blob.storage_path,
            head=is_head_request)
        if download_url:
            logger.debug('Redirecting to download URL for derived %s image %s', verb,
                         derived_image)
            return redirect(download_url)

        # Close the database handle here for this process before we send the long download.
        database.close_db_filter(None)

        logger.debug('Sending cached derived %s image %s', verb, derived_image)
        return send_file(storage.stream_read_file(
            derived_image.blob.placements, derived_image.blob.storage_path),
            mimetype=LAYER_MIMETYPE)

    # Slow path: build the derived image now, streaming it as it is produced.
    logger.debug('Building and returning derived %s image', verb)

    # Close the database connection before any process forking occurs. This is important because
    # the Postgres driver does not react kindly to forking, so we need to make sure it is closed
    # so that each process will get its own unique connection.
    database.close_db_filter(None)

    def _cleanup():
        # Close any existing DB connection once the process has exited.
        database.close_db_filter(None)

    hasher = PieceHasher(app.config['BITTORRENT_PIECE_SIZE'])

    def _store_metadata_and_cleanup():
        # Invoked after the build finishes; records torrent piece hashes and
        # the derived image's size (skipped in read-only mode).
        if is_readonly:
            return

        with database.UseThenDisconnect(app.config):
            registry_model.set_torrent_info(
                derived_image.blob, app.config['BITTORRENT_PIECE_SIZE'],
                hasher.final_piece_hashes())
            registry_model.set_derived_image_size(derived_image, hasher.hashed_bytes)

    # Create a queue process to generate the data. The queue files will read from the process
    # and send the results to the client and storage.
    unique_id = (derived_image.unique_id
                 if derived_image is not None
                 else hashlib.sha256('%s:%s' % (verb, uuid.uuid4())).hexdigest())
    handlers = [hasher.update]
    reporter = VerbReporter(verb)
    args = (formatter, tag, schema1_manifest, unique_id, handlers, reporter)
    queue_process = QueueProcess(
        _open_stream,
        8 * 1024,
        10 * 1024 * 1024,  # 8K/10M chunk/max
        args,
        finished=_store_metadata_and_cleanup)

    client_queue_file = QueueFile(queue_process.create_queue(), 'client')

    if not is_readonly:
        storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')

    # If signing is required, add a QueueFile for signing the image as we stream it out.
    signing_queue_file = None
    if sign and signer.name:
        signing_queue_file = QueueFile(queue_process.create_queue(), 'signing')

    # Start building.
    queue_process.run()

    # Start the storage saving.
    if not is_readonly:
        storage_args = (verb, derived_image, storage_queue_file)
        QueueProcess.run_process(_write_derived_image_to_storage, storage_args, finished=_cleanup)

    if sign and signer.name:
        signing_args = (verb, derived_image, signing_queue_file)
        QueueProcess.run_process(_sign_derived_image, signing_args, finished=_cleanup)

    # Close the database handle here for this process before we send the long download.
    database.close_db_filter(None)

    # Return the client's data.
    return send_file(client_queue_file, mimetype=LAYER_MIMETYPE)