def _torrent_repo_verb(repository, tag, manifest, verb, **kwargs):
    """ Handles returning a torrent for the given verb on the given image and tag. """
    if not features.BITTORRENT:
        # Torrent feature is not enabled.
        abort(406)

    # Lookup an *existing* derived storage for the verb. If the verb's image storage doesn't exist,
    # we cannot create it here, so we 406.
    derived_image = registry_model.lookup_derived_image(
        manifest, verb, storage, varying_metadata={"tag": tag.name}, include_placements=True)
    if derived_image is None:
        abort(406)

    # Return the torrent.
    torrent = _torrent_for_blob(
        derived_image.blob, model.repository.is_repository_public(repository))

    # Log the action.
    track_and_log("repo_verb", wrap_repository(repository), tag=tag.name, verb=verb, torrent=True,
                  **kwargs)
    return torrent
def emit_log(mirror, log_kind, verb, message, tag=None, tags=None, stdout=None, stderr=None):
    """ Records a repository mirror action in the action logs and, for sync lifecycle events,
        spawns the corresponding notification. """
    logs_model.log_action(
        log_kind,
        namespace_name=mirror.repository.namespace_user.username,
        repository_name=mirror.repository.name,
        metadata={
            "verb": verb,
            "namespace": mirror.repository.namespace_user.username,
            "repo": mirror.repository.name,
            "message": message,
            "tag": tag,
            "tags": tags,
            "stdout": stdout,
            "stderr": stderr,
        },
    )

    if log_kind in (
        "repo_mirror_sync_started",
        "repo_mirror_sync_failed",
        "repo_mirror_sync_success",
    ):
        spawn_notification(wrap_repository(mirror.repository), log_kind, {"message": message})
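# A minimal sketch of how a mirror worker might report sync progress through emit_log.
# The message strings, tag names, and stdout/stderr values below are illustrative only;
# the log kinds shown are the ones emit_log treats as notification-worthy.
#
# emit_log(mirror, "repo_mirror_sync_started", "start",
#          "Starting sync of tags matching 'v3.*'")
# emit_log(mirror, "repo_mirror_sync_success", "end", "Sync completed",
#          tags=["v3.0", "v3.1"], stdout="2 tags copied", stderr="")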
def post(self, namespace_name, repository_name):
    """ Create a RepoMirrorConfig for a given Repository. """
    # TODO: Tidy up this function
    # TODO: Specify only the data we want to pass on when creating the RepoMirrorConfig. Avoid
    #       the possibility of data injection.
    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
        raise NotFound()

    if model.repo_mirror.get_mirror(repo):
        return {'detail': 'Mirror configuration already exists for repository %s/%s' % (
            namespace_name, repository_name)}, 409

    data = request.get_json()
    data['sync_start_date'] = self._string_to_dt(data['sync_start_date'])

    rule = model.repo_mirror.create_rule(repo, data['root_rule']['rule_value'])
    del data['root_rule']

    # Verify the robot is part of the Repository's namespace
    robot = self._setup_robot_for_mirroring(namespace_name, repository_name,
                                            data['robot_username'])
    del data['robot_username']

    mirror = model.repo_mirror.enable_mirroring_for_repository(repo, root_rule=rule,
                                                               internal_robot=robot, **data)
    if mirror:
        track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                      changed='external_reference', to=data['external_reference'])
        return '', 201
    else:
        # TODO: Determine appropriate Response
        return {'detail': 'RepoMirrorConfig already exists for this repository.'}, 409
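# Illustrative request body for the creation handler above. Only the keys the handler
# reads explicitly ('sync_start_date', 'root_rule', 'robot_username', and
# 'external_reference' for logging) are guaranteed; the remaining keys are passed
# straight through to enable_mirroring_for_repository, so treat the exact values and
# field names here as a sketch rather than a schema.
#
# example_request = {
#     'external_reference': 'registry.example.com/namespace/repository',
#     'sync_start_date': '2021-01-01T12:00:00Z',
#     'sync_interval': 3600,
#     'robot_username': 'namespace+mirror_robot',
#     'root_rule': {'rule_kind': 'tag_glob_csv', 'rule_value': ['latest', 'v3.*']},
# }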
def post(self, namespace_name, repository_name):
    """ Update the sync_status for a given Repository's mirroring configuration. """
    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
        raise NotFound()

    mirror = model.repo_mirror.get_mirror(repository=repo)
    if not mirror:
        raise NotFound()

    if mirror and model.repo_mirror.update_sync_status_to_cancel(mirror):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                      changed="sync_status", to="SYNC_CANCEL")
        return '', 204

    raise NotFound()
def put(self, namespace_name, repository_name):
    """ Update an existing RepoMirrorRule """
    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
        raise NotFound()

    rule = model.repo_mirror.get_root_rule(repo)
    if not rule:
        return {'detail': 'The rule appears to be missing.'}, 400

    data = request.get_json()
    if model.repo_mirror.change_rule_value(rule, data['root_rule']['rule_value']):
        track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                      changed="mirror_rule", to=data['root_rule']['rule_value'])
        return '', 200
    else:
        return {'detail': 'Unable to update rule.'}, 400
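# Illustrative body for the rule-update handler above; it only reads
# data['root_rule']['rule_value'], so everything else is a sketch.
#
# example_request = {'root_rule': {'rule_kind': 'tag_glob_csv', 'rule_value': ['v3.*']}}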
def put(self, namespace_name, repository_name):
    """ Allow users to modify the repository's mirroring configuration. """
    values = request.get_json()

    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
        raise NotFound()

    mirror = model.repo_mirror.get_mirror(repo)
    if not mirror:
        raise NotFound()

    if "is_enabled" in values:
        if values["is_enabled"] == True:
            if model.repo_mirror.enable_mirror(repo):
                track_and_log(
                    "repo_mirror_config_changed",
                    wrap_repository(repo),
                    changed="is_enabled",
                    to=True,
                )
        if values["is_enabled"] == False:
            if model.repo_mirror.disable_mirror(repo):
                track_and_log(
                    "repo_mirror_config_changed",
                    wrap_repository(repo),
                    changed="is_enabled",
                    to=False,
                )

    if "external_reference" in values:
        if values["external_reference"] == "":
            return {"detail": "Empty string is an invalid repository location."}, 400

        if model.repo_mirror.change_remote(repo, values["external_reference"]):
            track_and_log(
                "repo_mirror_config_changed",
                wrap_repository(repo),
                changed="external_reference",
                to=values["external_reference"],
            )

    if "robot_username" in values:
        robot_username = values["robot_username"]
        robot = self._setup_robot_for_mirroring(namespace_name, repository_name, robot_username)
        if model.repo_mirror.set_mirroring_robot(repo, robot):
            track_and_log(
                "repo_mirror_config_changed",
                wrap_repository(repo),
                changed="robot_username",
                to=robot_username,
            )

    if "sync_start_date" in values:
        try:
            sync_start_date = self._string_to_dt(values["sync_start_date"])
        except ValueError as e:
            return {"detail": "Incorrect DateTime format for sync_start_date."}, 400

        if model.repo_mirror.change_sync_start_date(repo, sync_start_date):
            track_and_log(
                "repo_mirror_config_changed",
                wrap_repository(repo),
                changed="sync_start_date",
                to=sync_start_date,
            )

    if "sync_interval" in values:
        if model.repo_mirror.change_sync_interval(repo, values["sync_interval"]):
            track_and_log(
                "repo_mirror_config_changed",
                wrap_repository(repo),
                changed="sync_interval",
                to=values["sync_interval"],
            )

    if "external_registry_username" in values and "external_registry_password" in values:
        username = values["external_registry_username"]
        password = values["external_registry_password"]
        if username is None and password is not None:
            return {"detail": "Unable to delete username while setting a password."}, 400

        if model.repo_mirror.change_credentials(repo, username, password):
            track_and_log(
                "repo_mirror_config_changed",
                wrap_repository(repo),
                changed="external_registry_username",
                to=username,
            )
            if password is None:
                track_and_log(
                    "repo_mirror_config_changed",
                    wrap_repository(repo),
                    changed="external_registry_password",
                    to=None,
                )
            else:
                track_and_log(
                    "repo_mirror_config_changed",
                    wrap_repository(repo),
                    changed="external_registry_password",
                    to="********",
                )

    elif "external_registry_username" in values:
        username = values["external_registry_username"]
        if model.repo_mirror.change_username(repo, username):
            track_and_log(
                "repo_mirror_config_changed",
                wrap_repository(repo),
                changed="external_registry_username",
                to=username,
            )

    # Do not allow specifying a password without setting a username
    if "external_registry_password" in values and "external_registry_username" not in values:
        return (
            {"detail": "Unable to set a new password without also specifying a username."},
            400,
        )

    if "external_registry_config" in values:
        external_registry_config = values.get("external_registry_config", {})

        if "verify_tls" in external_registry_config:
            updates = {"verify_tls": external_registry_config["verify_tls"]}
            if model.repo_mirror.change_external_registry_config(repo, updates):
                track_and_log(
                    "repo_mirror_config_changed",
                    wrap_repository(repo),
                    changed="verify_tls",
                    to=external_registry_config["verify_tls"],
                )

        if "unsigned_images" in external_registry_config:
            updates = {"unsigned_images": external_registry_config["unsigned_images"]}
            if model.repo_mirror.change_external_registry_config(repo, updates):
                track_and_log(
                    "repo_mirror_config_changed",
                    wrap_repository(repo),
                    changed="unsigned_images",
                    to=external_registry_config["unsigned_images"],
                )

        if "proxy" in external_registry_config:
            proxy_values = external_registry_config.get("proxy", {})

            if "http_proxy" in proxy_values:
                updates = {"proxy": {"http_proxy": proxy_values["http_proxy"]}}
                if model.repo_mirror.change_external_registry_config(repo, updates):
                    track_and_log(
                        "repo_mirror_config_changed",
                        wrap_repository(repo),
                        changed="http_proxy",
                        to=proxy_values["http_proxy"],
                    )

            if "https_proxy" in proxy_values:
                updates = {"proxy": {"https_proxy": proxy_values["https_proxy"]}}
                if model.repo_mirror.change_external_registry_config(repo, updates):
                    track_and_log(
                        "repo_mirror_config_changed",
                        wrap_repository(repo),
                        changed="https_proxy",
                        to=proxy_values["https_proxy"],
                    )

            if "no_proxy" in proxy_values:
                updates = {"proxy": {"no_proxy": proxy_values["no_proxy"]}}
                if model.repo_mirror.change_external_registry_config(repo, updates):
                    track_and_log(
                        "repo_mirror_config_changed",
                        wrap_repository(repo),
                        changed="no_proxy",
                        to=proxy_values["no_proxy"],
                    )

    if "root_rule" in values:
        if values["root_rule"]["rule_kind"] != "tag_glob_csv":
            raise ValidationError('validation failed: rule_kind must be "tag_glob_csv"')

        if model.repo_mirror.change_rule(
            repo, RepoMirrorRuleType.TAG_GLOB_CSV, values["root_rule"]["rule_value"]
        ):
            track_and_log(
                "repo_mirror_config_changed",
                wrap_repository(repo),
                changed="mirror_rule",
                to=values["root_rule"]["rule_value"],
            )

    return "", 201
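# Illustrative partial-update payload for the configuration PUT handler above. Any
# subset of these keys may be sent; each key present triggers the corresponding model
# update plus a repo_mirror_config_changed log entry. The concrete values are
# placeholders, not defaults.
#
# example_request = {
#     "is_enabled": True,
#     "external_reference": "registry.example.com/namespace/repository",
#     "sync_interval": 86400,
#     "external_registry_username": "mirror-user",
#     "external_registry_password": "hunter2",
#     "external_registry_config": {
#         "verify_tls": True,
#         "unsigned_images": False,
#         "proxy": {"http_proxy": "http://proxy.example.com:3128", "no_proxy": None},
#     },
#     "root_rule": {"rule_kind": "tag_glob_csv", "rule_value": ["latest"]},
# }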
def _repo_verb(
    namespace, repository, tag_name, verb, formatter, sign=False, checker=None, **kwargs
):
    # Verify that the image exists and that we have access to it.
    logger.debug(
        "Verifying repo verb %s for repository %s/%s with user %s with mimetype %s",
        verb,
        namespace,
        repository,
        get_authenticated_user(),
        request.accept_mimetypes.best,
    )

    tag, manifest, schema1_manifest = _verify_repo_verb(
        storage, namespace, repository, tag_name, verb, checker
    )

    # Load the repository for later.
    repo = model.repository.get_repository(namespace, repository)
    if repo is None:
        abort(404)

    # Check for torrent, which is no longer supported.
    if request.accept_mimetypes.best == "application/x-bittorrent":
        abort(406)

    # Log the action.
    track_and_log("repo_verb", wrap_repository(repo), tag=tag.name, verb=verb, **kwargs)

    is_readonly = app.config.get("REGISTRY_STATE", "normal") == "readonly"

    # Lookup/create the derived image for the verb and repo image.
    if is_readonly:
        derived_image = registry_model.lookup_derived_image(
            manifest, verb, storage, varying_metadata={"tag": tag.name}, include_placements=True
        )
    else:
        derived_image = registry_model.lookup_or_create_derived_image(
            manifest,
            verb,
            storage.preferred_locations[0],
            storage,
            varying_metadata={"tag": tag.name},
            include_placements=True,
        )
        if derived_image is None:
            logger.error("Could not create or lookup a derived image for manifest %s", manifest)
            abort(400)

    if derived_image is not None and not derived_image.blob.uploading:
        logger.debug("Derived %s image %s exists in storage", verb, derived_image)
        is_head_request = request.method == "HEAD"

        if derived_image.blob.compressed_size:
            image_pulled_bytes.labels("verbs").inc(derived_image.blob.compressed_size)

        download_url = storage.get_direct_download_url(
            derived_image.blob.placements, derived_image.blob.storage_path, head=is_head_request
        )
        if download_url:
            logger.debug("Redirecting to download URL for derived %s image %s", verb, derived_image)
            return redirect(download_url)

        # Close the database handle here for this process before we send the long download.
        database.close_db_filter(None)

        logger.debug("Sending cached derived %s image %s", verb, derived_image)
        return send_file(
            storage.stream_read_file(
                derived_image.blob.placements, derived_image.blob.storage_path
            ),
            mimetype=LAYER_MIMETYPE,
        )

    logger.debug("Building and returning derived %s image", verb)
    hasher = SimpleHasher()

    # Close the database connection before any process forking occurs. This is important because
    # the Postgres driver does not react kindly to forking, so we need to make sure it is closed
    # so that each process will get its own unique connection.
    database.close_db_filter(None)

    def _cleanup():
        # Close any existing DB connection once the process has exited.
        database.close_db_filter(None)

    def _store_metadata_and_cleanup():
        if is_readonly:
            return

        with database.UseThenDisconnect(app.config):
            registry_model.set_derived_image_size(derived_image, hasher.hashed_bytes)

    # Create a queue process to generate the data. The queue files will read from the process
    # and send the results to the client and storage.
    unique_id = (
        derived_image.unique_id
        if derived_image is not None
        else hashlib.sha256(("%s:%s" % (verb, uuid.uuid4())).encode("utf-8")).hexdigest()
    )
    handlers = [hasher.update]
    reporter = VerbReporter(verb)
    args = (formatter, tag, schema1_manifest, unique_id, handlers, reporter)
    queue_process = QueueProcess(
        _open_stream,
        8 * 1024,
        10 * 1024 * 1024,  # 8K/10M chunk/max
        args,
        finished=_store_metadata_and_cleanup,
    )

    client_queue_file = QueueFile(
        queue_process.create_queue(), "client", timeout=QUEUE_FILE_TIMEOUT
    )

    if not is_readonly:
        storage_queue_file = QueueFile(
            queue_process.create_queue(), "storage", timeout=QUEUE_FILE_TIMEOUT
        )

    # If signing is required, add a QueueFile for signing the image as we stream it out.
    signing_queue_file = None
    if sign and signer.name:
        signing_queue_file = QueueFile(
            queue_process.create_queue(), "signing", timeout=QUEUE_FILE_TIMEOUT
        )

    # Start building.
    queue_process.run()

    # Start the storage saving.
    if not is_readonly:
        storage_args = (verb, derived_image, storage_queue_file, namespace, repository, tag_name)
        QueueProcess.run_process(_write_derived_image_to_storage, storage_args, finished=_cleanup)

    if sign and signer.name:
        signing_args = (verb, derived_image, signing_queue_file)
        QueueProcess.run_process(_sign_derived_image, signing_args, finished=_cleanup)

    # Close the database handle here for this process before we send the long download.
    database.close_db_filter(None)

    # Return the client's data.
    return send_file(client_queue_file, mimetype=LAYER_MIMETYPE)
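# Hedged sketch of how a verb endpoint might invoke _repo_verb with a formatter for the
# requested output. The blueprint name, route, decorators, and the
# SquashedDockerImageFormatter class are assumptions for illustration only, not confirmed
# parts of this module.
#
# @verbs.route("/squash/<namespace>/<repository>/<tag>", methods=["GET"])
# def get_squashed_tag(namespace, repository, tag):
#     return _repo_verb(namespace, repository, tag, "squash", SquashedDockerImageFormatter())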
def put(self, namespace_name, repository_name):
    """ Allow users to modify the repository's mirroring configuration. """
    values = request.get_json()

    repo = model.repository.get_repository(namespace_name, repository_name)
    if not repo:
        raise NotFound()

    mirror = model.repo_mirror.get_mirror(repo)
    if not mirror:
        raise NotFound()

    if 'is_enabled' in values:
        if values['is_enabled'] == True:
            if model.repo_mirror.enable_mirror(repo):
                track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                              changed='is_enabled', to=True)
        if values['is_enabled'] == False:
            if model.repo_mirror.disable_mirror(repo):
                track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                              changed='is_enabled', to=False)

    if 'external_reference' in values:
        if values['external_reference'] == '':
            return {'detail': 'Empty string is an invalid repository location.'}, 400

        if model.repo_mirror.change_remote(repo, values['external_reference']):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                          changed='external_reference', to=values['external_reference'])

    if 'robot_username' in values:
        robot_username = values['robot_username']
        robot = self._setup_robot_for_mirroring(namespace_name, repository_name, robot_username)
        if model.repo_mirror.set_mirroring_robot(repo, robot):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                          changed='robot_username', to=robot_username)

    if 'sync_start_date' in values:
        try:
            sync_start_date = self._string_to_dt(values['sync_start_date'])
        except ValueError as e:
            return {'detail': 'Incorrect DateTime format for sync_start_date.'}, 400

        if model.repo_mirror.change_sync_start_date(repo, sync_start_date):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                          changed='sync_start_date', to=sync_start_date)

    if 'sync_interval' in values:
        if model.repo_mirror.change_sync_interval(repo, values['sync_interval']):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                          changed='sync_interval', to=values['sync_interval'])

    if 'external_registry_username' in values and 'external_registry_password' in values:
        username = values['external_registry_username']
        password = values['external_registry_password']
        if username is None and password is not None:
            return {'detail': 'Unable to delete username while setting a password.'}, 400

        if model.repo_mirror.change_credentials(repo, username, password):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                          changed='external_registry_username', to=username)
            if password is None:
                track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                              changed='external_registry_password', to=None)
            else:
                track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                              changed='external_registry_password', to="********")

    elif 'external_registry_username' in values:
        username = values['external_registry_username']
        if model.repo_mirror.change_username(repo, username):
            track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                          changed='external_registry_username', to=username)

    # Do not allow specifying a password without setting a username
    if 'external_registry_password' in values and 'external_registry_username' not in values:
        return {'detail': 'Unable to set a new password without also specifying a username.'}, 400

    if 'external_registry_config' in values:
        external_registry_config = values.get('external_registry_config', {})

        if 'verify_tls' in external_registry_config:
            updates = {'verify_tls': external_registry_config['verify_tls']}
            if model.repo_mirror.change_external_registry_config(repo, updates):
                track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                              changed='verify_tls', to=external_registry_config['verify_tls'])

        if 'proxy' in external_registry_config:
            proxy_values = external_registry_config.get('proxy', {})

            if 'http_proxy' in proxy_values:
                updates = {'proxy': {'http_proxy': proxy_values['http_proxy']}}
                if model.repo_mirror.change_external_registry_config(repo, updates):
                    track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                                  changed='http_proxy', to=proxy_values['http_proxy'])

            if 'https_proxy' in proxy_values:
                updates = {'proxy': {'https_proxy': proxy_values['https_proxy']}}
                if model.repo_mirror.change_external_registry_config(repo, updates):
                    track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                                  changed='https_proxy', to=proxy_values['https_proxy'])

            if 'no_proxy' in proxy_values:
                updates = {'proxy': {'no_proxy': proxy_values['no_proxy']}}
                if model.repo_mirror.change_external_registry_config(repo, updates):
                    track_and_log('repo_mirror_config_changed', wrap_repository(repo),
                                  changed='no_proxy', to=proxy_values['no_proxy'])

    return '', 201
def _repo_verb(namespace, repository, tag_name, verb, formatter, sign=False, checker=None,
               **kwargs):
    # Verify that the image exists and that we have access to it.
    logger.debug(
        'Verifying repo verb %s for repository %s/%s with user %s with mimetype %s', verb,
        namespace, repository, get_authenticated_user(), request.accept_mimetypes.best)

    tag, manifest, schema1_manifest = _verify_repo_verb(
        storage, namespace, repository, tag_name, verb, checker)

    # Load the repository for later.
    repo = model.repository.get_repository(namespace, repository)
    if repo is None:
        abort(404)

    # Check for torrent. If found, we return a torrent for the repo verb image (if the derived
    # image already exists).
    if request.accept_mimetypes.best == 'application/x-bittorrent':
        metric_queue.repository_pull.Inc(
            labelvalues=[namespace, repository, verb + '+torrent', True])
        return _torrent_repo_verb(repo, tag, manifest, verb, **kwargs)

    # Log the action.
    track_and_log('repo_verb', wrap_repository(repo), tag=tag.name, verb=verb, **kwargs)
    metric_queue.repository_pull.Inc(labelvalues=[namespace, repository, verb, True])

    is_readonly = app.config.get('REGISTRY_STATE', 'normal') == 'readonly'

    # Lookup/create the derived image for the verb and repo image.
    if is_readonly:
        derived_image = registry_model.lookup_derived_image(
            manifest, verb, storage, varying_metadata={'tag': tag.name}, include_placements=True)
    else:
        derived_image = registry_model.lookup_or_create_derived_image(
            manifest, verb, storage.preferred_locations[0], storage,
            varying_metadata={'tag': tag.name}, include_placements=True)
        if derived_image is None:
            logger.error('Could not create or lookup a derived image for manifest %s', manifest)
            abort(400)

    if derived_image is not None and not derived_image.blob.uploading:
        logger.debug('Derived %s image %s exists in storage', verb, derived_image)
        is_head_request = request.method == 'HEAD'

        metric_queue.pull_byte_count.Inc(derived_image.blob.compressed_size, labelvalues=[verb])

        download_url = storage.get_direct_download_url(
            derived_image.blob.placements, derived_image.blob.storage_path, head=is_head_request)
        if download_url:
            logger.debug('Redirecting to download URL for derived %s image %s', verb,
                         derived_image)
            return redirect(download_url)

        # Close the database handle here for this process before we send the long download.
        database.close_db_filter(None)

        logger.debug('Sending cached derived %s image %s', verb, derived_image)
        return send_file(
            storage.stream_read_file(derived_image.blob.placements,
                                     derived_image.blob.storage_path),
            mimetype=LAYER_MIMETYPE)

    logger.debug('Building and returning derived %s image', verb)

    # Close the database connection before any process forking occurs. This is important because
    # the Postgres driver does not react kindly to forking, so we need to make sure it is closed
    # so that each process will get its own unique connection.
    database.close_db_filter(None)

    def _cleanup():
        # Close any existing DB connection once the process has exited.
        database.close_db_filter(None)

    hasher = PieceHasher(app.config['BITTORRENT_PIECE_SIZE'])

    def _store_metadata_and_cleanup():
        if is_readonly:
            return

        with database.UseThenDisconnect(app.config):
            registry_model.set_torrent_info(derived_image.blob,
                                            app.config['BITTORRENT_PIECE_SIZE'],
                                            hasher.final_piece_hashes())
            registry_model.set_derived_image_size(derived_image, hasher.hashed_bytes)

    # Create a queue process to generate the data. The queue files will read from the process
    # and send the results to the client and storage.
    unique_id = (derived_image.unique_id
                 if derived_image is not None
                 else hashlib.sha256('%s:%s' % (verb, uuid.uuid4())).hexdigest())
    handlers = [hasher.update]
    reporter = VerbReporter(verb)
    args = (formatter, tag, schema1_manifest, unique_id, handlers, reporter)
    queue_process = QueueProcess(
        _open_stream,
        8 * 1024,
        10 * 1024 * 1024,  # 8K/10M chunk/max
        args,
        finished=_store_metadata_and_cleanup)

    client_queue_file = QueueFile(queue_process.create_queue(), 'client')

    if not is_readonly:
        storage_queue_file = QueueFile(queue_process.create_queue(), 'storage')

    # If signing is required, add a QueueFile for signing the image as we stream it out.
    signing_queue_file = None
    if sign and signer.name:
        signing_queue_file = QueueFile(queue_process.create_queue(), 'signing')

    # Start building.
    queue_process.run()

    # Start the storage saving.
    if not is_readonly:
        storage_args = (verb, derived_image, storage_queue_file)
        QueueProcess.run_process(_write_derived_image_to_storage, storage_args, finished=_cleanup)

    if sign and signer.name:
        signing_args = (verb, derived_image, signing_queue_file)
        QueueProcess.run_process(_sign_derived_image, signing_args, finished=_cleanup)

    # Close the database handle here for this process before we send the long download.
    database.close_db_filter(None)

    # Return the client's data.
    return send_file(client_queue_file, mimetype=LAYER_MIMETYPE)