def _init_db_path_real_db(db_uri):
    """
    Populate a real (pre-migrated) database with test data and return its URI.

    This only inserts data; it does *not* create the tables. Callers must have
    run the migrations against the database before invoking the test suite.
    """
    test_db_config = {
        "DB_URI": db_uri,
        "SECRET_KEY": "superdupersecret!!!1",
        "DB_CONNECTION_ARGS": {
            "threadlocals": True,
            "autorollback": True,
        },
        "DB_TRANSACTION_FACTORY": _create_transaction,
        "DATABASE_SECRET_KEY": "anothercrazykey!",
    }

    configure(test_db_config)
    populate_database()
    return db_uri
def put(self):
    """
    Updates the config override file.
    """
    # NOTE: This method is called to set the database configuration before any super
    # user exists, so it must also work when there is no valid registry configuration
    # set up yet.
    new_config = request.get_json()["config"]

    # Fill in any enterprise defaults the caller omitted from the config.
    add_enterprise_config_defaults(new_config, app.config["SECRET_KEY"])

    # Persist the configuration changes to the config override file.
    config_provider.save_config(new_config)

    # Validate the saved configuration by re-configuring the app with the merged
    # result; this exercises the database connection supplied in their config.
    merged = dict(**app.config)
    merged.update(config_provider.get_config())
    configure(merged, testing=app.config["TESTING"])

    return {"exists": True, "config": new_config}
def initialized_db(appconfig): """ Configures the database for the database found in the appconfig. """ under_test_real_database = bool(os.environ.get("TEST_DATABASE_URI")) # Configure the database. configure(appconfig) # Initialize caches. model._basequery._lookup_team_roles() model._basequery.get_public_repo_visibility() model.log.get_log_entry_kinds() if not under_test_real_database: # Make absolutely sure foreign key constraints are on. db.obj.execute_sql("PRAGMA foreign_keys = ON;") db.obj.execute_sql('PRAGMA encoding="UTF-8";') assert db.obj.execute_sql("PRAGMA foreign_keys;").fetchone()[0] == 1 assert db.obj.execute_sql("PRAGMA encoding;").fetchone()[0] == "UTF-8" # If under a test *real* database, setup a savepoint. if under_test_real_database: with db.transaction(): test_savepoint = db.savepoint() test_savepoint.__enter__() yield # Run the test. try: test_savepoint.rollback() test_savepoint.__exit__(None, None, None) except InternalError: # If postgres fails with an exception (like IntegrityError) mid-transaction, it terminates # it immediately, so when we go to remove the savepoint, it complains. We can safely ignore # this case. pass else: if os.environ.get("DISALLOW_AUTO_JOINS", "false").lower() == "true": # Patch get_rel_instance to fail if we try to load any non-joined foreign key. This will allow # us to catch missing joins when running tests. def get_rel_instance(self, instance): value = instance.__data__.get(self.name) if value is not None or self.name in instance.__rel__: if self.name not in instance.__rel__: # NOTE: We only raise an exception if this auto-lookup occurs from non-testing code. # Testing code can be a bit inefficient. lookup_allowed = False try: outerframes = inspect.getouterframes( inspect.currentframe()) except IndexError: # Happens due to a bug in Jinja. 
outerframes = [] for allowed_auto_join in ALLOWED_AUTO_JOINS: if lookup_allowed: break if (len(outerframes) >= allowed_auto_join.frame_start_index + CALLER_FRAMES_OFFSET): found_match = True for index, pattern_prefix in enumerate( allowed_auto_join.pattern_prefixes): frame_info = outerframes[ index + CALLER_FRAMES_OFFSET] if not frame_info[ FRAME_NAME_INDEX].startswith( pattern_prefix): found_match = False break if found_match: lookup_allowed = True break if not lookup_allowed: raise Exception( "Missing join on instance `%s` for field `%s`", instance, self.name) obj = self.rel_model.get(self.field.rel_field == value) instance.__rel__[self.name] = obj return instance.__rel__[self.name] elif not self.field.null: raise self.rel_model.DoesNotExist return value with patch("peewee.ForeignKeyAccessor.get_rel_instance", get_rel_instance): yield else: yield
def _reload_config():
    """
    Re-apply configuration from the app config merged with the config provider's
    overrides, and return the merged configuration dict.
    """
    merged = dict(**app.config)
    merged.update(config_provider.get_config())

    configure(merged)
    return merged
tuf_metadata_api = TUFMetadataAPI(app, app.config) # Check for a key in config. If none found, generate a new signing key for Docker V2 manifests. _v2_key_path = os.path.join(OVERRIDE_CONFIG_DIRECTORY, DOCKER_V2_SIGNINGKEY_FILENAME) if os.path.exists(_v2_key_path): docker_v2_signing_key = RSAKey().load(_v2_key_path) else: docker_v2_signing_key = RSAKey(key=RSA.generate(2048)) # Configure the database. if app.config.get("DATABASE_SECRET_KEY") is None and app.config.get("SETUP_COMPLETE", False): raise Exception("Missing DATABASE_SECRET_KEY in config; did you perhaps forget to add it?") database.configure(app.config) model.config.app_config = app.config model.config.store = storage model.config.register_repo_cleanup_callback(tuf_metadata_api.delete_metadata) secscan_model.configure(app, instance_keys, storage) secscan_model.register_model_cleanup_callbacks(model.config) logs_model.configure(app.config) @login_manager.user_loader def load_user(user_uuid): logger.debug("User loader loading deferred user with uuid: %s", user_uuid) return LoginWrappedDBUser(user_uuid)
def test_readreplica(init_db_path, tmpdir_factory):
    """
    Exercises read-replica routing: reads served by the replica survive a broken
    primary, writes fail against a broken primary, and readonly mode rejects all
    write operations. The steps are strictly order-dependent (each `configure`
    call changes global DB state for the assertions that follow).
    """
    primary_file = str(tmpdir_factory.mktemp("data").join("primary.db"))
    replica_file = str(tmpdir_factory.mktemp("data").join("replica.db"))

    # Copy the initialized database to two different locations.
    shutil.copy2(init_db_path, primary_file)
    shutil.copy2(init_db_path, replica_file)

    db_config = {
        "DB_URI": "sqlite:///{0}".format(primary_file),
        "DB_READ_REPLICAS": [{"DB_URI": "sqlite:///{0}".format(replica_file)},],
        "DB_CONNECTION_ARGS": {"threadlocals": True, "autorollback": True,},
        "DB_TRANSACTION_FACTORY": lambda x: FakeTransaction(),
        "FOR_TESTING": True,
        "DATABASE_SECRET_KEY": "anothercrazykey!",
    }

    # Initialize the DB with the primary and the replica.
    configure(db_config)
    assert not read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    # Ensure we can read the data.
    devtable_user = User.get(username="******")
    assert devtable_user.username == "devtable"

    # Configure with a bad primary. Reading should still work since we're hitting the replica.
    db_config["DB_URI"] = "sqlite:///does/not/exist"
    configure(db_config)

    assert not read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    devtable_user = User.get(username="******")
    assert devtable_user.username == "devtable"

    # Force us to hit the master and ensure it doesn't work.
    with db_disallow_replica_use():
        with pytest.raises(OperationalError):
            User.get(username="******")

    # Test read replica again.
    devtable_user = User.get(username="******")
    assert devtable_user.username == "devtable"

    # Try to change some data. This should fail because the primary is broken.
    with pytest.raises(OperationalError):
        devtable_user.email = "newlychanged"
        devtable_user.save()

    # Fix the primary and try again.
    db_config["DB_URI"] = "sqlite:///{0}".format(primary_file)
    configure(db_config)

    assert not read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    # The write should now succeed against the restored primary.
    devtable_user.email = "newlychanged"
    devtable_user.save()

    # Mark the system as readonly.
    db_config["DB_URI"] = "sqlite:///{0}".format(primary_file)
    db_config["REGISTRY_STATE"] = "readonly"
    configure(db_config)

    assert read_only_config.obj.is_readonly
    assert read_only_config.obj.read_replicas

    # Ensure all write operations raise a readonly mode exception.
    with pytest.raises(ReadOnlyModeException):
        devtable_user.email = "newlychanged2"
        devtable_user.save()

    with pytest.raises(ReadOnlyModeException):
        User.create(username="******")

    with pytest.raises(ReadOnlyModeException):
        User.delete().where(User.username == "foo").execute()

    with pytest.raises(ReadOnlyModeException):
        User.update(username="******").where(User.username == "foo").execute()

    # Reset the config on the DB, so we don't mess up other tests.
    configure(
        {
            "DB_URI": "sqlite:///{0}".format(primary_file),
            "DB_CONNECTION_ARGS": {"threadlocals": True, "autorollback": True,},
            "DB_TRANSACTION_FACTORY": lambda x: FakeTransaction(),
            "DATABASE_SECRET_KEY": "anothercrazykey!",
        }
    )
import argparse import logging import json from app import app from data import model from data.database import RepositoryBuildTrigger, configure from data.model.build import update_build_trigger configure(app.config) logger = logging.getLogger(__name__) def run_branchregex_migration(): encountered = set() while True: found = list( RepositoryBuildTrigger.select().where( RepositoryBuildTrigger.config ** "%branch_regex%", ~(RepositoryBuildTrigger.config ** "%branchtag_regex%"), ) ) found = [f for f in found if not f.uuid in encountered] if not found: logger.debug("No additional records found") return logger.debug("Found %s records to be changed", len(found)) for trigger in found: