def database_conf(db_path, prefix="GALAXY", prefer_template_database=False):
    """Find (and populate if needed) a Galaxy database connection.

    The connection URI is resolved in this order:

    1. The ``<prefix>_TEST_DBURI`` environment variable. For PostgreSQL
       URIs with ``prefer_template_database=True``, the database named in
       the URI is treated as a template and a randomly named copy is
       created for this run.
    2. Otherwise an SQLite database file under ``db_path``, optionally
       seeded from the file named by ``<prefix>_TEST_DB_TEMPLATE``.

    :param db_path: directory holding the fallback SQLite database file
    :param prefix: environment-variable prefix, e.g. ``GALAXY``
    :param prefer_template_database: clone a template PostgreSQL database
        instead of using the URI's database directly
    :returns: dict of database-related configuration options
    """
    database_auto_migrate = False
    check_migrate_databases = True
    dburi_var = "%s_TEST_DBURI" % prefix
    template_name = None
    if dburi_var in os.environ:
        database_connection = os.environ[dburi_var]
        # Only template if postgres - not mysql or sqlite.
        # FIX: previously this checked startswith("p"), which was fragile;
        # match the actual postgres/postgresql dialect prefixes instead.
        do_template = prefer_template_database and database_connection.startswith("postgres")
        if do_template:
            database_template_parsed = urlparse(database_connection)
            template_name = database_template_parsed.path[1:]  # drop / from /galaxy
            actual_db = "gxtest" + ''.join(random.choice(string.ascii_uppercase) for _ in range(10))
            actual_database_parsed = database_template_parsed._replace(path="/%s" % actual_db)
            database_connection = actual_database_parsed.geturl()
            if not database_exists(database_connection):
                # We pass by migrations and instantiate the current tables directly.
                create_database(database_connection)
                mapping.init('/tmp', database_connection, create_tables=True, map_install_models=True)
                toolshed_mapping.init(database_connection, create_tables=True)
                check_migrate_databases = False
    else:
        default_db_filename = "%s.sqlite" % prefix.lower()
        template_var = "%s_TEST_DB_TEMPLATE" % prefix
        db_path = os.path.join(db_path, default_db_filename)
        if template_var in os.environ:
            # Middle ground between recreating a completely new
            # database and pointing at existing database with
            # GALAXY_TEST_DBURI. The former requires a lot of setup
            # time, the latter results in test failures in certain
            # cases (namely tool shed tests expecting clean database).
            copy_database_template(os.environ[template_var], db_path)
            database_auto_migrate = True
        database_connection = 'sqlite:///%s' % db_path
    config = {
        "check_migrate_databases": check_migrate_databases,
        "database_connection": database_connection,
        "database_auto_migrate": database_auto_migrate,
    }
    if not database_connection.startswith("sqlite://"):
        # Connection-pool tuning only applies to server-based databases.
        config["database_engine_option_max_overflow"] = "20"
        config["database_engine_option_pool_size"] = "10"
    if template_name:
        config["database_template"] = template_name
    return config
def setUp( self ):
    """Prepare a test app backed by a fresh in-memory install database."""
    self.reindexed = False
    self.setup_app( mock_model=False )
    # Each test gets its own throwaway install model.
    self.app.install_model = mapping.init( "sqlite:///:memory:", create_tables=True )
    self.app.reindex_tool_search = self.__reindex
    self.app.config.integrated_tool_panel_config = os.path.join(
        self.test_directory, "integrated_tool_panel.xml" )
    self.__toolbox = None
    self.config_files = []
def _configure_models( self, check_migrate_databases=False, check_migrate_tools=False, config_file=None ):
    """
    Set up the Galaxy model and the tool shed install model.

    Preconditions: object_store must be set on self.

    :param check_migrate_databases: run schema creation/verification for the
        Galaxy (and, if separate, install) databases before building models
    :param check_migrate_tools: warn the admin about tools migrated from the
        distribution to the tool shed
    :param config_file: path passed through to the migration helpers
    """
    # Fall back to an SQLite file when no explicit connection is configured.
    if self.config.database_connection:
        db_url = self.config.database_connection
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    install_db_url = self.config.install_database_connection
    # TODO: Consider more aggressive check here that this is not the same
    # database file under the hood.
    # "Combined" means the install model shares Galaxy's database (no
    # distinct install URL was configured).
    combined_install_database = not( install_db_url and install_db_url != db_url )
    install_db_url = install_db_url or db_url
    if check_migrate_databases:
        # Initialize database / check for appropriate schema version.
        from galaxy.model.migrate.check import create_or_verify_database
        create_or_verify_database( db_url, config_file, self.config.database_engine_options, app=self )
        if not combined_install_database:
            # Separate install database gets its own schema verification.
            from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
            tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )
    if check_migrate_tools:
        # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
        from tool_shed.galaxy_install.migrate.check import verify_tools
        if combined_install_database:
            install_database_options = self.config.database_engine_options
        else:
            install_database_options = self.config.install_database_engine_options
        verify_tools( self, install_db_url, config_file, install_database_options )
    from galaxy.model import mapping
    # Map the Galaxy model; when combined, the install tables are mapped here too.
    self.model = mapping.init( self.config.file_path,
                               db_url,
                               self.config.database_engine_options,
                               map_install_models=combined_install_database,
                               database_query_profiling_proxy=self.config.database_query_profiling_proxy,
                               object_store=self.object_store,
                               trace_logger=getattr(self, "trace_logger", None),
                               use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True ) )
    if combined_install_database:
        log.info("Install database targetting Galaxy's database configuration.")
        self.install_model = self.model
    else:
        from galaxy.model.tool_shed_install import mapping as install_mapping
        install_db_url = self.config.install_database_connection
        log.info("Install database using its own connection %s" % install_db_url)
        install_db_engine_options = self.config.install_database_engine_options
        self.install_model = install_mapping.init( install_db_url, install_db_engine_options )
def _configure_models(self, check_migrate_databases=False, config_file=None):
    """Preconditions: object_store must be set on self."""
    config = self.config
    db_url = config.database_connection
    install_db_url = config.install_database_connection
    # TODO: Consider more aggressive check here that this is not the same
    # database file under the hood.
    # The install model shares Galaxy's database unless a distinct URL was given.
    combined_install_database = not (install_db_url and install_db_url != db_url)
    install_db_url = install_db_url or db_url
    if combined_install_database:
        install_database_options = config.database_engine_options
    else:
        install_database_options = config.install_database_engine_options
    if config.database_wait:
        self._wait_for_database(db_url)
    if getattr(config, "max_metadata_value_size", None):
        custom_types.MAX_METADATA_VALUE_SIZE = config.max_metadata_value_size
    if check_migrate_databases:
        # Initialize database / check for appropriate schema version.
        create_or_verify_database(
            db_url, config_file, config.database_engine_options, app=self,
            map_install_models=combined_install_database)
        if not combined_install_database:
            tsi_create_or_verify_database(install_db_url, install_database_options, app=self)
    self.model = init_models_from_config(
        config,
        map_install_models=combined_install_database,
        object_store=self.object_store,
        trace_logger=getattr(self, "trace_logger", None))
    if combined_install_database:
        log.info("Install database targetting Galaxy's database configuration.")
        self.install_model = self.model
    else:
        install_db_url = config.install_database_connection
        log.info(f"Install database using its own connection {install_db_url}")
        self.install_model = install_mapping.init(install_db_url, install_database_options)
def create_database(config_file):
    """Build an install-model session and model from a Galaxy config file.

    The connection URI is resolved in order of preference:
    ``install_database_connection``, ``database_connection``, then an SQLite
    URI derived from ``database_file``.

    :returns: tuple of ``(install_session, model)`` bound to that database
    """
    parser = ConfigParser.SafeConfigParser()
    parser.read(config_file)

    def _option(name):
        # FIX: ConfigParser.get raises NoSuchOptionError for a missing
        # option instead of returning None, so the fallback chain below
        # could never advance; guard with has_option.
        if parser.has_option('app:main', name):
            return parser.get('app:main', name)
        return None

    # Determine which database connection to use.
    database_connection = _option('install_database_connection')
    if database_connection is None:
        database_connection = _option('database_connection')
    if database_connection is None:
        database_file = _option('database_file')
        if database_file is not None:
            database_connection = 'sqlite:///%s' % database_file
    if database_connection is None:
        print('Unable to determine correct database connection.')
        exit(1)

    # Load the egg matching the connection's SQLAlchemy dialect.
    dialect_to_egg = {
        "sqlite": "pysqlite>=2",
        "postgres": "psycopg2",
        "postgresql": "psycopg2",
        "mysql": "MySQL_python"
    }
    dialect = (database_connection.split(':', 1))[0]
    try:
        egg = dialect_to_egg[dialect]
        try:
            eggs.require(egg)
            print("%s egg successfully loaded for %s dialect" % (egg, dialect))
        except Exception:
            # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
            print("%s egg not found, but an attempt will be made to use %s anyway" % (egg, dialect))
    except KeyError:
        # Let this go, it could possibly work with db's we don't support.
        print("database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect)

    # Initialize the database connection.
    engine = create_engine(database_connection)
    MetaData(bind=engine)
    install_session = scoped_session(sessionmaker(bind=engine, autoflush=False, autocommit=True))
    model = mapping.init(database_connection)
    return install_session, model
def create_database(config_file):
    """Create an install-model session and model from a Galaxy config file.

    The connection URI is resolved in order of preference:
    ``install_database_connection``, ``database_connection``, then an SQLite
    URI derived from ``database_file``.

    :returns: tuple of ``(install_session, model)`` bound to that database
    """
    parser = ConfigParser()
    parser.read(config_file)
    # Determine which database connection to use.
    # FIX: ConfigParser.get() raises NoSuchOptionError for a missing option
    # rather than returning None; pass fallback=None so the chain below can
    # actually reach its later choices.
    database_connection = parser.get('app:main', 'install_database_connection', fallback=None)
    if database_connection is None:
        database_connection = parser.get('app:main', 'database_connection', fallback=None)
    if database_connection is None:
        database_file = parser.get('app:main', 'database_file', fallback=None)
        if database_file is not None:
            database_connection = 'sqlite:///%s' % database_file
    if database_connection is None:
        print('Unable to determine correct database connection.')
        exit(1)
    # Initialize the database connection.
    engine = create_engine(database_connection)
    MetaData(bind=engine)
    install_session = scoped_session(sessionmaker(bind=engine, autoflush=False, autocommit=True))
    model = mapping.init(database_connection)
    return install_session, model
def create_database( config_file ):
    """Initialize the install database connection from a Galaxy config file.

    Resolves the connection in order of preference
    (``install_database_connection``, ``database_connection``, SQLite URI
    from ``database_file``) and returns ``(install_session, model)``.
    """
    parser = ConfigParser.SafeConfigParser()
    parser.read( config_file )

    def _option( name ):
        # FIX: ConfigParser.get raises NoSuchOptionError for a missing
        # option instead of returning None, so the fallback chain below
        # could never advance; guard with has_option.
        if parser.has_option( 'app:main', name ):
            return parser.get( 'app:main', name )
        return None

    # Determine which database connection to use.
    database_connection = _option( 'install_database_connection' )
    if database_connection is None:
        database_connection = _option( 'database_connection' )
    if database_connection is None:
        database_file = _option( 'database_file' )
        if database_file is not None:
            database_connection = 'sqlite:///%s' % database_file
    if database_connection is None:
        print( 'Unable to determine correct database connection.' )
        exit(1)
    # Initialize the database connection.
    engine = create_engine( database_connection )
    MetaData( bind=engine )
    install_session = scoped_session( sessionmaker( bind=engine, autoflush=False, autocommit=True ) )
    model = mapping.init( database_connection )
    return install_session, model
def create_database(config_file):
    """Initialize the install database connection from a Galaxy config file.

    Resolves the connection in order of preference
    (``install_database_connection``, ``database_connection``, SQLite URI
    from ``database_file``) and returns ``(install_session, model)``.
    """
    parser = ConfigParser.SafeConfigParser()
    parser.read(config_file)

    def _option(name):
        # FIX: ConfigParser.get raises NoSuchOptionError for a missing
        # option instead of returning None, so the fallback chain below
        # could never advance; guard with has_option.
        if parser.has_option("app:main", name):
            return parser.get("app:main", name)
        return None

    # Determine which database connection to use.
    database_connection = _option("install_database_connection")
    if database_connection is None:
        database_connection = _option("database_connection")
    if database_connection is None:
        database_file = _option("database_file")
        if database_file is not None:
            database_connection = "sqlite:///%s" % database_file
    if database_connection is None:
        print("Unable to determine correct database connection.")
        exit(1)

    # Load the egg matching the connection's SQLAlchemy dialect.
    dialect_to_egg = {
        "sqlite": "pysqlite>=2",
        "postgres": "psycopg2",
        "postgresql": "psycopg2",
        "mysql": "MySQL_python",
    }
    dialect = (database_connection.split(":", 1))[0]
    try:
        egg = dialect_to_egg[dialect]
        try:
            eggs.require(egg)
            print("%s egg successfully loaded for %s dialect" % (egg, dialect))
        except Exception:
            # If the module is in the path elsewhere (i.e. non-egg), it'll still load.
            print("%s egg not found, but an attempt will be made to use %s anyway" % (egg, dialect))
    except KeyError:
        # Let this go, it could possibly work with db's we don't support.
        print("database_connection contains an unknown SQLAlchemy database dialect: %s" % dialect)

    # Initialize the database connection.
    engine = create_engine(database_connection)
    MetaData(bind=engine)
    install_session = scoped_session(sessionmaker(bind=engine, autoflush=False, autocommit=True))
    model = mapping.init(database_connection)
    return install_session, model
def __init__( self, config ):
    """Keep ``config`` and bind the install model to its database."""
    self.config = config
    # Setup the database engine and ORM; tables are assumed to exist already.
    db_connection = config.database_connection
    self.model = install_mapper.init( db_connection,
                                      engine_options={},
                                      create_tables=False )
def mock_app():
    """Return a Mock application wired to a fresh in-memory install model."""
    mocked = Mock()
    mocked.install_model = mapping.init("sqlite:///:memory:", create_tables=True)
    return mocked
def create_or_verify_database(url, engine_options=None, app=None):
    """Create the install database if missing and bring its schema current.

    Handles four cases: auto-migrate configurations, a completely empty
    database (tables created directly, then stamped at the latest version),
    an existing database not yet under migrate version control (stamped at
    version 1 or 2), and an up-to-date database (version verified).

    :param url: SQLAlchemy database URL
    :param engine_options: keyword options passed to ``create_engine``
    :param app: optional application; ``app.config.database_auto_migrate``
        triggers an unconditional migration
    :raises Exception: when the database schema version lags the code's
    """
    # Create engine and metadata
    engine_options = engine_options or {}
    if not database_exists(url):
        message = "Creating database for URI [%s]" % url
        log.info(message)
        create_database(url)
    engine = create_engine(url, **engine_options)

    def migrate():
        # Put the database under version control (or attach to existing
        # control) and apply all scripts to reach the current version.
        try:
            # Declare the database to be under a repository's version control
            db_schema = schema.ControlledSchema.create(engine, migrate_repository)
        except Exception:
            # The database is already under version control
            db_schema = schema.ControlledSchema(engine, migrate_repository)
        # Apply all scripts to get to current version
        migrate_to_current_version(engine, db_schema)

    meta = MetaData(bind=engine)
    if app and getattr(app.config, 'database_auto_migrate', False):
        migrate()
        return

    # Try to load tool_shed_repository table
    try:
        Table("tool_shed_repository", meta, autoload=True)
    except NoSuchTableError:
        # No table means a completely uninitialized database.
        # NOTE(review): an earlier comment mentioned setting a
        # new_installation flag on app here, but no such assignment exists —
        # confirm against callers before relying on it.
        log.info("Creating install database from scratch, skipping migrations")
        mapping.init(url=url, create_tables=True)
        current_version = migrate_repository.version().version
        schema.ControlledSchema.create(engine, migrate_repository, version=current_version)
        db_schema = schema.ControlledSchema(engine, migrate_repository)
        assert db_schema.version == current_version
        migrate()
        return

    try:
        Table("migrate_version", meta, autoload=True)
    except NoSuchTableError:
        # The database exists but is not yet under migrate version control, so init with version 1
        log.info("Adding version control to existing database")
        try:
            # Presence of metadata_file distinguishes a version-2 schema.
            Table("metadata_file", meta, autoload=True)
            schema.ControlledSchema.create(engine, migrate_repository, version=2)
        except NoSuchTableError:
            schema.ControlledSchema.create(engine, migrate_repository, version=1)

    # Verify that the code and the DB are in sync
    db_schema = schema.ControlledSchema(engine, migrate_repository)
    if migrate_repository.versions.latest != db_schema.version:
        exception_msg = "Your database has version '%d' but this code expects version '%d'. " % (
            db_schema.version, migrate_repository.versions.latest)
        exception_msg += "Back up your database and then migrate the schema by running the following from your Galaxy installation directory:"
        exception_msg += "\n\nsh manage_db.sh upgrade install\n"
        # FIX: the message was previously constructed but never used, so a
        # stale schema went completely unreported; surface it to the caller.
        raise Exception(exception_msg)
    else:
        log.info("At database version %d" % db_schema.version)
def __init__(self, config):
    """Store ``config`` and build the install model bound to its database."""
    self.config = config
    # Set up the database engine and ORM against the existing tables.
    self.model = install_mapper.init(
        config.database_connection,
        engine_options={},
        create_tables=False)