def database_conf(db_path, prefix="GALAXY", prefer_template_database=False):
    """Find (and populate if needed) Galaxy database connection."""
    database_auto_migrate = False
    check_migrate_databases = True
    dburi_var = "%s_TEST_DBURI" % prefix
    template_name = None
    if dburi_var in os.environ:
        database_connection = os.environ[dburi_var]
        # only template if postgres - not mysql or sqlite
        do_template = prefer_template_database and database_connection.startswith("p")
        if do_template:
            database_template_parsed = urlparse(database_connection)
            template_name = database_template_parsed.path[1:]  # drop / from /galaxy
            actual_db = "gxtest" + ''.join(random.choice(string.ascii_uppercase) for _ in range(10))
            actual_database_parsed = database_template_parsed._replace(path="/%s" % actual_db)
            database_connection = actual_database_parsed.geturl()
            if not database_exists(database_connection):
                # We pass by migrations and instantiate the current table
                create_database(database_connection)
                mapping.init('/tmp', database_connection, create_tables=True, map_install_models=True)
                toolshed_mapping.init(database_connection, create_tables=True)
                check_migrate_databases = False
    else:
        default_db_filename = "%s.sqlite" % prefix.lower()
        template_var = "%s_TEST_DB_TEMPLATE" % prefix
        db_path = os.path.join(db_path, default_db_filename)
        if template_var in os.environ:
            # Middle ground between recreating a completely new
            # database and pointing at existing database with
            # GALAXY_TEST_DBURI. The former requires a lot of setup
            # time, the latter results in test failures in certain
            # cases (namely tool shed tests expecting clean database).
            copy_database_template(os.environ[template_var], db_path)
            database_auto_migrate = True
        database_connection = 'sqlite:///%s' % db_path
    config = {
        "check_migrate_databases": check_migrate_databases,
        "database_connection": database_connection,
        "database_auto_migrate": database_auto_migrate,
    }
    if not database_connection.startswith("sqlite://"):
        config["database_engine_option_max_overflow"] = "20"
        config["database_engine_option_pool_size"] = "10"
    if template_name:
        config["database_template"] = template_name
    return config
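A minimal usage sketch (not part of the original source), assuming neither GALAXY_TEST_DBURI nor GALAXY_TEST_DB_TEMPLATE is set, so database_conf() falls through to the SQLite branch:

import tempfile

config = database_conf(tempfile.mkdtemp(), prefix="GALAXY")
assert config["database_connection"].startswith("sqlite:///")
assert config["check_migrate_databases"] is True
assert config["database_auto_migrate"] is False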
def main(argv=None):
    if argv is None:
        argv = sys.argv[1:]
    args = _arg_parser().parse_args(argv)
    object_store_config = Bunch(
        object_store_store_by="uuid",
        object_store_config_file=args.object_store_config,
        object_store_check_old_style=False,
        jobs_directory=None,
        new_file_path=None,
        umask=os.umask(0o77),
        gid=os.getgid(),
    )
    object_store = build_object_store_from_config(object_store_config)
    galaxy.model.Dataset.object_store = object_store
    galaxy.model.set_datatypes_registry(example_datatype_registry_for_sample())

    from galaxy.model import mapping
    mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=object_store)

    with open(args.objects) as f:
        targets = yaml.safe_load(f)
    if not isinstance(targets, list):
        targets = [targets]

    export_path = args.export
    export_type = args.export_type
    if export_type is None:
        export_type = "directory" if not export_path.endswith(".tgz") else "bag_archive"

    export_types = {
        "directory": store.DirectoryModelExportStore,
        "tar": store.TarModelExportStore,
        "bag_directory": store.BagDirectoryModelExportStore,
        "bag_archive": store.BagArchiveModelExportStore,
    }
    store_class = export_types[export_type]
    export_kwds = {
        "serialize_dataset_objects": True,
    }
    with store_class(export_path, **export_kwds) as export_store:
        for target in targets:
            persist_target_to_export_store(target, export_store, object_store, ".")
def _init(config, need_app=False):
    if config.startswith('/'):
        config_file = os.path.abspath(config)
    else:
        config_file = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, config))

    properties = load_app_properties(ini_file=config_file)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.ini")

    if need_app:
        app = galaxy.app.UniverseApplication(global_conf={'__file__': config_file, 'here': os.getcwd()})
    else:
        app = None

    return (
        mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
        object_store,
        config.database_connection.split(':')[0],
        config,
        app,
    )
def init():
    options.config = os.path.abspath( options.config )
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )

    from galaxy import eggs
    import pkg_resources

    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config

    # lazy
    globals()['nice_size'] = __import__( 'galaxy.util', globals(), locals(), ( 'nice_size', ) ).nice_size

    config_parser = ConfigParser( dict( here = os.getcwd(),
                                        database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( os.path.basename( options.config ) )

    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value
    config = galaxy.config.Configuration( **config_dict )

    object_store = build_object_store_from_config( config )

    from galaxy.model import mapping

    return mapping.init( config.file_path, config.database_connection, create_tables = False, object_store = object_store ), object_store, config.database_connection.split(':')[0]
def __init__( self, **kwargs ):
    self.config = MockAppConfig( **kwargs )
    self.security = self.config.security
    self.object_store = objectstore.build_object_store_from_config( self.config )
    self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store )
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
def __init__(self):
    self.config = bunch.Bunch(tool_secret="awesome_secret")
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True)
    self.toolbox = TestToolbox()
    self.datatypes_registry = TestDatatypesRegistry()
def init():
    options.config = os.path.abspath( options.config )
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None

    sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), '..', 'lib' ) )

    from galaxy import eggs
    import pkg_resources

    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config

    # lazy
    globals()['nice_size'] = __import__( 'galaxy.util', globals(), locals(), ( 'nice_size', ) ).nice_size

    config_parser = ConfigParser( dict( here = os.getcwd(),
                                        database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( options.config )

    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value
    config = galaxy.config.Configuration( **config_dict )

    object_store = build_object_store_from_config( config )

    from galaxy.model import mapping

    return mapping.init( config.file_path, config.database_connection, create_tables = False, object_store = object_store ), object_store, config.database_connection.split(':')[0]
def __init__(self):
    self.config = bunch.Bunch()
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True)
def init():
    options.config = os.path.abspath(options.config)
    if options.username == "all":
        options.username = None
    if options.email == "all":
        options.email = None

    config_parser = ConfigParser(
        dict(here=os.getcwd(), database_connection="sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE")
    )
    config_parser.read(options.config)
    config_dict = {}
    for key, value in config_parser.items("app:main"):
        config_dict[key] = value
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)

    from galaxy.model import mapping

    return (
        mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
        object_store,
        config.database_connection.split(":")[0],
    )
def _init(config, need_app=False):
    if config.startswith('/'):
        config_file = os.path.abspath(config)
    else:
        config_file = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, config))

    properties = load_app_properties(ini_file=config_file)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.ini")

    if need_app:
        app = galaxy.app.UniverseApplication(global_conf={'__file__': config_file, 'here': os.getcwd()})
    else:
        app = None

    return (
        mapping.init(
            config.file_path,
            config.database_connection,
            create_tables=False,
            object_store=object_store
        ),
        object_store,
        config.database_connection.split(':')[0],
        config,
        app
    )
def __init__( self ):
    self.config = bunch.Bunch()
    self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
def init():
    options.config = os.path.abspath( options.config )
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None
    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )

    from galaxy import eggs
    import pkg_resources

    import galaxy.config
    from galaxy.objectstore import build_object_store_from_config

    config_parser = ConfigParser( dict( here = os.getcwd(),
                                        database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config_parser.read( os.path.basename( options.config ) )

    config_dict = {}
    for key, value in config_parser.items( "app:main" ):
        config_dict[key] = value
    config = galaxy.config.Configuration( **config_dict )

    object_store = build_object_store_from_config( config )

    from galaxy.model import mapping

    return mapping.init( config.file_path, config.database_connection, create_tables = False, object_store = object_store ), object_store
def migrate_from_scratch():
    log.info("Creating new database from scratch, skipping migrations")
    current_version = migrate_repository.version().version
    mapping.init(file_path='/tmp', url=url, map_install_models=map_install_models, create_tables=True)
    schema.ControlledSchema.create(engine, migrate_repository, version=current_version)
    db_schema = schema.ControlledSchema(engine, migrate_repository)
    assert db_schema.version == current_version
    migrate()
    if app:
        # skips the tool migration process.
        app.new_installation = True
def __init__(self, config=None, **kwargs):
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagHandler(self.model.context)
    self.quota_agent = quota.DatabaseQuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(dynamic_params=None, destinations={})
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.tool_shed_registry = Bunch(tool_sheds={})
    self.genome_builds = GenomeBuilds(self)
    self.job_manager = NoopManager()
    self.application_stack = ApplicationStack()
    self.auth_manager = AuthManager(self)
    self.user_manager = UserManager(self)
    self.execution_timer_factory = Bunch(get_timer=StructuredExecutionTimer)

    def url_for(*args, **kwds):
        return "/mock/url"

    self.url_for = url_for
def init():
    options.config = os.path.abspath(options.config)
    if options.username == 'all':
        options.username = None
    if options.email == 'all':
        options.email = None

    config_parser = ConfigParser(
        dict(here=os.getcwd(),
             database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config_parser.read(options.config)
    config_dict = {}
    for key, value in config_parser.items("app:main"):
        config_dict[key] = value
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)

    from galaxy.model import mapping

    return (mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
            object_store,
            config.database_connection.split(':')[0])
def setUpClass(cls):
    # Start the database and connect the mapping
    cls.model = mapping.init("/tmp", cls._db_uri(), create_tables=True, object_store=MockObjectStore())
    assert cls.model.engine is not None
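A hedged sketch (class names are assumed, not from the original file) of the kind of test case this setUpClass hook belongs to, supplying the _db_uri() it calls:

class InMemoryMappingTestCase(BaseModelTestCase):  # BaseModelTestCase is assumed
    @classmethod
    def _db_uri(cls):
        # point the mapping at a throwaway in-memory SQLite database
        return "sqlite:///:memory:"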
def __init__(self):
    self.config = bunch.Bunch(
        log_events=False,
        log_actions=False,
    )
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True)
def init():
    options.config = os.path.abspath( options.config )
    config = ConfigParser( dict( file_path='database/files',
                                 database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config.read( options.config )
    return mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables=False )
def __init__( self, **kwargs ):
    print >> sys.stderr, "python path is: " + ", ".join( sys.path )
    # Read config file and check for errors
    self.config = config.Configuration( **kwargs )
    self.config.check()
    config.configure_logging( self.config )
    # Set up datatypes registry
    self.datatypes_registry = galaxy.datatypes.registry.Registry( self.config.root, self.config.datatypes_config )
    galaxy.model.set_datatypes_registry( self.datatypes_registry )
    # Determine the database url
    if self.config.database_connection:
        db_url = self.config.database_connection
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    # Initialize database / check for appropriate schema version
    from galaxy.model.migrate.check import create_or_verify_database
    create_or_verify_database( db_url, self.config.database_engine_options )
    # Setup the database engine and ORM
    from galaxy.model import mapping
    self.model = mapping.init( self.config.file_path, db_url, self.config.database_engine_options )
    # Security helper
    self.security = security.SecurityHelper( id_secret=self.config.id_secret )
    # Initialize the tools
    self.toolbox = tools.ToolBox( self.config.tool_config, self.config.tool_path, self )
    # Load datatype converters
    self.datatypes_registry.load_datatype_converters( self.toolbox )
    # Load datatype indexers
    self.datatypes_registry.load_datatype_indexers( self.toolbox )
    # Load security policy
    self.security_agent = self.model.security_agent
    # Heartbeat and memdump for thread / heap profiling
    self.heartbeat = None
    self.memdump = None
    self.memory_usage = None
    # Start the heartbeat process if configured and available
    if self.config.use_heartbeat:
        from galaxy.util import heartbeat
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat()
            self.heartbeat.start()
    # Enable the memdump signal catcher if configured and available
    if self.config.use_memdump:
        from galaxy.util import memdump
        if memdump.Memdump:
            self.memdump = memdump.Memdump()
    # Enable memory_usage logging if configured
    if self.config.log_memory_usage:
        from galaxy.util import memory_usage
        self.memory_usage = memory_usage
    # Start the job queue
    self.job_manager = jobs.JobManager( self )
    # FIXME: These are exposed directly for backward compatibility
    self.job_queue = self.job_manager.job_queue
    self.job_stop_queue = self.job_manager.job_stop_queue
    # Track Store
    self.track_store = store.TrackStoreManager( self.config.track_store_path )
def __init__(self):
    self.config = bunch.Bunch(tool_secret="awesome_secret")
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True)
    self.toolbox = TestToolbox()
    self.datatypes_registry = TestDatatypesRegistry()
    self.security = IdEncodingHelper(id_secret="testing")
    self.workflow_manager = WorkflowsManager(self)
def __init__( self, **kwargs ):
    self.config = MockAppConfig( **kwargs )
    self.security = self.config.security
    self.object_store = objectstore.build_object_store_from_config( self.config )
    self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store )
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagManager( self )
    self.quota_agent = quota.QuotaAgent( self.model )
def __init__(self, config):
    self.config = config
    if not self.config.database_connection:
        self.config.database_connection = "sqlite:///%s?isolation_level=IMMEDIATE" % str(config.database)
    # Setup the database engine and ORM
    self.model = mapping.init(
        self.config.file_path,
        self.config.database_connection,
        engine_options={},
        create_tables=False
    )
    self.security = security.SecurityHelper(id_secret=self.config.id_secret)
def __init__(self, test_directory, mock_model=True):
    # The following line is needed in order to create
    # HistoryDatasetAssociations - ideally the model classes would be
    # usable without the ORM infrastructure in place.
    in_memory_model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True)
    self.datatypes_registry = Bunch(
        integrated_datatypes_configs='/galaxy/integrated_datatypes_configs.xml',
        get_datatype_by_extension=lambda ext: Bunch(),
    )
    self.config = Bunch(
        outputs_to_working_directory=False,
        commands_in_new_shell=True,
        new_file_path=os.path.join(test_directory, "new_files"),
        tool_data_path=os.path.join(test_directory, "tools"),
        root=os.path.join(test_directory, "galaxy"),
        admin_users="*****@*****.**",
        len_file_path=os.path.join('tool-data', 'shared', 'ucsc', 'chrom'),
        builds_file_path=os.path.join('tool-data', 'shared', 'ucsc', 'builds.txt.sample'),
        migrated_tools_config=os.path.join(test_directory, "migrated_tools_conf.xml"),
    )
    # Setup some attributes for downstream extension by specific tests.
    self.job_config = Bunch(dynamic_params=None)
    # Two ways to handle model layer, one is to stub out some objects that
    # have an interface similar to real model (mock_model) and can keep
    # track of 'persisted' objects in a map. The other is to use a real
    # sqlalchemy layer but target an in memory database. Depending on what
    # is being tested.
    if mock_model:
        # Create self.model to mimic app.model.
        self.model = Bunch(context=MockContext())
        for module_member_name in dir(galaxy.model):
            module_member = getattr(galaxy.model, module_member_name)
            if type(module_member) == type:
                self.model[module_member_name] = module_member
    else:
        self.model = in_memory_model
    self.genome_builds = GenomeBuilds(self)
    self.toolbox = None
    self.object_store = None
    self.security = SecurityHelper(id_secret="testing")
    from galaxy.security import GalaxyRBACAgent
    self.job_queue = NoopQueue()
    self.security_agent = GalaxyRBACAgent(self.model)
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self.name = "galaxy"
def _setup_mapping_and_user():
    with TestConfig(DISK_TEST_CONFIG) as (test_config, object_store):
        # Start the database and connect the mapping
        model = mapping.init(
            "/tmp",
            "sqlite:///:memory:",
            create_tables=True,
            object_store=object_store,
            slow_query_log_threshold=SLOW_QUERY_LOG_THRESHOLD,
            thread_local_log=THREAD_LOCAL_LOG,
        )
        u = model.User(email="*****@*****.**", password="******")
        h1 = model.History(name="HistoryCopyHistory1", user=u)
        model.context.add_all([u, h1])
        model.context.flush()
        yield test_config, object_store, model, h1
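A hedged consumption sketch (not in the original file): because _setup_mapping_and_user is a generator yielding a 4-tuple, it can be driven directly as a context manager for a quick check:

from contextlib import contextmanager

with contextmanager(_setup_mapping_and_user)() as (test_config, object_store, model, h1):
    assert h1.name == "HistoryCopyHistory1"
    assert h1.user is not None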
def __init__( self ):
    self.config = bunch.Bunch(
        log_events=False,
        log_actions=False,
    )
    self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
def __init__(self, **kwargs):
    self.config = MockAppConfig(**kwargs)
    self.security = self.config.security
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
def __init__( self ):
    self.config = bunch.Bunch( tool_secret="awesome_secret" )
    self.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
    self.toolbox = TestToolbox()
    self.datatypes_registry = TestDatatypesRegistry()
def main():
    ini_file = sys.argv.pop(1)
    config = get_config(ini_file)
    model = mapping.init(ini_file, config['db_url'], create_tables=False)
    for row in model.context.query(model.Dataset):
        if row.uuid is None:
            row.uuid = uuid.uuid4()
            print "Setting dataset:", row.id, " UUID to ", row.uuid
    model.context.flush()
def __init__(self, config):
    self.config = config
    if not self.config.database_connection:
        self.config.database_connection = \
            "sqlite:///%s?isolation_level=IMMEDIATE" % str(config.database)
    # Setup the database engine and ORM
    self.model = mapping.init(self.config.file_path,
                              self.config.database_connection,
                              engine_options={},
                              create_tables=False)
    self.security = Security(id_secret=self.config.id_secret)
def main():
    ini_file = sys.argv.pop(1)
    config = get_config(ini_file)
    model = mapping.init( ini_file, config['db_url'], create_tables = False )
    for row in model.context.query( model.Dataset ):
        if row.uuid is None:
            row.uuid = uuid.uuid4()
            print "Setting dataset:", row.id, " UUID to ", row.uuid
    model.context.flush()
def init():
    options.config = os.path.abspath( options.config )
    sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), os.pardir, 'lib' ) )

    config = ConfigParser( dict( file_path='database/files',
                                 database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config.read( options.config )

    from galaxy.model import mapping

    return mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables=False )
def delete_galaxy_user():
    db_url = get_config(sys.argv, use_argparse=False)['db_url']
    options = cli_options()

    mapping = init('/tmp/', db_url)
    sa_session = mapping.context
    security_agent = mapping.security_agent

    delete_user(sa_session, security_agent, options.user)
def __init__( self, test_directory, mock_model=True ):
    # The following line is needed in order to create
    # HistoryDatasetAssociations - ideally the model classes would be
    # usable without the ORM infrastructure in place.
    in_memory_model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
    self.datatypes_registry = Bunch(
        integrated_datatypes_configs='/galaxy/integrated_datatypes_configs.xml',
        get_datatype_by_extension=lambda ext: Bunch(),
    )
    self.config = Bunch(
        outputs_to_working_directory=False,
        commands_in_new_shell=True,
        new_file_path=os.path.join(test_directory, "new_files"),
        tool_data_path=os.path.join(test_directory, "tools"),
        root=os.path.join(test_directory, "galaxy"),
        admin_users="*****@*****.**",
        len_file_path=os.path.join( 'tool-data', 'shared', 'ucsc', 'chrom' ),
        builds_file_path=os.path.join( 'tool-data', 'shared', 'ucsc', 'builds.txt.sample' ),
        migrated_tools_config=os.path.join(test_directory, "migrated_tools_conf.xml"),
    )
    # Setup some attributes for downstream extension by specific tests.
    self.job_config = Bunch( dynamic_params=None )
    # Two ways to handle model layer, one is to stub out some objects that
    # have an interface similar to real model (mock_model) and can keep
    # track of 'persisted' objects in a map. The other is to use a real
    # sqlalchemy layer but target an in memory database. Depending on what
    # is being tested.
    if mock_model:
        # Create self.model to mimic app.model.
        self.model = Bunch( context=MockContext() )
        for module_member_name in dir( galaxy.model ):
            module_member = getattr(galaxy.model, module_member_name)
            if type( module_member ) == type:
                self.model[ module_member_name ] = module_member
    else:
        self.model = in_memory_model
    self.genome_builds = GenomeBuilds( self )
    self.toolbox = None
    self.object_store = None
    self.security = SecurityHelper(id_secret="testing")
    from galaxy.security import GalaxyRBACAgent
    self.job_queue = NoopQueue()
    self.security_agent = GalaxyRBACAgent( self.model )
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self.name = "galaxy"
def init():
    options.config = os.path.abspath(options.config)
    config = ConfigParser(
        dict(file_path='database/files',
             database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config.read(options.config)
    return mapping.init(config.get('app:main', 'file_path'),
                        config.get('app:main', 'database_connection'),
                        create_tables=False)
def _configure_models( self, check_migrate_databases=False, check_migrate_tools=False, config_file=None ):
    """
    Preconditions: object_store must be set on self.
    """
    if self.config.database_connection:
        db_url = self.config.database_connection
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    install_db_url = self.config.install_database_connection
    # TODO: Consider more aggressive check here that this is not the same
    # database file under the hood.
    combined_install_database = not( install_db_url and install_db_url != db_url )
    install_db_url = install_db_url or db_url

    if check_migrate_databases:
        # Initialize database / check for appropriate schema version. If this
        # is a new installation, we'll restrict the tool migration messaging.
        from galaxy.model.migrate.check import create_or_verify_database
        create_or_verify_database( db_url, config_file, self.config.database_engine_options, app=self )
        if not combined_install_database:
            from galaxy.model.tool_shed_install.migrate.check import create_or_verify_database as tsi_create_or_verify_database
            tsi_create_or_verify_database( install_db_url, self.config.install_database_engine_options, app=self )

    if check_migrate_tools:
        # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
        from tool_shed.galaxy_install.migrate.check import verify_tools
        if combined_install_database:
            install_database_options = self.config.database_engine_options
        else:
            install_database_options = self.config.install_database_engine_options
        verify_tools( self, install_db_url, config_file, install_database_options )

    from galaxy.model import mapping
    self.model = mapping.init(
        self.config.file_path,
        db_url,
        self.config.database_engine_options,
        map_install_models=combined_install_database,
        database_query_profiling_proxy=self.config.database_query_profiling_proxy,
        object_store=self.object_store,
        trace_logger=getattr(self, "trace_logger", None),
        use_pbkdf2=self.config.get_bool( 'use_pbkdf2', True )
    )

    if combined_install_database:
        log.info("Install database targetting Galaxy's database configuration.")
        self.install_model = self.model
    else:
        from galaxy.model.tool_shed_install import mapping as install_mapping
        install_db_url = self.config.install_database_connection
        log.info("Install database using its own connection %s" % install_db_url)
        install_db_engine_options = self.config.install_database_engine_options
        self.install_model = install_mapping.init( install_db_url, install_db_engine_options )
def main():
    # logging configuration
    log.basicConfig(format='%(levelname)s: %(message)s', level=log.DEBUG)
    # get the command line options
    parser = optparse.OptionParser()
    parser.add_option("--lims", dest="lims_server", action="store",
                      help="lims server driver://user[:password]@host[:port]/database (mysql://Galaxy:9414xy@uk-cri-lbio04/cri_general)")
    parser.add_option("--dropall", dest="dropall", action="store_true", default=False,
                      help="drop all users/groups and their associated roles in Galaxy")
    (options, args) = parser.parse_args()
    try:
        assert options.lims_server
    except:
        parser.print_help()
        sys.exit( 1 )
    # check env variable define
    if not os.environ['GALAXY_HOME']:
        log.error('Env variable $GALAXY_HOME is not set.')
        sys.exit(1)
    # set default Galaxy config file universe_wsgi.ini
    default_config = os.path.join( os.environ['GALAXY_HOME'], 'universe_wsgi.ini' )
    if not os.path.isfile(default_config):
        log.error('Default Galaxy config file %s does not exist.' % default_config)
        sys.exit(1)
    config = os.path.abspath( default_config )
    os.chdir( os.path.dirname( config ) )
    sys.path.append( 'lib' )
    # import Galaxy
    from galaxy import eggs
    import pkg_resources
    pkg_resources.require( "SQLAlchemy >= 0.4" )
    from sqlalchemy.ext.sqlsoup import SqlSoup
    # parse config file
    config = SafeConfigParser()
    config.read( os.path.basename( default_config ) )
    # get Galaxy database connection
    galaxy_db = config.get( "app:main", "database_connection" )
    from galaxy.model import mapping
    model = mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables = False )
    session = model.session
    # get CRI lims database connection
    lims_db = SqlSoup(options.lims_server)
    if options.dropall:
        dropall(config, model, session)
    else:
        create_limsusers(model, session, lims_db)
def __init__(self, config: GalaxyDataTestConfig = None, **kwd):
    config = config or GalaxyDataTestConfig(**kwd)
    self.config = config
    self.security = config.security
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.tag_handler = GalaxyTagHandler(self.model.context)
    self.init_datatypes()
def init():
    options.config = os.path.abspath( options.config )
    sys.path.insert( 1, os.path.join( os.path.dirname( __file__ ), '..', 'lib' ) )

    from galaxy import eggs
    import pkg_resources

    config = ConfigParser( dict( file_path = 'database/files',
                                 database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config.read( options.config )

    from galaxy.model import mapping

    return mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables = False )
def test_basic(self):
    # Start the database and connect the mapping
    model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True)
    assert model.engine is not None
    # Make some changes and commit them
    u = model.User(email="*****@*****.**", password="******")
    # gs = model.GalaxySession()
    h1 = model.History(name="History 1", user=u)
    # h1.queries.append( model.Query( "h1->q1" ) )
    # h1.queries.append( model.Query( "h1->q2" ) )
    h2 = model.History(name=("H" * 1024))
    model.session.add_all((u, h1, h2))
    # q1 = model.Query( "h2->q1" )
    d1 = model.HistoryDatasetAssociation(extension="interval",
                                         metadata=dict(chromCol=1, startCol=2, endCol=3),
                                         history=h2,
                                         create_dataset=True,
                                         sa_session=model.session)
    # h2.queries.append( q1 )
    # h2.queries.append( model.Query( "h2->q2" ) )
    model.session.add(d1)
    model.session.flush()
    model.session.expunge_all()
    # Check
    users = model.session.query(model.User).all()
    assert len(users) == 1
    assert users[0].email == "*****@*****.**"
    assert users[0].password == "password"
    assert len(users[0].histories) == 1
    assert users[0].histories[0].name == "History 1"
    hists = model.session.query(model.History).all()
    assert hists[0].name == "History 1"
    assert hists[1].name == ("H" * 255)
    assert hists[0].user == users[0]
    assert hists[1].user is None
    assert hists[1].datasets[0].metadata.chromCol == 1
    id = hists[1].datasets[0].id
    assert hists[1].datasets[0].file_name == os.path.join("/tmp", *directory_hash_id(id)) + ("/dataset_%d.dat" % id)
    # Do an update and check
    hists[1].name = "History 2b"
    model.session.flush()
    model.session.expunge_all()
    hists = model.session.query(model.History).all()
    assert hists[0].name == "History 1"
    assert hists[1].name == "History 2b"
def init():
    options.config = os.path.abspath( options.config )
    os.chdir( os.path.dirname( options.config ) )
    sys.path.append( 'lib' )

    from galaxy import eggs
    import pkg_resources

    config = ConfigParser( dict( file_path = 'database/files',
                                 database_connection = 'sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE' ) )
    config.read( os.path.basename( options.config ) )

    from galaxy.model import mapping

    return mapping.init( config.get( 'app:main', 'file_path' ), config.get( 'app:main', 'database_connection' ), create_tables = False )
def init():
    options.config = os.path.abspath(options.config)
    config_parser = ConfigParser(dict(here=os.getcwd(),
                                      database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
    config_parser.read(options.config)
    config_dict = {}
    for key, value in config_parser.items("app:main"):
        config_dict[key] = value
    config = galaxy.config.Configuration(**config_dict)
    object_store = build_object_store_from_config(config)
    return (mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
            object_store)
def _init(config):
    if config.startswith('/'):
        config = os.path.abspath(config)
    else:
        config = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, config))

    properties = load_app_properties(ini_file=config)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    return (mapping.init(config.file_path, config.database_connection, create_tables=False, object_store=object_store),
            object_store,
            config.database_connection.split(':')[0])
def __init__(self, config=None, **kwargs):
    super().__init__()
    self[BasicApp] = self
    self[MinimalManagerApp] = self
    self[StructuredApp] = self
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self[idencoding.IdEncodingHelper] = self.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", self.config.database_connection, create_tables=True, object_store=self.object_store)
    self[SharedModelMapping] = self.model
    self[GalaxyModelMapping] = self.model
    self[scoped_session] = self.model.context
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagHandler(self.model.context)
    self[tags.GalaxyTagHandler] = self.tag_handler
    self.quota_agent = quota.DatabaseQuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(
        dynamic_params=None,
        destinations={},
        use_messaging=False,
        assign_handler=lambda *args, **kwargs: None
    )
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.tool_shed_registry = Bunch(tool_sheds={})
    self.genome_builds = GenomeBuilds(self)
    self.job_manager = NoopManager()
    self.application_stack = ApplicationStack()
    self.auth_manager = AuthManager(self.config)
    self.user_manager = UserManager(self)
    self.execution_timer_factory = Bunch(get_timer=StructuredExecutionTimer)
    self.is_job_handler = False
    rebind_container_to_task(self)

    def url_for(*args, **kwds):
        return "/mock/url"

    self.url_for = url_for
def manage_galaxy_bootstrap_user():
    db_url = get_config(sys.argv, use_argparse=False)['db_url']
    options = cli_options()

    mapping = init('/tmp/', db_url)
    sa_session = mapping.context
    security_agent = mapping.security_agent

    if options.action == "create":
        add_user(sa_session, security_agent, options.user, options.password, key=options.key, username=options.username)
    elif options.action == "delete":
        delete_user(sa_session, security_agent, options.user)
def test_basic( self ):
    # Start the database and connect the mapping
    model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
    assert model.engine is not None
    # Make some changes and commit them
    u = model.User( email="*****@*****.**", password="******" )
    # gs = model.GalaxySession()
    h1 = model.History( name="History 1", user=u )
    # h1.queries.append( model.Query( "h1->q1" ) )
    # h1.queries.append( model.Query( "h1->q2" ) )
    h2 = model.History( name=( "H" * 1024 ) )
    model.session.add_all( ( u, h1, h2 ) )
    # q1 = model.Query( "h2->q1" )
    d1 = model.HistoryDatasetAssociation( extension="interval",
                                          metadata=dict( chromCol=1, startCol=2, endCol=3 ),
                                          history=h2,
                                          create_dataset=True,
                                          sa_session=model.session )
    # h2.queries.append( q1 )
    # h2.queries.append( model.Query( "h2->q2" ) )
    model.session.add( d1 )
    model.session.flush()
    model.session.expunge_all()
    # Check
    users = model.session.query( model.User ).all()
    assert len( users ) == 1
    assert users[0].email == "*****@*****.**"
    assert users[0].password == "password"
    assert len( users[0].histories ) == 1
    assert users[0].histories[0].name == "History 1"
    hists = model.session.query( model.History ).all()
    assert hists[0].name == "History 1"
    assert hists[1].name == ( "H" * 255 )
    assert hists[0].user == users[0]
    assert hists[1].user is None
    assert hists[1].datasets[0].metadata.chromCol == 1
    # The filename test has moved to objectstore
    # id = hists[1].datasets[0].id
    # assert hists[1].datasets[0].file_name == os.path.join( "/tmp", *directory_hash_id( id ) ) + ( "/dataset_%d.dat" % id )
    # Do an update and check
    hists[1].name = "History 2b"
    model.session.flush()
    model.session.expunge_all()
    hists = model.session.query( model.History ).all()
    assert hists[0].name == "History 1"
    assert hists[1].name == "History 2b"
def _init(config):
    if config.startswith('/'):
        config = os.path.abspath(config)
    else:
        config = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, config))

    properties = load_app_properties(ini_file=config)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    return (
        mapping.init(
            config.file_path,
            config.database_connection,
            create_tables=False,
            object_store=object_store
        ),
        object_store,
        config.database_connection.split(':')[0]
    )
def main():
    if len(sys.argv) != 2 or sys.argv[1] == "-h" or sys.argv[1] == "--help":
        usage(sys.argv[0])
        sys.exit()
    ini_file = sys.argv.pop(1)
    config = get_config(ini_file)
    model = mapping.init(ini_file, config['db_url'], create_tables=False)
    for row in model.context.query(model.Dataset):
        if row.uuid is None:
            row.uuid = uuid.uuid4()
            print("Setting dataset:", row.id, " UUID to ", row.uuid)
    model.context.flush()

    for row in model.context.query(model.Workflow):
        if row.uuid is None:
            row.uuid = uuid.uuid4()
            print("Setting Workflow:", row.id, " UUID to ", row.uuid)
    model.context.flush()
def __init__(self, config=None, **kwargs):
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagManager(self.model.context)
    self.quota_agent = quota.QuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(dynamic_params=None)
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.genome_builds = GenomeBuilds(self)
    self.job_queue = NoopQueue()
def __init__(self, config=None, **kwargs):
    self.config = config or MockAppConfig(**kwargs)
    self.security = self.config.security
    self.name = kwargs.get('name', 'galaxy')
    self.object_store = objectstore.build_object_store_from_config(self.config)
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True, object_store=self.object_store)
    self.security_agent = self.model.security_agent
    self.visualizations_registry = MockVisualizationsRegistry()
    self.tag_handler = tags.GalaxyTagManager(self.model.context)
    self.quota_agent = quota.QuotaAgent(self.model)
    self.init_datatypes()
    self.job_config = Bunch(dynamic_params=None)
    self.tool_data_tables = {}
    self.dataset_collections_service = None
    self.container_finder = NullContainerFinder()
    self._toolbox_lock = MockLock()
    self.genome_builds = GenomeBuilds(self)
    self.job_manager = Bunch(job_queue=NoopQueue())
    self.application_stack = ApplicationStack()
def test_basic( self ):
    # Start the database and connect the mapping
    model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True )
    assert model.engine is not None
    # Make some changes and commit them
    u = model.User( email="*****@*****.**", password="******" )
    h1 = model.History( name="History 1", user=u )
    # h1.queries.append( model.Query( "h1->q1" ) )
    # h1.queries.append( model.Query( "h1->q2" ) )
    h2 = model.History( name=( "H" * 1024 ) )
    # q1 = model.Query( "h2->q1" )
    d1 = model.Dataset( metadata=dict( chromCol=1, startCol=2, endCol=3 ), history=h2 )
    # h2.queries.append( q1 )
    # h2.queries.append( model.Query( "h2->q2" ) )
    model.context.current.flush()
    model.context.current.clear()
    # Check
    users = model.User.select()
    assert len( users ) == 1
    assert users[0].email == "*****@*****.**"
    assert users[0].password == "password"
    assert len( users[0].histories ) == 1
    assert users[0].histories[0].name == "History 1"
    hists = model.History.select()
    assert hists[0].name == "History 1"
    assert hists[1].name == ( "H" * 255 )
    assert hists[0].user == users[0]
    assert hists[1].user is None
    assert hists[1].datasets[0].metadata['chromCol'] == 1
    assert hists[1].datasets[0].file_name == "/tmp/dataset_%d.dat" % hists[1].datasets[0].id
    # Do an update and check
    hists[1].name = "History 2b"
    model.context.current.flush()
    model.context.current.clear()
    hists = model.History.select()
    assert hists[0].name == "History 1"
    assert hists[1].name == "History 2b"
def setUpClass(cls): # Start the database and connect the mapping cls.model = mapping.init( "/tmp", "sqlite:///:memory:", create_tables=True ) assert cls.model.engine is not None
def main():
    # logging configuration
    log.basicConfig(format="%(levelname)s: %(message)s", level=log.DEBUG)
    # get the command line options
    parser = optparse.OptionParser()
    parser.add_option(
        "--lims",
        dest="lims_server",
        action="store",
        help="lims server driver://user[:password]@host[:port]/database (mysql://Galaxy:9414xy@uk-cri-lbio04/cri_general)",
    )
    parser.add_option("--samples", dest="json_filename", action="store", help="json sample full path with filename")
    (options, args) = parser.parse_args()
    try:
        assert options.lims_server
        assert options.json_filename
    except:
        parser.print_help()
        sys.exit(1)
    # check env variable define
    if not os.environ["GALAXY_HOME"]:
        log.error("Env variable $GALAXY_HOME is not set.")
        sys.exit(1)
    # set default galaxy config file universe_wsgi.ini
    galaxy_config_file = os.path.join(os.environ["GALAXY_HOME"], "universe_wsgi.ini")
    if not os.path.isfile(galaxy_config_file):
        log.error("Default Galaxy config file %s does not exist." % galaxy_config_file)
        sys.exit(1)
    os.chdir(os.path.dirname(os.path.abspath(galaxy_config_file)))
    sys.path.append("lib")
    # import galaxy
    from galaxy import eggs
    import pkg_resources
    pkg_resources.require("SQLAlchemy >= 0.4")
    from sqlalchemy.ext.sqlsoup import SqlSoup
    # parse galaxy config file
    galaxy_config = ConfigParser.SafeConfigParser()
    galaxy_config.read(os.path.basename(galaxy_config_file))
    # get galaxy database connection
    from galaxy.model import mapping
    galaxy_model = mapping.init(
        galaxy_config.get("app:main", "file_path"),
        galaxy_config.get("app:main", "database_connection"),
        create_tables=False,
    )
    galaxy_session = galaxy_model.session
    # set galaxy library import directory
    try:
        lib_import_dir = galaxy_config.get("app:main", "library_import_dir")
    except (ConfigParser.NoOptionError, ConfigParser.NoSectionError):
        raise ValueError("galaxy config %s needs library_import_dir to be set." % galaxy_config_file)
    # set storage dir with the galaxy library import directory
    storage_root_folder = os.path.join(lib_import_dir, "storage")
    # query galaxy db to get all users/groups
    galaxy_groups = galaxy_session.query(galaxy_model.Group)
    all_groups = []
    all_users = {}
    for galaxy_group in galaxy_groups:
        # create library directories for each group on disk
        log.debug("Existing group in galaxy: %s" % galaxy_group.name)
        check_whitespace(galaxy_group.name)
        group_dir = os.path.join(lib_import_dir, galaxy_group.name)
        create_library_dir(group_dir)
        create_library_storage_dir(lib_import_dir, group_dir)
        # create list of all group names
        all_groups.append(galaxy_group.name)
        # query galaxy db to get all users associated to this group
        galaxy_usergroup_associations = (
            galaxy_session.query(galaxy_model.UserGroupAssociation)
            .filter_by(group_id=galaxy_group.id)
            .join(galaxy_model.User)
            .all()
        )
        for galaxy_association in galaxy_usergroup_associations:
            # create library subfolder for each user in this group
            log.debug("Existing user in galaxy: %s" % galaxy_association.user.email)
            check_whitespace(galaxy_association.user.email)
            user_dir = os.path.join(group_dir, galaxy_association.user.email.lower())
            # populate dictionary of all library folders on disk sorted by users
            all_users[galaxy_association.user.email.lower()] = user_dir
            create_library_dir(user_dir)
            create_library_storage_dir(lib_import_dir, user_dir)
    # get all samples data from json data file or from lims if it does not exist
    log.info("Reading samples information from %s" % options.json_filename)
    all_samples = defaultdict(lambda: defaultdict(list))
    if os.path.exists(options.json_filename):
        all_samples = simplejson.load(open(options.json_filename, "r"))
    else:
        # get samples details from lims using its soap api
        from suds.client import Client
        log.getLogger("suds").setLevel(log.INFO)
        lims = Client("http://uk-cri-ldmz02.crnet.org/solexa-ws/SolexaExportBeanWS?wsdl")
        log.debug(lims)
        runs = lims.service.getAllSolexaRuns("true")
        for i in range(0, len(runs)):
            for run in runs[i]:
                for lane in run.sampleLanes:
                    file_locations = lims.service.getFileLocations(lane.sampleProcess_id, "FILE", "FASTQ")
                    for j in range(0, len(file_locations)):
                        user_id = lane.userEmail.lower()
                        sample_id = lane.userSampleId.replace(" ", "").replace("/", "_")
                        log.debug(
                            "SAMPLE: %s | %s | %s | %s | %s"
                            % (lane.sampleProcess_id, lane.userSampleId, lane.genomicsSampleId, user_id, lane.genome)
                        )
                        for file_location in file_locations[j]:
                            if file_location.host == "uk-cri-lsol03.crnet.org":
                                file_path = "%s/%s" % (file_location.path, file_location.filename)
                                log.debug(" [%s]%s" % (file_location.host, file_path))
                                if not os.path.isfile(file_path):
                                    log.error("File %s does not exists on sol03." % file_path)
                                else:
                                    all_samples[user_id][sample_id].append(file_path)
                                    log.debug("File %s exists on sol03." % file_path)
                            else:
                                log.debug("File on %s" % file_location.host)
        # create json file with all samples
        json_file = open(options.json_filename, "w")
        simplejson.dump(all_samples, json_file)
        json_file.close()
    # read dictionary of all samples sorted by users
    for user, samples in all_samples.iteritems():
        log.debug(user)
        if user in all_users:
            log.debug(all_users[user])
            for sample, files in samples.iteritems():
                # create library subfolder for each sample
                sample_dir = os.path.join(all_users[user], sample.replace(" ", "").replace("/", "_"))
                create_library_dir(sample_dir)
                create_library_storage_dir(lib_import_dir, sample_dir)
                for file in files:
                    link_name = os.path.join(sample_dir, os.path.basename(file))
                    log.debug(file)
                    log.debug(link_name)
                    if not os.path.islink(link_name):
                        # create symlinks
                        os.symlink(file, link_name)
                        log.debug("Creating symbolic link: %s" % link_name)
                    else:
                        log.debug("Symbolic link %s already exists" % link_name)
    # upload samples into galaxy libraries
    galaxy_api = GalaxyAccessApi("http://localhost:8080/galaxy", "58f445336457812554c09ae17ac32647")
    for group_folder in os.listdir(lib_import_dir):
        group_folder_path = os.path.join(lib_import_dir, group_folder)
        if os.path.isdir(group_folder_path) and (group_folder in all_groups):
            log.debug(group_folder)
            # create a library per group
            library_id = galaxy_api.get_datalibrary_id(group_folder)
            log.debug(library_id)
            # get library folder root
            root_folder_id = galaxy_api.get_datafolder_id(library_id, None, "/", None)
            log.debug(root_folder_id)
            # get library contents
            library_contents = galaxy_api.library_contents(library_id)
            log.debug(library_contents)
            for user_folder in os.listdir(group_folder_path):
                user_folder_path = os.path.join(group_folder_path, user_folder)
                if os.path.isdir(user_folder_path) and (user_folder in all_users.keys()):
                    log.debug(user_folder)
                    # create a library folder per user
                    user_folder_id = galaxy_api.get_datafolder_id(
                        library_id, root_folder_id, "/%s" % user_folder, user_folder
                    )
                    log.debug("user folder id: %s" % user_folder_id)
                    for sample_folder in os.listdir(user_folder_path):
                        sample_folder_path = os.path.join(user_folder_path, sample_folder)
                        if os.path.isdir(sample_folder_path):
                            log.debug(sample_folder)
                            # create a library subfolder per sample
                            sample_folder_id = galaxy_api.get_datafolder_id(
                                library_id, user_folder_id, "/%s/%s" % (user_folder, sample_folder), sample_folder
                            )
                            for file in os.listdir(sample_folder_path):
                                log.debug(file)
                                current_files = [
                                    f
                                    for f in library_contents
                                    if f["type"] == "file"
                                    and f["name"].startswith("/%s/%s/%s" % (user_folder, sample_folder, file.split(".")[0]))
                                ]
                                log.debug(current_files)
                                if len(current_files) > 0:
                                    # move file to storage if already uploaded in galaxy
                                    file_path = os.path.join(sample_folder_path, file)
                                    log.debug(file_path)
                                    log.debug(current_files[0]["name"])
                                    storage_folder = sample_folder_path.replace(
                                        lib_import_dir, os.path.join(lib_import_dir, "storage")
                                    )
                                    new_file_path = os.path.join(storage_folder, file)
                                    if not os.path.exists(new_file_path):
                                        shutil.move(file_path, storage_folder)
                                        log.debug("Move file %s to storage. Already in galaxy." % file_path)
                                    else:
                                        os.remove(file_path)
                                        log.debug("Remove file %s. Already in galaxy and in storage." % file_path)
                            if len(os.listdir(sample_folder_path)) > 0:
                                # if directory not empty, upload directory contents into library sample folder
                                galaxy_api.upload_directory(
                                    library_id,
                                    sample_folder_id,
                                    "%s/%s/%s" % (group_folder, user_folder, sample_folder),
                                    "hg19",
                                )
                                log.debug("Uploading directory %s to galaxy" % sample_folder)
def __init__(self, **kwargs):
    print >> sys.stderr, "python path is: " + ", ".join(sys.path)
    self.name = "galaxy"
    self.new_installation = False
    # Read config file and check for errors
    self.config = config.Configuration(**kwargs)
    self.config.check()
    config.configure_logging(self.config)
    self.configure_fluent_log()
    # Determine the database url
    if self.config.database_connection:
        db_url = self.config.database_connection
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % self.config.database
    # Set up the tool sheds registry
    if os.path.isfile(self.config.tool_sheds_config):
        self.tool_shed_registry = tool_shed.tool_shed_registry.Registry(self.config.root, self.config.tool_sheds_config)
    else:
        self.tool_shed_registry = None
    # Initialize database / check for appropriate schema version. If this
    # is a new installation, we'll restrict the tool migration messaging.
    from galaxy.model.migrate.check import create_or_verify_database
    create_or_verify_database(
        db_url, kwargs.get("global_conf", {}).get("__file__", None), self.config.database_engine_options, app=self
    )
    # Alert the Galaxy admin to tools that have been moved from the distribution to the tool shed.
    from tool_shed.galaxy_install.migrate.check import verify_tools
    verify_tools(
        self, db_url, kwargs.get("global_conf", {}).get("__file__", None), self.config.database_engine_options
    )
    # Object store manager
    self.object_store = build_object_store_from_config(self.config, fsmon=True)
    # Setup the database engine and ORM
    from galaxy.model import mapping
    self.model = mapping.init(
        self.config.file_path,
        db_url,
        self.config.database_engine_options,
        database_query_profiling_proxy=self.config.database_query_profiling_proxy,
        object_store=self.object_store,
        trace_logger=self.trace_logger,
        use_pbkdf2=self.config.get_bool("use_pbkdf2", True),
    )
    # Manage installed tool shed repositories.
    self.installed_repository_manager = tool_shed.galaxy_install.InstalledRepositoryManager(self)
    # Create an empty datatypes registry.
    self.datatypes_registry = galaxy.datatypes.registry.Registry()
    # Load proprietary datatypes defined in datatypes_conf.xml files in all installed tool shed repositories. We
    # load proprietary datatypes before datatypes in the distribution because Galaxy's default sniffers include some
    # generic sniffers (eg text,xml) which catch anything, so it's impossible for proprietary sniffers to be used.
    # However, if there is a conflict (2 datatypes with the same extension) between a proprietary datatype and a datatype
    # in the Galaxy distribution, the datatype in the Galaxy distribution will take precedence. If there is a conflict
    # between 2 proprietary datatypes, the datatype from the repository that was installed earliest will take precedence.
    self.installed_repository_manager.load_proprietary_datatypes()
    # Load the data types in the Galaxy distribution, which are defined in self.config.datatypes_config.
    self.datatypes_registry.load_datatypes(self.config.root, self.config.datatypes_config)
    galaxy.model.set_datatypes_registry(self.datatypes_registry)
    # Security helper
    self.security = security.SecurityHelper(id_secret=self.config.id_secret)
    # Tag handler
    self.tag_handler = GalaxyTagHandler()
    # Genomes
    self.genomes = Genomes(self)
    # Data providers registry.
    self.data_provider_registry = DataProviderRegistry()
    # Initialize tool data tables using the config defined by self.config.tool_data_table_config_path.
    self.tool_data_tables = galaxy.tools.data.ToolDataTableManager(
        tool_data_path=self.config.tool_data_path,
        config_filename=self.config.tool_data_table_config_path
    )
    # Load additional entries defined by self.config.shed_tool_data_table_config into tool data tables.
    self.tool_data_tables.load_from_config_file(
        config_filename=self.config.shed_tool_data_table_config,
        tool_data_path=self.tool_data_tables.tool_data_path,
        from_shed_config=False,
    )
    # Initialize the job management configuration
    self.job_config = jobs.JobConfiguration(self)
    # Initialize the tools, making sure the list of tool configs includes the reserved migrated_tools_conf.xml file.
    tool_configs = self.config.tool_configs
    if self.config.migrated_tools_config not in tool_configs:
        tool_configs.append(self.config.migrated_tools_config)
    self.toolbox = tools.ToolBox(tool_configs, self.config.tool_path, self)
    # Search support for tools
    self.toolbox_search = galaxy.tools.search.ToolBoxSearch(self.toolbox)
    # Load Data Manager
    self.data_managers = DataManagers(self)
    # If enabled, poll respective tool sheds to see if updates are available for any installed tool shed repositories.
    if self.config.get_bool("enable_tool_shed_check", False):
        from tool_shed.galaxy_install import update_manager
        self.update_manager = update_manager.UpdateManager(self)
    else:
        self.update_manager = None
    # Load proprietary datatype converters and display applications.
    self.installed_repository_manager.load_proprietary_converters_and_display_applications()
    # Load datatype display applications defined in local datatypes_conf.xml
    self.datatypes_registry.load_display_applications()
    # Load datatype converters defined in local datatypes_conf.xml
    self.datatypes_registry.load_datatype_converters(self.toolbox)
    # Load external metadata tool
    self.datatypes_registry.load_external_metadata_tool(self.toolbox)
    # Load history import/export tools.
    load_history_imp_exp_tools(self.toolbox)
    # Load genome indexer tool.
    load_genome_index_tools(self.toolbox)
    # visualizations registry: associates resources with visualizations, controls how to render
    self.visualizations_registry = None
    if self.config.visualizations_config_directory:
        self.visualizations_registry = VisualizationsRegistry(
            self.config.root, self.config.visualizations_config_directory
        )
    # Load security policy.
    self.security_agent = self.model.security_agent
    self.host_security_agent = galaxy.security.HostAgent(
        model=self.security_agent.model, permitted_actions=self.security_agent.permitted_actions
    )
    # Load quota management.
    if self.config.enable_quotas:
        self.quota_agent = galaxy.quota.QuotaAgent(self.model)
    else:
        self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
    # Heartbeat and memdump for thread / heap profiling
    self.heartbeat = None
    self.memdump = None
    self.memory_usage = None
    # Container for OpenID authentication routines
    if self.config.enable_openid:
        from galaxy.web.framework import openid_manager
        self.openid_manager = openid_manager.OpenIDManager(self.config.openid_consumer_cache_path)
        self.openid_providers = OpenIDProviders.from_file(self.config.openid_config)
    else:
        self.openid_providers = OpenIDProviders()
    # Start the heartbeat process if configured and available
    if self.config.use_heartbeat:
        from galaxy.util import heartbeat
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat(fname=self.config.heartbeat_log)
            self.heartbeat.start()
    # Enable the memdump signal catcher if configured and available
    if self.config.use_memdump:
        from galaxy.util import memdump
        if memdump.Memdump:
            self.memdump = memdump.Memdump()
    # Transfer manager client
    if self.config.get_bool("enable_beta_job_managers", False):
        from galaxy.jobs import transfer_manager
        self.transfer_manager = transfer_manager.TransferManager(self)
    # Start the job manager
    from galaxy.jobs import manager
    self.job_manager = manager.JobManager(self)
    # FIXME: These are exposed directly for backward compatibility
    self.job_queue = self.job_manager.job_queue
    self.job_stop_queue = self.job_manager.job_stop_queue
    # Initialize the external service types
    self.external_service_types = external_service_types.ExternalServiceTypesCollection(
        self.config.external_service_type_config_file, self.config.external_service_type_path, self
    )
from flask import Flask, request, make_response, current_app, send_file
from flask_socketio import SocketIO, emit, disconnect, join_room, leave_room
import flask.ext.login as flask_login
from flask.ext.login import current_user
from datetime import timedelta
from functools import update_wrapper

from galaxy.model.orm.scripts import get_config
from galaxy.model import mapping
from galaxy.util.properties import load_app_properties
from galaxy.web.security import SecurityHelper
from galaxy.util.sanitize_html import sanitize_html

# Get config file and load up SA session
config = get_config( sys.argv )
model = mapping.init( '/tmp/', config['db_url'] )
sa_session = model.context.current

# With the config file we can load the full app properties
app_properties = load_app_properties(ini_file=config['config_file'])

# We need the ID secret for configuring the security helper to decrypt
# galaxysession cookies.
if "id_secret" not in app_properties:
    log.warn('No ID_SECRET specified. Please set the "id_secret" in your galaxy.ini.')

id_secret = app_properties.get('id_secret', 'dangerous_default')

security_helper = SecurityHelper(id_secret=id_secret)
# And get access to the models
# Login manager to manage current_user functionality
#
# You can also use this script as a library, for instance see https://gist.github.com/1979583
# TODO: This script overlaps a lot with manage_db.py and create_db.py,
# these should maybe be refactored to remove duplication.
import sys
import os.path

db_shell_path = __file__
new_path = [ os.path.join( os.path.dirname( db_shell_path ), os.path.pardir, "lib" ) ]
new_path.extend( sys.path[1:] )  # remove scripts/ from the path
sys.path = new_path

from galaxy.model.orm.scripts import get_config
from galaxy import eggs
eggs.require( "decorator" )
eggs.require( "Tempita" )
eggs.require( "SQLAlchemy" )

db_url = get_config( sys.argv )['db_url']

# Setup DB scripting environment
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.exc import *

from galaxy.model.mapping import init
sa_session = init( '/tmp/', db_url ).context

from galaxy.model import *
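A hedged usage sketch of the shell environment this script sets up (User and Dataset are assumed to be brought in by the wildcard import of galaxy.model):

print(sa_session.query(User).count())  # number of registered users
for dataset in sa_session.query(Dataset).limit(5):
    print(dataset.id)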
try:
    assert options.encode_id or options.decode_id or options.hda_id or options.ldda_id
except Exception:
    parser.print_help()
    sys.exit(1)

options.config = os.path.abspath(options.config)

config = ConfigParser(dict(file_path='database/files',
                           id_secret='USING THE DEFAULT IS NOT SECURE!',
                           database_connection='sqlite:///database/universe.sqlite?isolation_level=IMMEDIATE'))
config.read(options.config)

helper = security.SecurityHelper(id_secret=config.get('app:main', 'id_secret'))
model = mapping.init(config.get('app:main', 'file_path'), config.get('app:main', 'database_connection'), create_tables=False)

if options.encode_id:
    print('Encoded "%s": %s' % (options.encode_id, helper.encode_id(options.encode_id)))

if options.decode_id:
    print('Decoded "%s": %s' % (options.decode_id, helper.decode_id(options.decode_id)))

if options.hda_id:
    try:
        hda_id = int(options.hda_id)
    except Exception:
        hda_id = int(helper.decode_id(options.hda_id))
    hda = model.context.current.query(model.HistoryDatasetAssociation).get(hda_id)
    print('HDA "%s" is Dataset "%s" at: %s' % (hda.id, hda.dataset.id, hda.file_name))