def load_galaxy_app( config_builder, config_env=False, log=None, **kwds ):
    # Allow specification of log so daemon can reuse properly configured one.
    if log is None:
        log = logging.getLogger(__name__)
    # If called in daemon mode, set the ROOT directory and ensure Galaxy is on
    # sys.path.
    if config_env:
        try:
            os.chdir(GALAXY_ROOT_DIR)
        except Exception:
            log.exception("Failed to chdir")
            raise
        try:
            sys.path.insert(1, GALAXY_LIB_DIR)
        except Exception:
            log.exception("Failed to add Galaxy to sys.path")
            raise
    config_builder.setup_logging()
    from galaxy.util.properties import load_app_properties
    kwds = config_builder.app_kwds()
    kwds = load_app_properties(**kwds)
    from galaxy.app import UniverseApplication
    app = UniverseApplication( global_conf={"__file__": config_builder.ini_path}, **kwds )
    app.control_worker.bind_and_start()
    return app
def app_factory(global_conf, load_app_kwds={}, **kwargs):
    """Return a wsgi application serving the root object"""
    # Create the Galaxy application unless passed in
    kwargs = load_app_properties(
        kwds=kwargs,
        **load_app_kwds
    )
    if 'app' in kwargs:
        app = kwargs.pop('app')
    else:
        from galaxy.webapps.reports.app import UniverseApplication
        app = UniverseApplication(global_conf=global_conf, **kwargs)
    atexit.register(app.shutdown)
    # Create the universe WSGI application
    webapp = ReportsWebApplication(app, session_cookie='galaxyreportssession', name="reports")
    add_ui_controllers(webapp, app)
    # These two routes handle our simple needs at the moment
    webapp.add_route('/{controller}/{action}', controller="root", action='index')
    webapp.add_route('/{action}', controller='root', action='index')
    webapp.finalize_config()
    # Wrap the webapp in some useful middleware
    if kwargs.get('middleware', True):
        webapp = wrap_in_middleware(webapp, global_conf, app.application_stack, **kwargs)
    if asbool(kwargs.get('static_enabled', True)):
        webapp = wrap_if_allowed(webapp, app.application_stack, wrap_in_static, args=(global_conf,), kwargs=kwargs)
    # Close any pooled database connections before forking
    try:
        galaxy.model.mapping.metadata.bind.dispose()
    except Exception:
        log.exception("Unable to dispose of pooled galaxy model database connections.")
    # Return
    return webapp
def parse_configs( self ):
    self.config = load_app_properties( ini_file=self.config_file )
    job_conf_xml = self.config.get( "job_config_file", join( dirname( self.config_file ), 'job_conf.xml' ) )
    try:
        for plugin in ElementTree.parse( job_conf_xml ).find( 'plugins' ).findall( 'plugin' ):
            if 'load' in plugin.attrib:
                self.job_runners.append( plugin.attrib['load'] )
    except (OSError, IOError):
        pass
    object_store_conf_xml = self.config.get( "object_store_config_file", join( dirname( self.config_file ), 'object_store_conf.xml' ) )
    try:
        for store in ElementTree.parse( object_store_conf_xml ).iter( 'object_store' ):
            if 'type' in store.attrib:
                self.object_stores.append( store.attrib['type'] )
    except (OSError, IOError):
        pass
    # Parse auth conf
    auth_conf_xml = self.config.get( "auth_config_file", join( dirname( self.config_file ), 'auth_conf.xml' ) )
    try:
        for auth in ElementTree.parse( auth_conf_xml ).findall( 'authenticator' ):
            auth_type = auth.find('type')
            if auth_type is not None:
                self.authenticators.append( auth_type.text )
    except (OSError, IOError):
        pass
def get_config(argv, cwd=None):
    """
    Read sys.argv and parse out repository of migrations and database url.

    >>> from ConfigParser import SafeConfigParser
    >>> from tempfile import mkdtemp
    >>> config_dir = mkdtemp()
    >>> os.makedirs(os.path.join(config_dir, 'config'))
    >>> def write_ini(path, property, value):
    ...     p = SafeConfigParser()
    ...     p.add_section('app:main')
    ...     p.set('app:main', property, value)
    ...     with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f)
    >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1')
    >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
    >>> config['repo']
    'lib/galaxy/webapps/tool_shed/model/migrate'
    >>> config['db_url']
    'sqlite:///pg/testdb1'
    >>> write_ini('galaxy.ini', 'database_file', 'moo.sqlite')
    >>> config = get_config(['manage_db.py'], cwd=config_dir)
    >>> config['db_url']
    'sqlite:///moo.sqlite?isolation_level=IMMEDIATE'
    >>> config['repo']
    'lib/galaxy/model/migrate'
    """
    if argv and (argv[-1] in DATABASE):
        database = argv.pop()  # database name tool_shed, galaxy, or install.
    else:
        database = 'galaxy'
    database_defaults = DATABASE[database]

    default = database_defaults.get('config_file', DEFAULT_CONFIG_FILE)
    old_default = database_defaults.get('old_config_file')
    if cwd is not None:
        default = os.path.join(cwd, default)
        old_default = os.path.join(cwd, old_default)
    config_file = read_config_file_arg(argv, default, old_default)
    repo = database_defaults['repo']
    config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX)
    default_sqlite_file = database_defaults['default_sqlite_file']

    if cwd:
        config_file = os.path.join(cwd, config_file)

    properties = load_app_properties(ini_file=config_file)

    if ("%sdatabase_connection" % config_prefix) in properties:
        db_url = properties["%sdatabase_connection" % config_prefix]
    elif ("%sdatabase_file" % config_prefix) in properties:
        database_file = properties["%sdatabase_file" % config_prefix]
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % database_file
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file
    require_dialect_egg(db_url)
    return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
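For context, a minimal sketch of how the dictionary returned by get_config is typically consumed; the run_migrations wrapper below is hypothetical, but the pattern of handing db_url and repo to sqlalchemy-migrate's shell entry point follows the manage_db.py-style scripts that call this helper.

import sys

from migrate.versioning.shell import main  # sqlalchemy-migrate CLI entry point


def run_migrations():
    # Hypothetical driver: get_config() pops the trailing database name
    # ('galaxy', 'tool_shed', 'install') off sys.argv, so sqlalchemy-migrate
    # only sees its own subcommand (e.g. "upgrade", "downgrade", "version").
    config = get_config(sys.argv)
    main(repository=config['repo'], url=config['db_url'])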
def app_factory( global_conf, **kwargs ):
    """Return a wsgi application serving the root object"""
    # Create the Galaxy application unless passed in
    kwargs = load_app_properties( kwds=kwargs )
    if 'app' in kwargs:
        app = kwargs.pop( 'app' )
    else:
        from galaxy.webapps.reports.app import UniverseApplication
        app = UniverseApplication( global_conf=global_conf, **kwargs )
    atexit.register( app.shutdown )
    # Create the universe WSGI application
    webapp = ReportsWebApplication( app, session_cookie='galaxyreportssession', name="reports" )
    add_ui_controllers( webapp, app )
    # These two routes handle our simple needs at the moment
    webapp.add_route( '/{controller}/{action}', controller="root", action='index' )
    webapp.add_route( '/{action}', controller='root', action='index' )
    webapp.finalize_config()
    # Wrap the webapp in some useful middleware
    if kwargs.get( 'middleware', True ):
        webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
    if asbool( kwargs.get( 'static_enabled', True ) ):
        webapp = wrap_in_static( webapp, global_conf, **kwargs )
    # Close any pooled database connections before forking
    try:
        galaxy.model.mapping.metadata.bind.dispose()
    except:
        log.exception("Unable to dispose of pooled galaxy model database connections.")
    # Return
    return webapp
def build_galaxy_app(simple_kwargs):
    """Build a Galaxy app object from simple keyword arguments.

    Construct a paste-style complex dictionary and use load_app_properties so
    Galaxy override variables are respected. Also set up "global" references
    to the sqlalchemy database context for the Galaxy and install databases.
    """
    log.info("Galaxy database connection: %s", simple_kwargs["database_connection"])
    simple_kwargs['global_conf'] = get_webapp_global_conf()
    simple_kwargs['global_conf']['__file__'] = "lib/galaxy/config/sample/galaxy.yml.sample"
    simple_kwargs = load_app_properties(kwds=simple_kwargs)

    # Build the Universe Application
    app = GalaxyUniverseApplication(**simple_kwargs)
    rebind_container_to_task(app)

    log.info("Embedded Galaxy application started")

    global galaxy_context
    global install_context
    galaxy_context = app.model.context
    install_context = app.install_model.context

    # Toolbox indexing happens via the work queue out of band recently, and,
    # beyond potentially running async after tests execute doesn't execute
    # without building a uwsgi app (app.is_webapp = False for this test kit).
    # We need to ensure to build an index for the test galaxy app -- this is
    # pretty fast with the limited toolset
    app.reindex_tool_search()

    return app
def _get_uwsgi_args(cliargs, kwargs):
    # it'd be nice if we didn't have to reparse here but we need things out of more than one section
    config_file = cliargs.config_file or kwargs.get('__file__')
    uwsgi_kwargs = load_app_properties(config_file=config_file, config_section='uwsgi')
    args = []
    defaults = {
        'virtualenv': os.environ.get('VIRTUAL_ENV', './.venv'),
        'pythonpath': 'lib',
        'threads': '4',
        'http': 'localhost:{port}'.format(port=DEFAULT_PORTS[cliargs.app]),
        'static-map': ('/static/style={here}/static/style/blue'.format(here=os.getcwd()),
                       '/static={here}/static'.format(here=os.getcwd())),
        'die-on-term': True,
        'enable-threads': True,
        'hook-master-start': ('unix_signal:2 gracefully_kill_them_all',
                              'unix_signal:15 gracefully_kill_them_all'),
        'py-call-osafterfork': True,
    }
    __add_config_file_arg(args, config_file, cliargs.app)
    if not __arg_set('module', uwsgi_kwargs):
        __add_arg(args, 'module', 'galaxy.webapps.{app}.buildapp:uwsgi_app()'.format(app=cliargs.app))
    for arg in DEFAULT_ARGS['_all_'] + DEFAULT_ARGS[cliargs.app]:
        if not __arg_set(arg, uwsgi_kwargs):
            __add_arg(args, arg, defaults[arg])
    print(' '.join(args))
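The helpers referenced above (__arg_set, __add_arg, __add_config_file_arg) are defined elsewhere in the same module and are not shown in this excerpt. Purely as a reading aid, here is a rough, assumed sketch of what the first two plausibly do; the exact formatting of the emitted flags is a guess, not the real implementation.

def __arg_set(arg, kwargs):
    # Assumed behavior: an option counts as "set" if the parsed [uwsgi]
    # section of the config file already provides it.
    return arg in kwargs


def __add_arg(args, arg, value):
    # Assumed behavior: uWSGI accepts repeated options, so tuple-valued
    # defaults expand into one flag per element; boolean True becomes a
    # bare flag with no value.
    if isinstance(value, tuple):
        for v in value:
            __add_arg(args, arg, v)
    elif value is True:
        args.append('--%s' % arg)
    else:
        args.append('--%s' % arg)
        args.append(str(value))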
def _init(config, need_app=False):
    if config.startswith('/'):
        config_file = os.path.abspath(config)
    else:
        config_file = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, config))

    properties = load_app_properties(ini_file=config_file)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    if not config.database_connection:
        logging.warning("The database connection is empty. If you are using the default value, please uncomment that in your galaxy.ini")

    if need_app:
        app = galaxy.app.UniverseApplication(global_conf={'__file__': config_file, 'here': os.getcwd()})
    else:
        app = None

    return (
        mapping.init(
            config.file_path,
            config.database_connection,
            create_tables=False,
            object_store=object_store
        ),
        object_store,
        config.database_connection.split(':')[0],
        config,
        app
    )
def load_galaxy_app(config_builder, config_env=False, log=None, **kwds):
    # Allow specification of log so daemon can reuse properly configured one.
    if log is None:
        log = logging.getLogger(__name__)
    # If called in daemon mode, set the ROOT directory and ensure Galaxy is on
    # sys.path.
    if config_env:
        try:
            os.chdir(GALAXY_ROOT_DIR)
        except Exception:
            log.exception("Failed to chdir")
            raise
        try:
            sys.path.insert(1, GALAXY_LIB_DIR)
        except Exception:
            log.exception("Failed to add Galaxy to sys.path")
            raise
    config_builder.setup_logging()
    from galaxy.util.properties import load_app_properties
    kwds = config_builder.app_kwds()
    kwds = load_app_properties(**kwds)
    from galaxy.app import UniverseApplication
    app = UniverseApplication(global_conf={"__file__": config_builder.ini_path}, **kwds)
    app.setup_control_queue()
    return app
def app_properties_from_args(args, legacy_config_override=None, app=None):
    config_file = config_file_from_args(args, legacy_config_override=legacy_config_override, app=app)
    config_section = getattr(args, "config_section", None)
    app_properties = load_app_properties(config_file=config_file, config_section=config_section)
    return app_properties
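A minimal sketch of how such a helper is typically driven from a script's argument parser. The flag names below are illustrative only, not the exact options defined by any particular Galaxy script; they are chosen so the resulting namespace has the attributes the helper reads.

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('-c', '--config-file', default=None)   # illustrative flag; dest becomes args.config_file
parser.add_argument('--config-section', default=None)      # illustrative flag; dest becomes args.config_section
parser.add_argument('--app', default='galaxy')             # illustrative flag; dest becomes args.app
args = parser.parse_args()

# config_file_from_args() resolves the file from the args/environment; the
# resulting properties dict can then seed a galaxy.config.Configuration object.
app_properties = app_properties_from_args(args)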
def _get_uwsgi_args(cliargs, kwargs):
    # it'd be nice if we didn't have to reparse here but we need things out of more than one section
    config_file = cliargs.config_file or kwargs.get('__file__')
    uwsgi_kwargs = load_app_properties(config_file=config_file, config_section='uwsgi')
    args = []
    defaults = {
        'pythonpath': 'lib',
        'threads': '4',
        'buffer-size': '16384',     # https://github.com/galaxyproject/galaxy/issues/1530
        'http': 'localhost:{port}'.format(port=DEFAULT_PORTS[cliargs.app]),
        'static-map': ('/static/style={here}/static/style/blue'.format(here=os.getcwd()),
                       '/static={here}/static'.format(here=os.getcwd())),
        'die-on-term': True,
        'enable-threads': True,
        'hook-master-start': ('unix_signal:2 gracefully_kill_them_all',
                              'unix_signal:15 gracefully_kill_them_all'),
        'py-call-osafterfork': True,
    }
    __add_config_file_arg(args, config_file, cliargs.app)
    if not __arg_set('module', uwsgi_kwargs):
        __add_arg(args, 'module', 'galaxy.webapps.{app}.buildapp:uwsgi_app()'.format(app=cliargs.app))
    # only include virtualenv if it's set/exists, otherwise this breaks conda-env'd Galaxy
    if not __arg_set('virtualenv', uwsgi_kwargs) and ('VIRTUAL_ENV' in os.environ or os.path.exists('.venv')):
        __add_arg(args, 'virtualenv', os.environ.get('VIRTUAL_ENV', '.venv'))
    for arg in DEFAULT_ARGS['_all_'] + DEFAULT_ARGS[cliargs.app]:
        if not __arg_set(arg, uwsgi_kwargs):
            __add_arg(args, arg, defaults[arg])
    print(' '.join(args))
def get_config(argv, use_argparse=True, cwd=None):
    """
    Read sys.argv and parse out repository of migrations and database url.

    >>> import os
    >>> from six.moves.configparser import SafeConfigParser
    >>> from shutil import rmtree
    >>> from tempfile import mkdtemp
    >>> config_dir = mkdtemp()
    >>> os.makedirs(os.path.join(config_dir, 'config'))
    >>> def write_ini(path, property, value):
    ...     p = SafeConfigParser()
    ...     p.add_section('app:main')
    ...     p.set('app:main', property, value)
    ...     with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f)
    >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1')
    >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
    >>> config['repo']
    'lib/galaxy/webapps/tool_shed/model/migrate'
    >>> config['db_url']
    'sqlite:///pg/testdb1'
    >>> write_ini('galaxy.ini', 'database_file', 'moo.sqlite')
    >>> config = get_config(['manage_db.py'], cwd=config_dir)
    >>> uri_with_env = os.getenv("GALAXY_TEST_DBURI", "sqlite:///moo.sqlite?isolation_level=IMMEDIATE")
    >>> config['db_url'] == uri_with_env
    True
    >>> config['repo']
    'lib/galaxy/model/migrate'
    >>> rmtree(config_dir)
    """
    config_file, config_section, database = _read_model_arguments(argv, use_argparse=use_argparse)
    database_defaults = DATABASE[database]
    if config_file is None:
        config_names = database_defaults.get('config_names', DEFAULT_CONFIG_NAMES)
        if cwd:
            cwd = [cwd, os.path.join(cwd, 'config')]
        config_file = find_config_file(config_names, dirs=cwd)

    repo = database_defaults['repo']
    config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX)
    config_override = database_defaults.get('config_override', 'GALAXY_CONFIG_')
    default_sqlite_file = database_defaults['default_sqlite_file']
    if config_section is None:
        if not config_file or get_ext(config_file, ignore='sample') == 'yaml':
            config_section = database_defaults.get('config_section', None)
        else:
            # Just use the default found by load_app_properties.
            config_section = None
    properties = load_app_properties(config_file=config_file, config_prefix=config_override, config_section=config_section)

    if ("%sdatabase_connection" % config_prefix) in properties:
        db_url = properties["%sdatabase_connection" % config_prefix]
    elif ("%sdatabase_file" % config_prefix) in properties:
        database_file = properties["%sdatabase_file" % config_prefix]
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % database_file
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file

    return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
def get_config(argv, use_argparse=True, cwd=None):
    """
    Read sys.argv and parse out repository of migrations and database url.

    >>> import os
    >>> from six.moves.configparser import SafeConfigParser
    >>> from shutil import rmtree
    >>> from tempfile import mkdtemp
    >>> config_dir = mkdtemp()
    >>> os.makedirs(os.path.join(config_dir, 'config'))
    >>> def write_ini(path, property, value):
    ...     p = SafeConfigParser()
    ...     p.add_section('app:main')
    ...     p.set('app:main', property, value)
    ...     with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f)
    >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1')
    >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
    >>> config['repo']
    'lib/galaxy/webapps/tool_shed/model/migrate'
    >>> config['db_url']
    'sqlite:///pg/testdb1'
    >>> write_ini('galaxy.ini', 'database_file', 'moo.sqlite')
    >>> config = get_config(['manage_db.py'], cwd=config_dir)
    >>> config['db_url']
    'sqlite:///moo.sqlite?isolation_level=IMMEDIATE'
    >>> config['repo']
    'lib/galaxy/model/migrate'
    >>> rmtree(config_dir)
    """
    config_file, config_section, database = _read_model_arguments(argv, use_argparse=use_argparse)
    database_defaults = DATABASE[database]
    if config_file is None:
        config_names = database_defaults.get('config_names', DEFAULT_CONFIG_NAMES)
        if cwd:
            cwd = [cwd, os.path.join(cwd, 'config')]
        config_file = find_config_file(config_names, dirs=cwd)

    repo = database_defaults['repo']
    config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX)
    config_override = database_defaults.get('config_override', 'GALAXY_CONFIG_')
    default_sqlite_file = database_defaults['default_sqlite_file']
    if config_section is None:
        if not config_file or get_ext(config_file, ignore='sample') == 'yaml':
            config_section = database_defaults.get('config_section', None)
        else:
            # Just use the default found by load_app_properties.
            config_section = None
    properties = load_app_properties(config_file=config_file, config_prefix=config_override, config_section=config_section)

    if ("%sdatabase_connection" % config_prefix) in properties:
        db_url = properties["%sdatabase_connection" % config_prefix]
    elif ("%sdatabase_file" % config_prefix) in properties:
        database_file = properties["%sdatabase_file" % config_prefix]
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % database_file
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file

    return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
def get_config( argv, cwd=None ):
    """
    Read sys.argv and parse out repository of migrations and database url.

    >>> from ConfigParser import SafeConfigParser
    >>> from tempfile import mkdtemp
    >>> config_dir = mkdtemp()
    >>> os.makedirs(os.path.join(config_dir, 'config'))
    >>> def write_ini(path, property, value):
    ...     p = SafeConfigParser()
    ...     p.add_section('app:main')
    ...     p.set('app:main', property, value)
    ...     with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f)
    >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1')
    >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir)
    >>> config['repo']
    'lib/galaxy/webapps/tool_shed/model/migrate'
    >>> config['db_url']
    'sqlite:///pg/testdb1'
    >>> write_ini('galaxy.ini', 'database_file', 'moo.sqlite')
    >>> config = get_config(['manage_db.py'], cwd=config_dir)
    >>> config['db_url']
    'sqlite:///moo.sqlite?isolation_level=IMMEDIATE'
    >>> config['repo']
    'lib/galaxy/model/migrate'
    """
    if argv and (argv[-1] in DATABASE):
        database = argv.pop()  # database name tool_shed, galaxy, or install.
    else:
        database = 'galaxy'
    database_defaults = DATABASE[ database ]

    default = database_defaults.get( 'config_file', DEFAULT_CONFIG_FILE )
    old_default = database_defaults.get( 'old_config_file' )
    if cwd is not None:
        default = os.path.join( cwd, default )
        old_default = os.path.join( cwd, old_default )
    config_file = read_config_file_arg( argv, default, old_default )
    repo = database_defaults[ 'repo' ]
    config_prefix = database_defaults.get( 'config_prefix', DEFAULT_CONFIG_PREFIX )
    config_override = database_defaults.get( 'config_override', 'GALAXY_CONFIG_' )
    default_sqlite_file = database_defaults[ 'default_sqlite_file' ]

    if cwd:
        config_file = os.path.join( cwd, config_file )

    properties = load_app_properties( ini_file=config_file, config_prefix=config_override )

    if ("%sdatabase_connection" % config_prefix) in properties:
        db_url = properties[ "%sdatabase_connection" % config_prefix ]
    elif ("%sdatabase_file" % config_prefix) in properties:
        database_file = properties[ "%sdatabase_file" % config_prefix ]
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % database_file
    else:
        db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file

    return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
def parse_configs( self ):
    self.config = load_app_properties( ini_file=self.config_file )
    job_conf_xml = self.config.get( "job_config_file", join( dirname( self.config_file ), 'job_conf.xml' ) )
    try:
        for plugin in ElementTree.parse( job_conf_xml ).find( 'plugins' ):
            self.job_runners.append( plugin.attrib['load'] )
    except (OSError, IOError):
        pass
def parse_configs(self):
    self.config = load_app_properties(config_file=self.config_file)
    job_conf_xml = self.config.get(
        "job_config_file",
        join(dirname(self.config_file), 'job_conf.xml'))
    try:
        for plugin in ElementTree.parse(job_conf_xml).find('plugins').findall('plugin'):
            if 'load' in plugin.attrib:
                self.job_runners.append(plugin.attrib['load'])
    except (OSError, IOError):
        pass
    try:
        for plugin in ElementTree.parse(job_conf_xml).findall('.//destination/param[@id="rules_module"]'):
            self.job_rule_modules.append(plugin.text)
    except (OSError, IOError):
        pass
    object_store_conf_xml = self.config.get(
        "object_store_config_file",
        join(dirname(self.config_file), 'object_store_conf.xml'))
    try:
        for store in ElementTree.parse(object_store_conf_xml).iter('object_store'):
            if 'type' in store.attrib:
                self.object_stores.append(store.attrib['type'])
    except (OSError, IOError):
        pass
    # Parse auth conf
    auth_conf_xml = self.config.get(
        "auth_config_file",
        join(dirname(self.config_file), 'auth_conf.xml'))
    try:
        for auth in ElementTree.parse(auth_conf_xml).findall('authenticator'):
            auth_type = auth.find('type')
            if auth_type is not None:
                self.authenticators.append(auth_type.text)
    except (OSError, IOError):
        pass
    # Parse containers config
    containers_conf_yml = self.config.get(
        "containers_config_file",
        join(dirname(self.config_file), 'containers_conf.yml'))
    containers_conf = parse_containers_config(containers_conf_yml)
    self.container_interface_types = [c.get('type', None) for c in containers_conf.values()]
    # Parse error report config
    error_report_yml = self.config.get(
        "error_report_file",
        join(dirname(self.config_file), 'error_report.yml'))
    try:
        with open(error_report_yml, "r") as f:
            error_reporters = yaml.safe_load(f)
            self.error_report_modules = [er.get('type', None) for er in error_reporters]
    except (OSError, IOError):
        pass
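To make the XML lookups above concrete, here is a small self-contained sketch with an inline, made-up job_conf.xml fragment; it runs the same ElementTree queries the parser uses and shows what values they extract.

from xml.etree import ElementTree

# Minimal, made-up job_conf.xml fragment for illustration only.
JOB_CONF = """
<job_conf>
    <plugins>
        <plugin id="local" type="runner" load="galaxy.jobs.runners.local:LocalJobRunner"/>
        <plugin id="slurm" type="runner" load="galaxy.jobs.runners.slurm:SlurmJobRunner"/>
    </plugins>
    <destinations>
        <destination id="dynamic" runner="dynamic">
            <param id="rules_module">galaxy.jobs.rules</param>
        </destination>
    </destinations>
</job_conf>
"""

root = ElementTree.fromstring(JOB_CONF)
# Same lookups as above: runner plugins with a 'load' attribute, and any
# destination param that names a rules_module.
runners = [p.attrib['load'] for p in root.find('plugins').findall('plugin') if 'load' in p.attrib]
rule_modules = [p.text for p in root.findall('.//destination/param[@id="rules_module"]')]
print(runners)       # ['galaxy.jobs.runners.local:LocalJobRunner', 'galaxy.jobs.runners.slurm:SlurmJobRunner']
print(rule_modules)  # ['galaxy.jobs.rules']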
def get_app_properties():
    config_file = os.environ.get("GALAXY_CONFIG_FILE")
    if not config_file:
        galaxy_root_dir = os.environ.get('GALAXY_ROOT_DIR')
        if galaxy_root_dir:
            config_file = find_config(config_file, galaxy_root_dir)
    if config_file:
        return load_app_properties(
            config_file=os.path.abspath(config_file),
            config_section='galaxy',
        )
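A brief usage sketch, assuming get_app_properties() as defined above; the config path set here is a hypothetical location, in a real deployment the environment variable is usually exported by the startup scripts.

import os

# Hypothetical path for illustration only.
os.environ["GALAXY_CONFIG_FILE"] = "/srv/galaxy/config/galaxy.yml"

properties = get_app_properties()
if properties:
    # The returned dict holds the flattened [galaxy] section options.
    print(properties.get("database_connection"))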
def __init__( self, config_file, ini_section="app:%s" % DEFAULT_INI_APP ):
    logging.raiseExceptions = False
    os.chdir(GALAXY_ROOT_DIR)
    self.name = 'galaxy'
    self.new_installation = False
    kwds = dict(
        ini_file=config_file,
        ini_section="app:%s" % DEFAULT_INI_APP,
    )
    kwds = load_app_properties(kwds=kwds, ini_file=config_file)
    print(config_file)
    self.config = config.Configuration( global_conf={'__file__': config_file}, **kwds )
    self._configure_object_store( fsmon=False )
    self._configure_models( check_migrate_databases=False, check_migrate_tools=False, config_file=config_file )
def parse_configs(self):
    self.config = load_app_properties(ini_file=self.config_file)
    job_conf_xml = self.config.get(
        "job_config_file",
        join(dirname(self.config_file), 'job_conf.xml'))
    try:
        for plugin in ElementTree.parse(job_conf_xml).find('plugins').findall('plugin'):
            if 'load' in plugin.attrib:
                self.job_runners.append(plugin.attrib['load'])
    except (OSError, IOError):
        pass
def __init__(self, config_file, config=None):
    self.config_file = config_file
    self.job_runners = []
    self.authenticators = []
    self.object_stores = []
    self.file_sources = []
    self.conditional_reqs = []
    self.container_interface_types = []
    self.job_rule_modules = []
    self.error_report_modules = []
    if config is None:
        self.config = load_app_properties(config_file=self.config_file)
    else:
        self.config = config
    self.parse_configs()
    self.get_conditional_requirements()
def _init(config):
    if config.startswith('/'):
        config = os.path.abspath(config)
    else:
        config = os.path.abspath(
            os.path.join(os.path.dirname(__file__), os.pardir, config))

    properties = load_app_properties(ini_file=config)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    return (mapping.init(config.file_path, config.database_connection,
                         create_tables=False, object_store=object_store),
            object_store,
            config.database_connection.split(':')[0])
def _get_uwsgi_args(cliargs, kwargs):
    # it'd be nice if we didn't have to reparse here but we need things out of more than one section
    config_file = cliargs.config_file or kwargs.get('__file__')
    uwsgi_kwargs = load_app_properties(config_file=config_file, config_section='uwsgi')
    args = []
    ts_cron_config_option = '' if config_file is None else '-c %s' % config_file
    defaults = {
        'pythonpath': 'lib',
        'threads': '4',
        'buffer-size': '16384',     # https://github.com/galaxyproject/galaxy/issues/1530
        'http': 'localhost:{port}'.format(port=DEFAULT_PORTS[cliargs.app]),
        'static-map': ('/static/style={here}/static/style/blue'.format(here=os.getcwd()),
                       '/static={here}/static'.format(here=os.getcwd()),
                       '/favicon.ico={here}/static/favicon.ico'.format(here=os.getcwd())),
        'die-on-term': True,
        'enable-threads': True,
        'hook-master-start': ('unix_signal:2 gracefully_kill_them_all',
                              'unix_signal:15 gracefully_kill_them_all'),
        'py-call-osafterfork': True,
        'cron': '0 -1 -1 -1 -1 python scripts/tool_shed/build_ts_whoosh_index.py %s --config-section tool_shed -d' % ts_cron_config_option,
        'umask': '027',
    }
    __add_config_file_arg(args, config_file, cliargs.app)
    if not __arg_set('module', uwsgi_kwargs):
        if cliargs.app in ["tool_shed"]:
            __add_arg(args, 'module', 'tool_shed.webapp.buildapp:uwsgi_app()')
        else:
            __add_arg(args, 'module', 'galaxy.webapps.{app}.buildapp:uwsgi_app()'.format(app=cliargs.app))
    # only include virtualenv if it's set/exists, otherwise this breaks conda-env'd Galaxy
    if not __arg_set('virtualenv', uwsgi_kwargs) and ('VIRTUAL_ENV' in os.environ or os.path.exists('.venv')):
        __add_arg(args, 'virtualenv', os.environ.get('VIRTUAL_ENV', '.venv'))
    # Client dev server for HMR
    hmr_server = os.environ.get('GALAXY_CLIENT_DEV_SERVER', None)
    if hmr_server:
        # Something like this, which is the default in the package scripts
        # route: ^/static/scripts/bundled/ http:127.0.0.1:8081
        if hmr_server.lower() in ['1', 'true', 'default']:
            hmr_server = "http:127.0.0.1:8081"
        __add_arg(args, 'route', '^/static/scripts/bundled/ {hmr_server}'.format(hmr_server=hmr_server))
    # We always want to append client/galaxy/images as static-safe.
    __add_arg(args, 'static-safe', '{here}/client/galaxy/images'.format(here=os.getcwd()))
    for arg in DEFAULT_ARGS['_all_'] + DEFAULT_ARGS[cliargs.app]:
        if not __arg_set(arg, uwsgi_kwargs):
            __add_arg(args, arg, defaults[arg])
    print(' '.join(args))
def _get_uwsgi_args(cliargs, kwargs):
    # it'd be nice if we didn't have to reparse here but we need things out of more than one section
    config_file = cliargs.config_file or kwargs.get('__file__')
    uwsgi_kwargs = load_app_properties(config_file=config_file, config_section='uwsgi')
    args = []
    ts_cron_config_option = '' if config_file is None else '-c %s' % config_file
    defaults = {
        'pythonpath': 'lib',
        'threads': '4',
        'buffer-size': '16384',     # https://github.com/galaxyproject/galaxy/issues/1530
        'http': 'localhost:{port}'.format(port=DEFAULT_PORTS[cliargs.app]),
        'static-map': ('/static/style={here}/static/style/blue'.format(here=os.getcwd()),
                       '/static={here}/static'.format(here=os.getcwd()),
                       '/favicon.ico={here}/static/favicon.ico'.format(here=os.getcwd())),
        'static-safe': ('{here}/client/galaxy/images'.format(here=os.getcwd())),
        'die-on-term': True,
        'enable-threads': True,
        'hook-master-start': ('unix_signal:2 gracefully_kill_them_all',
                              'unix_signal:15 gracefully_kill_them_all'),
        'py-call-osafterfork': True,
        'cron': '0 -1 -1 -1 -1 python scripts/tool_shed/build_ts_whoosh_index.py %s --config-section tool_shed -d' % ts_cron_config_option,
    }
    __add_config_file_arg(args, config_file, cliargs.app)
    if not __arg_set('module', uwsgi_kwargs):
        __add_arg(args, 'module', 'galaxy.webapps.{app}.buildapp:uwsgi_app()'.format(app=cliargs.app))
    # only include virtualenv if it's set/exists, otherwise this breaks conda-env'd Galaxy
    if not __arg_set('virtualenv', uwsgi_kwargs) and ('VIRTUAL_ENV' in os.environ or os.path.exists('.venv')):
        __add_arg(args, 'virtualenv', os.environ.get('VIRTUAL_ENV', '.venv'))
    for arg in DEFAULT_ARGS['_all_'] + DEFAULT_ARGS[cliargs.app]:
        if not __arg_set(arg, uwsgi_kwargs):
            __add_arg(args, arg, defaults[arg])
    print(' '.join(args))
def build_galaxy_app(simple_kwargs):
    """Build a Galaxy app object from simple keyword arguments.

    Construct a paste-style complex dictionary and use load_app_properties so
    Galaxy override variables are respected. Also set up "global" references
    to the sqlalchemy database context for the Galaxy and install databases.
    """
    log.info("Galaxy database connection: %s", simple_kwargs["database_connection"])
    simple_kwargs['global_conf'] = get_webapp_global_conf()
    simple_kwargs['global_conf']['__file__'] = "config/galaxy.ini.sample"
    simple_kwargs = load_app_properties(kwds=simple_kwargs)

    # Build the Universe Application
    app = GalaxyUniverseApplication(**simple_kwargs)
    log.info("Embedded Galaxy application started")

    database_contexts.galaxy_context = app.model.context
    database_contexts.install_context = app.install_model.context

    return app
def _init(config):
    if config.startswith('/'):
        config = os.path.abspath(config)
    else:
        config = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, config))

    properties = load_app_properties(ini_file=config)
    config = galaxy.config.Configuration(**properties)
    object_store = build_object_store_from_config(config)
    return (
        mapping.init(
            config.file_path,
            config.database_connection,
            create_tables=False,
            object_store=object_store
        ),
        object_store,
        config.database_connection.split(':')[0]
    )
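A short usage sketch of the three-value return shape, assuming _init() as defined above; 'config/galaxy.ini' is a placeholder path.

# Illustrative call; the third element is the dialect prefix of
# database_connection, e.g. 'sqlite' or 'postgresql'.
model, object_store, db_scheme = _init('config/galaxy.ini')
print(db_scheme)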
def _get_uwsgi_args(cliargs, kwargs):
    # it'd be nice if we didn't have to reparse here but we need things out of more than one section
    config_file = cliargs.config_file or kwargs.get('__file__')
    uwsgi_kwargs = load_app_properties(config_file=config_file, config_section='uwsgi')
    args = []
    defaults = {
        'virtualenv': os.environ.get('VIRTUAL_ENV', './.venv'),
        'pythonpath': 'lib',
        'threads': '4',
        'buffer-size': '16384',     # https://github.com/galaxyproject/galaxy/issues/1530
        'http': 'localhost:{port}'.format(port=DEFAULT_PORTS[cliargs.app]),
        'static-map': ('/static/style={here}/static/style/blue'.format(here=os.getcwd()),
                       '/static={here}/static'.format(here=os.getcwd())),
        'die-on-term': True,
        'enable-threads': True,
        'hook-master-start': ('unix_signal:2 gracefully_kill_them_all',
                              'unix_signal:15 gracefully_kill_them_all'),
        'py-call-osafterfork': True,
    }
    __add_config_file_arg(args, config_file, cliargs.app)
    if not __arg_set('module', uwsgi_kwargs):
        __add_arg(args, 'module', 'galaxy.webapps.{app}.buildapp:uwsgi_app()'.format(app=cliargs.app))
    for arg in DEFAULT_ARGS['_all_'] + DEFAULT_ARGS[cliargs.app]:
        if not __arg_set(arg, uwsgi_kwargs):
            __add_arg(args, arg, defaults[arg])
    print(' '.join(args))
def app_factory( global_conf, **kwargs ):
    """
    Return a wsgi application serving the root object
    """
    kwargs = load_app_properties( kwds=kwargs )
    # Create the Galaxy application unless passed in
    if 'app' in kwargs:
        app = kwargs.pop( 'app' )
        galaxy.app.app = app
    else:
        try:
            app = galaxy.app.UniverseApplication( global_conf=global_conf, **kwargs )
            galaxy.app.app = app
        except:
            import traceback
            traceback.print_exc()
            sys.exit( 1 )
    # Call app's shutdown method when the interpreter exits, this cleanly stops
    # the various Galaxy application daemon threads
    atexit.register( app.shutdown )
    # Create the universe WSGI application
    webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )
    webapp.add_ui_controllers( 'galaxy.webapps.galaxy.controllers', app )
    # Force /history to go to /root/history -- needed since the tests assume this
    webapp.add_route( '/history', controller='root', action='history' )
    # Force /activate to go to the controller
    webapp.add_route( '/activate', controller='user', action='activate' )
    # These two routes handle our simple needs at the moment
    webapp.add_route( '/async/:tool_id/:data_id/:data_secret', controller='async', action='index', tool_id=None, data_id=None, data_secret=None )
    webapp.add_route( '/:controller/:action', action='index' )
    webapp.add_route( '/:action', controller='root', action='index' )
    # allow for subdirectories in extra_files_path
    webapp.add_route( '/datasets/:dataset_id/display/{filename:.+?}', controller='dataset', action='display', dataset_id=None, filename=None )
    webapp.add_route( '/datasets/:dataset_id/:action/:filename', controller='dataset', action='index', dataset_id=None, filename=None )
    webapp.add_route( '/display_application/:dataset_id/:app_name/:link_name/:user_id/:app_action/:action_param',
                      controller='dataset', action='display_application', dataset_id=None, user_id=None,
                      app_name=None, link_name=None, app_action=None, action_param=None )
    webapp.add_route( '/u/:username/d/:slug/:filename', controller='dataset', action='display_by_username_and_slug', filename=None )
    webapp.add_route( '/u/:username/p/:slug', controller='page', action='display_by_username_and_slug' )
    webapp.add_route( '/u/:username/h/:slug', controller='history', action='display_by_username_and_slug' )
    webapp.add_route( '/u/:username/w/:slug', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/:username/w/:slug/:format', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug' )
    webapp.add_route( '/search', controller='search', action='index' )

    # TODO: Refactor above routes into external method to allow testing in
    # isolation as well.
    populate_api_routes( webapp, app )

    # ==== Done
    # Indicate that all configuration settings have been provided
    webapp.finalize_config()

    # Wrap the webapp in some useful middleware
    if kwargs.get( 'middleware', True ):
        webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
    if asbool( kwargs.get( 'static_enabled', True ) ):
        if app.config.is_uwsgi:
            log.error("Static middleware is enabled in your configuration but this is a uwsgi process. Refusing to wrap in static middleware.")
        else:
            webapp = wrap_in_static( webapp, global_conf, plugin_frameworks=[ app.visualizations_registry ], **kwargs )
    if asbool(kwargs.get('pack_scripts', False)):
        log.warn( "The 'pack_scripts' option is deprecated" )
        pack_scripts()
    # Close any pooled database connections before forking
    try:
        galaxy.model.mapping.metadata.engine.connection_provider._pool.dispose()
    except:
        pass
    try:
        galaxy.model.tool_shed_install.mapping.metadata.engine.connection_provider._pool.dispose()
    except:
        pass
    if not app.config.is_uwsgi:
        postfork_setup()
    # Return
    return webapp
def paste_app_factory( global_conf, **kwargs ):
    """
    Return a wsgi application serving the root object
    """
    kwargs = load_app_properties( kwds=kwargs )
    # Create the Galaxy application unless passed in
    if 'app' in kwargs:
        app = kwargs.pop( 'app' )
        galaxy.app.app = app
    else:
        try:
            app = galaxy.app.UniverseApplication( global_conf=global_conf, **kwargs )
            galaxy.app.app = app
        except:
            import traceback
            traceback.print_exc()
            sys.exit( 1 )
    # Call app's shutdown method when the interpreter exits, this cleanly stops
    # the various Galaxy application daemon threads
    atexit.register( app.shutdown )
    # Create the universe WSGI application
    webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )

    # STANDARD CONTROLLER ROUTES
    webapp.add_ui_controllers( 'galaxy.webapps.galaxy.controllers', app )
    # Force /history to go to view of current
    webapp.add_route( '/history', controller='history', action='view' )
    webapp.add_route( '/history/view/{id}', controller='history', action='view' )

    # THIS IS A TEMPORARY ROUTE FOR THE 17.01 RELEASE
    # This route supports the previous hide/delete-all-hidden functionality in a history.
    # It will be removed after 17.01.
    webapp.add_route( '/history/adjust_hidden', controller='history', action='adjust_hidden' )

    # Force /activate to go to the controller
    webapp.add_route( '/activate', controller='user', action='activate' )
    webapp.add_route( '/login', controller='root', action='login' )

    # These two routes handle our simple needs at the moment
    webapp.add_route( '/async/{tool_id}/{data_id}/{data_secret}', controller='async', action='index', tool_id=None, data_id=None, data_secret=None )
    webapp.add_route( '/{controller}/{action}', action='index' )
    webapp.add_route( '/{action}', controller='root', action='index' )

    # allow for subdirectories in extra_files_path
    webapp.add_route( '/datasets/{dataset_id}/display/{filename:.+?}', controller='dataset', action='display', dataset_id=None, filename=None )
    webapp.add_route( '/datasets/{dataset_id}/{action}/{filename}', controller='dataset', action='index', dataset_id=None, filename=None )
    webapp.add_route( '/display_application/{dataset_id}/{app_name}/{link_name}/{user_id}/{app_action}/{action_param}/{action_param_extra:.+?}',
                      controller='dataset', action='display_application', dataset_id=None, user_id=None,
                      app_name=None, link_name=None, app_action=None, action_param=None, action_param_extra=None )
    webapp.add_route( '/u/{username}/d/{slug}/{filename}', controller='dataset', action='display_by_username_and_slug', filename=None )
    webapp.add_route( '/u/{username}/p/{slug}', controller='page', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/h/{slug}', controller='history', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/w/{slug}', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/w/{slug}/{format}', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/v/{slug}', controller='visualization', action='display_by_username_and_slug' )
    webapp.add_route( '/search', controller='search', action='index' )

    # TODO: Refactor above routes into external method to allow testing in
    # isolation as well.
    populate_api_routes( webapp, app )

    # CLIENTSIDE ROUTES
    # The following are routes that are handled completely on the clientside.
    # The following routes don't bootstrap any information, simply provide the
    # base analysis interface at which point the application takes over.
    webapp.add_client_route( '/tours' )
    webapp.add_client_route( '/tours/{tour_id}' )
    webapp.add_client_route( '/user' )
    webapp.add_client_route( '/user/{form_id}' )

    # ==== Done
    # Indicate that all configuration settings have been provided
    webapp.finalize_config()

    # Wrap the webapp in some useful middleware
    if kwargs.get( 'middleware', True ):
        webapp = wrap_in_middleware(webapp, global_conf, app.application_stack, **kwargs)
    if asbool( kwargs.get( 'static_enabled', True ) ):
        webapp = wrap_if_allowed(webapp, app.application_stack, wrap_in_static,
                                 args=(global_conf,),
                                 kwargs=dict(plugin_frameworks=[app.visualizations_registry], **kwargs))
    # Close any pooled database connections before forking
    try:
        galaxy.model.mapping.metadata.bind.dispose()
    except:
        log.exception("Unable to dispose of pooled galaxy model database connections.")
    try:
        # This model may not actually be bound.
        if galaxy.model.tool_shed_install.mapping.metadata.bind:
            galaxy.model.tool_shed_install.mapping.metadata.bind.dispose()
    except:
        log.exception("Unable to dispose of pooled toolshed install model database connections.")

    app.application_stack.register_postfork_function(postfork_setup)

    for th in threading.enumerate():
        if th.is_alive():
            log.debug("Prior to webapp return, Galaxy thread %s is alive.", th)
    # Return
    return webapp
def parse_configs(self):
    self.config = load_app_properties(config_file=self.config_file)

    def load_job_config_dict(job_conf_dict):
        for runner in job_conf_dict.get("runners"):
            if "load" in runner:
                self.job_runners.append(runner.get("load"))
            if "rules_module" in runner:
                self.job_rule_modules.append(runner.get("rules_module"))
            if "params" in runner:
                runner_params = runner["params"]
                if "rules_module" in runner_params:
                    self.job_rule_modules.append(runner_params.get("rules_module"))

    if "job_config" in self.config:
        load_job_config_dict(self.config.get("job_config"))
    else:
        job_conf_path = self.config.get(
            "job_config_file",
            join(dirname(self.config_file), 'job_conf.xml'))
        if '.xml' in job_conf_path:
            try:
                try:
                    for plugin in parse_xml(job_conf_path).find('plugins').findall('plugin'):
                        if 'load' in plugin.attrib:
                            self.job_runners.append(plugin.attrib['load'])
                except (OSError, IOError):
                    pass
                try:
                    for plugin in parse_xml(job_conf_path).findall('.//destination/param[@id="rules_module"]'):
                        self.job_rule_modules.append(plugin.text)
                except (OSError, IOError):
                    pass
            except etree.ParseError:
                pass
        else:
            try:
                with open(job_conf_path, "r") as f:
                    job_conf_dict = yaml.safe_load(f)
                load_job_config_dict(job_conf_dict)
            except (OSError, IOError):
                pass

    object_store_conf_xml = self.config.get(
        "object_store_config_file",
        join(dirname(self.config_file), 'object_store_conf.xml'))
    try:
        for store in parse_xml(object_store_conf_xml).iter('object_store'):
            if 'type' in store.attrib:
                self.object_stores.append(store.attrib['type'])
    except (OSError, IOError):
        pass

    # Parse auth conf
    auth_conf_xml = self.config.get(
        "auth_config_file",
        join(dirname(self.config_file), 'auth_conf.xml'))
    try:
        for auth in parse_xml(auth_conf_xml).findall('authenticator'):
            auth_type = auth.find('type')
            if auth_type is not None:
                self.authenticators.append(auth_type.text)
    except (OSError, IOError):
        pass

    # Parse containers config
    containers_conf_yml = self.config.get(
        "containers_config_file",
        join(dirname(self.config_file), 'containers_conf.yml'))
    containers_conf = parse_containers_config(containers_conf_yml)
    self.container_interface_types = [c.get('type', None) for c in containers_conf.values()]

    # Parse error report config
    error_report_yml = self.config.get(
        "error_report_file",
        join(dirname(self.config_file), 'error_report.yml'))
    try:
        with open(error_report_yml, "r") as f:
            error_reporters = yaml.safe_load(f)
            self.error_report_modules = [er.get('type', None) for er in error_reporters]
    except (OSError, IOError):
        pass
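For reference, the shape of the "job_config" structure that load_job_config_dict iterates over, sketched here as a Python literal rather than YAML; this is only what the code above consumes, the exact schema of a real job_conf.yml may differ between Galaxy releases.

# Made-up example of the parsed job config structure the loader walks.
job_conf_dict = {
    "runners": [
        {"id": "local", "load": "galaxy.jobs.runners.local:LocalJobRunner"},
        {
            "id": "dynamic",
            "load": "galaxy.jobs.runners.local:LocalJobRunner",
            # Nested params are also checked for a rules_module entry.
            "params": {"rules_module": "galaxy.jobs.rules"},
        },
    ],
}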
def _app_properties(args):
    # FIXME: you can use galaxy.util.path.extensions for this
    config_file = args.config_file or find_config_file(args.app)
    app_properties = load_app_properties(config_file=config_file, config_section=args.config_section)
    return app_properties
import logging
import os
import sys

from ConfigParser import SafeConfigParser

from migrate.versioning.shell import main

sys.path.insert(1, os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir, 'lib')))

from galaxy.model.orm.scripts import read_config_file_arg
from galaxy.util.properties import load_app_properties

log = logging.getLogger( __name__ )

config_file = read_config_file_arg( sys.argv, 'config/galaxy.ini', 'universe_wsgi.ini' )
if not os.path.exists( config_file ):
    print("Galaxy config file does not exist (hint: use '-c config.ini' for non-standard locations): %s" % config_file)
    sys.exit( 1 )
repo = 'lib/tool_shed/galaxy_install/migrate'

properties = load_app_properties( ini_file=config_file )
cp = SafeConfigParser()
cp.read( config_file )

if config_file == 'config/galaxy.ini.sample' and 'GALAXY_TEST_DBURI' in os.environ:
    # Running functional tests.
    db_url = os.environ[ 'GALAXY_TEST_DBURI' ]
elif "install_database_connection" in properties:
    db_url = properties[ "install_database_connection" ]
elif "database_connection" in properties:
    db_url = properties[ "database_connection" ]
elif "database_file" in properties:
    db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % properties[ "database_file" ]
else:
    db_url = "sqlite:///./database/universe.sqlite?isolation_level=IMMEDIATE"
def app_factory(global_conf, **kwargs):
    """
    Return a wsgi application serving the root object
    """
    kwargs = load_app_properties(kwds=kwargs)
    # Create the Galaxy application unless passed in
    if 'app' in kwargs:
        app = kwargs.pop('app')
    else:
        try:
            from galaxy.app import UniverseApplication
            app = UniverseApplication(global_conf=global_conf, **kwargs)
        except:
            import traceback
            traceback.print_exc()
            sys.exit(1)
    # Call app's shutdown method when the interpreter exits, this cleanly stops
    # the various Galaxy application daemon threads
    atexit.register(app.shutdown)
    # Create the universe WSGI application
    webapp = GalaxyWebApplication(app, session_cookie='galaxysession', name='galaxy')
    webapp.add_ui_controllers('galaxy.webapps.galaxy.controllers', app)
    # Force /history to go to /root/history -- needed since the tests assume this
    webapp.add_route('/history', controller='root', action='history')
    # Force /activate to go to the controller
    webapp.add_route('/activate', controller='user', action='activate')
    # These two routes handle our simple needs at the moment
    webapp.add_route('/async/:tool_id/:data_id/:data_secret', controller='async', action='index', tool_id=None, data_id=None, data_secret=None)
    webapp.add_route('/:controller/:action', action='index')
    webapp.add_route('/:action', controller='root', action='index')
    # allow for subdirectories in extra_files_path
    webapp.add_route('/datasets/:dataset_id/display/{filename:.+?}', controller='dataset', action='display', dataset_id=None, filename=None)
    webapp.add_route('/datasets/:dataset_id/:action/:filename', controller='dataset', action='index', dataset_id=None, filename=None)
    webapp.add_route('/display_application/:dataset_id/:app_name/:link_name/:user_id/:app_action/:action_param',
                     controller='dataset', action='display_application', dataset_id=None, user_id=None,
                     app_name=None, link_name=None, app_action=None, action_param=None)
    webapp.add_route('/u/:username/d/:slug/:filename', controller='dataset', action='display_by_username_and_slug', filename=None)
    webapp.add_route('/u/:username/p/:slug', controller='page', action='display_by_username_and_slug')
    webapp.add_route('/u/:username/h/:slug', controller='history', action='display_by_username_and_slug')
    webapp.add_route('/u/:username/w/:slug', controller='workflow', action='display_by_username_and_slug')
    webapp.add_route('/u/:username/w/:slug/:format', controller='workflow', action='display_by_username_and_slug')
    webapp.add_route('/u/:username/v/:slug', controller='visualization', action='display_by_username_and_slug')
    webapp.add_route('/search', controller='search', action='index')

    # TODO: Refactor above routes into external method to allow testing in
    # isolation as well.
    populate_api_routes(webapp, app)

    # ==== Done
    # Indicate that all configuration settings have been provided
    webapp.finalize_config()

    # Wrap the webapp in some useful middleware
    if kwargs.get('middleware', True):
        webapp = wrap_in_middleware(webapp, global_conf, **kwargs)
    if asbool(kwargs.get('static_enabled', True)):
        webapp = wrap_in_static(webapp, global_conf, plugin_frameworks=[app.visualizations_registry], **kwargs)
        # webapp = wrap_in_static( webapp, global_conf, plugin_frameworks=None, **kwargs )
    if asbool(kwargs.get('pack_scripts', False)):
        pack_scripts()
    # Close any pooled database connections before forking
    try:
        galaxy.model.mapping.metadata.engine.connection_provider._pool.dispose()
    except:
        pass
    # Close any pooled database connections before forking
    try:
        galaxy.model.tool_shed_install.mapping.metadata.engine.connection_provider._pool.dispose()
    except:
        pass
    # Return
    return webapp
def paste_app_factory( global_conf, **kwargs ):
    """
    Return a wsgi application serving the root object
    """
    kwargs = load_app_properties( kwds=kwargs )
    # Create the Galaxy application unless passed in
    if 'app' in kwargs:
        app = kwargs.pop( 'app' )
        galaxy.app.app = app
    else:
        try:
            app = galaxy.app.UniverseApplication( global_conf=global_conf, **kwargs )
            galaxy.app.app = app
        except:
            import traceback
            traceback.print_exc()
            sys.exit( 1 )
    # Call app's shutdown method when the interpreter exits, this cleanly stops
    # the various Galaxy application daemon threads
    atexit.register( app.shutdown )
    # Create the universe WSGI application
    webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )

    # STANDARD CONTROLLER ROUTES
    webapp.add_ui_controllers( 'galaxy.webapps.galaxy.controllers', app )
    # Force /history to go to view of current
    webapp.add_route( '/history', controller='history', action='view' )
    webapp.add_route( '/history/view/{id}', controller='history', action='view' )
    # Force /activate to go to the controller
    webapp.add_route( '/activate', controller='user', action='activate' )

    # These two routes handle our simple needs at the moment
    webapp.add_route( '/async/{tool_id}/{data_id}/{data_secret}', controller='async', action='index', tool_id=None, data_id=None, data_secret=None )
    webapp.add_route( '/{controller}/{action}', action='index' )
    webapp.add_route( '/{action}', controller='root', action='index' )

    # allow for subdirectories in extra_files_path
    webapp.add_route( '/datasets/{dataset_id}/display/{filename:.+?}', controller='dataset', action='display', dataset_id=None, filename=None )
    webapp.add_route( '/datasets/{dataset_id}/{action}/{filename}', controller='dataset', action='index', dataset_id=None, filename=None )
    webapp.add_route( '/display_application/{dataset_id}/{app_name}/{link_name}/{user_id}/{app_action}/{action_param}/{action_param_extra:.+?}',
                      controller='dataset', action='display_application', dataset_id=None, user_id=None,
                      app_name=None, link_name=None, app_action=None, action_param=None, action_param_extra=None )
    webapp.add_route( '/u/{username}/d/{slug}/{filename}', controller='dataset', action='display_by_username_and_slug', filename=None )
    webapp.add_route( '/u/{username}/p/{slug}', controller='page', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/h/{slug}', controller='history', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/w/{slug}', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/w/{slug}/{format}', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/v/{slug}', controller='visualization', action='display_by_username_and_slug' )

    # TODO: Refactor above routes into external method to allow testing in
    # isolation as well.
    populate_api_routes( webapp, app )

    # CLIENTSIDE ROUTES
    # The following are routes that are handled completely on the clientside.
    # The following routes don't bootstrap any information, simply provide the
    # base analysis interface at which point the application takes over.
    webapp.add_client_route( '/tours' )
    webapp.add_client_route( '/tours/{tour_id}' )
    webapp.add_client_route( '/user' )
    webapp.add_client_route( '/user/{form_id}' )
    webapp.add_client_route( '/custom_builds' )

    # ==== Done
    # Indicate that all configuration settings have been provided
    webapp.finalize_config()

    # Wrap the webapp in some useful middleware
    if kwargs.get( 'middleware', True ):
        webapp = wrap_in_middleware(webapp, global_conf, app.application_stack, **kwargs)
    if asbool( kwargs.get( 'static_enabled', True ) ):
        webapp = wrap_if_allowed(webapp, app.application_stack, wrap_in_static,
                                 args=(global_conf,),
                                 kwargs=dict(plugin_frameworks=[app.visualizations_registry], **kwargs))
    # Close any pooled database connections before forking
    try:
        galaxy.model.mapping.metadata.bind.dispose()
    except:
        log.exception("Unable to dispose of pooled galaxy model database connections.")
    try:
        # This model may not actually be bound.
        if galaxy.model.tool_shed_install.mapping.metadata.bind:
            galaxy.model.tool_shed_install.mapping.metadata.bind.dispose()
    except:
        log.exception("Unable to dispose of pooled toolshed install model database connections.")

    app.application_stack.register_postfork_function(postfork_setup)

    for th in threading.enumerate():
        if th.is_alive():
            log.debug("Prior to webapp return, Galaxy thread %s is alive.", th)
    # Return
    return webapp
def paste_app_factory( global_conf, **kwargs ):
    """
    Return a wsgi application serving the root object
    """
    kwargs = load_app_properties( kwds=kwargs )
    # Create the Galaxy application unless passed in
    if 'app' in kwargs:
        app = kwargs.pop( 'app' )
        galaxy.app.app = app
    else:
        try:
            app = galaxy.app.UniverseApplication( global_conf=global_conf, **kwargs )
            galaxy.app.app = app
        except:
            import traceback
            traceback.print_exc()
            sys.exit( 1 )
    # Call app's shutdown method when the interpreter exits, this cleanly stops
    # the various Galaxy application daemon threads
    atexit.register( app.shutdown )
    # Create the universe WSGI application
    webapp = GalaxyWebApplication( app, session_cookie='galaxysession', name='galaxy' )
    webapp.add_ui_controllers( 'galaxy.webapps.galaxy.controllers', app )
    # Force /history to go to view of current
    webapp.add_route( '/history', controller='history', action='view' )
    # Force /activate to go to the controller
    webapp.add_route( '/activate', controller='user', action='activate' )
    webapp.add_route( '/login', controller='root', action='login' )
    # These two routes handle our simple needs at the moment
    webapp.add_route( '/async/{tool_id}/{data_id}/{data_secret}', controller='async', action='index', tool_id=None, data_id=None, data_secret=None )
    webapp.add_route( '/{controller}/{action}', action='index' )
    webapp.add_route( '/{action}', controller='root', action='index' )
    # allow for subdirectories in extra_files_path
    webapp.add_route( '/datasets/{dataset_id}/display/{filename:.+?}', controller='dataset', action='display', dataset_id=None, filename=None )
    webapp.add_route( '/datasets/{dataset_id}/{action}/{filename}', controller='dataset', action='index', dataset_id=None, filename=None )
    webapp.add_route( '/display_application/{dataset_id}/{app_name}/{link_name}/{user_id}/{app_action}/{action_param}/{action_param_extra:.+?}',
                      controller='dataset', action='display_application', dataset_id=None, user_id=None,
                      app_name=None, link_name=None, app_action=None, action_param=None, action_param_extra=None )
    webapp.add_route( '/u/{username}/d/{slug}/{filename}', controller='dataset', action='display_by_username_and_slug', filename=None )
    webapp.add_route( '/u/{username}/p/{slug}', controller='page', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/h/{slug}', controller='history', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/w/{slug}', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/w/{slug}/{format}', controller='workflow', action='display_by_username_and_slug' )
    webapp.add_route( '/u/{username}/v/{slug}', controller='visualization', action='display_by_username_and_slug' )
    webapp.add_route( '/search', controller='search', action='index' )

    # TODO: Refactor above routes into external method to allow testing in
    # isolation as well.
    populate_api_routes( webapp, app )

    # ==== Done
    # Indicate that all configuration settings have been provided
    webapp.finalize_config()

    # Wrap the webapp in some useful middleware
    if kwargs.get( 'middleware', True ):
        webapp = wrap_in_middleware( webapp, global_conf, **kwargs )
    if asbool( kwargs.get( 'static_enabled', True ) ):
        if process_is_uwsgi:
            log.error("Static middleware is enabled in your configuration but this is a uwsgi process. Refusing to wrap in static middleware.")
        else:
            webapp = wrap_in_static( webapp, global_conf, plugin_frameworks=[ app.visualizations_registry ], **kwargs )
    # Close any pooled database connections before forking
    try:
        galaxy.model.mapping.metadata.bind.dispose()
    except:
        log.exception("Unable to dispose of pooled galaxy model database connections.")
    try:
        # This model may not actually be bound.
        if galaxy.model.tool_shed_install.mapping.metadata.bind:
            galaxy.model.tool_shed_install.mapping.metadata.bind.dispose()
    except:
        log.exception("Unable to dispose of pooled toolshed install model database connections.")

    if not process_is_uwsgi:
        postfork_setup()
    # Return
    return webapp
def app_factory( global_conf, **kwargs ): """Return a wsgi application serving the root object""" # Create the Galaxy tool shed application unless passed in kwargs = load_app_properties( kwds=kwargs, config_prefix='TOOL_SHED_CONFIG_' ) if 'app' in kwargs: app = kwargs.pop( 'app' ) else: try: from galaxy.webapps.tool_shed.app import UniverseApplication app = UniverseApplication( global_conf=global_conf, **kwargs ) except: import traceback import sys traceback.print_exc() sys.exit( 1 ) atexit.register( app.shutdown ) # Create the universe WSGI application webapp = CommunityWebApplication( app, session_cookie='galaxycommunitysession', name="tool_shed" ) add_ui_controllers( webapp, app ) webapp.add_route( '/view/{owner}', controller='repository', action='sharable_owner' ) webapp.add_route( '/view/{owner}/{name}', controller='repository', action='sharable_repository' ) webapp.add_route( '/view/{owner}/{name}/{changeset_revision}', controller='repository', action='sharable_repository_revision' ) # Handle displaying tool help images and README file images for tools contained in repositories. webapp.add_route( '/repository/static/images/{repository_id}/{image_file:.+?}', controller='repository', action='display_image_in_repository', repository_id=None, image_file=None ) webapp.add_route( '/{controller}/{action}', action='index' ) webapp.add_route( '/{action}', controller='repository', action='index' ) # Enable 'hg clone' functionality on repos by letting hgwebapp handle the request webapp.add_route( '/repos/*path_info', controller='hg', action='handle_request', path_info='/' ) # Add the web API. # A good resource for RESTful services - http://routes.readthedocs.org/en/latest/restful.html webapp.add_api_controllers( 'galaxy.webapps.tool_shed.api', app ) webapp.mapper.connect( 'api_key_retrieval', '/api/authenticate/baseauth/', controller='authenticate', action='get_tool_shed_api_key', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( 'group', '/api/groups/', controller='groups', action='index', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( 'group', '/api/groups/', controller='groups', action='create', conditions=dict( method=[ "POST" ] ) ) webapp.mapper.connect( 'group', '/api/groups/{encoded_id}', controller='groups', action='show', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.resource( 'category', 'categories', controller='categories', name_prefix='category_', path_prefix='/api', parent_resources=dict( member_name='category', collection_name='categories' ) ) webapp.mapper.connect( 'repositories_in_category', '/api/categories/{category_id}/repositories', controller='categories', action='get_repositories', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( 'show_updates_for_repository', '/api/repositories/updates', controller='repositories', action='updates', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.resource( 'repository', 'repositories', controller='repositories', collection={ 'add_repository_registry_entry': 'POST', 'get_repository_revision_install_info': 'GET', 'get_ordered_installable_revisions': 'GET', 'get_installable_revisions': 'GET', 'remove_repository_registry_entry': 'POST', 'repository_ids_for_setting_metadata': 'GET', 'reset_metadata_on_repositories': 'POST', 'reset_metadata_on_repository': 'POST' }, name_prefix='repository_', path_prefix='/api', new={ 'import_capsule': 'POST' }, parent_resources=dict( member_name='repository', collection_name='repositories' ) ) webapp.mapper.resource( 'repository_revision', 'repository_revisions', 
member={ 'repository_dependencies': 'GET', 'export': 'POST' }, controller='repository_revisions', name_prefix='repository_revision_', path_prefix='/api', parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) ) webapp.mapper.resource( 'user', 'users', controller='users', name_prefix='user_', path_prefix='/api', parent_resources=dict( member_name='user', collection_name='users' ) ) webapp.mapper.connect( 'update_repository', '/api/repositories/{id}', controller='repositories', action='update', conditions=dict( method=[ "PATCH", "PUT" ] ) ) webapp.mapper.connect( 'repository_create_changeset_revision', '/api/repositories/{id}/changeset_revision', controller='repositories', action='create_changeset_revision', conditions=dict( method=[ "POST" ] ) ) webapp.mapper.connect( 'repository_get_metadata', '/api/repositories/{id}/metadata', controller='repositories', action='metadata', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( 'repository_show_tools', '/api/repositories/{id}/{changeset}/show_tools', controller='repositories', action='show_tools', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( 'create_repository', '/api/repositories', controller='repositories', action='create', conditions=dict( method=[ "POST" ] ) ) webapp.mapper.connect( 'tools', '/api/tools', controller='tools', action='index', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( "version", "/api/version", controller="configuration", action="version", conditions=dict( method=[ "GET" ] ) ) webapp.finalize_config() # Wrap the webapp in some useful middleware if kwargs.get( 'middleware', True ): webapp = wrap_in_middleware( webapp, global_conf, **kwargs ) if asbool( kwargs.get( 'static_enabled', True) ): if process_is_uwsgi: log.error("Static middleware is enabled in your configuration but this is a uwsgi process. Refusing to wrap in static middleware.") else: webapp = wrap_in_static( webapp, global_conf, **kwargs ) # Close any pooled database connections before forking try: galaxy.webapps.tool_shed.model.mapping.metadata.bind.dispose() except: log.exception("Unable to dispose of pooled tool_shed model database connections.") # Return return webapp
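webapp.mapper.resource() above registers a whole family of RESTful routes (index, create, show, update, delete, plus the extra collection and member actions) in a single call. A rough sketch with the routes package, using a made-up subset of the arguments, shows what gets registered:

from routes import Mapper

mapper = Mapper()
# A stripped-down version of the 'repositories' resource declared above;
# the collection/member actions here are illustrative only.
mapper.resource('repository', 'repositories', controller='repositories',
                path_prefix='/api',
                collection={'updates': 'GET'},
                member={'metadata': 'GET'})

# Inspect the generated route table (URL template -> default action).
for route in mapper.matchlist:
    print(route.routepath, route.defaults.get('action'))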
def __main__(): file_path = sys.argv.pop(1) tool_job_working_directory = tmp_dir = sys.argv.pop(1) # this is also the job_working_directory now galaxy.model.Dataset.file_path = file_path galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir config_root = sys.argv.pop(1) config_file_name = sys.argv.pop(1) if not os.path.isabs(config_file_name): config_file_name = os.path.join(config_root, config_file_name) # Set up reference to object store # First, read in the main config file for Galaxy; this is required because # the object store configuration is stored there conf_dict = load_app_properties(ini_file=config_file_name) # config object is required by ObjectStore class so create it now universe_config = config.Configuration(**conf_dict) universe_config.ensure_tempdir() object_store = build_object_store_from_config(universe_config) galaxy.model.Dataset.object_store = object_store # Set up datatypes registry datatypes_config = sys.argv.pop(1) datatypes_registry = galaxy.datatypes.registry.Registry() datatypes_registry.load_datatypes(root_dir=config_root, config=datatypes_config) galaxy.model.set_datatypes_registry(datatypes_registry) job_metadata = sys.argv.pop(1) existing_job_metadata_dict = {} new_job_metadata_dict = {} if job_metadata != "None" and os.path.exists(job_metadata): for line in open(job_metadata, "r"): try: line = stringify_dictionary_keys(json.loads(line)) if line["type"] == "dataset": existing_job_metadata_dict[line["dataset_id"]] = line elif line["type"] == "new_primary_dataset": new_job_metadata_dict[line["filename"]] = line except: continue for filenames in sys.argv[1:]: fields = filenames.split(",") filename_in = fields.pop(0) filename_kwds = fields.pop(0) filename_out = fields.pop(0) filename_results_code = fields.pop(0) dataset_filename_override = fields.pop(0) # Need to be careful with the way that these parameters are populated from the filename splitting, # because if a job is running when the server is updated, any existing external metadata command-lines # will not have info about the newly added override_metadata file if fields: override_metadata = fields.pop(0) else: override_metadata = None set_meta_kwds = stringify_dictionary_keys( json.load(open(filename_kwds)) ) # load kwds; need to ensure our keywords are not unicode try: dataset = cPickle.load(open(filename_in)) # load DatasetInstance if dataset_filename_override: dataset.dataset.external_filename = dataset_filename_override files_path = os.path.abspath( os.path.join(tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id)) ) dataset.dataset.external_extra_files_path = files_path if dataset.dataset.id in existing_job_metadata_dict: dataset.extension = existing_job_metadata_dict[dataset.dataset.id].get("ext", dataset.extension) # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles if override_metadata: override_metadata = json.load(open(override_metadata)) for metadata_name, metadata_file_override in override_metadata: if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value(metadata_file_override): metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override ) setattr(dataset.metadata, metadata_name, metadata_file_override) file_dict = existing_job_metadata_dict.get(dataset.dataset.id, {}) set_meta_with_tool_provided(dataset, file_dict, set_meta_kwds) dataset.metadata.to_JSON_dict(filename_out) # write out results of set_meta json.dump( (True, "Metadata has 
been set successfully"), open(filename_results_code, "wb+") ) # setting metadata has succeeded except Exception, e: json.dump((False, str(e)), open(filename_results_code, "wb+")) # setting metadata has failed somehow
def app_properties_from_args(args, legacy_config_override=None, app=None):
    config_file = config_file_from_args(args, legacy_config_override=legacy_config_override, app=app)
    config_section = getattr(args, "config_section", None)
    app_properties = load_app_properties(config_file=config_file, config_section=config_section)
    return app_properties
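app_properties_from_args() only needs an object with optional config_file and config_section attributes, so a plain argparse namespace is enough to drive it. A hypothetical sketch of wiring those attributes (the option names here are made up for illustration):

import argparse

parser = argparse.ArgumentParser()
parser.add_argument("--config-file", dest="config_file", default=None)
parser.add_argument("--config-section", dest="config_section", default=None)
args = parser.parse_args(["--config-file", "config/galaxy.yml",
                          "--config-section", "galaxy"])

# getattr(args, "config_section", None) in the snippet above then yields "galaxy".
print(args.config_file, args.config_section)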
def paste_app_factory(global_conf, **kwargs): """ Return a wsgi application serving the root object """ kwargs = load_app_properties(kwds=kwargs) # Create the Galaxy application unless passed in if 'app' in kwargs: app = kwargs.pop('app') galaxy.app.app = app else: try: app = galaxy.app.UniverseApplication(global_conf=global_conf, **kwargs) galaxy.app.app = app except: import traceback traceback.print_exc() sys.exit(1) # Call app's shutdown method when the interpeter exits, this cleanly stops # the various Galaxy application daemon threads atexit.register(app.shutdown) # Create the universe WSGI application webapp = GalaxyWebApplication(app, session_cookie='galaxysession', name='galaxy') webapp.add_ui_controllers('galaxy.webapps.galaxy.controllers', app) # Force /history to go to view of current webapp.add_route('/history', controller='history', action='view') # Force /activate to go to the controller webapp.add_route('/activate', controller='user', action='activate') # These two routes handle our simple needs at the moment webapp.add_route('/async/{tool_id}/{data_id}/{data_secret}', controller='async', action='index', tool_id=None, data_id=None, data_secret=None) webapp.add_route('/{controller}/{action}', action='index') webapp.add_route('/{action}', controller='root', action='index') # allow for subdirectories in extra_files_path webapp.add_route('/datasets/{dataset_id}/display/{filename:.+?}', controller='dataset', action='display', dataset_id=None, filename=None) webapp.add_route('/datasets/{dataset_id}/{action}/{filename}', controller='dataset', action='index', dataset_id=None, filename=None) webapp.add_route( '/display_application/{dataset_id}/{app_name}/{link_name}/{user_id}/{app_action}/{action_param}/{action_param_extra:.+?}', controller='dataset', action='display_application', dataset_id=None, user_id=None, app_name=None, link_name=None, app_action=None, action_param=None, action_param_extra=None) webapp.add_route('/u/{username}/d/{slug}/{filename}', controller='dataset', action='display_by_username_and_slug', filename=None) webapp.add_route('/u/{username}/p/{slug}', controller='page', action='display_by_username_and_slug') webapp.add_route('/u/{username}/h/{slug}', controller='history', action='display_by_username_and_slug') webapp.add_route('/u/{username}/w/{slug}', controller='workflow', action='display_by_username_and_slug') webapp.add_route('/u/{username}/w/{slug}/{format}', controller='workflow', action='display_by_username_and_slug') webapp.add_route('/u/{username}/v/{slug}', controller='visualization', action='display_by_username_and_slug') webapp.add_route('/search', controller='search', action='index') # TODO: Refactor above routes into external method to allow testing in # isolation as well. populate_api_routes(webapp, app) # ==== Done # Indicate that all configuration settings have been provided webapp.finalize_config() # Wrap the webapp in some useful middleware if kwargs.get('middleware', True): webapp = wrap_in_middleware(webapp, global_conf, **kwargs) if asbool(kwargs.get('static_enabled', True)): if process_is_uwsgi: log.error( "Static middleware is enabled in your configuration but this is a uwsgi process. Refusing to wrap in static middleware." 
) else: webapp = wrap_in_static( webapp, global_conf, plugin_frameworks=[app.visualizations_registry], **kwargs) # Close any pooled database connections before forking try: galaxy.model.mapping.metadata.bind.dispose() except Exception: log.exception( "Unable to dispose of pooled galaxy model database connections.") try: # This model may not actually be bound. if galaxy.model.tool_shed_install.mapping.metadata.bind: galaxy.model.tool_shed_install.mapping.metadata.bind.dispose() except Exception: log.exception( "Unable to dispose of pooled toolshed install model database connections." ) if not process_is_uwsgi: postfork_setup() # Return return webapp
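The "wrap the webapp in some useful middleware" step is plain WSGI composition: each wrapper takes the app it decorates and returns another callable with the same (environ, start_response) signature. A minimal sketch of that pattern, not Galaxy's actual middleware stack:

def simple_app(environ, start_response):
    # The innermost WSGI application.
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"hello"]

def logging_middleware(app):
    # A wrapper with the same WSGI signature that delegates to `app`.
    def wrapped(environ, start_response):
        print("request: %s" % environ.get("PATH_INFO"))
        return app(environ, start_response)
    return wrapped

webapp = simple_app
webapp = logging_middleware(webapp)  # same shape as wrap_in_middleware(webapp, ...)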
def __main__(): file_path = sys.argv.pop(1) tool_job_working_directory = tmp_dir = sys.argv.pop( 1) #this is also the job_working_directory now galaxy.model.Dataset.file_path = file_path galaxy.datatypes.metadata.MetadataTempFile.tmp_dir = tmp_dir config_root = sys.argv.pop(1) config_file_name = sys.argv.pop(1) if not os.path.isabs(config_file_name): config_file_name = os.path.join(config_root, config_file_name) # Set up reference to object store # First, read in the main config file for Galaxy; this is required because # the object store configuration is stored there conf_dict = load_app_properties(ini_file=config_file_name) # config object is required by ObjectStore class so create it now universe_config = config.Configuration(**conf_dict) universe_config.ensure_tempdir() object_store = build_object_store_from_config(universe_config) galaxy.model.Dataset.object_store = object_store # Set up datatypes registry datatypes_config = sys.argv.pop(1) datatypes_registry = galaxy.datatypes.registry.Registry() datatypes_registry.load_datatypes(root_dir=config_root, config=datatypes_config) galaxy.model.set_datatypes_registry(datatypes_registry) job_metadata = sys.argv.pop(1) existing_job_metadata_dict = {} new_job_metadata_dict = {} if job_metadata != "None" and os.path.exists(job_metadata): for line in open(job_metadata, 'r'): try: line = stringify_dictionary_keys(json.loads(line)) if line['type'] == 'dataset': existing_job_metadata_dict[line['dataset_id']] = line elif line['type'] == 'new_primary_dataset': new_job_metadata_dict[line['filename']] = line except: continue for filenames in sys.argv[1:]: fields = filenames.split(',') filename_in = fields.pop(0) filename_kwds = fields.pop(0) filename_out = fields.pop(0) filename_results_code = fields.pop(0) dataset_filename_override = fields.pop(0) # Need to be careful with the way that these parameters are populated from the filename splitting, # because if a job is running when the server is updated, any existing external metadata command-lines #will not have info about the newly added override_metadata file if fields: override_metadata = fields.pop(0) else: override_metadata = None set_meta_kwds = stringify_dictionary_keys( json.load(open(filename_kwds)) ) # load kwds; need to ensure our keywords are not unicode try: dataset = cPickle.load(open(filename_in)) # load DatasetInstance if dataset_filename_override: dataset.dataset.external_filename = dataset_filename_override files_path = os.path.abspath( os.path.join(tool_job_working_directory, "dataset_%s_files" % (dataset.dataset.id))) dataset.dataset.external_extra_files_path = files_path if dataset.dataset.id in existing_job_metadata_dict: dataset.extension = existing_job_metadata_dict[ dataset.dataset.id].get('ext', dataset.extension) # Metadata FileParameter types may not be writable on a cluster node, and are therefore temporarily substituted with MetadataTempFiles if override_metadata: override_metadata = json.load(open(override_metadata)) for metadata_name, metadata_file_override in override_metadata: if galaxy.datatypes.metadata.MetadataTempFile.is_JSONified_value( metadata_file_override): metadata_file_override = galaxy.datatypes.metadata.MetadataTempFile.from_JSON( metadata_file_override) setattr(dataset.metadata, metadata_name, metadata_file_override) file_dict = existing_job_metadata_dict.get(dataset.dataset.id, {}) set_meta_with_tool_provided(dataset, file_dict, set_meta_kwds) dataset.metadata.to_JSON_dict( filename_out) # write out results of set_meta json.dump((True, 'Metadata has been 
set successfully'), open(filename_results_code, 'wb+')) # setting metadata has succeeded except Exception as e: json.dump((False, str(e)), open(filename_results_code, 'wb+')) # setting metadata has failed somehow
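The per-dataset result file written above is just a JSON-encoded (ok, message) pair that a later step can read back to decide whether setting metadata succeeded. A small sketch of that round trip (paths are illustrative):

import json

def write_result(path, ok, message):
    # Persist the outcome as a JSON pair, mirroring the json.dump calls above.
    with open(path, "w") as fh:
        json.dump((ok, message), fh)

def read_result(path):
    ok, message = json.load(open(path))
    return ok, message

write_result("results.code", True, "Metadata has been set successfully")
print(read_result("results.code"))  # -> (True, 'Metadata has been set successfully')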
def app_factory(global_conf, load_app_kwds=None, **kwargs): """Return a wsgi application serving the root object""" # Create the Galaxy tool shed application unless passed in load_app_kwds = load_app_kwds or {} kwargs = load_app_properties( kwds=kwargs, config_prefix='TOOL_SHED_CONFIG_', **load_app_kwds ) if 'app' in kwargs: app = kwargs.pop('app') else: try: from tool_shed.webapp.app import UniverseApplication app = UniverseApplication(global_conf=global_conf, **kwargs) except Exception: import traceback import sys traceback.print_exc() sys.exit(1) atexit.register(app.shutdown) # Create the universe WSGI application webapp = CommunityWebApplication(app, session_cookie='galaxycommunitysession', name="tool_shed") add_ui_controllers(webapp, app) webapp.add_route('/view/{owner}', controller='repository', action='sharable_owner') webapp.add_route('/view/{owner}/{name}', controller='repository', action='sharable_repository') webapp.add_route('/view/{owner}/{name}/{changeset_revision}', controller='repository', action='sharable_repository_revision') # Handle displaying tool help images and README file images for tools contained in repositories. webapp.add_route('/repository/static/images/{repository_id}/{image_file:.+?}', controller='repository', action='display_image_in_repository', repository_id=None, image_file=None) webapp.add_route('/{controller}/{action}', action='index') webapp.add_route('/{action}', controller='repository', action='index') # Enable 'hg clone' functionality on repos by letting hgwebapp handle the request webapp.add_route('/repos/*path_info', controller='hg', action='handle_request', path_info='/') # Add the web API. # A good resource for RESTful services - https://routes.readthedocs.io/en/latest/restful.html webapp.add_api_controllers('tool_shed.webapp.api', app) webapp.mapper.connect('api_key_retrieval', '/api/authenticate/baseauth/', controller='authenticate', action='get_tool_shed_api_key', conditions=dict(method=["GET"])) webapp.mapper.connect('group', '/api/groups/', controller='groups', action='index', conditions=dict(method=["GET"])) webapp.mapper.connect('group', '/api/groups/', controller='groups', action='create', conditions=dict(method=["POST"])) webapp.mapper.connect('group', '/api/groups/{encoded_id}', controller='groups', action='show', conditions=dict(method=["GET"])) webapp.mapper.resource('category', 'categories', controller='categories', name_prefix='category_', path_prefix='/api', parent_resources=dict(member_name='category', collection_name='categories')) webapp.mapper.connect('repositories_in_category', '/api/categories/{category_id}/repositories', controller='categories', action='get_repositories', conditions=dict(method=["GET"])) webapp.mapper.connect('show_updates_for_repository', '/api/repositories/updates', controller='repositories', action='updates', conditions=dict(method=["GET"])) webapp.mapper.resource('repository', 'repositories', controller='repositories', collection={'add_repository_registry_entry': 'POST', 'get_repository_revision_install_info': 'GET', 'get_ordered_installable_revisions': 'GET', 'get_installable_revisions': 'GET', 'remove_repository_registry_entry': 'POST', 'repository_ids_for_setting_metadata': 'GET', 'reset_metadata_on_repositories': 'POST', 'reset_metadata_on_repository': 'POST'}, name_prefix='repository_', path_prefix='/api', parent_resources=dict(member_name='repository', collection_name='repositories')) webapp.mapper.resource('repository_revision', 'repository_revisions', member={'repository_dependencies': 'GET', 
'export': 'POST'}, controller='repository_revisions', name_prefix='repository_revision_', path_prefix='/api', parent_resources=dict(member_name='repository_revision', collection_name='repository_revisions')) webapp.mapper.resource('user', 'users', controller='users', name_prefix='user_', path_prefix='/api', parent_resources=dict(member_name='user', collection_name='users')) webapp.mapper.connect('update_repository', '/api/repositories/{id}', controller='repositories', action='update', conditions=dict(method=["PATCH", "PUT"])) webapp.mapper.connect('repository_create_changeset_revision', '/api/repositories/{id}/changeset_revision', controller='repositories', action='create_changeset_revision', conditions=dict(method=["POST"])) webapp.mapper.connect('repository_get_metadata', '/api/repositories/{id}/metadata', controller='repositories', action='metadata', conditions=dict(method=["GET"])) webapp.mapper.connect('repository_show_tools', '/api/repositories/{id}/{changeset}/show_tools', controller='repositories', action='show_tools', conditions=dict(method=["GET"])) webapp.mapper.connect('create_repository', '/api/repositories', controller='repositories', action='create', conditions=dict(method=["POST"])) webapp.mapper.connect('tools', '/api/tools', controller='tools', action='index', conditions=dict(method=["GET"])) webapp.mapper.connect('json', '/api/tools/json', controller='tools', action='json', conditions=dict(method=["GET"])) webapp.mapper.connect("version", "/api/version", controller="configuration", action="version", conditions=dict(method=["GET"])) webapp.finalize_config() # Wrap the webapp in some useful middleware if kwargs.get('middleware', True): webapp = wrap_in_middleware(webapp, global_conf, app.application_stack, **kwargs) if asbool(kwargs.get('static_enabled', True)): webapp = wrap_if_allowed(webapp, app.application_stack, build_url_map, args=(global_conf,), kwargs=kwargs) return webapp
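The API routes above are restricted by HTTP method through conditions=dict(method=[...]). URL generation ignores those conditions, so mapper.generate() is a convenient way to confirm a route was registered; a sketch with the routes package (assumed available):

from routes import Mapper

mapper = Mapper()
mapper.connect("version", "/api/version",
               controller="configuration", action="version",
               conditions=dict(method=["GET"]))
mapper.connect("create_repository", "/api/repositories",
               controller="repositories", action="create",
               conditions=dict(method=["POST"]))

print(mapper.generate(controller="configuration", action="version"))
# -> '/api/version'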
def _app_properties(args):
    config_file = find_config_file("config/galaxy.ini", "universe_wsgi.ini", args.config_file)
    app_properties = load_app_properties(ini_file=config_file)
    return app_properties
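find_config_file() here resolves the configuration path by preferring an explicitly supplied file and otherwise probing a list of default locations. A stand-in sketch of that lookup, not Galaxy's actual implementation:

import os

def find_first_config(candidates, explicit=None):
    # Prefer an explicitly supplied path; otherwise take the first default
    # location that exists on disk, or None if none is present.
    if explicit:
        return explicit
    for path in candidates:
        if os.path.exists(path):
            return path
    return None

print(find_first_config(["config/galaxy.ini", "universe_wsgi.ini"]))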
def wsgiloop(args, log):
    config_builder = GalaxyConfigBuilder(args)
    kwds = config_builder.app_kwds()
    kwds = load_app_properties(**kwds)
    gx = app_factory(global_conf=config_builder.global_conf(), **kwds)
    uvicorn.run(initialize_fast_app(gx))
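wsgiloop() hands the assembled application to uvicorn, an ASGI server. The sketch below substitutes a trivial ASGI callable for initialize_fast_app(gx) so the uvicorn.run() call pattern can be tried in isolation (host and port values are arbitrary):

import uvicorn

async def asgi_app(scope, receive, send):
    # Respond 200 to every HTTP request; non-HTTP scopes (e.g. lifespan) are ignored.
    if scope["type"] != "http":
        return
    await send({"type": "http.response.start", "status": 200,
                "headers": [(b"content-type", b"text/plain")]})
    await send({"type": "http.response.body", "body": b"ok"})

if __name__ == "__main__":
    uvicorn.run(asgi_app, host="127.0.0.1", port=8080)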
def app_factory( global_conf, **kwargs ): """Return a wsgi application serving the root object""" # Create the Galaxy tool shed application unless passed in kwargs = load_app_properties( kwds=kwargs, config_prefix='TOOL_SHED_CONFIG_' ) if 'app' in kwargs: app = kwargs.pop( 'app' ) else: try: from galaxy.webapps.tool_shed.app import UniverseApplication app = UniverseApplication( global_conf=global_conf, **kwargs ) except: import traceback import sys traceback.print_exc() sys.exit( 1 ) atexit.register( app.shutdown ) # Create the universe WSGI application webapp = CommunityWebApplication( app, session_cookie='galaxycommunitysession', name="tool_shed" ) add_ui_controllers( webapp, app ) webapp.add_route( '/view/{owner}', controller='repository', action='sharable_owner' ) webapp.add_route( '/view/{owner}/{name}', controller='repository', action='sharable_repository' ) webapp.add_route( '/view/{owner}/{name}/{changeset_revision}', controller='repository', action='sharable_repository_revision' ) # Handle displaying tool help images and README file images for tools contained in repositories. webapp.add_route( '/repository/static/images/{repository_id}/{image_file:.+?}', controller='repository', action='display_image_in_repository', repository_id=None, image_file=None ) webapp.add_route( '/{controller}/{action}', action='index' ) webapp.add_route( '/{action}', controller='repository', action='index' ) webapp.add_route( '/repos/*path_info', controller='hg', action='handle_request', path_info='/' ) # Add the web API. # A good resource for RESTful services - http://routes.readthedocs.org/en/latest/restful.html webapp.add_api_controllers( 'galaxy.webapps.tool_shed.api', app ) webapp.mapper.connect( 'api_key_retrieval', '/api/authenticate/baseauth/', controller='authenticate', action='get_tool_shed_api_key', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( 'group', '/api/groups/', controller='groups', action='index', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( 'group', '/api/groups/', controller='groups', action='create', conditions=dict( method=[ "POST" ] ) ) webapp.mapper.connect( 'group', '/api/groups/{encoded_id}', controller='groups', action='show', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.resource( 'category', 'categories', controller='categories', name_prefix='category_', path_prefix='/api', parent_resources=dict( member_name='category', collection_name='categories' ) ) webapp.mapper.resource( 'repository', 'repositories', controller='repositories', collection={ 'add_repository_registry_entry' : 'POST', 'get_repository_revision_install_info' : 'GET', 'get_ordered_installable_revisions' : 'GET', 'remove_repository_registry_entry' : 'POST', 'repository_ids_for_setting_metadata' : 'GET', 'reset_metadata_on_repositories' : 'POST', 'reset_metadata_on_repository' : 'POST' }, name_prefix='repository_', path_prefix='/api', new={ 'import_capsule' : 'POST' }, parent_resources=dict( member_name='repository', collection_name='repositories' ) ) webapp.mapper.resource( 'repository_revision', 'repository_revisions', member={ 'repository_dependencies' : 'GET', 'export' : 'POST' }, controller='repository_revisions', name_prefix='repository_revision_', path_prefix='/api', parent_resources=dict( member_name='repository_revision', collection_name='repository_revisions' ) ) webapp.mapper.resource( 'user', 'users', controller='users', name_prefix='user_', path_prefix='/api', parent_resources=dict( member_name='user', collection_name='users' ) ) webapp.mapper.connect( 
'update_repository', '/api/repositories/{id}', controller='repositories', action='update', conditions=dict( method=[ "PATCH", "PUT" ] ) ) webapp.mapper.connect( 'repository_create_changeset_revision', '/api/repositories/{id}/changeset_revision', controller='repositories', action='create_changeset_revision', conditions=dict( method=[ "POST" ] ) ) webapp.mapper.connect( 'create_repository', '/api/repositories', controller='repositories', action='create', conditions=dict( method=[ "POST" ] ) ) webapp.mapper.connect( 'tools', '/api/tools', controller='tools', action='index', conditions=dict( method=[ "GET" ] ) ) webapp.mapper.connect( "version", "/api/version", controller="configuration", action="version", conditions=dict( method=[ "GET" ] ) ) webapp.finalize_config() # Wrap the webapp in some useful middleware if kwargs.get( 'middleware', True ): webapp = wrap_in_middleware( webapp, global_conf, **kwargs ) if asbool( kwargs.get( 'static_enabled', True) ): if process_is_uwsgi: log.error("Static middleware is enabled in your configuration but this is a uwsgi process. Refusing to wrap in static middleware.") else: webapp = wrap_in_static( webapp, global_conf, **kwargs ) # Close any pooled database connections before forking try: galaxy.webapps.tool_shed.model.mapping.metadata.bind.dispose() except: log.exception("Unable to dispose of pooled tool_shed model database connections.") # Return return webapp
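Every factory above registers app.shutdown with atexit so daemon threads and pooled connections are torn down when the interpreter exits. Reduced to a runnable sketch with a dummy application object:

import atexit

class DummyApp(object):
    def shutdown(self):
        # In Galaxy this stops the application's daemon threads and
        # releases pooled database connections.
        print("shutting down cleanly")

app = DummyApp()
atexit.register(app.shutdown)  # invoked automatically at interpreter exit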
def main(): # ---- Configuration ------------------------------------------------------ galaxy_test_host = os.environ.get( 'GALAXY_TEST_HOST', default_galaxy_test_host ) galaxy_test_port = os.environ.get( 'GALAXY_TEST_PORT', None ) galaxy_test_save = os.environ.get( 'GALAXY_TEST_SAVE', None) tool_path = os.environ.get( 'GALAXY_TEST_TOOL_PATH', 'tools' ) if 'HTTP_ACCEPT_LANGUAGE' not in os.environ: os.environ[ 'HTTP_ACCEPT_LANGUAGE' ] = default_galaxy_locales testing_migrated_tools = __check_arg( '-migrated' ) testing_installed_tools = __check_arg( '-installed' ) datatypes_conf_override = None if testing_migrated_tools or testing_installed_tools: # Store a jsonified dictionary of tool_id : GALAXY_TEST_FILE_DIR pairs. galaxy_tool_shed_test_file = 'shed_tools_dict' # We need the upload tool for functional tests, so we'll create a temporary tool panel config that defines it. fd, tmp_tool_panel_conf = tempfile.mkstemp() os.write( fd, '<?xml version="1.0"?>\n' ) os.write( fd, '<toolbox>\n' ) os.write( fd, '<tool file="data_source/upload.xml"/>\n' ) os.write( fd, '</toolbox>\n' ) os.close( fd ) tool_config_file = tmp_tool_panel_conf galaxy_test_file_dir = None library_import_dir = None user_library_import_dir = None # Exclude all files except test_toolbox.py. ignore_files = ( re.compile( r'^test_[adghlmsu]*' ), re.compile( r'^test_ta*' ) ) else: framework_tool_dir = os.path.join('test', 'functional', 'tools') framework_test = __check_arg( '-framework' ) # Run through suite of tests testing framework. if framework_test: tool_conf = os.path.join( framework_tool_dir, 'samples_tool_conf.xml' ) datatypes_conf_override = os.path.join( framework_tool_dir, 'sample_datatypes_conf.xml' ) else: # Use tool_conf.xml toolbox. tool_conf = None if __check_arg( '-with_framework_test_tools' ): tool_conf = "%s,%s" % ( 'config/tool_conf.xml.sample', os.path.join( framework_tool_dir, 'samples_tool_conf.xml' ) ) test_dir = default_galaxy_test_file_dir tool_config_file = os.environ.get( 'GALAXY_TEST_TOOL_CONF', tool_conf ) galaxy_test_file_dir = os.environ.get( 'GALAXY_TEST_FILE_DIR', test_dir ) first_test_file_dir = galaxy_test_file_dir.split(",")[0] if not os.path.isabs( first_test_file_dir ): first_test_file_dir = os.path.join( os.getcwd(), first_test_file_dir ) library_import_dir = first_test_file_dir import_dir = os.path.join( first_test_file_dir, 'users' ) if os.path.exists(import_dir): user_library_import_dir = import_dir else: user_library_import_dir = None ignore_files = () start_server = 'GALAXY_TEST_EXTERNAL' not in os.environ tool_data_table_config_path = None if os.path.exists( 'tool_data_table_conf.test.xml' ): # If explicitly defined tables for test, use those. tool_data_table_config_path = 'tool_data_table_conf.test.xml' else: # ... otherise find whatever Galaxy would use as the default and # the sample data for fucntional tests to that. 
default_tool_data_config = 'config/tool_data_table_conf.xml.sample' for tool_data_config in ['config/tool_data_table_conf.xml', 'tool_data_table_conf.xml' ]: if os.path.exists( tool_data_config ): default_tool_data_config = tool_data_config tool_data_table_config_path = '%s,test/functional/tool-data/sample_tool_data_tables.xml' % default_tool_data_config default_data_manager_config = 'config/data_manager_conf.xml.sample' for data_manager_config in ['config/data_manager_conf.xml', 'data_manager_conf.xml' ]: if os.path.exists( data_manager_config ): default_data_manager_config = data_manager_config data_manager_config_file = "%s,test/functional/tools/sample_data_manager_conf.xml" % default_data_manager_config shed_tool_data_table_config = 'config/shed_tool_data_table_conf.xml' tool_dependency_dir = os.environ.get( 'GALAXY_TOOL_DEPENDENCY_DIR', None ) use_distributed_object_store = os.environ.get( 'GALAXY_USE_DISTRIBUTED_OBJECT_STORE', False ) galaxy_test_tmp_dir = os.environ.get( 'GALAXY_TEST_TMP_DIR', None ) if galaxy_test_tmp_dir is None: galaxy_test_tmp_dir = tempfile.mkdtemp() galaxy_job_conf_file = os.environ.get( 'GALAXY_TEST_JOB_CONF', os.path.join( galaxy_test_tmp_dir, 'test_job_conf.xml' ) ) # Generate the job_conf.xml file. file( galaxy_job_conf_file, 'w' ).write( job_conf_xml ) database_auto_migrate = False galaxy_test_proxy_port = None if start_server: tempdir = tempfile.mkdtemp( dir=galaxy_test_tmp_dir ) # Configure the database path. if 'GALAXY_TEST_DBPATH' in os.environ: galaxy_db_path = os.environ[ 'GALAXY_TEST_DBPATH' ] else: galaxy_db_path = os.path.join( tempdir, 'database' ) # Configure the paths Galaxy needs to test tools. file_path = os.path.join( galaxy_db_path, 'files' ) new_file_path = tempfile.mkdtemp( prefix='new_files_path_', dir=tempdir ) job_working_directory = tempfile.mkdtemp( prefix='job_working_directory_', dir=tempdir ) install_database_connection = os.environ.get( 'GALAXY_TEST_INSTALL_DBURI', None ) if 'GALAXY_TEST_DBURI' in os.environ: database_connection = os.environ['GALAXY_TEST_DBURI'] else: db_path = os.path.join( galaxy_db_path, 'universe.sqlite' ) if 'GALAXY_TEST_DB_TEMPLATE' in os.environ: # Middle ground between recreating a completely new # database and pointing at existing database with # GALAXY_TEST_DBURI. The former requires a lot of setup # time, the latter results in test failures in certain # cases (namely tool shed tests expecting clean database). 
log.debug( "Copying database template from %s.", os.environ['GALAXY_TEST_DB_TEMPLATE'] ) __copy_database_template(os.environ['GALAXY_TEST_DB_TEMPLATE'], db_path) database_auto_migrate = True database_connection = 'sqlite:///%s' % db_path kwargs = {} for dir in file_path, new_file_path: try: if not os.path.exists( dir ): os.makedirs( dir ) except OSError: pass # Data Manager testing temp path # For storing Data Manager outputs and .loc files so that real ones don't get clobbered data_manager_test_tmp_path = tempfile.mkdtemp( prefix='data_manager_test_tmp', dir=galaxy_test_tmp_dir ) galaxy_data_manager_data_path = tempfile.mkdtemp( prefix='data_manager_tool-data', dir=data_manager_test_tmp_path ) # ---- Build Application -------------------------------------------------- master_api_key = get_master_api_key() app = None if start_server: kwargs = dict( admin_users='*****@*****.**', api_allow_run_as='*****@*****.**', allow_library_path_paste=True, allow_user_creation=True, allow_user_deletion=True, database_connection=database_connection, database_auto_migrate=database_auto_migrate, datatype_converters_config_file="datatype_converters_conf.xml.sample", file_path=file_path, id_secret='changethisinproductiontoo', job_queue_workers=5, job_working_directory=job_working_directory, library_import_dir=library_import_dir, log_destination="stdout", new_file_path=new_file_path, running_functional_tests=True, shed_tool_data_table_config=shed_tool_data_table_config, template_path="templates", test_conf="test.conf", tool_config_file=tool_config_file, tool_data_table_config_path=tool_data_table_config_path, tool_path=tool_path, galaxy_data_manager_data_path=galaxy_data_manager_data_path, tool_parse_help=False, update_integrated_tool_panel=False, use_heartbeat=False, user_library_import_dir=user_library_import_dir, master_api_key=master_api_key, use_tasked_jobs=True, cleanup_job='onsuccess', enable_beta_tool_formats=True, data_manager_config_file=data_manager_config_file ) if install_database_connection is not None: kwargs[ 'install_database_connection' ] = install_database_connection if not database_connection.startswith( 'sqlite://' ): kwargs[ 'database_engine_option_max_overflow' ] = '20' kwargs[ 'database_engine_option_pool_size' ] = '10' if tool_dependency_dir is not None: kwargs[ 'tool_dependency_dir' ] = tool_dependency_dir if use_distributed_object_store: kwargs[ 'object_store' ] = 'distributed' kwargs[ 'distributed_object_store_config_file' ] = 'distributed_object_store_conf.xml.sample' if datatypes_conf_override: kwargs[ 'datatypes_config_file' ] = datatypes_conf_override # If the user has passed in a path for the .ini file, do not overwrite it. galaxy_config_file = os.environ.get( 'GALAXY_TEST_INI_FILE', None ) if not galaxy_config_file: galaxy_config_file = os.path.join( galaxy_test_tmp_dir, 'functional_tests_wsgi.ini' ) config_items = [] for label in kwargs: config_tuple = label, kwargs[ label ] config_items.append( config_tuple ) # Write a temporary file, based on config/galaxy.ini.sample, using the configuration options defined above. generate_config_file( 'config/galaxy.ini.sample', galaxy_config_file, config_items ) # Set the global_conf[ '__file__' ] option to the location of the temporary .ini file, which gets passed to set_metadata.sh. 
kwargs[ 'global_conf' ] = get_webapp_global_conf() kwargs[ 'global_conf' ][ '__file__' ] = galaxy_config_file kwargs[ 'config_file' ] = galaxy_config_file kwargs = load_app_properties( kwds=kwargs ) # Build the Universe Application app = UniverseApplication( **kwargs ) database_contexts.galaxy_context = app.model.context log.info( "Embedded Universe application started" ) # ---- Run webserver ------------------------------------------------------ server = None if start_server: webapp = buildapp.app_factory( kwargs[ 'global_conf' ], app=app, use_translogger=False, static_enabled=STATIC_ENABLED ) if galaxy_test_port is not None: server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False ) else: random.seed() for i in range( 0, 9 ): try: galaxy_test_port = str( random.randint( default_galaxy_test_port_min, default_galaxy_test_port_max ) ) log.debug( "Attempting to serve app on randomly chosen port: %s" % galaxy_test_port ) server = httpserver.serve( webapp, host=galaxy_test_host, port=galaxy_test_port, start_loop=False ) break except socket.error, e: if e[0] == 98: continue raise else: raise Exception( "Unable to open a port between %s and %s to start Galaxy server" % ( default_galaxy_test_port_min, default_galaxy_test_port_max ) )
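When GALAXY_TEST_PORT is not set, main() picks random ports and retries on "address already in use" until one binds. The same idea, expressed with a bare socket instead of Paste's httpserver (range and host are illustrative):

import errno
import random
import socket

def bind_random_port(host="127.0.0.1", low=8000, high=9999, attempts=10):
    for _ in range(attempts):
        port = random.randint(low, high)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            sock.bind((host, port))
            return sock, port          # caller serves on this socket
        except socket.error as e:
            sock.close()
            if e.errno == errno.EADDRINUSE:
                continue               # port taken, try another
            raise
    raise Exception("Unable to find a free port between %s and %s" % (low, high))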
def get_config(argv, cwd=None): """ Read sys.argv and parse out repository of migrations and database url. >>> import os >>> from ConfigParser import SafeConfigParser >>> from shutil import rmtree >>> from tempfile import mkdtemp >>> config_dir = mkdtemp() >>> os.makedirs(os.path.join(config_dir, 'config')) >>> def write_ini(path, property, value): ... p = SafeConfigParser() ... p.add_section('app:main') ... p.set('app:main', property, value) ... with open(os.path.join(config_dir, 'config', path), 'w') as f: p.write(f) >>> write_ini('tool_shed.ini', 'database_connection', 'sqlite:///pg/testdb1') >>> config = get_config(['manage_db.py', 'tool_shed'], cwd=config_dir) >>> config['repo'] 'lib/galaxy/webapps/tool_shed/model/migrate' >>> config['db_url'] 'sqlite:///pg/testdb1' >>> write_ini('galaxy.ini', 'database_file', 'moo.sqlite') >>> config = get_config(['manage_db.py'], cwd=config_dir) >>> config['db_url'] 'sqlite:///moo.sqlite?isolation_level=IMMEDIATE' >>> config['repo'] 'lib/galaxy/model/migrate' >>> rmtree(config_dir) """ if argv and (argv[-1] in DATABASE): database = argv.pop() # database name tool_shed, galaxy, or install. else: database = 'galaxy' database_defaults = DATABASE[database] config_names = database_defaults.get('config_names', DEFAULT_CONFIG_NAMES) config_file = read_config_file_arg(argv, config_names, cwd=cwd) repo = database_defaults['repo'] config_prefix = database_defaults.get('config_prefix', DEFAULT_CONFIG_PREFIX) config_override = database_defaults.get('config_override', 'GALAXY_CONFIG_') default_sqlite_file = database_defaults['default_sqlite_file'] if not config_file or get_ext(config_file, ignore='sample') == 'yaml': config_section = database_defaults.get('config_section', None) else: # An .ini file - just let load_app_properties find app:main. config_section = None properties = load_app_properties(config_file=config_file, config_prefix=config_override, config_section=config_section) if ("%sdatabase_connection" % config_prefix) in properties: db_url = properties["%sdatabase_connection" % config_prefix] elif ("%sdatabase_file" % config_prefix) in properties: database_file = properties["%sdatabase_file" % config_prefix] db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % database_file else: db_url = "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file return dict(db_url=db_url, repo=repo, config_file=config_file, database=database)
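get_config() resolves the database URL with a fixed precedence: an explicit <prefix>database_connection wins, then <prefix>database_file is turned into a sqlite URL, and finally the per-application default sqlite file is used. That fallback, extracted into a small helper with doctests (property names mirror the snippet above):

def resolve_db_url(properties, prefix="", default_sqlite_file="universe.sqlite"):
    """
    >>> resolve_db_url({"database_connection": "postgresql:///galaxy"})
    'postgresql:///galaxy'
    >>> resolve_db_url({"database_file": "moo.sqlite"})
    'sqlite:///moo.sqlite?isolation_level=IMMEDIATE'
    >>> resolve_db_url({})
    'sqlite:///universe.sqlite?isolation_level=IMMEDIATE'
    """
    if (prefix + "database_connection") in properties:
        return properties[prefix + "database_connection"]
    if (prefix + "database_file") in properties:
        return "sqlite:///%s?isolation_level=IMMEDIATE" % properties[prefix + "database_file"]
    return "sqlite:///%s?isolation_level=IMMEDIATE" % default_sqlite_file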