def test_user_ftp_explicit_config():
    file_sources_config = ConfiguredFileSourcesConfig(
        ftp_upload_purge=False,
    )
    plugin = {
        'type': 'gxftp',
    }
    tmp, root = setup_root()
    file_sources = ConfiguredFileSources(file_sources_config, conf_dict=[plugin])
    user_context = user_context_fixture(user_ftp_dir=root)
    write_file_fixtures(tmp, root)

    assert_realizes_as(file_sources, "gxftp://a", "a\n", user_context=user_context)

    file_sources_remote = serialize_and_recover(file_sources, user_context=user_context)
    assert_realizes_as(file_sources_remote, "gxftp://a", "a\n")

    as_dict = file_sources.to_dict()
    assert len(as_dict["file_sources"]) == 1
    file_source_as_dict = as_dict["file_sources"][0]
    assert file_source_as_dict["uri_root"] == "gxftp://"
    assert file_source_as_dict["id"] == "_ftp"

def get_file_sources(working_directory):
    global _file_sources
    if _file_sources is None:
        from galaxy.files import ConfiguredFileSources
        file_sources = None
        file_sources_path = os.path.join(working_directory, "file_sources.json")
        if os.path.exists(file_sources_path):
            file_sources_as_dict = None
            with open(file_sources_path) as f:
                file_sources_as_dict = json.load(f)
            if file_sources_as_dict is not None:
                file_sources = ConfiguredFileSources.from_dict(file_sources_as_dict)
        if file_sources is None:
            # Fall back to an empty configuration and cache it so the
            # singleton is never left as None.
            file_sources = ConfiguredFileSources.from_dict(None)
        _file_sources = file_sources
    return _file_sources

def get_file_sources():
    global _file_sources
    if _file_sources is None:
        from galaxy.files import ConfiguredFileSources
        file_sources = None
        if os.path.exists("file_sources.json"):
            file_sources_as_dict = None
            with open("file_sources.json") as f:
                file_sources_as_dict = json.load(f)
            if file_sources_as_dict is not None:
                file_sources = ConfiguredFileSources.from_dict(file_sources_as_dict)
        if file_sources is None:
            # Fall back to an empty configuration and cache it.
            file_sources = ConfiguredFileSources.from_dict([])
        _file_sources = file_sources
    return _file_sources

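# The loaders above consume a "file_sources.json" that the Galaxy app writes
# out before a job runs. A minimal producer-side sketch, assuming the payload
# is exactly what ConfiguredFileSources.to_dict(for_serialization=True)
# returns (the same shape serialize_and_recover round-trips further below):
import json
import os

def write_file_sources_json(file_sources, working_directory, user_context=None):
    as_dict = file_sources.to_dict(for_serialization=True, user_context=user_context)
    with open(os.path.join(working_directory, "file_sources.json"), "w") as f:
        json.dump(as_dict, f)
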
def get_file_sources(file_sources_path): assert os.path.exists(file_sources_path), f"file sources path [{file_sources_path}] does not exist" from galaxy.files import ConfiguredFileSources with open(file_sources_path) as f: file_sources_as_dict = json.load(f) file_sources = ConfiguredFileSources.from_dict(file_sources_as_dict) return file_sources
def __init__(self, **kwargs):
    super().__init__(**kwargs)
    self._register_singleton(MinimalManagerApp, self)
    self.execution_timer_factory = self._register_singleton(
        ExecutionTimerFactory, ExecutionTimerFactory(self.config))
    self.configure_fluent_log()
    self.application_stack = self._register_singleton(
        ApplicationStack, application_stack_instance(app=self))
    # Initialize job metrics manager, needs to be in place before
    # config so per-destination modifications can be made.
    self.job_metrics = self._register_singleton(
        JobMetrics, JobMetrics(self.config.job_metrics_config_file, app=self))
    # Initialize the job management configuration
    self.job_config = self._register_singleton(jobs.JobConfiguration)
    # Tag handler
    self.tag_handler = self._register_singleton(GalaxyTagHandler)
    self.user_manager = self._register_singleton(UserManager)
    self._register_singleton(GalaxySessionManager)
    self.hda_manager = self._register_singleton(HDAManager)
    self.history_manager = self._register_singleton(HistoryManager)
    self.job_search = self._register_singleton(JobSearch)
    self.dataset_collection_manager = self._register_singleton(DatasetCollectionManager)
    self.workflow_manager = self._register_singleton(WorkflowsManager)
    self.workflow_contents_manager = self._register_singleton(WorkflowContentsManager)
    self.library_folder_manager = self._register_singleton(FolderManager)
    self.library_manager = self._register_singleton(LibraryManager)
    self.role_manager = self._register_singleton(RoleManager)
    from galaxy.jobs.manager import JobManager
    self.job_manager = self._register_singleton(JobManager)
    # ConfiguredFileSources
    self.file_sources = self._register_singleton(
        ConfiguredFileSources, ConfiguredFileSources.from_app_config(self.config))
    # We need the datatype registry for running certain tasks that modify HDAs, and to build the registry we need
    # to setup the installed repositories ... this is not ideal
    self._configure_tool_config_files()
    self.installed_repository_manager = self._register_singleton(
        InstalledRepositoryManager, InstalledRepositoryManager(self))
    self._configure_datatypes_registry(self.installed_repository_manager)
    self._register_singleton(Registry, self.datatypes_registry)
    galaxy.model.set_datatypes_registry(self.datatypes_registry)
    self.sentry_client = None
    if self.config.sentry_dsn:

        def postfork_sentry_client():
            import raven
            self.sentry_client = raven.Client(
                self.config.sentry_dsn, transport=raven.transport.HTTPTransport)

        self.application_stack.register_postfork_function(postfork_sentry_client)

def get_file_sources(file_sources_path):
    assert os.path.exists(file_sources_path), "file sources path [%s] does not exist" % file_sources_path
    with open(file_sources_path) as f:
        file_sources_as_dict = json.load(f)
    file_sources = ConfiguredFileSources.from_dict(file_sources_as_dict)
    return file_sources

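# Usage sketch tying the producer and this loader together (the paths here
# are hypothetical, and write_file_sources_json is the sketch given earlier):
#
#     write_file_sources_json(app.file_sources, working_directory)
#     file_sources = get_file_sources(os.path.join(working_directory, "file_sources.json"))
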
def test_import_dir_implicit_config():
    tmp, root = _setup_root()
    file_sources_config = ConfiguredFileSourcesConfig(
        library_import_dir=root,
    )
    file_sources = ConfiguredFileSources(file_sources_config, conf_dict=[], load_stock_plugins=True)
    _write_file_fixtures(tmp, root)

    assert_realizes_as(file_sources, "gximport://a", "a\n")

def test_user_ftp_respects_upload_purge_on_by_default():
    tmp, root = _setup_root()
    file_sources_config = ConfiguredFileSourcesConfig(
        ftp_upload_dir=root,
    )
    file_sources = ConfiguredFileSources(file_sources_config, conf_dict=[], load_stock_plugins=True)
    user_context = user_context_fixture(user_ftp_dir=root)
    _write_file_fixtures(tmp, root)

    assert_realizes_as(file_sources, "gxftp://a", "a\n", user_context=user_context)
    # Purge is on by default, so realizing the file removes it from the FTP directory.
    assert not os.path.exists(os.path.join(root, "a"))

def test_user_import_dir_implicit_config():
    tmp, root = _setup_root()
    file_sources_config = ConfiguredFileSourcesConfig(
        user_library_import_dir=root,
    )
    file_sources = ConfiguredFileSources(file_sources_config, conf_dict=[], load_stock_plugins=True)
    _write_file_fixtures(tmp, os.path.join(root, EMAIL))
    user_context = user_context_fixture()

    assert_realizes_as(file_sources, "gxuserimport://a", "a\n", user_context=user_context)

def test_import_dir_explicit_config():
    tmp, root = _setup_root()
    file_sources_config = ConfiguredFileSourcesConfig(
        library_import_dir=root,
    )
    plugin = {
        'type': 'gximport',
    }
    file_sources = ConfiguredFileSources(file_sources_config, conf_dict=[plugin])
    _write_file_fixtures(tmp, root)

    assert_realizes_as(file_sources, "gximport://a", "a\n")

def _configured_file_sources(include_allowlist=False, plugin_extra_config=None, per_user=False, writable=None, allow_subdir_creation=True):
    tmp, root = _setup_root()
    config_kwd = {}
    if include_allowlist:
        config_kwd["symlink_allowlist"] = [tmp]
    file_sources_config = ConfiguredFileSourcesConfig(**config_kwd)
    plugin = {
        'type': 'posix',
    }
    if writable is not None:
        plugin['writable'] = writable
    if per_user:
        plugin['root'] = "%s/${user.username}" % root
        # setup files just for alice
        root = os.path.join(root, "alice")
        os.mkdir(root)
    else:
        plugin['root'] = root
    plugin.update(plugin_extra_config or {})
    _write_file_fixtures(tmp, root)
    file_sources = ConfiguredFileSources(file_sources_config, conf_dict={"test1": plugin})
    file_sources.test_root = root
    return file_sources

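# Usage sketch for the helper above. The "gxfiles://test1/" URI root is an
# assumption: the posix plugin is registered under the id "test1", and
# id-addressed plugins are expected to resolve under the gxfiles:// scheme.
def _demo_posix_file_source():
    file_sources = _configured_file_sources(writable=True)
    assert_realizes_as(file_sources, "gxfiles://test1/a", "a\n")
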
def test_user_ftp_implicit_config():
    tmp, root = _setup_root()
    file_sources_config = ConfiguredFileSourcesConfig(
        ftp_upload_dir=root,
        ftp_upload_purge=False,
    )
    file_sources = ConfiguredFileSources(file_sources_config, conf_dict=[], load_stock_plugins=True)
    user_context = user_context_fixture(user_ftp_dir=root)
    _write_file_fixtures(tmp, root)

    assert os.path.exists(os.path.join(root, "a"))
    assert_realizes_as(file_sources, "gxftp://a", "a\n", user_context=user_context)

    file_sources_remote = serialize_and_recover(file_sources, user_context=user_context)
    assert_realizes_as(file_sources_remote, "gxftp://a", "a\n")
    # ftp_upload_purge is disabled, so the file must survive realization.
    assert os.path.exists(os.path.join(root, "a"))

def _configured_file_sources(conf_file=FILE_SOURCES_CONF):
    file_sources_config = ConfiguredFileSourcesConfig()
    return ConfiguredFileSources(file_sources_config, conf_file=conf_file)

def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    self.file_sources = ConfiguredFileSources(
        ConfiguredFileSourcesConfig.from_app_config(self.config))  # type: ignore[assignment]

def __init__(self, **kwargs):
    if not log.handlers:
        # Paste didn't handle it, so we need a temporary basic log
        # configured. The handler added here gets dumped and replaced with
        # an appropriately configured logger in configure_logging below.
        logging.basicConfig(level=logging.DEBUG)
    log.debug("python path is: %s", ", ".join(sys.path))
    self.name = 'galaxy'
    # is_webapp will be set to true when building WSGI app
    self.is_webapp = False
    self.startup_timer = ExecutionTimer()
    self.new_installation = False
    # Read config file and check for errors
    self.config = config.Configuration(**kwargs)
    self.config.check()
    config.configure_logging(self.config)
    self.execution_timer_factory = ExecutionTimerFactory(self.config)
    self.configure_fluent_log()
    # A lot of postfork initialization depends on the server name, ensure it is set immediately after forking before other postfork functions
    self.application_stack = application_stack_instance(app=self)
    self.application_stack.register_postfork_function(
        self.application_stack.set_postfork_server_name, self)
    self.config.reload_sanitize_allowlist(explicit='sanitize_allowlist_file' in kwargs)
    self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(self.config)
    # queue_worker *can* be initialized with a queue, but here we don't
    # want to and we'll allow postfork to bind and start it.
    self.queue_worker = GalaxyQueueWorker(self)

    self._configure_tool_shed_registry()
    self._configure_object_store(fsmon=True)
    # Setup the database engine and ORM
    config_file = kwargs.get('global_conf', {}).get('__file__', None)
    if config_file:
        log.debug('Using "galaxy.ini" config file: %s', config_file)
    check_migrate_tools = self.config.check_migrate_tools
    self._configure_models(
        check_migrate_databases=self.config.check_migrate_databases,
        check_migrate_tools=check_migrate_tools,
        config_file=config_file)

    # Security helper
    self._configure_security()
    # Tag handler
    self.tag_handler = GalaxyTagHandler(self.model.context)
    self.dataset_collections_service = DatasetCollectionManager(self)
    self.history_manager = HistoryManager(self)
    self.hda_manager = HDAManager(self)
    self.workflow_manager = WorkflowsManager(self)
    self.workflow_contents_manager = WorkflowContentsManager(self)
    self.dependency_resolvers_view = DependencyResolversView(self)
    self.test_data_resolver = test_data.TestDataResolver(
        file_dirs=self.config.tool_test_data_directories)
    self.library_folder_manager = FolderManager()
    self.library_manager = LibraryManager()
    self.role_manager = RoleManager(self)
    self.dynamic_tool_manager = DynamicToolManager(self)

    # ConfiguredFileSources
    self.file_sources = ConfiguredFileSources.from_app_config(self.config)

    # Tool Data Tables
    self._configure_tool_data_tables(from_shed_config=False)
    # Load dbkey / genome build manager
    self._configure_genome_builds(data_table_name="__dbkeys__", load_old_style=True)

    # Genomes
    self.genomes = Genomes(self)
    # Data providers registry.
    self.data_provider_registry = DataProviderRegistry()

    # Initialize job metrics manager, needs to be in place before
    # config so per-destination modifications can be made.
    self.job_metrics = JobMetrics(self.config.job_metrics_config_file, app=self)

    # Initialize error report plugins.
    self.error_reports = ErrorReports(self.config.error_report_file, app=self)

    # Initialize the job management configuration
    self.job_config = jobs.JobConfiguration(self)

    # Setup a Tool Cache
    self.tool_cache = ToolCache()
    self.tool_shed_repository_cache = ToolShedRepositoryCache(self)
    # Watch various config files for immediate reload
    self.watchers = ConfigWatchers(self)
    self._configure_tool_config_files()
    self.installed_repository_manager = InstalledRepositoryManager(self)
    self._configure_datatypes_registry(self.installed_repository_manager)
    galaxy.model.set_datatypes_registry(self.datatypes_registry)
    self._configure_toolbox()
    # Load Data Manager
    self.data_managers = DataManagers(self)
    # Load the update repository manager.
    self.update_repository_manager = UpdateRepositoryManager(self)
    # Load proprietary datatype converters and display applications.
    self.installed_repository_manager.load_proprietary_converters_and_display_applications()
    # Load datatype display applications defined in local datatypes_conf.xml
    self.datatypes_registry.load_display_applications(self)
    # Load datatype converters defined in local datatypes_conf.xml
    self.datatypes_registry.load_datatype_converters(self.toolbox)
    # Load external metadata tool
    self.datatypes_registry.load_external_metadata_tool(self.toolbox)
    # Load history import/export tools.
    load_lib_tools(self.toolbox)
    self.toolbox.persist_cache(register_postfork=True)
    # visualizations registry: associates resources with visualizations, controls how to render
    self.visualizations_registry = VisualizationsRegistry(
        self,
        directories_setting=self.config.visualization_plugins_directory,
        template_cache_dir=self.config.template_cache_path)
    # Tours registry
    self.tour_registry = build_tours_registry(self.config.tour_config_dir)
    # Webhooks registry
    self.webhooks_registry = WebhooksRegistry(self.config.webhooks_dir)
    # Load security policy.
    self.security_agent = self.model.security_agent
    self.host_security_agent = galaxy.model.security.HostAgent(
        model=self.security_agent.model,
        permitted_actions=self.security_agent.permitted_actions)
    # Load quota management.
    self.quota_agent = get_quota_agent(self.config, self.model)

    # Heartbeat for thread profiling
    self.heartbeat = None
    from galaxy import auth
    self.auth_manager = auth.AuthManager(self)
    self.user_manager = UserManager(self)
    # Start the heartbeat process if configured and available (wait until
    # postfork if using uWSGI)
    if self.config.use_heartbeat:
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat(
                self.config,
                period=self.config.heartbeat_interval,
                fname=self.config.heartbeat_log)
            self.heartbeat.daemon = True
            self.application_stack.register_postfork_function(self.heartbeat.start)

    self.authnz_manager = None
    if self.config.enable_oidc:
        from galaxy.authnz import managers
        self.authnz_manager = managers.AuthnzManager(
            self,
            self.config.oidc_config_file,
            self.config.oidc_backends_config_file)

    self.sentry_client = None
    if self.config.sentry_dsn:

        def postfork_sentry_client():
            import raven
            self.sentry_client = raven.Client(
                self.config.sentry_dsn, transport=raven.transport.HTTPTransport)

        self.application_stack.register_postfork_function(postfork_sentry_client)

    # Transfer manager client
    if self.config.get_bool('enable_beta_job_managers', False):
        from galaxy.jobs import transfer_manager
        self.transfer_manager = transfer_manager.TransferManager(self)
    # Start the job manager
    from galaxy.jobs import manager
    self.job_manager = manager.JobManager(self)
    self.application_stack.register_postfork_function(self.job_manager.start)
    self.proxy_manager = ProxyManager(self.config)

    from galaxy.workflow import scheduling_manager
    # Must be initialized after job_config.
    self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager(self)

    self.trs_proxy = TrsProxy(self.config)
    # Must be initialized after any component that might make use of stack messaging is configured. Alternatively if
    # it becomes more commonly needed we could create a prefork function registration method like we do with
    # postfork functions.
    self.application_stack.init_late_prefork()

    self.containers = {}
    if self.config.enable_beta_containers_interface:
        self.containers = build_container_interfaces(
            self.config.containers_config_file,
            containers_conf=self.config.containers_conf)

    self.interactivetool_manager = InteractiveToolManager(self)

    # Configure handling of signals
    handlers = {}
    if self.heartbeat:
        handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
    self._configure_signal_handlers(handlers)

    self.database_heartbeat = DatabaseHeartbeat(application_stack=self.application_stack)
    self.database_heartbeat.add_change_callback(self.watchers.change_state)
    self.application_stack.register_postfork_function(self.database_heartbeat.start)

    # Start web stack message handling
    self.application_stack.register_postfork_function(self.application_stack.start)
    self.application_stack.register_postfork_function(self.queue_worker.bind_and_start)
    # Delay toolbox index until after startup
    self.application_stack.register_postfork_function(
        lambda: send_local_control_task(self, 'rebuild_toolbox_search_index'))
    self.model.engine.dispose()

    # Inject url_for for components to more easily optionally depend
    # on url_for.
    self.url_for = url_for

    self.server_starttime = int(time.time())  # used for cachebusting
    log.info("Galaxy app startup finished %s" % self.startup_timer)

def serialize_and_recover(file_sources_o, user_context=None):
    as_dict = file_sources_o.to_dict(for_serialization=True, user_context=user_context)
    file_sources = ConfiguredFileSources.from_dict(as_dict)
    return file_sources

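# Round-trip sketch for serialize_and_recover: a recovered instance should
# realize the same URIs as the original, which is what lets a remote job
# rebuild file sources from the serialized dict alone (the gxfiles://test1/
# URI root is the same assumption as in the earlier posix sketch).
def _demo_serialize_and_recover():
    file_sources = _configured_file_sources()
    recovered = serialize_and_recover(file_sources)
    assert_realizes_as(recovered, "gxfiles://test1/a", "a\n")
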
def configured_file_sources(conf_file):
    file_sources_config = ConfiguredFileSourcesConfig()
    return ConfiguredFileSources(file_sources_config, conf_file=conf_file)

def file_sources(self) -> ConfiguredFileSources:
    return ConfiguredFileSources.from_dict(self.file_sources_dict)

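# If this accessor is called frequently, rebuilding ConfiguredFileSources
# from the dict each time can be avoided. A sketch using
# functools.cached_property, an assumed optimization rather than the
# original design:
from functools import cached_property

class _CachedFileSourcesMixin:
    file_sources_dict: dict  # assumed to be provided by the host class

    @cached_property
    def file_sources(self) -> ConfiguredFileSources:
        return ConfiguredFileSources.from_dict(self.file_sources_dict)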