def __init__(self, app):
    """Wire up the app reference, DB session, and dataset managers used by this controller."""
    self.app = app
    self.sa_session = app.model.context
    # Managers for each dataset flavor this controller serves.
    self.hda_manager = HDAManager(app)
    self.ldda_manager = LDDAManager(app)
    self.dataset_collection_manager = DatasetCollectionManager(app)
    # Convenience handle on the security helper's ID decoder.
    self.decode_id = self.app.security.decode_id
def placeholderRenderForSave(trans, item_class, item_id, encode=False):
    """Render the placeholder markup persisted for an embedded item.

    Looks up the item of the given ``item_class`` (performing a security
    check), resolves its display name, and fills ``PLACEHOLDER_TEMPLATE``.

    :param trans: request transaction (provides ``sa_session``, ``app``, ``user``).
    :param item_class: one of ``'History'``, ``'HistoryDatasetAssociation'``,
        ``'StoredWorkflow'``, ``'Visualization'``; any other value leaves
        ``item_name`` empty and raises ``KeyError`` from ``PAGE_CLASS_MAPPING``.
    :param item_id: encoded or decoded item ID (normalized via ``get_page_identifiers``).
    :param encode: when True the rendered placeholder carries the encoded ID,
        otherwise the decoded (integer) ID.
    :returns: the formatted placeholder string.
    """
    encoded_item_id, decoded_item_id = get_page_identifiers(item_id, trans.app)
    item_name = ''
    if item_class == 'History':
        history = trans.sa_session.query(trans.model.History).get(decoded_item_id)
        history = base.security_check(trans, history, False, True)
        item_name = history.name
    elif item_class == 'HistoryDatasetAssociation':
        # The manager lookup enforces accessibility itself, so the previous
        # raw sa_session query (whose result was immediately discarded) is gone.
        hda_manager = HDAManager(trans.app)
        hda = hda_manager.get_accessible(decoded_item_id, trans.user)
        item_name = hda.name
    elif item_class == 'StoredWorkflow':
        wf = trans.sa_session.query(trans.model.StoredWorkflow).get(decoded_item_id)
        wf = base.security_check(trans, wf, False, True)
        item_name = wf.name
    elif item_class == 'Visualization':
        visualization = trans.sa_session.query(trans.model.Visualization).get(decoded_item_id)
        visualization = base.security_check(trans, visualization, False, True)
        item_name = visualization.title
    class_shorthand = PAGE_CLASS_MAPPING[item_class]
    if encode:
        item_id = encoded_item_id
    else:
        item_id = decoded_item_id
    return PLACEHOLDER_TEMPLATE.format(
        item_class=item_class,
        class_shorthand=class_shorthand,
        class_shorthand_lower=class_shorthand.lower(),
        item_id=item_id,
        item_name=item_name)
def __init__(self, app):
    """Initialize the base controller and attach the managers this API uses."""
    super().__init__(app)
    # Page / history managers and serializers.
    self.page_manager = PageManager(app)
    self.history_manager = HistoryManager(app)
    self.history_serializer = HistorySerializer(self.app)
    # Dataset and workflow managers.
    self.hda_manager = HDAManager(app)
    self.workflow_manager = WorkflowsManager(app)
def __init__(self, **kwargs):
    """Bootstrap the Galaxy application.

    Startup is strictly ordered: logging/config first, then the application
    stack, object store, database/ORM, managers, tool configuration,
    security/quota agents, job machinery, and finally postfork hook
    registration. ``kwargs`` are passed straight to ``config.Configuration``.
    """
    if not log.handlers:
        # Paste didn't handle it, so we need a temporary basic log
        # configured. The handler added here gets dumped and replaced with
        # an appropriately configured logger in configure_logging below.
        logging.basicConfig(level=logging.DEBUG)
    log.debug("python path is: %s", ", ".join(sys.path))
    self.name = 'galaxy'
    # is_webapp will be set to true when building WSGI app
    self.is_webapp = False
    self.startup_timer = ExecutionTimer()
    self.new_installation = False
    # Read config file and check for errors
    self.config = config.Configuration(**kwargs)
    self.config.check()
    config.configure_logging(self.config)
    self.execution_timer_factory = ExecutionTimerFactory(self.config)
    self.configure_fluent_log()
    # A lot of postfork initialization depends on the server name, ensure it
    # is set immediately after forking before other postfork functions.
    self.application_stack = application_stack_instance(app=self)
    self.application_stack.register_postfork_function(
        self.application_stack.set_postfork_server_name, self)
    self.config.reload_sanitize_whitelist(
        explicit='sanitize_whitelist_file' in kwargs)
    self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(
        self.config)
    # queue_worker *can* be initialized with a queue, but here we don't
    # want to and we'll allow postfork to bind and start it.
    self.queue_worker = GalaxyQueueWorker(self)
    self._configure_tool_shed_registry()
    self._configure_object_store(fsmon=True)
    # Setup the database engine and ORM
    config_file = kwargs.get('global_conf', {}).get('__file__', None)
    if config_file:
        log.debug('Using "galaxy.ini" config file: %s', config_file)
    check_migrate_tools = self.config.check_migrate_tools
    self._configure_models(
        check_migrate_databases=self.config.check_migrate_databases,
        check_migrate_tools=check_migrate_tools,
        config_file=config_file)
    # Security helper
    self._configure_security()
    # Tag handler
    self.tag_handler = GalaxyTagHandler(self.model.context)
    # Core managers shared by web controllers and the API layer.
    self.dataset_collections_service = DatasetCollectionManager(self)
    self.history_manager = HistoryManager(self)
    self.hda_manager = HDAManager(self)
    self.workflow_manager = WorkflowsManager(self)
    self.dependency_resolvers_view = DependencyResolversView(self)
    self.test_data_resolver = test_data.TestDataResolver(
        file_dirs=self.config.tool_test_data_directories)
    self.library_folder_manager = FolderManager()
    self.library_manager = LibraryManager()
    self.dynamic_tool_manager = DynamicToolManager(self)
    # Tool Data Tables
    self._configure_tool_data_tables(from_shed_config=False)
    # Load dbkey / genome build manager
    self._configure_genome_builds(data_table_name="__dbkeys__", load_old_style=True)
    # Genomes
    self.genomes = Genomes(self)
    # Data providers registry.
    self.data_provider_registry = DataProviderRegistry()
    # Initialize job metrics manager, needs to be in place before
    # config so per-destination modifications can be made.
    self.job_metrics = job_metrics.JobMetrics(
        self.config.job_metrics_config_file, app=self)
    # Initialize error report plugins.
    self.error_reports = ErrorReports(self.config.error_report_file, app=self)
    # Initialize the job management configuration
    self.job_config = jobs.JobConfiguration(self)
    # Setup a Tool Cache
    self.tool_cache = ToolCache()
    self.tool_shed_repository_cache = ToolShedRepositoryCache(self)
    # Watch various config files for immediate reload
    self.watchers = ConfigWatchers(self)
    self._configure_tool_config_files()
    self.installed_repository_manager = InstalledRepositoryManager(self)
    self._configure_datatypes_registry(self.installed_repository_manager)
    galaxy.model.set_datatypes_registry(self.datatypes_registry)
    self._configure_toolbox()
    # Load Data Manager
    self.data_managers = DataManagers(self)
    # Load the update repository manager.
    self.update_repository_manager = UpdateRepositoryManager(self)
    # Load proprietary datatype converters and display applications.
    self.installed_repository_manager.load_proprietary_converters_and_display_applications()
    # Load datatype display applications defined in local datatypes_conf.xml
    self.datatypes_registry.load_display_applications(self)
    # Load datatype converters defined in local datatypes_conf.xml
    self.datatypes_registry.load_datatype_converters(self.toolbox)
    # Load external metadata tool
    self.datatypes_registry.load_external_metadata_tool(self.toolbox)
    # Load history import/export tools.
    load_lib_tools(self.toolbox)
    # visualizations registry: associates resources with visualizations,
    # controls how to render
    self.visualizations_registry = VisualizationsRegistry(
        self,
        directories_setting=self.config.visualization_plugins_directory,
        template_cache_dir=self.config.template_cache_path)
    # Tours registry
    self.tour_registry = ToursRegistry(self.config.tour_config_dir)
    # Webhooks registry
    self.webhooks_registry = WebhooksRegistry(self.config.webhooks_dir)
    # Load security policy.
    self.security_agent = self.model.security_agent
    self.host_security_agent = galaxy.model.security.HostAgent(
        model=self.security_agent.model,
        permitted_actions=self.security_agent.permitted_actions)
    # Load quota management.
    if self.config.enable_quotas:
        self.quota_agent = galaxy.quota.QuotaAgent(self.model)
    else:
        self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
    # Heartbeat for thread profiling
    self.heartbeat = None
    from galaxy import auth
    self.auth_manager = auth.AuthManager(self)
    self.user_manager = UserManager(self)
    # Start the heartbeat process if configured and available (wait until
    # postfork if using uWSGI)
    if self.config.use_heartbeat:
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat(
                self.config,
                period=self.config.heartbeat_interval,
                fname=self.config.heartbeat_log)
            self.heartbeat.daemon = True
            self.application_stack.register_postfork_function(
                self.heartbeat.start)
    self.authnz_manager = None
    if self.config.enable_oidc:
        from galaxy.authnz import managers
        self.authnz_manager = managers.AuthnzManager(
            self, self.config.oidc_config, self.config.oidc_backends_config)
    self.sentry_client = None
    if self.config.sentry_dsn:
        # Sentry's client is created postfork so each worker gets its own
        # transport.
        def postfork_sentry_client():
            import raven
            self.sentry_client = raven.Client(
                self.config.sentry_dsn,
                transport=raven.transport.HTTPTransport)
        self.application_stack.register_postfork_function(
            postfork_sentry_client)
    # Transfer manager client
    if self.config.get_bool('enable_beta_job_managers', False):
        from galaxy.jobs import transfer_manager
        self.transfer_manager = transfer_manager.TransferManager(self)
    # Start the job manager
    from galaxy.jobs import manager
    self.job_manager = manager.JobManager(self)
    self.application_stack.register_postfork_function(
        self.job_manager.start)
    self.proxy_manager = ProxyManager(self.config)
    from galaxy.workflow import scheduling_manager
    # Must be initialized after job_config.
    self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager(
        self)
    # Must be initialized after any component that might make use of stack
    # messaging is configured. Alternatively if it becomes more commonly
    # needed we could create a prefork function registration method like we
    # do with postfork functions.
    self.application_stack.init_late_prefork()
    self.containers = {}
    if self.config.enable_beta_containers_interface:
        self.containers = build_container_interfaces(
            self.config.containers_config_file,
            containers_conf=self.config.containers_conf)
    self.interactivetool_manager = InteractiveToolManager(self)
    # Configure handling of signals
    handlers = {}
    if self.heartbeat:
        handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
    self._configure_signal_handlers(handlers)
    self.database_heartbeat = DatabaseHeartbeat(
        application_stack=self.application_stack)
    self.database_heartbeat.add_change_callback(self.watchers.change_state)
    self.application_stack.register_postfork_function(
        self.database_heartbeat.start)
    # Start web stack message handling
    self.application_stack.register_postfork_function(
        self.application_stack.start)
    self.application_stack.register_postfork_function(
        self.queue_worker.bind_and_start)
    # Delay toolbox index until after startup
    self.application_stack.register_postfork_function(
        lambda: send_local_control_task(self, 'rebuild_toolbox_search_index'))
    # Drop prefork DB connections so each worker opens its own.
    self.model.engine.dispose()
    # Inject url_for for components to more easily optionally depend
    # on url_for.
    self.url_for = url_for
    self.server_starttime = int(time.time())  # used for cachebusting
    log.info("Galaxy app startup finished %s" % self.startup_timer)
def __init__(self, app):
    """Initialize the base controller and attach page/history/dataset managers.

    Uses the zero-argument ``super()`` form for consistency with the other
    Python 3 style constructors in this file.
    """
    super().__init__(app)
    self.page_manager = PageManager(app)
    self.history_manager = HistoryManager(app)
    self.history_serializer = HistorySerializer(self.app)
    self.hda_manager = HDAManager(app)
def set_up_managers(self):
    """Register dataset, HDA, history, and collection managers for these tests.

    Modernized to the zero-argument ``super()`` form (Python 3), matching the
    other modern ``super()`` usage in this codebase.
    """
    super().set_up_managers()
    self.dataset_manager = DatasetManager(self.app)
    self.hda_manager = HDAManager(self.app)
    self.history_manager = HistoryManager(self.app)
    self.collection_manager = DatasetCollectionManager(self.app)
def set_up_managers(self):
    """Register the history, dataset, and HDA managers for these tests.

    Modernized to the zero-argument ``super()`` form (Python 3), matching the
    other modern ``super()`` usage in this codebase.
    """
    super().set_up_managers()
    self.history_mgr = HistoryManager(self.app)
    self.dataset_mgr = DatasetManager(self.app)
    self.hda_mgr = HDAManager(self.app)
def ready_galaxy_markdown_for_export(trans, internal_galaxy_markdown):
    """Fill in details needed to render Galaxy flavored markdown.

    Take it from a minimal internal version to an externally render-able
    version with more details populated and actual IDs replaced with
    encoded IDs to render external links. Return expanded markdown and
    extra data useful for rendering custom container tags.

    :param trans: request transaction (security/ID encoding, app, user).
    :param internal_galaxy_markdown: markdown with decoded (integer) object IDs.
    :returns: ``(export_markdown, extra_rendering_data)`` tuple.
    """
    hdas_manager = HDAManager(trans.app)
    workflows_manager = WorkflowsManager(trans.app)
    extra_rendering_data = {}

    def _remap(container, line):
        # Rewrite the first decoded ID in the line to its encoded form and
        # collect per-object rendering data keyed by the encoded ID.
        id_match = re.search(ID_PATTERN, line)
        object_id = None
        encoded_id = None
        if id_match:
            object_id = int(id_match.group(2))
            encoded_id = trans.security.encode_id(object_id)
            line = line.replace(id_match.group(), "%s=%s" % (id_match.group(1), encoded_id))

        def ensure_rendering_data_for(object_type, encoded_id):
            # Lazily create the nested dict for this object type / ID.
            if object_type not in extra_rendering_data:
                extra_rendering_data[object_type] = {}
            object_type_data = extra_rendering_data[object_type]
            if encoded_id not in object_type_data:
                object_type_data[encoded_id] = {}
            return object_type_data[encoded_id]

        def extend_history_dataset_rendering_data(key, val, default_val):
            ensure_rendering_data_for("history_datasets", encoded_id)[key] = val or default_val

        if container == "history_dataset_display":
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            if "history_datasets" not in extra_rendering_data:
                extra_rendering_data["history_datasets"] = {}
            extend_history_dataset_rendering_data("name", hda.name, "")
        elif container == "history_dataset_peek":
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            peek = hda.peek
            extend_history_dataset_rendering_data(
                "peek", peek, "*No Dataset Peek Available*")
        elif container == "history_dataset_info":
            # Guard added for consistency with the sibling dataset branches.
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            info = hda.info
            # Fixed copy-paste error: fallback previously read "Peek" here.
            extend_history_dataset_rendering_data(
                "info", info, "*No Dataset Info Available*")
        elif container == "workflow_display":
            # TODO: should be workflow id...
            stored_workflow = workflows_manager.get_stored_accessible_workflow(
                trans, encoded_id)
            ensure_rendering_data_for(
                "workflows", encoded_id)["name"] = stored_workflow.name
        elif container == "history_dataset_collection_display":
            collection_manager = DatasetCollectionManager(trans.app)
            hdca = collection_manager.get_dataset_collection_instance(
                trans, "history", encoded_id)
            hdca_serializer = HDCASerializer(trans.app)
            hdca_view = hdca_serializer.serialize_to_view(
                hdca, user=trans.user, trans=trans, view="summary")
            if "history_dataset_collections" not in extra_rendering_data:
                extra_rendering_data["history_dataset_collections"] = {}
            ensure_rendering_data_for(
                "history_dataset_collections", encoded_id).update(hdca_view)
        elif container == "tool_stdout":
            job_manager = JobManager(trans.app)
            job = job_manager.get_accessible_job(trans, object_id)
            ensure_rendering_data_for(
                "jobs", encoded_id
            )["tool_stdout"] = job.tool_stdout or "*No Standard Output Available*"
        elif container == "tool_stderr":
            job_manager = JobManager(trans.app)
            job = job_manager.get_accessible_job(trans, object_id)
            ensure_rendering_data_for(
                "jobs", encoded_id
            )["tool_stderr"] = job.tool_stderr or "*No Standard Error Available*"
        return (line, False)

    export_markdown = _remap_galaxy_markdown_calls(_remap, internal_galaxy_markdown)
    return export_markdown, extra_rendering_data