def __init__(self, app):
    super().__init__(app)
    self.page_manager = PageManager(app)
    self.history_manager = HistoryManager(app)
    self.history_serializer = HistorySerializer(self.app)
    self.hda_manager = HDAManager(app)
    self.workflow_manager = WorkflowsManager(app)

def __init__(self):
    self.config = bunch.Bunch(tool_secret="awesome_secret")
    self.model = mapping.init("/tmp", "sqlite:///:memory:", create_tables=True)
    self.toolbox = TestToolbox()
    self.datatypes_registry = TestDatatypesRegistry()
    self.security = IdEncodingHelper(id_secret="testing")
    self.workflow_manager = WorkflowsManager(self)
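
# --- Usage sketch (assumption): the mock application class defined above is not named in this
# --- snippet; "MockApp" is a hypothetical stand-in for whatever it is actually called. The
# --- sketch only touches attributes that the __init__ above really sets.
app = MockApp()                                      # hypothetical name for the class above
assert app.workflow_manager is not None              # WorkflowsManager(self) wired up in __init__
assert app.config.tool_secret == "awesome_secret"    # config values come from the Bunch above
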
def from_dict(Class, trans, d, **kwds):
    module = super(SubWorkflowModule, Class).from_dict(trans, d, **kwds)
    if "subworkflow" in d:
        module.subworkflow = d["subworkflow"]
    elif "content_id" in d:
        from galaxy.managers.workflows import WorkflowsManager
        module.subworkflow = WorkflowsManager(trans.app).get_owned_workflow(trans, d["content_id"])
    else:
        raise Exception("Step associated subworkflow could not be found.")
    return module
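
# --- Illustration (assumption): a step dictionary shaped for the from_dict branches above. The
# --- keys mirror the code ("subworkflow" / "content_id"); the encoded id value and the trans
# --- object are placeholders, so the call itself is left commented out.
step_dict = {
    "type": "subworkflow",
    "content_id": "f2db41e1fa331b3e",  # encoded id of a workflow owned by the requesting user
}
# module = SubWorkflowModule.from_dict(trans, step_dict)
# module.subworkflow is then resolved via WorkflowsManager(trans.app).get_owned_workflow(...)
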
def __init__(self, **kwargs):
    if not log.handlers:
        # Paste didn't handle it, so we need a temporary basic log
        # configured. The handler added here gets dumped and replaced with
        # an appropriately configured logger in configure_logging below.
        logging.basicConfig(level=logging.DEBUG)
    log.debug("python path is: %s", ", ".join(sys.path))
    self.name = 'galaxy'
    # is_webapp will be set to true when building WSGI app
    self.is_webapp = False
    self.startup_timer = ExecutionTimer()
    self.new_installation = False
    # Read config file and check for errors
    self.config = config.Configuration(**kwargs)
    self.config.check()
    config.configure_logging(self.config)
    self.execution_timer_factory = ExecutionTimerFactory(self.config)
    self.configure_fluent_log()
    # A lot of postfork initialization depends on the server name, ensure it is set immediately after forking before other postfork functions
    self.application_stack = application_stack_instance(app=self)
    self.application_stack.register_postfork_function(self.application_stack.set_postfork_server_name, self)
    self.config.reload_sanitize_whitelist(explicit='sanitize_whitelist_file' in kwargs)
    self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(self.config)
    # queue_worker *can* be initialized with a queue, but here we don't
    # want to and we'll allow postfork to bind and start it.
    self.queue_worker = GalaxyQueueWorker(self)
    self._configure_tool_shed_registry()
    self._configure_object_store(fsmon=True)
    # Setup the database engine and ORM
    config_file = kwargs.get('global_conf', {}).get('__file__', None)
    if config_file:
        log.debug('Using "galaxy.ini" config file: %s', config_file)
    check_migrate_tools = self.config.check_migrate_tools
    self._configure_models(check_migrate_databases=self.config.check_migrate_databases,
                           check_migrate_tools=check_migrate_tools,
                           config_file=config_file)
    # Security helper
    self._configure_security()
    # Tag handler
    self.tag_handler = GalaxyTagHandler(self.model.context)
    self.dataset_collections_service = DatasetCollectionManager(self)
    self.history_manager = HistoryManager(self)
    self.hda_manager = HDAManager(self)
    self.workflow_manager = WorkflowsManager(self)
    self.dependency_resolvers_view = DependencyResolversView(self)
    self.test_data_resolver = test_data.TestDataResolver(file_dirs=self.config.tool_test_data_directories)
    self.library_folder_manager = FolderManager()
    self.library_manager = LibraryManager()
    self.dynamic_tool_manager = DynamicToolManager(self)
    # Tool Data Tables
    self._configure_tool_data_tables(from_shed_config=False)
    # Load dbkey / genome build manager
    self._configure_genome_builds(data_table_name="__dbkeys__", load_old_style=True)
    # Genomes
    self.genomes = Genomes(self)
    # Data providers registry.
    self.data_provider_registry = DataProviderRegistry()
    # Initialize job metrics manager, needs to be in place before
    # config so per-destination modifications can be made.
    self.job_metrics = job_metrics.JobMetrics(self.config.job_metrics_config_file, app=self)
    # Initialize error report plugins.
    self.error_reports = ErrorReports(self.config.error_report_file, app=self)
    # Initialize the job management configuration
    self.job_config = jobs.JobConfiguration(self)
    # Setup a Tool Cache
    self.tool_cache = ToolCache()
    self.tool_shed_repository_cache = ToolShedRepositoryCache(self)
    # Watch various config files for immediate reload
    self.watchers = ConfigWatchers(self)
    self._configure_tool_config_files()
    self.installed_repository_manager = InstalledRepositoryManager(self)
    self._configure_datatypes_registry(self.installed_repository_manager)
    galaxy.model.set_datatypes_registry(self.datatypes_registry)
    self._configure_toolbox()
    # Load Data Manager
    self.data_managers = DataManagers(self)
    # Load the update repository manager.
    self.update_repository_manager = UpdateRepositoryManager(self)
    # Load proprietary datatype converters and display applications.
    self.installed_repository_manager.load_proprietary_converters_and_display_applications()
    # Load datatype display applications defined in local datatypes_conf.xml
    self.datatypes_registry.load_display_applications(self)
    # Load datatype converters defined in local datatypes_conf.xml
    self.datatypes_registry.load_datatype_converters(self.toolbox)
    # Load external metadata tool
    self.datatypes_registry.load_external_metadata_tool(self.toolbox)
    # Load history import/export tools.
    load_lib_tools(self.toolbox)
    # visualizations registry: associates resources with visualizations, controls how to render
    self.visualizations_registry = VisualizationsRegistry(
        self,
        directories_setting=self.config.visualization_plugins_directory,
        template_cache_dir=self.config.template_cache_path)
    # Tours registry
    self.tour_registry = ToursRegistry(self.config.tour_config_dir)
    # Webhooks registry
    self.webhooks_registry = WebhooksRegistry(self.config.webhooks_dir)
    # Load security policy.
    self.security_agent = self.model.security_agent
    self.host_security_agent = galaxy.model.security.HostAgent(
        model=self.security_agent.model,
        permitted_actions=self.security_agent.permitted_actions)
    # Load quota management.
    if self.config.enable_quotas:
        self.quota_agent = galaxy.quota.QuotaAgent(self.model)
    else:
        self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
    # Heartbeat for thread profiling
    self.heartbeat = None
    from galaxy import auth
    self.auth_manager = auth.AuthManager(self)
    self.user_manager = UserManager(self)
    # Start the heartbeat process if configured and available (wait until
    # postfork if using uWSGI)
    if self.config.use_heartbeat:
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat(
                self.config,
                period=self.config.heartbeat_interval,
                fname=self.config.heartbeat_log)
            self.heartbeat.daemon = True
            self.application_stack.register_postfork_function(self.heartbeat.start)
    self.authnz_manager = None
    if self.config.enable_oidc:
        from galaxy.authnz import managers
        self.authnz_manager = managers.AuthnzManager(
            self, self.config.oidc_config, self.config.oidc_backends_config)
    self.sentry_client = None
    if self.config.sentry_dsn:

        def postfork_sentry_client():
            import raven
            self.sentry_client = raven.Client(self.config.sentry_dsn, transport=raven.transport.HTTPTransport)

        self.application_stack.register_postfork_function(postfork_sentry_client)
    # Transfer manager client
    if self.config.get_bool('enable_beta_job_managers', False):
        from galaxy.jobs import transfer_manager
        self.transfer_manager = transfer_manager.TransferManager(self)
    # Start the job manager
    from galaxy.jobs import manager
    self.job_manager = manager.JobManager(self)
    self.application_stack.register_postfork_function(self.job_manager.start)
    self.proxy_manager = ProxyManager(self.config)
    from galaxy.workflow import scheduling_manager
    # Must be initialized after job_config.
    self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager(self)
    # Must be initialized after any component that might make use of stack messaging is configured. Alternatively if
    # it becomes more commonly needed we could create a prefork function registration method like we do with
    # postfork functions.
    self.application_stack.init_late_prefork()
    self.containers = {}
    if self.config.enable_beta_containers_interface:
        self.containers = build_container_interfaces(
            self.config.containers_config_file,
            containers_conf=self.config.containers_conf)
    self.interactivetool_manager = InteractiveToolManager(self)
    # Configure handling of signals
    handlers = {}
    if self.heartbeat:
        handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
    self._configure_signal_handlers(handlers)
    self.database_heartbeat = DatabaseHeartbeat(application_stack=self.application_stack)
    self.database_heartbeat.add_change_callback(self.watchers.change_state)
    self.application_stack.register_postfork_function(self.database_heartbeat.start)
    # Start web stack message handling
    self.application_stack.register_postfork_function(self.application_stack.start)
    self.application_stack.register_postfork_function(self.queue_worker.bind_and_start)
    # Delay toolbox index until after startup
    self.application_stack.register_postfork_function(
        lambda: send_local_control_task(self, 'rebuild_toolbox_search_index'))
    self.model.engine.dispose()
    # Inject url_for for components to more easily optionally depend
    # on url_for.
    self.url_for = url_for
    self.server_starttime = int(time.time())  # used for cachebusting
    log.info("Galaxy app startup finished %s" % self.startup_timer)
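
# --- Illustration (assumption): a toy stand-in for the "register now, run after fork" pattern
# --- used repeatedly above. This is not Galaxy's ApplicationStack; it only mimics the shape of
# --- the register_postfork_function(...) calls seen in __init__.
class _ToyApplicationStack:
    def __init__(self):
        self._postfork_functions = []

    def register_postfork_function(self, func, *args):
        # Remember the callable; Galaxy invokes these after worker processes fork.
        self._postfork_functions.append((func, args))

    def run_postfork(self):
        for func, args in self._postfork_functions:
            func(*args)


_stack = _ToyApplicationStack()
_stack.register_postfork_function(print, "queue worker bound and started")
_stack.run_postfork()  # prints: queue worker bound and started
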
def __init__(self, app, *args, **kwargs):
    """ """
    super().__init__(app, *args, **kwargs)
    self.workflow_manager = WorkflowsManager(app)

class PageManager(sharable.SharableModelManager, UsesAnnotations):
    """ """

    model_class = model.Page
    foreign_key_name = 'page'
    user_share_model = model.PageUserShareAssociation
    tag_assoc = model.PageTagAssociation
    annotation_assoc = model.PageAnnotationAssociation
    rating_assoc = model.PageRatingAssociation

    def __init__(self, app, *args, **kwargs):
        """ """
        super().__init__(app, *args, **kwargs)
        self.workflow_manager = WorkflowsManager(app)

    def copy(self, trans, page, user, **kwargs):
        """ """

    def create(self, trans, payload):
        user = trans.get_user()
        if not payload.get("title"):
            raise exceptions.ObjectAttributeMissingException("Page name is required")
        elif not payload.get("slug"):
            raise exceptions.ObjectAttributeMissingException("Page id is required")
        elif not base.is_valid_slug(payload["slug"]):
            raise exceptions.ObjectAttributeInvalidException("Page identifier must consist of only lowercase letters, numbers, and the '-' character")
        elif trans.sa_session.query(trans.app.model.Page).filter_by(user=user, slug=payload["slug"], deleted=False).first():
            raise exceptions.DuplicatedSlugException("Page identifier must be unique")
        if payload.get("invocation_id"):
            invocation_id = payload.get("invocation_id")
            invocation_report = self.workflow_manager.get_invocation_report(trans, invocation_id)
            content = invocation_report.get("markdown")
            content_format = "markdown"
        else:
            content = payload.get("content", "")
            content_format = payload.get("content_format", "html")
        content = self.rewrite_content_for_import(trans, content, content_format)
        # Create the new stored page
        page = trans.app.model.Page()
        page.title = payload['title']
        page.slug = payload['slug']
        page_annotation = payload.get("annotation", None)
        if page_annotation is not None:
            page_annotation = sanitize_html(page_annotation)
            self.add_item_annotation(trans.sa_session, trans.get_user(), page, page_annotation)
        page.user = user
        # And the first (empty) page revision
        page_revision = trans.app.model.PageRevision()
        page_revision.title = payload['title']
        page_revision.page = page
        page.latest_revision = page_revision
        page_revision.content = content
        page_revision.content_format = content_format
        # Persist
        session = trans.sa_session
        session.add(page)
        session.flush()
        return page

    def save_new_revision(self, trans, page, payload):
        # Assumes security has already been checked by caller.
        content = payload.get("content", None)
        content_format = payload.get("content_format", None)
        if not content:
            raise exceptions.ObjectAttributeMissingException("content undefined or empty")
        if content_format not in [None, "html", "markdown"]:
            raise exceptions.RequestParameterInvalidException("content_format [%s], if specified, must be either html or markdown" % content_format)
        if 'title' in payload:
            title = payload['title']
        else:
            title = page.title
        if content_format is None:
            content_format = page.latest_revision.content_format
        content = self.rewrite_content_for_import(trans, content, content_format=content_format)
        page_revision = trans.app.model.PageRevision()
        page_revision.title = title
        page_revision.page = page
        page.latest_revision = page_revision
        page_revision.content = content
        page_revision.content_format = content_format
        # Persist
        session = trans.sa_session
        session.flush()
        return page_revision

    def rewrite_content_for_import(self, trans, content, content_format):
        if content_format == "html":
            try:
                content = sanitize_html(content)
                processor = PageContentProcessor(trans, placeholderRenderForSave)
                processor.feed(content)
                # Output is string, so convert to unicode for saving.
                content = unicodify(processor.output(), 'utf-8')
            except exceptions.MessageException:
                raise
            except Exception:
                raise exceptions.RequestParameterInvalidException("problem with embedded HTML content [%s]" % content)
        elif content_format == "markdown":
            content = ready_galaxy_markdown_for_import(trans, content)
        else:
            raise exceptions.RequestParameterInvalidException("content_format [%s] must be either html or markdown" % content_format)
        return content

    def rewrite_content_for_export(self, trans, as_dict):
        content = as_dict["content"]
        content_format = as_dict.get("content_format", "html")
        if content_format == "html":
            processor = PageContentProcessor(trans, placeholderRenderForEdit)
            processor.feed(content)
            content = unicodify(processor.output(), 'utf-8')
            as_dict["content"] = content
        elif content_format == "markdown":
            content, extra_attributes = ready_galaxy_markdown_for_export(trans, content)
            as_dict["content"] = content
            as_dict.update(extra_attributes)
        else:
            raise exceptions.RequestParameterInvalidException("content_format [%s] must be either html or markdown" % content_format)
        return as_dict
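
# --- Usage sketch (assumption): a payload accepted by PageManager.create above. Only keys the
# --- method reads are used; the title/slug/annotation values are illustrative, and the trans
# --- and page_manager objects are placeholders, so the call itself is left commented out.
example_payload = {
    "title": "Mapping run report",
    "slug": "mapping-run-report",                  # must satisfy base.is_valid_slug
    "annotation": "Summary of the mapping run",
    "content_format": "markdown",                  # or "html"; defaults to "html" when omitted
    "content": "# Results\n\nNarrative around the embedded outputs goes here.",
}
# page = page_manager.create(trans, example_payload)
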
class PageController(BaseUIController, SharableMixin, UsesStoredWorkflowMixin, UsesVisualizationMixin, UsesItemRatings):

    _page_list = PageListGrid()
    _all_published_list = PageAllPublishedGrid()
    _history_selection_grid = HistorySelectionGrid()
    _workflow_selection_grid = WorkflowSelectionGrid()
    _datasets_selection_grid = HistoryDatasetAssociationSelectionGrid()
    _page_selection_grid = PageSelectionGrid()
    _visualization_selection_grid = VisualizationSelectionGrid()

    def __init__(self, app):
        super().__init__(app)
        self.page_manager = PageManager(app)
        self.history_manager = HistoryManager(app)
        self.history_serializer = HistorySerializer(self.app)
        self.hda_manager = HDAManager(app)
        self.workflow_manager = WorkflowsManager(app)

    @web.expose
    @web.json
    @web.require_login()
    def list(self, trans, *args, **kwargs):
        """List user's pages."""
        # Handle operation
        if 'operation' in kwargs and 'id' in kwargs:
            session = trans.sa_session
            operation = kwargs['operation'].lower()
            ids = util.listify(kwargs['id'])
            for id in ids:
                item = session.query(model.Page).get(self.decode_id(id))
                if operation == "delete":
                    item.deleted = True
            session.flush()
        # Build grid dictionary.
        grid = self._page_list(trans, *args, **kwargs)
        grid['shared_by_others'] = self._get_shared(trans)
        return grid

    @web.expose
    @web.json
    def list_published(self, trans, *args, **kwargs):
        grid = self._all_published_list(trans, *args, **kwargs)
        grid['shared_by_others'] = self._get_shared(trans)
        return grid

    def _get_shared(self, trans):
        """Identify shared pages"""
        shared_by_others = trans.sa_session \
            .query(model.PageUserShareAssociation) \
            .filter_by(user=trans.get_user()) \
            .join(model.Page.table) \
            .filter(model.Page.deleted == false()) \
            .order_by(desc(model.Page.update_time)) \
            .all()
        return [{'username': p.page.user.username, 'slug': p.page.slug, 'title': p.page.title} for p in shared_by_others]

    @web.legacy_expose_api
    @web.require_login("create pages")
    def create(self, trans, payload=None, **kwd):
        """Create a new page."""
        if trans.request.method == 'GET':
            form_title = "Create new Page"
            title = ""
            slug = ""
            content = ""
            content_format_hide = False
            content_hide = True
            if "invocation_id" in kwd:
                invocation_id = kwd.get("invocation_id")
                form_title = form_title + " from Invocation Report"
                slug = "invocation-report-" + invocation_id
                invocation_report = self.workflow_manager.get_invocation_report(trans, invocation_id)
                title = invocation_report.get("title")
                content = invocation_report.get("markdown")
                content_format_hide = True
                content_hide = False
            return {
                'title': form_title,
                'inputs': [{
                    'name': 'title',
                    'label': 'Name',
                    'value': title,
                }, {
                    'name': 'slug',
                    'label': 'Identifier',
                    'help': 'A unique identifier that will be used for public links to this page. This field can only contain lowercase letters, numbers, and dashes (-).',
                    'value': slug,
                }, {
                    'name': 'annotation',
                    'label': 'Annotation',
                    'help': 'A description of the page. The annotation is shown alongside published pages.'
                }, {
                    'name': 'content_format',
                    'label': 'Content Format',
                    'type': 'select',
                    'hidden': content_format_hide,
                    'options': [('Markdown', 'markdown'), ('HTML', 'html')],
                    'help': 'Use the traditional rich HTML editor or the newer experimental Markdown editor to create the page content. The HTML editor has several known bugs, is unmaintained and pages created with it will be read-only in future releases of Galaxy.'
                }, {
                    'name': 'content',
                    'label': 'Content',
                    'area': True,
                    'value': content,
                    'hidden': content_hide,
                }]
            }
        else:
            try:
                page = self.page_manager.create(trans, payload)
            except exceptions.MessageException as e:
                return self.message_exception(trans, unicodify(e))
            return {'message': 'Page \'%s\' successfully created.' % page.title, 'status': 'success'}

    @web.legacy_expose_api
    @web.require_login("edit pages")
    def edit(self, trans, payload=None, **kwd):
        """Edit a page's attributes."""
        id = kwd.get('id')
        if not id:
            return self.message_exception(trans, 'No page id received for editing.')
        decoded_id = self.decode_id(id)
        user = trans.get_user()
        p = trans.sa_session.query(model.Page).get(decoded_id)
        if trans.request.method == 'GET':
            if p.slug is None:
                self.create_item_slug(trans.sa_session, p)
            return {
                'title': 'Edit page attributes',
                'inputs': [{
                    'name': 'title',
                    'label': 'Name',
                    'value': p.title
                }, {
                    'name': 'slug',
                    'label': 'Identifier',
                    'value': p.slug,
                    'help': 'A unique identifier that will be used for public links to this page. This field can only contain lowercase letters, numbers, and dashes (-).'
                }, {
                    'name': 'annotation',
                    'label': 'Annotation',
                    'value': self.get_item_annotation_str(trans.sa_session, user, p),
                    'help': 'A description of the page. The annotation is shown alongside published pages.'
                }]
            }
        else:
            p_title = payload.get('title')
            p_slug = payload.get('slug')
            p_annotation = payload.get('annotation')
            if not p_title:
                return self.message_exception(trans, 'Please provide a page name.')
            elif not p_slug:
                return self.message_exception(trans, 'Please provide a unique identifier.')
            elif not self._is_valid_slug(p_slug):
                return self.message_exception(trans, 'Page identifier can only contain lowercase letters, numbers, and dashes (-).')
            elif p_slug != p.slug and trans.sa_session.query(model.Page).filter_by(user=p.user, slug=p_slug, deleted=False).first():
                return self.message_exception(trans, 'Page id must be unique.')
            else:
                p.title = p_title
                p.slug = p_slug
                if p_annotation:
                    p_annotation = sanitize_html(p_annotation)
                    self.add_item_annotation(trans.sa_session, user, p, p_annotation)
                trans.sa_session.add(p)
                trans.sa_session.flush()
            return {'message': 'Attributes of \'%s\' successfully saved.' % p.title, 'status': 'success'}

    @web.expose
    @web.require_login("edit pages")
    def edit_content(self, trans, id):
        """Render the main page editor interface."""
        return trans.fill_template("page/editor.mako", id=id)

    @web.expose
    @web.require_login("use Galaxy pages")
    def share(self, trans, id, email="", use_panels=False):
        """
        Handle sharing with an individual user.
""" msg = mtype = None page = trans.sa_session.query(model.Page).get(self.decode_id(id)) if email: other = trans.sa_session.query(model.User) \ .filter(and_(model.User.table.c.email == email, model.User.table.c.deleted == false())) \ .first() if not other: mtype = "error" msg = ("User '%s' does not exist" % escape(email)) elif other == trans.get_user(): mtype = "error" msg = ("You cannot share a page with yourself") elif trans.sa_session.query(model.PageUserShareAssociation) \ .filter_by(user=other, page=page).count() > 0: mtype = "error" msg = ("Page already shared with '%s'" % escape(email)) else: share = model.PageUserShareAssociation() share.page = page share.user = other session = trans.sa_session session.add(share) self.create_item_slug(session, page) session.flush() page_title = escape(page.title) other_email = escape(other.email) trans.set_message("Page '{}' shared with user '{}'".format( page_title, other_email)) return trans.response.send_redirect( url_for("/pages/sharing?id=%s" % id)) return trans.fill_template("/ind_share_base.mako", message=msg, messagetype=mtype, item=page, email=email, use_panels=use_panels) @web.expose @web.require_login() def display(self, trans, id): id = self.decode_id(id) page = trans.sa_session.query(model.Page).get(id) if not page: raise web.httpexceptions.HTTPNotFound() return self.display_by_username_and_slug(trans, page.user.username, page.slug) @web.expose def display_by_username_and_slug(self, trans, username, slug): """ Display page based on a username and slug. """ # Get page. session = trans.sa_session user = session.query(model.User).filter_by(username=username).first() page = trans.sa_session.query(model.Page).filter_by( user=user, slug=slug, deleted=False).first() if page is None: raise web.httpexceptions.HTTPNotFound() # Security check raises error if user cannot access page. self.security_check(trans, page, False, True) latest_revision = page.latest_revision if latest_revision.content_format == "html": # Process page content. processor = PageContentProcessor(trans, self._get_embed_html) processor.feed(page.latest_revision.content) # Output is string, so convert to unicode for display. page_content = unicodify(processor.output(), 'utf-8') template = "page/display.mako" else: page_content = trans.security.encode_id(page.id) template = "page/display_markdown.mako" # Get rating data. user_item_rating = 0 if trans.get_user(): user_item_rating = self.get_user_item_rating( trans.sa_session, trans.get_user(), page) if user_item_rating: user_item_rating = user_item_rating.rating else: user_item_rating = 0 ave_item_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, page) return trans.fill_template_mako(template, item=page, item_data=page_content, user_item_rating=user_item_rating, ave_item_rating=ave_item_rating, num_ratings=num_ratings, content_only=True) @web.expose @web.require_login("use Galaxy pages") def set_accessible_async(self, trans, id=None, accessible=False): """ Set page's importable attribute and slug. """ page = self.get_page(trans, id) # Only set if importable value would change; this prevents a change in the update_time unless attribute really changed. 
        importable = accessible in ['True', 'true', 't', 'T']
        if page.importable != importable:
            if importable:
                self._make_item_accessible(trans.sa_session, page)
            else:
                page.importable = importable
            trans.sa_session.flush()
        return

    @web.expose
    @web.require_login("rate items")
    @web.json
    def rate_async(self, trans, id, rating):
        """Rate a page asynchronously and return updated community data."""
        page = self.get_page(trans, id, check_ownership=False, check_accessible=True)
        if not page:
            return trans.show_error_message("The specified page does not exist.")
        # Rate page.
        self.rate_item(trans.sa_session, trans.get_user(), page, rating)
        return self.get_ave_item_rating_data(trans.sa_session, page)

    @web.expose
    def get_embed_html_async(self, trans, id):
        """Returns HTML for embedding a page in another page."""
        # TODO: user should be able to embed any item he has access to. see display_by_username_and_slug for security code.
        page = self.get_page(trans, id)
        if page:
            return "Embedded Page '%s'" % page.title

    @web.expose
    @web.json
    @web.require_login("use Galaxy pages")
    def get_name_and_link_async(self, trans, id=None):
        """Returns page's name and link."""
        page = self.get_page(trans, id)
        if self.create_item_slug(trans.sa_session, page):
            trans.sa_session.flush()
        return_dict = {
            "name": page.title,
            "link": url_for(controller='page',
                            action="display_by_username_and_slug",
                            username=page.user.username,
                            slug=page.slug)
        }
        return return_dict

    @web.expose
    @web.json
    @web.require_login("select a history from saved histories")
    def list_histories_for_selection(self, trans, **kwargs):
        """Returns HTML that enables a user to select one or more histories."""
        return self._history_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a workflow from saved workflows")
    def list_workflows_for_selection(self, trans, **kwargs):
        """Returns HTML that enables a user to select one or more workflows."""
        return self._workflow_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a visualization from saved visualizations")
    def list_visualizations_for_selection(self, trans, **kwargs):
        """Returns HTML that enables a user to select one or more visualizations."""
        return self._visualization_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a page from saved pages")
    def list_pages_for_selection(self, trans, **kwargs):
        """Returns HTML that enables a user to select one or more pages."""
        return self._page_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a dataset from saved datasets")
    def list_datasets_for_selection(self, trans, **kwargs):
        """Returns HTML that enables a user to select one or more datasets."""
        return self._datasets_selection_grid(trans, **kwargs)

    @web.expose
    def get_editor_iframe(self, trans):
        """Returns the document for the page editor's iframe."""
        return trans.fill_template("page/wymiframe.mako")

    def get_page(self, trans, id, check_ownership=True, check_accessible=False):
        """Get a page from the database by id."""
        # Load history from database
        id = self.decode_id(id)
        page = trans.sa_session.query(model.Page).get(id)
        if not page:
            error("Page not found")
        else:
            return self.security_check(trans, page, check_ownership, check_accessible)

    def get_item(self, trans, id):
        return self.get_page(trans, id)

    def _get_embedded_history_html(self, trans, decoded_id):
        """
        Returns html suitable for embedding in another page.
""" # histories embedded in pages are set to importable when embedded, check for access here history = self.history_manager.get_accessible( decoded_id, trans.user, current_history=trans.history) # create ownership flag for template, dictify models # note: adding original annotation since this is published - get_dict returns user-based annos user_is_owner = trans.user == history.user history.annotation = self.get_item_annotation_str( trans.sa_session, history.user, history) # include all datasets: hidden, deleted, and purged history_dictionary = self.history_serializer.serialize_to_view( history, view='detailed', user=trans.user, trans=trans) contents = self.history_serializer.serialize_contents(history, 'contents', trans=trans, user=trans.user) history_dictionary['annotation'] = history.annotation filled = trans.fill_template("history/embed.mako", item=history, user_is_owner=user_is_owner, history_dict=history_dictionary, content_dicts=contents) return filled def _get_embedded_visualization_html(self, trans, encoded_id): """ Returns html suitable for embedding visualizations in another page. """ visualization = self.get_visualization(trans, encoded_id, False, True) visualization.annotation = self.get_item_annotation_str( trans.sa_session, visualization.user, visualization) if not visualization: return None # Fork to template based on visualization.type (registry or builtin). if ((trans.app.visualizations_registry and visualization.type in trans.app.visualizations_registry.plugins) and (visualization.type not in trans.app.visualizations_registry.BUILT_IN_VISUALIZATIONS)): # if a registry visualization, load a version into an iframe :( # TODO: simplest path from A to B but not optimal - will be difficult to do reg visualizations any other way # TODO: this will load the visualization twice (once above, once when the iframe src calls 'saved') encoded_visualization_id = trans.security.encode_id( visualization.id) return trans.fill_template( 'visualization/embed_in_frame.mako', item=visualization, encoded_visualization_id=encoded_visualization_id, content_only=True) return trans.fill_template("visualization/embed.mako", item=visualization, item_data=None) def _get_embed_html(self, trans, item_class, item_id): """ Returns HTML for embedding an item in a page. """ item_class = self.get_class(item_class) encoded_id, decoded_id = get_page_identifiers(item_id, trans.app) if item_class == model.History: return self._get_embedded_history_html(trans, decoded_id) elif item_class == model.HistoryDatasetAssociation: dataset = self.hda_manager.get_accessible(decoded_id, trans.user) dataset = self.hda_manager.error_if_uploading(dataset) dataset.annotation = self.get_item_annotation_str( trans.sa_session, dataset.history.user, dataset) if dataset: data = self.hda_manager.text_data(dataset) return trans.fill_template("dataset/embed.mako", item=dataset, item_data=data) elif item_class == model.StoredWorkflow: workflow = self.get_stored_workflow(trans, encoded_id, False, True) workflow.annotation = self.get_item_annotation_str( trans.sa_session, workflow.user, workflow) if workflow: self.get_stored_workflow_steps(trans, workflow) return trans.fill_template( "workflow/embed.mako", item=workflow, item_data=workflow.latest_workflow.steps) elif item_class == model.Visualization: return self._get_embedded_visualization_html(trans, encoded_id) elif item_class == model.Page: pass
def ready_galaxy_markdown_for_export(trans, internal_galaxy_markdown):
    """Fill in details needed to render Galaxy flavored markdown.

    Take it from a minimal internal version to an externally render-able version
    with more details populated and actual IDs replaced with encoded IDs to render
    external links. Return expanded markdown and extra data useful for rendering
    custom container tags.
    """
    hdas_manager = HDAManager(trans.app)
    workflows_manager = WorkflowsManager(trans.app)
    extra_rendering_data = {}

    def _remap(container, line):
        id_match = re.search(ID_PATTERN, line)
        object_id = None
        encoded_id = None
        if id_match:
            object_id = int(id_match.group(2))
            encoded_id = trans.security.encode_id(object_id)
            line = line.replace(id_match.group(), "%s=%s" % (id_match.group(1), encoded_id))

        def ensure_rendering_data_for(object_type, encoded_id):
            if object_type not in extra_rendering_data:
                extra_rendering_data[object_type] = {}
            object_type_data = extra_rendering_data[object_type]
            if encoded_id not in object_type_data:
                object_type_data[encoded_id] = {}
            return object_type_data[encoded_id]

        def extend_history_dataset_rendering_data(key, val, default_val):
            ensure_rendering_data_for("history_datasets", encoded_id)[key] = val or default_val

        if container == "history_dataset_display":
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            if "history_datasets" not in extra_rendering_data:
                extra_rendering_data["history_datasets"] = {}
            extend_history_dataset_rendering_data("name", hda.name, "")
        elif container == "history_dataset_peek":
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            peek = hda.peek
            extend_history_dataset_rendering_data("peek", peek, "*No Dataset Peek Available*")
        elif container == "history_dataset_info":
            hda = hdas_manager.get_accessible(object_id, trans.user)
            info = hda.info
            extend_history_dataset_rendering_data("info", info, "*No Dataset Info Available*")
        elif container == "workflow_display":
            # TODO: should be workflow id...
            stored_workflow = workflows_manager.get_stored_accessible_workflow(trans, encoded_id)
            ensure_rendering_data_for("workflows", encoded_id)["name"] = stored_workflow.name
        elif container == "history_dataset_collection_display":
            collection_manager = DatasetCollectionManager(trans.app)
            hdca = collection_manager.get_dataset_collection_instance(trans, "history", encoded_id)
            hdca_serializer = HDCASerializer(trans.app)
            hdca_view = hdca_serializer.serialize_to_view(hdca, user=trans.user, trans=trans, view="summary")
            if "history_dataset_collections" not in extra_rendering_data:
                extra_rendering_data["history_dataset_collections"] = {}
            ensure_rendering_data_for("history_dataset_collections", encoded_id).update(hdca_view)
        elif container == "tool_stdout":
            job_manager = JobManager(trans.app)
            job = job_manager.get_accessible_job(trans, object_id)
            ensure_rendering_data_for("jobs", encoded_id)["tool_stdout"] = job.tool_stdout or "*No Standard Output Available*"
        elif container == "tool_stderr":
            job_manager = JobManager(trans.app)
            job = job_manager.get_accessible_job(trans, object_id)
            ensure_rendering_data_for("jobs", encoded_id)["tool_stderr"] = job.tool_stderr or "*No Standard Error Available*"
        return (line, False)

    export_markdown = _remap_galaxy_markdown_calls(_remap, internal_galaxy_markdown)
    return export_markdown, extra_rendering_data
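
# --- Illustration (assumption): a self-contained sketch of the id-remapping step performed by
# --- _remap above. The pattern and encoder below are toy stand-ins, not Galaxy's module-level
# --- ID_PATTERN or trans.security.encode_id.
import re

_TOY_ID_PATTERN = r"(history_dataset_id)=(\d+)"


def _toy_encode_id(object_id):
    # Placeholder for trans.security.encode_id; the real encoding is secret-keyed.
    return format(object_id ^ 0xABCDEF, "x")


line = "history_dataset_display(history_dataset_id=42)"
match = re.search(_TOY_ID_PATTERN, line)
if match:
    line = line.replace(match.group(), "%s=%s" % (match.group(1), _toy_encode_id(int(match.group(2)))))
print(line)  # prints: history_dataset_display(history_dataset_id=abcdc5)
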