def placeholderRenderForSave(trans, item_class, item_id, encode=False):
    """Render the save-time placeholder markup for an embedded item.

    Resolves the item named by ``item_class``/``item_id``, performs the
    appropriate access check, and fills ``PLACEHOLDER_TEMPLATE`` with the
    item's class, shorthand, id and display name.

    :param trans: the current transaction (provides ``sa_session``, ``app``,
        ``model`` and ``user``).
    :param item_class: one of ``'History'``, ``'HistoryDatasetAssociation'``,
        ``'StoredWorkflow'`` or ``'Visualization'`` (a key of
        ``PAGE_CLASS_MAPPING``; anything else raises ``KeyError``).
    :param item_id: encoded or decoded item id, normalized via
        ``get_page_identifiers``.
    :param encode: when True, embed the encoded id in the placeholder;
        otherwise embed the decoded (database) id.
    :returns: the formatted placeholder string.
    """
    encoded_item_id, decoded_item_id = get_page_identifiers(item_id, trans.app)
    item_name = ''
    if item_class == 'History':
        history = trans.sa_session.query(trans.model.History).get(decoded_item_id)
        # security_check may swap in a checked/filtered instance.
        history = base.security_check(trans, history, False, True)
        item_name = history.name
    elif item_class == 'HistoryDatasetAssociation':
        # FIX: the original also ran a raw session query here whose result was
        # immediately overwritten — get_accessible both loads the HDA and
        # performs the access check, so it is the only lookup needed.
        hda_manager = HDAManager(trans.app)
        hda = hda_manager.get_accessible(decoded_item_id, trans.user)
        item_name = hda.name
    elif item_class == 'StoredWorkflow':
        wf = trans.sa_session.query(trans.model.StoredWorkflow).get(decoded_item_id)
        wf = base.security_check(trans, wf, False, True)
        item_name = wf.name
    elif item_class == 'Visualization':
        visualization = trans.sa_session.query(trans.model.Visualization).get(decoded_item_id)
        visualization = base.security_check(trans, visualization, False, True)
        # Visualizations use `title` rather than `name`.
        item_name = visualization.title
    class_shorthand = PAGE_CLASS_MAPPING[item_class]
    if encode:
        item_id = encoded_item_id
    else:
        item_id = decoded_item_id
    return PLACEHOLDER_TEMPLATE.format(
        item_class=item_class,
        class_shorthand=class_shorthand,
        class_shorthand_lower=class_shorthand.lower(),
        item_id=item_id,
        item_name=item_name)
def __init__(self, app):
    """Initialize the controller and construct the managers it delegates to."""
    super().__init__(app)
    # Page/history/dataset/workflow managers all share the same app instance.
    self.page_manager = PageManager(app)
    self.history_manager = HistoryManager(app)
    self.history_serializer = HistorySerializer(self.app)
    self.hda_manager = HDAManager(app)
    self.workflow_manager = WorkflowsManager(app)
def __init__(self, app):
    """Wire up the application context and the managers this service uses."""
    self.app = app
    # Shared SQLAlchemy session for this app.
    self.sa_session = app.model.context
    self.hda_manager = HDAManager(app)
    self.dataset_collection_manager = DatasetCollectionManager(app)
    self.ldda_manager = LDDAManager(app)
    # Convenience alias for decoding client-supplied encoded ids.
    self.decode_id = self.app.security.decode_id
def set_metadata(hda_manager: HDAManager, ldda_manager: LDDAManager, dataset_id, model_class='HistoryDatasetAssociation'):
    """Recompute datatype metadata for the dataset identified by ``dataset_id``.

    :param hda_manager: manager used to look up HDAs by id.
    :param ldda_manager: manager used to look up LDDAs by id.
    :param dataset_id: database id of the dataset association.
    :param model_class: either ``'HistoryDatasetAssociation'`` (default) or
        ``'LibraryDatasetDatasetAssociation'``.
    :raises ValueError: for any other ``model_class``.  (FIX: the original
        fell through with ``dataset`` unbound, raising an opaque NameError.)
    """
    if model_class == 'HistoryDatasetAssociation':
        dataset = hda_manager.by_id(dataset_id)
    elif model_class == 'LibraryDatasetDatasetAssociation':
        dataset = ldda_manager.by_id(dataset_id)
    else:
        raise ValueError("Unsupported model class: %s" % model_class)
    # Delegate to the datatype to (re)build metadata for this dataset.
    dataset.datatype.set_meta(dataset)
class PageController(BaseUIController, SharableMixin,
                     UsesStoredWorkflowMixin, UsesVisualizationMixin, UsesItemRatings):
    """Web controller for Galaxy pages: listing, creating, editing, sharing,
    rating, displaying and embedding other items inside a page.
    """

    # Grid instances backing the list/selection endpoints below.
    _page_list = PageListGrid()
    _all_published_list = PageAllPublishedGrid()
    _history_selection_grid = HistorySelectionGrid()
    _workflow_selection_grid = WorkflowSelectionGrid()
    _datasets_selection_grid = HistoryDatasetAssociationSelectionGrid()
    _page_selection_grid = PageSelectionGrid()
    _visualization_selection_grid = VisualizationSelectionGrid()

    def __init__(self, app):
        super(PageController, self).__init__(app)
        self.page_manager = PageManager(app)
        self.history_manager = HistoryManager(app)
        self.history_serializer = HistorySerializer(self.app)
        self.hda_manager = HDAManager(app)

    @web.expose
    @web.json
    @web.require_login()
    def list(self, trans, *args, **kwargs):
        """ List user's pages. """
        # Handle operation
        if 'operation' in kwargs and 'id' in kwargs:
            session = trans.sa_session
            operation = kwargs['operation'].lower()
            ids = util.listify(kwargs['id'])
            for id in ids:
                item = session.query(model.Page).get(self.decode_id(id))
                # 'delete' is the only operation handled; pages are soft-deleted.
                if operation == "delete":
                    item.deleted = True
                session.flush()
        # Build grid dictionary.
        grid = self._page_list(trans, *args, **kwargs)
        grid['shared_by_others'] = self._get_shared(trans)
        return grid

    @web.expose
    @web.json
    def list_published(self, trans, *args, **kwargs):
        # Same payload shape as `list`, but over all published pages.
        grid = self._all_published_list(trans, *args, **kwargs)
        grid['shared_by_others'] = self._get_shared(trans)
        return grid

    def _get_shared(self, trans):
        """Identify shared pages"""
        # Pages other users have shared with the current user, newest first.
        shared_by_others = trans.sa_session \
            .query(model.PageUserShareAssociation) \
            .filter_by(user=trans.get_user()) \
            .join(model.Page.table) \
            .filter(model.Page.deleted == false()) \
            .order_by(desc(model.Page.update_time)) \
            .all()
        return [{'username' : p.page.user.username, 'slug' : p.page.slug, 'title' : p.page.title} for p in shared_by_others]

    @web.legacy_expose_api
    @web.require_login("create pages")
    def create(self, trans, payload=None, **kwd):
        """
        Create a new page.

        GET returns the form description consumed by the client-side form
        builder; POST creates the page from ``payload``.
        """
        if trans.request.method == 'GET':
            return {
                'title'  : 'Create a new page',
                'inputs' : [{
                    'name'  : 'title',
                    'label' : 'Name'
                }, {
                    'name'  : 'slug',
                    'label' : 'Identifier',
                    'help'  : 'A unique identifier that will be used for public links to this page. This field can only contain lowercase letters, numbers, and dashes (-).'
                }, {
                    'name'  : 'annotation',
                    'label' : 'Annotation',
                    'help'  : 'A description of the page. The annotation is shown alongside published pages.'
                }, {
                    'name'    : 'content_format',
                    'label'   : 'Content Format',
                    'type'    : 'select',
                    'options' : [('HTML', 'html'), ('Markdown', 'markdown')],
                    'help'    : 'Use the traditional rich HTML editor or the newer experimental Markdown editor to create the page content. The HTML editor has several known bugs, is unmaintained and pages created with it will be read-only in future releases of Galaxy.'
                }]
            }
        else:
            try:
                page = self.page_manager.create(trans, payload)
            except exceptions.MessageException as e:
                return self.message_exception(trans, unicodify(e))
            return {'message': 'Page \'%s\' successfully created.' % page.title, 'status': 'success'}

    @web.legacy_expose_api
    @web.require_login("edit pages")
    def edit(self, trans, payload=None, **kwd):
        """
        Edit a page's attributes.

        GET returns the attribute-edit form description; PUT/POST validates
        ``payload`` (title, slug, annotation) and persists the changes.
        """
        id = kwd.get('id')
        if not id:
            return self.message_exception(trans, 'No page id received for editing.')
        decoded_id = self.decode_id(id)
        user = trans.get_user()
        p = trans.sa_session.query(model.Page).get(decoded_id)
        if trans.request.method == 'GET':
            # Older pages may predate slugs; generate one on demand.
            if p.slug is None:
                self.create_item_slug(trans.sa_session, p)
            return {
                'title'  : 'Edit page attributes',
                'inputs' : [{
                    'name'  : 'title',
                    'label' : 'Name',
                    'value' : p.title
                }, {
                    'name'  : 'slug',
                    'label' : 'Identifier',
                    'value' : p.slug,
                    'help'  : 'A unique identifier that will be used for public links to this page. This field can only contain lowercase letters, numbers, and dashes (-).'
                }, {
                    'name'  : 'annotation',
                    'label' : 'Annotation',
                    'value' : self.get_item_annotation_str(trans.sa_session, user, p),
                    'help'  : 'A description of the page. The annotation is shown alongside published pages.'
                }]
            }
        else:
            p_title = payload.get('title')
            p_slug = payload.get('slug')
            p_annotation = payload.get('annotation')
            if not p_title:
                return self.message_exception(trans, 'Please provide a page name is required.')
            elif not p_slug:
                return self.message_exception(trans, 'Please provide a unique identifier.')
            elif not self._is_valid_slug(p_slug):
                return self.message_exception(trans, 'Page identifier can only contain lowercase letters, numbers, and dashes (-).')
            elif p_slug != p.slug and trans.sa_session.query(model.Page).filter_by(user=p.user, slug=p_slug, deleted=False).first():
                # Slug collision with another live page owned by the same user.
                return self.message_exception(trans, 'Page id must be unique.')
            else:
                p.title = p_title
                p.slug = p_slug
                if p_annotation:
                    p_annotation = sanitize_html(p_annotation)
                    self.add_item_annotation(trans.sa_session, user, p, p_annotation)
                trans.sa_session.add(p)
                trans.sa_session.flush()
            return {'message': 'Attributes of \'%s\' successfully saved.' % p.title, 'status': 'success'}

    @web.expose
    @web.require_login("edit pages")
    def edit_content(self, trans, id):
        """
        Render the main page editor interface.
        """
        id = self.decode_id(id)
        page = trans.sa_session.query(model.Page).get(id)
        # Only the page owner may open the editor.
        assert page.user == trans.user
        return trans.fill_template("page/editor.mako", page=page)

    @web.expose
    @web.require_login("use Galaxy pages")
    def share(self, trans, id, email="", use_panels=False):
        """
        Handle sharing with an individual user.
        """
        msg = mtype = None
        page = trans.sa_session.query(model.Page).get(self.decode_id(id))
        if email:
            other = trans.sa_session.query(model.User) \
                .filter(and_(model.User.table.c.email == email,
                             model.User.table.c.deleted == false())) \
                .first()
            if not other:
                mtype = "error"
                msg = ("User '%s' does not exist" % escape(email))
            elif other == trans.get_user():
                mtype = "error"
                msg = ("You cannot share a page with yourself")
            elif trans.sa_session.query(model.PageUserShareAssociation) \
                    .filter_by(user=other, page=page).count() > 0:
                mtype = "error"
                msg = ("Page already shared with '%s'" % escape(email))
            else:
                share = model.PageUserShareAssociation()
                share.page = page
                share.user = other
                session = trans.sa_session
                session.add(share)
                # Sharing requires a slug for the public link.
                self.create_item_slug(session, page)
                session.flush()
                page_title = escape(page.title)
                other_email = escape(other.email)
                trans.set_message("Page '%s' shared with user '%s'" % (page_title, other_email))
                return trans.response.send_redirect(url_for("/pages/sharing?id=%s" % id))
        # No email given (or an error above): render the share form.
        return trans.fill_template("/ind_share_base.mako",
                                   message=msg, messagetype=mtype,
                                   item=page, email=email, use_panels=use_panels)

    @web.expose
    @web.require_login()
    def display(self, trans, id):
        # Resolve the encoded id, then delegate to the username/slug renderer.
        id = self.decode_id(id)
        page = trans.sa_session.query(model.Page).get(id)
        if not page:
            raise web.httpexceptions.HTTPNotFound()
        return self.display_by_username_and_slug(trans, page.user.username, page.slug)

    @web.expose
    def display_by_username_and_slug(self, trans, username, slug):
        """ Display page based on a username and slug. """
        # Get page.
        session = trans.sa_session
        user = session.query(model.User).filter_by(username=username).first()
        page = trans.sa_session.query(model.Page).filter_by(user=user, slug=slug, deleted=False).first()
        if page is None:
            raise web.httpexceptions.HTTPNotFound()
        # Security check raises error if user cannot access page.
        self.security_check(trans, page, False, True)
        latest_revision = page.latest_revision
        if latest_revision.content_format == "html":
            # Process page content.
            processor = PageContentProcessor(trans, self._get_embed_html)
            processor.feed(page.latest_revision.content)
            # Output is string, so convert to unicode for display.
            page_content = unicodify(processor.output(), 'utf-8')
            template = "page/display.mako"
        else:
            # Markdown pages are rendered client-side; pass the encoded page id.
            page_content = trans.security.encode_id(page.id)
            template = "page/display_markdown.mako"
        # Get rating data.
        user_item_rating = 0
        if trans.get_user():
            user_item_rating = self.get_user_item_rating(trans.sa_session, trans.get_user(), page)
            if user_item_rating:
                user_item_rating = user_item_rating.rating
            else:
                user_item_rating = 0
        ave_item_rating, num_ratings = self.get_ave_item_rating_data(trans.sa_session, page)
        return trans.fill_template_mako(template,
                                        item=page,
                                        item_data=page_content,
                                        user_item_rating=user_item_rating,
                                        ave_item_rating=ave_item_rating,
                                        num_ratings=num_ratings,
                                        content_only=True)

    @web.expose
    @web.require_login("use Galaxy pages")
    def set_accessible_async(self, trans, id=None, accessible=False):
        """ Set page's importable attribute and slug. """
        page = self.get_page(trans, id)
        # Only set if importable value would change; this prevents a change in the update_time unless attribute really changed.
        importable = accessible in ['True', 'true', 't', 'T']
        if page.importable != importable:
            if importable:
                self._make_item_accessible(trans.sa_session, page)
            else:
                page.importable = importable
            trans.sa_session.flush()
        return

    @web.expose
    @web.require_login("rate items")
    @web.json
    def rate_async(self, trans, id, rating):
        """ Rate a page asynchronously and return updated community data. """
        page = self.get_page(trans, id, check_ownership=False, check_accessible=True)
        if not page:
            return trans.show_error_message("The specified page does not exist.")
        # Rate page.
        self.rate_item(trans.sa_session, trans.get_user(), page, rating)
        return self.get_ave_item_rating_data(trans.sa_session, page)

    @web.expose
    def get_embed_html_async(self, trans, id):
        """ Returns HTML for embedding a workflow in a page. """
        # TODO: user should be able to embed any item he has access to. see display_by_username_and_slug for security code.
        page = self.get_page(trans, id)
        if page:
            return "Embedded Page '%s'" % page.title

    @web.expose
    @web.json
    @web.require_login("use Galaxy pages")
    def get_name_and_link_async(self, trans, id=None):
        """ Returns page's name and link. """
        page = self.get_page(trans, id)
        # Generate a slug on demand so the returned link is valid.
        if self.create_item_slug(trans.sa_session, page):
            trans.sa_session.flush()
        return_dict = {"name": page.title,
                       "link": url_for(controller='page',
                                       action="display_by_username_and_slug",
                                       username=page.user.username,
                                       slug=page.slug)}
        return return_dict

    @web.expose
    @web.json
    @web.require_login("select a history from saved histories")
    def list_histories_for_selection(self, trans, **kwargs):
        """ Returns HTML that enables a user to select one or more histories. """
        return self._history_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a workflow from saved workflows")
    def list_workflows_for_selection(self, trans, **kwargs):
        """ Returns HTML that enables a user to select one or more workflows. """
        return self._workflow_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a visualization from saved visualizations")
    def list_visualizations_for_selection(self, trans, **kwargs):
        """ Returns HTML that enables a user to select one or more visualizations. """
        return self._visualization_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a page from saved pages")
    def list_pages_for_selection(self, trans, **kwargs):
        """ Returns HTML that enables a user to select one or more pages. """
        return self._page_selection_grid(trans, **kwargs)

    @web.expose
    @web.json
    @web.require_login("select a dataset from saved datasets")
    def list_datasets_for_selection(self, trans, **kwargs):
        """ Returns HTML that enables a user to select one or more datasets. """
        return self._datasets_selection_grid(trans, **kwargs)

    @web.expose
    def get_editor_iframe(self, trans):
        """ Returns the document for the page editor's iframe. """
        return trans.fill_template("page/wymiframe.mako")

    def get_page(self, trans, id, check_ownership=True, check_accessible=False):
        """Get a page from the database by id."""
        # Load history from database
        id = self.decode_id(id)
        page = trans.sa_session.query(model.Page).get(id)
        if not page:
            error("Page not found")
        else:
            return self.security_check(trans, page, check_ownership, check_accessible)

    def get_item(self, trans, id):
        # SharableMixin hook: pages are the items this controller manages.
        return self.get_page(trans, id)

    def _get_embedded_history_html(self, trans, decoded_id):
        """
        Returns html suitable for embedding in another page.
        """
        # histories embedded in pages are set to importable when embedded, check for access here
        history = self.history_manager.get_accessible(decoded_id, trans.user, current_history=trans.history)
        # create ownership flag for template, dictify models
        # note: adding original annotation since this is published - get_dict returns user-based annos
        user_is_owner = trans.user == history.user
        history.annotation = self.get_item_annotation_str(trans.sa_session, history.user, history)
        # include all datasets: hidden, deleted, and purged
        history_dictionary = self.history_serializer.serialize_to_view(
            history, view='detailed', user=trans.user, trans=trans
        )
        contents = self.history_serializer.serialize_contents(history, 'contents', trans=trans, user=trans.user)
        history_dictionary['annotation'] = history.annotation
        filled = trans.fill_template("history/embed.mako",
                                     item=history,
                                     user_is_owner=user_is_owner,
                                     history_dict=history_dictionary,
                                     content_dicts=contents)
        return filled

    def _get_embedded_visualization_html(self, trans, encoded_id):
        """
        Returns html suitable for embedding visualizations in another page.
        """
        visualization = self.get_visualization(trans, encoded_id, False, True)
        visualization.annotation = self.get_item_annotation_str(trans.sa_session, visualization.user, visualization)
        # NOTE(review): this None-check appears dead — the attribute assignment
        # above would already raise if get_visualization returned None; confirm.
        if not visualization:
            return None
        # Fork to template based on visualization.type (registry or builtin).
        if((trans.app.visualizations_registry and visualization.type in trans.app.visualizations_registry.plugins)
                and (visualization.type not in trans.app.visualizations_registry.BUILT_IN_VISUALIZATIONS)):
            # if a registry visualization, load a version into an iframe :(
            # TODO: simplest path from A to B but not optimal - will be difficult to do reg visualizations any other way
            # TODO: this will load the visualization twice (once above, once when the iframe src calls 'saved')
            encoded_visualization_id = trans.security.encode_id(visualization.id)
            return trans.fill_template('visualization/embed_in_frame.mako',
                                       item=visualization,
                                       encoded_visualization_id=encoded_visualization_id,
                                       content_only=True)
        return trans.fill_template("visualization/embed.mako", item=visualization, item_data=None)

    def _get_embed_html(self, trans, item_class, item_id):
        """
        Returns HTML for embedding an item in a page.
        """
        item_class = self.get_class(item_class)
        encoded_id, decoded_id = get_page_identifiers(item_id, trans.app)
        if item_class == model.History:
            return self._get_embedded_history_html(trans, decoded_id)
        elif item_class == model.HistoryDatasetAssociation:
            dataset = self.hda_manager.get_accessible(decoded_id, trans.user)
            dataset = self.hda_manager.error_if_uploading(dataset)
            dataset.annotation = self.get_item_annotation_str(trans.sa_session, dataset.history.user, dataset)
            if dataset:
                data = self.hda_manager.text_data(dataset)
                return trans.fill_template("dataset/embed.mako", item=dataset, item_data=data)
        elif item_class == model.StoredWorkflow:
            workflow = self.get_stored_workflow(trans, encoded_id, False, True)
            workflow.annotation = self.get_item_annotation_str(trans.sa_session, workflow.user, workflow)
            if workflow:
                self.get_stored_workflow_steps(trans, workflow)
                return trans.fill_template("workflow/embed.mako", item=workflow, item_data=workflow.latest_workflow.steps)
        elif item_class == model.Visualization:
            return self._get_embedded_visualization_html(trans, encoded_id)
        elif item_class == model.Page:
            # Embedding a page inside a page is not supported.
            pass
def set_up_managers(self):
    """Create the managers the collection tests exercise, after base setup."""
    super(DatasetCollectionManagerTestCase, self).set_up_managers()
    self.dataset_manager = DatasetManager(self.app)
    self.hda_manager = HDAManager(self.app)
    self.history_manager = HistoryManager(self.app)
    self.collection_manager = DatasetCollectionManager(self.app)
class DatasetCollectionManagerTestCase(BaseTestCase, CreatesCollectionsMixin):
    """Unit tests for DatasetCollectionManager: list-collection creation from
    element identifiers and from objects, and dict-based updates.
    """

    def set_up_managers(self):
        # Managers under test, built on the shared test app.
        super(DatasetCollectionManagerTestCase, self).set_up_managers()
        self.dataset_manager = DatasetManager(self.app)
        self.hda_manager = HDAManager(self.app)
        self.history_manager = HistoryManager(self.app)
        self.collection_manager = DatasetCollectionManager(self.app)

    def test_create_simple_list(self):
        # Fixture: one history owning three HDAs.
        owner = self.user_manager.create(**user2_data)
        history = self.history_manager.create(name='history1', user=owner)
        hda1 = self.hda_manager.create(name='one', history=history, dataset=self.dataset_manager.create())
        hda2 = self.hda_manager.create(name='two', history=history, dataset=self.dataset_manager.create())
        hda3 = self.hda_manager.create(name='three', history=history, dataset=self.dataset_manager.create())

        self.log("should be able to create a new Collection via ids")
        element_identifiers = self.build_element_identifiers([hda1, hda2, hda3])
        hdca = self.collection_manager.create(self.trans, history, 'test collection', 'list',
                                              element_identifiers=element_identifiers)
        self.assertIsInstance(hdca, model.HistoryDatasetCollectionAssociation)
        self.assertEqual(hdca.name, 'test collection')
        # hid 4: the collection association follows the three HDAs in the history.
        self.assertEqual(hdca.hid, 4)
        self.assertFalse(hdca.deleted)
        self.assertTrue(hdca.visible)

        self.log("should contain an underlying, well-formed DatasetCollection")
        self.assertIsInstance(hdca.collection, model.DatasetCollection)
        collection = hdca.collection
        self.assertEqual(collection.collection_type, 'list')
        self.assertEqual(collection.state, 'ok')
        self.assertEqual(len(collection.dataset_instances), 3)
        self.assertEqual(len(collection.elements), 3)

        self.log("and that collection should have three well-formed Elements")
        # Each element keeps its identifier, position, type, and wrapped HDA.
        self.assertIsInstance(collection.elements[0], model.DatasetCollectionElement)
        self.assertEqual(collection.elements[0].element_identifier, 'one')
        self.assertEqual(collection.elements[0].element_index, 0)
        self.assertEqual(collection.elements[0].element_type, 'hda')
        self.assertEqual(collection.elements[0].element_object, hda1)
        self.assertIsInstance(collection.elements[1], model.DatasetCollectionElement)
        self.assertEqual(collection.elements[1].element_identifier, 'two')
        self.assertEqual(collection.elements[1].element_index, 1)
        self.assertEqual(collection.elements[1].element_type, 'hda')
        self.assertEqual(collection.elements[1].element_object, hda2)
        self.assertIsInstance(collection.elements[2], model.DatasetCollectionElement)
        self.assertEqual(collection.elements[2].element_identifier, 'three')
        self.assertEqual(collection.elements[2].element_index, 2)
        self.assertEqual(collection.elements[2].element_type, 'hda')
        self.assertEqual(collection.elements[2].element_object, hda3)

        self.log("should be able to create a new Collection via objects")
        elements = dict(one=hda1, two=hda2, three=hda3)
        hdca2 = self.collection_manager.create(self.trans, history, 'test collection 2', 'list', elements=elements)
        self.assertIsInstance(hdca2, model.HistoryDatasetCollectionAssociation)

    def test_update_from_dict(self):
        # Fixture: a list collection built directly from HDA objects.
        owner = self.user_manager.create(**user2_data)
        history = self.history_manager.create(name='history1', user=owner)
        hda1 = self.hda_manager.create(name='one', history=history, dataset=self.dataset_manager.create())
        hda2 = self.hda_manager.create(name='two', history=history, dataset=self.dataset_manager.create())
        hda3 = self.hda_manager.create(name='three', history=history, dataset=self.dataset_manager.create())
        elements = dict(one=hda1, two=hda2, three=hda3)
        hdca = self.collection_manager.create(self.trans, history, 'test collection', 'list', elements=elements)

        self.log("should be set from a dictionary")
        self.collection_manager._set_from_dict(self.trans, hdca, {
            'deleted': True,
            'visible': False,
            'name': 'New Name',
            # TODO: doesn't work
            # 'tags' : [ 'one', 'two', 'three' ]
            # 'annotations' : [?]
        })
        self.assertEqual(hdca.name, 'New Name')
        self.assertTrue(hdca.deleted)
        self.assertFalse(hdca.visible)
def set_up_managers(self):
    """Create the history/dataset/HDA managers used by the HDA tests."""
    super(HDAManagerTestCase, self).set_up_managers()
    self.history_mgr = HistoryManager(self.app)
    self.dataset_mgr = DatasetManager(self.app)
    self.hda_mgr = HDAManager(self.app)
class HDAManagerTestCase(BaseTestCase):
    """Unit tests for HDAManager: querying, creation, copying, delete/purge,
    ownership and accessibility — including anonymous-user behavior.
    """

    def set_up_managers(self):
        super(HDAManagerTestCase, self).set_up_managers()
        self.history_mgr = HistoryManager(self.app)
        self.dataset_mgr = DatasetManager(self.app)
        self.hda_mgr = HDAManager(self.app)

    def test_base(self):
        hda_model = model.HistoryDatasetAssociation
        owner = self.user_mgr.create(self.trans, **user2_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        hda1 = self.hda_mgr.create(self.trans, history=history1, hid=1)
        hda2 = self.hda_mgr.create(self.trans, history=history1, hid=2)
        hda3 = self.hda_mgr.create(self.trans, history=history1, hid=3)

        self.log("should be able to query")
        hdas = self.trans.sa_session.query(hda_model).all()
        self.assertEqual(self.hda_mgr.list(self.trans), hdas)
        self.assertEqual(self.hda_mgr.one(self.trans, filters=(hda_model.id == hda1.id)), hda1)
        self.assertEqual(self.hda_mgr.by_id(self.trans, hda1.id), hda1)
        # by_ids preserves the order of the requested ids.
        self.assertEqual(self.hda_mgr.by_ids(self.trans, [hda2.id, hda1.id]), [hda2, hda1])

        self.log("should be able to limit and offset")
        self.assertEqual(self.hda_mgr.list(self.trans, limit=1), hdas[0:1])
        self.assertEqual(self.hda_mgr.list(self.trans, offset=1), hdas[1:])
        self.assertEqual(self.hda_mgr.list(self.trans, limit=1, offset=1), hdas[1:2])
        self.assertEqual(self.hda_mgr.list(self.trans, limit=0), [])
        self.assertEqual(self.hda_mgr.list(self.trans, offset=3), [])

        self.log("should be able to order")
        self.assertEqual(self.hda_mgr.list(self.trans, order_by=sqlalchemy.desc(hda_model.create_time)),
                         [hda3, hda2, hda1])

    def test_create(self):
        owner = self.user_mgr.create(self.trans, **user2_data)
        non_owner = self.user_mgr.create(self.trans, **user3_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        dataset1 = self.dataset_mgr.create(self.trans)

        self.log("should be able to create a new HDA with a specified history and dataset")
        hda1 = self.hda_mgr.create(self.trans, history=history1, dataset=dataset1)
        self.assertIsInstance(hda1, model.HistoryDatasetAssociation)
        self.assertEqual(hda1, self.trans.sa_session.query(model.HistoryDatasetAssociation).get(hda1.id))
        self.assertEqual(hda1.history, history1)
        self.assertEqual(hda1.dataset, dataset1)
        self.assertEqual(hda1.hid, 1)

        self.log("should be able to create a new HDA with only a specified history and no dataset")
        hda2 = self.hda_mgr.create(self.trans, history=history1)
        self.assertIsInstance(hda2, model.HistoryDatasetAssociation)
        self.assertIsInstance(hda2.dataset, model.Dataset)
        self.assertEqual(hda2.history, history1)
        self.assertEqual(hda2.hid, 2)

        self.log("should be able to create a new HDA with no history and no dataset")
        hda3 = self.hda_mgr.create(self.trans, hid=None)
        self.assertIsInstance(hda3, model.HistoryDatasetAssociation)
        self.assertIsInstance(hda3.dataset, model.Dataset, msg="dataset will be auto created")
        self.assertIsNone(hda3.history, msg="history will be None")
        self.assertEqual(hda3.hid, None, msg="should allow setting hid to None (or any other value)")

    def test_copy_from_hda(self):
        owner = self.user_mgr.create(self.trans, **user2_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        dataset1 = self.dataset_mgr.create(self.trans)
        hda1 = self.hda_mgr.create(self.trans, history=history1, dataset=dataset1)

        self.log("should be able to copy an HDA")
        hda2 = self.hda_mgr.copy(self.trans, hda1, history=history1)
        self.assertIsInstance(hda2, model.HistoryDatasetAssociation)
        self.assertEqual(hda2, self.trans.sa_session.query(model.HistoryDatasetAssociation).get(hda2.id))
        # Copy shares the underlying dataset but is a distinct association.
        self.assertEqual(hda2.name, hda1.name)
        self.assertEqual(hda2.history, hda1.history)
        self.assertEqual(hda2.dataset, hda1.dataset)
        self.assertNotEqual(hda2, hda1)

    # def test_copy_from_ldda( self ):
    #     owner = self.user_mgr.create( self.trans, **user2_data )
    #     history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
    #
    #     self.log( "should be able to copy an HDA" )
    #     hda2 = self.hda_mgr.copy_ldda( self.trans, history1, hda1 )

    def test_delete(self):
        owner = self.user_mgr.create(self.trans, **user2_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        dataset1 = self.dataset_mgr.create(self.trans)
        item1 = self.hda_mgr.create(self.trans, history=history1, dataset=dataset1)

        self.log("should be able to delete and undelete an hda")
        self.assertFalse(item1.deleted)
        self.assertEqual(self.hda_mgr.delete(self.trans, item1), item1)
        self.assertTrue(item1.deleted)
        self.assertEqual(self.hda_mgr.undelete(self.trans, item1), item1)
        self.assertFalse(item1.deleted)

    def test_purge_allowed(self):
        # Purging is gated by app config.
        self.trans.app.config.allow_user_dataset_purge = True
        owner = self.user_mgr.create(self.trans, **user2_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        dataset1 = self.dataset_mgr.create(self.trans)
        item1 = self.hda_mgr.create(self.trans, history=history1, dataset=dataset1)

        self.log("should purge an hda if config does allow")
        self.assertFalse(item1.purged)
        self.assertEqual(self.hda_mgr.purge(self.trans, item1), item1)
        self.assertTrue(item1.purged)

    def test_purge_not_allowed(self):
        self.trans.app.config.allow_user_dataset_purge = False
        owner = self.user_mgr.create(self.trans, **user2_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        dataset1 = self.dataset_mgr.create(self.trans)
        item1 = self.hda_mgr.create(self.trans, history=history1, dataset=dataset1)

        self.log("should raise an error when purging an hda if config does not allow")
        self.assertFalse(item1.purged)
        self.assertRaises(exceptions.ConfigDoesNotAllowException,
                          self.hda_mgr.purge, self.trans, item1)
        self.assertFalse(item1.purged)

    def test_ownable(self):
        owner = self.user_mgr.create(self.trans, **user2_data)
        non_owner = self.user_mgr.create(self.trans, **user3_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        dataset1 = self.dataset_mgr.create(self.trans)
        item1 = self.hda_mgr.create(self.trans, history1, dataset1)

        self.log("should be able to poll whether a given user owns an item")
        self.assertTrue(self.hda_mgr.is_owner(self.trans, item1, owner))
        self.assertFalse(self.hda_mgr.is_owner(self.trans, item1, non_owner))

        self.log("should raise an error when checking ownership with non-owner")
        self.assertRaises(exceptions.ItemOwnershipException,
                          self.hda_mgr.error_unless_owner, self.trans, item1, non_owner)

        self.log("should raise an error when checking ownership with anonymous")
        self.assertRaises(exceptions.ItemOwnershipException,
                          self.hda_mgr.error_unless_owner, self.trans, item1, None)

        self.log("should not raise an error when checking ownership with owner")
        self.assertEqual(self.hda_mgr.error_unless_owner(self.trans, item1, owner), item1)

        self.log("should not raise an error when checking ownership with admin")
        self.assertEqual(self.hda_mgr.error_unless_owner(self.trans, item1, self.admin_user), item1)

    def test_accessible(self):
        owner = self.user_mgr.create(self.trans, **user2_data)
        non_owner = self.user_mgr.create(self.trans, **user3_data)
        history1 = self.history_mgr.create(self.trans, name='history1', user=owner)
        dataset1 = self.dataset_mgr.create(self.trans)
        item1 = self.hda_mgr.create(self.trans, history1, dataset1)

        self.log("(by default, dataset permissions are lax) should be accessible to all")
        for user in self.user_mgr.list(self.trans):
            self.assertTrue(self.hda_mgr.is_accessible(self.trans, item1, user))
        #TODO: set perms on underlying dataset and then test accessible

    def test_anon(self):
        # Anonymous sessions are modeled with user=None.
        anon_user = None
        self.trans.set_user(anon_user)
        history1 = self.history_mgr.create(self.trans, name='anon_history', user=anon_user)
        self.trans.set_history(history1)
        dataset1 = self.dataset_mgr.create(self.trans)
        item1 = self.hda_mgr.create(self.trans, history1, dataset1)

        self.log("should not raise an error when checking ownership/access on anonymous' own dataset")
        self.assertTrue(self.hda_mgr.is_accessible(self.trans, item1, anon_user))
        self.assertEqual(self.hda_mgr.error_unless_owner(self.trans, item1, anon_user), item1)

        self.log("should raise an error when checking ownership on anonymous' dataset with other user")
        non_owner = self.user_mgr.create(self.trans, **user3_data)
        self.assertRaises(exceptions.ItemOwnershipException,
                          self.hda_mgr.error_unless_owner, self.trans, item1, non_owner)
def ready_galaxy_markdown_for_export(trans, internal_galaxy_markdown):
    """Fill in details needed to render Galaxy flavored markdown.

    Take it from a minimal internal version to an externally render-able version
    with more details populated and actual IDs replaced with encoded IDs to render
    external links. Return expanded markdown and extra data useful for rendering
    custom container tags.

    :param trans: request transaction (provides ``app``, ``user`` and
        ``security`` for id encoding).
    :param internal_galaxy_markdown: markdown text whose container directives
        reference raw (decoded) database ids.
    :returns: tuple of ``(export_markdown, extra_rendering_data)`` where
        ``extra_rendering_data`` maps object type -> encoded id -> details.
    """
    hdas_manager = HDAManager(trans.app)
    workflows_manager = WorkflowsManager(trans.app)
    # object_type -> encoded_id -> rendering details; filled in per directive.
    extra_rendering_data = {}

    def _remap(container, line):
        # Replace the internal integer id in the directive with its encoded
        # counterpart so exported markdown only exposes encoded ids.
        id_match = re.search(ID_PATTERN, line)
        object_id = None
        encoded_id = None
        if id_match:
            object_id = int(id_match.group(2))
            encoded_id = trans.security.encode_id(object_id)
            line = line.replace(id_match.group(),
                                "%s=%s" % (id_match.group(1), encoded_id))

        def ensure_rendering_data_for(object_type, encoded_id):
            # Return (creating intermediate dicts as needed) the per-object
            # dict at extra_rendering_data[object_type][encoded_id].
            if object_type not in extra_rendering_data:
                extra_rendering_data[object_type] = {}
            object_type_data = extra_rendering_data[object_type]
            if encoded_id not in object_type_data:
                object_type_data[encoded_id] = {}
            return object_type_data[encoded_id]

        def extend_history_dataset_rendering_data(key, val, default_val):
            # Store val under the current dataset, falling back on default_val
            # for empty/None values.
            ensure_rendering_data_for("history_datasets",
                                      encoded_id)[key] = val or default_val

        if container == "history_dataset_display":
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            extend_history_dataset_rendering_data("name", hda.name, "")
        elif container == "history_dataset_peek":
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            peek = hda.peek
            extend_history_dataset_rendering_data(
                "peek", peek, "*No Dataset Peek Available*")
        elif container == "history_dataset_info":
            # Guard added for consistency with the peek/display branches above.
            assert object_id is not None
            hda = hdas_manager.get_accessible(object_id, trans.user)
            info = hda.info
            # Bug fix: the fallback previously said "Peek" (copy-paste from the
            # history_dataset_peek branch).
            extend_history_dataset_rendering_data(
                "info", info, "*No Dataset Info Available*")
        elif container == "workflow_display":
            # TODO: should be workflow id...
            stored_workflow = workflows_manager.get_stored_accessible_workflow(
                trans, encoded_id)
            ensure_rendering_data_for(
                "workflows", encoded_id)["name"] = stored_workflow.name
        elif container == "history_dataset_collection_display":
            collection_manager = DatasetCollectionManager(trans.app)
            hdca = collection_manager.get_dataset_collection_instance(
                trans, "history", encoded_id)
            hdca_serializer = HDCASerializer(trans.app)
            hdca_view = hdca_serializer.serialize_to_view(hdca,
                                                          user=trans.user,
                                                          trans=trans,
                                                          view="summary")
            ensure_rendering_data_for("history_dataset_collections",
                                      encoded_id).update(hdca_view)
        elif container == "tool_stdout":
            job_manager = JobManager(trans.app)
            job = job_manager.get_accessible_job(trans, object_id)
            ensure_rendering_data_for(
                "jobs", encoded_id
            )["tool_stdout"] = job.tool_stdout or "*No Standard Output Available*"
        elif container == "tool_stderr":
            job_manager = JobManager(trans.app)
            job = job_manager.get_accessible_job(trans, object_id)
            ensure_rendering_data_for(
                "jobs", encoded_id
            )["tool_stderr"] = job.tool_stderr or "*No Standard Error Available*"
        # False -> the remapper did not consume/suppress the line.
        return (line, False)

    export_markdown = _remap_galaxy_markdown_calls(_remap,
                                                   internal_galaxy_markdown)
    return export_markdown, extra_rendering_data
class DatasetCollectionManagerTestCase( BaseTestCase ):
    """Unit tests for DatasetCollectionManager: creating list collections from
    ids or objects and updating an HDCA from a dictionary."""

    def set_up_managers( self ):
        """Instantiate the managers these tests exercise."""
        super( DatasetCollectionManagerTestCase, self ).set_up_managers()
        self.dataset_manager = DatasetManager( self.app )
        self.hda_manager = HDAManager( self.app )
        self.history_manager = HistoryManager( self.app )
        self.collection_manager = DatasetCollectionManager( self.app )

    def build_element_identifiers( self, elements ):
        """Map HDAs to the identifier dicts DatasetCollectionManager.create expects
        (``src``/``name``/encoded ``id``)."""
        identifier_list = []
        for element in elements:
            src = 'hda'
            # if isinstance( element, model.DatasetCollection ):
            #    src = 'collection'#?
            # elif isinstance( element, model.LibraryDatasetDatasetAssociation ):
            #    src = 'ldda'#?
            encoded_id = self.trans.security.encode_id( element.id )
            identifier_list.append( dict( src=src, name=element.name, id=encoded_id ) )
        return identifier_list

    def test_create_simple_list( self ):
        """Create a 'list' collection from element identifiers and from objects."""
        owner = self.user_manager.create( **user2_data )
        history = self.history_manager.create( name='history1', user=owner )
        hda1 = self.hda_manager.create( name='one', history=history, dataset=self.dataset_manager.create() )
        hda2 = self.hda_manager.create( name='two', history=history, dataset=self.dataset_manager.create() )
        hda3 = self.hda_manager.create( name='three', history=history, dataset=self.dataset_manager.create() )

        self.log( "should be able to create a new Collection via ids" )
        element_identifiers = self.build_element_identifiers( [ hda1, hda2, hda3 ] )
        hdca = self.collection_manager.create( self.trans, history, 'test collection', 'list',
            element_identifiers=element_identifiers )
        self.assertIsInstance( hdca, model.HistoryDatasetCollectionAssociation )
        self.assertEqual( hdca.name, 'test collection' )
        # hid 4: the three HDAs above already occupy hids 1-3 in this history.
        self.assertEqual( hdca.hid, 4 )
        self.assertFalse( hdca.deleted )
        self.assertTrue( hdca.visible )

        # print 'hdca dir:'
        # for k in dir( hdca ):
        #     print k, getattr( hdca, k, '(?)' )

        self.log( "should contain an underlying, well-formed DatasetCollection" )
        self.assertIsInstance( hdca.collection, model.DatasetCollection )
        collection = hdca.collection
        self.assertEqual( collection.collection_type, 'list' )
        self.assertEqual( collection.state, 'ok' )
        self.assertEqual( len( collection.dataset_instances ), 3 )
        self.assertEqual( len( collection.elements ), 3 )

        # print 'hdca.collection dir:'
        # for k in dir( hdca.collection ):
        #     print k, getattr( hdca.collection, k, '(?)' )

        # elements = collection.elements
        # print 'hdca.collection element dir:'
        # for k in dir( elements[0] ):
        #     print k, getattr( elements[0], k, '(?)' )

        # Element order must follow the identifier list passed to create().
        self.log( "and that collection should have three well-formed Elements" )
        self.assertIsInstance( collection.elements[0], model.DatasetCollectionElement )
        self.assertEqual( collection.elements[0].element_identifier, 'one' )
        self.assertEqual( collection.elements[0].element_index, 0 )
        self.assertEqual( collection.elements[0].element_type, 'hda' )
        self.assertEqual( collection.elements[0].element_object, hda1 )

        self.assertIsInstance( collection.elements[1], model.DatasetCollectionElement )
        self.assertEqual( collection.elements[1].element_identifier, 'two' )
        self.assertEqual( collection.elements[1].element_index, 1 )
        self.assertEqual( collection.elements[1].element_type, 'hda' )
        self.assertEqual( collection.elements[1].element_object, hda2 )

        self.assertIsInstance( collection.elements[2], model.DatasetCollectionElement )
        self.assertEqual( collection.elements[2].element_identifier, 'three' )
        self.assertEqual( collection.elements[2].element_index, 2 )
        self.assertEqual( collection.elements[2].element_type, 'hda' )
        self.assertEqual( collection.elements[2].element_object, hda3 )

        # create() also accepts a mapping of identifier -> object directly.
        self.log( "should be able to create a new Collection via objects" )
        elements = dict( one=hda1, two=hda2, three=hda3 )
        hdca2 = self.collection_manager.create( self.trans, history, 'test collection 2', 'list', elements=elements )
        self.assertIsInstance( hdca2, model.HistoryDatasetCollectionAssociation )

    def test_update_from_dict( self ):
        """_set_from_dict applies name/deleted/visible updates to an HDCA."""
        owner = self.user_manager.create( **user2_data )
        history = self.history_manager.create( name='history1', user=owner )
        hda1 = self.hda_manager.create( name='one', history=history, dataset=self.dataset_manager.create() )
        hda2 = self.hda_manager.create( name='two', history=history, dataset=self.dataset_manager.create() )
        hda3 = self.hda_manager.create( name='three', history=history, dataset=self.dataset_manager.create() )

        elements = dict( one=hda1, two=hda2, three=hda3 )
        hdca = self.collection_manager.create( self.trans, history, 'test collection', 'list', elements=elements )

        self.log( "should be set from a dictionary" )
        self.collection_manager._set_from_dict( self.trans, hdca, {
            'deleted' : True,
            'visible' : False,
            'name' : 'New Name',
            # TODO: doesn't work
            # 'tags' : [ 'one', 'two', 'three' ]
            # 'annotations' : [?]
        })
        self.assertEqual( hdca.name, 'New Name' )
        self.assertTrue( hdca.deleted )
        self.assertFalse( hdca.visible )
def set_up_managers( self ): super( HDAManagerTestCase, self ).set_up_managers() self.history_mgr = HistoryManager( self.app ) self.dataset_mgr = DatasetManager( self.app ) self.hda_mgr = HDAManager( self.app )
class HDAManagerTestCase( BaseTestCase ):
    """Unit tests for HDAManager: querying, creation, copying, delete/purge,
    and ownership/accessibility checks."""

    def set_up_managers( self ):
        """Instantiate the managers these tests exercise."""
        super( HDAManagerTestCase, self ).set_up_managers()
        self.history_mgr = HistoryManager( self.app )
        self.dataset_mgr = DatasetManager( self.app )
        self.hda_mgr = HDAManager( self.app )

    def test_base( self ):
        """Basic query interface: list/one/by_id/by_ids, limit/offset, order_by."""
        hda_model = model.HistoryDatasetAssociation
        owner = self.user_mgr.create( self.trans, **user2_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        hda1 = self.hda_mgr.create( self.trans, history=history1, hid=1 )
        hda2 = self.hda_mgr.create( self.trans, history=history1, hid=2 )
        hda3 = self.hda_mgr.create( self.trans, history=history1, hid=3 )

        self.log( "should be able to query" )
        hdas = self.trans.sa_session.query( hda_model ).all()
        self.assertEqual( self.hda_mgr.list( self.trans ), hdas )
        self.assertEqual( self.hda_mgr.one( self.trans, filters=( hda_model.id == hda1.id ) ), hda1 )
        self.assertEqual( self.hda_mgr.by_id( self.trans, hda1.id ), hda1 )
        # by_ids preserves the order of the ids passed in.
        self.assertEqual( self.hda_mgr.by_ids( self.trans, [ hda2.id, hda1.id ] ), [ hda2, hda1 ] )

        self.log( "should be able to limit and offset" )
        self.assertEqual( self.hda_mgr.list( self.trans, limit=1 ), hdas[0:1] )
        self.assertEqual( self.hda_mgr.list( self.trans, offset=1 ), hdas[1:] )
        self.assertEqual( self.hda_mgr.list( self.trans, limit=1, offset=1 ), hdas[1:2] )
        self.assertEqual( self.hda_mgr.list( self.trans, limit=0 ), [] )
        self.assertEqual( self.hda_mgr.list( self.trans, offset=3 ), [] )

        self.log( "should be able to order" )
        self.assertEqual( self.hda_mgr.list( self.trans, order_by=sqlalchemy.desc( hda_model.create_time ) ),
            [ hda3, hda2, hda1 ] )

    def test_create( self ):
        """create() with dataset, with history only, and with neither."""
        owner = self.user_mgr.create( self.trans, **user2_data )
        non_owner = self.user_mgr.create( self.trans, **user3_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        dataset1 = self.dataset_mgr.create( self.trans )

        self.log( "should be able to create a new HDA with a specified history and dataset" )
        hda1 = self.hda_mgr.create( self.trans, history=history1, dataset=dataset1 )
        self.assertIsInstance( hda1, model.HistoryDatasetAssociation )
        self.assertEqual( hda1, self.trans.sa_session.query( model.HistoryDatasetAssociation ).get( hda1.id ) )
        self.assertEqual( hda1.history, history1 )
        self.assertEqual( hda1.dataset, dataset1 )
        self.assertEqual( hda1.hid, 1 )

        self.log( "should be able to create a new HDA with only a specified history and no dataset" )
        hda2 = self.hda_mgr.create( self.trans, history=history1 )
        self.assertIsInstance( hda2, model.HistoryDatasetAssociation )
        # A dataset is auto-created when none is given.
        self.assertIsInstance( hda2.dataset, model.Dataset )
        self.assertEqual( hda2.history, history1 )
        self.assertEqual( hda2.hid, 2 )

        self.log( "should be able to create a new HDA with no history and no dataset" )
        hda3 = self.hda_mgr.create( self.trans, hid=None )
        self.assertIsInstance( hda3, model.HistoryDatasetAssociation )
        self.assertIsInstance( hda3.dataset, model.Dataset, msg="dataset will be auto created" )
        self.assertIsNone( hda3.history, msg="history will be None" )
        self.assertEqual( hda3.hid, None, msg="should allow setting hid to None (or any other value)" )

    def test_copy_from_hda( self ):
        """copy() yields a distinct HDA sharing name/history/dataset with the source."""
        owner = self.user_mgr.create( self.trans, **user2_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        dataset1 = self.dataset_mgr.create( self.trans )
        hda1 = self.hda_mgr.create( self.trans, history=history1, dataset=dataset1 )

        self.log( "should be able to copy an HDA" )
        hda2 = self.hda_mgr.copy( self.trans, hda1, history=history1 )
        self.assertIsInstance( hda2, model.HistoryDatasetAssociation )
        self.assertEqual( hda2, self.trans.sa_session.query( model.HistoryDatasetAssociation ).get( hda2.id ) )
        self.assertEqual( hda2.name, hda1.name )
        self.assertEqual( hda2.history, hda1.history )
        self.assertEqual( hda2.dataset, hda1.dataset )
        self.assertNotEqual( hda2, hda1 )

    #def test_copy_from_ldda( self ):
    #    owner = self.user_mgr.create( self.trans, **user2_data )
    #    history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
    #
    #    self.log( "should be able to copy an HDA" )
    #    hda2 = self.hda_mgr.copy_ldda( self.trans, history1, hda1 )

    def test_delete( self ):
        """delete()/undelete() toggle the deleted flag and return the item."""
        owner = self.user_mgr.create( self.trans, **user2_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        dataset1 = self.dataset_mgr.create( self.trans )
        item1 = self.hda_mgr.create( self.trans, history=history1, dataset=dataset1 )

        self.log( "should be able to delete and undelete an hda" )
        self.assertFalse( item1.deleted )
        self.assertEqual( self.hda_mgr.delete( self.trans, item1 ), item1 )
        self.assertTrue( item1.deleted )
        self.assertEqual( self.hda_mgr.undelete( self.trans, item1 ), item1 )
        self.assertFalse( item1.deleted )

    def test_purge_allowed( self ):
        """purge() succeeds when allow_user_dataset_purge is enabled."""
        self.trans.app.config.allow_user_dataset_purge = True
        owner = self.user_mgr.create( self.trans, **user2_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        dataset1 = self.dataset_mgr.create( self.trans )
        item1 = self.hda_mgr.create( self.trans, history=history1, dataset=dataset1 )

        self.log( "should purge an hda if config does allow" )
        self.assertFalse( item1.purged )
        self.assertEqual( self.hda_mgr.purge( self.trans, item1 ), item1 )
        self.assertTrue( item1.purged )

    def test_purge_not_allowed( self ):
        """purge() raises ConfigDoesNotAllowException when purging is disabled."""
        self.trans.app.config.allow_user_dataset_purge = False
        owner = self.user_mgr.create( self.trans, **user2_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        dataset1 = self.dataset_mgr.create( self.trans )
        item1 = self.hda_mgr.create( self.trans, history=history1, dataset=dataset1 )

        self.log( "should raise an error when purging an hda if config does not allow" )
        self.assertFalse( item1.purged )
        self.assertRaises( exceptions.ConfigDoesNotAllowException, self.hda_mgr.purge, self.trans, item1 )
        # The item is left un-purged after the failed attempt.
        self.assertFalse( item1.purged )

    def test_ownable( self ):
        """is_owner/error_unless_owner for owner, non-owner, anonymous, and admin."""
        owner = self.user_mgr.create( self.trans, **user2_data )
        non_owner = self.user_mgr.create( self.trans, **user3_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        dataset1 = self.dataset_mgr.create( self.trans )
        item1 = self.hda_mgr.create( self.trans, history1, dataset1 )

        self.log( "should be able to poll whether a given user owns an item" )
        self.assertTrue( self.hda_mgr.is_owner( self.trans, item1, owner ) )
        self.assertFalse( self.hda_mgr.is_owner( self.trans, item1, non_owner ) )

        self.log( "should raise an error when checking ownership with non-owner" )
        self.assertRaises( exceptions.ItemOwnershipException,
            self.hda_mgr.error_unless_owner, self.trans, item1, non_owner )
        self.log( "should raise an error when checking ownership with anonymous" )
        self.assertRaises( exceptions.ItemOwnershipException,
            self.hda_mgr.error_unless_owner, self.trans, item1, None )
        self.log( "should not raise an error when checking ownership with owner" )
        self.assertEqual( self.hda_mgr.error_unless_owner( self.trans, item1, owner ), item1 )
        self.log( "should not raise an error when checking ownership with admin" )
        self.assertEqual( self.hda_mgr.error_unless_owner( self.trans, item1, self.admin_user ), item1 )

    def test_accessible( self ):
        """An HDA with default (lax) dataset permissions is accessible to all users."""
        owner = self.user_mgr.create( self.trans, **user2_data )
        non_owner = self.user_mgr.create( self.trans, **user3_data )
        history1 = self.history_mgr.create( self.trans, name='history1', user=owner )
        dataset1 = self.dataset_mgr.create( self.trans )
        item1 = self.hda_mgr.create( self.trans, history1, dataset1 )

        self.log( "(by default, dataset permissions are lax) should be accessible to all" )
        for user in self.user_mgr.list( self.trans ):
            self.assertTrue( self.hda_mgr.is_accessible( self.trans, item1, user ) )
        #TODO: set perms on underlying dataset and then test accessible

    def test_anon( self ):
        """Anonymous users (user=None) own/access HDAs in their anonymous history."""
        anon_user = None
        self.trans.set_user( anon_user )
        history1 = self.history_mgr.create( self.trans, name='anon_history', user=anon_user )
        self.trans.set_history( history1 )
        dataset1 = self.dataset_mgr.create( self.trans )
        item1 = self.hda_mgr.create( self.trans, history1, dataset1 )

        self.log( "should not raise an error when checking ownership/access on anonymous' own dataset" )
        self.assertTrue( self.hda_mgr.is_accessible( self.trans, item1, anon_user ) )
        self.assertEqual( self.hda_mgr.error_unless_owner( self.trans, item1, anon_user ), item1 )

        self.log( "should raise an error when checking ownership on anonymous' dataset with other user" )
        non_owner = self.user_mgr.create( self.trans, **user3_data )
        self.assertRaises( exceptions.ItemOwnershipException,
            self.hda_mgr.error_unless_owner, self.trans, item1, non_owner )
def purge_hda(hda_manager: HDAManager, hda_id): hda = hda_manager.by_id(hda_id) hda_manager._purge(hda)
def __init__(self, app): super(PageController, self).__init__(app) self.page_manager = PageManager(app) self.history_manager = HistoryManager(app) self.history_serializer = HistorySerializer(self.app) self.hda_manager = HDAManager(app)
def set_up_managers( self ): super( DatasetCollectionManagerTestCase, self ).set_up_managers() self.dataset_manager = DatasetManager( self.app ) self.hda_manager = HDAManager( self.app ) self.history_manager = HistoryManager( self.app ) self.collection_manager = DatasetCollectionManager( self.app )
def __init__(self, **kwargs):
    """Construct the Galaxy application, wiring every subsystem.

    ``kwargs`` are raw configuration options forwarded to
    ``config.Configuration``.  NOTE(review): statement order matters -
    several components below explicitly depend on earlier ones (e.g. the
    workflow scheduling manager must follow job_config).
    """
    if not log.handlers:
        # Paste didn't handle it, so we need a temporary basic log
        # configured. The handler added here gets dumped and replaced with
        # an appropriately configured logger in configure_logging below.
        logging.basicConfig(level=logging.DEBUG)
    log.debug("python path is: %s", ", ".join(sys.path))
    self.name = 'galaxy'
    # is_webapp will be set to true when building WSGI app
    self.is_webapp = False
    self.startup_timer = ExecutionTimer()
    self.new_installation = False
    # Read config file and check for errors
    self.config = config.Configuration(**kwargs)
    self.config.check()
    config.configure_logging(self.config)
    self.execution_timer_factory = ExecutionTimerFactory(self.config)
    self.configure_fluent_log()
    # A lot of postfork initialization depends on the server name, ensure it is set immediately after forking before other postfork functions
    self.application_stack = application_stack_instance(app=self)
    self.application_stack.register_postfork_function(
        self.application_stack.set_postfork_server_name, self)
    self.config.reload_sanitize_whitelist(
        explicit='sanitize_whitelist_file' in kwargs)
    self.amqp_internal_connection_obj = galaxy.queues.connection_from_config(
        self.config)
    # queue_worker *can* be initialized with a queue, but here we don't
    # want to and we'll allow postfork to bind and start it.
    self.queue_worker = GalaxyQueueWorker(self)
    self._configure_tool_shed_registry()
    self._configure_object_store(fsmon=True)
    # Setup the database engine and ORM
    config_file = kwargs.get('global_conf', {}).get('__file__', None)
    if config_file:
        log.debug('Using "galaxy.ini" config file: %s', config_file)
    check_migrate_tools = self.config.check_migrate_tools
    self._configure_models(
        check_migrate_databases=self.config.check_migrate_databases,
        check_migrate_tools=check_migrate_tools,
        config_file=config_file)
    # Security helper
    self._configure_security()
    # Tag handler
    self.tag_handler = GalaxyTagHandler(self.model.context)
    # Core managers used throughout the web/API layers.
    self.dataset_collections_service = DatasetCollectionManager(self)
    self.history_manager = HistoryManager(self)
    self.hda_manager = HDAManager(self)
    self.workflow_manager = WorkflowsManager(self)
    self.dependency_resolvers_view = DependencyResolversView(self)
    self.test_data_resolver = test_data.TestDataResolver(
        file_dirs=self.config.tool_test_data_directories)
    self.library_folder_manager = FolderManager()
    self.library_manager = LibraryManager()
    self.dynamic_tool_manager = DynamicToolManager(self)
    # Tool Data Tables
    self._configure_tool_data_tables(from_shed_config=False)
    # Load dbkey / genome build manager
    self._configure_genome_builds(data_table_name="__dbkeys__",
                                  load_old_style=True)
    # Genomes
    self.genomes = Genomes(self)
    # Data providers registry.
    self.data_provider_registry = DataProviderRegistry()
    # Initialize job metrics manager, needs to be in place before
    # config so per-destination modifications can be made.
    self.job_metrics = job_metrics.JobMetrics(
        self.config.job_metrics_config_file, app=self)
    # Initialize error report plugins.
    self.error_reports = ErrorReports(self.config.error_report_file, app=self)
    # Initialize the job management configuration
    self.job_config = jobs.JobConfiguration(self)
    # Setup a Tool Cache
    self.tool_cache = ToolCache()
    self.tool_shed_repository_cache = ToolShedRepositoryCache(self)
    # Watch various config files for immediate reload
    self.watchers = ConfigWatchers(self)
    self._configure_tool_config_files()
    self.installed_repository_manager = InstalledRepositoryManager(self)
    self._configure_datatypes_registry(self.installed_repository_manager)
    galaxy.model.set_datatypes_registry(self.datatypes_registry)
    self._configure_toolbox()
    # Load Data Manager
    self.data_managers = DataManagers(self)
    # Load the update repository manager.
    self.update_repository_manager = UpdateRepositoryManager(self)
    # Load proprietary datatype converters and display applications.
    self.installed_repository_manager.load_proprietary_converters_and_display_applications()
    # Load datatype display applications defined in local datatypes_conf.xml
    self.datatypes_registry.load_display_applications(self)
    # Load datatype converters defined in local datatypes_conf.xml
    self.datatypes_registry.load_datatype_converters(self.toolbox)
    # Load external metadata tool
    self.datatypes_registry.load_external_metadata_tool(self.toolbox)
    # Load history import/export tools.
    load_lib_tools(self.toolbox)
    # visualizations registry: associates resources with visualizations, controls how to render
    self.visualizations_registry = VisualizationsRegistry(
        self,
        directories_setting=self.config.visualization_plugins_directory,
        template_cache_dir=self.config.template_cache_path)
    # Tours registry
    self.tour_registry = ToursRegistry(self.config.tour_config_dir)
    # Webhooks registry
    self.webhooks_registry = WebhooksRegistry(self.config.webhooks_dir)
    # Load security policy.
    self.security_agent = self.model.security_agent
    self.host_security_agent = galaxy.model.security.HostAgent(
        model=self.security_agent.model,
        permitted_actions=self.security_agent.permitted_actions)
    # Load quota management.
    if self.config.enable_quotas:
        self.quota_agent = galaxy.quota.QuotaAgent(self.model)
    else:
        # No-op agent when quotas are disabled.
        self.quota_agent = galaxy.quota.NoQuotaAgent(self.model)
    # Heartbeat for thread profiling
    self.heartbeat = None
    from galaxy import auth
    self.auth_manager = auth.AuthManager(self)
    self.user_manager = UserManager(self)
    # Start the heartbeat process if configured and available (wait until
    # postfork if using uWSGI)
    if self.config.use_heartbeat:
        if heartbeat.Heartbeat:
            self.heartbeat = heartbeat.Heartbeat(
                self.config,
                period=self.config.heartbeat_interval,
                fname=self.config.heartbeat_log)
            self.heartbeat.daemon = True
            self.application_stack.register_postfork_function(
                self.heartbeat.start)
    self.authnz_manager = None
    if self.config.enable_oidc:
        from galaxy.authnz import managers
        self.authnz_manager = managers.AuthnzManager(
            self, self.config.oidc_config, self.config.oidc_backends_config)
    self.sentry_client = None
    if self.config.sentry_dsn:

        def postfork_sentry_client():
            # Imported lazily/postfork so raven is only required when a
            # sentry_dsn is configured.
            import raven
            self.sentry_client = raven.Client(
                self.config.sentry_dsn,
                transport=raven.transport.HTTPTransport)

        self.application_stack.register_postfork_function(
            postfork_sentry_client)
    # Transfer manager client
    if self.config.get_bool('enable_beta_job_managers', False):
        from galaxy.jobs import transfer_manager
        self.transfer_manager = transfer_manager.TransferManager(self)
    # Start the job manager
    from galaxy.jobs import manager
    self.job_manager = manager.JobManager(self)
    self.application_stack.register_postfork_function(
        self.job_manager.start)
    self.proxy_manager = ProxyManager(self.config)
    from galaxy.workflow import scheduling_manager
    # Must be initialized after job_config.
    self.workflow_scheduling_manager = scheduling_manager.WorkflowSchedulingManager(
        self)
    # Must be initialized after any component that might make use of stack messaging is configured. Alternatively if
    # it becomes more commonly needed we could create a prefork function registration method like we do with
    # postfork functions.
    self.application_stack.init_late_prefork()
    self.containers = {}
    if self.config.enable_beta_containers_interface:
        self.containers = build_container_interfaces(
            self.config.containers_config_file,
            containers_conf=self.config.containers_conf)
    self.interactivetool_manager = InteractiveToolManager(self)
    # Configure handling of signals
    handlers = {}
    if self.heartbeat:
        handlers[signal.SIGUSR1] = self.heartbeat.dump_signal_handler
    self._configure_signal_handlers(handlers)
    self.database_heartbeat = DatabaseHeartbeat(
        application_stack=self.application_stack)
    self.database_heartbeat.add_change_callback(self.watchers.change_state)
    self.application_stack.register_postfork_function(
        self.database_heartbeat.start)
    # Start web stack message handling
    self.application_stack.register_postfork_function(
        self.application_stack.start)
    self.application_stack.register_postfork_function(
        self.queue_worker.bind_and_start)
    # Delay toolbox index until after startup
    self.application_stack.register_postfork_function(
        lambda: send_local_control_task(self, 'rebuild_toolbox_search_index'))
    # Drop pre-fork engine connections; workers re-connect after fork.
    self.model.engine.dispose()
    # Inject url_for for components to more easily optionally depend
    # on url_for.
    self.url_for = url_for
    self.server_starttime = int(time.time())  # used for cachebusting
    log.info("Galaxy app startup finished %s" % self.startup_timer)
class DatasetCollectionManagerTestCase(BaseTestCase, CreatesCollectionsMixin):
    """Unit tests for DatasetCollectionManager: creating list collections from
    ids or objects and updating an HDCA from a dictionary.

    Element-identifier construction comes from ``CreatesCollectionsMixin``
    (``build_element_identifiers``).
    """

    def set_up_managers(self):
        """Instantiate the managers these tests exercise."""
        super(DatasetCollectionManagerTestCase, self).set_up_managers()
        self.dataset_manager = DatasetManager(self.app)
        self.hda_manager = HDAManager(self.app)
        self.history_manager = HistoryManager(self.app)
        self.collection_manager = DatasetCollectionManager(self.app)

    def test_create_simple_list(self):
        """Create a 'list' collection from element identifiers and from objects."""
        owner = self.user_manager.create(**user2_data)
        history = self.history_manager.create(name='history1', user=owner)
        hda1 = self.hda_manager.create(name='one', history=history,
                                       dataset=self.dataset_manager.create())
        hda2 = self.hda_manager.create(name='two', history=history,
                                       dataset=self.dataset_manager.create())
        hda3 = self.hda_manager.create(name='three', history=history,
                                       dataset=self.dataset_manager.create())

        self.log("should be able to create a new Collection via ids")
        element_identifiers = self.build_element_identifiers([hda1, hda2, hda3])
        hdca = self.collection_manager.create(self.trans, history,
                                              'test collection', 'list',
                                              element_identifiers=element_identifiers)
        self.assertIsInstance(hdca, model.HistoryDatasetCollectionAssociation)
        self.assertEqual(hdca.name, 'test collection')
        # hid 4: the three HDAs above already occupy hids 1-3 in this history.
        self.assertEqual(hdca.hid, 4)
        self.assertFalse(hdca.deleted)
        self.assertTrue(hdca.visible)

        self.log("should contain an underlying, well-formed DatasetCollection")
        self.assertIsInstance(hdca.collection, model.DatasetCollection)
        collection = hdca.collection
        self.assertEqual(collection.collection_type, 'list')
        self.assertEqual(collection.state, 'ok')
        self.assertEqual(len(collection.dataset_instances), 3)
        self.assertEqual(len(collection.elements), 3)

        # Element order must follow the identifier list passed to create().
        self.log("and that collection should have three well-formed Elements")
        self.assertIsInstance(collection.elements[0],
                              model.DatasetCollectionElement)
        self.assertEqual(collection.elements[0].element_identifier, 'one')
        self.assertEqual(collection.elements[0].element_index, 0)
        self.assertEqual(collection.elements[0].element_type, 'hda')
        self.assertEqual(collection.elements[0].element_object, hda1)

        self.assertIsInstance(collection.elements[1],
                              model.DatasetCollectionElement)
        self.assertEqual(collection.elements[1].element_identifier, 'two')
        self.assertEqual(collection.elements[1].element_index, 1)
        self.assertEqual(collection.elements[1].element_type, 'hda')
        self.assertEqual(collection.elements[1].element_object, hda2)

        self.assertIsInstance(collection.elements[2],
                              model.DatasetCollectionElement)
        self.assertEqual(collection.elements[2].element_identifier, 'three')
        self.assertEqual(collection.elements[2].element_index, 2)
        self.assertEqual(collection.elements[2].element_type, 'hda')
        self.assertEqual(collection.elements[2].element_object, hda3)

        # create() also accepts a mapping of identifier -> object directly.
        self.log("should be able to create a new Collection via objects")
        elements = dict(one=hda1, two=hda2, three=hda3)
        hdca2 = self.collection_manager.create(self.trans, history,
                                               'test collection 2', 'list',
                                               elements=elements)
        self.assertIsInstance(hdca2, model.HistoryDatasetCollectionAssociation)

    def test_update_from_dict(self):
        """_set_from_dict applies name/deleted/visible updates to an HDCA."""
        owner = self.user_manager.create(**user2_data)
        history = self.history_manager.create(name='history1', user=owner)
        hda1 = self.hda_manager.create(name='one', history=history,
                                       dataset=self.dataset_manager.create())
        hda2 = self.hda_manager.create(name='two', history=history,
                                       dataset=self.dataset_manager.create())
        hda3 = self.hda_manager.create(name='three', history=history,
                                       dataset=self.dataset_manager.create())
        elements = dict(one=hda1, two=hda2, three=hda3)
        hdca = self.collection_manager.create(self.trans, history,
                                              'test collection', 'list',
                                              elements=elements)

        self.log("should be set from a dictionary")
        self.collection_manager._set_from_dict(self.trans, hdca, {
            'deleted': True,
            'visible': False,
            'name': 'New Name',
            # TODO: doesn't work
            # 'tags' : [ 'one', 'two', 'three' ]
            # 'annotations' : [?]
        })
        self.assertEqual(hdca.name, 'New Name')
        self.assertTrue(hdca.deleted)
        self.assertFalse(hdca.visible)