def sharing(self, trans, id, **kwargs):
    """Handle page sharing: toggle link access / published state, or unshare a user."""
    # Resolve the page being shared.
    session = trans.sa_session
    page = session.query(model.Page).get(self.decode_id(id))
    # Apply the requested sharing operation, if any.
    if 'make_accessible_via_link' in kwargs:
        self._make_item_accessible(session, page)
    elif 'make_accessible_and_publish' in kwargs:
        self._make_item_accessible(session, page)
        page.published = True
    elif 'publish' in kwargs:
        page.published = True
    elif 'disable_link_access' in kwargs:
        page.importable = False
    elif 'unpublish' in kwargs:
        page.published = False
    elif 'disable_link_access_and_unpublish' in kwargs:
        page.importable = page.published = False
    elif 'unshare_user' in kwargs:
        user_to_remove = session.query(model.User).get(self.decode_id(kwargs['unshare_user']))
        if not user_to_remove:
            error("User not found for provided id")
        share = session.query(model.PageUserShareAssociation) \
            .filter_by(user=user_to_remove, page=page).one()
        session.delete(share)
    session.flush()
    return trans.fill_template("/sharing_base.mako", item=page, use_panels=True)
def editor(self, trans, id=None, version=None):
    """
    Render the main workflow editor interface.  The canvas is embedded as
    an iframe (necessary for scrolling to work properly), which is rendered
    by `editor_canvas`.
    """
    if not id:
        error("Invalid workflow id")
    stored = self.get_stored_workflow(trans, id)
    # Load every workflow owned by the user so they can be copied or
    # inserted from within the workflow editor.
    owned_workflows = (
        trans.sa_session.query(model.StoredWorkflow)
        .filter_by(user=trans.user, deleted=False)
        .order_by(desc(model.StoredWorkflow.table.c.update_time))
        .options(joinedload('latest_workflow').joinedload('steps'))
        .all()
    )
    # Default to the most recent revision when no version is requested.
    version = len(stored.workflows) - 1 if version is None else int(version)
    return trans.fill_template(
        "workflow/editor.mako",
        workflows=owned_workflows,
        stored=stored,
        version=version,
        annotation=self.get_item_annotation_str(trans.sa_session, trans.user, stored))
def editor(self, trans, id=None, version=None):
    """
    Render the main workflow editor interface.  The canvas is embedded as
    an iframe (necessary for scrolling to work properly), which is rendered
    by `editor_canvas`.
    """
    if not id:
        error("Invalid workflow id")
    stored = self.get_stored_workflow(trans, id)
    # Fetch all of the user's own workflows so the editor can offer them
    # for copying / insertion.
    all_user_workflows = (
        trans.sa_session.query(model.StoredWorkflow)
        .filter_by(user=trans.user, deleted=False)
        .order_by(desc(model.StoredWorkflow.table.c.update_time))
        .options(joinedload('latest_workflow').joinedload('steps'))
        .all()
    )
    if version is None:
        # No explicit version requested: use the latest revision.
        version = len(stored.workflows) - 1
    else:
        version = int(version)
    return trans.fill_template(
        "workflow/editor.mako",
        workflows=all_user_workflows,
        stored=stored,
        version=version,
        annotation=self.get_item_annotation_str(trans.sa_session, trans.user, stored))
def sharing(self, trans, id, **kwargs):
    """Handle visualization sharing: link access, publishing, and per-user unsharing."""
    # Resolve the visualization; ownership is enforced by the getter.
    session = trans.sa_session
    visualization = self.get_visualization(trans, id, check_ownership=True)
    # Apply the requested sharing operation, if any.
    if 'make_accessible_via_link' in kwargs:
        self._make_item_accessible(session, visualization)
    elif 'make_accessible_and_publish' in kwargs:
        self._make_item_accessible(session, visualization)
        visualization.published = True
    elif 'publish' in kwargs:
        visualization.published = True
    elif 'disable_link_access' in kwargs:
        visualization.importable = False
    elif 'unpublish' in kwargs:
        visualization.published = False
    elif 'disable_link_access_and_unpublish' in kwargs:
        visualization.importable = visualization.published = False
    elif 'unshare_user' in kwargs:
        user_to_remove = session.query(model.User).get(self.decode_id(kwargs['unshare_user']))
        if not user_to_remove:
            error("User not found for provided id")
        share = session.query(model.VisualizationUserShareAssociation) \
            .filter_by(user=user_to_remove, visualization=visualization).one()
        session.delete(share)
    session.flush()
    return trans.fill_template("/sharing_base.mako", item=visualization, use_panels=True)
def sharing(self, trans, id, **kwargs):
    """Handle visualization sharing: link access, publishing, and per-user unsharing."""
    session = trans.sa_session
    # Ownership check happens inside the getter.
    visualization = self.get_visualization(trans, id, check_ownership=True)
    # Dispatch on which sharing action was POSTed.
    if 'make_accessible_via_link' in kwargs:
        self._make_item_accessible(session, visualization)
    elif 'make_accessible_and_publish' in kwargs:
        self._make_item_accessible(session, visualization)
        visualization.published = True
    elif 'publish' in kwargs:
        visualization.published = True
    elif 'disable_link_access' in kwargs:
        visualization.importable = False
    elif 'unpublish' in kwargs:
        visualization.published = False
    elif 'disable_link_access_and_unpublish' in kwargs:
        visualization.importable = visualization.published = False
    elif 'unshare_user' in kwargs:
        unshared_user = session.query(model.User).get(self.decode_id(kwargs['unshare_user']))
        if not unshared_user:
            error("User not found for provided id")
        share = session.query(model.VisualizationUserShareAssociation) \
            .filter_by(user=unshared_user, visualization=visualization).one()
        session.delete(share)
    session.flush()
    return trans.fill_template(
        "/sharing_base.mako",
        item=visualization,
        controller_list='visualizations',
        use_panels=True)
def index(self, trans, tool_id=None, from_noframe=None, **kwd):
    """
    Run the tool identified by ``tool_id``.

    Redirects to the welcome page when no tool id is given, returns a 404
    for unknown or inaccessible tools, forces a login round-trip for tools
    that require it, and otherwise executes the tool without displaying a
    form (used for datasource tools).
    """
    # tool id not available, redirect to main page
    if tool_id is None:
        return trans.response.send_redirect(url_for(controller='root', action='welcome'))
    tool = self.__get_tool(tool_id)
    # tool id is not matching, display an error
    if not tool or not tool.allow_user_access(trans.user):
        log.error('index called with tool id \'%s\' but no such tool exists', tool_id)
        trans.log_event('Tool id \'%s\' does not exist' % tool_id)
        trans.response.status = 404
        return trans.show_error_message('Tool \'%s\' does not exist.' % (escape(tool_id)))
    if tool.require_login and not trans.user:
        # Bounce through the login form, then come back here.
        redirect = url_for(controller='tool_runner', action='index', tool_id=tool_id, **kwd)
        return trans.response.send_redirect(
            url_for(controller='user', action='login', cntrller='user', status='info',
                    message='You must be logged in to use this tool.', redirect=redirect))
    if tool.tool_type == 'default':
        # Regular tools are rendered by the main interface.
        return trans.response.send_redirect(url_for(controller='root', tool_id=tool_id))
    # execute tool without displaying form (used for datasource tools)
    params = galaxy.util.Params(kwd, sanitize=False)
    # do param translation here, used by datasource tools
    if tool.input_translator:
        tool.input_translator.translate(params)
    # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
    # so make sure to create a new history if we've never had one before.
    history = tool.get_default_history_by_trans(trans, create=True)
    try:
        vars = tool.handle_input(trans, params.__dict__, history=history)
    except Exception as e:
        # Fixed: was Python 2-only `except Exception, e:` syntax, a
        # SyntaxError under Python 3 (which the rest of this file targets).
        error(str(e))
def sharing(self, trans, id, **kwargs):
    """Handle page sharing: link access, publishing, and per-user unsharing."""
    session = trans.sa_session
    # Load the page by its encoded id.
    page = session.query(model.Page).get(self.decode_id(id))
    # Dispatch on which sharing action was POSTed.
    if 'make_accessible_via_link' in kwargs:
        self._make_item_accessible(session, page)
    elif 'make_accessible_and_publish' in kwargs:
        self._make_item_accessible(session, page)
        page.published = True
    elif 'publish' in kwargs:
        page.published = True
    elif 'disable_link_access' in kwargs:
        page.importable = False
    elif 'unpublish' in kwargs:
        page.published = False
    elif 'disable_link_access_and_unpublish' in kwargs:
        page.importable = page.published = False
    elif 'unshare_user' in kwargs:
        unshared_user = session.query(model.User).get(self.decode_id(kwargs['unshare_user']))
        if not unshared_user:
            error("User not found for provided id")
        share = session.query(model.PageUserShareAssociation) \
            .filter_by(user=unshared_user, page=page).one()
        session.delete(share)
    session.flush()
    return trans.fill_template("/sharing_base.mako", item=page, use_panels=True)
def get_page(self, trans, id, check_ownership=True, check_accessible=False):
    """Get a page from the database by id."""
    # Decode the client-facing id and load the record.
    decoded_id = self.decode_id(id)
    page = trans.sa_session.query(model.Page).get(decoded_id)
    if not page:
        error("Page not found")
    else:
        # Enforce ownership / accessibility before handing the page back.
        return self.security_check(trans, page, check_ownership, check_accessible)
def get_page(self, trans, id, check_ownership=True, check_accessible=False):
    """Get a page from the database by id."""
    # Translate the encoded id, then fetch the page record.
    page = trans.sa_session.query(model.Page).get(self.decode_id(id))
    if not page:
        error("Page not found")
    else:
        # Only return the page after the security check passes.
        return self.security_check(trans, page, check_ownership, check_accessible)
def index(self, trans, tool_id=None, from_noframe=None, **kwd):
    """
    Run the tool identified by ``tool_id``, redirecting for regular tools
    and executing datasource-style tools directly.
    """
    def _tool_not_found():
        # Shared 404 response for unknown or inaccessible tools.
        log.error("index called with tool id '%s' but no such tool exists", tool_id)
        trans.log_event("Tool id '%s' does not exist" % tool_id)
        trans.response.status = 404
        return trans.show_error_message("Tool '%s' does not exist." % (escape(tool_id)))

    # No tool specified: bounce to the welcome page.
    if tool_id is None:
        return trans.response.send_redirect(url_for(controller='root', action='welcome'))
    tool = self.__get_tool(tool_id)
    if not tool:
        return _tool_not_found()
    if tool.require_login and not trans.user:
        # Force a login round-trip, then return here.
        redirect = url_for(controller='tool_runner', action='index', tool_id=tool_id, **kwd)
        return trans.response.send_redirect(
            url_for(controller='user', action='login', cntrller='user', status='info',
                    message='You must be logged in to use this tool.', redirect=redirect))
    if not tool.allow_user_access(trans.user):
        return _tool_not_found()
    # FIXME: Tool class should define behavior
    if tool.tool_type in ['default', 'interactivetool']:
        return trans.response.send_redirect(url_for(controller='root', tool_id=tool_id))
    # execute tool without displaying form (used for datasource tools)
    params = galaxy.util.Params(kwd, sanitize=False)
    # do param translation here, used by datasource tools
    if tool.input_translator:
        tool.input_translator.translate(params)
    if 'runtool_btn' not in params.__dict__ and 'URL' not in params.__dict__:
        error('Tool execution through the `tool_runner` requires a `runtool_btn` flag or `URL` parameter.')
    # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
    # so make sure to create a new history if we've never had one before.
    history = tool.get_default_history_by_trans(trans, create=True)
    try:
        template_vars = tool.handle_input(trans, params.__dict__, history=history)
    except Exception as e:
        error(galaxy.util.unicodify(e))
    if len(params) > 0:
        trans.log_event('Tool params: %s' % (str(params)), tool_id=tool_id)
    return trans.fill_template('root/tool_runner.mako', **template_vars)
def get_history(self, trans, id, check_ownership=True):
    """Get a History from the database by id, verifying ownership."""
    # Load history from database
    id = trans.security.decode_id(id)
    history = trans.sa_session.query(model.History).get(id)
    if not history:
        # Fixed: was `err+msg( "History not found" )` — a NameError on two
        # undefined names; the sibling checks below show `error(...)` is intended.
        error("History not found")
    if check_ownership:
        # Verify ownership
        user = trans.get_user()
        if not user:
            error("Must be logged in to manage histories")
        if history.user != user:
            error("History is not owned by current user")
    return history
def get_history(self, trans, id, check_ownership=True):
    """Get a History from the database by id, verifying ownership."""
    # Load history from database
    id = trans.security.decode_id(id)
    history = trans.sa_session.query(model.History).get(id)
    if not history:
        # Fixed: was `err + msg("History not found")` — a NameError on two
        # undefined names; every other failure path here calls `error(...)`.
        error("History not found")
    if check_ownership:
        # Verify ownership
        user = trans.get_user()
        if not user:
            error("Must be logged in to manage histories")
        if history.user != user:
            error("History is not owned by current user")
    return history
def sharing(self, trans, id, **kwargs):
    """Handle workflow sharing: link access, publishing, and (un)share operations."""
    session = trans.sa_session
    if 'unshare_me' in kwargs:
        # Remove self from shared associations with workflow.
        stored = self.get_stored_workflow(trans, id, False, True)
        share = session.query(model.StoredWorkflowUserShareAssociation) \
            .filter_by(user=trans.user, stored_workflow=stored).one()
        session.delete(share)
        session.flush()
        return self.list(trans)
    # Get session and workflow.
    stored = self.get_stored_workflow(trans, id)
    session.add(stored)
    # Dispatch on the requested sharing operation.
    if 'make_accessible_via_link' in kwargs:
        self._make_item_accessible(session, stored)
    elif 'make_accessible_and_publish' in kwargs:
        self._make_item_accessible(session, stored)
        stored.published = True
    elif 'publish' in kwargs:
        stored.published = True
    elif 'disable_link_access' in kwargs:
        stored.importable = False
    elif 'unpublish' in kwargs:
        stored.published = False
    elif 'disable_link_access_and_unpublish' in kwargs:
        stored.importable = stored.published = False
    elif 'unshare_user' in kwargs:
        unshared_user = session.query(model.User).get(trans.security.decode_id(kwargs['unshare_user']))
        if not unshared_user:
            error("User not found for provided id")
        share = session.query(model.StoredWorkflowUserShareAssociation) \
            .filter_by(user=unshared_user, stored_workflow=stored).one()
        session.delete(share)
    # Legacy issue: workflows made accessible before recent updates may not have a slug. Create slug for any workflows that need them.
    if stored.importable and not stored.slug:
        self._make_item_accessible(session, stored)
    session.flush()
    return trans.fill_template("/workflow/sharing.mako", use_panels=True, item=stored)
def sharing(self, trans, id, **kwargs):
    """Handle workflow sharing: link access, publishing, and (un)share operations."""
    session = trans.sa_session
    if 'unshare_me' in kwargs:
        # Remove self from shared associations with workflow.
        stored = self.get_stored_workflow(trans, id, False, True)
        association = session.query(model.StoredWorkflowUserShareAssociation) \
            .filter_by(user=trans.user, stored_workflow=stored).one()
        session.delete(association)
        session.flush()
        return self.list(trans)
    # Load the workflow and attach it to the session.
    stored = self.get_stored_workflow(trans, id)
    session.add(stored)
    # Apply the requested sharing operation, if any.
    if 'make_accessible_via_link' in kwargs:
        self._make_item_accessible(session, stored)
    elif 'make_accessible_and_publish' in kwargs:
        self._make_item_accessible(session, stored)
        stored.published = True
    elif 'publish' in kwargs:
        stored.published = True
    elif 'disable_link_access' in kwargs:
        stored.importable = False
    elif 'unpublish' in kwargs:
        stored.published = False
    elif 'disable_link_access_and_unpublish' in kwargs:
        stored.importable = stored.published = False
    elif 'unshare_user' in kwargs:
        target_user = session.query(model.User).get(trans.security.decode_id(kwargs['unshare_user']))
        if not target_user:
            error("User not found for provided id")
        association = session.query(model.StoredWorkflowUserShareAssociation) \
            .filter_by(user=target_user, stored_workflow=stored).one()
        session.delete(association)
    # Legacy issue: workflows made accessible before recent updates may not have a slug. Create slug for any workflows that need them.
    if stored.importable and not stored.slug:
        self._make_item_accessible(session, stored)
    session.flush()
    return trans.fill_template("/workflow/sharing.mako", use_panels=True, item=stored)
def index(self, trans, tool_id=None, from_noframe=None, **kwd):
    """
    Run the tool identified by ``tool_id``.

    Redirects to the welcome page when no tool id is given, returns a 404
    for unknown or inaccessible tools, forces a login round-trip for tools
    that require it, and otherwise executes the tool without displaying a
    form (used for datasource tools).
    """
    # tool id not available, redirect to main page
    if tool_id is None:
        return trans.response.send_redirect(
            url_for(controller='root', action='welcome'))
    tool = self.__get_tool(tool_id)
    # tool id is not matching, display an error
    if not tool or not tool.allow_user_access(trans.user):
        log.error(
            'index called with tool id \'%s\' but no such tool exists', tool_id)
        trans.log_event('Tool id \'%s\' does not exist' % tool_id)
        trans.response.status = 404
        return trans.show_error_message('Tool \'%s\' does not exist.' % (escape(tool_id)))
    if tool.require_login and not trans.user:
        # Bounce through the login form, then come back here.
        redirect = url_for(controller='tool_runner', action='index',
                           tool_id=tool_id, **kwd)
        return trans.response.send_redirect(
            url_for(controller='user', action='login', cntrller='user',
                    status='info',
                    message='You must be logged in to use this tool.',
                    redirect=redirect))
    if tool.tool_type == 'default':
        # Regular tools are rendered by the main interface.
        return trans.response.send_redirect(
            url_for(controller='root', tool_id=tool_id))
    # execute tool without displaying form (used for datasource tools)
    params = galaxy.util.Params(kwd, sanitize=False)
    # do param translation here, used by datasource tools
    if tool.input_translator:
        tool.input_translator.translate(params)
    # We may be visiting Galaxy for the first time ( e.g., sending data from UCSC ),
    # so make sure to create a new history if we've never had one before.
    history = tool.get_default_history_by_trans(trans, create=True)
    try:
        vars = tool.handle_input(trans, params.__dict__, history=history)
    except Exception as e:
        # Fixed: was Python 2-only `except Exception, e:` syntax, a
        # SyntaxError under Python 3 (used elsewhere in this file, e.g. L10-11).
        error(str(e))
def rerun(self, trans, id=None, job_id=None, **kwd):
    """
    Given a HistoryDatasetAssociation id, find the job and that created
    the dataset, extract the parameters, and display the appropriate tool
    form with parameters already filled in.
    """
    if job_id is None:
        if not id:
            error("'id' parameter is required")
        try:
            id = int(id)
        except ValueError:
            # it's not an un-encoded id, try to parse as encoded
            try:
                id = trans.security.decode_id(id)
            except Exception:
                error("Invalid value for 'id' parameter")
        # Get the dataset object
        data = trans.sa_session.query(trans.app.model.HistoryDatasetAssociation).get(id)
        # only allow rerunning if user is allowed access to the dataset.
        allowed = trans.user_is_admin or trans.app.security_agent.can_access_dataset(
            trans.get_current_user_roles(), data.dataset)
        if not allowed:
            error("You are not allowed to access this dataset")
        # Get the associated job, if any.
        job = data.creating_job
        if not job:
            raise Exception("Failed to get job information for dataset hid %d" % data.hid)
        job_id = trans.security.encode_id(job.id)
    return trans.response.send_redirect(url_for(controller="root", job_id=job_id))
def rerun(self, trans, id=None, job_id=None, **kwd):
    """
    Given a HistoryDatasetAssociation id, find the job and that created
    the dataset, extract the parameters, and display the appropriate tool
    form with parameters already filled in.
    """
    if job_id is None:
        if not id:
            error("'id' parameter is required")
        try:
            id = int(id)
        except ValueError:
            # it's not an un-encoded id, try to parse as encoded
            try:
                id = trans.security.decode_id(id)
            except Exception:
                error("Invalid value for 'id' parameter")
        # Look up the dataset record.
        data = trans.sa_session.query(trans.app.model.HistoryDatasetAssociation).get(id)
        # Rerunning is only allowed when the user may access the dataset.
        can_access = trans.user_is_admin or trans.app.security_agent.can_access_dataset(
            trans.get_current_user_roles(), data.dataset)
        if not can_access:
            error("You are not allowed to access this dataset")
        # Resolve the job that created this dataset.
        job = data.creating_job
        if job:
            job_id = trans.security.encode_id(job.id)
        else:
            raise Exception("Failed to get job information for dataset hid %d" % data.hid)
    return trans.response.send_redirect(url_for(controller="root", job_id=job_id))
def copy(self, trans, id, save_as_name=None):
    """Copy a workflow the user owns or has been granted access to."""
    # Get workflow to copy.
    stored = self.get_stored_workflow(trans, id, check_ownership=False)
    user = trans.get_user()
    owner = stored.user == user
    if not owner:
        # Non-owners may only copy workflows explicitly shared with them.
        shared = trans.sa_session.query(model.StoredWorkflowUserShareAssociation) \
            .filter_by(user=user, stored_workflow=stored).count()
        if shared == 0:
            error("Workflow is not owned by or shared with current user")
    # Build the copy.
    new_stored = model.StoredWorkflow()
    if save_as_name:
        new_stored.name = '%s' % save_as_name
    else:
        new_stored.name = "Copy of %s" % stored.name
    new_stored.latest_workflow = stored.latest_workflow
    # Copy annotation.
    annotation_obj = self.get_item_annotation_obj(trans.sa_session, stored.user, stored)
    if annotation_obj:
        self.add_item_annotation(trans.sa_session, trans.get_user(), new_stored,
                                 annotation_obj.annotation)
    new_stored.copy_tags_from(trans.user, stored)
    if not owner:
        new_stored.name += " shared by %s" % stored.user.email
    new_stored.user = user
    # Persist
    session = trans.sa_session
    session.add(new_stored)
    session.flush()
    # Display the management page
    message = 'Created new workflow with name: %s' % escape(new_stored.name)
    trans.set_message(message)
    return_url = url_for('/') + 'workflow?status=done&message=%s' % escape(message)
    trans.response.send_redirect(return_url)
def copy(self, trans, id, save_as_name=None):
    """Copy a workflow the user owns or has been granted access to."""
    # Fetch the source workflow; ownership is checked manually below.
    stored = self.get_stored_workflow(trans, id, check_ownership=False)
    user = trans.get_user()
    if stored.user == user:
        owner = True
    else:
        share_count = trans.sa_session.query(model.StoredWorkflowUserShareAssociation) \
            .filter_by(user=user, stored_workflow=stored).count()
        if share_count == 0:
            error("Workflow is not owned by or shared with current user")
        owner = False
    # Create the copy, naming it as requested (or "Copy of ..." by default).
    new_stored = model.StoredWorkflow()
    new_stored.name = '%s' % save_as_name if save_as_name else "Copy of %s" % stored.name
    new_stored.latest_workflow = stored.latest_workflow
    # Carry the annotation over from the original owner.
    annotation_obj = self.get_item_annotation_obj(trans.sa_session, stored.user, stored)
    if annotation_obj:
        self.add_item_annotation(trans.sa_session, trans.get_user(), new_stored,
                                 annotation_obj.annotation)
    new_stored.copy_tags_from(trans.user, stored)
    if not owner:
        new_stored.name += " shared by %s" % stored.user.email
    new_stored.user = user
    # Persist the copy.
    session = trans.sa_session
    session.add(new_stored)
    session.flush()
    # Redirect to the management page with a confirmation message.
    message = 'Created new workflow with name: %s' % escape(new_stored.name)
    trans.set_message(message)
    return_url = url_for('/') + 'workflow?status=done&message=%s' % escape(message)
    trans.response.send_redirect(return_url)
def rerun(self, trans, id=None, job_id=None, **kwd):
    """
    Given a HistoryDatasetAssociation id, find the job and that created
    the dataset, extract the parameters, and display the appropriate tool
    form with parameters already filled in.
    """
    if job_id is None:
        if not id:
            error("'id' parameter is required")
        try:
            id = int(id)
        except ValueError:
            # Fixed: was a bare `except:` (also catches SystemExit /
            # KeyboardInterrupt); int() failure raises ValueError, matching
            # the sibling rerun implementations in this file.
            # it's not an un-encoded id, try to parse as encoded
            try:
                id = trans.security.decode_id(id)
            except Exception:
                # Fixed: narrowed from bare `except:`.
                error("Invalid value for 'id' parameter")
        # Get the dataset object
        data = trans.sa_session.query(
            trans.app.model.HistoryDatasetAssociation).get(id)
        # only allow rerunning if user is allowed access to the dataset.
        if not (trans.user_is_admin() or trans.app.security_agent.can_access_dataset(
                trans.get_current_user_roles(), data.dataset)):
            error("You are not allowed to access this dataset")
        # Get the associated job, if any.
        job = data.creating_job
        if job:
            job_id = trans.security.encode_id(job.id)
        else:
            raise Exception(
                "Failed to get job information for dataset hid %d" % data.hid)
        # Get the tool object
        tool_id = job.tool_id
        tool = self.__get_tool(tool_id)
        # Fixed: `dict.has_key()` was removed in Python 3; use `in`.
        if tool.tool_type in proto_tool_types:
            return trans.response.send_redirect(
                url_for(controller=tool.action, action='index',
                        mako=tool.inputs['mako'].get_initial_value(None, None),
                        rerun_hda_id=id))
    return trans.response.send_redirect(
        url_for(controller="root", job_id=job_id))
def editor(self, trans, id=None, workflow_id=None, version=None):
    """
    Render the main workflow editor interface.  The canvas is embedded as
    an iframe (necessary for scrolling to work properly), which is rendered
    by `editor_canvas`.
    """
    if not id:
        if workflow_id:
            # A raw Workflow id was supplied: map it to its StoredWorkflow
            # and redirect to the canonical editor URL.
            workflow = trans.sa_session.query(model.Workflow).get(trans.security.decode_id(workflow_id))
            stored_workflow = workflow.stored_workflow
            self.security_check(trans, stored_workflow, True, False)
            stored_workflow_id = trans.security.encode_id(stored_workflow.id)
            return trans.response.send_redirect(f'{url_for("/")}workflow/editor?id={stored_workflow_id}')
        error("Invalid workflow id")
    stored = self.get_stored_workflow(trans, id)
    # Load the user's own (visible) workflows so they can be copied or
    # inserted from within the editor.
    user_workflows = (
        trans.sa_session.query(model.StoredWorkflow)
        .filter_by(user=trans.user, deleted=False, hidden=False)
        .order_by(desc(model.StoredWorkflow.table.c.update_time))
        .options(joinedload('latest_workflow').joinedload('steps'))
        .all()
    )
    # Default to the latest revision when none is requested.
    version = len(stored.workflows) - 1 if version is None else int(version)
    # create workflow module models
    module_sections = []
    for module_section in load_module_sections(trans).values():
        module_sections.append({
            "title": module_section.get("title"),
            "name": module_section.get("name"),
            "elems": [{
                "name": elem.get("name"),
                "title": elem.get("title"),
                "description": elem.get("description")
            } for elem in module_section.get("modules")]
        })
    # create data manager tool models (admins only)
    data_managers = []
    if trans.user_is_admin and trans.app.data_managers.data_managers:
        for data_manager_val in trans.app.data_managers.data_managers.values():
            dm_tool = data_manager_val.tool
            if not dm_tool.hidden:
                data_managers.append({
                    "id": dm_tool.id,
                    "name": dm_tool.name,
                    "hidden": dm_tool.hidden,
                    "description": dm_tool.description,
                    "is_workflow_compatible": dm_tool.is_workflow_compatible
                })
    # create workflow models, excluding the workflow being edited
    workflows = [{
        'id': trans.security.encode_id(workflow.id),
        'latest_id': trans.security.encode_id(workflow.latest_workflow.id),
        'step_count': len(workflow.latest_workflow.steps),
        'name': workflow.name
    } for workflow in user_workflows if workflow.id != stored.id]
    # identify item tags owned by the current user
    item_tag_names = [escape(ta.tag.name) for ta in stored.tags if ta.user == trans.user]
    # build workflow editor model
    editor_config = {
        'id': trans.security.encode_id(stored.id),
        'name': stored.name,
        'tags': item_tag_names,
        'initialVersion': version,
        'annotation': self.get_item_annotation_str(trans.sa_session, trans.user, stored),
        'toolbox': trans.app.toolbox.to_dict(trans),
        'moduleSections': module_sections,
        'dataManagers': data_managers,
        'workflows': workflows
    }
    # parse to mako
    return trans.fill_template("workflow/editor.mako", editor_config=editor_config)
def tag_outputs(self, trans, id, **kwargs):
    """Display / update which tool-step outputs are flagged as workflow outputs."""
    stored = self.get_stored_workflow(trans, id, check_ownership=False)
    user = trans.get_user()
    if stored.user != user:
        # Non-owners must have an explicit share association.
        if trans.sa_session.query(model.StoredWorkflowUserShareAssociation) \
                .filter_by(user=user, stored_workflow=stored).count() == 0:
            error("Workflow is not owned by or shared with current user")
    # Get the latest revision
    workflow = stored.latest_workflow
    # It is possible for a workflow to have 0 steps
    if len(workflow.steps) == 0:
        error("Workflow cannot be tagged for outputs because it does not have any steps")
    if workflow.has_cycles:
        error("Workflow cannot be tagged for outputs because it contains cycles")
    if workflow.has_errors:
        error("Workflow cannot be tagged for outputs because of validation errors in some steps")
    # Build the state for each step
    errors = {}
    has_upgrade_messages = False
    # has_errors is never used
    # has_errors = False
    if kwargs:
        # If kwargs were provided, the states for each step should have
        # been POSTed
        for step in workflow.steps:
            if step.type == 'tool':
                # Extract just the output flags for this step.
                prefix = "%s|otag|" % step.id
                prefix_len = len(prefix)
                outputs = [k[prefix_len:] for (k, v) in kwargs.items() if k.startswith(prefix)]
                if step.workflow_outputs:
                    # Drop outputs no longer flagged; keep track of new ones.
                    for existing_output in step.workflow_outputs:
                        if existing_output.output_name not in outputs:
                            trans.sa_session.delete(existing_output)
                        else:
                            outputs.remove(existing_output.output_name)
                for outputname in outputs:
                    new_output = model.WorkflowOutput(workflow_step_id=int(step.id),
                                                     output_name=outputname)
                    trans.sa_session.add(new_output)
    # Prepare each step
    trans.sa_session.flush()
    module_injector = WorkflowModuleInjector(trans)
    for step in workflow.steps:
        step.upgrade_messages = {}
        # Contruct modules
        module_injector.inject(step)
        if step.upgrade_messages:
            has_upgrade_messages = True
        if step.type == 'tool' or step.type is None:
            # Error dict
            if step.tool_errors:
                errors[step.id] = step.tool_errors
    # Render the form
    return trans.fill_template(
        "workflow/tag_outputs.mako",
        steps=workflow.steps,
        workflow=stored,
        has_upgrade_messages=has_upgrade_messages,
        errors=errors,
        incoming=kwargs
    )
def rerun(self, trans, id=None, from_noframe=None, job_id=None, **kwd): """ Given a HistoryDatasetAssociation id, find the job and that created the dataset, extract the parameters, and display the appropriate tool form with parameters already filled in. """ if job_id: try: job_id = trans.security.decode_id(job_id) job = trans.sa_session.query(trans.app.model.Job).get(job_id) except: error("Invalid value for 'job_id' parameter") if not trans.user_is_admin(): for data_assoc in job.output_datasets: # only allow rerunning if user is allowed access to the dataset. if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data_assoc.dataset.dataset): error("You are not allowed to rerun this job") param_error_text = "Failed to get parameters for job id %d " % job_id else: if not id: error("'id' parameter is required") try: id = int(id) except: # it's not an un-encoded id, try to parse as encoded try: id = trans.security.decode_id(id) except: error("Invalid value for 'id' parameter") # Get the dataset object data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation).get(id) # only allow rerunning if user is allowed access to the dataset. if not (trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset)): error("You are not allowed to access this dataset") # Get the associated job, if any. 
job = data.creating_job if not job: raise Exception( "Failed to get job information for dataset hid %d" % data.hid) param_error_text = "Failed to get parameters for dataset id %d " % data.id # Get the tool object tool_id = job.tool_id tool_version = job.tool_version try: tool_version_select_field, tools, tool = self.__get_tool_components( tool_id, tool_version=tool_version, get_loaded_tools_by_lineage=False, set_selected=True) if (tool.id == job.tool_id or tool.old_id == job.tool_id) and tool.version == job.tool_version: tool_id_version_message = '' elif tool.id == job.tool_id: if job.tool_version is None: # For some reason jobs don't always keep track of the tool version. tool_id_version_message = '' else: tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version if len(tools) > 1: tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.' else: tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.' else: if len(tools) > 1: tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.' else: tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % ( job.tool_id, job.tool_version) tool_id_version_message += 'currently available. You can rerun the job with this tool, which is a derivation of the original tool.' assert tool is not None, 'Requested tool has not been loaded.' except: # This is expected so not an exception. tool_id_version_message = '' error( "This dataset was created by an obsolete tool (%s). Can't re-run." 
% tool_id) if not tool.allow_user_access(trans.user): error("The requested tool is unknown.") # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now if not tool.is_workflow_compatible: error("The '%s' tool does not currently support rerunning." % tool.name) # Get the job's parameters try: params_objects = job.get_param_values(trans.app, ignore_errors=True) except: raise Exception(param_error_text) upgrade_messages = tool.check_and_update_param_values( params_objects, trans, update_values=False) # Need to remap dataset parameters. Job parameters point to original # dataset used; parameter should be the analygous dataset in the # current history. history = trans.get_history() hda_source_dict = {} # Mapping from HDA in history to source HDAs. for hda in history.datasets: source_hda = hda.copied_from_history_dataset_association while source_hda: # should this check library datasets as well? # FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories, # but is still less than perfect when eg individual datasets are copied between histories if source_hda not in hda_source_dict or source_hda.hid == hda.hid: hda_source_dict[source_hda] = hda source_hda = source_hda.copied_from_history_dataset_association # Ditto for dataset collections. 
hdca_source_dict = {} for hdca in history.dataset_collections: source_hdca = hdca.copied_from_history_dataset_collection_association while source_hdca: if source_hdca not in hdca_source_dict or source_hdca.hid == hdca.hid: hdca_source_dict[source_hdca] = hdca source_hdca = source_hdca.copied_from_history_dataset_collection_association # Unpack unvalidated values to strings, they'll be validated when the # form is submitted (this happens when re-running a job that was # initially run by a workflow) # This needs to be done recursively through grouping parameters def rerun_callback(input, value, prefixed_name, prefixed_label): if isinstance(value, UnvalidatedValue): try: return input.to_html_value(value.value, trans.app) except Exception, e: # Need to determine when (if ever) the to_html_value call could fail. log.debug( "Failed to use input.to_html_value to determine value of unvalidated parameter, defaulting to string: %s" % (e)) return str(value) if isinstance(input, DataToolParameter): if isinstance(value, list): values = [] for val in value: if is_hashable(val): if val in history.datasets: values.append(val) elif val in hda_source_dict: values.append(hda_source_dict[val]) return values if is_hashable( value ) and value not in history.datasets and value in hda_source_dict: return hda_source_dict[value] elif isinstance(input, DataCollectionToolParameter): if is_hashable( value ) and value not in history.dataset_collections and value in hdca_source_dict: return hdca_source_dict[value]
def rerun( self, trans, id=None, from_noframe=None, **kwd ):
    """
    Given a HistoryDatasetAssociation id, find the job that created the
    dataset, extract its parameters, and display the appropriate tool
    form with those parameters already filled in.

    :param trans: framework transaction (provides sa_session, security,
                  current user/history).
    :param id: HistoryDatasetAssociation id — either a plain integer or an
               encoded id string.
    :param from_noframe: if not None, render with the "add frame" header.
    :returns: the rendered tool-form template for re-running the job.

    Fixes applied: removed a stray trailing semicolon after the first
    ``error(...)`` call and replaced ``== None`` with ``is None`` (PEP 8).
    """
    if not id:
        error( "'id' parameter is required" )
    try:
        id = int( id )
    except:
        # it's not an un-encoded id, try to parse as encoded
        try:
            id = trans.security.decode_id( id )
        except:
            error( "Invalid value for 'id' parameter" )
    # Get the dataset object
    data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id )
    # only allow rerunning if user is allowed access to the dataset.
    if not ( trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ) ):
        error( "You are not allowed to access this dataset" )
    # Get the associated job, if any.
    job = data.creating_job
    if not job:
        raise Exception("Failed to get job information for dataset hid %d" % data.hid)
    # Get the tool object
    tool_id = job.tool_id
    tool_version = job.tool_version
    try:
        # Load the tool (and its other loaded versions) so the user can pick
        # a derivation if the exact original version is gone.
        tool_version_select_field, tools, tool = self.__get_tool_components( tool_id,
                                                                             tool_version=tool_version,
                                                                             get_loaded_tools_by_lineage=False,
                                                                             set_selected=True )
        if ( tool.id == job.tool_id or tool.old_id == job.tool_id ) and tool.version == job.tool_version:
            tool_id_version_message = ''
        elif tool.id == job.tool_id:
            if job.tool_version is None:
                # For some reason jobs don't always keep track of the tool version.
                tool_id_version_message = ''
            else:
                tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
                if len( tools ) > 1:
                    tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                else:
                    tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
        else:
            if len( tools ) > 1:
                tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
                tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
            else:
                tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % ( job.tool_id, job.tool_version )
                tool_id_version_message += 'currently available. You can rerun the job with this tool, which is a derivation of the original tool.'
        assert tool is not None, 'Requested tool has not been loaded.'
    except:
        # This is expected so not an exception.
        tool_id_version_message = ''
        error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id )
    # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
    if not tool.is_workflow_compatible:
        error( "The '%s' tool does not currently support rerunning." % tool.name )
    # Get the job's parameters
    try:
        params_objects = job.get_param_values( trans.app, ignore_errors = True )
    except:
        raise Exception( "Failed to get parameters for dataset id %d " % data.id )
    upgrade_messages = tool.check_and_update_param_values( params_objects, trans, update_values=False )
    # Need to remap dataset parameters. Job parameters point to original
    # dataset used; parameter should be the analygous dataset in the
    # current history.
    history = trans.get_history()
    hda_source_dict = {}  # Mapping from HDA in history to source HDAs.
    for hda in history.datasets:
        source_hda = hda.copied_from_history_dataset_association
        while source_hda:  # should this check library datasets as well?
            # FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories,
            # but is still less than perfect when eg individual datasets are copied between histories
            if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                hda_source_dict[ source_hda ] = hda
            source_hda = source_hda.copied_from_history_dataset_association
    # Unpack unvalidated values to strings, they'll be validated when the
    # form is submitted (this happens when re-running a job that was
    # initially run by a workflow)
    # This needs to be done recursively through grouping parameters
    def rerun_callback( input, value, prefixed_name, prefixed_label ):
        # Visitor: map each job-time dataset param to its copy in the
        # current history (via hda_source_dict) and stringify unvalidated
        # workflow values.
        if isinstance( value, UnvalidatedValue ):
            return str( value )
        if isinstance( input, DataToolParameter ):
            if isinstance(value, list):
                values = []
                for val in value:
                    if is_hashable( val ):
                        if val in history.datasets:
                            values.append( val )
                        elif val in hda_source_dict:
                            values.append( hda_source_dict[ val ])
                return values
            if is_hashable( value ) and value not in history.datasets and value in hda_source_dict:
                return hda_source_dict[ value ]
    visit_input_values( tool.inputs, params_objects, rerun_callback )
    # Create a fake tool_state for the tool, with the parameters values
    state = tool.new_state( trans )
    state.inputs = params_objects
    # If the job failed and has dependencies, allow dependency remap
    if job.state == job.states.ERROR:
        try:
            if [ hda.dependent_jobs for hda in [ jtod.dataset for jtod in job.output_datasets ] if hda.dependent_jobs ]:
                state.rerun_remap_job_id = trans.app.security.encode_id(job.id)
        except:
            # Job has no outputs?
            pass
    # create an incoming object from the original job's dataset-modified param objects
    incoming = {}
    params_to_incoming( incoming, tool.inputs, params_objects, trans.app )
    incoming[ "tool_state" ] = galaxy.util.object_to_string( state.encode( tool, trans.app ) )
    template, vars = tool.handle_input( trans, incoming, old_errors=upgrade_messages )  # update new state with old parameters
    # Is the "add frame" stuff neccesary here?
    add_frame = AddFrameData()
    add_frame.debug = trans.debug
    if from_noframe is not None:
        add_frame.wiki_url = trans.app.config.wiki_url
        add_frame.from_noframe = True
    return trans.fill_template( template,
                                history=history,
                                toolbox=self.get_toolbox(),
                                tool_version_select_field=tool_version_select_field,
                                tool=tool,
                                util=galaxy.util,
                                add_frame=add_frame,
                                tool_id_version_message=tool_id_version_message,
                                **vars )
def rerun(self, trans, id=None, from_noframe=None, **kwd):
    """
    Given a HistoryDatasetAssociation id, find the job that created the
    dataset, extract its parameters, and display the appropriate tool
    form with those parameters already filled in.

    :param trans: framework transaction (provides sa_session, security,
                  current user/history).
    :param id: HistoryDatasetAssociation id — either a plain integer or an
               encoded id string.
    :param from_noframe: if not None, render with the "add frame" header.
    :returns: the rendered tool-form template for re-running the job.

    Fix applied: ``job.tool_version == None`` replaced with ``is None``
    (PEP 8 E711).
    """
    if not id:
        error("'id' parameter is required")
    try:
        id = int(id)
    except:
        # it's not an un-encoded id, try to parse as encoded
        try:
            id = trans.security.decode_id(id)
        except:
            error("Invalid value for 'id' parameter")
    # Get the dataset object
    data = trans.sa_session.query(
        trans.app.model.HistoryDatasetAssociation).get(id)
    # only allow rerunning if user is allowed access to the dataset.
    if not (trans.user_is_admin()
            or trans.app.security_agent.can_access_dataset(
                trans.get_current_user_roles(), data.dataset)):
        error("You are not allowed to access this dataset")
    # Get the associated job, if any.
    job = data.creating_job
    if not job:
        raise Exception(
            "Failed to get job information for dataset hid %d" % data.hid)
    # Get the tool object
    tool_id = job.tool_id
    tool_version = job.tool_version
    try:
        # Load the tool (and its other loaded versions) so the user can pick
        # a derivation if the exact original version is gone.
        tool_version_select_field, tools, tool = self.__get_tool_components(
            tool_id,
            tool_version=tool_version,
            get_loaded_tools_by_lineage=False,
            set_selected=True)
        if (tool.id == job.tool_id or tool.old_id ==
                job.tool_id) and tool.version == job.tool_version:
            tool_id_version_message = ''
        elif tool.id == job.tool_id:
            if job.tool_version is None:
                # For some reason jobs don't always keep track of the tool version.
                tool_id_version_message = ''
            else:
                tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
                if len(tools) > 1:
                    tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
                else:
                    tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.'
        else:
            if len(tools) > 1:
                tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version
                tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.'
            else:
                tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % (
                    job.tool_id, job.tool_version)
                tool_id_version_message += 'currently available. You can rerun the job with this tool, which is a derivation of the original tool.'
        assert tool is not None, 'Requested tool has not been loaded.'
    except:
        # This is expected so not an exception.
        tool_id_version_message = ''
        error(
            "This dataset was created by an obsolete tool (%s). Can't re-run."
            % tool_id)
    # Can't rerun upload, external data sources, et cetera. Workflow compatible will proxy this for now
    if not tool.is_workflow_compatible:
        error("The '%s' tool does not currently support rerunning." %
              tool.name)
    # Get the job's parameters
    try:
        params_objects = job.get_param_values(trans.app, ignore_errors=True)
    except:
        raise Exception("Failed to get parameters for dataset id %d " %
                        data.id)
    upgrade_messages = tool.check_and_update_param_values(
        params_objects, trans, update_values=False)
    # Need to remap dataset parameters. Job parameters point to original
    # dataset used; parameter should be the analygous dataset in the
    # current history.
    history = trans.get_history()
    hda_source_dict = {}  # Mapping from HDA in history to source HDAs.
    for hda in history.datasets:
        source_hda = hda.copied_from_history_dataset_association
        while source_hda:  # should this check library datasets as well?
            # FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories,
            # but is still less than perfect when eg individual datasets are copied between histories
            if source_hda not in hda_source_dict or source_hda.hid == hda.hid:
                hda_source_dict[source_hda] = hda
            source_hda = source_hda.copied_from_history_dataset_association
    # Unpack unvalidated values to strings, they'll be validated when the
    # form is submitted (this happens when re-running a job that was
    # initially run by a workflow)
    # This needs to be done recursively through grouping parameters
    def rerun_callback(input, value, prefixed_name, prefixed_label):
        # Visitor: map each job-time dataset param to its copy in the
        # current history (via hda_source_dict) and stringify unvalidated
        # workflow values.
        if isinstance(value, UnvalidatedValue):
            return str(value)
        if isinstance(input, DataToolParameter):
            if isinstance(value, list):
                values = []
                for val in value:
                    if is_hashable(val):
                        if val in history.datasets:
                            values.append(val)
                        elif val in hda_source_dict:
                            values.append(hda_source_dict[val])
                return values
            if is_hashable( value ) and value not in history.datasets and value in hda_source_dict:
                return hda_source_dict[value]
    visit_input_values(tool.inputs, params_objects, rerun_callback)
    # Create a fake tool_state for the tool, with the parameters values
    state = tool.new_state(trans)
    state.inputs = params_objects
    # If the job failed and has dependencies, allow dependency remap
    if job.state == job.states.ERROR:
        try:
            if [
                    hda.dependent_jobs
                    for hda in [jtod.dataset for jtod in job.output_datasets]
                    if hda.dependent_jobs
            ]:
                state.rerun_remap_job_id = trans.app.security.encode_id(
                    job.id)
        except:
            # Job has no outputs?
            pass
    # create an incoming object from the original job's dataset-modified param objects
    incoming = {}
    params_to_incoming(incoming, tool.inputs, params_objects, trans.app)
    incoming["tool_state"] = galaxy.util.object_to_string(
        state.encode(tool, trans.app))
    template, vars = tool.handle_input(
        trans, incoming,
        old_errors=upgrade_messages)  # update new state with old parameters
    # Is the "add frame" stuff neccesary here?
    add_frame = AddFrameData()
    add_frame.debug = trans.debug
    if from_noframe is not None:
        add_frame.wiki_url = trans.app.config.wiki_url
        add_frame.from_noframe = True
    return trans.fill_template(
        template,
        history=history,
        toolbox=self.get_toolbox(),
        tool_version_select_field=tool_version_select_field,
        tool=tool,
        util=galaxy.util,
        add_frame=add_frame,
        tool_id_version_message=tool_id_version_message,
        **vars)
def tag_outputs(self, trans, id, **kwargs):
    """
    Display and process the "tag outputs" form for a stored workflow:
    POSTed ``<step_id>|otag|<output_name>`` flags become WorkflowOutput
    rows on the workflow's latest revision.

    :param trans: framework transaction (provides sa_session and user).
    :param id: stored workflow id (decoded by get_stored_workflow).
    :param kwargs: form fields; when non-empty, output flags are applied.
    :returns: the rendered ``workflow/tag_outputs.mako`` template.
    """
    stored = self.get_stored_workflow(trans, id, check_ownership=False)
    user = trans.get_user()
    # Non-owners may proceed only if the workflow was shared with them.
    if stored.user != user:
        if trans.sa_session.query(model.StoredWorkflowUserShareAssociation) \
                .filter_by(user=user, stored_workflow=stored).count() == 0:
            error("Workflow is not owned by or shared with current user")
    # Get the latest revision
    workflow = stored.latest_workflow
    # It is possible for a workflow to have 0 steps
    if len(workflow.steps) == 0:
        error(
            "Workflow cannot be tagged for outputs because it does not have any steps"
        )
    if workflow.has_cycles:
        error(
            "Workflow cannot be tagged for outputs because it contains cycles"
        )
    if workflow.has_errors:
        error(
            "Workflow cannot be tagged for outputs because of validation errors in some steps"
        )
    # Build the state for each step
    errors = {}
    has_upgrade_messages = False
    # has_errors is never used
    # has_errors = False
    if kwargs:
        # If kwargs were provided, the states for each step should have
        # been POSTed
        for step in workflow.steps:
            if step.type == 'tool':
                # Extract just the output flags for this step.
                # Field names look like "<step_id>|otag|<output_name>".
                # NOTE(review): 'l' is a poor variable name (flagged by
                # linters as ambiguous); left unchanged here.
                p = "%s|otag|" % step.id
                l = len(p)
                outputs = [
                    k[l:] for (k, v) in kwargs.items() if k.startswith(p)
                ]
                if step.workflow_outputs:
                    # Reconcile with existing rows: delete flags that were
                    # unchecked, and drop already-present names from
                    # `outputs` so they are not re-inserted below.
                    for existing_output in step.workflow_outputs:
                        if existing_output.output_name not in outputs:
                            trans.sa_session.delete(existing_output)
                        else:
                            outputs.remove(existing_output.output_name)
                for outputname in outputs:
                    m = model.WorkflowOutput(workflow_step_id=int(step.id),
                                             output_name=outputname)
                    trans.sa_session.add(m)
    # Prepare each step
    # Flush before injecting modules so the output changes above are
    # persisted first.
    trans.sa_session.flush()
    module_injector = WorkflowModuleInjector(trans)
    for step in workflow.steps:
        step.upgrade_messages = {}
        # Contruct modules
        module_injector.inject(step)
        if step.upgrade_messages:
            has_upgrade_messages = True
        if step.type == 'tool' or step.type is None:
            # Error dict
            if step.tool_errors:
                errors[step.id] = step.tool_errors
    # Render the form
    return trans.fill_template("workflow/tag_outputs.mako",
                               steps=workflow.steps,
                               workflow=stored,
                               has_upgrade_messages=has_upgrade_messages,
                               errors=errors,
                               incoming=kwargs)
def rerun( self, trans, id=None, from_noframe=None, job_id=None, **kwd ): """ Given a HistoryDatasetAssociation id, find the job and that created the dataset, extract the parameters, and display the appropriate tool form with parameters already filled in. """ if job_id: try: job_id = trans.security.decode_id( job_id ) job = trans.sa_session.query( trans.app.model.Job ).get( job_id ) except: error( "Invalid value for 'job_id' parameter" ) if not trans.user_is_admin(): for data_assoc in job.output_datasets: #only allow rerunning if user is allowed access to the dataset. if not trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data_assoc.dataset.dataset ): error( "You are not allowed to rerun this job" ) param_error_text = "Failed to get parameters for job id %d " % job_id else: if not id: error( "'id' parameter is required" ); try: id = int( id ) except: # it's not an un-encoded id, try to parse as encoded try: id = trans.security.decode_id( id ) except: error( "Invalid value for 'id' parameter" ) # Get the dataset object data = trans.sa_session.query( trans.app.model.HistoryDatasetAssociation ).get( id ) #only allow rerunning if user is allowed access to the dataset. if not ( trans.user_is_admin() or trans.app.security_agent.can_access_dataset( trans.get_current_user_roles(), data.dataset ) ): error( "You are not allowed to access this dataset" ) # Get the associated job, if any. 
job = data.creating_job if not job: raise Exception("Failed to get job information for dataset hid %d" % data.hid) param_error_text = "Failed to get parameters for dataset id %d " % data.id # Get the tool object tool_id = job.tool_id tool_version = job.tool_version try: tool_version_select_field, tools, tool = self.__get_tool_components( tool_id, tool_version=tool_version, get_loaded_tools_by_lineage=False, set_selected=True ) if ( tool.id == job.tool_id or tool.old_id == job.tool_id ) and tool.version == job.tool_version: tool_id_version_message = '' elif tool.id == job.tool_id: if job.tool_version == None: # For some reason jobs don't always keep track of the tool version. tool_id_version_message = '' else: tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version if len( tools ) > 1: tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.' else: tool_id_version_message += 'You can rerun the job with this tool version, which is a derivation of the original tool.' else: if len( tools ) > 1: tool_id_version_message = 'This job was initially run with tool version "%s", which is not currently available. ' % job.tool_version tool_id_version_message += 'You can rerun the job with the selected tool or choose another derivation of the tool.' else: tool_id_version_message = 'This job was initially run with tool id "%s", version "%s", which is not ' % ( job.tool_id, job.tool_version ) tool_id_version_message += 'currently available. You can rerun the job with this tool, which is a derivation of the original tool.' assert tool is not None, 'Requested tool has not been loaded.' except: # This is expected so not an exception. tool_id_version_message = '' error( "This dataset was created by an obsolete tool (%s). Can't re-run." % tool_id ) # Can't rerun upload, external data sources, et cetera. 
Workflow compatible will proxy this for now if not tool.is_workflow_compatible: error( "The '%s' tool does not currently support rerunning." % tool.name ) # Get the job's parameters try: params_objects = job.get_param_values( trans.app, ignore_errors = True ) except: raise Exception( param_error_text ) upgrade_messages = tool.check_and_update_param_values( params_objects, trans, update_values=False ) # Need to remap dataset parameters. Job parameters point to original # dataset used; parameter should be the analygous dataset in the # current history. history = trans.get_history() hda_source_dict = {} # Mapping from HDA in history to source HDAs. for hda in history.datasets: source_hda = hda.copied_from_history_dataset_association while source_hda:#should this check library datasets as well? #FIXME: could be multiple copies of a hda in a single history, this does a better job of matching on cloned histories, #but is still less than perfect when eg individual datasets are copied between histories if source_hda not in hda_source_dict or source_hda.hid == hda.hid: hda_source_dict[ source_hda ] = hda source_hda = source_hda.copied_from_history_dataset_association # Ditto for dataset collections. 
hdca_source_dict = {} for hdca in history.dataset_collections: source_hdca = hdca.copied_from_history_dataset_collection_association while source_hdca: if source_hdca not in hdca_source_dict or source_hdca.hid == hdca.hid: hdca_source_dict[ source_hdca ] = hdca source_hdca = source_hdca.copied_from_history_dataset_collection_association # Unpack unvalidated values to strings, they'll be validated when the # form is submitted (this happens when re-running a job that was # initially run by a workflow) #This needs to be done recursively through grouping parameters def rerun_callback( input, value, prefixed_name, prefixed_label ): if isinstance( value, UnvalidatedValue ): try: return input.to_html_value( value.value, trans.app ) except Exception, e: # Need to determine when (if ever) the to_html_value call could fail. log.debug( "Failed to use input.to_html_value to determine value of unvalidated parameter, defaulting to string: %s" % ( e ) ) return str( value ) if isinstance( input, DataToolParameter ): if isinstance(value,list): values = [] for val in value: if is_hashable( val ): if val in history.datasets: values.append( val ) elif val in hda_source_dict: values.append( hda_source_dict[ val ]) return values if is_hashable( value ) and value not in history.datasets and value in hda_source_dict: return hda_source_dict[ value ] elif isinstance( input, DataCollectionToolParameter ): if is_hashable( value ) and value not in history.dataset_collections and value in hdca_source_dict: return hdca_source_dict[ value ]