def rename( self, trans, id=None, name=None, **kwd ):
    """
    Rename one or more histories owned by the current user.

    With no ``id``, defaults to the current history (which must be saved).
    With ids but no matching list of new names, renders the rename form.
    Otherwise applies each new name and reports the outcome per history.
    """
    user = trans.get_user()
    if not id:
        # Default to the current history
        history = trans.get_history()
        if not history.user:
            return trans.show_error_message( "You must save your history before renaming it." )
        id = trans.security.encode_id( history.id )
    id = util.listify( id )
    name = util.listify( name )
    # Collect only histories that actually belong to the current user.
    histories = []
    cur_names = []
    for history_id in id:
        history = get_history( trans, history_id )
        if history and history.user_id == user.id:
            histories.append( history )
            cur_names.append( history.name )
    if not name or len( histories ) != len( name ):
        # No names submitted yet (or a mismatch) - show the rename form.
        return trans.fill_template( "/history/rename.mako", histories=histories )
    # Build the result message with a list + join instead of repeated
    # string concatenation; iterate with zip instead of range(len(...)).
    change_msgs = []
    for history, cur_name, new_name in zip( histories, cur_names, name ):
        if history.user_id != user.id:
            # Defensive: ownership was already filtered above.
            change_msgs.append( "<p>History: " + cur_name + " does not appear to belong to you.</p>" )
        elif new_name == history.name:
            change_msgs.append( "<p>History: " + cur_name + " is already named: " + new_name + "</p>" )
        elif new_name not in [ None, '', ' ' ]:
            # Escape to prevent stored XSS via the history name.
            new_name = escape( new_name )
            history.name = new_name
            history.flush()
            change_msgs.append( "<p>History: " + cur_name + " renamed to: " + new_name + "</p>" )
            trans.log_event( "History renamed: id: %s, renamed to: '%s'" % ( str( history.id ), new_name ) )
        else:
            change_msgs.append( "<p>You must specify a valid name for History: " + cur_name + "</p>" )
    return trans.show_message( "<p>%s" % "".join( change_msgs ), refresh_frames=['history'] )
def index_jobs_summary(self, trans, history_id, **kwd):
    """
    * GET /api/histories/{history_id}/jobs_summary

    Return detailed job-state information for HDAs or HDCAs.

    Warning: anyone who can guess an encoded ID may fetch job state -
    it is not treated as protected data, which keeps state polling for
    large histories and collections efficient.

    :type history_id: str
    :param history_id: encoded id string of the HDA's or the HDCA's History
    :type ids: str[]
    :param ids: encoded ids of job summary objects to return; if given,
        ``types`` must also be given with the same length.
    :type types: str[]
    :param types: object type for each element of ``ids`` - either
        Job or ImplicitCollectionJob.

    :rtype: dict[]
    :returns: an array of job summary object dictionaries.
    """
    ids = kwd.get("ids", None)
    types = kwd.get("types", None)
    if ids is not None:
        ids = util.listify(ids)
        types = util.listify(types)
    else:
        # ids and types travel together; neither or both.
        assert types is None
        # TODO: support summarizing the whole history when no ids given.
    states = fetch_job_states(self.app, trans.sa_session, ids, types)
    return [self.encode_all_ids(trans, state) for state in states]
def build_from_current_history(self, trans, job_ids=None, dataset_ids=None, dataset_collection_ids=None, workflow_name=None, dataset_names=None, dataset_collection_names=None):
    """Extract a workflow from the current history, or render the extraction form.

    Without a selection (no job/dataset ids or no workflow name), shows the
    form summarizing the history's jobs; otherwise creates the workflow and
    shows links to edit or run it.
    """
    user = trans.get_user()
    history = trans.get_history()
    if not user:
        return trans.show_error_message("Must be logged in to create workflows")
    nothing_selected = job_ids is None and dataset_ids is None
    if nothing_selected or workflow_name is None:
        jobs, warnings = summarize(trans)
        # Render the extraction form.
        return trans.fill_template(
            "workflow/build_from_current_history.mako",
            jobs=jobs,
            warnings=warnings,
            history=history
        )
    # A single selected dataset (or collection) name arrives as a plain
    # string rather than a list. xref #3247.
    dataset_names = util.listify(dataset_names)
    dataset_collection_names = util.listify(dataset_collection_names)
    stored_workflow = extract_workflow(
        trans,
        user=user,
        job_ids=job_ids,
        dataset_ids=dataset_ids,
        dataset_collection_ids=dataset_collection_ids,
        workflow_name=workflow_name,
        dataset_names=dataset_names,
        dataset_collection_names=dataset_collection_names
    )
    # Success message with edit/run links.
    workflow_id = trans.security.encode_id(stored_workflow.id)
    edit_url = url_for(controller='workflow', action='editor', id=workflow_id)
    run_url = url_for(controller='workflows', action='run', id=workflow_id)
    return trans.show_message(
        'Workflow "%s" created from current history. '
        'You can <a href="%s" target="_parent">edit</a> or <a href="%s" target="_parent">run</a> the workflow.'
        % (escape(workflow_name), edit_url, run_url))
def initiate_installation_process( self, install_tool_dependencies=False, install_repository_dependencies=True, no_changes=True, new_tool_panel_section_label=None ):
    """Kick off the asynchronous repository installation and return the repository ids.

    Scrapes the installation parameters out of the last rendered page and
    visits the Galaxy admin_toolshed manage_repositories endpoint with them.
    """
    html = self.last_page()
    # Since the installation process is by necessity asynchronous, we have to get the parameters to 'manually' initiate the
    # installation process. This regex will return the tool shed repository IDs in group(1), the encoded_kwd parameter in
    # group(2), and the reinstalling flag in group(3) and pass them to the manage_repositories method in the Galaxy
    # admin_toolshed controller.
    # Raw string: '\(' is an invalid escape in a plain string literal and
    # raises a DeprecationWarning/SyntaxWarning on modern Pythons.
    install_parameters = re.search( r'initiate_repository_installation\( "([^"]+)", "([^"]+)", "([^"]+)" \);', html )
    if install_parameters:
        iri_ids = install_parameters.group(1)
        # In some cases, the returned iri_ids are of the form: "[u'<encoded id>', u'<encoded id>']"
        # This regex ensures that non-hex characters are stripped out of the list, so that util.listify/decode_id
        # will handle them correctly. It's safe to pass the cleaned list to manage_repositories, because it can parse
        # comma-separated values.
        repository_ids = str( iri_ids )
        repository_ids = re.sub( '[^a-fA-F0-9,]+', '', repository_ids )
        encoded_kwd = install_parameters.group(2)
        reinstalling = install_parameters.group(3)
        url = '/admin_toolshed/manage_repositories?operation=install&tool_shed_repository_ids=%s&encoded_kwd=%s&reinstalling=%s' % \
            ( ','.join( util.listify( repository_ids ) ), encoded_kwd, reinstalling )
        self.visit_url( url )
        return util.listify( repository_ids )
def _set_permissions(self, trans, library_dataset, role_ids_dict):
    """Apply access/manage/modify role permissions to a library dataset.

    :param library_dataset: the LibraryDataset whose underlying dataset and
        library item permissions are updated
    :param role_ids_dict: dict with keys DATASET_ACCESS,
        DATASET_MANAGE_PERMISSIONS and LIBRARY_MODIFY mapping to role ids
        (list or CSV string); a DATASET_ACCESS of None makes the dataset public

    Invalid role ids are logged and skipped, never applied.
    """
    dataset = library_dataset.library_dataset_dataset_association.dataset
    new_access_roles_ids = role_ids_dict["DATASET_ACCESS"]
    new_manage_roles_ids = role_ids_dict["DATASET_MANAGE_PERMISSIONS"]
    new_modify_roles_ids = role_ids_dict["LIBRARY_MODIFY"]
    valid_roles_for_dataset, total_roles = trans.app.security_agent.get_valid_roles(trans, dataset)

    def _partition_roles(role_ids):
        # Split incoming role ids into (valid role objects, invalid ids) for
        # this dataset.  Factored out - the original repeated this loop three
        # times.  listify() accepts a list or a CSV string, making the
        # access-roles path consistent with the manage/modify paths.
        valid, invalid_ids = [], []
        for role_id in util.listify(role_ids):
            role = self.role_manager.get(trans, self.app, role_id)
            if role in valid_roles_for_dataset:
                valid.append(role)
            else:
                invalid_ids.append(role_id)
        return valid, invalid_ids

    # ACCESS DATASET ROLES
    if new_access_roles_ids is None:
        # No access roles means the dataset is public.
        trans.app.security_agent.make_dataset_public(dataset)
    else:
        valid_access_roles, invalid_access_roles_ids = _partition_roles(new_access_roles_ids)
        if len(invalid_access_roles_ids) > 0:
            log.warning("The following roles could not be added to the dataset access permission: " + str(invalid_access_roles_ids))
        access_permission = dict(access=valid_access_roles)
        trans.app.security_agent.set_dataset_permission(dataset, access_permission)
    # MANAGE DATASET ROLES
    valid_manage_roles, invalid_manage_roles_ids = _partition_roles(new_manage_roles_ids)
    if len(invalid_manage_roles_ids) > 0:
        log.warning("The following roles could not be added to the dataset manage permission: " + str(invalid_manage_roles_ids))
    manage_permission = {trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS: valid_manage_roles}
    trans.app.security_agent.set_dataset_permission(dataset, manage_permission)
    # MODIFY LIBRARY ITEM ROLES
    valid_modify_roles, invalid_modify_roles_ids = _partition_roles(new_modify_roles_ids)
    if len(invalid_modify_roles_ids) > 0:
        log.warning("The following roles could not be added to the dataset modify permission: " + str(invalid_modify_roles_ids))
    modify_permission = {trans.app.security_agent.permitted_actions.LIBRARY_MODIFY: valid_modify_roles}
    trans.app.security_agent.set_library_item_permission(library_dataset, modify_permission)
def create_role(self, trans, **kwd):
    """Create a new role (and optionally a same-named group) from the admin form.

    On a successful create this issues a redirect to the roles listing;
    otherwise (form not yet submitted, or validation failed) it re-renders
    the create form with the available users and groups.
    """
    params = util.Params(kwd)
    message = util.restore_text(params.get('message', ''))
    status = params.get('status', 'done')
    name = util.restore_text(params.get('name', ''))
    description = util.restore_text(params.get('description', ''))
    in_users = util.listify(params.get('in_users', []))
    out_users = util.listify(params.get('out_users', []))
    in_groups = util.listify(params.get('in_groups', []))
    out_groups = util.listify(params.get('out_groups', []))
    create_group_for_role = params.get('create_group_for_role', '')
    create_group_for_role_checked = CheckboxField.is_checked(create_group_for_role)
    # ok stays True unless validation fails; it gates the population of the
    # out_users/out_groups select lists below.
    ok = True
    if params.get('create_role_button', False):
        if not name or not description:
            message = "Enter a valid name and a description."
            status = 'error'
            ok = False
        elif trans.sa_session.query(trans.app.model.Role).filter(trans.app.model.Role.table.c.name == name).first():
            # Role names are unique.
            message = "Role names must be unique and a role with that name already exists, so choose another name."
            status = 'error'
            ok = False
        else:
            # Create the role
            role, num_in_groups = trans.app.security_agent.create_role(
                name, description, in_users, in_groups,
                create_group_for_role=create_group_for_role_checked)
            message = "Role '%s' has been created with %d associated users and %d associated groups. " \
                % (role.name, len(in_users), num_in_groups)
            if create_group_for_role_checked:
                message += 'One of the groups associated with this role is the newly created group with the same name.'
            # NOTE(review): there is no return here, so execution continues to
            # the template fill below after the redirect is set - confirm this
            # matches the framework's intended redirect behavior.
            trans.response.send_redirect(web.url_for(controller='admin',
                                                     action='roles',
                                                     message=util.sanitize_text(message),
                                                     status='done'))
    if ok:
        # Populate the "available" selects with all non-deleted users/groups.
        for user in trans.sa_session.query(trans.app.model.User) \
                .filter(trans.app.model.User.table.c.deleted == false()) \
                .order_by(trans.app.model.User.table.c.email):
            out_users.append((user.id, user.email))
        for group in trans.sa_session.query(trans.app.model.Group) \
                .filter(trans.app.model.Group.table.c.deleted == false()) \
                .order_by(trans.app.model.Group.table.c.name):
            out_groups.append((group.id, group.name))
    return trans.fill_template('/webapps/tool_shed/admin/dataset_security/role/role_create.mako',
                               name=name,
                               description=description,
                               in_users=in_users,
                               out_users=out_users,
                               in_groups=in_groups,
                               out_groups=out_groups,
                               create_group_for_role_checked=create_group_for_role_checked,
                               message=message,
                               status=status)
def handle_role_associations( app, role, repository, **kwd ):
    """Manage user/group associations for a repository's role.

    When the manage button was clicked, applies the submitted user/group
    associations (always forcing the repository owner into the role), then
    rebuilds the in/out user and group lists for rendering.

    :returns: dict with in_users, out_users, in_groups, out_groups,
        message and status for the template
    """
    sa_session = app.model.context.current
    message = escape( kwd.get( 'message', '' ) )
    status = kwd.get( 'status', 'done' )
    repository_owner = repository.user
    if kwd.get( 'manage_role_associations_button', False ):
        in_users_list = util.listify( kwd.get( 'in_users', [] ) )
        in_users = [ sa_session.query( app.model.User ).get( x ) for x in in_users_list ]
        # Make sure the repository owner is always associated with the repostory's admin role.
        owner_associated = False
        for user in in_users:
            if user.id == repository_owner.id:
                owner_associated = True
                break
        if not owner_associated:
            in_users.append( repository_owner )
            message += "The repository owner must always be associated with the repository's administrator role. "
            status = 'error'
        in_groups_list = util.listify( kwd.get( 'in_groups', [] ) )
        in_groups = [ sa_session.query( app.model.Group ).get( x ) for x in in_groups_list ]
        in_repositories = [ repository ]
        app.security_agent.set_entity_role_associations( roles=[ role ],
                                                         users=in_users,
                                                         groups=in_groups,
                                                         repositories=in_repositories )
        sa_session.refresh( role )
        message += "Role <b>%s</b> has been associated with %d users, %d groups and %d repositories. " % \
            ( str( role.name ), len( in_users ), len( in_groups ), len( in_repositories ) )
    # Rebuild the selection lists from the (possibly refreshed) role.
    in_users = []
    out_users = []
    in_groups = []
    out_groups = []
    for user in sa_session.query( app.model.User ) \
                          .filter( app.model.User.table.c.deleted==False ) \
                          .order_by( app.model.User.table.c.email ):
        if user in [ x.user for x in role.users ]:
            in_users.append( ( user.id, user.email ) )
        else:
            out_users.append( ( user.id, user.email ) )
    for group in sa_session.query( app.model.Group ) \
                           .filter( app.model.Group.table.c.deleted==False ) \
                           .order_by( app.model.Group.table.c.name ):
        if group in [ x.group for x in role.groups ]:
            in_groups.append( ( group.id, group.name ) )
        else:
            out_groups.append( ( group.id, group.name ) )
    associations_dict = dict( in_users=in_users,
                              out_users=out_users,
                              in_groups=in_groups,
                              out_groups=out_groups,
                              message=message,
                              status=status )
    return associations_dict
def get_quota_params(self, kwargs):
    """Normalize the quota-form values in ``kwargs`` and return the params.

    Text fields are un-escaped with restore_text and the user/group
    selections are coerced to lists.
    """
    params = self.get_params(kwargs)
    updates = {
        'name': util.restore_text(params.get('name', '')),
        'description': util.restore_text(params.get('description', '')),
        'amount': util.restore_text(params.get('amount', '').strip()),
        'operation': params.get('operation', ''),
        'default': params.get('default', ''),
    }
    # Selection lists may arrive as a single value or CSV string.
    for key in ('in_users', 'out_users', 'in_groups', 'out_groups'):
        updates[key] = util.listify(params.get(key, []))
    params.update(updates)
    return params
def undelete_repository( self, trans, **kwd ):
    """Undelete the repositories named by ``kwd['id']`` and redirect to the browse page.

    Marks each deleted repository as not deleted; repositories that were not
    deleted are skipped and reported.
    """
    params = util.Params( kwd )
    message = util.restore_text( params.get( 'message', '' ) )
    status = params.get( 'status', 'done' )
    id = kwd.get( 'id', None )
    if id:
        # Undeleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
        ids = util.listify( id )
        count = 0
        undeleted_repositories = ""
        for repository_id in ids:
            repository = suc.get_repository_in_tool_shed( trans, repository_id )
            if repository.deleted:
                repository.deleted = False
                trans.sa_session.add( repository )
                trans.sa_session.flush()
                count += 1
                undeleted_repositories += " %s" % repository.name
        if count:
            message = "Undeleted %d %s: %s" % ( count, inflector.cond_plural( count, "repository" ), undeleted_repositories )
        else:
            message = "No selected repositories were marked deleted, so they could not be undeleted."
    else:
        message = "No repository ids received for undeleting."
        status = 'error'
    # Propagate the computed status - it was hard-coded to 'done', so the
    # error branch above was rendered as a success.
    trans.response.send_redirect( web.url_for( controller='admin',
                                               action='browse_repositories',
                                               message=util.sanitize_text( message ),
                                               status=status ) )
def undelete_category( self, trans, **kwd ):
    """Undelete the categories named by ``kwd['id']`` and redirect to the category listing."""
    message = kwd.get( 'message', '' )
    status = kwd.get( 'status', 'done' )
    id = kwd.get( 'id', None )
    if id:
        ids = util.listify( id )
        count = 0
        undeleted_categories = ""
        for category_id in ids:
            category = suc.get_category( trans.app, category_id )
            if category.deleted:
                category.deleted = False
                trans.sa_session.add( category )
                trans.sa_session.flush()
                # Update the Tool Shed's repository registry.
                trans.app.repository_registry.add_category_entry( category )
                count += 1
                undeleted_categories += " %s" % category.name
        message = "Undeleted %d categories: %s" % ( count, undeleted_categories )
    else:
        message = "No category ids received for undeleting."
        status = 'error'
    # Propagate the computed status - it was hard-coded to 'done', so the
    # error branch above was rendered as a success.
    trans.response.send_redirect( web.url_for( controller='admin',
                                               action='manage_categories',
                                               message=util.sanitize_text( message ),
                                               status=status ) )
def mark_category_deleted( self, trans, **kwd ):
    """Mark the categories named by ``kwd['id']`` as deleted and redirect to the listing."""
    # TODO: We should probably eliminate the Category.deleted column since it really makes no
    # sense to mark a category as deleted (category names and descriptions can be changed instead).
    # If we do this, and the following 2 methods can be eliminated.
    message = kwd.get( 'message', '' )
    status = kwd.get( 'status', 'done' )
    id = kwd.get( 'id', None )
    if id:
        ids = util.listify( id )
        message = "Deleted %d categories: " % len( ids )
        for category_id in ids:
            category = suc.get_category( trans.app, category_id )
            category.deleted = True
            trans.sa_session.add( category )
            trans.sa_session.flush()
            # Update the Tool Shed's repository registry.
            trans.app.repository_registry.remove_category_entry( category )
            message += " %s " % category.name
    else:
        message = "No category ids received for deleting."
        status = 'error'
    # Propagate the computed status - it was hard-coded to 'done', so the
    # error branch above was rendered as a success.
    trans.response.send_redirect( web.url_for( controller='admin',
                                               action='manage_categories',
                                               message=util.sanitize_text( message ),
                                               status=status ) )
def invoke( self ):
    """Execute every step of the workflow, recording a WorkflowInvocation.

    Adds the invocation to the session but does not flush - the web
    controller may create several invocations in one request.
    """
    workflow_invocation = model.WorkflowInvocation()
    workflow_invocation.workflow = self.workflow
    # Web controller will populate state on each step before calling
    # invoke but not API controller. More work should be done to further
    # harmonize these methods going forward if possible - if possible
    # moving more web controller logic here.
    steps = self.workflow.steps
    if steps and not hasattr( steps[ 0 ], "state" ):
        self._populate_state( )
    for step in steps:
        for job in util.listify( self._invoke_step( step ) ):
            # Record one invocation-step row per job produced by the step.
            invocation_step = model.WorkflowInvocationStep()
            invocation_step.workflow_invocation = workflow_invocation
            invocation_step.workflow_step = step
            invocation_step.job = job
    # All jobs ran successfully, so we can save now
    self.trans.sa_session.add( workflow_invocation )
    # Not flushing in here, because web controller may create multiple
    # invokations.
    return self.outputs
def list_shared( self, trans, **kwargs ):
    """List histories shared with current user by others"""
    # NOTE(review): msg is read but never used below - confirm whether it
    # should be passed through to the grid.
    msg = util.restore_text( kwargs.get( 'msg', '' ) )
    status = message = None
    if 'operation' in kwargs:
        ids = util.listify( kwargs.get( 'id', [] ) )
        operation = kwargs['operation'].lower()
        if operation == "clone":
            if not ids:
                message = "Select a history to clone"
                return self.shared_list_grid( trans, status='error', message=message, **kwargs )
            # When cloning shared histories, only copy active datasets
            new_kwargs = { 'clone_choice' : 'active' }
            id = kwargs.get( 'id', [] )
            return self.clone( trans, id, **new_kwargs )
        elif operation == 'unshare':
            if not ids:
                message = "Select a history to unshare"
                return self.shared_list_grid( trans, status='error', message=message, **kwargs )
            histories = [ get_history( trans, history_id ) for history_id in ids ]
            for history in histories:
                # Current user is the user with which the histories were shared
                association = trans.app.model.HistoryUserShareAssociation.filter_by( user=trans.user, history=history ).one()
                # Legacy assignmapper-style API: delete/flush on the mapped object.
                association.delete()
                association.flush()
            message = "Unshared %d shared histories" % len( ids )
            status = 'done'
    # Render the list view
    return self.shared_list_grid( trans, status=status, message=message, **kwargs )
def request_type_permissions(self, trans, **kwd):
    """Render (and optionally update) the role permissions for a request type.

    An undecodable/unknown id redirects back to the request-type listing.
    """
    params = util.Params(kwd)
    message = util.restore_text(params.get('message', ''))
    status = params.get('status', 'done')
    request_type_id = kwd.get('id', '')
    try:
        request_type = trans.sa_session.query(trans.model.RequestType).get(trans.security.decode_id(request_type_id))
    except Exception:
        # Narrowed from a bare except, which also swallowed SystemExit /
        # KeyboardInterrupt; a malformed or missing id lands here.
        return invalid_id_redirect(trans, 'request_type', request_type_id, 'request type', action='browse_request_types')
    roles = trans.sa_session.query(trans.model.Role) \
        .filter(trans.model.Role.table.c.deleted == false()) \
        .order_by(trans.model.Role.table.c.name)
    if params.get('update_roles_button', False):
        # Build a {permitted_action: [roles]} dict from the submitted form.
        permissions = {}
        for k, v in trans.model.RequestType.permitted_actions.items():
            in_roles = [trans.sa_session.query(trans.model.Role).get(x) for x in util.listify(params.get(k + '_in', []))]
            permissions[trans.app.security_agent.get_action(v.action)] = in_roles
        trans.app.security_agent.set_request_type_permissions(request_type, permissions)
        trans.sa_session.refresh(request_type)
        message = "Permissions updated for request type '%s'" % request_type.name
    return trans.fill_template('/admin/request_type/request_type_permissions.mako',
                               request_type=request_type,
                               roles=roles,
                               status=status,
                               message=message)
def list( self, trans, *args, **kwargs ):
    """ List user's pages. """
    # Handle operation
    if 'operation' in kwargs and 'id' in kwargs:
        session = trans.sa_session
        operation = kwargs['operation'].lower()
        for encoded_id in util.listify( kwargs['id'] ):
            page = session.query( model.Page ).get( self.decode_id( encoded_id ) )
            if operation == "delete":
                page.deleted = True
            if operation == "share or publish":
                return self.sharing( trans, **kwargs )
        session.flush()
    # HACK: to prevent the insertion of an entire html document inside another
    kwargs[ 'embedded' ] = True
    # Build grid HTML.
    grid = self._page_list( trans, *args, **kwargs )
    # Pages other users have shared with the current user.
    shared_by_others = ( trans.sa_session.query( model.PageUserShareAssociation )
                         .filter_by( user=trans.get_user() )
                         .join( model.Page.table )
                         .filter( model.Page.deleted == false() )
                         .order_by( desc( model.Page.update_time ) )
                         .all() )
    # Render grid wrapped in panels
    return trans.fill_template( "page/index.mako", embedded_grid=grid, shared_by_others=shared_by_others )
def purge_category( self, trans, **kwd ):
    """Purge previously-deleted categories and redirect to the category listing.

    Purging a deleted Category deletes all of its
    RepositoryCategoryAssociations; categories that are not deleted are
    skipped.
    """
    # This method should only be called for a Category that has previously been deleted.
    message = kwd.get( 'message', '' )
    status = kwd.get( 'status', 'done' )
    id = kwd.get( 'id', None )
    if id:
        ids = util.listify( id )
        count = 0
        purged_categories = ""
        for category_id in ids:
            category = suc.get_category( trans.app, category_id )
            if category.deleted:
                # Delete RepositoryCategoryAssociations
                for rca in category.repositories:
                    trans.sa_session.delete( rca )
                trans.sa_session.flush()
                # count was never incremented before, so the message always
                # reported 0 purged categories.
                count += 1
                purged_categories += " %s " % category.name
        message = "Purged %d categories: %s" % ( count, purged_categories )
    else:
        message = "No category ids received for purging."
        status = 'error'
    # Propagate the computed status instead of hard-coding 'done'.
    trans.response.send_redirect( web.url_for( controller='admin',
                                               action='manage_categories',
                                               message=util.sanitize_text( message ),
                                               status=status ) )
def tool_search( self, trans, **kwd ):
    """Searches the tool database and returns data for any tool
    whose text matches the query.
    Data are returned in JSON format.
    """
    query = kwd.get( 'query', '' )
    tags = listify( kwd.get( 'tags[]', [] ) )
    trans.log_action( trans.get_user(), "tool_search.search", "", { "query" : query, "tags" : tags } )
    results = []
    if tags:
        # Collect ids of all tools carrying any of the selected tags.
        tags = trans.sa_session.query( trans.app.model.Tag ).filter( trans.app.model.Tag.name.in_( tags ) ).all()
        for tagged_tool_il in [ tag.tagged_tools for tag in tags ]:
            for tagged_tool in tagged_tool_il:
                if tagged_tool.tool_id not in results:
                    results.append( tagged_tool.tool_id )
        if trans.user:
            # Remember the tag selection in the user's preferences.
            trans.user.preferences['selected_tool_tags'] = ','.join( [ tag.name for tag in tags ] )
            trans.sa_session.flush()
    elif trans.user:
        trans.user.preferences['selected_tool_tags'] = ''
        trans.sa_session.flush()
    if len( query ) > 2:
        search_results = trans.app.toolbox_search.search( query )
        if 'tags[]' in kwd:
            # Intersect text hits with the tag hits.  A list comprehension is
            # used instead of filter(): on Python 3, filter() returns a lazy
            # iterator which is not JSON-serializable.
            results = [ tool_id for tool_id in search_results if tool_id in results ]
        else:
            results = search_results
    return results
def run( self, transfer_jobs ):
    """
    This method blocks, so if invoking the transfer manager ever starts
    taking too long, we should move it to a thread.  However, the
    transfer_manager will either daemonize or return after submitting to a
    running daemon, so it should be fairly quick to return.
    """
    transfer_jobs = listify( transfer_jobs )
    printable_tj_ids = ', '.join( [ str( tj.id ) for tj in transfer_jobs ] )
    log.debug( 'Initiating transfer job(s): %s' % printable_tj_ids )
    # Set all jobs running before spawning, or else updating the state may
    # clobber a state change performed by the worker.
    for tj in transfer_jobs:
        tj.state = tj.states.RUNNING
    self.sa_session.add_all( transfer_jobs )
    self.sa_session.flush()
    for tj in transfer_jobs:
        # The transfer script should daemonize fairly quickly - if this is
        # not the case, this process will need to be moved to a
        # non-blocking method.
        cmd = '%s %s' % ( self.command, tj.id )
        log.debug( 'Transfer command is: %s' % cmd )
        p = subprocess.Popen( cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
        # communicate() drains the pipe while waiting; the previous
        # wait()-then-read() could deadlock if the child filled the pipe
        # buffer before exiting.
        output = p.communicate()[0]
        if p.returncode != 0:
            log.error( 'Spawning transfer job failed: %s: %s' % ( tj.id, output ) )
            tj.state = tj.states.ERROR
            tj.info = 'Spawning transfer job failed: %s' % output.splitlines()[-1]
            self.sa_session.add( tj )
            self.sa_session.flush()
def parse_directories_setting( self, galaxy_root, directories_setting ):
    """
    Parse the ``directories_setting`` into a list of relative or absolute
    filesystem paths that will be searched to discover plugins.

    :type   galaxy_root:    string
    :param  galaxy_root:    the root path of this galaxy installation
    :type   directories_setting: string (default: None)
    :param  directories_setting: the filesystem path (or paths)
        to search for plugins. Can be CSV string of paths. Will be treated as
        absolute if a path starts with '/', relative otherwise.
    :rtype:                 list of strings
    :returns:               list of filesystem paths
    """
    directories = []
    if not directories_setting:
        return directories
    for directory in util.listify( directories_setting ):
        directory = directory.strip()
        if not directory.startswith( '/' ):
            # Relative paths are resolved against the Galaxy root.
            directory = os.path.join( galaxy_root, directory )
        if not os.path.exists( directory ):
            # Missing directories are skipped, not fatal.
            # logger.warn is a deprecated alias; use warning().
            log.warning( '%s, directory not found: %s', self, directory )
            continue
        directories.append( directory )
    return directories
def list( self, trans, *args, **kwargs ):
    """List the current user's visualizations and handle grid operations."""
    # Handle operation
    if 'operation' in kwargs and 'id' in kwargs:
        session = trans.sa_session
        operation = kwargs['operation'].lower()
        for encoded_id in util.listify( kwargs['id'] ):
            item = session.query( model.Visualization ).get( trans.security.decode_id( encoded_id ) )
            if operation == "delete":
                item.deleted = True
            if operation == "share or publish":
                return self.sharing( trans, **kwargs )
            if operation == "copy":
                self.copy( trans, **kwargs )
        session.flush()
    # Visualizations other users have shared with the current user.
    shared_by_others = ( trans.sa_session.query( model.VisualizationUserShareAssociation )
                         .filter_by( user=trans.get_user() )
                         .join( model.Visualization.table )
                         .filter( model.Visualization.deleted == False )
                         .order_by( desc( model.Visualization.update_time ) )
                         .all() )
    kwargs[ 'embedded' ] = True
    grid = self._user_list_grid( trans, *args, **kwargs )
    return trans.fill_template( "visualization/list.mako", embedded_grid=grid, shared_by_others=shared_by_others )
def delete_repository( self, trans, **kwd ):
    """Mark the repositories named by ``kwd['id']`` as deleted and redirect.

    Also marks every installable revision of each repository as not
    installable.
    """
    message = kwd.get( 'message', '' )
    status = kwd.get( 'status', 'done' )
    id = kwd.get( 'id', None )
    if id:
        # Deleting multiple items is currently not allowed (allow_multiple=False), so there will only be 1 id.
        ids = util.listify( id )
        count = 0
        deleted_repositories = ""
        for repository_id in ids:
            repository = suc.get_repository_in_tool_shed( trans, repository_id )
            if repository:
                if not repository.deleted:
                    # Mark all installable repository_metadata records as not installable.
                    for repository_metadata in repository.downloadable_revisions:
                        repository_metadata.downloadable = False
                        trans.sa_session.add( repository_metadata )
                    repository.deleted = True
                    trans.sa_session.add( repository )
                    trans.sa_session.flush()
                    count += 1
                    deleted_repositories += " %s " % repository.name
        if count:
            # Pluralize on count (the number actually deleted), not len(ids),
            # matching undelete_repository.
            message = "Deleted %d %s: %s" % ( count, inflector.cond_plural( count, "repository" ), deleted_repositories )
        else:
            message = "All selected repositories were already marked deleted."
    else:
        message = "No repository ids received for deleting."
        status = 'error'
    trans.response.send_redirect( web.url_for( controller='admin',
                                               action='browse_repositories',
                                               message=util.sanitize_text( message ),
                                               status=status ) )
def handle_library_params(trans, params, folder_id, replace_dataset=None):
    """Collect library-upload form values into a Bunch.

    Gathers the target folder, optional template field contents and the
    selected roles from ``params``.
    """
    # FIXME: the received params has already been parsed by util.Params() by the time it reaches here,
    # so no complex objects remain. This is not good because it does not allow for those objects to be
    # manipulated here. The received params should be the original kwd from the initial request.
    library_bunch = util.bunch.Bunch()
    library_bunch.replace_dataset = replace_dataset
    library_bunch.message = params.get('ldda_message', '')
    # Template field contents keyed by field name.
    library_bunch.template_field_contents = {}
    template_id = params.get('template_id', None)
    decoded_folder_id = trans.security.decode_id(folder_id)
    library_bunch.folder = trans.sa_session.query(trans.app.model.LibraryFolder).get(decoded_folder_id)
    # We are inheriting the folder's info_association, so we may have received inherited contents or we may have redirected
    # here after the user entered template contents ( due to errors ).
    if template_id in [None, 'None']:
        library_bunch.template = None
    else:
        library_bunch.template = trans.sa_session.query(trans.app.model.FormDefinition).get(template_id)
        for field in library_bunch.template.fields:
            field_name = field['name']
            if params.get(field_name, False):
                library_bunch.template_field_contents[field_name] = util.restore_text(params.get(field_name, ''))
    role_ids = util.listify(params.get('roles', []))
    library_bunch.roles = [trans.sa_session.query(trans.app.model.Role).get(role_id) for role_id in role_ids]
    return library_bunch
def handle_library_params( trans, params, folder_id, replace_dataset=None ):
    """Collect library-upload form values into a Bunch (positional-field variant).

    Template field contents are collected as an ordered list keyed by the
    field index ('field_0', 'field_1', ...).
    """
    library_bunch = util.bunch.Bunch()
    library_bunch.replace_dataset = replace_dataset
    library_bunch.message = params.get( 'message', '' )
    library_bunch.template_field_contents = []
    template_id = params.get( 'template_id', None )
    library_bunch.folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder_id )
    # We are inheriting the folder's info_association, so we did not
    # receive any inherited contents, but we may have redirected here
    # after the user entered template contents ( due to errors ).
    if template_id in [ None, 'None' ]:
        library_bunch.template = None
    else:
        library_bunch.template = trans.sa_session.query( trans.app.model.FormDefinition ).get( template_id )
        for field_index, _ in enumerate( library_bunch.template.fields ):
            field_name = 'field_%i' % field_index
            if params.get( field_name, False ):
                library_bunch.template_field_contents.append( util.restore_text( params.get( field_name, '' ) ) )
    role_ids = util.listify( params.get( 'roles', [] ) )
    library_bunch.roles = [ trans.sa_session.query( trans.app.model.Role ).get( role_id ) for role_id in role_ids ]
    return library_bunch
def build_initial_query(self, trans, **kwd):
    """Build the grid's initial ToolShedRepository query.

    Priority: (1) an explicit tool_shed_repository_ids selection,
    (2) all repositories currently in an in-progress installation state,
    (3) fall back to repositories in the NEW state.
    """
    clause_list = []
    tool_shed_repository_ids = kwd.get('tool_shed_repository_ids', None)
    if tool_shed_repository_ids:
        if isinstance(tool_shed_repository_ids, string_types):
            try:
                # kwd['tool_shed_repository_ids'] may be a json dump of repo ids like u'['aebaa141e7243ebf']'
                tool_shed_repository_ids = json.loads(tool_shed_repository_ids)
            except ValueError:
                # Not JSON - treat it as a plain (possibly CSV) id string.
                pass
        tool_shed_repository_ids = util.listify(tool_shed_repository_ids)
        for tool_shed_repository_id in tool_shed_repository_ids:
            clause_list.append(self.model_class.table.c.id == trans.security.decode_id(tool_shed_repository_id))
        if clause_list:
            return trans.install_model.context.query(self.model_class) \
                .filter(or_(*clause_list))
    # No explicit selection: gather repositories in any in-progress
    # installation state.
    for tool_shed_repository in trans.install_model.context.query(self.model_class) \
            .filter(self.model_class.table.c.deleted == false()):
        if tool_shed_repository.status in [trans.install_model.ToolShedRepository.installation_status.NEW,
                                           trans.install_model.ToolShedRepository.installation_status.CLONING,
                                           trans.install_model.ToolShedRepository.installation_status.SETTING_TOOL_VERSIONS,
                                           trans.install_model.ToolShedRepository.installation_status.INSTALLING_TOOL_DEPENDENCIES,
                                           trans.install_model.ToolShedRepository.installation_status.LOADING_PROPRIETARY_DATATYPES]:
            clause_list.append(self.model_class.table.c.id == tool_shed_repository.id)
    if clause_list:
        return trans.install_model.context.query(self.model_class) \
            .filter(or_(*clause_list))
    # Nothing in progress: default to repositories in the NEW state.
    return trans.install_model.context.query(self.model_class) \
        .filter(self.model_class.table.c.status == trans.install_model.ToolShedRepository.installation_status.NEW)
def undelete_category(self, trans, **kwd):
    """Undelete the categories named by ``kwd['id']`` and redirect to the category listing."""
    message = escape(kwd.get("message", ""))
    status = "done"
    id = kwd.get("id", None)
    if id:
        ids = util.listify(id)
        count = 0
        undeleted_categories = ""
        for category_id in ids:
            category = suc.get_category(trans.app, category_id)
            if category.deleted:
                category.deleted = False
                trans.sa_session.add(category)
                trans.sa_session.flush()
                # Update the Tool Shed's repository registry.
                trans.app.repository_registry.add_category_entry(category)
                count += 1
                undeleted_categories += " %s" % category.name
        message = "Undeleted %d categories: %s" % (count, escape(undeleted_categories))
    else:
        message = "No category ids received for undeleting."
        # Report the missing-ids case as an error (was hard-coded to 'done'),
        # consistent with delete_repository_metadata.
        status = "error"
    trans.response.send_redirect(
        web.url_for(controller="admin", action="manage_categories", message=util.sanitize_text(message), status=status)
    )
def __init__(self, step_proxy, cwl_input):
    """
    Capture the source/name mapping (and scatter membership) for a single
    CWL workflow step input.

    :raises MessageException: when the input defines neither a source, a
        valueFrom, nor a default value.
    """
    self._cwl_input = cwl_input
    self.step_proxy = step_proxy
    self.workflow_proxy = step_proxy._workflow_proxy

    cwl_input_id = cwl_input["id"]
    cwl_source_id = cwl_input.get("source", None)
    if cwl_source_id is None and "valueFrom" not in cwl_input and "default" not in cwl_input:
        msg = "Workflow step input must define a source, a valueFrom, or a default value. Obtained [%s]." % cwl_input
        raise MessageException(msg)

    assert cwl_input_id
    step_name, input_name = split_step_references(
        cwl_input_id,
        multiple=False,
        workflow_id=step_proxy.cwl_workflow_id
    )
    self.step_name = step_name
    self.input_name = input_name
    self.cwl_input_id = cwl_input_id
    self.cwl_source_id = cwl_source_id

    # An input is scattered when its (unqualified) name appears in the
    # step's "scatter" declaration.
    scatter_targets = [
        split_step_references(entry, multiple=False, workflow_id=step_proxy.cwl_workflow_id)[1]
        for entry in listify(step_proxy._step.tool.get("scatter", []))
    ]
    self.scatter = self.input_name in scatter_targets
def delete_repository_metadata(self, trans, **kwd):
    """
    Delete the RepositoryMetadata records for the received encoded ids,
    then redirect to the metadata browser with a status message.
    """
    message = escape(kwd.get("message", ""))
    status = kwd.get("status", "done")
    id = kwd.get("id", None)
    if not id:
        message = "No repository metadata ids received for deleting."
        status = "error"
    else:
        ids = util.listify(id)
        count = 0
        for metadata_id in ids:
            record = metadata_util.get_repository_metadata_by_id(trans.app, metadata_id)
            trans.sa_session.delete(record)
            trans.sa_session.flush()
            count += 1
        if count:
            message = "Deleted %d repository metadata %s" % (count, inflector.cond_plural(len(ids), "record"))
    trans.response.send_redirect(
        web.url_for(
            controller="admin",
            action="browse_repository_metadata",
            message=util.sanitize_text(message),
            status=status,
        ))
def undelete_user(self, trans, **kwd):
    """
    Undelete the users identified by the encoded ids in ``kwd['id']`` and
    redirect to the users grid with a status message.

    Fix: the original issued an error redirect when no ids were received, or
    when a user was not actually deleted, but then FELL THROUGH -- a
    not-yet-deleted user was still counted and "undeleted", and the final
    success redirect clobbered the error.  Each error branch now returns.
    """
    id = kwd.get('id', None)
    if not id:
        message = "No user ids received for undeleting"
        trans.response.send_redirect(web.url_for(controller='admin',
                                                 action='users',
                                                 message=message,
                                                 status='error'))
        return
    ids = util.listify(id)
    count = 0
    undeleted_users = ""
    for user_id in ids:
        user = get_user(trans, user_id)
        if not user.deleted:
            # Stop and report rather than silently re-"undeleting" this user.
            message = "User '%s' has not been deleted, so it cannot be undeleted." % user.email
            trans.response.send_redirect(web.url_for(controller='admin',
                                                     action='users',
                                                     message=util.sanitize_text(message),
                                                     status='error'))
            return
        user.deleted = False
        trans.sa_session.add(user)
        trans.sa_session.flush()
        count += 1
        undeleted_users += " %s" % user.email
    message = "Undeleted %d users: %s" % (count, undeleted_users)
    trans.response.send_redirect(web.url_for(controller='admin',
                                             action='users',
                                             message=util.sanitize_text(message),
                                             status='done'))
def _purge_quota( self, quota, params ):
    """
    Purge one or more previously-deleted quotas.

    Purging a deleted Quota deletes all of the following from the database:
    - UserQuotaAssociations where quota_id == Quota.id
    - GroupQuotaAssociations where quota_id == Quota.id

    :param quota: a Quota or list of Quotas, all of which must already be deleted
    :param params: unused; kept for interface compatibility with sibling actions
    :returns: a human-readable summary message
    :raises ActionInputError: if any received quota has not been deleted
    """
    quotas = util.listify( quota )
    # Validate first: refuse to purge anything that is not already deleted.
    not_deleted = [ q.name for q in quotas if not q.deleted ]
    if len( not_deleted ) == 1:
        raise ActionInputError( "Quota '%s' has not been deleted, so it cannot be purged." % ( not_deleted[0] ) )
    elif len( not_deleted ) > 1:
        # Fix: the original message said "cannot be undeleted" for a purge operation.
        raise ActionInputError( "Quotas have not been deleted so they cannot be purged: " + ', '.join( not_deleted ) )
    names = []
    for q in quotas:
        # Delete UserQuotaAssociations
        for uqa in q.users:
            self.sa_session.delete( uqa )
        # Delete GroupQuotaAssociations
        for gqa in q.groups:
            self.sa_session.delete( gqa )
        names.append( q.name )
    self.sa_session.flush()
    return "Purged %d quotas: %s" % ( len( quotas ), ', '.join( names ) )
def invoke( self ):
    """
    Invoke the remaining steps of ``self.workflow_invocation``.

    Each remaining step is invoked in order; every job a step produces is
    recorded as a WorkflowInvocationStep.  A step that is not ready yet
    raises modules.DelayedWorkflowEvaluation and is marked delayed so a
    later scheduling pass can retry it.

    :returns: the progress outputs collected so far (``self.progress.outputs``).
    """
    workflow_invocation = self.workflow_invocation
    remaining_steps = self.progress.remaining_steps()
    delayed_steps = False
    for step in remaining_steps:
        step_timer = ExecutionTimer()
        jobs = None
        try:
            # Raises DelayedWorkflowEvaluation when an implicit dependency
            # of this step has not completed yet.
            self.__check_implicitly_dependent_steps(step)
            jobs = self._invoke_step( step )
            # A step may yield zero, one, or many jobs; record an invocation
            # step even when no job was produced ([None]).
            for job in (util.listify( jobs ) or [None]):
                # Record invocation
                workflow_invocation_step = model.WorkflowInvocationStep()
                workflow_invocation_step.workflow_invocation = workflow_invocation
                workflow_invocation_step.workflow_step = step
                workflow_invocation_step.job = job
        except modules.DelayedWorkflowEvaluation:
            # Remember that at least one step was delayed so the invocation
            # stays schedulable (READY) instead of being marked SCHEDULED.
            delayed_steps = True
            self.progress.mark_step_outputs_delayed( step )
        log.debug("Workflow step %s invoked %s" % (step.id, step_timer))
    if delayed_steps:
        state = model.WorkflowInvocation.states.READY
    else:
        state = model.WorkflowInvocation.states.SCHEDULED
    workflow_invocation.state = state
    # All jobs ran successfully, so we can save now
    self.trans.sa_session.add( workflow_invocation )
    # Not flushing in here, because web controller may create multiple
    # invocations.
    return self.progress.outputs
def create_quota(self, payload: dict, decode_id=None) -> Tuple[model.Quota, str]:
    """
    Create a new Quota from the validated ``payload``.

    For a default quota the DefaultQuotaAssociation is created; otherwise
    UserQuotaAssociations and GroupQuotaAssociations are created for the
    supplied user/group ids (decoded with ``decode_id`` when provided).

    :param payload: dict matching CreateQuotaParams (name, description,
        amount, operation, default, in_users, in_groups)
    :param decode_id: optional callable to decode encoded user/group ids;
        when None the ids are used as-is
    :returns: (new Quota, human-readable status message)
    :raises ActionInputError: on duplicate name, unparseable amount,
        invalid operation, or invalid user/group ids
    """
    params = CreateQuotaParams.parse_obj(payload)
    create_amount = self._parse_amount(params.amount)
    if self.sa_session.query(model.Quota).filter(model.Quota.name == params.name).first():
        raise ActionInputError("Quota names must be unique and a quota with that name already exists, please choose another name.")
    elif create_amount is False:
        # _parse_amount signals "could not parse" with False (None means unlimited).
        raise ActionInputError("Unable to parse the provided amount.")
    elif params.operation not in model.Quota.valid_operations:
        raise ActionInputError("Enter a valid operation.")
    elif params.default != DefaultQuotaValues.NO and params.operation != QuotaOperation.EXACT:
        raise ActionInputError("Operation for a default quota must be '='.")
    elif create_amount is None and params.operation != QuotaOperation.EXACT:
        raise ActionInputError("Operation for an unlimited quota must be '='.")
    # Create the quota
    quota = model.Quota(name=params.name, description=params.description, amount=create_amount, operation=params.operation)
    self.sa_session.add(quota)
    # If this is a default quota, create the DefaultQuotaAssociation
    if params.default != DefaultQuotaValues.NO:
        self.quota_agent.set_default_quota(params.default, quota)
        message = f"Default quota '{quota.name}' has been created."
    else:
        # Create the UserQuotaAssociations
        in_users = [self.sa_session.query(model.User).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_users)]
        in_groups = [self.sa_session.query(model.Group).get(decode_id(x) if decode_id else x) for x in util.listify(params.in_groups)]
        # A None entry means a lookup failed, i.e. an invalid id was supplied.
        if None in in_users:
            raise ActionInputError("One or more invalid user id has been provided.")
        for user in in_users:
            uqa = model.UserQuotaAssociation(user, quota)
            self.sa_session.add(uqa)
        # Create the GroupQuotaAssociations
        if None in in_groups:
            raise ActionInputError("One or more invalid group id has been provided.")
        for group in in_groups:
            gqa = model.GroupQuotaAssociation(group, quota)
            self.sa_session.add(gqa)
        message = f"Quota '{quota.name}' has been created with {len(in_users)} associated users and {len(in_groups)} associated groups."
    self.sa_session.flush()
    return quota, message
def index(self, trans, history_id, ids=None, v=None, **kwd):
    """
    index( self, trans, history_id, ids=None, **kwd )
    * GET /api/histories/{history_id}/contents
        return a list of HDA data for the history with the given ``id``

    .. note:: Anonymous users are allowed to get their current history contents

    If ids is not given, returns a list of *summary* objects for every item
    associated with the given ``history_id``; if ids is given, returns a
    *more complete* json object for each item in the ids list.

    :type   history_id: str
    :param  history_id: encoded id string of the HDA's History
    :type   ids:        str
    :param  ids:        (optional) a comma separated list of encoded `HDA` ids
    :param  types:      (optional) kinds of contents to index ('dataset'
                        and/or 'dataset_collection')
    :type   types:      str
    :rtype:     list
    :returns:   dictionaries containing summary or detailed HDA information

    Fix: the decoded ids are now materialized into a list -- under Python 3,
    ``map()`` returns a one-shot iterator that would be exhausted after a
    single pass by downstream consumers.
    """
    if v == 'dev':
        return self.__index_v2(trans, history_id, **kwd)
    rval = []
    history = self.history_manager.get_accessible(
        self.decode_id(history_id), trans.user, current_history=trans.history)
    # Allow passing in type or types - for continuity rest of methods
    # take in type - but this one can be passed multiple types and
    # type=dataset,dataset_collection is a bit silly.
    types = kwd.get('type', kwd.get('types', None)) or []
    if types:
        types = util.listify(types)
    else:
        types = ['dataset', 'dataset_collection']
    contents_kwds = {'types': types}
    if ids:
        # Materialize to a list so it survives multiple iterations (py3 map
        # is a one-shot iterator).
        contents_kwds['ids'] = [self.decode_id(encoded_id) for encoded_id in ids.split(',')]
        # If explicit ids given, always used detailed result.
        details = 'all'
    else:
        contents_kwds['deleted'] = kwd.get('deleted', None)
        contents_kwds['visible'] = kwd.get('visible', None)
        # details param allows a mixed set of summary and detailed hdas
        # Ever more convoluted due to backwards compat..., details
        # should be considered deprecated in favor of more specific
        # dataset_details (and to be implemented dataset_collection_details).
        details = kwd.get('details', None) or kwd.get('dataset_details', None) or []
        if details and details != 'all':
            details = util.listify(details)
    for content in history.contents_iter(**contents_kwds):
        encoded_content_id = trans.security.encode_id(content.id)
        detailed = details == 'all' or (encoded_content_id in details)
        if isinstance(content, trans.app.model.HistoryDatasetAssociation):
            view = 'detailed' if detailed else 'summary'
            hda_dict = self.hda_serializer.serialize_to_view(
                content, view=view, user=trans.user, trans=trans)
            rval.append(hda_dict)
        elif isinstance(content, trans.app.model.HistoryDatasetCollectionAssociation):
            view = 'element' if detailed else 'collection'
            collection_dict = self.__collection_dict(trans, content, view=view)
            rval.append(collection_dict)
    return rval
def __index_v2(self, trans, history_id, **kwd):
    """
    index( self, trans, history_id, **kwd )
    * GET /api/histories/{history_id}/contents
        return a list of HDA data for the history with the given ``id``

    .. note:: Anonymous users are allowed to get their current history contents

    :type   history_id: str
    :param  history_id: encoded id string of the HDA's History
    :rtype:     list
    :returns:   dictionaries containing summary or detailed HDA information

    Optional parameters:

    * view / keys: which set of properties to serialize for each item;
      view defaults to 'summary', keys is a comma separated list of
      property names that is combined with the view's keys when both are
      sent.  For available properties see galaxy/managers/hdas/HDASerializer
      and galaxy/managers/collection_util.
    * q / qv: filter pairs, e.g. '?q=create_time-gt&qv=2015-01-29';
      repeatable ('?q=...&qv=...&q=...&qv=...').
    * limit / offset: paginate the returned list ('?limit=5&offset=3').
    * order: comma separated ordering attributes ('hid', 'create_time',
      'update_time', 'name'), each optionally suffixed with '-asc' or
      '-dsc'; defaults to 'hid-asc'.
    """
    history = self.history_manager.get_accessible(
        self.decode_id(history_id), trans.user, current_history=trans.history)
    filters = self.history_contents_filters.parse_filters(self.parse_filter_params(kwd))
    limit, offset = self.parse_limit_offset(kwd)
    order_by = self._parse_order_by(kwd.get('order', 'hid-asc'))
    serialization_params = self._parse_serialization_params(kwd, 'summary')
    # TODO: > 16.04: remove 'details'/'dataset_details' -- kept only for
    # backwards compatibility with the UI; allows a mixed set of summary
    # and detailed hdas.
    details = kwd.get('details', [])
    if details and details != 'all':
        details = util.listify(details)
    view = serialization_params.pop('view')
    contents = self.history_contents_manager.contents(history,
                                                      filters=filters,
                                                      limit=limit,
                                                      offset=offset,
                                                      order_by=order_by)
    rval = []
    for content in contents:
        # TODO: remove the isinstance split
        if isinstance(content, trans.app.model.HistoryDatasetAssociation):
            # Datasets named in `details` get the detailed view regardless of `view`.
            hda_view = 'detailed' if (details == 'all' or trans.security.encode_id(content.id) in details) else view
            rval.append(
                self.hda_serializer.serialize_to_view(
                    content, user=trans.user, trans=trans, view=hda_view, **serialization_params))
        elif isinstance(content, trans.app.model.HistoryDatasetCollectionAssociation):
            rval.append(
                self.hdca_serializer.serialize_to_view(
                    content, user=trans.user, trans=trans, view=view, **serialization_params))
    return rval
def set_permissions(self, trans, encoded_folder_id, **kwd):
    """
    def set_permissions( self, trans, encoded_folder_id, **kwd ):
        *POST /api/folders/{encoded_folder_id}/permissions

    :param  encoded_folder_id:      the encoded id of the folder to set the permissions of
    :type   encoded_folder_id:      an encoded id string
    :param  action:                 (required) describes what action should be performed
                                    available actions: set_permissions
    :type   action:                 string
    :param  add_ids[]:              list of Role.id defining roles that should have add item permission on the folder
    :type   add_ids[]:              string or list
    :param  manage_ids[]:           list of Role.id defining roles that should have manage permission on the folder
    :type   manage_ids[]:           string or list
    :param  modify_ids[]:           list of Role.id defining roles that should have modify permission on the folder
    :type   modify_ids[]:           string or list

    :rtype:     dictionary
    :returns:   dict of current roles for all available permission types.

    :raises: RequestParameterInvalidException, ObjectNotFound,
        InsufficientPermissionsException, InternalServerError
        RequestParameterMissingException

    Fix: ``get_valid_roles`` (a query) was re-executed for every requested
    role id even though it only depends on the folder; it is now computed
    once, and the three identical validation loops share one helper.
    """
    is_admin = trans.user_is_admin()
    current_user_roles = trans.get_current_user_roles()
    decoded_folder_id = self.folder_manager.decode_folder_id(
        trans, self.folder_manager.cut_the_prefix(encoded_folder_id))
    folder = self.folder_manager.get(trans, decoded_folder_id)
    if not (is_admin or trans.app.security_agent.can_manage_library_item(current_user_roles, folder)):
        raise exceptions.InsufficientPermissionsException(
            'You do not have proper permission to modify permissions of this folder.')
    new_add_roles_ids = util.listify(kwd.get('add_ids[]', None))
    new_manage_roles_ids = util.listify(kwd.get('manage_ids[]', None))
    new_modify_roles_ids = util.listify(kwd.get('modify_ids[]', None))
    action = kwd.get('action', None)
    if action is None:
        raise exceptions.RequestParameterMissingException(
            'The mandatory parameter "action" is missing.')
    elif action == 'set_permissions':
        # Loop-invariant: the set of allowed roles depends only on the folder.
        valid_roles, total_roles = trans.app.security_agent.get_valid_roles(trans, folder)

        def _split_valid_roles(role_ids, permission_name):
            # Resolve each role id, keeping roles allowed on this folder and logging the rest.
            valid, invalid = [], []
            for role_id in role_ids:
                role = self._load_role(trans, role_id)
                if role in valid_roles:
                    valid.append(role)
                else:
                    invalid.append(role_id)
            if len(invalid) > 0:
                log.warning("The following roles could not be added to the %s permission: %s"
                            % (permission_name, str(invalid)))
            return valid

        valid_add_roles = _split_valid_roles(new_add_roles_ids, 'add library item')
        valid_manage_roles = _split_valid_roles(new_manage_roles_ids, 'manage folder')
        valid_modify_roles = _split_valid_roles(new_modify_roles_ids, 'modify folder')
        permissions = {
            trans.app.security_agent.permitted_actions.LIBRARY_ADD: valid_add_roles,
            trans.app.security_agent.permitted_actions.LIBRARY_MANAGE: valid_manage_roles,
            trans.app.security_agent.permitted_actions.LIBRARY_MODIFY: valid_modify_roles,
        }
        trans.app.security_agent.set_all_library_permissions(trans, folder, permissions)
    else:
        raise exceptions.RequestParameterInvalidException(
            'The mandatory parameter "action" has an invalid value.'
            'Allowed values are: "set_permissions"')
    return self.folder_manager.get_current_roles(trans, folder)
def set_permissions(self, trans, encoded_folder_id, payload: dict):
    """
    Set permissions of the given folder to the given role ids.

    :param  encoded_folder_id:      the encoded id of the folder to set the permissions of
    :type   encoded_folder_id:      an encoded id string
    :param  payload: dictionary structure containing:
        :param  action:            (required) describes what action should be performed
        :type   action:            string
        :param  add_ids[]:         list of Role.id defining roles that should have add item permission on the folder
        :type   add_ids[]:         string or list
        :param  manage_ids[]:      list of Role.id defining roles that should have manage permission on the folder
        :type   manage_ids[]:      string or list
        :param  modify_ids[]:      list of Role.id defining roles that should have modify permission on the folder
        :type   modify_ids[]:      string or list
    :type       dictionary
    :returns:   dict of current roles for all available permission types.
    :rtype:     dictionary
    :raises: RequestParameterInvalidException, InsufficientPermissionsException,
        RequestParameterMissingException

    Fix: ``get_valid_roles`` (a query) was re-executed for every requested
    role id even though it only depends on the folder; it is now computed
    once, and the three identical validation loops share one helper.
    """
    is_admin = trans.user_is_admin
    current_user_roles = trans.get_current_user_roles()
    decoded_folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
    folder = self.folder_manager.get(trans, decoded_folder_id)
    if not (is_admin or trans.app.security_agent.can_manage_library_item(current_user_roles, folder)):
        raise InsufficientPermissionsException(
            'You do not have proper permission to modify permissions of this folder.')
    new_add_roles_ids = util.listify(payload.get('add_ids[]', None))
    new_manage_roles_ids = util.listify(payload.get('manage_ids[]', None))
    new_modify_roles_ids = util.listify(payload.get('modify_ids[]', None))
    action = payload.get('action', None)
    if action is None:
        raise RequestParameterMissingException(
            'The mandatory parameter "action" is missing.')
    elif action == 'set_permissions':
        # Loop-invariant: the set of allowed roles depends only on the folder.
        valid_roles, total_roles = trans.app.security_agent.get_valid_roles(trans, folder)

        def _split_valid_roles(role_ids, permission_name):
            # Resolve each role id, keeping roles allowed on this folder and logging the rest.
            valid, invalid = [], []
            for role_id in role_ids:
                role = self.role_manager.get(
                    trans, trans.security.decode_id(role_id, object_name='role'))
                if role in valid_roles:
                    valid.append(role)
                else:
                    invalid.append(role_id)
            if len(invalid) > 0:
                log.warning(
                    f"The following roles could not be added to the {permission_name} permission: {str(invalid)}"
                )
            return valid

        valid_add_roles = _split_valid_roles(new_add_roles_ids, 'add library item')
        valid_manage_roles = _split_valid_roles(new_manage_roles_ids, 'manage folder')
        valid_modify_roles = _split_valid_roles(new_modify_roles_ids, 'modify folder')
        permissions = {
            trans.app.security_agent.permitted_actions.LIBRARY_ADD: valid_add_roles,
            trans.app.security_agent.permitted_actions.LIBRARY_MANAGE: valid_manage_roles,
            trans.app.security_agent.permitted_actions.LIBRARY_MODIFY: valid_modify_roles,
        }
        trans.app.security_agent.set_all_library_permissions(trans, folder, permissions)
    else:
        raise RequestParameterInvalidException(
            'The mandatory parameter "action" has an invalid value.'
            'Allowed values are: "set_permissions"')
    return self.folder_manager.get_current_roles(trans, folder)
def install_repository_revisions(self, trans, payload, **kwd):
    """
    POST /api/tool_shed_repositories/install_repository_revisions
    Install one or more specified repository revisions from one or more
    specified tool sheds into Galaxy.  The received parameters must be
    ordered lists so that positional values in tool_shed_urls, names,
    owners and changeset_revisions are associated.

    :param key: the current Galaxy admin user's API key

    The following parameters are included in the payload:

    :param tool_shed_urls: the base URLs of the Tool Sheds from which to
        install a specified Repository
    :param names: the names of the Repositories to be installed
    :param owners: the owners of the Repositories to be installed
    :param changeset_revisions: the changeset_revisions of each
        RepositoryMetadata object associated with each Repository to be installed
    :param new_tool_panel_section_label: optional label of a new section to
        be added to the Galaxy tool panel in which to load tools contained in
        the Repository.  Either this parameter or tool_panel_section_id must
        be an empty string, as both cannot be used.
    :param tool_panel_section_id: optional id of the Galaxy tool panel
        section in which to load tools contained in the Repository; if not
        set, tools will be loaded outside of any sections in the tool panel.
    :param install_repository_dependencies: (optional) set to True to
        install repository dependencies defined for the specified repository
        being installed; defaults to False.
    :param install_tool_dependencies: (optional) set to True to install tool
        dependencies defined for the specified repository being installed;
        defaults to False.
    :param shed_tool_conf: (optional) a shed-related tool panel
        configuration file (see "tool_config_file" in the Galaxy config);
        its <toolbox> tool_path attribute determines the installation
        location.  If not set, one is selected automatically.
    """
    self.__ensure_can_install_repos(trans)
    # Gather the positional parameter lists describing each repository.
    tool_shed_urls = util.listify(payload.get('tool_shed_urls', ''))
    names = util.listify(payload.get('names', ''))
    owners = util.listify(payload.get('owners', ''))
    changeset_revisions = util.listify(payload.get('changeset_revisions', ''))
    if not (len(tool_shed_urls) == len(names) == len(owners) == len(changeset_revisions)):
        message = 'Error in tool_shed_repositories API in install_repository_revisions: the received parameters must be ordered '
        message += 'lists so that positional values in tool_shed_urls, names, owners and changeset_revisions are associated.'
        log.debug(message)
        return dict(status='error', error=message)
    # TODO: we may want to enhance this method to allow for each of the following to be associated with each repository instead of
    # forcing all repositories to use the same settings.
    install_repository_dependencies = payload.get('install_repository_dependencies', False)
    install_tool_dependencies = payload.get('install_tool_dependencies', False)
    new_tool_panel_section_label = payload.get('new_tool_panel_section_label', '')
    shed_tool_conf = payload.get('shed_tool_conf', None)
    tool_panel_section_id = payload.get('tool_panel_section_id', '')
    all_installed_tool_shed_repositories = []
    for tool_shed_url, name, owner, changeset_revision in zip(tool_shed_urls, names, owners, changeset_revisions):
        result = self.install_repository_revision(
            trans,
            tool_shed_url=tool_shed_url,
            name=name,
            owner=owner,
            changeset_revision=changeset_revision,
            new_tool_panel_section_label=new_tool_panel_section_label,
            tool_panel_section_id=tool_panel_section_id,
            install_repository_dependencies=install_repository_dependencies,
            install_tool_dependencies=install_tool_dependencies,
            shed_tool_conf=shed_tool_conf)
        if isinstance(result, dict):
            # We encountered an error; propagate it immediately.
            return result
        elif isinstance(result, list):
            all_installed_tool_shed_repositories.extend(result)
    return all_installed_tool_shed_repositories
def update_permissions(self, trans, encoded_dataset_id, payload=None, **kwd):
    """
    Set permissions of the given library dataset to the given role ids.

    *POST /api/libraries/datasets/{encoded_dataset_id}/permissions

    :param  encoded_dataset_id:     the encoded id of the dataset to update permissions of
    :type   encoded_dataset_id:     an encoded id string
    :param  payload: dictionary structure containing:
        :param  action:         (required) describes what action should be performed
                                available actions: make_private, remove_restrictions, set_permissions
        :type   action:         string
        :param  access_ids[]:   list of Role.id defining roles that should have access permission on the dataset
        :type   access_ids[]:   string or list
        :param  manage_ids[]:   list of Role.id defining roles that should have manage permission on the dataset
        :type   manage_ids[]:   string or list
        :param  modify_ids[]:   list of Role.id defining roles that should have modify permission on the library dataset item
        :type   modify_ids[]:   string or list
    :type:      dictionary
    :returns:   dict of current roles for all available permission types
    :rtype:     dictionary
    :raises: RequestParameterInvalidException, ObjectNotFound,
        InsufficientPermissionsException, InternalServerError
        RequestParameterMissingException

    Fix: the manage/modify id lists were run through ``util.listify`` a
    second time even though they were already listified; the three role
    validation loops are deduplicated into one helper.
    """
    if payload:
        kwd.update(payload)
    action = kwd.get('action', None)
    if action not in ['remove_restrictions', 'make_private', 'set_permissions']:
        raise exceptions.RequestParameterInvalidException(
            'The mandatory parameter "action" has an invalid value. '
            'Allowed values are: "remove_restrictions", "make_private", "set_permissions"')
    library_dataset = self.ld_manager.get(
        trans, managers_base.decode_id(self.app, encoded_dataset_id))
    # Some permissions are attached directly to the underlying dataset.
    dataset = library_dataset.library_dataset_dataset_association.dataset
    current_user_roles = trans.get_current_user_roles()
    can_manage = trans.app.security_agent.can_manage_dataset(
        current_user_roles, dataset) or trans.user_is_admin()
    if not can_manage:
        raise exceptions.InsufficientPermissionsException(
            'You do not have proper permissions to manage permissions on this dataset.')
    new_access_roles_ids = util.listify(kwd.get('access_ids[]', None))
    new_manage_roles_ids = util.listify(kwd.get('manage_ids[]', None))
    new_modify_roles_ids = util.listify(kwd.get('modify_ids[]', None))
    if action == 'remove_restrictions':
        trans.app.security_agent.make_dataset_public(dataset)
        if not trans.app.security_agent.dataset_is_public(dataset):
            raise exceptions.InternalServerError(
                'An error occured while making dataset public.')
    elif action == 'make_private':
        if not trans.app.security_agent.dataset_is_private_to_user(trans, library_dataset):
            private_role = trans.app.security_agent.get_private_user_role(trans.user)
            dp = trans.app.model.DatasetPermissions(
                trans.app.security_agent.permitted_actions.DATASET_ACCESS.action,
                dataset, private_role)
            trans.sa_session.add(dp)
            trans.sa_session.flush()
        if not trans.app.security_agent.dataset_is_private_to_user(trans, library_dataset):
            # Check again and inform the user if dataset is not private.
            raise exceptions.InternalServerError(
                'An error occured and the dataset is NOT private.')
    elif action == 'set_permissions':
        valid_roles_for_dataset, total_roles = trans.app.security_agent.get_valid_roles(trans, dataset)

        def _split_valid_roles(role_ids, permission_name):
            # Resolve each role id, keeping roles valid for this dataset and logging the rest.
            valid, invalid = [], []
            for role_id in role_ids:
                role = self.role_manager.get(
                    trans, managers_base.decode_id(self.app, role_id))
                if role in valid_roles_for_dataset:
                    valid.append(role)
                else:
                    invalid.append(role_id)
            if len(invalid) > 0:
                log.warning("The following roles could not be added to the %s permission: %s"
                            % (permission_name, str(invalid)))
            return valid

        # ACCESS DATASET ROLES
        # NOTE(review): util.listify() never returns None, so this branch looks
        # unreachable as written -- preserved to avoid a behavior change; confirm
        # whether an empty access list is meant to make the dataset public.
        if new_access_roles_ids is None:
            trans.app.security_agent.make_dataset_public(dataset)
        else:
            valid_access_roles = _split_valid_roles(new_access_roles_ids, 'dataset access')
            access_permission = dict(access=valid_access_roles)
            trans.app.security_agent.set_dataset_permission(dataset, access_permission)
        # MANAGE DATASET ROLES
        valid_manage_roles = _split_valid_roles(new_manage_roles_ids, 'dataset manage')
        manage_permission = {
            trans.app.security_agent.permitted_actions.DATASET_MANAGE_PERMISSIONS:
            valid_manage_roles
        }
        trans.app.security_agent.set_dataset_permission(dataset, manage_permission)
        # MODIFY LIBRARY ITEM ROLES
        valid_modify_roles = _split_valid_roles(new_modify_roles_ids, 'dataset modify')
        modify_permission = {
            trans.app.security_agent.permitted_actions.LIBRARY_MODIFY:
            valid_modify_roles
        }
        trans.app.security_agent.set_library_item_permission(library_dataset, modify_permission)
    return self._get_current_roles(trans, library_dataset)
def update(self, trans, id, **kwd):
    """
    PATCH /api/repositories/{encoded_repository_id}
    Update the stored metadata of a Tool Shed repository.

    :param id: the encoded id of the Repository object

    :param payload: dictionary structure containing::
        'name': repo's name (optional)
        'synopsis': repo's synopsis (optional)
        'description': repo's description (optional)
        'remote_repository_url': repo's remote repo (optional)
        'homepage_url': repo's homepage url (optional)
        'category_ids': list of existing encoded TS category ids the updated repo should be associated with (optional)
    :type payload: dict

    :returns:   detailed repository information
    :rtype:     dict

    :raises: RequestParameterInvalidException, InsufficientPermissionsException
    """
    payload = kwd.get('payload', None)
    if not payload:
        raise RequestParameterMissingException("You did not specify any payload.")
    category_ids = payload.get('category_ids', None)
    # Only listify when the key was actually supplied: util.listify(None) would
    # turn "not passed" into [], which update_repository treats differently.
    if category_ids is not None:
        category_ids = util.listify(category_ids)
    # Note the deliberate key remapping: the API's 'synopsis' is the model's
    # 'description', and the API's 'description' is the model's 'long_description'.
    update_kwds = {
        'name': payload.get('name', None),
        'description': payload.get('synopsis', None),
        'long_description': payload.get('description', None),
        'remote_repository_url': payload.get('remote_repository_url', None),
        'homepage_url': payload.get('homepage_url', None),
        'category_ids': category_ids,
    }
    repo, message = repository_util.update_repository(app=trans.app, trans=trans, id=id, **update_kwds)
    if repo is None:
        # update_repository signals failure by returning (None, reason).
        if "You are not the owner" in message:
            raise InsufficientPermissionsException(message)
        raise ActionInputError(message)
    repository_dict = repo.to_dict(view='element', value_mapper=self.__get_value_mapper(trans))
    repository_dict['category_ids'] = [trans.security.encode_id(assoc.category.id) for assoc in repo.categories]
    return repository_dict
def create(self, trans, **kwd):
    """
    create( self, trans, payload, **kwd )
    * POST /api/repositories:
        Create a new Tool Shed repository.  Only ``name`` and ``synopsis``
        parameters are required.

    :param payload: dictionary structure containing::
        'name': new repo's name (required)
        'synopsis': new repo's synopsis (required)
        'description': new repo's description (optional)
        'remote_repository_url': new repo's remote repo (optional)
        'homepage_url': new repo's homepage url (optional)
        'category_ids[]': list of existing encoded TS category ids the new repo should be associated with (optional)
        'type': new repo's type, defaults to ``unrestricted`` (optional)
    :type payload: dict

    :returns:   detailed repository information
    :rtype:     dict

    :raises: RequestParameterMissingException, RequestParameterInvalidException
    """
    payload = kwd.get('payload', None)
    if not payload:
        raise RequestParameterMissingException("You did not specify any payload.")
    # Validate the two required fields first.
    name = payload.get('name', None)
    if not name:
        raise RequestParameterMissingException("Missing required parameter 'name'.")
    synopsis = payload.get('synopsis', None)
    if not synopsis:
        raise RequestParameterMissingException("Missing required parameter 'synopsis'.")
    repo_type = payload.get('type', rt_util.UNRESTRICTED)
    if repo_type not in rt_util.types:
        raise RequestParameterInvalidException('This repository type is not valid')
    invalid_message = repository_util.validate_repository_name(trans.app, name, trans.user)
    if invalid_message:
        raise RequestParameterInvalidException(invalid_message)
    # Optional fields default to empty strings; note the API->model remapping of
    # 'synopsis' -> description and 'description' -> long_description.
    repo, message = repository_util.create_repository(
        app=trans.app,
        name=name,
        type=repo_type,
        description=synopsis,
        long_description=payload.get('description', ''),
        user_id=trans.user.id,
        category_ids=util.listify(payload.get('category_ids[]', '')),
        remote_repository_url=payload.get('remote_repository_url', ''),
        homepage_url=payload.get('homepage_url', ''))
    repository_dict = repo.to_dict(view='element', value_mapper=self.__get_value_mapper(trans))
    repository_dict['category_ids'] = [trans.security.encode_id(assoc.category.id) for assoc in repo.categories]
    return repository_dict
def download(self, trans, format, **kwd):
    """
    Download requested datasets (identified by encoded IDs) in requested format.

    * GET /api/libraries/datasets/download/{format}
    * POST /api/libraries/datasets/download/{format}

    example: ``GET localhost:8080/api/libraries/datasets/download/tbz?ld_ids%255B%255D=a0d84b45643a2678&ld_ids%255B%255D=fe38c84dcd46c828``

    .. note:: supported format values are: 'zip', 'tgz', 'tbz', 'uncompressed'

    :param  format:      string representing requested archive format
    :type   format:      string
    :param  ld_ids[]:      an array of encoded dataset ids
    :type   ld_ids[]:      an array
    :param  folder_ids[]:      an array of encoded folder ids
    :type   folder_ids[]:      an array

    :returns: either archive with the requested datasets packed inside or a single uncompressed dataset
    :rtype:   file

    :raises: MessageException, ItemDeletionException, ItemAccessibilityException, HTTPBadRequest, OSError, IOError, ObjectNotFound
    """
    library_datasets = []
    # Accept both the URL-encoded ('ld_ids%5B%5D') and plain ('ld_ids') parameter names.
    datasets_to_download = kwd.get('ld_ids%5B%5D', None)
    if datasets_to_download is None:
        datasets_to_download = kwd.get('ld_ids', None)
    if datasets_to_download is not None:
        datasets_to_download = util.listify(datasets_to_download)
        for dataset_id in datasets_to_download:
            try:
                # check_accessible=True enforces the caller's read access to each dataset.
                library_dataset = self.get_library_dataset(trans, id=dataset_id, check_ownership=False, check_accessible=True)
                library_datasets.append(library_dataset)
            except HTTPBadRequest:
                raise exceptions.RequestParameterInvalidException('Bad Request.')
            except HTTPInternalServerError:
                raise exceptions.InternalServerError('Internal error.')
            except Exception as e:
                raise exceptions.InternalServerError('Unknown error.' + str(e))
    # Same dual-name handling for folder ids.
    folders_to_download = kwd.get('folder_ids%5B%5D', None)
    if folders_to_download is None:
        folders_to_download = kwd.get('folder_ids', None)
    if folders_to_download is not None:
        folders_to_download = util.listify(folders_to_download)
        current_user_roles = trans.get_current_user_roles()

        def traverse(folder):
            # Recursively collect every accessible, non-deleted dataset below `folder`.
            # Admins bypass the per-item security checks.
            admin = trans.user_is_admin()
            rval = []
            for subfolder in folder.active_folders:
                if not admin:
                    can_access, folder_ids = trans.app.security_agent.check_folder_contents(trans.user, current_user_roles, subfolder)
                if (admin or can_access) and not subfolder.deleted:
                    rval.extend(traverse(subfolder))
            for ld in folder.datasets:
                if not admin:
                    can_access = trans.app.security_agent.can_access_dataset(current_user_roles, ld.library_dataset_dataset_association.dataset)
                if (admin or can_access) and not ld.deleted:
                    rval.append(ld)
            return rval

        for encoded_folder_id in folders_to_download:
            folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
            folder = self.folder_manager.get(trans, folder_id)
            library_datasets.extend(traverse(folder))
    if not library_datasets:
        raise exceptions.RequestParameterMissingException('Request has to contain a list of dataset ids or folder ids to download.')
    if format in ['zip', 'tgz', 'tbz']:
        # error = False
        # Translation table used to scrub unsafe characters out of archive member names.
        killme = string.punctuation + string.whitespace
        trantab = string.maketrans(killme, '_' * len(killme))
        try:
            outext = 'zip'
            if format == 'zip':
                # Can't use mkstemp - the file must not exist first
                tmpd = tempfile.mkdtemp()
                util.umask_fix_perms(tmpd, trans.app.config.umask, 0o777, self.app.config.gid)
                tmpf = os.path.join(tmpd, 'library_download.' + format)
                if trans.app.config.upstream_gzip:
                    archive = zipfile.ZipFile(tmpf, 'w', zipfile.ZIP_STORED, True)
                else:
                    archive = zipfile.ZipFile(tmpf, 'w', zipfile.ZIP_DEFLATED, True)
                # Give the ZipFile a StreamBall-compatible .add() so the code below
                # can treat zip and tar archives uniformly; member names use CP437
                # for legacy zip-tool compatibility.
                archive.add = lambda x, y: archive.write(x, y.encode('CP437'))
            elif format == 'tgz':
                if trans.app.config.upstream_gzip:
                    # Upstream proxy compresses, so stream a plain tar.
                    archive = StreamBall('w|')
                    outext = 'tar'
                else:
                    archive = StreamBall('w|gz')
                    outext = 'tgz'
            elif format == 'tbz':
                archive = StreamBall('w|bz2')
                outext = 'tbz2'
        except (OSError, zipfile.BadZipfile):
            log.exception("Unable to create archive for download")
            raise exceptions.InternalServerError("Unable to create archive for download.")
        except Exception:
            log.exception("Unexpected error in create archive for download")
            raise exceptions.InternalServerError("Unable to create archive for download.")
        composite_extensions = trans.app.datatypes_registry.get_composite_extensions()
        seen = []
        for ld in library_datasets:
            ldda = ld.library_dataset_dataset_association
            ext = ldda.extension
            is_composite = ext in composite_extensions
            # Rebuild the dataset's folder path inside the archive by walking up
            # the folder hierarchy to the library root.
            path = ""
            parent_folder = ldda.library_dataset.folder
            while parent_folder is not None:
                # Exclude the now-hidden "root folder"
                if parent_folder.parent is None:
                    path = os.path.join(parent_folder.library_root[0].name, path)
                    break
                path = os.path.join(parent_folder.name, path)
                parent_folder = parent_folder.parent
            path += ldda.name
            # De-duplicate identical archive paths by appending underscores.
            while path in seen:
                path += '_'
            seen.append(path)
            zpath = os.path.split(path)[-1]  # comes as base_name/fname
            outfname, zpathext = os.path.splitext(zpath)
            if is_composite:
                # need to add all the components from the extra_files_path to the zip
                if zpathext == '':
                    zpath = '%s.html' % zpath  # fake the real nature of the html file
                try:
                    if format == 'zip':
                        archive.add(ldda.dataset.file_name, zpath)  # add the primary of a composite set
                    else:
                        archive.add(ldda.dataset.file_name, zpath, check_file=True)  # add the primary of a composite set
                except IOError:
                    log.exception("Unable to add composite parent %s to temporary library download archive", ldda.dataset.file_name)
                    raise exceptions.InternalServerError("Unable to create archive for download.")
                except ObjectNotFound:
                    log.exception("Requested dataset %s does not exist on the host.", ldda.dataset.file_name)
                    raise exceptions.ObjectNotFound("Requested dataset not found. ")
                except Exception as e:
                    log.exception("Unable to add composite parent %s to temporary library download archive", ldda.dataset.file_name)
                    raise exceptions.InternalServerError("Unable to add composite parent to temporary library download archive. " + str(e))
                flist = glob.glob(os.path.join(ldda.dataset.extra_files_path, '*.*'))  # glob returns full paths
                for fpath in flist:
                    efp, fname = os.path.split(fpath)
                    if fname > '':
                        # Scrub punctuation/whitespace from component file names.
                        fname = fname.translate(trantab)
                    try:
                        if format == 'zip':
                            archive.add(fpath, fname)
                        else:
                            archive.add(fpath, fname, check_file=True)
                    except IOError:
                        log.exception("Unable to add %s to temporary library download archive %s", fname, outfname)
                        raise exceptions.InternalServerError("Unable to create archive for download.")
                    except ObjectNotFound:
                        log.exception("Requested dataset %s does not exist on the host.", fpath)
                        raise exceptions.ObjectNotFound("Requested dataset not found.")
                    except Exception as e:
                        log.exception("Unable to add %s to temporary library download archive %s" % (fname, outfname))
                        raise exceptions.InternalServerError("Unable to add dataset to temporary library download archive . " + str(e))
            else:
                # Non-composite dataset: a single file added under its full folder path.
                try:
                    if format == 'zip':
                        archive.add(ldda.dataset.file_name, path)
                    else:
                        archive.add(ldda.dataset.file_name, path, check_file=True)
                except IOError:
                    log.exception("Unable to write %s to temporary library download archive", ldda.dataset.file_name)
                    raise exceptions.InternalServerError("Unable to create archive for download")
                except ObjectNotFound:
                    log.exception("Requested dataset %s does not exist on the host.", ldda.dataset.file_name)
                    raise exceptions.ObjectNotFound("Requested dataset not found.")
                except Exception as e:
                    log.exception("Unable to add %s to temporary library download archive %s", ldda.dataset.file_name, outfname)
                    raise exceptions.InternalServerError("Unknown error. " + str(e))
        lname = 'selected_dataset'
        fname = lname.replace(' ', '_') + '_files'
        if format == 'zip':
            archive.close()
            trans.response.set_content_type("application/octet-stream")
            trans.response.headers["Content-Disposition"] = 'attachment; filename="%s.%s"' % (fname, outext)
            # Re-wrap the finished zip as a streamable object that also cleans up tmpd.
            archive = util.streamball.ZipBall(tmpf, tmpd)
            archive.wsgi_status = trans.response.wsgi_status()
            archive.wsgi_headeritems = trans.response.wsgi_headeritems()
            return archive.stream
        else:
            trans.response.set_content_type("application/x-tar")
            trans.response.headers["Content-Disposition"] = 'attachment; filename="%s.%s"' % (fname, outext)
            archive.wsgi_status = trans.response.wsgi_status()
            archive.wsgi_headeritems = trans.response.wsgi_headeritems()
            return archive.stream
    elif format == 'uncompressed':
        if len(library_datasets) != 1:
            raise exceptions.RequestParameterInvalidException("You can download only one uncompressed file at once.")
        else:
            single_ld = library_datasets[0]
            ldda = single_ld.library_dataset_dataset_association
            dataset = ldda.dataset
            fStat = os.stat(dataset.file_name)
            trans.response.set_content_type(ldda.get_mime())
            trans.response.headers['Content-Length'] = int(fStat.st_size)
            # Build a filesystem-safe attachment name, capped at 150 characters.
            fname = ldda.name
            fname = ''.join(c in util.FILENAME_VALID_CHARS and c or '_' for c in fname)[0:150]
            trans.response.headers["Content-Disposition"] = 'attachment; filename="%s"' % fname
            try:
                return open(dataset.file_name)
            except Exception:
                raise exceptions.InternalServerError("This dataset contains no content.")
    else:
        raise exceptions.RequestParameterInvalidException("Wrong format parameter specified")
def upload(self, trans, **kwd):
    # Handle the Tool Shed repository upload form: collect form parameters, then
    # fetch content either from an uploaded file, an 'hg...' mercurial URL (clone),
    # or a plain http URL (streamed download to a temp file).
    # NOTE(review): this block appears truncated at a chunk boundary — the handling
    # after `isempty` (and of `file_data` uploads) is not visible here.
    message = escape(kwd.get('message', ''))
    status = kwd.get('status', 'done')
    commit_message = escape(kwd.get('commit_message', 'Uploaded'))
    category_ids = util.listify(kwd.get('category_id', ''))
    categories = suc.get_categories(trans.app)
    repository_id = kwd.get('repository_id', '')
    repository = suc.get_repository_in_tool_shed(trans.app, repository_id)
    repo_dir = repository.repo_path(trans.app)
    repo = hg_util.get_repo_for_repository(trans.app, repository=None, repo_path=repo_dir, create=False)
    uncompress_file = util.string_as_bool(kwd.get('uncompress_file', 'true'))
    remove_repo_files_not_in_tar = util.string_as_bool(kwd.get('remove_repo_files_not_in_tar', 'true'))
    uploaded_file = None
    upload_point = commit_util.get_upload_point(repository, **kwd)
    tip = repository.tip(trans.app)
    file_data = kwd.get('file_data', '')
    url = kwd.get('url', '')
    # Part of the upload process is sending email notification to those that have registered to
    # receive them.  One scenario occurs when the first change set is produced for the repository.
    # See the suc.handle_email_alerts() method for the definition of the scenarios.
    new_repo_alert = repository.is_new(trans.app)
    uploaded_directory = None
    if kwd.get('upload_button', False):
        if file_data == '' and url == '':
            # Nothing to upload: neither a file nor a URL was supplied.
            message = 'No files were entered on the upload form.'
            status = 'error'
            uploaded_file = None
        elif url and url.startswith('hg'):
            # Use mercurial clone to fetch repository, contents will then be copied over.
            uploaded_directory = tempfile.mkdtemp()
            # An 'hg...' URL is the 'http...' URL with 'hg' substituted for 'http'.
            repo_url = 'http%s' % url[len('hg'):]
            repo_url = repo_url.encode('ascii', 'replace')
            try:
                commands.clone(hg_util.get_configured_ui(), repo_url, uploaded_directory)
            except Exception, e:
                message = 'Error uploading via mercurial clone: %s' % basic_util.to_html_string(str(e))
                status = 'error'
                # Clean up the partially-cloned temp directory on failure.
                basic_util.remove_dir(uploaded_directory)
                uploaded_directory = None
        elif url:
            # Plain http(s) URL: stream the remote file into a local temp file.
            valid_url = True
            try:
                stream = urllib.urlopen(url)
            except Exception, e:
                valid_url = False
                message = 'Error uploading file via http: %s' % str(e)
                status = 'error'
                uploaded_file = None
            if valid_url:
                fd, uploaded_file_name = tempfile.mkstemp()
                uploaded_file = open(uploaded_file_name, 'wb')
                # Copy in CHUNK_SIZE pieces to avoid holding the whole file in memory.
                while 1:
                    chunk = stream.read(util.CHUNK_SIZE)
                    if not chunk:
                        break
                    uploaded_file.write(chunk)
                uploaded_file.flush()
                uploaded_file_filename = url.split('/')[-1]
                isempty = os.path.getsize(os.path.abspath(uploaded_file_name)) == 0
def initiate_data_transfer(self, trans, sample_id, sample_datasets=None, sample_dataset_id=''):
    """
    Initiate the transfer of the datasets from the external service to the target
    Galaxy data library.

    Either a list of SampleDataset objects (``sample_datasets``) or a comma-separated
    string of encoded SampleDataset ids (``sample_dataset_id``) can be received.
    The admin user must have LIBRARY_ADD permission for the target library and folder.
    """
    # BUG FIX: ``sample_datasets`` previously defaulted to a shared mutable list
    # ([]).  Because this method appends to it, datasets leaked across calls that
    # relied on the default.  Use the None sentinel idiom instead.
    if sample_datasets is None:
        sample_datasets = []
    try:
        sample = trans.sa_session.query(trans.model.Sample).get(trans.security.decode_id(sample_id))
    except Exception:
        # Narrowed from a bare except: still best-effort, but no longer swallows
        # SystemExit/KeyboardInterrupt.
        return invalid_id_redirect(trans, 'requests_admin', sample_id, 'sample')
    message = ""
    status = "done"
    # Make sure the current admin user has LIBRARY_ADD permission on the target data library and folder.
    self.__ensure_library_add_permission(trans, sample.library, sample.folder)
    if sample_dataset_id and not sample_datasets:
        # Parse the sample_dataset_id string to build the list of sample_datasets.
        id_list = util.listify(sample_dataset_id)
        for sample_dataset_id in id_list:
            sample_dataset = trans.sa_session.query(trans.model.SampleDataset).get(trans.security.decode_id(sample_dataset_id))
            sample_datasets.append(sample_dataset)
    if trans.app.config.enable_beta_job_managers:
        # For now, assume that all SampleDatasets use the same external service ( this may not be optimal ).
        if sample_datasets:
            external_service_type_id = sample_datasets[0].external_service.external_service_type_id
            # Here external_service_type_id will be something like '454_life_sciences'
            external_service = sample.request.type.get_external_service(external_service_type_id)
            external_service_type = external_service.get_external_service_type(trans)
            external_service.load_data_transfer_settings(trans)
            # For now only scp is supported.
            scp_configs = external_service.data_transfer[trans.model.ExternalService.data_transfer_protocol.SCP]
            if not scp_configs['automatic_transfer']:
                deferred_plugin = 'ManualDataTransferPlugin'
            else:
                raise Exception("Automatic data transfer using scp is not yet supported.")
            # Kept inside the `if sample_datasets` guard so deferred_plugin /
            # external_service are always bound when create_job() runs.
            trans.app.job_manager.deferred_job_queue.plugins[deferred_plugin].create_job(trans,
                                                                                        sample=sample,
                                                                                        sample_datasets=sample_datasets,
                                                                                        external_service=external_service,
                                                                                        external_service_type=external_service_type)
    else:
        # TODO: Using RabbitMq for now, but eliminate this entire block when we replace RabbitMq with Galaxy's
        # own messaging engine.  We're holding off on using the new way to transfer files manually until we
        # implement a Galaxy-proprietary messaging engine because the deferred job plugins currently perform
        # constant db hits to check for deferred jobs that are not in a finished state.
        # Create the message
        messages = self.__create_data_transfer_messages(trans, sample, sample_datasets)
        # Send the messages
        for rmq_msg in messages:
            try:
                conn = amqp.Connection(host=trans.app.config.amqp['host'] + ":" + trans.app.config.amqp['port'],
                                       userid=trans.app.config.amqp['userid'],
                                       password=trans.app.config.amqp['password'],
                                       virtual_host=trans.app.config.amqp['virtual_host'])
                chan = conn.channel()
                msg = amqp.Message(rmq_msg,
                                   content_type='text/plain',
                                   application_headers={'msg_type': 'data_transfer'})
                # delivery_mode 2 == persistent message.
                msg.properties["delivery_mode"] = 2
                chan.basic_publish(msg,
                                   exchange=trans.app.config.amqp['exchange'],
                                   routing_key=trans.app.config.amqp['routing_key'])
                chan.close()
                conn.close()
            except Exception as e:
                message = "Error sending the data transfer message to the Galaxy AMQP message queue:<br/>%s" % str(e)
                status = "error"
        if not message:
            message = "%i datasets have been queued for transfer from the external service." % len(sample_datasets)
            status = "done"
def inputs_recursive(trans, input_params, param_values, depth=1, upgrade_messages=None):
    """
    Recursive method for tool parameter section.

    Renders one table row per parameter in ``input_params`` (using the values in
    ``param_values``) and returns the accumulated HTML fragment.  Nested groups
    (repeat / section / conditional) recurse with an increased ``depth``.
    """
    tool_parameter_template = ''
    if upgrade_messages is None:
        upgrade_messages = {}
    for input in input_params.values():
        if input.name in param_values:
            if input.type == "repeat":
                for i in range(len(param_values[input.name])):
                    # BUG FIX: the recursive call's return value was previously
                    # discarded, so repeat contents were never rendered.
                    tool_parameter_template += inputs_recursive(trans,
                                                                input.inputs,
                                                                param_values[input.name][i],
                                                                depth=depth + 1)
            elif input.type == "section":
                tool_parameter_template += '<tr>'
                tool_parameter_template += inputs_recursive_indent(text=input.name, depth=depth)
                tool_parameter_template += '<td></td></tr>'
                # BUG FIX: accumulate the section's nested rows (return value was discarded).
                tool_parameter_template += inputs_recursive(trans,
                                                            input.inputs,
                                                            param_values[input.name],
                                                            depth=depth + 1,
                                                            upgrade_messages=upgrade_messages.get(input.name))
            elif input.type == "conditional":
                try:
                    current_case = param_values[input.name]['__current_case__']
                    is_valid = True
                except Exception:
                    current_case = None
                    is_valid = False
                if is_valid:
                    tool_parameter_template += '<tr>'
                    tool_parameter_template += inputs_recursive_indent(text=input.test_param.label, depth=depth)
                    tool_parameter_template += f"<td>{input.cases[current_case].value}</td><td></td></tr>"
                    # BUG FIX: accumulate the selected case's nested rows (return value was discarded).
                    tool_parameter_template += inputs_recursive(trans,
                                                                input.cases[current_case].inputs,
                                                                param_values[input.name],
                                                                depth=depth + 1,
                                                                upgrade_messages=upgrade_messages.get(input.name))
                else:
                    tool_parameter_template += '<tr>'
                    tool_parameter_template += inputs_recursive_indent(text=input.name, depth=depth)
                    tool_parameter_template += '<td><em> The previously used value is no longer valid </em></td><td></td></tr>'
            elif input.type == "upload_dataset":
                tool_parameter_template += '<tr>'
                tool_parameter_template += inputs_recursive_indent(text=input.group_title(param_values), depth=depth)
                tool_parameter_template += f"<td>{str(len(param_values[input.name]))} uploaded datasets</td><td></td></tr>"
            elif input.type == "data":
                tool_parameter_template += '<tr>'
                tool_parameter_template += inputs_recursive_indent(text=input.label, depth=depth)
                tool_parameter_template += '<td>'
                for i, element in enumerate(listify(param_values[input.name])):
                    if i > 0:
                        tool_parameter_template += ','
                    if element.history_content_type == "dataset":
                        hda = element
                        encoded_id = trans.security.encode_id(hda.id)
                        dataset_info_url = url_for(controller="dataset", action="details", dataset_id=encoded_id)
                        # BUG FIX: a space is required between the data-hda-id and
                        # href attributes; the concatenation previously produced
                        # `data-hda-id="..."href="..."`.
                        tool_parameter_template += f"<a target=\"galaxy_main\" data-hda-id=\"{encoded_id}\" "
                        tool_parameter_template += f"href=\"{dataset_info_url}\">{str(hda.hid)}:{hda.name}</a>"
                    else:
                        # Dataset collections have no details page; render hid:name only.
                        tool_parameter_template += f"{str(element.hid)}:{element.name}"
                tool_parameter_template += '</td><td></td></tr>'
            elif input.visible:
                label = input.label if (hasattr(input, "label") and input.label) else input.name
                tool_parameter_template += '<tr>'
                tool_parameter_template += inputs_recursive_indent(text=label, depth=depth)
                tool_parameter_template += f"<td>{input.value_to_display_text(param_values[input.name])}</td>"
                tool_parameter_template += f"<td>{upgrade_messages.get(input.name, '')}</td></tr>"
        else:
            # Parameter was added to the tool after this job ran.
            tool_parameter_template += '<tr>'
            if input.type == "conditional":
                label = input.test_param.label
            elif input.type == "repeat":
                label = input.label()
            else:
                label = input.label or input.name
            tool_parameter_template += inputs_recursive_indent(text=label, depth=depth)
            tool_parameter_template += '<td><em> not used (parameter was added after this job was run)</em></td><td></td></tr>'
    return tool_parameter_template
def extract_steps(trans, history=None, job_ids=None, dataset_ids=None, dataset_collection_ids=None, dataset_names=None, dataset_collection_names=None):
    """Build the list of WorkflowStep objects extracted from a history.

    Creates input steps for the given dataset / dataset-collection hids and one
    tool step per job id, wiring step connections via each input's history hid.
    Returns the list of (unsaved) ``model.WorkflowStep`` instances.
    """
    # Ensure job_ids and dataset_ids are lists (possibly empty)
    job_ids = listify(job_ids)
    dataset_ids = listify(dataset_ids)
    dataset_collection_ids = listify(dataset_collection_ids)
    # Convert both sets of ids to integers
    job_ids = [int(_) for _ in job_ids]
    dataset_ids = [int(_) for _ in dataset_ids]
    dataset_collection_ids = [int(_) for _ in dataset_collection_ids]
    # Find each job, for security we (implicitly) check that they are
    # associated with a job in the current history.
    summary = WorkflowSummary(trans, history)
    jobs = summary.jobs
    steps = []
    step_labels = set()
    # Maps a history item hid -> (producing step, output name) for connection wiring.
    hid_to_output_pair = {}
    # Input dataset steps
    for i, hid in enumerate(dataset_ids):
        step = model.WorkflowStep()
        step.type = 'data_input'
        if dataset_names:
            name = dataset_names[i]
        else:
            name = "Input Dataset"
        # Labels must be unique across the workflow; duplicates get no label.
        if name not in step_labels:
            step.label = name
            step_labels.add(name)
        step.tool_inputs = dict(name=name)
        hid_to_output_pair[hid] = (step, 'output')
        steps.append(step)
    for i, hid in enumerate(dataset_collection_ids):
        step = model.WorkflowStep()
        step.type = 'data_collection_input'
        if hid not in summary.collection_types:
            raise exceptions.RequestParameterInvalidException("hid %s does not appear to be a collection" % hid)
        collection_type = summary.collection_types[hid]
        if dataset_collection_names:
            name = dataset_collection_names[i]
        else:
            name = "Input Dataset Collection"
        if name not in step_labels:
            step.label = name
            step_labels.add(name)
        step.tool_inputs = dict(name=name, collection_type=collection_type)
        hid_to_output_pair[hid] = (step, 'output')
        steps.append(step)
    # Tool steps
    for job_id in job_ids:
        if job_id not in summary.job_id2representative_job:
            log.warning(f"job_id {job_id} not found in job_id2representative_job {summary.job_id2representative_job}")
            raise AssertionError("Attempt to create workflow with job not connected to current history")
        job = summary.job_id2representative_job[job_id]
        tool_inputs, associations = step_inputs(trans, job)
        step = model.WorkflowStep()
        step.type = 'tool'
        step.tool_id = job.tool_id
        step.tool_version = job.tool_version
        step.tool_inputs = tool_inputs
        # NOTE: We shouldn't need to do two passes here since only
        # an earlier job can be used as an input to a later
        # job.
        for other_hid, input_name in associations:
            if job in summary.implicit_map_jobs:
                # For implicitly-mapped jobs, remap the input hid to the hid of
                # the implicit input collection (the job ran over one element).
                an_implicit_output_collection = jobs[job][0][1]
                input_collection = an_implicit_output_collection.find_implicit_input_collection(input_name)
                if input_collection:
                    other_hid = input_collection.hid
                else:
                    log.info("Cannot find implicit input collection for %s" % input_name)
            if other_hid in hid_to_output_pair:
                step_input = step.get_or_add_input(input_name)
                other_step, other_name = hid_to_output_pair[other_hid]
                conn = model.WorkflowStepConnection()
                conn.input_step_input = step_input
                # Should always be connected to an earlier step
                conn.output_step = other_step
                conn.output_name = other_name
        steps.append(step)
        # Store created dataset hids
        for assoc in (job.output_datasets + job.output_dataset_collection_instances):
            assoc_name = assoc.name
            # Skip outputs that are parts of named collections or discovered primary files.
            if ToolOutputCollectionPart.is_named_collection_part_name(assoc_name):
                continue
            if assoc_name.startswith("__new_primary_file"):
                continue
            if job in summary.implicit_map_jobs:
                hid = None
                for implicit_pair in jobs[job]:
                    query_assoc_name, dataset_collection = implicit_pair
                    if query_assoc_name == assoc_name or assoc_name.startswith("__new_primary_file_%s|" % query_assoc_name):
                        hid = summary.hid(dataset_collection)
                if hid is None:
                    template = "Failed to find matching implicit job - job id is %s, implicit pairs are %s, assoc_name is %s."
                    message = template % (job.id, jobs[job], assoc_name)
                    log.warning(message)
                    raise Exception("Failed to extract job.")
            else:
                if hasattr(assoc, "dataset"):
                    has_hid = assoc.dataset
                else:
                    has_hid = assoc.dataset_collection_instance
                hid = summary.hid(has_hid)
            if hid in hid_to_output_pair:
                log.warning("duplicate hid found in extract_steps [%s]" % hid)
            hid_to_output_pair[hid] = (step, assoc.name)
    return steps
def manage_users_and_groups_for_quota(self, quota, params, decode_id=None) -> str:
    """Associate the users and groups named in ``params`` with ``quota``.

    ``params.in_users`` / ``params.in_groups`` hold (possibly encoded) ids;
    ``decode_id``, when provided, is used to decode each id before lookup.
    Returns a human-readable summary message; raises ActionInputError for a
    default quota or for any id that does not resolve to a record.
    """
    # Default quotas apply to everyone, so per-user/group association is meaningless.
    if quota.default:
        raise ActionInputError('Default quotas cannot be associated with specific users and groups.')

    def _load_records(entity_class, raw_ids):
        # Resolve each (optionally encoded) id to its DB record; misses yield None.
        records = []
        for raw_id in util.listify(raw_ids):
            primary_key = decode_id(raw_id) if decode_id else raw_id
            records.append(self.sa_session.query(entity_class).get(primary_key))
        return records

    in_users = _load_records(model.User, params.in_users)
    if None in in_users:
        raise ActionInputError("One or more invalid user id has been provided.")
    in_groups = _load_records(model.Group, params.in_groups)
    if None in in_groups:
        raise ActionInputError("One or more invalid group id has been provided.")
    self.quota_agent.set_entity_quota_associations(quotas=[quota], users=in_users, groups=in_groups)
    self.sa_session.refresh(quota)
    return f"Quota '{quota.name}' has been updated with {len(in_users)} associated users and {len(in_groups)} associated groups."
def manage_datasets(self, trans, **kwd):
    """Grid handler for SampleDatasets: view, delete, rename, transfer operations.

    Dispatches on the grid's ``operation`` param, then renders the data transfer
    grid for the sample.  Redirects with an error message on bad input.
    """
    def handle_error(**kwd):
        # Redirect back to this grid with status forced to 'error'.
        kwd['status'] = 'error'
        return trans.response.send_redirect(web.url_for(controller='requests_admin',
                                                        action='manage_datasets',
                                                        **kwd))
    params = util.Params(kwd)
    message = util.restore_text(params.get('message', ''))
    status = params.get('status', 'done')
    # When this method is called due to a grid operation, the sample ID
    # will be in the param 'id'.  But when this method is called via a
    # redirect from another method, the ID will be in 'sample_id'.  So,
    # check for 'id' if 'sample_id' is not provided.
    sample_id = params.get('sample_id', None)
    if sample_id is None:
        sample_id = params.get('id', None)
    try:
        sample = trans.sa_session.query(trans.model.Sample).get(trans.security.decode_id(sample_id))
    except Exception:
        # Narrowed from a bare except; invalid/missing id -> friendly redirect.
        return invalid_id_redirect(trans, 'requests_admin', sample_id, 'sample')
    if 'operation' in kwd:
        operation = kwd['operation'].lower()
        sample_dataset_id = params.get('id', None)
        if not sample_dataset_id:
            message = 'Select at least 1 dataset to %s.' % operation
            kwd['message'] = message
            del kwd['operation']
            # BUG FIX: the redirect returned by handle_error() was previously
            # discarded, so execution fell through and crashed below on an
            # empty selection.
            return handle_error(**kwd)
        id_list = util.listify(sample_dataset_id)
        selected_sample_datasets = []
        for sample_dataset_id in id_list:
            try:
                sample_dataset = trans.sa_session.query(trans.model.SampleDataset).get(trans.security.decode_id(sample_dataset_id))
            except Exception:
                return invalid_id_redirect(trans, 'requests_admin', sample_dataset_id, 'sample dataset')
            selected_sample_datasets.append(sample_dataset)
        if operation == "view":
            return trans.fill_template('/admin/requests/view_sample_dataset.mako',
                                       cntrller='requests_admin',
                                       sample_dataset=selected_sample_datasets[0])
        elif operation == "delete":
            not_deleted = []
            for sample_dataset in selected_sample_datasets:
                # Make sure the dataset has been transferred before deleting it.
                if sample_dataset in sample_dataset.sample.untransferred_dataset_files:
                    # Save the sample dataset
                    sample = sample_dataset.sample
                    trans.sa_session.delete(sample_dataset)
                    trans.sa_session.flush()
                else:
                    not_deleted.append(sample_dataset.name)
            message = '%i datasets have been deleted.' % (len(id_list) - len(not_deleted))
            if not_deleted:
                status = 'warning'
                message = message + ' %s could not be deleted because their transfer status is not "Not Started". ' % str(not_deleted)
            return trans.response.send_redirect(web.url_for(controller='requests_admin',
                                                            action='manage_datasets',
                                                            sample_id=trans.security.encode_id(sample.id),
                                                            status=status,
                                                            message=message))
        elif operation == "rename":
            # If one of the selected sample datasets is in the NOT_STARTED state,
            # then display an error message.  A NOT_STARTED state implies the dataset
            # has not yet been transferred.
            # NOTE(review): the flag is cleared when a dataset IS in
            # untransferred_dataset_files, which looks inverted relative to the
            # comment above — behavior preserved as-is; verify against the grid's
            # expectations before changing.
            no_datasets_transferred = True
            for selected_sample_dataset in selected_sample_datasets:
                if selected_sample_dataset in selected_sample_dataset.sample.untransferred_dataset_files:
                    no_datasets_transferred = False
                    break
            if no_datasets_transferred:
                status = 'error'
                message = 'A dataset can be renamed only if it has been transferred.'
                return trans.response.send_redirect(web.url_for(controller='requests_admin',
                                                                action='manage_datasets',
                                                                sample_id=trans.security.encode_id(selected_sample_datasets[0].sample.id),
                                                                status=status,
                                                                message=message))
            return trans.fill_template('/admin/requests/rename_datasets.mako',
                                       sample=selected_sample_datasets[0].sample,
                                       id_list=id_list)
        elif operation == "transfer":
            self.initiate_data_transfer(trans,
                                        trans.security.encode_id(selected_sample_datasets[0].sample.id),
                                        sample_datasets=selected_sample_datasets)
        elif operation == "view_external_service":
            return trans.response.send_redirect(web.url_for(controller='external_service',
                                                            action='view_external_service',
                                                            **kwd))
    # Render the grid view
    request_id = trans.security.encode_id(sample.request.id)
    library_id = trans.security.encode_id(sample.library.id)
    self.datatx_grid.title = 'Manage "%s" datasets' % sample.name
    self.datatx_grid.global_actions = [grids.GridAction("Browse target data library",
                                                        dict(controller='library_common',
                                                             action='browse_library',
                                                             cntrller='library_admin',
                                                             id=library_id)),
                                       grids.GridAction("Browse this request",
                                                        dict(controller='requests_common',
                                                             action='view_request',
                                                             cntrller='requests_admin',
                                                             id=request_id))]
    return self.datatx_grid(trans, **kwd)
def __configure_subsystems(self, kwargs):
    """Resolve the configured subsystem names into subsystem objects.

    Reads the 'subsystems' entry from ``kwargs`` (falling back to
    DEFAULT_SUBSYSTEMS), splits/strips it into names, and stores the
    corresponding subsystem instances on ``self.subsystems``.
    """
    requested_names = util.listify(kwargs.get("subsystems", DEFAULT_SUBSYSTEMS), do_strip=True)
    self.subsystems = [subsystems.get_subsystem(name) for name in requested_names]
def rename_datasets(self, trans, **kwd):
    """Rename one or more SampleDatasets selected in the DataTransferGrid.

    Without the ``rename_datasets_button`` parameter this redirects back to
    the dataset grid; with it, the requested renames are applied and the
    rename form is re-rendered with a status message.

    :param trans: the current web transaction
    :param kwd: request parameters; expects ``sample_id``, ``id_list`` and,
        per selected dataset, ``rename_datasets_for_sample_<id>`` (prefix
        option) and ``new_name_<id>`` (the new name)
    """
    params = util.Params(kwd)
    message = util.restore_text(params.get('message', ''))
    status = params.get('status', 'done')
    sample_id = kwd.get('sample_id', None)
    try:
        sample = trans.sa_session.query(trans.model.Sample).get(
            trans.security.decode_id(sample_id))
    # A missing or undecodable sample_id lands here; was a bare `except:`,
    # which would also swallow KeyboardInterrupt/SystemExit.
    except Exception:
        return invalid_id_redirect(trans, 'requests_admin', sample_id, 'sample')
    # id_list is list of SampleDataset ids, which is a subset of all
    # of the SampleDatasets associated with the Sample. The user may
    # or may not have selected all of the SampleDatasets for renaming.
    id_list = util.listify(kwd.get('id_list', []))
    # Get all of the selected SampleDatasets.
    sample_datasets = [
        trans.sa_session.query(trans.app.model.SampleDataset).get(
            trans.security.decode_id(sample_dataset_id))
        for sample_dataset_id in id_list]
    if params.get('rename_datasets_button', False):
        incorrect_dataset_names = []
        for sample_dataset in sample_datasets:
            encoded_id = trans.security.encode_id(sample_dataset.id)
            # Each form row posts a prefix option and a new name keyed
            # by the dataset's encoded id.
            selected_option = util.restore_text(
                params.get('rename_datasets_for_sample_%s' % encoded_id, ''))
            new_name = util.restore_text(
                params.get('new_name_%s' % encoded_id, ''))
            if not new_name:
                # An empty name is invalid; remember it and skip the rename.
                incorrect_dataset_names.append(sample_dataset.name)
                continue
            new_name = util.sanitize_for_filename(new_name)
            if selected_option == 'none':
                sample_dataset.name = new_name
            else:
                sample_dataset.name = '%s_%s' % (selected_option, new_name)
            trans.sa_session.add(sample_dataset)
            trans.sa_session.flush()
        if len(sample_datasets) == len(incorrect_dataset_names):
            status = 'error'
            message = 'All datasets renamed incorrectly.'
        elif len(incorrect_dataset_names):
            status = 'done'
            message = 'Changes saved successfully. The following datasets were renamed incorrectly: %s.' % str(
                incorrect_dataset_names)
        else:
            message = 'Changes saved successfully.'
        return trans.fill_template('/admin/requests/rename_datasets.mako',
                                   sample=sample,
                                   id_list=id_list,
                                   message=message,
                                   status=status)
    return trans.response.send_redirect(
        web.url_for(controller='requests_admin',
                    action='manage_datasets',
                    sample_id=sample_id))
def set_permissions(self, trans, encoded_library_id, payload=None, **kwd):
    """
    *POST /api/libraries/{encoded_library_id}/permissions
    Set permissions of the given library to the given role ids.

    :param encoded_library_id: the encoded id of the library to set the permissions of
    :type  encoded_library_id: an encoded id string
    :param payload: dictionary structure containing:
        :param action: (required) describes what action should be performed
                       available actions: remove_restrictions, set_permissions
        :type  action: str
        :param access_ids[]: list of Role.id defining roles that should have access permission on the library
        :type  access_ids[]: string or list
        :param add_ids[]: list of Role.id defining roles that should have add item permission on the library
        :type  add_ids[]: string or list
        :param manage_ids[]: list of Role.id defining roles that should have manage permission on the library
        :type  manage_ids[]: string or list
        :param modify_ids[]: list of Role.id defining roles that should have modify permission on the library
        :type  modify_ids[]: string or list
    :type: dictionary
    :returns: dict of current roles for all available permission types
    :rtype: dictionary
    :raises: RequestParameterInvalidException, InsufficientPermissionsException, InternalServerError
             RequestParameterMissingException
    """
    if payload:
        kwd.update(payload)
    is_admin = trans.user_is_admin
    current_user_roles = trans.get_current_user_roles()
    library = self.library_manager.get(
        trans, self.__decode_id(trans, encoded_library_id, 'library'))
    if not (is_admin or trans.app.security_agent.can_manage_library_item(
            current_user_roles, library)):
        raise exceptions.InsufficientPermissionsException(
            'You do not have proper permission to modify permissions of this library.'
        )
    new_access_roles_ids = util.listify(kwd.get('access_ids[]', None))
    new_add_roles_ids = util.listify(kwd.get('add_ids[]', None))
    new_manage_roles_ids = util.listify(kwd.get('manage_ids[]', None))
    new_modify_roles_ids = util.listify(kwd.get('modify_ids[]', None))
    action = kwd.get('action', None)
    if action is None:
        if payload is not None:
            # Legacy clients POST permission lists without an action.
            return self.set_permissions_old(trans, library, payload, **kwd)
        else:
            raise exceptions.RequestParameterMissingException(
                'The mandatory parameter "action" is missing.')
    elif action == 'remove_restrictions':
        is_public = self.library_manager.make_public(trans, library)
        if not is_public:
            raise exceptions.InternalServerError(
                'An error occurred while making library public.')
    elif action == 'set_permissions':
        # Validate each requested role list against the roles that are
        # actually permitted for this library; invalid ids are logged
        # and silently dropped (matching previous behavior).
        valid_access_roles = self.__validated_roles(
            trans, library, new_access_roles_ids, 'library access',
            is_library_access=True)
        valid_add_roles = self.__validated_roles(
            trans, library, new_add_roles_ids, 'add library item')
        valid_manage_roles = self.__validated_roles(
            trans, library, new_manage_roles_ids, 'manage library')
        valid_modify_roles = self.__validated_roles(
            trans, library, new_modify_roles_ids, 'modify library')
        permissions = {
            trans.app.security_agent.permitted_actions.LIBRARY_ACCESS:
            valid_access_roles,
            trans.app.security_agent.permitted_actions.LIBRARY_ADD:
            valid_add_roles,
            trans.app.security_agent.permitted_actions.LIBRARY_MANAGE:
            valid_manage_roles,
            trans.app.security_agent.permitted_actions.LIBRARY_MODIFY:
            valid_modify_roles,
        }
        trans.app.security_agent.set_all_library_permissions(
            trans, library, permissions)
        trans.sa_session.refresh(library)
        # Copy the permissions to the root folder
        trans.app.security_agent.copy_library_permissions(
            trans, library, library.root_folder)
    else:
        raise exceptions.RequestParameterInvalidException(
            'The mandatory parameter "action" has an invalid value. '
            'Allowed values are: "remove_restrictions", "set_permissions".')
    roles = self.library_manager.get_current_roles(trans, library)
    return roles

def __validated_roles(self, trans, library, role_ids, permission_name,
                      is_library_access=False):
    """Return the Role objects from ``role_ids`` that are valid for
    ``library``, logging a warning that lists any ids that are not.

    :param role_ids: encoded role ids to validate (may be empty)
    :param permission_name: human-readable permission name used in the
        warning message (e.g. 'library access')
    :param is_library_access: validate against the library-access role
        set instead of the general one
    :rtype: list
    """
    valid_roles = []
    invalid_role_names = []
    if not role_ids:
        return valid_roles
    # The valid-role set depends only on the library, so fetch it once
    # instead of once per requested role id.
    valid_candidates, _total = trans.app.security_agent.get_valid_roles(
        trans, library, is_library_access=is_library_access)
    for role_id in role_ids:
        role = self.role_manager.get(
            trans, self.__decode_id(trans, role_id, 'role'))
        if role in valid_candidates:
            valid_roles.append(role)
        else:
            invalid_role_names.append(role_id)
    if invalid_role_names:
        log.warning(
            "The following roles could not be added to the %s permission: %s"
            % (permission_name, str(invalid_role_names)))
    return valid_roles
def wrap_in_middleware(app, global_conf, **local_conf):
    """
    Based on the configuration wrap `app` in a set of common and useful
    middleware.

    :param app: the WSGI application to wrap
    :param global_conf: application-wide configuration dict (merged first)
    :param local_conf: section-local configuration; overrides global_conf
    :returns: the outermost middleware wrapping `app`

    NOTE: wrapping ORDER matters — each `app = Middleware(app, ...)` line
    places that middleware OUTSIDE everything wrapped so far, so earlier
    wraps here end up innermost at request time.
    """
    # Merge the global and local configurations; local keys win.
    conf = global_conf.copy()
    conf.update(local_conf)
    debug = asbool(conf.get('debug', False))
    # First put into place httpexceptions, which must be most closely
    # wrapped around the application (it can interact poorly with
    # other middleware):
    app = httpexceptions.make_middleware(app, conf)
    log.debug("Enabling 'httpexceptions' middleware")
    # If we're using remote_user authentication, add middleware that
    # protects Galaxy from improperly configured authentication in the
    # upstream server
    if asbool(conf.get('use_remote_user', False)):
        from galaxy.web.framework.middleware.remoteuser import RemoteUser
        app = RemoteUser(app,
                         maildomain=conf.get('remote_user_maildomain', None),
                         display_servers=util.listify(
                             conf.get('display_servers', '')),
                         # admin_users is a comma-separated string in config
                         admin_users=conf.get('admin_users', '').split(','),
                         remote_user_header=conf.get('remote_user_header',
                                                     'HTTP_REMOTE_USER'))
        log.debug("Enabling 'remote user' middleware")
    # The recursive middleware allows for including requests in other
    # requests or forwarding of requests, all on the server side.
    if asbool(conf.get('use_recursive', True)):
        from paste import recursive
        app = recursive.RecursiveMiddleware(app, conf)
        log.debug("Enabling 'recursive' middleware")
    # If sentry logging is enabled, log here before propagating up to
    # the error middleware
    sentry_dsn = conf.get('sentry_dsn', None)
    if sentry_dsn:
        from galaxy.web.framework.middleware.sentry import Sentry
        app = Sentry(app, sentry_dsn)
    # Various debug middleware that can only be turned on if the debug
    # flag is set, either because they are insecure or greatly hurt
    # performance
    if debug:
        # Middleware to check for WSGI compliance
        if asbool(conf.get('use_lint', False)):
            from paste import lint
            app = lint.make_middleware(app, conf)
            log.debug("Enabling 'lint' middleware")
        # Middleware to run the python profiler on each request
        if asbool(conf.get('use_profile', False)):
            from paste.debug import profile
            app = profile.ProfileMiddleware(app, conf)
            log.debug("Enabling 'profile' middleware")
    if debug and asbool(conf.get('use_interactive', False)):
        # Interactive exception debugging, scary dangerous if publicly
        # accessible, if not enabled we'll use the regular error printing
        # middleware.
        pkg_resources.require("WebError")
        from weberror import evalexception
        app = evalexception.EvalException(
            app,
            conf,
            templating_formatters=build_template_error_formatters())
        log.debug("Enabling 'eval exceptions' middleware")
    else:
        # Not in interactive debug mode, just use the regular error middleware
        import galaxy.web.framework.middleware.error
        app = galaxy.web.framework.middleware.error.ErrorMiddleware(app, conf)
        log.debug("Enabling 'error' middleware")
    # Transaction logging (apache access.log style)
    if asbool(conf.get('use_translogger', True)):
        from galaxy.web.framework.middleware.translogger import TransLogger
        app = TransLogger(app)
        log.debug("Enabling 'trans logger' middleware")
    # X-Forwarded-Host handling
    from galaxy.web.framework.middleware.xforwardedhost import XForwardedHostMiddleware
    app = XForwardedHostMiddleware(app)
    log.debug("Enabling 'x-forwarded-host' middleware")
    # Request ID middleware
    from galaxy.web.framework.middleware.request_id import RequestIDMiddleware
    app = RequestIDMiddleware(app)
    log.debug("Enabling 'Request ID' middleware")
    return app