def _load_role(self, trans, role_name):
    """
    Method loads the role from the DB based on the given role name.

    :param role_name: name of the role to load from the DB
    :type role_name: string

    :rtype: Role
    :returns: the loaded Role object

    :raises: InconsistentDatabase, RequestParameterInvalidException, InternalServerError
    """
    try:
        # Filter on the same model class that is being queried; the original
        # referenced trans.model.Role here, inconsistent with the
        # trans.app.model.Role used in the query() call (cf. the library
        # loader, which uses trans.app.model throughout).
        role = trans.sa_session.query(trans.app.model.Role).filter(
            trans.app.model.Role.table.c.name == role_name).one()
    except MultipleResultsFound:
        raise exceptions.InconsistentDatabase(
            'Multiple roles found with the same name. Name: ' + str(role_name))
    except NoResultFound:
        raise exceptions.RequestParameterInvalidException(
            'No role found with the name provided. Name: ' + str(role_name))
    except Exception as e:
        raise exceptions.InternalServerError(
            'Error loading from the database.' + str(e))
    # The docstring promises the loaded Role; the original fell off the end
    # without returning it (the sibling library loader does return).
    return role
def __api_import_new_workflow(self, trans, payload, **kwd):
    """
    Create a new workflow from the JSON description in ``payload['workflow']``.

    :param payload: dict with a 'workflow' key holding the workflow JSON and
        optional 'publish' / 'importable' boolean flags.
    :returns: dictionary describing the newly created workflow, including its
        encoded id and URL.
    :raises exceptions.RequestParameterInvalidException: if 'publish' is set
        while 'importable' is explicitly false.
    """
    data = payload['workflow']
    publish = util.string_as_bool(payload.get("publish", False))
    # If 'publish' set, default to importable.
    importable = util.string_as_bool(payload.get("importable", publish))
    if publish and not importable:
        raise exceptions.RequestParameterInvalidException("Published workflow must be importable.")
    from_dict_kwds = dict(
        source="API",
        publish=publish,
    )
    workflow, missing_tool_tups = self._workflow_from_dict(trans, data, **from_dict_kwds)
    if importable:
        self._make_item_accessible(trans.sa_session, workflow)
        trans.sa_session.flush()
    # api encoded id of the newly created workflow
    encoded_id = trans.security.encode_id(workflow.id)
    item = workflow.to_dict(value_mapper={'id': trans.security.encode_id})
    item['url'] = url_for('workflow', id=encoded_id)
    # NOTE(review): the original also built a single-element 'rval' list that
    # was never returned; that dead code has been removed - the dict itself
    # is (and was) the return value.
    return item
def expand_workflow_inputs(inputs):
    """
    Expands incoming encoded multiple payloads, into the set of all individual payload combinations

    >>> params, param_keys = expand_workflow_inputs({'1': {'input': {'batch': True, 'product': True, 'values': [{'hid': '1'}, {'hid': '2'}]}}})
    >>> print(sorted(["%s" % (p['1']['input']['hid']) for p in params]))
    ['1', '2']
    >>> params, param_keys = expand_workflow_inputs({'1': {'input': {'batch': True, 'values': [{'hid': '1'}, {'hid': '2'}]}}})
    >>> print(sorted(["%s" % (p['1']['input']['hid']) for p in params]))
    ['1', '2']
    >>> params, param_keys = expand_workflow_inputs({'1': {'input': {'batch': True, 'values': [{'hid': '1'}, {'hid': '2'}]}}, '2': {'input': {'batch': True, 'values': [{'hid': '3'}, {'hid': '4'}]}}})
    >>> print(sorted(["%s%s" % (p['1']['input']['hid'], p['2']['input']['hid']) for p in params]))
    ['13', '24']
    >>> params, param_keys = expand_workflow_inputs({'1': {'input': {'batch': True, 'product': True, 'values': [{'hid': '1'}, {'hid': '2'}]}}, '2': {'input': {'batch': True, 'values': [{'hid': '3'}, {'hid': '4'}, {'hid': '5'}]}}})
    >>> print(sorted(["%s%s" % (p['1']['input']['hid'], p['2']['input']['hid']) for p in params]))
    ['13', '14', '15', '23', '24', '25']
    >>> params, param_keys = expand_workflow_inputs({'1': {'input': {'batch': True, 'product': True, 'values': [{'hid': '1'}, {'hid': '2'}]}}, '2': {'input': {'batch': True, 'product': True, 'values': [{'hid': '3'}, {'hid': '4'}, {'hid': '5'}]}}, '3': {'input': {'batch': True, 'product': True, 'values': [{'hid': '6'}, {'hid': '7'}, {'hid': '8'}]}}})
    >>> print(sorted(["%s%s%s" % (p['1']['input']['hid'], p['2']['input']['hid'], p['3']['input']['hid']) for p in params]))
    ['136', '137', '138', '146', '147', '148', '156', '157', '158', '236', '237', '238', '246', '247', '248', '256', '257', '258']
    """
    linked_n = None
    linked = []
    product = []
    linked_keys = []
    product_keys = []
    # Partition batch inputs into 'linked' (zipped together, must share a
    # length) and 'product' (full cartesian expansion) groups.
    for step_id, step in inputs.items():
        for key, value in step.items():
            if isinstance(value, dict) and value.get('batch') is True and isinstance(value.get('values'), list):
                nval = len(value['values'])
                if value.get('product') is True:
                    product.append(value['values'])
                    product_keys.append((step_id, key))
                else:
                    if linked_n is None:
                        linked_n = nval
                    elif linked_n != nval or nval == 0:
                        # 'nval is 0' (identity test on an int) replaced with
                        # the correct equality comparison.
                        raise exceptions.RequestParameterInvalidException(
                            'Failed to match linked batch selections. Please select equal number of data files.')
                    linked.append(value['values'])
                    linked_keys.append((step_id, key))
    params = []
    params_keys = []
    # Sentinels keep the expansion loop below uniform when either group is empty.
    linked = linked or [[None]]
    product = product or [[None]]
    linked_keys = linked_keys or [(None, None)]
    product_keys = product_keys or [(None, None)]
    for linked_values, product_values in itertools.product(zip(*linked), itertools.product(*product)):
        new_params = copy.deepcopy(inputs)
        new_keys = []
        # list() wrappers keep this compatible with Python 3, where zip()
        # returns an iterator and cannot be concatenated with '+'.
        for (step_id, key), value in list(zip(linked_keys, linked_values)) + list(zip(product_keys, product_values)):
            if step_id is not None:
                new_params[step_id][key] = value
                new_keys.append(value['hid'])
        params_keys.append(new_keys)
        params.append(new_params)
    return params, params_keys
def get(self, trans, decoded_library_id, check_accessible=True):
    """
    Get the library from the DB.

    :param decoded_library_id: decoded library id
    :type decoded_library_id: int
    :param check_accessible: flag whether to check that user can access item
    :type check_accessible: bool

    :returns: the requested library
    :rtype: galaxy.model.Library
    """
    model_class = trans.app.model.Library
    try:
        query = trans.sa_session.query(model_class).filter(
            model_class.table.c.id == decoded_library_id)
        library = query.one()
    except MultipleResultsFound:
        raise exceptions.InconsistentDatabase('Multiple libraries found with the same id.')
    except NoResultFound:
        raise exceptions.RequestParameterInvalidException('No library found with the id provided.')
    except Exception as e:
        raise exceptions.InternalServerError('Error loading from the database.' + unicodify(e))
    # Apply the access check (if requested) before handing the library back.
    return self.secure(trans, library, check_accessible)
def parse_order_by(self, order_by_string, default=None):
    """Return an ORM compatible order_by using the given string.

    :param order_by_string: one of 'create_time', 'update_time', 'name' or
        'size', optionally suffixed with '-asc' or '-dsc'; the bare column
        name implies that column's conventional direction.
    :param default: order_by string to fall back on when ``order_by_string``
        is not recognized.
    :raises glx_exceptions.RequestParameterInvalidException: when neither
        ``order_by_string`` nor ``default`` can be parsed.
    """
    # TODO: generalize into class
    # TODO: general (enough) columns
    if order_by_string in ('create_time', 'create_time-dsc'):
        return desc(self.model_class.create_time)
    if order_by_string == 'create_time-asc':
        return asc(self.model_class.create_time)
    if order_by_string in ('update_time', 'update_time-dsc'):
        return desc(self.model_class.update_time)
    if order_by_string == 'update_time-asc':
        return asc(self.model_class.update_time)
    if order_by_string in ('name', 'name-asc'):
        return asc(self.model_class.name)
    if order_by_string == 'name-dsc':
        return desc(self.model_class.name)
    # TODO: history columns
    if order_by_string in ('size', 'size-dsc'):
        return desc(self.model_class.disk_size)
    if order_by_string == 'size-asc':
        return asc(self.model_class.disk_size)
    # TODO: add functional/non-orm orders (such as rating)
    if default:
        return self.parse_order_by(default)
    # 'Unknown' was misspelled 'Unkown' in this user-facing error
    # (the sibling parser spells it correctly).
    raise glx_exceptions.RequestParameterInvalidException(
        'Unknown order_by', order_by=order_by_string,
        available=['create_time', 'update_time', 'name', 'size'])
def invoke(self, trans, workflow_id, payload, **kwd):
    """
    POST /api/workflows/{encoded_workflow_id}/invocations

    Schedule the workflow specified by `workflow_id` to run.
    """
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    is_batch = payload.get('batch')
    # Multiple run configurations are only valid for explicit batch requests.
    if len(run_configs) != 1 and not is_batch:
        raise exceptions.RequestParameterInvalidException("Must specify 'batch' to use batch parameters.")
    scheduler_hint = payload.get('scheduler', None)
    invocations = []
    for run_config in run_configs:
        # TODO: workflow scheduler hints
        work_request_params = dict(scheduler=scheduler_hint)
        workflow_invocation = queue_invoke(
            trans=trans,
            workflow=workflow,
            workflow_run_config=run_config,
            request_params=work_request_params)
        invocations.append(self.encode_all_ids(trans, workflow_invocation.to_dict(), recursive=True))
    # Batch requests get the whole list; otherwise the single invocation.
    return invocations if is_batch else invocations[0]
def parse_order_by(self, order_by_string, default=None):
    """Return an ORM compatible order_by using the given string"""
    # Every recognized specifier maps to an ordering clause; a bare column
    # name implies that column's conventional direction.
    orderings = {
        'hid': desc('hid'),
        'hid-dsc': desc('hid'),
        'hid-asc': asc('hid'),
        'create_time': desc('create_time'),
        'create_time-dsc': desc('create_time'),
        'create_time-asc': asc('create_time'),
        'update_time': desc('update_time'),
        'update_time-dsc': desc('update_time'),
        'update_time-asc': asc('update_time'),
        'name': asc('name'),
        'name-asc': asc('name'),
        'name-dsc': desc('name'),
    }
    if order_by_string in orderings:
        return orderings[order_by_string]
    if default:
        return self.parse_order_by(default)
    # TODO: allow order_by None
    raise glx_exceptions.RequestParameterInvalidException(
        'Unknown order_by', order_by=order_by_string,
        available=['create_time', 'update_time', 'name', 'hid'])
def parse_filter(self, attr, op, val):
    """
    Attempt to parse filter as a custom/fn filter, then an orm filter, and
    if neither work - raise an error.

    :raises exceptions.RequestParameterInvalidException: if no functional or orm
        filter can be parsed.
    """
    try:
        # check for a custom filter
        fn_filter = self._parse_fn_filter(attr, op, val)
        if fn_filter is not None:
            return fn_filter
        # if no custom filter found, try to make an ORM filter
        # note: have to use explicit is None here, bool( sqlalx.filter ) == False
        orm_filter = self._parse_orm_filter(attr, op, val)
        if orm_filter is not None:
            return orm_filter
    # by convention, assume most val parsers raise ValueError
    # (Python 3 compatible syntax; the exception instance was never used,
    # so no 'as' binding is kept)
    except ValueError:
        raise exceptions.RequestParameterInvalidException(
            'unparsable value for filter', column=attr, operation=op, value=val)
    # NOTE(review): if neither parser matches and no ValueError is raised,
    # control falls through and None is returned despite the docstring's
    # ':raises:' claim - confirm against callers whether that is intended.
def save_new_revision(self, trans, page, payload):
    """
    Create and persist a new PageRevision for ``page`` from ``payload``.

    Assumes security has already been checked by caller.
    """
    content = payload.get("content", None)
    content_format = payload.get("content_format", None)
    if not content:
        raise exceptions.ObjectAttributeMissingException("content undefined or empty")
    if content_format not in [None, "html", "markdown"]:
        raise exceptions.RequestParameterInvalidException("content_format [%s], if specified, must be either html or markdown" % content_format)
    title = payload['title'] if 'title' in payload else page.title
    if content_format is None:
        # No explicit format: inherit it from the page's latest revision.
        content_format = page.latest_revision.content_format
    content = self.rewrite_content_for_import(trans, content, content_format=content_format)
    revision = trans.app.model.PageRevision()
    revision.title = title
    revision.page = page
    page.latest_revision = revision
    revision.content = content
    revision.content_format = content_format
    # Persist
    trans.sa_session.flush()
    return revision
def show(self, trans: ProvidesHistoryContext, id, instance_type='history', **kwds):
    """
    GET /api/dataset_collections/{hdca_id}
    GET /api/dataset_collections/{ldca_id}?instance_type=library
    """
    collection_instance = self.__service.get_dataset_collection_instance(
        trans,
        id=id,
        instance_type=instance_type,
    )
    # The serialized parent depends on where the collection lives.
    if instance_type == 'history':
        parent = collection_instance.history
    elif instance_type == 'library':
        parent = collection_instance.folder
    else:
        raise exceptions.RequestParameterInvalidException()
    return dictify_dataset_collection_instance(
        collection_instance,
        security=trans.security,
        parent=parent,
        view='element')
def __create_hda_from_ldda(self, trans, content, history):
    """Copy the library dataset identified by ``content`` into ``history`` as an HDA."""
    ld = self.get_library_dataset(trans, content)
    if type(ld) is not trans.app.model.LibraryDataset:
        raise exceptions.RequestParameterInvalidException(
            "Library content id ( %s ) is not a dataset" % content)
    ldda = ld.library_dataset_dataset_association
    return ldda.to_history_dataset_association(history, add_to_history=True)
def update(self, trans, library, name=None, description=None, synopsis=None):
    """
    Update the given library
    """
    if not trans.user_is_admin:
        raise exceptions.ItemAccessibilityException('Only administrators can update libraries.')
    if library.deleted:
        raise exceptions.RequestParameterInvalidException('You cannot modify a deleted library. Undelete it first.')
    changed = False
    if name is not None:
        library.name = name
        changed = True
        # When library is renamed the root folder has to be renamed too.
        folder_manager = folders.FolderManager()
        folder_manager.update(trans, library.root_folder, name=name)
    if description is not None:
        library.description = description
        changed = True
    if synopsis is not None:
        library.synopsis = synopsis
        changed = True
    if changed:
        trans.sa_session.add(library)
        trans.sa_session.flush()
    return library
def get_permissions(self, trans, encoded_folder_id, **kwd):
    """
    GET /api/folders/{id}/permissions

    Load all permissions for the given folder id and return it.

    :param  encoded_folder_id: the encoded id of the folder
    :type   encoded_folder_id: an encoded id string
    :param  scope: either 'current' or 'available'
    :type   scope: string

    :returns: dictionary with all applicable permissions' values
    :rtype: dictionary

    :raises: InsufficientPermissionsException
    """
    current_user_roles = trans.get_current_user_roles()
    is_admin = trans.user_is_admin
    decoded_folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
    folder = self.folder_manager.get(trans, decoded_folder_id)
    # Only admins or users who can manage the folder may see its permissions.
    if not (is_admin or trans.app.security_agent.can_manage_library_item(current_user_roles, folder)):
        raise exceptions.InsufficientPermissionsException(
            'You do not have proper permission to access permissions of this folder.')
    scope = kwd.get('scope', None)
    if scope == 'current' or scope is None:
        return self.folder_manager.get_current_roles(trans, folder)
    # Return roles that are available to select.
    elif scope == 'available':
        page = kwd.get('page', None)
        page = int(page) if page is not None else 1
        page_limit = kwd.get('page_limit', None)
        page_limit = int(page_limit) if page_limit is not None else 10
        query = kwd.get('q', None)
        roles, total_roles = trans.app.security_agent.get_valid_roles(trans, folder, query, page, page_limit)
        # Expose only id/name/type per role, with ids encoded for the API.
        return_roles = [dict(id=trans.security.encode_id(role.id), name=role.name, type=role.type)
                        for role in roles]
        return dict(roles=return_roles, page=page, page_limit=page_limit, total=total_roles)
    else:
        # Fixed typo ('Alllowed') in this user-facing message.
        raise exceptions.RequestParameterInvalidException(
            "The value of 'scope' parameter is invalid. Allowed values: current, available")
def update(self, trans, id, payload, **kwds):
    """
    * PUT /api/workflows/{id}
        updates the workflow stored with ``id``

    :type   id:      str
    :param  id:      the encoded id of the workflow to update
    :type   payload: dict
    :param  payload: a dictionary containing any or all the
        * workflow   the json description of the workflow as would be
                     produced by GET workflows/<id>/download or
                     given to `POST workflows`

                     The workflow contents will be updated to target this.

    :rtype:     dict
    :returns:   serialized version of the workflow
    """
    stored_workflow = self.__get_stored_workflow(trans, id)
    if 'workflow' not in payload:
        message = "Updating workflow requires dictionary containing 'workflow' attribute with new JSON description."
        raise exceptions.RequestParameterInvalidException(message)
    workflow_contents_manager = workflows.WorkflowContentsManager()
    # The (workflow, errors) pair returned here was previously assigned but
    # never used; only the side effect of updating the stored workflow matters.
    workflow_contents_manager.update_workflow_from_dict(
        trans,
        stored_workflow,
        payload['workflow'],
    )
    return self.workflow_contents_manager.workflow_to_dict(trans, stored_workflow, style="instance")
def show(self, trans, id, deleted='False', **kwd):
    """
    GET /api/users/{encoded_id}
    GET /api/users/deleted/{encoded_id}
    GET /api/users/current
    Displays information about a user.
    """
    deleted = util.string_as_bool(deleted)
    try:
        if id == "current":
            # Requesting one's own data; anonymous users only get usage/quota.
            if not trans.user:
                return self.anon_user_api_value(trans)
            user = trans.user
        else:
            user = self.get_user(trans, id, deleted=deleted)
        # Non-admins may only view themselves, and only while not deleted.
        # An AssertionError here is deliberately folded into the generic
        # invalid-id response by the except clause below.
        if not trans.user_is_admin:
            assert trans.user == user
            assert not user.deleted
    except exceptions.ItemDeletionException:
        raise
    except Exception:
        raise exceptions.RequestParameterInvalidException('Invalid user id specified', id=id)
    return self.user_serializer.serialize_to_view(user, view='detailed')
def create(self, trans, payload, **kwd):
    """
    POST /api/users
    Creates a new Galaxy user.
    """
    app_config = trans.app.config
    if not app_config.allow_user_creation and not trans.user_is_admin:
        raise exceptions.ConfigDoesNotAllowException('User creation is not allowed in this Galaxy instance')
    if app_config.use_remote_user and trans.user_is_admin:
        user = trans.get_or_create_remote_user(remote_user_email=payload['remote_user_email'])
    elif trans.user_is_admin:
        username = payload['username']
        email = payload['email']
        password = payload['password']
        # Collect all validation errors into a single newline-joined message.
        message = "\n".join([
            validate_email(trans, email),
            validate_password(trans, password, password),
            validate_publicname(trans, username)
        ]).rstrip()
        if message:
            raise exceptions.RequestParameterInvalidException(message)
        user = self.user_manager.create(email=email, username=username, password=password)
    else:
        raise exceptions.NotImplemented()
    return user.to_dict(view='element',
                        value_mapper={'id': trans.security.encode_id,
                                      'total_disk_usage': float})
def _normalize_step_parameters(steps, param_map, legacy=False, already_normalized=False):
    """
    Take a complex param_map that can reference parameters by
    step_id in the new flexible way or in the old one-parameter
    per step fashion or by tool id and normalize the parameters so
    everything is referenced by a numeric step id.
    """
    normalized = {}
    for step in steps:
        if already_normalized:
            step_params = param_map.get(str(step.order_index), {})
        else:
            step_params = _step_parameters(step, param_map, legacy=legacy)
        if step.type == "subworkflow" and step_params:
            if not already_normalized:
                raise exceptions.RequestParameterInvalidException(
                    "Specifying subworkflow step parameters requires already_normalized to be specified as true.")
            # Re-nest "<index>|<name>" keys into {index: {name: value}} and
            # recurse into the subworkflow's own steps.
            grouped = {}
            for key, value in step_params.items():
                sub_index, param_name = key.split("|", 1)
                grouped.setdefault(sub_index, {})[param_name] = value
            step_params = _normalize_step_parameters(
                step.subworkflow.steps, grouped,
                legacy=legacy, already_normalized=already_normalized)
        if step_params:
            normalized[step.id] = step_params
    return normalized
def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Create (or copy) a dataset collection in ``history`` and serialize it."""
    source = kwd.get("source", payload.get("source", "new_collection"))
    service = trans.app.dataset_collections_service
    if source == "new_collection":
        create_params = api_payload_to_create_params(payload)
        instance = service.create(trans, parent=history, **create_params)
    elif source == "hdca":
        content = payload.get('content', None)
        if content is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        instance = service.copy(
            trans=trans,
            parent=history,
            source="hdca",
            encoded_source_id=content,
        )
    else:
        raise exceptions.RequestParameterInvalidException("Invalid 'source' parameter in request %s" % source)
    # if the consumer specified keys or view, use the secondary serializer
    if 'view' in kwd or 'keys' in kwd:
        return self.hdca_serializer.serialize_to_view(
            instance, user=trans.user, trans=trans,
            **self._parse_serialization_params(kwd, 'detailed'))
    return self.__collection_dict(trans, instance, view="element")
def _search(self, trans, q, page=1, page_size=10):
    """
    Perform the search over TS tools index.
    Note that search works over the Whoosh index which you have
    to pre-create with scripts/tool_shed/build_ts_whoosh_index.sh
    manually. Also TS config option toolshed_search_on has to be
    True and whoosh_index_dir has to be specified.
    """
    conf = self.app.config
    if not conf.toolshed_search_on:
        raise exceptions.ConfigDoesNotAllowException('Searching the TS through the API is turned off for this instance.')
    if not conf.whoosh_index_dir:
        raise exceptions.ConfigDoesNotAllowException('There is no directory for the search index specified. Please contact the administrator.')
    search_term = q.strip()
    if len(search_term) < 3:
        raise exceptions.RequestParameterInvalidException('The search term has to be at least 3 characters long.')
    tool_search = ToolSearch()
    # Field boosts are configurable; otherwise tuned defaults apply.
    Boosts = namedtuple('Boosts', ['tool_name_boost', 'tool_description_boost', 'tool_help_boost', 'tool_repo_owner_username_boost'])
    boosts = Boosts(
        float(conf.get('tool_name_boost', 1.2)),
        float(conf.get('tool_description_boost', 0.6)),
        float(conf.get('tool_help_boost', 0.4)),
        float(conf.get('tool_repo_owner_username_boost', 0.3)))
    hits = tool_search.search(trans, search_term, page, page_size, boosts)
    hits['hostname'] = web.url_for('/', qualified=True)
    return hits
def validate_and_sanitize_basestring_list(key, val):
    """
    Validate that ``val`` is a list and return it with each element
    HTML-sanitized.

    :raises exceptions.RequestParameterInvalidException: if ``val`` is not a
        list of strings.
    """
    # An explicit isinstance check (instead of 'assert') keeps validation
    # active under 'python -O', which strips assert statements; raising
    # TypeError routes it through the same error path as a bad element.
    try:
        if not isinstance(val, list):
            raise TypeError()
        return [sanitize_html(t, 'utf-8', 'text/html') for t in val]
    except TypeError:
        raise exceptions.RequestParameterInvalidException(
            '%s must be a list of strings: %s' % (key, str(type(val))))
def _get_target_history(trans, workflow, payload, param_keys=None, index=0):
    """Resolve (or create) the history a workflow invocation should target."""
    param_keys = param_keys or []
    history_name = payload.get('new_history_name', None)
    history_id = payload.get('history_id', None)
    history_param = payload.get('history', None)
    # At most one of the three ways to specify a target history is allowed.
    if [history_name, history_id, history_param].count(None) < 2:
        raise exceptions.RequestParameterInvalidException("Specified workflow target history multiple ways - at most one of 'history', 'history_id', and 'new_history_name' may be specified.")
    if history_param:
        # Legacy 'history' parameter: either "hist_id=<id>" or a new name.
        if history_param.startswith('hist_id='):
            history_id = history_param[len('hist_id='):]
        else:
            history_name = history_param
    if history_id:
        history_manager = histories.HistoryManager(trans.app)
        return history_manager.get_owned(trans.security.decode_id(history_id), trans.user, current_history=trans.history)
    nh_name = history_name if history_name else 'History from %s workflow' % workflow.name
    if len(param_keys) <= index:
        raise exceptions.MessageException("Incorrect expansion of workflow batch parameters.")
    ids = param_keys[index]
    nids = len(ids)
    # Suffix the history name with the batch input identifiers.
    if nids == 1:
        nh_name = '%s on %s' % (nh_name, ids[0])
    elif nids > 1:
        nh_name = '%s on %s and %s' % (nh_name, ', '.join(ids[0:-1]), ids[-1])
    new_history = trans.app.model.History(user=trans.user, name=nh_name)
    trans.sa_session.add(new_history)
    return new_history
def __create_dataset(self, trans, history, payload, **kwd):
    """Copy an existing dataset (library or hda) into ``history`` and serialize it."""
    source = payload.get('source', None)
    if source not in ('library', 'hda'):
        raise exceptions.RequestParameterInvalidException(
            "'source' must be either 'library' or 'hda': %s" % (source))
    content = payload.get('content', None)
    if content is None:
        raise exceptions.RequestParameterMissingException("'content' id of lda or hda is missing")
    hda = None
    if source == 'library':
        # copy from library dataset
        hda = self.__create_hda_from_ldda(trans, content, history)
    elif source == 'hda':
        # copy an existing, accessible hda
        unencoded_hda_id = self.decode_id(content)
        original = self.hda_manager.get_accessible(unencoded_hda_id, trans.user)
        # check for access on history that contains the original hda as well
        self.history_manager.error_unless_accessible(original.history, trans.user, current_history=trans.history)
        hda = self.hda_manager.copy(original, history=history)
    trans.sa_session.flush()
    if hda is None:
        return None
    return self.hda_serializer.serialize_to_view(
        hda, user=trans.user, trans=trans,
        **self._parse_serialization_params(kwd, 'detailed'))
def __create_dataset_collection(self, trans, history, payload, **kwd):
    """Create a new dataset collection in ``history`` or copy an existing HDCA."""
    source = kwd.get("source", "new_collection")
    service = trans.app.dataset_collections_service
    if source == "new_collection":
        create_params = api_payload_to_create_params(payload)
        instance = service.create(trans, parent=history, **create_params)
    elif source == "hdca":
        content = payload.get('content', None)
        if content is None:
            raise exceptions.RequestParameterMissingException("'content' id of target to copy is missing")
        instance = service.copy(
            trans=trans,
            parent=history,
            source="hdca",
            encoded_source_id=content,
        )
    else:
        raise exceptions.RequestParameterInvalidException("Invalid 'source' parameter in request %s" % source)
    return self.__collection_dict(trans, instance, view="element")
def index(self, trans: ProvidesUserContext, running=False, job_id=None, **kwd):
    """
    * GET /api/entry_points
        Returns tool entry point information. Currently passing a job_id
        parameter is required, as this becomes more general that won't be
        needed.

    :type   job_id: string
    :param  job_id: Encoded job id

    :type   running: boolean
    :param  running: filter to only include running job entry points.

    :rtype:     list
    :returns:   list of entry point dictionaries.
    """
    running = util.asbool(running)
    # Exactly one of job_id / running=true must be supplied.
    # (Grammar fixed in both messages: "must passed" -> "must be passed".)
    if job_id is None and not running:
        raise exceptions.RequestParameterInvalidException(
            "Currently this API must be passed a job id or running=true")
    if job_id is not None and running:
        raise exceptions.RequestParameterInvalidException(
            "Currently this API must be passed only a job id or running=true")
    if job_id is not None:
        job = trans.sa_session.query(Job).get(self.decode_id(job_id))
        if not self.interactivetool_manager.can_access_job(trans, job):
            raise exceptions.ItemAccessibilityException()
        entry_points = job.interactivetool_entry_points
    if running:
        entry_points = self.interactivetool_manager.get_nonterminal_for_user_by_trans(trans)
    rval = []
    for entry_point in entry_points:
        as_dict = self.encode_all_ids(trans, entry_point.to_dict(), True)
        target = self.interactivetool_manager.target_if_active(trans, entry_point)
        if target:
            as_dict["target"] = target
        rval.append(as_dict)
    return rval
def get_permissions(
    self,
    trans,
    id: EncodedDatabaseIdField,
    scope: Optional[LibraryPermissionScope] = LibraryPermissionScope.current,
    is_library_access: Optional[bool] = False,
    page: Optional[int] = 1,
    page_limit: Optional[int] = 10,
    query: Optional[str] = None,
) -> Union[LibraryCurrentPermissions, LibraryAvailablePermissions]:
    """Load all permissions for the given library id and return it.

    :param  id: the encoded id of the library
    :type   id: an encoded id string
    :param  scope: either 'current' or 'available'
    :type   scope: string
    :param  is_library_access: indicates whether the roles available for the library access are requested
    :type   is_library_access: bool

    :returns: dictionary with all applicable permissions' values
    :rtype: dictionary

    :raises: InsufficientPermissionsException
    """
    current_user_roles = trans.get_current_user_roles()
    is_admin = trans.user_is_admin
    library = self.library_manager.get(trans, trans.security.decode_id(id, object_name='library'))
    # Only admins or users who can manage the library may see its permissions.
    if not (is_admin or trans.app.security_agent.can_manage_library_item(current_user_roles, library)):
        raise exceptions.InsufficientPermissionsException(
            'You do not have proper permission to access permissions of this library.')
    if scope == LibraryPermissionScope.current or scope is None:
        roles = self.library_manager.get_current_roles(trans, library)
        return LibraryCurrentPermissions.parse_obj(roles)
    # Return roles that are available to select.
    elif scope == LibraryPermissionScope.available:
        roles, total_roles = trans.app.security_agent.get_valid_roles(
            trans, library, query, page, page_limit, is_library_access)
        return_roles = []
        for role in roles:
            role_id = trans.security.encode_id(role.id)
            return_roles.append(dict(id=role_id, name=role.name, type=role.type))
        return LibraryAvailablePermissions(roles=return_roles, page=page, page_limit=page_limit, total=total_roles)
    else:
        # Fixed typo ('Alllowed') in this user-facing message.
        raise exceptions.RequestParameterInvalidException(
            "The value of 'scope' parameter is invalid. Allowed values: current, available")
def index(self, trans, library_id, **kwd):
    """
    index( self, trans, library_id, **kwd )
    * GET /api/libraries/{library_id}/contents:
        Returns a list of library files and folders.

    .. note:: May be slow! Returns all content traversing recursively through all folders.
    .. seealso:: :class:`galaxy.webapps.galaxy.api.FolderContentsController.index` for a non-recursive solution

    :param  library_id: the encoded id of the library
    :type   library_id: str

    :returns:   list of dictionaries of the form:
        * id:   the encoded id of the library item
        * name: the 'library path'
            or relationship of the library item to the root
        * type: 'file' or 'folder'
        * url:  the url to get detailed information on the library item
    :rtype:     list

    :raises:  MalformedId, InconsistentDatabase, RequestParameterInvalidException, InternalServerError
    """
    rval = []
    current_user_roles = trans.get_current_user_roles()

    def traverse(folder):
        # Recursively collect accessible, non-deleted subfolders and datasets,
        # annotating each with its 'library path' and api type.
        admin = trans.user_is_admin()
        rval = []
        for subfolder in folder.active_folders:
            if not admin:
                can_access, folder_ids = trans.app.security_agent.check_folder_contents(trans.user, current_user_roles, subfolder)
            if (admin or can_access) and not subfolder.deleted:
                subfolder.api_path = folder.api_path + '/' + subfolder.name
                subfolder.api_type = 'folder'
                rval.append(subfolder)
                rval.extend(traverse(subfolder))
        for ld in folder.datasets:
            if not admin:
                can_access = trans.app.security_agent.can_access_dataset(current_user_roles, ld.library_dataset_dataset_association.dataset)
            if (admin or can_access) and not ld.deleted:
                ld.api_path = folder.api_path + '/' + ld.name
                ld.api_type = 'file'
                rval.append(ld)
        return rval

    try:
        decoded_library_id = self.decode_id(library_id)
    except Exception:
        raise exceptions.MalformedId('Malformed library id ( %s ) specified, unable to decode.' % library_id)
    try:
        library = trans.sa_session.query(trans.app.model.Library).filter(
            trans.app.model.Library.table.c.id == decoded_library_id).one()
    except MultipleResultsFound:
        raise exceptions.InconsistentDatabase('Multiple libraries found with the same id.')
    except NoResultFound:
        raise exceptions.RequestParameterInvalidException('No library found with the id provided.')
    except Exception as e:
        # Python 3 compatible 'except ... as e' (was the Py2-only 'except Exception, e').
        raise exceptions.InternalServerError('Error loading from the database.' + str(e))
    # NOTE(review): the visible chunk ends here without using traverse() or
    # returning rval - the remainder of this method appears truncated in this
    # view; confirm against the full source before relying on this block.
def show_roles(self, trans, encoded_dataset_id, **kwd):
    """
    show_roles( self, trans, id, **kwd ):
    * GET /api/libraries/datasets/{encoded_dataset_id}/permissions
        Displays information about current or available roles
        for a given dataset permission.

    :param  encoded_dataset_id: the encoded id of the dataset to query
    :type   encoded_dataset_id: an encoded id string
    :param  scope: either 'current' or 'available'
    :type   scope: string

    :rtype:     dictionary
    :returns:   either dict of current roles for all permission types
        or dict of available roles to choose from (is the same for any
        permission type)
    """
    current_user_roles = trans.get_current_user_roles()
    try:
        library_dataset = self.get_library_dataset(trans, id=encoded_dataset_id, check_ownership=False, check_accessible=False)
    except Exception as e:
        raise exceptions.ObjectNotFound('Requested dataset was not found.' + str(e))
    dataset = library_dataset.library_dataset_dataset_association.dataset
    # User has to have manage permissions permission in order to see the roles.
    can_manage = trans.app.security_agent.can_manage_dataset(current_user_roles, dataset) or trans.user_is_admin()
    if not can_manage:
        raise exceptions.InsufficientPermissionsException('You do not have proper permission to access permissions.')
    scope = kwd.get('scope', None)
    if scope == 'current' or scope is None:
        return self._get_current_roles(trans, library_dataset)
    # Return roles that are available to select.
    elif scope == 'available':
        page = kwd.get('page', None)
        page = int(page) if page is not None else 1
        page_limit = kwd.get('page_limit', None)
        page_limit = int(page_limit) if page_limit is not None else 10
        query = kwd.get('q', None)
        roles, total_roles = trans.app.security_agent.get_valid_roles(trans, dataset, query, page, page_limit)
        return_roles = []
        for role in roles:
            role_id = trans.security.encode_id(role.id)
            return_roles.append(dict(id=role_id, name=role.name, type=role.type))
        return dict(roles=return_roles, page=page, page_limit=page_limit, total=total_roles)
    else:
        # Fixed typo ('Alllowed') in this user-facing message.
        raise exceptions.RequestParameterInvalidException(
            "The value of 'scope' parameter is invalid. Allowed values: current, available")
def __create_dataset(self, trans, history, payload, **kwd):
    """Copy a library dataset or an existing HDA into ``history`` and serialize it."""
    source = payload.get('source', None)
    if source not in ('library', 'hda'):
        raise exceptions.RequestParameterInvalidException(
            "'source' must be either 'library' or 'hda': %s" % (source))
    content = payload.get('content', None)
    if content is None:
        raise exceptions.RequestParameterMissingException("'content' id of lda or hda is missing")
    hda = None
    if source == 'library':
        # copy from library dataset
        ld = self.get_library_dataset(trans, content, check_ownership=False, check_accessible=False)
        # TODO: why would get_library_dataset NOT return a library dataset?
        if type(ld) is not trans.app.model.LibraryDataset:
            raise exceptions.RequestParameterInvalidException(
                "Library content id ( %s ) is not a dataset" % content)
        # insert into history
        hda = ld.library_dataset_dataset_association.to_history_dataset_association(history, add_to_history=True)
    elif source == 'hda':
        # copy an existing, accessible hda
        decoded_hda_id = self.decode_id(content)
        original = self.hda_manager.get_accessible(decoded_hda_id, trans.user)
        # check for access on history that contains the original hda as well
        self.history_manager.error_unless_accessible(original.history, trans.user, current_history=trans.history)
        hda = self.hda_manager.copy(original, history=history)
    trans.sa_session.flush()
    if hda is None:
        return None
    return self.hda_serializer.serialize_to_view(
        hda, user=trans.user, trans=trans,
        **self._parse_serialization_params(kwd, 'detailed'))
def _get_user(self, trans, id):
    """Return the user for ``id``; only the user themselves or an admin may access it."""
    user = self.get_user(trans, id)
    if not user:
        raise exceptions.RequestParameterInvalidException('Invalid user (%s).' % id)
    is_self = (user == trans.user)
    if not is_self and not trans.user_is_admin:
        raise exceptions.InsufficientPermissionsException('Access denied.')
    return user
def do_invocation_step_action(self, step, action):
    """
    Update or set the workflow invocation state action - generic
    extension point meant to allows users to interact with interactive
    workflow modules. The action object returned from this method will
    be attached to the WorkflowInvocationStep and be available the next
    time the workflow scheduler visits the workflow.
    """
    # Base implementation: modules without action support always reject.
    raise exceptions.RequestParameterInvalidException(
        "Attempting to perform invocation step action on module that does not support actions.")