def _check_access(self, trans, is_admin, item, current_user_roles):
    if isinstance(item, trans.model.HistoryDatasetAssociation):
        # Make sure the user has the DATASET_ACCESS permission on the history_dataset_association.
        if not item:
            message = f"Invalid history dataset ({escape(str(item))}) specified."
            raise ObjectNotFound(message)
        elif not trans.app.security_agent.can_access_dataset(current_user_roles, item.dataset) and item.history.user == trans.user:
            message = f"You do not have permission to access the history dataset with id ({str(item.id)})."
            raise ItemAccessibilityException(message)
    else:
        # Make sure the user has the LIBRARY_ACCESS permission on the library item.
        if not item:
            message = f"Invalid library item ({escape(str(item))}) specified."
            raise ObjectNotFound(message)
        elif not (is_admin or trans.app.security_agent.can_access_library_item(current_user_roles, item, trans.user)):
            if isinstance(item, trans.model.Library):
                item_type = 'data library'
            elif isinstance(item, trans.model.LibraryFolder):
                item_type = 'folder'
            else:
                item_type = '(unknown item type)'
            message = f"You do not have permission to access the {escape(item_type)} with id ({str(item.id)})."
            raise ItemAccessibilityException(message)
def get_accessible_job(self, trans, decoded_job_id):
    job = trans.sa_session.query(trans.app.model.Job).filter(trans.app.model.Job.id == decoded_job_id).first()
    if job is None:
        raise ObjectNotFound()
    belongs_to_user = (job.user == trans.user) if job.user else (job.session_id == trans.get_galaxy_session().id)
    if not trans.user_is_admin and not belongs_to_user:
        # Check access granted via output datasets.
        if not job.output_datasets:
            raise ItemAccessibilityException("Job has no output datasets.")
        for data_assoc in job.output_datasets:
            if not self.dataset_manager.is_accessible(data_assoc.dataset.dataset, trans.user):
                raise ItemAccessibilityException("You are not allowed to rerun this job.")
    trans.sa_session.refresh(job)
    return job
def index( self, trans, **kwargs ):
    """
    GET /api/visualizations:
    """
    #TODO: search for visualizations that apply to an object (sending model class and id? - how to do this?)
    rval = []
    try:
        if not trans.user:
            raise ItemAccessibilityException( 'You must be logged in to access visualizations' )
        user = trans.user

        #TODO: search for: title, made by user, creation time range, type (vis name), dbkey, etc.
        #TODO: limit, offset, order_by
        #TODO: deleted

        # this is the default search - user's vis, vis shared with user, published vis
        visualizations = self.get_visualizations_by_user( trans, user )
        visualizations += self.get_visualizations_shared_with_user( trans, user )
        visualizations += self.get_published_visualizations( trans, exclude_user=user )
        #TODO: the admin case - everything

        for visualization in visualizations:
            item = self.get_visualization_summary_dict( visualization )
            item = trans.security.encode_dict_ids( item )
            item[ 'url' ] = web.url_for( 'visualization', id=item[ 'id' ] )
            rval.append( item )

    except ItemAccessibilityException as exception:
        trans.response.status = 403
        rval = { 'error': str( exception ) }
        if trans.debug:
            log.exception( str( exception ) )

    return rval
def update(self, trans, folder, name=None, description=None):
    """
    Update the given folder's name or description.

    :param folder: the model object
    :type folder: LibraryFolder
    :param name: new name for the library folder
    :type name: str
    :param description: new description for the library folder
    :type description: str

    :returns: the folder
    :rtype: LibraryFolder

    :raises: ItemAccessibilityException, InsufficientPermissionsException
    """
    changed = False
    if not trans.user_is_admin:
        folder = self.check_modifyable(trans, folder)
    if folder.deleted is True:
        raise ItemAccessibilityException("You cannot update a deleted library folder. Undelete it first.")
    if name is not None and name != folder.name:
        folder.name = name
        changed = True
    if description is not None and description != folder.description:
        folder.description = description
        changed = True
    if changed:
        trans.sa_session.add(folder)
        trans.sa_session.flush()
    return folder
def _check_user_access(self, user_context):
    """Raises an exception if the given user doesn't have the rights to access this file source.

    Warning: if the user_context is None, then the check is skipped. This is due to tool
    execution contexts not having access to the user_context. The validation will be done
    when checking the tool parameters.
    """
    if user_context is not None and not self.user_has_access(user_context):
        raise ItemAccessibilityException(f"User {user_context.username} has no access to file source.")
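# Hypothetical usage sketch (assumed names, not part of the original code): shows where a
# file-source method would call _check_user_access. A None user_context (tool execution)
# deliberately skips the check, as the docstring above explains; validation is then deferred
# to tool-parameter checking.
class ExampleFilesSource:
    def user_has_access(self, user_context):
        # Assumed predicate; a real plugin would evaluate configured role/group restrictions.
        return bool(getattr(user_context, "role_names", set()) & {"file_source_users"})

    def _check_user_access(self, user_context):
        if user_context is not None and not self.user_has_access(user_context):
            raise ItemAccessibilityException(f"User {user_context.username} has no access to file source.")

    def list(self, path="/", user_context=None):
        self._check_user_access(user_context)  # raises ItemAccessibilityException on denial
        return []  # a real implementation would enumerate remote entries here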
def __get_library_collection_instance( self, trans, id, check_ownership=False, check_accessible=True ):
    if check_ownership:
        raise NotImplementedError( "Functionality (getting library dataset collection with ownership check) unimplemented." )
    instance_id = int( trans.security.decode_id( id ) )
    collection_instance = trans.sa_session.query( trans.app.model.LibraryDatasetCollectionAssociation ).get( instance_id )
    if check_accessible:
        if not trans.app.security_agent.can_access_library_item( trans.get_current_user_roles(), collection_instance, trans.user ):
            raise ItemAccessibilityException( "LibraryDatasetCollectionAssociation is not accessible to the current user", type='error' )
    return collection_instance
def __authorize_job_access(self, encoded_job_id, **kwargs):
    key = "job_key"
    if key not in kwargs:
        error_message = "Job files action requires a valid '%s'." % key
        raise ObjectAttributeMissingException(error_message)

    job_id = self._security.decode_id(encoded_job_id)
    job_key = self._security.encode_id(job_id, kind="jobs_files")
    if not util.safe_str_cmp(kwargs["job_key"], job_key):
        raise ItemAccessibilityException("Invalid job_key supplied.")

    # Verify job is active. Don't update the contents of complete jobs.
    sa_session = self._app.model.context.current
    job = sa_session.query(model.Job).get(job_id)
    if not job.running:
        error_message = "Attempting to read or modify the files of a job that has already completed."
        raise ItemAccessibilityException(error_message)
    return job
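# Hypothetical sketch (assumed helper name): the job_key a caller must present is simply the
# job id re-encoded with the "jobs_files" kind, so only code holding the same id secret can
# mint it. `security` stands in for the same id-encoding helper used as self._security above.
def build_job_files_params(security, job_id):
    return {
        "job_id": security.encode_id(job_id),                        # used in the request URL
        "job_key": security.encode_id(job_id, kind="jobs_files"),    # passed as the job_key argument
    }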
def delete(self, trans, group, undelete=False):
    """
    Mark given group deleted/undeleted based on the flag.
    """
    if not trans.user_is_admin():
        raise ItemAccessibilityException('Only administrators can delete and undelete groups.')
    if undelete:
        group.deleted = False
    else:
        group.deleted = True
    trans.sa_session.add(group)
    trans.sa_session.flush()
    return group
def create(self, trans, name, description=''):
    """
    Create a new group.
    """
    if not trans.user_is_admin():
        raise ItemAccessibilityException('Only administrators can create groups.')
    else:
        if self.get(trans, name=name):
            raise Conflict('Group with the given name already exists. Name: ' + str(name))
        # TODO add description field to the model
        group = trans.app.model.Group(name=name)
        trans.sa_session.add(group)
        trans.sa_session.flush()
        return group
def update(self, trans, group, name=None, description=None):
    """
    Update the given group.
    """
    changed = False
    if not trans.user_is_admin():
        raise ItemAccessibilityException('Only administrators can update groups.')
    if group.deleted:
        raise RequestParameterInvalidException('You cannot modify a deleted group. Undelete it first.')
    if name is not None:
        group.name = name
        changed = True
    if description is not None:
        group.description = description
        changed = True
    if changed:
        trans.sa_session.add(group)
        trans.sa_session.flush()
    return group
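# Hypothetical usage sketch (assumed controller and payload shape): the group manager methods
# above enforce the admin check themselves and raise ItemAccessibilityException otherwise, so a
# thin API layer can pass the transaction straight through and let that exception map to a 403.
class GroupsAPIController:
    def __init__(self, group_manager):
        self.manager = group_manager

    def create(self, trans, payload):
        group = self.manager.create(trans, name=payload["name"], description=payload.get("description", ""))
        return {"id": trans.security.encode_id(group.id), "name": group.name}

    def delete(self, trans, group, undelete=False):
        group = self.manager.delete(trans, group, undelete=undelete)
        return {"id": trans.security.encode_id(group.id), "deleted": group.deleted}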
def get(self, trans, history_id, bucket_name, objects, authz_id, input_args=None):
    """
    Implements the logic of getting a file from a cloud-based storage (e.g., Amazon S3)
    and persisting it as a Galaxy dataset.

    This manager does NOT require user credentials; instead, it uses a more secure method,
    which leverages CloudAuthz (https://github.com/galaxyproject/cloudauthz) and automatically
    requests temporary credentials to access the defined resources.

    :type trans: galaxy.webapps.base.webapp.GalaxyWebTransaction
    :param trans: Galaxy web transaction

    :type history_id: string
    :param history_id: the (decoded) id of the history to which the object should be received.

    :type bucket_name: string
    :param bucket_name: the name of a bucket from which data should be fetched (e.g., a bucket name on AWS S3).

    :type objects: list of string
    :param objects: the names of the objects to be fetched.

    :type authz_id: int
    :param authz_id: the ID of CloudAuthz to be used for authorizing access to the resource provider.
                     You may get a list of the defined authorizations sending GET to `/api/cloud/authz`.
                     Also, you can POST to `/api/cloud/authz` to define a new authorization.

    :type input_args: dict
    :param input_args: an [Optional] dictionary of input parameters:
                       dbkey, file_type, space_to_tab, to_posix_lines
                       (see galaxy/webapps/galaxy/api/cloud.py)

    :rtype: list of galaxy.model.Dataset
    :return: a list of datasets created for the fetched files.
    """
    if CloudProviderFactory is None:
        raise Exception(NO_CLOUDBRIDGE_ERROR_MESSAGE)

    if input_args is None:
        input_args = {}

    if not hasattr(trans.app, 'authnz_manager'):
        err_msg = "The OpenID Connect protocol, a required feature for getting data from cloud, " \
                  "is not enabled on this Galaxy instance."
        log.debug(err_msg)
        raise MessageException(err_msg)

    cloudauthz = trans.app.authnz_manager.try_get_authz_config(trans.sa_session, trans.user.id, authz_id)
    credentials = trans.app.authnz_manager.get_cloud_access_credentials(cloudauthz, trans.sa_session, trans.user.id, trans.request)
    connection = self.configure_provider(cloudauthz.provider, credentials)
    try:
        bucket = connection.storage.buckets.get(bucket_name)
        if bucket is None:
            raise RequestParameterInvalidException(f"The bucket `{bucket_name}` not found.")
    except Exception as e:
        raise ItemAccessibilityException("Could not get the bucket `{}`: {}".format(bucket_name, util.unicodify(e)))

    datasets = []
    for obj in objects:
        try:
            key = bucket.objects.get(obj)
        except Exception as e:
            raise MessageException("The following error occurred while getting the object {}: {}".format(obj, util.unicodify(e)))
        if key is None:
            log.exception(
                "Could not get object `{}` for user `{}`. Object may not exist, or the provided credentials are "
                "invalid or not authorized to read the bucket/object.".format(obj, trans.user.id))
            raise ObjectNotFound(
                "Could not get the object `{}`. Please check if the object exists, and credentials are valid and "
                "authorized to read the bucket and object.".format(obj))

        params = Params(self._get_inputs(obj, key, input_args), sanitize=False)
        incoming = params.__dict__

        history = trans.sa_session.query(trans.app.model.History).get(history_id)
        if not history:
            raise ObjectNotFound("History with ID `{}` not found.".format(trans.app.security.encode_id(history_id)))
        output = trans.app.toolbox.get_tool('upload1').handle_input(trans, incoming, history=history)

        job_errors = output.get('job_errors', [])
        if job_errors:
            raise ValueError('Following error occurred while getting the given object(s) from {}: {}'.format(cloudauthz.provider, job_errors))
        else:
            for d in output['out_data']:
                datasets.append(d[1].dataset)

    return datasets
def _check_user_access(self, user_context):
    """Raises an exception if the given user doesn't have the rights to access this file source."""
    if user_context is not None and not self.user_has_access(user_context):
        raise ItemAccessibilityException(f"User {user_context.username} has no access to file source.")
def _check_add(self, trans, is_admin, item, current_user_roles):
    # Deny access if the user is not an admin and does not have the LIBRARY_ADD permission.
    if not (is_admin or trans.app.security_agent.can_add_library_item(current_user_roles, item)):
        message = f"You are not authorized to add an item to ({escape(item.name)})."
        raise ItemAccessibilityException(message)
def upload(self, trans, history_id, provider, bucket_name, objects, credentials, input_args=None):
    """
    Implements the logic of uploading a file from a cloud-based storage (e.g., Amazon S3)
    and persisting it as a Galaxy dataset.

    :type trans: galaxy.web.framework.webapp.GalaxyWebTransaction
    :param trans: Galaxy web transaction

    :type history_id: string
    :param history_id: the (decoded) id of the history to which the object should be uploaded.

    :type provider: string
    :param provider: the name of the cloud-based resource provider. A list of supported providers
                     is given in the `SUPPORTED_PROVIDERS` variable.

    :type bucket_name: string
    :param bucket_name: the name of a bucket from which data should be uploaded (e.g., a bucket name on AWS S3).

    :type objects: list of string
    :param objects: the names of the objects to be uploaded.

    :type credentials: dict
    :param credentials: a dictionary containing all the credentials required to authenticate to the
                        specified provider (e.g., {"secret_key": YOUR_AWS_SECRET_TOKEN,
                        "access_key": YOUR_AWS_ACCESS_TOKEN}).

    :type input_args: dict
    :param input_args: an [Optional] dictionary of input parameters:
                       dbkey, file_type, space_to_tab, to_posix_lines
                       (see galaxy/webapps/galaxy/api/cloud.py)

    :rtype: list of galaxy.model.Dataset
    :return: a list of datasets created for the uploaded files.
    """
    if CloudProviderFactory is None:
        raise Exception(NO_CLOUDBRIDGE_ERROR_MESSAGE)

    if input_args is None:
        input_args = {}

    connection = self._configure_provider(provider, credentials)
    try:
        bucket = connection.storage.buckets.get(bucket_name)
        if bucket is None:
            raise RequestParameterInvalidException("The bucket `{}` not found.".format(bucket_name))
    except Exception as e:
        raise ItemAccessibilityException("Could not get the bucket `{}`: {}".format(bucket_name, str(e)))

    datasets = []
    for obj in objects:
        try:
            key = bucket.objects.get(obj)
        except Exception as e:
            raise MessageException("The following error occurred while getting the object {}: {}".format(obj, str(e)))
        if key is None:
            raise ObjectNotFound("Could not get the object `{}`.".format(obj))

        params = Params(self._get_inputs(obj, key, input_args), sanitize=False)
        incoming = params.__dict__

        history = trans.sa_session.query(trans.app.model.History).get(history_id)
        if not history:
            raise ObjectNotFound("History with ID `{}` not found.".format(trans.app.security.encode_id(history_id)))
        output = trans.app.toolbox.get_tool('upload1').handle_input(trans, incoming, history=history)

        job_errors = output.get('job_errors', [])
        if job_errors:
            raise ValueError('Following error occurred while uploading the given object(s) from {}: {}'.format(provider, job_errors))
        else:
            for d in output['out_data']:
                datasets.append(d[1].dataset)

    return datasets