def __track_module_from_dict(self, trans, steps, steps_by_external_id, step_dict, secure):
    """Build a workflow step (and its module) from ``step_dict`` and record it.

    The new step is appended to ``steps`` and indexed under its external id in
    ``steps_by_external_id``.  Any declared workflow outputs are validated
    (non-empty, no duplicates) and attached to the step.

    :returns: the ``(module, step)`` pair produced by ``__module_from_dict``
    :raises exceptions.ObjectAttributeInvalidException: on duplicate or empty
        workflow output names
    """
    module, step = self.__module_from_dict(trans, step_dict, secure=secure)
    # Register the model object for this step.
    steps.append(step)
    steps_by_external_id[step_dict['id']] = step
    if 'workflow_outputs' in step_dict:
        seen_names = set()
        for workflow_output in step_dict['workflow_outputs']:
            # Outputs may be plain output names (legacy format) instead of dicts.
            if not isinstance(workflow_output, dict):
                workflow_output = {"output_name": workflow_output}
            output_name = workflow_output["output_name"]
            if output_name in seen_names:
                raise exceptions.ObjectAttributeInvalidException("Duplicate workflow outputs with name [%s] found." % output_name)
            if not output_name:
                raise exceptions.ObjectAttributeInvalidException("Workflow output with empty name encountered.")
            seen_names.add(output_name)
            workflow_output_model = step.create_or_update_workflow_output(
                output_name=output_name,
                uuid=workflow_output.get("uuid", None),
                label=workflow_output.get("label", None),
            )
            trans.sa_session.add(workflow_output_model)
    return module, step
def __module_from_dict(self, trans, steps, steps_by_external_id, step_dict, **kwds):
    """
    Create a WorkflowStep model object and corresponding module representing
    type-specific functionality from the incoming dictionary.

    The step is appended to ``steps`` and indexed in ``steps_by_external_id``
    by the incoming dict's ``id``.  ``**kwds`` is forwarded to
    ``module_factory.from_dict``.  Returns the ``(module, step)`` pair.
    """
    step = model.WorkflowStep()
    # TODO: Consider handling position inside module.
    step.position = step_dict['position']
    # The uuid may be serialized as the literal string "None"; treat that as absent.
    if step_dict.get("uuid", None) and step_dict['uuid'] != "None":
        step.uuid = step_dict["uuid"]
    if "label" in step_dict:
        step.label = step_dict["label"]
    step_type = step_dict.get("type", None)
    if step_type == "subworkflow":
        # Resolve the nested workflow first so the module factory sees it.
        subworkflow = self.__load_subworkflow_from_step_dict(trans, step_dict)
        step_dict["subworkflow"] = subworkflow
    module = module_factory.from_dict(trans, step_dict, **kwds)
    self.__set_default_label(step, module, step_dict.get('tool_state'))
    # Persist the module's type-specific state onto the step model.
    module.save_to_step(step)
    annotation = step_dict['annotation']
    if annotation:
        annotation = sanitize_html(annotation, 'utf-8', 'text/html')
        self.add_item_annotation(trans.sa_session, trans.get_user(), step, annotation)
    # Stick this in the step temporarily — connections are resolved later,
    # once all steps exist.
    step.temp_input_connections = step_dict['input_connections']
    # Create the model class for the step
    steps.append(step)
    steps_by_external_id[step_dict['id']] = step
    if 'workflow_outputs' in step_dict:
        workflow_outputs = step_dict['workflow_outputs']
        found_output_names = set([])
        for workflow_output in workflow_outputs:
            # Allow workflow outputs as list of output_names for backward compatibility.
            if not isinstance(workflow_output, dict):
                workflow_output = {"output_name": workflow_output}
            output_name = workflow_output["output_name"]
            if output_name in found_output_names:
                raise exceptions.ObjectAttributeInvalidException("Duplicate workflow outputs with name [%s] found." % output_name)
            if not output_name:
                raise exceptions.ObjectAttributeInvalidException("Workflow output with empty name encountered.")
            found_output_names.add(output_name)
            uuid = workflow_output.get("uuid", None)
            label = workflow_output.get("label", None)
            m = step.create_or_update_workflow_output(
                output_name=output_name,
                uuid=uuid,
                label=label,
            )
            trans.sa_session.add(m)
    return module, step
def create( self, trans, encoded_folder_id, payload, **kwd ):
    """
    * POST /api/folders/{encoded_id}/contents
        create a new library file from an HDA

    :param  encoded_folder_id:      the encoded id of the folder to import dataset(s) to
    :type   encoded_folder_id:      an encoded id string
    :param  payload:    dictionary structure containing:
    :param from_hda_id: (optional) the id of an accessible HDA to copy into the library
    :type  from_hda_id: encoded id
    :param ldda_message: (optional) the new message attribute of the LDDA created
    :type   ldda_message: str
    :param extended_metadata: (optional) dub-dictionary containing any extended metadata to associate with the item
    :type  extended_metadata: dict
    :type   payload:    dict

    :returns:   a dictionary containing the id, name, and 'show' url of the new item
    :rtype:     dict

    :raises:    ObjectAttributeInvalidException,
        InsufficientPermissionsException, ItemAccessibilityException,
        InternalServerError
    """
    encoded_folder_id_16 = self.__decode_library_content_id( trans, encoded_folder_id )
    from_hda_id, ldda_message = ( payload.pop( 'from_hda_id', None ), payload.pop( 'ldda_message', '' ) )
    if ldda_message:
        ldda_message = util.sanitize_html.sanitize_html( ldda_message, 'utf-8' )
    rval = {}
    try:
        decoded_hda_id = self.decode_id( from_hda_id )
        hda = self.hda_manager.get_owned( decoded_hda_id, trans.user, current_history=trans.history )
        hda = self.hda_manager.error_if_uploading( hda )
        folder = self.get_library_folder( trans, encoded_folder_id_16, check_accessible=True )
        library = folder.parent_library
        if library.deleted:
            raise exceptions.ObjectAttributeInvalidException()
        if not self.can_current_user_add_to_library_item( trans, folder ):
            raise exceptions.InsufficientPermissionsException()
        ldda = self.copy_hda_to_library_folder( trans, hda, folder, ldda_message=ldda_message )
        update_time = ldda.update_time.strftime( "%Y-%m-%d %I:%M %p" )
        ldda_dict = ldda.to_dict()
        rval = trans.security.encode_dict_ids( ldda_dict )
        rval['update_time'] = update_time
    except exceptions.ObjectAttributeInvalidException:
        raise exceptions.ObjectAttributeInvalidException( 'You cannot add datasets into deleted library. Undelete it first.' )
    except exceptions.InsufficientPermissionsException:
        # BUGFIX: was ``exceptions.exceptions.InsufficientPermissionsException``,
        # which would itself raise AttributeError instead of the intended error.
        raise exceptions.InsufficientPermissionsException( 'You do not have proper permissions to add a dataset to a folder with id (%s)' % ( encoded_folder_id ) )
    except Exception as exc:
        # TODO handle exceptions better within the mixins
        if ( ( 'not accessible to the current user' in str( exc ) ) or ( 'You are not allowed to access this dataset' in str( exc ) ) ):
            raise exceptions.ItemAccessibilityException( 'You do not have access to the requested item' )
        else:
            log.exception( exc )
            # BUGFIX: corrected the user-facing typo "ocurred" -> "occurred".
            raise exceptions.InternalServerError( 'An unknown error occurred. Please try again.' )
    return rval
def _validate_favorite_object_type(self, object_type): if object_type in ['tools']: pass else: raise exceptions.ObjectAttributeInvalidException( "This type is not supported. Given object_type: %s" % object_type)
def create(self, trans, payload, **kwd):
    """
    create( self, trans, payload, **kwd )
    * POST /api/pages
        Create a page and return dictionary containing Page summary

    :param  payload:    dictionary structure containing::
        'slug'       = The title slug for the page URL, must be unique
        'title'      = Title of the page
        'content'    = HTML contents of the page
        'annotation' = Annotation that will be attached to the page

    :rtype:     dict
    :returns:   Dictionary return of the Page.to_dict call
    """
    user = trans.get_user()
    # Validate the payload with guard clauses before touching the database.
    if not payload.get("title", None):
        raise exceptions.ObjectAttributeMissingException("Page name is required")
    if not payload.get("slug", None):
        raise exceptions.ObjectAttributeMissingException("Page id is required")
    if not self._is_valid_slug(payload["slug"]):
        raise exceptions.ObjectAttributeInvalidException("Page identifier must consist of only lowercase letters, numbers, and the '-' character")
    if trans.sa_session.query(trans.app.model.Page).filter_by(user=user, slug=payload["slug"], deleted=False).first():
        raise exceptions.DuplicatedSlugException("Page slug must be unique")
    # Sanitize all user-supplied HTML before it is persisted.
    sanitized_content = sanitize_html(payload.get("content", ""), 'utf-8', 'text/html')
    # Build the new stored page.
    page = trans.app.model.Page()
    page.title = payload['title']
    page.slug = payload['slug']
    page_annotation = sanitize_html(payload.get("annotation", ""), 'utf-8', 'text/html')
    self.add_item_annotation(trans.sa_session, trans.get_user(), page, page_annotation)
    page.user = user
    # Attach the first revision, which carries the actual content.
    page_revision = trans.app.model.PageRevision()
    page_revision.title = payload['title']
    page_revision.page = page
    page.latest_revision = page_revision
    page_revision.content = sanitized_content
    # Persist and return the encoded summary.
    session = trans.sa_session
    session.add(page)
    session.flush()
    return self.encode_all_ids(trans, page.to_dict(), True)
def index( self, trans, **kwd ):
    """
    index( trans, state=None, tool_id=None, history_id=None )
    * GET /api/jobs:
        return jobs for current user

    :type   state: string or list
    :param  state: limit listing of jobs to those that match one of the included states.
        If none, all are returned.
        Valid Galaxy job states include: 'new', 'upload', 'waiting', 'queued', 'running',
        'ok', 'error', 'paused', 'deleted', 'deleted_new'

    :type   tool_id: string or list
    :param  tool_id: limit listing of jobs to those that match one of the included tool_ids.
        If none, all are returned.

    :type   history_id: string
    :param  history_id: limit listing of jobs to those that match the history_id.
        If none, all are returned.

    :rtype:     list
    :returns:   list of dictionaries containing summary job information
    """
    state = kwd.get( 'state', None )
    query = trans.sa_session.query( trans.app.model.Job ).filter( trans.app.model.Job.user == trans.user )

    def build_and_apply_filters( query, objects, filter_func ):
        # Accept a single value or a list of values; a list is OR-ed together.
        if objects is not None:
            if isinstance( objects, basestring ):
                query = query.filter( filter_func( objects ) )
            elif isinstance( objects, list ):
                t = []
                for obj in objects:
                    t.append( filter_func( obj ) )
                query = query.filter( or_( *t ) )
        return query

    query = build_and_apply_filters( query, state, lambda s: trans.app.model.Job.state == s )
    query = build_and_apply_filters( query, kwd.get( 'tool_id', None ), lambda t: trans.app.model.Job.tool_id == t )
    query = build_and_apply_filters( query, kwd.get( 'tool_id_like', None ), lambda t: trans.app.model.Job.tool_id.like(t) )
    history_id = kwd.get( 'history_id', None )
    if history_id is not None:
        try:
            decoded_history_id = trans.security.decode_id(history_id)
            query = query.filter( trans.app.model.Job.history_id == decoded_history_id )
        except Exception:
            # BUGFIX: narrowed from a bare ``except:`` so SystemExit and
            # KeyboardInterrupt are not swallowed; any id-decode failure is
            # still reported to the client as an invalid attribute.
            raise exceptions.ObjectAttributeInvalidException()
    out = []
    for job in query.order_by( trans.app.model.Job.update_time.desc() ).all():
        out.append( self.encode_all_ids( trans, job.to_dict( 'collection' ), True ) )
    return out
def create(self, trans, payload):
    """Create a new Page from ``payload``, persist it, and return it.

    ``payload`` must include a unique ``slug`` and a ``title``.  Content is
    either taken from an invocation report (when ``invocation_id`` is given)
    or from ``content``/``content_format`` directly.
    """
    user = trans.get_user()
    # Validate the payload with guard clauses before any writes.
    if not payload.get("title"):
        raise exceptions.ObjectAttributeMissingException("Page name is required")
    if not payload.get("slug"):
        raise exceptions.ObjectAttributeMissingException("Page id is required")
    if not base.is_valid_slug(payload["slug"]):
        raise exceptions.ObjectAttributeInvalidException("Page identifier must consist of only lowercase letters, numbers, and the '-' character")
    if trans.sa_session.query(trans.app.model.Page).filter_by(user=user, slug=payload["slug"], deleted=False).first():
        raise exceptions.DuplicatedSlugException("Page identifier must be unique")
    invocation_id = payload.get("invocation_id")
    if invocation_id:
        # Seed the page content from a workflow invocation report (markdown).
        invocation_report = self.workflow_manager.get_invocation_report(trans, invocation_id)
        content = invocation_report.get("markdown")
        content_format = "markdown"
    else:
        content = payload.get("content", "")
        content_format = payload.get("content_format", "html")
    content = self.rewrite_content_for_import(trans, content, content_format)
    # Build the new stored page.
    page = trans.app.model.Page()
    page.title = payload['title']
    page.slug = payload['slug']
    page_annotation = payload.get("annotation", None)
    if page_annotation is not None:
        page_annotation = sanitize_html(page_annotation)
        self.add_item_annotation(trans.sa_session, trans.get_user(), page, page_annotation)
    page.user = user
    # Attach the first revision, which carries the actual content.
    page_revision = trans.app.model.PageRevision()
    page_revision.title = payload['title']
    page_revision.page = page
    page.latest_revision = page_revision
    page_revision.content = content
    page_revision.content_format = content_format
    # Persist and return the model object.
    session = trans.sa_session
    session.add(page)
    session.flush()
    return page
def _list_delete(self, trans, histories, purge=False):
    """Delete (or purge) the given histories for the current user.

    Shared histories are refused.  If the session's current history is among
    those deleted, switch to the most recent surviving history or create a
    fresh one.  Returns a ``(status, message)`` tuple.
    """
    deleted_count = 0
    removed_current = False
    messages = []
    outcome = SUCCESS
    active_history = trans.get_history()
    for history in histories:
        try:
            # Refuse to delete a history that is still shared with other users.
            if history.users_shared_with:
                raise exceptions.ObjectAttributeInvalidException(
                    f"History ({history.name}) has been shared with others, unshare it before deleting it."
                )
            if purge:
                self.history_manager.purge(history)
            else:
                self.history_manager.delete(history)
            if history == active_history:
                removed_current = True
        except Exception as e:
            messages.append(unicodify(e))
            outcome = ERROR
        else:
            trans.log_event(f"History ({history.name}) marked as deleted")
            deleted_count += 1
    if deleted_count:
        summary = "Deleted %d %s" % (deleted_count, iff(deleted_count != 1, "histories", "history"))
        if purge and trans.app.config.allow_user_dataset_purge:
            summary += f" and removed {iff(deleted_count != 1, 'their', 'its')} dataset{iff(deleted_count != 1, 's', '')} from disk"
        elif purge:
            summary += " but the datasets were not removed from disk because that feature is not enabled in this Galaxy instance"
        messages.append(f"{summary}. ")
    if removed_current:
        # if this history is the current history for this session,
        # - attempt to find the most recently used, undeleted history and switch to it.
        # - If no suitable recent history is found, create a new one and switch
        # note: this needs to come after commits above or will use an empty history that was deleted above
        not_deleted_or_purged = [model.History.deleted == false(), model.History.purged == false()]
        replacement = self.history_manager.most_recent(user=trans.user, filters=not_deleted_or_purged)
        if replacement:
            self.history_manager.set_current(trans, replacement)
        else:
            trans.get_or_create_default_history()
        messages.append("Your active history was deleted, a new empty history is now active. ")
        outcome = INFO
    return (outcome, " ".join(messages))
def index(self, trans, **kwd):
    """
    index( trans, state=None, tool_id=None, history_id=None, date_range_min=None, date_range_max=None, user_details=False )
    * GET /api/jobs:
        return jobs for current user

        !! if user is admin and user_details is True, then
            return jobs for all galaxy users based on filtering - this is an extended service

    :type   state: string or list
    :param  state: limit listing of jobs to those that match one of the included states.
        If none, all are returned.
        Valid Galaxy job states include: 'new', 'upload', 'waiting', 'queued', 'running',
        'ok', 'error', 'paused', 'deleted', 'deleted_new'

    :type   tool_id: string or list
    :param  tool_id: limit listing of jobs to those that match one of the included tool_ids.
        If none, all are returned.

    :type   user_details: boolean
    :param  user_details: if true, and requestor is an admin, will return external job id and user email.

    :type   date_range_min: string '2014-01-01'
    :param  date_range_min: limit the listing of jobs to those updated on or after requested date

    :type   date_range_max: string '2014-12-31'
    :param  date_range_max: limit the listing of jobs to those updated on or before requested date

    :type   history_id: string
    :param  history_id: limit listing of jobs to those that match the history_id.
        If none, all are returned.

    :rtype:     list
    :returns:   list of dictionaries containing summary job information
    """
    state = kwd.get('state', None)
    is_admin = trans.user_is_admin
    user_details = kwd.get('user_details', False)
    Job = trans.app.model.Job
    # Admins may see all jobs; regular users only their own.
    if is_admin:
        query = trans.sa_session.query(Job)
    else:
        query = trans.sa_session.query(Job).filter(Job.user == trans.user)

    def apply_filter(q, values, filter_func):
        # Accept a single value or a list of values; a list is OR-ed together.
        if values is None:
            return q
        if isinstance(values, string_types):
            return q.filter(filter_func(values))
        if isinstance(values, list):
            return q.filter(or_(*[filter_func(v) for v in values]))
        return q

    query = apply_filter(query, state, lambda s: Job.state == s)
    query = apply_filter(query, kwd.get('tool_id', None), lambda t: Job.tool_id == t)
    query = apply_filter(query, kwd.get('tool_id_like', None), lambda t: Job.tool_id.like(t))
    query = apply_filter(query, kwd.get('date_range_min', None), lambda dmin: Job.table.c.update_time >= dmin)
    query = apply_filter(query, kwd.get('date_range_max', None), lambda dmax: Job.table.c.update_time <= dmax)
    history_id = kwd.get('history_id', None)
    if history_id is not None:
        try:
            decoded_history_id = self.decode_id(history_id)
        except Exception:
            raise exceptions.ObjectAttributeInvalidException()
        query = query.filter(Job.history_id == decoded_history_id)
    if kwd.get('order_by') == 'create_time':
        ordering = Job.create_time.desc()
    else:
        ordering = Job.update_time.desc()
    results = []
    for job in query.order_by(ordering).all():
        job_dict = job.to_dict('collection', system_details=is_admin)
        encoded = self.encode_all_ids(trans, job_dict, True)
        if user_details:
            encoded['user_email'] = job.user.email
        results.append(encoded)
    return results
def _validation_failed(self, message):
    """Report a validation failure by raising ``ObjectAttributeInvalidException``
    carrying ``message``."""
    raise exceptions.ObjectAttributeInvalidException(message)
class FoldersController(BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems):
    """API controller for library folders.

    Folder ids are exposed with an 'F' prefix to distinguish them from
    library dataset ids.
    """

    @web.expose_api
    def index(self, trans, **kwd):
        """
        GET /api/folders/
        This would normally display a list of folders. However, that would
        be across multiple libraries, so it's not implemented.
        """
        raise exceptions.NotImplemented('Listing all accessible library folders is not implemented.')

    @web.expose_api
    def show(self, trans, id, **kwd):
        """
        show( self, trans, id, **kwd )
        *GET /api/folders/{encoded_folder_id}

        Displays information about a folder.

        :param  id:      the folder's encoded id (required)
        :type   id:      an encoded id string (has to be prefixed by 'F')

        :returns:   dictionary including details of the folder
        :rtype:     dict
        """
        folder_id_without_prefix = self.__cut_the_prefix(id)
        content = self.get_library_folder(trans, folder_id_without_prefix, check_ownership=False, check_accessible=True)
        return_dict = self.encode_all_ids(trans, content.to_dict(view='element'))
        # Re-attach the 'F' prefix that identifies folder ids in the API.
        return_dict['id'] = 'F' + return_dict['id']
        if return_dict['parent_id'] is not None:
            return_dict['parent_id'] = 'F' + return_dict['parent_id']
        return return_dict

    @expose_api
    def create(self, trans, encoded_parent_folder_id, **kwd):
        """
        create( self, trans, encoded_parent_folder_id, **kwd )
        *POST /api/folders/{encoded_parent_folder_id}

        Create a new folder object underneath the one specified in the parameters.

        :param  encoded_parent_folder_id:      the parent folder's id (required)
        :type   encoded_parent_folder_id:      an encoded id string (should be prefixed by 'F')
        :param  name:                          the name of the new folder (required)
        :type   name:                          str
        :param  description:                   the description of the new folder
        :type   description:                   str

        :returns:   information about newly created folder, notably including ID
        :rtype:     dictionary

        :raises: RequestParameterMissingException, MalformedId, InternalServerError
        """
        payload = kwd.get('payload', None)
        if payload is None:
            raise exceptions.RequestParameterMissingException("Missing required parameters 'encoded_parent_folder_id' and 'name'.")
        name = payload.get('name', None)
        description = payload.get('description', '')
        if encoded_parent_folder_id is None:
            raise exceptions.RequestParameterMissingException("Missing required parameter 'encoded_parent_folder_id'.")
        elif name is None:
            raise exceptions.RequestParameterMissingException("Missing required parameter 'name'.")
        # encoded_parent_folder_id should be prefixed by 'F'
        encoded_parent_folder_id = self.__cut_the_prefix(encoded_parent_folder_id)
        try:
            decoded_parent_folder_id = trans.security.decode_id(encoded_parent_folder_id)
        except ValueError:
            # BUGFIX: the original interpolated the builtin ``id`` here,
            # producing a meaningless message instead of the offending id.
            raise exceptions.MalformedId("Malformed folder id ( %s ) specified, unable to decode" % (str(encoded_parent_folder_id)))
        try:
            parent_folder = trans.sa_session.query(trans.app.model.LibraryFolder).filter(trans.app.model.LibraryFolder.table.c.id == decoded_parent_folder_id).one()
        except MultipleResultsFound:
            raise exceptions.InconsistentDatabase('Multiple folders found with the same id.')
        except NoResultFound:
            raise exceptions.RequestParameterInvalidException('No folder found with the id provided.')
        except Exception as e:
            # BUGFIX: modernized from the Python-2-only ``except Exception, e`` syntax.
            raise exceptions.InternalServerError('Error loading from the database.' + str(e))
        library = parent_folder.parent_library
        if library.deleted:
            raise exceptions.ObjectAttributeInvalidException('You cannot create folder within a deleted library. Undelete it first.')
        # TODO: refactor the functionality for use here instead of calling another controller
        params = dict([("name", name), ("description", description)])
        status, output = trans.webapp.controllers['library_common'].create_folder(trans, 'api', encoded_parent_folder_id, '', **params)
        if 200 == status and len(output.items()) == 1:
            for k, v in output.items():
                try:
                    folder = trans.sa_session.query(trans.app.model.LibraryFolder).get(v.id)
                except Exception as e:
                    raise exceptions.InternalServerError('Error loading from the database.' + str(e))
                if folder:
                    update_time = folder.update_time.strftime("%Y-%m-%d %I:%M %p")
                    return_dict = self.encode_all_ids(trans, folder.to_dict(view='element'))
                    return_dict['update_time'] = update_time
                    return_dict['parent_id'] = 'F' + return_dict['parent_id']
                    return_dict['id'] = 'F' + return_dict['id']
                    return return_dict