def update(self, trans, id, group_id, **kwd):
    """
    PUT /api/groups/{encoded_group_id}/users/{encoded_user_id}
    Adds a user to a group

    :param id:       encoded id of the user to add
    :param group_id: encoded id of the target group
    :returns: dict describing the membership on success, or an error string
    """
    user_id = id
    decoded_group_id = trans.security.decode_id(group_id)
    decoded_user_id = trans.security.decode_id(user_id)
    item = None
    # Pre-bind so the except handler below can never hit an unbound name.
    group = None
    user = None
    try:
        group = trans.sa_session.query(
            trans.app.model.Group).get(decoded_group_id)
        user = trans.sa_session.query(
            trans.app.model.User).get(decoded_user_id)
        for uga in group.users:
            if uga.user == user:
                # Already a member - report the existing association.
                item = dict(id=user_id,
                            email=user.email,
                            url=url_for('group_user',
                                        group_id=group_id,
                                        id=user_id))
                break
        if not item:
            uga = trans.app.model.UserGroupAssociation(user, group)
            # Add UserGroupAssociations
            trans.sa_session.add(uga)
            trans.sa_session.flush()
            item = dict(id=user_id,
                        email=user.email,
                        url=url_for('group_user',
                                    group_id=group_id,
                                    id=user_id))
    except Exception as e:
        # BUGFIX: fall back to the encoded ids when the lookups themselves
        # failed, so the handler cannot raise a second (unbound name) error.
        email = user.email if user is not None else user_id
        group_name = group.name if group is not None else group_id
        item = f"Error in group_user API Adding user {email} to group {group_name}"
        log.error(item + ": %s", unicodify(e))
    return item
def update(self, trans, id, group_id, **kwd):
    """
    PUT /api/groups/{encoded_group_id}/roles/{encoded_role_id}
    Adds a role to a group

    :param id:       encoded id of the role to add
    :param group_id: encoded id of the target group
    :returns: dict describing the group role on success, or an error string
    """
    role_id = id
    decoded_group_id = trans.security.decode_id(group_id)
    decoded_role_id = trans.security.decode_id(role_id)
    item = None
    # Pre-bind so the except handler below can never hit an unbound name.
    group = None
    role = None
    try:
        group = trans.sa_session.query(
            trans.app.model.Group).get(decoded_group_id)
        role = trans.sa_session.query(
            trans.app.model.Role).get(decoded_role_id)
        for gra in group.roles:
            if gra.role == role:
                # Already associated - report the existing association.
                item = dict(id=role_id,
                            name=role.name,
                            url=url_for('group_role',
                                        group_id=group_id,
                                        id=role_id))
                break
        if not item:
            gra = trans.app.model.GroupRoleAssociation(group, role)
            # Add GroupRoleAssociation
            trans.sa_session.add(gra)
            trans.sa_session.flush()
            item = dict(id=role_id,
                        name=role.name,
                        url=url_for('group_role',
                                    group_id=group_id,
                                    id=role_id))
    except Exception as e:
        # BUGFIX: fall back to the encoded ids when the lookups themselves
        # failed, so the handler cannot raise a second (unbound name) error.
        role_name = role.name if role is not None else role_id
        group_name = group.name if group is not None else group_id
        item = f"Error in group_role API Adding role {role_name} to group {group_name}"
        log.error(item + ": %s", unicodify(e))
    return item
def show(self, trans, id, **kwd):
    """
    GET /api/groups/{encoded_group_id}
    Displays information about a group.
    """
    group_id = id
    try:
        decoded_group_id = trans.security.decode_id(group_id)
    except TypeError:
        trans.response.status = 400
        return "Malformed group id ( %s ) specified, unable to decode." % str(group_id)
    group = None
    try:
        group = trans.sa_session.query(trans.app.model.Group).get(decoded_group_id)
    except Exception:
        pass
    if not group:
        trans.response.status = 400
        return "Invalid group id ( %s ) specified." % str(group_id)
    # Serialize with encoded ids and attach the related collection URLs.
    item = group.to_dict(view='element',
                         value_mapper={'id': trans.security.encode_id})
    item['url'] = url_for('group', id=group_id)
    item['users_url'] = url_for('group_users', group_id=group_id)
    item['roles_url'] = url_for('group_roles', group_id=group_id)
    return item
def delete(self, trans, id, group_id, **kwd):
    """
    DELETE /api/groups/{encoded_group_id}/users/{encoded_user_id}
    Removes a user from a group

    :param id:       encoded id of the user to remove
    :param group_id: encoded id of the group
    :returns: dict describing the removed membership, or an error string
    """
    user_id = id
    decoded_group_id = trans.security.decode_id(group_id)
    decoded_user_id = trans.security.decode_id(user_id)
    # BUGFIX: item must be initialized before the loop; previously, when the
    # user was not in the group, `if not item:` raised UnboundLocalError,
    # which the except clause swallowed and misreported as an API error.
    item = None
    group = None
    user = None
    try:
        group = trans.sa_session.query(
            trans.app.model.Group).get(decoded_group_id)
        user = trans.sa_session.query(
            trans.app.model.User).get(decoded_user_id)
        for uga in group.users:
            if uga.user == user:
                trans.sa_session.delete(uga)
                trans.sa_session.flush()
                item = dict(id=user_id,
                            email=user.email,
                            url=url_for('group_user',
                                        group_id=group_id,
                                        id=user_id))
        if not item:
            item = "user %s not in group %s" % (user.email, group.name)
    except Exception as e:
        # Fall back to the encoded ids if the lookups failed, so this handler
        # cannot itself raise an unbound-name error.
        email = user.email if user is not None else user_id
        group_name = group.name if group is not None else group_id
        item = "Error in group_user API Removing user %s from group %s" % (
            email, group_name)
        log.error(item + ": %s", unicodify(e))
    return item
def create(self, trans, payload, **kwd):
    """
    POST /api/forms
    Creates a new form.
    """
    if not trans.user_is_admin:
        trans.response.status = 403
        return "You are not authorized to create a new form."
    xml_text = payload.get('xml_text', None)
    if xml_text is None:
        trans.response.status = 400
        return "Missing required parameter 'xml_text'."
    # enhance to allow creating from more than just xml
    form_definition = form_factory.from_elem(XML(xml_text))
    trans.sa_session.add(form_definition)
    trans.sa_session.flush()
    encoded_id = trans.security.encode_id(form_definition.id)
    value_mapper = {
        'id': trans.security.encode_id,
        'form_definition_current_id': trans.security.encode_id,
    }
    form_dict = form_definition.to_dict(view='element', value_mapper=value_mapper)
    form_dict['url'] = url_for('form', id=encoded_id)
    return [form_dict]
def show(self, trans, id, **kwd):
    """
    GET /api/roles/{encoded_role_id}
    Displays information about a role.
    """
    role_id = id
    try:
        decoded_role_id = trans.security.decode_id(role_id)
    except Exception:
        trans.response.status = 400
        return "Malformed role id ( %s ) specified, unable to decode." % str(role_id)
    role = None
    try:
        role = trans.sa_session.query(trans.app.model.Role).get(decoded_role_id)
    except Exception:
        pass
    # Only admins, or users allowed to see this role, get a result; the
    # short-circuit keeps ok_to_display from being called with a null role.
    if not role or not (trans.user_is_admin
                        or trans.app.security_agent.ok_to_display(trans.user, role)):
        trans.response.status = 400
        return "Invalid role id ( %s ) specified." % str(role_id)
    rval = role.to_dict(view='element',
                        value_mapper={'id': trans.security.encode_id})
    rval['url'] = url_for('role', id=role_id)
    return rval
def index(self, trans, group_id, **kwd):
    """
    GET /api/groups/{encoded_group_id}/users
    Displays a collection (list) of groups.
    """
    decoded_group_id = trans.security.decode_id(group_id)
    group = None
    try:
        group = trans.sa_session.query(trans.app.model.Group).get(decoded_group_id)
    except Exception:
        pass
    if not group:
        trans.response.status = 400
        return "Invalid group id ( %s ) specified." % str(group_id)
    try:
        rval = []
        for membership in group.users:
            member = membership.user
            encoded_user_id = trans.security.encode_id(member.id)
            rval.append(dict(
                id=encoded_user_id,
                email=member.email,
                url=url_for('group_user', group_id=group_id, id=encoded_user_id),
            ))
    except Exception as e:
        rval = "Error in group API at listing users"
        log.error(rval + ": %s", unicodify(e))
        trans.response.status = 500
    return rval
def show(self, trans, id, group_id, **kwd):
    """
    GET /api/groups/{encoded_group_id}/users/{encoded_user_id}
    Displays information about a group user.

    :param id:       encoded id of the user
    :param group_id: encoded id of the group
    :returns: dict describing the membership, or an error string
    """
    user_id = id
    decoded_group_id = trans.security.decode_id(group_id)
    decoded_user_id = trans.security.decode_id(user_id)
    item = None
    # Pre-bind so the except handler below can never hit an unbound name.
    group = None
    user = None
    try:
        group = trans.sa_session.query(
            trans.app.model.Group).get(decoded_group_id)
        user = trans.sa_session.query(
            trans.app.model.User).get(decoded_user_id)
        for uga in group.users:
            if uga.user == user:
                item = dict(id=user_id,
                            email=user.email,
                            url=url_for('group_user',
                                        group_id=group_id,
                                        id=user_id))
        # TODO Fix This
        if not item:
            item = "user %s not in group %s" % (user.email, group.name)
    except Exception as e:
        # BUGFIX: fall back to the encoded ids when the lookups themselves
        # failed, so the handler cannot raise a second (unbound name) error.
        group_name = group.name if group is not None else group_id
        email = user.email if user is not None else user_id
        item = "Error in group_user API group %s user %s" % (group_name, email)
        log.error(item + ": %s", unicodify(e))
    return item
def show(self, trans, id, group_id, **kwd):
    """
    GET /api/groups/{encoded_group_id}/roles/{encoded_role_id}
    Displays information about a group role.

    :param id:       encoded id of the role
    :param group_id: encoded id of the group
    :returns: dict describing the group role, or an error string
    """
    role_id = id
    decoded_group_id = trans.security.decode_id(group_id)
    decoded_role_id = trans.security.decode_id(role_id)
    item = None
    # Pre-bind so the except handler below can never hit an unbound name.
    group = None
    role = None
    try:
        group = trans.sa_session.query(
            trans.app.model.Group).get(decoded_group_id)
        role = trans.sa_session.query(
            trans.app.model.Role).get(decoded_role_id)
        for gra in group.roles:
            if gra.role == role:
                item = dict(id=role_id,
                            name=role.name,
                            url=url_for('group_role',
                                        group_id=group_id,
                                        id=role_id))
        # TODO Fix This
        if not item:
            item = f"role {role.name} not in group {group.name}"
    except Exception as e:
        # BUGFIX: fall back to the encoded ids when the lookups themselves
        # failed, so the handler cannot raise a second (unbound name) error.
        group_name = group.name if group is not None else group_id
        role_name = role.name if role is not None else role_id
        item = f"Error in group_role API group {group_name} role {role_name}"
        log.error(item + ": %s", unicodify(e))
    return item
def show(self, trans, id, **kwd):
    """
    GET /api/forms/{encoded_form_id}
    Displays information about a form.
    """
    form_definition_id = id
    try:
        decoded_form_definition_id = trans.security.decode_id(form_definition_id)
    except TypeError:
        trans.response.status = 400
        return "Malformed form definition id ( %s ) specified, unable to decode." % str(form_definition_id)
    form_definition = None
    try:
        form_definition = trans.sa_session.query(
            trans.app.model.FormDefinition).get(decoded_form_definition_id)
    except Exception:
        pass
    # Forms are admin-only; a non-admin gets the same message as a bad id.
    if not form_definition or not trans.user_is_admin:
        trans.response.status = 400
        return "Invalid form definition id ( %s ) specified." % str(form_definition_id)
    value_mapper = {
        'id': trans.security.encode_id,
        'form_definition_current_id': trans.security.encode_id,
    }
    item = form_definition.to_dict(view='element', value_mapper=value_mapper)
    item['url'] = url_for('form', id=form_definition_id)
    return item
def delete(self, trans, id, group_id, **kwd):
    """
    DELETE /api/groups/{encoded_group_id}/roles/{encoded_role_id}
    Removes a role from a group

    :param id:       encoded id of the role to remove
    :param group_id: encoded id of the group
    :returns: dict describing the removed group role, or an error string
    """
    role_id = id
    decoded_group_id = trans.security.decode_id(group_id)
    decoded_role_id = trans.security.decode_id(role_id)
    # BUGFIX: item must be initialized before the loop; previously, when the
    # role was not in the group, `if not item:` raised UnboundLocalError,
    # which the except clause swallowed and misreported as an API error.
    item = None
    group = None
    role = None
    try:
        group = trans.sa_session.query(
            trans.app.model.Group).get(decoded_group_id)
        role = trans.sa_session.query(
            trans.app.model.Role).get(decoded_role_id)
        for gra in group.roles:
            if gra.role == role:
                trans.sa_session.delete(gra)
                trans.sa_session.flush()
                item = dict(id=role_id,
                            name=role.name,
                            url=url_for('group_role',
                                        group_id=group_id,
                                        id=role_id))
        if not item:
            item = f"role {role.name} not in group {group.name}"
    except Exception as e:
        # Fall back to the encoded ids if the lookups failed, so this handler
        # cannot itself raise an unbound-name error.
        role_name = role.name if role is not None else role_id
        group_name = group.name if group is not None else group_id
        item = f"Error in group_role API Removing role {role_name} from group {group_name}"
        log.error(item + ": %s", unicodify(e))
    return item
def role_to_model(trans, role):
    """Serialize a Role ORM object into a RoleModel, attaching its API URL."""
    encoded_role_id = trans.security.encode_id(role.id)
    item = role.to_dict(view='element',
                        value_mapper={'id': trans.security.encode_id})
    try:
        item['url'] = url_for('role', id=encoded_role_id)
    except AttributeError:
        # url_for is unavailable outside the legacy WSGI app.
        item['url'] = "*deprecated attribute not filled in by FastAPI server*"
    return RoleModel(**item)
def __api_import_new_workflow(self, trans, payload, **kwd):
    """
    Create a new workflow for the current user from the description in
    ``payload['workflow']``.

    If ``import_tools`` is set (admin only), also installs any Tool Shed
    repositories referenced by the workflow's steps.

    :returns: dictionary describing the created workflow
    """
    data = payload['workflow']
    raw_workflow_description = self.__normalize_workflow(trans, data)
    data = raw_workflow_description.as_dict
    import_tools = util.string_as_bool(payload.get("import_tools", False))
    if import_tools and not trans.user_is_admin:
        raise exceptions.AdminRequiredException()
    from_dict_kwds = self.__import_or_update_kwds(payload)
    publish = util.string_as_bool(payload.get("publish", False))
    # If 'publish' set, default to importable.
    importable = util.string_as_bool(payload.get("importable", publish))
    if publish and not importable:
        raise exceptions.RequestParameterInvalidException("Published workflow must be importable.")
    from_dict_kwds["publish"] = publish
    workflow, missing_tool_tups = self._workflow_from_dict(trans, raw_workflow_description, **from_dict_kwds)
    if importable:
        self._make_item_accessible(trans.sa_session, workflow)
        trans.sa_session.flush()
    # galaxy workflow newly created id
    workflow_id = workflow.id
    # api encoded, id
    encoded_id = trans.security.encode_id(workflow_id)
    item = workflow.to_dict(value_mapper={'id': trans.security.encode_id})
    item['annotations'] = [x.annotation for x in workflow.annotations]
    item['url'] = url_for('workflow', id=encoded_id)
    item['owner'] = workflow.user.username
    item['number_of_steps'] = len(workflow.latest_workflow.steps)
    if import_tools:
        tools = {}
        # BUGFIX: the original reused the name 'item' for the loop variables
        # below, clobbering the workflow dictionary built above so the method
        # returned the last tool-shed repository dict instead of the workflow.
        for key in data['steps']:
            step = data['steps'][key]
            if step is not None:
                if 'tool_shed_repository' in step:
                    tool_shed_repository = step['tool_shed_repository']
                    if 'owner' in tool_shed_repository and 'changeset_revision' in tool_shed_repository and 'name' in tool_shed_repository and 'tool_shed' in tool_shed_repository:
                        # Deduplicate repositories by a composite key.
                        toolstr = tool_shed_repository['owner'] \
                            + tool_shed_repository['changeset_revision'] \
                            + tool_shed_repository['name'] \
                            + tool_shed_repository['tool_shed']
                        tools[toolstr] = tool_shed_repository
        irm = InstallRepositoryManager(self.app)
        for k in tools:
            repo = tools[k]
            tool_shed_url = 'https://' + repo['tool_shed'] + '/'
            name = repo['name']
            owner = repo['owner']
            changeset_revision = repo['changeset_revision']
            irm.install(tool_shed_url, name, owner, changeset_revision, payload)
    return item
def create(self, trans, payload, **kwd):
    """
    POST /api/groups
    Creates a new group.
    """
    log.info("groups payload%s\n" % (payload))
    if not trans.user_is_admin:
        trans.response.status = 403
        return "You are not authorized to create a new group."
    name = payload.get('name', None)
    if not name:
        trans.response.status = 400
        return "Enter a valid name"
    existing = trans.sa_session.query(trans.app.model.Group).filter(
        trans.app.model.Group.table.c.name == name).first()
    if existing:
        trans.response.status = 400
        return "A group with that name already exists"
    group = trans.app.model.Group(name=name)
    trans.sa_session.add(group)
    user_ids = payload.get('user_ids', [])
    for encoded in user_ids:
        log.info("user_id: %s\n" % (encoded))
        log.info("%s %s\n" % (encoded, trans.security.decode_id(encoded)))
    users = [
        trans.sa_session.query(trans.model.User).get(trans.security.decode_id(encoded))
        for encoded in user_ids
    ]
    role_ids = payload.get('role_ids', [])
    roles = [
        trans.sa_session.query(trans.model.Role).get(trans.security.decode_id(encoded))
        for encoded in role_ids
    ]
    # set_entity_group_associations creates both the UserGroupAssociation and
    # GroupRoleAssociation rows, so no per-user/per-role calls are needed here.
    trans.app.security_agent.set_entity_group_associations(
        groups=[group], roles=roles, users=users)
    trans.sa_session.flush()
    encoded_id = trans.security.encode_id(group.id)
    group_dict = group.to_dict(view='element',
                               value_mapper={'id': trans.security.encode_id})
    group_dict['url'] = url_for('group', id=encoded_id)
    return [group_dict]
def __api_import_shared_workflow(self, trans, workflow_id, payload, **kwd):
    """Import another user's shared workflow into the current user's account."""
    try:
        stored_workflow = self.get_stored_workflow(trans, workflow_id, check_ownership=False)
    except Exception:
        raise exceptions.ObjectNotFound("Malformed workflow id ( %s ) specified." % workflow_id)
    if stored_workflow.importable is False:
        raise exceptions.ItemAccessibilityException('The owner of this workflow has disabled imports via this link.')
    elif stored_workflow.deleted:
        raise exceptions.ItemDeletionException("You can't import this workflow because it has been deleted.")
    imported_workflow = self._import_shared_workflow(trans, stored_workflow)
    encoded_id = trans.security.encode_id(imported_workflow.id)
    item = imported_workflow.to_dict(value_mapper={'id': trans.security.encode_id})
    item['url'] = url_for('workflow', id=encoded_id)
    return item
def create(self, trans, payload, **kwd):
    """
    POST /api/roles
    Creates a new role.
    """
    if not trans.user_is_admin:
        trans.response.status = 403
        return "You are not authorized to create a new role."
    name = payload.get('name', None)
    description = payload.get('description', None)
    if not name or not description:
        trans.response.status = 400
        return "Enter a valid name and a description"
    existing = trans.sa_session.query(trans.app.model.Role).filter(
        trans.app.model.Role.table.c.name == name).first()
    if existing:
        trans.response.status = 400
        return "A role with that name already exists"
    # TODO: allow non-admins to create roles
    role_type = trans.app.model.Role.types.ADMIN
    role = trans.app.model.Role(name=name, description=description, type=role_type)
    trans.sa_session.add(role)
    users = [
        trans.sa_session.query(trans.model.User).get(trans.security.decode_id(encoded))
        for encoded in payload.get('user_ids', [])
    ]
    groups = [
        trans.sa_session.query(trans.model.Group).get(trans.security.decode_id(encoded))
        for encoded in payload.get('group_ids', [])
    ]
    # Create the UserRoleAssociations
    for user in users:
        trans.app.security_agent.associate_user_role(user, role)
    # Create the GroupRoleAssociations
    for group in groups:
        trans.app.security_agent.associate_group_role(group, role)
    trans.sa_session.flush()
    encoded_role_id = trans.security.encode_id(role.id)
    rval = role.to_dict(view='element',
                        value_mapper={'id': trans.security.encode_id})
    rval['url'] = url_for('role', id=encoded_role_id)
    return [rval]
def index(self, trans, **kwd):
    """
    GET /api/groups
    Displays a collection (list) of groups.
    """
    rval = []
    query = trans.sa_session.query(trans.app.model.Group).filter(
        trans.app.model.Group.table.c.deleted == false())
    for group in query:
        # Only admins may list groups; non-admins get an empty list.
        if not trans.user_is_admin:
            continue
        item = group.to_dict(value_mapper={'id': trans.security.encode_id})
        encoded_id = trans.security.encode_id(group.id)
        item['url'] = url_for('group', id=encoded_id)
        rval.append(item)
    return rval
def index(self, trans, **kwd):
    """
    GET /api/forms
    Displays a collection (list) of forms.
    """
    if not trans.user_is_admin:
        trans.response.status = 403
        return "You are not authorized to view the list of forms."
    value_mapper = {
        'id': trans.security.encode_id,
        'form_definition_current_id': trans.security.encode_id,
    }
    rval = []
    for form_definition in trans.sa_session.query(trans.app.model.FormDefinition):
        item = form_definition.to_dict(value_mapper=value_mapper)
        item['url'] = url_for('form', id=trans.security.encode_id(form_definition.id))
        rval.append(item)
    return rval
def index(self, trans, **kwd):
    """
    GET /api/roles
    Displays a collection (list) of roles.
    """
    rval = []
    query = trans.sa_session.query(trans.app.model.Role).filter(
        trans.app.model.Role.table.c.deleted == false())
    for role in query:
        # Skip roles the current user is not allowed to see.
        if not (trans.user_is_admin
                or trans.app.security_agent.ok_to_display(trans.user, role)):
            continue
        item = role.to_dict(value_mapper={'id': trans.security.encode_id})
        encoded_id = trans.security.encode_id(role.id)
        item['url'] = url_for('role', id=encoded_id)
        rval.append(item)
    return rval
def create(self, trans, payload, **kwd):
    """
    POST /api/quotas
    Creates a new quota.
    """
    try:
        self.validate_in_users_and_groups(trans, payload)
    except Exception as e:
        raise HTTPBadRequest(detail=util.unicodify(e))
    params = self.get_quota_params(payload)
    try:
        quota, message = self._create_quota(params)
    except ActionInputError as e:
        raise HTTPBadRequest(detail=util.unicodify(e))
    rval = quota.to_dict(value_mapper={'id': trans.security.encode_id})
    rval['url'] = url_for('quota', id=trans.security.encode_id(quota.id))
    rval['message'] = message
    return rval
def index(self, trans, deleted='False', **kwd):
    """
    GET /api/quotas
    GET /api/quotas/deleted
    Displays a collection (list) of quotas.
    """
    show_deleted = util.string_as_bool(deleted)
    # Route name and filter both depend on whether deleted quotas were asked for.
    route = 'deleted_quota' if show_deleted else 'quota'
    query = trans.sa_session.query(trans.app.model.Quota)
    if show_deleted:
        query = query.filter(trans.app.model.Quota.deleted == true())
    else:
        query = query.filter(trans.app.model.Quota.deleted == false())
    rval = []
    for quota in query:
        encoded_id = trans.security.encode_id(quota.id)
        item = quota.to_dict(value_mapper={'id': trans.security.encode_id})
        item['url'] = url_for(route, id=encoded_id)
        rval.append(item)
    return rval
def index(self, trans, library_id, **kwd):
    """
    GET /api/libraries/{library_id}/contents:

    Return a list of library files and folders.

    .. note:: This endpoint is slow for large libraries. Returns all content
        traversing recursively through all folders.
    .. seealso:: :class:`galaxy.webapps.galaxy.api.FolderContentsController.index`
        for a faster non-recursive solution

    :param library_id: the encoded id of the library
    :type library_id: str

    :returns: list of dictionaries of the form:

        * id: the encoded id of the library item
        * name: the 'library path' or relationship of the library item to the root
        * type: 'file' or 'folder'
        * url: the url to get detailed information on the library item

    :rtype: list

    :raises: MalformedId, InconsistentDatabase, RequestParameterInvalidException,
        InternalServerError
    """
    rval = []
    current_user_roles = trans.get_current_user_roles()

    def traverse(folder):
        # Depth-first walk over the folder tree, collecting every non-deleted
        # subfolder and dataset the calling user may access.  Each collected
        # item is annotated in place with api_path (its '/'-joined path from
        # the library root) and api_type ('folder' or 'file').
        admin = trans.user_is_admin
        rval = []
        for subfolder in folder.active_folders:
            if not admin:
                can_access, folder_ids = trans.app.security_agent.check_folder_contents(
                    trans.user, current_user_roles, subfolder)
            # 'admin or' short-circuits, so can_access is only read when it
            # was assigned in the branch above.
            if (admin or can_access) and not subfolder.deleted:
                subfolder.api_path = folder.api_path + '/' + subfolder.name
                subfolder.api_type = 'folder'
                rval.append(subfolder)
                rval.extend(traverse(subfolder))
        for ld in folder.datasets:
            if not admin:
                can_access = trans.app.security_agent.can_access_dataset(
                    current_user_roles,
                    ld.library_dataset_dataset_association.dataset)
            if (admin or can_access) and not ld.deleted:
                ld.api_path = folder.api_path + '/' + ld.name
                ld.api_type = 'file'
                rval.append(ld)
        return rval

    decoded_library_id = self.decode_id(library_id)
    try:
        library = trans.sa_session.query(trans.app.model.Library).filter(
            trans.app.model.Library.table.c.id == decoded_library_id).one()
    except MultipleResultsFound:
        raise exceptions.InconsistentDatabase(
            'Multiple libraries found with the same id.')
    except NoResultFound:
        raise exceptions.RequestParameterInvalidException(
            'No library found with the id provided.')
    except Exception as e:
        raise exceptions.InternalServerError(
            'Error loading from the database.' + util.unicodify(e))
    # NOTE: same message as the not-found case above, so callers cannot tell
    # an inaccessible library apart from a missing one.
    if not (trans.user_is_admin
            or trans.app.security_agent.can_access_library(
                current_user_roles, library)):
        raise exceptions.RequestParameterInvalidException(
            'No library found with the id provided.')
    # Folder ids carry an 'F' prefix to distinguish them from dataset ids
    # in this mixed-content listing.
    encoded_id = 'F' + trans.security.encode_id(library.root_folder.id)
    # appending root folder
    rval.append(
        dict(id=encoded_id,
             type='folder',
             name='/',
             url=url_for('library_content', library_id=library_id, id=encoded_id)))
    library.root_folder.api_path = ''
    # appending all other items in the library recursively
    for content in traverse(library.root_folder):
        encoded_id = trans.security.encode_id(content.id)
        if content.api_type == 'folder':
            encoded_id = 'F' + encoded_id
        rval.append(
            dict(id=encoded_id,
                 type=content.api_type,
                 name=content.api_path,
                 url=url_for(
                     'library_content',
                     library_id=library_id,
                     id=encoded_id,
                 )))
    return rval
def create(self, trans, library_id, payload, **kwd):
    """
    POST /api/libraries/{library_id}/contents:

    Create a new library file or folder.

    To copy an HDA into a library send ``create_type`` of 'file' and
    the HDA's encoded id in ``from_hda_id`` (and optionally ``ldda_message``).

    To copy an HDCA into a library send ``create_type`` of 'file' and
    the HDCA's encoded id in ``from_hdca_id`` (and optionally ``ldda_message``).

    :type library_id: str
    :param library_id: the encoded id of the library where to create the new item
    :type payload: dict
    :param payload: dictionary structure containing:

        * folder_id: the encoded id of the parent folder of the new item
        * create_type: the type of item to create ('file', 'folder' or 'collection')
        * from_hda_id: (optional, only if create_type is 'file') the encoded id
          of an accessible HDA to copy into the library
        * ldda_message: (optional) the new message attribute of the LDDA created
        * extended_metadata: (optional) sub-dictionary containing any extended
          metadata to associate with the item
        * upload_option: (optional) one of 'upload_file' (default),
          'upload_directory' or 'upload_paths'
        * server_dir: (optional, only if upload_option is 'upload_directory')
          relative path of the subdirectory of Galaxy ``library_import_dir``
          (if admin) or ``user_library_import_dir`` (if non-admin) to upload.
          All and only the files (i.e. no subdirectories) contained in the
          specified directory will be uploaded.
        * filesystem_paths: (optional, only if upload_option is 'upload_paths'
          and the user is an admin) file paths on the Galaxy server to upload
          to the library, one file per line
        * link_data_only: (optional, only when upload_option is
          'upload_directory' or 'upload_paths') either 'copy_files' (default)
          or 'link_to_files'. Setting to 'link_to_files' symlinks instead of
          copying the files
        * name: (optional, only if create_type is 'folder') name of the folder
          to create
        * description: (optional, only if create_type is 'folder') description
          of the folder to create
        * tag_using_filenames: (optional) create tags on datasets using the
          file's original name
        * tags: (optional) create the given list of tags on datasets

    :returns: a dictionary describing the new item unless ``from_hdca_id`` is
        supplied, in that case a list of such dictionaries is returned.
    :rtype: object
    """
    # Validate the two required payload keys up front; errors are reported
    # with a 400 status and a plain-string body.
    if 'create_type' not in payload:
        trans.response.status = 400
        return "Missing required 'create_type' parameter."
    else:
        create_type = payload.pop('create_type')
    if create_type not in ('file', 'folder', 'collection'):
        trans.response.status = 400
        return "Invalid value for 'create_type' parameter ( %s ) specified." % create_type
    if 'folder_id' not in payload:
        trans.response.status = 400
        return "Missing required 'folder_id' parameter."
    else:
        folder_id = payload.pop('folder_id')
    # _decode_library_content_id splits the (possibly 'F'-prefixed) encoded
    # id into a class name and the decoded folder id.
    class_name, folder_id = self._decode_library_content_id(folder_id)
    try:
        # security is checked in the downstream controller
        parent = self.get_library_folder(trans, folder_id, check_ownership=False, check_accessible=False)
    except Exception as e:
        return util.unicodify(e)
    # The rest of the security happens in the library_common controller.
    real_folder_id = trans.security.encode_id(parent.id)
    payload['tag_using_filenames'] = util.string_as_bool(
        payload.get('tag_using_filenames', None))
    payload['tags'] = util.listify(payload.get('tags', None))
    # are we copying an HDA to the library folder?
    # we'll need the id and any message to attach, then branch to that private function
    from_hda_id, from_hdca_id, ldda_message = (payload.pop(
        'from_hda_id', None), payload.pop('from_hdca_id', None),
        payload.pop('ldda_message', ''))
    if create_type == 'file':
        if from_hda_id:
            return self._copy_hda_to_library_folder(
                trans, self.hda_manager, self.decode_id(from_hda_id),
                real_folder_id, ldda_message)
        if from_hdca_id:
            return self._copy_hdca_to_library_folder(
                trans, self.hda_manager, self.decode_id(from_hdca_id),
                real_folder_id, ldda_message)
    # check for extended metadata, store it and pop it out of the param
    # otherwise sanitize_param will have a fit
    ex_meta_payload = payload.pop('extended_metadata', None)
    # Now create the desired content object, either file or folder.
    if create_type == 'file':
        status, output = self._upload_library_dataset(
            trans, library_id, real_folder_id, **payload)
    elif create_type == 'folder':
        status, output = self._create_folder(trans, real_folder_id, library_id, **payload)
    elif create_type == 'collection':
        # Not delegating to library_common, so need to check access to parent
        # folder here.
        self.check_user_can_add_to_library_item(trans, parent, check_accessible=True)
        create_params = api_payload_to_create_params(payload)
        create_params['parent'] = parent
        service = trans.app.dataset_collections_service
        dataset_collection_instance = service.create(**create_params)
        return [
            dictify_dataset_collection_instance(
                dataset_collection_instance,
                security=trans.security,
                parent=parent)
        ]
    # _upload_library_dataset/_create_folder return an HTTP-style status
    # alongside their output; anything other than 200 is propagated as-is.
    if status != 200:
        trans.response.status = status
        return output
    else:
        rval = []
        for v in output.values():
            if ex_meta_payload is not None:
                # If there is extended metadata, store it, attach it to the dataset, and index it
                ex_meta = ExtendedMetadata(ex_meta_payload)
                trans.sa_session.add(ex_meta)
                v.extended_metadata = ex_meta
                trans.sa_session.add(v)
                trans.sa_session.flush()
                for path, value in self._scan_json_block(ex_meta_payload):
                    meta_i = ExtendedMetadataIndex(ex_meta, path, value)
                    trans.sa_session.add(meta_i)
                    trans.sa_session.flush()
            # Report the LibraryDataset rather than the LDDA it wraps.
            if type(v) == trans.app.model.LibraryDatasetDatasetAssociation:
                v = v.library_dataset
            encoded_id = trans.security.encode_id(v.id)
            if create_type == 'folder':
                # Folder ids carry an 'F' prefix in library-content listings.
                encoded_id = 'F' + encoded_id
            rval.append(
                dict(id=encoded_id,
                     name=v.name,
                     url=url_for('library_content',
                                 library_id=library_id,
                                 id=encoded_id)))
        return rval
def create(self, trans, payload, **kwd):
    """
    POST /api/workflows

    Run or create workflows from the api.

    .. tip:: When executing a workflow externally (e.g. from a script) it is
        recommended to use the
        :func:`galaxy.webapps.galaxy.api.workflows.WorkflowsAPIController.invoke`
        method below instead.

    If installed_repository_file or from_history_id is specified a new
    workflow will be created for this user. Otherwise, workflow_id must be
    specified and this API method will cause a workflow to execute.

    :param installed_repository_file: The path of a workflow to import.
        Either workflow_id, installed_repository_file or from_history_id must
        be specified
    :type installed_repository_file: str
    :param workflow_id: An existing workflow id. Either workflow_id,
        installed_repository_file or from_history_id must be specified
    :type workflow_id: str
    :param parameters: If workflow_id is set - see _update_step_parameters()
    :type parameters: dict
    :param ds_map: If workflow_id is set - a dictionary mapping each input
        step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld'
        or 'hda') and 'id' (which should be the id of a
        LibraryDatasetDatasetAssociation, LibraryDataset or
        HistoryDatasetAssociation respectively)
    :type ds_map: dict
    :param no_add_to_history: If workflow_id is set - if present in the
        payload with any value, the input datasets will not be added to the
        selected history
    :type no_add_to_history: str
    :param history: If workflow_id is set - optional history where to run the
        workflow, either the name of a new history or "hist_id=HIST_ID" where
        HIST_ID is the id of an existing history. If not specified, the
        workflow will be run a new unnamed history
    :type history: str
    :param replacement_params: If workflow_id is set - an optional dictionary
        used when renaming datasets
    :type replacement_params: dict
    :param from_history_id: Id of history to extract a workflow from. Either
        workflow_id, installed_repository_file or from_history_id must be
        specified
    :type from_history_id: str
    :param job_ids: If from_history_id is set - optional list of jobs to
        include when extracting a workflow from history
    :type job_ids: str
    :param dataset_ids: If from_history_id is set - optional list of HDA
        `hid`s corresponding to workflow inputs when extracting a workflow
        from history
    :type dataset_ids: str
    :param dataset_collection_ids: If from_history_id is set - optional list
        of HDCA `hid`s corresponding to workflow inputs when extracting a
        workflow from history
    :type dataset_collection_ids: str
    :param workflow_name: If from_history_id is set - name of the workflow to
        create when extracting a workflow from history
    :type workflow_name: str
    :param allow_tool_state_corrections: If set to True, any Tool parameter
        changes will not prevent running workflow, defaults to False
    :type allow_tool_state_corrections: bool
    :param use_cached_job: If set to True galaxy will attempt to find
        previously executed steps for all workflow steps with the exact same
        parameter combinations and will copy the outputs of the previously
        executed step.
    """
    # Exactly one of these mutually exclusive creation/run modes must be
    # present in the payload.
    ways_to_create = set([
        'archive_source',
        'workflow_id',
        'installed_repository_file',
        'from_history_id',
        'from_path',
        'shared_workflow_id',
        'workflow',
    ])
    if len(ways_to_create.intersection(payload)) == 0:
        message = "One parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterMissingException(message)
    if len(ways_to_create.intersection(payload)) > 1:
        message = "Only one parameter among - %s - must be specified" % ", ".join(ways_to_create)
        raise exceptions.RequestParameterInvalidException(message)
    # Mode 1: import from a workflow file installed on the server (admin only).
    if 'installed_repository_file' in payload:
        if not trans.user_is_admin:
            raise exceptions.AdminRequiredException()
        installed_repository_file = payload.get('installed_repository_file', '')
        if not os.path.exists(installed_repository_file):
            raise exceptions.RequestParameterInvalidException("Workflow file '%s' not found" % installed_repository_file)
        elif os.path.getsize(os.path.abspath(installed_repository_file)) > 0:
            with io.open(installed_repository_file, encoding='utf-8') as f:
                workflow_data = f.read()
            return self.__api_import_from_archive(trans, workflow_data)
        else:
            raise exceptions.MessageException("You attempted to open an empty file.")
    # Mode 2: import from a URL, local file:// path (admin only), or an
    # uploaded archive file.
    if 'archive_source' in payload:
        archive_source = payload['archive_source']
        archive_file = payload.get('archive_file')
        archive_data = None
        if archive_source:
            if archive_source.startswith("file://"):
                if not trans.user_is_admin:
                    raise exceptions.AdminRequiredException()
                workflow_src = {"src": "from_path", "path": archive_source[len("file://"):]}
                payload["workflow"] = workflow_src
                return self.__api_import_new_workflow(trans, payload, **kwd)
            else:
                try:
                    archive_data = requests.get(archive_source).text
                except Exception:
                    raise exceptions.MessageException("Failed to open URL '%s'." % escape(archive_source))
        elif hasattr(archive_file, 'file'):
            uploaded_file = archive_file.file
            uploaded_file_name = uploaded_file.name
            if os.path.getsize(os.path.abspath(uploaded_file_name)) > 0:
                archive_data = uploaded_file.read()
            else:
                raise exceptions.MessageException("You attempted to upload an empty file.")
        else:
            raise exceptions.MessageException("Please provide a URL or file.")
        return self.__api_import_from_archive(trans, archive_data, "uploaded file")
    # Mode 3: extract a new workflow from an existing history.
    if 'from_history_id' in payload:
        from_history_id = payload.get('from_history_id')
        from_history_id = self.decode_id(from_history_id)
        history = self.history_manager.get_accessible(from_history_id, trans.user, current_history=trans.history)
        job_ids = [self.decode_id(_) for _ in payload.get('job_ids', [])]
        dataset_ids = payload.get('dataset_ids', [])
        dataset_collection_ids = payload.get('dataset_collection_ids', [])
        workflow_name = payload['workflow_name']
        stored_workflow = extract_workflow(
            trans=trans,
            user=trans.get_user(),
            history=history,
            job_ids=job_ids,
            dataset_ids=dataset_ids,
            dataset_collection_ids=dataset_collection_ids,
            workflow_name=workflow_name,
        )
        item = stored_workflow.to_dict(value_mapper={'id': trans.security.encode_id})
        item['url'] = url_for('workflow', id=item['id'])
        return item
    # Mode 4: import from a path on the server.
    if 'from_path' in payload:
        from_path = payload.get('from_path')
        payload["workflow"] = {"src": "from_path", "path": from_path}
        return self.__api_import_new_workflow(trans, payload, **kwd)
    # Mode 5: import a workflow another user has shared.
    if 'shared_workflow_id' in payload:
        workflow_id = payload['shared_workflow_id']
        return self.__api_import_shared_workflow(trans, workflow_id, payload)
    # Mode 6: create from an inline workflow description.
    if 'workflow' in payload:
        return self.__api_import_new_workflow(trans, payload, **kwd)
    # Mode 7 (default): run an existing workflow by id.
    workflow_id = payload.get('workflow_id', None)
    if not workflow_id:
        message = "Invalid workflow_id specified."
        raise exceptions.RequestParameterInvalidException(message)
    # Get workflow + accessibility check.
    stored_workflow = self.__get_stored_accessible_workflow(trans, workflow_id)
    workflow = stored_workflow.latest_workflow
    run_configs = build_workflow_run_configs(trans, workflow, payload)
    assert len(run_configs) == 1
    run_config = run_configs[0]
    history = run_config.target_history
    # invoke may throw MessageExceptions on tool errors, failure
    # to match up inputs, etc...
    outputs, invocation = invoke(
        trans=trans,
        workflow=workflow,
        workflow_run_config=run_config,
        populate_state=True,
    )
    trans.sa_session.flush()
    # Build legacy output - should probably include more information from
    # outputs.
    rval = {}
    rval['history'] = trans.security.encode_id(history.id)
    rval['outputs'] = []
    if outputs:
        # Newer outputs don't necessarily fill outputs (?)
        for step in workflow.steps:
            if step.type == 'tool' or step.type is None:
                for v in outputs[step.id].values():
                    rval['outputs'].append(trans.security.encode_id(v.id))
    # Newer version of this API just returns the invocation as a dict, to
    # facilitate migration - produce the newer style response and blend in
    # the older information.
    invocation_response = self.__encode_invocation(invocation, **kwd)
    invocation_response.update(rval)
    return invocation_response
def get_workflows_list(self, trans, kwd): """ Displays a collection of workflows. :param show_published: if True, show also published workflows :type show_published: boolean :param missing_tools: if True, include a list of missing tools per workflow :type missing_tools: boolean """ missing_tools = util.string_as_bool(kwd.get('missing_tools', 'False')) rval = [] filter1 = (trans.app.model.StoredWorkflow.user == trans.user) user = trans.get_user() if user is None: show_published = util.string_as_bool(kwd.get('show_published', 'True')) else : show_published = util.string_as_bool(kwd.get('show_published', 'False')) if show_published: filter1 = or_(filter1, (trans.app.model.StoredWorkflow.published == true())) for wf in trans.sa_session.query(trans.app.model.StoredWorkflow).options( joinedload("annotations")).options( joinedload("latest_workflow").undefer("step_count").lazyload("steps")).options( joinedload("tags")).filter( filter1, trans.app.model.StoredWorkflow.table.c.deleted == false()).order_by( desc(trans.app.model.StoredWorkflow.table.c.update_time)).all(): item = wf.to_dict(value_mapper={'id': trans.security.encode_id}) encoded_id = trans.security.encode_id(wf.id) item['annotations'] = [x.annotation for x in wf.annotations] item['url'] = url_for('workflow', id=encoded_id) item['owner'] = wf.user.username item['number_of_steps'] = wf.latest_workflow.step_count item['show_in_tool_panel'] = False if user is not None: item['show_in_tool_panel'] = wf.show_in_tool_panel(user_id=user.id) rval.append(item) for wf_sa in trans.sa_session.query(model.StoredWorkflowUserShareAssociation).join( model.StoredWorkflowUserShareAssociation.stored_workflow).options( joinedload("stored_workflow").joinedload("annotations")).options( joinedload("stored_workflow").joinedload("latest_workflow").undefer("step_count").lazyload("steps")).options( joinedload("stored_workflow").joinedload("user")).options( 
joinedload("stored_workflow").joinedload("tags")).filter(model.StoredWorkflowUserShareAssociation.user == trans.user).filter( model.StoredWorkflow.deleted == false()).order_by( desc(model.StoredWorkflow.update_time)).all(): item = wf_sa.stored_workflow.to_dict(value_mapper={'id': trans.security.encode_id}) encoded_id = trans.security.encode_id(wf_sa.stored_workflow.id) item['annotations'] = [x.annotation for x in wf_sa.stored_workflow.annotations] item['url'] = url_for('workflow', id=encoded_id) item['slug'] = wf_sa.stored_workflow.slug item['owner'] = wf_sa.stored_workflow.user.username item['number_of_steps'] = wf_sa.stored_workflow.latest_workflow.step_count item['show_in_tool_panel'] = False if user is not None: item['show_in_tool_panel'] = wf_sa.stored_workflow.show_in_tool_panel(user_id=user.id) rval.append(item) if missing_tools: workflows_missing_tools = [] workflows = [] workflows_by_toolshed = dict() for key, value in enumerate(rval): tool_ids = [] workflow_details = self.workflow_contents_manager.workflow_to_dict(trans, self.__get_stored_workflow(trans, value['id']), style='instance') if 'steps' in workflow_details: for step in workflow_details['steps']: tool_id = workflow_details['steps'][step].get('tool_id') if tool_id and tool_id not in tool_ids and self.app.toolbox.is_missing_shed_tool(tool_id): tool_ids.append(tool_id) if len(tool_ids) > 0: value['missing_tools'] = tool_ids workflows_missing_tools.append(value) for workflow in workflows_missing_tools: for tool_id in workflow['missing_tools']: toolshed, _, owner, name, tool, version = tool_id.split('/') shed_url = self.__get_full_shed_url(toolshed) repo_identifier = '/'.join([toolshed, owner, name]) if repo_identifier not in workflows_by_toolshed: workflows_by_toolshed[repo_identifier] = dict(shed=shed_url.rstrip('/'), repository=name, owner=owner, tools=[tool_id], workflows=[workflow['name']]) else: if tool_id not in workflows_by_toolshed[repo_identifier]['tools']: 
workflows_by_toolshed[repo_identifier]['tools'].append(tool_id) if workflow['name'] not in workflows_by_toolshed[repo_identifier]['workflows']: workflows_by_toolshed[repo_identifier]['workflows'].append(workflow['name']) for repo_tag in workflows_by_toolshed: workflows.append(workflows_by_toolshed[repo_tag]) return workflows return rval
def role_to_model(trans, role):
    """Convert a Role ORM object into a RoleModel, attaching its API URL."""
    encode = trans.security.encode_id
    payload = role.to_dict(view='element', value_mapper={'id': encode})
    payload['url'] = url_for('role', id=encode(role.id))
    return RoleModel(**payload)