Example #1
def artifact_class(trans, as_dict):
    object_id = as_dict.get("object_id", None)
    if as_dict.get("src", None) == "from_path":
        if trans and not trans.user_is_admin:
            raise exceptions.AdminRequiredException()

        workflow_path = as_dict.get("path")
        with open(workflow_path, "r") as f:
            as_dict = ordered_load(f)

    artifact_class = as_dict.get("class", None)
    if artifact_class is None and "$graph" in as_dict:
        object_id = object_id or "main"
        graph = as_dict["$graph"]
        target_object = None
        if isinstance(graph, dict):
            target_object = graph.get(object_id)
        else:
            for item in graph:
                found_id = item.get("id")
                if found_id == object_id or found_id == "#" + object_id:
                    target_object = item

        if target_object and target_object.get("class"):
            artifact_class = target_object["class"]

    return artifact_class, as_dict, object_id
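
A minimal usage sketch of the "$graph" resolution above, assuming artifact_class is importable from its module; with trans=None and no "src": "from_path" key, the admin check and the file read are skipped:

# Hypothetical packed document in the style handled by artifact_class.
as_dict = {
    "$graph": [
        {"id": "#main", "class": "Workflow"},
        {"id": "#align", "class": "CommandLineTool"},
    ]
}
# No explicit object_id is given, so "main" is assumed and matched
# against the "#main" entry of the graph list.
wf_class, as_dict, object_id = artifact_class(None, as_dict)
print(wf_class, object_id)  # Workflow main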
Example #2
    def get_latest_installable_revision(self, trans, payload, **kwd):
        """
        POST /api/tool_shed_repositories/get_latest_installable_revision
        Get the latest installable revision of a specified repository from a specified Tool Shed.

        :param key: the current Galaxy admin user's API key

        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed from which to retrieve the Repository revision.
        :param name (required): the name of the Repository
        :param owner (required): the owner of the Repository
        """
        # Get the information about the repository to be installed from the payload.
        tool_shed_url, name, owner = self.__parse_repository_from_payload(
            payload)
        # Make sure the current user's API key proves they are an admin user in this Galaxy instance.
        if not trans.user_is_admin():
            raise exceptions.AdminRequiredException(
                'You are not authorized to request the latest installable revision for a repository in this Galaxy instance.'
            )
        params = '?name=%s&owner=%s' % (name, owner)
        url = common_util.url_join(
            tool_shed_url,
            'api/repositories/get_ordered_installable_revisions%s' % params)
        try:
            raw_text = common_util.tool_shed_get(trans.app, tool_shed_url, url)
        except Exception as e:
            message = "Error attempting to retrieve the latest installable revision from tool shed %s for repository %s owned by %s: %s" % \
                ( str( tool_shed_url ), str( name ), str( owner ), str( e ) )
            log.debug(message)
            return dict(status='error', error=message)
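
A hedged sketch of how a client might call the endpoint documented above; the Galaxy host, the admin API key, and the repository coordinates are all placeholders:

import requests

payload = {
    "tool_shed_url": "https://toolshed.g2.bx.psu.edu",
    "name": "fastqc",
    "owner": "devteam",
}
r = requests.post(
    "https://galaxy.example.org/api/tool_shed_repositories/get_latest_installable_revision",
    params={"key": "ADMIN_API_KEY"},  # must belong to a Galaxy admin
    json=payload,
)
print(r.json())  # a changeset revision hash on success, or an error dict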
Example #3
    def index(self, trans, deleted=False, **kwd):
        """
        GET /api/categories
        Return a list of dictionaries that contain information about each Category.

        :param deleted: flag used to include deleted categories

        Example: GET localhost:9009/api/categories
        """
        category_dicts = []
        deleted = util.asbool(deleted)
        if deleted and not trans.user_is_admin():
            raise exceptions.AdminRequiredException(
                'Only administrators can query deleted categories.')
        for category in trans.sa_session.query( self.app.model.Category ) \
                                        .filter( self.app.model.Category.table.c.deleted == deleted ) \
                                        .order_by( self.app.model.Category.table.c.name ):
            category_dict = category.to_dict(
                view='collection', value_mapper=self.__get_value_mapper(trans))
            category_dict['url'] = web.url_for(controller='categories',
                                               action='show',
                                               id=trans.security.encode_id(
                                                   category.id))
            category_dict[
                'repositories'] = self.app.repository_registry.viewable_repositories_and_suites_by_category.get(
                    category.name, 0)
            category_dicts.append(category_dict)
        return category_dicts
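
Following the docstring's example, a hypothetical client call; the host, port, and key are placeholders, and deleted=true would additionally require an administrator key:

import requests

r = requests.get(
    "http://localhost:9009/api/categories",
    params={"key": "API_KEY", "deleted": "false"},
)
for category in r.json():
    # each dict carries at least the 'url' and 'repositories' keys set above
    print(category["url"], category["repositories"])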
Example #4
    def get_latest_installable_revision( self, trans, payload, **kwd ):
        """
        POST /api/tool_shed_repositories/get_latest_installable_revision
        Get the latest installable revision of a specified repository from a specified Tool Shed.

        :param key: the current Galaxy admin user's API key

        The following parameters are included in the payload.
        :param tool_shed_url (required): the base URL of the Tool Shed from which to retrieve the Repository revision.
        :param name (required): the name of the Repository
        :param owner (required): the owner of the Repository
        """
        # Get the information about the repository to be installed from the payload.
        tool_shed_url, name, owner = self.__parse_repository_from_payload( payload )
        # Make sure the current user's API key proves they are an admin user in this Galaxy instance.
        if not trans.user_is_admin():
            raise exceptions.AdminRequiredException( 'You are not authorized to request the latest installable revision for a repository in this Galaxy instance.' )
        params = dict( name=name, owner=owner )
        pathspec = [ 'api', 'repositories', 'get_ordered_installable_revisions' ]
        try:
            raw_text = util.url_get( tool_shed_url, password_mgr=self.app.tool_shed_registry.url_auth( tool_shed_url ), pathspec=pathspec, params=params )
        except Exception as e:
            message = "Error attempting to retrieve the latest installable revision from tool shed %s for repository %s owned by %s: %s" % \
                ( str( tool_shed_url ), str( name ), str( owner ), str( e ) )
            log.debug( message )
            return dict( status='error', error=message )
        if raw_text:
            # If successful, the response from get_ordered_installable_revisions will be a list of
            # changeset_revision hash strings.
            changeset_revisions = json.loads( raw_text )
            if len( changeset_revisions ) >= 1:
                return changeset_revisions[ -1 ]
        return hg_util.INITIAL_CHANGELOG_HASH
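
A small sketch of the selection logic above: get_ordered_installable_revisions returns changeset hashes ordered oldest to newest, so the last element is the latest installable revision. The hash values here are made-up placeholders:

# Placeholder hashes, oldest first, mimicking the Tool Shed response.
changeset_revisions = ["7602de1e7f32", "a4c92f2b5a7e", "c1b6e9d0f3aa"]
# Mirrors the code above: take the newest entry, else fall back to the
# empty-repository hash (hg_util.INITIAL_CHANGELOG_HASH).
INITIAL_CHANGELOG_HASH = "000000000000"
latest = changeset_revisions[-1] if changeset_revisions else INITIAL_CHANGELOG_HASH
print(latest)  # c1b6e9d0f3aa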
Example #5
    def index(
        self,
        trans: ProvidesHistoryContext,
        serialization_params: SerializationParams,
        filter_query_params: FilterQueryParams,
        deleted_only: Optional[bool] = False,
        all_histories: Optional[bool] = False,
    ):
        """
        Return a collection of histories for the current user. Additional filters can be applied.

        :type   deleted_only: optional boolean
        :param  deleted_only: if True, show only deleted histories, if False, non-deleted

        .. note:: Anonymous users are allowed to get their current history
        """
        # bail early with current history if user is anonymous
        current_user = self.user_manager.current_user(trans)
        if self.user_manager.is_anonymous(current_user):
            current_history = self.manager.get_current(trans)
            if not current_history:
                return []
            # note: ignores filters, limit, offset
            return [
                self._serialize_history(trans, current_history,
                                        serialization_params)
            ]

        filter_params = self.filters.build_filter_params(filter_query_params)
        filters = []
        # support the old default of not-returning/filtering-out deleted_only histories
        filters += self._get_deleted_filter(deleted_only, filter_params)

        # if parameter 'all_histories' is true, throw exception if not admin
        # else add current user filter to query (default behaviour)
        if all_histories:
            if not trans.user_is_admin:
                message = "Only admins can query all histories"
                raise glx_exceptions.AdminRequiredException(message)
        else:
            filters += [model.History.user == current_user]
        # and any sent in from the query string
        filters += self.filters.parse_filters(filter_params)
        order_by = self.build_order_by(self.manager, filter_query_params.order)

        histories = self.manager.list(filters=filters,
                                      order_by=order_by,
                                      limit=filter_query_params.limit,
                                      offset=filter_query_params.offset)

        rval = [
            self._serialize_history(trans,
                                    history,
                                    serialization_params,
                                    default_view="summary")
            for history in histories
        ]
        return rval
Example #6
    def __ensure_can_install_repos( self, trans ):
        # Make sure this Galaxy instance is configured with a shed-related tool panel configuration file.
        if not suc.have_shed_tool_conf_for_install( self.app ):
            message = get_message_for_no_shed_tool_config()
            log.debug( message )
            return dict( status='error', error=message )
        # Make sure the current user's API key proves they are an admin user in this Galaxy instance.
        if not trans.user_is_admin():
            raise exceptions.AdminRequiredException( 'You are not authorized to request the latest installable revision for a repository in this Galaxy instance.' )
Example #7
    def __api_import_new_workflow(self, trans, payload, **kwd):
        data = payload['workflow']
        raw_workflow_description = self.__normalize_workflow(trans, data)
        data = raw_workflow_description.as_dict
        import_tools = util.string_as_bool(payload.get("import_tools", False))
        if import_tools and not trans.user_is_admin:
            raise exceptions.AdminRequiredException()

        from_dict_kwds = self.__import_or_update_kwds(payload)

        publish = util.string_as_bool(payload.get("publish", False))
        # If 'publish' set, default to importable.
        importable = util.string_as_bool(payload.get("importable", publish))

        if publish and not importable:
            raise exceptions.RequestParameterInvalidException("Published workflow must be importable.")

        from_dict_kwds["publish"] = publish
        workflow, missing_tool_tups = self._workflow_from_dict(trans, raw_workflow_description, **from_dict_kwds)
        if importable:
            self._make_item_accessible(trans.sa_session, workflow)
            trans.sa_session.flush()
        # id of the newly created Galaxy workflow
        workflow_id = workflow.id
        # API-encoded id
        encoded_id = trans.security.encode_id(workflow_id)
        item = workflow.to_dict(value_mapper={'id': trans.security.encode_id})
        item['annotations'] = [x.annotation for x in workflow.annotations]
        item['url'] = url_for('workflow', id=encoded_id)
        item['owner'] = workflow.user.username
        item['number_of_steps'] = len(workflow.latest_workflow.steps)
        if import_tools:
            tools = {}
            for key in data['steps']:
                item = data['steps'][key]
                if item is not None:
                    if 'tool_shed_repository' in item:
                        tool_shed_repository = item['tool_shed_repository']
                        if 'owner' in tool_shed_repository and 'changeset_revision' in tool_shed_repository and 'name' in tool_shed_repository and 'tool_shed' in tool_shed_repository:
                            toolstr = tool_shed_repository['owner'] \
                                + tool_shed_repository['changeset_revision'] \
                                + tool_shed_repository['name'] \
                                + tool_shed_repository['tool_shed']
                            tools[toolstr] = tool_shed_repository
            irm = InstallRepositoryManager(self.app)
            for k in tools:
                item = tools[k]
                tool_shed_url = 'https://' + item['tool_shed'] + '/'
                name = item['name']
                owner = item['owner']
                changeset_revision = item['changeset_revision']
                irm.install(tool_shed_url,
                            name,
                            owner,
                            changeset_revision,
                            payload)
        return item
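
A minimal sketch of the repository deduplication above: steps referencing the same Tool Shed repository collapse into a single entry keyed by owner + changeset_revision + name + tool_shed, so each repository is installed only once. The step data here is hypothetical:

steps = {
    "0": {"tool_shed_repository": {"owner": "devteam", "changeset_revision": "abc123",
                                   "name": "bwa", "tool_shed": "toolshed.g2.bx.psu.edu"}},
    "1": {"tool_shed_repository": {"owner": "devteam", "changeset_revision": "abc123",
                                   "name": "bwa", "tool_shed": "toolshed.g2.bx.psu.edu"}},
}
tools = {}
for step in steps.values():
    repo = step.get("tool_shed_repository")
    if repo and {"owner", "changeset_revision", "name", "tool_shed"} <= repo.keys():
        key = repo["owner"] + repo["changeset_revision"] + repo["name"] + repo["tool_shed"]
        tools[key] = repo
print(len(tools))  # 1 -- the shared repository would be installed once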
Example #8
File: users.py Project: msauria/galaxy
    def error_unless_admin(self, user, msg="Administrators only", **kwargs):
        """
        Raise an error if `user` is not an admin.

        :raises exceptions.AdminRequiredException: if `user` is not an admin.
        """
        # useful in admin only methods
        if not self.is_admin(user, trans=kwargs.get("trans", None)):
            raise exceptions.AdminRequiredException(msg, **kwargs)
        return user
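
A hypothetical guard built on the helper above, placed at the top of an admin-only operation; user_manager, trans, and exceptions are assumed to be in scope:

# Raises exceptions.AdminRequiredException unless trans.user is an admin;
# on success the user is returned, so the call can double as an assignment.
admin_user = user_manager.error_unless_admin(
    trans.user, msg="Only administrators may perform this action.", trans=trans
)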
Example #9
    def load(self, trans, **kwd):
        """
        Load dataset from the given source into the library.

        :param  encoded_folder_id:      the encoded id of the folder to import dataset to
        :type   encoded_folder_id:      an encoded id string
        :param  source:                 source of the dataset to be loaded
        :type   source:                 str
        :param  link_data:              flag whether to link the dataset to data or copy it to Galaxy
        :type   link_data:              bool
        :param  preserve_dirs:          flag whether to preserve directory structure when importing dir
        :type   preserve_dirs:          bool
        """

        kwd['space_to_tab'] = 'False'
        kwd['to_posix_lines'] = 'True'

        kwd['dbkey'] = kwd.get('dbkey', '?')
        kwd['file_type'] = kwd.get('file_type', 'auto')
        kwd['link_data_only'] = 'link_to_files' if util.string_as_bool(
            kwd.get('link_data', False)) else 'copy_files'
        encoded_folder_id = kwd.get('encoded_folder_id', None)
        if encoded_folder_id is not None:
            folder_id = self.folder_manager.cut_and_decode(
                trans, encoded_folder_id)
        else:
            raise exceptions.RequestParameterMissingException(
                'The required attribute encoded_folder_id is missing.')
        path = kwd.get('path', None)
        if path is None:
            raise exceptions.RequestParameterMissingException(
                'The required attribute path is missing.')
        folder = self.folder_manager.get(trans, folder_id)

        source = kwd.get('source', None)
        if source not in [
                'userdir_file', 'userdir_folder', 'importdir_file',
                'importdir_folder', 'admin_path'
        ]:
            raise exceptions.RequestParameterMissingException(
                'You have to specify the "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder".'
            )
        if source in ['importdir_file', 'importdir_folder']:
            if not trans.user_is_admin:
                raise exceptions.AdminRequiredException(
                    'Only admins can import from importdir.')
            if not trans.app.config.library_import_dir:
                raise exceptions.ConfigDoesNotAllowException(
                    'The configuration of this Galaxy instance does not allow admins to import into library from importdir.'
                )
            user_base_dir = trans.app.config.library_import_dir

        if source in ['userdir_file', 'userdir_folder']:
            user_login = trans.user.email
            user_base_dir = trans.app.config.user_library_import_dir
            if user_base_dir is None:
                raise exceptions.ConfigDoesNotAllowException(
                    'The configuration of this Galaxy instance does not allow upload from user directories.'
                )
            full_dir = os.path.join(user_base_dir, user_login)
            # path_to_root_import_folder = None
            if not path.lower().startswith(full_dir.lower()):
                # path_to_root_import_folder = path
                path = os.path.join(full_dir, path)
            if not os.path.exists(path):
                raise exceptions.RequestParameterInvalidException(
                    'Given path does not exist on the host.')
            if not self.folder_manager.can_add_item(trans, folder):
                raise exceptions.InsufficientPermissionsException(
                    'You do not have proper permission to add items to the given folder.'
                )
        if source == 'admin_path':
            if not trans.app.config.allow_library_path_paste:
                raise exceptions.ConfigDoesNotAllowException(
                    'The configuration of this Galaxy instance does not allow admins to import into library from path.'
                )
            if not trans.user_is_admin:
                raise exceptions.AdminRequiredException(
                    'Only admins can import from path.')

        # Set up the traditional tool state/params
        tool_id = 'upload1'
        tool = trans.app.toolbox.get_tool(tool_id)
        state = tool.new_state(trans)
        tool.update_state(trans, tool.inputs_by_page[0], state.inputs, kwd)
        tool_params = state.inputs
        dataset_upload_inputs = []
        for input_name, input in tool.inputs.iteritems():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append(input)
        library_bunch = upload_common.handle_library_params(
            trans, {}, trans.security.encode_id(folder.id))
        abspath_datasets = []
        kwd['filesystem_paths'] = path
        params = util.Params(kwd)
        # user wants to import one file only
        if source == "userdir_file":
            file = os.path.abspath(path)
            abspath_datasets.append(trans.webapp.controllers['library_common'].
                                    make_library_uploaded_dataset(
                                        trans, 'api', params,
                                        os.path.basename(file), file,
                                        'server_dir', library_bunch))
        # user wants to import whole folder
        if source == "userdir_folder":
            uploaded_datasets_bunch = trans.webapp.controllers[
                'library_common'].get_path_paste_uploaded_datasets(
                    trans, 'api', params, library_bunch, 200, '')
            uploaded_datasets = uploaded_datasets_bunch[0]
            if uploaded_datasets is None:
                raise exceptions.ObjectNotFound(
                    'Given folder does not contain any datasets.')
            for ud in uploaded_datasets:
                ud.path = os.path.abspath(ud.path)
                abspath_datasets.append(ud)
        #  user wants to import from path (admins only)
        if source == "admin_path":
            # validate the path is within root
            uploaded_datasets_bunch = trans.webapp.controllers[
                'library_common'].get_path_paste_uploaded_datasets(
                    trans, 'api', params, library_bunch, 200, '')
            uploaded_datasets = uploaded_datasets_bunch[0]
            if uploaded_datasets is None:
                raise exceptions.ObjectNotFound(
                    'Given folder does not contain any datasets.')
            for ud in uploaded_datasets:
                ud.path = os.path.abspath(ud.path)
                abspath_datasets.append(ud)
        json_file_path = upload_common.create_paramfile(
            trans, abspath_datasets)
        data_list = [ud.data for ud in abspath_datasets]
        job, output = upload_common.create_job(trans,
                                               tool_params,
                                               tool,
                                               json_file_path,
                                               data_list,
                                               folder=folder)
        # HACK: Prevent outputs_to_working_directory from overwriting inputs when "linking"
        job.add_parameter('link_data_only',
                          dumps(kwd.get('link_data_only', 'copy_files')))
        job.add_parameter('uuid', dumps(kwd.get('uuid', None)))
        trans.sa_session.add(job)
        trans.sa_session.flush()
        job_dict = job.to_dict()
        job_dict['id'] = trans.security.encode_id(job_dict['id'])
        return job_dict
Example #10
    def create(self, trans, payload, **kwd):
        """
        POST /api/workflows

        Run or create workflows from the api.

        If installed_repository_file or from_history_id is specified a new
        workflow will be created for this user. Otherwise, workflow_id must be
        specified and this API method will cause a workflow to execute.

        :param  installed_repository_file    The path of a workflow to import. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   installed_repository_file    str

        :param  workflow_id:                 An existing workflow id. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   workflow_id:                 str

        :param  parameters:                  If workflow_id is set - see _update_step_parameters()
        :type   parameters:                  dict

        :param  ds_map:                      If workflow_id is set - a dictionary mapping each input step id to a dictionary with 2 keys: 'src' (which can be 'ldda', 'ld' or 'hda') and 'id' (which should be the id of a LibraryDatasetDatasetAssociation, LibraryDataset or HistoryDatasetAssociation respectively)
        :type   ds_map:                      dict

        :param  no_add_to_history:           If workflow_id is set - if present in the payload with any value, the input datasets will not be added to the selected history
        :type   no_add_to_history:           str

        :param  history:                     If workflow_id is set - optional history where to run the workflow, either the name of a new history or "hist_id=HIST_ID" where HIST_ID is the id of an existing history. If not specified, the workflow will be run in a new unnamed history
        :type   history:                     str

        :param  replacement_params:          If workflow_id is set - an optional dictionary used when renaming datasets
        :type   replacement_params:          dict

        :param  from_history_id:             Id of history to extract a workflow from. Either workflow_id, installed_repository_file or from_history_id must be specified
        :type   from_history_id:             str

        :param  job_ids:                     If from_history_id is set - optional list of jobs to include when extracting a workflow from history
        :type   job_ids:                     str

        :param  dataset_ids:                 If from_history_id is set - optional list of HDA `hid`s corresponding to workflow inputs when extracting a workflow from history
        :type   dataset_ids:                 str

        :param  dataset_collection_ids:      If from_history_id is set - optional list of HDCA `hid`s corresponding to workflow inputs when extracting a workflow from history
        :type   dataset_collection_ids:      str

        :param  workflow_name:               If from_history_id is set - name of the workflow to create when extracting a workflow from history
        :type   workflow_name:               str

        :param  allow_tool_state_corrections:  If set to True, any Tool parameter changes will not prevent running workflow, defaults to False
        :type   allow_tool_state_corrections:  bool

        :param use_cached_job:               If set to True, Galaxy will attempt to find previously executed steps for all workflow steps with the exact same parameter combinations
                                             and will copy the outputs of the previously executed step.
        """
        ways_to_create = set([
            'archive_source',
            'workflow_id',
            'installed_repository_file',
            'from_history_id',
            'from_path',
            'shared_workflow_id',
            'workflow',
        ])

        if len(ways_to_create.intersection(payload)) == 0:
            message = "One parameter among - %s - must be specified" % ", ".join(
                ways_to_create)
            raise exceptions.RequestParameterMissingException(message)

        if len(ways_to_create.intersection(payload)) > 1:
            message = "Only one parameter among - %s - must be specified" % ", ".join(
                ways_to_create)
            raise exceptions.RequestParameterInvalidException(message)

        if 'installed_repository_file' in payload:
            if not trans.user_is_admin:
                raise exceptions.AdminRequiredException()
            installed_repository_file = payload.get(
                'installed_repository_file', '')
            if not os.path.exists(installed_repository_file):
                raise exceptions.MessageException(
                    "Repository file '%s' not found." % installed_repository_file)
            elif os.path.getsize(
                    os.path.abspath(installed_repository_file)) > 0:
                workflow_data = None
                with open(installed_repository_file, 'rb') as f:
                    workflow_data = f.read()
                return self.__api_import_from_archive(trans, workflow_data)
            else:
                raise exceptions.MessageException(
                    "You attempted to open an empty file.")

        if 'archive_source' in payload:
            archive_source = payload['archive_source']
            archive_file = payload.get('archive_file')
            archive_data = None
            if archive_source:
                if archive_source.startswith("file://"):
                    if not trans.user_is_admin:
                        raise exceptions.AdminRequiredException()
                    workflow_src = {
                        "src": "from_path",
                        "path": archive_source[len("file://"):]
                    }
                    payload["workflow"] = workflow_src
                    return self.__api_import_new_workflow(
                        trans, payload, **kwd)
                else:
                    try:
                        archive_data = requests.get(archive_source).text
                    except Exception:
                        raise exceptions.MessageException(
                            "Failed to open URL '%s'." %
                            escape(archive_source))
            elif hasattr(archive_file, 'file'):
                uploaded_file = archive_file.file
                uploaded_file_name = uploaded_file.name
                if os.path.getsize(os.path.abspath(uploaded_file_name)) > 0:
                    archive_data = uploaded_file.read()
                else:
                    raise exceptions.MessageException(
                        "You attempted to upload an empty file.")
            else:
                raise exceptions.MessageException(
                    "Please provide a URL or file.")
            return self.__api_import_from_archive(trans, archive_data,
                                                  "uploaded file")

        if 'from_history_id' in payload:
            from_history_id = payload.get('from_history_id')
            from_history_id = self.decode_id(from_history_id)
            history = self.history_manager.get_accessible(
                from_history_id, trans.user, current_history=trans.history)

            job_ids = [self.decode_id(_) for _ in payload.get('job_ids', [])]
            dataset_ids = payload.get('dataset_ids', [])
            dataset_collection_ids = payload.get('dataset_collection_ids', [])
            workflow_name = payload['workflow_name']
            stored_workflow = extract_workflow(
                trans=trans,
                user=trans.get_user(),
                history=history,
                job_ids=job_ids,
                dataset_ids=dataset_ids,
                dataset_collection_ids=dataset_collection_ids,
                workflow_name=workflow_name,
            )
            item = stored_workflow.to_dict(
                value_mapper={'id': trans.security.encode_id})
            item['url'] = url_for('workflow', id=item['id'])
            return item

        if 'from_path' in payload:
            from_path = payload.get('from_path')
            payload["workflow"] = {"src": "from_path", "path": from_path}
            return self.__api_import_new_workflow(trans, payload, **kwd)

        if 'shared_workflow_id' in payload:
            workflow_id = payload['shared_workflow_id']
            return self.__api_import_shared_workflow(trans, workflow_id,
                                                     payload)

        if 'workflow' in payload:
            return self.__api_import_new_workflow(trans, payload, **kwd)

        workflow_id = payload.get('workflow_id', None)
        if not workflow_id:
            message = "Invalid workflow_id specified."
            raise exceptions.RequestParameterInvalidException(message)

        # Get workflow + accessibility check.
        stored_workflow = self.__get_stored_accessible_workflow(
            trans, workflow_id)
        workflow = stored_workflow.latest_workflow

        run_configs = build_workflow_run_configs(trans, workflow, payload)
        assert len(run_configs) == 1
        run_config = run_configs[0]
        history = run_config.target_history

        # invoke may throw MessageExceptions on tool errors, failure
        # to match up inputs, etc...
        outputs, invocation = invoke(
            trans=trans,
            workflow=workflow,
            workflow_run_config=run_config,
            populate_state=True,
        )
        trans.sa_session.flush()

        # Build legacy output - should probably include more information from
        # outputs.
        rval = {}
        rval['history'] = trans.security.encode_id(history.id)
        rval['outputs'] = []
        if outputs:
            # Newer outputs don't necessarily fill outputs (?)
            for step in workflow.steps:
                if step.type == 'tool' or step.type is None:
                    for v in outputs[step.id].values():
                        rval['outputs'].append(trans.security.encode_id(v.id))

        # Newer version of this API just returns the invocation as a dict, to
        # facilitate migration - produce the newer style response and blend in
        # the older information.
        invocation_response = self.__encode_invocation(invocation, **kwd)
        invocation_response.update(rval)
        return invocation_response
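
A hedged example of running an existing workflow through POST /api/workflows with the parameters documented above; the host, key, and all ids are placeholders:

import requests

payload = {
    "workflow_id": "ebfb8f50c6abde6d",
    "history": "hist_id=df7a1f0c02a5b08e",  # or a plain name to create a new history
    "ds_map": {
        # map input step 0 to a history dataset; 'src' may be 'hda', 'ld' or 'ldda'
        "0": {"src": "hda", "id": "9fc1ae2b9bbe4f0a"},
    },
}
r = requests.post(
    "https://galaxy.example.org/api/workflows",
    params={"key": "API_KEY"},
    json=payload,
)
response = r.json()
print(response["history"], response["outputs"])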
Example #11
    def load(self, trans, payload=None, **kwd):
        """
        POST /api/libraries/datasets

        Load dataset(s) from the given source into the library.

        :param   payload: dictionary structure containing:
            :param  encoded_folder_id:      the encoded id of the folder to import dataset(s) to
            :type   encoded_folder_id:      an encoded id string
            :param  source:

                source the datasets should be loaded from. Source can be:

                    - user directory

                        root folder specified in galaxy.ini as "$user_library_import_dir"
                        example path: path/to/galaxy/$user_library_import_dir/[email protected]/{user can browse everything here}
                        the folder with the user login has to be created beforehand

                    - (admin)import directory

                        root folder specified in galaxy ini as "$library_import_dir"
                        example path: path/to/galaxy/$library_import_dir/{admin can browse everything here}

                    - (admin)any absolute or relative path

                        option allowed with "allow_library_path_paste" in galaxy.ini

            :type   source:                 str
            :param  link_data:

                flag whether to link the dataset to data or copy it to Galaxy, defaults to copy
                when link_data is set to True all symlinks will be resolved _once_

            :type   link_data:              bool
            :param  preserve_dirs:

                flag whether to preserve the directory structure when importing dir
                if False only datasets will be imported

            :type   preserve_dirs:          bool
            :param  file_type:              file type of the loaded datasets, defaults to 'auto' (autodetect)
            :type   file_type:              str
            :param  dbkey:                  dbkey of the loaded genome, defaults to '?' (unknown)
            :type   dbkey:                  str
            :param  tag_using_filenames:    flag whether to generate dataset tags from filenames
            :type   tag_using_filenames:    bool

        :type    payload: dictionary

        :returns:   dict containing information about the created upload job
        :rtype:     dictionary

        :raises: RequestParameterMissingException, AdminRequiredException, ConfigDoesNotAllowException, RequestParameterInvalidException,
                    InsufficientPermissionsException, ObjectNotFound
        """
        if payload:
            kwd.update(payload)
        kwd['space_to_tab'] = False
        kwd['to_posix_lines'] = True
        kwd['dbkey'] = kwd.get('dbkey', '?')
        kwd['file_type'] = kwd.get('file_type', 'auto')
        kwd['link_data_only'] = 'link_to_files' if util.string_as_bool(kwd.get('link_data', False)) else 'copy_files'
        kwd['tag_using_filenames'] = util.string_as_bool(kwd.get('tag_using_filenames', None))
        encoded_folder_id = kwd.get('encoded_folder_id', None)
        if encoded_folder_id is not None:
            folder_id = self.folder_manager.cut_and_decode(trans, encoded_folder_id)
        else:
            raise exceptions.RequestParameterMissingException('The required attribute encoded_folder_id is missing.')
        path = kwd.get('path', None)
        if path is None:
            raise exceptions.RequestParameterMissingException('The required attribute path is missing.')
        if not isinstance(path, str):
            raise exceptions.RequestParameterInvalidException('The required attribute path is not String.')

        folder = self.folder_manager.get(trans, folder_id)

        source = kwd.get('source', None)
        if source not in ['userdir_file', 'userdir_folder', 'importdir_file', 'importdir_folder', 'admin_path']:
            raise exceptions.RequestParameterMissingException('You have to specify the "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder".')
        elif source in ['importdir_file', 'importdir_folder']:
            if not trans.user_is_admin:
                raise exceptions.AdminRequiredException('Only admins can import from importdir.')
            if not trans.app.config.library_import_dir:
                raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from importdir.')
            import_base_dir = trans.app.config.library_import_dir
            if not safe_relpath(path):
                # admins shouldn't be able to explicitly specify a path outside server_dir, but symlinks are allowed.
                # the reasoning here is that galaxy admins may not have direct filesystem access or can only access
                # library_import_dir via FTP (which cannot create symlinks), and may rely on sysadmins to set up the
                # import directory. if they have filesystem access, all bets are off.
                raise exceptions.RequestParameterInvalidException('The given path is invalid.')
            path = os.path.join(import_base_dir, path)
        elif source in ['userdir_file', 'userdir_folder']:
            username = trans.user.username if trans.app.config.user_library_import_check_permissions else None
            user_login = trans.user.email
            user_base_dir = trans.app.config.user_library_import_dir
            if user_base_dir is None:
                raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow upload from user directories.')
            full_dir = os.path.join(user_base_dir, user_login)

            if not safe_contains(full_dir, path, allowlist=trans.app.config.user_library_import_symlink_allowlist):
                # the path is a symlink outside the user dir
                path = os.path.join(full_dir, path)
                log.error('User attempted to import a path that resolves to a path outside of their import dir: %s -> %s', path, os.path.realpath(path))
                raise exceptions.RequestParameterInvalidException('The given path is invalid.')
            if trans.app.config.user_library_import_check_permissions and not full_path_permission_for_user(full_dir, path, username):
                log.error('User attempted to import a path that resolves to a path outside of their import dir: '
                        '%s -> %s and cannot be read by them.', path, os.path.realpath(path))
                raise exceptions.RequestParameterInvalidException('The given path is invalid.')
            path = os.path.join(full_dir, path)
            if unsafe_walk(path, allowlist=[full_dir] + trans.app.config.user_library_import_symlink_allowlist, username=username):
                # the path is a dir and contains files that symlink outside the user dir
                error = 'User attempted to import a path that resolves to a path outside of their import dir: {} -> {}'.format(
                    path, os.path.realpath(path)
                )
                if trans.app.config.user_library_import_check_permissions:
                    error += ' or is not readable for them.'
                log.error(error)
                raise exceptions.RequestParameterInvalidException('The given path is invalid.')
            if not os.path.exists(path):
                raise exceptions.RequestParameterInvalidException('Given path does not exist on the host.')
            if not self.folder_manager.can_add_item(trans, folder):
                raise exceptions.InsufficientPermissionsException('You do not have proper permission to add items to the given folder.')
        elif source == 'admin_path':
            if not trans.app.config.allow_library_path_paste:
                raise exceptions.ConfigDoesNotAllowException('The configuration of this Galaxy instance does not allow admins to import into library from path.')
            if not trans.user_is_admin:
                raise exceptions.AdminRequiredException('Only admins can import from path.')

        # Set up the traditional tool state/params
        tool_id = 'upload1'
        tool = trans.app.toolbox.get_tool(tool_id)
        state = tool.new_state(trans)
        populate_state(trans, tool.inputs, kwd, state.inputs)
        tool_params = state.inputs
        dataset_upload_inputs = []
        for input in tool.inputs.values():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append(input)
        library_bunch = upload_common.handle_library_params(trans, {}, trans.security.encode_id(folder.id))
        abspath_datasets = []
        kwd['filesystem_paths'] = path
        if source in ['importdir_folder']:
            kwd['filesystem_paths'] = os.path.join(import_base_dir, path)
        # user wants to import one file only
        elif source in ["userdir_file", "importdir_file"]:
            file = os.path.abspath(path)
            abspath_datasets.append(self._make_library_uploaded_dataset(
                trans, kwd, os.path.basename(file), file, 'server_dir', library_bunch))
        # user wants to import whole folder
        elif source == "userdir_folder":
            uploaded_datasets_bunch = self._get_path_paste_uploaded_datasets(
                trans, kwd, library_bunch, 200, '')
            uploaded_datasets = uploaded_datasets_bunch[0]
            if uploaded_datasets is None:
                raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
            for ud in uploaded_datasets:
                ud.path = os.path.abspath(ud.path)
                abspath_datasets.append(ud)
        #  user wants to import from path
        if source in ["admin_path", "importdir_folder"]:
            # validate the path is within root
            uploaded_datasets_bunch = self._get_path_paste_uploaded_datasets(
                trans, kwd, library_bunch, 200, '')
            uploaded_datasets = uploaded_datasets_bunch[0]
            if uploaded_datasets is None:
                raise exceptions.ObjectNotFound('Given folder does not contain any datasets.')
            for ud in uploaded_datasets:
                ud.path = os.path.abspath(ud.path)
                abspath_datasets.append(ud)
        json_file_path = upload_common.create_paramfile(trans, abspath_datasets)
        data_list = [ud.data for ud in abspath_datasets]
        job_params = {}
        job_params['link_data_only'] = dumps(kwd.get('link_data_only', 'copy_files'))
        job_params['uuid'] = dumps(kwd.get('uuid', None))
        job, output = upload_common.create_job(trans, tool_params, tool, json_file_path, data_list, folder=folder, job_params=job_params)
        trans.app.job_manager.enqueue(job, tool=tool)
        job_dict = job.to_dict()
        job_dict['id'] = trans.security.encode_id(job_dict['id'])
        return job_dict
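
A hypothetical request against the endpoint documented above; the folder id, path, host, and key are placeholders. The importdir_* and admin_path sources additionally require an admin key:

import requests

payload = {
    "encoded_folder_id": "F0123456789abcdef",
    "source": "userdir_file",
    "path": "projects/sample1.fastq",  # resolved under the user's import dir
    "file_type": "auto",               # documented default
    "dbkey": "?",                      # documented default
    "link_data": False,                # copy rather than link
}
r = requests.post(
    "https://galaxy.example.org/api/libraries/datasets",
    params={"key": "API_KEY"},
    json=payload,
)
print(r.json())  # information about the created upload job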
Example #12
    def index(self, trans, deleted='False', **kwd):
        """
        GET /api/histories

        return undeleted histories for the current user

        GET /api/histories/deleted

        return deleted histories for the current user

        .. note:: Anonymous users are allowed to get their current history

        :type   deleted: boolean
        :param  deleted: if True, show only deleted histories, if False, non-deleted

        :rtype:     list
        :returns:   list of dictionaries containing summary history information

        The following are optional parameters:

            view:   string, one of ('summary','detailed'), defaults to 'summary'
                    controls which set of properties to return
            keys:   comma separated strings, unused by default
                    keys/names of individual properties to return
            all:    boolean, defaults to 'false', admin-only
                    returns all histories, not just current user's

        If neither keys nor view is sent, the default view (set of keys) is returned.
        If both a view and keys are sent, the key list and the view's keys are
        combined.

        If keys are sent and no view, only those properties in keys are returned.

        For which properties are available see

            galaxy/managers/histories/HistorySerializer

        The list returned can be filtered by using two optional parameters:

            :q:

                string, generally a property name to filter by followed
                by an (often optional) hyphen and operator string.

            :qv:

                string, the value to filter by

        ..example::

            To filter the list to only those created after 2015-01-29,
            the query string would look like:

                '?q=create_time-gt&qv=2015-01-29'

            Multiple filters can be sent in using multiple q/qv pairs:

                '?q=create_time-gt&qv=2015-01-29&q=tag-has&qv=experiment-1'

        The list returned can be paginated using two optional parameters:

            limit:  integer, defaults to no value and no limit (return all)
                    how many items to return
            offset: integer, defaults to 0 and starts at the beginning
                    skip the first ( offset ) items and begin returning
                    with the next item

        ..example:

            limit and offset can be combined. Skip the first three and return five:
                '?limit=5&offset=3'

        The list returned can be ordered using the optional parameter:

            order:  string containing one of the valid ordering attributes followed
                    (optionally) by '-asc' or '-dsc' for ascending and descending
                    order respectively. Orders can be stacked as a comma-
                    separated list of values.

        ..example:
            To sort by name descending then create time descending:
                '?order=name-dsc,create_time'

        The ordering attributes and their default orders are:

            create_time defaults to 'create_time-dsc'
            update_time defaults to 'update_time-dsc'
            name    defaults to 'name-asc'

        'order' defaults to 'create_time-dsc'
        """
        serialization_params = self._parse_serialization_params(kwd, 'summary')
        limit, offset = self.parse_limit_offset(kwd)
        filter_params = self.parse_filter_params(kwd)

        # bail early with current history if user is anonymous
        current_user = self.user_manager.current_user(trans)
        if self.user_manager.is_anonymous(current_user):
            current_history = self.manager.get_current(trans)
            if not current_history:
                return []
            # note: ignores filters, limit, offset
            return [
                self.serializer.serialize_to_view(current_history,
                                                  user=current_user,
                                                  trans=trans,
                                                  **serialization_params)
            ]

        filters = []
        # support the old default of not-returning/filtering-out deleted histories
        filters += self._get_deleted_filter(deleted, filter_params)
        # get optional parameter 'all'
        all_histories = util.string_as_bool(kwd.get('all', False))
        # if parameter 'all' is true, throw exception if not admin
        # else add current user filter to query (default behaviour)
        if all_histories:
            if not trans.user_is_admin:
                message = "Only admins can query all histories"
                raise exceptions.AdminRequiredException(message)
        else:
            filters += [self.app.model.History.user == current_user]
        # and any sent in from the query string
        filters += self.filters.parse_filters(filter_params)

        order_by = self._parse_order_by(manager=self.manager,
                                        order_by_string=kwd.get(
                                            'order', 'create_time-dsc'))
        histories = self.manager.list(filters=filters,
                                      order_by=order_by,
                                      limit=limit,
                                      offset=offset)

        rval = []
        for history in histories:
            history_dict = self.serializer.serialize_to_view(
                history, user=trans.user, trans=trans, **serialization_params)
            rval.append(history_dict)
        return rval
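
The documented filter, paging, and ordering parameters combine freely; a hypothetical request (host and key are placeholders) repeating q/qv pairs exactly as in the docstring examples:

import requests

params = [
    ("key", "API_KEY"),
    ("q", "create_time-gt"), ("qv", "2015-01-29"),
    ("q", "tag-has"), ("qv", "experiment-1"),
    ("limit", "5"), ("offset", "3"),
    ("order", "name-dsc,create_time"),
]
r = requests.get("https://galaxy.example.org/api/histories", params=params)
for history in r.json():
    print(history["id"], history["name"])  # summary-view fields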
Example #13
    def __api_import_new_workflow(self, trans, payload, **kwd):
        data = payload['workflow']

        import_tools = util.string_as_bool(payload.get("import_tools", False))
        if import_tools and not trans.user_is_admin():
            raise exceptions.AdminRequiredException()

        publish = util.string_as_bool(payload.get("publish", False))
        # If 'publish' set, default to importable.
        importable = util.string_as_bool(payload.get("importable", publish))
        # Galaxy will try to upgrade tool versions that don't match exactly during
        # import; setting exact_tools prevents that.
        exact_tools = util.string_as_bool(payload.get("exact_tools", False))

        if publish and not importable:
            raise exceptions.RequestParameterInvalidException(
                "Published workflow must be importable.")

        from_dict_kwds = dict(
            source="API",
            publish=publish,
            exact_tools=exact_tools,
        )
        workflow, missing_tool_tups = self._workflow_from_dict(
            trans, data, **from_dict_kwds)

        if importable:
            self._make_item_accessible(trans.sa_session, workflow)
            trans.sa_session.flush()

        # id of the newly created Galaxy workflow
        workflow_id = workflow.id
        # API-encoded id
        encoded_id = trans.security.encode_id(workflow_id)

        # return list
        rval = []

        item = workflow.to_dict(value_mapper={'id': trans.security.encode_id})
        item['url'] = url_for('workflow', id=encoded_id)
        item['owner'] = workflow.user.username
        item['number_of_steps'] = len(workflow.latest_workflow.steps)
        rval.append(item)

        #
        if import_tools:
            tools = {}
            for key in data['steps']:
                item = data['steps'][key]
                if item is not None:
                    if 'tool_shed_repository' in item:
                        tool_shed_repository = item['tool_shed_repository']
                        if 'owner' in tool_shed_repository and 'changeset_revision' in tool_shed_repository and 'name' in tool_shed_repository and 'tool_shed' in tool_shed_repository:
                            toolstr = tool_shed_repository['owner'] \
                                + tool_shed_repository['changeset_revision'] \
                                + tool_shed_repository['name'] \
                                + tool_shed_repository['tool_shed']
                            tools[toolstr] = tool_shed_repository
            irm = InstallRepositoryManager(self.app)
            for k in tools:
                item = tools[k]
                tool_shed_url = 'https://' + item['tool_shed'] + '/'
                name = item['name']
                owner = item['owner']
                changeset_revision = item['changeset_revision']
                irm.install(tool_shed_url, name, owner, changeset_revision,
                            payload)
        return item
Example #14
File: jobs.py Project: eancelet/galaxy
    def index(self, trans: ProvidesUserContext, limit=500, offset=0, **kwd):
        """
        GET /api/jobs

        return jobs for current user

        if user is admin and user_details is True, then
        return jobs for all galaxy users based on filtering - this is an extended service

        :type   state: string or list
        :param  state: limit listing of jobs to those that match one of the included states. If none, all are returned.

        :type   tool_id: string or list
        :param  tool_id: limit listing of jobs to those that match one of the included tool_ids. If none, all are returned.

        :type   user_details: boolean
        :param  user_details: if true, and requestor is an admin, will return external job id and user email.

        :type   user_id: str
        :param  user_id: an encoded user id to restrict query to, must be own id if not admin user

        :type   limit: int
        :param  limit: Maximum number of jobs to return.

        :type   offset: int
        :param  offset: Return jobs starting from this specified position.
                        For example, if ``limit`` is set to 100 and ``offset`` to 200,
                        jobs 200-299 will be returned.

        :type   date_range_min: string '2014-01-01'
        :param  date_range_min: limit the listing of jobs to those updated on or after requested date

        :type   date_range_max: string '2014-12-31'
        :param  date_range_max: limit the listing of jobs to those updated on or before requested date

        :type   history_id: string
        :param  history_id: limit listing of jobs to those that match the history_id. If none, all are returned.

        :type   workflow_id: string
        :param  workflow_id: limit listing of jobs to those that match the workflow_id. If none, all are returned.

        :type   invocation_id: string
        :param  invocation_id: limit listing of jobs to those that match the invocation_id. If none, all are returned.

        :type   view: string
        :param  view: Determines columns to return. Defaults to 'collection'.

        :rtype:     list
        :returns:   list of dictionaries containing summary job information
        """
        state = kwd.get('state', None)
        is_admin = trans.user_is_admin
        user_details = kwd.get('user_details', False)
        user_id = kwd.get('user_id', None)
        view = kwd.get('view', 'collection')
        if view not in ('collection', 'admin_job_list'):
            raise exceptions.RequestParameterInvalidException(
                f"view parameter '{view} is invalid")
        if view == 'admin_job_list' and not is_admin:
            raise exceptions.AdminRequiredException(
                "Only admins can use the admin_job_list view")

        if user_id:
            decoded_user_id = self.decode_id(user_id)
        else:
            decoded_user_id = None
        if is_admin:
            if decoded_user_id is not None:
                query = trans.sa_session.query(
                    model.Job).filter(model.Job.user_id == decoded_user_id)
            else:
                query = trans.sa_session.query(model.Job)
        else:
            if decoded_user_id is not None and decoded_user_id != trans.user.id:
                raise exceptions.AdminRequiredException(
                    "Only admins can index the jobs of others")
            query = trans.sa_session.query(
                model.Job).filter(model.Job.user_id == trans.user.id)

        def build_and_apply_filters(query, objects, filter_func):
            if objects is not None:
                if isinstance(objects, str):
                    query = query.filter(filter_func(objects))
                elif isinstance(objects, list):
                    t = []
                    for obj in objects:
                        t.append(filter_func(obj))
                    query = query.filter(or_(*t))
            return query

        query = build_and_apply_filters(query, state,
                                        lambda s: model.Job.state == s)

        query = build_and_apply_filters(query, kwd.get('tool_id', None),
                                        lambda t: model.Job.tool_id == t)
        query = build_and_apply_filters(query, kwd.get('tool_id_like', None),
                                        lambda t: model.Job.tool_id.like(t))

        query = build_and_apply_filters(
            query, kwd.get('date_range_min', None),
            lambda dmin: model.Job.update_time >= dmin)
        query = build_and_apply_filters(
            query, kwd.get('date_range_max', None),
            lambda dmax: model.Job.update_time <= dmax)

        history_id = kwd.get('history_id', None)
        workflow_id = kwd.get('workflow_id', None)
        invocation_id = kwd.get('invocation_id', None)
        if history_id is not None:
            decoded_history_id = self.decode_id(history_id)
            query = query.filter(model.Job.history_id == decoded_history_id)
        if workflow_id or invocation_id:
            if workflow_id is not None:
                decoded_workflow_id = self.decode_id(workflow_id)
                wfi_step = trans.sa_session.query(
                    model.WorkflowInvocationStep).join(
                        model.WorkflowInvocation).join(model.Workflow).filter(
                            model.Workflow.stored_workflow_id ==
                            decoded_workflow_id, ).subquery()
            elif invocation_id is not None:
                decoded_invocation_id = self.decode_id(invocation_id)
                wfi_step = trans.sa_session.query(
                    model.WorkflowInvocationStep).filter(
                        model.WorkflowInvocationStep.workflow_invocation_id ==
                        decoded_invocation_id).subquery()
            query1 = query.join(wfi_step)
            query2 = query.join(
                model.ImplicitCollectionJobsJobAssociation).join(
                    wfi_step, model.ImplicitCollectionJobsJobAssociation.
                    implicit_collection_jobs_id ==
                    wfi_step.c.implicit_collection_jobs_id)
            query = query1.union(query2)

        if kwd.get('order_by') == 'create_time':
            order_by = model.Job.create_time.desc()
        else:
            order_by = model.Job.update_time.desc()
        query = query.order_by(order_by)

        query = query.offset(offset)
        query = query.limit(limit)

        out = []
        for job in query.all():
            job_dict = job.to_dict(view, system_details=is_admin)
            j = self.encode_all_ids(trans, job_dict, True)
            if view == 'admin_job_list':
                j['decoded_job_id'] = job.id
            if user_details:
                j['user_email'] = job.user.email
            out.append(j)

        return out
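
A minimal client-side sketch of how the endpoint above might be queried; the Galaxy URL and API key are placeholders, the 'state', 'tool_id' and 'order_by' parameters mirror the filters handled in the code, and 'limit'/'offset' are assumed to be parsed from the request in the part of the method not shown here:

import requests

GALAXY_URL = 'https://galaxy.example.org'  # assumed base URL
API_KEY = 'replace-with-a-real-key'        # assumed (admin) API key

# Request errored 'upload1' jobs, newest first, 20 at a time.
response = requests.get(
    '%s/api/jobs' % GALAXY_URL,
    params={
        'key': API_KEY,
        'state': 'error',
        'tool_id': 'upload1',
        'order_by': 'create_time',
        'limit': 20,
        'offset': 0,
    })
for job in response.json():
    print('%s %s %s' % (job['id'], job['state'], job['update_time']))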
Example #15
    def list(self, trans, deleted: Optional[bool] = False):
        """
        Return a list of libraries from the DB.

        :param  deleted: if True, show only ``deleted`` libraries, if False show only ``non-deleted``
        :type   deleted: boolean (optional)

        :returns: query that will emit all accessible libraries
        :rtype:   sqlalchemy query
        :returns: dict of 3 sets with available actions for user's accessible
                  libraries and a set of ids of all public libraries. These are
                  used for limiting the number of queries when dictifying the
                  libraries later on.
        :rtype:   dict
        """
        is_admin = trans.user_is_admin
        query = trans.sa_session.query(trans.app.model.Library)
        library_access_action = trans.app.security_agent.permitted_actions.LIBRARY_ACCESS.action
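        # Prefetch the ids of every library that carries an explicit
        # LIBRARY_ACCESS permission; libraries outside this set are public.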
        restricted_library_ids = {
            lp.library_id
            for lp in (
                trans.sa_session.query(trans.model.LibraryPermissions).filter(
                    trans.model.LibraryPermissions.table.c.action ==
                    library_access_action).distinct())
        }
        prefetched_ids = {'restricted_library_ids': restricted_library_ids}
        if is_admin:
            if deleted is None:
                #  Flag is not specified, do not filter on it.
                pass
            elif deleted:
                query = query.filter(
                    trans.app.model.Library.table.c.deleted == true())
            else:
                query = query.filter(
                    trans.app.model.Library.table.c.deleted == false())
        else:
            #  Nonadmins can't see deleted libraries
            if deleted:
                raise exceptions.AdminRequiredException()
            else:
                query = query.filter(
                    trans.app.model.Library.table.c.deleted == false())
                current_user_role_ids = [
                    role.id for role in trans.get_current_user_roles()
                ]
                all_actions = trans.sa_session.query(
                    trans.model.LibraryPermissions).filter(
                        trans.model.LibraryPermissions.table.c.role_id.in_(
                            current_user_role_ids))
                library_add_action = trans.app.security_agent.permitted_actions.LIBRARY_ADD.action
                library_modify_action = trans.app.security_agent.permitted_actions.LIBRARY_MODIFY.action
                library_manage_action = trans.app.security_agent.permitted_actions.LIBRARY_MANAGE.action
                accessible_restricted_library_ids = set()
                allowed_library_add_ids = set()
                allowed_library_modify_ids = set()
                allowed_library_manage_ids = set()
                for action in all_actions:
                    if action.action == library_access_action:
                        accessible_restricted_library_ids.add(
                            action.library_id)
                    if action.action == library_add_action:
                        allowed_library_add_ids.add(action.library_id)
                    if action.action == library_modify_action:
                        allowed_library_modify_ids.add(action.library_id)
                    if action.action == library_manage_action:
                        allowed_library_manage_ids.add(action.library_id)
                query = query.filter(
                    or_(
                        not_(
                            trans.model.Library.table.c.id.in_(
                                restricted_library_ids)),
                        trans.model.Library.table.c.id.in_(
                            accessible_restricted_library_ids)))
                prefetched_ids[
                    'allowed_library_add_ids'] = allowed_library_add_ids
                prefetched_ids[
                    'allowed_library_modify_ids'] = allowed_library_modify_ids
                prefetched_ids[
                    'allowed_library_manage_ids'] = allowed_library_manage_ids
        return query, prefetched_ids
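
A hedged sketch of how the returned tuple might be consumed when dictifying; ``library_manager`` is a hypothetical holder of the method above, and the ``public``/``can_user_add`` keys are illustrative, not part of the Galaxy API:

query, prefetched_ids = library_manager.list(trans, deleted=False)
restricted_ids = prefetched_ids['restricted_library_ids']
add_ids = prefetched_ids.get('allowed_library_add_ids', set())

libraries = []
for library in query.all():
    entry = library.to_dict()
    # Set membership against the prefetched ids replaces one
    # permission query per library.
    entry['public'] = library.id not in restricted_ids
    entry['can_user_add'] = trans.user_is_admin or library.id in add_ids
    libraries.append(entry)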
Example #16
    def load(self, trans, payload=None, **kwd):
        """
        * POST /api/libraries/datasets
        Load dataset from the given source into the library.
        Source can be:
            user directory - root folder specified in galaxy.ini as "$user_library_import_dir"
                example path: path/to/galaxy/$user_library_import_dir/user@example.com/{user can browse everything here}
                the folder with the user login has to be created beforehand
            (admin) import directory - root folder specified in galaxy.ini as "$library_import_dir"
                example path: path/to/galaxy/$library_import_dir/{admin can browse everything here}
            (admin) any absolute or relative path - option allowed with "allow_library_path_paste" in galaxy.ini

        :param   payload: dictionary structure containing:
            :param  encoded_folder_id:      the encoded id of the folder to import dataset(s) to
            :type   encoded_folder_id:      an encoded id string
            :param  source:                 source the datasets should be loaded from
            :type   source:                 str
            :param  link_data:              flag whether to link the dataset to its source or copy it into Galaxy, defaults to copy;
                                            when linking is set to True, all symlinks will be resolved _once_
            :type   link_data:              bool
            :param  preserve_dirs:          flag whether to preserve the directory structure when importing a directory;
                                            if False, only the datasets will be imported
            :type   preserve_dirs:          bool
            :param  file_type:              file type of the loaded datasets, defaults to 'auto' (autodetect)
            :type   file_type:              str
            :param  dbkey:                  dbkey of the loaded genome, defaults to '?' (unknown)
            :type   dbkey:                  str
        :type   payload: dictionary
        :returns:   dict containing information about the created upload job
        :rtype:     dictionary
        :raises: RequestParameterMissingException, AdminRequiredException, ConfigDoesNotAllowException, RequestParameterInvalidException,
                    InsufficientPermissionsException, ObjectNotFound
        """
        if payload:
            kwd.update(payload)
        kwd['space_to_tab'] = False
        kwd['to_posix_lines'] = True
        kwd['dbkey'] = kwd.get('dbkey', '?')
        kwd['file_type'] = kwd.get('file_type', 'auto')
        kwd['link_data_only'] = 'link_to_files' if util.string_as_bool(
            kwd.get('link_data', False)) else 'copy_files'
        encoded_folder_id = kwd.get('encoded_folder_id', None)
        if encoded_folder_id is not None:
            folder_id = self.folder_manager.cut_and_decode(
                trans, encoded_folder_id)
        else:
            raise exceptions.RequestParameterMissingException(
                'The required attribute encoded_folder_id is missing.')
        path = kwd.get('path', None)
        if path is None:
            raise exceptions.RequestParameterMissingException(
                'The required attribute path is missing.')
        folder = self.folder_manager.get(trans, folder_id)

        source = kwd.get('source', None)
        if source not in [
                'userdir_file', 'userdir_folder', 'importdir_file',
                'importdir_folder', 'admin_path'
        ]:
            raise exceptions.RequestParameterMissingException(
                'You have to specify the "source" parameter. Possible values are "userdir_file", "userdir_folder", "admin_path", "importdir_file" and "importdir_folder".'
            )
        if source in ['importdir_file', 'importdir_folder']:
            if not trans.user_is_admin:
                raise exceptions.AdminRequiredException(
                    'Only admins can import from importdir.')
            if not trans.app.config.library_import_dir:
                raise exceptions.ConfigDoesNotAllowException(
                    'The configuration of this Galaxy instance does not allow admins to import into library from importdir.'
                )
            import_base_dir = trans.app.config.library_import_dir
            path = os.path.join(import_base_dir, path)
        if source in ['userdir_file', 'userdir_folder']:
            user_login = trans.user.email
            user_base_dir = trans.app.config.user_library_import_dir
            if user_base_dir is None:
                raise exceptions.ConfigDoesNotAllowException(
                    'The configuration of this Galaxy instance does not allow upload from user directories.'
                )
            full_dir = os.path.join(user_base_dir, user_login)
            if not path.lower().startswith(full_dir.lower()):
                path = os.path.join(full_dir, path)
            if not os.path.exists(path):
                raise exceptions.RequestParameterInvalidException(
                    'Given path does not exist on the host.')
            if not self.folder_manager.can_add_item(trans, folder):
                raise exceptions.InsufficientPermissionsException(
                    'You do not have proper permission to add items to the given folder.'
                )
        if source == 'admin_path':
            if not trans.app.config.allow_library_path_paste:
                raise exceptions.ConfigDoesNotAllowException(
                    'The configuration of this Galaxy instance does not allow admins to import into library from path.'
                )
            if not trans.user_is_admin:
                raise exceptions.AdminRequiredException(
                    'Only admins can import from path.')

        # Set up the traditional tool state/params
        tool_id = 'upload1'
        tool = trans.app.toolbox.get_tool(tool_id)
        state = tool.new_state(trans)
        tool.populate_state(trans, tool.inputs, kwd, state.inputs)
        tool_params = state.inputs
        dataset_upload_inputs = []
        for input in tool.inputs.itervalues():
            if input.type == "upload_dataset":
                dataset_upload_inputs.append(input)
        library_bunch = upload_common.handle_library_params(
            trans, {}, trans.security.encode_id(folder.id))
        abspath_datasets = []
        # ``path`` already includes the import base directory for importdir
        # sources (joined above), so it is used as-is here.
        kwd['filesystem_paths'] = path
        # user wants to import one file only
        if source in ["userdir_file", "importdir_file"]:
            file = os.path.abspath(path)
            abspath_datasets.append(trans.webapp.controllers['library_common'].
                                    make_library_uploaded_dataset(
                                        trans, 'api', kwd,
                                        os.path.basename(file), file,
                                        'server_dir', library_bunch))
        # user wants to import whole folder
        if source == "userdir_folder":
            uploaded_datasets_bunch = trans.webapp.controllers[
                'library_common'].get_path_paste_uploaded_datasets(
                    trans, 'api', kwd, library_bunch, 200, '')
            uploaded_datasets = uploaded_datasets_bunch[0]
            if uploaded_datasets is None:
                raise exceptions.ObjectNotFound(
                    'Given folder does not contain any datasets.')
            for ud in uploaded_datasets:
                ud.path = os.path.abspath(ud.path)
                abspath_datasets.append(ud)
        #  user wants to import from path
        if source in ["admin_path", "importdir_folder"]:
            # validate the path is within root
            uploaded_datasets_bunch = trans.webapp.controllers[
                'library_common'].get_path_paste_uploaded_datasets(
                    trans, 'api', kwd, library_bunch, 200, '')
            uploaded_datasets = uploaded_datasets_bunch[0]
            if uploaded_datasets is None:
                raise exceptions.ObjectNotFound(
                    'Given folder does not contain any datasets.')
            for ud in uploaded_datasets:
                ud.path = os.path.abspath(ud.path)
                abspath_datasets.append(ud)
        json_file_path = upload_common.create_paramfile(
            trans, abspath_datasets)
        data_list = [ud.data for ud in abspath_datasets]
        job_params = {}
        job_params['link_data_only'] = dumps(
            kwd.get('link_data_only', 'copy_files'))
        job_params['uuid'] = dumps(kwd.get('uuid', None))
        job, output = upload_common.create_job(trans,
                                               tool_params,
                                               tool,
                                               json_file_path,
                                               data_list,
                                               folder=folder,
                                               job_params=job_params)
        trans.sa_session.add(job)
        trans.sa_session.flush()
        job_dict = job.to_dict()
        job_dict['id'] = trans.security.encode_id(job_dict['id'])
        return job_dict
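
A minimal client-side sketch of invoking this endpoint, assuming an admin API key; the URL, folder id, and server path are placeholders, and the payload keys come from the docstring above:

import requests

GALAXY_URL = 'https://galaxy.example.org'  # assumed base URL
API_KEY = 'replace-with-a-real-key'        # assumed admin API key

payload = {
    'key': API_KEY,
    'encoded_folder_id': 'F0123456789abcdef',  # hypothetical encoded id
    'source': 'admin_path',
    'path': '/data/shared/sample.fastq',       # hypothetical server path
    'link_data': True,        # link instead of copying into Galaxy
    'file_type': 'fastqsanger',
    'dbkey': 'hg38',
}
response = requests.post('%s/api/libraries/datasets' % GALAXY_URL,
                         data=payload)
print(response.json())  # dict describing the created upload job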