Example 1
 def history_set_default_permissions( self, trans, id=None, **kwd ):
     """Sets the permissions on a history.
     """
     # TODO: unencoded id
     if trans.user:
         if 'update_roles_button' in kwd:
             history = None
             if id:
                 try:
                     id = int( id )
                 except ( TypeError, ValueError ):
                     id = None
                 if id:
                     history = trans.sa_session.query( trans.app.model.History ).get( id )
             if not history:
                 # If we haven't retrieved a history, use the current one
                 history = trans.get_history()
             p = Params( kwd )
             permissions = {}
             for k, v in trans.app.model.Dataset.permitted_actions.items():
                 in_roles = p.get( k + '_in', [] )
                 if not isinstance( in_roles, list ):
                     in_roles = [ in_roles ]
                 in_roles = [ trans.sa_session.query( trans.app.model.Role ).get( x ) for x in in_roles ]
                 permissions[ trans.app.security_agent.get_action( v.action ) ] = in_roles
             dataset = 'dataset' in kwd
             bypass_manage_permission = 'bypass_manage_permission' in kwd
             trans.app.security_agent.history_set_default_permissions( history, permissions,
                                                                       dataset=dataset, bypass_manage_permission=bypass_manage_permission )
             return trans.show_ok_message( 'Default history permissions have been changed.' )
         return trans.fill_template( 'history/permissions.mako' )
     else:
         # user not logged in, history group must be only public
         return trans.show_error_message( "You must be logged in to change a history's default permissions." )
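For reference, the handler above builds a mapping of {security action -> list of Role objects}, one entry per item in Dataset.permitted_actions, before handing it to security_agent.history_set_default_permissions(). The following is a minimal, self-contained sketch of that aggregation step only; permitted_actions, form_params, and load_role are hypothetical stand-ins for the Galaxy model, the posted form kwargs, and the SQLAlchemy Role lookup.

# Illustrative sketch of the permissions aggregation in Example 1 (stand-in names, not the Galaxy API).
permitted_actions = {'DATASET_ACCESS': 'access', 'DATASET_MANAGE_PERMISSIONS': 'manage'}
form_params = {'DATASET_ACCESS_in': ['3', '7']}   # role ids posted by the form as "<action>_in"

def load_role(role_id):                           # stand-in for trans.sa_session.query(Role).get(id)
    return {'id': int(role_id)}

permissions = {}
for param_key, action in permitted_actions.items():
    in_roles = form_params.get(param_key + '_in', [])
    if not isinstance(in_roles, list):            # a single selected role arrives as a bare value
        in_roles = [in_roles]
    permissions[action] = [load_role(r) for r in in_roles]

# permissions == {'access': [{'id': 3}, {'id': 7}], 'manage': []}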
Example 2
    def history( self, trans, as_xml=False, show_deleted=None, show_hidden=None, hda_id=None, **kwd ):
        """Display the current history, creating a new history if necessary.

        NOTE: No longer accepts "id" or "template" options for security reasons.
        """
        if as_xml:
            return self.history_as_xml( trans,
                show_deleted=string_as_bool( show_deleted ), show_hidden=string_as_bool( show_hidden ) )

        # get all datasets server-side, client-side will get flags and render appropriately
        show_deleted = string_as_bool_or_none( show_deleted )
        show_purged  = show_deleted
        show_hidden  = string_as_bool_or_none( show_hidden )
        params = Params( kwd )
        message = params.get( 'message', '' )
        #TODO: ugh...
        message = message if message != 'None' else ''
        status = params.get( 'status', 'done' )

        if trans.app.config.require_login and not trans.user:
            return trans.fill_template( '/no_access.mako', message = 'Please log in to access Galaxy histories.' )

        def err_msg( where=None ):
            where = where if where else 'getting the history data from the server'
            err_msg = ( 'An error occurred %s. '
                      + 'Please contact a Galaxy administrator if the problem persists.' ) %( where )
            return err_msg, 'error'

        history_dictionary = {}
        hda_dictionaries   = []
        try:
            history = trans.get_history( create=True )
            hdas = self.get_history_datasets( trans, history,
                show_deleted=True, show_hidden=True, show_purged=True )

            for hda in hdas:
                try:
                    hda_dictionaries.append( self.get_hda_dict( trans, hda ) )

                except Exception as exc:
                    # don't fail entire list if hda err's, record and move on
                    log.error( 'Error bootstrapping hda %d: %s', hda.id, str( exc ), exc_info=True )
                    hda_dictionaries.append( self.get_hda_dict_with_error( trans, hda, str( exc ) ) )

            # re-use the hdas above to get the history data...
            history_dictionary = self.get_history_dict( trans, history, hda_dictionaries=hda_dictionaries )
Example 3
    def index(self, trans, tool_id=None, data_secret=None, **kwd):
        """Manages ascynchronous connections"""

        if tool_id is None:
            return "tool_id argument is required"
        tool_id = str(tool_id)

        # redirect to main when getting no parameters
        if not kwd:
            return trans.response.send_redirect("/index")

        history = trans.get_history(create=True)
        params = Params(kwd, sanitize=False)
        STATUS = params.STATUS
        URL = params.URL
        data_id = params.data_id

        log.debug('async dataid -> %s' % data_id)
        trans.log_event('Async dataid -> %s' % str(data_id))

        # initialize the tool
        toolbox = self.get_toolbox()
        tool = toolbox.get_tool(tool_id)
        if not tool:
            return "Tool with id %s not found" % tool_id

        #
        # we have an incoming data_id
        #
        if data_id:
            if not URL:
                return "No URL parameter was submitted for data %s" % data_id
            data = trans.sa_session.query(
                trans.model.HistoryDatasetAssociation).get(data_id)

            if not data:
                return "Data %s does not exist or has already been deleted" % data_id

            if STATUS == 'OK':
                key = hmac_new(trans.app.config.tool_secret,
                               "%d:%d" % (data.id, data.history_id))
                if key != data_secret:
                    return "You do not have permission to alter data %s." % data_id
                # push the job into the queue
                data.state = data.blurb = data.states.RUNNING
                log.debug('executing tool %s' % tool.id)
                trans.log_event('Async executing tool %s' % tool.id,
                                tool_id=tool.id)
                galaxy_url = trans.request.base + '/async/%s/%s/%s' % (
                    tool_id, data.id, key)
                galaxy_url = params.get("GALAXY_URL", galaxy_url)
                params = dict(URL=URL,
                              GALAXY_URL=galaxy_url,
                              name=data.name,
                              info=data.info,
                              dbkey=data.dbkey,
                              data_type=data.ext)
                # Assume there is exactly one output file possible
                params[tool.outputs.keys()[0]] = data.id
                tool.execute(trans, incoming=params)
            else:
                log.debug('async error -> %s' % STATUS)
                trans.log_event('Async error -> %s' % STATUS)
                data.state = data.blurb = jobs.JOB_ERROR
                data.info = "Error -> %s" % STATUS

            trans.sa_session.flush()

            return "Data %s with status %s received. OK" % (data_id, STATUS)
        else:
            #
            # no data_id must be parameter submission
            #
            if params.data_type:
                GALAXY_TYPE = params.data_type
            elif params.galaxyFileFormat == 'wig':  # this is an undocumented legacy special case
                GALAXY_TYPE = 'wig'
            else:
                GALAXY_TYPE = params.GALAXY_TYPE or tool.outputs.values()[0].format

            GALAXY_NAME = params.name or params.GALAXY_NAME or '%s query' % tool.name
            GALAXY_INFO = params.info or params.GALAXY_INFO or params.galaxyDescription or ''
            GALAXY_BUILD = params.dbkey or params.GALAXY_BUILD or params.galaxyFreeze or '?'

            # data = datatypes.factory(ext=GALAXY_TYPE)()
            # data.ext   = GALAXY_TYPE
            # data.name  = GALAXY_NAME
            # data.info  = GALAXY_INFO
            # data.dbkey = GALAXY_BUILD
            # data.state = jobs.JOB_OK
            # history.datasets.add_dataset( data )

            data = trans.app.model.HistoryDatasetAssociation(
                create_dataset=True,
                sa_session=trans.sa_session,
                extension=GALAXY_TYPE)
            trans.app.security_agent.set_all_dataset_permissions(
                data.dataset,
                trans.app.security_agent.history_get_default_permissions(
                    trans.history))
            data.name = GALAXY_NAME
            data.dbkey = GALAXY_BUILD
            data.info = GALAXY_INFO
            trans.sa_session.add(
                data
            )  # Need to add data to session before setting state (setting state requires that the data object is in the session, but this may change)
            data.state = data.states.NEW
            open(data.file_name, 'wb').close()  # create the file
            trans.history.add_dataset(data, genome_build=GALAXY_BUILD)
            trans.sa_session.add(trans.history)
            trans.sa_session.flush()
            trans.log_event("Added dataset %d to history %d" %
                            (data.id, trans.history.id),
                            tool_id=tool_id)

            try:
                key = hmac_new(trans.app.config.tool_secret,
                               "%d:%d" % (data.id, data.history_id))
                galaxy_url = trans.request.base + '/async/%s/%s/%s' % (
                    tool_id, data.id, key)
                params.update({'GALAXY_URL': galaxy_url})
                params.update({'data_id': data.id})
                # Use provided URL or fallback to tool action
                url = URL or tool.action
                # Does url already have query params?
                if '?' in url:
                    url_join_char = '&'
                else:
                    url_join_char = '?'
                url = "%s%s%s" % (url, url_join_char,
                                  urllib.urlencode(params.flatten()))
                log.debug("connecting to -> %s" % url)
                trans.log_event("Async connecting to -> %s" % url)
                text = urllib.urlopen(url).read(-1)
                text = text.strip()
                if not text.endswith('OK'):
                    raise Exception(text)
                data.state = data.blurb = data.states.RUNNING
            except Exception as e:
                data.info = str(e)
                data.state = data.blurb = data.states.ERROR

            trans.sa_session.flush()
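A note on the data_secret check in the example above: the handler derives a per-dataset key as an HMAC over "<dataset id>:<history id>" using the server-wide tool_secret, embeds it in the /async/<tool_id>/<data id>/<key> callback URL, and later requires the remote service to echo it back. A rough standalone approximation with the standard library is sketched below; the actual hmac_new helper and its digest algorithm may differ, so treat this as an assumption.

import hashlib
import hmac

def make_data_secret(tool_secret, dataset_id, history_id):
    # Assumed equivalent of hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
    msg = ("%d:%d" % (dataset_id, history_id)).encode("utf-8")
    return hmac.new(tool_secret.encode("utf-8"), msg, hashlib.sha1).hexdigest()

def verify_data_secret(tool_secret, dataset_id, history_id, data_secret):
    expected = make_data_secret(tool_secret, dataset_id, history_id)
    # Constant-time comparison; the handler itself uses a plain != check.
    return hmac.compare_digest(expected, data_secret or "")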
Example 4
    def _upload_test_data(self):
        """
        Upload test datasets, which are defined in the <test></test>
        section of the provided tool.
        """
        if not self._tool.tests:
            raise ValueError('Tests are not defined.')

        self._test = self._tool.tests[0]

        # All inputs with the type 'data'
        self._data_inputs = {
            x.name: x
            for x in self._tool.input_params if x.type == 'data'
        }

        # Datasets from the <test></test> section
        test_datasets = {
            input_name: self._test.inputs[input_name][0]
            for input_name in self._data_inputs.keys()
            if input_name in self._test.inputs.keys()
        }

        # Conditional datasets
        for name, value in self._test.inputs.items():
            if '|' in name:
                input_name = name.split('|')[1]
                if input_name in self._data_inputs.keys():
                    test_datasets.update(
                        {input_name: self._test.inputs[name][0]})

        if not test_datasets.keys():
            not_supported_input_types = [
                k for k, v in self._tool.inputs.items()
                if v.type == 'repeat' or v.type == 'data_collection'
            ]
            if not_supported_input_types:
                raise ValueError('Not supported input types.')
            else:
                # Some tests don't have data inputs at all,
                # so we can generate a tour without them
                self._use_datasets = False
                return

        test_data_paths = [os.path.abspath('test-data')]
        test_data_cache_dir = os.path.abspath(
            os.environ.get('GALAXY_TEST_DATA_REPO_CACHE', 'test-data-cache'))
        test_data_paths.extend(
            [x[0] for x in os.walk(test_data_cache_dir) if '.git' not in x[0]])
        if self._tool.tool_shed:
            test_data_paths.append(
                os.path.abspath(os.path.join(self._tool.tool_dir,
                                             'test-data')))

        # Upload all test datasets
        for input_name, input in self._data_inputs.items():
            if input_name in test_datasets.keys():
                for i, data_path in enumerate(test_data_paths):
                    input_path = os.path.join(data_path,
                                              test_datasets[input_name])
                    if os.path.exists(input_path):
                        break
                    elif i + 1 == len(test_data_paths):  # the last path
                        raise ValueError('Test dataset "%s" doesn\'t exist.' %
                                         input_name)

                upload_tool = self._trans.app.toolbox.get_tool('upload1')
                filename = os.path.basename(input_path)

                with open(input_path, 'r') as f:
                    content = f.read()
                    headers = {
                        'content-disposition':
                        'form-data; name="{}"; filename="{}"'.format(
                            'files_0|file_data', filename),
                    }

                    input_file = FieldStorage(headers=headers)
                    input_file.file = input_file.make_file()
                    input_file.file.write(content)

                    inputs = {
                        'dbkey': '?',  # is it always a question mark?
                        'file_type': input.extensions[0],
                        'files_0|type': 'upload_dataset',
                        'files_0|space_to_tab': None,
                        'files_0|to_posix_lines': 'Yes',
                        'files_0|file_data': input_file,
                    }

                    params = Params(inputs, sanitize=False)
                    incoming = params.__dict__
                    output = upload_tool.handle_input(self._trans,
                                                      incoming,
                                                      history=None)

                    job_errors = output.get('job_errors', [])
                    if job_errors:
                        # self._errors.extend(job_errors)
                        raise ValueError('Cannot upload a dataset.')
                    else:
                        self._hids.update(
                            {input_name: output['out_data'][0][1].hid})
Example 5
    def get(self,
            trans,
            history_id,
            bucket_name,
            objects,
            authz_id,
            input_args=None):
        """
        Implements the logic of getting a file from a cloud-based storage (e.g., Amazon S3)
        and persisting it as a Galaxy dataset.

        This manager does NOT require user credentials; instead, it uses a more secure method,
        which leverages CloudAuthz (https://github.com/galaxyproject/cloudauthz) and automatically
        requests temporary credentials to access the defined resources.

        :type  trans:       galaxy.webapps.base.webapp.GalaxyWebTransaction
        :param trans:       Galaxy web transaction

        :type  history_id:  string
        :param history_id:  the (decoded) id of the history into which the object should be received.

        :type  bucket_name: string
        :param bucket_name: the name of a bucket from which data should be fetched (e.g., a bucket name on AWS S3).

        :type  objects:     list of string
        :param objects:     the names of the objects to be fetched.

        :type  authz_id:    int
        :param authz_id:    the ID of the CloudAuthz record to be used for authorizing access to the resource
                            provider. You may get a list of the defined authorizations by sending a GET request to
                            `/api/cloud/authz`; you can also POST to `/api/cloud/authz` to define a new authorization.

        :type  input_args:  dict
        :param input_args:  [Optional] a dictionary of input parameters:
                            dbkey, file_type, space_to_tab, to_posix_lines (see galaxy/webapps/galaxy/api/cloud.py)

        :rtype:             list of galaxy.model.Dataset
        :return:            a list of datasets created for the fetched files.
        """
        if CloudProviderFactory is None:
            raise Exception(NO_CLOUDBRIDGE_ERROR_MESSAGE)

        if input_args is None:
            input_args = {}

        if not hasattr(trans.app, 'authnz_manager'):
            err_msg = "The OpenID Connect protocol, a required feature for getting data from cloud, " \
                      "is not enabled on this Galaxy instance."
            log.debug(err_msg)
            raise MessageException(err_msg)

        cloudauthz = trans.app.authnz_manager.try_get_authz_config(
            trans.sa_session, trans.user.id, authz_id)
        credentials = trans.app.authnz_manager.get_cloud_access_credentials(
            cloudauthz, trans.sa_session, trans.user.id, trans.request)
        connection = self.configure_provider(cloudauthz.provider, credentials)
        try:
            bucket = connection.storage.buckets.get(bucket_name)
            if bucket is None:
                raise RequestParameterInvalidException(
                    f"The bucket `{bucket_name}` not found.")
        except Exception as e:
            raise ItemAccessibilityException(
                "Could not get the bucket `{}`: {}".format(
                    bucket_name, util.unicodify(e)))

        datasets = []
        for obj in objects:
            try:
                key = bucket.objects.get(obj)
            except Exception as e:
                raise MessageException(
                    "The following error occurred while getting the object {}: {}"
                    .format(obj, util.unicodify(e)))
            if key is None:
                log.exception(
                    "Could not get object `{}` for user `{}`. Object may not exist, or the provided credentials are "
                    "invalid or not authorized to read the bucket/object.".
                    format(obj, trans.user.id))
                raise ObjectNotFound(
                    "Could not get the object `{}`. Please check if the object exists, and credentials are valid and "
                    "authorized to read the bucket and object. ".format(obj))

            params = Params(self._get_inputs(obj, key, input_args),
                            sanitize=False)
            incoming = params.__dict__
            history = trans.sa_session.query(
                trans.app.model.History).get(history_id)
            if not history:
                raise ObjectNotFound("History with ID `{}` not found.".format(
                    trans.app.security.encode_id(history_id)))
            output = trans.app.toolbox.get_tool('upload1').handle_input(
                trans, incoming, history=history)

            job_errors = output.get('job_errors', [])
            if job_errors:
                raise ValueError(
                    'The following error occurred while getting the given object(s) from {}: {}'
                    .format(cloudauthz.provider, job_errors))
            else:
                for d in output['out_data']:
                    datasets.append(d[1].dataset)

        return datasets
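A hypothetical call to the get() method above, assuming it lives on a cloud manager object (cloud_manager here is a placeholder name) and that trans is an authenticated Galaxy web transaction; the history id, bucket, object keys, authz_id, and input_args values are all made up for illustration.

datasets = cloud_manager.get(
    trans,
    history_id=decoded_history_id,                 # an already-decoded history id (placeholder)
    bucket_name="my-example-bucket",
    objects=["reads/sample_1.fastq.gz", "reads/sample_2.fastq.gz"],
    authz_id=1,                                    # a CloudAuthz record created via POST /api/cloud/authz
    input_args={"file_type": "fastqsanger.gz", "dbkey": "hg38"},
)
for dataset in datasets:
    print(dataset.id, dataset.state)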
Example 6
    def index(self, trans, tool_id=None, data_secret=None, **kwd):
        """Manages ascynchronous connections"""

        if tool_id is None:
            return "tool_id argument is required"
        tool_id=str(tool_id)
        #log.debug('async params -> %s' % kwd)

        # redirect to main when getting no parameters
        if not kwd:
            return trans.response.send_redirect( "/index" )

        history = trans.get_history( create=True )
        params  = Params(kwd, sanitize=False)
        STATUS = params.STATUS
        URL = params.URL
        data_id = params.data_id

        log.debug('async dataid -> %s' % data_id)
        trans.log_event( 'Async dataid -> %s' % str(data_id) )

        # initialize the tool
        toolbox = self.get_toolbox()
        tool = toolbox.get_tool( tool_id )
        if not tool:
            return "Tool with id %s not found" % tool_id

        #
        # we have an incoming data_id
        #
        if data_id:
            if not URL:
                return "No URL parameter was submitted for data %s" % data_id
            data = trans.sa_session.query( trans.model.HistoryDatasetAssociation ).get( data_id )

            if not data:
                return "Data %s does not exist or has already been deleted" % data_id

            if STATUS == 'OK':
                key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
                if key != data_secret:
                    return "You do not have permission to alter data %s." % data_id
                # push the job into the queue
                data.state = data.blurb = data.states.RUNNING
                log.debug('executing tool %s' % tool.id)
                trans.log_event( 'Async executing tool %s' % tool.id, tool_id=tool.id )
                galaxy_url  = trans.request.base + '/async/%s/%s/%s' % ( tool_id, data.id, key )
                galaxy_url = params.get("GALAXY_URL",galaxy_url)
                params = dict( URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext )
                # Assume there is exactly one output file possible
                params[tool.outputs.keys()[0]] = data.id
                tool.execute( trans, incoming=params )
            else:
                log.debug('async error -> %s' % STATUS)
                trans.log_event( 'Async error -> %s' % STATUS )
                data.state = data.blurb = jobs.JOB_ERROR
                data.info  = "Error -> %s" % STATUS

            trans.sa_session.flush()

            return "Data %s with status %s received. OK" % (data_id, STATUS)
        else:
            #
            # no data_id must be parameter submission
            #
            if params.data_type:
                GALAXY_TYPE = params.data_type
            elif params.galaxyFileFormat == 'wig': #this is an undocumented legacy special case
                GALAXY_TYPE = 'wig'
            else:
                GALAXY_TYPE = params.GALAXY_TYPE  or  tool.outputs.values()[0].format

            GALAXY_NAME = params.name or params.GALAXY_NAME or '%s query' % tool.name
            GALAXY_INFO = params.info or params.GALAXY_INFO or params.galaxyDescription or ''
            GALAXY_BUILD = params.dbkey or params.GALAXY_BUILD or params.galaxyFreeze or '?'

            #data = datatypes.factory(ext=GALAXY_TYPE)()
            #data.ext   = GALAXY_TYPE
            #data.name  = GALAXY_NAME
            #data.info  = GALAXY_INFO
            #data.dbkey = GALAXY_BUILD
            #data.state = jobs.JOB_OK
            #history.datasets.add_dataset( data )

            data = trans.app.model.HistoryDatasetAssociation( create_dataset=True, sa_session=trans.sa_session, extension=GALAXY_TYPE )
            trans.app.security_agent.set_all_dataset_permissions( data.dataset, trans.app.security_agent.history_get_default_permissions( trans.history ) )
            data.name = GALAXY_NAME
            data.dbkey = GALAXY_BUILD
            data.info = GALAXY_INFO
            trans.sa_session.add( data ) #Need to add data to session before setting state (setting state requires that the data object is in the session, but this may change)
            data.state = data.states.NEW
            open( data.file_name, 'wb' ).close() #create the file
            trans.history.add_dataset( data, genome_build=GALAXY_BUILD )
            trans.sa_session.add( trans.history )
            trans.sa_session.flush()
            trans.log_event( "Added dataset %d to history %d" %(data.id, trans.history.id ), tool_id=tool_id )

            try:
                key = hmac_new( trans.app.config.tool_secret, "%d:%d" % ( data.id, data.history_id ) )
                galaxy_url  = trans.request.base + '/async/%s/%s/%s' % ( tool_id, data.id, key )
                params.update( { 'GALAXY_URL' :galaxy_url } )
                params.update( { 'data_id' :data.id } )
                url  = tool.action + '?' + urllib.urlencode( params.flatten() )
                log.debug("connecting to -> %s" % url)
                trans.log_event( "Async connecting to -> %s" % url )
                text =  urllib.urlopen(url).read(-1)
                text = text.strip()
                if not text.endswith('OK'):
                    raise Exception(text)
                data.state = data.blurb = data.states.RUNNING
            except Exception as e:
                data.info  = str(e)
                data.state = data.blurb = data.states.ERROR

            trans.sa_session.flush()
Example 7
    def index(self, trans, tool_id=None, data_secret=None, **kwd):
        """Manages ascynchronous connections"""

        if tool_id is None:
            return "tool_id argument is required"
        tool_id = str(tool_id)

        # redirect to main when getting no parameters
        if not kwd:
            return trans.response.send_redirect("/index")

        params = Params(kwd, sanitize=False)
        STATUS = params.STATUS
        URL = params.URL
        data_id = params.data_id

        log.debug('async dataid -> %s' % data_id)
        trans.log_event('Async dataid -> %s' % str(data_id))

        # initialize the tool
        toolbox = self.get_toolbox()
        tool = toolbox.get_tool(tool_id)
        if not tool:
            return "Tool with id %s not found" % tool_id

        #
        # we have an incoming data_id
        #
        if data_id:
            if not URL:
                return "No URL parameter was submitted for data %s" % data_id
            data = trans.sa_session.query(
                trans.model.HistoryDatasetAssociation).get(data_id)

            if not data:
                return "Data %s does not exist or has already been deleted" % data_id

            if STATUS == 'OK':
                key = hmac_new(trans.app.config.tool_secret,
                               "%d:%d" % (data.id, data.history_id))
                if key != data_secret:
                    return "You do not have permission to alter data %s." % data_id
                # push the job into the queue
                data.state = data.blurb = data.states.RUNNING
                log.debug('executing tool %s' % tool.id)
                trans.log_event('Async executing tool %s' % tool.id,
                                tool_id=tool.id)
                galaxy_url = trans.request.base + f'/async/{tool_id}/{data.id}/{key}'
                galaxy_url = params.get("GALAXY_URL", galaxy_url)
                params = dict(URL=URL,
                              GALAXY_URL=galaxy_url,
                              name=data.name,
                              info=data.info,
                              dbkey=data.dbkey,
                              data_type=data.ext)

                # Assume there is exactly one output file possible
                TOOL_OUTPUT_TYPE = None
                for key, obj in tool.outputs.items():
                    try:
                        TOOL_OUTPUT_TYPE = obj.format
                        params[key] = data.id
                        break
                    except Exception:
                        # exclude outputs different from ToolOutput (e.g. collections) from the previous assumption
                        continue
                if TOOL_OUTPUT_TYPE is None:
                    raise Exception("Error: ToolOutput object not found")

                original_history = trans.sa_session.query(
                    trans.app.model.History).get(data.history_id)
                tool.execute(trans, incoming=params, history=original_history)
            else:
                log.debug('async error -> %s' % STATUS)
                trans.log_event('Async error -> %s' % STATUS)
                data.state = data.blurb = "error"
                data.info = "Error -> %s" % STATUS

            trans.sa_session.flush()

            return f"Data {data_id} with status {STATUS} received. OK"
        else:
            #
            # no data_id must be parameter submission
            #
            GALAXY_TYPE = None
            if params.data_type:
                GALAXY_TYPE = params.data_type
            elif params.galaxyFileFormat == 'wig':  # this is an undocumented legacy special case
                GALAXY_TYPE = 'wig'
            elif params.GALAXY_TYPE:
                GALAXY_TYPE = params.GALAXY_TYPE
            else:
                # Assume there is exactly one output
                outputs_count = 0
                for obj in tool.outputs.values():
                    try:
                        GALAXY_TYPE = obj.format
                        outputs_count += 1
                    except Exception:
                        # exclude outputs different from ToolOutput (e.g. collections) from the previous assumption
                        # a collection object does not have the 'format' attribute, so it will throw an exception
                        continue
                if outputs_count > 1:
                    raise Exception(
                        "Error: the tool should have just one output")

            if GALAXY_TYPE is None:
                raise Exception("Error: ToolOutput object not found")

            GALAXY_NAME = params.name or params.GALAXY_NAME or '%s query' % tool.name
            GALAXY_INFO = params.info or params.GALAXY_INFO or params.galaxyDescription or ''
            GALAXY_BUILD = params.dbkey or params.GALAXY_BUILD or params.galaxyFreeze or '?'

            # data = datatypes.factory(ext=GALAXY_TYPE)()
            # data.ext   = GALAXY_TYPE
            # data.name  = GALAXY_NAME
            # data.info  = GALAXY_INFO
            # data.dbkey = GALAXY_BUILD
            # data.state = jobs.JOB_OK
            # history.datasets.add_dataset( data )

            data = trans.app.model.HistoryDatasetAssociation(
                create_dataset=True,
                sa_session=trans.sa_session,
                extension=GALAXY_TYPE)
            trans.app.security_agent.set_all_dataset_permissions(
                data.dataset,
                trans.app.security_agent.history_get_default_permissions(
                    trans.history))
            data.name = GALAXY_NAME
            data.dbkey = GALAXY_BUILD
            data.info = GALAXY_INFO
            trans.sa_session.add(
                data
            )  # Need to add data to session before setting state (setting state requires that the data object is in the session, but this may change)
            data.state = data.states.NEW
            trans.history.add_dataset(data, genome_build=GALAXY_BUILD)
            trans.sa_session.add(trans.history)
            trans.sa_session.flush()
            # Need to explicitly create the file
            data.dataset.object_store.create(data)
            trans.log_event("Added dataset %d to history %d" %
                            (data.id, trans.history.id),
                            tool_id=tool_id)

            try:
                key = hmac_new(trans.app.config.tool_secret,
                               "%d:%d" % (data.id, data.history_id))
                galaxy_url = trans.request.base + f'/async/{tool_id}/{data.id}/{key}'
                params.update({'GALAXY_URL': galaxy_url})
                params.update({'data_id': data.id})

                # Use provided URL or fallback to tool action
                url = URL or tool.action
                # Does url already have query params?
                if '?' in url:
                    url_join_char = '&'
                else:
                    url_join_char = '?'
                url = "{}{}{}".format(url, url_join_char,
                                      urlencode(params.flatten()))
                log.debug("connecting to -> %s" % url)
                trans.log_event("Async connecting to -> %s" % url)
                text = requests.get(url).text.strip()
                if not text.endswith('OK'):
                    raise Exception(text)
                data.state = data.blurb = data.states.RUNNING
            except Exception as e:
                data.info = unicodify(e)
                data.state = data.blurb = data.states.ERROR

            trans.sa_session.flush()

        return trans.fill_template('root/tool_runner.mako',
                                   out_data={},
                                   num_jobs=1,
                                   job_errors=[])
Example 8
    def history(self,
                trans,
                as_xml=False,
                show_deleted=None,
                show_hidden=None,
                hda_id=None,
                **kwd):
        """Display the current history, creating a new history if necessary.

        NOTE: No longer accepts "id" or "template" options for security reasons.
        """
        if as_xml:
            return self.history_as_xml(
                trans,
                show_deleted=string_as_bool(show_deleted),
                show_hidden=string_as_bool(show_hidden))

        # get all datasets server-side, client-side will get flags and render appropriately
        show_deleted = string_as_bool_or_none(show_deleted)
        show_purged = show_deleted
        show_hidden = string_as_bool_or_none(show_hidden)
        params = Params(kwd)
        message = params.get('message', '')
        #TODO: ugh...
        message = message if message != 'None' else ''
        status = params.get('status', 'done')

        if trans.app.config.require_login and not trans.user:
            return trans.fill_template(
                '/no_access.mako',
                message='Please log in to access Galaxy histories.')

        def err_msg(where=None):
            where = where if where else 'getting the history data from the server'
            err_msg = (
                'An error occurred %s. ' +
                'Please contact a Galaxy administrator if the problem persists.'
            ) % (where)
            return err_msg, 'error'

        history_dictionary = {}
        hda_dictionaries = []
        try:
            history = trans.get_history(create=True)
            hdas = self.get_history_datasets(trans,
                                             history,
                                             show_deleted=True,
                                             show_hidden=True,
                                             show_purged=True)

            for hda in hdas:
                try:
                    hda_dictionaries.append(self.get_hda_dict(trans, hda))

                except Exception as exc:
                    # don't fail entire list if hda err's, record and move on
                    log.error('Error bootstrapping hda %d: %s',
                              hda.id,
                              str(exc),
                              exc_info=True)
                    hda_dictionaries.append(
                        self.get_hda_dict_with_error(trans, hda, str(exc)))

            # re-use the hdas above to get the history data...
            history_dictionary = self.get_history_dict(
                trans, history, hda_dictionaries=hda_dictionaries)
Example 9
    def index(self, trans, tool_id=None, data_secret=None, **kwd):
        """Manages ascynchronous connections"""

        if tool_id is None:
            return "tool_id argument is required"
        tool_id = str(tool_id)

        # redirect to main when getting no parameters
        if not kwd:
            return trans.response.send_redirect("/index")

        params = Params(kwd, sanitize=False)
        STATUS = params.STATUS
        URL = params.URL
        data_id = params.data_id

        log.debug('async dataid -> %s' % data_id)
        trans.log_event('Async dataid -> %s' % str(data_id))

        # initialize the tool
        toolbox = self.get_toolbox()
        tool = toolbox.get_tool(tool_id)
        if not tool:
            return "Tool with id %s not found" % tool_id

        #
        # we have an incoming data_id
        #
        if data_id:
            if not URL:
                return "No URL parameter was submitted for data %s" % data_id
            data = trans.sa_session.query(trans.model.HistoryDatasetAssociation).get(data_id)

            if not data:
                return "Data %s does not exist or has already been deleted" % data_id

            if STATUS == 'OK':
                key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
                if key != data_secret:
                    return "You do not have permission to alter data %s." % data_id
                # push the job into the queue
                data.state = data.blurb = data.states.RUNNING
                log.debug('executing tool %s' % tool.id)
                trans.log_event('Async executing tool %s' % tool.id, tool_id=tool.id)
                galaxy_url = trans.request.base + '/async/%s/%s/%s' % (tool_id, data.id, key)
                galaxy_url = params.get("GALAXY_URL", galaxy_url)
                params = dict(URL=URL, GALAXY_URL=galaxy_url, name=data.name, info=data.info, dbkey=data.dbkey, data_type=data.ext)

                # Assume there is exactly one output file possible
                TOOL_OUTPUT_TYPE = None
                for key, obj in tool.outputs.items():
                    try:
                        TOOL_OUTPUT_TYPE = obj.format
                        params[key] = data.id
                        break
                    except Exception:
                        # exclude outputs different from ToolOutput (e.g. collections) from the previous assumption
                        continue
                if TOOL_OUTPUT_TYPE is None:
                    raise Exception("Error: ToolOutput object not found")

                original_history = trans.sa_session.query(trans.app.model.History).get(data.history_id)
                tool.execute(trans, incoming=params, history=original_history)
            else:
                log.debug('async error -> %s' % STATUS)
                trans.log_event('Async error -> %s' % STATUS)
                data.state = data.blurb = jobs.JOB_ERROR
                data.info = "Error -> %s" % STATUS

            trans.sa_session.flush()

            return "Data %s with status %s received. OK" % (data_id, STATUS)
        else:
            #
            # no data_id must be parameter submission
            #
            GALAXY_TYPE = None
            if params.data_type:
                GALAXY_TYPE = params.data_type
            elif params.galaxyFileFormat == 'wig':  # this is an undocumented legacy special case
                GALAXY_TYPE = 'wig'
            elif params.GALAXY_TYPE:
                GALAXY_TYPE = params.GALAXY_TYPE
            else:
                # Assume there is exactly one output
                outputs_count = 0
                for obj in tool.outputs.values():
                    try:
                        GALAXY_TYPE = obj.format
                        outputs_count += 1
                    except Exception:
                        # exclude outputs different from ToolOutput (e.g. collections) from the previous assumption
                        # a collection object does not have the 'format' attribute, so it will throw an exception
                        continue
                if outputs_count > 1:
                    raise Exception("Error: the tool should have just one output")

            if GALAXY_TYPE is None:
                raise Exception("Error: ToolOutput object not found")

            GALAXY_NAME = params.name or params.GALAXY_NAME or '%s query' % tool.name
            GALAXY_INFO = params.info or params.GALAXY_INFO or params.galaxyDescription or ''
            GALAXY_BUILD = params.dbkey or params.GALAXY_BUILD or params.galaxyFreeze or '?'

            # data = datatypes.factory(ext=GALAXY_TYPE)()
            # data.ext   = GALAXY_TYPE
            # data.name  = GALAXY_NAME
            # data.info  = GALAXY_INFO
            # data.dbkey = GALAXY_BUILD
            # data.state = jobs.JOB_OK
            # history.datasets.add_dataset( data )

            data = trans.app.model.HistoryDatasetAssociation(create_dataset=True, sa_session=trans.sa_session, extension=GALAXY_TYPE)
            trans.app.security_agent.set_all_dataset_permissions(data.dataset, trans.app.security_agent.history_get_default_permissions(trans.history))
            data.name = GALAXY_NAME
            data.dbkey = GALAXY_BUILD
            data.info = GALAXY_INFO
            trans.sa_session.add(data)  # Need to add data to session before setting state (setting state requires that the data object is in the session, but this may change)
            data.state = data.states.NEW
            open(data.file_name, 'wb').close()  # create the file
            trans.history.add_dataset(data, genome_build=GALAXY_BUILD)
            trans.sa_session.add(trans.history)
            trans.sa_session.flush()
            trans.log_event("Added dataset %d to history %d" % (data.id, trans.history.id), tool_id=tool_id)

            try:
                key = hmac_new(trans.app.config.tool_secret, "%d:%d" % (data.id, data.history_id))
                galaxy_url = trans.request.base + '/async/%s/%s/%s' % (tool_id, data.id, key)
                params.update({'GALAXY_URL': galaxy_url})
                params.update({'data_id': data.id})

                # Use provided URL or fallback to tool action
                url = URL or tool.action
                # Does url already have query params?
                if '?' in url:
                    url_join_char = '&'
                else:
                    url_join_char = '?'
                url = "%s%s%s" % (url, url_join_char, urlencode(params.flatten()))
                log.debug("connecting to -> %s" % url)
                trans.log_event("Async connecting to -> %s" % url)
                text = requests.get(url).text.strip()
                if not text.endswith('OK'):
                    raise Exception(text)
                data.state = data.blurb = data.states.RUNNING
            except Exception as e:
                data.info = str(e)
                data.state = data.blurb = data.states.ERROR

            trans.sa_session.flush()

        return trans.fill_template('root/tool_runner.mako', out_data={}, num_jobs=1, job_errors=[])
Example 10
    def upload(self,
               trans,
               history_id,
               provider,
               bucket_name,
               objects,
               credentials,
               input_args=None):
        """
        Implements the logic of uploading a file from a cloud-based storage (e.g., Amazon S3)
        and persisting it as a Galaxy dataset.

        :type  trans:       galaxy.web.framework.webapp.GalaxyWebTransaction
        :param trans:       Galaxy web transaction

        :type  history_id:  string
        :param history_id:  the (decoded) id of the history to which the object should be uploaded.

        :type  provider:    string
        :param provider:    the name of the cloud-based resource provider. A list of supported providers is given in
                            the `SUPPORTED_PROVIDERS` variable.

        :type  bucket_name: string
        :param bucket_name: the name of a bucket from which data should be uploaded (e.g., a bucket name on AWS S3).

        :type  objects:     list of string
        :param objects:     the names of the objects to be uploaded.

        :type  credentials: dict
        :param credentials: a dictionary containing all the credentials required to authenticate to the
                            specified provider (e.g., {"secret_key": YOUR_AWS_SECRET_TOKEN,
                            "access_key": YOUR_AWS_ACCESS_TOKEN}).

        :type  input_args:  dict
        :param input_args:  [Optional] a dictionary of input parameters:
                            dbkey, file_type, space_to_tab, to_posix_lines (see galaxy/webapps/galaxy/api/cloud.py)

        :rtype:             list of galaxy.model.Dataset
        :return:            a list of datasets created for the uploaded files.
        """
        if CloudProviderFactory is None:
            raise Exception(NO_CLOUDBRIDGE_ERROR_MESSAGE)

        if input_args is None:
            input_args = {}

        connection = self._configure_provider(provider, credentials)
        try:
            bucket = connection.storage.buckets.get(bucket_name)
            if bucket is None:
                raise RequestParameterInvalidException(
                    "The bucket `{}` not found.".format(bucket_name))
        except Exception as e:
            raise ItemAccessibilityException(
                "Could not get the bucket `{}`: {}".format(
                    bucket_name, str(e)))

        datasets = []
        for obj in objects:
            try:
                key = bucket.objects.get(obj)
            except Exception as e:
                raise MessageException(
                    "The following error occurred while getting the object {}: {}"
                    .format(obj, str(e)))
            if key is None:
                raise ObjectNotFound(
                    "Could not get the object `{}`.".format(obj))

            params = Params(self._get_inputs(obj, key, input_args),
                            sanitize=False)
            incoming = params.__dict__
            history = trans.sa_session.query(
                trans.app.model.History).get(history_id)
            if not history:
                raise ObjectNotFound("History with ID `{}` not found.".format(
                    trans.app.security.encode_id(history_id)))
            output = trans.app.toolbox.get_tool('upload1').handle_input(
                trans, incoming, history=history)

            job_errors = output.get('job_errors', [])
            if job_errors:
                raise ValueError(
                    'The following error occurred while uploading the given object(s) from {}: {}'
                    .format(provider, job_errors))
            else:
                for d in output['out_data']:
                    datasets.append(d[1].dataset)

        return datasets
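Compared with the CloudAuthz-based get() in Example 5, upload() takes the provider credentials directly in the call. A hypothetical invocation is sketched below; cloud_manager, the history id, bucket, object names, and the credential values are placeholders, and the provider string is assumed to match an entry in SUPPORTED_PROVIDERS.

datasets = cloud_manager.upload(
    trans,
    history_id=decoded_history_id,                 # an already-decoded history id (placeholder)
    provider="aws",                                # assumed to be listed in SUPPORTED_PROVIDERS
    bucket_name="my-example-bucket",
    objects=["results/summary.tabular"],
    credentials={"access_key": "...", "secret_key": "..."},
    input_args={"file_type": "tabular", "to_posix_lines": "Yes"},
)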