    def decorator(self, trans, *args, **kwargs):
        def error(environ, start_response):
            start_response(error_status, [('Content-type', 'text/plain')])
            return error_message

        error_status = '403 Forbidden'
        if trans.error_message:
            return trans.error_message
        if user_required and trans.anonymous:
            error_message = "API Authentication Required for this request"
            return error
        if trans.request.body:
            try:
                kwargs['payload'] = __extract_payload_from_request(
                    trans, func, kwargs)
            except ValueError:
                error_status = '400 Bad Request'
                error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
                return error
        trans.response.set_content_type("application/json")
        # send 'do not cache' headers to handle IE's caching of ajax get responses
        trans.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"
        # Perform api_run_as processing, possibly changing identity
        if 'payload' in kwargs and 'run_as' in kwargs['payload']:
            if not trans.user_can_do_run_as():
                error_message = 'User does not have permissions to run jobs as another user'
                return error
            try:
                decoded_user_id = trans.security.decode_id(
                    kwargs['payload']['run_as'])
            except TypeError:
                trans.response.status = 400
                return "Malformed user id ( %s ) specified, unable to decode." % str(
                    kwargs['payload']['run_as'])
            try:
                user = trans.sa_session.query(
                    trans.app.model.User).get(decoded_user_id)
                trans.api_inherit_admin = trans.user_is_admin()
                trans.set_user(user)
            except Exception:
                trans.response.status = 400
                return "That user does not exist."
        try:
            rval = func(self, trans, *args, **kwargs)
            if to_json and trans.debug:
                rval = safe_dumps(rval, indent=4, sort_keys=True)
            elif to_json:
                rval = safe_dumps(rval)
            return rval
        except paste.httpexceptions.HTTPException:
            raise  # handled
        except Exception:
            log.exception('Uncaught exception in exposed API method:')
            raise paste.httpexceptions.HTTPServerError()
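The code above is only the inner closure of a decorator factory; func, to_json and user_required are free variables that the enclosing factory supplies. A minimal sketch of what that factory could look like (the name expose_api, its defaults and the exposed flag are assumptions based on Galaxy conventions, not taken from the excerpt):

def expose_api(func, to_json=True, user_required=True):   # assumed name and defaults
    def decorator(self, trans, *args, **kwargs):
        ...  # body as shown in the example above
    decorator.exposed = True   # assumption: flag the wrapped method as web-exposed
    return decorator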
Example #2
 def decorator( self, trans, *args, **kwargs ):
     def error( environ, start_response ):
         start_response( error_status, [('Content-type', 'text/plain')] )
         return error_message
     error_status = '403 Forbidden'
     if trans.error_message:
         return trans.error_message
     if user_required and trans.anonymous:
         error_message = "API Authentication Required for this request"
         return error
     if trans.request.body:
         try:
             kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
         except ValueError:
             error_status = '400 Bad Request'
             error_message = 'Your request did not appear to be valid JSON, please consult the API documentation'
             return error
     trans.response.set_content_type( "application/json" )
     # send 'do not cache' headers to handle IE's caching of ajax get responses
     trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"
     # Perform api_run_as processing, possibly changing identity
     if 'payload' in kwargs and 'run_as' in kwargs['payload']:
         if not trans.user_can_do_run_as():
             error_message = 'User does not have permissions to run jobs as another user'
             return error
         try:
             decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
         except TypeError:
             trans.response.status = 400
             return "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
         try:
             user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
             trans.api_inherit_admin = trans.user_is_admin()
             trans.set_user(user)
         except Exception:
             trans.response.status = 400
             return "That user does not exist."
     try:
         rval = func( self, trans, *args, **kwargs)
         if to_json and trans.debug:
             rval = safe_dumps( rval, indent=4, sort_keys=True )
         elif to_json:
             rval = safe_dumps( rval )
         return rval
     except paste.httpexceptions.HTTPException:
         raise  # handled
     except Exception:
         log.exception( 'Uncaught exception in exposed API method:' )
         raise paste.httpexceptions.HTTPServerError()
Example #3
    def test_data(self, trans, id, **kwd):
        """
        GET /api/tools/{tool_id}/test_data?tool_version={tool_version}

        This API endpoint is unstable and experimental. In particular the format of the
        response has not been entirely nailed down (it exposes too many Galaxy
        internals/Pythonisms in a rough way). If this endpoint is being used from outside
        of scripts shipped with Galaxy, let us know and please be prepared for the response
        from this API to change its format in some ways.
        """
        # TODO: eliminate copy and paste with above code.
        if 'payload' in kwd:
            kwd = kwd.get('payload')
        tool_version = kwd.get('tool_version', None)
        tool = self._get_tool(id, tool_version=tool_version, user=trans.user)

        # Encode in this method to handle odict objects in tool representation.
        def json_encodeify(obj):
            if isinstance(obj, odict):
                return dict(obj)
            elif isinstance(obj, map):
                return list(obj)
            else:
                return obj

        result = [t.to_dict() for t in tool.tests]
        return safe_dumps(result, default=json_encodeify)
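As a side note, the default= hook used above is only invoked for objects the encoder cannot serialize natively. A small self-contained sketch of the same pattern, using the standard library json module in place of safe_dumps (Python 3, where map is a type):

import json

def json_encodeify(obj):
    # called by json.dumps only when it hits an object it cannot serialize itself
    if isinstance(obj, map):
        return list(obj)
    return obj

print(json.dumps({'values': map(str, range(3))}, default=json_encodeify))
# -> {"values": ["0", "1", "2"]}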
Example #4
    def test_data(self, trans, id, **kwd):
        """
        GET /api/tools/{tool_id}/test_data?tool_version={tool_version}

        This API endpoint is unstable and experimental. In particular the format of the
        response has not been entirely nailed down (it exposes too many Galaxy
        internals/Pythonisms in a rough way). If this endpoint is being used from outside
        of scripts shipped with Galaxy, let us know and please be prepared for the response
        from this API to change its format in some ways.
        """
        # TODO: eliminate copy and paste with above code.
        if 'payload' in kwd:
            kwd = kwd.get('payload')
        tool_version = kwd.get('tool_version', None)
        tool = self._get_tool(id, tool_version=tool_version, user=trans.user)

        # Encode in this method to handle odict objects in tool representation.
        def json_encodeify(obj):
            if isinstance(obj, odict):
                return dict(obj)
            else:
                return obj

        result = [t.to_dict() for t in tool.tests]
        return safe_dumps(result, default=json_encodeify)
Example #5
def _format_return_as_json(rval, jsonp_callback=None, pretty=False):
    """
    Formats a return value as JSON or JSONP if `jsonp_callback` is present.

    Use `pretty=True` to return pretty printed json.
    """
    dumps_kwargs = dict(indent=4, sort_keys=True) if pretty else {}
    json = safe_dumps(rval, **dumps_kwargs)
    if jsonp_callback:
        json = "{}({});".format(jsonp_callback, json)
    return json
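For reference, a hedged sketch of how a helper like this behaves, with the standard library json.dumps standing in for safe_dumps (the stand-in name format_as_json is illustrative only):

import json

def format_as_json(rval, jsonp_callback=None, pretty=False):
    dumps_kwargs = dict(indent=4, sort_keys=True) if pretty else {}
    out = json.dumps(rval, **dumps_kwargs)
    if jsonp_callback:
        out = "{}({});".format(jsonp_callback, out)
    return out

print(format_as_json({'ok': True}))
# -> {"ok": true}
print(format_as_json({'ok': True}, jsonp_callback='handleResponse'))
# -> handleResponse({"ok": true});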
Example #6
def _format_return_as_json( rval, jsonp_callback=None, pretty=False ):
    """
    Formats a return value as JSON or JSONP if `jsonp_callback` is present.

    Use `pretty=True` to return pretty printed json.
    """
    dumps_kwargs = dict( indent=4, sort_keys=True ) if pretty else {}
    json = safe_dumps( rval, **dumps_kwargs )
    if jsonp_callback:
        json = "{}({});".format( jsonp_callback, json )
    return json
Example #7
def format_return_as_json(rval, jsonp_callback=None, pretty=False):
    """
    Formats a return value as JSON or JSONP if `jsonp_callback` is present.

    Use `pretty=True` to return pretty printed json.
    """
    dumps_kwargs = dict(indent=4, sort_keys=True) if pretty else {}
    if isinstance(rval, BaseModel):
        json = rval.json(**dumps_kwargs)
    else:
        json = safe_dumps(rval, **dumps_kwargs)
    if jsonp_callback:
        json = f"{jsonp_callback}({json});"
    return json
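The BaseModel branch above defers serialization to the model itself. A hedged illustration of that call, assuming pydantic v1, where BaseModel.json() forwards extra keyword arguments to json.dumps (the model class here is made up for the example):

from pydantic import BaseModel

class ExampleModel(BaseModel):   # hypothetical model, not from the source above
    id: int
    name: str

m = ExampleModel(id=1, name='test')
print(m.json(indent=4, sort_keys=True))   # pretty-printed, keys sorted by json.dumps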
Example #8
def file_err(msg, dataset, json_file):
    json_file.write(
        safe_dumps(
            dict(type='dataset',
                 ext='data',
                 dataset_id=dataset.dataset_id,
                 stderr=msg)) + "\n")
    # never remove a server-side upload
    if dataset.type in ('server_dir', 'path_paste'):
        return
    try:
        os.remove(dataset.path)
    except Exception:
        pass
Example #9
def __api_error_response(trans, **kwds):
    error_dict = __api_error_message(trans, **kwds)
    exception = kwds.get("exception", None)
    # If we are given a status code directly, use it; otherwise check
    # the exception for a status_code attribute.
    if "status_code" in kwds:
        status_code = int(kwds.get("status_code"))
    elif hasattr(exception, "status_code"):
        status_code = int(exception.status_code)
    else:
        status_code = 500
    response = trans.response
    if not response.status or str(response.status).startswith("20"):
        # An unset status code appears as the string '200 OK'; if anything
        # non-successful (i.e. not 200 or 201) has already been set, do not
        # override the underlying controller.
        response.status = status_code
    return safe_dumps(error_dict)
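The status-code resolution above follows a simple precedence: an explicit status_code keyword wins, then an exception's status_code attribute, then 500. A small standalone sketch of that logic (the names here are illustrative only):

class FakeRequestError(Exception):   # hypothetical exception carrying a status_code
    status_code = 404

def resolve_status_code(exception=None, **kwds):
    if "status_code" in kwds:
        return int(kwds["status_code"])
    if hasattr(exception, "status_code"):
        return int(exception.status_code)
    return 500

assert resolve_status_code(status_code=400, exception=FakeRequestError()) == 400
assert resolve_status_code(exception=FakeRequestError()) == 404
assert resolve_status_code() == 500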
Example #10
def __api_error_response( trans, **kwds ):
    error_dict = __api_error_message( trans, **kwds )
    exception = kwds.get( "exception", None )
    # If we are given a status code directly, use it; otherwise check
    # the exception for a status_code attribute.
    if "status_code" in kwds:
        status_code = int( kwds.get( "status_code" ) )
    elif hasattr( exception, "status_code" ):
        status_code = int( exception.status_code )
    else:
        status_code = 500
    response = trans.response
    if not response.status or str(response.status).startswith("20"):
        # An unset status code appears as the string '200 OK'; if anything
        # non-successful (i.e. not 200 or 201) has already been set, do not
        # override the underlying controller.
        response.status = status_code
    return safe_dumps( error_dict )
Example #11
        else:
            # This should not happen, but it's here just in case
            shutil.copy(dataset.path, output_path)
    elif link_data_only == 'copy_files':
        shutil.move(dataset.path, output_path)
    # Write the job info
    stdout = stdout or 'uploaded %s file' % data_type
    info = dict(type='dataset',
                dataset_id=dataset.dataset_id,
                ext=ext,
                stdout=stdout,
                name=dataset.name,
                line_count=line_count)
    if dataset.get('uuid', None) is not None:
        info['uuid'] = dataset.get('uuid')
    json_file.write(safe_dumps(info) + "\n")

    if link_data_only == 'copy_files' and datatype.dataset_content_needs_grooming(
            output_path):
        # Groom the dataset content if necessary
        datatype.groom_dataset_content(output_path)


def add_composite_file(dataset, registry, json_file, output_path, files_path):
    if dataset.composite_files:
        os.mkdir(files_path)
        for name, value in dataset.composite_files.iteritems():
            value = util.bunch.Bunch(**value)
            if dataset.composite_file_paths[
                    value.name] is None and not value.optional:
                file_err(
Example #12
def __api_error_response(trans, **kwds):
    error_dict = __api_error_dict(trans, **kwds)
    return safe_dumps(error_dict)
Example #13
 def to_safe_string(self, value):
     # We do not sanitize json dicts
     return safe_dumps(value)
Example #14
    def archive(self, trans, history_id, filename='', format='tgz', dry_run=True, **kwd):
        """
        archive( self, trans, history_id, filename='', format='tgz', dry_run=True, **kwd )
        * GET /api/histories/{history_id}/contents/archive/{id}
        * GET /api/histories/{history_id}/contents/archive/{filename}.{format}
            build and return a compressed archive of the selected history contents

        :type   filename:  string
        :param  filename:  (optional) archive name (defaults to history name)
        :type   dry_run:   boolean
        :param  dry_run:   (optional) if True, return the archive and file paths only
                           as json and not an archive file

        :returns:   archive file for download

        .. note:: this is a volatile endpoint and settings and behavior may change.
        """
        # roughly from: http://stackoverflow.com/a/31976060 (windows, linux)
        invalid_filename_char_regex = re.compile(r'[:<>|\\\/\?\* "]')
        # path format string - dot separator between id and name
        id_name_format = u'{}.{}'

        def name_to_filename(name, max_length=150, replace_with=u'_'):
            # TODO: seems like shortening unicode with [:] would cause unpredictable display strings
            return invalid_filename_char_regex.sub(replace_with, name)[0:max_length]

        # given a set of parents for a dataset (HDCAs, DC, DCEs, etc.) - build a directory structure that
        # (roughly) recreates the nesting in the contents using the parent names and ids
        def build_path_from_parents(parents):
            parent_names = []
            for parent in parents:
                # an HDCA
                if hasattr(parent, 'hid'):
                    name = name_to_filename(parent.name)
                    parent_names.append(id_name_format.format(parent.hid, name))
                # a DCE
                elif hasattr(parent, 'element_index'):
                    name = name_to_filename(parent.element_identifier)
                    parent_names.append(id_name_format.format(parent.element_index, name))
            # NOTE: DCs are skipped and use the wrapping DCE info instead
            return parent_names

        # get the history used for the contents query and check for accessibility
        history = self.history_manager.get_accessible(trans.security.decode_id(history_id), trans.user)
        archive_base_name = filename or name_to_filename(history.name)

        # this is the fn applied to each dataset contained in the query
        paths_and_files = []

        def build_archive_files_and_paths(content, *parents):
            archive_path = archive_base_name
            if not self.hda_manager.is_accessible(content, trans.user):
                # if the underlying dataset is not accessible, skip it silently
                return

            content_container_id = content.hid
            content_name = name_to_filename(content.name)
            if parents:
                if hasattr(parents[0], 'element_index'):
                    # if content is directly wrapped in a DCE, strip it from parents (and the resulting path)
                    # and instead replace the content id and name with the DCE index and identifier
                    parent_dce, parents = parents[0], parents[1:]
                    content_container_id = parent_dce.element_index
                    content_name = name_to_filename(parent_dce.element_identifier)
                # reverse for path from parents: oldest parent first
                archive_path = os.path.join(archive_path, *build_path_from_parents(parents)[::-1])
                # TODO: this is brute force - building the path each time instead of re-using it
                # possibly cache

            # add the name as the last element in the archive path
            content_id_and_name = id_name_format.format(content_container_id, content_name)
            archive_path = os.path.join(archive_path, content_id_and_name)

            # ---- for composite files, we use id and name for a directory and, inside that, ...
            if self.hda_manager.is_composite(content):
                # ...save the 'main' composite file (gen. html)
                paths_and_files.append((content.file_name, os.path.join(archive_path, content.name + '.html')))
                for extra_file in self.hda_manager.extra_files(content):
                    extra_file_basename = os.path.basename(extra_file)
                    archive_extra_file_path = os.path.join(archive_path, extra_file_basename)
                    # ...and one for each file in the composite
                    paths_and_files.append((extra_file, archive_extra_file_path))

            # ---- for single files, we add the true extension to id and name and store that single filename
            else:
                # some dataset names can contain their original file extensions, don't repeat
                if not archive_path.endswith('.' + content.extension):
                    archive_path += '.' + content.extension
                paths_and_files.append((content.file_name, archive_path))

        # filter the contents that contain datasets using any filters possible from index above and map the datasets
        filter_params = self.parse_filter_params(kwd)
        filters = self.history_contents_filters.parse_filters(filter_params)
        self.history_contents_manager.map_datasets(history, build_archive_files_and_paths, filters=filters)

        # if dry_run, return the structure as json for debugging
        if dry_run == 'True':
            trans.response.headers['Content-Type'] = 'application/json'
            return safe_dumps(paths_and_files)

        # create the archive, add the dataset files, then stream the archive as a download
        archive_type_string = 'w|gz'
        archive_ext = 'tgz'
        if self.app.config.upstream_gzip:
            archive_type_string = 'w|'
            archive_ext = 'tar'
        archive = StreamBall(archive_type_string)

        for file_path, archive_path in paths_and_files:
            archive.add(file_path, archive_path)

        archive_name = '.'.join([archive_base_name, archive_ext])
        trans.response.set_content_type("application/x-tar")
        trans.response.headers["Content-Disposition"] = 'attachment; filename="{}"'.format(archive_name)
        archive.wsgi_status = trans.response.wsgi_status()
        archive.wsgi_headeritems = trans.response.wsgi_headeritems()
        return archive.stream
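A quick standalone check of the filename sanitizer used in the archive code above (same regex and helper, exercised on a made-up name):

import re

invalid_filename_char_regex = re.compile(r'[:<>|\\\/\?\* "]')

def name_to_filename(name, max_length=150, replace_with=u'_'):
    return invalid_filename_char_regex.sub(replace_with, name)[0:max_length]

print(name_to_filename('my data: set/1 "v2"'))
# -> my_data__set_1__v2_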
Example #15
    def archive(self,
                trans,
                history_id,
                filename='',
                format='tgz',
                dry_run=True,
                **kwd):
        """
        archive( self, trans, history_id, filename='', format='tgz', dry_run=True, **kwd )
        * GET /api/histories/{history_id}/contents/archive/{id}
        * GET /api/histories/{history_id}/contents/archive/{filename}.{format}
            build and return a compressed archive of the selected history contents

        :type   filename:  string
        :param  filename:  (optional) archive name (defaults to history name)
        :type   dry_run:   boolean
        :param  dry_run:   (optional) if True, return the archive and file paths only
                           as json and not an archive file

        :returns:   archive file for download

        .. note:: this is a volatile endpoint and settings and behavior may change.
        """
        # roughly from: http://stackoverflow.com/a/31976060 (windows, linux)
        invalid_filename_char_regex = re.compile(r'[:<>|\\\/\?\* "]')
        # path format string - dot separator between id and name
        id_name_format = u'{}.{}'

        def name_to_filename(name, max_length=150, replace_with=u'_'):
            # TODO: seems like shortening unicode with [:] would cause unpredictable display strings
            return invalid_filename_char_regex.sub(replace_with,
                                                   name)[0:max_length]

        # given a set of parents for a dataset (HDCAs, DC, DCEs, etc.) - build a directory structure that
        # (roughly) recreates the nesting in the contents using the parent names and ids
        def build_path_from_parents(parents):
            parent_names = []
            for parent in parents:
                # an HDCA
                if hasattr(parent, 'hid'):
                    name = name_to_filename(parent.name)
                    parent_names.append(id_name_format.format(
                        parent.hid, name))
                # a DCE
                elif hasattr(parent, 'element_index'):
                    name = name_to_filename(parent.element_identifier)
                    parent_names.append(
                        id_name_format.format(parent.element_index, name))
            # NOTE: DCs are skipped and use the wrapping DCE info instead
            return parent_names

        # get the history used for the contents query and check for accessibility
        history = self.history_manager.get_accessible(
            trans.security.decode_id(history_id), trans.user)
        archive_base_name = filename or name_to_filename(history.name)

        # this is the fn applied to each dataset contained in the query
        paths_and_files = []

        def build_archive_files_and_paths(content, *parents):
            archive_path = archive_base_name
            if not self.hda_manager.is_accessible(content, trans.user):
                # if the underlying dataset is not accessible, skip it silently
                return

            content_container_id = content.hid
            content_name = name_to_filename(content.name)
            if parents:
                if hasattr(parents[0], 'element_index'):
                    # if content is directly wrapped in a DCE, strip it from parents (and the resulting path)
                    # and instead replace the content id and name with the DCE index and identifier
                    parent_dce, parents = parents[0], parents[1:]
                    content_container_id = parent_dce.element_index
                    content_name = name_to_filename(
                        parent_dce.element_identifier)
                # reverse for path from parents: oldest parent first
                archive_path = os.path.join(
                    archive_path,
                    *build_path_from_parents(parents)[::-1])
                # TODO: this is brute force - building the path each time instead of re-using it
                # possibly cache

            # add the name as the last element in the archive path
            content_id_and_name = id_name_format.format(
                content_container_id, content_name)
            archive_path = os.path.join(archive_path, content_id_and_name)

            # ---- for composite files, we use id and name for a directory and, inside that, ...
            if self.hda_manager.is_composite(content):
                # ...save the 'main' composite file (gen. html)
                paths_and_files.append((content.file_name,
                                        os.path.join(archive_path,
                                                     content.name + '.html')))
                for extra_file in self.hda_manager.extra_files(content):
                    extra_file_basename = os.path.basename(extra_file)
                    archive_extra_file_path = os.path.join(
                        archive_path, extra_file_basename)
                    # ...and one for each file in the composite
                    paths_and_files.append(
                        (extra_file, archive_extra_file_path))

            # ---- for single files, we add the true extension to id and name and store that single filename
            else:
                # some dataset names can contain their original file extensions, don't repeat
                if not archive_path.endswith('.' + content.extension):
                    archive_path += '.' + content.extension
                paths_and_files.append((content.file_name, archive_path))

        # filter the contents that contain datasets using any filters possible from index above and map the datasets
        filter_params = self.parse_filter_params(kwd)
        filters = self.history_contents_filters.parse_filters(filter_params)
        self.history_contents_manager.map_datasets(
            history, build_archive_files_and_paths, filters=filters)

        # if dry_run, return the structure as json for debugging
        if dry_run == 'True':
            trans.response.headers['Content-Type'] = 'application/json'
            return safe_dumps(paths_and_files)

        # create the archive, add the dataset files, then stream the archive as a download
        archive_type_string = 'w|gz'
        archive_ext = 'tgz'
        if self.app.config.upstream_gzip:
            archive_type_string = 'w|'
            archive_ext = 'tar'
        archive = StreamBall(archive_type_string)

        for file_path, archive_path in paths_and_files:
            archive.add(file_path, archive_path)

        archive_name = '.'.join([archive_base_name, archive_ext])
        trans.response.set_content_type("application/x-tar")
        trans.response.headers[
            "Content-Disposition"] = 'attachment; filename="{}"'.format(
                archive_name)
        archive.wsgi_status = trans.response.wsgi_status()
        archive.wsgi_headeritems = trans.response.wsgi_headeritems()
        return archive.stream
Example #16
 def to_safe_string( self, value ):
     # We do not sanitize json dicts
     return safe_dumps( value )
Example #17
 def call_and_format(self, trans, *args, **kwargs):
     trans.response.set_content_type(JSON_CONTENT_TYPE)
     return safe_dumps(func(self, trans, *args, **kwargs), **json_kwargs)
Example #18
    def decorator(self, trans, *args, **kwargs):
        # errors passed in from trans._authenticate_api
        if trans.error_message:
            return __api_error_response(trans,
                                        status_code=403,
                                        err_code=error_codes.USER_NO_API_KEY,
                                        err_msg=trans.error_message)
        if trans.anonymous:
            # error if anon and user required
            if user_required:
                return __api_error_response(
                    trans,
                    status_code=403,
                    err_code=error_codes.USER_NO_API_KEY,
                    err_msg="API authentication required for this request")
            # error if anon and no session
            if not trans.galaxy_session and user_or_session_required:
                return __api_error_response(
                    trans,
                    status_code=403,
                    err_code=error_codes.USER_NO_API_KEY,
                    err_msg="API authentication required for this request")

        if trans.request.body:
            try:
                kwargs['payload'] = __extract_payload_from_request(
                    trans, func, kwargs)
            except ValueError:
                error_code = error_codes.USER_INVALID_JSON
                return __api_error_response(trans,
                                            status_code=400,
                                            err_code=error_code)

        trans.response.set_content_type(JSON_CONTENT_TYPE)

        # send 'do not cache' headers to handle IE's caching of ajax get responses
        trans.response.headers['Cache-Control'] = "max-age=0,no-cache,no-store"

        # TODO: Refactor next block out into a helper procedure.
        # Perform api_run_as processing, possibly changing identity
        if 'payload' in kwargs and 'run_as' in kwargs['payload']:
            if not trans.user_can_do_run_as():
                error_code = error_codes.USER_CANNOT_RUN_AS
                return __api_error_response(trans,
                                            err_code=error_code,
                                            status_code=403)
            try:
                decoded_user_id = trans.security.decode_id(
                    kwargs['payload']['run_as'])
            except TypeError:
                error_message = "Malformed user id ( %s ) specified, unable to decode." % str(
                    kwargs['payload']['run_as'])
                error_code = error_codes.USER_INVALID_RUN_AS
                return __api_error_response(trans,
                                            err_code=error_code,
                                            err_msg=error_message,
                                            status_code=400)
            try:
                user = trans.sa_session.query(
                    trans.app.model.User).get(decoded_user_id)
                trans.api_inherit_admin = trans.user_is_admin()
                trans.set_user(user)
            except Exception:
                error_code = error_codes.USER_INVALID_RUN_AS
                return __api_error_response(trans,
                                            err_code=error_code,
                                            status_code=400)
        try:
            rval = func(self, trans, *args, **kwargs)
            if to_json and trans.debug:
                rval = safe_dumps(rval, indent=4, sort_keys=True)
            elif to_json:
                rval = safe_dumps(rval)
            return rval
        except MessageException as e:
            traceback_string = format_exc()
            return __api_error_response(trans,
                                        exception=e,
                                        traceback=traceback_string)
        except paste.httpexceptions.HTTPException:
            # TODO: Allow to pass or format for the API???
            raise  # handled
        except Exception as e:
            traceback_string = format_exc()
            error_message = 'Uncaught exception in exposed API method:'
            log.exception(error_message)
            return __api_error_response(trans,
                                        status_code=500,
                                        exception=e,
                                        traceback=traceback_string,
                                        err_msg=error_message,
                                        err_code=error_codes.UNKNOWN)
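For context, the run_as branch above is triggered by a key in the JSON request body. A hedged sketch of what such a client request might look like (the URL, API key and encoded ids are placeholders, and the exact endpoint is an assumption, not taken from the source):

import json
from urllib.request import Request, urlopen

payload = json.dumps({
    'run_as': 'ENCODED_USER_ID',        # placeholder: an id encoded by trans.security
    'history_id': 'ENCODED_HISTORY_ID',  # placeholder
}).encode()
req = Request('https://galaxy.example.org/api/jobs?key=YOUR_API_KEY',
              data=payload,
              headers={'Content-Type': 'application/json'})
# urlopen(req)  # left commented out; requires a live server and a valid key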
Example #19
    def decorator( self, trans, *args, **kwargs ):
        # errors passed in from trans._authenticate_api
        if trans.error_message:
            return __api_error_response( trans, status_code=403, err_code=error_codes.USER_NO_API_KEY,
                                         err_msg=trans.error_message )
        if trans.anonymous:
            # error if anon and user required
            if user_required:
                return __api_error_response( trans, status_code=403, err_code=error_codes.USER_NO_API_KEY,
                                             err_msg="API authentication required for this request" )
            # error if anon and no session
            if not trans.galaxy_session and user_or_session_required:
                return __api_error_response( trans, status_code=403, err_code=error_codes.USER_NO_API_KEY,
                                             err_msg="API authentication required for this request" )

        if trans.request.body:
            try:
                kwargs['payload'] = __extract_payload_from_request(trans, func, kwargs)
            except ValueError:
                error_code = error_codes.USER_INVALID_JSON
                return __api_error_response( trans, status_code=400, err_code=error_code )

        trans.response.set_content_type( JSON_CONTENT_TYPE )

        # send 'do not cache' headers to handle IE's caching of ajax get responses
        trans.response.headers[ 'Cache-Control' ] = "max-age=0,no-cache,no-store"

        # TODO: Refactor next block out into a helper procedure.
        # Perform api_run_as processing, possibly changing identity
        if 'payload' in kwargs and 'run_as' in kwargs['payload']:
            if not trans.user_can_do_run_as():
                error_code = error_codes.USER_CANNOT_RUN_AS
                return __api_error_response( trans, err_code=error_code, status_code=403 )
            try:
                decoded_user_id = trans.security.decode_id( kwargs['payload']['run_as'] )
            except TypeError:
                error_message = "Malformed user id ( %s ) specified, unable to decode." % str( kwargs['payload']['run_as'] )
                error_code = error_codes.USER_INVALID_RUN_AS
                return __api_error_response( trans, err_code=error_code, err_msg=error_message, status_code=400)
            try:
                user = trans.sa_session.query( trans.app.model.User ).get( decoded_user_id )
                trans.api_inherit_admin = trans.user_is_admin()
                trans.set_user(user)
            except Exception:
                error_code = error_codes.USER_INVALID_RUN_AS
                return __api_error_response( trans, err_code=error_code, status_code=400 )
        try:
            rval = func( self, trans, *args, **kwargs)
            if to_json and trans.debug:
                rval = safe_dumps( rval, indent=4, sort_keys=True )
            elif to_json:
                rval = safe_dumps( rval )
            return rval
        except MessageException as e:
            traceback_string = format_exc()
            return __api_error_response( trans, exception=e, traceback=traceback_string )
        except paste.httpexceptions.HTTPException:
            # TODO: Allow to pass or format for the API???
            raise  # handled
        except Exception as e:
            traceback_string = format_exc()
            error_message = 'Uncaught exception in exposed API method:'
            log.exception( error_message )
            return __api_error_response(
                trans,
                status_code=500,
                exception=e,
                traceback=traceback_string,
                err_msg=error_message,
                err_code=error_codes.UNKNOWN
            )
Example #20
 def call_and_format( self, trans, *args, **kwargs ):
     trans.response.set_content_type( JSON_CONTENT_TYPE )
     return safe_dumps( func( self, trans, *args, **kwargs ), **json_kwargs )