Example no. 1
0
def ep(ep_path):
    """Build an entrypoint whose dependencies count their own invocations.

    ``get_shared_counter`` is wired both as an entrypoint-level dependency
    and directly into ``probe``; ``get_common_counter`` likewise via
    ``common_dependencies``.  Each call returns its input value tagged with
    the running invocation count, so tests can observe how often FastAPI
    resolved each dependency.
    """
    counts = {'shared': 0, 'common': 0}

    def get_shared_counter(shared: str = Header('shared'), ) -> str:
        counts['shared'] += 1
        return f"{shared}-{counts['shared']}"

    def get_common_counter(common: str = Body(...), ) -> str:
        counts['common'] += 1
        return f"{common}-{counts['common']}"

    entrypoint = jsonrpc.Entrypoint(
        ep_path,
        dependencies=[Depends(get_shared_counter)],
        common_dependencies=[Depends(get_common_counter)],
    )

    @entrypoint.method()
    def probe(
            shared_counter: str = Depends(get_shared_counter),
            common_counter: str = Depends(get_common_counter),
    ) -> List[str]:
        return [shared_counter, common_counter]

    return entrypoint
def ep(ep_path):
    """Build an entrypoint with three middlewares that trace their own calls.

    Every middleware appends a ``(name, phase, raw_request, raw_response,
    exc_type)`` tuple to ``ep.calls``, keyed by the request id on enter and
    by the response id on exit.  ``mw_exception_enter`` deliberately raises
    before yielding so tests can observe how a failing middleware propagates
    through the chain.
    """
    _calls = defaultdict(list)

    def _record(name, phase, key, ctx):
        # Snapshot the context plus the currently-active exception type
        # (sys.exc_info is thread-wide, so reading it here is equivalent to
        # reading it at the call site).
        _calls[key].append(
            (name, phase, ctx.raw_request, ctx.raw_response,
             sys.exc_info()[0]))

    @contextlib.asynccontextmanager
    async def mw_first(ctx: jsonrpc.JsonRpcContext):
        _record('mw_first', 'enter', ctx.raw_request.get('id'), ctx)
        try:
            yield
        finally:
            _record('mw_first', 'exit', ctx.raw_response.get('id'), ctx)

    @contextlib.asynccontextmanager
    async def mw_exception_enter(ctx: jsonrpc.JsonRpcContext):
        _record('mw_exception_enter', 'enter', ctx.raw_request.get('id'), ctx)
        raise RuntimeError(unique_marker)
        # The yield below never runs, but it must exist for this function to
        # be an async generator usable with asynccontextmanager.
        # noinspection PyUnreachableCode
        try:
            yield
        finally:
            _record('mw_exception_enter', 'exit', ctx.raw_response.get('id'),
                    ctx)

    @contextlib.asynccontextmanager
    async def mw_last(ctx: jsonrpc.JsonRpcContext):
        _record('mw_last', 'enter', ctx.raw_request.get('id'), ctx)
        try:
            yield
        finally:
            _record('mw_last', 'exit', ctx.raw_response.get('id'), ctx)

    entrypoint = jsonrpc.Entrypoint(
        ep_path,
        middlewares=[mw_first, mw_exception_enter, mw_last],
    )

    @entrypoint.method()
    def probe(data: str = Body(..., example='123'), ) -> str:
        return data

    entrypoint.calls = _calls

    return entrypoint
Example no. 3
0
import fastapi_jsonrpc as jsonrpc
from pydantic import BaseModel
from fastapi import Body

app = jsonrpc.API()  # ASGI application that hosts the JSON-RPC entrypoints

app_v1 = jsonrpc.Entrypoint('/api/v1/jsonrpc')  # all v1 methods are mounted here


class MyError(jsonrpc.BaseError):
    """Domain error exposed through JSON-RPC with a structured data payload."""
    CODE = 5000  # application-specific JSON-RPC error code
    MESSAGE = 'My Error'  # human-readable message sent to the client

    class DataModel(BaseModel):
        # Schema of the ``data`` payload attached to the error
        # (see ``echo``, which raises with ``data={'details': ...}``).
        details: str


@app_v1.method(errors=[MyError])
def echo(data: str = Body(..., example='123'), ) -> str:
    """Return ``data`` unchanged, raising ``MyError`` for the value 'error'."""
    if data != 'error':
        return data
    raise MyError(data={'details': 'error'})


app.bind_entrypoint(app_v1)  # register the entrypoint's routes on the API app

if __name__ == '__main__':
    import uvicorn
    # NOTE(review): the ``debug=`` keyword was removed in uvicorn >= 0.15 —
    # confirm the pinned uvicorn version, otherwise this call raises TypeError.
    uvicorn.run('example1:app', port=5000, debug=True, access_log=True)
Example no. 4
0
import fastapi_jsonrpc as jsonrpc
from fastapi.middleware.cors import CORSMiddleware
from loguru import logger
from app.errors import *
from app.queue import Queue
# JSON-RPC entrypoint
api_v1 = jsonrpc.Entrypoint("/v1")

# Server singletons: database, queue and library handler
queue = Queue('redis')  # presumably a Redis-backed work queue — verify app.queue.Queue
# RPC Methods


@api_v1.method(errors=[])
def allocation() -> dict:
    """Get assigned a start for your brick of TMs to compute and submit"""

    # NOTE(review): annotated ``-> dict`` but nothing is returned (implicitly
    # None) — confirm whether the allocation payload is still unimplemented.
    logger.info("allocation requested ")


@api_v1.method(errors=[])
def submission(client: str, start_tm: int, result_map: dict) -> dict:
    """Submit your TM results for an allocated compute task"""

    # NOTE(review): ``client`` and ``result_map`` are unused and no dict is
    # returned despite the ``-> dict`` annotation — looks unimplemented.
    logger.info("submission made for {} ", start_tm)


@api_v1.method()
def network() -> dict:
    """Gets the latest volunteer network information"""
    # NOTE(review): stub — returns None despite the ``-> dict`` annotation.
    pass
Example no. 5
0
        yield
    finally:
        logger.info('Response: %r', ctx.raw_response)


# JSON-RPC entrypoint

# Entrypoint-wide error list: app-specific errors plus the library defaults.
common_errors = [AccountNotFound, AuthError]
common_errors.extend(jsonrpc.Entrypoint.default_errors)

api_v1 = jsonrpc.Entrypoint(
    # Swagger shows for this entrypoint the common parameters gathered by
    # dependencies and common_dependencies:
    #    - json-rpc-parameter 'account_id'
    #    - header parameter 'user-auth-token'
    '/api/v1/jsonrpc',
    errors=common_errors,
    middlewares=[logging_middleware],
    # These dependencies are resolved once for a whole JSON-RPC batch request.
    dependencies=[Depends(get_auth_user)],
    # These dependencies are resolved separately for every JSON-RPC request
    # inside a batch request.
    common_dependencies=[Depends(get_account)],
)


# JSON-RPC methods of this entrypoint

# this json-rpc method has one json-rpc-parameter 'account_id' and one header parameter 'user-auth-token'
@api_v1.method()
def get_balance(
    account: Account = Depends(get_account),
) -> Balance:
    return Balance(
Example no. 6
0
def ep(ep_path):
    """Create a bare JSON-RPC entrypoint mounted at ``ep_path``."""
    entrypoint = jsonrpc.Entrypoint(ep_path)
    return entrypoint
Example no. 7
0
import asyncio
import fastapi_jsonrpc as jsonrpc
from fastapi import Depends
from loguru import logger
from walrus import *

from core.tree import *
from core.errors import *
from core.authorizer import *
from core.reactor import *
# JSON-RPC entrypoint
api_v1 = jsonrpc.Entrypoint('/api/v1')

# Server singletons
merkle_tree = Tree()  # Merkle tree receiving submitted checksums (see ``submit``)
db = Walrus(host="127.0.0.1", port=6379, db=0)  # local Redis connection
auth = Authorizer(db)  # API-key authorizer backed by the Redis db
# NOTE(review): presumably flushes/commits the tree every 30s — confirm Reactor.
reactor = Reactor(tree=merkle_tree, interval=30)

# RPC Methods


@api_v1.method(errors=[ChecksumFormatError, AuthorizationError])
def submit(api_key: str, checksum: str) -> bool:
    """Expect a bytestring in hexadecimal representing the hash digest of the file you want to timestamp. The response will be a boolean indicating if the submission was accepted by the calendar (but the proof of existence is assumed incomplete). Digests submitted this block are idempotent - meaning that you can only timestamp a file once per block."""
    logger.info("Checksum {} submitted for inclusion", checksum)
    if (auth.contains(api_key)):
        return merkle_tree.stamp(checksum)
    else:
        logger.info("The API key was rejected for submit call checksum: {}",
                    checksum)