Example 1
    def create_app(debug=True):
        log_config = LOGGING if not debug else None
        app = Sanic(PROJECT_ID, log_config=log_config)

        app.debug = debug

        app.config.update(SANIC_SETTINGS)

        app.http = http

        # Static files
        app.static('/static', './app/templates/static')

        # Set routes
        app.add_route(index, '/index')
        app.add_route(RoomView.as_view(), '/room')
        app.add_route(UserView.as_view(), '/user')
        app.add_websocket_route(handler=web_socket_chat, uri="chat")

        # Register listener
        app.register_listener(before_server_start, 'before_server_start')
        app.register_listener(after_server_stop, 'after_server_stop')

        # Register middleware
        app.register_middleware(token_middleware)
        app.register_middleware(cookie_middleware, 'response')

        app.blueprint(SANIC_BLUEPRINT)

        logging.info('Creating sanic app')
        MainSetup._app = app
        return app
Example 2
    def __init__(self, daemon):
        app = Sanic('ansible-api')
        # https://pypi.org/project/Sanic-Cors/
        CORS(app)

        app.add_route(controller.Main.as_view(), '/')
        app.add_route(controller.NonBlockTest.as_view(), '/test')
        app.add_route(controller.Command.as_view(), '/command')
        app.add_route(controller.Playbook.as_view(), '/playbook')
        app.add_route(controller.FileList.as_view(), '/filelist')
        app.add_route(controller.FileReadWrite.as_view(), '/fileitem')
        app.add_route(controller.FileExist.as_view(), '/filexist')
        app.add_route(controller.ParseVarsFromFile.as_view(), '/parsevars')
        app.add_websocket_route(controller.Message.websocket, '/message', subprotocols=Config.get('ws_sub'))

        app.config.update(dict(RESPONSE_TIMEOUT=Config.get('timeout')))  # timeout while waiting for a response

        @app.middleware('request')
        async def ip_ban(request):
            if len(Config.get('allow_ip')) and request.ip not in Config.get('allow_ip'):
                return text('Your IP (%s) is not allowed!' % request.ip, status=403)

        # print config contents
        config = Config().__dict__
        config['sign_key'] = len(config['sign_key']) * '*'  # mask signature key
        Tool.LOGGER.debug("Config at start: %s" % json.dumps(config))

        app.run(host=Config.get('host'), port=Config.get('port'), workers=Config.get('workers'), debug=not daemon)
Example 3
    def __init__(self, daemon):
        app = Sanic('ansible-api')

        app.add_route(controller.Main.as_view(), '/')
        app.add_route(controller.NonBlockTest.as_view(), '/test')
        app.add_route(controller.Command.as_view(), '/command')
        app.add_route(controller.Playbook.as_view(), '/playbook')
        app.add_route(controller.FileList.as_view(), '/filelist')
        app.add_route(controller.FileReadWrite.as_view(), '/fileitem')
        app.add_route(controller.FileExist.as_view(), '/filexist')
        app.add_route(controller.ParseVarsFromFile.as_view(), '/parsevars')
        app.add_websocket_route(controller.Message.websocket, '/message', subprotocols=Config.get('ws_sub'))

        app.config.update(dict(RESPONSE_TIMEOUT=Config.get('timeout')))  # timeout while waiting for a response

        @app.middleware('request')
        async def ip_ban(request):
            if len(Config.get('allow_ip')) and request.ip not in Config.get('allow_ip'):
                return text('Your IP (%s) is not allowed!' % request.ip, status=403)

        # print config contents
        config = Config().__dict__
        config['sign_key'] = len(config['sign_key']) * '*'  # mask signature key
        Tool.LOGGER.debug("Config at start: %s" % json.dumps(config))

        app.run(host=Config.get('host'), port=Config.get('port'), workers=Config.get('workers'), debug=not daemon)
Example 4
def dev(ctx, gateway, folder, port, secret):
    """
        Test a factory instance locally
    """
    if gateway is None:
        gateway = get_default_gateway(ctx)

    cwd = os.getcwd()

    log_config = {
        "version": 1,
        "disable_existing_loggers": False,
        "loggers": {
            "sanic.root": {
                "level": "FATAL",
                "propagate": True
            },
            "sanic.error": {
                "level": "FATAL",
                "propagate": True
            },
        },
    }

    app = Sanic("Lager-Factory", log_config=log_config)
    app.config['CONTEXT'] = ctx
    app.config['GATEWAY_ID'] = gateway
    app.config['CWD'] = cwd
    app.config['FOLDER'] = folder
    app.config['PORT'] = port
    app.config['SECRETS'] = secret

    app.add_route(home, '/')
    app.add_route(start, '/factory/start', methods=['POST'])
    app.add_route(stop, '/factory/stop', methods=['POST'])
    app.add_route(runner,
                  '/factory/run-station/new-runner/<session_id:uuid>',
                  methods=['POST'])
    app.add_route(image, '/factory/run-station/img')
    app.add_websocket_route(websocket_handler, '/ws/job/<session_id:uuid>')
    app.static('/static', ASSET_PATH)

    app.signal('server.init.after')(after_start)

    env = Environment(loader=FileSystemLoader(ASSET_PATH),
                      autoescape=select_autoescape())
    app.ctx.jinja = env
    dev_mode = 'LAGER_SANIC_DEV' in os.environ
    app.run(port=port, motd=False, verbosity=0, dev=dev_mode, access_log=False)
Example 5
def create_web_routes(
    app: Sanic,
    receive_protocol: T_WebProtocol,
    uri: str,
    request_handler: Any,
):
    if receive_protocol == "http":
        app.add_route(
            handler=request_handler,
            uri=uri,
            methods=["POST"],
        )

    else:
        app.add_websocket_route(
            handler=request_handler,
            uri=uri,
        )
Example 6
def create_app(config):
    app = Sanic(__name__)
    app.config.from_object(config)

    rethinkdb = RethinkDB(app)

    if app.config.DROP_REMAKE_DB and input(
            'Really drop the DB? (y/N)').lower() == 'y':
        wait(rethinkdb.drop_and_remake(SCHEMA))

    app.blueprint(rpc)
    app.blueprint(rest)

    app.add_websocket_route(on_connect, '/websocket')

    @app.listener('before_server_start')
    async def before_server_start(app_, loop):
        # Per process specific setup
        app_.rdb_connection = await rethinkdb.connection()
        app_.websocket_clients = WebSocketClients()
        loop.create_task(subscribe_and_broadcast(app_))

    return app
Example 7
class CQHttp(AsyncApi):
    """
    The main class of the CQHTTP bot. It is responsible for controlling the
    running of the whole bot, registering event handlers, connecting to
    CQHTTP, calling the CQHTTP API, and so on.

    Internally it maintains a `Sanic` object as the web server, exposing the
    HTTP endpoint ``/`` and the WebSocket endpoints ``/ws/``, ``/ws/api/`` and
    ``/ws/event/`` for CQHTTP to connect to.

    Since the base class `api_impl.AsyncApi` inherits the `__getattr__` magic
    method from `api.Api`, CQHTTP APIs can be called directly on the bot
    object, for example:

    ```py
    await bot.send_private_msg(user_id=10001000, message='Hello')
    friends = await bot.get_friend_list()
    ```

    APIs can also be called through the `CQHttp.call_action` method, for
    example:

    ```py
    await bot.call_action('set_group_whole_ban', group_id=10010)
    ```

    Both ways of calling the API ultimately send the request to CQHTTP and
    obtain the result through the `CQHttp.api` property.
    """
    def __init__(self,
                 *,
                 api_root: Optional[str] = None,
                 access_token: Optional[str] = None,
                 secret: Optional[AnyStr] = None,
                 message_class: Optional[type] = None,
                 api_timeout_sec: Optional[float] = None,
                 **kwargs):
        """
        The ``api_root`` parameter is the URL of the CQHTTP API; the
        ``access_token`` and ``secret`` parameters are the corresponding
        values from the CQHTTP configuration.

        The ``message_class`` parameter is the message class used to convert
        `Event.message`; `Message` can be used, for example:

        ```py
        from aiocqhttp import CQHttp, Message

        bot = CQHttp(message_class=Message)

        @bot.on_message
        async def handler(event):
            # here event.message has already been converted to a Message object
            assert isinstance(event.message, Message)
        ```

        The ``api_timeout_sec`` parameter sets the timeout for CQHTTP API
        requests, in seconds.
        """
        self._api = UnifiedApi()
        self._sync_api = None
        self._bus = EventBus()
        self._loop = None

        self._server_app = Sanic(__name__)
        self._server_app.register_listener(self._before_serving,
                                           'before_server_start')
        self._server_app.add_route(self._handle_http_event,
                                   '/',
                                   methods=['POST'])
        for p in ('/ws', '/ws/event', '/ws/api'):
            self._server_app.add_websocket_route(self._handle_wsr, p)

        self._configure(api_root, access_token, secret, message_class,
                        api_timeout_sec)

    def _configure(self,
                   api_root: Optional[str] = None,
                   access_token: Optional[str] = None,
                   secret: Optional[AnyStr] = None,
                   message_class: Optional[type] = None,
                   api_timeout_sec: Optional[float] = None):
        self._message_class = message_class
        api_timeout_sec = api_timeout_sec or 60  # wait for 60 secs by default
        self._access_token = access_token
        self._secret = secret
        self._api._http_api = HttpApi(api_root, access_token, api_timeout_sec)
        self._wsr_api_clients = {}  # connected wsr api clients
        self._api._wsr_api = WebSocketReverseApi(self._wsr_api_clients,
                                                 api_timeout_sec)

    async def _before_serving(self, app, loop):
        self._loop = asyncio.get_running_loop()

    @property
    def asgi(self) -> Callable[[dict, Callable, Callable], Awaitable]:
        """The ASGI app object; it can be deployed with any ASGI-capable web server."""
        return self._server_app

    @property
    def server_app(self) -> Sanic:
        """The Sanic app object; it can be used for fine-grained control of
        the server, or to add new routes, etc."""
        return self._server_app

    @property
    def logger(self) -> logging.Logger:
        """The logger of the app, equivalent to ``bot.server_app.logger``."""
        return logger

    @property
    def loop(self) -> Optional[asyncio.AbstractEventLoop]:
        """The event loop the app runs in; it is `None` before the app starts running."""
        return self._loop

    @property
    def api(self) -> AsyncApi:
        """The `api_impl.AsyncApi` object, used to call the CQHTTP API asynchronously."""
        return self._api

    @property
    def sync(self) -> SyncApi:
        """
        The `api_impl.SyncApi` object, used to call the CQHTTP API synchronously, for example:

        ```py
        @bot.on_message('group')
        def sync_handler(event):
            user_info = bot.sync.get_group_member_info(
                group_id=event.group_id, user_id=event.user_id
            )
            ...
        ```
        """
        if not self._sync_api:
            if not self._loop:
                raise TimingError('attempt to access sync api '
                                  'before bot is running')
            self._sync_api = SyncApi(self._api, self._loop)
        return self._sync_api

    def run(self, host: str = None, port: int = None, *args, **kwargs) -> None:
        """Run the bot object, which simply runs the underlying app; the
        arguments are passed through to its ``run`` method."""
        if 'use_reloader' not in kwargs:
            kwargs['use_reloader'] = False
        self._server_app.run(host=host, port=port, *args, **kwargs)

    def run_task(self,
                 host: str = None,
                 port: int = None,
                 *args,
                 **kwargs) -> Coroutine[None, None, None]:
        if 'use_reloader' not in kwargs:
            kwargs['use_reloader'] = False
        return self._server_app.create_server(host=host,
                                              port=port,
                                              *args,
                                              **kwargs)

    async def call_action(self, action: str, **params) -> Any:
        """
        Call the CQHTTP API through the internally maintained concrete
        `api_impl.AsyncApi` implementation. ``action`` is the name of the API
        action to call, and ``**params`` are the parameters the API requires.
        """
        return await self._api.call_action(action=action, **params)

    async def send(self, event: Event, message: Union[str, Dict[str, Any],
                                                      List[Dict[str, Any]]],
                   **kwargs) -> Optional[Dict[str, Any]]:
        """
        Send a message to the subject that triggered the event.

        The ``event`` parameter is the event object and ``message`` is the
        message to send. An extra ``at_sender`` keyword argument can be passed
        to control whether to @-mention the sender of the event; it defaults
        to `False`. Any other keyword arguments are passed straight through as
        parameters of the CQHTTP API ``send_msg``.
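
        For example (a minimal illustration):

        ```py
        # reply to whoever triggered the event, @-mentioning the sender
        # when the event came from a group
        await bot.send(event, 'Hello!', at_sender=True)
        ```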
        """
        at_sender = kwargs.pop('at_sender', False) and 'user_id' in event

        keys = {
            'message_type', 'user_id', 'group_id', 'discuss_id', 'auto_escape'
        }
        params = {k: v for k, v in event.items() if k in keys}
        params['message'] = message
        params.update(kwargs)

        if 'message_type' not in params:
            if 'group_id' in params:
                params['message_type'] = 'group'
            elif 'discuss_id' in params:
                params['message_type'] = 'discuss'
            elif 'user_id' in params:
                params['message_type'] = 'private'

        if at_sender and params['message_type'] != 'private':
            params['message'] = MessageSegment.at(params['user_id']) + \
                                MessageSegment.text(' ') + params['message']

        return await self.send_msg(**params)

    def subscribe(self, event_name: str, func: Callable) -> None:
        """Register an event handler for the given event name."""
        self._bus.subscribe(event_name, ensure_async(func))

    def unsubscribe(self, event_name: str, func: Callable) -> None:
        """Unregister an event handler for the given event name."""
        self._bus.unsubscribe(event_name, func)

    def on(self, *event_names: str) -> Callable:
        """
        Register an event handler; used as a decorator, for example:

        ```py
        @bot.on('notice.group_decrease', 'notice.group_increase')
        async def handler(event):
            pass
        ```

        The arguments are the names of the events to register for, formatted
        as dot-separated event types of each level; see `Event.name`.

        It can be used multiple times: one function may serve as the handler
        of several events, and one event may have several handlers.

        Handlers can be registered at different granularities, for example:

        ```py
        @bot.on('message')
        async def handle_message(event):
            pass

        @bot.on('message.private')
        async def handle_private_message(event):
            pass

        @bot.on('message.private.friend')
        async def handle_friend_private_message(event):
            pass
        ```

        When a private message from a friend is received,
        ``handle_friend_private_message`` runs first, then
        ``handle_private_message``, and finally ``handle_message``.
        """
        def deco(func: Callable) -> Callable:
            for name in event_names:
                self.subscribe(name, func)
            return func

        return deco

    on_message = _deco_maker('message')
    __pdoc__['CQHttp.on_message'] = """
    Register a message event handler; used as a decorator, for example:

    ```
    @bot.on_message('private')
    async def handler(event):
        pass
    ```

    This is equivalent to:

    ```
    @bot.on('message.private')
    async def handler(event):
        pass
    ```

    It can also be used without arguments, registering the function as a handler for all message events, for example:

    ```
    @bot.on_message
    async def handler(event):
        pass
    ```
    """

    on_notice = _deco_maker('notice')
    __pdoc__['CQHttp.on_notice'] = "Register a notice event handler; used as a decorator, same usage as above."

    on_request = _deco_maker('request')
    __pdoc__['CQHttp.on_request'] = "Register a request event handler; used as a decorator, same usage as above."

    on_meta_event = _deco_maker('meta_event')
    __pdoc__['CQHttp.on_meta_event'] = "Register a meta-event handler; used as a decorator, same usage as above."

    async def _handle_http_event(self, request: Request) -> HTTPResponse:
        if self._secret:
            if 'X-Signature' not in request.headers:
                self.logger.warning('signature header is missing')
                abort(401)

            sec = self._secret
            sec = sec.encode('utf-8') if isinstance(sec, str) else sec
            sig = hmac.new(sec, request.body, 'sha1').hexdigest()
            if request.headers['X-Signature'] != 'sha1=' + sig:
                self.logger.warning('signature header is invalid')
                abort(403)

        payload = request.json
        if not isinstance(payload, dict):
            abort(400)

        if request.headers['X-Self-ID'] in self._wsr_api_clients:
            self.logger.warning(
                'there is already a reverse websocket api connection, '
                'so the event may be handled twice.')

        resp = await self._handle_event(payload)
        if isinstance(resp, dict):
            return response.json(resp)
        return response.empty()

    async def _handle_wsr(self, request: Request,
                          websocket: WebSocketCommonProtocol) -> None:
        if self._access_token:
            auth = request.headers.get('Authorization', '')
            m = re.fullmatch(r'(?:[Tt]oken|[Bb]earer) (?P<token>\S+)', auth)
            if not m:
                self.logger.warning('authorization header is missing')
                abort(401)

            token_given = m.group('token').strip()
            if token_given != self._access_token:
                self.logger.warning('authorization header is invalid')
                abort(403)

        role = request.headers['X-Client-Role'].lower()
        conn = WebSocketConnection(request, websocket)
        if role == 'event':
            await self._handle_wsr_event(conn)
        elif role == 'api':
            await self._handle_wsr_api(conn)
        elif role == 'universal':
            await self._handle_wsr_universal(conn)

    async def _handle_wsr_event(self, websocket: WebSocketConnection) -> None:
        try:
            while True:
                try:
                    payload = json.loads(await websocket.recv())
                except ValueError:
                    payload = None

                if not isinstance(payload, dict):
                    # ignore invalid payload
                    continue

                asyncio.create_task(self._handle_event_with_response(payload))
        finally:
            pass

    async def _handle_wsr_api(self, websocket: WebSocketConnection) -> None:
        self._add_wsr_api_client(websocket)
        try:
            while True:
                try:
                    ResultStore.add(json.loads(await websocket.recv()))
                except ValueError:
                    pass
        finally:
            self._remove_wsr_api_client(websocket)

    async def _handle_wsr_universal(self,
                                    websocket: WebSocketConnection) -> None:
        self._add_wsr_api_client(websocket)
        try:
            while True:
                try:
                    payload = json.loads(await websocket.recv())
                except ValueError:
                    payload = None

                if not isinstance(payload, dict):
                    # ignore invalid payload
                    continue

                if 'post_type' in payload:
                    # this is an event
                    asyncio.create_task(
                        self._handle_event_with_response(payload))
                elif payload:
                    # this is an API result
                    ResultStore.add(payload)
        finally:
            self._remove_wsr_api_client(websocket)

    def _add_wsr_api_client(self, websocket: WebSocketConnection) -> None:
        self_id = websocket.headers['X-Self-ID']
        self._wsr_api_clients[self_id] = websocket

    def _remove_wsr_api_client(self, websocket: WebSocketConnection) -> None:
        self_id = websocket.headers['X-Self-ID']
        if self_id in self._wsr_api_clients:
            # we must check the existence here,
            # because we allow wildcard ws connections,
            # that is, the self_id may be '*'
            del self._wsr_api_clients[self_id]

    async def _handle_event(self, payload: Dict[str, Any]) -> Any:
        ev = Event.from_payload(payload)
        if not ev:
            return

        event_name = ev.name
        self.logger.info(f'received event: {event_name}')

        if self._message_class and 'message' in ev:
            ev['message'] = self._message_class(ev['message'])
        results = list(
            filter(lambda r: r is not None, await
                   self._bus.emit(event_name, ev)))
        # return the first non-none result
        return results[0] if results else None

    async def _handle_event_with_response(self, payload: Dict[str,
                                                              Any]) -> None:
        resp = await self._handle_event(payload)
        if isinstance(resp, dict):
            payload.pop('message', None)  # avoid wasting bandwidth
            payload.pop('raw_message', None)
            payload.pop('comment', None)
            payload.pop('sender', None)
            try:
                await self._api.call_action(
                    self_id=payload['self_id'],
                    action='.handle_quick_operation_async',
                    context=payload,
                    operation=resp)
            except Error:
                pass
Example 8
                experiment_uuid=experiment_uuid)
            # if experiment_uuid in request.app.experiment_logs_consumers:
            #     consumer = request.app.experiment_logs_consumers.pop(experiment_uuid, None)
            #     if consumer:
            #         consumer.stop()
            should_quit = True

        if should_quit:
            return

        await asyncio.sleep(SOCKET_SLEEP)


# Job urls
app.add_websocket_route(
    job_resources,
    '/v1/<username>/<project_name>/experiments/<experiment_sequence>/jobs/<job_sequence>/resources'
)
app.add_websocket_route(
    job_resources,
    '/ws/v1/<username>/<project_name>/experiments/<experiment_sequence>/jobs/<job_sequence>/resources'
)

app.add_websocket_route(
    job_logs,
    '/v1/<username>/<project_name>/experiments/<experiment_sequence>/jobs/<job_sequence>/logs'
)
app.add_websocket_route(
    job_logs,
    '/ws/v1/<username>/<project_name>/experiments/<experiment_sequence>/jobs/<job_sequence>/logs'
)
Example 9
    def __init__(self, channels, rate, stream_callback, port, proxy_suffix):
        self.rate = rate
        self.channels = channels
        self.stream_callback = stream_callback
        self.thread = None
        self.server = None
        self._is_active = False

        app = Sanic(__name__)

        async def bridge(request, ws):
            while True:
                _ = await ws.recv()
                buffer = self.stream_callback(None, None, None, None)
                await ws.send(buffer.reshape(-1, 1, order='F').tobytes())

        def loop_in_thread(loop, coro):
            loop.run_until_complete(coro)
            loop.run_forever()

        app.add_websocket_route(bridge, '/')
        coro = app.create_server(host="0.0.0.0",
                                 port=8765,
                                 debug=False,
                                 access_log=False,
                                 return_asyncio_server=True)
        self.loop = asyncio.new_event_loop()
        self.thread = threading.Thread(target=loop_in_thread,
                                       args=(self.loop, coro))
        # self.thread.daemon = True  # allow program to shutdown even if the thread is alive

        url_suffix = f':{port}' if proxy_suffix is None else proxy_suffix

        self.client = Javascript(f"""
var sampleRate = {self.rate};
var channels = {self.channels};
var urlSuffix = "{url_suffix}";
window.pya = {{ bufferThresh: 0.2 }}
            """
                                 r"""
var processedPackages = 0;
var latePackages = 0;
var badPackageRatio = 1;

function resolveProxy() {
  let reg = /\/notebooks.*ipynb/g
  let res = window.location.pathname.replace(reg, "");
  return res 
}

var protocol = (window.location.protocol == 'https:') ? 'wss://' : 'ws://'
var startTime = 0;
var context = new (window.AudioContext || window.webkitAudioContext)();

context.onstatechange = function() {
    console.log("PyaJSClient: AudioContext StateChange!")
    if (context.state == "running") {
        var ws = new WebSocket(protocol+window.location.hostname+resolveProxy()+urlSuffix);
        ws.binaryType = 'arraybuffer';
        window.ws = ws;

        ws.onopen = function() {
            console.log("PyaJSClient: Websocket connected.");
            startTime = context.currentTime;
            ws.send("G");
        };

        ws.onmessage = function (evt) {
            if (evt.data) {
                processedPackages++;
                var buf = new Float32Array(evt.data)
                var duration = buf.length / channels
                var buffer = context.createBuffer(channels, duration, sampleRate)
                for (let i = 0; i < channels; i++) {
                    updateChannel(buffer, buf.slice(i * duration, (i + 1) * duration), i)
                }
                var source = context.createBufferSource()
                source.buffer = buffer
                source.connect(context.destination)
                if (startTime > context.currentTime) {
                    source.start(startTime)
                    startTime += buffer.duration
                } else {
                    latePackages++;
                    badPackageRatio = latePackages / processedPackages
                    if (processedPackages > 50) {
                        console.log("PyaJSClient: Dropped sample ratio is " + badPackageRatio.toFixed(2))
                        if (badPackageRatio > 0.05) {
                            let tr = window.pya.bufferThresh
                            window.pya.bufferThresh = (tr > 0.01) ? tr - 0.03 : 0.01;
                            console.log("PyaJSClient: Decrease buffer delay to " + window.pya.bufferThresh.toFixed(2))
                        }
                        latePackages = 0;
                        processedPackages = 0;
                    }
                    source.start()
                    startTime = context.currentTime + buffer.duration
                }
                setTimeout(function() {ws.send("G")},
                    (startTime - context.currentTime) * 1000 * window.pya.bufferThresh)
            }
        };
    }
};

var updateChannel = function(buffer, data, channelId) {
    buffer.copyToChannel(data, channelId, 0)
}

// Fallback for browsers without copyToChannel Support
if (! AudioBuffer.prototype.copyToChannel) {
    console.log("PyaJSClient: AudioBuffer.copyToChannel not supported. Falling back...")
    updateChannel = function(buffer, data, channelId) {
        buffer.getChannelData(channelId).set(data);
    }
}

function resumeContext() {
    context.resume();
    var codeCells = document.getElementsByClassName("input_area")
    for (var i = 0; i < codeCells.length; i++) {
        codeCells[i].removeEventListener("focusin", resumeContext)
    }
}

if (context.state == "suspended") {
    console.log("PyaJSClient: AudioContext not running. Waiting for user input...")
    var codeCells = document.getElementsByClassName("input_area")
    for (var i = 0; i < codeCells.length; i++) {
        codeCells[i].addEventListener("focusin", resumeContext)
    }
}

console.log("PyaJSClient: Websocket client loaded.")
            """)
Example 10
            # raise sys.exc_info()
            await self.players[p_id].ws.send(
                JSON.dumps({
                    'type': 'error',
                    'data': str(sys.exc_info()[0])
                }))

    async def stream(self, request, ws):
        while True:
            data = await ws.recv()
            data = JSON.loads(data)

            if 'type' in data:
                await self.type_router(data, ws)
            else:
                print(f'unknown data: {data}')


app.static('/', './web-app/index.html')
app.static('/web-app/', './web-app/')
app.add_websocket_route(Stream(), "/updates")

if __name__ == "__main__":
    port = os.getenv('PORT')
    # port = None
    print("Starting Spades App")
    if port:
        app.run(host="0.0.0.0", port=port, protocol=WebSocketProtocol)
    else:
        app.run(host="0.0.0.0", port=8080, protocol=WebSocketProtocol)
Example 11
root = logging.getLogger()
root.setLevel(logging.DEBUG)

ch = logging.StreamHandler(sys.stdout)
ch.setLevel(logging.DEBUG)
formatter = logging.Formatter(
    '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)

app = Sanic()


# create DB connection with the event loop created by the sanic app
@app.listener('after_server_start')
async def startup_stuff(app, loop):
    db = connect(loop=loop, **db_config)
    if db:
        setattr(app, 'db', db)
        # create channel for websocket subscribers
        app.pubsub = MongoChannel(name='pubsub', db=db)
        app.pubsub.kickoff()


for route in routes:
    app.add_route(methods=route.methods, uri=route.path, handler=route.handler)
# add route for websockets
app.add_websocket_route(uri='/ws/', handler=resource_event_websocket)

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)
Example 12
                response = await f(request, *args, **kwargs)
                return response
            else:
                return json({'status': 'not_authorized'}, 403)

        return decorated_function

    return decorator


async def check_login(request):
    return True


@app.route('/me')
@authorized()
async def meinfo(request):
    return json({'user': '******'}, status=403)


if __name__ == '__main__':
    #app.register_listener(setup_db, 'before_server_start')
    #app.add_task(notify_server_started_after_five_seconds)
    app.add_route(test_add_route, '/add/<name:str>', methods=['GET'])
    app.add_websocket_route(test_add_websocket_route, '/test_feed')
    app.run(host='127.0.0.1',
            port=8000,
            debug=True,
            access_log=False,
            auto_reload=True)
Example 13
class Jawaf(object):
    """Wraps a Sanic instance (server), and manages db connection pools, sanic routes, and the session."""
    def __init__(self, name='default', testing=False):
        """Initialize Jawaf instance. Set up routes, database connections, and session.
        :param name: String. Sanic instance name. (Default: 'default')
        :param testing: Boolean. Whether or not testing framework is active.
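
        A minimal startup sketch (illustrative; the project name, host and
        port are arbitrary)::

            waf = Jawaf('myproject')
            waf.run(host='127.0.0.1', port=8080)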
        """
        self.name = name
        self.server = Sanic(name)
        self.testing = testing

        self._db_pools = {}
        self._session_pool = None
        global _active_instance
        _active_instance = self

        self.add_routes(routes_import=os.path.join(settings.PROJECT_DIR,
                                                   'routes.py'),
                        base_path=settings.BASE_DIR)
        self.init_databases()
        self.init_session()

    def add_route(self, *args, **options):
        """Wraps Sanic.add_route"""
        self.server.add_route(*args, **options)

    def add_websocket_route(self, *args, **options):
        """Wraps Sanic.add_websocket_route"""
        self.server.add_websocket_route(*args, **options)

    def add_routes(self, routes_import, base_path, prefix=''):
        """Recursively add routes using routes.py files.
        :param routes_import: String. Relative path to the routes.py file to parse.
        :param base_path: String. Base path for the jawaf project.
        :param prefix: String. Prefix url path (for recursion) - passed in via include directives.
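
        An illustrative routes.py (the key names follow the handling in the
        loop below; handler names are hypothetical)::

            routes = [
                {'uri': '/status', 'handler': status_handler, 'methods': ['GET']},
                {'uri': '/chat', 'handler': chat_handler, 'websocket': True},
                {'uri': '/blog', 'include': 'blog'},
            ]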
        """
        try:
            if routes_import[0] == '/':
                module = import_module(os.path.splitext(routes_import)[0])
            else:
                module = import_module(routes_import)
        except ImportError:
            module = None
        if module is None:
            routes_spec = importlib.util.spec_from_file_location(
                '%s%s.routes' % (self.name, prefix), routes_import)
            if not routes_spec:
                raise Exception('Error processing routes file: %s' %
                                routes_import)
            module = importlib.util.module_from_spec(routes_spec)
            routes_spec.loader.exec_module(module)
        for route in module.routes:
            if prefix:
                route['uri'] = ''.join([prefix, route['uri']])
            if 'include' in route:
                try:
                    import_module(route['include'])
                    # Treat as a package
                    self.add_routes('.'.join([route['include'], 'routes']),
                                    base_path=base_path,
                                    prefix=route['uri'])
                except ImportError:
                    # Treat as a relative path
                    self.add_routes(os.path.join(base_path, route['include'],
                                                 'routes.py'),
                                    base_path=base_path,
                                    prefix=route['uri'])
            elif 'websocket' in route and route['websocket'] is True:
                self.add_websocket_route(**route)
            else:
                self.add_route(**route)

    async def close_database_pools(self):
        """Asynchronously close all open database connection pools."""
        for database in self._db_pools:
            await self._db_pools[database].close()

    async def connection(self, database=None):
        """Asynchronously return a connection from the named database connection pool.
        :param database: String. Name of the database key in settings.py to call. (Default: settings.DEFAULT_DATABASE_KEY)
        """
        if not database:
            database = settings.DEFAULT_DATABASE_KEY
        return await self._db_pools[database].acquire()

    async def create_database_pool(self, database=None):
        """Create the database pool for the specified database. Used for unit tests, handled by Sanic blueprint when server is running.
        :param database: String. Name of the database key in settings.py to call. (Default: settings.DEFAULT_DATABASE_KEY)
        """
        if not database:
            database = settings.DEFAULT_DATABASE_KEY
        from jawaf.conf import settings
        connection_settings = settings.DATABASES[database].copy()
        connection_settings.pop(
            'engine'
        )  # Pop out engine before passing it into the create_pool method on the db backend.
        self._db_pools[database] = await settings.DB_BACKEND.create_pool(
            **connection_settings)

    async def get_session_pool(self):
        """Asynchronously return a connection from the session connection pool."""
        if not self._session_pool:
            self._session_pool = await asyncio_redis.Pool.create(
                **settings.SESSION)
        return self._session_pool

    def init_databases(self):
        """Initialize database connection pools from settings.py, 
        setting up Sanic blueprints for server start and stop."""
        for database in settings.DATABASES:
            db_blueprint = Blueprint('%s_db_blueprint_%s' %
                                     (self.name, database))
            connection_settings = settings.DATABASES[database].copy()
            connection_settings.pop(
                'engine'
            )  # Pop out engine before passing it into the create_pool method on the db backend.

            @db_blueprint.listener('before_server_start')
            async def setup_connection_pool(app, loop):
                self._db_pools[
                    database] = await settings.DB_BACKEND.create_pool(
                        **connection_settings)

            @db_blueprint.listener('after_server_stop')
            async def close_connection_pool(app, loop):
                if database in self._db_pools and self._db_pools[database]:
                    await self._db_pools[database].close()

            self.server.blueprint(db_blueprint)

    def init_session(self):
        """Initialize the session connection pool, using either in memory interface or redis."""
        interface_type = settings.SESSION.pop('interface')
        if self.testing:
            # Set the session to in memory for unit tests.
            # TODO: Revisit this!
            interface_type = 'memory'
        if interface_type == 'memory':
            self._session_interface = InMemorySessionInterface()
        elif interface_type == 'redis':
            self._session_interface = RedisSessionInterface(
                self.get_session_pool())
        else:
            raise Exception('Unexpected session type "%s".' % interface_type)

        @self.server.middleware('request')
        async def add_session_to_request(request):
            await self._session_interface.open(request)

        @self.server.middleware('response')
        async def save_session(request, response):
            await self._session_interface.save(request, response)

    async def release(self, connection, database=None):
        """Asynchronously release a connection to the database specified.
        :param connection: Connection object. Connection to database to release.
        :param database: String. Database name from settings.py"""
        if not database:
            database = settings.DEFAULT_DATABASE_KEY
        await self._db_pools[database].release(connection)

    def run(self, *args, **options):
        """Wrapper for Sanic instance run method."""
        return self.server.run(*args, **options)
Example 14
class Server(object):
    def __init__(self):

        self.app = Sanic()
        self.blockchain = Blockchain()
        self.sockets = []
        self.app.add_route(self.blocks, '/blocks', methods=['GET'])
        self.app.add_route(self.block, '/block/<hash>', methods=['GET'])
        self.app.add_route(self.mine_block, '/mineBlock', methods=['POST'])
        self.app.add_route(self.peers, '/peers', methods=['GET'])
        self.app.add_route(self.transaction,
                           '/transaction/<id>',
                           methods=['GET'])
        self.app.add_route(self.peers,
                           '/unspentTransactionOutputs',
                           methods=['GET'])
        self.app.add_route(self.peers,
                           '/myUnspentTransactionOutputs',
                           methods=['GET'])
        self.app.add_route(self.balance, '/balance', methods=['GET'])
        self.app.add_route(self.address, '/address', methods=['GET'])
        self.app.add_route(self.mine_transaction,
                           '/mineTransaction',
                           methods=['POST'])
        self.app.add_route(self.send_transaction,
                           '/sendTransaction',
                           methods=['POST'])
        self.app.add_route(self.peers, '/transactionPool', methods=['POST'])

        self.app.add_route(self.add_peer, '/addPeer', methods=['POST'])
        self.app.add_websocket_route(self.p2p_handler, '/')

    async def transaction(self, request, id):
        try:
            matched_tx = next(tx for tx in reduce(
                lambda a, b: a + b,
                map(lambda block: block.data, self.blockchain.blocks[1:]))
                              if tx.id == id)
        except StopIteration as e:
            return json({
                'status': False,
                'message': "couldn't find the transaction"
            })

        return json({'data': matched_tx})

    async def balance(self, request):
        return json({'balance': self.blockchain.get_account_balance()})

    async def address(self, request):
        return json({'address': get_public_from_wallet()})

    async def blocks(self, request):
        return json(self.blockchain.blocks)

    async def block(self, request, hash):
        matched_block = next(block for block in self.blockchain.blocks
                             if block.hash == hash)
        return json(matched_block)

    async def mine_transaction(self, request):

        if self.blockchain.construct_next_block_with_transaction(
                request.json["address"], request.json["amount"]):
            response = {'status': True}
        else:
            response = {
                'status': False,
                'message': 'failed to create the transaction'
            }
        return json(response)

    async def send_transaction(self, request):

        if self.blockchain.send_transaction(request.json["address"],
                                            request.json["amount"]):
            response = {'status': True}
        else:
            response = {'status': False,
                        'message': 'failed to send the transaction'}
        return json(response)

    async def mine_block(self, request):

        try:
            if self.blockchain.construct_next_block():
                response = {'status': True}
            else:
                response = {
                    'status': False,
                    'message': 'failed to create the transaction'
                }
        except KeyError as e:
            response = {"status": False, "message": "pass value in data key"}
        # await self.broadcast(self.response_latest_msg())
        return json(response)

    async def peers(self, request):
        peers = [
            "{}:{}".format(x.remote_address[0], x.remote_address[1])
            for x in self.sockets
        ]
        return json(peers)

    async def add_peer(self, request):
        import asyncio
        asyncio.ensure_future(self.connect_to_peers([request.json["peer"]]),
                              loop=asyncio.get_event_loop())
        return json({"status": True})

    async def connect_to_peers(self, newPeers):
        for peer in newPeers:
            logger.info(peer)
            try:
                ws = await websockets.connect(peer)

                await self.init_connection(ws)
            except Exception as e:
                logger.info(str(e))

    # initialize the P2P WebSocket server handler
    async def p2p_handler(self, request, ws):
        logger.info('listening websocket p2p port on: %d' % port)

        try:
            await self.init_connection(ws)
        except (ConnectionClosed):
            await self.connection_closed(ws)

    async def connection_closed(self, ws):

        logger.critical("connection failed to peer")
        self.sockets.remove(ws)

    async def init_connection(self, ws):

        self.sockets.append(ws)
        await ws.send(JSON.dumps(self.query_chain_length_msg()))

        while True:
            await self.init_message_handler(ws)

    async def init_message_handler(self, ws):
        data = await ws.recv()
        message = JSON.loads(data)
        logger.info('Received message: {}'.format(data))

        await {
            QUERY_LATEST: self.send_latest_msg,
            QUERY_ALL: self.send_chain_msg,
            RESPONSE_BLOCKCHAIN: self.handle_blockchain_response
        }[message["type"]](ws, message)

    async def send_latest_msg(self, ws, *args):
        await ws.send(JSON.dumps(self.response_latest_msg()))

    async def send_chain_msg(self, ws, *args):

        await ws.send(JSON.dumps(self.response_chain_msg()))

    def response_chain_msg(self):
        return {
            'type': RESPONSE_BLOCKCHAIN,
            'data':
            JSON.dumps([block.dict() for block in self.blockchain.blocks])
        }

    def response_latest_msg(self):

        return {
            'type': RESPONSE_BLOCKCHAIN,
            'data': JSON.dumps([self.blockchain.get_latest_block().dict()])
        }

    def response_transaction_pool_msg(self):
        return {'data': JSON.dumps(get_transaction_pool())}

    async def handle_blockchain_response(self, ws, message):

        received_blocks = sorted(JSON.loads(message["data"]),
                                 key=lambda k: k['index'])
        logger.info(received_blocks)
        latest_block_received = received_blocks[-1]
        latest_block_held = self.blockchain.get_latest_block()
        if latest_block_received["index"] > latest_block_held.index:
            logger.info('blockchain possibly behind. We got: ' +
                        str(latest_block_held.index) + ' Peer got: ' +
                        str(latest_block_received["index"]))
            if latest_block_held.hash == latest_block_received[
                    "previous_hash"]:
                logger.info("We can append the received block to our chain")

                self.blockchain.blocks.append(Block(**latest_block_received))
                await self.broadcast(self.response_latest_msg())
            elif len(received_blocks) == 1:
                logger.info("We have to query the chain from our peer")
                await self.broadcast(self.query_all_msg())
            else:
                logger.info(
                    "Received blockchain is longer than current blockchain")
                await self.replace_chain(received_blocks)
        else:
            logger.info(
                'received blockchain is not longer than current blockchain. Do nothing'
            )

    async def replace_chain(self, newBlocks):

        try:

            if self.blockchain.is_valid_chain(newBlocks) and len(
                    newBlocks) > len(self.blockchain.blocks):
                logger.info(
                    'Received blockchain is valid. Replacing current blockchain with '
                    'received blockchain')
                self.blockchain.blocks = [
                    Block(**block) for block in newBlocks
                ]
                await self.broadcast(self.response_latest_msg())
            else:
                logger.info('Received blockchain invalid')
        except Exception as e:
            logger.info("Error in replace chain" + str(e))

    def handle_received_transaction(self, transaction):
        add_to_transaction_pool(transaction,
                                self.blockchain.get_unspent_tx_outs())

    def query_chain_length_msg(self):

        return {'type': QUERY_LATEST}

    def query_all_msg(self):

        return {'type': QUERY_ALL}

    def query_transaction_pool_msg(self):
        return {'type': QUERY_TRANSACTION_POOL, 'data': None}

    async def broadcast(self, message):

        for socket in self.sockets:
            logger.info(socket)
            await socket.send(JSON.dumps(message))
Example 15
from sanic import Sanic


async def echo(request, ws):
    while True:
        data = await ws.recv()
        print(data)
        if data == "close":
            await ws.close()
        else:
            await ws.send(data + "/answer")


app = Sanic(__name__)
app.add_websocket_route(echo, "/ws")
app.run(host="0.0.0.0", port=8080, debug=True)
Example 16
from sanic import Sanic
from sanic.request import Request
from sanic.response import json, HTTPResponse
from sanic_session import InMemorySessionInterface

from dispatcher.game_dispatcher import GameDispatcher
from dispatcher.user_dispatcher import UserDispatcher
from db_manager import db_manager

app = Sanic(__name__)
app.add_route(UserDispatcher.dispatch, '/users', methods=['POST'])
app.add_websocket_route(GameDispatcher.game, '/game')
app.add_websocket_route(GameDispatcher.hello, '/hello')
app.session_interface = InMemorySessionInterface()


@app.route('/')
async def index(_request: Request) -> HTTPResponse:
    return json({"Hello": "ICONDICE world"})


@app.route('/db')
async def db(_request: Request):
    return json(db_manager.user_data)


if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000, debug=True)
Example 17
def get_app():
    app = Sanic()
    app.add_route(index, '/')
    app.add_websocket_route(websocket, '/ws')
    return app
Example 18

@app.get('/index')
async def index_url_for(request):
    # specifying the HTTP method is not supported here
    url = app.url_for('args',
                      cid=3,
                      args=request.args,
                      _anchor='anchor',
                      _scheme='http',
                      _external=True,
                      _server='config.media.interet.com:8000')
    return response.redirect(url)


app.add_websocket_route(wc, uri='/wc')

app.add_route(handler=favicon, uri='/favicon.ico', methods=['GET'])

index_bp = Blueprint('index_bp',
                     strict_slashes=False,
                     url_prefix='/heheda',
                     version=1)

# http://127.0.0.1:8000/heheda/static/image.jpg
# http://127.0.0.1:8000/heheda/static/%E8%90%8D%E5%A7%90-95984272643--44.mp4
# index_bp.static('/static', './video', stream_large_files=True)
# When stream_large_files is True, Sanic uses file_stream() instead of file() to serve
# static files. The default chunk size is 1 KB, but you can change it as needed.
chunk_size = 1024 * 1024 * 8  # 8 MB
index_bp.static('/static', './video', stream_large_files=chunk_size)
index_bp.static('/template',
Example 19
from dotenv import load_dotenv
from sanic import Sanic

import settings
from app import handlers

load_dotenv(".env-test")

app = Sanic()
app.config.from_object(settings)

app.add_websocket_route(handlers.switch_status, "/switch")
app.add_websocket_route(handlers.subscribe_to_switch_status, "/listen")

if __name__ == "__main__":
    from app.redis_utils import check_redis_connection

    check_redis_connection()

    app.run(
        host=app.config.HOST,
        port=app.config.PORT,
        debug=app.config.DEBUG,
        access_log=app.config.ACCESS_LOG,
    )
Example 20
class Jawaf(object):
    """Wraps a Sanic instance (server), and manages db connection pools,
    sanic routes, and the session."""
    def __init__(self, name='default', testing=False):
        """Initialize Jawaf instance. Set up routes, database connections, and session.
        :param name: String. Sanic instance name. (Default: 'default')
        :param testing: Boolean. Whether or not testing framework is active.
        """
        self.name = name
        self.server = Sanic(name)
        self.testing = testing

        self._db_pools = {}
        self._session_pool = None
        self._smtp = None
        global _active_instance
        _active_instance = self

        self.add_routes(routes_import=os.path.join(settings.PROJECT_DIR,
                                                   'routes.py'),
                        base_path=settings.BASE_DIR)
        if 'STATIC' in settings:
            self.server.static(settings.STATIC['uri'],
                               settings.STATIC['directory'])
        self.init_databases()
        self.init_session()
        self.init_smtp()
        self.init_apps()

    def add_route(self, *args, **options):
        """Wraps Sanic.add_route"""
        self.server.add_route(*args, **options)

    def add_websocket_route(self, *args, **options):
        """Wraps Sanic.add_websocket_route"""
        self.server.add_websocket_route(*args, **options)

    def add_routes(self, routes_import, base_path, prefix=''):
        """Recursively add routes using routes.py files.
        :param routes_import: String.
            Relative path to the routes.py file to parse.
        :param base_path: String. Base path for the jawaf project.
        :param prefix: String. Prefix url path (for recursion).
            Passed in via include directives.
        """
        try:
            if routes_import[0] == '/':
                module = import_module(os.path.splitext(routes_import)[0])
            else:
                module = import_module(routes_import)
        except ImportError:
            module = None
        if module is None:
            routes_spec = importlib.util.spec_from_file_location(
                f'{self.name}{prefix}.routes', routes_import)
            if not routes_spec:
                raise ServerError(
                    f'Error processing routes file: {routes_import}')
            module = importlib.util.module_from_spec(routes_spec)
            routes_spec.loader.exec_module(module)
        for route in module.routes:
            if prefix:
                route['uri'] = ''.join([prefix, route['uri']])
            if 'include' in route:
                try:
                    import_module(route['include'])
                    # Treat as a package
                    self.add_routes('.'.join([route['include'], 'routes']),
                                    base_path=base_path,
                                    prefix=route['uri'])
                except ImportError:
                    # Treat as a relative path
                    self.add_routes(os.path.join(base_path, route['include'],
                                                 'routes.py'),
                                    base_path=base_path,
                                    prefix=route['uri'])
            elif 'websocket' in route and route['websocket'] is True:
                self.add_websocket_route(**route)
            else:
                self.add_route(**route)

    async def close_database_pools(self):
        """Asynchronously close all open database connection pools."""
        for database in self._db_pools:
            await self._db_pools[database].close()

    async def connection(self, database=None):
        """Asynchronously return a connection from the named database connection pool.
        :param database: String. Database key in settings.py to call.
            (Default: settings.DEFAULT_DATABASE_KEY)
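
        Typical usage, given a Jawaf instance ``waf`` (illustrative)::

            conn = await waf.connection()
            try:
                ...  # run queries against conn
            finally:
                await waf.release(conn)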
        """
        if not database:
            database = settings.DEFAULT_DATABASE_KEY
        return await self._db_pools[database].acquire()

    async def create_database_pool(self, database=None):
        """Create the database pool for the specified database.
        Used for unit tests, handled by Sanic blueprint when server is running.
        :param database: String. Database key in settings.py to call.
            (Default: settings.DEFAULT_DATABASE_KEY)
        """
        if not database:
            database = settings.DEFAULT_DATABASE_KEY
        self._db_pools[database] = await create_pool(
            **settings.DATABASES[database].copy())

    async def get_session_pool(self):
        """Asynchronously return a connection from the session connection pool.
        """
        if not self._session_pool:
            self._session_pool = await asyncio_redis.Pool.create(
                **settings.SESSION)
        return self._session_pool

    def get_smtp(self):
        return self._smtp

    def init_apps(self):
        """Run any initialization code inside an app"""
        for app in settings.INSTALLED_APPS:
            module = None
            try:
                module = import_module(app)
            except ImportError:
                app_import = os.path.join(settings.BASE_DIR, app)
                app_spec = importlib.util.spec_from_file_location(
                    f'app.{app}', app_import)
                if app_spec:
                    module = importlib.util.module_from_spec(app_spec)
                    app_spec.loader.exec_module(module)
            if module:
                if hasattr(module, 'initialize'):
                    module.initialize(self)

    def init_databases(self):
        """Initialize database connection pools from settings.py,
        setting up Sanic blueprints for server start and stop."""
        for database in settings.DATABASES:
            db_blueprint = Blueprint(f'{self.name}_db_blueprint_{database}')
            connection_settings = settings.DATABASES[database].copy()
            if not connection_settings['user']:
                connection_settings.pop('user')
                connection_settings.pop('password')

            @db_blueprint.listener('before_server_start')
            async def setup_connection_pool(app, loop):
                self._db_pools[database] = await create_pool(
                    **connection_settings)

            @db_blueprint.listener('after_server_stop')
            async def close_connection_pool(app, loop):
                if database in self._db_pools and self._db_pools[database]:
                    await self._db_pools[database].close()

            self.server.blueprint(db_blueprint)

    def init_session(self):
        """Initialize the session connection pool,
        using either in memory interface or redis."""
        if 'SESSION' not in settings:
            return
        interface_type = settings.SESSION.pop('interface')
        if interface_type == 'memory':
            self._session_interface = InMemorySessionInterface()
        elif interface_type == 'redis':
            self._session_interface = RedisSessionInterface(
                self.get_session_pool)
        else:
            raise ServerError(f'Unexpected session type "{interface_type}".')

        @self.server.middleware('request')
        async def add_session_to_request(request):
            await self._session_interface.open(request)
            request['session']['csrf_token'] = 'test_token' if self.testing \
                else generate_csrf_token()

        @self.server.middleware('response')
        async def save_session(request, response):
            await self._session_interface.save(request, response)

    def init_smtp(self):
        """Initialize smtp connection"""
        if 'SMTP' not in settings:
            return
        smtp_blueprint = Blueprint(f'{self.name}_smtp_blueprint')

        @smtp_blueprint.listener('before_server_start')
        async def connect_smtp(app, loop):
            if settings.SMTP.get('ssl', False):
                self._smtp = SMTP_SSL(hostname=settings.SMTP['host'],
                                      port=settings.SMTP['port'])
            else:
                self._smtp = SMTP(hostname=settings.SMTP['host'],
                                  port=settings.SMTP['port'])
            await self._smtp.connect()
            if 'username' in settings.SMTP and 'password' in settings.SMTP:
                await self._smtp.auth.auth(settings.SMTP['username'],
                                           settings.SMTP['password'])

        self.server.blueprint(smtp_blueprint)

    async def release(self, connection, database=None):
        """Asynchronously release a connection to the database specified.
        :param connection: Connection object.
            Connection to database to release.
        :param database: String. Database name from settings.py"""
        if not database:
            database = settings.DEFAULT_DATABASE_KEY
        await self._db_pools[database].release(connection)

    def run(self, *args, **options):
        """Wrapper for Sanic instance run method."""
        return self.server.run(*args, **options)
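# A hedged sketch (illustrative values only) of the settings.py shape the wrapper above
# consumes: each DATABASES entry is passed as keyword arguments to create_pool(), so the
# keys must match whatever pool implementation the project imports; SESSION is handed to
# asyncio_redis.Pool.create() after its 'interface' key is popped; SMTP feeds the async
# SMTP/SMTP_SSL client set up in init_smtp().
DEFAULT_DATABASE_KEY = 'default'

DATABASES = {
    'default': {
        'host': 'localhost',
        'port': 5432,
        'database': 'app',
        'user': 'app',
        'password': 'secret',
    },
}

SESSION = {
    'interface': 'redis',  # or 'memory' for InMemorySessionInterface
    'host': 'localhost',
    'port': 6379,
}

SMTP = {
    'host': 'localhost',
    'port': 25,
    'ssl': False,
    'username': 'mailer',  # auth only runs when both username and password are present
    'password': 'secret',
}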
Esempio n. 21
0
class ServerComponents(metaclass=SingletonMetaClass):
    conf: 'IconConfig' = None

    def __init__(self):
        self.__app = Sanic(__name__, log_config=self._make_log_config())
        self.__app.config.KEEP_ALIVE = False
        self.__app.config.REQUEST_MAX_SIZE = ServerComponents.conf[
            ConfigKey.REQUEST_MAX_SIZE]
        CORS(self.__app)

        # Decide whether to create an SSL context based on the configured SSL type

        rest_ssl_type = ServerComponents.conf[ConfigKey.REST_SSL_TYPE]
        if rest_ssl_type == SSLAuthType.none:
            self.__ssl_context = None
        elif rest_ssl_type == SSLAuthType.server_only:

            self.__ssl_context = (
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_CERT_PATH],
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_KEY_PATH])
        elif rest_ssl_type == SSLAuthType.mutual:
            self.__ssl_context = ssl.SSLContext(_ssl.PROTOCOL_SSLv23)

            self.__ssl_context.verify_mode = _ssl.CERT_REQUIRED
            self.__ssl_context.check_hostname = False

            self.__ssl_context.load_verify_locations(
                cafile=ServerComponents.conf[
                    ConfigKey.DEFAULT_SSL_TRUST_CERT_PATH])

            self.__ssl_context.load_cert_chain(
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_CERT_PATH],
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_KEY_PATH])
        else:
            Logger.error(
                f"REST_SSL_TYPE must be one of [0, 1, 2], "
                f"but conf.REST_SSL_TYPE is {rest_ssl_type}")

    def _make_log_config(self) -> dict:
        # Copy the defaults so the shared LOGGING_CONFIG_DEFAULTS dict is not mutated,
        # then clear loggers/handlers/formatters to silence Sanic's default logging.
        log_config = dict(LOGGING_CONFIG_DEFAULTS)
        log_config['loggers'] = {}
        log_config['handlers'] = {}
        log_config['formatters'] = {}
        return log_config

    @property
    def app(self):
        return self.__app

    @property
    def ssl_context(self):
        return self.__ssl_context

    def set_resource(self):
        self.__app.add_route(NodeDispatcher.dispatch,
                             '/api/node/<channel_name>',
                             methods=['POST'])
        self.__app.add_route(NodeDispatcher.dispatch,
                             '/api/node/',
                             methods=['POST'])

        self.__app.add_route(Version2Dispatcher.dispatch,
                             '/api/v2',
                             methods=['POST'])
        self.__app.add_route(Version3Dispatcher.dispatch,
                             '/api/v3/<channel_name>',
                             methods=['POST'])
        self.__app.add_route(Version3Dispatcher.dispatch,
                             '/api/v3/',
                             methods=['POST'])

        self.__app.add_route(Version3DebugDispatcher.dispatch,
                             '/api/debug/v3/<channel_name>',
                             methods=['POST'])
        self.__app.add_route(Version3DebugDispatcher.dispatch,
                             '/api/debug/v3/',
                             methods=['POST'])

        self.__app.add_route(Disable.as_view(),
                             '/api/v1',
                             methods=['POST', 'GET'])
        self.__app.add_route(Status.as_view(), '/api/v1/status/peer')
        self.__app.add_route(Avail.as_view(), '/api/v1/avail/peer')

        self.__app.add_websocket_route(NodeDispatcher.websocket_dispatch,
                                       '/api/node/<channel_name>')
        self.__app.add_websocket_route(WSDispatcher.dispatch,
                                       '/api/ws/<channel_name>')

    def ready(self):
        StubCollection().amqp_target = ServerComponents.conf[
            ConfigKey.AMQP_TARGET]
        StubCollection().amqp_key = ServerComponents.conf[ConfigKey.AMQP_KEY]
        StubCollection().conf = ServerComponents.conf

        async def ready_tasks():
            Logger.debug('rest_server:initialize')

            if self.conf.get(ConfigKey.TBEARS_MODE, False):
                channel_name = self.conf.get(ConfigKey.CHANNEL,
                                             'loopchain_default')
                await StubCollection().create_channel_stub(channel_name)
                await StubCollection().create_channel_tx_creator_stub(
                    channel_name)
                await StubCollection().create_icon_score_stub(channel_name)

                RestProperty().node_type = NodeType.CommunityNode
                RestProperty().rs_target = None
            else:
                await StubCollection().create_peer_stub()
                channels_info = await StubCollection().peer_stub.async_task(
                ).get_channel_infos()
                channel_name = None
                for channel_name, channel_info in channels_info.items():
                    await StubCollection().create_channel_stub(channel_name)
                    await StubCollection().create_channel_tx_creator_stub(
                        channel_name)
                    await StubCollection().create_icon_score_stub(channel_name)
                results = await StubCollection().peer_stub.async_task(
                ).get_channel_info_detail(channel_name)
                RestProperty().node_type = NodeType(results[6])
                RestProperty().rs_target = results[3]

            Logger.debug(
                f'rest_server:initialize complete. '
                f'node_type({RestProperty().node_type}), rs_target({RestProperty().rs_target})'
            )

        self.__app.add_task(ready_tasks())

    def serve(self, api_port):
        self.ready()
        self.__app.run(host='0.0.0.0',
                       port=api_port,
                       debug=False,
                       ssl=self.ssl_context)
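# A condensed, hedged restatement of the SSLAuthType.mutual branch above using only the
# standard library; the certificate paths are placeholders standing in for the
# DEFAULT_SSL_* configuration keys.
import ssl

ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)    # server-side successor to PROTOCOL_SSLv23
ssl_context.check_hostname = False
ssl_context.verify_mode = ssl.CERT_REQUIRED              # require and verify client certificates
ssl_context.load_verify_locations(cafile='ca.crt')       # trust store for client certificates
ssl_context.load_cert_chain('server.crt', 'server.key')  # this server's own certificate and key
# app.run(host='0.0.0.0', port=9000, ssl=ssl_context)    # Sanic accepts an SSLContext for ssl=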
Esempio n. 22
0
from sanic import Sanic
from sanic.response import text

app = Sanic(__name__)


@app.websocket('/feed')
async def feed(request, ws):
    while True:
        data = 'hello!'
        print('Sending: ' + data)
        await ws.send(data)
        data = await ws.recv()
        print('Received: ' + data)


#  Another way to register a websocket route


async def feed_one(request, ws):
    pass


app.add_websocket_route(feed_one, '/f')

if __name__ == '__main__':
    app.run(host='0.0.0.0', port=8000)
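# A minimal client-side sketch for the /feed endpoint above (assumptions: the server is
# running on localhost:8000 and the third-party `websockets` package is installed).
import asyncio
import websockets

async def feed_client():
    async with websockets.connect('ws://localhost:8000/feed') as ws:
        print('Received:', await ws.recv())  # the server sends 'hello!' first
        await ws.send('hi from client')      # the server then waits on ws.recv()

asyncio.get_event_loop().run_until_complete(feed_client())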
Esempio n. 23
0
    def __init__(self, rtc, output):
        self.rtcClient = rtc
        self.websocket = None

        @rtc.on('candidate')
        def on_candidate(candidate):
            loop = asyncio.new_event_loop()
            loop.run_until_complete(
                self.websocket.send(json.dumps({'candidate': candidate})))
            print('send candidate', candidate)

        @rtc.on('answer')
        def on_answer(answer):
            rtc.set_local_description(answer)
            loop = asyncio.new_event_loop()
            loop.run_until_complete(
                self.websocket.send(
                    json.dumps({'answer': answer.sdp.as_text()})))
            print('send answer', answer.sdp.as_text())

        @rtc.on('offer')
        def on_offer(offer):
            rtc.set_local_description(offer)
            loop = asyncio.new_event_loop()
            loop.run_until_complete(
                self.websocket.send(json.dumps({'offer':
                                                offer.sdp.as_text()})))
            print('send offer', offer.sdp.as_text())

        @rtc.on('negotiation-needed')
        def on_negotiation_needed(element):
            print('negotiation-needed', element)

        @rtc.on('incoming-audio-intersink')
        def on_incoming_audio(channel_name):
            print('incoming-audio-intersink', channel_name)
            output.add_audio_input(channel_name)
            #output.draw_pipeline("after_incoming_audio")

        @rtc.on('incoming-video-intersink')
        def on_incoming_video(channel_name):
            print('incoming-video-intersink', channel_name)
            print("add {channel} to LIVE Mixer".format(channel=channel_name))
            output.add_video_input(channel_name, 520, 100)
            output.draw_pipeline("after_incoming_video")

        # gives you a proper offer
        source = TestSource()
        rtc.add_stream(source)

        #alternative ? https://stackoverflow.com/questions/57430215/how-to-use-webrtcbin-create-offer-only-receive-video
        '''
        direction = GstWebRTC.WebRTCRTPTransceiverDirection.RECVONLY
        caps = Gst.caps_from_string("application/x-rtp,media=video,encoding-name=VP8/9000,payload=96")
        rtc.emit('add-transceiver', direction, caps)
        '''

        logging.debug("SignalingServer initiated with!", rtc)
        app = Sanic()
        app.config.KEEP_ALIVE = False
        app.static('/', './www/index.html', name='index.html')
        app.static('/bg', './www/background.html', name='background.html')

        async def api(request):
            return sanic.response.json({"response": "api"})

        app.add_route(api, "/api")

        # avoid a favicon.ico NotFound error
        app.error_handler.add(sanic.exceptions.NotFound,
                              lambda r, e: sanic.response.empty(status=404))

        # https://sanic.readthedocs.io/en/latest/sanic/websocket.html
        app.config.WEBSOCKET_MAX_SIZE = 2**20
        app.config.WEBSOCKET_MAX_QUEUE = 32
        app.config.WEBSOCKET_READ_LIMIT = 2**16
        app.config.WEBSOCKET_WRITE_LIMIT = 2**16

        clients = []

        async def signaling(request, ws):
            logging.debug("is called!")
            clients.append(id(ws))
            self.websocket = ws
            while True:
                #pprint(ws)
                #print("websocket id: {id}".format(id=id(ws)))
                # a Python object (dict):
                data = {"name": "John"}
                logging.debug('Sending: ' + str(data))
                await ws.send(json.dumps(data))
                message = await ws.recv()
                logging.debug('Received: ' + message)
                # https://realpython.com/python-json/
                msg = json.loads(message)
                if msg.get('join'):
                    self.rtcClient.create_offer()

                if msg.get('answer'):
                    sdp = msg['answer']
                    _, sdpmsg = GstSdp.SDPMessage.new()
                    GstSdp.sdp_message_parse_buffer(bytes(sdp.encode()),
                                                    sdpmsg)
                    answer = GstWebRTC.WebRTCSessionDescription.new(
                        GstWebRTC.WebRTCSDPType.ANSWER, sdpmsg)
                    self.rtcClient.set_remote_description(answer)

                if msg.get('candidate') and msg['candidate'].get('candidate'):
                    print('add_ice_candidate')
                    self.rtcClient.add_ice_candidate(msg['candidate'])

        app.add_websocket_route(signaling, '/signaling')

        def start_server():
            # https://docs.telethon.dev/en/latest/concepts/asyncio.html -> it uses asyncio.get_event_loop(), which only works in the main thread
            asyncio.set_event_loop(uvloop.new_event_loop())
            loop = asyncio.get_event_loop()
            server = app.create_server(host="0.0.0.0",
                                       port=8001,
                                       access_log=False,
                                       return_asyncio_server=True)
            asyncio.ensure_future(server)
            #loop.create_task(self.webockets_handler.periodic_check())
            loop.run_forever()

        start_server()
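# A minimal signaling-client sketch (assumptions: the SignalingServer above is listening on
# localhost:8001, and the payload shapes match what the handler checks for: {"join": ...},
# {"answer": "<sdp>"} and {"candidate": {...}}). Uses the third-party `websockets` package.
import asyncio
import json
import websockets

async def join_call():
    async with websockets.connect('ws://localhost:8001/signaling') as ws:
        greeting = json.loads(await ws.recv())     # the server sends {"name": "John"} first
        print('server said:', greeting)
        await ws.send(json.dumps({'join': True}))  # triggers rtcClient.create_offer()
        while True:
            msg = json.loads(await ws.recv())      # offers/candidates arrive interleaved
            print('got:', list(msg.keys()))
            if 'offer' in msg:
                break

asyncio.get_event_loop().run_until_complete(join_call())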
Esempio n. 24
0
class Server(object):
    def __init__(self):

        self.app = Sanic()
        self.blockchain = Blockchain()
        self.sockets = []
        self.app.add_route(self.blocks, '/blocks', methods=['GET'])
        self.app.add_route(self.mine_block, '/mineBlock', methods=['POST'])
        self.app.add_route(self.peers, '/peers', methods=['GET'])
        self.app.add_route(self.add_peer, '/addPeer', methods=['POST'])
        self.app.add_route(self.elect, '/election', methods=['POST'])
        self.app.add_route(self.update_stake, '/updateStake', methods=['POST'])
        self.app.add_websocket_route(self.p2p_handler, '/')
        self.app.add_route(self.send_message, '/sendMessage', methods=['POST'])
        self.app.add_websocket_route(self.receiveMessage, '/receiveMessage')

    async def blocks(self, request):
        return json(self.blockchain.blocks)

    async def elect(self, request):
        try:
            temp_peer_list = []
            print("request json:\t", request.json)
            newData = {}
            newData["data"] = request.json["data"]
            newData["peer"] = request.json["peer"]
            temp_peer_list = request.json["temp_peer"]

            print("Inside elect---------", temp_peer_list)

            stakesResult = sorted(temp_peer_list,
                                  key=lambda x: int(x.split(';')[1]),
                                  reverse=True)
            leader = stakesResult[0].split(';')[0]
            print("$$$$$$$$$$$LEADER IS:\t{}".format(leader))
            if "{}:{}".format(host, port) in leader:
                requestObject = {}
                requestObject["json"] = newData
                print("requestObject mineblock:\t", requestObject)
                # mine_block expects a dict and already returns a JSON response
                return await self.mine_block(newData)
        except Exception as e:
            print("error:\t{}".format(e))
            return json({"Error in leader election": "{}".format(e)},
                        status=401)

    async def send_message(self, request):
        from_peer = str(peer_list[0])
        to = request.json["to"]
        print("to is\t", to)
        message = request.json["message"]
        jsonData = {}
        jsonData["type"] = 0
        jsonData["message"] = format(message)
        cursor = db.cursor()
        query = 'insert into message(data) values ("{}")'.format(message)
        print("query is------------{}".format(query))
        cursor.execute(query)
        db.commit()
        ws = await websockets.connect(to)
        data = {"data": "p2p message", "peer": "{}-{}".format(from_peer, to)}
        await ws.send(JSON.dumps(jsonData))

        newBlock = self.blockchain.generate_next_block(data)
        self.blockchain.add_block(newBlock)
        await self.broadcast(self.response_latest_msg())

        temp_peer_list = peer_list
        for i, peer in enumerate(temp_peer_list):
            if from_peer in temp_peer_list:
                del temp_peer_list[i]

        for i, peer in enumerate(temp_peer_list):
            if str(to) in temp_peer_list[i]:
                del temp_peer_list[i]

        print("peer inclusion list is:@@@@@@@@@@@@@@@@@@@\t{}".format(
            temp_peer_list))
        for peer in temp_peer_list:
            peerSplit = peer.split(';')[0]
            peerIp = "http:{}:{}".format(
                peerSplit.split(":")[1],
                peerSplit.split(":")[2])
            data["temp_peer"] = temp_peer_list
            responseObject = requests.post("{}/election".format(peerIp),
                                           json=data)

        return json(temp_peer_list)

    async def receiveMessage(self, request, ws):
        data = await ws.recv()
        message = JSON.loads(data)
        logger.info('Received p2p message: {}'.format(data))
        return json(message)

    async def send_handshake(self, peer):
        cursor = db.cursor()
        query = 'select public_key from my_keys where peer={}'.format(
            peer_list[0])
        cursor.execute(query)
        query_response = cursor.fetchall()
        query_result_to_str = ''.join(query_response[0])
        socket = self.sockets[-1]
        await socket.send(JSON.dumps(query_result_to_str))

    async def http_peer(self, peer):
        peer_updated = []
        try:
            peer_info = peer.replace('ws', 'http').split(';')[0]
            peer_updated.append(peer_info)
            return peer_updated
        except Exception as e:
            return [format(e)]

    async def mine_block(self, data):

        try:
            newData = data
            print("Inside mineblock:\t", newData)
            newData['peer'] = peerId
            print("newData block ----------------------", newData)
            newBlock = self.blockchain.generate_next_block(newData)
            print("new block here is:\t---", newBlock)
        except KeyError as e:
            return json({"status": False, "message": "pass value in data key"})

        self.blockchain.add_block(newBlock)
        await self.broadcast(self.response_latest_msg())

        return json(newBlock)

    async def peers(self, request):
        peers = map(
            lambda x: "{}:{}".format(x.remote_address[0], x.remote_address[1]),
            self.sockets)
        return json(list(peers))

    async def add_peer(self, request):
        import asyncio
        peerName = request.json["peer"]
        asyncio.ensure_future(self.connect_to_peers([peerName]),
                              loop=asyncio.get_event_loop())
        return json({"status": True})

    async def connect_to_peers(self, newPeers):
        for peer in newPeers:
            logger.info(peer)
            try:
                peerName = str(peer).split(';')[0]
                ws = await websockets.connect(peerName)
                print("ws########################", peerName)
                peer_list.append(peer)
                print("peer_list -----------\t", peer_list)
                await self.init_connection(ws)
            except Exception as e:
                logger.info(str(e))

    async def update_stake(self, request):

        logger.info("Updating stake for peer")
        peer = request.json["peer"]
        new_stake = request.json["stake"]
        for i, peer_info in enumerate(peer_list):
            if peer in peer_info:
                peer_name = str(peer_list[i]).split(';')[0]
                peer_info_updated = peer_name + ";" + new_stake
                peer_list[i] = peer_info_updated
        return json(peer_list)

    # initP2PServer WebSocket server
    async def p2p_handler(self, request, ws):
        logger.info('listening websocket p2p port on: %d' % port)

        try:
            await self.init_connection(ws)
        except (ConnectionClosed):
            await self.connection_closed(ws)

    async def connection_closed(self, ws):

        logger.critical("connection failed to peer")
        self.sockets.remove(ws)

    async def init_connection(self, ws):

        self.sockets.append(ws)
        await ws.send(JSON.dumps(self.query_chain_length_msg()))

        while True:
            await self.init_message_handler(ws)

    async def init_message_handler(self, ws):
        data = await ws.recv()
        message = JSON.loads(data)
        logger.info('Received message: {}'.format(data))

        await {
            QUERY_LATEST: self.send_latest_msg,
            QUERY_ALL: self.send_chain_msg,
            RESPONSE_BLOCKCHAIN: self.handle_blockchain_response
        }[message["type"]](ws, message)

    async def send_latest_msg(self, ws, *args):
        await ws.send(JSON.dumps(self.response_latest_msg()))

    async def send_chain_msg(self, ws, *args):

        await ws.send(JSON.dumps(self.response_chain_msg()))

    def response_chain_msg(self):
        return {
            'type': RESPONSE_BLOCKCHAIN,
            'data':
            JSON.dumps([block.dict() for block in self.blockchain.blocks])
        }

    def response_latest_msg(self):

        return {
            'type': RESPONSE_BLOCKCHAIN,
            'data': JSON.dumps([self.blockchain.get_latest_block().dict()])
        }

    async def handle_blockchain_response(self, ws, message):

        received_blocks = sorted(JSON.loads(message["data"]),
                                 key=lambda k: k['index'])
        logger.info(received_blocks)
        latest_block_received = received_blocks[-1]
        latest_block_held = self.blockchain.get_latest_block()
        if latest_block_received["index"] > latest_block_held.index:
            logger.info('blockchain possibly behind. We got: ' +
                        str(latest_block_held.index) + ' Peer got: ' +
                        str(latest_block_received["index"]))
            if latest_block_held.hash == latest_block_received[
                    "previous_hash"]:
                logger.info("We can append the received block to our chain")

                self.blockchain.blocks.append(Block(**latest_block_received))
                await self.broadcast(self.response_latest_msg())
            elif len(received_blocks) == 1:
                logger.info("We have to query the chain from our peer")
                await self.broadcast(self.query_all_msg())
            else:
                logger.info(
                    "Received blockchain is longer than current blockchain")
                await self.replace_chain(received_blocks)
        else:
            logger.info(
                'received blockchain is not longer than current blockchain. Do nothing'
            )

    async def replace_chain(self, newBlocks):

        try:

            if self.blockchain.is_valid_chain(newBlocks) and len(
                    newBlocks) > len(self.blockchain.blocks):
                logger.info(
                    'Received blockchain is valid. Replacing current blockchain with '
                    'received blockchain')
                self.blockchain.blocks = [
                    Block(**block) for block in newBlocks
                ]
                await self.broadcast(self.response_latest_msg())
            else:
                logger.info('Received blockchain invalid')
        except Exception as e:
            logger.info("Error in replace chain" + str(e))

    def query_chain_length_msg(self):

        return {'type': QUERY_LATEST}

    def query_all_msg(self):

        return {'type': QUERY_ALL}

    async def broadcast(self, message):

        for socket in self.sockets:
            logger.info(socket)
            await socket.send(JSON.dumps(message))
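# A minimal, self-contained sketch (all names here are illustrative, not from the example
# above) of the dispatch pattern used in init_message_handler: a dict maps a message
# "type" field to a coroutine handler, so supporting a new message type only means adding
# one more entry.
import asyncio
import json

QUERY_LATEST, QUERY_ALL = 0, 1

async def send_latest(ws, message):
    print('would send the latest block over', ws)

async def send_chain(ws, message):
    print('would send the full chain over', ws)

HANDLERS = {QUERY_LATEST: send_latest, QUERY_ALL: send_chain}

async def handle_message(ws, raw):
    message = json.loads(raw)
    await HANDLERS[message['type']](ws, message)

asyncio.get_event_loop().run_until_complete(
    handle_message('fake-socket', json.dumps({'type': QUERY_LATEST})))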
Esempio n. 25
0
class Server(object):

    def __init__(self):

        self.app = Sanic()
        self.blockchain = Blockchain()
        self.sockets = []
        self.app.add_route(self.blocks, '/blocks', methods=['GET'])
        self.app.add_route(self.mine_block, '/mineBlock', methods=['POST'])
        self.app.add_route(self.peers, '/peers', methods=['GET'])
        self.app.add_route(self.add_peer, '/addPeer', methods=['POST'])
        self.app.add_websocket_route(self.p2p_handler, '/')

    async def blocks(self, request):
        return json(self.blockchain.blocks)

    async def mine_block(self, request):

        try:
            newBlock = self.blockchain.generate_next_block(request.json["data"])
        except KeyError as e:
            return json({"status": False, "message": "pass value in data key"})
        self.blockchain.add_block(newBlock)
        await self.broadcast(self.response_latest_msg())
        return json(newBlock)

    async def peers(self, request):
        peers = map(lambda x: "{}:{}".format(x.remote_address[0], x.remote_address[1])
                    , self.sockets)
        return json(peers)

    async def add_peer(self, request):
        import asyncio
        asyncio.ensure_future(self.connect_to_peers([request.json["peer"]]),
                              loop=asyncio.get_event_loop())
        return json({"status": True})

    async def connect_to_peers(self, newPeers):
        for peer in newPeers:
            logger.info(peer)
            try:
                ws = await websockets.connect(peer)

                await self.init_connection(ws)
            except Exception as e:
                logger.info(str(e))

    # initP2PServer WebSocket server
    async def p2p_handler(self, request, ws):
        logger.info('listening websocket p2p port on: %d' % port)


        try:
            await self.init_connection(ws)
        except (ConnectionClosed):
            await self.connection_closed(ws)

    async def connection_closed(self, ws):

        logger.critical("connection failed to peer")
        self.sockets.remove(ws)

    async def init_connection(self, ws):

        self.sockets.append(ws)
        await ws.send(JSON.dumps(self.query_chain_length_msg()))

        while True:
            await self.init_message_handler(ws)

    async def init_message_handler(self, ws):
        data = await ws.recv()
        message = JSON.loads(data)
        logger.info('Received message: {}'.format(data))

        await {
            QUERY_LATEST: self.send_latest_msg,
            QUERY_ALL: self.send_chain_msg,
            RESPONSE_BLOCKCHAIN: self.handle_blockchain_response
        }[message["type"]](ws, message)

    async def send_latest_msg(self, ws, *args):
        await ws.send(JSON.dumps(self.response_latest_msg()))

    async def send_chain_msg(self, ws, *args):

        await ws.send(JSON.dumps(self.response_chain_msg()))

    def response_chain_msg(self):
        return {
            'type': RESPONSE_BLOCKCHAIN,
            'data': JSON.dumps([block.dict() for block in self.blockchain.blocks])
        }

    def response_latest_msg(self):

        return {
            'type': RESPONSE_BLOCKCHAIN,
            'data': JSON.dumps([self.blockchain.get_latest_block().dict()])
        }

    async def handle_blockchain_response(self, ws, message):

        received_blocks = sorted(JSON.loads(message["data"]), key=lambda k: k['index'])
        logger.info(received_blocks)
        latest_block_received = received_blocks[-1]
        latest_block_held = self.blockchain.get_latest_block()
        if latest_block_received["index"] > latest_block_held.index:
            logger.info('blockchain possibly behind. We got: ' + str(latest_block_held.index)
                  + ' Peer got: ' + str(latest_block_received["index"]))
            if latest_block_held.hash == latest_block_received["previous_hash"]:
                logger.info("We can append the received block to our chain")

                self.blockchain.blocks.append(Block(**latest_block_received))
                await self.broadcast(self.response_latest_msg())
            elif len(received_blocks) == 1:
                logger.info("We have to query the chain from our peer")
                await self.broadcast(self.query_all_msg())
            else:
                logger.info("Received blockchain is longer than current blockchain")
                await self.replace_chain(received_blocks)
        else:
            logger.info('received blockchain is not longer than current blockchain. Do nothing')

    async def replace_chain(self, newBlocks):

        try:

            if self.blockchain.is_valid_chain(newBlocks) and len(newBlocks) > len(self.blockchain.blocks):
                logger.info('Received blockchain is valid. Replacing current blockchain with '
                            'received blockchain')
                self.blockchain.blocks = [Block(**block) for block in newBlocks]
                await self.broadcast(self.response_latest_msg())
            else:
                logger.info('Received blockchain invalid')
        except Exception as e:
            logger.info("Error in replace chain" + str(e))



    def query_chain_length_msg(self):

        return {'type': QUERY_LATEST}

    def query_all_msg(self):

        return {'type': QUERY_ALL}

    async def broadcast(self, message):

        for socket in self.sockets:
            logger.info(socket)
            await socket.send(JSON.dumps(message))
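# A minimal HTTP-client sketch for the routes registered above (assumptions: the server is
# running on a placeholder localhost:3001 and the third-party `requests` package is used).
import requests

base = 'http://localhost:3001'
print(requests.get(base + '/blocks').text)                               # current chain
requests.post(base + '/mineBlock', json={'data': 'hello block'})         # mine_block reads request.json["data"]
requests.post(base + '/addPeer', json={'peer': 'ws://localhost:3002/'})  # add_peer reads request.json["peer"]
print(requests.get(base + '/peers').text)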
Esempio n. 26
0
from pathlib import Path

from sanic import Sanic, response

from backend.chat_websocket import ChatManager
from backend.database_handlers.databases import SqliteDatabase
from backend.database_manager import DatabaseManager
from backend.google_oauth import GoogleOauth

app = Sanic('frosty_lol')

frontend_dir = Path('./frontend')
app.static('/', str(frontend_dir))

with open(Path('backend') / 'database_setup.sql') as file:
    db_config = file.read()
sqlite = SqliteDatabase('frosty_lol', db_config)
db_manager = DatabaseManager(sqlite)

app.add_route(GoogleOauth.as_view(db_manager), '/api/g-oauth')

chat = ChatManager(200, db_manager)
app.add_route(chat.get_chat_log, '/api/chat')
app.add_websocket_route(chat.websocket_runner, '/api/chat/feed')

# noinspection PyUnusedLocal
@app.route('/')
async def index(request):
    return await response.file(str(frontend_dir / 'index.html'))

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=80)
Esempio n. 27
0
            #     consumer = request.app.experiment_logs_consumers.pop(experiment_uuid, None)
            #     if consumer:
            #         consumer.stop()
            should_quite = True

        if should_quite:
            return

        await asyncio.sleep(SOCKET_SLEEP)


EXPERIMENT_URL = '/v1/<username>/<project_name>/experiments/<experiment_sequence>'
WS_EXPERIMENT_URL = '/ws{}'.format(EXPERIMENT_URL)

# Job urls
app.add_websocket_route(
    job_resources, '{}/jobs/<job_sequence>/resources'.format(EXPERIMENT_URL))
app.add_websocket_route(
    job_resources,
    '{}/jobs/<job_sequence>/resources'.format(WS_EXPERIMENT_URL))

app.add_websocket_route(job_logs,
                        '{}/jobs/<job_sequence>/logs'.format(EXPERIMENT_URL))
app.add_websocket_route(
    job_logs, '{}/jobs/<job_sequence>/logs'.format(WS_EXPERIMENT_URL))

# Experiment urls
app.add_websocket_route(experiment_resources,
                        '{}/resources'.format(EXPERIMENT_URL))
app.add_websocket_route(experiment_resources,
                        '{}/resources'.format(WS_EXPERIMENT_URL))
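# Illustrative only (hypothetical host and values): the routes registered above resolve to
# websocket URLs of this shape, once with and once without the /ws prefix.
job_resources_url = 'ws://localhost:8000/ws/v1/alice/mnist/experiments/1/jobs/2/resources'
experiment_resources_url = 'ws://localhost:8000/ws/v1/alice/mnist/experiments/1/resources'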
Esempio n. 28
0
class Server(object):
    def __init__(self):
        self.app = Sanic()  # a Python web framework
        self.sockets = dict()  # websockets kept alive like TCP connections and used to talk to peers
        self.files = []
        self.flag = 1
        self.sport = sport
        self.storage_total = 419200
        self.storage_consumed = 0
        self.band_width_dic = bwd
        self.locked_port = []
        self.history = []
        self.app.add_route(self.peers, '/peers', methods=['GET'])
        self.app.add_route(self.get_storage_state,
                           '/getStorageState',
                           methods=['GET'])
        self.app.add_route(self.get_all_files_local,
                           '/getAllFilesLocal',
                           methods=['GET'])
        self.app.add_route(self.search_for_file,
                           '/searchForFile',
                           methods=['GET'])
        self.app.add_route(self.add_peer, '/addPeer', methods=['POST'])
        self.app.add_route(self.save, '/save', methods=['POST'])
        self.app.add_websocket_route(self.p2p_handler, '/')
        self.app.add_route(self.get_history, '/getHistory', methods=['GET'])

    # 李文全, 2020-06-23: fix for this endpoint returning wrong data
    async def peers(self, request):
        # collect the send/receive addresses of the current connections
        # and return all known peer nodes
        temp = []
        for sk in self.sockets.keys():
            temp.append(sk)
        result = {"peers": temp}
        return json(JSON.dumps(result))

    async def get_storage_state(self, request):
        result = {"storage_total": self.storage_total}
        result['storage_consumed'] = self.storage_consumed
        return json(JSON.dumps(result))

    # deprecated
    async def add_peer(self, request):
        return json(JSON.dumps({"status": True}))

    async def get_history(self, request):
        return json(JSON.dumps({"history": self.history}))

    # initP2PServer WebSocket server
    async def p2p_handler(self, request, ws):
        logger.info('listening websocket p2p port on: %d' % int(sport))
        try:
            msg = {'nomessageflag': '0'}
            await self.init_connection(ws, msg)
        except (ConnectionClosed):
            print('connection_closed ' + str(sport))

    async def connect_to_peers(self, newPeers):
        # try to establish connections to the new peers
        tasks = []
        for peer in newPeers:
            logger.info(peer)

            try:
                print("******** connecting to peer ********")
                ws = await websockets.connect(peer)
                print("******** connected to peer ********")
                msg = {'type': BANDWIDTH_COORDINATION}
                msg['bandwidth'] = self.band_width_dic[peer]
                target_port = peer.split(':')[2]
                self.sockets[target_port] = ws
                msg['sport'] = self.sport
                tasks.append(self.init_connection(ws, msg))
            except Exception as e:
                logger.info(str(e))
        try:
            loop = asyncio.get_event_loop()
            loop.run_until_complete(asyncio.wait(tasks))
        except Exception as e:
            logger.info(str(e))

    # TODO: rework the messaging after a websocket is added (e.g. a heartbeat),
    # and rework init_message_handler: the business logic dispatches on the message type there
    async def init_connection(self, ws, message):
        # add ws to sockets and send a JSON message (QUERY_LATEST),
        # then loop forever waiting for incoming messages
        if 'type' in message.keys():
            await ws.send(JSON.dumps(message))
        while True:
            await self.init_message_handler(ws)

    # TODO: rework this
    async def init_message_handler(self, ws):
        '''
            On QUERY_LATEST, return the latest block.
            On QUERY_ALL, return all blocks.
            On RESPONSE_BLOCKCHAIN, go to the message handler (handle_blockchain_response).
        '''
        data = await ws.recv()
        message = JSON.loads(data)
        logger.info('Received message: {}'.format(data))

        await {
            QUERY_LATEST: self.send_latest_msg,
            QUERY_ALL: self.send_chain_msg,
            BANDWIDTH_COORDINATION: self.bandwidth_coordination,
            STORAGE_APPLY: self.storage_apply_handler,
            GAIN_APPLY: self.gain_apply_handler,
            SEARCH_FOR_FILE: self.search_for_file_handler,
            PROVIDE_AGREE: self.provide_agree_handler,
            TRANSMISSION_APPLY: self.transmission_apply_handler
        }[message["type"]](ws, message)

    async def send_latest_msg(self, ws, *args):
        self.test_flag = 1

    async def send_chain_msg(self, ws, *args):
        print("send_chain_msg")

    async def bandwidth_coordination(self, ws, *args):
        msg = args[0]
        target_port = msg['sport']
        target = "{}:{}".format(ws.remote_address[0], target_port)
        bandwidth = int(msg['bandwidth'])
        self.sockets[target_port] = ws
        self.band_width_dic[target] = bandwidth

    async def broadcast(self, message):
        print(self.sockets.keys())
        for port in self.sockets.keys():
            logger.info(port)
            await self.sockets[port].send(JSON.dumps(message))

    async def get_all_files_local(self, request):
        result = {"files": self.files}
        return json(JSON.dumps(result))

    async def search_for_file(self, request):
        files = request.json["files"]
        local_file_list = []
        for f in self.files:
            local_file_list.append(f['hash'])
        for tf in files:
            await self.his_log(tf, "search_begin", 'no_target')
            if not tf['hash'] in local_file_list:
                message = {'type': SEARCH_FOR_FILE}
                message['file'] = tf
                await self.broadcast(message)
            else:
                await self.his_log(tf, 'search_success', 'local')
        return json(JSON.dumps({'result': 'success'}))

    async def save(self, request):
        files = request.json["files"]
        for file in files:
            t = round(file['size'] / bwtn, 2)
            self.storage_consumed = self.storage_consumed + int(file['size'])
            self.files.append(file)
            await self.his_log(file, 'local_storage', 'no_target')
            await asyncio.sleep(t)
            await self.place_backup(file, request.json['license'])
        return json(
            JSON.dumps({
                "result":
                "*** Local storage completed, automatic backup started ***"
            }))

    async def place_backup(self, file, license):
        for targetport in license['targets']:
            if targetport in self.sockets.keys():
                ws = self.sockets[targetport]
                logger.info(ws)
                await self.his_log(file, 'back_up_begin', targetport)
                message = {'type': STORAGE_APPLY}
                message['file'] = file
                message['sport'] = self.sport
                message['license'] = license
                await ws.send(JSON.dumps(message))

    async def storage_apply_handler(self, ws, *args):
        msg = args[0]
        print('received a storage_apply_handler message')
        license = msg['license']
        file = msg["file"]
        source_port = msg['sport']
        while True:
            if self.file_exist_check(file):
                print('file-exists check passed')
                break
            if not self.port_locked_check(source_port):
                await self.process_port_locking(source_port)
                await self.process_file_write_in(file)
                message = {'type': GAIN_APPLY}
                message['file'] = file
                message['sport'] = self.sport
                message['license'] = license
                await ws.send(JSON.dumps(message))
                await self.process_storage_delay(source_port, file)
                await self.process_port_release(source_port)
                print('port released, file backup finished, starting to provide backups')
                await self.place_backup(file, license)
                print('backup propagation finished')
                break
            await asyncio.sleep(0.5)

    def port_locked_check(self, port):
        if port in self.locked_port:
            return True
        else:
            return False

    def file_exist_check(self, file):
        for f in self.files:
            if file['hash'] == f['hash']:
                return True

    async def process_port_locking(self, port):
        self.locked_port.append(port)

    async def process_file_write_in(self, file):
        self.files.append(file)
        self.storage_consumed = self.storage_consumed + int(file['size'])

    async def process_storage_delay(self, port, file):
        width = 0
        sec_delay = 0
        file_size = float(file['size'])
        for remote_address in self.band_width_dic.keys():
            if port in remote_address:
                width = int(self.band_width_dic[remote_address])
                sec_delay = file_size * 1.0 / width
        await asyncio.sleep(sec_delay)

    async def process_port_release(self, port):
        self.locked_port.remove(port)

    async def gain_apply_handler(self, ws, *args):
        msg = args[0]
        file = msg["file"]
        source_port = msg['sport']
        await self.process_port_locking(source_port)
        await self.process_storage_delay(source_port, file)
        await self.process_port_release(source_port)
        await self.his_log(file, 'back_up_success', source_port)

    async def search_for_file_handler(self, ws, *args):
        msg = args[0]
        file = msg["file"]
        for f in self.files:
            if f['hash'] == file['hash']:
                message = {'type': PROVIDE_AGREE}
                message['sport'] = self.sport
                message['file'] = file
                await ws.send(JSON.dumps(message))

    async def provide_agree_handler(self, ws, *args):
        msg = args[0]
        file = msg["file"]
        source_port = msg['sport']
        while True:
            if self.file_exist_check(file):
                break
            if not self.port_locked_check(source_port):
                await self.process_port_locking(source_port)
                await self.process_file_write_in(file)
                message = {'type': TRANSMISSION_APPLY}
                message['file'] = file
                message['sport'] = self.sport
                await ws.send(JSON.dumps(message))
                await self.process_storage_delay(source_port, file)
                await self.his_log(file, 'search_success', source_port)
                await self.process_port_release(source_port)
                break
            await asyncio.sleep(0.5)

    async def his_log(self, file, task_type, target_port):
        # task_type is one of: back_up_begin, back_up_success, search_begin, search_success
        log = {'file_hash': file['hash']}
        log['task_type'] = task_type
        log['target_port'] = target_port
        log['timestamp'] = round(time.time() % 100000, 2)
        self.history.append(log)

    async def transmission_apply_handler(self, ws, *args):
        msg = args[0]
        file = msg["file"]
        source_port = msg['sport']
        await self.process_port_locking(source_port)
        await self.process_storage_delay(source_port, file)
        await self.process_port_release(source_port)
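# A minimal sketch of the /save request body implied by the handler above (all values are
# placeholders): every file carries a content hash and a numeric size, and the license
# lists the target ports that place_backup() forwards the file to. Uses the third-party
# `requests` package against a hypothetical local instance.
import requests

payload = {
    'files': [{'hash': 'abc123', 'size': 2048}],
    'license': {'targets': ['9001', '9002']},
}
requests.post('http://localhost:9000/save', json=payload)
print(requests.get('http://localhost:9000/getHistory').text)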
Esempio n. 29
0
class Server():
    def __init__(self):
        self.app = Sanic(__name__)
        self.blockchain = Blockchain()
        self.sockets = []
        self.txnPool = []
        self.app.add_route(self.showChain, '/showChain', methods=['GET'])
        self.app.add_route(self.mineBlockPage, '/mineBlock', methods=['GET'])
        self.app.add_route(self.mineBlock, '/mineBlock', methods=['POST'])
        self.app.add_route(self.peers, '/peers', methods=['GET'])
        self.app.add_route(self.addPeer, '/addPeer', methods=['POST'])
        self.app.add_websocket_route(self.P2PHandler, '/')

    async def showChain(self, request):
        return response.text(self.blockchain.getJSON())

    async def mineBlockPage(self, request):
        return render_template('mineBlock.html')

    async def mineBlock(self, request):
        data = request.form['data'][
            0]  # request.form values are lists, so take the first element
        currentBlock = self.blockchain.getLatestBlock()
        diff = self.blockchain.difficulty
        start_time = time.time()
        timestamp = calculateTimestamp(int(start_time))
        targetBlock = Block(currentBlock.index + 1, currentBlock.hash,
                            timestamp, data)
        while str(targetBlock.hash)[0:diff] != ''.join(['0'] * diff):
            targetBlock.incrementNonce()
        self.blockchain.addNewBlock(targetBlock)
        await self._broadcast(self.blockchain.getJSON(), P2P_broadcast_CHAIN)
        return response.text(str(targetBlock.nonce))

    async def peers(self, request):
        peers = map(
            lambda x: "{}:{}".format(x.remote_address[0], x.remote_address[1]),
            self.sockets)
        return response.json(list(peers))

    async def addPeer(self, request):
        asyncio.ensure_future(self.connect2Peers([request.json['peer']]),
                              loop=asyncio.get_event_loop())
        res = {"Status": True}
        return response.json(res)

    async def connect2Peers(self, newPeers):
        for peer in newPeers:
            try:
                ws = await websockets.connect(peer)
                await self.initConnection(ws)
            except Exception as e:
                print(str(e))

    async def P2PHandler(self, request, ws):
        try:
            await self.initConnection(ws)
        except ConnectionClosed:
            await self.connectionClosed(ws)

    async def initConnection(self, ws):
        self.sockets.append(ws)
        query = json.dumps({'type': P2P_query_ALLBLOCK})
        await ws.send(query)
        while True:
            await self.initMsgHandler(ws)

    async def connectionClosed(self, ws):
        self.sockets.remove(ws)
        print('Connection closed.')
        return

    async def initMsgHandler(self, ws):
        data = await ws.recv()
        message = json.loads(data)
        print(message)
        if message['type'] == P2P_query_LATESTBLOCK:
            await self.send_latest(ws, message)
        elif message['type'] == P2P_query_ALLBLOCK:
            await self.send_chain(ws, message)
        elif message['type'] == P2P_query_PEERLIST:
            await self.send_peerlist(ws, message)
        elif message['type'] == P2P_res_CHAIN:
            await self.process_res(ws, message)
        else:
            print('Bad query type.')

    async def send_latest(self, ws, *args):
        blockInfo = self.blockchain.getLatestBlock().getDictForm()
        res = json.dumps({'type': P2P_res_CHAIN, 'data': blockInfo})
        await ws.send(res)

    async def send_chain(self, ws, *args):
        chainInfo = self.blockchain.getJSON()
        res = json.dumps({'type': P2P_res_CHAIN, 'data': chainInfo})
        await ws.send(res)

    async def send_peerlist(self, ws, *args):
        pass

    async def process_res(self, ws, message):
        blockRecv = Blockchain.buildChainFromJSON(message['data'])
        print(message['data'])
        choose = Blockchain.chooseChain(self.blockchain, blockRecv)
        if isinstance(choose, int):
            print("Error: invalid chain. Code = ", choose)
        else:
            print("Chain replaced.")
            self.blockchain = choose

    async def _broadcast(self, message, option=P2P_broadcast_CHAIN):
        '''General broadcast method; works for both txns and blocks.'''
        for socket in self.sockets:
            await socket.send(json.dumps({'type': option, 'data': message}))
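# A minimal client sketch for the endpoints above (host and port are placeholders). Note
# that /mineBlock reads request.form['data'], so the POST must be form-encoded, not JSON.
import requests

base = 'http://localhost:5000'
requests.post(base + '/mineBlock', data={'data': 'my block payload'})
print(requests.get(base + '/showChain').text)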
Esempio n. 30
0
            #     if consumer:
            #         consumer.stop()
            should_quite = True

        if should_quite:
            return

        await asyncio.sleep(SOCKET_SLEEP)


EXPERIMENT_URL = '/v1/<username>/<project_name>/experiments/<experiment_sequence>'
WS_EXPERIMENT_URL = '/ws{}'.format(EXPERIMENT_URL)

# Job urls
app.add_websocket_route(
    job_resources,
    '{}/jobs/<job_sequence>/resources'.format(EXPERIMENT_URL))
app.add_websocket_route(
    job_resources,
    '{}/jobs/<job_sequence>/resources'.format(WS_EXPERIMENT_URL))

app.add_websocket_route(
    job_logs,
    '{}/jobs/<job_sequence>/logs'.format(EXPERIMENT_URL))
app.add_websocket_route(
    job_logs,
    '{}/jobs/<job_sequence>/logs'.format(WS_EXPERIMENT_URL))


# Experiment urls
app.add_websocket_route(