def configure_app(
    input_channels: Optional[List["InputChannel"]] = None,
    cors: Optional[Union[Text, List[Text]]] = None,
    auth_token: Optional[Text] = None,
    enable_api: bool = True,
    jwt_secret: Optional[Text] = None,
    jwt_method: Optional[Text] = None,
    route: Optional[Text] = "/webhooks/",
    port: int = constants.DEFAULT_SERVER_PORT,
    log_file: Optional[Text] = None,
):
    """Run the agent."""
    from rasa import server

    if enable_api:
        app = server.create_app(
            cors_origins=cors,
            auth_token=auth_token,
            jwt_secret=jwt_secret,
            jwt_method=jwt_method,
        )
    else:
        app = Sanic(__name__, configure_logging=False)
        CORS(app, resources={r"/*": {"origins": cors or ""}},
             automatic_options=True)

    _configure_logging(log_file)

    if input_channels:
        rasa.core.channels.channel.register(input_channels, app, route=route)
    else:
        input_channels = []

    if logger.isEnabledFor(logging.DEBUG):
        utils.list_routes(app)

    # configure async loop logging
    async def configure_async_logging():
        if logger.isEnabledFor(logging.DEBUG):
            rasa.utils.io.enable_async_loop_debugging(asyncio.get_event_loop())

    app.add_task(configure_async_logging)

    if "cmdline" in {c.name() for c in input_channels}:

        async def run_cmdline_io(running_app: Sanic):
            """Small wrapper to shut down the server once cmd io is done."""
            await asyncio.sleep(1)  # allow server to start
            await console.record_messages(
                server_url=constants.DEFAULT_SERVER_FORMAT.format(port))

            logger.info("Killing Sanic server now.")
            running_app.stop()  # kill the sanic server

        app.add_task(run_cmdline_io)

    return app
class Knopfler:
    def __init__(self, config):
        self.app = Sanic(name="knopfler")
        self.bots = {}
        for bot in config.get("bots"):
            if bot["type"] == "rocket":
                self.bots[bot["name"]] = RocketBot(bot)
            if bot["type"] == "matrix":
                self.bots[bot["name"]] = MatrixBot(bot)

        for link in config.get("links"):
            newroute = self.bots[link["bot"]].get_link(link["channel"])
            self.app.add_route(newroute, link["url"], methods=("GET", "POST"))

        async def home(request):
            return response.text("♫ knopfler is up and running")

        self.app.add_route(home, "/")

        if config.get("healthcheck"):
            async def healthcheck_task():
                while True:
                    urlopen(config["healthcheck"])
                    await asyncio.sleep(60 * 5)

            self.app.add_task(healthcheck_task)
def make_app():
    """Create a new instance of the Synse Server Sanic application.

    This is the means by which all Synse Server applications should be created.

    Returns:
        Sanic: A Sanic application setup and configured to serve Synse Server
        routes.
    """
    app = Sanic(__name__, log_config=LOGGING)
    app.config.LOGO = None

    # Get the application configuration(s)
    config.options.add_config_paths('.', '/synse/config')
    config.options.env_prefix = 'SYNSE'
    config.options.auto_env = True
    config.options.parse(requires_cfg=False)
    config.options.validate()

    # Set up application logging
    setup_logger()

    # Set the language environment variable to that set in the config, if
    # it is not already set. This is how we specify the language/locale for
    # the application.
    # FIXME (etd): this isn't a great way of doing things, especially if Synse
    # Server is being run in a non-containerized environment.
    lang = os.environ.get('LANGUAGE')
    if lang:
        logger.info('LANGUAGE set from env: {}'.format(lang))
    else:
        lang = config.options.get('locale')
        logger.info('LANGUAGE set from config: {}'.format(lang))
        os.environ['LANGUAGE'] = lang

    # Register the blueprints
    app.blueprint(aliases.bp)
    app.blueprint(base.bp)
    app.blueprint(core.bp)

    _disable_favicon(app)
    _register_error_handling(app)

    configure_cache()

    # Add background tasks
    app.add_task(periodic_cache_invalidation)

    # Log out metadata for Synse Server and the application configuration
    logger.info('Synse Server:')
    logger.info('  version: {}'.format(synse.__version__))
    logger.info('  author:  {}'.format(synse.__author__))
    logger.info('  url:     {}'.format(synse.__url__))
    logger.info('  license: {}'.format(synse.__license__))
    logger.info('Configuration: {}'.format(config.options.config))

    return app
def register_with_app(app: sanic.Sanic) -> None:
    """Register all tasks with a Sanic application instance.

    Args:
        app: The application to register the tasks with.
    """
    # Periodically invalidate caches
    logger.info('adding task', task='periodic device cache rebuild')
    app.add_task(_rebuild_device_cache)

    logger.info('adding task', task='periodic plugin refresh')
    app.add_task(_refresh_plugins)
def test_create_task_with_app_arg():
    app = Sanic('test_add_task')
    q = Queue()

    @app.route('/')
    def not_set(request):
        return "hello"

    async def coro(app):
        q.put(app.name)

    app.add_task(coro)

    request, response = app.test_client.get('/')
    assert q.get() == 'test_add_task'
def create_app(config_class=Config):
    app = Sanic(__name__)
    app.config.from_object(config_class)

    # Host static files
    app.static('/static', './core/static')
    app.static('/js', './core/js')

    # Blueprints
    app.blueprint(root)
    app.blueprint(listeners)

    Session(app)  # sanic_session

    app.add_task(handle_daily_emails)

    return app
def test_register_with_app():
    app = Sanic('test-app')
    app.add_task = mock.MagicMock()

    tasks.register_with_app(app)

    app.add_task.assert_has_calls([
        mock.call(tasks._rebuild_device_cache),
        mock.call(tasks._refresh_plugins),
    ])
def create_app(config_path):
    app = Sanic(error_handler=PGErrorHandler())

    app.config.from_pyfile(config_path)
    app.base_url = f"{app.config.METHOD}://{app.config.DOMAIN}"

    BaseModel.init_app(app)

    # TODO Find viable openapi fork
    app.blueprint(openapi_blueprint)
    app.blueprint(swagger_blueprint)

    # app.blueprint(instance)
    app.blueprint(well_known)
    app.blueprint(user_v1)
    app.blueprint(inbox_v1)
    app.blueprint(outbox_v1)

    app.add_task(register_client(app))
    app.add_task(register_admin(app))

    register_extensions(app)

    return app
def run_operation_in_single_sanic_worker(
    app: Sanic, f: Callable[[], Union[None, Awaitable]]
) -> None:
    """Run operation `f` in a single Sanic worker."""
    from multiprocessing.sharedctypes import Value  # noqa: F811
    from ctypes import c_bool

    lock = Value(c_bool, False)

    async def execute():
        if lock.value:
            return

        with lock.get_lock():
            lock.value = True

        if asyncio.iscoroutinefunction(f):
            await f()
        else:
            f()

    app.add_task(execute)
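# A minimal usage sketch for the helper above. `warm_up_cache` and the port are
# illustrative assumptions, not part of the original source. Because the shared
# `Value` flag is created before the worker processes fork, only the first
# worker whose `execute` task runs performs the operation.
from sanic import Sanic

app = Sanic("example")

async def warm_up_cache():
    ...  # e.g. prime a cache exactly once across all workers

run_operation_in_single_sanic_worker(app, warm_up_cache)
app.run(host="0.0.0.0", port=8080, workers=4)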
def test_create_task():
    e = Event()

    async def coro():
        await asyncio.sleep(0.05)
        e.set()

    app = Sanic('test_create_task')
    app.add_task(coro)

    @app.route('/early')
    def not_set(request):
        return text(e.is_set())

    @app.route('/late')
    async def set(request):
        await asyncio.sleep(0.1)
        return text(e.is_set())

    request, response = app.test_client.get('/early')
    assert response.body == b'False'

    request, response = app.test_client.get('/late')
    assert response.body == b'True'
while True:
    try:
        Redisdb = redis.StrictRedis(host='127.0.0.1', port=REDIS_PORT, db=0)
        # cleanup
        Redisdb.delete('messages')
        Redisdb.delete('operations')
        Redisdb.delete("batch_jobs")
        break
    except Exception:
        time.sleep(1)

proc2 = []
# initial load of assets
#proc2.append(subprocess.Popen("python3 bitshares_data.py init", shell=True))
# operations listener
#proc2.append(subprocess.Popen("python3 bitshares_data.py operations_listener", shell=True))

app.add_task(feeder())
app.run(host="0.0.0.0", port=PORT, workers=1)

proc1.kill()
time.sleep(1)
for p in proc2:
    p.kill()
time.sleep(1)
class ServerComponents(metaclass=SingletonMetaClass):
    conf: 'IconConfig' = None

    def __init__(self):
        self.__app = Sanic(__name__, log_config=self._make_log_config())
        self.__app.config.KEEP_ALIVE = False
        CORS(self.__app)

        # Decide whether to create context or not according to whether SSL is applied
        rest_ssl_type = ServerComponents.conf[ConfigKey.REST_SSL_TYPE]
        if rest_ssl_type == SSLAuthType.none:
            self.__ssl_context = None
        elif rest_ssl_type == SSLAuthType.server_only:
            self.__ssl_context = (
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_CERT_PATH],
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_KEY_PATH])
        elif rest_ssl_type == SSLAuthType.mutual:
            self.__ssl_context = ssl.SSLContext(_ssl.PROTOCOL_SSLv23)
            self.__ssl_context.verify_mode = _ssl.CERT_REQUIRED
            self.__ssl_context.check_hostname = False
            self.__ssl_context.load_verify_locations(
                cafile=ServerComponents.conf[ConfigKey.DEFAULT_SSL_TRUST_CERT_PATH])
            self.__ssl_context.load_cert_chain(
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_CERT_PATH],
                ServerComponents.conf[ConfigKey.DEFAULT_SSL_KEY_PATH])
        else:
            Logger.error(
                f"REST_SSL_TYPE must be one of [0,1,2]. But now conf.REST_SSL_TYPE is {rest_ssl_type}")

    def _make_log_config(self) -> dict:
        log_config = LOGGING_CONFIG_DEFAULTS
        log_config['loggers'] = {}
        log_config['handlers'] = {}
        log_config['formatters'] = {}
        return log_config

    @property
    def app(self):
        return self.__app

    @property
    def ssl_context(self):
        return self.__ssl_context

    def set_resource(self):
        self.__app.add_route(NodeDispatcher.dispatch,
                             '/api/node/<channel_name>', methods=['POST'])
        self.__app.add_route(NodeDispatcher.dispatch,
                             '/api/node/', methods=['POST'], strict_slashes=False)
        self.__app.add_route(Version2Dispatcher.dispatch,
                             '/api/v2', methods=['POST'])
        self.__app.add_route(Version3Dispatcher.dispatch,
                             '/api/v3/<channel_name>', methods=['POST'])
        self.__app.add_route(Version3Dispatcher.dispatch,
                             '/api/v3/', methods=['POST'], strict_slashes=False)
        self.__app.add_route(Disable.as_view(), '/api/v1', methods=['POST', 'GET'])
        self.__app.add_route(Status.as_view(), '/api/v1/status/peer')
        self.__app.add_route(Avail.as_view(), '/api/v1/avail/peer')

    def ready(self):
        StubCollection().amqp_target = ServerComponents.conf[ConfigKey.AMQP_TARGET]
        StubCollection().amqp_key = ServerComponents.conf[ConfigKey.AMQP_KEY]
        StubCollection().conf = ServerComponents.conf

        async def ready_tasks():
            Logger.debug('rest_server:initialize')

            if self.conf.get(ConfigKey.TBEARS_MODE, False):
                channel_name = self.conf.get(ConfigKey.CHANNEL, 'loopchain_default')
                await StubCollection().create_channel_stub(channel_name)
                await StubCollection().create_icon_score_stub(channel_name)
                RestProperty().node_type = NodeType.CommunityNode
                RestProperty().rs_target = None
            else:
                await StubCollection().create_peer_stub()
                channels_info = await StubCollection().peer_stub.async_task().get_channel_infos()

                channel_name = None
                for channel_name, channel_info in channels_info.items():
                    await StubCollection().create_channel_stub(channel_name)
                    await StubCollection().create_icon_score_stub(channel_name)

                results = await StubCollection().peer_stub.async_task().get_channel_info_detail(channel_name)
                RestProperty().node_type = NodeType(results[6])
                RestProperty().rs_target = results[3]

            Logger.debug(
                f'rest_server:initialize complete. '
                f'node_type({RestProperty().node_type}), rs_target({RestProperty().rs_target})')

        self.__app.add_task(ready_tasks())

    def serve(self, api_port):
        self.ready()
        self.__app.run(host='0.0.0.0', port=api_port, debug=False, ssl=self.ssl_context)
        edges.extend(create_edge_for_same_user(users_to_post, users))
        stories_graph_by_type[story_type] = {
            "nodes": nodes,
            "edges": edges,
            "number_of_stories": number_of_posts
        }
        log.info("Sleep for %s", sleep_interval)
        await asyncio.sleep(sleep_interval)


app.static('/static', CURRENT_DIR + '/static')
app.static('/', CURRENT_DIR + '/static/index.html')

if __name__ == "__main__":
    app.add_task(
        graph_cron(number_of_posts=5, sleep_interval=60 * 60,
                   descendants_level=3, stories_to_download=["best"]))
    app.add_task(
        graph_cron(number_of_posts=15, sleep_interval=60,
                   descendants_level=3, stories_to_download=["hot"]))
    app.add_task(
        graph_cron(number_of_posts=100, sleep_interval=10,
                   descendants_level=4, stories_to_download=["new"]))
    app.run('0.0.0.0', port=8000, debug=True)
@app.route("/") async def get_idioms(request): if 'query' not in request.args: return HTTPResponse(status=400, body='invalid query') query = ''.join(request.args['query']) if len(query) > 15: return HTTPResponse(status=400, body='query too long') try: res = await scraper.scrape_idioms(query) return json([{ 'idiom': r[0], 'score': r[1] } for r in res], ensure_ascii=False) except Exception as e: logger.exception(e) return HTTPResponse(status=503) @app.route('/health') async def health_check(request): return json({'status': 'success'}) if __name__ == "__main__": logger.setLevel(os.getenv('SANIC_LOGGING_LEVEL', 'INFO')) app.add_task(init) app.run(host="0.0.0.0", access_log=False)
class ServerComponents(metaclass=SingletonMetaClass):
    def __init__(self):
        self.__app = Sanic(__name__)
        self.__app.config.KEEP_ALIVE = False

        # Decide whether to create context or not according to whether SSL is applied
        if conf.REST_SSL_TYPE == conf.SSLAuthType.none:
            self.__ssl_context = None
        elif conf.REST_SSL_TYPE == conf.SSLAuthType.server_only:
            self.__ssl_context = {
                'cert': conf.DEFAULT_SSL_CERT_PATH,
                'key': conf.DEFAULT_SSL_KEY_PATH
            }
        elif conf.REST_SSL_TYPE == conf.SSLAuthType.mutual:
            self.__ssl_context = ssl.SSLContext(_ssl.PROTOCOL_SSLv23)
            self.__ssl_context.verify_mode = ssl.CERT_REQUIRED
            self.__ssl_context.check_hostname = False
            self.__ssl_context.load_verify_locations(
                cafile=conf.DEFAULT_SSL_TRUST_CERT_PATH)
            self.__ssl_context.load_cert_chain(conf.DEFAULT_SSL_CERT_PATH,
                                               conf.DEFAULT_SSL_KEY_PATH)
        else:
            utils.exit_and_msg(
                f"REST_SSL_TYPE must be one of [0,1,2]. But now conf.REST_SSL_TYPE is {conf.REST_SSL_TYPE}")

    @property
    def app(self):
        return self.__app

    @property
    def ssl_context(self):
        return self.__ssl_context

    def set_resource(self):
        self.__app.add_route(json_rpc.NodeDispatcher.dispatch,
                             '/api/node/', methods=['POST'])

        if conf.DISABLE_V1_API:
            self.__app.add_route(Disable.as_view(), '/api/v1', methods=['POST', 'GET'])
        else:
            self.__app.add_route(Query.as_view(), '/api/v1/query')
            self.__app.add_route(Transaction.as_view(), '/api/v1/transactions')
            self.__app.add_route(ScoreStatus.as_view(), '/api/v1/status/score')
            self.__app.add_route(Blocks.as_view(), '/api/v1/blocks')
            self.__app.add_route(InvokeResult.as_view(), '/api/v1/transactions/result')

        self.__app.add_route(Status.as_view(), '/api/v1/status/peer')
        self.__app.add_route(Avail.as_view(), '/api/v1/avail/peer')

    def query(self, data, channel):
        return PeerServiceStub().call(
            "Query",
            loopchain_pb2.QueryRequest(params=data, channel=channel),
            PeerServiceStub.REST_SCORE_QUERY_TIMEOUT)

    def create_transaction(self, data, channel):
        # logging.debug("Grpc Create Tx Data : " + data)
        return PeerServiceStub().call(
            "CreateTx",
            loopchain_pb2.CreateTxRequest(data=data, channel=channel),
            PeerServiceStub.REST_GRPC_TIMEOUT)

    def get_transaction(self, tx_hash, channel):
        return PeerServiceStub().call(
            "GetTx",
            loopchain_pb2.GetTxRequest(tx_hash=tx_hash, channel=channel),
            PeerServiceStub.REST_GRPC_TIMEOUT)

    def ready(self, amqp_target, amqp_key):
        StubCollection().amqp_target = amqp_target
        StubCollection().amqp_key = amqp_key

        async def ready_tasks():
            from loopchain import loggers
            loggers.get_preset().update_logger()
            loggers.update_other_loggers()

            logging.debug('rest_server:initialize')

            await StubCollection().create_peer_stub()
            channels_info = await StubCollection().peer_stub.async_task().get_channel_infos()

            channel_name = None
            for channel_name, channel_info in channels_info.items():
                await StubCollection().create_channel_stub(channel_name)
                if conf.USE_EXTERNAL_SCORE:
                    await StubCollection().create_icon_score_stub(channel_name)
                else:
                    await StubCollection().create_score_stub(
                        channel_name, channel_info['score_package'])

            results = await StubCollection().peer_stub.async_task().get_channel_info_detail(channel_name)
            RestProperty().node_type = conf.NodeType(results[6])
            RestProperty().rs_target = results[3]

            logging.debug(
                f'rest_server:initialize complete. '
                f'node_type({RestProperty().node_type}), rs_target({RestProperty().rs_target})')

        self.__app.add_task(ready_tasks())

    def serve(self, amqp_target, amqp_key, api_port):
        self.ready(amqp_target, amqp_key)
        self.__app.run(host='0.0.0.0', port=api_port, debug=False, ssl=self.ssl_context)
@app.get('/testclient')
async def on_test_client(request):
    client = await account_dto.get_client(request)
    return text('Hello ' + client.username + '!')


@app.get('/testperm')
@requires_permission('admin:update')
async def on_test_perm(request):
    return text('Admin who can only update gained access!')


@app.get('/testrole')
@requires_role('Admin')
async def on_test_role(request):
    return text('Admin gained access!')


@app.exception(RoseError)
async def on_rose_error_test(request, exception: ServerError):
    payload = {
        'error': str(exception),
        'status': exception.status_code
    }
    return json(payload, status=exception.status_code)


if __name__ == '__main__':
    app.add_task(tortoise_init())
    app.run(host='0.0.0.0', port=8000, debug=True)
app.error_handler.add(SanicException, sanic_error_handler)
app.error_handler.add(UniqueViolationError, unique_violation_error_handler)


async def init_plugins(app, loop):
    await db.gino.create_all()
    # await cache.clear()


# Register the listeners
app.register_listener(init_plugins, "after_server_start")

# Register background tasks
from ora_backend.tasks.assign import check_for_reassign_chats_every_half_hour

app.add_task(check_for_reassign_chats_every_half_hour())

# Register Prometheus
try:
    # import prometheus_client as prometheus
    from sanic_prometheus import monitor
except Exception:
    pass
else:
    if MODE == "production":
        # Adds /metrics endpoint to the Sanic server
        monitor(
            app,
            endpoint_type="url",
            latency_buckets=[0.01, 0.05, 0.1, 0.25, 0.5, 1, 10, 30, 60, 120],
        ).expose_endpoint()
class Application:
    def __init__(self):
        self.logger = logger
        self.init_sanic()
        self.init_tokens()
        self.kvstore = KeyValueStore()

    def init_sanic(self):
        self.app = Sanic(__file__)
        self.app.add_route(lambda request: self.healthcheck(request),
                           '/healthcheck')
        self.app.add_route(lambda request: self.sentimentanalysis(request),
                           '/sentiment-analysis', methods=ALL_METHODS)
        self.app.add_route(lambda request: self.jobstatus(request),
                           '/job-status', methods=ALL_METHODS)
        self.app.add_route(lambda request: self.catch_all(request, path='/'),
                           '/', methods=ALL_METHODS)
        self.app.add_route(lambda request, path: self.catch_all(request, path=path),
                           '/<path:path>', methods=ALL_METHODS)

    def get_uuid(self):
        return str(uuid.uuid4())

    def init_tokens(self):
        self.token_manager = TokenManager()
        if "ACCESS_TOKEN" in os.environ:
            token = os.environ["ACCESS_TOKEN"]
        elif "ACCESS_TOKEN__FILE" in os.environ:
            with open(os.environ["ACCESS_TOKEN__FILE"], 'r') as f:
                token = f.read().strip()
        else:
            self.logger.error("Couldn't find a token. Set ACCESS_TOKEN or ACCESS_TOKEN__FILE")
            raise Exception("Couldn't find a token")

        self.logger.info(f"Loading token ACCESS_TOKEN: {token}")
        self.token_manager.add_token(token, 1, 3)

    def catch_all(self, request, path):
        return response.json({
            'status': 'invalid-arguments',
            'message': 'Invalid API request'
        }, status=404)

    def healthcheck(self, request):
        rnd = self.get_uuid()
        self.kvstore.set(rnd, {"content": rnd}, 5)
        job = self.kvstore.get(rnd)
        if not job or job["content"] != rnd:
            return response.text(None, status=500)
        # TODO: check Google API connectivity?
        return response.text(None, status=200)

    def sentimentanalysis(self, request):
        result = self.verify_request(request)
        if result:
            return result

        content = request.json.get("content")
        language = request.json.get("language", "en")
        if not content:
            return response.json({
                "status": "invalid-arguments",
                "message": "The request is missing the 'content' parameter"
            }, status=400)

        job_id = str(uuid.uuid4())
        self.app.add_task(self.long_running(language, content, job_id))
        return response.json({
            "status": "pending",
            "job-id": str(job_id)
        })

    async def long_running(self, language, content, job_id):
        self.kvstore.set(job_id, {"status": "pending"})
        try:
            self.logger.info("[Job %s]: Starting communication with Google", job_id)
            text = content
            if language != DEFAULT_SA_LANG:
                self.logger.info("[Job %s]: Content requires translation. Calling Translation API", job_id)
                translation = self.translate.translate(content, target_language="en")
                self.logger.debug("[Job %s]: Content(translated): %s", job_id, translation)
                text = translation["translatedText"]

            self.logger.info("[Job %s]: Calling Sentiment Analysis API", job_id)
            self.logger.debug("[Job %s]: Content: %s", job_id, text)
            document = types.Document(content=text, type=enums.Document.Type.PLAIN_TEXT)
            annotations = self.language.analyze_sentiment(document=document)
            self.logger.debug("[Job %s]: Annotations: %s", job_id, annotations)
            self.kvstore.set(job_id, {
                "status": "success",
                "sentiment-score": annotations.document_sentiment.score,
            })
            self.logger.info("[Job %s]: Finish processing", job_id)
        except Exception as ex:
            correlation_id = self.get_uuid()
            self.logger.exception("[Job %s]: We encountered an error. Correlation id '%s'",
                                  job_id, correlation_id)
            self.kvstore.set(job_id, {
                "status": "server-error",
                "message": "Encountered an error while processing request",
                "correlation-id": correlation_id
            })

    def jobstatus(self, request):
        result = self.verify_request(request)
        if result:
            return result

        job_id = request.json.get("job-id")
        if not job_id:
            return response.json({
                "status": "invalid-arguments",
                "message": "The request is missing the 'job-id' parameter"
            }, status=400)

        job = self.kvstore.get(job_id)
        if not job:
            return response.json({
                "status": "not-found",
                "message": "The specified job was not found"
            }, status=404)

        self.logger.info("Key-Value store returned %s", job)
        return response.json(job, status=200)

    def verify_request(self, request, methods=frozenset(["POST", "PUT"])):
        if request.method.upper() not in methods:
            return response.json({
                "status": "invalid-arguments",
                "message": "Unsupported method %s for this endpoint" % request.method.upper(),
            }, status=400)

        if request.json is None:
            return response.json({
                "status": "invalid-arguments",
                "message": "Request body needs to be JSON",
            }, status=400)

        token = request.json.get("token")
        if not token:
            self.logger.error("The request is missing the mandatory 'token' parameter")
            return response.json({
                "status": "access-denied",
                "message": "The request doesn't contain mandatory 'token' parameter"
            }, status=401)

        if not self.token_manager.is_valid(token):
            self.logger.error(f"Token {token} is not valid")
            return response.json({
                "status": "access-denied",
                "message": "The token was not recognized"
            }, status=403)

        if not self.token_manager.can_consume(token):
            self.logger.error(f"Token {token} doesn't have enough credits")
            return response.json({
                "status": "rate-limit",
                "message": "Your token used up its request quota. Wait a bit..."
            }, status=429)

        return None

    def run(self):
        self.translate = translate.Client()
        self.language = language.LanguageServiceClient()
        self.app.run(host="0.0.0.0", port=int(os.environ.get('PORT', "8080")))
                 connection=redis.Redis(host='127.0.0.1', port=REDIS_PORT))
Q_normal.empty()
Q_bg = Queue('background',
             connection=redis.Redis(host='127.0.0.1', port=REDIS_PORT))
#Q_bg.empty()

proc3 = []
for pr in range(0, 4):
    proc3.append(
        subprocess.Popen(
            "./rq_td_worker2.py web --url redis://127.0.0.1:" + str(REDIS_PORT),
            shell=True))
for pr in range(0, 4):
    proc3.append(
        subprocess.Popen(
            "./rq_td_worker2.py background --url redis://127.0.0.1:" + str(REDIS_PORT),
            shell=True))
time.sleep(0.2)

app.add_task(broker())
app.run(host="0.0.0.0", port=PORT, workers=1)

proc1.kill()
time.sleep(1)
for p in proc3:
    p.kill()
time.sleep(1)
app.static('/static', './static')
app.blueprint(_site_bp)


async def calculate_emission():
    while True:
        await asyncio.sleep(30)
        logger.info('Refreshing emission...')
        block_count = daemon.get_block_count()
        prev_block_count = int(cache_client.get(M0RKCOIN_PREV_HEIGHT) or 1)
        total_reward = int(cache_client.get(M0RKCOIN_EMISSION_KEY) or 0)
        for height in range(prev_block_count, block_count + 1):
            block = daemon.get_block_by_height(height)
            total_reward += block['reward']
        logger.info(f'New Emission: {total_reward} at '
                    f'Height {block_count + 1}')
        cache_client.set(M0RKCOIN_EMISSION_KEY, total_reward)
        cache_client.set(M0RKCOIN_PREV_HEIGHT, block_count + 1)


app.add_task(calculate_emission())

if __name__ == '__main__':
    app.run(host=config.host, port=config.port, debug=config.debug)
            # so this still copies too much stuff.
            data_part = await request.stream.read()
            if data_part is None:
                break
            data[pos:len(data_part) + pos] = data_part
            pos += len(data_part)
            if pos > MAX_SIZE:
                raise HandlingError("Too large")

        # ideally, we would minimize preprocessing...
        im = PIL.Image.open(io.BytesIO(data))
        im = torchvision.transforms.functional.resize(im, (228, 228))
        im = torchvision.transforms.functional.to_tensor(im)
        im = im[:3]  # drop alpha channel if present
        if im.dim() != 3 or im.size(0) < 3 or im.size(0) > 4:
            raise HandlingError("need rgb image")
        out_im = await style_transfer_runner.process_input(im)
        out_im = torchvision.transforms.functional.to_pil_image(out_im)
        imgByteArr = io.BytesIO()
        out_im.save(imgByteArr, format='JPEG')
        return sanic.response.raw(imgByteArr.getvalue(), status=200,
                                  content_type='image/jpeg')
    except HandlingError as e:
        # we don't want these to be logged...
        return sanic.response.text(e.handling_msg, status=e.handling_code)


app.add_task(style_transfer_runner.model_runner())
app.run(host="0.0.0.0", port=8000, debug=True)
# -*- coding: utf-8 -*-
import asyncio

from sanic import Sanic

app = Sanic()


async def notify_server_started_after_five_seconds():
    await asyncio.sleep(5)
    print('Server successfully started!')


app.add_task(notify_server_started_after_five_seconds())

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)
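# A variant sketch of the example above: `add_task` also accepts the callable
# itself rather than an already-created coroutine object, deferring task
# creation until the server starts; and, as the test_create_task_with_app_arg
# snippet earlier demonstrates, a callable taking one argument receives the
# app instance. `notify` is an illustrative name, not from the original.
import asyncio
from sanic import Sanic

app = Sanic()


async def notify(app):
    await asyncio.sleep(5)
    print(f'{app.name} successfully started!')


# Pass the function, not notify(): Sanic schedules it at startup and
# injects the app because the signature takes a single argument.
app.add_task(notify)

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=8000)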
import os
import asyncio

from sanic import Sanic

from server.clients import WSClient
from server.world import ChatterUniverse, ChatterWorld

app = Sanic()
universe = ChatterUniverse()
app.add_task(universe.run())

os.chdir(os.path.dirname(__file__))
app.static('/js', './static/js', content_type='application/javascript')
app.static('/img', './static/img')
app.static('/', './static/html/index.html')


@app.websocket('/connect')
async def client_connect(request, ws):
    face = await ws.recv()
    print('Receive connection:', face)
    client = WSClient(ws, face)
    async with universe.add_client(client):
        await client.run()
    if client.ws.open:
        print('Client exited without closing socket!')
class Node(Blockchain, Peers):
    """A Node that communicates over HTTP using Sanic and requests."""

    def __init__(self, port=8000, db='blockchain.db'):
        self.port = port
        self.db = db
        Peers.__init__(self)
        self.app = Sanic(__name__)

        @self.app.route('/block', methods=['POST'])
        async def block(request):
            if request.body is None:
                return response.json({'success': False},
                                     headers={'Access-Control-Allow-Origin': '*'})
            try:
                block = Block.from_dict(loads(request.body.decode()))
            except KeyError:
                return response.json({'success': False},
                                     headers={'Access-Control-Allow-Origin': '*'})

            if await self.add_block(block):
                await self.broadcast_block(block)
                return response.json({'success': True},
                                     headers={'Access-Control-Allow-Origin': '*'})

            return response.json({'success': False},
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/transaction', methods=['POST'])
        async def transaction(request):
            print(request.body)
            if request.body is None:
                return response.json({'success': False},
                                     headers={'Access-Control-Allow-Origin': '*'})
            try:
                transaction = Transaction.from_dict(loads(request.body.decode()))
            except KeyError:
                return response.json({'success': False},
                                     headers={'Access-Control-Allow-Origin': '*'})

            if not await self.add_transaction(transaction):
                return response.json({'success': False},
                                     headers={'Access-Control-Allow-Origin': '*'})

            await self.broadcast_transaction(transaction)
            return response.json({'success': True},
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/blocks/<index:number>', methods=['GET'])
        async def blocks(request, index):
            try:
                return response.json(
                    loads(repr(await self.block_from_index(index))),
                    headers={'Access-Control-Allow-Origin': '*'})
            except IndexError:
                return response.json({'success': False},
                                     headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/getlastblock', methods=['GET'])
        async def getlastblock(request):
            return response.json(loads(repr(await self.last_block())),
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/blockrange/<start:number>/<end:number>', methods=['GET'])
        async def blockrange(request, start, end):
            try:
                return response.json(
                    loads(repr(await self.blocks_from_range(start, end))),
                    headers={'Access-Control-Allow-Origin': '*'})
            except IndexError:
                return response.json({'success': False},
                                     headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/peers', methods=['GET', 'POST'])
        async def peers(request):
            if request.method == 'GET':
                return response.json(list(self.peers),
                                     headers={'Access-Control-Allow-Origin': '*'})

            if request.method == 'POST':
                if request.body is None:
                    return response.json({'success': False},
                                         headers={'Access-Control-Allow-Origin': '*'})

                if request.body.decode() == '{}:{}'.format(
                        socket.gethostbyname(socket.getfqdn()), self.port):
                    return response.json({'success': False},
                                         headers={'Access-Control-Allow-Origin': '*'})

                self.peers.add(request.body.decode())
                return response.json({'success': True},
                                     headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/balance/<address>', methods=['GET'])
        async def balance(request, address):
            return response.text(str(await self.get_balance(address)),
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/nonce/<address>', methods=['GET'])
        async def nonce(request, address):
            return response.text(str(await self.get_account_nonce(address)),
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/pending', methods=['GET'])
        async def pending(request):
            return response.json(loads(repr(self.pending)),
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/config', methods=['GET'])
        async def config(request):
            return response.json(self.config_,
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/difficulty', methods=['GET'])
        def difficulty(request):
            return response.text(str(self.difficulty),
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.route('/height', methods=['GET'])
        async def height(request):
            return response.text(str(await self.height()),
                                 headers={'Access-Control-Allow-Origin': '*'})

        @self.app.websocket('/subscribeblock')
        async def subscription(request, websocket):
            self.block_subscribers.add(websocket)
            while True:
                try:
                    await websocket.recv()
                except ConnectionClosed:
                    self.block_subscribers.remove(websocket)

    async def mine(self, reward_address, lowest_fee=1):
        """Asynchronous POW task."""
        while True:
            block = await self.mine_block(reward_address, lowest_fee)
            await self.add_block(block)
            await self.broadcast_block(block)

    async def interface(self):
        """Asynchronous user input task."""
        logging.getLogger('root').setLevel('CRITICAL')
        logging.getLogger('sanic.error').setLevel('CRITICAL')
        logging.getLogger('sanic.access').setLevel('CRITICAL')

        loop = asyncio.get_event_loop()
        while True:
            cmd = await ainput('(NODE) > ')
            cmd = cmd.lower().split()

            if not cmd:
                pass

            elif cmd[0] == 'mine':
                if len(cmd) > 1:
                    if cmd[1] == 'stop':
                        try:
                            mining_task.cancel()
                            del mining_task
                            print('Stopped mining task.')
                        except NameError:
                            print('The node is not mining.')
                    else:
                        if 'mining_task' in locals():
                            print('The node is already mining.')
                        else:
                            mining_task = loop.create_task(self.mine(cmd[1]))
                            print('Started mining task.')
                else:
                    if 'mining_task' in locals():
                        print('The node is already mining.')
                    else:
                        with open('./asyncoin/config/keys.yaml') as key_file:
                            address = yaml.load(key_file.read())['address']

                        if not address:
                            print("No address found in 'keys.yaml', use "
                                  "'python3 run.py generate' to generate a pair.")
                        else:
                            mining_task = loop.create_task(self.mine(address))
                            print('Started mining task.')

            elif cmd[0] == 'send':
                with open('./asyncoin/config/keys.yaml') as key_file:
                    enc_private = yaml.load(key_file.read())['encrypted_private']

                if enc_private:
                    pass_ = await ainput('Enter your Passphrase > ')
                    try:
                        keys = KeyPair(decrypt(pass_.encode(), enc_private).decode())
                    except ValueError:
                        print('Unable to decrypt private key.')
                        continue

                    to = await ainput('Address to send to > ')
                    if len(to) != 96:
                        print('The address must be 96 characters long.')
                        continue

                    amount = await ainput('Amount to send > ')
                    try:
                        amount = int(amount)
                    except ValueError:
                        print("That's not a number.")
                        continue

                    fee = await ainput('Fee (at least 1) > ')
                    try:
                        fee = int(fee)
                    except ValueError:
                        print("That's not a number.")
                        continue

                    balance = await self.get_balance(keys.address)
                    if amount > balance:
                        print('You only have {}.'.format(balance))
                        continue

                    transaction = keys.Transaction(
                        to=to, amount=amount, fee=fee,
                        nonce=await self.get_account_nonce(keys.address))

                    print('Created Transaction {}'.format(transaction.hash))
                    await self.add_transaction(transaction)
                    await self.broadcast_transaction(transaction)
                    print('Broadcasting transaction...')
                else:
                    print('No encrypted key found in keys.yaml.')

            elif cmd[0] == 'balance':
                if len(cmd) > 1:
                    print('Balance: {}'.format(await self.get_balance(cmd[1])))
                else:
                    with open('./asyncoin/config/keys.yaml') as key_file:
                        address = yaml.load(key_file.read())['address']

                    if not address:
                        print("No address found in 'keys.yaml', use "
                              "'python3 run.py generate' to generate a pair.")
                    else:
                        print('Balance: {}'.format(await self.get_balance(address)))

            elif cmd[0] == 'exit':
                for task in asyncio.Task.all_tasks():
                    task.cancel()
                loop.stop()

    async def sync(self, sync_url):
        # get rid of schema or extra / at end
        node_url = urlparse(sync_url).netloc if urlparse(sync_url).netloc \
            else urlparse(sync_url).path

        async with aiohttp.ClientSession() as session:
            try:
                async with session.get('http://{}/config'.format(node_url)) as response:
                    config = await response.json()
            except aiohttp.client_exceptions.ClientConnectorError:
                print('That node is not online.')
                for task in asyncio.Task.all_tasks():
                    task.cancel()
                asyncio.get_event_loop().stop()

        if os.path.exists(self.db):
            Blockchain.__init__(self, config_=config, db=self.db)
            last_block = await self.last_block()
            last_block_index = last_block.index

            async with aiohttp.ClientSession() as session:
                async with session.get('http://{}/blocks/{}'.format(
                        node_url, last_block_index)) as response:
                    if Block.from_dict(await response.json()).hash != last_block.hash:
                        raise ValueError(
                            'Unable to sync from that node, blocks are not the same.')
        else:
            # don't use Blockchain.start_db so we don't mine genesis block
            async with aiosqlite.connect(self.db) as db:
                await db.executescript(startup_script)
                await db.commit()

            Blockchain.__init__(self, config_=config, db=self.db)

            async with aiohttp.ClientSession() as session:
                async with session.get('http://{}/blocks/0'.format(node_url)) as response:
                    block = Block.from_dict(await response.json())
                    if self.verify_genesis_block(block):
                        async with aiosqlite.connect(self.db) as db:
                            await db.execute(block_template,
                                             (block.index, block.hash, block.nonce,
                                              block.previous_hash, block.timestamp))
                            await db.execute(
                                transaction_template,
                                (block.hash, block.data[0].hash, block.data[0].to,
                                 block.data[0].from_, block.data[0].amount,
                                 block.data[0].timestamp, block.data[0].signature,
                                 block.data[0].nonce, block.data[0].fee))
                            await db.commit()

        async with aiohttp.ClientSession() as session:
            async with session.get('http://{}/height'.format(node_url)) as response:
                peer_height = int(await response.text())

        our_height = await self.height()

        while peer_height != our_height:
            async with aiohttp.ClientSession() as session:
                # Sync in 50 block chunks just in case
                for x in range(math.floor(our_height / 50),
                               math.ceil(peer_height / 50)):
                    async with session.get('http://{}/blockrange/{}/{}'.format(
                            node_url, our_height,
                            our_height + 50 if our_height + 50 <= peer_height - 1
                            else peer_height - 1)) as response:
                        # not using asyncio.wait or asyncio.gather to preserve order
                        for block in await response.json():
                            print(await self.add_block(Block.from_dict(block),
                                                       syncing=True))

                our_height = await self.height()
                async with session.get('http://{}/height'.format(node_url)) as response:
                    peer_height = int(await response.text())

        self.peers.add(node_url)
        for node in await self.find_peers():
            self.peers.add(node)

        peers = set(self.peers)
        for peer in peers:
            try:
                async with aiohttp.ClientSession() as session:
                    await session.post('http://{}/peers'.format(node_url),
                                       data='{}:{}'.format(
                                           socket.gethostbyname(socket.getfqdn()),
                                           self.port))
            except aiohttp.client_exceptions.ClientConnectorError:
                self.peers.remove(peer)

    def run(self, sync=None):
        """Spin up a blockchain and start the Sanic server."""
        self.db = '{}{}'.format(self.port, self.db)
        if sync is not None:
            asyncio.get_event_loop().run_until_complete(self.sync(sync))
        elif not os.path.exists(self.db):
            with open('./config/keys.yaml') as key_file:
                address = yaml.load(key_file.read())['address']

            if not address:
                raise KeyError("No address found in 'keys.yaml', use "
                               "'python3 run.py generate' to generate a pair.")

            Blockchain.__init__(self, genesis_address=address, db=self.db)
            print('Started Blockchain and Mined Genesis Block.')
        else:
            Blockchain.__init__(self, db=self.db)
            print('Loaded Blockchain from Database.')

        loop = asyncio.get_event_loop()
        self.app.add_task(self.interface())
        loop.create_task(
            self.app.create_server(host=socket.gethostbyname(socket.getfqdn()),
                                   port=self.port))
        loop.run_forever()
    chat_id = request.json['chatId']
    link = request.json['link']
    client = request.ctx.tg_client
    chat = await get_chat_by_something(chat_id, client)

    needs_delete = False
    is_deleted = False
    try:
        revoked_invite = await client(
            functions.messages.EditExportedChatInviteRequest(
                peer=chat,
                link=link,
                revoked=True,
            ))
        needs_delete = True
    except errors.rpcerrorlist.InviteHashExpiredError:
        needs_delete = False
        is_deleted = True

    if needs_delete:
        is_deleted = await client(
            functions.messages.DeleteExportedChatInviteRequest(peer=chat,
                                                               link=link))

    return json({"isDeleted": is_deleted})


app.add_task(connect_tg_client())
app.run(host='0.0.0.0', port=3000)
def create_app(game: Game, player_class, request_class, response_class, config) -> Sanic:
    app = Sanic()
    game.app = app
    connections = set()

    @app.websocket("/game")
    async def feed(request, ws):
        connections.add(ws)
        player = None
        try:
            while True:
                data = await ws.recv()
                if data is not None:
                    request = request_class(data)
                    if player:
                        data = game.process_request(player, request)
                    else:
                        data, player = game.process_request(player, request)
                    response = response_class(data)
                    await ws.send(response())
        finally:
            if player:
                game.remove_player(player)
            return json({"status": "end"})

    @app.route("/login", methods=["POST"])
    async def login(request):
        username = request.json.get("username") or request.form.get("username")
        server_address = config.SCHEMA + "://" + \
            config.DOMAIN + ":" + str(config.PORT) + "/game"
        return json({
            "token": jwt.encode({'username': username}, config.SECRET_KEY,
                                algorithm='HS256'),
            "websocket": server_address
        })

    @app.middleware('response')
    async def add_response_header(request, response):
        response.headers['Access-Control-Allow-Origin'] = "*"

    async def update():
        while True:
            now = time.time()
            game.update()
            info = game.get_info()
            data = {"type": "update", "payload": info}
            response = response_class(data)
            data = response()
            for connection in connections:
                app.add_task(send_data(connection, data, connections))
            print("update in %f" % (time.time() - now))
            await asyncio.sleep(1 / config.SERVER_FPS)

    app.add_task(update())

    return app
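# The `send_data` helper called from update() above is not shown in the
# snippet. A plausible shape, assuming it should push the payload and drop
# connections whose sockets have gone away, might look like this (a sketch,
# not the original implementation):
async def send_data(connection, data, connections):
    try:
        await connection.send(data)
    except Exception:
        # The client went away; stop broadcasting to it.
        connections.discard(connection)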
@app.post('/training')
async def training(request: Request):
    votes = request.json['votes']
    inserted_user = await request['accessor'].batch_register_users(
        {vote[0] for vote in votes})
    inserted = await request['accessor'].insert_votes(votes)
    return json({'inserted_users': inserted_user, 'inserted_votes': inserted})


@app.get('/activation')
async def activation(request: Request):
    '''Returns the activation value for the given set of heuristics'''
    heuristics = request.json['heuristics']
    a = await algorithm_module.get_activation(heuristics, request['accessor'],
                                              app.predictor)
    return json({"activation": a, 'received_heuristics': heuristics})


if __name__ == '__main__':
    app.run_retrain = True
    if retrain_config['periodic']:
        print("Periodic training enabled")
        app.add_task(partial(periodic_retrain, retrain_config['periodic']))
    app.run(host='0.0.0.0', port=8000)
"response": { "uid": original_request["request"]["uid"], "allowed": allowed, "status": error, } } ) logger.info(f"Response {response}") return response @app.post("/log-all") async def log_all(request: Request): original_request = request.json if getenv("DEBUG"): if original_request["request"]["namespace"] == "default": logger.info("{}".format(pp.pformat(original_request))) return json( {"response": {"uid": original_request["request"]["uid"], "allowed": True}} ) if __name__ == "__main__": app.add_task(gather_allowed_registries()) app.run( host="0.0.0.0", port=6543, debug=True, ssl={"cert": "/data/ssl/cert.pem", "key": "/data/ssl/key.pem"}, )
class ServerComponents(metaclass=SingletonMetaClass):
    def __init__(self):
        self.__app = Sanic(__name__)
        self.__app.config.KEEP_ALIVE = False

        # Decide whether to create an SSL context according to whether SSL is applied.
        if conf.REST_SSL_TYPE is conf.SSLAuthType.none:
            self.__ssl_context = None
        elif conf.REST_SSL_TYPE == conf.SSLAuthType.server_only:
            self.__ssl_context = (conf.DEFAULT_SSL_CERT_PATH,
                                  conf.DEFAULT_SSL_KEY_PATH)
        elif conf.REST_SSL_TYPE == conf.SSLAuthType.mutual:
            self.__ssl_context = ssl.SSLContext(_ssl.PROTOCOL_SSLv23)
            self.__ssl_context.verify_mode = ssl.CERT_REQUIRED
            self.__ssl_context.check_hostname = False
            self.__ssl_context.load_verify_locations(
                cafile=conf.DEFAULT_SSL_TRUST_CERT_PATH)
            self.__ssl_context.load_cert_chain(conf.DEFAULT_SSL_CERT_PATH,
                                               conf.DEFAULT_SSL_KEY_PATH)
        else:
            utils.exit_and_msg(
                f"REST_SSL_TYPE must be one of [0,1,2]. But now conf.REST_SSL_TYPE is {conf.REST_SSL_TYPE}")

    @property
    def app(self):
        return self.__app

    @property
    def stub(self):
        return self.__stub_to_rs_service

    @property
    def ssl_context(self):
        return self.__ssl_context

    def set_stub_port(self, port):
        self.__stub_to_rs_service = StubManager(
            conf.IP_LOCAL + ':' + str(port),
            loopchain_pb2_grpc.RadioStationStub,
            ssl_auth_type=conf.GRPC_SSL_TYPE)

    def set_resource(self):
        self.__app.add_route(Peer.as_view(), '/api/v1/peer/<request_type:string>')
        self.__app.add_route(Configuration.as_view(), '/api/v1/conf')
        self.__app.add_route(
            Certificate.as_view(),
            '/api/v1/cert/<request_type:string>/<certificate_type:string>')

    def get_peer_list(self, channel):
        return self.__stub_to_rs_service.call(
            "GetPeerList",
            loopchain_pb2.CommonRequest(request="",
                                        group_id=conf.ALL_GROUP_ID,
                                        channel=channel))

    def get_leader_peer(self, channel):
        return self.__stub_to_rs_service.call(
            "Request",
            loopchain_pb2.Message(code=message_code.Request.peer_get_leader,
                                  channel=channel))

    def get_peer_status(self, peer_id, group_id, channel):
        return self.__stub_to_rs_service.call_in_times(
            "GetPeerStatus",
            loopchain_pb2.PeerID(peer_id=peer_id, group_id=group_id,
                                 channel=channel))

    def get_peer_status_async(self, peer_id, group_id, channel) -> grpc.Future:
        return self.__stub_to_rs_service.call_async(
            "GetPeerStatus",
            loopchain_pb2.PeerID(peer_id=peer_id, group_id=group_id,
                                 channel=channel))

    def get_configuration(self, conf_info):
        return self.__stub_to_rs_service.call(
            "Request",
            loopchain_pb2.Message(code=message_code.Request.rs_get_configuration,
                                  meta=conf_info))

    def set_configuration(self, conf_info):
        return self.__stub_to_rs_service.call(
            "Request",
            loopchain_pb2.Message(code=message_code.Request.rs_set_configuration,
                                  meta=conf_info))

    def response_simple_success(self):
        result = {
            'response_code': message_code.Response.success,
            'message': message_code.get_response_msg(message_code.Response.success)
        }
        return result

    def abort_if_url_doesnt_exist(self, request_type, type_list):
        result = {'response_code': message_code.Response.fail}
        if request_type not in type_list.values():
            result['message'] = "The resource doesn't exist"
        return result

    def ready(self):
        async def ready_tasks():
            from loopchain import loggers
            loggers.get_preset().update_logger()
            loggers.update_other_loggers()
            logging.debug('rest_server:initialize complete.')

        self.__app.add_task(ready_tasks())

    def serve(self, api_port):
        self.ready()
        self.__app.run(host='0.0.0.0', port=api_port, debug=False, ssl=self.ssl_context)
# coding=utf-8
from functools import partial

from sanic import Sanic
from asyncpg import create_pool

from apps import PagesHandler, PageHandler, IndexHandler, QueueHandler
from crawler import crawler_executor
from settings import settings

app = Sanic()
app.add_task(crawler_executor(app))

route = partial(app.route, methods=frozenset(['GET', 'POST', 'PATCH', 'DELETE']))
run = partial(app.run, host='0.0.0.0', port=8088)


@app.listener('before_server_start')
async def register_db(app, loop):
    app.pg_pool = await create_pool(**settings['database'], loop=loop, max_size=10)
    # check if tables exist
    async with app.pg_pool.acquire() as conn:
        res = await conn.fetchrow(
            "SELECT COUNT(*) as count FROM pg_catalog.pg_tables WHERE tablename LIKE 'estate%'"
        )
        if res['count'] == 0:
            await conn.execute('CREATE TABLE estate_pages ('
                               'id bigserial, '