async def test_render_in_quart_app(self):
    """Check that aioprometheus.render() integrates with a Quart app.

    Builds a small app exposing a counter metric on a '/metrics' route,
    then fetches the metrics in the default, text and binary exposition
    formats and checks the negotiated content type each time.
    """
    app = Quart(__name__)
    app.registry = aioprometheus.Registry()
    app.events_counter = aioprometheus.Counter("events", "Number of events.")
    app.registry.register(app.events_counter)

    @app.route("/")
    async def index():
        # Every hit on the root route bumps the counter.
        app.events_counter.inc({"path": "/"})
        return "hello"

    @app.route("/metrics")
    async def handle_metrics():
        # render() picks the exposition format from the Accept header(s).
        content, http_headers = aioprometheus.render(
            app.registry, request.headers.getlist("accept")
        )
        return content, http_headers

    # The test client also starts the web service
    test_client = app.test_client()

    # Access root to increment metric counter
    response = await test_client.get("/")
    self.assertEqual(response.status_code, 200)

    # Get default format
    response = await test_client.get("/metrics", headers={"accept": "*/*"})
    self.assertEqual(response.status_code, 200)
    self.assertIn(
        aioprometheus.formats.TEXT_CONTENT_TYPE,
        response.headers.get("content-type"),
    )
    # payload = await response.get_data()

    # Get text format
    response = await test_client.get("/metrics", headers={"accept": "text/plain;"})
    self.assertEqual(response.status_code, 200)
    self.assertIn(
        aioprometheus.formats.TEXT_CONTENT_TYPE,
        response.headers.get("content-type"),
    )

    # Get binary format
    response = await test_client.get(
        "/metrics", headers={"accept": aioprometheus.formats.BINARY_CONTENT_TYPE}
    )
    self.assertEqual(response.status_code, 200)
    self.assertIn(
        aioprometheus.formats.BINARY_CONTENT_TYPE,
        response.headers.get("content-type"),
    )
def run(self):
    """Start the HTTPS listener: ensure TLS cert material exists, register
    the C2 routes on a blueprint, and run the Quart app with TLS."""
    #ssl_context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    #ssl_context.options |= ssl.OP_NO_TLSv1 | ssl.OP_NO_TLSv1_1 | ssl.OP_NO_COMPRESSION
    #ssl_context.set_ciphers('ECDHE+AESGCM')
    #ssl_context.load_cert_chain(, )
    #ssl_context.set_alpn_protocols(['http/1.1', 'h2'])

    # Only auto-generate a certificate when the default paths are in use
    # and either file is missing or regeneration was requested.
    if (self['Key'] == 'data/key.pem') and (self['Cert'] == 'data/cert.pem'):
        if not os.path.exists(self['Key']) or not os.path.exists(self['Cert']) or self['RegenCert']:
            create_self_signed_cert()

    """
    While we could use the standard decorators to register these routes,
    using add_url_rule() allows us to create diffrent endpoint names
    programmatically and pass the classes self object to the routes
    """
    loop = asyncio.get_event_loop()

    http_blueprint = Blueprint(__name__, 'https')
    # Reject requests from obvious CLI tools before any route runs.
    http_blueprint.before_request(self.check_if_naughty)
    #http_blueprint.after_request(self.make_normal)

    http_blueprint.add_url_rule('/<uuid:GUID>', 'key_exchange', self.key_exchange, methods=['POST'])
    http_blueprint.add_url_rule('/<uuid:GUID>', 'stage', self.stage, methods=['GET'])
    http_blueprint.add_url_rule('/<uuid:GUID>/jobs', 'jobs', self.jobs, methods=['GET'])
    http_blueprint.add_url_rule('/<uuid:GUID>/jobs/<job_id>', 'job_result', self.job_result, methods=['POST'])

    # Add a catch all route
    http_blueprint.add_url_rule('/', 'unknown_path', self.unknown_path, defaults={'path': ''})
    http_blueprint.add_url_rule('/<path:path>', 'unknown_path', self.unknown_path, methods=['GET', 'POST'])

    self.app = Quart(__name__)

    # Quieten Quart's own loggers unless --debug was passed.
    logging.getLogger('quart.app').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)
    logging.getLogger('quart.serving').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)

    #serving_handler.setFormatter('%(h)s %(p)s - - %(t)s statusline: "%(r)s" statuscode: %(s)s responselen: %(b)s protocol: %(H)s')
    #logging.getLogger('quart.app').removeHandler(default_handler)

    self.app.register_blueprint(http_blueprint)
    self.app.run(host=self['BindIP'], port=self['Port'], debug=False,
                 #ssl=ssl_context,
                 certfile=self['Cert'], keyfile=self['Key'],
                 use_reloader=False,
                 #access_log_format=,
                 loop=loop)
def run(self):
    """
    Start the plain-HTTP listener.

    While we could use the standard decorators to register these routes,
    using add_url_rule() allows us to create different endpoint names
    programmatically and pass the classes self object to the routes
    """
    loop = asyncio.get_event_loop()

    http_blueprint = Blueprint(__name__, 'http')
    # Reject requests from obvious CLI tools before any route runs.
    http_blueprint.before_request(self.check_if_naughty)
    #http_blueprint.after_request(self.make_normal)

    http_blueprint.add_url_rule('/<uuid:GUID>', 'key_exchange', self.key_exchange, methods=['POST'])
    http_blueprint.add_url_rule('/<uuid:GUID>', 'stage', self.stage, methods=['GET'])
    http_blueprint.add_url_rule('/<uuid:GUID>/jobs', 'jobs', self.jobs, methods=['GET'])
    http_blueprint.add_url_rule('/<uuid:GUID>/jobs/<job_id>', 'job_result', self.job_result, methods=['POST'])

    # Add a catch all route
    http_blueprint.add_url_rule('/', 'unknown_path', self.unknown_path, defaults={'path': ''})
    http_blueprint.add_url_rule('/<path:path>', 'unknown_path', self.unknown_path, methods=['GET', 'POST'])

    self.app = Quart(__name__)

    # Quieten Quart's own loggers unless --debug was passed.
    logging.getLogger('quart.app').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)
    logging.getLogger('quart.serving').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)

    #serving_handler.setFormatter('%(h)s %(p)s - - %(t)s statusline: "%(r)s" statuscode: %(s)s responselen: %(b)s protocol: %(H)s')
    #logging.getLogger('quart.app').removeHandler(default_handler)

    self.app.register_blueprint(http_blueprint)
    self.app.run(host=self['BindIP'], port=self['Port'], debug=False,
                 use_reloader=False,
                 #access_log_format=,
                 loop=loop)
science, geolocate, stats) import config import logbook import sys from covid_hackgov_server.errors import CovidHackgovError import asyncpg from quart_cors import cors handler = logbook.StreamHandler(sys.stdout, level=logbook.INFO) handler.push_application() log = logbook.Logger("covid_hackgov_server.boot") logbook.compat.redirect_logging() app = Quart(__name__) app.config.from_object(f"config.{config.MODE}") app.debug = app.config.get("DEBUG", False) if app.debug: log.info("Running in debug mode") handler.level = logbook.DEBUG app.logger.level = logbook.DEBUG bps = { root: None, knowledge_base: None, auth: None, science: None, geolocate: None, stats: "stats"
"""
Sometimes you want to expose Prometheus metrics from within an existing web
service and don't want to start a separate Prometheus metrics server.

This example uses the aioprometheus package to add Prometheus instrumentation
to a Quart application. In this example a registry and a counter metric is
instantiated. A '/metrics' route is added to the application and the render
function from aioprometheus is called to format the metrics into the
appropriate format.
"""

from aioprometheus import render, Counter, Registry
from quart import Quart, request

app = Quart(__name__)
# Attach the metrics objects to the app so route handlers can reach them.
app.registry = Registry()
app.events_counter = Counter("events", "Number of events.")
app.registry.register(app.events_counter)


@app.route("/")
async def hello():
    # Count every hit on the root route, labelled by path.
    app.events_counter.inc({"path": "/"})
    return "hello"


@app.route("/metrics")
async def handle_metrics():
    # render() chooses the exposition format from the Accept header(s).
    content, http_headers = render(app.registry, request.headers.getlist("accept"))
    return content, http_headers
from datetime import timezone

import bleach
import markdown
import quart
import quart.flask_patch
from quart import Quart

import config
import postgres
from blueprints import user, login, post, api, board

app = Quart(__name__)

# Template helper: render markdown to HTML, then sanitise so only the
# configured tags survive (defends against injected markup in posts).
app.jinja_env.globals.update(md=lambda x: bleach.clean(
    markdown.markdown(x, extensions=config.MARKDOWN_EXTENSIONS),
    tags=config.ALLOWED_HTML,
))
# Template helper: naive datetime -> unix timestamp, treating it as UTC.
app.jinja_env.globals.update(
    nix_time=lambda dt: dt.replace(tzinfo=timezone.utc).timestamp())
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
app.secret_key = config.SECRET_KEY

login.login_man.init_app(app)


@app.route("/")
async def front_page():
    # Front page shows the 25 most recent posts.
    newest = await postgres.pool.fetch(
        "SELECT * FROM posts ORDER BY timestamp DESC LIMIT 25")
    return await quart.render_template("front_page.html", pageposts=newest)
from hypercorn.asyncio import serve from hypercorn.config import Config from quart import Quart from quart import request, Response from quart_doh.constants import DOH_JSON_CONTENT_TYPE from quart_doh.dns_resolver import DNSResolverClient from quart_doh.utils import ( configure_logger, create_http_wire_response, get_name_and_type_from_dns_question, create_http_json_response, ) resolver_dns = None app = Quart(__name__) @app.route("/dns-query", methods=["GET", "POST"]) async def route_dns_query() -> Response: logger = logging.getLogger("doh-server") accept_header = request.headers.get("Accept") message = await get_name_and_type_from_dns_question(request) if not message: return Response("", status=400) try: loop = asyncio.get_running_loop() query_response = None try: query_response = await loop.run_in_executor( None, functools.partial(resolver_dns.resolve, message))
from quart import Quart

from app.config import *

# Application instance for the blog service.
blog_app = Quart(__name__)
# Pull the session secret out of the project configuration module.
blog_app.config["SECRET_KEY"] = SECRET_KEY
import datetime
import quart.flask_patch  # The flask_patch module allows the extensions
                          # to find modules and objects in the flask namespace
from quart import Quart

import flask_admin as admin
from flask_mongoengine import MongoEngine
from flask_admin.form import rules
from flask_admin.contrib.mongoengine import ModelView

# create application
app = Quart(__name__)

# Create dummy secret key so we can use sessions
app.config['SECRET_KEY'] = 'my secret key'
app.config['MONGODB_SETTINGS'] = {'DB': 'testing'}

# create models
db = MongoEngine()
db.init_app(app)


# Define mongoengine documents
class User(db.Document):
    # Display name, max 40 chars.
    name = db.StringField(max_length=40)
    # References to Tag documents.
    tags = db.ListField(db.ReferenceField('Tag'))
    # NOTE(review): stored as a plain string field — presumably plaintext;
    # confirm whether hashing happens elsewhere before production use.
    password = db.StringField(max_length=40)

    def __unicode__(self):
        # Admin views render the user by name.
        return self.name
class APIRouter:
    """HTTP/WebSocket API front-end for the anime/danmaku resolver.

    Wraps a Quart app and registers all routes in _init_routers(); the
    heavy lifting is delegated to the Agent/RequestProxy/Statistics
    collaborators created in __init__.
    """

    def __init__(self, host: str, port: int):
        self._root = dirname(__file__)
        self._app = Quart(__name__)
        self._debug = False
        self._host = host
        self._port = port
        # Base URL used when building absolute resource links in responses.
        self._domain = f"http://{host}:{port}"
        self._agent = Agent()
        self._config = Config()
        self._proxy = RequestProxy()
        self._stats = Statistics()

    def set_domain(self, domain: str):
        """Set the domain used in resource links returned by the API.

        The domain includes the scheme but not the port,
        e.g. http://www.foo.bar
        """
        self._domain = f"{domain}:{self._port}" if domain else self._domain

    def run(self):
        """Start the API resolving service."""

        def exception_handler(_loop, context):
            logger.debug(context)

        self._init_routers()
        # Install a handler for exceptions raised inside the event loop.
        loop = asyncio.new_event_loop()
        loop.set_exception_handler(exception_handler)
        asyncio.set_event_loop(loop)
        self._app.run(host=self._host, port=self._port, debug=False, use_reloader=False, loop=loop)

    def _init_routers(self):
        """Create the routing endpoints."""

        @self._app.after_request
        async def apply_caching(resp: Response):
            """Set global response headers; allow cross-origin requests."""
            resp.headers["Server"] = "Anime-API"
            resp.headers["Access-Control-Allow-Origin"] = "*"
            resp.headers["Access-Control-Allow-Headers"] = "*"
            return resp

        @self._app.route("/")
        async def index():
            """API home page: show the help/usage information."""
            file = f"{self._root}/templates/interface.txt"
            with open(file, encoding="utf-8") as f:
                text = f.read()
            return Response(text, mimetype="text/plain")

        @self._app.route("/statistics")
        async def statistics():
            """Forward to Baidu Analytics (user-experience program)."""
            return await self._stats.transmit(request)

        @self._app.route("/statistics/<hm_js>")
        async def get_statistics_js(hm_js):
            return await self._stats.get_hm_js(request)

        # ======================== Anime Interface ===============================

        @self._app.route("/anime/bangumi/updates")
        async def get_bangumi_updates():
            """Get the anime update schedule."""
            bangumi_list = await self._agent.get_bangumi_updates()
            data = []
            for bangumi in bangumi_list:
                one_day = {
                    "date": bangumi.date,
                    "day_of_week": bangumi.day_of_week,
                    "is_today": bangumi.is_today,
                    "updates": []
                }
                for info in bangumi:
                    one_day["updates"].append({
                        "title": info.title,
                        "cover_url": f"{info.cover_url}",  # images always go through the proxy to avoid browser CORS blocking
                        "update_time": info.update_time,
                        "update_to": info.update_to
                    })
                data.append(one_day)
            return jsonify(data)

        @self._app.route("/anime/search/<path:keyword>")
        async def search_anime(keyword):
            """Anime search; blocks until every engine has returned its data."""
            result: List[AnimeMeta] = []
            await self._agent.get_anime_metas(
                keyword.strip(), callback=lambda m: result.append(m))
            ret = []
            for meta in result:
                ret.append({
                    "title": meta.title,
                    "cover_url": f"{meta.cover_url}",
                    "category": meta.category,
                    "description": meta.desc,
                    "score": 80,  # TODO: anime quality scoring mechanism
                    "module": meta.module,
                    "url": f"{self._domain}/anime/{meta.token}"
                })
            return jsonify(ret)

        @self._app.websocket("/anime/search")
        async def ws_search_anime():
            """Streaming anime search: push each result over the websocket."""
            async def push(meta: AnimeMeta):
                await websocket.send_json({
                    "title": meta.title,
                    "cover_url": f"{meta.cover_url}",
                    "category": meta.category,
                    "description": meta.desc,
                    "score": 80,
                    "engine": meta.module,
                    "url": f"{self._domain}/anime/{meta.token}"
                })

            # The route path cannot contain non-ASCII text; the client sends the keyword.
            keyword = await websocket.receive()
            await self._agent.get_anime_metas(keyword.strip(), co_callback=push)

        @self._app.route("/anime/<token>")
        async def get_anime_detail(token):
            """Return the anime detail page information."""
            detail = await self._agent.get_anime_detail(token)
            if not detail:
                return Response("Parse detail failed", status=404)

            ret = {
                "title": detail.title,
                "cover_url": f"{detail.cover_url}",
                "description": detail.desc,
                "category": detail.category,
                "module": detail.module,
                "play_lists": []
            }
            for idx, playlist in enumerate(detail):
                lst = {
                    "name": playlist.name,
                    "num": playlist.num,
                    "video_list": []
                }  # one playlist
                for episode, video in enumerate(playlist):
                    video_path = f"{token}/{idx}/{episode}"
                    lst["video_list"].append({
                        "name": video.name,
                        "info": f"{self._domain}/anime/{video_path}",
                        "player": f"{self._domain}/anime/{video_path}/player",
                    })
                ret["play_lists"].append(lst)
            return jsonify(ret)

        @self._app.route("/anime/<token>/<playlist>/<episode>")
        async def parse_anime_info(token: str, playlist: str, episode: str):
            """Get the video information (direct/proxy URLs and metadata)."""
            url = await self._agent.get_anime_real_url(token, int(playlist), int(episode))
            info = {
                "raw_url": f"{self._domain}/anime/{token}/{playlist}/{episode}/url",
                "proxy_url": f"{self._domain}/proxy/stream/{token}/{playlist}/{episode}",
                "format": url.format,
                "resolution": url.resolution,
                "size": url.size,
                "lifetime": url.left_lifetime
            }
            return jsonify(info)

        @self._app.route("/anime/<token>/<playlist>/<episode>/url")
        async def redirect_to_real_url(token: str, playlist: str, episode: str):
            """Redirect to the direct video URL so an expired link does not break the player."""
            url = await self._agent.get_anime_real_url(token, int(playlist), int(episode))
            return redirect(url.real_url)

        @self._app.route("/anime/<token>/<playlist>/<episode>/player")
        async def player_without_proxy(token, playlist, episode):
            """Direct-link playback test page."""
            url = f"{self._domain}/anime/{token}/{playlist}/{episode}"
            return await render_template("player.html", info_url=url)

        # ======================== Danmaku Interface ===============================

        @self._app.route("/danmaku/search/<path:keyword>")
        async def search_danmaku(keyword):
            """Search the danmaku (bullet-comment) databases."""
            result: List[DanmakuMeta] = []
            await self._agent.get_danmaku_metas(
                keyword.strip(), callback=lambda m: result.append(m))
            data = []
            for meta in result:
                data.append({
                    "title": meta.title,
                    "num": meta.num,
                    "module": meta.module,
                    "score": 80,  # TODO: danmaku quality scoring mechanism
                    "url": f"{self._domain}/danmaku/{meta.token}"
                })
            return jsonify(data)

        @self._app.websocket("/danmaku/search")
        async def ws_search_danmaku():
            """Search the danmaku databases, pushing results over the websocket."""
            async def push(meta: DanmakuMeta):
                await websocket.send_json({
                    "title": meta.title,
                    "num": meta.num,
                    "module": meta.module,
                    "score": 80,
                    "url": f"{self._domain}/danmaku/{meta.token}"
                })

            keyword = await websocket.receive()
            await self._agent.get_danmaku_metas(keyword.strip(), co_callback=push)

        @self._app.route("/danmaku/<token>")
        async def get_danmaku_detail(token):
            """Get the danmaku library info for each episode of an anime."""
            detail = await self._agent.get_danmaku_detail(token)
            if detail.is_empty():
                return Response("Parse danmaku detail failed", status=404)

            data = []
            for episode, danmaku in enumerate(detail):
                data.append({
                    "name": danmaku.name,
                    "url": f"{self._domain}/danmaku/{token}/{episode}",  # DPlayer automatically appends /v3/
                    "data": f"{self._domain}/danmaku/{token}/{episode}/v3/"  # for debugging
                })
            return jsonify(data)

        @self._app.route("/danmaku/<token>/<episode>/v3/")
        async def get_danmaku_data(token, episode):
            """Parse the video's danmaku data; return the format DPlayer supports."""
            data = await self._agent.get_danmaku_data(token, int(episode))
            ret = {"code": 0, "data": data.data, "num": data.num}
            return jsonify(ret)

        # ======================== IPTV Interface ===============================

        @self._app.route("/iptv/list")
        async def get_iptv_list():
            """IPTV live sources."""
            sources = self._agent.get_iptv_sources()
            data = []
            for source in sources:
                data.append({"name": source.name, "url": source.url})
            return jsonify(data)

        # ======================== Proxy Interface ===============================

        @self._app.route("/proxy/image/<path:raw_url>")
        async def image_proxy(raw_url):
            """Proxy cross-origin images and return the data."""
            return await self._proxy.make_response(raw_url)

        @self._app.route("/proxy/stream/<token>/<playlist>/<episode>")
        async def video_stream_proxy(token, playlist, episode):
            """Proxy a plain video data stream (honours Range requests)."""
            range_field = request.headers.get("range")
            proxy = await self._agent.get_anime_proxy(token, int(playlist), int(episode))
            if not proxy:
                return Response("stream proxy error", status=404)
            return await proxy.make_response(range_field)

        @self._app.route("/proxy/hls/<token>/<playlist>/<episode>")
        async def hls_stream_proxy(token, playlist, episode):
            """Proxy an HLS video stream."""
            # TODO : implement hls stream proxy
            return Response("HLS stream proxy not supported yet", status=500)

        # ======================== System Interface ===============================

        @self._app.route("/system/logs")
        async def show_logs():
            file = f"{self._root}/logs/api.log"
            with open(file, encoding="utf-8") as f:
                text = f.read()
            return Response(text, mimetype="text/plain")

        @self._app.route("/system/version")
        async def show_system_version():
            return jsonify(self._config.get_version())

        @self._app.route("/system/clear")
        async def clear_system_cache():
            """Clear the API's temporary cache data."""
            mem_free = self._agent.cache_clear()
            return jsonify({"clear": "success", "free": mem_free})

        @self._app.route("/system/modules", methods=["GET", "POST", "OPTIONS"])
        async def show_global_settings():
            # GET reports module status; POST toggles modules on/off;
            # OPTIONS is answered empty for CORS preflight.
            if request.method == "GET":
                return jsonify(self._config.get_modules_status())
            if request.method == "POST":
                options = await request.json
                ret = {}
                for option in options:
                    module = option.get("module")
                    enable = option.get("enable")
                    if not module:
                        continue
                    ok = self._agent.change_module_state(module, enable)
                    ret[module] = "success" if ok else "failed"
                return jsonify(ret)
            if request.method == "OPTIONS":
                return Response("")
""" Test pylti/test_flask_app.py module """ from quart import Quart, session from aiolti.quart import lti as lti_quart from aiolti.quart import LTIRequestError from aiolti.common import LTI_SESSION_KEY from aiolti.tests.test_common import ExceptionHandler app = Quart(__name__) # pylint: disable=invalid-name app_exception = ExceptionHandler() # pylint: disable=invalid-name # Key for cookie-based sessions app.secret_key = b'_5#y2L"F4Q8z\n\xec]/' app.config[ "SESSION_COOKIE_DOMAIN"] = ".local" # This is critical for sessions to work with mocket !! @app.errorhandler(LTIRequestError) def lti_error(exc: LTIRequestError): """ Set exception to exception handler and returns error string. """ app_exception.set(exc.lti_exception) print(f"error: lti_exception = {exc.lti_exception}") return "error", 500 @app.route("/unknown_protection") @lti_quart(app=app, request='notreal')
from quart import Quart, websocket, request, session import requests import logging, json, os, telethon.sync, time, difflib, re, traceback, Levenshtein, asyncio logging.basicConfig(format='[%(levelname) 5s/%(asctime)s] %(name)s: %(message)s', level=logging.WARNING) from quart_cors import cors from configparser import ConfigParser app = Quart(__name__) # create an app instance app = cors(app, allow_origin="*") app.config['SESSION_TYPE'] = 'filesystem' app.secret_key = 'super secret key' class SMSInterface(): #Send to the bot's endpoint def __init__(self, url = 'http://52.136.249.181:4000/webhook'): # self.url=url+"/webhook" self.url=url self.conversationReset = True self.test_counter = 0 self.question_counter = 0 #Function to post the message def post(self, text): config_object = ConfigParser() config_object.read("config.ini") SMSinfo = config_object['SMSINFO'] _to = format(SMSinfo["code"])
request, render_template, redirect, url_for, Response, current_app, ) from flask_babel import Babel from . import colour from .prosodyclient import client from ._version import version, version_info app = Quart(__name__) app.config.setdefault("LANGUAGES", ["de", "en"]) app.config.from_envvar("SNIKKET_WEB_CONFIG") client.init_app(app) client.default_login_redirect = "login" babel = Babel(app) @babel.localeselector def selected_locale(): return request.accept_languages.best_match(current_app.config['LANGUAGES']) @app.route("/login", methods=["GET", "POST"])
"""main entry point for the app"""
from gino.ext.quart import Gino
from quart import Quart
from quart_cors import cors

app = Quart(__name__)

# load config
app.config.from_pyfile('settings.py')

# set cors (cors() returns the decorated application object)
app = cors(app)

# set the database handler
db = Gino(app)

# import and register blueprints
from routes import simple_app  # noqa, prevent circular imports

app.register_blueprint(simple_app)
from dotenv import load_dotenv
from quart import Quart
from quart_cors import cors

from src.compiler import compiler_bp

# Load environment variables from a .env file, logging what was found.
load_dotenv(verbose=True)

app = Quart(__name__)
# NOTE(review): this rebinding shadows the imported `cors` helper with the
# decorated app object — rename the variable if `cors` is ever needed again.
cors = cors(app)
app.config["CORS_HEADERS"] = "Content-Type"

app.register_blueprint(compiler_bp)
import time from uuid import uuid4 from quart import Quart, Response, request app = Quart(__name__) # first add ten more routes to load routing system # ------------------------------------------------ async def req_ok(*args, **kwargs): return Response('OK') for n in range(5): app.route(f"/route-{n}")(req_ok) app.route(f"/route-dyn-{n}/<part>")(req_ok) # then prepare endpoints for the benchmark # ---------------------------------------- @app.route('/html') async def hello(): """Return HTML content and a custom header.""" content = "<b>HTML OK</b>" headers = {'x-time': f"{time.time()}"} return Response(content, content_type="text/html", headers=headers) @app.route('/upload', methods=['POST']) async def upload():
from relay_sdk import Interface, WebhookServer from quart import Quart, request, jsonify, make_response import logging import json relay = Interface() app = Quart('ado-pullrequest-merged') logging.getLogger().setLevel(logging.INFO) @app.route('/', methods=['POST']) async def handler(): payload = await request.get_json() ado_event = payload.body.get('eventType') if ado_event is None: return {'message': 'Not a valid Azure Devops event'} if ado_event != "git.pullrequest.merged": return {'message': 'only git.pullrequest.merged events are supported by this trigger'} logging.info("Received event from azure devops: {}".format(ado_event)) logging.info("Received the following webhook payload: \n%s", json.dumps(payload, indent=4)) resource = payload['resource'] if resource['mergeStatus'] == "succeeded" and resource['status'] == "status": relay.events.emit({ 'url': resource['url'],
def create_app():
    """Application factory: build and configure the Quart app.

    Returns the app with the project blueprints registered; the postgres
    connection pool is created once, when serving starts.
    """
    app = Quart(__name__)
    # NOTE(review): hard-coded secret key — load from env/config in production.
    app.secret_key = '9a8sdyflkhjasdf'
    # Caches populated elsewhere at runtime.
    app.cutouts = None
    app.cutout_similarities = None
    app.cutout_similarities_jaccard = None

    # before_first_request was deprecated and then removed (Quart 0.19 /
    # Flask 2.3). before_serving runs exactly once at startup and is the
    # supported hook for one-time initialisation such as pool creation;
    # other modules in this project already use before_serving.
    @app.before_serving
    async def create_db():
        log.info('Initializing the postgres pool')
        app.pool = await asyncpg.create_pool(user='******',
                                             password='******',
                                             database='hubble',
                                             host='127.0.0.1',
                                             max_size=20)

    app.register_blueprint(hdltl_blueprint)
    app.register_blueprint(available_data_blueprint)
    app.register_blueprint(transfer_learning_blueprint)

    return app
async def test_not_found_error(app: Quart) -> None:
    """Requesting an unregistered path yields a 404 with the default body."""
    client = app.test_client()
    response = await client.get('/not_found/')
    body = await response.get_data()  # type: ignore
    assert response.status_code == 404
    assert b'Not Found' in body
import asyncio from pyppeteer import launch from quart import Quart from quart import request import time import threading import logging pyppeteer_level = logging.WARNING logging.getLogger('pyppeteer').setLevel(pyppeteer_level) logging.getLogger('websockets.protocol').setLevel(pyppeteer_level) app = Quart(__name__) class Room: @classmethod def __init__(self): pass ''' @classmethod async def __new__(cls): self = super().__new__(cls) self.page = await browser.newPage() self.result = 'raw' return self ''' @classmethod async def start(self):
from quart import Quart, render_template, request, g from quart_cors import cors from docopt import docopt from wdreconcile.engine import ReconcileEngine from wdreconcile.suggest import SuggestEngine from wdreconcile.monitoring import Monitoring from config import * try: from config import wikibase_name from config import wikibase_main_page except ImportError: wikibase_name = 'Wikidata' wikibase_main_page = 'https://www.wikidata.org/wiki/Wikidata:Main_Page' app = Quart(__name__, static_url_path='/static/', static_folder='static/') app = cors(app, allow_origin='*') @app.before_serving async def setup(): app.redis_client = aioredis.from_url(redis_uri, encoding='utf-8', decode_responses=True) app.http_connector = aiohttp.TCPConnector(limit_per_host=10) app.http_session_obj = aiohttp.ClientSession(connector=app.http_connector) app.http_session = await app.http_session_obj.__aenter__() @app.before_request async def request_context():
from relay_sdk import Interface, WebhookServer
from quart import Quart, request, jsonify, make_response

import logging
import json

relay = Interface()
app = Quart('alertmanager-event')
logging.getLogger().setLevel(logging.INFO)


@app.route('/', methods=['POST'])
async def handler():
    """Receive an Alertmanager webhook and re-emit it as a relay event."""
    logging.info("Received event from Alertmanager!")
    event_payload = await request.get_json()
    logging.info("Received the following webhook payload: \n%s",
                 json.dumps(event_payload, indent=4))
    # get_json() returns None when the body is not valid JSON.
    if event_payload is None:
        return {'message': 'not a valid Alertmanager alert'}, 400, {}
    relay.events.emit({'event_payload': event_payload})
    return {'message': 'success'}, 200, {}


if __name__ == '__main__':
    WebhookServer(app).serve_forever()
class STListener(Listener):
    """HTTP C2 listener: serves payload staging, job retrieval and job
    results over plain HTTP via a Quart app run under Hypercorn."""

    def __init__(self):
        super().__init__()
        self.name = 'http'
        self.author = '@byt3bl33d3r'
        self.description = 'HTTP listener'
        self.options = {
            # format:
            #       value_name : {description, required, default_value}
            'Name': {
                'Description': 'Name for the listener.',
                'Required': True,
                'Value': 'http'
            },
            #'StageURL': {
            #    'Description'   :   'URL for staging.',
            #    'Required'      :   True,
            #    'Value'         :   f"https://{get_ipaddress()}"
            #},
            'BindIP': {
                'Description': 'The IPv4/IPv6 address to bind to.',
                'Required': True,
                'Value': get_ipaddress()
            },
            'Port': {
                'Description': 'Port for the listener.',
                'Required': True,
                'Value': 80
            },
            # NOTE(review): key spelling 'CallBackURls' looks like a typo, but
            # it is a runtime lookup key — confirm all call sites before renaming.
            'CallBackURls': {
                'Description': 'Additional C2 Callback URLs (comma seperated)',
                'Required': False,
                'Value': ''
            },
            'Comms': {
                'Description': 'C2 Comms to use',
                'Required': True,
                'Value': 'http'
            }
        }

    def run(self):
        """
        Start the listener under Hypercorn.

        While we could use the standard decorators to register these routes,
        using add_url_rule() allows us to create different endpoint names
        programmatically and pass the classes self object to the routes
        """
        config = Config()
        config.accesslog = './data/logs/access.log'
        config.bind = f"{self['BindIP']}:{self['Port']}"
        config.insecure_bind = True
        config.include_server_header = False
        config.use_reloader = False
        config.debug = False

        http_blueprint = Blueprint(__name__, 'http')
        # Reject requests from obvious CLI tools before any route runs.
        http_blueprint.before_request(self.check_if_naughty)
        #http_blueprint.after_request(self.make_normal)

        http_blueprint.add_url_rule('/<uuid:GUID>', 'key_exchange', self.key_exchange, methods=['POST'])
        http_blueprint.add_url_rule('/<uuid:GUID>', 'stage', self.stage, methods=['GET'])
        http_blueprint.add_url_rule('/<uuid:GUID>/jobs', 'jobs', self.jobs, methods=['GET'])
        http_blueprint.add_url_rule('/<uuid:GUID>/jobs/<job_id>', 'job_result', self.job_result, methods=['POST'])

        # Add a catch all route
        http_blueprint.add_url_rule('/', 'unknown_path', self.unknown_path, defaults={'path': ''})
        http_blueprint.add_url_rule('/<path:path>', 'unknown_path', self.unknown_path, methods=['GET', 'POST'])

        #logging.getLogger('quart.app').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)
        #logging.getLogger('quart.serving').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)

        self.app = Quart(__name__)
        self.app.register_blueprint(http_blueprint)

        logging.debug(f"Started HTTP listener {self['BindIP']}:{self['Port']}")
        asyncio.run(serve(self.app, config))

    async def check_if_naughty(self):
        # Return 404 to common CLI HTTP tools (curl/httpie); missing
        # User-Agent headers fall through and are allowed.
        try:
            headers = request.headers['User-Agent'].lower()
            if 'curl' in headers or 'httpie' in headers:
                return '', 404
        except KeyError:
            pass

    async def make_normal(self, response):
        # Spoof the Server header to look like a stock Apache install.
        response.headers["server"] = "Apache/2.4.35"
        return response

    async def unknown_path(self, path):
        # Catch-all: log and 404 anything that is not a known C2 route.
        self.app.logger.error(f"{request.remote_addr} requested an unknown path: {path}")
        return '', 404

    async def key_exchange(self, GUID):
        # POST body carries the client key material; returns our public key.
        data = await request.data
        pub_key = self.dispatch_event(events.KEX, (GUID, request.remote_addr, data))
        if pub_key:
            return Response(pub_key, content_type='application/octet-stream')
        return '', 400

    async def stage(self, GUID):
        # Hand out the encrypted stage for this session, if one is available.
        stage_file = self.dispatch_event(events.ENCRYPT_STAGE, (GUID, request.remote_addr, self["Comms"]))
        if stage_file:
            self.dispatch_event(events.SESSION_STAGED, f'Sending stage ({sys.getsizeof(stage_file)} bytes) -> {request.remote_addr} ...')
            return Response(stage_file, content_type='application/octet-stream')
        return '', 400

    async def jobs(self, GUID):
        # Session check-in: return a pending job blob, or an empty 200.
        #self.app.logger.debug(f"Session {GUID} ({request.remote_addr}) checked in")
        job = self.dispatch_event(events.SESSION_CHECKIN, (GUID, request.remote_addr))
        if job:
            return Response(job, content_type='application/octet-stream')
        #self.app.logger.debug(f"No jobs to give {GUID}")
        return '', 200

    async def job_result(self, GUID, job_id):
        # Session posts the result blob of a previously issued job.
        data = await request.data
        #self.app.logger.debug(f"Session {GUID} posted results of job {job_id}")
        self.dispatch_event(events.JOB_RESULT, (GUID, job_id, data))
        return '', 200
import socket import string import urllib import zlib import aiofiles import websockets from quart import Quart, abort, jsonify, request import Items import MultiClient import MultiServer # from config import Config as c APP = Quart(__name__) MULTIWORLDS = {} @APP.route('/game', methods=['POST']) async def create_game(): global MULTIWORLDS data = await request.get_json() if not 'multidata_url' in data and not 'token' in data: abort(400, description=f'Missing multidata_url or token in data') port = int(data.get('port', random.randint(30000, 35000)))
def init_app(self, app: Quart) -> None:
    """Attach this extension to *app*: record the Redis URL from the app
    config and register the serving lifecycle hooks."""
    redis_url = app.config.get('REDIS_URL', 'localhost')
    self.redis_queue_url = redis_url
    # Register start-up / shutdown callbacks with the application.
    app.before_serving(self._before_serving)
    app.after_serving(self._after_serving)
# Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import asyncio import time from quart import Quart, request, jsonify from google.cloud import firestore from db_lease import helpers from db_lease.helpers import DB_SIZES, DB_TYPES, run_function_as_async from db_lease import db_clean app = Quart(__name__) db = firestore.Client() CLOUD_SQL = 1 CLOUD_SQL_READ_REPLICA = 2 CLOUD_SPANNER = 3 @run_function_as_async @firestore.transactional def lease(transaction, db_type, size, duration): """ Finds the resource with the earliest expiry, and returns it if available. Returns None if no available resource is found. """
__author__ = "tejpratap" import asyncio from quart import Quart import os from azure.servicebus import ServiceBusMessage from azure.servicebus.aio import ServiceBusClient import time from datetime import datetime import threading conn_string = os.environ.get("AZURE_SERVICE_BUS_CONNECTION_STRING") queue_name = os.environ["SERVICE_BUS_QUEUE_NAME"] app = Quart(__name__) class AzureServiceBusClient: def __init__(self): self.client = ServiceBusClient.from_connection_string(conn_string) thread = threading.Thread( target=self._callback, name="azure_service_bus_thread", ) thread.setDaemon(True) thread.start() def _callback(self): loop = asyncio.new_event_loop()
async def test_index(app: Quart) -> None:
    """The root route responds 200 and its body contains 'index'."""
    client = app.test_client()
    response = await client.get('/')
    body = await response.get_data()  # type: ignore
    assert response.status_code == 200
    assert b'index' in body
def run_server(port, handler=None, asynchronous=True, ssl_creds=None):
    """Run a catch-all Quart proxy server on *port*.

    Every request on any path/method is forwarded to *handler* (called in a
    worker thread via ``run_sync``); the handler's result object is converted
    back into a Quart response.

    :param port: TCP port to bind on 0.0.0.0.
    :param handler: callable ``(request, data)`` returning a response-like
        object (``.content``, ``.status_code``, ``.headers``) or an Exception.
    :param asynchronous: when True, serve from a background ProxyThread and
        return the thread; when False, block in the current event loop.
    :param ssl_creds: optional ``(cert_file, key_file)`` tuple enabling TLS.
    """
    ensure_event_loop()
    app = Quart(__name__)
    app.config['MAX_CONTENT_LENGTH'] = 256 * 1024 * 1024  # 256 MB request payload limit

    # Catch-all route: both the bare root and any deeper path hit index().
    @app.route('/', methods=HTTP_METHODS, defaults={'path': ''})
    @app.route('/<path:path>', methods=HTTP_METHODS)
    async def index(path=None):
        # Default response if no handler is configured: empty JSON object.
        response = await make_response('{}')
        if handler:
            data = await request.get_data()
            try:
                # Handler runs synchronously in a thread; a returned Exception
                # is promoted to a raised one so both paths share error handling.
                result = await run_sync(handler, request, data)
                if isinstance(result, Exception):
                    raise result
            except Exception as e:
                LOG.warning(
                    'Error in proxy handler for request %s %s: %s %s' %
                    (request.method, request.url, e, traceback.format_exc()))
                response.status_code = 500
                if isinstance(e, HTTPErrorResponse):
                    response.status_code = e.code or response.status_code
                return response
            if result is not None:
                is_chunked = uses_chunked_encoding(result)
                result_content = result.content or ''
                response = await make_response(result_content)
                response.status_code = result.status_code
                if is_chunked:
                    # Chunked transfer encoding must not carry Content-Length.
                    response.headers.pop('Content-Length', None)
                # Drop hop-specific headers; Quart will set its own.
                result.headers.pop('Server', None)
                result.headers.pop('Date', None)
                # Escape newlines so header values cannot inject extra headers.
                headers = {
                    k: str(v).replace('\n', r'\n')
                    for k, v in result.headers.items()
                }
                response.headers.update(headers)
                # set multi-value headers
                multi_value_headers = getattr(result, 'multi_value_headers', {})
                for key, values in multi_value_headers.items():
                    for value in values:
                        response.headers.add_header(key, value)
                # set default headers, if required
                if not is_chunked and request.method not in [
                        'OPTIONS', 'HEAD'
                ]:
                    response_data = await response.get_data()
                    response.headers['Content-Length'] = str(
                        len(response_data or ''))
                if 'Connection' not in response.headers:
                    response.headers['Connection'] = 'close'
        return response

    def run_app_sync(*args, loop=None, shutdown_event=None):
        # Blocking entry point: builds the Hypercorn config and drives the
        # event loop until the server exits or shutdown_event fires.
        kwargs = {}
        config = Config()
        cert_file_name, key_file_name = ssl_creds or (None, None)
        if cert_file_name:
            kwargs['certfile'] = cert_file_name
            config.certfile = cert_file_name
        if key_file_name:
            kwargs['keyfile'] = key_file_name
            config.keyfile = key_file_name
        setup_quart_logging()
        config.bind = ['0.0.0.0:%s' % port]
        loop = loop or ensure_event_loop()
        run_kwargs = {}
        if shutdown_event:
            # Hypercorn stops serving once this awaitable resolves.
            run_kwargs['shutdown_trigger'] = shutdown_event.wait
        try:
            try:
                return loop.run_until_complete(serve(app, config, **run_kwargs))
            except Exception as e:
                LOG.info('Error running server event loop on port %s: %s %s' %
                         (port, e, traceback.format_exc()))
                if 'SSL' in str(e):
                    # Diagnostics: report whether/which cert material exists.
                    c_exists = os.path.exists(cert_file_name)
                    k_exists = os.path.exists(key_file_name)
                    c_size = len(load_file(cert_file_name)) if c_exists else 0
                    k_size = len(load_file(key_file_name)) if k_exists else 0
                    LOG.warning(
                        'Unable to create SSL context. Cert files exist: %s %s (%sB), %s %s (%sB)'
                        % (cert_file_name, c_exists, c_size, key_file_name,
                           k_exists, k_size))
                raise
        finally:
            # Always tear the loop down cleanly, even on serve() failure.
            try:
                _cancel_all_tasks(loop)
                loop.run_until_complete(loop.shutdown_asyncgens())
            finally:
                asyncio.set_event_loop(None)
                loop.close()

    class ProxyThread(FuncThread):
        """Background thread that owns its own event loop running the server."""

        def __init__(self):
            FuncThread.__init__(self, self.run_proxy, None)

        def run_proxy(self, *args):
            loop = ensure_event_loop()
            # Event created on this thread's loop; stop() sets it from outside.
            self.shutdown_event = asyncio.Event()
            run_app_sync(loop=loop, shutdown_event=self.shutdown_event)

        def stop(self, quiet=None):
            self.shutdown_event.set()

    def run_in_thread():
        thread = ProxyThread()
        thread.start()
        # Track the thread so global cleanup can stop it later.
        TMP_THREADS.append(thread)
        return thread

    if asynchronous:
        return run_in_thread()
    return run_app_sync()
class STListener(Listener):
    """Plain-HTTP listener.

    Exposes key-exchange, staging, job-polling and job-result endpoints over
    Quart; all agent-facing logic is delegated to ``self.dispatch_event``.
    """

    def __init__(self):
        Listener.__init__(self)
        self.name = 'http'
        self.author = '@byt3bl33d3r'
        self.description = 'HTTP listener'
        self.options = {
            # format:
            #   value_name : {description, required, default_value}
            'Name': {
                'Description'   :   'Name for the listener.',
                'Required'      :   True,
                'Value'         :   'http'
            },
            #'StageURL': {
            #    'Description'   :   'URL for staging.',
            #    'Required'      :   True,
            #    'Value'         :   f"https://{get_ipaddress()}"
            #},
            'BindIP': {
                'Description'   :   'The IPv4/IPv6 address to bind to.',
                'Required'      :   True,
                'Value'         :   get_ipaddress()
            },
            'Port': {
                'Description'   :   'Port for the listener.',
                'Required'      :   True,
                'Value'         :   80
            }
        }

    def run(self):
        """
        While we could use the standard decorators to register these routes,
        using add_url_rule() allows us to create different endpoint names
        programmatically and pass the class's self object to the routes.
        """
        loop = asyncio.get_event_loop()

        http_blueprint = Blueprint(__name__, 'http')
        # Reject obvious scanners/CLI tools before any route handler runs.
        http_blueprint.before_request(self.check_if_naughty)
        #http_blueprint.after_request(self.make_normal)

        # NOTE(review): '/<uuid:GUID>' is registered twice with different
        # endpoint names — POST goes to key_exchange, GET to stage.
        http_blueprint.add_url_rule('/<uuid:GUID>', 'key_exchange', self.key_exchange, methods=['POST'])
        http_blueprint.add_url_rule('/<uuid:GUID>', 'stage', self.stage, methods=['GET'])
        http_blueprint.add_url_rule('/<uuid:GUID>/jobs', 'jobs', self.jobs, methods=['GET'])
        http_blueprint.add_url_rule('/<uuid:GUID>/jobs/<job_id>', 'job_result', self.job_result, methods=['POST'])

        # Add a catch all route
        http_blueprint.add_url_rule('/', 'unknown_path', self.unknown_path, defaults={'path': ''})
        http_blueprint.add_url_rule('/<path:path>', 'unknown_path', self.unknown_path, methods=['GET', 'POST'])

        self.app = Quart(__name__)

        # Quiet Quart's own loggers unless the framework runs with --debug.
        logging.getLogger('quart.app').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)
        logging.getLogger('quart.serving').setLevel(logging.DEBUG if state.args['--debug'] else logging.ERROR)
        #serving_handler.setFormatter('%(h)s %(p)s - - %(t)s statusline: "%(r)s" statuscode: %(s)s responselen: %(b)s protocol: %(H)s')
        #logging.getLogger('quart.app').removeHandler(default_handler)

        self.app.register_blueprint(http_blueprint)
        self.app.run(host=self['BindIP'],
                     port=self['Port'],
                     debug=False,
                     use_reloader=False,
                     #access_log_format=,
                     loop=loop)

    async def check_if_naughty(self):
        """Return 404 for requests whose User-Agent looks like curl/httpie."""
        try:
            headers = request.headers['User-Agent'].lower()
            if 'curl' in headers or 'httpie' in headers:
                return '', 404
        except KeyError:
            # No User-Agent header at all: let the request through.
            pass

    async def make_normal(self, response):
        """(Currently disabled) hook to normalize response headers."""
        #response.headers["server"] = "Apache/2.4.35"
        return response

    async def unknown_path(self, path):
        # Catch-all for anything that is not a known agent endpoint.
        self.app.logger.error(f"Unknown path: {path}")
        return '', 404

    async def key_exchange(self, GUID):
        """POST /<GUID>: forward the client's key material, reply with our public key (XML)."""
        data = await request.data
        pub_key = self.dispatch_event(events.KEX, (GUID, request.remote_addr, data))
        return Response(pub_key, content_type='application/xml')

    async def stage(self, GUID):
        """GET /<GUID>: return the encrypted stage for this session, or 400 if unavailable."""
        stage_file = self.dispatch_event(events.ENCRYPT_STAGE, (GUID, request.remote_addr))
        if stage_file:
            self.dispatch_event(events.SESSION_STAGED, f'Sending stage ({sys.getsizeof(stage_file)} bytes) ->  {request.remote_addr} ...')
            return Response(stage_file, content_type='application/octet-stream')
        return '', 400

    async def jobs(self, GUID):
        """GET /<GUID>/jobs: session check-in; returns a queued job or an empty 200."""
        self.app.logger.debug(f"Session {GUID} ({request.remote_addr}) checked in")
        job = self.dispatch_event(events.SESSION_CHECKIN, (GUID, request.remote_addr))
        if job:
            return Response(job, content_type='application/octet-stream')
        self.app.logger.debug(f"No jobs to give {GUID}")
        return '', 200

    async def job_result(self, GUID, job_id):
        """POST /<GUID>/jobs/<job_id>: receive the result payload for a dispatched job."""
        data = await request.data
        self.app.logger.debug(f"Session {GUID} posted results of job {job_id}")
        self.dispatch_event(events.JOB_RESULT, (GUID, job_id, data))
        return '', 200
def __init__(self, *, data_path: str, scheduler: AsyncIOScheduler, quart_app: Quart, bot_api: Api, verinfo: str = None):
    """Bootstrap the bot: config files, database, web routes and plugins.

    :param data_path: directory (relative to the program) holding config/data.
    :param scheduler: APScheduler instance shared with the plugins.
    :param quart_app: Quart application to register web routes on.
    :param bot_api: bot API handle passed through to the plugins.
    :param verinfo: optional preformatted version info; fetched when None.
    """
    # initialize config
    # Detect a PyInstaller bundle: resources live under sys._MEIPASS there.
    is_packaged = "_MEIPASS" in dir(sys)
    if is_packaged:
        basepath = os.path.dirname(sys.argv[0])
    else:
        basepath = os.path.dirname(__file__)
    dirname = os.path.abspath(os.path.join(basepath, data_path))
    if not os.path.exists(dirname):
        os.makedirs(dirname)

    config_f_path = os.path.join(dirname, "yobot_config.json")
    if is_packaged:
        default_config_f_path = os.path.join(
            sys._MEIPASS, "packedfiles", "default_config.json")
    else:
        default_config_f_path = os.path.join(
            os.path.dirname(__file__), "packedfiles", "default_config.json")
    # Defaults are always loaded first; user config is overlaid below.
    with open(default_config_f_path, "r", encoding="utf-8") as config_file:
        self.glo_setting = json.load(config_file)
    if not os.path.exists(config_f_path):
        shutil.copyfile(default_config_f_path, config_f_path)
        print("设置已初始化,发送help获取帮助")

    # Seed boss/pool data files from the packaged defaults on first run.
    boss_filepath = os.path.join(dirname, "boss3.json")
    if not os.path.exists(boss_filepath):
        if is_packaged:
            default_boss_filepath = os.path.join(
                sys._MEIPASS, "packedfiles", "default_boss.json")
        else:
            default_boss_filepath = os.path.join(
                os.path.dirname(__file__), "packedfiles", "default_boss.json")
        shutil.copyfile(default_boss_filepath, boss_filepath)
    pool_filepath = os.path.join(dirname, "pool3.json")
    if not os.path.exists(pool_filepath):
        if is_packaged:
            default_pool_filepath = os.path.join(
                sys._MEIPASS, "packedfiles", "default_pool.json")
        else:
            default_pool_filepath = os.path.join(
                os.path.dirname(__file__), "packedfiles", "default_pool.json")
        shutil.copyfile(default_pool_filepath, pool_filepath)

    # Overlay user settings; utf-8-sig tolerates a BOM from Windows editors.
    # Only keys already present in the defaults are accepted.
    with open(config_f_path, "r", encoding="utf-8-sig") as config_file:
        cfg = json.load(config_file)
    for k in self.glo_setting.keys():
        if k in cfg:
            self.glo_setting[k] = cfg[k]

    if verinfo is None:
        verinfo = updater.get_version(self.Version, self.Version_id)
        print(verinfo['ver_name'])

    # initialize database
    ybdata.init(os.path.join(dirname, 'yobotdata.db'))

    # initialize web path
    if not self.glo_setting.get("public_address"):
        # Discover a public address: ask ipify, falling back to the local
        # address used to reach 8.8.8.8 (no packet is actually sent for UDP).
        try:
            res = requests.get("http://api.ipify.org/")
            ipaddr = res.text
        except:  # NOTE(review): bare except — any failure falls back to LAN IP
            with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
                s.connect(("8.8.8.8", 53))
                ipaddr = s.getsockname()[0]
        self.glo_setting["public_address"] = "http://{}:{}/".format(
            ipaddr,
            self.glo_setting["port"],
        )
    # Normalize address/basepath so both end (and basepath starts) with "/".
    if not self.glo_setting["public_address"].endswith("/"):
        self.glo_setting["public_address"] += "/"
    if not self.glo_setting["public_basepath"].startswith("/"):
        self.glo_setting["public_basepath"] = "/" + \
            self.glo_setting["public_basepath"]
    if not self.glo_setting["public_basepath"].endswith("/"):
        self.glo_setting["public_basepath"] += "/"

    # initialize update time: "random" resolves to a fixed HH:MM between
    # 02:00 and 04:59 so instances don't all update at once.
    if self.glo_setting["update-time"] == "random":
        self.glo_setting["update-time"] = "{:02d}:{:02d}".format(
            random.randint(2, 4), random.randint(0, 59))

    # initialize client salt
    if self.glo_setting["client_salt"] is None:
        self.glo_setting["client_salt"] = web_util.rand_string(16)

    # save initialization: persist the merged/derived settings back to disk
    with open(config_f_path, "w", encoding="utf-8") as config_file:
        json.dump(self.glo_setting, config_file, indent=4)

    # initialize utils
    # NOTE(review): assumes self.Version is a string like "[v1.2.3]" — the
    # slice strips the surrounding markers; confirm against the class constant.
    templating.Ver = self.Version[2:-1]

    # generate random secret_key for session signing if none is configured.
    # NOTE(review): random is not a CSPRNG; secrets would be preferable here.
    if (quart_app.secret_key is None):
        quart_app.secret_key = bytes(
            (random.randint(0, 255) for _ in range(16)))

    # add mimetype entries some platforms lack by default
    mimetypes.init()
    mimetypes.add_type('application/javascript', '.js')
    mimetypes.add_type('image/webp', '.webp')

    # add route for static files
    @quart_app.route(urljoin(self.glo_setting["public_basepath"],
                             "assets/<path:filename>"),
                     methods=["GET"])
    async def yobot_static(filename):
        return await send_file(
            os.path.join(os.path.dirname(__file__), "public", "static", filename))

    # add route for output files (generated artifacts under the data dir)
    if not os.path.exists(os.path.join(dirname, "output")):
        os.mkdir(os.path.join(dirname, "output"))

    @quart_app.route(urljoin(self.glo_setting["public_basepath"],
                             "output/<path:filename>"),
                     methods=["GET"])
    async def yobot_output(filename):
        return await send_file(os.path.join(dirname, "output", filename))

    # openCC converters: simplified -> traditional (configurable style) and back
    self.ccs2t = OpenCC(self.glo_setting.get("zht_out_style", "s2t"))
    self.cct2s = OpenCC("t2s")

    # filter: user/group allow- and deny-lists from config
    self.black_list = set(self.glo_setting["black-list"])
    self.black_list_group = set(self.glo_setting["black-list-group"])
    self.white_list_group = set(self.glo_setting["white-list-group"])

    # update runtime variables
    self.glo_setting.update({
        "dirname": dirname,
        "verinfo": verinfo
    })

    # Shared constructor arguments for every plugin.
    kwargs = {
        "glo_setting": self.glo_setting,
        "bot_api": bot_api,
        "scheduler": scheduler,
        "app": quart_app,
    }

    # load plugins
    plug_all = [
        updater.Updater(**kwargs),
        switcher.Switcher(**kwargs),
        yobot_msg.Message(**kwargs),
        gacha.Gacha(**kwargs),
        jjc_consult.Consult(**kwargs),
        boss_dmg.Boss_dmg(**kwargs),
        push_news.News(**kwargs),
        calender.Event(**kwargs),
        homepage.Index(**kwargs),
        marionette.Marionette(**kwargs),
        login.Login(**kwargs),
        settings.Setting(**kwargs),
        web_util.WebUtil(**kwargs),
        clan_battle.ClanBattle(**kwargs),
    ]
    # Split plugins by their declared capabilities.
    self.plug_passive = [p for p in plug_all if p.Passive]
    self.plug_active = [p for p in plug_all if p.Active]
    for p in plug_all:
        if p.Request:
            p.register_routes(quart_app)

    # load new plugins
    self.plug_new = [
        miner.Miner(**kwargs),
        group_leave.GroupLeave(**kwargs),
        custom.Custom(**kwargs),
    ]
from datetime import timedelta
from scripts.theme import WebsiteTheme
from discord.ext import tasks, commands
from scripts.contents import verify_staff
from scripts.caching import Cache as cache
from views.site.routes import site, bot, main_bot
from views.api.routes import api


async def key_func():
    # Rate-limit key: prefer the proxied client address when behind a reverse
    # proxy, otherwise fall back to the direct remote address.
    # NOTE(review): X-Forwarded-For is client-controllable unless the proxy
    # strips it — verify the deployment sits behind a trusted proxy.
    ip = request.headers.get("X-Forwarded-For", request.remote_addr)
    return ip


app = Quart(import_name=__name__,
            template_folder='website/templates',
            static_folder='website/static')

# Fresh random secret each start: sessions do not survive a restart.
app.secret_key = secrets.token_hex(16)

# Discord OAuth2 settings must be in place before DiscordOAuth2Session(app).
app.config["DISCORD_CLIENT_ID"] = 667117267405766696
app.config["DISCORD_CLIENT_SECRET"] = config.SECRET
app.config["DISCORD_REDIRECT_URI"] = "https://dredd-bot.xyz/callback"
app.config["DISCORD_BOT_TOKEN"] = config.MAIN_TOKEN

discord_session = DiscordOAuth2Session(app)
rate_limiter = RateLimiter(app, key_function=key_func)

app.register_blueprint(site)
app.register_blueprint(api)

# Static image locations (paths relative to the static root).
AVATARS_FOLDER = os.path.join('/static/images')
not_found_icon = os.path.join(AVATARS_FOLDER, 'not_found.png')
async def test_json(app: Quart) -> None:
    """POST /json/ echoes the submitted JSON value with a 200."""
    client = app.test_client()
    resp = await client.post('/json/', json={'value': 'json'})
    assert resp.status_code == 200
    assert b'json' in (await resp.get_data())  # type: ignore
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see http://www.gnu.org/licenses """ import json import pprint import socket from quart import Quart, request, make_response, Response app = Quart("whoami") @app.route("/", methods=["GET", "POST", "PATCH", "PUT", "DELETE"]) async def index() -> Response: """ Receive details information about the HTTP request as plain text. """ # TODO: Add "wait" query arg handling hostname = f"Hostname: {socket.gethostname()}" net_interfaces = {i[4][0] for i in socket.getaddrinfo(socket.gethostname(), None)} ips = [f"IP: {ip}" for ip in net_interfaces] remote_address = f"RemoteAddr: {request.remote_addr}" full_path = request.full_path
async def test_generic_error(app: Quart) -> None:
    """GET /error/ surfaces the app's custom 409 error body."""
    client = app.test_client()
    resp = await client.get('/error/')
    assert resp.status_code == 409
    assert b'Something Unique' in (await resp.get_data())  # type: ignore
#!/usr/bin/env python3 import random import os import asyncpg from quart import Quart, jsonify, make_response, request, render_template app = Quart(__name__) GET_WORLD = "select randomnumber from world where id = $1" UPDATE_WORLD = "update world set randomNumber = $2 where id = $1" @app.before_first_request async def connect_to_db(): app.db = await asyncpg.create_pool( user=os.getenv("PGUSER", "benchmarkdbuser"), password=os.getenv("PGPASS", "benchmarkdbpass"), database="hello_world", host="tfb-database", port=5432, ) @app.route("/json") def json(): return jsonify(message="Hello, World!") @app.route("/plaintext") async def plaintext():