def create_app(env_name): """ param: env_name -> necessário para carregar nossa configuração no modo development ou production DOC API USING SWAGGER UI Create app """ # app initiliazation APP = Quart(__name__) APP = cors(APP, allow_origin="*") APP.config.from_object(app_config[env_name]) # initializing bcrypt and db bcrypt.init_app(APP) db.init_app(APP) ### swagger specific ### SWAGGER_URL = '/apidocs' API_URL = '/static/api/openapi.json' SWAGGERUI_BLUEPRINT = get_swaggerui_blueprint( SWAGGER_URL, API_URL, config={'app_name': "Auto Complete Service"}) APP.register_blueprint(SWAGGERUI_BLUEPRINT, url_prefix=SWAGGER_URL) ### end swagger specific ### APP.register_blueprint(user_blueprint, url_prefix='/api/users') APP.register_blueprint(entity_blueprint, url_prefix='/api/entities') APP.register_blueprint(collector_blueprint, url_prefix='/api/collectors') APP.register_blueprint(autocomplete_blueprint, url_prefix='/api/autocompleties') APP.register_blueprint(home_blueprint, url_prefix='') return APP
async def main() -> int:
    """Start the backend Quart-based web server with CORS access control.

    Serves the Vue frontend from the sibling ``frontend/`` directory and
    blocks until the server stops. Returns 0 on clean shutdown.
    """
    backend = Quart(__name__,
                    static_folder='../../../frontend/static/',
                    template_folder='../../../frontend/')
    # Apply CORS access control headers to all routes in the backend
    backend = cors(backend)
    # Create the monitor app
    monitor = Monitor()
    # Register endpoint modules
    backend.register_blueprint(
        monitor.nodes_controller, url_prefix='/api/v1/nodes')

    # Sink all undeclared routes so that vue can work with router properly
    @backend.route('/', defaults={'path': ''})
    @backend.route('/<path:path>')
    async def catch_all(path: str) -> str:
        return await render_template('index.html')

    await backend.run_task(port=5000)
    return 0
def create_app(config) -> Pint:
    """Application factory for the BattleShip API.

    :param config: NOTE(review): this parameter is never used — the body
        loads configuration from the ``CONFIG_FILE`` env var instead.
        Confirm against callers whether this is intentional.
    :returns: the configured Pint (Quart + OpenAPI) application.
    """
    load_dotenv(verbose=True)
    app = Pint(__name__, title="BattleShip", base_model_schema=BASE_MODEL_SCHEMA)
    app = cors(app, allow_origin="*")
    app.json_encoder = Encoder
    app.config.from_envvar("CONFIG_FILE")
    # In-memory registry of active games (populated elsewhere in the package).
    app.games = dict()

    @app.before_serving
    async def init_orm():
        # Bring up the (Tortoise) ORM before serving requests.
        await init()

    # Blueprints are imported lazily to avoid import cycles at module load.
    from battlefield.session.data.api.single import session
    app.register_blueprint(session)
    from battlefield.session.data.api.multi import sessions
    app.register_blueprint(sessions)
    from battlefield.game.data.websocket import game
    app.register_blueprint(game)

    @app.cli.command()
    def openapi():
        # CLI helper: print the generated OpenAPI schema as JSON.
        print(json.dumps(app.__schema__, indent=4, sort_keys=False))

    @app.after_serving
    async def close_orm():
        await Tortoise.close_connections()

    return app
def __init__(
        self,
        wechaty: Wechaty,
        endpoint: EndPoint,
        scheduler_options: Optional[Union[AsyncIOScheduler, WechatySchedulerOptions]] = None):
    """Initialise the plugin manager and its embedded web server.

    Args:
        wechaty: bot instance the plugins are attached to.
        endpoint: (host, port) pair the embedded Quart server binds to.
        scheduler_options: either a ready-made AsyncIOScheduler, or a
            WechatySchedulerOptions used to build one. Defaults to
            ``WechatySchedulerOptions()``.
    """
    # Registered plugins by name, preserving registration order.
    self._plugins: Dict[str, WechatyPlugin] = OrderedDict()
    self._wechaty: Wechaty = wechaty
    # Per-plugin status flags.
    self._plugin_status: Dict[str, PluginStatus] = {}
    # CORS-enabled web server for plugin endpoints; no static files served.
    self.app: Quart = cors(Quart('Wechaty Server', static_folder=None))
    self.endpoint: Tuple[str, int] = endpoint

    if scheduler_options is None:
        scheduler_options = WechatySchedulerOptions()

    if isinstance(scheduler_options, WechatySchedulerOptions):
        scheduler = AsyncIOScheduler()
        # A plain string job_store is treated as an SQLAlchemy URL.
        if isinstance(scheduler_options.job_store, str):
            scheduler_options.job_store = SQLAlchemyJobStore(
                scheduler_options.job_store)
        scheduler.add_jobstore(scheduler_options.job_store,
                               scheduler_options.job_store_alias)
    else:
        # Bug fix: the caller supplied a ready-made AsyncIOScheduler (the
        # type hint allows it), but the original left `scheduler` unbound
        # in this case, raising NameError on the assignment below.
        scheduler = scheduler_options
    self.scheduler: AsyncIOScheduler = scheduler
async def run(self):
    """Serve the monitor UI and its JSON endpoints until cancelled."""
    # Serve static files from ./static
    static_folder = os.path.join(os.path.dirname(__file__), 'static')
    app = QuartTrio(__name__, static_folder=static_folder)
    # index.html at the root, plus a catch-all for the other static assets.
    app.add_url_rule('/', 'static', app.send_static_file,
                     defaults={'filename': 'index.html'})
    app.add_url_rule('/<path:filename>', 'static', app.send_static_file)
    # JSON/command endpoints backed by dispatch_* methods on this instance.
    app.add_url_rule('/tasks.json', 'task_tree',
                     self.dispatch_task_tree, ['GET'])
    app.add_url_rule('/task/<int:task_id>/stacktrace.json', 'task_stacktrace',
                     self.dispatch_task_stacktrace, ['GET'])
    app.add_url_rule('/nursery/<int:nursery_id>/cancel', 'nursery_cancel',
                     self.dispatch_nursery_cancel, ['GET'])
    app.add_url_rule('/stats.json', 'stats', self.dispatch_stats, ['GET'])
    config = HyperConfig()
    #config.access_log_format = '%(h)s %(r)s %(s)s %(b)s %(D)s'
    #config.access_logger = create_serving_logger()  # type: ignore
    config.bind = [f'{self._host}:{self._port}']
    config.error_logger = config.access_logger  # type: ignore
    #trio.hazmat.add_instrument(self)
    # Wrap in CORS so browser frontends on other origins can query the API.
    await serve(cors(app), config)
def create_app(config_object="lnbits.settings") -> Quart:
    """Application factory for the LNbits Quart app.

    :param config_object: dotted path of the configuration object to load.
    :returns: the fully wired Quart application.
    """
    app = Quart(__name__, static_folder="static")
    app.config.from_object(config_object)
    app.asgi_http_class = ASGIProxyFix

    # Cross-origin access and response compression.
    cors(app)
    Compress(app)

    # Wire up the remaining pieces, in the same order as before.
    for setup in (
        register_assets,
        register_blueprints,
        register_filters,
        register_commands,
        register_request_hooks,
    ):
        setup(app)

    return app
def create_app(config_object="lnbits.settings") -> QuartTrio:
    """Application factory for the LNbits QuartTrio app.

    :param config_object: dotted path of the configuration object to load.
    :returns: the fully wired QuartTrio application.
    """
    app = QuartTrio(__name__, static_folder="static")
    app.config.from_object(config_object)
    app.asgi_http_class = ASGIProxyFix

    # Cross-origin access and response compression.
    cors(app)
    Compress(app)

    # Wire up the remaining pieces, in the same order as before.
    for setup in (
        check_funding_source,
        register_assets,
        register_blueprints,
        register_filters,
        register_commands,
        register_async_tasks,
        register_exception_handlers,
    ):
        setup(app)

    return app
def create_app(container):
    """Application factory.

    :param container: dependency container; also supplies the runtime
        config via ``app._crib_config``.
    :returns: the configured Flask app with CORS and JWT auth wired in.
    """
    # create and configure the app
    app = Flask("server", container, __name__)
    # NOTE(review): "dev" JWT secret is a placeholder default; the container
    # config below is expected to override it — confirm for production.
    app.config.from_mapping(
        JWT_SECRET_KEY="dev",
        JWT_BLACKLIST_ENABLED=True,
        JWT_BLACKLIST_TOKEN_CHECKS=["access", "refresh"],
    )
    app = cors(app)
    # Overlay the container-provided config on top of the defaults above.
    app.config.from_mapping(app._crib_config)
    app.register_blueprint(properties.bp)
    app.register_blueprint(directions.bp)
    auth.init_app(app)
    return app
def make_app():
    """Build the Rhasspy Pint application with CORS enabled.

    Returns:
        The configured app. (Bug fix: the original factory was missing the
        final ``return``, so callers received ``None``.)
    """
    app = Pint(__name__, title="Rhasspy")
    app = cors(app)

    @app.route("/")
    class Root(Resource):
        async def get(self):
            """Hello World Route

            This docstring will show up as the description and
            short-description for the openapi docs for this route.
            """
            return "hello"

    @app.websocket("/ws")
    async def ws():
        # Simple keep-alive websocket: emits "hello" forever.
        while True:
            await websocket.send("hello")

    return app
def __init__(self, sys_descr: str, sess_descr: str) -> None:
    """Initialise the mock backend server.

    :param sys_descr: path/identifier of the system description to load.
    :param sess_descr: path/identifier of the session description to load.
    """
    self._api_prefix = '/api/v1'
    self._sys_descr = sys_descr
    self._sess_descr = sess_descr
    # Quart app serving the prebuilt frontend; CORS-enabled for dev use.
    self._app = quart.Quart(__name__,
                            static_folder='../../../frontend/static/',
                            template_folder='../../../frontend/')
    self._app = quart_cors.cors(self._app)
    # Wall-clock start of the current session (seconds since epoch).
    self._session_timer_start = time.time()
    # Synchronous scheduler used for session timing events.
    self._session_scheduler = sched.scheduler(time.time, time.sleep)
    self._event = ServerSentEvent()
    # Commented for now as the logs are showing doubled
    # coloredlogs.install(level='DEBUG', logger=logging.getLogger('quart.app'))
    # coloredlogs.install(level='DEBUG', logger=logging.getLogger('quart.serving'))
    self.load_mock_system_description()
    self.load_mock_session_description()
async def main():
    """Create the Bithumb price API and run it until interrupted.

    Connects to Redis, wires up the Quart routes, starts the background
    subscription checker, and blocks in ``run_task``.
    """
    cache = redis.Redis(
        host=config["REDIS_HOST"],
        port=int(config["REDIS_PORT"]),
        password=config["REDIS_PASSWORD"],
        db=0,
    )
    bithumbService = BithumbService(cache=cache, config=config)
    app = Quart("pandasFlask")
    app = cors(app, allow_origin="*")

    @app.route("/")
    async def home():
        return "✅ server is running"

    @app.route("/ticker")
    async def get_tickers():  # ✅
        """Provide the list of available coin tickers."""
        return jsonify(bithumbService.get_tickers())

    @app.route("/ohlcv/<string:ticker>")
    async def get_ohlcv(ticker):
        """Price (OHLCV) data for a specific coin."""
        return bithumbService.get_ohlcv(ticker)

    @app.route("/get_current_price")
    async def get_current_price():
        return bithumbService.get_current_price()

    @app.route("/get_S13_coins")
    async def get_S13_coins():
        return bithumbService.get_S13_coins()

    @app.route("/get_technical_data/<string:ticker>")
    async def get_technical_data(ticker):
        return bithumbService.get_technical_data(ticker)

    # Background task kept alive by this reference for the server's lifetime.
    coinUpdater = asyncio.create_task(bithumbService.subscribe_checker())
    await app.run_task(debug=True, host=config["HOST"], port=int(config["PORT"]))
def create_app(config_file='settings.py'):
    """Application factory for the blog app.

    :param config_file: pyfile loaded into ``app.config``.
    :returns: the configured Quart application with a database pool
        attached while serving.
    """
    app = Quart(__name__)
    app = cors(app, allow_origin="*")
    # NOTE(review): debug is forced on here regardless of config — confirm
    # this is intended outside development.
    app.debug = True
    app.register_blueprint(blog)
    app.config.from_pyfile(config_file)
    # Template filter for human-friendly timestamps.
    app.jinja_env.filters['humanize'] = pretty_date

    @app.before_serving
    async def create_db_pool():
        # Connect once at startup; expose the handle as app.db on success.
        await database.connect()
        if database.is_connected:
            app.db = database
            app.logger.info("Connected!")
        else:
            app.logger.warn('Not connected to database.')

    @app.after_serving
    async def remove_db_pool():
        await app.db.disconnect()

    return app
def create_app(config_class=Development) -> Pint:
    """Application factory.

    :param config_class: configuration class whose attributes configure the
        app and the database URL. (Bug fix: the original body called
        ``app.config.from_object(Development)`` unconditionally, silently
        ignoring this parameter; it now honours ``config_class``. The
        default is unchanged, so existing callers behave identically.)
    :returns: the configured Pint application.
    """
    app = Pint(__name__)
    app.test_client()
    DbWrapper.set_url(config_class.SQLALCHEMY_DATABASE_URI)
    db = DbWrapper.create_instance()

    @app.before_request
    async def connect_db() -> None:
        # todo: replace Exception with AlreadyConnectedToDbException
        try:
            await db.connect()
        except Exception:
            pass

    @app.after_request
    async def disconnect_db(response) -> None:
        await db.disconnect()
        return response

    app.config.from_object(config_class)
    register_blueprints(app)
    app = cors(app, allow_credentials=True)
    return app
import quart.flask_patch # noqa import os from celery import Celery from flask_limiter import Limiter from flask_limiter.util import get_remote_address from quart import Quart from quart_cors import cors app = cors(Quart(__name__)) app.config['PROCESSING_DIR'] = './beat-api-tmp' if not os.path.isdir(app.config['PROCESSING_DIR']): os.mkdir(app.config['PROCESSING_DIR']) app.config['RESULT_LIFETIME'] = 8 * 60 app.config['MAX_CONTENT_LENGTH'] = 8 * 1024 * 1024 app.config['CELERY_BROKER_URL'] = os.getenv('REDIS_URL', 'redis://localhost:6379/0') app.config['CELERY_RESULT_BACKEND'] = os.getenv('REDIS_URL', 'redis://localhost:6379/0') limiter = Limiter(app, key_func=get_remote_address, default_limits=['1 per minute']) celery = Celery(app.import_name, broker=app.config['CELERY_BROKER_URL'], backend=app.config['CELERY_RESULT_BACKEND']) from beatapi.v0 import api_v0
import os
import git
from quart import Blueprint, jsonify, request, abort
from quart import current_app as app
from quart_cors import cors

api = Blueprint("api", __name__)
api = cors(api)

# Deployed revision: env override first, falling back to the local git HEAD.
git_hash = os.getenv("GIT_REV") or git.Repo().head.object.hexsha


class Item:
    """Redis-backed item record keyed by UUID."""

    @property
    def redis_key(self):
        # Hash key under which this item's fields are stored.
        return f"items:{self.uuid}"

    @staticmethod
    async def from_redis(redis, uuid):
        """Load an Item's fields from *redis*.

        Missing numeric fields default to 0 and a missing ``wearable``
        flag defaults to "0".
        """
        item = Item()
        item.uuid = uuid
        item.name = await redis.hget(item.redis_key, "name")
        item.guild = int(await redis.hget(item.redis_key, "guild") or "0")
        item.owner = int(await redis.hget(item.redis_key, "owner") or "0")
        item.desc = await redis.hget(item.redis_key, "desc")
        item.wearable = await redis.hget(item.redis_key, "wearable") or "0"
        return item
def create_app():
    """Application factory for the Six-Degrees Spotify connection service.

    Exposes routes for finding a path between two artists (bidirectional
    BFS over related-artist links), artist search/lookup, and aggregate
    stats from the Redis cache. Returns the configured Quart app.

    NOTE(review): this chunk was whitespace-mangled in extraction; the
    nesting of the helper coroutines inside the factory and the join of
    one split string literal were reconstructed — verify against the
    original file.
    """
    app = Quart(__name__)
    app = cors(app)
    # Spotify client is created lazily on first request (see before_request).
    app.spotify = None
    clients_init = init_clients()
    if not clients_init:
        print("Error initiating clients")

    @app.before_request
    def before_request():
        if not app.spotify:
            try:
                client_ID = os.environ.get("SIX_DEGREES_CLIENT_ID")
                client_secret = os.environ.get("SIX_DEGREES_CLIENT_SECRET")
            except KeyError as e:
                print(
                    "You must set the client ID and secret in SIX_DEGREES_CLIENT_ID and SIX_DEGREES_CLIENT_SECRET (environment variables)"
                )
                return False
            app.spotify = spotify.Client(client_ID, client_secret)

    # route for getting path given artist IDs
    @app.route('/api/connect/<artist1_id>/<artist2_id>', methods=['GET'])
    async def find_connections(artist1_id, artist2_id):
        artist1: Artist = await app.spotify.get_artist(artist1_id)
        artist2: Artist = await app.spotify.get_artist(artist2_id)
        id_path, artists_searched = await bi_bfs(artist1, artist2)
        artist_dicts = []
        for i in id_path:
            artist_dict = await get_artist_dict(i)
            artist_dicts.append(artist_dict)
        res = artist_dicts
        return Response(json.dumps(res), mimetype='text/json')

    # route for getting search results for web app
    @app.route('/api/search/<artist_name>', methods=['GET'])
    async def search_artists(artist_name):
        results = await app.spotify.search(artist_name, types=['artist'], limit="20")
        artists: List[Artist] = results['artists']
        artist_dicts: List[Dict] = []
        for a in artists:
            artist: Dict = generate_artist_dict(a)
            artist_dicts.append(artist)
        res = artist_dicts
        return Response(json.dumps(res), mimetype='text/json')

    # route for getting one artist (after path found)
    @app.route('/api/artist/<artist_id>', methods=['GET'])
    async def get_artist(artist_id):
        artist: Artist = await app.spotify.get_artist(artist_id)
        artist_dict: Dict = generate_artist_dict(artist)
        return Response(json.dumps(artist_dict), mimetype='text/json')

    @app.route('/api/stats', methods=['GET'])
    async def get_stats():
        # Aggregate stats assembled from the Redis cache.
        if not cache.redis_connected():
            # NOTE(review): error_message is built but unused — abort(500)
            # discards it (preserved as-is in this doc-only pass).
            error_message = json.dumps(
                {"message": "Could not connect to Redis server"})
            abort(500)
        stats = {}
        stats['top_artists'] = []
        stats['top_connections'] = []
        stats['nonexistent_connections'] = []
        top_artist_ids = cache.get_top_artists()
        for i in top_artist_ids:
            res = await get_artist_dict(i)
            stats['top_artists'].append(res)
        # Connection keys are "id1:id2" strings; expose them as "id1/id2" URLs.
        top_connection_keys = cache.get_top_connections()
        for i in top_connection_keys:
            connection_dict = dict()
            connection_dict['url'] = i.replace(":", "/")
            connection_dict['artists'] = []
            for j in i.split(":"):
                artist_dict = await get_artist_dict(j)
                connection_dict['artists'].append(artist_dict)
            stats['top_connections'].append(connection_dict)
        stats['mean_degrees'] = cache.get_average_degrees_of_separation()
        stats['connections_searched'] = cache.get_number_connections_searched()
        max_degrees_connection = cache.get_longest_path()
        # just return the artists that identify the connection
        artist_ids = [max_degrees_connection[0], max_degrees_connection[-1]]
        artist_dicts = []
        for i in artist_ids:
            res = await get_artist_dict(i)
            artist_dicts.append(res)
        stats['max_degrees_path'] = {
            "artists": artist_dicts,
            "degrees": len(max_degrees_connection) - 1,
            "url": "/".join(artist_ids)
        }
        nonexistent_connection_keys = cache.get_nonexistent_connections()
        for i in nonexistent_connection_keys:
            connection_dict = dict()
            connection_dict['url'] = i.replace(":", "/")
            connection_dict['artists'] = []
            for j in i.split(":"):
                artist_dict = await get_artist_dict(j)
                connection_dict['artists'].append(artist_dict)
            stats['nonexistent_connections'].append(connection_dict)
        return Response(json.dumps(stats), mimetype='text/json')

    async def get_artist(name: str) -> Artist:
        # NOTE(review): shadows the route handler of the same name above —
        # verify this is intentional in the original module.
        res = await app.spotify.search(name, types=['artist'], limit=1)
        # assume first result is desired result
        try:
            artist: Artist = await app.spotify.get_artist(
                str(res['artists'][0]))
        except IndexError as e:
            return False
        return artist

    async def get_related_artists(artist_id: ArtistID) -> List[ArtistID]:
        # Cache-through lookup of an artist's related-artist IDs.
        if not cache.redis_connected() or not cache.get_related_artists(
                artist_id):
            related = await app.spotify.http.artist_related_artists(artist_id)
            related_ids: List[ArtistID] = [a['id'] for a in related['artists']]
            cache.store_related_artists(artist_id, related_ids)
            return related_ids
        else:
            return cache.get_related_artists(artist_id)

    async def get_artist_dict(artist_id):
        artist: Artist = await app.spotify.get_artist(artist_id)
        return generate_artist_dict(artist)

    async def bi_bfs(artist1: Artist,
                     artist2: Artist) -> Tuple[List[ArtistID], int]:
        """Bidirectional BFS between two artists over related-artist links.

        Returns (path of artist IDs, number of artists searched); a cached
        path returns with count 0, and no connection returns ([], 0).
        """
        cached_path = cache.get_path(artist1.id, artist2.id)
        if cached_path:
            # if cache.store_longest_path(artist1.id, artist2.id, cached_path):
            #     print("New longest path")
            if not cache.cached_connection_stats(artist1.id, artist2.id,
                                                 cached_path):
                print("Error storing cached connection stats")
            return cached_path, 0
        print_progress = False
        # parent maps reconstruct the path from each side after meeting.
        parent1: Dict[ArtistID, ArtistID] = {}
        parent2: Dict[ArtistID, ArtistID] = {}
        found = False
        intersect: ArtistID = ""
        queue1: List[ArtistID] = [artist1.id]
        queue2: List[ArtistID] = [artist2.id]
        # set1/set2 mirror the queues for O(1) membership tests.
        set1 = set()
        set1.add(artist1.id)
        set2 = set()
        set2.add(artist2.id)
        visited1: Set[ArtistID] = set()
        visited2: Set[ArtistID] = set()
        loop = asyncio.get_event_loop()
        # edge case where artist1/2 or is in queue of opposite side when
        # intersection is found, so intersection should be ignored
        one_way_edge_case = False
        # settings for how often (BFS turns) to display count of artists searched
        status_counter = 0
        status_interval = 50
        while queue1 and queue2 and not found:
            # take turns from each side
            current_artist1_id: ArtistID = queue1.pop(0)
            set1.remove(current_artist1_id)
            if current_artist1_id == artist2.id or current_artist1_id in visited2:
                found = True
                intersect = current_artist1_id
                if artist1.id in queue2 or artist2.id in queue1:
                    one_way_edge_case = True
                break
            if current_artist1_id not in visited1:
                # NOTE(review): run_in_executor on a lambda returning a
                # coroutine, then awaited — unusual; preserved as-is.
                promise = await loop.run_in_executor(
                    None, lambda: get_related_artists(current_artist1_id))
                related_artists_ids: List[ArtistID] = await promise
                for i in related_artists_ids:
                    if i not in parent1:
                        parent1[i] = current_artist1_id
                    if i not in visited1 and i not in set1:
                        queue1.append(i)
                        set1.add(i)
                visited1.add(current_artist1_id)
            current_artist2_id: ArtistID = queue2.pop(0)
            set2.remove(current_artist2_id)
            if current_artist2_id == artist1.id or current_artist2_id in visited1:
                found = True
                intersect = current_artist2_id
                if artist1.id in queue2 or artist2.id in queue1:
                    one_way_edge_case = True
                break
            if current_artist2_id not in visited2:
                promise = await loop.run_in_executor(
                    None, lambda: get_related_artists(current_artist2_id))
                related_artists_ids: List[ArtistID] = await promise
                for i in related_artists_ids:
                    if i not in parent2:
                        parent2[i] = current_artist2_id
                    if i not in visited2 and i not in set2:
                        queue2.append(i)
                        set2.add(i)
                visited2.add(current_artist2_id)
            # print progress
            if print_progress:
                if status_counter == 0:
                    all_artists = visited1.union(visited2)
                    print("Artists searched: {}".format(len(all_artists) - 2))
                status_counter = (status_counter + 1) % status_interval
        if found:
            all_artists = visited1.union(visited2)
            # print("Artists searched: {}".format(len(all_artists)-2))
            path: List[ArtistID] = await trace_bi_path(artist1, artist2,
                                                       parent1, parent2,
                                                       intersect)
            if one_way_edge_case:
                path2: List[ArtistID] = await trace_path(
                    artist1, artist2, parent1, parent2)
                if len(path2) < len(path):
                    path = path2[:]
            # store stats
            # store length, and initialize count associated with this connection
            # update count of artists included in searches
            # if not cache.store_path(artist1.id, artist2.id, path):
            #     print("Error storing path. May have already been stored")
            if not cache.new_connection_stats(artist1.id, artist2.id, path):
                print("Error updating new connection stats")
            return path, len(all_artists)
        else:
            return [], 0

    return app
from quart import Quart, escape, request, jsonify
from quart_cors import cors
from daikin_aircon_pylib import daikin_aircon

app = Quart(__name__)
cors(app)

# Fixed LAN address of the Daikin air-conditioner unit.
AIRCON_ADDR = '10.33.1.244'


def _get_aircon():
    # A fresh client per request; presumably cheap to construct — confirm.
    return daikin_aircon.Aircon(AIRCON_ADDR)


@app.route('/api/')
async def hello():
    # NOTE(review): `name` is read but never used in the response.
    name = request.args.get("name", "World")
    return jsonify('Hello')


@app.route('/api/power/start')
async def power_start():
    aircon = _get_aircon()
    aircon.set_power(True)
    return jsonify('OK')


@app.route('/api/power/stop')
async def power_stop():
    aircon = _get_aircon()
    aircon.set_power(False)
    # NOTE(review): chunk appears truncated here — the full file likely
    # returns jsonify('OK') as in power_start; not added in this doc-only pass.
try: import config except ModuleNotFoundError: import config_default as config from urllib.parse import urljoin from models import Request def make_url(url): return urljoin(config.NETEASE_BACKEND, url) app = Quart(__name__) app = cors(app, allow_origin=["*"]) logger = app.logger @app.before_serving async def init(): global pool, client client = aiohttp.ClientSession() print(f"Creating mysql engine...") pool = await aiomysql.create_pool(user=config.MYSQL_USER, password=config.MYSQL_PASSWORD, port=config.MYSQL_PORT, db=config.MYSQL_DATABASE, host=config.MYSQL_HOST, loop=asyncio.get_event_loop(),
from typing import Union import httpx from quart_cors import cors from quart import Quart, request, jsonify from privex.helpers import empty, retry_on_err from werkzeug.exceptions import BadRequest import logging from asyncio import sleep from balancer.core import MAX_BATCH, CHUNK_SIZE from balancer.node import find_endpoint, Endpoint log = logging.getLogger(__name__) flask = Quart(__name__) cors(flask, allow_origin="*") loop = asyncio.get_event_loop() MAX_RETRY = 10 RETRY_DELAY = 3 class EndpointException(BaseException): def __init__(self, message, endpoint: Endpoint = None): super().__init__(message) self.endpoint = endpoint async def extract_json(rq: request): try: data = await rq.get_json(force=True) return data
# Public singletons shared across the Sayonika package.
# NOTE(review): "db" is exported here but not defined in this chunk —
# presumably created elsewhere in the full module; confirm.
__all__ = (
    "sayonika_instance",
    "limiter",
    "logger",
    "jwt_service",
    "db",
    "mailer",
    "loop",
    "redis",
)

loop = asyncio.get_event_loop()
logger = logging.getLogger("Sayonika")
# CORS-enabled application instance.
sayonika_instance = cors(
    Sayonika(),
    allow_origin=["https://sayonika.moe", "*"],  # Remove this one when ready for prod
)
jwt_service = JWT(SETTINGS)
mailer = Mailer(SETTINGS)
# Rate limiter; limits come from settings as a ";"-separated list.
limiter = Limiter(
    key_func=get_ratelimit_key,
    default_limits=SETTINGS.get("RATELIMITS", "5 per 2 seconds;1000 per hour").split(
        ";"
    ),
)
# Redis pool wrapper whose actual connection is initialised later.
redis = InitLaterRedis(
    ConnectionsPool(SETTINGS["REDIS_URL"], minsize=5, maxsize=10, loop=loop)
)

# Use env vars to update config
sayonika_instance.config.update(SETTINGS)
from quart import Quart, make_response, render_template, send_from_directory, Blueprint, jsonify
from quart_cors import cors

from .. import factory
from ..models.mongo import Video, Preset, Model

# Blueprint for encoding-related endpoints, CORS-enabled.
encode = Blueprint('encode', __name__, url_prefix='/encode')
cors(encode)


@encode.route('/test', methods=['GET'])
async def test():
    """Smoke-test route: prints Model/Preset schemas and validation results."""
    x = Model()
    print(x.scheme)
    print(x.validate({}))
    x = Preset()
    print(x.scheme)
    print(x.validate({'title': 'mon titre'}))
    return jsonify({'response': 'okay', 'code': 200})


@encode.route('/populate', methods=['GET'])
async def populate():
    # Stub: acknowledges the request without doing any population yet.
    return jsonify({'response': 'populated', 'code': 200})
from simplekv.decorator import PrefixDecorator
from simplekv.memory.redisstore import RedisStore
from quart import Quart
from quart_cors import cors

from .config import config, load_config

logging.basicConfig(level=logging.DEBUG)

app = Quart(__name__)
# Mirror every config entry onto the Quart config object.
for key in config.keys():
    app.config[key] = config[key]
app = cors(app,
           allow_headers=['X-Requested-With'],
           allow_origin=app.config['ALLOW_ORIGIN'])
load_config()

# Under pytest, substitute an in-memory mock Redis for session storage.
if "pytest" in sys.modules:
    from mockredis import mock_strict_redis_client
    store = RedisStore(mock_strict_redis_client())
else:
    store = RedisStore(redis.StrictRedis(host=app.config['REDIS_HOST']))

# Sessions live under a common key prefix within the shared store.
prefixed_store = PrefixDecorator('sessions_', store)
KVSessionExtension(prefixed_store, app)

# Imported last for their route side effects (avoids an import cycle).
from .gateway import proxy
from .auth import web
# Default Libraries import json import requests from time import sleep # PyPi packages from quart_cors import cors from quart import Quart, request, jsonify STEAM_API = 'https://store.steampowered.com/api/appdetails?appids=' TIME_INTERVAL = 0.67 app = Quart(__name__) app = cors(app, allow_origin='http://localhost:3000') neededFields = ['price_overview', 'is_free', 'package_groups'] def sanitizeResponse(appid: str, gameInfo: dict) -> dict: sanitizedResponse = {'appid': appid} if not gameInfo['success']: sanitizedResponse['success'] = False else: gameData = gameInfo['data'] for field in neededFields: if field in gameData: sanitizedResponse[field] = gameData[field]
import ssl from quart import make_response, make_push_promise, Quart, render_template, url_for, websocket from quart_cors import cors, websocket_cors app = Quart(__name__) app1 = Quart(__name__) app1 = cors(app1, allow_origin="*") @app1.route('/') async def index(): # result = await render_template('index.html') # response = await make_response(result) # print(dir(response)) await make_push_promise(url_for('static', filename='css/bootstrap.min.css')) await make_push_promise(url_for('static', filename='js/bootstrap.min.js')) await make_push_promise(url_for('static', filename='js/jquery.min.js')) return await render_template('index.html') @app1.websocket("/login") @websocket_cors(allow_origin="*") async def login(): while True: data = await websocket.receive() print("DATA FROM SOCKET : ", data) await websocket.send(data) @app1.websocket("/chat_message") @websocket_cors(allow_origin="*") async def chat_message(): while True:
SCRIPT_PATH, SENDER_ADDRESS, NETWORK_GRAPH, RECEIVER_1_ADDRESS, RECEIVER_5_ADDRESS, ) from deployment import get_receiver_addresses from network import NetworkTopology LOGGER_FORMAT = '%(asctime)s %(message)s' logging.basicConfig(format=LOGGER_FORMAT, datefmt='[%H:%M:%S]') log = logging.getLogger() log.setLevel(logging.INFO) app = Quart(__name__) app = cors(app) train_app = None network = NetworkTopology(NETWORK_GRAPH, SENDER_ADDRESS, get_receiver_addresses()) current_provider = None def on_new_bar_code(bar_code, file_path): factor = 4 bar_code = bar_code.resize( (int(bar_code.width * factor), int(bar_code.height * factor))) bar_code.save(str(file_path)) def barcode_factory(address, nonce):
import json import time import aiohttp import aioredis from quart import Quart, render_template, request, g from quart_cors import cors from docopt import docopt from wdreconcile.engine import ReconcileEngine from wdreconcile.suggest import SuggestEngine from wdreconcile.monitoring import Monitoring from config import * app = Quart(__name__, static_url_path='/static/', static_folder='static/') app = cors(app, allow_origin='*') @app.before_serving async def setup(): app.redis_client = await aioredis.create_redis_pool(redis_uri) app.http_connector = aiohttp.TCPConnector(limit_per_host=10) app.http_session_obj = aiohttp.ClientSession(connector=app.http_connector) app.http_session = await app.http_session_obj.__aenter__() @app.before_request async def request_context(): g.reconcile = ReconcileEngine(app.redis_client, app.http_session) g.suggest = SuggestEngine(app.redis_client, app.http_session) g.monitoring = Monitoring(app.redis_client) @app.after_serving
ALLOW_ORIGIN = ( [ f"http://{ ip }:{ PORT }" for ip in get_ips() ] + [ f"http://{ ip }:8011" for ip in get_ips() ] + [ f"http://0.0.0.0:8011", f"http://0.0.0.0:{ PORT }" ] ) ''' app = Quart('test_quart', static_url_path=f"/static", static_folder=f"{ ROOT_PATH }/static", template_folder=f"{ ROOT_PATH }/templates") app = cors( app, allow_origin=['*'], allow_methods=["GET", "POST"], #allow_credentials=True, ) PID = None SPID = None BPID = None @app.after_serving async def close_all_subprocess(): global PID global SPID global BPID api.pyll.pylive.kill(PID) api.pyll.pylive.kill(SPID)
"""Authentication module. """ import os import quart import quart_cors import quart_jwt_extended as jwt api_password = os.getenv("API_PASSWORD") if not api_password: raise RuntimeError("API_PASSWORD env not set") app = quart.Blueprint("auth", __name__) app = quart_cors.cors(app) @app.route("/login", methods=["POST"]) async def login(): """Gets a JWT token.""" if not quart.request.is_json: return {"error": "missing JSON in request"}, 400 request = await quart.request.json password = request.get("password", None) if not password: return {"error": "missing password parameter"}, 400 if password != api_password: return {"error": "bad password"}, 401
import logging
import json
from quart import Quart, render_template_string, request, jsonify
from telethon import TelegramClient, utils
from quart_cors import cors
import psycopg2
import os
import re
import pymorphy2

# Hypercorn bind configuration.
# NOTE(review): `hypercorn` is used below but not imported in this chunk —
# confirm it is imported at the top of the full file.
quart_cfg = hypercorn.Config()
port = int(os.environ.get("PORT", 17995))
quart_cfg.bind = ["0.0.0.0:" + str(port)]

# Quart app
app = Quart(__name__)
app = cors(app, allow_origin="*")
# NOTE(review): placeholder secret — must be overridden before deployment.
app.secret_key = 'CHANGE THIS TO SOMETHING SECRET'

# db connection
user = os.environ['POSTGRES_USER']
password = os.environ['POSTGRES_PASSWORD']
host = os.environ['POSTGRES_HOST']
db = os.environ['POSTGRES_DATABASE']

# Russian morphological analyzer used for text processing.
morph = pymorphy2.MorphAnalyzer()


@app.route('/news', methods=['POST'], endpoint='news')
async def news_route():
    logging.info(request.is_json)
    content = await request.get_json()
    print(content)
    # NOTE(review): chunk appears truncated — the handler body continues
    # in the full file.
PORT = 8012
# Accept browser origins from every local IP on this port and on 8011.
ALLOW_ORIGIN = ([f"http://{ ip }:{ PORT }" for ip in get_ips()] +
                [f"http://{ ip }:8011" for ip in get_ips()] +
                [f"http://0.0.0.0:8011", f"http://0.0.0.0:{ PORT }"])

from quart import Quart, websocket, request, render_template, send_from_directory, send_file
from quart_cors import cors
from aiofile import AIOFile, LineReader

app = Quart('test_quart')
# CORS currently wide open; the computed ALLOW_ORIGIN list is kept disabled.
app = cors(
    app,
    #allow_origin=ALLOW_ORIGIN,
    #allow_credentials=True,
    allow_origin=['*'],
    allow_methods=["GET", "POST"],
)

# PIDs of spawned helper processes, assigned elsewhere in the full module.
PID = None
SPID = None
BPID = None


@app.after_serving
async def close_all_subprocess():
    """Kill helper subprocesses on server shutdown."""
    global PID
    global SPID
    global BPID
    api.pyll.pylive.kill(PID)
    # NOTE(review): chunk appears truncated — SPID/BPID kills presumably
    # follow in the full file.