import logging from time import time import socketio from web3 import Web3 import websockets from ..app import App from ..config import ALLOWED_ORIGIN_SUFFIXES, ED_CONTRACT_ADDR, HTTP_ORDERS_ENDPOINT_SECRET, STOPPED_TOKENS from ..src.erc20_token import ERC20Token from ..src.order_enums import OrderState from ..constants import ZERO_ADDR, ZERO_ADDR_BYTES, MAX_ORDERS_PER_USER from ..lib import rapidjson sio_logger = logging.getLogger('socketio.AsyncServer') sio_logger.setLevel(logging.DEBUG) sio = socketio.AsyncServer( logger=sio_logger, json=rapidjson, cors_allowed_origins='*') app = web.Application() routes = web.RouteTableDef() sio.attach(app) logger = logging.getLogger('websocket_server') logger.setLevel(logging.DEBUG) getcontext().prec = 10 from urllib.parse import urlparse def is_origin_allowed(origin): """ Returns True if the origin has hostname suffix in the allowed origins list.
import socketio
import numpy as np
# BUG FIX: `web` was used below (web.Application, web.Response) but never
# imported, so this module raised NameError at import time.
from aiohttp import web


def load_eigenvector(k, d):
    """Load the stored eigenvector for (k, d) and serialize it to text.

    Reads ``eigenvectors/eigen_k=<k>,d=<d>.npy`` and returns every scalar
    entry (flattened in C order via np.nditer) joined into one string,
    each entry followed by a single space — the exact format the original
    loop produced, including the trailing space.

    Parameters:
        k, d: values interpolated into the .npy filename.

    Returns:
        str: space-terminated entries of the eigenvector.

    Raises:
        FileNotFoundError: if the .npy file does not exist.
    """
    vec_path = f"eigenvectors/eigen_k={k},d={d}.npy"
    eigenvector_np = np.load(vec_path)
    # str.join runs in O(n); the original `+=` loop was quadratic.
    # Each element keeps its trailing space to match the old output.
    return "".join(str(x) + " " for x in np.nditer(eigenvector_np))


# creates a new Async Socket IO Server
sio = socketio.AsyncServer(cors_allowed_origins="*")

# Creates a new Aiohttp Web Application
app = web.Application()

# Binds our Socket.IO server to our Web App instance
sio.attach(app)


# we can define aiohttp endpoints just as we normally would with no change
async def index(request):
    with open('index.html') as f:
        return web.Response(text=f.read(), content_type='text/html')


async def test(request):
    with open('test.js') as f:
        # BUG FIX: 'text/js' is not a registered MIME type; browsers expect
        # 'text/javascript' for script resources.
        return web.Response(text=f.read(), content_type='text/javascript')
roomlist = ['room1', 'room2', 'room3', 'room4', 'room5'] available_rooms = queue.Queue() available_rooms.put('room2') available_rooms.put('room3') available_rooms.put('room4') available_rooms.put('room5') current_room = 'room1' clientlist = {} sid_room_map = {} for i in roomlist: clientlist[i] = list() start_match_map = {} sio = socketio.AsyncServer(cors_allowed_origins='*', ping_timeout=35) app = web.Application() sio.attach(app) @sio.event async def connect(sid, environ): global current_room global clientlist global available_rooms global sid_room_map if len(clientlist[current_room]) == 2: current_room = available_rooms.get()
import asyncio import concurrent.futures from random import random from hashlib import sha1 from fastapi import FastAPI from rich.console import Console from firebase_admin import db, credentials, initialize_app ping_freq, ping_wait = 25, 60 player_limit = 1 console = Console() app = FastAPI() sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", ping_interval=ping_freq, ping_timeout=ping_wait # async_handlers=True ) socket_app = socketio.ASGIApp(sio) app.mount("/", socket_app) # r = redis.StrictRedis(host="localhost", db=0, decode_responses=True) r = redis.from_url(os.environ.get("REDIS_URL")) room_id = sha1((str(random()) + str(random())).encode("utf8")).hexdigest() r.set("n_player", 0) r.set("room_id", room_id) executor = concurrent.futures.ThreadPoolExecutor(max_workers=20) cred = credentials.Certificate(
# Auction/bidding service bootstrap: an aiohttp web application with a
# Socket.IO server attached, plus module-level bid-tracking state that the
# handlers elsewhere in this file read and mutate.
import ssl
from io import BytesIO
import xmlrpc.client
import requests
import json
import sched, time
from aiohttp import web
import socketio
from requests.auth import HTTPBasicAuth
import datetime
import urllib

# Socket.IO server bound to the aiohttp application below.
sio = socketio.AsyncServer(async_mode='aiohttp')
app = web.Application()
sio.attach(app)

# Default headers for outgoing JSON requests made with `requests`.
headers = {
    'Content-type': 'application/json',
}

# --- auction state (module-level mutable globals) ---
slot_id = ''              # identifier of the auction slot currently handled
current_Highest_bid = 0   # best bid seen so far
current_second_bid = 0    # runner-up bid — presumably for second-price logic; TODO confirm
newBids = {}              # presumably bid-id -> bid payload; verify against handlers
bidCounter = 1            # running sequence number for incoming bids
bid_id = ''
c_id = ''                 # presumably client/customer id — confirm against callers
_baseprice = 0            # reserve price; 0 appears to mean "unset"
status = False            # auction open/closed flag — NOTE(review): confirm semantics
finalreading = 0
from jwt.exceptions import InvalidTokenError from lib.config import which_shard, logger, is_prod, domain_name from lib.auth import JWT, gateway_authenticated as authenticated from lib.ipc.grpc_client import get_async_client from lib.ipc.async_rpc_client import shardRPC from lib.ipc.async_subscriber import Subscriber from lib.ipc.async_rpc_server import start_server from lib.status_codes import StatusCodes as s from lib.pool_types import PoolType from src.pools import GuildPool sio = socketio.AsyncServer( async_mode='aiohttp', cors_allowed_origins=[ f'https://{domain_name}', f'https://api.{domain_name}' ] if is_prod else '*') app = web.Application() sio.attach(app) loop = asyncio.get_event_loop() shard_client = shardRPC(loop) event_subscriber = Subscriber(loop) manager_client = get_async_client('manager:50051') auth_nonces = {} class Counter: def __init__(self):
import json import graphene import model as UserModel from mongoengine import connect from graphene_mongo import MongoengineObjectType from aiohttp import web import pymongo import bson.json_util as json_util from bson.objectid import ObjectId import aiohttp_cors import socketio from collections import OrderedDict sio = socketio.AsyncServer(async_mode='aiohttp', cors_allowed_origins='*') myclient = pymongo.MongoClient("mongodb://localhost:27017/") mydb = myclient["creaxt"] # Creates a new Aiohttp Web Application app = web.Application() # Binds our Socket.IO server to our Web App # instance sio.attach(app) async def mongo(request): class Query(graphene.ObjectType): allusers = graphene.JSONString() getuser = graphene.JSONString(usertype=graphene.String(), schoolId=graphene.String()) allactivity = graphene.JSONString() activity = graphene.JSONString(CID=graphene.String(),
# Minimal ASGI-mode Socket.IO server that just logs client connect and
# disconnect events to stdout.
import logging
import socketio

sio = socketio.AsyncServer(
    async_mode='asgi',
    cors_allowed_origins='*')  # CORS open to any origin
# logger=False)


@sio.event
async def connect(sid, environ):
    # Fired once per new client; `environ` carries the ASGI scope dict.
    print('connect', sid)


@sio.event
async def disconnect(sid):
    print('disconnect', sid)
import logging from time import time import socketio from web3 import Web3 import websockets from ..app import App from ..config import ALLOWED_ORIGIN_SUFFIXES, ED_CONTRACT_ADDR, STOPPED_TOKENS from ..src.erc20_token import ERC20Token from ..src.order_enums import OrderState from ..constants import ZERO_ADDR, ZERO_ADDR_BYTES, MAX_ORDERS_PER_USER from ..lib import rapidjson sio_logger = logging.getLogger('socketio.AsyncServer') sio_logger.setLevel(logging.DEBUG) sio = socketio.AsyncServer(logger=sio_logger, json=rapidjson) app = web.Application() routes = web.RouteTableDef() sio.attach(app) logger = logging.getLogger('websocket_server') logger.setLevel(logging.DEBUG) getcontext().prec = 10 from urllib.parse import urlparse def is_origin_allowed(origin): """ Returns True if the origin has hostname suffix in the allowed origins list.
else: warn(f'Attempted directory traversal with path {path}') raise NotFound("No route or file at this URL") async def index(request): return FileResponse((static_path / 'index.html').as_posix()) async def demo(request): return FileResponse((static_path / 'demo.html').as_posix()) sio = socketio.AsyncServer( async_mode='asgi', cors_allowed_origins="*", # TODO only CORS in dev ping_interval=PING_INTERVAL, ) @sio.event async def connect(sid, environ): debug(f'socket.io client connected with sid: {sid}') await sio.emit('indi_init', app.indi.to_jsonable(), room=sid) @sio.event async def disconnect(sid): debug(f'socket.io client disconnected with sid: {sid}')
from starlette.responses import HTMLResponse from uvicorn.middleware.proxy_headers import ProxyHeadersMiddleware from bitcoin.core.serialize import uint256_from_str, uint256_to_str, uint256_from_compact from shared.utils import bytes_to_int redis = Redis(settings.REDIS_HOST) class Broadcast(BaseModel): data : str app = FastAPI() app.add_middleware(CORSMiddleware, allow_origins=['*'], allow_methods=['GET', 'POST']) app.add_middleware(ProxyHeadersMiddleware) mgr = socketio.AsyncRedisManager('redis://%s' % settings.REDIS_HOST) sio = socketio.AsyncServer(async_mode='asgi', client_manager=mgr) app_sio = socketio.ASGIApp(sio, app) from shared.models import Address, Transaction, Block, Utxo, WalletGroup, WalletGroupAddress from shared.settings import POOLS from peewee import RawQuery, fn from datetime import datetime, timedelta @sio.on('subscribe') async def subscribe(sid, room): sio.enter_room(sid, room) def get_latest_block(): return Block.select().order_by(Block.height.desc()).limit(1)[0] def get_confirmations(height, block=None):
redis_queues_service = RedisMultipleQueuesPublisher( async_redis_queue, num_queues=settings.WORKERS) websocket_channels_service = WebsocketChannelsService( channels_repository=channels_repository, data_repository=world_repository, redis_queue=redis_queues_service) pubsub_manager = PubSubManager(async_redis_queue) events_subscriber_service = RedisPubSubEventsSubscriberService(pubsub_manager) events_publisher_service = RedisPubSubEventsPublisherService(pubsub_manager) mgr = socketio.AsyncRedisManager('redis://{}:{}/{}'.format( settings.REDIS_HOST, settings.REDIS_PORT, settings.REDIS_SIO_DB)) transport = SocketioTransportInterface( socketio.AsyncServer(client_manager=mgr)) pubsub_observer = PubSubObserver(world_repository) async_redis_queues = get_redis_factory(RedisType.QUEUES) queue = RedisQueueConsumer(async_redis_queues, 0) worker_queue_manager = WorkerQueueService(queue) cmds_observer = commands_observer_factory(transport) connections_manager = ConnectionsManager() connections_observer = ConnectionsObserver(transport, pubsub_observer, world_repository, events_subscriber_service, connections_manager, cmds_observer) singleton_actions_scheduler = SingletonActionsScheduler()
import socketio import uvicorn sio = socketio.AsyncServer(async_mode="asgi", cors_allowed_origins="*", logger=True, engineio_logger=True) app = socketio.ASGIApp( sio, static_files={ "/": "app.html", }, ) @sio.on("connect") async def connect(sid, environ): print(f"Client {sid} connected") @sio.on("disconnect") def disconnect(sid): print(f"Client {sid} disconnected") @sio.on("set_features") async def set_features(sid, features): await sio.emit("subscribe_features", {"features": features}) if __name__ == "__main__":
SSL_KEY_FILE = 'XXX.key' import socketio import asyncio import json import threading from six.moves import queue from google.cloud import speech_v1p1beta1 as speech from google.cloud.speech_v1p1beta1 import types from google.protobuf.json_format import MessageToDict ''' mgr = socketio.AsyncRedisManager('redis://') sio = socketio.AsyncServer(client_manager=mgr,cors_allowed_origins=[]) ''' sio = socketio.AsyncServer(async_mode='aiohttp', async_handlers=True) app = web.Application() sio.attach(app) ########################################################################################################### ## ## ## ## Basic Web Routes ## ## ## ########################################################################################################### @sio.on('debug')
print( f'Players: {len([e for e in self.room if isinstance(e, Player)])}') print(f'Bombs: {len([e for e in self.room if isinstance(e, Bomb)])}') print( f'Boomba: {len([e for e in self.room if isinstance(e, Explosion)])}' ) for i, p in enumerate( sorted([e for e in self.room if isinstance(e, Player)], key=lambda x: x.uuid)): print(f'Player {p.uuid}, x={p.x}, y={p.y}') cursesman_server = CursesmanServer() sids = [] sio = socketio.AsyncServer(async_mode='tornado') app = tornado.web.Application([ (r"/socket.io/", socketio.get_tornado_handler(sio)), ], ) async def broadcast_update(room_state): await sio.emit('room_server_refresh', pickle.dumps(room_state)) @sio.event def room_event(sid, data): # update internal room representation room = pickle.loads(data) print(room)
oldLen = 0 validFilter = ["http", "dns", "tls", "https", "tcp", "udp", "icmpv6", "icmpv4"] p_id = -1 filtres = [] filtered = [] packets = [] devices = [] p_current = 0 p_max = 150 sio = socketio.AsyncServer(async_mode='sanic', cors_allowed_origins=[ "127.0.0.1", "http://localhost:8000", "http://127.0.0.1:8000" ]) app = Sanic(name="reactCap", ) app.config['CORS_SUPPORTS_CREDENTIALS'] = True sio.attach(app) globall = False ip_regex = re.compile( '((^\s*((([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))\s*$)|(^\s*((([0-9A-Fa-f]{1,4}:){7}([0-9A-Fa-f]{1,4}|:))|(([0-9A-Fa-f]{1,4}:){6}(:[0-9A-Fa-f]{1,4}|((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){5}(((:[0-9A-Fa-f]{1,4}){1,2})|:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3})|:))|(([0-9A-Fa-f]{1,4}:){4}(((:[0-9A-Fa-f]{1,4}){1,3})|((:[0-9A-Fa-f]{1,4})?:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){3}(((:[0-9A-Fa-f]{1,4}){1,4})|((:[0-9A-Fa-f]{1,4}){0,2}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){2}(((:[0-9A-Fa-f]{1,4}){1,5})|((:[0-9A-Fa-f]{1,4}){0,3}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(([0-9A-Fa-f]{1,4}:){1}(((:[0-9A-Fa-f]{1,4}){1,6})|((:[0-9A-Fa-f]{1,4}){0,4}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:))|(:(((:[0-9A-Fa-f]{1,4}){1,7})|((:[0-9A-Fa-f]{1,4}){0,5}:((25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)(\.(25[0-5]|2[0-4]\d|1\d\d|[1-9]?\d)){3}))|:)))(%.+)?\s*$))' ) import os, sys base_dir = '.' if hasattr(sys, '_MEIPASS'): # or, untested: if getattr(sys, 'frozen', False): base_dir = os.path.join(sys._MEIPASS)
# eVOLVER server glue: serial-command configuration constants, module-level
# connection state, and the Socket.IO handlers for the DPU namespace.
import json
import sys
import os
import yaml
from traceback import print_exc

# Absolute directory this module lives in (resolved against the CWD at
# import time).
LOCATION = os.path.realpath(
    os.path.join(os.getcwd(), os.path.dirname(__file__)))

# Config keys naming the serial-command prefix characters.
IMMEDIATE = 'immediate_command_char'
RECURRING = 'recurring_command_char'
CALIBRATIONS_FILENAME = "calibrations.json"

# Module-level mutable state, populated elsewhere in this file.
evolver_conf = {}          # presumably the parsed YAML config; TODO confirm loader
serial_connection = None   # serial port handle, set during setup
command_queue = []         # pending commands awaiting dispatch

# NOTE(review): `socketio` is used here but not imported in this chunk —
# presumably imported earlier in the full file; verify.
sio = socketio.AsyncServer(async_handlers=True)


class EvolverSerialError(Exception):
    """Raised when communication over the eVOLVER serial link fails."""
    pass


@sio.on('connect', namespace='/dpu-evolver')
async def on_connect(sid, environ):
    # A DPU client attached on its dedicated namespace.
    print('Connected dpu as server', flush=True)


@sio.on('disconnect', namespace='/dpu-evolver')
async def on_disconnect(sid):
    print('Disconnected dpu as Server', flush=True)
#!/usr/bin/env python # coding:utf-8 # Copyright (C) dirlt import logging import socketio from aiohttp import web channel = socketio.AsyncRedisManager('redis://*****:*****@%(lineno)d: %(msg)s' logging.basicConfig(level=logging.WARN, format=DEFAULT_LOGGING_FORMAT) async def handle_fanout(request): return web.Response(text='query = "{}"'.format(request.query_string)) namespace = '/'
return app async def setup_runner(app: web.Application, site: Type[web.BaseSite], **kwargs): runner = web.AppRunner(app) runners.append(runner) await runner.setup() s = site(runner, **kwargs) await s.start() # MAIN APP sio = socketio.AsyncServer( async_mode="aiohttp", engineio_logger=False, cors_allowed_origins=config.get("Webserver", "cors_allowed_origins", fallback=None), ) app = setup_app() aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader("templates")) basepath = os.environ.get("PA_BASEPATH", "/")[1:] socketio_path = basepath + ("/" if len(basepath) > 0 else "") + "socket.io" sio.attach(app, socketio_path=socketio_path) app["state"] = {} # API APP api_app = setup_app([auth.token_middleware])
from asyncio import Event from sanic import Sanic, request from sanic.response import json, text from sanic_cors import CORS import socketio from json import dumps from typing import Dict, List from uuid import uuid4 from game_logic import lobby, answer from functions import is_correct from models import Player, Session app = Sanic(__name__) CORS(app) socket = socketio.AsyncServer(async_mode='sanic', cors_allowed_origins="*") socket.attach(app) sessions: Dict[str, Session] = {} @app.route('/host', methods=["POST"]) def host(request) -> str: data = request.json if type(data) == dict: name = data["name"] id_ = data["id"] session = Session(id_, name) sessions[id_] = session asyncio.create_task(lobby(session, socket)) return json({"id": id_}) else:
def app_main(): logging.basicConfig(format='[{asctime}]{levelname}:{message}', datefmt='%Y-%m-%d %H:%M:%S', level=logging.DEBUG, style='{') base_path = os.getcwd() # Serve static content from /static app = web.Application() aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader(os.path.join(base_path, 'templates'))) app.router.add_static(prefix='/static', path=os.path.join(base_path, 'static')) routes = web.RouteTableDef() # Create SocketIO async server for controller sio = socketio.AsyncServer(async_mode='aiohttp') sio.attach(app) controller = Controller(sio.emit) controller.system_init() def render_template(template_name, request, **kwargs): return aiohttp_jinja2.render_template(template_name, request, context=kwargs) def redirect(route_name, request, **kwargs): raise web.HTTPFound(request.app.router[route_name].url_for().with_query(kwargs)) def process_list(lst): for it in lst: for k, v in it.items(): if isinstance(v, bytes): it[k] = v.decode() @routes.get('/') @aiohttp_jinja2.template('index.html') async def index(request): return @routes.get('/system-overview') @aiohttp_jinja2.template('system-overview.html') async def system_overview(request): metainfo = [] metainfo.append({"information":"System Prefix", "value": controller.system_prefix}) # metainfo.append({"information":"System Anchor", "value": controller.system_anchor}) metainfo.append({"information": "Available Devices", "value": str(len(controller.device_list.device))}) metainfo.append({"information": "Available Services", "value": str(len(controller.service_list.service))}) return {'metainfo': metainfo} ### bootstrapping @routes.get('/bootstrapping') @aiohttp_jinja2.template('bootstrapping.html') async def bootstrapping(request): secrets = list() for secret in controller.shared_secret_list.shared_secrets: secrets.append({'deviceIdentifier': str(bytes(secret.device_identifier).decode()), 'publicKey': str(bytes(secret.public_key).decode()), 'symmetricKey': str(bytes(secret.symmetric_key).decode())}) 
logging.info("bootstapping") logging.info(secrets) return {'existing_shared_secrets': secrets} ### room @routes.get('/room') async def room(request): room_list = [] return render_template("room.html", request, room_list= room_list) ### trigger bootstrapping process @routes.post('/exec/bootstrapping') async def bootstrap_device(request): ret = await controller.bootstrapping() logging.info("Bootstrap result:") logging.info(ret) return web.json_response(ret) ###add shared_secrets @routes.post('/add/shared_secrets') async def add_shared_secrets(request): data = await request.post() logging.info(data) up_img = data['file'].file decoded = decode(Image.open(up_img)) logging.info(decoded) shared_info = json.loads(decode(Image.open(up_img))[0].data) if not shared_info["device_identifier"] or not shared_info["public_key"] or not shared_info["symmetric_key"]: return web.json_response({"st_code": 500}) for secret in controller.shared_secret_list.shared_secrets: if bytes(secret.device_identifier).decode() == shared_info["public_key"]: return web.json_response({"st_code": 500}) new_shared_secret = SharedSecretsItem() new_shared_secret.device_identifier = shared_info["device_identifier"].encode() new_shared_secret.public_key = shared_info["public_key"].encode() new_shared_secret.symmetric_key = shared_info["symmetric_key"].encode() controller.shared_secret_list.shared_secrets.append(new_shared_secret) secrets = list() for secret in controller.shared_secret_list.shared_secrets: secrets.append({'deviceIdentifier': str(bytes(secret.device_identifier).decode()), 'publicKey': str(bytes(secret.public_key).decode()), 'symmetricKey': str(bytes(secret.symmetric_key).decode())}) res = dict() res['sharedsecrets'] = secrets res['st_code'] = 200 return web.json_response(res) ###delete shared_secrets @routes.post('/delete/shared_secrets') async def delete_shared_secrets(request): data = await request.json() controller.shared_secret_list.shared_secrets = [ss for ss in 
controller.shared_secret_list.shared_secrets if bytes(ss.public_key).decode() != data['publicKey']] return web.json_response({"st_code": 200}) ### device list @routes.get('/device-list') @aiohttp_jinja2.template('device-list.html') async def device_list(request): load =[] for device in controller.device_list.device: load.append({'deviceId': str(device.device_id), 'deviceInfo': str(device.device_info), 'deviceCertName': str(device.device_cert_name)}) if not load: device_list = [] else: device_list = load["device"] return {'device_list': device_list} @routes.post('/delete/device') async def remove_device(request): r_json = await request.json() device_cert_name = None # delete device information from level db try: count = 0 for ss in controller.device_list.device: if ss.device_id == r_json["device_id"]: device_id_name = ss.device_cert_name # Key name of the certificate del controller.device_list.device[count] count += 1 except: logging.error('Cannot find the deleting device in the leveldb') return web.json_response({"st_code": 500}) # delete device identity in pib try: controller.keychain.deleteIdentity(Name(device_id_name)) except: logging.error('Cannot find the pib-identity of the deleting device') return web.json_response({"st_code": 500}) # delete service information from leveldb # service Name: system_prefix/%01/<service-id>/ [Device Identifier] # Device Identifier should not start with '/' for service_name in list(controller.real_service_list.keys()): d_id = Name(service_name)[3:].__str__()[1:] #get rid of the beginning '/'; device id shall not start with '/' if d_id == r_json["device_id"]: del controller.real_service_list[service_name] return web.json_response({"st_code": 200}) ### service list @routes.get('/service-list') @aiohttp_jinja2.template('service-list.html') async def service_list(request): load = [] for service in controller.service_list.service: load.append({'serviceId': str(service.service_id), 'serviceName': str(service.service_name), 'expTime': 
str(service.exp_time)}) if not load: service_list = [] else: service_list = load["service"] for item in service_list: if 'expTime' in item: logging.info(item) item['expTime'] = time.ctime(int(item['expTime']) / 1000.0) # The following code is only for sample use return {'service_list': service_list} ### service invocation @routes.get('/invoke-service', name='invoke-service') @aiohttp_jinja2.template('invoke-service.html') async def invoke_service(request): return @routes.post('/exec/invoke-service') async def trigger_invocation(request): return redirect('invoke-service', request) ### access control @routes.get('/access-control', name='access-control') @aiohttp_jinja2.template('access-control.html') async def access_control(request): load = [] if not load: service_prefix_list = [] else: service_prefix_list = load["access"] # The following code is only for sample use return {'service_prefix_list': service_prefix_list} @routes.post('/exec/update-access-rights') async def update_access_rights(request): r_json = await request.json() print(r_json['prefix']) print(r_json['access_type']) return redirect('access-control', request) @routes.get('/ndn-ping') @aiohttp_jinja2.template('ndn-ping.html') async def ndn_ping(request): return @routes.post('/exec/ndn-ping') async def exec_ndn_ping(request): controller.decode_crypto_public_key(controller.get_crypto_public_key(controller.system_anchor)) r_json = await request.json() name = r_json['name'] can_be_prefix = r_json['can_be_prefix'] must_be_fresh = r_json['must_be_fresh'] signed_interest = r_json['signed_interest'] param = r_json['application_parameter'] try: interest_lifetime = float(r_json['interest_lifetime']) * 1000.0 except ValueError: interest_lifetime = 4000.0 interest = Interest(name) interest.canBePrefix = can_be_prefix interest.mustBeFresh = must_be_fresh interest.interestLifetimeMilliseconds = interest_lifetime if param != '': try: interest.applicationParameters = int(param).to_bytes(1, 'little') except ValueError: 
pass interest.appendParametersDigestToName() if signed_interest: data_parameter = Data(interest.name) controller.keychain.sign(data_parameter, controller.system_anchor.getName()) data_parameter_blob_bytes = data_parameter.wireEncode().toBytes() existing_parameter_bytes = interest.getApplicationParameters().toBytes() whole_parameter_bytes = existing_parameter_bytes + data_parameter_blob_bytes interest.setApplicationParameters(Blob(whole_parameter_bytes)) st_time = time.time() ret = await controller.express_interest(interest) ed_time = time.time() response_time = '{:.3f}s'.format(ed_time - st_time) print(response_time, ret) ret['response_time'] = response_time return web.json_response(ret) app.add_routes(routes) asyncio.ensure_future(controller.run()) try: web.run_app(app, port=6060) finally: controller.save_db()
from data import db from secrets import token_hex from hashlib import md5 from dataclasses import dataclass, field from datetime import datetime from typing import Any, ClassVar import aioredis import socketio import asyncio import bcrypt import json import time import random mgr = socketio.AsyncRedisManager('redis://') sio = socketio.AsyncServer(client_manager=mgr, async_mode='sanic', cors_allowed_origins="*") app = Sanic() sio.attach(app) penguins = {} igloos = {} @dataclass class penguin: sid: str room: int x: int y: int logged_in: bool
#!/usr/bin/env python
"""Latency-test Socket.IO server: serves a demo page and echoes pings."""
import uvicorn
import socketio

sio = socketio.AsyncServer(async_mode='asgi')

# Serve the demo page and its stylesheet as static files alongside the
# Socket.IO endpoint.
app = socketio.ASGIApp(sio, static_files={
    '/': {
        'content_type': 'text/html',
        'filename': 'latency.html'
    },
    '/static/style.css': {
        'content_type': 'text/css',
        'filename': 'static/style.css'
    },
})


@sio.on('ping_from_client')
async def ping(sid):
    # Reply only to the sender so the client can measure round-trip time.
    await sio.emit('pong_from_server', room=sid)


if __name__ == '__main__':
    # BUG FIX: uvicorn.run() accepts host/port as keyword arguments only
    # (its signature is run(app, **kwargs)); the original passed
    # '127.0.0.1' and 5000 positionally, which raises TypeError.
    uvicorn.run(app, host='127.0.0.1', port=5000)
CommonRequestHandlerMixin) import socketio import logging __all__ = [ 'BaseSocketIOHandler', 'SocketIOHandler', 'socketio_server', 'socketio_client' ] logger = logging.getLogger('anthill.application') socketio_server = socketio.AsyncServer( client_manager=socketio.AsyncRedisManager('redis://', logger=logger), async_mode='tornado', engineio_logger=logger, logger=logger, ping_timeout=settings.WEBSOCKET_PING_TIMEOUT, ping_interval=settings.WEBSOCKET_PING_INTERVAL, max_http_buffer_size=settings.WEBSOCKET_MAX_MESSAGE_SIZE, cookie=settings.SESSION_COOKIE_NAME) socketio_client = socketio.AsyncClient(logger=logger, engineio_logger=logger, reconnection_delay=1, reconnection_delay_max=600) BaseSocketIOHandler = socketio.get_tornado_handler(socketio_server) class SocketIOHandler(TranslationHandlerMixin, LogExceptionHandlerMixin, SessionHandlerMixin, CommonRequestHandlerMixin, BaseSocketIOHandler): clients = None
import aiohttp_jinja2 import aiohttp_security import aiohttp_session import jinja2 import socketio from aiohttp import web from aiohttp_security import SessionIdentityPolicy from aiohttp_session.cookie_storage import EncryptedCookieStorage import auth from utils import FILE_DIR # SETUP SERVER sio = socketio.AsyncServer(async_mode="aiohttp", engineio_logger=False) app = web.Application() app["AuthzPolicy"] = auth.AuthPolicy() aiohttp_security.setup(app, SessionIdentityPolicy(), app["AuthzPolicy"]) aiohttp_session.setup(app, EncryptedCookieStorage(auth.get_secret_token())) aiohttp_jinja2.setup(app, loader=jinja2.FileSystemLoader("templates")) sio.attach(app) # SETUP PATHS os.chdir(FILE_DIR) # SETUP LOGGING logger = logging.getLogger("PlanarAllyServer") logger.setLevel(logging.INFO) file_handler = logging.FileHandler(str(FILE_DIR / "planarallyserver.log"))
def run_server():
    """Run the two-slave pong master server.

    Opens log.csv for event logging, registers Socket.IO handlers that
    drive the left pad (first connected slave), the ball speed (second
    slave) and the auto-moving right pad, then blocks in web.run_app().
    Relies on module-level game state and helpers (move_left_pad,
    move_ball, speed_ball_up/down, *_POSITION / *_SPEED constants)
    defined elsewhere in this file.
    """
    with open('log.csv', 'w', newline='') as logfile:
        log_writer = csv.writer(logfile)
        sio = socketio.AsyncServer(logger=True)
        # Connection order decides roles: slaves[0] = left, slaves[1] = right.
        slaves = []

        @sio.event
        async def connect(sid, environ):
            slaves.append(sid)
            if len(slaves) == 1:
                # First client becomes the left-pad controller.
                await sio.emit('orient', {'data': 'left'}, room=sid)
                print('Connected to left slave {}'.format(sid))
            elif len(slaves) == 2:
                # Second client controls ball speed; game starts now.
                await sio.emit('orient', {'data': 'right'}, room=sid)
                await sio.emit('start')
                print('Connected to right slave {}'.format(sid))
            else:
                print('Got {} slaves, dunno what to do with them, '
                      'will only work on first two'.format(len(slaves)))
            return 200, 'OK'

        @sio.event
        async def disconnect(sid):
            # Do this if you want to break the left/right logic on disconnect
            # slaves.remove(sid)
            print('Client {} disconnected.'.format(sid))

        @sio.on('left_slave_up')
        async def move_left_pad_up(sid):
            # Log the command, move the pad up (negative = up), broadcast.
            log_writer.writerow([str(datetime.now()), 'move_left_pad_up'])
            move_left_pad(-LEFT_PAD_MOVEMENT_SPEED)
            await sio.emit('move_left_pad', {'position': LEFT_PAD_POSITION})
            return 200, "OK"

        @sio.on('left_slave_down')
        async def move_left_pad_down(sid):
            # NOTE(review): unlike the "up" handler, this also emits a bare
            # 'move_left_pad_down' event before moving — confirm intent.
            await sio.emit('move_left_pad_down')
            log_writer.writerow([str(datetime.now()), 'move_left_pad_down'])
            move_left_pad(LEFT_PAD_MOVEMENT_SPEED)
            await sio.emit('move_left_pad', {'position': LEFT_PAD_POSITION})
            return 200, "OK"

        async def move_right_pad():
            """Advance the auto-moving right pad one step, bouncing at edges."""
            global RIGHT_PAD_POSITION, right_pad_moving_up
            # get next position for the right pad
            new_position = (
                RIGHT_PAD_POSITION[0],
                RIGHT_PAD_POSITION[1] +
                (RIGHT_PAD_MOVEMENT_SPEED if right_pad_moving_up else
                 (-RIGHT_PAD_MOVEMENT_SPEED)))
            if 0 < new_position[1] < WINDOW_DIMENSIONS[1] - PAD_SIZE[1]:
                RIGHT_PAD_POSITION = new_position
            else:
                # Hit a wall: keep position, flip direction for next tick.
                right_pad_moving_up = not right_pad_moving_up
            await sio.emit('move_right_pad', {'position': RIGHT_PAD_POSITION})

        @sio.on('tick')
        async def tick(sid):
            # One game step: right pad, ball physics, then state broadcast.
            await move_right_pad()
            move_ball()
            await sio.emit('set_ball_position', {'position': BALL_POSITION})
            await sio.emit('set_score', {'score': score})

        @sio.on('right_slave_up')
        async def right_slave_up(sid):
            speed_ball_up()
            log_writer.writerow([str(datetime.now()), 'speed_ball_up'])
            return 200, "OK"

        @sio.on('right_slave_down')
        async def right_slave_down(sid):
            speed_ball_down()
            log_writer.writerow([str(datetime.now()), 'speed_ball_down'])
            return 200, "OK"

        app = web.Application()
        sio.attach(app)
        # NOTE(review): port is passed as the string '5005'; aiohttp
        # normally takes an int — confirm this works on the aiohttp
        # version in use.
        web.run_app(app, host='127.0.0.1', port='5005')
async def parse(self, stream): t = time.time() output = { "time": t } lines = stream.split("\n") for line in lines: key, val = line.split(":") output[key.strip()] = val.strip() return output def __await__(self): return self.read_stream().__await__() sio = socketio.AsyncServer(async_mode='sanic') # app = web.Application() # sio.attach(app) app = Sanic() sio.attach(app) fs = AFileStream("/proc/meminfo") # async def index(request): # with open('latency.html') as f: # return web.Response(text=f.read(), content_type='text/html') @app.route('/get/') def index(request): return sjson({"hello": "mama"}) @app.route('/')
from tornado.web import RequestHandler import aioredis import json from secrets import token_bytes, token_urlsafe, base64 import hmac from hashlib import blake2b from time import time import asyncio import socketio from socketio import AsyncNamespace from Other import BackendUtils sio = socketio.AsyncServer(async_mode="tornado") _Handler = socketio.get_tornado_handler(sio) config = json.load(open("config/web.json")) CORS_ORGINS = config["CORS_ORGINS"] SESSION_TIMEOUT = BackendUtils.SESSION_TIMEOUT async def redisSecConnection(): try: # This Redis Index will be only for storage of security related items, like keys for examples redisDB = await aioredis.create_redis_pool(("localhost", 6379), encoding="utf-8", db=10) return redisDB except OSError as ex: print("Failed to connect to Redis, aborting action!") raise ex
import json import socketio from random import * import math playerkey = "chujwie" mgkey = "lelz" mgsid = "-1" menacingState = 0 entitiesList = [] for x in range(16): entitiesList.append( ["0", "", "", "Jurgend", "White Room", "Jorguś", "[WR]"]) sio = socketio.AsyncServer() app = web.Application() #app = socketio.ASGIApp(sio, static_files=static_files) sio.attach(app) async def index(request): with open('canvastest.html') as f1: return web.Response(text=f1.read(), content_type='text/html') @sio.event async def connect(sid, environ): print("> Request From: ", sid) await sio.emit("syncronize", entitiesList, sid)
"""Tiny Socket.IO echo service served as an ASGI app by uvicorn."""
import socketio
import uvicorn

# Empty origins list: cross-origin browser connections are rejected.
sio = socketio.AsyncServer(async_mode='asgi', cors_allowed_origins=[])
app = socketio.ASGIApp(sio, static_files={'/': 'index.html'})


def connect(sid, environ):
    print(f'connect  {sid}')


async def message(sid, data):
    print(f'message  {data}')
    # Reply on the same event name.
    await sio.emit('msg', "ur gayyy")


def disconnect(sid):
    print(f'disconnect  {sid}')


# Register handlers explicitly rather than via decorators.
sio.on('connect', connect)
sio.on('msg', message)
sio.on('disconnect', disconnect)

if __name__ == '__main__':
    uvicorn.run(app, host="localhost", port=5000)