Example #1
def run(port):
    """
    运行
    :param port: 端口
    :return:
    """
    config = get_config()
    log = create_logger(config)

    # rpc
    global rpc_chat
    rpc_chat = grpc.insecure_channel(config.RPC.CHATBOT)

    # create a Socket.IO server
    mgr = socketio.KombuManager(config.RABBITMQ)

    global sio
    sio = socketio.Server(async_mode='eventlet',
                          client_manager=mgr,
                          logger=log,
                          ping_timeout=300)

    sio.JWT_SECRET = config.JWT_SECRET

    # register event handlers: importing these modules attaches their handlers to sio
    from . import chatbot, notify

    app = socketio.Middleware(sio)

    eventlet.wsgi.server(eventlet.listen(('', port)), app, log=log)
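
A minimal usage sketch for this entry point; the port value below is an arbitrary example, not taken from the project's configuration.

if __name__ == '__main__':
    # run() blocks while eventlet.wsgi.server() serves the Socket.IO app
    run(8000)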
Example #2
File: utils.py Project: talkenson/ForcAD
def create(*, write_only: bool = False) -> socketio.KombuManager:
    broker_url = config.get_broker_url()
    return socketio.KombuManager(
        url=broker_url,
        write_only=write_only,
        channel='forcad-front',
    )
Example #3
def run_analysis(self, video_id, room_id):
    mgr = socketio.KombuManager(SOCKET_IO_BROKER, write_only=True)
    #
    # mgr.emit('results', data=json.dumps({"message": "Initialized YouTube retriever."}), room=room_id)
    # mgr.emit('results', data=json.dumps({"message": "Retrieved YouTube Comments."}), room=room_id)
    # mgr.emit('results', data=json.dumps({"message": "Running Analysis. This will take some time.."}), room=room_id)
    # mgr.emit('results', data=json.dumps({"message": "Analysis Complete"}), room=room_id)
    #
    # with open('./lib/data.json') as data_file:
    #     data_values = json.load(data_file)
    #
    # mgr.emit('results', data=json.dumps(data_values), room=room_id)

    try:
        youtube = GC.GetComments()

        print("Celery: Initialized YouTube retriever.")
        mgr.emit('results',
                 data=json.dumps({"message":
                                  "Initialized YouTube retriever."}),
                 room=room_id)

        results = youtube.comments_list(part='snippet, replies',
                                        videoId=video_id,
                                        maxResults=100,
                                        fields='items')

        print("Celery: Retrieved YouTube Comments.")

        mgr.emit('results',
                 data=json.dumps({"message": "Retrieve YouTube Comments."}),
                 room=room_id)
        mgr.emit('results',
                 data=json.dumps({
                     "message":
                     "Running Analysis. This will take some time.."
                 }),
                 room=room_id)

        data_values = AB.analyze(results)
        print("Celery: Analysis Complete")

        mgr.emit('results',
                 data=json.dumps({"message": "Analysis Complete"}),
                 room=room_id)
        mgr.emit('results', data=json.dumps(data_values), room=room_id)

    except Exception as e:
        print("Celery Error ocurred!", e)
        mgr.emit('results',
                 data=json.dumps(
                     {"error": "An error has ocurred. Restarting analysis."}),
                 room=room_id)
        self.retry()
Example #4
def get_wro_sio_manager():
    global _sio_wro_manager

    if _sio_wro_manager is None:
        broker_url = config.get_broker_url()
        _sio_wro_manager = socketio.KombuManager(
            url=broker_url,
            write_only=True,
            channel='forcad-front',
        )

    return _sio_wro_manager
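
The cached write-only manager can then publish events from any backend process. A minimal usage sketch; the event name, payload and room below are illustrative placeholders, not taken from the project:

mgr = get_wro_sio_manager()
# the emit is published to the 'forcad-front' channel on the broker; the
# Socket.IO server consuming that channel forwards it to connected clients
mgr.emit('scoreboard_update', {'round': 1}, room='front')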
Example #5
def sockjs_config(config, global_config):
    settings = config.registry.settings

    sockjs_options = {}
    if 'sockjs_options' in settings:
        sockjs_options = json.loads(settings['sockjs_options'])

    parser = ConfigParser({'here': global_config['here']})
    parser.read(global_config['__file__'])
    settings['server:main:worker_class'] = parser.get('server:main', 'worker_class')

    mgr = socketio.KombuManager(settings['sockjs_url'],
                                connection_options=sockjs_options)
    sio = socketio.Server(client_manager=mgr, async_mode='gevent')
    config.registry.settings['sio'] = sio
Example #6
    def init_app(self, app, **kwargs):
        if not hasattr(app, 'extensions'):
            app.extensions = {}  # pragma: no cover
        app.extensions['socketio'] = self
        self.server_options = kwargs

        if 'client_manager' not in self.server_options:
            url = kwargs.pop('message_queue', None)
            if url:
                queue = socketio.KombuManager(url)
                self.server_options['client_manager'] = queue

        resource = kwargs.pop('resource', 'socket.io')
        if resource.startswith('/'):
            resource = resource[1:]
        self.server = socketio.Server(**self.server_options)
        for handler in self.handlers:
            self.server.on(handler[0], handler[1], namespace=handler[2])
        app.wsgi_app = _SocketIOMiddleware(self.server,
                                           app,
                                           socketio_path=resource)
Example #7
import socketio

import eventlet
eventlet.monkey_patch()

uri = 'amqp://*****:*****@localhost:5672/pythonsocket'
# uri = 'amqp://*****:*****@localhost:5672/'
internal_sio = socketio.KombuManager(uri, write_only=True)

channelName = "streamEvents"


# working because we use rabbitmq for inter thread messaging
def emit(data):
    global channelName
    print('emitting to ' + channelName + ' ')
    print(data)
    internal_sio.emit(channelName, data)
Example #8
def create_app(config, enable_config_file=False):
    """
    创建应用
    :param config: 配置信息对象
    :param enable_config_file: 是否允许运行环境中的配置文件覆盖已加载的配置信息
    :return: 应用
    """
    app = create_flask_app(config, enable_config_file)

    # create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'],
                             app.config['SEQUENCE'])

    # rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # configure logging
    from utils.logging import create_logger
    create_logger(app)

    # register URL converters
    from utils.converters import register_converters
    register_converters(app)

    # redis
    # keep the legacy redis interface for now
    from utils.redis_client import create_redis_clients
    app.redis_cli = create_redis_clients(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(startup_nodes=app.config['REDIS_CLUSTER'])

    # rpc
    app.rpc_reco = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60
    )

    # socket.io
    app.sio = socketio.KombuManager(app.config['RABBITMQ'], write_only=True)

    # initialize the MySQL database connection
    from models import db

    db.init_app(app)

    # deprecated: error handlers registered this way have no effect with flask-restful
    # from utils.error_handlers import handle_redis_error, handler_mysql_error
    # app.register_error_handler(RedisError, handle_redis_error)
    # app.register_error_handler(SQLAlchemyError, handler_mysql_error)

    # add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # register the notification module
    from .resources.notice import notice_bp
    app.register_blueprint(notice_bp)

    # search
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
Example #9
import socketio

RABBITMQ = 'amqp://*****:*****@localhost:5672/toutiao'

# create a helper that pulls the messages to be pushed out of the rabbitmq queue
mgr = socketio.KombuManager(RABBITMQ)

# create the socketio server object, just like creating a flask app object
sio = socketio.Server(async_mode='eventlet', client_manager=mgr)
app = socketio.Middleware(sio)
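
To actually serve this app it can be handed to an eventlet WSGI server, as Example #1 does. A minimal sketch; the port is an arbitrary example value:

import eventlet
import eventlet.wsgi

# listen on all interfaces; 8000 is an illustrative port, not from the project
eventlet.wsgi.server(eventlet.listen(('', 8000)), app)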
Example #10
def create_app(config, enable_config_file=False):
    """
    创建应用
    :param config: 配置信息对象
    :param enable_config_file: 是否允许运行环境中的配置文件覆盖已加载的配置信息
    :return: 应用
    """
    app = create_flask_app(config, enable_config_file)

    # create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'], app.config['SEQUENCE'])

    # rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # configure logging
    from utils.logging import create_logger
    create_logger(app)

    # register URL converters
    from utils.converters import register_converters
    register_converters(app)

    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    app.redis_master = _sentinel.master_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(
        app.config['REDIS_SENTINEL_SERVICE_NAME'])

    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(
        startup_nodes=app.config['REDIS_CLUSTER'])

    # rpc
    app.rpc_reco = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # Elasticsearch
    app.es = Elasticsearch(
        app.config['ES'],
        # sniff before doing anything
        sniff_on_start=True,
        # refresh nodes after a node fails to respond
        sniff_on_connection_fail=True,
        # and also every 60 seconds
        sniffer_timeout=60)

    # socket.io
    app.sio_mgr = socketio.KombuManager(app.config['RABBITMQ'],
                                        write_only=True)

    # initialize the MySQL database connection
    from models import db
    db.init_app(app)

    # APScheduler
    from .schedule import statistic
    executors = {'default': ThreadPoolExecutor(10)}
    app.scheduler = BackgroundScheduler(executors=executors)
    # scheduled jobs outside the flask app can be added here
    # app.scheduler.add_job(statistic.fix_statistic, 'cron', hour=3)  # 3 a.m. every day
    app.scheduler.add_job(statistic.fix_statistic, 'date',
                          args=[app])  # run immediately to make testing easier
    app.scheduler.start()

    # add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # register the notification module
    from .resources.notice import notice_bp
    app.register_blueprint(notice_bp)

    # search
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
Example #11
# modify to whatever device path shows up on your system
PORT_CONTROL = "/dev/cu.usbmodem1411"

REDIS_ADDR = 'redis://127.0.0.1:6379'
PORT_TIME_OUT = 0.05

# action key bindings
KEY_LEFT = 'a'
KEY_RIGHT = 'd'
KEY_QUIT = 'q'
KEY_ATTACK = 'j'
KEY_FINISH = 'f'
KEY_RESTART = 'r'

# server init
mgr = socketio.KombuManager(REDIS_ADDR)
sio = socketio.Server(async_mode='eventlet', client_manager=mgr)
app = socketio.Middleware(sio)


# Mock client for testing purposes
class Mock(object):
    def __init__(self):
        self.messages = ["p p1", "p p1", "p p2"]
        self.index = -1

    def readline(self):
        self.index = self.index + 1
        i = self.index % 3

        return self.messages[i]
Example #12
File: main.py Project: yuhao0925/toutiao
    define the event triggers and handlers
3. the app object
4. the port-listening object
5. run the sio service

"""
from werkzeug.wrappers import Request
import socketio
import jwt
import eventlet

eventlet.monkey_patch()
JWT_SECRET = 'TPmi4aLWRbyVq8zu9v82dWYW17/z+UvRnYTt4P6fAXA'

mq_uri = 'amqp://*****:*****@localhost:5672/toutiao'
manager = socketio.KombuManager(mq_uri)
"""
manager消息管理对象会从mq中阻塞的取消息,一旦取到消息就会按照消息内容和约定发送
视图函数中:current_app.sio_maneger.emit('following notify', data, room=target)
所以就不用专门的执行 sio.emit('following notify', data, room=target)
"""

# create the sio object; client_manager=manager is the message manager that dispatches messages to clients
sio = socketio.Server(async_mode='eventlet', client_manager=manager)
"""定义事件触发和处理方法"""


@sio.on('connect')
def connect(sid, environ):
    # environ holds the request info from the fourth handshake of the ws connection
    # (plain HTTP/1.1), so it can be required to carry a jwtoken from which each
    # user's user_id is extracted; minimal sketch only, the original snippet is
    # truncated here and the 'token' query parameter name is an assumption:
    token = Request(environ).args.get('token')
    if token:
        payload = jwt.decode(token, JWT_SECRET, algorithms=['HS256'])
        sio.enter_room(sid, str(payload.get('user_id')))
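
The module docstring above describes the producer side: Flask view functions push through a shared Kombu manager instead of calling this sio server directly. A minimal sketch of that side, reusing the broker URL from this file; the payload and room are illustrative placeholders:

import socketio

# write_only: this manager only publishes to the queue; the server above
# consumes the queue and delivers 'following notify' to the target room
push_mgr = socketio.KombuManager('amqp://*****:*****@localhost:5672/toutiao', write_only=True)
push_mgr.emit('following notify', {'user_id': 1}, room='42')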
Example #13
# Personal back end socket.io server
import socketio
import eventlet

eventlet.monkey_patch()

from flask import Flask, render_template

# run this to check the rabbitmq cluster/node status:
#
# sudo rabbitmqctl cluster_status
# To run the broker: brew services start rabbitmq, or rabbitmq-server
urimgr = 'amqp://*****:*****@localhost:5672/pythonsocket'
# urimgr = 'amqp://*****:*****@localhost:5672/'

mgr = socketio.KombuManager(urimgr)

sio = socketio.Server(client_manager=mgr)

app = Flask(__name__)

channelName = "streamEvents"


@app.route('/')
def index():
    """Serve the client-side application."""
    # return render_template('index.html')
    return '<h1>Hello Python Server</h1>'
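
The Flask app and the sio server above are never joined in this snippet. A minimal sketch of wiring them together, following the socketio.Middleware / socketio.WSGIApp pattern used in the other examples here:

# wrap the Flask WSGI app so Socket.IO traffic goes to sio and everything
# else falls through to the Flask routes
app.wsgi_app = socketio.WSGIApp(sio, app.wsgi_app)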

Example #14
# set async_mode to 'threading', 'eventlet', 'gevent' or 'gevent_uwsgi' to
# force a mode; otherwise the best mode is selected automatically from
# what's installed

import os
import socketio
from remotelogger.settings import BROKER_URL
from gevent import monkey
monkey.patch_all()

basedir = os.path.dirname(os.path.realpath(__file__))
sio = socketio.Server(client_manager=socketio.KombuManager(BROKER_URL),
                      logger=False)


@sio.on('my event', namespace='/test')
def test_message(sid, message):
    sio.emit('my response', {'data': message['data']},
             room=sid,
             namespace='/test')


@sio.on('my broadcast event', namespace='/test')
def test_broadcast_message(sid, message):
    sio.emit('my response', {'data': message['data']}, namespace='/test')


@sio.on('join', namespace='/test')
def join(sid, message):
    sio.enter_room(sid, message['room'], namespace='/test')
    sio.emit('my response', {'data': 'Entered room: ' + message['room']},
             room=sid, namespace='/test')
Example #15
def create_app(env_type, enable_config_file=False):
    """
    创建flask应用 并 初始化各组件

    :param env_type: 环境类型
    :param enable_config_file: 是否允许运行环境中的配置文件覆盖已加载的配置信息
    :return: flask应用
    """
    app = create_flask_app(env_type, enable_config_file)

    # add custom regex URL converters
    from utils.converters import register_converters
    register_converters(app)

    # create the redis sentinel
    from redis.sentinel import Sentinel
    _sentinel = Sentinel(app.config['REDIS_SENTINELS'])
    # get the redis master/slave connection objects
    app.redis_master = _sentinel.master_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])
    app.redis_slave = _sentinel.slave_for(app.config['REDIS_SENTINEL_SERVICE_NAME'])

    # create the redis cluster
    from rediscluster import StrictRedisCluster
    app.redis_cluster = StrictRedisCluster(startup_nodes=app.config['REDIS_CLUSTER'])

    # configure the MySQL database
    from models import db
    db.init_app(app)

    # configure logging
    from utils.logging import create_logger
    create_logger(app)

    # rate limiter
    from utils.limiter import limiter as lmt
    lmt.init_app(app)

    # create the Snowflake ID worker
    from utils.snowflake.id_worker import IdWorker
    app.id_worker = IdWorker(app.config['DATACENTER_ID'],
                             app.config['WORKER_ID'],
                             app.config['SEQUENCE'])

    # create the executor
    from apscheduler.executors.pool import ThreadPoolExecutor
    executor = ThreadPoolExecutor()
    # create the background job scheduler
    from apscheduler.schedulers.background import BackgroundScheduler
    app.scheduler = BackgroundScheduler(executors={'default': executor})
    from scheduler.cache_schedule import fix_statistic
    # add a scheduled job: sync the data at 3 a.m. every day
    # app.scheduler.add_job(fix_statistic, 'cron', hour=3)
    app.scheduler.add_job(fix_statistic, 'date', args=[app])
    # start the scheduler
    app.scheduler.start()
    
    # set up the grpc connection
    app.channel = grpc.insecure_channel(app.config['RPC'].RECOMMEND)

    # create the socketio message-queue manager (expects the flask app to run in production mode); messages are written to the message queue
    import socketio
    app.siomgr = socketio.KombuManager(app.config['RABBIT_MQ'])

    # create the es client
    from elasticsearch5 import Elasticsearch
    app.es = Elasticsearch(
        app.config['ES_HOST'],
        # sniff the es cluster nodes before starting
        sniff_on_start=True,
        # whether to refresh es node info when a node connection fails
        sniff_on_connection_fail=True,
        # refresh node info every 60 seconds
        sniffer_timeout=60
    )

    # add request hooks
    from utils.middlewares import jwt_authentication
    app.before_request(jwt_authentication)

    # register the user module blueprint
    from .resources.user import user_bp
    app.register_blueprint(user_bp)

    # register the news module blueprint
    from .resources.news import news_bp
    app.register_blueprint(news_bp)

    # register the search module blueprint
    from .resources.search import search_bp
    app.register_blueprint(search_bp)

    return app
Example #16
import os

import sentry_sdk
import socketio
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration

sentry_sdk.init(
    dsn=os.environ.get('SENTRY_DSN'),
    integrations=[FlaskIntegration()]
)

app = Flask(__name__, static_url_path='', static_folder='public', template_folder='public')
app.config['STATIC_FOLDER'] = 'public'

on_heroku = os.environ.get('ON_HEROKU')

if on_heroku == "True":
    mgr = socketio.KombuManager(os.environ.get('REDISCLOUD_URL'))
else:
    mgr = socketio.KombuManager('redis://localhost:6379/12')

@app.after_request
def after_request(response):
    if app.debug:
        print("Debug mode.")
        response.headers.add('Access-Control-Allow-Origin', '*')
        response.headers.add('Access-Control-Allow-Headers', 'Content-Type,Authorization')
        response.headers.add('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE,OPTIONS')
        return response
    else:
        return response
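
The manager selected above (Redis Cloud on Heroku, local Redis otherwise) still has to be attached to a Socket.IO server; a minimal sketch of that attachment:

# attaching the Kombu manager lets every web/worker process share emits
# through the selected Redis instance
sio = socketio.Server(client_manager=mgr)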

Example #17
from celery import Celery
import socketio
import os
import random
import time

# Initialize Celery
config = {}
config['CELERY_BROKER_URL'] = 'amqp://*****:*****@localhost:5672/'
config['CELERY_RESULT_BACKEND'] = 'amqp://*****:*****@localhost:5672/'
celery = Celery('tasks', broker=config['CELERY_BROKER_URL'])
celery.conf.update(config)

# connect to the RabbitMQ queue through Kombu
rabbitMq = socketio.KombuManager('amqp://', write_only=True)
# now, it is ready to emit the message as
# rabbitMq.emit('title', {'data': 'data here'}, namespace='/test')


@celery.task
def long_task():
    """Background task that runs a long function with progress reports."""
    verb = ['Starting up', 'Booting', 'Repairing', 'Loading', 'Checking']
    adjective = ['master', 'radiant', 'silent', 'harmonic', 'fast']
    noun = ['solar array', 'particle reshaper', 'cosmic ray', 'orbiter', 'bit']
    message = ''
    total = random.randint(10, 50)
    for i in range(total):
        if not message or random.random() < 0.5:
            message = '{0} {1} {2}...'.format(random.choice(verb),
                                              random.choice(adjective),
                                              random.choice(noun))
Example #18
import eventlet
import socketio

from giessomat import Relais
from giessomat import Database
from giessomat import Fans  # assumed import; Fans.Fans(...) is used below and the top of this file is cut off

eventlet.monkey_patch()

relais_light = Relais.Relais(23)
relais_irrigation = Relais.Relais(24)

db = Database.Database('/home/pi/Giess-o-mat/giessomat_db.db')

path_json = '/home/pi/Giess-o-mat/giessomat/processes.json'
path_l298n = '/home/pi/Giess-o-mat/giessomat/L298n.py'

fans = Fans.Fans(path_l298n, path_json)

mgr = socketio.KombuManager('amqp://')
sio = socketio.Server(cors_allowed_origins=[
    'http://localhost:5672', 'http://192.168.0.134:8080',
    'http://192.168.0.235:8080', 'http://192.168.1.149:8080'
],
                      client_manager=mgr)

app = socketio.WSGIApp(sio)


@sio.event
def connect(sid, environ):
    print('Client connect', sid)


@sio.event
Example #19
import datetime
import os
import socketio

from urllib.parse import parse_qs
from django.core.wsgi import get_wsgi_application
from django.conf import settings
from rest_framework.authtoken.models import Token

from app.models import Device, Broadcast, Poll, Vote, Timer

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'broadcast.settings')

mgr = socketio.KombuManager(settings.CELERY_BROKER_URL)
sio = socketio.Server(logger=True,
                      cors_allowed_origins='*',
                      client_manager=mgr,
                      engineio_logger=True)
application = socketio.WSGIApp(sio, get_wsgi_application())

Device.objects.all().delete()


@sio.event
def connect(sid, environ):
    print(f'connected: {sid=}')
    query = parse_qs(environ['QUERY_STRING'])
    token = query.get('token')[0]
    if token == 'celery':
        print('CELERY CONNECTED')
        return
Example #20
# server for online chat
# import socketio
#
# sio = socketio.Server(async_mode='eventlet')  # run in eventlet mode
# app = socketio.Middleware(sio)

# a server that can also push messages
import socketio

mgr = socketio.KombuManager('amqp://*****:*****@localhost:5672/toutiao')
sio = socketio.Server(client_manager=mgr, async_mode='eventlet')
app = socketio.Middleware(sio)