Example #1
def tornado(port):
    from tornado.wsgi import WSGIContainer
    from tornado.httpserver import HTTPServer
    from tornado.ioloop import IOLoop

    app = create_app()
    wsgi_container = WSGIContainer(app)
    wsgi_container._log = lambda *args, **kwargs: None
    http_server = HTTPServer(wsgi_container)
    http_server.listen(port)
    IOLoop.instance().start()
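The example above assumes an application factory named create_app(); a minimal sketch of what that factory might look like and how tornado() could be invoked (the Flask app, route, and port are assumptions, not part of the original snippet):

from flask import Flask


def create_app():
    # Hypothetical Flask application factory assumed by the example above.
    app = Flask(__name__)

    @app.route('/')
    def index():
        return 'served by Flask behind Tornado WSGIContainer'

    return app


if __name__ == '__main__':
    tornado(5000)  # tornado() is the function defined in the example above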
Example #2
File: app.py  Project: eea/ldaplog
def tornado(port):
    from tornado.web import Application, FallbackHandler
    from tornado.wsgi import WSGIContainer
    from tornado.httpserver import HTTPServer
    from tornado.ioloop import IOLoop

    app = flask.current_app
    wsgi_container = WSGIContainer(app)
    wsgi_container._log = lambda *args, **kwargs: None
    handlers = [('.*', FallbackHandler, {'fallback': wsgi_container})]
    tornado_app = Application(handlers, debug=app.debug)
    http_server = HTTPServer(tornado_app)
    http_server.listen(port)
    log.info("Hambar109 Tornado listening on port %r", port)
    IOLoop.instance().start()
Example #3
    def get(self, *args, **kwargs):
        # type: (*Any, **Any) -> None
        environ = WSGIContainer.environ(self.request)
        environ['PATH_INFO'] = urllib.parse.unquote(environ['PATH_INFO'])
        request = WSGIRequest(environ)
        request._tornado_handler = self

        set_script_prefix(get_script_name(environ))
        signals.request_started.send(sender=self.__class__)
        try:
            response = self.get_response(request)

            if not response:
                return
        finally:
            signals.request_finished.send(sender=self.__class__)

        self.set_status(response.status_code)
        for h in response.items():
            self.set_header(h[0], h[1])

        if not hasattr(self, "_new_cookies"):
            self._new_cookies = []  # type: List[http.cookie.SimpleCookie]
        self._new_cookies.append(response.cookies)

        self.write(response.content)
        self.finish()
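A handler like the one above is normally mounted as a catch-all route in a Tornado Application; a minimal sketch of that wiring, where DjangoHandler is a hypothetical name for the handler class the get() method above belongs to and the port is an assumption:

import tornado.ioloop
import tornado.web

application = tornado.web.Application([
    (r'.*', DjangoHandler),  # hypothetical name for the handler class above
])
application.listen(8888)
tornado.ioloop.IOLoop.current().start()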
Example #4
    def __init__(self, tornado_req, site_conf):
        super(TornadoRequest, self).__init__(
                WSGIContainer.environ(tornado_req)
                )
        self._native_request = tornado_req

        self.site = site_conf
Example #5
    def get(self):
        from tornado.wsgi import WSGIContainer
        from django.core.handlers.wsgi import WSGIRequest, get_script_name
        import urllib

        environ  = WSGIContainer.environ(self.request)
        environ['PATH_INFO'] = urllib.unquote(environ['PATH_INFO'])
        request  = WSGIRequest(environ)
        request._tornado_handler     = self

        set_script_prefix(get_script_name(environ))
        signals.request_started.send(sender=self.__class__)
        try:
            response = self.get_response(request)

            if not response:
                return
        finally:
            signals.request_finished.send(sender=self.__class__)

        self.set_status(response.status_code)
        for h in response.items():
            self.set_header(h[0], h[1])

        if not hasattr(self, "_new_cookies"):
            self._new_cookies = []
        self._new_cookies.append(response.cookies)

        self.write(response.content)
        self.finish()
Example #6
    def get_django_request(self):
        request = \
            WSGIRequest(WSGIContainer.environ(self.request))
        request.session = self.get_django_session()

        if self.current_user:
            request.user = self.current_user
        else:
            request.user = auth.models.AnonymousUser()
        return request
Example #7
    def __call__(self, request):
        data = {}
        response = []

        def start_response(status, response_headers, exc_info=None):
            data["status"] = status
            data["headers"] = response_headers
            return response.append

        app_response = self.wsgi_app(
            WSGIContainer.environ(request), start_response)
        try:
            response.extend(app_response)
            body = b"".join(response)
        finally:
            if hasattr(app_response, "close"):
                app_response.close()
        if not data:
            raise Exception("WSGI app did not call start_response")

        status_code, reason = data["status"].split(' ', 1)
        status_code = int(status_code)
        headers = data["headers"]
        header_set = set(k.lower() for (k, v) in headers)
        body = escape.utf8(body)

        if HEAD_END in body:
            body = body.replace(HEAD_END, self.script + HEAD_END)

        if status_code != 304:
            if "content-type" not in header_set:
                headers.append((
                    "Content-Type",
                    "application/octet-stream; charset=UTF-8"
                ))
            if "content-length" not in header_set:
                headers.append(("Content-Length", str(len(body))))

        if "server" not in header_set:
            headers.append(("Server", "LiveServer"))

        start_line = httputil.ResponseStartLine(
            "HTTP/1.1", status_code, reason
        )
        header_obj = httputil.HTTPHeaders()
        for key, value in headers:
            if key.lower() == 'content-length':
                value = str(len(body))
            header_obj.add(key, value)
        request.connection.write_headers(start_line, header_obj, chunk=body)
        request.connection.finish()
        self._log(status_code, request)
Example #8
    def get(self):
        from tornado.wsgi import HTTPRequest, WSGIContainer
        from django.core.handlers.wsgi import WSGIRequest, STATUS_CODE_TEXT
        import urllib

        environ = WSGIContainer.environ(self.request)
        environ["PATH_INFO"] = urllib.unquote(environ["PATH_INFO"])
        request = WSGIRequest(environ)

        request._tornado_handler = self

        set_script_prefix(base.get_script_name(environ))
        signals.request_started.send(sender=self.__class__)
        try:
            response = self.get_response(request)

            if not response:
                return

            # Apply response middleware
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
            response = self.apply_response_fixes(request, response)
        finally:
            signals.request_finished.send(sender=self.__class__)

        try:
            status_text = STATUS_CODE_TEXT[response.status_code]
        except KeyError:
            status_text = "UNKNOWN STATUS CODE"
        status = "%s %s" % (response.status_code, status_text)

        self.set_status(response.status_code)
        for h in response.items():
            self.set_header(h[0], h[1])

        """
        if not hasattr(self, "_new_cookies"):
            self._new_cookies = []
        self._new_cookies.append(response.cookies)
        """
        # Tornado 2.3 changed the _new_cookies attribute; it's no longer an array.
        # Revert to the old method of setting the Set-Cookie header directly.
        for c in response.cookies.values():
            self.set_header("Set-Cookie", str(c.output(header="")))

        self.write(response.content)
        self.finish()
Example #9
    def __call__(self, request):
        data = {}
        response = []

        def start_response(status, response_headers, exc_info=None):
            data["status"] = status
            data["headers"] = response_headers
            return response.append
        app_response = self.wsgi_application(
            WSGIContainer.environ(request), start_response)
        try:
            response.extend(app_response)
            body = b"".join(response)
        finally:
            if hasattr(app_response, "close"):
                app_response.close()
        if not data:
            raise Exception("WSGI app did not call start_response")

        status_code = int(data["status"].split()[0])
        headers = data["headers"]
        header_set = set(k.lower() for (k, v) in headers)
        body = escape.utf8(body)
        body = body.replace(
            b'</head>',
            b'<script src="/livereload.js"></script></head>'
        )

        if status_code != 304:
            if "content-length" not in header_set:
                headers.append(("Content-Length", str(len(body))))
            if "content-type" not in header_set:
                headers.append(("Content-Type", "text/html; charset=UTF-8"))
        if "server" not in header_set:
            headers.append(("Server", "livereload-tornado"))

        parts = [escape.utf8("HTTP/1.1 " + data["status"] + "\r\n")]
        for key, value in headers:
            if key.lower() == 'content-length':
                value = str(len(body))
            parts.append(
                escape.utf8(key) + b": " + escape.utf8(value) + b"\r\n"
            )
        parts.append(b"\r\n")
        parts.append(body)
        request.write(b"".join(parts))
        request.finish()
        self._log(status_code, request)
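The body-rewriting step above can be exercised on its own; a small self-contained sketch of the same replace() call that splices the live-reload script tag in front of </head>:

from tornado import escape

body = escape.utf8('<html><head><title>t</title></head><body>ok</body></html>')
body = body.replace(
    b'</head>',
    b'<script src="/livereload.js"></script></head>'
)
assert body.count(b'<script src="/livereload.js"></script></head>') == 1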
Example #10
    def get(self) :
        from tornado.wsgi import HTTPRequest, WSGIContainer
        from django.core.handlers.wsgi import WSGIRequest, STATUS_CODE_TEXT
        import urllib

        environ  = WSGIContainer.environ(self.request)
        environ['PATH_INFO'] = urllib.unquote(environ['PATH_INFO'])
        request  = WSGIRequest(environ)

        request._tornado_handler     = self

        set_script_prefix(base.get_script_name(environ))
        signals.request_started.send(sender=self.__class__)
        try:
            response = self.get_response(request)

            if not response :
                return 

            # Apply response middleware
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
            response = self.apply_response_fixes(request, response)
        finally:
            signals.request_finished.send(sender=self.__class__)

        try:
            status_text = STATUS_CODE_TEXT[response.status_code]
        except KeyError:
            status_text = 'UNKNOWN STATUS CODE'
        status = '%s %s' % (response.status_code, status_text)

        self.set_status(response.status_code)
        for h in response.items() :
            self.set_header(h[0], h[1])

        for c in response.cookies.values():
            self.set_header('Set-Cookie', str(c.output(header='')))

        """
        if  hasattr(self, "_new_cookies"):
            print self._new_cookies
        self._new_cookies = response.cookies
        """

        self.write(response.content)
        self.finish()
Example #11
    def __call__(self, request):
        parts = []
        parts_append = parts.append

        base_header = strftime('\r\nDate: %a, %d %b %Y %H:%M:%S GMT', gmtime()) + '\r\nServer: tornado\r\n'
        if not request.supports_http_1_1():
            if request.headers.get('Connection', '').lower() == 'keep-alive':
                base_header += 'Connection: Keep-Alive\r\n'

        def start_response(status, response_headers, exc_info=None):
            parts_append(utf8('HTTP/1.1 ' + status + base_header))
            for key, value in response_headers:
                parts_append(utf8(key + ': ' + value + '\r\n'))
            parts_append(self.new_line)
            return None

        environ = WSGIContainer.environ(request)
        environ['wsgi.multiprocess'] = False # Some EvalException middleware fails if set to True

        app_response = self.wsgi_application(environ, start_response)
        if not parts:
            raise Exception('WSGI app did not call start_response')

        if request.method != 'HEAD':
            parts.extend(app_response)

        if hasattr(app_response, 'close'):
            app_response.close()
        app_response = None

        if hasattr(request, "connection"):
            # Now that the request is finished, clear the callback we
            # set on the IOStream (which would otherwise prevent the
            # garbage collection of the RequestHandler when there
            # are keepalive connections)
            request.connection.stream.set_close_callback(None)

        request.write(self.empty_string.join(parts))
        try:
            request.finish()
        except IOError as e:
            self.logger.error('Exception when writing response: %s', str(e))
Example #12
 def wrapper(request, response):
     req = yield request.read()
     datas = yield application(WSGIContainer.environ(req), partial(start_response, response))
     for data in datas:
         response.write(data)
     response.close()
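The wrapper above relies on an application object and a start_response helper that the snippet does not show; a sketch of what such a helper might look like, assuming a response object with a status attribute and an add_header() method (every name here is an assumption, not part of the original):

def start_response(response, status, response_headers, exc_info=None):
    # Bound to the outgoing response via functools.partial in the wrapper above;
    # copies the WSGI status line and headers before the body chunks are written.
    response.status = status
    for name, value in response_headers:
        response.add_header(name, value)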
Example #13
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from api import app as apiapp

metaapi = HTTPServer(WSGIContainer(apiapp))
metaapi.listen(5000)
IOLoop.instance().start()
Example #14
File: server.py  Project: rayleyva/find
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from libraries.configuration import *

conf = getOptions()

from libraries.routing import *

print('running on Tornado with port ' + str(conf['port']))
if len(str(conf['port'])) < 1:
    print('Need to specify port!')
else:
    http_server = HTTPServer(WSGIContainer(app))
    print(conf['address'])
    http_server.listen(conf['port'])
    IOLoop.instance().start()

# Run with
# uwsgi --http 152.3.53.178 -w server --processes 2
Example #15
    def serve_phrase_index(self, index_port, args):
        dev_str = '_dev' if args.develop else ''
        args.examples_path = os.path.join(f'static{dev_str}',
                                          args.examples_path)
        args.top10_examples_path = os.path.join(f'static{dev_str}',
                                                args.top10_examples_path)

        # Load mips
        self.mips = self.load_phrase_index(args)
        app = Flask(__name__,
                    static_url_path='/static' + dev_str,
                    static_folder="static" + dev_str,
                    template_folder="templates" + dev_str)
        app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
        CORS(app)

        # From serve_utils
        cached_set = load_caches(args)
        index_example_set, search_examples, inverted_examples, query_entity_ids = parse_example(
            args)

        def batch_search(batch_query,
                         max_answer_length=20,
                         start_top_k=1000,
                         mid_top_k=100,
                         top_k=10,
                         doc_top_k=5,
                         nprobe=64,
                         sparse_weight=0.05,
                         search_strategy='hybrid',
                         aggregate=False):
            t0 = time()
            outs, _ = self.embed_query(batch_query)()
            start = np.concatenate([out[0] for out in outs], 0)
            end = np.concatenate([out[1] for out in outs], 0)
            sparse_uni = [out[2]['1'][1:len(out[3]) + 1] for out in outs]
            sparse_bi = [out[2]['2'][1:len(out[3]) + 1] for out in outs]
            input_ids = [out[3] for out in outs]
            query_vec = np.concatenate([start, end, [[1]] * len(outs)], 1)

            rets = self.mips.search(query_vec,
                                    (input_ids, sparse_uni, sparse_bi),
                                    q_texts=batch_query,
                                    nprobe=nprobe,
                                    doc_top_k=doc_top_k,
                                    start_top_k=start_top_k,
                                    mid_top_k=mid_top_k,
                                    top_k=top_k,
                                    search_strategy=search_strategy,
                                    filter_=args.filter,
                                    max_answer_length=max_answer_length,
                                    sparse_weight=sparse_weight,
                                    aggregate=aggregate)
            t1 = time()
            out = {'ret': rets, 'time': int(1000 * (t1 - t0))}
            return out

        @app.route('/')
        def index():
            return render_template(f'index.html')

        @app.route('/files/<path:path>')
        def static_files(path):
            return app.send_static_file('files/' + path)

        @app.route('/cached_example', methods=['GET'])
        def cached_example():
            start_time = time()
            q_id = request.args['q_id']
            res, query, query_info = get_cached(search_examples, q_id,
                                                query_entity_ids, cached_set)
            latency = time() - start_time
            latency = format(latency, ".3f")
            return render_template(f'cached.html',
                                   latency=latency,
                                   res=res,
                                   query=query,
                                   query_info=query_info)

        @app.route('/search', methods=['GET'])
        def search():
            query = request.args['query']
            params = {
                'strat':
                request.args['strat']
                if 'strat' in request.args else 'dense_first',
                'm_a_l':
                (int(request.args['max_answer_length']) if 'max_answer_length'
                 in request.args else int(args.max_answer_length)),
                't_k':
                int(request.args['top_k'])
                if 'top_k' in request.args else int(args.top_k),
                'n_p':
                int(request.args['nprobe'])
                if 'nprobe' in request.args else int(args.nprobe),
                'd_t_k':
                int(request.args['doc_top_k'])
                if 'doc_top_k' in request.args else int(args.doc_top_k),
                's_w': (float(request.args['sparse_weight']) if 'sparse_weight'
                        in request.args else float(args.sparse_weight)),
                'a_g': (request.args['aggregate'] == 'True')
                if 'aggregate' in request.args else True
            }
            logger.info(f'{params["strat"]} search strategy is used.')

            out = batch_search(
                [query],
                max_answer_length=params['m_a_l'],
                top_k=params['t_k'],
                nprobe=params['n_p'],
                search_strategy=params['strat'],  # [DFS, SFS, Hybrid]
                doc_top_k=params['d_t_k'],
                sparse_weight=params['s_w'],
                aggregate=params['a_g'])
            out['ret'] = out['ret'][0]
            # out['ret'] = out['ret'][:3] # Get top 3 only
            b_out = self.best_search(query, kcw_path=args.examples_path)

            res, query, query_info = get_search(inverted_examples,
                                                search_examples,
                                                query_entity_ids, query, out,
                                                b_out)
            return render_template(f'search.html',
                                   latency=out['time'],
                                   res=res,
                                   query=query,
                                   query_info=query_info,
                                   params=params)

        # This one uses a default hyperparameters
        @app.route('/api', methods=['GET'])
        def api():
            query = request.args['query']
            strat = request.args[
                'strat'] if 'strat' in request.args else 'dense_first'
            out = batch_search([query],
                               max_answer_length=args.max_answer_length,
                               top_k=args.top_k,
                               nprobe=args.nprobe,
                               search_strategy=strat,
                               doc_top_k=args.doc_top_k)
            out['ret'] = out['ret'][0]
            return jsonify(out)

        @app.route('/batch_api', methods=['POST'])
        def batch_api():
            batch_query = json.loads(request.form['query'])
            max_answer_length = int(request.form['max_answer_length'])
            start_top_k = int(request.form['start_top_k'])
            mid_top_k = int(request.form['mid_top_k'])
            top_k = int(request.form['top_k'])
            doc_top_k = int(request.form['doc_top_k'])
            nprobe = int(request.form['nprobe'])
            sparse_weight = float(request.form['sparse_weight'])
            strat = request.form['strat']
            out = batch_search(batch_query,
                               max_answer_length=max_answer_length,
                               start_top_k=start_top_k,
                               mid_top_k=mid_top_k,
                               top_k=top_k,
                               doc_top_k=doc_top_k,
                               nprobe=nprobe,
                               sparse_weight=sparse_weight,
                               search_strategy=strat,
                               aggregate=args.aggregate)
            return jsonify(out)

        @app.route('/get_examples', methods=['GET'])
        def get_examples():
            return render_template(f'example.html', res=index_example_set)

        @app.route('/set_query_port', methods=['GET'])
        def set_query_port():
            self.query_port = request.args['query_port']
            return jsonify(f'Query port set to {self.query_port}')

        if self.query_port is None:
            logger.info(
                'You must set self.query_port for querying. You can use self.update_query_port() later on.'
            )
        logger.info(f'Starting Index server at {self.get_address(index_port)}')
        http_server = HTTPServer(WSGIContainer(app))
        http_server.listen(index_port)
        IOLoop.instance().start()
Example #16
# When using the flask shell command, the variables below are available in the terminal without importing them one by one
@app.shell_context_processor
def make_shell_context():
    return dict(db=db, User=User, Follow=Follow, Role=Role,
                Permission=Permission, Post=Post, Comment=Comment)


@app.cli.command()
def test():
    """Run the unit tests."""
    import unittest
    tests = unittest.TestLoader().discover('tests')
    unittest.TextTestRunner(verbosity=2).run(tests)

# On Windows, the terminal bundled with Git can run commands such as flask shell;
# cmder cannot run those commands.
# Variables that need to be set in the terminal: FLASK_APP=..., FLASK_DEBUG=1

if __name__ == '__main__':
    # create_all() only creates tables that do not already exist in the database.
    # To recreate a table with the same name, the simplest way is to call drop_all() to drop all tables and then create them again.
    db.app = app
    db.create_all()
    # Wrap the Flask app with Tornado's WSGIContainer
    tornado_wraped = WSGIContainer(app)
    # Configure the Tornado application, e.g. the static file path
    application = Application([(r".*", FallbackHandler, dict(fallback=tornado_wraped)),], static_path = "./app/static") 
    # Start the server
    application.listen(80)
    IOLoop.instance().start()
Example #17
# r = interRedis.interRedis()
# r.init_group_name(bot)
# Initialise group member nicknames
# TODO

# Initialise global variables
initGlobValue.init(globValue, gV_Lock)
logging.info('初始化完成!')

# Scheduled jobs
sched_t = threading.Thread(target=job.jobCenter,
                           args=(bot, globValue, gV_Lock))
sched_t.setDaemon(True)
sched_t.start()

stats.stats_sched()

# ircbot
irc = threading.Thread(target=ircbot.start, args=(bot, ))
irc.start()

logging.info('interbot各种加载完成!!')

# Start listening
# bot.run(host='127.0.0.1', port=8889)

# Serve through WSGI
http_server = HTTPServer(WSGIContainer(bot.wsgi))
http_server.listen(8889)
IOLoop.instance().start()
Example #18
def run():
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(5500)
    IOLoop.instance().start()
Example #19
        router.mapper('anonymous', TestDocManip, TestWrapper)


# Create the Flask App that will let us do http requests
app = flask.Flask(__name__,
                  template_folder=TEMPLATE_ASSETS_PATH,
                  static_folder=STATIC_ASSETS_PATH)


@app.route("/")
def route_index():
    return flask.render_template("index.html")


# Make the server with Flask embedded
wsgi_app = WSGIContainer(app)
server = TornadoServer(engine_class=MyEngine, wsgi_fallback_handler=wsgi_app)


# Just so we can exit cleanly
def handle_signal(sig, frame):
    IOLoop.instance().add_callback(IOLoop.instance().stop)
    server.shutdown()


signal.signal(signal.SIGINT, handle_signal)
signal.signal(signal.SIGTERM, handle_signal)

# Start the server
server.run()
Example #20
# coding:utf-8
# File Name: tirnado.py
# Created Date: 2018-04-16 11:10:26
# Last modified: 2018-04-16 11:19:05
# Author: yeyong
from tornado.wsgi import WSGIContainer
from tornado.ioloop import IOLoop
from tornado.web import FallbackHandler, RequestHandler, Application
from tornado.httpserver import HTTPServer
from uwsgi import app


class MainHandler(RequestHandler):
    def get(self):
        self.write(dict(msg="好久不见甚是想念", code=200))


flask_route = WSGIContainer(app)
application = Application([(r"/", MainHandler),
                           (r".*", FallbackHandler, dict(fallback=flask_route))
                           ])

if __name__ == "__main__":
    application.listen("9001", address="0.0.0.0")
    IOLoop.instance().start()
Example #21
File: manage.py  Project: gamerlv/mmq
def runserver():
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(5000, address='0.0.0.0')
    IOLoop.instance().start()
Example #22
 def run(self):
     container = WSGIContainer(self._wsgi_func)
     http_server = HTTPServer(container)
     http_server.listen(self.config.web_server.port)
     IOLoop.instance().start()
Example #23
def main():
    """
    Main entry point for the application
    """

    # CLI arguments
    parser = argparse.ArgumentParser(description='AWS Web Server')
    parser.add_argument('port', metavar='Port', type=int, nargs='?', default=8080, help='port to run the application')
    parser.add_argument('--env', '-e', dest='environment', action='store', default='dev', help='type of environment')
    parser.add_argument('--tornado', '-t', dest='tornado', action='store_true', help='run the server as tornado wsgi')
    parser.add_argument('--ssl', '-s', dest='use_ssl', action='store_true', help='run server with ssl certs')
    parser.add_argument('--ssl-certfile', '-c', dest='ssl_certfile', action='store', default='server.crt', help='ssl certificate file')
    parser.add_argument('--ssl-keyfile', '-k', dest='ssl_keyfile', action='store', default='server.key', help='ssl key file')
    parser.add_argument('--upload-s3', '-s3', dest='upload_s3', action='store_true', help='deploy s3 assets to AWS')
    parser.add_argument('--create-tables', '-ct', dest='create_tables', action='store_true', help='creates dynamodb tables in AWS')
    args = parser.parse_args()

    # Configure logging
    log_level = logging.INFO if args.environment == 'prod' else logging.DEBUG
    logging.basicConfig(format='[%(levelname)s]: %(message)s', level=log_level)

    # Create the app
    logging.info('Creating application environment: %s' % args.environment)
    app = create_app(env=args.environment)

    # Start app in tornado wsgi container
    if args.tornado:

        if HAS_TORNADO:
            try:
                logging.info('Starting Tornado Server on port %d' % args.port)
                if args.use_ssl:
                    ssl_options = {
                        'certfile': os.path.join(args.ssl_certfile), 
                        'keyfile': os.path.join(args.ssl_keyfile)
                    }
                else:
                    ssl_options=None
                
                http_server = HTTPServer(WSGIContainer(app), ssl_options=ssl_options)
                http_server.listen(args.port)
                IOLoop.instance().start()
            except KeyboardInterrupt as e:
                logging.info('Stopping Tornado Server by Ctrl+C')
        else:
            logging.warning('Failed to start Tornado server. Tornado not installed')
    elif args.upload_s3:
        logging.info('Uploading to S3...')
        import flask_s3
        flask_s3.create_all(app)
        logging.info('Upload complete.')
    elif args.create_tables:
        logging.info('Creating Dynamodb Tables')
        from magic.models import create_tables
        create_tables()
        logging.info('Table creation complete')
    else:
        logging.info('Starting Flask Internal (dev) Server')
        app.run(port=args.port)
        logging.info('Stopping Flask Internal (dev) Server')

    logging.info('Shutting down...')
    return 0
Example #24
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from webapp import create_app

app = WSGIContainer(create_app("webapp.config.ProdConfig"))
http_server = HTTPServer(app)
http_server.listen(80)
IOLoop.instance().start()
Example #25
def runCouchPotato(options, base_path, args, data_dir=None, log_dir=None, Env=None, desktop=None):

    try:
        locale.setlocale(locale.LC_ALL, "")
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ("ANSI_X3.4-1968", "US-ASCII", "ASCII"):
        encoding = "UTF-8"

    # Do db stuff
    db_path = os.path.join(data_dir, "couchpotato.db")

    # Backup before start and cleanup old databases
    new_backup = os.path.join(data_dir, "db_backup", str(int(time.time())))

    # Create path and copy
    if not os.path.isdir(new_backup):
        os.makedirs(new_backup)
    src_files = [options.config_file, db_path, db_path + "-shm", db_path + "-wal"]
    for src_file in src_files:
        if os.path.isfile(src_file):
            shutil.copy2(src_file, os.path.join(new_backup, os.path.basename(src_file)))

    # Remove older backups, keep backups 3 days or at least 3
    backups = []
    for directory in os.listdir(os.path.dirname(new_backup)):
        backup = os.path.join(os.path.dirname(new_backup), directory)
        if os.path.isdir(backup):
            backups.append(backup)

    total_backups = len(backups)
    for backup in backups:
        if total_backups > 3:
            if int(os.path.basename(backup)) < time.time() - 259200:
                for src_file in src_files:
                    b_file = os.path.join(backup, os.path.basename(src_file))
                    if os.path.isfile(b_file):
                        os.remove(b_file)
                os.rmdir(backup)
                total_backups -= 1

    # Register environment settings
    Env.set("encoding", encoding)
    Env.set("app_dir", base_path)
    Env.set("data_dir", data_dir)
    Env.set("log_path", os.path.join(log_dir, "CouchPotato.log"))
    Env.set("db_path", "sqlite:///" + db_path)
    Env.set("cache_dir", os.path.join(data_dir, "cache"))
    Env.set("cache", FileSystemCache(os.path.join(Env.get("cache_dir"), "python")))
    Env.set("console_log", options.console_log)
    Env.set("quiet", options.quiet)
    Env.set("desktop", desktop)
    Env.set("args", args)
    Env.set("options", options)

    # Determine debug
    debug = options.debug or Env.setting("debug", default=False, type="bool")
    Env.set("debug", debug)

    # Development
    development = Env.setting("development", default=False, type="bool")
    Env.set("dev", development)

    # Disable logging for some modules
    for logger_name in ["enzyme", "guessit", "subliminal", "apscheduler"]:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ["gntp", "migrate"]:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Use reloader
    reloader = debug is True and development and not Env.get("desktop") and not options.daemon

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter("%(asctime)s %(levelname)s %(message)s", "%m-%d %H:%M:%S")
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    logging.addLevelName(19, "INFO")

    # To screen
    if (debug or options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file
    hdlr2 = handlers.RotatingFileHandler(Env.get("log_path"), "a", 500000, 10)
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    import color_logs
    from couchpotato.core.logger import CPLog

    log = CPLog(__name__)
    log.debug("Started with options %s", options)

    def customwarn(message, category, filename, lineno, file=None, line=None):
        log.warning("%s %s %s line:%s", (category, message, filename, lineno))

    warnings.showwarning = customwarn

    # Check if database exists
    db = Env.get("db_path")
    db_exists = os.path.isfile(db_path)

    # Load configs & plugins
    loader = Env.get("loader")
    loader.preload(root=base_path)
    loader.run()

    # Load migrations
    if db_exists:

        from migrate.versioning.api import version_control, db_version, version, upgrade

        repo = os.path.join(base_path, "couchpotato", "core", "migration")

        latest_db_version = version(repo)
        try:
            current_db_version = db_version(db, repo)
        except:
            version_control(db, repo, version=latest_db_version)
            current_db_version = db_version(db, repo)

        if current_db_version < latest_db_version and not debug:
            log.info("Doing database upgrade. From %d to %d", (current_db_version, latest_db_version))
            upgrade(db, repo)

    # Configure Database
    from couchpotato.core.settings.model import setup

    setup()

    # Fill database with needed stuff
    if not db_exists:
        fireEvent("app.initialize", in_order=True)

    # Create app
    from couchpotato import app

    api_key = Env.setting("api_key")
    url_base = "/" + Env.setting("url_base").lstrip("/") if Env.setting("url_base") else ""

    # Basic config
    app.secret_key = api_key
    # app.debug = development
    config = {
        "use_reloader": reloader,
        "host": Env.setting("host", default="0.0.0.0"),
        "port": tryInt(Env.setting("port", default=5000)),
    }

    # Static path
    app.static_folder = os.path.join(base_path, "couchpotato", "static")
    web.add_url_rule("api/%s/static/<path:filename>" % api_key, endpoint="static", view_func=app.send_static_file)

    # Register modules
    app.register_blueprint(web, url_prefix="%s/" % url_base)
    app.register_blueprint(api, url_prefix="%s/api/%s/" % (url_base, api_key))

    # Some logging and fire load event
    try:
        log.info("Starting server on port %(port)s", config)
    except:
        pass
    fireEventAsync("app.load")

    # Go go go!
    web_container = WSGIContainer(app)
    web_container._log = _log
    loop = IOLoop.instance()

    application = Application(
        [
            (r"%s/api/%s/nonblock/(.*)/" % (url_base, api_key), NonBlockHandler),
            (r".*", FallbackHandler, dict(fallback=web_container)),
        ],
        log_function=lambda x: None,
        debug=config["use_reloader"],
    )

    try_restart = True
    restart_tries = 5

    while try_restart:
        try:
            application.listen(config["port"], config["host"], no_keep_alive=True)
            loop.start()
        except Exception, e:
            try:
                nr, msg = e
                if nr == 48:
                    log.info("Already in use, try %s more time after few seconds", restart_tries)
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except:
                pass

            raise

        try_restart = False
Example #26
 def wrapper(request, response):
     req = yield request.read()
     for data in application(WSGIContainer.environ(req), functools.partial(start_response, response)):
         response.write(data)
     response.close()
Example #27
def runserver(port_opt, config, token_opt, secret_opt, rules_opt, dry,
              temperature_opt):
    logger = app.logger
    setup_logging(logger, logging.INFO)

    # Start configuration

    port = 5000
    token = ""
    secret = ""
    rules_path_tmp = "rules.bin"
    temperature_path_tmp = "temperature.json"
    root_display_name = "root"
    mcp23017_address_a = 0
    mcp23017_address_b = 0
    mcp23017_reset_a = 0
    mcp23017_reset_b = 0

    register_devices = None

    # Loading config
    if os.path.isfile(config):
        with open(config, encoding="utf-8") as f:
            code = compile(f.read(), config, 'exec')

            lcl = {}

            exec(code, globals(), lcl)

            port = lcl['port']
            token = lcl['token']
            secret = lcl['secret']
            register_devices = lcl['register_devices']
            root_display_name = lcl['root_display_name']
            mcp23017_address_a = lcl['mcp23017_address_a']
            mcp23017_address_b = lcl['mcp23017_address_b']
            mcp23017_reset_a = lcl['mcp23017_reset_a']
            mcp23017_reset_b = lcl['mcp23017_reset_b']

    if port_opt:
        port = port_opt

    if token_opt:
        token = token_opt

    if secret_opt:
        secret = secret_opt

    if rules_opt:
        rules_path_tmp = rules_opt

    if temperature_opt:
        temperature_path_tmp = temperature_opt

    logger.info(
        "Configuration %s" % {
            'port': port,
            'config': config,
            'token': token,
            'secret': secret,
            'rules_path': rules_path_tmp,
            'mcp23017_address_a': mcp23017_address_a,
            'mcp23017_address_b': mcp23017_address_b,
            'mcp23017_reset_a': mcp23017_reset_a,
            'mcp23017_reset_b': mcp23017_reset_b
        })

    # Start initialising

    logger.info("Initialising hardware interface")
    global control

    if dry:
        from garden_lighting.light_control_dummy import LightControl
    else:
        from garden_lighting.light_control import LightControl

    control = LightControl(timedelta(seconds=3), logger, mcp23017_address_a,
                           mcp23017_reset_a, mcp23017_address_b,
                           mcp23017_reset_b)

    try:
        control.init()
    except Exception as e:
        logger.exception(e)
        logger.error("Failed to initialise hardware!")
        return

    global devices
    devices = new_group(root_display_name, "root")

    logger.info("Initialising scheduling thread")
    global scheduler
    from garden_lighting.web.scheduler import DeviceScheduler

    scheduler = DeviceScheduler(0.5, devices, control, logger)

    if register_devices:
        register_devices(devices)

    # Start web stuff

    global auth
    auth = Auth(token, logger)

    global rules_path
    rules_path = rules_path_tmp

    global temperature_path
    temperature_path = temperature_path_tmp

    scheduler.read(rules_path, devices)

    app.secret_key = secret

    from garden_lighting.web.api import api

    app.register_blueprint(api)

    from garden_lighting.web.control import control as controls

    app.register_blueprint(controls)

    from garden_lighting.web.lights import lights

    app.register_blueprint(lights)

    from garden_lighting.web.temperature import temperature

    app.register_blueprint(temperature)

    logger.info("Starting scheduling thread")
    thread = Thread(target=run)
    thread.start()

    http_server = HTTPServer(WSGIContainer(app))

    try:
        http_server.listen(port)
        try:
            IOLoop.instance().start()
        except KeyboardInterrupt:
            IOLoop.instance().stop()
    except Exception as e:
        logger.exception(e)

    shutdown(thread)
Example #28
from tornado.httpserver import HTTPServer
from tornado.wsgi import WSGIContainer
from app import app
from tornado.ioloop import IOLoop

#s = HTTPServer(WSGIContainer(app),ssl_context=('../static/ssl/cert.pem','../static/ssl/pkey.pem'))
##s.bind(443, "0.0.0.0")
#s.listen(9900)
#IOLoop.current().start()
s = HTTPServer(WSGIContainer(app))
s.listen(9900)
IOLoop.current().start()
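The commented-out line above passes ssl_context, which looks like the keyword Flask's app.run() uses; Tornado's HTTPServer takes ssl_options instead, as Examples #23 and #34 do. A minimal sketch of the HTTPS variant, reusing the certificate paths from the comment (the port is an assumption):

from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.wsgi import WSGIContainer

from app import app  # same import as the example above

ssl_options = {
    'certfile': '../static/ssl/cert.pem',
    'keyfile': '../static/ssl/pkey.pem',
}
s = HTTPServer(WSGIContainer(app), ssl_options=ssl_options)
s.listen(443)
IOLoop.current().start()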
Example #29
def runserver():
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(options.port)
    print "Server runing on http://0.0.0.0:%d" % options.port
    IOLoop.instance().start()
Example #30
 def wrapper(request, response):
     req = yield request.read()
     for data in application(WSGIContainer.environ(req), functools.partial(start_response, response)):
         response.write(data)
     response.close()
Example #31
File: api.py  Project: damoos3/kekdaqd
    def run(self):
        db = util.connect_to_db(flags='SQLITE_OPEN_READONLY')
        app = flask.Flask(__name__)
        auth = HTTPBasicAuth()

        @auth.get_password
        def get_pw(username):
            if username == config.RPC_USER:
                return config.RPC_PASSWORD
            return None        

        ######################
        #READ API

        # Generate dynamically get_{table} methods
        def generate_get_method(table):
            def get_method(**kwargs):
                return get_rows(db, table=table, **kwargs)
            return get_method

        for table in API_TABLES:
            new_method = generate_get_method(table)
            new_method.__name__ = 'get_{}'.format(table)
            dispatcher.add_method(new_method)

        @dispatcher.add_method
        def sql(query, bindings=[]):
            return db_query(db, query, tuple(bindings))


        ######################
        #WRITE/ACTION API

        # Generate dynamically create_{transaction} and do_{transaction} methods
        def generate_create_method(transaction):

            def split_params(**kwargs):
                transaction_args = {}
                common_args = {}
                private_key_wif = None
                for key in kwargs:
                    if key in COMMONS_ARGS:
                        common_args[key] = kwargs[key]
                    elif key == 'privkey':
                        private_key_wif = kwargs[key]
                    else:
                        transaction_args[key] = kwargs[key]
                return transaction_args, common_args, private_key_wif

            def create_method(**kwargs):
                transaction_args, common_args, private_key_wif = split_params(**kwargs)
                return compose_transaction(db, name=transaction, params=transaction_args, **common_args)

            def do_method(**kwargs):
                transaction_args, common_args, private_key_wif = split_params(**kwargs)
                return do_transaction(db, name=transaction, params=transaction_args, private_key_wif=private_key_wif, **common_args)

            return create_method, do_method

        for transaction in API_TRANSACTIONS:
            create_method, do_method = generate_create_method(transaction)
            create_method.__name__ = 'create_{}'.format(transaction)
            do_method.__name__ = 'do_{}'.format(transaction)
            dispatcher.add_method(create_method)
            dispatcher.add_method(do_method)

        @dispatcher.add_method
        def sign_tx(unsigned_tx_hex, privkey=None):
            return sign_transaction(unsigned_tx_hex, private_key_wif=privkey)

        @dispatcher.add_method
        def broadcast_tx(signed_tx_hex):
            return broadcast_transaction(signed_tx_hex)

        @dispatcher.add_method
        def get_messages(block_index):
            if not isinstance(block_index, int):
                raise Exception("block_index must be an integer.")

            cursor = db.cursor()
            cursor.execute('select * from messages where block_index = ? order by message_index asc', (block_index,))
            messages = cursor.fetchall()
            cursor.close()
            return messages

        @dispatcher.add_method
        def get_messages_by_index(message_indexes):
            """Get specific messages from the feed, based on the message_index.

            @param message_index: A single index, or a list of one or more message indexes to retrieve.
            """
            if not isinstance(message_indexes, list):
                message_indexes = [message_indexes,]
            for idx in message_indexes:  #make sure the data is clean
                if not isinstance(idx, int):
                    raise Exception("All items in message_indexes are not integers")

            cursor = db.cursor()
            cursor.execute('SELECT * FROM messages WHERE message_index IN (%s) ORDER BY message_index ASC'
                % (','.join([str(x) for x in message_indexes]),))
            messages = cursor.fetchall()
            cursor.close()
            return messages

        @dispatcher.add_method
        def get_xcp_supply():
            return util.xcp_supply(db)

        @dispatcher.add_method
        def get_asset_info(assets):
            if not isinstance(assets, list):
                raise Exception("assets must be a list of asset names, even if it just contains one entry")
            assetsInfo = []
            for asset in assets:

                # BTC and XCP.
                if asset in [config.BTC, config.XCP]:
                    if asset == config.BTC:
                        supply = bitcoin.get_btc_supply(normalize=False)
                    else:
                        supply = util.xcp_supply(db)

                    assetsInfo.append({
                        'asset': asset,
                        'owner': None,
                        'divisible': True,
                        'locked': False,
                        'supply': supply,
                        'callable': False,
                        'call_date': None,
                        'call_price': None,
                        'description': '',
                        'issuer': None
                    })
                    continue

                # User‐created asset.
                cursor = db.cursor()
                issuances = list(cursor.execute('''SELECT * FROM issuances WHERE (status = ? AND asset = ?) ORDER BY block_index ASC''', ('valid', asset)))
                cursor.close()
                if not issuances: break #asset not found, most likely
                else: last_issuance = issuances[-1]
                supply = 0
                locked = False
                for e in issuances:
                    if e['locked']: locked = True
                    supply += e['quantity']
                assetsInfo.append({
                    'asset': asset,
                    'owner': last_issuance['issuer'],
                    'divisible': bool(last_issuance['divisible']),
                    'locked': locked,
                    'supply': supply,
                    'callable': bool(last_issuance['callable']),
                    'call_date': last_issuance['call_date'],
                    'call_price': last_issuance['call_price'],
                    'description': last_issuance['description'],
                    'issuer': last_issuance['issuer']})
            return assetsInfo

        @dispatcher.add_method
        def get_block_info(block_index):
            assert isinstance(block_index, int)
            cursor = db.cursor()
            cursor.execute('''SELECT * FROM blocks WHERE block_index = ?''', (block_index,))
            try:
                blocks = list(cursor)
                assert len(blocks) == 1
                block = blocks[0]
            except IndexError:
                raise exceptions.DatabaseError('No blocks found.')
            cursor.close()
            return block
        
        @dispatcher.add_method
        def get_blocks(block_indexes):
            """fetches block info and messages for the specified block indexes"""
            if not isinstance(block_indexes, (list, tuple)):
                raise Exception("block_indexes must be a list of integers.")
            if len(block_indexes) >= 250:
                raise Exception("can only specify up to 250 indexes at a time.")

            block_indexes_str = ','.join([str(x) for x in block_indexes])
            cursor = db.cursor()
            
            cursor.execute('SELECT * FROM blocks WHERE block_index IN (%s) ORDER BY block_index ASC'
                % (block_indexes_str,))
            blocks = cursor.fetchall()
                
            cursor.execute('SELECT * FROM messages WHERE block_index IN (%s) ORDER BY block_index ASC, message_index ASC'
                % (block_indexes_str,))
            messages = collections.deque(cursor.fetchall())
            
            for block in blocks:
                messages_in_block = []
                block['_messages'] = []
                while len(messages) and messages[0]['block_index'] == block['block_index']:
                    block['_messages'].append(messages.popleft())
            
            cursor.close()
            return blocks

        @dispatcher.add_method
        def get_running_info():
            latestBlockIndex = bitcoin.get_block_count()

            try:
                util.database_check(db, latestBlockIndex)
            except exceptions.DatabaseError as e:
                caught_up = False
            else:
                caught_up = True

            try:
                last_block = util.last_block(db)
            except:
                last_block = {'block_index': None, 'block_hash': None, 'block_time': None}

            try:
                last_message = util.last_message(db)
            except:
                last_message = None

            return {
                'db_caught_up': caught_up,
                'bitcoin_block_count': latestBlockIndex,
                'last_block': last_block,
                'last_message_index': last_message['message_index'] if last_message else -1,
                'running_testnet': config.TESTNET,
                'running_testcoin': config.TESTCOIN,
                'version_major': config.VERSION_MAJOR,
                'version_minor': config.VERSION_MINOR,
                'version_revision': config.VERSION_REVISION
            }

        @dispatcher.add_method
        def get_element_counts():
            counts = {}
            cursor = db.cursor()
            for element in ['transactions', 'blocks', 'debits', 'credits', 'balances', 'sends', 'orders',
                'order_matches', 'btcpays', 'issuances', 'broadcasts', 'bets', 'bet_matches', 'dividends',
                'burns', 'cancels', 'callbacks', 'order_expirations', 'bet_expirations', 'order_match_expirations',
                'bet_match_expirations', 'messages']:
                cursor.execute("SELECT COUNT(*) AS count FROM %s" % element)
                count_list = cursor.fetchall()
                assert len(count_list) == 1
                counts[element] = count_list[0]['count']
            cursor.close()
            return counts

        @dispatcher.add_method
        def get_asset_names():
            cursor = db.cursor()
            names = [row['asset'] for row in cursor.execute("SELECT DISTINCT asset FROM issuances WHERE status = 'valid' ORDER BY asset ASC")]
            cursor.close()
            return names

        def _set_cors_headers(response):
            if config.RPC_ALLOW_CORS:
                response.headers['Access-Control-Allow-Origin'] = '*'
                response.headers['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS'
                response.headers['Access-Control-Allow-Headers'] = 'DNT,X-Mx-ReqToken,Keep-Alive,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type';
    
        @app.route('/', methods=["OPTIONS",])
        @app.route('/api/', methods=["OPTIONS",])
        def handle_options():
            response = flask.Response('', 204)
            _set_cors_headers(response)
            return response

        @app.route('/', methods=["POST",])
        @app.route('/api/', methods=["POST",])
        @auth.login_required
        def handle_post():
            try:
                request_json = flask.request.get_data().decode('utf-8')
                request_data = json.loads(request_json)
                assert 'id' in request_data and request_data['jsonrpc'] == "2.0" and request_data['method']
                # params may be omitted 
            except:
                obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest(data="Invalid JSON-RPC 2.0 request format")
                return flask.Response(obj_error.json.encode(), 200, mimetype='application/json')
            
            #only arguments passed as a dict are supported
            if request_data.get('params', None) and not isinstance(request_data['params'], dict):
                obj_error = jsonrpc.exceptions.JSONRPCInvalidRequest(
                    data='Arguments must be passed as a JSON object (list of unnamed arguments not supported)')
                return flask.Response(obj_error.json.encode(), 200, mimetype='application/json')
            
            #return an error if API fails checks
            if not config.FORCE and current_api_status_code:
                return flask.Response(current_api_status_response_json, 200, mimetype='application/json')

            jsonrpc_response = jsonrpc.JSONRPCResponseManager.handle(request_json, dispatcher)
            response = flask.Response(jsonrpc_response.json.encode(), 200, mimetype='application/json')
            _set_cors_headers(response)
            return response

        if not config.UNITTEST:  #skip setting up logs when for the test suite
            api_logger = logging.getLogger("tornado")
            h = logging_handlers.RotatingFileHandler(os.path.join(config.DATA_DIR, "api.access.log"), 'a', API_MAX_LOG_SIZE, API_MAX_LOG_COUNT)
            api_logger.setLevel(logging.INFO)
            api_logger.addHandler(h)
            api_logger.propagate = False

        http_server = HTTPServer(WSGIContainer(app), xheaders=True)
        try:
            http_server.listen(config.RPC_PORT, address=config.RPC_HOST)
            IOLoop.instance().start()        
        except OSError:
            raise Exception("Cannot start the API subsystem. Is {} already running, or is something else listening on port {}?".format(config.XCP_CLIENT, config.RPC_PORT))
Example #32
    def run(self):
        self.ioloop = IOLoop.instance()
        self.alive = True
        self.server_alive = False
        if IOLOOP_PARAMETER_REMOVED:
            PeriodicCallback(self.watchdog, 1000).start()
            PeriodicCallback(self.heartbeat, 1000).start()
        else:
            PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
            PeriodicCallback(self.heartbeat, 1000, io_loop=self.ioloop).start()

        # Assume the app is a WSGI callable if it's not an
        # instance of tornado.web.Application or is an
        # instance of tornado.wsgi.WSGIApplication
        app = self.wsgi
        if not isinstance(app, tornado.web.Application) or \
           isinstance(app, tornado.wsgi.WSGIApplication):
            app = WSGIContainer(app)

        # Monkey-patching HTTPConnection.finish to count the
        # number of requests being handled by Tornado. This
        # will help gunicorn shutdown the worker if max_requests
        # is exceeded.
        httpserver = sys.modules["tornado.httpserver"]
        if hasattr(httpserver, 'HTTPConnection'):
            old_connection_finish = httpserver.HTTPConnection.finish

            def finish(other):
                self.handle_request()
                old_connection_finish(other)

            httpserver.HTTPConnection.finish = finish
            sys.modules["tornado.httpserver"] = httpserver

            server_class = tornado.httpserver.HTTPServer
        else:

            class _HTTPServer(tornado.httpserver.HTTPServer):
                def on_close(instance, server_conn):
                    self.handle_request()
                    super(_HTTPServer, instance).on_close(server_conn)

            server_class = _HTTPServer

        if self.cfg.is_ssl:
            _ssl_opt = copy.deepcopy(self.cfg.ssl_options)
            # tornado refuses initialization if ssl_options contains following
            # options
            del _ssl_opt["do_handshake_on_connect"]
            del _ssl_opt["suppress_ragged_eofs"]
            if IOLOOP_PARAMETER_REMOVED:
                server = server_class(app, ssl_options=_ssl_opt)
            else:
                server = server_class(app,
                                      io_loop=self.ioloop,
                                      ssl_options=_ssl_opt)
        else:
            if IOLOOP_PARAMETER_REMOVED:
                server = server_class(app)
            else:
                server = server_class(app, io_loop=self.ioloop)

        self.server = server
        self.server_alive = True

        for s in self.sockets:
            s.setblocking(0)
            if hasattr(server, "add_socket"):  # tornado > 2.0
                server.add_socket(s)
            elif hasattr(server, "_sockets"):  # tornado 2.0
                server._sockets[s.fileno()] = s

        server.no_keep_alive = self.cfg.keepalive <= 0
        server.start(num_processes=1)

        self.ioloop.start()
示例#33
0
文件: api.py 项目: fossabot/Cyclens
    def start_api(self, debug, host, port):
        # Tornado: non-blocking, asynchronous
        asyncio.set_event_loop(asyncio.new_event_loop())
        http_server = HTTPServer(WSGIContainer(self.api))
        http_server.listen(port)
        IOLoop.instance().start()
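The explicit asyncio.set_event_loop(asyncio.new_event_loop()) call suggests this method is meant to run off the main thread, where Tornado 5+ needs a fresh asyncio event loop installed first. A minimal launch sketch under that assumption (api_instance, host and port are illustrative names, not taken from the source):

import threading

# Run the blocking start_api() in a background daemon thread (sketch).
t = threading.Thread(target=api_instance.start_api,
                     args=(False, '0.0.0.0', 5000),  # debug, host, port
                     daemon=True)
t.start()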
示例#34
0
        backlog = Configuration.getBacklog()
        file_handler = RotatingFileHandler(logfile, maxBytes=maxLogSize, backupCount=backlog)
        file_handler.setLevel(logging.ERROR)
        formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
        file_handler.setFormatter(formatter)
        app.logger.addHandler(file_handler)

    plugManager.loadPlugins()

    if flaskDebug:
        # start debug flask server
        app.run(host=flaskHost, port=flaskPort, debug=flaskDebug)
    else:
        # start asynchronous server using tornado wrapper for flask
        # ssl connection
        print("Server starting...")
        if Configuration.useSSL():
            cert = Configuration.getSSLCert()
            key = Configuration.getSSLKey()
            ssl_options = {"certfile": cert,
                           "keyfile": key}
        else:
            ssl_options = None
        signal.signal(signal.SIGTERM, sig_handler)
        signal.signal(signal.SIGINT, sig_handler)
        global http_server
        http_server = HTTPServer(WSGIContainer(app), ssl_options=ssl_options)
        http_server.bind(flaskPort, address=flaskHost)
        http_server.start(0)  # Forks multiple sub-processes
        IOLoop.instance().start()
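For contrast with the fork-per-core startup above (bind() followed by start(0)), a single-process variant is sketched below; in Tornado, listen(port) is shorthand for bind(port) plus start(1), so the same app and ssl_options can be reused:

# Single-process sketch of the server startup above (no forking).
http_server = HTTPServer(WSGIContainer(app), ssl_options=ssl_options)
http_server.listen(flaskPort, address=flaskHost)
IOLoop.instance().start()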
示例#35
0
import os
import sys

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

from GradeServer import create_app

reload(sys).setdefaultencoding('utf-8')

application = create_app(sys.argv)

if __name__ == '__main__':

    print 'running...'
    print 'close server : Ctrl + C'
    http_server = HTTPServer(WSGIContainer(application))
    http_server.bind(80)
    http_server.start(1)

    try:
        IOLoop.instance().start()
    except (KeyboardInterrupt, SystemExit):
        os.system('killall celery')

        print 'container stop&rm'
        os.system('docker stop grade_container1')
        os.system('docker rm grade_container1')

        print 'closed server'
示例#36
0
app.register_blueprint(chart_api, url_prefix='/api/chart')
app.register_blueprint(monitor_api, url_prefix='/api/monitor')

from data.data_chart_host import chart_host_data

app.register_blueprint(chart_host_data, url_prefix='/data/chart')

## Enable the websocket service
from common.ssh_terminal import SshTerminalHandler
from tornado.web import FallbackHandler, Application
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.wsgi import WSGIContainer

app_wsgi = WSGIContainer(app)
handlers = [
    (r"/websocket/(.*)/(.*)", SshTerminalHandler,
     {}),  # {'term_manager': term_manager}),
    (r"/(.*)", FallbackHandler, dict(fallback=app_wsgi))
]

application = Application(handlers, debug=True)


class SchedulerConfig(object):
    JOBS = [{
        'id': 'monitor_service_heartbeat',
        'func': '__main__:monitor_service_heartbeat',
        # 'args': (1, 2),
        'trigger': 'interval',
示例#37
0
def server_start():
    global exclude_ids
    try:
        f = open(settings_file, 'r')
        try:
            allsettings = json.load(f)
        except ValueError as e:
            print('[-] Error: The settings file is not in a valid format, {}'.format(e))
            f.close()
            sys.exit()
        f.close()
    finally:
        if 'f' in vars() and not f.closed:
            f.close()

    exclude_ids = allsettings['exclude_ids']
    port = allsettings['port']
    if allsettings['icon_set'] == 'standard':
        icon_set = 'icons_gen1_standard.png'
    elif allsettings['icon_set'] == 'shuffle':
        icon_set = 'icons_gen1_shuffle.png'
    elif allsettings['icon_set'] == 'alt':
        icon_set = 'icons_gen1_alt.png'
    elif allsettings['icon_set'] == 'toon':
        icon_set = 'icons_gen1_toon.png'
    else:
        print('[-] Error: Icon set in settings file is invalid, possible sets are: "standard", "shuffle", "toon", "alt".')
        sys.exit()
    list_profiles = []
    list_lats = []
    list_lngs = []
    for i in range(0, len(allsettings['profiles'])):
        if allsettings['profiles'][i]['id'] not in list_profiles:
            list_profiles.append(allsettings['profiles'][i]['id'])
            list_lats.append(allsettings['profiles'][i]['coordinates']['lat'])
            list_lngs.append(allsettings['profiles'][i]['coordinates']['lng'])

    if len(list_profiles) == 0:
        print('[-] Error: No profiles in settings file.')
        sys.exit()
    else:
        main_ind = 0

    db_data = sqlite3.connect(data_file, check_same_thread=False)
    db_data.create_function("isnotExcluded", 1, isnotExcluded)

    # def patched_finish(self):
    #     print('still')
    #     try:
    #         if not self.wfile.closed:
    #             self.wfile.close()
    #     except socket.error as e:
    #         sys.stdout.write('socket error: {}\n'.format(e))
    #     self.rfile.close()
    # SocketServer.StreamRequestHandler.finish = patched_finish
    # BaseHTTPServer.HTTPServer.allow_reuse_address = False

    compress = Compress()
    app = Flask(__name__,
                template_folder=workdir + '/webres',
                static_url_path='/static',
                static_folder=workdir + '/webres/static')
    app.config['COMPRESS_MIN_SIZE'] = 0
    app.config['COMPRESS_LEVEL'] = 6
    app.config['COMPRESS_MIMETYPES'] = ['text/html', 'text/css', 'text/xml', 'application/json', 'application/javascript', 'application/octet-stream', 'image/svg+xml']
    compress.init_app(app)

    @app.teardown_appcontext
    def close_connection(exception):
        db = getattr(g, '_database', None)
        if db is not None:
            db.close()

    @app.after_request
    def add_header(response):
        if response.headers['Content-Type'] == "image/png":
            response.headers['Cache-Control'] = 'must-revalidate, public, max-age=86400'
        else:
            response.headers['Cache-Control'] = 'must-revalidate, public, max-age=-1'
        return response

    @app.route('/_getdata')
    def add_numbers():
        datatill = request.args.get('data_till', 0, type=int)
        profile = request.args.get('profile', -1, type=int)

        timenow = int(round(time.time(),0))

        cursor_data = db_data.cursor()

        while True:
            try:
                if profile == -1:
                    results = cursor_data.execute('SELECT spawnid, latitude, longitude, spawntype, pokeid, expiretime FROM spawns WHERE isnotExcluded(pokeid) AND (expiretime > ?) AND (fromtime >= ?)',(timenow,datatill))
                else:
                    results = cursor_data.execute('SELECT spawnid, latitude, longitude, spawntype, pokeid, expiretime FROM spawns WHERE isnotExcluded(pokeid) AND (profile == ?) AND (expiretime > ?) AND (fromtime >= ?)', (profile,timenow, datatill))
                return jsonify([timenow, results.fetchall()])
            except sqlite3.OperationalError as e:
                print('[-] Sqlite operational error: {} Retrying...'.format(e))


    @app.route("/")
    def mainapp():
        return render_template('index.html',api_key=allsettings['api_key'],icon_scalefactor=allsettings['icon_scalefactor'],mobile_scale=allsettings['mobile_scalefactor'],lat=list_lats[main_ind],lng=list_lngs[main_ind],language=allsettings['language'],icon_set = icon_set, profile=-1)

    @app.route("/id<int:profile>")
    def subapp(profile):
        if profile in list_profiles:
            sub_ind = list_profiles.index(profile)
            return render_template('index.html', api_key=allsettings['api_key'], icon_scalefactor=allsettings['icon_scalefactor'], mobile_scale=allsettings['mobile_scalefactor'],lat=list_lats[sub_ind],lng=list_lngs[sub_ind], language=allsettings['language'], icon_set = icon_set, profile=profile)

    http_server = HTTPServer(WSGIContainer(app))

    try:
        http_server.listen(port=port,address='0.0.0.0')
        IOLoop.instance().start()
    except socket.error as e:
        if e.errno == 10048:
            print('[-] Error: The specified port {} is already in use.'.format(port))
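Error code 10048 (WSAEADDRINUSE) is Windows-specific, so the check above only fires on Windows. A hedged sketch of a more portable test using the standard errno module, which the except block could call instead of comparing against 10048 directly:

# Portable port-in-use check (sketch): errno.EADDRINUSE covers POSIX systems,
# while 10048 keeps the original Windows behaviour.
import errno
import socket

def is_port_in_use_error(exc):
    return isinstance(exc, socket.error) and exc.errno in (errno.EADDRINUSE, 10048)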
示例#38
0
    def __init__(self, tornado_request_type, cookies=None):
        self._tornado_request = tornado_request_type
        self._cookies = cookies
        environ = WSGIContainer.environ(tornado_request_type)
        super(DjangoRequest, self).__init__(environ)
        self.tornado_to_django()
示例#39
0
"""This application runs the poolesville voting system
"""

#Tornado Imports
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from tornado.wsgi import WSGIContainer

import pvs

#App start
if __name__ == '__main__':
    HTTP_SERVER = HTTPServer(WSGIContainer(pvs.app))
    HTTP_SERVER.listen(80)
    IOLoop.instance().start()
示例#40
0
#!/usr/bin/python

import tornado.httpserver
import sys
from tornado.wsgi import WSGIContainer
from tornado.ioloop import IOLoop
from tornado.web import FallbackHandler, RequestHandler, Application
from appflask import app


class MainHandler(RequestHandler):
    def get(self):
        self.write("Tornado Starting ^_^")


tr = WSGIContainer(app)

application = Application([
    (r"/tornado", MainHandler),
    (r".*", FallbackHandler, dict(fallback=tr)),
])


def main(argv):
    try:
        port = int(argv[1])
    except (IndexError, ValueError):
        port = 8090

    application.listen(port=port, address='localhost')
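The snippet stops after application.listen() and never starts the IOLoop; a possible entry point is sketched below (an assumption, the source does not show one):

# Hypothetical entry point (not in the source): parse the port, then run the loop.
if __name__ == '__main__':
    main(sys.argv)
    IOLoop.instance().start()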
示例#41
0
    def __call__(self, request):

        data = {}
        response = []

        def start_response(status, response_headers, exc_info=None):
            data["status"] = status
            data["headers"] = response_headers
            return response.append

        app_response = self.wsgi_application(WSGIContainer.environ(request), start_response)

        try:
            response.extend(app_response)
            body = b"".join(response)
        finally:
            if hasattr(app_response, "close"):
                app_response.close()
        if not data:
            raise Exception("WSGI app did not call start_response")

        status_code = int(data["status"].split()[0])
        headers = data["headers"]
        header_set = set(k.lower() for (k, v) in headers)
        body = escape.utf8(body)

        if status_code != 304:
            if "content-length" not in header_set:
                headers.append(("Content-Length", str(len(body))))
            if "content-type" not in header_set:
                headers.append(("Content-Type", "text/html; charset=UTF-8"))
        if "server" not in header_set:
            headers.append(("Server", "TornadoServer/%s" % tornado.version))

        # try:

            # if request.method.lower() != GET:

            # _cookies = Cookie.SimpleCookie()

            # Django session cookies are set at the django level, but
            # tornado-specific cookies are appended here.

            # token = request.cookies[djsettings.CSRF_COOKIE_NAME]
            # if token:
            #     _cookies[settings.XSRF_TOKEN] = token.value

            # TODO Change when we add more contexts
            # _cookies[settings.CONTEXT_COOKIE] = settings.DEFAULT_CONTEXT

            # for c in _cookies.output(sep='\t').split('\t'):
            #     k, v = c.split(': ')
            #     headers.append((k, v + '; Path=/'))

        # except ValueError as e:
        #     logging.getLogger("ef5").error("A Value is either missing or invalid: {0}".format(e))
        # except Exception as e:
        #     logging.getLogger("ef5").error("An unknown error occurred: {0} with stacktrace {1}".format(e, traceback.format_exc()))

        parts = [escape.utf8("HTTP/1.1 " + data["status"] + "\r\n")]
        for key, value in headers:
            parts.append(escape.utf8(key) + b": " + escape.utf8(value) + b"\r\n")

        parts.append(b"\r\n")
        parts.append(body)

        request.write(b"".join(parts))
        request.finish()
        self._log(status_code, request)
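The __call__ above mirrors the core of tornado.wsgi.WSGIContainer: it builds a WSGI environ from the Tornado request, calls the WSGI application with a start_response collector, then serializes the status, headers, and body into a raw HTTP/1.1 response. A minimal WSGI callable of the kind such a container can serve (illustrative only, not taken from any project in this listing):

def hello_app(environ, start_response):
    # Smallest useful WSGI app: fixed status, explicit headers, byte body.
    body = b'hello from WSGI'
    start_response('200 OK', [('Content-Type', 'text/plain; charset=UTF-8'),
                              ('Content-Length', str(len(body)))])
    return [body]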
示例#42
0
def run_tornado_server(host, port):
    app = create_default_app()
    print(app.config)
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(port, host)
    IOLoop.instance().start()
示例#43
0
#coding=utf-8
from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop
from config import app_host, tornado_port
from run import app

# xheaders=True is for Running behind a load balancer: http://www.tornadoweb.org/en/stable/guide/running.html#running-behind-a-load-balancer
http_server = HTTPServer(WSGIContainer(app), xheaders=True)
http_server.listen(tornado_port, address=app_host)
IOLoop.instance().start()
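As the comment notes, xheaders=True makes Tornado trust the headers a load balancer sets, so request.remote_ip and request.protocol reflect X-Real-Ip / X-Forwarded-For and X-Scheme / X-Forwarded-Proto instead of the balancer's own address. A small illustrative handler (not part of the project above) that exposes the effect:

import tornado.web

class WhoAmIHandler(tornado.web.RequestHandler):
    # With xheaders=True on the HTTPServer, remote_ip is the client address
    # forwarded by the proxy rather than the proxy itself.
    def get(self):
        self.write({'remote_ip': self.request.remote_ip,
                    'protocol': self.request.protocol})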
示例#44
0
    python -m proxypool_framework.proxy_collector REDIS_URL=redis:// MAX_NUM_PROXY_IN_DB=500 MAX_SECONDS_MUST_CHECK_AGAIN=12 REQUESTS_TIMEOUT=6 FLASK_PORT=6795 PROXY_KEY_IN_REDIS_DEFAULT=proxy_free
    """

    os.system(f"""netstat -nltp|grep ':{FLASK_PORT} '|awk '{{print $NF}}'|awk -F/ '{{print $1}}'""")  # 杀死端口,避免ctrl c关闭不彻底,导致端口被占用。
    """启动代理池自动持续维护"""
    ProxyCollector(get_iphai_proxies_list, platform_name='iphai', time_sleep_for_get_new_proxies=70, ).work()
    ProxyCollector(get_from_seofangfa, platform_name='seofangfa', time_sleep_for_get_new_proxies=70, ).work()
    for p in range(1, 3):
        ProxyCollector(get_https_proxies_list_from_xici_by_page, (p,), platform_name='xici',
                       time_sleep_for_get_new_proxies=70, redis_key='proxy_xici').work()  # demonstrates how this framework maintains several proxy pools at once, by giving each a different redis_key.
        ProxyCollector(get_89ip_proxies_list, (p,), platform_name='89ip', time_sleep_for_get_new_proxies=70, ).work()
    for p in range(1, 6):
        ProxyCollector(get_from_superfastip, (p,), platform_name='superfastip', time_sleep_for_get_new_proxies=65).work()
    for area in range(1, 30):  # 30 city areas in total
        ProxyCollector(get_66ip_proxies_list, func_kwargs={'area': area}, platform_name='66ip', time_sleep_for_get_new_proxies=300, ).work()
    for p in range(1, 20):
        if p < 5:
            time_sleep_for_get_new_proxiesx = 30
        else:
            time_sleep_for_get_new_proxiesx = 300
        ProxyCollector(get_https_proxies_list_from_xila_https_by_page, func_args=(p,), platform_name='西拉', time_sleep_for_get_new_proxies=time_sleep_for_get_new_proxiesx, ).work()
        ProxyCollector(get_https_proxies_list_from_xila_gaoni_by_page, func_kwargs={'p': p}, platform_name='西拉', time_sleep_for_get_new_proxies=time_sleep_for_get_new_proxiesx, ).work()
        ProxyCollector(get_nima_proxies_list, (p, 'gaoni'), platform_name='nima', time_sleep_for_get_new_proxies=time_sleep_for_get_new_proxiesx).work()
        ProxyCollector(get_nima_proxies_list, (p, 'https'), platform_name='nima', time_sleep_for_get_new_proxies=time_sleep_for_get_new_proxiesx).work()
        ProxyCollector(get_from_jiangxianli, func_kwargs={'p': p}, platform_name='jiangxianli', time_sleep_for_get_new_proxies=time_sleep_for_get_new_proxiesx).work()

    """启动api"""
    http_server = HTTPServer(WSGIContainer(create_app()))
    http_server.listen(FLASK_PORT)
    IOLoop.instance().start()
示例#45
0
def runCouchPotato(options, base_path, args, data_dir = None, log_dir = None, Env = None, desktop = None):

    try:
        locale.setlocale(locale.LC_ALL, "")
        encoding = locale.getpreferredencoding()
    except (locale.Error, IOError):
        encoding = None

    # for OSes that are poorly configured I'll just force UTF-8
    if not encoding or encoding in ('ANSI_X3.4-1968', 'US-ASCII', 'ASCII'):
        encoding = 'UTF-8'

    # Do db stuff
    db_path = os.path.join(data_dir, 'couchpotato.db')

    # Backup before start and cleanup old databases
    new_backup = os.path.join(data_dir, 'db_backup', str(int(time.time())))

    # Create path and copy
    if not os.path.isdir(new_backup): os.makedirs(new_backup)
    src_files = [options.config_file, db_path, db_path + '-shm', db_path + '-wal']
    for src_file in src_files:
        if os.path.isfile(src_file):
            shutil.copy2(src_file, os.path.join(new_backup, os.path.basename(src_file)))

    # Remove older backups, keep backups 3 days or at least 3
    backups = []
    for directory in os.listdir(os.path.dirname(new_backup)):
        backup = os.path.join(os.path.dirname(new_backup), directory)
        if os.path.isdir(backup):
            backups.append(backup)

    total_backups = len(backups)
    for backup in backups:
        if total_backups > 3:
            if tryInt(os.path.basename(backup)) < time.time() - 259200:
                for src_file in src_files:
                    b_file = os.path.join(backup, os.path.basename(src_file))
                    if os.path.isfile(b_file):
                        os.remove(b_file)
                os.rmdir(backup)
                total_backups -= 1


    # Register environment settings
    Env.set('encoding', encoding)
    Env.set('app_dir', base_path)
    Env.set('data_dir', data_dir)
    Env.set('log_path', os.path.join(log_dir, 'CouchPotato.log'))
    Env.set('db_path', 'sqlite:///' + db_path)
    Env.set('cache_dir', os.path.join(data_dir, 'cache'))
    Env.set('cache', FileSystemCache(os.path.join(Env.get('cache_dir'), 'python')))
    Env.set('console_log', options.console_log)
    Env.set('quiet', options.quiet)
    Env.set('desktop', desktop)
    Env.set('daemonized', options.daemon)
    Env.set('args', args)
    Env.set('options', options)

    # Determine debug
    debug = options.debug or Env.setting('debug', default = False, type = 'bool')
    Env.set('debug', debug)

    # Development
    development = Env.setting('development', default = False, type = 'bool')
    Env.set('dev', development)

    # Disable logging for some modules
    for logger_name in ['enzyme', 'guessit', 'subliminal', 'apscheduler']:
        logging.getLogger(logger_name).setLevel(logging.ERROR)

    for logger_name in ['gntp', 'migrate']:
        logging.getLogger(logger_name).setLevel(logging.WARNING)

    # Use reloader
    reloader = debug is True and development and not Env.get('desktop') and not options.daemon

    # Logger
    logger = logging.getLogger()
    formatter = logging.Formatter('%(asctime)s %(levelname)s %(message)s', '%m-%d %H:%M:%S')
    level = logging.DEBUG if debug else logging.INFO
    logger.setLevel(level)
    logging.addLevelName(19, 'INFO')

    # To screen
    if (debug or options.console_log) and not options.quiet and not options.daemon:
        hdlr = logging.StreamHandler(sys.stderr)
        hdlr.setFormatter(formatter)
        logger.addHandler(hdlr)

    # To file
    hdlr2 = handlers.RotatingFileHandler(Env.get('log_path'), 'a', 500000, 10)
    hdlr2.setFormatter(formatter)
    logger.addHandler(hdlr2)

    # Start logging & enable colors
    import color_logs
    from couchpotato.core.logger import CPLog
    log = CPLog(__name__)
    log.debug('Started with options %s', options)

    def customwarn(message, category, filename, lineno, file = None, line = None):
        log.warning('%s %s %s line:%s', (category, message, filename, lineno))
    warnings.showwarning = customwarn

    # Check if database exists
    db = Env.get('db_path')
    db_exists = os.path.isfile(db_path)

    # Load configs & plugins
    loader = Env.get('loader')
    loader.preload(root = base_path)
    loader.run()

    # Load migrations
    if db_exists:

        from migrate.versioning.api import version_control, db_version, version, upgrade
        repo = os.path.join(base_path, 'couchpotato', 'core', 'migration')

        latest_db_version = version(repo)
        try:
            current_db_version = db_version(db, repo)
        except:
            version_control(db, repo, version = latest_db_version)
            current_db_version = db_version(db, repo)

        if current_db_version < latest_db_version:
            if development:
                log.error('There is a database migration ready, but you are running development mode, so it won\'t be used. If you see this, you are stupid. Please disable development mode.')
            else:
                log.info('Doing database upgrade. From %d to %d', (current_db_version, latest_db_version))
                upgrade(db, repo)

    # Configure Database
    from couchpotato.core.settings.model import setup
    setup()

    # Fill database with needed stuff
    if not db_exists:
        fireEvent('app.initialize', in_order = True)

    # Create app
    from couchpotato import app
    api_key = Env.setting('api_key')
    url_base = '/' + Env.setting('url_base').lstrip('/') if Env.setting('url_base') else ''

    # Basic config
    app.secret_key = api_key
    host = Env.setting('host', default = '0.0.0.0')
    # app.debug = development
    config = {
        'use_reloader': reloader,
        'port': tryInt(Env.setting('port', default = 5000)),
        'host': host if host and len(host) > 0 else '0.0.0.0',
        'ssl_cert': Env.setting('ssl_cert', default = None),
        'ssl_key': Env.setting('ssl_key', default = None),
    }

    # Static path
    app.static_folder = os.path.join(base_path, 'couchpotato', 'static')
    web.add_url_rule('api/%s/static/<path:filename>' % api_key,
                      endpoint = 'static',
                      view_func = app.send_static_file)

    # Register modules
    app.register_blueprint(web, url_prefix = '%s/' % url_base)
    app.register_blueprint(api, url_prefix = '%s/api/%s/' % (url_base, api_key))

    # Some logging and fire load event
    try: log.info('Starting server on port %(port)s', config)
    except: pass
    fireEventAsync('app.load')

    # Go go go!
    from tornado.ioloop import IOLoop
    web_container = WSGIContainer(app)
    web_container._log = _log
    loop = IOLoop.current()


    application = Application([
        (r'%s/api/%s/nonblock/(.*)/' % (url_base, api_key), NonBlockHandler),
        (r'.*', FallbackHandler, dict(fallback = web_container)),
    ],
        log_function = lambda x : None,
        debug = config['use_reloader'],
        gzip = True,
    )

    if config['ssl_cert'] and config['ssl_key']:
        server = HTTPServer(application, no_keep_alive = True, ssl_options = {
           "certfile": config['ssl_cert'],
           "keyfile": config['ssl_key'],
        })
    else:
        server = HTTPServer(application, no_keep_alive = True)

    try_restart = True
    restart_tries = 5

    while try_restart:
        try:
            server.listen(config['port'], config['host'])
            loop.start()
        except Exception, e:
            try:
                nr, msg = e
                if nr == 48:
                    log.info('Already in use, try %s more time after few seconds', restart_tries)
                    time.sleep(1)
                    restart_tries -= 1

                    if restart_tries > 0:
                        continue
                    else:
                        return
            except:
                pass

            raise

        try_restart = False
示例#46
0
    def __init__(self, *args, **kwargs):
        _RequestHandler.__init__(self, *args, **kwargs)

        self.environ = WSGIContainer.environ(self.request)
示例#47
0
def run_demo(args):
    dump_dir = os.path.join(args.dump_dir, args.dump_path)
    if args.abs_path:
        index_dir = args.index_name
    else:
        index_dir = os.path.join(args.dump_dir, args.index_name)
    index_path = os.path.join(index_dir, args.index_path)
    idx2id_path = os.path.join(index_dir, args.idx2id_path)
    tfidf_dump_dir = os.path.join(args.dump_dir, 'tfidf')
    max_norm_path = os.path.join(index_dir, 'max_norm.json')
    ranker_path = os.path.join(
        args.wikipedia_dir,
        'docs-tfidf-ngram=2-hash=16777216-tokenizer=simple.npz')

    mips = MIPSSparse(dump_dir,
                      index_path,
                      idx2id_path,
                      ranker_path,
                      args.max_answer_length,
                      para=args.para,
                      tfidf_dump_dir=tfidf_dump_dir,
                      sparse_weight=args.sparse_weight,
                      sparse_type=args.sparse_type,
                      cuda=args.cuda,
                      max_norm_path=max_norm_path)

    app = Flask(__name__, static_url_path='/static')

    app.config['JSONIFY_PRETTYPRINT_REGULAR'] = False
    CORS(app)

    emb_session = FuturesSession()

    def search(query,
               top_k,
               nprobe=64,
               search_strategy='dense_first',
               doc_top_k=5):
        t0 = time()
        (start, end, span), _ = query2emb(query, args.api_port)()
        phrase_vec = np.concatenate([start, end, span], 1)
        rets = mips.search(phrase_vec,
                           top_k=top_k,
                           nprobe=nprobe,
                           start_top_k=args.start_top_k,
                           mid_top_k=args.mid_top_k,
                           q_texts=[query],
                           filter_=args.filter,
                           search_strategy=search_strategy,
                           doc_top_k=doc_top_k)
        t1 = time()
        out = {'ret': rets[0], 'time': int(1000 * (t1 - t0))}
        return out

    def query2emb(query, api_port):
        r = emb_session.get('http://*****:*****')

    @app.route('/')
    def index():
        return app.send_static_file('index.html')

    @app.route('/files/<path:path>')
    def static_files(path):
        return app.send_static_file('files/' + path)

    @app.route('/api', methods=['GET'])
    def api():
        query = request.args['query']
        strat = request.args['strat']
        out = search(query,
                     args.top_k,
                     args.nprobe,
                     search_strategy=strat,
                     doc_top_k=args.doc_top_k)
        return jsonify(out)

    @app.route('/get_examples', methods=['GET'])
    def get_examples():
        with open(args.examples_path, 'r') as fp:
            examples = [line.strip() for line in fp.readlines()]
        return jsonify(examples)

    print('Starting server at %d' % args.port)
    http_server = HTTPServer(WSGIContainer(app))
    http_server.listen(args.port)
    IOLoop.instance().start()
示例#48
0
def get_server(panel,
               port=0,
               address=None,
               websocket_origin=None,
               loop=None,
               show=False,
               start=False,
               title=None,
               verbose=False,
               location=True,
               static_dirs={},
               oauth_provider=None,
               oauth_key=None,
               oauth_secret=None,
               oauth_extra_params={},
               cookie_secret=None,
               oauth_encryption_key=None,
               **kwargs):
    """
    Returns a Server instance with this panel attached as the root
    app.

    Arguments
    ---------
    panel: Viewable, function or {str: Viewable}
      A Panel object, a function returning a Panel object or a
      dictionary mapping from the URL slug to either.
    port: int (optional, default=0)
      Allows specifying a specific port
    address : str
      The address the server should listen on for HTTP requests.
    websocket_origin: str or list(str) (optional)
      A list of hosts that can connect to the websocket.

      This is typically required when embedding a server app in
      an external web site.

      If None, "localhost" is used.
    loop : tornado.ioloop.IOLoop (optional, default=IOLoop.current())
      The tornado IOLoop to run the Server on.
    show : boolean (optional, default=False)
      Whether to open the server in a new browser tab on start.
    start : boolean(optional, default=False)
      Whether to start the Server.
    title : str or {str: str} (optional, default=None)
      An HTML title for the application or a dictionary mapping
      from the URL slug to a customized title.
    verbose: boolean (optional, default=False)
      Whether to report the address and port.
    location : boolean or panel.io.location.Location
      Whether to create a Location component to observe and
      set the URL location.
    static_dirs: dict (optional, default={})
      A dictionary of routes and local paths to serve as static file
      directories on those routes.
    oauth_provider: str
      One of the available OAuth providers
    oauth_key: str (optional, default=None)
      The public OAuth identifier
    oauth_secret: str (optional, default=None)
      The client secret for the OAuth provider
    oauth_extra_params: dict (optional, default={})
      Additional information for the OAuth provider
    cookie_secret: str (optional, default=None)
      A random secret string to sign cookies (required for OAuth)
    oauth_encryption_key: str (optional, default=False)
      A random encryption key used for encrypting OAuth user
      information and access tokens.
    kwargs: dict
      Additional keyword arguments to pass to Server instance.

    Returns
    -------
    server : bokeh.server.server.Server
      Bokeh Server instance running this panel
    """
    server_id = kwargs.pop('server_id', uuid.uuid4().hex)
    kwargs['extra_patterns'] = extra_patterns = kwargs.get(
        'extra_patterns', [])
    if isinstance(panel, dict):
        apps = {}
        for slug, app in panel.items():
            if isinstance(title, dict):
                try:
                    title_ = title[slug]
                except KeyError:
                    raise KeyError(
                        "Keys of the title dictionnary and of the apps "
                        f"dictionary must match. No {slug} key found in the "
                        "title dictionnary.")
            else:
                title_ = title
            slug = slug if slug.startswith('/') else '/' + slug
            if 'flask' in sys.modules:
                from flask import Flask
                if isinstance(app, Flask):
                    wsgi = WSGIContainer(app)
                    if slug == '/':
                        raise ValueError(
                            'Flask apps must be served on a subpath.')
                    if not slug.endswith('/'):
                        slug += '/'
                    extra_patterns.append(
                        ('^' + slug + '.*', ProxyFallbackHandler,
                         dict(fallback=wsgi, proxy=slug)))
                    continue
            apps[slug] = partial(_eval_panel, app, server_id, title_, location)
    else:
        apps = {'/': partial(_eval_panel, panel, server_id, title, location)}

    extra_patterns += get_static_routes(static_dirs)

    opts = dict(kwargs)
    if loop:
        loop.make_current()
        opts['io_loop'] = loop
    elif opts.get('num_procs', 1) == 1:
        opts['io_loop'] = IOLoop.current()

    if 'index' not in opts:
        opts['index'] = INDEX_HTML

    if address is not None:
        opts['address'] = address

    if websocket_origin:
        if not isinstance(websocket_origin, list):
            websocket_origin = [websocket_origin]
        opts['allow_websocket_origin'] = websocket_origin

    # Configure OAuth
    from ..config import config
    if config.oauth_provider:
        from ..auth import OAuthProvider
        opts['auth_provider'] = OAuthProvider()
    if oauth_provider:
        config.oauth_provider = oauth_provider
    if oauth_key:
        config.oauth_key = oauth_key
    if oauth_secret:
        config.oauth_secret = oauth_secret
    if oauth_extra_params:
        config.oauth_extra_params = oauth_extra_params
    if cookie_secret:
        config.cookie_secret = cookie_secret
    opts['cookie_secret'] = config.cookie_secret

    server = Server(apps, port=port, **opts)
    if verbose:
        address = server.address or 'localhost'
        print("Launching server at http://%s:%s" % (address, server.port))

    state._servers[server_id] = (server, panel, [])

    if show:

        def show_callback():
            server.show('/login' if config.oauth_provider else '/')

        server.io_loop.add_callback(show_callback)

    def sig_exit(*args, **kwargs):
        server.io_loop.add_callback_from_signal(do_stop)

    def do_stop(*args, **kwargs):
        server.io_loop.stop()

    try:
        signal.signal(signal.SIGINT, sig_exit)
    except ValueError:
        pass  # Can't use signal on a thread

    if start:
        server.start()
        try:
            server.io_loop.start()
        except RuntimeError:
            pass
    return server
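A usage sketch for get_server (illustrative names; assumes panel and flask are importable), showing a Panel app and a Flask app mounted on a subpath, which the dictionary branch above wraps in a WSGIContainer behind a fallback handler:

import panel as pn
from flask import Flask

flask_app = Flask(__name__)

@flask_app.route('/')
def flask_index():
    return 'Served by Flask, proxied through the Panel/Bokeh server.'

# '/flask' must be a subpath: the code above rejects Flask apps mounted at '/'.
server = get_server(
    {'/dash': pn.pane.Markdown('# Panel app'), '/flask': flask_app},
    port=5006,
    show=False,
    start=True,   # blocks: starts the Bokeh server and its IOLoop
)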