Beispiel #1
0
def main():
    # Entry point: configure logging, build the web.py WSGI app, and
    # serve it with gevent on port 8088, handling termination signals
    # for a graceful shutdown.
    loglevel = 'INFO'

    datefmt = '%b %d %H:%M:%S'
    logformat = '%(asctime)s %(levelname)s pysms: %(message)s'

    # Log to stdout in a syslog-like format.
    logging.basicConfig(level=loglevel,
                        stream=sys.stdout,
                        format=logformat,
                        datefmt=datefmt)

    application = web.application(urls, globals()).wsgifunc()
    # '' binds on all interfaces.
    server = WSGIServer(('', 8088), application)

    # NOTE(review): this relies on gevent's signal(), which forwards
    # extra positional/keyword args to the handler (stdlib
    # signal.signal does not accept them) — confirm the import.
    signal(SIGTERM, exit, arping, server=server)
    signal(SIGQUIT, exit, arping, server=server)
    signal(SIGINT, exit, arping, server=server)

    print "Starting message dispatcher"
    dispatcher.start()
    print "Starting arping monitor"
    arping.start()
    print 'Serving on 8088...'
    server.start()

    # Idle loop that yields to the gevent hub; KeyboardInterrupt is
    # caught here to shut the helpers and the server down.
    while True:
        try:
            gevent.sleep(0)
        except KeyboardInterrupt:
            # Switch to main thread, to quit gracefully
            gevent.hub.get_hub().switch()
            exit(arping, server)
Beispiel #2
0
class WebServiceInterface(Interface):
    """Interface that serves HTTP requests via a gevent WSGIServer.

    The listening socket is obtained from the container as a shared
    file descriptor, so several processes can accept on the same port.
    """

    # Default TCP port; the container maps it to a shared socket fd.
    http_port = 80

    def __init__(self, *args, **kwargs):
        super(WebServiceInterface, self).__init__(*args, **kwargs)
        self.wsgi_server = None  # created in on_start()

    def on_start(self):
        """Create and start the WSGI server on the shared socket."""
        super(WebServiceInterface, self).on_start()
        # 'fd://N' re-uses an already-bound descriptor instead of
        # binding a fresh socket in this process.
        self.http_socket = create_socket('fd://%s' % self.container.get_shared_socket_fd(self.http_port))
        self.wsgi_server = WSGIServer(self.http_socket, Request.application(self.dispatch_request))
        self.wsgi_server.start()

    def on_stop(self):
        """Stop accepting requests, then run the parent teardown."""
        self.wsgi_server.stop()
        super(WebServiceInterface, self).on_stop()

    def dispatch_request(self, request):
        """Route *request* through self.url_map and return a response.

        Endpoints may be callables (handler classes instantiated per
        request) or strings naming a method on this interface.
        HTTPExceptions raised while matching or handling are converted
        into their standard responses.
        """
        trace.set_id()
        urls = self.url_map.bind_to_environ(request.environ)
        request.urls = urls
        try:
            endpoint, args = urls.match()
            if callable(endpoint):
                # Class-based endpoint: instantiate with (interface, request).
                handler = endpoint(self, request)
                response = handler.dispatch(args)
            else:
                # String endpoint: resolve a handler method by name.
                try:
                    handler = getattr(self, endpoint)
                except AttributeError:
                    raise  # FIXME
                response = handler(request, **args)
        except HTTPException as e:
            # HTTP errors render themselves as responses.
            response = e.get_response(request.environ)
        return response
Beispiel #3
0
def start_server(input_channels,
                 cors,
                 auth_token,
                 port,
                 initial_agent,
                 enable_api=True):
    """Run the agent.

    Builds the Flask/WSGI application (with or without the full HTTP
    API), registers any input channels under /webhooks/, starts a
    gevent WSGIServer on 0.0.0.0:*port* without blocking, and returns
    the server.
    """
    if not enable_api:
        # Minimal app: only the webhook routes registered below.
        app = Flask(__name__)
        CORS(app, resources={r"/*": {"origins": cors or ""}})
    else:
        app = server.create_app(initial_agent,
                                auth_token=auth_token,
                                cors_origins=cors)

    if input_channels:
        rasa_core.channels.channel.register(input_channels,
                                            app,
                                            initial_agent.handle_message,
                                            route="/webhooks/")

    if logger.isEnabledFor(logging.DEBUG):
        utils.list_routes(app)

    wsgi_server = WSGIServer(('0.0.0.0', port), app)
    logger.info("Rasa Core server is up and running on "
                "{}".format(constants.DEFAULT_SERVER_URL))
    wsgi_server.start()
    return wsgi_server
Beispiel #4
0
class WebService(Service):
    """RPC service with Web server capabilities.

    """

    def __init__(self, listen_port, handlers, parameters, shard=0,
                 custom_logger=None, listen_address=""):
        Service.__init__(self, shard, custom_logger)

        # Swap the module-level logger for the gevent-aware one so
        # logging cooperates with the cms event loop.
        global logger
        from cms.io.GeventLibrary import logger as _logger
        logger = _logger

        self.__responses = {}
        # TODO: why are the following two lines needed?
        # NOTE(review): these expose the name-mangled dict under the
        # names the handler classes would use for their own private
        # attribute — presumably the handlers read it as if it were
        # theirs; confirm before removing.
        self._RPCRequestHandler__responses = self.__responses
        self._RPCAnswerHandler__responses = self.__responses
        # Append the built-in RPC endpoints to the caller's handlers.
        handlers += [(r"/rpc_request/([a-zA-Z0-9_-]+)/" \
                      "([0-9]+)/([a-zA-Z0-9_-]+)",
                      RPCRequestHandler),
                     (r"/rpc_answer", RPCAnswerHandler),
                     (r"/sync_rpc_request/([a-zA-Z0-9_-]+)/" \
                      "([0-9]+)/([a-zA-Z0-9_-]+)",
                      SyncRPCRequestHandler)]
        self.application = tornado.wsgi.WSGIApplication(handlers, **parameters)
        self.application.service = self

        # is_proxy_used=True means the content of the header X-Real-IP
        # is interpreted as the request IP. This means that if we're
        # behind a proxy, it can see the real IP the request is coming
        # from. But, to use it, we need to be sure we can trust it
        # (i.e., if we are not behind a proxy that sets that header,
        # we must not use it).
        real_application = self.application
        if parameters.get('is_proxy_used', False):
            real_application = WSGIXheadersMiddleware(real_application)

        self.web_server = WSGIServer((listen_address, listen_port),
                                     real_application)

    def run(self):
        """Start the WebService.

        Both the WSGI server and the RPC server are started.

        """
        self.web_server.start()
        Service.run(self)
        # Service.run() returned, i.e. we are shutting down.
        self.web_server.stop()

    @rpc_callback
    def _default_callback(self, data, plus, error=None):
        """This is the callback for the RPC method called from a web
        page, that just collect the response.

        """
        self.__responses[plus] = (data, error)
Beispiel #5
0
class RpcAdapterWebSocket(RpcCommAdapter):
	def __init__(self,id,ep):
		RpcCommAdapter.__init__(self,id,ep)
		self.server = None
		ep.impl = self

	def start(self):
		from gevent.pywsgi import WSGIServer
		import geventwebsocket
		from geventwebsocket.handler import WebSocketHandler

		# geventwebsocket.WebSocketServer
		# self.server = WSGIServer((self.ep.host,self.ep.port), self._service, handler_class=geventwebsocket.WebSocketHandler)

		if self.ep.ssl:
			self.server = WSGIServer((self.ep.host,self.ep.port), self._service, handler_class=WebSocketHandler,keyfile=self.ep.keyfile,certfile=self.ep.certfile)
		else:
			self.server = WSGIServer((self.ep.host,self.ep.port), self._service, handler_class=WebSocketHandler)
		print 'websocket server started!'
		self.server.start() #.serve_forever()

	def stop(self):
		self.server.stop()
		self.stopmtx.set()


	def _http_handler(environ, start_response):
		import geventwebsocket
		if environ["PATH_INFO"].strip("/") == "version":
			start_response("200 OK", [])
			agent = "gevent-websocket/%s" % (geventwebsocket.get_version())
			return [agent]
		else:
			start_response("400 Bad Request", [])
		return ["WebSocket connection is expected here."]

	def _service(self,environ, start_response):
		from communicator import RpcCommunicator
		print ' new client websocket come in :'#,str(address)
		sock = environ.get("wsgi.websocket")
		if sock is None:
			return self._http_handler(environ, start_response)
		conn = RpcConnectionWebSocket(self,self.ep,sock)
		self.addConnection(conn)

		server = RpcCommunicator.instance().currentServer()
		if server.getPropertyValue('userid_check','false') == 'false':
			conn.setUserId( str(self.generateSeq()) )
		conn.recv()

		self.removeConnection(conn)


	def sendMessage(self,m):
		RpcCommAdapter.sendMessage(self,m)
Beispiel #6
0
def init_ws(context):
    """Initialize websocket connection handler.

    Reads the bind address from configuration and starts, without
    blocking, a gevent WSGI server that upgrades clients to WebSocket.
    """
    bind_addr = config.get_value("websocket_address")
    logging.info("Opening websocket server on %s", bind_addr)
    # TODO: Believe it or not this is not a valid way to check an address
    ws_server = WSGIServer(bind_addr,
                           WebSocketClientFactory(context),
                           handler_class=WebSocketHandler)
    ws_server.start()
Beispiel #7
0
def preview(filename, options={}):
    """Serve *filename* rendered as HTML in a local browser preview.

    Starts a throwaway Bottle app on 127.0.0.1 (random port unless
    options['port'] is set), opens the default browser on it, and
    blocks serving until interrupted.  An SSE endpoint notifies the
    page whenever the file's mtime advances.

    NOTE(review): the mutable default ``options={}`` is shared across
    calls; it is only read here, but callers should not mutate it.
    """
    path = os.path.abspath(filename)
    dirpath = os.path.dirname(filename)
    realpath = os.path.realpath(path)

    if not os.path.isfile(realpath):
        error("%s is not a regular file" % filename)

    app = Bottle()

    @app.route('/')
    @view('index')
    def index():
        # Template flag: whether the page links the default stylesheet.
        return { 'css': options.get('css', True) }

    @app.route('/content')
    def content():
        # Re-read and render on every request so reloads are fresh.
        with open(path, 'rt') as fp:
            return render(fp.read(), path)

    @app.route('/update-event')
    def update():
        # Server-Sent Events stream: emits 'update' when mtime changes.
        response.set_header('Content-Type', 'text/event-stream')
        response.set_header('Cache-Control', 'no-cache')
        mtime = lambda: os.stat(path).st_mtime
        last_mtime = mtime()
        while True:
            if not os.path.exists(path):
                error("%s no longer exists" % filename)
            current_mtime = mtime()
            if current_mtime > last_mtime:
                last_mtime = current_mtime
                ok('%s updated' % filename)
                yield 'data: update\n\n'
            time.sleep(0.25)

    @app.route('/__moo__/<res:path>')
    def moo_static(res):
        # Static assets bundled with the tool itself.
        return static_file(res, root=STATIC_ROOT)

    @app.route('/<res:path>')
    def static(res):
        # Anything else resolves relative to the previewed file's dir.
        return static_file(res, root=dirpath)

    # Port 0 lets the OS pick a free port; log=None silences gevent.
    server = WSGIServer(("127.0.0.1", options.get('port', 0)), app, log=None)
    server.start()

    url = 'http://127.0.0.1:%d' % server.server_port
    info('Server listening at %s. Press Ctrl-C to stop.' % url)

    webbrowser.open(url)
    server.serve_forever()
def run_server(auction, mapping_expire_time, logger, timezone='Europe/Kiev'):
    """Configure the auction Flask app, start it on a free port and
    register the port mapping in Redis.  Returns the started
    (non-blocking) WSGIServer.

    NOTE(review): ``mapping_expire_time`` is only passed as a surplus
    argument to the "Server mapping" format() call below (the format
    string has two placeholders) — presumably it was meant to go to
    create_mapping(); confirm.
    """
    app.config.update(auction.worker_defaults)
    # Replace Flask custom logger
    app.logger_name = logger.name
    app._logger = logger
    app.config['auction'] = auction
    app.config['timezone'] = tz(timezone)
    # Scope the session cookie to this auction only.
    app.config['SESSION_COOKIE_PATH'] = '/tenders/{}'.format(auction.auction_doc_id)
    app.config['SESSION_COOKIE_NAME'] = 'auction_session'
    app.oauth = OAuth(app)
    app.remote_oauth = app.oauth.remote_app(
        'remote',
        consumer_key=app.config['OAUTH_CLIENT_ID'],
        consumer_secret=app.config['OAUTH_CLIENT_SECRET'],
        request_token_params={'scope': 'email'},
        base_url=app.config['OAUTH_BASE_URL'],
        access_token_url=app.config['OAUTH_ACCESS_TOKEN_URL'],
        authorize_url=app.config['OAUTH_AUTHORIZE_URL']
    )

    @app.remote_oauth.tokengetter
    def get_oauth_token():
        # The OAuth token lives in the cookie-backed session.
        return session.get('remote_oauth')
    # Allow plain-HTTP OAuth callbacks.
    os.environ['OAUTHLIB_INSECURE_TRANSPORT'] = 'true'

    # Start server on unused port
    lisener = get_lisener(auction.worker_defaults["STARTS_PORT"],
                          host=auction.worker_defaults.get("WORKER_BIND_IP", ""))
    app.logger.info(
        "Start server on {0}:{1}".format(*lisener.getsockname()),
        extra={"JOURNAL_REQUEST_ID": auction.request_id}
    )
    server = WSGIServer(lisener, app,
                        log=_LoggerStream(logger),
                        handler_class=AuctionsWSGIHandler)
    server.start()
    # Set mapping
    mapping_value = "http://{0}:{1}/".format(*lisener.getsockname())
    create_mapping(auction.worker_defaults["REDIS_URL"],
                   auction.auction_doc_id,
                   mapping_value)
    app.logger.info("Server mapping: {} -> {}".format(
        auction.auction_doc_id,
        mapping_value,
        mapping_expire_time
    ), extra={"JOURNAL_REQUEST_ID": auction.request_id})

    # Spawn events functionality
    spawn(push_timestamps_events, app,)
    spawn(check_clients, app, )
    return server
Beispiel #9
0
def start_server():
    """Start *studio* on an OS-assigned localhost port.

    Writes "<port> <base64-key>" to stdout so the parent process can
    connect, then blocks in serve_forever().
    """
    studio.debug = True
    # Fresh secret for this run: raw bytes go to Flask, a base64 copy
    # goes to the parent process.
    raw_key = os.urandom(33)
    studio.config['SECRET_KEY'] = raw_key
    encoded_key = base64.b64encode(raw_key).decode('ascii')
    # Port 0 -> the OS assigns a free port.
    server = WSGIServer(('localhost', 0), studio, log=sys.stdout,
                        error_log=sys.stderr)
    server.start()
    # send key and port to parent process
    sys.stdout.write("%d %s" % (server.server_port, encoded_key))
    sys.stdout.flush()  # (needed when output is in a pipe)
    server.serve_forever()
Beispiel #10
0
def start_serving_files(cachedir, port):
    """Serve files from *cachedir* under /cache/ on *port*.

    Returns the started (non-blocking) WSGIServer.  Files ending in
    ".rl" are sent with a Content-Disposition that makes browsers show
    them inline as collection.pdf.
    """
    from bottle import route, static_file, default_app
    from gevent.pywsgi import WSGIServer
    cachedir = os.path.abspath(cachedir)

    @route('/cache/:filename#.*#')
    def server_static(filename):
        served = static_file(filename, root=cachedir, mimetype="application/octet-stream")
        if filename.endswith(".rl"):
            served.headers["Content-Disposition"] = "inline; filename=collection.pdf"
        return served

    srv = WSGIServer(("", port), default_app())
    srv.start()
    return srv
Beispiel #11
0
    def handle(self, *args, **options):
        try:
            host = port = None

            if len(args) > 0:
                host = args[0]
                if ':' in host:
                    host, port = host.rsplit(':')

            if host is None:
                host = '0.0.0.0'
            if port is None:
                port = 80

            # Monkey Patch for Gevent
            monkey.patch_all()

            if not options['disable_psycogreen']:
                # Monkey Patch for Psycopg2 using psycogreen
                import psycogreen.gevent
                psycogreen.gevent.patch_psycopg()

            application = bootstrap.get_wsgi_application()

            if options['disable_socketio']:
                from gevent.pywsgi import WSGIServer
                server = WSGIServer((host, int(port)), application)
            else:
                from socketio.server import SocketIOServer
                server = SocketIOServer((host, int(port)), application, resource="socket.io")

            print 'Starting server on {host}:{port} with {workers} workers.'.format(
                host=host,
                port=port,
                workers=options['child_workers']
            )

            server.start()

            print 'Listening on http://{host}:{port}'.format(host=host, port=port)

            for i in range(options['child_workers']):
                pid = fork()
                if pid == 0:
                    break

            server.serve_forever()

        except Exception, ex:
            raise CommandError("Exception occurred during gevent wsgi process startup.", ex)
Beispiel #12
0
def serve_application(app, serve_forever=True, get_next_message=None):
    """Start *app* on the default port and hook up online-learning IO.

    Blocks in serve_forever() when *serve_forever* is true; always
    returns the server object.
    """
    server_instance = WSGIServer(('0.0.0.0', DEFAULT_SERVER_PORT), app)
    logger.info("Rasa Core server is up and running on "
                "{}".format(DEFAULT_SERVER_URL))
    server_instance.start()

    # The online-learning loop talks to the server we just started and
    # can stop it via the callback.
    endpoint = EndpointConfig(url=DEFAULT_SERVER_URL)
    start_online_learning_io(endpoint, server_instance.stop, get_next_message)

    if not serve_forever:
        return server_instance
    try:
        server_instance.serve_forever()
    except Exception as exc:
        logger.exception(exc)
    return server_instance
Beispiel #13
0
class HttpServer(Actor):
    """Actor that owns a gevent WSGIServer and dispatches each HTTP
    request to a responder chosen by regex-matching the path.

    Each WSGI request becomes a Request bound to a Channel and is sent
    to this actor as a ('handle', req) message; the response body is
    streamed back through the channel.
    """

    def pre_start(self, address, responders, default_content_type='text/html'):
        # responders: iterable of (regex_pattern, responder) pairs.
        self.responders = responders
        self.default_content_type = default_content_type
        self.server = WSGIServer(address, self.handle_wsgi_request)
        self.server.start()

    def post_stop(self):
        self.server.stop()

    def handle_wsgi_request(self, env, start_response):
        # Runs in the server's greenlet: hand the request to the actor
        # and stream whatever it writes into the channel.
        ch = Channel()
        req = Request(ch, env, start_response, content_type=self.default_content_type)
        self << ('handle', req)
        return response_stream(ch)

    def receive(self, msg):
        if ('handle', ANY) == msg:
            _, req = msg
            try:
                responder = self.get_responder(req.env['PATH_INFO'])
                if responder:
                    responder, args, kwargs = responder
                    # One short-lived child actor per request.
                    self.spawn(RequestHandler.using(req, responder, args, kwargs))
                else:
                    req.start_response('404 Not Found', [('Content-Type', 'text/html')])
                    req.write('<h1>404 Not Found</h1>\n')
                    req.write('The page you tried to reach could not be found.\n')
                    req.close()
            except:
                # Best-effort 500 before re-raising into the actor runtime.
                _send_500(req)
                raise

        elif 'get-addr' == msg:
            self.reply(self.server.address)
        else:
            raise Unhandled

    def get_responder(self, path):
        """Return (responder, positional_args, kwargs) for the first
        pattern matching *path*, or None (implicit) if none match.

        NOTE(review): positional groups are dropped whenever any named
        group is present — the expression binds as
        ``(args if not kwargs else ())`` — presumably to avoid passing
        the same capture twice; confirm this is intended.
        """
        for pattern, responder in self.responders:
            m = re.compile(pattern).match(path)
            if m:
                args = m.groups()
                kwargs = m.groupdict()
                return responder, args if not kwargs else (), kwargs
Beispiel #14
0
class WebService(Service):
    """RPC service with Web server capabilities.

    The WSGI application is an onion of middlewares around a tornado
    WSGIApplication: static-file serving, optional /rpc dispatch and
    optional proxy header fix-up.
    """
    def __init__(self, listen_port, handlers, parameters, shard=0,
                 listen_address=""):
        super(WebService, self).__init__(shard)

        # Pop our own options so only tornado settings remain in
        # *parameters* when it is forwarded below.
        static_files = parameters.pop('static_files', [])
        rpc_enabled = parameters.pop('rpc_enabled', False)
        is_proxy_used = parameters.pop('is_proxy_used', False)

        self.wsgi_app = tornado.wsgi.WSGIApplication(handlers, **parameters)
        self.wsgi_app.service = self

        # Each entry wraps the previous app, adding another /static
        # source; later entries therefore take priority.
        for entry in static_files:
            self.wsgi_app = SharedDataMiddleware(
                self.wsgi_app, {"/static": entry})

        if rpc_enabled:
            self.wsgi_app = DispatcherMiddleware(
                self.wsgi_app, {"/rpc": RPCMiddleware(self)})

        # If is_proxy_used is set to True we'll use the content of the
        # X-Forwarded-For HTTP header (if provided) to determine the
        # client IP address, ignoring the one the request came from.
        # This allows to use the IP lock behind a proxy. Activate it
        # only if all requests come from a trusted source (if clients
        # were allowed to directlty communicate with the server they
        # could fake their IP and compromise the security of IP lock).
        if is_proxy_used:
            self.wsgi_app = ProxyFix(self.wsgi_app)

        self.web_server = WSGIServer((listen_address, listen_port),
                                     self.wsgi_app)

    def run(self):
        """Start the WebService.

        Both the WSGI server and the RPC server are started.

        """
        self.web_server.start()
        Service.run(self)
        # Service.run() returned: shut the HTTP side down too.
        self.web_server.stop()
Beispiel #15
0
class Web(object):
    """
        Dissonance's WSGI server. Routes requests to their appropriate module. See `dissonance.module.Module.app`
        for more details.
    """
    _url_map = Map([
        Rule('/<module>/', endpoint='module', defaults={'rest': ''}),
        Rule('/<module>/<path:rest>', endpoint='module')
    ])

    def __init__(self, client, opts):
        self._client = client
        self._opts = opts
        host = self._opts['listen_host']
        port = int(self._opts['listen_port'])
        self._bind_addr = host, port
        self._server = WSGIServer(self._bind_addr, self._wsgi_app)

    def _wsgi_app(self, environ, start_response):
        # Match the URL and dispatch to the corresponding _handle_*
        # method; HTTP errors (including NotMatched -> 404) render
        # themselves as WSGI responses.
        adapter = self._url_map.bind_to_environ(environ)
        try:
            endpoint, values = adapter.match()
            handler = getattr(self, '_handle_%s' % endpoint)
            return handler(values, environ, start_response)

        except HTTPException as exc:
            return exc(environ, start_response)

    def _handle_module(self, args, environ, start_response):
        name = args['module']
        module = self._client.modules.get_module(name)

        if not (module and module.is_web):
            return NotFound()(environ, start_response)

        # Shift the module prefix from PATH_INFO onto SCRIPT_NAME so
        # the module's app sees paths relative to its mount point.
        environ['SCRIPT_NAME'] = environ.get('SCRIPT_NAME', '') + '/' + name
        environ['PATH_INFO'] = args['rest']
        return module.app(environ, start_response)

    def start(self):
        logger.info("Starting web server on %s:%s", *self._bind_addr)
        self._server.start()

    def stop(self):
        logger.info("Stopping web server on %s:%s", *self._bind_addr)
        self._server.stop()
Beispiel #16
0
class Frontend(object):
    """Pyramid-based frontend served by gevent's WSGI server."""

    def start(self, start_wsgi_server=False, forever=False):
        """
        Start the frontend's greenlets.

        If *start_wsgi_server* is True, also create the frontend's main WSGI
        application and serve it with gevent's WSGI server.
        """
        if not start_wsgi_server:
            self.wsgi_server = None
            return

        debug_settings = {
            "pyramid.debug_authorization": "true",
            "pyramid.debug_routematch": "true",
            "pyramid.reload_templates": "true",
            "pyramid.debug_notfound": "true",
        }
        wsgi_app = self.create_wsgi_app(debug_settings)
        logfile = open("/tmp/jarvis.log", "w")

        self.wsgi_server = WSGIServer(("127.0.0.1", 9017), wsgi_app, log=logfile)
        if forever:
            self.wsgi_server.serve_forever()
        else:
            self.wsgi_server.start()

    def stop(self):
        if self.wsgi_server is not None:
            self.wsgi_server.stop()

    def create_wsgi_app(self, settings):
        # Build the Pyramid application: routes, static media, then
        # scan for view configuration.
        config = Configurator(settings=settings)
        config.add_route("state_update", "/state/update/{id}/")
        config.add_route("state_stream_json", "/state/stream/json/{session_id}/")
        config.add_route("state_stream_eventsource", "/state/stream/eventsource/{session_id}/")
        media_dir = op.join(op.dirname(__file__), "media")
        config.add_static_view(name="static", path=media_dir)
        config.scan()

        return config.make_wsgi_app()
class ContractingUserTestCase(unittest.TestCase):
    """ContractingClient tests against a local Bottle stub API served
    on port 20602."""
    def setUp(self):
        #self._testMethodName
        # Fresh stub server and authenticated client for every test.
        self.app = Bottle()

        setup_routing(self.app)
        self.server = WSGIServer(('localhost', 20602), self.app, log=None)
        self.server.start()
        self.client = ContractingClient(API_KEY,  host_url=HOST_URL,
                                        api_version=API_VERSION)

        # Contract fixture, with our API key injected as access token.
        with open(ROOT + 'contract_' + TEST_CONTRACT_KEYS.contract_id + '.json') as contract:
            self.contract = munchify(load(contract))
            self.contract.update({'access':{"token": API_KEY}})

    def tearDown(self):
        self.server.stop()

    ###########################################################################
    #             CREATE ITEM TEST
    ###########################################################################

    def test_create_contract(self):
        setup_routing(self.app, routs=["contract_create"])
        contract = munchify({'data': 'contract'})
        self.client.create_contract(contract)

    ###########################################################################
    #             DOCUMENTS FILE TEST
    ###########################################################################

    def test_upload_contract_document(self):
        setup_routing(self.app, routs=["contract_document_create"])
        # In-memory file standing in for an uploaded document.
        file_ = StringIO()
        file_.name = 'test_document.txt'
        file_.write("test upload contract document text data")
        file_.seek(0)
        doc = self.client.upload_document(file_, self.contract)
        self.assertEqual(doc.data.title, file_.name)
        self.assertEqual(doc.data.id, TEST_CONTRACT_KEYS.new_document_id)
        file_.close()
Beispiel #18
0
def start_server(manager, host='0.0.0.0', port=8080):
    """
    Start the server on http://{host}:{port} and return
    """
    # TODO: Ewwww, globals :(
    # NOTE(review): presumably the request handlers read MANAGER at
    # module level — confirm before refactoring it away.
    global MANAGER
    MANAGER = manager

    # Construct & start the server
    # WebSocketHandler lets *app* serve WebSocket upgrades as well as
    # plain HTTP.
    server = WSGIServer(
        (host, port),
        app,
        handler_class=WebSocketHandler
    )
    print 'NOTE: You must have a websocket display setup for display in browser to work!'
    print 'Starting server on {host}:{port}...'.format(
        host=host,
        port=port
    )
    # Non-blocking start; the caller owns the gevent loop.
    server.start()
Beispiel #19
0
class HTTPProxyServer(object):
    """Thin wrapper around a gevent WSGIServer running the proxy app.

    Connections are handled by ProxyHandler and limited to a pool of
    500 concurrent greenlets.
    """

    def __init__(self, ip, port, app, log='default'):
        self.ip = ip
        self.port = port
        self.app = app
        self.server = WSGIServer(
            (self.ip, self.port),
            application=self.app.application,
            spawn=Pool(500),
            handler_class=ProxyHandler,
            log=log,
        )

    def start(self):
        # Begin accepting without blocking the caller.
        self.server.start()

    def run(self):
        # Blocking variant of start().
        self.server.serve_forever()

    def stop(self):
        self.server.stop()

    @property
    def closed(self):
        # Mirrors the underlying server's closed state.
        return self.server.closed
Beispiel #20
0
def main():
    """Wire up the ZMQ publisher, a WebSocket relay on :9999 and the
    HTTP app on :8080, then block consuming the ZMQ subscription."""
    app.logger.info("setting context")
    context = zmq.Context()

    gevent.spawn(zmq_qry_pub, context)

    # websocket server: copies inproc zmq messages to websocket
    ws_server = WSGIServer(('', 9999),
                           WebSocketApp(context),
                           handler_class=WebSocketHandler)
    http_server = WSGIServer(('', 8080), app)

    ws_server.start()
    http_server.start()

    # Blocks for the life of the process.
    zmq_sub(context)
class ViewerTenderTestCase(unittest.TestCase):
    """Read-only TendersClient tests against a local Bottle stub API
    on port 20602 (anonymous client, empty API key)."""
    def setUp(self):
        #self._testMethodName
        # Fresh stub server + client per test; each test re-registers
        # only the routes it needs via setup_routing().
        self.app = Bottle()
        setup_routing(self.app)
        self.server = WSGIServer(('localhost', 20602), self.app, log=None)
        self.server.start()

        self.client = tender_client.TendersClient('', host_url=HOST_URL, api_version=API_VERSION)

        # Fixtures: full listing and a single tender document.
        with open(ROOT + 'tenders.json') as tenders:
            self.tenders = munchify(load(tenders))
        with open(ROOT + TEST_KEYS.tender_id + '.json') as tender:
            self.tender = munchify(load(tender))

    def tearDown(self):
        self.server.stop()


    def test_get_tenders(self):
        setup_routing(self.app, routs=["tenders"])
        tenders = self.client.get_tenders()
        self.assertIsInstance(tenders, Iterable)
        self.assertEqual(tenders, self.tenders.data)

    def test_get_tender(self):
        setup_routing(self.app, routs=["tender"])
        tender = self.client.get_tender(TEST_KEYS.tender_id)
        self.assertEqual(tender, self.tender)

    def test_get_tender_location_error(self):
        # Unknown id: expect the stub's location-error payload back.
        setup_routing(self.app, routs=["tender"])
        tender = self.client.get_tender(TEST_KEYS.error_id)
        self.assertEqual(tender, munchify(loads(location_error('tender'))))

    def test_offset_error(self):
        # 'offset_error' route: the listing must still yield the fixture data.
        setup_routing(self.app, routs=['offset_error'])
        tenders = self.client.get_tenders()
        self.assertIsInstance(tenders, Iterable)
        self.assertEqual(tenders, self.tenders.data)
class ViewerPlanTestCase(unittest.TestCase):
    """Read-only PlansClient tests against a local Bottle stub API on
    port 20602 (anonymous client, empty API key)."""
    def setUp(self):
        # Fresh stub server + client per test; each test re-registers
        # only the routes it needs via setup_routing().
        self.app = Bottle()
        setup_routing(self.app)
        self.server = WSGIServer(('localhost', 20602), self.app, log=None)
        self.server.start()

        self.client = plan_client.PlansClient('', host_url=HOST_URL, api_version=API_VERSION)

        # Fixtures: full listing and a single plan document.
        with open(ROOT + 'plans.json') as plans:
            self.plans = munchify(load(plans))
        with open(ROOT + 'plan_' + TEST_PLAN_KEYS.plan_id + '.json') as plan:
            self.plan = munchify(load(plan))

    def tearDown(self):
        self.server.stop()


    def test_get_plans(self):
        setup_routing(self.app, routs=["plans"])
        plans = self.client.get_plans()
        self.assertIsInstance(plans, Iterable)
        self.assertEqual(plans, self.plans.data)

    def test_get_plan(self):
        setup_routing(self.app, routs=["plan"])
        plan = self.client.get_plan(TEST_PLAN_KEYS.plan_id)
        self.assertEqual(plan, self.plan)

    def test_get_plan_location_error(self):
        # Unknown id: expect the stub's location-error payload back.
        setup_routing(self.app, routs=["plan"])
        tender = self.client.get_plan(TEST_PLAN_KEYS.error_id)
        self.assertEqual(tender, munchify(loads(location_error('plan'))))

    def test_offset_error(self):
        # 'plan_offset_error' route: listing must still yield the fixture data.
        setup_routing(self.app, routs=['plan_offset_error'])
        plans = self.client.get_plans()
        self.assertIsInstance(plans, Iterable)
        self.assertEqual(plans, self.plans.data)
Beispiel #23
0
    def handle_channels(self, channels,
                        http_port=constants.DEFAULT_SERVER_PORT,
                        serve_forever=True):
        # type: (List[InputChannel], int, bool) -> WSGIServer
        """Spin up a Flask webserver for the given input channels.

        Each channel's webhook is registered under ``/webhooks/`` and
        served on ``0.0.0.0:http_port``.  Blocks when *serve_forever*
        is true; always returns the WSGIServer.
        """
        from flask import Flask

        webhook_app = Flask(__name__)
        rasa_core.channels.channel.register(channels,
                                            webhook_app,
                                            self.handle_message,
                                            route="/webhooks/")

        server = WSGIServer(('0.0.0.0', http_port), webhook_app)
        server.start()

        if serve_forever:
            server.serve_forever()
        return server
Beispiel #24
0
class WebService(Service):
    """RPC service with Web server capabilities.

    """

    def __init__(self, listen_port, handlers, parameters, shard=0,
                 listen_address=""):
        super().__init__(shard)

        # Pop our own options so only tornado settings remain in
        # *parameters* when it is forwarded below.
        static_files = parameters.pop('static_files', [])
        rpc_enabled = parameters.pop('rpc_enabled', False)
        rpc_auth = parameters.pop('rpc_auth', None)
        auth_middleware = parameters.pop('auth_middleware', None)
        is_proxy_used = parameters.pop('is_proxy_used', None)
        num_proxies_used = parameters.pop('num_proxies_used', None)

        self.wsgi_app = tornado.wsgi.WSGIApplication(handlers, **parameters)
        self.wsgi_app.service = self

        # Each entry wraps the previous app, adding another /static
        # source; later entries therefore take priority.
        for entry in static_files:
            # TODO If we will introduce a flag to trigger autoreload in
            # Jinja2 templates, use it to disable the cache arg here.
            self.wsgi_app = SharedDataMiddleware(
                self.wsgi_app, {"/static": entry},
                cache=True, cache_timeout=SECONDS_IN_A_YEAR,
                fallback_mimetype="application/octet-stream")

        self.file_cacher = FileCacher(self)
        self.wsgi_app = FileServerMiddleware(self.file_cacher, self.wsgi_app)

        if rpc_enabled:
            self.wsgi_app = DispatcherMiddleware(
                self.wsgi_app, {"/rpc": RPCMiddleware(self, rpc_auth)})

        # The authentication middleware needs to be applied before the
        # ProxyFix as otherwise the remote address it gets is the one
        # of the proxy.
        if auth_middleware is not None:
            self.wsgi_app = auth_middleware(self.wsgi_app)
            self.auth_handler = self.wsgi_app

        # If we are behind one or more proxies, we'll use the content
        # of the X-Forwarded-For HTTP header (if provided) to determine
        # the client IP address, ignoring the one the request came from.
        # This allows to use the IP lock behind a proxy. Activate it
        # only if all requests come from a trusted source (if clients
        # were allowed to directlty communicate with the server they
        # could fake their IP and compromise the security of IP lock).
        # The legacy boolean is_proxy_used maps to a count of 0 or 1.
        if num_proxies_used is None:
            if is_proxy_used:
                num_proxies_used = 1
            else:
                num_proxies_used = 0

        if num_proxies_used > 0:
            self.wsgi_app = ProxyFix(self.wsgi_app, num_proxies_used)

        # Serve *self* (not self.wsgi_app directly) so __call__ always
        # dispatches through the current middleware chain.
        self.web_server = WSGIServer((listen_address, listen_port), self)

    def __call__(self, environ, start_response):
        """Execute this instance as a WSGI application.

        See the PEP for the meaning of parameters. The separation of
        __call__ and wsgi_app eases the insertion of middlewares.

        """
        return self.wsgi_app(environ, start_response)

    def run(self):
        """Start the WebService.

        Both the WSGI server and the RPC server are started.

        """
        self.web_server.start()
        Service.run(self)
        # Service.run() returned: shut the HTTP side down too.
        self.web_server.stop()
Beispiel #25
0
class HttpServer(threading.Thread):
    """Serve a WSGI ``api`` application in a daemon background thread.

    The API class is instantiated on the serving thread itself. Callers
    wait on ``self.started`` and then inspect ``self.failed`` to learn
    whether startup succeeded.
    """

    daemon = True  # causes server to stop on main thread quitting

    def __init__(self,
                 api,
                 port=0,
                 host='0.0.0.0',
                 api_args=None,
                 api_kwargs=None,
                 ssl=None):
        # Avoid shared mutable defaults: build fresh containers per call.
        self.api_args = [] if api_args is None else api_args
        self.api_kwargs = {} if api_kwargs is None else api_kwargs
        self.api_class = api
        self.port = port  # 0 means "pick a free ephemeral port"
        self.host = host
        self.started = threading.Event()  # set once startup succeeded/failed
        self.failed = None  # holds the socket.error on startup failure
        self.ssl = ssl  # optional dict of extra WSGIServer ssl kwargs

        threading.Thread.__init__(self)

    def run(self):
        """Thread body: instantiate the API and serve until stopped."""
        import errno  # local import: only needed for error classification

        self.api = self.api_class(*self.api_args, **self.api_kwargs)

        # Remember whether the caller asked for an ephemeral port, so we
        # know whether an "address in use" error is retryable.
        port = self.port

        while True:
            if self.port == 0:
                # Ask the OS for a free port, then release it. NOTE:
                # inherently racy -- another process may grab the port
                # before WSGIServer binds it; the retry below covers that.
                sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                sock.bind(('localhost', 0))
                self.port = sock.getsockname()[1]
                sock.close()

            if self.port == 0:
                print("ERROR! Port still 0!")
                break

            try:
                if self.ssl is not None:
                    self.server = WSGIServer((self.host, self.port),
                                             self.api.app,
                                             handler_class=WebSocketHandler,
                                             **self.ssl)
                else:
                    self.server = WSGIServer((self.host, self.port),
                                             self.api.app,
                                             handler_class=WebSocketHandler)

                self.server.start()
                self.__setstarted()
                self.server.serve_forever()
            except socket.error as e:
                # BUGFIX: the original compared against the literal 48,
                # which is EADDRINUSE on macOS only (Linux uses 98).
                if e.errno == errno.EADDRINUSE:
                    if port != 0:
                        # A fixed port was requested; nothing to retry.
                        raise
                    else:
                        # Our ephemeral port was stolen; pick a new one.
                        self.port = port
                        continue
                else:
                    self.failed = e
                    self.__setstarted()
            break

    def stop(self):
        """Ask the API to shut down (which ends the serving thread)."""
        self.api.stop()

    def __setstarted(self):
        """Publish the bound port and wake up waiters on ``started``."""
        self.api.port = self.port
        self.started.set()
        if self.failed is None:
            print("HttpServer running")
        else:
            print("HttpServer failed to start")
Beispiel #26
0
class server:
    """Supervisor for the Calibre-Web HTTP front-end.

    Runs either a gevent WSGIServer (preferred, when gevent is
    installed) or a Tornado HTTPServer, and performs shutdown or
    in-place restart when triggered by a signal or by setRestartTyp().
    """

    # Active server object: gevent WSGIServer, or the Tornado IOLoop.
    wsgiserver = None
    # When True, the process re-execs itself on shutdown.
    restart = False

    def __init__(self):
        # Route SIGINT/SIGTERM to a graceful shutdown.
        signal.signal(signal.SIGINT, self.killServer)
        signal.signal(signal.SIGTERM, self.killServer)

    def start_gevent(self):
        """Serve the app under gevent; blocks until the server stops."""
        try:
            ssl_args = dict()
            certfile_path = web.ub.config.get_config_certfile()
            keyfile_path = web.ub.config.get_config_keyfile()
            if certfile_path and keyfile_path:
                # Only enable TLS when both configured files exist.
                if os.path.isfile(certfile_path) and os.path.isfile(
                        keyfile_path):
                    ssl_args = {
                        "certfile": certfile_path,
                        "keyfile": keyfile_path
                    }
                else:
                    web.app.logger.info(
                        'The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl. Cert path: %s | Key path: %s'
                        % (certfile_path, keyfile_path))
            if os.name == 'nt':
                # Windows cannot bind the dual-stack wildcard ''.
                self.wsgiserver = WSGIServer(
                    ('0.0.0.0', web.ub.config.config_port),
                    web.app,
                    spawn=Pool(),
                    **ssl_args)
            else:
                self.wsgiserver = WSGIServer(('', web.ub.config.config_port),
                                             web.app,
                                             spawn=Pool(),
                                             **ssl_args)
            # Module-level link works around a circular import under
            # Python 3 (see the ToDo notes in stopServer/startServer).
            web.py3_gevent_link = self.wsgiserver
            self.wsgiserver.serve_forever()

        except SocketError:
            # Dual-stack bind failed: retry on IPv4 only.
            try:
                web.app.logger.info(
                    'Unable to listen on \'\', trying on IPv4 only...')
                self.wsgiserver = WSGIServer(
                    ('0.0.0.0', web.ub.config.config_port),
                    web.app,
                    spawn=Pool(),
                    **ssl_args)
                web.py3_gevent_link = self.wsgiserver
                self.wsgiserver.serve_forever()
            except (OSError, SocketError) as e:
                web.app.logger.info("Error starting server: %s" % e.strerror)
                print("Error starting server: %s" % e.strerror)
                web.helper.global_WorkerThread.stop()
                sys.exit(1)
        except Exception:
            # NOTE(review): this swallows the traceback entirely;
            # logger.exception would preserve it -- confirm intent.
            web.app.logger.info("Unknown error while starting gevent")

    def startServer(self):
        """Start the configured server, then shut down or restart."""
        if gevent_present:
            web.app.logger.info('Starting Gevent server')
            # leave subprocess out to allow forking for fetchers and processors
            self.start_gevent()
        else:
            try:
                ssl = None
                web.app.logger.info('Starting Tornado server')
                certfile_path = web.ub.config.get_config_certfile()
                keyfile_path = web.ub.config.get_config_keyfile()
                if certfile_path and keyfile_path:
                    # Only enable TLS when both configured files exist.
                    if os.path.isfile(certfile_path) and os.path.isfile(
                            keyfile_path):
                        ssl = {
                            "certfile": certfile_path,
                            "keyfile": keyfile_path
                        }
                    else:
                        web.app.logger.info(
                            'The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl. Cert path: %s | Key path: %s'
                            % (certfile_path, keyfile_path))

                # Max Buffersize set to 200MB
                http_server = HTTPServer(WSGIContainer(web.app),
                                         max_buffer_size=209700000,
                                         ssl_options=ssl)
                http_server.listen(web.ub.config.config_port)
                # For Tornado, "wsgiserver" actually holds the IOLoop.
                self.wsgiserver = IOLoop.instance()
                self.wsgiserver.start()
                # wait for stop signal
                self.wsgiserver.close(True)
            except SocketError as e:
                web.app.logger.info("Error starting server: %s" % e.strerror)
                print("Error starting server: %s" % e.strerror)
                web.helper.global_WorkerThread.stop()
                sys.exit(1)

        # ToDo: Somehow caused by circular import under python3 refactor
        if sys.version_info > (3, 0):
            self.restart = web.py3_restart_Typ
        if self.restart == True:
            web.app.logger.info("Performing restart of Calibre-Web")
            web.helper.global_WorkerThread.stop()
            if os.name == 'nt':
                # Windows: quote every argument before re-exec.
                arguments = ["\"" + sys.executable + "\""]
                for e in sys.argv:
                    arguments.append("\"" + e + "\"")
                os.execv(sys.executable, arguments)
            else:
                os.execl(sys.executable, sys.executable, *sys.argv)
        else:
            web.app.logger.info("Performing shutdown of Calibre-Web")
            web.helper.global_WorkerThread.stop()
        sys.exit(0)

    def setRestartTyp(self, starttyp):
        """Record whether the next shutdown should restart the process."""
        self.restart = starttyp
        # ToDo: Somehow caused by circular import under python3 refactor
        web.py3_restart_Typ = starttyp

    def killServer(self, signum, frame):
        """Signal handler: delegate to stopServer()."""
        self.stopServer()

    def stopServer(self):
        """Stop whichever server implementation is currently running."""
        # ToDo: Somehow caused by circular import under python3 refactor
        if sys.version_info > (3, 0):
            if not self.wsgiserver:
                # Recover the server reference via the module-level link.
                if gevent_present:
                    self.wsgiserver = web.py3_gevent_link
                else:
                    self.wsgiserver = IOLoop.instance()
        if self.wsgiserver:
            if gevent_present:
                self.wsgiserver.close()
            else:
                # Tornado: schedule stop() on the IOLoop's own thread.
                self.wsgiserver.add_callback(self.wsgiserver.stop)

    @staticmethod
    def getNameVersion():
        """Return {implementation-name: version-string} for display."""
        if gevent_present:
            return {'Gevent': 'v' + geventVersion}
        else:
            return {'Tornado': 'v' + tornadoVersion}
Beispiel #27
0
def run(debug, no_browser, m, mu, mc, f):
    """
    Sacredboard.

\b
Sacredboard is a monitoring dashboard for Sacred.
Homepage: http://github.com/chovanecm/sacredboard

Example usage:

\b
sacredboard -m sacred
    Starts Sacredboard on default port (5000) and connects to
    a local MongoDB database called 'sacred'. Opens web browser.
    Note: MongoDB must be listening on localhost.
\b
sacredboard -m 192.168.1.1:27017:sacred
    Starts Sacredboard on default port (5000) and connects to
    a MongoDB database running on 192.168.1.1 on port 27017
    to a database called 'sacred'. Opens web browser.
\b
sacredboard -mu mongodb://user:pwd@host/admin?authMechanism=SCRAM-SHA-1 sacred
    Starts Sacredboard on default port (5000) and connects to
    a MongoDB database running on localhost on port 27017
    to a database called 'sacred'. Opens web browser.

\b
sacredboard -m sacred -mc default.runs
    Starts Sacredboard on default port (5000) and connects to
    a local MongoDB database called 'sacred' and uses the Sacred's 0.6
    default collection 'default.runs' to search the runs in.
    Opens web browser.
    Note: MongoDB must be listening on localhost.

    """
    # Pick the data backend: MongoDB options win over file storage.
    if m or mu != (None, None):
        add_mongo_config(app, m, mu, mc)
        app.config["data"].connect()
    elif f:
        app.config["data"] = FileStorage(f)
    else:
        print("Must specify either a mongodb instance or " +
              "a path to a file storage.\nRun sacredboard --help "
              "for more information.",
              file=sys.stderr)
        sys.exit(1)

    app.config['DEBUG'] = debug
    app.debug = debug
    jinja_filters.setup_filters(app)
    routes.setup_routes(app)
    metrics.initialize(app)

    if debug:
        # Flask's builtin server with reloader/debugger enabled.
        app.run(host="0.0.0.0", debug=True)
    else:
        # Try ports 5000-5049 until one binds successfully.
        for port in range(5000, 5050):
            http_server = WSGIServer(('0.0.0.0', port), app)
            try:
                http_server.start()
            except OSError:
                # Port already taken -- try the next one.
                continue
            print("Starting sacredboard on port %d" % port)
            if not no_browser:
                click.launch("http://127.0.0.1:%d" % port)
            http_server.serve_forever()
            break
        else:
            # BUGFIX: previously fell through silently when every port
            # in the range was taken; report the failure explicitly.
            print("Could not bind any port in range 5000-5049.",
                  file=sys.stderr)
            sys.exit(1)
Beispiel #28
0
class server:
    """Supervisor for the Calibre-Web HTTP front-end.

    Runs either a gevent WSGIServer (preferred, when gevent is
    installed) or a Tornado HTTPServer, and performs shutdown or
    in-place restart when triggered by a signal or by setRestartTyp().
    """

    # Active server object: gevent WSGIServer, or the Tornado IOLoop.
    wsgiserver = None
    # When True, the process re-execs itself on shutdown.
    restart = False

    def __init__(self):
        # Route SIGINT/SIGTERM to a graceful shutdown.
        signal.signal(signal.SIGINT, self.killServer)
        signal.signal(signal.SIGTERM, self.killServer)

    def start_gevent(self):
        """Serve the app under gevent; blocks until the server stops."""
        try:
            ssl_args = dict()
            if web.ub.config.get_config_certfile() and web.ub.config.get_config_keyfile():
                ssl_args = {"certfile": web.ub.config.get_config_certfile(),
                            "keyfile": web.ub.config.get_config_keyfile()}
            if os.name == 'nt':
                # Windows cannot bind the dual-stack wildcard ''.
                self.wsgiserver = WSGIServer(('0.0.0.0', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
            else:
                self.wsgiserver = WSGIServer(('', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
            self.wsgiserver.serve_forever()
        except SocketError:
            # Dual-stack bind failed: retry on IPv4 only.
            try:
                web.app.logger.info('Unable to listen on \'\', trying on IPv4 only...')
                self.wsgiserver = WSGIServer(('0.0.0.0', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
                self.wsgiserver.serve_forever()
            except (OSError, SocketError) as e:
                web.app.logger.info("Error starting server: %s" % e.strerror)
                print("Error starting server: %s" % e.strerror)
                web.helper.global_WorkerThread.stop()
                sys.exit(1)
        except Exception:
            # NOTE(review): swallows the traceback; logger.exception
            # would preserve it -- confirm intent.
            web.app.logger.info("Unknown error while starting gevent")

    def startServer(self):
        """Start the configured server, then shut down or restart."""
        if gevent_present:
            web.app.logger.info('Starting Gevent server')
            # leave subprocess out to allow forking for fetchers and processors
            self.start_gevent()
        else:
            try:
                web.app.logger.info('Starting Tornado server')
                if web.ub.config.get_config_certfile() and web.ub.config.get_config_keyfile():
                    ssl = {"certfile": web.ub.config.get_config_certfile(),
                           "keyfile": web.ub.config.get_config_keyfile()}
                else:
                    ssl = None
                # Max Buffersize set to 200MB
                http_server = HTTPServer(WSGIContainer(web.app),
                                         max_buffer_size=209700000,
                                         ssl_options=ssl)
                http_server.listen(web.ub.config.config_port)
                # For Tornado, "wsgiserver" actually holds the IOLoop.
                self.wsgiserver = IOLoop.instance()
                self.wsgiserver.start()
                # wait for stop signal
                self.wsgiserver.close(True)
            except SocketError as e:
                web.app.logger.info("Error starting server: %s" % e.strerror)
                print("Error starting server: %s" % e.strerror)
                web.helper.global_WorkerThread.stop()
                sys.exit(1)

        if self.restart == True:
            web.app.logger.info("Performing restart of Calibre-Web")
            web.helper.global_WorkerThread.stop()
            if os.name == 'nt':
                # Windows: quote every argument before re-exec.
                arguments = ["\"" + sys.executable + "\""]
                for e in sys.argv:
                    arguments.append("\"" + e + "\"")
                os.execv(sys.executable, arguments)
            else:
                os.execl(sys.executable, sys.executable, *sys.argv)
        else:
            web.app.logger.info("Performing shutdown of Calibre-Web")
            web.helper.global_WorkerThread.stop()
        sys.exit(0)

    def setRestartTyp(self, starttyp):
        """Record whether the next shutdown should restart the process."""
        self.restart = starttyp

    def killServer(self, signum, frame):
        """Signal handler: delegate to stopServer()."""
        self.stopServer()

    def stopServer(self):
        """Stop whichever server implementation is currently running."""
        # BUGFIX: a signal delivered before the server is started would
        # hit wsgiserver == None and raise AttributeError; guard it.
        if self.wsgiserver:
            if gevent_present:
                self.wsgiserver.close()
            else:
                # Tornado: schedule stop() on the IOLoop's own thread.
                self.wsgiserver.add_callback(self.wsgiserver.stop)

    @staticmethod
    def getNameVersion():
        """Return {implementation-name: version-string} for display."""
        if gevent_present:
            return {'Gevent': 'v' + geventVersion}
        else:
            return {'Tornado': 'v' + tornadoVersion}
Beispiel #29
0
class WebInternalReader(object):
    def __init__(self):
        self._log = logging.getLogger("WebInternalReader")

        memcached_client = memcache.Client(_memcached_nodes)

        self._central_connection = get_central_connection()
        self._cluster_row = get_cluster_row(self._central_connection)
        self._node_local_connection = get_node_local_connection()
        self._deliverator = Deliverator()

        self._zeromq_context = zmq.Context()

        self._pull_server = GreenletPULLServer(
            self._zeromq_context, _web_internal_reader_pipeline_address,
            self._deliverator)
        self._pull_server.link_exception(self._unhandled_greenlet_exception)

        self._data_reader_clients = list()
        self._data_readers = list()
        for node_name, address in zip(_node_names, _data_reader_addresses):
            resilient_client = GreenletResilientClient(
                self._zeromq_context,
                node_name,
                address,
                _client_tag,
                _web_internal_reader_pipeline_address,
                self._deliverator,
                connect_messages=[])
            resilient_client.link_exception(self._unhandled_greenlet_exception)
            self._data_reader_clients.append(resilient_client)
            data_reader = DataReader(node_name, resilient_client)
            self._data_readers.append(data_reader)

        self._space_accounting_dealer_client = GreenletDealerClient(
            self._zeromq_context, _local_node_name,
            _space_accounting_server_address)
        self._space_accounting_dealer_client.link_exception(
            self._unhandled_greenlet_exception)

        push_client = GreenletPUSHClient(
            self._zeromq_context,
            _local_node_name,
            _space_accounting_pipeline_address,
        )

        self._accounting_client = SpaceAccountingClient(
            _local_node_name, self._space_accounting_dealer_client,
            push_client)

        self._event_push_client = EventPushClient(self._zeromq_context,
                                                  "web-internal-reader")

        # message sent to data readers telling them the server
        # is (re)starting, thereby invalidating any archvies or retrieved
        # that are in progress for this node
        timestamp = create_timestamp()
        self._event_push_client.info("web-reader-start",
                                     "web reader (re)start",
                                     timestamp_repr=repr(timestamp),
                                     source_node_name=_local_node_name)

        self._watcher = Watcher(_stats, self._data_reader_clients,
                                self._event_push_client)

        self.application = Application(memcached_client,
                                       self._central_connection,
                                       self._node_local_connection,
                                       self._cluster_row, self._data_readers,
                                       self._accounting_client,
                                       self._event_push_client, _stats)
        self.wsgi_server = WSGIServer(
            (_web_internal_reader_host, _web_internal_reader_port),
            application=self.application,
            backlog=_wsgi_backlog)

    def start(self):
        self._space_accounting_dealer_client.start()
        self._pull_server.start()
        self._watcher.start()
        for client in self._data_reader_clients:
            client.start()
        self.wsgi_server.start()

    def stop(self):
        self._log.info("stopping wsgi web server")
        self.wsgi_server.stop()
        self._accounting_client.close()
        self._log.debug("killing greenlets")
        self._space_accounting_dealer_client.kill()
        self._pull_server.kill()
        self._watcher.kill()
        for client in self._data_reader_clients:
            client.kill()
        self._log.debug("joining greenlets")
        self._space_accounting_dealer_client.join()
        self._pull_server.join()
        self._watcher.join()
        for client in self._data_reader_clients:
            client.join()
        self._log.debug("closing zmq")
        self._event_push_client.close()
        self._zeromq_context.term()
        self._log.info("closing database connections")
        self._central_connection.close()
        self._node_local_connection.close()

    def _unhandled_greenlet_exception(self, greenlet_object):
        try:
            greenlet_object.get()
        except Exception:
            self._log.exception(str(greenlet_object))
Beispiel #30
0
class OrionContextBroker(ResourceManagementXAE):
    def __init__(self,
                 orion_host="http://localhost:1026",
                 orion_api="v2",
                 labels=None,
                 accumulate_address=None,
                 *args,
                 **kw):
        super(OrionContextBroker, self).__init__(*args, **kw)
        if isinstance(labels, str):
            self.labels = {labels}
        elif hasattr(labels, '__iter__'):
            self.labels = set(labels)
        elif labels is None:
            self.labels = ["openmtc:sensor_data"]
        else:
            self.labels = None
        self._entity_names = {}
        self._subscription_endpoints = {}
        self._subscription_services = {}

        # accumulate address
        if not accumulate_address:
            accumulate_address = "http://" + self._get_auto_host(
                orion_host) + ":8080"

        # Orion API
        self._dry_run = not orion_host
        self.orion_api = OrionAPI(
            orion_host=orion_host,
            api_version=orion_api,
            accumulate_endpoint="{}/accumulate".format(accumulate_address))
        if not self._dry_run:
            self._dry_run = not self.orion_api.is_host_alive()

        # Subscription Sink for OCB
        self.app = Flask(__name__)
        self.app.add_url_rule('/accumulate',
                              'process_notification',
                              self.process_notification,
                              methods=["POST"])
        accumulate_ip, accumulate_port = urlparse(
            accumulate_address).netloc.rsplit(':', 1)
        self.server = WSGIServer(("0.0.0.0", int(accumulate_port)), self.app)
        self.server.start()

    @staticmethod
    def _get_auto_host(ep):
        try:
            import socket
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            netloc = urlparse(ep).netloc.split(':')
            s.connect((netloc[0], int(netloc[1])))
            host = s.getsockname()[0]
            s.close()
        except:
            host = "127.0.0.1"

        return host

    def process_notification(self):
        self.logger.debug("Got from Subscription {}".format(request.json))
        try:
            actuator = self.get_resource(
                self._subscription_endpoints[request.json["subscriptionId"]])
        except KeyError:
            # ignore not deleted old subscriptions
            pass
        else:
            self.push_content(actuator,
                              request.json["data"][0]["cmd"]["value"])
        return Response(status=200, headers={})

    def _on_register(self):
        self._discover_openmtc_ipe_entities()

    def _on_shutdown(self):
        for subscription_id, fiware_service in self._subscription_services.items(
        ):
            self.orion_api.unsubscribe(subscription_id, fiware_service)

    def _sensor_filter(self, sensor_info):
        if self.labels:
            return len(self.labels.intersection(
                sensor_info['sensor_labels'])) > 0
        else:
            return True

    @staticmethod
    def _get_entity_name(sensor_info):
        device_type = "sensor" if sensor_info.get("sensor_labels",
                                                  None) else "actuator"
        try:
            id_label = [
                x for x in sensor_info['{}_labels'.format(device_type)]
                if x.startswith('openmtc:id:')
            ].pop()
            cse_id, dev_id = re.sub('^openmtc:id:', '',
                                    id_label).split('/')[:2]
        except (IndexError, ValueError):
            cse_id = sensor_info['cse_id']
            dev_id = sensor_info['dev_name']
        try:
            f_s, e_pre = cse_id.split('~', 1)
        except ValueError:
            f_s = ''
            e_pre = cse_id
        return re.sub('[\W]', '_', f_s), '%s-%s' % (e_pre, dev_id)

    def _sensor_data_cb(self, sensor_info, sensor_data):
        if self._dry_run:
            return

        try:
            fiware_service, entity_name = self._entity_names[sensor_info['ID']]
        except KeyError:
            self._entity_names[sensor_info['ID']] = self._get_entity_name(
                sensor_info)
            fiware_service, entity_name = self._entity_names[sensor_info['ID']]
            self.orion_api.create_entity(entity_name,
                                         fiware_service=fiware_service)
        self.orion_api.update_attributes(entity_name,
                                         sensor_data,
                                         fiware_service=fiware_service)

    def _new_actuator(self, actuator_info):
        try:
            fiware_service, entity_name = self._entity_names[
                actuator_info['ID']]
        except KeyError:
            self._entity_names[actuator_info['ID']] = self._get_entity_name(
                actuator_info)
            fiware_service, entity_name = self._entity_names[
                actuator_info['ID']]
        self.logger.info("Create new Entity {} on Fiware Service {}".format(
            entity_name, fiware_service))

        if self._dry_run:
            return

        self.orion_api.create_entity(entity_name,
                                     fiware_service=fiware_service)
        data_dummy = {
            'v': "none",
            'bn': "none",
            'n': "cmd",
            'u': "none",
            't': "none"
        }
        self.orion_api.update_attributes(entity_name,
                                         data_dummy,
                                         fiware_service=fiware_service)

        subscription_id = self.orion_api.subscribe(
            entity_name, fiware_service=fiware_service)
        self._subscription_endpoints[subscription_id] = actuator_info['ID']
        self._subscription_services[subscription_id] = fiware_service
Beispiel #31
0
class PFSApi:
    # pylint: disable=too-many-instance-attributes
    # Nine is reasonable in this case.

    def __init__(
        self,
        pathfinding_service: PathfindingService,
        one_to_n_address: Address,
        operator: str,
        info_message: str = DEFAULT_INFO_MESSAGE,
        service_fee: TokenAmount = TokenAmount(0),
        debug_mode: bool = False,
    ) -> None:
        """Build the Flask app, mount it (together with /metrics)
        behind a dispatcher, and register every REST resource."""
        flask_app = Flask(__name__)

        # Root WSGI app: 404 everywhere except /metrics and API_PATH.
        self.flask_app = DispatcherMiddleware(
            NotFound(),
            {
                "/metrics": make_wsgi_app(registry=metrics.REGISTRY),
                API_PATH: flask_app.wsgi_app,
            },
        )

        self.api = ApiWithErrorHandler(flask_app)
        self.rest_server: Optional[WSGIServer] = None
        self.one_to_n_address = one_to_n_address
        self.pathfinding_service = pathfinding_service
        self.service_fee = service_fee
        self.operator = operator
        self.info_message = info_message

        # Enable cross origin requests
        @flask_app.after_request
        def after_request(response: Response) -> Response:  # pylint: disable=unused-variable
            headers = response.headers
            headers.add("Access-Control-Allow-Origin", "*")
            headers.add("Access-Control-Allow-Headers",
                        "Origin, Content-Type, Accept")
            headers.add("Access-Control-Allow-Methods", "GET,POST,OPTIONS")
            return response

        route_specs: List[Tuple[str, Resource, Dict, str]] = [
            (
                "/v1/<token_network_address>/paths",
                PathsResource,
                {"debug_mode": debug_mode},
                "paths",
            ),
            ("/v1/<token_network_address>/payment/iou", IOUResource, {},
             "payments"),
            ("/v1/<token_network_address>/feedback", FeedbackResource, {},
             "feedback"),
            (
                "/v1/<token_network_address>/suggest_partner",
                SuggestPartnerResource,
                {},
                "suggest_partner",
            ),
            ("/v1/online_addresses", OnlineAddressesResource, {},
             "online_addresses"),
            ("/v1/info", InfoResource, {}, "info"),
            ("/v2/info", InfoResource2, {}, "info2"),
            (
                "/v1/address/<checksummed_address>/metadata",
                AddressMetadataResource,
                {},
                "address_metadata",
            ),
        ]

        if debug_mode:
            log.warning(
                "The debug REST API is enabled. Don't do this on public nodes."
            )
            route_specs += [
                (
                    "/v1/_debug/routes/<token_network_address>/<source_address>",
                    cast(Resource, DebugPathResource),
                    {},
                    "debug1",
                ),
                (
                    "/v1/_debug/routes/<token_network_address>/<source_address>/<target_address>",  # noqa
                    DebugPathResource,
                    {},
                    "debug2",
                ),
                ("/v1/_debug/ious/<source_address>", DebugIOUResource, {},
                 "debug3"),
                ("/v1/_debug/stats", DebugStatsResource, {}, "debug4"),
            ]

        # Every resource receives the service plus a back-reference to
        # this API object in its constructor kwargs.
        shared_kwargs = {"pathfinding_service": pathfinding_service,
                         "api": self}
        for rule, resource_cls, extra_kwargs, endpoint_name in route_specs:
            self.api.add_resource(
                resource_cls,
                rule,
                resource_class_kwargs={**extra_kwargs, **shared_kwargs},
                endpoint=endpoint_name)

    def run(self, host: str, port: int) -> None:
        """Start serving the REST API on host:port (non-blocking)."""
        server = WSGIServer((host, port), self.flask_app)
        self.rest_server = server
        server.start()

        log.info("Running endpoint", endpoint=f"http://{host}:{port}")

    def stop(self) -> None:
        """Stop the REST server if it was started."""
        if self.rest_server:
            self.rest_server.stop()
Beispiel #32
0
    if env['PATH_INFO'] == '/status/disk_io':
        # Serve aggregated disk I/O statistics as a JSON document.
        start_response('200 OK', [("Content-Type", "application/json")])
        msg = make_perf_list(lambda x: x[1]['disk_io'])
        return [msg]
    else:
        # Any other path gets a JSON 404 body.
        start_response('404 Not Found', [("Content-Type", "application/json")])
        msg = json.dumps({"data": "Not Found"})
        return [msg]


if __name__ == '__main__':
    # TODO: set up a monitoring greenlet (translated from Chinese)
    q = queue.JoinableQueue()

    wsgi_server = WSGIServer(('', 23458), application)
    wsgi_server.start()

    def server_exit():
        # Stop accepting requests immediately, then kill worker greenlets.
        wsgi_server.stop(0)
        gevent.killall(gs, block=False)

    # Ignore ALRM/HUP; shut down cleanly on INT/TERM.
    gsignal(signal.SIGALRM, signal.SIG_IGN)
    gsignal(signal.SIGHUP, signal.SIG_IGN)
    gsignal(signal.SIGINT, server_exit)
    gsignal(signal.SIGTERM, server_exit)

    # Background thread drains the work queue; greenlets poll the
    # XenServer performance URLs at three different granularities.
    threading.Thread(target=process.process, args=(thread_queue, )).start()
    xen_manager = XenserverManager(q)
    gs.append(gevent.spawn(xen_manager.make_10m_perf_url))
    gs.append(gevent.spawn(xen_manager.make_2h_perf_url))
    gs.append(gevent.spawn(xen_manager.make_1w_perf_url))
Beispiel #33
0
class ServiceApi:
    """REST API wrapper around a PathfindingService."""

    def __init__(
            self,
            pathfinding_service: PathfindingService,
            service_fee: TokenAmount = TokenAmount(0),
            debug_mode: bool = False,
    ):
        self.flask_app = Flask(__name__)
        self.api = ApiWithErrorHandler(self.flask_app)
        # None until run() is called.  String annotation: the previous
        # ``WSGIServer`` annotation was wrong (the value starts as None)
        # and a real Optional would require a typing import at runtime.
        self.rest_server: "Optional[WSGIServer]" = None
        self.pathfinding_service = pathfinding_service
        self.service_fee = service_fee

        resources: List[Tuple[str, Resource, Dict, str]] = [
            (
                "/<token_network_address>/paths",
                PathsResource,
                dict(debug_mode=debug_mode),
                "paths",
            ),
            ("/<token_network_address>/payment/iou", IOUResource, {},
             "payments"),
            ("/info", InfoResource, {}, "info"),
        ]

        if debug_mode:
            log.warning(
                "The debug REST API is enabled. Don't do this on public nodes."
            )
            resources.extend([
                (
                    "/_debug/routes/<token_network_address>/<source_address>",
                    DebugEndpoint,
                    {},
                    "debug1",
                ),
                (
                    "/_debug/routes/<token_network_address>/<source_address>/<target_address>",
                    DebugEndpoint,
                    {},
                    "debug2",
                ),
                ("/_debug/ious/<source_address>", DebugEndpointIOU, {},
                 "debug3"),
            ])

        # Every resource receives the service plus a back-reference to
        # this API object in its constructor kwargs.
        for endpoint_url, resource, kwargs, endpoint in resources:
            endpoint_url = API_PATH + endpoint_url
            kwargs.update({
                "pathfinding_service": pathfinding_service,
                "service_api": self
            })
            self.api.add_resource(resource,
                                  endpoint_url,
                                  resource_class_kwargs=kwargs,
                                  endpoint=endpoint)

    def run(self,
            host: str = DEFAULT_API_HOST,
            port: int = DEFAULT_API_PORT) -> None:
        """Start serving the REST API on host:port (non-blocking)."""
        self.rest_server = WSGIServer((host, port), self.flask_app)
        self.rest_server.start()

        log.info("Running endpoint", endpoint=f"{host}:{port}")

    def stop(self) -> None:
        """Stop the REST server if it was started.

        BUGFIX: previously stop() raised AttributeError when run() had
        never been called (sibling PFSApi.stop already guards this).
        """
        if self.rest_server is not None:
            self.rest_server.stop()
Beispiel #34
0
class WebService(Service):
    """RPC service with Web server capabilities."""

    def __init__(self, listen_port, handlers, parameters, shard=0,
                 listen_address=""):
        """Build the WSGI middleware stack and the gevent WSGI server.

        listen_port/listen_address: where the WSGI server binds.
        handlers: URL handlers for the core tornado WSGI application.
        parameters: application settings; service-specific keys are popped
            out below, the remainder is forwarded to the application.
        shard: shard index forwarded to the base Service.
        """
        super(WebService, self).__init__(shard)

        # Pull our own configuration keys out before handing the rest to
        # tornado's WSGIApplication.
        static_files = parameters.pop('static_files', [])
        rpc_enabled = parameters.pop('rpc_enabled', False)
        rpc_auth = parameters.pop('rpc_auth', None)
        auth_middleware = parameters.pop('auth_middleware', None)
        is_proxy_used = parameters.pop('is_proxy_used', None)
        num_proxies_used = parameters.pop('num_proxies_used', None)

        core_app = tornado.wsgi.WSGIApplication(handlers, **parameters)
        core_app.service = self
        self.wsgi_app = core_app

        # Wrap the application with static-file serving, one layer per root.
        for static_root in static_files:
            # TODO If we will introduce a flag to trigger autoreload in
            # Jinja2 templates, use it to disable the cache arg here.
            self.wsgi_app = SharedDataMiddleware(
                self.wsgi_app, {"/static": static_root},
                cache=True, cache_timeout=SECONDS_IN_A_YEAR,
                fallback_mimetype="application/octet-stream")

        self.file_cacher = FileCacher(self)
        self.wsgi_app = FileServerMiddleware(self.file_cacher, self.wsgi_app)

        if rpc_enabled:
            self.wsgi_app = DispatcherMiddleware(
                self.wsgi_app, {"/rpc": RPCMiddleware(self, rpc_auth)})

        # The authentication middleware needs to be applied before the
        # ProxyFix as otherwise the remote address it gets is the one
        # of the proxy.
        if auth_middleware is not None:
            self.wsgi_app = auth_middleware(self.wsgi_app)
            self.auth_handler = self.wsgi_app

        # If we are behind one or more proxies, use the X-Forwarded-For HTTP
        # header (if provided) to determine the client IP address, ignoring
        # the one the request came from. This allows to use the IP lock
        # behind a proxy. Activate it only if all requests come from a
        # trusted source (if clients were allowed to directly communicate
        # with the server they could fake their IP and compromise the
        # security of the IP lock).
        if num_proxies_used is None:
            num_proxies_used = 1 if is_proxy_used else 0

        if num_proxies_used > 0:
            self.wsgi_app = ProxyFix(self.wsgi_app, num_proxies_used)

        self.web_server = WSGIServer((listen_address, listen_port), self)

    def __call__(self, environ, start_response):
        """Execute this instance as a WSGI application.

        See the PEP for the meaning of parameters. Keeping __call__ separate
        from wsgi_app eases the insertion of middlewares.
        """
        return self.wsgi_app(environ, start_response)

    def run(self):
        """Start the WebService.

        Both the WSGI server and the RPC service loop are started; the web
        server is stopped once the service loop returns.
        """
        self.web_server.start()
        Service.run(self)
        self.web_server.stop()
# Beispiel #35
# 0
class OmsEventListener(object):
    """
    HTTP server to get RSN OMS event notifications and do corresponding
    notifications to driver/agent via callback.
    """

    #
    # TODO The whole asynchronous reception of external RSN OMS events needs
    # to be handled in a different way, in particular, it should be a separate
    # service (not a supporting class per platform driver instance) that
    # listens on a well-known host:port and that publishes the corresponding
    # CI events directly.
    #

    def __init__(self, platform_id, notify_driver_event):
        """
        Creates a listener.

        @param platform_id platform identifier, used to tag all log output.
        @param notify_driver_event callback to notify event events. Must be
                                    provided.
        """

        self._platform_id = platform_id
        self._notify_driver_event = notify_driver_event

        self._http_server = None
        self._url = None

        # _notifications: if not None, [event_instance, ...]
        self._notifications = None

        # _no_notifications: flag only intended for developing purposes
        # see ion.agents.platform.rsn.simulator.oms_events
        self._no_notifications = os.getenv("NO_OMS_NOTIFICATIONS") is not None
        if self._no_notifications:  # pragma: no cover
            log.warn(
                "%r: NO_OMS_NOTIFICATIONS env variable defined: "
                "no notifications will be done", self._platform_id)
            self._url = "http://NO_OMS_NOTIFICATIONS"

    @property
    def url(self):
        """
        The URL that can be used to register a listener to the OMS.
        This is None if there is no HTTP server currently running.
        """
        return self._url

    def keep_notifications(self, keep=True, reset=True):
        """
        By default, received event notifications are not kept. Call this with
        True (the default) to keep them, or with False to not keep them.
        If they are currently kept and the reset param is True (the default),
        then the notifications list is reinitialized.
        """
        if keep:
            if not self._notifications or reset:
                self._notifications = []
        else:
            self._notifications = None

    @property
    def notifications(self):
        """
        The current list of received notifications. This will be None if such
        notifications are not being kept.
        """
        return self._notifications

    def start_http_server(self, host='localhost', port=0):
        """
        Starts a HTTP server that handles the notification of received events.

        @param host Host, by default 'localhost'.
        @param port Port, by default 0 to get one dynamically.
        """

        # reinitialize notifications if we are keeping them:
        if self._notifications:
            self._notifications = []

        if self._no_notifications:
            return

        log.info(
            "%r: starting http server for receiving event notifications at"
            " %s:%s ...", self._platform_id, host, port)
        try:
            self._http_server = WSGIServer((host, port),
                                           self._application,
                                           log=sys.stdout)
            self._http_server.start()
        # Fix: narrowed the former bare `except:` so SystemExit /
        # KeyboardInterrupt are not intercepted for logging; the original
        # exception is re-raised either way.
        except Exception:
            log.exception(
                "%r: Could not start http server for receiving event"
                " notifications", self._platform_id)
            raise

        host_name, host_port = self._http_server.address

        log.info("%r: http server started at %s:%s", self._platform_id,
                 host_name, host_port)

        exposed_host_name = host_name

        ######################################################################
        # adjust exposed_host_name:
        # **NOTE**: the adjustment below is commented out because is not robust
        # enough. For example, the use of the external name for the host would
        # require the particular port to be open to the world.
        # And in any case, this overall handling needs a different approach.
        #
        # # If the given host is 'localhost', need to get the actual hostname
        # # for the exposed URL:
        # if host is 'localhost':
        #     exposed_host_name = socket.gethostname()
        ######################################################################

        self._url = "http://%s:%s" % (exposed_host_name, host_port)
        log.info("%r: http server exposed URL = %r", self._platform_id,
                 self._url)

    def _application(self, environ, start_response):
        """WSGI application: parse the YAML payload, dispatch it, reply 200."""

        input = environ['wsgi.input']
        # Fix: join with "" (not "\n") — readlines() already keeps each
        # line's trailing newline, so the old "\n".join doubled every line
        # break and could corrupt YAML block scalars in the payload.
        body = "".join(input.readlines())
        # log.trace('%r: notification received payload=%s', self._platform_id, body)
        # SECURITY NOTE(review): yaml.load on network-supplied data can
        # construct arbitrary Python objects; switch to yaml.safe_load unless
        # custom YAML tags are actually required — verify with the payloads.
        event_instance = yaml.load(body)

        self._event_received(event_instance)

        # generic OK response  TODO determine appropriate variations if any
        status = '200 OK'
        headers = [('Content-Type', 'text/plain')]
        start_response(status, headers)
        return status

    def _event_received(self, event_instance):
        """Record the event (only when retention is enabled) and forward it
        to the driver callback."""
        log.trace('%r: received event_instance=%s', self._platform_id,
                  event_instance)

        # Fix: honor keep_notifications(False). The previous check
        # (`if self._notifications: append else: _notifications = [...]`)
        # created a new list even when retention was disabled (None), so
        # events were retained unconditionally.
        if self._notifications is not None:
            self._notifications.append(event_instance)

        log.debug('%r: notifying event_instance=%s', self._platform_id,
                  event_instance)

        driver_event = ExternalEventDriverEvent(event_instance)
        self._notify_driver_event(driver_event)

    def stop_http_server(self):
        """
        Stops the http server.
        @retval the list of received notifications or None if they are not
                kept.
        """
        if self._http_server:
            log.info("%r: HTTP SERVER: stopping http server: url=%r",
                     self._platform_id, self._url)
            self._http_server.stop()

        self._http_server = None
        self._url = None

        return self._notifications
# Beispiel #36
# 0
class WebServer(object):
    """Serve the web application with either gevent (when available, per the
    module-level _GEVENT flag) or tornado, handling access logging, SSL
    configuration, unix sockets and in-place restart via os.execv."""

    def __init__(self):
        # Shut down cleanly on Ctrl-C / SIGTERM.
        signal.signal(signal.SIGINT, self._killServer)
        signal.signal(signal.SIGTERM, self._killServer)

        self.wsgiserver = None  # gevent WSGIServer or tornado IOLoop once running
        self.access_logger = None
        self.restart = False  # set by stop(restart=True); checked in start()
        self.app = None
        self.listen_address = None
        self.listen_port = None
        self.unix_socket_file = None  # path of the unix socket, if one is used
        self.ssl_args = None  # dict(certfile=..., keyfile=...) or None

    def init_app(self, application, config):
        """Bind the WSGI application and read listen address/port, access-log
        and SSL settings from the given config object."""
        self.app = application
        self.listen_address = config.get_config_ipaddress()
        self.listen_port = config.config_port

        if config.config_access_log:
            log_name = "gevent.access" if _GEVENT else "tornado.access"
            formatter = logger.ACCESS_FORMATTER_GEVENT if _GEVENT else logger.ACCESS_FORMATTER_TORNADO
            self.access_logger, logfile = logger.create_access_log(
                config.config_access_logfile, log_name, formatter)
            # create_access_log may have substituted a fallback path; persist it.
            if logfile != config.config_access_logfile:
                log.warning(
                    "Accesslog path %s not valid, falling back to default",
                    config.config_access_logfile)
                config.config_access_logfile = logfile
                config.save()
        else:
            if not _GEVENT:
                logger.get('tornado.access').disabled = True

        # SSL is enabled only when both configured files actually exist.
        certfile_path = config.get_config_certfile()
        keyfile_path = config.get_config_keyfile()
        if certfile_path and keyfile_path:
            if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
                self.ssl_args = dict(certfile=certfile_path,
                                     keyfile=keyfile_path)
            else:
                log.warning(
                    'The specified paths for the ssl certificate file and/or key file seem to be broken. '
                    'Ignoring ssl.')
                log.warning('Cert path: %s', certfile_path)
                log.warning('Key path:  %s', keyfile_path)

    def _make_gevent_unix_socket(self, socket_file):
        """Create and return a unix-domain listener socket at socket_file."""
        # the socket file must not exist prior to bind()
        if os.path.exists(socket_file):
            # avoid nuking regular files and symbolic links (could be a mistype or security issue)
            if os.path.isfile(socket_file) or os.path.islink(socket_file):
                raise OSError(errno.EEXIST, os.strerror(errno.EEXIST),
                              socket_file)
            os.remove(socket_file)

        unix_sock = WSGIServer.get_listener(socket_file, family=socket.AF_UNIX)
        self.unix_socket_file = socket_file

        # ensure current user and group have r/w permissions, no permissions for other users
        # this way the socket can be shared in a semi-secure manner
        # between the user running calibre-web and the user running the fronting webserver
        os.chmod(socket_file, 0o660)

        return unix_sock

    def _make_gevent_socket(self):
        """Return (listener, readable_address) for the gevent server.

        Preference order: unix socket from $CALIBRE_UNIX_SOCKET (non-Windows),
        the explicitly configured listen address, then IPv6 ('::') with an
        IPv4 fallback. readable_address is None when the listener is a plain
        (host, port) tuple."""
        if os.name != 'nt':
            unix_socket_file = os.environ.get("CALIBRE_UNIX_SOCKET")
            if unix_socket_file:
                return self._make_gevent_unix_socket(
                    unix_socket_file), "unix:" + unix_socket_file

        if self.listen_address:
            return (self.listen_address, self.listen_port), None

        if os.name == 'nt':
            self.listen_address = '0.0.0.0'
            return (self.listen_address, self.listen_port), None

        try:
            address = ('::', self.listen_port)
            sock = WSGIServer.get_listener(address, family=socket.AF_INET6)
        except socket.error as ex:
            log.error('%s', ex)
            log.warning('Unable to listen on "", trying on IPv4 only...')
            address = ('', self.listen_port)
            sock = WSGIServer.get_listener(address, family=socket.AF_INET)

        return sock, _readable_listen_address(*address)

    def _start_gevent(self):
        """Run the app under gevent's WSGIServer until stopped; removes the
        unix socket file (if any) on the way out."""
        ssl_args = self.ssl_args or {}

        try:
            sock, output = self._make_gevent_socket()
            if output is None:
                output = _readable_listen_address(self.listen_address,
                                                  self.listen_port)
            log.info('Starting Gevent server on %s', output)
            self.wsgiserver = WSGIServer(sock,
                                         self.app,
                                         log=self.access_logger,
                                         spawn=Pool(),
                                         **ssl_args)
            if ssl_args:
                wrap_socket = self.wsgiserver.wrap_socket

                def my_wrap_socket(*args, **kwargs):
                    # Turn per-connection SSL handshake failures into a
                    # GreenletExit so a bad client cannot kill the server.
                    try:
                        return wrap_socket(*args, **kwargs)
                    except (ssl.SSLError) as ex:
                        log.warning('Gevent SSL Error: %s', ex)
                        raise GreenletExit

                self.wsgiserver.wrap_socket = my_wrap_socket
            self.wsgiserver.serve_forever()
        finally:
            if self.unix_socket_file:
                os.remove(self.unix_socket_file)
                self.unix_socket_file = None

    def _start_tornado(self):
        """Run the app under tornado's HTTPServer/IOLoop until stopped."""
        if os.name == 'nt' and sys.version_info > (3, 7):
            import asyncio
            asyncio.set_event_loop_policy(
                asyncio.WindowsSelectorEventLoopPolicy())
        log.info(
            'Starting Tornado server on %s',
            _readable_listen_address(self.listen_address, self.listen_port))

        # Max buffer size set to 200 MB
        http_server = HTTPServer(WSGIContainer(self.app),
                                 max_buffer_size=209700000,
                                 ssl_options=self.ssl_args)
        http_server.listen(self.listen_port, self.listen_address)
        self.wsgiserver = IOLoop.current()
        self.wsgiserver.start()
        # wait for stop signal
        self.wsgiserver.close(True)

    def start(self):
        """Serve until stopped; return True on clean shutdown, False on a
        startup error. When stop(restart=True) was requested, re-exec the
        current process with the original arguments instead of returning."""
        try:
            if _GEVENT:
                # leave subprocess out to allow forking for fetchers and processors
                self._start_gevent()
            else:
                self._start_tornado()
        except Exception as ex:
            log.error("Error starting server: %s", ex)
            print("Error starting server: %s" % ex)
            self.stop()
            return False
        finally:
            self.wsgiserver = None

        if not self.restart:
            log.info("Performing shutdown of Calibre-Web")
            # prevent irritiating log of pending tasks message from asyncio
            logger.get('asyncio').setLevel(logger.logging.CRITICAL)
            return True

        log.info("Performing restart of Calibre-Web")
        arguments = list(sys.argv)
        arguments.insert(0, sys.executable)
        if os.name == 'nt':
            arguments = ["\"%s\"" % a for a in arguments]
        os.execv(sys.executable, arguments)
        return True

    def _killServer(self, __, ___):
        """Signal handler: delegate to stop() (signum/frame args unused)."""
        self.stop()

    def stop(self, restart=False):
        """Ask the running server loop to exit; remember whether start()
        should re-exec the process afterwards."""
        from . import updater_thread
        updater_thread.stop()

        log.info("webserver stop (restart=%s)", restart)
        self.restart = restart
        if self.wsgiserver:
            if _GEVENT:
                self.wsgiserver.close()
            else:
                self.wsgiserver.add_callback_from_signal(self.wsgiserver.stop)
# Beispiel #37
# 0
from gevent import monkey
from gevent.pywsgi import WSGIServer
monkey.patch_all()
from flask import Flask
import logging
from logging.handlers import RotatingFileHandler
from multiprocessing import cpu_count, Process,Manager

# Create the Flask app and a gevent WSGI server for it (log=None disables
# per-request logging). NOTE(review): server.start() runs at import time;
# presumably so each forked worker (see __main__ below) can accept on the
# already-bound listening socket via serve_forever() — confirm.
app = Flask(__name__)
server = WSGIServer(('0.0.0.0', 6000), app, log=None)
server.start()

# Application log: rotating file, 10 MB per file, 20 backups kept.
handler = RotatingFileHandler('app.log',encoding='UTF-8',maxBytes=1024*1024*10,backupCount=20)
# handler = logging.FileHandler('app.log', encoding='UTF-8')
logging_format = logging.Formatter('%(asctime)s - %(levelname)s - %(filename)s - %(funcName)s - %(lineno)s - %(message)s')
handler.setFormatter(logging_format)
app.logger.addHandler(handler)

@app.route('/',methods=['POST','GET'])
def hello():
    """Respond to GET/POST on the root path with a fixed greeting."""
    greeting = 'hello world'
    return greeting

def serve_forever():
    """Begin accepting connections on the shared module-level ``server``
    and block until its stop event is set."""
    global server
    srv = server
    srv.start_accepting()
    srv._stop_event.wait()

if __name__ == "__main__":
    count = cpu_count()
    print("cpu count %s" % count)
    for i in range(count):
class OmsEventListener(object):
    """
    HTTP server to get RSN OMS event notifications and do corresponding
    notifications to driver/agent via callback.
    """
    def __init__(self, notify_driver_event):
        """
        Creates a listener.

        @param notify_driver_event callback to notify event events. Must be
                                    provided.
        """

        assert notify_driver_event, "notify_driver_event callback must be provided"
        self._notify_driver_event = notify_driver_event

        self._http_server = None
        self._url = None

        # _notifications: if not None, { event_type: [event_instance, ...], ...}
        self._notifications = None

    @property
    def url(self):
        """
        The URL that can be used to register a listener to the OMS.
        This is None if there is no HTTP server currently running.
        """
        return self._url

    def keep_notifications(self, keep=True, reset=True):
        """
        By default, received event notifications are not kept. Call this with
        True (the default) to keep them, or with False to not keep them.
        If they are currently kept and the reset param is True (the default),
        then the notifications dict is reinitialized.
        """
        if keep:
            if not self._notifications or reset:
                self._notifications = {}
        else:
            self._notifications = None

    @property
    def notifications(self):
        """
        The current dict of received notifications. This will be None if such
        notifications are not being kept.
        """
        return self._notifications

    def start_http_server(self, host='localhost', port=0):
        """
        Starts a HTTP server that handles the notification of received events.

        @param host by default 'localhost'
        @param port by default 0 to get one dynamically.
        """

        # reinitialize notifications if we are keeping them:
        if self._notifications:
            self._notifications.clear()

        self._http_server = WSGIServer((host, port), self.__application)
        log.info("starting http server for receiving event notifications...")
        self._http_server.start()
        self._url = "http://%s:%s" % self._http_server.address
        log.info("http server started: url=%r", self._url)

    def __application(self, environ, start_response):
        """WSGI application: validate and dispatch one notification call."""

        input = environ['wsgi.input']
        # Fix: join with "" (not "\n") — readlines() already keeps each
        # line's trailing newline, so the old "\n".join doubled every line
        # break and could corrupt YAML block scalars in the payload.
        body = "".join(input.readlines())
        #        log.trace('notification received payload=%s', body)
        # SECURITY NOTE(review): yaml.load on network-supplied data can
        # construct arbitrary Python objects; switch to yaml.safe_load unless
        # custom YAML tags are actually required — verify with the payloads.
        event_instance = yaml.load(body)
        log.trace('notification received event_instance=%s', event_instance)

        # Fix: always complete the WSGI exchange. The previous version
        # returned early on malformed payloads without calling
        # start_response, which violates the WSGI protocol and made the
        # server error out instead of answering.
        response_body = ''
        if 'url' not in event_instance:
            log.warn("expecting 'url' entry in notification call")
        elif 'ref_id' not in event_instance:
            log.warn("expecting 'ref_id' entry in notification call")
        else:
            url = event_instance['url']
            event_type = event_instance['ref_id']
            response_body = event_type

            if self._url == url:
                self._event_received(event_type, event_instance)
            else:
                log.warn(
                    "got notification call with an unexpected url=%s (expected url=%s)",
                    url, self._url)

        # generic OK response  TODO determine appropriate variations if any
        status = '200 OK'
        headers = [('Content-Type', 'text/plain')]
        start_response(status, headers)
        return response_body

    def _event_received(self, event_type, event_instance):
        """Record the event (when retention is enabled) and forward it to the
        driver callback."""
        log.trace('received event_instance=%s', event_instance)

        # Fix: the previous check `if self._notifications:` was False for a
        # freshly-reset EMPTY dict, so with retention enabled no event was
        # ever recorded. Test against None instead, and let setdefault
        # create the per-type list.
        if self._notifications is not None:
            self._notifications.setdefault(event_type, []).append(event_instance)

        log.debug('notifying event_instance=%s', event_instance)

        driver_event = ExternalEventDriverEvent(event_type, event_instance)
        self._notify_driver_event(driver_event)

    def stop_http_server(self):
        """
        Stops the http server.
        @retval the dict of received notifications or None if they are not kept.
        """
        if self._http_server:
            log.info("HTTP SERVER: stopping http server: url=%r", self._url)
            self._http_server.stop()

        self._http_server = None
        self._url = None

        return self._notifications
#!/usr/bin/env python
# coding: utf-8
from gevent.pywsgi import WSGIServer
from geventwebsocket.handler import WebSocketHandler
from gevent.event import Event

from app import my_app_wrapper

# Event used by the app to signal the main thread that it should shut down.
stopper = Event()

# as suggested by ThiefMaster in:
# http://stackoverflow.com/questions/15932298/how-to-stop-a-flask-server-running-gevent-socketio
#

if __name__ == "__main__":
    # Serve the wrapped app with websocket support; the app receives the
    # stopper so a request handler can trigger shutdown by setting it.
    http_server = WSGIServer(("", 8000), my_app_wrapper(stopper), handler_class=WebSocketHandler)
    http_server.start()

    try:
        # Block until the app sets the stopper (or the user hits Ctrl-C).
        stopper.wait()
    except KeyboardInterrupt:
        # Python 2 print statement: just emit a newline before exiting.
        print ""
    def run(self):
        """Main server loop.

        Starts worker threads and the upload web app, then idles, triggering
        repository maintenance ("flatpak build-update-repo") once uploads
        have been quiet for MAINTENANCE_WAIT seconds. Runs until
        KeyboardInterrupt/SystemExit, then stops the HTTP server and workers.
        """
        # Array since we need to pass by ref
        latest_task_complete = [time()]
        latest_maintenance_complete = time()
        active_upload_counter = ThreadsafeCounter()

        task_list = TaskList()

        logging.info("Starting server on %d..." % self._port)

        logging.debug("task completed callback %s", latest_task_complete)

        def completed_callback(latest_task_complete):
            # Called by workers after each task; records the completion time
            # by mutating the shared one-element list in place.
            logging.debug("task completed callback %s", latest_task_complete)
            latest_task_complete[:] = [time()]

        workers = Workers(self._repo,
                          partial(completed_callback, latest_task_complete))
        workers.start(task_list, self._workers)

        def webapp_callback(task_name, filepath):
            # Each completed upload becomes a queued task for the workers.
            task_list.add_task(Task(task_name, filepath))

        webapp = UploadWebApp(__name__, active_upload_counter, webapp_callback)

        http_server = WSGIServer(('', self._port), webapp)
        http_server.start()

        logging.info("Server started on %s" % self._port)

        # loop until interrupted
        while True:
            try:
                gsleep(5)
                task_list.join()
                logging.debug("task queue empty, " +
                              str(active_upload_counter.get_count()) +
                              " uploads ongoing")
                time_since_maintenance = time() - latest_maintenance_complete
                time_since_task = time() - latest_task_complete[0]
                logging.debug("{:.1f} complete".format(time_since_task))
                logging.debug(
                    "{:.1f} since last task, {:.1f} since last maintenance".
                    format(time_since_task, time_since_maintenance))
                if time_since_maintenance > time_since_task:
                    # uploads have been processed since last maintenance
                    logging.debug("maintenance needed")
                    if time_since_task >= MAINTENANCE_WAIT:
                        logging.debug("idle, do maintenance")
                        # Stop workers so the repo is not mutated while
                        # build-update-repo runs.
                        workers.stop()

                        try:
                            output = check_output([
                                "flatpak", "build-update-repo",
                                "--generate-static-deltas", "--prune",
                                self._repo
                            ],
                                                  stderr=STDOUT)
                            # NOTE(review): on Python 3 check_output returns
                            # bytes, so this str concatenation would raise —
                            # verify the file's Python version / add text mode.
                            logging.info("completed maintenance: " + output)
                        except CalledProcessError as e:
                            logging.error("failed maintenance: " + e.output)

                        latest_maintenance_complete = time()
                        workers.start(task_list, self._workers)

            except (KeyboardInterrupt, SystemExit):
                break

        logging.info("Cleaning up resources...")

        http_server.stop()

        workers.stop()
# Beispiel #41
# 0
    # NOTE(review): this fragment is the body of an enclosing function whose
    # `def` line is missing here; `complete`, `ssl`, TIMEOUT and
    # SALT_RESET_PERIOD come from that outer scope — confirm against the
    # full original file. The `except Exception, e` form is Python 2 syntax.

    cthulhu = Manager()
    cthulhu_started = False

    # Keep retrying until the manager comes up, backing off 5s per failure.
    while not cthulhu_started:
        try:
            if not cthulhu_started:
                cthulhu_started = cthulhu.start()

        except Exception, e:
            log.exception('It borked')
            log.error(str(e))
            complete.wait(timeout=5)

    app = get_internal_wsgi_application()
    wsgi = WSGIServer(('0.0.0.0', 8002), app, **ssl)
    wsgi.start()

    def shutdown():
        # Signal the tick loop below to exit.
        complete.set()

    gevent.signal(signal.SIGTERM, shutdown)
    gevent.signal(signal.SIGINT, shutdown)

    # Tick the eventer until shutdown; reset its event sink roughly every
    # SALT_RESET_PERIOD seconds (x accumulates elapsed TIMEOUT intervals).
    x = 0
    while not complete.is_set():
        cthulhu.eventer.on_tick()
        complete.wait(timeout=TIMEOUT)
        if x > SALT_RESET_PERIOD:
            cthulhu.eventer.reset_event_sink()
            x = 0
        x += TIMEOUT
class AccessControlTestCase(unittest.TestCase):
    """End-to-end tests of the "ac." (access control) RPC interface, driven
    over a websocket JSON-RPC client against a live test server instance."""

    def setUp(self):
        # Boot a full caliope server (test config, Redis-backed key/value
        # store) behind a websocket-capable gevent WSGI server, then seed
        # the default users/groups fixture.
        caliope_server.app.config['TESTING'] = True
        caliope_server.init_flask_app()
        caliope_server.configure_server_and_app("../../conf/test_caliope_server.json")
        caliope_server.configure_logger("../../conf/tests_logger.json")
        caliope_server.register_modules()
        caliope_server.app.storekv = RedisStore(redis.StrictRedis())
        self.http_server = WSGIServer((caliope_server.app.config['address'],
                                       caliope_server.app.config['port']),
                                      caliope_server.app,
                                      handler_class=WebSocketHandler)  # @IgnorePep8
        self.http_server.start()
        DefaultDatabase().test_defaultUserGroupOne()

    def tearDown(self):
        """Get rid of the database again after each test."""
        # NOTE(review): self.rpc_client is only assigned by login(); a test
        # that never logs in would hit AttributeError here — verify.
        if self.rpc_client:
            self.rpc_client.transport.close()
        self.http_server.stop()
        self.http_server = None
        caliope_server.app = Flask('caliope_server')
        #:Delete database
        neo4j.GraphDatabaseService().clear()


    def login(self, username='******', password='******'):
        # Authenticate over the websocket RPC endpoint; the password is sent
        # as its SHA-256 hex digest. (Defaults are scrubbed placeholders.)
        self.rpc_client = RPCClient(JSONRPCProtocol(),
                                    HttpWebSocketClientTransport('ws://localhost:9001/api/ws'))
        self.loginManager = self.rpc_client.get_proxy("login.")
        hashed_password = hashlib.sha256(password).hexdigest()
        return self.loginManager.authenticate(username=username,
                                              password=hashed_password)

    def logout(self, uuid):
        # No-op when login() was never invoked in this test.
        if self.loginManager is None:
            return
        return self.loginManager.logout(uuid=uuid)


    def test_login(self):
        # A successful login reports the user and session identifiers.
        rv = self.login(u'user', u'123')
        assert 'login' in rv
        assert rv['login'] is True
        assert 'user_uuid' in rv
        assert 'session_uuid' in rv

    def test_logout(self):
        uuid = self.login(u'user', u'123')['user_uuid']['value']
        rv = self.logout(uuid=uuid)
        assert 'logout' in rv
        assert rv['logout']

    def test_isAccessGranted(self):
        # TODO(nel): Deprecate this method.
        self.login()
        ac_proxy = self.rpc_client.get_proxy("ac.")
        self.assertEqual({'granted': True}, ac_proxy.isAccessGranted({}))

    def test_getUserList(self):
        # The server must report exactly the fixture users, no more, no less.
        self.login()
        ac_proxy = self.rpc_client.get_proxy("ac.")

        user_list = set(['revisor_1', 'revisor_2', 'revisor_3', 'recepcionista_1',
                         'recepcionista_2', 'superuser', 'secretaria_1', 'reportero_1',
                         'reportero_2', 'gerente_1'])

        for user in ac_proxy.getUserList({}):
            self.assertIn(user, user_list)
            user_list.remove(user)

        self.assertEqual(user_list, set())

    def test_getGroupList(self):
        # Same exhaustive-match pattern as test_getUserList, for groups.
        self.login()
        ac_proxy = self.rpc_client.get_proxy("ac.")

        group_list = set(['everybody', 'secretarias', 'revisores', 'reportes', 'superusers', 'gerentes', 'recepcionistas'])

        for group in ac_proxy.getGroupList({}):
            self.assertIn(group, group_list)
            group_list.remove(group)

        self.assertEqual(group_list, set())

    def test_getGroupsForUser(self):
        # NOTE(review): no assertion — only prints the result (Python 2
        # print statement); presumably a smoke test.
        self.login()
        ac_proxy = self.rpc_client.get_proxy('ac.')
        print ac_proxy.getGroupsOfUser('gerente_1')

    def test_getUserPermissions(self):
        self.login()

        # Expected (action, resource, group) permission triples for
        # 'gerente_1' per the default fixture.
        permissions_of_user = \
             set([('read', 'form', 'everybody'), ('read', 'form', 'gerentes'),
              ('read', 'document', 'everybody'), ('read', 'document', 'gerentes'),
              ('read', 'task', 'everybody'), ('read', 'task', 'gerentes'),
              ('read', 'report', 'everybody'), ('read', 'report', 'gerentes'),
              ('write', 'form', 'everybody'), ('write', 'form', 'gerentes'),
              ('write', 'document', 'everybody'), ('write', 'document', 'gerentes'),
              ('write', 'task', 'everybody'), ('write', 'task', 'gerentes'),
              ('write', 'report', 'everybody'), ('write', 'report', 'gerentes'),
              ('assign', 'form', 'everybody'), ('assign', 'form', 'gerentes'),
              ('assign', 'document', 'everybody'), ('assign', 'document', 'gerentes'),
              ('assign', 'task', 'everybody'), ('assign', 'task', 'gerentes'),
              ('assign', 'report', 'everybody'), ('assign', 'report', 'gerentes'),
              ('assign', 'form', 'reportes'), ('assign', 'document', 'reportes'),
              ('assign', 'task', 'reportes'), ('assign', 'report', 'reportes')])

        ac_proxy = self.rpc_client.get_proxy("ac.")

        for perm in ac_proxy.getUserPermissions('gerente_1'):
            self.assertIn(tuple(perm), permissions_of_user)
            permissions_of_user.remove(tuple(perm))
        self.assertEqual(permissions_of_user, set())
# Beispiel #43
# 0
class server:
    """Runs the Calibre-Web app on gevent (preferred) or Tornado.

    Also owns graceful shutdown and in-place restart: SIGINT/SIGTERM are
    routed to killServer(), and startServer() re-execs the process when a
    restart was requested via setRestartTyp().
    """

    # Either a gevent WSGIServer or a Tornado IOLoop, depending on backend.
    wsgiserver = None
    # When truthy, startServer() re-execs the process instead of exiting.
    restart= False

    def __init__(self):
        # Route termination signals to a graceful shutdown.
        signal.signal(signal.SIGINT, self.killServer)
        signal.signal(signal.SIGTERM, self.killServer)

    def start_gevent(self):
        """Serve the app with gevent's WSGIServer, with optional SSL."""
        try:
            ssl_args = dict()
            certfile_path   = web.ub.config.get_config_certfile()
            keyfile_path    = web.ub.config.get_config_keyfile()
            if certfile_path and keyfile_path:
                # Only enable SSL when both configured files actually exist.
                if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
                    ssl_args = {"certfile": certfile_path,
                                "keyfile": keyfile_path}
                else:
                    web.app.logger.info('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl. Cert path: %s | Key path: %s' % (certfile_path, keyfile_path))
            if os.name == 'nt':
                # Windows: bind explicitly to 0.0.0.0 (dual-stack '' binding
                # is not used there).
                self.wsgiserver= WSGIServer(('0.0.0.0', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
            else:
                self.wsgiserver = WSGIServer(('', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
            # Shared via the web module so stopServer() can find the server
            # under python3 (see the circular-import ToDo below).
            web.py3_gevent_link = self.wsgiserver
            self.wsgiserver.serve_forever()

        except SocketError:
            # Binding '' failed (e.g. no IPv6 support); retry on IPv4 only.
            try:
                web.app.logger.info('Unable to listen on \'\', trying on IPv4 only...')
                self.wsgiserver = WSGIServer(('0.0.0.0', web.ub.config.config_port), web.app, spawn=Pool(), **ssl_args)
                web.py3_gevent_link = self.wsgiserver
                self.wsgiserver.serve_forever()
            except (OSError, SocketError) as e:
                web.app.logger.info("Error starting server: %s" % e.strerror)
                print("Error starting server: %s" % e.strerror)
                web.helper.global_WorkerThread.stop()
                sys.exit(1)
        except Exception:
            web.app.logger.info("Unknown error while starting gevent")

    def startServer(self):
        """Start the appropriate backend and block until shutdown/restart."""
        if gevent_present:
            web.app.logger.info('Starting Gevent server')
            # leave subprocess out to allow forking for fetchers and processors
            self.start_gevent()
        else:
            try:
                ssl = None
                web.app.logger.info('Starting Tornado server')
                certfile_path   = web.ub.config.get_config_certfile()
                keyfile_path    = web.ub.config.get_config_keyfile()
                if certfile_path and keyfile_path:
                    # Only enable SSL when both configured files actually exist.
                    if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
                        ssl = {"certfile": certfile_path,
                               "keyfile": keyfile_path}
                    else:
                        web.app.logger.info('The specified paths for the ssl certificate file and/or key file seem to be broken. Ignoring ssl. Cert path: %s | Key path: %s' % (certfile_path, keyfile_path))

                # Max Buffersize set to 200MB
                http_server = HTTPServer(WSGIContainer(web.app),
                            max_buffer_size = 209700000,
                            ssl_options=ssl)
                http_server.listen(web.ub.config.config_port)
                self.wsgiserver=IOLoop.instance()
                self.wsgiserver.start()
                # wait for stop signal
                self.wsgiserver.close(True)
            except SocketError as e:
                web.app.logger.info("Error starting server: %s" % e.strerror)
                print("Error starting server: %s" % e.strerror)
                web.helper.global_WorkerThread.stop()
                sys.exit(1)

        # ToDo: Somehow caused by circular import under python3 refactor
        if sys.version_info > (3, 0):
            self.restart = web.py3_restart_Typ
        if self.restart == True:
            web.app.logger.info("Performing restart of Calibre-Web")
            web.helper.global_WorkerThread.stop()
            if os.name == 'nt':
                # Windows re-exec: quote executable and every argument.
                arguments = ["\"" + sys.executable + "\""]
                for e in sys.argv:
                    arguments.append("\"" + e + "\"")
                os.execv(sys.executable, arguments)
            else:
                # Replace the current process with a fresh interpreter.
                os.execl(sys.executable, sys.executable, *sys.argv)
        else:
            web.app.logger.info("Performing shutdown of Calibre-Web")
            web.helper.global_WorkerThread.stop()
        sys.exit(0)

    def setRestartTyp(self,starttyp):
        """Record whether the next shutdown should restart the process."""
        self.restart = starttyp
        # ToDo: Somehow caused by circular import under python3 refactor
        web.py3_restart_Typ = starttyp

    def killServer(self, signum, frame):
        # Signal handler installed in __init__ for SIGINT/SIGTERM.
        self.stopServer()

    def stopServer(self):
        """Stop the running backend so startServer() can unwind."""
        # ToDo: Somehow caused by circular import under python3 refactor
        if sys.version_info > (3, 0):
            # Under python3 the server object is recovered from the web
            # module globals when this instance never saw it set.
            if not self.wsgiserver:
                if gevent_present:
                    self.wsgiserver = web.py3_gevent_link
                else:
                    self.wsgiserver = IOLoop.instance()
        if self.wsgiserver:
            if gevent_present:
                self.wsgiserver.close()
            else:
                # Tornado: schedule stop() on the IOLoop's own thread.
                self.wsgiserver.add_callback(self.wsgiserver.stop)

    @staticmethod
    def getNameVersion():
        """Return {backend name: version string} for the active backend."""
        if gevent_present:
            return {'Gevent':'v'+geventVersion}
        else:
            return {'Tornado':'v'+tornadoVersion}
Beispiel #44
0
class CaliopeServerTestCase(unittest.TestCase):
    def setUp(self):
        caliope_server.app.config['TESTING'] = True
        caliope_server.init_flask_app()
        caliope_server.configure_server_and_app(
            "../../conf/test_caliope_server.json")
        caliope_server.configure_logger("../../conf/tests_logger.json")
        caliope_server.register_modules()
        caliope_server.app.storekv = RedisStore(redis.StrictRedis())
        self.http_server = WSGIServer(
            (caliope_server.app.config['address'],
             caliope_server.app.config['port']),
            caliope_server.app,
            handler_class=WebSocketHandler)  # @IgnorePep8
        self.http_server.start()
        self.create_default_database()

    def tearDown(self):
        """Get rid of the database again after each test."""
        if self.rpc_client:
            self.rpc_client.transport.close()
        self.http_server.stop()
        self.http_server = None
        caliope_server.app = Flask('caliope_server')
        #:Delete database
        neo4j.GraphDatabaseService().clear()

    def create_default_database(self):
        DefaultDatabase().test_defaultUserGroupOne()

    def login(self, username, password):
        self.rpc_client = RPCClient(
            JSONRPCProtocol(),
            HttpWebSocketClientTransport('ws://localhost:9001/api/ws'))
        self.loginManager = self.rpc_client.get_proxy("login.")
        hashed_password = hashlib.sha256(password).hexdigest()
        return self.loginManager.authenticate(username=username,
                                              password=hashed_password)

    def logout(self, uuid):
        if self.loginManager is None:
            return
        return self.loginManager.logout(uuid=uuid)

    def test_login(self):
        rv = self.login(u'user', u'123')
        expected = \
            {u'first_name': {u'value': u'User'},
             u'last_name': {u'value': u'Test'},
             u'image': {u'data': None},
             u'user': {u'value': u'user'},
             u'login': True,
            }
        self.assertDictContainsSubset(expected, rv)
        self.assertIn("session_uuid", rv)
        self.assertIn("user_uuid", rv)

    def test_logout(self):
        uuid = self.login(u'user', u'123')['user_uuid']['value']
        rv = self.logout(uuid=uuid)
        self.assertIn('logout', rv)
        self.assertTrue(rv['logout'])
        self.assertIn('uuid', rv)
        self.assertEqual(uuid, rv['uuid'])

    def test_accounts_get_public_info(self):
        users = [self.login(u'user', u'123')['user_uuid']['value']]
        accounts_proxy = self.rpc_client.get_proxy(prefix="accounts.")
        info = accounts_proxy.getPublicInfo(users)
        assert len(info) == 1
        assert 'uuid' in info[0]
        for user in users:
            info_uuid = info[0]['uuid']['value']
            assert user == info_uuid

    def test_projects_create(self):
        user = self.login(u'user', u'123')
        projects_proxy = self.rpc_client.get_proxy(prefix="project.")
        model = projects_proxy.getModel()
        data = {
            "name":
            "PROYECTO 305",
            "general_location":
            "<p><em><strong>ASDASDASD</strong></em><br></p>",
            "locality":
            "suba",
            "project_type":
            "py_gr_escala",
            "profit_center":
            "ASDASDADS",
            "areas": [{
                "tipo": "A1",
                "valor": "121"
            }, {
                "tipo": "A2",
                "valor": "13"
            }],
            "uuid":
            model['data']['uuid']['value']
        }
        #: TODO Check for real asserts
        try:
            rv = projects_proxy.create(data=data)
            assert True
        except BaseException:
            assert False

    def test_form_find(self):
        user = self.login(u'user', u'123')
        projects_proxy = self.rpc_client.get_proxy(prefix="project.")
        rv = projects_proxy.getAll()
        self.assertIsNotNone(rv)
class TaskServicesTestCase(unittest.TestCase):
    def setUp(self):
        caliope_server.app.config['TESTING'] = True
        caliope_server.init_flask_app()
        caliope_server.configure_server_and_app(
            "conf/test_caliope_server.json")
        caliope_server.configure_logger("conf/tests_logger.json")
        caliope_server.register_modules()
        caliope_server.app.storekv = RedisStore(redis.StrictRedis())
        self.http_server = WSGIServer((caliope_server.app.config['address'],
                                       caliope_server.app.config['port']),
                                      caliope_server.app,
                                      handler_class=WebSocketHandler)  # @IgnorePep8
        self.http_server.start()
        self.nodes_created = set()
        self.create_default_database()

    def tearDown(self):
        """Get rid of the database again after each test."""
        if self.rpc_client:
            self.rpc_client.transport.close()
        self.http_server.stop()
        self.http_server = None
        caliope_server.app = Flask('caliope_server')
        #:Delete database
        self.remove_used_nodes(self.nodes_created)


    def create_default_database(self):
        self.create_UserGroup('user', 'group')

    def create_UserGroup(self, username, groupname):
        try:
            u1 = CaliopeUser()
            u1.username = username
            u1.password = hashlib.sha256(u'123').hexdigest()
            u1.domainname = 'correlibre.org'
            u1.first_name = "User"
            u1.last_name = "Test"
            u1.avatar = "common-img/avatar.png"
            u1.save()
            self.nodes_created.add(u1.uuid)
            g1 = CaliopeGroup()
            g1.name = groupname
            g1.code = groupname
            g1.save()
            self.nodes_created.add(g1.uuid)
            u1.member_of.connect(g1)
            g1.members.connect(u1)
            self.assertTrue(u1.member_of.is_connected(g1))
            self.assertTrue(g1.members.is_connected(u1))
        except UniqueProperty:
            try:
                u1 = CaliopeUser.index.get(username=username)
                g1 = CaliopeGroup.index.get(code=groupname)
                assert u1 is not None and g1 is not None
                assert u1.member_of.is_connected(g1)
                assert g1.members.is_connected(u1)
            except DoesNotExist:
                assert False

    def remove_used_nodes(self, node_list):
        query = "START n=node:CaliopeStorage('uuid:{}') " \
                "MATCH  n-[r]-() " \
                "DELETE n, r"
        batch_query = []
        for uuid in node_list:
            batch_query.append(neo4j.cypher.Query(neo4j.GraphDatabaseService(),
                                                  query.format(uuid)).execute())

    def login(self, username, password):
        self.rpc_client = RPCClient(JSONRPCProtocol(),
                                    HttpWebSocketClientTransport(
                                        'ws://localhost:9001/api/ws'))
        self.loginManager = self.rpc_client.get_proxy("login.")
        hashed_password = hashlib.sha256(password).hexdigest()
        return self.loginManager.authenticate(username=username,
                                              password=hashed_password)

    def logout(self, uuid):
        if self.loginManager is None:
            return
        return self.loginManager.logout(uuid=uuid)


    def test_login(self):
        rv = self.login(u'user', u'123')
        expected = \
            {u'first_name': {u'value': u'User'},
             u'last_name': {u'value': u'Test'},
             u'image': {u'data': None},
             u'user': {u'value': u'user'},
             u'login': True,
            }
        self.assertDictContainsSubset(expected, rv)
        self.assertIn("session_uuid", rv)
        self.assertIn("user_uuid", rv)


    def test_logout(self):
        uuid = self.login(u'user', u'123')['user_uuid']['value']
        rv = self.logout(uuid=uuid)
        self.assertIn('logout', rv)
        self.assertTrue(rv['logout'])
        self.assertIn('uuid', rv)
        self.assertEqual(uuid, rv['uuid'])


    def test_ts_get_model(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        self.assertIsNotNone(model)

    def test_task_get_model_and_data(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        self.assertIsNotNone(model)
        uuid = model["data"]["uuid"]["value"]
        #:update
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="name",
                                         value="test")

        #:commit
        commit = tasks_proxy.commit(uuid=uuid)
        self.assertTrue(commit)
        model_and_data = tasks_proxy.getModelAndData(uuid=uuid)
        self.assertEqual(model_and_data["data"]["name"]["value"], "test")


    def test_task_update_commit_field_single_value(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        uuid = model["data"]["uuid"]["value"]
        #:update
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="name",
                                         value="test")

        #:commit
        commit = tasks_proxy.commit(uuid=uuid)
        self.assertTrue(commit)

        #:update a commited value
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="name",
                                         value="foo")
        self.assertTrue(update)

        #:commit again a previusly commited value after being updated
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #:update twice a draft and commit
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="name",
                                         value="more foo")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="name",
                                         value="not more foo")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))
        self.nodes_created.add(uuid)


    def test_task_update_commit_field_single_value_not_exist_in_class(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        uuid = model["data"]["uuid"]["value"]
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="other",
                                         value="test")
        self.assertTrue(update)
        try:
            response = tasks_proxy.commit(uuid=uuid)
        except RPCError as error:
            self.assertIsInstance(error, RPCError)

    def test_task_update_commit_field_list(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        uuid = model["data"]["uuid"]["value"]

        #:update invalid index first.
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="subtasks",
                                             subfield_id=0,
                                             value="subtask0")
            self.assertTrue(update)
        except RPCError as error:
            self.assertIsInstance(error, RPCError)

        #:update and commit with empty lists
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=-1,
                                         value="subtask0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #: update and commit already commited lists
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         value="new_subtask0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #: update twice and commit
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         value="new_subtask0_up1")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         value="new_subtask_up2")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #:update a non valid subfield_id
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="subtasks",
                                             subfield_id=1,
                                             value="not subtask1")
        except RPCError as error:
            self.assertIsInstance(error, RPCError)

    def test_task_update_commit_field_list_list(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        uuid = model["data"]["uuid"]["value"]

        #:update with invalid pos
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="subtasks",
                                             subfield_id=-1,
                                             pos=0,
                                             value="subtask0")
        except RPCError as error:
            self.assertIsInstance(error, RPCError)

        #:update with invalid subfield_id
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="subtasks",
                                             subfield_id=0,
                                             pos=-1,
                                             value="subtask0")
        except RPCError as error:
            self.assertIsInstance(error, RPCError)

        #:update and commit with empty lists
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=-1,
                                         pos=-1,
                                         value="subtask0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #: update and commit already commited lists
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos=0,
                                         value="new_subtask0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #update twice same, append new and commit
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos=0,
                                         value="new_subtask0_up1")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos=0,
                                         value="new_subtask0_up2")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos=-1,
                                         value="new_subtask1_up2")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #:update a non valid pos
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="subtasks",
                                             subfield_id=0,
                                             pos=1,
                                             value="not subtask1")
        except RPCError as error:
            self.assertIsInstance(error, RPCError)


    def test_task_update_commit_field_list_dict(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        uuid = model["data"]["uuid"]["value"]

        #:update with invalid subfield_id
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="subtasks",
                                             subfield_id=0,
                                             pos="foo",
                                             value="subtask0")
        except RPCError as error:
            self.assertIsInstance(error, RPCError)

        #:update and commit with empty subfield
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=-1,
                                         pos="foo",
                                         value="subtask0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #: update and commit already commited lists
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos="foo",
                                         value="new_subtask0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #update twice same, append new and commit
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos="foo1",
                                         value="new_subtask0_up1")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos="foo1",
                                         value="new_subtask0_up2")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="subtasks",
                                         subfield_id=0,
                                         pos="foo_new",
                                         value="new_subtask0_up2")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #:update a non valid pos
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="subtasks",
                                             subfield_id=0,
                                             pos=1,
                                             value="not subtask1")
        except RPCError as error:
            self.assertIsInstance(error, RPCError)

    def test_task_update_commit_field_dict(self):
        user = self.login(u'user', u'123')
        tasks_proxy = self.rpc_client.get_proxy(prefix="tasks.")
        model = tasks_proxy.getModel()
        uuid = model["data"]["uuid"]["value"]


        #:update and commint for fist time
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="comments",
                                         subfield_id="user1",
                                         value="comment0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #: update and commit already commited lists
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="comments",
                                         subfield_id="user1",
                                         value="comment0_up0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #: update twice, append new and commit
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="comments",
                                         subfield_id="user1",
                                         value="comment0_up1")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="comments",
                                         subfield_id="user1",
                                         value="comment0_up2")
        self.assertTrue(update)
        update = tasks_proxy.updateField(uuid=uuid,
                                         field_name="comments",
                                         subfield_id="user2",
                                         value="comment0")
        self.assertTrue(update)
        self.assertTrue(tasks_proxy.commit(uuid=uuid))

        #:update a non valid subfield_id
        try:
            update = tasks_proxy.updateField(uuid=uuid,
                                             field_name="comments",
                                             subfield_id=1,
                                             value="not subtask1")
        except RPCError as error:
            self.assertIsInstance(error, RPCError)
Beispiel #46
0
class APIServer():
    """Flask application wrapper exposing a RestAPI over a gevent WSGIServer.

    All v1 endpoints are registered on a blueprint under ``_api_prefix``;
    unhandled exceptions are converted into 500 JSON failure responses.
    """

    _api_prefix = '/api/1'

    def __init__(self,
                 rest_api: RestAPI,
                 cors_domain_list: Optional[List[str]] = None) -> None:
        """Wire the flask app, blueprint, URL table and error handlers.

        cors_domain_list: origins allowed by CORS; None/empty skips CORS
        setup entirely. (Annotation fixed: the default is None, so the
        parameter is Optional[List[str]], not List[str].)
        """
        flask_app = Flask(__name__)
        if cors_domain_list:
            CORS(flask_app, origins=cors_domain_list)
        blueprint = create_blueprint()
        flask_api_context = Api(blueprint, prefix=self._api_prefix)
        setup_urls(
            flask_api_context=flask_api_context,
            rest_api=rest_api,
            urls=URLS_V1,
        )

        self.rest_api = rest_api
        self.flask_app = flask_app
        self.blueprint = blueprint

        # Created lazily in start(); None whenever the server is not running.
        self.wsgiserver: Optional[WSGIServer] = None
        self.flask_app.register_blueprint(self.blueprint)

        self.flask_app.errorhandler(HTTPStatus.NOT_FOUND)(endpoint_not_found)
        self.flask_app.register_error_handler(Exception,
                                              self.unhandled_exception)

    @staticmethod
    def unhandled_exception(exception: Exception) -> Response:
        """ Flask.errorhandler when an exception wasn't correctly handled """
        log.critical(
            "Unhandled exception when processing endpoint request",
            exc_info=True,
        )
        return api_response(wrap_in_fail_result(str(exception)),
                            HTTPStatus.INTERNAL_SERVER_ERROR)

    def run(self,
            host: str = '127.0.0.1',
            port: int = 5042,
            **kwargs: Any) -> None:
        """This is only used for the data faker and not used in production"""
        self.flask_app.run(host=host, port=port, **kwargs)

    def start(self, host: str = '127.0.0.1', port: int = 5042) -> None:
        """This is used to start the API server in production"""
        wsgi_logger = logging.getLogger(__name__ + '.pywsgi')
        self.wsgiserver = WSGIServer(
            (host, port),
            self.flask_app,
            log=wsgi_logger,
            error_log=wsgi_logger,
        )
        msg = f'Rotki API server is running at: {host}:{port}'
        print(msg)
        log.info(msg)
        # start() returns immediately; the caller owns the main loop.
        self.wsgiserver.start()

    def stop(self, timeout: int = 5) -> None:
        """Stops the API server. If handlers are running after timeout they are killed"""
        if self.wsgiserver is not None:
            self.wsgiserver.stop(timeout)
            self.wsgiserver = None

        # Stop the REST layer even when the WSGI server never started.
        self.rest_api.stop()
    cov_port = cov_config.get('sub_port', None)

    cov = coverage.Coverage()

    app_cov = Flask(cov_config.get('exec_file', 'hehe'))

    @app_cov.route("/", methods=['GET', 'POST'])
    def index():
        pass
        return 'eee'

    @app_cov.route("/save", methods=['GET', 'POST'])
    def save():
        cov.save()
        return 'save ok'

    @app_cov.route("/stop", methods=['GET', 'POST'])
    def stop():
        cov.save()
        return 'stop ok'

    config = {
        'host': '127.0.0.1',
        'port': cov_port,
        # 'debug' : True,
    }
    http_server = WSGIServer(('127.0.0.1', cov_port), app_cov)
    http_server.start()
    # app.run(**config)
    print('this time is ', cov_port)
    cov.start()
Beispiel #48
0
class WebServer(object):
    """Run Calibre-Web under either gevent (WSGIServer) or tornado
    (HTTPServer), selected by the module flag ``_GEVENT``.

    Handles listen-socket creation (TCP, IPv6 fallback, or a unix
    socket), optional SSL, access logging, and a stop/restart cycle
    that re-executes the original command line.
    """

    def __init__(self):
        # Arrange for SIGINT/SIGTERM to shut the server down cleanly.
        signal.signal(signal.SIGINT, self._killServer)
        signal.signal(signal.SIGTERM, self._killServer)

        # Populated later by init_app() / start().
        self.wsgiserver = None
        self.access_logger = None
        self.restart = False
        self.app = None
        self.listen_address = None
        self.listen_port = None
        self.unix_socket_file = None
        self.ssl_args = None

    def init_app(self, application, config):
        """Bind the WSGI application and read listen address/port,
        access-log and SSL settings from *config*.
        """
        self.app = application
        self.listen_address = config.get_config_ipaddress()
        self.listen_port = config.config_port

        if config.config_access_log:
            log_name = "gevent.access" if _GEVENT else "tornado.access"
            formatter = logger.ACCESS_FORMATTER_GEVENT if _GEVENT else logger.ACCESS_FORMATTER_TORNADO
            self.access_logger, logfile = logger.create_access_log(
                config.config_access_logfile, log_name, formatter)
            # create_access_log() may fall back to another path; if it
            # did, persist the corrected path back into the config.
            if logfile != config.config_access_logfile:
                log.warning(
                    "Accesslog path %s not valid, falling back to default",
                    config.config_access_logfile)
                config.config_access_logfile = logfile
                config.save()
        else:
            if not _GEVENT:
                logger.get('tornado.access').disabled = True

        certfile_path = config.get_config_certfile()
        keyfile_path = config.get_config_keyfile()
        if certfile_path and keyfile_path:
            if os.path.isfile(certfile_path) and os.path.isfile(keyfile_path):
                self.ssl_args = dict(certfile=certfile_path,
                                     keyfile=keyfile_path)
            else:
                log.warning(
                    'The specified paths for the ssl certificate file and/or key file seem to be broken. '
                    'Ignoring ssl.')
                log.warning('Cert path: %s', certfile_path)
                log.warning('Key path:  %s', keyfile_path)

    def _make_gevent_unix_socket(self, socket_file):
        """Create and return a unix-domain listener bound at *socket_file*."""
        # the socket file must not exist prior to bind()
        if os.path.exists(socket_file):
            # avoid nuking regular files and symbolic links (could be a mistype or security issue)
            if os.path.isfile(socket_file) or os.path.islink(socket_file):
                raise OSError(errno.EEXIST, os.strerror(errno.EEXIST),
                              socket_file)
            os.remove(socket_file)

        unix_sock = WSGIServer.get_listener(socket_file, family=socket.AF_UNIX)
        self.unix_socket_file = socket_file

        # ensure current user and group have r/w permissions, no permissions for other users
        # this way the socket can be shared in a semi-secure manner
        # between the user running calibre-web and the user running the fronting webserver
        os.chmod(socket_file, 0o660)

        return unix_sock

    def _make_gevent_socket(self):
        """Choose the gevent listen socket.

        Returns ``(socket_or_address, readable_name)``; *readable_name*
        is None when the address pair itself is printable. Preference
        order: unix socket from $CALIBRE_UNIX_SOCKET, the configured
        address, IPv6 wildcard, then IPv4 wildcard as a fallback.
        """
        if os.name != 'nt':
            unix_socket_file = os.environ.get("CALIBRE_UNIX_SOCKET")
            if unix_socket_file:
                return self._make_gevent_unix_socket(
                    unix_socket_file), "unix:" + unix_socket_file

        if self.listen_address:
            return (self.listen_address, self.listen_port), None

        if os.name == 'nt':
            # Windows: bind explicitly to the IPv4 any-address.
            self.listen_address = '0.0.0.0'
            return (self.listen_address, self.listen_port), None

        try:
            address = ('::', self.listen_port)
            sock = WSGIServer.get_listener(address, family=socket.AF_INET6)
        except socket.error as ex:
            log.error('%s', ex)
            log.warning('Unable to listen on "", trying on IPv4 only...')
            address = ('', self.listen_port)
            sock = WSGIServer.get_listener(address, family=socket.AF_INET)

        return sock, _readable_listen_address(*address)

    @staticmethod
    def _get_args_for_reloading():
        """Determine how the script was executed, and return the args needed
        to execute it again in a new process.
        Code from https://github.com/pyload/pyload. Author GammaC0de, voulter
        """
        rv = [sys.executable]
        py_script = sys.argv[0]
        args = sys.argv[1:]
        # Need to look at main module to determine how it was executed.
        __main__ = sys.modules["__main__"]

        # The value of __package__ indicates how Python was called. It may
        # not exist if a setuptools script is installed as an egg. It may be
        # set incorrectly for entry points created with pip on Windows.
        if getattr(__main__, "__package__", None) is None or (
                os.name == "nt" and __main__.__package__ == ""
                and not os.path.exists(py_script)
                and os.path.exists("{}.exe".format(py_script))):
            # Executed a file, like "python app.py".
            py_script = os.path.abspath(py_script)

            if os.name == "nt":
                # Windows entry points have ".exe" extension and should be
                # called directly.
                if not os.path.exists(py_script) and os.path.exists(
                        "{}.exe".format(py_script)):
                    py_script += ".exe"

                if (os.path.splitext(sys.executable)[1] == ".exe"
                        and os.path.splitext(py_script)[1] == ".exe"):
                    rv.pop(0)

            rv.append(py_script)
        else:
            # Executed a module, like "python -m module".
            if sys.argv[0] == "-m":
                args = sys.argv
            else:
                if os.path.isfile(py_script):
                    # Rewritten by Python from "-m script" to "/path/to/script.py".
                    py_module = __main__.__package__
                    name = os.path.splitext(os.path.basename(py_script))[0]

                    if name != "__main__":
                        py_module += ".{}".format(name)
                else:
                    # Incorrectly rewritten by pydevd debugger from "-m script" to "script".
                    py_module = py_script

                rv.extend(("-m", py_module.lstrip(".")))

        rv.extend(args)
        return rv

    def _start_gevent(self):
        """Serve with gevent; blocks until the server is stopped."""
        ssl_args = self.ssl_args or {}

        try:
            sock, output = self._make_gevent_socket()
            if output is None:
                output = _readable_listen_address(self.listen_address,
                                                  self.listen_port)
            log.info('Starting Gevent server on %s', output)
            self.wsgiserver = WSGIServer(sock,
                                         self.app,
                                         log=self.access_logger,
                                         spawn=Pool(),
                                         **ssl_args)
            if ssl_args:
                wrap_socket = self.wsgiserver.wrap_socket

                def my_wrap_socket(*args, **kwargs):
                    # Log SSL handshake failures and abort only the
                    # affected greenlet instead of crashing the server.
                    try:
                        return wrap_socket(*args, **kwargs)
                    except (ssl.SSLError, OSError) as ex:
                        log.warning('Gevent SSL Error: %s', ex)
                        raise GreenletExit

                self.wsgiserver.wrap_socket = my_wrap_socket
            self.wsgiserver.serve_forever()
        finally:
            # Always clean up the unix socket file, even on error.
            if self.unix_socket_file:
                os.remove(self.unix_socket_file)
                self.unix_socket_file = None

    def _start_tornado(self):
        """Serve with tornado; blocks in the IOLoop until stopped."""
        if os.name == 'nt' and sys.version_info > (3, 7):
            import asyncio
            asyncio.set_event_loop_policy(
                asyncio.WindowsSelectorEventLoopPolicy())
        log.info(
            'Starting Tornado server on %s',
            _readable_listen_address(self.listen_address, self.listen_port))

        # Max buffer size set to 200MB
        http_server = HTTPServer(WSGIContainer(self.app),
                                 max_buffer_size=209700000,
                                 ssl_options=self.ssl_args)
        http_server.listen(self.listen_port, self.listen_address)
        self.wsgiserver = IOLoop.current()
        self.wsgiserver.start()
        # wait for stop signal
        self.wsgiserver.close(True)

    def start(self):
        """Run the server until stop() is called.

        Returns True on clean shutdown (or after spawning the restart
        process), False when the server failed to start.
        """
        try:
            if _GEVENT:
                # leave subprocess out to allow forking for fetchers and processors
                self._start_gevent()
            else:
                self._start_tornado()
        except Exception as ex:
            log.error("Error starting server: %s", ex)
            print("Error starting server: %s" % ex)
            self.stop()
            return False
        finally:
            self.wsgiserver = None

        # prevent irritating log of pending tasks message from asyncio
        logger.get('asyncio').setLevel(logger.logging.CRITICAL)

        if not self.restart:
            log.info("Performing shutdown of Calibre-Web")
            return True

        log.info("Performing restart of Calibre-Web")
        args = self._get_args_for_reloading()
        subprocess.call(args, close_fds=True)  # nosec
        return True

    def _killServer(self, __, ___):
        """Signal handler: request a shutdown (signal arguments ignored)."""
        self.stop()

    def stop(self, restart=False):
        """Stop the running server; with restart=True, start() will
        re-exec the original command line after shutdown.
        """
        from . import updater_thread
        updater_thread.stop()

        log.info("webserver stop (restart=%s)", restart)
        self.restart = restart
        if self.wsgiserver:
            if _GEVENT:
                self.wsgiserver.close()
            else:
                # tornado IOLoop: stop must be scheduled onto the loop
                # thread from the signal handler
                self.wsgiserver.add_callback_from_signal(self.wsgiserver.stop)
Beispiel #49
0
class WebService(Service):
    """A Service that additionally exposes an HTTP interface.

    The WSGI application is built as an onion of middlewares (static
    files, RPC dispatch, authentication, proxy fix-up) around a tornado
    WSGIApplication, and served by a gevent WSGIServer.
    """

    def __init__(self, listen_port, handlers, parameters, shard=0,
                 listen_address=""):
        super(WebService, self).__init__(shard)

        # Pull our own options out of `parameters` before handing the
        # remainder to tornado.
        static_files = parameters.pop('static_files', [])
        rpc_enabled = parameters.pop('rpc_enabled', False)
        rpc_auth = parameters.pop('rpc_auth', None)
        auth_middleware = parameters.pop('auth_middleware', None)
        is_proxy_used = parameters.pop('is_proxy_used', None)
        num_proxies_used = parameters.pop('num_proxies_used', None)

        self.wsgi_app = tornado.wsgi.WSGIApplication(handlers, **parameters)
        self.wsgi_app.service = self

        # Each static directory is mounted under /static.
        for directory in static_files:
            self.wsgi_app = SharedDataMiddleware(
                self.wsgi_app, {"/static": directory})

        if rpc_enabled:
            self.wsgi_app = DispatcherMiddleware(
                self.wsgi_app, {"/rpc": RPCMiddleware(self, rpc_auth)})

        # Authentication must see the real client address, so it is
        # applied before ProxyFix rewrites it.
        if auth_middleware is not None:
            self.wsgi_app = auth_middleware(self.wsgi_app)
            self.auth_handler = self.wsgi_app

        # Trust X-Forwarded-For only when requests are known to come
        # through trusted proxies; otherwise clients could spoof their
        # IP and defeat the IP lock.
        if num_proxies_used is None:
            num_proxies_used = 1 if is_proxy_used else 0

        if num_proxies_used > 0:
            self.wsgi_app = ProxyFix(self.wsgi_app, num_proxies_used)

        self.web_server = WSGIServer((listen_address, listen_port),
                                     self.wsgi_app)

    def run(self):
        """Run the service.

        Starts the WSGI server, enters the RPC service loop, and stops
        the WSGI server once that loop returns.
        """
        self.web_server.start()
        Service.run(self)
        self.web_server.stop()
Beispiel #50
0
class WebWriter(object):
    """Front-end web writer.

    Wires together the central database pool, ZeroMQ clients/servers,
    accounting and redis sinks, and the WSGI application that accepts
    archive requests, and manages their shared start/stop lifecycle.
    """

    def __init__(self, halt_event):
        self._log = logging.getLogger("WebWriter")
        memcached_client = memcache.Client(_memcached_nodes)

        self._interaction_pool = gdbpool.interaction_pool.DBInteractionPool(
            get_central_database_dsn(),
            pool_name=_central_pool_name,
            pool_size=_database_pool_size,
            do_log=True)

        authenticator = InteractionPoolAuthenticator(memcached_client,
                                                     self._interaction_pool)

        # Ticket #25: must run database operation in a greenlet
        greenlet = gevent.Greenlet.spawn(_get_cluster_row_and_node_row,
                                         self._interaction_pool)
        greenlet.join()
        self._cluster_row, node_row = greenlet.get()

        self._unified_id_factory = UnifiedIDFactory(node_row.id)

        self._deliverator = Deliverator()

        self._zeromq_context = zmq.Context()

        self._pull_server = GreenletPULLServer(self._zeromq_context,
                                               _web_writer_pipeliner_address,
                                               self._deliverator)
        self._pull_server.link_exception(self._unhandled_greenlet_exception)

        # one resilient client per data-writer node
        self._data_writer_clients = list()
        for node_name, address in zip(_node_names, _data_writer_addresses):
            resilient_client = GreenletResilientClient(
                self._zeromq_context,
                node_name,
                address,
                _client_tag,
                _web_writer_pipeliner_address,
                self._deliverator,
                connect_messages=[])
            resilient_client.link_exception(self._unhandled_greenlet_exception)
            self._data_writer_clients.append(resilient_client)

        self._space_accounting_dealer_client = GreenletDealerClient(
            self._zeromq_context, _local_node_name,
            _space_accounting_server_address)
        self._space_accounting_dealer_client.link_exception(
            self._unhandled_greenlet_exception)

        push_client = GreenletPUSHClient(
            self._zeromq_context,
            _local_node_name,
            _space_accounting_pipeline_address,
        )

        self._accounting_client = SpaceAccountingClient(
            _local_node_name, self._space_accounting_dealer_client,
            push_client)

        self._event_push_client = EventPushClient(self._zeromq_context,
                                                  "web-server")

        # message sent to data writers telling them the server
        # is (re)starting, thereby invalidating any archives
        # that are in progress for this node
        unified_id = self._unified_id_factory.next()
        timestamp = create_timestamp()
        self._event_push_client.info("web-writer-start",
                                     "web writer (re)start",
                                     unified_id=unified_id,
                                     timestamp_repr=repr(timestamp),
                                     source_node_name=_local_node_name)

        id_translator_keys_path = os.environ.get(
            "NIMBUS_IO_ID_TRANSLATION_KEYS",
            os.path.join(_repository_path, "id_translator_keys.pkl"))
        # BUG FIX: open the pickle in binary mode ("rb", was "r") —
        # pickle data is bytes; text mode breaks under Python 3 and on
        # Windows under Python 2.
        with open(id_translator_keys_path, "rb") as input_file:
            id_translator_keys = pickle.load(input_file)

        self._id_translator = InternalIDTranslator(
            id_translator_keys["key"], id_translator_keys["hmac_key"],
            id_translator_keys["iv_key"], id_translator_keys["hmac_size"])

        redis_queue = gevent.queue.Queue()

        self._redis_sink = OperationalStatsRedisSink(halt_event, redis_queue,
                                                     _local_node_name)
        self._redis_sink.link_exception(self._unhandled_greenlet_exception)

        self.application = Application(self._cluster_row,
                                       self._unified_id_factory,
                                       self._id_translator,
                                       self._data_writer_clients,
                                       authenticator, self._accounting_client,
                                       self._event_push_client, redis_queue)
        self.wsgi_server = WSGIServer((_web_writer_host, _web_writer_port),
                                      application=self.application,
                                      backlog=_wsgi_backlog)

    def start(self):
        """Start all ZeroMQ helpers, the redis sink and the WSGI server."""
        self._space_accounting_dealer_client.start()
        self._pull_server.start()
        for client in self._data_writer_clients:
            client.start()
        self._redis_sink.start()
        self.wsgi_server.start()

    def stop(self):
        """Shut down: web server first, then greenlets, then ZeroMQ."""
        self._log.info("stopping wsgi web server")
        self.wsgi_server.stop()
        self._accounting_client.close()
        self._log.debug("killing greenlets")
        self._space_accounting_dealer_client.kill()
        self._pull_server.kill()
        for client in self._data_writer_clients:
            client.kill()
        self._redis_sink.kill()
        self._log.debug("joining greenlets")
        self._space_accounting_dealer_client.join()
        self._pull_server.join()
        for client in self._data_writer_clients:
            client.join()
        # BUG FIX: the original repeated kill() here; in the join phase
        # we wait for the already-killed greenlet to finish instead.
        self._redis_sink.join()
        self._log.debug("closing zmq")
        self._event_push_client.close()
        self._zeromq_context.term()

    def _unhandled_greenlet_exception(self, greenlet_object):
        """Log and report an exception escaping one of our greenlets."""
        try:
            greenlet_object.get()
        except Exception:
            self._log.exception(str(greenlet_object))
            exctype, value = sys.exc_info()[:2]
            self._event_push_client.exception("unhandled_greenlet_exception",
                                              str(value),
                                              exctype=exctype.__name__)
Beispiel #51
0
class APIServer:
    """
    Runs the API-server that routes the endpoint to the resources.
    The API is wrapped in multiple layers, and the Server should be invoked this way::

        # instance of the raiden-api
        raiden_api = RaidenAPI(...)

        # wrap the raiden-api with rest-logic and encoding
        rest_api = RestAPI(raiden_api)

        # create the server and link the api-endpoints with flask / flask-restful middleware
        api_server = APIServer(rest_api)

        # run the server
        api_server.run('127.0.0.1', 5001, debug=True)

    """

    _api_prefix = '/api/1'

    def __init__(self, rest_api, cors_domain_list=None, web_ui=False, eth_rpc_endpoint=None):
        if rest_api.version != 1:
            raise ValueError(
                'Invalid api version: {}'.format(rest_api.version)
            )

        flask_app = Flask(__name__)
        if cors_domain_list:
            CORS(flask_app, origins=cors_domain_list)

        if eth_rpc_endpoint:
            # normalize to a full URL so the web UI can use it directly
            if not eth_rpc_endpoint.startswith('http'):
                eth_rpc_endpoint = 'http://{}'.format(eth_rpc_endpoint)
            flask_app.config['WEB3_ENDPOINT'] = eth_rpc_endpoint

        blueprint = create_blueprint()
        flask_api_context = Api(blueprint, prefix=self._api_prefix)

        restapi_setup_type_converters(
            flask_app,
            {'hexaddress': HexAddressConverter},
        )

        restapi_setup_urls(
            flask_api_context,
            rest_api,
            URLS_V1,
        )

        self.rest_api = rest_api
        self.flask_app = flask_app
        self.blueprint = blueprint
        self.flask_api_context = flask_api_context

        self.wsgiserver = None
        self.flask_app.register_blueprint(self.blueprint)
        self.flask_app.config['WEBUI_PATH'] = '../ui/web/dist/'
        if is_frozen():
            # Inside frozen pyinstaller image
            self.flask_app.config['WEBUI_PATH'] = '{}/raiden/ui/web/dist/'.format(sys.prefix)

        self.flask_app.errorhandler(HTTPStatus.NOT_FOUND)(endpoint_not_found)

        if web_ui:
            for route in ('/ui/<path:file_name>', '/ui', '/ui/', '/index.html', '/'):
                self.flask_app.add_url_rule(
                    route,
                    route,
                    view_func=self._serve_webui,
                    methods=('GET', ),
                )

    def _serve_webui(self, file_name='index.html'):
        """Serve a web UI asset; fall back to index.html when missing."""
        try:
            # BUG FIX: was `assert file_name`, which is stripped under
            # `python -O`. Raise NotFound explicitly instead; it is caught
            # below, preserving the fall-back behaviour.
            if not file_name:
                raise NotFound

            web3 = self.flask_app.config.get('WEB3_ENDPOINT')
            if web3 and 'config.' in file_name and file_name.endswith('.json'):
                # Rewrite a localhost web3 endpoint so the browser reaches
                # it through the host it used for this request.
                host = request.headers.get('Host')
                if any(h in web3 for h in ('localhost', '127.0.0.1')) and host:
                    _, _port = split_endpoint(web3)
                    _host, _ = split_endpoint(host)
                    web3 = 'http://{}:{}'.format(_host, _port)
                response = jsonify({'raiden': self._api_prefix, 'web3': web3})
            else:
                response = send_from_directory(self.flask_app.config['WEBUI_PATH'], file_name)
        except (NotFound, AssertionError):
            response = send_from_directory(self.flask_app.config['WEBUI_PATH'], 'index.html')
        return response

    def run(self, host='127.0.0.1', port=5001, **kwargs):
        """Serve via Flask's development server (not for production)."""
        self.flask_app.run(host=host, port=port, **kwargs)

    def start(self, host='127.0.0.1', port=5001):
        """Start the production gevent WSGI server."""
        # WSGI expects a stdlib logger, with structlog there's conflict of method names
        wsgi_log = logging.getLogger(__name__ + '.pywsgi')
        self.wsgiserver = WSGIServer(
            (host, port),
            self.flask_app,
            log=wsgi_log,
            error_log=wsgi_log
        )
        self.wsgiserver.start()

    def stop(self, timeout=5):
        """Stop the WSGI server; handlers still alive after *timeout* are killed."""
        if getattr(self, 'wsgiserver', None):
            self.wsgiserver.stop(timeout)
            self.wsgiserver = None
Beispiel #52
0
class Robot:
    """
    A robot is the main context where the templates and service lives.
    It is responsible to:
        - run the REST API server
        - download the template from a git repository
        - load the templates in memory and make them available
    """
    def __init__(self):
        """Create a robot in the stopped state with no server attached."""
        # The robot counts as "started" while this event is cleared.
        stop_event = Event()
        stop_event.set()
        self._stop_event = stop_event
        self._sig_handler = []   # gevent signal handles, cancelled in stop()
        self._addr = None        # listen address once the HTTP server is up
        self._http = None        # server handler
        self.data_repo_url = None

    @property
    def started(self):
        return not self._stop_event.is_set()

    @property
    def address(self):
        return self._addr

    def set_data_repo(self, url):
        """Configure where services serialize their state.

        *url* may be a git URL (cloned locally), an absolute path
        (created if needed), or None (defaults to
        ``{j.dirs.DATADIR}/zrobot``). It may coincide with one of the
        template repositories.
        """
        self.data_repo_url = url
        config.data_repo = config.DataRepo(url)

    def add_template_repo(self, url, directory='templates'):
        """Register a template repository so its templates get loaded."""
        repo_url, repo_branch = giturl.parse_template_repo_url(url)
        tcol.add_repo(url=repo_url, branch=repo_branch, directory=directory)

    def set_config_repo(self, url=None, key=None):
        """Ensure the jumpscale configuration repository is initialized.

        *url* may be a git URL (cloned locally), an absolute path
        (created if needed), or None (defaults to
        ``{j.dirs.CODEDIR}/local/stdorg/config``).

        *key* is the path of the ssh key used with the repository; a
        key is generated automatically when omitted.
        """
        config.config_repo = config.ConfigRepo(key=key, url=url)

    def start(self,
              listen=":6600",
              log_level=logging.DEBUG,
              block=True,
              auto_push=False,
              auto_push_interval=60,
              admin_organization=None,
              user_organization=None,
              mode=None,
              god=False,
              **kwargs):
        """
        start the rest web server
        load the services from the local git repository

        Raises RuntimeError when no data repository or config manager is
        configured. With ``block=True`` this only returns after stop().
        """
        self._stop_event.clear()

        config.mode = mode
        config.god = god  # when true, this allow to get data and logs from services using the REST API
        if config.data_repo is None:
            raise RuntimeError(
                "Not data repository set. Robot doesn't know where to save data."
            )
        if not j.tools.configmanager.path:
            raise RuntimeError(
                "config manager is not configured, can't continue")

        # configure storage
        storage.init(config)
        # instantiate webhooks manager and load the configured webhooks
        config.webhooks = webhooks.get(config)

        logger = j.logger.get('zerorobot')
        logger.info("data directory: %s" % config.data_repo.path)
        logger.info("config directory: %s" % j.tools.configmanager.path)
        keypath = j.tools.configmanager.keypath
        if not keypath:
            # fall back to the default ~/.ssh location for the configured key name
            keypath = os.path.expanduser(
                os.path.join('~/.ssh', j.tools.configmanager.keyname))
        logger.info("sshkey used: %s" % keypath)

        # configure prometheus monitoring
        if not kwargs.get('testing', False):
            monitor(app)

        # configure authentication middleware
        _configure_authentication(admin_organization, user_organization)

        # NOTE(review): gevent.signal() as a handler-installer was renamed
        # gevent.signal_handler() in gevent 1.5 — confirm the pinned gevent
        # version still supports this spelling.
        for sig in [signal.SIGINT, signal.SIGTERM]:
            self._sig_handler.append(gevent.signal(sig, self.stop))

        # configure logger
        app._logger = logger

        # auto-push data repo
        if auto_push:
            logger.info("auto push of data repo enabled")
            config.data_repo.start_auto_push(interval=auto_push_interval,
                                             logger=logger)
            config.config_repo.start_auto_push(interval=auto_push_interval,
                                               logger=logger)

        # load services from data repo
        loader.load_services(config)
        # notify services that they can start processing their task list
        config.SERVICE_LOADED.set()

        if mode == 'node':
            _create_node_service()

        # only keep executed tasks for 2 hours
        gevent.spawn(_trim_tasks, 7200)

        # using a pool allow to kill the request when stopping the server
        pool = Pool(None)
        hostport = _split_hostport(listen)
        self._http = WSGIServer(hostport,
                                app,
                                spawn=pool,
                                log=logger,
                                error_log=logger)
        self._http.start()
        self._addr = self._http.address
        logger.info("robot running at %s:%s" % hostport)

        if block:
            try:
                # wait until stop() is called
                self._stop_event.wait()
            finally:
                # if we get here while still running (e.g. an exception),
                # shut down gracefully from a separate greenlet
                if self.started:
                    gevent.Greenlet.spawn(self.stop, timeout=60).join()

    def stop(self, timeout=30):
        """
        1. stop receiving requests on the REST API
        2. wait all currently active request finishes
        3. stop all services
        4. wait all services stop gracefully
        5. serialize all services state to disk
        6. exit the process

        ``timeout`` bounds how long each individual service may take to stop.
        """
        logger = j.logger.get('zerorobot')
        logger.info('stopping robot')

        # prevent the signal handler to be called again if
        # more signal are received
        for h in self._sig_handler:
            h.cancel()

        logger.info("stop REST API")
        logger.info("waiting request to finish")
        self._http.stop(timeout=10)
        self._addr = None

        logger.info("waiting for services to stop")
        # stop at most 30 services concurrently
        pool = Pool(30)

        def stop_service(service):
            # best effort: log, but never propagate, a service stop failure
            try:
                service._gracefull_stop(timeout=timeout)
            except Exception as err:
                logger.warning(
                    'exception raised while waiting %s %s (%s) to finish: %s',
                    service.template_uid.name, service.name, service.guid, err)

        pool.map(stop_service, scol.list_services())

        # here no more requests are coming in, we can safely save all services
        self._save_services()

        # notify we can exit the process
        self._stop_event.set()

    def _save_services(self):
        """
        serialize all the services on disk
        """
        for service in scol.list_services():
            # stop all the greenlets attached to the services
            service.gl_mgr.stop_all()
            service.save()