def main(cls, args=None, use_files=True):
    # type: (Sequence[str], bool) -> Any
    """Entry point to autopush's main command line scripts.

    aka autopush/autoendpoint.

    """
    ns = cls.parse_args(cls.config_files if use_files else [], args)
    if not ns.no_aws:
        logging.HOSTNAME = utils.get_ec2_instance_id()
    PushLogger.setup_logging(
        cls.logger_name,
        log_level=ns.log_level or ("debug" if ns.debug else "info"),
        log_format="text" if ns.human_logs else "json",
        log_output=ns.log_output,
        sentry_dsn=bool(os.environ.get("SENTRY_DSN")),
        firehose_delivery_stream=ns.firehose_stream_name)
    try:
        app = cls.from_argparse(ns)
    except InvalidSettings as e:
        log.critical(str(e))
        return 1

    app.setup()
    app.run()
def main(cls, args=None, use_files=True, resource=None):
    # type: (Sequence[str], bool, DynamoDBResource) -> Any
    """Entry point to autopush's main command line scripts.

    aka autopush/autoendpoint.

    """
    ns = cls.parse_args(cls.config_files if use_files else [], args)
    PushLogger.setup_logging(
        cls.logger_name,
        log_level=ns.log_level or ("debug" if ns.debug else "info"),
        log_format="text" if ns.human_logs else "json",
        log_output=ns.log_output,
        sentry_dsn=bool(os.environ.get("SENTRY_DSN")),
        firehose_delivery_stream=ns.firehose_stream_name,
        no_aws=ns.no_aws
    )
    try:
        app = cls.from_argparse(ns, resource=resource)
    except InvalidConfig as e:
        log.critical(str(e))
        return 1

    app.setup()
    return app.run()
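# Usage sketch (not from the original source): a hypothetical illustration of
# how the classmethod above might be wired up as a console-script entry point.
# The subclass name EndpointApplication and the run_autoendpoint wrapper are
# assumptions for illustration only; main() itself returns 1 on InvalidConfig
# and otherwise whatever app.run() returns.
import sys


def run_autoendpoint():
    sys.exit(EndpointApplication.main())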
def test_include_stacktrace_when_no_tb(self):
    pl = PushLogger.setup_logging("Autopush", sentry_dsn=True)
    log.failure("foo", failure.Failure(ZeroDivisionError(), exc_tb=None))
    self.flushLoggedErrors()
    d = Deferred()
    co = sys._getframe().f_code
    filename = co.co_filename
    testname = co.co_name

    def check():
        logged = self.sentry.logged
        if not logged:  # pragma: nocover
            reactor.callLater(0, check)
            return

        assert len(logged) == 1
        # Ensure a stacktrace was included w/ the current frame as
        # the last entry
        frames = logged[0]['stacktrace']['frames']
        last = frames[-1]
        assert last['abs_path'] == filename
        assert last['function'] == testname

        self._port.stopListening()
        pl.stop()
        d.callback(True)

    reactor.callLater(0, check)
    return d
def test_custom_type(self):
    obj = PushLogger.setup_logging("Autopush")
    obj._output = mock_stdout = Mock()
    log.info("omg!", Type=7)
    eq_(len(mock_stdout.mock_calls), 2)
    kwargs = mock_stdout.mock_calls[0][1][0]
    ok_("Type" in kwargs)
def test_include_stacktrace_when_no_tb(self):
    pl = PushLogger.setup_logging("Autopush", sentry_dsn=True)
    log.failure("foo", failure.Failure(ZeroDivisionError(), exc_tb=None))
    self.flushLoggedErrors()
    d = Deferred()
    co = sys._getframe().f_code
    filename = co.co_filename
    testname = co.co_name

    def check():
        logged = self.sentry.logged
        if not logged:  # pragma: nocover
            reactor.callLater(0, check)
            return

        eq_(len(logged), 1)
        # Ensure a top level stacktrace was included
        stacktrace = logged[0]['stacktrace']
        ok_(any(filename == f['abs_path'] and testname == f['function']
                for f in stacktrace['frames']))

        self._port.stopListening()
        pl.stop()
        d.callback(True)

    reactor.callLater(0, check)
    return d
def test_custom_type(self):
    obj = PushLogger.setup_logging("Autopush")
    obj._output = mock_stdout = Mock()
    log.info("omg!", Type=7)
    assert len(mock_stdout.mock_calls) == 2
    kwargs = mock_stdout.mock_calls[0][1][0]
    assert "Type" in kwargs
    obj.stop()
def test_human_logs(self):
    obj = PushLogger.setup_logging("Autopush", log_format="text")
    obj._output = mock_stdout = Mock()
    log.info("omg!", Type=7)
    eq_(len(mock_stdout.mock_calls), 2)
    mock_stdout.reset_mock()
    log.error("wtf!", Type=7)
    eq_(len(mock_stdout.mock_calls), 2)
def test_sentry_logging(self):
    os.environ["SENTRY_DSN"] = "some_locale"
    PushLogger.setup_logging("Autopush", sentry_dsn=True)
    eq_(len(self.mock_raven.mock_calls), 2)

    log.failure("error", failure.Failure(Exception("eek")))
    self.flushLoggedErrors()
    d = Deferred()

    def check():
        if len(self.mock_client.mock_calls):
            eq_(len(self.mock_client.mock_calls), 1)
            d.callback(True)
        else:  # pragma: nocover
            reactor.callLater(0, check)

    del os.environ["SENTRY_DSN"]
    reactor.callLater(0, check)
    return d
def test_firehose_only_output(self, mock_boto3):
    obj = PushLogger("Autoput", log_output="none",
                     firehose_delivery_stream="test")
    obj.firehose = Mock(spec=FirehoseProcessor)
    obj.start()
    log.info("wow")
    obj.stop()
    assert len(obj.firehose.mock_calls) == 3
    assert len(obj.firehose.process.mock_calls) == 1
def test_file_output(self):
    try:
        os.unlink("testfile.txt")
    except OSError:  # pragma: nocover
        pass

    obj = PushLogger.setup_logging("Autoput", log_output="testfile.txt")
    obj.start()
    log.info("wow")
    obj.stop()
    with open("testfile.txt") as f:
        lines = f.readlines()
        assert len(lines) == 1
def test_sentry_logging(self):
    out = StringIO.StringIO()
    pl = PushLogger.setup_logging("Autopush", sentry_dsn=True)
    pl._output = out
    _client_info = dict(key='value')
    _timings = dict(key2='value', key3=True)

    log.failure(format="error",
                failure=failure.Failure(Exception("eek")),
                client_info=_client_info,
                timings=_timings)
    self.flushLoggedErrors()
    d = Deferred()

    def check():
        logged = self.sentry.logged
        if not logged:  # pragma: nocover
            reactor.callLater(0, check)
            return

        assert len(logged) == 1
        # Check that the sentry data has the client info as a sub dict
        # Note: these are double quoted, single quote strings.
        assert logged[0].get('extra').get('client_info') == {
            u"'key'": u"'value'"}
        # Check that the json written actually contains the client info
        # collapsed up into 'Fields'.
        out.seek(0)
        payload = json.loads(out.readline())
        assert payload['Fields']['key'] == 'value'
        assert payload['Fields']['key2'] == 'value'
        assert payload['Fields']['key3'] is True

        self._port.stopListening()
        pl.stop()
        d.callback(True)

    reactor.callLater(0, check)
    return d
def test_start_stop(self):
    obj = PushLogger.setup_logging("Autopush")
    obj.start()
    obj.stop()
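# Usage sketch (not from the original source), based on the tests above: set
# up the JSON formatter with its defaults, emit a structured event through
# twisted.logger, then stop the observer. Extra keyword arguments such as
# Type=7 are carried through to the emitted fields, as test_custom_type
# asserts.
from twisted.logger import Logger

log = Logger()

logger = PushLogger.setup_logging("Autopush", log_format="json")
log.info("omg!", Type=7)
logger.stop()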
def endpoint_main(sysargs=None, use_files=True):
    """Main entry point to setup an endpoint node, aka the autoendpoint
    script"""
    args, parser = _parse_endpoint(sysargs, use_files)
    senderid_list = None
    if args.senderid_list:
        try:
            senderid_list = json.loads(args.senderid_list)
        except (ValueError, TypeError):
            log.critical(format="Invalid JSON specified for senderid_list")
            return

    log_level = args.log_level or ("debug" if args.debug else "info")
    log_format = "text" if args.human_logs else "json"
    sentry_dsn = bool(os.environ.get("SENTRY_DSN"))
    PushLogger.setup_logging(
        "Autoendpoint",
        log_level=log_level,
        log_format=log_format,
        log_output=args.log_output,
        sentry_dsn=sentry_dsn,
        firehose_delivery_stream=args.firehose_stream_name)

    # Add some entropy to prevent potential conflicts.
    postfix = os.urandom(4).encode('hex').ljust(8, '0')
    settings = make_settings(
        args,
        endpoint_scheme=args.endpoint_scheme,
        endpoint_hostname=args.endpoint_hostname or args.hostname,
        endpoint_port=args.endpoint_port,
        enable_cors=not args.no_cors,
        s3_bucket=args.s3_bucket,
        senderid_expry=args.senderid_expry,
        senderid_list=senderid_list,
        bear_hash_key=args.auth_key,
        preflight_uaid="deadbeef000000000deadbeef" + postfix,
    )

    # Endpoint HTTP router
    site = cyclone.web.Application(
        [
            (r"/push/(?:(?P<api_ver>v\d+)\/)?(?P<token>[^\/]+)",
             EndpointHandler, dict(ap_settings=settings)),
            (r"/spush/(?:(?P<api_ver>v\d+)\/)?(?P<token>[^\/]+)",
             SimplePushHandler, dict(ap_settings=settings)),
            (r"/m/([^\/]+)", MessageHandler, dict(ap_settings=settings)),
            # PUT /register/        => connect info
            # GET /register/uaid    => chid + endpoint
            (r"/v1/([^\/]+)/([^\/]+)/registration(?:/([^\/]+))"
             "?(?:/subscription)?(?:/([^\/]+))?",
             RegistrationHandler, dict(ap_settings=settings)),
            (r"/v1/err(?:/([^\/]+))?", LogCheckHandler,
             dict(ap_settings=settings)),
        ],
        default_host=settings.hostname,
        debug=args.debug,
        log_function=skip_request_logging)
    mount_health_handlers(site, settings)

    settings.metrics.start()

    # start the senderIDs refresh timer
    if settings.routers.get('gcm') and settings.routers['gcm'].senderIDs:
        # The following shows coverage on my local machine, but not
        # travis.
        settings.routers['gcm'].senderIDs.start()  # pragma: nocover

    if args.ssl_key:
        contextFactory = AutopushSSLContextFactory(args.ssl_key,
                                                   args.ssl_cert)
        if args.ssl_dh_param:
            contextFactory.getContext().load_tmp_dh(args.ssl_dh_param)

        reactor.listenSSL(args.port, site, contextFactory)
    else:
        reactor.listenTCP(args.port, site)

    # Start the table rotation checker/updater
    l = task.LoopingCall(settings.update_rotating_tables)
    l.start(60)

    reactor.suggestThreadPoolSize(50)
    reactor.run()
def connection_main(sysargs=None, use_files=True):
    """Main entry point to setup a connection node, aka the autopush
    script"""
    args, parser = _parse_connection(sysargs, use_files)
    log_format = "text" if args.human_logs else "json"
    log_level = args.log_level or ("debug" if args.debug else "info")
    sentry_dsn = bool(os.environ.get("SENTRY_DSN"))
    PushLogger.setup_logging(
        "Autopush",
        log_level=log_level,
        log_format=log_format,
        log_output=args.log_output,
        sentry_dsn=sentry_dsn,
        firehose_delivery_stream=args.firehose_stream_name)

    # Add some entropy to prevent potential conflicts.
    postfix = os.urandom(4).encode('hex').ljust(8, '0')
    settings = make_settings(
        args,
        port=args.port,
        endpoint_scheme=args.endpoint_scheme,
        endpoint_hostname=args.endpoint_hostname,
        endpoint_port=args.endpoint_port,
        router_scheme="https" if args.router_ssl_key else "http",
        router_hostname=args.router_hostname,
        router_port=args.router_port,
        env=args.env,
        hello_timeout=args.hello_timeout,
        preflight_uaid="deadbeef000000000deadbeef" + postfix,
    )

    r = RouterHandler
    r.ap_settings = settings
    n = NotificationHandler
    n.ap_settings = settings

    # Internal HTTP notification router
    site = cyclone.web.Application([
        (r"/push/([^\/]+)", r),
        (r"/notif/([^\/]+)(/([^\/]+))?", n),
    ],
        default_host=settings.router_hostname,
        debug=args.debug,
        log_function=skip_request_logging)
    mount_health_handlers(site, settings)

    # Public websocket server
    proto = "wss" if args.ssl_key else "ws"
    factory = WebSocketServerFactory(
        "%s://%s:%s/" % (proto, args.hostname, args.port),
    )
    factory.protocol = PushServerProtocol
    factory.protocol.ap_settings = settings
    factory.setProtocolOptions(
        webStatus=False,
        openHandshakeTimeout=5,
        autoPingInterval=args.auto_ping_interval,
        autoPingTimeout=args.auto_ping_timeout,
        maxConnections=args.max_connections,
        closeHandshakeTimeout=args.close_handshake_timeout,
    )
    settings.factory = factory
    settings.metrics.start()

    # Wrap the WebSocket server in a default resource that exposes the
    # `/status` handler, and delegates to the WebSocket resource for all
    # other requests.
    resource = DefaultResource(WebSocketResource(factory))
    resource.putChild("status", StatusResource())
    siteFactory = Site(resource)

    # Start the WebSocket listener.
    if args.ssl_key:
        contextFactory = AutopushSSLContextFactory(args.ssl_key,
                                                   args.ssl_cert)
        if args.ssl_dh_param:
            contextFactory.getContext().load_tmp_dh(args.ssl_dh_param)

        reactor.listenSSL(args.port, siteFactory, contextFactory)
    else:
        reactor.listenTCP(args.port, siteFactory)

    # Start the internal routing listener.
    if args.router_ssl_key:
        contextFactory = AutopushSSLContextFactory(args.router_ssl_key,
                                                   args.router_ssl_cert)
        if args.ssl_dh_param:
            contextFactory.getContext().load_tmp_dh(args.ssl_dh_param)

        reactor.listenSSL(args.router_port, site, contextFactory)
    else:
        reactor.listenTCP(args.router_port, site)

    reactor.suggestThreadPoolSize(50)

    l = task.LoopingCall(periodic_reporter, settings)
    l.start(1.0)

    # Start the table rotation checker/updater
    l = task.LoopingCall(settings.update_rotating_tables)
    l.start(60)

    reactor.run()
def endpoint_main(sysargs=None, use_files=True):
    """Main entry point to setup an endpoint node, aka the autoendpoint
    script"""
    args, parser = _parse_endpoint(sysargs, use_files)
    log_level = args.log_level or ("debug" if args.debug else "info")
    log_format = "text" if args.human_logs else "json"
    sentry_dsn = bool(os.environ.get("SENTRY_DSN"))
    PushLogger.setup_logging(
        "Autoendpoint",
        log_level=log_level,
        log_format=log_format,
        log_output=args.log_output,
        sentry_dsn=sentry_dsn,
        firehose_delivery_stream=args.firehose_stream_name)

    # Add some entropy to prevent potential conflicts.
    postfix = os.urandom(4).encode('hex').ljust(8, '0')
    settings = make_settings(
        args,
        endpoint_scheme=args.endpoint_scheme,
        endpoint_hostname=args.endpoint_hostname or args.hostname,
        endpoint_port=args.endpoint_port,
        enable_cors=not args.no_cors,
        bear_hash_key=args.auth_key,
        preflight_uaid="deadbeef000000000deadbeef" + postfix,
        debug=args.debug)
    if not settings:
        return 1

    # Endpoint HTTP router
    h_kwargs = dict(ap_settings=settings)
    site = cyclone.web.Application([
        (endpoint_paths['simple'], SimplePushHandler, h_kwargs),
        (endpoint_paths['webpush'], WebPushHandler, h_kwargs),
        (endpoint_paths['message'], MessageHandler, h_kwargs),
        (endpoint_paths['registration'], RegistrationHandler, h_kwargs),
        (endpoint_paths['logcheck'], LogCheckHandler, h_kwargs),
    ],
        default_host=settings.hostname,
        debug=args.debug,
        log_function=skip_request_logging)
    site.protocol = LimitedHTTPConnection
    site.protocol.maxData = settings.max_data
    mount_health_handlers(site, settings)
    site.noisy = args.debug

    settings.metrics.start()

    if args.ssl_key:
        ssl_cf = AutopushSSLContextFactory(
            args.ssl_key,
            args.ssl_cert,
            dh_file=args.ssl_dh_param,
            require_peer_certs=settings.enable_tls_auth)
        endpoint = SSL4ServerEndpoint(reactor, args.port, ssl_cf)
    else:
        ssl_cf = None
        endpoint = TCP4ServerEndpoint(reactor, args.port)
    endpoint.listen(site)

    if args.proxy_protocol_port:
        from autopush.haproxy import HAProxyServerEndpoint
        pendpoint = HAProxyServerEndpoint(reactor,
                                          args.proxy_protocol_port,
                                          ssl_cf)
        pendpoint.listen(site)

    reactor.suggestThreadPoolSize(50)

    # Start the table rotation checker/updater
    start_looping_call(60, settings.update_rotating_tables)

    if args.memusage_port:
        create_memusage_site(settings, args.memusage_port, args.debug)

    reactor.run()
def connection_main(sysargs=None, use_files=True):
    """Main entry point to setup a connection node, aka the autopush
    script"""
    args, parser = _parse_connection(sysargs, use_files)
    log_format = "text" if args.human_logs else "json"
    log_level = args.log_level or ("debug" if args.debug else "info")
    sentry_dsn = bool(os.environ.get("SENTRY_DSN"))
    PushLogger.setup_logging(
        "Autopush",
        log_level=log_level,
        log_format=log_format,
        log_output=args.log_output,
        sentry_dsn=sentry_dsn,
        firehose_delivery_stream=args.firehose_stream_name)

    # Add some entropy to prevent potential conflicts.
    postfix = os.urandom(4).encode('hex').ljust(8, '0')
    settings = make_settings(
        args,
        port=args.port,
        endpoint_scheme=args.endpoint_scheme,
        endpoint_hostname=args.endpoint_hostname,
        endpoint_port=args.endpoint_port,
        router_scheme="https" if args.router_ssl_key else "http",
        router_hostname=args.router_hostname,
        router_port=args.router_port,
        env=args.env,
        hello_timeout=args.hello_timeout,
        preflight_uaid="deadbeef000000000deadbeef" + postfix,
        debug=args.debug,
    )
    if not settings:
        return 1  # pragma: nocover

    # Internal HTTP notification router
    h_kwargs = dict(ap_settings=settings)
    site = cyclone.web.Application([
        (endpoint_paths['route'], RouterHandler, h_kwargs),
        (endpoint_paths['notification'], NotificationHandler, h_kwargs),
    ],
        default_host=settings.router_hostname,
        debug=args.debug,
        log_function=skip_request_logging)
    site.noisy = args.debug
    mount_health_handlers(site, settings)

    # Public websocket server
    proto = "wss" if args.ssl_key else "ws"
    factory = PushServerFactory(
        settings,
        "%s://%s:%s/" % (proto, args.hostname, args.port),
    )
    factory.setProtocolOptions(
        webStatus=False,
        openHandshakeTimeout=5,
        autoPingInterval=args.auto_ping_interval,
        autoPingTimeout=args.auto_ping_timeout,
        maxConnections=args.max_connections,
        closeHandshakeTimeout=args.close_handshake_timeout,
    )
    settings.metrics.start()

    # Wrap the WebSocket server in a default resource that exposes the
    # `/status` handler, and delegates to the WebSocket resource for all
    # other requests.
    resource = DefaultResource(WebSocketResource(factory))
    resource.putChild("status", StatusResource())
    site_factory = Site(resource)
    # Silence starting/stopping messages
    site_factory.noisy = args.debug
    site.noisy = args.debug

    # Start the WebSocket listener.
    if args.ssl_key:
        context_factory = AutopushSSLContextFactory(
            args.ssl_key,
            args.ssl_cert,
            dh_file=args.ssl_dh_param)
        reactor.listenSSL(args.port, site_factory, context_factory)
    else:
        reactor.listenTCP(args.port, site_factory)

    # Start the internal routing listener.
    if args.router_ssl_key:
        context_factory = AutopushSSLContextFactory(
            args.router_ssl_key,
            args.router_ssl_cert,
            dh_file=args.ssl_dh_param)
        reactor.listenSSL(args.router_port, site, context_factory)
    else:
        reactor.listenTCP(args.router_port, site)

    reactor.suggestThreadPoolSize(50)

    start_looping_call(1.0, periodic_reporter, settings, factory)

    # Start the table rotation checker/updater
    start_looping_call(60, settings.update_rotating_tables)

    if args.memusage_port:
        create_memusage_site(settings, args.memusage_port, args.debug)

    reactor.run()
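# Helper sketch (not from the original source). The refactored entry points
# above call start_looping_call(...) instead of building task.LoopingCall
# objects inline as the earlier versions did. The helper itself is not shown
# in this section; the following is only a minimal guess at what it might look
# like, assuming it is a thin wrapper around twisted.internet.task.LoopingCall.
from twisted.internet import task


def start_looping_call(interval, func, *args, **kwargs):
    # Create the timer, start it immediately, and hand it back so callers
    # can stop it later if they need to.
    lc = task.LoopingCall(func, *args, **kwargs)
    lc.start(interval)
    return lc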