def test_node_clear(self):
    """Clearing a node wipes node_id but leaves the rest of the record."""
    router = Router(get_router_table(), SinkMetrics())
    # Register a node user
    router.register_user({
        "uaid": dummy_uaid,
        "node_id": "asdf",
        "connected_at": 1234,
        "router_key": "webpush",
    })
    # Verify the record landed intact
    record = router.get_uaid(dummy_uaid)
    eq_(record["node_id"], "asdf")
    eq_(record["connected_at"], 1234)
    eq_(record["router_key"], "webpush")
    # Clear the node assignment
    router.clear_node(record)
    # node_id is gone; every other field survives
    record = router.get_uaid(dummy_uaid)
    eq_(record.get("node_id"), None)
    eq_(record["connected_at"], 1234)
    eq_(record["router_key"], "webpush")
def test_drop_user(self):
    """drop_user removes a freshly registered record without error."""
    user_id = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    # Register a node user, then immediately drop it
    router.register_user(
        {"uaid": user_id, "node_id": "asdf", "connected_at": 1234})
    router.drop_user(user_id)
def test_incomplete_uaid(self):
    """A record registered with only a uaid is treated as missing."""
    user_id = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    router.register_user({"uaid": user_id})
    # Incomplete records are invisible both to the Router API and to a
    # direct consistent read of the table.
    self.assertRaises(ItemNotFound, router.get_uaid, user_id)
    self.assertRaises(ItemNotFound, router.table.get_item,
                      consistent=True, uaid=user_id)
def test_save_new(self):
    """Registering a brand-new user reports success."""
    router = Router(get_router_table(), SinkMetrics())
    # Sadly, moto currently does not return an empty value like boto
    # when not updating data.
    mock_conn = Mock()
    mock_conn.update_item.return_value = {}
    router.table.connection = mock_conn
    result = router.register_user(
        {"uaid": "", "node_id": "me", "connected_at": 1234})
    eq_(result[0], True)
def setup_dynamodb():
    """Start (or reuse) a local DynamoDB and create the test tables.

    When no AWS_LOCAL_DYNAMODB endpoint is configured, spawns a local
    DynamoDB java process, stores its handle in the module-global
    DDB_PROCESS (so teardown code can terminate it), and exports the
    endpoint URL for the rest of the suite.
    """
    global DDB_PROCESS
    if os.getenv("AWS_LOCAL_DYNAMODB") is None:
        print("Starting new DynamoDB instance")
        # FIX: pass argv as a list with shell=False. The original joined
        # the arguments into one string and used shell=True, which (a)
        # subjects DDB_LIB_DIR/DDB_JAR to shell word-splitting and (b)
        # makes DDB_PROCESS the intermediate shell rather than the java
        # process, so terminating it would not reliably stop DynamoDB.
        cmd = [
            "java",
            "-Djava.library.path=%s" % DDB_LIB_DIR,
            "-jar", DDB_JAR,
            "-sharedDb",
            "-inMemory",
        ]
        DDB_PROCESS = subprocess.Popen(cmd, env=os.environ)
        os.environ["AWS_LOCAL_DYNAMODB"] = "http://127.0.0.1:8000"
    else:
        print("Using existing DynamoDB instance")

    # Setup the necessary tables
    boto_resource = DynamoDBResource()
    create_message_table(MESSAGE_TABLE, boto_resource=boto_resource)
    get_router_table(ROUTER_TABLE, boto_resource=boto_resource)
def test_drop_old_users(self):
    """drop_old_users deletes in pages of 25 and reports per-page counts."""
    router = Router(get_router_table(), SinkMetrics())
    # 53 users -> two full pages of 25 plus a final page of 3.
    for _ in range(53):
        router.register_user(self._create_minimal_record())
    deleted_counts = router.drop_old_users(months_ago=0)
    eq_(list(deleted_counts), [25, 25, 3])
def test_save_uaid(self):
    """register_user stores the record and get_uaid reads it back."""
    user_id = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    result = router.register_user(
        {"uaid": user_id, "node_id": "me", "connected_at": 1234})
    eq_(result[0], True)
    eq_(result[1], {"uaid": user_id, "connected_at": 1234,
                    "node_id": "me"})
    stored = router.get_uaid(user_id)
    eq_(bool(stored), True)
    eq_(stored["node_id"], "me")
def test_preflight_check(self):
    """The preflight probe leaves no residue behind for its uaid."""
    router = Router(get_router_table(), SinkMetrics())
    storage = Storage(get_storage_table(), SinkMetrics())
    probe_uaid = "deadbeef00000000deadbeef01010101"
    preflight_check(storage, router, probe_uaid)
    # now check that the database reports no entries.
    eq_(len(storage.fetch_notifications(probe_uaid)), 0)
    self.assertRaises(ItemNotFound, router.get_uaid, probe_uaid)
def test_save_new(self):
    """A new registration succeeds when the backend returns nothing."""
    router = Router(get_router_table(), SinkMetrics())
    # Sadly, moto currently does not return an empty value like boto
    # when not updating data.
    connection = Mock()
    connection.update_item.return_value = {}
    router.table.connection = connection
    result = router.register_user(
        {"uaid": "", "node_id": "me", "connected_at": 1234})
    eq_(result[0], True)
def test_drop_old_users(self):
    """drop_old_users pages deletions in batches of 25."""
    router = Router(get_router_table(), SinkMetrics())
    # Purge any existing users from previous runs.
    router.drop_old_users(0)
    # 53 users -> expected page counts of 25, 25, 3.
    for _ in range(53):
        router.register_user(self._create_minimal_record())
    assert list(router.drop_old_users(months_ago=0)) == [25, 25, 3]
def test_preflight_check(self):
    """The preflight probe cleans up after itself."""
    router = Router(get_router_table(), SinkMetrics())
    message = Message(get_rotating_message_table(), SinkMetrics())
    probe_uaid = "deadbeef00000000deadbeef01010101"
    preflight_check(message, router, probe_uaid)
    # now check that the database reports no entries.
    _, notifs = message.fetch_messages(uuid.UUID(probe_uaid))
    assert len(notifs) == 0
    with pytest.raises(ItemNotFound):
        router.get_uaid(probe_uaid)
def test_uaid_provision_failed(self):
    """Provisioning errors from get_item bubble out of get_uaid."""
    router = Router(get_router_table(), SinkMetrics())
    router.table = Mock()

    def throughput_exceeded(*args, **kwargs):
        raise ProvisionedThroughputExceededException(None, None)

    router.table.get_item.side_effect = throughput_exceeded
    with self.assertRaises(ProvisionedThroughputExceededException):
        router.get_uaid(uaid="asdf")
def test_clear_node_provision_failed(self):
    """Provisioning errors from put_item bubble out of clear_node."""
    table = get_router_table()
    router = Router(table, SinkMetrics())

    def throughput_exceeded(*args, **kwargs):
        raise ProvisionedThroughputExceededException(None, None)

    router.table.connection.put_item = Mock(
        side_effect=throughput_exceeded)
    with self.assertRaises(ProvisionedThroughputExceededException):
        router.clear_node(
            Item(table, {"uaid": "asdf", "connected_at": "1234",
                         "node_id": "asdf"}))
def test_register_user_provision_failed(self):
    """Provisioning errors from update_item escape register_user."""
    router = Router(get_router_table(), SinkMetrics())
    connection = Mock()
    router.table.connection = connection

    def throughput_exceeded(*args, **kwargs):
        raise ProvisionedThroughputExceededException(None, None)

    connection.update_item.side_effect = throughput_exceeded
    with self.assertRaises(ProvisionedThroughputExceededException):
        router.register_user(
            {"uaid": "asdf", "node_id": "me", "connected_at": 1234})
def test_drop_user(self):
    """drop_user returns True on delete, False on a second attempt."""
    user_id = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    # Register a node user
    router.register_user(
        {"uaid": user_id, "node_id": "asdf", "connected_at": 1234})
    eq_(router.drop_user(user_id), True)
    # Deleting already deleted record should return false.
    eq_(router.drop_user(user_id), False)
def test_save_uaid(self):
    """A registered record round-trips through get_uaid."""
    user_id = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    result = router.register_user({
        "uaid": user_id,
        "node_id": "me",
        "connected_at": 1234,
    })
    eq_(result[0], True)
    eq_(result[1], {"uaid": user_id, "connected_at": 1234,
                    "node_id": "me"})
    fetched = router.get_uaid(user_id)
    eq_(bool(fetched), True)
    eq_(fetched["node_id"], "me")
def test_preflight_check_fail(self):
    """A failure while clearing the node aborts the preflight check."""
    router = Router(get_router_table(), SinkMetrics())
    message = Message(get_rotating_message_table(), SinkMetrics())

    def raise_exc(*args, **kwargs):  # pragma: no cover
        raise Exception("Oops")

    router.clear_node = Mock(side_effect=raise_exc)
    with pytest.raises(Exception):
        preflight_check(message, router)
def test_preflight_check(self):
    """Errors raised inside preflight_check are not swallowed."""
    router_table = get_router_table()
    storage_table = get_storage_table()

    def raise_exc(*args, **kwargs):  # pragma: no cover
        raise Exception("Oops")

    # NOTE(review): clear_node is patched on the raw table object and the
    # tables are passed straight to preflight_check (no Router/Storage
    # wrappers) — confirm this revision of preflight_check accepts tables.
    router_table.clear_node = Mock(side_effect=raise_exc)
    with self.assertRaises(Exception):
        preflight_check(storage_table, router_table)
def test_save_new(self):
    """Registering a new webpush user reports success."""
    router = Router(get_router_table(), SinkMetrics())
    # Sadly, moto currently does not return an empty value like boto
    # when not updating data.
    router.table.update_item = Mock(return_value={})
    result = router.register_user({
        "uaid": dummy_uaid,
        "node_id": "me",
        "router_type": "webpush",
        "connected_at": 1234,
    })
    assert result[0] is True
def test_preflight_check_fail(self):
    """Failures in clear_node propagate out of preflight_check."""
    router = Router(get_router_table(), SinkMetrics())
    storage = Storage(get_storage_table(), SinkMetrics())

    def raise_exc(*args, **kwargs):  # pragma: no cover
        raise Exception("Oops")

    router.clear_node = Mock(side_effect=raise_exc)
    with self.assertRaises(Exception):
        preflight_check(storage, router)
def test_node_clear_fail(self):
    """clear_node returns False when the conditional write is rejected."""
    table = get_router_table()
    router = Router(table, SinkMetrics())

    def conditional_failure(*args, **kwargs):
        raise ConditionalCheckFailedException(None, None)

    router.table.connection.put_item = Mock(
        side_effect=conditional_failure)
    record = {"uaid": dummy_uaid, "node_id": "asdf",
              "connected_at": 1234}
    eq_(router.clear_node(Item(table, record)), False)
def test_node_clear_fail(self):
    """A conditional-check failure makes clear_node report False."""
    table = get_router_table()
    router = Router(table, SinkMetrics())

    def conditional_failure(*args, **kwargs):
        raise ConditionalCheckFailedException(None, None)

    router.table.connection.put_item = Mock(
        side_effect=conditional_failure)
    record = {"uaid": "asdf", "node_id": "asdf", "connected_at": 1234}
    eq_(router.clear_node(Item(table, record)), False)
def test_save_fail(self):
    """register_user reports (False, {}, data) on a conditional failure."""
    router = Router(get_router_table(), SinkMetrics())

    def conditional_failure(*args, **kwargs):
        raise ConditionalCheckFailedException(None, None)

    router.table.connection = Mock()
    router.table.connection.update_item.side_effect = conditional_failure
    record = {"uaid": dummy_uaid, "node_id": "asdf",
              "connected_at": 1234}
    eq_(router.register_user(record), (False, {}, record))
def test_register_user_provision_failed(self):
    """Throughput errors from update_item escape register_user."""
    router = Router(get_router_table(), SinkMetrics())
    router.table.connection = Mock()

    def throughput_exceeded(*args, **kwargs):
        raise ProvisionedThroughputExceededException(None, None)

    router.table.connection.update_item.side_effect = throughput_exceeded
    with self.assertRaises(ProvisionedThroughputExceededException):
        router.register_user(
            {"uaid": dummy_uaid, "node_id": "me", "connected_at": 1234})
def test_save_fail(self):
    """A conditional-check failure yields (False, {}, data)."""
    router = Router(get_router_table(), SinkMetrics())

    def conditional_failure(*args, **kwargs):
        raise ConditionalCheckFailedException(None, None)

    router.table.connection = Mock()
    router.table.connection.update_item.side_effect = conditional_failure
    record = {"uaid": "asdf", "node_id": "asdf", "connected_at": 1234}
    eq_(router.register_user(record), (False, {}, record))
def test_failed_uaid(self):
    """A non-200 backend response is surfaced as ItemNotFound."""
    user_id = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    router.table.get_item = Mock(return_value={
        "ResponseMetadata": {"HTTPStatusCode": 400},
    })
    router.drop_user = Mock()
    with pytest.raises(ItemNotFound):
        router.get_uaid(user_id)
def test_clear_node_provision_failed(self):
    """Throughput errors raised by put_item escape clear_node."""
    table = get_router_table()
    router = Router(table, SinkMetrics())

    def throughput_exceeded(*args, **kwargs):
        raise ProvisionedThroughputExceededException(None, None)

    router.table.connection.put_item = Mock(
        side_effect=throughput_exceeded)
    with self.assertRaises(ProvisionedThroughputExceededException):
        router.clear_node(
            Item(table, {"uaid": "asdf", "connected_at": "1234",
                         "node_id": "asdf"}))
def test_node_clear_fail(self):
    """A ConditionalCheckFailed ClientError makes clear_node return False."""
    router = Router(get_router_table(), SinkMetrics())

    def conditional_failure(*args, **kwargs):
        raise ClientError(
            {'Error': {'Code': 'ConditionalCheckFailedException'}},
            'mock_update_item')

    router.table.put_item = Mock(side_effect=conditional_failure)
    record = {"uaid": dummy_uaid, "node_id": "asdf",
              "connected_at": 1234}
    assert router.clear_node(record) is False
def test_preflight_check_wait(self):
    """preflight_check waits out a PENDING table before probing."""
    router = Router(get_router_table(), SinkMetrics())
    message = Message(get_rotating_message_table(), SinkMetrics())
    # Table status reports PENDING once, then ACTIVE.
    message.table_status = Mock(side_effect=["PENDING", "ACTIVE"])
    probe_uaid = "deadbeef00000000deadbeef01010101"
    preflight_check(message, router, probe_uaid)
    # now check that the database reports no entries.
    _, notifs = message.fetch_messages(uuid.UUID(probe_uaid))
    assert len(notifs) == 0
    with pytest.raises(ItemNotFound):
        router.get_uaid(probe_uaid)
def test_incomplete_uaid(self):
    # Older records may be incomplete. We can't inject them using normal
    # methods.
    user_id = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    router.table.get_item = Mock(
        return_value={"uaid": uuid.uuid4().hex})
    router.drop_user = Mock()
    try:
        router.register_user({"uaid": user_id})
    except AutopushException:
        pass
    # get_uaid must treat the incomplete record as missing and drop it.
    assert_raises(ItemNotFound, router.get_uaid, user_id)
    ok_(router.drop_user.called)
def drop_users(router_table_name, months_ago, batch_size, pause_time):
    """Delete users whose last_connect is at least ``months_ago`` old.

    Deletions are throttled: once at least ``batch_size`` records have
    been removed, sleep for ``pause_time`` seconds so the router table's
    provisioned throughput is not exhausted, then continue.
    """
    router_table = get_router_table(router_table_name)
    router = Router(router_table, SinkMetrics())
    click.echo("Deleting users with a last_connect %s months ago."
               % months_ago)
    count = 0
    for deletes in router.drop_old_users(months_ago):
        click.echo("")
        count += deletes
        if count >= batch_size:
            # BUG FIX: the original did `% pause_time` against a format
            # string with two %s placeholders, which raises TypeError the
            # first time the threshold is hit. Supply both values.
            click.echo("Deleted %s user records, pausing for %s seconds."
                       % (count, pause_time))
            time.sleep(pause_time)
            count = 0
    click.echo("Finished old user purge.")
def test_node_clear(self):
    """clear_node removes the node_id from a stored record."""
    router = Router(get_router_table(), SinkMetrics())
    # Register a node user
    router.register_user(
        {"uaid": "asdf", "node_id": "asdf", "connected_at": 1234})
    # Verify it stored
    record = router.get_uaid("asdf")
    eq_(record["node_id"], "asdf")
    # Clear, then confirm the node assignment is gone
    router.clear_node(record)
    record = router.get_uaid("asdf")
    eq_(record.get("node_id"), None)
def test_uaid_provision_failed(self):
    """Throughput ClientErrors from get_item escape get_uaid."""
    router = Router(get_router_table(), SinkMetrics())
    router.table = Mock()

    def throughput_exceeded(*args, **kwargs):
        import autopush.db
        raise autopush.db.g_client.exceptions.ClientError(
            {'Error': {
                'Code': 'ProvisionedThroughputExceededException'
            }},
            'mock_update_item')

    router.table.get_item.side_effect = throughput_exceeded
    with pytest.raises(ClientError) as ex:
        router.get_uaid(uaid="asdf")
    assert (ex.value.response['Error']['Code'] ==
            "ProvisionedThroughputExceededException")
def test_save_fail(self):
    """register_user returns (False, {}) on a conditional failure."""
    router = Router(get_router_table(), SinkMetrics())

    def conditional_failure(*args, **kwargs):
        import autopush.db
        raise autopush.db.g_client.exceptions.ClientError(
            {'Error': {'Code': 'ConditionalCheckFailedException'}},
            'mock_update_item')

    router.table.update_item = Mock(side_effect=conditional_failure)
    record = {"uaid": dummy_uaid, "node_id": "asdf",
              "connected_at": 1234, "router_type": "webpush"}
    assert router.register_user(record) == (False, {})
def test_node_clear(self):
    """Clearing a node assignment leaves the record without node_id."""
    router = Router(get_router_table(), SinkMetrics())
    # Register a node user
    router.register_user({
        "uaid": "asdf",
        "node_id": "asdf",
        "connected_at": 1234,
    })
    # Verify
    user = router.get_uaid("asdf")
    eq_(user["node_id"], "asdf")
    # Clear
    router.clear_node(user)
    # Verify
    user = router.get_uaid("asdf")
    eq_(user.get("node_id"), None)
def test_clear_node_condition_failed(self):
    """clear_node returns False when the conditional put is rejected."""
    router = Router(get_router_table(), SinkMetrics())

    def conditional_failure(*args, **kwargs):
        import autopush.db
        raise autopush.db.g_client.exceptions.ClientError(
            {'Error': {'Code': 'ConditionalCheckFailedException'}},
            'mock_put_item')

    router.table.put_item = Mock(side_effect=conditional_failure)
    record = {"uaid": dummy_uaid, "connected_at": "1234",
              "node_id": "asdf", "router_type": "webpush"}
    assert router.clear_node(record) is False
def test_clear_node_provision_failed(self):
    """Throughput ClientErrors from put_item escape clear_node."""
    router = Router(get_router_table(), SinkMetrics())

    def throughput_exceeded(*args, **kwargs):
        import autopush.db
        raise autopush.db.g_client.exceptions.ClientError(
            {'Error': {
                'Code': 'ProvisionedThroughputExceededException'
            }},
            'mock_update_item')

    router.table.put_item = Mock(side_effect=throughput_exceeded)
    with pytest.raises(ClientError) as ex:
        router.clear_node({"uaid": dummy_uaid, "connected_at": "1234",
                           "node_id": "asdf", "router_type": "webpush"})
    assert (ex.value.response['Error']['Code'] ==
            "ProvisionedThroughputExceededException")
def test_node_clear(self):
    """Only node_id is dropped by clear_node; other fields persist."""
    router = Router(get_router_table(), SinkMetrics())
    # Register a node user
    router.register_user({
        "uaid": dummy_uaid,
        "node_id": "asdf",
        "connected_at": 1234,
        "router_key": "webpush",
    })
    # Verify
    user = router.get_uaid(dummy_uaid)
    eq_(user["node_id"], "asdf")
    eq_(user["connected_at"], 1234)
    eq_(user["router_key"], "webpush")
    # Clear
    router.clear_node(user)
    # Verify: node_id gone, everything else untouched
    user = router.get_uaid(dummy_uaid)
    eq_(user.get("node_id"), None)
    eq_(user["connected_at"], 1234)
    eq_(user["router_key"], "webpush")
def __init__(self, crypto_key=None, datadog_api_key=None,
             datadog_app_key=None, datadog_flush_interval=None,
             hostname=None, port=None, router_scheme=None,
             router_hostname=None, router_port=None,
             endpoint_scheme=None, endpoint_hostname=None,
             endpoint_port=None,
             # NOTE(review): mutable default ({}) — only read here
             # (.get / `in`), but safer as None-sentinel; confirm callers.
             router_conf={},
             router_tablename="router", router_read_throughput=5,
             router_write_throughput=5, storage_tablename="storage",
             storage_read_throughput=5, storage_write_throughput=5,
             message_tablename="message", message_read_throughput=5,
             message_write_throughput=5, statsd_host="localhost",
             statsd_port=8125, resolve_hostname=False, max_data=4096,
             # Reflected up from UDP Router
             wake_timeout=0, env='development', enable_cors=False,
             # NOTE(review): s3_bucket, senderid_expry and senderid_list
             # are accepted but not used in this body — presumably
             # consumed elsewhere (e.g. a GCM/senderid helper); verify.
             s3_bucket=DEFAULT_BUCKET, senderid_expry=SENDERID_EXPRY,
             senderid_list={}, hello_timeout=0, auth_key=None,
             ):
    """Initialize the Settings object

    Upon creation, the HTTP agent will initialize, all configured routers
    will be setup and started, logging will be started, and the database
    will have a preflight check done.
    """
    # Use a persistent connection pool for HTTP requests.
    pool = HTTPConnectionPool(reactor)
    self.agent = Agent(reactor, connectTimeout=5, pool=pool)
    # Metrics setup: Datadog takes precedence, then statsd, else a sink.
    if datadog_api_key:
        self.metrics = DatadogMetrics(
            api_key=datadog_api_key,
            app_key=datadog_app_key,
            flush_interval=datadog_flush_interval
        )
    elif statsd_host:
        self.metrics = TwistedMetrics(statsd_host, statsd_port)
    else:
        self.metrics = SinkMetrics()
    # Crypto keys are normalized to a list; a fresh key is generated when
    # none is supplied.
    if not crypto_key:
        crypto_key = [Fernet.generate_key()]
    if not isinstance(crypto_key, list):
        crypto_key = [crypto_key]
    self.update(crypto_key=crypto_key)
    self.crypto_key = crypto_key
    # Auth keys likewise normalize to a (possibly empty) list.
    if auth_key is None:
        auth_key = []
    if not isinstance(auth_key, list):
        auth_key = [auth_key]
    self.auth_key = auth_key
    self.max_data = max_data
    self.clients = {}
    # Setup hosts/ports/urls; hostname falls back to the machine name and
    # may be resolved to an IP when resolve_hostname is set.
    default_hostname = socket.gethostname()
    self.hostname = hostname or default_hostname
    if resolve_hostname:
        self.hostname = resolve_ip(self.hostname)
    self.port = port
    self.endpoint_hostname = endpoint_hostname or self.hostname
    self.router_hostname = router_hostname or self.hostname
    self.router_conf = router_conf
    self.router_url = canonical_url(
        router_scheme or 'http',
        self.router_hostname,
        router_port
    )
    self.endpoint_url = canonical_url(
        endpoint_scheme or 'http',
        self.endpoint_hostname,
        endpoint_port
    )
    # Database objects
    self.router_table = get_router_table(router_tablename,
                                         router_read_throughput,
                                         router_write_throughput)
    self.storage_table = get_storage_table(storage_tablename,
                                           storage_read_throughput,
                                           storage_write_throughput)
    self.message_table = get_message_table(message_tablename,
                                           message_read_throughput,
                                           message_write_throughput)
    self.storage = Storage(self.storage_table, self.metrics)
    self.router = Router(self.router_table, self.metrics)
    self.message = Message(self.message_table, self.metrics)
    # Run preflight check
    preflight_check(self.storage, self.router)
    # CORS
    self.cors = enable_cors
    # Force timeout in idle seconds
    self.wake_timeout = wake_timeout
    # Setup the routers; simplepush/webpush are always present, apns/gcm
    # only when configured.
    self.routers = {}
    self.routers["simplepush"] = SimpleRouter(
        self,
        router_conf.get("simplepush")
    )
    self.routers["webpush"] = WebPushRouter(self, None)
    if 'apns' in router_conf:
        self.routers["apns"] = APNSRouter(self, router_conf["apns"])
    if 'gcm' in router_conf:
        self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])
    # Env
    self.env = env
    self.hello_timeout = hello_timeout
def setUp(self):
    """Remember the live table and connection so mocks can be undone."""
    self.real_table = get_router_table()
    self.real_connection = self.real_table.connection
def test_no_uaid_found(self):
    """get_uaid raises ItemNotFound for an unknown uaid."""
    unknown_uaid = str(uuid.uuid4())
    router = Router(get_router_table(), SinkMetrics())
    self.assertRaises(ItemNotFound, router.get_uaid, unknown_uaid)
def __init__(self, crypto_key=None, datadog_api_key=None,
             datadog_app_key=None, datadog_flush_interval=None,
             hostname=None, port=None, router_scheme=None,
             router_hostname=None, router_port=None,
             endpoint_scheme=None, endpoint_hostname=None,
             endpoint_port=None,
             # NOTE(review): mutable default ({}) — only read in this
             # body, but a None-sentinel would be safer; confirm callers.
             router_conf={},
             router_tablename="router", router_read_throughput=5,
             router_write_throughput=5, storage_tablename="storage",
             storage_read_throughput=5, storage_write_throughput=5,
             statsd_host="localhost", statsd_port=8125,
             resolve_hostname=False, max_data=4096, enable_cors=False):
    """Initialize the Settings object

    Upon creation, the HTTP agent will initialize, all configured routers
    will be setup and started, logging will be started, and the database
    will have a preflight check done.
    """
    # Use a persistent connection pool for HTTP requests.
    pool = HTTPConnectionPool(reactor)
    self.agent = Agent(reactor, connectTimeout=5, pool=pool)
    # Metrics setup: Datadog takes precedence, then statsd, else a sink.
    if datadog_api_key:
        self.metrics = DatadogMetrics(
            api_key=datadog_api_key,
            app_key=datadog_app_key,
            flush_interval=datadog_flush_interval
        )
    elif statsd_host:
        self.metrics = TwistedMetrics(statsd_host, statsd_port)
    else:
        self.metrics = SinkMetrics()
    # A single Fernet key (generated when absent) — unlike the key-list
    # variants of this constructor elsewhere in the project.
    key = crypto_key or Fernet.generate_key()
    self.fernet = Fernet(key)
    self.crypto_key = key
    self.max_data = max_data
    self.clients = {}
    # Setup hosts/ports/urls; hostname falls back to the machine name and
    # may be resolved to an IP when resolve_hostname is set.
    default_hostname = socket.gethostname()
    self.hostname = hostname or default_hostname
    if resolve_hostname:
        self.hostname = resolve_ip(self.hostname)
    self.port = port
    self.endpoint_hostname = endpoint_hostname or self.hostname
    self.router_hostname = router_hostname or self.hostname
    self.router_conf = router_conf
    self.router_url = canonical_url(
        router_scheme or 'http',
        self.router_hostname,
        router_port
    )
    self.endpoint_url = canonical_url(
        endpoint_scheme or 'http',
        self.endpoint_hostname,
        endpoint_port
    )
    # Database objects
    self.router_table = get_router_table(router_tablename,
                                         router_read_throughput,
                                         router_write_throughput)
    self.storage_table = get_storage_table(storage_tablename,
                                           storage_read_throughput,
                                           storage_write_throughput)
    self.storage = Storage(self.storage_table, self.metrics)
    self.router = Router(self.router_table, self.metrics)
    # Run preflight check
    preflight_check(self.storage, self.router)
    # CORS
    self.cors = enable_cors
    # Setup the routers; apns/gcm only when configured.
    self.routers = {}
    self.routers["simplepush"] = SimpleRouter(self, None)
    if 'apns' in router_conf:
        self.routers["apns"] = APNSRouter(self, router_conf["apns"])
    if 'gcm' in router_conf:
        self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])
def __init__(self, crypto_key=None, datadog_api_key=None,
             datadog_app_key=None, datadog_flush_interval=None,
             hostname=None, port=None, router_scheme=None,
             router_hostname=None, router_port=None,
             endpoint_scheme=None, endpoint_hostname=None,
             endpoint_port=None,
             # NOTE(review): mutable default ({}) — only read here; a
             # None-sentinel would be safer. Confirm callers.
             router_conf={},
             router_tablename="router", router_read_throughput=5,
             router_write_throughput=5, storage_tablename="storage",
             storage_read_throughput=5, storage_write_throughput=5,
             message_tablename="message", message_read_throughput=5,
             message_write_throughput=5, statsd_host="localhost",
             statsd_port=8125, resolve_hostname=False, max_data=4096,
             # Reflected up from UDP Router
             wake_timeout=0, env='development', enable_cors=False,
             # NOTE(review): s3_bucket, senderid_expry, senderid_list and
             # message_*_throughput are accepted but unused in this body —
             # presumably consumed elsewhere; verify.
             s3_bucket=DEFAULT_BUCKET, senderid_expry=SENDERID_EXPRY,
             senderid_list={}, hello_timeout=0, bear_hash_key=None,
             preflight_uaid="deadbeef00000000deadbeef000000000",
             ):
    """Initialize the Settings object

    Upon creation, the HTTP agent will initialize, all configured routers
    will be setup and started, logging will be started, and the database
    will have a preflight check done.
    """
    # Use a persistent connection pool for HTTP requests.
    pool = HTTPConnectionPool(reactor)
    self.agent = Agent(reactor, connectTimeout=5, pool=pool)
    # Metrics setup: Datadog takes precedence, then statsd, else a sink.
    if datadog_api_key:
        self.metrics = DatadogMetrics(
            api_key=datadog_api_key,
            app_key=datadog_app_key,
            flush_interval=datadog_flush_interval
        )
    elif statsd_host:
        self.metrics = TwistedMetrics(statsd_host, statsd_port)
    else:
        self.metrics = SinkMetrics()
    # Crypto keys are normalized to a list; a fresh key is generated when
    # none is supplied.
    if not crypto_key:
        crypto_key = [Fernet.generate_key()]
    if not isinstance(crypto_key, list):
        crypto_key = [crypto_key]
    self.update(crypto_key=crypto_key)
    self.crypto_key = crypto_key
    # Bearer-token hash keys likewise normalize to a (possibly empty)
    # list.
    if bear_hash_key is None:
        bear_hash_key = []
    if not isinstance(bear_hash_key, list):
        bear_hash_key = [bear_hash_key]
    self.bear_hash_key = bear_hash_key
    self.max_data = max_data
    self.clients = {}
    # Setup hosts/ports/urls; hostname falls back to the machine name and
    # may be resolved to an IP when resolve_hostname is set.
    default_hostname = socket.gethostname()
    self.hostname = hostname or default_hostname
    if resolve_hostname:
        self.hostname = resolve_ip(self.hostname)
    self.port = port
    self.endpoint_hostname = endpoint_hostname or self.hostname
    self.router_hostname = router_hostname or self.hostname
    self.router_conf = router_conf
    self.router_url = canonical_url(
        router_scheme or 'http',
        self.router_hostname,
        router_port
    )
    self.endpoint_url = canonical_url(
        endpoint_scheme or 'http',
        self.endpoint_hostname,
        endpoint_port
    )
    # Database objects; message tables rotate, so the tablename acts as a
    # prefix for the dated table names.
    self.router_table = get_router_table(router_tablename,
                                         router_read_throughput,
                                         router_write_throughput)
    self.storage_table = get_storage_table(
        storage_tablename,
        storage_read_throughput,
        storage_write_throughput)
    self.message_table = get_rotating_message_table(
        message_tablename)
    self._message_prefix = message_tablename
    self.storage = Storage(self.storage_table, self.metrics)
    self.router = Router(self.router_table, self.metrics)
    # Used to determine whether a connection is out of date with current
    # db objects. There are three noteworty cases:
    # 1 "Last Month" the table requires a rollover.
    # 2 "This Month" the most common case.
    # 3 "Next Month" where the system will soon be rolling over, but with
    #   timing, some nodes may roll over sooner. Ensuring the next month's
    #   table is present before the switchover is the main reason for
    #   this, just in case some nodes do switch sooner.
    self.create_initial_message_tables()
    # Run preflight check
    preflight_check(self.storage, self.router, preflight_uaid)
    # CORS
    self.cors = enable_cors
    # Force timeout in idle seconds
    self.wake_timeout = wake_timeout
    # Setup the routers; simplepush/webpush are always present, apns/gcm
    # only when configured.
    self.routers = {}
    self.routers["simplepush"] = SimpleRouter(
        self,
        router_conf.get("simplepush")
    )
    self.routers["webpush"] = WebPushRouter(self, None)
    if 'apns' in router_conf:
        self.routers["apns"] = APNSRouter(self, router_conf["apns"])
    if 'gcm' in router_conf:
        self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])
    # Env
    self.env = env
    self.hello_timeout = hello_timeout