Example #1
    def test_preflight_check(self):
        router = Router(get_router_table(), SinkMetrics())
        storage = Storage(get_storage_table(), SinkMetrics())

        pf_uaid = "deadbeef00000000deadbeef01010101"
        preflight_check(storage, router, pf_uaid)
        # now check that the database reports no entries.
        notifs = storage.fetch_notifications(pf_uaid)
        eq_(len(notifs), 0)
        self.assertRaises(ItemNotFound, router.get_uaid, pf_uaid)
Example #2
    def test_save_over_provisioned(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        storage.table = Mock()

        def raise_error(*args, **kwargs):
            raise ProvisionedThroughputExceededException(None, None)

        storage.table.query_2.side_effect = raise_error
        with self.assertRaises(ProvisionedThroughputExceededException):
            storage.fetch_notifications(dummy_uaid)
Example #3
    def test_fetch_over_provisioned(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        storage.table.connection = Mock()

        def raise_error(*args, **kwargs):
            raise ProvisionedThroughputExceededException(None, None)

        storage.table.connection.put_item.side_effect = raise_error
        with self.assertRaises(ProvisionedThroughputExceededException):
            storage.save_notification(dummy_uaid, dummy_chid, 12)
Example #4
    def test_save_over_provisioned(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        storage.table = Mock()

        def raise_error(*args, **kwargs):
            raise ProvisionedThroughputExceededException(None, None)

        storage.table.query_2.side_effect = raise_error
        with self.assertRaises(ProvisionedThroughputExceededException):
            storage.fetch_notifications("asdf")
Example #5
    def test_fetch_boto_err(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        storage.table.connection = Mock()

        def raise_error(*args, **kwargs):
            raise BotoServerError(None, None)

        storage.table.connection.put_item.side_effect = raise_error
        with assert_raises(BotoServerError):
            storage.save_notification(dummy_uaid, dummy_chid, 12)
Example #6
    def test_delete_over_provisioned(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        storage.table.connection = Mock()

        def raise_error(*args, **kwargs):
            raise ProvisionedThroughputExceededException(None, None)

        storage.table.connection.delete_item.side_effect = raise_error
        results = storage.delete_notification("asdf", "asdf")
        eq_(results, False)
Example #7
    def test_fetch_over_provisioned(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        storage.table.connection = Mock()

        def raise_error(*args, **kwargs):
            raise ProvisionedThroughputExceededException(None, None)

        storage.table.connection.put_item.side_effect = raise_error
        with self.assertRaises(ProvisionedThroughputExceededException):
            storage.save_notification("asdf", "asdf", 12)
Example #8
    def test_delete_over_provisioned(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        storage.table.connection = Mock()

        def raise_error(*args, **kwargs):
            raise ProvisionedThroughputExceededException(None, None)

        storage.table.connection.delete_item.side_effect = raise_error
        results = storage.delete_notification(dummy_uaid, dummy_chid)
        eq_(results, False)
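
Read together, the over-provisioned and boto error tests above pin down the error contract of the Storage wrapper: fetch_notifications and save_notification let ProvisionedThroughputExceededException (and BotoServerError) propagate to the caller, while delete_notification swallows the throughput error and reports failure with False. Below is a minimal sketch of that contract using boto's exception class and a boto.dynamodb2 table; it illustrates the behaviour the tests assert, not the project's actual implementation.

from boto.dynamodb2.exceptions import ProvisionedThroughputExceededException


class StorageSketch(object):
    """Hypothetical wrapper illustrating the error contract above."""

    def __init__(self, table):
        self.table = table

    def fetch_notifications(self, uaid):
        # Throughput errors (and any BotoServerError) raised by the
        # query propagate unchanged to the caller.
        return list(self.table.query_2(uaid__eq=uaid, consistent=True))

    def delete_notification(self, uaid, chid):
        try:
            self.table.connection.delete_item(
                self.table.table_name,
                key={"uaid": {"S": uaid}, "chid": {"S": chid}})
            return True
        except ProvisionedThroughputExceededException:
            # Deletion is best-effort: a throughput error becomes False.
            return False
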
Example #9
    def test_preflight_check_fail(self):
        router = Router(get_router_table(), SinkMetrics())
        storage = Storage(get_storage_table(), SinkMetrics())

        def raise_exc(*args, **kwargs):  # pragma: no cover
            raise Exception("Oops")

        router.clear_node = Mock()
        router.clear_node.side_effect = raise_exc

        with self.assertRaises(Exception):
            preflight_check(storage, router)
Example #10
    def test_preflight_check(self):
        router_table = get_router_table()
        storage_table = get_storage_table()

        def raise_exc(*args, **kwargs):  # pragma: no cover
            raise Exception("Oops")

        router_table.clear_node = Mock()
        router_table.clear_node.side_effect = raise_exc

        with self.assertRaises(Exception):
            preflight_check(storage_table, router_table)
Example #11
    def test_dont_save_older(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        # Unfortunately moto can't run condition expressions, so
        # we gotta fake it
        storage.table.connection = Mock()

        def raise_error(*args, **kwargs):
            raise ConditionalCheckFailedException(None, None)

        storage.table.connection.put_item.side_effect = raise_error
        result = storage.save_notification(dummy_uaid, dummy_chid, 8)
        eq_(result, False)
Example #12
    def test_dont_save_older(self):
        s = get_storage_table()
        storage = Storage(s, SinkMetrics())
        # Unfortunately moto can't run condition expressions, so
        # we gotta fake it
        storage.table.connection = Mock()

        def raise_error(*args, **kwargs):
            raise ConditionalCheckFailedException(None, None)

        storage.table.connection.put_item.side_effect = raise_error
        result = storage.save_notification("fdas", "asdf", 8)
        eq_(result, False)
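
The faked ConditionalCheckFailedException in the two tests above stands in for DynamoDB rejecting a conditional put because an equal or newer version is already stored; the wrapper is expected to translate that rejection into a False return rather than raise. Here is a sketch of that pattern; the actual condition lives in the real put_item call and is only hinted at in the comments.

from boto.dynamodb2.exceptions import ConditionalCheckFailedException


def save_notification_sketch(table, uaid, chid, version):
    """Hypothetical conditional write matching what the tests fake."""
    try:
        # The real call carries a condition ("only write if no newer
        # version exists"), which moto cannot evaluate -- hence the
        # mocked connection in the tests above.
        table.connection.put_item(
            table.table_name,
            item={"uaid": {"S": uaid},
                  "chid": {"S": chid},
                  "version": {"N": str(version)}})
        return True
    except ConditionalCheckFailedException:
        # An older version is never allowed to overwrite a newer one.
        return False
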
Example #13
    def test_preflight_check_wait(self):
        router = Router(get_router_table(), SinkMetrics())
        storage = Storage(get_storage_table(), SinkMetrics())

        storage.table.describe = mock_describe = Mock()

        values = [
            dict(Table=dict(TableStatus="PENDING")),
            dict(Table=dict(TableStatus="ACTIVE")),
        ]

        def return_vals(*args, **kwargs):
            return values.pop(0)

        mock_describe.side_effect = return_vals
        pf_uaid = "deadbeef00000000deadbeef01010101"
        preflight_check(storage, router, pf_uaid)
        # now check that the database reports no entries.
        notifs = storage.fetch_notifications(pf_uaid)
        eq_(len(notifs), 0)
        assert_raises(ItemNotFound, router.get_uaid, pf_uaid)
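
The values list and return_vals helper simulate a table that is still being created on the first describe call and ready on the second, which is what drives preflight_check through its wait-for-ACTIVE path. mock also accepts an iterable side_effect directly, so the same two-step sequence can be written more compactly; a small sketch using the standard mock package:

from mock import Mock

mock_describe = Mock(side_effect=[
    dict(Table=dict(TableStatus="PENDING")),
    dict(Table=dict(TableStatus="ACTIVE")),
])
assert mock_describe()["Table"]["TableStatus"] == "PENDING"  # first call
assert mock_describe()["Table"]["TableStatus"] == "ACTIVE"   # second call
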
Example #14
    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 enable_cors=False):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()

        key = crypto_key or Fernet.generate_key()
        self.fernet = Fernet(key)
        self.crypto_key = key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(storage_tablename,
                                               storage_read_throughput,
                                               storage_write_throughput)
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])
Example #15
    def setUp(self):
        table = get_storage_table()
        self.real_table = table
        self.real_connection = table.connection
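
This setUp keeps references to the real table and its boto connection, presumably so that tests which replace storage.table.connection with a Mock can be undone afterwards. A sketch of the matching tearDown such a pattern implies (assumed; it is not shown in the source):

    def tearDown(self):
        # Put the real boto connection back so later tests are not left
        # talking to a Mock left over from an earlier test.
        self.real_table.connection = self.real_connection
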
Example #16
    def __init__(
        self,
        crypto_key=None,
        datadog_api_key=None,
        datadog_app_key=None,
        datadog_flush_interval=None,
        hostname=None,
        port=None,
        router_scheme=None,
        router_hostname=None,
        router_port=None,
        endpoint_scheme=None,
        endpoint_hostname=None,
        endpoint_port=None,
        router_conf=None,
        router_tablename="router",
        router_read_throughput=5,
        router_write_throughput=5,
        storage_tablename="storage",
        storage_read_throughput=5,
        storage_write_throughput=5,
        message_tablename="message",
        message_read_throughput=5,
        message_write_throughput=5,
        statsd_host="localhost",
        statsd_port=8125,
        resolve_hostname=False,
        max_data=4096,
        # Reflected up from UDP Router
        wake_timeout=0,
        env='development',
        enable_cors=False,
        hello_timeout=0,
        bear_hash_key=None,
        preflight_uaid="deadbeef00000000deadbeef00000000",
        ami_id=None,
        client_certs=None,
        msg_limit=100,
        debug=False,
        connect_timeout=0.5,
    ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        if not debug:
            pool._factory = QuietClientFactory

        self.agent = Agent(reactor, connectTimeout=connect_timeout, pool=pool)

        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if bear_hash_key is None:
            bear_hash_key = []
        if not isinstance(bear_hash_key, list):
            bear_hash_key = [bear_hash_key]
        self.bear_hash_key = bear_hash_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                hostname=self.hostname,
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval,
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        if router_conf is None:
            router_conf = {}
        self.router_conf = router_conf
        self.router_url = canonical_url(router_scheme or 'http',
                                        self.router_hostname, router_port)

        self.endpoint_url = canonical_url(endpoint_scheme or 'http',
                                          self.endpoint_hostname,
                                          endpoint_port)
        self.enable_tls_auth = client_certs is not None
        self.client_certs = client_certs

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(storage_tablename,
                                               storage_read_throughput,
                                               storage_write_throughput)
        self.message_table = get_rotating_message_table(
            message_tablename,
            message_read_throughput=message_read_throughput,
            message_write_throughput=message_write_throughput)
        self._message_prefix = message_tablename
        self.message_limit = msg_limit
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)

        # Used to determine whether a connection is out of date with current
        # db objects. There are three noteworthy cases:
        # 1 "Last Month" the table requires a rollover.
        # 2 "This Month" the most common case.
        # 3 "Next Month" where the system will soon be rolling over, but with
        #   timing, some nodes may roll over sooner. Ensuring the next month's
        #   table is present before the switchover is the main reason for this,
        #   just in case some nodes do switch sooner.
        self.create_initial_message_tables()

        # Run preflight check
        preflight_check(self.storage, self.router, preflight_uaid)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = dict()
        self.routers["simplepush"] = SimpleRouter(
            self, router_conf.get("simplepush"))
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout

        self.ami_id = ami_id

        # Generate messages per legacy rules, only used for testing to
        # generate legacy data.
        self._notification_legacy = False
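
The rollover comment explains why create_initial_message_tables has to run during startup: with monthly message tables, both the current month's and the next month's table must already exist before any node switches over. A rough sketch of that naming and bootstrap logic, using a hypothetical make_rotating_tablename helper (the project's real helpers may differ):

from datetime import date


def make_rotating_tablename(prefix, d):
    # Hypothetical: one message table per calendar month.
    return "{}_{:04d}_{:02d}".format(prefix, d.year, d.month)


def initial_message_tablenames(prefix, today=None):
    """Table names that should exist at startup: this month and next."""
    today = today or date.today()
    if today.month == 12:
        next_month = date(today.year + 1, 1, 1)
    else:
        next_month = date(today.year, today.month + 1, 1)
    return [make_rotating_tablename(prefix, today),
            make_rotating_tablename(prefix, next_month)]
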
Example #17
    def __init__(
        self,
        crypto_key=None,
        datadog_api_key=None,
        datadog_app_key=None,
        datadog_flush_interval=None,
        hostname=None,
        port=None,
        router_scheme=None,
        router_hostname=None,
        router_port=None,
        endpoint_scheme=None,
        endpoint_hostname=None,
        endpoint_port=None,
        router_conf={},
        router_tablename="router",
        router_read_throughput=5,
        router_write_throughput=5,
        storage_tablename="storage",
        storage_read_throughput=5,
        storage_write_throughput=5,
        message_tablename="message",
        message_read_throughput=5,
        message_write_throughput=5,
        statsd_host="localhost",
        statsd_port=8125,
        resolve_hostname=False,
        max_data=4096,
        # Reflected up from UDP Router
        wake_timeout=0,
        env='development',
        enable_cors=False,
        s3_bucket=DEFAULT_BUCKET,
        senderid_expry=SENDERID_EXPRY,
        senderid_list={},
        hello_timeout=0,
    ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval)
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(router_scheme or 'http',
                                        self.router_hostname, router_port)

        self.endpoint_url = canonical_url(endpoint_scheme or 'http',
                                          self.endpoint_hostname,
                                          endpoint_port)

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(storage_tablename,
                                               storage_read_throughput,
                                               storage_write_throughput)
        self.message_table = get_message_table(message_tablename,
                                               message_read_throughput,
                                               message_write_throughput)
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)
        self.message = Message(self.message_table, self.metrics)

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self, router_conf.get("simplepush"))
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout
Example #18
    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 bear_hash_key=None,
                 preflight_uaid="deadbeef00000000deadbeef00000000",
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if bear_hash_key is None:
            bear_hash_key = []
        if not isinstance(bear_hash_key, list):
            bear_hash_key = [bear_hash_key]
        self.bear_hash_key = bear_hash_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(
            storage_tablename,
            storage_read_throughput,
            storage_write_throughput)
        self.message_table = get_rotating_message_table(
            message_tablename)
        self._message_prefix = message_tablename
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)

        # Used to determine whether a connection is out of date with current
        # db objects. There are three noteworthy cases:
        # 1 "Last Month" the table requires a rollover.
        # 2 "This Month" the most common case.
        # 3 "Next Month" where the system will soon be rolling over, but with
        #   timing, some nodes may roll over sooner. Ensuring the next month's
        #   table is present before the switchover is the main reason for this,
        #   just in case some nodes do switch sooner.
        self.create_initial_message_tables()

        # Run preflight check
        preflight_check(self.storage, self.router, preflight_uaid)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout
Example #19
    def __init__(self,
                 crypto_key=None,
                 datadog_api_key=None,
                 datadog_app_key=None,
                 datadog_flush_interval=None,
                 hostname=None,
                 port=None,
                 router_scheme=None,
                 router_hostname=None,
                 router_port=None,
                 endpoint_scheme=None,
                 endpoint_hostname=None,
                 endpoint_port=None,
                 router_conf={},
                 router_tablename="router",
                 router_read_throughput=5,
                 router_write_throughput=5,
                 storage_tablename="storage",
                 storage_read_throughput=5,
                 storage_write_throughput=5,
                 message_tablename="message",
                 message_read_throughput=5,
                 message_write_throughput=5,
                 statsd_host="localhost",
                 statsd_port=8125,
                 resolve_hostname=False,
                 max_data=4096,
                 # Reflected up from UDP Router
                 wake_timeout=0,
                 env='development',
                 enable_cors=False,
                 s3_bucket=DEFAULT_BUCKET,
                 senderid_expry=SENDERID_EXPRY,
                 senderid_list={},
                 hello_timeout=0,
                 auth_key=None,
                 ):
        """Initialize the Settings object

        Upon creation, the HTTP agent will initialize, all configured routers
        will be setup and started, logging will be started, and the database
        will have a preflight check done.

        """
        # Use a persistent connection pool for HTTP requests.
        pool = HTTPConnectionPool(reactor)
        self.agent = Agent(reactor, connectTimeout=5, pool=pool)

        # Metrics setup
        if datadog_api_key:
            self.metrics = DatadogMetrics(
                api_key=datadog_api_key,
                app_key=datadog_app_key,
                flush_interval=datadog_flush_interval
            )
        elif statsd_host:
            self.metrics = TwistedMetrics(statsd_host, statsd_port)
        else:
            self.metrics = SinkMetrics()
        if not crypto_key:
            crypto_key = [Fernet.generate_key()]
        if not isinstance(crypto_key, list):
            crypto_key = [crypto_key]
        self.update(crypto_key=crypto_key)
        self.crypto_key = crypto_key

        if auth_key is None:
            auth_key = []
        if not isinstance(auth_key, list):
            auth_key = [auth_key]
        self.auth_key = auth_key

        self.max_data = max_data
        self.clients = {}

        # Setup hosts/ports/urls
        default_hostname = socket.gethostname()
        self.hostname = hostname or default_hostname
        if resolve_hostname:
            self.hostname = resolve_ip(self.hostname)

        self.port = port
        self.endpoint_hostname = endpoint_hostname or self.hostname
        self.router_hostname = router_hostname or self.hostname

        self.router_conf = router_conf
        self.router_url = canonical_url(
            router_scheme or 'http',
            self.router_hostname,
            router_port
        )

        self.endpoint_url = canonical_url(
            endpoint_scheme or 'http',
            self.endpoint_hostname,
            endpoint_port
        )

        # Database objects
        self.router_table = get_router_table(router_tablename,
                                             router_read_throughput,
                                             router_write_throughput)
        self.storage_table = get_storage_table(storage_tablename,
                                               storage_read_throughput,
                                               storage_write_throughput)
        self.message_table = get_message_table(message_tablename,
                                               message_read_throughput,
                                               message_write_throughput)
        self.storage = Storage(self.storage_table, self.metrics)
        self.router = Router(self.router_table, self.metrics)
        self.message = Message(self.message_table, self.metrics)

        # Run preflight check
        preflight_check(self.storage, self.router)

        # CORS
        self.cors = enable_cors

        # Force timeout in idle seconds
        self.wake_timeout = wake_timeout

        # Setup the routers
        self.routers = {}
        self.routers["simplepush"] = SimpleRouter(
            self,
            router_conf.get("simplepush")
        )
        self.routers["webpush"] = WebPushRouter(self, None)
        if 'apns' in router_conf:
            self.routers["apns"] = APNSRouter(self, router_conf["apns"])
        if 'gcm' in router_conf:
            self.routers["gcm"] = GCMRouter(self, router_conf["gcm"])

        # Env
        self.env = env

        self.hello_timeout = hello_timeout