def test_get_norecord(self):
    """choose_ID returns None when no senderIDs were configured."""
    conf = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=0,
    )
    self.senderIDs = SenderIDs(conf)
    eq_(self.senderIDs.choose_ID(), None)
def test_get_record(self):
    """get_ID returns the stored record for a known senderID."""
    conf = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=0,
        senderid_list=test_list,
    )
    self.senderIDs = SenderIDs(conf)
    record = self.senderIDs.get_ID('test123')
    eq_(record, {"senderID": "test123", "auth": "abc"})
    return self.senderIDs.stop()
def test_start(self, fts):
    """start()/stop() delegate to the underlying timer service."""
    conf = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=0,
        senderid_list=test_list,
    )
    sids = SenderIDs(conf)
    self.senderIDs = sids
    sids.start()
    ok_(sids.service.start.called)
    # Mark the (patched) service as running so stop() tears it down.
    fts.running = True
    sids.stop()
    ok_(sids.service.stop.called)
def test_update(self):
    """update() persists the replacement senderID set to the S3 key."""
    conf = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=0,
        senderid_list=test_list,
    )
    sids = SenderIDs(conf)
    new_ids = {"test789": {"auth": "ghi"}}
    sids.update(new_ids)
    bucket = sids.conn.get_bucket(conf.get("s3_bucket"))
    stored = bucket.get_key('senderids').get_contents_as_string()
    eq_(stored, json.dumps(new_ids))
def __init__(self, ap_settings, router_conf):
    """Create a new GCM router and connect to GCM.

    :param ap_settings: the application's AutopushSettings (unused here,
        kept for interface parity with the other routers).
    :param router_conf: dict of GCM router options ("ttl", "dryrun",
        "collapseKey", "senderIDs").
    :raises IOError: if a senderID/auth key cannot be obtained, so the
        bridge can only be brought up from main-process configuration.
    """
    self.config = router_conf
    self.ttl = router_conf.get("ttl", 60)
    self.dryRun = router_conf.get("dryrun", False)
    self.collapseKey = router_conf.get("collapseKey", "simplepush")
    self.senderIDs = router_conf.get("senderIDs")
    if not self.senderIDs:
        self.senderIDs = SenderIDs(router_conf)
    try:
        senderID = self.senderIDs.choose_ID()
        self.gcm = gcmclient.GCM(senderID.get("auth"))
    # BUG FIX: was a bare ``except:``, which also swallowed
    # SystemExit/KeyboardInterrupt; narrow it to Exception. The
    # AttributeError from choose_ID() returning None is still
    # converted to the same IOError callers expect.
    except Exception:
        raise IOError("GCM Bridge not initiated in main")
    log.msg("Starting GCM router...")
def test_refresh(self):
    """_refresh repopulates the in-memory senderIDs from the bucket."""
    conf = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=0,
        senderid_list=test_list,
    )
    self.senderIDs = SenderIDs(conf)
    # Blow away the cache so _refresh must reload from S3.
    self.senderIDs._senderIDs = {}
    self.senderIDs._expry = 0
    twisted.internet.base.DelayedCall.debug = True

    def check(*args):
        eq_(self.senderIDs._senderIDs, test_list)

    d = self.senderIDs._refresh()
    d.addBoth(check)
    return d
def test_success(self):
    """Construction stores the list in S3 and serves it from cache."""
    conf = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=10,
        senderid_list=test_list,
    )
    self.senderIDs = SenderIDs(conf)
    bucket = self.senderIDs.conn.get_bucket(conf.get("s3_bucket"))
    stored = bucket.get_key('senderids').get_contents_as_string()
    eq_(stored, json.dumps(conf.get("senderid_list")))
    eq_(self.senderIDs.senderIDs(), conf.get("senderid_list"))
    # choose_ID may modify the record in memory adding a field.
    chosen = self.senderIDs.choose_ID()
    expected = conf.get("senderid_list")
    ok_(chosen.get('senderID') in expected.keys())
    ok_(chosen.get('auth') == expected[chosen.get('senderID')]['auth'])
    self.senderIDs._expry = 0
def test_ensureCreated(self):
    """_refresh creates the bucket when S3 reports it missing."""
    conf = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=0,
        senderid_list=test_list,
    )
    self.senderIDs = SenderIDs(conf)
    real_conn = self.senderIDs.conn
    self.senderIDs.conn = Mock()
    # First lookup 404s, the retry after creation succeeds.
    self.senderIDs.conn.get_bucket.side_effect = \
        [S3ResponseError(404, "Not Found", ""), None]
    self.senderIDs._create = Mock()

    def check(*args):
        ok_(self.senderIDs._create.called)
        self.senderIDs.conn = real_conn

    d = self.senderIDs._refresh()
    d.addBoth(check)
    return d
def test_bad_update(self):
    """Invalid updates are rejected and leave the cached set untouched."""
    settings = dict(
        s3_bucket=TEST_BUCKET,
        senderid_expry=0,
        senderid_list=test_list,
    )
    self.senderIDs = SenderIDs(settings)
    # An empty update dict is rejected.
    d = self.senderIDs.update({})
    eq_(d, None)
    eq_(self.senderIDs._senderIDs, test_list)
    # A non-dict update is rejected.
    # BUG FIX: the original never captured this call's return value, so
    # the following eq_ re-asserted the previous result; rebind d here.
    d = self.senderIDs.update([123])
    eq_(d, None)
    eq_(self.senderIDs._senderIDs, test_list)
    self.senderIDs.conn.create_bucket(TEST_BUCKET)
    # Try a valid, but incorrectly formatted set of senderIDs
    tkey = Key(self.senderIDs.conn.get_bucket(TEST_BUCKET))
    tkey.key = self.senderIDs.KEYNAME
    tkey.set_contents_from_string("[123,456]")
    self.senderIDs._update_senderIDs()
    eq_(self.senderIDs._senderIDs, test_list)
def make_settings(args, **kwargs):
    """Helper function to make a :class:`AutopushSettings` object.

    Builds the per-bridge ``router_conf`` (simplepush/UDP wake, APNS,
    GCM) from parsed command-line ``args`` and returns the constructed
    settings object, or ``None`` if GCM is enabled but no usable
    senderID could be found.
    """
    router_conf = {}
    if args.key_hash:
        db.key_hash = args.key_hash
    # Some routers require a websocket to timeout on idle (e.g. UDP)
    if args.wake_pem is not None and args.wake_timeout != 0:
        router_conf["simplepush"] = {"idle": args.wake_timeout,
                                     "server": args.wake_server,
                                     "cert": args.wake_pem}
    if args.apns_enabled:
        # if you have the critical elements for each external router,
        # create it
        if args.apns_cert_file is not None and args.apns_key_file is not None:
            router_conf["apns"] = {"sandbox": args.apns_sandbox,
                                   "cert_file": args.apns_cert_file,
                                   "key_file": args.apns_key_file}
    if args.gcm_enabled:
        # Create a common gcmclient
        slist = json.loads(args.senderid_list)
        senderIDs = SenderIDs(
            dict(s3_bucket=args.s3_bucket,
                 senderid_expry=args.senderid_expry,
                 use_s3=args.s3_bucket.lower() != "none",
                 senderid_list=slist))
        # This is an init check to verify that things are configured
        # correctly. Otherwise errors may creep in later that go
        # unaccounted.
        senderID = senderIDs.choose_ID()
        if senderID is None:
            log.critical(format="No GCM SenderIDs specified or found.")
            return
        router_conf["gcm"] = {"ttl": args.gcm_ttl,
                              "dryrun": args.gcm_dryrun,
                              "max_data": args.max_data,
                              "collapsekey": args.gcm_collapsekey,
                              "senderIDs": senderIDs,
                              # BUG FIX: the original passed the builtin
                              # ``list`` type here instead of the parsed
                              # senderid list.
                              "senderid_list": slist}
    return AutopushSettings(
        crypto_key=args.crypto_key,
        datadog_api_key=args.datadog_api_key,
        datadog_app_key=args.datadog_app_key,
        datadog_flush_interval=args.datadog_flush_interval,
        hostname=args.hostname,
        statsd_host=args.statsd_host,
        statsd_port=args.statsd_port,
        router_conf=router_conf,
        router_tablename=args.router_tablename,
        storage_tablename=args.storage_tablename,
        storage_read_throughput=args.storage_read_throughput,
        storage_write_throughput=args.storage_write_throughput,
        message_tablename=args.message_tablename,
        message_read_throughput=args.message_read_throughput,
        message_write_throughput=args.message_write_throughput,
        router_read_throughput=args.router_read_throughput,
        router_write_throughput=args.router_write_throughput,
        resolve_hostname=args.resolve_hostname,
        wake_timeout=args.wake_timeout,
        **kwargs)
def test_bad_init(self):
    """Malformed JSON for senderid_list leaves the cache empty."""
    conf = {"senderid_list": "[Update"}
    self.senderIDs = SenderIDs(conf)
    eq_(self.senderIDs._senderIDs, {})
def test_nos3(self):
    """With use_s3 disabled, _refresh never touches S3."""
    self.senderIDs = SenderIDs({"use_s3": False})
    self.senderIDs.conn = Mock()
    self.senderIDs._refresh()
    eq_(self.senderIDs.conn.get_bucket.call_count, 0)