def test_topic_shuffle(self): # build services # 2 services as a group, 2 groups services = map( lambda x: ServiceMeta.from_dict({ "name": "testtopicshuffle", "callmode": "notify", "delivermode": "topicshuffle", "serverimpl": "rabbitmq", "coder": "mail", "options": { "cdc": "testsh", "sdc": "testsh", "serversubname": "consumer1" } }), [1, 2]) services += map( lambda x: ServiceMeta.from_dict({ "name": "testtopicshuffle", "callmode": "notify", "delivermode": "topicshuffle", "serverimpl": "rabbitmq", "coder": "mail", "options": { "cdc": "testsh", "sdc": "testsh", "serversubname": "consumer2" } }), [1, 2]) # start queue and receivers sender = babelrabbitmq_async.get_client_sender_for_topic_shuffle( services[0]) receivers = map( lambda service: babelrabbitmq_async. get_server_receiver_for_topic_shuffle(service, max_cache_size=1000 ), services) try: for receiver in receivers: receiver.start_consuming() # first round, send 100 keys for i in range(100): key = str(i) sender.put(key, "testtopicshuffle") sleep(1) counters_first = map(lambda receiver: receiver.get_cached_number(), receivers) print counters_first for c in counters_first: assert c > 0 finally: sender.close() for receiver in receivers: receiver.close()
def get_profile_client():
    """Build and start a ServiceClient for the profile send service.

    The service meta comes from the redis config when Babel runs in redis
    mode, otherwise from the rabbitmq config.
    """
    if settings.Babel_Mode == 'redis':
        meta = ServiceMeta.from_json(settings.ProfileSendService_redis)
    else:
        meta = ServiceMeta.from_json(settings.ProfileSendService_rmq)
    client = ServiceClient(meta)
    client.start()
    return client
def test_topic_only(self):
    """Plain topic delivery: one message is broadcast to every consumer
    group, so both subscribers receive their own copy."""
    topic_service1 = ServiceMeta.from_json("""
    {
        "name": "testtopic",
        "callmode": "notify",
        "delivermode": "topic",
        "serverimpl": "rabbitmq",
        "coder": "mail",
        "options": {
            "cdc": "testsh",
            "sdc": "testsh",
            "serversubname": "consumer1"
        }
    }
    """)
    topic_service2 = ServiceMeta.from_json("""
    {
        "name": "testtopic",
        "callmode": "notify",
        "delivermode": "topic",
        "serverimpl": "rabbitmq",
        "coder": "mail",
        "options": {
            "cdc": "testsh",
            "sdc": "testsh",
            "serversubname": "consumer2"
        }
    }
    """)
    sender = babelrabbitmq_async.get_client_sender_for_topic(topic_service1)
    recver1 = babelrabbitmq_async.get_server_receiver_for_topic(topic_service1)
    recver2 = babelrabbitmq_async.get_server_receiver_for_topic(topic_service2)
    try:
        recver1.start_consuming()
        recver2.start_consuming()
        sender.put("test_msg", topic_service1.name)
        m1 = recver1.get(timeout=1)
        m2 = recver2.get(timeout=1)
        # Both consumer groups must see the same broadcast message.
        assert m1 == "test_msg"
        assert m2 == "test_msg"
    finally:
        sender.close()
        recver1.close()
        recver2.close()
def test_multiple_queue(self): queue_service = ServiceMeta.from_json(""" { "name": "test", "callmode": "notify", "delivermode": "queue", "serverimpl": "redis", "coder": "mail", "options": {} } """) sender = get_client_sender_for_queue(queue_service) recver = get_server_receiver_for_queue(queue_service, max_cache_size=20000) recver.start_consuming() round = 10000 start = millis_now() for i in xrange(round): sender.put("test_msg", queue_service.name, block=False) sleep(5) while recver.get_cached_number() < round: a = (recver.get_cached_number()) sleep(0.2) end = millis_now() dure = end - start dure /= 1000.0 print "{} records spend {} seconds, {} per second".format( round, dure, round / dure) sender.close() recver.close() assert False
def test_queue(self): queue_service = ServiceMeta.from_json(""" { "name": "testqueue", "callmode": "notify", "delivermode": "queue", "serverimpl": "rabbitmq", "coder": "mail", "options": { "cdc": "testsh", "sdc": "testsh", "durable": true } } """) sender = babelrabbitmq_async.get_client_sender_for_queue(queue_service) recver = babelrabbitmq_async.get_server_receiver_for_queue( queue_service) try: recver.start_consuming() sender.put("test_msg", queue_service.name) print "queue size: ", recver.get_cached_number() msg = recver.get(timeout=2) assert msg == "test_msg" finally: sender.close() recver.close()
def test_sharding(self): # build services services = map( lambda x: ServiceMeta.from_dict({ "name": "test", "callmode": "notify", "delivermode": "sharding", "serverimpl": "rabbitmq", "coder": "mail", "options": { "sdc": "sh", "cdc": "sh", "serverseq": str(x) } }), [1, 2, 3, 4]) # start queue and receivers sender = get_client_sender_for_sharding(services[0]) receivers = map( lambda service: get_server_receiver_for_sharding( service, max_cache_size=1000), services) for receiver in receivers: receiver.start_consuming() # first round, send 100 keys for i in range(100): key = str(i) sender.put(key, key, block=False) sleep(1) counters_first = map(lambda receiver: receiver.get_cached_number(), receivers) print counters_first for c in counters_first: assert c > 0 # dropping cache for receiver in receivers: receiver.dump_cache() # second round ,send 400 keys in range(0, 100) for i in range(400): key = str(i % 100) sender.put(key, key, block=False) sleep(5) counters_second = map(lambda receiver: receiver.get_cached_number(), receivers) print counters_second for counter_first, counter_second in zip(counters_first, counters_second): assert counter_first * 4 == counter_second sender.close() for receiver in receivers: receiver.close()
def get_server(redis_conf, rmq_conf):
    """Create (but do not start) a ServiceServer for the active babel mode.

    NOTE: relies on module-level ``mode`` and ``amqp_url`` defined elsewhere
    in this file.
    """
    conf = redis_conf if mode == "redis" else rmq_conf
    meta = ServiceMeta.from_json(conf)
    if mode == "redis":
        return ServiceServer(meta)
    return ServiceServer(meta, amqp_url=amqp_url, server_id="")
def get_server(redis_conf, rmq_conf):
    """Create (but do not start) a ServiceServer.

    Picks the redis conf when ``settings.Babel_Mode`` is "redis", the
    rabbitmq conf otherwise.
    """
    conf = redis_conf if settings.Babel_Mode == "redis" else rmq_conf
    meta = ServiceMeta.from_json(conf)
    return ServiceServer(meta)
def get_client(redis_conf, rmq_conf):
    """Build, start and wrap a ServiceClient for the active babel mode.

    NOTE: relies on module-level ``mode``, ``amqp_url`` and ``wrap_client``
    defined elsewhere in this file.
    """
    conf = redis_conf if mode == "redis" else rmq_conf
    meta = ServiceMeta.from_json(conf)
    if mode == "redis":
        client = ServiceClient(meta)
    else:
        client = ServiceClient(meta, amqp_url=amqp_url, client_id="")
    client.start()
    return wrap_client(client)
def test_topic(self):
    """Topic delivery on rabbitmq: both consumer groups receive the same
    broadcast message.

    Fix: sender/receivers are now closed in a ``finally`` block so a failing
    assertion no longer leaks connections (matches ``test_topic_only``).
    """
    topic_service1 = ServiceMeta.from_json("""
    {
        "name": "test",
        "callmode": "notify",
        "delivermode": "topic",
        "serverimpl": "rabbitmq",
        "coder": "mail",
        "options": {
            "sdc": "sh",
            "cdc": "sh",
            "serversubname": "consumer1"
        }
    }
    """)
    topic_service2 = ServiceMeta.from_json("""
    {
        "name": "test",
        "callmode": "notify",
        "delivermode": "topic",
        "serverimpl": "rabbitmq",
        "coder": "mail",
        "options": {
            "sdc": "sh",
            "cdc": "sh",
            "serversubname": "consumer2"
        }
    }
    """)
    sender = get_client_sender_for_topic(topic_service1)
    recver1 = get_server_receiver_for_topic(topic_service1)
    recver2 = get_server_receiver_for_topic(topic_service2)
    try:
        recver1.start_consuming()
        recver2.start_consuming()
        sender.put("test_msg", topic_service1.name)
        assert recver1.get(timeout=1) == "test_msg"
        assert recver2.get(timeout=1) == "test_msg"
    finally:
        sender.close()
        recver1.close()
        recver2.close()
def _common_test_process(self, service_data, events=None):
    """Spin up one client plus one echo server per service spec, send the
    given events, and return (client responses, per-server result lists).

    ``service_data`` may be a single dict or a list of dicts; missing keys
    fall back to queue/notify defaults.
    """
    if not isinstance(service_data, list):
        service_data = [service_data]

    services = [
        ServiceMeta.from_dict({
            "name": "test",
            "callmode": d.get("callmode", "notify"),
            "delivermode": d.get("delivermode", "queue"),
            "serverimpl": "redis",
            "coder": "mail",
            "options": {
                "cdc": "sh",
                "sdc": "sh",
                "serversubname": d.get("serversubname", ""),
                "serverseq": d.get("serverseq", ""),
                "servercardinality": d.get("servercardinality", 1)
            }
        }) for d in service_data
    ]

    client = ServiceClient(services[0])
    client.start()

    servers = []
    server_results_list = []
    for svc in services:
        echo_service, results = get_echo_service()
        srv = ServiceServer(svc, echo_service)
        servers.append(srv)
        srv.start()
        server_results_list.append(results)

    if events is None:
        events = [TestService.EVENT]

    client_response_list = []
    for event in events:
        client_response_list.append(client.send(event, event.key, timeout=5))

    sleep(1)  # let the echo servers finish processing before teardown
    client.close()
    for srv in servers:
        srv.close()
    return client_response_list, server_results_list
def test():
    """End-to-end smoke test: publish a notice event over babel and verify
    the alaskan HTTP endpoint reports the IP as blacklisted.

    Fix: the two bare ``except:`` clauses now catch ``Exception`` instead,
    so KeyboardInterrupt/SystemExit are no longer swallowed.
    """
    try:
        # Optional: switch the metrics agent to async mode when available.
        from bigsec_common.metrics.metricsagent import MetricsAgent, set_async_mode
        set_async_mode()
    except Exception:
        pass  # best-effort; fall back to the default metrics mode
    metrics_url = {
        "server": "redis",
        "app": "alaskan",
        "redis": {
            "host": "127.0.0.1",
            "port": 6379,
            # 'password': '******',
            "type": "redis"
        }
    }
    MetricsAgent.get_instance().initialize_by_dict(metrics_url)
    post_stalker_recorder = MyMetricsRecorder(metrics_name='stalker_post_data',
                                              db='default')
    post_api_recorder = MyMetricsRecorder(metrics_name='api_request',
                                          db='default')
    RedisCtx.get_instance().host = '127.0.0.1'
    RedisCtx.get_instance().port = 6379
    # RedisCtx.get_instance().password = '******'
    try:
        meta = ServiceMeta.from_json(Babel_Conf)
        babel_client = ServiceClient(meta)
        babel_client.start()
        now = millis_now()
        event = Event("warden", "stalker_data", "key", now, exp_notice)
        babel_client.notify(event, event.key, timeout=5)
        print('notice notify publish [ok]')
        time.sleep(1)  # give the consumer a moment to process the notice
        check_result = requests.get(
            'http://localhost:8088/alaskan/get_blacklist?ip={}'.format(
                exp_notice.get('ip'))).content
        if 'true' in check_result:
            print('check recv babel [ok]')
        else:
            print('check recv babel [error]')
    except Exception:
        import traceback
        print('notice notify publish [error][{}]'.format(
            traceback.format_exc()))
def get_misclog_rpc_client(amqp_url):
    """Build (but do not start) a rabbitmq topic client for misclog_notify."""
    misc_service = """
    {
        "name": "misclog_notify",
        "callmode": "notify",
        "delivermode": "topic",
        "serverimpl": "rabbitmq",
        "coder": "mail",
        "options": {
        }
    }
    """
    meta = ServiceMeta.from_json(misc_service)
    return ServiceClient(meta, amqp_url=amqp_url)
def get_httplog_rpc_client(amqp_url):
    """Build (but do not start) a rabbitmq sharding client for
    httplog_notify (two shards)."""
    http_service = """
    {
        "name": "httplog_notify",
        "callmode": "notify",
        "delivermode": "sharding",
        "serverimpl": "rabbitmq",
        "coder": "mail",
        "options": {
            "servercardinality": 2
        }
    }
    """
    meta = ServiceMeta.from_json(http_service)
    return ServiceClient(meta, amqp_url=amqp_url)
def test_batch_notify_service(self): events = list() for i in range(10): e = TestService.EVENT.copy() e.key = str(i) events.append(e) s = ServiceMeta.from_dict( { "name": "test", "callmode": "notify", "delivermode": "queue", "serverimpl": "redis", "coder": "mail", "options": { "sdc": "sh", "cdc": "sh" } }) client = ServiceClient(s) client.start() echo_service, results = get_echo_service() server = ServiceServer(s, echo_service) server.start() if events is None: events = [TestService.EVENT] client_response_list = list() for event in events: response = client.batch_notify(event, event.key, limit=5) client_response_list.append(response) sleep(0.1) print len(results) sleep(1) client.close() server.close() print client_response_list print results
def test_queue(self):
    """Redis queue delivery: a single put is received exactly once."""
    queue_service = ServiceMeta.from_json("""
    {
        "name": "test",
        "callmode": "notify",
        "delivermode": "queue",
        "serverimpl": "redis",
        "coder": "mail",
        "options": {}
    }
    """)
    sender = get_client_sender_for_queue(queue_service)
    recver = get_server_receiver_for_queue(queue_service)
    recver.start_consuming()
    sender.put("test_msg", queue_service.name)
    received = recver.get(timeout=1)
    assert received == "test_msg"
    sender.close()
    recver.close()
def test_shuffle(self): # build services services = map( lambda x: ServiceMeta.from_dict({ "name": "test", "callmode": "notify", "delivermode": "shuffle", "serverimpl": "rabbitmq", "coder": "mail", "options": { "sdc": "sh", "cdc": "sh" } }), [1, 2, 3, 4]) # start queue and receivers sender = get_client_sender_for_shuffle(services[0]) receivers = map( lambda service: get_server_receiver_for_shuffle( service, max_cache_size=1000), services) for receiver in receivers: receiver.start_consuming() # first round, send 100 keys for i in range(100): key = str(i) sender.put(key, "test") sleep(1) counters_first = map(lambda receiver: receiver.get_cached_number(), receivers) print counters_first for c in counters_first: assert c > 0 sender.close() for receiver in receivers: receiver.close()
def build_service(id, sname, callmode, delivermode, subname="", sequence="",
                  cardinality=0):
    """Register a service meta under ``id`` in TestService.server_repo and,
    when running in server mode, start an echo server for it."""
    meta = ServiceMeta.from_dict({
        "name": sname,
        "callmode": callmode,
        "delivermode": delivermode,
        "serverimpl": "redis",
        "coder": "mail",
        "options": {
            "cdc": "sh",
            "sdc": "sh",
            "serversubname": subname,
            "serverseq": sequence,
            "servercardinality": cardinality
        }
    })

    # Only spin up a real server (and a results sink) in server mode.
    if "server" in TestService.MODE:
        echo_service, results = get_echo_service()
        server = ServiceServer(meta, echo_service)
        server.start()
    else:
        server = None
        results = None

    TestService.server_repo[id] = {
        "service": meta,
        "server": server,
        "results": results
    }
def test_topic_sharding(self): # build services # 2 services as a group, 2 groups services = map( lambda x: ServiceMeta.from_dict({ "name": "testtopicsharding", "callmode": "notify", "delivermode": "topicsharding", "serverimpl": "rabbitmq", "coder": "mail", "options": { "cdc": "testsh", "sdc": "testsh", "serversubname": "consumer1", "serverseq": str(x) } }), [1, 2]) services += map( lambda x: ServiceMeta.from_dict({ "name": "testtopicsharding", "callmode": "notify", "delivermode": "topicsharding", "serverimpl": "rabbitmq", "coder": "mail", "options": { "cdc": "testsh", "sdc": "testsh", "serversubname": "consumer2", "serverseq": str(x) } }), [1, 2, 3]) # start queue and receivers sender = babelrabbitmq_async.get_client_sender_for_topic_sharding( services[0]) receivers = map( lambda service: babelrabbitmq_async. get_server_receiver_for_topic_sharding( service, max_cache_size=1000), services) try: for receiver in receivers: receiver.start_consuming() # first round, send 100 keys for i in range(100): key = str(i) sender.put(key, "testtopicsharding." + key) sleep(1) counters_first = map(lambda receiver: receiver.get_cached_number(), receivers) print counters_first for c in counters_first: assert c > 0 finally: logging.error("Test is Closing !!!!!!!!") sender.close() try: for receiver in receivers: receiver.close() except Exception: import traceback logging.error(traceback.format_exc())
def get_misclog_rpc_client():
    """Build (but do not start) a redis misclog client from its service file."""
    conf = get_babel_file_content('Misclog_redis.service')
    return ServiceClient(ServiceMeta.from_json(conf))
def get_httplog_rpc_client():
    """Build (but do not start) a redis httplog client from its service file."""
    conf = get_babel_file_content('Httplog_redis.service')
    return ServiceClient(ServiceMeta.from_json(conf))