def __init__(self, name, db_address, pull_address, pub_address,
             pipelined=False, log_level=logging.INFO, messages=sys.maxsize):
    """
    Part that pulls messages on one socket and publishes on another.

    :param name: Name of the part, also stored in the local cache.
    :param db_address: Address of the cache service.
    :param pull_address: Address the PULL socket binds to.
    :param pub_address: Address the PUB socket binds to.
    :param pipelined: True when the output feeds another pipelined stage.
    :param log_level: Logging level for the console logger.
    :param messages: Maximum number of messages to process.
    """
    self.name = name
    self.cache = DictDB()
    self.db_address = db_address
    self.pull_address = pull_address
    self.pub_address = pub_address
    self.pipelined = pipelined
    self.message = None

    # Record the connection details in the local cache.
    for key, value in (('name', name),
                       ('pull_address', pull_address),
                       ('pub_address', pub_address)):
        self.cache.set(key, value.encode('utf-8'))

    # Console logger.
    self.logger = logging.getLogger(name=name)
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.logger.addHandler(stream_handler)
    self.logger.setLevel(log_level)

    self.messages = messages

    # ZMQ plumbing: bind both the pull and the pub endpoints.
    self.pull_socket = zmq_context.socket(zmq.PULL)
    self.pull_socket.bind(self.pull_address)
    self.pub_socket = zmq_context.socket(zmq.PUB)
    self.pub_socket.bind(self.pub_address)
def __init__(self, logging_level=logging.INFO, router_messages=sys.maxsize):
    """
    Configure the server's basic state: name, key-value storage,
    component registries, console logging and the internal router.

    :param logging_level: Logging level for the server.
    :param router_messages: Maximum number of messages the router handles.
    """
    # Name of the server
    self.name = ''
    # Logging level for the server
    self.logging_level = logging_level
    # Basic Key-value database for storage
    self.cache = DictDB()

    # Registries for the parts this server is composed of.
    self.inbound_components = {}
    self.outbound_components = {}
    self.bypass_components = {}

    # Basic console logging
    self.logger = logging.getLogger(name=self.name)
    console = logging.StreamHandler(sys.stdout)
    console.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.logger.addHandler(console)
    self.logger.setLevel(self.logging_level)

    # Finally, the router
    self.router = Router(logger=self.logger,
                         cache=self.cache,
                         messages=router_messages)
def __init__(self, name: str, sub_address: str, pub_address: str,
             worker_pull_address: str, worker_push_address: str,
             db_address: str, previous: str, pipelined: bool = False,
             cache: object = None, log_level: int = logging.INFO):
    """
    Hub server part: subscribes to a previous stage, hands messages to
    workers through a push/pull socket pair, and re-publishes results.

    :param name: Name of this hub, stored in the cache.
    :param sub_address: Address the SUB connection connects to.
    :param pub_address: Address the PUB service binds to.
    :param worker_pull_address: Address workers push results to.
    :param worker_push_address: Address workers pull jobs from.
    :param db_address: Address of the cache (bypass) service.
    :param previous: Topic of the previous stage the SUB connection
        subscribes to.
    :param pipelined: Passed to the PUB service; True when this hub
        feeds another pipelined stage.
    :param cache: Key-value store; a fresh DictDB is created when
        omitted.
    :param log_level: Logging level for the server.
    """
    super(Hub, self).__init__(logging_level=log_level)
    # BUG FIX: the default used to be ``cache: object = DictDB()`` — a
    # mutable default argument evaluated once at definition time, so
    # every Hub created without an explicit cache silently shared the
    # same store. Create a fresh one per instance instead.
    if cache is None:
        cache = DictDB()
    self.name = name
    self.cache = cache
    self.pipelined = pipelined

    self.register_inbound(
        SubConnection, 'Sub', sub_address, route='WorkerPush',
        previous=previous)
    self.register_inbound(
        WorkerPullService, 'WorkerPull', worker_pull_address, route='Pub')
    self.register_outbound(
        WorkerPushService, 'WorkerPush', worker_push_address)
    self.register_outbound(
        PubService, 'Pub', pub_address, log='to_sink', pipelined=pipelined)
    self.register_bypass(
        CacheService, 'Cache', db_address)
    self.preset_cache(name=name,
                      db_address=db_address,
                      sub_address=sub_address,
                      pub_address=pub_address,
                      worker_pull_address=worker_pull_address,
                      worker_push_address=worker_push_address)

    # Monkey patches the scatter and gather functions to the
    # scatter function of Push and Pull parts respectively.
    self.inbound_components['Sub'].scatter = self.scatter
    self.outbound_components['Pub'].scatter = self.gather
    self.outbound_components['Pub'].handle_stream = self.handle_stream
def __init__(self, name, db_address, sub_address, pub_address, previous,
             to_client=True, log_level=logging.INFO, messages=sys.maxsize):
    """
    Part that subscribes to a previous stage and re-publishes what it
    receives through its own PUB socket.

    :param name: Name of the part, also stored in the local cache.
    :param db_address: Address of the cache service.
    :param sub_address: Address the SUB socket connects to.
    :param pub_address: Address the PUB socket binds to.
    :param previous: Topic of the previous stage to subscribe to.
    :param to_client: True when the output goes straight to a client;
        the part is pipelined otherwise.
    :param log_level: Logging level for the console logger.
    :param messages: Maximum number of messages to process.
    """
    self.name = name
    self.cache = DictDB()
    self.db_address = db_address
    self.sub_address = sub_address
    self.pub_address = pub_address
    # Output straight to a client means this part is not pipelined.
    self.pipelined = not to_client
    self.message = None

    # Record the connection details in the local cache.
    for key, value in (('name', name),
                       ('sub_address', sub_address),
                       ('pub_address', pub_address)):
        self.cache.set(key, value.encode('utf-8'))

    # Console logger.
    log_handler = logging.StreamHandler(sys.stdout)
    log_handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'))
    self.logger = logging.getLogger(name=name)
    self.logger.addHandler(log_handler)
    self.logger.setLevel(log_level)

    self.messages = messages

    # Subscribe to the previous stage and bind the publisher socket.
    self.sub_socket = zmq_context.socket(zmq.SUB)
    self.sub_socket.setsockopt_string(zmq.SUBSCRIBE, previous)
    self.sub_socket.connect(self.sub_address)
    self.pub_socket = zmq_context.socket(zmq.PUB)
    self.pub_socket.bind(self.pub_address)
def test_gateway_dealer():
    """
    Test function for the complete gateway with a dummy router.

    Wires a GatewayRouter and a GatewayDealer against a hand-rolled
    ROUTER socket that stands in for the broker, then drives one
    PalmMessage through the round trip and checks the payload survives.
    """
    cache = DictDB()

    def dummy_response():
        # Stands in for the broker: accept the message from the gateway
        # router, acknowledge it with b'0', then bounce the same frames
        # to the gateway dealer part by its fixed identity.
        dummy_router = zmq_context.socket(zmq.ROUTER)
        dummy_router.bind('inproc://broker')
        [target, empty, message] = dummy_router.recv_multipart()
        dummy_router.send_multipart([target, empty, b'0'])
        broker_message = PalmMessage()
        broker_message.ParseFromString(message)
        dummy_router.send_multipart([b'gateway_dealer', empty, message])
        # Drain the final frame so the dealer is not left blocked.
        [target, message] = dummy_router.recv_multipart()

    def dummy_initiator():
        # Acts as the client: send one serialized PalmMessage into the
        # gateway router and return whatever the gateway sends back.
        dummy_client = zmq_context.socket(zmq.REQ)
        dummy_client.identity = b'0'
        dummy_client.connect('inproc://gateway_router')
        message = PalmMessage()
        message.client = dummy_client.identity
        message.pipeline = '0'
        message.function = 'f.servername'
        message.stage = 1
        message.payload = b'This is a message'
        dummy_client.send(message.SerializeToString())
        return dummy_client.recv()

    got = []
    dealer = GatewayDealer(cache=cache, logger=logging, messages=1)
    router = GatewayRouter(cache=cache, logger=logging, messages=2)

    with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
        results = [
            executor.submit(dummy_response),
            executor.submit(dummy_initiator),
            executor.submit(dealer.start),
            executor.submit(router.start)
        ]
        for future in concurrent.futures.as_completed(results):
            try:
                result = future.result()
                # Only the initiator returns a value; part threads and
                # the dummy broker return None.
                if result:
                    got.append(result)
            except Exception as exc:
                print(exc)

    message = PalmMessage()
    message.ParseFromString(got[0])
    assert message.payload == b'This is a message'
def __init__(self, name='', listen_address='inproc://gateway_router',
             hostname='', port=8888, cache=None, logger=None):
    """
    HTTP front end: configures the request handler class and builds the
    HTTP server that forwards requests to the gateway router.

    :param name: Accepted for interface compatibility; not used here.
    :param listen_address: Address of the gateway router the handler
        connects to.
    :param hostname: Hostname the HTTP server binds to.
    :param port: Port the HTTP server binds to.
    :param cache: Accepted for interface compatibility; not used here.
        (BUG FIX: the default used to be ``cache=DictDB()``, a mutable
        default evaluated once at definition time and shared by every
        instance; since the value is never read, ``None`` is now the
        default.)
    :param logger: Logger instance handed to the request handler.
    """
    self.handler = MyHandler
    # NOTE(review): these assignments mutate the MyHandler *class*, so
    # every HttpGateway instance shares the last-set values — confirm
    # only one gateway is instantiated per process.
    self.handler.gateway_router_address = listen_address
    self.handler.logger = logger
    self.server = MyServer((hostname, port), self.handler)
    self.logger = logger
    self.port = port
def test_gateway_http():
    """
    Test function for the complete gateway with a dummy router.

    Same wiring as the dealer test, plus an HttpGateway running in debug
    mode; the client side is a plain HTTP GET instead of a ZMQ REQ.
    """
    cache = DictDB()

    def dummy_response():
        # Stands in for the broker: acknowledge the message from the
        # gateway router and forward it to the gateway dealer part.
        dummy_router = zmq_context.socket(zmq.ROUTER)
        dummy_router.bind('inproc://broker')
        [target, empty, message] = dummy_router.recv_multipart()
        dummy_router.send_multipart([target, empty, b'0'])
        broker_message = PalmMessage()
        broker_message.ParseFromString(message)
        dummy_router.send_multipart([b'gateway_dealer', empty, message])
        # Drain the final frame so the dealer is not left blocked.
        [target, message] = dummy_router.recv_multipart()

    def dummy_initiator():
        # Acts as the HTTP client; the request carries no payload, so
        # the gateway is expected to answer 'No Payload'.
        r = requests.get('http://localhost:8888/function')
        return r.text

    got = []
    dealer = GatewayDealer(cache=cache, logger=logging, messages=1)
    router = GatewayRouter(cache=cache, logger=logging, messages=2)
    http = HttpGateway(cache=cache, logger=logging)

    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        results = [
            executor.submit(dummy_response),
            executor.submit(dummy_initiator),
            executor.submit(dealer.start),
            executor.submit(router.start),
            executor.submit(http.debug)
        ]
        for future in concurrent.futures.as_completed(results):
            try:
                result = future.result()
                # Only the HTTP client returns a value.
                if result:
                    got.append(result)
            except Exception as exc:
                print(exc)

    assert got[0] == 'No Payload'
def __init__(self, name, db_address, sub_addresses, pub_address, previous,
             to_client=True, log_level=logging.INFO, messages=sys.maxsize):
    """
    Part that subscribes to several previous stages at once and
    re-publishes everything it receives through a single PUB socket.

    :param name: Name of the part, also stored in the local cache.
    :param db_address: Address of the cache service.
    :param sub_addresses: List of PUB addresses to subscribe to.
    :param pub_address: Address this part's PUB socket binds to.
    :param previous: List of topics, one per subscription address.
    :param to_client: True when the output goes straight to a client;
        the part is pipelined otherwise.
    :param log_level: Logging level for the console logger.
    :param messages: Maximum number of messages to process.
    :raises TypeError: If ``sub_addresses`` or ``previous`` is not a list.
    :raises ValueError: If the two lists differ in length.
    """
    # BUG FIX: validation used ``assert type(x) == list``; asserts are
    # stripped under ``python -O`` and ``type() ==`` rejects subclasses.
    # Validate up front with isinstance and real exceptions instead.
    if not isinstance(sub_addresses, list):
        raise TypeError('sub_addresses must be a list')
    if not isinstance(previous, list):
        raise TypeError('previous must be a list')
    # ``zip`` silently truncates on unequal lengths, which would drop
    # subscriptions without any warning; fail loudly instead.
    if len(sub_addresses) != len(previous):
        raise ValueError(
            'sub_addresses and previous must have the same length')

    self.name = name
    self.cache = DictDB()
    self.db_address = db_address
    self.sub_addresses = sub_addresses
    self.pub_address = pub_address
    self.pipelined = not to_client
    self.message = None

    # Record the connection details in the local cache.
    self.cache.set('name', name.encode('utf-8'))
    for i, address in enumerate(sub_addresses):
        self.cache.set('sub_address_{}'.format(i), address.encode('utf-8'))
    self.cache.set('pub_address', pub_address.encode('utf-8'))

    # Console logger.
    self.logger = logging.getLogger(name=name)
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(
        logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
    )
    self.logger.addHandler(handler)
    self.logger.setLevel(log_level)
    self.messages = messages

    # One SUB socket per upstream stage, each subscribed to its topic.
    self.sub_sockets = list()
    for address, prev in zip(self.sub_addresses, previous):
        self.sub_sockets.append(zmq_context.socket(zmq.SUB))
        self.sub_sockets[-1].setsockopt_string(zmq.SUBSCRIBE, prev)
        self.sub_sockets[-1].connect(address)

    self.pub_socket = zmq_context.socket(zmq.PUB)
    self.pub_socket.bind(self.pub_address)

    # Poll all subscriptions at once.
    self.poller = zmq.Poller()
    for sock in self.sub_sockets:
        self.poller.register(sock, zmq.POLLIN)
def test_get_config():
    """
    A client must be able to fetch its push/sub configuration from the
    cache service while a bare ROUTER socket holds the broker endpoint.
    """
    db = DictDB()
    db.set('name', b'master')
    db.set('pub_address', pub_address.encode('utf-8'))
    db.set('pull_address', pull_address.encode('utf-8'))

    cache_service = CacheService('db', db_address, cache=db,
                                 logger=logging, messages=3)

    def boot_client():
        # The client reads its addresses from the cache service.
        client = Client('master', db_address, session=None)
        return client.push_address, client.sub_address

    def broker():
        # Just bind the broker endpoint so connections do not fail.
        socket = zmq_context.socket(zmq.ROUTER)
        socket.bind(broker_address)

    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        futures = [
            executor.submit(cache_service.start),
            executor.submit(boot_client),
            executor.submit(broker),
        ]
        # This works because servers do not return values.
        for i, future in enumerate(concurrent.futures.as_completed(futures)):
            try:
                print(future.result())
            except Exception as exc:
                print(exc)
                lines = traceback.format_exception(*sys.exc_info())
                print(lines)

    assert i == 2
def __init__(self, name='gateway_router',
             listen_address='inproc://gateway_router',
             broker_address="inproc://broker", cache=None, logger=None,
             messages=sys.maxsize):
    """
    ROUTER part of the gateway. The part identity is fixed to
    'gateway_router' regardless of the ``name`` argument, because the
    dealer part addresses it by that literal name.

    :param name: Accepted for interface compatibility; any value other
        than 'gateway_router' only triggers a warning and is ignored.
    :param listen_address: Address the ROUTER socket binds to.
    :param broker_address: Address of the broker.
    :param cache: Key-value store; a fresh DictDB is created when
        omitted.
    :param logger: Logger instance.
    :param messages: Maximum number of messages before the part stops.
    """
    # BUG FIX: the default used to be ``cache=DictDB()``, a mutable
    # default evaluated once at definition time, so all instances
    # created without an explicit cache shared the same store.
    if cache is None:
        cache = DictDB()
    super(GatewayRouter, self).__init__(
        'gateway_router',
        listen_address,
        zmq.ROUTER,
        reply=False,
        broker_address=broker_address,
        bind=True,
        cache=cache,
        logger=logger,
        messages=messages,
    )
    # BUG FIX: the warning used ``if name:``, which is true for the
    # default value too, so it fired on every construction. Warn only
    # when the caller supplied a *different* name that will be ignored.
    if name and name != 'gateway_router':
        self.logger.warning('Gateway router part is called "gateway_router",')
        self.logger.warning('check that you have called this way')
def test_send_job():
    """
    Push one job from a client through puller → dummy broker →
    publisher and back, checking all five threads complete.
    """
    cache = DictDB()
    cache.set('name', b'master')
    cache.set('pub_address', pub_address.encode('utf-8'))
    cache.set('pull_address', pull_address.encode('utf-8'))

    cache_service = CacheService('db', db_address,
                                 cache=cache,
                                 logger=logging,
                                 messages=3,
                                 )
    puller = PullService('puller', pull_address,
                         logger=logging, cache=cache, messages=1)
    publisher = PubService('publisher', pub_address,
                           logger=logging, cache=cache, messages=1)

    def client_job():
        # Send a single payload and collect the single result.
        client = Client('master', db_address, session=None)
        return [r for r in client.job('master.something', [b'1'], messages=1)]

    def broker():
        # Minimal stand-in for the router: echo the message back to the
        # puller, then forward the payload frame to the publisher part.
        socket = zmq_context.socket(zmq.ROUTER)
        socket.bind(broker_address)
        message = socket.recv_multipart()
        # Unblock. Here you see why the actual router is complicated.
        socket.send_multipart(message)
        socket.send_multipart([b'publisher', b'', message[2]])
        socket.close()
        return b'router'

    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
        results = [
            executor.submit(cache_service.start),
            executor.submit(puller.start),
            executor.submit(publisher.start),
            executor.submit(client_job),
            executor.submit(broker)
        ]
        # This works because servers do not return values.
        for i, future in enumerate(concurrent.futures.as_completed(results)):
            try:
                result = future.result()
                print(result)
            except Exception as exc:
                print(exc)
                lines = traceback.format_exception(*sys.exc_info())
                print(*lines)

    # All five futures completed (i is the last enumeration index).
    assert i == 4
def test_multiple_clients():
    """
    Run two clients concurrently against the real Router and check each
    one gets its own pair of payloads back, in order.
    """
    cache = DictDB()
    cache.set('name', b'master')
    cache.set('pub_address', pub_address.encode('utf-8'))
    cache.set('pull_address', pull_address.encode('utf-8'))

    # Real router this time: route everything from 'puller' to
    # 'publisher'.
    router = Router(logger=logging, cache=cache, messages=4)
    router.register_inbound('puller', route='publisher')
    router.register_outbound('publisher')

    cache_service = CacheService('db', db_address,
                                 cache=cache,
                                 logger=logging,
                                 messages=6,
                                 )
    puller = PullService('puller', pull_address,
                         broker_address=router.inbound_address,
                         logger=logging, cache=cache, messages=4)
    publisher = PubService('publisher', pub_address,
                           broker_address=router.outbound_address,
                           logger=logging, cache=cache, messages=4)

    def client1_job():
        client = Client('master', db_address, session=None)
        return [r for r in client.job('master.something',
                                      [b'1', b'2'],
                                      messages=2)]

    def client2_job():
        client = Client('master', db_address, session=None)
        return [r for r in client.job('master.something',
                                      [b'3', b'4'],
                                      messages=2)]

    with concurrent.futures.ThreadPoolExecutor(max_workers=6) as executor:
        results = [
            executor.submit(cache_service.start),
            executor.submit(puller.start),
            executor.submit(publisher.start),
            executor.submit(router.start),
            executor.submit(client1_job),
            executor.submit(client2_job),
        ]
        # This works because servers do not return values.
        for i, future in enumerate(concurrent.futures.as_completed(results)):
            try:
                result = future.result()
                # Only the client jobs return lists; unpack the payloads
                # and check each client got exactly its own batch.
                if type(result) == list:
                    got = []
                    for r in result:
                        message = PalmMessage()
                        message.ParseFromString(r)
                        got.append(message.payload)
                    assert got == [b'1', b'2'] or got == [b'3', b'4']
            except Exception as exc:
                print(exc)
                lines = traceback.format_exception(*sys.exc_info())
                print(*lines)

    # All six futures completed (i is the last enumeration index).
    assert i == 5
import concurrent.futures import time import zmq import logging import sys import traceback logger = logging.getLogger('test_service_pub') logger.setLevel(logging.DEBUG) handler = logging.StreamHandler(sys.stdout) handler.setFormatter( logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')) logger.addHandler(handler) logger.setLevel(logging.DEBUG) cache = DictDB() listen_address = 'inproc://pub1' broker_address = 'inproc://broker1' pub_service = PubService('pull_service', listen_address=listen_address, broker_address=broker_address, logger=logger, cache=cache, messages=1) def fake_router(): original_message = PalmMessage() original_message.pipeline = 'pipeline'