async def polyamorous_send_and_recv():
    address = 'tcp://127.0.0.1:12343'
    with Pair1(listen=address, polyamorous=True) as s0, \
            Pair1(dial=address, polyamorous=True) as s1, \
            Pair1(dial=address, polyamorous=True) as s2:
        await s1.asend(b'hello from s1')
        await s2.asend(b'hello from s2')
        msg1 = await s0.arecv_msg()
        msg2 = await s0.arecv_msg()
        print(msg1.bytes)  # prints b'hello from s1'
        print(msg2.bytes)  # prints b'hello from s2'
        await msg1.pipe.asend(b'hey s1')
        await msg2.pipe.asend(b'hey s2')
        print(await s2.arecv())  # prints b'hey s2'
        print(await s1.arecv())  # prints b'hey s1'
async def test_worker():
    with Pair1(listen=f"{SERVER_URL}:{SERVER_PORT}",
               polyamorous=True,
               recv_timeout=500) as worker_socket:
        worker_task = asyncio.create_task(
            worker(SERVER_URL, SERVER_PORT, num_workers=1))

        message = await worker_socket.arecv()
        assert message == b"Ready"

        dummy_work = {
            "@module": "tests.cli.test_distributed",
            "@class": "DummyBuilder",
            "@version": None,
            "dummy_prechunk": False,
            "val": 0,
        }

        for i in range(2):
            await worker_socket.asend(json.dumps(dummy_work).encode("utf-8"))
            await asyncio.sleep(1)
            message = await worker_socket.arecv()
            assert message == b"Ready"

        await worker_socket.asend(json.dumps({}).encode("utf-8"))

        with pytest.raises(Timeout):
            await worker_socket.arecv()

        assert len(worker_socket.pipes) == 0

        worker_task.cancel()
async def run(self):
    logger.info(f"Remote is preparing to dial on {self.config.ui_address}")

    with Pair1(dial=self.config.ui_address) as raw_socket:
        self.middleware_socket = AsyncUnbufferedSocket(
            raw_socket, middleware=self.middleware)
        logger.remove()  # remote print logging
        logger.add(self.forward_log)
        logger.info(
            f"Remote has opened socket on {self.config.ui_address}")
        logger.info("Installed log forwarding")

        while True:
            # try to forward one message to the remote ui if available
            try:
                message = self.messages.get_nowait()
                if isinstance(message, RequestShutdown):
                    logger.error(
                        "The remote should not be shut down. This should happen "
                        "implicitly through task cancellation. Ignoring request."
                    )
                    continue
                await self.middleware_socket.asend(message)
            except QueueEmpty:
                pass

            # then we try to receive a message and dispatch it on the
            # local message passing system. Typically this means
            # forwarding messages to the App or to the Experiment
            # instance.
            try:
                message = self.middleware_socket.recv(block=False)
                self.app.messages.put_nowait(message)
            except TryAgain:
                pass

            await asyncio.sleep(0.01)
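# The polling loop above relies on pynng's non-blocking receive raising
# TryAgain when nothing is queued. Minimal standalone sketch of that
# behaviour on a bare Pair1 socket (the address is a placeholder
# assumption, and no middleware wrapper is involved here):
from pynng import Pair1, TryAgain

with Pair1(listen='tcp://127.0.0.1:12346') as sock:
    try:
        sock.recv(block=False)   # nothing has been sent yet, so this raises
    except TryAgain:
        pass                     # no message available right now; poll again later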
def __init__(self, settings):
    super().__init__()
    self.running = False
    self.settings = settings
    self.serial = None
    self.connected = False
    self.pubSocket = Pub0()
    self.msgSocket = Pair1(polyamorous=True)
    self.firstDataReceived = False
    self.lastDataTime = time.time()
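# The constructor above creates its sockets without a transport address;
# pynng sockets can be bound later with listen() or dial(). Minimal sketch
# of that deferred setup (the address is a placeholder assumption):
from pynng import Pair1

sock = Pair1(polyamorous=True)        # socket exists, but is not yet reachable
sock.listen('tcp://127.0.0.1:12345')  # bind once the address is known
# ... use the socket ...
sock.close()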
async def manager(url: str, port: int, builders: List[Builder], num_chunks: int):
    """
    Really simple manager for distributed processing that uses a builder
    prechunk to modify the builder and send out modified builders for each
    worker to run.
    """
    logger = getLogger("Manager")

    logger.info(f"Binding to Manager URL {url}:{port}")
    with Pair1(listen=f"{url}:{port}", polyamorous=True) as workers:
        for builder in builders:
            logger.info(f"Working on {builder.__class__.__name__}")
            builder_dict = builder.as_dict()

            try:
                builder.connect()
                chunk_dicts = list(builder.prechunk(num_chunks))
                logger.info(
                    f"Distributing {len(chunk_dicts)} chunks to workers")

                for chunk_dict in tqdm(chunk_dicts, desc="Chunks"):
                    temp_builder_dict = dict(**builder_dict)
                    temp_builder_dict.update(chunk_dict)
                    temp_builder_dict = jsanitize(temp_builder_dict)

                    # Wait for a client connection that announces the client
                    # and says it is ready to do work
                    logger.debug("Waiting for a worker")
                    worker = await workers.arecv_msg()
                    logger.debug(
                        f"Got connection from worker: {worker.pipe.remote_address}"
                    )

                    # Send out the next chunk
                    await worker.pipe.asend(
                        json.dumps(temp_builder_dict).encode("utf-8"))
            except NotImplementedError:
                logger.error(
                    f"Can't distribute processing of "
                    f"{builder.__class__.__name__}. Skipping for now"
                )

        # Clean up and tell workers to shut down
        await wait([
            pipe.asend(json.dumps({}).encode("utf-8"))
            for pipe in workers.pipes
        ])
async def test_master_give_out_chunks(master_server, log_to_stdout):
    with Pair1(dial=SERVER_URL, polyamorous=True,
               recv_timeout=500) as master_socket:
        for i in range(0, 10):
            log_to_stdout.debug(f"Going to ask Master for work: {i}")
            await master_socket.asend(b"Ready")
            message = await master_socket.arecv()
            print(message)
            work = json.loads(message.decode("utf-8"))
            assert work["@class"] == "DummyBuilder"
            assert work["@module"] == "tests.cli.test_distributed"
            assert work["val"] == i

        await master_socket.asend(b"Ready")
        message = await master_socket.arecv()
        work = json.loads(message.decode("utf-8"))
        assert work == {}
async def test_master_give_out_chunks(master_server):
    with Pair1(dial="tcp://127.0.0.1:8234", polyamorous=True) as master_socket:
        for i in range(0, 10):
            await master_socket.asend(b"Ready")
            message = await master_socket.arecv()
            work = json.loads(message.decode("utf-8"))
            assert work["@class"] == "DummyBuilder"
            assert work["@module"] == "tests.cli.test_distributed"
            assert work["val"] == i

        await master_socket.asend(b"Ready")
        message = await master_socket.arecv()
        work = json.loads(message.decode("utf-8"))
        assert work == {}
async def worker(url: str, num_workers: int):
    """
    Simple distributed worker that connects to a master, asks for work,
    and deploys it using multiprocessing.
    """
    # Should this have some sort of unique ID?
    logger = getLogger("Worker")

    logger.info(f"Connecting to Master at {url}")
    with Pair1(dial=url, polyamorous=True) as master:
        logger.info(f"Connected to Master at {url}")
        running = True
        while running:
            await master.asend(b"Ready")
            message = await master.arecv()
            work = json.loads(message.decode("utf-8"))

            if "@class" in work and "@module" in work:
                # We have a valid builder
                builder = MontyDecoder().process_decoded(work)
                await multi(builder, num_workers)
            else:
                # End the worker
                # This should look for a specific message?
                running = False
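# Hedged sketch of how the manager and worker coroutines above could be
# wired together for a local run. The address, port, chunk count, and
# my_builder (a hypothetical Builder instance) are placeholder assumptions.
import asyncio

async def main():
    # one manager handing out chunks, one worker asking for them
    await asyncio.gather(
        manager("tcp://127.0.0.1", 8234, builders=[my_builder], num_chunks=4),
        worker("tcp://127.0.0.1:8234", num_workers=2),
    )

asyncio.run(main())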
async def test_manager_wait_for_ready(manager_server):
    with Pair1(dial=f"{SERVER_URL}:{SERVER_PORT}",
               polyamorous=True,
               recv_timeout=100) as manager:
        with pytest.raises(Timeout):
            manager.recv()
from pynng import Pair1

address = 'tcp://127.0.0.1:12343'
with Pair1(listen=address, polyamorous=True) as s0, \
        Pair1(dial=address, polyamorous=True) as s1, \
        Pair1(dial=address, polyamorous=True) as s2:
    s1.send(b'hello from s1')
    s2.send(b'hello from s2')
    msg1 = s0.recv_msg()
    msg2 = s0.recv_msg()
    print(msg1.bytes)  # prints b'hello from s1'
    print(msg2.bytes)  # prints b'hello from s2'
    msg1.pipe.send(b'hey s1')
    msg2.pipe.send(b'hey s2')
    print(s2.recv())  # prints b'hey s2'
    print(s1.recv())  # prints b'hey s1'
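# Several of the test snippets above pass recv_timeout and expect pynng's
# Timeout exception. Minimal sketch of that behaviour in isolation (the
# address is a placeholder assumption; recv_timeout is in milliseconds):
from pynng import Pair1, Timeout

address = 'tcp://127.0.0.1:12344'
with Pair1(listen=address, polyamorous=True, recv_timeout=100) as s0:
    try:
        s0.recv()          # no peer ever sends, so this raises after ~100 ms
    except Timeout:
        print('no message within the 100 ms receive timeout')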
from time import time
import asyncio
import signal
from flexx import flx
from solar_settings import SolarSettings
from solar_data import SolarData
from pynng import Pair1, Sub0, TryAgain

# serial data in and out
subSocket = Sub0(dial=SolarSettings.serial_pub_address)
subSocket.subscribe("solardata")
msgSocket = Pair1(dial=SolarSettings.serial_msg_address, polyamorous=True)


class Relay(flx.Component):

    number_of_connections = flx.IntProp(settable=True)

    def init(self):
        self.update_number_of_connections()

    @flx.manager.reaction('connections_changed')
    def update_number_of_connections(self, *events):
        n = 0
        for name in flx.manager.get_app_names():
            sessions = flx.manager.get_connections(name)
            n += len(sessions)
        self.set_number_of_connections(n)

    @flx.emitter
    def new_data(self, serial_data):
        return dict(serial_data=serial_data,
        self.label.configure(text=data)
        pass

    def command_shutdown(self):
        print("shutdown")
        os.system("sudo init 0")


if __name__ == '__main__':
    app = SolarTkApp()

    # Set signal before starting
    signal.signal(signal.SIGINT, app.sigint_handler)
    app.start()

    subSocket = Sub0(dial=SolarSettings.serial_pub_address)
    subSocket.subscribe(b'solardata')
    msgSocket = Pair1(dial=SolarSettings.serial_msg_address)

    while app.running:
        data = subSocket.recv()
        sd = SolarData(from_byte=data)
        dictData = sd.as_dict()
        str_data = ""
        i = 1
        for d in dictData:
            if d == 'time':
                continue
            if (i % 2) == 1:
                if i != 1:
                    str_data += '\n'
                str_data += "{0: <5}: ".format(d)
            else:
async def test_master_wait_for_ready(master_server):
    with Pair1(dial="tcp://127.0.0.1:8234", polyamorous=True,
               recv_timeout=100) as master:
        with pytest.raises(Timeout):
            master.recv()