Example #1
def rci_test(coro=None):
    # if server is on remote network, automatic discovery won't work,
    # so add it explicitly
    # yield scheduler.peer('192.168.21.5')

    # find where 'rci_1' is registered (with given name in any
    # known peer)
    rci1 = yield asyncoro.RCI.locate('rci_1')
    print('RCI is at %s' % rci1.location)
    # alternatively, the location can be created explicitly with
    # asyncoro.Location or obtained with asyncoro's 'locate', etc.
    loc = yield scheduler.locate('server')
    rci1 = yield asyncoro.RCI.locate('rci_1', loc)
    print('RCI is at %s' % rci1.location)

    n = 5
    monitor = asyncoro.Coro(monitor_proc, n)
    for x in range(n):
        rcoro = yield rci1('test%s' % x, b=x)
        asyncoro.logger.debug('RCI %s created' % rcoro)
        # set 'monitor' as monitor for this coroutine
        yield monitor.monitor(rcoro)
        # send a message
        rcoro.send('msg:%s' % x)
        yield coro.sleep(random.uniform(0, 1))
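
The 'monitor_proc' coroutine used above is not part of this snippet. A minimal sketch of what it might look like, assuming asyncoro's convention that a monitor receives a MonitorException whose args[0] is the terminated coroutine and args[1] holds the exception type and value:

def monitor_proc(n, coro=None):
    # wait for one notification per monitored (remote) coroutine
    done = 0
    while done < n:
        msg = yield coro.receive()
        if not isinstance(msg, asyncoro.MonitorException):
            asyncoro.logger.warning('ignoring message: %s' % str(msg))
            continue
        rcoro, exc = msg.args[0], msg.args[1]
        if exc[0] == StopIteration:
            # normal termination; exc[1] is the coroutine's result
            asyncoro.logger.debug('RCI %s finished with: %s' % (rcoro, exc[1]))
        else:
            asyncoro.logger.debug('RCI %s failed with: %s' % (rcoro, exc[1]))
        done += 1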
Example #2
def peer_status(name, location, status):
    # this function is called when peer is discovered (status=1) or
    # when peer terminates (status=0)
    print('peer "%s" @ %s status: %s' % (name, location, status))
    if status: # peer came online
        asyncoro.Coro(client_proc, computation, location)
    else: # peer terminated
        # if any pending processes, send them to other peers?
        pass
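
Later asyncoro releases deliver these notifications as messages to a coroutine registered with the scheduler's 'peer_status' method instead of calling a plain function. A hedged sketch assuming that newer API (asyncoro.PeerStatus with Online/Offline constants); names may differ in the version used here:

def peer_monitor(coro=None):
    coro.set_daemon()
    while True:
        status = yield coro.receive()
        if not isinstance(status, asyncoro.PeerStatus):
            continue
        if status.status == asyncoro.PeerStatus.Online:
            # a peer came online; start a client for it
            asyncoro.Coro(client_proc, computation, status.location)
        else:
            # peer terminated; pending work could be resubmitted elsewhere
            print('peer at %s terminated' % status.location)

# register the monitor with the scheduler (assumed API):
# scheduler.peer_status(asyncoro.Coro(peer_monitor))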
Example #3
def receiver_proc2(coro=None):
    # if server is in remote network, add it explicitly
    # yield scheduler.peer('remote.peer.ip', stream_send=True)
    # get reference to remote channel in server
    rchannel = yield asyncoro.Channel.locate('2clients')
    # this coro subscribes to the channel, so it gets messages sent to
    # the server's channel
    print('server is at %s' % rchannel.location)
    if (yield rchannel.subscribe(coro)) != 0:
        raise Exception('subscription failed')
    asyncoro.Coro(sender_proc, rchannel)
    while True:
        msg = yield coro.receive()
        if msg is None:
            break
        print('Received "%s" from %s at %s' % \
              (msg['msg'], msg['sender'].name, msg['sender'].location))
    yield rchannel.unsubscribe(coro)
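
The '2clients' channel this client locates must be created and registered by a server. A minimal sketch of such a server, assuming the channel only needs to be created and registered (with asyncoro.Channel and its 'register' method) so remote peers can find it by name:

import sys, logging
if sys.version_info.major >= 3:
    import disasyncoro3 as asyncoro
else:
    import disasyncoro as asyncoro

asyncoro.logger.setLevel(logging.DEBUG)
channel = asyncoro.Channel('2clients')
# make the channel discoverable by remote peers under its name
channel.register()
while True:
    cmd = sys.stdin.readline().strip().lower()
    if cmd in ('quit', 'exit'):
        break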
Example #4
def client_proc(computation, location, coro=None):
    server = yield asyncoro.Coro.locate('discoro_server', location, timeout=2)
    if not server:
        raise Exception('server not found at %s' % location)
    # if messages are sent to the server at high frequency, it may be
    # more efficient to stream them; similarly, if a lot of data is sent
    # back, the function 'compute' can enable streaming to this client
    yield scheduler.peer(location.addr, tcp_port=location.port, stream_send=True)

    # distribute computation to server
    if (yield computation.setup(server, timeout=3)):
        raise Exception('setup on %s failed' % location)
    hb_coro = asyncoro.Coro(heartbeat, computation, server)

    n = 3
    for _ in range(n):
        # create k coroutines at the server with this computation; note
        # that at any given time only one coroutine runs at the server,
        # so compute-intensive tasks won't benefit from concurrent scheduling
        k = 2
        for i in range(k):
            obj = C(i) # create object of C
            rcoro = yield computation.run(server, obj, coro)
            if not isinstance(rcoro, asyncoro.Coro):
                print('failed to run on %s' % server.location)
                k -= 1
                # terminate heartbeat too?
                continue
            r = random.uniform(10, 100) # send data to remote coro
            print('sending %d, %.3f to %s' % (i, r, location))
            rcoro.send(r)
        for i in range(k):
            # result is instance of C
            result = yield coro.receive()
            print('result: %d, %s from %s' % (i, result, location))
    yield computation.close(server)
    hb_coro.terminate()
    # disable streaming; otherwise, the peer stays connected, which prevents
    # it from terminating automatically even after all coroutines have
    # finished
    yield scheduler.peer(location.addr, tcp_port=location.port, stream_send=False)
    print('server %s is done' % location)
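
Neither 'C' nor the remote function 'compute' (named in the comment above), nor 'heartbeat', is shown in this snippet. A hedged sketch of the first two, inferred from how the client uses them: a C instance is passed to the remote coroutine, which then receives the number sent with 'rcoro.send(r)' and sends a C instance back to the client coroutine. 'heartbeat' is omitted since its behavior is not described here.

class C(object):
    # small picklable object exchanged between client and server
    def __init__(self, i):
        self.i = i
        self.n = None

    def __repr__(self):
        return '%d: %s' % (self.i, self.n)

def compute(obj, client, coro=None):
    # runs on the remote server; wait for the value the client sends
    # with 'rcoro.send(r)'
    obj.n = yield coro.receive()
    # simulate some work, then send the updated object back; the client
    # receives it with 'yield coro.receive()'
    yield coro.sleep(obj.n / 100.0)
    client.send(obj)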
Example #5
# client program for sending requests to remote server (tut_server.py)
# using message passing (asynchronous concurrent programming);
# see http://asyncoro.sourceforge.net/tutorial.html for details.

import sys, random, logging
if sys.version_info.major >= 3:
    import disasyncoro3 as asyncoro
else:
    import disasyncoro as asyncoro

def client_proc(n, coro=None):
    global msg_id
    server = yield asyncoro.Coro.locate('server_coro')
    for x in range(3):
        # yield coro.suspend(random.uniform(0.5, 3))
        msg_id += 1
        server.send('%d: %d / %d' % (msg_id, n, x))

msg_id = 0
asyncoro.logger.setLevel(logging.DEBUG)
scheduler = asyncoro.AsynCoro(udp_port=0)
for i in range(1):
    asyncoro.Coro(client_proc, i)
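
The matching server (tut_server.py, Example #9) only consumes these messages. When a reply is needed, the usual asyncoro pattern is to include the sending coroutine's reference in the message so the receiver can answer it; a hedged sketch of such a variant (the reply-capable server is an assumption, not part of the tutorial):

def client_proc_with_reply(n, coro=None):
    server = yield asyncoro.Coro.locate('server_coro')
    for x in range(3):
        # include this coroutine's reference so a (modified) server
        # could send a response back
        server.send({'req': '%d / %d' % (n, x), 'client': coro})
        reply = yield coro.receive()
        print('reply: %s' % reply)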
Example #6
#!/usr/bin/env python

# client sends messages to a remote coroutine
# use with its server 'remote_coro_server.py'

import sys, logging
# import disasyncoro to use distributed version of AsynCoro
if sys.version_info.major >= 3:
    import disasyncoro3 as asyncoro
else:
    import disasyncoro as asyncoro


def sender(coro=None):
    # if server is in remote network, add it; set 'stream_send' to
    # True for streaming messages to it
    # yield scheduler.peer('remote.peer.ip', stream_send=True)
    rcoro = yield asyncoro.Coro.locate('server_coro')
    print('server is at %s' % rcoro.location)
    for x in range(10):
        rcoro.send('message %s' % x)


asyncoro.logger.setLevel(logging.DEBUG)
# scheduler = asyncoro.AsynCoro(secret='key')
asyncoro.Coro(sender)
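
'send' is fire-and-forget. If the sender needs confirmation that a message actually reached the remote coroutine, asyncoro's 'deliver' can be used instead; it must be used with 'yield', accepts a timeout, and its return value indicates whether delivery succeeded (compare the channel 'deliver' in Example #8). A hedged variant of the sender using 'deliver':

def sender_with_delivery(coro=None):
    rcoro = yield asyncoro.Coro.locate('server_coro')
    for x in range(10):
        # a falsy result is assumed to mean the message was not delivered
        status = yield rcoro.deliver('message %s' % x, timeout=5)
        if not status:
            print('message %s could not be delivered' % x)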
Example #7
# run this program first and then the client, either on the same node or on
# a different node in the local network. The server and client can also run
# on two different networks, but then the client must call the
# 'scheduler.peer' method appropriately.

import sys, logging
# import disasyncoro to use distributed version of AsynCoro
if sys.version_info.major >= 3:
    import disasyncoro3 as asyncoro
else:
    import disasyncoro as asyncoro

def receiver(coro=None):
    coro.set_daemon()
    coro.register('server_coro')
    while True:
        msg = yield coro.receive()
        print('Received %s' % msg)

asyncoro.logger.setLevel(logging.DEBUG)
# the scheduler is created automatically when the first Coro is created;
# it can also be created explicitly, e.g., with 'udp_port=0' or with a
# secret for authentication:
# scheduler = asyncoro.AsynCoro(secret='key')

asyncoro.Coro(receiver)
while True:
    try:
        x = sys.stdin.readline()
    except KeyboardInterrupt:
        break

Example #8
def sender_proc(rchannel, coro=None):
    # deliver messages to the channel; with n=2, each 'deliver' waits
    # until the message reaches at least 2 subscribers
    for x in range(10):
        msg = {'msg': 'message %s' % x, 'sender': coro}
        n = yield rchannel.deliver(msg, n=2)
        print('delivered to: %s' % n)
    rchannel.send(None)


def receiver_proc2(coro=None):
    # if server is in remote network, add it explicitly
    # yield scheduler.peer('remote.peer.ip', stream_send=True)
    # get reference to remote channel in server
    rchannel = yield asyncoro.Channel.locate('2clients')
    # this coro subscribes to the channel, so it gets messages sent to
    # the server's channel
    print('server is at %s' % rchannel.location)
    if (yield rchannel.subscribe(coro)) != 0:
        raise Exception('subscription failed')
    asyncoro.Coro(sender_proc, rchannel)
    while True:
        msg = yield coro.receive()
        if msg is None:
            break
        print('Received "%s" from %s at %s' % \
              (msg['msg'], msg['sender'].name, msg['sender'].location))
    yield rchannel.unsubscribe(coro)


asyncoro.logger.setLevel(logging.DEBUG)
# scheduler = asyncoro.AsynCoro()
asyncoro.Coro(receiver_proc2)
Example #9
# server program for processing requests received with message passing
# (asynchronous concurrent programming) from remote client
# (tut_client.py) on same network;
# see http://asyncoro.sourceforge.net/tutorial.html for details.

import sys, random, logging
if sys.version_info.major >= 3:
    import disasyncoro3 as asyncoro
else:
    import disasyncoro as asyncoro

def server_proc(coro=None):
    coro.set_daemon()
    coro.register('server_coro')
    for x in range(10):
        msg = yield coro.receive()
        print('processing %s' % (msg))
    coro.unregister('server_coro')

asyncoro.logger.setLevel(logging.DEBUG)
scheduler = asyncoro.AsynCoro(udp_port=0)
server = asyncoro.Coro(server_proc)
while True:
    cmd = sys.stdin.readline().strip().lower()
    if cmd == 'quit' or cmd == 'exit':
        break
Example #10
def rci_test(coro=None):
    # if server is on remote network, automatic discovery won't work,
    # so add it explicitly
    # yield scheduler.peer('192.168.21.5')

    # find where 'rci_1' is registered (with given name in any
    # known peer)
    rci1 = yield asyncoro.RCI.locate('rci_1')
    print('RCI is at %s' % rci1.location)
    # alternatively, the location can be created explicitly with
    # asyncoro.Location or obtained with asyncoro's 'locate', etc.
    loc = yield scheduler.locate('server')
    rci1 = yield asyncoro.RCI.locate('rci_1', loc)
    print('RCI is at %s' % rci1.location)

    n = 5
    monitor = asyncoro.Coro(monitor_proc, n)
    for x in range(n):
        rcoro = yield rci1('test%s' % x, b=x)
        asyncoro.logger.debug('RCI %s created' % rcoro)
        # set 'monitor' as monitor for this coroutine
        yield monitor.monitor(rcoro)
        # send a message
        rcoro.send('msg:%s' % x)
        yield coro.sleep(random.uniform(0, 1))

asyncoro.logger.setLevel(logging.DEBUG)
scheduler = asyncoro.AsynCoro(name='client', secret='test')
asyncoro.Coro(rci_test)