Code Example #1
File: BeremizIDE.py  Project: graynet-dev/Beremiz-3
    def startUpdater(self, mpUpdater):
        queueCmd = aioprocessing.AioQueue()
        queueData = aioprocessing.AioQueue()
        self.updater = aioprocessing.AioProcess(name='updater', target=startcoUpdater, args=(queueCmd, queueData))
        self.updater.start()
        StartCoroutine(mpUpdater(self.updater, queueCmd, queueData), self)
        return queueCmd, queueData
Code Example #2
    def __init__(
        self,
        uri="ws://localhost:9873",
        timeout_reconnect=DEFAULT_RECONNECT_TIMEOUT,
        #buffer_failed_sends=False,
        loads=umsgpack.loads,
        dumps=umsgpack.dumps,
        name=__name__,
    ):
        self.name = name
        self.log = logging.getLogger(name)
        self.uri = uri
        self.timeout_reconnect = timeout_reconnect if isinstance(
            timeout_reconnect, datetime.timedelta) else datetime.timedelta(
                seconds=timeout_reconnect)
        self.timeout_msg = self.timeout_reconnect  # temp
        self.timeout_ping = self.timeout_reconnect  # temp
        #self.buffer_failed_sends = buffer_failed_sends
        self.loads = loads
        self.dumps = dumps
        self.queue_recv = aioprocessing.AioQueue()
        self.queue_send = aioprocessing.AioQueue()

        self.active = True
        self.websocket = None
        self.process = None
Code Example #3
File: subrpc.py  Project: luciotorre/dataflock
    def start(self):
        self.pending_cmds = {}
        self.channel, client_channel = aioprocessing.AioPipe()
        self.stdout_q = sout = aioprocessing.AioQueue()
        self.stderr_q = serr = aioprocessing.AioQueue()
        self.listener = asyncio.ensure_future(self.listen_task())
        loop = asyncio.get_event_loop()
        kernel = self.slave(client_channel, sout, serr)
        self.process = p = aioprocessing.AioProcess(target=kernel.start)
        p.start()
Code Example #4
    def __init__(self, enabled=True):
        super(ProcessingQueueNode, self).__init__(enabled)

        self.read_queue = aioprocessing.AioQueue()
        self.write_queue = aioprocessing.AioQueue()
        self.lock = aioprocessing.AioLock()
        self.exit_event = aioprocessing.AioEvent()

        self.process = aioprocessing.AioProcess(
            target=self.processor_heavy_fn,
            args=(self.exit_event, self.lock, self.write_queue,
                  self.read_queue))
Code Example #5
File: supervisor.py  Project: N0omB/aTLAS_host
    def __init__(self, ip_address, max_agents, director_hostname, connector,
                 logger_str):
        self.ip_address = ip_address
        self.director_hostname = director_hostname
        self.max_agents = max_agents
        self.agents_in_use = 0
        self.takes_new_scenarios = True
        self.scenario_runs = {}
        self.logger_str = logger_str
        # setup multiprocessing environment
        self.send_queue = aioprocessing.AioQueue()
        self.manager = multiproc.Manager()
        self.pipe_dict = self.manager.dict()
        self.receive_pipe, self.pipe_dict[
            "supervisor"] = aioprocessing.AioPipe(False)
        # setup logger semaphores for all possible scenario runs
        self.logger_semaphores = [{
            "semaphore": self.manager.Semaphore(1),
            "used_by": ""
        } for i in range(max_agents)]
        # setup observations_done lists
        self.observations_done = [{
            "list": self.manager.list(),
            "used_by": ""
        } for i in range(max_agents)]
        # get correct connector to director
        module = importlib.import_module(
            "connectors." + re.sub("([A-Z])", "_\g<1>", connector).lower()[1:])
        connector_class = getattr(module, connector)
        self.connector = connector_class(director_hostname, max_agents,
                                         self.send_queue, self.pipe_dict)
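
The last few lines above derive the connector's module path from its class name by inserting an underscore before each capital letter, lowercasing the result, and dropping the leading underscore. A quick illustration with a hypothetical class name (not taken from the project):

import re

connector = "WebsocketConnector"  # hypothetical class name, for illustration only
module_path = "connectors." + re.sub("([A-Z])", r"_\g<1>", connector).lower()[1:]
print(module_path)  # -> connectors.websocket_connector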
Code Example #6
    def test_blocking_put(self):
        q = aioprocessing.AioQueue()

        async def queue_put():
            await q.coro_put(1)

        self.loop.run_until_complete(queue_put())
        self.assertEqual(q.get(), 1)
Code Example #7
File: queue_test.py  Project: yutiansut/aioprocessing
    def test_blocking_put(self):
        q = aioprocessing.AioQueue()

        @asyncio.coroutine
        def queue_put():
            yield from q.coro_put(1)

        self.loop.run_until_complete(queue_put())
        self.assertEqual(q.get(), 1)
Code Example #8
    def test_put_get(self):
        q = aioprocessing.AioQueue()
        val = 1
        p = Process(target=queue_put, args=(q, val))

        async def queue_get():
            ret = await q.coro_get()
            self.assertEqual(ret, val)

        p.start()
        self.loop.run_until_complete(queue_get())
        p.join()
Code Example #9
    def __init__(self, loop, pool, func, consumer, start_event,
                 terminate_event):
        self.start_event = start_event
        self.terminate_event = terminate_event
        self.pool = pool
        self.func = func
        self.queue = aioprocessing.AioQueue()
        self.lock = aioprocessing.AioLock()
        self._event = aioprocessing.AioEvent()
        self.consumer = consumer
        self.loop = loop
        self.process = aioprocessing.AioProcess(
            target=GeneratorWorker.func_run,
            args=(self.loop, self.queue, self.lock, self._event, self.func))
Code Example #10
    def test_get_put(self):
        q = aioprocessing.AioQueue()
        e = Event()
        val = 2

        async def queue_put():
            await q.coro_put(val)

        p = Process(target=queue_get, args=(q, e))
        p.start()
        self.loop.run_until_complete(queue_put())
        e.wait()
        out = q.get()
        p.join()
        self.assertEqual(out, val)
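
The queue_put and queue_get helpers used as Process targets in Examples #8 and #10 are defined elsewhere in queue_test.py and are not shown above. For the assertions to pass, they would need to look roughly like this (a sketch, not the project's exact code):

def queue_put(q, val):
    # Child-process side of test_put_get: a plain blocking put that the
    # parent then awaits with coro_get().
    q.put(val)


def queue_get(q, e):
    # Child-process side of test_get_put: read the value the parent put,
    # put it back so the parent can check it, then signal completion.
    val = q.get()
    q.put(val)
    e.set()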
Code Example #11
def hibike_process(bad_things_queue, state_queue, pipe_from_child):
    """
    Run the main hibike process.
    """
    pipe_from_child = aioprocessing.AioConnection(pipe_from_child)
    # By default, AioQueue instantiates a new Queue object, but we
    # don't want that.
    state_queue = aioprocessing.AioQueue(context=QueueContext(state_queue))
    bad_things_queue = aioprocessing.AioQueue(
        context=QueueContext(bad_things_queue))

    devices = {}
    batched_data = {}
    event_loop = asyncio.get_event_loop()
    error_queue = asyncio.Queue(loop=event_loop)

    event_loop.create_task(batch_data(batched_data, state_queue, event_loop))
    event_loop.create_task(
        hotplug_async(devices, batched_data, error_queue, state_queue,
                      event_loop))
    event_loop.create_task(
        dispatch_instructions(devices, bad_things_queue, state_queue,
                              pipe_from_child, event_loop))
    # start event loop
    if USE_PROFILING:
        try:
            import yappi
            yappi.start()
            event_loop.create_task(
                print_profiler_stats(event_loop, PROFILING_PERIOD))
        except ImportError:
            print(
                "Unable to import profiler. Make sure you installed with the '--dev' flag."
            )

    event_loop.run_forever()
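
The comment above notes that AioQueue normally creates a new Queue, and that this code avoids that by passing a context built from an existing queue. QueueContext itself is not shown in this excerpt; a minimal sketch of such a wrapper, assuming AioQueue(context=...) obtains its underlying queue by calling context.Queue():

class QueueContext:
    # Hypothetical stand-in for the wrapper used above: handing back the
    # queue we already have makes AioQueue(context=...) wrap it instead of
    # constructing a fresh multiprocessing.Queue.
    def __init__(self, queue):
        self._queue = queue

    def Queue(self, *args, **kwargs):
        return self._queue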
Code Example #12
File: slave.py  Project: rrader/aio-csp-python
    def __init__(self):
        self._coro_queue = aioprocessing.AioQueue()
        self._loop = CSPSlaveLoop()
        self._process = multiprocessing.Process(target=self.main)
        self._process.start()
Code Example #13
args = parser.parse_args()

host = args.listen_address
port = args.listen_port
upstreams = args.upstreams
cache_size = args.max_cache_size
active = args.active
timeout = args.timeout

# Basic diagram
#           Q           Q
# listener -> cache {} -> forwarder
#           Q           Q

# Queue for listener to post requests and get responses
cache_request = aiomp.AioQueue()
cache_response = aiomp.AioQueue()
forwarder_request = aiomp.AioQueue()
forwarder_response = aiomp.AioQueue()


def main():
    # Setup logging
    logging.basicConfig(level='INFO', format='[%(levelname)s] %(message)s')

    # Setup resolver cache
    workers = []
    cache = DnsLruCache(cache_size)
    wait_table = DnsWaitTable()
    # p4 = mp.Process(target=echo_worker, args=(forwarder_request, forwarder_response), daemon=True)
    # workers.append(p4)
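
The diagram comment above describes listener, cache, and forwarder stages connected by queues. The echo_worker referenced in the commented-out line is not shown; a worker in that spirit would simply move items from a request queue to its response queue, for example (hypothetical sketch):

def echo_worker(request_queue, response_queue):
    # Runs in its own process: blocking get on one queue, blocking put on
    # the other; a None sentinel shuts the worker down.
    while True:
        item = request_queue.get()
        if item is None:
            break
        response_queue.put(item)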
Code Example #14
    def __init__(self):
        self.log = get_logger("NetworkReactor")

        self.q = aioprocessing.AioQueue()
        self.reactor = ReactorCore(queue=self.q)
        self.reactor.start()
Code Example #15
    def __init__(self):
        self.sq = aioprocessing.AioQueue()
        self.rq = asyncio.Queue()
Code Example #16
        if result is None:
            break
        print("Got result {}".format(result))
    await p.coro_join()


async def example2(queue, event, lock):
    await event.coro_wait()
    async with lock:
        await queue.coro_put(78)
        await queue.coro_put(None)  # Shut down the worker


if __name__ == "__main__":
    loop = asyncio.get_event_loop()
    queue = aioprocessing.AioQueue()
    lock = aioprocessing.AioLock()
    event = aioprocessing.AioEvent()
    tasks = [
        asyncio.ensure_future(example(queue, event, lock)),
        asyncio.ensure_future(example2(queue, event, lock)),
    ]
    loop.run_until_complete(asyncio.wait(tasks))
    loop.close()

# endpoints = [1,2,2,3]
#
# queue_list = []
# for i, item in enumerate(endpoints):
#     queue_list.append(aioprocessing.AioQueue)
#
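
Example #16 begins partway through the example() coroutine, so the worker function and the process p it joins are not shown. A hypothetical reconstruction of the missing opening, with an illustrative worker that feeds the queue from a child AioProcess using plain blocking calls:

import aioprocessing


def worker(queue, event, lock, items):
    # Hypothetical child-process function: ordinary blocking calls on the
    # aioprocessing objects, exactly as with plain multiprocessing objects.
    with lock:
        event.set()
        for item in items:
            queue.put(item + 5)


async def example(queue, event, lock):
    items = [1, 2, 3, 4, 5]  # illustrative work items
    p = aioprocessing.AioProcess(target=worker, args=(queue, event, lock, items))
    p.start()
    while True:
        result = await queue.coro_get()
        if result is None:
            break
        print("Got result {}".format(result))
    await p.coro_join()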
Code Example #17
    def __init__(self,
                 width,
                 height,
                 address=("10.76.76.1", 80),
                 enabled=True,
                 enable_images=True):
        super(WebsiteClient, self).__init__(enabled)
        # http://user:[email protected]/api/robot/rightcam

        self.address = address

        self.requested_width = width
        self.requested_height = height

        self.width = width
        self.height = height
        self.num_frames = 0

        self.reader = None
        self.writer = None

        self.enable_images = enable_images

        self.response_start_header = b'\xbb\x08'
        self.message_start_header = b'\xde\xad\xbe\xef'
        self.frame_len = 4
        self.timestamp_len = 8
        self.width_len = 2
        self.height_len = 2
        self.endian = 'big'

        self.chunk_size = int(self.width * self.height / 2)

        self.fps = 30.0
        self.length_sec = 0.0

        self.fps_sum = 0.0
        self.fps_avg = 30.0
        self.prev_t = None

        self.credentials = base64.b64encode(b'robot:naboris').decode('ascii')

        # self.manager = aioprocessing.AioSyncManager()

        self.connection = HTTPConnection("%s:%s" %
                                         (self.address[0], self.address[1]))

        self.headers = {
            'Content-type': 'image/jpeg',
            'Authorization': 'Basic %s' % self.credentials
        }

        if self.enable_images:
            self.connection.request("GET",
                                    "/api/robot/rightcam_meta",
                                    headers=self.headers)
            response = self.connection.getresponse()
        else:
            response = None

        self.image_process = aioprocessing.AioProcess(
            target=self.retrieve_images, args=(response, ))
        self.image_queue = aioprocessing.AioQueue()

        self.connection.request("GET", "/cmd", headers=self.headers)
        response = self.connection.getresponse()

        self.command_process = aioprocessing.AioProcess(
            target=self.retrieve_commands, args=(response, ))
        self.command_queue = aioprocessing.AioQueue()

        self.exit_event = aioprocessing.AioEvent()

        self.command_service_tag = "commands"
        self.define_service(self.command_service_tag, str)
Code Example #18
    def setUp(self):
        super(BasicHotplugTests, self).setUp()
        self.devices = {}
        self.error_queue = asyncio.Queue(loop=self.loop)
        self.state_queue = aioprocessing.AioQueue()
Code Example #19
parser.add_argument('-p', '--listen-port', type=int, default=53,
					help='port to listen on for DNS over HTTPS requests (default: %(default)s)')
parser.add_argument('-u', '--upstreams', nargs='+', default=['https://1.1.1.1/dns-query', 'https://1.0.0.1/dns-query'],
					help='upstream servers to forward DNS queries and requests to (default: %(default)s)')
parser.add_argument('-t', '--tcp', action='store_true', default=False,
					help='serve TCP based queries and requests along with UDP (default: %(default)s)')
args = parser.parse_args()

host = args.listen_address
port = args.listen_port
upstreams = args.upstreams

headers = {'accept': 'application/dns-message', 'content-type': 'application/dns-message'}
conns = []

request_queue = aioprocessing.AioQueue()
response_queue = aioprocessing.AioQueue()


def main():
	# Setup logging
	logging.basicConfig(level='INFO', format='[%(levelname)s] %(message)s')

	# Setup event loop
	loop = asyncio.get_event_loop()

	# Setup UDP server
	logging.info('Starting UDP server listening on: %s#%d' % (host, port))
	udp_listen = loop.create_datagram_endpoint(UdpDohProtocol, local_addr = (host, port))
	udp, protocol = loop.run_until_complete(udp_listen)