Example #1
async def run_client(self, sock, address):
    response_queue = Queue(maxsize=1) if self.parallel else None
    try:
        async with sock:
            data_as_str = ''
            while True:
                rawdata = await sock.recv(self.buffer_size)
                if not rawdata:
                    return
                data_as_str += rawdata.decode('utf-8').strip()
                try:
                    request = json.loads(data_as_str)
                except ValueError:
                    continue
                response = await self.memoized_handler(
                    request, response_queue=response_queue)
                data_as_str = ''
                await sock.sendall(response.encode('utf-8'))
    except CancelledError:
        await sock.close()
Example #2
            for one_proc_id in all_adb_process_ids:
                try:
                    psutil.Process(one_proc_id).terminate()
                except Exception:
                    logger.exception('Error:')
        except Exception:
            logger.exception('Error:')

    def free(self):
        self.stop_all_back_procs()
        self.stop_back_handler(force=True)
        logger.info('call free .....')


# Define the message queue and key shared data
messages = Queue()
subscribers = set()
app_vmjob_list = set()


# Dispatch task that forwards incoming messages to subscribers
async def dispatcher():
    async for msg in messages:
        for q in list(subscribers):
            await q.put(msg)


# Publish a message
async def publish(msg):
    await messages.put(msg)
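
A subscriber-side sketch for the dispatcher/publish pair above, assuming it runs in the same module: each consumer owns a private Queue, registers it in the shared 'subscribers' set so dispatcher() fans messages out to it, and deregisters on exit. The subscriber coroutine is illustrative, not from the original.

# Hypothetical consumer for the pub-sub pattern above.
async def subscriber(name):
    q = Queue()
    subscribers.add(q)
    try:
        while True:
            msg = await q.get()
            print(f'{name} got: {msg}')
    finally:
        subscribers.discard(q)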
Example #3
def __init__(self):
    self.outgoing = Queue()
    self.incoming = Queue()
    self.closure = None
    self.closing = Event()
Example #4
class AsyncTcpCallbackServer(object):
    '''
    1) receives json as utf-8 encoded bytestream
    2) sends json object to callback as native python object (dict)
        - callback should return a string type
    3) encodes string as utf-8 encoded bytestring and sends to client
    '''
    def __init__(
        self,
        address,
        port,
        request_handler,
        memoized=True,
        buffer_size=1 << 13,
        parallel=True,
        cpus=None,
        search_path=None,
        worker_subprocess_timeout=600,
    ):
        '''
            address:        the address the listening socket will bind to.
            port:           the port the listening socket will bind to.
            request_handler: an importable async function handling the received request as a dict.
                            If 'parallel' is True, this can be either the dot-path to the function or the function itself.
                            You may need to provide an additional search path in 'search_path' if the handler can't be found. 
                            This usually happens if your handler is defined in a script but not in a library.
            memoized:       True if responses are to be cached. Defaults to True.
            buffer_size:    the maximum amount of data to be received at once from the client connection. Defaults to 8KB.
            parallel:       if True, handle all received requests in parallel, using a pool of processes. Defaults to True.
            cpus:           number of processes if 'parallel' is True. Defaults to None which means match the number of CPUs on the host machine.
            search_path:    path of the module containing the 'request_handler' definition. Will be prepended to sys.path when workers boot.
            worker_subprocess_timeout: timeout in seconds after which a worker subprocess is killed if no new request is queued up.
                            Defaults to 600 seconds.
        '''
        self.address = address
        self.port = port
        self.buffer_size = buffer_size
        self.memoized = memoized
        self.request_handler = request_handler
        self._saved_responses = {}
        self.parallel = parallel
        self.cpus = cpus or cpu_count()
        if parallel:
            self.requests = Queue()
            self.authkey = token_bytes()
            self.search_path = search_path
            # tag the socket path with the pid so we can run multiple servers,
            # as is the case when running the tests in a container that also runs the official server.
            self.unix_socket_path = f'/var/run/asynctcp.server.channel.{current_process().pid}'
            self.channel = Channel(self.unix_socket_path,
                                   family=socket.AF_UNIX)
            self.subprocess_launch_request = Queue(maxsize=self.cpus)
            self.worker_subprocess_timeout = worker_subprocess_timeout

    async def run_client(self, sock, address):
        response_queue = Queue(maxsize=1) if self.parallel else None
        try:
            async with sock:
                data_as_str = ''
                while True:
                    rawdata = await sock.recv(self.buffer_size)
                    if not rawdata:
                        return
                    data_as_str += rawdata.decode('utf-8').strip()
                    try:
                        request = json.loads(data_as_str)
                    except ValueError:
                        continue
                    response = await self.memoized_handler(
                        request, response_queue=response_queue)
                    data_as_str = ''
                    await sock.sendall(response.encode('utf-8'))
        except CancelledError:
            await sock.close()

    async def memoized_handler(self, request, response_queue=None):
        if self.memoized:
            hashable_request = str(request).strip()
            if hashable_request not in self._saved_responses:
                if response_queue:
                    await self.requests.put((response_queue, request))
                    # next available 'worker' task will put the response on the queue.
                    self._saved_responses[
                        hashable_request] = await response_queue.get()
                    await response_queue.task_done()
                else:
                    self._saved_responses[
                        hashable_request] = await self.request_handler(request)
            return self._saved_responses[hashable_request]
        else:
            if response_queue:
                await self.requests.put((response_queue, request))
                response = await response_queue.get()
                await response_queue.task_done()
            else:
                response = await self.request_handler(request)
            return response

    async def worker(self, id, subprocess_timeout=5):
        '''
        There is exactly one 'worker' task per subprocess.
        This is the link between one client connection and one subprocess.
        It waits for a (response_queue, request) from the 'self.requests' queue,
        sends the request to the subprocess,
        waits to receive the response and puts it in the client's response queue.

        The subprocess is started on-demand, so as not to consume CPU and memory when the server is idle.
        '''
        subprocess_task = None
        subprocess_connection = None
        subprocess_launch_response = Queue(
            maxsize=1
        )  # receives the (connection, task) for a requested worker subprocess
        try:
            while True:
                if subprocess_connection:
                    try:
                        response_queue, request = await timeout_after(
                            subprocess_timeout, self.requests.get())
                    except TaskTimeout:
                        await subprocess_task.cancel()
                        await subprocess_connection.close()
                        subprocess_connection = None
                        subprocess_task = None
                        continue
                else:
                    response_queue, request = await self.requests.get()
                    await self.subprocess_launch_request.put(
                        (id, subprocess_launch_response))
                    subprocess_connection, subprocess_task = await subprocess_launch_response.get()
                await subprocess_connection.send(request)
                response = await subprocess_connection.recv()
                await response_queue.put(response)
                await self.requests.task_done()
        except CancelledError:
            if subprocess_task:
                await subprocess_task.cancel()
            if subprocess_connection:
                await subprocess_connection.close()

    async def subprocess_launcher(self):
        '''
        Launches a subprocess whenever a 'worker' task requests it.
        '''
        async with self.channel:
            while True:
                try:
                    worker_id, response = await self.subprocess_launch_request.get()
                    subprocess_task = await aside(worker_main, worker_id,
                                                  self.channel, self.authkey,
                                                  self.request_handler,
                                                  self.search_path)
                    subprocess_connection = await self.channel.accept(
                        authkey=self.authkey)
                    await response.put(
                        (subprocess_connection, subprocess_task))
                except CancelledError:
                    break

    async def run_server(self):
        try:
            if self.parallel:
                subprocess_launcher_task = await spawn(
                    self.subprocess_launcher())
                worker_tasks = []
                for id in range(self.cpus):
                    worker_tasks.append(await spawn(
                        self.worker(id, self.worker_subprocess_timeout)))
            async with curiosocket.socket(
                    curiosocket.AF_INET,
                    curiosocket.SOCK_STREAM) as listening_socket:
                listening_socket.setsockopt(curiosocket.SOL_SOCKET,
                                            curiosocket.SO_REUSEADDR, True)
                listening_socket.bind((self.address, self.port))
                listening_socket.listen(100)
                while True:
                    client_socket, remote_address = await listening_socket.accept()
                    await spawn(
                        self.run_graceful_client(client_socket,
                                                 remote_address))
        except CancelledError:
            if self.parallel:
                await wait(worker_tasks).cancel_remaining()
                await subprocess_launcher_task.cancel()
                if exists(self.unix_socket_path):
                    remove(self.unix_socket_path)

    async def run_graceful_client(self, sock, address):
        client_task = await spawn(self.run_client(sock, address))
        await SignalSet(SIGINT, SIGTERM).wait()
        await client_task.cancel()

    async def run_graceful_server(self):
        server_task = await spawn(self.run_server())
        await SignalSet(SIGINT, SIGTERM).wait()
        await server_task.cancel()

    def run(self):
        with catch_warnings():
            filterwarnings('ignore', category=DeprecationWarning)
            return run(self.run_graceful_server())
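
A minimal usage sketch for AsyncTcpCallbackServer as defined above; the echo handler, host, and port are illustrative, not from the original:

# Hypothetical usage: the handler receives the decoded JSON request as
# a dict and must return a string, which the server sends back to the
# client UTF-8 encoded.
import json

async def echo_handler(request):
    return json.dumps({'echo': request})

server = AsyncTcpCallbackServer(
    '127.0.0.1', 25000, echo_handler,
    memoized=False,   # recompute the response for every request
    parallel=False,   # handle requests in-process, without the worker pool
)
server.run()          # blocks until SIGINT/SIGTERM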
Example #5
async def declare_queue(self, channel_number, name):
    queue_declare = pika.spec.Queue.Declare(queue=name)
    frame_value = pika.frame.Method(channel_number, queue_declare)
    await self.sock.sendall(frame_value.marshal())
    await self.assert_recv_method(pika.spec.Queue.DeclareOk)
    return Queue(name=name)
Example #6
async def _run_all(ble, system):
    """Curio run loop 
    """
    print('inside curio run loop')
    # Instantiate the Bluetooth LE handler/queue
    ble_q = BLEventQ(ble)
    # The web client out_going queue
    web_out_queue = Queue()
    # Instantiate socket listener
    #task_socket = await spawn(socket_server, web_out_queue, ('',25000))
    task_tcp = await spawn(bricknil_socket_server, web_out_queue, ('', 25000))
    await task_tcp.join()

    # Call the user's system routine to instantiate the processes
    await system()

    hub_tasks = []
    hub_peripheral_listen_tasks = []  # Need to cancel these at the end

    # Run the bluetooth listen queue
    task_ble_q = await spawn(ble_q.run())

    # Connect all the hubs first before enabling any of them
    for hub in Hub.hubs:
        hub.web_queue_out = web_out_queue
        task_connect = await spawn(ble_q.connect(hub))
        await task_connect.join()

    for hub in Hub.hubs:
        # Start the peripheral listening loop in each hub
        task_listen = await spawn(hub.peripheral_message_loop())
        hub_peripheral_listen_tasks.append(task_listen)

        # Need to wait here until all the ports are set
        # Use a faster timeout the first time (for speeding up testing)
        first_delay = True
        for name, peripheral in hub.peripherals.items():
            while peripheral.port is None:
                hub.message_info(
                    f"Waiting for peripheral {name} to attach to a port")
                if first_delay:
                    first_delay = False
                    await sleep(0.1)
                else:
                    await sleep(1)

        # Start each hub
        task_run = await spawn(hub.run())
        hub_tasks.append(task_run)

    # Now wait for the tasks to finish
    ble_q.message_info('Waiting for hubs to end')

    for task in hub_tasks:
        await task.join()
    ble_q.message_info('Hubs end')

    for task in hub_peripheral_listen_tasks:
        await task.cancel()
    await task_ble_q.cancel()

    # Print out the port information in debug mode
    for hub in Hub.hubs:
        if hub.query_port_info:
            hub.message_info(pprint.pformat(hub.port_info))
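
A sketch of how a loop like _run_all is typically started under curio; the start() wrapper and its arguments are hypothetical stand-ins for whatever BLE backend and system-builder coroutine the surrounding program supplies:

# Hypothetical entry point: hand _run_all to curio's kernel.
from curio import run

def start(ble, my_system):
    run(_run_all(ble, my_system))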
Example #7
def __init__(self, *args, **kwargs):
    self.queue = Queue()
    self.subscribers = set()
Example #8
class Cmd:

    plugin_list = {}
    stage = ''
    pre_step = []
    post_step = []
    inputs_from = []
    N_THREADS = 1
    msg_queue = Queue()

    def __init__(self, config, skip=False):
        self.config = config
        self._find_executable()
        self.skip = skip
        self.queue = {}
        logging.debug(f'{self.name} skip={skip}')

    @staticmethod
    def register_plugin(new_plugin):
        # First do the registration of this flow step with the registry
        stage_plugins = Cmd.plugin_list.setdefault(new_plugin.stage, {})
        stage_plugins[new_plugin.name] = new_plugin
        logger.debug(f'registered {new_plugin} to plugin_list')

    def _find_executable(self):
        os_ = platform.system()

        if os_ == 'Linux':
            logging.debug('linux')
            #raise UnsupportedOSError
        elif os_ == 'Windows':
            logging.debug('Windows')
            raise UnsupportedOSError
        elif os_ == 'Darwin':
            logging.debug('Mac')
        else:
            raise UnsupportedOSError

        if 'program' in self.config:
            self.executable = self.config['program']
        else:
            #raise UnknownExecutableError(f'Could not find executable for {self.__class__.__name__}')
            raise UnknownExecutableError(
                f'Could not find executable for {self.__class__.name}')

    def _get_filename_base(self, path):
        """ Return the base name without the extension
        """
        basename, ext = os.path.splitext(os.path.basename(path))
        return basename

    def _get_filename_ext(self, path):
        """ Return the extension without anything else 
        """
        basename, ext = os.path.splitext(os.path.basename(path))
        return ext

    def _change_ext(self, path, new_ext):
        """Take the file name as is and return an abspath with the extension changed
        """
        assert not new_ext.startswith(
            '.'), 'New extension must not start with a period'
        directory = os.path.dirname(os.path.abspath(path))
        basename = self._get_filename_base(path)

        return os.path.abspath(os.path.join(directory,
                                            f'{basename}.{new_ext}'))

    def iterate_with_progress(self, items, total=None):
        """Yield each item while updating a tqdm progress bar."""
        if not total:
            total = len(items)
        if self.skip:
            desc = f'{self.name} [skipped]'
        else:
            desc = self.name
        with tqdm(total=total, desc=desc) as pbar:
            for item in items:
                yield item
                pbar.update(1)

    async def _run_command(self, cmd):
        logger.debug(cmd)
        output = await subprocess.check_output(cmd,
                                               shell=True,
                                               stderr=subprocess.STDOUT)
        return output

    async def _get_aws_signed_url(self, url):
        response = await asks_session.get(url, retries=3)
        response.raise_for_status()
        return response.json()

    def _aws_post_file(self, filename, signed_response):
        # This is the requests version just for posterity (run it in curio.run_in_thread)
        upload_url = signed_response['url']
        upload_data = signed_response['fields']
        upload_filename = upload_data['key']
        with open(filename, 'rb') as f:
            files = {'file': (upload_filename, f)}
            req = Request('POST', upload_url, data=upload_data, files=files)
            prepared_req = req.prepare()
            http_response = session.send(prepared_req)
            http_response.raise_for_status()

    async def _aws_asks_post_file(self, filename, signed_response):
        upload_url = signed_response['url']
        upload_data = signed_response['fields']
        upload_filename = upload_data['key']

        data = upload_data
        data['file'] = Path(filename)
        http_response = await asks_session.post(upload_url, multipart=data)
        http_response.raise_for_status()

    @retry(sleep=curio.sleep,
           wait=wait_random_exponential(multiplier=1, max=60),
           stop=stop_after_delay(10))
    async def _aws_upload_file(self, url, filename):
        # Get signed URL for AWS S3
        # Post to signed URL
        #url_response = await curio.run_in_thread(self.get_aws_signed_url, url)
        url_response = await self._get_aws_signed_url(url)

        # Not sure why I can't use asks here to post the files
        #response = await curio.run_in_thread(self._aws_post_file, filename, url_response)
        response = await self._aws_asks_post_file(filename, url_response)
        return url_response

    @retry(sleep=curio.sleep,
           wait=wait_random_exponential(multiplier=1, max=60),
           stop=stop_after_delay(10))
    async def _aws_run_command(self, url, input_files, cmd, output_filenames):
        """Post to aws to run the specified commands with input files, and return
           the output files
        """
        json = {
            'input_files': input_files,
            'cmd': cmd,
            'output_files': [os.path.basename(fn) for fn in output_filenames],
        }
        response = await asks_session.post(url, json=json)
        response.raise_for_status()
        return response.json()

    async def run_command_aws(self, cmd, input_filenames, output_filenames,
                              url):
        logger.debug(f'Running in aws: {cmd}')
        endpoint_run_ocr = f'{url}/ocr'
        endpoint_get_signed_url = f'{url}/geturl'

        # First, upload the input_files
        input_files = []
        for input_filename in input_filenames:
            logger.debug(f'Uploading file {input_filename} to S3')
            url_response = await self._aws_upload_file(endpoint_get_signed_url,
                                                       input_filename)
            input_files.append((url_response['fields']['key'],
                                os.path.basename(input_filename)))

        logging.info(f'Posting to aws {cmd}')
        response_dict = await self._aws_run_command(endpoint_run_ocr,
                                                    input_files, cmd,
                                                    output_filenames)
        logging.debug(response_dict['message'])

        #print (list(response_dict['output_files'].keys()))
        for output_filename in output_filenames:
            contents = response_dict['output_files'][os.path.basename(
                output_filename)]
            b64 = base64.b64decode(contents)
            async with aopen(str(output_filename), 'wb') as f:
                await f.write(b64)

    async def add_to_queue(self, output_filename, task_func, *task_args):
        self.queue[output_filename] = (task_func, *task_args)

    async def add_message(self, msg):
        #msg = f'{self.name}: {msg}'
        msg = (self.name, msg)
        await Cmd.msg_queue.put(msg)

    async def get_messages(self):
        msgs = []
        while not Cmd.msg_queue.empty():
            msg = await Cmd.msg_queue.get()
            msgs.append(msg)
            await Cmd.msg_queue.task_done()
        return msgs

    async def spawn_with_update(self, pbar, task):
        await task[0](*task[1:])
        pbar.update(1)

    async def run_queue(self):
        t_list = []  # All currently executing tasks
        output_filenames = []
        n = len(self.queue)
        if self.skip:
            desc = f'{self.name} [skipped]'
        else:
            desc = self.name

        with tqdm(total=n, desc=desc) as pbar:
            for output_filename, task in self.queue.items():
                output_filenames.append(output_filename)
                if not self.skip:
                    t = await spawn(self.spawn_with_update, pbar, task)
                    t_list.append(t)
                    if len(t_list) == self.N_THREADS:
                        for t in t_list:
                            await t.join()
                        t_list = []
            if not self.skip:
                # Flush out any remaining jobs in the queue (when N_THREADS does not evenly divide the task count)
                for t in t_list:
                    await t.join()
        self.queue = {}  # Empty all the jobs
        return ItemList([os.path.abspath(p) for p in output_filenames])

    def error(self, msg):
        print(f'ERROR: {msg}')
        sys.exit(-1)

    async def write_yaml_to_file(self, filename, python_dict):
        with open(filename, 'w') as f:
            yaml.dump(python_dict, f)

    async def read_yaml_from_file(self, filename):
        with open(filename) as f:
            d = yaml.safe_load(f)  # yaml.load without a Loader is deprecated
        return d
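
A hypothetical subclass sketch showing the add_to_queue/run_queue pattern above: queue one coroutine per output file, then drain the queue N_THREADS at a time under a tqdm progress bar. The plugin name and the convert() body are illustrative, not from the original:

# Hypothetical Cmd subclass; 'convert' stands in for any external tool.
class ConvertCmd(Cmd):
    name = 'convert'
    stage = 'preprocess'
    N_THREADS = 4

    async def convert(self, input_filename, output_filename):
        # Shell out through the helper defined above.
        await self._run_command(f'convert {input_filename} {output_filename}')

    async def process(self, input_filenames):
        # Queue one conversion per file, keyed by its output path...
        for input_filename in input_filenames:
            output_filename = self._change_ext(input_filename, 'png')
            await self.add_to_queue(output_filename, self.convert,
                                    input_filename, output_filename)
        # ...then run them N_THREADS at a time with a progress bar.
        return await self.run_queue()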