Example #1
    def __init__(self, max_size=1000, loop=None):  # avoid evaluating get_event_loop() in the default argument
        """
        Constructor

        :param max_size: Maximum size of the queue
        :type max_size: int
        """
        self.__is_closed = False
        self.__jq = Queue(maxsize=max_size)
        self.__queue = self.__jq.async_q
Example #2
    def __init__(self, messages: EnumMeta):
        if not isinstance(messages, EnumMeta):
            raise TypeError("Messages have to be derived from an Enum type")

        self.messages_type = type(messages)
        self.messages = messages

        self.handlers = {}
        for msg in messages:
            self.handlers[msg] = []

        self.queue = Queue()
        self.results = Queue()
        self.join_task = None
Example #3
class BlockingQueue:
    """
    Blocking queue backed by a janus Queue, which handles the locking internally.
    """
    def __init__(self, max_size=1000, loop=None):  # avoid evaluating get_event_loop() in the default argument
        """
        Constructor

        :param max_size: Maximum size of the queue
        :type max_size: int
        """
        self.__is_closed = False
        self.__jq = Queue(maxsize=max_size)
        self.__queue = self.__jq.async_q

    def close(self):
        self.__is_closed = True
        self.__jq.close()

    async def put(self, item, block=False):
        """
        Put into the queue

        :param item: The item to put into the queue
        :type item: object
        :param block: Whether to block on the queue
        :type block: bool
        """
        if not self.__is_closed:
            if block or not self.__queue.full():
                await self.__queue.put(item)
            else:
                logging.warning("Queue Full")

    async def get(self, block=True):
        """
        Get from the queue asynchronously

        :param block: Whether to block on the queue
        :type block: bool
        :return: An item from the queue
        :rtype: object
        """
        item = None
        if not self.__is_closed:
            if block or not self.__queue.empty():
                item = await self.__queue.get()
        return item
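
A short usage sketch for the class above (not part of the original example): the producer/consumer coroutines and the main() entry point are illustrative, and the queue is created inside a running event loop, as janus requires.

import asyncio


async def main():
    # Constructed while the loop is running, so the underlying janus Queue can bind to it.
    queue = BlockingQueue(max_size=10)

    async def producer():
        for i in range(5):
            await queue.put(i)

    async def consumer():
        for _ in range(5):
            print("got", await queue.get())

    await asyncio.gather(producer(), consumer())
    queue.close()


asyncio.run(main())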
Example #4
    def __init__(self):
        # Set up the terminal
        self.root_window = initscr()
        try:
            # start_color() raises on terminals without color support
            start_color()
        except Exception:
            pass
        noecho()
        cbreak()
        self._init_color_pairs()
        locale.setlocale(locale.LC_ALL, '')
        self.root_window.keypad(True)

        # Curses' objects of course aren't thread-safe,
        # so we'll need a lock for every operation while
        # other threads are running.
        self.lock = Lock()
        # Those objects allow communication between threads
        # and coroutines, as well as between coroutines.
        self.input = Queue()
        self.output = Queue()

        self.threads = list()

        # This might cause the sound system to produce logs we
        # can't control; the solution is to let them be and then
        # draw on top of them later.
        self.sink = AudioSink()

        max_y, max_x = self.root_window.getmaxyx()
        self.max = (max_y, max_x)

        # A box to input things.
        # TODO: parametrize the hard-coded values.
        self.input_window = newwin(1, max_x - 2, max_y - 3, 1)
        rectangle(self.root_window, max_y - 4, 0, max_y - 2, max_x - 1)
        self.textbox = Textbox(self.input_window)

        # A box where to draw received messages.
        self.output_window = newwin(max_y - 5, max_x - 2, 0, 1)
        self.output_window.scrollok(True)

        # Draw what we just created.
        self.root_window.refresh()

        # Launch threads which update the interface and get the user's input
        self.get_input()
        self.add_messages()
Example #5
async def init_queue(app: web.Application) -> None:
    """
    Janus queues require the asyncio event loop to be running, so this creation needs to be done in a
    start-up handler rather than in the synchronous `server` function.

    :param app: Reference to the overall application.
    """
    app["updates_queue"] = Queue()
Example #6
    def __init__(self, bots):
        """
        :param bots: [*[login, youtube_key, server_id, voice_channel_id, text_channel_id]]
        """
        self.database = bot_database.BotDatabase()
        self.new_requests = []
        self.bots = []
        for number, bot_ in enumerate(bots, start=1):
            loop = bot.asyncio.new_event_loop()
            commands_queue = Queue(loop=loop)
            output_queue = Queue(loop=loop)
            Thread(target=bot.bot,
                   args=(loop, bot_[0], number, commands_queue.async_q,
                         output_queue.async_q, bot_[1], bot_[2], bot_[3],
                         bot_[4])).start()
            self.bots.append([
                commands_queue.sync_q, output_queue.sync_q, None,
                output_queue.sync_q.get(), number
            ])
Example #7
    def __init__(self, name, event_base_class=None):
        """Contructor of KytosEventBuffer receive the parameters below.

        Args:
            name (string): name of KytosEventBuffer.
            event_base_class (class): Class of KytosEvent.
        """
        self.name = name
        self._queue = Queue()
        self._event_base_class = event_base_class
        self._reject_new_events = False
Example #8
async def test_concurrency():
    # Database init from scratch

    iterations = 10

    # Feed and write
    async def produce_data(q):
        nonlocal iterations
        print('starting producing')
        for raw_data in range(iterations):
            await q.put(raw_data)
        print('finishing feeding data')
        return None

    def consume_data(q, i):
        print(f"{i} init")
        try:
            for datapoint in iter(q.get, None):
                print(f"{i} cycle")
                # Do some work
                sleep(1)
                q.task_done()
        except Exception as e:
            print('error is:')
            print(e)
            print('consumer exiting on error')
            raise
        print(f"{i} producer exit")

    loop = asyncio.get_running_loop()
    q = Queue(50000)
    producer = asyncio.create_task(produce_data(q.async_q))
    executor = concurrent.futures.ThreadPoolExecutor()
    consumers = [
        loop.run_in_executor(executor, consume_data, q.sync_q, x)
        for x in range(20)
    ]

    await asyncio.wait({producer})
    print('---- done producing')
    for _ in consumers:
        await q.async_q.put(None)
    await asyncio.wait({*consumers})
    for c in consumers:
        print('canceling')
        c.cancel()

    print('---- done consuming')
Example #9
async def do_work():
    q = Queue()
    with ProcessPoolExecutor(max_workers=4) as pool_executor:
        loop = asyncio.get_running_loop()
        tasks = []
        for i in range(4):
            print(f"Starting worker #{i}")
            tasks.append(loop.run_in_executor(pool_executor, do_proc_work_janus, q, i))
        print("Awaiting loop")
        done, pending = await asyncio.wait(
            {
                asyncio.create_task(sentinel_emitter(q)),
            }
        )
        print("Got from worker")
        print(done, pending)
        await asyncio.wait({*tasks})
Example #10
async def server(*, config: Config) -> t.AsyncIterator[Queue[Message]]:
    """Manage the LMTP server's lifetime.

    Args:
        config: application configuration.

    Yields:
        Queue of incoming mail.
    """
    msg_queue: Queue[Message] = Queue()
    controller = Controller(
        handler=LmtpHandler(config=config, queue=msg_queue),
        hostname=config.lmtp_host,
        port=config.lmtp_port,
    )
    controller.start()
    logger.info(f"listening on {config.lmtp_host}:{config.lmtp_port}")
    yield msg_queue
    controller.stop()
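
A hedged sketch of how such a server coroutine might be consumed; wrapping it with contextlib.asynccontextmanager and the surrounding main() coroutine are assumptions, not part of the original project.

import contextlib


async def main(config: Config) -> None:
    # Turn the async generator above into an async context manager,
    # so controller.stop() runs when the block exits.
    lmtp_server = contextlib.asynccontextmanager(server)
    async with lmtp_server(config=config) as msg_queue:
        while True:
            # janus exposes the asyncio side of the queue via async_q.
            message = await msg_queue.async_q.get()
            print("received message:", message)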
Example #11
    def __init__(self, in_port: mido.ports.BaseInput):
        self._in_port = in_port
        self._in_port.callback = self._new_message
        self._queue: Queue = Queue(256)  # We init this in launch command
Example #12
    def __init__(self):
        self.q = Queue()
Example #13
async def create_queue() -> Queue:
    return Queue()
Example #14
    def __init__(self, config: Config):
        self._config = config
        self._loop = asyncio.get_event_loop()
        self._shutdown = False
        self._get_task = None
        self._auto_reset_handles = {}
        self._state = None
        self._ha_state = None

        # Create LifeSOS base unit instance and attach callbacks
        self._baseunit = BaseUnit(self._config.lifesos.host,
                                  self._config.lifesos.port)
        if self._config.lifesos.password:
            self._baseunit.password = self._config.lifesos.password
        self._baseunit.on_device_added = self._baseunit_device_added
        self._baseunit.on_device_deleted = self._baseunit_device_deleted
        self._baseunit.on_event = self._baseunit_event
        self._baseunit.on_properties_changed = self._baseunit_properties_changed
        self._baseunit.on_switch_state_changed = self._baseunit_switch_state_changed

        # Create MQTT client instance
        self._mqtt = MQTTClient(client_id=self._config.mqtt.client_id,
                                clean_session=False)
        self._mqtt.enable_logger()
        self._mqtt.will_set(
            '{}/{}'.format(self._config.translator.baseunit.topic,
                           BaseUnit.PROP_IS_CONNECTED),
            str(False).encode(), QOS_1, True)
        self._mqtt.reconnect_delay_set(Translator.RECONNECT_MIN_DELAY,
                                       Translator.RECONNECT_MAX_DELAY)
        if self._config.mqtt.uri.username:
            self._mqtt.username_pw_set(self._config.mqtt.uri.username,
                                       self._config.mqtt.uri.password)
        if self._config.mqtt.uri.scheme == SCHEME_MQTTS:
            self._mqtt.tls_set()
        self._mqtt.on_connect = self._mqtt_on_connect
        self._mqtt.on_disconnect = self._mqtt_on_disconnect
        self._mqtt.on_message = self._mqtt_on_message
        self._mqtt_was_connected = False
        self._mqtt_last_connection = None
        self._mqtt_last_disconnection = None

        # Generate a list of topics we'll need to subscribe to
        self._subscribetopics = []
        self._subscribetopics.append(
            SubscribeTopic(
                '{}/{}'.format(self._config.translator.baseunit.topic,
                               Translator.TOPIC_CLEAR_STATUS),
                self._on_message_clear_status))
        self._subscribetopics.append(
            SubscribeTopic(
                '{}/{}/{}'.format(self._config.translator.baseunit.topic,
                                  Translator.TOPIC_DATETIME,
                                  Translator.TOPIC_SET),
                self._on_message_set_datetime))
        names = [BaseUnit.PROP_OPERATION_MODE]
        for name in names:
            self._subscribetopics.append(
                SubscribeTopic('{}/{}/{}'.format(
                    self._config.translator.baseunit.topic, name,
                    Translator.TOPIC_SET),
                               self._on_message_baseunit,
                               args=name))
        for switch_number, switch_config in self._config.translator.switches.items():
            if switch_config and switch_config.topic:
                self._subscribetopics.append(
                    SubscribeTopic('{}/{}'.format(switch_config.topic,
                                                  Translator.TOPIC_SET),
                                   self._on_message_switch,
                                   args=switch_number))
        if self._config.translator.ha_birth_topic:
            self._subscribetopics.append(
                SubscribeTopic(self._config.translator.ha_birth_topic,
                               self._on_ha_message))

        # Also create a lookup dict for the topics to subscribe to
        self._subscribetopics_lookup = \
            {st.topic: st for st in self._subscribetopics}

        # Create queue to store pending messages from our subscribed topics
        self._pending_messages = Queue()
Example #15
async def test_asyncpg():
    await init_db("test")

    iterations = 1000

    async def feed_data(q):
        data_source = generate_data(datetime.datetime(2020, 1, 1))
        nonlocal iterations
        counter = iterations
        print("starting feeding data")
        for raw_data in data_source:
            counter -= 1
            if counter < 0:
                print("finishing feeding data")
                break
            await q.put(raw_data)

    async def write_data(q):
        conn = await init_connection("test")
        try:
            while True:
                datapoint = await q.get()
                datapoint["timestamp"] = datetime.datetime.fromtimestamp(
                    datapoint["timestamp"]).replace(tzinfo=pytz.UTC)
                vals = [(key, f"${i + 1}", value)
                        for i, (key, value) in enumerate(datapoint.items())]
                keys, formats, values = zip(*vals)
                keys = ", ".join(keys)
                formats = ", ".join(formats)
                try:
                    await conn.execute(
                        f"""
                        INSERT INTO ticks({keys}) VALUES({formats})
                        ON CONFLICT (timestamp, session_id, data_type, label, funds) DO UPDATE
                        SET price=EXCLUDED.price,
                            volume=EXCLUDED.volume;
                    """,
                        *values,
                    )
                except asyncpg.exceptions.UniqueViolationError:
                    pass
                q.task_done()
        except asyncio.CancelledError:
            await conn.close()
            raise

    start = timer()
    ticks_q = Queue(50000)
    producer = asyncio.create_task(feed_data(ticks_q.async_q))
    consumers = [
        asyncio.create_task(write_data(ticks_q.async_q)) for _ in range(90)
    ]

    await producer
    print("---- done producing")

    await ticks_q.async_q.join()
    for c in consumers:
        c.cancel()

    print("---- done consuming")
    end1 = timer()
    elapsed_clean = round(end1 - start, 4)

    ticks_q = Queue(50000)
    producer = asyncio.create_task(feed_data(ticks_q.async_q))
    consumers = [
        asyncio.create_task(write_data(ticks_q.async_q)) for _ in range(90)
    ]
    await producer
    print("---- done producing")

    await ticks_q.async_q.join()
    for c in consumers:
        c.cancel()

    end2 = timer()
    elapsed_upsert = round(end2 - end1, 4)

    query = """
        SELECT
            time_bucket('{minutes} minutes', timestamp) AS time,
            first(price, timestamp) as open,
            max(price) as high,
            min(price) as low,
            last(price, timestamp) as close,
            sum(volume) as volume
        FROM ticks
        WHERE session_id = 123
        GROUP BY time
        ORDER BY time ASC;
    """

    conn = await init_connection("test")
    await conn.fetch(query.format(minutes=1))
    end3 = timer()
    elapsed_1min = round(end3 - end2, 4)
    await conn.fetch(query.format(minutes=5))
    end4 = timer()
    elapsed_5min = round(end4 - end3, 4)
    rows30 = await conn.fetch(query.format(minutes=30))
    end5 = timer()
    elapsed_30min = round(end5 - end4, 4)

    for row in rows30:
        print(
            f"{row['time']}\tO{row['open']}\tH{row['high']}\tL{row['low']}\tC{row['close']}\tV{row['volume']}"
        )
    await conn.close()

    print(
        f"{format_int(iterations)} datapoints, write from scratch: {elapsed_clean}(s) elapsed."
    )
    print(
        f"{format_int(iterations)} datapoints, on-conflict upsert: {elapsed_upsert}(s) elapsed."
    )
    print(f"1min OHLCV aggregation: {elapsed_1min}(s) elapsed.")
    print(f"5min OHLCV aggregation: {elapsed_5min}(s) elapsed.")
    print(f"30min OHLCV aggregation: {elapsed_30min}(s) elapsed.")
Example #16
async def create_queue() -> Queue:
    q = Queue()
    return q