def test_run_processes_due_to_interval():
    """The interval-driven poll processes events for registered topics only.

    Creates one event per topic, runs the app for ~2 seconds with a 1-second
    poll interval, and checks that only the FOO event (which has a handler)
    was processed. The event ids are stored in module globals so that other
    tests in this module can assert against the same rows.
    """
    global foo_event_id, bar_event_id

    connection = data_access.connect(DSN)
    with data_access.cursor(connection) as cursor:
        foo_event_id = data_access.create_event(cursor, Event(topic=FOO_TOPIC)).id
        bar_event_id = data_access.create_event(cursor, Event(topic=BAR_TOPIC)).id

    app = App(DSN, CHANNEL, interval=1)

    @app.register(FOO_TOPIC)
    def handler(context):
        pass

    deadline = time.time() + 2

    def run_until_deadline(app):
        # Keep the run loop alive for roughly two seconds of wall time.
        return time.time() < deadline

    app.run(should_continue=run_until_deadline)

    with data_access.cursor(app.connection) as cursor:
        foo_status = data_access.get_event_by_id(cursor, foo_event_id).status
        bar_status = data_access.get_event_by_id(cursor, bar_event_id).status

    # Only FOO_TOPIC has a registered handler, so BAR's event stays pending.
    assert foo_status == constants.PROCESSED
    assert bar_status == constants.PENDING
def test_process_next_when_next(empty_queue):
    """Ensure that events get processed in parallel.

    Two streams each claim one of two pending events. If processing were
    serialized, the combined wall time would be at least twice the handler
    sleep, so the elapsed-time assertion proves the handlers overlapped.
    """
    stream0 = create_event_stream()
    stream1 = create_event_stream()
    event0 = create_event(stream0.connection)
    event1 = create_event(stream0.connection)

    start_time = time.time()
    # Use the executor as a context manager so its worker threads are always
    # joined — the original leaked the executor if an assertion failed.
    with ThreadPoolExecutor(max_workers=2) as executor:
        result0 = executor.submit(process_next_assert_true, stream0)
        result1 = executor.submit(process_next_assert_true, stream1)
        # .result() re-raises any exception from the worker threads.
        result0.result()
        result1.result()
    total_time = time.time() - start_time

    # Parallel execution: total time must be less than two sequential runs.
    assert total_time < 2 * HANDLER_SLEEP_TIME
    assert QUEUE.qsize() == 2
    with data_access.cursor(stream0.connection) as cursor:
        assert (data_access.get_event_by_id(
            cursor, event0.id).status == constants.PROCESSED)
        assert (data_access.get_event_by_id(
            cursor, event1.id).status == constants.PROCESSED)
def test_get_next_event(
    first_process_after,
    second_process_after,
    expected_first_status,
    expected_second_status,
):
    """Two competing consumers each claim a distinct pending event.

    A slow worker claims an event and holds its cursor open for half a
    second; a fast worker started shortly after must receive a different
    event (or none), never the one the slow worker already claimed. The
    parametrized ``process_after`` values control which events are due.
    """
    connection = data_access.connect(DSN)
    topic = "foo"
    with data_access.cursor(connection) as cursor:
        first = data_access.create_event(
            cursor, Event(topic=topic, process_after=first_process_after)
        )
        second = data_access.create_event(
            cursor, Event(topic=topic, process_after=second_process_after)
        )

    # Give the inserts a moment to become visible before consumers start.
    time.sleep(0.1)

    def slow_worker():
        # Claims an event, then holds the transaction open for 0.5s so the
        # fast worker runs concurrently with this claim outstanding.
        local_connection = data_access.connect(DSN)
        with data_access.cursor(local_connection) as cursor:
            event = data_access.get_next_event(cursor, [topic])
            time.sleep(0.5)
            if event:
                data_access.mark_event_processed(cursor, event.id)

    def fast_worker():
        local_connection = data_access.connect(DSN)
        with data_access.cursor(local_connection) as cursor:
            event = data_access.get_next_event(cursor, [topic])
            if event:
                data_access.mark_event_processed(cursor, event.id)

    slow_thread = Thread(target=slow_worker)
    slow_thread.start()
    # Ensure the slow worker claims its event before the fast one starts.
    time.sleep(0.1)
    fast_thread = Thread(target=fast_worker)
    fast_thread.start()
    slow_thread.join()
    fast_thread.join()

    with data_access.cursor(connection) as cursor:
        retrieved_first = data_access.get_event_by_id(cursor, first.id)
        assert retrieved_first.status == expected_first_status
    with data_access.cursor(connection) as cursor:
        retrieved_second = data_access.get_event_by_id(cursor, second.id)
        assert retrieved_second.status == expected_second_status
def test_mark_processed(connection, event):
    """Event.mark_processed() persists a PROCESSED status for the row."""
    with data_access.cursor(connection) as cursor:
        event.mark_processed(cursor)

    # Re-read through a fresh cursor to confirm the change was committed.
    with data_access.cursor(connection) as cursor:
        fetched = data_access.get_event_by_id(cursor, event.id)

    assert fetched.status == constants.PROCESSED
def test_create_event(connection):
    """The CLI entry point persists an event with the given topic/payload."""
    topic = "hello"
    payload = json.dumps(dict(hello="world"))

    created = cli.create_event("tests.integration.test_cli", topic, payload)

    with data_access.cursor(connection) as cursor:
        stored = data_access.get_event_by_id(cursor, created.id)

    assert stored.topic == topic
    # The CLI takes a JSON string; the stored payload is the decoded object.
    assert stored.payload == json.loads(payload)
def test_create_and_get_event_with_process_after():
    """A custom process_after timestamp round-trips through the database."""
    connection = data_access.connect(DSN)
    due_at = timestamps.now() + timedelta(seconds=10)
    event = Event(topic="foo", process_after=due_at)

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, event)
    with data_access.cursor(connection) as cursor:
        stored = data_access.get_event_by_id(cursor, created.id)

    assert stored.process_after == event.process_after
def test_create_and_get_event_with_payload(payload):
    """An event's payload round-trips through create/get unchanged."""
    connection = data_access.connect(DSN)
    event = Event(topic="foo", payload=payload)

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, event)
    with data_access.cursor(connection) as cursor:
        stored = data_access.get_event_by_id(cursor, created.id)

    assert stored.payload == payload
def test_create_event(connection):
    """Context.create_event() persists the event and returns the stored row."""
    with data_access.cursor(connection) as cursor:
        # The triggering event is irrelevant here, so a Mock stands in.
        context = Context(Mock(), cursor)
        created = context.create_event(Event(topic="hello"))

    with data_access.cursor(connection) as cursor:
        stored = data_access.get_event_by_id(cursor, created.id)

    assert created == stored
def test_create_and_get_event_without_payload():
    """A payload-less event round-trips intact and starts out PENDING."""
    connection = data_access.connect(DSN)

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, Event(topic="foo"))
    with data_access.cursor(connection) as cursor:
        stored = data_access.get_event_by_id(cursor, created.id)

    assert created == stored
    assert created.status == constants.PENDING
def test_run_processes_due_to_notification():
    """A channel notification wakes the app before its 5-second interval.

    NOTE(review): this test reads the module globals ``foo_event_id`` and
    ``bar_event_id`` set by the interval test, so it depends on that test
    running first — confirm the test ordering is intentional.
    """
    app = App(DSN, CHANNEL, interval=5)

    @app.register(FOO_TOPIC)
    def handler(context):
        pass

    # Fire a notification from another thread while the app is running.
    thread = Thread(target=send_notification)
    thread.start()

    deadline = time.time() + 2

    def run_until_deadline(app):
        # The interval is 5s, so processing within this 2s window can only
        # have been triggered by the notification.
        return time.time() < deadline

    app.run(should_continue=run_until_deadline)

    with data_access.cursor(app.connection) as cursor:
        foo_status = data_access.get_event_by_id(cursor, foo_event_id).status
        bar_status = data_access.get_event_by_id(cursor, bar_event_id).status

    assert foo_status == constants.PROCESSED
    assert bar_status == constants.PENDING
def test_mark_event_processed():
    """mark_event_processed() sets PROCESSED status and a processed_at time.

    A freshly created event starts PENDING with no processed_at; after
    marking, a re-read shows PROCESSED and a non-null timestamp.
    """
    connection = data_access.connect(DSN)
    event = Event(topic="foo")

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, event)

    assert created.status == constants.PENDING
    # Fixed: compare to None with `is`, not `==` (PEP 8 identity check).
    assert created.processed_at is None

    with data_access.cursor(connection) as cursor:
        data_access.mark_event_processed(cursor, created.id)

    with data_access.cursor(connection) as cursor:
        retrieved = data_access.get_event_by_id(cursor, created.id)

    assert retrieved.status == constants.PROCESSED
    assert retrieved.processed_at is not None
def test_process_next_error(empty_queue):
    """A failed event stays PENDING and can be retried by another stream.

    stream0's handler raises, so its event must remain PENDING; stream1
    then picks the same event up and processes it successfully.
    """
    stream0 = create_event_stream(handler_error=True)
    stream1 = create_event_stream()
    event0 = create_event(stream0.connection)

    # The failing handler's exception propagates out of process_next().
    with pytest.raises(Exception):
        stream0.process_next()
    with data_access.cursor(stream0.connection) as cursor:
        assert (data_access.get_event_by_id(
            cursor, event0.id).status == constants.PENDING)

    # The healthy stream retries the same event and succeeds.
    assert stream1.process_next()
    with data_access.cursor(stream0.connection) as cursor:
        assert (data_access.get_event_by_id(
            cursor, event0.id).status == constants.PROCESSED)

    # Only the successful handler invocation pushed onto the queue.
    assert QUEUE.qsize() == 1
def test_process_next_when_not_next(empty_queue):
    """Ensure that the same event does not get processed twice.

    With only one pending event, the first stream claims and processes it;
    a second stream started afterwards must find nothing to process.
    """
    stream0 = create_event_stream()
    stream1 = create_event_stream()
    event0 = create_event(stream0.connection)

    executor = ThreadPoolExecutor(max_workers=2)
    first = executor.submit(process_next_assert_true, stream0)
    # Give stream0 time to claim the only event before stream1 tries.
    time.sleep(1)
    # NOTE(review): "asset" looks like a typo for "assert" — confirm the
    # helper's actual name before renaming.
    second = executor.submit(process_next_asset_false, stream1)
    first.result()
    second.result()

    # Exactly one handler invocation, hence one queue entry.
    assert QUEUE.qsize() == 1
    with data_access.cursor(stream0.connection) as cursor:
        assert (data_access.get_event_by_id(
            cursor, event0.id).status == constants.PROCESSED)