def test_run_processes_due_to_interval():
    global foo_event_id, bar_event_id

    connection = data_access.connect(DSN)
    with data_access.cursor(connection) as cursor:
        foo_event_id = data_access.create_event(cursor, Event(topic=FOO_TOPIC)).id
        bar_event_id = data_access.create_event(cursor, Event(topic=BAR_TOPIC)).id

    # interval=1 lets the app pick up pending events on its own polling schedule,
    # without any notification being sent
    app = App(DSN, CHANNEL, interval=1)

    # only FOO_TOPIC has a registered handler; the bar event should stay pending
    @app.register(FOO_TOPIC)
    def handler(context):
        pass

    now = time.time()

    def continue_for_two_seconds(app):
        return time.time() < now + 2

    app.run(should_continue=continue_for_two_seconds)

    with data_access.cursor(app.connection) as cursor:
        assert (
            data_access.get_event_by_id(cursor, foo_event_id).status
            == constants.PROCESSED
        )
        assert (
            data_access.get_event_by_id(cursor, bar_event_id).status
            == constants.PENDING
        )
def test_mark_processed(data_access):
    event = Event(id=sentinel.id, topic=sentinel.topic, payload=sentinel.payload)

    event.mark_processed(sentinel.cursor)

    data_access.mark_event_processed.assert_called_once_with(
        sentinel.cursor, sentinel.id
    )
def test_get_next_event(
    first_process_after,
    second_process_after,
    expected_first_status,
    expected_second_status,
):
    connection = data_access.connect(DSN)
    topic = "foo"

    with data_access.cursor(connection) as cursor:
        first = data_access.create_event(
            cursor, Event(topic=topic, process_after=first_process_after)
        )
        second = data_access.create_event(
            cursor, Event(topic=topic, process_after=second_process_after)
        )

    time.sleep(0.1)

    def slow_running():
        # uses its own connection and keeps the cursor (and any row lock taken
        # by get_next_event) open while it sleeps
        local_connection = data_access.connect(DSN)
        with data_access.cursor(local_connection) as cursor:
            event = data_access.get_next_event(cursor, [topic])
            time.sleep(0.5)
            if event:
                data_access.mark_event_processed(cursor, event.id)

    def fast_running():
        # runs while the slow worker is still sleeping; SKIP LOCKED means it
        # gets a different event (or none) rather than blocking on the locked row
        local_connection = data_access.connect(DSN)
        with data_access.cursor(local_connection) as cursor:
            event = data_access.get_next_event(cursor, [topic])
            if event:
                data_access.mark_event_processed(cursor, event.id)

    slow_thread = Thread(target=slow_running)
    slow_thread.start()
    time.sleep(0.1)
    fast_thread = Thread(target=fast_running)
    fast_thread.start()
    slow_thread.join()
    fast_thread.join()

    with data_access.cursor(connection) as cursor:
        retrieved_first = data_access.get_event_by_id(cursor, first.id)
        assert retrieved_first.status == expected_first_status

    with data_access.cursor(connection) as cursor:
        retrieved_second = data_access.get_event_by_id(cursor, second.id)
        assert retrieved_second.status == expected_second_status
def send_notification():
    global foo_event_id, bar_event_id

    time.sleep(1)
    connection = data_access.connect(DSN)
    with data_access.cursor(connection) as cursor:
        foo_event_id = data_access.create_event(cursor, Event(topic=FOO_TOPIC)).id
        bar_event_id = data_access.create_event(cursor, Event(topic=BAR_TOPIC)).id
        data_access.notify(cursor, CHANNEL)
def create_event(path, topic, string_payload):
    payload = json.loads(string_payload)
    app = app_loader.load(path)
    event = Event(topic=topic, payload=payload)
    connection = data_access.connect(app.dsn)
    with data_access.cursor(connection) as cursor:
        return data_access.create_event(cursor, event)
def get_event_by_id(cursor, event_id):
    cursor.execute(
        """
        SELECT * FROM events WHERE id=%s
        """,
        [event_id],
    )
    return Event.from_dict(cursor.fetchone())
def test_create_and_get_event_with_process_after():
    connection = data_access.connect(DSN)
    event = Event(topic="foo", process_after=timestamps.now() + timedelta(seconds=10))

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, event)

    with data_access.cursor(connection) as cursor:
        retrieved = data_access.get_event_by_id(cursor, created.id)

    assert retrieved.process_after == event.process_after
def test_create_and_get_event_with_payload(payload):
    connection = data_access.connect(DSN)
    event = Event(topic="foo", payload=payload)

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, event)

    with data_access.cursor(connection) as cursor:
        retrieved = data_access.get_event_by_id(cursor, created.id)

    assert retrieved.payload == payload
def test_create_and_get_event_without_payload():
    connection = data_access.connect(DSN)
    event = Event(topic="foo")

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, event)

    with data_access.cursor(connection) as cursor:
        retrieved = data_access.get_event_by_id(cursor, created.id)

    assert created == retrieved
    assert created.status == constants.PENDING
def test_create_event(app_loader, app, data_access, cli_payload, payload):
    event = Event(topic=sentinel.topic, payload=payload)
    connection = data_access.connect.return_value
    cursor = data_access.cursor.return_value.__enter__.return_value

    cli.create_event(sentinel.path, sentinel.topic, cli_payload)

    app_loader.load.assert_called_once_with(sentinel.path)
    data_access.connect.assert_called_once_with(app.dsn)
    data_access.cursor.assert_called_once_with(connection)
    data_access.create_event.assert_called_once_with(cursor, event)
def test_create_event(connection):
    with data_access.cursor(connection) as cursor:
        event = Mock()
        new_event = Event(topic="hello")
        context = Context(event, cursor)
        created = context.create_event(new_event)

    with data_access.cursor(connection) as cursor:
        retrieved = data_access.get_event_by_id(cursor, created.id)

    assert created == retrieved
def test_init_db(connection):
    # drop everything the migrations create so init_db starts from a clean schema
    with data_access.cursor(connection) as cursor:
        data_access.drop_table(cursor, "pgmigrations")
        data_access.drop_table(cursor, "events")
        data_access.drop_type(cursor, "event_status")

    cli.init_db("tests.integration.test_cli")

    # creating an event only succeeds if init_db rebuilt the events table
    event = Event(topic="hello")
    with data_access.cursor(connection) as cursor:
        data_access.create_event(cursor, event)
def test_mark_event_processed():
    connection = data_access.connect(DSN)
    event = Event(topic="foo")

    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, event)
        assert created.status == constants.PENDING
        assert created.processed_at is None

    with data_access.cursor(connection) as cursor:
        data_access.mark_event_processed(cursor, created.id)

    with data_access.cursor(connection) as cursor:
        retrieved = data_access.get_event_by_id(cursor, created.id)

    assert retrieved.status == constants.PROCESSED
    assert retrieved.processed_at is not None
def get_next_event(cursor, topics):
    LOGGER.debug("Getting next event for topics: %s", topics)
    query = sql.SQL(
        """
        SELECT * FROM events
        WHERE status='PENDING'
        AND process_after < now()
        AND topic IN ({})
        ORDER BY id
        FOR UPDATE SKIP LOCKED
        LIMIT 1
        """
    ).format(sql.SQL(", ").join(sql.Literal(topic) for topic in topics))
    cursor.execute(query)
    data = cursor.fetchone()
    event = Event.from_dict(data) if data else None
    LOGGER.debug("Next event: %s", event)
    return event
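# A minimal worker-loop sketch, not part of the code above: it assumes only the
# data_access functions shown here (connect, cursor, get_next_event,
# mark_event_processed) and a caller-supplied handle() callback. Because the row
# is selected FOR UPDATE SKIP LOCKED, keeping the cursor open until the event is
# marked processed means a concurrent worker skips that row instead of blocking
# on it or processing it twice.
def drain_pending_events(dsn, topics, handle):
    connection = data_access.connect(dsn)
    while True:
        with data_access.cursor(connection) as cursor:
            event = data_access.get_next_event(cursor, topics)
            if event is None:
                break  # nothing PENDING and due for these topics
            handle(event)
            data_access.mark_event_processed(cursor, event.id)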
def create_event(cursor, event):
    LOGGER.debug("Creating event: %s", event)
    # jsonb columns cannot handle certain data types without casting,
    # so make sure the payload is always a JSON object by nesting it
    payload = dict(payload=event.payload)
    if event.process_after:
        cursor.execute(
            """
            INSERT INTO events (topic, payload, process_after)
            VALUES (%s, %s, %s)
            RETURNING *
            """,
            [event.topic, payload, event.process_after],
        )
    else:
        cursor.execute(
            """
            INSERT INTO events (topic, payload)
            VALUES (%s, %s)
            RETURNING *
            """,
            [event.topic, payload],
        )
    return Event.from_dict(cursor.fetchone())
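# A small publishing sketch, likewise hypothetical: create_event returns the
# persisted Event (including its id), and data_access.notify is used the same
# way as in send_notification above, so listeners on the channel can react
# without waiting for their polling interval.
def publish_event(dsn, channel, topic, payload=None):
    connection = data_access.connect(dsn)
    with data_access.cursor(connection) as cursor:
        created = data_access.create_event(cursor, Event(topic=topic, payload=payload))
        data_access.notify(cursor, channel)
    return created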
def event(connection):
    with data_access.cursor(connection) as cursor:
        return data_access.create_event(cursor, Event(topic="test"))
def test_repr_field(payload, expected_string_value):
    event = Event(id=sentinel.id, topic=sentinel.topic, payload=payload)
    field = Mock()
    field.name = "payload"
    assert event.repr_field(field) == f"payload={expected_string_value}"
def test_instantiate_just_topic():
    event = Event(topic=sentinel.topic)
    assert event.topic == sentinel.topic