def test_worker(cluster):
    """Worker replicates INSERTs; tables without a column whitelist get the full row.

    Drives a Worker against a temporary database, performs inserts, forces a
    tick, and checks the mutation events delivered through the queue handler.
    """
    dsn = create_temporary_database()
    create_set(cluster, 'example', create_set_configuration(dsn))
    configure_tick_frequency(dsn)

    results = Queue()
    worker = Worker(cluster, dsn, 'example', 'consumer', QueueHandler(results))
    worker.start()

    # Insert into a table that has a column whitelist configured.
    with closing(psycopg2.connect(dsn)) as connection, connection.cursor() as cursor:
        cursor.execute('INSERT INTO auth_user (username) VALUES (%s)', ('example',))
        connection.commit()
        force_tick(connection, cluster.get_queue_name('example'))

    batch = get_events(results, 3)
    assert_same_batch(batch)
    (mutation,) = unwrap_transaction(batch)
    assert mutation.table == 'auth_user'
    assert mutation.schema == 'public'
    assert mutation.operation == MutationOperation.INSERT
    assert not mutation.HasField('old')
    assert sorted(mutation.new.columns, key=lambda c: c.name) == [
        Column(name='id', integer64=1),
        Column(name='username', string='example'),
    ]

    # also make sure tables without column whitelist defined replicate the entire row state
    with closing(psycopg2.connect(dsn)) as connection, connection.cursor() as cursor:
        cursor.execute(
            'INSERT INTO accounts_userprofile (user_id, display_name) VALUES (%s, %s)',
            (1, 'example'),
        )
        connection.commit()
        force_tick(connection, cluster.get_queue_name('example'))

    batch = get_events(results, 3)
    assert_same_batch(batch)
    (mutation,) = unwrap_transaction(batch)
    assert mutation.table == 'accounts_userprofile'
    assert mutation.schema == 'public'
    assert mutation.operation == MutationOperation.INSERT
    assert not mutation.HasField('old')
    assert sorted(mutation.new.columns, key=lambda c: c.name) == [
        Column(name='display_name', string='example'),
        Column(name='id', integer64=1),
        Column(name='user_id', integer64=1),
    ]

    worker.stop_async()
    worker.result(1)
def test_relay(cluster):
    """Relay replicates INSERTs and recovers after its backend connection is killed.

    First verifies normal delivery of a mutation event, then terminates every
    other backend on the primary (simulating a dropped replication connection)
    and verifies the relay reconnects and delivers the next mutation.
    """
    primary_dsn = create_temporary_database()
    # NOTE(review): the returned DSN is never used below; the call is kept only
    # for its side effect of provisioning a second database -- confirm whether
    # the relay still requires it to exist.
    secondary_dsn = create_temporary_database()
    create_set(cluster, 'example', create_set_configuration(primary_dsn))
    configure_tick_frequency(primary_dsn)

    queue = Queue()
    relay = Relay(cluster, 'example', 'consumer', QueueHandler(queue), throttle=0.1)
    relay.start()

    with closing(psycopg2.connect(primary_dsn)) as connection, connection.cursor() as cursor:
        cursor.execute('INSERT INTO auth_user (username) VALUES (%s)', ('example',))
        connection.commit()
        force_tick(connection, cluster.get_queue_name('example'))

    events = get_events(queue, 3)
    assert_same_batch(events)
    (mutation,) = unwrap_transaction(events)
    assert mutation.table == 'auth_user'
    assert mutation.schema == 'public'
    assert mutation.operation == MutationOperation.INSERT
    assert not mutation.HasField('old')
    assert sorted(mutation.new.columns, key=lambda c: c.name) == [
        Column(name='id', integer64=1),
        Column(name='username', string='example'),
    ]

    # ensure the connection recovers after being killed
    with closing(psycopg2.connect(primary_dsn)) as connection, connection.cursor() as cursor:
        # autocommit so pg_terminate_backend takes effect immediately
        connection.autocommit = True
        cursor.execute(
            'SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid != pg_backend_pid()'
        )

    with closing(psycopg2.connect(primary_dsn)) as connection, connection.cursor() as cursor:
        cursor.execute('INSERT INTO auth_user (username) VALUES (%s)', ('example',))
        connection.commit()
        force_tick(connection, cluster.get_queue_name('example'))

    events = get_events(queue, 3)
    assert_same_batch(events)
    (mutation,) = unwrap_transaction(events)
    assert mutation.table == 'auth_user'
    assert mutation.schema == 'public'
    assert mutation.operation == MutationOperation.INSERT
    assert not mutation.HasField('old')
    # second insert into auth_user, so the serial primary key is now 2
    assert sorted(mutation.new.columns, key=lambda c: c.name) == [
        Column(name='id', integer64=2),
        Column(name='username', string='example'),
    ]

    relay.stop_async()
    relay.result(1)