def test_relay(cluster):
    primary_dsn = create_temporary_database()
    secondary_dsn = create_temporary_database()

    create_set(cluster, 'example', create_set_configuration(primary_dsn))
    configure_tick_frequency(primary_dsn)

    queue = Queue()
    relay = Relay(cluster, 'example', 'consumer', QueueHandler(queue), throttle=0.1)
    relay.start()

    # Write a row and force a tick so the relay flushes a batch.
    with closing(psycopg2.connect(primary_dsn)) as connection, connection.cursor() as cursor:
        cursor.execute('INSERT INTO auth_user (username) VALUES (%s)', ('example',))
        connection.commit()
        force_tick(connection, cluster.get_queue_name('example'))

    events = get_events(queue, 3)
    assert_same_batch(events)
    (mutation,) = unwrap_transaction(events)
    assert mutation.table == 'auth_user'
    assert mutation.schema == 'public'
    assert mutation.operation == MutationOperation.INSERT
    assert not mutation.HasField('old')
    assert sorted(mutation.new.columns, key=lambda c: c.name) == [
        Column(name='id', integer64=1),
        Column(name='username', string='example'),
    ]

    # Ensure the relay recovers after its database connection is killed.
    with closing(psycopg2.connect(primary_dsn)) as connection, connection.cursor() as cursor:
        connection.autocommit = True
        cursor.execute('SELECT pg_terminate_backend(pid) FROM pg_stat_activity WHERE pid != pg_backend_pid()')

    with closing(psycopg2.connect(primary_dsn)) as connection, connection.cursor() as cursor:
        cursor.execute('INSERT INTO auth_user (username) VALUES (%s)', ('example',))
        connection.commit()
        force_tick(connection, cluster.get_queue_name('example'))

    events = get_events(queue, 3)
    assert_same_batch(events)
    (mutation,) = unwrap_transaction(events)
    assert mutation.table == 'auth_user'
    assert mutation.schema == 'public'
    assert mutation.operation == MutationOperation.INSERT
    assert not mutation.HasField('old')
    assert sorted(mutation.new.columns, key=lambda c: c.name) == [
        Column(name='id', integer64=2),
        Column(name='username', string='example'),
    ]

    relay.stop_async()
    relay.result(1)
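
# For context: `QueueHandler` above is assumed to be a small test helper that
# forwards whatever the relay hands it onto a Queue, so the test can pull the
# events back out with `get_events`. A minimal sketch of such a helper
# (hypothetical; it assumes the handler interface is a plain callable and may
# not match this repository's actual implementation):
#
#     class QueueHandler(object):
#         def __init__(self, queue):
#             self.queue = queue
#
#         def __call__(self, events):
#             for event in events:
#                 self.queue.put(event)
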
def decorated(cluster, set, consumer_id, *args, **kwargs):
    handler = command(cluster, set, *args, **kwargs)
    with cluster:
        relay = Relay(cluster, set, consumer_id, handler)
        relay.start()

        def __request_exit(signal, frame):
            logger.info('Caught signal %s, stopping...', signal)
            relay.stop_async()

        # Shut down cleanly on SIGINT/SIGTERM instead of dying mid-batch.
        signal.signal(signal.SIGINT, __request_exit)
        signal.signal(signal.SIGTERM, __request_exit)

        # Join with a timeout so signals can still be delivered; once the
        # relay thread exits, propagate its outcome via result().
        while True:
            relay.join(0.1)
            if not relay.is_alive():
                relay.result()
                break
def main(cluster, consumer_id, configuration, set):
    configuration = yaml.load(configuration)
    with cluster:
        stream = configure(configuration['stream'])(cluster, set)
        relay = Relay(cluster, set, consumer_id, stream)
        relay.start()

        def __request_exit(signal, frame):
            relay.stop_async()

        signal.signal(signal.SIGINT, __request_exit)
        signal.signal(signal.SIGTERM, __request_exit)

        while True:
            relay.join(0.1)
            if not relay.is_alive():
                relay.result()
                break