Code example #1
0
File: test_qclient.py  Project: tobgu/qcache-client
def test_circuit_breaker_kicks_in_after_too_many_failures(qcache_factory):
    """After the configured number of consecutive failures the client must
    raise TooManyConsecutiveErrors instead of retrying forever."""
    qcache_factory.spawn_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'],
                     read_timeout=0.2,
                     consecutive_error_count_limit=5)

    # A query of this size is not possible to execute using a GET (on my
    # machine at least), so every attempt fails and the circuit breaker
    # should kick in after the configured number of retries.
    huge_values = [("%s" % n) for n in range(300000)] + ["baz"]
    query = dict(select=['foo', 'bar'], where=["in", "foo", huge_values])
    with pytest.raises(TooManyConsecutiveErrors):
        client.query('test_key', q=query, load_fn=data_source2,
                     load_fn_kwargs=dict(content='baz'),
                     content_type='application/json')
Code example #2
0
File: test_qclient.py  Project: tobgu/qcache-client
def test_query_using_compression(qcache_factory):
    """The response should be compressed with the first encoding listed in
    the Accept-Encoding query header."""
    qcache_factory.spawn_caches('2222')
    client = QClient(['http://localhost:2222'])
    headers = {'Accept-Encoding': 'lz4,gzip'}
    response = client.query('test_key', q={}, load_fn=data_source,
                            content_type='application/json',
                            load_fn_kwargs=dict(content='baz'),
                            query_headers=headers)

    # lz4 was listed before gzip, so lz4 is expected to win.
    assert response.encoding == 'lz4'
Code example #3
0
File: test_qclient.py  Project: tobgu/qcache-client
def test_query_with_custom_query_header(qcache_factory):
    """A custom X-QCache header passed via query_headers should reach the
    server and affect how the query is executed."""
    qcache_factory.spawn_caches('2222')
    client = QClient(['http://localhost:2222'])
    response = client.query('test_key',
                            q=dict(where=['like', 'foo', "'%b%'"]),
                            load_fn=data_source_csv,
                            query_headers={'X-QCache-filter-engine': 'pandas'})
    rows = json.loads(response.content.decode('utf8'))
    assert rows == [{'foo': 'cba', 'bar': 123}, {'foo': 'abc', 'bar': 321}]
Code example #4
0
File: test_qclient.py  Project: tobgu/qcache-client
def test_query_with_custom_post_header(qcache_factory):
    """A custom X-QCache header passed via post_headers should be applied
    when the data is uploaded (here: forcing the bar column to string)."""
    qcache_factory.spawn_caches('2222')
    client = QClient(['http://localhost:2222'])
    response = client.query('test_key',
                            q=dict(where=['==', 'bar', "'321'"]),
                            load_fn=data_source_csv,
                            post_headers={'X-QCache-types': 'bar=string'})

    rows = json.loads(response.content.decode('utf8'))
    # bar comes back as a string because of the type hint above.
    assert rows == [{'foo': 'abc', 'bar': '321'}]
Code example #5
0
File: test_qclient.py  Project: tobgu/qcache-client
def test_no_nodes_available(qcache_factory):
    """With every cache node down the client raises NoCacheAvailable and
    records exactly one failure per node in its statistics."""
    qcache_factory.kill_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'])
    with pytest.raises(NoCacheAvailable):
        client.query('test_key', q=dict(select=['foo', 'bar']),
                     load_fn=data_source,
                     load_fn_kwargs=dict(content='baz'),
                     content_type='application/json')

    # The exact failure mode seems to vary between platforms: connection
    # timeout on Linux, connection error on MacOSX. Either way each node
    # must have failed exactly once.
    statistics = client.statistics
    for node in ('http://localhost:2222', 'http://localhost:2223'):
        node_stats = statistics[node]
        assert node_stats['connect_timeout'] + node_stats['connection_error'] == 1
    assert statistics is client.get_statistics()

    # Statistics are reset once they have been fetched.
    assert len(client.get_statistics()) == 0
Code example #6
0
File: test_qclient.py  Project: tobgu/qcache-client
def test_basic_query_with_no_prior_data(qcache_factory):
    """Querying a key with no cached data should trigger load_fn and then
    return the queried rows."""
    qcache_factory.spawn_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'])
    response = client.query('test_key', q=dict(select=['foo', 'bar']),
                            load_fn=data_source,
                            load_fn_kwargs=dict(content='baz'),
                            content_type='application/json')

    rows = json.loads(response.content.decode('utf8'))

    assert rows == [{'foo': 'baz', 'bar': 123}, {'foo': 'abc', 'bar': 321}]
    # The string representation of the result should expose its content.
    assert 'baz' in str(response)
Code example #7
0
File: test_qclient.py  Project: tobgu/qcache-client
def test_basic_query_using_post_with_no_prior_data(qcache_factory):
    """A query too large for a GET request can be executed by explicitly
    requesting a POST with post_query=True."""
    qcache_factory.spawn_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'],
                     read_timeout=1.0)

    # A query of this size is not possible to execute using a GET
    # (on my machine at least).
    big_filter = ["in", "foo", [("%s" % n) for n in range(300000)] + ["baz"]]
    response = client.query('test_key',
                            q=dict(select=['foo', 'bar'], where=big_filter),
                            load_fn=data_source2,
                            load_fn_kwargs=dict(content='baz'),
                            content_type='application/json',
                            post_query=True)

    rows = json.loads(response.content.decode('utf8'))
    assert rows == [{'foo': 'baz', 'bar': 123}]