Example No. 1
def test_query_using_compression(qcache_factory):
    qcache_factory.spawn_caches('2222')
    client = QClient(['http://localhost:2222'])
    result = client.query('test_key', q={}, load_fn=data_source, content_type='application/json',
                          load_fn_kwargs=dict(content='baz'), query_headers={'Accept-Encoding': 'lz4,gzip'})

    assert result.encoding == 'lz4'
Example No. 2
def test_query_with_custom_query_header(qcache_factory):
    qcache_factory.spawn_caches('2222')
    client = QClient(['http://localhost:2222'])
    result = client.query('test_key', q=dict(where=['like', 'foo', "'%b%'"]), load_fn=data_source_csv,
                          query_headers={'X-QCache-filter-engine': 'pandas'})
    result_data = json.loads(result.content.decode('utf8'))
    assert result_data == [{'foo': 'cba', 'bar': 123}, {'foo': 'abc', 'bar': 321}]
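The data_source_csv helper used above is not included in this listing. A minimal sketch that would produce the asserted rows (the helper body is an assumption; only the column names and values are taken from the test's expected result):

def data_source_csv():
    # Two rows; both 'cba' and 'abc' contain the letter 'b', so both match the
    # like-filter "'%b%'" applied in the query above.
    return "foo,bar\ncba,123\nabc,321\n"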
Example No. 3
def xtest_repeated_posts_on_small_dataset():
    client = QClient(['http://localhost:8882'])
    content = data_source('foo')

    for x in range(1000):
        t0 = time.time()
        client.post(id_generator(), content, content_type='application/json')
        print("Loop: {num}, duration: {dur}".format(num=x, dur=time.time() - t0))
Example No. 4
def test_query_with_custom_post_header(qcache_factory):
    qcache_factory.spawn_caches('2222')
    client = QClient(['http://localhost:2222'])
    result = client.query('test_key', q=dict(where=['==', 'bar', "'321'"]), load_fn=data_source_csv,
                          post_headers={'X-QCache-types': 'bar=string'})

    result_data = json.loads(result.content.decode('utf8'))
    assert result_data == [{'foo': 'abc', 'bar': '321'}]
Example No. 5
def test_basic_query_with_no_prior_data(qcache_factory):
    qcache_factory.spawn_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'])
    result = client.query('test_key', q=dict(select=['foo', 'bar']), load_fn=data_source,
                          load_fn_kwargs=dict(content='baz'), content_type='application/json')

    result_data = json.loads(result.content.decode('utf8'))

    assert result_data == [{'foo': 'baz', 'bar': 123}, {'foo': 'abc', 'bar': 321}]
    assert 'baz' in str(result)
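The data_source load function used in several of these tests is not part of the listing. Judging from the asserted result and the load_fn_kwargs content='baz' above, a minimal sketch could look like this (the helper body is an assumption; only the shape of the data is taken from the test):

import json

def data_source(content):
    # The 'content' keyword argument becomes the 'foo' value of the first row,
    # which is why the test passes load_fn_kwargs=dict(content='baz').
    return json.dumps([{'foo': content, 'bar': 123}, {'foo': 'abc', 'bar': 321}])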
Example No. 6
def test_circuit_breaker_kicks_in_after_too_many_failures(qcache_factory):
    qcache_factory.spawn_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'], read_timeout=0.2,
                     consecutive_error_count_limit=5)

    # A query of this size is not possible to execute using a GET (on my machine at least)
    # The circuit breaker should kick in after the configured number of retries.
    where = ["in", "foo", [("%s" % i) for i in range(300000)] + ["baz"]]
    with pytest.raises(TooManyConsecutiveErrors):
        client.query('test_key', q=dict(select=['foo', 'bar'], where=where), load_fn=data_source2,
                     load_fn_kwargs=dict(content='baz'), content_type='application/json')
Example No. 7
def test_basic_query_using_post_with_no_prior_data(qcache_factory):
    qcache_factory.spawn_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'], read_timeout=1.0)

    # A query of this size is not possible to execute using a GET (on my machine at least)
    where = ["in", "foo", [("%s" % i) for i in range(300000)] + ["baz"]]
    result = client.query('test_key', q=dict(select=['foo', 'bar'], where=where), load_fn=data_source2,
                          load_fn_kwargs=dict(content='baz'), content_type='application/json', post_query=True)

    result_data = json.loads(result.content.decode('utf8'))
    assert result_data == [{'foo': 'baz', 'bar': 123}]
Example No. 8
def test_no_nodes_available_then_node_becomes_available_again(qcache_factory):
    client = QClient(['http://localhost:2222', 'http://localhost:2223'])
    with pytest.raises(NoCacheAvailable):
        client.get('test_key', q=dict())

    # Start a server and validate that the client resumes the connection
    qcache_factory.spawn_caches('2222')
    result = client.get('test_key', q=dict())

    assert result is None
    assert client.statistics['http://localhost:2222']['retry_error'] == 1
    assert client.statistics['http://localhost:2223']['retry_error'] == 2
Example No. 9
def test_one_node_unavailable_then_appears(qcache_factory):
    names1 = qcache_factory.spawn_caches('2222')
    nodes = ['http://localhost:2222', 'http://localhost:2223']
    client = QClient(nodes)
    key = _get_key_on_node(nodes, 'http://localhost:2223')

    content = data_source('foo')
    client.post(key, content, content_type='application/json')

    # Verify that the data is indeed available even though the primary destination node
    # was not available.
    assert client.get(key, q={}) is not None

    # Now start the server that the key is destined for and re-post the data
    # a number of times until it is moved to the destination node.
    qcache_factory.spawn_caches('2223')
    for _ in range(10):
        client.post(key, content, content_type='application/json')

    # Kill the first server to make sure that no stale data exists
    # and perform a get to verify that the data has indeed been moved to
    # the original destination node.
    qcache_factory.kill_caches(*names1)
    assert client.get(key, q={}) is not None
    assert client.statistics['http://localhost:2223']['resurrections']
Example No. 10
def test_no_nodes_available(qcache_factory):
    qcache_factory.kill_caches('2222', '2223')
    client = QClient(['http://localhost:2222', 'http://localhost:2223'])
    with pytest.raises(NoCacheAvailable):
        client.query('test_key', q=dict(select=['foo', 'bar']), load_fn=data_source,
                     load_fn_kwargs=dict(content='baz'), content_type='application/json')

    # The exact behaviour seems to vary between platforms: connection timeout on Linux,
    # connection error on macOS.
    statistics = client.statistics
    assert statistics['http://localhost:2222']['connect_timeout'] + \
           statistics['http://localhost:2222']['connection_error'] == 1
    assert statistics['http://localhost:2223']['connect_timeout'] + \
           statistics['http://localhost:2223']['connection_error'] == 1
    assert statistics is client.get_statistics()

    # Statistics are reset after being fetched
    assert len(client.get_statistics()) == 0
Example No. 11
def test_https_with_basic_auth(qcache_factory):
    qcache_factory.spawn_caches('2222', certfile='host.pem', auth='abc:123')
    nodes = ['https://localhost:2222']
    client = QClient(nodes, verify='tests/rootCA.crt', auth=('abc', '123'))
    content = data_source('foo')
    key = '12345'

    client.post(key, content, content_type='application/json')
    assert client.get(key, q={}) is not None

    client.delete(key)
    assert client.get(key, q={}) is None
Example No. 12
def test_delete(qcache_factory):
    qcache_factory.spawn_caches('2222')
    nodes = ['http://localhost:2222']
    client = QClient(nodes)
    content = data_source('foo')
    key = '12345'

    client.post(key, content, content_type='application/json')
    assert client.get(key, q={}) is not None

    client.delete(key)
    assert client.get(key, q={}) is None
Example No. 13
    parser.add_argument('--lz4-block-queries', type=int, default=0)
    parser.add_argument('--lz4-frame-uploads', type=int, default=0)
    parser.add_argument('--lz4-frame-queries', type=int, default=0)
    parser.add_argument('--plain-uploads', type=int, default=0)
    parser.add_argument('--plain-queries', type=int, default=0)
    parser.add_argument('--line-count', type=int, default=1000)

    args = parser.parse_args()

    input_string = "a,b,c,d,e,f,g,h\n"
    input_string += "\n".join(
        args.line_count *
        ["1200,456,123.12345,a string,another string,9877654.2,1234567.12,77"])
    print("Size of input = {}".format(len(input_string)))

    c = QClient(node_list=["http://localhost:8888"], read_timeout=10.0)

    frame_uploads = max(args.lz4_frame_uploads,
                        int(args.lz4_frame_queries > 0))
    for _ in range(frame_uploads):
        t0 = time.time()
        c.post("key_lz4_frame",
               frame_lz4er(input_string),
               post_headers={"Content-Encoding": "lz4-frame"})
        print("LZ4 frame upload time: {} s".format(time.time() - t0))

    for _ in range(args.lz4_frame_queries):
        t0 = time.time()
        r = c.get("key_lz4_frame", {},
                  query_headers={"Accept-Encoding": "lz4-frame"})
        qt = time.time() - t0
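frame_lz4er is not defined in this excerpt. Given the 'Content-Encoding: lz4-frame' header it is posted with, a plausible sketch (the function body is an assumption; lz4.frame.compress is the real API):

import lz4.frame

def frame_lz4er(text):
    # Encode the CSV string to bytes and wrap it in an LZ4 frame so the server
    # can decode it based on the Content-Encoding header.
    return lz4.frame.compress(text.encode('utf-8'))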
Example No. 14
import random

import requests
from qclient import QClient
import lz4.frame
import lz4.block
import time

# client = QClient(node_list=('https://localhost:8888',), verify='../tls/ca.pem', cert='../tls/host.pem')
client = QClient(node_list=('http://localhost:8888', ))


def generate_csv(byte_size):
    """Build a CSV payload of roughly byte_size bytes and return (data, row_count)."""
    header = b'abc,def,ghi,jkl,mno\r\n'

    body = []
    size = len(header)
    row_count = 0
    while True:
        row_count += 1
        number = round(random.uniform(-1000, 1000), 2)
        line = f"foobar,,,{number},10"
        size += 2 + len(line)  # +2 for the trailing \r\n
        body.append(line)
        if size > byte_size:
            break

    data = "\r\n".join(body).encode("utf-8")
    return header + data, row_count
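A short usage sketch for generate_csv, posting roughly one megabyte of CSV through the client created above (the key name and the 'text/csv' content type are assumptions):

data, row_count = generate_csv(1_000_000)
print("Generated {} rows, {} bytes".format(row_count, len(data)))
client.post('csv_example', data, content_type='text/csv')
result = client.get('csv_example', q={})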

Example No. 15
from qclient import QClient
import time

input_string = "a,b,c,d,e,f,g,h\n"
input_string += "\n".join(5000 * ["1200,456,123.12345,a string,another string,9877654.2,1234567.12,77"])

print("Size of input = {}".format(len(input_string)))

c = QClient(node_list=["http://localhost:8888"], read_timeout=10.0)

# Stress test: keep POSTing the same CSV payload under new keys and report
# the mean request time for every batch of 100 requests.
t0 = time.time()
for x in range(100000000):
    c.post("key{}".format(x), input_string)
    if x % 100 == 0:
        avg_ms = round(1000 * (time.time() - t0) / 100, 2)
        print("Total count: {}, mean req time: {} ms".format(x, avg_ms))
        t0 = time.time()