def test_elasticsearch_invalid_indices(monkeypatch):
    """Searching with a malformed indices argument or too many indices must raise."""
    url = 'http://es/'
    es = ElasticsearchWrapper(url)

    # indices must be a list, not a dict
    with pytest.raises(Exception):
        es.search(indices={'index-1': 'not a list!'})

    # One index more than the allowed maximum.
    # NOTE: the previous expression `['index-*'] * MAX_INDICES + 1` raised a
    # TypeError (list + int) before search() was even called, so the wrapper's
    # own validation was never exercised; the parentheses fix that.
    with pytest.raises(Exception):
        es.search(indices=['index-*'] * (MAX_INDICES + 1))
def test_elasticsearch_invalid_size(monkeypatch):
    """A size above the allowed maximum or a negative size must be rejected."""
    es = ElasticsearchWrapper('http://es/')

    # Both out-of-range values are rejected the same way.
    for bad_size in (MAX_SIZE + 1, -1):
        with pytest.raises(Exception):
            es.search(size=bad_size)
def test_elasticsearch_search_error(monkeypatch):
    """Network failures surface as HttpError; other exceptions propagate untranslated."""
    resp = resp_mock()

    get = requests_mock(resp, failure=requests.Timeout)
    monkeypatch.setattr('requests.get', get)

    url = 'http://es/'
    es = ElasticsearchWrapper(url)

    # Timeouts are wrapped in HttpError.
    with pytest.raises(HttpError):
        es.search()

    # Connection errors are wrapped in HttpError too.
    get = requests_mock(resp, failure=requests.ConnectionError)
    monkeypatch.setattr('requests.get', get)

    with pytest.raises(HttpError):
        es.search()

    # An unhandled exception must propagate as-is, not as HttpError.
    get = requests_mock(resp, failure=Exception)
    monkeypatch.setattr('requests.get', get)

    with pytest.raises(Exception) as ex:
        es.search()

    # NOTE: this assertion previously sat *inside* the `with` block after the
    # raising call, so it never executed; it also compared the ExceptionInfo
    # object to the class. Check the raised exception's type instead.
    assert ex.type is not HttpError
def test_elasticsearch_search_no_source_with_size(monkeypatch):
    """source=False and an explicit size are passed as GET query parameters."""
    response = resp_mock()
    mocked_get = requests_mock(response)
    monkeypatch.setattr('requests.get', mocked_get)

    base_url = 'http://es/'
    wrapper = ElasticsearchWrapper(base_url)

    assert wrapper.search(source=False, size=100) == response.json.return_value

    expected_params = {'size': 100, '_source': 'false'}
    mocked_get.assert_called_with(get_full_url(base_url),
                                  headers={'User-Agent': get_user_agent()},
                                  params=expected_params,
                                  timeout=10)
def test_elasticsearch_search(monkeypatch):
    """A plain query string search issues a GET with default size and source."""
    response = resp_mock()
    mocked_get = requests_mock(response)
    monkeypatch.setattr('requests.get', mocked_get)

    base_url = 'http://es/'
    wrapper = ElasticsearchWrapper(base_url)

    query = 'my-search'
    assert wrapper.search(q=query) == response.json.return_value

    expected_params = {'q': query, 'size': DEFAULT_SIZE, '_source': 'true'}
    mocked_get.assert_called_with(get_full_url(base_url),
                                  headers={'User-Agent': get_user_agent()},
                                  params=expected_params,
                                  timeout=10)
def test_elasticsearch_search_oauth2(monkeypatch):
    """With oauth2=True the access token is sent as a Bearer Authorization header."""
    response = resp_mock()
    mocked_get = requests_mock(response)
    monkeypatch.setattr('requests.get', mocked_get)

    # Stub the token service so it always hands back a fixed token.
    access_token = 'TOKEN-123'
    monkeypatch.setattr('tokens.get', lambda x: access_token)

    base_url = 'http://es/'
    wrapper = ElasticsearchWrapper(base_url, oauth2=True)

    assert wrapper.search() == response.json.return_value

    expected_headers = {
        'User-Agent': get_user_agent(),
        'Authorization': 'Bearer {}'.format(access_token),
    }
    mocked_get.assert_called_with(get_full_url(base_url),
                                  headers=expected_headers,
                                  params={'size': DEFAULT_SIZE, '_source': 'true'},
                                  timeout=10)
def test_elasticsearch_search_body(monkeypatch):
    """Passing a body switches the wrapper to POST; the body sent carries the default size."""
    response = resp_mock()
    mocked_post = requests_mock(response)
    monkeypatch.setattr('requests.post', mocked_post)

    wrapper = ElasticsearchWrapper('http://es/')

    query_body = {'query': {'query_string': {'query': ''}}}
    assert wrapper.search(body=query_body) == response.json.return_value

    # The JSON posted to Elasticsearch must include the default size.
    query_body['size'] = DEFAULT_SIZE

    mocked_post.assert_called_with(get_full_url('http://es/'),
                                   params={},
                                   json=query_body,
                                   headers={'User-Agent': get_user_agent()},
                                   timeout=10)
def test_elasticsearch_search_no_source_body_with_size(monkeypatch):
    """POST search against explicit indices with size and source=False in the body."""
    response = resp_mock()
    mocked_post = requests_mock(response)
    monkeypatch.setattr('requests.post', mocked_post)

    wrapper = ElasticsearchWrapper('http://es/')

    query_body = {'query': {'query_string': {'query': ''}}}
    index_list = ['logstash-2016-*', 'logstash-2015-*']

    result = wrapper.search(indices=index_list, body=query_body, source=False, size=100)
    assert result == response.json.return_value

    # On POST requests, size and _source travel inside the JSON body.
    query_body['size'] = 100
    query_body['_source'] = False

    mocked_post.assert_called_with(get_full_url('http://es/', indices=index_list),
                                   params={},
                                   json=query_body,
                                   headers={'User-Agent': get_user_agent()},
                                   timeout=10)