async def test_mount_snapshot_with_renames(es):
    es.snapshot.get.return_value = as_future({
        "responses": [{
            "repository":
            "eventdata",
            "snapshots": [{
                "snapshot":
                "eventdata-snapshot",
                "uuid":
                "mWJnRABaSh-gdHF3-pexbw",
                "indices": [
                    "elasticlogs-2018-05-03", "elasticlogs-2018-05-04",
                    "elasticlogs-2018-06-05"
                ]
            }]
        }]
    })
    # one call for each matching index
    es.transport.perform_request.side_effect = [
        as_future(),
        as_future(),
    ]

    params = {
        "repository": "eventdata",
        "snapshot": "eventdata-snapshot",
        "index_pattern": "elasticlogs-2018-05-*",
        "rename_pattern": "elasticlogs-(.*)",
        "rename_replacement": "renamed-logs-\\1"
    }

    runner = MountSearchableSnapshotRunner()

    await runner(es, params=params)

    es.snapshot.get.assert_called_once_with("eventdata", "eventdata-snapshot")
    es.transport.perform_request.assert_has_calls([
        mock.call(method="POST",
                  url="/_snapshot/eventdata/eventdata-snapshot/_mount",
                  body={
                      "index": "elasticlogs-2018-05-03",
                      "renamed_index": "renamed-logs-2018-05-03"
                  },
                  params=None),
        mock.call(method="POST",
                  url="/_snapshot/eventdata/eventdata-snapshot/_mount",
                  body={
                      "index": "elasticlogs-2018-05-04",
                      "renamed_index": "renamed-logs-2018-05-04"
                  },
                  params=None)
    ])


async def test_mount_snapshot_frozen(es):
    es.snapshot.get.return_value = as_future({
        "responses": [{
            "repository":
            "eventdata",
            "snapshots": [{
                "snapshot":
                "eventdata-snapshot",
                "uuid":
                "mWJnRABaSh-gdHF3-pexbw",
                "indices": [
                    "elasticlogs-2018-05-03", "elasticlogs-2018-05-04",
                    "elasticlogs-2018-05-05"
                ]
            }]
        }]
    })
    # one call for each index
    es.transport.perform_request.side_effect = [
        as_future(),
        as_future(),
        as_future(),
    ]

    params = {
        "repository": "eventdata",
        "snapshot": "eventdata-snapshot",
        "query_params": {
            "storage": "shared_cache"
        }
    }

    runner = MountSearchableSnapshotRunner()

    await runner(es, params=params)

    es.snapshot.get.assert_called_once_with("eventdata", "eventdata-snapshot")
    es.transport.perform_request.assert_has_calls([
        mock.call(method="POST",
                  url="/_snapshot/eventdata/eventdata-snapshot/_mount",
                  body={"index": "elasticlogs-2018-05-03"},
                  params={"storage": "shared_cache"}),
        mock.call(method="POST",
                  url="/_snapshot/eventdata/eventdata-snapshot/_mount",
                  body={"index": "elasticlogs-2018-05-04"},
                  params={"storage": "shared_cache"}),
        mock.call(method="POST",
                  url="/_snapshot/eventdata/eventdata-snapshot/_mount",
                  body={"index": "elasticlogs-2018-05-05"},
                  params={"storage": "shared_cache"}),
    ])
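
Both mount tests above (and the msearch tests below) await values produced by an `as_future` helper. A minimal sketch of such a helper, assuming it simply wraps an optional result in an already-resolved future; the signature is an assumption, not necessarily the project's actual test utility:

import asyncio


def as_future(result=None):
    # assumed helper: wrap a result in a completed future so that mocked,
    # awaitable client methods (e.g. es.snapshot.get) can be awaited in tests
    future = asyncio.Future()
    future.set_result(result)
    return future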
Example #3
async def test_msearch_without_hits(es):
    params = {
        "body": [{
            "index": "elasticlogs-*"
        }, {
            "query": {
                "match_all": {}
            },
            "from": 0,
            "size": 10
        }, {
            "index": "elasticlogs-*"
        }, {
            "query": {
                "match_all": {}
            },
            "from": 0,
            "size": 10
        }],
        "meta_data": {
            "debug": True
        }
    }
    es.msearch.return_value = as_future({
        "responses": [{
            "took": 0,
            "timed_out": False,
            "hits": {
                "total": 0,
                "hits": []
            },
            "status": 200
        }, {
            "took": 0,
            "timed_out": False,
            "hits": {
                "total": 0,
                "hits": []
            },
            "status": 200
        }]
    })

    response = await kibana(es, params=params)

    assert response == {
        "debug": True,
        "success": True,
        "error-count": 0,
        "hits": 0,
        "took": 0,
        "weight": 1,
        "unit": "ops",
        "visualisation_count": 2,
    }
Example #4
    def __init__(self, *args, **kwargs):
        StaticClientFactory.SYNC_PATCHER = mock.patch(
            "elasticsearch.Elasticsearch")
        self.es = StaticClientFactory.SYNC_PATCHER.start()
        self.es.indices.stats.return_value = {"mocked": True}
        self.es.info.return_value = {
            "cluster_name": "elasticsearch",
            "version": {
                "number": "7.3.0",
                "build_flavor": "oss",
                "build_type": "tar",
                "build_hash": "de777fa",
                "build_date": "2019-07-24T18:30:11.767338Z",
                "build_snapshot": False,
                "lucene_version": "8.1.0",
                "minimum_wire_compatibility_version": "6.8.0",
                "minimum_index_compatibility_version": "6.0.0-beta1"
            }
        }

        StaticClientFactory.ASYNC_PATCHER = mock.patch(
            "elasticsearch.Elasticsearch")
        self.es_async = StaticClientFactory.ASYNC_PATCHER.start()
        # We want to simulate that the request took 10 seconds. Internally this is measured using `time#perf_counter`
        # and the code relies on measurements being taken consistently with `time#perf_counter`, because in some places
        # we take a value and subtract other measurements from it (e.g. in the main loop in AsyncExecutor we subtract
        # `total_start` from `stop`).
        #
        # On some systems (macOS), `time#perf_counter` starts at zero when the process is started but on others (Linux),
        # `time#perf_counter` starts when the OS is booted. Thus we need to ensure that this value is roughly
        # consistent across all systems by using the current value of `time#perf_counter` as the basis.

        start = time.perf_counter()
        self.es_async.init_request_context.return_value = {
            "request_start": start,
            "request_end": start + 10
        }
        bulk_response = {"errors": False, "took": 5}
        # bulk responses are raw strings
        self.es_async.bulk.return_value = as_future(
            io.StringIO(json.dumps(bulk_response)))
        self.es_async.transport.close.return_value = as_future()
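
The two `mock.patch` patchers started in `__init__` should be stopped when the factory is torn down; a minimal sketch of such a cleanup hook on the same class (the method name `close` and its placement are assumptions):

    @classmethod
    def close(cls):
        # stop the patchers so later tests see the real elasticsearch.Elasticsearch again
        StaticClientFactory.SYNC_PATCHER.stop()
        StaticClientFactory.ASYNC_PATCHER.stop()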
Example #5
async def test_msearch_with_hits_as_number(es):
    params = {
        "body": [{
            "index": "elasticlogs-*"
        }, {
            "query": {
                "match_all": {}
            },
            "from": 0,
            "size": 10
        }, {
            "index": "elasticlogs-*"
        }, {
            "query": {
                "match_all": {}
            },
            "from": 0,
            "size": 10
        }],
        "meta_data": {
            "debug": True
        }
    }
    es.msearch.return_value = as_future({
        "responses": [{
            "took": 5,
            "timed_out": False,
            "hits": {
                "total":
                1,
                "hits": [{
                    "_index": "my-docs",
                    "_type": "_doc",
                    "_id": "1",
                    "_score": 1,
                    "_source": {
                        "title": "Hello"
                    }
                }]
            },
            "status": 200
        }, {
            "took": 7,
            "timed_out": False,
            "hits": {
                "total":
                2,
                "hits": [{
                    "_index": "my-other-docs",
                    "_type": "_doc",
                    "_id": "1",
                    "_score": 1,
                    "_source": {
                        "title": "Hello"
                    }
                }, {
                    "_index": "my-other-docs",
                    "_type": "_doc",
                    "_id": "2",
                    "_score": 1,
                    "_source": {
                        "title": "World"
                    }
                }]
            },
            "status": 200
        }]
    })

    response = await kibana(es, params=params)

    assert response == {
        "debug": True,
        "success": True,
        "error-count": 0,
        "hits": 3,
        "took": 7,
        "weight": 1,
        "unit": "ops",
        "visualisation_count": 2,
    }
Example #6
async def test_msearch_with_error(es):
    params = {
        "body": [
            {
                "index": "elasticlogs-*"
            },
            {
                "query": {
                    "match_all": {}
                },
                "from": 0,
                "size": 10
            },
        ],
        "meta_data": {
            "debug": True
        }
    }
    es.msearch.return_value = as_future({
        "took":
        80000,
        "responses": [{
            "error": {
                "root_cause": [{
                    "type": "i_o_exception",
                    "reason": "failed to read data from cache"
                }],
                "type":
                "search_phase_execution_exception",
                "reason":
                "all shards failed",
                "phase":
                "fetch",
                "grouped":
                True,
                "failed_shards": [{
                    "shard": 0,
                    "index": "logs-2020-01-01",
                    "node": "Xl9IRaJ6RI-YQRZuBNsbTg",
                    "reason": {
                        "type": "i_o_exception",
                        "reason": "failed to read data from cache"
                    }
                }],
                "caused_by": {
                    "type": "i_o_exception",
                    "reason": "failed to read data from cache",
                }
            },
            "status": 503
        }]
    })

    response = await kibana(es, params=params)

    assert response == {
        "debug": True,
        "hits": 0,
        "took": 80000,
        "weight": 1,
        "unit": "ops",
        "visualisation_count": 1,
        "success": False,
        "error-count": 1,
        "error-type": "kibana",
        "error-description": "HTTP status: 503, message: all shards failed"
    }