def test_paging(es_client):
    """Verify that limit/page parameters slice the result set correctly."""
    populate_es_test_case_1(es_client)
    # Force an index refresh so the freshly indexed documents are
    # immediately searchable, instead of sleeping and hoping ES has
    # caught up (deterministic and much faster than time.sleep).
    es_client.indices.refresh(index="stac")

    # First page: one item per page, two total hits.
    res = stac_search(es_client=es_client, limit=1, page=1).execute()
    assert len(res) == 1
    assert res["hits"]["total"]["value"] == 2
    assert res[0]["id"] == "CBERS_4_AWFI_20170409_167_123_L4"

    # Second page holds the remaining item; total count is unchanged.
    res = stac_search(es_client=es_client, limit=1, page=2).execute()
    assert len(res) == 1
    assert res["hits"]["total"]["value"] == 2
    assert res[0]["id"] == "CBERS_4_MUX_20170528_090_084_L2"

    # Past the last page: no items returned but total still reports 2.
    res = stac_search(es_client=es_client, limit=1, page=3).execute()
    assert len(res) == 0
    assert res["hits"]["total"]["value"] == 2
def test_feature_filter_search(es_client):
    """Verify filtering of search results by feature (item) id."""
    populate_es_test_case_1(es_client)
    # Force an index refresh so the freshly indexed documents are
    # immediately searchable, instead of sleeping and hoping ES has
    # caught up (deterministic and much faster than time.sleep).
    es_client.indices.refresh(index="stac")
    assert es_client.exists(index="stac",
                            doc_type="_doc",
                            id="CBERS_4_MUX_20170528_090_084_L2")
    assert es_client.exists(index="stac",
                            doc_type="_doc",
                            id="CBERS_4_AWFI_20170409_167_123_L4")
    empty_query = stac_search(es_client=es_client)

    # Filtering on a known id returns exactly that single MUX item.
    q_dsl = process_feature_filter(
        dsl_query=empty_query,
        feature_ids=["CBERS_4_MUX_20170528_090_084_L2"])
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["id"] == "CBERS_4_MUX_20170528_090_084_L2"

    # Unknown feature id, should return no items.
    q_dsl = process_feature_filter(dsl_query=empty_query, feature_ids=["NOID"])
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 0
def test_collection_filter_search(es_client):
    """Verify filtering of search results by collection id(s)."""
    populate_es_test_case_1(es_client)
    # Force an index refresh so the freshly indexed documents are
    # immediately searchable, instead of sleeping and hoping ES has
    # caught up (deterministic and much faster than time.sleep).
    es_client.indices.refresh(index="stac")
    assert es_client.exists(index="stac",
                            doc_type="_doc",
                            id="CBERS_4_MUX_20170528_090_084_L2")
    assert es_client.exists(index="stac",
                            doc_type="_doc",
                            id="CBERS_4_AWFI_20170409_167_123_L4")
    empty_query = stac_search(es_client=es_client)

    # Only items in MUX collection
    q_dsl = process_collections_filter(dsl_query=empty_query,
                                       collections=["CBERS4-MUX"])
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["instruments"][0] == "MUX"

    # Only items in AWFI collection
    q_dsl = process_collections_filter(dsl_query=empty_query,
                                       collections=["CBERS4-AWFI"])
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["instruments"][0] == "AWFI"

    # Unknown collection, should return no items
    q_dsl = process_collections_filter(dsl_query=empty_query,
                                       collections=["NOCOLLECTIONS"])
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 0

    # Two collections, should return both items
    q_dsl = process_collections_filter(
        dsl_query=empty_query, collections=["CBERS4-MUX", "CBERS4-AWFI"])
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 2

    # One known and one unknown collection, only the MUX item is returned
    q_dsl = process_collections_filter(
        dsl_query=empty_query, collections=["CBERS4-MUX", "NOCOLLECTION"])
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["instruments"][0] == "MUX"
def test_process_intersects_filter(es_client):
    """Verify geometry intersection filtering of search results."""
    populate_es_test_case_1(es_client)
    # Force an index refresh so the freshly indexed documents are
    # immediately searchable, instead of sleeping and hoping ES has
    # caught up (deterministic and much faster than time.sleep).
    es_client.indices.refresh(index="stac")
    empty_query = stac_search(es_client=es_client)

    # Whole-world polygon: every indexed item intersects it.
    geometry = {
        "type": "Feature",
        "properties": {},
        "geometry": {
            "type": "Polygon",
            "coordinates": [[
                [-180.0, -90.0],
                [-180.0, 90.0],
                [180.0, 90.0],
                [180.0, -90.0],
                [-180.0, -90.0],
            ]],
        },
    }
    q_dsl = process_intersects_filter(dsl_query=empty_query,
                                      geometry=geometry)
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 2

    # Shrink the polygon so it intersects a single (MUX) scene only.
    geometry["geometry"]["coordinates"] = [[[23, 13], [25, 13], [25, 15],
                                            [23, 15], [23, 13]]]
    q_dsl = process_intersects_filter(dsl_query=empty_query,
                                      geometry=geometry)
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["instruments"][0] == "MUX"
def test_query_extension_search(es_client):  # pylint: disable=too-many-statements
    """Verify STAC query extension operators (eq, neq, ranges, string ops)."""
    populate_es_test_case_1(es_client)
    # Force an index refresh so the freshly indexed documents are
    # immediately searchable, instead of sleeping and hoping ES has
    # caught up (deterministic and much faster than time.sleep).
    es_client.indices.refresh(index="stac")
    empty_query = stac_search(es_client=es_client)

    # Empty query params leave the query as match_all.
    q_dsl = process_query_extension(dsl_query=empty_query, query_params={})
    assert q_dsl.to_dict()["query"] == {"match_all": {}}

    # eq operator: both conditions become "match" clauses under bool/must.
    q_payload = {
        "instruments": {
            "eq": '["MUX"]'
        },
        "cbers:data_type": {
            "eq": "L2"
        }
    }
    q_dsl = process_query_extension(dsl_query=empty_query,
                                    query_params=q_payload)
    assert q_dsl.to_dict()["query"] == {
        "bool": {
            "must": [
                {
                    "match": {
                        "properties.instruments": '["MUX"]'
                    }
                },
                {
                    "match": {
                        "properties.cbers:data_type": "L2"
                    }
                },
            ]
        }
    }
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:data_type"] == "L2"

    # neq and eq operators combined: neq maps to bool/must_not.
    q_payload = {
        "instruments": {
            "eq": '["MUX"]'
        },
        "cbers:data_type": {
            "neq": "L4"
        }
    }
    q_dsl = process_query_extension(dsl_query=empty_query,
                                    query_params=q_payload)
    assert q_dsl.to_dict()["query"] == {
        "bool": {
            "must_not": [{
                "match": {
                    "properties.cbers:data_type": "L4"
                }
            }],
            "must": [{
                "match": {
                    "properties.instruments": '["MUX"]'
                }
            }],
        }
    }
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:data_type"] == "L2"

    # gte/lte operators map to "range" clauses; gte==lte==90 matches the
    # single item whose path is exactly 90.
    q_payload = {"cbers:path": {"gte": 90, "lte": 90}}
    q_dsl = process_query_extension(dsl_query=empty_query,
                                    query_params=q_payload)
    assert q_dsl.to_dict()["query"] == {
        "bool": {
            "must": [
                {
                    "range": {
                        "properties.cbers:path": {
                            "gte": 90
                        }
                    }
                },
                {
                    "range": {
                        "properties.cbers:path": {
                            "lte": 90
                        }
                    }
                },
            ]
        }
    }
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:path"] == 90

    # Strict gt/lt around the same value excludes the item.
    q_payload = {"cbers:path": {"gt": 90, "lt": 90}}
    q_dsl = process_query_extension(dsl_query=empty_query,
                                    query_params=q_payload)
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 0

    # startsWith operator: only the result set is asserted here, not the
    # generated DSL form.
    q_payload = {
        "cbers:data_type": {
            "startsWith": "L"
        },
        "instruments": {
            "eq": '["MUX"]'
        },
    }
    q_dsl = process_query_extension(dsl_query=empty_query,
                                    query_params=q_payload)
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:data_type"] == "L2"
    assert res[0].to_dict()["properties"]["instruments"] == ["MUX"]

    # endsWith and contains operators.
    q_payload = {
        "cbers:data_type": {
            "endsWith": "2"
        },
        "instruments": {
            "contains": "U"
        }
    }
    q_dsl = process_query_extension(dsl_query=empty_query,
                                    query_params=q_payload)
    res = q_dsl.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:data_type"] == "L2"
    assert res[0].to_dict()["properties"]["instruments"] == ["MUX"]
def test_basic_search(es_client):
    """Verify basic search: limits, date ranges, bbox, and eq queries."""
    populate_es_test_case_1(es_client)
    # Force an index refresh so the freshly indexed documents are
    # immediately searchable, instead of sleeping and hoping ES has
    # caught up (deterministic and much faster than time.sleep).
    es_client.indices.refresh(index="stac")

    # All items are returned for an unconstrained query.
    res = stac_search(es_client=es_client).execute()
    assert res["hits"]["total"]["value"] == 2
    assert len(res) == 2

    # Limit caps the number of returned hits, not the total count.
    res = stac_search(es_client=es_client, limit=1).execute()
    assert res["hits"]["total"]["value"] == 2
    assert len(res) == 1

    # Single item depending on date range.
    res = stac_search(es_client=es_client,
                      start_date="2017-05-28T00:00:00.000").execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0]["id"] == "CBERS_4_MUX_20170528_090_084_L2"

    res = stac_search(es_client=es_client,
                      end_date="2017-04-10T00:00:00.000").execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0]["id"] == "CBERS_4_AWFI_20170409_167_123_L4"

    # Geo search: a point-sized bbox selects the MUX scene only.
    res = stac_search(
        es_client=es_client,
        start_date="2010-04-10T00:00:00.000",
        end_date="2018-04-10T00:00:00.000",
        bbox=[[24.13, 14.34], [24.13, 14.34]],
    )
    res = res.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0]["id"] == "CBERS_4_MUX_20170528_090_084_L2"

    # Geo search for the whole envelope; this was raising an error
    # after the update to ES 7.7, so keep it covered.
    res = stac_search(
        es_client=es_client,
        start_date=None,
        end_date=None,
        bbox=[[-180.0, 90.0], [180.0, -90.0]],
    )
    res = res.execute()
    assert res["hits"]["total"]["value"] == 2

    # Query extension (eq operator only); empty params return all items.
    empty_query = stac_search(es_client=es_client)
    res = process_query_extension(dsl_query=empty_query,
                                  query_params={}).execute()
    assert res["hits"]["total"]["value"] == 2

    query = process_query_extension(
        dsl_query=empty_query,
        query_params={"cbers:data_type": {
            "eq": "L2"
        }})
    res = query.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:data_type"] == "L2"

    query = process_query_extension(
        dsl_query=empty_query,
        query_params={"cbers:data_type": {
            "eq": "L4"
        }})
    res = query.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:data_type"] == "L4"

    # eq also works on numeric properties.
    query = process_query_extension(dsl_query=empty_query,
                                    query_params={"cbers:path": {
                                        "eq": 90
                                    }})
    res = query.execute()
    assert res["hits"]["total"]["value"] == 1
    assert res[0].to_dict()["properties"]["cbers:path"] == 90