def test_query_all_before_insert(translator):
    # Query all
    loaded_entities = translator.query()
    assert len(loaded_entities) == 0

    # Query Some
    loaded_entities = translator.query(entity_type="Lamp",
                                       fiware_service="openiot",
                                       fiware_servicepath="/")
    assert len(loaded_entities) == 0

    # Query one
    loaded_entities = translator.query(entity_id="Lamp:001",
                                       fiware_service="openiot",
                                       fiware_servicepath="/")
    assert len(loaded_entities) == 0


def test_structured_value_to_array(translator):
    entity = {
        'id': '8906',
        'type': 'AirQualityObserved',
        TIME_INDEX_NAME: datetime.now().isoformat(timespec='milliseconds'),
        'aqi': {'type': 'Number', 'value': 43},
        'city': {'type': 'Text', 'value': 'Antwerpen'},
        'h': {'type': 'Number', 'value': 93},
        'location': {
            'type': 'geo:point',
            'value': '51.2056589, 4.4180728',
        },
        'measurand': {
            'type': 'StructuredValue',
            'value': ['pm25, 43, ugm3, PM25',
                      'pm10, 30, ugm3, PM10',
                      'p, 1012, hPa, Pressure']
        },
        'p': {'type': 'Number', 'value': 1012},
        'pm10': {'type': 'Number', 'value': 30},
        'pm25': {'type': 'Number', 'value': 43},
        't': {'type': 'Number', 'value': 8.33}
    }
    translator.insert([entity])

    r = translator.query()
    check_notifications_record([entity], r)


def test_geo_point(translator):
    # Github issue #35: Support geo:point
    entity = {
        'id': 'Room1',
        'type': 'Room',
        TIME_INDEX_NAME: datetime.now().isoformat(timespec='milliseconds'),
        'location': {
            'type': 'geo:point',
            'value': "19.6389474, -98.9109537"  # lat, long
        }
    }
    translator.insert([entity])

    # Check location is saved as a geo_point column in crate
    op = 'select latitude(location), longitude(location) from etroom'
    translator.cursor.execute(op)
    res = translator.cursor.fetchall()
    assert len(res) == 1
    assert res[0] == [19.6389474, -98.9109537]

    entities = translator.query()
    assert len(entities) == 1

    # Check entity is retrieved as it was inserted
    check_notifications_record([entity], entities)


def test_traffic_flow_observed(translator, traffic_flow_observed):
    # Add TIME_INDEX as Reporter would
    now = datetime.now(timezone.utc).isoformat(timespec='milliseconds')
    traffic_flow_observed[TIME_INDEX_NAME] = now

    translator.insert([traffic_flow_observed])

    loaded = translator.query()
    check_notifications_record([traffic_flow_observed], loaded)


def test_attrs_by_id_ambiguity(translator):
    entities = create_random_entities(num_types=2,
                                      num_ids_per_type=1,
                                      num_updates=3)
    for e in entities:
        e['id'] = 'repeated_id'

    translator.insert(entities)

    # OK if specifying type
    loaded_entities = translator.query(entity_type='0',
                                       entity_id='repeated_id')
    assert len(loaded_entities[0]['index']) == 3
    assert len(loaded_entities) == 1

    # NOT OK otherwise
    with pytest.raises(AmbiguousNGSIIdError):
        translator.query(entity_id='repeated_id')


def test_air_quality_observed(translator, air_quality_observed):
    # Add TIME_INDEX as Reporter would
    now = datetime.now().isoformat(timespec='milliseconds')
    air_quality_observed[TIME_INDEX_NAME] = now

    translator.insert([air_quality_observed])

    loaded = translator.query()
    check_notifications_record([air_quality_observed], loaded)


def test_query_multiple_ids_with_invalids(translator):
    # Nonexistent ids should be ignored
    num_updates = 3
    entities = create_random_entities(num_types=2,
                                      num_ids_per_type=4,
                                      num_updates=num_updates)
    translator.insert(entities)
    translator._refresh(['0', '1'])

    loaded_entities = translator.query(entity_type='0',
                                       entity_ids=['nonexistent'])
    assert len(loaded_entities) == 0

    loaded_entities = translator.query(entity_type='0',
                                       entity_ids=['0-1', 'nonexistent'])
    assert len(loaded_entities) == 1 * num_updates


def test_fiware_tenant_reserved_word(translator):
    e = entity("Room1")
    fs = "default"
    fsp = "/"
    translator.insert([e], fiware_service=fs, fiware_servicepath=fsp)

    entities = translator.query(fiware_service=fs, fiware_servicepath=fsp)
    assert len(entities) == 1


def test_fiware_empty_tenant_is_no_tenant(translator):
    # Insert with EMPTY tenant
    e = entity("Room1")
    fs = ""
    fsp = ""
    translator.insert([e], fiware_service=fs, fiware_servicepath=fsp)

    # Query WITHOUT tenant -> get results
    entities = translator.query()
    assert len(entities) == 1

    # Insert WITHOUT tenant
    e = entity("Room2")
    translator.insert([e])

    # Query with EMPTY tenant -> get results
    entities = translator.query(fiware_service=fs, fiware_servicepath=fsp)
    assert len(entities) == 2


def test_fiware_tenant_services(translator):
    # Insert in tenant A
    e = entity("X")
    translator.insert([e], fiware_service="A", fiware_servicepath="/")

    # Insert in tenant B
    e = entity("Y")
    translator.insert([e], fiware_service="B", fiware_servicepath="/")

    # Query tenant A
    entities = translator.query(fiware_service="A", fiware_servicepath="/")
    assert len(entities) == 1
    assert entities[0]['id'] == "X"

    # Query tenant B
    entities = translator.query(fiware_service="B", fiware_servicepath="/")
    assert len(entities) == 1
    assert entities[0]['id'] == "Y"


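# The `entity(...)` builder used by the three tenant tests above is provided by
# the test fixtures and is not defined in this module. The sketch below only
# illustrates the kind of payload it is assumed to return; the helper name and
# the sample attribute are hypothetical, not the project's actual fixture.
def _sketch_tenant_entity(entity_id):
    return {
        'id': entity_id,
        'type': 'Room',  # assumed entity type
        TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(
            timespec='milliseconds'),
        'temperature': {'type': 'Number', 'value': 21},  # assumed sample attr
    }

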
def test_lastN_ordering(translator):
    entities = create_random_entities(num_updates=5)
    result = translator.insert(entities)
    assert result.rowcount > 0

    loaded_entities = translator.query(last_n=3)
    index = loaded_entities[0]['index']
    assert len(index) == 3
    assert index[-1] > index[0]


def test_geo_point_null_values(translator):
    # Github PR #198: Support geo:point null values
    entity = {
        'id': 'Room1',
        'type': 'Room',
        TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(
            timespec='milliseconds'),
        'location': {
            'type': 'geo:point',
            'value': "19.6389474, -98.9109537"  # lat, long
        }
    }
    translator.insert([entity])

    entities = translator.query()
    assert len(entities) == 1
    check_notifications_record([entity], entities)

    entity_new = {
        'id': 'Room1',
        'type': 'Room',
        TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(
            timespec='milliseconds'),
        'temperature': {
            'type': 'Number',
            'value': 19
        }
    }
    translator.insert([entity_new])

    entities = translator.query()
    assert len(entities) == 1

    # Check location's None is saved as a geo_point column in crate
    op = "select latitude(_doc['location']), longitude(_doc['location']), " \
         "temperature from etroom order by time_index ASC"
    translator.cursor.execute(op)
    res = translator.cursor.fetchall()
    assert len(res) == 2
    assert res[0] == [19.6389474, -98.9109537, None]
    assert res[1] == [None, None, 19]

    translator.clean()


def test_query_multiple_ids_bak(translator):
    # Should not break old usage of one single entity_id
    num_updates = 3
    entities = create_random_entities(num_types=2,
                                      num_ids_per_type=4,
                                      num_updates=num_updates)
    translator.insert(entities)

    records = translator.query(entity_type='0', entity_ids=['0-1'])
    assert len(records) == 1
    assert records[0]['id'] == '0-1'


def test_traffic_flow_observed(translator, traffic_flow_observed):
    # Add TIME_INDEX as Reporter would
    now = datetime.now().isoformat(timespec='microseconds')
    traffic_flow_observed[TIME_INDEX_NAME] = now

    result = translator.insert([traffic_flow_observed])
    assert result.rowcount > 0

    translator._refresh([traffic_flow_observed['type']])
    loaded = translator.query()
    assert len(loaded) > 0
    assert_ngsi_entity_equals(traffic_flow_observed, loaded[0])


def test_query_multiple_ids_bak(translator):
    # Should not break old usage of one single entity_id
    num_updates = 3
    entities = create_random_entities(num_types=2,
                                      num_ids_per_type=4,
                                      num_updates=num_updates)
    translator.insert(entities)
    translator._refresh(['0', '1'])

    loaded_entities = translator.query(entity_type='0', entity_ids=['0-1'])
    assert len(loaded_entities) == 1 * num_updates
    assert all([e['id'] == '0-1' for e in loaded_entities])


def test_insert_entity(translator, entity):
    now = datetime.now(timezone.utc)
    now_iso = now.isoformat(timespec='milliseconds')
    entity[BaseTranslator.TIME_INDEX_NAME] = now_iso

    result = translator.insert([entity])
    assert result.rowcount != 0

    loaded_entities = translator.query()
    assert len(loaded_entities) == 1
    check_notifications_record([entity], loaded_entities)


def test_delete_entities_defaults(translator):
    entities = create_random_entities(num_types=3,
                                      num_ids_per_type=2,
                                      num_updates=20)
    translator.insert(entities)

    type_to_delete = entities[0]['type']
    res = translator.delete_entities(type_to_delete)
    assert res == 20 * 2

    remaining = translator.query()
    assert len(remaining) == (3 - 1) * 2
    assert all([r['type'] != type_to_delete for r in remaining])


def test_unsupported_ngsi_type(translator):
    e = {
        "type": "SoMeWeIrDtYpE",
        "id": "sOmEwEiRdId",
        TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(
            timespec='milliseconds'),
        "foo": {
            "type": "IgnoreThisDefinitivelyNotValidNGSITypeMessage",
            "value": "BaR",
        },
    }
    translator.insert([e])

    entities = translator.query()
    check_notifications_record([e], entities)


def test_capitals(translator):
    entity_type = "SoMeWeIrDtYpE"
    e1 = {
        "type": entity_type,
        "id": "sOmEwEiRdId",
        TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(
            timespec='milliseconds'),
        "Foo": {
            "type": "Text",
            "value": "FoO",
        },
        "bAr": {
            "type": "Text",
            "value": "bAr",
        },
    }
    translator.insert([e1])

    entities = translator.query()
    assert len(entities) == 1
    check_notifications_record([e1], entities)

    # If a new attribute comes later, I want it translated as well.
    e2 = e1.copy()
    e2['id'] = 'SOmEwEiRdId2'
    e2['NewAttr'] = {"type": "Text", "value": "NewAttrValue!"}
    e2[TIME_INDEX_NAME] = datetime.now(timezone.utc).isoformat(
        timespec='milliseconds')
    translator.insert([e2])

    entities = translator.query()
    assert len(entities) == 2

    assert entities[0]['id'] == e2['id']
    assert entities[0]['NewAttr']['values'] == [e2['NewAttr']['value']]

    # Note that old entity gets None for the new attribute
    assert entities[1]['id'] == e1['id']
    assert entities[1]['NewAttr']['values'] == [None]


def test_insert_same_entity_with_different_attrs(translator,
                                                 sameEntityWithDifferentAttrs):
    """
    Test that the CrateTranslator can insert entity updates that are of the
    same type but have different attributes.
    """
    # Add time index to the updates. Use the dateModified metadata attribute
    # of temperature.
    for entity in sameEntityWithDifferentAttrs:
        entity[BaseTranslator.TIME_INDEX_NAME] = \
            entity['temperature']['metadata']['dateModified']['value']

    result = translator.insert(sameEntityWithDifferentAttrs)
    assert result.rowcount != 0

    loaded_entities = translator.query()
    assert len(loaded_entities) == 1
    check_notifications_record(sameEntityWithDifferentAttrs, loaded_entities)


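# `sameEntityWithDifferentAttrs` is a fixture defined outside this module. The
# helper below sketches the shape the test above assumes: two updates of the
# same entity carrying different attribute sets, each with a temperature whose
# dateModified metadata supplies the time index. Ids, values and timestamps
# are hypothetical, not the project's actual fixture data.
def _sketch_same_entity_with_different_attrs():
    def temperature(value, modified):
        return {
            'type': 'Number',
            'value': value,
            'metadata': {
                'dateModified': {'type': 'DateTime', 'value': modified}
            }
        }
    return [
        {
            'id': 'Room:1',
            'type': 'Room',
            'temperature': temperature(22, '2018-01-01T00:00:00.000Z'),
        },
        {
            'id': 'Room:1',
            'type': 'Room',
            'pressure': {'type': 'Number', 'value': 720},
            'temperature': temperature(23, '2018-01-01T00:01:00.000Z'),
        },
    ]

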
def test_query_all(translator):
    entities = create_random_entities(2, 2, 2, use_time=True, use_geo=True)
    result = translator.insert(entities)
    assert result.rowcount > 0

    translator._refresh(['0', '1'])
    loaded_entities = translator.query()
    assert len(loaded_entities) == len(entities)

    key = lambda e: e[BaseTranslator.TIME_INDEX_NAME]
    a = sorted(entities, key=key)
    b = sorted(loaded_entities, key=key)
    for e, le in zip(a, b):
        assert_ngsi_entity_equals(e, le)


def test_capitals(translator):
    entity_type = "SoMeWeIrDtYpE"
    e = {
        "type": entity_type,
        "id": "sOmEwEiRdId",
        TIME_INDEX_NAME: datetime.now().isoformat(timespec='microseconds'),
        "Foo": {
            "type": "Text",
            "value": "FoO",
        },
        "bAr": {
            "type": "Text",
            "value": "bAr",
        },
    }
    translator.insert([e])
    translator._refresh([entity_type])

    entities = translator.query()
    assert len(entities) == 1
    assert_ngsi_entity_equals(e, entities[0])

    # If a new attribute comes later, I want it translated as well.
    e2 = e.copy()
    e2['id'] = 'SOmEwEiRdId2'
    e2['NewAttr'] = {"type": "Text", "value": "NewAttrValue!"}
    e2[TIME_INDEX_NAME] = datetime.now().isoformat(timespec='microseconds')
    translator.insert([e2])
    translator._refresh([entity_type])

    entities = translator.query()
    assert len(entities) == 2
    assert_ngsi_entity_equals(e2, entities[1])

    # Note that old entity gets None for the new attribute
    e['NewAttr'] = {'type': 'Text', 'value': None}
    assert_ngsi_entity_equals(e, entities[0])


def test_no_time_index(translator):
    """
    The Reporter is responsible for injecting the 'time_index' attribute into
    the entity. If for some reason there's no such index, the translator will
    add one with current_time.
    """
    e = {
        'id': 'entityId1',
        'type': 'type1',
        'foo': {'type': 'Text', 'value': "SomeText"}
    }
    translator.insert([e])

    records = translator.query()
    assert len(records) == 1
    assert len(records[0]['index']) == 1


def test_unsupported_ngsi_type(translator):
    e = {
        "type": "SoMeWeIrDtYpE",
        "id": "sOmEwEiRdId",
        TIME_INDEX_NAME: datetime.now().isoformat(timespec='microseconds'),
        "foo": {
            "type": "DefinitivelyNotAValidNGSIType",
            "value": "BaR",
        },
    }
    translator.insert([e])
    translator._refresh([e['type']])

    entities = translator.query()
    assert len(entities) == 1
    assert_ngsi_entity_equals(e, entities[0])


def test_insert_entity(translator, entity):
    now = datetime.now().isoformat(timespec='microseconds')
    entity[BaseTranslator.TIME_INDEX_NAME] = now

    result = translator.insert([entity])
    assert result.rowcount == 1

    translator._refresh([entity['type']])
    loaded_entity = translator.query()

    # These 2 can be ignored when empty. TODO: #12 Support attribute metadata
    entity['temperature'].pop('metadata')
    entity['pressure'].pop('metadata')

    assert_ngsi_entity_equals(entity, loaded_entity[0])


def test_missing_type_defaults_to_string(translator):
    e = {
        "type": "SoMeWeIrDtYpE",
        "id": "sOmEwEiRdId",
        TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(
            timespec='milliseconds'),
        "foo": {
            "value": "BaR",
        },
    }
    translator.insert([e])

    entities = translator.query()
    assert len(entities) == 1

    # Response will include the type
    e["foo"]["type"] = NGSI_TEXT
    check_notifications_record([e], entities)


def test_missing_type_defaults_string(translator):
    e = {
        "type": "SoMeWeIrDtYpE",
        "id": "sOmEwEiRdId",
        TIME_INDEX_NAME: datetime.now().isoformat(timespec='microseconds'),
        "foo": {
            "value": "BaR",
        },
    }
    translator.insert([e])
    translator._refresh([e['type']])

    entities = translator.query()
    assert len(entities) == 1

    # Response will include the type
    e["foo"]["type"] = NGSI_TEXT
    assert_ngsi_entity_equals(e, entities[0])


def test_long_json(translator):
    # Github issue 44
    big_entity = {
        'id': 'entityId1',
        'type': 'type1',
        TIME_INDEX_NAME: datetime.now(timezone.utc).isoformat(
            timespec='milliseconds'),
        'foo': {
            'type': 'Text',
            'value': "SomeTextThatWillGetLong" * 2000
        }
    }
    translator.insert([big_entity])

    r = translator.query()
    assert len(r) == 1
    check_notifications_record([big_entity], r)


def test_long_json(translator):
    # Github issue 44
    big_entity = {
        'id': 'entityId1',
        'type': 'type1',
        TIME_INDEX_NAME: datetime.now().isoformat(timespec='microseconds'),
        'foo': {
            'type': 'Text',
            'value': "SomeTextThatWillGetLong" * 2000
        }
    }
    translator.insert([big_entity])
    translator._refresh([big_entity['type']])

    r = translator.query()
    assert len(r) == 1
    assert_ngsi_entity_equals(big_entity, r[0])


def test_no_time_index(translator):
    """
    The Reporter is responsible for injecting the 'time_index' attribute into
    the entity, but even if for some reason the attribute is missing, the
    insertion should still succeed.
    """
    e = {
        'id': 'entityId1',
        'type': 'type1',
        'foo': {
            'type': 'Text',
            'value': "SomeText"
        }
    }
    translator.insert([e])
    translator._refresh([e['type']])

    assert len(translator.query()) == 1