Code Example #1
def test_no_type_not_unique(service):
    # If id is not unique across types, you must specify type.

    etype_1, etype_2 = 'RoomDevice', 'Car'
    etypes = [etype_1, etype_2]
    # The reporter_dataset fixture is still in the DB because it has module
    # scope. We use different entity types to store this test's rows in
    # different tables, to avoid messing up global state; see also the
    # delete call below.
    shared_entity_id = "sharedId"

    insert_test_data(service,
                     etypes,
                     n_entities=2,
                     index_size=2,
                     entity_id=shared_entity_id)

    url = "{qlUrl}/entities/{entityId}/attrs/temperature".format(
        qlUrl=QL_URL,
        entityId=shared_entity_id,
    )

    h = {'Fiware-Service': service}

    # With type
    r = requests.get(url, params={'type': etype_1}, headers=h)
    assert r.status_code == 200, r.text

    # Without type
    r = requests.get(url, params={}, headers=h)
    assert r.status_code == 400, r.text
    e = AmbiguousNGSIIdError(shared_entity_id)
    assert r.json() == {"error": "{}".format(type(e)), "description": str(e)}
    delete_test_data(service, etypes)
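
Note: the tests in this listing rely on shared helpers (insert_test_data, delete_test_data, query_url, wait_for_insert, the assert_*_response functions) whose definitions are not shown. The sketch below reconstructs plausible signatures from the call sites alone; the parameter defaults and bodies are assumptions, not the real suite's code.

# Hedged sketch: signatures inferred from usage in this listing only.
def insert_test_data(service, entity_types, n_entities=1, index_size=1,
                     entity_id=None, index_base=None, index_period=None,
                     service_path=None):
    # Assumed behavior: for each type, insert n_entities entities with
    # index_size time-indexed rows each, starting at index_base and spaced
    # by index_period. Default entity ids appear to follow f"{etype}{n}".
    ...


def delete_test_data(service, entity_types, service_path=None):
    # Assumed behavior: drop the rows/tables created by insert_test_data.
    ...


def query_url(eid=None, etype=None):
    # Assumed to build the QuantumLeap endpoint under test from QL_URL;
    # the exact path varies by test module (1T1E1A, 1TNE1A, NTNENA, ...).
    ...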
Code Example #2
def test_no_type(service):
    """
    Specifying entity type is optional, provided that id is unique.
    """

    etype_1, etype_2 = 'test_no_type_RoomDevice', 'test_no_type_Car'
    etypes = [etype_1, etype_2]
    eid = "{}1".format(etype_1)
    # The reporter_dataset fixture is still in the DB because it has module
    # scope. We use different entity types to store this test's rows in
    # different tables, to avoid messing up global state; see also the
    # delete call below.
    insert_test_data(service, etypes, n_entities=2, index_size=2)
    wait_for_insert(etypes, service, 2 * 2)

    h = {'Fiware-Service': service}

    # With type
    r = requests.get(query_url(eid=eid), params={'type': etype_1}, headers=h)
    assert r.status_code == 200, r.text
    res_with_type = r.json()

    # Without type
    r = requests.get(query_url(eid=eid), params={}, headers=h)
    assert r.status_code == 200, r.text
    res_without_type = r.json()

    assert res_with_type == res_without_type
    delete_test_data(service, etypes)
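
Several examples call wait_for_insert after inserting because writes land asynchronously. A minimal polling sketch, assuming the helper blocks until the expected number of rows is queryable; count_rows is a hypothetical stand-in for however the real helper checks:

import time


def wait_for_insert(entity_types, service, expected_rows, timeout=30):
    # Hypothetical sketch, not the real helper: poll until the rows written
    # by insert_test_data become visible, or fail after `timeout` seconds.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if count_rows(entity_types, service) >= expected_rows:  # assumed helper
            return
        time.sleep(0.5)
    raise TimeoutError('rows were not inserted in time')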
Code Example #3
def test_1T1ENA_aggrPeriod(service, aggr_period, exp_index, ins_period):
    # Custom index to test aggrPeriod

    etype = f"test_1T1ENA_aggrPeriod_{aggr_period}"
    # The reporter_dataset fixture is still in the DB because it has module
    # scope. We use a different entity type to store this test's rows in a
    # different table, to avoid messing up global state; see also the delete
    # call below.
    eid = "{}0".format(etype)

    for i in exp_index:
        base = dateutil.parser.isoparse(i)
        insert_test_data(service, [etype],
                         entity_id=eid,
                         index_size=3,
                         index_base=base,
                         index_period=ins_period)

    wait_for_insert([etype], service, 3 * len(exp_index))

    # aggrPeriod needs aggrMethod
    query_params = {
        'type': etype,
        'aggrPeriod': aggr_period,
    }
    h = {'Fiware-Service': service}

    r = requests.get(query_url(eid=eid), params=query_params, headers=h)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'type': etype,
        'attrs': temperature + ',' + pressure,
        'aggrMethod': 'max',
        'aggrPeriod': aggr_period,
    }
    h = {'Fiware-Service': service}

    r = requests.get(query_url(eid=eid), params=query_params, headers=h)
    assert r.status_code == 200, r.text
    # Assert Results
    expected = {
        'entityId': eid,
        'entityType': etype,
        'index': exp_index,
        'attributes': [{
            'attrName': pressure,
            'values': [20., 20., 20.],
        }, {
            'attrName': temperature,
            'values': [2., 2., 2.],
        }]
    }
    obtained = r.json()
    assert_1T1ENA_response(obtained, expected)
    delete_test_data(service, [etype])
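
The aggr_period, exp_index and ins_period arguments of the aggrPeriod tests arrive via parametrization. A sketch of the kind of pytest.mark.parametrize setup that would feed them; the values are illustrative only, chosen so that each batch of finer-grained rows collapses into exactly one aggrPeriod bucket per exp_index entry:

import pytest


@pytest.mark.parametrize('aggr_period, exp_index, ins_period', [
    # Illustrative values, not taken from the real suite.
    ('day', ['1970-01-01T00:00:00.000',
             '1970-01-02T00:00:00.000',
             '1970-01-03T00:00:00.000'], 'hour'),
    ('hour', ['1970-01-01T00:00:00.000',
              '1970-01-01T01:00:00.000',
              '1970-01-01T02:00:00.000'], 'minute'),
])
def test_1T1ENA_aggrPeriod(service, aggr_period, exp_index, ins_period):
    ...  # body as in Code Example #3 above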
Code Example #4
@pytest.fixture(scope="module")
def reporter_dataset():
    for service in services:
        insert_test_data(service, [entity_type], n_entities=3,
                         index_size=n_days)
    yield
    for service in services:
        delete_test_data(service, [entity_type])
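
The module-scope comments in the tests refer to fixtures like the one above. A minimal usage sketch, assuming the @pytest.fixture(scope="module") declaration shown; the test body is hypothetical:

def test_uses_shared_dataset(reporter_dataset):
    # Requesting the fixture by name is enough: the dataset is inserted once
    # per module, shared by every test that asks for it, and cleaned up by
    # the code after `yield` when the module finishes.
    ...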
Code Example #5
@pytest.fixture(scope="module")
def reporter_dataset():
    for service in services:
        insert_test_data(service, [entity_type], n_entities=1, index_size=30)
    time.sleep(SLEEP_TIME)
    yield
    for service in services:
        delete_test_data(service, [entity_type])
Code Example #6
def test_aggregation_is_per_instance(translator):
    """
    Attribute Aggregation works by default on a per-instance basis.
    Cross-instance aggregation not yet supported.
    It would change the shape of the response.
    """
    t = 'Room'
    insert_test_data(translator, [t], entity_id='Room0', index_size=3)
    insert_test_data(translator, [t], entity_id='Room1', index_size=9)

    query_params = {'type': t, 'id': 'Room0,Room1', 'aggrMethod': 'sum'}
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 200, r.text

    # Assert Results
    expected_entities = [{
        'entityId': 'Room0',
        'index': ['', ''],
        'values': [sum(range(3))],
    }, {
        'entityId': 'Room1',
        'index': ['', ''],
        'values': [sum(range(9))],
    }]

    obtained_data = r.json()
    assert isinstance(obtained_data, dict)
    assert obtained_data['entityType'] == t
    assert obtained_data['attrName'] == attr_name
    assert obtained_data['entities'] == expected_entities

    # Index array in the response is the used fromDate and toDate
    query_params = {
        'type': t,
        'id': 'Room0,Room1',
        'aggrMethod': 'max',
        'fromDate': datetime(1970, 1, 1).isoformat(),
        'toDate': datetime(1970, 1, 6).isoformat(),
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 200, r.text

    # Assert Results
    expected_entities = [{
        'entityId': 'Room0',
        'index': ['1970-01-01T00:00:00', '1970-01-06T00:00:00'],
        'values': [2],
    }, {
        'entityId': 'Room1',
        'index': ['1970-01-01T00:00:00', '1970-01-06T00:00:00'],
        'values': [5],
    }]
    obtained_data = r.json()
    assert isinstance(obtained_data, dict)
    assert obtained_data['entityType'] == t
    assert obtained_data['attrName'] == attr_name
    assert obtained_data['entities'] == expected_entities
Code Example #7
def test_NTNENA_aggrPeriod(service, aggr_period, exp_index, ins_period):
    etype = 'test_NTNENA_aggrPeriod'
    # The reporter_dataset fixture is still in the DB because it has module
    # scope. We use a different entity type to store this test's rows in a
    # different table, to avoid messing up global state; see also the delete
    # call below.
    eid = "{}0".format(etype)

    # Custom index to test aggrPeriod
    for i in exp_index:
        base = dateutil.parser.isoparse(i)
        insert_test_data(service, [etype],
                         index_size=5,
                         index_base=base,
                         index_period=ins_period)

    wait_for_insert([etype], service, 5 * len(exp_index))

    # aggrPeriod needs aggrMethod
    query_params = {
        'aggrPeriod': aggr_period,
    }
    r = query(params=query_params, service=service)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'type': etype,
        'attrs': 'temperature',
        'aggrMethod': 'sum',
        'aggrPeriod': aggr_period,
    }
    r = query(params=query_params, service=service)
    # Assert
    assert r.status_code == 200, r.text
    obtained = r.json()

    delete_test_data(service, [etype])

    expected_temperatures = 0 + 1 + 2 + 3 + 4
    expected_entities = [{
        'entityId': eid,
        'index': exp_index,
        'values': [expected_temperatures] * 3,
    }]
    expected_types = [{'entities': expected_entities, 'entityType': etype}]
    expected_attrs = [{'attrName': 'temperature', 'types': expected_types}]

    expected = {'attrs': expected_attrs}

    assert obtained == expected
Code Example #8
@pytest.fixture(scope="module")
def reporter_dataset():
    for service in services:
        insert_test_data(service, [entity_type], n_entities=1, index_size=30,
                         entity_id=entity_id)
        insert_test_data(service, [entity_type_1], n_entities=1, index_size=30,
                         entity_id=entity_id_1)
    yield
    for service in services:
        delete_test_data(service, [entity_type, entity_type_1])
Code Example #9
@pytest.fixture(scope="module")
def reporter_dataset():
    for service in services:
        insert_test_data(service, [entity_type], n_entities=1, index_size=30)
    for service in services:
        wait_for_insert([entity_type], service, 30)

    yield

    for service in services:
        delete_test_data(service, [entity_type])
Code Example #10
@pytest.fixture
def reporter_dataset(translator):
    insert_test_data(translator, [entity_type],
                     n_entities=1,
                     index_size=4,
                     entity_id=entity_id)
    insert_test_data(translator, [entity_type],
                     n_entities=1,
                     index_size=4,
                     entity_id=entity_id_1)
    yield
Code Example #11
@pytest.fixture(scope="module")
def reporter_dataset():
    for service in services:
        insert_test_data(service, [entity_type],
                         n_entities=3,
                         entity_id=entity_id,
                         index_size=n_days)
        wait_for_insert([entity_type], service, 3 * n_days)
    yield
    for service in services:
        delete_test_data(service, [entity_type])
Code Example #12
@pytest.fixture(scope="module")
def reporter_dataset():
    service = ''
    entity_type = result_gen.formatter.entity_type
    sz = result_gen.time_index_size
    insert_test_data(service, [entity_type], n_entities=1,
                     index_size=sz, entity_id=entity_id_1)
    insert_test_data(service, [entity_type], n_entities=1,
                     index_size=sz, entity_id=entity_id_2)
    yield
    delete_test_data(service, [entity_type])
Code Example #13
@pytest.fixture
def reporter_dataset(translator):
    insert_test_data(translator, [entity_type],
                     n_entities=1,
                     index_size=30,
                     entity_id=entity_id)
    insert_test_data(translator, [entity_type_1],
                     n_entities=1,
                     index_size=30,
                     entity_id=entity_id_1,
                     index_base=datetime(1980, 1, 1, 0, 0, 0, 0))
    yield
Code Example #14
def test_1TNE1A_aggrPeriod(service, aggr_period, exp_index, ins_period):
    # Custom index to test aggrPeriod
    etype = f"test_1TNE1A_aggrPeriod_{aggr_period}"
    # The reporter_dataset fixture is still in the DB because it has module
    # scope. We use a different entity type to store this test's rows in a
    # different table, to avoid messing up global state; see also the delete
    # call below.
    eid = '{}0'.format(etype)

    for i in exp_index:
        base = dateutil.parser.isoparse(i)
        insert_test_data(service,
                         [etype],
                         entity_id=eid,
                         index_size=5,
                         index_base=base,
                         index_period=ins_period)

    wait_for_insert([etype], service, 5 * len(exp_index))

    # aggrPeriod needs aggrMethod
    query_params = {
        'type': etype,
        'aggrPeriod': aggr_period,
    }
    h = {'Fiware-Service': service}

    r = requests.get(query_url(etype=etype), params=query_params, headers=h)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'type': etype,
        'aggrMethod': 'sum',
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(etype=etype), params=query_params, headers=h)
    assert r.status_code == 200, r.text

    # Assert Results
    exp_sum = 0 + 1 + 2 + 3 + 4
    expected_entities = [
        {
            'entityId': eid,
            'index': exp_index,
            'values': [exp_sum, exp_sum, exp_sum],
        }
    ]
    obtained_data = r.json()
    assert isinstance(obtained_data, dict)
    assert obtained_data['entityType'] == etype
    assert obtained_data['attrName'] == attr_name
    assert obtained_data['entities'] == expected_entities
    delete_test_data(service, [etype])
Code Example #15
@pytest.fixture(scope="module")
def reporter_dataset():
    entity_type = result_gen.formatter.entity_type
    sz = result_gen.time_index_size
    for service in services:
        insert_test_data(service, [entity_type], n_entities=1,
                         index_size=sz, entity_id=entity_id_1)
        insert_test_data(service, [entity_type], n_entities=1,
                         index_size=sz, entity_id=entity_id_2)
        wait_for_insert([entity_type], service, sz * 2)
    yield
    for service in services:
        delete_test_data(service, [entity_type])
Code Example #16
def test_1TNENA_aggrPeriod(translator, aggr_period, exp_index, ins_period):
    # Custom index to test aggrPeriod
    for i in exp_index:
        base = datetime.strptime(i, "%Y-%m-%dT%H:%M:%S.%f")
        insert_test_data(translator,
                         [entity_type],
                         index_size=5,
                         index_base=base,
                         index_period=ins_period)

    # aggrPeriod needs aggrMethod
    query_params = {
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'attrs': 'temperature',
        'aggrMethod': 'sum',
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 200, r.text

    # Assert Results
    exp_sum = 0 + 1 + 2 + 3 + 4

    expected_attributes = [
        {
            'attrName': attr_name_1,
            'values': [exp_sum, exp_sum, exp_sum]
        }
    ]

    expected_entities = [
        {
            'attributes': expected_attributes,
            'entityId': 'Room0',
            'index': exp_index
        }
    ]

    expected = {
        'entities': expected_entities,
        'entityType': entity_type
    }

    obtained = r.json()
    assert isinstance(obtained, dict)
    assert_1TNENA_response(obtained, expected)
Code Example #17
@pytest.fixture(scope="module")
def reporter_dataset():
    service = ''
    insert_test_data(service, [entity_type],
                     n_entities=1,
                     index_size=30,
                     entity_id=entity_id)
    insert_test_data(service, [entity_type_1],
                     n_entities=1,
                     index_size=30,
                     entity_id=entity_id_1,
                     index_base=datetime(1980, 1, 1, 0, 0, 0, 0))
    yield
    delete_test_data(service, [entity_type, entity_type_1])
Code Example #18
def test_1T1E1A_aggrPeriod(service, aggr_period, exp_index, ins_period):
    etype = 'test_1T1E1A_aggrPeriod'
    # The reporter_dataset fixture is still in the DB because it has module
    # scope. We use a different entity type to store this test's rows in a
    # different table, to avoid messing up global state; see also the delete
    # call below.
    eid = "{}0".format(etype)

    # Custom index to test aggrPeriod
    for i in exp_index:
        base = dateutil.parser.isoparse(i)
        insert_test_data(service,
                         [etype],
                         entity_id=eid,
                         index_size=4,
                         index_base=base,
                         index_period=ins_period)

    # aggrPeriod needs aggrMethod
    query_params = {
        'type': etype,
        'aggrPeriod': aggr_period,
    }
    h = {'Fiware-Service': service}
    r = requests.get(query_url(eid=eid), params=query_params, headers=h)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'type': etype,
        'aggrMethod': 'avg',
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(eid=eid), params=query_params, headers=h)

    delete_test_data(service, [etype])

    assert r.status_code == 200, r.text

    # Assert Results
    obtained = r.json()
    exp_avg = (0 + 1 + 2 + 3) / 4.
    expected = {
        'entityId': eid,
        'entityType': etype,
        'attrName': attr_name,
        'index': exp_index,
        'values': [exp_avg, exp_avg, exp_avg]
    }
    assert_1T1E1A_response(obtained, expected)
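
The assert_*_response helpers are likewise not shown. A hypothetical minimal version, assuming they do little more than a keyed comparison; the real helpers presumably also normalize time-index formatting:

def assert_1T1E1A_response(obtained, expected):
    # Hypothetical sketch, not the real helper: compare field by field so
    # that a failure message points at the offending key.
    assert isinstance(obtained, dict)
    for key, value in expected.items():
        assert obtained[key] == value, key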
Code Example #19
def test_1T1ENA_aggrPeriod(translator, aggr_period, exp_index, ins_period):
    # Custom index to test aggrPeriod
    for i in exp_index:
        base = datetime.strptime(i, "%Y-%m-%dT%H:%M:%S.%f")
        insert_test_data(translator, [entity_type],
                         index_size=3,
                         index_base=base,
                         index_period=ins_period)

    # aggrPeriod needs aggrMethod
    query_params = {
        'type': entity_type,
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'type': entity_type,
        'attrs': temperature + ',' + pressure,
        'aggrMethod': 'max',
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 200, r.text

    # Assert Results
    expected = {
        'data': {
            'entityId': entity_id,
            'index': exp_index,
            'attributes': [
                {
                    'attrName': pressure,
                    'values': [20., 20., 20.],
                },
                {
                    'attrName': temperature,
                    'values': [2., 2., 2.],
                },
            ]
        }
    }
    obtained = r.json()
    assert_1T1ENA_response(obtained, expected)
Code Example #20
def test_NTNENA_aggrPeriod(translator, aggr_period, exp_index, ins_period):
    # Custom index to test aggrPeriod
    for i in exp_index:
        base = datetime.strptime(i, "%Y-%m-%dT%H:%M:%S.%f")
        insert_test_data(translator, [entity_type],
                         index_size=5,
                         index_base=base,
                         index_period=ins_period)

    # aggrPeriod needs aggrMethod
    query_params = {
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'attrs': 'temperature',
        'aggrMethod': 'sum',
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    # Assert
    assert r.status_code == 200, r.text
    expected_temperatures = 0 + 1 + 2 + 3 + 4
    expected_entities = [{
        'entityId': 'Room0',
        'index': exp_index,
        'values': [expected_temperatures] * 3,
    }]
    expected_types = [{'entities': expected_entities, 'entityType': 'Room'}]
    expected_attrs = [{'attrName': 'temperature', 'types': expected_types}]

    expected = {'attrs': expected_attrs}

    obtained = r.json()
    assert obtained == expected
Code Example #21
def test_none_service():
    service = None
    service_path = None
    alt_service_path = '/notdefault'
    insert_test_data(
        service,
        [entity_type],
        n_entities=1,
        index_size=30,
        service_path=service_path)
    insert_test_data(
        service,
        [entity_type],
        n_entities=1,
        index_size=15,
        service_path=alt_service_path)

    wait_for_insert([entity_type], service, 30 + 15)

    body = {
        'entities': [
            {
                'type': entity_type,
                'id': entity_id
            }
        ],
        'attrs': [
            'temperature',
            'pressure'
        ]
    }

    r = requests.post(query_url,
                      data=json.dumps(body),
                      headers=headers(service, service_path))
    assert r.status_code == 200, r.text
    assert r.json()[0]['temperature']['value'] == 29
    assert len(r.json()) == 1
    r = requests.post(query_url,
                      data=json.dumps(body),
                      headers=headers(service, alt_service_path))
    assert r.status_code == 200, r.text
    assert r.json()[0]['temperature']['value'] == 14
    assert len(r.json()) == 1
    delete_test_data(service, [entity_type], service_path=service_path)
    delete_test_data(service, [entity_type], service_path=alt_service_path)
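
The headers helper builds the FIWARE multi-tenancy headers for the POST queries. A plausible sketch, assuming it maps service and service path onto the standard header names and omits whatever is None, which is what lets test_none_service exercise the no-tenant case:

def headers(service=None, service_path=None):
    # Hedged sketch: only set the headers we actually have values for.
    h = {'Content-Type': 'application/json'}
    if service is not None:
        h['Fiware-Service'] = service
    if service_path is not None:
        h['Fiware-ServicePath'] = service_path
    return h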
Code Example #22
def test_no_type(translator):
    """
    Specifying entity type is optional, provided that id is unique.
    """
    insert_test_data(translator, ['Room', 'Car'], n_entities=2, index_size=30)

    # With type
    r = requests.get(query_url(), params={'type': 'Room'})
    assert r.status_code == 200, r.text
    res_with_type = r.json()

    # Without type
    r = requests.get(query_url(), params={})
    assert r.status_code == 200, r.text
    res_without_type = r.json()

    assert res_with_type == res_without_type
Code Example #23
def test_1TNENA_types_one_attribute(translator):
    # Query one attribute (pressure) across two entity types.
    t = 'Room'
    t1 = 'Kitchen'

    insert_test_data(translator, [t], entity_id='Room1', index_size=3)
    insert_test_data(translator, [t1], entity_id='Kitchen1', index_size=3)

    query_params = {'attrs': 'pressure'}
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 200, r.text

    # Assert Results
    expected_temperatures = list(range(3))
    expected_pressures = [v * 10 for v in expected_temperatures]
    expected_index = [
        '1970-01-{:02}T00:00:00.000'.format(i + 1)
        for i in expected_temperatures
    ]
    expected_index_kitchen = [
        '1970-01-{:02}T00:00:00.000'.format(i + 1)
        for i in expected_temperatures
    ]

    expected_entities_kitchen = [{
        'entityId': 'Kitchen1',
        'index': expected_index_kitchen,
        'values': expected_pressures
    }]
    expected_entities_room = [{
        'entityId': 'Room1',
        'index': expected_index,
        'values': expected_pressures
    }]
    expected_types = [{
        'entities': expected_entities_kitchen,
        'entityType': 'Kitchen'
    }, {
        'entities': expected_entities_room,
        'entityType': 'Room'
    }]
    expected_attrs = [{'attrName': 'pressure', 'types': expected_types}]
    expected = {'attrs': expected_attrs}
    obtained = r.json()
    assert obtained == expected
Code Example #24
def test_no_type_not_unique(translator):
    # If id is not unique across types, you must specify type.
    insert_test_data(translator, ['Room', 'Car'],
                     n_entities=2,
                     index_size=30,
                     entity_id="repeatedId")

    url = "{qlUrl}/entities/{entityId}/attrs/temperature".format(
        qlUrl=QL_URL,
        entityId="repeatedId",
    )

    # With type
    r = requests.get(url, params={'type': 'Room'})
    assert r.status_code == 200, r.text

    # Without type
    r = requests.get(url, params={})
    assert r.status_code == 400, r.text
    e = AmbiguousNGSIIdError('repeatedId')
    assert r.json() == {"error": "{}".format(type(e)), "description": str(e)}
Code Example #25
def test_1T1E1A_aggrPeriod(translator, aggr_period, exp_index, ins_period):
    # Custom index to test aggrPeriod
    for i in exp_index:
        base = datetime.strptime(i, "%Y-%m-%dT%H:%M:%S.%f")
        insert_test_data(translator,
                         [entity_type],
                         index_size=4,
                         index_base=base,
                         index_period=ins_period)

    # aggrPeriod needs aggrMethod
    query_params = {
        'type': entity_type,
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 400, r.text

    # Check aggregation with aggrPeriod
    query_params = {
        'type': entity_type,
        'aggrMethod': 'avg',
        'aggrPeriod': aggr_period,
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 200, r.text

    # Assert Results
    obtained = r.json()
    exp_avg = (0 + 1 + 2 + 3) / 4.
    expected = {
        'data': {
            'entityId': entity_id,
            'attrName': attr_name,
            'index': exp_index,
            'values': [exp_avg, exp_avg, exp_avg],
        }
    }
    assert_1T1E1A_response(obtained, expected)
Code Example #26
@pytest.fixture(scope="module")
def reporter_dataset():
    insert_test_data(default_service, [entity_type], n_entities=1, index_size=4,
                     entity_id=entity_id)
    insert_test_data(default_service, [entity_type], n_entities=1, index_size=4,
                     entity_id=entity_id_1)

    insert_test_data(service_1, [entity_type], entity_id=entity_id,
                     index_size=3)
    insert_test_data(service_1, [entity_type_1], entity_id=entity_id_1_1,
                     index_size=3)

    yield

    delete_test_data(default_service, [entity_type])
    delete_test_data(service_1, [entity_type, entity_type_1])
Code Example #27
def test_default_service_path(service):
    service_path = '/'
    alt_service_path = '/notdefault'
    insert_test_data(service, [entity_type],
                     n_entities=1,
                     index_size=30,
                     service_path=service_path)
    insert_test_data(service, [entity_type],
                     n_entities=1,
                     index_size=15,
                     service_path=alt_service_path)

    time.sleep(SLEEP_TIME)

    body = {
        'entities': [{
            'type': entity_type,
            'id': entity_id
        }],
        'attrs': ['temperature', 'pressure']
    }

    r = requests.post(query_url,
                      data=json.dumps(body),
                      headers=headers(service, service_path))
    assert r.status_code == 200, r.text
    assert len(r.json()) == 1
    assert r.json()[0]['temperature']['value'] == 29
    r = requests.post(query_url,
                      data=json.dumps(body),
                      headers=headers(service, alt_service_path))
    assert r.status_code == 200, r.text
    assert len(r.json()) == 1
    assert r.json()[0]['temperature']['value'] == 14
    delete_test_data(service, [entity_type], service_path=service_path)
    delete_test_data(service, [entity_type], service_path=alt_service_path)
Code Example #28
def test_different_time_indexes(service):
    """
    Each entity should have its time_index array.
    """
    etype = 'test_different_time_indexes'
    # The reporter_dataset fixture is still in the DB because it has module
    # scope. We use a different entity type to store this test's rows in a
    # different table, to avoid messing up global state; see also the delete
    # call below.
    insert_test_data(service, [etype], entity_id='Room1', index_size=2)
    insert_test_data(service, [etype], entity_id='Room3', index_size=4)
    insert_test_data(service, [etype], entity_id='Room2', index_size=3)

    wait_for_insert([etype], service, 2 + 4 + 3)

    query_params = {
        'type': etype,
        'id': 'Room3,Room1,Room2',
    }
    h = {'Fiware-Service': service}

    r = requests.get(query_url(etype=etype), params=query_params, headers=h)
    assert r.status_code == 200, r.text

    expected_entities = [{
        'entityId': 'Room3',
        'index': ['1970-01-{:02}T00:00:00+00:00'.format(i + 1)
                  for i in range(4)],
        'values': list(range(4)),
    }, {
        'entityId': 'Room1',
        'index': ['1970-01-{:02}T00:00:00+00:00'.format(i + 1)
                  for i in range(2)],
        'values': list(range(2)),
    }, {
        'entityId': 'Room2',
        'index': ['1970-01-{:02}T00:00:00+00:00'.format(i + 1)
                  for i in range(3)],
        'values': list(range(3)),
    }]

    expected = {
        'entityType': etype,
        'attrName': attr_name,
        'entities': expected_entities
    }
    obtained = r.json()
    assert_1TNE1A_response(obtained, expected, etype=etype)
    delete_test_data(service, [etype])
Code Example #29
@pytest.fixture(scope="module")
def reporter_dataset():
    for service in services:
        insert_test_data(service, [entity_type], n_entities=1, index_size=4,
                         entity_id=entity_id)
        insert_test_data(service, [entity_type], n_entities=1, index_size=4,
                         entity_id=entity_id_1)
        insert_test_data(service, [entity_type_1], entity_id=entity_id_1_1,
                         index_size=3)
        wait_for_insert([entity_type], service, 4 * 2)
        wait_for_insert([entity_type_1], service, 3)
    yield
    for service in services:
        delete_test_data(service, [entity_type, entity_type_1])
Code Example #30
def test_different_time_indexes(translator):
    """
    Each entity should have its time_index array.
    """
    t = 'Room'
    insert_test_data(translator, [t],
                     n_entities=1,
                     entity_id='Room1',
                     n_days=2)
    insert_test_data(translator, [t],
                     n_entities=1,
                     entity_id='Room3',
                     n_days=4)
    insert_test_data(translator, [t],
                     n_entities=1,
                     entity_id='Room2',
                     n_days=3)

    query_params = {
        'type': 'Room',
        'id': 'Room3,Room1,Room2',
    }
    r = requests.get(query_url(), params=query_params)
    assert r.status_code == 200, r.text

    expected_entities = [{
        'entityId': 'Room3',
        'index': ['1970-01-{:02}T00:00:00'.format(i + 1) for i in range(4)],
        'values': list(range(4)),
    }, {
        'entityId': 'Room1',
        'index': ['1970-01-{:02}T00:00:00'.format(i + 1) for i in range(2)],
        'values': list(range(2)),
    }, {
        'entityId': 'Room2',
        'index': ['1970-01-{:02}T00:00:00'.format(i + 1) for i in range(3)],
        'values': list(range(3)),
    }]

    obtained_data = r.json()
    assert isinstance(obtained_data, dict)
    assert obtained_data['data']['entityType'] == 'Room'
    assert obtained_data['data']['attrName'] == attr_name
    assert obtained_data['data']['entities'] == expected_entities