def test_analysis_topic(volttron_instance, database_client):
    """Publish a single analysis point and verify it is listed and queryable."""
    agent_uuid = install_historian_agent(volttron_instance,
                                         mongo_agent_config())
    try:
        publisher = volttron_instance.build_agent()
        reading = random.uniform(30, 100)
        payload = [{'FluffyWidgets': reading},
                   {'FluffyWidgets': {'units': 'F', 'tz': 'UTC',
                                      'type': 'float'}}]
        publisheddt = publish_data(publisher, BASE_ANALYSIS_TOPIC, payload)
        gevent.sleep(0.1)

        querier = volttron_instance.build_agent()
        topic_list = querier.vip.rpc.call('platform.historian',
                                          'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert 'FluffyWidgets' in topic_list[0]

        result = querier.vip.rpc.call(
            'platform.historian', 'query',
            topic=BASE_ANALYSIS_TOPIC[9:] + '/FluffyWidgets').get(timeout=5)
        assert result is not None
        assert len(result['values']) == 1
        assert isinstance(result['values'], list)
        # Pad the millisecond timestamp out to the microseconds mongo stores.
        expected_ts = publisheddt.isoformat()[:-3] + '000'
        assert result['values'][0] == [expected_ts, reading]
    finally:
        volttron_instance.stop_agent(agent_uuid)
        volttron_instance.remove_agent(agent_uuid)
def test_two_hours_of_publishing(request, volttron_instance1, database_client):
    """Publish two hours of minute data and verify every record is in mongo.

    :param request: pytest request fixture (unused directly)
    :param volttron_instance1: the platform instance under test
    :param database_client: pymongo client connected to the historian database
    """
    clean_db(database_client)
    # Install the historian agent (after this call the agent should be running
    # on the platform).
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    assert agent_uuid is not None
    assert volttron_instance1.is_agent_running(agent_uuid)
    try:
        # Create a publisher and publish to the message bus some fake data.
        # Keep track of the published data so that we can query the historian.
        publisher = volttron_instance1.build_agent()
        assert publisher is not None
        expected = publish_minute_data_for_two_hours(publisher)

        # The mongo historian now should have published 2 hours worth of data.
        # Based upon the structure that we expect the database to be in we
        # should now have 3 topics present in the database and 2 records for
        # each of the 3 data items.
        db = database_client.get_default_database()
        assert 3 == db.topics.find().count()

        topic_to_id = {}
        for row in db.topics.find():
            topic_to_id[row['topic_name']] = row['_id']

        gevent.sleep(0.5)
        for d, v in expected.items():
            print('d, v: ({}, {})'.format(d, v))
            assert db['data'].find({'ts': d}).count() == 3
            for t, _id in topic_to_id.items():
                value = db['data'].find_one({'ts': d,
                                             'topic_id': _id})['value']
                assert value == v[t]
    finally:
        # BUGFIX: the agent was previously left running after the test and
        # leaked into subsequent tests; always stop and remove it.
        volttron_instance1.stop_agent(agent_uuid)
        volttron_instance1.remove_agent(agent_uuid)
def test_two_hours_of_publishing(request, volttron_instance1, database_client):
    """Publish two hours of minute data and verify every record is in mongo.

    NOTE(review): this is a duplicate definition of an earlier function of
    the same name and shadows it at import time — consider deleting one.

    :param request: pytest request fixture (unused directly)
    :param volttron_instance1: the platform instance under test
    :param database_client: pymongo client connected to the historian database
    """
    clean_db(database_client)
    # Install the historian agent (after this call the agent should be running
    # on the platform).
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    assert agent_uuid is not None
    assert volttron_instance1.is_agent_running(agent_uuid)
    try:
        # Create a publisher and publish to the message bus some fake data.
        # Keep track of the published data so that we can query the historian.
        publisher = volttron_instance1.build_agent()
        assert publisher is not None
        expected = publish_minute_data_for_two_hours(publisher)

        # The mongo historian now should have published 2 hours worth of data.
        # We expect 3 topics present in the database and a record per topic
        # for every published timestamp.
        db = database_client.get_default_database()
        assert 3 == db.topics.find().count()

        topic_to_id = {}
        for row in db.topics.find():
            topic_to_id[row['topic_name']] = row['_id']

        gevent.sleep(0.5)
        for d, v in expected.items():
            print('d, v: ({}, {})'.format(d, v))
            assert db['data'].find({'ts': d}).count() == 3
            for t, _id in topic_to_id.items():
                value = db['data'].find_one({'ts': d,
                                             'topic_id': _id})['value']
                assert value == v[t]
    finally:
        # BUGFIX: the agent was previously left running after the test and
        # leaked into subsequent tests; always stop and remove it.
        volttron_instance1.stop_agent(agent_uuid)
        volttron_instance1.remove_agent(agent_uuid)
def test_analysis_topic(volttron_instance1, database_client):
    """Publish one analysis reading and verify topic listing and query."""
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    try:
        publisher = volttron_instance1.build_agent()
        reading = random.uniform(30, 100)
        payload = [{'FluffyWidgets': reading},
                   {'FluffyWidgets': {'units': 'F', 'tz': 'UTC',
                                      'type': 'float'}}]
        publisheddt = publish_data(publisher,
                                   BASE_ANALYSIS_TOPIC + '/FluffyWidgets',
                                   payload)
        gevent.sleep(0.1)

        querier = volttron_instance1.build_agent()
        topic_list = querier.vip.rpc.call('platform.historian',
                                          'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert 'FluffyWidgets' in topic_list[0]

        result = querier.vip.rpc.call(
            'platform.historian', 'query',
            topic=BASE_ANALYSIS_TOPIC[9:] + '/FluffyWidgets').get(timeout=5)
        assert result is not None
        assert len(result['values']) == 1
        assert isinstance(result['values'], list)
        # Pad the millisecond timestamp out to the microseconds mongo stores.
        expected_ts = publisheddt.isoformat()[:-3] + '000'
        assert result['values'][0] == [expected_ts, reading]
    finally:
        volttron_instance1.stop_agent(agent_uuid)
def test_topic_name_case_change(volttron_instance, database_client):
    """
    When case of a topic name changes check if they are saved as two topics
    Expected result: query result should be cases insensitive
    """
    clean_db(database_client)
    agent_uuid = install_historian_agent(volttron_instance,
                                         mongo_agent_config())
    try:
        publisher = volttron_instance.build_agent()
        reading = random.uniform(30, 100)

        # First publish with mixed-case 'FluffyWidgets'.
        payload = [{'FluffyWidgets': reading},
                   {'FluffyWidgets': {'units': 'F', 'tz': 'UTC',
                                      'type': 'float'}}]
        publisheddt = publish_data(publisher, BASE_ANALYSIS_TOPIC, payload)
        gevent.sleep(0.1)

        querier = volttron_instance.build_agent()
        topic_list = querier.vip.rpc.call('platform.historian',
                                          'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert 'FluffyWidgets' in topic_list[0]

        result = querier.vip.rpc.call(
            'platform.historian', 'query',
            topic=BASE_ANALYSIS_TOPIC[9:] + '/FluffyWidgets').get(timeout=5)
        assert result is not None
        assert len(result['values']) == 1
        assert isinstance(result['values'], list)
        expected_ts = publisheddt.isoformat()[:-3] + '000'
        assert result['values'][0] == [expected_ts, reading]

        # Re-publish with a different case: the topic count must stay at one
        # and a case-insensitive query must return both records.
        payload = [{'Fluffywidgets': reading},
                   {'Fluffywidgets': {'units': 'F', 'tz': 'UTC',
                                      'type': 'float'}}]
        publisheddt = publish_data(publisher, BASE_ANALYSIS_TOPIC, payload)
        gevent.sleep(0.1)

        topic_list = querier.vip.rpc.call('platform.historian',
                                          'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert 'Fluffywidgets' in topic_list[0]

        result = querier.vip.rpc.call(
            'platform.historian', 'query',
            topic=BASE_ANALYSIS_TOPIC[9:] + '/Fluffywidgets',
            order="LAST_TO_FIRST").get(timeout=5)
        assert result is not None
        assert len(result['values']) == 2
        assert isinstance(result['values'], list)
        expected_ts = publisheddt.isoformat()[:-3] + '000'
        assert result['values'][0] == [expected_ts, reading]
    finally:
        volttron_instance.stop_agent(agent_uuid)
        volttron_instance.remove_agent(agent_uuid)
def test_get_topic_map(volttron_instance1, database_client):
    """Verify get_topic_list grows only when genuinely new topics appear.

    :param volttron_instance1: the platform instance under test
    :param database_client: pymongo client connected to the historian database
    """
    # BUGFIX: agent_uuid was previously first assigned inside the try block,
    # so a failure in install_historian_agent made the finally clause raise
    # NameError. Initialize it before the try and guard the cleanup.
    agent_uuid = None
    try:
        agent_uuid = install_historian_agent(volttron_instance1,
                                             mongo_agent_config())
        oat_reading = random.uniform(30, 100)
        all_message = [{'OutsideAirTemperature': oat_reading},
                       {'OutsideAirTemperature': {'units': 'F', 'tz': 'UTC',
                                                  'type': 'float'}}]
        publisher = volttron_instance1.build_agent()
        publish_data(publisher, ALL_TOPIC, all_message)

        db = database_client.get_default_database()
        assert db.topics.count() == 1

        lister = volttron_instance1.build_agent()
        topic_list = lister.vip.rpc.call('platform.historian',
                                         'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1

        # Publish data again for the same point; the same topic shouldn't
        # add anything else.
        publish_data(publisher, ALL_TOPIC, all_message)
        topic_list = lister.vip.rpc.call('platform.historian',
                                         'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert topic_list[0] == \
            BASE_DEVICE_TOPIC[8:] + '/OutsideAirTemperature'

        # A genuinely new point should add a second topic.
        mixed_reading = random.uniform(30, 100)
        all_message = [{'MixedAirTemperature': mixed_reading},
                       {'MixedAirTemperature': {'units': 'F', 'tz': 'UTC',
                                                'type': 'float'}}]
        publish_data(publisher, ALL_TOPIC, all_message)
        topic_list = lister.vip.rpc.call('platform.historian',
                                         'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 2
    finally:
        if agent_uuid:
            volttron_instance1.stop_agent(agent_uuid)
def test_basic_function(volttron_instance, database_client):
    """
    Test basic functionality of sql historian. Inserts three points as part
    of all topic and checks if all three got into the database
    :param database_client:
    :param volttron_instance: The instance against which the test is run
    """
    global query_points
    agent_uuid = install_historian_agent(volttron_instance,
                                         mongo_agent_config())
    try:
        print("\n** test_basic_function **")
        publish_agent = volttron_instance.build_agent()

        # Publish twice; the second publish's values are the newest and are
        # what LAST_TO_FIRST should return first.
        expected = publish_fake_data(publish_agent)
        expected = publish_fake_data(publish_agent)
        gevent.sleep(0.5)

        expected_ts = expected['datetime'].isoformat()[:-3] + '000+00:00'
        for point in ('oat_point', 'mixed_point', 'damper_point'):
            result = publish_agent.vip.rpc.call(
                'platform.historian', 'query',
                topic=query_points[point],
                count=20,
                order="LAST_TO_FIRST").get(timeout=100)
            assert expected_ts == result['values'][0][0]
            assert result['values'][0][1] == expected[point]
    finally:
        volttron_instance.stop_agent(agent_uuid)
        volttron_instance.remove_agent(agent_uuid)
def test_topic_name_case_change(volttron_instance1, database_client):
    """
    When case of a topic name changes check if they are saved as two topics
    Expected result: query result should be cases insensitive
    """
    clean_db(database_client)
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    try:
        publisher = volttron_instance1.build_agent()
        reading = random.uniform(30, 100)

        # First publish with mixed-case 'FluffyWidgets'.
        payload = [{'FluffyWidgets': reading},
                   {'FluffyWidgets': {'units': 'F', 'tz': 'UTC',
                                      'type': 'float'}}]
        publisheddt = publish_data(publisher,
                                   BASE_ANALYSIS_TOPIC + '/FluffyWidgets',
                                   payload)
        gevent.sleep(0.1)

        querier = volttron_instance1.build_agent()
        topic_list = querier.vip.rpc.call('platform.historian',
                                          'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert 'FluffyWidgets' in topic_list[0]

        result = querier.vip.rpc.call(
            'platform.historian', 'query',
            topic=BASE_ANALYSIS_TOPIC[9:] + '/FluffyWidgets').get(timeout=5)
        assert result is not None
        assert len(result['values']) == 1
        assert isinstance(result['values'], list)
        expected_ts = publisheddt.isoformat()[:-3] + '000'
        assert result['values'][0] == [expected_ts, reading]

        # Re-publish with a different case: the topic count must stay at one
        # and a case-insensitive query must return both records.
        payload = [{'Fluffywidgets': reading},
                   {'Fluffywidgets': {'units': 'F', 'tz': 'UTC',
                                      'type': 'float'}}]
        publisheddt = publish_data(publisher,
                                   BASE_ANALYSIS_TOPIC + '/Fluffywidgets',
                                   payload)
        gevent.sleep(0.1)

        topic_list = querier.vip.rpc.call('platform.historian',
                                          'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert 'Fluffywidgets' in topic_list[0]

        result = querier.vip.rpc.call(
            'platform.historian', 'query',
            topic=BASE_ANALYSIS_TOPIC[9:] + '/Fluffywidgets',
            order="LAST_TO_FIRST").get(timeout=5)
        assert result is not None
        assert len(result['values']) == 2
        assert isinstance(result['values'], list)
        expected_ts = publisheddt.isoformat()[:-3] + '000'
        assert result['values'][0] == [expected_ts, reading]
    finally:
        volttron_instance1.stop_agent(agent_uuid)
def test_multi_topic(volttron_instance, database_client):
    """
    Test basic functionality of sql historian. Inserts three points as part
    of all topic and checks if all three got into the database
    :param database_client:
    :param volttron_instance: The instance against which the test is run
    """
    global query_points
    agent_uuid = install_historian_agent(volttron_instance,
                                         mongo_agent_config())
    try:
        print("\n** test_basic_function **")
        publish_agent = volttron_instance.build_agent()

        # Publish five points; only the first three should come back for a
        # count=3 FIRST_TO_LAST query.
        oat_key = query_points['oat_point']
        mixed_key = query_points['mixed_point']
        values_dict = {oat_key: [], mixed_key: []}
        for x in range(5):
            expected = publish_fake_data(publish_agent)
            gevent.sleep(0.5)
            if x < 3:
                ts = expected["datetime"].isoformat()[:-3] + '000+00:00'
                values_dict[oat_key].append([ts, expected["oat_point"]])
                values_dict[mixed_key].append([ts, expected["mixed_point"]])
        expected_result = {"values": values_dict, "metadata": {}}

        # Query both topics in a single multi-topic call.
        result = publish_agent.vip.rpc.call(
            'platform.historian', 'query',
            topic=[mixed_key, oat_key],
            count=3,
            order="FIRST_TO_LAST").get(timeout=100)

        assert result["metadata"] == expected_result["metadata"]
        assert result["values"][mixed_key] == \
            expected_result["values"][mixed_key]
        assert result["values"][oat_key] == \
            expected_result["values"][oat_key]
    finally:
        volttron_instance.stop_agent(agent_uuid)
        volttron_instance.remove_agent(agent_uuid)
def test_basic_function(volttron_instance1, database_client):
    """
    Test basic functionality of sql historian. Inserts three points as part
    of all topic and checks if all three got into the database

    :param volttron_instance1: The instance against which the test is run
    :param database_client: pymongo client connected to the historian database
    """
    global query_points
    # BUGFIX: the install uuid was previously discarded and the agent never
    # stopped or removed, leaking it into subsequent tests. Also dropped the
    # unused 'db_connection' name from the global statement.
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    try:
        print("\n** test_basic_function **")
        publish_agent = volttron_instance1.build_agent()

        # Publish data to message bus that should be recorded in the mongo
        # database.
        expected = publish_fake_data(publish_agent)
        gevent.sleep(0.5)

        # Query the historian; the single published record must come back
        # first for every point.
        expected_ts = expected['datetime'].isoformat()[:-3] + '000'
        for point in ('oat_point', 'mixed_point', 'damper_point'):
            result = publish_agent.vip.rpc.call(
                'platform.historian', 'query',
                topic=query_points[point],
                count=20,
                order="LAST_TO_FIRST").get(timeout=100)
            assert expected_ts == result['values'][0][0]
            assert result['values'][0][1] == expected[point]
    finally:
        volttron_instance1.stop_agent(agent_uuid)
        volttron_instance1.remove_agent(agent_uuid)
def test_insert_duplicate(volttron_instance1, database_client):
    """Publishing the same point twice with an identical timestamp must not
    break the historian when data has a unique (ts, topic_id) index.

    :param volttron_instance1: the platform instance under test
    :param database_client: pymongo client connected to the historian database
    """
    clean_db(database_client)
    data_collection = database_client.get_default_database()['data']
    index_model = pymongo.IndexModel(
        [("ts", pymongo.ASCENDING), ("topic_id", pymongo.ASCENDING)],
        unique=True)
    # make sure the data collection has the unique constraint.
    data_collection.create_indexes([index_model])
    # Install the historian agent (after this call the agent should be running
    # on the platform).
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    assert agent_uuid is not None
    assert volttron_instance1.is_agent_running(agent_uuid)
    try:
        oat_reading = random.uniform(30, 100)
        all_message = [{'OutsideAirTemperature': oat_reading},
                       {'OutsideAirTemperature': {'units': 'F', 'tz': 'UTC',
                                                  'type': 'float'}}]
        publisher = volttron_instance1.build_agent()

        # Create timestamp (no parameter to isoformat so the result is a T
        # separator).
        now = get_aware_utc_now()
        print('NOW IS: ', now)
        headers = {headers_mod.DATE: now.isoformat()}

        # Publish the same message twice with the same DATE header so both
        # records map to the same (ts, topic_id) pair.
        publisher.vip.pubsub.publish('pubsub', ALL_TOPIC, headers,
                                     all_message).get(timeout=10)
        gevent.sleep(0.5)
        publisher.vip.pubsub.publish('pubsub', ALL_TOPIC, headers,
                                     all_message).get(timeout=10)
    finally:
        # BUGFIX: the agent was previously left running after the test;
        # always stop and remove it.
        volttron_instance1.stop_agent(agent_uuid)
        volttron_instance1.remove_agent(agent_uuid)
def test_empty_result(volttron_instance1, database_client):
    """
    When case of a topic name changes check if they are saved as two topics
    Expected result: query result should be cases insensitive
    """
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    try:
        querier = volttron_instance1.build_agent()
        # Nothing was published, so the query must come back as an empty dict
        # rather than raising.
        topic = BASE_ANALYSIS_TOPIC[9:] + '/FluffyWidgets'
        result = querier.vip.rpc.call('platform.historian', 'query',
                                      topic=topic).get(timeout=5)
        print("query result:", result)
        assert result == {}
    finally:
        volttron_instance1.stop_agent(agent_uuid)
def test_empty_result(volttron_instance1, database_client):
    """
    When case of a topic name changes check if they are saved as two topics
    Expected result: query result should be cases insensitive
    """
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    try:
        agent = volttron_instance1.build_agent()
        # Querying a topic that was never published yields an empty dict.
        result = agent.vip.rpc.call(
            'platform.historian',
            'query',
            topic=BASE_ANALYSIS_TOPIC[9:] + '/FluffyWidgets').get(timeout=5)
        print("query result:", result)
        assert result == {}
    finally:
        volttron_instance1.stop_agent(agent_uuid)
def test_insert_duplicate(volttron_instance1, database_client):
    """Duplicate publish with an identical timestamp must not break the
    historian when data has a unique (ts, topic_id) index.

    NOTE(review): duplicate definition of an earlier function with the same
    name — it shadows the first at import time; consider deleting one.

    :param volttron_instance1: the platform instance under test
    :param database_client: pymongo client connected to the historian database
    """
    clean_db(database_client)
    data_collection = database_client.get_default_database()['data']
    index_model = pymongo.IndexModel(
        [("ts", pymongo.ASCENDING), ("topic_id", pymongo.ASCENDING)],
        unique=True)
    # make sure the data collection has the unique constraint.
    data_collection.create_indexes([index_model])
    # Install the historian agent (after this call the agent should be running
    # on the platform).
    agent_uuid = install_historian_agent(volttron_instance1,
                                         mongo_agent_config())
    assert agent_uuid is not None
    assert volttron_instance1.is_agent_running(agent_uuid)
    try:
        oat_reading = random.uniform(30, 100)
        all_message = [{'OutsideAirTemperature': oat_reading},
                       {'OutsideAirTemperature': {'units': 'F', 'tz': 'UTC',
                                                  'type': 'float'}}]
        publisher = volttron_instance1.build_agent()

        # Create timestamp (no parameter to isoformat so the result is a T
        # separator).
        now = get_aware_utc_now()
        print('NOW IS: ', now)
        headers = {headers_mod.DATE: now.isoformat()}

        # Publish the same message twice with the same DATE header so both
        # records map to the same (ts, topic_id) pair.
        publisher.vip.pubsub.publish(
            'pubsub', ALL_TOPIC, headers, all_message).get(timeout=10)
        gevent.sleep(0.5)
        publisher.vip.pubsub.publish(
            'pubsub', ALL_TOPIC, headers, all_message).get(timeout=10)
    finally:
        # BUGFIX: the agent was previously left running after the test;
        # always stop and remove it.
        volttron_instance1.stop_agent(agent_uuid)
        volttron_instance1.remove_agent(agent_uuid)
def test_get_topic_map(volttron_instance1, database_client):
    """Verify get_topic_list grows only when genuinely new topics appear.

    NOTE(review): duplicate definition of an earlier function with the same
    name — it shadows the first at import time; consider deleting one.

    :param volttron_instance1: the platform instance under test
    :param database_client: pymongo client connected to the historian database
    """
    # BUGFIX: agent_uuid was previously first assigned inside the try block,
    # so a failure in install_historian_agent made the finally clause raise
    # NameError. Initialize it before the try and guard the cleanup.
    agent_uuid = None
    try:
        agent_uuid = install_historian_agent(volttron_instance1,
                                             mongo_agent_config())
        oat_reading = random.uniform(30, 100)
        all_message = [{'OutsideAirTemperature': oat_reading},
                       {'OutsideAirTemperature': {'units': 'F', 'tz': 'UTC',
                                                  'type': 'float'}}]
        publisher = volttron_instance1.build_agent()
        publish_data(publisher, ALL_TOPIC, all_message)

        db = database_client.get_default_database()
        assert db.topics.count() == 1

        lister = volttron_instance1.build_agent()
        topic_list = lister.vip.rpc.call('platform.historian',
                                         'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1

        # Publish data again for the same point; the same topic shouldn't
        # add anything else.
        publish_data(publisher, ALL_TOPIC, all_message)
        topic_list = lister.vip.rpc.call('platform.historian',
                                         'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 1
        assert topic_list[0] == \
            BASE_DEVICE_TOPIC[8:] + '/OutsideAirTemperature'

        # A genuinely new point should add a second topic.
        mixed_reading = random.uniform(30, 100)
        all_message = [{'MixedAirTemperature': mixed_reading},
                       {'MixedAirTemperature': {'units': 'F', 'tz': 'UTC',
                                                'type': 'float'}}]
        publish_data(publisher, ALL_TOPIC, all_message)
        topic_list = lister.vip.rpc.call('platform.historian',
                                         'get_topic_list').get(timeout=5)
        assert topic_list is not None
        assert len(topic_list) == 2
    finally:
        if agent_uuid:
            volttron_instance1.stop_agent(agent_uuid)
def test_data_rollup_insert(volttron_instance, database_client):
    """
    Test the creation of rolled up data in hourly, daily and monthly data
    tables when data is published for new or existing topics

    :param database_client:
    :param volttron_instance: The instance against which the test is run
    """
    global query_points
    agent_uuid = None
    try:
        print("\n** test_data_rollup_insert **")
        # Clean data and roll up tables
        db = database_client.get_default_database()
        db['data'].drop()
        db['topics'].drop()
        db['meta'].drop()
        db['hourly_data'].drop()
        db['daily_data'].drop()
        db['monthly_data'].drop()
        gevent.sleep(0.5)

        config = mongo_agent_config()
        config['periodic_rollup_initial_wait'] = 0.1
        config['rollup_query_end'] = 0
        config['periodic_rollup_frequency'] = 2
        agent_uuid = install_historian_agent(volttron_instance, config)

        publish_agent = volttron_instance.build_agent()
        version = publish_agent.vip.rpc.call('platform.historian',
                                             'get_version').get(timeout=5)
        version_nums = version.split(".")
        if int(version_nums[0]) < 2:
            pytest.skip("Only version >= 2.0 support rolled up data.")

        # ###################
        # Initialization test
        # ###################
        # Publish data to message bus that should be recorded in the mongo
        # database. All topics are new.
        # BUGFIX: the original used Python-2-only syntax — leading-zero
        # integer literals (month=03, day=01, hour=01) and the statement
        # form 'print result' — both of which are syntax errors in Python 3.
        now = datetime(year=2016, month=3, day=1, hour=1, minute=1,
                       second=1, microsecond=123, tzinfo=tzutc())
        expected1 = publish_fake_data(publish_agent, now)
        expected2 = publish_fake_data(publish_agent,
                                      now + timedelta(minutes=1))
        # publish again. this time topic is not new. rolled up data should
        # get appended to the array initialized during last publish
        expected3 = publish_fake_data(publish_agent,
                                      now + timedelta(minutes=4))
        gevent.sleep(0.5)

        result = publish_agent.vip.rpc.call(
            'platform.historian', 'query',
            topic=query_points['oat_point'], count=20,
            order="FIRST_TO_LAST").get(timeout=10)
        print(result)
        gevent.sleep(6)  # allow for periodic rollup function to catch up
        compare_query_results(db, expected1, expected2, expected3,
                              'oat_point', result)
    finally:
        if agent_uuid:
            volttron_instance.stop_agent(agent_uuid)
            volttron_instance.remove_agent(agent_uuid)
def test_rollup_query_with_topic_pattern(volttron_instance, database_client):
    """
    Test the query of rolled up data from hourly, daily and monthly data
    tables

    :param database_client:
    :param volttron_instance: The instance against which the test is run
    """
    global query_points
    agent_uuid = None
    try:
        # Clean data and roll up tables
        db = database_client.get_default_database()
        db['data'].drop()
        db['topics'].drop()
        db['meta'].drop()
        db['hourly_data'].drop()
        db['daily_data'].drop()

        publish_t1 = datetime(year=2016, month=3, day=1, hour=1, minute=10,
                              second=1, microsecond=0, tzinfo=tzutc())
        publish_t2 = publish_t1 + timedelta(minutes=1)
        publish_t3 = publish_t2 + timedelta(minutes=3)
        query_end = publish_t3 + timedelta(seconds=2)
        # query time period should be greater than 3 hours for historian to
        # use hourly_data collection and >= 1 day to use daily_data table
        query_start = query_end - timedelta(hours=4)
        query_start_day = query_end - timedelta(days=2)

        config = mongo_agent_config()
        config['periodic_rollup_initial_wait'] = 0.1
        config['rollup_query_end'] = 0
        config['periodic_rollup_frequency'] = 2
        config['rollup_query_start'] = query_start_day.strftime(
            '%Y-%m-%dT%H:%M:%S.%f')
        config['initial_rollup_start_time'] = query_start_day.strftime(
            '%Y-%m-%dT%H:%M:%S.%f')
        config['rollup_topic_pattern'] = \
            ".*/OutsideAirTemperature|.*/MixedAirTemperature"
        agent_uuid = install_historian_agent(volttron_instance, config)

        print("\n** test_data_rollup_insert **")
        publish_agent = volttron_instance.build_agent()
        version = publish_agent.vip.rpc.call('platform.historian',
                                             'get_version').get(timeout=5)
        version_nums = version.split(".")
        if int(version_nums[0]) < 2:
            pytest.skip("Only version >= 2.0 support rolled up data.")

        expected1 = publish_fake_data(publish_agent, publish_t1)
        expected2 = publish_fake_data(publish_agent, publish_t2)
        expected3 = publish_fake_data(publish_agent, publish_t3)
        gevent.sleep(6)

        # test query from data table for damper_point - point not in
        # rollup_topic_pattern configured
        result = publish_agent.vip.rpc.call(
            'platform.historian', 'query',
            topic=query_points['damper_point'], count=20,
            start=query_start.isoformat(), end=query_end.isoformat(),
            order="FIRST_TO_LAST").get(timeout=10)
        # BUGFIX: 'print result' is Python-2-only statement syntax and a
        # syntax error under Python 3; use the function form.
        print(result)
        compare_query_results(db, expected1, expected2, expected3,
                              'damper_point', result)

        # TODO(review): assertions that exercised queries served from the
        # hourly_data and daily_data collections (including verifying that
        # damper_point — excluded by rollup_topic_pattern — returns {} from
        # those collections) were commented out in the original; restore
        # them once the rollup collections behave reliably.
    finally:
        if agent_uuid:
            volttron_instance.stop_agent(agent_uuid)
            volttron_instance.remove_agent(agent_uuid)
def test_dict_insert_special_character(volttron_instance, database_client):
    """
    Test the query of rolled up data from hourly, daily and monthly data
    tables

    :param database_client:
    :param volttron_instance: The instance against which the test is run
    """
    global query_points
    agent_uuid = None
    try:
        # Start from a clean slate: drop data and rollup collections.
        db = database_client.get_default_database()
        for name in ('data', 'topics', 'meta', 'hourly_data', 'daily_data'):
            db[name].drop()

        publish_t1 = datetime(year=2016, month=3, day=1, hour=1, minute=10,
                              second=1, microsecond=0, tzinfo=tzutc())
        publish_t2 = publish_t1 + timedelta(minutes=1)
        query_end = publish_t2 + timedelta(seconds=2)
        # query time period should be greater than 3 hours for historian to
        # use hourly_data collection and >= 1 day to use daily_data table
        query_start = query_end - timedelta(hours=4)
        query_start_day = query_end - timedelta(days=2)

        config = mongo_agent_config()
        config['periodic_rollup_initial_wait'] = 0.1
        config['rollup_query_end'] = 0
        config['periodic_rollup_frequency'] = 2
        config['rollup_query_start'] = query_start_day.strftime(
            '%Y-%m-%dT%H:%M:%S.%f')
        config['initial_rollup_start_time'] = query_start_day.strftime(
            '%Y-%m-%dT%H:%M:%S.%f')
        agent_uuid = install_historian_agent(volttron_instance, config)

        print("\n** test_data_rollup_insert **")
        publish_agent = volttron_instance.build_agent()
        version = publish_agent.vip.rpc.call('platform.historian',
                                             'get_version').get(timeout=5)
        version_nums = version.split(".")
        if int(version_nums[0]) < 2:
            pytest.skip("Only version >= 2.0 support rolled up data.")

        # Keys containing '.' and '$' are special to mongo and must survive
        # the round trip through the historian.
        special_keys = {"key.1": "value1", "$": 1}
        expected1 = publish_fake_data(publish_agent, publish_t1, special_keys)
        expected2 = publish_fake_data(publish_agent, publish_t2, special_keys)
        gevent.sleep(6)

        # Drop the raw data so this query is served from hourly_data.
        db['data'].drop()
        result = publish_agent.vip.rpc.call(
            'platform.historian', 'query',
            topic=query_points['oat_point'], count=20,
            start=query_start.isoformat(), end=query_end.isoformat(),
            order="FIRST_TO_LAST").get(timeout=10)
        print(result)
        compare_query_results(db, expected1, expected2, None, 'oat_point',
                              result)

        # Drop hourly_data too so the query falls back to daily_data.
        db['hourly_data'].drop()
        result = publish_agent.vip.rpc.call(
            'platform.historian', 'query',
            topic=query_points['oat_point'], count=20,
            start=query_start_day.isoformat(), end=query_end.isoformat(),
            order="LAST_TO_FIRST").get(timeout=10)
        print(result)
        compare_query_results(db, expected2, expected1, None, 'oat_point',
                              result)
    finally:
        if agent_uuid:
            volttron_instance.stop_agent(agent_uuid)
            volttron_instance.remove_agent(agent_uuid)