def get_list_obstobackup() -> List[ObservableGeneric]:
    try:
        dictionary_type_observable = CacheRedisAdapter.dictionary_get_all(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP,
            type_value=str)

        if not dictionary_type_observable:
            return None

        list_return = list()

        # Each dictionary entry maps an observable type to the cache label
        # under which its pending-backup list is stored.
        for type_observable in dictionary_type_observable:
            if dictionary_type_observable[type_observable] is None:
                continue

            list_partial = CacheRedisAdapter.get_cached_info(
                label_info=dictionary_type_observable[type_observable],
                type_data=list)

            if not list_partial:
                continue

            list_return.extend(list_partial)

        return list_return
    except Exception as ex:
        logger.error('get_list_obstobackup Exception: {}'.format(ex))
        return None
def increase_counter_totaldevices(incr_value: int):
    try:
        CacheRedisAdapter.counter_increase(
            label_info=ServiceCatalogClient.LABEL_COUNTER_TOTAL_DEVICE_REGISTERED,
            increase=incr_value)
    except Exception as ex:
        logger.error(
            'ServiceCatalogClient increase_counter_totaldevices Exception: {}'.format(ex))
def append_topic(single_topic: str) -> bool:
    try:
        return CacheRedisAdapter.dictionary_update_value(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_TOPICS,
            key=single_topic,
            value=1)
    except Exception as ex:
        logger.info('UtilityCatalogCached append_topic Exception: {}'.format(ex))
        return False
def get_complete_dictionary_observables(
        list_type_observables: List[str]) -> Dict[str, Dict[str, ObservableGeneric]]:
    try:
        if not list_type_observables:
            logger.warning(
                'UtilityCatalogCache get_complete_dictionary_observables list_type_observables is None')
            return None

        dict_return = dict()

        for type_observable in list_type_observables:
            dict_observable_type = CacheRedisAdapter.dictionary_get_all(
                label_info=UtilityCatalogCached.get_dictionary_name(type_observable),
                type_value=ObservableGeneric)

            if not dict_observable_type:
                logger.info(
                    'UtilityCatalogCache get_complete_dictionary_observables '
                    'not available for type_obs: {}'.format(type_observable))
                continue

            logger.info(
                'UtilityCatalogCache get_complete_dictionary_observables available '
                'for type_obs: {0}, counter_elements: {1}'.format(
                    type_observable, len(dict_observable_type)))

            dict_return[type_observable] = dict_observable_type

        return dict_return
    except Exception as ex:
        logger.error(
            'UtilityCatalogCache get_complete_dictionary_observables Exception: {}'.format(ex))
        return None
def get_last_observable(label_observable: str) -> ObservableGeneric:
    try:
        return CacheRedisAdapter.get_cached_info(
            label_info=label_observable,
            type_data=ObservableGeneric)
    except Exception as ex:
        logger.error('get_last_observable Exception: {}'.format(ex))
        return None
def initialize_environment() -> bool:
    try:
        return CacheRedisAdapter.initialize()
    except Exception as ex:
        logger.error('initialize_environment Exception: {}'.format(ex))
        return False
def configure_catalog_observable_backup(label_list_types: List[str]):
    try:
        CacheRedisAdapter.dictionary_create(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP)

        # Map every observable type to the cache label used to store its backup list.
        for type_observable in label_list_types:
            label_store = UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP + type_observable

            CacheRedisAdapter.dictionary_update_value(
                label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP,
                key=type_observable,
                value=label_store)
    except Exception as ex:
        logger.error('configure_catalog_observable_backup Exception: {}'.format(ex))
def test_method_cachelist() -> bool:
    try:
        name_list = 'LISTTESTS'
        counter_test = 20

        CacheRedisAdapter.list_create(label_info=name_list)

        for index in range(0, counter_test):
            CacheRedisAdapter.list_append_singleelement(
                label_info=name_list,
                elem_to_append=index * 2)

        list_extracted = CacheRedisAdapter.list_extractallelements(
            label_info=name_list,
            type_element=int)

        if not list_extracted:
            return False

        return True
    except Exception as ex:
        logger.error('test_method_cachelist Exception: {}'.format(ex))
        return False
def set_list_obstobackup(
        type_observable: str,
        list_obs_to_backup: List[ObservableGeneric]) -> bool:
    try:
        label_list = UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP + type_observable

        return CacheRedisAdapter.set_cache_info(
            label_info=label_list,
            data=list_obs_to_backup)
    except Exception as ex:
        logger.error('set_list_obstobackup Exception: {}'.format(ex))
        return False
def get_total_counterdevices() -> int:
    try:
        value_return = CacheRedisAdapter.counter_get(
            label_info=ServiceCatalogClient.LABEL_COUNTER_TOTAL_DEVICE_REGISTERED)

        if not value_return:
            return 0

        return value_return
    except Exception as ex:
        logger.error('ServiceCatalogClient get_total_counterdevices Exception: {}'.format(ex))
        return 0
def confirm_obs_backup() -> bool:
    try:
        dictionary_type_observable = CacheRedisAdapter.dictionary_get_all(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_OBSERVABLE_TO_BACKUP,
            type_value=str)

        if not dictionary_type_observable:
            return False

        # Drop every cached backup list once the backup has been confirmed.
        for type_observable in dictionary_type_observable:
            if dictionary_type_observable[type_observable] is None:
                continue

            CacheRedisAdapter.remove_cache_info(
                label_info=dictionary_type_observable[type_observable])

        return True
    except Exception as ex:
        logger.error('confirm_obs_backup Exception: {}'.format(ex))
        return False
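# Usage sketch (assumption, not part of the original module): how the
# observable-backup helpers above are expected to fit together, assuming they
# are exposed as static methods of UtilityCatalogCached and that
# `pending_observables` is a hypothetical List[ObservableGeneric] built by the
# caller.
def _example_backup_roundtrip(pending_observables: List[ObservableGeneric]) -> bool:
    # Register which observable types take part in the backup.
    UtilityCatalogCached.configure_catalog_observable_backup(
        [LabelObservationType.LABEL_OBSTYPE_LOCALIZATION])

    # Stage the observables under the per-type backup label.
    UtilityCatalogCached.set_list_obstobackup(
        type_observable=LabelObservationType.LABEL_OBSTYPE_LOCALIZATION,
        list_obs_to_backup=pending_observables)

    # Collect everything staged for backup, persist it, then clear the cache.
    observables_to_persist = UtilityCatalogCached.get_list_obstobackup()
    if not observables_to_persist:
        return False

    # ...persist observables_to_persist (e.g. to the database)...
    return UtilityCatalogCached.confirm_obs_backup()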
def store_catalog_datastreams(
        catalog_datastreams: Dict[str, List[DatastreamTopicAdapter]]) -> bool:
    try:
        if not catalog_datastreams:
            return False

        for subject in catalog_datastreams:
            list_datastreams_adapter = catalog_datastreams[subject]

            CacheRedisAdapter.dictionary_create(label_info=subject)

            if not list_datastreams_adapter:
                continue

            CacheRedisAdapter.set_cache_info(
                label_info=subject,
                data=list_datastreams_adapter)

            logger.info(
                'UtilityCatalogCache Store Datastreams Subject: {0} number: {1}'.format(
                    subject, len(list_datastreams_adapter)))

        return True
    except Exception as ex:
        logger.info('UtilityCatalogCached store_catalog_datastreams Exception: {}'.format(ex))
        return False
def append_new_observable(label_type_observable: str,
                          observable: ObservableGeneric) -> bool:
    try:
        if not observable:
            return False

        return CacheRedisAdapter.dictionary_update_value(
            label_info=UtilityCatalogCached.get_dictionary_name(label_type_observable),
            key=observable.get_label_cache(),
            value=observable)
    except Exception as ex:
        logger.error('UtilityCatalogCache append_new_observable Exception: {}'.format(ex))
        return False
def get_device_registration(datastream_id: str) -> DeviceRegistration:
    try:
        if not datastream_id:
            return None

        elem_read = CacheRedisAdapter.dictionary_get_value(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_DEVICE_REGISTRATION,
            key=datastream_id,
            type_value=DeviceRegistration)

        return elem_read
    except Exception as ex:
        logger.error('UtilityCatalogCached get_device_registration Exception: {}'.format(ex))
        return None
def add_device_registration(device_registration: DeviceRegistration) -> bool:
    try:
        if not device_registration:
            return False

        # Keep the per-type maximum IoT identifier up to date.
        CachedComponents.checkandset_maxiotid(
            observable_type=device_registration.get_obs_type(),
            iot_id=device_registration.get_datastream_id())

        return CacheRedisAdapter.dictionary_update_value(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_DEVICE_REGISTRATION,
            key=device_registration.get_datastream_id(),
            value=device_registration)
    except Exception as ex:
        logger.error('UtilityCatalogCached add_device_registration Exception: {}'.format(ex))
        return False
def initialize_catalog() -> bool:
    try:
        CacheRedisAdapter.initialize()

        CacheRedisAdapter.dictionary_create(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_TOPICS)
        CacheRedisAdapter.dictionary_create(
            label_info=UtilityCatalogCached.LABEL_DICTIONARY_DEVICE_REGISTRATION)

        UtilityCatalogCached.configure_catalog_observable_backup(
            [LabelObservationType.LABEL_OBSTYPE_LOCALIZATION])

        return True
    except Exception as ex:
        logger.error('initialize_catalog Exception: {}'.format(ex))
        return False
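# Usage sketch (assumption, not part of the original module): a typical startup
# sequence, assuming initialize_catalog, append_topic, add_device_registration
# and get_device_registration are static methods of UtilityCatalogCached, and
# that `registration` and `topic_name` are hypothetical values supplied by the
# catalog service.
def _example_catalog_startup(registration: DeviceRegistration, topic_name: str) -> bool:
    if not UtilityCatalogCached.initialize_catalog():
        return False

    # Track the topic the service listens on.
    UtilityCatalogCached.append_topic(single_topic=topic_name)

    # Cache the device registration and read it back by datastream id.
    UtilityCatalogCached.add_device_registration(device_registration=registration)
    cached = UtilityCatalogCached.get_device_registration(
        datastream_id=registration.get_datastream_id())
    return cached is not None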
def set_flag_subscribed(new_value: int):
    CacheRedisAdapter.set_cache_info(
        label_info=ServiceUpdateDatastreamsClient.LABEL_FLAG_SUBSCRIBED,
        data=new_value)
def set_flag_connected(new_value: int):
    CacheRedisAdapter.set_cache_info(
        label_info=ServiceUpdateDatastreamsClient.LABEL_FLAG_CONNECTED,
        data=new_value)
def get_flag_subscribed() -> int:
    value_return = CacheRedisAdapter.get_cached_info(
        label_info=ServiceObservationClient.LABEL_FLAG_SUBSCRIBED,
        type_data=int)
    return value_return
def initialize():
    CacheRedisAdapter.counter_create(
        label_info=ServiceCatalogClient.LABEL_COUNTER_TOTAL_DEVICE_REGISTERED,
        start_value=0)
def set_flag_subscribed(new_value: int):
    CacheRedisAdapter.set_cache_info(
        label_info=ServiceObservationClient.LABEL_FLAG_SUBSCRIBED,
        data=new_value)
def get_flag_connected() -> int:
    value_return = CacheRedisAdapter.get_cached_info(
        label_info=ServiceUpdateDatastreamsClient.LABEL_FLAG_CONNECTED,
        type_data=int)
    return value_return
def set_flag_connected(new_value: int):
    CacheRedisAdapter.set_cache_info(
        label_info=ServiceObservationClient.LABEL_FLAG_CONNECTED,
        data=new_value)
def test_method():
    try:
        LABEL_DICTIONARY_OBSERVABLE = 'OBSERVABLE'
        LABEL_COUNTER = 'TEST_COUNTER'
        LABEL_TEST_LIST = 'TEST_LIST'
        LABEL_TEST_LISTSTRING = 'TEST_LIST_STRING'
        STRING_TRY = 'PROVA'
        LABEL_LIST_OBSERVABLE = 'TEST_LIST_OBSERVABLE'
        LABEL_DICT_OBSERVABLE = 'TEST_DICT_OBSERVABLES'
        LABEL_GOST_FILTER = 'GOST_LARGE_SCALE_TEST'

        max_counter = 3
        timestamp = datetime.datetime.now(tz=pytz.utc)

        logger.info('STARTED TEST DEMO {} Observable CatalogCache Write Simulated'.format(max_counter))

        start_value = 4
        increase_time = 10

        CacheRedisAdapter.set_cache_info(label_info='NEW', data=0)

        # Round-trip a plain string and an int through CacheRecord serialization.
        byte_array = CacheRecord.dumps(STRING_TRY)
        string_return = CacheRecord.loads(byte_array=byte_array, type_object=str)

        byte_array = CacheRecord.dumps(start_value)
        compare_value = CacheRecord.loads(byte_array=byte_array, type_object=int)

        logger.info('String pass: {0}, string get: {1}'.format(STRING_TRY, string_return))

        # Round-trip an observable through CacheRecord serialization.
        observable = UnitTestsUtilityObservables.test_method_create_observable(
            label_gost=LABEL_GOST_FILTER,
            index_element=1,
            device_index=1,
            timestamp=timestamp)

        byte_array = CacheRecord.dumps(record=observable)
        observable_test = CacheRecord.loads(byte_array=byte_array, type_object=Localization)

        # Counter: create, increase, and verify the final value.
        CacheRedisAdapter.counter_create(label_info=LABEL_COUNTER, start_value=start_value)

        for counter in range(0, increase_time):
            CacheRedisAdapter.counter_increase(label_info=LABEL_COUNTER)

        value = CacheRedisAdapter.counter_get(label_info=LABEL_COUNTER)

        if value != start_value + increase_time:
            logger.error('Unexpected counter value: {0} rather than {1}'.format(
                value, start_value + increase_time))

        # Store a list and a dictionary of observables as single cache entries.
        list_elements = list()
        dictionary_test = dict()

        for index_element in range(0, max_counter):
            observable = UnitTestsUtilityObservables.test_method_create_observable(
                label_gost=LABEL_GOST_FILTER,
                index_element=index_element,
                device_index=index_element,
                timestamp=timestamp)

            list_elements.append(observable)
            dictionary_test[observable.get_label_cache()] = observable

        CacheRedisAdapter.set_cache_info(label_info=LABEL_LIST_OBSERVABLE, data=list_elements)
        CacheRedisAdapter.set_cache_info(label_info=LABEL_DICT_OBSERVABLE, data=dictionary_test)

        list_return = CacheRedisAdapter.get_cached_info(
            label_info=LABEL_LIST_OBSERVABLE, type_data=list)
        dict_return = CacheRedisAdapter.get_cached_info(
            label_info=LABEL_DICT_OBSERVABLE, type_data=dict)

        CacheRedisAdapter.dictionary_create(label_info=LABEL_DICTIONARY_OBSERVABLE)

        # String list: append one element and read it back.
        CacheRedisAdapter.list_create(label_info=LABEL_TEST_LISTSTRING)
        CacheRedisAdapter.list_append_singleelement(
            label_info=LABEL_TEST_LISTSTRING, elem_to_append=STRING_TRY)

        string_get = CacheRedisAdapter.list_extractfirstelement(
            label_info=LABEL_TEST_LISTSTRING, type_element=str)

        logger.info('Test Set String: {0}, Extracted: {1}'.format(STRING_TRY, string_get))

        # Fill the observable dictionary one element at a time.
        for index_element in range(0, max_counter):
            if (index_element % 500) == 0:
                logger.info('TEST EXAMPLE COUNTER: {}'.format(index_element))

            localization_test = UnitTestsUtilityObservables.test_method_create_observable(
                label_gost=LABEL_GOST_FILTER,
                index_element=index_element,
                device_index=index_element,
                timestamp=timestamp)

            CacheRedisAdapter.dictionary_update_value(
                label_info=LABEL_DICTIONARY_OBSERVABLE,
                key=localization_test.get_label_cache(),
                value=localization_test)

        logger.info('STARTED TEST DEMO {} Observable CatalogCache Read Simulated'.format(max_counter))

        # Read the whole dictionary back and verify every element.
        dictionary_imported = CacheRedisAdapter.dictionary_get_all(
            label_info=LABEL_DICTIONARY_OBSERVABLE,
            type_value=ObservableGeneric)

        for index_element in range(0, max_counter):
            label_cache = UnitTestsUtilityObservables.test_method_get_label_cache(
                label_gostfilter=LABEL_GOST_FILTER,
                index_element=index_element)

            if label_cache not in dictionary_imported or not dictionary_imported[label_cache]:
                return False

            localization_read = dictionary_imported[label_cache]

            if not UnitTestsUtilityObservables.test_method_check_obs_validity(
                    observable=localization_read,
                    index_element=index_element,
                    timestamp=timestamp):
                return False

            if (index_element % 500) == 0:
                logger.info('TEST EXAMPLE COUNTER READ: {}'.format(index_element))

        logger.info('END TEST DEMO {} Observable CatalogCache Read Simulated'.format(max_counter))

        # List of observables: append, then drain while validating each extracted element.
        CacheRedisAdapter.list_create(label_info=LABEL_TEST_LIST)

        for index_element in range(0, max_counter):
            localiz_to_append = UnitTestsUtilityObservables.test_method_create_observable(
                label_gost=LABEL_GOST_FILTER,
                index_element=index_element,
                device_index=index_element,
                timestamp=timestamp)

            CacheRedisAdapter.list_append_singleelement(
                label_info=LABEL_TEST_LIST, elem_to_append=localiz_to_append)

        index_element = 0

        while CacheRedisAdapter.list_getcounterelements(label_info=LABEL_TEST_LIST) > 0:
            localiz_extracted = CacheRedisAdapter.list_extractfirstelement(
                label_info=LABEL_TEST_LIST, type_element=Localization)

            if not UnitTestsUtilityObservables.test_method_check_obs_validity(
                    observable=localiz_extracted,
                    index_element=index_element,
                    timestamp=timestamp):
                return False

            index_element += 1

        logger.info('END TEST DEMO {} Observable CatalogCache List Extract'.format(max_counter))

        return True
    except Exception as ex:
        logger.error('UnitTestCacheRedis test_method Exception: {}'.format(ex))
        return False
def get_flag_connected() -> int:
    value_return = CacheRedisAdapter.get_cached_info(
        label_info=ServiceObservationClient.LABEL_FLAG_CONNECTED,
        type_data=int)
    return value_return
def get_flag_subscribed() -> int:
    value_return = CacheRedisAdapter.get_cached_info(
        label_info=ServiceUpdateDatastreamsClient.LABEL_FLAG_SUBSCRIBED,
        type_data=int)
    return value_return
def test_method_queue_detection() -> bool:
    try:
        CacheRedisAdapter.initialize()

        logger.info('test_method_queue_detection Started')

        # Build a crowd-density observation and the camera registration used
        # to project queue regions onto the ground plane.
        crowd_density_local = CrowdDensityLocalObservation()
        crowd_density_local.density_map = UnitTestQueueDetection.get_densitymap()

        camera_registration = CameraRegistration(
            ground_plane_position=Point(x=12.56539, y=55.67474, srid=4326),
            ground_plane_orientation=30)

        logger.info('test_method_queue_detection Third Step')

        dictionary_regions = QueueDetectionAlgorithm.find_queueshape_areas(
            density_map=crowd_density_local.density_map,
            min_cell_count=3)

        count_queueshapearea_saved = 0

        logger.info('test_method_queue_detection Fourth Step')

        for mean_people in dictionary_regions.keys():
            logger.info('test_method_queue_detection mean count queue: {}'.format(mean_people))

            group_region = dictionary_regions[mean_people]

            if not group_region or group_region.empty():
                logger.info('test_method_queue_detection group_region empty')
                continue

            logger.info('test_method_queue_detection group_region count: {}'.format(len(group_region)))

            for region in group_region.get_listregions():
                if not region:
                    continue

                logger.info('test_method_queue_detection single queue detection creation...')

                single_queue_detection = QueueDetectionAlert()
                single_queue_detection.qsma_id = 'QSA_ID{0:02d}_Mean{1:03d}'.format(
                    count_queueshapearea_saved, mean_people)
                single_queue_detection.initialize_status()

                logger.info('test_method_queue_detection single queue detection created')

                single_queue_detection.set_region_queue(
                    region_queue=region,
                    camera_registration=camera_registration)
                single_queue_detection.mean_people = mean_people
                single_queue_detection.set_timestamp(
                    timestamp=datetime.datetime.now(tz=pytz.utc))

                logger.info('test_method_queue_detection Try Saving SingleQueueDetection')

                single_queue_detection.save()

                logger.info('test_method_queue_detection saved single_queue_detection: {}'.format(
                    single_queue_detection.qda_id))

                count_queueshapearea_saved += 1

                # Serialize the alert, cache it, and read it back from Redis.
                json_test = single_queue_detection.to_dictionary()
                json_test1 = single_queue_detection.to_ogc_dictionary()
                string_test = single_queue_detection.to_string()

                CacheRedisAdapter.set_cache_info(label_info='DATATESTSAVED', data=json_test1)
                json_test_2 = CacheRedisAdapter.get_cached_info(
                    label_info='DATATESTSAVED', type_data=dict)

                logger.info('Cached JSON: {}'.format(json_test1))
                logger.info('Read JSON From Cache: {}'.format(json_test_2))

                # Reload the alert from the database and compare it with the in-memory copy.
                queue_detection_db = UtilityDatabase.get_outputmessage_byid(
                    id=single_queue_detection.qda_id,
                    outputmessagetype=OutputMessageType.OUTPUT_MESSAGE_TYPE_QUEUEDETECTIONALERT)

                json_test_db = queue_detection_db.to_dictionary()
                string_test_db = queue_detection_db.to_string()

                if UnitTestQueueDetection.compare_dictionaries(dict_a=json_test, dict_b=json_test_db):
                    logger.info(
                        'Comparison JSON From hardcoded to DB extracted QueueDetectionAlert info are equal')
                else:
                    logger.info(
                        'Comparison JSON From hardcoded to DB extracted QueueDetectionAlert are not equal')
                    logger.info('String Hardcoded: {}'.format(string_test))
                    logger.info('String DB Extracted: {}'.format(string_test_db))

        logger.info('test_method_queue_detection saved correctly {} QueueDetectionAlert'.format(
            count_queueshapearea_saved))

        return True
    except Exception as ex:
        logger.error('UnitTestQueueDetection test_method_queue_detection Exception: {}'.format(ex))
        return False