def assemble_handlers(self):
    """Look up this site's SYSTEM_MONITOR package in the graph and build
    its Redis data-structure handlers into ``self.handlers``.

    Populates:
        SYSTEM_STATUS, MONITORING_DATA -- hash handlers
        SYSTEM_ALERTS                  -- redis stream reader
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SYSTEM_MONITOR")
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE", label="SYSTEM_MONITOR")
    _, sources = self.qs.match_list(query_list)
    # First matched package node carries the data-structure definitions.
    package = sources[0]
    builders = Generate_Handlers(package, self.qs)
    ds = package["data_structures"]
    self.handlers = {
        "SYSTEM_STATUS": builders.construct_hash(ds["SYSTEM_STATUS"]),
        "MONITORING_DATA": builders.construct_hash(ds["MONITORING_DATA"]),
        "SYSTEM_ALERTS": builders.construct_redis_stream_reader(
            ds["SYSTEM_ALERTS"]),
    }
def generate_rpc_queue(self, search_list, key):
    """Walk the graph along *search_list* from this SITE node, then build an
    RPC client bound to the queue named by ``data_structures[key]['queue']``.

    Each element of *search_list* is either a relationship string or a
    ``[relationship, label]`` pair; the last element is matched as the
    terminal node.

    Returns:
        A configured RPC client from Generate_Handlers.
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    # BUG FIX: the original tested type(search_list[-1]) inside this loop,
    # so a plain-string middle element was indexed like a list whenever the
    # LAST element happened to be a pair. Test each element itself.
    for element in search_list[:-1]:
        if isinstance(element, list):
            query_list = self.qs.add_match_relationship(
                query_list, relationship=element[0], label=element[1])
        else:
            query_list = self.qs.add_match_relationship(
                query_list, relationship=element)
    terminal = search_list[-1]
    if isinstance(terminal, list):
        query_list = self.qs.add_match_terminal(
            query_list, relationship=terminal[0], label=terminal[1])
    else:
        query_list = self.qs.add_match_terminal(
            query_list, relationship=terminal)
    package_sets, package_sources = self.qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]
    queue_name = data_structures[key]['queue']
    print("queue_name", queue_name)
    generate_handlers = Generate_Handlers(package, self.qs)
    rpc_client = generate_handlers.construct_rpc_client()
    rpc_client.set_rpc_queue(queue_name)
    return rpc_client
def __init__(self, mqtt_server_data, mqtt_devices, site_data, package, qs):
    # Purpose: wire an MQTT client to the Redis bridge store and run its
    # network loop forever -- this constructor does not return in normal
    # operation (loop_forever blocks).
    generate_handlers = Generate_Handlers(package, qs)
    data_structures = package["data_structures"]
    self.server_state_hash = generate_handlers.construct_hash(
        data_structures["MQTT_SERVER_STATE"])
    # Mark the server state as down until a connection callback updates it.
    self.server_state_hash.hset("SERVER_STATE", False)
    self.mqtt_server_data = mqtt_server_data
    self.site_data = site_data
    # Bridge that stores incoming MQTT traffic into Redis structures.
    self.mqtt_bridge = MQTT_TO_REDIS_BRIDGE_STORE(site_data, mqtt_devices,
                                                  package, generate_handlers)
    # Empty client_id + clean_session=True: broker assigns an id and keeps
    # no persistent session for this client.
    self.client = mqtt.Client(client_id="", clean_session=True,
                              userdata=None, transport="tcp")
    #self.client.tls_set( cert_reqs=ssl.CERT_NONE )
    # Callbacks must be assigned before connect() so no events are missed.
    self.client.on_connect = self.on_connect
    self.client.on_message = self.on_message
    self.client.on_disconnect = self.on_disconnect
    self.connection_flag = False
    print(self.mqtt_server_data["HOST"], self.mqtt_server_data["PORT"])
    # 60-second keepalive; blocks below in loop_forever().
    self.client.connect(self.mqtt_server_data["HOST"],
                        self.mqtt_server_data["PORT"], 60)
    self.client.loop_forever()
def generate_redis_handlers(self):
    """Build the ETO/rain data-structure handlers into ``self.ds_handlers``.

    Hash handlers: EXCEPTION_VALUES, ETO_VALUES, RAIN_VALUES, ETO_CONTROL,
    ETO_ACCUMULATION_TABLE.  Stream writers: ETO_HISTORY, RAIN_HISTORY,
    EXCEPTION_LOG.
    """
    self.handlers = {}
    data_structures = self.package["data_structures"]
    builders = Generate_Handlers(self.package, self.qs)
    # (key, constructor) pairs in the original construction order.
    layout = (
        ("EXCEPTION_VALUES", builders.construct_hash),
        ("ETO_VALUES", builders.construct_hash),
        ("RAIN_VALUES", builders.construct_hash),
        ("ETO_CONTROL", builders.construct_hash),
        ("ETO_HISTORY", builders.construct_stream_writer),
        ("RAIN_HISTORY", builders.construct_stream_writer),
        ("EXCEPTION_LOG", builders.construct_stream_writer),
        ("ETO_ACCUMULATION_TABLE", builders.construct_hash),
    )
    self.ds_handlers = {}
    for key, build in layout:
        self.ds_handlers[key] = build(data_structures[key])
def __init__(self, redis_site):
    """Serve the MQTT publish job queue for a site.

    Looks up the MQTT_DEVICES_DATA package and the MQTT_SERVER node in the
    graph, connects to the broker (retrying until it succeeds), then enters
    ``self.server_job_queue()`` -- this constructor does not return in
    normal operation.
    """
    qs = Query_Support(redis_site)
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]
    generate_handlers = Generate_Handlers(package, qs)
    self.job_queue_server = generate_handlers.construct_job_queue_server(
        data_structures["MQTT_PUBLISH_QUEUE"])
    # Locate the MQTT broker host/port node for this site.
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_terminal(query_list,
                                       relationship="MQTT_SERVER")
    host_sets, host_sources = qs.match_list(query_list)
    self.mqtt_server_data = host_sources[0]
    self.client = mqtt.Client(client_id="", clean_session=True,
                              userdata=None, transport="tcp")
    self.client.on_connect = self.on_connect
    self.client.on_disconnect = self.on_disconnect
    self.client.on_publish = self.on_publish
    self.connection_flag = False
    print("connection attempting")
    # Retry loop: keep attempting to connect every 5 seconds.
    # FIX: narrowed the original bare ``except:`` so KeyboardInterrupt /
    # SystemExit are no longer swallowed by the retry loop.
    while not self.connection_flag:
        try:
            self.client.connect(self.mqtt_server_data["HOST"],
                                self.mqtt_server_data["PORT"], 60)
        except Exception:
            time.sleep(5)
        else:
            self.connection_flag = True
    print("connection achieved")
    self.client.loop_start()
    self.server_job_queue()
def generate_data_handlers(self, qs):
    """Build MQTT bookkeeping handlers from ``self.package`` into
    ``self.ds_handlers``: two stream writers plus six hash handlers.
    """
    data_structures = self.package["data_structures"]
    builders = Generate_Handlers(self.package, qs)
    writer = builders.construct_redis_stream_writer
    hasher = builders.construct_hash
    # (key, constructor) pairs, in the original construction order.
    layout = (
        ("MQTT_PAST_ACTION_QUEUE", writer),
        ("MQTT_INPUT_QUEUE", writer),
        ("MQTT_DEVICES", hasher),
        ("MQTT_SUBSCRIPTIONS", hasher),
        ("MQTT_CONTACT_LOG", hasher),
        ("MQTT_REBOOT_LOG", hasher),
        ("MQTT_UNKNOWN_DEVICES", hasher),
        ("MQTT_UNKNOWN_SUBSCRIPTIONS", hasher),
    )
    self.ds_handlers = {}
    for key, build in layout:
        self.ds_handlers[key] = build(data_structures[key])
def generate_mqtt_devices(redis_site, qs):
    """Return a hash handler from the site's MQTT_DEVICES_DATA package.

    NOTE(review): despite the function name, this returns the handler for
    the MQTT_CONTACT_LOG structure, not MQTT_DEVICES -- confirm intent
    against the callers.
    """
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=redis_site["site"])
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    return builders.construct_hash(ds["MQTT_CONTACT_LOG"])
def generate_irrigation_control(redis_site_data, qs):
    """Return a managed-hash handler for the IRRIGATION_CONTROL structure
    of the site's IRRIGATION_CONTROL_MANAGEMENT package.
    """
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=redis_site_data["site"])
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "IRRIGATION_CONTROL_MANAGEMENT"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    return builders.construct_managed_hash(ds["IRRIGATION_CONTROL"])
def __init__(self, qs, site_data):
    """Build an RPC client bound to the site's SQL-server RPC queue."""
    self.site_data = site_data
    self.qs = qs
    # SITE -> SQL_SERVER -> PACKAGE{"name": "SQL_SERVER"}
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=site_data["site"])
    query_list = qs.add_match_relationship(
        query_list, relationship="SQL_SERVER")
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "SQL_SERVER"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    self.rpc_client = builders.construct_rpc_client()
    # Bind the client to the queue declared by the package.
    self.rpc_client.set_rpc_queue(ds["SQL_SERVER_RPC_SERVER"]['queue'])
def __init__(self, mqtt_server_data, mqtt_devices, site_data, package, qs,
             irrigation_hash_control, irrigation_hash_fields):
    # Purpose: TLS MQTT client bridged to Redis for irrigation data;
    # authenticates with a password fetched from a local Redis hash, then
    # blocks forever in loop_forever().
    self.mqtt_server_data = mqtt_server_data
    self.site_data = site_data
    generate_handlers = Generate_Handlers(package, qs)
    self.mqtt_bridge = MQTT_TO_REDIS_BRIDGE_STORE(site_data, mqtt_devices,
                                                  package, generate_handlers,
                                                  irrigation_hash_fields,
                                                  irrigation_hash_control)
    self.client = mqtt.Client(client_id="", clean_session=True,
                              userdata=None, transport="tcp")
    # TLS without certificate verification -- encrypts traffic but does not
    # authenticate the broker.
    self.client.tls_set(cert_reqs=ssl.CERT_NONE)
    # Broker password is stored in Redis under hash "mosquitto_local",
    # field "pi"; db index comes from site configuration.
    redis_handle_pw = redis.StrictRedis(site_data["host"], site_data["port"],
                                        db=site_data["redis_password_db"],
                                        decode_responses=True)
    self.client.username_pw_set(
        "pi", redis_handle_pw.hget("mosquitto_local", "pi"))
    # Callbacks must be assigned before connect() so no events are missed.
    self.client.on_connect = self.on_connect
    self.client.on_message = self.on_message
    self.connection_flag = False
    print(self.mqtt_server_data["HOST"], self.mqtt_server_data["PORT"])
    # 60-second keepalive; blocks below in loop_forever().
    self.client.connect(self.mqtt_server_data["HOST"],
                        self.mqtt_server_data["PORT"], 60)
    self.client.loop_forever()
def assemble_handlers(self):
    """Build handlers for the site's MQTT_DEVICES_DATA package into
    ``self.handlers``: one stream reader (MQTT_PAST_ACTION_QUEUE) and four
    hash handlers.
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_relationship(
        query_list, relationship="MQTT_DEVICES")
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE", label="MQTT_DEVICES_DATA")
    _, sources = self.qs.match_list(query_list)
    package = sources[0]
    builders = Generate_Handlers(package, self.qs)
    ds = package["data_structures"]
    self.handlers = {
        "MQTT_PAST_ACTION_QUEUE": builders.construct_redis_stream_reader(
            ds["MQTT_PAST_ACTION_QUEUE"]),
        "MQTT_CONTACT_LOG": builders.construct_hash(ds["MQTT_CONTACT_LOG"]),
        "MQTT_REBOOT_LOG": builders.construct_hash(ds["MQTT_REBOOT_LOG"]),
        "MQTT_UNKNOWN_DEVICES": builders.construct_hash(
            ds["MQTT_UNKNOWN_DEVICES"]),
        "MQTT_UNKNOWN_SUBSCRIPTIONS": builders.construct_hash(
            ds["MQTT_UNKNOWN_SUBSCRIPTIONS"]),
    }
def generate_structures_with_processor_container(self, processor_list,
                                                 key_list, hash_flag=True):
    """Build handlers per processor per container.

    For each processor in *processor_list* and each of its containers,
    query the container's DATA_STRUCTURES package and build one handler per
    key in *key_list* (hash handlers when *hash_flag* is True, otherwise
    redis stream readers).

    Returns:
        dict: ``{processor: {container: {key: handler}}}``
    """
    processor_ds = {}
    for processor in processor_list:
        container_ds = {}
        for container in self.find_containers(processor):
            query_list = []
            query_list = self.qs.add_match_relationship(
                query_list, relationship="SITE",
                label=self.site_data["site"])
            query_list = self.qs.add_match_relationship(
                query_list, relationship="PROCESSOR", label=processor)
            query_list = self.qs.add_match_relationship(
                query_list, relationship="CONTAINER", label=container)
            query_list = self.qs.add_match_terminal(
                query_list, relationship="PACKAGE", label="DATA_STRUCTURES")
            _, sources = self.qs.match_list(query_list)
            package = sources[0]
            ds = package["data_structures"]
            builders = Generate_Handlers(package, self.qs)
            # Preserve the original strict ``== True`` test.
            if hash_flag == True:
                build = builders.construct_hash
            else:
                build = builders.construct_redis_stream_reader
            container_ds[container] = {key: build(ds[key])
                                       for key in key_list}
        processor_ds[processor] = container_ds
    return processor_ds
def generate_data_handlers(self, redis_site, qs):
    """Build the PLC measurement stream writer and access-class factory
    from the site's PLC_MEASUREMENTS_PACKAGE.
    """
    # SITE -> PLC_MEASUREMENTS -> PACKAGE{"name": "PLC_MEASUREMENTS_PACKAGE"}
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=redis_site["site"])
    query_list = qs.add_match_relationship(
        query_list, relationship="PLC_MEASUREMENTS")
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "PLC_MEASUREMENTS_PACKAGE"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    self.ds_handlers = {
        "PLC_MEASUREMENTS_STREAM": builders.construct_redis_stream_writer(
            ds["PLC_MEASUREMENTS_STREAM"]),
    }
    self.construct_access_class = Construct_Access_Classes(builders)
def construct_fileserver_instance(qs, site_data):
    """Build and start the file-server RPC service for this site."""
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=site_data["site"])
    query_list = qs.add_match_relationship(
        query_list, relationship="FILE_SERVER")
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "FILE_SERVER"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    # NOTE(review): "construct_rpc_sever" is the project API's spelling
    # (likely a typo of "server"); kept as-is to match Generate_Handlers.
    rpc_queue = builders.construct_rpc_sever(ds["FILE_SERVER_RPC_SERVER"])
    Construct_RPC_Server(rpc_queue)
def __init__(self, redis_site, topic_prefix, qs):
    """Create a job-queue client for the site's MQTT publish queue."""
    self.topic_prefix = topic_prefix
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=redis_site["site"])
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    builders = Generate_Handlers(package, qs)
    ds = package["data_structures"]
    self.job_queue_client = builders.construct_job_queue_client(
        ds["MQTT_PUBLISH_QUEUE"])
def generate_structures_with_processor(self, processor_list, search_list,
                                       key_list, hash_flag=True):
    """Build handlers per processor by walking *search_list* from each
    SITE -> PROCESSOR node.

    Each element of *search_list* is either a relationship string or a
    ``[relationship, label]`` pair; the last element is matched as the
    terminal node.  For each key in *key_list*, a hash handler is built
    when *hash_flag* is true, otherwise a redis stream reader.

    Returns:
        dict: ``{processor: {key: handler}}``
    """
    return_value = {}
    for processor in processor_list:
        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        query_list = self.qs.add_match_relationship(
            query_list, relationship="PROCESSOR", label=processor)
        # Generalized (consistent with generate_rpc_queue): middle path
        # elements may also be [relationship, label] pairs.
        for element in search_list[:-1]:
            if isinstance(element, list):
                query_list = self.qs.add_match_relationship(
                    query_list, relationship=element[0], label=element[1])
            else:
                query_list = self.qs.add_match_relationship(
                    query_list, relationship=element)
        terminal = search_list[-1]
        # Idiom fix: isinstance() instead of type(...) == list.
        if isinstance(terminal, list):
            query_list = self.qs.add_match_terminal(
                query_list, relationship=terminal[0], label=terminal[1])
        else:
            query_list = self.qs.add_match_terminal(
                query_list, relationship=terminal)
        package_sets, package_sources = self.qs.match_list(query_list)
        package = package_sources[0]
        data_structures = package["data_structures"]
        print("data_structures", data_structures.keys())
        generate_handlers = Generate_Handlers(package, self.qs)
        if hash_flag:
            build = generate_handlers.construct_hash
        else:
            build = generate_handlers.construct_redis_stream_reader
        return_value[processor] = {key: build(data_structures[key])
                                   for key in key_list}
    return return_value
def generate_data_handlers(self, package, qs):
    """Build MQTT action/contact/reboot handlers, then drop contact-log
    entries whose device is no longer present in ``self.mqtt_devices``.
    """
    self.handlers = {}
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    self.ds_handlers = {
        "MQTT_PAST_ACTION_QUEUE": builders.construct_redis_stream_writer(
            ds["MQTT_PAST_ACTION_QUEUE"]),
        "MQTT_CONTACT_LOG": builders.construct_hash(ds["MQTT_CONTACT_LOG"]),
        "MQTT_REBOOT_LOG": builders.construct_hash(ds["MQTT_REBOOT_LOG"]),
    }
    # Prune contact-log fields that no longer map to a configured device.
    contact_log = self.ds_handlers["MQTT_CONTACT_LOG"]
    stale = set(contact_log.hkeys()) - set(self.mqtt_devices.keys())
    for device in list(stale):
        contact_log.hdelete(device)
def __init__(base_self, self):
    # Purpose: assemble weather-station/ETO Redis handlers from the graph,
    # register them with the web redis_access layer, and mount the ETO
    # management web subsystem.
    # NOTE(review): unconventional signature -- the enclosing instance is
    # ``base_self`` and ``self`` is a second object carrying qs/site_data/
    # app/auth/etc.; confirm against the caller before refactoring.
    # from graph get hash tables
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_terminal(
        query_list, relationship="WS_STATION")
    # NOTE(review): eto_sets/eto_sources are never used below -- dead
    # query? confirm before removing.
    eto_sets, eto_sources = self.qs.match_list(query_list)
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "WEATHER_STATION_DATA"})
    package_sets, package_sources = self.qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]
    generate_handlers = Generate_Handlers(package, self.qs)
    # Hash handlers for current values/控制 state; stream readers for history.
    self.ds_handlers = {}
    self.ds_handlers["EXCEPTION_VALUES"] = generate_handlers.construct_hash(
        data_structures["EXCEPTION_VALUES"])
    self.ds_handlers["ETO_VALUES"] = generate_handlers.construct_hash(
        data_structures["ETO_VALUES"])
    self.ds_handlers["RAIN_VALUES"] = generate_handlers.construct_hash(
        data_structures["RAIN_VALUES"])
    self.ds_handlers["ETO_CONTROL"] = generate_handlers.construct_hash(
        data_structures["ETO_CONTROL"])
    self.ds_handlers["ETO_HISTORY"] = generate_handlers.construct_redis_stream_reader(
        data_structures["ETO_HISTORY"])
    self.ds_handlers["RAIN_HISTORY"] = generate_handlers.construct_redis_stream_reader(
        data_structures["RAIN_HISTORY"])
    self.ds_handlers["EXCEPTION_LOG"] = generate_handlers.construct_redis_stream_reader(
        data_structures["EXCEPTION_LOG"])
    self.ds_handlers["ETO_ACCUMULATION_TABLE"] = generate_handlers.construct_hash(
        data_structures["ETO_ACCUMULATION_TABLE"])
    # Expose selected hashes to the web access layer.
    self.redis_access.add_access_handlers(
        "ETO_VALUES", self.ds_handlers["ETO_VALUES"], "Redis_Hash_Dictionary")
    self.redis_access.add_access_handlers(
        "RAIN_VALUES", self.ds_handlers["RAIN_VALUES"], "Redis_Hash_Dictionary")
    eto_update_table = self.ds_handlers["ETO_ACCUMULATION_TABLE"]
    self.redis_access.add_access_handlers(
        "eto_update_table", eto_update_table, "Redis_Hash_Dictionary")
    # Mount the ETO management web subsystem with everything assembled above.
    Load_ETO_Management_Web(self.app, self.auth, request,
                            file_server_library=self.file_server_library,
                            path='eto_py3',
                            url_rule_class=self.url_rule_class,
                            subsystem_name="ETO_MANAGEMENT",
                            render_template=render_template,
                            redis_access=self.redis_access,
                            eto_update_table=eto_update_table,
                            handlers=self.ds_handlers)
def construct_redis_instance(qs, site_data):
    """Build a Redis_Monitor wired to the site's REDIS_MONITORING package.

    Returns:
        Redis_Monitor: monitor bound to six stream writers.
    """
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=site_data["site"])
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "REDIS_MONITORING"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    #
    # do verifications of data package
    #
    ds = package["data_structures"]
    print("data_structures", ds.keys())
    builders = Generate_Handlers(package, qs)
    # (output key, data-structure key) pairs for the monitoring streams.
    stream_spec = (
        ("KEYS", "REDIS_MONITOR_KEY_STREAM"),
        ("CLIENTS", "REDIS_MONITOR_CLIENT_STREAM"),
        ("MEMORY", "REDIS_MONITOR_MEMORY_STREAM"),
        ("REDIS_MONITOR_CALL_STREAM", "REDIS_MONITOR_CALL_STREAM"),
        ("REDIS_MONITOR_CMD_TIME_STREAM", "REDIS_MONITOR_CMD_TIME_STREAM"),
        ("REDIS_MONITOR_SERVER_TIME", "REDIS_MONITOR_SERVER_TIME"),
    )
    redis_monitoring_streams = {}
    for out_key, ds_key in stream_spec:
        redis_monitoring_streams[out_key] = builders.construct_stream_writer(
            ds[ds_key])
    return Redis_Monitor(qs.get_redis_data_handle(),
                         redis_monitoring_streams)
def assemble_data_structures(self, controller_name):
    """Build web/error handlers for a controller's NODE_PROCESSES package.

    Returns:
        dict: ERROR_STREAM (stream reader), ERROR_HASH (hash),
        WEB_COMMAND_QUEUE (job-queue client), WEB_DISPLAY_DICTIONARY (hash).
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_relationship(
        query_list, relationship="PROCESSOR", label=controller_name)
    query_list = self.qs.add_match_relationship(
        query_list, relationship="NODE_PROCESSES", label=controller_name)
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE")
    _, sources = self.qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, self.qs)
    return {
        "ERROR_STREAM": builders.construct_redis_stream_reader(
            ds["ERROR_STREAM"]),
        "ERROR_HASH": builders.construct_hash(ds["ERROR_HASH"]),
        "WEB_COMMAND_QUEUE": builders.construct_job_queue_client(
            ds["WEB_COMMAND_QUEUE"]),
        "WEB_DISPLAY_DICTIONARY": builders.construct_hash(
            ds["WEB_DISPLAY_DICTIONARY"]),
    }
def __init__(self, redis_site):
    """Request a device reboot, then send HEART_BEAT/SERVER_CHECK requests
    every 15 seconds -- this constructor never returns.
    """
    qs = Query_Support(redis_site)
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=redis_site["site"])
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    _, sources = qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    self.job_queue_client = builders.construct_job_queue_client(
        ds["MQTT_PUBLISH_QUEUE"])
    self.send_request("REBOOT")
    # Periodic keep-alive loop.
    while True:
        self.send_request("HEART_BEAT")
        self.send_request("SERVER_CHECK")
        time.sleep(15.)
def assemble_container_data_structures(self, container_name):
    """Build web/error/process-metric handlers for one container's
    DATA_STRUCTURES package.

    Returns:
        dict: stream readers (ERROR_STREAM, PROCESS_VSZ, PROCESS_RSS,
        PROCESS_CPU), hashes (ERROR_HASH, WEB_DISPLAY_DICTIONARY) and the
        WEB_COMMAND_QUEUE job-queue client.
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_relationship(
        query_list, relationship="CONTAINER", label=container_name)
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE", label="DATA_STRUCTURES")
    _, package_nodes = self.qs.match_list(query_list)
    package = package_nodes[0]
    builders = Generate_Handlers(package, self.qs)
    ds = package["data_structures"]
    handlers = {
        "ERROR_STREAM": builders.construct_redis_stream_reader(
            ds["ERROR_STREAM"]),
        "ERROR_HASH": builders.construct_hash(ds["ERROR_HASH"]),
        "WEB_COMMAND_QUEUE": builders.construct_job_queue_client(
            ds["WEB_COMMAND_QUEUE"]),
        "WEB_DISPLAY_DICTIONARY": builders.construct_hash(
            ds["WEB_DISPLAY_DICTIONARY"]),
    }
    # Per-process resource metric streams.
    for key in ("PROCESS_VSZ", "PROCESS_RSS", "PROCESS_CPU"):
        handlers[key] = builders.construct_redis_stream_reader(ds[key])
    return handlers
def __init__(self, redis_site_data):
    """Build the irrigation-control hash handler and the dispatch table of
    field setters for the site's IRRIGATION_CONTROL_MANAGEMENT package.
    """
    qs = Query_Support(redis_server_ip=redis_site_data["host"],
                       redis_server_port=redis_site_data["port"])
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=redis_site_data["site"])
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "IRRIGATION_CONTROL_MANAGEMENT"})
    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]
    # FIX: every other call site constructs Generate_Handlers(package, qs);
    # the original passed redis_site_data (a plain dict) as the second
    # argument here, which would break any handler that queries the graph.
    generate_handlers = Generate_Handlers(package, qs)
    self.handler = generate_handlers.construct_hash(
        data_structures["IRRIGATION_CONTROL"])
    # Dispatch table: control-field name -> bound setter method.
    # "ELASPED_TIME" spelling is a live runtime key -- kept as-is.
    self.access_handler = {
        "RAIN_FLAG": self.set_rain_flag,
        "ETO_MANAGEMENT": self.set_eto_management_flag,
        "FLOW_CUT_OFF": self.set_flow_cutoff,
        "CLEANING_INTERVAL": self.set_cleaning_interval,
        "MASTER_VALVE": self.set_master_valve,
        "CLEANING_VALVE": self.set_cleaning_valve,
        "MASTER_VALVE_SETUP": self.set_master_valve_setup,
        "SCHEDULE_NAME": self.set_schedule_name,
        "STEP": self.set_step_number,
        "RUN_TIME": self.set_run_time,
        "ELASPED_TIME": self.set_elasped_time,
        "TIME_STAMP": self.set_time_stamp,
        "SUSPEND": self.set_suspend,
    }
def generate_data_handlers(self, package, qs):
    """Build MQTT stream/hash handlers, then drop contact-log entries for
    devices no longer present in ``self.mqtt_devices``.

    FIX: removed the unreachable code that followed the original bare
    ``return`` -- it referenced undefined names ``status`` and ``name`` and
    would have raised NameError if it could ever execute.
    """
    self.handlers = {}
    data_structures = package["data_structures"]
    generate_handlers = Generate_Handlers(package, qs)
    self.ds_handlers = {}
    self.ds_handlers["MQTT_INPUT_QUEUE"] = \
        generate_handlers.construct_redis_stream_reader(
            data_structures["MQTT_INPUT_QUEUE"])
    self.ds_handlers["MQTT_PAST_ACTION_QUEUE"] = \
        generate_handlers.construct_redis_stream_writer(
            data_structures["MQTT_PAST_ACTION_QUEUE"])
    self.ds_handlers["MQTT_SENSOR_QUEUE"] = \
        generate_handlers.construct_redis_stream_writer(
            data_structures["MQTT_SENSOR_QUEUE"])
    self.ds_handlers["MQTT_CONTACT_LOG"] = generate_handlers.construct_hash(
        data_structures["MQTT_CONTACT_LOG"])
    self.ds_handlers["MQTT_REBOOT_LOG"] = generate_handlers.construct_hash(
        data_structures["MQTT_REBOOT_LOG"])
    self.ds_handlers["MQTT_SENSOR_STATUS"] = generate_handlers.construct_hash(
        data_structures["MQTT_SENSOR_STATUS"])
    # Prune contact-log fields whose device is no longer configured.
    contact_set = set(self.ds_handlers["MQTT_CONTACT_LOG"].hkeys())
    device_set = set(self.mqtt_devices.keys())
    for stale_device in list(contact_set - device_set):
        self.ds_handlers["MQTT_CONTACT_LOG"].hdelete(stale_device)
def construct_op_monitoring_instance(qs, site_data):
    """Build an Op_Monitor for this site from the OP_MONITOR node's
    monitoring list and the SYSTEM_MONITOR package's handlers.

    Side effect: clears the SYSTEM_STATUS hash before the monitor starts.
    """
    # Fetch the OP_MONITOR node to get the list of items to monitor.
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=site_data["site"])
    query_list = qs.add_match_terminal(query_list,
                                       relationship="OP_MONITOR")
    _, data_sources = qs.match_list(query_list)
    monitoring_list = data_sources[0]['OP_MONITOR_LIST']
    print("monitoring_list", monitoring_list)
    # Fetch the SYSTEM_MONITOR package for the data-structure handlers.
    query_list = []
    query_list = qs.add_match_relationship(
        query_list, relationship="SITE", label=site_data["site"])
    query_list = qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "SYSTEM_MONITOR"})
    _, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    #
    # do verifications of data package
    #
    ds = package["data_structures"]
    builders = Generate_Handlers(package, qs)
    handlers = {
        "SYSTEM_STATUS": builders.construct_hash(ds["SYSTEM_STATUS"]),
        "MONITORING_DATA": builders.construct_hash(ds["MONITORING_DATA"]),
        "SYSTEM_ALERTS": builders.construct_stream_writer(
            ds["SYSTEM_ALERTS"]),
        "SYSTEM_PUSHED_ALERTS": builders.construct_stream_writer(
            ds["SYSTEM_PUSHED_ALERTS"]),
    }
    # Start from a clean status hash.
    handlers["SYSTEM_STATUS"].delete_all()
    return Op_Monitor(site_data, qs, monitoring_list, handlers)
def assemble_handlers(self):
    """Build stream readers for the monitor_redis container's
    REDIS_MONITORING package into ``self.handlers`` (one reader per
    monitoring stream, keyed by its data-structure name).
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_relationship(
        query_list, relationship="CONTAINER", label="monitor_redis")
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE", label="REDIS_MONITORING")
    _, sources = self.qs.match_list(query_list)
    package = sources[0]
    builders = Generate_Handlers(package, self.qs)
    ds = package["data_structures"]
    self.handlers = {}
    for key in ("REDIS_MONITOR_KEY_STREAM",
                "REDIS_MONITOR_CLIENT_STREAM",
                "REDIS_MONITOR_MEMORY_STREAM",
                "REDIS_MONITOR_CALL_STREAM",
                "REDIS_MONITOR_CMD_TIME_STREAM",
                "REDIS_MONITOR_SERVER_TIME"):
        self.handlers[key] = builders.construct_redis_stream_reader(ds[key])
def generate_data_handlers(self, qs):
    """Build MQTT queue/status handlers from ``self.package`` into
    ``self.ds_handlers``: one stream reader, two stream writers and three
    hash handlers.
    """
    self.handlers = {}
    ds = self.package["data_structures"]
    builders = Generate_Handlers(self.package, qs)
    layout = (
        ("MQTT_INPUT_QUEUE", builders.construct_redis_stream_reader),
        ("MQTT_PAST_ACTION_QUEUE", builders.construct_redis_stream_writer),
        ("MQTT_SENSOR_QUEUE", builders.construct_redis_stream_writer),
        ("MQTT_CONTACT_LOG", builders.construct_hash),
        ("MQTT_REBOOT_LOG", builders.construct_hash),
        ("MQTT_SENSOR_STATUS", builders.construct_hash),
    )
    self.ds_handlers = {}
    for key, build in layout:
        self.ds_handlers[key] = build(ds[key])
def determine_container_structure(self, processor_name):
    """Build docker-monitor handlers for one processor's DATA_STRUCTURES
    package, including an RPC client bound to DOCKER_UPDATE_QUEUE.

    Returns:
        dict: ERROR_STREAM (stream reader), WEB_COMMAND_QUEUE (job-queue
        client), WEB_DISPLAY_DICTIONARY (hash), DOCKER_UPDATE_QUEUE (rpc
        client).
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_relationship(
        query_list, relationship="PROCESSOR", label=processor_name)
    query_list = self.qs.add_match_relationship(
        query_list, relationship="DOCKER_MONITOR")
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE", label="DATA_STRUCTURES")
    _, package_nodes = self.qs.match_list(query_list)
    package = package_nodes[0]
    builders = Generate_Handlers(package, self.qs)
    ds = package["data_structures"]
    handlers = {
        "ERROR_STREAM": builders.construct_redis_stream_reader(
            ds["ERROR_STREAM"]),
        "WEB_COMMAND_QUEUE": builders.construct_job_queue_client(
            ds["WEB_COMMAND_QUEUE"]),
        "WEB_DISPLAY_DICTIONARY": builders.construct_hash(
            ds["WEB_DISPLAY_DICTIONARY"]),
    }
    # RPC client bound to the docker-update queue declared by the package.
    rpc_client = builders.construct_rpc_client()
    rpc_client.set_rpc_queue(ds["DOCKER_UPDATE_QUEUE"]['queue'])
    handlers["DOCKER_UPDATE_QUEUE"] = rpc_client
    return handlers
def assemble_processor_monitoring_data_structures(self, controller):
    """Build stream readers for one processor's SYSTEM_MONITORING package.

    Returns:
        dict: one redis stream reader per monitoring metric, keyed by the
        metric's data-structure name.
    """
    query_list = []
    query_list = self.qs.add_match_relationship(
        query_list, relationship="SITE", label=self.site_data["site"])
    query_list = self.qs.add_match_relationship(
        query_list, relationship="PROCESSOR", label=controller)
    query_list = self.qs.add_match_terminal(
        query_list, relationship="PACKAGE",
        property_mask={"name": "SYSTEM_MONITORING"})
    _, sources = self.qs.match_list(query_list)
    package = sources[0]
    ds = package["data_structures"]
    builders = Generate_Handlers(package, self.qs)
    metric_keys = ("FREE_CPU", "RAM", "DISK_SPACE", "TEMPERATURE",
                   "PROCESS_CPU", "CPU_CORE", "SWAP_SPACE", "IO_SPACE",
                   "BLOCK_DEV", "CONTEXT_SWITCHES", "RUN_QUEUE", "EDEV")
    handlers = {}
    for key in metric_keys:
        handlers[key] = builders.construct_redis_stream_reader(ds[key])
    return handlers
# Fragment of a larger function (uses qs / redis_site / plc_server_name from
# an enclosing scope not visible here).  First query: fetch the named PLC
# server's PLC_SERVER_DATA package and build its handlers.
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_relationship(query_list,
                                       relationship="PLC_SERVER",
                                       label=plc_server_name)
query_list = qs.add_match_terminal(
    query_list,
    relationship="PACKAGE",
    property_mask={"name": "PLC_SERVER_DATA"})
package_sets, package_sources = qs.match_list(query_list)
package = package_sources[0]
generate_handlers = Generate_Handlers(package, qs)
data_structures = package["data_structures"]
#
# finding IO_LINKS
#
# Second query: locate the IO_LINK nodes attached to the same PLC server
# (results consumed past the end of this fragment).
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_relationship(query_list,
                                       relationship="PLC_SERVER",
                                       label=plc_server_name)
query_list = qs.add_match_terminal(query_list, relationship="IO_LINK")