Example #1
    def assemble_handlers(self):
        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        query_list = self.qs.add_match_relationship(
            query_list, relationship="MQTT_DEVICES")

        query_list = self.qs.add_match_terminal(query_list,
                                                relationship="PACKAGE",
                                                label="MQTT_DEVICES_DATA")

        package_sets, package_sources = self.qs.match_list(query_list)
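        # match_list returns the matched node sets and the source nodes; the
        # first source node is the package whose data structures are needed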

        package = package_sources[0]
        generate_handlers = Generate_Handlers(package, self.qs)
        data_structures = package["data_structures"]

        self.handlers = {}
        self.handlers[
            "MQTT_PAST_ACTION_QUEUE"] = generate_handlers.construct_redis_stream_reader(
                data_structures["MQTT_PAST_ACTION_QUEUE"])
        self.handlers["MQTT_CONTACT_LOG"] = generate_handlers.construct_hash(
            data_structures["MQTT_CONTACT_LOG"])
        self.handlers["MQTT_REBOOT_LOG"] = generate_handlers.construct_hash(
            data_structures["MQTT_REBOOT_LOG"])
        self.handlers[
            "MQTT_UNKNOWN_DEVICES"] = generate_handlers.construct_hash(
                data_structures["MQTT_UNKNOWN_DEVICES"])
        self.handlers[
            "MQTT_UNKNOWN_SUBSCRIPTIONS"] = generate_handlers.construct_hash(
                data_structures["MQTT_UNKNOWN_SUBSCRIPTIONS"])
Example #2
 def generate_redis_handlers(self):
     self.handlers = {}
     data_structures = self.package["data_structures"]
     generate_handlers = Generate_Handlers(self.package, self.qs)
     self.ds_handlers = {}
     self.ds_handlers[
         "EXCEPTION_VALUES"] = generate_handlers.construct_hash(
             data_structures["EXCEPTION_VALUES"])
     self.ds_handlers["ETO_VALUES"] = generate_handlers.construct_hash(
         data_structures["ETO_VALUES"])
     self.ds_handlers["RAIN_VALUES"] = generate_handlers.construct_hash(
         data_structures["RAIN_VALUES"])
     self.ds_handlers["ETO_CONTROL"] = generate_handlers.construct_hash(
         data_structures["ETO_CONTROL"])
     self.ds_handlers[
         "ETO_HISTORY"] = generate_handlers.construct_stream_writer(
             data_structures["ETO_HISTORY"])
     self.ds_handlers[
         "RAIN_HISTORY"] = generate_handlers.construct_stream_writer(
             data_structures["RAIN_HISTORY"])
     self.ds_handlers[
         "EXCEPTION_LOG"] = generate_handlers.construct_stream_writer(
             data_structures["EXCEPTION_LOG"])
     self.ds_handlers[
         "ETO_ACCUMULATION_TABLE"] = generate_handlers.construct_hash(
             data_structures["ETO_ACCUMULATION_TABLE"])
Example #3
    def assemble_handlers(self):
        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SYSTEM_MONITOR")

        query_list = self.qs.add_match_terminal(query_list,
                                                relationship="PACKAGE",
                                                label="SYSTEM_MONITOR")

        package_sets, package_sources = self.qs.match_list(query_list)

        package = package_sources[0]
        generate_handlers = Generate_Handlers(package, self.qs)
        data_structures = package["data_structures"]

        self.handlers = {}
        self.handlers["SYSTEM_STATUS"] = generate_handlers.construct_hash(
            data_structures["SYSTEM_STATUS"])
        self.handlers["MONITORING_DATA"] = generate_handlers.construct_hash(
            data_structures["MONITORING_DATA"])
        self.handlers[
            "SYSTEM_ALERTS"] = generate_handlers.construct_redis_stream_reader(
                data_structures["SYSTEM_ALERTS"])
Example #4
    def assemble_data_structures(self, controller_name):
        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])

        query_list = self.qs.add_match_relationship(query_list,
                                                    relationship="PROCESSOR",
                                                    label=controller_name)
        query_list = self.qs.add_match_relationship(
            query_list, relationship="NODE_PROCESSES", label=controller_name)
        query_list = self.qs.add_match_terminal(query_list,
                                                relationship="PACKAGE")

        package_sets, package_sources = self.qs.match_list(query_list)

        package = package_sources[0]
        data_structures = package["data_structures"]
        generate_handlers = Generate_Handlers(package, self.qs)
        handlers = {}
        handlers[
            "ERROR_STREAM"] = generate_handlers.construct_redis_stream_reader(
                data_structures["ERROR_STREAM"])
        handlers["ERROR_HASH"] = generate_handlers.construct_hash(
            data_structures["ERROR_HASH"])
        handlers[
            "WEB_COMMAND_QUEUE"] = generate_handlers.construct_job_queue_client(
                data_structures["WEB_COMMAND_QUEUE"])

        handlers["WEB_DISPLAY_DICTIONARY"] = generate_handlers.construct_hash(
            data_structures["WEB_DISPLAY_DICTIONARY"])
        return handlers
Example #5
def construct_op_monitoring_instance(qs, site_data):

    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=site_data["site"])

    query_list = qs.add_match_terminal(query_list, relationship="OP_MONITOR")

    data_sets, data_sources = qs.match_list(query_list)

    data = data_sources[0]
    monitoring_list = data['OP_MONITOR_LIST']
    print("monitoring_list", monitoring_list)

    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=site_data["site"])

    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "SYSTEM_MONITOR"})

    package_sets, package_sources = qs.match_list(query_list)

    package = package_sources[0]

    #
    #  do verifications of data package
    #
    #
    #
    data_structures = package["data_structures"]

    generate_handlers = Generate_Handlers(package, qs)

    handlers = {}
    handlers["SYSTEM_STATUS"] = generate_handlers.construct_hash(
        data_structures["SYSTEM_STATUS"])
    handlers["MONITORING_DATA"] = generate_handlers.construct_hash(
        data_structures["MONITORING_DATA"])
    handlers["SYSTEM_ALERTS"] = generate_handlers.construct_stream_writer(
        data_structures["SYSTEM_ALERTS"])
    handlers[
        "SYSTEM_PUSHED_ALERTS"] = generate_handlers.construct_stream_writer(
            data_structures["SYSTEM_PUSHED_ALERTS"])

    handlers["SYSTEM_STATUS"].delete_all()

    op_monitor = Op_Monitor(site_data, qs, monitoring_list, handlers)

    return op_monitor
Example #6
    def __init__(self, mqtt_server_data, mqtt_devices, site_data, package, qs):

        generate_handlers = Generate_Handlers(package, qs)
        data_structures = package["data_structures"]
        self.server_state_hash = generate_handlers.construct_hash(
            data_structures["MQTT_SERVER_STATE"])
        self.server_state_hash.hset("SERVER_STATE", False)

        self.mqtt_server_data = mqtt_server_data
        self.site_data = site_data

        self.mqtt_bridge = MQTT_TO_REDIS_BRIDGE_STORE(site_data, mqtt_devices,
                                                      package,
                                                      generate_handlers)

        self.client = mqtt.Client(client_id="",
                                  clean_session=True,
                                  userdata=None,
                                  transport="tcp")
        #self.client.tls_set( cert_reqs=ssl.CERT_NONE )

        self.client.on_connect = self.on_connect
        self.client.on_message = self.on_message
        self.client.on_disconnect = self.on_disconnect
        self.connection_flag = False

        print(self.mqtt_server_data["HOST"], self.mqtt_server_data["PORT"])
        self.client.connect(self.mqtt_server_data["HOST"],
                            self.mqtt_server_data["PORT"], 60)

        self.client.loop_forever()
Example #7
    def __init__(base_self, self):

        # from the graph, get the hash tables

        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        query_list = self.qs.add_match_terminal(query_list,
                                                relationship="WS_STATION")

        eto_sets, eto_sources = self.qs.match_list(query_list)

        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        query_list = self.qs.add_match_terminal(
            query_list,
            relationship="PACKAGE",
            property_mask={"name": "WEATHER_STATION_DATA"})

        package_sets, package_sources = self.qs.match_list(query_list)

        package = package_sources[0]
        data_structures = package["data_structures"]
        generate_handlers = Generate_Handlers(package, self.qs)
        self.ds_handlers = {}
        self.ds_handlers["EXCEPTION_VALUES"] = generate_handlers.construct_hash(
            data_structures["EXCEPTION_VALUES"])
        self.ds_handlers["ETO_VALUES"] = generate_handlers.construct_hash(
            data_structures["ETO_VALUES"])
        self.ds_handlers["RAIN_VALUES"] = generate_handlers.construct_hash(
            data_structures["RAIN_VALUES"])
        self.ds_handlers["ETO_CONTROL"] = generate_handlers.construct_hash(
            data_structures["ETO_CONTROL"])
        self.ds_handlers["ETO_HISTORY"] = generate_handlers.construct_redis_stream_reader(
            data_structures["ETO_HISTORY"])
        self.ds_handlers["RAIN_HISTORY"] = generate_handlers.construct_redis_stream_reader(
            data_structures["RAIN_HISTORY"])
        self.ds_handlers["EXCEPTION_LOG"] = generate_handlers.construct_redis_stream_reader(
            data_structures["EXCEPTION_LOG"])
        self.ds_handlers["ETO_ACCUMULATION_TABLE"] = generate_handlers.construct_hash(
            data_structures["ETO_ACCUMULATION_TABLE"])

        self.redis_access.add_access_handlers("ETO_VALUES",
                                              self.ds_handlers["ETO_VALUES"],
                                              "Redis_Hash_Dictionary")
        self.redis_access.add_access_handlers("RAIN_VALUES",
                                              self.ds_handlers["RAIN_VALUES"],
                                              "Redis_Hash_Dictionary")

        eto_update_table = self.ds_handlers["ETO_ACCUMULATION_TABLE"]
        self.redis_access.add_access_handlers("eto_update_table",
                                              eto_update_table,
                                              "Redis_Hash_Dictionary")

        Load_ETO_Management_Web(self.app,
                                self.auth,
                                request,
                                file_server_library=self.file_server_library,
                                path='eto_py3',
                                url_rule_class=self.url_rule_class,
                                subsystem_name="ETO_MANAGEMENT",
                                render_template=render_template,
                                redis_access=self.redis_access,
                                eto_update_table=eto_update_table,
                                handlers=self.ds_handlers)
Example #8
    def generate_data_handlers(self, package, qs):
        self.handlers = {}
        data_structures = package["data_structures"]
        generate_handlers = Generate_Handlers(package, qs)
        self.ds_handlers = {}
        self.ds_handlers[
            "MQTT_INPUT_QUEUE"] = generate_handlers.construct_redis_stream_reader(
                data_structures["MQTT_INPUT_QUEUE"])
        self.ds_handlers[
            "MQTT_PAST_ACTION_QUEUE"] = generate_handlers.construct_redis_stream_writer(
                data_structures["MQTT_PAST_ACTION_QUEUE"])
        self.ds_handlers[
            "MQTT_SENSOR_QUEUE"] = generate_handlers.construct_redis_stream_writer(
                data_structures["MQTT_SENSOR_QUEUE"])
        self.ds_handlers[
            "MQTT_CONTACT_LOG"] = generate_handlers.construct_hash(
                data_structures["MQTT_CONTACT_LOG"])
        self.ds_handlers["MQTT_REBOOT_LOG"] = generate_handlers.construct_hash(
            data_structures["MQTT_REBOOT_LOG"])
        self.ds_handlers[
            "MQTT_SENSOR_STATUS"] = generate_handlers.construct_hash(
                data_structures["MQTT_SENSOR_STATUS"])
        contact_set = set(self.ds_handlers["MQTT_CONTACT_LOG"].hkeys())
        device_set = set(self.mqtt_devices.keys())
        difference_set = contact_set - device_set
        for i in list(difference_set):
            self.ds_handlers["MQTT_CONTACT_LOG"].hdelete(i)

        return
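        # NOTE: everything below this return is unreachable; it also refers to
        # 'status' and 'name', which are not defined in this method.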
        contact_set = set(self.ds_handlers["MQTT_CONTACT_LOG"].hkeys())
        difference_set = device_set - contact_set
        print("contact_set", contact_set)
        print("difference_set", difference_set)
        for i in list(difference_set):
            data = {}
            data["time"] = time.time()
            data["status"] = status
            data["name"] = name
            data["device_id"] = name  # redundant with name
            self.ds_handlers["MQTT_PAST_ACTION_QUEUE"].push({
                "action": "Device_Change",
                "device_id": name,
                "status": status
            })
            self.ds_handlers["MQTT_CONTACT_LOG"].hset(name, data)
Example #9
    def generate_data_handlers(self, package, qs):
        self.handlers = {}
        data_structures = package["data_structures"]
        generate_handlers = Generate_Handlers(package, qs)
        self.ds_handlers = {}

        self.ds_handlers[
            "MQTT_PAST_ACTION_QUEUE"] = generate_handlers.construct_redis_stream_writer(
                data_structures["MQTT_PAST_ACTION_QUEUE"])
        self.ds_handlers[
            "MQTT_CONTACT_LOG"] = generate_handlers.construct_hash(
                data_structures["MQTT_CONTACT_LOG"])
        self.ds_handlers["MQTT_REBOOT_LOG"] = generate_handlers.construct_hash(
            data_structures["MQTT_REBOOT_LOG"])

        contact_set = set(self.ds_handlers["MQTT_CONTACT_LOG"].hkeys())
        device_set = set(self.mqtt_devices.keys())
        difference_set = contact_set - device_set
        for i in list(difference_set):
            self.ds_handlers["MQTT_CONTACT_LOG"].hdelete(i)
Example #10
 def generate_data_handlers(self, qs):
     self.handlers = {}
     data_structures = self.package["data_structures"]
     generate_handlers = Generate_Handlers(self.package, qs)
     self.ds_handlers = {}
     self.ds_handlers[
         "MQTT_INPUT_QUEUE"] = generate_handlers.construct_redis_stream_reader(
             data_structures["MQTT_INPUT_QUEUE"])
     self.ds_handlers[
         "MQTT_PAST_ACTION_QUEUE"] = generate_handlers.construct_redis_stream_writer(
             data_structures["MQTT_PAST_ACTION_QUEUE"])
     self.ds_handlers[
         "MQTT_SENSOR_QUEUE"] = generate_handlers.construct_redis_stream_writer(
             data_structures["MQTT_SENSOR_QUEUE"])
     self.ds_handlers[
         "MQTT_CONTACT_LOG"] = generate_handlers.construct_hash(
             data_structures["MQTT_CONTACT_LOG"])
     self.ds_handlers[
         "MQTT_SENSOR_STATUS"] = generate_handlers.construct_hash(
             data_structures["MQTT_SENSOR_STATUS"])
Example #11
    def assemble_container_data_structures(self, container_name):

        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        query_list = self.qs.add_match_relationship(query_list,
                                                    relationship="CONTAINER",
                                                    label=container_name)
        query_list = self.qs.add_match_terminal(query_list,
                                                relationship="PACKAGE",
                                                label="DATA_STRUCTURES")

        package_sets, package_nodes = self.qs.match_list(query_list)

        #print("package_nodes",package_nodes)

        generate_handlers = Generate_Handlers(package_nodes[0], self.qs)
        data_structures = package_nodes[0]["data_structures"]

        handlers = {}
        handlers[
            "ERROR_STREAM"] = generate_handlers.construct_redis_stream_reader(
                data_structures["ERROR_STREAM"])
        handlers["ERROR_HASH"] = generate_handlers.construct_hash(
            data_structures["ERROR_HASH"])
        handlers[
            "WEB_COMMAND_QUEUE"] = generate_handlers.construct_job_queue_client(
                data_structures["WEB_COMMAND_QUEUE"])
        handlers["WEB_DISPLAY_DICTIONARY"] = generate_handlers.construct_hash(
            data_structures["WEB_DISPLAY_DICTIONARY"])
        handlers[
            "PROCESS_VSZ"] = generate_handlers.construct_redis_stream_reader(
                data_structures["PROCESS_VSZ"])
        handlers[
            "PROCESS_RSS"] = generate_handlers.construct_redis_stream_reader(
                data_structures["PROCESS_RSS"])
        handlers[
            "PROCESS_CPU"] = generate_handlers.construct_redis_stream_reader(
                data_structures["PROCESS_CPU"])

        return handlers
Example #12
def generate_mqtt_devices(redis_site, qs):

    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])

    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})

    package_sets, package_sources = qs.match_list(query_list)

    package = package_sources[0]
    data_structures = package["data_structures"]
    generate_handlers = Generate_Handlers(package, qs)

    return generate_handlers.construct_hash(data_structures["MQTT_CONTACT_LOG"])
Example #13
    def generate_structures_with_processor_container(self,
                                                     processor_list,
                                                     key_list,
                                                     hash_flag=True):
        processor_ds = {}

        for i in processor_list:
            containers = self.find_containers(i)
            #print("containers",containers)
            container_ds = {}
            for j in containers:

                #print(i,j)
                query_list = []
                query_list = self.qs.add_match_relationship(
                    query_list,
                    relationship="SITE",
                    label=self.site_data["site"])
                query_list = self.qs.add_match_relationship(
                    query_list, relationship="PROCESSOR", label=i)
                query_list = self.qs.add_match_relationship(
                    query_list, relationship="CONTAINER", label=j)
                query_list = self.qs.add_match_terminal(
                    query_list,
                    relationship="PACKAGE",
                    label="DATA_STRUCTURES")
                package_sets, package_sources = self.qs.match_list(query_list)
                package = package_sources[0]
                data_structures = package["data_structures"]
                #print("data_structures",data_structures.keys())
                generate_handlers = Generate_Handlers(package, self.qs)
                temp = {}
                for k in key_list:
                    if hash_flag == True:
                        temp[k] = generate_handlers.construct_hash(
                            data_structures[k])
                    else:
                        temp[
                            k] = generate_handlers.construct_redis_stream_reader(
                                data_structures[k])
                container_ds[j] = temp
            processor_ds[i] = container_ds
        #print("pocessor_ds",processor_ds)

        return processor_ds
Example #14
    def generate_data_handlers(self, qs):

        data_structures = self.package["data_structures"]

        generate_handlers = Generate_Handlers(self.package, qs)
        self.ds_handlers = {}
        self.ds_handlers["MQTT_PAST_ACTION_QUEUE"] = generate_handlers.construct_redis_stream_writer(
            data_structures["MQTT_PAST_ACTION_QUEUE"])
        self.ds_handlers["MQTT_INPUT_QUEUE"] = generate_handlers.construct_redis_stream_writer(
            data_structures["MQTT_INPUT_QUEUE"])
        self.ds_handlers["MQTT_DEVICES"] = generate_handlers.construct_hash(
            data_structures["MQTT_DEVICES"])
        self.ds_handlers["MQTT_SUBSCRIPTIONS"] = generate_handlers.construct_hash(
            data_structures["MQTT_SUBSCRIPTIONS"])
        self.ds_handlers["MQTT_CONTACT_LOG"] = generate_handlers.construct_hash(
            data_structures["MQTT_CONTACT_LOG"])
        self.ds_handlers["MQTT_REBOOT_LOG"] = generate_handlers.construct_hash(
            data_structures["MQTT_REBOOT_LOG"])
        self.ds_handlers["MQTT_UNKNOWN_DEVICES"] = generate_handlers.construct_hash(
            data_structures["MQTT_UNKNOWN_DEVICES"])
        self.ds_handlers["MQTT_UNKNOWN_SUBSCRIPTIONS"] = generate_handlers.construct_hash(
            data_structures["MQTT_UNKNOWN_SUBSCRIPTIONS"])
Example #15
    def generate_structures_with_processor(self,
                                           processor_list,
                                           search_list,
                                           key_list,
                                           hash_flag=True):
        return_value = {}

        for i in processor_list:
            #print(i)
            query_list = []
            query_list = self.qs.add_match_relationship(
                query_list, relationship="SITE", label=self.site_data["site"])

            query_list = self.qs.add_match_relationship(
                query_list, relationship="PROCESSOR", label=i)
            for j in range(0, len(search_list) - 1):
                query_list = self.qs.add_match_relationship(
                    query_list, relationship=search_list[j])
            if type(search_list[-1]) == list:
                query_list = self.qs.add_match_terminal(
                    query_list,
                    relationship=search_list[-1][0],
                    label=search_list[-1][1])
            else:
                query_list = self.qs.add_match_terminal(
                    query_list, relationship=search_list[-1])
            package_sets, package_sources = self.qs.match_list(query_list)
            package = package_sources[0]
            data_structures = package["data_structures"]
            print("data_structures", data_structures.keys())
            generate_handlers = Generate_Handlers(package, self.qs)
            temp = {}
            for k in key_list:
                if hash_flag == True:
                    temp[k] = generate_handlers.construct_hash(
                        data_structures[k])
                else:
                    temp[k] = generate_handlers.construct_redis_stream_reader(
                        data_structures[k])
            return_value[i] = temp

        return return_value
Example #16
def construct_sql_server_instance(qs, site_data):

    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=site_data["site"])
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SQL_SERVER")
    query_list = qs.add_match_terminal(query_list,
                                       relationship="PACKAGE",
                                       property_mask={"name": "SQL_SERVER"})
    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]
    generate_handlers = Generate_Handlers(package, qs)
    rpc_queue = generate_handlers.construct_rpc_sever(
        data_structures["SQL_SERVER_RPC_SERVER"])
    sql_databases = generate_handlers.construct_hash(
        data_structures["SQL_DB_MAPPING"])
    Construct_RPC_Server(rpc_queue, sql_databases)
Example #17
    def determine_container_structure(self, processor_name):
        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        query_list = self.qs.add_match_relationship(query_list,
                                                    relationship="PROCESSOR",
                                                    label=processor_name)
        query_list = self.qs.add_match_relationship(
            query_list, relationship="DOCKER_MONITOR")
        query_list = self.qs.add_match_terminal(query_list,
                                                relationship="PACKAGE",
                                                label="DATA_STRUCTURES")

        package_sets, package_nodes = self.qs.match_list(query_list)

        #print("package_nodes",package_nodes)

        generate_handlers = Generate_Handlers(package_nodes[0], self.qs)

        package_node = package_nodes[0]
        data_structures = package_node["data_structures"]

        #print(data_structures.keys())
        handlers = {}
        handlers[
            "ERROR_STREAM"] = generate_handlers.construct_redis_stream_reader(
                data_structures["ERROR_STREAM"])

        handlers[
            "WEB_COMMAND_QUEUE"] = generate_handlers.construct_job_queue_client(
                data_structures["WEB_COMMAND_QUEUE"])
        handlers["WEB_DISPLAY_DICTIONARY"] = generate_handlers.construct_hash(
            data_structures["WEB_DISPLAY_DICTIONARY"])
        queue_name = data_structures["DOCKER_UPDATE_QUEUE"]['queue']
        handlers[
            "DOCKER_UPDATE_QUEUE"] = generate_handlers.construct_rpc_client()
        handlers["DOCKER_UPDATE_QUEUE"].set_rpc_queue(queue_name)
        return handlers
Example #18
    def __init__(self, redis_site_data):
        qs = Query_Support(redis_server_ip=redis_site_data["host"],
                           redis_server_port=redis_site_data["port"])
        query_list = []
        query_list = qs.add_match_relationship(query_list,
                                               relationship="SITE",
                                               label=redis_site_data["site"])

        query_list = qs.add_match_terminal(
            query_list,
            relationship="PACKAGE",
            property_mask={"name": "IRRIGATION_CONTROL_MANAGEMENT"})

        package_sets, package_sources = qs.match_list(query_list)

        package = package_sources[0]
        data_structures = package["data_structures"]

        generate_handlers = Generate_Handlers(package, redis_site_data)

        self.handler = generate_handlers.construct_hash(
            data_structures["IRRIGATION_CONTROL"])
        self.access_handler = {}
        self.access_handler["RAIN_FLAG"] = self.set_rain_flag
        self.access_handler["ETO_MANAGEMENT"] = self.set_eto_management_flag
        self.access_handler["FLOW_CUT_OFF"] = self.set_flow_cutoff
        self.access_handler["CLEANING_INTERVAL"] = self.set_cleaning_interval
        self.access_handler["MASTER_VALVE"] = self.set_master_valve
        self.access_handler["CLEANING_VALVE"] = self.set_cleaning_valve
        self.access_handler["MASTER_VALVE_SETUP"] = self.set_master_valve_setup
        self.access_handler["SCHEDULE_NAME"] = self.set_schedule_name
        self.access_handler["STEP"] = self.set_step_number
        self.access_handler["RUN_TIME"] = self.set_run_time
        self.access_handler["ELASPED_TIME"] = self.set_elasped_time
        self.access_handler["TIME_STAMP"] = self.set_time_stamp
        self.access_handler["SUSPEND"] = self.set_suspend
        "IRRIGATION_CURRENT_CLIENT"] = generate_handlers.construct_job_queue_client(
            data_structures["IRRIGATION_CURRENT"])
    ds_handlers[
        "IRRIGATION_CURRENT_SERVER"] = generate_handlers.construct_job_queue_server(
            data_structures["IRRIGATION_CURRENT"])
    ds_handlers[
        "IRRIGATION_JOB_SCHEDULING"] = generate_handlers.construct_job_queue_server(
            data_structures["IRRIGATION_JOB_SCHEDULING"])
    ds_handlers[
        "IRRIGATION_PENDING_CLIENT"] = generate_handlers.construct_job_queue_client(
            data_structures["IRRIGATION_PENDING"])
    ds_handlers[
        "IRRIGATION_PENDING_SERVER"] = generate_handlers.construct_job_queue_server(
            data_structures["IRRIGATION_PENDING"])

    ds_handlers["IRRIGATION_VALVE_TEST"] = generate_handlers.construct_hash(
        data_structures["IRRIGATION_VALVE_TEST"])
    ds_handlers["IRRIGATION_TIME_HISTORY"] = generate_handlers.construct_hash(
        data_structures["IRRIGATION_TIME_HISTORY"])
    ds_handlers[
        "VALVE_JOB_QUEUE_CLIENT"] = generate_handlers.construct_job_queue_client(
            data_structures["IRRIGATION_VALVE_JOB_QUEUE"])
    ds_handlers[
        "VALVE_JOB_QUEUE_SERVER"] = generate_handlers.construct_job_queue_server(
            data_structures["IRRIGATION_VALVE_JOB_QUEUE"])
    ds_handlers["MQTT_SENSOR_STATUS"] = generate_sensor_minute_status(
        redis_site, qs)
    ds_handlers["MQTT_CONTACT_LOG"] = generate_mqtt_devices(redis_site, qs)
    irrigation_hash_control = generate_irrigation_control(redis_site, qs)

    query_list = []
    query_list = qs.add_match_relationship(query_list,
Example #20
    #
    #  do verifications of data package
    #
    #
    #
    data_structures = package["data_structures"]

    generate_handlers = Generate_Handlers(package, qs)

    data_structures = package["data_structures"]
    job_queue = generate_handlers.construct_job_queue_client(
        data_structures["IRRIGATION_JOB_SCHEDULING"])
    job_queue.delete_all()

    completion_dictionary = generate_handlers.construct_hash(
        data_structures["SYSTEM_COMPLETION_DICTIONARY"])

    irrigation_control = generate_irrigation_control(redis_site, qs)
    sched = Irrigation_Schedule_Monitoring(file_server_library,
                                           completion_dictionary, job_queue,
                                           irrigation_control)
    action = System_Monitoring(file_server_library, completion_dictionary,
                               job_queue)

    ntpd = Ntpd()
    #
    # Adding chains
    #

    cf = CF_Base_Interpreter()
    add_chains(cf, sched, action, ntpd)
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])

    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "CLOUD_SERVICE_QUEUE_DATA"})

    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]
    generate_handlers = Generate_Handlers(package, qs)
    sub_event_hash = generate_handlers.construct_hash(
        data_structures["CLOUD_SUB_EVENTS"])

    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_relationship(
        query_list, relationship="CLOUD_SERVICE_HOST_INTERFACE")

    query_list = qs.add_match_terminal(query_list,
                                       relationship="HOST_INFORMATION")

    host_sets, host_sources = qs.match_list(query_list)

    host_source = host_sources[0]
    remote_redis_handle = redis.StrictRedis(host=host_source["host"],