def __init__(self, redis_site):
    """Connect to the site's MQTT broker and start serving the publish job queue.

    redis_site: site-configuration dict; must contain at least the "site" key
    (passed to Query_Support for graph lookups).
    """
    qs = Query_Support(redis_site)

    # Locate this site's MQTT_DEVICES_DATA package in the graph store.
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]

    generate_handlers = Generate_Handlers(package, qs)
    self.job_queue_server = generate_handlers.construct_job_queue_server(
        data_structures["MQTT_PUBLISH_QUEUE"])

    # Locate the MQTT broker host/port record for this site.
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_terminal(query_list,
                                       relationship="MQTT_SERVER")
    host_sets, host_sources = qs.match_list(query_list)
    self.mqtt_server_data = host_sources[0]

    self.client = mqtt.Client(client_id="",
                              clean_session=True,
                              userdata=None,
                              transport="tcp")
    self.client.on_connect = self.on_connect
    self.client.on_disconnect = self.on_disconnect
    self.client.on_publish = self.on_publish

    # Retry the broker connection every 5 seconds until it succeeds.
    self.connection_flag = False
    print("connection attempting")
    while not self.connection_flag:
        try:
            self.client.connect(self.mqtt_server_data["HOST"],
                                self.mqtt_server_data["PORT"],
                                60)
        except Exception:
            # Was a bare `except:` — narrowed so Ctrl-C / SystemExit can
            # still interrupt the retry loop; connection errors retry.
            time.sleep(5)
        else:
            self.connection_flag = True
    print("connection achieved")
    self.client.loop_start()
    self.server_job_queue()
def __init__(self, name, site_data):
    """Assemble the Flask web application for the site.

    name: application name handed to Flask().
    site_data: site configuration dict (redis host/port, password db, ...).
    """
    self.site_data = site_data

    # Pull the web-server startup settings out of the redis password db.
    password_store = redis.StrictRedis(site_data["host"],
                                       site_data["port"],
                                       db=site_data["redis_password_db"],
                                       decode_responses=True)
    startup_dict = password_store.hgetall("web")
    self.startup_dict = startup_dict

    self.qs = Query_Support(site_data)
    self.file_server_library = Construct_RPC_Library(self.qs, self.site_data)

    # Flask application plus HTTP digest authentication.
    self.app = Flask(name)
    self.auth = HTTPDigestAuth()
    self.url_rule_class = URL_Rule_Class(self.app, self.auth)
    self.auth.get_password(self.get_pw)

    self.app.template_folder = 'flask_templates'
    self.app.static_folder = 'static'
    self.app.config['SECRET_KEY'] = startup_dict["SECRET_KEY"]
    self.users = json.loads(startup_dict["users"])

    # Register the endpoint groups.
    Load_Static_Files(self.app, self.auth)  # enable static files to be fetched
    self.redis_access = Load_Redis_Access(
        self.app, self.auth, request)  # enable web access for redis operations
    Load_App_Sys_Files(self.app, self.auth, request,
                       self.file_server_library)

    self.subsystems = []
    self.modules = {}
    self.load_specified_modules()
def __init__(self, redis_site_data):
    """Bind irrigation-control field names to their setter methods."""
    qs = Query_Support(redis_server_ip=redis_site_data["host"],
                       redis_server_port=redis_site_data["port"])

    # Find this site's IRRIGATION_CONTROL_MANAGEMENT package.
    query_list = qs.add_match_relationship([],
                                           relationship="SITE",
                                           label=redis_site_data["site"])
    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "IRRIGATION_CONTROL_MANAGEMENT"})
    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]

    generate_handlers = Generate_Handlers(package, redis_site_data)
    self.handler = generate_handlers.construct_hash(
        data_structures["IRRIGATION_CONTROL"])

    # Dispatch table: stored field name -> setter method.
    # ("ELASPED_TIME" spelling is part of the stored schema; kept as-is.)
    self.access_handler = {
        "RAIN_FLAG": self.set_rain_flag,
        "ETO_MANAGEMENT": self.set_eto_management_flag,
        "FLOW_CUT_OFF": self.set_flow_cutoff,
        "CLEANING_INTERVAL": self.set_cleaning_interval,
        "MASTER_VALVE": self.set_master_valve,
        "CLEANING_VALVE": self.set_cleaning_valve,
        "MASTER_VALVE_SETUP": self.set_master_valve_setup,
        "SCHEDULE_NAME": self.set_schedule_name,
        "STEP": self.set_step_number,
        "RUN_TIME": self.set_run_time,
        "ELASPED_TIME": self.set_elasped_time,
        "TIME_STAMP": self.set_time_stamp,
        "SUSPEND": self.set_suspend,
    }
def __init__(self, redis_site):
    """Drive the MQTT publish queue: one reboot request, then heartbeats forever."""
    qs = Query_Support(redis_site)

    # Locate this site's MQTT_DEVICES_DATA package.
    query_list = qs.add_match_relationship([],
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]

    generate_handlers = Generate_Handlers(package, qs)
    self.job_queue_client = generate_handlers.construct_job_queue_client(
        data_structures["MQTT_PUBLISH_QUEUE"])

    # Reboot once at start-up, then issue keep-alive checks every 15 s.
    self.send_request("REBOOT")
    while True:
        self.send_request("HEART_BEAT")
        self.send_request("SERVER_CHECK")
        time.sleep(15.)
#
#
# Read Boot File
#   expand the JSON site-configuration file into a dict
#
# Context manager ensures the handle is closed even if read/parse fails
# (the original open/read/close leaked the handle on exception).
with open("/data/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)
#
# Setup handle
#   open data stores instance
#
qs = Query_Support(redis_site)
redis_handle = qs.get_redis_data_handle()

# Find this site's CLOUD_SERVICE_QUEUE_DATA package.
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_terminal(
    query_list,
    relationship="PACKAGE",
    property_mask={"name": "CLOUD_SERVICE_QUEUE_DATA"})
package_sets, package_sources = qs.match_list(query_list)
package = package_sources[0]
data_structures = package["data_structures"]
# # # Read Boot File # expand json file # file_handle = open("system_data_files/redis_server.json", 'r') data = file_handle.read() file_handle.close() redis_site = json.loads(data) # # Setup handle # open data stores instance user_table = User_Data_Tables(redis_site) qs = Query_Support(redis_server_ip=redis_site["host"], redis_server_port=redis_site["port"]) eto = construct_eto_instance(qs, redis_site, user_table) # # Adding chains # cf = CF_Base_Interpreter() add_eto_chains(eto, cf) # # Executing chains # cf.execute() else: pass
if __name__ == "__main__":
    #
    # Load the site configuration from the boot file.
    #
    file_handle = open("system_data_files/redis_server.json",'r')
    data = file_handle.read()
    file_handle.close()
    site_data = json.loads(data)
    #
    # Run each command named on the command line to completion (no logging)
    # and collect [result, command] pairs.
    #
    results = []
    for i in range(1,len(sys.argv)):
        results.append( [run_process_to_completion_no_log(sys.argv[i]), sys.argv[i]])
    print("results",results)
    #
    # Look up the PROCESS_INITIALIZATION command list for this
    # site/processor pair; default to an empty list if none is stored.
    #
    qs = Query_Support( site_data )
    query_list = []
    query_list = qs.add_match_relationship( query_list,relationship="SITE",label=site_data["site"] )
    query_list = qs.add_match_relationship( query_list,relationship="PROCESSOR",label=site_data["local_node"] )
    query_list = qs.add_match_terminal(query_list,relationship="PROCESS_INITIALIZATION")
    initialization_sets, initialization_nodes = qs.match_list(query_list)
    if len(initialization_nodes) > 0:
        initialization_list = initialization_nodes[0]["command_list"]
    else:
        initialization_list= []
    #
    # Build a second site/processor query; the terminal relationship of
    # this call continues past the end of this chunk.
    #
    query_list = []
    query_list = qs.add_match_relationship( query_list,relationship="SITE",label=site_data["site"] )
    query_list = qs.add_match_relationship( query_list,relationship="PROCESSOR",label=site_data["local_node"] )
    query_list = qs.add_match_terminal( query_list,