def __init__(self, redis_site):
    """Serve the site's MQTT publish queue.

    Looks up the MQTT_DEVICES_DATA package and the MQTT_SERVER node in the
    site graph, builds the job-queue server handler, connects to the broker
    (retrying every 5 seconds until the connection succeeds), starts the
    paho network loop, and enters server_job_queue() (does not return).

    redis_site -- site description dict; must contain at least "site".
    """
    qs = Query_Support(redis_site)

    # Graph query: SITE -> PACKAGE {"name": "MQTT_DEVICES_DATA"}.
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    package_sets, package_sources = qs.match_list(query_list)
    package = package_sources[0]
    data_structures = package["data_structures"]

    generate_handlers = Generate_Handlers(package, qs)
    self.job_queue_server = generate_handlers.construct_job_queue_server(
        data_structures["MQTT_PUBLISH_QUEUE"])

    # Graph query: SITE -> MQTT_SERVER (broker host/port record).
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_terminal(query_list, relationship="MQTT_SERVER")
    host_sets, host_sources = qs.match_list(query_list)
    self.mqtt_server_data = host_sources[0]

    self.client = mqtt.Client(client_id="",
                              clean_session=True,
                              userdata=None,
                              transport="tcp")
    self.client.on_connect = self.on_connect
    self.client.on_disconnect = self.on_disconnect
    self.client.on_publish = self.on_publish

    self.connection_flag = False
    print("connection attempting")
    # Retry until the broker accepts the connection.
    # FIX: was `while self.connection_flag == False` with a bare `except:`,
    # which also swallowed KeyboardInterrupt/SystemExit during the retry loop.
    while not self.connection_flag:
        try:
            self.client.connect(self.mqtt_server_data["HOST"],
                                self.mqtt_server_data["PORT"], 60)
        except Exception:
            time.sleep(5)  # broker not up yet; back off and retry
        else:
            self.connection_flag = True
    print("connection achieved")
    self.client.loop_start()
    self.server_job_queue()
def __init__(self, name, site_data):
    """Build the Flask web-server core: redis-backed config, digest auth, modules.

    name      -- passed straight through to Flask(name).
    site_data -- dict with redis "host", "port", "redis_password_db" and
                 whatever Query_Support needs (e.g. "site").
    """
    # Startup/password configuration lives in a dedicated redis db,
    # hash key "web" (decode_responses=True so values come back as str).
    redis_handle_pw = redis.StrictRedis(site_data["host"],
                                        site_data["port"],
                                        db=site_data["redis_password_db"],
                                        decode_responses=True)
    self.site_data = site_data
    startup_dict = redis_handle_pw.hgetall("web")
    self.qs = Query_Support(site_data)
    self.file_server_library = Construct_RPC_Library(self.qs, self.site_data)
    self.app = Flask(name)
    self.auth = HTTPDigestAuth()
    self.url_rule_class = URL_Rule_Class(self.app, self.auth)
    # Digest auth resolves passwords through the get_pw callback.
    self.auth.get_password(self.get_pw)
    self.startup_dict = startup_dict
    self.app.template_folder = 'flask_templates'
    self.app.static_folder = 'static'
    self.app.config['SECRET_KEY'] = startup_dict["SECRET_KEY"]
    # JSON-encoded credential map; presumably {username: password} -- the
    # get_pw callback indexes it by username. TODO(review): confirm schema.
    self.users = json.loads(startup_dict["users"])
    Load_Static_Files(self.app, self.auth)  #enable static files to be fetched
    self.redis_access = Load_Redis_Access(
        self.app, self.auth, request)  #enable web access for redis operations
    Load_App_Sys_Files(self.app, self.auth, request,
                       self.file_server_library)
    self.subsystems = []
    self.modules = {}
    # Instantiates the web modules named on the WEB_SERVER graph node.
    self.load_specified_modules()
def __init__(self, redis_site_data):
    """Bind the IRRIGATION_CONTROL hash and build the field dispatch table.

    Finds the IRRIGATION_CONTROL_MANAGEMENT package for the site, wraps its
    IRRIGATION_CONTROL data structure in a hash handler, and maps each
    control-field name to the setter method that services it.

    redis_site_data -- dict with "host", "port" and "site".
    """
    graph = Query_Support(redis_server_ip=redis_site_data["host"],
                          redis_server_port=redis_site_data["port"])

    # Graph query: SITE -> PACKAGE {"name": "IRRIGATION_CONTROL_MANAGEMENT"}.
    match_steps = graph.add_match_relationship([],
                                               relationship="SITE",
                                               label=redis_site_data["site"])
    match_steps = graph.add_match_terminal(
        match_steps,
        relationship="PACKAGE",
        property_mask={"name": "IRRIGATION_CONTROL_MANAGEMENT"})
    _match_sets, match_sources = graph.match_list(match_steps)
    package = match_sources[0]

    handler_factory = Generate_Handlers(package, redis_site_data)
    self.handler = handler_factory.construct_hash(
        package["data_structures"]["IRRIGATION_CONTROL"])

    # Dispatch table: field name -> setter (same insertion order as the
    # original one-assignment-per-key version).
    self.access_handler = {
        "RAIN_FLAG": self.set_rain_flag,
        "ETO_MANAGEMENT": self.set_eto_management_flag,
        "FLOW_CUT_OFF": self.set_flow_cutoff,
        "CLEANING_INTERVAL": self.set_cleaning_interval,
        "MASTER_VALVE": self.set_master_valve,
        "CLEANING_VALVE": self.set_cleaning_valve,
        "MASTER_VALVE_SETUP": self.set_master_valve_setup,
        "SCHEDULE_NAME": self.set_schedule_name,
        "STEP": self.set_step_number,
        "RUN_TIME": self.set_run_time,
        # sic: "ELASPED" spelling is part of the stored-data contract.
        "ELASPED_TIME": self.set_elasped_time,
        "TIME_STAMP": self.set_time_stamp,
        "SUSPEND": self.set_suspend,
    }
def __init__(self, redis_site):
    """Client side of the MQTT publish queue; loops forever sending keep-alives.

    Finds the MQTT_DEVICES_DATA package for the site, builds a job-queue
    client on MQTT_PUBLISH_QUEUE, sends one REBOOT request, then sends
    HEART_BEAT and SERVER_CHECK every 15 seconds. Never returns.
    """
    graph_query = Query_Support(redis_site)

    # Graph query: SITE -> PACKAGE {"name": "MQTT_DEVICES_DATA"}.
    steps = graph_query.add_match_relationship([],
                                               relationship="SITE",
                                               label=redis_site["site"])
    steps = graph_query.add_match_terminal(
        steps,
        relationship="PACKAGE",
        property_mask={"name": "MQTT_DEVICES_DATA"})
    _step_sets, step_sources = graph_query.match_list(steps)
    package = step_sources[0]

    handler_factory = Generate_Handlers(package, graph_query)
    self.job_queue_client = handler_factory.construct_job_queue_client(
        package["data_structures"]["MQTT_PUBLISH_QUEUE"])

    self.send_request("REBOOT")
    while True:  # periodic keep-alive traffic; deliberately endless
        self.send_request("HEART_BEAT")
        self.send_request("SERVER_CHECK")
        time.sleep(15.)
from irrigation_control_py3.eto_management_py3 import ETO_Management
from irrigation_control_py3.Failure_Report_py3 import Failure_Report
from core_libraries.irrigation_hash_control_py3 import generate_irrigation_control
from core_libraries.irrigation_hash_control_py3 import generate_sensor_minute_status
from core_libraries.irrigation_hash_control_py3 import generate_mqtt_devices

#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("system_data_files/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)

qs = Query_Support(redis_site)
irrigation_excessive_flow_limits = get_flow_checking_limits(redis_site, qs)

# Graph query: SITE -> PACKAGE {"name": "IRRIGIGATION_SCHEDULING_CONTROL_DATA"}.
# NOTE(review): "IRRIGIGATION" looks misspelled, but it is a runtime lookup
# key that must match the stored package name -- do not "fix" it here alone.
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_terminal(
    query_list,
    relationship="PACKAGE",
    property_mask={"name": "IRRIGIGATION_SCHEDULING_CONTROL_DATA"})
package_sets, package_sources = qs.match_list(query_list)
package = package_sources[0]
data_structures = package["data_structures"]
#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("/data/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)

#
# Setup handle -- open data-store instances.
#
qs = Query_Support(redis_site)
file_server_library = Construct_RPC_Library(qs, redis_site)

# Graph query: SITE -> PACKAGE {"name": "IRRIGIGATION_SCHEDULING_CONTROL_DATA"}.
# NOTE(review): "IRRIGIGATION" looks misspelled, but it is a runtime lookup
# key that must match the stored package name -- left untouched.
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_terminal(
    query_list,
    relationship="PACKAGE",
    property_mask={"name": "IRRIGIGATION_SCHEDULING_CONTROL_DATA"})
package_sets, package_sources = qs.match_list(query_list)
#print("package sources",package_sources)
#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("/data/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)

#
# Setup handle -- open data-store instance.
#
qs = Query_Support(redis_site)

# Graph query:
# SITE -> CLOUD_BLOCK_CHAIN_SERVER -> PACKAGE {"name": "CLOUD_BLOCK_CHAIN_SERVER"}.
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_relationship(
    query_list, relationship="CLOUD_BLOCK_CHAIN_SERVER")
query_list = qs.add_match_terminal(
    query_list,
    relationship="PACKAGE",
    property_mask={"name": "CLOUD_BLOCK_CHAIN_SERVER"})
package_sets, package_sources = qs.match_list(query_list)
package = package_sources[0]
data_structures = package["data_structures"]
generate_handlers = Generate_Handlers(package, qs)
from redis_support_py3.graph_query_support_py3 import Query_Support
import datetime
from py_cf_new_py3.chain_flow_py3 import CF_Base_Interpreter

#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("/data/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)

qs = Query_Support(redis_site)

# Graph query: SITE -> MQTT_SERVER (broker connection record for this site).
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_terminal(query_list, relationship="MQTT_SERVER")
host_sets, host_sources = qs.match_list(query_list)
host_data = host_sources[0]

# Graph query: SITE -> MQTT_DEVICE (device nodes for this site).
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_terminal(query_list, relationship="MQTT_DEVICE")
from py_cf_new_py3.chain_flow_py3 import CF_Base_Interpreter

#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("/data/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
site_data = json.loads(data)

#
# Setup handle -- open data-store instance.
#
qs = Query_Support(site_data)
op_monitor = construct_op_monitoring_instance(qs, site_data)
print("made it here 2")

#
# Adding chains
#
cf = CF_Base_Interpreter()
add_chains(op_monitor, cf)

#
# Executing chains -- cf.execute() runs the chain-flow loop (does not return).
#
print("made it here 3")
cf.execute()
# # # Read Boot File # expand json file # file_handle = open("system_data_files/redis_server.json", 'r') data = file_handle.read() file_handle.close() redis_site = json.loads(data) # # Setup handle # open data stores instance user_table = User_Data_Tables(redis_site) qs = Query_Support(redis_server_ip=redis_site["host"], redis_server_port=redis_site["port"]) eto = construct_eto_instance(qs, redis_site, user_table) # # Adding chains # cf = CF_Base_Interpreter() add_eto_chains(eto, cf) # # Executing chains # cf.execute() else: pass
# NOTE(review): this call is the tail of a preceding definition/chunk that is
# not visible in this view.
cf.execute()

if __name__ == "__main__":
    #
    # Read Boot File
    # expand json file
    #
    file_handle = open("system_data_files/redis_server.json", 'r')
    data = file_handle.read()
    file_handle.close()
    site_data = json.loads(data)

    qs = Query_Support(site_data)
    # Graph query: SITE -> PROCESSOR(local_node) -> PACKAGE "SYSTEM_MONITORING".
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=site_data["site"])
    query_list = qs.add_match_relationship(query_list,
                                           relationship="PROCESSOR",
                                           label=site_data["local_node"])
    query_list = qs.add_match_terminal(query_list,
                                       relationship="PACKAGE",
                                       label="SYSTEM_MONITORING")
    package_sets, package_nodes = qs.match_list(query_list)
    generate_handlers = Generate_Handlers(package_nodes[0], qs)
    # NOTE(review): the PI_MONITOR(...) call continues past the end of this
    # chunk; the remaining arguments are not visible here.
    pi_monitor = PI_MONITOR(package_nodes[0], generate_handlers,
# NOTE(review): the lines below are the tail of an argument-conversion
# function whose `def`, loop header, and first `if` branch lie before this
# view; indentation is reconstructed to match that structure.
            return_value.append(valid_address)
        elif i["type"] == "int":
            return_value.append(int(i["value"]))
        elif i["type"] == "string":
            return_value.append(str(i["value"]))
        else:
            raise ValueError("unsupported type")
    return return_value


file_handle = open("/data/redis_server.json", 'r')
data = file_handle.read()
file_handle.close()
redis_site = json.loads(data)

qs = Query_Support(redis_site)
redis_data_handle = qs.get_redis_data_handle()
# Contract metadata lives in its own redis db ("redis_contract_db").
redis_contract_handle = redis.StrictRedis(host=redis_site["host"],
                                          port=redis_site["port"],
                                          db=redis_site["redis_contract_db"])

# Connect to the local geth node over its IPC socket; the PoA middleware is
# injected at layer 0, so this targets a proof-of-authority chain.
ipc_socket = "/ipc/geth.ipc"
provider = Web3.IPCProvider(ipc_socket)
w3 = Web3(provider)
w3.middleware_onion.inject(geth_poa_middleware, layer=0)
# NOTE(review): `assert` is stripped under `python -O`; a raise would be
# sturdier for this connectivity check.
assert (w3.isConnected())
w3.eth.defaultAccount = w3.eth.accounts[0]

# Load the JSON list of contracts to deploy/bind.
File_object = open("contracts_to_load.json", "r")
contract_json = File_object.read()
File_object.close()
class PI_Web_Server_Core(object):
    """Core Flask web server: redis-backed config, digest auth, UI modules.

    Reads startup configuration from the site's redis "web" hash, builds the
    Flask app with HTTP digest auth, instantiates the web sub-modules named
    on the WEB_SERVER graph node, and generates the Bootstrap menu/modal
    template files consumed by the UI.
    """

    def __init__(self, name, site_data):
        # name      -- passed straight through to Flask(name).
        # site_data -- dict with redis "host", "port", "redis_password_db",
        #              plus whatever Query_Support needs (e.g. "site").
        redis_handle_pw = redis.StrictRedis(site_data["host"],
                                            site_data["port"],
                                            db=site_data["redis_password_db"],
                                            decode_responses=True)
        self.site_data = site_data
        # Startup values (SECRET_KEY, users, ...) live under hash key "web".
        startup_dict = redis_handle_pw.hgetall("web")
        self.qs = Query_Support(site_data)
        self.file_server_library = Construct_RPC_Library(
            self.qs, self.site_data)
        self.app = Flask(name)
        self.auth = HTTPDigestAuth()
        self.url_rule_class = URL_Rule_Class(self.app, self.auth)
        # Digest auth resolves passwords through the get_pw callback below.
        self.auth.get_password(self.get_pw)
        self.startup_dict = startup_dict
        self.app.template_folder = 'flask_templates'
        self.app.static_folder = 'static'
        self.app.config['SECRET_KEY'] = startup_dict["SECRET_KEY"]
        # JSON-encoded credential map; get_pw indexes it by username.
        self.users = json.loads(startup_dict["users"])
        Load_Static_Files(self.app,
                          self.auth)  #enable static files to be fetched
        self.redis_access = Load_Redis_Access(
            self.app, self.auth,
            request)  #enable web access for redis operations
        Load_App_Sys_Files(self.app, self.auth, request,
                           self.file_server_library)
        self.subsystems = []
        self.modules = {}
        self.load_specified_modules()

    def load_specified_modules(self):
        """Instantiate each web module listed on the WEB_SERVER graph node.

        Raises ValueError for an unrecognized module name. Also publishes the
        node's optional "status_function" to the browser as a JS global.
        """
        results = self.common_qs_search(["WEB_SERVER", "WEB_SERVER"])
        result = results[0]
        modules = result["modules"]
        for i in modules:
            if i == "monitoring":
                print(i)
                PI_Web_Monitor_Server(self)
            elif i == "system_control":
                print(i)
                PI_Web_System_Control(self)
            elif i == "mqtt_client":
                print(i)
                PI_MQTT_Client_Monitor(self)
            elif i == "eto":
                print(i)
                ETO_Management(self)
            elif i == "irrigation_scheduling":
                print(i)
                Irrigation_Scheduling(self)
            elif i == "irrigation_control":
                print(i)
                Load_Irrigation_Control(self)
            elif i == "modbus_control":
                print("do nothing right now")
            else:
                raise ValueError("bad web module")
        self.result = result
        if "status_function" in self.result:
            print(self.result["status_function"])
        else:
            # Default to an empty status function when none is configured.
            self.result["status_function"] = ""
            print("status function not defined")
        # Expose the chosen status function to the client-side JS.
        file_handle = open("flask_templates/js/status_definition.js", "w")
        file_handle.write('__status_option__ = "'
                          + self.result["status_function"] + '"; \n')
        file_handle.close()

    def common_qs_search(self, search_list):
        # generalized graph search
        """Walk SITE -> ... -> terminal and return the matching node sources.

        search_list -- relationship names; each entry is either a bare
        relationship string or a [relationship, label] pair. The last entry
        is the terminal match; the rest are intermediate relationships.
        """
        query_list = []
        query_list = self.qs.add_match_relationship(
            query_list, relationship="SITE", label=self.site_data["site"])
        for i in range(0, len(search_list) - 1):
            if type(search_list[i]) == list:
                query_list = self.qs.add_match_relationship(
                    query_list,
                    relationship=search_list[i][0],
                    label=search_list[i][1])
            else:
                query_list = self.qs.add_match_relationship(
                    query_list, relationship=search_list[i])
        if type(search_list[-1]) == list:
            query_list = self.qs.add_match_terminal(
                query_list,
                relationship=search_list[-1][0],
                label=search_list[-1][1])
        else:
            query_list = self.qs.add_match_terminal(
                query_list, relationship=search_list[-1])
        node_sets, node_sources = self.qs.match_list(query_list)
        return node_sources

    def get_pw(self, username):
        """Digest-auth callback: stored password for username, else None."""
        if username in self.users:
            return self.users[username]
        return None

    def generate_menu_page(self):
        """Sort subsystems then regenerate the menu and modal templates."""
        self.subsystems.sort()
        self.generate_menu_template()
        self.generate_modal_template()

    def generate_default_index_page(self):
        # Root URL serves the (auth-wrapped) site-map view.
        self.app.add_url_rule("/", "home_page", self.links_a1)

    def generate_index_page(self, module, element):
        # Root URL serves one specific module menu entry instead.
        menu_data = self.url_rule_class.subsystems[module]["menu_data"]
        menu_element = menu_data[element]
        self.app.add_url_rule("/", "home page", menu_element[0])

    def generate_site_map(self):
        """Register /link_page listing every registered endpoint (auth required)."""
        self.links_a1 = self.auth.login_required(self.site_map_function)
        self.app.add_url_rule("/link_page", "/links_page", self.links_a1)

    def site_map_function(self):
        """Render a sorted list of all URL-map endpoint names."""
        links = []
        for rule in self.app.url_map.iter_rules():
            # Filter out rules we can't navigate to in a browser
            # and rules that require parameters
            #url = url_for(rule.endpoint, **(rule.defaults or {}))
            links.append((rule.endpoint))
        links.sort()
        return render_template("list_of_endpoints", endpoints=links)

    def run_http(self):
        """Serve plain HTTP; assumes self.port / self.debug were set elsewhere."""
        self.app.run(threaded=True,
                     use_reloader=True,
                     host='0.0.0.0',
                     port=self.port,
                     debug=self.debug)

    def run_https(self):
        """Serve HTTPS using the cert/key pair under /data."""
        startup_dict = self.startup_dict
        self.app.run(threaded=True,
                     use_reloader=True,
                     host='0.0.0.0',
                     debug=self.debug,
                     port=self.port,
                     ssl_context=("/data/cert.pem", "/data/key.pem"))

    def generate_menu_template(self):
        """Write the Bootstrap navbar dropdown template to flask_templates/menu."""
        f = open(self.app.template_folder + '/menu', 'w')
        output_string = '''
<nav class="navbar navbar-expand-sm bg-dark navbar-dark">
  <!-- Links -->
  <ul class="navbar-nav">
    <!-- Dropdown -->
    <li class="nav-item dropdown">
      <a class="nav-link dropdown-toggle" href="#" id="navbardrop" data-toggle="dropdown">Menu</a>
      <div class="dropdown-menu">
'''
        f.write(output_string)
        # NOTE(review): bare attribute access below is a no-op; kept as-is.
        self.url_rule_class.subsystems
        for i in self.url_rule_class.subsystems:
            # One dropdown item per subsystem; clicking opens its modal.
            temp = ' <a class="dropdown-item" href="#" data-toggle="modal" data-target="#' + i + '">' + i + "</a>\n"
            f.write(temp)
        output_string = '''
      </div>
    </li>
  </ul>
  <ul class="navbar-nav">
    <button id="status_panel", class="btn " type="submit">Status</button>
  </ul>
  <nav class="navbar navbar-light bg-dark navbar-dark">
    <span class="navbar-text" >
      <h4 id ="status_display"> Status: </h4>
    </span>
  </nav>
</nav>
'''
        f.write(output_string)
        f.close()

    def generate_modal_template(self):
        """Write one Bootstrap modal per subsystem to flask_templates/modals."""
        f = open(self.app.template_folder + '/modals', 'w')
        for i in self.url_rule_class.subsystem_order:
            #print("generate_modal_template - i",i)
            output_string = '<!–' + i + ' –>\n'
            f.write(output_string)
            output_string = '<div class="modal fade" id=' + i + ' tabindex="-1" role="dialog" aria-labelledby="accountModalLabel" aria-hidden="true">\n'
            f.write(output_string)
            output_string = '''
  <div class="modal-dialog" role="document">
    <div class="modal-content">
      <div class="modal-header">
'''
            f.write(output_string)
            f.write(' <h5 class="modal-title" id="accountModalLabel">' + i +
                    '</h5>\n')
            output_string = '''
        <button type="button" class="close" data-dismiss="modal" aria-label="close">
          <span aria-hidden="true">×</span>
        </button>
      </div>
      <div class="modal-body">
        <ul >
'''
            f.write(output_string)
            # <li><a href ='/control/display_past_system_alerts' target="_self">Current System State</a></li>
            sub_system_data = self.url_rule_class.subsystems[i]
            temp = sub_system_data["menu_data"]
            #
            # One list item per menu entry: /<subsystem>/<route> -> label.
            for j in sub_system_data['menu_list']:
                data = temp[j]
                #print("data",data)
                format_output = '<li><a href=' + '"/' + i + '/' + data[
                    1] + '" target="_self">' + data[2] + '</a></li>\n'
                f.write(format_output)
            output_string = '''
        </ul>
      </div>
      <div class="modal-footer">
        <button type="button" class="btn btn-secondary" data-dismiss="modal">Close</button>
      </div>
    </div>
  </div>
</div>
'''
            f.write(output_string)
        f.close()
# NOTE(review): the lines below are the tail of a function whose `def` header
# lies before this view; indentation is reconstructed accordingly.
    remote_sets, remote_sources = qs.match_list(query_list)
    # Map each remote's modbus address to its parameter record.
    for i in remote_sources:
        return_value[i["modbus_address"]] = i["parameters"]
    return return_value


if __name__ == "__main__":
    # PLC server name is selected on the command line.
    plc_server_name = sys.argv[1]

    file_handle = open("/data/redis_server.json", 'r')
    data = file_handle.read()
    file_handle.close()
    redis_site = json.loads(data)

    qs = Query_Support(redis_site)

    # find data structures
    # Graph query:
    # SITE -> PLC_SERVER(plc_server_name) -> PACKAGE {"name": "PLC_SERVER_DATA"}.
    query_list = []
    query_list = qs.add_match_relationship(query_list,
                                           relationship="SITE",
                                           label=redis_site["site"])
    query_list = qs.add_match_relationship(query_list,
                                           relationship="PLC_SERVER",
                                           label=plc_server_name)
    query_list = qs.add_match_terminal(
        query_list,
        relationship="PACKAGE",
        property_mask={"name": "PLC_SERVER_DATA"})
    package_sets, package_sources = qs.match_list(query_list)
if __name__ == "__main__": cf = CF_Base_Interpreter() # # # Read Boot File # expand json file # time.sleep(20) # wait for mqtt server get started file_handle = open("system_data_files/redis_server.json", 'r') data = file_handle.read() file_handle.close() site_data = json.loads(data) qs = Query_Support(site_data) query_list = [] query_list = qs.add_match_relationship(query_list, relationship="SITE", label=site_data["site"]) query_list = qs.add_match_terminal(query_list, relationship="PROCESSOR", label=site_data["local_node"]) processor_sets, processor_nodes = qs.match_list(query_list) query_list = [] query_list = qs.add_match_relationship(query_list, relationship="SITE", label=site_data["site"]) query_list = qs.add_match_relationship(query_list,
import os
import copy

#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("/data/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)

#
# Setup handle -- open data-store instance.
#
qs = Query_Support(redis_site)
file_client = Construct_RPC_Library(qs, redis_site)

# Smoke-test the remote file RPC library end to end:
# list, mkdir, save, load, exists, delete, list again.
print(file_client.file_directory(""))
print(file_client.mkdir("test_path"))
print(file_client.file_directory(""))
print(
    file_client.save_file("test_path", "test_file.test",
                          "hi\nthere\nbrown\ncow"))
print(file_client.load_file("test_path", "test_file.test"))
print(file_client.file_directory("test_path"))
print(file_client.file_exists("test_path", "test_file.test"))
print(file_client.delete_file("test_path", "test_file.test"))
print(file_client.file_directory("test_path"))
if __name__ == "__main__":
    file_handle = open("system_data_files/redis_server.json",'r')
    data = file_handle.read()
    file_handle.close()
    site_data = json.loads(data)

    # Run each command-line argument to completion, recording [result, name].
    results = []
    for i in range(1,len(sys.argv)):
        results.append(
            [run_process_to_completion_no_log(sys.argv[i]), sys.argv[i]])
    print("results",results)

    qs = Query_Support( site_data )
    # Graph query: SITE -> PROCESSOR(local_node) -> PROCESS_INITIALIZATION.
    query_list = []
    query_list = qs.add_match_relationship( query_list,relationship="SITE",label=site_data["site"] )
    query_list = qs.add_match_relationship( query_list,relationship="PROCESSOR",label=site_data["local_node"] )
    query_list = qs.add_match_terminal(query_list,relationship="PROCESS_INITIALIZATION")
    initialization_sets, initialization_nodes = qs.match_list(query_list)
    # Fall back to an empty command list when no initialization node exists.
    if len(initialization_nodes) > 0:
        initialization_list = initialization_nodes[0]["command_list"]
    else:
        initialization_list= []

    # Second query; the add_match_terminal call below continues past the end
    # of this chunk (remaining arguments not visible here).
    query_list = []
    query_list = qs.add_match_relationship( query_list,relationship="SITE",label=site_data["site"] )
    query_list = qs.add_match_relationship( query_list,relationship="PROCESSOR",label=site_data["local_node"] )
    query_list = qs.add_match_terminal( query_list,
#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("/data/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)

#
# Setup handle -- open data-store instance.
#
qs = Query_Support(redis_site)
redis_handle = qs.get_redis_data_handle()

# Graph query: SITE -> PACKAGE {"name": "CLOUD_SERVICE_QUEUE_DATA"}.
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_terminal(
    query_list,
    relationship="PACKAGE",
    property_mask={"name": "CLOUD_SERVICE_QUEUE_DATA"})
package_sets, package_sources = qs.match_list(query_list)
package = package_sources[0]
data_structures = package["data_structures"]
from redis_support_py3.load_files_py3 import APP_FILES
from redis_support_py3.user_data_tables_py3 import User_Data_Tables
from redis_support_py3.graph_query_support_py3 import Query_Support
from py_cf_new_py3.chain_flow_py3 import CF_Base_Interpreter
from redis_support_py3.construct_data_handlers_py3 import Generate_Handlers

#
# Read Boot File -- expand the JSON site description.
# FIX: use a context manager so the handle is closed even if read() raises.
#
with open("system_data_files/redis_server.json", 'r') as file_handle:
    data = file_handle.read()
redis_site = json.loads(data)

app_files = APP_FILES(redis_site)
user_table = User_Data_Tables(redis_site)
qs = Query_Support(redis_server_ip=redis_site["host"],
                   redis_server_port=redis_site["port"])
delete_cimis_email = Delete_Cimis_Email(app_files, user_table, qs)

# Graph query: SITE -> PACKAGE {"name": "IRRIGATION_DATA"}.
query_list = []
query_list = qs.add_match_relationship(query_list,
                                       relationship="SITE",
                                       label=redis_site["site"])
query_list = qs.add_match_terminal(
    query_list,
    relationship="PACKAGE",
    property_mask={"name": "IRRIGATION_DATA"})
package_sets, package_sources = qs.match_list(query_list)
package = package_sources[0]