def setTrigger(self, trigger):
    """Validate *trigger* and store it on this object.

    Accepts any dict; a plain mapping is wrapped in a Trigger.Trigger
    before being stored.  Every key except the wildcards ('*' / None)
    must pass self.validateSingle.
    """
    if not isinstance(trigger, dict):
        raise TypeError("trigger parameter must be a dict instance")
    if not isinstance(trigger, Trigger.Trigger):
        # Promote a plain mapping to the project's Trigger type.
        trigger = Trigger.Trigger(trigger)
    for key in trigger.keys():
        # '*' and None act as wildcard keys and need no validation.
        if key == '*' or key is None:
            continue
        if not self.validateSingle(key):
            raise ValueError(str(key) + ' not correct for ' + str(self.id))
    self.trigger = trigger
def __init__(self, dwf, hdwf):
    """Bind the device handles and create channels, trigger and acquisition defaults."""
    self.dwf = dwf
    self.hdwf = hdwf
    # One Channel per physical input: index 0 and index 1.
    self.ch1, self.ch2 = (Channel.Channel(dwf, hdwf, idx) for idx in (0, 1))
    # Acquisition defaults: 100 kHz sample rate, 512-sample buffer.
    self.frequency = 100000.00
    self.buffersize = 512
    self.trigger = Trigger.Trigger(dwf, hdwf)
def addTriggers(self, graph):
    """Populate *graph* with pickup triggers and record them in self.triggers.

    Node ids are multiples of 29 so that pickups land on distinct,
    well-separated graph nodes.  Each trigger is both appended to
    self.triggers and attached to its node's ``extra_info``.
    """
    # (node-id offset, count, value, kind, subtype) for each pickup family.
    # NOTE(review): the original also defined no_weaponGivers = 10, which
    # was never read anywhere; the unused local has been removed.
    pickup_specs = [
        (0, 5, 60, "Health_Giver", None),        # health givers at 0..4 * 29
        (5, 5, 100, "Weapon_Giver", "Railgun"),  # railguns at 5..9 * 29
        (10, 5, 100, "Weapon_Giver", "Rocket"),  # rockets at 10..14 * 29
    ]
    for offset, count, value, kind, subtype in pickup_specs:
        for i in range(count):
            node_id = (offset + i) * 29
            trigger = tr.Trigger(node_id, value, kind, subtype)
            self.triggers.append(trigger)
            graph.nodes[node_id].extra_info = trigger
def __init__(self, name='RNA_Scan_Thread'):
    # Log entry into the constructor on the 'broadway' channel.
    pfx = 'RNA_Scan_Thread.__init__:'
    msg = '%s Entering...' % pfx
    msglog.log('broadway', msglog.types.INFO, msg)
    self.debug = 0
    # Maps file-descriptors to host-names.
    self.hosts = {}
    # Maps file-descriptors to sessions.
    self.sessions = {}
    # Maps host-names to sets of file-descriptors
    # NOTE(review): that description appears to fit self.connections below;
    # self.socketmap is only handed to Trigger here — confirm against Trigger.
    self.socketmap = {}
    self.connections = {}
    self.bPollRun = False
    self.work_queue = None
    # File descriptors the polling loop should watch.
    self.descriptors = set()
    # Presumably a self-pipe-style wake-up channel for the poll loop — its
    # fd is watched alongside the session descriptors.
    self.trigger_channel = Trigger(self.socketmap)
    self.descriptors.add(self.trigger_channel.fileno())
    msg = '%s Done.' % pfx
    msglog.log('broadway', msglog.types.INFO, msg)
    super(RNA_Scan_Thread, self).__init__(name=name)
def __deepcopy__(self, memo):
    """Return an independent copy of this field.

    The clone is built through the regular constructor with an empty
    trigger, then given its own copies of the mutable parts (choices
    and trigger) so mutating the copy never affects the original.
    """
    field_cls = type(self)
    clone = field_cls(
        id=self.id,
        type=self.type,
        label=self.label,
        placeholder=self.placeholder,
        required=self.required,
        default=self.default,
        value=self.value,
        trigger={},
    )
    clone.choices = dict(self.choices)
    clone.setTrigger(Trigger.Trigger(self.trigger))
    return clone
def Setup(self):
    """Build the level: one player, five enemies, a door trigger and the walls."""
    # Create the player.
    joueur = Player.Player("Player", self.engine)
    # Five identical enemies, all targeting the player.
    self.Ennemies = [
        Enemy.Enemy("Zoubida", joueur, self.engine) for _ in range(5)
    ]
    # Add a door trigger to the scene at [0, 64] with extent [20, 48].
    self.engine.scene.contenu.append(
        Trigger.Porte("Porte", self.engine, [0, 64], [20, 48])
    )
    WallWrapper(self.engine)
async def crTrigger(ctx, *args):
    """Create a Trigger inside the currently open module from chat arguments."""
    # A module must be open before a trigger can be attached to it.
    if not config.openModule:
        await ctx.channel.send(embed=utility.getEmbed(
            "Error: No module is open, please open a module first with .OpenModule [.om]"
        ))
        return
    remaining = list(args)
    # At minimum a name and a mode are required.
    if len(remaining) < 2:
        await ctx.channel.send(
            embed=utility.getEmbed("Error: Invalid arguments"))
        return
    trig_name = remaining.pop(0)
    trig_mode = remaining.pop(0)
    # Remaining tokens are the mode-specific arguments.
    if not checkArgs(trig_mode, remaining):
        await ctx.channel.send(
            embed=utility.getEmbed("Error: Invalid arguments"))
        return
    module = config.openModule[-1]
    created = Trigger.Trigger(trig_name, checkMode(trig_mode), module, remaining)
    module.addTrigger(created)
    await ctx.channel.send(embed=utility.getEmbed("Created Trigger"))
class RNA_Scan_Thread(Thread):
    """ Polls for and handles any incoming msgs from established RNA sockets connected to clients."""
    def __init__(self, name='RNA_Scan_Thread'):
        pfx = 'RNA_Scan_Thread.__init__:'
        msg = '%s Entering...' % pfx
        msglog.log('broadway', msglog.types.INFO, msg)
        self.debug = 0
        # Maps file-descriptors to host-names.
        self.hosts = {}
        # Maps file-descriptors to sessions.
        self.sessions = {}
        # Maps host-names to sets of file-descriptors
        # NOTE(review): that mapping is actually kept in self.connections
        # (see register_session); self.socketmap is only handed to Trigger.
        self.socketmap = {}
        self.connections = {}
        self.bPollRun = False
        self.work_queue = None
        # Descriptors the select() loop in run() watches.
        self.descriptors = set()
        # Wake-up channel: its fd is selected on so other threads can
        # interrupt the poll via notify_poll().
        self.trigger_channel = Trigger(self.socketmap)
        self.descriptors.add(self.trigger_channel.fileno())
        msg = '%s Done.' % pfx
        msglog.log('broadway', msglog.types.INFO, msg)
        super(RNA_Scan_Thread, self).__init__(name=name)

    def start(self):
        # (Re)size the worker pool, then let Thread.start() invoke run().
        self.bPollRun = True
        if not self.work_queue:
            self.work_queue = ThreadPool(5)
        else:
            self.work_queue.resize(5)
        # Superclass calls run() on separate thread:
        return super(RNA_Scan_Thread, self).start()

    def stop(self):
        # Ask the poll loop to exit, wake it, and drain the worker pool.
        self.bPollRun = False
        self.notify_poll()
        self.work_queue.resize(0)
        return super(RNA_Scan_Thread, self).stop()

    def is_running(self):
        return self.isAlive()

    def run(self):
        # Main poll loop: select() over registered descriptors plus the
        # command (wake-up) fd, dispatching readable sessions to the pool.
        pfx = 'RNA_Scan_Thread.run:'
        cmdfd = self.trigger_channel.fileno()
        # Bind hot-loop callables to locals.
        enqueue = self.work_queue.queue_noresult
        handle_session = self.handle_session_input
        clear_notifications = self.clear_notifications
        while self.bPollRun:
            try:
                descriptors = self.descriptors.copy()
                if cmdfd not in descriptors:
                    msglog.warn("Command channel FD removed!")
                    descriptors.add(cmdfd)
                r,w,e = select.select(descriptors, [], descriptors, 1)
                for fd in e:
                    # An exceptional condition on the command channel is fatal.
                    if fd == cmdfd:
                        message = "%s internal polling error. Must restart."
                        msglog.error(message % pfx)
                        raise TypeError("command channel OOB data")
                    try:
                        self.unregister_session(self.get_session(fd))
                    except:
                        msglog.warn("%s I/O event handling error." % pfx)
                        msglog.exception(prefix="handled")
                for fd in r:
                    if fd in self.descriptors:
                        try:
                            if fd == cmdfd:
                                clear_notifications()
                            else:
                                # Remove the fd while a worker owns it so the
                                # same socket is not dispatched twice.
                                self.descriptors.discard(fd)
                                enqueue(handle_session, fd)
                        except:
                            msglog.warn("%s I/O event handling error." % pfx)
                            msglog.exception(prefix="handled")
            except:
                msglog.warn("%s loop error." % pfx)
                msglog.exception(prefix="handled")
        msglog.inform("%s exiting." % pfx)

    def notify_poll(self):
        # Wake the select() loop.
        self.trigger_channel.trigger_event()

    def clear_notifications(self):
        # Drain pending wake-up data from the command channel.
        self.trigger_channel.handle_read()

    def register_session(self, session):
        # Track a newly connected session keyed by its socket fd.
        # Returns True on success, False if the socket is unusable.
        try:
            fd = session.socket.fileno()
            host,port = session.socket.getpeername()
        except:
            msglog.warn("Failed to add session: %r" % session)
            msglog.exception(prefix="handled")
            registered = False
        else:
            self.hosts[fd] = host
            self.sessions[fd] = session
            self.connections.setdefault(host, set()).add(fd)
            self.descriptors.add(fd)
            self.notify_poll()
            registered = True
        return registered

    def unregister_session(self, session):
        # Drop all bookkeeping for a session and disconnect it.
        # Returns True on success, False if the socket fd is gone.
        try:
            fd = session.socket.fileno()
        except:
            unregistered = False
        else:
            host = self.hosts.pop(fd, None)
            if host is not None:
                connections = self.connections.get(host, None)
                if connections is not None:
                    connections.discard(fd)
                    if not connections:
                        self.connections.pop(host, None)
            self.sessions.pop(fd, None)
            if fd in self.descriptors:
                self.descriptors.discard(fd)
                self.notify_poll()
            try:
                session.disconnect()
            except:
                msglog.exception(prefix="handled")
            unregistered = True
        return unregistered

    def get_session(self, fd):
        # Accept either a raw fd or any object exposing fileno().
        if not isinstance(fd, int):
            fd = fd.fileno()
        return self.sessions[fd]

    def get_host(self, fd):
        # Accept either a raw fd or any object exposing fileno().
        if not isinstance(fd, int):
            fd = fd.fileno()
        return self.hosts[fd]

    def get_connections(self, host):
        return self.connections.get(host, [])

    def has_connections(self, host):
        return host in self.connections

    def get_sessions(self, host):
        connections = self.get_connections(host)
        return map(self.get_session, connections)

    def handle_session_input(self, fd):
        # Worker-side handler: read one RNA request from the session,
        # execute it, and send the result back.
        pfx = 'RNA_Scan_Thread.handle_session_input'
        unregister = False
        session = self.get_session(fd)
        try:
            rna_header = RNAHeader(session.socket)
            protocol = _protocol_factory(rna_header.protocol)
            protocol.setup(session, rna_header)
            command = protocol.recv_command()
            result = _invoke_command(command)
            protocol.send_result(command, result)
        except EInvalidMessage, error:
            # An empty message means the peer closed the connection.
            if not error.message:
                unregister = True
                msglog.warn("Removing client-closed session: %s" % session)
            else:
                unregister = False
                msglog.exception(prefix="handled")
        except SocketTimeoutError:
            unregister = False
            # NOTE(review): 'message' is not defined in this method — this
            # looks like a typo for 'msglog' and would raise NameError when
            # a SocketTimeoutError occurs; left untouched (doc-only pass).
            # The method also appears truncated here: 'unregister' is set
            # but never acted on — confirm against the upstream source.
            message.exception(prefix="handled")
def _setup_trigger_channel(self):
    """Create the wake-up Trigger channel, exactly once.

    Bug fix: the one-shot guard previously inspected '_trigger_channel'
    (leading underscore) while the attribute actually assigned below is
    'trigger_channel', so the assertion could never fire and a second
    call would silently replace the channel.  The guard now checks the
    attribute that is really set.
    """
    assert getattr(self, 'trigger_channel', None) is None
    self.trigger_channel = Trigger(self)
# VK bot bootstrap: wires a Trigger handler and a Command handler to the
# VK long-poll API.
import vk_api
from vk_api.longpoll import VkLongPoll, VkEventType
import Command
import Trigger

# NOTE(review): TOKEN is empty here — it must be filled in before running.
TOKEN = ""
BOT_ID = 283644170
ADMINS = [283644170]
PREFIX = "-->"

trigger = Trigger.Trigger()
command = Command.Command()
api = vk_api.VkApi(token=TOKEN)
longpoll = VkLongPoll(api)
vk = api.get_api()


@command.add(True)
def name(ev):
    # Debug command: dump the raw long-poll event.
    print(ev.raw)


def parseMessage(ev):
    # Dispatch is currently disabled; the intended wiring is kept below.
    # trigger.handle(ev)
    # command.handle(ev)
    # event.attachments["attach1_kind"] == "audiomsg"
    pass
class HTTPServer(asyncore.dispatcher):
    # Listening HTTP server dispatcher.  Accepted connections are wrapped in
    # channel_class instances, which consult the installed request/response
    # handler lists.
    PROTOCOL = "HTTP"
    SERVER_IDENT = '%s Server (%s)' % (PROTOCOL, VERSION_STRING)
    channel_class = Channel
    # Shared wake-up channel registered on the Redusa-private socket map
    # (see the REDUSA_SOCKET_MAP hack at the bottom of the class).
    event_channel = Trigger(REDUSA_SOCKET_MAP)

    def __init__(self, ip, port, user_manager, realm, authentication,
                 maintenance_interval=25, zombie_timeout=600, debug=0):
        self.debug = debug
        self.name = ip
        self.port = port
        self.user_manager = user_manager
        self.realm = realm
        self.authentication = authentication
        self.maintenance_interval = maintenance_interval
        self.zombie_timeout = zombie_timeout
        asyncore.dispatcher.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
        # Note the double-reference for backwards compatibility.
        self.request_handlers = self.handlers = []
        self.response_handlers = []
        self.set_reuse_addr()
        self.bind((ip, port))
        self.listen(1024)
        host, port = self.socket.getsockname()
        # Best effort at a human-readable server name via reverse lookup.
        try:
            if not ip:
                self.log_info('Computing default hostname', msglog.types.WARN)
                ip = socket.gethostbyname(socket.gethostname())
            self.name = socket.gethostbyaddr(ip)[0]
        except socket.error:
            self.name = ip
            self.log_info('Cannot do reverse lookup', msglog.types.WARN)
        # Traffic statistics.
        self.total_clients = Counter()
        self.total_requests = Counter()
        self.exceptions = Counter()
        self.bytes_out = Counter()
        self.bytes_in = Counter()
        self.log_info('Started')

    def protocol(self):
        return self.PROTOCOL

    def __str__(self):
        # "name:port", falling back to the server ident when unnamed.
        name = self.name
        if not name:
            name = self.SERVER_IDENT
        if self.port:
            name = '%s:%s' % (name, self.port)
        return name

    def log_info(self, message, type=msglog.types.INFO):
        # Debug-level messages are suppressed unless self.debug is set.
        if type == msglog.types.DB and not self.debug:
            return
        msglog.log(str(self), type, message)

    def writable(self):
        # The listening socket itself never has output.
        return 0

    def response_ready(self, channel):
        # Wake the asyncore loop so the channel's response gets flushed.
        self.event_channel.trigger_event()

    def handle_read(self):
        pass

    def readable(self):
        return self.accepting

    def handle_connect(self):
        pass

    def handle_accept(self):
        self.total_clients.increment()
        try:
            conn, addr = self.accept()
        except socket.error:
            # linux: on rare occasions we get a bogus socket back from
            # accept.  socketmodule.c:makesockaddr complains that the
            # address family is unknown.  We don't want the whole server
            # to shut down because of this.
            self.log_info('warning: server accept() threw an exception',
                          msglog.types.WARN)
            return
        except TypeError:
            # unpack non-sequence.  this can happen when a read event
            # fires on a listening socket, but when we call accept()
            # we get EWOULDBLOCK, so dispatcher.accept() returns None.
            # Seen on FreeBSD3.
            self.log_info('warning: server accept() threw EWOULDBLOCK',
                          msglog.types.WARN)
            return
        self.channel_class(self, conn, addr)

    def install_handler(self, handler, back=0):
        # Responders go on the response side; everything else handles requests.
        if isinstance(handler, Responder):
            self.install_response_handler(handler, back)
        else:
            self.install_request_handler(handler, back)

    def remove_handler(self, handler):
        if isinstance(handler, Responder):
            self.remove_response_handler(handler)
        else:
            self.remove_request_handler(handler)

    def install_request_handler(self, handler, back=0):
        # New handlers go to the front unless 'back' is requested.
        if handler not in self.request_handlers:
            if back:
                self.request_handlers.append(handler)
            else:
                self.request_handlers.insert(0, handler)

    def remove_request_handler(self, handler):
        self.request_handlers.remove(handler)

    def install_response_handler(self, responder, back=0):
        # Umbrella responders always go to the back so specific ones match first.
        if responder not in self.response_handlers:
            if back or responder.isumbrella():
                self.response_handlers.append(responder)
            else:
                self.response_handlers.insert(0, responder)

    def remove_response_handler(self, responder):
        self.response_handlers.remove(responder)

    #
    # REDUSA_SOCKET_MAP HACK:
    #
    # This section forces a Redusa specific socket_map so it will play nice
    # with with other threads that use asyncore.
    #
    def add_channel(self, map=None):
        assert map is None, 'Hack assumes that the map argument is None...'
        return asyncore.dispatcher.add_channel(self, REDUSA_SOCKET_MAP)

    def del_channel(self, map=None):
        assert map is None, 'Hack assumes that the map argument is None...'
        return asyncore.dispatcher.del_channel(self, REDUSA_SOCKET_MAP)
def __init__(self, log_flag):
    # Logging, configuration and crypto-key bootstrap for the master daemon.
    self.logger = Logger(log_flag, LOG_FILE);
    self.logger.info('Started Domoleaf Master Daemon');
    self.d3config = {};
    # AES keys: one per slave daemon, plus this master's own key.
    self.aes_slave_keys = {};
    self.aes_master_key = None
    self.connected_clients = {};
    self.sql = MasterSql();
    self._parser = DaemonConfigParser(MASTER_CONF_FILE);
    # Database credentials read from the master configuration file.
    self.db_username = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_USER_ENTRY);
    self.db_passwd = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_PASSWORD_ENTRY);
    self.db_dbname = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_DB_NAME_ENTRY);
    self.get_aes_slave_keys(0);
    self.reload_camera(None, None, 0);
    self._scanner = Scanner();
    # Known hosts, seeded with this machine itself.
    self.hostlist = [];
    self.hostlist.append(Host('', '127.0.0.1', socket.gethostname().upper()));
    self.knx_manager = KNXManager(self.aes_slave_keys);
    self.enocean_manager = EnOceanManager(self.aes_slave_keys);
    self.reload_d3config(None, None, 0);
    self.trigger = Trigger(self);
    self.scenario = Scenario(self);
    self.schedule = Schedule(self);
    self.calcLogs = CalcLogs(self);
    # Numeric action codes -> device-control callables.
    self.functions = {
        1 : self.knx_manager.send_knx_write_short_to_slave,
        2 : self.knx_manager.send_knx_write_long_to_slave,
        3 : self.knx_manager.send_knx_write_speed_fan,
        4 : self.knx_manager.send_knx_write_temp,
        5 : IP_IRManager().send_to_gc,
        6 : self.knx_manager.send_on,
        7 : self.knx_manager.send_to_thermostat,
        8 : self.knx_manager.send_clim_mode,
        9 : HttpReq().http_action,
        10 : self.upnp_audio,
        11 : self.knx_manager.send_knx_write_percent,
        12 : self.knx_manager.send_off,
        13 : self.knx_manager.send_knx_write_short_to_slave_r,
    };
    # Incoming packet_type codes -> handler methods (see parse_data).
    self.data_function = {
        DATA_MONITOR_KNX : self.monitor_knx,
        DATA_MONITOR_IP : self.monitor_ip,
        DATA_MONITOR_ENOCEAN : self.monitor_enocean,
        DATA_MONITOR_BLUETOOTH : self.monitor_bluetooth,
        DATA_KNX_READ : self.knx_read,
        DATA_KNX_WRITE_S : self.knx_write_short,
        DATA_KNX_WRITE_L : self.knx_write_long,
        DATA_SEND_TO_DEVICE : self.send_to_device,
        DATA_CRON_UPNP : self.cron_upnp,
        DATA_SEND_MAIL : self.send_mail,
        DATA_MODIF_DATETIME : self.modif_datetime,
        DATA_CHECK_SLAVE : self.check_slave,
        DATA_RELOAD_CAMERA : self.reload_camera,
        DATA_RELOAD_D3CONFIG : self.reload_d3config,
        DATA_BACKUP_DB_CREATE_LOCAL : self.backup_db_create_local,
        DATA_BACKUP_DB_REMOVE_LOCAL : self.backup_db_remove_local,
        DATA_BACKUP_DB_LIST_LOCAL : self.backup_db_list_local,
        DATA_BACKUP_DB_RESTORE_LOCAL : self.backup_db_restore_local,
        DATA_CHECK_USB : self.check_usb,
        DATA_BACKUP_DB_CREATE_USB : self.backup_db_create_usb,
        DATA_BACKUP_DB_REMOVE_USB : self.backup_db_remove_usb,
        DATA_BACKUP_DB_LIST_USB : self.backup_db_list_usb,
        DATA_BACKUP_DB_RESTORE_USB : self.backup_db_restore_usb,
        DATA_SMARTCMD_LAUNCH : self.smartcmd_launch,
        DATA_TRIGGERS_LIST_UPDATE : self.triggers_list_update,
        DATA_SCHEDULES_LIST_UPDATE : self.schedules_list_update,
        DATA_SCENARIOS_LIST_UPDATE : self.scenarios_list_update,
        DATA_CHECK_ALL_SCHEDULES : self.check_schedules,
        DATA_CALC_LOGS : self.launch_calc_logs,
        DATA_CHECK_UPDATES : self.check_updates,
        DATA_UPDATE : self.update,
        DATA_SEND_ALIVE : self.send_request,
        DATA_SEND_TECH : self.send_tech,
        DATA_SEND_INTERFACES : self.send_interfaces,
        DATA_SHUTDOWN_D3 : self.shutdown_d3,
        DATA_REBOOT_D3 : self.reboot_d3,
        DATA_WIFI_UPDATE : self.wifi_update,
        DATA_REMOTE_SQL : self.remote_sql
    };
# App-launcher triggers driven by typed key sequences (pynput listener).
from pynput.keyboard import Key, Listener
import atexit
import signal
import sys
import Trigger

configs = []
################## Config ##########################
useEscapeToExit = False
showKeysLogs = True
#### Your Apps ####
discord = Trigger.Trigger(
    "disc", True,
    '\"C:\\Users\\Fuking useless shit\\AppData\\Local\\Discord\\Update.exe\" --processStart Discord.exe',
    "Discord.exe")
configs.append(discord)
# Bug fix: the firefox launch command was missing its closing escaped
# quote (compare the discord entry above), which would hand the shell an
# unterminated quoted path.
firefox = Trigger.Trigger(
    "fire", True,
    '\"C:\\Program Files\\Mozilla Firefox\\firefox.exe\"',
    "firefox.exe")
configs.append(firefox)
####################################################


def on_press(key):
    """Feed every configured trigger the pressed key; optionally log/exit."""
    global configs
    for trigger in configs:
        trigger.check_sequence(key)
    # Returning False stops the pynput Listener.
    if key == Key.esc and useEscapeToExit:
        return False
    if showKeysLogs:
        print(str(key))
class RNA_Scan_Thread(Thread):
    """ Polls for and handles any incoming msgs from established RNA sockets connected to clients."""
    def __init__(self, name='RNA_Scan_Thread'):
        pfx = 'RNA_Scan_Thread.__init__:'
        msg = '%s Entering...' % pfx
        msglog.log('broadway', msglog.types.INFO, msg)
        self.debug = 0
        # Maps file-descriptors to host-names.
        self.hosts = {}
        # Maps file-descriptors to sessions.
        self.sessions = {}
        # Maps host-names to sets of file-descriptors
        # NOTE(review): that mapping is actually kept in self.connections
        # (see register_session); self.socketmap is only handed to Trigger.
        self.socketmap = {}
        self.connections = {}
        self.bPollRun = False
        self.work_queue = None
        # Descriptors the select() loop in run() watches.
        self.descriptors = set()
        # Wake-up channel: its fd is selected on so other threads can
        # interrupt the poll via notify_poll().
        self.trigger_channel = Trigger(self.socketmap)
        self.descriptors.add(self.trigger_channel.fileno())
        msg = '%s Done.' % pfx
        msglog.log('broadway', msglog.types.INFO, msg)
        super(RNA_Scan_Thread, self).__init__(name=name)

    def start(self):
        # (Re)size the worker pool, then let Thread.start() invoke run().
        self.bPollRun = True
        if not self.work_queue:
            self.work_queue = ThreadPool(5)
        else:
            self.work_queue.resize(5)
        # Superclass calls run() on separate thread:
        return super(RNA_Scan_Thread, self).start()

    def stop(self):
        # Ask the poll loop to exit, wake it, and drain the worker pool.
        self.bPollRun = False
        self.notify_poll()
        self.work_queue.resize(0)
        return super(RNA_Scan_Thread, self).stop()

    def is_running(self):
        return self.isAlive()

    def run(self):
        # Main poll loop: select() over registered descriptors plus the
        # command (wake-up) fd, dispatching readable sessions to the pool.
        pfx = 'RNA_Scan_Thread.run:'
        cmdfd = self.trigger_channel.fileno()
        # Bind hot-loop callables to locals.
        enqueue = self.work_queue.queue_noresult
        handle_session = self.handle_session_input
        clear_notifications = self.clear_notifications
        while self.bPollRun:
            try:
                descriptors = self.descriptors.copy()
                if cmdfd not in descriptors:
                    msglog.warn("Command channel FD removed!")
                    descriptors.add(cmdfd)
                r, w, e = select.select(descriptors, [], descriptors, 1)
                for fd in e:
                    # An exceptional condition on the command channel is fatal.
                    if fd == cmdfd:
                        message = "%s internal polling error. Must restart."
                        msglog.error(message % pfx)
                        raise TypeError("command channel OOB data")
                    try:
                        self.unregister_session(self.get_session(fd))
                    except:
                        msglog.warn("%s I/O event handling error." % pfx)
                        msglog.exception(prefix="handled")
                for fd in r:
                    if fd in self.descriptors:
                        try:
                            if fd == cmdfd:
                                clear_notifications()
                            else:
                                # Remove the fd while a worker owns it so the
                                # same socket is not dispatched twice.
                                self.descriptors.discard(fd)
                                enqueue(handle_session, fd)
                        except:
                            msglog.warn("%s I/O event handling error." % pfx)
                            msglog.exception(prefix="handled")
            except:
                msglog.warn("%s loop error." % pfx)
                msglog.exception(prefix="handled")
        msglog.inform("%s exiting." % pfx)

    def notify_poll(self):
        # Wake the select() loop.
        self.trigger_channel.trigger_event()

    def clear_notifications(self):
        # Drain pending wake-up data from the command channel.
        self.trigger_channel.handle_read()

    def register_session(self, session):
        # Track a newly connected session keyed by its socket fd.
        # Returns True on success, False if the socket is unusable.
        try:
            fd = session.socket.fileno()
            host, port = session.socket.getpeername()
        except:
            msglog.warn("Failed to add session: %r" % session)
            msglog.exception(prefix="handled")
            registered = False
        else:
            self.hosts[fd] = host
            self.sessions[fd] = session
            self.connections.setdefault(host, set()).add(fd)
            self.descriptors.add(fd)
            self.notify_poll()
            registered = True
        return registered

    def unregister_session(self, session):
        # Drop all bookkeeping for a session and disconnect it.
        # Returns True on success, False if the socket fd is gone.
        try:
            fd = session.socket.fileno()
        except:
            unregistered = False
        else:
            host = self.hosts.pop(fd, None)
            if host is not None:
                connections = self.connections.get(host, None)
                if connections is not None:
                    connections.discard(fd)
                    if not connections:
                        self.connections.pop(host, None)
            self.sessions.pop(fd, None)
            if fd in self.descriptors:
                self.descriptors.discard(fd)
                self.notify_poll()
            try:
                session.disconnect()
            except:
                msglog.exception(prefix="handled")
            unregistered = True
        return unregistered

    def get_session(self, fd):
        # Accept either a raw fd or any object exposing fileno().
        if not isinstance(fd, int):
            fd = fd.fileno()
        return self.sessions[fd]

    def get_host(self, fd):
        # Accept either a raw fd or any object exposing fileno().
        if not isinstance(fd, int):
            fd = fd.fileno()
        return self.hosts[fd]

    def get_connections(self, host):
        return self.connections.get(host, [])

    def has_connections(self, host):
        return host in self.connections

    def get_sessions(self, host):
        connections = self.get_connections(host)
        return map(self.get_session, connections)

    def handle_session_input(self, fd):
        # Worker-side handler: read one RNA request from the session,
        # execute it, and send the result back.
        pfx = 'RNA_Scan_Thread.handle_session_input'
        unregister = False
        session = self.get_session(fd)
        try:
            rna_header = RNAHeader(session.socket)
            protocol = _protocol_factory(rna_header.protocol)
            protocol.setup(session, rna_header)
            command = protocol.recv_command()
            result = _invoke_command(command)
            protocol.send_result(command, result)
        except EInvalidMessage, error:
            # An empty message means the peer closed the connection.
            if not error.message:
                unregister = True
                msglog.warn("Removing client-closed session: %s" % session)
            else:
                unregister = False
                msglog.exception(prefix="handled")
        except SocketTimeoutError:
            unregister = False
            # NOTE(review): 'message' is not defined in this method — this
            # looks like a typo for 'msglog' and would raise NameError when
            # a SocketTimeoutError occurs; left untouched (doc-only pass).
            # The method also appears truncated here: 'unregister' is set
            # but never acted on — confirm against the upstream source.
            message.exception(prefix="handled")
def CheckLabel(configFile, label, product, OS):
    """Return True when the build label recorded for *product*/*OS* matches *label*.

    Points the Trigger module at *configFile* first, since
    GetVersionFromJson reads Trigger.configFilePath.
    """
    Trigger.configFilePath = configFile
    return Trigger.GetVersionFromJson(product, OS) == label
class MasterDaemon: """ Main class of the master daemon It provides communication between master and slave boxes and a part of the database management """ def __init__(self, log_flag): self.logger = Logger(log_flag, LOG_FILE); self.logger.info('Started Domoleaf Master Daemon'); self.d3config = {}; self.aes_slave_keys = {}; self.aes_master_key = None self.connected_clients = {}; self.sql = MasterSql(); self._parser = DaemonConfigParser(MASTER_CONF_FILE); self.db_username = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_USER_ENTRY); self.db_passwd = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_PASSWORD_ENTRY); self.db_dbname = self._parser.getValueFromSection(MASTER_CONF_MYSQL_SECTION, MASTER_CONF_MYSQL_DB_NAME_ENTRY); self.get_aes_slave_keys(0); self.reload_camera(None, None, 0); self._scanner = Scanner(); self.hostlist = []; self.hostlist.append(Host('', '127.0.0.1', socket.gethostname().upper())); self.knx_manager = KNXManager(self.aes_slave_keys); self.enocean_manager = EnOceanManager(self.aes_slave_keys); self.reload_d3config(None, None, 0); self.trigger = Trigger(self); self.scenario = Scenario(self); self.schedule = Schedule(self); self.calcLogs = CalcLogs(self); self.functions = { 1 : self.knx_manager.send_knx_write_short_to_slave, 2 : self.knx_manager.send_knx_write_long_to_slave, 3 : self.knx_manager.send_knx_write_speed_fan, 4 : self.knx_manager.send_knx_write_temp, 5 : IP_IRManager().send_to_gc, 6 : self.knx_manager.send_on, 7 : self.knx_manager.send_to_thermostat, 8 : self.knx_manager.send_clim_mode, 9 : HttpReq().http_action, 10 : self.upnp_audio, 11 : self.knx_manager.send_knx_write_percent, 12 : self.knx_manager.send_off, 13 : self.knx_manager.send_knx_write_short_to_slave_r, }; self.data_function = { DATA_MONITOR_KNX : self.monitor_knx, DATA_MONITOR_IP : self.monitor_ip, DATA_MONITOR_ENOCEAN : self.monitor_enocean, DATA_MONITOR_BLUETOOTH : self.monitor_bluetooth, DATA_KNX_READ : 
self.knx_read, DATA_KNX_WRITE_S : self.knx_write_short, DATA_KNX_WRITE_L : self.knx_write_long, DATA_SEND_TO_DEVICE : self.send_to_device, DATA_CRON_UPNP : self.cron_upnp, DATA_SEND_MAIL : self.send_mail, DATA_MODIF_DATETIME : self.modif_datetime, DATA_CHECK_SLAVE : self.check_slave, DATA_RELOAD_CAMERA : self.reload_camera, DATA_RELOAD_D3CONFIG : self.reload_d3config, DATA_BACKUP_DB_CREATE_LOCAL : self.backup_db_create_local, DATA_BACKUP_DB_REMOVE_LOCAL : self.backup_db_remove_local, DATA_BACKUP_DB_LIST_LOCAL : self.backup_db_list_local, DATA_BACKUP_DB_RESTORE_LOCAL : self.backup_db_restore_local, DATA_CHECK_USB : self.check_usb, DATA_BACKUP_DB_CREATE_USB : self.backup_db_create_usb, DATA_BACKUP_DB_REMOVE_USB : self.backup_db_remove_usb, DATA_BACKUP_DB_LIST_USB : self.backup_db_list_usb, DATA_BACKUP_DB_RESTORE_USB : self.backup_db_restore_usb, DATA_SMARTCMD_LAUNCH : self.smartcmd_launch, DATA_TRIGGERS_LIST_UPDATE : self.triggers_list_update, DATA_SCHEDULES_LIST_UPDATE : self.schedules_list_update, DATA_SCENARIOS_LIST_UPDATE : self.scenarios_list_update, DATA_CHECK_ALL_SCHEDULES : self.check_schedules, DATA_CALC_LOGS : self.launch_calc_logs, DATA_CHECK_UPDATES : self.check_updates, DATA_UPDATE : self.update, DATA_SEND_ALIVE : self.send_request, DATA_SEND_TECH : self.send_tech, DATA_SEND_INTERFACES : self.send_interfaces, DATA_SHUTDOWN_D3 : self.shutdown_d3, DATA_REBOOT_D3 : self.reboot_d3, DATA_WIFI_UPDATE : self.wifi_update, DATA_REMOTE_SQL : self.remote_sql }; def get_aes_slave_keys(self, db): """ Get the secretkeys of each slave daemon stored in database """ query = "SELECT serial, secretkey FROM daemon"; res = self.sql.mysql_handler_personnal_query(query, db); self_hostname = socket.gethostname(); for r in res: if SLAVE_NAME_PREFIX in r[0] or 'MD3' in r[0]: self.aes_slave_keys[r[0]] = r[1]; elif self_hostname == r[0]: self.aes_slave_keys[r[0]] = r[1]; self.aes_master_key = r[1]; def stop(self): """ Stops the daemon and closes sockets """ flag = False; while not 
flag: flag = True; for client in self.connected_clients.values(): flag = False; client.close(); break; self.slave_connection.close(); sys.exit(0); def run(self): """ Initialization of the connections and accepting incomming communications """ self.slave_connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM); self.cmd_connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM); self.slave_connection.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1); self.cmd_connection.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1); self.slave_connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1); self.cmd_connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1); s_port = self._parser.getValueFromSection(MASTER_CONF_LISTEN_SECTION, MASTER_CONF_LISTEN_PORT_SLAVE_ENTRY); c_port = self._parser.getValueFromSection(MASTER_CONF_LISTEN_SECTION, MASTER_CONF_LISTEN_PORT_CMD_ENTRY); if not s_port: frameinfo = getframeinfo(currentframe()); self.logger.error('in run: No slave listening port defined in '+MASTER_CONF_FILE); sys.exit(1); if not c_port: frameinfo = getframeinfo(currentframe()); self.logger.error('in run: No command listening port defined in '+MASTER_CONF_FILE); sys.exit(1); self.slave_connection.bind(('', int(s_port))); self.slave_connection.listen(MAX_SLAVES); self.cmd_connection.bind(('', int(c_port))); self.cmd_connection.listen(MAX_CMDS); self.loop(); def loop(self): """ Main loop. Waits for new connections. 
""" self.run = True; while self.run: try: rlist, wlist, elist = select.select([self.slave_connection], [], [], SELECT_TIMEOUT); for connection in rlist: self.accept_new_slave_connection(connection); rlist, wlist, elist = select.select([self.cmd_connection], [], [], SELECT_TIMEOUT); for connection in rlist: self.accept_new_cmd_connection(connection); except KeyboardInterrupt as e: frameinfo = getframeinfo(currentframe()); self.logger.info('in loop: Keyboard interrupt: leaving program'); print("[ MASTER DAEMON ",frameinfo.filename,":",str(frameinfo.lineno)," ]: Keyboard Interrupt"); self.stop(); sys.exit(0); except ValueError as e: frameinfo = getframeinfo(currentframe()); self.logger.error('in loop: Value error: '+str(e)); print("[ MASTER DAEMON ",frameinfo.filename,":",str(frameinfo.lineno),"]: Value Error"); print(e); pass; def accept_new_cmd_connection(self, connection): """ Gets new domoleaf connections and threads the treatment. """ new_connection, addr = connection.accept(); r = CommandReceiver(new_connection, self); r.start(); def accept_new_slave_connection(self, connection): """ Gets new slave connections and threads the treatment. """ new_connection, addr = connection.accept(); myname = socket.gethostname(); try: name = socket.gethostbyaddr(addr[0])[0] except socket.error as serr: name = 'localhost' if name == 'localhost': name = myname name = name.split('.')[0]; r = SlaveReceiver(new_connection, name, self); r.start(); def parse_data(self, data, connection, daemon_id, db): """ Once data are received whether from domoleaf or slave, the function of the packet_type in data is called. 
""" json_obj = json.JSONDecoder().decode(data); json_obj['daemon_id'] = daemon_id; if json_obj['packet_type'] in self.data_function.keys(): self.data_function[json_obj['packet_type']](json_obj, connection, db); else: frameinfo = getframeinfo(currentframe()); def check_updates(self, json_obj, connection, db): query = 'SELECT configuration_value FROM configuration WHERE configuration_id=4'; actual_version = self.sql.mysql_handler_personnal_query(query, db); if not actual_version: self.logger.error("CHECK_UPDATE : No Master Version"); return; query = 'UPDATE configuration SET configuration_value="" WHERE configuration_id=13'; self.sql.mysql_handler_personnal_query(query, db); p = call(['dpkg', '--configure', '-a']) p = Popen(['apt-get', 'update'], stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=-1); output, error = p.communicate(); p = Popen(['apt-show-versions', '-u', 'domomaster'], stdin=PIPE, stdout=PIPE, stderr=PIPE, bufsize=-1); output, error = p.communicate(); if not p.returncode: tab = output.decode("utf-8").split(" "); version = tab[-1].rsplit("\n")[0]; else: version = actual_version[0][0]; query = ''.join(['UPDATE configuration SET configuration_value="', version, '" WHERE configuration_id=13']); self.sql.mysql_handler_personnal_query(query, db); def update(self, json_obj, connection, db): call(['apt-get', 'update']); p = Popen("DEBIAN_FRONTEND=noninteractive apt-get install domomaster domoslave -y ", shell=True, stdin=None, stdout=False, stderr=False,executable="/bin/bash"); output, error = p.communicate(); hostname = socket.gethostname(); if '.' 
in hostname: hostname = hostname.split('.')[0]; version = os.popen("dpkg-query -W -f='${Version}\n' domomaster").read().split('\n')[0]; query = ''.join(['UPDATmon SET version="', version, '" WHERE name="', hostname, '"' ]); self.sql.mysql_handler_personnal_query(query, db); query = ''.join(['UPDATE configuration SET configuration_value="', version, '" WHERE configuration_id=4']); self.sql.mysql_handler_personnal_query(query, db); json_obj['data'].append(hostname); port = self._parser.getValueFromSection('connect', 'port'); for host in self.hostlist: if (host._Hostname.startswith('MD3') or host._Hostname.startswith('SD3')) and host._Hostname not in json_obj['data']: sock = socket.create_connection((host._IpAddr, port)); json_str = json.JSONEncoder().encode(json_obj); sock.send(bytes(json_str, 'utf-8')); data = sock.recv(4096); decrypt_IV = data[:16].decode(); decode_obj = AES.new(self.aes_master_key, AES.MODE_CBC, decrypt_IV); data2 = decode_obj.decrypt(data[16:]).decode(); version = data2['new_version']; query = ''.join(['UPDATE daemon SET version="', version, '" WHERE name="', host._Hostname, '"']); self.sql.mysql_handler_personnal_query(query, db); sock.close(); def backup_db_create_local(self, json_obj, connection, db): path = '/etc/domoleaf/sql/backup/'; filename = 'domoleaf_backup_'; t = str(time.time()); if '.' in t: t = t.split('.')[0]; filename += t+'.sql'; os.system("mysqldump --defaults-file=/etc/mysql/debian.cnf domoleaf > "+path+filename); os.system('cd '+path+' && tar -czf '+filename+'.tar.gz'+' '+filename); os.system('rm '+path+filename); def backup_db_remove_local(self, json_obj, connection, db): filename = ''.join(['/etc/domoleaf/sql/backup/domoleaf_backup_', str(json_obj['data']), '.sql.tar.gz']); if str(json_obj['data'][0]) == '.' or str(json_obj['data'][0]) == '/': self.logger.error('The filename is corrupted. 
Aborting database file removing.') return; try: os.stat(filename); except Exception as e: try: filename = filename.split('.tar.gz')[0]; os.stat(filename); except Exception as e: self.logger.error("The database file to remove does not exists.") self.logger.error(e) return; os.remove(filename); def backup_db_list_local(self, json_obj, connection, db): json_obj = []; append = json_obj.append; backup_list = os.listdir('/etc/domoleaf/sql/backup/') for f in backup_list: s = os.stat('/etc/domoleaf/sql/backup/'+f); if '.sql' in f: g = f.split('.sql')[0]; append({"name": g, "size": s.st_size}); json_sorted = sorted(json_obj, key=lambda json_obj: json_obj['name'], reverse=True); json_str = json.JSONEncoder().encode(json_sorted); connection.send(bytes(json_str, 'utf-8')); def backup_db_restore_local(self, json_obj, connection, db): path = '/etc/domoleaf/sql/backup/'; filename = ''.join(['domoleaf_backup_', str(json_obj['data']), '.sql.tar.gz']); if json_obj['data'][0] == '.' or json_obj['data'][0] == '/': self.logger.error('The filename is corrupted. 
Aborting database restoring.') return; try: os.stat(path+filename); os.system('cd '+path+' && tar -xzf '+filename); os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < '+path+filename.split('.tar.gz')[0]); os.system('rm '+path+filename.split('.tar.gz')[0]); return; except Exception as e: try: filename = filename.split('.tar.gz')[0]; os.stat(path+filename); except Exception as e: self.logger.error("The database file to restore does not exists."); self.logger.error(e); return; os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < '+path+filename); def check_usb(self, json_obj, connection, db): try: sdx1 = glob.glob('/dev/sd?1')[0]; except Exception as e: return; if not (os.path.exists(sdx1)): json_obj = 0; else: json_obj = 1; json_str = json.JSONEncoder().encode(json_obj); connection.send(bytes(json_str, 'utf-8')); def backup_db_list_usb(self, json_obj, connection, db): json_obj = []; append = json_obj.append sdx1 = glob.glob('/dev/sd?1')[0]; if not (os.path.exists(sdx1)): return; os.system('mount '+sdx1+' /etc/domoleaf/mnt'); os.system('mkdir -p /etc/domoleaf/mnt/backup'); backup_list = os.listdir('/etc/domoleaf/mnt/backup/') for f in backup_list: s = os.stat('/etc/domoleaf/mnt/backup/'+f); if '.sql' in f: g = f.split('.sql')[0]; append({"name": g, "size": s.st_size}); os.system('umount /etc/domoleaf/mnt'); json_sorted = sorted(json_obj, key=lambda json_obj: json_obj['name'], reverse=True); json_str = json.JSONEncoder().encode(json_sorted); connection.send(bytes(json_str, 'utf-8')); def backup_db_remove_usb(self, json_obj, connection, db): filename = ''.join(['/etc/domoleaf/mnt/backup/domoleaf_backup_', str(json_obj['data']), '.sql.tar.gz']); if str(json_obj['data'][0]) == '.' or str(json_obj['data'][0]) == '/': self.logger.error('The filename is corrupted. 
Aborting database file removing.') return; sdx1 = glob.glob('/dev/sd?1')[0]; if not (os.path.exists(sdx1)): return; os.system('mount '+sdx1+' /etc/domoleaf/mnt'); path = '/etc/domoleaf/mnt/backup/'; try: os.stat(filename); except Exception as e: try: filename = filename.split('.tar.gz')[0]; os.stat(filename); except Exception as e: self.logger.error("The database file to remove does not exists.") self.logger.error(e) os.system('umount /etc/domoleaf/mnt'); return; os.remove(filename); os.system('umount /etc/domoleaf/mnt'); def backup_db_restore_usb(self, json_obj, connection, db): path = '/etc/domoleaf/mnt/backup/'; filename = ''.join(['domoleaf_backup_', str(json_obj['data']), '.sql']); if json_obj['data'][0] == '.' or json_obj['data'][0] == '/': self.logger.error('The filename is corrupted. Aborting database restoring.') return; sdx1 = glob.glob('/dev/sd?1')[0]; if not (os.path.exists(sdx1)): return; os.system('mount '+sdx1+' /etc/domoleaf/mnt'); try: os.stat(path+filename); os.system('cp '+path+filename+' /tmp/ && umount /etc/domoleaf/mnt && cd /tmp/'); os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < /tmp/'+filename); os.remove('/tmp/'+filename); return; except Exception as e: try: filename += '.tar.gz'; os.stat(path+filename); os.system('cp '+path+filename+' /tmp/ && umount /etc/domoleaf/mnt && cd /tmp/ && tar -xzf '+filename); except Exception as e: self.logger.error("The database file to restore does not exists."); self.logger.error(e); os.system('umount /etc/domoleaf/mnt'); return; os.system('umount /etc/domoleaf/mnt'); os.system('mysql --defaults-file=/etc/mysql/debian.cnf domoleaf < /tmp/'+filename.split('.tar.gz')[0]); os.remove('/tmp/'+filename); os.remove('/tmp/'+filename.split('.tar.gz')[0]); def backup_db_create_usb(self, json_obj, connection, db): sdx1 = glob.glob('/dev/sd?1')[0]; if not (os.path.exists(sdx1)): return; os.system('mount '+sdx1+' /etc/domoleaf/mnt'); path = '/etc/domoleaf/mnt/backup/'; filename = 'domoleaf_backup_'; 
os.system('mkdir -p '+path); t = str(time.time()); if '.' in t: t = t.split('.')[0]; filename += t+'.sql'; os.system("mysqldump --defaults-file=/etc/mysql/debian.cnf domoleaf > "+path+filename); os.system('cd '+path+' && tar -czf '+filename+'.tar.gz'+' '+filename); os.system('rm '+path +filename); os.system('umount /etc/domoleaf/mnt'); def monitor_knx(self, json_obj, connection, db): """ Callback called each time a monitor_knx packet is received. Updates room_device_option values in the database and check scenarios. """ daemon_id = self.sql.update_knx_log(json_obj, db); doList = self.knx_manager.update_room_device_option(daemon_id, json_obj, db); if doList: self.scenario.setValues(self.get_global_state(db), self.trigger, self.schedule, connection, doList); self.scenario.start(); connection.close(); def knx_write_short(self, json_obj, connection, db): """ Callback called each time a knx_write_short packet is received. Updates room_device_option values in the database. """ daemons = self.sql.get_daemons(db); slave_name = self.get_slave_name(json_obj, daemons); if slave_name is None: connection.close(); return None; dev = {} dev["addr_dst"] = json_obj['data']['addr'] slave_name = slave_name.split('.')[0]; self.knx_manager.send_knx_write_short_to_slave(json_obj, dev, slave_name); connection.close(); return None; def knx_write_long(self, json_obj, connection, db): """ Callback called each time a knx_write_long packet is received. Updates room_device_option values in the database. """ daemons = self.sql.get_daemons(db); slave_name = self.get_slave_name(json_obj, daemons); if slave_name is None: connection.close(); return None; dev = {} dev["addr_dst"] = json_obj['data']['addr'] slave_name = slave_name.split('.')[0]; self.knx_manager.send_knx_write_long_to_slave(json_obj, dev, slave_name); connection.close(); return None; def knx_read(self, json_obj, connection, db): """ Callback called each time a knx_read packet is received. 
""" daemons = self.sql.get_daemons(db); slave_name = self.get_slave_name(json_obj, daemons); if slave_name is None: return None; slave_name = slave_name.split('.')[0]; self.knx_manager.send_knx_read_request_to_slave(slave_name, json_obj); connection.close(); def monitor_ip(self, json_obj, connection, db): """ Callback called each time a monitor_ip packet is received. A new local network scan is performed and the result stored in the database """ self.scanner.scan(); self.sql.insert_hostlist_in_db(self.scanner._HostList, db); self.hostlist = self.scanner._HostList; connection.close(); def monitor_bluetooth(self, json_obj, connection, db): """ TODO """ connection.close(); return None; def monitor_enocean(self, json_obj, connection, db): """ Callback called each time a monitor_enocean packet is received. Stores the data in enocean_log table. """ daemon_id = self.sql.update_enocean_log(json_obj, db); doList = self.enocean_manager.update_room_device_option(daemon_id, json_obj, db); connection.close(); if doList: self.scenario.setValues(self.get_global_state(db), self.trigger, self.schedule, connection, doList); self.scenario.start(); return None; def send_to_device(self, json_obj, connection, db): """ Retrieves the good device in the database and builds the request to send. 
""" hostname = ''; dm = DeviceManager(int(json_obj['data']['room_device_id']), int(json_obj['data']['option_id']), DEBUG_MODE); dev = dm.load_from_db(db); if dev is None: connection.close(); return ; if 'daemon_name' in dev: for host in self.hostlist: if dev['daemon_name'] == host._Hostname: hostname = host._Hostname; break; function_writing = int(dev['function_writing']); if (function_writing > 0): try: self.functions[function_writing](json_obj, dev, hostname); except Exception as e: self.logger.error(e); connection.close(); def upnp_audio(self, json_obj, dev, hostname): cmd = UpnpAudio(dev['addr'], int(dev['plus1'])); cmd.action(json_obj); def get_ip_ifname(self, ifname): """ Retrieves network interface name from IP address. """ s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); try: res = socket.inet_ntoa(fcntl.ioctl(s.fileno(), 0x8915, struct.pack('256s', bytes(ifname, 'utf-8')))[20:24]); return res; except Exception as e: frameinfo = getframeinfo(currentframe()); self.logger.error('in get_ip_ifname: '+str(e)); return None; def cron_upnp(self, json_obj, connection, db): """ Callback called each time a cron_upnp packet is received. """ local_ip = self.get_ip_ifname("eth0"); if local_ip is None: connection.close(); return None; query = "SELECT configuration_id, configuration_value FROM configuration"; res = self.sql.mysql_handler_personnal_query(query); actions = json_obj['data']; for act in actions: if act['action'] == 'open': for r in res: if int(r[0]) == int(act['configuration_id']): if int(r[0]) == 1: call(["upnpc", "-a", local_ip, str(r[1]), "80", act['protocol']]); elif int(r[0]) == 2: call(["upnpc", "-a", local_ip, str(r[1]), "443", act['protocol']]); elif act['action'] == 'close': for r in res: if int(r[0]) == int(act['configuration_id']): call(["upnpc", "-d", str(r[1]), act['protocol']]); def reload_camera(self, json_obj, connection, db): """ Generation of the file devices.conf located in /etc/domoleaf by default. 
""" camera_file = open(CAMERA_CONF_FILE, 'w'); query = "SELECT room_device_id, addr, plus1 FROM room_device WHERE protocol_id = 6"; res = self.sql.mysql_handler_personnal_query(query, db); for r in res: ip = str(r[1]); if r[1] and utils.is_valid_ip(ip): camera_file.write("location /device/"+str(r[0])); camera_file.write("/ {\n") camera_file.write("\tproxy_buffering off;\n") camera_file.write("\tproxy_pass http://"+ip); if str(r[2]).isdigit(): camera_file.write(":"+str(r[2])+"/;\n}\n\n"); else: camera_file.write(":/;\n}\n\n"); camera_file.close(); call(["service", "nginx", "restart"]); def reload_d3config(self, json_obj, connection, db): """ Loads port config. Reading in database and storing. """ query = "SELECT configuration_id, configuration_value FROM configuration"; res = self.sql.mysql_handler_personnal_query(query, db); for r in res: self.d3config[str(r[0])] = r[1]; def check_slave(self, json_obj, connection, db): """ Asks "check_slave" to the slave described in json_obj and waits for answer. """ query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]); res = self.sql.mysql_handler_personnal_query(query, db); if res is None or not res: self.logger.error('in check_slave: No daemon for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; elif len(res) > 1: self.logger.error('in check_slave: Too much daemons for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; hostname = res[0][0]; self_hostname = socket.gethostname(); if hostname == self_hostname: ip = '127.0.0.1'; else: ip = ''; for h in self.hostlist: if hostname in h._Hostname.upper(): ip = h._IpAddr; if not ip: self.logger.error('in check_slave: '+hostname+' not in hostlist. Try perform network scan again.'); connection.close(); return ; port = self._parser.getValueFromSection('connect', 'port'); sock = socket.create_connection((ip, port)); if '.' 
in self_hostname: self_hostname = self_hostname.split('.')[0]; aes_IV = AESManager.get_IV(); aes_key = self.get_secret_key(hostname); obj_to_send = ''.join(['{"packet_type": "check_slave", "sender_name": "', self_hostname, '"}']); encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV); spaces = 16 - len(obj_to_send) % 16; sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' '))); rlist, wlist, elist = select.select([sock], [], [], SELECT_TIMEOUT * 10); val = '0'; version = ''; interface_knx = ''; interface_enocean = ''; data = sock.recv(4096); if data: decrypt_IV = data[:16].decode(); decode_obj = AES.new(res[0][1], AES.MODE_CBC, decrypt_IV); data2 = decode_obj.decrypt(data[16:]).decode(); resp = json.JSONDecoder().decode(data2); if str(self.aes_slave_keys[hostname]) == str(resp['aes_pass']): val = '1'; version = resp['version']; interface_knx = resp['interface_knx']; interface_enocean = resp['interface_enocean']; connection.send(bytes(version, 'utf-8')); connection.close(); query = ''.join(['UPDATE daemon SET validation=', val, ', version="', version, '" WHERE serial="', hostname, '"']); self.sql.mysql_handler_personnal_query(query, db); query = ''.join(['UPDATE daemon_protocol SET interface="', interface_knx, '" WHERE daemon_id="', str(json_obj['data']['daemon_id']), '" AND protocol_id="1"']); self.sql.mysql_handler_personnal_query(query, db); query = ''.join(['UPDATE daemon_protocol SET interface="', interface_enocean, '" WHERE daemon_id="', str(json_obj['data']['daemon_id']), '" AND protocol_id="2"']); self.sql.mysql_handler_personnal_query(query, db); sock.close(); def get_secret_key(self, hostname): """ Retrieves the secretkey of 'hostname' in the database. 
""" query = ''.join(['SELECT serial, secretkey FROM daemon WHERE serial = \'', hostname, '\'']); res = self.sql.mysql_handler_personnal_query(query); for r in res: if r[0] == hostname: return str(r[1]); def send_mail(self, json_obj, connection, db): """ Callback called each time a send_mail packet is received. The parameters are stored in 'json_obj'. """ try: from_addr = formataddr((self.d3config['6'], self.d3config['5'])); host = self.d3config['7']; secure = self.d3config['8'] port = self.d3config['9']; username = self.d3config['10']; password = self.d3config['11']; msg = MIMEMultipart(); mdr = json_obj['data']['object']; msg['Subject'] = json_obj['data']['object']; msg['From'] = from_addr; msg['To'] = json_obj['data']['destinator']; msg.attach(MIMEText(json_obj['data']['message'])); server = smtplib.SMTP(host, port); if (secure == 2): server.ehlo(); server.starttls(); server.ehlo(); if not username and not password: server.login(self.d3config['5'], username); server.sendmail(from_addr, json_obj['data']['destinator'], msg.as_string()); server.quit(); connection.close(); except Exception as e: self.logger.error('Error for sending mail'); self.logger.error(e); connection.send(bytes('Error', 'utf-8')); connection.close(); def modif_datetime(self, json_obj, connection, db): os.system('date --set '+json_obj['data'][0]); os.system('date --set '+json_obj['data'][1]); def get_slave_name(self, json_obj, daemons): """ Retrieves the hostname of the daemon described by 'json_obj' in the 'daemons' list. 
""" daemon_found = False; slave_name = ''; for d in daemons: if int(json_obj['data']['daemon']) == int(d[0]): daemon_found = True; slave_name = str(d[2]); break; if daemon_found is False: frameinfo = getframeinfo(currentframe()); self.logger.error('in get_slave_name: '+str(json_obj['data']['daemon'])); return None; if str(json_obj['data']['addr']).count('/') != 2: frameinfo = getframeinfo(currentframe()); self.logger.error('in get_slave_name: '+str(json_obj['data']['addr'])); return None; return slave_name; def reload_web_server(self): """ Call "service reload nginx" """ self.logger.debug('Reloading web server...'); call(["service", "nginx", "reload"]); self.logger.debug('[ OK ] Done reloading web server.'); def smartcmd_launch(self, json_obj, connection, db): s = Smartcommand(self, int(json_obj['data'])) s.setValues(connection); s.start(); def triggers_list_update(self, json_obj, connection, db): self.trigger.update_triggers_list(db); def schedules_list_update(self, json_obj, connection, db): self.schedule.update_schedules_list(db); def scenarios_list_update(self, json_obj, connection, db): self.scenario.update_scenarios_list(db); def check_schedules(self, json_obj, connection, db): self.schedule.check_all_schedules(connection); def launch_calc_logs(self, json_obj, connection, db): try: self.calcLogs.sort_logs(connection, db); except Exception as e: self.logger.error(e); def get_global_state(self, db): query = 'SELECT room_device_id, option_id, opt_value FROM room_device_option'; res = self.sql.mysql_handler_personnal_query(query, db); filtered = []; append = filtered.append; for elem in res: if elem[2]: append(elem); global_state = []; if filtered: global_state = filtered; else: global_state = ''; return global_state; def send_tech(self, json_obj, connection, db): query = 'SELECT configuration_value FROM configuration WHERE configuration_id=1'; http = self.sql.mysql_handler_personnal_query(query, db); query = 'SELECT configuration_value FROM configuration WHERE 
configuration_id=2'; ssl = self.sql.mysql_handler_personnal_query(query, db); json_obj['info']['http'] = http[0][0]; json_obj['info']['ssl'] = ssl[0][0]; self.send_request(json_obj, connection, db) def send_request(self, json_obj, connection, db): if self._parser.getValueFromSection('greenleaf', 'commercial') == "1": admin_addr = self._parser.getValueFromSection('greenleaf', 'admin_addr') hostname = socket.gethostname() GLManager.SendRequest(str(json_obj), admin_addr, self.get_secret_key(hostname)) def send_interfaces(self, json_obj, connection, db): query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]); res = self.sql.mysql_handler_personnal_query(query, db); if res is None or not res: self.logger.error('in send_interfaces: No daemon for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; elif len(res) > 1: self.logger.error('in send_interfaces: Too much daemons for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; hostname = res[0][0]; ip = ''; for h in self.hostlist: if hostname in h._Hostname.upper(): ip = h._IpAddr; if not ip: self.logger.error('in send_interfaces: '+hostname+' not in hostlist. Try perform network scan again.'); connection.close(); return ; port = self._parser.getValueFromSection('connect', 'port'); sock = socket.create_connection((ip, port)); self_hostname = socket.gethostname(); if '.' 
in self_hostname: self_hostname = self_hostname.split('.')[0]; aes_IV = AESManager.get_IV(); aes_key = self.get_secret_key(hostname); obj_to_send = json.JSONEncoder().encode( { "packet_type": "send_interfaces", "sender_name": self_hostname, "interface_knx": json_obj['data']['interface_knx'], "interface_EnOcean": json_obj['data']['interface_EnOcean'], "interface_arg_knx": json_obj['data']['interface_arg_knx'], "interface_arg_EnOcean": json_obj['data']['interface_arg_EnOcean'], "daemon_knx": json_obj['data']['daemon_knx'] } ); encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV); spaces = 16 - len(obj_to_send) % 16; sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' '))); rlist, wlist, elist = select.select([sock], [], [], SELECT_TIMEOUT * 300); re = ''; data = sock.recv(4096); if data: decrypt_IV = data[:16].decode(); host = None; for h in self.hostlist: if h._IpAddr == ip: host = h; decode_obj = AES.new(res[0][1], AES.MODE_CBC, decrypt_IV); data2 = decode_obj.decrypt(data[16:]).decode(); resp = json.JSONDecoder().decode(data2); hostname = host._Hostname; if '.' in host._Hostname: hostname = host._Hostname.split('.')[0]; if str(self.aes_slave_keys[hostname]) == str(resp['aes_pass']): re = '1'; connection.send(bytes(re, 'utf-8')); connection.close(); sock.close(); def shutdown_d3(self, json_obj, connection, db): """ Asks "shutdown_d3" to the slave described in json_obj for shutdown daemon. 
""" query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]); res = self.sql.mysql_handler_personnal_query(query, db); if res is None or not res: self.logger.error('in shutdown_d3: No daemon for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; elif len(res) > 1: self.logger.error('in shutdown_d3: Too much daemons for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; hostname = res[0][0]; ip = ''; for h in self.hostlist: if hostname in h._Hostname.upper(): ip = h._IpAddr; if not ip: self.logger.error('in shutdown_d3: '+hostname+' not in hostlist. Try perform network scan again.'); connection.close(); return ; port = self._parser.getValueFromSection('connect', 'port'); sock = socket.create_connection((ip, port)); self_hostname = socket.gethostname(); if '.' in self_hostname: self_hostname = self_hostname.split('.')[0]; aes_IV = AESManager.get_IV(); aes_key = self.get_secret_key(hostname); obj_to_send = ''.join(['{"packet_type": "shutdown_d3", "sender_name": "', self_hostname, '"}']); encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV); spaces = 16 - len(obj_to_send) % 16; sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' '))); connection.close(); sock.close(); def reboot_d3(self, json_obj, connection, db): """ Asks "reboot_d3" to the slave described in json_obj for reboot daemon. 
""" query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]); res = self.sql.mysql_handler_personnal_query(query, db); if res is None or not res: self.logger.error('in reboot_d3: No daemon for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; elif len(res) > 1: self.logger.error('in reboot_d3: Too much daemons for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; hostname = res[0][0]; ip = ''; for h in self.hostlist: if hostname in h._Hostname.upper(): ip = h._IpAddr; if not ip: self.logger.error('in reboot_d3: '+hostname+' not in hostlist. Try perform network scan again.'); connection.close(); return ; port = self._parser.getValueFromSection('connect', 'port'); sock = socket.create_connection((ip, port)); self_hostname = socket.gethostname(); if '.' in self_hostname: self_hostname = self_hostname.split('.')[0]; aes_IV = AESManager.get_IV(); aes_key = self.get_secret_key(hostname); obj_to_send = ''.join(['{"packet_type": "reboot_d3", "sender_name": "', self_hostname, '"}']); encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV); spaces = 16 - len(obj_to_send) % 16; sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' '))); connection.close(); sock.close(); def wifi_update(self, json_obj, connection, db): """ Send "wifi_update" to the slave described in json_obj for update the wifi configuration. 
""" query = ''.join(["SELECT serial, secretkey FROM daemon WHERE daemon_id=", str(json_obj['data']['daemon_id'])]); res = self.sql.mysql_handler_personnal_query(query, db); if res is None or not res: self.logger.error('in wifi_update: No daemon for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; elif len(res) > 1: self.logger.error('in wifi_update: Too much daemons for id '+str(json_obj['data']['daemon_id'])); connection.close(); return ; hostname = res[0][0]; ip = ''; for h in self.hostlist: if hostname in h._Hostname.upper(): ip = h._IpAddr; if not ip: self.logger.error('in wifi_update: '+hostname+' not in hostlist. Try perform network scan again.'); connection.close(); return ; port = self._parser.getValueFromSection('connect', 'port'); sock = socket.create_connection((ip, port)); self_hostname = socket.gethostname(); if '.' in self_hostname: self_hostname = self_hostname.split('.')[0]; aes_IV = AESManager.get_IV(); aes_key = self.get_secret_key(hostname); obj_to_send = ''.join(['{"packet_type": "wifi_update", "sender_name": "', str(self_hostname), '", "ssid": "', str(json_obj['data']['ssid']), '", "password": "******", "security": "', str(json_obj['data']['security']), '", "mode": "', str(json_obj['data']['mode']), '"}']); encode_obj = AES.new(aes_key, AES.MODE_CBC, aes_IV); spaces = 16 - len(obj_to_send) % 16; sock.send(bytes(aes_IV, 'utf-8') + encode_obj.encrypt(obj_to_send + (spaces * ' '))); rlist, wlist, elist = select.select([sock], [], [], SELECT_TIMEOUT * 300); re = ''; for s in rlist: data = sock.recv(4096); if not data: continue; decrypt_IV = data[:16].decode(); host = None; for h in self.hostlist: if h._IpAddr == ip: host = h; decode_obj = AES.new(res[0][1], AES.MODE_CBC, decrypt_IV); data2 = decode_obj.decrypt(data[16:]).decode(); resp = json.JSONDecoder().decode(data2); hostname = host._Hostname; if '.' 
in host._Hostname: hostname = host._Hostname.split('.')[0]; if str(self.aes_slave_keys[hostname]) == str(resp['aes_pass']): re = '1'; connection.send(bytes(re, 'utf-8')); connection.close(); sock.close(); def remote_sql(self, json_obj, connection): """ Execute sql command from configurator """ db = MasterSql(); req = json_obj['data'].split(';'); for item in req: if item != '': db.mysql_handler_personnal_query(item); connection.close(); return;