def refresh_cache_data(self, args=None):
    """Run the configured cache scripts on every logged-in server and
    merge their parsed output into server_cache_data."""
    logger.debug('[timer] refresh cache data start...')
    server_dir = model_gate.settings_data.server_dir
    scripts = model_gate.settings_data.refresh_cache['scripts']
    server_cache_data = model_gate.server_cache_data.get_data()
    try:
        for ip, data in model_gate.login_state_data.get_data().items():
            ssh = data['SSH']
            for script in scripts:
                cmd = "{}/{}".format(server_dir, script)
                result = SSHUtil.exec_info(ssh, cmd, True)[0]
                result = result.split(Global.G_INNER_SPLIT)[1].strip()
                for line in result.split('\n'):
                    key, value = line.split(Global.G_CACHE_SPLIT)
                    key = key.strip()
                    value = value.strip().split()
                    logger.debug("modify cache: {}: {}".format(
                        ip, {key: value}))
                    server_cache_data.setdefault(ip, {})[key] = value
        model_gate.server_cache_data.set_data(server_cache_data)
    except Exception:
        logger.error("refresh_cache_data {}".format(
            traceback.format_exc()))
    logger.debug(
        '[timer] refresh cache data end, server_cache_data: {}'.format(
            server_cache_data))
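# Illustrative only: refresh_cache_data assumes each cache script prints a
# chunk delimited by Global.G_INNER_SPLIT whose second field contains one
# "key<G_CACHE_SPLIT>values" line per cache entry. The actual separators
# live in Global and are not shown in this file, so the sample below is an
# assumption, not the tool's real output format.
#
#   assumed raw output:  "...<G_INNER_SPLIT> cpu :: 12 34 56\nmem :: 70 80"
#   resulting entry:     server_cache_data['10.0.0.1'] == {
#                            'cpu': ['12', '34', '56'],
#                            'mem': ['70', '80'],
#                        }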
def json_parser(self, reload=False):
    """Parse dependence.json and settings.json and load them into the
    model layer; return False if either file fails to parse."""
    try:
        # Parse dependence.json
        _depend_data = JSONParser.parser(Global.G_DEPENDENCE_FILE)
        # Parse settings.json
        _setting_data = JSONParser.parser(Global.G_SETTINGS_FILE)
        # Initialize settings data
        model_gate.settings_data.parser(_setting_data)
        # Initialize UI dependence data
        model_gate.dependence_data.parser(_depend_data, reload)
    except Exception as e:
        model_gate.exception_data.set_data(e)
        logger.error(e)
        return False
    return True
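# For orientation: the settings fields read elsewhere in this module
# (server_dir, log_level, tool_alias, tool_version, keepalive_period,
# refresh_json_period, refresh_cache, refresh_file) suggest a settings.json
# roughly like the sketch below. The concrete values, and the assumption
# that the JSON keys match the attribute names, are illustrative guesses.
#
#   {
#       "server_dir": "/opt/tool",
#       "log_level": "DEBUG",
#       "tool_alias": "tool",
#       "tool_version": "1.0",
#       "keepalive_period": 30,
#       "refresh_json_period": 10,
#       "refresh_cache": {"period": 60, "scripts": ["cache.sh"]},
#       "refresh_file": {"period": 60, "scripts": ["collect.sh"]}
#   }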
def check_file(self):
    """Create the runtime directories and verify that the required
    resource files exist before the tool starts."""
    Global.G_PID_DIR = "{}\\{}".format(Global.G_RUN_DIR, Common.get_pid())
    Common.mkdir(Global.G_RUN_DIR)
    Common.mkdir(Global.G_DOWNLOAD_DIR)
    Common.mkdir(Global.G_PID_DIR)
    logger.info(Global.G_TEXT_LOGO)
    for path in [
        Global.G_RESOURCE_DIR,
        Global.G_DEPENDENCE_FILE,
        Global.G_SETTINGS_FILE,
        Global.G_SCRIPTS_DIR
    ]:
        if not Common.is_exists(path):
            model_gate.exception_data.set_data(
                "{} does not exist".format(path))
            logger.error("{} does not exist".format(path))
            return False
    return True
def refresh_file_data(self, args=None):
    """Run the configured file-refresh scripts on every logged-in server,
    then zip, download and unpack the server-side DOWNLOAD directory."""
    logger.debug('[timer] refresh file data start...')
    server_dir = model_gate.settings_data.server_dir
    scripts = model_gate.settings_data.refresh_file['scripts']
    server_download = Global.G_SERVER_DOWNLOAD % server_dir
    try:
        for ip, data in model_gate.login_state_data.get_data().items():
            ssh = data['SSH']
            local_download = "{}\\{}".format(Global.G_DOWNLOAD_DIR, ip)
            data_dir = "{}\\__FILE_DATA__".format(local_download)
            Common.mkdir(local_download)
            Common.mkdir(data_dir)
            # On the first run for this host, kill any processes started
            # by a previous session so each run uses the latest scripts.
            if ip not in self.is_first_run:
                for script in scripts:
                    SSHUtil.exec_ret(ssh, "killall {}".format(script), True)
                self.is_first_run[ip] = True
            # Run every refresh script in one remote command
            cmd = ''
            for script in scripts:
                cmd = "{0}\n{1}/{2}".format(cmd, server_dir, script)
            SSHUtil.exec_ret(ssh, cmd, True)
            # Compress the server-side DOWNLOAD directory
            cmd = 'cd {0} && zip refresh_file.zip *;chmod 777 *.zip'.format(
                server_download)
            SSHUtil.exec_ret(ssh, cmd, True)
            # Download the archive
            SSHUtil.download_file(
                ssh, "{}/refresh_file.zip".format(server_download),
                '{}\\refresh_file.zip'.format(local_download))
            # Finally, unpack it locally
            Common.unzip_file(
                '{}\\refresh_file.zip'.format(local_download), data_dir)
    except Exception as e:
        logger.error("RefreshTimer refresh_file_impl {}".format(str(e)))
    logger.debug('[timer] refresh file data end')
def refresh_json_data(self, args=None):
    """Reparse the JSON config files, log every changed field, and push
    the new settings, timer periods, and UI data to the application."""
    def record_change(key, last, curr):
        if last != curr:
            logger.info('[change] {} changed to {}'.format(key, curr))

    logger.debug('[timer] refresh json data start...')
    last_settings = deepcopy(model_gate.settings_data)
    last_widgets = deepcopy(model_gate.dependence_data.widget_data)
    last_trees = deepcopy(model_gate.dependence_data.tree_data)
    if not loader.json_parser(True):
        # Parsing failed: restore the previous data and bail out.
        model_gate.settings_data = deepcopy(last_settings)
        model_gate.dependence_data.widget_data = deepcopy(last_widgets)
        model_gate.dependence_data.tree_data = deepcopy(last_trees)
        del last_settings
        del last_widgets
        del last_trees
        return
    curr_settings = model_gate.settings_data
    curr_dependence = model_gate.dependence_data
    try:
        logger.change_level(curr_settings.log_level)
        record_change('log_level',
                      last_settings.log_level, curr_settings.log_level)
        record_change('tool_alias',
                      last_settings.tool_alias, curr_settings.tool_alias)
        record_change('tool_version',
                      last_settings.tool_version,
                      curr_settings.tool_version)
        record_change('keepalive_period',
                      self.ssh_timer.period,
                      curr_settings.keepalive_period)
        record_change('refresh_json_period',
                      self.json_timer.period,
                      curr_settings.refresh_json_period)
        record_change("refresh_cache['period']",
                      self.cache_timer.period,
                      curr_settings.refresh_cache['period'])
        record_change("refresh_cache['scripts']",
                      last_settings.refresh_cache['scripts'],
                      curr_settings.refresh_cache['scripts'])
        record_change("refresh_file['period']",
                      self.file_timer.period,
                      curr_settings.refresh_file['period'])
        record_change("refresh_file['scripts']",
                      last_settings.refresh_file['scripts'],
                      curr_settings.refresh_file['scripts'])
        self.ssh_timer.update_period(curr_settings.keepalive_period)
        self.json_timer.update_period(curr_settings.refresh_json_period)
        self.cache_timer.update_period(
            curr_settings.refresh_cache['period'])
        self.file_timer.update_period(curr_settings.refresh_file['period'])
        if last_settings.tool_alias != curr_settings.tool_alias or \
                last_settings.tool_version != curr_settings.tool_version:
            title = '{} v{}'.format(curr_settings.tool_alias,
                                    curr_settings.tool_version)
            model_gate.app_title_data.set_data((title, None))
            logger.info('[change] tool title changed to {}'.format(title))
        if last_trees != curr_dependence.tree_data:
            model_gate.app_trees_data.set_data(curr_dependence.tree_data)
            logger.info('[change] tool trees changed')
        if last_widgets != curr_dependence.widget_data:
            model_gate.app_widgets_data.set_data(
                curr_dependence.widget_data)
            logger.info('[change] tool widgets changed')
    except Exception:
        logger.error('Exception apply: {}'.format(traceback.format_exc()))
    del last_settings
    del last_widgets
    del last_trees
    logger.debug('[timer] refresh json data end')
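# The timers above are used only through `.period` and `.update_period()`.
# A minimal sketch of a repeating timer that satisfies that interface is
# shown below; the real timer class is defined elsewhere in the project,
# so the name _RepeatTimer and its threading-based implementation are
# assumptions made for illustration.
import threading


class _RepeatTimer:
    def __init__(self, period, callback):
        self.period = period          # seconds between callback runs
        self._callback = callback
        self._stopped = threading.Event()
        self._thread = threading.Thread(target=self._run, daemon=True)

    def start(self):
        self._thread.start()

    def update_period(self, period):
        # Takes effect on the next tick; refresh_json_data relies on this
        # to apply period changes picked up from settings.json.
        self.period = period

    def stop(self):
        self._stopped.set()

    def _run(self):
        # Event.wait returns False on timeout, True once stop() is called.
        while not self._stopped.wait(self.period):
            self._callback()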