def remove_queue_task(self, work_tag, key, report_tag=None, sub_key=None):
    """Remove matching pending tasks from a work queue.

    Builds the two task-string prefixes a queued entry for this
    key/sub_key may carry (the legacy "v1" comma-joined form and the
    RedisQueue.package_task_v2 form) and LREMs every queue element that
    starts with either prefix.

    :param work_tag: queue name the task was pushed to
    :param key: task key
    :param report_tag: optional report tag, joined to work_tag with "|"
    :param sub_key: optional sub key, joined to key with "|"
    :return: number of removed queue entries (0 if the queue is empty
        or missing)
    """
    task_key = key
    if report_tag is not None:
        re_work_tag = StringTool.join_decode([work_tag, report_tag],
                                             join_str="|")
    else:
        re_work_tag = work_tag
    if sub_key is not None:
        key = StringTool.join_decode([key, sub_key], join_str="|")
    # v1 prefix: "<work_tag[|report_tag]>,<key[|sub_key]>"
    value_prefix = StringTool.join_decode([re_work_tag, key], ",")
    # v2 prefix as produced by package_task_v2 with return_prefix=True
    value_prefix2 = RedisQueue.package_task_v2(
        work_tag, task_key, "", sub_key=sub_key, return_prefix=True)
    queue_tasks = self.list_queue_detail(work_tag)
    if queue_tasks is None:
        return 0
    count = 0
    # Redis list key of the queue itself (was previously stored back
    # into "key", shadowing the task key parameter).
    queue_key = StringTool.join_decode([self.queue_prefix_key, work_tag],
                                       join_str="_")
    for task in queue_tasks:
        if task.startswith(value_prefix) or task.startswith(value_prefix2):
            try:
                count += self.redis_man.lrem(queue_key, task, num=0)
            except Exception:
                # best effort: skip entries Redis refuses to remove
                continue
    return count
def clear_task_item(self, work_tag, key):
    """Delete every Redis hash holding item data for the given task key.

    Scans keys matching "<queue_prefix>_<work_tag>_<key>_*" and deletes
    those of type hash; other key types are left untouched.

    :return: an empty dict(sub={}, values={}) mirroring the shape
        returned by list_task_item
    """
    pattern = StringTool.join_decode(
        [self.queue_prefix_key, work_tag, key, "*"], "_")
    task_items = dict(sub=dict(), values=dict())
    for item in self.redis_man.keys(pattern):
        # only task hashes are cleared
        if self.redis_man.type(item) == "hash":
            self.redis_man.delete(item)
    return task_items
def look_task_item():
    """Interactive console for browsing task items stored in Redis.

    Prompts for a work_tag, then a task key, then optional nested sub
    keys; at each level it lists available sub items and values.
    Entering "e"/"exit" pops back one level (or exits the program at
    the top level).
    """
    arg_man.add_argument("-w", "--work-tag", dest="work_tag",
                         help="work tag", metavar="")
    args = parse_args()
    rs = RedisStat()
    # "values" is a navigation stack: [work_tag, key, sub_key, ...]
    values = []
    if args.work_tag is not None:
        values.append(args.work_tag)
    while True:
        prompt_prefix = ""
        if len(values) <= 0:
            work_tag = jy_input("Please Input Work_tag",
                                prompt_prefix=prompt_prefix)
            work_tag = work_tag.strip()
            if work_tag.lower() in ("e", "exit"):
                sys.exit(0)
            values.append(work_tag)
            continue
        if len(values) == 1:
            prompt_prefix += "[work_tag:%s]" % values[0]
            key = jy_input("Please Input Task Key",
                           prompt_prefix=prompt_prefix)
            key = key.strip()
            if key.lower() in ("e", "exit"):
                # BUG FIX: was values.remove(values[-1]), which deletes
                # the FIRST element equal to the last one and pops the
                # wrong level when the stack holds duplicate strings.
                values.pop()
                continue
            values.append(key)
            continue
        elif len(values) >= 2:
            sub_key = None
            prompt_prefix += "[work_tag:%s][key:%s]" % (values[0], values[1])
            if len(values) >= 3:
                sub_key = StringTool.join_decode(values[2:], "_")
                prompt_prefix += "[sub_key:%s]" % sub_key
            task_items = rs.list_task_item(values[0], values[1], sub_key)
            prompt = ""
            if len(task_items["sub"].keys()) > 0:
                prompt += "Input follow value look sub item\n"
                prompt += "\n".join(task_items["sub"])
                prompt += "\n"
            if len(task_items["values"].keys()) > 0:
                prompt += "Input follow value look item value\n"
                prompt += "\n".join(task_items["values"])
            while True:
                item_key = jy_input(prompt, prompt_prefix=prompt_prefix)
                item_key = item_key.strip()
                if item_key.lower() in ("e", "exit"):
                    # pop back one navigation level (same fix as above)
                    values.pop()
                    break
                if item_key in task_items["sub"]:
                    values.append(item_key)
                    break
                elif item_key in task_items["values"]:
                    print(task_items["values"][item_key])
                    continue
                else:
                    continue
def _handle_package_sub_part(cls, data, action="package"):
    """Escape-and-join ("package") or split-and-unescape ("unpack") a
    sub-key part.

    :param data: for "package" a string or list/tuple of parts; for
        "unpack" a "|"-joined string
    :param action: "package" or "unpack"
    :return: the packed string, the list of unpacked parts, or None for
        an unknown action
    """
    if action == "package":
        if isinstance(data, (tuple, list)):
            # escape each element so a literal "|" survives the join
            parts = [cls.sub_part_handler.escape(x) for x in data]
            return StringTool.join_decode(parts, "|")
        return cls.sub_part_handler.escape("%s" % data)
    elif action == "unpack":
        s_data = data.split("|")
        # list(...) keeps the Python 2 behavior (map returned a list)
        # and makes the result indexable/re-iterable on Python 3 too.
        return list(map(cls.sub_part_handler.unescape, s_data))
    return None
def write_example(self, work_tag, params):
    """Dump the current task's params as a JSON example file.

    File name is "<task_key>.<task_sub_key>.<epoch>.json" under
    "<example_dir>/<work_tag>/".

    :param work_tag: sub-directory name under example_dir
    :param params: JSON-serializable task parameters
    :return: path of the written file
    """
    save_name = StringTool.join_decode([
        self.current_task.task_key, self.current_task.task_sub_key,
        int(time.time()), "json"
    ], join_str=".")
    save_dir = StringTool.path_join(example_dir, work_tag)
    try:
        # FIX: the old isdir()-then-mkdir() check raced with concurrent
        # workers; creating and tolerating "already exists" is atomic.
        os.mkdir(save_dir)
    except OSError:
        if os.path.isdir(save_dir) is False:
            raise
    save_path = StringTool.path_join(save_dir, save_name)
    with open(save_path, "w") as wp:
        wp.write(StringTool.encode(json.dumps(params)))
    return save_path
def list_task_item(self, work_tag, key, sub_key=None):
    """List numbered sub-item hashes and stored field values for a task.

    :param work_tag: work tag the task belongs to
    :param key: task key
    :param sub_key: optional sub key to narrow the search
    :return: dict(sub={index: redis_key}, values={field: unpacked value})
    """
    k_l = [self.queue_prefix_key, work_tag, key]
    if sub_key is not None:
        k_l.append(sub_key)
    # matches "<base>_<digits>": a numbered sub-item hash
    task_item_compile = re.compile(
        re.escape(StringTool.join_decode(k_l, "_")) + r"_(\d+)$")
    # BUG FIX: was join_decode([k_l], "_") — k_l wrapped in an extra
    # list, inconsistent with the identical join on the line above.
    get_key = StringTool.join_decode(k_l, "_")
    k_l.append("*")
    key_prefix = StringTool.join_decode(k_l, "_")
    hs = self.redis_man.keys(key_prefix)
    task_items = dict(sub=dict(), values=dict())
    for item in hs:
        if self.redis_man.type(item) != "hash":
            continue
        m_r = task_item_compile.match(item)
        if m_r is None:
            continue
        task_items["sub"][m_r.groups()[0]] = item
    if self.redis_man.type(get_key) == "hash":
        item = self.redis_man.hgetall(get_key)
        # loop variable renamed from "key": it shadowed the parameter
        for field in item.keys():
            task_items["values"][field] = StringData.unpack_data(item[field])
    return task_items
def list_heartbeat(self):
    """
    add in version 1.0.6

    List tags that currently have a heartbeat string key in Redis.

    :return: list of tag names (heartbeat key with the prefix stripped)
    """
    key_prefix = StringTool.join_decode([self.heartbeat_prefix_key, "_*"])
    # characters to strip from each key: the prefix pattern minus its
    # trailing "*"
    len_k = len(key_prefix) - 1
    heartbeats = []
    for item in self.redis_man.keys(key_prefix):
        if self.redis_man.type(item) != "string":
            continue
        tag = item[len_k:]
        if len(tag) > 0:
            heartbeats.append(tag)
    return heartbeats
def write_pbs_task(self, work_tag, cmd):
    """Write a PBS submit script for the current task.

    The script body is pbs_template followed by the space-joined cmd,
    saved as "<task_key>.<task_sub_key>.<epoch>.pbs" under
    "<pbs_task_dir>/<work_tag>/".

    :param work_tag: sub-directory name under pbs_task_dir
    :param cmd: command parts to join into the script's final line
    :return: path of the written script
    """
    save_name = StringTool.join_decode([
        self.current_task.task_key, self.current_task.task_sub_key,
        int(time.time()), "pbs"
    ], join_str=".")
    save_dir = StringTool.path_join(pbs_task_dir, work_tag)
    try:
        # FIX: the old isdir()-then-mkdir() check raced with concurrent
        # workers; creating and tolerating "already exists" is atomic.
        os.mkdir(save_dir)
    except OSError:
        if os.path.isdir(save_dir) is False:
            raise
    save_path = StringTool.path_join(save_dir, save_name)
    with open(save_path, "w") as wp:
        cmd = StringTool.join_encode(cmd, join_str=" ")
        s = StringTool.join_encode([pbs_template, cmd], join_str="\n")
        wp.write(StringTool.encode(s))
    return save_path
def get_dirty_item(self, work_tag):
    """Find orphaned or incomplete task hashes for a work tag.

    Groups keys "<prefix>_<work_tag>_..._<n>" by their base name, then
    reports groups that lack the pipeline hash ("_0"), whose pipeline
    hash has no task_len field, or that are missing one of the numbered
    sub-task hashes.

    :return: list of dict(prefix=..., sub_keys=..., message=...)
        describing each dirty group (messages are user-facing Chinese
        strings, kept as-is)
    """
    k_l = [self.queue_prefix_key, work_tag, "*"]
    key_prefix = StringTool.join_decode(k_l, "_")
    prefix_len = len(key_prefix) - 1
    hs = self.redis_man.keys(key_prefix)
    all_keys = dict()
    # FIX: raw string — "\d" in a plain literal is an invalid escape
    # sequence (DeprecationWarning on Python 3.6+)
    find_sub_key = re.compile(r"_(\d+)$")
    for item in hs:
        if self.redis_man.type(item) != "hash":
            continue
        search_r = find_sub_key.search(item)
        if search_r is None:
            continue
        sub_key = search_r.groups()[0]
        # strip the trailing "_<sub_key>" to get the group base key
        p = item[:0 - len(sub_key) - 1]
        if p in all_keys:
            all_keys[p].append(sub_key)
        else:
            all_keys[p] = [sub_key]
    delete_items = []
    # collect stray tasks that have no task description
    for key in all_keys.keys():
        union_key = key[prefix_len:]
        if "0" not in all_keys[key]:
            delete_items.append(
                dict(prefix=union_key, sub_keys=all_keys[key],
                     message="未发现pipeline信息"))
            continue
        task_len = StringData.unpack_data(
            self.redis_man.hget(key + "_0", "task_len"))
        if task_len is None:
            delete_items.append(
                dict(prefix=union_key, sub_keys=all_keys[key],
                     message="pipeline信息未发现task_len"))
            continue
        for i in range(task_len):
            if "%s" % i not in all_keys[key]:
                delete_items.append(
                    dict(prefix=union_key, sub_keys=all_keys[key],
                         message="缺少子任务%s的信息" % i))
    return delete_items
def task_log(self, *args, **kwargs):
    """Append a log line for the currently running task.

    Joins *args into one message; the keyword ``level`` (default
    "INFO") selects the level. Levels other than INFO/DEBUG are also
    published via publish_message and, when upload_log_tag is set,
    pushed as an upload task. The line is then appended to the current
    task's log file, and optionally echoed through logging when not
    redirecting stdout and debug is on. No-op when there is no current
    task or it has no log_path.
    """
    if self.current_task is None or self.current_task.log_path is None:
        return
    msg = StringTool.join(args, " ")
    level = kwargs.pop("level", "INFO")
    level = str(level).upper()
    # anything that is not INFO/DEBUG (e.g. WARNING/ERROR) gets the
    # extra publish / upload treatment below
    if level not in ["INFO", "DEBUG"]:
        p_msg_a = [self.current_task.task_key]
        if self.current_task.task_sub_key is not None:
            p_msg_a.extend([" ", self.current_task.task_sub_key])
        p_msg = StringTool.join([p_msg_a, "\n", msg], "")
        self.publish_message(p_msg)
        if self.upload_log_tag is not None:
            upload_info = dict(log_path=self.current_task.log_path,
                               timestamp=int(time.time()))
            self.push_task(StringTool.join_decode(
                [self.current_task.task_key, self.work_tag], join_str="_"),
                upload_info, work_tag=self.upload_log_tag)
    log_file = self.current_task.log_path
    now_time = datetime.now().strftime(TIME_FORMAT)
    # line format: "[heartbeat[:worker_index]][sub_key] <time>: LEVEL msg"
    write_a = ["[", self.heartbeat_value]
    if self.worker_index is not None:
        write_a.extend([":", self.worker_index])
    if self.current_task.task_sub_key is not None:
        write_a.extend(["][", self.current_task.task_sub_key])
    write_a.extend(["] ", now_time, ": ", level, " ", msg, "\n"])
    # binary append with buffering disabled — presumably so lines from
    # concurrent workers land whole; TODO confirm
    with open(log_file, "ab", 0) as wl:
        u = StringTool.join(write_a, join_str="")
        s = StringTool.encode(u)
        wl.write(s)
    if self.redirect_stdout is False and self.debug is True:
        try:
            logging.info(s)
        except Exception as e:
            # best effort: a logging failure must not break the task
            pass
def read_task_log(self, work_tag, key, sub_key=None, sub_key_prefix=None,
                  level="INFO", max_length=1000000):
    """Read and filter the tail of a task's log file.

    :param work_tag:
    :param key:
    :param sub_key: None -> match lines with or without a sub key; ""
        -> only lines without a sub key; a concrete sub key -> only
        lines for that sub key
    :param sub_key_prefix: keep only lines whose sub key starts with it
    :param level: INFO by default; DEBUG/INFO/WARNING/ERROR allowed,
        any other value treated as INFO
    :param max_length: read at most this many trailing bytes
    :return: (False, None) when the log file is missing, otherwise
        (True, list of [sub_key, time, level, msg] entries)
    """
    name = StringTool.join([work_tag, "_", key, ".log"], "")
    log_path = StringTool.path_join(self.log_dir, work_tag.lower(), name)
    if os.path.exists(log_path) is False:
        # fall back to the flat (non-per-work_tag) layout
        log_path = StringTool.path_join(self.log_dir, name)
        if os.path.exists(log_path) is False:
            return False, None
    s_log = os.stat(log_path)
    read_seek = s_log.st_size - max_length if max_length < s_log.st_size else 0
    # normalize the filter parameters
    if sub_key is not None:
        sub_key = StringTool.encode(sub_key)
    if sub_key_prefix is not None:
        sub_key_prefix = StringTool.encode(sub_key_prefix)
    if StringTool.is_string(level) is False:
        level = "INFO"
    level = level.upper()
    if level not in self.log_level:
        level = "INFO"
    allow_levels = self.log_level[level]
    logs_list = []
    last_save = False
    with open(log_path, "r") as rl:
        rl.seek(read_seek)
        c = rl.read()
    all_lines = c.split("\n")
    for line in all_lines:
        # BUG FIX: the match object used to be bound to "rl",
        # shadowing the open file handle inside its own with block.
        m_r = self.log_compile.match(line)
        if m_r is not None:
            line_sub_key = m_r.groups()[0]
            log_time = m_r.groups()[1]
            if len(line_sub_key) >= 2:
                # strip the surrounding bracket characters
                line_sub_key = line_sub_key[1:-1]
            line_level = m_r.groups()[2]
            log_msg = m_r.groups()[3]
            if sub_key is not None and sub_key != line_sub_key:
                last_save = False
                continue
            if sub_key_prefix is not None and line_sub_key.startswith(
                    sub_key_prefix) is False:
                last_save = False
                continue
            if line_level not in allow_levels:
                last_save = False
                continue
            last_save = True
            # FIX: list(...) keeps Python 2 behavior and allows the
            # item assignment below on Python 3 (map is lazy there)
            logs_list.append(
                list(map(StringTool.decode,
                         [line_sub_key, log_time, line_level, log_msg])))
        elif last_save is True:
            # continuation line: fold into the previous entry's message
            logs_list[-1][3] = StringTool.join_decode(
                [logs_list[-1][3], line])
    return True, logs_list
def __str__(self):
    """Human-readable type-mismatch message for this key/value pair."""
    parts = [
        "The Key", self.key,
        "Except Type Is", self.except_type,
        "But The Value Is", self.value,
        "Not Match",
    ]
    return StringTool.join_decode(parts, join_str=" ")