def main():
    """Find accounts that logged in on 4+ distinct days and write 'account,name|name…' lines.

    Output goes to out/1_20.txt; prints the number of marked accounts.
    """
    fname = Filter.Filter.filter_login_log()
    filt = Filter.Filter(fname, None)
    # filter_out() is kept for its side effect (producing the filtered log file)
    tmp_log_name = filt.filter_out()
    player_dic = {}       # account -> set of days seen
    player_name_dic = {}  # account -> set of character names seen
    out_name = utils.get_out_name('out', '1_20.txt')
    mark_set = set()      # accounts with >= 4 distinct login days
    # `with` ensures the output file is closed even on error (original leaked it)
    with utils.utf8_open(out_name, 'w') as fw:
        with utils.utf8_open(fname, encoding='utf-8') as fr:
            for line in fr:
                log_one = LogOne.get_log_from_line(line)
                player_dic.setdefault(log_one.account, set())
                player_dic[log_one.account].add(log_one.day)
                player_name_dic.setdefault(log_one.account, set())
                try:
                    player_name_dic[log_one.account].add(log_one.name)
                except AttributeError:
                    # narrow the original bare `except:` — only records without
                    # a .name attribute are skipped, other errors surface
                    pass
                if len(player_dic[log_one.account]) >= 4:
                    mark_set.add(log_one.account)
        count = 0
        for i in mark_set:
            names = player_name_dic[i]
            _2 = '|'.join(names)
            fw.write(','.join((i, _2)) + '\n')
            count += 1
    print(count)
def out_as_csv(self, filename):
    """Write per-row data to `filename` and aggregated stay totals to `<filename>.full.csv`."""
    readers = self.output()
    # first file: one CSV row per reader row, stop when every reader is exhausted
    with utils.utf8_open(filename, 'w') as out:
        row_idx = 0
        while True:
            cells = [r.get_row(row_idx) for r in readers]
            if not any(cells):
                break
            out.write(','.join(cells) + '\n')
            row_idx += 1
    # second file: two rows of column-wise sums over all readers, by day index
    with utils.utf8_open('{}.full.csv'.format(filename), 'w') as out:
        totals_first = []
        totals_second = []
        day_idx = 1
        while True:
            pairs = [r.get_stay_by_day_index(day_idx) for r in readers]
            sum_first = sum(p[0] for p in pairs)
            sum_second = sum(p[1] for p in pairs)
            if not sum_second:
                break
            totals_first.append(sum_first)
            totals_second.append(sum_second)
            day_idx += 1
        out.write(','.join(map(str, totals_first)) + '\n')
        out.write(','.join(map(str, totals_second)) + '\n')
def filter_sys_log(tag_name):
    """Copy every system-log line containing `tag_name` into a tmp file.

    Returns the path of the filtered log file.
    """
    new_dir = utils.get_dir('tmp')
    basename = os.path.basename(const.SYS_LOG_NAME)
    fw_name = os.path.join(new_dir, '{}.{}.log'.format(basename, tag_name))
    # `with` guarantees the writer is closed even if reading raises
    # (the original leaked the handle on any exception before fw.close())
    with utils.utf8_open(fw_name, 'w') as fw, \
            utils.utf8_open(const.SYS_LOG_NAME, encoding='utf-8') as fr:
        for line in fr:
            if tag_name in line:
                fw.write(line)
    return fw_name
def do_resolve(self):
    """Resolve all variable references in the main input file, writing the result to the main output file."""
    var_stack.set_var("PRINT_COMMAND_TIME").append("no")  # do not print time report

    cfg_path = var_stack.ResolveVarToStr("__CONFIG_FILE__")
    if not os.path.isfile(cfg_path):
        raise FileNotFoundError(cfg_path, var_stack.unresolved_var("__CONFIG_FILE__"))

    in_path = var_stack.ResolveVarToStr("__MAIN_INPUT_FILE__")
    if not os.path.isfile(in_path):
        raise FileNotFoundError(in_path, var_stack.unresolved_var("__MAIN_INPUT_FILE__"))

    out_path = var_stack.ResolveVarToStr("__MAIN_OUT_FILE__")
    self.read_yaml_file(cfg_path)

    with utils.utf8_open(in_path, "r") as rfd:
        raw_text = rfd.read()
    with utils.utf8_open(out_path, "w") as wfd:
        wfd.write(var_stack.ResolveStrToStr(raw_text))
def parse_lingxu():
    """Print every LingxuAttackFlow log line belonging to one hard-coded gbid."""
    fname = utils.filter_from_origin(LogOne.LingxuAttackFlow.FILTER_STR)
    with utils.utf8_open(fname) as fr:
        for raw in fr:
            entry = LogOne.get_log_from_line(raw)
            if entry.gbid != '8444553113334133613':
                continue
            print(raw)
def get_header():
    """Build a CSV of logged-out players at/below MAX_LEFT_LEVEL, keyed by open id."""
    by_key = {}
    fname = utils.filter_from_origin('PlayerLogout')
    with utils.utf8_open(fname) as fr:
        for line in fr:
            rec = LogOne.get_log_from_line(line)
            if not rec:
                continue
            uk = rec.unique_key()
            if uk not in by_key:
                by_key[uk] = NewBieLeft(uk, rec.level, rec.login_channel, rec.battle_point)
            else:
                by_key[uk].update_level(rec.level, rec.battle_point)
    csv = csv_output.CSVOutPut()
    for col, title in enumerate(('GOPENID', '等级', '战力', '渠道号')):
        csv.set(0, col, title)
    row = 1
    for nb in by_key.values():
        if nb.level > const.MAX_LEFT_LEVEL:
            continue
        # trailing apostrophe — presumably to stop spreadsheet apps from
        # reformatting the id; confirm against the consumer of this CSV
        csv.set(row, 0, f"{nb.open_id}'")
        csv.set(row, 1, nb.level)
        csv.set(row, 2, nb.battle_point)
        csv.set(row, 3, nb.login_channel)
        row += 1
    out_path = utils.get_out_name('out', 'left_newbie_header.csv')
    csv.output(out_path)
def filter_out(self):
    """Write lines whose account is NOT in the inner-openid set to a '.outter' log.

    Returns the path of the filtered file.
    """
    # os.path.join instead of a hard-coded '\\' separator so the path is
    # correct on POSIX as well as Windows (identical result on Windows)
    fw_name = os.path.join(self.newdir, '{}.{}.log'.format(self.basename, 'outter'))
    # `with` guarantees the writer is closed even if parsing raises
    with utils.utf8_open(fw_name, 'w', encoding='utf-8') as fw, \
            utils.utf8_open(self.filename, encoding='utf-8') as fr:
        for line in fr:
            lo = LogOne.get_log_from_line(line)
            if not lo:
                continue
            if lo.account in self.inner_openid_set:
                continue
            fw.write(line)
    return fw_name
def get_openid_info_by_txt():
    """Report retention / level / battle-point info for the players listed in openids.txt."""
    avatar_dic = get_avatar_dic()
    csv = csv_output.CSVOutPut()
    headers = ('openid', '次留', '三留', '七留', '首周登录天数', '战力', '等级', '哪一天登陆')
    for col, title in enumerate(headers):
        csv.set(0, col, title)
    row = 0
    with utils.utf8_open(const.OPEN_IDS_TXT) as fr:
        for raw in fr:
            row += 1  # one output row per input line, hits or misses alike
            open_id = raw.strip()
            if open_id not in avatar_dic:
                # no data for this id: echo it so the operator can see the miss
                print(open_id)
                continue
            av = avatar_dic[open_id]
            csv.set(row, 0, open_id)
            csv.set(row, 1, 1 if av.is_stay_by(1) else 0)
            csv.set(row, 2, 1 if av.is_stay_by(2) else 0)
            csv.set(row, 3, 1 if av.is_stay_by(6) else 0)
            csv.set(row, 4, len(av.days))
            csv.set(row, 5, av.battle_point)
            csv.set(row, 6, av.level)
            csv.set(row, 7, '|'.join(map(str, av.days)))
    out_name = utils.get_out_name('out', 'avatar_info_by_open_id_txt.csv')
    csv.output(out_name)
def main1():
    """Print SecSNSGetFlow lines mentioning both gbid1 and gbid2."""
    fname = utils.filter_from_origin('SecSNSGetFlow')
    print(fname)
    with utils.utf8_open(fname) as fr:
        for raw in fr:
            if gbid1 not in raw:
                continue
            if gbid2 in raw:
                print(raw)
def guide_flow():
    """Output per guide node: total avatars, how many distinct avatars passed it, and the pass rate."""
    fname = utils.filter_from_origin('GuideFlow')
    players_per_node = {}  # guide_id -> set of gbids that passed the node
    avatar_count = utils.get_avatar_count()
    with utils.utf8_open(fname) as fr:
        for line in fr:
            rec = LogOne.get_log_from_line(line)
            if not rec:
                continue
            players_per_node.setdefault(rec.guide_id, set()).add(rec.gbid)
    rows = [(int(node), len(gbids)) for node, gbids in players_per_node.items()]
    rows.sort(key=lambda r: r[0])
    csv = csv_output.CSVOutPut()
    csv.set(0, 0, '节点')
    csv.set(0, 1, '创角数')
    csv.set(0, 2, '节点通过人数')
    csv.set(0, 3, '节点通过率')
    for idx, (node, passed) in enumerate(rows, start=1):
        csv.set(idx, 0, node)
        csv.set(idx, 1, avatar_count)
        csv.set(idx, 2, passed)
        csv.set(idx, 3, passed / avatar_count)
    out_name = utils.get_out_name('out', 'guide_flow.csv')
    csv.output(out_name)
def parse_today_not_login():
    """List avatars in the 22-35 level band that did NOT log in on the final day, as a CSV."""
    fname = log_one_utils.get_login_out_log_new()
    avatar_dic = {}
    with utils.utf8_open(fname) as fr:
        for line in fr:
            lo = LogOne.get_log_from_line(line)
            # guard against unparseable lines — the sibling parsers
            # (get_avatar_dic, get_header, …) all do this; without it a None
            # record raises AttributeError on lo.day
            if not lo:
                continue
            if not (const.FIRST_DAY <= lo.day <= const.FINAL_DAY):
                continue
            uk = lo.unique_key()
            if uk in avatar_dic:
                avatar_dic[uk].add_log(lo)
            else:
                avatar_dic[uk] = Avatar(uk, lo.level, lo.battle_point, lo)
    print(len(avatar_dic))
    csv = csv_output.CSVOutPut()
    csv.set(0, 0, 'GOPENID')
    csv.set(0, 1, '等级')
    csv.set(0, 2, '战力')
    csv.set(0, 3, '渠道号')
    idx = 1
    for av in avatar_dic.values():
        if 22 <= av.level <= 35 and (const.FINAL_DAY not in av.days):
            csv.set(idx, 0, f'{av.open_id}')
            csv.set(idx, 1, av.level)
            csv.set(idx, 2, av.battle_point)
            csv.set(idx, 3, av.login_channel)
            idx += 1
    fname = utils.get_out_name('out', 'today_not_login.csv')
    csv.output(fname)
def get_avatar_dic():
    """Return {unique_key: Avatar} mapping each player to their login days, school, level, etc.

    The result is cached as a pickle under tmp/avatar_days_dict; the cache is
    returned as-is when present.
    """
    out_name = utils.get_out_name('tmp', 'avatar_days_dict')
    if os.path.exists(out_name):
        # `with` closes the cache file promptly — the original
        # pickle.load(open(...)) leaked the file handle
        with open(out_name, 'rb') as cache:
            return pickle.load(cache)
    avatar_dic = {}
    fname = log_one_utils.filter_by_log_one_all()
    with utils.utf8_open(fname) as fr:
        for line in fr:
            lo = LogOne.get_log_from_line(line)
            if not lo:
                continue
            if not (const.FIRST_DAY <= lo.day <= const.FINAL_DAY):
                continue
            uk = lo.unique_key()  # open_id based key
            if uk not in avatar_dic:
                avatar_dic[uk] = Avatar(uk, lo.day, lo.FILTER_STR)
            avatar_val = avatar_dic[uk]
            avatar_val.add_day(lo.day)
            if lo.FILTER_STR == 'SecLogin':
                avatar_val.add_school(lo.school)
            elif lo.FILTER_STR in ('PlayerLogin', 'PlayerLogout'):
                avatar_val.add_level(lo.level)
                avatar_val.add_battle_point(lo.battle_point)
                avatar_val.add_channel(lo.login_channel)
    with open(out_name, 'wb') as cache:
        pickle.dump(avatar_dic, cache)
    return avatar_dic
def filter_item_flow_by_src(src):
    """Filter ItemFlow log lines down to those whose src equals `src`; cached under tmp/.

    Returns the path of the (possibly pre-existing) filtered file.
    """
    # NOTE(review): 'itme_flow' is a typo, but the string is kept so that
    # already-written cache files are still found
    fw_name = utils.get_out_name('tmp', f'itme_flow_{src}.log')
    if os.path.exists(fw_name):
        return fw_name
    fname = utils.filter_from_origin(LogOne.ItemFlow.FILTER_STR)
    # `with` guarantees the writer is closed even if parsing raises
    with utils.utf8_open(fw_name, 'w') as fw, utils.utf8_open(fname) as fr:
        for line in fr:
            lo = LogOne.get_log_from_line(line)
            if lo.src != src:
                continue
            fw.write(line)
    return fw_name
def login():
    """Print the filter string and time of every login/logout record for gbid2."""
    log_path = Filter.Filter.filter_login_log()
    print(log_path)
    with utils.utf8_open(log_path) as fr:
        for raw in fr:
            rec = LogOne.get_log_from_line(raw)
            if rec.gbid != gbid2:
                continue
            print(rec.FILTER_STR, rec.time_str)
def main2():
    """Print time and friend info for every PlayerFriendsList record of gbid2."""
    log_path = utils.filter_from_origin('PlayerFriendsList')
    print(log_path)
    with utils.utf8_open(log_path) as fr:
        for raw in fr:
            rec = LogOne.get_log_from_line(raw)
            if rec.gbid != gbid2:
                continue
            print(rec.time_str, rec.friend_gbid, rec.friend_name)
def parse(self):
    """Group RoundFlow records into self.days[day][gbid] -> list of records."""
    print(f'self.filename:{self.filename}')
    with utils.utf8_open(self.filename, encoding='utf-8') as fr:
        for raw in fr:
            rec = LogOne.RoundFlow.get_log_obj_from_line(raw)
            per_day = self.days.setdefault(rec.get_day(), {})
            per_day.setdefault(rec.gbid, []).append(rec)
def get_dm(filename):
    """Build a DaysManager from every log line in `filename` and return it."""
    dm = DaysManager()
    print(filename)
    with utils.utf8_open(filename, encoding='utf-8') as fr:
        for raw in fr:
            dm.add_one(LogOne.get_log_from_line(raw))
    print(f'len:{len(dm.uk_dict)}')
    return dm
def do_parallel_run(self):
    """Read the process-list file (one command per line, '#' for comments) and run all commands in parallel."""
    list_path = var_stack.ResolveVarToStr("__MAIN_INPUT_FILE__")
    commands = []
    with utils.utf8_open(list_path, "r") as rfd:
        for raw in rfd:
            stripped = raw.strip()
            # skip blank lines and comment lines
            if not stripped or stripped.startswith("#"):
                continue
            commands.append(shlex.split(stripped))
    utils.run_processes_in_parallel(commands)
def prepare_command_list_from_file(self):
    """Read the config file, variable-resolve each line, and return a list of argv lists."""
    with utils.utf8_open(self.options.config_file[0], "r") as rfd:
        raw_lines = rfd.readlines()
    return [shlex.split(var_stack.ResolveStrToStr(ln.strip())) for ln in raw_lines]
def parse(self):
    """Record each avatar's best (maximum) score per day in self.uk_day_dict."""
    with utils.utf8_open(self.filename) as fr:
        for raw in fr:
            rec = LogOne.get_log_from_line(raw)
            per_day = self.uk_day_dict.setdefault(rec.get_day(), {})
            uk = rec.unique_key()
            score = int(rec.score)
            # keep only the highest score seen for this avatar on this day
            if score > per_day.get(uk, 0):
                per_day[uk] = score
            else:
                per_day[uk] = per_day.get(uk, 0)
def write_copy_debug_info(self):
    """Best effort: write a listing of the sync folder next to the echo log file."""
    try:
        if not var_stack.defined('ECHO_LOG_FILE'):
            return
        log_folder, _ = os.path.split(var_stack.ResolveVarToStr("ECHO_LOG_FILE"))
        manifest_path = os.path.join(log_folder, "sync-folder-manifest.txt")
        with utils.utf8_open(manifest_path, "w") as wfd:
            repo_sync_dir = var_stack.ResolveVarToStr("COPY_SOURCES_ROOT_DIR")
            wfd.write(utils.disk_item_listing(repo_sync_dir))
    except Exception:
        pass  # purely diagnostic output - if it did not work, forget it
def download_from_config_files(self, parallel_run_config_file_path, config_files):
    """Write a parallel-run config invoking the download tool per config file.

    Returns the parallel-run command string and the exit-if-error command.
    """
    import win32api
    with utils.utf8_open(parallel_run_config_file_path, "w") as wfd:
        utils.make_open_file_read_write_for_all(wfd)
        for config_file in config_files:
            # curl on windows has problem with path to config files that have unicode characters
            short_path = win32api.GetShortPathName(config_file)
            wfd.write(var_stack.ResolveStrToStr('''"$(DOWNLOAD_TOOL_PATH)" --config "{}"\n'''.format(short_path)))
    download_command = " ".join((self.platform_helper.run_instl(), "parallel-run", "--in",
                                 utils.quoteme_double(parallel_run_config_file_path)))
    return download_command, self.platform_helper.exit_if_error()
def parse_guild():
    """Print act_type and the raw line for every guild-flow record of one hard-coded gbid."""
    fname = utils.filter_from_origin(LogOne.GuildFlow.FILTER_STR)
    with utils.utf8_open(fname) as fr:
        for raw in fr:
            rec = LogOne.get_log_from_line(raw)
            if not rec or rec.gbid != '8444553112797262642':
                continue
            print(rec.act_type)
            print(raw)
def parse(self):
    """Keep, per day and per avatar, the logout record with the highest level."""
    with utils.utf8_open(self.filename) as fr:
        for raw in fr:
            rec = LogOne.LogOut.get_log_obj_from_line(raw)
            per_day = self.days.setdefault(rec.get_day(), {})
            uk = rec.unique_key()
            current = per_day.get(uk)
            # first record for the key, or a higher-level record, wins
            if current is None or rec.level > current.level:
                per_day[uk] = rec
def parse(self):
    """Group system-log records into self.days[day][unique_key] -> list of records."""
    with utils.utf8_open(self.filename) as fr:
        for raw in fr:
            rec = LogOne.LogSys.get_log_obj_from_line(raw)
            if not rec:
                # echo unparseable lines so they can be inspected manually
                print(raw)
                continue
            per_day = self.days.setdefault(rec.get_day(), {})
            per_day.setdefault(rec.unique_key(), []).append(rec)
def do_exec(self):
    """Read the optional config yaml files, then exec the main input file as Python code."""
    py_file_path = "unknown file"
    try:
        # temp hack, which additional config file to read should come from command line options
        self.read_yaml_file("InstlClient.yaml")
        config_file = var_stack.ResolveVarToStr("__CONFIG_FILE__")
        if os.path.isfile(config_file):
            self.read_yaml_file(config_file)
        py_file_path = var_stack.ResolveVarToStr("__MAIN_INPUT_FILE__")
        with utils.utf8_open(py_file_path, 'r') as rfd:
            py_text = rfd.read()
        # SECURITY: exec runs arbitrary code from the input file — only ever
        # point __MAIN_INPUT_FILE__ at trusted content
        exec(py_text, globals())
    except Exception as ex:
        print("Exception while exec ", py_file_path, ex)
def parse(self, filename):
    """Accumulate per-gbid AvatarVal login/logout info from `filename`.

    Fix: the original if-branch contained the bare expression
    `self.gbid_dic[lo.gbid]` — a no-op lookup whose result was discarded.
    The intent (create the entry only when missing) is expressed directly.
    """
    with utils.utf8_open(filename, encoding='utf-8') as fr:
        for line in fr:
            lo = LogOne.get_log_from_line(line)
            if lo.gbid not in self.gbid_dic:
                self.gbid_dic[lo.gbid] = AvatarVal(lo.gbid, lo.account)
            if lo.IS_LOGIN:
                # login records carry the freshest name/level/school
                self.gbid_dic[lo.gbid].update(lo.name, lo.level, lo.school)
            self.gbid_dic[lo.gbid].add_info(lo.timestamp, lo.IS_LOGIN, lo.level)
def filter_by_act(self, battle_type):
    """Filter RoundFlow lines down to one battle_type; cached as <basename>.<type>.log.

    Returns the path of the (possibly pre-existing) filtered file.
    """
    battle_type = str(battle_type)
    fw_name = os.path.join(self.newdir, '{}.{}.log'.format(self.basename, battle_type))
    if os.path.exists(fw_name):
        return fw_name
    # `with` guarantees the writer is closed even if parsing raises
    with utils.utf8_open(fw_name, 'w') as fw, \
            utils.utf8_open(self.filename, encoding='utf-8') as fr:
        for line in fr:
            lo = LogOne.RoundFlow.get_log_obj_from_line(line)
            if not lo:
                print('error1:', line)
                continue
            if lo.battle_type != battle_type:
                continue
            fw.write(line)
    return fw_name
def filter_guild_train():
    """Extract 'guild train upgrade' LOG_GUILD_CONTRIBUTION lines from the origin log stream.

    Cached: returns the existing tmp file path if already produced.
    """
    tmp_dir = utils.get_dir('tmp')
    fw_name = os.path.join(tmp_dir, 'guild_train_tlog.log')
    if os.path.exists(fw_name):
        return fw_name
    # `with` guarantees the writer is closed even if the stream raises
    with utils.utf8_open(fw_name, 'w') as fw:
        for line in utils.get_origin_line_stream():
            if 'guild train upgrade' in line and line.startswith(
                    'LOG_GUILD_CONTRIBUTION'):
                fw.write(line)
    return fw_name
def create_instl_history_file(self):
    """Dump the current variable definitions as yaml into the instl history temp file,
    then queue a batch command that appends it to the permanent history file."""
    var_stack.set_var("__BATCH_CREATE_TIME__").append(time.strftime("%Y/%m/%d %H:%M:%S"))
    yaml_of_defines = aYaml.YamlDumpDocWrap(var_stack, '!define', "Definitions",
                                            explicit_start=True, sort_mappings=True)
    # write the history file, but only if variable LOCAL_REPO_BOOKKEEPING_DIR is defined
    # and the folder actually exists.
    history_path = var_stack.ResolveVarToStr("INSTL_HISTORY_TEMP_PATH")
    history_folder, _ = os.path.split(history_path)
    if os.path.isdir(history_folder):
        with utils.utf8_open(history_path, "w") as wfd:
            utils.make_open_file_read_write_for_all(wfd)
            aYaml.writeAsYaml(yaml_of_defines, wfd)
        self.batch_accum += self.platform_helper.append_file_to_file("$(INSTL_HISTORY_TEMP_PATH)", "$(INSTL_HISTORY_PATH)")
def create_unwtar_batch_file(self, wtar_instructions, name_for_progress):
    """Write a numbered .unwtar batch file of unwtar commands and queue it for parallel execution."""
    if not wtar_instructions:
        return
    main_out_dir, _ = os.path.split(var_stack.ResolveVarToStr("__MAIN_OUT_FILE__"))
    batch_dir = os.path.join(main_out_dir, "unwtar")
    os.makedirs(batch_dir, exist_ok=True)
    leaf = name_for_progress + "_" + str(self.unwtar_batch_file_counter) + ".unwtar"
    self.unwtar_batch_file_counter += 1
    batch_file_path = var_stack.ResolveStrToStr(os.path.join(batch_dir, leaf))
    with utils.utf8_open(batch_file_path, "w") as wfd:
        # NOTE(review): iterates self.unwtar_instructions although the
        # parameter is wtar_instructions — looks suspicious; confirm whether
        # the two are in fact the same list before changing anything.
        for wtar_inst in self.unwtar_instructions:
            unwtar_line = var_stack.ResolveStrToStr("""unwtar --in "{}" --out "{}" --no-numbers-progress\n""".format(*wtar_inst))
            self.platform_helper.increment_progress()
            wfd.write(unwtar_line)
    self.batch_accum += self.platform_helper.progress("Verify {}".format(name_for_progress))
    self.batch_accum += self.platform_helper.run_instl_command_list(batch_file_path, parallel=True)
def plugin_bundle(in_os, in_path):
    """Identify a plugin bundle.

    Returns (path, version, guid) if Contents/Info.xml exists and matches the
    version/guid regex; falls back to the OS-specific bundle handler when the
    xml is absent; None otherwise.
    """
    xml_path = os.path.join(in_path, 'Contents', 'Info.xml')
    if not os.path.exists(xml_path):
        if in_os == 'Mac':
            return Mac_bundle(in_os, in_path)
        if in_os == 'Win':
            return Win_bundle(in_os, in_path)
        return None
    with utils.utf8_open(xml_path, "r") as rfd:
        info_xml = rfd.read()
    match = plugin_version_and_guid_re.match(info_xml)
    if match:
        return (in_path, match.group('version'), match.group('guid'))
    return None
def filter_by_log_one_all():
    """Keep only origin-stream lines that parse into a LogOne record; cached as tmp/log_one.log."""
    fw_name = utils.get_out_name('tmp', 'log_one.log')
    if os.path.exists(fw_name):
        return fw_name
    # `with` guarantees the writer is closed even if the stream raises
    with utils.utf8_open(fw_name, 'w') as fw:
        for line in utils.get_origin_line_stream():
            lo = LogOne.get_log_from_line(line)
            if not lo:
                continue
            fw.write(line)
    return fw_name
def filter_login_log():
    """Keep only SecLogin/SecLogout lines from the origin stream; cached as tmp/log_in_and_out.log."""
    # NOTE(review): the original assigned this to an unused `dirname` local;
    # the call is kept in case get_dir creates the tmp folder — confirm
    utils.get_dir('tmp')
    fw_name = utils.get_out_name('tmp', 'log_in_and_out.log')
    if os.path.exists(fw_name):
        return fw_name
    # `with` guarantees the writer is closed even if the stream raises
    with utils.utf8_open(fw_name, 'w', encoding='utf-8') as fw:
        for line in utils.get_origin_line_stream():
            # startswith accepts a tuple of prefixes — one call, no `or` chain
            if line.startswith(('SecLogin', 'SecLogout')):
                fw.write(line)
    return fw_name
def get_login_out_log_new():
    """Keep only PlayerLogin/PlayerLogout lines from the origin stream; cached as tmp/log_and_out_new.log."""
    fw_name = utils.get_out_name('tmp', 'log_and_out_new.log')
    if os.path.exists(fw_name):
        return fw_name
    # `with` guarantees the writer is closed even if parsing raises
    with utils.utf8_open(fw_name, 'w') as fw:
        for line in utils.get_origin_line_stream():
            lo = LogOne.get_log_from_line(line)
            if not lo:
                continue
            if lo.FILTER_STR in ('PlayerLogin', 'PlayerLogout'):
                fw.write(line)
    return fw_name
def parse(self):
    """Track login/logout pairs per unique_key.

    The first record for a key must be a login (otherwise it is reported and
    skipped); subsequent logout records append their timestamps to it.
    """
    with utils.utf8_open(self.filename, encoding='utf-8') as fr:
        for raw in fr:
            rec = LogOne.get_log_from_line(raw)
            uk = rec.unique_key()
            if uk not in self.gbid_dic:
                if not rec.IS_LOGIN:
                    print('error first not login')
                    continue
                self.gbid_dic[uk] = rec
                self.days.setdefault(rec.get_day(), {})[uk] = rec
            elif not rec.IS_LOGIN:
                self.gbid_dic[uk].add_log_out_time(rec.timestamp)
def out_as_csv(self, csv_name):
    """Write three CSV rows: sorted days, avatar count per day, total record count per day."""
    full_csv_name = os.path.join(utils.get_dir('out'), csv_name)
    days_list = sorted(self.days)
    with utils.utf8_open(full_csv_name, 'w') as fw:
        fw.write(','.join(str(d) for d in days_list) + '\n')
        fw.write(','.join(str(len(self.days[d])) for d in days_list) + '\n')
        # total records per day = sum of each avatar's record-list length
        fw.write(','.join(
            str(sum(map(len, self.days[d].values()))) for d in days_list) + '\n')
def bandit_boss():
    """Count, per day, the distinct avatars whose resource-flow (src 65) count equals 200."""
    fname = log_one_utils.filter_resource_flow_by_src(65)
    per_day = {}  # day -> set of unique keys
    with utils.utf8_open(fname) as fr:
        for line in fr:
            rec = LogOne.get_log_from_line(line)
            if rec.count != 200:
                continue
            per_day.setdefault(rec.day, set()).add(rec.unique_key())
    csv = csv_output.CSVOutPut()
    csv.set(0, 0, '日期')
    csv.set(1, 0, '人数')
    for col, day in enumerate(sorted(per_day), start=1):
        csv.set(0, col, day)
        csv.set(1, col, len(per_day[day]))
    fw_name = utils.get_out_name('out', 'bandit_boss.csv')
    csv.output(fw_name)
fname = filt.filter_by_act(act_id) f = Filter.Filter(fname, LogOne.RoundFlow) fname = f.filter_inner() parse_file(fname, '{}.{}.csv'.format(act_id, 'inner')) fname = f.filter_out() parse_file(fname, '{}.{}.csv'.format(act_id, 'outter')) if __name__ == '__main__': # whole_log = r'E:\shLog\tlog\xzj.log.LOG_GUILD_BANDIT.log' print(1) fname = utils.filter_from_origin('RoundFlow') print(2) f = Filter.Filter(fname, LogOne.RoundFlow) print(3) # parse_by_act(f, 9) print(4) fname = f.filter_by_act(20) print(5) with utils.utf8_open(fname, encoding='utf-8') as fr: for line in fr: lo = LogOne.RoundFlow.get_log_obj_from_line(line) if int(lo.round_time) > 1800: print(lo.result) # parse_by_act(f, 32000004) # f = Filter.Filter(whole_log, filter_inner_name, filter_out_name) # f.filter_tlog(r'E:\shLog\tlog\xzj.log', 'LOG_VITALITY') # f.filter_guild_bandit()
def create_excludes_file(self):
    """Write the xcopy excludes file (one entry per line) if there is anything to exclude."""
    if not self.excludes_set:
        return
    excludes_path = var_stack.ResolveVarToStr("XCOPY_EXCLUDE_FILE_PATH")
    with utils.utf8_open(excludes_path, "w") as wfd:
        utils.make_open_file_read_write_for_all(wfd)
        wfd.write("\n".join(self.excludes_set))