def evaluated_movement(self, pawn_info, moved):
    pawn_value = 100  # base pawn value (unused in this method)
    if moved:
        valid_move = [pawn_info.position]
    else:
        valid_move = logic_controller.get_valid_move(pawn_info)
    action_list = []
    for pos in valid_move:
        action_list.append((AI_ACTION_MOVE, pos))
    attack_target = self.find_attack_candidates(pawn_info)
    for target in attack_target:
        action_list.append((AI_ACTION_ATTACK, target))
    enermy_list = self.evaluate_enermy(pawn_info)
    enermy_value = {}
    for e in enermy_list:
        enermy_value[e[1].index] = e[0]
        # logger(u'Enermy {0} : {1}'.format(e[1].hero.name, e[0]))
    valued_action_list = []
    for action in action_list:
        offence = 0
        danger = 0
        if action[0] == AI_ACTION_MOVE:
            offence, danger = self.evaluate_position(pawn_info, action[1], enermy_value)
        else:
            target_pawn = logic_controller.get_target_pawn(action[1])
            damage, ratio = logic_controller.calculate_damage(pawn_info, target_pawn)
            damage = damage * ratio
            rest = max(0, target_pawn.hero.current_health - damage)
            if rest == 0:
                # a killing blow is worth the target's full value
                offence += enermy_value[target_pawn.index]
            else:
                offence += enermy_value[target_pawn.index] * damage * 1.0 / target_pawn.hero.current_health
            position_evl = self.evaluate_position(pawn_info, pawn_info.position, enermy_value)
            offence += position_evl[0]
            danger += position_evl[1]
        valued_action_list.append(((offence, danger), action))
    if valued_action_list:
        if pawn_info.ai_strategy == AI_STRATEGY_OFFENCE:
            valued_action_list.sort(reverse=True)
        elif pawn_info.ai_strategy == AI_STRATEGY_DEFENCE:
            # prefer the least dangerous action: sort by the danger score
            valued_action_list.sort(key=lambda x: x[0][1])
        ret = valued_action_list[0]
        if debug:
            for tmp in valued_action_list:
                if tmp[1][0] == AI_ACTION_MOVE:
                    logger(u'[Move to {0}] [ offence : {1} , danger : {2} ]'.format(
                        tmp[1][1], tmp[0][0], tmp[0][1]))
                else:
                    logger(u'[Attack {0}] [ offence : {1} , danger : {2} ]'.format(
                        logic_controller.get_target_pawn(tmp[1][1]).hero.name,
                        tmp[0][0], tmp[0][1]))
        return ret
    return None
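# --- Illustrative sketch (not part of the game module) ----------------------
# A minimal, standalone demo of how the ((offence, danger), action) pairs
# produced by evaluated_movement() drive the two strategies. The constants
# and the scores below are made up for illustration only.
AI_ACTION_MOVE, AI_ACTION_ATTACK = 0, 1

scored = [
    ((12.0, 5.0), (AI_ACTION_MOVE, (3, 4))),
    ((20.0, 9.0), (AI_ACTION_ATTACK, 7)),
    ((8.0, 1.0), (AI_ACTION_MOVE, (2, 2))),
]

offence_pick = max(scored, key=lambda x: x[0])     # highest (offence, danger)
defence_pick = min(scored, key=lambda x: x[0][1])  # lowest danger

print(offence_pick)  # ((20.0, 9.0), (1, 7))      -> attack target 7
print(defence_pick)  # ((8.0, 1.0), (0, (2, 2)))  -> safest move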
def buildContentSummary(categories, categoryData, database):
    # Builds the content summary for a database: for each selected category,
    # fetch the unique documents matched by its taxonomy keywords.
    iters = 2 if len(categories) > 1 else 1
    keywords = [TAXONOMY.get(cat) for cat in categories[:iters]]
    for i in range(iters):
        keys = reduce(list.__add__, keywords[i:])
        urls = getUniqueDocs(keys, categoryData)
        logger("Building the content summary for " + categories[i] +
               ". Total docs to fetch: " + str(len(urls)), highlight=True)
        crawler.getContentSummary(database, categories[i], urls, categoryData)
def process_attack_result(self, attacker, defender):
    damage, succ_ratio = self.calculate_damage(attacker, defender)
    defender.hero.health_decrease = min(damage, defender.hero.current_health)
    if debug:
        logger(u"{0} attacked {1} with {2} damage; {1}'s hp reduced from {3} to {4}".format(
            attacker.hero.name, defender.hero.name, damage,
            defender.hero.current_health,
            defender.hero.current_health - defender.hero.health_decrease))
    defender.action = pawn.ACTION_PROCESSING
def end_turn(self):
    if debug:
        logger(u'Team ({0}) has finished its turn'.format(self.turn_team))
    self.turn_team = (self.turn_team + 1) % self.team_count
    if self.turn_team == 0:
        self.round_count += 1
    self.new_turn()
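# --- Illustrative sketch (standalone, made-up values) -----------------------
# The turn/round rotation used by end_turn() above: teams cycle modulo
# team_count, and a wrap back to team 0 starts a new round.
team_count, turn_team, round_count = 3, 0, 1
for _ in range(7):
    turn_team = (turn_team + 1) % team_count
    if turn_team == 0:
        round_count += 1
    print(turn_team, round_count)
# prints: 1 1, 2 1, 0 2, 1 2, 2 2, 0 3, 1 3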
def __init__(self, current_map, pawn_list):
    self.current_map = current_map
    self.class_mobility = {pawn.MOBILE_WALK: 5, pawn.MOBILE_MOUNT: 8}
    self.pawn_list = pawn_list
    self.team_count = current_map.team_count
    self.turn_team = TURN_TEAM_PLAYER
    self.round_count = 1
    if debug:
        logger(u'Logic Module initialized')
        logger(u'Current map: {0}'.format(current_map.map_name))
def buildQueryUrlMap(database, filename):
    # Returns a mapping of keyword -> query -> {count, urls} for a given
    # database, built from the probe queries listed in `filename`.
    logger("Collecting data for " + filename)
    cache = {}
    queriesMappings = readQueryFile(filename)
    for keyword, queries in queriesMappings.iteritems():
        cache[keyword] = {}
        for query in queries:
            results = bing.get_restricted_results(database, query, BING_KEY)[0]
            cache[keyword][query] = {
                "count": int(results.get('WebTotal')),
                "urls": [r["Url"] for r in results.get('Web')]
            }
    return cache
def main():
    global log
    cfg_file = "tevs.cfg"
    out_file = open("summary.csv", "w")
    config.get(cfg_file)
    log = config.logger(util.root("log.txt"))
    #pdb.set_trace()
    #matches = is_this_like_them("hello there",{"hellc there":"bonjour","yabba":"dabba"})
    if not const.use_db:
        print "The configuration file indicates no database is in use."
        print "We will now build totals from the results files."
        build_totals_from_results_files()
        output_totals_from_results_files()
        return 0
    try:
        dbc = db.PostgresDB(const.dbname, const.dbuser)
        print "Generating totals from db %s, user %s" % (const.dbname, const.dbuser)
        print "If there are many vote ops, this may take several minutes."
        print "The next output will be the number of vote opportunities."
        qs = query_database(dbc)
    except db.DatabaseError:
        print "Although the configuration file indicates a database is in use,"
        print "we could not connect for dbname %s user %s." % (const.dbname, const.dbuser)
        print "We will now build totals from the results files."
        build_totals_from_results_files()
        output_totals_from_results_files()
        return 0
    return 0
def new_turn(self):
    if debug:
        logger(u"Team ({0})'s turn to act...".format(self.turn_team))
    for p in self.pawn_list:
        p.action_started = False
        p.turn_finished = False
        p.hero.skill_triggered = False
        p.ai_status = AI_STATUS_IDLE
        p.attack_count = 0
        p.move_count = 0
        p.reset_render_index()
        p.taunted_to = []
        p.action_turn = (p.turn_team == self.turn_team)
    fight_logic_controller.trigger_passive_skills_at_turn_start()
def runner(database, Tc, Ts):
    # The program runner: classify the database, then build its content summary.
    categories, categoryData = classifyDb(database, Tc, Ts)
    logger(">>>>>> Categorization complete: {0} <<<<<<".format("/".join(categories)),
           highlight=True)
    buildContentSummary(categories, categoryData, database)
    logger("Process Complete.")
    logger("Results generated in " + crawler.RESULTS_PATH)
def getPageContent(url):
    # Returns the content of the web page at `url`. Checks the on-disk cache
    # first and, on a miss, downloads the page with lynx and caches it.
    url = url.encode("ascii", "ignore")
    # create the cache folder if it doesn't exist
    if not os.path.exists(CACHE_PATH):
        os.makedirs(CACHE_PATH)
    logger("Fetching " + url)
    filename = os.path.join(CACHE_PATH, md5(url).hexdigest())
    content = None
    if os.path.isfile(filename):
        with open(filename, 'r') as f:
            content = f.read()
    else:
        p = Popen(["lynx", "--dump", url], stdin=PIPE, stdout=PIPE, stderr=PIPE)
        content, err = p.communicate()
        if content:
            with open(filename, 'w') as f:
                f.write(content)
    return content
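# --- Illustrative sketch (not part of the crawler) --------------------------
# The same md5-keyed file-cache pattern in isolation, so the caching logic of
# getPageContent() can be tested without lynx. `fetch` stands in for the
# download step; all names and the stub value are hypothetical.
import os
from hashlib import md5

def cached(cache_dir, url, fetch):
    key = md5(url.encode("ascii", "ignore")).hexdigest()
    path = os.path.join(cache_dir, key)
    if os.path.isfile(path):
        with open(path, "r") as f:
            return f.read()
    content = fetch(url)
    if content:
        if not os.path.exists(cache_dir):
            os.makedirs(cache_dir)
        with open(path, "w") as f:
            f.write(content)
    return content

# usage: cached("/tmp/page_cache", "http://example.com", lambda u: "stub page")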
def take_action(self, pawn_info=None):
    if debug:
        logger(u'{0} {1} is taking action, is_leader = {2}'.format(
            pawn_info.position, pawn_info.hero.name, pawn_info.is_leader))
    if pawn_info.ai_status == AI_STATUS_IDLE:
        value, action = self.evaluated_movement(pawn_info, False)
        if action[0] == AI_ACTION_MOVE:
            logger(u'{0} at {1} decided to move to {2}.'.format(
                pawn_info.hero.name, pawn_info.position, action[1]))
            pawn_info.next_move = [(AI_ACTION_MOVE, action[1])]
            pawn_info.ai_status = AI_STATUS_MOVED
        else:
            logger(u'{0} at {1} decided to attack {2} at {3}'.format(
                pawn_info.hero.name, pawn_info.position,
                logic_controller.get_target_pawn(action[1]).hero.name, action[1]))
            pawn_info.next_move = [(AI_ACTION_ATTACK, action[1])]
    else:
        value, action = self.evaluated_movement(pawn_info, True)
        if action[0] == AI_ACTION_MOVE:
            logger(u'{0} at {1} decided to move to {2}.'.format(
                pawn_info.hero.name, pawn_info.position, action[1]))
            pawn_info.next_move = [(AI_ACTION_MOVE, action[1])]
            pawn_info.turn_finished = True
        else:
            logger(u'{0} at {1} decided to attack {2} at {3}'.format(
                pawn_info.hero.name, pawn_info.position,
                logic_controller.get_target_pawn(action[1]).hero.name, action[1]))
            pawn_info.next_move = [(AI_ACTION_ATTACK, action[1])]
def main(counter, duplex, comment, inches, resolution):
    # read configuration from tevs.cfg and set constants for this run
    const.debug = False  #XXX
    config.get("tevs.cfg")
    util.mkdirp(const.root)
    log = config.logger(util.root("scan.log"))
    inches_to_mm = 25.4
    inc = 2 if duplex else 1
    num = next.IncrementingFile(util.root("nexttoscan.txt"), inc)
    try:
        scanner = Scanner(duplex, int(inches * inches_to_mm), resolution)
        while True:
            counter = num.value()
            print "Scanning", counter
            stamp = datetime.now().isoformat()
            for i, img in enumerate(scanner.scan(counter)):
                # build the destination path from the image number
                n = counter + i
                p = "%03d" % (n / 1000,)
                f = "%06d" % n
                dir = util.root(const.incoming, p)
                util.mkdirp(dir)
                filename = os.path.join(dir, f + ".jpg")
                img.save(filename)
                print "Saved", filename
                log.info("Saved %s at %s\n%s", filename, stamp, comment)
            num.increment_and_save()
    except ScanningException:
        print "Empty feeder?"
        log.info("Scan aborted due to empty feeder for 20 seconds.")
        sys.exit(2)
    except KeyboardInterrupt:
        log.info("Scan aborted by user")
        sys.exit(1)
def trigger_passive_skills_at_turn_start(self):
    """
    Trigger the passive skills that fire before a turn starts, e.g.:
    1) Swiftness (move after fighting)
    """
    if debug:
        logger(u'Triggering passive_skills_at_turn_start')
    for p in self.pawn_list:
        for s in p.hero.skills:
            if s.sid == skill.SKILL_ID_MOVE_AFTER_FIGHT:
                if debug:
                    logger(u'{0} triggered start-of-turn passive skill {1}'.format(
                        p.hero.name, s.name))
                p.hero.use_skill(s)
    if debug:
        logger(u'Finished triggering passive_skills_at_turn_start')
def classifyDb(database, Tc=100, Ts=0.6):
    # Classifies a database by walking the taxonomy, using the coverage
    # threshold Tc and the specificity threshold Ts.
    categories, categoryData = ["Root"], {}
    for cat in categories:
        logger("Analyzing " + cat + " category")
        filename = cat.lower() + ".txt"
        keywords = TAXONOMY.get(cat)
        if keywords:
            queryUrlMap = buildQueryUrlMap(database, filename)
            categoryData.update(queryUrlMap)
            keywordCount = {k: sum([q["count"] for q in queryUrlMap[k].itervalues()])
                            for k in keywords}
            N = float(sum(keywordCount.values()))
            for k, v in keywordCount.items():
                logger("Coverage for {0}: {1}, Specificity: {2}".format(k, str(v), str(v / N)))
                if v >= Tc and v / N >= Ts:
                    logger(">>>>>> Adding " + k + " to category <<<<<<")
                    categories.append(k)
    return (categories, categoryData)
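# --- Hedged worked example (illustrative numbers, not from the source) ------
# How the Tc/Ts test above decides which child categories to expand: coverage
# is the raw hit count for a keyword, specificity is its share of all hits.
counts = {"Computers": 850, "Health": 120, "Sports": 30}
Tc, Ts = 100, 0.6
N = float(sum(counts.values()))  # 1000.0
selected = [k for k, v in counts.items() if v >= Tc and v / N >= Ts]
print(selected)  # ['Computers'] -- 850 >= 100 and 0.85 >= 0.6;
                 # Health passes coverage (120) but fails specificity (0.12)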
def run():
    YouZhi(logger('MakePng')).run()
def main():
    miss_counter = 0
    # get command line arguments
    cfg_file = get_args()
    # read configuration from tevs.cfg and set constants for this run
    config.get(cfg_file)
    util.mkdirp(const.root)
    log = config.logger(const.logfilename)
    log.info("Log created.")
    # create initial toplevel directories if they don't exist
    for p in ("templates", "template_images", "composite_images",
              "results", "proc", "errors"):
        util.mkdirp(util.root(p))
    # make sure you have code for the ballot type spec'd in the config file
    try:
        ballotfrom = Ballot.LoadBallotType(const.layout_brand)
    except KeyError as e:
        util.fatal("No such ballot type: " + const.layout_brand + ": check " + cfg_file)

    cache = Ballot.TemplateCache(util.root("templates"))
    extensions = Ballot.Extensions(template_cache=cache)

    # connect to db and open cursor
    if const.use_db:
        try:
            dbc = db.PostgresDB(database=const.dbname, user=const.dbuser)
        except db.DatabaseError:
            util.fatal("Could not connect to database")
    else:
        dbc = db.NullDB()
    log.info("Database connected.")

    total_images_processed, total_images_left_unprocessed = 0, 0
    base = os.path.basename

    # Each time given a signal to proceed for count_to_process ballots,
    # create ballot from images, get landmarks, get layout code, get votes.
    # Write votes to database and results directory.
    # for profiling
    # from guppy import hpy;hp=hpy();hp.setref();
    # import gc;gc.disable();gc.collect();hp.setref()
    count_to_process = 0
    while True:
        next_ballot_number = int(util.readfrom(util.root("nexttoprocess.txt")))
        if count_to_process == 0:
            # wait here until get_count_to_process returns;
            # it will wait on an input instruction from stdio
            processing_command = get_processing_command(next_ballot_number)
            if processing_command.startswith("+"):
                next_ballot_number += const.num_pages
                util.writeto(util.root("nexttoprocess.txt"), next_ballot_number)
                count_to_process = 1
            if processing_command.startswith("="):
                next_ballot_number = int(processing_command[1:])
                util.writeto(util.root("nexttoprocess.txt"), next_ballot_number)
                count_to_process = 1
            if processing_command.startswith("S"):
                count_to_process = 1
            if processing_command.startswith("0"):
                count_to_process = 0
            # we're done when we get instructed to process 0
            if count_to_process == 0:
                break
        count_to_process -= 1
        try:
            # get number of next image; clean up, in case...
            gc.collect()
            log.debug("Request for %d" % (next_ballot_number,))
            unprocs = [incomingn(next_ballot_number + m)
                       for m in range(const.num_pages)]
            log.info(unprocs)
            # we need all images for the sheet to be available to process it
            for filename in unprocs:
                if not os.path.exists(filename):
                    errmsg = "File %s not present or available!" % (base(filename),)
                    log.info(errmsg)
                    # if a file is not yet available, that's not fatal
                    raise FileNotPresentException(errmsg)
            # Processing
            #log.info("Processing %s:\n %s" %
            #         (n, "\n".join("\t%s" % base(u) for u in unprocs)))
            log.debug("Creating ballot.")
            try:
                ballot = ballotfrom(unprocs, extensions)
                log.debug("Created ballot, processing.")
                results = ballot.ProcessPages()
                log.debug("Processed.")
            except BallotException as e:
                total_images_left_unprocessed += mark_error(e, *unprocs)
                log.exception("Could not process ballot")
                continue

            # Write all data
            # make dirs:
            proc1d = dirn("proc", next_ballot_number)
            resultsd = dirn("results", next_ballot_number)
            resultsfilename = filen(resultsd, next_ballot_number)
            for p in (proc1d, resultsd):
                util.mkdirp(p)
            #try:
            #    results_to_vop_files(results, resultsfilename)
            #except Exception as e:
            #    log.info(e)
            #    print e
            # write csv and mosaic
            #log.info("local results_to_CSV")
            #csv = results_to_CSV(results, log)
            #log.info("Back from results_to_CSV")
            #util.genwriteto(resultsfilename + ".csv", csv)
            # write to the database
            try:
                log.debug("Inserting to db")
                dbc.insert(ballot)
            except db.DatabaseError:
                # dbc does not commit if there is an error; just need to
                # remove partial files
                remove_partial(resultsfilename + ".txt")
                remove_partial(resultsfilename + const.filename_extension)
                log.info("Could not commit to db")
                print "Could not commit to db!"
                util.fatal("Could not commit vote information to database")

            # Post-processing: move the images from unproc to proc
            log.debug("Renaming")
            procs = [filen(proc1d, next_ballot_number + m) + const.filename_extension
                     for m in range(const.num_pages)]
            for a, b in zip(unprocs, procs):
                try:
                    os.rename(a, b)
                except OSError as e:
                    log.info("Could not rename %s" % a)
                    util.fatal("Could not rename %s", a)
            total_images_processed += const.num_pages
            # Tell the caller you've processed all images of this ballot
            log.debug("Requesting next")
            util.writeto(util.root("nexttoprocess.txt"),
                         next_ballot_number + const.num_pages)
            # update next ballot file with next image number
            log.debug("Done writing nexttoprocess.txt")
            print "%d extracted. " % (next_ballot_number,)
            log.info("%d images processed", const.num_pages)
            # for profiling
            # hp.heap().dump('prof.hpy');hp.setref();gc.collect();
            # hp.setref();hp.heap().dump('prof.hpy')
        except FileNotPresentException as e:
            print e
        sys.stdout.flush()
# -*- encoding:utf-8-*-
# Collects all the data needed for the crude-oil daily report
import json
from config import logger
from WindPy import *
from bs4 import BeautifulSoup
import requests, datetime, re, pymssql, pymysql

logger = logger('GUZHI')


class YuanYou():
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:67.0) Gecko/20100101 Firefox/67.0',
        'Host': 'www.99qh.com'
    }
    logger = logger

    def __init__(self):
        self.today = datetime.datetime.today()

    # Parse the list page of the variety-analysis section
    def pzfx_parse_lsit_html(self, html):
        soup = BeautifulSoup(html, 'lxml')
        table = soup.find('td', class_='s_99qh_line6').table
        if not table:
            return
        # <span class="s_99qh_12px"><a href="http://www.99qh.com/s/news20190531165501060.shtml" target="_blank"
        span_list = table.find_all('span', class_='s_99qh_12px')
        url_list = [
# -*- coding: utf-8 -*-
from config import logger, SelectDatas
import json, os
import matplotlib as mpl
import matplotlib.pyplot as plt

logger = logger('MakePng')

plt.style.use('bmh')
# Configure Chinese font rendering and make the minus sign display correctly
plt.rcParams['font.sans-serif'] = [u'SimHei']
plt.rcParams['axes.unicode_minus'] = False


class JiaChun():
    def __init__(self):
        # save paths for the generated images
        self.save_path = r'../static/images/jiachun/%s'
        self.dir_name = '../static/images/jiachun/'

    def is_save_dir(self):
        # create the output folder if it does not exist
        if not os.path.exists(self.dir_name):
            os.mkdir(r'%s' % (self.dir_name))

    def jicha(self, param, png_path_name):
        date_list = [i[0] for i in param]
        data_list1 = [i[1] for i in param]
        data_list2 = [i[2] for i in param]
        data_list3 = [i[3] for i in param]
        # primary and secondary axes
        fig, ax = plt.subplots(1, 1)
import sys, os, time, re, shutil, logging
import ftplib, signal, pymysql
import traceback
import config, ftp_db
from ftp_down_cfg import *
from ftp import *

taskId = int(sys.argv[1])
logger = config.logger('ftp_down_' + str(taskId), **config.logger_args)

# if len(sys.argv) < 2:
#     print("A task ID is required as a launch argument")
#     exit()


def checkLocalPath():
    if not os.path.exists(Tasks[taskId]["delivery"]["target"]["root_dir"]):
        err_msg = "Local file directory for task Tasks[%d] does not exist" % taskId
        return False, err_msg
    if not os.path.exists(Tasks[taskId]["delivery"]["target"]["tmp_dir"]):
        err_msg = "Local temp-file directory for task Tasks[%d] does not exist" % taskId
        return False, err_msg
    return True, ""


def checkRemotePath():
    # open the FTP connection
    ftp, msg = open_ftp(Tasks[taskId]["delivery"]["source"]["host"])
    if not ftp:
        err_msg = "FTP connection failed: %s; check the FtpInfo[%d] configuration" % (msg, taskId)
def main():
    miss_counter = 0
    # get command line arguments
    cfg_file = get_args()
    # read configuration from tevs.cfg and set constants for this run
    config.get(cfg_file)
    util.mkdirp(const.root)
    log = config.logger(const.logfilename)
    # create initial top level dirs, if they do not exist
    for p in ("templates",
              "%s%d" % ("template_images", os.getpid()),
              "%s%d" % ("composite_images", os.getpid()),
              "results", "proc", "errors"):
        util.mkdirp(util.root(p))

    next_ballot = next.File(util.root("nexttoprocess.txt"), const.num_pages)

    try:
        ballotfrom = Ballot.LoadBallotType(const.layout_brand)
    except KeyError as e:
        util.fatal("No such ballot type: " + const.layout_brand + ": check " + cfg_file)

    # allow all instances to share a common template location,
    # though we need per-pid locations for template_images and composite_images
    cache = Ballot.TemplateCache(util.root("templates"))
    extensions = Ballot.Extensions(template_cache=cache)

    # connect to db and open cursor
    if const.use_db:
        try:
            dbc = db.PostgresDB(const.dbname, const.dbuser)
        except db.DatabaseError:
            util.fatal("Could not connect to database")
    else:
        dbc = db.NullDB()

    total_proc, total_unproc = 0, 0
    base = os.path.basename

    # While ballot images exist in the directory specified in tevs.cfg,
    # create ballot from images, get landmarks, get layout code, get votes.
    # Write votes to database and results directory. Repeat.
    #from guppy import hpy;hp=hpy();hp.setref();import gc;gc.disable();gc.collect();hp.setref()
    try:
        for n in next_ballot:
            gc.collect()
            unprocs = [incomingn(n + m) for m in range(const.num_pages)]
            if not os.path.exists(unprocs[0]):
                miss_counter += 1
                log.info(base(unprocs[0]) + " does not exist. No more records to process")
                if miss_counter > 10:
                    break
                continue
            #for i, f in enumerate(unprocs[1:]):
            #    if not os.path.exists(f):
            #        log.info(base(f) + " does not exist. Cannot proceed.")
            #        for j in range(i):
            #            log.info(base(unprocs[j]) + " will NOT be processed")
            #        total_unproc += mark_error(None, *unprocs[:i-1])

            # Processing
            log.info("Processing %s:\n %s" %
                     (n, "\n".join("\t%s" % base(u) for u in unprocs)))
            try:
                ballot = ballotfrom(unprocs, extensions)
                results = ballot.ProcessPages()
            except BallotException as e:
                total_unproc += mark_error(e, *unprocs)
                log.exception("Could not process ballot")
                continue

            csv = Ballot.results_to_CSV(results)
            #moz = Ballot.results_to_mosaic(results)

            # Write all data
            # make dirs:
            proc1d = dirn("proc", n)
            resultsd = dirn("results", n)
            resultsfilename = filen(resultsd, n)
            for p in (proc1d, resultsd):
                util.mkdirp(p)
            try:
                results_to_vop_files(results, resultsfilename)
            except Exception as e:
                print e
            # write csv and mosaic
            util.genwriteto(resultsfilename + ".txt", csv)
            # write to the database
            try:
                dbc.insert(ballot)
            except db.DatabaseError:
                # dbc does not commit if there is an error; just need to
                # remove partial files
                remove_partial(resultsfilename + ".txt")
                remove_partial(resultsfilename + const.filename_extension)
                util.fatal("Could not commit vote information to database")

            # Post-processing: move the images from unproc to proc
            procs = [filen(proc1d, n + m) + const.filename_extension
                     for m in range(const.num_pages)]
            for a, b in zip(unprocs, procs):
                try:
                    os.rename(a, b)
                except OSError as e:
                    util.fatal("Could not rename %s", a)
            total_proc += const.num_pages
            log.info("%d images processed", const.num_pages)
            #hp.heap().dump('prof.hpy');hp.setref();gc.collect();hp.setref();hp.heap().dump('prof.hpy')
    finally:
        cache.save_all()
        dbc.close()
        next_ballot.save()
        log.info("%d images processed", total_proc)
        if total_unproc > 0:
            log.warning("%d images NOT processed.", total_unproc)
# -*- encoding:utf-8-*-
# Collects all the data needed for the plastics daily report
from config import logger
from bs4 import BeautifulSoup
import requests
import re
from WindPy import *
import datetime
import pymssql
import pymysql
import json

logger = logger('SHUHUA')


class ShuHua():
    header = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:67.0) Gecko/20100101 Firefox/67.0',
        'Host': 'www.99qh.com'
    }
    logger = logger

    def __init__(self):
        self.today = datetime.datetime.today()

    # Parse the list page of the variety-analysis section
    def pzfx_parse_lsit_html(self, html):
        soup = BeautifulSoup(html, 'lxml')
        table = soup.find('td', class_='s_99qh_line6').table
        if not table:
            return
# -*- encoding:utf-8-*-
# Collects all the data needed for the oils-and-fats daily report
import json
from config import logger
from WindPy import *
from bs4 import BeautifulSoup
import datetime, re, pymssql, pymysql

logger = logger('YUMI')


class YuMi():
    logger = logger

    def __init__(self):
        self.today = datetime.datetime.today()

    def close_api(self, param1, param2, num):
        # Fetch `num` days of closing prices for the two Wind codes and compute
        # the spread (jg), the previous spread (zrjg) and the change (zd).
        oneday = datetime.timedelta(days=1)
        lastday = self.today - num * oneday
        wData = w.wsd('%s,%s' % (param1, param2), "close",
                      str(lastday)[:10], str(self.today)[:10], "")
        data1 = wData.Data[0]
        data2 = wData.Data[1]
        jg = data1[-1] - data2[-1]
        zrjg = data1[-2] - data2[-2]
        if jg == zrjg:
            # if the spread is unchanged, compare against one day earlier instead
            zrjg = data1[-3] - data2[-3]
        zd = jg - zrjg
        return {'jg': jg, 'zrjg': zrjg, 'zd': zd}
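# --- Hedged worked example (toy numbers, no Wind connection) ----------------
# The spread logic of close_api() above, isolated so it can be checked
# without the w.wsd API. The price series below are made up.
data1 = [2300.0, 2310.0, 2325.0]  # closes for param1, oldest -> newest
data2 = [2250.0, 2255.0, 2260.0]  # closes for param2

jg = data1[-1] - data2[-1]    # today's spread: 65.0
zrjg = data1[-2] - data2[-2]  # yesterday's spread: 55.0
if jg == zrjg:                # unchanged -> step back one more day
    zrjg = data1[-3] - data2[-3]
zd = jg - zrjg                # change in spread: 10.0
print({'jg': jg, 'zrjg': zrjg, 'zd': zd})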
# -*- encoding:utf-8-*-
# Collects all the data needed for the crude-oil daily report
import json
from config import logger
from WindPy import *
from bs4 import BeautifulSoup
import datetime, re, pymssql, pymysql

logger = logger('YOUZHI')


class YouZhi():
    logger = logger

    def __init__(self):
        self.today = datetime.datetime.today()

    # Parse the list page of the variety-analysis section
    # Fetch closing prices from Wind (same shape as YuMi.close_api above)
    def close_api(self, param1, param2, num):
        oneday = datetime.timedelta(days=1)
        lastday = self.today - num * oneday
        wData = w.wsd('%s,%s' % (param1, param2), "close",
                      str(lastday)[:10], str(self.today)[:10], "")
        data1 = wData.Data[0]
        data2 = wData.Data[1]
        jg = data1[-1] - data2[-1]
        zrjg = data1[-2] - data2[-2]
        if jg == zrjg:
            zrjg = data1[-3] - data2[-3]
        zd = jg - zrjg
        return {'jg': jg, 'zrjg': zrjg, 'zd': zd}
    def run(self):
        self.logger.info('MAKING YOUZHI PNG...')
        self.is_save_dir()
        tid_item = {
            'a': {'1': 'S0264410', '5': 'S0264411', '9': 'S0264412'},
            'm': {'1': 'S0264422', '5': 'S0264423', '9': 'S0264424'},
            'y': {'1': 'S0264428', '5': 'S0264429', '9': 'S0264430'},
            'p': {'1': 'S0264448', '5': 'S0264449', '9': 'S0264450'},
            'c': {'1': 'S0264478', '5': 'S0264479', '9': 'S0264480'},
            'jd': {'1': 'S0264472', '5': 'S0264473', '9': 'S0264474'},
            'cs': {'1': 'S0264484', '5': 'S0264485', '9': 'S0264486'},
            'rs': {'1': 'S0264436', '5': 'S0264437', '9': 'S0264438'},
            'rm': {'1': 'S0264442', '5': 'S0264443', '9': 'S0264444'},
            'sr': {'1': 'S0264454', '5': 'S0264455', '9': 'S0264456'},
            'cf': {'1': 'S0264460', '5': 'S0264461', '9': 'S0264462'},
        }
        pz_list = []
        for k, v in tid_item.items():
            for k2, v2 in v.items():
                pz_list.append(k + '-' + k2)
        # all ordered pairs of contract codes (`it` is presumably itertools,
        # imported elsewhere in this module)
        res = list(it.permutations(pz_list, 2))
        print(res)
        print(len(res))
        for i in res:
            print(i)
        # SelectDatas.select_target_datas()
        # self.xh(xhkc_item, self.save_path % 'xhkc')


if __name__ == '__main__':
    yz = YouZhi(logger('MakePng'))
    yz.run()