def main():
    """Annotate a PGN game tree with opening-database statistics.

    Walks every node of the loaded game, asks the Lichess opening
    database for move statistics, and either suggests a better move (on
    our turns) or adds popular replies (on the opponent's turns).
    Transpositions are cross-referenced via generated jump labels stored
    in the node comments.
    """
    args = dirtyopts.parse(docs)
    game = util.loadpgn(args.pgn)  # if args.pgn else chess.pgn.Game()
    fens = {}  # FEN -> game node; used to detect transpositions

    def getjmp():
        # Produce a fresh jump label like "#a#"; `lalala` is a
        # module-level character-code counter shared across calls.
        global lalala
        label = f"#{chr(lalala)}#"
        lalala += 1
        return label

    mydb = tree.lichess(args.dbus)
    if args.dbus == args.dbthem:
        db = mydb
    else:
        db = tree.lichess2(args.dbthem)

    def myturn(gn):
        # True when it is our side to move at this node.
        return gn.ply() % 2 != (args.color == 'white')

    def visit(gn):
        '''
        a visitation does this:
        1. ask lichess what's up (and print status)
        2. decide if we need to add moves
        3. update probabilities of all children
        '''
        fen = gn.board().fen()
        if fen in fens:
            # Seen before: transposition.  Link the two nodes via a
            # jump label and stop descending this branch.
            jump = getjmp()
            gn.comment += f"to {jump}"
            other = fens[fen]
            other.comment += f"from {jump}"
            return []
        # First time we see this position.
        fens[fen] = gn
        try:
            moves, movesum, opening = (mydb if myturn(gn) else db).ask(fen)
        except Exception as exc:
            # Close both database connections before bailing out; the
            # most likely cause is sending requests too fast (rate
            # limit).  Was `assert False, ...`, which is stripped under
            # -O and would leave `moves` undefined -- raise instead.
            db.end()
            mydb.end()
            raise RuntimeError('sending requests too fast probably') from exc
        if not moves:  # too few games in the database for this position
            return []
        print(gn.ply())
        print(gn.board().unicode(invert_color=True))
        skip = gn.ply() < args.minply
        if myturn(gn) and not skip:
            best_san = util.find_best(moves, gn.ply())
            # Only trust the "best" move if it has been played at least
            # 50 times.  NOTE(review): this branch uses
            # bunseki.util.sumdi while the opponent branch uses
            # tree.sumdi -- confirm both refer to the same helper.
            best_san_valid = bunseki.util.sumdi(moves[0]) >= 50
            mov = gn.board().push_san(best_san)
            if gn.variations:
                if not gn.has_variation(mov) and best_san_valid:
                    gn.variations[0].comment += f" better:{best_san}"
            elif best_san_valid:
                gn.add_variation(mov)
        elif not skip:
            # Opponent's turn: add every reply that is both frequent
            # enough in absolute terms (args.cut) and at least 20% as
            # common as the most popular move.
            second_border = tree.sumdi(moves[0]) * .2
            for move in moves:
                freq = tree.sumdi(move)
                if freq > args.cut and freq > second_border:
                    mov = gn.board().push_san(move['san'])
                    if not gn.has_variation(mov):
                        # move is not yet in the list and noteworthy
                        gn.add_variation(mov)
        return gn.variations

    # Main loop: depth-first traversal over the game tree.
    nodelist = [game]
    while nodelist:
        node = nodelist.pop()
        nodelist += visit(node)
    db.end()
    mydb.end()
    print(game)
import logging

logger = logging.getLogger(__name__)

# Import order preserved: the star import must stay first so later
# explicit imports win name collisions, and the matplotlib backend must
# be selected before pyplot is imported.
from lmz import *
import basics as ba
import time
import random
import numpy as np
import structout as so
from sklearn.metrics.pairwise import euclidean_distances
from graken.pareto import pareto_funcs
from graken.pareto import editdistance
from scipy import sparse
import dirtyopts as opts

# Module-level option: --drawerror toggles sixel plotting support.
doc = '--drawerror bool False'
moduleargs = opts.parse(doc)
if moduleargs.drawerror:
    import matplotlib
    # Select the sixel backend before pyplot is imported.
    matplotlib.use('module://matplotlib-sixel')
    import matplotlib.pyplot as plt


def calc_average(l):
    """Return the arithmetic mean of *l*, or 0 for an empty sequence.

    Small helper to mitigate the possibility of an empty list of
    average productions.
    """
    if not l:  # avoid ZeroDivisionError on empty input
        return 0
    return sum(l) / len(l)
--node_labels int 4
--edge_labels int 2
--labeldistribution str uniform
--maxdeg int 3
'''

# Option spec for the iteration step: generate `numgr` graphs over
# `iter` rounds and dump them to `out`.
docIter = '''
--numgr int 510
--iter int 3
--out str nugraphs
--graphsize int 8
--bottleneck int 100
'''

if __name__ == "__main__":
    # 1) build the static start graphs, 2) iterate the rule-based
    # random-graph generator on them, 3) dump the result.
    args = opts.parse(docStartgraphs)
    startgraphs = rg.make_graphs_static(**args.__dict__)
    so.gprint(startgraphs[:3])
    args = opts.parse(docIter)
    out = args.__dict__.pop('out')
    graphs, _ = rrg.rule_rand_graphs(startgraphs, **args.__dict__)
    so.gprint(graphs[:3])
    dumpfile(graphs, out)


############
# CHEM STUFF COMES LATER
#############

'''
def get_chem_filenames():
# OPTIMIZER --removedups bool False --n_iter int 10 --pareto str default ['default', 'random', 'greedy', 'paretogreed', 'pareto_only', 'all'] --keepgraphs int 30 --out str res/out.txt ''' if __name__ == "__main__": ################### # 1. LOAD ################### starttime = time.time() args = opts.parse(doc) logger.debug(args.__dict__) graphs = loadfile(args.i) print(f"########## THERE ARE {len(graphs)} graphs in the file ##########") if args.specialset: graphs, origs = graphs[:-30], graphs[-30:] if args.shuffle != -1: random.seed(args.shuffle) random.shuffle(graphs) random.shuffle(origs) assert args.n_train <= len(graphs) domain = graphs[:args.n_train] target = origs[args.taskid] else:
def makequiz(color, inpgn, outdeck):
    """Build a quiz deck from a PGN file and dump it as JSON.

    Traverses every variation of the loaded game; for each node that
    yields a quiz entry, renders the position as a decorated unicode
    board and stores it under the hash of its FEN together with the
    answer variation and a traversal-order sort key.
    """
    game = util.loadpgn(inpgn)
    stack = [game]
    data = {}  # fen-hash: {q: board, a: variation, sort: order}
    order = 0
    while stack:
        node = stack.pop()
        gn, mv = add_to_data(node, color)
        if gn:
            board_txt = gn.board().unicode(invert_color=False, empty_square='x')
            board_txt = decorate_unicode_2(board_txt, color=color)
            data[hash(gn.board().fen())] = {'q': board_txt, 'a': mv, 'sort': order}
            order += 1
        stack += node.variations
    # p.pprint(data)
    ba.jdumpfile(data, outdeck)


if __name__ == "__main__":
    import dirtyopts as opts
    doc = '''
--color str white
--inpgn str myfile.pgn
--outdeck str quiz.json
'''
    args = opts.parse(doc).__dict__
    makequiz(**args)
def prep(aidfile):
    # Load the chem graphs and shuffle them in place before returning.
    stuff =load_chem(aidfile)
    random.shuffle(stuff)
    return stuff

def load_chem(AID):
    """Load node-link graphs from a JSON file and clean them up.

    Keeps only connected graphs with more than 2 nodes, strips
    graph-level attributes, reports the duplicate count, and trims the
    10% smallest and 10% largest graphs by node count.
    """
    with open(AID, 'r') as handle:
        js = json.load(handle)
    res = [sg.node_link_graph(jsg) for jsg in js]
    res = [g for g in res if len(g)> 2]
    res = [g for g in res if nx.is_connected(g)] # rm not connected crap
    # Drop graph-level metadata so only node/edge data remains.
    for g in res:g.graph={}
    zz = vector.Vectorizer()
    res2 = list(zz._duplicate_rm(res, {}))
    print ("duplicates in chem files:%d"% (len(res)-len(res2)))
    # NOTE(review): the deduplicated list above is only used for the
    # printed count; the size-trimming below rebuilds res2 from `res`,
    # duplicates included -- confirm whether that is intentional.
    zomg = [(len(g),g) for g in res]
    zomg.sort(key=lambda x:x[0])
    cut = int(len(res)*.1)
    res2 = [b for l,b in zomg[cut:-cut]]
    return res2

if __name__=="__main__":
    args = opts.parse(chemfiles)
    graphs = prep(args.json)
    dumpfile(graphs, args.out)