def setUp(self):
    with open('gamefiles/3fold.pgn') as f1:
        self.PgnFile1 = pgn.load(f1)
    with open('gamefiles/bilbao.pgn') as f2:
        self.PgnFile2 = pgn.load(f2)
    with open('gamefiles/material.pgn') as f3:
        self.PgnFile3 = pgn.load(f3)

def setUp(self):
    self.f1 = protoopen('gamefiles/3fold.pgn')
    self.PgnFile1 = pgn.load(self.f1)
    self.f2 = protoopen('gamefiles/bilbao.pgn')
    self.PgnFile2 = pgn.load(self.f2)
    self.f3 = protoopen('gamefiles/material.pgn')
    self.PgnFile3 = pgn.load(self.f3)

def setUp(self):
    self.f1 = protoopen('gamefiles/3fold.pgn')
    self.PgnFile1 = pgn.load(self.f1)
    self.PgnFile1.get_records()
    self.f2 = protoopen('gamefiles/bilbao.pgn')
    self.PgnFile2 = pgn.load(self.f2)
    self.PgnFile2.get_records()
    self.f3 = protoopen('gamefiles/material.pgn')
    self.PgnFile3 = pgn.load(self.f3)
    self.PgnFile3.get_records()

@classmethod
def setUpClass(cls):
    cls.f1 = protoopen('gamefiles/3fold.pgn')
    cls.PgnFile1 = pgn.load(cls.f1)
    cls.PgnFile1.get_records()
    cls.f2 = protoopen('gamefiles/bilbao.pgn')
    cls.PgnFile2 = pgn.load(cls.f2)
    cls.PgnFile2.get_records()
    cls.f3 = protoopen('gamefiles/material.pgn')
    cls.PgnFile3 = pgn.load(cls.f3)
    cls.PgnFile3.get_records()

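# A minimal counterpart sketch, assuming protoopen returns a closable file-like
# object: the handles stored by the fixtures above would normally be released
# in a matching tearDownClass. The names mirror the setUpClass variant above;
# this method is not taken from the original file.
@classmethod
def tearDownClass(cls):
    cls.f1.close()
    cls.f2.close()
    cls.f3.close()
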
def pgn_test(self, name):
    pgnfile = load(protoopen("gamefiles/%s.pgn" % name))
    pgnfile.limit = 1000
    pgnfile.init_tag_database()
    games, plys = pgnfile.get_records()
    for i, game in enumerate(games):
        print("%s/%s" % (i + 1, pgnfile.get_count()))
        orig_moves_text = normalize(pgnfile.get_movetext(game))

        model = pgnfile.loadToModel(game)
        print(model.tags["Site"])

        new_moves = []
        walk(model.boards[0].board, new_moves, model)
        new_moves_text = normalize(" ".join(new_moves))

        for orig, new in zip(orig_moves_text.split(), new_moves_text.split()):
            # Many PGN files use over-disambiguated notation even when the
            # second move candidate is invalid (leaves the king in check),
            # f.e.: 1.e4 e5 2.d4 Nf6 3.Nc3 Bb4 Nge2
            if len(orig) == len(new) + 1 and orig[0] == new[0] and orig[2:] == new[1:]:
                continue
            elif orig[-1] in "?!" and new[-1] not in "?!":
                # PGN export format uses NAGs instead of !/? suffixes
                break
            elif (orig == "0-0" and new == "O-O") or (orig == "0-0-0" and new == "O-O-O"):
                continue
            self.assertEqual(orig, new)
    pgnfile.close()

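# A hedged sketch of how pgn_test might be driven from unittest; the TestCase
# name is hypothetical, pgn_test is assumed to be defined on (or mixed into)
# the class, and "world_matches" is one of the game files referenced in the
# file-name tuples further below.
import unittest

class PgnRoundTripTest(unittest.TestCase):
    def test_world_matches(self):
        # Round-trips gamefiles/world_matches.pgn through loadToModel/walk.
        self.pgn_test("world_matches")

if __name__ == "__main__":
    unittest.main()
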
def opening():
    if filename.endswith(".pgn"):
        chessfile = pgn.load(protoopen(filename), self.progressbar1)
    elif filename.endswith(".epd"):
        chessfile = epd.load(protoopen(filename))
    elif filename.endswith(".fen"):
        chessfile = fen.load(protoopen(filename))
    else:
        chessfile = None
    GLib.idle_add(self.spinner.stop)
    GLib.idle_add(self.progress_dialog.hide)
    if chessfile is not None:
        GLib.idle_add(self.emit, "chessfile_opened", chessfile)

def opening():
    if filename.endswith(".pgn"):
        chessfile = pgn.load(protoopen(filename), self.progressbar1)
    elif filename.endswith(".epd"):
        chessfile = epd.load(protoopen(filename))
    elif filename.endswith(".fen"):
        chessfile = fen.load(protoopen(filename))
    else:
        chessfile = None
    GLib.idle_add(self.spinner.stop)
    GLib.idle_add(self.progress_dialog.hide)
    if chessfile is not None:
        self.chessfile = chessfile
        self.chessfiles.append(chessfile)
        GLib.idle_add(self.emit, "chessfile_opened0", chessfile)

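# The GLib.idle_add calls above suggest opening() is meant to run off the GTK
# main loop; a hedged sketch of one way it could be dispatched (the thread
# name is illustrative, not the project's actual wiring).
from threading import Thread

loader_thread = Thread(target=opening, name="chessfile_opener", daemon=True)
loader_thread.start()
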
def open_chessfile(self, filename):
    if filename.endswith(".pdb"):
        chessfile = database.load(filename)
    elif filename.endswith(".pgn"):
        chessfile = pgn.load(protoopen(filename))
    elif filename.endswith(".epd"):
        chessfile = epd.load(protoopen(filename))
    elif filename.endswith(".fen"):
        chessfile = fen.load(protoopen(filename))
    else:
        return
    if self.gamelist is None:
        self.init_layout()
    perspective_manager.activate_perspective("database")
    self.emit("chessfile_opened", chessfile)

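# An equivalent, table-driven sketch of the extension dispatch above; the
# loader callables are the ones already referenced, while the LOADERS table
# and the load_by_extension helper are hypothetical names.
LOADERS = {
    ".pdb": lambda path: database.load(path),
    ".pgn": lambda path: pgn.load(protoopen(path)),
    ".epd": lambda path: epd.load(protoopen(path)),
    ".fen": lambda path: fen.load(protoopen(path)),
}

def load_by_extension(path):
    for ext, loader in LOADERS.items():
        if path.endswith(ext):
            return loader(path)
    return None
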
def feed(pgnfile, lang):
    cf = load(protoopen(pgnfile))
    cf.limit = 5000
    importer = PgnImport(cf)
    cf.init_tag_database(importer)
    records, plys = cf.get_records()

    rows = []
    old_eco = ""
    for rec in records:
        model = cf.loadToModel(rec)

        eco = rec["ECO"]

        opening = rec["White"]
        if opening is None:
            opening = ""

        variation = rec["Black"]
        if variation is None:
            variation = ""

        base = int(old_eco != eco)

        ply = len(model.moves)

        if ply == 0:
            # With no moves, reuse the hash already stored for the English base entry
            cu = conn.cursor()
            cu.execute("select * from openings where eco=? and lang='en' and base=1", (eco, ))
            res = cu.fetchone()
            if res is not None:
                hash = res[0]
        else:
            # Otherwise hash the final position of the line
            hash = memoryview(hash_struct.pack(model.boards[-1].board.hash))

        if opening:
            rows.append((hash, base, eco, lang, opening, variation))

        old_eco = eco

    c.executemany(
        "insert into openings(hash, base, eco, lang, opening, variation) values (?, ?, ?, ?, ?, ?)",
        rows)
    conn.commit()

def feed(pgnfile, lang):
    cf = load(protoopen(pgnfile))

    rows = []
    old_eco = ""
    ply_max = 0
    for i, game in enumerate(cf.games):
        model = cf.loadToModel(i)

        eco = cf._getTag(i, "ECO")[:3]

        opening = cf._getTag(i, "Opening")
        if opening is None:
            opening = ""

        variation = cf._getTag(i, "Variation")
        if variation is None:
            variation = ""

        base = int(old_eco != eco)

        ply = len(model.moves)
        ply_max = max(ply_max, ply)

        if ply == 0:
            # With no moves, reuse the hash already stored for the English base entry
            cu = conn.cursor()
            cu.execute("select * from openings where eco=? and lang='en' and base=1", (eco, ))
            res = cu.fetchone()
            if res is not None:
                hash = res[0]
        else:
            # Otherwise hash the final position of the line
            hash = memoryview(hash_struct.pack(model.boards[-1].board.hash))

        if opening:
            rows.append((hash, base, unicode(eco), unicode(lang), unicode(opening), unicode(variation)))

        old_eco = eco

    c.executemany(
        "insert into openings(hash, base, eco, lang, opening, variation) values (?, ?, ?, ?, ?, ?)",
        rows)
    conn.commit()

    print("Max ply was %s" % ply_max)

def queryGameno(path):
    pgnfile = pgn.load(protoopen(path))

    print "Selected file %s" % path
    if len(pgnfile) == 0:
        print "The file is empty."
        sys.exit()
    print
    print "The file contains the following games:"
    for i in xrange(len(pgnfile)):
        name1, name2 = pgnfile.get_player_names(i)
        print "[%d] %s vs. %s" % (i, name1, name2)
    print

    if len(pgnfile) == 1:
        print "Autoselecting game 0."
        gameno = 0
    else:
        gameno = int(raw_input("What engine should be your analyzer? [n] "))
        print

    return pgnfile, gameno

def queryGameno(path):
    pgnfile = pgn.load(protoopen(path))

    print("Selected file %s" % path)
    if len(pgnfile) == 0:
        print("The file is empty.")
        sys.exit()
    print()
    print("The file contains the following games:")
    for i in range(len(pgnfile)):
        name1, name2 = pgnfile.get_player_names(i)
        print("[%d] %s vs. %s" % (i, name1, name2))
    print()

    if len(pgnfile) == 1:
        print("Autoselecting game 0.")
        gameno = 0
    else:
        gameno = int(input("Select game number to be analyzed. [n]: "))
        print()

    return pgnfile, gameno

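# A hedged sketch of how queryGameno might be invoked from the analyzer
# script's entry point; the argument handling is illustrative only.
import sys

if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Usage: %s <file.pgn>" % sys.argv[0])
        sys.exit(1)
    pgnfile, gameno = queryGameno(sys.argv[1])
    print("Analyzing game %d of %s" % (gameno, sys.argv[1]))
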
def feed(pgnfile, lang):
    cf = load(protoopen(pgnfile))

    rows = []
    old_eco = ""
    ply_max = 0
    for i, game in enumerate(cf.games):
        model = cf.loadToModel(i)

        eco = cf._getTag(i, "ECO")[:3]

        opening = cf._getTag(i, "Opening")
        if opening is None:
            opening = ""

        variation = cf._getTag(i, "Variation")
        if variation is None:
            variation = ""

        base = int(old_eco != eco)

        ply = len(model.moves)
        ply_max = max(ply_max, ply)

        if ply == 0:
            # With no moves, reuse the hash already stored for the English base entry
            cu = conn.cursor()
            cu.execute("select * from openings where eco=? and lang='en' and base=1", (eco,))
            res = cu.fetchone()
            if res is not None:
                hash = res[0]
        else:
            # Otherwise hash the final position of the line
            hash = memoryview(hash_struct.pack(model.boards[-1].board.hash))

        if opening:
            rows.append((hash, base, unicode(eco), unicode(lang), unicode(opening), unicode(variation)))

        old_eco = eco

    c.executemany(
        "insert into openings(hash, base, eco, lang, opening, variation) values (?, ?, ?, ?, ?, ?)",
        rows)
    conn.commit()

    print("Max ply was %s" % ply_max)

def feed(pgnfile, lang):
    cf = load(protoopen(pgnfile))
    cf.limit = 5000
    cf.init_tag_database()
    records, plys = cf.get_records()

    rows = []
    old_eco = ""
    for rec in records:
        model = cf.loadToModel(rec)

        eco = rec["ECO"]

        opening = rec["White"]
        if opening is None:
            opening = ""

        variation = rec["Black"]
        if variation is None:
            variation = ""

        base = int(old_eco != eco)

        ply = len(model.moves)

        if ply == 0:
            # With no moves, reuse the hash already stored for the English base entry
            cu = conn.cursor()
            cu.execute("select * from openings where eco=? and lang='en' and base=1", (eco,))
            res = cu.fetchone()
            if res is not None:
                hash = res[0]
        else:
            # Otherwise hash the final position of the line
            hash = memoryview(hash_struct.pack(model.boards[-1].board.hash))

        if opening:
            rows.append((hash, base, eco, lang, opening, variation))

        old_eco = eco

    c.executemany(
        "insert into openings(hash, base, eco, lang, opening, variation) values (?, ?, ?, ?, ?, ?)",
        rows)
    conn.commit()

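# The feed variants above rely on a module-level hash_struct that is never
# shown in these snippets; a plausible definition (an assumption, not
# confirmed here) packs the 64-bit board hash so it can be stored as a BLOB.
from struct import Struct

# Assumption: board.hash is an unsigned 64-bit Zobrist/Polyglot hash,
# packed big-endian before being wrapped in a memoryview for sqlite.
hash_struct = Struct(">Q")
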
def normalize(text):
    text = text.splitlines()
    text = " ".join(text)
    # Collapse the extra whitespace PGN export may leave after move numbers
    # and around parentheses/braces (wider runs first, then single spaces).
    text = text.replace('.   ', '. ').replace('.  ', '. ')
    text = text.replace('  )', ')').replace(' )', ')')
    text = text.replace('(  ', '(').replace('( ', '(')
    text = text.replace('  }', '}').replace(' }', '}')
    text = text.replace('{  ', '{').replace('{ ', '{')
    return text


for j, name in enumerate(file_names):
    print("Creating test methods for %s" % name)
    pgnfile = load(file_handles[j])
    pgnfile.limit = 1000
    pgnfile.init_tag_database()
    games, plys = pgnfile.get_records()
    for i, game in enumerate(games):
        print("%s/%s" % (i + 1, pgnfile.get_count()))
        orig = normalize(pgnfile.get_movetext(game))

        model = pgnfile.loadToModel(game)

        new = []
        walk(model.boards[0].board, new, model)
        new = normalize(" ".join(new))

        # create test method
        test_method = create_test(orig, new)

def setUp(self):
    self.PgnFile = pgn.load(GAMES.splitlines(KEEPENDS))

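# GAMES and KEEPENDS are not shown in these snippets; a hedged sketch of what
# the inline fixture might look like (the game itself is illustrative).
KEEPENDS = True  # keep line endings so pgn.load sees the original line structure

GAMES = """
[Event "Example"]
[Site "?"]
[Result "*"]

1. e4 e5 2. Nf3 Nc6 3. Bb5 a6 *
"""
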
def normalize(text):
    text = text.splitlines()
    text = " ".join(text)
    # Collapse the extra whitespace PGN export may leave after move numbers
    # and around parentheses/braces (wider runs first, then single spaces).
    text = text.replace('.   ', '. ').replace('.  ', '. ')
    text = text.replace('  )', ')').replace(' )', ')')
    text = text.replace('(  ', '(').replace('( ', '(')
    text = text.replace('  }', '}').replace(' }', '}')
    text = text.replace('{  ', '{').replace('{ ', '{')
    return text


filenames = ("atomic", "chess960rwch", "world_matches", "zh")
for filename in filenames:
    print("Creating test methods for %s" % filename)
    pgnfile = load(protoopen('gamefiles/%s.pgn' % filename))
    pgnfile.get_records()
    for i, game in enumerate(pgnfile.games):
        print("%s/%s" % (i + 1, len(pgnfile.games)))
        if i > 100:
            break
        orig = normalize(pgnfile.get_movetext(game))

        model = pgnfile.loadToModel(game)

        new = []
        walk(model.boards[0].board, new, model)
        new = normalize(" ".join(new))

        # create test method
        test_method = create_test(orig, new)

def normalize(text):
    text = text.splitlines()
    text = " ".join(text)
    # Collapse the extra whitespace PGN export may leave after move numbers
    # and around parentheses/braces (wider runs first, then single spaces).
    text = text.replace('.   ', '. ').replace('.  ', '. ')
    text = text.replace('  )', ')').replace(' )', ')')
    text = text.replace('(  ', '(').replace('( ', '(')
    text = text.replace('  }', '}').replace(' }', '}')
    text = text.replace('{  ', '{').replace('{ ', '{')
    return text


filenames = ("atomic", "chess960rwch", "world_matches", "zh2200plus")
for filename in filenames:
    print("Creating test methods for %s" % filename)
    pgnfile = load(open('gamefiles/%s.pgn' % filename))
    for i, game in enumerate(pgnfile.games):
        print("%s/%s" % (i + 1, len(pgnfile.games)))
        if i > 100:
            break
        orig = normalize(game[1])

        model = pgnfile.loadToModel(i)

        new = []
        walk(model.boards[0].board, new, model)
        new = normalize(" ".join(new))

        # create test method
        test_method = create_test(orig, new)

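# create_test is not defined in these snippets; a hedged sketch of the kind of
# factory the loops above assume: it closes over the original and regenerated
# movetext and returns a method that unittest can attach to a TestCase.
def create_test(orig, new):
    def test_movetext(self):
        self.assertEqual(orig, new)
    return test_movetext
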
def feed(pgnfile, lang):
    # Check the existence of the file
    if not os.path.isfile(pgnfile):
        return

    # Load the ECO file first
    print(' - Parsing')
    cf = load(protoopen(pgnfile))
    cf.limit = 5000
    cf.init_tag_database()
    records, plys = cf.get_records()

    # Cache the content
    entries = []
    plyMax = 0
    old_eco = ""
    for rec in records:
        model = cf.loadToModel(rec)
        eco = '' if rec['ECO'] is None else rec['ECO']
        entry = {
            'h': [],              # Hashes
            'f': '',              # Final hash of the line
            'n': [],              # FENs
            'm': old_eco != eco,  # Main line = shortest sequence of moves for the ECO code.
                                  # The 'EN' ECO file is specially crafted
            'e': eco,             # ECO
            'o': '' if rec['White'] is None else rec['White'],  # Opening
            'v': '' if rec['Black'] is None else rec['Black'],  # Variation
            'p': len(model.moves)  # Number of plies
        }
        plyMax = max(plyMax, entry['p'])

        # No move means that we are translating the name of the ECO code, so we
        # need to find all the related positions from another language
        if entry['p'] == 0:
            if lang == ECO_MAIN_LANG:
                continue
            c.execute(
                "select hash, endline, fen from openings where eco=? and lang=? and mainline=1",
                (eco, ECO_MAIN_LANG))
            rows = c.fetchall()
            for row in rows:
                entry['h'].append(row[0])
                if row[1] == int(True):
                    entry['f'] = row[0]
                entry['n'].append(row[2])
        else:
            # Find the Polyglot hash for each position of the opening
            for i in range(entry['p']):
                nextboard = model.getBoardAtPly(i, 0).board.next
                h = hex(nextboard.hash)[2:]
                entry['h'].append(h)
                entry['f'] = h
                entry['n'].append(nextboard.asFen())

        entries.append(entry)
        old_eco = entry['e']
    print(' - Max ply : %d' % plyMax)

    # Process all the data in reverse order
    for depth in reversed(range(plyMax + 1)):
        sys.stdout.write("\r - Loading into the database (%d remaining) " % depth)
        sys.stdout.flush()
        for i in reversed(range(len(entries))):  # Long lines are overwritten by short lines
            entry = entries[i]
            if entry['p'] != depth:
                continue
            for i in range(len(entry['h'])):
                h = entry['h'][i]
                hkey = int(h[-2:], 16)
                c.execute(
                    "select endline from openings where hash=? and hkey=? and lang=?",
                    (h, hkey, lang))
                r = c.fetchone()
                if r is not None and r[0] == int(True):
                    continue
                c.execute(
                    "delete from openings where hash=? and hkey=? and lang=?",
                    (h, hkey, lang))
                c.execute(
                    "insert into openings (hash, hkey, mainline, endline, eco, lang, opening, variation, fen) values (?, ?, ?, ?, ?, ?, ?, ?, ?)",
                    (h, hkey, int(entry['m']), int(h == entry['f']),
                     entry['e'], lang, entry['o'], entry['v'], entry['n'][i]))
    conn.commit()
    print('\n - Processed %d openings' % len(entries))

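# The insert/select statements in this last feed variant imply an openings
# table roughly like the following; only the column names come from the
# queries above, the column types are assumptions.
c.execute("""
    create table if not exists openings (
        hash text,
        hkey integer,
        mainline integer,
        endline integer,
        eco text,
        lang text,
        opening text,
        variation text,
        fen text
    )
""")
conn.commit()
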