def start(self, host, port, connected_event, timeseal=True):
    """Open the connection to an ICS server (coroutine, pre-async/await style).

    host/port: server address. connected_event: asyncio.Event set once the
    stream writer is attached. timeseal: wrap the protocol in timeseal unless
    connecting to ICC (which uses its own "timestamp" proxy binary instead).

    Raises CanceledException if the connection attempt was canceled.
    """
    if self.canceled:
        raise CanceledException()
    self.port = port
    self.host = host
    self.connected_event = connected_event
    self.timeseal = timeseal
    self.name = host

    if host == "chessclub.com":
        self.ICC = True
        self.timeseal = False
        # You can get ICC timestamp from
        # https://www.chessclub.com/user/resources/icc/timestamp/
        if sys.platform == "win32":
            timestamp = "timestamp_win32.exe"
        else:
            timestamp = "timestamp_linux_2.6.8"
        altpath = getEngineDataPrefix()
        path = shutil.which(timestamp, os.X_OK, path=altpath)
        if path is None:
            log.info("%s not found, downloading..." % timestamp)
            binary = "https://www.chessclub.com/user/resources/icc/timestamp/%s" % timestamp
            filename = download_file(binary)
            if filename is not None:
                dest = shutil.move(filename, os.path.join(altpath, timestamp))
                os.chmod(dest, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE)
                # FIX: use the freshly downloaded binary; previously `path`
                # stayed None here, so the download was never used.
                path = dest
        if path:
            # timestamp proxies the ICC connection on localhost:5500
            self.host = "127.0.0.1"
            self.port = 5500
            try:
                self.timestamp_proc = subprocess.Popen(
                    ["%s" % path, "-p", "%s" % self.port])
                log.info("%s started OK" % path)
            except OSError as err:
                log.info("Can't start %s OSError: %s %s" % (
                    path, err.errno, err.strerror))
                # fall back to a direct connection
                self.port = port
                self.host = host

    def cb(reader, writer):
        # called by the protocol once the transport is ready
        reader.stream_writer = writer
        reader.connected_event.set()

    loop = asyncio.get_event_loop()
    self.reader = ICSStreamReader(_DEFAULT_LIMIT, loop, self.connected_event, self.name)
    self.protocol = ICSStreamReaderProtocol(self.reader, cb, loop, self.name, self.timeseal)
    coro = loop.create_connection(lambda: self.protocol, self.host, self.port)
    self.transport, _protocol = yield from coro
    # writer = asyncio.StreamWriter(transport, protocol, reader, loop)
    if self.timeseal:
        self.write(self.get_init_string())
def do_import(self, filename, info=None, progressbar=None):
    """Import game headers from a PGN file into the database.

    filename: a local .pgn path, a .zip containing .pgn files, or an
        http(s) URL (downloaded first via download_file).
    info: optional metadata forwarded to get_id() for the source record.
    progressbar: optional Gtk progress bar, updated via GLib.idle_add.

    A source already present in the database (matched by name) is skipped.
    Rows are accumulated in per-table lists and flushed in chunks of
    self.CHUNK inside one transaction, which is rolled back when
    self.cancel is set or on SQLAlchemyError.  When self.append_pgn is
    set, the imported PGN text is also appended to the database file and
    the scoutfish index is extended.
    """
    self.progressbar = progressbar
    orig_filename = filename
    # skip files whose name is already recorded in the source table
    count_source = self.conn.execute(
        self.count_source.where(source.c.name == orig_filename)).scalar()
    if count_source > 0:
        log.info("%s is already imported" % filename)
        return

    # collect new names not in the id dict yet
    self.event_data = []
    self.site_data = []
    self.player_data = []
    self.annotator_data = []
    self.source_data = []

    # collect new games and commit them in big chunks for speed
    self.game_data = []
    self.tag_game_data = []

    if filename.startswith("http"):
        filename = download_file(filename, progressbar=progressbar)
        if filename is None:
            return
    else:
        if not os.path.isfile(filename):
            log.info("Can't open %s" % filename)
            return

    if filename.lower().endswith(".zip") and zipfile.is_zipfile(filename):
        # import every .pgn contained in the archive
        with zipfile.ZipFile(filename, "r") as zf:
            path = os.path.dirname(filename)
            files = [
                os.path.join(path, f) for f in zf.namelist()
                if f.lower().endswith(".pgn")
            ]
            zf.extractall(path)
    else:
        files = [filename]

    for pgnfile in files:
        # when appending, game offsets are relative to the current file end
        base_offset = self.chessfile.size if self.append_pgn else 0
        basename = os.path.basename(pgnfile)
        if progressbar is not None:
            GLib.idle_add(progressbar.set_text, _("Reading %s ..." % basename))
        else:
            log.info("Reading %s ..." % pgnfile)

        size = os.path.getsize(pgnfile)
        handle = protoopen(pgnfile)

        # estimated game count (~840 bytes per game) for the progress fraction
        all_games = max(size / 840, 1)

        get_id = self.get_id

        # use transaction to avoid autocommit slowness
        # and to let undo importing (rollback) if self.cancel was set
        trans = self.conn.begin()
        try:
            i = 0
            for tags in read_games(handle):
                if not tags:
                    log.info("Empty game #%s" % (i + 1))
                    continue

                if self.cancel:
                    trans.rollback()
                    return

                fenstr = tags["FEN"]
                variant = tags["Variant"]
                if variant:
                    if "fischer" in variant.lower() or "960" in variant:
                        variant = "Fischerandom"
                    else:
                        variant = variant.lower().capitalize()

                # Fixes for some non standard Chess960 .pgn
                if fenstr and variant == "Fischerandom":
                    parts = fenstr.split()
                    parts[0] = parts[0].replace(".", "/").replace("0", "")
                    if len(parts) == 1:
                        parts.append("w")
                        parts.append("-")
                        parts.append("-")
                    fenstr = " ".join(parts)

                if variant:
                    if variant not in name2variant:
                        log.info("Unknown variant: %s" % variant)
                        continue
                    variant = name2variant[variant].variant
                    if variant == NORMALCHESS:
                        # lichess uses tag [Variant "Standard"]
                        variant = 0
                else:
                    variant = 0

                if basename == "eco.pgn":
                    # eco.pgn stores opening names in place of player names
                    white = tags["Opening"]
                    black = tags["Variation"]
                else:
                    white = tags["White"]
                    black = tags["Black"]

                event_id = get_id(tags["Event"], event, EVENT)
                site_id = get_id(tags["Site"], site, SITE)
                date = tags["Date"]
                game_round = tags['Round']
                white_id = get_id(white, player, PLAYER)
                black_id = get_id(black, player, PLAYER)

                result = tags["Result"]
                if result in pgn2Const:
                    result = pgn2Const[result]
                else:
                    result = RUNNING

                white_elo = tags['WhiteElo']
                black_elo = tags['BlackElo']

                time_control = tags["TimeControl"]
                eco = tags["ECO"][:3]
                fen = tags["FEN"]
                board_tag = int(tags["Board"]) if "Board" in tags else 0
                annotator_id = get_id(tags["Annotator"], annotator, ANNOTATOR)
                source_id = get_id(orig_filename, source, SOURCE, info=info)

                ply_count = tags["PlyCount"] if "PlyCount" in tags else 0
                offset = base_offset + int(tags["offset"])

                self.game_data.append({
                    'offset': offset,
                    'offset8': (offset >> 3) << 3,
                    'event_id': event_id,
                    'site_id': site_id,
                    'date': date,
                    'round': game_round,
                    'white_id': white_id,
                    'black_id': black_id,
                    'result': result,
                    'white_elo': white_elo,
                    'black_elo': black_elo,
                    'ply_count': ply_count,
                    'eco': eco,
                    'fen': fen,
                    'variant': variant,
                    'board': board_tag,
                    'time_control': time_control,
                    'annotator_id': annotator_id,
                    'source_id': source_id,
                })

                # store any non-dedicated, non-empty tag in tag_game
                for tag in tags:
                    if tag not in dedicated_tags and tag not in other_game_tags and tags[
                            tag]:
                        self.tag_game_data.append({
                            'game_id': self.next_id[GAME],
                            'tag_name': tag,
                            'tag_value': tags[tag],
                        })

                self.next_id[GAME] += 1
                i += 1

                # flush all pending rows once a chunk is full
                if len(self.game_data) >= self.CHUNK:
                    if self.event_data:
                        self.conn.execute(self.ins_event, self.event_data)
                        self.event_data = []
                    if self.site_data:
                        self.conn.execute(self.ins_site, self.site_data)
                        self.site_data = []
                    if self.player_data:
                        self.conn.execute(self.ins_player, self.player_data)
                        self.player_data = []
                    if self.annotator_data:
                        self.conn.execute(self.ins_annotator, self.annotator_data)
                        self.annotator_data = []
                    if self.source_data:
                        self.conn.execute(self.ins_source, self.source_data)
                        self.source_data = []
                    if self.tag_game_data:
                        self.conn.execute(self.ins_tag_game, self.tag_game_data)
                        self.tag_game_data = []
                    self.conn.execute(self.ins_game, self.game_data)
                    self.game_data = []

                    if progressbar is not None:
                        GLib.idle_add(progressbar.set_fraction, i / float(all_games))
                        GLib.idle_add(
                            progressbar.set_text,
                            _("%(counter)s game headers from %(filename)s imported" % ({
                                "counter": i,
                                "filename": basename
                            })))
                    else:
                        log.info("From %s imported %s" % (pgnfile, i))

            # final flush of any remaining partial chunk
            if self.event_data:
                self.conn.execute(self.ins_event, self.event_data)
                self.event_data = []
            if self.site_data:
                self.conn.execute(self.ins_site, self.site_data)
                self.site_data = []
            if self.player_data:
                self.conn.execute(self.ins_player, self.player_data)
                self.player_data = []
            if self.annotator_data:
                self.conn.execute(self.ins_annotator, self.annotator_data)
                self.annotator_data = []
            if self.source_data:
                self.conn.execute(self.ins_source, self.source_data)
                self.source_data = []
            if self.tag_game_data:
                self.conn.execute(self.ins_tag_game, self.tag_game_data)
                self.tag_game_data = []
            if self.game_data:
                self.conn.execute(self.ins_game, self.game_data)
                self.game_data = []

            if progressbar is not None:
                GLib.idle_add(progressbar.set_fraction, i / float(all_games))
                GLib.idle_add(
                    progressbar.set_text,
                    _("%(counter)s game headers from %(filename)s imported" % ({
                        "counter": i,
                        "filename": basename
                    })))
            else:
                log.info("From %s imported %s" % (pgnfile, i))

            trans.commit()

            if self.append_pgn:
                # reopen database to write
                self.db_handle.close()
                with protosave(self.chessfile.path, self.append_pgn) as self.db_handle:
                    log.info("Append from %s to %s" % (pgnfile, self.chessfile.path))
                    handle.seek(0)
                    self.db_handle.writelines(handle)
                handle.close()
                if self.chessfile.scoutfish is not None:
                    # create new .scout from pgnfile we are importing
                    from pychess.Savers.pgn import scoutfish_path
                    args = [
                        scoutfish_path, "make", pgnfile,
                        "%s" % base_offset
                    ]
                    output = subprocess.check_output(
                        args, stderr=subprocess.STDOUT).decode()
                    # append it to our existing one
                    if output.find("Processing...done") > 0:
                        old_scout = self.chessfile.scoutfish.db
                        new_scout = os.path.splitext(pgnfile)[0] + '.scout'
                        with open(old_scout, "ab") as file1, open(new_scout, "rb") as file2:
                            file1.write(file2.read())
                self.chessfile.handle = protoopen(self.chessfile.path)
        except SQLAlchemyError as e:
            trans.rollback()
            log.info("Importing %s failed! \n%s" % (pgnfile, e))
def do_import(self, filename, info=None, progressbar=None):
    """Import game headers from a PGN file into the database.

    filename: a local .pgn path, a .zip containing .pgn files, or an
        http(s) URL (downloaded first via download_file).
    info: optional metadata forwarded to get_id() for the source record.
    progressbar: optional Gtk progress bar, updated via GLib.idle_add.

    A source already present in the database (matched by name) is skipped.
    Rows are flushed in chunks of self.CHUNK inside one transaction, which
    is rolled back when self.cancel is set or on SQLAlchemyError.
    """
    self.progressbar = progressbar
    orig_filename = filename
    # skip files whose name is already recorded in the source table
    count_source = self.conn.execute(
        self.count_source.where(source.c.name == orig_filename)).scalar()
    if count_source > 0:
        print("%s is already imported" % filename)
        return

    # collect new names not in the id dict yet
    self.event_data = []
    self.site_data = []
    self.player_data = []
    self.annotator_data = []
    self.source_data = []

    # collect new games and commit them in big chunks for speed
    self.game_data = []
    self.tag_game_data = []

    if filename.startswith("http"):
        filename = download_file(filename, progressbar=progressbar)
        if filename is None:
            return
    else:
        if not os.path.isfile(filename):
            print("Can't open %s" % filename)
            return

    if filename.lower().endswith(".zip") and zipfile.is_zipfile(filename):
        # import every .pgn contained in the archive
        with zipfile.ZipFile(filename, "r") as zf:
            path = os.path.dirname(filename)
            files = [os.path.join(path, f) for f in zf.namelist()
                     if f.lower().endswith(".pgn")]
            zf.extractall(path)
    else:
        files = [filename]

    for pgnfile in files:
        # when appending, game offsets are relative to the current file end
        base_offset = self.chessfile.size if self.append_pgn else 0
        basename = os.path.basename(pgnfile)
        if progressbar is not None:
            GLib.idle_add(progressbar.set_text, "Reading %s ..." % basename)
        else:
            print("Reading %s ..." % pgnfile)

        size = os.path.getsize(pgnfile)
        handle = protoopen(pgnfile)

        # estimated game count (~840 bytes per game) for the progress fraction
        all_games = max(size / 840, 1)

        handle_json = None
        if pgnextractor is not None:
            try:
                headers_json = os.path.splitext(pgnfile)[0] + ".headers.json"
                if not os.path.isfile(headers_json):
                    output = subprocess.check_output(
                        [pgnextractor, "headers", pgnfile]).decode()
                    # FIX: iterate the output line by line; the original
                    # `for line in output:` walked over single characters,
                    # so the "Games" line could never match.
                    for line in output.splitlines():
                        if line.startswith("Games"):
                            all_games = int(line.split()[1])
                handle_json = protoopen(headers_json)
            except subprocess.CalledProcessError:
                print("pgnextractor failed")

        get_id = self.get_id

        # use transaction to avoid autocommit slowness
        trans = self.conn.begin()
        try:
            i = 0
            for tags in read_games(handle, handle_json):
                if not tags:
                    print("Empty game #%s" % (i + 1))
                    continue

                if self.cancel:
                    trans.rollback()
                    return

                fenstr = tags["FEN"] if "FEN" in tags else ""
                variant = tags["Variant"] if "Variant" in tags else ""
                if variant:
                    if "fischer" in variant.lower() or "960" in variant:
                        variant = "Fischerandom"
                    else:
                        variant = variant.lower().capitalize()

                # Fixes for some non standard Chess960 .pgn
                if fenstr and variant == "Fischerandom":
                    parts = fenstr.split()
                    parts[0] = parts[0].replace(".", "/").replace("0", "")
                    if len(parts) == 1:
                        parts.append("w")
                        parts.append("-")
                        parts.append("-")
                    fenstr = " ".join(parts)

                if variant:
                    if variant not in name2variant:
                        print("Unknown variant: %s" % variant)
                        continue
                    variant = name2variant[variant].variant
                    if variant == NORMALCHESS:
                        # lichess uses tag [Variant "Standard"]
                        variant = 0
                else:
                    variant = 0

                white = tags["White"] if "White" in tags else ""
                black = tags["Black"] if "Black" in tags else ""

                event_id = get_id(tags["Event"] if "Event" in tags else "",
                                  event, EVENT)
                site_id = get_id(tags["Site"] if "Site" in tags else "",
                                 site, SITE)

                game_date = tags["Date"] if "Date" in tags else ""
                # split "YYYY.MM.DD" into separate columns; '?' marks unknown
                try:
                    if game_date and '?' not in game_date:
                        ymd = game_date.split('.')
                        if len(ymd) == 3:
                            game_year, game_month, game_day = map(int, ymd)
                        else:
                            game_year, game_month, game_day = int(game_date[:4]), None, None
                    elif game_date and '?' not in game_date[:4]:
                        game_year, game_month, game_day = int(game_date[:4]), None, None
                    else:
                        game_year, game_month, game_day = None, None, None
                except ValueError:
                    # malformed date tag: store no date (was a bare except)
                    game_year, game_month, game_day = None, None, None

                game_round = tags['Round'] if "Round" in tags else ""

                white_id = get_id(white, player, PLAYER)
                black_id = get_id(black, player, PLAYER)

                result = tags["Result"] if "Result" in tags else ""
                if result in pgn2Const:
                    result = pgn2Const[result]
                else:
                    # eco.pgn games legitimately carry no Result tag
                    if basename != "eco.pgn":
                        print("Invalid Result tag in game #%s: %s" % (i + 1, result))
                    continue

                white_elo = tags['WhiteElo'] if "WhiteElo" in tags else "0"
                white_elo = int(white_elo) if white_elo and white_elo.isdigit() else 0
                black_elo = tags['BlackElo'] if "BlackElo" in tags else ""
                black_elo = int(black_elo) if black_elo and black_elo.isdigit() else 0

                time_control = tags["TimeControl"] if "TimeControl" in tags else ""
                eco = tags["ECO"][:3] if "ECO" in tags else ""
                fen = tags["FEN"] if "FEN" in tags else ""
                board_tag = int(tags["Board"]) if "Board" in tags else 0
                annotator_id = get_id(tags["Annotator"] if "Annotator" in tags else "",
                                      annotator, ANNOTATOR)
                source_id = get_id(orig_filename, source, SOURCE, info=info)

                self.next_id[GAME] += 1

                ply_count = tags["PlyCount"] if "PlyCount" in tags else 0
                offset = base_offset + int(tags["offset"])

                self.game_data.append({
                    'offset': offset,
                    'offset8': (offset >> 3) << 3,
                    'event_id': event_id,
                    'site_id': site_id,
                    'date_year': game_year,
                    'date_month': game_month,
                    'date_day': game_day,
                    'round': game_round,
                    'white_id': white_id,
                    'black_id': black_id,
                    'result': result,
                    'white_elo': white_elo,
                    'black_elo': black_elo,
                    'ply_count': ply_count,
                    'eco': eco,
                    'fen': fen,
                    'variant': variant,
                    'board': board_tag,
                    'time_control': time_control,
                    'annotator_id': annotator_id,
                    'source_id': source_id,
                })

                i += 1

                # flush all pending rows once a chunk is full
                if len(self.game_data) >= self.CHUNK:
                    if self.event_data:
                        self.conn.execute(self.ins_event, self.event_data)
                        self.event_data = []
                    if self.site_data:
                        self.conn.execute(self.ins_site, self.site_data)
                        self.site_data = []
                    if self.player_data:
                        self.conn.execute(self.ins_player, self.player_data)
                        self.player_data = []
                    if self.annotator_data:
                        self.conn.execute(self.ins_annotator, self.annotator_data)
                        self.annotator_data = []
                    if self.source_data:
                        self.conn.execute(self.ins_source, self.source_data)
                        self.source_data = []
                    self.conn.execute(self.ins_game, self.game_data)
                    self.game_data = []

                    if progressbar is not None:
                        GLib.idle_add(progressbar.set_fraction, i / float(all_games))
                        GLib.idle_add(progressbar.set_text,
                                      "%s game headers from %s imported" % (i, basename))
                    else:
                        print(pgnfile, i)

            # final flush of any remaining partial chunk
            if self.event_data:
                self.conn.execute(self.ins_event, self.event_data)
                self.event_data = []
            if self.site_data:
                self.conn.execute(self.ins_site, self.site_data)
                self.site_data = []
            if self.player_data:
                self.conn.execute(self.ins_player, self.player_data)
                self.player_data = []
            if self.annotator_data:
                self.conn.execute(self.ins_annotator, self.annotator_data)
                self.annotator_data = []
            if self.source_data:
                self.conn.execute(self.ins_source, self.source_data)
                self.source_data = []
            if self.game_data:
                self.conn.execute(self.ins_game, self.game_data)
                self.game_data = []

            if progressbar is not None:
                GLib.idle_add(progressbar.set_fraction, i / float(all_games))
                GLib.idle_add(progressbar.set_text,
                              "%s game headers from %s imported" % (i, basename))
            else:
                print(pgnfile, i)

            trans.commit()

            if self.append_pgn:
                # reopen database to write
                self.db_handle.close()
                self.db_handle = protosave(self.chessfile.path, self.append_pgn)
                print("Append from %s to %s" % (pgnfile, self.chessfile.path))
                handle.seek(0)
                self.db_handle.writelines(handle)
                self.db_handle.close()
                handle.close()
                if self.chessfile.scoutfish is not None:
                    # create new .scout from pgnfile we are importing
                    from pychess.Savers.pgn import scoutfish_path
                    args = [scoutfish_path, "make", pgnfile, "%s" % base_offset]
                    output = subprocess.check_output(args, stderr=subprocess.STDOUT).decode()
                    # append it to our existing one
                    if output.find("Processing...done") > 0:
                        old_scout = self.chessfile.scoutfish.db
                        new_scout = os.path.splitext(pgnfile)[0] + '.scout'
                        with open(old_scout, "ab") as file1, open(new_scout, "rb") as file2:
                            file1.write(file2.read())
                self.chessfile.handle = protoopen(self.chessfile.path)
        except SQLAlchemyError as e:
            trans.rollback()
            print("Importing %s failed! \n%s" % (pgnfile, e))
def do_import(self, filename, info=None, progressbar=None):
    """Import game headers from a PGN file into the database.

    filename: a local .pgn path, a .zip containing .pgn files, or an
        http(s) URL (downloaded first via download_file).
    info: optional metadata forwarded to get_id() for the source record.
    progressbar: optional Gtk progress bar, updated via GLib.idle_add.

    A source already present in the database (matched by name) is skipped.
    Rows are flushed in chunks of self.CHUNK inside one transaction, which
    is rolled back when self.cancel is set or on SQLAlchemyError.
    """
    self.progressbar = progressbar
    orig_filename = filename
    # skip files whose name is already recorded in the source table
    count_source = self.conn.execute(
        self.count_source.where(source.c.name == orig_filename)).scalar()
    if count_source > 0:
        print("%s is already imported" % filename)
        return

    # collect new names not in the id dict yet
    self.event_data = []
    self.site_data = []
    self.player_data = []
    self.annotator_data = []
    self.source_data = []

    # collect new games and commit them in big chunks for speed
    self.game_data = []
    self.tag_game_data = []

    if filename.startswith("http"):
        filename = download_file(filename, progressbar=progressbar)
        if filename is None:
            return
    else:
        if not os.path.isfile(filename):
            print("Can't open %s" % filename)
            return

    if filename.lower().endswith(".zip") and zipfile.is_zipfile(filename):
        # import every .pgn contained in the archive
        with zipfile.ZipFile(filename, "r") as zf:
            path = os.path.dirname(filename)
            files = [
                os.path.join(path, f) for f in zf.namelist()
                if f.lower().endswith(".pgn")
            ]
            zf.extractall(path)
    else:
        files = [filename]

    for pgnfile in files:
        # when appending, game offsets are relative to the current file end
        base_offset = self.chessfile.size if self.append_pgn else 0
        basename = os.path.basename(pgnfile)
        if progressbar is not None:
            GLib.idle_add(progressbar.set_text, "Reading %s ..." % basename)
        else:
            print("Reading %s ..." % pgnfile)

        size = os.path.getsize(pgnfile)
        handle = protoopen(pgnfile)

        # estimated game count (~840 bytes per game) for the progress fraction
        all_games = max(size / 840, 1)

        handle_json = None
        if pgnextractor is not None:
            try:
                headers_json = os.path.splitext(
                    pgnfile)[0] + ".headers.json"
                if not os.path.isfile(headers_json):
                    output = subprocess.check_output(
                        [pgnextractor, "headers", pgnfile]).decode()
                    # FIX: iterate the output line by line; the original
                    # `for line in output:` walked over single characters,
                    # so the "Games" line could never match.
                    for line in output.splitlines():
                        if line.startswith("Games"):
                            all_games = int(line.split()[1])
                handle_json = protoopen(headers_json)
            except subprocess.CalledProcessError:
                print("pgnextractor failed")

        get_id = self.get_id

        # use transaction to avoid autocommit slowness
        trans = self.conn.begin()
        try:
            i = 0
            for tags in read_games(handle, handle_json):
                if not tags:
                    print("Empty game #%s" % (i + 1))
                    continue

                if self.cancel:
                    trans.rollback()
                    return

                fenstr = tags["FEN"] if "FEN" in tags else ""
                variant = tags["Variant"] if "Variant" in tags else ""
                if variant:
                    if "fischer" in variant.lower() or "960" in variant:
                        variant = "Fischerandom"
                    else:
                        variant = variant.lower().capitalize()

                # Fixes for some non standard Chess960 .pgn
                if fenstr and variant == "Fischerandom":
                    parts = fenstr.split()
                    parts[0] = parts[0].replace(".", "/").replace("0", "")
                    if len(parts) == 1:
                        parts.append("w")
                        parts.append("-")
                        parts.append("-")
                    fenstr = " ".join(parts)

                if variant:
                    if variant not in name2variant:
                        print("Unknown variant: %s" % variant)
                        continue
                    variant = name2variant[variant].variant
                    if variant == NORMALCHESS:
                        # lichess uses tag [Variant "Standard"]
                        variant = 0
                else:
                    variant = 0

                white = tags["White"] if "White" in tags else ""
                black = tags["Black"] if "Black" in tags else ""

                event_id = get_id(tags["Event"] if "Event" in tags else "",
                                  event, EVENT)
                site_id = get_id(tags["Site"] if "Site" in tags else "",
                                 site, SITE)

                game_date = tags["Date"] if "Date" in tags else ""
                # split "YYYY.MM.DD" into separate columns; '?' marks unknown
                try:
                    if game_date and '?' not in game_date:
                        ymd = game_date.split('.')
                        if len(ymd) == 3:
                            game_year, game_month, game_day = map(int, ymd)
                        else:
                            game_year, game_month, game_day = int(
                                game_date[:4]), None, None
                    elif game_date and '?' not in game_date[:4]:
                        game_year, game_month, game_day = int(
                            game_date[:4]), None, None
                    else:
                        game_year, game_month, game_day = None, None, None
                except ValueError:
                    # malformed date tag: store no date (was a bare except)
                    game_year, game_month, game_day = None, None, None

                game_round = tags['Round'] if "Round" in tags else ""

                white_id = get_id(white, player, PLAYER)
                black_id = get_id(black, player, PLAYER)

                result = tags["Result"] if "Result" in tags else ""
                if result in pgn2Const:
                    result = pgn2Const[result]
                else:
                    # eco.pgn games legitimately carry no Result tag
                    if basename != "eco.pgn":
                        print("Invalid Result tag in game #%s: %s" % (i + 1, result))
                    continue

                white_elo = tags['WhiteElo'] if "WhiteElo" in tags else "0"
                white_elo = int(
                    white_elo) if white_elo and white_elo.isdigit() else 0
                black_elo = tags['BlackElo'] if "BlackElo" in tags else ""
                black_elo = int(
                    black_elo) if black_elo and black_elo.isdigit() else 0

                time_control = tags[
                    "TimeControl"] if "TimeControl" in tags else ""
                eco = tags["ECO"][:3] if "ECO" in tags else ""
                fen = tags["FEN"] if "FEN" in tags else ""
                board_tag = int(tags["Board"]) if "Board" in tags else 0
                annotator_id = get_id(
                    tags["Annotator"] if "Annotator" in tags else "",
                    annotator, ANNOTATOR)
                source_id = get_id(orig_filename, source, SOURCE, info=info)

                self.next_id[GAME] += 1

                ply_count = tags["PlyCount"] if "PlyCount" in tags else 0
                offset = base_offset + int(tags["offset"])

                self.game_data.append({
                    'offset': offset,
                    'offset8': (offset >> 3) << 3,
                    'event_id': event_id,
                    'site_id': site_id,
                    'date_year': game_year,
                    'date_month': game_month,
                    'date_day': game_day,
                    'round': game_round,
                    'white_id': white_id,
                    'black_id': black_id,
                    'result': result,
                    'white_elo': white_elo,
                    'black_elo': black_elo,
                    'ply_count': ply_count,
                    'eco': eco,
                    'fen': fen,
                    'variant': variant,
                    'board': board_tag,
                    'time_control': time_control,
                    'annotator_id': annotator_id,
                    'source_id': source_id,
                })

                i += 1

                # flush all pending rows once a chunk is full
                if len(self.game_data) >= self.CHUNK:
                    if self.event_data:
                        self.conn.execute(self.ins_event, self.event_data)
                        self.event_data = []
                    if self.site_data:
                        self.conn.execute(self.ins_site, self.site_data)
                        self.site_data = []
                    if self.player_data:
                        self.conn.execute(self.ins_player, self.player_data)
                        self.player_data = []
                    if self.annotator_data:
                        self.conn.execute(self.ins_annotator,
                                          self.annotator_data)
                        self.annotator_data = []
                    if self.source_data:
                        self.conn.execute(self.ins_source, self.source_data)
                        self.source_data = []
                    self.conn.execute(self.ins_game, self.game_data)
                    self.game_data = []

                    if progressbar is not None:
                        GLib.idle_add(progressbar.set_fraction,
                                      i / float(all_games))
                        GLib.idle_add(
                            progressbar.set_text,
                            "%s game headers from %s imported" % (i, basename))
                    else:
                        print(pgnfile, i)

            # final flush of any remaining partial chunk
            if self.event_data:
                self.conn.execute(self.ins_event, self.event_data)
                self.event_data = []
            if self.site_data:
                self.conn.execute(self.ins_site, self.site_data)
                self.site_data = []
            if self.player_data:
                self.conn.execute(self.ins_player, self.player_data)
                self.player_data = []
            if self.annotator_data:
                self.conn.execute(self.ins_annotator, self.annotator_data)
                self.annotator_data = []
            if self.source_data:
                self.conn.execute(self.ins_source, self.source_data)
                self.source_data = []
            if self.game_data:
                self.conn.execute(self.ins_game, self.game_data)
                self.game_data = []

            if progressbar is not None:
                GLib.idle_add(progressbar.set_fraction, i / float(all_games))
                GLib.idle_add(
                    progressbar.set_text,
                    "%s game headers from %s imported" % (i, basename))
            else:
                print(pgnfile, i)

            trans.commit()

            if self.append_pgn:
                # reopen database to write
                self.db_handle.close()
                self.db_handle = protosave(self.chessfile.path, self.append_pgn)
                print("Append from %s to %s" % (pgnfile, self.chessfile.path))
                handle.seek(0)
                self.db_handle.writelines(handle)
                self.db_handle.close()
                handle.close()
                if self.chessfile.scoutfish is not None:
                    # create new .scout from pgnfile we are importing
                    from pychess.Savers.pgn import scoutfish_path
                    args = [
                        scoutfish_path, "make", pgnfile,
                        "%s" % base_offset
                    ]
                    output = subprocess.check_output(
                        args, stderr=subprocess.STDOUT).decode()
                    # append it to our existing one
                    if output.find("Processing...done") > 0:
                        old_scout = self.chessfile.scoutfish.db
                        new_scout = os.path.splitext(pgnfile)[0] + '.scout'
                        with open(old_scout, "ab") as file1, open(new_scout, "rb") as file2:
                            file1.write(file2.read())
                self.chessfile.handle = protoopen(self.chessfile.path)
        except SQLAlchemyError as e:
            trans.rollback()
            print("Importing %s failed! \n%s" % (pgnfile, e))
# Locate the external scoutfish and chess_db helper binaries, downloading
# them into the engine data directory when they are not installed yet.

try:
    with open("/proc/cpuinfo") as f:
        cpuinfo = f.read()
except OSError:
    # non-Linux platform (or /proc unavailable): assume no popcnt info
    cpuinfo = ""

BITNESS = "64" if platform.machine().endswith('64') else "32"
MODERN = "-modern" if "popcnt" in cpuinfo else ""
EXT = ".exe" if sys.platform == "win32" else ""

scoutfish = "scoutfish_x%s%s%s" % (BITNESS, MODERN, EXT)
altpath = getEngineDataPrefix()
scoutfish_path = shutil.which(scoutfish, mode=os.X_OK, path=altpath)
if scoutfish_path is None:
    binary = "https://github.com/gbtami/scoutfish/releases/download/20170627/%s" % scoutfish
    filename = download_file(binary)
    if filename is not None:
        dest = shutil.move(filename, os.path.join(altpath, scoutfish))
        os.chmod(dest, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE)
        # FIX: use the freshly downloaded binary; previously scoutfish_path
        # stayed None even after a successful download.
        scoutfish_path = dest

parser = "parser_x%s%s%s" % (BITNESS, MODERN, EXT)
altpath = getEngineDataPrefix()
chess_db_path = shutil.which(parser, mode=os.X_OK, path=altpath)
if chess_db_path is None:
    binary = "https://github.com/gbtami/chess_db/releases/download/20170627/%s" % parser
    filename = download_file(binary)
    if filename is not None:
        dest = shutil.move(filename, os.path.join(altpath, parser))
        os.chmod(dest, stat.S_IEXEC | stat.S_IREAD | stat.S_IWRITE)
        # FIX: same as above for the chess_db parser binary
        chess_db_path = dest