# leave enough time to process a 300-halfply game
visibility_timeout = (300 * movetime) / 1000

# max permitted wait time
wait_time_seconds = 20


def queue_read(q):
    return q.read(visibility_timeout, wait_time_seconds)


msg("Hi! Analyzing %s, writing results to %s" % (in_queuename, out_queuename))

engine = pystockfish.Engine(depth=depth,
                            param={'Threads': threads, 'Hash': hash},
                            movetime=movetime)

inq = conn.get_queue(in_queuename)
outq = conn.get_queue(out_queuename)

s3conn = boto.connect_s3()
gamesbucket = s3conn.get_bucket('bc-games')


# read outputs from the out queue in batches and stuff them into S3
def consolidate_outputs():
    if outq.count() < MIN_ITEMS_PER_KEY * 2:
        msg("Not enough outputs to consolidate. Sleeping 10 seconds.")
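# A minimal sketch (not part of the original excerpt) of how queue_read and the
# two queues above could be wired into a worker loop: read a game message from
# the input queue, analyze it, push the result to the output queue, and only
# then delete the message so SQS can redeliver the work if this worker dies
# mid-game. The message format and the analyze_game() helper are assumptions.
import json

from boto.sqs.message import Message


def worker_loop():
    while True:
        m = queue_read(inq)
        if m is None:
            continue  # long poll timed out with no work; poll again
        job = json.loads(m.get_body())
        result = analyze_game(engine, job)  # hypothetical per-game analysis helper
        out = Message()
        out.set_body(json.dumps(result))
        outq.write(out)
        inq.delete_message(m)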
def __init__(self, depth=15):
    self.engine = pystockfish.Engine(depth=depth)
    ChessEngine.__init__(self, depth)
# load the run configuration from S3
conn = boto.connect_s3()
config_bucket = conn.get_bucket(config_bucket)
key = config_bucket.get_key(config_key)
runconfig = json.loads(key.get_contents_as_string())

pgn_key = runconfig['pgn_key']
depth = runconfig['depth']
msg("Hi! Analyzing %s to depth %d" % (pgn_key, depth))

# pull the PGN file down and read games out of it one at a time
inputs_bucket = conn.get_bucket('bc-runinputs')
games_key = inputs_bucket.get_key(pgn_key)
games_fd = StringIO.StringIO(games_key.get_contents_as_string())

result_list = []
engine = pystockfish.Engine(depth=depth)

game = chess.pgn.read_game(games_fd)
while game is not None:
    result_list.append(
        do_it(engine=engine, game=game, depth=depth, debug=DEBUG))
    game = chess.pgn.read_game(games_fd)

# write the combined results back to S3 under the key named in the run config
output_bucket = conn.get_bucket('bc-runoutputs')
new_key = runconfig['result_key']
key = output_bucket.new_key(new_key)
key.set_contents_from_string(json.dumps(result_list))
msg("All done.")
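# A minimal sketch (an assumption, not taken from the original) of how a run
# like the one above could be kicked off: upload a PGN to the bc-runinputs
# bucket, then write a small JSON run configuration pointing at it. The
# bc-runconfigs bucket name and the sample key names are illustrative; the
# runconfig fields (pgn_key, depth, result_key) match the ones read above.
import json

import boto

conn = boto.connect_s3()

inputs = conn.get_bucket('bc-runinputs')
inputs.new_key('games/sample.pgn').set_contents_from_filename('sample.pgn')

runconfig = {
    'pgn_key': 'games/sample.pgn',
    'depth': 15,
    'result_key': 'results/sample.json',
}
config = conn.get_bucket('bc-runconfigs')  # hypothetical config bucket name
config.new_key('runs/sample.json').set_contents_from_string(json.dumps(runconfig))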
def __init__(self):
    # change into the engine directory before launching Stockfish
    # (presumably so pystockfish can find the binary), then change back
    os.chdir(os.path.join(os.path.dirname(__file__), "../../engine/"))
    self.engine = pystockfish.Engine(depth=20)
    self.position = []
    os.chdir(os.path.join(os.path.dirname(__file__), "../.."))