def validate_hourly(working_dir, validate_name=None):
    """Compile a list of holdout games from the hourly directory layout and validate on them.

    Walks the holdout root (newest hourly directory first), gathers up to
    20,000 game files, shuffles them, and runs validation.

    Args:
        working_dir: unused; kept for interface compatibility with callers.
        validate_name: name for the validation set (e.g. 'selfplay' or
            'human'), forwarded to dual_net.validate.
    """
    holdout_root = fsdb.holdout_dir()
    # Filter non-directory entries *before* listing their contents.  The
    # original attached the IsDirectory check to the inner loop, so
    # ListDirectory was called on stray plain files and raised before the
    # filter could skip them.  (Also dropped: an unused `holdout_dirs` local.)
    holdout_files = (
        os.path.join(holdout_root, d, f)
        for d in reversed(gfile.ListDirectory(holdout_root))
        if gfile.IsDirectory(os.path.join(holdout_root, d))
        for f in gfile.ListDirectory(os.path.join(holdout_root, d)))
    # Cap the candidate set so validation stays bounded, then shuffle so the
    # sample isn't biased toward the newest directories.
    holdout_files = list(itertools.islice(holdout_files, 20000))
    random.shuffle(holdout_files)
    # Forward validate_name — previously accepted but silently ignored.
    dual_net.validate(holdout_files, validate_name=validate_name)
def validate(
        *tf_record_dirs: 'Directories where holdout data are',
        validate_name: 'Name for validation set (i.e., selfplay or human)'=None):
    """Validate the latest model on holdout tf_records found under the given dirs.

    Globs `*.zz` files from each directory, then runs dual_net.validate on
    the combined list.

    Args:
        *tf_record_dirs: directories containing holdout `*.zz` tf_records.
        validate_name: name for the validation set, forwarded to
            dual_net.validate.

    Raises:
        ValueError: if no `*.zz` files are found in any of the directories.
    """
    tf_records = []
    with utils.logged_timer("Building lists of holdout files"):
        for record_dir in tf_record_dirs:
            tf_records.extend(gfile.Glob(os.path.join(record_dir, '*.zz')))
    if not tf_records:
        # Fail with a clear message instead of the bare IndexError that
        # tf_records[0] would raise below.
        raise ValueError(
            "No .zz holdout files found in {}".format(tf_record_dirs))
    first_record = os.path.basename(tf_records[0])
    last_record = os.path.basename(tf_records[-1])
    with utils.logged_timer("Validating from {} to {}".format(
            first_record, last_record)):
        dual_net.validate(tf_records, validate_name=validate_name)
def validate(
        working_dir: 'tf.estimator working directory',
        *tf_record_dirs: 'Directories where holdout data are',
        checkpoint_name: 'Which checkpoint to evaluate (None=latest)'=None,
        validate_name: 'Name for validation set (i.e. selfplay or human)'=None):
    """Validate a checkpoint on holdout tf_records found under the given dirs.

    Globs `*.zz` files from each directory, then runs dual_net.validate on
    the combined list against the requested checkpoint.

    Args:
        working_dir: tf.estimator working directory holding checkpoints.
        *tf_record_dirs: directories containing holdout `*.zz` tf_records.
        checkpoint_name: checkpoint to evaluate; None means latest.
        validate_name: name for the validation set, passed through as
            dual_net.validate's `name` argument.

    Raises:
        ValueError: if no `*.zz` files are found in any of the directories.
    """
    tf_records = []
    with timer("Building lists of holdout files"):
        for record_dir in tf_record_dirs:
            tf_records.extend(gfile.Glob(os.path.join(record_dir, '*.zz')))
    if not tf_records:
        # Fail with a clear message instead of the bare IndexError that
        # tf_records[0] would raise below.
        raise ValueError(
            "No .zz holdout files found in {}".format(tf_record_dirs))
    with timer("Validating from {} to {}".format(
            os.path.basename(tf_records[0]),
            os.path.basename(tf_records[-1]))):
        dual_net.validate(
            working_dir, tf_records,
            checkpoint_name=checkpoint_name,
            name=validate_name)