Code example #1
def validate_feature_planes(verbose, dataset, model_features):
    """Verify that dataset's features match the model's expected features.
    """

    if 'features' in dataset:
        dataset_features = dataset['features'][()]
        # h5py may return the scalar string as bytes (Python 3); decode before splitting
        if isinstance(dataset_features, bytes):
            dataset_features = dataset_features.decode('ascii')
        dataset_features = dataset_features.split(",")
        if len(dataset_features) != len(model_features) or \
           any(df != mf for (df, mf) in zip(dataset_features, model_features)):
            raise ValueError(
                "Model JSON file expects features \n\t%s\n"
                "But dataset contains \n\t%s" %
                ("\n\t".join(model_features), "\n\t".join(dataset_features)))
        elif verbose:
            print(
                "Verified that dataset features and model features exactly match."
            )
    else:
        # Cannot check each feature, but can check number of planes.
        n_dataset_planes = dataset["states"].shape[1]
        tmp_preprocess = Preprocess(model_features)
        n_model_planes = tmp_preprocess.get_output_dimension()
        if n_dataset_planes != n_model_planes:
            raise ValueError(
                "Model JSON file expects a total of %d planes from features \n\t%s\n"
                "But dataset contains %d planes" %
                (n_model_planes, "\n\t".join(model_features),
                 n_dataset_planes))
        elif verbose:
            print(
                "Verified agreement of number of model and dataset feature planes, but cannot "
                "verify exact match using old dataset format.")
Code example #2
class GameConverter:
    def __init__(self, features):
        self.feature_processor = Preprocess(features)
        self.n_features = self.feature_processor.get_output_dimension()

    def convert_game(self, file_name, bd_size):
        """Read the given SGF file into an iterable of (input,output) pairs
        for neural network training

        Each input is a GameState converted into one-hot neural net features
        Each output is an action as an (x,y) pair (passes are skipped)

        If this game's size does not match bd_size, a SizeMismatchError is raised
        """

        with open(file_name, 'r') as file_object:
            state_action_iterator = sgf_iter_states(file_object.read(),
                                                    include_end=False)

        for (state, move, player) in state_action_iterator:
            if state.get_size() != bd_size:
                raise SizeMismatchError()
            if move != go.PASS:
                nn_input = self.feature_processor.state_to_tensor(state)
                yield (nn_input, move)

    def sgfs_to_hdf5(self,
                     sgf_files,
                     hdf5_file,
                     bd_size=19,
                     ignore_errors=True,
                     verbose=False):
        """Convert all files in the iterable sgf_files into an hdf5 group to be stored in hdf5_file

        Arguments:
        - sgf_files : an iterable of relative or absolute paths to SGF files
        - hdf5_file : the name of the HDF5 file where features will be saved
        - bd_size : side length of board of games that are loaded

        - ignore_errors : if True, issues a Warning when there is an unknown
            exception rather than halting. Note that sgf.ParseException and
            go.IllegalMove exceptions are always skipped

        The resulting file has the following properties:
            states  : dataset with shape (n_data, n_features, board width, board height)
            actions : dataset with shape (n_data, 2) (actions are stored as x,y tuples of
                      where the move was played)
            file_offsets : group mapping from filenames to tuples of (index, length)

        For example, to find what positions in the dataset come from 'test.sgf':
            index, length = file_offsets['test.sgf']
            test_states = states[index:index+length]
            test_actions = actions[index:index+length]

        """

        # make a hidden temporary file in case of a crash.
        # on success, this is renamed to hdf5_file
        tmp_file = os.path.join(os.path.dirname(hdf5_file),
                                ".tmp." + os.path.basename(hdf5_file))
        h5f = h5.File(tmp_file, 'w')

        try:
            # see http://docs.h5py.org/en/latest/high/group.html#Group.create_dataset
            states = h5f.require_dataset(
                'states',
                dtype=np.uint8,
                shape=(1, self.n_features, bd_size, bd_size),
                # 'None' == arbitrary size
                maxshape=(None, self.n_features, bd_size, bd_size),
                # allow non-uint8 datasets to be loaded, coerced to uint8
                exact=False,
                # approximately 1MB chunks
                chunks=(64, self.n_features, bd_size, bd_size),
                compression="lzf")
            actions = h5f.require_dataset('actions',
                                          dtype=np.uint8,
                                          shape=(1, 2),
                                          maxshape=(None, 2),
                                          exact=False,
                                          chunks=(1024, 2),
                                          compression="lzf")

            # 'file_offsets' is an HDF5 group so that 'file_name in file_offsets' is fast
            file_offsets = h5f.require_group('file_offsets')

            # Store comma-separated list of feature planes in the scalar field 'features'. The
            # string can be retrieved using h5py's scalar indexing: h5f['features'][()]
            h5f['features'] = np.string_(','.join(
                self.feature_processor.get_feature_list()))

            if verbose:
                print("created HDF5 dataset in {}".format(tmp_file))

            next_idx = 0
            for file_name in sgf_files:
                if verbose:
                    print(file_name)
                # count number of state/action pairs yielded by this game
                n_pairs = 0
                file_start_idx = next_idx
                try:
                    for state, move in self.convert_game(file_name, bd_size):
                        if next_idx >= len(states):
                            states.resize((next_idx + 1, self.n_features,
                                           bd_size, bd_size))
                            actions.resize((next_idx + 1, 2))
                        states[next_idx] = state
                        actions[next_idx] = move
                        n_pairs += 1
                        next_idx += 1
                except go.IllegalMove:
                    warnings.warn("Illegal Move encountered in %s\n"
                                  "\tdropping the remainder of the game" %
                                  file_name)
                except sgf.ParseException:
                    warnings.warn("Could not parse %s\n\tdropping game" %
                                  file_name)
                except SizeMismatchError:
                    warnings.warn("Skipping %s; wrong board size" % file_name)
                except Exception as e:
                    # catch everything else
                    if ignore_errors:
                        warnings.warn("Unkown exception with file %s\n\t%s" %
                                      (file_name, e),
                                      stacklevel=2)
                    else:
                        raise  # re-raise with the original traceback
                finally:
                    if n_pairs > 0:
                        # '/' has special meaning in HDF5 key names, so they
                        # are replaced with ':' here
                        file_name_key = file_name.replace('/', ':')
                        file_offsets[file_name_key] = [file_start_idx, n_pairs]
                        if verbose:
                            print("\t%d state/action pairs extracted" %
                                  n_pairs)
                    elif verbose:
                        print("\t-no usable data-")
        except Exception:
            print("sgfs_to_hdf5 failed")
            h5f.close()
            os.remove(tmp_file)
            raise

        if verbose:
            print("finished. renaming %s to %s" % (tmp_file, hdf5_file))

        # processing complete; rename tmp_file to hdf5_file
        h5f.close()
        os.rename(tmp_file, hdf5_file)
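
A sketch of how GameConverter might be driven end-to-end; the directory, output file name, and feature list are hypothetical, while GameConverter itself is the class defined above:

import os

features = ["board", "ones", "turns_since"]  # hypothetical feature list
converter = GameConverter(features)

sgf_dir = "data/sgf"  # hypothetical directory of SGF files
sgf_files = (os.path.join(sgf_dir, f)
             for f in os.listdir(sgf_dir)
             if f.endswith(".sgf"))

converter.sgfs_to_hdf5(sgf_files, "go_training_data.hdf5",
                       bd_size=19, verbose=True)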
Code example #3
File: nn_util.py Project: Mavericks2019/roc-alpha-go
class NeuralNetBase(object):
    """Base class for neural network classes handling feature processing, construction
    of a 'forward' function, etc.
    """

    # keep track of subclasses to make generic saving/loading cleaner.
    # subclasses can be 'registered' with the @neuralnet decorator
    subclasses = {}

    def __init__(self, feature_list, **kwargs):
        """create a neural net object that preprocesses according to feature_list and uses
        a neural network specified by keyword arguments (using subclass' create_network())

        optional argument: init_network (boolean). If set to False, skips initializing
        self.model and self.forward; the calling function is then responsible for setting them.
        """
        defaults = {"board": 19}
        defaults.update(kwargs)
        self.preprocessor = Preprocess(feature_list, size=defaults["board"])
        kwargs["input_dim"] = self.preprocessor.get_output_dimension()

        if kwargs.get('init_network', True):
            # self.__class__ refers to the subclass so that subclasses only
            # need to override create_network()
            self.model = self.__class__.create_network(**kwargs)
            # self.forward is a lambda function wrapping a Keras function
            self.forward = self._model_forward()

    def _model_forward(self):
        """Construct a function using the current keras backend that, when given a batch
        of inputs, simply processes them forward and returns the output

        This is as opposed to model.compile(), which takes a loss function
        and training method.

        c.f. https://github.com/fchollet/keras/issues/1426
        """
        # The uses_learning_phase property is True if the model contains layers that behave
        # differently during training and testing, e.g. Dropout or BatchNormalization.
        # In these cases, K.learning_phase() is a reference to a backend variable that should
        # be set to 0 when using the network in prediction mode and is automatically set to 1
        # during training.
        if self.model.uses_learning_phase:
            forward_function = K.function(
                [self.model.input, K.learning_phase()], [self.model.output])

            # forward_function returns a list of output tensors;
            # the trailing [0] extracts the first (and only) one
            return lambda inpt: forward_function([inpt, 0])[0]
        else:
            # identical but without a second input argument for the learning phase
            forward_function = K.function([self.model.input],
                                          [self.model.output])
            return lambda inpt: forward_function([inpt])[0]

    @staticmethod
    def load_model(json_file):
        """create a new neural net object from the architecture specified in json_file
        """
        with open(json_file, 'r') as f:
            object_specs = json.load(f)

        # Create the object; the concrete (sub)class name is stored in object_specs['class']
        class_name = object_specs.get('class', 'CNNPolicy')
        try:
            network_class = NeuralNetBase.subclasses[class_name]
        except KeyError:
            raise ValueError(
                "Unknown neural network type in json file: {}\n"
                "(was it registered with the @neuralnet decorator?)".format(
                    class_name))

        # create new object
        new_net = network_class(object_specs['feature_list'],
                                init_network=False)

        new_net.model = model_from_json(object_specs['keras_model'],
                                        custom_objects={'Bias': Bias})
        if 'weights_file' in object_specs:
            new_net.model.load_weights(object_specs['weights_file'])
        new_net.forward = new_net._model_forward()
        return new_net

    def save_model(self, json_file, weights_file=None):
        """write the network model and preprocessing features to the specified file

        If a weights_file (.hdf5 extension) is also specified, model weights are also
        saved to that file and will be reloaded automatically in a call to load_model
        """
        # this looks odd because we are serializing a model with json as a string
        # then making that the value of an object which is then serialized as
        # json again.
        # It's not as crazy as it looks. A Network has 2 moving parts - the
        # feature preprocessing and the neural net, each of which gets a top-level
        # entry in the saved file. Keras just happens to serialize models with JSON
        # as well. Note how this format makes load_model fairly clean as well.
        object_specs = {
            'class': self.__class__.__name__,
            'keras_model': self.model.to_json(),
            'feature_list': self.preprocessor.get_feature_list()
        }
        if weights_file is not None:
            self.model.save_weights(weights_file)
            object_specs['weights_file'] = weights_file
        # use the json module to write object_specs to file
        with open(json_file, 'w') as f:
            json.dump(object_specs, f)
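
A hedged save/load round trip. CNNPolicy is assumed to be an @neuralnet-registered subclass (the default class name in load_model suggests one exists), and batch_of_states stands in for a NumPy batch of preprocessed positions:

# hypothetical round trip; feature names and file paths are illustrative
policy = CNNPolicy(["board", "ones", "turns_since"])
policy.save_model("policy.json", weights_file="policy_weights.hdf5")

# later: rebuild the same architecture and weights from the JSON spec
restored = NeuralNetBase.load_model("policy.json")
# batch_of_states: np.ndarray of shape (n, n_planes, 19, 19)
probabilities = restored.forward(batch_of_states)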