Code example #1
def main():
    # load_mnist, MultiLayerNet, Layer, Initializer, Optimizer and the
    # gpu_enable flag are assumed to be provided by the surrounding project.
    (x_train, t_train), (x_test, t_test) = load_mnist(flatten=False)
    print(x_train.shape, t_train.shape)
    print(t_train[0])

    net = MultiLayerNet(is_use_dropout=False)
    # Block 1: Conv -> BatchNorm -> ReLU -> MaxPool (28x28 -> 14x14).
    net.add_layer(Layer.Conv2D(16, (3, 3), pad=1, input_size=(1, 28, 28)),
                  initializer=Initializer.He())
    net.add_layer(Layer.BatchNormalization())
    net.add_layer(Layer.Relu())
    net.add_layer(Layer.Pooling(pool_h=2, pool_w=2, stride=2))
    # Block 2: Conv -> BatchNorm -> ReLU -> MaxPool (14x14 -> 7x7).
    net.add_layer(Layer.Conv2D(16, (3, 3), pad=1, initializer=Initializer.He()))
    net.add_layer(Layer.BatchNormalization())
    net.add_layer(Layer.Relu())
    net.add_layer(Layer.Pooling(pool_h=2, pool_w=2, stride=2))
    # Head: 20-unit hidden layer, then a 10-way softmax classifier.
    net.add_layer(Layer.Dense(20, initializer=Initializer.He(), activation=Layer.Relu()))
    net.add_layer(Layer.Dropout(0.5))
    net.add_layer(Layer.Dense(10))
    net.add_layer(Layer.Dropout(0.5))  # note: dropout also acts on the logits here
    net.add_layer(Layer.SoftmaxWithLoss())

    # Move parameters to the GPU when one is available.
    if gpu_enable:
        net.to_gpu()

    # Print the shape of every learnable parameter.
    for k, v in net.params.items():
        print(k, v.shape)

    # Train with Adam (lr=0.001): mini-batches of 200 for 100 iterations.
    result = net.train(
        x_train, t_train, x_test, t_test, batch_size=200, iters_num=100, print_epoch=1, evaluate_limit=500,
        is_use_progress_bar=True,
        optimizer=Optimizer.Adam(lr=0.001))

    import pickle
    import datetime

    # Save the training history as a timestamped pickle file.
    with open(f"train_data_{str(datetime.datetime.now())[:-7].replace(':', '')}.pickle", "wb") as fw:
        pickle.dump(result, fw)
    # net.save_model()

    print("============================================")
Code example #2
def make_net2():
    # Binary classifier for 64x64 single-channel images:
    # Conv -> ReLU -> MaxPool -> Dense(128) -> Dense(2) -> softmax.
    net = MultiLayerNet(is_use_dropout=False)
    net.add_layer(Layer.Conv2D(32, (3, 3), pad=1, input_size=(1, 64, 64)),
                  initializer=Initializer.He(),
                  activation=Layer.Relu())
    net.add_layer(Layer.Pooling(pool_h=2, pool_w=2, stride=2))
    net.add_layer(
        Layer.Dense(128, initializer=Initializer.He(),
                    activation=Layer.Relu()))
    net.add_layer(Layer.Dense(2, initializer=Initializer.He()))
    net.add_layer(Layer.SoftmaxWithLoss())

    return net
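
A sketch of how make_net2 might be driven, reusing the train signature from code example #1; x_train/x_test are assumed to be arrays shaped (N, 1, 64, 64) to match input_size, with two-class labels:

net = make_net2()
result = net.train(
    x_train, t_train, x_test, t_test, batch_size=100, iters_num=100, print_epoch=1, evaluate_limit=500,
    is_use_progress_bar=True,
    optimizer=Optimizer.Adam(lr=0.001))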
Code example #3
def make_net1():
    # Three-class classifier for 128x128 single-channel images:
    # two Conv-BatchNorm-ReLU-MaxPool blocks, then a dense head.
    net = MultiLayerNet(is_use_dropout=False)
    net.add_layer(Layer.Conv2D(32, (3, 3), pad=1, input_size=(1, 128, 128)),
                  initializer=Initializer.He())
    net.add_layer(Layer.BatchNormalization())
    net.add_layer(Layer.Relu())
    net.add_layer(Layer.Pooling(pool_h=2, pool_w=2, stride=2))
    # net.add_layer(Layer.Conv2D(64, (3, 3), pad=1, initializer=Initializer.He()))
    # net.add_layer(Layer.BatchNormalization())
    # net.add_layer(Layer.Relu())
    # net.add_layer(Layer.Pooling(pool_h=2, pool_w=2, stride=2))
    net.add_layer(Layer.Conv2D(32, (3, 3), pad=1,
                               initializer=Initializer.He()))
    net.add_layer(Layer.BatchNormalization())
    net.add_layer(Layer.Relu())
    net.add_layer(Layer.Pooling(pool_h=2, pool_w=2, stride=2))
    net.add_layer(
        Layer.Dense(30, initializer=Initializer.He(), activation=Layer.Relu()))
    net.add_layer(Layer.Dropout(0.5))
    net.add_layer(Layer.Dense(3))
    net.add_layer(Layer.Dropout(0.5))  # note: dropout also acts on the logits here
    net.add_layer(Layer.SoftmaxWithLoss())
    return net
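
The spatial sizes in make_net1 can be checked with the standard output-size formula out = (in + 2*pad - filter) // stride + 1; a quick self-contained sketch (plain Python, no project code required):

def out_size(size, filt, pad=0, stride=1):
    # standard convolution/pooling output-size formula
    return (size + 2 * pad - filt) // stride + 1

size = out_size(128, 3, pad=1)        # conv1: 128 -> 128
size = out_size(size, 2, stride=2)    # pool1: 128 -> 64
size = out_size(size, 3, pad=1)       # conv2: 64 -> 64
size = out_size(size, 2, stride=2)    # pool2: 64 -> 32
print(32 * size * size)               # 32768 inputs feeding Dense(30)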
Code example #4
    def save_model(self, path=None):
        # collections.OrderedDict, datetime, numpy (np) and jsonpickle (jsp)
        # are assumed to be imported at module level.
        save_data = OrderedDict()
        params = self.params

        save_data["NetConfig"] = {
            "weight_decay_lambda": self.weight_decay_lambda,
            "is_use_dropout": self.is_use_dropout,
            "dropout_ratio": self.dropout_ratio
        }
        # Re-instantiate stateful layers so that only their configuration
        # (plus the BatchNorm running statistics) gets serialized,
        # not their forward-pass caches.
        layer_info = []
        for layer in self.added_layer_list:
            t = layer
            if isinstance(t, Layer.SoftmaxWithLoss):
                t = Layer.SoftmaxWithLoss()
            elif isinstance(t, Layer.IdentityWithLoss):
                t = Layer.IdentityWithLoss()
            elif isinstance(t, Layer.BatchNormalization):
                t = Layer.BatchNormalization(running_mean=t.running_mean, running_var=t.running_var)
            elif isinstance(t, Layer.Pooling):
                t = Layer.Pooling(t.pool_h, t.pool_w, t.stride, t.pad)

            s = jsp.encode(t)  # serialize the layer object to a JSON string
            layer_info.append(s)

        save_data["LayerInfo"] = layer_info
        # for layer_idx, layer in enumerate(self.layers.values()):
        #     if isinstance(layer, Layer.BatchNormalization):
        #         params[f"BN_m{layer_idx}"] = layer.running_mean
        #         params[f"BN_v{layer_idx}"] = layer.running_var

        save_data["Params"] = params

        if path is None:
            path = f"train_weight_{str(datetime.datetime.now())[:-7].replace(':', '')}.npz"
        # print(save_data)

        np.savez_compressed(path, **save_data)
        print(f"Weights were saved to {path}")