Example #1
    # Reshape flat test rows to NCHW images (N, 3, 32, 32)
    xt = xt.reshape(10000, 32, 32, 3).transpose(0, 3, 1, 2)
    y = y.ravel()
    yt = yt.ravel()

    n = 50000
    nt = 10000
    x = x[:n]
    y = y[:n]
    xt = xt[:nt]
    yt = yt[:nt]

    # Model
    net = Net()
    net.push(Conv2d(5, 5, 3, 20))  # 3x32x32 -> 20x28x28
    net.push(Relu())
    net.push(BatchNorm())
    net.push(Maxpooling(4, 4))  # 20x28x28 -> 20x7x7
    net.push(Reshape((980,)))  # flatten 20*7*7 = 980
    net.push(Linear(980, 200))
    net.push(Relu())
    net.push(BatchNorm())
    net.push(Softmax(200, 10))

    # Data
    data = DataProvider()
    data.train_input(x, y)
    data.test_input(xt, yt)
    data.batch_size(32)
    data.batch_size_test(1000)

    lr = 1e-3
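
The layer comments above follow from simple shape arithmetic: a 5x5 convolution without padding maps a 3x32x32 image to 20x28x28 (20 filters, 32 - 5 + 1 = 28), and 4x4 max pooling reduces that to 20x7x7, which flattens to 980 features. A quick standalone check of that arithmetic (the conv_out helper is just for illustration, not part of the library above):

    def conv_out(size, kernel, stride=1, padding=0):
        # Standard convolution output-size formula
        return (size - kernel + 2 * padding) // stride + 1

    spatial = conv_out(32, 5)   # 5x5 conv, no padding: 32 -> 28
    spatial //= 4               # 4x4 max pooling: 28 -> 7
    assert 20 * spatial * spatial == 980  # matches Reshape/Linear above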
Example #2
    def __init__(self, sizes, batch_size, epoch_num,
                 use_trained_params=False, filename=None,
                 img_dim=(1, 28, 28),
                 conv_param={'filter_num': 32, 'filter_size': 3, 'padding': 1, 'stride': 1},
                 optimizer='Adam', activation='ReLU', use_dropout=True, dropout_p=0.2, use_bn=True):
        self.num_layers = len(sizes)
        self.sizes = sizes
        self.batch_size = batch_size
        self.epoch_num = epoch_num
        # self.learning_rate = learning_rate
        self.activation = activation
        self.use_dropout = use_dropout
        self.dropout_p = dropout_p
        self.use_bn = use_bn

        self.filter_num = conv_param['filter_num']
        self.filter_size = conv_param['filter_size']
        self.filter_padding = conv_param['padding']
        self.filter_stride = conv_param['stride']
        self.img_c = img_dim[0]
        self.img_wh = img_dim[1]
        self.conv_output_size = int((img_dim[1] - self.filter_size + 2*self.filter_padding) / self.filter_stride) + 1
        self.pool_output_size = int(self.filter_num * (self.conv_output_size/2) * (self.conv_output_size/2))
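        # Worked example (not from the original source): with the default
        # conv_param above (filter_size=3, padding=1, stride=1) and 28x28 input,
        # conv_output_size = (28 - 3 + 2*1)/1 + 1 = 28; after the 2x2/stride-2
        # pooling below, pool_output_size = 32 * 14 * 14 = 6272.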

        self.opt = optimizer
        optimizers = {'SGD': SGD, 'Momentum_SGD': Momentum_SGD, 'AdaGrad': AdaGrad, 'RMSProp': RMSProp, 'AdaDelta': AdaDelta, 'Adam': Adam}
        self.optimizer = optimizers[self.opt]()

        if use_trained_params:
            path = os.path.dirname(os.path.abspath(__file__))
            loaded_params = np.load(os.path.join(path, filename))
            self.W1 = loaded_params['W1']
            self.b1 = loaded_params['b1']
            self.W2 = loaded_params['W2']
            self.b2 = loaded_params['b2']
            self.gamma = loaded_params['gamma']
            self.beta = loaded_params['beta']
            if use_bn:
                self.running_mean = loaded_params['running_mean']
                self.running_var = loaded_params['running_var']
        else:
            np.random.seed(12)
            # Conv layer weights
            self.W1 = np.sqrt(1/sizes[0]) * np.random.randn(self.filter_num, img_dim[0], self.filter_size, self.filter_size)
            self.b1 = np.sqrt(1/sizes[0]) * np.random.randn(self.filter_num)
            # BatchNorm layer
            self.gamma = np.ones(self.filter_num*self.conv_output_size*self.conv_output_size)
            self.beta = np.zeros(self.filter_num*self.conv_output_size*self.conv_output_size)
            # Fully connected layer weights
            self.W2 = np.sqrt(1/sizes[0]) * np.random.randn(self.pool_output_size, self.sizes[2])
            self.b2 = np.sqrt(1/sizes[0]) * np.random.randn(self.sizes[2])
            
        # layers of network
        activation_function = {'Sigmoid': Sigmoid, 'ReLU': ReLU}
        self.layers = {}
        self.layers['Conv'] = Conv2D(self.W1, self.b1, self.filter_stride, self.filter_padding)
        if self.use_bn:
            if use_trained_params:
                self.layers['BatchNorm'] = BatchNorm(self.gamma, self.beta,
                                                     running_mean=self.running_mean, running_var=self.running_var)
            else:
                self.layers['BatchNorm'] = BatchNorm(self.gamma, self.beta)
        self.layers['Activation'] = activation_function[self.activation]()
        if self.use_dropout:
            self.layers['Dropout'] = Dropout(self.dropout_p)
        self.layers['Pool'] = MaxPool(pool_h=2, pool_w=2, stride=2)
        self.layers['FullyConnected2'] = FullyConnected(self.W2, self.b2)
        self.lastLayer = SoftmaxLoss()
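
Both this class and the one below drive self.layers the same way: the forward pass chains each layer's output into the next in insertion order (a plain dict preserves insertion order in Python 3.7+), with the loss layer applied last. A minimal sketch of the predict/loss methods such a class typically pairs with; the method names and the per-layer forward(x) / forward(y, t) signatures are assumptions, not taken from the snippet:

    def predict(self, x):
        # Chain the layers in insertion order; layers like Dropout and
        # BatchNorm usually also take a train-mode flag, omitted here.
        for layer in self.layers.values():
            x = layer.forward(x)
        return x

    def loss(self, x, t):
        # Softmax + cross-entropy loss on the final scores
        y = self.predict(x)
        return self.lastLayer.forward(y, t)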
Example #3
    def __init__(self,
                 sizes,
                 batch_size,
                 epoch_num,
                 use_trained_params=False,
                 filename=None,
                 optimizer='SGD',
                 activation='ReLU',
                 use_dropout=True,
                 dropout_p=0.2,
                 use_bn=True):
        self.num_layers = len(sizes)
        self.sizes = sizes
        self.batch_size = batch_size
        self.epoch_num = epoch_num
        self.activation = activation
        self.use_dropout = use_dropout
        self.dropout_p = dropout_p
        self.use_bn = use_bn
        self.opt = optimizer
        optimizers = {
            'SGD': SGD,
            'Momentum_SGD': Momentum_SGD,
            'AdaGrad': AdaGrad,
            'RMSProp': RMSProp,
            'AdaDelta': AdaDelta,
            'Adam': Adam
        }
        self.optimizer = optimizers[self.opt]()

        if use_trained_params:
            path = os.path.dirname(os.path.abspath(__file__))
            loaded_params = np.load(os.path.join(path, filename))
            self.W1 = loaded_params['W1']
            self.b1 = loaded_params['b1']
            self.W2 = loaded_params['W2']
            self.b2 = loaded_params['b2']
            self.gamma = loaded_params['gamma']
            self.beta = loaded_params['beta']
            # When using Batch Normalization
            if self.use_bn:
                self.running_mean = loaded_params['running_mean']
                self.running_var = loaded_params['running_var']

        else:
            np.random.seed(12)
            self.W1 = np.sqrt(1 / sizes[0]) * np.random.randn(sizes[0], sizes[1])  # (784, 50)
            self.b1 = np.sqrt(1 / sizes[0]) * np.random.randn(sizes[1])
            self.W2 = np.sqrt(1 / sizes[1]) * np.random.randn(sizes[1], sizes[2])  # (50, 10)
            self.b2 = np.sqrt(1 / sizes[1]) * np.random.randn(sizes[2])
            self.gamma = np.ones(self.W1.shape[1])
            self.beta = np.zeros(self.W1.shape[1])

        # layers of network
        activation_function = {'Sigmoid': Sigmoid, 'ReLU': ReLU}
        self.layers = {}
        self.layers['FullyConnected1'] = FullyConnected(self.W1, self.b1)
        if self.use_bn:
            if use_trained_params:
                self.layers['BatchNorm'] = BatchNorm(
                    self.gamma,
                    self.beta,
                    running_mean=self.running_mean,
                    running_var=self.running_var)
            else:
                self.layers['BatchNorm'] = BatchNorm(self.gamma, self.beta)
        self.layers['Activation'] = activation_function[self.activation]()
        if self.use_dropout:
            self.layers['Dropout'] = Dropout(self.dropout_p)
        self.layers['FullyConnected2'] = FullyConnected(self.W2, self.b2)

        self.lastLayer = SoftmaxLoss()
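
For reference, this fully connected variant is MNIST-shaped (784 inputs, 50 hidden units, 10 classes, per the shape comments above). A hypothetical construction call; the class name Network is a placeholder, since the snippet does not show it, and the batch/epoch values are illustrative:

    net = Network(sizes=[784, 50, 10],  # input, hidden, output
                  batch_size=100,
                  epoch_num=10,
                  optimizer='Adam',
                  activation='ReLU',
                  use_dropout=True,
                  dropout_p=0.2,
                  use_bn=True)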