def test_relu_2_backward(self):
    print('\n==================================')
    print('       Test ReLU backward         ')
    print('==================================')
    np.random.seed(123)
    relu = ReLU()
    x = np.random.randn(7, 7)
    d_prev = np.random.randn(*x.shape)

    out = relu.forward(x)
    dx = relu.backward(d_prev, 0.0)

    correct_dx = [[0., -1.29408532, -1.03878821, 0., 0., 0.02968323, 0.],
                  [0., 1.75488618, 0., 0., 0., 0.79486267, 0.],
                  [0., 0., 0.80723653, 0.04549008, -0.23309206, -1.19830114, 0.19952407],
                  [0.46843912, 0., 1.16220405, 0., 0., 1.03972709, 0.],
                  [0., 0., 0., 0., 0., 0., 0.80730819],
                  [0., -1.0859024, -0.73246199, 0., 2.08711336, 0., 0.],
                  [0., 0.18103513, 1.17786194, 0., 1.03111446, -1.08456791, -1.36347154]]

    e = rel_error(correct_dx, dx)
    print('dX relative difference:', e)
    self.assertTrue(e <= 5e-08)
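# A minimal ReLU sketch consistent with the two tests above; this is an assumed
# reference implementation, not necessarily the assignment's. The two-argument
# backward mirrors the tests' relu.backward(d_prev, 0.0) call, whose second
# argument (a learning rate) is unused because ReLU has no learnable parameters.
class _ReLUSketch:
    def forward(self, x):
        self.mask = (x > 0)            # remember which inputs were positive
        return x * self.mask           # zero out the negative entries

    def backward(self, d_prev, learning_rate=0.0):
        return d_prev * self.mask      # gradient flows only where x > 0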
def test_model(num_feat, num_classes):
    classifier = ClassifierModel()
    classifier.add_layer('FC-1', FCLayer(num_feat, 2))
    classifier.add_layer('Sigmoid', Sigmoid())
    classifier.add_layer('FC-2', FCLayer(2, 5))
    classifier.add_layer('ReLU', ReLU())
    classifier.add_layer('FC-3', FCLayer(5, 3))
    classifier.add_layer('tanh', Tanh())
    classifier.add_layer('FC-4', FCLayer(3, num_classes))
    classifier.add_layer('Softmax', SoftmaxLayer())
    return classifier
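# A minimal sketch of the sequential container that test_model assumes: layers
# stored in insertion order and chained front to back. _SequentialSketch and its
# method signatures are illustrative assumptions; the real ClassifierModel may
# differ in details such as loss handling and parameter updates.
from collections import OrderedDict

class _SequentialSketch:
    def __init__(self):
        self.layers = OrderedDict()

    def add_layer(self, name, layer):
        self.layers[name] = layer       # layers execute in the order added

    def forward(self, x):
        out = x
        for layer in self.layers.values():
            out = layer.forward(out)    # each layer's output feeds the next
        return out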
def test_relu_1_forward(self):
    print('\n==================================')
    print('       Test ReLU forward          ')
    print('==================================')
    x = np.linspace(-0.7, 0.5, num=20).reshape(5, 4)
    relu = ReLU()

    out = relu.forward(x)
    correct_out = np.array([[0., 0., 0., 0.],
                            [0., 0., 0., 0.],
                            [0., 0., 0., 0.],
                            [0.05789474, 0.12105263, 0.18421053, 0.24736842],
                            [0.31052632, 0.37368421, 0.43684211, 0.5]])

    e = rel_error(correct_out, out)
    print('Relative difference:', e)
    self.assertTrue(e <= 5e-08)
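# rel_error is assumed to be the usual element-wise relative-error helper (as in
# CS231n-style assignments); a sketch is shown here in case its definition lives
# elsewhere in the repository.
import numpy as np

def rel_error(x, y):
    x, y = np.asarray(x), np.asarray(y)
    # max over elements of |x - y| / max(|x| + |y|, 1e-8)
    return np.max(np.abs(x - y) / np.maximum(1e-8, np.abs(x) + np.abs(y)))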
def test_model(num_feat, num_classes):
    classifier = CNN_Classifier()
    classifier.add_layer('Conv-1', ConvolutionLayer(num_feat, 2, kernel_size=3, stride=1, pad=1))
    classifier.add_layer('ReLU', ReLU())
    classifier.add_layer('Conv-2', ConvolutionLayer(2, 3, kernel_size=3, stride=1, pad=1))
    classifier.add_layer('tanh', Tanh())
    classifier.add_layer('Conv-3', ConvolutionLayer(3, 3, kernel_size=3, stride=1, pad=0))
    classifier.add_layer('Sigmoid', Sigmoid())
    classifier.add_layer('Max-pool - 1', MaxPoolingLayer(kernel_size=2, stride=1))
    classifier.add_layer('FC-4', FCLayer(12, num_classes))
    classifier.add_layer('Softmax', SoftmaxLayer())
    return classifier
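# Shape check for the model above. The 5x5 spatial input size is inferred from
# FCLayer(12, num_classes) via the standard output-size formula
# (H + 2*pad - kernel) // stride + 1; treat the input size as an assumption.
def _out_size(size, kernel, stride=1, pad=0):
    return (size + 2 * pad - kernel) // stride + 1

h = _out_size(5, 3, stride=1, pad=1)   # Conv-1: 5x5 -> 5x5
h = _out_size(h, 3, stride=1, pad=1)   # Conv-2: 5x5 -> 5x5
h = _out_size(h, 3, stride=1, pad=0)   # Conv-3: 5x5 -> 3x3
h = _out_size(h, 2, stride=1, pad=0)   # Max-pool: 3x3 -> 2x2
assert 3 * h * h == 12                 # 3 channels * 2 * 2 = FC input of 12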
###########################################################################
# Build and experiment with your CNN model.
# Layers can be added as in the example below.
# Be careful! Layers run in the order they are added.
# Depending on the layers and your implementation, it is normal for one
# epoch to take 5 minutes or more. ^^
###########################################################################

# Add Layers: example of adding layers.
# Following the example below, add layers as the assignment specifies
# (or however you like, independently of the assignment).

# Convolution Layer
CNN.add_layer('Conv Layer - 1',
              ConvolutionLayer(in_channels=in_channnel, out_channels=8, kernel_size=3, pad=1))
# ReLU Layer
CNN.add_layer('ReLU Layer - 1', ReLU())
# Convolution Layer
CNN.add_layer('Conv Layer - 2',
              ConvolutionLayer(in_channels=8, out_channels=8, kernel_size=3, pad=1))
# ReLU Layer
CNN.add_layer('ReLU Layer - 2', ReLU())
# Max-pool Layer
CNN.add_layer('Max-Pool Layer', MaxPoolingLayer(kernel_size=2, stride=2))
# FC Layer
CNN.add_layer('FC Example Layer - 1', FCLayer(input_dim=1568, output_dim=500))
# FC Layer
CNN.add_layer('FC Example Layer - 2', FCLayer(input_dim=500, output_dim=5))
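# Shape note for FC Example Layer - 1's input_dim of 1568: assuming 28x28 input
# images (e.g. MNIST-sized data, an assumption not stated in this snippet), the
# two pad=1 3x3 convolutions preserve 28x28, and the 2x2 stride-2 max-pool
# halves it to 14x14, so 8 channels * 14 * 14 = 1568.
assert 8 * 14 * 14 == 1568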
# plt.show()

# ===========================================================================
print('================================ 2. ReLU ==================================')
"""
* Correct ReLU *
Forward: [1. 0. 3.]
Backward: [-10   0 -30]
"""
relu = ReLU()
temp2 = np.array([1, -0.1, 3], dtype=np.float32)
temp3 = np.array([-10, -20, -30], dtype=np.float32)

print('ReLU Check')
print('Forward: \n', relu.forward(temp2))
print('Backward: \n', relu.backward(temp3))
print()

# ===========================================================================
print('=========================== 3. Convolution Layer ==========================')
# Convolution with stride 1, no padding
in_channel = 1
out_channel = 3
ex) shuffle data every epoch (You don't need to worry about this, but you can try it on your own.)
ex) set/change random seed (I've already done it.)
"""

print('===== ReLU Check =====')
"""
The results should be exactly the same as below:

Forward:
 [1. 0. 3.]
Backward:
 [-10   0 -30]
"""
relu = ReLU()
temp2 = np.array([1, -0.1, 3])
temp3 = np.array([-10, -20, -30])

print('Forward: \n', relu.forward(temp2))
print('Backward: \n', relu.backward(temp3))
print()

print('===== Sigmoid Check =====')
"""
The results should be exactly the same as below:

Forward:
 [[0.26894142 0.88079708 0.62245933]
 [0.5        0.47502081 0.52497919]]
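# A minimal Sigmoid forward sketch consistent with the expected values above
# (e.g. 1 / (1 + exp(1)) ~= 0.26894142 for x = -1); _sigmoid is illustrative
# only and not the assignment's required class.
def _sigmoid(x):
    return 1.0 / (1.0 + np.exp(-x))    # element-wise logistic function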
###########################################################################
# Enjoy.
#
# Build and experiment with your CNN model.
# Layers can be added as in the example below.
# Be careful! Layers run in the order they are added.
# Depending on the layers and your implementation, it is normal for one
# epoch to take 5 minutes or more. ^^
###########################################################################

# Add Layers: example of adding layers.
# Following the example below, add layers as the assignment specifies
# (or however you like, independently of the assignment).

# Convolution Layers
CNN.add_layer('Conv Layer 1',
              ConvolutionLayer(in_channels=in_channnel, out_channels=8, kernel_size=3, pad=1))
CNN.add_layer('ReLU 1', ReLU())
CNN.add_layer('Conv Layer 2',
              ConvolutionLayer(in_channels=8, out_channels=8, kernel_size=3, pad=1))
CNN.add_layer('ReLU 2', ReLU())

# Max-pool Layer
CNN.add_layer('Max-Pool Layer', MaxPoolingLayer(kernel_size=2, stride=2))

# FC Layers
CNN.add_layer('FC Layer 1', FCLayer(input_dim=1568, output_dim=500))
CNN.add_layer('FC Layer 2', FCLayer(input_dim=500, output_dim=5))

# Softmax Layer: always add this layer last
CNN.add_layer('Softmax Layer', SoftmaxLayer())

# Print the model architecture
#                                                                         #
###########################################################################

# Add Layers: example of adding layers.
# Following the example below, add layers as the assignment specifies
# (or however you like, independently of the assignment).

# Convolution Layer
CNN.add_layer('Conv Example Layer',
              ConvolutionLayer(in_channels=in_channnel, out_channels=8, kernel_size=3, pad=1))
# ReLU Layer
CNN.add_layer('ReLU Example Layer', ReLU())
# Convolution Layer
CNN.add_layer('Conv Example Layer2',
              ConvolutionLayer(in_channels=8, out_channels=8, kernel_size=3, pad=1))
# ReLU Layer
CNN.add_layer('ReLU Example Layer2', ReLU())
# Max-pool Layer
CNN.add_layer('Max-Pool Example Layer', MaxPoolingLayer(kernel_size=2, stride=2))
# FC Layer
CNN.add_layer('FC Example Layer1', FCLayer(input_dim=1568, output_dim=500))
num_train, in_channels, H, W = x_train.shape
num_class = y_train.shape[1]

train_accuracy = []
valid_accuracy = []

best_epoch = -1
best_acc = -1
best_model = None

# =============================== EDIT HERE ===============================
# Add layers
CNN.add_layer('Conv-1', ConvolutionLayer(in_channels=in_channels, out_channels=4, kernel_size=3, pad=1))
CNN.add_layer('ReLU-1', ReLU())
CNN.add_layer('Conv-2', ConvolutionLayer(in_channels=4, out_channels=4, kernel_size=3, pad=1))
CNN.add_layer('ReLU-2', ReLU())
CNN.add_layer('Max-pool-1', MaxPoolingLayer(kernel_size=2, stride=2))
CNN.add_layer('FC-1', FCLayer(784, 500))
CNN.add_layer('ReLU-3', ReLU())
CNN.add_layer('FC-2', FCLayer(500, 10))
CNN.add_layer('Softmax Layer', SoftmaxLayer())
# =========================================================================

CNN.summary()

print('Training Starts...')
num_batch = int(np.ceil(num_train / batch_size))
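# Shape note for FC-1's input of 784 in the block above: assuming 28x28 input
# images (e.g. MNIST-sized data, an assumption), both pad=1 3x3 convolutions
# preserve 28x28 and the 2x2 stride-2 max-pool yields 14x14, so
# 4 channels * 14 * 14 = 784 features into FCLayer(784, 500).
assert 4 * 14 * 14 == 784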