from core.Layers.conv import conv2d
from core.Layers.pool import max_pool
from core.Layers import relu, dense, flatten, tanh, sigmoid
from core.Modules import sequential
from core.Functions.loss import softmax_cross_entropy

# Three CNN variants for 28x28x1 inputs, each ending in a 10-way dense layer
# trained with softmax cross-entropy.
CNN1 = sequential(
    conv2d(input_shape=(28, 28, 1), output_ch=32, kernel_sz=3),
    relu(),
    max_pool(kernel_sz=2),
    conv2d(output_ch=32, kernel_sz=3),
    relu(),
    max_pool(kernel_sz=2),
    flatten(),
    dense(output_shape=10),
    loss_f=softmax_cross_entropy(),
)

# LeNet-style variant with 5x5 kernels.
CNN2 = sequential(
    conv2d(input_shape=(28, 28, 1), output_ch=6, kernel_sz=5),
    relu(),
    max_pool(kernel_sz=2),
    conv2d(output_ch=16, kernel_sz=5),
    relu(),
    max_pool(kernel_sz=2),
    flatten(),
    dense(output_shape=10),
    loss_f=softmax_cross_entropy(),
)

# Minimal single-convolution variant.
CNN3 = sequential(
    conv2d(input_shape=(28, 28, 1), output_ch=1, kernel_sz=2),
    relu(),
    max_pool(kernel_sz=2),
    flatten(),
    dense(output_shape=10),
    loss_f=softmax_cross_entropy(),  # same loss head as CNN1/CNN2
)
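
# A rough sanity check of how the feature-map sizes in CNN1 chain together,
# assuming stride-1 "valid" convolutions and non-overlapping pooling
# (consistent with the conv shapes used in the benchmark script: 28 -> 26 -> 13 -> 11).
# This helper is only an illustration and is not part of the core library.
def _trace_cnn1_shapes(size=28):
    size = size - 3 + 1      # conv2d k=3: 28 -> 26
    size = size // 2         # max_pool k=2: 26 -> 13
    size = size - 3 + 1      # conv2d k=3: 13 -> 11
    size = size // 2         # max_pool k=2: 11 -> 5
    return 32 * size * size  # flatten: 32 channels * 5 * 5 = 800 inputs to dense

assert _trace_cnn1_shapes() == 32 * 5 * 5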
""" This script tests the performance of our tools """ from numba import cuda from tensorflow import keras from core.Layers.conv import conv2d from core.Layers.pool import max_pool from core.Layers import relu, softmax from core.Layers import dense from core.Layers import flatten import time conv1 = conv2d((1, 28, 28), output_channel=32, kernel_size=3) activ1 = relu((3, 26, 26)) pool1 = max_pool(input_dim=(32, 26, 26), kernel_sz=2) conv2 = conv2d((32, 13, 13), output_channel=32, kernel_size=3) activ2 = relu((32, 11, 11)) pool2 = max_pool(input_dim=(32, 11, 11), kernel_sz=3) activ3 = relu((32, 4, 4)) flat = flatten((32, 4, 4)) linear_layer = dense(32 * 4 * 4, 10) softmax_layer = softmax(10) fashion_mnist = keras.datasets.fashion_mnist (train_images, train_labels), (test_images, test_labels) = fashion_mnist.load_data() # origin = np.random.randn(3, 28, 28).astype(np.float32) input_image = train_images[0, :, :].reshape(1, 28, 28) input_image_gpu = cuda.to_device(input_image) cnt = 0