import numpy as np

# X (flattened 28x28 images) and t (integer class labels) are assumed to be
# loaded earlier; act, err, conv, pool, NN, CNN and create_label come from the
# project's own modules.
n_data, n_input = X.shape
n_class = np.unique(t).size
T = create_label(t, n_data, n_class)

print('make train/test data')
n_train, n_test = 1000, 50
i = np.random.permutation(n_data)[:n_train + n_test]
i_train, i_test = np.hsplit(i, [n_train])
X_train = X[i_train, :].reshape(n_train, 1, 28, 28)
X_test = X[i_test, :].reshape(n_test, 1, 28, 28)
T_train, T_test = T[i_train, :], T[i_test, :]

print('initialize...')
linear, sigmoid, softmax, relu = act.linear(), act.sigmoid(), act.softmax(), act.relu()
conv1, conv2 = conv(20, 1, 5, 5, relu), conv(50, 20, 5, 5, relu)  # 20 and 50 feature maps, 5x5 kernels, ReLU
pool1, pool2 = pool(2, 2, 2), pool(2, 2, 2)                       # 2x2 pooling layers
neural = NN(800, 500, 10, linear, sigmoid, softmax)               # fully connected 800-500-10 head
error = err.cross_entropy()
cnn = CNN(conv1, pool1, conv2, pool2, neural, error)

print('train...')
cnn.train(X_train, T_train, epsilon=0.005, lam=0.0001, gamma=0.9, s_batch=5, epochs=50)

print('predict...')
Y_test = cnn.predict(X_test)
accuracy = cnn.accuracy(Y_test, T_test)
print('accuracy: {0}'.format(accuracy))

print('save figure of loss...')
cnn.save_lossfig()
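
# `create_label` is not defined in this listing; below is a minimal sketch,
# assuming it one-hot encodes the integer labels so they match the network's
# softmax / cross-entropy output.
def create_label(t, n_data, n_class):
    # Build an (n_data, n_class) matrix of zeros and set the column of the
    # true class to 1 for each sample (one-hot encoding).
    T = np.zeros((n_data, n_class))
    T[np.arange(n_data), t] = 1.0
    return T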
# Continuation of the VGG fine-tuning script: `output`, `VGG`, `srgan` and the
# helpers (train_model, devide, tensor2numpy, create_onehot, devide_submission)
# are assumed to be defined or imported earlier, as is `mode`
# (presumably scipy.stats.mode).
output = Dense(256, activation='relu')(output)
output = Dense(101, activation='softmax')(output)
VGG = Model(VGG.input, output)
VGG.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
plot_model(VGG, to_file='./model.png', show_shapes=True, show_layer_names=True)
train_model(3, 'vgg', VGG, srgan, resize=True)

"""Test, evaluate the model"""
test_set = devide(12, 2, 2)
X = tensor2numpy('./data/', test_set, srgan)
x = [X[k] for k in X.keys()]
test = np.array(x, dtype='float64')
y = create_onehot(X)
evaluation = cnn.evaluate(test, y)  # `cnn` is assumed to be the trained model from an earlier step
prediction = cnn.predict(test)

"""Make predictions"""
sss = devide_submission(5)
preds = {}
for i in range(0, len(sss), 5):
    test_set = tensor2numpy('./data/', sss[i:i + 5], model)
    t = [test_set[k] for k in test_set.keys()]
    test = np.array(t, dtype='float64')
    prediction = cnn.predict(test)
    # np.argsort is ascending, so sort the negated scores to get the indices
    # of the five highest-probability classes
    top_five = np.argsort(-prediction, axis=1)[:, :5]
    top = mode(top_five, 0)  # most frequent class at each rank position across the batch
    preds[sss[i]] = top

top_five_prediction = {}
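
# A small, self-contained illustration (with made-up probabilities) of the
# top-k voting used in the loop above; only numpy and scipy.stats.mode are
# needed, and top-2 stands in for the top-5 logic.
import numpy as np
from scipy.stats import mode

probs = np.array([[0.1, 0.6, 0.2, 0.1],
                  [0.2, 0.5, 0.2, 0.1],
                  [0.3, 0.1, 0.4, 0.2]])

# Sorting the negated scores gives class indices in descending confidence.
top2 = np.argsort(-probs, axis=1)[:, :2]   # [[1, 2], [1, 0], [2, 0]]

# Mode over axis 0: the most frequent class at each rank position across the
# three rows -> classes 1 and 0.
print(mode(top2, 0).mode)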