"""Smoke-test for load_data: print the label of the first example of each split."""
import load_data

# Tuple unpacking (list-target unpacking `[a, b, c] = ...` works but is
# unidiomatic Python).
tr_data, va_data, te_data = load_data.load_data_wrapper()

# Peek at only the first (image, label) pair of each split.  The original
# called list(split)[0][1], which materializes the entire split three times
# just to read one element; next(iter(...)) works for both lists and
# generator-backed splits without copying anything.
print(next(iter(tr_data))[1])
print(next(iter(va_data))[1])
print(next(iter(te_data))[1])
buffer = [] # extract and cluster centers as list of convolutional kernles kernels = [self.hlf((patch.reshape(patch_size)-np.min(patch))/(np.max(patch) - np.min(patch)),0.5) for patch in kmeans.cluster_centers_] return kernels def hlf(self, in_array, thr=0.0): array_out = in_array.copy() array_out[array_out > thr] = 1 array_out[array_out <= thr] = -1 return array_out if __name__ is '__main__': ''' make figures for convolutional layre ''' __, patches_data, __, test_data = load_data.load_data_wrapper() #%% np.random.seed(seed=0) test_data = [(load_data.random_maniputlate_image(img.reshape(28,28)), key) for img, key in test_data[:49]] t0 = time.time() cnn = Convolution_Layer(patches_data, 25, (8,8)) dt = time.time() - t0 plt.figure(figsize=(4.2, 4.5)) for i, patch in enumerate(cnn.kernels): plt.subplot(int(np.sqrt(cnn.n_clusters)), int(np.sqrt(cnn.n_clusters)), i + 1) plt.imshow(patch, cmap=plt.cm.gray,
# coding=gbk
"""Train a [784, 30, 10] network with cross-entropy cost and report accuracy."""
import load_data
import network

# Load the training, validation and test splits.
training_data, validation_data, test_data = load_data.load_data_wrapper()

# Cross-entropy cost with the plain (large) Gaussian weight initialization.
net = network.Network([784, 30, 10], cost=network.CrossEntropyCost)
net.large_weight_initializer()

# 30 epochs, mini-batches of 10, learning rate 0.5; monitor accuracy on the
# test split after every epoch.
stats = net.SGD(training_data, 30, 10, 0.5,
                evaluation_data=test_data,
                monitor_evaluation_accuracy=True)
print(stats)
#!/usr/bin/python3
"""Load a pickled Network (biases, weights, sizes) and score it on the test set."""
import network
import pickle
import load_data

if __name__ == "__main__":
    nw = network.Network()
    model = "model/1.model.pkl"
    # Context manager guarantees the file is closed even if a load fails
    # (the original opened the file and never closed it).
    # NOTE(review): pickle.load can execute arbitrary code when fed an
    # untrusted file — only load model files from trusted sources.
    with open(model, "rb") as f:
        # The three objects were pickled sequentially in this order.
        nw.bias = pickle.load(f)
        nw.weight = pickle.load(f)
        nw.sizes = pickle.load(f)

    _, _, te_data = load_data.load_data_wrapper()
    td = list(te_data)  # materialize so it can be both evaluated and len()'d
    correct = nw.evaluate(td)
    print("res: %d / %d" % (correct, len(td)))