def save_pretrained_network(epochs=30, mini_batch_size=10, eta=3.0):
    """Train a Network with SGD and pickle its (weights, biases) to disk.

    Args:
        epochs: number of SGD passes over the training data.
        mini_batch_size: size of each SGD mini-batch.
        eta: learning rate.
    """
    network = Network(sizes=DEFAULT_LAYER_SIZES)
    training_data, validation_data, test_data = load_data_wrapper()
    network.SGD(training_data, epochs, mini_batch_size, eta)
    weights_and_biases = (network.weights, network.biases)
    # pickle requires a binary-mode file; mode='w' raises TypeError on dump.
    # 'with' guarantees the file is closed even if dump fails.
    with open(PRETRAINED_DATA_FILE, mode='wb') as data_file:
        pickle.dump(weights_and_biases, data_file)
def save_pretrained_network(epochs=30, mini_batch_size=10, eta=3.0):
    """Train a Network with SGD and pickle its (weights, biases) to disk.

    NOTE(review): this is a duplicate definition of save_pretrained_network
    elsewhere in this file — one copy should probably be removed.

    Args:
        epochs: number of SGD passes over the training data.
        mini_batch_size: size of each SGD mini-batch.
        eta: learning rate.
    """
    network = Network(sizes=DEFAULT_LAYER_SIZES)
    training_data, validation_data, test_data = load_data_wrapper()
    network.SGD(training_data, epochs, mini_batch_size, eta)
    weights_and_biases = (network.weights, network.biases)
    # cPickle was removed in Python 3; pickle is the equivalent (C-accelerated).
    # pickle requires a binary-mode file, hence 'wb' instead of 'w'.
    with open(PRETRAINED_DATA_FILE, mode='wb') as data_file:
        pickle.dump(weights_and_biases, data_file)
def test_network():
    """Evaluate the pretrained network on the test set and print a
    (correct, wrong, accuracy) tuple."""
    network = get_pretrained_network()
    training_data, validation_data, test_data = load_data_wrapper()
    correct = 0
    total = 0
    for sample_in, expected in test_data:
        total += 1
        if np.argmax(network.feedforward(sample_in)) == expected:
            correct += 1
    print((correct, total - correct, float(correct) / total))
def test_network():
    """Evaluate the pretrained network on the test set and print the
    correct count, wrong count, and accuracy.

    NOTE(review): duplicate definition of test_network in this file;
    this variant prints three separate values rather than one tuple.
    """
    network = get_pretrained_network()
    training_data, validation_data, test_data = load_data_wrapper()
    correct = 0
    total = 0
    for sample_in, expected in test_data:
        total += 1
        if np.argmax(network.feedforward(sample_in)) == expected:
            correct += 1
    print(correct, total - correct, float(correct) / total)
def save_organized_images(n_images_per_number=10):
    """Collect up to n_images_per_number training images for each digit 0-9
    and pickle the resulting {digit: [image, ...]} map to disk.

    Args:
        n_images_per_number: maximum images to keep per digit class.
    """
    training_data, validation_data, test_data = load_data_wrapper()
    image_map = {digit: [] for digit in range(10)}
    for im, output_arr in training_data:
        # Stop as soon as every digit bucket has enough examples.
        if min(len(images) for images in image_map.values()) >= n_images_per_number:
            break
        value = int(np.argmax(output_arr))
        if len(image_map[value]) >= n_images_per_number:
            continue
        image_map[value].append(im)
    # 'with' guarantees the file is closed even if dump fails.
    with open(IMAGE_MAP_DATA_FILE, mode='wb') as data_file:
        pickle.dump(image_map, data_file)
def save_organized_images(n_images_per_number=10):
    """Collect up to n_images_per_number training images for each digit 0-9
    and pickle the resulting {digit: [image, ...]} map to disk.

    NOTE(review): this is a duplicate definition of save_organized_images
    elsewhere in this file — one copy should probably be removed.

    Args:
        n_images_per_number: maximum images to keep per digit class.
    """
    training_data, validation_data, test_data = load_data_wrapper()
    image_map = {digit: [] for digit in range(10)}
    for im, output_arr in training_data:
        # Stop as soon as every digit bucket has enough examples.
        if min(len(images) for images in image_map.values()) >= n_images_per_number:
            break
        value = int(np.argmax(output_arr))
        if len(image_map[value]) >= n_images_per_number:
            continue
        image_map[value].append(im)
    # cPickle was removed in Python 3; pickle is the equivalent.
    # pickle requires a binary-mode file, hence 'wb' instead of 'w'.
    with open(IMAGE_MAP_DATA_FILE, mode='wb') as data_file:
        pickle.dump(image_map, data_file)