Example #1
0
        # NOTE(review): fragment of a larger method — the enclosing `def`, and
        # the setup of self.weights, file_weights, file_bias and `length`, all
        # begin above the visible region; confirm against the full file.
        self.bias = []
        for i in range(length):
            # Arrays appear to be stored under "arr_0", "arr_1", ... keys —
            # presumably the default names numpy.savez assigns; verify loader.
            index = "arr_" + str(i)
            self.weights.append(file_weights[index])
            self.bias.append(file_bias[index])
            
def get_minibatchs(data, label, batch_size):
    """Split parallel ``data``/``label`` sequences into mini-batches.

    Both sequences are sliced at the same offsets, so they are assumed to be
    the same length (one label per sample).  The final batch may be shorter
    than ``batch_size`` when the length is not an exact multiple.

    Args:
        data: indexable sequence of samples.
        label: indexable sequence of targets, parallel to ``data``.
        batch_size: number of samples per batch (positive int).

    Returns:
        Tuple ``(minibatch_data, minibatch_label)`` of lists of slices.
    """
    # Compute the batch start offsets once and reuse them for both sequences.
    # The original sliced labels with range(0, len(data), ...): correct only
    # because data and label are parallel — sharing the offsets makes that
    # assumption explicit and avoids the duplicated len() call.
    offsets = range(0, len(data), batch_size)
    minibatch_data = [data[k:k + batch_size] for k in offsets]
    minibatch_label = [label[k:k + batch_size] for k in offsets]
    return minibatch_data, minibatch_label

# NOTE(review): truncated script block — the body of the training loop is cut
# off below the visible region, so only the visible setup is documented.
if __name__ == "__main__":

    batch_size = 10
    # DataLoader / DNN / vectorized_result are defined elsewhere in the file
    # (not visible here) — confirm their contracts against the full source.
    DL = DataLoader()
    # 't10k' presumably selects the MNIST test split files — TODO confirm.
    train_data, train_label0 = DL.load_mnist('./data/mnist')
    test_data, test_label = DL.load_mnist('./data/mnist', 't10k')

    # Scale pixel values into [0, 1] and flatten each image to a 1x784 row
    # (assumes each `im` is a 28x28 array — TODO confirm loader output).
    train_images = [(im / 255).reshape(1, 784) for im in train_data] 
    test_images = [(im / 255).reshape(1, 784) for im in test_data] 
    # Convert integer class labels to vector targets (likely one-hot — verify
    # vectorized_result's definition).
    train_label = [vectorized_result(int(i)) for i in train_label0]
    train_img_batchs, train_label_batchs = get_minibatchs(train_images, train_label, batch_size)

    # Fully connected network with layer sizes 784 -> 64 -> 10.
    model = DNN([28 * 28, 64, 10])
    steps = 0
    eval_freq = 6000
    for epoch in range(50):

        # Iterate image/target batches in lockstep.
        for train_img_batch, train_res_batch in zip(train_img_batchs, train_label_batchs):
            # normal SGD train
            # print("normal training!")
Example #2
0
        # NOTE(review): tail of an evaluation function — the enclosing `def`,
        # the outer batch loop, and the accumulators (val_acc, val_loss,
        # batch_num, label, model) all begin above the visible region.
        for j in range(batch_size):
            # Count a hit when the index of the largest network output matches
            # the integer class label for sample j of the current batch.
            if np.argmax(model.out.predict_result[j]) == label[j]:
                val_acc += 1

    # Mean accuracy over all evaluated samples and mean loss per batch.
    return val_acc / (batch_num * batch_size), val_loss / batch_num


# NOTE(review): truncated script block — the epoch body is cut off below the
# visible history bookkeeping, so only the visible setup is documented.
if __name__ == "__main__":
    # Ensure the log directory exists before opening the log file.
    logpath = 'logs'
    if not os.path.exists(logpath):
        os.mkdir(logpath)
    # Log file name encodes the experiment (presumably LeakyReLU with
    # lr=0.001 — confirm against the model/optimizer config).
    logdir = logpath + '/LRELU0.001_log.txt'
    print_freq = 50
    val_freq = 200
    # DataLoader / Model are defined elsewhere in the file (not visible here).
    DL = DataLoader()
    images, labels = DL.load_mnist('./data/mnist')
    # 't10k' presumably selects the MNIST test split — TODO confirm.
    test_images, test_labels = DL.load_mnist('./data/mnist', 't10k')
    batch_size = 100
    model = Model(batch_size)
    #record
    # Running metric histories; refreshed into `history` every epoch below.
    train_loss_record = []
    train_acc_record = []
    val_loss_record = []
    val_acc_record = []
    with open(logdir, 'w') as logf:
        for epoch in range(20):
            # save record every epoch
            history = dict()
            history['train_acc'] = train_acc_record
            history['train_loss'] = train_loss_record
            history['val_acc'] = val_acc_record
Example #3
0
        # NOTE(review): tail of an evaluation function — the enclosing `def`,
        # the outer batch loop, and the accumulators (val_acc, val_loss,
        # batch_num, label, model) all begin above the visible region.
        for j in range(batch_size):
            # Count a hit when the index of the largest network output matches
            # the integer class label for sample j of the current batch.
            if np.argmax(model.out.predict_result[j]) == label[j]:
                val_acc += 1

    # Mean accuracy over all evaluated samples and mean loss per batch.
    return val_acc / (batch_num * batch_size), val_loss / batch_num


# NOTE(review): truncated script block — the epoch body is cut off below the
# visible history bookkeeping, so only the visible setup is documented.
if __name__ == "__main__":
    # Ensure the log directory exists before opening the log file.
    logpath = 'logs'
    if not os.path.exists(logpath):
        os.mkdir(logpath)
    # Log file for the "EXP1 with normalization" run (Norm=True below).
    logdir = logpath + '/EXP1_Norm_log.txt'
    print_freq = 50
    val_freq = 200
    # DataLoader / Model are defined elsewhere in the file (not visible here).
    DL = DataLoader()
    # Norm=True presumably enables input normalization in the loader — TODO
    # confirm against DataLoader.load_mnist.
    images, labels = DL.load_mnist('./data/mnist', Norm=True)
    test_images, test_labels = DL.load_mnist('./data/mnist', 't10k', Norm=True)
    batch_size = 100
    model = Model(batch_size)
    #record
    # Running metric histories; refreshed into `history` every epoch below.
    train_loss_record = []
    train_acc_record = []
    val_loss_record = []
    val_acc_record = []
    with open(logdir, 'w') as logf:
        for epoch in range(20):
            # save record every epoch
            history = dict()
            history['train_acc'] = train_acc_record
            history['train_loss'] = train_loss_record
            history['val_acc'] = val_acc_record