Code Example #1
File: PA1.py  Project: sokolov-alex/Neural-Networks
    def fit(self, X_train, y_train, mini_batch=100, verbose=10):
        old_loss = np.inf
        of = 0  # counts consecutive "overshoot" events (loss went up)
        for i in range(30):
            X_train, y_train = shuffle_data(X_train, y_train)
            # slide a window of `mini_batch` samples over the shuffled training set
            for b in range(mini_batch, X_train.shape[0] + 1, mini_batch):
                x_batch, y_batch = X_train[b - mini_batch:b], y_train[b - mini_batch:b]
                y_pred = self.predict(x_batch)
                self.SGD(y_batch, y_pred, x_batch)

            if i % verbose == 0:
                loss = self.cost(y_batch, y_pred)
                if old_loss - loss >= 0:
                    # loss did not increase: be more aggressive
                    of -= 1
                    self.eta *= 2
                else:
                    # loss increased: back off, and bail out if it keeps happening
                    of += 1
                    self.eta /= 3
                    if of > 5:
                        self.eta /= 20
                        if of > 10:
                            break
                # print('rate: ', self.eta)
                old_loss = loss
                # print('loss: ', loss)
            if loss < 0.001:
                # print('converged (loss < 0.001)')
                break
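This fit() runs up to 30 epochs of mini-batch SGD and crudely adapts the learning rate every `verbose` epochs: eta is doubled while the loss is non-increasing, divided by 3 (and by a further 20 after repeated increases) otherwise, and training stops early once the loss drops below 0.001. Below is a minimal usage sketch, assuming this method belongs to the LogisticRegression class shown in Example #3 and that load_mnist/zscore/shuffle_data behave as in that example; the binary "digit 0" target and the 0.5 threshold are illustrative choices, not from the original project.

from DataLoader import load_mnist
from Preprocessing import shuffle_data, zscore

X_train, y_train, X_test, y_test = load_mnist()
X_train, X_test = zscore(X_train), zscore(X_test)

# Train a single binary classifier ("is this digit a 0?") with the loop above.
clf = LogisticRegression(dim=X_train.shape[1])
clf.fit(X_train, (y_train == 0).astype(int), mini_batch=100, verbose=10)
print('train accuracy:', accuracy_score((y_train == 0).astype(int),
                                        (clf.predict(X_train) > 0.5).astype(int)))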
Code Example #2
File: PA1.py  Project: sokolov-alex/Neural-Networks
def run_lr_bfgs(X_train, y_train):
    # One-vs-rest: train one binary L-BFGS logistic regression per digit class.
    dim = X_train.shape[1]
    for i in range(10):
        lr = LogRegBFGS(dim)
        X_train, y_train = shuffle_data(X_train, y_train)
        y_true = (y_train == i).astype(int)  # 1 for digit i, 0 otherwise

        lr.fit(X_train, y_true)

        # X_test and y_test are module-level globals (see Example #3)
        y_true_test = (y_test == i).astype(int)
        print('test accuracy: ', lr.score(X_test, y_true_test))
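run_lr_bfgs only reports the test accuracy of each binary classifier separately. The sketch below shows one way the ten models could be combined into a single multi-class prediction, under the assumption that LogRegBFGS exposes a predict() returning the positive-class probability, as LogisticRegression.predict does in Example #3; run_lr_bfgs_multiclass and the argmax step are illustrative additions, not part of the original file.

def run_lr_bfgs_multiclass(X_train, y_train, X_test, y_test):
    dim = X_train.shape[1]
    models = []
    for i in range(10):
        lr = LogRegBFGS(dim)
        lr.fit(X_train, (y_train == i).astype(int))  # one-vs-rest target for digit i
        models.append(lr)
    # Stack the ten per-class probabilities column-wise and pick the most
    # confident class for every test sample.
    probs = np.column_stack([m.predict(X_test) for m in models])
    y_pred = probs.argmax(axis=1)
    print('multi-class test accuracy:', accuracy_score(y_test, y_pred))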
Code Example #3
File: PA1.py  Project: sokolov-alex/Neural-Networks
import numpy as np  # used below (np.zeros, np.inf) but missing from the original excerpt
from time import perf_counter as clock  # time.clock was removed in Python 3.8
from Preprocessing import shuffle_data, zscore
from DataLoader import load_mnist
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from scipy.special import expit as sigmoid
from scipy.optimize import fmin_l_bfgs_b
from matplotlib import pyplot as plt
from numba import jit
#%matplotlib inline

X_train, y_train, X_test, y_test = load_mnist()

X_train = zscore(X_train)
X_test = zscore(X_test)

X_train, y_train = shuffle_data(X_train, y_train)

n_classes = len(set(y_train))


def accuracy_score(y_true, y_pred):
    return (y_true == y_pred).mean()


class LogisticRegression:
    def __init__(self, dim):
        self.w = np.zeros(dim)  # one weight per input feature
        self.eta = 10e-7        # initial learning rate (1e-6); adapted during fit()

    def predict(self, x):
        # probability of the positive class
        return sigmoid(x.dot(self.w))
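Only __init__ and predict are shown in this excerpt, but fit() in Example #1 also calls self.SGD(y_batch, y_pred, x_batch) and self.cost(y_batch, y_pred). The following is a sketch of what those two methods could look like for plain cross-entropy logistic regression; the project's actual implementations are not shown here, so treat the gradient step and the loss below as assumptions.

    def SGD(self, y_true, y_pred, x):
        # Gradient of the mean cross-entropy loss w.r.t. w is x^T (y_pred - y_true) / n;
        # take one step against it with the current learning rate.
        grad = x.T.dot(y_pred - y_true) / len(y_true)
        self.w -= self.eta * grad

    def cost(self, y_true, y_pred):
        eps = 1e-12  # keep log() away from zero
        return -np.mean(y_true * np.log(y_pred + eps)
                        + (1 - y_true) * np.log(1 - y_pred + eps))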