Example #1
import logging.config
from numpy import *
import scipy.optimize
from util.util import read_mnist, permute_data
from logistic_objective import LogisticObjective
import phessianfree
from phessianfree import convergence

set_printoptions(precision=4, linewidth=150)
logging.basicConfig(level="DEBUG")
logger = logging.getLogger("opt")

# (X,d) is the training data, (Xt, dt) is the test data
X, d, Xt, dt = read_mnist(partial=False)
ndata = X.shape[0]
m = X.shape[1]

X, d = permute_data(X, d)  # Randomize order

f = LogisticObjective(X, d, reg=0.001)
x0 = 0.01 * ones(m)

##########################################
# Stores the intermediate values for later plotting
phf_cb = convergence.PlottingCallback("Conf. Hessian free", ndata)

props = {
    'gradRelErrorBound': 0.4  # Default is a more conservative 0.1
}

x, optinfo = phessianfree.optimize(f,
                                   x0,
                                   ndata,
                                   maxiter=20,
                                   callback=phf_cb,
                                   props=props)
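
LogisticObjective lives in the repo's logistic_objective module and is not shown in this listing; judging from the inline objective in Example #2, phessianfree.optimize expects a callable f(x, s=0, e=ndata) returning (fval, grad) averaged over ndata. A minimal sketch of such a logistic objective (hypothetical, for illustration only; assumes +/-1 labels, not the repo's actual code):

def make_logistic_objective(X, d, reg):
    # Sketch only: mirrors the f(x, s, e) -> (fval, grad) interface of the
    # inline least-squares objective in Example #2. Assumes labels d in {-1, +1}.
    n = X.shape[0]
    def f(x, s=0, e=n):
        margins = d[s:e] * dot(X[s:e, :], x)           # y_i * <w, x_i>
        fval = logaddexp(0.0, -margins).sum() + 0.5 * reg * (e - s) * dot(x, x)
        sigma = exp(-logaddexp(0.0, margins))          # sigmoid(-margins), overflow-safe
        grad = -dot(X[s:e, :].T, d[s:e] * sigma) + reg * (e - s) * x
        return fval / n, grad / n
    return f
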
Example #2
import logging.config
from numpy import *
import scipy.optimize
import phessianfree
from phessianfree import convergence
from util.util import read_mnist, permute_data

set_printoptions(precision=4, linewidth=150)
logging.basicConfig(level="DEBUG")
logger = logging.getLogger("opt")

A, b, _, _ = read_mnist(partial=False)
ndata = A.shape[0]
m = A.shape[1]

A, b = permute_data(A, b)  # Randomize order
reg = 0.005

# Regularized linear least squares on rows s..e: returns the objective value
# and gradient, both scaled by 1/ndata so partial evaluations are comparable.
def f(x, s=0, e=ndata):
    y = dot(A[s:e, :], x) - b[s:e]
    fval = 0.5 * dot(y, y) + 0.5 * reg * (e - s) * dot(x, x)
    grad = dot(A[s:e, :].T, y) + reg * (e - s) * x
    return (fval / ndata, grad / ndata)

x0 = 0.01 * ones(m)
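
# Optional sanity check (not part of the original listing): verify the
# analytic gradient of f against finite differences on a small data slice.
grad_err = scipy.optimize.check_grad(lambda x: f(x, 0, 100)[0],
                                     lambda x: f(x, 0, 100)[1], x0)
logger.debug("finite-difference gradient error: %e", grad_err)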

##########################################
# Stores the intermediate values for later plotting
phf_cb = convergence.PlottingCallback("Conf. Hessian free", ndata)
x, optinfo = phessianfree.optimize(f, x0, ndata, maxiter=20, callback=phf_cb, props={})
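
# Example #2 imports scipy.optimize but the listing cuts off before using it;
# a plausible baseline (a sketch, not the original code) runs L-BFGS on the
# same objective for the same iteration budget. jac=True tells minimize that
# f returns the (fval, grad) pair.
result = scipy.optimize.minimize(f, x0, jac=True, method='L-BFGS-B',
                                 options={'maxiter': 20})
logger.info("phessianfree fval: %e, scipy L-BFGS fval: %e",
            f(x)[0], result.fun)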