Example #1
def func(T, *args):
    # Objective wrapper: the optimizer passes T flattened, so restore its
    # (26, 26) transition-matrix shape before evaluating.
    t = T.reshape((26, 26))
    dataX, dataY, W = args

    return prob_grad.compute_log_p(dataX, dataY, W, t)
Example #2
def func(W, *args):
    # Objective wrapper: the optimizer passes W flattened, so restore its
    # (26, 128) weight-matrix shape before evaluating.
    w = W.reshape((26, 128))
    dataX, dataY, T = args

    return prob_grad.compute_log_p(dataX, dataY, w, T)
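Wrappers in this flattened-parameter form are what SciPy's finite-difference helpers expect. Below is a minimal sketch of checking a gradient numerically against the Example #2 objective; the data_read module, the parameter shapes, and the structure of the training data are assumptions carried over from the other examples on this page, not part of this snippet.

from scipy.optimize import approx_fprime

import data_read  # assumed project module, as in Example #6

W, T = data_read.read_model()             # assumed shapes: W (26, 128), T (26, 26)
dataX, dataY = data_read.read_train()[0]  # assumed: one (features, labels) pair

# Finite-difference gradient of the Example #2 objective with respect to
# the flattened W, holding dataX, dataY, and T fixed.
grad_W = approx_fprime(W.flatten(), func, 1e-6, dataX, dataY, T)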
Example #3
def func(params, *args):
    # Regularized negative log-likelihood for a single example.
    # params holds W (26 x 129) and T (26 x 26) flattened into one vector.
    W = params[:26 * 129].reshape((26, 129))
    T = params[26 * 129:].reshape((26, 26))
    x, y = args[0]  # one (features, labels) example
    l = args[1]     # regularization strength

    log_p = prob_grad.compute_log_p(x, y, W, T)

    return -log_p + 0.5 * l * (np.sum(np.square(W)) + np.sum(np.square(T)))
Example #4
def func(params, *args):
    # Regularized average negative log-likelihood over the whole dataset.
    # params holds W (26 x 129) and T (26 x 26) flattened into one vector.
    W = params[:26 * 129].reshape((26, 129))
    T = params[26 * 129:].reshape((26, 26))
    data = args[0]  # list of (features, labels) examples
    l = args[1]     # regularization strength

    log_p = 0
    for example in data:
        log_p += prob_grad.compute_log_p(example[0], example[1], W, T)

    return -log_p / len(data) + 0.5 * l * (np.sum(np.square(W)) + np.sum(np.square(T)))
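The Example #4 objective is shaped for a generic minimizer: one flat parameter vector in, a scalar out. A minimal sketch of driving it with SciPy follows; the data loader, the regularization value, and the choice of L-BFGS are assumptions for illustration, not part of the original code.

import numpy as np
from scipy.optimize import minimize

import data_read  # assumed project module, as in Example #6

data = data_read.read_train()      # assumed: list of (features, labels) pairs
x0 = np.zeros(26 * 129 + 26 * 26)  # flattened initial W and T

# l = 0.01 is an arbitrary illustrative regularization strength.
result = minimize(func, x0, args=(data, 0.01), method="L-BFGS-B")

W = result.x[:26 * 129].reshape((26, 129))
T = result.x[26 * 129:].reshape((26, 26))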
Example #5
def func(params, *args):
    # Objective function specified in the handout:
    # -(C / N) * sum(log p) + 0.5 * ||W||^2 + 0.5 * ||T||^2
    W = params[:26 * 128].reshape((26, 128))
    T = params[26 * 128:].reshape((26, 26))
    data = args[0]
    C = args[1]  # trade-off constant from the handout

    log_sum = 0
    for example in data:
        log_sum += prob_grad.compute_log_p(example[0], example[1], W, T)

    # Squared row norms of W; their sum is W's squared Frobenius norm.
    norm = numpy.zeros(26)
    for i in range(26):
        norm[i] = numpy.linalg.norm(W[i])
    numpy.square(norm, out=norm)

    return (-C / len(data)) * log_sum + 0.5 * numpy.sum(norm) \
        + 0.5 * numpy.sum(numpy.square(T))
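The row-norm loop above assembles the squared Frobenius norm of W in pieces, so the penalty could equally be written as a single numpy.sum as in Examples #3 and #4. A quick numerical check of that identity:

import numpy

W = numpy.random.rand(26, 128)

loop = sum(numpy.linalg.norm(W[i]) ** 2 for i in range(26))
vectorized = numpy.sum(numpy.square(W))  # squared Frobenius norm

assert numpy.isclose(loop, vectorized)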
Example #6
import time

import numpy, data_read, prob_grad

W, T = data_read.read_model()
data = data_read.read_train()

t0 = time.time()

# Average log-probability over the training set, timed end to end.
a = []
for example in data:
    a.append(prob_grad.compute_log_p(example[0], example[1], W, T))
print(f"AVG: {sum(a) / len(data)}")

t1 = time.time()
print(f"Time: {t1 - t0}")