"""
import numpy as np
import pylab as pl
from scipy.linalg import toeplitz
import tseries as t
import kernel_lms
reload(kernel_lms)
from kernel_lms import KernelLMS
from sklearn.preprocessing import scale
from sklearn.linear_model import LinearRegression

data = np.loadtxt('MackeyGlass_t17.txt')
X = toeplitz(data, np.zeros(10))
#X = t.time_delay_input(data,10)
klms = KernelLMS(learning_rate=.001, gamma=1, growing_criterion="dense", \
                 growing_param=[1,.4], loss_function="least_squares", \
                 correntropy_sigma=.4, dropout=1)

d = np.squeeze(np.hstack([np.zeros(10)[np.newaxis], data[np.newaxis]]))
d = d[0:5e3]
klms.fit_transform(X[0:5e3], d[0:5e3])

_r = LinearRegression()
_R = np.hstack([klms.X_transformed_[np.newaxis].T, np.ones([5000, 1])])
_r.fit(_R, d[np.newaxis].T)
_R2 = np.hstack([klms.X_transformed_[np.newaxis].T, np.ones([5000, 1])])
reg = np.squeeze(_r.predict(_R2))

#plot(klms.X_transformed_[4e3:5e3])
#plot(d[4e3:5e3])
Esempio n. 2
0
from kernel_lms import KernelLMS

t = np.arange(0, 1500, .5)
f = .09
N = t.shape[0] - 1
x1 = np.sin(2 * np.pi * f * t[0:-1])
x2 = np.sin(2 * np.pi * f * t[1:])
X = np.vstack([x1, x2]).T
d = np.sin(4 * np.pi * f * t[1:])
n1 = .01 * np.random.randn(np.floor(.9 * N))
n2 = .01 * np.random.randn(np.ceil(.1 * N)) + 2
n = np.random.permutation(np.hstack([n1, n2]))
dhat = d + n

klms = []
klms = KernelLMS(learning_rate=.9,
                 gamma=1,
                 growing_criterion="novelty",
                 growing_param=[1, .4],
                 loss_function="minimum_correntropy",
                 correntropy_sigma=.4,
                 dropout=.5)

t1 = time.time()
klms.fit_transform(X, dhat)
t2 = time.time()
print "Elapsed time = %f" % (t2 - t1)

plot(klms.X_transformed_)
plot(d)
# import pdb; pdb.set_trace()
import numpy as np
from kernel_lms import KernelLMS

# =========================
#       Learn XOR
# =========================
klms = KernelLMS(
    kernel="rbf",
    learning_mode="regression",
    learning_rate=0.001,
    gamma=0.5,
    growing_criterion="dense",
    growing_param=[0.1, 0.1],
    loss_function="least_squares",
    correntropy_sigma=0.4,
)

X = np.vstack([[0, 0], [0, 1], [1, 0], [1, 1]])
d = np.array([0, 1, 1, 0])

w = np.random.rand(2)
# w = np.ones(2)
# w = np.array([1, 1])

klms.fit_transform(X, d)  # Initialize net
for i in xrange(1, 2000):
    xout = klms.transform(X)[-4:]  # Forwad
    yout = np.dot(w, [xout, np.ones_like(xout)])
    yout = 1 / (1 + np.exp(-yout))  # Calculate last layer output
"""
import numpy as np
import pylab as pl
from scipy.linalg import toeplitz
import tseries as t
import importlib
import kernel_lms; importlib.reload(kernel_lms)  # pick up local edits when rerunning interactively
from kernel_lms import KernelLMS
from sklearn.preprocessing import scale
from sklearn.linear_model import LinearRegression


# Mackey-Glass chaotic series; toeplitz builds a 10-tap time-delay embedding
# (row i holds data[i], data[i-1], ..., data[i-9], zero-padded at the start).
data = np.loadtxt('MackeyGlass_t17.txt')
X = toeplitz(data, np.zeros(10))
#X = t.time_delay_input(data, 10)
klms = KernelLMS(learning_rate=.001, gamma=1, growing_criterion="dense",
                 growing_param=[1, .4], loss_function="least_squares",
                 correntropy_sigma=.4, dropout=1)

# Desired signal: the series shifted by the embedding length (10 samples).
d = np.squeeze(np.hstack([np.zeros(10)[np.newaxis], data[np.newaxis]]))
d = d[0:5000]
klms.fit_transform(X[0:5000], d[0:5000])

# Fit an affine readout (gain and bias) on top of the filter output.
_r = LinearRegression()
_R = np.hstack([klms.X_transformed_[np.newaxis].T, np.ones([5000, 1])])
_r.fit(_R, d[np.newaxis].T)
reg = np.squeeze(_r.predict(_R))


#pl.plot(klms.X_transformed_[4000:5000])
#pl.plot(d[4000:5000])
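
# Illustrative check (not part of the original script): the mean-squared error
# between the affine readout and the desired signal gives a rough quality
# measure; this assumes reg and d are aligned sample-for-sample.
mse = np.mean((reg - d) ** 2)
print("Readout MSE over the training span: %f" % mse)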
import scipy as sp
import pylab as pl
import time
from kernel_lms import KernelLMS


t = np.arange(0, 1500, .5)
f = .09
N = t.shape[0] - 1
x1 = np.sin(2 * np.pi * f * t[0:-1])
x2 = np.sin(2 * np.pi * f * t[1:])
X = np.vstack([x1, x2]).T
# Target: a frequency-doubled sinusoid.
d = np.sin(4 * np.pi * f * t[1:])
# Impulsive noise: 90% small Gaussian samples, 10% Gaussian samples
# shifted by +2 (outliers), shuffled together.
n1 = .01 * np.random.randn(int(np.floor(.9 * N)))
n2 = .01 * np.random.randn(int(np.ceil(.1 * N))) + 2
n = np.random.permutation(np.hstack([n1, n2]))
dhat = d + n

klms = KernelLMS(learning_rate=.9, gamma=1, growing_criterion="novelty",
                 growing_param=[1, .4], loss_function="minimum_correntropy",
                 correntropy_sigma=.4, dropout=.5)

t1 = time.time()
klms.fit_transform(X, dhat)
t2 = time.time()
print("Elapsed time = %f" % (t2 - t1))

pl.plot(klms.X_transformed_)
pl.plot(d)
pl.show()
import numpy as np
from kernel_lms import KernelLMS

# =========================
#       Learn XOR
# =========================
klms = KernelLMS(kernel="rbf", learning_mode="regression", learning_rate=.001,
                 gamma=.5, growing_criterion="dense", growing_param=[.1, .1],
                 loss_function="least_squares", correntropy_sigma=.4)

X = np.vstack([[0, 0], [0, 1], [1, 0], [1, 1]])
d = np.array([0, 1, 1, 0])

w = np.random.rand(2)  # output-layer weights: [gain, bias]
#w = np.ones(2)
#w = np.array([1, 1])

klms.fit_transform(X, d)  # Initialize net
for i in range(1, 2000):
    xout = klms.transform(X)[-4:]  # Forward pass through the kernel layer
    yout = np.dot(w, [xout, np.ones_like(xout)])
    yout = 1 / (1 + np.exp(-yout))  # Sigmoid output of the last layer

    # Delta rule on the output layer (sigmoid derivative included),
    # then pass the scaled error back to the kernel layer.
    e = (d - yout) * yout * (1 - yout)
    w = w + .01 * (e * [xout, np.ones_like(xout)]).sum(axis=1)
    err = e * w[0]
    klms.fit_transform(X, d, err)  # Backpropagate error

print "Learning XOR:"
print klms.X_transformed_[-4:]
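
# Illustrative check (not part of the original script): yout still holds the
# sigmoid outputs from the final training iteration, so they can be compared
# directly against the XOR targets.
print("Final sigmoid outputs vs targets:")
print(np.round(yout, 2), d)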