# Beispiel (Example) #1
# 0
    def setup_class(cls):
        """Fit an order-2 PolySmoother and a matching OLS reference fit.

        The parent ``setup_class`` generates the DGP arrays ``cls.y``,
        ``cls.x`` and ``cls.exog``; results are stored as ``cls.res_ps``
        (the smoother) and ``cls.res2`` (the OLS benchmark).
        """
        # Parent call creates the data-generating process (y, x, exog).
        super(TestPolySmoother1, cls).setup_class()

        y = cls.y
        x = cls.x
        exog = cls.exog

        # Polynomial smoother of order 2; fit() stores its result on the
        # object and returns nothing.
        poly = smoothers.PolySmoother(2, x)
        poly.fit(y)
        cls.res_ps = poly

        # Reference regression on the first order+1 = 3 columns of exog.
        cls.res2 = OLS(y, exog[:, :3]).fit()
    def __init__(self):
        """Fit an order-2 PolySmoother and an OLS benchmark (instance variant).

        Mirrors ``setup_class``; results are stored as ``self.res_ps`` (the
        fitted smoother) and ``self.res2`` (the OLS reference fit).
        """
        # BUG FIX: the original called super(self.__class__, self).__init__(),
        # which recurses infinitely if this class is ever subclassed (because
        # self.__class__ is then the subclass).  Zero-argument super() binds
        # to the defining class and is the correct Python 3 form.
        super().__init__()  # initialize DGP

        y, x, exog = self.y, self.x, self.exog

        # use order = 2 in regression
        pmod = smoothers.PolySmoother(2, x)
        pmod.fit(y)  # fits in place; no return value

        self.res_ps = pmod
        # OLS on the first 2+1 polynomial columns as the reference fit
        self.res2 = OLS(y, exog[:, :2+1]).fit()
# Beispiel (Example) #3
# 0
    def setup_class(cls):
        """Fit an order-3 PolySmoother (via ``smooth``) and an OLS reference.

        The parent ``setup_class`` generates the DGP arrays ``cls.y``,
        ``cls.x`` and ``cls.exog``; results are stored as ``cls.res_ps``
        and ``cls.res2``.
        """
        # Parent call creates the data-generating process (y, x, exog).
        super(TestPolySmoother2, cls).setup_class()

        y = cls.y
        x = cls.x
        exog = cls.exog

        # Polynomial smoother of order 3.  Exercise smooth(), the alias for
        # fit(); both work in place and return nothing.
        poly = smoothers.PolySmoother(3, x)
        poly.smooth(y)
        cls.res_ps = poly

        # Reference regression on the first order+1 = 4 columns of exog.
        cls.res2 = OLS(y, exog[:, :4]).fit()
    def __init__(self):
        """Fit an order-3 PolySmoother via ``smooth`` and an OLS benchmark.

        Mirrors ``setup_class``; results are stored as ``self.res_ps`` and
        ``self.res2``.
        """
        # BUG FIX: the original called super(self.__class__, self).__init__(),
        # which recurses infinitely if this class is ever subclassed (because
        # self.__class__ is then the subclass).  Zero-argument super() binds
        # to the defining class and is the correct Python 3 form.
        super().__init__()  # initialize DGP

        y, x, exog = self.y, self.x, self.exog

        # use order = 3 in regression
        pmod = smoothers.PolySmoother(3, x)
        # smooth() is an alias for fit(); works in place, no return value
        pmod.smooth(y)

        self.res_ps = pmod
        # OLS on the first 3+1 polynomial columns as the reference fit
        self.res2 = OLS(y, exog[:, :3+1]).fit()
# Beispiel (Example) #5
# 0
    def setup_class(cls):
        """Fit a weighted order-2 PolySmoother and a matching WLS reference.

        The parent ``setup_class`` generates the DGP arrays; a non-uniform
        weight vector down-weights the first third of the sample and
        up-weights the last fifth.  Results are stored as ``cls.res_ps``
        and ``cls.res2``.
        """
        # Parent call creates the data-generating process (y, x, exog).
        super(TestPolySmoother3, cls).setup_class()

        y = cls.y
        x = cls.x
        exog = cls.exog

        # Uniform weights, then perturb the two ends of the sample.
        n = y.shape[0]
        weights = np.ones(n)
        weights[:n // 3] = 0.1
        weights[-n // 5:] = 2

        # Weighted polynomial smoother of order 2; fit() works in place.
        poly = smoothers.PolySmoother(2, x)
        poly.fit(y, weights=weights)
        cls.res_ps = poly

        # Weighted least squares on the first order+1 = 3 columns of exog.
        cls.res2 = WLS(y, exog[:, :3], weights=weights).fit()
    def __init__(self):
        """Fit a weighted order-2 PolySmoother and a WLS benchmark.

        Mirrors ``setup_class``: the first third of the sample is
        down-weighted (0.1) and the last fifth up-weighted (2).  Results are
        stored as ``self.res_ps`` and ``self.res2``.
        """
        # BUG FIX: the original called super(self.__class__, self).__init__(),
        # which recurses infinitely if this class is ever subclassed (because
        # self.__class__ is then the subclass).  Zero-argument super() binds
        # to the defining class and is the correct Python 3 form.
        super().__init__()  # initialize DGP

        y, x, exog = self.y, self.x, self.exog
        nobs = y.shape[0]
        # Non-uniform weights exercise the weighted fitting path.
        weights = np.ones(nobs)
        weights[:nobs//3] = 0.1
        weights[-nobs//5:] = 2

        # use order = 2 in regression
        pmod = smoothers.PolySmoother(2, x)
        pmod.fit(y, weights=weights)  # fits in place; no return value

        self.res_ps = pmod
        # WLS on the first 2+1 polynomial columns as the reference fit
        self.res2 = WLS(y, exog[:, :2+1], weights=weights).fit()
# Beispiel (Example) #7
# 0
# BUG FIX: this example uses `np` throughout but never imported numpy.
import numpy as np

from statsmodels.sandbox.nonparametric import smoothers, kernels
from statsmodels.regression.linear_model import OLS, WLS

# DGP: simple polynomial of degree `order` evaluated on a warped grid,
# plus Gaussian noise.
order = 3
sigma_noise = 0.5
nobs = 100
lb, ub = -1, 2
x = np.linspace(lb, ub, nobs)
x = np.sin(x)  # warp the grid so x is not equally spaced
# Vandermonde-style design: columns are x**0, x**1, ..., x**order.
exog = x[:, None]**np.arange(order + 1)
y_true = exog.sum(1)  # true signal: all polynomial coefficients equal 1
y = y_true + sigma_noise * np.random.randn(nobs)

# Fit an order-2 polynomial smoother (deliberately under-specified vs the
# order-3 DGP) and report its in-sample mean squared error.
#xind = np.argsort(x)
pmod = smoothers.PolySmoother(2, x)
pmod.fit(y)  # fits in place; no return value
y_pred = pmod.predict(x)
error = y - y_pred
mse = (error * error).mean()
print(mse)

# Cross-check: the smoother's coefficients should match an OLS fit on the
# first 3 polynomial columns (x**0 .. x**2); the difference should be ~0.
res_ols = OLS(y, exog[:, :3]).fit()
print(np.squeeze(pmod.coef) - res_ols.params)

# Weighted variant: down-weight the first third of the sample and
# up-weight the last fifth.
weights = np.ones(nobs)
weights[:nobs // 3] = 0.1
weights[-nobs // 5:] = 2

pmodw = smoothers.PolySmoother(2, x)
pmodw.fit(y, weights=weights)  # fits in place; no return value
y_predw = pmodw.predict(x)
# Beispiel (Example) #8
# 0
    # NOTE(review): this is the tail of a larger plotting function; `fig`,
    # `x`, `y` and the KS* arrays are created earlier, outside this excerpt.
    # KVar/K2Var presumably hold kernel-smoother variance estimates at the
    # KSx grid points — confirm upstream.
    ax2.plot(KSx, KVar, "-o")

    # Third panel: raw data (scatter) with the second kernel-smoother fit.
    ax3 = fig.add_subplot(223)
    ax3.plot(x, y, "+")
    ax3.plot(KSx, KS2y, "-o")
    ax3.set_ylim(-20, 30)
    # Fourth panel: the second smoother's variance-like curve.
    ax4 = fig.add_subplot(224)
    ax4.plot(KSx, K2Var, "-o")

    # Second figure: data overlaid with the kernel-smoother confidence band.
    fig2 = plt.figure()
    ax5 = fig2.add_subplot(111)
    ax5.plot(x, y, "+")
    ax5.plot(KSConfIntx, KSConfInty, "-o")

    # Overlay two LOWESS fits: default span (solid blue) and a tighter
    # frac=0.25 span (dashed blue).
    from statsmodels.nonparametric import lowess as lo
    ys = lo.lowess(y, x)
    ax5.plot(ys[:, 0], ys[:, 1], 'b-')
    ys2 = lo.lowess(y, x, frac=0.25)
    ax5.plot(ys2[:, 0], ys2[:, 1], 'b--', lw=2)

    # Sort by x so matplotlib draws a single continuous line.
    xind = np.argsort(x)
    pmod = smoothers.PolySmoother(5, x[xind])
    pmod.fit(y[xind])

    # Calling the fitted smoother evaluates its prediction at the given
    # points (black line).
    yp = pmod(x[xind])
    ax5.plot(x[xind], yp, 'k-')
    ax5.set_title('Kernel regression, lowess - blue, polysmooth - black')

    #plt.show()