# Shock distribution for the growth-model kernel below; `a_sigma`, `s`,
# `alpha` and `delta` are assumed to be defined earlier in this file —
# TODO confirm against the surrounding module.
phi = lognorm(a_sigma)


def p(x, y):
    """Stochastic kernel: conditional density of y given current state x.

    Uses the lognormal density `phi` scaled by d = s * x**alpha, i.e. the
    change-of-variables factor 1/d, so both x and the scale must be
    positive for the pdf to be well defined.
    """
    d = s * x**alpha
    return phi.pdf((y - (1 - delta) * x) / d) / d


# other data
n_a, n_b, n_y = 50, (5, 5), 20
a = np.random.rand(n_a) + 0.01   # 1-D positive sample
b = np.random.rand(*n_b) + 0.01  # 2-D positive sample; LAE should flatten it
# FIX: use the declared n_y instead of repeating the magic number 20
# (behavior unchanged since n_y == 20).
y = np.linspace(0, 10, n_y)
lae_a = LAE(p, a)
lae_b = LAE(p, b)


def test_x_flattened():
    "lae: is x flattened and reshaped"
    # Observations must be stored as a column vector: a trailing singleton
    # dimension regardless of the input sample's original shape.
    assert_equal(lae_b.X.shape[-1], 1)
    assert_equal(lae_a.X.shape[-1], 1)


def test_x_2d():
    "lae: is x 2d"
    # Both the 1-D and the 2-D input samples end up as 2-D arrays.
    assert_equal(lae_a.X.ndim, 2)
    assert_equal(lae_b.X.ndim, 2)
    Both x and y must be strictly positive.
    """
    d = s * x**alpha
    return phi.pdf((y - (1 - delta) * x) / d) / d

# NOTE(review): the lines above are the tail of a function definition whose
# `def` line and docstring opening lie before this chunk — left untouched.

n = 10000  # Number of observations at each date t
T = 30     # Compute density of k_t at 1,...,T+1

# == Generate matrix s.t. t-th column is n observations of k_t == #
k = np.empty((n, T))
A = phi.rvs((n, T))  # one productivity shock per observation per date
k[:, 0] = psi_0.rvs(n)  # Draw first column from initial distribution
for t in range(T - 1):
    # Capital accumulation: output s*A*k**alpha plus undepreciated capital.
    k[:, t + 1] = s * A[:, t] * k[:, t]**alpha + (1 - delta) * k[:, t]

# == Generate T instances of LAE using this data, one for each date t == #
laes = [LAE(p, k[:, t]) for t in range(T)]

# == Plot == #
fig, ax = plt.subplots()
ygrid = np.linspace(0.01, 4.0, 200)  # evaluation grid; strictly positive capital
greys = [str(g) for g in np.linspace(0.0, 0.8, T)]
greys.reverse()  # reversed so later dates plot in darker grey
for psi, g in zip(laes, greys):
    ax.plot(ygrid, psi(ygrid), color=g, lw=2, alpha=0.6)
ax.set_xlabel('capital')
title = r'Density of $k_1$ (lighter) to $k_T$ (darker) for $T={}$'
ax.set_title(title.format(T))
plt.show()
def psi_star(y):
    """True stationary density of the nonlinear AR process simulated below.

    NOTE(review): assumes `delta` is the skewness parameter derived from
    `theta` earlier in this file — confirm against its definition.
    """
    return 2 * norm.pdf(y) * norm.cdf(delta * y)


def p(x, y):
    """Stochastic kernel: density of next state y given current state x,
    for X' = theta * |X| + d * Z with Z ~ phi."""
    return phi.pdf((y - theta * np.abs(x)) / d) / d


# Generate n random numbers from the shock distribution phi.
Z = phi.rvs(n)
# FIX: np.empty(n) left X[0] uninitialized (arbitrary memory), making the
# whole simulated path non-deterministic; start the chain at zero instead.
X = np.zeros(n)
for t in range(n - 1):
    X[t + 1] = theta * np.abs(X[t]) + d * Z[t]

psi_est = LAE(p, X)      # look-ahead (kernel-based) density estimate
k_est = gaussian_kde(X)  # standard kernel density estimate, for comparison

fig, ax = plt.subplots(figsize=(10, 7))
# Evaluation grid: 200 points from -3 to 3.
ys = np.linspace(-3, 3, 200)
ax.plot(ys, psi_star(ys), "b-", lw=2, alpha=0.6, label="true")
ax.plot(ys, psi_est(ys), "g-", lw=2, alpha=0.6, label="look ahead estimate")
ax.plot(ys, k_est(ys), "k-", lw=2, alpha=0.6, label="kernel based estimate")
ax.legend(loc="upper left")
# FIX: was the garbled/commented-out '#.show()', so the figure never
# displayed; the sibling script in this file ends with plt.show() too.
plt.show()