Example #1
import numpy
import sidpy
from sklearn.neighbors import NearestNeighbors


def simulate_rap(N_sim, x, p_opt, k=None):
    # Simulate a length-N_sim series by resampling nearest-neighbor
    # increments from the delay embedding of x.
    if k is None:
        # Default k grows with the effective sample size; an alternative rate:
        # k = int(numpy.power(x.shape[0] - p_opt, 4./(4 + 1 + p_opt))) + 1
        k = int(numpy.power(x.shape[0] - p_opt, 2. / (2 + 1 + p_opt))) + 1

        # print("Using k = {}.".format(k))

    N = x.shape[0]

    # Rows of X are the delay vectors; the final column holds the
    # one-step-ahead value that the resampling step draws on.
    X = sidpy.embed_ts(x, p_max=p_opt)

    x_boot = numpy.zeros(N_sim)

    # Seed the simulation with a randomly chosen length-p_opt block of x.
    J = numpy.random.randint(p_opt, N)

    x_boot[:p_opt] = x[J - p_opt:J]

    # Index the length-p_opt histories (all columns but the last).
    nbrs = NearestNeighbors(n_neighbors=k,
                            algorithm='ball_tree').fit(X[:, :-1])

    for t in range(N_sim - p_opt):
        x_cur = x_boot[t:t + p_opt]

        distances, indices = nbrs.kneighbors(x_cur.reshape(1, -1))

        distances = distances.flatten()
        indices = indices.flatten()

        # If the query exactly matches a stored history (zero distance),
        # drop that self-match; otherwise drop the farthest neighbor, so
        # that k - 1 candidates remain in either case.
        if distances[0] == 0:
            distances = distances[1:]
            indices = indices[1:]
        else:
            distances = distances[:-1]
            indices = indices[:-1]

        # Weight each neighbor by the reciprocal of its distance to the
        # query point, then normalize to a probability vector.
        sample_prob = 1 / distances
        sample_prob = sample_prob / numpy.sum(sample_prob)

        # Draw a single neighbor index (a scalar draw, so the assignments
        # below stay scalar) and resample its increment: the neighbor's
        # future value minus its most recent past value.
        which_nn = numpy.random.choice(indices, p=sample_prob)

        delta_resid = X[which_nn, -1] - X[which_nn, -2]

        x_boot[t + p_opt] = x_boot[t + p_opt - 1] + delta_resid

    return x_boot
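A minimal usage sketch for simulate_rap on a toy AR(1) series; it assumes sidpy.embed_ts returns the delay embedding with the one-step-ahead value in the final column, which is the convention the function body relies on.

numpy.random.seed(1)

# Toy AR(1) series.
N = 500
x = numpy.zeros(N)
for t in range(1, N):
    x[t] = 0.8 * x[t - 1] + numpy.random.randn()

x_sim = simulate_rap(N_sim=200, x=x, p_opt=1)
print(x_sim[:10])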
Example #2
import numpy
import sidpy


def simulate_block_bootstrap(N_sim, x, k=None):
    # Simulate by resampling overlapping length-k blocks of x with
    # replacement and concatenating them.
    n = x.shape[0]

    if k is None:
        # Default block length grows like n^(1/3).
        k = int(numpy.ceil(numpy.power(n, 1. / 3)))

    # Rows of X are the overlapping length-k blocks of x.
    X = sidpy.embed_ts(x, k - 1)

    num_blocks_needed = int(numpy.ceil(N_sim / float(k)))

    row_inds = numpy.random.choice(X.shape[0],
                                   size=num_blocks_needed,
                                   replace=True)

    X_bs = X[row_inds, :]

    # Concatenate the sampled blocks row by row and truncate to N_sim points.
    x_boot = X_bs.ravel(order='C')[:N_sim]

    return x_boot
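A quick sketch of calling simulate_block_bootstrap on Gaussian noise; with n = 1000 the default block length is ceil(1000^(1/3)) = 10.

numpy.random.seed(2)

x = numpy.random.randn(1000)

x_bs = simulate_block_bootstrap(N_sim=500, x=x)
print(x_bs.shape)  # (500,)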
Example #3
import numpy
import sidpy
from sklearn.neighbors import NearestNeighbors


def simulate_ls(N_sim, x, p_opt, k=None):
    # Lall-Sharma k-nearest-neighbor bootstrap: resample the one-step-ahead
    # value of a randomly chosen near neighbor of the current history.
    if k is None:
        k = int(numpy.power(x.shape[0], 0.5))  # k = sqrt(N), as suggested in the Lall-Sharma paper.

        # print("Using k = {}.".format(k))

    N = x.shape[0]

    X = sidpy.embed_ts(x, p_max=p_opt)

    x_boot = numpy.zeros(N_sim)

    J = numpy.random.randint(p_opt, N)

    x_boot[:p_opt] = x[J - p_opt:J]

    nbrs = NearestNeighbors(n_neighbors=k,
                            algorithm='ball_tree').fit(X[:, :-1])

    # Lall-Sharma resampling kernel: the j-th nearest neighbor is drawn
    # with probability proportional to 1/j.
    resampling_weights = 1. / numpy.arange(1, k + 1)
    resampling_weights = resampling_weights / numpy.sum(resampling_weights)

    for t in range(N_sim - p_opt):
        x_cur = x_boot[t:t + p_opt]

        distances, indices = nbrs.kneighbors(x_cur.reshape(1, -1))

        distances = distances.flatten()
        indices = indices.flatten()

        # indices come back sorted by distance, so resampling_weights[j]
        # is the probability of the (j + 1)-th nearest neighbor. A scalar
        # draw keeps the assignment scalar.
        x_boot[t + p_opt] = X[
            numpy.random.choice(indices, p=resampling_weights), -1]
        # x_boot[t + p_opt] = numpy.random.randn(1)*0.025 + X[numpy.random.choice(indices[0][1:], size = 1), -1]

    return x_boot
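And a hedged usage sketch for simulate_ls, again leaning on the sidpy.embed_ts convention that the final column of the embedding holds the future value.

numpy.random.seed(3)

# Toy AR(2) series.
N = 800
x = numpy.zeros(N)
for t in range(2, N):
    x[t] = 0.5 * x[t - 1] - 0.3 * x[t - 2] + numpy.random.randn()

x_boot = simulate_ls(N_sim=400, x=x, p_opt=2)
print(x_boot[:5])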
Example #4

import getpass
import sys

import numpy

username = getpass.getuser()

sys.path.append('../sidpy')  # Make the local sidpy package importable.

import sidpy

p_max = 10

x = []

# Build 10 realizations of increasing length (each at least p_max + 1 long).
for trial_ind in range(10):
    # x.append(numpy.arange(trial_ind, 2*p_max))
    x.append(numpy.random.rand(p_max + 1 + trial_ind))

X = sidpy.embed_ts(x, p_max, is_multirealization=True)

print(x)

print(X)
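As a sanity check on the printed shapes, assuming embed_ts stacks the length-(p_max + 1) delay vectors from every realization as rows:

# Each realization of length n_r contributes n_r - p_max delay vectors.
expected_rows = sum(xi.shape[0] - p_max for xi in x)
assert X.shape == (expected_rows, p_max + 1)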
Example #5
    x,
    p_max=p_opt,
    N_res=N_res,
    rho=rho,
    Win_scale=Win_scale,
    multi_bias=True,
    to_plot_regularization=True,
    renormalize_by=renormalize_by)

knn_errs = False
print("NOTE: Using knn_errs = False.")
# knn_errs = True; print("NOTE: Using knn_errs = True.")

nn_number = None

X = sidpy.embed_ts(x, p_max=p_opt)

vec_ones = numpy.ones(X.shape[1]).reshape(1, -1)

#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#
# Continue running the time series forward in
# time 'unlinked' to the original time series.
#
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

N_sim = N

x_esn_sim = simESN.simulate_from_esn_umd_sparse(N_sim,
                                                X.T,
                                                Y,
Example #6
import numpy
import sidpy
from scipy.integrate import quad


def estimate_ser_kde(x, p_opt, h, active_set, is_multirealization=False):
    # Estimate the specific entropy rate by numerically integrating a
    # kernel density estimate of the predictive density, leaving out a
    # window of temporally adjacent points around each evaluation time.
    # stack_distance_matrix and integrand_flogf_er are assumed to be
    # defined in the enclosing module.
    lwo_halfwidth = 10

    De_max = stack_distance_matrix(x,
                                   p_opt,
                                   mean_x=0.0,
                                   sd_x=1.0,
                                   is_multirealization=is_multirealization)

    De = numpy.empty(shape=(De_max.shape[0], De_max.shape[1], len(active_set)),
                     dtype='float32',
                     order='C')

    for lag_ind, lag_val in enumerate(active_set):
        De[:, :, lag_ind] = De_max[:, :, lag_val]

    #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
    #
    # Compute the specific entropy rate via integration
    # of the estimator for the predictive density.
    #
    #%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

    h_squared = h * h

    # We are now ready to *vary* h, which requires
    # a recompute for each value of h.

    # Rescale De by the bandwidths.

    De_scaled = De.copy()

    De_scaled = De_scaled / h_squared

    # Each term of the KDE numerator and denominator sums is obtained by
    # summing the scaled distances across the third dimension of De_scaled
    # and exponentiating; the slice keeps the first len(h) - 1 slices,
    # i.e. all but the final (future) dimension.
    S_bottom = De_scaled[:, :, :(len(h) - 1)].sum(2)

    X = sidpy.embed_ts(x, p_max=p_opt, is_multirealization=is_multirealization)

    X_futures = X[:, -1]

    summands_bottom = numpy.exp(S_bottom)

    spenra = numpy.zeros(De.shape[0])

    if is_multirealization:
        x_stacked = numpy.concatenate(x)
        sd_x = x_stacked.std()
        x_sorted = numpy.sort(x_stacked)
    else:
        sd_x = x.std()
        x_sorted = numpy.sort(X_futures)

    N = De_scaled.shape[0]

    for t in range(N):
        if t % 100 == 0:
            print('On t = {} of {}.'.format(t, N))

        # Leave-window-out: mask out time indices within lwo_halfwidth of t.
        lb = numpy.max([0, t - lwo_halfwidth])
        ub = numpy.min([N - 1, t + lwo_halfwidth])

        mask = numpy.array([1] * lb + [0] * (ub - lb + 1) + [1] * (N - ub - 1),
                           dtype=bool)

        S_bottom_cur = S_bottom[t, :]
        summands_bottom_cur = summands_bottom[t, :]

        # Numerically integrate the f*log(f) integrand over the support of
        # the data, padded by five standard deviations on each side.
        quad_flogf_out = quad(integrand_flogf_er,
                              x_sorted[0] - 5 * sd_x,
                              x_sorted[-1] + 5 * sd_x,
                              epsabs=0.01,
                              args=(X_futures, S_bottom_cur,
                                    summands_bottom_cur, h, h_squared, mask))

        spenra[t] = quad_flogf_out[0]

    return spenra
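A sketch of how estimate_ser_kde might be invoked. The conventions here are assumptions: active_set is taken to index the embedding dimensions with the future coordinate last, h is taken to carry one bandwidth per entry of active_set, and stack_distance_matrix and integrand_flogf_er must already be in scope.

numpy.random.seed(4)

x = numpy.random.randn(300)

p_opt = 2
active_set = [0, 1, 2]  # embedding dimensions, future coordinate last (assumed convention)
h = numpy.array([0.3, 0.3, 0.3])  # one bandwidth per active dimension (assumed convention)

spenra = estimate_ser_kde(x, p_opt, h, active_set)
print(spenra[:5])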
block_sizes = [.20 * x.shape[0], .50 * x.shape[0], .30 * x.shape[0]]
block_sizes = [int(b) for b in block_sizes]  # map() returns a lazy iterator under Python 3, so build a list instead.
block_limits = [0] + numpy.cumsum(block_sizes).tolist()

x_stacked = []

# Split x into consecutive blocks to mimic multiple realizations.
for block_ind in range(len(block_sizes)):
    x_stacked.append(x[block_limits[block_ind]:block_limits[block_ind + 1]])

De_max = pycondens.stack_distance_matrix(x,
                                         p_max,
                                         is_multirealization=False,
                                         output_verbose=False)

X = sidpy.embed_ts(x_stacked, p_max, is_multirealization=True)

from sklearn.metrics import pairwise_distances

D_final = -0.5 * numpy.power(pairwise_distances(X, metric='euclidean'), 2)

x = x_stacked

ns = []

for r in range(len(x)):
    ns.append(x[r].shape[0])

De_max = pycondens.stack_distance_matrix(x, p_max, is_multirealization=True)

# Summing the per-lag distance components should reproduce D_final.
D_final_v2 = numpy.sum(De_max, 2)

dist_error = numpy.abs(D_final - D_final_v2)
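The two constructions should agree up to floating-point error:

print("max |D_final - D_final_v2| = {}".format(dist_error.max()))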