Example #1
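# Assumed preamble (not part of the original snippet): the examples below appear
# to build on numpy and the pyvib package. The exact module paths are an
# assumption based on pyvib's usual layout, and Data is assumed to be a plain
# namedtuple that bundles the values returned by the helper functions.
from collections import namedtuple
from copy import deepcopy  # used in Example #4 below

import numpy as np

from pyvib.forcing import multisine   # assumed pyvib module path
from pyvib.frf import covariance      # assumed pyvib module path
from pyvib.signal import Signal       # assumed pyvib module path
from pyvib.subspace import Subspace   # assumed pyvib module path

Data = namedtuple('Data', ['sig', 'uest', 'yest', 'uval', 'yval', 'utest',
                           'ytest', 'um', 'ym', 'covY', 'freq', 'lines',
                           'npp', 'Ntr'])
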
def partion_data(data, Rest=2, Ntr=1, Ntr_steady=1):
    y = data['y']
    u = data['u']
    lines = data['lines']
    fs = data['fs']
    npp, p, R, P = y.shape
    freq = np.arange(npp) / npp * fs
    # Partition the data: use the last period of the two last realizations.
    # The test set is for performance evaluation, the val set for model selection.
    utest = u[:, :, -1, -1]
    ytest = y[:, :, -1, -1]
    uval = u[:, :, -2, -1]
    yval = y[:, :, -2, -1]
    # all other realizations are used for estimation
    uest = u[..., :Rest, Ntr_steady:]
    yest = y[..., :Rest, Ntr_steady:]
    # noise estimate over periods. This sets the performance limit for the
    # estimated model
    covY = covariance(yest)

    # create signal object
    sig = Signal(uest, yest, fs=fs)
    sig.lines = lines
    # plot periodicity for one realization to verify data is steady state
    # sig.periodicity()
    # Calculate BLA, total- and noise distortion. Used for subspace
    # identification
    sig.bla()
    # average signal over periods. Used for training of PNLSS model
    um, ym = sig.average()

    return Data(sig, uest, yest, uval, yval, utest, ytest, um, ym, covY, freq,
                lines, npp, Ntr)
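
# Note (not in the original): simulate() below also relies on module-level names
# (fs, p, P, R, kind, RMSu) that are set elsewhere in the script this example
# was taken from.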
def simulate(true_model, npp=1024, Ntr=1, Rest=2, add_noise=False):
    print()
    print('Nonlinear parameters:',
          len(true_model.nlx.active) + len(true_model.nly.active))
    print(f'Parameters to estimate: {true_model.npar}')
    # set non-active coefficients to zero. Note order of input matters
    idx = np.setdiff1d(np.arange(true_model.E.size), true_model.nlx.active)
    idy = np.setdiff1d(np.arange(true_model.F.size), true_model.nly.active)
    true_model.E.flat[idx] = 0
    true_model.F.flat[idy] = 0

    # get predictable random numbers. https://dilbert.com/strip/2001-10-25
    np.random.seed(10)
    # shape of u from multisine: (R,P*npp)
    u, lines, freq = multisine(N=npp, P=P, R=R, lines=kind, rms=RMSu)

    # Transient: add Ntr periods before the start of each realization so that
    # steady-state data is generated.
    T1 = np.r_[npp * Ntr, np.r_[0:(R - 1) * P * npp + 1:P * npp]]
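    # (not in the original: the construction above suggests T1[0] is the number
    #  of transient samples to prepend and T1[1:] are the sample indices where
    #  each realization starts; e.g. npp=1024, Ntr=1, R=2, P=3 gives
    #  T1 = [1024, 0, 3072])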
    _, yorig, _ = true_model.simulate(u.ravel(), T1=T1)
    u = u.reshape((R, P, npp)).transpose((2, 0, 1))[:, None]  # (npp,m,R,P)
    y = yorig.reshape((R, P, npp, p)).transpose((2, 3, 0, 1))

    # Add colored noise to the output. randn generates white noise
    if add_noise:
        np.random.seed(10)
        noise = 1e-3 * np.std(y[:, -1, -1]) * np.random.randn(*y.shape)
        # Do some filtering to get colored noise
        noise[1:-2] += noise[2:-1]
        y += noise

    ## START of Identification ##
    # Partition the data: use the last period of the two last realizations.
    # The test set is for performance evaluation, the val set for model selection.
    utest = u[:, :, -1, -1]
    ytest = y[:, :, -1, -1]
    uval = u[:, :, -2, -1]
    yval = y[:, :, -2, -1]
    # all other realizations are used for estimation
    uest = u[..., :Rest, :]
    yest = y[..., :Rest, :]
    # noise estimate over periods. This sets the performance limit for the
    # estimated model
    covY = covariance(yest)

    # create signal object
    sig = Signal(uest, yest, fs=fs)
    sig.lines = lines
    # plot periodicity for one realization to verify data is steady state
    # sig.periodicity()
    # Calculate BLA, total- and noise distortion. Used for subspace
    # identification
    sig.bla()
    # average signal over periods. Used for training of PNLSS model
    um, ym = sig.average()

    return Data(sig, uest, yest, uval, yval, utest, ytest, um, ym, covY, freq,
                lines, npp, Ntr)
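
# Shape-bookkeeping sketch (not in the original): a small numpy-only check of
# the reshape/transpose used in simulate() above, with assumed toy sizes. The
# multisine output of shape (R, P*npp) ends up as (npp, m, R, P), so
# u[:, :, -1, -1] is the last period of the last realization and
# u[:, :, -2, -1] the last period of the second-to-last one.
import numpy as np

npp, m, R, P = 8, 1, 3, 2
u = np.arange(R * P * npp).reshape(R, P * npp)
u4 = u.reshape((R, P, npp)).transpose((2, 0, 1))[:, None]  # (npp, m, R, P)
assert u4.shape == (npp, m, R, P)
# last period of the last realization == last npp samples of the last row
assert np.array_equal(u4[:, 0, -1, -1], u[-1, -npp:])
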
Example #3
uest = u[..., :-2, :]
yest = y[..., :-2, :]
# noise estimate over periods. This sets the performance limit for the estimated
# model (a small numpy illustration of the idea follows at the end of this example)
covY = covariance(yest)
npp, p, Rest, Pest = yest.shape
npp, m, Rest, Pest = uest.shape
Ptr = 5  # number of periods to use for transient handling during simulation

# create signal object
sig = Signal(uest, yest, fs=fs)
sig.lines = lines
# plot periodicity for one realization to verify data is steady state
# sig.periodicity()
# Calculate BLA, total- and noise distortion. Used for subspace identification
sig.bla()
# average signal over periods. Used for training of PNLSS model
um, ym = sig.average()

# model orders and Subspace dimensioning parameter
nvec = [3]
maxr = 10
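
# Note (not in the original): `weight` is set earlier in the script this example
# was taken from. The `or True` in the condition below forces re-estimation even
# when a linmodel already exists in the interactive namespace; drop it to reuse
# a previously fitted model.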

if 'linmodel' not in locals() or True:
    linmodel = Subspace(sig)
    linmodel.estimate(2, maxr, weight=weight)
    linmodel.optimize(weight=weight)

    print(f"Best subspace model, n, r: {linmodel.n}, {linmodel.r}")
    linmodel_orig = linmodel
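
# Noise-floor sketch (not in the original): the covariance over periods computed
# above is what bounds the achievable model error. A minimal numpy-only
# illustration of the idea with assumed toy sizes; the pyvib covariance() call
# itself presumably works per excited frequency line.
import numpy as np

rng = np.random.default_rng(0)
npp_, p_, R_, P_ = 64, 1, 2, 4
t_ = np.arange(npp_) / npp_
clean = np.sin(2 * np.pi * 3 * t_)[:, None, None, None]    # periodic part
ynoisy = clean + 1e-2 * rng.standard_normal((npp_, p_, R_, P_))
noise_var = ynoisy.var(axis=-1, ddof=1).mean()              # spread over the period axis
print(f"estimated noise variance ~ {noise_var:.1e} (true value 1e-4)")
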
Example #4
yval_raw = yval_raw.reshape(npp, Pval, 50, order='F').swapaxes(1, 2)[:, None]
uval = uval_raw[:, :, -1, -1]
yval = yval_raw[:, :, -1, -1]
utest = uval_raw[:, :, 1, -1]
ytest = yval_raw[:, :, 1, -1]
Rval = uval_raw.shape[2]

sig = Signal(uest, yest, fs=fs)
sig.lines = lines
um, ym = sig.average()
# sig.periodicity()

# for subspace model (from BLA)
sig2 = Signal(uest[:, :, None], yest[:, :, None], fs=fs)
sig2.lines = lines
sig2.bla()

# model orders and Subspace dimensioning parameter
n = 2
maxr = 20
dof = 0
iu = 0
xpowers = np.array([[2], [3]])

# subspace model
lin1 = Subspace(sig2)
# models, infodict = linmodel.scan(n, maxr, weight=False)
# ensure we use the same dimension as for the FNSI model
lin1.estimate(n, maxr)
lin2 = deepcopy(lin1)
lin2.optimize(weight=False)
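
# Reshape sketch (not in the original): a small numpy-only check of the
# Fortran-order reshape used for yval_raw above, with assumed toy sizes (npp,
# Pval and the raw arrays come from earlier in the original script). A flat
# (npp*Pval, R) record, stored period after period for each realization, ends
# up as (npp, p, R, Pval), matching the (npp, p, R, P) layout of the other
# examples.
import numpy as np

npp_, Pval_, R_ = 4, 2, 3
flat = np.arange(npp_ * Pval_ * R_).reshape(npp_ * Pval_, R_, order='F')
y4 = flat.reshape(npp_, Pval_, R_, order='F').swapaxes(1, 2)[:, None]
assert y4.shape == (npp_, 1, R_, Pval_)
# last period of realization 1 == last npp_ rows of column 1
assert np.array_equal(y4[:, 0, 1, -1], flat[-npp_:, 1])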