Example #1

import numpy as np
import openturns as ot
import matplotlib.pyplot as plt

ax.autoscale()

# %%
# Generate conditional trajectories
# ---------------------------------

# %%
# Support of the trajectories, with the training points removed
values = np.linspace(0, 10, 500)
for xx in x:
    if len(np.argwhere(values == xx)) == 1:
        values = np.delete(values, np.argwhere(values == xx)[0, 0])
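
# %%
# Equivalent, vectorized filtering (a sketch; assumes the entries of x are the
# scalar training abscissas, np.ravel also handles a column-shaped x):
values = values[~np.isin(values, np.ravel(x))]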

# %%
# Conditional Gaussian process
krv = ot.KrigingRandomVector(krigingResult, np.vstack(values))
krv_sample = krv.getSample(5)
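
# %%
# The conditional mean and covariance at the same points can also be queried
# directly from the Kriging result (a sketch; getConditionalCovariance is the
# same accessor used in Example #2 below):
conditional_mean = krigingResult.getConditionalMean(np.vstack(values))
conditional_cov = krigingResult.getConditionalCovariance(np.vstack(values))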

# %%
x_plot = np.vstack(np.linspace(xmin, xmax, n_pts_plot))
fig, ax = plt.subplots(figsize=(8, 6))
ax.plot(x, y, 'ro')
for i in range(krv_sample.getSize()):
    if i == 0:
        ax.plot(values,
                krv_sample[i, :],
                '--',
                alpha=0.8,
                label='Conditional trajectories')
    else:
        ax.plot(values, krv_sample[i, :], '--', alpha=0.8)
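
# %%
# Finish the figure (a sketch; the axis labels are illustrative choices):
ax.set_xlabel('x')
ax.set_ylabel('y')
ax.legend()
plt.show()
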
Example #2

import openturns as ot
import openturns.testing as ott

algo.setOptimizeParameters(False)  # do not optimize hyper-parameters
algo.run()
result = algo.getResult()
# Get meta model
metaModel = result.getMetaModel()
outData = metaModel(inputValidSample)

# 4) Errors
# Interpolation: the metamodel must reproduce the training outputs
ott.assert_almost_equal(outputSample, metaModel(inputSample), 3.0e-5, 3.0e-5)
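
# Equivalent explicit check (a sketch using Sample arithmetic): the
# componentwise extrema of the residuals on the training sample should be
# numerically zero.
residuals = metaModel(inputSample) - outputSample
print("Interpolation residuals, min=", residuals.getMin(), "max=", residuals.getMax())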

# 5) Kriging variance is zero at the learning points
var = result.getConditionalCovariance(inputSample)

# assert_almost_equal cannot be applied directly to matrices,
# so the covariance matrix is flattened into a Point
covariancePoint = ot.Point(var.getImplementation())
trueVariance = ot.Point(covariancePoint.getSize(), 0.0)
ott.assert_almost_equal(covariancePoint, trueVariance, 1e-6, 1e-6)
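
# Alternative check (a sketch; requires an OpenTURNS version that provides
# KrigingResult.getConditionalMarginalVariance): the marginal variance at the
# training points should also be numerically zero.
margVar = result.getConditionalMarginalVariance(inputSample)
print("Conditional marginal variance at the training points=", margVar)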

# Random vector evaluation
rvector = ot.KrigingRandomVector(result, inputValidSample[0])

# Realization of the random vector
realization = rvector.getRealization()
print("Realization of the KRV=", realization)

# Get a sample of size 10
realizations = rvector.getSample(10)
print("Sample of realizations of the KRV=", realizations)