if 0 or ALL:
    # Demo: medium gray box model, measured target Y(X) = E(mDot, p),
    # approximated by a black box model and compared in iso-map and
    # wireframe plots.
    s = 'Medium gray box model, measured Y(X) = E(mDot, p)'
    print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))

    df = pd.read_csv(raw, sep=',', comment='#')
    # BUG FIX: DataFrame.rename() returns a new frame and is not in-place;
    # the original call discarded the result, making it a no-op.
    df = df.rename(columns=df.iloc[0])
    df = df.apply(pd.to_numeric, errors='coerce')
    # np.asfarray() was removed in NumPy 2.0 -> use asarray with float dtype
    X = np.asarray(df.loc[:, ['mDot', 'p']], dtype=float)
    Y = np.asarray(df.loc[:, ['A']], dtype=float)

    # train the black box on (X, Y) and evaluate it at the training points
    model = Black()
    y = model(X=X, Y=Y, neurons=[], x=X)
    print('*** x:', model.x, 'y:', model.y, y)

    # measured, predicted, and residual fields (scaled by 1e3 for display)
    plotIsoMap(X.T[0], X.T[1], Y.T[0] * 1e3, title=r'$A_{prc}\cdot 10^3$')
    plotIsoMap(X.T[0], X.T[1], y.T[0] * 1e3, title=r'$A_{blk}\cdot 10^3$')
    plotIsoMap(X.T[0], X.T[1], (Y.T[0] - y.T[0]) * 1e3,
               title=r'$(A_{prc} - A_{blk})\cdot 10^3$')
    plotWireframe(X.T[0], X.T[1], Y.T[0] * 1e3, title=r'$A_{prc}\cdot 10^3$')
    plotWireframe(X.T[0], X.T[1], y.T[0] * 1e3, title=r'$A_{blk}\cdot 10^3$')
    plotWireframe(X.T[0], X.T[1], (Y.T[0] - y.T[0]) * 1e3,
                  title=r'$(A_{prc} - A_{blk})\cdot 10^3$')
y_exa[:, 0], labels=(lx0, lx1, lYexa), xrange=xRng, yrange=yRng, units=['m/s', 'mm$^2$/s', 'MPa']) plotSurface(x[:, 1], x[:, 0], y_exa[:, 0], labels=(lx1, lx0, lYexa), xrange=xRng, yrange=yRng, units=['m/s', 'mm$^2$/s', 'MPa']) plotIsoMap(x[:, 0], x[:, 1], y_exa[:, 0], labels=(lx0, lx1, lYexa), figsize=figsize, xrange=xRng, yrange=yRng, units=['m/s', 'mm$^2$/s', 'MPa']) plotIsolines(x[:, 0], x[:, 1], y_exa[:, 0], labels=(lx0, lx1, lYexa), figsize=figsize, xrange=xRng, yrange=yRng, units=['m/s', 'mm$^2$/s', 'MPa']) plotIsoMap(X[:, 0], X[:, 1], Y[:, 0], labels=(lx0, lx1, lY),
""" nTun = 3 if x is None: return np.ones(nTun) # get number of tuning parameters tun = args if len(args) == nTun else np.ones(nTun) y0 = tun[0] + tun[2] * x[0]**2 + tun[1] * x[1] y1 = tun[0] * x[1] return [y0, y1] if 0 or ALL: x = grid(100, [0.9, 1.1], [0.9, 1.1]) y_exa = White('demo')(x=x) y = noise(y_exa, relative=20e-2) plotIsoMap(x[:, 0], x[:, 1], y_exa[:, 0], title='$y_{exa,0}$') plotSurface(x[:, 0], x[:, 1], y_exa[:, 0], title='$y_{exa,0}$') plotIsolines(x[:, 0], x[:, 1], y_exa[:, 0], title='$y_{exa,0}$', levels=[0, 1e-4, 5e-4, .003, .005, .01, .02, .05, .1, .2]) plotIsoMap(x[:, 0], x[:, 1], y[:, 0], title='$y_0$') plotIsoMap(x[:, 0], x[:, 1], (y - y_exa)[:, 0], title='$y_0-y_{exa,0}$') if 0 or ALL: x = grid(4, [0, 12], [0, 10]) y_exa = White(f)(x=x) y = noise(y_exa, relative=20e-2)
# free function: has no access to instance ('self') attributes
def function(x, *args):
    print('0')
    parabola = (x[1] - 1)**2
    return 3.3 * np.array(np.sin(x[0]) + parabola)


# method-style function: receives the instance and may read its attributes
def method(self, x, *args):
    print('1')
    parabola = (x[1] - 1)**2
    return 3.3 * np.array(np.sin(x[0]) + parabola)


if 1 or ALL:
    s = 'Forward() with demo function build-in into Model'
    print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))

    # forward evaluation of the white box wrapping the free function
    x_grid = grid(3, [0, 1], [0, 1])
    x, y = Forward(White(function))(x=x_grid)
    plotIsoMap(x[:, 0], x[:, 1], y[:, 0])

if 0 or ALL:
    s = 'Forward() with demo function build-in into Model'
    print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))

    # same idea, but with the built-in 'demo' model on a cross pattern
    x_cross = cross(5, [1, 2], [3, 4])
    x, y = Forward(White('demo'))(x=x_cross)
    plotIsoMap(x[:, 0], x[:, 1], y[:, 0], scatter=True)

if 0 or ALL:
    s = "Forward, assign external function (without self-argument) to f"
    print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))

    # keep the operator so its model state can be inspected afterwards
    op = Forward(White(function))
    _, y = op(x=rand(12, [2, 3], [3, 4]))
    print('x:', op.model.x, '\ny1:', op.model.y)
0.003393, 0.260, NaN, -0.002922, 0.000471 0.003393, 0.500, NaN, -0.002774, 0.000619 0.003393, 0.770, NaN, -0.002710, 0.000682 0.003393, 1.000, NaN, -0.002770, 0.000623 0.003393, 1.000, NaN, -0.002688, 0.000705 0.003393, 1.000, NaN, -0.002686, 0.000707 """) if 0 or ALL: s = 'Dark gray box model 1' print('-' * len(s) + '\n' + s + '\n' + '-' * len(s)) model = DarkGray('demo') X, Y = model.frame2arrays(df, ['x0', 'x4'], ['y0']) y = model(X=X, Y=Y, x=X, silent=True, neurons=[10]) plotIsoMap(X[:, 0], X[:, 1], Y[:, 0], title='Y(X)') plotIsoMap(X[:, 0], X[:, 1], y[:, 0], title='y(X)') plotIsoMap(X[:, 0], X[:, 1], (y-Y)[:, 0], title='y(X) -Y') print('*** X:', X.shape, 'Y:', Y.shape, 'y:', y.shape) if 0 or ALL: s = 'Dark gray box model 2' print('-' * len(s) + '\n' + s + '\n' + '-' * len(s)) df = pd.read_csv(raw, sep=',', comment='#') df.rename(columns=df.iloc[0]) df = df.apply(pd.to_numeric, errors='coerce') X = np.asfarray(df.loc[:, ['mDot', 'p']]) Y = np.asfarray(df.loc[:, ['A']])
    # NOTE(review): this chunk starts inside an `if ... or ALL:` section whose
    # header (and the banner string `s`) lies outside this view; indentation
    # of 4 spaces is assumed -- confirm against the full file.
    print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))

    # minimize the white box model starting from 10 random initial points
    op = Minimum(White(f))
    x, y = op(x=rand(10, [-5, 5], [-7, 7]), method='nelder-mead', silent=True)
    # op.plot()
    print('x:', x, 'y:', y, '\nop.x:', op.x, 'op.y:', op.y)

if 0 or ALL:
    s = 'Minimum, generates series of initial x on grid'
    print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))

    # visualize the objective surface before minimizing
    x, y = Forward(White(f))(x=grid(3, [-2, 2], [-2, 2]))
    plotSurface(x[:, 0], x[:, 1], y[:, 0])
    plotIsoMap(x[:, 0], x[:, 1], y[:, 0])

    # minimize from 3 random initial points and plot the trajectory
    op = Minimum(White(f))
    x, y = op(x=rand(3, [-5, 5], [-7, 7]))
    op.plot()
    print('x:', x, 'y:', y)

if 1 or ALL:
    s = 'Minimum, test all optimizers'
    print('-' * len(s) + '\n' + s + '\n' + '-' * len(s))

    if True:
        op = Minimum(White('demo'))
        # NOTE(review): the section continues past this chunk; 'methods' is
        # presumably iterated by the unseen following code
        methods = ['ga', 'BFGS']
s = 'Error compensation (black box): Y(X) = A(mDot, p)' print('-' * len(s) + '\n' + s + '\n' + '-' * len(s)) plotWireframes = False plotIsoMaps = True df = pd.read_csv(raw, sep=',', comment='#') df.rename(columns=df.iloc[0]) df = df.apply(pd.to_numeric, errors='coerce') X = np.asfarray(df.loc[:, ['mDot', 'p']]) Y = np.asfarray(df.loc[:, ['A']]) YDiff = round(Y[:, 0].max() - Y[:, 0].min(), 5) plotIsoMap(X[:, 0], X[:, 1], Y[:, 0] * 1e3, title=r'$A_{prc}\cdot 10^3' + r'\ \ (\Delta A$: ' + str(YDiff * 1e3) + 'e-3)', labels=[r'$\dot m$', '$p$']) plotWireframe(X[:, 0], X[:, 1], Y[:, 0] * 1e3, title=r'$A_{prc}\cdot 10^3$', labels=[r'$\dot m$', '$p$']) model = Black() dyDiffAll = [] hidden = range(1, 20 + 1) for hid in hidden: print('+++ hidden:', hid, end='') print(' ==> autodefinition') if hid == 0 else print()
def example4():
    """
    2D problem: three user-defined model functions f(x) and a neural
    network are fitted to the same measurement table.

    For every entry of 'definitions' a Black() model is trained on the
    relevant frame columns; its prediction is compared against the target
    'u' both as a console table and graphically.
    """
    df = pd.DataFrame({
        'x': [13, 21, 32, 33, 43, 55, 59, 60, 62, 82],
        'y': [.56, .65, .7, .7, 2.03, 1.97, 1.92, 1.81, 2.89, 7.83],
        'u': [
            -0.313, -0.192, -0.145, -0.172, -0.563, -0.443, -0.408, -0.391,
            -0.63, -1.701
        ]
    })

    def f1(x, c0=0, c1=0, c2=0, c3=0, c4=0, c5=0):
        """
        Computes polynomial u(x) = c0 + c1*x + ... + c5*x^5 (Horner form)
        """
        return c0 + x * (c1 + x * (c2 + x * (c3 + x * (c4 + x * c5))))

    def f2(x, c0=0, c1=0, c2=0, c3=0, c4=0, c5=0):
        # the only 2D model: uses both x[0] and x[1]
        y = c0 / (x[0] * x[0]) + c1 / x[1] + c2 + c3 * x[1] \
            + c4 * x[0] * x[0]
        return [y]

    def f3(x, c0=0, c1=0, c2=0, c3=0, c4=0, c5=0):
        return c0 * x * x + c1 / x + c2 + c3 * x

    # callables are fitted curves; the list entry is a neural network layout
    definitions = [f1, f2, f3, [50, 10, 2], f2]

    # neural network options
    opt = {'methods': 'bfgs rprop', 'neurons': []}

    Y = np.array(df.loc[:, ['u']])    # extracts an 2D array

    for f in definitions:
        blk = Black()
        if callable(f):
            print(f.__name__)
            print('f1==f', f1 == f, id(f) == id(f1))
            print('f2==f', f2 == f, id(f) == id(f2))
            print('f3==f', f3 == f, id(f) == id(f3))

        # f2 needs 2D input (x, y); every other callable is 1D in x
        # (identity test: the id() prints above show identity is intended)
        if callable(f) and f is not f2:
            X = np.array(df.loc[:, ['x']])
        else:
            X = np.array(df.loc[:, ['x', 'y']])

        blk.train(X, Y, **opt)
        y = blk.predict(X)
        dy = y - Y

        # console output: per-row input, target, prediction, and errors
        print(' ' + 76 * '-')
        su = '[j:0..' + str(Y.shape[1] - 1) + '] '
        print(' i X[j:0..' + str(X.shape[1] - 1) + ']' + 'U' + su +
              'u' + su + 'du' + su + 'rel' + su + '[%]:')
        for i in range(X.shape[0]):
            print('{:5d} '.format(i), end='')
            for a in X[i]:
                print('{:f} '.format(a), end='')
            for a in Y[i]:
                print('{:f} '.format(a), end='')
            for a in y[i]:
                print('{:f} '.format(a), end='')
            for j in range(Y.shape[1]):
                print('{:f} '.format(dy[i][j]), end='')
            for j in range(Y.shape[1]):
                print('{:f} '.format(dy[i][j] / Y[i][j] * 100), end='')
            print()
        print(' ' + 76 * '-')

        # graphic presentation
        if X.shape[1] == 1 and Y.shape[1] == 1:
            plt.title('Approximation')
            plt.xlabel('$x$')
            plt.ylabel('$u$')
            plt.scatter(X, Y, label='$u$', marker='x')
            # BUG FIX: the prediction was never drawn -- the original
            # scattered Y a second time and labelled it as the approximation
            plt.scatter(X, y, label=r'$\tilde u$', marker='o')
            if y is not None:
                plt.plot(X, y, label=r'$\tilde u$ (cont)')
                plt.plot(X, dy, label=r'$\tilde u - u$')
            plt.legend(bbox_to_anchor=(0, 0), loc='lower left')
            plt.show()
            if 1:
                plt.title('Absolute error')
                plt.ylabel(r'$\tilde u - u$')
                plt.plot(X, dy)
                plt.show()
            if 1:
                plt.title('Relative error')
                plt.ylabel('E [%]')
                plt.plot(X, dy / Y * 100)
                plt.show()
        else:
            if isinstance(f, str):
                s = ' (' + f + ') '
            elif not callable(f):
                s = ' $(neural: ' + str(f) + ')$ '
            else:
                s = ''
            plotIsoMap(X[:, 0], X[:, 1], Y[:, 0],
                       labels=['$x$', '$y$', r'$u$' + s])
            # BUG FIX: this map was labelled \tilde u but plotted Y again;
            # plot the prediction y instead
            plotIsoMap(X[:, 0], X[:, 1], y[:, 0],
                       labels=['$x$', '$y$', r'$\tilde u$' + s])
            plotIsoMap(X[:, 0], X[:, 1], dy[:, 0],
                       labels=['$x$', '$y$', r'$\tilde u - u$' + s])