def SetCoordinates(self, cArray0, cArray1, cArray2):
    # Variant for three 1-D coordinate arrays: builds an unstructured point cloud.
    assert len(cArray0.shape) == 1
    assert len(cArray1.shape) == 1
    assert len(cArray2.shape) == 1
    assert cArray2.shape == cArray1.shape
    assert cArray2.shape == cArray0.shape
    points = vtk.vtkPoints()
    self.__mesh.SetPoints(points)
    points.SetData(
        vtknp.numpy_to_vtk(algs.make_vector(cArray0, cArray1, cArray2)))
    self.__nnodes = cArray0.shape[0]
    self.Make_PolyVertex()
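
Make_PolyVertex itself is not shown in this snippet; a minimal sketch of how a single poly-vertex cell covering all points is typically built in VTK (assuming the mesh is a vtkPolyData; this helper is illustrative, not the author's implementation):

import vtk

def make_poly_vertex(mesh, nnodes):
    # One VTK_POLY_VERTEX cell that references every point in the mesh.
    verts = vtk.vtkCellArray()
    verts.InsertNextCell(nnodes)
    for i in range(nnodes):
        verts.InsertCellPoint(i)
    mesh.SetVerts(verts)
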
def SetCoordinates(self, cArray0, cArray1, cArray2):
    # Variant for three 3-D coordinate arrays: fills a structured (curvilinear) grid.
    assert len(cArray0.shape) == 3
    assert len(cArray1.shape) == 3
    assert len(cArray2.shape) == 3
    assert cArray2.shape == cArray1.shape
    assert cArray2.shape == cArray0.shape
    points = vtk.vtkPoints()
    self.__mesh.SetPoints(points)
    points.SetData(vtknp.numpy_to_vtk(
        algs.make_vector(cArray0.ravel(), cArray1.ravel(), cArray2.ravel())))
    self.__dims = np.flip(cArray0.shape)
    self.__mesh.SetDimensions(self.__dims)
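
Both variants rely on the same numpy-to-VTK conversion; a minimal self-contained sketch of just that step (the coordinate arrays here are placeholders, not from the original):

import numpy as np
import vtk
from vtk.util import numpy_support as vtknp
from vtk.numpy_interface import algorithms as algs

x = np.linspace(0.0, 1.0, 10)  # placeholder coordinates
y = np.zeros_like(x)
z = np.zeros_like(x)

points = vtk.vtkPoints()
# make_vector stacks the components into an (N, 3) array;
# numpy_to_vtk exposes it to VTK without an extra copy by default.
points.SetData(vtknp.numpy_to_vtk(algs.make_vector(x, y, z)))
print(points.GetNumberOfPoints())  # -> 10
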
def RequestData(self, request, inInfo, outInfo):
    output = dsa.WrapDataObject(vtk.vtkStructuredGrid.GetData(outInfo))
    info = outInfo.GetInformationObject(0)
    exts = info.Get(vtk.vtkStreamingDemandDrivenPipeline.WHOLE_EXTENT())
    dims = [exts[1] - exts[0] + 1, exts[3] - exts[2] + 1, exts[5] - exts[4] + 1]
    output.SetExtent(exts)
    Raxis = np.linspace(1., 2., dims[0])
    Thetaaxis = np.linspace(0., np.pi * 0.5, dims[1])
    xc, yc = np.meshgrid(Raxis, Thetaaxis, indexing="xy")
    X = xc * np.cos(yc)
    Y = xc * np.sin(yc)
    print(X.size, X.shape)
    Z = np.zeros(X.size).reshape(X.shape)
    coordinates = algs.make_vector(X.ravel(), Y.ravel(), Z.ravel())
    pts = vtk.vtkPoints()
    pts.SetData(dsa.numpyTovtkDataArray(coordinates, "Points"))
    output.SetPoints(pts)
    output.PointData.append(xc.ravel(), "radius")
    output.PointData.append(yc.ravel(), "angle")
    return 1
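
This RequestData reads the WHOLE_EXTENT published earlier in the pipeline; a hedged sketch of a matching RequestInformation for a VTKPythonAlgorithmBase-style source (the 101x101x1 extent is an illustrative choice, not from the original):

def RequestInformation(self, request, inInfo, outInfo):
    # Advertise the structured extent so RequestData can size its arrays.
    info = outInfo.GetInformationObject(0)
    info.Set(vtk.vtkStreamingDemandDrivenPipeline.WHOLE_EXTENT(),
             (0, 100, 0, 100, 0, 0), 6)
    return 1
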
Example no. 4
# Test slicing and indexing
compare(randomVec[randomVec[:,0] > 0.2].Arrays[0] - npa[npa[:,0] > 0.2], 1E-7)
compare(randomVec[algs.where(randomVec[:,0] > 0.2)].Arrays[0] - npa[numpy.where(npa[:,0] > 0.2)], 1E-7)
compare(randomVec[dsa.VTKCompositeDataArray([(slice(None, None, None), slice(0,2,None)), 2])].Arrays[0] - npa[:, 0:2], 1E-6)

# Test ufunc
compare(algs.cos(randomVec) - numpy.cos(npa), 1E-7)
assert algs.cos(randomVec).DataSet is randomVec.DataSet

assert algs.all(algs.in1d(elev, [0, 1]) == [item in [0, 1] for item in elev])

# Various numerical ops implemented in VTK
g = algs.gradient(elev)
assert algs.all(g[0] == (1, 0, 0))

v = algs.make_vector(elev, g[:,0], elev)
assert algs.all(algs.gradient(v) == [[1, 0, 1], [0, 0, 0], [0, 0, 0]])

v = algs.make_vector(elev, g[:,0], elev2)
assert algs.all(algs.curl(v) == [1, 0, 0])

v = algs.make_vector(elev, elev2, 2*elev3)
g = algs.gradient(v)
assert g.DataSet is v.DataSet
assert algs.all(algs.det(g) == 2)

assert algs.all(algs.eigenvalue(g) == [2, 1, 1])

assert algs.all(randomVec[:,0] == randomVec[:,0])

int_array1 = numpy.array([1, 0, 1], dtype=int)
Example no. 5
# This is the requested time-step. This may not be exactly equal to the
# timesteps published in RequestInformation(). Your code must handle that
# correctly
req_time = GetUpdateTimestep(self)

output = self.GetOutput()

# TODO: Generate the data as you want.
from vtk.numpy_interface import dataset_adapter as dsa
from vtk.numpy_interface import algorithms as algs
import h5py as h5
f1 = h5.File(
    "/pkg/clion/etc/clion/system/cmake/generated/2774870a/2774870a/Debug/example/em/tokamak.h5"
)
x = f1["/record/H"][:, :][:, req_time, 0]
y = f1["/record/H"][:, :][:, req_time, 1]
z = f1["/record/H"][:, :][:, req_time, 2]
coords = algs.make_vector(x, y, z)
pts = vtk.vtkPoints()
pts.SetData(dsa.numpyTovtkDataArray(coords, "Points"))
output.SetPoints(pts)
# Now mark the timestep produced.
output.GetInformation().Set(output.DATA_TIME_STEP(), req_time)

########################################################################################################
## Script (RequestInformation)
########################################################################################################


def SetOutputTimesteps(algorithm, timesteps):
    executive = algorithm.GetExecutive()
    outInfo = executive.GetOutputInformation(0)
    outInfo.Remove(executive.TIME_STEPS())
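
The function is cut off here by the snippet boundary; the usual ParaView programmable-source pattern continues by publishing the time steps and time range, roughly as in this sketch (standard boilerplate, not the author's exact code):

def SetOutputTimesteps(algorithm, timesteps):
    executive = algorithm.GetExecutive()
    outInfo = executive.GetOutputInformation(0)
    # Clear previously published values, then advertise the available times.
    outInfo.Remove(executive.TIME_STEPS())
    for timestep in timesteps:
        outInfo.Append(executive.TIME_STEPS(), timestep)
    outInfo.Remove(executive.TIME_RANGE())
    outInfo.Append(executive.TIME_RANGE(), timesteps[0])
    outInfo.Append(executive.TIME_RANGE(), timesteps[-1])
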
x = coordVec[:, 0]
y = coordVec[:, 1]
z = coordVec[:, 2]

radius = algs.sqrt(x**2 + y**2)
theta = algs.arctan2(y, x)

# Cylindrical Direction Vectors
# radVec
radVec = coordVec.copy()
radVec[:, 2] = radVec[:, 2] * 0
radVec = algs.norm(radVec)

# zVec
zVec = np.repeat([[0, 0, 1]], radVec[:, 0].size, axis=0)
zVec = algs.make_vector(*zVec.T)

# thetaVec
thetaVec = algs.cross(zVec, radVec)
thetaVec = algs.make_vector(*thetaVec.T)
thetaVec = algs.norm(thetaVec)

############################################
#      CREATING CARTESIAN VECTOR DATA
############################################
VelMeanVec = algs.make_vector(input0.PointData['Mean_X_Velocity'].Arrays[0],
                              input0.PointData['Mean_Y_Velocity'].Arrays[0],
                              input0.PointData['Mean_Z_Velocity'].Arrays[0])

VelRMSEVec = algs.make_vector(input0.PointData['RMSE_X_Velocity'].Arrays[0],
                              input0.PointData['RMSE_Y_Velocity'].Arrays[0],
                              input0.PointData['RMSE_Z_Velocity'].Arrays[0])
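
The script presumably goes on to express these Cartesian vectors in cylindrical components; a hedged sketch of that projection using the direction vectors built above (the projection step itself is an assumption, not shown in the original):

# Per-point dot products of the mean-velocity vector with the unit directions.
VelMean_r = np.sum(VelMeanVec * radVec, axis=1)        # radial component
VelMean_theta = np.sum(VelMeanVec * thetaVec, axis=1)  # tangential component
VelMean_z = np.sum(VelMeanVec * zVec, axis=1)          # axial component
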
Example no. 7
assert (1 + randomVec).DataSet is randomVec.DataSet

# Test slicing and indexing
assert algs.all(randomVec[randomVec[:,0] > 0.2].Arrays[0] - npa[npa[:,0] > 0.2] < 1E-7)
assert algs.all(randomVec[algs.where(randomVec[:,0] > 0.2)].Arrays[0] - npa[numpy.where(npa[:,0] > 0.2)] < 1E-7)
assert algs.all(randomVec[dsa.VTKCompositeDataArray([(slice(None, None, None), slice(0,2,None)), 2])].Arrays[0] - npa[:, 0:2] < 1E-6)

# Test ufunc
assert algs.all(algs.cos(randomVec) - numpy.cos(npa) < 1E-7)
assert algs.cos(randomVec).DataSet is randomVec.DataSet

# Various numerical ops implemented in VTK
g = algs.gradient(elev)
assert algs.all(g[0] == (1, 0, 0))

v = algs.make_vector(elev, g[:,0], elev)
assert algs.all(algs.gradient(v) == [[1, 0, 0], [0, 0, 0], [1, 0, 0]])

v = algs.make_vector(elev, g[:,0], elev2)
assert algs.all(algs.curl(v) == [1, 0, 0])

v = algs.make_vector(elev, elev2, 2*elev3)
g = algs.gradient(v)
assert g.DataSet is v.DataSet
assert algs.all(algs.det(g) == 2)

assert algs.all(algs.eigenvalue(g) == [2, 1, 1])

assert algs.all(randomVec[:,0] == randomVec[:,0])

ssource = vtk.vtkSphereSource()
Example no. 8
def make_targets(les_vtk, y_type, Ls=1, Us=1, ros=1):
    # Imports needed to make this snippet self-contained.
    import copy
    import numpy as np
    from tqdm import tqdm
    from vtk.numpy_interface import algorithms as algs
    from vtk.numpy_interface import dataset_adapter as dsa

    small = np.cbrt(np.finfo(float).tiny)
    Ps = 0.5 * ros * Us**2

    les_nnode = les_vtk.number_of_points

    delij = np.zeros([les_nnode, 3, 3])
    for i in range(0, 3):
        delij[:, i, i] = 1.0

    # Wrap vista object in dsa wrapper
    les_dsa = dsa.WrapDataObject(les_vtk)

    if (y_type == 'classification'):
        ntarg = 5
        y_targ = np.zeros([les_nnode, ntarg], dtype=int)
        print('Classifier targets:')
    elif (y_type == 'regression'):
        ntarg = 2
        y_targ = np.zeros([les_nnode, ntarg], dtype=float)
        print('Regressor targets:')
    else:
        raise ValueError("y_type must be 'classification' or 'regression'")

    y_raw = np.zeros([les_nnode, ntarg])
    target_labels = np.empty(ntarg, dtype='object')
    targ = 0

    # Copy Reynolds stresses to tensor
    uiuj = np.zeros([les_nnode, 3, 3])
    uiuj[:, 0, 0] = les_dsa.PointData['uu']
    uiuj[:, 1, 1] = les_dsa.PointData['vv']
    uiuj[:, 2, 2] = les_dsa.PointData['ww']
    uiuj[:, 0, 1] = les_dsa.PointData['uv']
    uiuj[:, 0, 2] = les_dsa.PointData['uw']
    uiuj[:, 1, 2] = les_dsa.PointData['vw']
    uiuj[:, 1, 0] = uiuj[:, 0, 1]
    uiuj[:, 2, 0] = uiuj[:, 0, 2]
    uiuj[:, 2, 1] = uiuj[:, 1, 2]

    # resolved TKE
    tke = 0.5 * (uiuj[:, 0, 0] + uiuj[:, 1, 1] + uiuj[:, 2, 2])

    # Velocity vector
    U = algs.make_vector(les_dsa.PointData['U'], les_dsa.PointData['V'],
                         les_dsa.PointData['W'])

    # Velocity gradient tensor and its transpose
    # J[:,i-1,j-1] is dUidxj
    # Jt[:,i-1,j-1] is dUjdxi
    Jt = algs.gradient(U)  # Jt is this one as algs uses j,i ordering
    J = algs.apply_dfunc(np.transpose, Jt, (0, 2, 1))

    # Strain and vorticity tensors
    Sij = 0.5 * (J + Jt)
    Oij = 0.5 * (J - Jt)

    # Anisotropy tensor and eigenvalues
    aij = copy.deepcopy(Sij) * 0.0
    inv2 = np.zeros(les_nnode)
    inv3 = np.zeros(les_nnode)

    for i in range(0, 3):
        for j in range(0, 3):
            aij[:, i, j] = uiuj[:, i, j] / (2.0 * tke + small) - delij[:, i, j] / 3.0

    # Get eigenvalues of aij
    eig = algs.eigenvalue(aij)
    eig1 = eig[:, 0]
    eig2 = eig[:, 1]
    eig3 = eig[:, 2]

    # Get coords on barycentric triangle from eigenvalues
    xc = [1.0, 0.0, 0.5]  # x coords of triangle corners (1-comp, 2-comp, 3-comp states)
    yc = [0.0, 0.0, np.cos(np.pi / 6.0)]  # y coords of the triangle corners
    C1c = eig1 - eig2
    C2c = 2 * (eig2 - eig3)
    C3c = 3 * eig3 + 1
    x0 = C1c * xc[0] + C2c * xc[1] + C3c * xc[2]
    y0 = C1c * yc[0] + C2c * yc[1] + C3c * yc[2]
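    # C1c, C2c and C3c are the barycentric weights of the 1-, 2- and 3-component
    # limiting states; because aij is traceless they sum to eig1 + eig2 + eig3 + 1 = 1,
    # so (x0, y0) is a convex combination of the triangle corners for realizable stresses.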

    if (y_type == 'classification'):
        # Target 1: Negative eddy viscosity
        #########################################
        print('1: Negative eddy viscosity')
        A = np.zeros(les_nnode)
        B = np.zeros(les_nnode)

        for i in range(0, 3):
            for j in range(0, 3):
                A += -uiuj[:, i, j] * Sij[:, i, j] + (
                    2.0 / 3.0) * tke * delij[:, i, j] * Sij[:, i, j]
                B += 2.0 * Sij[:, i, j] * Sij[:, i, j]

        Str = algs.sqrt(B)  # magnitude of Sij strain tensor (used later)
        nu_t = A / (B + small)
        nu_t = nu_t / (Us * Ls)
        y_raw[:, targ] = nu_t

        index = algs.where(nu_t < 0.0)
        y_targ[index, targ] = 1
        target_labels[targ] = 'Negative eddy viscosity'
        targ += 1

        # Target 2: Deviation from plane shear
        #################################################
        print('2: Deviation from plane shear turbulence')
        # Get distance from plane shear line
        p1 = (1 / 3, 0)
        p2 = (0.5, np.sqrt(3) / 2)
        dist = abs((p2[1] - p1[1]) * x0 -
                   (p2[0] - p1[0]) * y0 + p2[0] * p1[1] -
                   p2[1] * p1[0]) / np.sqrt((p2[1] - p1[1])**2 +
                                            (p2[0] - p1[0])**2)
        y_raw[:, targ] = dist
        index = algs.where(dist > 0.25)

        y_targ[index, targ] = 1
        target_labels[targ] = 'Deviation from plane shear turbulence'
        targ += 1

        # Target 3: Anisotropy of turbulence
        ##########################################
        print('3: Anisotropy of turbulence')
        Caniso = 1.0 - C3c
        y_raw[:, targ] = Caniso
        index = algs.where(Caniso > 0.5)
        y_targ[index, targ] = 1
        target_labels[targ] = 'Stress anisotropy'
        targ += 1

        # Target 4: Negative Pk
        ############################################
        print('4: Negative Pk')
        A = np.zeros(les_nnode)
        for i in range(0, 3):
            for j in range(0, 3):
                A[:] += (-uiuj[:, i, j] * J[:, i, j])

        A = A * Ls / Us**3
        y_raw[:, targ] = A
        index = algs.where(A < -0.0005)

        y_targ[index, targ] = 1
        target_labels[targ] = 'Negative Pk'
        targ += 1

        # Target 5: 2-eqn Cmu constant
        ############################################
        print('5: 2-equation Cmu constant')
        A = np.zeros(les_nnode)
        for i in range(0, 3):
            for j in range(0, 3):
                A[:] += aij[:, i, j] * Sij[:, i, j]

        Cmu = nu_t**2.0 * (Str / (tke + small))**2.0

        y_raw[:, targ] = Cmu
        allow_err = 0.25  # i.e. 25% err
        Cmu_dist = algs.abs(Cmu - 0.09)
        #    index = algs.where(Cmu_dist>allow_err*0.09)
        index = algs.where(Cmu > 1.1 * 0.09)
        y_targ[index, targ] = 1
        target_labels[targ] = 'Cmu != 0.09'
        targ += 1

    #    ab = ((uiuj[:,1,1]-uiuj[:,0,0])*U[:,0]*U[:,1] + uiuj[:,0,1]*(U[:,0]**2-U[:,1]**2))/(U[:,0]**2+U[:,1]**2)
    #    y_raw[:,err] = ab

    #    # Target 3: Non-linearity
    #    ###############################
    #    print('3: Non-linearity')
    #
    #    # Build cevm equation in form A*nut**3 + B*nut**2 + C*nut + D = 0
    #    B, A = build_cevm(Sij,Oij)
    #    B = B/(tke      +1e-12)
    #    A = A/(tke**2.0 +1e-12)
    #
    #    C = np.zeros_like(A)
    #    D = np.zeros_like(A)
    #    for i in range(0,3):
    #        for j in range(0,3):
    #            C += -2.0*Sij[:,i,j]*Sij[:,i,j]
    #            D += (2.0/3.0)*tke*Sij[:,i,j]*delij[:,i,j] - uiuj[:,i,j]*Sij[:,i,j]
    #
    #    nu_t_cevm = np.empty_like(nu_t)
    #    for i in tqdm(range(0,les_nnode)):
    #        # Find the roots of the cubic equation (i.e. potential values for nu_t_cevm)
    #        roots = np.roots([A[i],B[i],C[i],D[i]])
    #        roots_orig = roots
    #
    #        # Remove complex solutions (with imaginary part > a small number, to allow for numerical error)
    #        #roots = roots.real[abs(roots.imag)<1e-5]  #NOTE - Matches nu_t much better without this?!
    #
    #        # Out of remaining solutions(s), pick one that is closest to linear nu_t
    #        if(roots.size==0):
    #            nu_t_cevm[i] = nu_t[i]
    #        else:
    #            nu_t_cevm[i] = roots.real[np.argmin( np.abs(roots - np.full(roots.size,nu_t[i])) )]
    #
    #    normdiff = algs.abs(nu_t_cevm - nu_t) / (algs.abs(nu_t_cevm) + algs.abs(nu_t) + 1e-12)
    #    y_raw[:,err] = nu_t_cevm
    #
    #    index = algs.where(normdiff>0.15)
    #    y_targ[index,err] = 1
    #    error_labels[err] = 'Non-linearity'
    #    err += 1

    elif (y_type == 'regression'):
        # Target 3: Anisotropy of turbulence
        ##########################################
        print('1: Anisotropy of turbulence')
        Caniso = 1.0 - C3c
        y_raw[:, targ] = Caniso
        y_targ[:, targ] = Caniso
        target_labels[targ] = 'Stress anisotropy'
        targ += 1

    return y_raw, y_targ, target_labels
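
A hedged usage sketch for make_targets; the file path is a placeholder, and it assumes the dataset exposes number_of_points and carries the U, V, W velocities and uu, vv, ww, uv, uw, vw stresses as point data (read here with pyvista, which is an assumption about the intended workflow):

import pyvista as pv  # assumption: the LES data is read with pyvista

les_vtk = pv.read("les_solution.vtu")  # placeholder path
y_raw, y_targ, target_labels = make_targets(les_vtk, y_type='classification')

for label, column in zip(target_labels, y_targ.T):
    print(label, int(column.sum()), 'flagged nodes')
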
Example no. 9
from vtk.numpy_interface import dataset_adapter as dsa
from vtk.numpy_interface import algorithms as algs
import h5py as h5
f1 = h5.File(
    "/pkg/etc/clion/system/cmake/generated/2774870a/2774870a/Debug/example/em/tokamak0007.h5"
)
x = f1["/record/0/H"][:]['p']['v']
coords = algs.make_vector(x[:, 0], x[:, 1], x[:, 2])
pts = vtk.vtkPoints()
pts.SetData(dsa.numpyTovtkDataArray(coords, "Points"))
output.SetPoints(pts)
Example no. 11
# This is the requested time-step. This may not be exactly equal to the
# timesteps published in RequestInformation(). Your code must handle that
# correctly
req_time = GetUpdateTimestep(self)

output = self.GetOutput()

# TODO: Generate the data as you want.
from vtk.numpy_interface import dataset_adapter as dsa
from vtk.numpy_interface import algorithms as algs
import h5py as h5
f1 = h5.File("/pkg/clion/etc/clion/system/cmake/generated/2774870a/2774870a/Debug/example/em/tokamak.h5")
x = f1["/record/H"][:, :][:, req_time, 0]
y = f1["/record/H"][:, :][:, req_time, 1]
z = f1["/record/H"][:, :][:, req_time, 2]
coords = algs.make_vector(x, y, z)
pts = vtk.vtkPoints()
pts.SetData(dsa.numpyTovtkDataArray(coords, "Points"))
output.SetPoints(pts)
# Now mark the timestep produced.
output.GetInformation().Set(output.DATA_TIME_STEP(), req_time)


########################################################################################################
## Script (RequestInformation)
########################################################################################################

def SetOutputTimesteps(algorithm, timesteps):
    executive = algorithm.GetExecutive()
    outInfo = executive.GetOutputInformation(0)
    outInfo.Remove(executive.TIME_STEPS())