Example #1
 def writeVTK(self, filename):
     import pyvtk
     origin = N.array([self.x_axis[0], self.y_axis[0], self.z_axis[0]])
     spacing = N.array([self.x_axis[1], self.y_axis[1], self.z_axis[1]]) \
               - origin
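     # origin and spacing are taken from the first two points of each axis,
     # so the axes are assumed to be uniformly spaced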
     values = pyvtk.Scalars(N.ravel(N.transpose(self.data)),
                            'electron density')
     data = pyvtk.VtkData(
         pyvtk.StructuredPoints(self.data.shape, origin, spacing),
         'Density map', pyvtk.PointData(values))
     data.tofile(filename, format='binary')
Example #2
    def field2VTKData (self,name=None,lookupTable=None):
        """
        Creates VTK representation of the receiver. Useful for visualization. Requires pyvtk module.

        :param str name: human-readable name of the field
        :param pyvtk.LookupTable lookupTable: color lookup table
        :return: Instance of pyvtk
        :rtype: pyvtk
        """
        import pyvtk

        if name is None:
            name=self.getFieldIDName()
        if lookupTable and not isinstance(lookupTable,pyvtk.LookupTable):
            log.info('ignoring lookupTable which is not a pyvtk.LookupTable instance.')
            lookupTable=None
        if lookupTable is None:
            lookupTable=pyvtk.LookupTable([(0,.231,.298,1.0),(.4,.865,.865,1.0),(.8,.706,.016,1.0)],name='coolwarm')
            # The Scalars deliberately reference a lookup table name other than 'coolwarm'; ParaView then applies its own color mapping instead of taking 'coolwarm' from the *.vtk file, which would otherwise prevent changing the color mapping in ParaView.
            scalarsKw=dict(name=name,lookup_table='default')
        else:
            scalarsKw=dict(name=name,lookup_table=lookupTable.name)
        # see http://cens.ioc.ee/cgi-bin/cvsweb/python/pyvtk/examples/example1.py?rev=1.3 for an example
        vectorsKw=dict(name=name) # vectors don't have a lookup_table

        if (self.fieldType == FieldType.FT_vertexBased):
            if (self.getValueType() == ValueType.Scalar):
                return pyvtk.VtkData(self.mesh.getVTKRepresentation(), pyvtk.PointData(pyvtk.Scalars([val[0] for val in self.value],**scalarsKw),lookupTable), 'Unstructured Grid Example')
            elif (self.getValueType() == ValueType.Vector):
                return pyvtk.VtkData(self.mesh.getVTKRepresentation(), pyvtk.PointData(pyvtk.Vectors(self.value,**vectorsKw),lookupTable), 'Unstructured Grid Example')
            elif (self.getValueType() == ValueType.Tensor):
                return pyvtk.VtkData(self.mesh.getVTKRepresentation(), pyvtk.PointData(pyvtk.Tensors(self.getMartixForTensor(self.value),**vectorsKw),lookupTable),'Unstructured Grid Example')
            
        else:
            if (self.getValueType() == ValueType.Scalar):
                return pyvtk.VtkData(self.mesh.getVTKRepresentation(), pyvtk.CellData(pyvtk.Scalars([val[0] for val in self.value],**scalarsKw),lookupTable), 'Unstructured Grid Example')
            elif (self.getValueType() == ValueType.Vector):
                return pyvtk.VtkData(self.mesh.getVTKRepresentation(), pyvtk.CellData(pyvtk.Vectors(self.value,**vectorsKw),lookupTable), 'Unstructured Grid Example')
            elif (self.getValueType() == ValueType.Tensor):
                return pyvtk.VtkData(self.mesh.getVTKRepresentation(), pyvtk.CellData(pyvtk.Tensors(self.getMartixForTensor(self.value),**vectorsKw),lookupTable),'Unstructured Grid Example')
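A hypothetical use of the method above (here `field` stands for any object providing field2VTKData(); the name and output file are illustrative):

vtk_data = field.field2VTKData(name='temperature')
vtk_data.tofile('temperature_field', format='ascii')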
Example #3
 def visitPrint(self, k):
     """
     Uses PyVTK to write out data in a VisIt Visualization Tool capable format.
     """
     import pyvtk
     uVel = pyvtk.Scalars(self.fluid.u().flatten(), name='u')
     vVel = pyvtk.Scalars(self.fluid.v().flatten(), name='v')
     totp = pyvtk.Scalars(self.fluid.p().flatten(), name='p')
     celldat = pyvtk.PointData(uVel, vVel, totp)
     grid = pyvtk.RectilinearGrid(self.domain.x, self.domain.y,
                                  self.domain.z)
     vtk = pyvtk.VtkData(grid, celldat)
     vtk.tofile('data%i' % k)
Example #4
import pyvtk

def make_vtk(**kwargs):
    args = []
    if 'grid' in kwargs:
        args.append(kwargs.get('grid'))
    if 'point_data' in kwargs:
        data = kwargs['point_data']
        args.append(pyvtk.PointData(*data))
    if 'cell_data' in kwargs:
        data = kwargs['cell_data']
        args.append(pyvtk.CellData(*data))
    if 'header' in kwargs:
        args.append(kwargs.get('header'))
    return pyvtk.VtkData(*args)
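A possible call of the helper above; the grid, scalar values and file name are illustrative, not from the original source:

import pyvtk

grid = pyvtk.StructuredPoints((2, 2, 2), (0.0, 0.0, 0.0), (1.0, 1.0, 1.0))
temps = pyvtk.Scalars([0, 1, 2, 3, 4, 5, 6, 7], name='temperature')
vtk_data = make_vtk(grid=grid, point_data=[temps], header='tiny example')
vtk_data.tofile('tiny_example', format='ascii')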
Example #5
def savevtk(v, fn='', lons=[], lats=[], levels=[]):
    """save tracer field into vtk format,
    """
    import pyvtk

    def norm(c):
        return (c - c[0]) / (c[-1] - c[0])

    z = levels / abs(levels).max() * (lons.max() - lons.min()) * 0.7

    point_data = pyvtk.PointData(pyvtk.Scalars(v.T.flatten()))
    vtk_object = pyvtk.VtkData(pyvtk.RectilinearGrid(x=lons, y=lats, z=z),
                               point_data)
    vtk_object.tofile(fn)
    return
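An illustrative call of savevtk, assuming lons, lats and levels are 1-D NumPy arrays and v is a tracer array whose size matches the grid (shapes and values below are made up):

import numpy as np

lons = np.linspace(0.0, 350.0, 36)
lats = np.linspace(-85.0, 85.0, 18)
levels = np.linspace(10.0, 5000.0, 20)
v = np.random.rand(lons.size, lats.size, levels.size)
savevtk(v, fn='tracer', lons=lons, lats=lats, levels=levels)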
Example #6
    def write_fields_vtk(self,
                         comps=None,
                         iteration=0,
                         format='binary',
                         fixed_zmin=None):
        """
        Convert the given list of scalar and vector fields from the
        openPMD format to a VTK container, and write it to the disk.

        Parameters
        ----------
        comps: list or None
            List of scalar and vector fields to be converted. If None, it
            converts all available components provided by OpenPMDTimeSeries

        iteration: int
            iteration number to treat (default 0)

        format: str
            format for the VTK file, either 'ascii' or 'binary'

        fixed_zmin: float or None
            When treating the simulation data for the animation, in
            some cases (e.g. with a moving window) it is useful to
            fix the origin of the visualization domain. If a float is
            given, it will be used as the z-origin of the visualization domain
        """
        # Check available fields if comps is not defined
        if comps is None:
            comps = self.ts.avail_fields

        # Register z-origin of the visualization domain
        self.fixed_zmin = fixed_zmin

        # Convert the fields one by one and store them to the list
        vtk_container = []
        for comp in comps:
            vtk_container.append(self._convert_field(comp,
                                                     iteration=iteration))

        # Make a number string for the file to write
        istr = str(iteration)
        while len(istr) < 7:
            istr = '0' + istr

        # Create the VTK data container and write it to the disk
        vtk.VtkData(self.grid, vtk.PointData(*vtk_container))\
            .tofile(self.path+'vtk_fields_'+istr, format=format)
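A hypothetical driver for the converter method above; the class name Opmd2VTKConverter and its constructor are assumptions, only the write_fields_vtk() arguments come from the signature shown:

conv = Opmd2VTKConverter('./diags/hdf5/')  # assumed wrapper around an OpenPMDTimeSeries
for it in conv.ts.iterations:
    conv.write_fields_vtk(comps=['E', 'rho'], iteration=it, format='binary')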
Example #7
def plot_vtk(A, V, partition):
   V = numpy.append( V, numpy.zeros((A.shape[0],1)), axis=1 )

   triples = []
   A = scipy.sparse.triu(A,k=1).tocsr()
   for i in range(A.shape[0]-1):
      row = A.indices[A.indptr[i]:A.indptr[i+1]].tolist()
      for t1, t2 in itertools.combinations(row, 2):
         if A[t1, t2]:
            triples.append((i, t1, t2))

   vtkelements = pyvtk.VtkData(
       pyvtk.UnstructuredGrid(V, triangle=triples),
       "Mesh",
       pyvtk.PointData(pyvtk.Scalars(partition, name="partition")))
   vtkelements.tofile('{0}_{1}.vtk'.format(graph_name,num_parts))
Example #8
    def solnToVTK(self):
        done = False
        lcv = 0

        coords = self.loadVec('coords.dat', 3)
        dims = list(coords.shape[:-1])
        try:
            dx = coords[1, 1, 1] - coords[0, 0, 0]
        except IndexError:
            try:
                dx = coords[0, 1, 1] - coords[0, 0, 0]
            except IndexError:
                try:
                    dx = coords[1, 0, 1] - coords[0, 0, 0]
                except IndexError:
                    dx = coords[1, 1, 0] - coords[0, 0, 0]

        dx = np.where(dx == 0., 0.1, dx)
        if dims[2] == 1:
            dims[2] = 2
        dims = tuple(dims)
        print dims
        dx = tuple(dx)
        vtkgrid = pyvtk.StructuredPoints(dims, coords[0, 0, 0], dx)

        while not done:
            try:
                if not os.path.exists(self._file_prefix + 'prs%03d.dat' % lcv):
                    raise IOError('Nonexistent file')
            except IOError:
                done = True
                print 'Read %d timesteps' % lcv
            else:
                prs_data = self.loadVecToVTK('prs%03d.dat' % lcv, 1)
                vel_data = self.loadVecToVTK('u%03d.dat' % lcv, 3)
                wall_data = self.loadVecToVTK('walls%03d.dat' % lcv, 1)

                pointdata = pyvtk.PointData(prs_data, vel_data, wall_data)
                data = pyvtk.VtkData(
                    vtkgrid,
                    self._prefix.strip('_') + ' step %d' % lcv, pointdata)
                data.tofile(self._file_prefix + 'soln_%03d.vtk' % lcv)
                lcv += 1
        return
Example #9
        def save_vtk(self, modei, scale):
            path = str(
                QFileDialog.getExistingDirectory(self,
                                                 "Select Directory")) + '/'
            print path

            U = self.MODOS.coord.copy()
            nnos = self.MODOS.nnos
            ngl = self.MODOS.ngl
            nodesr = self.MODOS.nodesr
            nodesl = list(set(range(nnos)) - set(nodesr))
            scale = float(scale)
            modei = int(modei) - 1
            g = self.MODOS.g
            U_i = zeros((nnos, g), float)
            U_i[nodesl] = self.MODOS.modo[:, modei].reshape(
                self.MODOS.modo[:, modei].size / g, g)
            Ux, Uy, Uz = U_i[:, 0].copy(), U_i[:, 1].copy(), U_i[:, 2].copy()
            Rx, Ry, Rz = U_i[:, 3].copy(), U_i[:, 4].copy(), U_i[:, 5].copy()
            U_i = U_i * scale
            U = U + U_i[:, [0, 1, 2]]

            connec_volume = self.MODOS.connec_nacele
            connec_face = array(self.MODOS.connec_hub.tolist() +
                                self.MODOS.connec_tower.tolist() +
                                self.MODOS.connec_blades.tolist())

            vtk = pyvtk.VtkData(
                pyvtk.UnstructuredGrid(U,
                                       triangle=connec_face,
                                       tetra=connec_volume),
                pyvtk.PointData(pyvtk.Scalars(Ux, name='Ux'),
                                pyvtk.Scalars(Uy, name='Uy'),
                                pyvtk.Scalars(Uz, name='Uz'),
                                pyvtk.Scalars(Rx, name='Rx'),
                                pyvtk.Scalars(Ry, name='Ry'),
                                pyvtk.Scalars(Rz, name='Rz')))

            vtk.tofile(path + 'MODAL_' + str(modei + 1))
Example #10
def export_paraview(self):
    if self.export_paraview == 0:
        self.vtk_points = [_v.coord for _v in self.vertices[1:]]
        self.vtk_triangle = [[
            _e.vertices[0].tag - 1, _e.vertices[1].tag - 1,
            _e.vertices[2].tag - 1
        ] for _e in self.elements[1:] if _e.typ == 2]
    pressure = [np.abs(_v.sol[3]) for _v in self.vertices[1:]]

    # Hack to make the tower and the letters blink in IAGS 20201
    pressure_max = max(pressure)
    light_on = self.export_paraview % 4
    if light_on < 2:
        tower_on = 0
    else:
        tower_on = 1

    for _ent in self.fem_entities:
        if _ent.dim == 2:
            if _ent.mat.MEDIUM_TYPE == "eqf":
                if _ent.mat.name == "tower":
                    for _elem in _ent.elements:
                        for _v in _elem.vertices:
                            _v.sol[3] = (1 +
                                         (-1)**tower_on) * (pressure_max / 2.)
                if _ent.mat.name == "letter":
                    for _elem in _ent.elements:
                        for _v in _elem.vertices:
                            _v.sol[3] = (1 + (-1)**
                                         (tower_on + 1)) * (pressure_max / 2.)

    pressure = [np.abs(_v.sol[3]) for _v in self.vertices[1:]]
    vtk = pyvtk.VtkData(
        pyvtk.UnstructuredGrid(self.vtk_points, triangle=self.vtk_triangle),
        pyvtk.PointData(pyvtk.Scalars(pressure, name='Pressure')))
    vtk.tofile("vtk/" + self.name_project + "-{}".format(self.export_paraview))
    self.export_paraview += 1
Example #11
File: generate.py Project: Rauell/neuron
# generate the data.
from numpy import *
import scipy.special
x = (arange(50.0) - 25) / 2.0
y = (arange(50.0) - 25) / 2.0
r = sqrt(x[:, newaxis]**2 + y**2)
z = 5.0 * scipy.special.j0(r)  # Bessel function of order 0
# now dump the data to a VTK file.
import pyvtk
# Flatten the 2D array data as per VTK's requirements.
z1 = reshape(transpose(z), (-1, ))
point_data = pyvtk.PointData(pyvtk.Scalars(z1))
grid = pyvtk.StructuredPoints((50, 50, 1), (-12.5, -12.5, 0), (0.5, 0.5, 1))
data = pyvtk.VtkData(grid, point_data)
data.tofile('/tmp/test.vtk')
Example #12
def write_vtk(iproc):
    if args.nproc == 1:
        nc_surf_local = Dataset(args.in_surface_nc, 'r', format='NETCDF4')
        iproc = 0
    else:
        # copy netcdf file for parallel access
        tempnc = args.out_vtk + '/surface_temp.nc' + str(iproc)
        shutil.copy(args.in_surface_nc, tempnc)
        nc_surf_local = Dataset(tempnc, 'r', format='NETCDF4')

    # write vtk
    if args.verbose and iproc == 0:
        clock0 = time.clock()
        print('Generating snapshot...')
    for it, istep in enumerate(steps):
        if it % args.nproc != iproc:
            continue
        if args.min_step is not None:
            if it < args.min_step:
                continue
        if args.max_step is not None:
            if it > args.max_step:
                continue
        disp_curl = np.zeros(nstation)
        eleTag_last = -1
        fourier_last = None
        for istation, dist in enumerate(dists):
            # poles cannot be computed correctly
            if (dist < delta or dist > np.pi - delta):
                disp_curl[istation] = 0.
                continue
            if eleTags[istation] == eleTag_last:
                fourier = fourier_last
            else:
                fourier_r = nc_surf_local.variables['edge_' +
                                                    str(eleTags[istation]) +
                                                    'r'][istep, :]
                fourier_i = nc_surf_local.variables['edge_' +
                                                    str(eleTags[istation]) +
                                                    'i'][istep, :]
                fourier = fourier_r[:] + fourier_i[:] * 1j
                fourier_last = fourier
                eleTag_last = eleTags[istation]
            nu_p_1 = int(len(fourier) / nPntEdge / 3)
            wdotf = np.zeros((3, nu_p_1), dtype=fourier.dtype)
            wdotf1 = np.zeros((3, nu_p_1), dtype=fourier.dtype)
            for idim in np.arange(0, 3):
                start = idim * nPntEdge * nu_p_1
                end = idim * nPntEdge * nu_p_1 + nPntEdge * nu_p_1
                fmat = fourier[start:end].reshape(nPntEdge, nu_p_1)
                wdotf[idim] = weights[istation].dot(fmat)
                wdotf1[idim] = weights1[istation].dot(fmat)
            exparray = 2. * np.exp(np.arange(0, nu_p_1) * 1j * azims[istation])
            exparray[0] = 1.
            exparray1 = 2. * np.exp(
                np.arange(0, nu_p_1) * 1j * (azims[istation] + delta))
            exparray1[0] = 1.
            spz = wdotf.dot(exparray).real
            spz_dist1 = wdotf1.dot(exparray).real
            spz_azim1 = wdotf.dot(exparray1).real
            uR = spz[0] * np.cos(dist) - spz[2] * np.sin(dist)
            uR_azim1 = spz_azim1[0] * np.cos(dist) - spz_azim1[2] * np.sin(
                dist)
            duR = (uR_azim1 - uR) / delta / np.sin(dist)
            uT = spz[1]
            uT_dist1 = spz_dist1[1]
            duT = (uT_dist1 - uT) / (dists1[istation] - dist)
            disp_curl[istation] = duR - duT
        vtk = pyvtk.VtkData(
            vtk_points,
            pyvtk.PointData(pyvtk.Scalars(disp_curl, name='disp_curl')),
            'surface animation')
        vtk.tofile(args.out_vtk + '/surface_vtk_zcurl.' + str(it) + '.vtk',
                   'binary')
        if args.verbose:
            print('    Done with snapshot t = %f s; tstep = %d / %d; iproc = %d' \
                % (var_time[istep], it + 1, len(steps), iproc))
    # close
    nc_surf_local.close()

    # remove temp nc
    if args.nproc > 1:
        os.remove(tempnc)

    if args.verbose and iproc == 0:
        elapsed = time.clock() - clock0
        print('Generating snapshots done, ' + '%f sec elapsed.' % (elapsed))
Example #13
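The fragment below assumes 1-D arrays x, y, z and pointPressure already exist; a minimal synthetic setup (names chosen to match the fragment, values are made up) could be:

import numpy as np
import pyvtk
from scipy.spatial import Delaunay

rng = np.random.default_rng(0)
x, y, z = rng.random(30), rng.random(30), rng.random(30)
pointPressure = rng.random(30)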
# Compute the 2D Delaunay triangulation in the x-y plane
xTmp = list(zip(x, y))
tri = Delaunay(xTmp)

# Generate Cell Data
nCells = tri.nsimplex
cellTemp = np.random.rand(nCells)

# Zip the point co-ordinates for the VtkData input
points = list(zip(x, y, z))

vtk = pyvtk.VtkData(
    pyvtk.UnstructuredGrid(points, triangle=tri.simplices),
    pyvtk.PointData(pyvtk.Scalars(pointPressure, name='Pressure')),
    pyvtk.CellData(pyvtk.Scalars(cellTemp, name='Temperature')),
    '2D Delaunay Example')
vtk.tofile('Delaunay2D')
vtk.tofile('Delaunay2Db', 'binary')

# Compute the 3D Delaunay triangulation
xTmp = list(zip(x, y, z))
tri = Delaunay(xTmp)

# Generate Cell Data
nCells = tri.nsimplex
cellTemp = np.random.rand(nCells)

# Zip the point co-ordinates for the VtkData input
Example #14
import sys
sys.path = ['..'] + sys.path

#from pyvtk import *
import pyvtk as vtk

structure = vtk.PolyData(points=[[0, 0, 0], [1, 0, 0], [1, 1, 0], [0, 1, 0],
                                 [0, 0, 1], [1, 0, 1], [1, 1, 1], [0, 1, 1]],
                         polygons=[[0, 1, 2, 3], [4, 5, 6, 7], [0, 1, 5, 4],
                                   [2, 3, 7, 6], [0, 4, 7, 3], [1, 2, 6, 5]])
pointdata = vtk.PointData(
    vtk.Scalars([0, 1, 2, 3, 4, 5, 6, 7],
                name='sample_scalars',
                lookup_table='my_table'),
    vtk.LookupTable([[0, 0, 0, 1], [1, 0, 0, 1], [0, 1, 0, 1], [1, 1, 0, 1],
                     [0, 0, 1, 1], [1, 0, 1, 1], [0, 1, 1, 1], [1, 1, 1, 1]],
                    name='my_table'))

celldata = vtk.CellData(
    vtk.Scalars([0, 1, 2, 3, 4, 5], name='cell_scalars'),
    vtk.Normals(
        [[0, 0, -1], [0, 0, 1], [0, -1, 0], [0, 1, 0], [-1, 0, 0], [1, 0, 0]],
        name='cell_normals'),
    vtk.Field('FieldData',
              cellIds=[[0], [1], [2], [3], [4], [5]],
              faceAttributes=[[0, 1], [1, 2], [2, 3], [3, 4], [4, 5], [5, 6]]))

vtkdata = vtk.VtkData(structure, pointdata, celldata)
vtkdata.tofile('example1ascii', 'ascii')

#vtkdata.tofile('example1binary','binary')
Example #15
def write_vtk(iproc):
    if args.nproc == 1:
        nc_surf_local = Dataset(args.in_surface_nc, 'r', format='NETCDF4')
        iproc = 0
    else:
        # copy netcdf file for parallel access
        tempnc = args.out_vtk + '/surface_temp.nc' + str(iproc)
        shutil.copy(args.in_surface_nc, tempnc)
        nc_surf_local = Dataset(tempnc, 'r', format='NETCDF4')

    # write vtk
    if args.verbose and iproc == 0:
        clock0 = time.clock()
        print('Generating snapshot...')
    for it, istep in enumerate(steps):
        if it % args.nproc != iproc:
            continue
        if args.min_step is not None:
            if it < args.min_step:
                continue
        if args.max_step is not None:
            if it > args.max_step:
                continue
        if args.norm:
            disp_norm = np.zeros(nstation)
        else:
            disp = np.zeros((nstation, 3))
        eleTag_last = -1
        fourier_last = None
        for istation, dist in enumerate(dists):
            if eleTags[istation] == eleTag_last:
                fourier = fourier_last
            else:
                fourier_r = nc_surf_local.variables['edge_' +
                                                    str(eleTags[istation]) +
                                                    'r'][istep, :]
                fourier_i = nc_surf_local.variables['edge_' +
                                                    str(eleTags[istation]) +
                                                    'i'][istep, :]
                fourier = fourier_r[:] + fourier_i[:] * 1j
                fourier_last = fourier
                eleTag_last = eleTags[istation]
            nu_p_1 = int(len(fourier) / nPntEdge / 3)
            wdotf = np.zeros((3, nu_p_1), dtype=fourier.dtype)
            for idim in np.arange(0, 3):
                start = idim * nPntEdge * nu_p_1
                end = idim * nPntEdge * nu_p_1 + nPntEdge * nu_p_1
                fmat = fourier[start:end].reshape(nPntEdge, nu_p_1)
                wdotf[idim] = weights[istation].dot(fmat)
            exparray = 2. * np.exp(np.arange(0, nu_p_1) * 1j * azims[istation])
            exparray[0] = 1.
            spz = wdotf.dot(exparray).real
            if args.norm:
                disp_norm[istation] = np.linalg.norm(spz)
            else:
                disp[istation,
                     0] = spz[0] * np.cos(dist) - spz[2] * np.sin(dist)
                disp[istation, 1] = spz[1]
                disp[istation,
                     2] = spz[0] * np.sin(dist) + spz[2] * np.cos(dist)
        if args.norm:
            vtk = pyvtk.VtkData(
                vtk_points,
                pyvtk.PointData(pyvtk.Scalars(disp_norm, name='disp_norm')),
                'surface animation')
        else:
            vtk = pyvtk.VtkData(
                vtk_points,
                pyvtk.PointData(pyvtk.Vectors(disp, name='disp_RTZ')),
                'surface animation')
        vtk.tofile(args.out_vtk + '/surface_vtk.' + str(it) + '.vtk', 'binary')
        if args.verbose:
            print('    Done with snapshot t = %f s; tstep = %d / %d; iproc = %d' \
                % (var_time[istep], it + 1, len(steps), iproc))
    # close
    nc_surf_local.close()

    # remove temp nc
    if args.nproc > 1:
        os.remove(tempnc)

    if args.verbose and iproc == 0:
        elapsed = time.clock() - clock0
        print('Generating snapshots done, ' + '%f sec elapsed.' % (elapsed))
Example #16
#---------- </LOAD VARIABLES> ----------

#---------- <CREATE VTK FILE> ----------

wc = WavefieldComputer(wavefield, nu, s, z, sem_mesh)

for i_slice in range(SLICES):
    x_slice, y_slice, z_slice, wvf_slice = wc.compute_slice(
        RMIN[i_slice], RMAX[i_slice], PHIS_SLICES[i_slice])
    points_slice = list(zip(x_slice, y_slice, z_slice))
    range_slice = range(len(x_slice))
    for it in range(num_steps):
        vtk = pyvtk.VtkData(
            pyvtk.UnstructuredGrid(points_slice, range_slice),
            pyvtk.PointData(
                pyvtk.Scalars(wvf_slice[it], name='wavefield_slice')),
            'animation')
        vtk.tofile(OUTPUT_DIR + 'slices/' + 'slice_' +
                   str(int(RMIN[i_slice] * 1.e-3)) + '_' +
                   str(int(RMAX[i_slice] * 1.e-3)) + '_' +
                   str(PHIS_SLICES[i_slice]) + '_' + str(it) + '.vtk')

    ### write info file for each slice
    f = open(
        OUTPUT_DIR + 'slices/'
        'slice_' + str(int(RMIN[i_slice] * 1.e-3)) + '_' +
        str(int(RMAX[i_slice] * 1.e-3)) + '_' + str(PHIS_SLICES[i_slice]) +
        '_INFO.txt', 'w')
    f.write('########## SLICE INFO ##########\n')
    f.write('PHI (degrees) ' + str(PHIS_SLICES[i_slice]) + '\n')
    f.write('RMIN (m) ' + str(RMIN[i_slice]) + '\n')
Example #17
            fmat_last = fmat
            eleTag_last = etag
        wdotf = np.tensordot(weights[istation], fmat, ([0], [1]))
        exparray = 2. * np.exp(np.arange(0, nu_p_1) * 1j * azims[istation])
        exparray[0] = 1.
        spz = wdotf.dot(exparray).real
        if args.norm:
            disp_norm[istation] = np.linalg.norm(spz)
        else:
            disp[istation, 0] = spz[0] * np.cos(dist) - spz[2] * np.sin(dist)
            disp[istation, 1] = spz[1]
            disp[istation, 2] = spz[0] * np.sin(dist) + spz[2] * np.cos(dist)
    if args.norm:
        vtk = pyvtk.VtkData(
            vtk_points,
            pyvtk.PointData(pyvtk.Scalars(disp_norm, name='disp_norm')),
            'surface animation')
    else:
        vtk = pyvtk.VtkData(
            vtk_points, pyvtk.PointData(pyvtk.Vectors(disp, name='disp_RTZ')),
            'surface animation')
    vtk.tofile(args.out_vtk + '/surface_vtk.' + str(it) + '.vtk', 'binary')
    if args.verbose:
        elapsed = time.clock() - clock0s
        print('    Done with snapshot t = %f s; tstep = %d / %d, rank = %d, elapsed = %f' \
            % (var_time[istep], it + 1, len(steps), mpi_rank, elapsed))

if args.verbose and mpi_rank == 0:
    elapsed = time.clock() - clock0
    print('Generating snapshots done, ' + '%f sec elapsed.' % (elapsed))
Example #18
    def write_species_vtk(self,
                          species=None,
                          iteration=0,
                          format='binary',
                          scalars=['ux', 'uy', 'uz', 'w'],
                          select=None,
                          zmin_fixed=None,
                          sample_ptcl=None):
        """
        Convert the given list of species from the openPMD format to
        a VTK container, and write it to the disk.

        Parameters
        ----------
        species: list or None
            List of species names to be converted. If None, it
            converts all available species provided by OpenPMDTimeSeries

        scalars: list of strings
            list of values associated with each particle to be included.
            ex. : 'charge', 'id', 'mass', 'x', 'y', 'z', 'ux', 'uy', 'uz', 'w'

        iteration: int
            iteration number to treat (default 0)

        select: dict
            dictionary to impose a selection on the particles,
            as it is defined in openPMD_viewer

        format: str
            format for the VTK file, either 'ascii' or 'binary'

        zmin_fixed: float or None
            When treating the simulation data for the animation, in
            some cases (e.g. with a moving window) it is useful to
            fix the origin of the visualization domain. If a float is
            given, it will be used as the z-origin of the visualization domain

        sample_ptcl: integer or None
            If not None, the species arrays will be reduced by skipping
            elements
        """
        # Check available fields if comps is not defined
        if species is None:
            species = self.ts.avail_species

        # register constants
        self.iteration = iteration
        self.zmin_fixed = zmin_fixed
        self.select = select
        self.scalars = scalars

        if sample_ptcl is None:
            self.sample_ptcl = 1
        else:
            self.sample_ptcl = sample_ptcl

        # Make a number string for the file to write
        istr = str(self.iteration)
        while len(istr) < 7:
            istr = '0' + istr
        name_base = self.path + 'vtk_specie_{:}_{:}'

        # Convert and save all the species
        for specie in species:
            points, scalars_to_add = self._get_species(specie)

            # Create the points container
            pts_vtk = vtk.PolyData(points)

            # Create the scalars containers
            scalars_vtk = []
            for i, scalar in enumerate(scalars_to_add):
                scalars_vtk.append(vtk.Scalars(scalar, name=self.scalars[i]))

            vtk.VtkData(pts_vtk, vtk.PointData(*scalars_vtk) )\
                .tofile(name_base.format(specie,istr), format=format)
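A hypothetical call of the method above; the converter instance conv is assumed to exist, and only the keyword arguments come from the signature shown:

conv.write_species_vtk(species=['electrons'], iteration=300, format='ascii',
                       scalars=['ux', 'uy', 'uz', 'w'], sample_ptcl=10)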
Example #19
import os

import numpy as np
import pyvtk

# lorenz_attractor and RK45Solver are assumed to be defined elsewhere in this project

f, jac = lorenz_attractor(10., 28., 8. / 3)
T = 40.
tau = 1e-2
ipf = 4
n_steps = int(T / tau)
n_particles = 10000
deviation = 0.01
start_point = np.array([0., 0., 0.])
starts = np.random.normal(start_point, deviation, (n_particles, 3))
r_0 = np.linalg.norm(starts - start_point, axis=1, ord=2)

cloud = [RK45Solver(f, x_0, 0., T, n_steps) for x_0 in starts]

for particle in cloud:
    particle.solve()

dirpath = 'vtk/000_10k/'
if not os.path.exists(dirpath):
    os.mkdir(dirpath)

for i in range(0, n_steps + 1, ipf):
    points = [p.x[:, i] for p in cloud]
    velocities = [f(i, x) for x in points]
    vtk = pyvtk.VtkData(
        pyvtk.UnstructuredGrid(points),
        pyvtk.PointData(pyvtk.Vectors(velocities, name='Velocity'),
                        pyvtk.Scalars(r_0, name='r_0')))
    vtk.tofile(dirpath + f'f{i}')
Example #20
def VTKgen(lat,
           lon,
           mask,
           depth=None,
           h=None,
           temp=None,
           salt=None,
           rho=None,
           dye1=None,
           dye2=None,
           dye3=None,
           u=None,
           v=None,
           w=None,
           seaice=None,
           shelf_base=None,
           shelf_thick=None,
           writebottom=False,
           fname='test',
           dirname='VTK',
           date=None,
           t=0):
    """ Writes ocean and ice shelf geometry and data (e.g., tracer, vel., sea-ice) into VTK files."""

    import numpy as np

    NY, NX = lat.shape
    if not os.path.isdir(dirname):
        os.system('mkdir ' + dirname)

    NY, NX = lat.shape
    if depth is not None:
        newlat = np.resize(lat, (2, NY, NX))
        newlon = np.resize(lon, (2, NY, NX))
        newdepth, bottom = get_depth(h, depth, mask)
        pp = f3(newlon, newlat, newdepth)
        structure = pyvtk.StructuredGrid([2, NY, NX], pp)
        path_to_file = str('%s/%s-bathymetry.vtk' % (dirname, fname))
        if os.path.isfile(path_to_file):
            print ' \n' + '==> ' + 'Bathymetry has already been written, moving on ...\n' + ''
        else:
            # create bottom/shape and depths
            newdepth = f1(newdepth)
            bottom = f1(bottom)
            pointdata = pyvtk.PointData(
                pyvtk.Scalars(newdepth, name='Depth'),
                pyvtk.Scalars(bottom, name='Bottom9999'))
            # saving the data
            vtk = pyvtk.VtkData(structure, pointdata)
            vtk.tofile(dirname + '/' + fname + '-bathymetry', 'binary')

        if writebottom == True:
            print ' \n' + '==> ' + 'Writing tracers/vel. just at the bottom layer ...\n' + ''
            data = []
            if temp is not None:
                tmp = np.zeros((2, NY, NX))
                if len(
                        temp.shape
                ) == 2:  # in case the user provides 2D array with bottom data
                    tmp[:, :, :] = temp[:, :]
                else:
                    tmp[:, :, :] = temp[-1, :, :]

                temp = f1(tmp)
                data.append("pyvtk.Scalars(temp,name='Temp')")

            if salt is not None:
                tmp = np.zeros((2, NY, NX))
                if len(salt.shape) == 2:
                    tmp[:, :, :] = salt[:, :]
                else:
                    tmp[:, :, :] = salt[-1, :, :]

                salt = f1(tmp)
                data.append("pyvtk.Scalars(salt,name='Salt')")

            if rho is not None:
                tmp = np.zeros((2, NY, NX))
                if len(rho.shape) == 2:
                    tmp[:, :, :] = rho[:, :]
                else:
                    tmp[:, :, :] = rho[-1, :, :]

                rho = f1(tmp)
                data.append("pyvtk.Scalars(rho,name='Rho')")

            if dye1 is not None:
                tmp = np.zeros((2, NY, NX))
                if len(dye1.shape) == 2:
                    tmp[:, :, :] = dye1[:, :]
                else:
                    tmp[:, :, :] = dye1[-1, :, :]

                dye1 = f1(tmp)
                data.append("pyvtk.Scalars(dye1,name='Dye1')")

            if dye2 is not None:
                tmp = np.zeros((2, NY, NX))
                if len(dye2.shape) == 2:
                    tmp[:, :, :] = dye2[:, :]
                else:
                    tmp[:, :, :] = dye2[-1, :, :]

                dye2 = f1(tmp)
                data.append("pyvtk.Scalars(dye2,name='Dye2')")

            if dye3 is not None:
                tmp = np.zeros((2, NY, NX))
                if len(dye3.shape) == 2:
                    tmp[:, :, :] = dye3[:, :]
                else:
                    tmp[:, :, :] = dye3[-1, :, :]

                dye3 = f1(tmp)
                data.append("pyvtk.Scalars(dye3,name='Dye3')")

            if u is not None and v is not None:
                w = np.zeros((2, NY, NX))  # no w vel for now
                tmpu = np.zeros((2, NY, NX))
                tmpv = np.zeros((2, NY, NX))
                if len(u.shape) == 2:
                    tmpu[:, :, :] = u[:, :]
                    tmpv[:, :, :] = v[:, :]
                else:
                    tmpu[:, :, :] = u[-1, :, :]
                    tmpv[:, :, :] = v[-1, :, :]

                vel = f3(tmpu, tmpv, w)
                data.append("pyvtk.Vectors(vel,name='Velocity')")

            if temp is not None or salt is not None or rho is not None or u is not None:

                for d in range(len(data)):
                    if d == 0:
                        tmp = data[d]
                    else:
                        tmp = tmp + ',' + data[d]

                s = str("pyvtk.PointData(%s)" % (tmp))
                pointdata = eval(s)
                # saving the data
                vtk = pyvtk.VtkData(structure, pointdata)
                if date is not None:
                    s = str("vtk.tofile('%s/%s-%s-bottom-%05d','binary')" %
                            (dirname, fname, date, t))
                    eval(s)

                else:
                    s = str("vtk.tofile('%s/%s-bottom-%05d','binary')" %
                            (dirname, fname, t))
                    eval(s)

    if shelf_base is not None and shelf_thick is not None:
        NZ_IS = 2
        newlat = np.resize(lat, (NZ_IS, NY, NX))
        newlon = np.resize(lon, (NZ_IS, NY, NX))
        dum, z = get_iceshelf(shelf_base, shelf_thick, NZ_IS)
        iceshelf = f1(dum)
        pp = f3(newlon, newlat, z)
        structure = pyvtk.StructuredGrid([NZ_IS, NY, NX], pp)
        pointdata = pyvtk.PointData(
            pyvtk.Scalars(iceshelf, name='IceShelf9999'))
        vtk = pyvtk.VtkData(structure, pointdata)
        if t > 0:
            s = str("vtk.tofile('%s/%s-ice-shelf-%05d','binary')" %
                    (dirname, fname, t))
            eval(s)

        else:
            vtk.tofile(dirname + '/' + fname + '-ice-shelf', 'binary')

    if writebottom == False:
        data = []
        if temp is not None:
            temp = f1(temp)
            data.append("pyvtk.Scalars(temp,name='Temp')")

        if salt is not None:
            salt = f1(salt)
            data.append("pyvtk.Scalars(salt,name='Salt')")

        if rho is not None:
            rho = f1(rho)
            data.append("pyvtk.Scalars(rho,name='Rho')")

        if dye1 is not None:
            dye1 = f1(dye1)
            data.append("pyvtk.Scalars(dye1,name='Dye1')")

        if dye2 is not None:
            dye2 = f1(dye2)
            data.append("pyvtk.Scalars(dye2,name='Dye2')")

        if dye3 is not None:
            dye3 = f1(dye3)
            data.append("pyvtk.Scalars(dye3,name='Dye3')")

        if u is not None and v is not None:
            if w is not None:
                vel = f3(u, v, w)
            else:
                w = np.zeros(u.shape)
                vel = f3(u, v, w)

            data.append("pyvtk.Vectors(vel,name='Velocity')")

        if seaice is not None:
            NZ, NY, NX = h.shape
            sice1 = np.zeros((NZ, NY, NX))
            sice1[0, :, :] = seaice[:, :]
            sice2 = np.zeros((NZ, NY, NX))
            seaice[seaice >= 0.15] = 1.0  # all values >= 15% are set to 1.0
            sice2[0, :, :] = seaice[:, :]
            seaice1 = f1(sice1)
            seaice2 = f1(sice2)
            data.append("pyvtk.Scalars(seaice1,name='Sea-ice')")
            data.append("pyvtk.Scalars(seaice2,name='Sea-ice-binary')")

        if temp is not None or salt is not None or rho is not None or u is not None or seaice is not None:
            NZ, NY, NX = h.shape
            # resize lon lat for real mesh
            newlat = np.resize(lat, (NZ, NY, NX))
            newlon = np.resize(lon, (NZ, NY, NX))
            pp = f3(newlon, newlat, h)
            structure = pyvtk.StructuredGrid([NZ, NY, NX], pp)

            for d in range(len(data)):
                if d == 0:
                    tmp = data[d]
                else:
                    tmp = tmp + ',' + data[d]

            s = str("pyvtk.PointData(%s)" % (tmp))
            pointdata = eval(s)
            # saving the data
            vtk = pyvtk.VtkData(structure, pointdata)
            if date is not None:
                s = str("vtk.tofile('%s/%s-%s-%05d','binary')" %
                        (dirname, fname, date, t))
                eval(s)

            else:
                s = str("vtk.tofile('%s/%s-%05d','binary')" %
                        (dirname, fname, t))
                eval(s)
Example #21
def ovf2vtk_main():
    start_time = time.time()
    
    banner_doc = 70*"-"+\
    "\novf2vtk --- converting ovf files to vtk files"+"\n"+\
    "Hans Fangohr, Richard Boardman, University of Southampton\n"""+70*"-"

    #extract command line arguments
    additions,params = getopt.getopt( sys.argv[1:], 'Vvhbta:', ["verbose","help","add=","binary","text","ascii","surface-effects","version","datascale=","posscale="] )

    #Note (fangohr 30/12/2006 20:52): the use of getopt is historic,
    #and so is the use of the name 'additions'. 

    #default value
    surfaceEffects = False
    datascale = 0.0 #0.0 has special meaning -- see help text
    posscale = 0.0  #0.0 has special meaning -- see help text

    #provide data from getopt.getopt (additions) in form of hash table
    options = {}
    for item in additions:
        if item[1]=='':
            options[item[0]] = None
        else:
            options[item[0]] = item[1]
    keys = options.keys()
            
    if "--surface-effects" in keys:
        surfaceEffects = True

    if "--posscale" in keys:
        posscale = float(options["--posscale"])

    if "--datascale" in keys:
        datascale = float(options["--datascale"])

    if "-v" in keys or "--verbose" in keys:
        print "running in verbose mode"
        debug = True
    else:
        debug = False

    if "-h" in keys or "--help" in keys:
        print __doc__
        sys.exit(0)

    if "-V" in keys or "--version" in keys:
        print "This is version %s." % ovf2vtk.__version__
        sys.exit(0)

    if len( params ) == 0:
        print __doc__
        print "ERROR: An input file (and an output file need to be specified)."
        sys.exit(1)
    else:
        infile = params[0]

    if len( params ) == 1:
        print __doc__
        print "ERROR: An input file AND an output file need to be specified."
        print "specify output file"
        sys.exit(1)
    else:
        outfile = params[1]

    # okay: it seems the essential parameters are given. Let's check for others:

    print banner_doc

    if debug:
        print "infile  = ", infile
        print "outfile = ",outfile
        print "additions= ",additions
        print "options = ",options
        print "datascale=",datascale
        print "posscale=",posscale

    #read data from infile
    vf = read_structured_omf_file( infile, debug )

    #compute magnitude for all cells 
    Ms = magnitude( vf )
    
    # Compute number of cells with non-zero Ms (rpb01r) 
    Ms_num_of_nonzeros = Numeric.sum( Numeric.not_equal( Ms, 0.0 ) ) 
    print "(%5.2f%% of %d cells filled)" % (100.0*Ms_num_of_nonzeros/len(Ms), len(Ms))


    #read metadata in data file 
    ovf_run  = analyze( infile  )



    #scale magnetisation data as required:
    if datascale == 0.0:
        scale = max( Ms )
        print "Will scale data down by %f" % scale
    else:
        scale = datascale
    vf = Numeric.divide( vf, scale )

    datatitle = ovf_run["Title:"]+"/%g" % (scale)

    #
    #need x, y and z vectors for vtk format
    #

    #taking actual spacings for dx, dy and dz results generally in
    #poor visualisation results (in particular for thin films, one
    #would like to have some magnification in z-direction).  Also:vtk
    #is not happy with positions on the 10e-9 scale, so one better
    #scales this to something closer to unity.



    #extract dimensions from file
    dimensions = ( int( ovf_run["xnodes:"] ), \
                   int( ovf_run["ynodes:"] ), \
                   int( ovf_run["znodes:"] ))

    if posscale != 0.0:  #scale data by given factor

        #find dx, dy, dz in SI units:
        Lx = abs(float(ovf_run["xmax:"])-float(ovf_run["xmin:"]))
        Ly = abs(float(ovf_run["ymax:"])-float(ovf_run["ymin:"]))
        Lz = abs(float(ovf_run["zmax:"])-float(ovf_run["zmin:"]))

        dx = Lx / float( ovf_run["xnodes:"] )
        dy = Ly / float( ovf_run["ynodes:"] )
        dz = Lz / float( ovf_run["znodes:"] )

        #find scale factor that OOMMF uses for xstepsize and xnodes,
        #etc. (Don't know how to get this directly.)
        xscale = Lx / (float( ovf_run["xnodes:"])*float(ovf_run["xstepsize:"]))
        yscale = Ly / (float( ovf_run["ynodes:"])*float(ovf_run["ystepsize:"]))
        zscale = Lz / (float( ovf_run["znodes:"])*float(ovf_run["zstepsize:"]))

        #extract x, y and z positions from ovf file.
        xbasevector = [None] * dimensions[0] #create empty vector
        for i in range( dimensions[0] ):
            #data is stored for 'centre' of each cuboid, therefore (i+0.5)
            xbasevector[i] = float( ovf_run["xbase:"] ) +\
                             (i+0.5) *float( ovf_run["xstepsize:"] )*xscale

        ybasevector = [None]* dimensions[1]
        for i in range( dimensions[1] ):
            ybasevector[i] = float( ovf_run["ybase:"] ) + (i+0.5) *float( ovf_run["ystepsize:"] )*yscale

        zbasevector = [None]* dimensions[2]
        for i in range( dimensions[2] ):
            zbasevector[i] = float( ovf_run["zbase:"] ) + (i+0.5) *float( ovf_run["zstepsize:"] )*zscale


        #finally, convert list to numerix (need to have this consistent)
        xbasevector = Numeric.array(xbasevector)/float(posscale)
        ybasevector = Numeric.array(ybasevector)/float(posscale)
        zbasevector = Numeric.array(zbasevector)/float(posscale)
        
    else: #posscale == 0.0
        #
        #this generally looks better:
        #
        xbasevector = Numeric.arange( dimensions[0] )
        ybasevector = Numeric.arange( dimensions[1] )
        zbasevector = Numeric.arange( dimensions[2] )

    #
    # write ascii or binary vtk-file (default is binary)
    #
    vtk_data = 'binary'
        
    if '--ascii' in keys or '-t' in keys or '--text' in keys:
        vtk_data = 'ascii'
        if debug:
            print "switching to ascii vtk-data"
    if '--binary' in keys or '-b' in keys:
        vtk_data = 'binary'
        if debug:
            print "switching to binary vtk-data"

    #
    #and now open vtk-file
    #
    vtkfilecomment =  "Output from ovf2vtk (version %s), %s, infile=%s. " % (ovf2vtk.__version__,\
                                                                   time.asctime(),\
                                                                   infile)
    vtkfilecomment += "Calling command line was '%s' executed in '%s'" % (" ".join(sys.argv),\
                                                                        os.getcwd())

    vtk = pyvtk.VtkData(    pyvtk.RectilinearGrid(xbasevector.tolist(),ybasevector.tolist(),zbasevector.tolist()),
                            vtkfilecomment,
                            pyvtk.PointData(pyvtk.Vectors( vf.tolist(), datatitle ) ),
                            format=vtk_data)

    #
    # now compute all the additional data such as angles, etc
    #

    # check whether we should do all
    keys = map(lambda x:x[1], additions)
    if "all" in keys:
        additions = []
        for add in add_features:
            additions.append(("--add",add))


    # when ovf2vtk was re-written using Numeric, I had to group
    # certain operations to make them fast. Now some switches are
    # unneccessary. (fangohr 25/08/2003 01:35)
    # To avoid executing the
    # same code again, we remember what we have computed already:
    
    done_angles = 0
    done_comp   = 0

    for add in additions:
        if add[0]=="-a" or add[0]=="--add":
            print "working on",add

            data=[]

            #compute observables that need more than one field value, i.e. div, rot
            if add[1][0:6] == "divrot":  #rotation = vorticity, curl
                
                (div, rot, rotx, roty, rotz, rotmag) = divergence_and_curl( vf, surfaceEffects, ovf_run )
                    
                comment = "curl, x-comp" 
                vtk.point_data.append( pyvtk.Scalars( rotx.tolist() , comment , lookup_table='default') )
                Comment = "curl, y-comp" 
                vtk.point_data.append( pyvtk.Scalars( roty.tolist() , comment , lookup_table='default') )
                comment = "curl, z-comp" 
                vtk.point_data.append( pyvtk.Scalars( rotz.tolist() , comment , lookup_table='default') )
                comment = "curl, magnitude" 
                vtk.point_data.append( pyvtk.Scalars( rotmag.tolist(), comment , lookup_table='default') )
                comment = "curl" 
                vtk.point_data.append( pyvtk.Vectors( rot.tolist() , comment ) )

                comment = "divergence"
                vtk.point_data.append( pyvtk.Scalars( div.tolist() , comment , lookup_table='default') )

                done_div_rot = True
            elif add[1] in ["Mx","My","Mz","Ms"]:                # components
                if not done_comp:
                    done_comp = 1
                                
                    comments = "x-component", "y-component", "z-component"

                    for data, comment in zip( components( vf ), comments):
                        vtk.point_data.append( pyvtk.Scalars( data.tolist(), comment,lookup_table='default' ) )
    
                    # magnitude of magnitisation
                    Mmag = magnitude( vf )
                    vtk.point_data.append( pyvtk.Scalars(Mmag.tolist(), "Magnitude",lookup_table='default' ) )

            elif add[1] in ["xy","xz","yz"]:
                if not done_angles:
                    done_angles = 1

                    # in-plane angles
                    comments = "xy in-plane angle", "yz in-plane angle", "xz in-plane angle"

                    for data, comment in zip( plane_angles( vf ), comments):
                        vtk.point_data.append( pyvtk.Scalars( data.tolist(), comment, lookup_table='default' )  )

            else:
                print "only xy, xz, Mx, My, Mz, divergence, Ms, or 'all' allowed after -a or --add"
                print "Current choice is",add
                print __doc__
                sys.exit(1)


    #
    #eventually, write the file
    #
    print "saving file (%s)" % (outfile)
    vtk.tofile(outfile,format=vtk_data)

    print "finished conversion (execution time %5.3s seconds)" % (time.time()-start_time)
Example #22
            fi.close()

    data = np.array(datal, dtype=float).T
    elems = np.array(elems, dtype=int).T

    pl = []
    for i in np.arange(data.shape[1]):
        pl.append(tuple(data[:3, i]))

    el = []
    for i in np.arange(elems.shape[1]):
        el.append(tuple(elems[:, i]))

    grid = pyvtk.UnstructuredGrid(pl, tetra=el)

    pointdata = pyvtk.PointData()
    for j, var in enumerate(vars[3:]):
        pointdata.append(
            pyvtk.Scalars(data[j + 3, :].tolist(),
                          name=var,
                          lookup_table='default'))

    vtk = pyvtk.VtkData(grid, pointdata, title)
    if binary:
        vtk.tofile(t_step + '.vtk', 'binary')
    else:
        vtk.tofile(t_step + '.vtk', 'ascii')

    print t_step + '.vtk written'
Example #23
def ovf2vtk_main():
    start_time = time.time()

    banner_doc = 70 * "-" + \
        "\novf2vtk --- converting ovf files to vtk files" + "\n" + \
        "Hans Fangohr, Richard Boardman, University of Southampton\n"""\
        + 70 * "-"

    # extracts command line arguments
    # If any of the arguments given appear in the command line, a list of...
    # ...these args and corresponding values (if any) is returned -> ('args').
    # Any arguments that don't match the given ones are returned in a...
    # ...separate list -> ('params')
    # Note (fangohr 30/12/2006 20:52): the use of getopt is historic,
    args, params = getopt.getopt(sys.argv[1:], 'Vvhbta:', [
        "verbose", "help", "add=", "binary", "text", "ascii",
        "surface-effects", "version", "datascale=", "posscale="
    ])

    # default value
    surfaceEffects = False
    datascale = 0.0  # 0.0 has special meaning -- see help text
    posscale = 0.0  # 0.0 has special meaning -- see help text

    # provide data from getopt.getopt (args) in form of dictionary
    options = {}
    for item in args:
        if item[1] == '':
            options[item[0]] = None
        else:
            options[item[0]] = item[1]
    keys = options.keys()

    # set system responses to arguments given
    if "--surface-effects" in keys:
        surfaceEffects = True

    if "--posscale" in keys:
        posscale = float(options["--posscale"])

    if "--datascale" in keys:
        datascale = float(options["--datascale"])

    if "-v" in keys or "--verbose" in keys:
        print("running in verbose mode")
        debug = True
    else:
        debug = False

    if "-h" in keys or "--help" in keys:
        print(__doc__)
        sys.exit(0)

    if "-V" in keys or "--version" in keys:
        print("This is version {:s}.".format(version))
        sys.exit(0)

    if len(params) == 0:
        print(__doc__)
        print("ERROR: An input file (and an output file need to be "
              "specified).")
        sys.exit(1)
    else:
        infile = params[0]

    if len(params) == 1:
        print(__doc__)
        print("ERROR: An input file AND an output file need to be specified.")
        print("specify output file")
        sys.exit(1)
    else:
        outfile = params[1]

    # okay: it seems the essential parameters are given.
    # Let's check for others:

    print(banner_doc)

    if debug:
        print("infile = {}".format(infile))
        print("outfile = {}".format(outfile))
        print("args = {}".format(args))
        print("options = {}".format(options))
        print("datascale = {}".format(datascale))
        print("posscale = {}".format(posscale))

    # read data from infile
    vf = omf.read_structured_omf_file(infile, debug)

    # compute magnitude for all cells
    Ms = ana.magnitude(vf)

    # Compute number of cells with non-zero Ms (rpb01r)
    Ms_num_of_nonzeros = Numeric.sum(Numeric.not_equal(Ms, 0.0))
    print("({:5.2f}% of {:d} cells filled)".format(
        100.0 * Ms_num_of_nonzeros / len(Ms), len(Ms)))

    # scale magnetisation data as required:
    if datascale == 0.0:
        scale = max(Ms)
        print("Will scale data down by {:f}".format(scale))
    else:
        scale = datascale
    # normalise vectorfield by scale
    vf = Numeric.divide(vf, scale)

    # read metadata in data file
    ovf_run = omf.analyze(infile)
    datatitle = ovf_run["Title:"] + "/{:g}".format(scale)

    #
    # need x, y and z vectors for vtk format
    #
    # taking actual spacings for dx, dy and dz results generally in
    # poor visualisation results (in particular for thin films, one
    # would like to have some magnification in z-direction).  Also:vtk
    # is not happy with positions on the 10e-9 scale, so one better
    # scales this to something closer to unity.

    # extract dimensions from file
    dimensions = (int(ovf_run["xnodes:"]), int(ovf_run["ynodes:"]),
                  int(ovf_run["znodes:"]))

    # scale data by given factor
    if posscale != 0.0:

        # find range between max and min values of components
        xrange = abs(float(ovf_run["xmax:"]) - float(ovf_run["xmin:"]))
        yrange = abs(float(ovf_run["ymax:"]) - float(ovf_run["ymin:"]))
        zrange = abs(float(ovf_run["zmax:"]) - float(ovf_run["zmin:"]))

        # define no. of x,y,z nodes
        xnodes = float(ovf_run["xnodes:"])
        ynodes = float(ovf_run["ynodes:"])
        znodes = float(ovf_run["znodes:"])

        # define stepsizes
        xstepsize = float(ovf_run["xstepsize:"])
        ystepsize = float(ovf_run["ystepsize:"])
        zstepsize = float(ovf_run["zstepsize:"])

        # define bases
        xbase = float(ovf_run["xbase:"])
        ybase = float(ovf_run["ybase:"])
        zbase = float(ovf_run["zbase:"])

        # find dx, dy, dz in SI units:
        dx = xrange / xnodes
        dy = yrange / ynodes
        dz = zrange / znodes

        # find scale factor that OOMMF uses for xstepsize and xnodes,
        # etc. (Don't know how to get this directly.)
        xscale = dx * xstepsize
        yscale = dy * ystepsize
        zscale = dz * zstepsize

        # extract x, y and z positions from ovf file.
        xbasevector = [None] * dimensions[0]  # create empty vector
        for i in range(dimensions[0]):
            # data is stored for 'centre' of each cuboid, therefore (i+0.5)
            xbasevector[i] = xbase + (i + 0.5) * xstepsize * xscale

        ybasevector = [None] * dimensions[1]
        for i in range(dimensions[1]):
            ybasevector[i] = ybase + (i + 0.5) * ystepsize * yscale

        zbasevector = [None] * dimensions[2]
        for i in range(dimensions[2]):
            zbasevector[i] = zbase + (i + 0.5) * zstepsize * zscale

        # finally, convert list to numeric (need to have this consistent)
        xbasevector = Numeric.array(xbasevector) / float(posscale)
        ybasevector = Numeric.array(ybasevector) / float(posscale)
        zbasevector = Numeric.array(zbasevector) / float(posscale)

    else:
        # posscale == 0.0
        # this generally looks better:
        xbasevector = Numeric.arange(dimensions[0])
        ybasevector = Numeric.arange(dimensions[1])
        zbasevector = Numeric.arange(dimensions[2])

    #
    # write ascii or binary vtk-file (default is binary)
    #
    vtk_data = 'binary'

    if '--ascii' in keys or '-t' in keys or '--text' in keys:
        vtk_data = 'ascii'
        if debug:
            print("switching to ascii vtk-data")

    if '--binary' in keys or '-b' in keys:
        vtk_data = 'binary'
        if debug:
            print("switching to binary vtk-data")

    #
    # and now open vtk-file
    #
    vtkfilecomment = "Output from ovf2vtk (version {:s}), {:s}, infile={:s}. "\
        .format(version, time.asctime(), infile)
    vtkfilecomment += "Calling command line was '{:s}' executed in '{:s}'"\
        .format(" ".join(sys.argv), os.getcwd())

    # define inputs
    RecGrid = pyvtk.RectilinearGrid(xbasevector.tolist(), ybasevector.tolist(),
                                    zbasevector.tolist())

    PData = pyvtk.PointData(pyvtk.Vectors(vf.tolist(), datatitle))

    # define vtk file.
    vtk = pyvtk.VtkData(RecGrid, vtkfilecomment, PData, format=vtk_data)

    # now compute all the additional data such as angles, etc

    # check whether we should do all
    keys = map(lambda x: x[1], args)
    if "all" in keys:
        args = []
        for add_arg in add_features:
            args.append(("--add", add_arg))

    # when ovf2vtk was re-written using Numeric, I had to group
    # certain operations to make them fast. Now some switches are
    # unneccessary. (fangohr 25/08/2003 01:35)
    # To avoid executing the
    # same code again, we remember what we have computed already:

    done_angles = 0
    done_comp = 0

    for arg in args:
        if arg[0] == "-a" or arg[0] == "--add":
            print("working on {}".format(arg))

            data = []
            lookup_table = 'default'

            # compute observables that need more than one field value
            # i.e. div, rot
            if arg[1][0:6] == "divrot":  # rotation = vorticity, curl

                (div, rot, rotx, roty, rotz, rotmag) = \
                    ana.divergence_and_curl(vf, surfaceEffects, ovf_run)
                # change order of observables for upcoming loop
                observables = (rotx, roty, rotz, rotmag, rot, div)

                comments = [
                    "curl, x-comp", "curl, y-comp", "curl, z-comp",
                    "curl, magnitude", "curl", "divergence"
                ]

                # append data to vtk file
                for obs, comment in zip(observables, comments):
                    # for rotx, roty, rotz, rotmag, div
                    if comment != "curl":
                        vtk.point_data.append(
                            pyvtk.Scalars(obs.tolist(), comment, lookup_table))
                    # for rot
                    else:
                        vtk.point_data.append(
                            pyvtk.Vectors(obs.tolist(), comment))

            # components
            elif arg[1] in ["Mx", "My", "Mz", "Ms"]:
                if done_comp == 0:
                    done_comp = 1
                    comments = "x-component", "y-component", "z-component"

                    for data, comment in zip(ana.components(vf), comments):
                        vtk.point_data.append(
                            pyvtk.Scalars(data.tolist(), comment,
                                          lookup_table))

                    # magnitude of magnetisation
                    Mmag = ana.magnitude(vf)
                    vtk.point_data.append(
                        pyvtk.Scalars(Mmag.tolist(), "Magnitude",
                                      lookup_table))

            elif arg[1] in ["xy", "xz", "yz"]:
                if done_angles == 0:
                    done_angles = 1
                    # in-plane angles
                    comments = ("xy in-plane angle", "yz in-plane angle",
                                "xz in-plane angle")
                    for data, comment in zip(ana.plane_angles(vf), comments):
                        vtk.point_data.append(
                            pyvtk.Scalars(data.tolist(), comment,
                                          lookup_table))

            else:
                print("only xy, xz, Mx, My, Mz, divergence, Ms, or 'all' \
allowed after -a or --add")
                print("Current choice is {}".format(arg))
                print(__doc__)
                sys.exit(1)

    #
    # eventually, write the file
    #
    print("saving file ({:s})".format(outfile))
    vtk.tofile(outfile, format=vtk_data)

    print("finished conversion (execution time {:5.3s} seconds)".format(
        str(time.time() - start_time)))
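For reference, here is a minimal, self-contained sketch of the pyvtk pattern used above: build a RectilinearGrid, attach the vector field as point data, then append derived scalar fields one by one. The grid size, field values and file name are invented for illustration and are not part of ovf2vtk.

import numpy as np
import pyvtk

nx, ny, nz = 3, 3, 2
x = [float(i) for i in range(nx)]
y = [float(i) for i in range(ny)]
z = [float(i) for i in range(nz)]

# one 3-component vector per grid node (x runs fastest in VTK ordering)
vf = np.zeros((nx * ny * nz, 3))
vf[:, 2] = 1.0  # uniform field along z

grid = pyvtk.RectilinearGrid(x, y, z)
pdata = pyvtk.PointData(pyvtk.Vectors(vf.tolist(), 'field'))
vtk = pyvtk.VtkData(grid, 'minimal rectilinear example', pdata)

# derived quantities are appended exactly as in the loop above
mag = np.sqrt((vf ** 2).sum(axis=1))
vtk.point_data.append(pyvtk.Scalars(mag.tolist(), 'Magnitude', 'default'))

vtk.tofile('minimal_example.vtk', format='ascii')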
示例#24
0
    def _getStructure(self):

        ##maxX = self.distanceVar.mesh.faceCenters[0].max()
        ##minX = self.distanceVar.mesh.faceCenters[0].min()

        IDs = numerix.nonzero(self.distanceVar._cellInterfaceFlag)[0]
        coordinates = numerix.take(
            numerix.array(self.distanceVar.mesh.cellCenters).swapaxes(0, 1),
            IDs)

        coordinates -= numerix.take(
            numerix.array(self.distanceVar.grad * self.distanceVar).swapaxes(
                0, 1), IDs)

        coordinates *= self.zoomFactor

        shiftedCoords = coordinates.copy()
        shiftedCoords[:, 0] = -coordinates[:, 0]  ##+ (maxX - minX)
        coordinates = numerix.concatenate((coordinates, shiftedCoords))

        from lines import _getOrderedLines

        lines = _getOrderedLines(
            range(2 * len(IDs)),
            coordinates,
            thresholdDistance=self.distanceVar.mesh._cellDistances.min() * 10)

        data = numerix.take(self.surfactantVar, IDs)

        data = numerix.concatenate((data, data))

        tmpIDs = numerix.nonzero(data > 0.0001)[0]
        if len(tmpIDs) > 0:
            val = numerix.take(data, tmpIDs).min()
        else:
            val = 0.0001

        data = numerix.where(data < 0.0001, val, data)

        for line in lines:
            if len(line) > 2:
                for smooth in range(self.smooth):
                    for arr in (coordinates, data):
                        tmp = numerix.take(arr, line)
                        tmp[1:-1] = tmp[2:] * 0.25 + tmp[:-2] * 0.25 + tmp[
                            1:-1] * 0.5
                        if len(arr.shape) > 1:
                            for i in range(len(arr[0])):
                                arrI = arr[:, i].copy()
                                numerix.put(arrI, line, tmp[:, i])
                                arr[:, i] = arrI
                        else:
                            numerix.put(arr, line, tmp)

        name = self.title
        name = name.strip()
        if name == '':
            name = None

        coords = numerix.zeros((coordinates.shape[0], 3), 'd')
        coords[:, :coordinates.shape[1]] = coordinates

        import pyvtk

        ## making lists as pyvtk doesn't know what to do with numpy arrays

        coords = list(coords)
        coords = map(
            lambda coord: [float(coord[0]),
                           float(coord[1]),
                           float(coord[2])], coords)

        data = list(data)
        data = map(lambda item: float(item), data)

        return (pyvtk.UnstructuredGrid(points=coords, poly_line=lines),
                pyvtk.PointData(pyvtk.Scalars(data, name=name)))
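A hedged, stand-alone illustration of how a (structure, point data) pair like the one returned by _getStructure could be written out with pyvtk; the poly-line connectivity and scalar values below are placeholder data, not output of the code above.

import pyvtk

coords = [[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [2.0, 1.0, 0.0]]
lines = [[0, 1, 2, 3]]         # a single poly-line through all four points
values = [0.1, 0.2, 0.3, 0.4]  # one scalar per point

structure = pyvtk.UnstructuredGrid(points=coords, poly_line=lines)
point_data = pyvtk.PointData(pyvtk.Scalars(values, name='surfactant'))

pyvtk.VtkData(structure, 'poly-line sketch', point_data).tofile('interface.vtk', 'ascii')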
示例#25
0
    def write_fields_vtk(self, flds=None, iteration=0,
                         format='binary', zmin_fixed=None,
                         Nth=24, CommonMesh=True):
        """
        Convert the given list of scalar and vector fields from the
        openPMD format to a VTK container, and write it to the disk.

        Parameters
        ----------
        flds: list or None
            List of scalar and vector fields to be converted. If None, it
            converts all available components provided by OpenPMDTimeSeries

        iteration: int
            iteration number to treat (default 0)

        format: str
            format for the VTK file, either 'ascii' or 'binary'

        zmin_fixed: float or None
            When processing the simulation data for an animation, it can be
            useful (e.g. with a moving window) to fix the origin of the
            visualization domain. If a float is given, it is used as the
            z-origin of the visualization domain.

        Nth: int, optional
            Number of nodes along the theta-axis of the cylindrical grid in
            the case of thetaMode geometry. Note: for large Nth (>>10) the
            conversion may become rather slow.

        CommonMesh: bool
            If True, all fields are written to a single file on a shared
            mesh, as VTK scalar and vector data. If False, each component
            is saved as a separate file with its own grid.
        """
        # Use all available fields if flds is not given
        if flds is None:
            flds = self.ts.avail_fields

        # Register constant parameters
        self.iteration = iteration
        self.zmin_fixed = zmin_fixed
        self.CommonMesh = CommonMesh
        self.Nth = Nth

        # Set grid to None, in order to recompute it
        self.grid = None

        # Make a zero-padded iteration string for the file name
        istr = str(self.iteration).zfill(7)

        if self.CommonMesh:

            # get and store fields Scalars and Vectors
            vtk_container = []
            for fld in flds:
                field_type = self.ts.fields_metadata[fld]['type']
                if field_type=='vector':
                    vtk_container.append( self._convert_field_vec_full(fld) )
                elif field_type=='scalar':
                    vtk_container.append( self._convert_field_scl(fld) )

            # write VTK file
            vtk.VtkData(self.grid, vtk.PointData(*vtk_container))\
                .tofile(self.path+'vtk_fields_'+istr, format=format)
        else:
            for fld in flds:
                field_type = self.ts.fields_metadata[fld]['type']
                if field_type=='vector':
                    comps = ['x', 'y', 'z']
                    for comp in comps:
                        fld_full = fld + comp
                        file_name = self.path+'vtk_fields_{}_{}'\
                           .format(fld_full, istr)

                        VtkFld = self._convert_field_vec_comp(fld, comp)

                        # write VTK file
                        vtk.VtkData(self.grid, vtk.PointData(VtkFld))\
                           .tofile(file_name, format=format)

                elif field_type=='scalar':
                    file_name = self.path+'vtk_fields_{}_{}'\
                       .format(fld, istr)

                    VtkFld = self._convert_field_scl(fld)
                    # write VTK file
                    vtk.VtkData(self.grid, vtk.PointData(VtkFld))\
                       .tofile(file_name, format=format)
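Below is a self-contained sketch of the CommonMesh=True idea described in the docstring above: several fields share one grid and end up in a single VTK file. The grid size, field values and file name are invented; the real method builds its mesh from the openPMD metadata.

import numpy as np
import pyvtk

dims = (4, 4, 4)
npoints = dims[0] * dims[1] * dims[2]

rho = np.linspace(0.0, 1.0, npoints)        # a scalar field, one value per node
E = np.zeros((npoints, 3))                  # a vector field, one 3-vector per node
E[:, 0] = 1.0

grid = pyvtk.StructuredPoints(dims)         # the shared mesh for all fields
fields = pyvtk.PointData(pyvtk.Scalars(rho.tolist(), name='rho'),
                         pyvtk.Vectors(E.tolist(), name='E'))

pyvtk.VtkData(grid, 'common-mesh sketch', fields).tofile('vtk_fields_0000000', 'binary')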
示例#26
0
# -*- coding: utf-8 -*-
"""
Created on Wed Jun  7 14:58:44 2017

@author: Jonas Lindemann
"""

import numpy as np
import pyvtk as vtk

print("Reading from uvw.dat...")
xyzuvw = np.loadtxt('uvw.dat', skiprows=2)

print("Converting to points and vectors")
points = xyzuvw[:, 0:3].tolist()
vectors = xyzuvw[:, 3:].tolist()

pointdata = vtk.PointData(vtk.Vectors(vectors, name="vec1"),
                          vtk.Vectors(vectors, name="vec2"))
data = vtk.VtkData(vtk.StructuredGrid([96, 65, 48], points), pointdata)
data.tofile('uvw', 'ascii')
示例#27
0
File: visual.py  Project: anyint/nmag-src
def _vtk_createVtkData(meshpoints,
                       meshsimplices,
                       data,
                       name,
                       header="Data header (unused)"):
    #If data is list of float, convert into list of lists, each list containing one float
    if type(data[0]) in [types.FloatType, types.IntType]:
        #raw data in scalar data set, convert float a into [a]:
        data = map(lambda a: [a], data)

    # Due to inflexibility in pyvtk, we need to distinguish the cases of
    # 1d-, 2d- and 3d-meshes, each carrying either scalar or vector data.

    #Here we go:
    if len(meshpoints[0]) == 2:
        log.debug("Mesh seems 2d")
        #make 3d for pyvtk
        meshpoints = map(lambda pos: pos + [0.], meshpoints)
        if len(data[0]) == 1:
            log.debug("Data seems 1d (scalar)")
            log.debug("Creating vtk data structure with dof '%s'" % name)
            vtk = pyvtk.VtkData(
                pyvtk.UnstructuredGrid(meshpoints, triangle=meshsimplices),
                pyvtk.PointData(
                    pyvtk.Scalars(data, name=name, lookup_table='default')),
                header)
        elif len(data[0]) in [2, 3]:
            if len(data[0]) == 2:
                log.debug("Data seems 2d (vector)")
                #make 3d for pyvtk
                data = map(lambda a: a + [0.], data)
            else:
                log.debug("Data seems 3d (vector)")
            log.debug("Creating vtk data structure with dof '%s'" % name)
            vtk = pyvtk.VtkData(
                pyvtk.UnstructuredGrid(meshpoints, triangle=meshsimplices),
                pyvtk.PointData(pyvtk.Vectors(data, name=name)), header)
        else:
            raise NfemValueError, "Can only deal with scalar or vector data"

    elif len(meshpoints[0]) == 3:
        log.debug("Mesh seems 3d")
        if len(data[0]) == 1:
            log.debug("Data seems 1d (scalar)")
            log.debug("Creating vtk data structure with dof '%s'" % name)
            vtk = pyvtk.VtkData(
                pyvtk.UnstructuredGrid(meshpoints, tetra=meshsimplices),
                pyvtk.PointData(
                    pyvtk.Scalars(data, name=name, lookup_table='default')),
                header)
        elif len(data[0]) in [2, 3]:
            if len(data[0]) == 2:
                log.debug("Data seems 2d (vector)")
                #make 3d for pyvtk
                data = map(lambda a: a + [0.], data)
            else:
                log.debug("Data seems 3d (vector)")
            log.debug("Creating vtk data structure with dof '%s'" % name)
            vtk = pyvtk.VtkData(
                pyvtk.UnstructuredGrid(meshpoints, tetra=meshsimplices),
                pyvtk.PointData(pyvtk.Vectors(data, name=name)), header)
        else:
            raise NfemValueError, "Can only deal with scalar or vector data"

    elif len(meshpoints[0]) == 1:
        log.debug("Mesh seems 1d")
        #make 3d for pyvtk
        meshpoints = map(lambda pos: pos + [0., 0.], meshpoints)
        if len(data[0]) == 1:
            log.debug("Data seems 1d (scalar)")
            log.debug("Creating vtk data structure with dof '%s'" % name)
            vtk = pyvtk.VtkData(
                pyvtk.UnstructuredGrid(meshpoints, line=meshsimplices),
                pyvtk.PointData(
                    pyvtk.Scalars(data, name=name, lookup_table='default')),
                header)
        elif len(data[0]) in [2, 3]:
            if len(data[0]) == 2:
                log.debug("Data seems 2d (vector)")
                #make 3d for pyvtk
                data = map(lambda a: a + [0.], data)
            else:
                log.debug("Data seems 3d (vector)")
            log.debug("Creating vtk data structure with dof '%s'" % name)
            vtk = pyvtk.VtkData(
                pyvtk.UnstructuredGrid(meshpoints, line=meshsimplices),
                pyvtk.PointData(pyvtk.Vectors(data, name=name)), header)
        else:
            raise NfemValueError, "Can only deal with scalar or vector data"

    else:
        NfemValueError, "Mesh seems to be %d dimensional. Can only do 1, 2 or 3." % len(
            meshpoints[0])

    return vtk
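For clarity, a hedged, self-contained illustration of the "2d mesh with scalar data" branch above: two triangles in the plane with one value per vertex, z-padded by hand exactly as the helper does before the mesh is handed to pyvtk. The mesh, values and file name are made up.

import pyvtk

points2d = [[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [1.0, 1.0]]
triangles = [[0, 1, 2], [1, 3, 2]]
temperature = [0.0, 1.0, 2.0, 3.0]        # one scalar per vertex

points3d = [p + [0.0] for p in points2d]  # pyvtk expects 3d coordinates

vtk_data = pyvtk.VtkData(
    pyvtk.UnstructuredGrid(points3d, triangle=triangles),
    pyvtk.PointData(pyvtk.Scalars(temperature, name='T', lookup_table='default')),
    'Data header (unused)')
vtk_data.tofile('mesh2d_scalar.vtk', 'ascii')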
示例#28
0
def export_vtk_stress(filename,
                      coords,
                      topo,
                      a=None,
                      el_scalar=None,
                      el_vec1=None,
                      el_vec2=None):
    """
    Export mesh and results for a 2D stress problem.
    
    Parameters:
    
        filename            Filename of the vtk-file
        coords              Node coordinates (np.array)
        topo                Element topology, 1-based node numbers
                            (not dof topology) (np.array)
        a                   Displacements, 2 dofs per node (np.array)
        el_scalar           Scalar value for each element (list)
        el_vec1             Vector value for each element (list)
        el_vec2             Vector value for each element (list)
    """

    points = coords.tolist()
    polygons = (topo - 1).tolist()

    displ = []

    point_data = None
    scalars = None
    vectors1 = None
    vectors2 = None
    cell_data = None

    if a is not None:
        for i in range(0, len(a), 2):
            displ.append([np.asscalar(a[i]), np.asscalar(a[i + 1]), 0.0])

        point_data = vtk.PointData(vtk.Vectors(displ, name="displacements"))

    if el_scalar is not None:
        scalars = vtk.Scalars(el_scalar, name="scalar")
    if el_vec1 is not None:
        vectors1 = vtk.Vectors(el_vec1, name="principal1")
    if el_vec2 is not None:
        vectors2 = vtk.Vectors(el_vec2, name="principal2")

    if el_scalar is not None and el_vec1 is None and el_vec2 is None:
        cell_data = vtk.CellData(scalars)
    if el_scalar is not None and el_vec1 is None and el_vec2 is not None:
        cell_data = vtk.CellData(scalars, vectors2)
    if el_scalar is not None and el_vec1 is not None and el_vec2 is None:
        cell_data = vtk.CellData(scalars, vectors1)
    if el_scalar is not None and el_vec1 is not None and el_vec2 is not None:
        cell_data = vtk.CellData(scalars, vectors1, vectors2)
    if el_scalar is None and el_vec1 is None and el_vec2 is not None:
        cell_data = vtk.CellData(vectors2)
    if el_scalar is None and el_vec1 is not None and el_vec2 is None:
        cell_data = vtk.CellData(vectors1)
    if el_scalar is None and el_vec1 is not None and el_vec2 is not None:
        cell_data = vtk.CellData(vectors1, vectors2)

    structure = vtk.PolyData(points=points, polygons=polygons)

    if cell_data is not None and point_data is not None:
        vtk_data = vtk.VtkData(structure, cell_data, point_data)
    if cell_data is not None and point_data is None:
        vtk_data = vtk.VtkData(structure, cell_data)
    if cell_data is None and point_data is not None:
        vtk_data = vtk.VtkData(structure, point_data)
    if cell_data is None and point_data is None:
        vtk_data = vtk.VtkData(structure)

    vtk_data.tofile("exm6.vtk", "ascii")
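A hedged usage sketch for export_vtk_stress: a single four-node quad element with made-up displacements and one element scalar. Node numbering in topo is 1-based, as the (topo - 1) conversion above implies.

import numpy as np

coords = np.array([[0.0, 0.0],
                   [1.0, 0.0],
                   [1.0, 1.0],
                   [0.0, 1.0]])
topo = np.array([[1, 2, 3, 4]])      # one quad element, 1-based node numbers
a = np.zeros((8, 1))                 # 2 dofs per node, all displacements zero
el_scalar = [42.0]                   # one scalar value for the single element

export_vtk_stress('quad.vtk', coords, topo, a=a, el_scalar=el_scalar)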
示例#29
0
File: MeshQ.py  Project: wangkiller/Morph
    for i in DegRep:
        DegNd = DegNd[DegNd != i]
    DegNd = np.r_[DegNd, DegRep]
    NCS[DegNd, ] = NCSprev[DegNd, ] + DS[DegNd, ]
    #DegNd = np.array(find_repeats(np.r_[DegNd,outer])[0],int)
    DegNd.sort()
    PointConst = np.zeros((NCS.shape[0], ))
    PointConst[outer, ] = 1
    PointConst[DegNd, ] = 0
    PointConst = np.array(PointConst, int)

    print 'Construct VTK object for optimization'
    ## ADD CONSTRAINTS AS POINT SCALARS
    skvtk = pv.VtkData(pv.UnstructuredGrid(points=NCS, tetra=TetT),
                       'skull 4 symm',
                       pv.PointData(pv.Scalars(PointConst, 'fixed')))
    skvtk.tofile(fname[0:6] + '015constr.vtk')
    os.system('~/Software/Trilinos/Mesquite/msqshape -s ./' + fname[0:6] +
              '015constr.vtk ./' + fname[0:6] + '015constrOUT.vtk')

    fil = open(fname[0:6] + '015constrOUT.vtk', 'r+')
    fil.writelines('# vtk DataFile Version 2.0\n')
    fil.close()
    skvtk = pv.VtkData(fname[0:6] + '015constrOUT.vtk')
    NCS = np.array(skvtk.structure.points)
    NCSprev = np.r_[NCS]
    EQ, delt, Sn2, Sig = qu.elemQual_mu(np.array(range(TetT.shape[0])), NCS,
                                        TetT)
    print '					Average Element Quality: 	', np.average(EQ)
    print '					Degenerate (q<0.15): 		', np.where(EQ < 0.15)[0].size
    print '					Inverted Elements: 		', np.where(Sig < 0)[0].size
示例#30
0
# -*- coding: utf-8 -*-
"""
Created on Thu Jun  8 12:03:31 2017

@author: Jonas Lindemann
"""
import numpy as np
import matplotlib.pyplot as plt
import pyvtk as vtk
import os

f = open("colorado_elev.vit", "rb")  # reopen the file
f.seek(268, os.SEEK_SET)  # seek

x = np.fromfile(f, dtype=np.ubyte)  # read the data into numpy

elevation = np.reshape(x, (400, 400))
plt.imshow(elevation)

plt.show()

pointdata = vtk.PointData(vtk.Scalars(x))
data = vtk.VtkData(vtk.StructuredPoints([400, 400]), pointdata)
data.tofile('elevation', 'ascii')
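A hedged variant of the last two lines: the same 400x400 StructuredPoints output, but with an explicit origin and grid spacing so the elevation map carries physical coordinates. The 30-unit spacing and zero origin are placeholder values, not taken from the data file.

pointdata = vtk.PointData(vtk.Scalars(x.tolist(), name='elevation'))
data = vtk.VtkData(vtk.StructuredPoints([400, 400], (0.0, 0.0, 0.0), (30.0, 30.0, 1.0)),
                   pointdata)
data.tofile('elevation_scaled', 'ascii')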