def PlaneSphereActors():
    """Build a two-block dataset (plane + sphere), attach a constant random
    cell scalar to each block, and return one vtkActor per leaf block."""
    plane_src = vtk.vtkPlaneSource()
    plane_src.SetXResolution(10)
    plane_src.SetYResolution(10)

    sphere_src = vtk.vtkSphereSource()
    sphere_src.SetRadius(0.3)

    # Combine both polydata outputs into a single multiblock dataset.
    grouper = vtk.vtkMultiBlockDataGroupFilter()
    grouper.AddInputConnection(plane_src.GetOutputPort())
    grouper.AddInputConnection(sphere_src.GetOutputPort())

    # One random scalar per block, attached to the cells.
    rand_attrs = vtk.vtkRandomAttributeGenerator()
    rand_attrs.SetInputConnection(grouper.GetOutputPort())
    rand_attrs.GenerateCellScalarsOn()
    rand_attrs.AttributesConstantPerBlockOn()

    normals = vtk.vtkPolyDataNormals()
    normals.SetInputConnection(rand_attrs.GetOutputPort())
    normals.Update()

    # Walk the composite output and wrap each leaf in its own actor.
    actor_list = []
    leaf_iter = normals.GetOutputDataObject(0).NewIterator()
    leaf_iter.InitTraversal()
    while not leaf_iter.IsDoneWithTraversal():
        mapper = vtk.vtkPolyDataMapper()
        mapper.SetInputData(leaf_iter.GetCurrentDataObject())
        actor = vtk.vtkActor()
        actor.SetMapper(mapper)
        actor_list.append(actor)
        leaf_iter.GoToNextItem()
    return actor_list
def procrustes(sources, rigid=False, legend=None):
    '''
    Return an Assembly of aligned source actors with the
    vtkProcrustesAlignmentFilter class. Assembly is normalized in space.

    Takes N set of points and aligns them in a least-squares sense
    to their mutual mean. The algorithm is iterated until convergence,
    as the mean must be recomputed after each alignment.

    :param bool rigid: if True, use a rigid-body landmark transform
        (no scaling).
    :param legend: forwarded to the returned ``Assembly``.
    '''
    group = vtk.vtkMultiBlockDataGroupFilter()
    for source in sources:
        # Procrustes alignment requires every source to have the
        # same number of points.
        if sources[0].N() != source.N():
            vc.printc('Procrustes error in align():' , c=1)
            vc.printc(' sources have different nr of points', c=1)
            # FIX: was exit(0), which reports *success* to the shell
            # on an error path; exit(1) signals failure.
            exit(1)
        group.AddInputData(source.polydata())
    # Renamed from 'procrustes' to avoid shadowing this function's own name.
    pro_filter = vtk.vtkProcrustesAlignmentFilter()
    pro_filter.StartFromCentroidOn()
    pro_filter.SetInputConnection(group.GetOutputPort())
    if rigid:
        pro_filter.GetLandmarkTransform().SetModeToRigidBody()
    pro_filter.Update()
    # Wrap each aligned block in an Actor carrying the original's properties.
    acts = []
    for i in range(len(sources)):
        poly = pro_filter.GetOutput().GetBlock(i)
        actor = Actor(poly)
        actor.SetProperty(sources[i].GetProperty())
        acts.append(actor)
    assem = Assembly(acts, legend=legend)
    assem.info['transform'] = pro_filter.GetLandmarkTransform()
    return assem
def PlaneSphereActors():
    """Return vtkActors for a plane and a sphere carried through a
    multiblock pipeline with per-block random cell scalars."""
    p = vtk.vtkPlaneSource()
    p.SetXResolution(10)
    p.SetYResolution(10)
    s = vtk.vtkSphereSource()
    s.SetRadius(0.3)

    # Group both sources into one multiblock dataset.
    mbg = vtk.vtkMultiBlockDataGroupFilter()
    mbg.AddInputConnection(p.GetOutputPort())
    mbg.AddInputConnection(s.GetOutputPort())

    # Random cell scalars, constant within each block.
    gen = vtk.vtkRandomAttributeGenerator()
    gen.SetInputConnection(mbg.GetOutputPort())
    gen.GenerateCellScalarsOn()
    gen.AttributesConstantPerBlockOn()

    norm = vtk.vtkPolyDataNormals()
    norm.SetInputConnection(gen.GetOutputPort())
    norm.Update()

    # One mapper/actor pair per leaf of the composite output.
    out = []
    cursor = norm.GetOutputDataObject(0).NewIterator()
    cursor.InitTraversal()
    while not cursor.IsDoneWithTraversal():
        m = vtk.vtkPolyDataMapper()
        m.SetInputData(cursor.GetCurrentDataObject())
        act = vtk.vtkActor()
        act.SetMapper(m)
        out.append(act)
        cursor.GoToNextItem()
    return out
def ComputeMeanLandmarks(targetNameList, dirlandmarks, dirOutput, sourceName):
    """Compute the rigid-body Procrustes mean of per-target landmark files.

    For each name in ``targetNameList``, reads ``<dirlandmarks>/<name>.txt``
    (two header rows skipped), wraps the points in a vtkPolyData, aligns all
    sets with vtkProcrustesAlignmentFilter, and returns the mean vtkPoints.
    Missing files are reported and skipped.
    """
    # loading landmarks into vtk polydata object
    group = vtk.vtkMultiBlockDataGroupFilter()
    for name in targetNameList:
        filename = os.path.join(dirlandmarks, name + '.txt')
        if not os.path.isfile(filename):
            print('Can not read landmark file: %s' % filename)
            continue
        # Reshape to (nLandmarks, 3): np.loadtxt returns a 1-D array when a
        # file holds a single landmark, which would break the insertion loop.
        npLandmarks = np.loadtxt(filename, skiprows=2).reshape(-1, 3)
        vtkLandmarks = vtk.vtkPoints()
        for landmark in npLandmarks:
            vtkLandmarks.InsertNextPoint(landmark)
        vtkPolyDataLandmarks = vtk.vtkPolyData()
        vtkPolyDataLandmarks.SetPoints(vtkLandmarks)
        group.AddInputData(vtkPolyDataLandmarks)
    group.Update()
    Procrustes = vtk.vtkProcrustesAlignmentFilter()
    Procrustes.SetInputConnection(group.GetOutputPort())
    Procrustes.GetLandmarkTransform().SetModeToRigidBody()
    #===========================================================================
    # Procrustes.GetLandmarkTransform().SetModeToSimilarity()
    #===========================================================================
    Procrustes.Update()
    MeanPoints = Procrustes.GetMeanPoints()
    return MeanPoints
def ComputeMeanSuface(targetNameList, dirSurface, dirOutput, sourceName, StatsFile=None, thRMS=100):
    """Compute a rigid-body Procrustes mean surface and write it to
    ``<dirOutput>/<sourceName>.vtk``.

    Surfaces whose RMS metric (last column of the CSV ``StatsFile``) exceeds
    ``thRMS`` are skipped, as are missing files.
    """
    print("================ STATSFILE ======================")
    print(StatsFile)
    rmsMetric = np.loadtxt(StatsFile, dtype=str, delimiter=',')
    RMS = rmsMetric[:, -1]
    nameList = rmsMetric[:, 0]
    count = 1
    skibedSurface = 0
    reader = None  # last successfully loaded surface; hosts the mean topology
    # loading surface into vtk group
    group = vtk.vtkMultiBlockDataGroupFilter()
    for targetName in targetNameList:
        filename = os.path.join(dirSurface, targetName + '.vtk')
        if not os.path.isfile(filename):
            print('Can not read file: %s' % filename)
            continue
        # Index of targetName in the stats table.
        # FIX: was initialized to -1, so a match on the *first* row gave
        # nameIdx == -1 and RMS[-1] read the last row's value instead.
        nameIdx = 0
        for name in nameList:
            # if '%d' % int(name) == targetName:
            if name == targetName:
                break
            nameIdx += 1
        # Guard against targets absent from the stats table (matches the
        # sibling compute_mean_surface_and_lm implementation).
        if nameIdx >= len(nameList):
            print('Can not read RMS stat for file: %s' % filename)
            continue
        # FIX: np.float was removed in NumPy 1.24; use the builtin float.
        if float(RMS[nameIdx]) > thRMS:
            print('To high RMS for surface: %s RMS = %0.2f' % (targetName, float(RMS[nameIdx])))
            count += 1
            skibedSurface += 1
            continue
        reader = vtk.vtkPolyDataReader()
        reader.SetFileName(filename)
        reader.Update()
        group.AddInputConnection(reader.GetOutputPort())
        count += 1
    if reader is None:
        # Previously this fell through to a NameError on 'reader'.
        raise RuntimeError('No surface could be loaded; cannot compute mean')
    Procrustes = vtk.vtkProcrustesAlignmentFilter()
    Procrustes.SetInputConnection(group.GetOutputPort())
    Procrustes.GetLandmarkTransform().SetModeToRigidBody()
    #===========================================================================
    # Procrustes.GetLandmarkTransform().SetModeToSimilarity()
    #===========================================================================
    Procrustes.Update()
    # Reuse the topology of the last loaded surface, replacing its points
    # with the Procrustes mean shape.
    polydata = reader.GetOutput()
    polydata.SetPoints(Procrustes.GetMeanPoints())
    polydata = computeNormals(polydata)
    writer = vtk.vtkPolyDataWriter()
    filename = os.path.join(dirOutput, sourceName + '.vtk')
    writer.SetFileName(filename)
    writer.SetInputData(polydata)
    writer.Write()
def tovtk(fdrw, fvtk):
    """
    Read drw file ``fdrw`` and write it as VTK multiblock file ``fvtk``.
    """
    # All separate ugrids representing each element are collected together
    # with the data group filter.
    gf = vtk.vtkMultiBlockDataGroupFilter()
    # FIX: the original leaked the handle returned by open(); 'with'
    # closes the input file even if parsing fails part-way through.
    with open(fdrw, 'r') as fh:
        for e in read_drw(fh):
            ug = _2ugrid(e)
            gf.AddInputData(ug)
    # Save to file
    w = vtk.vtkXMLMultiBlockDataWriter()
    w.SetFileName(fvtk)
    w.SetInputConnection(gf.GetOutputPort())
    w.Update()
    return
def procrustesAlignment(sources, rigid=False):
    """
    Align the source meshes with the `Procrustes` algorithm and return
    them bundled in an ``Assembly`` (normalized in size).

    The algorithm aligns N point sets in a least-squares sense to their
    mutual mean, iterating until convergence since the mean changes after
    every alignment step. The resulting mean points are exposed as a numpy
    array in ``output.info['mean']``.

    :param bool rigid: if `True` scaling is disabled.

    |align3| |align3.py|_
    """
    from vedo.mesh import Mesh
    group = vtk.vtkMultiBlockDataGroupFilter()
    for src in sources:
        # Every point set must have the same cardinality.
        if sources[0].N() != src.N():
            colors.printc("Error in procrustesAlignment():", c='r')
            colors.printc(" sources have different nr of points", c='r')
            raise RuntimeError()
        group.AddInputData(src.polydata())
    aligner = vtk.vtkProcrustesAlignmentFilter()
    aligner.StartFromCentroidOn()
    aligner.SetInputConnection(group.GetOutputPort())
    if rigid:
        aligner.GetLandmarkTransform().SetModeToRigidBody()
    aligner.Update()

    # Re-wrap each aligned block, carrying over the source's visual
    # properties and identity.
    aligned = []
    for idx, src in enumerate(sources):
        blk = aligner.GetOutput().GetBlock(idx)
        m = Mesh(blk)
        m.SetProperty(src.GetProperty())
        if hasattr(src, 'name'):
            m.name = src.name
            m.flagText = src.flagText
        aligned.append(m)

    result = Assembly(aligned)
    result.transform = aligner.GetLandmarkTransform()
    result.info['mean'] = vtk_to_numpy(aligner.GetMeanPoints().GetData())
    return result
def surface_statistics(target_names, dir_surface, dir_output, source_name, stats_file=None, rms_threshold=100):
    """Compute a rigid-body Procrustes mean of the surfaces listed in
    ``stats_file`` and write it to
    ``<dir_output>/<source_name>_Procrustes_mean.vtk``.

    Rows of the CSV ``stats_file`` are ``name, ..., rms``; surfaces with
    ``rms >= rms_threshold`` or unreadable files are excluded.
    """
    print('Reading', stats_file)
    rms_metric = np.loadtxt(stats_file, dtype=str, delimiter=',')
    n_rms = len(rms_metric)
    surface_group = vtk.vtkMultiBlockDataGroupFilter()
    # FIX: track the last *accepted* reader explicitly. The original used
    # whatever 'reader' held after the loop, which was unbound when every
    # surface was excluded and pointed at an unreadable/excluded surface
    # when the last row failed the checks.
    reader = None
    for idx in range(n_rms):
        name = rms_metric[idx, 0]
        rms = float(rms_metric[idx, -1])
        print('Surface:', name, 'RMS:', rms)
        if rms < rms_threshold:
            filename = os.path.join(dir_surface, name + '.vtk')
            candidate = vtk.vtkPolyDataReader()
            candidate.SetFileName(filename)
            candidate.Update()
            if candidate.GetOutput().GetNumberOfPoints() < 3:
                print('Could not read', filename)
            else:
                surface_group.AddInputConnection(candidate.GetOutputPort())
                reader = candidate
        else:
            print('Surface', name, 'excluded due to high RMS:', rms)
    if reader is None:
        raise RuntimeError('No surface passed the RMS filter; cannot compute mean')
    procrustes = vtk.vtkProcrustesAlignmentFilter()
    procrustes.SetInputConnection(surface_group.GetOutputPort())
    procrustes.GetLandmarkTransform().SetModeToRigidBody()
    # ===========================================================================
    # Procrustes.GetLandmarkTransform().SetModeToSimilarity()
    # ===========================================================================
    procrustes.Update()
    # Reuse the topology of the last accepted surface; only its points are
    # replaced by the Procrustes mean shape.
    polydata = reader.GetOutput()
    polydata.SetPoints(procrustes.GetMeanPoints())
    polydata = computeNormals(polydata)
    writer = vtk.vtkPolyDataWriter()
    filename = os.path.join(dir_output, source_name + '_Procrustes_mean.vtk')
    writer.SetFileName(filename)
    writer.SetInputData(polydata)
    writer.Write()
def write(objct, fileoutput, binary=True):
    """
    Write 3D object to file. (same as `save()`).

    Possible extensions are:
        - vtk, vti, npy, ply, obj, stl, byu, vtp, vti, mhd, xyz, tif, png, bmp.
    """
    # Normalize the input to a raw vtk data object before dispatching
    # on the file extension.
    obj = objct
    if isinstance(obj, Actor): # picks transformation
        obj = objct.polydata(True)
    elif isinstance(obj, (vtk.vtkActor, vtk.vtkVolume)):
        obj = objct.GetMapper().GetInput()
    elif isinstance(obj, (vtk.vtkPolyData, vtk.vtkImageData)):
        obj = objct

    # Select a writer from the (lower-cased) file name; branches that
    # return early handle the whole write themselves.
    fr = fileoutput.lower()
    if ".vtk" in fr:
        writer = vtk.vtkPolyDataWriter()
    elif ".ply" in fr:
        writer = vtk.vtkPLYWriter()
        # carry the active scalar array (point data first, then cell data)
        # and the mapper's lookup table into the PLY output
        pscal = obj.GetPointData().GetScalars()
        if not pscal:
            pscal = obj.GetCellData().GetScalars()
        if pscal and pscal.GetName():
            writer.SetArrayName(pscal.GetName())
            #writer.SetColorMode(0)
        lut = objct.GetMapper().GetLookupTable()
        if lut:
            writer.SetLookupTable(lut)
    elif ".stl" in fr:
        writer = vtk.vtkSTLWriter()
    elif ".obj" in fr:
        writer = vtk.vtkOBJWriter()
    elif ".vtp" in fr:
        writer = vtk.vtkXMLPolyDataWriter()
    elif ".vtm" in fr:
        # multiblock: group the sequence of inputs and write immediately,
        # bypassing the single-writer path at the bottom
        g = vtk.vtkMultiBlockDataGroupFilter()
        for ob in objct:
            g.AddInputData(ob)
        g.Update()
        mb = g.GetOutputDataObject(0)
        wri = vtk.vtkXMLMultiBlockDataWriter()
        wri.SetInputData(mb)
        wri.SetFileName(fileoutput)
        wri.Write()
        return mb
    elif ".xyz" in fr:
        writer = vtk.vtkSimplePointsWriter()
    elif ".facet" in fr:
        writer = vtk.vtkFacetWriter()
    elif ".tif" in fr:
        writer = vtk.vtkTIFFWriter()
        writer.SetFileDimensionality(len(obj.GetDimensions()))
    elif ".vti" in fr:
        writer = vtk.vtkXMLImageDataWriter()
    elif ".mhd" in fr:
        writer = vtk.vtkMetaImageWriter()
    elif ".nii" in fr:
        writer = vtk.vtkNIFTIImageWriter()
    elif ".png" in fr:
        writer = vtk.vtkPNGWriter()
    elif ".jpg" in fr:
        writer = vtk.vtkJPEGWriter()
    elif ".bmp" in fr:
        writer = vtk.vtkBMPWriter()
    elif ".npy" in fr:
        # serialize one object or a sequence of objects as numpy dicts
        if utils.isSequence(objct):
            objslist = objct
        else:
            objslist = [objct]
        dicts2save = []
        for obj in objslist:
            dicts2save.append( _np_dump(obj) )
        np.save(fileoutput, dicts2save)
        return dicts2save
    elif ".xml" in fr: # write tetrahedral dolfin xml
        vertices = objct.coordinates().astype(str)
        faces = np.array(objct.faces()).astype(str)
        ncoords = vertices.shape[0]
        outF = open(fileoutput, "w")
        outF.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        outF.write('<dolfin xmlns:dolfin="http://www.fenicsproject.org">\n')
        # cell arity decides tetrahedral (3D) vs triangle (2D) dolfin mesh
        if len(faces[0]) == 4:# write tetrahedral mesh
            ntets = faces.shape[0]
            outF.write(' <mesh celltype="tetrahedron" dim="3">\n')
            outF.write(' <vertices size="' + str(ncoords) + '">\n')
            for i in range(ncoords):
                x, y, z = vertices[i]
                outF.write(' <vertex index="'+str(i)+'" x="'+x+'" y="'+y+'" z="'+z+'"/>\n')
            outF.write(' </vertices>\n')
            outF.write(' <cells size="' + str(ntets) + '">\n')
            for i in range(ntets):
                v0, v1, v2, v3 = faces[i]
                outF.write(' <tetrahedron index="'+str(i) + '" v0="'+v0+'" v1="'+v1+'" v2="'+v2+'" v3="'+v3+'"/>\n')
        elif len(faces[0]) == 3:# write triangle mesh
            ntri = faces.shape[0]
            outF.write(' <mesh celltype="triangle" dim="2">\n')
            outF.write(' <vertices size="' + str(ncoords) + '">\n')
            for i in range(ncoords):
                x, y, dummy_z = vertices[i]
                outF.write(' <vertex index="'+str(i)+'" x="'+x+'" y="'+y+'"/>\n')
            outF.write(' </vertices>\n')
            outF.write(' <cells size="' + str(ntri) + '">\n')
            for i in range(ntri):
                v0, v1, v2 = faces[i]
                outF.write(' <triangle index="'+str(i)+'" v0="'+v0+'" v1="'+v1+'" v2="'+v2+'"/>\n')
        # closing tags are shared by both cell types
        outF.write(' </cells>\n')
        outF.write(" </mesh>\n")
        outF.write("</dolfin>\n")
        outF.close()
        return objct
    else:
        colors.printc("~noentry Unknown format", fileoutput, "file not saved.", c="r")
        return objct

    # common path for all writer-based formats
    try:
        if hasattr(writer, 'SetFileTypeToBinary'):
            if binary:
                writer.SetFileTypeToBinary()
            else:
                writer.SetFileTypeToASCII()
        writer.SetInputData(obj)
        writer.SetFileName(fileoutput)
        writer.Write()
        colors.printc("~save Saved file: " + fileoutput, c="g")
    except Exception as e:
        colors.printc("~noentry Error saving: " + fileoutput, "\n", e, c="r")
    return objct
# Filter the APOE dataframe: keep subjects whose left and right hippocampal
# volumes are both >= 1500 and who are not flagged as visual outliers, drop
# the first column, and save the result.
df_apoe_f = df_apoe[df_apoe["vol_l"] >= 1500.0]
df_apoe_f = df_apoe_f[df_apoe_f["vol_r"] >= 1500.0]
df_apoe_f = df_apoe_f[~df_apoe_f["id_alomar"].isin(visual_outliers_list)]
df_apoe_f.drop(df_apoe_f.columns[[0]], axis=1, inplace=True)
df_apoe_f.to_csv(out_csv, index=False, index_label=False)
print(len(df_apoe_f))
# %% markdown
# For both left and right hippocampus, use similarity alignment to alineate the meshes, using vtk.vtkProcustesAlignmentFilter()
# %%
# Group left and right meshes into a MultiBlockData
# hacky volume-threshold filtering  (translated from Catalan)
# Random test: compute the mean by hand, point by point.  (translated)
# Left mesh
i = 0
meshes_left = vtk.vtkMultiBlockDataGroupFilter()
points_left = []
for mesh in X_left:
    # Add to group only subjects passing the volume / outlier criteria.
    vol_r = float(df_ids.iloc[i, :].vol_r)
    vol_l = float(df_ids.iloc[i, :].vol_l)
    idb = int(df_ids.iloc[i, :].id_alomar)
    if vol_r >= 2000 and vol_l >= 1500 and idb not in visual_outliers_list:
        meshes_left.AddInputData(mesh)
        # Compute mean points
        points = ExtractVTKPoints(mesh)
        points_left.append(points)
    # NOTE(review): i advances for every mesh so it stays in sync with
    # df_ids rows — confirm X_left is ordered like df_ids.
    i += 1
# Right mesh
i = 0
def write(objct, fileoutput, binary=True):
    """
    Write 3D object to file. (same as `save()`).

    Possible extensions are:
        - vtk, vti, npy, ply, obj, stl, byu, vtp, vti, mhd, xyz, tif, png, bmp.
    """
    # Normalize the input to a raw vtk data object before dispatching
    # on the file extension.
    obj = objct
    if isinstance(obj, Mesh): # picks transformation
        obj = objct.polydata(True)
    elif isinstance(obj, (vtk.vtkActor, vtk.vtkVolume)):
        obj = objct.GetMapper().GetInput()
    elif isinstance(obj, (vtk.vtkPolyData, vtk.vtkImageData)):
        obj = objct

    # Pick a writer by file suffix; branches that return early handle the
    # whole write themselves.
    fr = fileoutput.lower()
    if fr.endswith(".vtk"):
        writer = vtk.vtkPolyDataWriter()
    elif fr.endswith(".ply"):
        writer = vtk.vtkPLYWriter()
        # carry the active scalar array (point data first, then cell data)
        # and the mapper's lookup table into the PLY output
        pscal = obj.GetPointData().GetScalars()
        if not pscal:
            pscal = obj.GetCellData().GetScalars()
        if pscal and pscal.GetName():
            writer.SetArrayName(pscal.GetName())
            #writer.SetColorMode(0)
        lut = objct.GetMapper().GetLookupTable()
        if lut:
            writer.SetLookupTable(lut)
    elif fr.endswith(".stl"):
        writer = vtk.vtkSTLWriter()
    elif fr.endswith(".vtp"):
        writer = vtk.vtkXMLPolyDataWriter()
    elif fr.endswith(".vtm"):
        # multiblock: normalize each element, group, and write immediately
        g = vtk.vtkMultiBlockDataGroupFilter()
        for ob in objct:
            if isinstance(ob, Mesh): # picks transformation
                ob = ob.polydata(True)
            elif isinstance(ob, (vtk.vtkActor, vtk.vtkVolume)):
                ob = ob.GetMapper().GetInput()
            g.AddInputData(ob)
        g.Update()
        mb = g.GetOutputDataObject(0)
        wri = vtk.vtkXMLMultiBlockDataWriter()
        wri.SetInputData(mb)
        wri.SetFileName(fileoutput)
        wri.Write()
        return mb
    elif fr.endswith(".xyz"):
        writer = vtk.vtkSimplePointsWriter()
    elif fr.endswith(".facet"):
        writer = vtk.vtkFacetWriter()
    elif fr.endswith(".tif"):
        writer = vtk.vtkTIFFWriter()
        writer.SetFileDimensionality(len(obj.GetDimensions()))
    elif fr.endswith(".vti"):
        writer = vtk.vtkXMLImageDataWriter()
    elif fr.endswith(".mhd"):
        writer = vtk.vtkMetaImageWriter()
    elif fr.endswith(".nii"):
        writer = vtk.vtkNIFTIImageWriter()
    elif fr.endswith(".png"):
        writer = vtk.vtkPNGWriter()
    elif fr.endswith(".jpg"):
        writer = vtk.vtkJPEGWriter()
    elif fr.endswith(".bmp"):
        writer = vtk.vtkBMPWriter()
    elif fr.endswith(".npy"):
        # serialize one object or a sequence of objects as numpy dicts
        if utils.isSequence(objct):
            objslist = objct
        else:
            objslist = [objct]
        dicts2save = []
        for obj in objslist:
            dicts2save.append( _np_dump(obj) )
        np.save(fileoutput, dicts2save)
        return dicts2save
    elif fr.endswith(".obj"):
        # hand-rolled Wavefront OBJ export (vertices, normals, faces)
        outF = open(fileoutput, "w")
        outF.write('# OBJ file format with ext .obj\n')
        outF.write('# File Created by vtkplotter\n')
        cobjct = objct.clone().clean()
        for p in cobjct.points():
            outF.write('v '+ str(p[0]) +" "+ str(p[1])+" "+ str(p[2])+'\n')
        for vn in cobjct.normals(cells=False):
            outF.write('vn '+str(vn[0])+" "+str(vn[1])+" "+str(vn[2])+'\n')
        #pdata = cobjct.polydata().GetPointData().GetScalars()
        #if pdata:
        #    ndata = vtk_to_numpy(pdata)
        #    for vd in ndata:
        #        outF.write('vp '+ str(vd) +'\n')
        #ptxt = cobjct.polydata().GetPointData().GetTCoords() # not working
        #if ptxt:
        #    ntxt = vtk_to_numpy(ptxt)
        #    print(len(cobjct.faces()), cobjct.points().shape, ntxt.shape)
        #    for vt in ntxt:
        #        outF.write('vt '+ str(vt[0]) +" "+ str(vt[1])+ ' 0\n')
        # OBJ face indices are 1-based, hence fi+1
        for f in cobjct.faces():
            fs = ''
            for fi in f:
                fs += " "+str(fi+1)
            outF.write('f' + fs + '\n')
        #ldata = cobjct.polydata().GetLines().GetData()
        #print(cobjct.polydata().GetLines())
        #if ldata:
        #    ndata = vtk_to_numpy(ldata)
        #    print(ndata)
        #    for l in ndata:
        #        ls = ''
        #        for li in l:
        #            ls += str(li+1)+" "
        #        outF.write('l '+ ls + '\n')
        outF.close()
        return objct
    elif fr.endswith(".xml"): # write tetrahedral dolfin xml
        vertices = objct.points().astype(str)
        faces = np.array(objct.faces()).astype(str)
        ncoords = vertices.shape[0]
        outF = open(fileoutput, "w")
        outF.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        outF.write('<dolfin xmlns:dolfin="http://www.fenicsproject.org">\n')
        # cell arity decides tetrahedral (3D) vs triangle (2D) dolfin mesh
        if len(faces[0]) == 4:# write tetrahedral mesh
            ntets = faces.shape[0]
            outF.write(' <mesh celltype="tetrahedron" dim="3">\n')
            outF.write(' <vertices size="' + str(ncoords) + '">\n')
            for i in range(ncoords):
                x, y, z = vertices[i]
                outF.write(' <vertex index="'+str(i)+'" x="'+x+'" y="'+y+'" z="'+z+'"/>\n')
            outF.write(' </vertices>\n')
            outF.write(' <cells size="' + str(ntets) + '">\n')
            for i in range(ntets):
                v0, v1, v2, v3 = faces[i]
                outF.write(' <tetrahedron index="'+str(i) + '" v0="'+v0+'" v1="'+v1+'" v2="'+v2+'" v3="'+v3+'"/>\n')
        elif len(faces[0]) == 3:# write triangle mesh
            ntri = faces.shape[0]
            outF.write(' <mesh celltype="triangle" dim="2">\n')
            outF.write(' <vertices size="' + str(ncoords) + '">\n')
            for i in range(ncoords):
                x, y, dummy_z = vertices[i]
                outF.write(' <vertex index="'+str(i)+'" x="'+x+'" y="'+y+'"/>\n')
            outF.write(' </vertices>\n')
            outF.write(' <cells size="' + str(ntri) + '">\n')
            for i in range(ntri):
                v0, v1, v2 = faces[i]
                outF.write(' <triangle index="'+str(i)+'" v0="'+v0+'" v1="'+v1+'" v2="'+v2+'"/>\n')
        # closing tags are shared by both cell types
        outF.write(' </cells>\n')
        outF.write(" </mesh>\n")
        outF.write("</dolfin>\n")
        outF.close()
        return objct
    else:
        colors.printc("~noentry Unknown format", fileoutput, "file not saved.", c="r")
        return objct

    # common path for all writer-based formats
    try:
        if hasattr(writer, 'SetFileTypeToBinary'):
            if binary:
                writer.SetFileTypeToBinary()
            else:
                writer.SetFileTypeToASCII()
        writer.SetInputData(obj)
        writer.SetFileName(fileoutput)
        writer.Write()
    except Exception as e:
        colors.printc("~noentry Error saving: " + fileoutput, "\n", e, c="r")
    return objct
def write(objct, fileoutput, binary=True):
    """
    Write 3D object to file. (same as `save()`).

    Possible extensions are:
        - vtk, vti, ply, obj, stl, byu, vtp, vti, mhd, xyz, tif, png, bmp.
    """
    # Normalize the input to a raw vtk data object before dispatching
    # on the file extension.
    obj = objct
    if isinstance(obj, Actor): # picks transformation
        obj = objct.polydata(True)
    elif isinstance(obj, (vtk.vtkActor, vtk.vtkVolume)):
        obj = objct.GetMapper().GetInput()
    elif isinstance(obj, (vtk.vtkPolyData, vtk.vtkImageData)):
        obj = objct

    # Select a writer from the (lower-cased) file name; early-return
    # branches handle the whole write themselves.
    fr = fileoutput.lower()
    if ".vtk" in fr:
        w = vtk.vtkPolyDataWriter()
    elif ".ply" in fr:
        w = vtk.vtkPLYWriter()
    elif ".stl" in fr:
        w = vtk.vtkSTLWriter()
    elif ".vtp" in fr:
        w = vtk.vtkXMLPolyDataWriter()
    elif ".vtm" in fr:
        # multiblock: group the sequence of inputs and write immediately
        g = vtk.vtkMultiBlockDataGroupFilter()
        for ob in objct:
            g.AddInputData(ob)
        g.Update()
        mb = g.GetOutputDataObject(0)
        wri = vtk.vtkXMLMultiBlockDataWriter()
        wri.SetInputData(mb)
        wri.SetFileName(fileoutput)
        wri.Write()
        return mb
    elif ".xyz" in fr:
        w = vtk.vtkSimplePointsWriter()
    elif ".facet" in fr:
        w = vtk.vtkFacetWriter()
    elif ".tif" in fr:
        w = vtk.vtkTIFFWriter()
        w.SetFileDimensionality(len(obj.GetDimensions()))
    elif ".vti" in fr:
        w = vtk.vtkXMLImageDataWriter()
    elif ".mhd" in fr:
        w = vtk.vtkMetaImageWriter()
    elif ".png" in fr:
        w = vtk.vtkPNGWriter()
    elif ".jpg" in fr:
        w = vtk.vtkJPEGWriter()
    elif ".bmp" in fr:
        w = vtk.vtkBMPWriter()
    elif ".xml" in fr: # write tetrahedral dolfin xml
        # only tetrahedral meshes are supported in this version
        vertices = obj.coordinates()
        faces = obj.cells()
        ncoords = vertices.shape[0]
        ntets = faces.shape[0]
        outF = open(fileoutput, "w")
        outF.write('<?xml version="1.0" encoding="UTF-8"?>\n')
        outF.write('<dolfin xmlns:dolfin="http://www.fenicsproject.org">\n')
        outF.write(' <mesh celltype="tetrahedron" dim="3">\n')
        outF.write(' <vertices size="' + str(ncoords) + '">\n')
        for i in range(ncoords):
            x, y, z = vertices[i]
            outF.write(' <vertex index="' + str(i) + '" x="' + str(x) + '" y="' + str(y) + '" z="' + str(z) + '"/>\n')
        outF.write(' </vertices>\n')
        outF.write(' <cells size="' + str(ntets) + '">\n')
        for i in range(ntets):
            v0, v1, v2, v3 = faces[i]
            outF.write(' <tetrahedron index="' + str(i) + '" v0="' + str(v0) + '" v1="' + str(v1) + '" v2="' + str(v2) + '" v3="' + str(v3) + '"/>\n')
        outF.write(' </cells>\n')
        outF.write(" </mesh>\n")
        outF.write("</dolfin>\n")
        outF.close()
        return objct
    else:
        colors.printc("~noentry Unknown format", fileoutput, "file not saved.", c="r")
        return objct

    # common path for all writer-based formats
    try:
        if hasattr(w, 'SetFileTypeToBinary'):
            if binary:
                w.SetFileTypeToBinary()
            else:
                w.SetFileTypeToASCII()
        w.SetInputData(obj)
        w.SetFileName(fileoutput)
        w.Write()
        colors.printc("~save Saved file: " + fileoutput, c="g")
    except Exception as e:
        colors.printc("~noentry Error saving: " + fileoutput, "\n", e, c="r")
    return objct
# Map the three original (unaligned) shapes into the first renderer,
# each with its own diffuse color.
map1a.SetInputConnection(sphere.GetOutputPort())
Actor1a = vtk.vtkActor()
Actor1a.SetMapper(map1a)
Actor1a.GetProperty().SetDiffuseColor(1.0000,0.3882,0.2784)
map1b = vtk.vtkPolyDataMapper()
map1b.SetInputConnection(transformer1.GetOutputPort())
Actor1b = vtk.vtkActor()
Actor1b.SetMapper(map1b)
Actor1b.GetProperty().SetDiffuseColor(0.3882,1.0000,0.2784)
map1c = vtk.vtkPolyDataMapper()
map1c.SetInputConnection(transformer2.GetOutputPort())
Actor1c = vtk.vtkActor()
Actor1c.SetMapper(map1c)
Actor1c.GetProperty().SetDiffuseColor(0.3882,0.2784,1.0000)

# -- align the shapes using Procrustes (using SetModeToRigidBody) --
group = vtk.vtkMultiBlockDataGroupFilter()
group.AddInputConnection(sphere.GetOutputPort())
group.AddInputConnection(transformer1.GetOutputPort())
group.AddInputConnection(transformer2.GetOutputPort())
procrustes1 = vtk.vtkProcrustesAlignmentFilter()
procrustes1.SetInputConnection(group.GetOutputPort())
procrustes1.GetLandmarkTransform().SetModeToRigidBody()
procrustes1.Update()

# map the aligned shapes into the second renderer
# (blocks 0..2 of the filter output correspond to the three inputs above)
map2a = vtk.vtkPolyDataMapper()
map2a.SetInputData(procrustes1.GetOutput().GetBlock(0))
Actor2a = vtk.vtkActor()
Actor2a.SetMapper(map2a)
Actor2a.GetProperty().SetDiffuseColor(1.0000,0.3882,0.2784)
map2b = vtk.vtkPolyDataMapper()
map2b.SetInputData(procrustes1.GetOutput().GetBlock(1))
# Finish configuring the second sphere and a cylinder, then pack the three
# primitive outputs into one vtkMultiBlockDataSet served by a group filter.
sphere2.SetThetaResolution(15)
sphere2.SetPhiResolution(15)
sphere2.Update()
cylinder = vtk.vtkCylinderSource()
cylinder.SetRadius(attrs[0])
cylinder.SetHeight(attrs[1])
cylinder.SetResolution(15)
cylinder.Update()
data = vtk.vtkMultiBlockDataSet()
data.SetNumberOfBlocks(3)
data.SetBlock(0, sphere1.GetOutput())
data.SetBlock(1, sphere2.GetOutput())
data.SetBlock(2, cylinder.GetOutput())
source = vtk.vtkMultiBlockDataGroupFilter()
add_compatiblity_methods(source)
source.AddInputData(data)
readers[shape_name] = source
mapper = vtk.vtkCompositePolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())
# single-use generator so the mapper can be consumed once via .next() below
mappers[shape_name] = (x for x in [mapper])
fixed_mappers = dict()
for shape_name in io.shapes():
    if shape_name not in fixed_mappers:
        # NOTE(review): .next() is the Python 2 iterator protocol; this
        # fragment would need next(...) under Python 3 — confirm target version.
        fixed_mappers[shape_name] = mappers[shape_name].next()
for instance_name in io.instances():
def compute_mean_surface_and_lm(targetNameList, dirSurface, dirLM, dirOutput, sourceName, StatsFile=None, thRMS=100):
    """Compute rigid-body Procrustes means of both surfaces and landmarks.

    Surfaces come from ``<dirSurface>/<name>.vtk`` and landmarks from
    ``<dirLM>/<name>.txt`` (two header rows skipped); targets whose RMS
    (last column of CSV ``StatsFile``) exceeds ``thRMS`` are skipped.
    Writes ``<dirOutput>/<sourceName>.vtk`` (mean surface) and
    ``<dirOutput>/<sourceName>.txt`` (mean landmarks).
    """
    rmsMetric = np.loadtxt(StatsFile, dtype=str, delimiter=',')
    RMS = rmsMetric[:, -1]
    nameList = rmsMetric[:, 0]
    count = 1
    skibedSurface = 0
    # loading surface into vtk group
    group = vtk.vtkMultiBlockDataGroupFilter()
    lm_group = vtk.vtkMultiBlockDataGroupFilter()
    reader = None  # last successfully loaded surface; hosts the mean topology
    for targetName in targetNameList:
        filename = os.path.join(dirSurface, targetName + '.vtk')
        if not os.path.isfile(filename):
            print('Can not read file: %s' % filename)
            continue
        # linear search for targetName's row in the stats table
        nameIdx = 0
        for name in nameList:
            # if '%d' % int(name) == targetName:
            if name == targetName:
                break
            nameIdx += 1
        if nameIdx >= len(nameList):
            print('Can not read RMS stat for file: %s' % filename)
            continue
        # FIX: np.float was removed in NumPy 1.24; use the builtin float.
        if float(RMS[nameIdx]) > thRMS:
            print('To high RMS for surface: %s RMS = %0.2f' % (targetName, float(RMS[nameIdx])))
            count += 1
            skibedSurface += 1
            continue
        reader = vtk.vtkPolyDataReader()
        reader.SetFileName(filename)
        reader.Update()
        group.AddInputConnection(reader.GetOutputPort())
        group.Update()
        # matching landmark file goes into the parallel landmark group
        lmfilename = os.path.join(dirLM, targetName + '.txt')
        npLandmarks = np.loadtxt(lmfilename, skiprows=2)
        vtkLandmarks = vtk.vtkPoints()
        for landmark in npLandmarks:
            vtkLandmarks.InsertNextPoint(landmark)
        vtkPolyDataLandmarks = vtk.vtkPolyData()
        vtkPolyDataLandmarks.SetPoints(vtkLandmarks)
        lm_group.AddInputData(vtkPolyDataLandmarks)
        lm_group.Update()
        count += 1
    if reader is None:
        # Previously this fell through to a NameError on 'reader'.
        raise RuntimeError('No surface could be loaded; cannot compute mean')
    Procrustes = vtk.vtkProcrustesAlignmentFilter()
    Procrustes.SetInputConnection(group.GetOutputPort())
    Procrustes.GetLandmarkTransform().SetModeToRigidBody()
    Procrustes.Update()
    # Reuse the topology of the last loaded surface; only its points are
    # replaced by the Procrustes mean shape.
    polydata = reader.GetOutput()
    polydata.SetPoints(Procrustes.GetMeanPoints())
    polydata = computeNormals(polydata)
    writer = vtk.vtkPolyDataWriter()
    filename = os.path.join(dirOutput, sourceName + '.vtk')
    writer.SetFileName(filename)
    writer.SetInputData(polydata)
    writer.Write()
    # Align the landmark sets the same way and write their mean.
    lm_procrustes = vtk.vtkProcrustesAlignmentFilter()
    lm_procrustes.SetInputConnection(lm_group.GetOutputPort())
    lm_procrustes.StartFromCentroidOff()
    lm_procrustes.GetLandmarkTransform().SetModeToRigidBody()
    lm_procrustes.Update()
    mean_points = lm_procrustes.GetMeanPoints()
    filename = os.path.join(dirOutput, sourceName + '.txt')
    WriteLandmarkFile(mean_points, filename)
# Parse the j-th point string: keep only numeric characters (digits, '.',
# '-', 'e'), blank out everything else, then split into floats.
for j in range(nPt):
    pts_str = pts_data[j][i]
    pts_str_parsed = ''.join(
        (ch if ch in '0123456789.-e' else ' ') for ch in pts_str)
    pt_j = [float(pt_j_str) for pt_j_str in pts_str_parsed.split()]
    pts_list_i.append(pt_j)
pts_list.append(pts_list_i)

# Procrustes Filter
procrustes_filter = vtk.vtkProcrustesAlignmentFilter()
group = vtk.vtkMultiBlockDataGroupFilter()
pts_polyData_list = []
scale_obs_list = []
# Check
# nObs = 3
# Build one vtkPolyData per observation (fragment cut off mid-loop).
for i in range(nObs):
    pt_polyData = vtk.vtkPolyData()
    pt_Points = vtk.vtkPoints()
    pt_lines = vtk.vtkCellArray()
    # Set Points
    for j in range(nPt):
import vtk.numpy_interface.dataset_adapter as dsa
import vtk.numpy_interface.algorithms as algs
# Synthetic source with a random per-point vector array (BrownianPoints),
# scalar-colored along x by an elevation filter.
w = vtk.vtkRTAnalyticSource()
bp = vtk.vtkBrownianPoints()
bp.SetInputConnection(w.GetOutputPort())
bp.Update()
elev = vtk.vtkElevationFilter()
elev.SetInputConnection(bp.GetOutputPort())
elev.SetLowPoint(-10, 0, 0)
elev.SetHighPoint(10, 0, 0)
elev.SetScalarRange(0, 20)
# Group the same elevation output twice into a 2-block dataset.
g = vtk.vtkMultiBlockDataGroupFilter()
g.AddInputConnection(elev.GetOutputPort())
g.AddInputConnection(elev.GetOutputPort())
g.Update()
# Second pipeline: elevation along y, grouped the same way.
elev2 = vtk.vtkElevationFilter()
elev2.SetInputConnection(bp.GetOutputPort())
elev2.SetLowPoint(0, -10, 0)
elev2.SetHighPoint(0, 10, 0)
elev2.SetScalarRange(0, 20)
g2 = vtk.vtkMultiBlockDataGroupFilter()
g2.AddInputConnection(elev2.GetOutputPort())
g2.AddInputConnection(elev2.GetOutputPort())
# Finish the second sphere and a cylinder, pack the three primitive outputs
# into one vtkMultiBlockDataSet, and serve it through a group filter.
sphere2.SetThetaResolution(15)
sphere2.SetPhiResolution(15)
sphere2.Update()
cylinder = vtk.vtkCylinderSource()
cylinder.SetRadius(attrs[0])
cylinder.SetHeight(attrs[1])
cylinder.SetResolution(15)
cylinder.Update()
data = vtk.vtkMultiBlockDataSet()
data.SetNumberOfBlocks(3)
data.SetBlock(0, sphere1.GetOutput())
data.SetBlock(1, sphere2.GetOutput())
data.SetBlock(2, cylinder.GetOutput())
source = vtk.vtkMultiBlockDataGroupFilter()
# NOTE(review): AddInput is the VTK 5-era API; modern VTK uses
# AddInputData — confirm the VTK version this targets.
source.AddInput(data)
readers.append(source)
# One translucent, randomly colored actor per instance.
for instance in instances:
    mapper = vtk.vtkCompositePolyDataMapper()
    mapper.SetInputConnection(readers[shape[int(instance)]].GetOutputPort())
    mappers.append(mapper)
    actor = vtk.vtkActor()
    if int(instance) >= 0:
        actor.GetProperty().SetOpacity(0.7)
    actor.GetProperty().SetColor(random_color())
    actor.SetMapper(mapper)
    actors.append(actor)
    renderer.AddActor(actor)
# Take the input file from the first positional argument, or print usage
# and exit with an error status.
if len(args) > 0:
    io_filename = args[0]
else:
    usage()
    exit(1)
# Per-object lookup tables populated later while loading the scene.
transforms = dict()
transformers = dict()
data_connectors_v = dict()
data_connectors_t = dict()
data_connectors_d = dict()
# Aggregates every object's data so it can be written as one multiblock file.
big_data_source = vtk.vtkMultiBlockDataGroupFilter()
add_compatiblity_methods(big_data_source)
big_data_writer = vtk.vtkXMLMultiBlockDataWriter()
add_compatiblity_methods(big_data_writer)
contactors = dict()
offsets = dict()
vtkmath = vtk.vtkMath()


class Quaternion():
    # Thin wrapper around vtkQuaternion[float]; only the constructor is
    # visible in this fragment.
    def __init__(self, *args):
        self._data = vtk.vtkQuaternion[float](*args)