# Module-level imports assumed by the routines in this section (numpy, scipy
# and the kd-tree / timing utilities used throughout).  Project helpers such
# as GaussianSmooth, RBFmorph, MeshSmooth, LaplacianSmooth, vrtxnormal, Ndisp,
# Dsetup, DsetupNorm, Neigenvalues, elemQual_mu, pprocess and the parallel
# process limit LIM are defined elsewhere in the code base.
import time

import numpy as np
from scipy.stats import mstats
from scipy.stats.mstats import find_repeats
from scipy.spatial import KDTree


def Nneighbours(node, NC, Tri, layer):
    # Determine the neighbouring nodes of a specific node up to a set number
    # of "layers" (degrees of separation).
    # Inputs:
    #   node:    node whose neighbours are queried
    #   NC, Tri: nodal coordinates and triangle connectivity
    #   layer:   up to how many degrees of separation a neighbour should be
    #            searched
    aa = np.where(Tri == node)[0]
    pn = Tri[aa, ].reshape((aa.size * 3, ))
    # nodes appearing in more than one triangle around "node" are its 1-ring
    # neighbours:
    neigh = find_repeats(pn)[0]
    if layer > 1:
        for l in range(1, layer):
            for nn in neigh:
                aa = np.where(Tri == nn)[0]
                pn = Tri[aa, ].reshape((aa.size * 3, ))
                pn = find_repeats(pn)[0]
                for i in pn:
                    # only append nodes not already in the neighbour list:
                    if find_repeats(np.r_[neigh, i])[0].size == 0:
                        neigh = np.r_[neigh, i]
    # exclude the query node itself:
    neigh = np.array(neigh[np.where(neigh != node)[0], ], int)
    #if layer == 1:
    #    # if only direct neighbours are calculated, return them in
    #    # right-hand-rule order for retriangulation purposes:
    #    neighC = np.empty((neigh.size,))
    #    neighC[0,] = neigh[0,]
    #    rp, cp = np.where(Tri[neighpos,] == neigh[0,])
    #    for i in range(1, neigh.size):
    #        r, c = np.where((Tri[neighpos[rp,]] != neighC[i-1,]) & (Tri[neighpos[rp,]] != node))
    #        neighC[i,] = max(((cp+1 == c) | ((cp == 2)*(c == 0)))*Tri[neighpos[rp,], c])
    #        rp, cp = np.where(Tri[neighpos,] == neighC[i,])
    #    neigh = np.array(neighC, int)
    return neigh
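
# A minimal usage sketch of Nneighbours on a hypothetical 4-triangle fan.
# The mesh below is illustrative only, not data from this project:
def _example_Nneighbours():
    NC = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.],
                   [-1., 0., 0.], [0., -1., 0.]])
    Tri = np.array([[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 1]], int)
    # 1-ring of the hub node 0 is [1 2 3 4]:
    print Nneighbours(0, NC, Tri, 1)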
def AllowableNaT(NC, Tconnect, FeatAreaNodes, AllowNodes, Rlines, Vlines, keepLNnodes, doTri=1):
    AllowTri = []
    RemoveNodes = np.array([])
    LineNodes = np.array([])
    print "Get line nodes and set up kd-tree"
    # Rlines[0] / Vlines[0] hold the number of lines; entries 1..count hold
    # the node indices of each line:
    for i in range(1, Rlines[0] + 1):
        LineNodes = np.array(np.r_[LineNodes, Rlines[i]], int)
    for i in range(1, Vlines[0] + 1):
        LineNodes = np.array(np.r_[LineNodes, Vlines[i]], int)
    kdtLN = KDTree(NC[LineNodes, ], 5)
    print "Find allowable and remove additional nodes"
    for i in FeatAreaNodes:
        # closest line node to the current feature-area node:
        cn = LineNodes[np.array(kdtLN.query(NC[i, ])[1], int)]
        if np.where(keepLNnodes == cn)[0].size > 0:
            AllowNodes = np.r_[AllowNodes, i]
        else:
            RemoveNodes = np.r_[RemoveNodes, i]
    AllowNodes, RemoveNodes = np.array(AllowNodes, int), np.array(RemoveNodes, int)
    # remove possible repeated indices from "AllowNodes":
    #remove = find_repeats(AllowNodes)[0]
    if doTri == 1:
        print "Update allowable triangles"
        for i in range(Tconnect.shape[0]):
            # keep a triangle if it references at least one allowable node:
            if find_repeats(np.r_[AllowNodes, Tconnect[i, ]])[0].size > 0:
                AllowTri.append(i)
    AllowTri = np.array(AllowTri, int)
    return AllowNodes, RemoveNodes, AllowTri
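
# A minimal sketch of the nearest-neighbour classification used above,
# assuming scipy.spatial.KDTree (coordinates are illustrative only):
def _example_kdtree_classify():
    line_pts = np.array([[0., 0., 0.], [1., 0., 0.], [2., 0., 0.]])
    kdt = KDTree(line_pts, 5)
    dist, idx = kdt.query(np.array([0.9, 0.1, 0.]))
    # idx is 1: the query point is assigned to its closest line node
    print idx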
def _rank1d(data, keep_na=False):
    """Rank the entries of a 1-d (masked) array.

    Adapted from scipy.stats.mstats.rankdata, with one difference: ties are
    broken randomly (by permuting the ranks of the tied entries) instead of
    being averaged.

    Arguments:
        data: array-like input values.
        keep_na: if True, positions holding NaN keep NaN as their rank.

    Returns:
        Array of float ranks, one per entry of ``data``.
    """
    data = np.ma.array(data, copy=False)
    n = data.count()
    rk = np.empty(data.size, dtype=float)
    idx = data.argsort()
    rk[idx[:n]] = np.arange(1, n + 1)
    repeats = mstats.find_repeats(data.copy())
    for r in repeats[0]:
        condition = (data == r).filled(False)
        # resolve ties by randomly permuting the ranks of the tied entries:
        rk[condition] = np.random.permutation(rk[condition])
    # keep NaNs if requested:
    if keep_na:
        rk[np.isnan(data)] = np.nan
    return rk
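
# A short usage sketch of _rank1d (values chosen for illustration):
def _example_rank1d():
    x = np.array([10., 20., 20., 5.])
    # prints e.g. [2. 3. 4. 1.] or [2. 4. 3. 1.]: the tied 20s receive
    # ranks 3 and 4 in random order
    print _rank1d(x)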
def NodeselectFP(NC, Tri, Neig, vv):
    # spheres & saddles:
    spsad = np.where((Neig[:, 0] < 1.5*Neig[:, 1]) & (Neig[:, 1] < 1.5*Neig[:, 2]) & (Neig[:, 2] > 0))[0]
    rest = np.where((Neig[:, 0] >= 1.5*Neig[:, 1]) | (Neig[:, 1] >= 1.5*Neig[:, 2]) | (Neig[:, 2] <= 0))[0]
    print "Nr of Spheres & Saddles: ", spsad.size
    # ridges and valleys:
    ridgeval1 = np.where((Neig[rest, 0] < 1.5*Neig[rest, 1]) & (Neig[rest, 1] > 10*Neig[rest, 2]) & (np.abs(Neig[rest, 2]) < 0.001))[0]
    ridgeval = rest[ridgeval1, ]
    rest1 = np.where((Neig[rest, 0] >= 1.5*Neig[rest, 1]) | (Neig[rest, 1] <= 10*Neig[rest, 2]) | (np.abs(Neig[rest, 2]) >= 0.001))[0]
    rest = rest[rest1, ]
    print "Nr of Ridges & Valleys: ", ridgeval.size
    # planes:
    plane1 = np.where((Neig[rest, 0] > 10*Neig[rest, 1]) & (np.abs(Neig[rest, 1]) < 0.001) & (np.abs(Neig[rest, 2]) < 0.001))[0]
    plane = rest[plane1, ]
    rest1 = np.where((Neig[rest, 0] <= 10*Neig[rest, 1]) | (np.abs(Neig[rest, 1]) >= 0.001) | (np.abs(Neig[rest, 2]) >= 0.001))[0]
    rest = rest[rest1, ]
    print "Nr of Planes: ", plane.size
    # keep all nodes with eig1 < 5*eig2 (all features & sharp edges except planes):
    keep1 = np.where(Neig[rest, 0] < 5*Neig[rest, 1])[0]
    rest1 = np.where(Neig[rest, 0] >= 5*Neig[rest, 1])[0]
    keep = np.r_[spsad, ridgeval, rest[keep1, ]]
    rest = rest[rest1, ]
    track = 0
    # keep a progressively sparser subset for flatter regions, e.g. every 2nd
    # node for eig1 < 20*eig2 and every 3rd for eig1 < 500*eig2:
    #Neig = np.ma.load('EigvalDolph1n.txt')
    ##vv = np.array([50, 100, 1000])
    for inc in vv:
        track = track + 1
        print "Step ", track, " of ", vv.size
        rows2 = np.where(Neig[rest, 0] < inc*Neig[rest, 1])[0]
        rest1 = np.where(Neig[rest, 0] >= inc*Neig[rest, 1])[0]
        rows2 = rest[rows2, ]
        rest = rest[rest1, ]
        if rows2.size > 0:
            cont = 1
            while cont > 0:
                node = rows2[0, ]
                keep = np.r_[keep, node]
                neigh = Nneighbours(node, NC, Tri, track)
                # remove the selected node and its neighbours from the candidates:
                torem = find_repeats(np.r_[rows2, neigh])[0]
                for i in np.r_[node, torem]:
                    rows2 = rows2[np.r_[np.where(rows2 < i)[0], np.where(rows2 > i)[0]], ]
                #print 'possible selections in current category: ', rows2.size
                if rows2.size == 0:
                    cont = 0
        #for i in rows2:
        #    print "node: ", i
        #    neigh = Nneighbours(i, NC, Tri, track)
        #    if find_repeats(np.r_[keep, neigh])[0].size == 0:
        #        keep = np.r_[keep, i]
        #        print "  keep"
    keep = np.array(keep, int)
    keep.sort()
    return keep
def NodeselectFP(NC, Tri, layer):
    Neig = Neigenvalues(NC, Tri, layer)
    # spheres & saddles:
    spsad = np.where((Neig[:, 0] < 1.5*Neig[:, 1]) & (Neig[:, 1] < 1.5*Neig[:, 2]) & (Neig[:, 2] > 0))[0]
    rest = np.where((Neig[:, 0] >= 1.5*Neig[:, 1]) | (Neig[:, 1] >= 1.5*Neig[:, 2]) | (Neig[:, 2] <= 0))[0]
    print "Nr of Spheres & Saddles: ", spsad.size
    # ridges and valleys:
    ridgeval1 = np.where((Neig[rest, 0] < 1.5*Neig[rest, 1]) & (Neig[rest, 1] > 10*Neig[rest, 2]) & (np.abs(Neig[rest, 2]) < 0.001))[0]
    ridgeval = rest[ridgeval1, ]
    rest1 = np.where((Neig[rest, 0] >= 1.5*Neig[rest, 1]) | (Neig[rest, 1] <= 10*Neig[rest, 2]) | (np.abs(Neig[rest, 2]) >= 0.001))[0]
    rest = rest[rest1, ]
    print "Nr of Ridges & Valleys: ", ridgeval.size
    # planes:
    plane1 = np.where((Neig[rest, 0] > 10*Neig[rest, 1]) & (np.abs(Neig[rest, 1]) < 0.001) & (np.abs(Neig[rest, 2]) < 0.001))[0]
    plane = rest[plane1, ]
    rest1 = np.where((Neig[rest, 0] <= 10*Neig[rest, 1]) | (np.abs(Neig[rest, 1]) >= 0.001) | (np.abs(Neig[rest, 2]) >= 0.001))[0]
    rest = rest[rest1, ]
    print "Nr of Planes: ", plane.size
    # keep all nodes with eig1 < 3*eig2 (all features & sharp edges except planes):
    keep1 = np.where(Neig[rest, 0] < 3*Neig[rest, 1])[0]
    rest1 = np.where(Neig[rest, 0] >= 3*Neig[rest, 1])[0]
    keep = np.r_[spsad, ridgeval, rest[keep1, ]]
    rest = rest[rest1, ]
    track = 0
    # keep every 2nd node for eig1 < 20*eig2 and every 3rd for eig1 < 500*eig2:
    #Neig = np.ma.load('EigvalDolph1n.txt')
    vv = np.array([20, 500])
    for inc in vv:
        track = track + 1
        print "Step ", track, " of ", vv.size
        rows2 = np.where(Neig[rest, 0] < inc*Neig[rest, 1])[0]
        rest1 = np.where(Neig[rest, 0] >= inc*Neig[rest, 1])[0]
        rows2 = rest[rows2, ]
        rest = rest[rest1, ]
        for i in rows2:
            print "node: ", i
            neigh = Nneighbours(i, NC, Tri, track)
            # only keep this node if none of its neighbours were kept already:
            if find_repeats(np.r_[keep, neigh])[0].size == 0:
                keep = np.r_[keep, i]
                print "  keep"
    keep = np.array(keep, int)
    keep.sort()
    return keep
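
# The thresholds above assume Neigenvalues returns, for each node, the
# descending eigenvalues (eig1 >= eig2 >= eig3) of the covariance of the
# normals in its neighbourhood: a plane leaves one dominant eigenvalue, a
# ridge/valley two, and a sphere/saddle three.  A hypothetical sketch of
# that computation for a single node, using the normals of its incident
# triangles (an assumption; the project's Neigenvalues may differ):
def _example_normal_eigenvalues(node, NC, Tri):
    tt = np.where(Tri == node)[0]
    tn = np.cross(NC[Tri[tt, 1], ] - NC[Tri[tt, 0], ],
                  NC[Tri[tt, 2], ] - NC[Tri[tt, 0], ])
    tn = tn / np.sqrt(np.sum(tn * tn, 1)).reshape((-1, 1))
    tn = tn - np.average(tn, 0)
    eigval = np.linalg.eigvalsh(np.dot(tn.T, tn) / tn.shape[0])
    return eigval[::-1]  # descending order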
def TriEdge(node, NC, Tri):
    # For every edge incident to "node", return the neighbouring node and the
    # two triangles on either side of that edge.
    triangles = []
    aa = np.where(Tri == node)[0]
    pn = Tri[aa, ].reshape((aa.size * 3, ))
    neigh = find_repeats(pn)[0]
    neighOne = np.array(neigh, int)
    neighOne = neighOne[np.where(neighOne != node)[0]]
    for i in neighOne:
        # assumes a closed (watertight) surface, so that every edge is shared
        # by exactly two triangles:
        tr = np.where(Tri[aa, ] == i)[0]
        triangles = triangles + [[aa[tr[0]], aa[tr[1]]]]
    triangles = np.array(triangles, int)
    return neighOne, triangles
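
# Usage sketch on the illustrative fan mesh from _example_Nneighbours: every
# edge around the hub node is shared by exactly two triangles.
def _example_TriEdge():
    NC = np.array([[0., 0., 0.], [1., 0., 0.], [0., 1., 0.],
                   [-1., 0., 0.], [0., -1., 0.]])
    Tri = np.array([[0, 1, 2], [0, 2, 3], [0, 3, 4], [0, 4, 1]], int)
    neighOne, triangles = TriEdge(0, NC, Tri)
    print neighOne   # [1 2 3 4]
    print triangles  # the pair of triangle indices flanking each edge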
def EQweight(NCinner, NC, inner, Tet, EQprev, deltPrescr):
    # Cost function on the inner-node coordinates: recompute element quality
    # only for tetrahedra touched by nodes that actually moved.
    EQ = np.r_[EQprev]
    NCcur = np.r_[NC]
    NCDIFF = NCinner.reshape((inner.size, 3)) - NC[inner, ]
    NCcur[inner, ] = NCinner.reshape((inner.size, 3))
    nodes = inner[np.where(np.sum(NCDIFF * NCDIFF, 1) > 0)[0]]
    tets = np.array([])
    if nodes.size > 0:
        for i in nodes:
            tets = np.r_[tets, np.where(Tet == i)[0]]
        #tets = np.array(tets, int)
        # duplicating the list and taking find_repeats extracts the unique
        # tetrahedron indices:
        tets = np.array(find_repeats(np.r_[tets, tets])[0], int)
        if tets.size > 0:
            EQ[tets] = elemQual_mu(tets, NCcur, Tet, 0, deltPrescr)[0]
    COST = np.sum(1 / EQ)
    return COST
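
# EQweight takes the flattened inner-node coordinates as its first argument,
# which suggests use as an objective for a generic minimizer.  A hypothetical
# sketch, assuming scipy.optimize.fmin and that the project arrays NC, inner,
# Tet, EQprev and deltPrescr are in scope:
#
#   from scipy.optimize import fmin
#   x0 = NC[inner, ].reshape((inner.size * 3, ))
#   xopt = fmin(EQweight, x0, args=(NC, inner, Tet, EQprev, deltPrescr))
#   NC[inner, ] = xopt.reshape((inner.size, 3))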
def elasticsurf(NCB, ConnectB, LandmB, LandmB_NC, AllowableBI, NCT, ConnectT,
                AllowableT, UseN_B, UseN_T, k_max, USENORMALS, gamm=2,
                sigm0=10, f=1.0715):
    # Elastic surface registration.
    # Inputs:
    #   NCB, NCT:            nodal coordinates of base and target surfaces
    #   ConnectB, ConnectT:  base & target connectivity
    #   LandmB, LandmB_NC:   landmarks that have to have a 1-to-1
    #                        correspondence (input 0 if no landmarks are present)
    #   UseN_B & AllowableBI: feature-dependent nodes on the base mesh
    #                        (indices in NCB) and allowable triangles to match
    #   UseN_T & AllowableT: selective feature-preserving nodes and triangles
    #                        (indices in NCT and ConnectT) on the target mesh
    #   k_max:               maximum number of iterations
    ######## ADDITIONAL SETTINGS REQUIRED ARE SET INTERNAL TO CODE #########
    print
    print "SELECTIVE MESH MORPHING ALGORITHM USING ELASTIC SURFACE REGISTRATION"
    print "           -G.J.J.v.Rensburg - 22/04/2010-"
    t_start = time.clock()
    ConnectB = np.array(ConnectB, int)
    ConnectT = np.array(ConnectT, int)
    #LandmB = np.array(LandmB[:,0], int)  # do -1 later to be consistent with
    # python indexing; first need to do other "temporary landmarks" & check
    # that they don't fall on actual landmark positions!

    # Settings for elastic surface registration:
    m = 20        # nearest neighbour parameter
    alph = 0.5    # normalization factor
    #gamm = 2     # smoothing parameter 1
    #sigm0 = 10   # smoothing parameter 2
    #f = 1.0715   # smoothing parameter 3
    Tol = 0.0001  # stopping criterion

    # determine N1, N2, T1 and T2:
    N1 = NCB.shape[0]
    N2 = NCT.shape[0]
    T1 = ConnectB.shape[0]
    T2 = ConnectT.shape[0]
    NL = LandmB.shape[0]
    # For parallel programming divide the number of computations by the number
    # of parallel processes (LIM):
    NPP1 = N1 / LIM
    NPP2 = N2 / LIM

    ################################ INITIALIZE & NODES OF CONCERN: ################################
    #################################################################################################
    print
    print
    print "Set up 1-ring neighbor list for all points on the generic mesh"
    #neighbList = [[0]]*N1
    #results = pprocess.Map(limit=LIM)
    #calc = results.manage(pprocess.MakeParallel(Get1neigh))
    #for j in range(0, LIM):
    #    calc(np.array(range(0, NPP1)) + j*NPP1, NCB, ConnectB)
    #for j in range(0, LIM):
    #    neighbList[j*NPP1:(1+j)*NPP1] = results[j]
    #neighbList[LIM*NPP1:N1] = Get1neigh(np.array(range(LIM*NPP1, N1)), NCB, ConnectB)
    #np.ma.dump(neighbList, 'SkullSurf_neighbList')
    neighbList = np.ma.load('SkullSurf_neighbList')

    print
    print "INITIALIZE SURFACE DEFORMATION"
    CONV = []
    print "  enquire nodes where required displacement is checked"
    ### remove Landmarks from FDNB and SFPNT:
    #for i in range(0, NL):
    #    if find_repeats(np.r_[UseN_B, LandmB[i, ]])[0].size > 0:
    #        r = np.where(UseN_B == LandmB[i, ])[0]
    #        UseN_B = np.r_[UseN_B[0:r, ], UseN_B[r+1:UseN_B.size, ]]
    SamplingB = UseN_B.size
    SamplingT = UseN_T.size
    ## Full list of nodes used in surface registration:
    LMB = np.r_[UseN_B]  #,LandmB]  # last NL entries are reserved for
    # landmarks that HAVE TO FIT points on the target mesh
    LMT = np.r_[UseN_T]
    # For parallel programming divide the number of computations by the number
    # of parallel processes (LIM):
    SBPP = SamplingB / LIM
    STPP = SamplingT / LIM
    FMorph = 0

    print
    print "COARSE SURFACE REGISTRATION"
    #print "  Compute known displacement for Base_Landmarks "
    #knownC = NCB[LandmB, ]
    #knownD = LandmB_NC - knownC
    ####print "  using landmark displacements to deform using RBF"
    ####W_km1 = RBFmorph(NCB, knownC, knownD)
    ####tic = time.clock()
    ####W_km1 = MeshSmooth(W_km1, neighbList, 10)
    ####print "  Smoothing done in ", time.clock()-tic, " seconds"
    ####np.ma.dump(W_km1, 'TempElasNodes_Iter'+str(k-1)+'_Time'+time.ctime())
    #print 'Smooth Gaussian Weight deformation to align Landmarks to target positions'
    #k = 0
    #Err = 2
    #W_km1 = np.r_[NCB]
    #while (k < 100) | (Err > Tol):
    #    k = k + 1
    #    print 'Iteration : ', k
    #    DS = np.zeros((N1, 3))
    #    knownC = W_km1[LandmB, ]
    #    knownD = LandmB_NC - knownC
    #    knownD[np.isnan(knownD)] = 0
    #    # Deform mesh using Gaussian smoothing as suggested in the paper by R. Bryan et al.:
    #    sigma_k2 = np.power(np.power(f, -k)*20, 2)
    #    results = pprocess.Map(limit=LIM)
    #    calc = results.manage(pprocess.MakeParallel(GaussianSmooth))
    #    for j in range(0, LIM):
    #        calc(np.array(range(0, NPP1)) + j*NPP1, W_km1, knownC, knownD, sigma_k2, gamm)
    #    for j in range(0, LIM):
    #        DS[np.array(range(0, NPP1)) + j*NPP1, :] = results[j]
    #    DS[range(LIM*NPP1, N1), :] = GaussianSmooth(np.array(range(LIM*NPP1, N1)), W_km1, knownC, knownD, sigma_k2, gamm)
    #    DS[np.isnan(DS)] = 0
    #    W_km1 = W_km1 + DS
    #    Err = np.sum(np.sqrt(np.sum(DS*DS, 1)), 0)/N1
    #W_km1 = MeshSmooth(W_km1, neighbList, 10)
    #np.ma.dump(W_km1, 'TempElasNodes_Iter0_TimeWedMar14_2011_20')
    ###np.ma.dump(W_km1, 'TempElasNodes_Iter0_Time'+time.ctime())
    W_km1 = NCB

    ################################ MAIN MESH DEFORMATION ALGORITHM: ##############################
    #################################################################################################
    k = 1
    print
    print "ELASTIC SURFACE REGISTRATION"
    print "determine vertex normals of target surface"
    # Compute target-mesh triangle centroids:
    print "determining centroids of target surface triangles"
    S_2_centr = np.c_[
        np.sum(np.c_[NCT[ConnectT[:, 0], 0], NCT[ConnectT[:, 1], 0], NCT[ConnectT[:, 2], 0]], 1)/3,
        np.sum(np.c_[NCT[ConnectT[:, 0], 1], NCT[ConnectT[:, 1], 1], NCT[ConnectT[:, 2], 1]], 1)/3,
        np.sum(np.c_[NCT[ConnectT[:, 0], 2], NCT[ConnectT[:, 1], 2], NCT[ConnectT[:, 2], 2]], 1)/3]
    print "determine triangle and vertex normals of target surface"
    TNORMT = np.cross(NCT[ConnectT[:, 1], :] - NCT[ConnectT[:, 0], :],
                      NCT[ConnectT[:, 2], :] - NCT[ConnectT[:, 0], :])
    TNORMT = (TNORMT.T/(np.ones((3, 1))*np.sqrt(np.sum(np.array([TNORMT*TNORMT]), 2)))).T
    VNORMT = vrtxnormal(NCT, ConnectT, S_2_centr, TNORMT)
    print "determining kd-trees of target surface centroids and nodal coordinates"
    KDT_TC = KDTree(S_2_centr, m)
    KDT_TN = KDTree(NCT, m)
    print 'initialize absolute Gaussian weight for final displacement to preserve element quality'
    GW = np.ones((SamplingB + SamplingT, 1))

    while k <= k_max:
        D1 = np.zeros((SamplingB, 3))
        D2 = np.zeros((SamplingT, 3))
        DS = np.zeros((N1, 3))
        AllowableB = np.r_[AllowableBI]
        print
        print "MESH DEFORMATION ITERATION", k
        print "  determining known displacement of landmarks"
        if NL > 0:
            knownD = LandmB_NC - W_km1[LandmB, ]
        print "  determining centroids of deforming mesh"
        W_km1_centr = np.c_[
            np.sum(np.c_[W_km1[ConnectB[:, 0], 0], W_km1[ConnectB[:, 1], 0], W_km1[ConnectB[:, 2], 0]], 1)/3,
            np.sum(np.c_[W_km1[ConnectB[:, 0], 1], W_km1[ConnectB[:, 1], 1], W_km1[ConnectB[:, 2], 1]], 1)/3,
            np.sum(np.c_[W_km1[ConnectB[:, 0], 2], W_km1[ConnectB[:, 1], 2], W_km1[ConnectB[:, 2], 2]], 1)/3]
        print "  determine triangle and vertex normals of deforming surface"
        TNORMB = np.cross(W_km1[ConnectB[:, 1], :] - W_km1[ConnectB[:, 0], :],
                          W_km1[ConnectB[:, 2], :] - W_km1[ConnectB[:, 0], :])
        TNORMB = (TNORMB.T/(np.ones((3, 1))*np.sqrt(np.sum(np.array([TNORMB*TNORMB]), 2)))).T
        VNORMB = vrtxnormal(W_km1, ConnectB, W_km1_centr, TNORMB)
        print "  determining kd-tree of current deforming surface centroids and nodal coordinates"
        KDT_KC = KDTree(W_km1_centr, m)
        KDT_KN = KDTree(W_km1, m)
        #if find_repeats(np.r_[USENORMALS, k])[0].size > 0:
        #    print " ### Use triangle and vertex normals in setting up point correspondence"
        print "  setting up D1(i,d)"
        tic = time.clock()
        results = pprocess.Map(limit=LIM)
        calc = results.manage(pprocess.MakeParallel(DsetupNorm))
        for j in range(0, LIM):
            calc(np.array(range(0, SBPP)) + j*SBPP, W_km1, VNORMB, NCT,
                 TNORMT, VNORMT, ConnectT, S_2_centr, AllowableT, LMB, D1)
        for j in range(0, LIM):
            D1[np.array(range(0, SBPP)) + j*SBPP, :] = results[j]
        D1[range(LIM*SBPP, SamplingB), :] = DsetupNorm(
            range(LIM*SBPP, SamplingB), W_km1, VNORMB, NCT, TNORMT, VNORMT,
            ConnectT, S_2_centr, AllowableT, LMB, D1)
        #D1 = np.r_[D1, knownD]
        print "     ", time.clock()-tic, " seconds"
        print "  update allowable triangles on generic mesh:"
        remP = D1[:, 0] + D1[:, 1] + D1[:, 2] == 0
        removeP = LMB[remP]
        print "    unregistered points on generic mesh: ", removeP.size
        print "    number of original generic triangles allowed: ", AllowableB.shape[0]
        for rp in removeP:
            rowsNo = np.where(AllowableB == rp)[0]
            rowsNo.sort()
            for rr in rowsNo[::-1]:
                AllowableB = AllowableB[np.where(np.arange(AllowableB.shape[0]) != rr)[0], ]
        print "    number of generic triangles allowed for current iteration: ", AllowableB.shape[0]
        if find_repeats(np.r_[USENORMALS, k])[0].size > 0:
            print " ### Use triangle and vertex normals in setting up point correspondence"
            print "  setting up D2(j,c)"
            tic = time.clock()
            results = pprocess.Map(limit=LIM)
            calc = results.manage(pprocess.MakeParallel(DsetupNorm))
            for j in range(0, LIM):
                calc(np.array(range(0, STPP)) + j*STPP, NCT, VNORMT, W_km1,
                     TNORMB, VNORMB, ConnectB, W_km1_centr, AllowableB, LMT, D2)
            for j in range(0, LIM):
                D2[np.array(range(0, STPP)) + j*STPP, :] = results[j]
            D2[range(LIM*STPP, SamplingT), :] = DsetupNorm(
                range(LIM*STPP, SamplingT), NCT, VNORMT, W_km1, TNORMB,
                VNORMB, ConnectB, W_km1_centr, AllowableB, LMT, D2)
            print "     ", time.clock()-tic, " seconds"
        else:
            print "  Simple closest point search iteration "
            #print "  setting up D1(i,d)"
            #tic = time.clock()
            #results = pprocess.Map(limit=LIM)
            #calc = results.manage(pprocess.MakeParallel(Dsetup))
            #for j in range(0, LIM):
            #    calc(np.array(range(0, SBPP)) + j*SBPP, W_km1, NCT, ConnectT, S_2_centr, AllowableT, LMB, D1, KDT_TC, KDT_TN)
            #for j in range(0, LIM):
            #    D1[np.array(range(0, SBPP)) + j*SBPP, :] = results[j]
            #D1[range(LIM*SBPP, SamplingB), :] = Dsetup(range(LIM*SBPP, SamplingB), W_km1, NCT, ConnectT, S_2_centr, AllowableT, LMB, D1, KDT_TC, KDT_TN)
            ##D1 = np.r_[D1, knownD]
            #print "     ", time.clock()-tic, " seconds"
            #remP = D1[:, 0] + D1[:, 1] + D1[:, 2] == 0
            #removeP = LMB[remP]
            #print "    unregistered points on generic mesh: ", removeP.size
            #print "    number of original generic triangles allowed: ", AllowableB.shape[0]
            #for rp in removeP:
            #    rowsNo = np.where(AllowableB == rp)[0]
            #    rowsNo.sort()
            #    for rr in rowsNo[::-1]:
            #        AllowableB = AllowableB[np.where(np.arange(AllowableB.shape[0]) != rr)[0], ]
            #print "    number of generic triangles allowed for current iteration: ", AllowableB.shape[0]
            print "  setting up D2(j,c)"
            tic = time.clock()
            results = pprocess.Map(limit=LIM)
            calc = results.manage(pprocess.MakeParallel(Dsetup))
            for j in range(0, LIM):
                calc(np.array(range(0, STPP)) + j*STPP, NCT, W_km1, ConnectB,
                     W_km1_centr, AllowableB, LMT, D2, KDT_KC, KDT_KN)
            for j in range(0, LIM):
                D2[np.array(range(0, STPP)) + j*STPP, :] = results[j]
            D2[range(LIM*STPP, SamplingT), :] = Dsetup(
                range(LIM*STPP, SamplingT), NCT, W_km1, ConnectB, W_km1_centr,
                AllowableB, LMT, D2, KDT_KC, KDT_KN)
            print "     ", time.clock()-tic, " seconds"

        # Compute the displacement update for each node using the suggested
        # Gaussian radial basis function:
        print "  determining smoothed displacement field"
        tic = time.clock()
        NCp = np.r_[W_km1[LMB, :], NCT[LMT, :] + D2]
        DD = np.r_[D1, -D2]
        # Mask NaN and Inf values if any:
        DD[np.isnan(DD)] = 0
        DD[np.isinf(DD)] = 0
        #keepP = DD[:, 0] + DD[:, 1] + DD[:, 2] != 0
        #print keepP
        #NCp, DD = NCp[keepP, :], DD[keepP, :]
        #KDTp = KDTree(NCp, 5)
        # Deform mesh using Gaussian smoothing as suggested in the paper by R. Bryan et al.:
        sigma_k2 = np.power(np.power(f, -k)*sigm0, 2)
        results = pprocess.Map(limit=LIM)
        calc = results.manage(pprocess.MakeParallel(GaussianSmooth))
        for j in range(0, LIM):
            calc(np.array(range(0, NPP1)) + j*NPP1, W_km1, NCp, DD, sigma_k2, gamm)
        for j in range(0, LIM):
            DS[np.array(range(0, NPP1)) + j*NPP1, :] = results[j]
        DS[range(LIM*NPP1, N1), :] = GaussianSmooth(
            np.array(range(LIM*NPP1, N1)), W_km1, NCp, DD, sigma_k2, gamm)
        print "     ", time.clock()-tic, " seconds"
        # Mask NaN and Inf if any:
        DS[np.isnan(DS)] = 0
        DS[np.isinf(DS)] = 0
        #print 'Check if current iteration reduces element quality to below allowable and stiffen mesh accordingly'
        print
        print
        print 'Convergence History'
        print CONV
        print
        print
        # Determine the Jacobian of all elements and, if unsatisfied, apply
        # stiffening (decrease GW < 1) until this no longer happens.
        # Determine whether convergence is achieved:
        #TotalMorph = np.sum(np.sqrt(np.sum(DS*DS, 1)), 0)/NCB.shape[0]
        TotalMorph = np.sum(np.sqrt(np.sum(DD*DD, 1)))/(DD.size/3)
        CONV = CONV + [TotalMorph]
        FMorph = (k == 1)*TotalMorph + FMorph
        print "  average nodal displacement for current deformation iteration:"
        print TotalMorph
        if (TotalMorph < Tol):
            print
            print "CONVERGED SOLUTION OBTAINED"
            #CONV = CONV + [TotalMorph]
            k = k_max*10 + 1
            W_km1 = W_km1 + DS
        elif (k < 10) | (TotalMorph < 10*FMorph):
            print "problem not yet converged at iteration", k
            #CONV = CONV + [TotalMorph]
            k = k + 1
            # Deform mesh:
            print "  deforming mesh (update of W_{k-1})"
            W_km1 = W_km1 + DS
            #np.ma.dump(W_km1, 'Femur2NC_'+str(k))
        else:
            print "PROBLEM DIVERGING"
            k = k_max*10 - 1
            #np.ma.dump(W_km1, 'TempElasNodes_Iter'+str(k-1)+'_Time'+time.ctime())
        if (k > 2) & (np.mod(k-1, 5) == 0):
            print
            #np.ma.dump(W_km1, 'TempElasNodes_Iter'+str(k-1)+'_Time'+time.ctime())
            #W_km1 = RBFmorph(W_km1, W_km1[LandmB, ], LandmB_NC - W_km1[LandmB, ])
            tic = time.clock()
            W_km1 = MeshSmooth(W_km1, neighbList, 10)
            np.ma.dump(W_km1, 'SkullUnique2_gamm'+str(gamm)+'_sigN'+str(sigm0)+'_iter'+str(k-1))
            print "  Smoothing done in ", time.clock()-tic, " seconds"
            #print "COARSE SURFACE REGISTRATION"
            #print "  using landmark displacements to deform using RBF"
            #W_km1 = RBFmorph(W_km1, W_km1[LandmB, ], LandmB_NC - W_km1[LandmB, ])
        print
    if k == k_max + 1:
        print
        print "SOLUTION TERMINATED: maximum iterations (", k_max, ") reached"
    print
    print "TOTAL TIME FOR ELASTIC SURFACE REGISTRATION : ", time.clock()-t_start, "seconds"
    CONV = np.array(CONV)
    return W_km1, CONV
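
# GaussianSmooth is defined elsewhere in the project.  Based on the in-line
# version in the older elasticsurf below, a minimal single-point sketch of
# the weighting it is assumed to perform (an assumption, not the project's
# actual implementation): each control-point displacement in DD is weighted
# by a Gaussian of its distance to the query coordinate.
def _example_gaussian_smooth(x, NCp, DD, sigma_k2, gamm):
    # x: one query coordinate (3,); NCp: control points (n,3); DD: control
    # displacements (n,3).
    diff = NCp - x
    g = np.exp(-np.sum(diff * diff, 1) / sigma_k2)
    return np.sum(DD.T * g, 1) / (np.sum(g) * gamm)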
# Script fragment: apply the smoothed displacement field to the tetrahedral
# mesh, check element quality and flag degenerate regions for optimization.
for j in range(0, LIM):
    DS[np.array(range(0, NPP1)) + j*NPP1, :] = results[j]
DS[range(LIM*NPP1, N1), :] = ptet.GaussianSmooth(
    np.array(range(LIM*NPP1, N1)), np.r_[NCS], NCp, DD, sigma_k2, 2)
NCS[outer, ] = NCS[outer, ] + DispOuter
NCS[inner, ] = NCS[inner, ] + DS[inner, ]
EQ, delt, Sn2, Sig = qu.elemQual_mu(np.array(range(TetT.shape[0])), NCS, TetT)
print '   Average Element Quality: ', np.average(EQ)
print '   Degenerate (q<0.15):     ', np.where(EQ < 0.15)[0].size
print '   Inverted Elements:       ', np.where(Sig < 0)[0].size
TetDeg = np.where(EQ < 0.15)[0]
DegNd = TetT[TetDeg, ]
DegNd = DegNd.reshape((DegNd.size, ))
# deduplicate node indices: strip all copies of repeated nodes, then add each
# repeated node back once:
DegRep = np.array(find_repeats(DegNd)[0], int)
for i in DegRep:
    DegNd = DegNd[DegNd != i]
DegNd = np.r_[DegNd, DegRep]
# revert degenerate nodes to their previous positions plus the smoothed update:
NCS[DegNd, ] = NCSprev[DegNd, ] + DS[DegNd, ]
#DegNd = np.array(find_repeats(np.r_[DegNd, outer])[0], int)
DegNd.sort()
PointConst = np.zeros((NCS.shape[0], ))
PointConst[outer, ] = 1
PointConst[DegNd, ] = 0
PointConst = np.array(PointConst, int)
print 'Construct VTK object for optimization'
## ADD CONSTRAINTS AS POINT SCALARS
skvtk = pv.VtkData(pv.UnstructuredGrid(points=NCS, tetra=TetT), 'skull 4 symm',
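
# The VtkData call above is truncated in the source.  For reference, the
# pyvtk API (assuming pv is the pyvtk module alias used above) would attach
# PointConst as point scalars along these lines; this is a hypothetical
# sketch, not the original call:
#
#   skvtk = pv.VtkData(pv.UnstructuredGrid(points=NCS, tetra=TetT),
#                      'skull 4 symm',
#                      pv.PointData(pv.Scalars(PointConst, name='PointConst')))
#   skvtk.tofile('skull4symm_constrained')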
def elasticsurf(NCB, ConnectB, LandmB, NCT, ConnectT, LandmT, FDNB, SFPNT, SFPTT, k_max):
    # Elastic surface registration.
    # Inputs:
    #   NCB, NCT:           nodal coordinates of base and target surfaces
    #   ConnectB, ConnectT: base & target connectivity
    #   LandmB, LandmT:     landmarks that have to have a 1-to-1 correspondence
    #                       (input 0 if no landmarks are present)
    #   FDNB:               feature-dependent nodes on the base mesh (indices in NCB)
    #   SFPNT & SFPTT:      selective feature-preserving nodes and triangles
    #                       (indices in NCT and ConnectT) on the target mesh
    #   k_max:              maximum number of iterations
    # 1) The base mesh is first morphed onto the target using the 1-to-1
    #    correspondence with the target-mesh landmarks.
    # 2) Elastic surface registration is done using FDNB & SFPNT to the target
    #    and base mesh respectively.  If a node in FDNB is displaced to a
    #    triangle other than one in SFPTT, the displacement of concern is
    #    discarded in order to retain only the selected features.
    ######## ADDITIONAL SETTINGS REQUIRED ARE SET INTERNAL TO CODE #########
    print
    print "SELECTIVE MESH MORPHING ALGORITHM USING ELASTIC SURFACE REGISTRATION"
    print "           -G.J.J.v.Rensburg - 22/04/2010-"
    t_start = time.clock()
    ConnectB = np.array(ConnectB, int)
    ConnectT = np.array(ConnectT, int)
    LandmB = np.array(LandmB[:, 0], int)  # do -1 later to be consistent with
    # python indexing; first need to do other "temporary landmarks" & check
    # that they don't fall on actual landmark positions!
    LandmT = np.array(LandmT[:, 0], int)
    # Settings for elastic surface registration:
    m = 10        # nearest neighbour parameter
    alph = 0.5    # normalization factor
    gamm = 2      # smoothing parameter 1
    sigm0 = 10    # smoothing parameter 2
    f = 1.0715    # smoothing parameter 3
    Tol = 0.0001  # stopping criterion
    # determine N1, N2, T1 and T2:
    N1 = NCB.shape[0]
    N2 = NCT.shape[0]
    T1 = ConnectB.shape[0]
    T2 = ConnectT.shape[0]
    NL = LandmB.shape[0]

    ################################ INITIALIZE & NODES OF CONCERN: ################################
    #################################################################################################
    print
    print "INITIALIZE SURFACE DEFORMATION"
    k = 1
    CONV = np.zeros((k_max, 1))
    print "  enquire nodes where required displacement is checked"
    # remove landmarks from FDNB and SFPNT:
    for i in range(0, NL):
        if find_repeats(np.r_[FDNB, LandmB[i, ]])[0].size > 0:
            r = np.where(FDNB == LandmB[i, ])[0][0]
            FDNB = np.r_[FDNB[0:r, ], FDNB[r+1:FDNB.size, ]]
        if find_repeats(np.r_[SFPNT, LandmT[i, ]])[0].size > 0:
            r = np.where(SFPNT == LandmT[i, ])[0][0]
            SFPNT = np.r_[SFPNT[0:r, ], SFPNT[r+1:SFPNT.size, ]]
    SamplingB = FDNB.size
    SamplingT = SFPNT.size
    LMB = np.r_[FDNB, LandmB]  # last NL entries are reserved for landmarks
    # that HAVE TO FIT points on the target mesh
    LMT = np.r_[SFPNT, LandmT]
    print "  Compute known displacement for Base_Landmark to Target_Landmark"
    knownC = NCB[LandmB, ]
    knownD = NCT[LandmT, ] - knownC
    print
    print "COARSE SURFACE REGISTRATION"
    print "  using landmark displacements to deform using RBF"
    W_km1 = RBFmorph(NCB, NCB[LandmB, ], NCT[LandmT, ] - NCB[LandmB, ])

    ################################ MAIN MESH DEFORMATION ALGORITHM: ##############################
    #################################################################################################
    print
    print "ELASTIC SURFACE REGISTRATION"
    # Compute target-mesh triangle centroids:
    print "determining centroids of target surface triangles"
    S_2_centr = np.c_[
        np.sum(np.c_[NCT[ConnectT[:, 0], 0], NCT[ConnectT[:, 1], 0], NCT[ConnectT[:, 2], 0]], 1)/3,
        np.sum(np.c_[NCT[ConnectT[:, 0], 1], NCT[ConnectT[:, 1], 1], NCT[ConnectT[:, 2], 1]], 1)/3,
        np.sum(np.c_[NCT[ConnectT[:, 0], 2], NCT[ConnectT[:, 1], 2], NCT[ConnectT[:, 2], 2]], 1)/3]
    print "determining kd-trees of target surface centroids and nodal coordinates"
    KDT_TC = KDTree(S_2_centr, m)
    KDT_TN = KDTree(NCT, m)
    while k <= k_max:
        D1 = np.zeros((SamplingB, 3))
        D2 = np.zeros((SamplingT, 3))
        DS = np.zeros((N1, 3))
        print
        print "MESH DEFORMATION ITERATION", k
        print "determining known displacement of landmarks"
        knownD = NCT[LandmT, ] - W_km1[LandmB, ]
        print "  determining centroids of deforming mesh"
        W_km1_centr = np.c_[
            np.sum(np.c_[W_km1[ConnectB[:, 0], 0], W_km1[ConnectB[:, 1], 0], W_km1[ConnectB[:, 2], 0]], 1)/3,
            np.sum(np.c_[W_km1[ConnectB[:, 0], 1], W_km1[ConnectB[:, 1], 1], W_km1[ConnectB[:, 2], 1]], 1)/3,
            np.sum(np.c_[W_km1[ConnectB[:, 0], 2], W_km1[ConnectB[:, 1], 2], W_km1[ConnectB[:, 2], 2]], 1)/3]
        print "  determining kd-tree of current deforming surface centroids and nodal coordinates"
        KDT_KC = KDTree(W_km1_centr, m)
        KDT_KN = KDTree(W_km1, m)
        print "  setting up D1(i,d)"
        tic = time.clock()
        for i in range(0, SamplingB):
            nn = LMB[i]
            # query kd-tree for the closest triangle centroid to the node:
            ncl = KDT_TC.query(W_km1[nn, :])[1]
            # check if the closest triangle is a feature-preserving target triangle:
            if np.where(SFPTT == ncl)[0].size > 0:
                ## determine target triangle normal vector:
                #tnorm = np.cross(NCT[ConnectT[ncl, 1], :]-NCT[ConnectT[ncl, 0], :], NCT[ConnectT[ncl, 2], :]-NCT[ConnectT[ncl, 0], :])
                #tnorm = tnorm/np.sqrt(np.sum(tnorm*tnorm))
                ## determine current vertex normal:
                #vnorm = vertexnormal(W_km1, ConnectB[np.where(ConnectB == nn)[0], ])
                #if np.dot(vnorm[nn, ], tnorm) > 0:
                #    # check for correlation between the closest triangle's
                #    # normal direction and the base-mesh curvature
                # move to the triangle / closest node:
                D1[i, :] = Ndisp(W_km1[nn, :], ncl, NCT, ConnectT, S_2_centr, KDT_TN)
        D1 = np.r_[D1, knownD]
        print "     ", time.clock()-tic, " seconds"
        print "  setting up D2(j,c)"
        tic = time.clock()
        for j in range(0, SamplingT):
            nn = LMT[j]
            ncl = KDT_KC.query(NCT[nn, :])[1]
            #tnorm = np.cross(W_km1[ConnectB[ncl, 1], :]-W_km1[ConnectB[ncl, 0], :], W_km1[ConnectB[ncl, 2], :]-W_km1[ConnectB[ncl, 0], :])
            #tnorm = tnorm/np.sqrt(np.sum(tnorm*tnorm))
            #vnorm = vertexnormal(NCT, ConnectT[np.where(ConnectT == nn)[0], ])
            #if np.dot(vnorm[nn, ], tnorm) > 0:
            D2[j, :] = Ndisp(NCT[nn, :], ncl, W_km1, ConnectB, W_km1_centr, KDT_KN)
            #else:
            #    D2[j, :] = np.array([0, 0, 0])
        D2 = np.r_[D2, -knownD]
        print "     ", time.clock()-tic, " seconds"
        # Compute the displacement update for each node using the suggested
        # Gaussian radial basis function:
        print "  determining smoothed displacement field"
        tic = time.clock()
        # Deform mesh using Gaussian smoothing as suggested in the paper by R. Bryan et al.:
        sigma_k2 = np.power(np.power(f, -k)*sigm0, 2)
        for nodes in range(0, N1):
            G1node = np.array([W_km1[nodes, :]]).T*np.ones((1, SamplingB + NL)) - W_km1[LMB, :].T
            G1node = np.exp(-np.sum(G1node*G1node, 0)/sigma_k2)
            G2node = np.array([W_km1[nodes, :]]).T*np.ones((1, SamplingT + NL)) - NCT[LMT, :].T - D2.T
            G2node = np.exp(-np.sum(G2node*G2node, 0)/sigma_k2)
            DS[nodes, :] = (np.sum(D1.T*G1node, 1)/np.sum(G1node, 0)
                            - np.sum(D2.T*G2node, 1)/np.sum(G2node, 0))/gamm
        print "     ", time.clock()-tic, " seconds"
        # determine whether convergence is achieved:
        TotalMorph = np.sum(np.sqrt(np.sum(DS*DS, 1)), 0)
        print "  total displacement for current deformation iteration:"
        print TotalMorph
        if (TotalMorph < Tol):
            print
            print "CONVERGED SOLUTION OBTAINED"
            CONV[k-1, 0] = TotalMorph
            k = k_max*10 + 1
            W_km1 = W_km1 + DS
        elif (k < 10) | (TotalMorph <= CONV[0, ]):
            print "problem not yet converged at iteration", k
            CONV[k-1, 0] = TotalMorph
            k = k + 1
            # Deform mesh:
            print "  deforming mesh (update of W_{k-1})"
            W_km1 = W_km1 + DS
        else:
            print "PROBLEM DIVERGING"
            k = k_max*10 - 1
        if (k > 1) & (np.mod(k-1, 10) == 0):
            print
            print "Do 3 iterations of Laplacian Smoothing to improve element quality"
            tic = time.clock()
            W_km1 = LaplacianSmooth(W_km1, ConnectB, 3)
            print "  Smoothing done in ", time.clock()-tic, " seconds"
            print "COARSE SURFACE REGISTRATION"
            print "  using landmark displacements to deform using RBF"
            W_km1 = RBFmorph(W_km1, W_km1[LandmB, ], NCT[LandmT, ] - W_km1[LandmB, ])
        print
    if k == k_max + 1:
        print
        print "SOLUTION TERMINATED: maximum iterations (", k_max, ") reached"
    print
    print "TOTAL TIME FOR ELASTIC SURFACE REGISTRATION : ", time.clock()-t_start, "seconds"
    return W_km1, CONV
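
# RBFmorph is defined elsewhere in the project.  A minimal sketch of a
# landmark-driven radial basis function morph with a Gaussian kernel, under
# the assumption of that general form (the kernel width "sig" is an
# illustrative parameter, not a project setting):
def _example_RBFmorph(NC, knownC, knownD, sig=10.0):
    n = knownC.shape[0]
    # kernel matrix between landmark positions:
    K = np.zeros((n, n))
    for i in range(n):
        d = knownC - knownC[i, ]
        K[i, :] = np.exp(-np.sum(d*d, 1)/(sig*sig))
    # RBF weights that reproduce the known landmark displacements:
    W = np.linalg.solve(K, knownD)
    NCnew = np.r_[NC]
    for i in range(NC.shape[0]):
        d = knownC - NC[i, ]
        g = np.exp(-np.sum(d*d, 1)/(sig*sig))
        NCnew[i, ] = NC[i, ] + np.dot(g, W)
    return NCnew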