Example #1
0
	def chowliu_tree(data):
		'''
		Learn a Chow-Liu tree structure from discrete data.

		data: S*N numpy array, where S is #samples, N is #RV (Discrete)
		Returns a directed tree over the N variables (rooted near the middle
		variable) that maximizes the sum of pairwise mutual information.
		'''
		S,D = data.shape
		marginals = {}
		# Total work units for the progress bar: D singles + D*(D-1)/2 pairs.
		# BUGFIX: use integer division so the denominator is an int under py3.
		totalnum = D + (D*(D-1))//2
		nownum = 0
		# compute single r.v. marginals (raw counts for now)
		for i in range(D):
			nownum += 1; progress(nownum,totalnum,'Learning chowliu tree')
			values, counts = np.unique(data[:,i], return_counts=True)
			marginals[i] = dict(zip(values, counts))

		# compute joint marginal for each pair
		for i,j in crossprod(range(D),range(D)):
			nownum += 1; progress(nownum,totalnum,'Learning chowliu tree')
			values, counts = np.unique(data[:,(i,j)], axis=0 ,return_counts=True)
			values = list(map(lambda x:tuple(x),values))
			marginals[i,j] = dict(zip(values, counts))
			# pad unseen value combinations with a zero count so that the
			# add-one smoothing below covers the full joint domain
			allcomb = crossprod(list(marginals[i].keys()),list(marginals[j].keys()),'full')
			for v in allcomb:
				if v not in marginals[i,j]: marginals[i,j][v] = 0

		# normalize all marginals with add-one (Laplace) smoothing.
		# BUGFIX: the denominator must include the +1 added to every cell,
		# otherwise the smoothed "probabilities" sum to more than 1.
		for key in marginals:
			dist = marginals[key]
			summation = sum(dist.values()) + len(dist)
			for k in dist: dist[k] = (dist[k]+1) / float(summation) # 1- correction

		mutual = {}
		# mutual information I(i;j) = sum_v p(vi,vj) * log(p(vi,vj)/(p(vi)p(vj)))
		for i,j in crossprod(range(D),range(D)):
			mutual[i,j] = 0
			for vi,vj in marginals[i,j]:
				mutual[i,j] += np.log(marginals[i,j][vi,vj] / (marginals[i][vi] * marginals[j][vj])) * marginals[i,j][vi,vj]

		# find the maximum spanning tree over the mutual-information weights
		G = Graph(digraph=False)
		for i in range(D):
			node = DiscreteRV(desc = 'N{}'.format(i), domain = list(marginals[i].keys()))
			G.add_vertice(node)

		for i,j in mutual:
			G.add_edge(i,j,weight = mutual[i,j])

		G = G.max_spanning_tree()
		root = int(D/2)  # root the directed tree at the middle variable
		G = G.todirect(root)
		return G
Example #2
0
	def get_domain(self,nids):
		'''Fold the domains of the G2 nodes named in nids into one joint domain.'''
		vertices = self.G2.V
		joint = vertices[nids[0]].domain
		for nid in nids[1:]:
			joint = crossprod(joint, vertices[nid].domain, flag='full')
		return joint
	def get_domain(self,nids):
		'''Return the combined domain of the G2 nodes listed in nids.'''
		head, rest = nids[0], nids[1:]
		combined = self.G2.V[head].domain
		for nid in rest:
			combined = utils.crossprod(combined, self.G2.V[nid].domain)
		return combined
	def fit(self,traindata):
		'''
		MLE learning with add-one (Laplace) smoothing, basically the smoothed
		empirical distribution.

		traindata: S*N numpy array, where S is #samples, N is #RV (Discrete)
		Fills self.cpt with a node potential for every root and a conditional
		p(child | parent) table for every non-root node. Returns self.
		'''
		_,D = traindata.shape
		assert(self.graph.N == D), "Input data not valid"

		self.cpt = {}
		for i in range(self.graph.N):
			domain = self.graph.V[i].domain
			parents = self.graph.find_parents(i)

			if len(parents) == 0: # root node
				# learn the node potential
				values, counts = np.unique(traindata[:,i], return_counts=True)
				# BUGFIX: apply the +1 correction to EVERY value, not only the
				# unseen ones. Previously observed values kept their raw count
				# while unseen values got 1, which is not Laplace smoothing.
				dist = {v: c + 1 for v, c in zip(values, counts)}
				for v in domain:
					if v not in dist: dist[v] = 1 # 1-correction (count 0 + 1)
				# normalize
				summation = sum(dist.values())
				for k in dist:dist[k] /= float(summation)
				self.cpt[i] = dist

			else:
				# create uniform node potential
				self.cpt[i] = dict(zip(domain, [1]*len(domain) ))
				# learn the edge potential
				assert(len(parents) == 1), "Each vertice can only have at most one parent!"
				j = parents[0]
				jdomain = self.graph.V[j].domain
				values, counts = np.unique(traindata[:,(i,j)], axis=0 ,return_counts=True)
				values = list(map(lambda x:tuple(x),values))
				# same +1 correction on every observed (child, parent) pair ...
				dist = {v: c + 1 for v, c in zip(values, counts)}
				# ... and on every unobserved combination of the full domain
				allcomb = utils.crossprod(domain,jdomain)
				for v in allcomb:
					if v not in dist: dist[v] = 1 #1-correction
				# normalize each parent-value column into p(vi | vj)
				for vj in jdomain:
					summation = sum(map(lambda vi:dist[vi,vj],domain))
					for vi in domain:
						dist[vi,vj] /= float(summation)
				self.cpt[i,j] = dist
		return self
Example #5
0
	def chowliu_tree(data):
		'''
		Learn a Chow-Liu tree over continuous (assumed Gaussian) variables.

		data: S*D numpy array (S samples, D variables).
		Edge weights are the Gaussian mutual information -0.5*log(1 - rho^2)
		derived from each pairwise correlation coefficient rho.
		Returns the maximum spanning tree directed away from node 0.
		'''
		_,D = data.shape
		# pair count for the progress bar; // keeps it an int under py3
		maxN = (D*(D-1))//2
		curN = 0
		g = Graph(digraph=False)
		for i in range(D):
			n = Node('x{}'.format(i))
			g.add_vertice(n)

		allpair = crossprod(range(D),range(D))
		for i,j in allpair:
			curN += 1; progress(curN,maxN,'Calculate mutual info')
			var = np.cov(data[:,(i,j)],rowvar=False)
			coef = var[0,1] / np.sqrt(var[0,0]*var[1,1])
			# BUGFIX: weight edges by the Gaussian mutual information, which is
			# symmetric in the sign of the correlation. The raw signed coef
			# ranked strongly NEGATIVELY correlated pairs lowest, even though
			# they are just as informative as positively correlated ones.
			mutual = -0.5 * np.log(1.0 - coef*coef)
			g.add_edge(i,j,weight=mutual)

		g = g.max_spanning_tree()
		g = g.todirect(0)
		return g
Example #6
0
	def smooth(self,data,numnodes=4,smooth=True):
		'''
		Boyen-Koller style approximate forward/backward inference over a
		time series, followed by per-cluster MAP decoding.

		data: T*N numpy array (T time steps, N == self.G.N variables per slice).
		numnodes: maximum number of state variables grouped into one BK cluster.
		smooth: if True, run the backward pass and combine both directions
		        (smoothing); if False, use the forward (filtering) pass only.
		Returns a T*N array of predictions (a modified copy of data).

		NOTE(review): the keyword argument `smooth` shadows the method name;
		left unchanged to preserve the public interface.
		'''
		assert(numnodes > 1)
		# partition the state variables self.SV into consecutive clusters of
		# at most numnodes variables each
		st = 0
		appro = []
		while st < len(self.SV):
			ed = st + numnodes
			if ed > len(self.SV):
				ed = len(self.SV)
			appro.append(self.SV[st:ed])
			st = ed

		# create junction tree J1 (first time slice): moralize, then fully
		# connect each BK cluster so it ends up inside a single clique
		T1G = deepcopy(self.G)
		T1G = T1G.moralize()
		for bkc in appro:
			for s,t in crossprod(bkc,bkc):
				T1G.add_edge(s,t)

		self.J1 = T1G.junction_tree(preserve=self.G)

		# find come and out node: the minimal clique covering each cluster is
		# its interface where inter-slice messages enter/leave
		self.J1.out = []
		for bkc in appro:
			self.J1.out.append( self.min_clique(self.J1,bkc) )
		self.J1.come = deepcopy(self.J1.out)

		# create junction tree Jt (transition network G2): fully connect both
		# the current-slice cluster and its image under self.M (presumably the
		# previous-slice copy of each variable -- TODO confirm)
		T2G = self.G2.moralize()
		for bkc in appro:
			for s,t in crossprod(bkc,bkc):
				T2G.add_edge(s,t)

			fbkc = list(map(lambda x:self.M[x],bkc))
			for s,t in crossprod(fbkc,fbkc):
				T2G.add_edge(s,t)

		self.J2 = T2G.junction_tree(preserve = self.G2)

		# find come and out node: "out" cliques cover the current-slice
		# clusters, "come" cliques cover their self.M images
		self.J2.out = []
		for bkc in appro:
			self.J2.out.append( self.min_clique(self.J2,bkc) )

		self.J2.come = []
		for bkc in appro:
			fbkc = list(map(lambda x:self.M[x],bkc))
			self.J2.come.append( self.min_clique(self.J2,fbkc) )


		T,N = data.shape
		assert(N == self.G.N)

		# ---------------- forward (filtering) pass ----------------
		# fmsg[t] holds slice t's clique potentials after evidence absorption;
		# fmsg[t][outid,-1] caches the message sent forward from clique outid
		fmsg = {}
		for t in range(T):
			progress(t+1,T, 'Forward')

			fmsg[t] = {}
			evidence = data[t,:]

			if t==0:
				# first slice runs on J1 (prior network)
				self.init_message(self.J1,fmsg[t])
				self.multiply_CPT(self.J1,evidence,fmsg[t],init=True)
				# collect message to out node for each bk cluster
				npt = deepcopy(fmsg[t])
				message = self.calculate_msg(self.J1,npt)
				for i in self.J1.out:
					fmsg[t][i] = self.collect_msg(self.J1,i,npt,message)

			else:
				pt = t-1
				self.init_message(self.J2,fmsg[t])
				self.multiply_CPT(self.J2,evidence,fmsg[t])
				# absorb message from the previous time slice; slice 0
				# produced its messages on J1, later slices on J2
				for i,inid in enumerate(self.J2.come):
					if pt == 0:
						outid = self.J1.out[i]
					else:
						outid = self.J2.out[i]

					msg = self.get_message(fmsg[pt][outid],fmsg[t][inid],timestep = 1)
					# cache the forward message on the SENDING slice under the
					# composite key (outid, -1) -- read back during decoding
					fmsg[pt][outid,-1] = msg
					fmsg[t][inid] = self.multiply_potential(msg,fmsg[t][inid])

				npt = deepcopy(fmsg[t])
				message = self.calculate_msg(self.J2,npt)
				for i in self.J2.out:
					fmsg[t][i] = self.collect_msg(self.J2,i,npt,message)

			if t==(T-1):
				# last slice has no successor, so materialize its outgoing
				# messages here too; the decoding loop reads fmsg[t][outid,-1]
				for i,outid in enumerate(self.J2.out):
					inid = self.J2.come[i]
					fmsg[t][outid,-1] = self.get_message(fmsg[t][outid],fmsg[t][inid],timestep = 1)

		# filtering only: empty backward range (range(T-1, T, -1) is empty)
		if smooth:
			endtime = -1
		else:
			endtime = T

		# ---------------- backward (smoothing) pass ----------------
		# mirror image of the forward pass; bmsg[t][-1,outid] caches the
		# backward message arriving at clique outid of slice t
		bmsg = {}
		for t in range(T-1,endtime,-1):
			progress(T-t,T, 'Backward')

			bmsg[t] = {}
			evidence = data[t,:]

			if t==(T-1):
				# last slice: no future message; seed each cluster's backward
				# message with an initial potential over that cluster
				curG = self.J2
				self.init_message(curG,bmsg[t])
				self.multiply_CPT(curG,evidence,bmsg[t])
				npt = deepcopy(bmsg[t])
				message = self.calculate_msg(curG,npt)
				for i,inid in enumerate(curG.come):
					bmsg[t][inid] = self.collect_msg(curG,inid,npt,message)
					outid = curG.out[i]
					bmsg[t][-1,outid] = self.init_potential(appro[i])

			if t<(T-1):
				nt = t+1
				# slice 0 uses J1 (and the init=True CPT form), others use J2
				curG = self.J2
				if t==0:
					curG = self.J1
				# initialize message
				self.init_message(curG,bmsg[t])
				if t==0:
					self.multiply_CPT(curG,evidence,bmsg[t],init=True)
				else:
					self.multiply_CPT(curG,evidence,bmsg[t])
				# absorb message from the NEXT time slice (timestep = -1)
				for i,outid in enumerate(curG.out):
					inid = self.J2.come[i]
					msg = self.get_message(bmsg[nt][inid],bmsg[t][outid],timestep = -1)
					bmsg[t][-1,outid] = msg
					bmsg[t][outid] = self.multiply_potential(msg,bmsg[t][outid])

				npt = deepcopy(bmsg[t])
				message = self.calculate_msg(curG,npt)
				for i in curG.come:
					bmsg[t][i] = self.collect_msg(curG,i,npt,message)


		# ---------------- combine and decode ----------------
		# per slice and BK cluster: multiply forward (and, when smoothing,
		# backward) messages, normalize, then take the joint argmax state
		prediction = deepcopy(data)
		for t in range(T):
			if t==0:
				tg = self.J1
			else:
				tg = self.J2

			for bki,outid in enumerate(tg.out):
				fP = fmsg[t][outid,-1]
				# remap potential ids through self.rM -- presumably the inverse
				# of self.M, mapping back to current-slice ids; TODO confirm
				fP.ids = list(map(lambda x:self.rM[x],fP.ids))
				potential = fP
				if smooth:
					bP = bmsg[t][-1,outid]
					potential =  self.multiply_potential(potential,bP)
				P = potential.P/np.sum(potential.P)
				# MAP assignment over the cluster's joint table
				idx = np.unravel_index(P.argmax(), P.shape)
				for v in appro[bki]:
					prediction[t,v] = idx[fP.ids.index(v)]

		return prediction