+ "_anchors" + str(N) + "." + str(complexity) + ".pk" ) ) counter = {} for q in queries: counter[q] = moments[tuple(sorted(q))].transpose(index_sort(q)) CPDs = {} lCPDs = {} for k, val in counter.items(): CPDs[header[k[0]], tuple([header[z] for z in k[1:]])] = create_CPD(np.array(val + 1e-6, dtype=float)) model = models.TreeModel(CPDs, latents, format="CPDs") for k in sorted(model.lCPD): print k, model.lCPD[k] model_ll = 0 data = file(networkdir + "/samples/" + str(N) + "_samples.dat").readlines() def eval(dat): if len(dat) == 0: return 0 dat = dat.split()[2:] Y = [int(str(z) in dat) for z in labels]
    res, val, steps, gap = expGrad(_f, x_indep.copy(), 1e-5, verbose=False, lower=0)
    #print 'X', np.array(res).reshape(original_shape)
    return tuple(new_k), np.array(res).reshape(original_shape), val

CPDs = {}
for j in labels:
    # Parents of j under the learned edge set (edges are pairs of label indices).
    parents = [z for z in labels if (labels.index(z), labels.index(j)) in edges]
    t, s = tags[labels.index(j)], tuple([tags[labels.index(i)] for i in parents])
    # Marginals are keyed by sorted variable tuples; transpose the array
    # so the child axis comes first, followed by the parent axes.
    key = tuple([j] + parents)
    first_index = sorted(key).index(j)
    other_indices = [sorted(key).index(p) for p in parents]
    transpose_order = tuple([first_index] + other_indices)
    m = marginals[tuple(sorted(key))].transpose(transpose_order)
    smoothing = 0  # 10**(-6)
    CPDs[t, s] = create_CPD(m, smoothing=smoothing, verbose=False)

tree = TreeModel(CPDs,           # dictionary of CPDs
                 tags,           # variable ids
                 format="CPDs")  # latent structure already holds cpds

anchor_failures = {}
anchor_noise = {}
anchor_dict = {}
for l, a in anchors.items():
    anchor_dict[header[l]] = header[a]
    print 'a is', a
    # noise[a] holds P(anchor | latent)
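# create_CPD is called above but not defined in this excerpt. The sketch
# below is an assumption about its behavior, not the original code: given
# a joint-count array `m` whose first axis is the child variable, add the
# smoothing mass and normalize each parent configuration over the child
# axis to obtain P(child | parents).
def create_CPD(m, smoothing=0.0, verbose=False):
    m = np.array(m, dtype=float) + smoothing
    # Summing over axis 0 leaves one total per parent configuration;
    # broadcasting then divides each child slice by that total.
    return m / m.sum(axis=0)
# e.g. a (2, 2) count array [[3, 1], [1, 1]] becomes
# [[0.75, 0.5], [0.25, 0.5]]: each column sums to 1 over the child axis.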
for j in xrange(L):
    parents_of[latents[j]] = [latents[i] for i in np.nonzero(adj[:, j])[0]]
    children_of[latents[j]] = [latents[i] for i in np.nonzero(adj[j, :])[0]]
    queries.append(tuple([inv_header[latents[j]]]
                         + [inv_header[i] for i in parents_of[latents[j]]]))

moments = pickle.load(file(networkdir + '/pickles/estimated_moments.'
                           + '.'.join(args) + '.' + anchor_source + '_anchors'
                           + str(N) + '.' + str(complexity) + '.pk'))

counter = {}
for q in queries:
    # Moment arrays are stored with axes in sorted-variable order;
    # transpose back into the (child, parents...) order of the query.
    counter[q] = moments[tuple(sorted(q))].transpose(index_sort(q))

CPDs = {}
lCPDs = {}
for k, val in counter.items():
    CPDs[header[k[0]], tuple([header[z] for z in k[1:]])] = \
        create_CPD(np.array(val + 1e-6, dtype=float))

model = models.TreeModel(CPDs, latents, format='CPDs')
for k in sorted(model.lCPD):
    print k, model.lCPD[k]

model_ll = 0
data = file(networkdir + '/samples/' + str(N) + '_samples.dat').readlines()

def eval(dat):
    if len(dat) == 0:
        return 0
    dat = dat.split()[2:]
    Y = [int(str(z) in dat) for z in labels]
    e = eval_likelihood(model, Y, debug=True)
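# index_sort is used above but not defined in this excerpt. A minimal
# sketch under the assumption that moment arrays index their axes in
# sorted-variable order: it returns the permutation that transposes the
# sorted-key array back into the order of the query q (no repeated
# variables). This mirrors the explicit first_index/other_indices
# computation in the CPD-building loop earlier.
def index_sort(q):
    s = sorted(q)
    # Axis j of the transposed result should correspond to q[j], which
    # sits at position s.index(q[j]) in the sorted key.
    return tuple(s.index(x) for x in q)
# e.g. index_sort((3, 1, 2)) == (2, 0, 1)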