def information_decomposition(dist, src, to=""):
    """Decompose the mutual information I(S; `to`) of `dist` into
    BROJA partial-information atoms.

    Marginalizes `dist` onto `src + to`, solves for the optimizing
    distribution Q via admUI, and derives shared (SI), complementary (CI)
    and the two unique (UI) terms from conditional entropies of Q.

    Returns a dict with the target variable names (via the module-level
    ``rvs_to_name`` map) and a "metrics" dict with keys
    "mi", "si", "ci", "ui_0", "ui_1".
    """
    rvs = src + to
    P = dist.marginal(rvs)
    variables = P._rvs

    # Optimizing distribution for the BROJA decomposition.
    q_SXY = admUI.computeQUI(distSXY=P)

    cond_ent = dit.shannon.conditional_entropy  # shorthand for readability
    h_SgXY = cond_ent(q_SXY, 'S', 'XY')

    # Unique informations: H(S|Y) - H(S|XY) and H(S|X) - H(S|XY) under Q.
    ui_SX_Y = cond_ent(q_SXY, 'S', 'Y') - h_SgXY
    ui_SY_X = cond_ent(q_SXY, 'S', 'X') - h_SgXY

    # Shared information, computed two equivalent ways as a sanity check.
    si_SXY_1 = dit.shannon.mutual_information(q_SXY, 'S', 'X') - ui_SX_Y
    si_SXY_2 = dit.shannon.mutual_information(q_SXY, 'S', 'Y') - ui_SY_X
    assert math.isclose(si_SXY_1, si_SXY_2, abs_tol=1e-6), \
        "SI_S_XY: %f | %f" % (si_SXY_1, si_SXY_2)
    si_SXY = si_SXY_1

    # Complementary (synergistic) information.
    ci_SXY = si_SXY - dit.multivariate.coinformation(P, rvs)
    i_S_XY = dit.shannon.mutual_information(P, 'S', to)

    # The four atoms must add back up to the full mutual information.
    decomposed = si_SXY + ci_SXY + ui_SX_Y + ui_SY_X
    assert math.isclose(i_S_XY, decomposed, abs_tol=1e-6), \
        "MI = decompose : %f | %f" % (i_S_XY, decomposed)

    # Map each target variable onto its UI term via its index in P._rvs.
    uis = [ui_SX_Y, ui_SY_X]
    metrics = {
        "mi": i_S_XY,
        "si": si_SXY,
        "ci": ci_SXY,
        "ui_0": uis[variables[to[0]] - 1],
        "ui_1": uis[variables[to[1]] - 1],
    }
    return {
        "variables": tuple(rvs_to_name[x] for x in to),
        "metrics": metrics,
    }
# Benchmark grid: for each support size ns (with ny = nz = ns), evaluate
# every sampled distribution with the admUI solver and with dit's BROJA
# solver, recording UI values and wall-clock times.
# NOTE(review): npy, ndist, nsmax, UIv, ltimev, UIpv, ltimepv, dit_errorcnt,
# Distribution, computeQUI and algorithms are defined elsewhere in the file —
# presumably preallocated result arrays / imported solvers; confirm against
# the surrounding code. The inner loop continues past this chunk.
ltimecv = np.empty(shape=(ndist, nsmax))
for ns in range(1, nsmax):
    ny = ns
    nz = ns
    print("--------------- ns = %s ---------------" % (ns + 1))
    for i in range(0, ndist):
        # Column i holds a flattened pmf; drop zero entries before
        # reshaping to shape (nz+1, ny+1, ns+1) for dit.
        Pt = npy[:, i, ns]
        P = Pt[Pt != 0]
        Ps = P.reshape(nz + 1, ny + 1, ns + 1)
        d = Distribution.from_ndarray(Ps)
        d.set_rv_names('SXY')
        ## admUI
        # NOTE(review): the timed span includes the three conditional-entropy
        # evaluations below, not just computeQUI itself.
        start_time = time.time()
        Q = computeQUI(d)
        # H(S|Y) + H(X|Y) - H(SX|Y) == H(S|Y) - H(S|XY), evaluated on Q.
        UIX = dit.shannon.conditional_entropy(
            Q, 'S', 'Y') + dit.shannon.conditional_entropy(
                Q, 'X', 'Y') - dit.shannon.conditional_entropy(Q, 'SX', 'Y')
        lapsed_time = time.time() - start_time
        UIv[i, ns] = UIX
        ltimev[i, ns] = lapsed_time
        ## dit
        try:
            pid = algorithms.pid_broja(d, ['X', 'Y'], 'S')
        except dit.exceptions.ditException:
            # Solver failure: log it, bump the error counter, and store
            # zeros so the result arrays stay aligned with admUI's.
            print(i, "ditException: P = ", P, ", ns=ny=nz=", ns + 1, ", i=", i)
            dit_errorcnt = dit_errorcnt + 1
            UIpv[i, ns] = 0
            ltimepv[i, ns] = 0
print("Time: ",itoc_us-itic_us,"secs") #^ if # Prepare pdf for admUI or dit if s != 0: dpdf = Distribution(pdf) dpdf.set_rv_names('SXY') #^ if # Compute PID using ComputeUI if s == 1 or s == 3 or s == 5: print("Run ComputeUI.computeQUI()") itic_comUI = time.process_time() Q = computeQUI(distSXY = dpdf, DEBUG = True) UIY = dit.shannon.conditional_entropy(Q, 'S', 'Y') + dit.shannon.conditional_entropy(Q, 'X', 'Y') \ - dit.shannon.conditional_entropy(Q, 'SX', 'Y') UIZ = dit.shannon.conditional_entropy(Q, 'S', 'X') + dit.shannon.conditional_entropy(Q, 'Y', 'X') \ - dit.shannon.conditional_entropy(Q, 'SY', 'X') CI = dit.shannon.conditional_entropy(Q, 'S', 'XY') - dit.shannon.conditional_entropy(dpdf, 'S', 'XY') SI = dit.shannon.entropy(Q, 'S')\ -dit.shannon.conditional_entropy(Q, 'S', 'XY') \ - dit.shannon.conditional_entropy(Q, 'S', 'Y') - dit.shannon.conditional_entropy(Q, 'X', 'Y') \ + dit.shannon.conditional_entropy(Q, 'SX', 'Y')\ - dit.shannon.conditional_entropy(Q, 'S', 'X') - dit.shannon.conditional_entropy(Q, 'Y', 'X') \ + dit.shannon.conditional_entropy(Q, 'SY', 'X') itoc_comUI = time.process_time() print("Partial information decomposition ComputeUI: ") print("UIY_comUI: ", UIY) print("UIZ_comUI: ", UIZ)
0.0869196091623, 0.0218631235533, 0.133963681059,
        0.164924698739, 0.429533105427, 0.16279578206
    ])
]
# ^ tail of the `examples` list whose opening lies above this chunk.

# count some statistics
max_delta = 0.
total_time_admUI = 0.
total_time_dit = 0.

# For each example distribution, run the admUI solver (timed) and derive
# the PID terms from the optimizing distribution Q.
# NOTE(review): the loop body continues past this chunk (dit comparison
# and delta accounting presumably follow — confirm).
for d in examples:
    d.set_rv_names('SXY')
    print(d.to_dict())
    # admUI
    start_time = time.time()
    Q = computeQUI(distSXY=d, DEBUG=False)
    admUI_time = time.time() - start_time
    total_time_admUI += admUI_time
    # UIX: H(S|Y) + H(X|Y) - H(SX|Y) == H(S|Y) - H(S|XY) under Q.
    UIX = (dit.shannon.conditional_entropy(Q, 'S', 'Y')
           + dit.shannon.conditional_entropy(Q, 'X', 'Y')
           - dit.shannon.conditional_entropy(Q, 'SX', 'Y'))
    # UIX2 = (dit.shannon.entropy(Q, 'SY') + dit.shannon.entropy(Q, 'XY')
    #         - dit.shannon.entropy(Q, 'SXY') - dit.shannon.entropy(Q, 'Y'))
    # print(abs(UIX - UIX2) < 1e-10)
    # UIY: the symmetric expression with X and Y swapped.
    UIY = (dit.shannon.conditional_entropy(Q, 'S', 'X')
           + dit.shannon.conditional_entropy(Q, 'Y', 'X')
           - dit.shannon.conditional_entropy(Q, 'SY', 'X'))
    # SI: I_Q(S;X) minus the unique term UIX.
    SI = dit.shannon.mutual_information(Q, 'S', 'X') - UIX
    # SI2 = (dit.shannon.entropy(Q, 'S') + dit.shannon.entropy(Q, 'X')
    #        - dit.shannon.entropy(Q, 'SX') - UIX)
    # SI3 = (dit.shannon.entropy(Q, 'S') + dit.shannon.entropy(Q, 'X')
# Benchmark grid: square supports ns = ny = nz; each sampled distribution
# is solved with admUI (timed below) and then with a cvxopt-based solver
# (timing starts at the end of this chunk; the rest of that branch is
# past this view).
# NOTE(review): npy, ndist, nsmax, UIv, ltimev, logging and cvsfile are
# defined elsewhere in the file — presumably preallocated result arrays
# and an open CSV file handle; confirm against the surrounding code.
for ns in range(1, nsmax):
    ny = ns
    nz = ns
    print("--------------- ns= %s ---------------" % (ns + 1))
    for i in range(0, ndist):
        # Column i holds a flattened pmf; drop zero entries before
        # reshaping to shape (nz+1, ny+1, ns+1) for dit.
        Pt = npy[:, i, ns]
        P = Pt[Pt != 0]
        Ps = P.reshape(nz + 1, ny + 1, ns + 1)
        d = dit.Distribution.from_ndarray(Ps)
        d.set_rv_names('SXY')
        print(i)
        # admUI
        # NOTE(review): the timed span includes the three conditional-entropy
        # evaluations below, not just computeQUI itself.
        start_time = time.time()
        Q = computeQUI(distSXY=d)  # , DEBUG=True)
        # H(S|Y) + H(X|Y) - H(SX|Y) == H(S|Y) - H(S|XY), evaluated on Q.
        UIX = (dit.shannon.conditional_entropy(Q, 'S', 'Y')
               + dit.shannon.conditional_entropy(Q, 'X', 'Y')
               - dit.shannon.conditional_entropy(Q, 'SX', 'Y'))
        lapsed_time = time.time() - start_time
        UIv[i, ns] = UIX
        ltimev[i, ns] = lapsed_time
        if logging:
            # Append a partial CSV row: "<ns+1>, <UIX>, <time>, " — later
            # writes (past this chunk) presumably complete the row.
            cvsfile.write("{}, ".format(ns + 1))
            cvsfile.write("{:.15f}, {:.15f}, ".format(UIX, lapsed_time))
        else:
            print("admUI = %.15f" % UIX, " time = %.15f" % lapsed_time)
        # cvxopt
        pdf = dict(zip(d.outcomes, d.pmf))
        start_timec = time.time()