# Collect the indices of the random variables assigned to cluster i
# (rows of cluster_mat whose i-th column equals 1).
# NOTE(review): truncated excerpt — the loop over `i` starts before this view.
rvs_id.append(np.where(cluster_mat[:, i] == 1)[0])

# Flatten the per-cluster index arrays into one 1-D index vector.
rvs_id = np.concatenate(rvs_id, axis=None)
# Keep only the selected variables and the first t time steps of observations.
data = well_t[rvs_id, :t]

print(len(rvs_id))

# Continuous domain on [-4, 4] with 30 quadrature points for message passing.
domain = Domain((-4, 4), continuous=True, integral_points=linspace(-4, 4, 30))

# Kalman filter with identity matrices sized to the selected variables.
# NOTE(review): the meaning of the two scalar arguments (presumably noise
# parameters) is assumed — confirm against the KalmanFilter constructor.
kmf = KalmanFilter(domain, np.eye(len(rvs_id)), 1, np.eye(len(rvs_id)), 1)

result = []
for i in range(t):
    # i = t - 1
    # Ground the model up to time step i+1 and run hybrid lifted BP on it.
    g, rvs_table = kmf.grounded_graph(i + 1, data)
    bp = HybridLBP(g, n=50, proposal_approximation='simple')
    # bp = GaLBP(g)
    print('number of vr', len(g.rvs))
    # Count the random variables that carry observed evidence values.
    num_evidence = 0
    for rv in g.rvs:
        if rv.value is not None:
            num_evidence += 1
    print('number of evidence', num_evidence)

    start_time = time.time()
    # 6 BP iterations; coarse-to-fine refinement disabled (c2f=0).
    bp.run(6, c2f=0, log_enable=False)
    print('time lapse', time.time() - start_time)

    # for i in range(t):
    temp = []
    # NOTE(review): excerpt is cut off here — the loop body is missing.
    for idx, rv in enumerate(rvs_table[i]):
# ==== Example #2 ====
# 0
# save ans
# Serialize the reference answers as (key-string, value) pairs so they can be
# reloaded later (see the commented loader below). Comprehension replaces the
# original list()-plus-append loop.
ans_array = [(f'{key[0]},{key[1]}', ans[key]) for key in key_list]
np.save('Data/smoker_ans', np.array(ans_array))

# # load ans
# # Inverse of the save above: rebuild the `ans` dict from the saved array.
# ans_array = np.load('Data/smoker_ans.npy')
# for line in ans_array:
#     key = tuple([x.strip() for x in line[0].split(',')])
#     ans[key] = float(line[1])

# Evaluate lifted EP belief propagation ('LEPBP') over num_test repetitions.
name = 'LEPBP'
res = np.zeros((len(key_list), num_test))
for trial in range(num_test):
    bp = HybridLBP(g, n=10, proposal_approximation='simple')
    start_time = time.process_time()
    bp.run(10, log_enable=True)
    # Fold this trial's runtime into the running per-trial average.
    time_cost[name] = (time.process_time() -
                       start_time) / num_test + time_cost.get(name, 0)
    print(name, f'time {time.process_time() - start_time}')
    # bp.probability(0.8, 1, rv) — presumably P(rv in [0.8, 1]); confirm
    # against the HybridLBP API.
    for row, key in enumerate(key_list):
        res[row, trial] = bp.probability(0.8, 1, rvs_table[key])
# Spread of the estimates across trials, averaged over keys.
variance[name] = np.average(np.var(res, axis=1))
# Shift each row by its reference answer to obtain signed errors.
for row, key in enumerate(key_list):
    res[row, :] -= ans[key]
avg_diff[name] = np.average(np.average(abs(res), axis=1))
print(name, 'var', variance[name])
print(name, 'diff', avg_diff[name])

# Start of the 'C2FEPBP' experiment; the rest of this snippet is truncated.
name = 'C2FEPBP'
# ==== Example #3 ====
# 0
    # NOTE(review): truncated excerpt — the enclosing loop header, the `bp`
    # construction and `start_time` for this first method are before this view.
    bp.run(15, log_enable=False)
    # Fold this trial's runtime into the running per-trial average.
    time_cost[name] = (time.process_time() -
                       start_time) / num_test + time_cost.get(name, 0)
    print(name, f'time {time.process_time() - start_time}')
    # Absolute MAP error against reference answers, unobserved keys only.
    err = []
    for key in key_list:
        if key not in data:
            err.append(abs(bp.map(rvs_table[key]) - ans[key]))
    avg_err[name] = np.average(err) / num_test + avg_err.get(name, 0)
    max_err[name] = np.max(err) / num_test + max_err.get(name, 0)
    # NOTE(review): this accumulates (mean error)^2, not the variance of the
    # errors — confirm this is the intended statistic.
    err_var[name] = np.average(err)**2 / num_test + err_var.get(name, 0)
    print(name, f'avg err {np.average(err)}')
    print(name, f'max err {np.max(err)}')

    # Same evaluation for lifted EP BP without coarse-to-fine (c2f=False).
    name = 'LEPBP'
    bp = HybridLBP(g, n=20)
    start_time = time.process_time()
    bp.run(15, c2f=False, log_enable=False)
    time_cost[name] = (time.process_time() -
                       start_time) / num_test + time_cost.get(name, 0)
    print(name, f'time {time.process_time() - start_time}')
    err = []
    for key in key_list:
        if key not in data:
            err.append(abs(bp.map(rvs_table[key]) - ans[key]))
    avg_err[name] = np.average(err) / num_test + avg_err.get(name, 0)
    max_err[name] = np.max(err) / num_test + max_err.get(name, 0)
    err_var[name] = np.average(err)**2 / num_test + err_var.get(name, 0)
    print(name, f'avg err {np.average(err)}')
    print(name, f'max err {np.max(err)}')
# ==== Example #4 ====
# 0
from utils import log_likelihood
from VarInference import VarInference as VI
from LiftedVarInference import VarInference as LVI
from C2FVarInference import VarInference as C2FVI
from GaBP import GaBP
from HybridMaxWalkSAT import HybridMaxWalkSAT as HMWS
from HybridLBPLogVersion import HybridLBP

# Build the relational model and ground it.
rel_g = generate_rel_graph()
rel_g.ground_graph()

# Single observation: the ('recession', 'all') variable is fixed to 25.
data = {('recession', 'all'): 25}

g, rvs_dict = rel_g.add_evidence(data)

# Hybrid lifted BP: 10 particles, simple proposal approximation,
# 10 iterations with coarse-to-fine refinement disabled.
infer = HybridLBP(g, n=10, proposal_approximation='simple')
infer.run(10, c2f=0, log_enable=False)

# Alternative inference engines kept for comparison:
# infer = HMWS(g)
# infer.run(max_tries=1, max_flips=10000, epsilon=0.0, noise_std=0.5)

# infer = LVI(g, num_mixtures=1, num_quadrature_points=3)
# infer.run(200, lr=0.2)

# infer = GaBP(g)
# infer.run(20)

# Report the MAP estimate of every ground random variable.
map_res = dict()
for key, rv in rvs_dict.items():
    estimate = infer.map(rv)
    map_res[rv] = estimate
    print(key, estimate)
# create hidden-hidden factors
pxy = ImageEdgePotential(0, 3.5, 25)
# Horizontal pairwise factors: each pixel with its right-hand neighbour.
for r in range(row):
    for c in range(col - 1):
        fs.append(F(pxy, (rvs[r * col + c], rvs[r * col + c + 1])))
# Vertical pairwise factors: each pixel with the pixel directly below it.
for r in range(row - 1):
    for c in range(col):
        fs.append(F(pxy, (rvs[r * col + c], rvs[(r + 1) * col + c])))

# Assemble the factor graph over hidden and evidence variables.
g = Graph()
g.rvs = rvs + evidence
g.factors = fs
g.init_nb()

bp = HybridLBP(g, n=10, proposal_approximation='simple')

# Optional custom initialisation of the proposal distributions:
# def initial_proposal():
#     for i in range(row):
#         for j in range(col):
#             bp.q[rvs[i * col + j]] = (m[i, j], 2)
#
# bp.custom_initial_proposal = initial_proposal

# Run 10 BP iterations (no coarse-to-fine) and report wall-clock CPU time.
start_time = time.process_time()
bp.run(10, c2f=0, log_enable=False)
print('time', time.process_time() - start_time)

print(len(bp.g.rvs))

# reconstruct image