コード例 #1
0
def test_BayesianNetwork_with_initial():
    """A network built from a previous network must copy its mu weights,
    and mutating the copy must leave the source network untouched."""

    layer_dims = np.array([10, 30, 100])
    prev_net = BayesianNetwork(layer_dims, 0.5, np.exp(-1), np.exp(7),
                               0.5, 1.0, False)
    net = BayesianNetwork(layer_dims,
                          0.5,
                          np.exp(-1),
                          np.exp(7),
                          0.5,
                          1.0,
                          BayesianNetwork_init=prev_net)

    # Right after construction, layer 0's mu weights must match the source.
    same_after_init = (net.Linear_layer[0].mu.weight.data.numpy() ==
                       prev_net.Linear_layer[0].mu.weight.data.numpy()).all()

    # Shift every weight by +2; the source network must stay unchanged.
    shifted = torch.tensor(net.Linear_layer[0].mu.weight.data.numpy() + 2)
    net.Linear_layer[0].mu.weight.data = shifted
    all_changed = (net.Linear_layer[0].mu.weight.data.numpy() !=
                   prev_net.Linear_layer[0].mu.weight.data.numpy()).all()

    assert same_after_init and all_changed
コード例 #2
0
def test_BayesianNetwork_update():
    """One Adam step on a network initialised from a previous network:
    mu weights start equal to the source and must differ after the
    update, while the source network stays untouched."""

    dim = np.array([10, 30, 10])  # layer sizes: 10 -> 30 -> 10
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    BayesianNetwork_prova_prev = BayesianNetwork(dim, alpha_k, sigma_k, c, pi,
                                                 p, BayesianNetwork_init)
    BayesianNetwork_prova = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev)

    # Right after construction the copy must share the source's mu weights.
    check_equal = (BayesianNetwork_prova.Linear_layer[0].mu.weight.data.numpy(
    ) == BayesianNetwork_prova_prev.Linear_layer[0].mu.weight.data.numpy()
                   ).all()

    optimizer = optim.Adam(BayesianNetwork_prova.parameters())

    # Random batch: 20 samples, 10 features, 10 target classes.
    x = torch.tensor(np.random.uniform(0, 5, (20, 10)), dtype=torch.float64)
    y = torch.tensor(np.random.choice(range(0, 10), 20), dtype=torch.long)

    output_prova = BayesianNetwork_prova(x)
    loss_prova = F.cross_entropy(output_prova, y)

    loss_prova.backward()
    optimizer.step()

    # After the step, at least one layer-1 weight must differ from the
    # (frozen) source network.
    check_diff = (
        BayesianNetwork_prova.Linear_layer[1].mu.weight.data.numpy() !=
        BayesianNetwork_prova_prev.Linear_layer[1].mu.weight.data.numpy()
    ).any()

    assert (check_equal and check_diff)
コード例 #3
0
def test_BayesianNetwork_input():
    """Two properties of the forward pass:
    1. F.cross_entropy(out, y) equals F.nll_loss(F.log_softmax(out, 1), y)
       (cross-entropy is log-softmax followed by NLL).
    2. After shifting the second layer's mu bias by +2, the copy's loss
       differs from the source network's loss on the same batch.
    """

    dim = np.array([10, 30, 10])
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    BayesianNetwork_prova_prev = BayesianNetwork(dim, alpha_k, sigma_k, c, pi,
                                                 p, BayesianNetwork_init)
    BayesianNetwork_prova = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev)

    # Shift the output-layer mu bias so the two networks no longer agree.
    new_weights = torch.tensor(
        BayesianNetwork_prova.Linear_layer[1].mu.bias.data.numpy() + 2)
    BayesianNetwork_prova.Linear_layer[1].mu.bias.data = new_weights

    x = torch.tensor(np.random.uniform(0, 1, (20, 10)), dtype=torch.float64)
    y = torch.tensor(np.random.choice(range(0, 10), 20), dtype=torch.long)

    output_prova = BayesianNetwork_prova(x)

    # Property 1: both loss formulations must agree exactly.
    output_prova_softmax = F.log_softmax(output_prova, 1)
    loss_nll_soft = F.nll_loss(output_prova_softmax, y).data.numpy()

    loss_cross_entr = F.cross_entropy(output_prova, y).data.numpy()

    check1 = (loss_cross_entr == loss_nll_soft)

    # Property 2: the biased copy gives a different loss than the source.
    output_prova_prev = BayesianNetwork_prova_prev(x)

    loss_prova_prev = F.cross_entropy(output_prova_prev, y).data.numpy()

    check2 = (loss_prova_prev != loss_cross_entr)

    assert (check1 and check2)
コード例 #4
0
def test_BayesianNetwork_stack():
    """stack() must return mu/rho/w vectors whose length equals the total
    number of weights plus biases across both linear layers (640)."""

    dims = np.array([10, 30, 10])
    net = BayesianNetwork(dims, 0.5, np.exp(-1), np.exp(7), 0.5, 1.0, False)

    # One forward pass (presumably required so sampled weights w exist
    # before stack() is called — TODO confirm against BayesianNetwork).
    batch = torch.tensor(np.random.uniform(0, 5, (20, 10)),
                         dtype=torch.float64)
    _ = net(batch)

    mu_vec, rho_vec, w_vec = net.stack()

    # 10*30 + 30 parameters in layer 0, 30*10 + 10 in layer 1.
    expected_len = 10 * 30 + 30 + 30 * 10 + 10
    assert mu_vec.shape == w_vec.shape
    assert rho_vec.data.numpy().shape[0] == expected_len
コード例 #5
0
def constructAlarmNetwork():
    """Build the classic burglary/earthquake alarm Bayesian network
    (nodes created parent-first so children can reference them)."""
    # Root nodes: no parents, unconditional CPTs.
    burglary = Node('B', ['T', 'F'], None, {'T': 0.001, 'F': 0.999})
    earthquake = Node('E', ['T', 'F'], None, {'T': 0.002, 'F': 0.998})

    # Alarm is conditioned on both burglary and earthquake.
    alarm_cpt = {
        'T': {'B=T,E=T': 0.95, 'B=T,E=F': 0.94,
              'B=F,E=T': 0.29, 'B=F,E=F': 0.001},
        'F': {'B=T,E=T': 0.05, 'B=T,E=F': 0.06,
              'B=F,E=T': 0.71, 'B=F,E=F': 0.999},
    }
    alarm = Node('A', ['T', 'F'], [burglary, earthquake], alarm_cpt)

    # John and Mary each call conditioned on the alarm only.
    john = Node('J', ['T', 'F'], [alarm],
                {'T': {'A=T': 0.90, 'A=F': 0.05},
                 'F': {'A=T': 0.1, 'A=F': 0.95}})
    mary = Node('M', ['T', 'F'], [alarm],
                {'T': {'A=T': 0.70, 'A=F': 0.01},
                 'F': {'A=T': 0.3, 'A=F': 0.99}})

    return BayesianNetwork([burglary, earthquake, alarm, john, mary])
コード例 #6
0
def test_stack_index():
    """The tail of the stacked mu vector must equal the last linear
    layer's own stack() (its dim[1]*dim[2] weights plus dim[2] biases),
    i.e. stack() concatenates layers in order."""

    # Seed both RNGs so the sampled weights are reproducible.
    torch.manual_seed(0)
    np.random.seed(0)

    dim = np.array([10, 30, 10])
    L = 3  # layer count including input; there are L - 1 linear layers
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    BayesianNetwork_prev = BayesianNetwork(dim, alpha_k, sigma_k, c, pi, p,
                                           BayesianNetwork_init)
    BayesianNetwork_1 = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p=0.8,
        BayesianNetwork_init=BayesianNetwork_prev)

    x = torch.tensor(np.random.uniform(0, 5, (20, 10)), dtype=torch.float64)
    y = torch.tensor(np.random.choice(range(0, 10), 20), dtype=torch.long)

    # Re-seed so the forward pass draws a deterministic weight sample.
    torch.manual_seed(0)
    np.random.seed(0)
    call1 = BayesianNetwork_1(x)

    mu, rho, w = BayesianNetwork_1.stack()

    # The last dim[L-2]*dim[L-1] + dim[L-1] entries are the last layer's.
    assert (mu[-(dim[L - 2] * dim[L - 1] + dim[L - 1]):] ==
            BayesianNetwork_1.Linear_layer[L - 2].mu.stack()).all()
コード例 #7
0
def test_BayesianNetwork_without_initial():
    """Layer shapes must follow dim = [10, 30, 100] when the network is
    built from scratch (no previous network supplied)."""

    net = BayesianNetwork(np.array([10, 30, 100]), 0.5, np.exp(-1),
                          np.exp(7), 0.5, 1.0, False)

    first_mu_shape = net.Linear_layer[0].mu.weight.data.numpy().shape
    last_rho_bias = net.Linear_layer[1].rho.bias.data.numpy()

    # Layer 0 maps 10 inputs to 30 units; layer 1 has 100 output biases.
    assert first_mu_shape[0] == 30
    assert first_mu_shape[1] == 10
    assert last_rho_bias.shape[0] == 100
コード例 #8
0
def test_BayesianNetwork_priorvariance():
    """With alpha_k = 0, get_gaussiandistancefromprior must give the same
    loss at the original stacked (mu, rho) and after both are overwritten
    in place with the constant 10."""

    dim = np.array([10, 30, 10])
    alpha_k = 0.0  # prior-variance weighting switched off for this test
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    BayesianNetwork_prova_prev = BayesianNetwork(dim, alpha_k, sigma_k, c, pi,
                                                 p, BayesianNetwork_init)
    BayesianNetwork_prova = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev)

    x = torch.tensor(np.random.uniform(0, 5, (20, 10)), dtype=torch.float64)
    y = torch.tensor(np.random.choice(range(0, 10), 20), dtype=torch.long)

    # Forward passes (presumably required so sampled weights exist before
    # stack() is called — TODO confirm against BayesianNetwork).
    call = BayesianNetwork_prova_prev(x)
    output_prova = BayesianNetwork_prova(x)

    mu_prev, rho_prev, w_prev = BayesianNetwork_prova_prev.stack()
    # (a stray debug `print(mu_prev)` was removed here)
    loss_prior1 = BayesianNetwork_prova.get_gaussiandistancefromprior(
        mu_prev, mu_prev, rho_prev)

    # Overwrite mu and rho in place with the constant 10 and re-evaluate.
    mu_prev.copy_(10 + mu_prev.clone().detach().zero_())
    rho_prev.copy_(10 + rho_prev.clone().detach().zero_())
    loss_prior2 = BayesianNetwork_prova.get_gaussiandistancefromprior(
        mu_prev, mu_prev, rho_prev)

    assert loss_prior1.data.numpy() == loss_prior2.data.numpy()
コード例 #9
0
def main():
    """Plot a learning curve of the Jensen-Shannon divergence between the
    true CPT probabilities and those learned from datasets of growing
    size, averaged over several attempts per size; saves the plot to
    'Jensen-Shannon divergence.png'."""

    # Ground-truth CPT entries of the 5-node network, listed pairwise
    # (presumably P and 1-P for each conditional case — the pairs sum to 1).
    probabilities = [
        0.9, 0.1, 0.83, 0.17, 0.2, 0.8, 0.21, 0.79, 0.05, 0.95, 0.8, 0.2, 0.78,
        0.22, 0.69, 0.31, 0.05, 0.95, 0.8, 0.2, 0.6, 0.4
    ]

    # DAG over 5 nodes: 0 -> {1, 2}, 1 -> 3, 2 -> {3, 4}.
    adj_matrix = np.array([[0, 1, 1, 0, 0], [0, 0, 0, 1, 0], [0, 0, 0, 1, 1],
                           [0, 0, 0, 0, 0], [0, 0, 0, 0, 0]])

    network = BayesianNetwork(5, adj_matrix)
    nodes = network.nodes
    prob_dataset = probabilities_dataset(nodes)

    x = []  # dataset sizes tried
    y = []  # mean JS divergence at each size
    start = 10
    end = 5010
    attempt = 50     # repetitions averaged per dataset size
    iteration = 100  # step between consecutive dataset sizes
    for n in range(start, end, iteration):
        x.append(n)
        z = []
        for j in range(attempt):
            dataset = dataset_gen(n, adj_matrix, nodes, prob_dataset)
            qn = learning(nodes, dataset, n)
            divergence = js_divergence(probabilities, qn)
            z.append(divergence)
        y.append(sum(z) / attempt)

    print("x: ", x)
    print("y: ", y)
    plt.title("Learning curve of Jensen-Shannon divergence")
    plt.xlabel("Dimensione del dataset")
    plt.ylabel("Divergenza tra probabilità e qn")
    plt.plot(x, y)
    plt.savefig('Jensen-Shannon divergence.png')
    plt.clf()
コード例 #10
0
parser.add_argument("-o",
                    "--output",
                    dest="output",
                    help="encoding output file")

# -enc2 selects the second encoding; -c emits Cachet-compatible DIMACS.
parser.add_argument("-enc2", dest="enc2", action="store_true", default=False)
parser.add_argument("-c",
                    "--cachet",
                    dest="to_cachet",
                    action="store_true",
                    default=False)

if __name__ == '__main__':
    args = parser.parse_args()

    network = BayesianNetwork.create_from_file(args.file)

    if not args.enc2:
        enc1_cnf = network.to_enc1()
        enc1_cnf.convert()
        enc1 = enc1_cnf.elimEquiv().toDimac(toCachet=args.to_cachet)
        # Context manager closes the file even if write() raises; the
        # original open()/close() pair leaked the handle on error.
        with open(args.output, "w") as f:
            f.write(enc1)
    else:
        # NOTE(review): this branch converts enc2 but never writes the
        # result anywhere — presumably unfinished; behavior kept as-is.
        enc2_cnf = network.to_enc2()
        enc2_cnf.convert()
コード例 #11
0
ファイル: test.py プロジェクト: onlycjp/ML_Algorithms
    'HasPneumonia': ['IsSummer'],
    'HasRespiratoryProblems':
    ['HasFlu', 'HasHayFever', 'HasPneumonia', 'HasFoodPoisoning'],
    'HasGastricProblems': ['HasFlu', 'HasFoodPoisoning'],
    'HasRash': ['HasFoodPoisoning', 'HasHayFever'],
    'Coughs': ['HasFlu', 'HasPneumonia', 'HasRespiratoryProblems'],
    'IsFatigues': ['HasFlu', 'HasHayFever', 'HasPneumonia'],
    'Vomits': ['HasFoodPoisoning', 'HasGastricProblems'],
    'HasFever': ['HasFlu', 'HasPneumonia']
}

# Feature ordering (presumably matches the joint-probability table's
# column order — TODO confirm against query_from_table).
feature_list = [
    'HasFever', 'Vomits', 'IsFatigues', 'Coughs', 'HasRash',
    'HasGastricProblems', 'HasRespiratoryProblems', 'HasPneumonia',
    'HasHayFever', 'HasFoodPoisoning', 'HasFlu', 'IsSummer'
]

# Fit the network, then score its predicted joint table against the
# reference joint probabilities.
model = BayesianNetwork(settings2, feature_list)
model.fit(data)
pred_table = model.predict(joint_probs)
score = compute_accuracy(pred_table, joint_probs)

# Exact vs. predicted answers for two diagnostic queries:
# P(HasFlu | HasFever=T, Coughs=T) and P(Vomits | IsSummer=T).
query1 = query_from_table('HasFlu', feature_list, joint_probs,
                          ('HasFever', True), ('Coughs', True))
pred_query1 = query_from_table('HasFlu', feature_list, pred_table,
                               ('HasFever', True), ('Coughs', True))
query2 = query_from_table('Vomits', feature_list, joint_probs,
                          ('IsSummer', True))
pred_query2 = query_from_table('Vomits', feature_list, pred_table,
                               ('IsSummer', True))
コード例 #12
0
def test_evolution():
    """A hand-written SGD update on network 2 (w -= lr * grad, layer 0)
    must match optim.SGD's update on the identically-seeded network 1,
    to within exp(-10)."""

    # Seed so both copies sample the same initial weights.
    torch.manual_seed(0)
    np.random.seed(0)

    dim = np.array([10, 30, 10])
    L = 3  # layer count including input; L - 1 linear layers
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    BayesianNetwork_prev = BayesianNetwork(dim, alpha_k, sigma_k, c, pi, p,
                                           BayesianNetwork_init)
    # Two identical copies initialised from the same previous network.
    BayesianNetwork_1 = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prev)
    BayesianNetwork_2 = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prev)

    x = torch.tensor(np.random.uniform(0, 5, (20, 10)), dtype=torch.float64)
    y = torch.tensor(np.random.choice(range(0, 10), 20), dtype=torch.long)

    # Re-seed before each forward pass so both copies draw the same weight
    # sample and therefore produce identical outputs.
    torch.manual_seed(0)
    np.random.seed(0)
    call1 = BayesianNetwork_1(x)
    torch.manual_seed(0)
    np.random.seed(0)
    call2 = BayesianNetwork_2(x)

    call_prova = BayesianNetwork_prev(x)

    # Snapshot the previous network's mu/rho per layer, in the
    # dict-of-dicts layout expected by first_likelihood.
    mu_prev = {}
    rho_prev = {}

    with torch.no_grad():
        for i in range(0, L - 1):
            mu_i = {}
            rho_i = {}

            mu_i["weight"] = BayesianNetwork_prev.Linear_layer[
                i].mu.weight.data.clone().detach()
            mu_i["bias"] = BayesianNetwork_prev.Linear_layer[
                i].mu.bias.data.clone().detach()

            rho_i["weight"] = BayesianNetwork_prev.Linear_layer[
                i].rho.weight.data.clone().detach()
            rho_i["bias"] = BayesianNetwork_prev.Linear_layer[
                i].rho.bias.data.clone().detach()

            mu_prev[str(i)] = mu_i
            rho_prev[str(i)] = rho_i

    pi = 0.5
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    model = BayesianNetwork_1
    p = 1.0

    # Sanity check: both copies start from identical layer-0 weights.
    # NOTE(review): check1 is computed but never asserted — kept as-is.
    check1 = (BayesianNetwork_2.Linear_layer[0].mu.weight.data.numpy() ==
              BayesianNetwork_1.Linear_layer[0].mu.weight.data.numpy()).all()

    # --- Network 1: plain SGD step driven by the optimiser. ---
    optimizer1 = optim.SGD(BayesianNetwork_1.parameters(), lr=0.001)
    optimizer1.zero_grad()

    # Prior losses are computed but deliberately excluded from both losses
    # so the comparison involves only the cross-entropy gradients.
    loss_prior_met1 = first_likelihood(pi, mu_prev, alpha_k, sigma_k, c, model,
                                       mu_prev, rho_prev, p, L)
    loss_net1 = F.cross_entropy(call1, y)
    loss1 = loss_net1  #+ loss_prior_met1

    loss1.backward()
    optimizer1.step()

    # --- Network 2: same loss, update applied by hand. ---
    optimizer2 = optim.SGD(BayesianNetwork_2.parameters(), lr=0.001)
    optimizer2.zero_grad()

    mu_prev2, rho_prev2, w_prev2 = BayesianNetwork_prev.stack()
    mu2, rho2, w2 = BayesianNetwork_2.stack()

    loss_prior_met2 = BayesianNetwork_2.get_gaussiandistancefromprior(
        mu_prev2, mu_prev2, rho_prev2)
    loss_net2 = F.cross_entropy(call2, y)
    loss2 = loss_net2  #+ loss_prior_met2

    loss2.backward()
    # Manual SGD update (w <- w - lr * grad) on layer 0 only.
    BayesianNetwork_2.Linear_layer[
        0].mu.weight.data = BayesianNetwork_2.Linear_layer[
            0].mu.weight.data - 0.001 * BayesianNetwork_2.Linear_layer[
                0].mu.weight.grad.data

    # BUG FIX: the original wrote np.abs((a - b) < eps) — abs() of a
    # boolean array — which only upper-bounded the *signed* difference and
    # passed for arbitrarily negative differences. Compare |a - b| < eps.
    assert (np.abs(BayesianNetwork_2.Linear_layer[0].mu.weight.data.numpy() -
                   BayesianNetwork_1.Linear_layer[0].mu.weight.data.numpy())
            < np.exp(-10)).all()
コード例 #13
0
def test_BayesianNetwork_prior_stack_evolution():
    """Adding (10 * mu2).sum() to network 2's loss must shift its layer-0
    mu-weight gradient by exactly +10 per entry relative to network 1,
    since both networks are seeded identically."""

    dim = np.array([10, 30, 10])
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    BayesianNetwork_prova_prev = BayesianNetwork(dim, alpha_k, sigma_k, c, pi,
                                                 p, BayesianNetwork_init)
    # Two identical copies initialised from the same previous network.
    BayesianNetwork_prova1 = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev)
    BayesianNetwork_prova2 = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev)

    optimizer1 = optim.Adam(BayesianNetwork_prova1.parameters())
    optimizer1.zero_grad()
    optimizer2 = optim.Adam(BayesianNetwork_prova2.parameters())
    optimizer2.zero_grad()

    x = torch.tensor(np.random.uniform(0, 5, (20, 10)), dtype=torch.float64)
    y = torch.tensor(np.random.choice(range(0, 10), 20), dtype=torch.long)

    call = BayesianNetwork_prova_prev(x)

    # Re-seed before each forward pass so both copies draw identical
    # weight samples and produce identical outputs.
    torch.manual_seed(0)
    np.random.seed(0)
    output_prova1 = BayesianNetwork_prova1(x)

    torch.manual_seed(0)
    np.random.seed(0)
    output_prova2 = BayesianNetwork_prova2(x)

    loss_network1 = F.cross_entropy(output_prova1, y)
    loss_network2 = F.cross_entropy(output_prova2, y)

    mu_prev, rho_prev, w_prev = BayesianNetwork_prova_prev.stack()
    loss_prior1 = BayesianNetwork_prova1.get_gaussiandistancefromprior(
        mu_prev, mu_prev, rho_prev)
    loss_prior2 = BayesianNetwork_prova2.get_gaussiandistancefromprior(
        mu_prev, mu_prev, rho_prev)

    mu2, rho2, w2 = BayesianNetwork_prova2.stack()

    loss1 = loss_network1 + loss_prior1
    # Extra linear term: d/dmu of (10 * mu2).sum() is 10 for every entry.
    loss2 = loss_network2 + loss_prior2 + (10 * mu2).sum()

    loss1.backward()
    loss2.backward()

    # BUG FIX: the original asserted np.abs((diff) < eps) — abs() of a
    # boolean array — which only upper-bounded the *signed* difference and
    # passed for arbitrarily negative differences. Compare |diff| < eps.
    assert (np.abs(
        BayesianNetwork_prova1.Linear_layer[0].mu.weight.grad.data.numpy() -
        (BayesianNetwork_prova2.Linear_layer[0].mu.weight.grad.data.numpy() -
         10)) < np.exp(-5)).all()
コード例 #14
0
    if len(variable_A.CPD.parameters)==2 and \
        len(variable_B.CPD.parameters)==4 and \
        len(variable_C.CPD.parameters)==4 and \
        len(variable_D.CPD.parameters)==4 and \
        len(variable_E.CPD.parameters)==4:
        print bcolors.GREEN+"CPDs have the correct number of parameters! (Success)"+bcolors.ENDC
    else:
        print bcolors.RED+"CPDs don't have the correct number of parameters... (Failed)"+bcolors.ENDC
except:
    print bcolors.RED+"CPDs don't have the correct number of parameters... (Failed)"+bcolors.ENDC
    

# Create the Bayesian Network from the variables and edges defined above.
edges = (e1,e2,e3,e4)
variables = (variable_A, variable_B, variable_C, variable_D, variable_E)
bn = BayesianNetwork(variables, edges, 'Example BN')

# Verify the network stored exactly the given variables and relations.
# NOTE(review): the bare `except:` swallows every error (including typos
# in attribute names); narrowing it to `except Exception:` would be safer.
# This snippet is Python 2 (print statement syntax).
try:
    if bn.variables==set([variable_A, variable_B, variable_C, variable_D, variable_E]) and \
        bn.relations==set([e1,e2,e3,e4]):
        print bcolors.GREEN+"Bayesian Network created! (Success)"+bcolors.ENDC
    else:
        print bcolors.RED+"The Bayesian Network could not be created... (Failed)"+bcolors.ENDC
except:
    print bcolors.RED+"The Bayesian Network could not be created... (Failed)"+bcolors.ENDC
    

# Return a variable by its name
try:
    if bn.variables_dict['A']==variable_A and bn.variables_dict['B']==variable_B:
        print bcolors.GREEN+"Bayesian network variable corrrectly returned! (Success)"+bcolors.ENDC
コード例 #15
0
def test_BayesianNetwork_prior():
    """One Adam step with cross-entropy plus the stacked prior loss must
    move the trained network's mu/rho away from its source network, while
    the two untrained source networks keep (some) equal mu biases."""

    dim = np.array([10, 30, 10])
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    # Chain of three networks, each initialised from the previous one.
    BayesianNetwork_prova_prev_prev = BayesianNetwork(dim, alpha_k, sigma_k, c,
                                                      pi, p,
                                                      BayesianNetwork_init)
    BayesianNetwork_prova_prev = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev_prev)
    BayesianNetwork_prova = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev)

    optimizer = optim.Adam(BayesianNetwork_prova.parameters())

    x = torch.tensor(np.random.uniform(0, 5, (20, 10)), dtype=torch.float64)
    y = torch.tensor(np.random.choice(range(0, 10), 20), dtype=torch.long)

    # Forward passes (presumably required so sampled weights exist before
    # stack() is called — TODO confirm against BayesianNetwork).
    call = BayesianNetwork_prova_prev(x)
    output_prova = BayesianNetwork_prova(x)

    loss_network = F.cross_entropy(output_prova, y)

    mu_prev, rho_prev, w_prev = BayesianNetwork_prova_prev.stack()
    loss_prior = BayesianNetwork_prova.get_gaussiandistancefromprior(
        mu_prev, mu_prev, rho_prev)

    loss = loss_network + loss_prior

    loss.backward()
    optimizer.step()

    # NOTE(review): range(0, 1) only inspects layer 0 — presumably meant
    # to cover every layer; confirm before widening.
    check_diff = True
    for layer in range(0, 1):
        # The trained network's mu weights moved away from the source...
        check_diff = (
            check_diff and
            (BayesianNetwork_prova.Linear_layer[layer].mu.weight.data.numpy()
             != BayesianNetwork_prova_prev.Linear_layer[layer].mu.weight.data.
             numpy()).any())

        # ...and so did its rho weights...
        check_diff = (
            check_diff and
            (BayesianNetwork_prova.Linear_layer[layer].rho.weight.data.numpy()
             != BayesianNetwork_prova_prev.Linear_layer[layer].rho.weight.data.
             numpy()).any())

        # ...and its rho biases.
        check_diff = (
            check_diff and
            (BayesianNetwork_prova.Linear_layer[layer].rho.bias.data.numpy() !=
             BayesianNetwork_prova_prev.Linear_layer[layer].rho.bias.data.
             numpy()).any())

        # The two untrained source networks still agree on mu biases.
        check_diff = (check_diff
                      and (BayesianNetwork_prova_prev_prev.Linear_layer[layer].
                           mu.bias.data.numpy() == BayesianNetwork_prova_prev.
                           Linear_layer[layer].mu.bias.data.numpy()).any())

    assert (check_diff)
コード例 #16
0
        M = True

cond = []  # evidence: first letter of every CLI token after 'given'
if 'given' in argv:
    for g in range(argv.index('given') + 1, len(argv)):
        cond.append(argv[g][0])

def buildTruth(truthTable, truth):
    """Expand every None slot in *truth* into both True and False (True
    branch first, depth-first) and append each fully-assigned row to
    *truthTable*. Returns *truthTable*, which is mutated in place."""
    if None not in truth:
        # Fully assigned: record this row as-is.
        truthTable.append(truth)
        return truthTable
    slot = truth.index(None)
    for value in (True, False):
        branch = deepcopy(truth)
        branch[slot] = value
        buildTruth(truthTable, branch)
    return truthTable


# Enumerate every completion of the partially-observed assignment
# [B, E, A, J, M] (None entries expand to True/False).
truthTable = buildTruth([], [B, E, A, J, M])

bn = BayesianNetwork()  #instantiating the class
# Sum the probability of each full assignment, marginalising the unknowns.
final_probability = 0.0
for t in truthTable:
    final_probability += bn.calculateProb(t[0], t[1], t[2], t[3], t[4], cond)

# Python 2 print statement (this snippet is Python 2 code).
print 'The answer of probability is : {}'.format(final_probability)
コード例 #17
0
def test_prior_withdiffcomp():
    """The legacy first_likelihood computation and the new stack-based
    get_gaussiandistancefromprior must agree (|diff| < exp(-8)) when both
    are evaluated at the previous network's parameters."""

    dim = np.array([10, 30, 10])
    L = 3  # layer count including input; L - 1 linear layers
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    pi = 0.5
    p = 1.0
    BayesianNetwork_init = False

    BayesianNetwork_prova_prev = BayesianNetwork(dim, alpha_k, sigma_k, c, pi,
                                                 p, BayesianNetwork_init)
    BayesianNetwork_prova = BayesianNetwork(
        dim,
        alpha_k,
        sigma_k,
        c,
        pi,
        p,
        BayesianNetwork_init=BayesianNetwork_prova_prev)

    x = torch.tensor(np.random.uniform(0, 5, (20, 10)), dtype=torch.float64)

    # Forward passes (presumably required so sampled weights exist before
    # stack() is called — TODO confirm against BayesianNetwork).
    call1 = BayesianNetwork_prova(x)
    call2 = BayesianNetwork_prova_prev(x)

    # Snapshot mu/rho per layer in the dict-of-dicts layout that the
    # legacy first_likelihood expects.
    mu_prev = {}
    rho_prev = {}

    with torch.no_grad():
        for i in range(0, L - 1):
            mu_i = {}
            rho_i = {}

            mu_i["weight"] = BayesianNetwork_prova_prev.Linear_layer[
                i].mu.weight.data.clone().detach()
            mu_i["bias"] = BayesianNetwork_prova_prev.Linear_layer[
                i].mu.bias.data.clone().detach()

            rho_i["weight"] = BayesianNetwork_prova_prev.Linear_layer[
                i].rho.weight.data.clone().detach()
            rho_i["bias"] = BayesianNetwork_prova_prev.Linear_layer[
                i].rho.bias.data.clone().detach()

            mu_prev[str(i)] = mu_i
            rho_prev[str(i)] = rho_i

    pi = 0.5
    alpha_k = 0.5
    sigma_k = np.exp(-1)
    c = np.exp(7)
    model = BayesianNetwork_prova
    p = 1.0

    loss_prior_metold = first_likelihood(pi, mu_prev, alpha_k, sigma_k, c,
                                         model, mu_prev, rho_prev, p, L)

    mu_prev2, rho_prev2, w_prev2 = BayesianNetwork_prova_prev.stack()

    loss_prior_metnew = BayesianNetwork_prova.get_gaussiandistancefromprior(
        mu_prev2, mu_prev2, rho_prev2)

    # BUG FIX: the original asserted np.abs((new - old) < eps) — abs() of
    # a boolean — which passed whenever new - old was merely below eps,
    # including arbitrarily negative values. Compare |new - old| < eps.
    assert np.abs(loss_prior_metnew.data.numpy() -
                  loss_prior_metold.data.numpy()) < np.exp(-8)
コード例 #18
0
# Evidence: first letter of every CLI token after the 'given' keyword.
cond = []
if 'given' in argv:
    for g in range(argv.index('given') + 1, len(argv)):
        cond.append(argv[g][0])


# Build a truth table via dfs
def buildTruth(truthTable, truth):
    """Append every completion of *truth* to *truthTable*, expanding each
    None slot into True then False, depth-first (same order as the
    recursive formulation). Returns the mutated *truthTable*."""
    pending = [truth]
    while pending:
        row = pending.pop()
        if row.count(None) == 0:
            truthTable.append(row)
        else:
            idx = row.index(None)
            on = deepcopy(row)
            on[idx] = True
            off = deepcopy(row)
            off[idx] = False
            # Push False first so True is popped (and emitted) first.
            pending.append(off)
            pending.append(on)
    return truthTable


# Enumerate every completion of the partially-observed assignment
# [B, E, A, J, M] (None entries expand to True/False).
truthTable = buildTruth([], [B, E, A, J, M])

# Instantiate class
bn = BayesianNetwork()
# Sum the probability of each full assignment, marginalising the unknowns.
final_probability = 0.0
for t in truthTable:
    final_probability += bn.calculateProb(t[0], t[1], t[2], t[3], t[4], cond)

# Python 2 print statement (this snippet is Python 2 code).
print 'The answer of probability is : {}'.format(final_probability)
コード例 #19
0
ファイル: test.py プロジェクト: damingnju/ML_Algorithms
    }

# Network structure: each key is a node, its value the list of its parents.
settings2 = {
    'IsSummer': [],
    'HasFlu': ['IsSummer'],
    'HasFoodPoisoning': [],
    'HasHayFever': [],
    'HasPneumonia': ['IsSummer'],
    'HasRespiratoryProblems': ['HasFlu', 'HasHayFever', 'HasPneumonia', 'HasFoodPoisoning'],
    'HasGastricProblems': ['HasFlu', 'HasFoodPoisoning'],
    'HasRash': ['HasFoodPoisoning', 'HasHayFever'],
    'Coughs': ['HasFlu', 'HasPneumonia', 'HasRespiratoryProblems'],
    'IsFatigues': ['HasFlu', 'HasHayFever', 'HasPneumonia'],
    'Vomits': ['HasFoodPoisoning', 'HasGastricProblems'],
    'HasFever': ['HasFlu', 'HasPneumonia']
    }

# Feature ordering (presumably matches the joint-probability table's
# column order — TODO confirm against query_from_table).
feature_list = ['HasFever', 'Vomits', 'IsFatigues', 'Coughs', 'HasRash',
                'HasGastricProblems', 'HasRespiratoryProblems', 'HasPneumonia',
                'HasHayFever', 'HasFoodPoisoning', 'HasFlu', 'IsSummer']

# Fit the model, then score its predicted joint table against the reference.
model = BayesianNetwork(settings2, feature_list)
model.fit(data)
pred_table = model.predict(joint_probs)
score = compute_accuracy(pred_table, joint_probs)

# Exact vs. predicted answers for two diagnostic queries:
# P(HasFlu | HasFever=T, Coughs=T) and P(Vomits | IsSummer=T).
query1 = query_from_table('HasFlu', feature_list, joint_probs, ('HasFever', True), ('Coughs', True))
pred_query1 = query_from_table('HasFlu', feature_list, pred_table, ('HasFever', True), ('Coughs', True))
query2 = query_from_table('Vomits', feature_list, joint_probs, ('IsSummer', True))
pred_query2 = query_from_table('Vomits', feature_list, pred_table, ('IsSummer', True))