Example #1
File: bn.py Project: MarioAlL/delp3Exp
 def make_CPTs(self, nodes, alpha):
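     # Fill each given node's CPT with a probability drawn uniformly from [alpha, 1] and its complement, in random order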
     for node in nodes:
         parents = list(self.bn.parents(node))
         if len(parents) != 0:
             parValues = list(itertools.product([1, 0],
                                                repeat=len(parents)))
             for parVal in parValues:
                 prnode = "{:.2f}".format(random.uniform(alpha, 1))
                 complementnode = "{:.2f}".format(1.00 - float(prnode))
                 change_prob = np.random.random()
                 if change_prob > 0.50:
                     newCPT = [float(complementnode), float(prnode)]
                 else:
                     newCPT = [float(prnode), float(complementnode)]
                 self.bn.cpt(node)[{
                     str(parents[index]): value
                     for index, value in enumerate(parVal)
                 }] = newCPT
         else:
             prnode = "{:.2f}".format(random.uniform(alpha, 1))
             complementnode = "{:.2f}".format(1.00 - float(prnode))
             change_prob = np.random.random()
             if change_prob > 0.50:
                 newCPT = [float(complementnode), float(prnode)]
             else:
                 newCPT = [float(prnode), float(complementnode)]
             self.bn.cpt(node).fillWith(newCPT)
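     # Every node not in the given list gets a freshly generated random CPT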
     othersnodes = list(self.bn.nodes())
     for othernode in othersnodes:
         if othernode not in nodes:
             self.bn.generateCPT(othernode)
     gum.saveBN(self.bn, self.path + self.name + '.bifxml')
Example #2
def generate_BN_explanations(instance, label_lst, feature_names, class_var,
                             encoder, scaler, model, path, dataset_name):

    # seed NumPy's random number generator so the results are reproducible
    np.random.seed(515)

    # seed core Python's built-in random number generator as well
    rn.seed(515)

    indx = instance['index']
    prediction_type = instance['prediction_type'].lower() + "s"
    prediction_type = prediction_type.replace(" ", "_")

    # generate permutations
    df = generate_permutations(instance, label_lst, feature_names, class_var,
                               encoder, scaler, model)

    # discretize data
    df_discr = discretize_dataframe(df, class_var, num_bins=4)

    # save discretised dataframe (for debugging and reproducibility purposes)
    path_to_permutations = path + "feature_permutations/" + dataset_name.replace(
        ".csv", "") + "/" + prediction_type + "/" + str(indx) + ".csv"
    df_discr.to_csv(path_to_permutations, index=False)

    # normalise dataframe
    normalise_dataframe(path_to_permutations)

    # learn BN
    bn, infoBN, essencGraph = learnBN(
        path_to_permutations.replace(".csv", "_norm.csv"))

    # perform inference
    inference = gnb.getInference(bn,
                                 evs={},
                                 targets=df_discr.columns.to_list(),
                                 size='12')

    # show networks
    gnb.sideBySide(
        *[bn, inference, infoBN],
        captions=["Bayesian Network", "Inference", "Information Network"])

    # save to file
    path_to_explanation = path + "explanations/" + dataset_name.replace(
        ".csv", "") + "/BN/" + prediction_type + "/"
    gum.lib.bn2graph.dotize(bn, path_to_explanation + str(indx) + "_BN")
    gum.saveBN(bn, path_to_explanation + str(indx) + "_BN.net")

    return [bn, inference, infoBN]
Example #3
def transform():
    bn1 = gum.loadBN("data/test_level_0.o3prm", system="aSys")
    print("transform : prm loaded")

    gum.saveBN(bn1, "data/test_level_0.bif")
    print("transform : bn written")

    bn = gum.loadBN("data/test_level_0.bif")
    print("transform : bn loaded")

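    # Add a small constant to every CPT entry, then renormalize each conditional distribution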
    for i in bn.ids():
        bn.cpt(i).translate(1e-2).normalizeAsCPT()
    print("transform : bn normalized")

    gum.saveBN(bn, "data/test_level_0_1.bif")
    print("transform : bn written")
Example #4
File: bn.py Project: MarioAlL/delp3Exp
 def build_save_BN(self, dGraphNodes, dGraphEdges, randomCPTs):
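     # Build a BN of binary variables from the given nodes and edges, optionally with random CPTs, then render and save it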
     dGraphEdges = [(str(A), str(B)) for (A, B) in dGraphEdges]
     bn = gum.BayesNet(self.name)
     for var in dGraphNodes:
         bn.add(gum.LabelizedVariable(str(var), str(var), 2))
     for edge in dGraphEdges:
         bn.addArc(edge[0], edge[1])
     if randomCPTs:
         # Generate random CPTs for every node
         bn.generateCPTs()
     # Render the BN to PDF and save it in BIFXML format
     gumGraph.dotize(bn, self.path + self.name, 'pdf')
     gum.saveBN(bn, self.path + self.name + '.bifxml')
     self.generator = gum.BNDatabaseGenerator(bn)
     self.ie = gum.LazyPropagation(bn)
     self.bn = bn
     self.structure = [dGraphNodes, dGraphEdges]
Example #5
    def testReadAfterWrite(self):
        bn = gum.BayesNet()
        bn.add(gum.RangeVariable("1", "", 0, 1))
        bn.add(
            gum.DiscretizedVariable("2",
                                    "").addTick(0.0).addTick(0.5).addTick(1.0))
        bn.add(gum.LabelizedVariable("3", "", 2))
        bn.add(gum.LabelizedVariable("4", "", 2))
        bn.add(gum.LabelizedVariable("5", "", 3))

        bn.addArc("1", "3")
        bn.addArc("1", "4")
        bn.addArc("3", "5")
        bn.addArc("4", "5")
        bn.addArc("2", "4")
        bn.addArc("2", "5")

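        # Fill the CPTs with explicit probability values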
        bn.cpt("1").fillWith([0.2, 0.8])
        bn.cpt("2").fillWith([0.3, 0.7])
        bn.cpt("3").fillWith([0.1, 0.9, 0.9, 0.1])
        bn.cpt("4").fillWith([0.4, 0.6, 0.5, 0.5, 0.5, 0.5, 1.0, 0.0])
        bn.cpt("5").fillWith([
            0.3, 0.6, 0.1, 0.5, 0.5, 0.0, 0.5, 0.5, 0.0, 1.0, 0.0, 0.0, 0.4,
            0.6, 0.0, 0.5, 0.5, 0.0, 0.5, 0.5, 0.0, 0.0, 0.0, 1.0
        ])

        gum.saveBN(bn, self.agrumSrcDir("o3prm/BNO3PRMIO_file.o3prm"))

        bn2 = gum.loadBN(self.agrumSrcDir("o3prm/BNO3PRMIO_file.o3prm"),
                         system="bayesnet")

        self.assertEqual(bn.dim(), bn2.dim())
        self.assertEqual(bn.log10DomainSize(), bn2.log10DomainSize())
        for n in bn.names():
            self.assertEqual(bn.variable(n).name(), bn2.variable(n).name())
            self.assertEqual(
                bn.variable(n).varType(),
                bn2.variable(n).varType())
            self.assertEqual(
                bn.variable(n).domainSize(),
                bn2.variable(n).domainSize())
Example #6
def learn_bn(learn_algo):
    ## create and learn the BN, and set up inference
    dataset_path = 'discr_wps.csv'
    bn = inf.learn_bn(learn_algo, dataset_path)
    bn_url = "BN.bif"
    agrum.saveBN(bn, bn_url)
    print("BN saved in " + bn_url)

    ## BN validation : identify unknown effects
    ## effect = [goal_value, vector_orient_value]
    #    unk_aff_vector, nb_aff_dataset, nb_aff_total = check.find_unk_affordances(bn_url)
    #
    #    if nb_aff_total != 0:
    #        print('\nRESULT:')
    ##        print(nb_aff_total, 'possible affordances in the experiment')
    ##        print(nb_aff_dataset, 'possible affordances in the dataset')
    #        print(len(unk_aff_vector),'affordances not learned yet')
    #    else :
    #        print('\nNOT VALID DATASET. Only',nb_aff_dataset,'lines read')

    return bn
Example #7
def bayesNet(evs):

    # Creating a BayesNet with six variables
    bayesNets = gum.BayesNet('Quality Prediction')

    # Adding nodes the long way
    commitNumber = bayesNets.add(
        gum.LabelizedVariable(
            'Commit Number', 'commits',
            0).addLabel("Low").addLabel("Medium").addLabel("High"))
    numberOfDevloper = bayesNets.add(
        gum.LabelizedVariable(
            "Number Of Developer", "Devs",
            0).addLabel("Low").addLabel("Medium").addLabel("High"))
    buildFailures = bayesNets.add(
        gum.LabelizedVariable(
            'Build Failures', 'failures',
            0).addLabel("Low").addLabel("Medium").addLabel("High"))
    numberOfFixedBug = bayesNets.add(
        gum.LabelizedVariable('Number Of Fixed Bug', 'fixed bugs', 2))
    nbFunctionalEvolution = bayesNets.add(
        gum.LabelizedVariable(
            "Number Of Functional Evolution", "Evol",
            0).addLabel("Low").addLabel("Medium").addLabel("High"))
    categoryOfIncident = bayesNets.add(
        gum.LabelizedVariable("Incident Category", "incidents", 0).addLabel(
            "Tertiaire").addLabel("Secondaire").addLabel("Primaire"))

    # creation of the links between  nodes
    for link in [(commitNumber, buildFailures),
                 (numberOfDevloper, buildFailures),
                 (nbFunctionalEvolution, numberOfFixedBug),
                 (buildFailures, numberOfFixedBug),
                 (buildFailures, categoryOfIncident),
                 (numberOfFixedBug, categoryOfIncident)]:
        bayesNets.addArc(*link)
    print(bayesNets)

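    # Prior distributions of the three root nodes (Low, Medium, High)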
    bayesNets.cpt(commitNumber)[:] = [0.5, 0.3, 0.2]
    bayesNets.cpt(numberOfDevloper)[:] = [0.5, 0.4, 0.5]
    bayesNets.cpt(nbFunctionalEvolution)[:] = [0.5, 0.4, 0.5]
    print(bayesNets.cpt(numberOfFixedBug).var_names)
    bayesNets.cpt(buildFailures).var_names
    bayesNets.cpt(numberOfDevloper)
    #bayesNets.cpt(numberOfFixedBug)[{'buildFailures': 1, 'Number Of Functional Evolution': "Medium"}]=[0.5,0.4]
    #bayesNets.cpt(numberOfFixedBug)[{'buildFailures': 1, 'Number Of Functional Evolution': "High"}]=[0.54,0.46]
    #bayesNets.cpt(numberOfFixedBug)[{'buildFailures': 1, 'Number Of Functional Evolution': "Low"}]=[0.54,0.46]
    bayesNets.cpt(numberOfFixedBug)[{
        'Build Failures': 0,
        'Number Of Functional Evolution': "Medium"
    }] = [0.5, 0.4]
    bayesNets.cpt(numberOfFixedBug)[{
        'Build Failures': 0,
        'Number Of Functional Evolution': "High"
    }] = [0.54, 0.46]
    bayesNets.cpt(numberOfFixedBug)[{
        'Build Failures': 0,
        'Number Of Functional Evolution': "Low"
    }] = [0.53, 0.47]
    bayesNets.cpt(numberOfFixedBug)

    bayesNets.cpt(buildFailures)[{
        'Commit Number': "Low",
        'Number Of Developer': "Low"
    }] = 0.2
    bayesNets.cpt(buildFailures)[{
        'Commit Number': "Low",
        'Number Of Developer': "High"
    }] = 0.4
    bayesNets.cpt(buildFailures)[{
        'Commit Number': "High",
        'Number Of Developer': "Low"
    }] = 0.2
    bayesNets.cpt(buildFailures)[{
        'Commit Number': "High",
        'Number Of Developer': "High"
    }] = 0.9
    bayesNets.cpt(buildFailures)[{
        'Commit Number': "Low",
        'Number Of Developer': "Medium"
    }] = 0.9
    bayesNets.cpt(buildFailures)[{
        'Commit Number': "High",
        'Number Of Developer': "Medium"
    }] = 0.1
    bayesNets.cpt(buildFailures)[{
        'Commit Number': "Medium",
        'Number Of Developer': "Medium"
    }] = 0.6
    bayesNets.cpt(buildFailures)

    bayesNets.cpt(categoryOfIncident)[{
        'Build Failures': 0,
        'Number Of Fixed Bug': 0
    }] = [0.2, 0.3, 0.5]
    bayesNets.cpt(categoryOfIncident)[{
        'Build Failures': 0,
        'Number Of Fixed Bug': 1
    }] = [0.5, 0.3, 0.2]
    bayesNets.cpt(categoryOfIncident)
    ie = gum.LazyPropagation(bayesNets)
    gum.saveBN(bayesNets, "QualtiyPrediction.bifxml")
    bn = gum.loadBN("QualtiyPrediction.bifxml")

    bn

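    # Run exact inference on the reloaded network with the supplied evidence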
    output_parameters_labels = ['Incident Category']
    ie = gum.LazyPropagation(bn)
    ie.setEvidence(evs)
    ie.makeInference()
    resultCSV = []
    resultCSV.append('Parameter, Low, Medium, High')
    for output_parameter_label in output_parameters_labels:
        results = ie.posterior(output_parameter_label).tolist()
        resultCSV.append(output_parameter_label + ', ' +
                         str(round(results[0], 3)) + ', ' +
                         str(round(results[1], 3)) + ', ' +
                         str(round(results[2], 3)))

    #gnb.showInference(bn,evs={})
    resultBytes = BNinference2dot(bn, evs=evs).create(format='png')
    resultBytesStr = base64.b64encode(resultBytes)

    return resultBytesStr, resultCSV
Example #8
learner = gum.BNLearner("WholeLog.csv")

#learner.learnParameters(bn)

print(learner.names())
learner.useScoreAIC()
learner.setSliceOrder([[0, 1, 2, 3, 4, 5, 6, 7, 8], [9, 10, 11, 12]])

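# Smooth the counts with a small prior and search for the structure with greedy hill climbing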
learner.useAprioriSmoothing(10e-2)
learner.useGreedyHillClimbing()
#learner.useLocalSearchWithTabuList()
#learner.useK2([0,1,2,3,4,5,6,7,8, 9, 10, 11, 12])

bn = learner.learnBN()
gum.saveBN(bn, "Many.bif")

generator = CSharpGenerator()
#generator = phpGenerator.PhpGenerator()
filename = "Many.cs"

import pyAgrum as gum
import metaGenBayes.compiler as Compiler

targets = ['move?', 'turn?', 'shell?', "shield?"]
un_sur_neuf = [1.0 / 9.0] * 9
evs = {
    'distance?': [0.25, 0.25, 0.25, 0.25],
    "direction?": [
        1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0,
        1.0 / 9.0, 1.0 / 9.0, 1.0 / 9.0
Example #9
learner=gum.BNLearner("WholeLog.csv")

#learner.learnParameters(bn)

print(learner.names())
learner.useScoreAIC()
learner.setSliceOrder([[0,1,2,3,4,5,6,7,8], [9, 10, 11, 12]])

learner.useAprioriSmoothing(10e-2)
learner.useGreedyHillClimbing()
#learner.useLocalSearchWithTabuList()
#learner.useK2([0,1,2,3,4,5,6,7,8, 9, 10, 11, 12])

bn = learner.learnBN()
gum.saveBN(bn, "Many.bif")

generator = CSharpGenerator()
#generator = phpGenerator.PhpGenerator()
filename="Many.cs"

import pyAgrum as gum
import metaGenBayes.compiler as Compiler


targets = ['move?', 'turn?', 'shell?', "shield?"]
un_sur_neuf = [1.0/9.0]*9
evs = {'distance?':[0.25, 0.25, 0.25, 0.25],
          "direction?":[1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0],
          'dist_coll_1?':[1.0/3.0, 1.0/3.0, 1.0/3.0],
          'dir_coll_1?':[1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0, 1.0/9.0],
Example #10
import pyAgrum as gum
from gumLib.pyAgrum_header import pyAgrum_header

pyAgrum_header(2011)
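# NB: this snippet uses an older pyAgrum API (LabelizedVar, insertArc, saveBIF)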

bn = gum.BayesNet('exo1')

# sexe = 0(H)/1(F)
# daltonisme = 0(D) / 1 (nonD)

sexe, daltonisme = [
    bn.add(gum.LabelizedVar(nom, '', 2)) for nom in 'sexe daltonisme'.split()
]
bn.insertArc(sexe, daltonisme)

bn.cpt(sexe)[:] = [0.5, 0.5]

bn.cpt(daltonisme)[0, :] = [0.08, 0.92]
bn.cpt(daltonisme)[1, :] = [0.005, 0.995]

bn.saveBIF("exo1.bif")
for line in open("exo1.bif"):
    print(line, end="")

print("for gum.loadBN or gum.saveBN, possible files ext are =" +
      gum.availableBNExts())
gum.saveBN(bn, "exo1.dsl")
for line in open("exo1.dsl"):
    print(line, end="")
Example #11
def save_bn(bn, bn_url):
    agrum.saveBN(bn, bn_url)
    print("BN saved in " + bn_url)
Example #12
File: genereBN.py Project: agrumery/aGrUM
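# Add random parents to the class node until it has at least NBRPARENTSMIN of them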
while (len(bn.parents(CLASSE)) < NBRPARENTSMIN):
    nvparent = int(gum.randomProba() * bn.size())
    try:
        bn.addArc(nvparent, CLASSE)
    except gum.Exception:
        print(f"  Failed in {bn.parents(0)} with {nvparent}")

print("(*) re-generating CPTs")
bn.generateCPTs()

fileid = "{0}-{1}-{2}-{3}".format(
    datetime.datetime.now().strftime("%Y%m%d-%H%M%S"), NBRNOEUDS, NBRARCS,
    MODMAX)

print("(*) saving bif file {0}".format(fileid))
gum.saveBN(bn, "BN{0}.bif".format(fileid))

print("(*) saving learning csv file {0}".format(fileid))
print()
LLlearn = bn2csv.generateCSV(bn, "BN{0}-{1}.csv".format(fileid, NBCASELEARN),
                             NBCASELEARN, True)

print("(*) saving test csv file {0}".format(fileid))
print()
LLtest = bn2csv.generateCSV(bn, "BN{0}-{1}.csv".format(fileid, NBCASETEST),
                            NBCASETEST, True)

print("classe : {0}".format(CLASSE))
print("Modalités : {0}".format(bn.variable(CLASSE)))
print("parents :{0}".format(str(bn.parents(CLASSE))))
print("log-likelihood (learning): {0}".format(LLlearn))
Example #13
def save_bn(bn, bn_url):
    agrum.saveBN(bn, bn_url)
    if sim_param.debug:
        print("BN saved in " + bn_url)
Example #14
bn.cpt(s).var_names

bn.cpt(w).var_names     # [r, s, w]
bn.cpt(w)[0,0,:] = [1, 0]     # when r=0, s=0
bn.cpt(w)[0,1,:] = [0.1, 0.9]      # r=0, s=1
bn.cpt(w)[1,0,:] = [0.1, 0.9]      # r=1, s=0
bn.cpt(w)[1,1,:] = [0.01, 0.99]    # r=1, s=1

# use dictionaries to fill in the values -- it is easier and avoids common errors

bn.cpt(r)[{'c': 0}] = [0.8, 0.2]
bn.cpt(r)[{'c': 1}] = [0.2, 0.8]
# use the name (string) of the variable, 'c'; do not use the variable c itself (it won't work)

#   NOW YOUR BN IS COMPLETE!!   #   

# ------------------------------------------

# formats in which we can save our Bayesian network
# print(gum.availableBNExts())
# out: bif|dsl|net|bifxml|o3prm|uai

# Saving the BN to a file

# gum.saveBN(bn, os.path.join("out","WaterSprinkler.bif"))
gum.saveBN(bn, "WaterSprinkler.bif")

# Loading a BN from a file
bn2 = gum.loadBN("WaterSprinkler.bif")

Example #15
print("(*) forcing node {0} to have at least {1} parents".format(CLASSE,NBRPARENTSMIN))
while (len(bn.parents(CLASSE))<NBRPARENTSMIN):
  nvparent=int(gum.randomProba()*bn.size())
  try:
    bn.addArc(nvparent,CLASSE)
  except gum.Exception:
    print("  Failed in {0} with {1}".format(str(bn.parents(0)),nvparent))

print("(*) re-generating CPTs")
bn.generateCPTs()

fileid="{0}-{1}-{2}-{3}".format(datetime.datetime.now().strftime("%Y%m%d-%H%M%S"),NBRNOEUDS,NBRARCS,MODMAX)

print("(*) saving bif file {0}".format(fileid))
gum.saveBN(bn,"BN{0}.bif".format(fileid))

print("(*) saving learning csv file {0}".format(fileid))
print
LLlearn=bn2csv.generateCSV(bn,"BN{0}-{1}.csv".format(fileid,NBCASELEARN),NBCASELEARN,True)

print("(*) saving test csv file {0}".format(fileid))
print
LLtest=bn2csv.generateCSV(bn,"BN{0}-{1}.csv".format(fileid,NBCASETEST),NBCASETEST,True)

print("classe : {0}".format(CLASSE))
print("Modalités : {0}".format(bn.variable(CLASSE)))
print("parents :{0}".format(str(bn.parents(CLASSE))))
print("log-likelihood (learning): {0}".format(LLlearn))
print("log-likelihood (testing): {0}".format(LLtest))
Example #16
# -*- coding: utf-8 -*-

import sys

import pyAgrum as gum
from gumLib.pyAgrum_header import pyAgrum_header

pyAgrum_header(2011)

bn=gum.BayesNet('exo1')

# sexe = 0(H)/1(F)
# daltonisme = 0(D) / 1 (nonD)

sexe,daltonisme=[bn.add(gum.LabelizedVar(nom,'',2)) for nom in 'sexe daltonisme'.split()]
bn.insertArc(sexe,daltonisme)

bn.cpt(sexe)[:]=[0.5, 0.5]

bn.cpt(daltonisme)[0,:]=[0.08, 0.92]
bn.cpt(daltonisme)[1,:]=[0.005,0.995]

bn.saveBIF("exo1.bif")
for line in open("exo1.bif"):
    print(line, end="")

print("for gum.loadBN or gum.saveBN, possible files ext are ="+gum.availableBNExts())
gum.saveBN(bn,"exo1.dsl")
for line in open("exo1.dsl"):
    print(line, end="")