Example #1
    def learning(self, sample):
        """Learn a DAG from `sample` with the configured method; return (ndag, learning time in seconds)."""
        if self.method == "cpc":
            learner = otagr.ContinuousPC(sample, self.parameters['binNumber'],
                                         self.parameters['alpha'])

            start = time.time()
            ndag = learner.learnDAG()
            end = time.time()

            # TTest = otagr.ContinuousTTest(sample, self.parameters['alpha'])
            # jointDistributions = []
            # for i in range(ndag.getSize()):
            #     d = 1 + ndag.getParents(i).getSize()
            #     if d == 1:
            #         bernsteinCopula = ot.Uniform(0.0, 1.0)
            #     else:
            #         K = TTest.GetK(len(sample), d)
            #         indices = [int(n) for n in ndag.getParents(i)]
            #         indices = [i] + indices
            #         bernsteinCopula = ot.EmpiricalBernsteinCopula(sample.getMarginal(indices), K, False)
            #     jointDistributions.append(bernsteinCopula)

        elif self.method == "cbic":
            #print(sample.getDescription())
            max_parents = self.parameters['max_parents']
            n_restart_hc = self.parameters['hc_restart']
            cmode = self.parameters['cmode']
            learner = otagr.TabuList(sample, max_parents, n_restart_hc, 5)
            learner.setCMode(cmode)
            start = time.time()
            ndag = learner.learnDAG()
            end = time.time()
            #bn = dag_to_bn(dag, Tstruct.names())

        elif self.method == "cmiic":
            cmode = self.parameters['cmode']
            kmode = self.parameters['kmode']
            learner = otagr.ContinuousMIIC(sample)
            learner.setCMode(cmode)
            learner.setKMode(kmode)
            learner.setAlpha(self.kalpha)
            # learner.setBeta(self.kbeta)
            start = time.time()
            ndag = learner.learnDAG()
            end = time.time()
            # bn = gu.named_dag_to_bn(ndag)

        elif self.method == "dmiic":
            # learner.setBeta(self.kbeta)
            ndag, start, end = dsc.learnDAG(sample)
            # bn = gu.named_dag_to_bn(ndag)

        elif self.method == "lgbn":
            start = time.time()
            end = time.time()

        else:
            raise ValueError("Wrong entry for method argument!")

        return ndag, end - start
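# The `parameters` dictionary expected by learning() is not shown in this
# excerpt; plausible settings for the three otagrum-based branches might look
# like the following (keys come from the method above, the numeric values are
# illustrative, and KModeTypes_Naive is an assumed enum name):
import otagrum as otagr

parameters_cpc = {"binNumber": 4, "alpha": 0.05}
parameters_cbic = {"max_parents": 4, "hc_restart": 2,
                   "cmode": otagr.CorrectedMutualInformation.CModeTypes_Gaussian}
parameters_cmiic = {"cmode": otagr.CorrectedMutualInformation.CModeTypes_Gaussian,
                    "kmode": otagr.CorrectedMutualInformation.KModeTypes_Naive}  # assumed enum name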
Example #2
def testSpecificInstance():
    size = 1000
    data = generateDataForSpecificInstance(size)
    learner = otagrum.ContinuousMIIC(data)

    # skel = learner.learnSkeleton()
    # print(skel.toDot())

    dag = learner.learnDAG()
    print(dag.toDot())
    sys.stdout.flush()
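# generateDataForSpecificInstance() is defined elsewhere in the original test
# file; the stand-in below is only a hypothetical sketch to make the snippet
# self-contained (the actual instance used upstream differs).
import openturns as ot

def generateDataForSpecificInstance(size):
    # A 3-variable Normal copula with two correlated pairs, giving a simple
    # dependence structure for ContinuousMIIC to recover.
    R = ot.CorrelationMatrix(3)
    R[0, 2] = 0.7
    R[1, 2] = 0.6
    return ot.NormalCopula(R).getSample(size)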
Example #3
def testAsiaDirichlet():
    data = ot.Sample.ImportFromTextFile(
        os.path.join(os.path.dirname(__file__), "asia_dirichlet_5000.csv"),
        ",")
    learner = otagrum.ContinuousMIIC(data)
    learner.setVerbosity(True)
    pdag = learner.learnPDAG()
    # print(pdag)
    print(pdag.toDot())
    sys.stdout.flush()
    dag = learner.learnDAG()
    print(dag.toDot())
    sys.stdout.flush()
Example #4
def MIIC_learning(data, alpha):
    # Estimate the coefficients distribution using univariate kernel smoothing
    # for the marginals and MIIC to learn the dependence structure,
    # parameterized by Bernstein copulas.
    dimension = data.getDimension()
    print("Build MIIC coefficients distribution")
    t0 = time()
    print("    Learning structure")
    t1 = time()
    learner = otagrum.ContinuousMIIC(data)
    learner.setAlpha(alpha)
    dag = learner.learnDAG()
    print("Nodes: ", dag.getDAG().sizeArcs())
    with open("dags/new_MIIC_dag_{}.dot".format(alpha), "w") as f:
        f.write(dag.toDot())
    print("    t=", time() - t1, "s")

    cbn = CBN_parameter_learning(data, dag)
    # plot_marginals("marginals_MIIC", marginals)
    print("t=", time() - t0, "s")
    # distribution = ot.ComposedDistribution(marginals, cbn)
    return cbn
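# CBN_parameter_learning() is not included in this excerpt; a possible sketch,
# reusing the ContinuousBayesianNetworkFactory call shown in Example #8 below
# (kernel-smoothed marginals and Bernstein copulas are one choice among others):
import openturns as ot
import otagrum

def CBN_parameter_learning(data, dag):
    factory = otagrum.ContinuousBayesianNetworkFactory(
        ot.KernelSmoothing(ot.Normal()), ot.BernsteinCopulaFactory(),
        dag, 0.05, 4, False)
    return factory.build(data)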
Example #5
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import openturns as ot
import otagrum
import otagrum as otagr  # both aliases are used in this script

mpl.rc('font', family='serif')

sizes = np.linspace(1000, 20000, 20, dtype=int)

data_vs = ot.Sample.ImportFromTextFile(
    "../data/samples/dirichlet/vStruct/sample01.csv", ',')[:20000]
# data_vs = (data_vs.rank()+1)/(data_vs.getSize()+2)

list_01 = []
list_02 = []
list_12 = []
list_01_2 = []
for size in sizes:
    data = data_vs[0:size]
    print('Size : ', size)
    learner_vs = otagrum.ContinuousMIIC(data)
    pdag_vs = learner_vs.learnPDAG()
    dag_vs = learner_vs.learnDAG()
    cache = learner_vs.getIcache()

    I_01 = cache[(frozenset({0, 1}), frozenset({}))]
    I_02 = cache[(frozenset({0, 2}), frozenset({}))]
    I_12 = cache[(frozenset({1, 2}), frozenset({}))]
    I_01_2 = learner_vs._ContinuousMIIC__compute2PtInformation(0, 1, [2])

    list_01.append(I_01)
    list_02.append(I_02)
    list_12.append(I_12)
    list_01_2.append(I_01_2)
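
# The excerpt does not show how these information estimates are used; a simple
# visual check (not part of the original script) is to plot them against the
# sample size:
fig_I, ax_I = plt.subplots()
ax_I.plot(sizes, list_01, label="I(0;1)")
ax_I.plot(sizes, list_02, label="I(0;2)")
ax_I.plot(sizes, list_12, label="I(1;2)")
ax_I.plot(sizes, list_01_2, label="I(0;1|2)")
ax_I.set_xlabel("Sample size")
ax_I.set_ylabel("Estimated information")
ax_I.legend()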

fig, ax = plt.subplots()
# The x-tick arrays are not defined in the original excerpt; the values below
# are an assumption consistent with the x-range set further down.
x_major_ticks = np.arange(0, 0.6, 0.1)
x_minor_ticks = np.arange(0, 0.52, 0.02)
y_major_ticks = np.arange(0, 25, 5)
y_minor_ticks = np.arange(0, 25, 1)

ax.set_xticks(x_major_ticks)
ax.set_xticks(x_minor_ticks, minor=True)
ax.set_yticks(y_major_ticks)
ax.set_yticks(y_minor_ticks, minor=True)

ax.set_xlim(0, 0.5)
ax.set_ylim(0, 25)

# alphas, data_ref, write_graph, structure_path, figure_path and dataset_name
# are defined in parts of the original script that are not shown here.
n_arcs = []
for alpha in alphas:
    print("Processing alpha={}".format(alpha))
    learner = otagr.ContinuousMIIC(data_ref) # Using CMIIC algorithm
    learner.setAlpha(alpha)
    cmiic_dag = learner.learnDAG() # Learning DAG
    n_arcs.append(cmiic_dag.getDAG().sizeArcs())
    write_graph(cmiic_dag,
                structure_path.joinpath("cmiic_dag_"+str(alpha).replace('.','') + '_' + dataset_name + '.dot'))

plt.plot(alphas, n_arcs)
# ax.legend()
plt.savefig(figure_path.joinpath("alpha_curve_" + dataset_name + ".pdf"), transparent=True)
print("Saving figure in {}".format(figure_path.joinpath("alpha_curve_" + dataset_name + ".pdf")))

'''
#################LEARNING PARAMETERS########################

#REF DATA
Example #7
import openturns as ot
import otagrum

# b, order, sigma, sigma_ordered and description are built earlier in the
# original script; that preamble is not part of this excerpt.
for i in range(len(b)):
    for j in range(i + 1):
        sigma_ordered[order[i], order[j]] = sigma[i, j]
print("Sigma matrix: ", sigma_ordered)

# Marginal mean, mu vector:
mu = [0.0] * 6

distribution = ot.Normal(mu, sigma_ordered)
distribution.setDescription(description)
size = 100000
sample = distribution.getSample(size)
sample.exportToCSVFile("../data/sample.csv")

# print("ContinuousPC")
# alpha = 0.1
# binNumber = 4
# learner = otagrum.ContinuousPC(sample, binNumber, alpha)
# learner.setVerbosity(True)
# pdag = learner.learnPDAG()
# print(learner.PDAGtoDot(pdag))
# dag = learner.learnDAG()
# print(dag.toDot())

print("ContinuousMIIC")
learner = otagrum.ContinuousMIIC(sample)
learner.setCMode(otagrum.CorrectedMutualInformation.CModeTypes_Gaussian)
learner.setVerbosity(True)
dag = learner.learnDAG()
print(dag.toDot())
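
# For comparison, the structure can also be learned with the Bernstein-copula
# based information estimator instead of the Gaussian one (the enum name
# CModeTypes_Bernstein is assumed here):
learner_b = otagrum.ContinuousMIIC(sample)
learner_b.setCMode(otagrum.CorrectedMutualInformation.CModeTypes_Bernstein)  # assumed enum name
dag_b = learner_b.learnDAG()
print(dag_b.toDot())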
Example #8
# The first statement of this example is cut off in the excerpt (it checks the
# command-line argument); kf, data_ref and alphas are likewise defined earlier
# in the original script.
                         sys.argv[1] == "beta")
suffix = sys.argv[1]
print("suffix=", suffix)

likelihood_curves = []
bic_curves = []
splits = kf.split(data_ref)
# for (i, (train, test)) in enumerate(splits):
for (i, (train, test)) in enumerate(list(splits)):
    print("Learning with fold number {}".format(i))
    likelihood_curve = []
    bic_curve = []
    for alpha in alphas:
        ot.Log.Show(ot.Log.NONE)
        print("\tLearning with alpha={}".format(alpha))
        learner = otagr.ContinuousMIIC(
            data_ref.select(train))  # Using CMIIC algorithm
        learner.setAlpha(alpha)
        cmiic_dag = learner.learnDAG()  # Learning DAG
        ot.Log.Show(ot.Log.NONE)
        if True:  # set to False to use the Gaussian (NormalFactory / NormalCopulaFactory) parameterization instead
            cmiic_cbn = otagr.ContinuousBayesianNetworkFactory(
                ot.KernelSmoothing(ot.Normal()), ot.BernsteinCopulaFactory(),
                cmiic_dag, 0.05, 4, False).build(data_ref.select(train))
        else:
            cmiic_cbn = otagr.ContinuousBayesianNetworkFactory(
                ot.NormalFactory(), ot.NormalCopulaFactory(), cmiic_dag, 0.05,
                4, False).build(data_ref.select(train))
        # sampled = cmiic_cbn.getSample(1000)
        # sampled = (sampled.rank() +1)/(sampled.getSize()+2)
        # pairs(sampled, figure_path.joinpath('pairs_test.pdf')
        ll = 0
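        # The excerpt stops here; a plausible continuation (an assumption, not
        # the original code) would accumulate the held-out log-density, e.g.:
        # test_sample = data_ref.select(test)
        # for point in test_sample:
        #     ll += cmiic_cbn.computeLogPDF(point)
        # likelihood_curve.append(ll / test_sample.getSize())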
Example #9
import pyAgrum as gum
import openturns as ot
import otagrum as otagr

print('Importing data')
data = ot.Sample.ImportFromTextFile(
    '../data/Standard_coefficients_0100000.csv', ';')
data = data[0:20000]
data = data.getMarginal(range(0, 12))

print('Initializing the learners')
learners = {
    'cbic': otagr.TabuList(data, 3, 1, 2),
    'cpc': otagr.ContinuousPC(data, 4, 0.01),
    'cmiic': otagr.ContinuousMIIC(data)
}

dags = {}
for (name, learner) in learners.items():
    print('Learning with ', name)
    dags[name] = learner.learnDAG()

for (name, dag) in dags.items():
    dot = dag.toDot()
    with open("dag_{}.dot".format(name), "w") as f:
        f.write(dot)
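
# A quick structural comparison of the three learned DAGs (arc counts); the
# .dot files written above can also be rendered with Graphviz, e.g.
# `dot -Tpdf dag_cmiic.dot -o dag_cmiic.pdf`.
for (name, dag) in dags.items():
    print(name, "arcs:", dag.getDAG().sizeArcs())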