예제 #1
0
def run_validation_spbn(train_data, folds, patience, result_folder, idx_fold):
    """Learn semiparametric BNs (all-CKDE start) by validated-likelihood hill climbing.

    For every fold count ``k`` in *folds* and every patience value ``p`` in
    *patience*, runs a greedy hill-climbing search over arcs and node types,
    checkpointing each accepted model into
    ``<result_folder>/HillClimbing/SPBN_CKDE/Validation_<k>_<p>/<idx_fold>``.
    After the search the final model is saved with the next checkpoint number
    and an ``end.lock`` marker is created so finished configurations are
    skipped on re-runs.

    Args:
        train_data: pandas DataFrame of training instances.
        folds: iterable of fold counts for ValidatedLikelihood.
        patience: iterable of patience values for the hill climbing.
        result_folder: base output directory (string path).
        idx_fold: index of the outer cross-validation fold.
    """
    hc = GreedyHillClimbing()
    pool = OperatorPool([ArcOperatorSet(), ChangeNodeTypeSet()])

    for k in folds:
        vl = ValidatedLikelihood(train_data, k=k, seed=0)

        for p in patience:
            fold_folder = (result_folder + '/HillClimbing/SPBN_CKDE/Validation_'
                           + str(k) + '_' + str(p) + '/' + str(idx_fold))
            pathlib.Path(fold_folder).mkdir(parents=True, exist_ok=True)

            # Skip configurations already completed by a previous run.
            if os.path.exists(fold_folder + '/end.lock'):
                continue

            cb_save = SaveModel(fold_folder)
            # Start from a fully nonparametric network: every node is CKDE.
            node_types = [(name, NodeType.CKDE)
                          for name in train_data.columns.values]
            start_model = SemiparametricBN(list(train_data.columns.values),
                                           node_types)
            bn = hc.estimate(pool,
                             vl,
                             start_model,
                             callback=cb_save,
                             patience=p,
                             verbose=True)

            # Save the final model one past the last SaveModel checkpoint.
            # zfill(6) keeps lexicographic sort aligned with numeric order.
            # Guard against an empty folder (previously raised IndexError
            # when no checkpoint pickle had been written).
            iters = sorted(glob.glob(fold_folder + '/*.pickle'))
            if iters:
                number = int(os.path.splitext(os.path.basename(iters[-1]))[0])
            else:
                number = -1
            bn.save(fold_folder + '/' + str(number + 1).zfill(6) + ".pickle")

            # Touch the lock file to mark this configuration as finished.
            with open(fold_folder + '/end.lock', 'w'):
                pass
예제 #2
0
def _fit_and_save_gbn(df, score, result_folder):
    """Hill-climb a GaussianNetwork on *df* under *score*, checkpointing into
    *result_folder*; no-op if the folder already holds an ``end.lock`` marker.
    """
    pathlib.Path(result_folder).mkdir(parents=True, exist_ok=True)
    if os.path.exists(result_folder + '/end.lock'):
        return

    hc = GreedyHillClimbing()
    arc_set = ArcOperatorSet()
    cb_save = SaveModel(result_folder)
    start_model = GaussianNetwork(list(df.columns.values))
    bn = hc.estimate(arc_set, score, start_model, callback=cb_save)

    # Save the final model one past the last SaveModel checkpoint; zfill(6)
    # keeps lexicographic sort aligned with numeric order. Guard against an
    # empty folder (previously raised IndexError with no checkpoints).
    iters = sorted(glob.glob(result_folder + '/*.pickle'))
    if iters:
        number = int(os.path.splitext(os.path.basename(iters[-1]))[0])
    else:
        number = -1
    bn.save(result_folder + '/' + str(number + 1).zfill(6) + ".pickle")

    # Touch the lock file to mark this configuration as finished.
    with open(result_folder + '/end.lock', 'w'):
        pass


def train_gbn(dataset, instances):
    """Train Gaussian Bayesian networks for one dataset/size combination.

    Learns two GBN structures by greedy hill climbing over arcs — one scored
    with BIC, one with BGe — saving checkpoints and the final model under
    ``models/<dataset>/<instances>/HillClimbing/GBN_<score>/``. Each score's
    run is skipped if its ``end.lock`` marker already exists.

    Args:
        dataset: dataset name; ``<dataset>_<instances>.csv`` must exist.
        instances: number of instances (selects the CSV file and out folder).
    """
    df = pd.read_csv(dataset + "_" + str(instances) + '.csv')

    base_folder = 'models/' + dataset + '/' + str(instances) + '/HillClimbing/'
    _fit_and_save_gbn(df, BIC(df), base_folder + 'GBN_BIC/')
    _fit_and_save_gbn(df, BGe(df), base_folder + 'GBN_BGe/')
예제 #3
0
def run_bge_gaussian(train_data, result_folder, idx_fold):
    """Fit a Gaussian network to *train_data* with BGe-scored hill climbing.

    Checkpoints every accepted model into
    ``<result_folder>/HillClimbing/Gaussian/BGe/<idx_fold>``, saves the final
    model with the next checkpoint number, and leaves an ``end.lock`` marker.
    Returns immediately when the marker already exists.
    """
    fold_folder = result_folder + '/HillClimbing/Gaussian/BGe/' + str(idx_fold)
    pathlib.Path(fold_folder).mkdir(parents=True, exist_ok=True)

    if os.path.exists(fold_folder + '/end.lock'):
        return  # already completed by an earlier run

    searcher = GreedyHillClimbing()
    operators = ArcOperatorSet()
    score = BGe(train_data)
    checkpointer = SaveModel(fold_folder)
    initial = GaussianNetwork(list(train_data.columns.values))

    model = searcher.estimate(operators, score, initial,
                              callback=checkpointer, verbose=True)

    # Final model gets the index right after the last checkpoint pickle.
    checkpoints = sorted(glob.glob(fold_folder + '/*.pickle'))
    last_name = os.path.basename(checkpoints[-1])
    last_index = int(os.path.splitext(last_name)[0])
    model.save(fold_folder + '/' + str(last_index + 1).zfill(6) + ".pickle")

    # Mark this fold as finished.
    with open(fold_folder + '/end.lock', 'w'):
        pass
예제 #4
0
def run_validation_kdebn(train_data, folds, patience, result_folder, idx_fold):
    """Learn KDE Bayesian networks by validated-likelihood hill climbing.

    For every fold count ``k`` in *folds* and every patience value ``p`` in
    *patience*, runs a greedy hill-climbing search over arcs, checkpointing
    each accepted model into
    ``<result_folder>/HillClimbing/KDEBN/Validation_<k>_<p>/<idx_fold>``.
    The final model is saved with the next checkpoint number and an
    ``end.lock`` marker is created so finished configurations are skipped.

    Args:
        train_data: pandas DataFrame of training instances.
        folds: iterable of fold counts for ValidatedLikelihood.
        patience: iterable of patience values for the hill climbing.
        result_folder: base output directory (string path).
        idx_fold: index of the outer cross-validation fold.
    """
    hc = GreedyHillClimbing()
    arc_set = ArcOperatorSet()

    for k in folds:
        vl = ValidatedLikelihood(train_data, k=k, seed=0)

        for p in patience:
            fold_folder = (result_folder + '/HillClimbing/KDEBN/Validation_'
                           + str(k) + '_' + str(p) + '/' + str(idx_fold))
            pathlib.Path(fold_folder).mkdir(parents=True, exist_ok=True)

            # Skip configurations already completed by a previous run.
            if os.path.exists(fold_folder + '/end.lock'):
                continue

            cb_save = SaveModel(fold_folder)
            start_model = KDENetwork(list(train_data.columns.values))
            bn = hc.estimate(arc_set, vl, start_model, callback=cb_save,
                             patience=p, verbose=True)

            # Save the final model one past the last SaveModel checkpoint.
            # Guard against an empty folder (previously raised IndexError
            # when no checkpoint pickle had been written).
            iters = sorted(glob.glob(fold_folder + '/*.pickle'))
            if iters:
                number = int(os.path.splitext(os.path.basename(iters[-1]))[0])
            else:
                number = -1
            bn.save(fold_folder + '/' + str(number + 1).zfill(6) + ".pickle")

            # Touch the lock file to mark this configuration as finished.
            with open(fold_folder + '/end.lock', 'w'):
                pass
예제 #5
0
# Benchmark of SPBN structure-learning runtime on synthetic datasets of
# increasing size. Per-repetition wall-clock times are collected into one
# DataFrame column per dataset size. (The "medium" pass continues past the
# end of this chunk.)
small_results = pd.DataFrame()
medium_results = pd.DataFrame()
large_results = pd.DataFrame()

for n in experiments_helper.INSTANCES:
    df = pd.read_csv('data/small_' + str(n) + ".csv")

    # 800 timed repetitions on the "small" dataset.
    executions = np.empty((800, ))
    for i in range(800):
        if i % 10 == 0:
            print(str(i) + " executions")
        # Rebuild the score, start model and operators each repetition so
        # every timing starts from identical, fresh state (seed varies by i).
        vl = ValidatedLikelihood(df, k=10, seed=i)
        start_model = SemiparametricBN(list(df.columns.values))
        hc = GreedyHillClimbing()
        pool = OperatorPool([ArcOperatorSet(), ChangeNodeTypeSet()])

        # Time only the hill-climbing search itself.
        start = time.time()
        bn = hc.estimate(pool, vl, start_model, patience=0)
        end = time.time()

        executions[i] = end - start

    small_results['SPBN_' + str(n)] = pd.Series(executions,
                                                name="SPBN_" + str(n))
    # Report mean and sample standard deviation (ddof=1) of the timings.
    print("Small " + str(n) + " -- Time: " + str(executions.mean()) +
          ", std: " + str(np.std(executions, ddof=1)))

    # "Medium" dataset pass: fewer repetitions (200); loop body continues
    # beyond this chunk.
    df = pd.read_csv('data/medium_' + str(n) + ".csv")

    executions = np.empty((200, ))
예제 #6
0
# Benchmark of Gaussian-network (BIC-scored) structure-learning runtime on
# synthetic datasets of increasing size. Per-repetition wall-clock times are
# collected into one DataFrame column per dataset size. (The "medium" pass is
# truncated at the end of this chunk.)
small_results = pd.DataFrame()
medium_results = pd.DataFrame()
large_results = pd.DataFrame()

for n in experiments_helper.INSTANCES:
    df = pd.read_csv('data/small_' + str(n) + ".csv")

    # 20000 timed repetitions on the "small" dataset.
    executions = np.empty((20000,))
    for i in range(20000):
        if i % 10 == 0:
            print(str(i) + " executions")
        # Rebuild the score, start model and operators each repetition so
        # every timing starts from identical, fresh state.
        bic = BIC(df)
        start_model = GaussianNetwork(list(df.columns.values))
        hc = GreedyHillClimbing()
        arcs = ArcOperatorSet()

        # Time only the hill-climbing search itself.
        start = time.time()
        bn = hc.estimate(arcs, bic, start_model)
        end = time.time()

        executions[i] = end - start

    small_results['GBN_BIC_' + str(n)] = pd.Series(executions, name="GBN_BIC_" + str(n))
    # Report mean and sample standard deviation (ddof=1) of the timings.
    print("Small " + str(n) + " -- Time: " + str(executions.mean()) + ", std: " + str(np.std(executions, ddof=1)))

    # "Medium" dataset pass; loop body continues beyond this chunk.
    df = pd.read_csv('data/medium_' + str(n) + ".csv")

    executions = np.empty((20000,))
    for i in range(20000):
        print(str(i) + " executions")