Example #1
import pickle

import numpy as np
from tqdm import tqdm

# DirichletDiffusionTree, MetropolisHastingsSampler, GaussianLikelihoodModel,
# Inverse, and dist() come from the surrounding project and are assumed to be
# imported at module level; their import paths are not shown in this snippet.


def run_experiment(index, dataset_name, name, constraint_getter, master_tree,
                   X, y, out_dir, n_iters=1000, add_constraint=200,
                   add_score=200, add_likelihood=200, should_continue=False):
    N, D = X.shape
    df = Inverse(c=1)  # divergence function for the diffusion tree

    # Dataset-specific Gaussian likelihood models: iris uses an isotropic
    # covariance, the other datasets use the diagonal of the empirical
    # covariance with a dataset-specific scale.
    if dataset_name == 'iris':
        lm = GaussianLikelihoodModel(sigma=np.eye(D) / 9.0, sigma0=np.eye(D) / 2.0,
                                     mu0=X.mean(axis=0)).compile()
    elif dataset_name == 'zoo':
        lm = GaussianLikelihoodModel(sigma=np.diag(np.diag(np.cov(X.T))) / 4.0,
                                     sigma0=np.eye(D) / 2.0, mu0=X.mean(axis=0)).compile()
    else:
        lm = GaussianLikelihoodModel(sigma=np.diag(np.diag(np.cov(X.T))) / 2.0,
                                     sigma0=np.eye(D) / 2.0, mu0=X.mean(axis=0)).compile()
    if should_continue:
        # Resume a previous run: reload the recorded scores, marginal
        # log-likelihoods, and the saved tree state (pickles are binary files).
        with open(out_dir / name / 'scores-%u.pkl' % index, 'rb') as fp:
            scores = pickle.load(fp)
        with open(out_dir / name / 'costs-%u.pkl' % index, 'rb') as fp:
            costs = pickle.load(fp)
        with open(out_dir / name / 'final-tree-%u.pkl' % index, 'rb') as fp:
            tree = DirichletDiffusionTree(df=df, likelihood_model=lm)
            tree.set_state(pickle.load(fp))
        sampler = MetropolisHastingsSampler(tree, X)
    else:
        # Fresh run: start from an initial assignment of the data to a new tree.
        scores = []
        costs = []
        tree = DirichletDiffusionTree(df=df, likelihood_model=lm)
        sampler = MetropolisHastingsSampler(tree, X)
        sampler.initialize_assignments()
        if dataset_name == 'zoo':
            # Restrict the tree to the points present in the reference tree.
            sampler.tree = sampler.tree.induced_subtree(master_tree.points())

    # Metropolis-Hastings sampling loop: periodically record the distance to
    # the reference tree and the marginal log-likelihood, and every
    # `add_constraint` iterations ask the constraint getter for a new
    # constraint based on the trees visited since the last one was added.
    current_run = []
    for i in tqdm(range(n_iters + 1)):
        sampler.sample()
        current_run.append(sampler.tree)
        if i % add_score == 0:
            scores.append(dist(master_tree, sampler.tree))
        if i % add_likelihood == 0:
            costs.append(sampler.tree.marg_log_likelihood())
        if i != 0 and i % add_constraint == 0:
            if constraint_getter is not None:
                constraint = constraint_getter.get_constraint(current_run)
                if constraint is not None:
                    sampler.add_constraint(constraint)
            current_run = []
    # plot_tree(sampler.tree, y)

    # Persist the recorded scores, marginal log-likelihoods, and the final tree
    # state so the run can be resumed later with should_continue=True.
    (out_dir / name).mkdir_p()
    with open(out_dir / name / 'scores-%u.pkl' % index, 'wb') as fp:
        pickle.dump(scores, fp)
    print(len(costs))
    with open(out_dir / name / 'costs-%u.pkl' % index, 'wb') as fp:
        pickle.dump(costs, fp)
    # with open(out_dir / name / 'trees-%u.pkl' % index, 'rb') as fp:
    #     previous_trees = pickle.load(fp)
    # with open(out_dir / name / 'trees-%u.pkl' % index, 'wb') as fp:
    #     pickle.dump(previous_trees + [t.get_state() for t in trees], fp)
    with open(out_dir / name / 'final-tree-%u.pkl' % index, 'wb') as fp:
        pickle.dump(sampler.tree.get_state(), fp)
    return costs, scores, sampler
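
For context, here is a minimal sketch of how run_experiment might be driven. Only the function's own signature comes from the example above; `X`, `y`, and `master_tree` are assumed to be produced by the project's own data loading, and `Path` is the path.py class whose mkdir_p() the function relies on. Passing constraint_getter=None runs the sampler unconstrained, which the function explicitly allows.

# Hypothetical driver; data loading and the reference tree are assumed to be
# provided by the surrounding project.
from path import Path

costs, scores, sampler = run_experiment(
    index=0,
    dataset_name='zoo',
    name='zoo-unconstrained',
    constraint_getter=None,      # no interactive constraints
    master_tree=master_tree,     # reference tree loaded elsewhere
    X=X, y=y,                    # data matrix and labels loaded elsewhere
    out_dir=Path('results'),
    n_iters=1000,
)
print(scores[-1], costs[-1])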
Example #2
import pickle

import numpy as np

# DirichletDiffusionTree, MetropolisHastingsSampler, iterate(), df, lm, and X
# come from the surrounding project/script and are not shown in this excerpt.
# The enclosing def line is also truncated; the signature below is a
# hypothetical reconstruction added only so the fragment parses.
def orient_constraint(tree, a, b, c):
    # Return whichever orientation of the triplet constraint the tree satisfies.
    if tree.verify_constraint((a, b, c)):
        return (a, b, c)
    if tree.verify_constraint((a, c, b)):
        return (a, c, b)
    if tree.verify_constraint((b, c, a)):
        return (b, c, a)

if __name__ == "__main__":
    with open('scripts/zoo.tree', 'rb') as fp:
        master_tree = pickle.load(fp)
    points = master_tree.root.points()

    # Two independent Dirichlet diffusion trees, each with its own
    # Metropolis-Hastings sampler, restricted to the master tree's points.
    tree1 = DirichletDiffusionTree(df=df, likelihood_model=lm)
    sampler1 = MetropolisHastingsSampler(tree1, X)
    sampler1.initialize_assignments()
    sampler1.tree = sampler1.tree.induced_subtree(points)

    tree2 = DirichletDiffusionTree(df=df, likelihood_model=lm)
    sampler2 = MetropolisHastingsSampler(tree2, X)
    sampler2.initialize_assignments()
    sampler2.tree = sampler2.tree.induced_subtree(points)

    # Enumerate all triplet constraints implied by the master tree, shuffle
    # them with a fixed seed, and keep 10,000 as a held-out test set.
    all_constraints = list(master_tree.generate_constraints())

    np.random.seed(0)
    np.random.shuffle(all_constraints)
    test_constraints = all_constraints[:10000]

    # One set per sampler, tracking which test constraints each tree satisfies.
    satisfied = [set(), set()]

    # Burn in the first sampler; iterate() (defined elsewhere in the original
    # script) presumably runs the given number of MH sampling steps.
    iterate(sampler1, 100)
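
The test_constraints / satisfied bookkeeping above suggests measuring how many held-out triplet constraints each sampler's current tree satisfies. A minimal sketch of that check, assuming only the verify_constraint method used in the excerpt, might look like this (the count_satisfied helper and the fraction reporting are illustrative, not part of the original script):

    def count_satisfied(tree, constraints):
        # Return the subset of triplet constraints the tree currently
        # satisfies, using the same verify_constraint call as the excerpt.
        return {c for c in constraints if tree.verify_constraint(c)}

    # Illustrative usage against the two samplers set up above:
    satisfied[0] = count_satisfied(sampler1.tree, test_constraints)
    satisfied[1] = count_satisfied(sampler2.tree, test_constraints)
    print(len(satisfied[0]) / float(len(test_constraints)),
          len(satisfied[1]) / float(len(test_constraints)))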