Code example #1
File: cvae.py  Project: DristiPatel/RebelReindeers
    def create_cspn(self, z):
        num_copies = 8
        num_splits = 2
        num_recursions = 2     # need 18000 params for 3, > 2560 for 2
        num_leaf_param = int(num_copies * num_splits * num_recursions)

        # TODO check sum parameters, they are unchanged
        if args.dataset == "mnist":
            sum_weights, leaf_weights = model_vae.build_nn_mnist_latentspace(
                z, self.x_shape, 2600, num_leaf_param, self.configuration)
        else:
            sum_weights, leaf_weights = model_vae.build_nn_celeb_latentspace(
                z, self.x_shape, 2600, num_leaf_param, self.configuration)
        param_provider = RAT_SPN.ScopeBasedParamProvider(sum_weights, leaf_weights)

        with tf.variable_scope("spn"):
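            # region graph over all image variables: num_copies independent
            # random splits into num_splits parts, recursing num_recursions times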
            rg = region_graph.RegionGraph(range(int(np.prod(self.x_shape[1:]))))
            for _ in range(0, num_copies):
                rg.random_split(num_splits, num_recursions)

            spn_args = RAT_SPN.SpnArgs()
            spn_args.normalized_sums = True
            spn_args.param_provider = param_provider
            spn_args.num_sums = 8
            spn_args.num_gauss = int(num_leaf_param / num_copies)

            spn_args.dist = 'Bernoulli'
            spn = RAT_SPN.RatSpn(1, region_graph=rg, name="spn", args=spn_args)
            print("created SPN")
        return spn
Code example #2
File: model.py  Project: yyht/supair
    def __init__(self, conf, mean=0.0, seed=None):
        self.conf = conf
        self.lstm_units = conf.lstm_units
        self.lstm = rnn.BasicLSTMCell(self.lstm_units)
        with tf.variable_scope('detection-rnn'):
            self.output_mlp = relu_mlp.ReluMLP(self.lstm_units, [50, 9],
                                               ['s', 'l'],
                                               xavier=True)
        self.default_mean = mean

        # build object SPN
        num_dims_patch = conf.patch_height * conf.patch_width * conf.channels
        rg = region_graph.RegionGraph(range(num_dims_patch), seed=seed)
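        # structure: either random binary splits or a grid structure in the style of Poon & Domingos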
        if conf.random_structure:
            for _ in range(6):
                rg.random_split(2, 2)
        else:
            rg.make_poon_structure(conf.patch_height, conf.patch_width, 10, 2)

        spn_args = rat_spn.SpnArgs()
        spn_args.gauss_min_var = conf.obj_min_var
        spn_args.gauss_max_var = conf.obj_max_var
        self.obj_spn = rat_spn.RatSpn(1,
                                      region_graph=rg,
                                      args=spn_args,
                                      name='obj-spn',
                                      mean=self.default_mean)

        # build bg SPN
        num_dims_bg = conf.scene_height * conf.scene_width * conf.channels
        rg = region_graph.RegionGraph(range(num_dims_bg), seed=seed)
        if conf.random_structure:
            for _ in range(3):
                rg.random_split(2, 1)
        else:
            rg.make_poon_structure(conf.scene_height, conf.scene_width, 20, 2)

        spn_args = rat_spn.SpnArgs()
        spn_args.num_gauss = 6
        spn_args.num_sums = 3
        spn_args.gauss_min_var = conf.bg_min_var
        spn_args.gauss_max_var = conf.bg_max_var
        self.bg_spn = rat_spn.RatSpn(1,
                                     region_graph=rg,
                                     args=spn_args,
                                     name='bg-spn',
                                     mean=self.default_mean)
Code example #3
    def __init__(self):
        super().__init__()
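        # trivial region graph over a single random variable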
        self.region_graph = region_graph.RegionGraph(range(1))
        for i in range(VALUE_CSPN_NUM_RECURSIONS):
            self.region_graph.random_split(1, VALUE_CSPN_DEPTH // 2)

        self.cspn = rat_cspn.CSPN(self.region_graph, 1, A2CConditionalNN(), A2C_CONDITIONAL_NN_BOTTLENECK_DIM,
                                  continuous=True)
Code example #4
    def __init__(self):
        super().__init__()
        self.region_graph = region_graph.RegionGraph(range(1))
        for i in range(POLICY_CSPN_NUM_RECURSIONS):
            self.region_graph.random_split(1, POLICY_CSPN_DEPTH // 2)

        self.cspn = rat_cspn.CSPN(self.region_graph, 1, A2CConditionalNN(), A2C_CONDITIONAL_NN_BOTTLENECK_DIM,
                                  continuous=CONTINOUS, rv_domain=range(ACTION_DIM))
Code example #5
    def __init__(self):
        super().__init__()
        self.region_graph = region_graph.RegionGraph(range(LATENT_DIM + 1))
        for i in range(FORWARD_MODEL_CSPN_NUM_RECURSIONS):
            self.region_graph.random_split(2, FORWARD_MODEL_CSPN_DEPTH // 2)

        self.cspn = rat_cspn.CSPN(self.region_graph, LATENT_DIM + 1, ForwardModelConditionalNN(),
                                  FORWARD_MODEL_CONDITIONAL_NN_BOTTLENECK_DIM, continuous=True)
Code example #6
def celeb_attr(model_name=""):
    conf = TrainerConfig()
    batch_size = conf.batch_size
    x_shape = (batch_size, 128, 128, 3)
    y_shape = (batch_size, 40)
    x_dims = y_dims = 1
    for dim in x_shape[1:]:
        x_dims *= dim
    for dim in y_shape[1:]:
        y_dims *= dim

    x_ph = tf.placeholder(tf.float32, x_shape)

    if model_name == "mdn":
        k = 10
        output_shape = y_shape[1] * k + k
        params = model.build_nn_celeb_baseline(x_ph,
                                               (batch_size, output_shape))

        spn = model.MixtureDensityNetwork(params, k, y_shape[1])
        conf.ckpt_dir = './checkpoints/baseline'
    elif model_name == "meanfield":
        params = model.build_nn_celeb_baseline(x_ph, y_shape)
        spn = model.MeanField(params)
        conf.ckpt_dir = './checkpoints/baseline'
    else:
        sum_weights, leaf_weights = model.build_nn_celeb(
            x_ph, y_shape, 2600, 32)
        param_provider = RAT_SPN.ScopeBasedParamProvider(
            sum_weights, leaf_weights)

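        # CSPN over the 40 binary CelebA attributes (y), with sum and leaf
        # parameters predicted from the image by build_nn_celeb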
        rg = region_graph.RegionGraph(range(y_dims))
        for _ in range(0, 8):
            rg.random_split(2, 2)

        args = RAT_SPN.SpnArgs()
        args.normalized_sums = True
        args.param_provider = param_provider
        args.num_sums = 8
        args.num_gauss = 4
        args.dist = 'Bernoulli'
        spn = RAT_SPN.RatSpn(1, region_graph=rg, name="spn", args=args)
        print("TOTAL", spn.num_params())

    dataset = CelebDataset()

    sess = tf.Session()
    trainer = CspnTrainer(spn, dataset, x_ph, conf, sess=sess)
    trainer.run_training()
Code example #7
def create_simple_spn():
    num_copies = 2
    # create simple spn
    rg = region_graph.RegionGraph(range(4 * 4))
    for _ in range(0, num_copies):
        rg.random_split(2, 2)

    spn_args = RAT_SPN.SpnArgs()
    spn_args.normalized_sums = True
    # spn_args.param_provider = param_provider
    spn_args.num_sums = 20  # why though?
    spn_args.num_gauss = 3  # int(num_leaf_param / num_copies)

    spn_args.dist = 'Bernoulli'
    spn = RAT_SPN.RatSpn(1, region_graph=rg, name="spn", args=spn_args)
    print("created SPN")
    return spn
Code example #8
def mnist_completion():
    batch_size = 100
    x_shape = (batch_size, 28, 14)
    y_shape = (batch_size, 28, 14)
    x_dims = y_dims = 28 * 14

    x_ph = tf.placeholder(tf.float32, [batch_size, x_dims])
    train_ph = tf.placeholder(tf.bool)  # training-mode flag consumed by build_nn_mnist

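    # the scope-based parameter provider is used; the MLP-based
    # UnorderedParamProvider branch below is kept for reference only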
    if True:
        sum_weights, leaf_weights = build_nn_mnist(x_ph, y_shape, train_ph,
                                                   3000, 64)
        param_provider = RAT_SPN.ScopeBasedParamProvider(
            sum_weights, leaf_weights)
    else:
        mlp = ReluMLP(x_dims, [1000, 1000, 32256], ['r', 'r', 'l'])
        mlp_output = mlp.forward(x_ph)
        param_provider = RAT_SPN.UnorderedParamProvider(mlp_output)

    rg = region_graph.RegionGraph(range(y_dims))
    for _ in range(0, 8):
        rg.random_split(2, 2)

    args = RAT_SPN.SpnArgs()
    args.normalized_sums = True
    args.param_provider = param_provider
    args.num_sums = 8
    args.num_gauss = 4
    spn = RAT_SPN.RatSpn(1, region_graph=rg, name="spn", args=args)
    print("num_params", spn.num_params())

    dataset = MnistDataset()
    sess = tf.Session()
    train_cspn(spn,
               dataset,
               x_ph,
               batch_size=batch_size,
               num_epochs=1000,
               sess=sess)
Code example #9
def faces_completion(mask='left'):
    if mask == 'left':
        x_shape = (batch_size, 64, 32)
        y_shape = (batch_size, 64, 32)
    elif mask == 'bottom':
        x_shape = (batch_size, 32, 64)
        y_shape = (batch_size, 32, 64)
    else:
        raise ValueError('Mask must be left or bottom')

    x_dims = y_dims = 64 * 32
    x_ph = tf.placeholder(tf.float32, list(x_shape) + [1])

    sum_weights, leaf_weights = main.build_nn(x_ph, y_shape, 3000, 64)
    param_provider = RAT_SPN.ScopeBasedParamProvider(sum_weights, leaf_weights)

    rg = region_graph.RegionGraph(range(y_dims))
    for _ in range(0, 8):
        rg.random_split(2, 2)

    args = RAT_SPN.SpnArgs()
    args.normalized_sums = True
    args.param_provider = param_provider
    args.num_sums = 8
    args.num_gauss = 4
    spn = RAT_SPN.RatSpn(1, region_graph=rg, name="spn", args=args)
    print("num_params", spn.num_params())

    dataset = FacesDataset(mask)

    sess = tf.Session()

    main.train_cspn(spn,
                    dataset,
                    x_ph,
                    batch_size=batch_size,
                    num_epochs=1000,
                    sess=sess)
Code example #10
def fashion_mnist_attr(conf):
    batch_size = conf.batch_size
    if conf.dataset == 'celeb':
        x_shape = (batch_size, 128, 128, 3)
        y_shape = (batch_size, 40)
        x_dims = y_dims = 1
        for dim in x_shape[1:]:
            x_dims *= dim
        for dim in y_shape[1:]:
            y_dims *= dim
    else:
        x_shape = (batch_size, 28, 28, 1)
        y_shape = (batch_size, 16)
        x_dims = 28 * 28
        y_dims = 16

    x_ph = tf.placeholder(tf.float32, x_shape)
    train_ph = tf.placeholder(tf.bool)

    if conf.model_name == "mdn":
        k = 10
        output_shape = y_shape[1] * k + k
        params = model.build_nn_mnist_baseline(x_ph,
                                               (batch_size, output_shape),
                                               train_ph)

        spn = model.MixtureDensityNetwork(params, k, y_shape[1])
        conf.ckpt_dir = './checkpoints/fashion-mdn'
    elif conf.model_name == "meanfield":
        print('mean field')
        params = model.build_nn_mnist_baseline(x_ph, y_shape, train_ph)
        spn = model.MeanField(params)
        conf.ckpt_dir = './checkpoints/fashion-meanfield'
    elif conf.model_name == 'cspn':
        sum_weights, leaf_weights = model.build_nn_mnist(
            x_ph, y_shape, train_ph, 2600, 32)
        param_provider = RAT_SPN.ScopeBasedParamProvider(
            sum_weights, leaf_weights)

        rg = region_graph.RegionGraph(range(y_dims))
        for _ in range(0, 8):
            rg.random_split(2, 2)

        args = RAT_SPN.SpnArgs()
        args.normalized_sums = True
        args.param_provider = param_provider
        args.num_sums = 8
        args.num_gauss = 4
        args.dist = 'Bernoulli'
        spn = RAT_SPN.RatSpn(1, region_graph=rg, name="spn", args=args)
        print("TOTAL", spn.num_params())
    else:
        raise ValueError('Unknown model name ' + str(conf.model_name))

    if conf.dataset == 'mnist':
        dataset = MnistDataset()
    elif conf.dataset == 'fashion':
        dataset = FashionDataset()
    elif conf.dataset == 'celeb':
        dataset = CelebDataset()
        conf.num_epochs = 20
    else:
        raise ValueError('Unknown dataset ' + conf.dataset)

    sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True,
                                            log_device_placement=True))
    trainer = CspnTrainer(spn, dataset, x_ph, train_ph, conf, sess=sess)
    trainer.run_training()
Code example #11
                vector.init_params(init_fn)

    def num_params(self):
        result = 0
        params_per_dim = [0] * self.num_dims
        for i, layer in enumerate(self.vector_list):
            layer_result = 0
            for vector in layer:
                layer_result += vector.num_params()
                if i == 0:
                    for dim in vector.scope:
                        params_per_dim[dim] += vector.size

            print("Layer {} has {} parameters.".format(i, layer_result))
            result += layer_result
        # print(params_per_dim)
        return result


if __name__ == '__main__':
    rg = region_graph.RegionGraph(range(30))
    for _ in range(0, 8):
        rg.random_split(2, 2)

    args = SpnArgs()
    args.num_sums = 8
    args.num_gauss = 4
    spn = RatSpn(1, region_graph=rg, name="spn", args=args)
    spn.num_params()
    print(spn.forward(torch.rand(1, 30)))
Code example #12
trainset = torchvision.datasets.MNIST(root='./data',
                                      train=True,
                                      download=True,
                                      transform=transform)
trainloader = torch.utils.data.DataLoader(trainset,
                                          batch_size=batch_size,
                                          shuffle=True,
                                          num_workers=2)

testset = torchvision.datasets.MNIST(root='./data',
                                     train=False,
                                     download=True,
                                     transform=transform)
testloader = torch.utils.data.DataLoader(testset,
                                         batch_size=batch_size,
                                         shuffle=True,
                                         num_workers=2)

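# classification RAT-SPN over the 28x28 pixel scope, with 10 roots (one per MNIST class)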
rg = region_graph.RegionGraph(range(28 * 28))
for _ in range(0, 8):
    rg.random_split(2, 2)

args = SpnArgs()
args.num_sums = 20
args.num_gauss = 10
spn = RatSpn(10, region_graph=rg, name="spn", args=args).cuda()
spn.num_params()

criterion = nn.CrossEntropyLoss()
# print(list(spn.parameters()))
optimizer = optim.Adam(spn.parameters())

for epoch in range(20):
    running_loss = 0.0