Code example #1
File: cifar10.py — Project: shaoxuan92/treeano
    # NOTE(review): this excerpt is truncated by the scraper — the node list
    # and the enclosing HyperparameterNode call are never closed below.
    from treeano.sandbox.nodes import activation_transformation
    # Concatenates each activation with its negation (doubling the channel
    # count before the following ReLU) — presumably CReLU-style; TODO confirm
    # against activation_transformation.ConcatenateNegationNode.
    concat_node = activation_transformation.ConcatenateNegationNode

# Filter counts for the first and second halves of the conv stack.
num_filters_1 = 96
num_filters_2 = 192

# based off of architecture from "Scalable Bayesian Optimization Using
# Deep Neural Networks" http://arxiv.org/abs/1502.05700
model = tn.HyperparameterNode(
    "model",
    tn.SequentialNode("seq", [
        # 3-channel 32x32 input (CIFAR-10); batch dimension left as None.
        tn.InputNode("x", shape=(None, 3, 32, 32)),
        # Repeated pattern: conv -> concat-negation -> ReLU -> dropout.
        tn.DnnConv2DWithBiasNode("conv1", num_filters=num_filters_1),
        concat_node("concat1"),
        tn.ReLUNode("relu1"),
        tn.DropoutNode("do1"),
        tn.DnnConv2DWithBiasNode("conv2", num_filters=num_filters_1),
        concat_node("concat2"),
        tn.ReLUNode("relu2"),
        tn.DropoutNode("do2"),
        tn.MaxPool2DNode("mp1"),
        tn.DnnConv2DWithBiasNode("conv3", num_filters=num_filters_2),
        concat_node("concat3"),
        tn.ReLUNode("relu3"),
        tn.DropoutNode("do3"),
        tn.DnnConv2DWithBiasNode("conv4", num_filters=num_filters_2),
        concat_node("concat4"),
        tn.ReLUNode("relu4"),
        tn.DropoutNode("do4"),
        tn.DnnConv2DWithBiasNode("conv5", num_filters=num_filters_2),
        concat_node("concat5"),
Code example #2
# Convolutional MNIST classifier: two conv/ReLU/max-pool stages followed by
# a fully-connected head with dropout.
# - the batch dimension is given as `None`, so the same network works for
#   multiple different batch sizes
_mnist_layers = [
    tn.InputNode("x", shape=(None, 1, 28, 28)),
    # stage 1: conv -> ReLU -> max-pool
    tn.DnnConv2DWithBiasNode("conv1"),
    tn.ReLUNode("relu1"),
    tn.DnnMaxPoolNode("mp1"),
    # stage 2: conv -> ReLU -> max-pool
    tn.DnnConv2DWithBiasNode("conv2"),
    tn.ReLUNode("relu2"),
    tn.DnnMaxPoolNode("mp2"),
    # fully-connected head
    tn.DenseNode("fc1"),
    tn.ReLUNode("relu3"),
    tn.DropoutNode("do1"),
    tn.DenseNode("fc2", num_units=10),
    tn.SoftmaxNode("pred"),
]

# Shared hyperparameters are attached once at the root HyperparameterNode;
# the per-node overrides above (e.g. fc2's num_units) take precedence.
model = tn.HyperparameterNode(
    "model",
    tn.SequentialNode("seq", _mnist_layers),
    num_filters=32,
    filter_size=(5, 5),
    pool_size=(2, 2),
    num_units=256,
    dropout_probability=0.5,
    inits=[treeano.inits.XavierNormalInit()],
)

# Wrap the model with Adam-based parameter updates.
# NOTE(review): truncated in this excerpt — AdamNode's argument dict and the
# closing of the HyperparameterNode call are cut off below.
with_updates = tn.HyperparameterNode(
    "with_updates",
    tn.AdamNode(
        "adam", {
Code example #3
 def make_network(p):
     """Build a minimal network: a (3, 4, 5) input feeding a dropout node.

     `p` is forwarded to DropoutNode as the dropout probability.
     """
     nodes = [
         tn.InputNode("i", shape=(3, 4, 5)),
         tn.DropoutNode("do", p=p),
     ]
     return tn.SequentialNode("s", nodes).network()
Code example #4
# ############################### prepare data ###############################

# MNIST pre-split into train/validation/test; each split has at least an "x"
# entry (see expected_batches below) — presumably "y" labels too; verify
# against canopy.sandbox.datasets.mnist.
train, valid, test = canopy.sandbox.datasets.mnist()

# ############################## prepare model ##############################

BATCH_SIZE = 500  # examples per minibatch
NUM_EPOCHS = 25  # passes over the training set

# Fully-connected MNIST classifier whose dropout is wrapped in
# GradualDropoutNode so the rate can be adjusted over the course of training.
_gdo_layers = [
    tn.InputNode("x", shape=(None, 1, 28, 28)),
    tn.DenseNode("fc1"),
    tn.ReLUNode("relu1"),
    eb.GradualDropoutNode("gdo1", tn.DropoutNode("do1")),
    tn.DenseNode("fc2"),
    tn.ReLUNode("relu2"),
    eb.GradualDropoutNode("gdo2", tn.DropoutNode("do2")),
    tn.DenseNode("fc3", num_units=10),
    tn.SoftmaxNode("pred"),
]

model = tn.HyperparameterNode(
    "model",
    tn.SequentialNode("seq", _gdo_layers),
    num_units=512,
    dropout_probability=0.5,
    inits=[treeano.inits.XavierNormalInit()],
    # total number of minibatch updates in the whole run — presumably what
    # GradualDropoutNode schedules against; TODO confirm.
    expected_batches=NUM_EPOCHS * len(train["x"]) / BATCH_SIZE,
)

# Wrap the model with Adam-based parameter updates.
# NOTE(review): truncated in this excerpt — AdamNode's arguments and the
# closing of the HyperparameterNode call are cut off below.
with_updates = tn.HyperparameterNode(
    "with_updates",
    tn.AdamNode(
Code example #5
def test_dropout_node_serialization():
    """DropoutNode round-trips serialization with both default and explicit p."""
    for node in [tn.DropoutNode("a"), tn.DropoutNode("a", p=0.5)]:
        tn.check_serialization(node)