Example #1
def make_nn(model_path):
    nn = Model(model_path)
    nn.build_graph(
        image_shape=(32, 32, 1),
        n_classes=10,
        layers=[
            Flatten(),
            Dense(32 * 32, activation='relu'),
            Dense(10),
        ],
        alpha=1e-4,
    )

    return nn
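
The hidden layer mirrors the flattened input size (32 * 32 = 1024 units). A quick parameter count for this MLP, assuming the custom Dense layer carries a bias term like most implementations:

n_in = 32 * 32                  # flattened 32x32x1 image
hidden = n_in * n_in + n_in     # 1024 -> 1024 weights plus biases: 1,049,600
output = n_in * 10 + 10         # 1024 -> 10 weights plus biases: 10,250
print(hidden + output)          # 1,059,850 trainable parameters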
Example #2
def make_simplenet(model_path):
    cnn = Model(model_path)

    cnn.build_graph(
        image_shape=(32, 32, 1),
        n_classes=10,
        layers=[
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #1
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #2
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #3
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #4
            MaxPooling2D((2, 2)),
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #5
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #6
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #7
            MaxPooling2D((2, 2)),
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #8
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #9
            MaxPooling2D((2, 2)),
            Conv2D(128, (3, 3), activation='relu', batch_normal=0.95),  #10
            Conv2D(256, (1, 1), activation='relu', batch_normal=0.95),  #11
            Conv2D(64, (1, 1), activation='relu', batch_normal=0.95),  #12
            MaxPooling2D((2, 2)),
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #13
            MaxPooling2D((2, 2)),
            Flatten(),
            Dense(10),
        ],
        alpha=1e-3,
    )

    return cnn
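
Five 2x2 pooling stages take the 32x32 input down to 1x1 before Flatten(). A shape trace under the assumption that Conv2D pads with 'same' (the padding mode is not visible in this snippet; with 'valid' padding the feature map would shrink faster and the last stages would not fit):

size = 32
for _ in range(5):    # five MaxPooling2D((2, 2)) stages above
    size //= 2
print(size)           # 1 -> Flatten() sees a 1x1x64 tensor, i.e. 64 features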
Example #3
def make_cnn(model_path):
    cnn = Model(model_path)

    cnn.build_graph(
        image_shape=(32, 32, 1),
        n_classes=10,
        layers=[
            Conv2D(32, (3, 3), activation='relu'),
            BatchNorm(),
            MaxPooling2D((2, 2)),
            Flatten(),
            Dense(128, activation='relu'),
            Dropout(0.5),
            Dense(10),
        ],
        alpha=1e-3,
    )

    return cnn
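
Unlike Example #2, which folds batch normalization into Conv2D via the batch_normal keyword, this stack inserts explicit BatchNorm and Dropout layers. For comparison, a rough sketch of the same stack in stock tf.keras; this is an assumption about what build_graph constructs, not the project's actual implementation:

import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Conv2D(32, (3, 3), activation='relu',
                           input_shape=(32, 32, 1)),
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.MaxPooling2D((2, 2)),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation='relu'),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Dense(10),    # logits; no softmax, as in the original
])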
Example #4
    def __init__(self,
                 input_dim,
                 output_dim,
                 model_size="small",
                 neigh_input_dim=None,
                 dropout=0.,
                 bias=False,
                 act=tf.nn.relu,
                 name=None,
                 concat=False,
                 **kwargs):
        super(MeanPoolingAggregator, self).__init__(**kwargs)

        self.dropout = dropout
        self.bias = bias
        self.act = act
        self.concat = concat

        if neigh_input_dim is None:
            neigh_input_dim = input_dim

        if name is not None:
            name = '/' + name
        else:
            name = ''

        if model_size == "small":
            hidden_dim = self.hidden_dim = 512
        elif model_size == "big":
            hidden_dim = self.hidden_dim = 1024

        # A one-layer MLP transforms each neighbor vector before mean pooling.
        self.mlp_layers = []
        self.mlp_layers.append(
            Dense(input_dim=neigh_input_dim,
                  output_dim=hidden_dim,
                  act=tf.nn.relu,
                  dropout=dropout,
                  sparse_inputs=False,
                  logging=self.logging))

        with tf.variable_scope(self.name + name + '_vars'):
            self.vars['neigh_weights'] = glorot([hidden_dim, output_dim],
                                                name='neigh_weights')

            self.vars['self_weights'] = glorot([input_dim, output_dim],
                                               name='self_weights')
            if self.bias:
                self.vars['bias'] = zeros([self.output_dim], name='bias')

        if self.logging:
            self._log_vars()

        self.input_dim = input_dim
        self.output_dim = output_dim
        self.neigh_input_dim = neigh_input_dim
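
This constructor appears to come from the GraphSAGE reference code. For context, a sketch of the forward pass that usually pairs with it: each neighbor vector passes through the MLP built above, the results are mean-pooled, and the pooled and self vectors are projected and combined. Everything beyond the constructor is reconstructed from that codebase's conventions, so treat the details as an assumption:

    def _call(self, inputs):
        self_vecs, neigh_vecs = inputs

        dims = tf.shape(neigh_vecs)
        batch_size, num_neighbors = dims[0], dims[1]

        # Run every neighbor vector through the small MLP, then mean-pool.
        h = tf.reshape(neigh_vecs,
                       (batch_size * num_neighbors, self.neigh_input_dim))
        for layer in self.mlp_layers:
            h = layer(h)
        neigh_h = tf.reshape(h, (batch_size, num_neighbors, self.hidden_dim))
        neigh_h = tf.reduce_mean(neigh_h, axis=1)

        from_neighs = tf.matmul(neigh_h, self.vars['neigh_weights'])
        from_self = tf.matmul(self_vecs, self.vars['self_weights'])

        # Sum or concatenate the two halves, add bias, apply activation.
        if not self.concat:
            output = tf.add_n([from_self, from_neighs])
        else:
            output = tf.concat([from_self, from_neighs], axis=1)

        if self.bias:
            output += self.vars['bias']

        return self.act(output)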
Example #5
def make_lr(model_path):
    lr = Model(model_path)
    lr.build_graph(image_shape=(32, 32, 1),
                   n_classes=10,
                   layers=[
                       Flatten(),
                       Dense(10),
                   ],
                   alpha=1e-3)

    return lr
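
With no hidden layers, Flatten() plus Dense(10) is just multinomial logistic regression on raw pixels, hence the name. Its parameter count (again assuming Dense includes biases):

print(32 * 32 * 1 * 10 + 10)   # one weight per pixel per class, plus biases: 10,250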
Example #6
def make_simplenet_dropout(model_path):
    cnn = Model(model_path)

    cnn.build_graph(
        image_shape=(32, 32, 1),
        n_classes=10,
        layers=[
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #1
            Dropout(0.8),
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #2
            Dropout(0.8),
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #3
            Dropout(0.8),
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #4
            Dropout(0.8),
            MaxPooling2D((2, 2)),
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #5
            Dropout(0.8),
            Conv2D(32, (3, 3), activation='relu', batch_normal=0.95),  #6
            Dropout(0.8),
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #7
            Dropout(0.8),
            MaxPooling2D((2, 2)),
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #8
            Dropout(0.8),
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #9
            Dropout(0.8),
            MaxPooling2D((2, 2)),
            Conv2D(128, (3, 3), activation='relu', batch_normal=0.95),  #10
            Dropout(0.8),
            Conv2D(256, (1, 1), activation='relu', batch_normal=0.95),  #11
            Dropout(0.8),
            Conv2D(64, (1, 1), activation='relu', batch_normal=0.95),  #12
            Dropout(0.8),
            MaxPooling2D((2, 2)),
            Conv2D(64, (3, 3), activation='relu', batch_normal=0.95),  #13
            Dropout(0.8),
            MaxPooling2D((2, 2)),
            Flatten(),
            Dense(10),
        ],
        alpha=1e-3,
    )

    return cnn
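
Whether Dropout(0.8) here means "drop 80% of activations" or "keep 80%" depends on the custom framework's convention, which this snippet does not reveal; tf.keras, for comparison, treats the argument as the drop rate. A quick check of the keras convention:

import tensorflow as tf

layer = tf.keras.layers.Dropout(0.8)
x = tf.ones((1, 10))
print(layer(x, training=True))   # roughly 80% of entries zeroed, the rest scaled by 1/0.2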