# Assumed TFLearn-style imports for the layer helpers used below
# (the original excerpt omits them; the zqtflearn fork exposes the same names).
from tflearn.layers.conv import conv_2d
from tflearn.layers.normalization import batch_normalization
from tflearn.layers.merge_ops import merge
from tflearn.activations import relu
from tflearn import activations


def block17(net, scale=1.0, activation="relu"):
    tower_conv = relu(
        batch_normalization(
            conv_2d(net,
                    192,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_1x1')))
    tower_conv_1_0 = relu(
        batch_normalization(
            conv_2d(net,
                    128,
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_0a_1x1')))
    tower_conv_1_1 = relu(
        batch_normalization(
            conv_2d(tower_conv_1_0,
                    160, [1, 7],
                    bias=False,
                    activation=None,
                    name='Conv2d_0b_1x7')))
    tower_conv_1_2 = relu(
        batch_normalization(
            conv_2d(tower_conv_1_1,
                    192, [7, 1],
                    bias=False,
                    activation=None,
                    name='Conv2d_0c_7x1')))
    tower_mixed = merge([tower_conv, tower_conv_1_2], mode='concat', axis=3)
    tower_out = relu(
        batch_normalization(
            conv_2d(tower_mixed,
                    net.get_shape()[3],
                    1,
                    bias=False,
                    activation=None,
                    name='Conv2d_1x1')))
    net += scale * tower_out
    if activation:
        if isinstance(activation, str):
            net = activations.get(activation)(net)
        elif hasattr(activation, '__call__'):
            net = activation(net)
        else:
            raise ValueError("Invalid Activation.")
    return net
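
# `block17` (like the `block35` used further below) is applied repeatedly via a
# small `repeat` helper that this excerpt does not show. A minimal sketch,
# assuming the helper simply re-applies the block the given number of times:
def repeat(inputs, repetitions, layer, *args, **kwargs):
    outputs = inputs
    for _ in range(repetitions):
        outputs = layer(outputs, *args, **kwargs)
    return outputs

# Hypothetical usage (repetition count and scale are illustrative, following
# the usual Inception-ResNet-v2 settings, not this excerpt):
# net = repeat(net, 20, block17, scale=0.10)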
# branch1 mirrors branch2/branch3 below; the 128 filters and kernel size 3 are
# assumed from the standard TFLearn text-CNN (3/4/5 kernel) example.
branch1 = conv_1d(network,
                  128,
                  3,
                  padding='valid',
                  activation='relu',
                  regularizer="L2")
branch2 = conv_1d(network,
                  128,
                  4,
                  padding='valid',
                  activation='relu',
                  regularizer="L2")
branch3 = conv_1d(network,
                  128,
                  5,
                  padding='valid',
                  activation='relu',
                  regularizer="L2")
network = merge([branch1, branch2, branch3], mode='concat', axis=1)
network = tf.expand_dims(network, 2)
network = global_max_pool(network)
network = dropout(network, 0.5)
network = fully_connected(network, 2, activation='softmax')
network = regression(network,
                     optimizer='adam',
                     learning_rate=0.001,
                     loss='categorical_crossentropy',
                     name='target')
# Training
model = zqtflearn.DNN(network, tensorboard_verbose=0)
model.fit(trainX,
          trainY,
          n_epoch=5,
          shuffle=True)
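
# trainX/trainY (and the matching test split) are not built in this excerpt.
# A minimal sketch of the usual TFLearn preprocessing for such a text
# classifier, run before model.fit (names and maxlen are illustrative):
#
#   from tflearn.data_utils import pad_sequences, to_categorical
#   trainX = pad_sequences(trainX, maxlen=100, value=0.)  # fixed-length word-id sequences
#   trainY = to_categorical(trainY, 2)                    # one-hot labels for the 2-way softmax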
tower_pool3_0 = avg_pool_2d(maxpool5a_3_3,
                            3,
                            strides=1,
                            padding='same',
                            name='AvgPool_5b_b3_0a_3x3')
tower_conv3_1 = relu(
    batch_normalization(
        conv_2d(tower_pool3_0,
                64,
                1,
                bias=False,
                activation=None,
                name='Conv2d_5b_b3_0b_1x1')))

tower_5b_out = merge([tower_conv, tower_conv1_1, tower_conv2_2, tower_conv3_1],
                     mode='concat',
                     axis=3)

net = repeat(tower_5b_out, 10, block35, scale=0.17)

tower_conv = relu(
    batch_normalization(
        conv_2d(net,
                384,
                3,
                bias=False,
                strides=2,
                activation=None,
                padding='VALID',
                name='Conv2d_6a_b0_0a_3x3')))
tower_conv1_0 = relu(
Example #4
# The opening of this call is assumed: inception_3a_5_5 takes
# inception_3a_5_5_reduce as input, following the inception_3b pattern below.
inception_3a_5_5 = conv_2d(inception_3a_5_5_reduce,
                           32,
                           filter_size=5,
                           activation='relu',
                           name='inception_3a_5_5')
inception_3a_pool = max_pool_2d(pool2_3_3,
                                kernel_size=3,
                                strides=1,
                                name='inception_3a_pool')
inception_3a_pool_1_1 = conv_2d(inception_3a_pool,
                                32,
                                filter_size=1,
                                activation='relu',
                                name='inception_3a_pool_1_1')
inception_3a_output = merge([
    inception_3a_1_1, inception_3a_3_3, inception_3a_5_5, inception_3a_pool_1_1
],
                            mode='concat',
                            axis=3)

# 3b
inception_3b_1_1 = conv_2d(inception_3a_output,
                           128,
                           filter_size=1,
                           activation='relu',
                           name='inception_3b_1_1')
inception_3b_3_3_reduce = conv_2d(inception_3a_output,
                                  128,
                                  filter_size=1,
                                  activation='relu',
                                  name='inception_3b_3_3_reduce')
inception_3b_3_3 = conv_2d(inception_3b_3_3_reduce,