Beispiel #1
0
 def __calc_conv3(self):
     """Build the third conv block: three conv+ReLU layers, then a max-pool."""
     relu = tf.nn.relu
     x = relu(conv(self.pool2, 3, 3, 256, 1, 1, name='conv3_1',
                   data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv3_1 = x
     x = relu(conv(x, 3, 3, 256, 1, 1, name='conv3_2',
                   data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv3_2 = x
     x = relu(conv(x, 3, 3, 256, 1, 1, name='conv3_3',
                   data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv3_3 = x
     self.pool3 = max_pool(x, 2, 2, 2, 2, name='pool3')
Beispiel #2
0
 def __calc_conv5(self):
     """Build the fifth conv block: three conv+ReLU layers, then a max-pool."""
     relu = tf.nn.relu
     x = relu(conv(self.pool4, 3, 3, 512, 1, 1, name='conv5_1',
                   data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv5_1 = x
     x = relu(conv(x, 3, 3, 512, 1, 1, name='conv5_2',
                   data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv5_2 = x
     x = relu(conv(x, 3, 3, 512, 1, 1, name='conv5_3',
                   data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv5_3 = x
     self.pool5 = max_pool(x, 2, 2, 2, 2, name='pool5')
def highway(x, size=None, activation=None,
            num_layers=2, scope="highway", dropout=0.0, reuse=None):
    """Apply a stack of highway layers to `x`.

    Each layer computes x = H * T + x * (1 - T), where T is a sigmoid gate
    and H is the (optionally activated) transform, both produced by `conv`.
    If `size` differs from x's last dimension, x is first projected to `size`.

    Args:
        x: input tensor; assumed rank >= 2 with a static last dimension
           (read via x.shape.as_list()[-1]) — TODO confirm with callers.
        size: output width; defaults to x's last dimension.
        activation: optional activation applied to the transform H.
        num_layers: number of stacked highway layers.
        scope: variable scope name.
        dropout: dropout *rate* applied to H (keep_prob = 1 - dropout).
        reuse: passed to tf.variable_scope and conv for variable sharing.

    Returns:
        Tensor with last dimension `size`.
    """
    # BUG FIX: the original called tf.variable_scope(scope, reuse), which
    # binds `reuse` to the positional `default_name` parameter, so variable
    # reuse was never actually enabled on the scope. Pass it by keyword.
    with tf.variable_scope(scope, reuse=reuse):
        if size is None:
            size = x.shape.as_list()[-1]
        else:
            x = conv(x, size, name="input_projection", reuse=reuse)
        for i in range(num_layers):
            T = conv(x, size, bias=True, activation=tf.sigmoid,
                     name="gate_%d" % i, reuse=reuse)
            H = conv(x, size, bias=True, activation=activation,
                     name="activation_%d" % i, reuse=reuse)
            # tf.nn.dropout takes keep_prob, hence 1 - dropout.
            H = tf.nn.dropout(H, 1.0 - dropout)
            x = H * T + x * (1.0 - T)
        return x
Beispiel #4
0
    def __identity_block(self, input_layer, name, data, num_filters):
        """Residual identity block: 1x1 -> 3x3 -> 1x1 convs (each with batch
        norm, the first two with ReLU), then add the unmodified input."""
        with tf.name_scope('identity_block'):
            out = conv(input_layer, 1, 1, num_filters, 1, 1, padding='SAME',
                       name='res' + name + '_branch2a', data=self.weights_data,
                       retrain_layers=self.retrain_layers)
            out = tf.nn.relu(
                batch_norm_layer(out, data=data, name='bn' + name + '_branch2a'))

            out = conv(out, 3, 3, num_filters, 1, 1, padding='SAME',
                       name='res' + name + '_branch2b', data=self.weights_data,
                       retrain_layers=self.retrain_layers)
            out = tf.nn.relu(
                batch_norm_layer(out, data=data, name='bn' + name + '_branch2b'))

            # Last stage widens to num_filters * 4 and gets no ReLU before the add.
            out = conv(out, 1, 1, num_filters * 4, 1, 1, padding='SAME',
                       name='res' + name + '_branch2c', data=self.weights_data,
                       retrain_layers=self.retrain_layers)
            out = batch_norm_layer(out, data=data, name='bn' + name + '_branch2c')

            # Skip connection: the input is added unchanged (no projection).
            out = tf.add(out, input_layer)

        return out
Beispiel #5
0
 def __calc_conv3(self):
     """3rd layer: a single conv followed by ReLU."""
     self.conv3 = tf.nn.relu(
         conv(self.norm2, 3, 3, 384, 1, 1, name='conv3',
              data=self.weights_data,
              retrain_layers=self.retrain_layers))
Beispiel #6
0
 def __calc_conv4(self):
     """4th layer: conv (split into two groups) followed by ReLU."""
     self.conv4 = tf.nn.relu(
         conv(self.conv3, 3, 3, 384, 1, 1, name='conv4', groups=2,
              data=self.weights_data,
              retrain_layers=self.retrain_layers))
Beispiel #7
0
 def __calc_conv1(self):
     """First stage: conv -> batch norm -> ReLU -> max-pool."""
     pre_activation = conv(self.preprocessed_image, 7, 7, 64, 2, 2,
                           padding='VALID', name='conv1',
                           data=self.weights_data,
                           retrain_layers=self.retrain_layers)
     pre_activation = batch_norm_layer(pre_activation, 'bn_conv1',
                                       self.weights_data)
     self.conv1 = tf.nn.relu(pre_activation)
     self.pool1 = max_pool(self.conv1, 3, 3, 2, 2, padding='SAME',
                           name='pool1')
Beispiel #8
0
 def __calc_conv5(self):
     """5th layer: conv (two groups) with ReLU, then max-pool."""
     self.conv5 = tf.nn.relu(
         conv(self.conv4, 3, 3, 256, 1, 1, name='conv5', groups=2,
              data=self.weights_data,
              retrain_layers=self.retrain_layers))
     self.pool5 = max_pool(self.conv5, 3, 3, 2, 2, padding='VALID',
                           name='pool5')
Beispiel #9
0
 def __calc_conv2(self):
     """2nd layer: conv (two groups) with ReLU -> max-pool -> LRN."""
     self.conv2 = tf.nn.relu(
         conv(self.norm1, 5, 5, 256, 1, 1, name='conv2', groups=2,
              data=self.weights_data,
              retrain_layers=self.retrain_layers))
     self.pool2 = max_pool(self.conv2, 3, 3, 2, 2, padding='VALID',
                           name='pool2')
     # Local response normalisation over the pooled activations.
     self.norm2 = lrn(self.pool2, 2, 2e-05, 0.75, name='norm2')
Beispiel #10
0
 def __calc_conv1(self):
     """1st layer: conv with ReLU -> max-pool -> LRN."""
     self.conv1 = tf.nn.relu(
         conv(self.preprocessed_image, 11, 11, 96, 4, 4, padding='VALID',
              name='conv1', data=self.weights_data,
              retrain_layers=self.retrain_layers))
     self.pool1 = max_pool(self.conv1, 3, 3, 2, 2, padding='VALID',
                           name='pool1')
     # Local response normalisation over the pooled activations.
     self.norm1 = lrn(self.pool1, 2, 2e-05, 0.75, name='norm1')
Beispiel #11
0
 def __calc_conv2(self):
     """Build the second conv block: two conv+ReLU layers, then a max-pool."""
     first = tf.nn.relu(
         conv(self.pool1, 3, 3, 128, 1, 1, name='conv2_1',
              data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv2_1 = first
     second = tf.nn.relu(
         conv(first, 3, 3, 128, 1, 1, name='conv2_2',
              data=self.weights_data, retrain_layers=self.retrain_layers))
     self.conv2_2 = second
     self.pool2 = max_pool(second, 2, 2, 2, 2, name='pool2')
Beispiel #12
0
 def __calc_conv1(self):
     """Build the first conv block: two conv+ReLU layers, then a max-pool."""
     first = tf.nn.relu(
         conv(self.preprocessed_image, 3, 3, 64, 1, 1, name='conv1_1',
              data=self.weights_data, retrain_layers=self.retrain_layers,
              padding='VALID'))
     self.conv1_1 = first
     second = tf.nn.relu(
         conv(first, 3, 3, 64, 1, 1, name='conv1_2',
              data=self.weights_data, retrain_layers=self.retrain_layers,
              padding='VALID'))
     self.conv1_2 = second
     self.pool1 = max_pool(second, 2, 2, 2, 2, name='pool1', padding='SAME')
Beispiel #13
0
    def __conv_block(self,
                     input_layer,
                     name,
                     data,
                     num_filters,
                     stride_x=2,
                     stride_y=2):
        """Residual conv block: 1x1 -> strided 3x3 -> 1x1 convs (each with
        batch norm, the first two with ReLU), added to a strided 1x1
        projection shortcut of the input."""
        with tf.name_scope('conv_block'):
            out = conv(input_layer, 1, 1, num_filters, 1, 1, padding='SAME',
                       name='res' + name + '_branch2a', data=self.weights_data,
                       retrain_layers=self.retrain_layers)
            out = tf.nn.relu(
                batch_norm_layer(out, data=data, name='bn' + name + '_branch2a'))

            # Middle stage carries the stride, so the block may downsample.
            out = conv(out, 3, 3, num_filters, stride_x, stride_y,
                       padding='SAME', name='res' + name + '_branch2b',
                       data=self.weights_data,
                       retrain_layers=self.retrain_layers)
            out = tf.nn.relu(
                batch_norm_layer(out, data=data, name='bn' + name + '_branch2b'))

            # Last stage widens to num_filters * 4 and gets no ReLU before the add.
            out = conv(out, 1, 1, num_filters * 4, 1, 1, padding='SAME',
                       name='res' + name + '_branch2c', data=self.weights_data,
                       retrain_layers=self.retrain_layers)
            out = batch_norm_layer(out, data=data, name='bn' + name + '_branch2c')

            # Projection shortcut matches the main path's width and stride.
            shortcut = conv(input_layer, 1, 1, num_filters * 4,
                            stride_x, stride_y, padding='SAME',
                            name='res' + name + '_branch1',
                            data=self.weights_data,
                            retrain_layers=self.retrain_layers)
            shortcut = batch_norm_layer(shortcut,
                                        data=data,
                                        name='bn' + name + '_branch1')

            out = tf.add(out, shortcut)

        return out