Example 1
  def build(self):
    """Create the network graph."""
    # 1st Layer: Conv (w ReLU) -> LRN -> Pool
    conv1 = conv(self.x, 11, 11, 96, 4, 4, padding='VALID', name='conv1')
    norm1 = lrn(conv1, 2, 1e-05, 0.75, name='norm1')
    pool1 = max_pool(norm1, 3, 3, 2, 2, padding='VALID', name='pool1')

    # 2nd Layer: Conv (w ReLU) -> LRN -> Pool with 2 groups
    conv2 = conv(pool1, 5, 5, 256, 1, 1, groups=2, name='conv2')
    norm2 = lrn(conv2, 2, 1e-05, 0.75, name='norm2')
    pool2 = max_pool(norm2, 3, 3, 2, 2, padding='VALID', name='pool2')

    # 3rd Layer: Conv (w ReLU)
    conv3 = conv(pool2, 3, 3, 384, 1, 1, name='conv3')

    # 4th Layer: Conv (w ReLU) split into two groups
    conv4 = conv(conv3, 3, 3, 384, 1, 1, groups=2, name='conv4')

    # 5th Layer: Conv (w ReLU) -> Pool split into two groups
    conv5 = conv(conv4, 3, 3, 256, 1, 1, groups=2, name='conv5')
    pool5 = max_pool(conv5, 3, 3, 2, 2, padding='VALID', name='pool5')

    # 6th Layer: Flatten -> FC (w ReLU)
    flattened = tf.reshape(pool5, [-1, 6 * 6 * 256])
    fc6 = fc(flattened, 6 * 6 * 256, 4096, name='fc6')

    # 7th Layer: FC (w ReLU)
    fc7 = fc(fc6, 4096, 4096, name='fc7')

    # 8th Layer: FC and return unscaled activations
    self.fc8 = fc(fc7, 4096, self.num_classes, relu=False, name='fc8')
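
All of these examples call helper functions (conv, lrn, max_pool, fc, dropout) that are defined elsewhere in their repositories. For orientation, here is a minimal sketch of what the grouped-convolution helper might look like in TensorFlow 1.x; the exact signature and the variable names 'weights' and 'biases' are assumptions inferred from how the helper is called above and from the variable-scope lookups in Examples 4 and 5.

import tensorflow as tf

def conv(x, filter_height, filter_width, num_filters, stride_y, stride_x,
         name, padding='SAME', groups=1):
    """AlexNet-style conv layer with optional channel groups, plus ReLU.

    Sketch only: signature and variable names are inferred, not confirmed.
    """
    input_channels = int(x.get_shape()[-1])

    def convolve(i, k):
        return tf.nn.conv2d(i, k, strides=[1, stride_y, stride_x, 1],
                            padding=padding)

    with tf.variable_scope(name):
        weights = tf.get_variable('weights',
                                  shape=[filter_height, filter_width,
                                         input_channels // groups,
                                         num_filters])
        biases = tf.get_variable('biases', shape=[num_filters])

    if groups == 1:
        conv_out = convolve(x, weights)
    else:
        # Split both the input and the kernels along the channel axis,
        # convolve each group independently, then concatenate the results.
        input_groups = tf.split(x, num_or_size_splits=groups, axis=3)
        weight_groups = tf.split(weights, num_or_size_splits=groups, axis=3)
        conv_out = tf.concat([convolve(i, k) for i, k
                              in zip(input_groups, weight_groups)], axis=3)

    return tf.nn.relu(tf.nn.bias_add(conv_out, biases), name=name)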
Example 2
    def build_graph(self):
        # Reinitializable iterator: the concrete dataset is bound later via
        # make_initializer (see the usage sketch after this example).
        self.iterator = tf.data.Iterator.from_structure(
            (tf.float32, tf.int32),
            (tf.TensorShape([None, 227, 227, 3]), tf.TensorShape([None]))
        )
        self.inputs, self.labels = self.iterator.get_next()

        self.conv1 = layers.conv(self.inputs, [11, 11], 96, [4, 4],
                                 padding='VALID', name='conv1', mask=True)
        norm1 = layers.lrn(self.conv1, 2, 1e-05, 0.75, name='norm1')
        pool1 = layers.max_pool(norm1, [3, 3], [2, 2], padding='VALID',
                                name='pool1')

        self.conv2 = layers.conv(pool1, [5, 5], 256, [1, 1], groups=2,
                                 name='conv2', mask=True)
        norm2 = layers.lrn(self.conv2, 2, 1e-05, 0.75, name='norm2')
        pool2 = layers.max_pool(norm2, [3, 3], [2, 2], padding='VALID',
                                name='pool2')

        self.conv3 = layers.conv(pool2, [3, 3], 384, [1, 1], name='conv3',
                                 mask=True)

        self.conv4 = layers.conv(self.conv3, [3, 3], 384, [1, 1], groups=2,
                                 name='conv4', mask=True)

        self.conv5 = layers.conv(self.conv4, [3, 3], 256, [1, 1], groups=2,
                                 name='conv5', mask=True)
        pool5 = layers.max_pool(self.conv5, [3, 3], [2, 2], padding='VALID',
                                name='pool5')

        # Dropout keep probability held in a non-trainable variable so it can
        # be reassigned at runtime (e.g. a lower value for training, 1.0 for
        # evaluation).
        self.keep_prob = tf.get_variable('keep_prob', shape=(),
                                         trainable=False)

        flattened = tf.reshape(pool5, [-1, 6 * 6 * 256])
        fc6 = layers.fc(flattened, 4096, name='fc6')
        dropout6 = layers.dropout(fc6, self.keep_prob)

        fc7 = layers.fc(dropout6, 4096, name='fc7')
        dropout7 = layers.dropout(fc7, self.keep_prob)

        self.logits = layers.fc(dropout7, self.num_classes, relu=False,
                                name='fc8')
        self.probs_op = tf.nn.softmax(self.logits)
        self.pred_op = tf.argmax(input=self.logits, axis=1)
        corrects_op = tf.equal(tf.cast(self.pred_op, tf.int32),
                               self.labels)
        self.acc_op = tf.reduce_mean(tf.cast(corrects_op, tf.float32))
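
A note on usage: an iterator created with tf.data.Iterator.from_structure is not tied to any dataset; it is bound to one with make_initializer. A minimal sketch, assuming model is an instance of this class and train_dataset / val_dataset are tf.data.Dataset objects matching the (tf.float32, tf.int32) structure declared above:

train_init_op = model.iterator.make_initializer(train_dataset)
val_init_op = model.iterator.make_initializer(val_dataset)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(train_init_op)      # point the iterator at the training set
    train_acc = sess.run(model.acc_op)
    sess.run(val_init_op)        # switch the same graph to the validation set
    val_acc = sess.run(model.acc_op)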
Example 3
    def _build_graph(self):
        self.x = tf.placeholder(tf.float32, [None, 227, 227, 3])
        self.y = tf.placeholder(tf.float32, [None, 2])
        self.keep_prob = tf.placeholder_with_default(1.0,
                                                     shape=[],
                                                     name='dropout_keep_prob')

        # 1st Layer: Conv (w ReLU) -> LRN -> Pool
        conv1 = conv(self.x, 11, 11, 96, 4, 4, padding='VALID', name='conv1')
        norm1 = lrn(conv1, 2, 1e-05, 0.75, name='norm1')
        pool1 = max_pool(norm1, 3, 3, 2, 2, padding='VALID', name='pool1')

        # 2nd Layer: Conv (w ReLU) -> LRN -> Pool with 2 groups
        conv2 = conv(pool1, 5, 5, 256, 1, 1, groups=2, name='conv2')
        norm2 = lrn(conv2, 2, 1e-05, 0.75, name='norm2')
        pool2 = max_pool(norm2, 3, 3, 2, 2, padding='VALID', name='pool2')

        # 3rd Layer: Conv (w ReLU)
        conv3 = conv(pool2, 3, 3, 384, 1, 1, name='conv3')

        # 4th Layer: Conv (w ReLU) split into two groups
        conv4 = conv(conv3, 3, 3, 384, 1, 1, groups=2, name='conv4')

        # 5th Layer: Conv (w ReLU) -> Pool split into two groups
        conv5 = conv(conv4, 3, 3, 256, 1, 1, groups=2, name='conv5')
        pool5 = max_pool(conv5, 3, 3, 2, 2, padding='VALID', name='pool5')

        # 6th Layer: Flatten -> FC (w ReLU) -> Dropout
        flattened = tf.reshape(pool5, [-1, 6 * 6 * 256])
        fc6 = fc(flattened, 6 * 6 * 256, 4096, name='fc6')
        dropout6 = dropout(fc6, self.keep_prob)

        # 7th Layer: FC (w ReLU) -> Dropout
        fc7 = fc(dropout6, 4096, 4096, name='fc7')
        dropout7 = dropout(fc7, self.keep_prob)

        # 8th Layer: FC and return unscaled activations
        self.fc8 = fc(dropout7, 4096, self.num_classes, relu=False, name='fc8')
        self.prob = tf.nn.softmax(self.fc8, name='prob')
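
Because keep_prob is defined with tf.placeholder_with_default, dropout is transparently disabled at inference time: callers that feed nothing get the default of 1.0. A minimal usage sketch, where model and images are hypothetical names:

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # Training-style pass: dropout active with keep probability 0.5.
    train_probs = sess.run(model.prob, feed_dict={model.x: images,
                                                  model.keep_prob: 0.5})
    # Inference pass: keep_prob falls back to its default of 1.0.
    test_probs = sess.run(model.prob, feed_dict={model.x: images})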
Example 4
  def build(self):
    """Create the network graph."""
    # 1st Layer: Conv (w ReLU) -> LRN -> Pool
    conv1 = conv(self.x, 11, 11, 96, 4, 4, padding='VALID', name='conv1')
    norm1 = lrn(conv1, 2, 1e-05, 0.75, name='norm1')
    pool1 = max_pool(norm1, 3, 3, 2, 2, padding='VALID', name='pool1')

    # 2nd Layer: Conv (w ReLU) -> LRN -> Pool with 2 groups
    conv2 = conv(pool1, 5, 5, 256, 1, 1, groups=2, name='conv2')
    norm2 = lrn(conv2, 2, 1e-05, 0.75, name='norm2')
    pool2 = max_pool(norm2, 3, 3, 2, 2, padding='VALID', name='pool2')

    # 3rd Layer: Conv (w ReLU)
    conv3 = conv(pool2, 3, 3, 384, 1, 1, name='conv3')

    # 4th Layer: Conv (w ReLU) split into two groups
    conv4 = conv(conv3, 3, 3, 384, 1, 1, groups=2, name='conv4')

    # 5th Layer: Conv (w ReLU) -> Pool split into two groups
    conv5 = conv(conv4, 3, 3, 256, 1, 1, groups=2, name='conv5')
    pool5 = max_pool(conv5, 3, 3, 2, 2, padding='VALID', name='pool5')

    # 6th Layer: Flatten -> FC (w ReLU)
    flattened = tf.reshape(pool5, [-1, 6 * 6 * 256])
    fc6 = fc(flattened, 6 * 6 * 256, 4096, name='fc6')

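    # 7th Layer: FC (w ReLU) split into a factor part and a shared part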
    fc7_factor = fc(fc6, 4096, self.num_factor_units, 'fc7_factor')
    fc7_shared = fc(fc6, 4096, 4096 - self.num_factor_units, 'fc7_shared')

    with tf.variable_scope('fc7_factor', reuse=True):
      self.assign_factor = tf.group(tf.get_variable('weights').assign(self.factor_weights),
                                    tf.get_variable('biases').assign(self.factor_biases))

    fc7_concat = tf.concat([fc7_factor, fc7_shared], axis=1, name='fc7_concat')

    # 8th Layer: FC and return unscaled activations
    self.fc8 = fc(fc7_concat, 4096, self.num_classes, relu=False, name='fc8')
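
The assign_factor block above only works if the fc helper creates variables literally named 'weights' and 'biases' inside a variable scope equal to the layer name; otherwise tf.get_variable with reuse=True could not find them. A minimal sketch of such a helper under that assumption (the signature is inferred from the calls above):

def fc(x, num_in, num_out, name, relu=True):
    """Fully connected layer; creates 'weights'/'biases' under scope `name`."""
    with tf.variable_scope(name):
        weights = tf.get_variable('weights', shape=[num_in, num_out])
        biases = tf.get_variable('biases', shape=[num_out])
    act = tf.nn.xw_plus_b(x, weights, biases)
    return tf.nn.relu(act) if relu else act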
Example 5
    def _build_graph(self):
        self.x = tf.placeholder(tf.float32, [None, 227, 227, 3])
        self.y = tf.placeholder(tf.float32, [None, 2])

        if self.factor_layer == 'conv1':
            self.factor_weights = tf.placeholder(
                tf.float32, shape=[11, 11, 3, self.num_factors])
        elif self.factor_layer == 'conv3':
            self.factor_weights = tf.placeholder(
                tf.float32, shape=[3, 3, 128, self.num_factors])
        elif self.factor_layer == 'conv5':
            self.factor_weights = tf.placeholder(
                tf.float32, shape=[3, 3, 192, self.num_factors])
        elif self.factor_layer == 'fc7':
            self.factor_weights = tf.placeholder(
                tf.float32, shape=[4096, self.num_factors])

        self.factor_biases = tf.placeholder(tf.float32,
                                            shape=[self.num_factors])

        # 1st Layer: Conv (w ReLU) -> LRN -> Pool
        if self.factor_layer == 'conv1':
            conv1_factor = conv(self.x, 11, 11, self.num_factors, 4, 4,
                                padding='VALID', name='conv1_factor')
            conv1_shared = conv(self.x, 11, 11, 96 - self.num_factors, 4, 4,
                                padding='VALID', name='conv1_shared')
            splits = tf.split(axis=3, num_or_size_splits=2, value=conv1_factor)
            conv1 = tf.concat([splits[0], conv1_shared, splits[1]],
                              axis=3, name='conv1_concat')
        else:
            conv1 = conv(self.x, 11, 11, 96, 4, 4, padding='VALID',
                         name='conv1')

        norm1 = lrn(conv1, 2, 1e-05, 0.75, name='norm1')
        pool1 = max_pool(norm1, 3, 3, 2, 2, padding='VALID', name='pool1')

        # 2nd Layer: Conv (w ReLU) -> LRN -> Pool with 2 groups
        conv2 = conv(pool1, 5, 5, 256, 1, 1, groups=2, name='conv2')
        norm2 = lrn(conv2, 2, 1e-05, 0.75, name='norm2')
        pool2 = max_pool(norm2, 3, 3, 2, 2, padding='VALID', name='pool2')

        # 3rd Layer: Conv (w ReLU)
        if self.factor_layer == 'conv3':
            conv3_factor = conv(pool2, 3, 3, self.num_factors, 1, 1,
                                groups=2, name='conv3_factor')
            conv3_shared = conv(pool2, 3, 3, 384 - self.num_factors, 1, 1,
                                name='conv3_shared')
            splits = tf.split(axis=3, num_or_size_splits=2, value=conv3_factor)
            conv3 = tf.concat([splits[0], conv3_shared, splits[1]],
                              axis=3, name='conv3_concat')
        else:
            conv3 = conv(pool2, 3, 3, 384, 1, 1, name='conv3')

        # 4th Layer: Conv (w ReLU) split into two groups
        conv4 = conv(conv3, 3, 3, 384, 1, 1, groups=2, name='conv4')

        # 5th Layer: Conv (w ReLU) -> Pool split into two groups
        if self.factor_layer == 'conv5':
            conv5_factor = conv(conv4, 3, 3, self.num_factors, 1, 1,
                                groups=2, name='conv5_factor')
            conv5_shared = conv(conv4, 3, 3, 256 - self.num_factors, 1, 1,
                                groups=2, name='conv5_shared')
            splits = tf.split(axis=3, num_or_size_splits=2, value=conv5_factor)
            conv5 = tf.concat([splits[0], conv5_shared, splits[1]],
                              axis=3, name='conv5_concat')
        else:
            conv5 = conv(conv4, 3, 3, 256, 1, 1, groups=2, name='conv5')

        pool5 = max_pool(conv5, 3, 3, 2, 2, padding='VALID', name='pool5')

        # 6th Layer: Flatten -> FC (w ReLU)
        flattened = tf.reshape(pool5, [-1, 6 * 6 * 256])
        fc6 = fc(flattened, 6 * 6 * 256, 4096, name='fc6')

        if self.factor_layer == 'fc7':
            fc7_factor = fc(fc6, 4096, self.num_factors, 'fc7_factor')
            fc7_shared = fc(fc6, 4096, 4096 - self.num_factors, 'fc7_shared')
            fc7 = tf.concat([fc7_factor, fc7_shared],
                            axis=1,
                            name='fc7_concat')
        else:
            fc7 = fc(fc6, 4096, 4096, name='fc7')

        with tf.variable_scope('{}_factor'.format(self.factor_layer),
                               reuse=True):
            self.assign_factor = tf.group(
                tf.get_variable('weights').assign(self.factor_weights),
                tf.get_variable('biases').assign(self.factor_biases))

        # 8th Layer: FC and return unscaled activations
        self.fc8 = fc(fc7, 4096, self.num_classes, relu=False, name='fc8')
        self.prob = tf.nn.softmax(self.fc8, name='prob')
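
A usage sketch for the factor-assignment op, with hypothetical names: factor_w and factor_b are assumed to be NumPy arrays whose shapes match the placeholders declared for the chosen factor_layer (e.g. produced by some external factorization step).

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(model.assign_factor,
             feed_dict={model.factor_weights: factor_w,
                        model.factor_biases: factor_b})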