Example #1
    def __call__(self, x, is_reuse=False, is_train=True):
        with tf.variable_scope('classifier') as scope:

            if is_reuse:
                scope.reuse_variables()

            # Two conv blocks, each with two convolutions followed by a max-pool;
            # the channel count doubles after every block.
            unit_n = self.smallest_unit_n
            conv_ns = [2, 2]

            for layer_i, conv_n in enumerate(conv_ns):
                with tf.variable_scope('layer{}'.format(layer_i)):
                    for conv_i in range(conv_n):
                        x = conv2d(x, unit_n, self.k_size, 1, 'SAME', name='conv2d_{}'.format(conv_i))
                        x = tf.nn.relu(x)
                    x = maxpool2d(x, self.k_size, 2, 'SAME')
                unit_n *= 2

            # One fully connected hidden layer of 256 units.
            unit_n = 256
            fc_n = 1

            for layer_i in range(len(conv_ns), len(conv_ns) + fc_n):
                with tf.variable_scope('layer{}'.format(layer_i)):
                    x = fc(x, unit_n)
                    x = tf.nn.relu(x)
                    x = batch_norm(x, is_train)
                    x = tf.layers.dropout(x, rate=0.5, training=is_train)  # drop only while training

            with tf.variable_scope('output'):
                x = fc(x, self.class_n)

            return x
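
A minimal usage sketch for the classifier above (hypothetical: it assumes a `Classifier` class exposing `smallest_unit_n`, `k_size`, and `class_n`, plus the `conv2d`/`maxpool2d`/`fc`/`batch_norm` helpers; the input shape is an assumption). The second call passes `is_reuse=True`, so both graphs share the variables created under the 'classifier' scope:

import tensorflow as tf

x_train = tf.placeholder(tf.float32, [None, 32, 32, 3])
x_test = tf.placeholder(tf.float32, [None, 32, 32, 3])

clf = Classifier()  # hypothetical constructor
logits_train = clf(x_train)                               # first call creates the variables
logits_test = clf(x_test, is_reuse=True, is_train=False)  # second call shares the same weights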
Example #2
    def conv_net(self, x, weights, biases, dropout):
        # Reshape the flat 784-vector back into a 28x28 single-channel image.
        x = tf.reshape(x, shape=[-1, 28, 28, 1])

        conv1 = conv2d_t(x, weights['wc1'], biases['bc1'])
        conv1 = maxpool2d(conv1, k=2)

        conv2 = conv2d_t(conv1, weights['wc2'], biases['bc2'])
        conv2 = maxpool2d(conv2, k=2)

        fc1 = tf.reshape(conv2, [-1, weights['wd1'].get_shape().as_list()[0]])
        fc1 = tf.add(tf.matmul(fc1, weights['wd1']), biases['bd1'])
        fc1 = tf.nn.relu(fc1)
        fc1 = tf.nn.dropout(fc1, dropout)

        out = tf.add(tf.matmul(fc1, weights['out']), biases['out'])
        # out = tf.nn.softmax(out)  # left as logits: the loss typically applies softmax itself

        return out
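
The dictionaries this method expects are not shown; here is a plausible sketch of their shapes (assumptions, not the original values; only the dictionary keys come from the method). With two k=2 max-pools, a 28x28 MNIST image shrinks to 7x7, so 'wd1' must take 7*7*64 inputs if 'wc2' produces 64 filters:

import tensorflow as tf

weights = {
    'wc1': tf.Variable(tf.random_normal([5, 5, 1, 32])),       # 5x5 conv, 1 -> 32 channels
    'wc2': tf.Variable(tf.random_normal([5, 5, 32, 64])),      # 5x5 conv, 32 -> 64 channels
    'wd1': tf.Variable(tf.random_normal([7 * 7 * 64, 1024])),  # 28 -> 14 -> 7 after two pools
    'out': tf.Variable(tf.random_normal([1024, 10])),          # 10 MNIST classes
}
biases = {
    'bc1': tf.Variable(tf.zeros([32])),
    'bc2': tf.Variable(tf.zeros([64])),
    'bd1': tf.Variable(tf.zeros([1024])),
    'out': tf.Variable(tf.zeros([10])),
}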
Example #3
def densenet(image, options, reuse=False, name='densenet'):
    # Scope the whole network so a second call can share weights via reuse.
    with tf.variable_scope(name, reuse=reuse):
        divide = 2  # the single max-pool below halves the spatial resolution once

        h_conv1 = conv2d(image, options.nk, ks=options.ks, name=name+'_conv1')
        h_db1 = denseblock(h_conv1, options, name=name+'_db1')
        h_maxpool1 = maxpool2d(h_db1, name=name+'_pool1')
        h_db2 = denseblock(h_maxpool1, options, name=name+'_db2')

        pooled_size = int(options.image_size / divide)

        # Flatten, then two fully connected layers down to the prediction size.
        h_flat = tf.reshape(h_db2, [-1, pooled_size * pooled_size * options.nk])
        h_fc1 = fully_connected(h_flat, options.nk * options.nk, name=name+'_fc1')
        h_fc2 = fully_connected(h_fc1, options.n_pred, name=name+'_fc2')

        return h_fc2
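
A hypothetical `options` object for calling densenet (the field names come from the function body; the values are assumptions). Note how `pooled_size` follows from `divide = 2`, which matches the one max-pool:

from collections import namedtuple
import tensorflow as tf

Options = namedtuple('Options', ['nk', 'ks', 'image_size', 'n_pred'])
options = Options(nk=32, ks=3, image_size=64, n_pred=10)  # assumed values

image = tf.placeholder(tf.float32, [None, options.image_size, options.image_size, 3])
logits = densenet(image, options)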
Example #4
    def __call__(self, input):
        with tf.variable_scope(self.name, reuse=self._reuse):
            if not self._reuse:
                log.warn(self.name)
            _ = input
            print('input tensor:', _)

            # conv layers
            num_conv_channel = [32, 64, 128, 256, 256, 512, 512, 512, 512]
            #num_conv_channel = [8, 16, 32, 32, 32, 32, 32, 32]
            conv_list = []
            actv_list = []
            fc_list = []
            assert 0 <= self._num_conv <= 10
            if self._num_conv == 0:
                _ = maxpool2d(_,
                              info=not self._reuse,
                              name="maxpool{}".format(1))
            else:
                for i in range(min(self._num_conv, len(num_conv_channel))):
                    _, pre_act = conv2d(_,
                                        num_conv_channel[i],
                                        self._is_train,
                                        info=not self._reuse,
                                        norm=self._norm_type,
                                        name='conv{}'.format(i + 1))
                    conv_list.append(pre_act)
                    actv_list.append(_)
                    print(_)

            # flatten: [bs, h, w, c] -> [bs, v]
            _ = flatten(_, info=not self._reuse)

            # fc layers
            num_fc_channel = [256, 64, 32, 16]
            num_hidden_fc = min(self._num_fc, len(num_fc_channel))
            for i in range(num_hidden_fc):
                _ = fc(_,
                       num_fc_channel[i],
                       self._is_train,
                       info=not self._reuse,
                       norm=self._norm_type,
                       name='fc{}'.format(i + 1))
                fc_list.append(_)

            # Output layer; naming it from num_hidden_fc keeps the numbering
            # valid even when no hidden fc layers were built.
            _ = fc(_,
                   self._output_dim,
                   self._is_train,
                   info=not self._reuse,
                   activation_fn=self._output_act_fn,
                   norm='none',
                   name='fc{}'.format(num_hidden_fc + 1))

            self._reuse = True
            self.var_list = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                                              self.name)
            """
            self.allvar = tf.get_collection(tf.GraphKeys.MODEL_VARIABLES, self.name)
            log.infov('var list')
            log.infov(self.var_list)
            log.infov('all var')
            log.infov(self.allvar)
            """
            return _, conv_list, actv_list, fc_list
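
Because the call flips `self._reuse` to True before returning, a second invocation rebuilds the same graph with shared weights, the usual TF1 pattern for Siamese networks or GAN discriminators. A sketch (the `Model` constructor and its arguments are assumptions):

net = Model(name='encoder', num_conv=4, num_fc=2)  # hypothetical constructor
out_a, convs_a, actvs_a, fcs_a = net(input_a)  # first call creates the variables
out_b, convs_b, actvs_b, fcs_b = net(input_b)  # second call reuses them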