Example #1
    def create_model(self, x):

        print('0', x.shape)
        x = Conv(x,
                 filter=2 * self.filters,
                 kernel=conv0_weight,
                 stride=conv0_stride,
                 layer_name='conv0')
        print('1', x.shape)
        x = Max_Pool(x)
        print('2', x.shape)

        for i in range(self.num_blocks):
            x = self.dense_block(x,
                                 num_layers=num_layers_1,
                                 layer_name='dense_%d' % i)
            print('3', x.shape)
            x = self.transition(x, scope='trans_%d' % i)
            print('4', x.shape)

        x = self.dense_block_last(x,
                                  num_layers=num_layers_last,
                                  layer_name='dense_final')
        print('5', x.shape)
        x = BN(x, training=self.training, scope='linear_batch')
        x = lrelu(x)

        x = tf.reshape(x, shape=[-1, ts])
        print('6', x.shape)
        return x
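
The wrapper layers used above (Conv, Max_Pool, BN, lrelu) and the free names (conv0_weight, conv0_stride, num_layers_1, num_layers_last, ts) are defined elsewhere in the original module and are not part of this example. For orientation only, below is a minimal sketch of what such TensorFlow 1.x wrappers typically look like; every signature and default here is an assumption, not the project's actual code.

# Hypothetical sketch of the layer wrappers assumed by the examples above.
# Signatures, defaults, and padding choices are guesses, not the real helpers.
import tensorflow as tf  # TensorFlow 1.x API


def Conv(x, filter, kernel, stride=1, layer_name='conv'):
    # Plain 2-D convolution with 'SAME' padding and no bias.
    with tf.name_scope(layer_name):
        return tf.layers.conv2d(x, filters=filter, kernel_size=kernel,
                                strides=stride, padding='SAME', use_bias=False)


def BN(x, training, scope):
    # Batch normalization; `training` switches between batch and moving statistics.
    with tf.name_scope(scope):
        return tf.layers.batch_normalization(x, training=training)


def lrelu(x, alpha=0.2):
    # Leaky ReLU activation.
    return tf.nn.leaky_relu(x, alpha=alpha)


def Max_Pool(x, pool_size=[3, 3], stride=2):
    return tf.layers.max_pooling2d(x, pool_size=pool_size, strides=stride, padding='SAME')


def Avg_Pool(x, pool_size=[2, 2], stride=2):
    return tf.layers.average_pooling2d(x, pool_size=pool_size, strides=stride, padding='SAME')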
Example #2
    def bottleneck_last(self, x, scope):
        with tf.name_scope(scope):
            x = BN(x, training=self.training, scope=scope + '_batch1_last')
            x = lrelu(x)
            x = Conv(x,
                     filter=4 * self.filters_last,
                     kernel=[1, 1],
                     layer_name=scope + '_conv1_last')
            #            x = Dropout(x, rate=dropout_rate, training=self.training)

            x = BN(x, training=self.training, scope=scope + '_batch2')
            x = lrelu(x)
            x = Conv(x,
                     filter=self.filters_last,
                     kernel=conv_weight,
                     layer_name=scope + '_conv2_last')
            #            x = Dropout(x, rate=dropout_rate, training=self.training)

            return x
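
bottleneck_last follows the DenseNet bottleneck pattern: BN, leaky ReLU, a 1x1 convolution with 4 * filters_last channels, then BN, leaky ReLU, and a second convolution, with dropout left commented out. The dense_block / dense_block_last methods that chain these bottlenecks are not shown in these examples; the sketch below is a hypothetical illustration of the usual chaining with channel-wise concatenation, written as a free function that takes the bottleneck as a callable.

# Hypothetical illustration only: how a DenseNet-style block typically chains
# bottleneck layers via channel-wise concatenation. The project's real
# dense_block / dense_block_last implementation is not shown in these examples.
import tensorflow as tf


def dense_block(input_x, num_layers, bottleneck_fn, layer_name):
    # bottleneck_fn: a callable like bottleneck_last(x, scope) above.
    with tf.name_scope(layer_name):
        layers_concat = [input_x]
        x = bottleneck_fn(input_x, scope=layer_name + '_bottleN_0')
        layers_concat.append(x)
        for i in range(num_layers - 1):
            # Each new bottleneck sees the concatenation of all earlier outputs.
            x = tf.concat(layers_concat, axis=3)
            x = bottleneck_fn(x, scope=layer_name + '_bottleN_%d' % (i + 1))
            layers_concat.append(x)
        return tf.concat(layers_concat, axis=3)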
Example #3
    def transition(self, x, scope):
        with tf.name_scope(scope):
            x = BN(x, training=self.training, scope=scope + '_batch1')
            x = lrelu(x)
            x = Conv(x,
                     filter=self.filters,
                     kernel=[1, 1],
                     layer_name=scope + '_conv1')
            #            x = Dropout(x, rate=dropout_rate, training=self.training)
            x = Avg_Pool(x)

            return x
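
The transition layer compresses channels with a 1x1 convolution and then downsamples spatially with average pooling. Below is a standalone shape check of that pattern, with made-up sizes and assuming Avg_Pool uses a 2x2 window with stride 2.

# Standalone shape check of the transition pattern (1x1 conv + average pooling).
# All sizes below are illustrative; the real Avg_Pool's window and stride may differ.
import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 32, 32, 48])        # e.g. a dense block output
x = tf.layers.conv2d(x, filters=12, kernel_size=[1, 1])   # channel compression
x = tf.layers.average_pooling2d(x, pool_size=[2, 2], strides=2)
print(x.shape)  # (?, 16, 16, 12): channels reduced by the 1x1 conv,
                # height and width halved by the pooling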
Example #4
    def create_model(self, x):

        print('0', x.shape)
        x = Conv(x,
                 filter=2 * self.filters,
                 kernel=conv0_weight,
                 stride=conv0_stride,
                 layer_name='conv0')
        print('1', x.shape)
        x = Max_Pool(x)
        print('2', x.shape)

        for i in range(self.num_blocks):
            x = self.dense_block(x,
                                 num_layers=num_layers_1,
                                 layer_name='dense_%d' % i)
            print('3', x.shape)
            x = self.transition(x, scope='trans_%d' % i)
            print('4', x.shape)

        x = self.dense_block_last(x,
                                  num_layers=num_layers_last,
                                  layer_name='dense_final')
        print('5', x.shape)
        x = BN(x, training=self.training, scope='linear_batch')
        x = lrelu(x)

        x = tf.reshape(x, shape=[-1, ts])
        print('6', x.shape)

        # Output head: keep the raw network output, take its first 95 feature
        # columns, and prepend x_init along the feature axis.
        y_final_prime2 = x
        y_final_ = tf.slice(x, [0, 0], [-1, 95])
        y_final_prime1 = tf.concat([x_init, y_final_], 1)

        # Project the concatenated vector with a learnable 96x96 matrix and add
        # the result back onto the raw output before the final activation.
        w1_final = tf.Variable(tf.random_normal([96, 96]))
        y_pred = tf.add(tf.matmul(y_final_prime1, w1_final), y_final_prime2)
        x = lrelu(y_pred)
        return x
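
The output head in this example only type-checks if ts == 96 and x_init contributes a single column per sample: the slice keeps the first 95 columns of the [batch, ts] output, concatenating x_init gives [batch, 96], and the result of the 96x96 matmul is added element-wise back onto the untouched output. A standalone check of that shape arithmetic, with illustrative values:

# Standalone check of the output-head shape arithmetic (illustrative values:
# batch of 4, ts assumed to be 96, x_init assumed to be one column per sample).
import numpy as np
import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 96])        # reshaped network output
x_init = tf.placeholder(tf.float32, [None, 1])    # assumed single-column input feature

y_keep = x                                        # untouched copy, [batch, 96]
y_cut = tf.slice(x, [0, 0], [-1, 95])             # first 95 columns, [batch, 95]
y_shift = tf.concat([x_init, y_cut], 1)           # [batch, 1 + 95] = [batch, 96]

w = tf.Variable(tf.random_normal([96, 96]))
y_pred = tf.add(tf.matmul(y_shift, w), y_keep)    # [batch, 96]

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    out = sess.run(y_pred, feed_dict={x: np.zeros((4, 96), np.float32),
                                      x_init: np.zeros((4, 1), np.float32)})
    print(out.shape)  # (4, 96)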