Example 1
    def _setup(self, data):
        image = data['image']
        label = data['label']

        t4.Conv.default.filter_initializer = t4.GlorotNormalInitializer
        conv = t4.DeepConv(1, 64, 1024)
        lin = t4.Linear(7 * 7 * 1024, 10)

        dense = t4.image.to_dense(image)
        flat = t4.image.to_flat(conv(dense, t4.relu))
        pred = tf.nn.softmax(lin(flat))

        loss = t4.categorical_cross_entropy_loss(
            pred, tf.one_hot(label, 10, dtype=t4.Widget.default.float_dtype))
        acc = t4.correct_prediction(tf.argmax(pred, axis=1), label)

        optimizer = tf.train.AdamOptimizer().minimize(loss)

        self.train = self._add_slot('train',
                                    outputs=(loss, acc),
                                    updates=optimizer,
                                    summaries=lin.summaries)

        self.valid = self._add_slot(
            'valid',
            outputs=(loss, acc),
            # givens={t4.BatchNorm.default.shift: False}
        )
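
Note: Example 1 overrides the library-wide default filter initializer with Glorot (Xavier) normal initialization before building the convolutional stack. For reference, a Glorot-normal initializer draws weights from a normal distribution with stddev sqrt(2 / (fan_in + fan_out)). The standalone sketch below shows the same kind of initialization in plain TensorFlow 1.x; it illustrates the concept only and is not the tforce default mechanism, and the 3x3 kernel size is an arbitrary choice for the illustration.

import tensorflow as tf

# Standalone illustration of Glorot (Xavier) normal initialization in TF 1.x.
# t4.GlorotNormalInitializer is assumed to produce filters in the same spirit.
init = tf.glorot_normal_initializer()

# e.g. a 3x3 filter bank mapping 1 input channel to 64 output channels
filters = tf.get_variable('filters', shape=[3, 3, 1, 64], initializer=init)
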
Example 2
    def _setup(self, data):
        image = data['image']
        label = data['label']

        dense = t4.image.to_dense(image)
        flat = t4.image.to_flat(dense)
        lin = t4.Linear(28 * 28, 10)
        pred = tf.nn.softmax(lin(flat))

        loss = t4.categorical_cross_entropy_loss(
            pred, tf.one_hot(label, 10, dtype=t4.Widget.default.float_dtype))
        acc = t4.correct_prediction(tf.argmax(pred, axis=1), label)

        optimizer = tf.train.AdamOptimizer().minimize(loss)

        self.train = self._add_slot(
            'train',
            outputs=(loss, acc),
            updates=optimizer,
            summaries=lin.summaries
        )

        self.valid = self._add_slot(
            'valid',
            outputs=(loss, acc)
        )
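
Note: for readers without tforce installed, the sketch below is a rough plain-TensorFlow-1.x counterpart of Example 2: flatten the image, apply a single linear layer with softmax, train with cross-entropy against one-hot labels, and track accuracy. The t4 helpers are assumed to wrap ops of this kind; the 28x28x1 input shape and the /255 scaling are assumptions for the illustration, not the library's actual code.

import tensorflow as tf

# Illustrative plain-TF 1.x counterpart of Example 2 (softmax regression).
image = tf.placeholder(tf.float32, [None, 28, 28, 1])  # assumed MNIST-like input
label = tf.placeholder(tf.int64, [None])

flat = tf.reshape(image / 255.0, [-1, 28 * 28])         # ~ to_dense + to_flat
w = tf.get_variable('w', [28 * 28, 10])
b = tf.get_variable('b', [10], initializer=tf.zeros_initializer())
pred = tf.nn.softmax(tf.matmul(flat, w) + b)            # ~ Linear + softmax

# ~ categorical_cross_entropy_loss against one-hot labels
one_hot = tf.one_hot(label, 10)
loss = -tf.reduce_mean(tf.reduce_sum(one_hot * tf.log(pred + 1e-8), axis=1))

# ~ correct_prediction: fraction of argmax predictions matching the label
acc = tf.reduce_mean(tf.cast(tf.equal(tf.argmax(pred, axis=1), label), tf.float32))

train_op = tf.train.AdamOptimizer().minimize(loss)
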
Example 3
    def _setup(self, data):
        image = data['image']
        label = data['label'] - 1  # shift labels to be 0-based

        k = 2  # widening factor for the residual groups
        n = 4  # residual blocks per group
        t4.L2Regularizer.default.rate = 0.0005  # weight decay strength

        t4.SimpleResidualConv.default.block = t4.ConvNoBias
        conv = t4.Conv(3, 16, 3, 3, 1, 1)
        residual = t4.DeepResidualConv((16 * k, n), (32 * k, n), (64 * k, n),
                                       (128 * k, n),
                                       input_channel=16,
                                       block=t4.SimpleResidualConv)

        lin = t4.Linear(128 * k, 1000)

        shift = tf.placeholder_with_default(True, ())  # augmentation switch; fed False at validation
        image = t4.image.randomize_flip(image, 0.5, switch=shift)
        image = t4.image.randomize_crop(image, switch=shift)
        dense = t4.image.to_dense(image, std=64)

        dense = conv(dense)
        dense = t4.batch_normalization(dense)
        dense = t4.relu(dense)

        dense = residual(dense, t4.batch_normalization, t4.relu)

        flat = t4.flat_pool(dense)
        pred = tf.nn.softmax(t4.batch_normalization(lin(flat)))

        loss = t4.categorical_cross_entropy_loss(pred,
                                                 t4.OneHot(1000)(label),
                                                 with_false=False)

        acc = t4.correct_prediction(t4.argmax(pred), label)

        loss_ma = t4.MovingAverage()(loss)
        acc_ma = t4.MovingAverage()(acc)

        self.lr = lr = tf.placeholder(t4.Widget.default.float_dtype)  # learning rate fed in by the caller
        regularizers = tf.reduce_sum(self.losses)  # total of the collected regularization losses

        optimizer = tf.train.MomentumOptimizer(lr, 0.9).minimize(
            loss + regularizers, colocate_gradients_with_ops=True)

        self.train = self._add_slot('train',
                                    outputs=(loss_ma, acc_ma, regularizers),
                                    updates=optimizer)

        self.valid = self._add_slot('valid',
                                    outputs=(loss, acc),
                                    givens={
                                        t4.BatchNorm.default.shift: False,
                                        shift: False
                                    })
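
Note: Example 3 exposes the learning rate as a placeholder (self.lr) so the training loop can feed a scheduled value at every step, and it folds the summed regularization losses into the objective. The self-contained sketch below shows that feed-a-placeholder pattern in plain TF 1.x; the toy variable, loss, and two-stage schedule are made up for illustration.

import tensorflow as tf

# Minimal sketch of driving the learning rate through a placeholder,
# as Example 3 does with self.lr. Toy problem: minimize x^2.
x = tf.get_variable('x', initializer=5.0)
loss = tf.square(x)

lr = tf.placeholder(tf.float32, [])
train_op = tf.train.MomentumOptimizer(lr, 0.9).minimize(loss)

def schedule(step):
    # hypothetical two-stage schedule: drop the rate 10x after 100 steps
    return 0.1 if step < 100 else 0.01

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(200):
        sess.run(train_op, feed_dict={lr: schedule(step)})
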
Example 4
    def _setup(self, data):
        image = data['image']
        label = data['label']

        t4.ResidualConv.default.block = t4.ConvNoBias
        conv = t4.ConvNoBias(3, 64, 7, 7, 2, 2)

        residual = t4.DeepResidualConv(
            (256, 3), (512, 4), (1024, 6), (2048, 3),
            input_channel=64, block=t4.BottleNeckResidualConvOS
        )
        lin = t4.Linear(2048, 90)

        # image = tf.image.resize_images(image, [224, 224])
        mean = tf.Variable(
            tf.constant([128, 128, 128], dtype=self.default_float_dtype),
            trainable=False)
        dense = t4.image.to_dense(image, mean=mean, std=1)

        dense = conv(dense)
        dense = t4.batch_normalization(dense)
        dense = t4.relu(dense)
        dense = t4.max_pool(dense, 3, 3, 2, 2)
        dense = residual(dense, t4.batch_normalization, t4.relu)

        flat = t4.flat_pool(dense)
        pred = tf.nn.softmax(lin(flat))

        loss = t4.categorical_cross_entropy_loss(pred, t4.OneHot(90)(label))
        acc = t4.correct_prediction(t4.argmax(pred), label)

        with tf.device('/gpu:1'):
            optimizer = tf.train.GradientDescentOptimizer(0.01).minimize(loss)

        self.train = self._add_slot(
            'train',
            outputs=(loss, acc),
            updates=optimizer
        )

        self.valid = self._add_slot(
            'valid',
            outputs=(loss, acc),
            givens={
                t4.BatchNorm.default.shift: False
            }
        )
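
Note: in Example 4, t4.image.to_dense receives a per-channel mean variable and std=1, which presumably casts the image to float and applies a subtract-mean / divide-by-std normalization; the stem then applies conv, batch norm, relu, and max pooling. The sketch below mirrors that preprocessing and stem in plain TF 1.x; the assumed to_dense behavior, input size, and padding choices are illustrative, not the library's actual implementation.

import tensorflow as tf

# Assumed behavior of to_dense(image, mean=..., std=...): cast to float,
# subtract a per-channel mean, divide by a scalar std.
image = tf.placeholder(tf.uint8, [None, 224, 224, 3])
mean = tf.Variable(tf.constant([128.0, 128.0, 128.0]), trainable=False)
dense = (tf.cast(image, tf.float32) - mean) / 1.0

# Stem analogous to ConvNoBias(3, 64, 7, 7, 2, 2) -> batch norm -> relu -> max pool.
stem = tf.layers.conv2d(dense, 64, 7, strides=2, padding='same', use_bias=False)
stem = tf.layers.batch_normalization(stem, training=True)
stem = tf.nn.relu(stem)
stem = tf.layers.max_pooling2d(stem, 3, 2, padding='same')
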
Example 5
#!/usr/bin/env python3
# -*- coding: utf8 -*-
# :author: nvagus
# :time: 11/8/17-5:19 PM
# :package: tforce.test

import code

import tensorflow as tf

import tforce as t4

if __name__ == '__main__':
    with tf.Session() as sess:
        x = tf.constant([1., 2., 3.], dtype=t4.Widget.default.float_dtype)
        lin = t4.Linear(3, 5)
        y = lin(x)
        sess.run(tf.global_variables_initializer())
        print(sess.run(y))
        print(
            lin.global_variables,
            lin.trainable_variables,
            lin.weight_variables,
            lin.bias_variables,
            lin.losses,
            lin.summaries,
            sep='\n'
        )
        code.interact(local=locals())