Example #1
0
    def __init__(self, num_class, name='VGG', trainable=True, **kwargs):
        """Build a VGG-style backbone (13 conv/BN pairs + final FC).

        Args:
            num_class: number of output classes for the final FC layer.
            name: model name passed to the Keras base class.
            trainable: whether conv/BN/FC variables are trainable.
            **kwargs: forwarded to the Keras base class.
        """
        super(Model, self).__init__(name=name, **kwargs)

        # FIX: the original defined an inner `def kwargs(**kwargs)` helper,
        # shadowing the method's own **kwargs parameter; a plain dict(...)
        # is equivalent and avoids the shadowing.
        # NOTE(review): setattr here mutates the tcl layer *classes*, so the
        # defaults leak into every model instantiated afterwards — confirm
        # this global patching is intended.
        setattr(tcl.Conv2d, 'pre_defined',
                dict(use_biases=False, activation_fn=None, trainable=trainable))
        setattr(tcl.BatchNorm, 'pre_defined',
                dict(activation_fn=None, trainable=trainable))
        setattr(tcl.FC, 'pre_defined', dict(trainable=trainable))

        self.Layers = {}
        # VGG-16 convolutional widths (13 conv layers).
        self.filter_depth = [
            64, 64, 128, 128, 256, 256, 256, 512, 512, 512, 512, 512, 512
        ]
        for i, c in enumerate(self.filter_depth):
            self.Layers['conv%d' % i] = tcl.Conv2d([3, 3],
                                                   c,
                                                   name='conv%d' % i)
            self.Layers['bn%d' % i] = tcl.BatchNorm(name='bn%d' % i)
            # Tag layers so downstream code can distinguish the input conv.
            self.Layers['conv%d' % i].type = 'input' if i == 0 else 'mid'
        self.max_pool = tf.keras.layers.MaxPool2D()
        self.Layers['fc'] = tcl.FC(num_class, name='fc')
        self.Layers['fc'].type = 'VGG_class'
    def __init__(self, num_layer, weight_decay, num_class):
        """Build a CIFAR-style ResNet backbone with L2-regularized layers.

        Args:
            num_layer: architecture selector (only 18 is defined here).
            weight_decay: L2 regularization factor for conv/FC kernels.
            num_class: number of output classes.
        """
        super(Model, self).__init__(num_layer, weight_decay, num_class)

        # NOTE(review): setattr mutates the tcl layer *classes*, affecting
        # every later instantiation — confirm this global patching is wanted.
        setattr(
            tcl.Conv2d, 'pre_defined',
            dict(kernel_regularizer=tf.keras.regularizers.l2(weight_decay),
                 use_biases=False,
                 activation_fn=None))
        setattr(
            tcl.FC, 'pre_defined',
            dict(kernel_regularizer=tf.keras.regularizers.l2(weight_decay),
                 activation_fn=tf.nn.softmax))

        self.resnet_layers = {}
        network_argments = {
            18: {
                'nb_resnet_layers': [2, 2, 2],
                'depth': [16, 32, 64],
                'strides': [1, 2, 2]
            }
        }
        self.net_args = network_argments[num_layer]
        self.net_name = 'ResNet'
        with tf.name_scope(self.net_name):
            self.resnet_layers[self.net_name + '/conv0'] = tcl.Conv2d(
                [3, 3], self.net_args['depth'][0])
            self.resnet_layers[self.net_name + '/bn0'] = tcl.BatchNorm(
                activation_fn=tf.nn.relu)

            in_depth = self.net_args['depth'][0]
            for i, (nb_resnet_layers, depth, strides) in enumerate(
                    zip(self.net_args['nb_resnet_layers'],
                        self.net_args['depth'], self.net_args['strides'])):
                for j in range(nb_resnet_layers):
                    block_name = '/BasicBlock%d.%d' % (i, j)
                    with tf.name_scope(block_name[1:]):
                        nb_name = self.net_name + block_name
                        # BUGFIX: only the FIRST block of a stage downsamples.
                        # The original tested `i != 0`, which reset strides to
                        # 1 for every stage after the first and so disabled
                        # downsampling entirely (cf. the sibling ResNet-56
                        # constructor, which uses `j != 0`).
                        if j != 0:
                            strides = 1
                        self.resnet_layers[nb_name + '/conv0'] = tcl.Conv2d(
                            [3, 3], depth, strides=strides)
                        self.resnet_layers[nb_name + '/bn0'] = tcl.BatchNorm(
                            activation_fn=tf.nn.relu)
                        self.resnet_layers[nb_name + '/conv1'] = tcl.Conv2d(
                            [3, 3], depth)
                        self.resnet_layers[nb_name + '/bn1'] = tcl.BatchNorm()

                        # BUGFIX: create the 1x1 projection shortcut only when
                        # the block changes resolution or channel count. The
                        # original compared against the previous *stage's*
                        # depth, which added a shortcut to every block of
                        # stages > 0. Track the running input depth instead,
                        # matching the sibling ResNet-56 constructor.
                        if strides > 1 or depth != in_depth:
                            self.resnet_layers[nb_name +
                                               '/conv2'] = tcl.Conv2d(
                                                   [1, 1],
                                                   depth,
                                                   strides=strides)
                        in_depth = depth
            self.resnet_layers['FC'] = tcl.FC(num_class)
    def __init__(self, architecture, num_class, name = 'WResNet', trainable = True, **kwargs):
        """Build a Wide-ResNet backbone.

        Args:
            architecture: (depth, widen_factor) pair, e.g. (28, 10).
            num_class: number of output classes for the final FC layer.
            name: model name passed to the Keras base class.
            trainable: whether conv/BN/FC variables are trainable.
            **kwargs: forwarded to the Keras base class.
        """
        super(Model, self).__init__(name = name, **kwargs)
        # Helper packing keyword arguments into a dict.
        # NOTE(review): shadows the method's **kwargs parameter (already
        # consumed by the super() call above).
        def kwargs(**kwargs):
            return kwargs
        # Install shared default arguments on the tcl layer classes; this
        # mutates the classes themselves, so it affects every model built
        # afterwards.
        setattr(tcl.Conv2d, 'pre_defined', kwargs(use_biases = False, activation_fn = None, trainable = trainable))
        setattr(tcl.BatchNorm, 'pre_defined', kwargs(activation_fn = tf.nn.relu, trainable = trainable))
        setattr(tcl.FC, 'pre_defined', kwargs(trainable = trainable))
        
        self.Layers = {}
        depth, widen_factor = architecture
        # Stage widths: stem is 16 channels, then widened 16/32/64 stages.
        self.nChannels = [16, 16*widen_factor, 32*widen_factor, 64*widen_factor]
        self.stride = [1,2,2]
        # Basic blocks per stage — WRN depth formula depth = 6n + 4.
        self.n = (depth-4)//6
        
        self.Layers['conv'] = tcl.Conv2d([3,3], self.nChannels[0], name = 'conv')
        in_planes = self.nChannels[0]
        prev_conv_name = 'conv'
        
        for i, (c, s) in enumerate(zip(self.nChannels[1:], self.stride)):
            for j in range(self.n):
                block_name = 'BasicBlock%d.%d/'%(i,j)
                # Pre-activation WRN: a block is "equal in/out" when its
                # input channel count already matches its width.
                equalInOut = in_planes == c
                in_planes = c
                    
                self.Layers[block_name + 'bn']   = tcl.BatchNorm(name = block_name + 'bn')
                # Alias this block's leading BN under the previous conv's key
                # with 'conv' -> 'bn' — presumably so code looking up a conv
                # can find the BN that normalizes its output; verify against
                # the forward pass.
                self.Layers[prev_conv_name.replace('conv','bn')] = self.Layers[block_name + 'bn']

                if not(equalInOut):
                    # 1x1 projection shortcut when the channel count changes.
                    self.Layers[block_name + 'conv2'] = tcl.Conv2d([1,1], c, strides = s if j == 0 else 1, name = block_name + 'conv2')

                # Main path: conv0 (possibly strided) -> bn0 -> conv1.
                self.Layers[block_name + 'conv0'] = tcl.Conv2d([3,3], c, strides = s if j == 0 else 1, name = block_name + 'conv0')
                self.Layers[block_name + 'bn0']   = tcl.BatchNorm(name = block_name + 'bn0')
                self.Layers[block_name + 'conv1'] = tcl.Conv2d([3,3], c, strides = 1, name = block_name + 'conv1')
                prev_conv_name = block_name + 'conv1'

                if not(equalInOut):
                    # The shortcut conv becomes the "previous conv" whose
                    # following BN the next block will alias.
                    prev_conv_name = block_name + 'conv2'

        # Final BN, also aliased as the BN following the last conv.
        self.Layers['bn_last']= tcl.BatchNorm(name = 'bn_last')
        self.Layers[prev_conv_name.replace('conv','bn')] = self.Layers['bn_last']

        self.Layers['fc'] = tcl.FC(num_class, name = 'fc')
Example #4
0
    def __init__(self,
                 architecture,
                 weight_decay,
                 num_class,
                 name='WResNet',
                 trainable=True,
                 **kwargs):
        """Build a Wide-ResNet backbone with L2-regularized layers.

        Args:
            architecture: (depth, widen_factor) pair, e.g. (28, 10).
            weight_decay: L2 regularization factor for kernels/BN params.
            num_class: number of output classes for the final FC layer.
            name: model name passed to the Keras base class.
            trainable: whether conv/BN/FC variables are trainable.
            **kwargs: forwarded to the Keras base class.
        """
        super(Model, self).__init__(name=name, **kwargs)

        l2 = tf.keras.regularizers.l2
        # Install shared default arguments on the tcl layer classes (this
        # mutates the classes, so it affects later instantiations too).
        setattr(tcl.Conv2d, 'pre_defined',
                dict(kernel_regularizer=l2(weight_decay),
                     use_biases=False,
                     activation_fn=None,
                     trainable=trainable))
        setattr(tcl.BatchNorm, 'pre_defined',
                dict(param_regularizers={
                         'gamma': l2(weight_decay),
                         'beta': l2(weight_decay)
                     },
                     trainable=trainable))
        setattr(tcl.FC, 'pre_defined',
                dict(kernel_regularizer=l2(weight_decay),
                     biases_regularizer=l2(weight_decay),
                     trainable=trainable))

        self.wresnet_layers = {}
        depth, widen_factor = architecture
        # Stem is 16 channels; stages are widened 16/32/64.
        self.nChannels = [16] + [w * widen_factor for w in (16, 32, 64)]
        self.stride = [1, 2, 2]
        # Basic blocks per stage — WRN depth formula depth = 6n + 4.
        self.n = (depth - 4) // 6

        self.wresnet_layers['conv0'] = tcl.Conv2d(
            [3, 3], self.nChannels[0], name='conv0')
        self.wresnet_layers['bn0'] = tcl.BatchNorm(
            activation_fn=tf.nn.relu, name='bn0')
        in_planes = self.nChannels[0]

        for stage, (width, stage_stride) in enumerate(
                zip(self.nChannels[1:], self.stride)):
            for unit in range(self.n):
                block_name = 'BasicBlock%d.%d' % (stage, unit)
                # Only the first unit of a stage downsamples.
                stride = stage_stride if unit == 0 else 1
                with tf.name_scope(block_name):
                    key = block_name + '/'
                    self.wresnet_layers[key + 'bn0'] = tcl.BatchNorm(
                        name='bn0')
                    self.wresnet_layers[key + 'conv1'] = tcl.Conv2d(
                        [3, 3], width, strides=stride, name='conv1')
                    self.wresnet_layers[key + 'bn1'] = tcl.BatchNorm(
                        activation_fn=tf.nn.relu, name='bn1')
                    self.wresnet_layers[key + 'drop'] = tcl.Dropout(0.7)
                    self.wresnet_layers[key + 'conv2'] = tcl.Conv2d(
                        [3, 3], width, strides=1, name='conv2')

                    if in_planes != width:
                        # 1x1 projection shortcut when the width changes.
                        self.wresnet_layers[key + 'conv3'] = tcl.Conv2d(
                            [1, 1], width, strides=stride, name='conv3')
                    in_planes = width
        self.wresnet_layers['bn1'] = tcl.BatchNorm(name='bn1')
        self.wresnet_layers['fc'] = tcl.FC(num_class, name='fc')
Example #5
0
    def __init__(self,
                 num_layers,
                 num_class,
                 name='WResNet',
                 trainable=True,
                 **kwargs):
        """Build a CIFAR-style ResNet backbone (only 56 layers defined).

        Args:
            num_layers: architecture selector; only 56 is defined here.
            num_class: number of output classes for the final FC layer.
            name: model name passed to the Keras base class.
            trainable: whether conv/BN/FC variables are trainable.
            **kwargs: forwarded to the Keras base class.
        """
        super(Model, self).__init__(name=name, **kwargs)

        # Install shared default arguments on the tcl layer classes (this
        # mutates the classes, so it affects later instantiations too).
        setattr(tcl.Conv2d, 'pre_defined',
                dict(use_biases=False, activation_fn=None,
                     trainable=trainable))
        setattr(tcl.BatchNorm, 'pre_defined', dict(trainable=trainable))
        setattr(tcl.FC, 'pre_defined', dict(trainable=trainable))

        self.Layers = {}
        network_argments = {
            56: {
                'blocks': [9, 9, 9],
                'depth': [16, 32, 64],
                'strides': [1, 2, 2]
            }
        }
        self.net_args = network_argments[num_layers]

        # Stem convolution and batch norm.
        self.Layers['conv'] = tcl.Conv2d([3, 3],
                                         self.net_args['depth'][0],
                                         name='conv',
                                         layertype='input')
        self.Layers['bn'] = tcl.BatchNorm(name='bn')

        in_depth = self.net_args['depth'][0]
        stages = zip(self.net_args['blocks'], self.net_args['depth'],
                     self.net_args['strides'])
        for i, (blocks, depth, stage_stride) in enumerate(stages):
            for j in range(blocks):
                name = '/BasicBlock%d.%d/' % (i, j)
                # Only the first block of a stage downsamples.
                strides = stage_stride if j == 0 else 1

                if strides > 1 or depth != in_depth:
                    # 1x1 projection shortcut when resolution or width
                    # changes.
                    self.Layers[name + 'conv2'] = tcl.Conv2d(
                        [1, 1], depth, strides=strides, name=name + 'conv2')
                    self.Layers[name + 'bn2'] = tcl.BatchNorm(
                        name=name + 'bn2')

                # Main path: conv0 (possibly strided) -> bn0 -> conv1 -> bn1.
                self.Layers[name + 'conv0'] = tcl.Conv2d(
                    [3, 3], depth, strides=strides, name=name + 'conv0')
                self.Layers[name + 'bn0'] = tcl.BatchNorm(name=name + 'bn0')
                self.Layers[name + 'conv1'] = tcl.Conv2d(
                    [3, 3], depth, name=name + 'conv1')
                self.Layers[name + 'bn1'] = tcl.BatchNorm(name=name + 'bn1')
                in_depth = depth

        self.Layers['fc'] = tcl.FC(num_class, name='fc')
    def __init__(self, architecture, weight_decay, num_class):
        """Build a name-scoped Wide-ResNet backbone with L2 regularization.

        Args:
            architecture: (depth, widen_factor) pair, e.g. (28, 10).
            weight_decay: L2 regularization factor for kernels/BN params.
            num_class: number of output classes for the final FC layer.
        """
        super(Model, self).__init__(architecture, weight_decay, num_class)

        l2 = tf.keras.regularizers.l2
        # Install shared default arguments on the tcl layer classes (this
        # mutates the classes, so it affects later instantiations too).
        setattr(tcl.Conv2d, 'pre_defined',
                dict(kernel_regularizer=l2(weight_decay),
                     use_biases=False,
                     activation_fn=None))
        setattr(tcl.BatchNorm, 'pre_defined',
                dict(param_regularizers={
                    'gamma': l2(weight_decay),
                    'beta': l2(weight_decay)
                }))
        setattr(tcl.FC, 'pre_defined',
                dict(kernel_regularizer=l2(weight_decay),
                     biases_regularizer=l2(weight_decay)))

        self.wresnet_layers = {}
        depth, widen_factor = architecture
        # Stem is 16 channels; stages are widened 16/32/64.
        self.nChannels = [16] + [w * widen_factor for w in (16, 32, 64)]
        self.stride = [1, 2, 2]
        # Basic blocks per stage — WRN depth formula depth = 6n + 4.
        self.n = (depth - 4) // 6
        self.net_name = 'WResNet'
        # Buffers populated during the forward pass.
        self.feature = []
        self.feature_noact = []
        self.last_embedded = []
        self.logits = []

        with tf.name_scope(self.net_name):
            self.wresnet_layers[self.net_name + '/conv0'] = tcl.Conv2d(
                [3, 3], self.nChannels[0])
            self.wresnet_layers[self.net_name + '/bn0'] = tcl.BatchNorm(
                activation_fn=tf.nn.relu)
            in_planes = self.nChannels[0]

            for stage, (width, stage_stride) in enumerate(
                    zip(self.nChannels[1:], self.stride)):
                for unit in range(self.n):
                    block_name = '/BasicBlock%d.%d' % (stage, unit)
                    # Only the first unit of a stage downsamples.
                    stride = stage_stride if unit == 0 else 1
                    with tf.name_scope(block_name[1:]):
                        key = self.net_name + block_name

                        self.wresnet_layers[key + '/bn0'] = tcl.BatchNorm()
                        self.wresnet_layers[key + '/conv1'] = tcl.Conv2d(
                            [3, 3], width, strides=stride)
                        self.wresnet_layers[key + '/bn1'] = tcl.BatchNorm(
                            activation_fn=tf.nn.relu)
                        self.wresnet_layers[key + '/drop'] = tcl.Dropout(0.7)
                        self.wresnet_layers[key + '/conv2'] = tcl.Conv2d(
                            [3, 3], width, strides=1)

                        if in_planes != width:
                            # 1x1 projection shortcut when the width changes.
                            self.wresnet_layers[key + '/conv3'] = tcl.Conv2d(
                                [1, 1], width, strides=stride)
                        in_planes = width
            self.wresnet_layers[self.net_name + '/bn1'] = tcl.BatchNorm()
            self.wresnet_layers['FC'] = tcl.FC(num_class)
Example #7
0
    def __init__(self,
                 num_layers,
                 num_class,
                 name='ResNet',
                 trainable=True,
                 **kwargs):
        """Build a ResNet backbone: 18/50 (ILSVRC) or 56 (CIFAR) layers.

        Args:
            num_layers: architecture selector — 18, 50, or 56.
            num_class: number of output classes; 1000 selects the
                ImageNet-style 7x7/stride-2 stem with 3x3 max pooling.
            name: model name passed to the Keras base class.
            trainable: whether conv/BN/FC variables are trainable.
            **kwargs: forwarded to the Keras base class.
        """
        super(Model, self).__init__(name=name, **kwargs)

        # Helper packing keyword arguments into a dict.
        # NOTE(review): shadows the method's **kwargs parameter (already
        # consumed by the super() call above).
        def kwargs(**kwargs):
            return kwargs

        # Install shared default arguments on the tcl layer classes; this
        # mutates the classes themselves, so it affects every model built
        # afterwards.
        setattr(
            tcl.Conv2d, 'pre_defined',
            kwargs(use_biases=False, activation_fn=None, trainable=trainable))
        setattr(tcl.BatchNorm, 'pre_defined', kwargs(trainable=trainable))
        setattr(tcl.FC, 'pre_defined', kwargs(trainable=trainable))

        self.num_layers = num_layers

        self.Layers = {}
        # Per-architecture stage configuration: blocks per stage, base
        # channel depth per stage, and stride of each stage's first block.
        network_argments = {
            ## ILSVRC
            18: {
                'blocks': [2, 2, 2, 2],
                'depth': [64, 128, 256, 512],
                'strides': [1, 2, 2, 2]
            },
            50: {
                'blocks': [3, 4, 6, 3],
                'depth': [64, 128, 256, 512],
                'strides': [1, 2, 2, 2]
            },

            ## CIFAR
            56: {
                'blocks': [9, 9, 9],
                'depth': [16, 32, 64],
                'strides': [1, 2, 2]
            },
        }
        self.net_args = network_argments[self.num_layers]

        # Stem: ImageNet-style (7x7 stride 2 + max pool) when num_class is
        # 1000, otherwise a plain 3x3 conv for small images.
        if num_class == 1000:
            self.Layers['conv'] = tcl.Conv2d([7, 7],
                                             self.net_args['depth'][0],
                                             strides=2,
                                             name='conv')
            self.Layers['bn'] = tcl.BatchNorm(name='bn')
            self.maxpool_3x3 = tf.keras.layers.MaxPool2D((3, 3),
                                                         strides=2,
                                                         padding='SAME')

        else:
            self.Layers['conv'] = tcl.Conv2d([3, 3],
                                             self.net_args['depth'][0],
                                             name='conv')
            self.Layers['bn'] = tcl.BatchNorm(name='bn')

        # Channel expansion: 1 for basic blocks (18/56), 4 for bottlenecks.
        self.expansion = 1 if self.num_layers in {18, 56} else 4
        in_depth = self.net_args['depth'][0]
        for i, (nb_resnet_layers, depth, strides) in enumerate(
                zip(self.net_args['blocks'], self.net_args['depth'],
                    self.net_args['strides'])):
            for j in range(nb_resnet_layers):
                name = 'BasicBlock%d.%d/' % (i, j)
                # Only the first block of a stage downsamples.
                if j != 0:
                    strides = 1

                # 1x1 projection shortcut when resolution or (expanded)
                # channel count changes.
                if strides > 1 or depth * self.expansion != in_depth:
                    self.Layers[name + 'conv3'] = tcl.Conv2d(
                        [1, 1],
                        depth * self.expansion,
                        strides=strides,
                        name=name + 'conv3')
                    self.Layers[name + 'bn3'] = tcl.BatchNorm(name=name +
                                                              'bn3')

                if self.num_layers in {18, 56}:
                    # Basic block: two 3x3 convs.
                    self.Layers[name + 'conv1'] = tcl.Conv2d([3, 3],
                                                             depth,
                                                             strides=strides,
                                                             name=name +
                                                             'conv1')
                    self.Layers[name + 'bn1'] = tcl.BatchNorm(name=name +
                                                              'bn1')
                    self.Layers[name + 'conv2'] = tcl.Conv2d(
                        [3, 3], depth * self.expansion, name=name + 'conv2')
                    self.Layers[name + 'bn2'] = tcl.BatchNorm(name=name +
                                                              'bn2')

                else:
                    # Bottleneck block: 1x1 reduce, 3x3 (strided), 1x1 expand.
                    self.Layers[name + 'conv0'] = tcl.Conv2d([1, 1],
                                                             depth,
                                                             name=name +
                                                             'conv0')
                    self.Layers[name + 'bn0'] = tcl.BatchNorm(name=name +
                                                              'bn0')
                    self.Layers[name + 'conv1'] = tcl.Conv2d([3, 3],
                                                             depth,
                                                             strides=strides,
                                                             name=name +
                                                             'conv1')
                    self.Layers[name + 'bn1'] = tcl.BatchNorm(name=name +
                                                              'bn1')
                    self.Layers[name + 'conv2'] = tcl.Conv2d(
                        [1, 1], depth * self.expansion, name=name + 'conv2')
                    self.Layers[name + 'bn2'] = tcl.BatchNorm(name=name +
                                                              'bn2', )
                    #param_initializers = {'gamma': tf.keras.initializers.Zeros()})

                in_depth = depth * self.expansion

        self.Layers['fc'] = tcl.FC(num_class, name='fc')