Example #1
    def __init__(self, nfm, first=False, strides=1, batch_norm=False):

        self.trunk = None
        self.side_path = None
        # Bottleneck main path: 1x1 reduce -> 3x3 -> 1x1 expand back to nfm * 4
        main_path = [
            Convolution(
                **conv_params(1, nfm, strides=strides, batch_norm=batch_norm)),
            Convolution(**conv_params(3, nfm, batch_norm=batch_norm)),
            Convolution(
                **conv_params(1, nfm * 4, relu=False, batch_norm=False))
        ]

        if first or strides == 2:
            # Projection shortcut whenever the residual's shape changes
            self.side_path = Convolution(**conv_params(
                1, nfm * 4, strides=strides, relu=False, batch_norm=False))
        else:
            # Identity shortcut: fold the pre-activation into the main path
            if batch_norm:
                main_path = [BatchNorm(), Activation(Rectlin())] + main_path
            else:
                main_path = [Activation(Rectlin())] + main_path

        if strides == 2:
            # When downsampling, both paths share the pre-activation via the trunk
            if batch_norm:
                self.trunk = Sequential([BatchNorm(), Activation(Rectlin())])
            else:
                self.trunk = Sequential([Activation(Rectlin())])

        self.main_path = Sequential(main_path)
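
All of the examples here rely on a conv_params helper that the listing does not show. A minimal sketch of what it plausibly returns is below; the keyword names mirror the explicit Convolution call in Example #5, but the name of the filter-shape argument and the padding rule are assumptions, not taken from the source.

def conv_params(fil_size, num_fils, strides=1, relu=True, batch_norm=True):
    # Packs keyword arguments for Convolution: square filter, Kaiming init,
    # optional Rectlin activation and batch normalization (assumed layout).
    return dict(filter_shape=(fil_size, fil_size, num_fils),
                filter_init=KaimingInit(),
                strides=strides,
                padding=(1 if fil_size > 1 else 0),
                batch_norm=batch_norm,
                activation=(Rectlin() if relu else None))
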
Example #2
    def __init__(self,
                 inputs,
                 stage_depth,
                 batch_norm=True,
                 activation=True,
                 preprocess=True):
        nfms = [
            2**(stage + 4) for stage in sorted(list(range(3)) * stage_depth)
        ]
        strides = [
            1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])
        ]
        layers = []
        if preprocess:
            layers.append(Preprocess(functor=cifar_mean_subtract))
        parallel_axis = inputs['image'].axes.batch_axes()
        with ng.metadata(device_id=('1', '2'), parallel=parallel_axis[0]):
            layers.append(
                Convolution(**conv_params(3, 16, batch_norm=batch_norm)))
            layers.append(f_module(nfms[0], first=True))

            for nfm, stride in zip(nfms[1:], strides):
                layers.append(f_module(nfm, strides=stride))

        if batch_norm:
            layers.append(BatchNorm())
        if activation:
            layers.append(Activation(Rectlin()))
        layers.append(Pool2D(8, strides=2, op='avg'))
        layers.append(
            Affine(axes=ax.Y,
                   weight_init=KaimingInit(),
                   batch_norm=batch_norm,
                   activation=Softmax()))
        self.layers = layers
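
The two list comprehensions at the top of this constructor lay out the widths and strides of the residual stacks. A worked example with stage_depth=2 (an illustrative value):

stage_depth = 2  # illustrative value
nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * stage_depth)]
# -> [16, 16, 32, 32, 64, 64]: two modules per stage, width doubling each stage
strides = [1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])]
# -> [1, 2, 1, 2, 1]: stride 2 only where the width changes (stage boundaries)
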
Example #3
    def __init__(self, inputs, dataset, stage_depth,
                 batch_norm=False, activation=False, preprocess=False):
        nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * stage_depth)]
        strides = [1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])]
        layers = []
        if preprocess and dataset == 'cifar10':
            layers.append(Preprocess(functor=cifar_mean_subtract))
        layers.append(Convolution(**conv_params(3, 16, batch_norm=batch_norm)))
        layers.append(f_module(nfms[0], first=True, batch_norm=batch_norm))

        for nfm, stride in zip(nfms[1:], strides):
            layers.append(f_module(nfm, strides=stride, batch_norm=batch_norm))

        if batch_norm:
            layers.append(BatchNorm())
        if activation:
            layers.append(Activation(Rectlin()))
        layers.append(Pool2D(8, strides=2, op='avg'))
        if dataset == 'cifar10':
            ax.Y.length = 10
        elif dataset == 'i1k':
            ax.Y.length = 1000
        else:
            raise ValueError("Incorrect dataset provided")
        layers.append(Affine(axes=ax.Y, weight_init=KaimingInit(),
                             batch_norm=batch_norm, activation=Softmax()))
        super(mini_residual_network, self).__init__(layers=layers)
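
A hypothetical instantiation of this class; the inputs placeholder dict and how it is built are assumptions, since the data pipeline is not shown in the source.

# Hypothetical usage; `inputs` is assumed to be the dict of input placeholders
# produced by whichever data pipeline these examples are paired with.
model = mini_residual_network(inputs, dataset='cifar10', stage_depth=1,
                              batch_norm=True, activation=True, preprocess=True)
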
Example #4

    def __init__(self, stage_depth):
        nfms = [2**(stage + 4) for stage in sorted(list(range(3)) * stage_depth)]
        print(nfms)
        strides = [1 if cur == prev else 2 for cur, prev in zip(nfms[1:], nfms[:-1])]

        layers = [Preprocess(functor=cifar_mean_subtract),
                  Convolution(**conv_params(3, 16)),
                  f_module(nfms[0], first=True)]

        for nfm, stride in zip(nfms[1:], strides):
            layers.append(f_module(nfm, strides=stride))

        layers.append(BatchNorm())
        layers.append(Activation(Rectlin()))
        layers.append(Pooling((8, 8), pool_type='avg'))
        layers.append(Affine(axes=ax.Y,
                             weight_init=KaimingInit(),
                             activation=Softmax()))
        super(residual_network, self).__init__(layers=layers)
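
The CIFAR-10 examples all start with Preprocess(functor=cifar_mean_subtract). A minimal sketch of such a functor, assuming the image arrives with channels in the last dimension; the per-channel means are illustrative values, not the authors'.

import numpy as np

# Illustrative CIFAR-10 per-channel means (assumed), channels-last layout.
CIFAR10_CHANNEL_MEAN = np.array([125.3, 123.0, 113.9], dtype=np.float32)

def cifar_mean_subtract(image):
    # Subtract the per-channel mean and rescale into roughly [-0.5, 0.5].
    return (image - CIFAR10_CHANNEL_MEAN) / 255.0
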
Example #5

 def __init__(self, net_type, resnet_size, bottleneck, num_resnet_mods):
     # For CIFAR10 dataset
     if net_type == 'cifar10':
         # Number of Filters
         num_fils = [16, 32, 64]
         # Network Layers
         layers = [
             # Subtracting mean as suggested in paper
             Preprocess(functor=cifar10_mean_subtract),
             # First Conv with 3x3 and stride=1
             Convolution(**conv_params(3, 16))
         ]
         first_resmod = True  # Indicates the first residual module
         # Loop 3 times for each filter.
         for fil in range(3):
             # Lay out n residual modules so that we have 2n layers.
             for resmods in range(num_resnet_mods):
                 if (resmods == 0):
                     if (first_resmod):
                         # Strides=1 and Convolution side path
                         main_path, side_path = self.get_mp_sp(
                             num_fils[fil], net_type, direct=False)
                         layers.append(ResidualModule(main_path, side_path))
                         layers.append(Activation(Rectlin()))
                         first_resmod = False
                     else:
                         # Strides=2 and Convolution side path
                         main_path, side_path = self.get_mp_sp(
                             num_fils[fil],
                             net_type,
                             direct=False,
                             strides=2)
                         layers.append(ResidualModule(main_path, side_path))
                         layers.append(Activation(Rectlin()))
                 else:
                     # Strides=1 and direct connection
                     main_path, side_path = self.get_mp_sp(
                         num_fils[fil], net_type)
                     layers.append(ResidualModule(main_path, side_path))
                     layers.append(Activation(Rectlin()))
         # Do average pooling --> fully connected--> softmax.
         layers.append(Pooling([8, 8], pool_type='avg'))
         layers.append(
             Affine(axes=ax.Y, weight_init=KaimingInit(), batch_norm=True))
         layers.append(Activation(Softmax()))
     # For I1K dataset
     elif net_type == "i1k":
         # Number of Filters
         num_fils = [64, 128, 256, 512]
         # Number of residual modules we need to instantiate at each level
         num_resnet_mods = num_i1k_resmods(resnet_size)
         # Network layers
         layers = [
             # Subtracting mean
             Preprocess(functor=i1k_mean_subtract),
             # First Conv layer
             Convolution((7, 7, 64),
                         strides=2,
                         padding=3,
                         batch_norm=True,
                         activation=Rectlin(),
                         filter_init=KaimingInit()),
             # Max Pooling
             Pooling([3, 3], strides=2, pool_type='max', padding=1)
         ]
         first_resmod = True  # Indicates the first residual module for which strides are 1
         # Loop 4 times for each filter
         for fil in range(4):
             # Lay out residual modules as in num_resnet_mods list
             for resmods in range(num_resnet_mods[fil]):
                 if (resmods == 0):
                     if (first_resmod):
                         # Strides=1 and Convolution Side path
                         main_path, side_path = self.get_mp_sp(
                             num_fils[fil],
                             net_type,
                             direct=False,
                             bottleneck=bottleneck)
                         layers.append(ResidualModule(main_path, side_path))
                         layers.append(Activation(Rectlin()))
                         first_resmod = False
                     else:
                         # Strides=2 and Convolution side path
                         main_path, side_path = self.get_mp_sp(
                             num_fils[fil],
                             net_type,
                             direct=False,
                             bottleneck=bottleneck,
                             strides=2)
                         layers.append(ResidualModule(main_path, side_path))
                         layers.append(Activation(Rectlin()))
                 else:
                     # Strides=1 and direct connection
                     main_path, side_path = self.get_mp_sp(
                         num_fils[fil], net_type, bottleneck=bottleneck)
                     layers.append(ResidualModule(main_path, side_path))
                     layers.append(Activation(Rectlin()))
         # Do average pooling --> fully connected--> softmax.
         layers.append(Pooling([7, 7], pool_type='avg'))
         layers.append(
             Affine(axes=ax.Y, weight_init=KaimingInit(), batch_norm=True))
         layers.append(Activation(Softmax()))
     else:
         raise NameError(
             "Incorrect dataset. Should be --dataset cifar10 or --dataset i1k"
         )
     super(BuildResnet, self).__init__(layers=layers)
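
The ImageNet branch above calls a num_i1k_resmods helper that the listing does not show. A plausible sketch based on the standard ResNet configurations; the exact set of supported depths is an assumption.

def num_i1k_resmods(resnet_size):
    # Standard per-stage block counts for ImageNet ResNets (He et al., 2016);
    # which sizes the original helper accepts is an assumption.
    size_to_mods = {18: [2, 2, 2, 2],
                    34: [3, 4, 6, 3],
                    50: [3, 4, 6, 3],
                    101: [3, 4, 23, 3],
                    152: [3, 8, 36, 3]}
    if resnet_size not in size_to_mods:
        raise ValueError("Unsupported resnet_size: {}".format(resnet_size))
    return size_to_mods[resnet_size]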