Example #1
    def __init__(self,
                 num_classes,
                 w_initializer=None,
                 w_regularizer=None,
                 b_initializer=None,
                 b_regularizer=None,
                 acti_func='leaky-relu',
                 name='UNet2D'):
        BaseNet.__init__(self, num_classes=num_classes, name=name)
        self.n_fea = [64, 128, 256, 512, 1024]

        net_params = {
            'padding': 'SAME',
            'with_bias': True,
            'with_bn': False,
            'acti_func': acti_func,
            'w_initializer': w_initializer,
            'b_initializer': b_initializer,
            'w_regularizer': w_regularizer,
            'b_regularizer': b_regularizer
        }

        self.conv_params = {'kernel_size': 3, 'stride': 1}
        self.deconv_params = {'kernel_size': 2, 'stride': 2}
        self.pooling_params = {'kernel_size': 2, 'stride': 2}

        self.conv_params.update(net_params)
        self.deconv_params.update(net_params)
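
Example #1 builds the shared layer settings once and then folds them into each layer-specific dictionary with dict.update. A minimal, standalone sketch of that merging pattern in plain Python (the dictionaries below mirror the example; the printed values are only illustrative):

# Shared settings applied to every layer type, mirroring net_params above.
net_params = {'padding': 'SAME',
              'with_bias': True,
              'with_bn': False,
              'acti_func': 'leaky-relu'}

# Layer-specific geometry; update() merges the shared settings in place,
# so each dict ends up with its own keys plus the shared ones.
conv_params = {'kernel_size': 3, 'stride': 1}
deconv_params = {'kernel_size': 2, 'stride': 2}
conv_params.update(net_params)
deconv_params.update(net_params)

print(conv_params['padding'])   # 'SAME'
print(deconv_params['stride'])  # 2
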
Example #2
    def __init__(self,
                 decay,
                 affine_w_initializer=None,
                 affine_b_initializer=None,
                 disp_w_initializer=None,
                 disp_b_initializer=None,
                 acti_func='relu',
                 interp='linear',
                 boundary='replicate',
                 name='inet-hybrid-pre-warp'):
        """

        :param decay: float, regularisation decay
        :param affine_w_initializer: weight initialisation for affine registration network
        :param affine_b_initializer: bias initialisation for affine registration network
        :param disp_w_initializer: weight initialisation for dense registration network
        :param disp_b_initializer: bias initialisation for dense registration network
        :param acti_func: activation function to use
        :param interp: string, type of interpolation for the resampling [default:linear]
        :param boundary: string, padding mode to deal with image boundary
        :param name: layer name
        """
        BaseNet.__init__(self, name=name)
        self.global_net = INetAffine(decay=decay,
                                     affine_w_initializer=affine_w_initializer,
                                     affine_b_initializer=affine_b_initializer,
                                     acti_func=acti_func,
                                     name='inet-global')
        self.local_net = INetDense(decay=decay,
                                   disp_w_initializer=disp_w_initializer,
                                   disp_b_initializer=disp_b_initializer,
                                   acti_func=acti_func,
                                   name='inet-local')
        self.interp = interp
        self.boundary = boundary
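
Example #2 wires an affine (global) and a dense (local) sub-network together and stores the resampling options. A minimal instantiation sketch, hedged: the import path below is an assumption about where NiftyNet keeps the hybrid networks and is not shown in the example itself:

# Assumed module path; verify against the installed NiftyNet package.
from niftynet.network.interventional_hybrid_net import INetHybridPreWarp

net = INetHybridPreWarp(decay=1e-6,            # L2 weight decay for both sub-networks
                        acti_func='relu',
                        interp='linear',       # resampling interpolation
                        boundary='replicate')  # padding mode at image boundaries
print(net.interp, net.boundary)                # linear replicate
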
Example #3
    def __init__(self,
                 decay=1e-6,
                 affine_w_initializer=None,
                 affine_b_initializer=None,
                 acti_func='relu',
                 name='inet-affine'):
        """
        This network estimates affine transformations from
        a pair of moving and fixed image:

            Hu et al., Label-driven weakly-supervised learning for
            multimodal deformable image registration, arXiv:1711.01666
            https://arxiv.org/abs/1711.01666

        :param decay: float, regularisation decay
        :param affine_w_initializer: weight initialisation for affine registration network
        :param affine_b_initializer: bias initialisation for affine registration network
        :param acti_func: activation function to use
        :param name: layer name
        """

        BaseNet.__init__(self, name=name)

        self.fea = [4, 8, 16, 32, 64]
        self.k_conv = 3
        self.affine_w_initializer = affine_w_initializer
        self.affine_b_initializer = affine_b_initializer
        self.res_param = {
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'acti_func': acti_func}
        self.affine_param = {
            'w_regularizer': regularizers.l2_regularizer(decay),
            'b_regularizer': None}
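
The res_param and affine_param dictionaries attach an L2 weight regulariser scaled by decay. A small sketch of what such a regulariser computes, assuming TensorFlow 1.x with tf.contrib available (which NiftyNet targets); the tensor values are made up for illustration:

import tensorflow as tf
from tensorflow.contrib.layers.python.layers import regularizers

decay = 1e-6
w_regularizer = regularizers.l2_regularizer(decay)

# The regulariser is a callable mapping a weight tensor to a scalar penalty,
# decay * sum(w ** 2) / 2, which the framework adds to the training loss.
weights = tf.constant([[1.0, -2.0], [0.5, 0.0]])
penalty = w_regularizer(weights)

with tf.Session() as sess:
    print(sess.run(penalty))  # approx. 2.625e-06
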
Example #4
    def __init__(self,
                 num_classes,
                 w_initializer=None,
                 w_regularizer=None,
                 b_initializer=None,
                 b_regularizer=None,
                 acti_func='relu',
                 name='UNet2D'):
        BaseNet.__init__(self,
                         num_classes=num_classes,
                         name=name)
        self.n_fea = [64, 128, 256, 512, 1024]

        net_params = {'padding': 'VALID',
                      'with_bias': True,
                      'with_bn': False,
                      'acti_func': acti_func,
                      'w_initializer': w_initializer,
                      'b_initializer': b_initializer,
                      'w_regularizer': w_regularizer,
                      'b_regularizer': b_regularizer}

        self.conv_params = {'kernel_size': 3, 'stride': 1}
        self.deconv_params = {'kernel_size': 2, 'stride': 2}
        self.pooling_params = {'kernel_size': 2, 'stride': 2}

        self.conv_params.update(net_params)
        self.deconv_params.update(net_params)
Example #5
    def __init__(self,
                 num_classes,
                 w_initializer=None,
                 w_regularizer=None,
                 b_initializer=None,
                 b_regularizer=None,
                 acti_func='relu',
                 name='UNet2D'):
        BaseNet.__init__(self,
                         num_classes=num_classes,
                         name=name)
        self.n_fea = [64, 128, 256, 512, 1024]

        net_params = {'padding': 'VALID',
                      'with_bias': True,
                      'feature_normalization': 'batch',
                      'group_size': -1,
                      'acti_func': acti_func,
                      'w_initializer': w_initializer,
                      'b_initializer': b_initializer,
                      'w_regularizer': w_regularizer,
                      'b_regularizer': b_regularizer}

        self.conv_params = {'kernel_size': 3, 'stride': 1}
        self.deconv_params = {'kernel_size': 2, 'stride': 2}
        self.pooling_params = {'kernel_size': 2, 'stride': 2}

        self.conv_params.update(net_params)
        self.deconv_params.update(net_params)
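
Examples #4 and #5 use 'VALID' padding, so every 3x3 convolution trims one pixel from each border, while the 2x2 pooling and deconvolution halve or double the spatial size. A small worked sketch of that arithmetic (plain helper functions written here for illustration, not part of NiftyNet):

def conv_out(size, kernel_size=3, stride=1):
    # Output size of a VALID-padded convolution: no border padding is added.
    return (size - kernel_size) // stride + 1

def pool_out(size, kernel_size=2, stride=2):
    # Output size of a 2x2 pooling (or stride-2 downsampling).
    return (size - kernel_size) // stride + 1

size = 572                         # the classic U-Net input size
size = conv_out(conv_out(size))    # two 3x3 VALID convs: 572 -> 570 -> 568
size = pool_out(size)              # 2x2 pooling: 568 -> 284
print(size)                        # 284

Under the 'SAME' padding of Example #1, the convolutions would instead preserve the spatial size, and only the pooling and deconvolution layers would change it.
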
Example #6
    def __init__(self,
                 decay=1e-6,
                 affine_w_initializer=None,
                 affine_b_initializer=None,
                 acti_func='relu',
                 name='inet-affine'):
        """

        :param decay: float, regularisation decay
        :param affine_w_initializer: weight initialisation for affine registration network
        :param affine_b_initializer: bias initialisation for affine registration network
        :param acti_func: activation function to use
        :param name: layer name
        """

        BaseNet.__init__(self, name=name)

        self.fea = [4, 8, 16, 32, 64]
        self.k_conv = 3
        self.affine_w_initializer = affine_w_initializer
        self.affine_b_initializer = affine_b_initializer
        self.res_param = {
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'acti_func': acti_func
        }
        self.affine_param = {
            'w_regularizer': regularizers.l2_regularizer(decay),
            'b_regularizer': None
        }
Example #7
    def __init__(self,
                 decay=0.0,
                 smoothing=0,
                 disp_w_initializer=None,
                 disp_b_initializer=None,
                 acti_func='relu',
                 multi_scale_fusion=True,
                 name='inet-dense'):
        """

        :param decay: float, regularisation decay
        :param smoothing: float, smoothing factor for dense displacement field
        :param disp_w_initializer: initialisation of the displacement fields
        :param disp_b_initializer: initialisation of the displacement fields
        :param acti_func: activation function to use
        :param multi_scale_fusion: True/False indicating whether to use
            multiscale feature fusion.
        :param name: layer name
        """
        BaseNet.__init__(self, name=name)

        # self.fea = [40, 80, 160, 320, 640]
        # self.fea = [32, 64, 128, 256, 512]
        self.fea = [30, 60, 120, 240, 480]
        # self.fea = [16, 32, 64, 128, 256]
        self.k_conv = 3
        self.multi_scale_fusion = multi_scale_fusion

        self.down_res_param = {
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'acti_func': acti_func}

        self.up_res_param = {
            'acti_func': acti_func,
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'is_residual_upsampling': True,
            'type_string': 'bn_acti_conv'}

        # displacement initialiser & regulariser
        if disp_w_initializer is None:
            # default to a zero-constant weight initialiser for the displacement layer
            disp_w_initializer = tf.constant_initializer(0.0)
            # disp_w_initializer = tf.random_normal_initializer(0, 1e-4)
        if disp_b_initializer is None:
            # default to a zero-constant bias initialiser for the displacement layer
            disp_b_initializer = tf.constant_initializer(0.0)
        self.disp_param = {
            'w_initializer': disp_w_initializer,
            'w_regularizer': regularizers.l2_regularizer(decay),
            'b_initializer': disp_b_initializer,
            'b_regularizer': None}

        if smoothing > 0:
            self.smoothing_func = _smoothing_func(smoothing)
        else:
            self.smoothing_func = None
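
The dense network resolves its optional displacement initialisers inside the constructor, falling back to zero-constant initialisers when the caller passes None. A minimal sketch of that default-argument pattern, assuming TensorFlow 1.x; make_disp_param is a made-up helper name used only for illustration:

import tensorflow as tf

def make_disp_param(disp_w_initializer=None, disp_b_initializer=None):
    # Resolve optional initialisers in the body, so callers may pass None
    # and still get well-defined zero-constant defaults.
    if disp_w_initializer is None:
        disp_w_initializer = tf.constant_initializer(0.0)
    if disp_b_initializer is None:
        disp_b_initializer = tf.constant_initializer(0.0)
    return {'w_initializer': disp_w_initializer,
            'b_initializer': disp_b_initializer,
            'b_regularizer': None}

disp_param = make_disp_param()  # both initialisers default to constant zeros
print(type(disp_param['w_initializer']).__name__)
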
Example #8
    def __init__(self,
                 decay,
                 affine_w_initializer=None,
                 affine_b_initializer=None,
                 disp_w_initializer=None,
                 disp_b_initializer=None,
                 acti_func='relu',
                 interp='linear',
                 boundary='replicate',
                 name='inet-hybrid-pre-warp'):
        """
        Re-implementation of the registration network proposed in:

            Hu et al., Label-driven weakly-supervised learning for
            multimodal deformable image registration, arXiv:1711.01666
            https://arxiv.org/abs/1711.01666

            Hu et al., Weakly-Supervised Convolutional Neural Networks for
            Multimodal Image Registration, Medical Image Analysis (2018)
            https://doi.org/10.1016/j.media.2018.07.002

        see also:
            https://github.com/YipengHu/label-reg

        :param decay: float, regularisation decay
        :param affine_w_initializer: weight initialisation for affine registration network
        :param affine_b_initializer: bias initialisation for affine registration network
        :param disp_w_initializer: weight initialisation for dense registration network
        :param disp_b_initializer: bias initialisation for dense registration network
        :param acti_func: activation function to use
        :param interp: string, type of interpolation for the resampling [default:linear]
        :param boundary: string, padding mode to deal with image boundary
        :param name: layer name
        """
        BaseNet.__init__(self, name=name)
        self.global_net = INetAffine(decay=decay,
                                     affine_w_initializer=affine_w_initializer,
                                     affine_b_initializer=affine_b_initializer,
                                     acti_func=acti_func,
                                     name='inet-global')
        self.local_net = INetDense(decay=decay,
                                   disp_w_initializer=disp_w_initializer,
                                   disp_b_initializer=disp_b_initializer,
                                   acti_func=acti_func,
                                   name='inet-local')
        self.interp = interp
        self.boundary = boundary
Example #9
    def __init__(self,
                 decay=1e-6,
                 affine_w_initializer=None,
                 affine_b_initializer=None,
                 acti_func='relu',
                 name='inet-affine'):
        """
        This network estimates affine transformations from
        a pair of moving and fixed image:

            Hu et al., Label-driven weakly-supervised learning for
            multimodal deformable image registration, arXiv:1711.01666
            https://arxiv.org/abs/1711.01666

            Hu et al., Weakly-Supervised Convolutional Neural Networks for
            Multimodal Image Registration, Medical Image Analysis (2018)
            https://doi.org/10.1016/j.media.2018.07.002

        see also:
            https://github.com/YipengHu/label-reg

        :param decay: float, regularisation decay
        :param affine_w_initializer: weight initialisation for affine registration network
        :param affine_b_initializer: bias initialisation for affine registration network
        :param acti_func: activation function to use
        :param name: layer name

        BaseNet.__init__(self, name=name)

        self.fea = [4, 8, 16, 32, 64]
        self.k_conv = 3
        self.affine_w_initializer = affine_w_initializer
        self.affine_b_initializer = affine_b_initializer
        self.res_param = {
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'acti_func': acti_func}
        self.affine_param = {
            'w_regularizer': regularizers.l2_regularizer(decay),
            'b_regularizer': None}
Example #10
    def __init__(self,
                 decay,
                 affine_w_initializer=None,
                 affine_b_initializer=None,
                 disp_w_initializer=None,
                 disp_b_initializer=None,
                 acti_func='relu',
                 interp='linear',
                 boundary='replicate',
                 name='inet-hybrid-pre-warp'):
        """
        Re-implementation of the registration network proposed in:

            Hu et al., Label-driven weakly-supervised learning for
            multimodal deformable image registration, arXiv:1711.01666
            https://arxiv.org/abs/1711.01666

        :param decay: float, regularisation decay
        :param affine_w_initializer: weight initialisation for affine registration network
        :param affine_b_initializer: bias initialisation for affine registration network
        :param disp_w_initializer: weight initialisation for dense registration network
        :param disp_b_initializer: bias initialisation for dense registration network
        :param acti_func: activation function to use
        :param interp: string, type of interpolation for the resampling [default:linear]
        :param boundary: string, padding mode to deal with image boundary
        :param name: layer name
        """
        BaseNet.__init__(self, name=name)
        self.global_net = INetAffine(decay=decay,
                                     affine_w_initializer=affine_w_initializer,
                                     affine_b_initializer=affine_b_initializer,
                                     acti_func=acti_func,
                                     name='inet-global')
        self.local_net = INetDense(decay=decay,
                                   disp_w_initializer=disp_w_initializer,
                                   disp_b_initializer=disp_b_initializer,
                                   acti_func=acti_func,
                                   name='inet-local')
        self.interp = interp
        self.boundary = boundary
Example #11
    def __init__(self,
                 num_classes,
                 w_initializer=None,
                 w_regularizer=None,
                 b_initializer=None,
                 b_regularizer=None,
                 acti_func='leakyrelu',
                 name='naive_net'):

        BaseNet.__init__(self, num_classes=num_classes, name=name)
        self.n_fea = [512, 512, 2]

        net_params = {
            'padding': 'SAME',
            'with_bias': True,
            'with_bn': False,
            'acti_func': acti_func,
            'w_initializer': w_initializer,
            'b_initializer': b_initializer,
            'w_regularizer': w_regularizer,
            'b_regularizer': b_regularizer
        }
Example #12
 def __init__(self,
              decay,
              affine_w_initializer=None,
              affine_b_initializer=None,
              disp_w_initializer=None,
              disp_b_initializer=None,
              acti_func='relu',
              interp='linear',
              boundary='replicate',
              name='inet-hybrid-two-stream'):
     BaseNet.__init__(self, name=name)
     self.global_net = INetAffine(decay=decay,
                                  affine_w_initializer=affine_w_initializer,
                                  affine_b_initializer=affine_b_initializer,
                                  acti_func=acti_func,
                                  name='inet-global')
     self.local_net = INetDense(decay=decay,
                                disp_w_initializer=disp_w_initializer,
                                disp_b_initializer=disp_b_initializer,
                                acti_func=acti_func,
                                name='inet-local')
     self.interp = interp
     self.boundary = boundary
Example #13
    def __init__(self,
                 decay=1e-6,
                 affine_w_initializer=None,
                 affine_b_initializer=None,
                 acti_func='relu',
                 name='inet-affine'):
        """
        This network estimates affine transformations from
        a pair of moving and fixed image:

            Hu et al., Label-driven weakly-supervised learning for
            multimodal deformable image registration, arXiv:1711.01666
            https://arxiv.org/abs/1711.01666

        :param decay: float, regularisation decay
        :param affine_w_initializer: weight initialisation for affine registration network
        :param affine_b_initializer: bias initialisation for affine registration network
        :param acti_func: activation function to use
        :param name: layer name
        """

        BaseNet.__init__(self, name=name)

        self.fea = [4, 8, 16, 32, 64]
        self.k_conv = 3
        self.affine_w_initializer = affine_w_initializer
        self.affine_b_initializer = affine_b_initializer
        self.res_param = {
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'acti_func': acti_func
        }
        self.affine_param = {
            'w_regularizer': regularizers.l2_regularizer(decay),
            'b_regularizer': None
        }
Example #14
 def __init__(self):
     BaseNet.__init__(self, name='G')
Example #15
    def __init__(self,
                 decay=0.0,
                 smoothing=0,
                 disp_w_initializer=None,
                 disp_b_initializer=None,
                 acti_func='relu',
                 multi_scale_fusion=True,
                 name='inet-dense'):
        """
        The network estimates dense displacement fields from a pair
        of moving and fixed images:

            Hu et al., Label-driven weakly-supervised learning for
            multimodal deformable image registration, arXiv:1711.01666
            https://arxiv.org/abs/1711.01666

            Hu et al., Weakly-Supervised Convolutional Neural Networks for
            Multimodal Image Registration, Medical Image Analysis (2018)
            https://doi.org/10.1016/j.media.2018.07.002

        see also:
            https://github.com/YipengHu/label-reg

        :param decay: float, regularisation decay
        :param smoothing: float, smoothing factor for dense displacement field
        :param disp_w_initializer: initialisation of the displacement fields
        :param disp_b_initializer: initialisation of the displacement fields
        :param acti_func: activation function to use
        :param multi_scale_fusion: True/False indicating whether to use
            multiscale feature fusion.
        :param name: layer name
        """
        BaseNet.__init__(self, name=name)

        # self.fea = [40, 80, 160, 320, 640]
        # self.fea = [32, 64, 128, 256, 512]
        self.fea = [30, 60, 120, 240, 480]
        # self.fea = [16, 32, 64, 128, 256]
        self.k_conv = 3
        self.multi_scale_fusion = multi_scale_fusion

        self.down_res_param = {
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'acti_func': acti_func
        }

        self.up_res_param = {
            'acti_func': acti_func,
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'is_residual_upsampling': True,
            'type_string': 'bn_acti_conv'
        }

        # displacement initialiser & regulariser
        if disp_w_initializer is None:
            # default to a zero-constant weight initialiser for the displacement layer
            disp_w_initializer = tf.constant_initializer(0.0)
            # disp_w_initializer = tf.random_normal_initializer(0, 1e-4)
        if disp_b_initializer is None:
            # default to a zero-constant bias initialiser for the displacement layer
            disp_b_initializer = tf.constant_initializer(0.0)
        self.disp_param = {
            'w_initializer': disp_w_initializer,
            'w_regularizer': regularizers.l2_regularizer(decay),
            'b_initializer': disp_b_initializer,
            'b_regularizer': None
        }

        if smoothing > 0:
            self.smoothing_func = _smoothing_func(smoothing)
        else:
            self.smoothing_func = None
Example #16
 def __init__(self):
     BaseNet.__init__(self, name='G')
Example #17
 def __init__(self):
     BaseNet.__init__(self, name='tinynet')
     self.d_net = DNet()
     self.g_net = GNet()
Example #18
    def __init__(self,
                 decay=0.0,
                 smoothing=0,
                 disp_w_initializer=None,
                 disp_b_initializer=None,
                 acti_func='relu',
                 multi_scale_fusion=True,
                 name='inet-dense'):
        """
        The network estimates dense displacement fields from a pair
        of moving and fixed images:

            Hu et al., Label-driven weakly-supervised learning for
            multimodal deformable image registration, arXiv:1711.01666
            https://arxiv.org/abs/1711.01666

        :param decay: float, regularisation decay
        :param smoothing: float, smoothing factor for dense displacement field
        :param disp_w_initializer: initialisation of the displacement fields
        :param disp_b_initializer: initialisation of the displacement fields
        :param acti_func: activation function to use
        :param multi_scale_fusion: True/False indicating whether to use
            multiscale feature fusion.
        :param name: layer name
        """
        BaseNet.__init__(self, name=name)

        # self.fea = [40, 80, 160, 320, 640]
        # self.fea = [32, 64, 128, 256, 512]
        self.fea = [30, 60, 120, 240, 480]
        # self.fea = [16, 32, 64, 128, 256]
        self.k_conv = 3
        self.multi_scale_fusion = multi_scale_fusion

        self.down_res_param = {
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'acti_func': acti_func}

        self.up_res_param = {
            'acti_func': acti_func,
            'w_initializer': GlorotUniform.get_instance(''),
            'w_regularizer': regularizers.l2_regularizer(decay),
            'is_residual_upsampling': True,
            'type_string': 'bn_acti_conv'}

        # displacement initialiser & regulariser
        if disp_w_initializer is None:
            # default to a zero-constant weight initialiser for the displacement layer
            disp_w_initializer = tf.constant_initializer(0.0)
            # disp_w_initializer = tf.random_normal_initializer(0, 1e-4)
        if disp_b_initializer is None:
            # default to a zero-constant bias initialiser for the displacement layer
            disp_b_initializer = tf.constant_initializer(0.0)
        self.disp_param = {
            'w_initializer': disp_w_initializer,
            'w_regularizer': regularizers.l2_regularizer(decay),
            'b_initializer': disp_b_initializer,
            'b_regularizer': None}

        if smoothing > 0:
            self.smoothing_func = _smoothing_func(smoothing)
        else:
            self.smoothing_func = None
Example #19
 def __init__(self):
     BaseNet.__init__(self, name='tinynet')
     self.d_net = DNet()
     self.g_net = GNet()
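
All of the constructors above follow the same pattern: forward name (and, for the segmentation networks, num_classes) to BaseNet.__init__, then store layer-parameter dictionaries for later use when the network is built. A hypothetical skeleton summarising that pattern; MyNet and its fields are made-up names, and the BaseNet import path is assumed from the NiftyNet package layout:

from niftynet.network.base_net import BaseNet

class MyNet(BaseNet):
    # Hypothetical network illustrating the constructor pattern shown above.
    def __init__(self,
                 num_classes,
                 w_initializer=None,
                 w_regularizer=None,
                 b_initializer=None,
                 b_regularizer=None,
                 acti_func='relu',
                 name='MyNet'):
        BaseNet.__init__(self, num_classes=num_classes, name=name)

        # Shared settings for every layer, merged into layer-specific dicts.
        net_params = {'acti_func': acti_func,
                      'w_initializer': w_initializer,
                      'w_regularizer': w_regularizer,
                      'b_initializer': b_initializer,
                      'b_regularizer': b_regularizer}
        self.conv_params = {'kernel_size': 3, 'stride': 1}
        self.conv_params.update(net_params)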