# Example #1
def network(input, is_training, num_outputs_embedding, data_format='channels_first'):
    """Two-stage cascaded U-Net that predicts and then refines an embedding map.

    Stage 0 predicts an embedding volume from the raw input; its L2-normalized
    output (along axis 1, i.e. the channel axis for channels_first) is
    concatenated to the input and fed to an identically configured second
    U-Net.

    :param input: Input image tensor.
    :param is_training: Training flag forwarded to the U-Nets and convs.
    :param num_outputs_embedding: Number of embedding output channels.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Tuple (embeddings, embeddings_2) from stage 0 and stage 1.
    """
    init = he_initializer
    act = tf.nn.relu
    # Both stages use the same U-Net configuration.
    unet_config = dict(num_filters_base=64,
                       kernel=[3, 3],
                       num_levels=7,
                       data_format=data_format,
                       kernel_initializer=init,
                       activation=act,
                       is_training=is_training,
                       name='unet')
    with tf.variable_scope('unet_0'):
        stage0 = UnetParallel2D(**unet_config)
        embeddings = conv2d(stage0(input, is_training),
                            kernel_size=[3, 3],
                            name='embeddings',
                            filters=num_outputs_embedding,
                            kernel_initializer=init,
                            activation=None,
                            data_format=data_format)
    with tf.variable_scope('unet_1'):
        # Normalize along the channel axis before feeding stage 1.
        embeddings_normalized = tf.nn.l2_normalize(embeddings, axis=1)
        input_concat = concat_channels([input, embeddings_normalized], name='input_concat', data_format=data_format)
        stage1 = UnetParallel2D(**unet_config)
        embeddings_2 = conv2d(stage1(input_concat, is_training),
                              kernel_size=[3, 3],
                              name='embeddings',
                              filters=num_outputs_embedding,
                              kernel_initializer=init,
                              activation=None,
                              data_format=data_format)
    return embeddings, embeddings_2
# Example #2
def network_downsampling(input,
                         num_landmarks,
                         is_training,
                         data_format='channels_first'):
    """Plain convolutional encoder: 3 levels of (conv, conv, avg-pool)
    followed by a 1x1 conv regressing one heatmap channel per landmark.

    :param input: Input image tensor.
    :param num_landmarks: Number of output heatmap channels.
    :param is_training: Training flag forwarded to the conv layers.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Heatmap tensor at 1/4 of the input resolution.
    """
    num_levels = 3
    # Shared settings for the two 5x5 convs at every level.
    conv_kwargs = dict(kernel_size=[5, 5],
                       activation=tf.nn.relu,
                       kernel_initializer=he_initializer,
                       padding='same',
                       data_format=data_format,
                       is_training=is_training)
    node = input
    with tf.variable_scope('downsampling'):
        for level in range(num_levels):
            with tf.variable_scope('level' + str(level)):
                node = conv2d(node, 128, name='conv0', **conv_kwargs)
                node = conv2d(node, 128, name='conv1', **conv_kwargs)
                # Halve resolution after every level except the last.
                if level < num_levels - 1:
                    node = avg_pool2d(node, [2, 2],
                                      name='downsampling',
                                      data_format=data_format)
        # Near-zero init keeps the initial heatmap predictions close to zero.
        heatmaps = conv2d(node,
                          num_landmarks,
                          kernel_size=[1, 1],
                          name='heatmaps',
                          activation=None,
                          kernel_initializer=tf.truncated_normal_initializer(stddev=0.0001),
                          padding='same',
                          data_format=data_format,
                          is_training=is_training)
    return heatmaps
def network_unet(input,
                 num_landmarks,
                 is_training,
                 data_format='channels_first'):
    """Classic U-Net backbone followed by a 1x1 heatmap regression layer.

    :param input: Input image tensor.
    :param num_landmarks: Number of output heatmap channels.
    :param is_training: Training flag forwarded to the network.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Heatmap tensor at input resolution.
    """
    with tf.variable_scope('unet'):
        backbone = UnetClassic2D(128,
                                 5,
                                 activation=tf.nn.relu,
                                 kernel_initializer=he_initializer,
                                 data_format=data_format,
                                 padding='same')
        features = backbone(input, is_training=is_training)
        # 1x1 conv regresses one channel per landmark; the small-stddev
        # initializer keeps initial heatmaps near zero.
        heatmaps = conv2d(features,
                          num_landmarks,
                          kernel_size=[1, 1],
                          name='heatmaps',
                          activation=None,
                          kernel_initializer=tf.truncated_normal_initializer(stddev=0.0001),
                          padding='same',
                          data_format=data_format,
                          is_training=is_training)
    return heatmaps
 def conv(self, node, current_level, postfix, is_training):
     """Apply the standard 3x3 conv block of this network at a given level.

     :param node: Input tensor.
     :param current_level: Level index used to look up the filter count.
     :param postfix: Suffix appended to the layer name ('conv' + postfix).
     :param is_training: Training flag forwarded to conv2d.
     :return: Output tensor of conv2d.
     """
     conv_args = dict(name='conv' + postfix,
                      activation=self.activation,
                      normalization=self.normalization,
                      is_training=is_training,
                      data_format=self.data_format,
                      padding=self.padding)
     return conv2d(node, self.num_filters(current_level), [3] * 2, **conv_args)
 def conv(self, node, current_level, postfix, is_training):
     """Apply this network's 3x3 conv block (no normalization) at a level.

     :param node: Input tensor.
     :param current_level: Level index used to look up the filter count.
     :param postfix: Suffix appended to the layer name ('conv' + postfix).
     :param is_training: Training flag forwarded to conv2d.
     :return: Output tensor of conv2d.
     """
     conv_args = dict(name='conv' + postfix,
                      activation=self.activation,
                      normalization=None,
                      is_training=is_training,
                      data_format=self.data_format,
                      kernel_initializer=self.kernel_initializer,
                      padding=self.padding)
     return conv2d(node, self.num_filters(current_level), [3, 3], **conv_args)
# Example #6
 def w_conv(self, input):
     """Input-to-hidden convolution ('w' weights) of the recurrent cell.

     Uses the SELU initializer and no activation/normalization; the cell
     combines this with the hidden-state path elsewhere.

     :param input: Input tensor.
     :return: Output tensor of conv2d.
     """
     conv_args = dict(data_format=self._data_format,
                      padding=self._padding,
                      normalization=None,
                      activation=None,
                      is_training=self._is_training,
                      kernel_initializer=selu_initializer)
     return conv2d(input, self._filters, self._kernel, name='w', **conv_args)
# Example #7
 def call(self, node, states):
     """Run the recurrent U-Net and project its features to output channels.

     :param node: Input tensor for this time step.
     :param states: Recurrent states (converted to a list for the U-Net).
     :return: Tuple (output tensor, new states tuple).
     """
     features, new_states = self.unet_recurrent(node, list(states), self.is_training)
     output = conv2d(features,
                     self.num_outputs,
                     self.kernel,
                     'output',
                     data_format=self.data_format,
                     padding=self.padding,
                     activation=self.activation,
                     is_training=self.is_training)
     return output, tuple(new_states)
# Example #8
 def call(self, node, states):
     """1x1 input projection -> recurrent U-Net -> 1x1 output projection.

     :param node: Input tensor for this time step.
     :param states: Recurrent states (converted to a list for the U-Net).
     :return: Tuple (output tensor, new states tuple).
     """
     # Settings shared by both 1x1 projection convs.
     shared = dict(data_format=self.data_format,
                   padding=self.padding,
                   kernel_initializer=self.kernel_initializer,
                   is_training=self.is_training)
     projected = conv2d(node,
                        self.num_outputs_first_conv, [1, 1],
                        'input',
                        activation=self.input_activation,
                        **shared)
     features, new_states = self.unet_recurrent(projected, list(states),
                                                self.is_training)
     output = conv2d(features,
                     self.num_outputs, [1, 1],
                     'output',
                     activation=self.output_activation,
                     **shared)
     return output, tuple(new_states)
# Example #9
 def u_conv(self, input):
     """Hidden-to-hidden convolution ('u' weights) of the recurrent cell.

     Zero-initialized and bias-free, so the recurrent contribution starts
     at zero and is learned during training.

     :param input: Hidden-state tensor.
     :return: Output tensor of conv2d.
     """
     conv_args = dict(data_format=self._data_format,
                      padding=self._padding,
                      normalization=self._normalization,
                      activation=None,
                      is_training=self._is_training,
                      kernel_initializer=tf.constant_initializer(0),
                      use_bias=False)
     return conv2d(input, self._filters, self._kernel, name='u', **conv_args)
# Example #10
def network2d(input, is_training, num_outputs_embedding, actual_network, filters=64, levels=5, activation='relu', normalize=True, data_format='channels_first', padding='same'):
    """Two-stage cascaded embedding network over a stack of frames.

    The 5D input (batch, 1, channels*?, height, width — assumed layout,
    TODO confirm against callers) is processed as a 2D image whose channel
    axis holds all frames; each stage predicts num_outputs_embedding
    channels per frame, reshaped to (batch, embedding, frames, h, w).

    :param input: 5D input tensor; input[:, 0, :, :, :] is fed to the nets.
    :param is_training: Training flag forwarded to the networks.
    :param num_outputs_embedding: Embedding channels per frame.
    :param actual_network: U-Net factory class (e.g. UnetParallel2D).
    :param filters: Base filter count of the U-Nets.
    :param levels: Number of U-Net levels.
    :param activation: One of 'selu', 'relu', 'tanh'.
    :param normalize: If True, L2-normalize the embedding outputs.
    :param data_format: 'channels_first' or 'channels_last'.
    :param padding: Padding mode forwarded to all layers.
    :return: Tuple (embeddings, embeddings_2), each 5D.
    :raise ValueError: If activation is not a supported string.
    """
    if activation == 'selu':
        activation = tf.nn.selu
        kernel_initializer = selu_initializer
    elif activation == 'relu':
        activation = tf.nn.relu
        kernel_initializer = he_initializer
    elif activation == 'tanh':
        activation = tf.nn.tanh
        kernel_initializer = selu_initializer
    else:
        # Fail fast: previously an unknown string fell through and caused a
        # NameError on kernel_initializer further below.
        raise ValueError('unsupported activation: ' + str(activation))
    embedding_axis = 1 if data_format == 'channels_first' else 4
    if normalize:
        # 'axis' replaces the deprecated 'dim' parameter of tf.nn.l2_normalize.
        embeddings_activation = lambda x, name: tf.nn.l2_normalize(x, axis=embedding_axis, name=name, epsilon=1e-4)
    else:
        if activation == tf.nn.selu:
            embeddings_activation = tf.nn.selu
        else:
            embeddings_activation = None

    embeddings_normalization = lambda x, name: tf.nn.l2_normalize(x, axis=embedding_axis, name=name, epsilon=1e-4)
    batch_size, channels, (num_frames, height, width) = get_batch_channel_image_size(input, data_format=data_format)

    with tf.variable_scope('unet_0'):
        unet = actual_network(num_filters_base=filters, kernel=[3, 3], num_levels=levels, data_format=data_format, kernel_initializer=kernel_initializer, activation=activation, is_training=is_training, name='unet', padding=padding)
        unet_out = unet(input[:, 0, :, :, :], is_training)
        # All frames' embeddings come out of one 2D conv, then are reshaped.
        embeddings_2d = conv2d(unet_out, kernel_size=[1, 1], name='embeddings', filters=num_outputs_embedding * num_frames, kernel_initializer=kernel_initializer, activation=embeddings_activation, data_format=data_format, is_training=is_training, padding=padding)
        embeddings = tf.reshape(embeddings_2d, [batch_size, num_outputs_embedding, num_frames, height, width])
    with tf.variable_scope('unet_1'):
        # Normalized 5D embeddings are computed but only the (possibly
        # already-activated) 2D embeddings are concatenated to the input.
        normalized_embeddings = embeddings_normalization(embeddings, 'embeddings_normalized')
        normalized_embeddings_2d = tf.reshape(embeddings_2d, [batch_size, num_outputs_embedding * num_frames, height, width])
        input_concat = concat_channels([input[:, 0, :, :, :], normalized_embeddings_2d], name='input_concat', data_format=data_format)
        unet = actual_network(num_filters_base=filters, kernel=[3, 3], num_levels=levels, data_format=data_format, kernel_initializer=kernel_initializer, activation=activation, is_training=is_training, name='unet', padding=padding)
        unet_out = unet(input_concat, is_training)
        embeddings_2_2d = conv2d(unet_out, kernel_size=[1, 1], name='embeddings', filters=num_outputs_embedding * num_frames, kernel_initializer=kernel_initializer, activation=embeddings_activation, data_format=data_format, is_training=is_training, padding=padding)
        embeddings_2 = tf.reshape(embeddings_2_2d, [batch_size, num_outputs_embedding, num_frames, height, width])
    return embeddings, embeddings_2
# Example #11
def network_ud(input, is_training, num_labels=6, data_format='channels_first'):
    """U-Net segmentation network with a linear 1x1 output layer.

    :param input: Input image tensor.
    :param is_training: Training flag forwarded to the network.
    :param num_labels: Number of output label channels.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Logits tensor with num_labels channels (no activation).
    """
    kernel_initializer = he_initializer
    activation = tf.nn.relu
    local_kernel_initializer = he_initializer
    local_activation = None
    # Single assignment; the original assigned padding = 'reflect' twice.
    padding = 'reflect'
    with tf.variable_scope('local'):
        # NOTE(review): 'padding' is not forwarded to the U-Net here, only to
        # the output conv — confirm the U-Net's default padding is intended.
        unet = UnetClassicAvgLinear2D(num_filters_base=16,
                                      num_levels=4,
                                      data_format=data_format,
                                      double_filters_per_level=False,
                                      kernel_initializer=kernel_initializer,
                                      activation=activation)
        prediction = unet(input, is_training=is_training)
        prediction = conv2d(prediction,
                            num_labels, [1, 1],
                            name='output',
                            padding=padding,
                            kernel_initializer=local_kernel_initializer,
                            activation=local_activation,
                            is_training=is_training)
    return prediction
def network_scn_mia(input,
                    num_landmarks,
                    is_training,
                    data_format='channels_first'):
    """SpatialConfiguration-Net variant: local appearance heatmaps are
    multiplied with a spatial-configuration response computed at 16x
    downsampled resolution and upsampled back with cubic interpolation.

    :param input: Input image tensor.
    :param num_landmarks: Number of landmark heatmap channels.
    :param is_training: Training flag forwarded to all conv layers.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Elementwise product of local and spatial heatmaps.
    """
    num_filters_base = 128
    # Leaky ReLU keeps a small gradient for negative pre-activations.
    activation = lambda x, name: tf.nn.leaky_relu(x, name=name, alpha=0.1)
    padding = 'same'
    # Near-zero init keeps initial heatmap outputs close to zero.
    heatmap_layer_kernel_initializer = tf.truncated_normal_initializer(
        stddev=0.0001)
    downsampling_factor = 16
    dim = 2
    # NOTE(review): this conv and 'local_heatmaps' below do not pass
    # 'padding' and rely on conv2d's default — confirm it matches the
    # 'same' padding used by the other layers in this function.
    node = conv2d(input,
                  filters=num_filters_base,
                  kernel_size=[3] * dim,
                  name='conv0',
                  activation=activation,
                  kernel_initializer=he_initializer,
                  data_format=data_format,
                  is_training=is_training)
    # Local-appearance subnetwork (U-Net-like, constant filter count).
    scnet_local = SCNetLocal(num_filters_base=num_filters_base,
                             num_levels=4,
                             double_filters_per_level=False,
                             normalization=None,
                             kernel_initializer=he_initializer,
                             activation=activation,
                             data_format=data_format,
                             padding=padding)
    unet_out = scnet_local(node, is_training)
    local_heatmaps = conv2d(
        unet_out,
        filters=num_landmarks,
        kernel_size=[3] * dim,
        name='local_heatmaps',
        kernel_initializer=heatmap_layer_kernel_initializer,
        activation=None,
        data_format=data_format,
        is_training=is_training)
    # Spatial-configuration branch operates on 16x-downsampled heatmaps,
    # giving large 11x11 kernels a very large effective receptive field.
    downsampled = avg_pool2d(local_heatmaps, [downsampling_factor] * dim,
                             name='local_downsampled',
                             data_format=data_format)
    conv = conv2d(downsampled,
                  filters=num_filters_base,
                  kernel_size=[11] * dim,
                  kernel_initializer=he_initializer,
                  name='sconv0',
                  activation=activation,
                  data_format=data_format,
                  is_training=is_training,
                  padding=padding)
    conv = conv2d(conv,
                  filters=num_filters_base,
                  kernel_size=[11] * dim,
                  kernel_initializer=he_initializer,
                  name='sconv1',
                  activation=activation,
                  data_format=data_format,
                  is_training=is_training,
                  padding=padding)
    conv = conv2d(conv,
                  filters=num_filters_base,
                  kernel_size=[11] * dim,
                  kernel_initializer=he_initializer,
                  name='sconv2',
                  activation=activation,
                  data_format=data_format,
                  is_training=is_training,
                  padding=padding)
    # tanh bounds the spatial response to [-1, 1] before gating.
    conv = conv2d(conv,
                  filters=num_landmarks,
                  kernel_size=[11] * dim,
                  name='spatial_downsampled',
                  kernel_initializer=heatmap_layer_kernel_initializer,
                  activation=tf.nn.tanh,
                  data_format=data_format,
                  is_training=is_training,
                  padding=padding)
    spatial_heatmaps = upsample2d_cubic(conv,
                                        factors=[downsampling_factor] * dim,
                                        name='spatial_heatmaps',
                                        data_format=data_format,
                                        padding='valid_cropped')

    # Multiplicative combination: spatial response gates local responses.
    heatmaps = local_heatmaps * spatial_heatmaps

    return heatmaps
# Example #13
def network_scn(input,
                num_landmarks,
                is_training,
                data_format='channels_first'):
    """SpatialConfiguration-Net: local appearance heatmaps gated by a
    spatial-configuration response predicted per landmark from all OTHER
    landmarks' downsampled heatmaps.

    :param input: Input image tensor.
    :param num_landmarks: Number of landmark heatmap channels.
    :param is_training: Training flag forwarded to all conv layers.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Elementwise product of local and spatial heatmaps.
    """
    num_filters = 128
    local_kernel_size = [5, 5]
    spatial_kernel_size = [15, 15]
    downsampling_factor = 8
    padding = 'same'
    kernel_initializer = he_initializer
    activation = tf.nn.relu
    # Near-zero init keeps initial heatmap outputs close to zero.
    heatmap_initializer = tf.truncated_normal_initializer(stddev=0.0001)
    local_activation = None
    spatial_activation = None
    with tf.variable_scope('local_appearance'):
        # Three 5x5 conv layers extract local appearance features.
        node = conv2d(input,
                      num_filters,
                      kernel_size=local_kernel_size,
                      name='conv1',
                      activation=activation,
                      kernel_initializer=kernel_initializer,
                      padding=padding,
                      data_format=data_format,
                      is_training=is_training)
        node = conv2d(node,
                      num_filters,
                      kernel_size=local_kernel_size,
                      name='conv2',
                      activation=activation,
                      kernel_initializer=kernel_initializer,
                      padding=padding,
                      data_format=data_format,
                      is_training=is_training)
        node = conv2d(node,
                      num_filters,
                      kernel_size=local_kernel_size,
                      name='conv3',
                      activation=activation,
                      kernel_initializer=kernel_initializer,
                      padding=padding,
                      data_format=data_format,
                      is_training=is_training)
        local_heatmaps = conv2d(node,
                                num_landmarks,
                                kernel_size=local_kernel_size,
                                name='local_heatmaps',
                                activation=local_activation,
                                kernel_initializer=heatmap_initializer,
                                padding=padding,
                                data_format=data_format,
                                is_training=is_training)
    with tf.variable_scope('spatial_configuration'):
        # Work at 8x downsampled resolution so 15x15 kernels cover a large
        # effective receptive field.
        local_heatmaps_downsampled = avg_pool2d(
            local_heatmaps, [downsampling_factor, downsampling_factor],
            name='local_heatmaps_downsampled',
            data_format=data_format)
        channel_axis = get_channel_index(local_heatmaps_downsampled,
                                         data_format)
        # One tensor per landmark heatmap channel.
        local_heatmaps_downsampled_split = tf.split(local_heatmaps_downsampled,
                                                    num_landmarks,
                                                    channel_axis)
        spatial_heatmaps_downsampled_split = []
        for i in range(num_landmarks):
            # Landmark i's spatial response is predicted from every OTHER
            # landmark's heatmap, encoding relative landmark configuration.
            local_heatmaps_except_i = tf.concat([
                local_heatmaps_downsampled_split[j]
                for j in range(num_landmarks) if i != j
            ],
                                                name='h_app_except_' + str(i),
                                                axis=channel_axis)
            h_acc = conv2d(local_heatmaps_except_i,
                           1,
                           kernel_size=spatial_kernel_size,
                           name='h_acc_' + str(i),
                           activation=spatial_activation,
                           kernel_initializer=heatmap_initializer,
                           padding=padding,
                           data_format=data_format,
                           is_training=is_training)
            spatial_heatmaps_downsampled_split.append(h_acc)
        spatial_heatmaps_downsampled = tf.concat(
            spatial_heatmaps_downsampled_split,
            name='spatial_heatmaps_downsampled',
            axis=channel_axis)
        spatial_heatmaps = upsample2d_linear(
            spatial_heatmaps_downsampled,
            [downsampling_factor, downsampling_factor],
            name='spatial_prediction',
            padding='valid_cropped',
            data_format=data_format)
    with tf.variable_scope('combination'):
        # Multiplicative gating of local responses by the spatial prior.
        heatmaps = local_heatmaps * spatial_heatmaps
    return heatmaps
# Example #14
def network_scn_mmwhs(input,
                      num_landmarks,
                      is_training,
                      data_format='channels_first'):
    """SCN variant used for MM-WHS: U-Net local predictions (tanh-bounded)
    multiplied with a spatial-configuration response computed at 8x
    downsampled resolution.

    :param input: Input image tensor.
    :param num_landmarks: Number of output prediction channels.
    :param is_training: Training flag forwarded to all layers.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Elementwise product of local and spatial predictions.
    """
    downsampling_factor = 8
    num_filters = 128
    num_levels = 4
    spatial_kernel_size = [5, 5]
    kernel_initializer = he_initializer
    activation = tf.nn.relu
    # Near-zero init keeps initial predictions close to zero.
    local_kernel_initializer = tf.truncated_normal_initializer(stddev=0.0001)
    # tanh bounds the local prediction to [-1, 1].
    local_activation = tf.nn.tanh
    spatial_kernel_initializer = tf.truncated_normal_initializer(stddev=0.0001)
    spatial_activation = None
    padding = 'reflect'
    with tf.variable_scope('unet'):
        unet = UnetClassicAvgLinear2D(num_filters,
                                      num_levels,
                                      data_format=data_format,
                                      double_filters_per_level=False,
                                      kernel_initializer=kernel_initializer,
                                      activation=activation,
                                      padding=padding)
        local_prediction = unet(input, is_training=is_training)
        local_prediction = conv2d(local_prediction,
                                  num_landmarks, [1, 1],
                                  name='local_prediction',
                                  padding=padding,
                                  kernel_initializer=local_kernel_initializer,
                                  activation=local_activation,
                                  is_training=is_training)
    with tf.variable_scope('spatial_configuration'):
        # NOTE(review): unlike the other SCN variants, avg_pool2d and
        # upsample2d_linear here are not given data_format and rely on the
        # helpers' defaults — confirm the default matches 'channels_first'.
        local_prediction_pool = avg_pool2d(local_prediction,
                                           [downsampling_factor] * 2,
                                           name='local_prediction_pool')
        scconv = conv2d(local_prediction_pool,
                        num_filters,
                        spatial_kernel_size,
                        name='scconv0',
                        padding=padding,
                        kernel_initializer=kernel_initializer,
                        activation=activation,
                        is_training=is_training)
        scconv = conv2d(scconv,
                        num_filters,
                        spatial_kernel_size,
                        name='scconv1',
                        padding=padding,
                        kernel_initializer=kernel_initializer,
                        activation=activation,
                        is_training=is_training)
        scconv = conv2d(scconv,
                        num_filters,
                        spatial_kernel_size,
                        name='scconv2',
                        padding=padding,
                        kernel_initializer=kernel_initializer,
                        activation=activation,
                        is_training=is_training)
        spatial_prediction_pool = conv2d(
            scconv,
            num_landmarks,
            spatial_kernel_size,
            name='spatial_prediction_pool',
            padding=padding,
            kernel_initializer=spatial_kernel_initializer,
            activation=spatial_activation,
            is_training=is_training)
        spatial_prediction = upsample2d_linear(spatial_prediction_pool,
                                               [downsampling_factor] * 2,
                                               name='spatial_prediction',
                                               padding='valid_cropped')
    with tf.variable_scope('combination'):
        # Multiplicative gating of local predictions by the spatial prior.
        prediction = local_prediction * spatial_prediction
    return prediction
# Example #15
def network_conv(input,
                 num_landmarks,
                 is_training,
                 data_format='channels_first'):
    """Plain fully-convolutional network: six identical 11x11 conv layers
    followed by a 1x1 conv regressing one heatmap channel per landmark.

    The six conv layers were originally written as six copy-pasted calls
    differing only in name; they are generated by a loop here, producing
    the identical layer names conv0..conv5.

    :param input: Input image tensor.
    :param num_landmarks: Number of output heatmap channels.
    :param is_training: Training flag forwarded to the conv layers.
    :param data_format: 'channels_first' or 'channels_last'.
    :return: Heatmap tensor at input resolution.
    """
    num_filters = 128
    kernel_size = [11, 11]
    num_convs = 6
    padding = 'same'
    kernel_initializer = he_initializer
    activation = tf.nn.relu
    # Near-zero init keeps initial heatmap outputs close to zero.
    heatmap_initializer = tf.truncated_normal_initializer(stddev=0.0001)
    heatmap_activation = None
    node = input
    with tf.variable_scope('downsampling'):
        for i in range(num_convs):
            node = conv2d(node,
                          num_filters,
                          kernel_size=kernel_size,
                          name='conv' + str(i),
                          activation=activation,
                          kernel_initializer=kernel_initializer,
                          padding=padding,
                          data_format=data_format,
                          is_training=is_training)
        heatmaps = conv2d(node,
                          num_landmarks,
                          kernel_size=[1, 1],
                          name='heatmaps',
                          activation=heatmap_activation,
                          kernel_initializer=heatmap_initializer,
                          padding=padding,
                          data_format=data_format,
                          is_training=is_training)
    return heatmaps