Example #1
from keras import backend as K
from keras import layers, models
from keras.layers import Dense, Lambda


def qnet(observation_space, action_space, net_name, net_size):
    """Build a Q-network (plain or dueling DQN/DRQN) over an Atari state."""
    num_actions = action_space.n
    net_size = int(net_size)
    net_name = net_name.lower()
    # helper defined elsewhere in this module: returns the input tensor,
    # the extracted feature tensor, and the hidden-layer constructor
    state, feature, net = _atari_state_feature_net(observation_space, net_name)

    # dueling or regular dqn/drqn
    if 'dueling' in net_name:
        # separate state-value and advantage streams
        value1 = net(net_size, activation='relu')(feature)
        adv1 = net(net_size, activation='relu')(feature)
        value2 = Dense(1)(value1)
        adv2 = Dense(num_actions)(adv1)
        # Q(s, a) = V(s) + A(s, a) - mean_a A(s, a)
        mean_adv2 = Lambda(lambda x: K.mean(x, axis=1))(adv2)
        ones = K.ones([1, num_actions])
        lambda_exp = lambda x: K.dot(K.expand_dims(x, axis=1), -ones)
        exp_mean_adv2 = Lambda(lambda_exp)(mean_adv2)      # -mean(A), broadcast
        sum_adv = layers.add([exp_mean_adv2, adv2])        # A - mean(A)
        exp_value2 = Lambda(lambda x: K.dot(x, ones))(value2)  # V, broadcast
        q_value = layers.add([exp_value2, sum_adv])
    else:
        hid = net(net_size, activation='relu')(feature)
        q_value = Dense(num_actions)(hid)

    # build model
    return models.Model(inputs=state, outputs=q_value)
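
A minimal usage sketch (hypothetical: it assumes this function sits in a
module that also defines _atari_state_feature_net and that gym is installed;
the environment id and sizes are illustrative):

import gym

env = gym.make('PongNoFrameskip-v4')
# a name containing 'dueling' selects the dueling branch above
model = qnet(env.observation_space, env.action_space,
             net_name='dueling_dqn', net_size=512)
model.summary()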
Example #2
from keras import backend as K
from keras.layers import Activation, Lambda, add, multiply


def spatial_attention(cost_volume):
    """Gate a 5-D cost volume (batch, disparity, H, W, C) with a 2-D mask."""
    feature = 4 * 9
    k = 9
    label = 9
    # collapse the cost volume to a single channel; convbn_3d/convbn are
    # conv + batch-norm helpers defined elsewhere in this repo
    # (feature // 2 keeps the filter count an int under Python 3)
    dres0 = convbn_3d(cost_volume, feature // 2, 3, 1)
    dres0 = Activation('relu')(dres0)
    dres0 = convbn_3d(dres0, 1, 3, 1)
    cost0 = Activation('relu')(dres0)

    # drop the channel axis and move disparity last: (batch, H, W, disparity)
    cost0 = Lambda(lambda x: K.permute_dimensions(K.squeeze(x, -1),
                                                  (0, 2, 3, 1)))(cost0)

    # two separable 1xk / kx1 branches over the spatial dimensions
    cost1 = convbn(cost0, label // 2, (1, k), 1, 1)
    cost1 = Activation('relu')(cost1)
    cost1 = convbn(cost1, 1, (k, 1), 1, 1)
    cost1 = Activation('relu')(cost1)

    cost2 = convbn(cost0, label // 2, (k, 1), 1, 1)
    cost2 = Activation('relu')(cost2)
    cost2 = convbn(cost2, 1, (1, k), 1, 1)
    cost2 = Activation('relu')(cost2)

    # fuse the branches into a sigmoid attention map
    cost = add([cost1, cost2])
    cost = Activation('sigmoid')(cost)

    # broadcast the map back over the disparity and feature axes
    cost = Lambda(lambda y: K.repeat_elements(K.expand_dims(y, 1), 9, 1))(cost)
    cost = Lambda(lambda y: K.repeat_elements(y, feature, 4))(cost)
    return multiply([cost, cost_volume])
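
To exercise spatial_attention on its own, it can be wrapped in a small Model.
A sketch, assuming the convbn/convbn_3d helpers are importable; the cost
volume must carry 9 disparity levels and 36 (= 4 * 9) feature channels to
match the constants above, while the 64x128 spatial size is illustrative:

from keras.layers import Input
from keras.models import Model

cv = Input(shape=(9, 64, 128, 36))  # (disparity, height, width, channels)
model = Model(inputs=cv, outputs=spatial_attention(cv))
model.summary()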
Example #3
from keras.layers import Activation, add


def BasicBlock(input, planes, stride, downsample, dilation):
    """Two-conv residual block; `downsample` is a pre-projected shortcut."""
    conv1 = convbn(input, planes, 3, stride, dilation)  # convbn defined elsewhere
    conv1 = Activation('relu')(conv1)
    conv2 = convbn(conv1, planes, 3, 1, dilation)
    # when the main branch changes shape, add the projected shortcut instead
    if downsample is not None:
        input = downsample

    conv2 = add([conv2, input])
    return conv2
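
A sketch of how the downsample argument is meant to be used: when the block
strides or widens, the caller passes an already-projected shortcut tensor,
here built with the same convbn helper (x is some feature tensor; the sizes
are illustrative):

# 1x1 projection so the shortcut matches the strided main branch
shortcut = convbn(x, 64, 1, 2, 1)
out = BasicBlock(x, planes=64, stride=2, downsample=shortcut, dilation=1)
out = Activation('relu')(out)  # optional post-addition ReLU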
Example #4
from keras.layers import Activation, Conv3D, add


def basic(cost_volume):
    """3-D regularization head: two residual conv blocks, then a 1-channel cost."""
    feature = 2 * 75
    # convbn_3d (conv + batch norm) is defined elsewhere in this repo
    dres0 = convbn_3d(cost_volume, feature, 3, 1)
    dres0 = Activation('relu')(dres0)
    dres0 = convbn_3d(dres0, feature, 3, 1)
    cost0 = Activation('relu')(dres0)

    # first residual block
    dres1 = convbn_3d(cost0, feature, 3, 1)
    dres1 = Activation('relu')(dres1)
    dres1 = convbn_3d(dres1, feature, 3, 1)
    cost0 = add([dres1, cost0])

    # second residual block
    dres4 = convbn_3d(cost0, feature, 3, 1)
    dres4 = Activation('relu')(dres4)
    dres4 = convbn_3d(dres4, feature, 3, 1)
    cost0 = add([dres4, cost0])

    # project to a single-channel cost
    classify = convbn_3d(cost0, feature, 3, 1)
    classify = Activation('relu')(classify)
    cost = Conv3D(1, 3, strides=1, padding='same', data_format='channels_last',
                  use_bias=False)(classify)

    return cost
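
As with the attention block, this head can be tested in isolation; a sketch
with an illustrative 5-D input shape (disparity, height, width, channels),
again assuming convbn_3d is importable:

from keras.layers import Input
from keras.models import Model

cv = Input(shape=(48, 64, 128, 64))
model = Model(inputs=cv, outputs=basic(cv))  # single-channel cost output
model.summary()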
Example #5
from keras.layers import add


def Conv_Block(inpt,
               nb_filter,
               kernel_size,
               strides=(1, 1),
               with_conv_shortcut=False):
    """Residual block built from _BN_ReLU_Conv2d units (defined elsewhere)."""
    x = _BN_ReLU_Conv2d(inpt,
                        nb_filter=nb_filter,
                        kernel_size=kernel_size,
                        strides=strides,
                        padding='same')
    x = _BN_ReLU_Conv2d(x,
                        nb_filter=nb_filter,
                        kernel_size=kernel_size,
                        padding='same')
    if with_conv_shortcut:
        # project the input so the shortcut matches the strided/widened branch
        shortcut = _BN_ReLU_Conv2d(inpt,
                                   nb_filter=nb_filter,
                                   strides=strides,
                                   kernel_size=kernel_size)
        return add([x, shortcut])
    # identity shortcut: valid only when input and output shapes match
    return add([x, inpt])
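
Usage sketch: the identity shortcut only matches shapes when strides stay at
(1, 1) and the channel count is unchanged, so a downsampling block must set
with_conv_shortcut=True (values are illustrative, x is some feature tensor):

# same-shape block: identity shortcut is fine
x = Conv_Block(x, nb_filter=64, kernel_size=(3, 3))
# downsampling block: a projected shortcut is required
x = Conv_Block(x, nb_filter=128, kernel_size=(3, 3),
               strides=(2, 2), with_conv_shortcut=True)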
Example #6
    def _create_model(self,
                      middle_flow_repeats: int = 8,
                      dropout: float = 0.,
                      weight_decay: float = 0.,
                      **kwargs) -> None:
        """
        Craete XCeption model.

        :param middle_flow_repeats: number of middle flow block repeats
        :param dropout: dropout rate of the extracted features
        :param weight_decay: weight decay regularization
        """

        images = tf.placeholder(tf.float32,
                                shape=[None] + self._dataset.shape,
                                name='images')
        labels = tf.placeholder(tf.int64, shape=[None], name='labels')

        regularizer = K.regularizers.l2(weight_decay)

        with tf.variable_scope('model'):
            # entry flow
            net = Conv2D(32, (3, 3),
                         strides=(2, 2),
                         use_bias=False,
                         name='block1_conv1',
                         kernel_regularizer=regularizer)(images)
            net = BatchNormalization(name='block1_conv1_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block1_conv1_act')(net)
            net = Conv2D(64, (3, 3),
                         use_bias=False,
                         name='block1_conv2',
                         kernel_regularizer=regularizer)(net)
            net = BatchNormalization(name='block1_conv2_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block1_conv2_act')(net)

            residual = Conv2D(128, (1, 1),
                              strides=(2, 2),
                              padding='same',
                              use_bias=False,
                              kernel_regularizer=regularizer)(net)
            residual = BatchNormalization()(residual,
                                            training=self.is_training)

            net = SeparableConv2D(128, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block2_sepconv1',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block2_sepconv1_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block2_sepconv2_act')(net)
            net = SeparableConv2D(128, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block2_sepconv2',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block2_sepconv2_bn')(
                net, training=self.is_training)

            net = MaxPooling2D((3, 3),
                               strides=(2, 2),
                               padding='same',
                               name='block2_pool')(net)
            net = layers.add([net, residual])

            residual = Conv2D(256, (1, 1),
                              strides=(2, 2),
                              padding='same',
                              use_bias=False,
                              kernel_regularizer=regularizer)(net)
            residual = BatchNormalization()(residual,
                                            training=self.is_training)

            net = Activation('relu', name='block3_sepconv1_act')(net)
            net = SeparableConv2D(256, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block3_sepconv1',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block3_sepconv1_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block3_sepconv2_act')(net)
            net = SeparableConv2D(256, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block3_sepconv2',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block3_sepconv2_bn')(
                net, training=self.is_training)

            net = MaxPooling2D((3, 3),
                               strides=(2, 2),
                               padding='same',
                               name='block3_pool')(net)
            net = layers.add([net, residual])

            residual = Conv2D(728, (1, 1),
                              strides=(2, 2),
                              padding='same',
                              use_bias=False,
                              kernel_regularizer=regularizer)(net)
            residual = BatchNormalization()(residual,
                                            training=self.is_training)

            net = Activation('relu', name='block4_sepconv1_act')(net)
            net = SeparableConv2D(728, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block4_sepconv1',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block4_sepconv1_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block4_sepconv2_act')(net)
            net = SeparableConv2D(728, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block4_sepconv2',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block4_sepconv2_bn')(
                net, training=self.is_training)

            net = MaxPooling2D((3, 3),
                               strides=(2, 2),
                               padding='same',
                               name='block4_pool')(net)
            net = layers.add([net, residual])

            # middle flow: residual blocks of three separable convolutions
            for i in range(middle_flow_repeats):
                residual = net
                prefix = 'block' + str(i + 5)

                net = Activation('relu', name=prefix + '_sepconv1_act')(net)
                net = SeparableConv2D(728, (3, 3),
                                      padding='same',
                                      use_bias=False,
                                      name=prefix + '_sepconv1',
                                      depthwise_regularizer=regularizer,
                                      pointwise_regularizer=regularizer)(net)
                net = BatchNormalization(name=prefix + '_sepconv1_bn')(
                    net, training=self.is_training)
                net = Activation('relu', name=prefix + '_sepconv2_act')(net)
                net = SeparableConv2D(728, (3, 3),
                                      padding='same',
                                      use_bias=False,
                                      name=prefix + '_sepconv2',
                                      depthwise_regularizer=regularizer,
                                      pointwise_regularizer=regularizer)(net)
                net = BatchNormalization(name=prefix + '_sepconv2_bn')(
                    net, training=self.is_training)
                net = Activation('relu', name=prefix + '_sepconv3_act')(net)
                net = SeparableConv2D(728, (3, 3),
                                      padding='same',
                                      use_bias=False,
                                      name=prefix + '_sepconv3',
                                      depthwise_regularizer=regularizer,
                                      pointwise_regularizer=regularizer)(net)
                net = BatchNormalization(name=prefix + '_sepconv3_bn')(
                    net, training=self.is_training)

                net = layers.add([net, residual])

            # exit flow
            residual = Conv2D(1024, (1, 1),
                              strides=(2, 2),
                              padding='same',
                              use_bias=False,
                              kernel_regularizer=regularizer)(net)
            residual = BatchNormalization()(residual,
                                            training=self.is_training)

            net = Activation('relu', name='block13_sepconv1_act')(net)
            net = SeparableConv2D(728, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block13_sepconv1',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block13_sepconv1_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block13_sepconv2_act')(net)
            net = SeparableConv2D(1024, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block13_sepconv2',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block13_sepconv2_bn')(
                net, training=self.is_training)

            net = MaxPooling2D((3, 3),
                               strides=(2, 2),
                               padding='same',
                               name='block13_pool')(net)
            net = layers.add([net, residual])

            net = SeparableConv2D(1536, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block14_sepconv1',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block14_sepconv1_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block14_sepconv1_act')(net)

            net = SeparableConv2D(2048, (3, 3),
                                  padding='same',
                                  use_bias=False,
                                  name='block14_sepconv2',
                                  depthwise_regularizer=regularizer,
                                  pointwise_regularizer=regularizer)(net)
            net = BatchNormalization(name='block14_sepconv2_bn')(
                net, training=self.is_training)
            net = Activation('relu', name='block14_sepconv2_act')(net)
            logging.info('Output shape: %s', net.shape)

        with tf.variable_scope('classifier'):
            net = GlobalAveragePooling2D(name='avg_pool')(net)
            if dropout > 0:
                net = Dropout(dropout)(net, training=self.is_training)
            logits = Dense(self._dataset.num_classes, activation=None)(net)

        # outputs
        loss = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=labels,
                                                              logits=logits)
        tf.identity(loss, name='loss')
        tf.nn.softmax(logits, 1, name='predictions')
        # name the reduced mean itself so the 'accuracy' tensor is the scalar
        tf.reduce_mean(tf.cast(tf.equal(tf.argmax(logits, 1), labels),
                               tf.float32),
                       name='accuracy')
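
Since the method only adds named tensors to the graph, a caller would drive
it TF1-style through a session. A hedged sketch: the tensor names follow the
name= arguments above, batch_images/batch_labels stand in for real data, and
any is_training placeholder the class defines would need feeding as well:

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    loss_values, preds = sess.run(
        ['loss:0', 'predictions:0'],
        feed_dict={'images:0': batch_images, 'labels:0': batch_labels})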