def inception_mod(inputs, hyperparams: dict, name: str):
    """
    Inception-style module used in the GoogLeNet-like CNN.

    Four parallel branches — a plain 1x1 convolution, a 1x1-then-3x3
    convolution, a 1x1-then-5x5 convolution, and a 3x3 max-pool followed
    by a 1x1 convolution — are concatenated along the channel axis and
    passed through a final tanh.

    :param inputs: input tensor
    :param hyperparams: dictionary of kernel shapes for each branch
    :param name: name used for the variable scope
    """
    with tf.variable_scope(name):
        # Branch 1: plain 1x1 convolution.
        branch_1x1 = Net.conv2d(layer_name='1x1_conv',
                                inputs=inputs,
                                kernel_shape=hyperparams["1x1_conv_kernel"],
                                strides=1,
                                activation_func=tf.nn.tanh,
                                padding='SAME')

        # Branch 2: 1x1 reduction followed by a 3x3 convolution.
        branch_3x3 = Net.conv2d(layer_name='3x3_conv1',
                                inputs=inputs,
                                kernel_shape=hyperparams["3x3_conv_kernel2"],
                                strides=1,
                                activation_func=tf.nn.tanh,
                                padding='SAME',
                                kernel_shape_pre=hyperparams["3x3_conv_kernel1"])

        # Branch 3: 1x1 reduction followed by a 5x5 convolution.
        branch_5x5 = Net.conv2d(layer_name='5x5_conv1',
                                inputs=inputs,
                                kernel_shape=hyperparams["5x5_conv_kernel2"],
                                strides=1,
                                activation_func=tf.nn.tanh,
                                padding='SAME',
                                kernel_shape_pre=hyperparams["5x5_conv_kernel1"])

        # Branch 4: 3x3 max-pool (stride 1) then a 1x1 convolution.
        pooled = tf.nn.max_pool(inputs,
                                ksize=[1, 3, 3, 1],
                                strides=[1, 1, 1, 1],
                                padding='SAME',
                                name="pooling1")
        branch_pool = Net.conv2d(layer_name='pooling1_conv',
                                 inputs=pooled,
                                 kernel_shape=hyperparams["pooling1_conv_kernel"],
                                 strides=1,
                                 activation_func=tf.nn.tanh,
                                 padding='SAME')

        # Stack all branches along the channel (4th) dimension, then squash.
        stacked = tf.concat([branch_1x1, branch_3x3, branch_5x5, branch_pool],
                            axis=3)
        return tf.tanh(stacked)
 def projection_shortcut(inputs):
     """
     Project the shortcut path with a 1x1 convolution so its channel count
     (and, via the stride, its spatial size) matches the block output.

     NOTE(review): `filters_out` and `strides` are free variables taken from
     the enclosing scope, which is not visible here — confirm against the
     surrounding code.
     """
     projection_shape = (1, 1, inputs.shape[-1], filters_out)
     return Net.conv2d(inputs=inputs,
                       kernel_shape=projection_shape,
                       strides=strides,
                       padding='SAME',
                       activation_func=tf.identity,
                       layer_name='projection')
    def _building_block(inputs, filters, training, projection_shortcut,
                        strides, name: str):
        """
        Pre-activation residual building block: (BN -> ReLU -> conv) twice,
        plus an identity or projected shortcut added to the result.

        :param inputs: a tensor of size [batch, height_in, width_in, channels]
        :param filters: number of filters for both convolutions of the block
        :param training: True while training the model; forwarded to batch norm
        :param projection_shortcut: the function used for projection shortcuts
            (typically a 1x1 convolution when downsampling the input), or
            None for an identity shortcut
        :param strides: stride of the first convolution; if greater than 1,
            this block downsamples the input
        :param name: a string name for the tensor output of the block layer
        :return: the output tensor of the block
        """
        with tf.variable_scope(name):
            shortcut = inputs
            x = Net.batch_normalization(inputs, training)
            x = tf.nn.relu(x)

            # The projection (a 1x1 convolution) must consume the already
            # normalized and activated tensor, so it runs after the first
            # batch norm + ReLU.
            if projection_shortcut is not None:
                shortcut = projection_shortcut(x)

            x = Net.conv2d(inputs=x,
                           kernel_shape=(3, 3, x.shape[-1], filters),
                           strides=strides,
                           padding='SAME',
                           activation_func=tf.identity,
                           layer_name='conv1_building_block')

            x = Net.batch_normalization(x, training)
            x = tf.nn.relu(x)
            x = Net.conv2d(inputs=x,
                           kernel_shape=(3, 3, x.shape[-1], filters),
                           strides=1,
                           padding='SAME',
                           activation_func=tf.identity,
                           layer_name='conv2_building_block')
            return x + shortcut