def fc1(X, dropout=True):
    """First fully-connected block: fc transform -> ReLU, with optional dropout.

    Args:
        X: input tensor fed to the fc layer (shape determined upstream).
        dropout: when True, applies dropout with the keep probability
            configured under netParams['fc1']['keep_prob'].

    Returns:
        The transformed tensor.
    """
    with tf.name_scope('fc1_layer'):
        logging.info('FC_1 .........................')
        out = fc_layers(X, scope_name='fc1')
        logging.info('FC_1: shape %s', str(out.shape))
        # Batch norm for this layer is currently disabled:
        # out = batch_norm(out, out.get_shape().as_list()[-1], axis=[0, 1, 2], scope_name='bn2')
        # logging.info('batch_norm2: shape %s', str(out.shape))
        out = activation(out, type='relu')
        logging.info('RELU_5: shape %s', str(out.shape))
        if dropout:
            # Seeding is intentionally left off here; re-enable via config.seed_arr[10] if needed.
            out = tf.nn.dropout(out, netParams['fc1']['keep_prob'])
    return out
def conv_4(X):
    """Fourth convolutional block: conv -> batch norm -> ReLU -> max-pool.

    Args:
        X: 4-D input tensor in channels-last layout (required by the
           max_pooling2d call below).

    Returns:
        The pooled output tensor.
    """
    with tf.name_scope('conv4_layer'):
        logging.info('CONV_4 .........................')
        X = conv_layer(X, scope_name='conv4')
        logging.info('CONV_4: shape %s', str(X.shape))
        X = batch_norm(X, X.get_shape().as_list()[-1], axis=[0, 1, 2], scope_name='bn4')
        # Fixed log label: previously said 'BN_3' for the bn4 layer.
        logging.info('BN_4: shape %s', str(X.shape))
        X = activation(X, type='relu')
        logging.info('RELU_4: shape %s', str(X.shape))

        X = tf.layers.max_pooling2d(X, pool_size=netParams['conv4']['pool_size'],
                                    padding=netParams['conv4']['pool_pad'],
                                    strides=netParams['conv4']['pool_stride'], data_format='channels_last')
        logging.info('MAXPOOL_4: shape %s', str(X.shape))
        # Post-pool dropout is currently disabled:
        # X = tf.nn.dropout(X, netParams['conv2']['keep_prob'], seed=config.seed_arr[2])

    return X