Example #1
def test_luongAttention_local():
    reset_session()
    # Construct the layer
    from rinokeras.core.v1x.common.attention import LuongAttention
    luong_attention_layer = LuongAttention(local=True,
                                           stddev=1.0,
                                           regularizer=None)
    assert luong_attention_layer is not None

    # Encoded values
    encoded_values = np.random.sample((2, 10, 64))
    query_values = np.random.sample((2, 64))
    position_values = np.random.randint(0, 10, (2, ))

    # Get some sample input tensors
    encoder_tensor = tf.constant(encoded_values)  # BS x SEQLEN x ENC_CELL_SIZE
    query_tensor = tf.constant(query_values)  # BS x DEC_CELL_SIZE
    position_tensor = tf.constant(position_values)  # BS

    value = luong_attention_layer(
        (query_tensor, encoder_tensor, position_tensor))

    # Construct the session
    output = run_simple_session(value, None)

    assert output is not None  # Make sure the output is not None
    assert output.shape == (2, 64)  # Make sure the output shape is correct

    # Do regression testing
    check_regression('luong_attention_local_expected_output',
                     output,
                     __file__,
                     'regression_outputs/test_attention_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)
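Note: these excerpts come from rinokeras's test suite and omit the test module's shared setup. A minimal sketch of what each excerpt assumes is already in scope follows; the helper names are taken from the excerpts themselves, and their exact import path is not shown here, so it is left as a comment rather than guessed.

# Shared setup assumed by all of the excerpts on this page (sketch only):
import numpy as np
import tensorflow as tf

# Test helpers such as reset_session, run_simple_session, check_regression,
# check_from_config, assert_not_none, assert_expected_shapes,
# random_sequence_tensor, random_mask_tensor and the _RK_REBUILD_REGRESSION flag
# are provided by the surrounding rinokeras test utilities; their exact import
# path is not part of these excerpts.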
Example #2
def test_contextQueryAttention():
    reset_session()
    # Construct the layer
    from rinokeras.core.v1x.common.attention import ContextQueryAttention
    attention_map = ContextQueryAttention(similarity_metric='trilinear')
    assert attention_map is not None

    # Encoded values
    context_values = np.random.sample((2, 8, 12))
    query_values = np.random.sample((2, 10, 12))
    mask_values = np.random.choice([0, 1], size=(2, 8, 10))

    # Get some sample input tensors
    context_tensor = tf.constant(context_values)
    query_tensor = tf.constant(query_values)
    mask_tensor = tf.constant(mask_values)

    value = attention_map(inputs=(context_tensor, query_tensor),
                          mask=mask_tensor)

    # Construct the session
    output = run_simple_session(value, None)

    assert output is not None  # Make sure the output is not None
    assert output.shape == (2, 8, 4 * 12)

    check_regression('context_query_attention_expected_output',
                     output,
                     __file__,
                     'regression_outputs/test_attention_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)
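Note: the 4 * 12 output width asserted above is consistent with a QANet-style context-query attention output, which concatenates the context with its context-to-query summary and two gated interaction terms. A numpy sketch of that shape arithmetic only (not the rinokeras implementation; the c2q/q2c arrays are stand-ins):

import numpy as np

context = np.random.sample((2, 8, 12))
c2q = np.random.sample((2, 8, 12))   # stand-in for the context-to-query summary
q2c = np.random.sample((2, 8, 12))   # stand-in for the query-to-context summary
out = np.concatenate([context, c2q, context * c2q, context * q2c], axis=-1)
assert out.shape == (2, 8, 4 * 12)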
Example #3
def test_scaledDotProductSimilarity():
    reset_session()
    # Construct the layer
    from rinokeras.core.v1x.common.attention import ScaledDotProductSimilarity
    sdp_layer = ScaledDotProductSimilarity()
    assert sdp_layer is not None

    # Encoded values
    query_values = np.random.sample((2, 10, 64))
    context_values = np.random.sample((2, 5, 64))

    # Get some sample input tensors
    query_tensor = tf.constant(query_values)
    context_tensor = tf.constant(context_values)

    value = sdp_layer((context_tensor, query_tensor))

    # Construct the session
    output = run_simple_session(value, None)

    assert output is not None  # Make sure the output is not None
    assert output.shape == (2, 5, 10)  # Make sure the output shape is correct

    # Do regression testing
    check_regression('scaled_dot_product_similarity_expected_output',
                     output,
                     __file__,
                     'regression_outputs/test_attention_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)
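Note: the (2, 5, 10) shape asserted above follows from the conventional scaled dot-product similarity, scores = A @ B^T / sqrt(d), applied with the context as A and the query as B. A standalone numpy sketch of that arithmetic (not the rinokeras code):

import numpy as np

context = np.random.sample((2, 5, 64))
query = np.random.sample((2, 10, 64))
scores = context @ query.transpose(0, 2, 1) / np.sqrt(64)
assert scores.shape == (2, 5, 10)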
Example #4
def test_multiHeadAttentionMap():
    reset_session()
    # Construct the layer
    from rinokeras.core.v1x.common.attention import MultiHeadAttentionMap
    from rinokeras.core.v1x.common.attention import ScaledDotProductSimilarity
    sdp = ScaledDotProductSimilarity()
    attention_map = MultiHeadAttentionMap(similarity_metric=sdp,
                                          n_heads=4,
                                          attention_function=tf.nn.softmax)
    assert attention_map is not None
    assert sdp is not None

    # Encoded values
    query_values = np.random.sample((2, 8, 12))
    key_values = np.random.sample((2, 20, 12))
    value_values = np.random.sample((2, 20, 12))
    mask_values = np.random.choice([0, 1], size=(2, 8, 20))

    # Get some sample input tensors
    query_tensor = tf.constant(query_values)
    key_tensor = tf.constant(key_values)
    value_tensor = tf.constant(value_values)
    mask_tensor = tf.constant(mask_values)

    value = attention_map(inputs=(query_tensor, key_tensor, value_tensor),
                          mask=mask_tensor,
                          return_attention_weights=True)

    # Construct the session
    output = run_simple_session(value, None)

    assert output[0] is not None  # Make sure the attended output is not None
    assert output[1] is not None  # Make sure the attention weights are not None
    assert output[0].shape == (2, 8, 12)
    assert output[1].shape == (2, 4, 8, 20)

    masked_vals = np.squeeze(output[1][:, 0, :, :])[np.where(mask_values == 0)]
    assert np.isclose(masked_vals, np.zeros_like(masked_vals)).all()

    check_regression('multihead_attention_map_expected_output',
                     output,
                     __file__,
                     'regression_outputs/test_attention_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)
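Note: the masked_vals check in the example above verifies that attention weight is (near-)zero wherever the mask is zero. A self-contained numpy illustration of that property under a masked softmax (an illustration only, not the rinokeras implementation):

import numpy as np

scores = np.random.sample((2, 8, 20))              # raw similarity scores, BS x Q x K
mask = np.random.choice([0, 1], size=(2, 8, 20))   # 1 = attend, 0 = ignore

masked_scores = np.where(mask == 1, scores, -1e9)  # push masked logits toward -inf
weights = np.exp(masked_scores)
weights = weights / weights.sum(axis=-1, keepdims=True)  # softmax over the key axis

masked_vals = weights[np.where(mask == 0)]
assert np.isclose(masked_vals, np.zeros_like(masked_vals)).all()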
Example #5
def test_convert_to_attention_mask_2():
    reset_session()
    # Import the utility under test
    from rinokeras.core.v1x.utils import convert_to_attention_mask
    # Encoded values
    input_tensor, _ = random_sequence_tensor(2, 32, 128)
    input_mask, _ = random_mask_tensor(2, 32)

    # Get the output of the layer
    value = convert_to_attention_mask(input_tensor, input_mask)
    # Construct the session
    output = run_simple_session(inputs=[value], feed={})
    assert_not_none(output)
    assert_expected_shapes(output, [(2, 32, 32)])
    # Do regression testing
    check_regression('convert_to_attention_mask_2',
                     output,
                     __file__,
                     'regression_outputs/test_utils_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)
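Note: the (2, 32, 32) output shape indicates that convert_to_attention_mask expands a per-position mask into a pairwise sequence mask. One common construction with that shape is an outer product of the position mask with itself; the numpy sketch below shows only that shape logic, not necessarily the exact rinokeras semantics:

import numpy as np

position_mask = np.random.choice([0, 1], size=(2, 32)).astype(np.float32)  # BS x L
attention_mask = position_mask[:, :, None] * position_mask[:, None, :]     # BS x L x L
assert attention_mask.shape == (2, 32, 32)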
Example #6
def test_selfAttention():
    reset_session()
    # Construct the layer
    from rinokeras.core.v1x.common.attention import SelfAttention
    attention_map = SelfAttention(similarity_metric='scaled_dot', n_heads=4)
    assert attention_map is not None

    # Encoded values
    sa_values = np.random.sample((4, 64, 12))
    mask_values = np.random.choice([0, 1], size=(4, 64, 64))

    # Get some sample input tensors
    sa_tensor = tf.constant(sa_values)
    mask_tensor = tf.constant(mask_values)

    value = attention_map(inputs=sa_tensor,
                          mask=mask_tensor,
                          return_attention_weights=True)

    # Construct the session
    output = run_simple_session(value, None)

    assert output[0] is not None  # Make sure the attended output is not None
    assert output[1] is not None  # Make sure the attention weights are not None
    assert output[0].shape == (4, 64, 12)
    assert output[1].shape == (4, 4, 64, 64)

    # WARNING: this check is probabilistic and may occasionally fail with an unlucky random mask
    masked_vals = np.squeeze(output[1][:, 0, :, :])[np.where(mask_values == 0)]
    assert np.isclose(masked_vals, np.zeros_like(masked_vals)).all()

    check_regression('self_attention_expected_output',
                     output,
                     __file__,
                     'regression_outputs/test_attention_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)

    # Check that you can instantiate a layer from the config
    check_from_config(SelfAttention, attention_map)
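Note: check_from_config presumably exercises the standard Keras get_config/from_config round-trip. Continuing from the example above, the pattern it is expected to verify looks roughly like this (a sketch, not the helper's actual body):

config = attention_map.get_config()          # serialize the layer's constructor arguments
rebuilt = SelfAttention.from_config(config)  # rebuild an equivalent layer from the config
assert rebuilt.get_config() == config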
Example #7
def test_attentionMap():
    reset_session()
    # Construct the layer
    from rinokeras.core.v1x.common.attention import AttentionMap
    from rinokeras.core.v1x.common.attention import ScaledDotProductSimilarity
    sdp = ScaledDotProductSimilarity()
    attention_map = AttentionMap(similarity_metric=sdp,
                                 attention_function=tf.nn.softmax)
    assert attention_map is not None
    assert sdp is not None

    # Encoded values
    query_values = np.random.sample((2, 8, 12))
    key_values = np.random.sample((2, 20, 12))
    value_values = np.random.sample((2, 20, 12))
    mask_values = np.random.choice([0, 1], size=(2, 8, 20))

    # Get some sample input tensors
    query_tensor = tf.constant(query_values)
    key_tensor = tf.constant(key_values)
    value_tensor = tf.constant(value_values)
    mask_tensor = tf.constant(mask_values)

    value = attention_map(inputs=(query_tensor, key_tensor, value_tensor),
                          mask=mask_tensor)

    # Construct the session
    output = run_simple_session(value, None)

    assert output[0] is not None  # Make sure the attended output is not None
    assert output[1] is not None  # Make sure the attention weights are not None
    assert output[0].shape == (2, 8, 12)
    assert output[1].shape == (2, 8, 20)

    check_regression('attention_map_expected_output',
                     output,
                     __file__,
                     'regression_outputs/test_attention_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)
Example #8
def test_attentionQKVProjection():
    reset_session()
    # Construct the layer
    from rinokeras.core.v1x.common.attention import AttentionQKVProjection
    attention_qkv_projection = AttentionQKVProjection(key_depth=8,
                                                      value_depth=12)
    assert attention_qkv_projection is not None

    # Encoded values
    query_values = np.random.sample((2, 10, 64))
    key_values = np.random.sample((2, 5, 64))
    value_values = np.random.sample((2, 5, 64))

    # Get some sample input tensors
    query_tensor = tf.constant(query_values)
    key_tensor = tf.constant(key_values)
    value_tensor = tf.constant(value_values)

    value = attention_qkv_projection((query_tensor, key_tensor, value_tensor))

    # Construct the session
    output = run_simple_session(value, None)

    assert output is not None  # Make sure the output is not None
    # Make sure the output shape is correct
    assert output[0].shape == (2, 10, 8)
    # Make sure the output shape is correct
    assert output[1].shape == (2, 5, 8)
    # Make sure the output shape is correct
    assert output[2].shape == (2, 5, 12)

    # Do regression testing
    check_regression('attentionqkv_projection_expected_output',
                     output,
                     __file__,
                     'regression_outputs/test_attention_outputs.json',
                     debug=_RK_REBUILD_REGRESSION)
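Note: the three asserted shapes are consistent with independent linear projections of the query and key to key_depth (8) and of the value to value_depth (12). A plain numpy sketch of that shape arithmetic (not the rinokeras implementation, which presumably uses trainable dense layers):

import numpy as np

W_q = np.random.sample((64, 8))    # query projection to key_depth
W_k = np.random.sample((64, 8))    # key projection to key_depth
W_v = np.random.sample((64, 12))   # value projection to value_depth
assert (np.random.sample((2, 10, 64)) @ W_q).shape == (2, 10, 8)
assert (np.random.sample((2, 5, 64)) @ W_k).shape == (2, 5, 8)
assert (np.random.sample((2, 5, 64)) @ W_v).shape == (2, 5, 12)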