def test_transformer_decoder():
    """Forward pass, save/restore, regression, and config round-trip for TransformerDecoder."""
    reset_session()

    # Build the decoder with a non-discrete input embedding.
    from rinokeras.core.v1x.models.transformer import TransformerDecoder, TransformerInputEmbedding
    embedding = TransformerInputEmbedding(128, False)
    layer = TransformerDecoder(
        embedding_layer=embedding,
        output_layer=tf.keras.layers.Dense(128),
        n_layers=2,
        n_heads=4,
        d_model=128,
        d_filter=32)
    assert layer is not None

    # Random encoder/decoder inputs plus attention masks for both streams.
    source_tensor, _ = random_tensor((2, 32, 128))
    target_tensor, _ = random_tensor((2, 32, 64))
    source_mask, _ = random_mask_tensor(2, 32)
    target_mask, _ = random_mask_tensor(2, 32)
    source_mask = convert_to_attention_mask(source_tensor, source_mask)
    target_mask = convert_to_attention_mask(target_tensor, target_mask)

    # Run the layer on the (source, target) pair.
    value = layer((source_tensor, target_tensor), mask=(source_mask, target_mask))

    # Exercise save/restore through a throwaway weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value], feed={}, weights=[layer], weights_file=weights_file)
        assert_not_none(output)
        assert_expected_shapes(output, [(2, 32, 128)])
        load_restore_test(
            output=output, inputs=[value], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check against stored outputs, then config round-trip.
        check_regression(
            'transformer_decoder_output', output, __file__,
            'regression_outputs/test_transformer_decoder_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(TransformerDecoder, layer)
def test_transformer_base():
    """End-to-end Transformer (discrete in/out) forward, save/restore, regression, config."""
    reset_session()

    # Construct a small discrete-symbol Transformer.
    from rinokeras.core.v1x.models.transformer import Transformer
    layer = Transformer(
        discrete=True,
        n_symbols_in=16,
        n_symbols_out=16,
        n_layers=6,
        n_heads=4,
        d_model=32,
        d_filter=16)
    assert layer is not None

    # Random symbol sequences and per-sequence length masks.
    source_tensor, _ = random_sequence_tensor(2, 32, 16)
    target_tensor, _ = random_sequence_tensor(2, 32, 16)
    source_mask, _ = random_mask_tensor(2, 32)
    target_mask, _ = random_mask_tensor(2, 32)

    # Forward pass over the (source, target) pair.
    value = layer((source_tensor, target_tensor), mask=(source_mask, target_mask))

    # Save/restore round-trip through a temporary weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value], feed={}, weights=[layer], weights_file=weights_file)
        assert_not_none(output)
        assert_expected_shapes(output, [(2, 32, 16)])
        load_restore_test(
            output=output, inputs=[value], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check, then make sure the layer survives get_config/from_config.
        check_regression(
            'transformer_base', output, __file__,
            'regression_outputs/test_transformer_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(Transformer, layer)
def test_transformer_encoder_masking_with_conv():
    """TransformerEncoder with both an attention mask and a conv sequence mask."""
    reset_session()

    # Encoder with a Dense embedding projecting inputs to d_model.
    from rinokeras.core.v1x.models.transformer import TransformerEncoder
    layer = TransformerEncoder(
        embedding_layer=tf.keras.layers.Dense(64),
        n_layers=2,
        n_heads=4,
        d_model=64,
        d_filter=128)
    assert layer is not None

    # Random input plus two mask flavors: attention mask and conv sequence mask.
    input_tensor, _ = random_tensor((16, 32, 128))
    input_mask, _ = random_mask_tensor(16, 32)
    input_mask = convert_to_attention_mask(input_tensor, input_mask)
    conv_mask, _ = random_mask_tensor(16, 32)
    conv_mask = convert_sequence_length_to_sequence_mask(input_tensor, conv_mask)

    # Forward pass with the (attention, conv) mask pair.
    value = layer(input_tensor, mask=(input_mask, conv_mask))

    # Save/restore round-trip through a temporary weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value], feed={}, weights=[layer], weights_file=weights_file)
        assert_not_none(output)
        assert_expected_shapes(output, [(16, 32, 64)])
        load_restore_test(
            output=output, inputs=[value], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check and config round-trip.
        check_regression(
            'transformer_encoder_output_masking_with_conv', output, __file__,
            'regression_outputs/test_transformer_encoder_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(TransformerEncoder, layer)
def test_qanet_base():
    """QANet forward pass over context/query word and char tensors, plus save/restore."""
    reset_session()

    # Small QANet over 32 characters / 32 word symbols.
    from rinokeras.core.v1x.models.qanet import QANet
    layer = QANet(n_chars=32, n_symbols=32)
    assert layer is not None

    # Word-level sequences plus character-level id tensors for context and query.
    context_tensor, _ = random_sequence_tensor(2, 8, 32)
    query_tensor, _ = random_sequence_tensor(2, 8, 32)
    context_char_tensor = tf.convert_to_tensor(
        np.random.randint(0, 32, (2, 8, 16)))
    query_char_tensor = tf.convert_to_tensor(
        np.random.randint(0, 32, (2, 8, 16)))

    # Forward pass over the full 4-tuple input.
    value = layer(
        (context_tensor, query_tensor, context_char_tensor, query_char_tensor))

    # Save/restore round-trip through a temporary weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value], feed={}, weights=[layer], weights_file=weights_file)
        assert_not_none(output)
        assert_expected_shapes(output, [(2, 8, 128)])
        load_restore_test(
            output=output, inputs=[value], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check and config round-trip.
        check_regression(
            'qanet_base_expected_output', output, __file__,
            'regression_outputs/test_qanet_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(QANet, layer)
def test_transformer_decoder_fast_beam_decode_discrete():
    """TransformerDecoder.fast_beam_decode over a discrete vocabulary.

    Checks the decoded beams and their scores, the save/restore round-trip,
    a regression snapshot, and the get_config/from_config round-trip.
    """
    reset_session()

    # Decoder with a discrete input embedding (256-symbol vocabulary).
    from rinokeras.core.v1x.models.transformer import TransformerDecoder, TransformerInputEmbedding
    tie = TransformerInputEmbedding(128, discrete=True, n_symbols=256)
    layer = TransformerDecoder(
        embedding_layer=tie,
        output_layer=tf.keras.layers.Dense(256),
        n_layers=2,
        n_heads=4,
        d_model=128,
        d_filter=32)
    assert layer is not None

    # Encoded source representation to decode from.
    source_tensor, _ = random_tensor((2, 32, 128))

    # Beam decode: 20 steps, 4 beams per batch element.
    value, scores = layer.fast_beam_decode(
        source_tensor, 20, batch_size=2, n_beams=4)

    # Save/restore round-trip through a temporary weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value, scores], feed={}, weights=[layer],
            weights_file=weights_file)
        assert_not_none(output)
        # (batch, n_beams, max_seq_len) sequences and (batch, n_beams) scores.
        assert_expected_shapes(output, [(2, 4, 20), (2, 4)])
        # BUG FIX: the session above fetches both `value` and `scores`, and
        # `output` has two entries — the restore test must re-fetch the same
        # pair (previously only `value` was passed, unlike every sibling test).
        load_restore_test(
            output=output, inputs=[value, scores], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check and config round-trip.
        check_regression(
            'transformer_decoder_fast_beam_decode', output, __file__,
            'regression_outputs/test_transformer_decoder_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(TransformerDecoder, layer)
def test_transformer_multi_attention():
    """TransformerMultiAttention over an (encoder, decoder) tensor pair."""
    reset_session()

    # Multi-head cross-attention block with 4 heads.
    from rinokeras.core.v1x.models.transformer.transformer_attention import TransformerMultiAttention
    layer = TransformerMultiAttention(n_heads=4)
    assert layer is not None

    # Random encoder/decoder activations of matching shape.
    encoder_tensor, _ = random_tensor((16, 32, 64))
    decoder_tensor, _ = random_tensor((16, 32, 64))

    # Forward pass: decoder attends over encoder outputs.
    value = layer((encoder_tensor, decoder_tensor))

    # Save/restore round-trip through a temporary weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value], feed={}, weights=[layer], weights_file=weights_file)
        assert_not_none(output)
        assert_expected_shapes(output, [(16, 32, 64)])
        load_restore_test(
            output=output, inputs=[value], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check and config round-trip.
        check_regression(
            'transformer_multi_attention_expected_output', output, __file__,
            'regression_outputs/test_transformer_attention_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(TransformerMultiAttention, layer)
def test_transformer_input_embedding_non_discrete():
    """TransformerInputEmbedding in continuous (non-discrete) mode."""
    reset_session()

    # Embedding that projects continuous features to a 128-dim space.
    from rinokeras.core.v1x.models.transformer import TransformerInputEmbedding
    layer = TransformerInputEmbedding(embed_size=128, discrete=False)
    assert layer is not None

    # Random continuous input features.
    input_tensor, _ = random_tensor((16, 32, 64))

    # Forward pass through the embedding layer.
    value = layer(input_tensor)

    # Save/restore round-trip through a temporary weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value], feed={}, weights=[layer], weights_file=weights_file)
        assert_not_none(output)
        assert_expected_shapes(output, [(16, 32, 128)])
        load_restore_test(
            output=output, inputs=[value], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check and config round-trip.
        check_regression(
            'transformer_input_embedding_non_discrete_expected_output', output,
            __file__,
            'regression_outputs/test_transformer_embedding_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(TransformerInputEmbedding, layer)
def test_qanet_encoder_no_mask():
    """QANetEncoderBlock forward pass with no mask supplied."""
    reset_session()

    # Encoder block: 2 convolutions, 4 attention heads, 128-dim throughout.
    from rinokeras.core.v1x.models.qanet import QANetEncoderBlock
    layer = QANetEncoderBlock(
        n_conv=2, n_heads=4, filter_size=128, hidden_size=128)
    assert layer is not None

    # Random input activations; no mask is passed.
    input_tensor, _ = random_tensor((2, 8, 128))

    # Forward pass.
    value = layer(input_tensor)

    # Save/restore round-trip through a temporary weights file.
    with tempfile.TemporaryFile() as weights_file:
        output = run_simple_session_save_weights(
            inputs=[value], feed={}, weights=[layer], weights_file=weights_file)
        assert_not_none(output)
        assert_expected_shapes(output, [(2, 8, 128)])
        load_restore_test(
            output=output, inputs=[value], feed={},
            weights=[layer], weights_file=weights_file)

        # Regression check and config round-trip.
        check_regression(
            'qanet_encoder_no_mask_expected_output', output, __file__,
            'regression_outputs/test_qanet_encoder_outputs.json',
            debug=_RK_REBUILD_REGRESSION)
        from_config_test(QANetEncoderBlock, layer)