def test_predict_dataframe_with_feature_columns():
    predictor = TensorflowPredictor(model_definition=build_model, model_weights=weights)

    data = pd.DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
    predictions = predictor.predict(data, feature_columns=["A"])

    assert len(predictions) == 2
    assert predictions.to_numpy().flatten().tolist() == [1, 3]

def test_predict_dataframe(use_gpu):
    predictor = TensorflowPredictor(
        model_definition=build_model_multi_input, use_gpu=use_gpu
    )

    data_batch = pd.DataFrame({"A": [0.0, 0.0, 0.0], "B": [1.0, 2.0, 3.0]})
    predictions = predictor.predict(data_batch)

    assert len(predictions) == 3
    assert predictions.to_numpy().flatten().tolist() == [1.0, 2.0, 3.0]

def test_predict_array(use_gpu):
    predictor = TensorflowPredictor(
        model_definition=build_model, model_weights=weights, use_gpu=use_gpu
    )

    data_batch = np.asarray([1, 2, 3])
    predictions = predictor.predict(data_batch)

    assert len(predictions) == 3
    assert predictions.flatten().tolist() == [2, 4, 6]

def test_predict(batch_type):
    predictor = TensorflowPredictor(model_definition=build_model_multi_input)

    raw_batch = pd.DataFrame({"A": [0.0, 0.0, 0.0], "B": [1.0, 2.0, 3.0]})
    data_batch = convert_pandas_to_batch_type(raw_batch, type=TYPE_TO_ENUM[batch_type])
    raw_predictions = predictor.predict(data_batch)
    predictions = convert_batch_type_to_pandas(raw_predictions)

    assert len(predictions) == 3
    assert predictions.to_numpy().flatten().tolist() == [1.0, 2.0, 3.0]

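# `build_model_multi_input` is defined elsewhere in the test module. A minimal
# sketch consistent with the assertions above (column "A" is all zeros, so the
# sum of the two inputs reproduces column "B") might look like this; the exact
# definition in the source may differ.
def build_model_multi_input() -> tf.keras.Model:
    input_a = tf.keras.layers.Input(shape=(1,), name="A")
    input_b = tf.keras.layers.Input(shape=(1,), name="B")
    # Adding the two inputs yields [1.0, 2.0, 3.0] for the test data above.
    output = tf.keras.layers.Add()([input_a, input_b])
    return tf.keras.models.Model(inputs=[input_a, input_b], outputs=output)
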
def test_predict_array_with_preprocessor():
    preprocessor = DummyPreprocessor()
    predictor = TensorflowPredictor(
        model_definition=build_model, preprocessor=preprocessor, model_weights=weights
    )

    data_batch = np.array([[1], [2], [3]])
    predictions = predictor.predict(data_batch)

    assert len(predictions) == 3
    assert predictions.to_numpy().flatten().tolist() == [2, 4, 6]
    assert hasattr(predictor.preprocessor, "_batch_transformed")

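# `DummyPreprocessor` is a test helper defined elsewhere. Judging from the
# assertions (values are doubled and a `_batch_transformed` marker is set), a
# minimal sketch could look like the following; treat the base class and
# overridden method as assumptions.
from ray.data.preprocessor import Preprocessor


class DummyPreprocessor(Preprocessor):
    def transform_batch(self, batch):
        # Mark that the preprocessor ran, then double every value in the batch.
        self._batch_transformed = True
        return batch * 2
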
def test_predict_multi_output(use_gpu):
    predictor = TensorflowPredictor(
        model_definition=build_model_multi_output, use_gpu=use_gpu
    )

    data_batch = np.array([1, 2, 3])
    predictions = predictor.predict(data_batch)

    # Model outputs two tensors.
    assert len(predictions) == 2
    for k, v in predictions.items():
        # Each tensor is of size 3.
        assert len(v) == 3
        assert v.flatten().tolist() == [1, 2, 3]

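# `build_model_multi_output` is likewise defined elsewhere. A minimal sketch
# that satisfies the assertions above (two named outputs, each echoing the
# single input) could be:
def build_model_multi_output() -> tf.keras.Model:
    inp = tf.keras.layers.Input(shape=(1,))
    # Both outputs are the identity of the single input.
    return tf.keras.models.Model(inputs=inp, outputs={"a": inp, "b": inp})
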
def test_predict_array_with_input_shape_unspecified():
    def model_definition():
        return tf.keras.models.Sequential(tf.keras.layers.Lambda(lambda tensor: tensor))

    predictor = TensorflowPredictor(model_definition=model_definition, model_weights=[])

    data_batch = np.array([[1], [2], [3]])
    predictions = predictor.predict(data_batch)

    assert len(predictions) == 3
    assert predictions.to_numpy().flatten().tolist() == [1, 2, 3]

def test_init():
    preprocessor = DummyPreprocessor()
    predictor = TensorflowPredictor(
        model_definition=build_model, preprocessor=preprocessor, model_weights=weights
    )

    checkpoint = {MODEL_KEY: weights, PREPROCESSOR_KEY: preprocessor}
    checkpoint_predictor = TensorflowPredictor.from_checkpoint(
        Checkpoint.from_dict(checkpoint), build_model
    )

    assert checkpoint_predictor.model_definition == predictor.model_definition
    assert checkpoint_predictor.model_weights == predictor.model_weights
    assert checkpoint_predictor.preprocessor == predictor.preprocessor

def test_predict_array_with_preprocessor(use_gpu):
    preprocessor = DummyPreprocessor()
    predictor = TensorflowPredictor(
        model_definition=build_model,
        preprocessor=preprocessor,
        model_weights=weights,
        use_gpu=use_gpu,
    )

    data_batch = np.array([1, 2, 3])
    predictions = predictor.predict(data_batch)

    assert len(predictions) == 3
    assert predictions.flatten().tolist() == [4, 8, 12]

def test_predict_array():
    checkpoint = {MODEL_KEY: weights}
    predictor = TensorflowPredictor.from_checkpoint(
        Checkpoint.from_dict(checkpoint), build_model
    )

    data_batch = np.array([[1], [2], [3]])
    predictions = predictor.predict(data_batch)

    assert len(predictions) == 3
    assert predictions.to_numpy().flatten().tolist() == [1, 2, 3]

def test_keras_callback_e2e():
    epochs = 3
    config = {
        "epochs": epochs,
    }
    trainer = TensorflowTrainer(
        train_loop_per_worker=train_func,
        train_loop_config=config,
        scaling_config=ScalingConfig(num_workers=2),
        datasets={TRAIN_DATASET_KEY: get_dataset()},
    )
    checkpoint = trainer.fit().checkpoint
    checkpoint_dict = checkpoint.to_dict()
    assert MODEL_KEY in checkpoint_dict

    predictor = TensorflowPredictor.from_checkpoint(
        checkpoint, model_definition=build_model
    )

    items = np.random.uniform(0, 1, size=(10, 1))
    predictor.predict(data=items)

def __init__(self):
    self.pred = TensorflowPredictor.from_checkpoint(result.checkpoint, build_model)

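# Only the constructor is shown above; the enclosing class is presumably a
# callable used for batch scoring (e.g. with `Dataset.map_batches`). A
# hypothetical companion method, assuming batches arrive in a format the
# predictor accepts, might be:
def __call__(self, batch):
    # Run inference on one batch with the predictor built in __init__.
    return self.pred.predict(batch)
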
    TensorflowCheckpoint,
    TensorflowPredictor,
)


def build_model() -> tf.keras.Model:
    model = tf.keras.Sequential(
        [
            tf.keras.layers.InputLayer(input_shape=(1,)),
            tf.keras.layers.Dense(1),
        ]
    )
    return model


model = build_model()
checkpoint = TensorflowCheckpoint.from_model(model)
predictor = TensorflowPredictor.from_checkpoint(checkpoint, model_definition=build_model)

data = np.array([1, 2, 3, 4])
predictions = predictor.predict(data)
print(predictions)
# [[-1.6930283]
#  [-3.3860567]
#  [-5.079085 ]
#  [-6.7721133]]
# __use_predictor_end__

# __batch_prediction_start__
import pandas as pd

from ray.train.batch_predictor import BatchPredictor

batch_predictor = BatchPredictor(checkpoint,