Example #1
def run(args):
    """Load the data, train, evaluate, and export the model for serving and
     evaluating.

    Args:
      args: experiment parameters.
    """
    # Open our dataset
    train_dataset, test_dataset = utils.load_data(args)

    label_list = train_dataset.unique("label")
    num_labels = len(label_list)

    # Create the model, loss function, and optimizer
    text_classifier = model.create(num_labels=num_labels)

    # Train / Test the model
    trainer = train(args, text_classifier, train_dataset, test_dataset)

    # Export the trained model
    trainer.save_model(os.path.join("/tmp", args.model_name))

    # Save the model to GCS
    if args.job_dir:
        utils.save_model(args)
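
The helpers utils.load_data, model.create, and train are defined elsewhere in this sample. Judging by dataset.unique("label") and trainer.save_model, the snippet appears to build on the Hugging Face datasets and transformers libraries; under that assumption, a minimal sketch of what model.create might look like follows (the checkpoint name is hypothetical):

from transformers import AutoModelForSequenceClassification

def create(num_labels: int):
    # Hypothetical sketch of model.create: load a pretrained encoder with a
    # classification head sized to the number of labels. The checkpoint name
    # is an assumption, not taken from the original sample.
    return AutoModelForSequenceClassification.from_pretrained(
        "bert-base-uncased", num_labels=num_labels)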
Example #2
def fit_model(
    input_path: str,
    model_dir: str,
    degree: int = 1,
    alpha: int = 0
) -> None:
    """Train, evaluate and save model given model configuration"""
    print(f"Fitting model with degree={args.degree} and alpha={args.alpha}")

    # Split datasets into training and testing
    train_feature, eval_feature, train_target, eval_target = utils.load_data(
        input_path)

    # Create the sklearn pipeline for the polynomial model defined in model.py
    polynomial_model = model.polynomial_model(degree, alpha)

    # Fit the sklearn model
    print("Fitting model...")
    polynomial_model.fit(train_feature, train_target)

    # Evaluate the model
    print("Evaluating model...")
    pred_target = polynomial_model.predict(eval_feature)
    mae = mean_absolute_error(eval_target, pred_target)

    print(f"Done. Model had MAE={mae}")
    # [END ai_platform_sklearn_task_fit]

    # [START ai_platform_sklearn_task_export]
    # Save model to GCS
    print("Saving model")
    matches = re.match("gs://(.*?)/(.*)", model_dir)
    bucket = matches.group(1)
    blob = matches.group(2)

    model_dump = "model.joblib"
    joblib.dump(polynomial_model, model_dump)

    blob_name = os.path.join(blob, model_dump)
    client = storage.Client()
    client.bucket(bucket).blob(blob_name).upload_from_filename(model_dump)
    print("Model saved")
Example #3
def train_and_evaluate(input_path: str,
                       model_dir: str,
                       num_epochs: int = 5,
                       batch_size: int = 128,
                       learning_rate: float = 0.01) -> None:
    """Trains and evaluates the Keras model.

    Uses the Keras model defined in model.py. Saves the trained model in TensorFlow SavedModel
    format to the path defined in part by the --job-dir argument."""

    # Split datasets into training and testing
    train_feature, eval_feature, train_target, eval_target = utils.load_data(
        input_path)
    # [END ai_platform_tfkeras_task_train_and_evaluate_load]

    # [START ai_platform_tfkeras_task_train_and_evaluate_dimensions]
    # Extract dimensions of the data
    num_train_examples, input_dim = train_feature.shape
    num_eval_examples = eval_feature.shape[0]
    output_dim = train_target.shape[1]
    # [END ai_platform_tfkeras_task_train_and_evaluate_dimensions]

    # [START ai_platform_tfkeras_task_train_and_evaluate_model]
    # Create the Keras Model
    keras_model = model.create_keras_model(
        input_dim=input_dim,
        output_dim=output_dim,
        learning_rate=learning_rate,
    )
    # [END ai_platform_tfkeras_task_train_and_evaluate_model]

    # [START ai_platform_tfkeras_task_train_and_evaluate_training_data]
    # Convert the DataFrames to NumPy arrays via DataFrame.values
    training_dataset = model.input_fn(
        features=train_feature.values,
        labels=train_target.values,
        shuffle=True,
        num_epochs=num_epochs,
        batch_size=batch_size,
    )
    # [END ai_platform_tfkeras_task_train_and_evaluate_training_data]

    # [START ai_platform_tfkeras_task_train_and_evaluate_validation_data]
    # Convert the DataFrames to NumPy arrays via DataFrame.values
    validation_dataset = model.input_fn(
        features=eval_feature.values,
        labels=eval_target.values,
        shuffle=False,
        num_epochs=num_epochs,
        batch_size=num_eval_examples,
    )
    # [END ai_platform_tfkeras_task_train_and_evaluate_validation_data]

    # [START ai_platform_tfkeras_task_train_and_evaluate_fit_export]
    # Train model
    keras_model.fit(
        training_dataset,
        steps_per_epoch=int(num_train_examples / batch_size),
        epochs=num_epochs,
        validation_data=validation_dataset,
        validation_steps=1,
        verbose=1,
    )

    # Export model
    keras_model.save(model_dir)
    print(f"Model exported to: {model_dir}")
Example #4
def train_and_evaluate(hparams):
    """Trains and evaluates the keras model

    Uses the Keras model defined in model.py and trains on data loaded and
    preprocessed in preprocessing.py. Saves the trained model in TensorFlow
    SavedModel format to the path defined by the --job-dir argument.

    Args:
        args: dictionary of arguments - see get_args() for details
    """
    utils.load_data(hparams.train_file)

    model = m.create_keras_model(hparams)

    train_datagen = tf.keras.preprocessing.image.ImageDataGenerator(
        rescale=1. / 255,
        rotation_range=40,
        width_shift_range=0.2,
        height_shift_range=0.2,
        shear_range=0.2,
        zoom_range=0.2,
        horizontal_flip=True,
        fill_mode='nearest')

    train_generator = train_datagen.flow_from_directory(
        'train',
        target_size=(500, 500),
        batch_size=hparams.batch_size,
        class_mode='categorical')

    validation_generator = train_datagen.flow_from_directory(
        'validation',
        target_size=(500, 500),
        batch_size=hparams.batch_size,
        class_mode='categorical')

    cp = tf.keras.callbacks.ModelCheckpoint(filepath="artwork_cnn.h5",
                                            save_best_only=True,
                                            verbose=0)

    tb = tf.keras.callbacks.TensorBoard(log_dir='./logs',
                                        histogram_freq=0,
                                        write_graph=True,
                                        write_images=True)

    es = tf.keras.callbacks.EarlyStopping(monitor='val_acc',
                                          mode='max',
                                          min_delta=1)

    history = model.fit_generator(train_generator,
                                  steps_per_epoch=100,
                                  epochs=hparams.num_epochs,
                                  validation_data=validation_generator,
                                  validation_steps=30,
                                  verbose=1,
                                  callbacks=[cp, tb, es])

    model_filename = 'final_artwork_cnn.h5'
    model.save(model_filename)
    model_folder = datetime.datetime.now().strftime('imdb_%Y%m%d_%H%M%S')

    gcs_model_path = os.path.join('gs://', hparams.bucket_name, 'results',
                                  model_folder, model_filename)
    subprocess.check_call(['gsutil', 'cp', model_filename, gcs_model_path],
                          stderr=sys.stdout)
    os.remove(model_filename)
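
The docstring refers to get_args() for the available arguments. Based only on the hparams attributes this function actually reads (train_file, bucket_name, num_epochs, batch_size), a hedged sketch of such a parser could look like this; the flag names and defaults are assumptions:

import argparse

def get_args():
    # Hypothetical sketch of get_args(): only the arguments referenced by
    # train_and_evaluate are included; flag names and defaults are assumed.
    parser = argparse.ArgumentParser()
    parser.add_argument("--train-file", dest="train_file", required=True)
    parser.add_argument("--bucket-name", dest="bucket_name", required=True)
    parser.add_argument("--num-epochs", dest="num_epochs", type=int, default=5)
    parser.add_argument("--batch-size", dest="batch_size", type=int, default=128)
    return parser.parse_args()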
Example #5
def train_and_evaluate(input_path: str,
                       job_dir: str,
                       num_epochs: int = 5,
                       batch_size: int = 128,
                       learning_rate: float = 0.01) -> None:
    """Trains and evaluates the Keras model.

    Uses the Keras model defined in model.py. Saves the trained model in TensorFlow SavedModel
    format to the path defined in part by the --job-dir argument."""

    # Split datasets into training and testing
    train_feature, eval_feature, train_target, eval_target = utils.load_data(
        input_path)
    # [END ai_platform_tfkeras_task_train_and_evaluate_load]

    # [START ai_platform_tfkeras_task_train_and_evaluate_dimensions]
    # Extract dimensions of the data
    num_train_examples, input_dim = train_feature.shape
    num_eval_examples = eval_feature.shape[0]
    output_dim = train_target.shape[1]
    # [END ai_platform_tfkeras_task_train_and_evaluate_dimensions]

    # [START ai_platform_tfkeras_task_train_and_evaluate_model]
    # Create the Keras Model
    keras_model = model.create_keras_model(
        input_dim=input_dim,
        output_dim=output_dim,
        learning_rate=learning_rate,
    )
    # [END ai_platform_tfkeras_task_train_and_evaluate_model]

    # [START ai_platform_tfkeras_task_train_and_evaluate_training_data]
    # Convert the DataFrames to NumPy arrays via DataFrame.values
    training_dataset = model.input_fn(
        features=train_feature.values,
        labels=train_target.values,
        shuffle=True,
        num_epochs=num_epochs,
        batch_size=batch_size,
    )
    # [END ai_platform_tfkeras_task_train_and_evaluate_training_data]

    # [START ai_platform_tfkeras_task_train_and_evaluate_validation_data]
    # Convert the DataFrames to NumPy arrays via DataFrame.values
    validation_dataset = model.input_fn(
        features=eval_feature.values,
        labels=eval_target.values,
        shuffle=False,
        num_epochs=num_epochs,
        batch_size=num_eval_examples,
    )
    # [END ai_platform_tfkeras_task_train_and_evaluate_validation_data]

    # [START ai_platform_tfkeras_task_train_and_evaluate_tensorboard]
    # Set up learning rate decay.
    lr_decay_cb = tf.keras.callbacks.LearningRateScheduler(
        lambda epoch: learning_rate + 0.02 * (0.5**(1 + epoch)), verbose=True)

    # Set up the TensorBoard callback.
    tensorboard_cb = tf.keras.callbacks.TensorBoard(os.path.join(
        job_dir, "keras_tensorboard"),
                                                    histogram_freq=1)
    # [END ai_platform_tfkeras_task_train_and_evaluate_tensorboard]

    # [START ai_platform_tfkeras_task_train_and_evaluate_fit_export]
    # Train model
    keras_model.fit(
        training_dataset,
        steps_per_epoch=int(num_train_examples / batch_size),
        epochs=num_epochs,
        validation_data=validation_dataset,
        validation_steps=1,
        verbose=1,
        callbacks=[lr_decay_cb, tensorboard_cb],
    )

    # Export model
    export_path = os.path.join(job_dir, "tfkeras_model/")
    tf.keras.models.save_model(keras_model, export_path)
    print(f"Model exported to: {export_path}")