Example #1
def train_model(args):
    # Load the train/test split prepared by the data utilities
    train_features, test_features, train_labels, test_labels = \
        data_utils.load_data(args)

    # Build the Keras model and train it
    sonar_model = model.sonar_model()

    sonar_model.fit(train_features, train_labels, epochs=args.epochs,
                    batch_size=args.batch_size)

    # Evaluate on the held-out test set and report the score
    score = sonar_model.evaluate(test_features, test_labels,
                                 batch_size=args.batch_size)
    print(score)

    # Export the trained model
    sonar_model.save(args.model_name)

    if args.artifacts_dir:
        # Save the model to GCS
        data_utils.save_artifacts(args.artifacts_dir, RUN_ID, args.model_name)
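
For context, train_model above expects an args namespace with epochs, batch_size, model_name and artifacts_dir attributes. The original example does not show how args is built; the entry point below is a minimal sketch assuming an argparse-based CLI, and the flag names and defaults are illustrative rather than taken from the source project.

# Hypothetical entry point; flag names mirror the attributes read above
# and are assumptions, not part of the original example.
import argparse

def get_args():
    parser = argparse.ArgumentParser(description='Train the sonar model')
    parser.add_argument('--epochs', type=int, default=10)
    parser.add_argument('--batch-size', type=int, default=32)
    parser.add_argument('--model-name', default='sonar_model.h5')
    parser.add_argument('--artifacts-dir', default=None)
    return parser.parse_args()

if __name__ == '__main__':
    train_model(get_args())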
Example #2
def train_model(args):
    # Load the train/test split prepared by the data utilities
    train_features, test_features, train_labels, test_labels = \
        data_utils.load_data(args)

    # Build the model and warm-start it from weights previously saved to GCS
    sonar_model = model.sonar_model()
    keras_weight_file = load_weight_from_gcs()
    sonar_model.load_weights(keras_weight_file)
    sonar_model.fit(train_features,
                    train_labels,
                    epochs=args.epochs,
                    batch_size=args.batch_size)

    # Evaluate on the held-out test set and report the score
    score = sonar_model.evaluate(test_features,
                                 test_labels,
                                 batch_size=args.batch_size)
    print(score)

    # Export the trained model
    sonar_model.save(args.model_name)

    if args.model_dir:
        # Save the model to GCS
        data_utils.save_model(args.model_dir, args.model_name)
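
This variant warm-starts the model from weights stored on GCS, but the load_weight_from_gcs() helper itself is not shown in the example. Below is a minimal sketch of what such a helper could look like, assuming the google-cloud-storage client; the bucket and object names are placeholders, not values from the original code.

# Hypothetical helper; bucket/object names are placeholders.
import os
import tempfile

from google.cloud import storage

def load_weight_from_gcs(bucket_name='my-training-bucket',
                         blob_name='checkpoints/sonar_model.h5'):
    client = storage.Client()
    blob = client.bucket(bucket_name).blob(blob_name)
    # Download to a local temp file so Keras can load it by path
    _, suffix = os.path.splitext(blob_name)
    _, local_path = tempfile.mkstemp(suffix=suffix)
    blob.download_to_filename(local_path)
    return local_path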
Example #3
import datetime
import os
import tempfile

import numpy as np
from google.cloud import storage

import model

# Connect to Cloud Storage
client = storage.Client()
bucket_name = 'output-aiplatform'
folder_name = 'sonar_20210323_084454'
file_name = 'sonar_model.h5'

# Find the weight file exported by the training run
blobs = list(client.list_blobs(bucket_name, prefix=folder_name))
blob = blobs[0]

# Download the blob to a local temporary file so Keras can read it by path
_, _ext = os.path.splitext(blob.name)
_, temp_local_filename = tempfile.mkstemp(suffix=_ext)
blob.download_to_filename(temp_local_filename)
print(temp_local_filename)

# Rebuild the architecture and restore the trained weights
sonar_model = model.sonar_model()
sonar_model.load_weights(temp_local_filename)
sonar_model.summary()
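
If the exact object name is known (the snippet defines file_name but never uses it), the list_blobs() step can be skipped and the blob fetched directly. A small sketch of that variation, assuming the weights live at folder_name/file_name in the same bucket:

# Assumes the weight file sits at '<folder_name>/<file_name>'
blob = client.bucket(bucket_name).blob(f'{folder_name}/{file_name}')
_, temp_local_filename = tempfile.mkstemp(suffix='.h5')
blob.download_to_filename(temp_local_filename)

sonar_model = model.sonar_model()
sonar_model.load_weights(temp_local_filename)

# Remove the temporary file once the weights are loaded
os.remove(temp_local_filename)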