Code example #1
import os

import tensorflow as tf
import tensorflow_cloud as tfc

GCP_BUCKET = "your-gcp-bucket-name"  # placeholder: replace with your Cloud Storage bucket


def run():
    model_path = "resnet-dogs"
    img_size = 224
    batch_size = 64
    train_data, test_data, num_classes = get_data(img_size, batch_size)
    model = get_model(img_size, num_classes)
    callbacks = get_callbacks(model_path, GCP_BUCKET)
    # On the cloud worker, tfc.remote() is True: run the full training job,
    # save the model to GCS, then reload it and evaluate.
    if tfc.remote():
        epochs = 500
        model.fit(
            train_data, epochs=epochs, callbacks=callbacks, validation_data=test_data, verbose=2
        )
        save_path = os.path.join("gs://", GCP_BUCKET, model_path)
        model.save(save_path)

        model = tf.keras.models.load_model(save_path)
        model.evaluate(test_data)

    # Locally, tfc.remote() is False: package this script into a Docker image
    # and dispatch it to Google Cloud on the requested machine configuration.
    tfc.run(
        requirements_txt="requirements.txt",
        distribution_strategy="auto",
        chief_config=tfc.MachineConfig(
            cpu_cores=8,
            memory=30,
            accelerator_type=tfc.AcceleratorType.NVIDIA_TESLA_T4,
            accelerator_count=2,
        ),
        docker_image_bucket_name=GCP_BUCKET,
    )
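
"""
This example assumes helpers such as `get_data`, `get_model`, and
`get_callbacks` defined elsewhere in the guide. As a rough sketch of what
`get_callbacks` might return (an assumption, not the guide's actual
implementation), TensorBoard logs and per-epoch checkpoints can be pointed at
the same bucket:
"""


def get_callbacks(model_path, gcp_bucket):
    # Hypothetical helper: stream TensorBoard logs and epoch checkpoints to GCS.
    return [
        tf.keras.callbacks.TensorBoard(
            log_dir=os.path.join("gs://", gcp_bucket, "logs", model_path)
        ),
        tf.keras.callbacks.ModelCheckpoint(
            os.path.join("gs://", gcp_bucket, model_path, "ckpt"),
            save_freq="epoch",
        ),
    ]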
Code example #2
to store your dataset in your Cloud Storage bucket; however, TensorFlow Cloud can
also accommodate datasets stored locally. That's covered in the Multi-file section
of this guide.
"""

(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()

"""
The [TensorFlow Cloud](https://github.com/tensorflow/cloud) API provides the
`remote()` function to determine whether code is being executed locally or on
the cloud. This lets you specify separate `fit()` parameters for local and
remote execution, and makes it easy to debug without overloading your local
machine.
"""

if tfc.remote():
    # Full training run on Google Cloud.
    epochs = 100
    callbacks = callbacks  # reuse the callbacks defined earlier in the guide
    batch_size = 128
else:
    # Quick local smoke test for debugging.
    epochs = 1
    callbacks = None
    batch_size = None  # fall back to the Keras default batch size (32)

model.fit(x_train, y_train, epochs=epochs, callbacks=callbacks, batch_size=batch_size)

"""
Let's save the model to GCS once training is complete.
"""

save_path = os.path.join("gs://", gcp_bucket, "mnist_example")
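
"""
The excerpt stops at building the path; the save call itself is not shown. A
minimal continuation, assuming the model should only be written to GCS from
the cloud worker:
"""

if tfc.remote():
    model.save(save_path)  # writes a SavedModel to gs://<bucket>/mnist_example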
Code example #3
def test_remote(self):
    self.assertFalse(tfc.remote())
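
On its own this is just a test method. A self-contained version might look
like the following; the class name and imports are assumptions, not taken
from the original test suite:

import unittest

import tensorflow_cloud as tfc


class RemoteDetectionTest(unittest.TestCase):
    def test_remote(self):
        # Outside a cloud worker, tfc.remote() should report False.
        self.assertFalse(tfc.remote())


if __name__ == "__main__":
    unittest.main()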
Code example #4
# Derived GCS paths for training checkpoints and the exported SavedModel
# (GCS_BASE_PATH is defined earlier in the notebook).
MODEL_CHECKPOINT_DIR = os.path.join(GCS_BASE_PATH, "checkpoints")
SAVED_MODEL_DIR = os.path.join(GCS_BASE_PATH, "saved_model")
"""
## Authenticating the notebook to use your Google Cloud Project

This code authenticates the notebook by verifying your Google Cloud
credentials and identity. It is placed inside the `if not tfc.remote()` block
to ensure that it only runs in the notebook, and is not run when the notebook
code is sent to Google Cloud.

Note: For Kaggle Notebooks click on "Add-ons"->"Google Cloud SDK" before running
the cell below.
"""

# Use tfc.remote() to ensure this code only runs in the notebook
if not tfc.remote():

    # Authentication for Kaggle Notebooks
    if "kaggle_secrets" in sys.modules:
        from kaggle_secrets import UserSecretsClient

        UserSecretsClient().set_gcloud_credentials(project=GCP_PROJECT_ID)

    # Authentication for Colab Notebooks
    if "google.colab" in sys.modules:
        from google.colab import auth

        auth.authenticate_user()
        os.environ["GOOGLE_CLOUD_PROJECT"] = GCP_PROJECT_ID
"""
## Model and data setup