Example #1
                tf_valid_ds = create_dataset(tf_valid_ds,
                                             batch_size=batch_size,
                                             shuffling=False,
                                             cache_path=None)

                for lr in learning_rates:
                    for opt in optimizer_list:

                        #### IF SOMETHING BREAKS PICKUP WHERE WE LEFT OFF
                        if model_count < 152:
                            print(f'Skipping: {model_count}')
                            model_count += 1
                            continue
                        ####

                        if lr == 'custom':
                            lr = CustomSchedule()

                        if opt == 'rmsprop':
                            opt_m = tf.keras.optimizers.RMSprop(lr)

                        elif opt == 'adam':
                            opt_m = tf.keras.optimizers.Adam(lr)

                        elif opt == 'adagrad':
                            opt_m = tf.keras.optimizers.Adagrad(lr)

                        elif opt == 'adamax':
                            opt_m = tf.keras.optimizers.Adamax(lr)

                        elif opt == 'sgd':
                            opt_m = tf.keras.optimizers.SGD(lr)
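                        # The snippet above only selects the optimizer; a minimal sketch of the
                        # likely continuation (compiling and fitting with the chosen optimizer)
                        # follows. `build_model`, `epochs`, and the loss/metric choices are
                        # illustrative assumptions, not from the original notebook.
                        model = build_model()  # hypothetical model factory
                        model.compile(optimizer=opt_m,
                                      loss='sparse_categorical_crossentropy',
                                      metrics=['accuracy'])
                        model.fit(tf_train_ds,
                                  validation_data=tf_valid_ds,
                                  epochs=epochs)
                        model_count += 1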
Example #2
# ### Build Train/Validation => Model-Ready Input

tf_train_ds = create_dataset(tf_train_ds,
                             batch_size=batch_size,
                             shuffling=True,
                             cache_path=None)
tf_valid_ds = create_dataset(tf_valid_ds,
                             batch_size=batch_size,
                             shuffling=False,
                             cache_path=None)
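
# `create_dataset` is a helper defined elsewhere in the notebook; the sketch
# below shows what it is assumed to do: optionally cache, shuffle, batch, and
# prefetch a tf.data.Dataset. The buffer size and defaults are illustrative.
def create_dataset(dataset, batch_size=32, shuffling=False, cache_path=None):
    if cache_path is not None:
        dataset = dataset.cache(cache_path)            # cache preprocessed records to disk
    if shuffling:
        dataset = dataset.shuffle(buffer_size=10_000)  # shuffle only the training split
    dataset = dataset.batch(batch_size)
    return dataset.prefetch(tf.data.experimental.AUTOTUNE)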

# ### Custom Learning Rate Scheduler
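
# `CustomSchedule` is defined earlier in the notebook; a minimal sketch is given
# below, assuming the Transformer-style warmup schedule
# lr = d_model**-0.5 * min(step**-0.5, step * warmup_steps**-1.5); the d_model
# and warmup_steps defaults are illustrative.
class CustomSchedule(tf.keras.optimizers.schedules.LearningRateSchedule):
    def __init__(self, d_model=512, warmup_steps=4000):
        super().__init__()
        self.d_model = tf.cast(d_model, tf.float32)
        self.warmup_steps = warmup_steps

    def __call__(self, step):
        # Linear warmup for `warmup_steps`, then inverse-square-root decay.
        arg1 = tf.math.rsqrt(step)
        arg2 = step * (self.warmup_steps ** -1.5)
        return tf.math.rsqrt(self.d_model) * tf.math.minimum(arg1, arg2)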

# Example: visualize the learning-rate schedule over the first 25,000 steps
plt.style.use('ggplot')
schedule = CustomSchedule()
plt.plot(schedule(tf.range(25000, dtype=tf.float32)))
plt.xlabel("Steps")
plt.ylabel("Learning rate")

# ### Setup Callbacks for TensorBoard

# +
start_profile_batch = steps + 10
stop_profile_batch = start_profile_batch + 100
profile_range = f"{start_profile_batch},{stop_profile_batch}"

log_path = log_dir + "/" + datetime.datetime.now().strftime(
    "%Y-%m-%d_%H:%M:%S")
tensorboard_callback = tf.keras.callbacks.TensorBoard(
    log_dir=log_path,
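    # The call above is truncated in this excerpt; a plausible completion is
    # sketched below. Whether `profile_batch` accepts the "start,stop" string
    # built above depends on the TensorFlow version, so treat these arguments
    # as assumptions.
    histogram_freq=1,
    profile_batch=profile_range)

callbacks = [tensorboard_callback]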