Example no. 1
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()
    model = get_model(x_train.shape[1], 10, y_train.shape[1])
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target
    config = optimizers.serialize(optimizer)
    optim = optimizers.deserialize(config)
    new_config = optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config
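
For reference, optimizers.serialize returns a plain, JSON-friendly dict (roughly {'class_name': ..., 'config': {...}}; the exact keys vary across Keras versions), and optimizers.deserialize rebuilds an equivalent optimizer from it. A minimal round-trip sketch, assuming a recent tensorflow.keras:

from tensorflow.keras import optimizers

opt = optimizers.Adam(learning_rate=0.001)
config = optimizers.serialize(opt)          # JSON-friendly dict describing the optimizer
print(config['class_name'])                 # e.g. 'Adam'
restored = optimizers.deserialize(config)   # rebuild an equivalent optimizer instance
assert isinstance(restored, optimizers.Adam)
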
Example no. 2
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()

    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target
    config = optimizers.serialize(optimizer)
    optim = optimizers.deserialize(config)
    new_config = optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.,)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)
Example no. 3
 def __init__(self,
              model,
              optimizer,
              loss,
              loss_weights,
              metrics=["accuracy"],
              features_col="features",
              label_col="label",
              batch_size=32,
              num_epoch=1,
              learning_rate=1.0):
     assert isinstance(
         optimizer,
         (str, Optimizer
          )), "'optimizer' must be a string or a Keras Optimizer instance"
     assert isinstance(
         features_col,
         (str,
          list)), "'features_col' must be a string or a list of strings"
     assert isinstance(
         label_col,
         (str, list)), "'label_col' must be a string or a list of strings"
     self.model = model
     self.optimizer = {
         'class_name': optimizer,
         'config': {}
     } if isinstance(optimizer, str) else serialize(optimizer)
     self.loss = loss
     self.loss_weights = loss_weights
     self.metrics = metrics
     self.features_column = [features_col] if isinstance(
         features_col, str) else features_col
     self.label_column = [label_col] if isinstance(label_col,
                                                   str) else label_col
     self.batch_size = batch_size
     self.num_epoch = num_epoch
     self.max_mini_batches = 100
     self.prefetching_thread = None
     self.mini_batches = None
     self.is_prefetching = True
     self.worker_id = -1
     self.learning_rate = learning_rate
     self.num_inputs = len(self.features_column)
     self.num_outputs = len(self.label_column)
     self.current_epoch = 0
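
The constructor above accepts the optimizer either as a string name or as a Keras Optimizer instance and normalizes both into the same serialized dict form. A standalone sketch of that normalization, assuming tensorflow.keras (the helper name is made up for illustration):

from tensorflow.keras import optimizers

def normalize_optimizer(optimizer):
    # A string such as 'sgd' becomes a minimal config dict; an Optimizer
    # instance is serialized into its full config.
    if isinstance(optimizer, str):
        return {'class_name': optimizer, 'config': {}}
    return optimizers.serialize(optimizer)

# Either form can later be turned back into an optimizer instance
# (built-in class names are matched case-insensitively by deserialize):
opt_from_name = optimizers.deserialize(normalize_optimizer('sgd'))
opt_from_instance = optimizers.deserialize(normalize_optimizer(optimizers.SGD(learning_rate=0.01)))
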
Example no. 4
    def testSerializationWithCustomOptimizer(self):
        class MySGD(gradient_descent.SGD):
            def __init__(self, *args, **kwargs):
                super(MySGD, self).__init__(*args, **kwargs)
                self.my_attribute = 123

        opt = MySGD(2., momentum=0.5)
        opt = loss_scale_optimizer.LossScaleOptimizer(opt,
                                                      initial_scale=2.,
                                                      dynamic_growth_steps=3.)
        config = optimizers.serialize(opt)
        custom_objects = {'MySGD': MySGD}
        opt = optimizers.deserialize(config, custom_objects=custom_objects)
        # Force hyperparameters to be created
        opt.lr  # pylint: disable=pointless-statement
        self.evaluate(tf.compat.v1.global_variables_initializer())

        self.assertEqual(self.evaluate(opt.lr), 2.)
        self.assertEqual(self.evaluate(opt.inner_optimizer.momentum), 0.5)
        self.assertEqual(self.evaluate(opt.loss_scale), 2.)
        self.assertEqual(opt.dynamic_growth_steps, 3.)
        self.assertEqual(opt.inner_optimizer.my_attribute, 123)
Example no. 5
  def testSerializationWithCustomOptimizer(self, opt_cls):
    sgd_cls = type(create_sgd(opt_cls))

    class MySGD(sgd_cls):

      def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.my_attribute = 123

    opt = MySGD(2., momentum=0.5)
    opt = create_lso(opt, initial_scale=2., dynamic_growth_steps=3.)
    config = optimizers.serialize(opt)
    custom_objects = {'MySGD': MySGD}
    opt = optimizers.deserialize(config, custom_objects=custom_objects)
    # Force hyperparameters to be created
    opt.learning_rate  # pylint: disable=pointless-statement
    self.evaluate(tf.compat.v1.global_variables_initializer())

    self.assertEqual(self.evaluate(opt.learning_rate), 2.)
    self.assertEqual(self._eval_if_tensor(opt.inner_optimizer.momentum), 0.5)
    self.assertEqual(self.evaluate(opt.loss_scale), 2.)
    self.assertEqual(opt.dynamic_growth_steps, 3.)
    self.assertEqual(opt.inner_optimizer.my_attribute, 123)
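
When a serialized config points at a custom optimizer subclass, deserialization has to be told about that class, either through the custom_objects argument used above or by registering it in a custom object scope. A small sketch of both options, assuming tensorflow.keras (MySGD mirrors the subclass from the example):

import tensorflow as tf
from tensorflow.keras import optimizers

class MySGD(optimizers.SGD):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.my_attribute = 123

config = optimizers.serialize(MySGD(learning_rate=2.0, momentum=0.5))

# Option 1: pass the mapping explicitly.
opt = optimizers.deserialize(config, custom_objects={'MySGD': MySGD})

# Option 2: make the class visible inside a custom object scope.
with tf.keras.utils.custom_object_scope({'MySGD': MySGD}):
    opt = optimizers.deserialize(config)
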
Example no. 6
 def __init__(self, model, optimizer, loss, loss_weights, metrics=["accuracy"], features_col="features", label_col="label",
              batch_size=32, num_epoch=1, learning_rate=1.0):
     assert isinstance(optimizer, (str, Optimizer)), "'optimizer' must be a string or a Keras Optimizer instance"
     assert isinstance(features_col, (str, list)), "'features_col' must be a string or a list of strings"
     assert isinstance(label_col, (str, list)), "'label_col' must be a string or a list of strings"
     self.model = model
     self.optimizer = {'class_name': optimizer, 'config': {}} if isinstance(optimizer, str) else serialize(optimizer)
     self.loss = loss
     self.loss_weights = loss_weights
     self.metrics = metrics
     self.features_column = [features_col] if isinstance(features_col, str) else features_col
     self.label_column = [label_col] if isinstance(label_col, str) else label_col
     self.batch_size = batch_size
     self.num_epoch = num_epoch
     self.max_mini_batches = 100
     self.prefetching_thread = None
     self.mini_batches = None
     self.is_prefetching = True
     self.worker_id = -1
     self.learning_rate = learning_rate
     self.num_inputs = len(self.features_column)
     self.num_outputs = len(self.label_column)
Example no. 7
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))

# Create Spark context
conf = SparkConf().setAppName('Mnist_Spark_MLP').setMaster('local[8]')
sc = SparkContext(conf=conf)

# Build RDD from numpy features and labels
df = to_data_frame(sc, x_train, y_train, categorical=True)
test_df = to_data_frame(sc, x_test, y_test, categorical=True)

sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
sgd_conf = optimizers.serialize(sgd)

# Initialize Spark ML Estimator
estimator = ElephasEstimator()
estimator.set_keras_model_config(model.to_yaml())
estimator.set_optimizer_config(sgd_conf)
estimator.set_mode("synchronous")
estimator.set_loss("categorical_crossentropy")
estimator.set_metrics(['acc'])
estimator.set_epochs(epochs)
estimator.set_batch_size(batch_size)
estimator.set_validation_split(0.1)
estimator.set_categorical_labels(True)
estimator.set_nb_classes(nb_classes)

# Fitting a model returns a Transformer
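
The snippet is cut off right after this comment, but with Elephas the usual continuation is that calling fit on the Spark DataFrame returns a Spark ML Transformer, which can then produce predictions. A hedged sketch of that step (variable names are assumptions, not from the original):

# Fit the estimator on the training DataFrame; Elephas returns a Transformer.
fitted_model = estimator.fit(df)

# Use the fitted Transformer to append a prediction column to the test DataFrame.
prediction = fitted_model.transform(test_df)
prediction.select("label", "prediction").show(5)
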
Example no. 8
def _test_optimizer(optimizer, target=0.75):
    x_train, y_train = get_test_data()

    model = Sequential()
    model.add(Dense(10, input_shape=(x_train.shape[1],)))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    assert history.history['acc'][-1] >= target
    config = optimizers.serialize(optimizer)
    optim = optimizers.deserialize(config)
    new_config = optimizers.serialize(optim)
    new_config['class_name'] = new_config['class_name'].lower()
    assert config == new_config

    # Test constraints.
    model = Sequential()
    dense = Dense(10,
                  input_shape=(x_train.shape[1],),
                  kernel_constraint=lambda x: 0. * x + 1.,
                  bias_constraint=lambda x: 0. * x + 2.,)
    model.add(dense)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])
    model.train_on_batch(x_train[:10], y_train[:10])
    kernel, bias = dense.get_weights()
    assert_allclose(kernel, 1.)
    assert_allclose(bias, 2.)

    # Test lr_mult
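    # Note: lr_mult is not a parameter of stock Keras layers; the code below appears
    # to rely on a fork or patched Dense layer that supports per-layer learning-rate
    # multipliers.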

    model = Sequential()
    dense = (Dense(10, input_shape=(x_train.shape[1],), lr_mult=0.0))
    model.add(dense)
    model.add(Dense(10, lr_mult=0.0))
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

    kernel, bias = dense.get_weights()
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    kernel_new, bias_new = dense.get_weights()
    assert np.array_equal(kernel_new, kernel)
    assert np.array_equal(bias_new, bias)

    model = Sequential()
    dense = (Dense(10, input_shape=(x_train.shape[1],), lr_mult=1.0))
    model.add(dense)

    dense2 = (Dense(10, lr_mult=0.0))
    model.add(dense2)
    model.add(Activation('relu'))
    model.add(Dense(y_train.shape[1]))
    model.add(Activation('softmax'))
    model.compile(loss='categorical_crossentropy',
                  optimizer=optimizer,
                  metrics=['accuracy'])

    kernel, bias = dense.get_weights()
    kernel2, bias2 = dense2.get_weights()
    history = model.fit(x_train, y_train, epochs=2, batch_size=16, verbose=0)
    kernel_new, bias_new = dense.get_weights()
    kernel2_new, bias2_new = dense2.get_weights()

    assert not np.array_equal(kernel_new, kernel)
    assert not np.array_equal(bias_new, bias)

    assert np.array_equal(kernel2_new, kernel2)
    assert np.array_equal(bias2_new, bias2)
Example no. 9
model.add(Activation('relu'))
model.add(Dropout(rate=0.3))
model.add(Dense(256, activity_regularizer=regularizers.l2(0.01)))
model.add(Activation('relu'))
model.add(Dropout(rate=0.3))
model.add(Dense(nb_classes))
model.add(Activation('sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam')

# Model Summary
print(model.summary())

# Distributed Deep Learning
# Set and Serialize Optimizer
optimizer = optimizers.Adam(lr=0.01)
opt_conf = optimizers.serialize(optimizer)

# Initialize SparkML Estimator and Get Settings
estimator = ElephasEstimator()
estimator.setFeaturesCol("features")
estimator.setLabelCol("label_index")
estimator.set_keras_model_config(model.to_yaml())
estimator.set_categorical_labels(True)
estimator.set_nb_classes(nb_classes)
estimator.set_num_workers(1)
estimator.set_epochs(25)
estimator.set_batch_size(64)
estimator.set_verbosity(1)
estimator.set_validation_split(0.10)
estimator.set_optimizer_config(opt_conf)
estimator.set_mode("synchronous")
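
The optimizer is serialized here presumably so that Elephas can ship a plain dict to the Spark workers, where an equivalent optimizer is rebuilt before local training. A minimal sketch of that worker-side step, assuming keras.optimizers (not part of the original snippet):

from keras import optimizers

# Rebuild an equivalent optimizer from the serialized config ...
worker_optimizer = optimizers.deserialize(opt_conf)

# ... and compile the (deserialized) model with it before local training.
model.compile(loss='binary_crossentropy', optimizer=worker_optimizer)
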
Example no. 10
test_batchsize = 1
image_preprocessing = preprocess_image(normalize_lighting=normalize_lighting, min_value=min_value, max_value=max_value)

run_name = "run_" + datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
weights_dir = os.path.join("weights", run_name)
if not os.path.isdir(weights_dir):
    print("Create directory " + weights_dir + " for saving weights")
    os.makedirs(weights_dir)
results_dir = os.path.join("results", run_name)
if not os.path.isdir(results_dir):
    print("Create directory " + results_dir + " for saving results")
    os.makedirs(results_dir)
config = dict(height=height, width=width, channels=channels, normalize_lighting=normalize_lighting,
              min_value=min_value, max_value=max_value, background_as_class=background_as_class,
              augmentation_args=augmentation_args, unet_args=unet_args, use_custom_losses=use_custom_losses,
              optimizer=optimizers.serialize(optimizer), seed=seed, data_dir=data_dir, epochs=epochs,
              train_batchsize=train_batchsize)
with open(os.path.join(weights_dir, "config.json"), "w") as file:
    json.dump(config, file, indent=4)
with open(os.path.join(results_dir, "config.json"), "w") as file:
    json.dump(config, file, indent=4)

print("\nTrain dataset statistics:")
train_generator = ImageMaskGenerator(
    train_dir,
    augmentation_args=augmentation_args,
    image_preprocessing=image_preprocessing,
    mask_preprocessing=preprocess_mask,
    background_as_class=background_as_class,
    target_size=(height, width),
    image_color_mode="rgb",
Example no. 11
        elapsed_training_Vectors = time.time() - start
        elapsed_training_VectorsCpu = time.clock() - startCpu
        elapsed_validation_Vectors = time.time() - start
        elapsed_validation_VectorsCpu = time.clock() - startCpu

        start = time.time()
        startCpu = time.clock()

        input_dim = train_df.select("features").first()[0].shape
        logger.info(
            f"We have {num_classes} classes and {input_dim[0]} features")

        model = get_model(train_df, input_dim)
        model.compile(optimizer=optimizer, loss=mywloss, metrics=['accuracy'])
        adam = optimizers.nadam(lr=0.01)
        opt_conf = optimizers.serialize(adam)

        # Initialize SparkML Estimator and set all relevant properties
        estimator = ElephasEstimator()
        estimator.setFeaturesCol(
            "features")  # These two come directly from pyspark,
        estimator.setLabelCol("target")  # hence the camel case. Sorry :)
        estimator.set_keras_model_config(
            model.to_yaml())  # Provide serialized Keras model
        estimator.set_categorical_labels(True)
        estimator.set_nb_classes(num_classes)
        estimator.set_num_workers(10)  # Ten Spark workers here; adapt as needed.
        estimator.set_epochs(2)  # was max-epochs
        estimator.set_batch_size(batch_size)  # was 128
        estimator.set_verbosity(2)  # was 1