Example #1
0
 def __init__(self, keras_model, loss, worker_optimizer):
     """Initializes the trainer with a model and training configuration.

     # Arguments
         keras_model: Keras model. Stored in serialized form as the master model.
         loss: loss identifier (string or callable) used for training.
         worker_optimizer: optimizer (string or instance) used on the workers.
     """
     # Keep the master model in serialized form (see serialize_keras_model).
     self.master_model = serialize_keras_model(keras_model)
     self.loss = loss
     self.worker_optimizer = worker_optimizer
     # Accumulated training history entries.
     self.history = []
     # Wall-clock bookkeeping for the training run (set elsewhere).
     self.training_time_start = 0
     self.training_time_end = 0
     self.training_time = 0
Example #2
0
 def __init__(self, keras_model, loss, worker_optimizer):
     """Initializes the trainer with a model and training configuration.

     # Arguments
         keras_model: Keras model. Stored in serialized form as the master model.
         loss: loss identifier (string or callable) used for training.
         worker_optimizer: optimizer (string or instance) used on the workers.
     """
     # Ensure the Keras base directory is configured before any model handling.
     set_keras_base_directory()
     # Keep the master model in serialized form (see serialize_keras_model).
     self.master_model = serialize_keras_model(keras_model)
     self.loss = loss
     self.worker_optimizer = worker_optimizer
     # Accumulated training history entries.
     self.history = []
     # Wall-clock bookkeeping for the training run (set elsewhere).
     self.training_time_start = 0
     self.training_time_end = 0
     self.training_time = 0
     # Upper bound on mini-batches the prefetcher may queue ahead.
     self.max_mini_batches_prefetch = 100
Example #3
0
 def __init__(self, keras_model, loss, worker_optimizer, metrics=None, loss_weights=None):
     """Initializes the trainer with a model and training configuration.

     # Arguments
         keras_model: Keras model. Stored in serialized form as the master model.
         loss: loss identifier (string or callable) used for training.
         worker_optimizer: optimizer (string or instance) used on the workers.
         metrics: list. Metric identifiers; defaults to ["accuracy"].
         loss_weights: list or dict. Optional per-output loss weights.
     """
     # Ensure the Keras base directory is configured before any model handling.
     set_keras_base_directory()
     # Fix for the mutable-default-argument pitfall: the original
     # `metrics=["accuracy"]` shares one list object across every call (and it
     # was stored on self, so one instance could mutate another's default).
     if metrics is None:
         metrics = ["accuracy"]
     # Keep the master model in serialized form (see serialize_keras_model).
     self.master_model = serialize_keras_model(keras_model)
     self.loss = loss
     self.loss_weights = loss_weights
     self.worker_optimizer = worker_optimizer
     self.metrics = metrics
     # Accumulated training history entries.
     self.history = []
     # Wall-clock bookkeeping for the training run (set elsewhere).
     self.training_time_start = 0
     self.training_time_end = 0
     self.training_time = 0
     # Upper bound on mini-batches the prefetcher may queue ahead.
     self.max_mini_batches_prefetch = 100
Example #4
0
        def destroy_job():
            """Finalizes a submitted job: returns its trained model and history,
            then removes the job from the registry.

            The job is looked up by the 'secret' request argument. On success the
            response is a JSON body with hex-encoded pickled 'model' and 'history'
            and HTTP 200; otherwise an empty body with HTTP 400.
            """
            secret = request.args.get('secret')
            job = self.get_submitted_job(secret)
            # NOTE(review): job.running() is checked before self.mutex is taken;
            # a job finishing/restarting in between would race — confirm intended.
            if job is not None and not job.running():
                with self.mutex:
                    model = self.jobs[secret].get_trained_model()
                    history = self.jobs[secret].get_history()
                    # 'hex_codec' on str is Python 2 only; pickled payloads are
                    # hex-encoded so they survive JSON transport.
                    model = pickle_object(
                        serialize_keras_model(model)).encode('hex_codec')
                    history = pickle_object(history).encode('hex_codec')
                    d = {}
                    d['model'] = model
                    d['history'] = history
                    # Remove the job while still holding the lock.
                    del self.jobs[secret]
                return json.dumps(d), 200

            return '', 400
Example #5
0
    def average_models(self, models):
        """Averages the specified list of Keras models, and assigns the
        averaged model as the master model.

        # Arguments:
            models: list. A list of serialized Keras models.
        """
        num_models = len(models)
        # Averaging zero models is undefined (the division below would be a
        # divide-by-zero); leave the current master model untouched instead.
        if num_models == 0:
            return
        # Accumulate the weights of every model into the parameter buffer.
        # (Iterate the models directly instead of the original index loop.)
        for serialized_model in models:
            weights = np.asarray(deserialize_keras_model(serialized_model).get_weights())
            self.parameter_buffer += weights
        # Average the parameters.
        self.parameter_buffer /= num_models
        # Load the averaged weights into a fresh copy of the master model and
        # store it back in serialized form.
        temp_model = deserialize_keras_model(self.master_model)
        temp_model.set_weights(self.parameter_buffer)
        self.master_model = serialize_keras_model(temp_model)
Example #6
0
    def train(self, worker_id, iterator):
        """Training procedure for the worker node.

        # Arguments
            worker_id: int. Partition index provided by Spark. Can be used as a worker_id.
            iterator: iterator. Data iterator.

        # Returns
            Iterator over a single serialized trained Keras model.
        """
        # Prepare the optimization procedure.
        self.start_prefetching_thread(iterator)
        self.set_worker_id(worker_id)
        self.prepare_model()
        # Start the optimization procedure.
        try:
            self.optimize()
        except Exception as e:
            # Fix: the original bare `except: pass` swallowed every error
            # silently AND left the prefetching thread running, so the join()
            # below could block forever. Signal the prefetcher to stop and
            # surface the error (matches the sibling train() implementation).
            self.is_prefetching = False
            print(e)
        # Wait for the prefetching thread to stop.
        self.prefetching_thread.join()

        return iter([serialize_keras_model(self.model)])
Example #7
0
    def train(self, worker_id, iterator):
        """Training procedure for the worker node.

        # Arguments
            worker_id: int. Partition index provided by Spark. Can be used as a worker_id.
            iterator: iterator. Data iterator.
        """
        # Prepare the optimization procedure.
        self.start_prefetching_thread(iterator)
        self.set_worker_id(worker_id)
        self.prepare_model()
        # Start the optimization procedure.
        try:
            self.optimize()
        except Exception as e:
            # Stop the prefetching process.
            # Without this flag flip, the join() below could block forever on a
            # prefetcher that keeps running after optimization failed.
            self.is_prefetching = False
            print(e)
        # Wait for the prefetching thread to stop.
        self.prefetching_thread.join()

        return iter([serialize_keras_model(self.model)])
Example #8
0
    def train(self, worker_id, iterator):
        """Training procedure with sequential gradient updates.

        # Arguments
            worker_id: int. Partition index provided by Spark.
            iterator: iterator. Yields rows indexed by the feature/label columns.

        # Returns
            Trained serialized Keras model (wrapped in a single-element iterator).
        """
        from itertools import islice

        # Prepare the model.
        self.prepare_model()
        while True:
            # Fetch the next mini-batch. Fixes of the original:
            #  - `[next(iterator) for _ in range(batch_size)]` raised
            #    StopIteration mid-comprehension and silently DROPPED the rows
            #    already pulled for the final partial batch; islice trains on
            #    that remainder instead.
            #  - `tee(batch, 2)` on an already-materialized list was
            #    unnecessary; the list can simply be iterated twice.
            batch = list(islice(iterator, self.batch_size))
            if not batch:
                break
            # Retrieve the feature and label vectors.
            X = np.asarray([row[self.features_column] for row in batch])
            Y = np.asarray([row[self.label_column] for row in batch])
            self.model.train_on_batch(X, Y)

        return iter([serialize_keras_model(self.model)])
Example #9
0
 def __init__(self, keras_model):
     """Stores the given Keras model in serialized form."""
     self.model = serialize_keras_model(keras_model)
Example #10
0
 def set_model(self, model):
     """Sets the master model to be used by the trainer."""
     # The model is stored serialized (via serialize_keras_model), not live.
     self.master_model = serialize_keras_model(model)
Example #11
0
 def __init__(self, keras_model):
     """Stores the given Keras model in serialized form."""
     self.model = serialize_keras_model(keras_model)
Example #12
0
 def set_model(self, model):
     """Sets the master model to be used by the trainer."""
     # The model is stored serialized (via serialize_keras_model), not live.
     self.master_model = serialize_keras_model(model)