# Relies on module-level imports of `time`, `numpy as np` and `History`
# (keras callbacks), and on `self.A` (adjacency), `self.L` (Laplacian),
# `self.node_size` and `self.model` being set up by the enclosing class.
def train(self, batch_size=1024, epochs=1, initial_epoch=0, verbose=1):
    """Train the model either in a single full batch (when batch_size covers
    all nodes) or with a manual mini-batch loop over node indices."""
    if batch_size >= self.node_size:
        if batch_size > self.node_size:
            print('batch_size({0}) > node_size({1}), set batch_size = {1}'.format(
                batch_size, self.node_size))
            batch_size = self.node_size
        # Full-batch training: feed the dense adjacency and Laplacian matrices
        # directly to keras and let it keep the history.
        return self.model.fit(
            [self.A.todense(), self.L.todense()],
            [self.A.todense(), self.L.todense()],
            batch_size=batch_size,
            epochs=epochs,
            initial_epoch=initial_epoch,
            verbose=verbose,
            shuffle=False,
        )
    else:
        # Mini-batch training: slice the sparse matrices per batch of nodes
        # and maintain a keras-style History by hand.
        steps_per_epoch = (self.node_size - 1) // batch_size + 1
        hist = History()
        hist.on_train_begin()
        logs = {}
        for epoch in range(initial_epoch, epochs):
            start_time = time.time()
            losses = np.zeros(3)
            for i in range(steps_per_epoch):
                index = np.arange(
                    i * batch_size, min((i + 1) * batch_size, self.node_size))
                A_train = self.A[index, :].todense()
                L_mat_train = self.L[index][:, index].todense()
                inp = [A_train, L_mat_train]
                batch_losses = self.model.train_on_batch(inp, inp)
                losses += batch_losses
            losses = losses / steps_per_epoch
            logs['loss'] = losses[0]
            logs['2nd_loss'] = losses[1]
            logs['1st_loss'] = losses[2]
            epoch_time = int(time.time() - start_time)
            hist.on_epoch_end(epoch, logs)
            if verbose > 0:
                print('Epoch {0}/{1}'.format(epoch + 1, epochs))
                print('{0}s - loss: {1: .4f} - 2nd_loss: {2: .4f} - 1st_loss: {3: .4f}'.format(
                    epoch_time, losses[0], losses[1], losses[2]))
        return hist
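# A minimal, self-contained sketch (with hypothetical sizes) of the mini-batch
# index arithmetic used in train() above: the ceiling division guarantees every
# node index is visited once per epoch, with a smaller final batch when
# node_size is not a multiple of batch_size.
import numpy as np

node_size, batch_size = 2500, 1024
steps_per_epoch = (node_size - 1) // batch_size + 1  # ceil(2500 / 1024) == 3
for i in range(steps_per_epoch):
    index = np.arange(i * batch_size, min((i + 1) * batch_size, node_size))
    print(index[0], index[-1] + 1)  # -> 0 1024, 1024 2048, 2048 2500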
# Relies on module-level imports of the keras Model and History classes, a keras
# callbacks module (callbacks_module), the optimizer registry (optimizer_dict),
# the Evolutionary_Optimizers module and a logger `log`.
class EvolModel(Model):
    """
    EvolModel forwards all tasks to keras if the optimizer is NOT genetic.
    If the optimizer is genetic, the fitting methods from
    Evolutionary_Optimizers.py are used instead.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.is_genetic = False
        self.opt_instance = None
        self.history_info = History()

    def parse_optimizer(self, optimizer):
        """
        Checks whether the optimizer is genetic and creates an optimizer
        instance in case a string was given as input.
        """
        # Check whether the optimizer input is a string and whether it is in the
        # optimizer_dict registry
        if isinstance(optimizer, str) and optimizer.lower() in optimizer_dict.keys():
            opt = optimizer_dict.get(optimizer.lower())
            # ... and instantiate it with default values
            optimizer = opt()
            optimizer.on_compile(self)
        # Check whether the optimizer is an evolutionary optimizer
        if isinstance(optimizer, Evolutionary_Optimizers.EvolutionaryStrategies):
            self.is_genetic = True
            self.opt_instance = optimizer
            optimizer.on_compile(self)

    def compile(self, optimizer="rmsprop", **kwargs):
        """
        When the optimizer is genetic, compiles the model in keras with an
        arbitrary keras-supported optimizer (the genetic optimizer drives the
        training instead).
        """
        self.parse_optimizer(optimizer)
        self.history_info.set_model(self)
        if self.is_genetic:
            super().compile(optimizer="rmsprop", **kwargs)
        else:
            super().compile(optimizer=optimizer, **kwargs)

    def perform_genetic_fit(
        self, x=None, y=None, epochs=1, verbose=0, validation_data=None, callbacks=None
    ):
        """
        Runs the evolutionary fitting loop for the given number of epochs
        (generations).

        Parameters
        ----------
            x: array or list of arrays
                input data
            y: array or list of arrays
                target values
            epochs: int
                number of generations of mutants
            verbose: int
                verbose, prints to log.info the loss per epoch
        """
        # Prepare the history for the initial epoch
        self.history_info.on_train_begin()
        callbacks.on_train_begin()
        # Validation data is currently not being used!!
        if validation_data is not None:
            log.warning(
                "Validation data is not used at the moment by the Genetic Algorithms!!"
            )
        if isinstance(self.opt_instance, Evolutionary_Optimizers.CMA) and epochs != 1:
            epochs = 1
            log.warning(
                "The optimizer determines the number of generations, epochs will be ignored."
            )
        for epoch in range(epochs):
            callbacks.on_epoch_begin(epoch=epoch)
            # Generate the best mutant
            score, best_mutant = self.opt_instance.run_step(x=x, y=y)
            training_metric = next(iter(score))
            # Ensure the best mutant is the current one
            self.set_weights(best_mutant)
            if verbose == 1:
                loss = score[training_metric]
                information = f" > epoch: {epoch+1}/{epochs}, {loss} "
                log.info(information)
            callbacks.on_epoch_end(epoch=epoch, logs=score)
            # Fill keras history
            history_data = score
            self.history_info.on_epoch_end(epoch, history_data)
        callbacks.on_train_end(logs=score)
        return self.history_info

    def fit(
        self,
        x=None,
        y=None,
        validation_data=None,
        epochs=1,
        verbose=0,
        callbacks=None,
        **kwargs,
    ):
        """
        If the optimizer is genetic, the fitting procedure consists of executing
        `run_step` for the given number of epochs.
        """
        if self.is_genetic:
            # Container that configures and calls `tf.keras.Callback`s;
            # `on_train_begin` is issued inside `perform_genetic_fit`.
            if not isinstance(callbacks, callbacks_module.CallbackList):
                callbacks = callbacks_module.CallbackList(
                    callbacks,
                    add_history=True,
                    add_progbar=False,
                    model=self,
                    verbose=verbose,
                    epochs=epochs,
                )
            result = self.perform_genetic_fit(
                x=x,
                y=y,
                epochs=epochs,
                verbose=verbose,
                validation_data=validation_data,
                callbacks=callbacks,
            )
        else:
            result = super().fit(
                x=x,
                y=y,
                validation_data=validation_data,
                epochs=epochs,
                verbose=verbose,
                callbacks=callbacks,
                **kwargs,
            )
        return result
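# A minimal usage sketch, assuming the module's optimizer_dict registry exposes
# a genetic optimizer under a string key such as "nga" (adjust to the actual
# keys); any standard keras optimizer string instead falls back to the normal
# keras training loop.
import numpy as np
from tensorflow.keras.layers import Dense, Input

inputs = Input(shape=(8,))
outputs = Dense(1)(Dense(16, activation="relu")(inputs))
model = EvolModel(inputs, outputs)

# With a genetic optimizer, fit() is routed through perform_genetic_fit()
model.compile(optimizer="nga", loss="mse")

x_data = np.random.rand(128, 8)
y_data = np.random.rand(128, 1)
history = model.fit(x=x_data, y=y_data, epochs=10, verbose=1)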