def build_model(self):
    timer = Timer()
    timer.start()
    print('[Model] Creating model..')
    configs = self.c
    for layer in configs['model']['layers']:
        neurons = layer['neurons'] if 'neurons' in layer else None
        dropout_rate = layer['rate'] if 'rate' in layer else None
        activation = layer['activation'] if 'activation' in layer else None
        input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
        input_features = layer['input_features'] if 'input_features' in layer else None
        #print('input_features %s input_timesteps %s ' % (input_features, input_timesteps))

        if layer['type'] == 'mlp':
            # Dense expects a 2D input with shape (batch_size, input_dim); the window is
            # flattened, so input_dim = input_features * input_timesteps.
            self.model.add(Dense(neurons,
                                 input_dim=input_features * input_timesteps,
                                 kernel_initializer='normal',
                                 activation=activation))
        if layer['type'] == 'dense':
            self.model.add(Dense(neurons, kernel_initializer='normal', activation=activation))
        if layer['type'] == 'dropout':
            self.model.add(Dropout(dropout_rate))
        if layer['type'] == 'activation':
            self.model.add(Activation(activation))

    print(self.model.summary())
    self.model.compile(loss=configs['model']['loss'],
                       optimizer=configs['model']['optimizer'],
                       metrics=['accuracy'])
    print('[Model] Model Compiled with structure:', self.model.inputs)
    self.save_architecture(self.save_fname)
    timer.stop()
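# Example (assumed) shape of the config entries that the MLP builder above reads.
# This only illustrates the keys used in the loop, not the project's actual config file:
#
#   "model": {
#       "loss": "mse",
#       "optimizer": "adam",
#       "layers": [
#           {"type": "mlp", "neurons": 100, "input_timesteps": 30, "input_features": 4, "activation": "relu"},
#           {"type": "dropout", "rate": 0.2},
#           {"type": "dense", "neurons": 1, "activation": "linear"}
#       ]
#   }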
def train(self, x, y, epochs, batch_size):
    timer = Timer()
    timer.start()
    print('[Model] Training Started')
    print('[Model] %s epochs, %s batch size' % (epochs, batch_size))
    #print('[Model] Shape of data train: ', x.shape)
    #save_fname = os.path.join(save_dir, '%s-e%s.h5' % (dt.datetime.now().strftime('%d%m%Y-%H%M%S'), str(epochs)))

    callbacks = [
        EarlyStopping(monitor='val_loss', patience=2),
        ModelCheckpoint(filepath=self.save_fnameh5, monitor='val_loss', save_best_only=True)
    ]
    history = self.model.fit(x, y,
                             validation_split=0.33,
                             epochs=epochs,
                             batch_size=batch_size,
                             callbacks=callbacks)
    if self.save:
        self.save_model(self.save_fnameh5)
        print('[Model] Training Completed. Model h5 saved as %s' % self.save_fnameh5)

    print('[Model] Model trained with structure:', self.model.inputs)
    timer.stop()
    return history
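# Hedged usage sketch (names are hypothetical, not from the project): assuming the
# wrapper class is instantiated with the loaded config, training could look like
#
#   m = ModelWrapper(configs)            # hypothetical wrapper exposing build_model()/train()
#   m.build_model()
#   history = m.train(x_train, y_train, epochs=50, batch_size=32)
#   print(min(history.history['val_loss']))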
def build_model(self):
    timer = Timer()
    timer.start()
    print('[Model] Creating model..')
    configs = self.c
    self.model = None
    for layer in configs['model']['layers']:
        neurons = layer['neurons'] if 'neurons' in layer else None
        dropout_rate = layer['rate'] if 'rate' in layer else None
        activation = layer['activation'] if 'activation' in layer else None
        return_seq = layer['return_seq'] if 'return_seq' in layer else None
        input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
        input_features = layer['input_features'] if 'input_features' in layer else None
        dropout = layer['dropout'] if 'dropout' in layer else None
        stateful = layer['stateful'] if 'stateful' in layer else None

        inputs = Input(shape=(input_timesteps, input_features))
        outputs = LSTM(neurons, return_sequences=False, activation='tanh')(inputs)
        #outputs = LSTM(neurons, return_sequences=False, activation='tanh')(outputs)
        outputs = Dense(2, activation='linear')(outputs)
        distributions = Lambda(gaussian_layer_2d)(outputs)

    self.model = Model(inputs=inputs, outputs=distributions)

    if configs['model']['optimizer'] == 'adam':
        opt = Adam(lr=configs['model']['learningrate'])
    elif configs['model']['optimizer'] == 'rmsprop':
        opt = RMSprop(lr=configs['model']['learningrate'])

    print(self.model.summary())
    self.model.compile(loss=gaussian_nll, optimizer=opt, metrics=['accuracy'])
    print('[Model] Model Compiled with structure:', self.model.inputs)
    self.save_architecture(self.save_fname)
    timer.stop()
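# gaussian_layer_2d and gaussian_nll are defined elsewhere in the project.
# A minimal sketch of what they could look like (an assumption, not the project's code):
# the Lambda turns the 2-unit Dense output into a mean and a positive scale, and the
# loss is the Gaussian negative log-likelihood of the target under that distribution.
#
#   from keras import backend as K
#
#   def gaussian_layer_2d(x):
#       mu = x[:, 0:1]
#       sigma = K.softplus(x[:, 1:2]) + K.epsilon()   # keep sigma strictly positive
#       return K.concatenate([mu, sigma], axis=-1)
#
#   def gaussian_nll(y_true, y_pred):
#       mu, sigma = y_pred[:, 0:1], y_pred[:, 1:2]
#       return K.mean(K.log(sigma) + 0.5 * K.square((y_true - mu) / sigma))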
def build_model(self):
    timer = Timer()
    timer.start()
    print('[Model] Creating model..')
    self.model = None
    configs = self.c
    for layer in configs['model']['layers']:
        neurons = layer['neurons'] if 'neurons' in layer else None
        dropout_rate = layer['rate'] if 'rate' in layer else None
        activation = layer['activation'] if 'activation' in layer else None
        input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
        input_features = layer['input_features'] if 'input_features' in layer else None
        #print('input_features %s input_timesteps %s ' % (input_features, input_timesteps))

        if layer['type'] == 'input':
            inputs = Input(shape=(input_features * input_timesteps,))
            outputs = Dense(neurons, activation=activation)(inputs)
        if layer['type'] == 'dense':
            outputs = Dense(neurons, activation=activation)(outputs)
        if layer['type'] == 'gaussian_layer':
            distributions = Lambda(gaussian_layer)(outputs)

    self.model = Model(inputs=inputs, outputs=distributions)

    if configs['model']['optimizer'] == 'adam':
        opt = Adam(lr=configs['model']['learningrate'])
    elif configs['model']['optimizer'] == 'rmsprop':
        opt = RMSprop(lr=configs['model']['learningrate'])

    print(self.model.summary())
    self.model.compile(loss=gaussian_loss, optimizer=opt, metrics=['accuracy'])
    print('[Model] Model Compiled with structure:', self.model.inputs)
    self.save_architecture(self.save_fname)
    timer.stop()
def build_model(self):
    timer = Timer()
    timer.start()
    print('[Model] Creating model..')
    # this model is not Sequential; it is built with the functional API
    self.model = None
    configs = self.c
    for layer in configs['model']['layers']:
        neurons = layer['neurons'] if 'neurons' in layer else None
        dropout_rate = layer['rate'] if 'rate' in layer else None
        activation = layer['activation'] if 'activation' in layer else None
        return_seq = layer['return_seq'] if 'return_seq' in layer else None
        input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
        input_features = layer['input_features'] if 'input_features' in layer else None

        # create the neural network
        if layer['type'] == 'lstm-paralel':
            first_input = Input(shape=(input_timesteps, input_features))
            first_output = LSTM(neurons, return_sequences=return_seq, return_state=False)(first_input)
            # second branch carries a single feature per timestep
            second_input = Input(shape=(input_timesteps, 1))
            second_output = LSTM(neurons, return_sequences=return_seq, return_state=False)(second_input)
            output = concatenate([first_output, second_output], axis=-1)
        if layer['type'] == 'dense':
            output = Dense(neurons, activation=activation)(output)
        if layer['type'] == 'dropout':
            output = Dropout(dropout_rate)(output)

    self.model = Model(inputs=[first_input, second_input], outputs=output)
    self.model.compile(loss=configs['model']['loss'],
                       optimizer=configs['model']['optimizer'],
                       metrics=['accuracy'])
    print(self.model.summary())
    print('[Model] Model Compiled with structure:', self.model.inputs)
    self.save_architecture(self.save_fname)
    timer.stop()
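# Hedged usage note (standard Keras behaviour, array names are hypothetical): a
# multi-input model like the one above is trained by passing one array per Input,
#
#   self.model.fit([x_main, x_extra], y, epochs=epochs, batch_size=batch_size)
#
# where x_main has shape (samples, input_timesteps, input_features) and
# x_extra has shape (samples, input_timesteps, 1).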
def build_model(self):
    timer = Timer()
    timer.start()
    print('[Model] Creating model..')
    configs = self.c
    for layer in configs['model']['layers']:
        neurons = layer['neurons'] if 'neurons' in layer else None
        dropout_rate = layer['rate'] if 'rate' in layer else None
        activation = layer['activation'] if 'activation' in layer else None
        return_seq = layer['return_seq'] if 'return_seq' in layer else None
        input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
        input_features = layer['input_features'] if 'input_features' in layer else None
        print('input_features %s input_timesteps %s ' % (input_features, input_timesteps))

        if layer['type'] == 'lstm':
            self.model.add(LSTM(neurons,
                                input_shape=(input_timesteps, input_features),
                                return_sequences=return_seq))
        if layer['type'] == 'dense':
            self.model.add(Dense(neurons, activation=activation))
        if layer['type'] == 'dropout':
            self.model.add(Dropout(dropout_rate))
        if layer['type'] == 'repeatvector':
            self.model.add(RepeatVector(neurons))
        if layer['type'] == 'timedistributed':
            self.model.add(TimeDistributed(Dense(neurons)))
        if layer['type'] == 'activation':
            self.model.add(Activation('linear'))

    print(self.model.summary())
    self.model.compile(loss=configs['model']['loss'],
                       optimizer=configs['model']['optimizer'],
                       metrics=['accuracy'])
    print('[Model] Model Compiled with structure:', self.model.inputs)
    self.save_architecture(self.save_fname)
    timer.stop()
def train_generator(self, data_gen, epochs, batch_size, steps_per_epoch, save_dir):
    timer = Timer()
    timer.start()
    print('[Model] Training Started')
    print('[Model] %s epochs, %s batch size, %s batches per epoch' % (epochs, batch_size, steps_per_epoch))

    save_fname = os.path.join(save_dir, '%s-e%s.h5' % (dt.datetime.now().strftime('%d%m%Y-%H%M%S'), str(epochs)))
    callbacks = [
        ModelCheckpoint(filepath=save_fname, monitor='loss', save_best_only=True)
    ]
    self.model.fit_generator(data_gen,
                             steps_per_epoch=steps_per_epoch,
                             epochs=epochs,
                             callbacks=callbacks,
                             workers=1)

    print('[Model] Training Completed. Model saved as %s' % save_fname)
    timer.stop()
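# Sketch of a generator compatible with fit_generator above (an assumption; the
# project's actual data generator is defined elsewhere): it must yield
# (x_batch, y_batch) tuples indefinitely.
#
#   def batch_generator(x, y, batch_size):
#       i = 0
#       while True:
#           yield x[i:i + batch_size], y[i:i + batch_size]
#           i = (i + batch_size) % max(1, len(x) - batch_size)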
def build_model(self):
    timer = Timer()
    timer.start()
    print('[Model] Creating model..')
    configs = self.c
    for layer in configs['model']['layers']:
        neurons = layer['neurons'] if 'neurons' in layer else None
        dropout_rate = layer['rate'] if 'rate' in layer else None
        activation = layer['activation'] if 'activation' in layer else None
        return_seq = layer['return_seq'] if 'return_seq' in layer else None
        input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
        input_features = layer['input_features'] if 'input_features' in layer else None
        dropout = layer['dropout'] if 'dropout' in layer else None
        stateful = layer['stateful'] if 'stateful' in layer else None
        #print('input_features %s input_timesteps %s ' % (input_features, input_timesteps))
        #print('batch_size: ', self.batch_size)

        if layer['type'] == 'lstm':
            if dropout is None:
                if stateful:
                    #inp = Input(batch_shape=(batch_size, input_timesteps, input_features), name="input")
                    # if stateful is True the shuffle parameter must be False
                    self.stateful = stateful
                    self.model.add(LSTM(neurons,
                                        batch_input_shape=(self.batch_size, input_timesteps, input_features),
                                        return_sequences=return_seq,
                                        stateful=stateful))
                else:
                    self.model.add(LSTM(neurons,
                                        input_shape=(input_timesteps, input_features),
                                        return_sequences=return_seq))
            else:
                # Dropout can be applied to the input connection within the LSTM nodes.
                #self.model.add(LSTM(neurons, batch_input_shape=(self.batch_size, input_timesteps, input_features),
                #                    return_sequences=return_seq, stateful=True, dropout=dropout))  # applied to input signal of lstm units
                self.model.add(LSTM(neurons,
                                    batch_input_shape=(self.batch_size, input_timesteps, input_features),
                                    return_sequences=return_seq,
                                    stateful=True,
                                    recurrent_dropout=dropout))
        if layer['type'] == 'dense':
            self.model.add(Dense(neurons, activation=activation))
        if layer['type'] == 'dropout':
            self.model.add(Dropout(dropout_rate))
        if layer['type'] == 'repeatvector':
            self.model.add(RepeatVector(neurons))
        if layer['type'] == 'timedistributed':
            self.model.add(TimeDistributed(Dense(neurons)))
        if layer['type'] == 'activation':
            self.model.add(Activation('linear'))

    if configs['model']['optimizer'] == 'adam':
        opt = Adam(lr=configs['model']['learningrate'])
    elif configs['model']['optimizer'] == 'rmsprop':
        opt = RMSprop(lr=configs['model']['learningrate'])

    self.model.compile(loss=configs['model']['loss'],
                       optimizer=opt,
                       metrics=configs['model']['metrics'])
    print(self.model.summary())
    print('[Model] Model Compiled with structure:', self.model.inputs)
    #self.save_architecture(self.save_fname)
    timer.stop()
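# Note on the stateful branch above (standard Keras constraints, stated as an
# assumption about how this model is trained elsewhere): a stateful LSTM fixes the
# batch size, so the number of samples must be divisible by self.batch_size,
# shuffling must be disabled, and states are reset explicitly between epochs, e.g.
#
#   for _ in range(epochs):
#       self.model.fit(x, y, batch_size=self.batch_size, epochs=1, shuffle=False)
#       self.model.reset_states()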
def build_model(self):
    timer = Timer()
    timer.start()
    print('[Model] Creating model..')
    # this model is not Sequential; it is built with the functional API
    self.model = None
    configs = self.c
    for layer in configs['model']['layers']:
        neurons = layer['neurons'] if 'neurons' in layer else None
        dropout_rate = layer['rate'] if 'rate' in layer else None
        activation = layer['activation'] if 'activation' in layer else None
        return_seq = layer['return_seq'] if 'return_seq' in layer else None
        input_timesteps = layer['input_timesteps'] if 'input_timesteps' in layer else None
        input_features = layer['input_features'] if 'input_features' in layer else None
        dropout = layer['dropout'] if 'dropout' in layer else None
        stateful = layer['stateful'] if 'stateful' in layer else None

        # create the neural network
        if layer['type'] == 'lstm':
            # all three branches share the same (input_timesteps, input_features) shape
            first_input = Input(shape=(input_timesteps, input_features))
            first_output = LSTM(neurons, return_sequences=return_seq, return_state=False)(first_input)
            second_input = Input(shape=(input_timesteps, input_features))
            second_output = LSTM(neurons, return_sequences=return_seq, return_state=False)(second_input)
            third_input = Input(shape=(input_timesteps, input_features))
            third_output = LSTM(neurons, return_sequences=return_seq, return_state=False)(third_input)
            output = concatenate([first_output, second_output, third_output])
        if layer['type'] == 'dense':
            output = Dense(neurons, activation=activation)(output)
        if layer['type'] == 'dropout':
            output = Dropout(dropout_rate)(output)

    if configs['model']['optimizer'] == 'adam':
        opt = Adam(lr=configs['model']['learningrate'])
    elif configs['model']['optimizer'] == 'rmsprop':
        opt = RMSprop(lr=configs['model']['learningrate'])

    self.model = Model(inputs=[first_input, second_input, third_input], outputs=output)
    self.model.compile(loss=configs['model']['loss'],
                       optimizer=opt,
                       metrics=configs['model']['metrics'])
    print(self.model.summary())
    print('[Model] Model Compiled with structure:', self.model.inputs)
    timer.stop()