Example #1
0
    def train(self, dataset=None, initial_epoch=0):
        """Train the capsule network and persist its weights.

        Args:
            dataset: optional pre-built ``Dataset``; when ``None`` a new one
                is created from ``self.data_name`` / ``self.config_path``.
            initial_epoch: epoch index to resume from (forwarded to ``fit``).

        Returns:
            The Keras ``History`` object produced by ``model.fit``.
        """
        callbacks = get_callbacks(self.model_name, self.tb_path,
                                  self.model_path_new_train,
                                  self.config['lr_dec'], self.config['lr'])

        # Bug fix: compare to None with `is`, not `==` (identity check).
        if dataset is None:
            dataset = Dataset(self.data_name, self.config_path)
        dataset_train, dataset_val = dataset.get_tf_data()

        # Margin loss drives classification; 'mse' is the reconstruction
        # loss, down-weighted by the config's lmd_gen factor.
        self.model.compile(optimizer=tf.keras.optimizers.Adam(
            learning_rate=self.config['lr']),
                           loss=[marginLoss, 'mse'],
                           loss_weights=[1., self.config['lmd_gen']],
                           metrics={'Original_CapsNet': 'accuracy'})

        print('-' * 30 + f'{self.data_name} train' + '-' * 30)

        history = self.model.fit(dataset_train,
                                 epochs=self.config['epochs'],
                                 validation_data=dataset_val,
                                 batch_size=self.config['batch_size'],
                                 initial_epoch=initial_epoch,
                                 callbacks=callbacks,
                                 workers=self.config['num_workers'])

        self.model.save_weights(
            os.path.join(self.config['saved_model_dir'], f"{self.model_name}",
                         f"{self.model_name}_{self.data_name}.h5"))

        return history
Example #2
0
    def train(self, dataset=None, initial_epoch=0):
        """Train the model with the configured optimizer and save weights.

        Args:
            dataset: optional pre-built ``Dataset``; created from
                ``self.data_name`` / ``self.config_path`` when ``None``.
            initial_epoch: epoch index to resume from.

        Returns:
            The Keras ``History`` object produced by ``model.fit``.
        """
        callbacks = get_callbacks(self.model_name,
                                  self.tb_path,
                                  self.model_path_new_train,
                                  self.config['lr_dec'],
                                  self.config['lr'],
                                  optimizer=self.optimizer)

        if dataset is None:
            dataset = Dataset(self.data_name, self.config_path)
        dataset_train, dataset_val = dataset.get_tf_data()

        # Bug fix: Adam takes no `momentum` argument (that is an SGD
        # parameter) and `lr` is the deprecated alias of `learning_rate` —
        # the original Adam branch raised TypeError at runtime.
        if self.optimizer == 'Adam':
            optimizer = tf.keras.optimizers.Adam(
                learning_rate=self.config['lr'])
        else:
            optimizer = tf.keras.optimizers.SGD(
                learning_rate=self.config['lr'])
        self.model.compile(optimizer=optimizer,
                           loss=[marginLoss, 'mse'],
                           loss_weights=[1., self.config['lmd_gen']],
                           metrics={self.model_name: 'accuracy'})

        print('-' * 30 + f'{self.data_name} train' + '-' * 30)

        # steps_per_epoch=None lets Keras infer it from the dataset.
        history = self.model.fit(dataset_train,
                                 epochs=self.config['epochs'],
                                 steps_per_epoch=None,
                                 validation_data=dataset_val,
                                 batch_size=self.config['batch_size'],
                                 initial_epoch=initial_epoch,
                                 callbacks=callbacks,
                                 workers=self.config['num_workers'])

        self.model.save_weights(
            os.path.join(self.config['saved_model_dir'], f"{self.model_name}",
                         f"{self.model_name}_{self.data_name}.h5"))

        return history
Example #3
0
    def train(self, dataset=None, initial_epoch=0, resume=False):
        """Train a non-capsule comparison model and persist its weights.

        Args:
            dataset: optional pre-built ``Dataset``; created from
                ``self.data_name`` / ``self.config_path`` when ``None``.
            initial_epoch: epoch index to resume from.
            resume: when True, load previously saved graph weights first.

        Returns:
            The Keras ``History`` object produced by ``model.fit``.

        Raises:
            ValueError: if ``self.optimizer`` is neither 'Adam' nor 'SGD'.
        """
        callbacks = get_callbacks(self.model_path_new_train,
                                  optimizer=self.optimizer)

        if dataset is None:
            dataset = Dataset(self.data_name, self.config_path)
        dataset_train, dataset_val = dataset.get_tf_data(for_capsule=False)

        # `lr` is the deprecated alias of `learning_rate` in tf.keras.
        if self.optimizer == 'SGD':
            optimizer = tf.keras.optimizers.SGD(
                learning_rate=self.config['ETC_MODEL_LR'], momentum=0.9)
        elif self.optimizer == 'Adam':
            optimizer = tf.keras.optimizers.Adam(
                learning_rate=self.config['ETC_MODEL_LR'])
        else:
            # Raise with the message attached instead of print + bare raise,
            # so the reason survives in tracebacks and logs.
            raise ValueError(
                f"optimizer must be one of ['Adam', 'SGD'], "
                f"got {self.optimizer!r}")
        self.model.compile(optimizer=optimizer,
                           loss='categorical_crossentropy',
                           metrics='accuracy')

        if resume:
            self.load_graph_weights()

        print('-' * 30 + f'{self.data_name} train' + '-' * 30)

        # steps_per_epoch=None lets Keras infer it from the dataset.
        history = self.model.fit(dataset_train,
                                 epochs=self.config['ETC_MODEL_EPOCHS'],
                                 steps_per_epoch=None,
                                 validation_data=dataset_val,
                                 batch_size=self.config['batch_size'],
                                 initial_epoch=initial_epoch,
                                 callbacks=callbacks,
                                 workers=self.config['num_workers'])

        self.model.save_weights(
            os.path.join(self.config['saved_model_dir'], f"{self.model_name}",
                         f"{self.model_name}_{self.data_name}.h5"))

        return history
    def train(self, dataset=None, initial_epoch=0):
        """Train Efficient-CapsNet; MULTIMNIST uses a dual-reconstruction loss.

        Args:
            dataset: optional pre-built ``Dataset``; created from
                ``self.model_name`` / ``self.config_path`` when ``None``.
            initial_epoch: epoch index to resume from.

        Returns:
            The Keras ``History`` object produced by ``model.fit``.
        """
        callbacks = get_callbacks(self.tb_path, self.model_path_new_train,
                                  self.config['lr_dec'], self.config['lr'])

        # Bug fix: compare to None with `is`, not `==` (identity check).
        if dataset is None:
            dataset = Dataset(self.model_name, self.config_path)
        dataset_train, dataset_val = dataset.get_tf_data()

        if self.model_name == 'MULTIMNIST':
            # Two overlaid digits → two reconstruction targets, each at
            # half the generator weight.
            self.model.compile(optimizer=tf.keras.optimizers.Adam(
                learning_rate=self.config['lr']),
                               loss=[marginLoss, 'mse', 'mse'],
                               loss_weights=[
                                   1., self.config['lmd_gen'] / 2,
                                   self.config['lmd_gen'] / 2
                               ],
                               metrics={'Efficient_CapsNet': multiAccuracy})
            # 10 passes over the training set per reported epoch.
            steps = 10 * int(
                dataset.y_train.shape[0] / self.config['batch_size'])
        else:
            self.model.compile(optimizer=tf.keras.optimizers.Adam(
                learning_rate=self.config['lr']),
                               loss=[marginLoss, 'mse'],
                               loss_weights=[1., self.config['lmd_gen']],
                               metrics={'Efficient_CapsNet': 'accuracy'})
            steps = None

        print('-' * 30 + f'{self.model_name} train' + '-' * 30)

        history = self.model.fit(dataset_train,
                                 epochs=self.config['epochs'],
                                 steps_per_epoch=steps,
                                 validation_data=dataset_val,
                                 batch_size=self.config['batch_size'],
                                 initial_epoch=initial_epoch,
                                 callbacks=callbacks)

        return history
Example #5
0
    def train(self, dataset=None, initial_epoch=0):
        """Compile the model and fit it on the (possibly freshly built) data.

        Args:
            dataset: pre-built ``Dataset``, or ``None`` to construct one from
                ``self.model_name`` and ``self.config_path``.
            initial_epoch: epoch index to resume training from.

        Returns:
            The Keras ``History`` returned by ``model.fit``.
        """
        if dataset is None:
            dataset = Dataset(self.model_name, self.config_path)
        train_data, val_data = dataset.get_tf_data()

        cfg = self.config
        half_gen_weight = cfg['lmd_gen'] / 2
        self.model.compile(
            optimizer=tf.keras.optimizers.Adam(learning_rate=cfg['lr']),
            loss=[marginLoss, 'mse', 'mse'],
            loss_weights=[1., half_gen_weight, half_gen_weight],
            metrics={'Efficient_CapsNet': 'accuracy'})

        # 10 passes over the training set per reported epoch.
        steps_per_epoch = 10 * (dataset.y_train.shape[0] // cfg['batch_size'])

        print('-' * 30 + f'{self.model_name} train' + '-' * 30)

        # NOTE(review): config key is 'epoch' here while sibling examples
        # read 'epochs' — presumably this config really uses the singular
        # key; verify before renaming.
        return self.model.fit(train_data,
                              epochs=cfg['epoch'],
                              steps_per_epoch=steps_per_epoch,
                              validation_data=val_data,
                              batch_size=cfg['batch_size'],
                              initial_epoch=initial_epoch)