Example #1
    def set_model(self, inference=False):
        self.model_type = 'linear' if 'phone' in self.task else 'rnn'
        input_dim = int(self.config['downstream'][self.model_type]['input_dim']) if \
                    self.config['downstream'][self.model_type]['input_dim'] != 'None' else None
        if 'mockingjay' in self.task:
            self.mockingjay = Tester(self.mock_config, self.mock_paras)
            if self.fine_tune and inference:
                self.mockingjay.load = False  # Do not load the upstream checkpoint twice when testing the fine-tuned model; it is loaded only during fine-tune training
            self.mockingjay.set_model(inference=True,
                                      with_head=self.paras.with_head)
            self.dr = self.mockingjay.dr
            if input_dim is None:
                input_dim = self.mock_config['mockingjay']['hidden_size']
        elif 'apc' in self.task:
            self.apc = get_apc_model(path=self.paras.apc_path)
            if input_dim is None:
                input_dim = self.mock_config['mockingjay']['hidden_size']  # use identical dim size for fair comparison
        elif 'baseline' in self.task:
            if input_dim is None:
                input_dim = mel_dim  # mel-spectrogram feature dimension, assumed to be defined/imported at module level
        else:
            raise NotImplementedError('Invalid Task!')

        if self.model_type == 'linear':
            self.classifier = LinearClassifier(
                input_dim=input_dim,
                class_num=self.dataloader.dataset.class_num,
                task=self.task,
                dconfig=self.config['downstream']['linear'],
                sequencial=False).to(self.device)
        elif self.model_type == 'rnn':
            self.classifier = RnnClassifier(
                input_dim=input_dim,
                class_num=self.dataloader.dataset.class_num,
                task=self.task,
                dconfig=self.config['downstream']['rnn']).to(self.device)

        if not inference and self.fine_tune:
            # Set up the fine-tuning optimizer
            self.mockingjay.mockingjay.train()
            param_optimizer = list(
                self.mockingjay.mockingjay.named_parameters()) + list(
                    self.classifier.named_parameters())
            self.optimizer = get_mockingjay_optimizer(
                params=param_optimizer,
                lr=self.learning_rate,
                warmup_proportion=self.config['optimizer']['warmup_proportion'],
                training_steps=self.total_steps)
        elif not inference:
            self.optimizer = Adam(self.classifier.parameters(),
                                  lr=self.learning_rate,
                                  betas=(0.9, 0.999))
            self.classifier.train()
        else:
            self.classifier.eval()

        if self.load:  # Set to True by default when the Tester runs set_model()
            self.load_model(inference=inference)
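
For reference, a minimal standalone sketch of the joint fine-tuning setup above: both the upstream encoder's and the classifier's named parameters are gathered into a single optimizer. Plain torch.optim.Adam stands in for get_mockingjay_optimizer (whose warmup schedule is specific to this repo), and the module sizes and learning rate are hypothetical.

# Sketch only: Adam replaces get_mockingjay_optimizer; sizes and lr are hypothetical.
import torch

upstream = torch.nn.Linear(768, 768)    # stands in for self.mockingjay.mockingjay
classifier = torch.nn.Linear(768, 41)   # stands in for self.classifier

upstream.train()
classifier.train()
named_params = list(upstream.named_parameters()) + list(classifier.named_parameters())
optimizer = torch.optim.Adam([p for _, p in named_params], lr=1e-4, betas=(0.9, 0.999))
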
Example #2
def get_downstream_model(args, input_dim, class_num, config):
    
    model_name = args.run.split('_')[-1].replace('frame', 'linear')  # supported names: ['linear', '1hidden', 'concat', 'utterance']
    model_config = config['model'][model_name]

    if args.task == 'speaker' and 'utterance' in args.run:
        downstream_model = RnnClassifier(input_dim, class_num, model_config)
    else:
        downstream_model = LinearClassifier(input_dim, class_num, model_config)
    
    return downstream_model
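
For reference, a small standalone sketch of the run-name parsing that get_downstream_model relies on; the run names below are hypothetical examples of the supported pattern. The downstream solver's set_model() method follows.

# Sketch only: hypothetical run names illustrating the naming convention.
for run in ['phone_linear', 'phone_1hidden', 'speaker_frame', 'speaker_utterance']:
    model_name = run.split('_')[-1].replace('frame', 'linear')
    print(run, '->', model_name)
# 'speaker_frame' maps to 'linear'; only a 'speaker' task with 'utterance' in the
# run name selects the RnnClassifier branch above.
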
    def set_model(self, inference=False):
        input_dim = int(self.config['downstream'][self.model_type]['input_dim']) if \
                    self.config['downstream'][self.model_type]['input_dim'] != 'None' else None
        if 'transformer' in self.task:
            self.upstream_tester = Tester(self.upstream_config,
                                          self.upstream_paras)
            if self.fine_tune and inference:
                self.upstream_tester.load = False  # During inference on a fine-tuned model, load with `load_downstream_model()` instead
            self.upstream_tester.set_model(
                inference=True, with_head=self.paras.with_head
            )  # inference should be set to True so the upstream solver won't create an optimizer
            self.dr = self.upstream_tester.dr
            if input_dim is None:
                input_dim = self.transformer_config['hidden_size']
        elif 'apc' in self.task:
            self.apc = get_apc_model(path=self.paras.apc_path)
            if input_dim is None:
                input_dim = self.transformer_config['hidden_size']  # use identical dim size for fair comparison
        elif 'baseline' in self.task:
            if input_dim is None:
                if 'input_dim' in self.transformer_config:
                    input_dim = self.transformer_config['input_dim']
                else:
                    raise ValueError(
                        'Please update your config file to include the attribute `input_dim`.'
                    )
        else:
            raise NotImplementedError('Invalid Task!')

        if self.model_type == 'linear':
            self.classifier = LinearClassifier(
                input_dim=input_dim,
                class_num=self.dataloader.dataset.class_num,
                dconfig=self.config['downstream']['linear']).to(self.device)
        elif self.model_type == 'rnn':
            self.classifier = RnnClassifier(
                input_dim=input_dim,
                class_num=self.dataloader.dataset.class_num,
                dconfig=self.config['downstream']['rnn']).to(self.device)

        if not inference and self.fine_tune:
            # Set up the fine-tuning optimizer
            self.upstream_tester.transformer.train()
            param_optimizer = list(
                self.upstream_tester.transformer.named_parameters()) + list(
                    self.classifier.named_parameters())
            self.optimizer = get_optimizer(
                params=param_optimizer,
                lr=self.learning_rate,
                warmup_proportion=self.config['optimizer']['warmup_proportion'],
                training_steps=self.total_steps)
        elif not inference:
            self.optimizer = Adam(self.classifier.parameters(),
                                  lr=self.learning_rate,
                                  betas=(0.9, 0.999))
            self.classifier.train()
        else:
            self.classifier.eval()

        if self.load:  # Set to True by default when the Tester runs set_model()
            self.load_downstream_model(inference=inference)
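
To make the input_dim resolution at the top of set_model() concrete, here is a standalone sketch with a hypothetical config: the string 'None' in the downstream config means "fall back to the upstream hidden size", mirroring the logic above.

# Sketch only: hypothetical config values illustrating the input_dim fallback.
config = {'downstream': {'rnn': {'input_dim': 'None'}}}
transformer_config = {'hidden_size': 768}

raw = config['downstream']['rnn']['input_dim']
input_dim = int(raw) if raw != 'None' else None
if input_dim is None:
    input_dim = transformer_config['hidden_size']  # fall back to the upstream hidden size
print(input_dim)  # 768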