Example #1
        # Create the shared DistilBERT encoder and the per-domain experts;
        # n_domains is len(train_dls) - 1, leaving out the held-out domain
        bert = DistilBertForSequenceClassification.from_pretrained(
            bert_model, config=bert_config).to(device)
        multi_xformer = MultiDistilBertClassifier(
            bert_model, bert_config, n_domains=len(train_dls) - 1).to(device)
        if args.pretrained_multi_xformer is not None:
            multi_xformer.load_state_dict(
                torch.load(f"{args.pretrained_multi_xformer}/model_{domain}.pth"))
            (val_loss, acc, P, R, F1), _ = validation_evaluator.evaluate(multi_xformer)
            print(f"Validation acc multi-xformer: {acc}")

        shared_bert = VanillaBert(bert).to(device)
        if args.pretrained_bert is not None:
            shared_bert.load_state_dict(
                torch.load(f"{args.pretrained_bert}/model_{domain}.pth"))
            (val_loss, acc, P, R, F1), _ = validation_evaluator.evaluate(shared_bert)
            print(f"Validation acc shared bert: {acc}")

        model = torch.nn.DataParallel(
            MultiViewTransformerNetworkProbabilitiesAdversarial(
                multi_xformer,
                shared_bert,
                supervision_layer=args.supervision_layer)).to(device)
        # (val_loss, acc, P, R, F1), _ = validation_evaluator.evaluate(model)
        # print(f"Validation acc starting: {acc}")
Example #2
        train_dls = [
            DataLoader(subset[0],
                       batch_size=batch_size,
                       shuffle=True,
                       collate_fn=collate_batch_transformer)
            for subset in subsets
        ]

        val_ds = [subset[1] for subset in subsets]
        # for vds in val_ds:
        #     print(vds.indices)
        validation_evaluator = MultiDatasetClassificationEvaluator(
            val_ds, device)

        # Create the model: a shared DistilBERT wrapped by VanillaBert
        bert = DistilBertForSequenceClassification.from_pretrained(
            bert_model, config=bert_config).to(device)
        init_weights = None
        shared_bert = VanillaBert(bert).to(device)

        multi_xformer = MultiDistilBertClassifier(
            bert_model,
            bert_config,
            n_domains=len(train_dls),
            init_weights=init_weights).to(device)

        model = torch.nn.DataParallel(
            MultiViewTransformerNetworkSelectiveWeight(multi_xformer,
                                                       shared_bert)).to(device)

        # Load the best weights
        model.load_state_dict(
            torch.load(f'{args.pretrained_model}/model_{domain}.pth'))
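The excerpt ends once the weights are restored. A plausible next step, reusing the evaluate() signature shown in Example #1 (a sketch, not part of the original code):

        # evaluate() returns (loss, accuracy, precision, recall, F1) plus
        # auxiliary outputs, matching the unpacking used in Example #1
        (val_loss, acc, P, R, F1), _ = validation_evaluator.evaluate(model)
        print(f"Validation acc pretrained model: {acc}")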
Example #3
        # Create the model: DistilBERT with one output label per domain
        shared_bert_config = DistilBertConfig.from_pretrained(
            bert_model, num_labels=len(train_dls))
        bert = DistilBertForSequenceClassification.from_pretrained(
            bert_model, config=shared_bert_config).to(device)
        init_weights = None
        if args.pretrained_bert is not None:
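            # Keep every pretrained weight except the classifier head, whose
            # shape depends on num_labels and would not match this config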
            init_weights = {
                k: v
                for k, v in torch.load(args.pretrained_bert).items()
                if "classifier" not in k
            }
            model_dict = bert.state_dict()
            model_dict.update(init_weights)
            bert.load_state_dict(model_dict)
        shared_bert = VanillaBert(bert).to(device)

        # 1) Create a domain classifier with BERT
        set_sizes = [len(subset[0]) for subset in subsets]
        # Weight each domain by 1 - its share of the data, so that
        # larger training sets are down-weighted relative to smaller ones
        weights = [1 - (len(subset[0]) / sum(set_sizes)) for subset in subsets]
        domain_classifier_train_dset = ConcatDataset(
            [subset[0] for subset in subsets])
        domain_classifier_train_dl = DataLoader(
            domain_classifier_train_dset,
            batch_size=batch_size,
            shuffle=True,
            collate_fn=collate_batch_transformer)
        domain_classifier_val_dset = ConcatDataset(
            [subset[1] for subset in subsets])
        domain_classifier_val_evaluator = DomainClassifierEvaluator(
            domain_classifier_val_dset, device)
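The snippet stops before the weights list is consumed. One plausible use, assumed here rather than shown in the excerpt, is as class weights for the domain-classification loss, so smaller domains are not drowned out by larger ones:

        # Assumed usage: pass the inverse-size weights to the loss so the
        # domain classifier does not simply favour the largest domain
        domain_loss_fn = torch.nn.CrossEntropyLoss(
            weight=torch.tensor(weights, device=device))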