Code Example #1
    def test_from_pretrained_with_tuple_values(self):
        # For the auto model mapping, FunnelConfig has two models: FunnelModel and FunnelBaseModel
        model = TFAutoModel.from_pretrained("sgugger/funnel-random-tiny")
        self.assertIsInstance(model, TFFunnelModel)

        config = copy.deepcopy(model.config)
        config.architectures = ["FunnelBaseModel"]
        model = TFAutoModel.from_config(config)
        self.assertIsInstance(model, TFFunnelBaseModel)

        with tempfile.TemporaryDirectory() as tmp_dir:
            model.save_pretrained(tmp_dir)
            model = TFAutoModel.from_pretrained(tmp_dir)
            self.assertIsInstance(model, TFFunnelBaseModel)
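
The test above relies on the dispatch that TFAutoModel.from_config performs when a config class maps to several TF model classes: the class named in config.architectures wins. Below is a minimal standalone sketch of that behaviour, written from what the test asserts rather than taken from the original file; exact auto-mapping details may vary across transformers versions.

from transformers import FunnelConfig, TFAutoModel, TFFunnelBaseModel, TFFunnelModel

# FunnelConfig maps to both TFFunnelModel and TFFunnelBaseModel in the auto
# mapping; the `architectures` entry decides which one from_config builds.
config = FunnelConfig()  # default hyperparameters, weights stay randomly initialised

config.architectures = ["FunnelModel"]
assert isinstance(TFAutoModel.from_config(config), TFFunnelModel)

config.architectures = ["FunnelBaseModel"]
assert isinstance(TFAutoModel.from_config(config), TFFunnelBaseModel)
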
Code Example #2
    def __init__(self, tokenizer):
        super().__init__()
        self.tokenizer = tokenizer
        # Only the config is fetched from TINY_MODEL_CHECKPOINT here; from_config
        # builds the architecture with randomly initialised weights instead of
        # loading the checkpoint's weights.
        config = AutoConfig.from_pretrained(TINY_MODEL_CHECKPOINT)
        self.bert = TFAutoModel.from_config(config)
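
Only the constructor is shown above. The following is a hedged sketch of how such a wrapper might be completed and used; the class name, the call signature, and the checkpoint value assigned to TINY_MODEL_CHECKPOINT are illustrative stand-ins, not taken from the original source.

import tensorflow as tf
from transformers import AutoConfig, AutoTokenizer, TFAutoModel

TINY_MODEL_CHECKPOINT = "hf-internal-testing/tiny-random-bert"  # stand-in value, not the original constant

class TransformerWrapper(tf.keras.Model):
    def __init__(self, tokenizer):
        super().__init__()
        self.tokenizer = tokenizer
        config = AutoConfig.from_pretrained(TINY_MODEL_CHECKPOINT)
        self.bert = TFAutoModel.from_config(config)  # architecture only, random weights

    def call(self, inputs):
        # `inputs` is the dict produced by the tokenizer (input_ids, attention_mask, ...).
        return self.bert(inputs).last_hidden_state

tokenizer = AutoTokenizer.from_pretrained(TINY_MODEL_CHECKPOINT)
model = TransformerWrapper(tokenizer)
batch = dict(tokenizer(["hello world", "a second sentence"], padding=True, return_tensors="tf"))
print(model(batch).shape)  # (2, sequence_length, hidden_size)
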
Code Example #3
    def instantiate_layer(
        self,
        load_pretrained_weights=True
    ) -> Union[object, TFAutoModel, TFBertModel]:
        """
        Instantiate a transformer model to be loaded into a Keras layer, according to the loading method configured for the pre-trained transformer.
        """
        if self.loading_method == LOADING_METHOD_HUGGINGFACE_NAME:
            if load_pretrained_weights:
                transformer_model = TFAutoModel.from_pretrained(self.name,
                                                                from_pt=True)
                self.transformer_config = transformer_model.config
                return transformer_model
            else:
                config_path = os.path.join(".", self.local_dir_path,
                                           TRANSFORMER_CONFIG_FILE_NAME)
                self.transformer_config = AutoConfig.from_pretrained(
                    config_path)
                return TFAutoModel.from_config(self.transformer_config)

        elif self.loading_method == LOADING_METHOD_LOCAL_MODEL_DIR:
            if load_pretrained_weights:
                transformer_model = TFAutoModel.from_pretrained(
                    self.local_dir_path, from_pt=True)
                self.transformer_config = transformer_model.config
                return transformer_model
            else:
                config_path = os.path.join(".", self.local_dir_path,
                                           TRANSFORMER_CONFIG_FILE_NAME)
                self.transformer_config = AutoConfig.from_pretrained(
                    config_path)
                #self.transformer_config = AutoConfig.from_pretrained(self.local_dir_path)
                return TFAutoModel.from_config(self.transformer_config)

        elif self.loading_method == LOADING_METHOD_PLAIN_MODEL:
            if load_pretrained_weights:
                self.transformer_config = AutoConfig.from_pretrained(
                    self.local_config_file)
                # transformer_model = TFBertModel.from_pretrained(self.local_weight_file, from_tf=True)
                raise NotImplementedError(
                    "Loading TF weights through the Hugging Face auto-model classes is not yet "
                    "implemented. Please load from the Hugging Face Hub or from a local directory "
                    "for the initial loading of the transformer weights."
                )
            else:
                config_path = os.path.join(".", self.local_dir_path,
                                           TRANSFORMER_CONFIG_FILE_NAME)
                self.transformer_config = AutoConfig.from_pretrained(
                    config_path)
                return TFBertModel.from_config(self.transformer_config)

        else:
            # TODO: revise this
            if load_pretrained_weights:
                transformer_model = TFAutoModel.from_pretrained(
                    self.local_dir_path, from_pt=True)
                self.transformer_config = transformer_model.config
                return transformer_model
            else:
                config_path = os.path.join(".", self.local_dir_path,
                                           TRANSFORMER_CONFIG_FILE_NAME)
                self.transformer_config = AutoConfig.from_pretrained(
                    config_path)
                return TFAutoModel.from_config(self.transformer_config)
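
The docstring says the returned transformer is meant to be loaded into a Keras layer. Below is a hedged sketch of that downstream step for the config-only path (load_pretrained_weights=False); the checkpoint name, the input layout, and the classification head are illustrative and not part of the original class.

import tensorflow as tf
from transformers import AutoConfig, TFAutoModel

# Stand-in for the config the method would read from TRANSFORMER_CONFIG_FILE_NAME.
config = AutoConfig.from_pretrained("hf-internal-testing/tiny-random-bert")
transformer_model = TFAutoModel.from_config(config)  # as returned when load_pretrained_weights=False

input_ids = tf.keras.Input(shape=(None,), dtype=tf.int32, name="input_ids")
attention_mask = tf.keras.Input(shape=(None,), dtype=tf.int32, name="attention_mask")

# First output of the transformer is the last hidden state; keep the [CLS] token.
sequence_output = transformer_model({"input_ids": input_ids, "attention_mask": attention_mask})[0]
cls_token = sequence_output[:, 0, :]
prediction = tf.keras.layers.Dense(1, activation="sigmoid")(cls_token)

keras_model = tf.keras.Model(inputs=[input_ids, attention_mask], outputs=prediction)
keras_model.summary()
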