Example No. 1
    def finetune(self, Xs, Y=None, batch_size=None):
        """
        :param Xs: lists of text inputs, shape [batch, n_fields]
        :param Y: floating point targets
        :param batch_size: integer number of examples per batch. When N_GPUS > 1, this number
                           corresponds to the number of training examples provided to each GPU.
        """
        return BaseModel.finetune(self, Xs, Y=Y, batch_size=batch_size)
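A minimal usage sketch for the regression-style finetune above. `RegressionModel` is purely a placeholder for whichever BaseModel subclass actually defines this method; the data shapes follow the docstring.

# Placeholder: substitute the real BaseModel subclass that defines the finetune() above.
model = RegressionModel()

# Two text fields per example (shape [batch, n_fields]) and one float target each.
Xs = [
    ["first field of example one", "second field of example one"],
    ["first field of example two", "second field of example two"],
]
Y = [0.25, 0.75]

model.finetune(Xs, Y=Y, batch_size=2)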
Example No. 2
    def predict(self, Xs, context=None, **kwargs):
        """
        Produces a list of most likely class labels as determined by the fine-tuned model.

        :param Xs: lists of text inputs, shape [batch, n_fields]
        :returns: list of class labels.
        """
        return BaseModel.predict(self, Xs, context=context, **kwargs)
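A sketch of calling predict after fine-tuning; `model` is assumed to be an already fine-tuned instance of the class that defines the method above.

# `model` is an already fine-tuned instance of the class defining predict().
new_Xs = [
    ["unseen first field", "unseen second field"],
]
labels = model.predict(new_Xs)   # one class label per input row
print(labels)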
Example No. 3
    def predict_proba(self, Xs, context=None, **kwargs):
        """
        Produces a probability distribution over classes for each example in Xs.

        :param Xs: lists of text inputs, shape [batch, n_fields]
        :returns: list of dictionaries. Each dictionary maps from a class label to its assigned class probability.
        """
        return BaseModel.predict_proba(self, Xs, context=context, **kwargs)
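Continuing the same sketch, predict_proba returns one dictionary of class probabilities per example.

probas = model.predict_proba(new_Xs)
# Each entry maps class label -> probability, e.g. {"positive": 0.8, "negative": 0.2}.
for row in probas:
    best = max(row, key=row.get)
    print(best, row[best])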
Example No. 4
    def featurize(self, Xs, **kwargs):
        """
        Embeds inputs in learned feature space. Can be called before or after calling :meth:`finetune`.

        :param Xs: lists of text inputs, shape [batch, n_fields]
        :returns: np.array of features of shape (n_examples, embedding_size).
        """
        return BaseModel.featurize(self, Xs, **kwargs)
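featurize can be called before or after fine-tuning; continuing the sketch, it returns one embedding row per example.

features = model.featurize(new_Xs)
# features is a 2-D numpy array of shape (n_examples, embedding_size).
print(features.shape)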
Example No. 5
    def finetune(self, Xs, Y=None, batch_size=None):
        """
        :param Xs: lists of text inputs, shape [batch, n_fields]
        :param Y: integer or string-valued class labels. The items of Y must be sortable.
        :param batch_size: integer number of examples per batch. When N_GPUS > 1, this number
                           corresponds to the number of training examples provided to each GPU.
        """
        return BaseModel.finetune(self, Xs, Y=Y, batch_size=batch_size)
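For the classification-style finetune above, Y holds sortable class labels rather than floats; `ClassificationModel` is again only a placeholder name.

# Placeholder: substitute the real BaseModel subclass that defines this finetune().
clf = ClassificationModel()

Xs = [
    ["subject line A", "body text A"],
    ["subject line B", "body text B"],
]
Y = ["spam", "not_spam"]   # string labels; the items of Y must be sortable

clf.finetune(Xs, Y=Y, batch_size=2)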
Example No. 6
    def featurize(self, pairs):
        """
        Embeds inputs in learned feature space. Can be called before or after calling :meth:`finetune`.

        :param pairs: Array of text, shape [batch, 2]
        :returns: np.array of features of shape (n_examples, embedding_size).
        """
        return BaseModel.featurize(self, pairs)
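A sketch for the pair-based featurize above, with inputs of shape [batch, 2]; `PairModel` is a placeholder name.

# Placeholder for the BaseModel subclass whose featurize() is shown above.
pair_model = PairModel()

pairs = [
    ["a cat sat on the mat", "a feline rested on the rug"],
    ["the sky is blue", "stock prices fell sharply"],
]
embeddings = pair_model.featurize(pairs)   # shape (2, embedding_size)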
Example No. 7
    def featurize(self, questions, answers):
        """
        Embeds inputs in learned feature space. Can be called before or after calling :meth:`finetune`.

        :param questions: List or array of text, shape [batch]
        :param answers: List or array of text, shape [n_answers, batch]
        :returns: np.array of features of shape (n_examples, embedding_size).
        """
        return BaseModel.featurize(self, zip(questions, answers))
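The question-answer featurize above zips the two inputs into (question, answers) tuples before delegating to BaseModel.featurize. A sketch following the docstring's shapes; `QAModel` is a placeholder name.

# Placeholder for the class defining the featurize() above.
qa_model = QAModel()

questions = ["what colour is the sky?", "who wrote hamlet?"]
# Per the docstring, answers is indexed as [n_answers, batch]:
# answers[i][j] is the i-th candidate answer for the j-th question.
answers = [
    ["blue", "shakespeare"],
    ["green", "marlowe"],
]
features = qa_model.featurize(questions, answers)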
Example No. 8
    def predict(self, pairs):
        """
        Produces a list of most likely class labels as determined by the fine-tuned model.

        :param pairs: Array of text, shape [batch, 2]
        :returns: list of class labels.
        """
        return BaseModel.predict(self, pairs)
Example No. 9
    def predict_proba(self, pairs):
        """
        Produces a probability distribution over classes for each input pair.

        :param pairs: Array of text, shape [batch, 2]
        :returns: list of dictionaries.  Each dictionary maps from a class label to its assigned class probability.
        """
        return BaseModel.predict_proba(self, pairs)
Example No. 10
    def predict(self, Xs, max_length=None):
        """
        Produces a list of most likely class labels as determined by the fine-tuned model.

        :param Xs: lists of text inputs, shape [batch, n_fields]
        :param max_length: the number of tokens to be included in the document representation.
                           Providing more than `max_length` tokens as input will result in truncation.
        :returns: list of class labels.
        """
        return BaseModel.predict(self, Xs, max_length=max_length)
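max_length caps the number of tokens used to represent each document; anything longer is truncated. A sketch reusing the fine-tuned `model` from the earlier sketches, with 128 chosen arbitrarily.

long_Xs = [
    ["a very long first field of text ...", "a very long second field of text ..."],
]
labels = model.predict(long_Xs, max_length=128)   # inputs truncated to 128 tokens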
Example No. 11
    def featurize(self, pairs, max_length=None):
        """
        Embeds inputs in learned feature space. Can be called before or after calling :meth:`finetune`.

        :param pairs: Array of text, shape [batch, 2]
        :param max_length: the number of byte-pair encoded tokens to be included in the document representation.
                           Providing more than `max_length` tokens as input will result in truncation.
        :returns: np.array of features of shape (n_examples, embedding_size).
        """
        return BaseModel.featurize(self, pairs, max_length=max_length)
Example No. 12
    def predict_proba(self, Xs, max_length=None):
        """
        Produces a probability distribution over classes for each example in Xs.

        :param Xs: lists of text inputs, shape [batch, n_fields]
        :param max_length: the number of tokens to be included in the document representation.
                           Providing more than `max_length` tokens as input will result in truncation.
        :returns: list of dictionaries. Each dictionary maps from a class label to its assigned class probability.
        """
        return BaseModel.predict_proba(self, Xs, max_length=max_length)
Example No. 13
    def predict(self, pairs, max_length=None):
        """
        Produces a list of most likely class labels as determined by the fine-tuned model.

        :param pairs: Array of text, shape [batch, 2]
        :param max_length: the number of byte-pair encoded tokens to be included in the document representation.
                           Providing more than `max_length` tokens as input will result in truncation.
        :returns: list of class labels.
        """
        return BaseModel.predict(self, pairs, max_length=max_length)
Example No. 14
    def predict_proba(self, pairs, max_length=None):
        """
        Produces a probability distribution over classes for each input pair.

        :param pairs: Array of text, shape [batch, 2]
        :param max_length: the number of byte-pair encoded tokens to be included in the document representation.
                           Providing more than `max_length` tokens as input will result in truncation.
        :returns: list of dictionaries.  Each dictionary maps from a class label to its assigned class probability.
        """
        return BaseModel.predict_proba(self, pairs, max_length=max_length)
Example No. 15
    def predict(self, questions, answers):
        """
        Produces a list of most likely class labels as determined by the fine-tuned model.

        :param questions: List or array of text, shape [batch]
        :param answers: List or array of text, shape [batch, n_answers]
        :returns: list of class labels.
        """
        raw_ids = BaseModel.predict(self, list(zip(questions, answers)))
        return [ans[i] for ans, i in zip(answers, raw_ids)]
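The predict above first obtains a raw class id per example from BaseModel.predict, then uses it to index that example's candidate list, so it returns answer strings directly. A sketch; `qa_model` is the same placeholder as before.

questions = ["what colour is the sky?", "who wrote hamlet?"]
# Here answers is indexed as [batch, n_answers]: one candidate list per question.
answers = [
    ["blue", "green"],
    ["shakespeare", "marlowe"],
]
chosen = qa_model.predict(questions, answers)
# e.g. ["blue", "shakespeare"] -- one selected answer string per question.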
Example No. 16
    def predict_proba(self, X1, X2, max_length=None):
        """
        Produces a probability distribution over classes for each (X1, X2) pair of examples.

        :param X1: List or array of text, shape [batch]
        :param X2: List or array of text, shape [batch]
        :param max_length: the number of byte-pair encoded tokens to be included in the document representation.
                           Providing more than `max_length` tokens as input will result in truncation.
        :returns: list of dictionaries.  Each dictionary maps from a class label to its assigned class probability.
        """
        return BaseModel.predict_proba(self, X1, X2, max_length=max_length)
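This predict_proba takes the two texts as separate parallel lists rather than pre-built pairs; a sketch with `pair_model` again standing in for the defining class.

X1 = ["a cat sat on the mat", "the sky is blue"]
X2 = ["a feline rested on the rug", "stock prices fell sharply"]
probas = pair_model.predict_proba(X1, X2, max_length=128)
# One {class label: probability} dictionary per (X1[i], X2[i]) pair.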
Example No. 17
    def predict(self, questions, answers, max_length=None):
        """
        Produces a list of most likely class labels as determined by the fine-tuned model.

        :param questions: List or array of text, shape [batch]
        :param answers: List or array of text, shape [batch, n_answers]
        :param max_length: the number of byte-pair encoded tokens to be included in the document representation.
                           Providing more than `max_length` tokens as input will result in truncation.
        :returns: list of class labels.
        """
        raw_ids = BaseModel.predict(self,
                                    list(zip(questions, answers)),
                                    max_length=max_length)
        return [ans[i] for ans, i in zip(zip(*answers), raw_ids)]