예제 #1
0
 def from_params(cls, params: Params) -> 'SnliReader':
     """
     Construct an ``SnliReader`` from a configuration ``Params`` object.

     Parameters
     ----------
     tokenizer : ``Params``, optional (default=``{}``)
         Parameters for the ``Tokenizer``.
     token_indexers : ``Params``, optional (default=``{}``)
         A mapping from indexer name to ``TokenIndexer`` parameters.
     """
     tokenizer = Tokenizer.from_params(params.pop('tokenizer', {}))
     token_indexers = {}
     token_indexer_params = params.pop('token_indexers', Params({}))
     for name, indexer_params in token_indexer_params.items():
         token_indexers[name] = TokenIndexer.from_params(indexer_params)
     # The default parameters are contained within the class,
     # so if no parameters are given we must pass None.
     if not token_indexers:
         token_indexers = None
     params.assert_empty(cls.__name__)
     # Use ``cls`` instead of the hard-coded class name so subclasses
     # inheriting this classmethod construct the correct type.
     return cls(tokenizer=tokenizer, token_indexers=token_indexers)
예제 #2
0
 def from_params(cls, params: Params) -> 'SrlReader':
     """
     Construct an ``SrlReader`` from a configuration ``Params`` object.

     Parameters
     ----------
     token_indexers : ``Params``, optional (default=``{}``)
         A mapping from indexer name to ``TokenIndexer`` parameters
         (the body iterates it with ``.items()``, so it is a mapping,
         not a list).
     """
     token_indexers = {}
     token_indexer_params = params.pop('token_indexers', Params({}))
     for name, indexer_params in token_indexer_params.items():
         token_indexers[name] = TokenIndexer.from_params(indexer_params)
     # The default parameters are contained within the class,
     # so if no parameters are given we must pass None.
     if not token_indexers:
         token_indexers = None
     params.assert_empty(cls.__name__)
     # Use ``cls`` instead of the hard-coded class name so subclasses
     # inheriting this classmethod construct the correct type.
     return cls(token_indexers=token_indexers)
예제 #3
0
 def from_params(cls, params: Params) -> 'SquadReader':
     """
     Construct a ``SquadReader`` from a configuration ``Params`` object.

     Parameters
     ----------
     tokenizer : ``Params``, optional (default=``{}``)
         Parameters for the ``Tokenizer``.
     token_indexers : ``Params``, optional (default=``{}``)
         A mapping from indexer name to ``TokenIndexer`` parameters.
     """
     tokenizer = Tokenizer.from_params(params.pop('tokenizer', {}))
     token_indexers = {}
     # Default to ``Params({})`` (not a plain dict) so the fallback has
     # the same type as a real config value, matching the other readers.
     token_indexer_params = params.pop('token_indexers', Params({}))
     for name, indexer_params in token_indexer_params.items():
         token_indexers[name] = TokenIndexer.from_params(indexer_params)
     # The default parameters are contained within the class, so if no
     # parameters are given we must pass None.
     if not token_indexers:
         token_indexers = None
     params.assert_empty(cls.__name__)
     return cls(tokenizer=tokenizer, token_indexers=token_indexers)
예제 #4
0
 def from_params(cls, params: Params) -> 'LanguageModelingReader':
     """
     Construct a ``LanguageModelingReader`` from a configuration
     ``Params`` object.

     Parameters
     ----------
     tokens_per_instance : ``int``, optional (default=``None``)
         If given, how many tokens each instance should contain.
     tokenizer : ``Params``, optional (default=``{}``)
         Parameters for the ``Tokenizer``.
     token_indexers : ``Params``, optional (default=``{}``)
         A mapping from indexer name to ``TokenIndexer`` parameters.
     """
     tokens_per_instance = params.pop('tokens_per_instance', None)
     tokenizer = Tokenizer.from_params(params.pop('tokenizer', {}))
     token_indexers = {}
     token_indexer_params = params.pop('token_indexers', Params({}))
     for name, indexer_params in token_indexer_params.items():
         token_indexers[name] = TokenIndexer.from_params(indexer_params)
     # The default parameters are contained within the class,
     # so if no parameters are given we must pass None.
     if not token_indexers:
         token_indexers = None
     params.assert_empty(cls.__name__)
     # Use ``cls`` instead of the hard-coded class name so subclasses
     # inheriting this classmethod construct the correct type.
     return cls(tokens_per_instance=tokens_per_instance,
                tokenizer=tokenizer,
                token_indexers=token_indexers)
예제 #5
0
    def from_params(cls, params: Params) -> 'SequenceTaggingDatasetReader':
        """
        Construct a ``SequenceTaggingDatasetReader`` from a configuration
        ``Params`` object.

        Parameters
        ----------
        token_indexers : ``Params``, optional (default=``{}``)
            A mapping from indexer name to ``TokenIndexer`` parameters.
        word_tag_delimiter : ``str``, optional (default=``DEFAULT_WORD_TAG_DELIMITER``)
            The delimiter separating a word from its tag.
        token_delimiter : ``str``, optional (default=``None``)
            The delimiter separating tokens; ``None`` means default splitting.
        """
        token_indexers = {}
        token_indexer_params = params.pop('token_indexers', Params({}))
        for name, indexer_params in token_indexer_params.items():
            token_indexers[name] = TokenIndexer.from_params(indexer_params)
        # The default parameters are contained within the class,
        # so if no parameters are given we must pass None.
        if not token_indexers:
            token_indexers = None

        word_tag_delimiter = params.pop("word_tag_delimiter",
                                        DEFAULT_WORD_TAG_DELIMITER)
        token_delimiter = params.pop("token_delimiter", None)

        params.assert_empty(cls.__name__)
        # Use ``cls`` instead of the hard-coded class name so subclasses
        # inheriting this classmethod construct the correct type.
        return cls(
            token_indexers=token_indexers,
            word_tag_delimiter=word_tag_delimiter,
            token_delimiter=token_delimiter)
예제 #6
0
 def from_params(cls, params: Params) -> 'SquadSentenceSelectionReader':
     """
     Construct a ``SquadSentenceSelectionReader`` from a configuration
     ``Params`` object.

     Parameters
     ----------
     negative_sentence_selection : ``str``, optional (default=``"paragraph"``)
         Strategy used for picking negative sentences.
     tokenizer : ``Params``, optional (default=``{}``)
         Parameters for the ``Tokenizer``.
     token_indexers : ``Params``, optional (default=``{}``)
         A mapping from indexer name to ``TokenIndexer`` parameters.
     """
     negative_sentence_selection = params.pop('negative_sentence_selection',
                                              'paragraph')
     tokenizer = Tokenizer.from_params(params.pop('tokenizer', {}))
     token_indexers = {}
     token_indexer_params = params.pop('token_indexers', Params({}))
     for name, indexer_params in token_indexer_params.items():
         token_indexers[name] = TokenIndexer.from_params(indexer_params)
     # The default parameters are contained within the class,
     # so if no parameters are given we must pass None.
     if not token_indexers:
         token_indexers = None
     params.assert_empty(cls.__name__)
     # Use ``cls`` instead of the hard-coded class name so subclasses
     # inheriting this classmethod construct the correct type.
     return cls(
         negative_sentence_selection=negative_sentence_selection,
         tokenizer=tokenizer,
         token_indexers=token_indexers)