def from_params(cls, params: Params) -> "Elmo": # Add files to archive params.add_file_to_archive("options_file") params.add_file_to_archive("weight_file") options_file = params.pop("options_file") weight_file = params.pop("weight_file") requires_grad = params.pop("requires_grad", False) num_output_representations = params.pop("num_output_representations") do_layer_norm = params.pop_bool("do_layer_norm", False) keep_sentence_boundaries = params.pop_bool("keep_sentence_boundaries", False) dropout = params.pop_float("dropout", 0.5) scalar_mix_parameters = params.pop("scalar_mix_parameters", None) params.assert_empty(cls.__name__) return cls( options_file=options_file, weight_file=weight_file, num_output_representations=num_output_representations, requires_grad=requires_grad, do_layer_norm=do_layer_norm, keep_sentence_boundaries=keep_sentence_boundaries, dropout=dropout, scalar_mix_parameters=scalar_mix_parameters, )
def from_params(cls, vocab: Vocabulary, params: Params) -> 'ElmoTokenEmbedder':  # type: ignore
    # pylint: disable=arguments-differ
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    dropout = params.pop_float("dropout", 0.5)
    namespace_to_cache = params.pop("namespace_to_cache", None)
    if namespace_to_cache is not None:
        vocab_to_cache = list(
            vocab.get_token_to_index_vocabulary(namespace_to_cache).keys())
    else:
        vocab_to_cache = None
    projection_dim = params.pop_int("projection_dim", None)
    params.assert_empty(cls.__name__)
    return cls(options_file=options_file,
               weight_file=weight_file,
               do_layer_norm=do_layer_norm,
               dropout=dropout,
               requires_grad=requires_grad,
               projection_dim=projection_dim,
               vocab_to_cache=vocab_to_cache)
def from_params(cls, vocab: Vocabulary, params: Params) -> 'ElmoTokenEmbedder':  # type: ignore
    # pylint: disable=arguments-differ
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    dropout = params.pop_float("dropout", 0.5)
    namespace_to_cache = params.pop("namespace_to_cache", None)
    if namespace_to_cache is not None:
        vocab_to_cache = list(vocab.get_token_to_index_vocabulary(namespace_to_cache).keys())
    else:
        vocab_to_cache = None
    projection_dim = params.pop_int("projection_dim", None)
    scalar_mix_parameters = params.pop('scalar_mix_parameters', None)
    params.assert_empty(cls.__name__)
    return cls(options_file=options_file,
               weight_file=weight_file,
               do_layer_norm=do_layer_norm,
               dropout=dropout,
               requires_grad=requires_grad,
               projection_dim=projection_dim,
               vocab_to_cache=vocab_to_cache,
               scalar_mix_parameters=scalar_mix_parameters)
def from_params(  # type: ignore
    cls, vocab: Vocabulary, params: Params, **extras
) -> "ElmoTokenEmbedder":
    params.add_file_to_archive("options_file")
    params.add_file_to_archive("weight_file")
    options_file = params.pop("options_file")
    weight_file = params.pop("weight_file")
    requires_grad = params.pop("requires_grad", False)
    do_layer_norm = params.pop_bool("do_layer_norm", False)
    dropout = params.pop_float("dropout", 0.5)
    namespace_to_cache = params.pop("namespace_to_cache", None)
    if namespace_to_cache is not None:
        vocab_to_cache = list(
            vocab.get_token_to_index_vocabulary(namespace_to_cache).keys())
    else:
        vocab_to_cache = None
    projection_dim = params.pop_int("projection_dim", None)
    scalar_mix_parameters = params.pop("scalar_mix_parameters", None)
    params.assert_empty(cls.__name__)
    return cls(
        options_file=options_file,
        weight_file=weight_file,
        do_layer_norm=do_layer_norm,
        dropout=dropout,
        requires_grad=requires_grad,
        projection_dim=projection_dim,
        vocab_to_cache=vocab_to_cache,
        scalar_mix_parameters=scalar_mix_parameters,
    )
def from_params(cls, params: Params) -> 'Elmo':
    # Add files to archive
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    char_map_file = params.pop('char_map_file', None)
    num_output_representations = params.pop('num_output_representations')
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    keep_sentence_boundaries = params.pop_bool('keep_sentence_boundaries', False)
    dropout = params.pop_float('dropout', 0.5)
    scalar_mix_parameters = params.pop('scalar_mix_parameters', None)
    params.assert_empty(cls.__name__)
    return cls(options_file=options_file,
               weight_file=weight_file,
               num_output_representations=num_output_representations,
               char_map_file=char_map_file,
               requires_grad=requires_grad,
               do_layer_norm=do_layer_norm,
               keep_sentence_boundaries=keep_sentence_boundaries,
               dropout=dropout,
               scalar_mix_parameters=scalar_mix_parameters)
def from_params(cls, params: Params) -> 'B':
    params.add_file_to_archive("filename")
    filename = params.pop("filename")
    c_params = params.pop("c")
    c = C.from_params(c_params)
    return cls(filename, c)
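# Hypothetical host classes for the two test-style methods in this listing
# (B.from_params above, C.from_params near the end), sketching the pattern
# they exercise: B pops the nested "c" sub-Params and delegates construction
# to C.from_params, so each class archives its own files.
class C:
    def __init__(self, c_file: str) -> None:
        self.c_file = c_file


class B:
    def __init__(self, filename: str, c: C) -> None:
        self.filename = filename
        self.c = c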
def from_params(cls, vocab: Vocabulary, params: Params) -> 'ElmoTokenEmbedder':
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    do_layer_norm = params.pop('do_layer_norm', False)
    dropout = params.pop("dropout", 0.5)
    params.assert_empty(cls.__name__)
    return cls(options_file, weight_file, do_layer_norm, dropout)
def from_params(cls, vocab: Vocabulary, params: Params) -> 'ElmoTokenEmbedder':
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    dropout = params.pop_float("dropout", 0.5)
    params.assert_empty(cls.__name__)
    return cls(options_file, weight_file, do_layer_norm, dropout,
               requires_grad=requires_grad)
def from_params(cls, params: Params) -> 'Elmo':
    # Add files to archive
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    num_output_representations = params.pop('num_output_representations')
    do_layer_norm = params.pop('do_layer_norm', False)
    params.assert_empty(cls.__name__)
    return cls(options_file, weight_file, num_output_representations, do_layer_norm)
def from_params(cls, params: Params) -> 'Elmo':
    # Add files to archive
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    num_output_representations = params.pop('num_output_representations')
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    params.assert_empty(cls.__name__)
    return cls(options_file, weight_file, num_output_representations,
               requires_grad=requires_grad, do_layer_norm=do_layer_norm)
def from_params(cls, params: Params) -> 'PretrainedVAE':
    # Add files to archive
    params.add_file_to_archive('model_archive')
    model_archive = params.pop('model_archive')
    device = params.pop('device')
    background_frequency = params.pop('background_frequency')
    requires_grad = params.pop('requires_grad', False)
    dropout = params.pop_float('dropout', None)
    scalar_mix = params.pop('scalar_mix', None)
    params.assert_empty(cls.__name__)
    return cls(model_archive=model_archive,
               device=device,
               background_frequency=background_frequency,
               requires_grad=requires_grad,
               scalar_mix=scalar_mix,
               dropout=dropout)
def from_params(cls, vocab: Vocabulary, params: Params) -> 'ElmoTokenEmbedder':
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    dropout = params.pop_float("dropout", 0.5)
    projection_dim = params.pop_int("projection_dim", None)
    params.assert_empty(cls.__name__)
    return cls(options_file=options_file,
               weight_file=weight_file,
               do_layer_norm=do_layer_norm,
               dropout=dropout,
               requires_grad=requires_grad,
               projection_dim=projection_dim)
def from_params(cls, params: Params) -> 'Elmo':
    # Add files to archive
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    num_output_representations = params.pop('num_output_representations')
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    dropout = params.pop_float('dropout', 0.5)
    params.assert_empty(cls.__name__)
    return cls(options_file=options_file,
               weight_file=weight_file,
               num_output_representations=num_output_representations,
               requires_grad=requires_grad,
               do_layer_norm=do_layer_norm,
               dropout=dropout)
def from_params(cls, params: Params) -> 'Elmo':
    # Add files to archive
    params.add_file_to_archive('options_file')
    params.add_file_to_archive('weight_file')
    options_file = params.pop('options_file')
    weight_file = params.pop('weight_file')
    requires_grad = params.pop('requires_grad', False)
    num_output_representations = params.pop('num_output_representations')
    do_layer_norm = params.pop_bool('do_layer_norm', False)
    keep_sentence_boundaries = params.pop_bool('keep_sentence_boundaries', False)
    dropout = params.pop_float('dropout', 0.5)
    scalar_mix_parameters = params.pop('scalar_mix_parameters', None)
    params.assert_empty(cls.__name__)
    return cls(options_file=options_file,
               weight_file=weight_file,
               num_output_representations=num_output_representations,
               requires_grad=requires_grad,
               do_layer_norm=do_layer_norm,
               keep_sentence_boundaries=keep_sentence_boundaries,
               dropout=dropout,
               scalar_mix_parameters=scalar_mix_parameters)
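# Sketch of the Params behavior these methods rely on (an assumption about
# AllenNLP's Params; "Example" is a hypothetical name): the typed pops coerce
# JSON values and remove the key, and assert_empty raises if any key was
# never consumed.
p = Params({"do_layer_norm": "false", "dropout": 0.5})
assert p.pop_bool("do_layer_norm", False) is False  # the string "false" is coerced
assert p.pop_float("dropout", 0.5) == 0.5
p.assert_empty("Example")  # passes: every key above was popped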
def from_params(
        cls,
        vocab: Vocabulary,  # pylint: disable=unused-argument
        params: Params
) -> 'VampireTokenEmbedder':  # type: ignore
    # pylint: disable=arguments-differ
    params.add_file_to_archive('model_archive')
    model_archive = params.pop('model_archive')
    device = params.pop_int('device')
    background_frequency = params.pop('background_frequency')
    requires_grad = params.pop('requires_grad', False)
    scalar_mix = params.pop("scalar_mix", None)
    dropout = params.pop_float("dropout", None)
    expand_dim = params.pop_float("expand_dim", False)
    projection_dim = params.pop_int("projection_dim", None)
    params.assert_empty(cls.__name__)
    return cls(expand_dim=expand_dim,
               scalar_mix=scalar_mix,
               background_frequency=background_frequency,
               device=device,
               model_archive=model_archive,
               dropout=dropout,
               requires_grad=requires_grad,
               projection_dim=projection_dim)
def from_params(cls, params: Params) -> 'C':
    params.add_file_to_archive("c_file")
    c_file = params.pop("c_file")
    return cls(c_file)