Example #1
 def fit(self, trn_data, dev_data, save_dir,
         n_embed=100,
         pretrained_embed=None,
         embed_dropout=.33,
         n_lstm_hidden=400,
         n_lstm_layers=3,
         lstm_dropout=.33,
         n_mlp_arc=500,
         n_mlp_rel=100,
         mlp_dropout=.33,
         optimizer='adam',
         lr=2e-3,
         mu=.9,
         nu=.9,
         epsilon=1e-12,
         clip=5.0,
         decay=.75,
         decay_steps=5000,
         patience=100,
         arc_loss='sparse_categorical_crossentropy',
         rel_loss='sparse_categorical_crossentropy',
         metrics=('UAS', 'LAS'),
         n_buckets=32,
         batch_size=5000,
         epochs=50000,
         early_stopping_patience=100,
         tree=False,
         punct=False,
         min_freq=2,
         run_eagerly=False, logger=None, verbose=True,
         **kwargs):
     return super().fit(**merge_locals_kwargs(locals(), kwargs))
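Every example on this page forwards its arguments through merge_locals_kwargs(locals(), kwargs), so each named hyperparameter ends up in the keyword dictionary that the parent fit or __init__ receives. The helper below is only a minimal sketch of what such a function might do, written for illustration; HanLP's actual implementation may differ in details.

 def merge_locals_kwargs(locals_dict: dict, kwargs: dict) -> dict:
     # Start from the caller's local variables (the explicit fit/__init__
     # parameters), drop 'self' and the raw **kwargs mapping, then fold the
     # extra keyword arguments back in so nothing is lost or duplicated.
     merged = dict(locals_dict)
     merged.pop('self', None)
     merged.pop('kwargs', None)
     merged.update(kwargs)
     return merged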
Example #2
File: tok.py Project: zuoqy/HanLP
 def fit(self, trn_data: Any, dev_data: Any, save_dir: str, word_embed: Union[str, int, dict] = 200,
         ngram_embed: Union[str, int, dict] = 50, embedding_trainable=True, window_size=4, kernel_size=3,
         filters=(200, 200, 200, 200, 200), dropout_embed=0.2, dropout_hidden=0.2, weight_norm=True,
         loss: Union[tf.keras.losses.Loss, str] = None,
         optimizer: Union[str, tf.keras.optimizers.Optimizer] = 'adam', metrics='f1', batch_size=100,
         epochs=100, logger=None, verbose=True, **kwargs):
     return super().fit(**merge_locals_kwargs(locals(), kwargs))
Example #3
 def fit(self,
         trn_data: Any,
         dev_data: Any,
         save_dir: str,
         word_embed: Union[str, int, dict] = 200,
         ngram_embed: Union[str, int, dict] = 50,
         embedding_trainable=True,
         window_size=4,
         kernel_size=3,
         filters=(200, 200, 200, 200, 200),
         dropout_embed=0.2,
         dropout_hidden=0.2,
         weight_norm=True,
         loss: Union[tf.keras.losses.Loss, str] = None,
         optimizer: Union[str, tf.keras.optimizers.Optimizer] = 'adam',
         metrics='accuracy',
         batch_size=100,
         epochs=100,
         logger=None,
         verbose=True,
         **kwargs):
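      # This model only supports eager execution: refuse an explicit
      # run_eagerly=False, then force the flag on before delegating to fit.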
     assert kwargs.get('run_eagerly',
                       True), 'NgramConvTaggingModel can only run eagerly'
     kwargs['run_eagerly'] = True
     return super().fit(**merge_locals_kwargs(locals(), kwargs))
Example #4
 def __init__(self, config: SerializableDict = None, map_x=True, map_y=True, lower=True, n_buckets=32,
              n_tokens_per_batch=5000, min_freq=2,
              **kwargs) -> None:
     super().__init__(**merge_locals_kwargs(locals(), kwargs))
     self.form_vocab: Vocab = None
     self.cpos_vocab: Vocab = None
     self.rel_vocab: Vocab = None
     self.puncts: tf.Tensor = None
Example #5
 def __init__(self,
              config: SerializableDict = None,
              map_x=True,
              map_y=True,
              use_char=False,
              **kwargs) -> None:
     super().__init__(**merge_locals_kwargs(locals(), kwargs))
     self.word_vocab: Optional[Vocab] = None
     self.tag_vocab: Optional[Vocab] = None
     self.char_vocab: Optional[Vocab] = None
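Examples #4 and #5 show the companion pattern on the transform side: the constructor records its hyperparameters via merge_locals_kwargs and declares its vocabularies as None, to be populated later from the training data. The toy class below illustrates that pattern in isolation; it is a sketch, not HanLP's actual transform, and it reuses the merge_locals_kwargs sketch from Example #1 in place of SerializableDict-backed configuration.

 class ToyTaggingTransform:
     def __init__(self, lower=True, use_char=False, **kwargs) -> None:
         # Capture every constructor argument as configuration so it can be
         # saved alongside the model; vocabularies stay unset until the
         # transform has seen the training corpus.
         self.config = merge_locals_kwargs(locals(), kwargs)
         self.word_vocab = None
         self.tag_vocab = None
         self.char_vocab = None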
Example #6
 def fit(self,
         trn_data: Any,
         dev_data: Any,
         save_dir: str,
         transformer: str,
         max_length: int = 128,
         optimizer='adamw',
         warmup_steps_ratio=0.1,
         use_amp=False,
         batch_size=32,
         epochs=3,
         logger=None,
         verbose=1,
         **kwargs):
     return super().fit(**merge_locals_kwargs(locals(), kwargs))
Example #7
 def fit(self, trn_data, dev_data, save_dir,
         transformer,
         optimizer='adamw',
         learning_rate=5e-5,
         weight_decay_rate=0,
         epsilon=1e-8,
         clipnorm=1.0,
         warmup_steps_ratio=0,
         use_amp=False,
         max_seq_length=128,
         batch_size=32,
         epochs=3,
         metrics='accuracy',
         run_eagerly=False,
         logger=None,
         verbose=True,
         **kwargs):
     return super().fit(**merge_locals_kwargs(locals(), kwargs))
Example #8
 def fit(self,
         trn_data: str,
         dev_data: str = None,
         save_dir: str = None,
         embeddings=100,
         embedding_trainable=False,
         rnn_input_dropout=0.2,
         rnn_units=100,
         rnn_output_dropout=0.2,
         epochs=20,
         lower=False,
         logger=None,
         loss: Union[tf.keras.losses.Loss, str] = None,
         optimizer: Union[str, tf.keras.optimizers.Optimizer] = 'adam',
         metrics='f1',
         batch_size=32,
         dev_batch_size=32,
         lr_decay_per_epoch=None,
         verbose=True,
         **kwargs):
     return super().fit(**merge_locals_kwargs(locals(), kwargs))
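All of the fit signatures above are invoked the same way: pass the training and development data plus a save directory, and override any hyperparameter by keyword. A hypothetical call against the RNN tagger of Example #8 (the class name and file paths are placeholders, not part of the snippets above) might look like:

 tagger = RNNTaggerTF()  # placeholder name; any model above is called the same way
 tagger.fit('data/train.tsv', 'data/dev.tsv', 'models/pos',
            embeddings=100, rnn_units=100, epochs=20,
            batch_size=32, metrics='f1')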