def _generator():
    """Endlessly yield masked-LM training batches.

    Each iteration regenerates sentence pairs from ``X`` via
    ``generate_input_by_batch`` and converts them with keras-bert's
    ``gen_batch_inputs`` (seq_len=512, 30% token masking).
    """
    while True:
        pairs = generate_input_by_batch(X)
        yield gen_batch_inputs(pairs, token_dict, token_list, seq_len=512, mask_rate=0.3)
def _generator():
    """Yield BERT pre-training batches from ``sentence_tuples`` forever.

    Uses 15% masking, always swaps sentences (swap_sentence_rate=1.0),
    and packs 16 pairs per batch at the module-level ``seq_len``.
    """
    while True:
        batch = gen_batch_inputs(
            sentence_tuples,
            token_dict,
            token_list,
            mask_rate=0.15,
            seq_len=seq_len,
            swap_sentence_rate=1.0,
            batch_size=16,
        )
        yield batch
def _generator(batch_size=4):
    """Yield masked-LM batches over ``X`` in a fresh random order each epoch.

    Parameters
    ----------
    batch_size : int
        Number of rows of ``X`` per yielded batch (default 4).

    Yields
    ------
    The (inputs, outputs) structure produced by ``gen_batch_inputs``
    (seq_len=512, 30% token masking).
    """
    while True:
        # Re-shuffle once per epoch so successive passes see different batches.
        idx = np.random.permutation(X.shape[0])
        for start in range(0, idx.shape[0], batch_size):
            # BUG FIX: the original sliced X[start:start + batch_size] directly,
            # ignoring the permutation — every epoch yielded identical batches
            # in identical order. Index through the shuffled ids instead.
            batch_idx = idx[start:start + batch_size]
            yield gen_batch_inputs(
                X[batch_idx],
                token_dict,
                token_list,
                seq_len=512,
                mask_rate=0.3,
            )
def _generator():
    """Infinite stream of identical pre-training batches.

    Converts the module-level ``sentence_pairs`` with short sequences
    (seq_len=20), 30% masking, and sentence swapping always enabled.
    """
    while True:
        batch = gen_batch_inputs(
            sentence_pairs,
            token_dict,
            token_list,
            seq_len=20,
            mask_rate=0.3,
            swap_sentence_rate=1.0,
        )
        yield batch
def _generator():
    """Cycle through ``sentence_pairs`` forever, one pair per batch.

    Sentence swapping is disabled (swap_sentence_rate=0), so only the
    masked-LM objective varies between yields; seq_len=512, 30% masking.
    """
    while True:
        for sentence_pair in sentence_pairs:
            single_pair_batch = [sentence_pair]
            yield gen_batch_inputs(
                single_pair_batch,
                token_dict,
                token_list,
                seq_len=512,
                mask_rate=0.3,
                swap_sentence_rate=0,
            )
def my_generator(samples, batch_size):
    """Endlessly yield BERT pre-training batches drawn from *samples*.

    Walks *samples* front to back in steps of *batch_size*, converting each
    slice with ``gen_batch_inputs`` (module-level ``max_seq_len``, 30%
    masking, sentence swapping always on), then restarts from the beginning.
    A trailing partial batch (fewer than *batch_size* pairs) is skipped.

    Parameters
    ----------
    samples : sequence of sentence-pair tuples
        Pre-tokenized (phrase1, phrase2) pairs.
    batch_size : int
        Number of pairs per yielded batch.
    """
    # Changes from the original: removed an unreachable `if False:` debug-print
    # block (dead code), and changed the loop bound from `<` to `<=` — the
    # strict comparison silently dropped a final batch ending exactly at
    # len(samples).
    while True:
        start_index = 0
        while (start_index + batch_size) <= len(samples):
            yield gen_batch_inputs(
                samples[start_index:start_index + batch_size],
                token_dict,
                token_list,
                seq_len=max_seq_len,
                mask_rate=0.3,
                swap_sentence_rate=1.0,
            )
            start_index += batch_size
# Import keras-bert's helper that turns sentence pairs into pre-training
# batch tensors (masked-LM + NSP inputs/targets).
from keras_bert import gen_batch_inputs
# NOTE(review): gen_batch_inputs requires sentence pairs, a token dict and a
# token list — calling it with no arguments raises TypeError. Presumably this
# line is a placeholder from a usage example; confirm intended arguments.
gen_batch_inputs()