def summ(step
         , wtr = tf.summary.FileWriter(pform(P.log, C.trial))
         , summary = tf.summary.merge(
             ( tf.summary.scalar('step_errt', model.errt)
             , tf.summary.scalar('step_loss', model.loss)))):
    """Log mean validation error rate and loss to TensorBoard at `step`.

    Runs every validation model over its (source, target) data in batches,
    averages the per-sample error rates and losses, and writes both as
    scalar summaries through `wtr`.

    NOTE(review): `wtr` and `summary` are default arguments evaluated once
    at definition time — presumably a deliberate create-once cache of the
    FileWriter and the merged summary op; confirm before refactoring.
    """
    # One (errt_samp, loss_samp) pair per batch, per validation model;
    # flatten across models, then concatenate and average each stream.
    per_model = ( batch_run(sess, m, (m.errt_samp, m.loss_samp), s, t, batch= C.batch_valid)
                  for m, (s, t) in zip(valid, data_valid))
    mean_errt, mean_loss = map(comp(np.mean, np.concatenate), zip(*chain(*per_model)))
    # Feed the scalars back through the model placeholders to render them.
    wtr.add_summary(sess.run(summary, {model.errt: mean_errt, model.loss: mean_loss}), step)
    wtr.flush()
def trans(sents, model, vocab):
    """Translate `sents` with `model`, yielding decoded outputs lazily.

    Runs the model's `pred` op over `sents` in inference-sized batches and
    streams each batch of predictions through `decode` with `vocab`.
    """
    batches = batch_run(sess, model, model.pred, sents, batch=C.batch_infer)
    for batch_preds in batches:
        yield from decode(vocab, batch_preds)