# Example #1
# 0
# from util_tf import profile
# m = Transformer.new().data()
# forcing = m.forcing(trainable= False)
# autoreg = m.autoreg(trainable= False)
# feed = {m.src_: src_train[:batch_size], m.tgt_: tgt_train[:batch_size]}
# with tf.Session() as sess:
#     tf.global_variables_initializer().run()
#     with tf.summary.FileWriter(join(logdir, "graph"), sess.graph) as wtr:
#         profile(sess, wtr, forcing.loss, feed, tag= 'forcing')
#         profile(sess, wtr, autoreg.loss, feed, tag= 'autoreg')

####################
# validation model #
####################

# Build the validation graph: one shared Transformer whose data pipeline is
# bound to the validation split, then two non-trainable heads --
# teacher-forcing (for loss) and autoregressive (for decoding).
model = Transformer.new()
# NOTE(review): batch(...) presumably yields (src, tgt) minibatch tensors and
# len_cap bounds the decoded length -- confirm against the Transformer module.
model_valid = model.data(*batch((src_valid, tgt_valid), batch_size), len_cap)
forcing_valid = model_valid.forcing(trainable= False)
autoreg_valid = model_valid.autoreg(trainable= False)

idx_tgt = PointedIndex(np.load("trial/data/index_tgt.npy").item())
def trans(path, m= autoreg_valid, src= src_valid, idx= idx_tgt, len_cap= len_cap, batch_size= batch_size):
    """Autoregressively decodes `src` in minibatches with model `m`, writing
    one decoded line per example to the file at `path`.

    NOTE(review): the decoder is seeded with src[:, :1] -- presumably a shared
    begin-of-sequence column; confirm against the data pipeline.
    """
    with open(path, 'w') as out:
        lo = 0
        while lo < len(src):
            hi = lo + batch_size
            # same slices as the original zip-of-range pairing; numpy/tf
            # slicing clamps hi past the end, so the ragged tail is covered
            feed = {m.src: src[lo:hi], m.tgt: src[lo:hi,:1], m.len_tgt: len_cap}
            for pred in m.pred.eval(feed):
                print(decode(idx, pred), file= out)
            lo = hi

# from util_io import encode
# idx_src = PointedIndex(np.load("trial/data/index_src.npy").item())
# def auto(s, m= autoreg_valid, idx_src= idx_src, idx_tgt= idx_tgt, len_cap= len_cap):
# Example #2
# 0
    # NOTE(review): np.str is a deprecated alias for the builtin str (removed
    # in NumPy 1.24); plain str behaves identically -- confirm NumPy version.
    names = names.astype(np.str)
    # Pad-stack the per-name loaded arrays into one batch, filling with
    # complex NaN; presumably vpack(seq, fill, ...) pads along time -- TODO
    # confirm vpack's signature.
    x = vpack(map(load, names), complex('(nan+nanj)'), 1, 1)
    # x = vpack(map(comp(load, path), names), complex('(nan+nanj)'), 1, 1)
    # Zero out the first time step (presumably a begin-of-sequence frame).
    x[:, 0] = 0j
    # Complex -> real view; doubles the last dimension into (re, im) pairs --
    # TODO confirm against c2r's definition.
    x = c2r(x)
    # Sanity-check batch shape: time within the cap, feature dim as expected.
    _, t, d = x.shape
    assert t <= len_cap
    assert d == dim_tgt
    return x


####################
# validation model #
####################

# Build the validation graph on the held-out split (first `split` examples):
# a Transformer sized to the source index and target feature dimension, with
# non-trainable teacher-forcing and autoregressive heads.
model = Transformer.new(dim_src=len(index), dim_tgt=dim_tgt)
model_valid = model.data(texts[:split], load_batch(names[:split]), len_cap)
forcing_valid = model_valid.forcing(trainable=False)
autoreg_valid = model_valid.autoreg(trainable=False)

# # for profiling
# from util_tf import profile
# with tf.Session() as sess:
#     tf.global_variables_initializer().run()
#     with tf.summary.FileWriter(join(logdir, "graph"), sess.graph) as wtr:
#         profile(sess, wtr, forcing_valid.loss, tag= 'forcing')
#         profile(sess, wtr, autoreg_valid.loss, tag= 'autoreg')

# ' according to their categories or crimes.\n'
# One fixed validation example (index 978) used as a synthesis probe:
# src truncated to 42 steps, tgt stripped of its last frame -- presumably the
# teacher-forcing shift so the model predicts frame t+1 from frames <= t.
src, tgt = texts[978:979, :42], load_batch(names[978:979])[:, :-1]
# Feed dict for the teacher-forcing head on this single example.
synth_forcing = {forcing_valid.src: src, forcing_valid.tgt: tgt}