Example no. 1 (score: 0)
File: idnet.py — Project: surban/ml
def generate_new_data():
    """Draw a fresh training batch and publish it via the module-level
    ``trn_inputs`` / ``trn_targets`` globals (consumed by f_trn_loss)."""
    global trn_inputs, trn_targets
    batch = generate_id_data(cfg.x_len, cfg.n_batch)
    trn_inputs, trn_targets = batch
Example no. 2 (score: 0)
File: idnet.py — Project: surban/ml
# Compile Theano-style functions over the flat parameter vector ps.flat.
# NOTE(review): `function`, `ps`, `x`, `t`, `out_re`, `loss`, `pure_loss`
# are defined earlier in the file (not visible here); presumably Theano
# symbolic variables — confirm against the full idnet.py.
f_output = function(inputs=[ps.flat, x], outputs=out_re)
f_loss = function(inputs=[ps.flat, x, t], outputs=loss)
f_pure_loss = function(inputs=[ps.flat, x, t], outputs=pure_loss)
# Gradient of the (regularized) loss w.r.t. all parameters at once.
f_dloss = function(inputs=[ps.flat, x, t], outputs=T.grad(loss, ps.flat))

# separate gradients wrt layer weights (one compiled function per weight
# variable, keyed by its name) — only built when gradient plots are wanted.
# NOTE: iteritems() and the print statements below mean this is Python 2.
if show_gradient:
    f_grads = {}
    for wname, wvar in ps.vars.iteritems():
        f_grads[wname] = function(inputs=[ps.flat, x, t], outputs=T.grad(loss, wvar))

if do_weight_plots:
    plt.figure()  # figure reused later for weight plots

# Validation and test sets are drawn once up front (training batches are
# regenerated each step by generate_new_data below).  Note both use
# cfg.n_val_samples, so the test set has the same size as the validation set.
print "Generating validation data..."
val_inputs, val_targets = generate_id_data(cfg.x_len, cfg.n_val_samples)
tst_inputs, tst_targets = generate_id_data(cfg.x_len, cfg.n_val_samples)
print "Done."

# optimizer: loss/gradient callbacks over the global training batch
def generate_new_data():
    """Regenerate the global training batch ``trn_inputs``/``trn_targets``
    from generate_id_data using the configured sequence length and batch size."""
    global trn_inputs, trn_targets
    trn_inputs, trn_targets = generate_id_data(cfg.x_len, cfg.n_batch)

def f_trn_loss(p):
    """Evaluate the training loss at parameter vector *p* on the current
    global training batch; optionally checks *p* for NaN/inf first."""
    global trn_inputs, trn_targets
    if check_nans:
        all_finite = np.all(np.isfinite(gather(p)))
        assert all_finite, "NaN in p given to f_trn_loss"
    return f_loss(p, trn_inputs, trn_targets)