# Build the symbolic cost graph for an LSTM autoencoder with a classification
# head.  Assumes `T` is theano.tensor and that `reg_input`, `rng`, `ahead`,
# `init_reg`, `y`, `lstm_1_hidden`, `lstm_2_hidden`, and the layer classes
# (LogisticRegression, LSTM) are defined earlier in the file — TODO confirm.

# Classification head: maps the lstm_2_hidden-dim encoder features to 41
# output classes (41 appears hard-coded here; presumably the label count).
log_reg = LogisticRegression(reg_input,lstm_2_hidden, 41)

#lin_reg = LinearRegression(reg_input,lstm_2_hidden,1,True)

# Reconstruct the regression input back from the predicted class
# probabilities (tied/reverse pass of the logistic layer).
log_reg.reconstruct(log_reg.p_y_given_x)
#lin_reg.reconstruct(lin_reg.E_y_given_x)

# NOTE(review): the commented block below is an alternate decoder path that
# concatenated a linear-regression reconstruction as well — left disabled.
#reconstructed_regressions = T.concatenate([log_reg.reconstructed_x,lin_reg.reconstructed_x],axis=1)
#
#reverse_layer = LinearRegression(reconstructed_regressions, 2*lstm_2_hidden, lstm_2_hidden,False)

# Decoder: two LSTM layers unwinding the encoding back toward the input
# dimensionality (lstm_2_hidden -> lstm_1_hidden -> 30; 30 is presumably the
# original feature width — verify against the encoder above this chunk).
lstm_3 = LSTM(rng,log_reg.reconstructed_x,lstm_2_hidden,lstm_1_hidden)

lstm_4 = LSTM(rng,lstm_3.output,lstm_1_hidden,30)

# Final reconstruction through the initial regression layer's reverse pass.
init_reg.reconstruct(lstm_4.output)

# Squared reconstruction error against `ahead` (presumably the target /
# next-step input sequence — TODO confirm where `ahead` is declared).
difference = (ahead-init_reg.reconstructed_x) ** 2

# Autoencoder objective: mean squared reconstruction error.
encoder_cost = T.mean( difference )

# Supervised objective: mean binary cross-entropy of the classification head.
cross_entropy_cost = T.mean(log_reg.cross_entropy_binary(y))

# Disabled covariance/variance bookkeeping for a joint y/z statistic
# (depends on the disabled lin_reg path above).
#y_hat_mean = T.mean(log_reg.p_y_given_x,axis=0)
#
#z_hat_mean = T.mean(lin_reg.E_y_given_x,axis=0)
#
#z_variance = lin_reg.E_y_given_x - z_hat_mean
#z_var = z_variance.reshape((60,2,1)) #must reshape for outer product
#
#y_variance = log_reg.p_y_given_x - y_hat_mean
# --- Ejemplo n.º 2 (scrape separator: second, near-identical copy of the
# snippet above; original separator text was "Ejemplo n.º 2" / "0") ---
# Duplicate of the cost-graph snippet above with normalized spacing; it
# rebinds the same module-level names (log_reg, lstm_3, lstm_4, encoder_cost,
# cross_entropy_cost).  Assumes `T` is theano.tensor and that the referenced
# inputs/classes are defined earlier in the file — TODO confirm.

# Classification head: lstm_2_hidden features -> 41 classes (hard-coded).
log_reg = LogisticRegression(reg_input, lstm_2_hidden, 41)

#lin_reg = LinearRegression(reg_input,lstm_2_hidden,1,True)

# Reverse pass: reconstruct the regression input from class probabilities.
log_reg.reconstruct(log_reg.p_y_given_x)
#lin_reg.reconstruct(lin_reg.E_y_given_x)

# NOTE(review): disabled alternate decoder path combining a linear-regression
# reconstruction with the logistic one.
#reconstructed_regressions = T.concatenate([log_reg.reconstructed_x,lin_reg.reconstructed_x],axis=1)
#
#reverse_layer = LinearRegression(reconstructed_regressions, 2*lstm_2_hidden, lstm_2_hidden,False)

# Decoder LSTM stack: lstm_2_hidden -> lstm_1_hidden -> 30 units
# (30 presumably matches the original input width — verify).
lstm_3 = LSTM(rng, log_reg.reconstructed_x, lstm_2_hidden, lstm_1_hidden)

lstm_4 = LSTM(rng, lstm_3.output, lstm_1_hidden, 30)

# Final reconstruction through the initial regression layer's reverse pass.
init_reg.reconstruct(lstm_4.output)

# Element-wise squared reconstruction error vs. `ahead` (target sequence —
# TODO confirm its definition outside this chunk).
difference = (ahead - init_reg.reconstructed_x)**2

# Autoencoder objective: mean squared reconstruction error.
encoder_cost = T.mean(difference)

# Supervised objective: mean binary cross-entropy of the classification head.
cross_entropy_cost = T.mean(log_reg.cross_entropy_binary(y))

# Disabled variance/covariance bookkeeping tied to the disabled lin_reg path.
#y_hat_mean = T.mean(log_reg.p_y_given_x,axis=0)
#
#z_hat_mean = T.mean(lin_reg.E_y_given_x,axis=0)
#
#z_variance = lin_reg.E_y_given_x - z_hat_mean
#z_var = z_variance.reshape((60,2,1)) #must reshape for outer product
#
#y_variance = log_reg.p_y_given_x - y_hat_mean