Example #1
x = T.matrix(
    'x')  # the data is presented as a vector of inputs with many exchangeable examples of this vector
y = T.matrix(
    'y')  # the data is presented as a vector of inputs with many exchangeable examples of this vector

is_train = T.iscalar(
    'is_train')  # pseudo boolean for switching between training and prediction

rng = numpy.random.RandomState(1234)

# Architecture: input --> LSTM --> predict one-ahead

lstm_1 = LSTM(rng,
              x,
              n_in=data_set_x.get_value(borrow=True).shape[1],
              n_out=n_hidden)

output = LogisticRegression(input=lstm_1.output,
                            n_in=n_hidden,
                            n_out=data_set_x.get_value(borrow=True).shape[1])

################################
# Objective function and GD
################################

print('defining cost, parameters, and learning function...')

# the cost we minimize during training is the mean binary cross-entropy
# (the Bernoulli negative log likelihood) between the prediction and y
cost = T.mean(output.cross_entropy_binary(y))

# parameters to be updated during training
params = lstm_1.params + output.params
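# --- Hedged sketch, not part of the original snippet ---
# The print above announces a "learning function" that the snippet never shows.
# A minimal SGD version could look like the following; index, batch_size,
# learning_rate and the data_set_y shared variable holding the one-step-ahead
# targets are assumptions introduced here, not names from the original code.
index = T.lscalar('index')  # index to a minibatch
batch_size = 20
learning_rate = 0.01

grads = T.grad(cost, params)
updates = [(param, param - learning_rate * grad)
           for param, grad in zip(params, grads)]

train_model = theano.function(
    inputs=[index],
    outputs=cost,
    updates=updates,
    givens={
        x: data_set_x[index * batch_size:(index + 1) * batch_size],
        y: data_set_y[index * batch_size:(index + 1) * batch_size]
    })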
Example #2
ahead = T.matrix('ahead')
sent = T.matrix('sentence')
phonemes = T.imatrix('phonemes')

rng = numpy.random.RandomState(1234)

init_reg = LinearRegression(x, 60, 30, True)

lstm_1 = LSTM(rng, init_reg.E_y_given_x, 30, lstm_1_hidden)

lstm_2 = LSTM(rng, lstm_1.output, lstm_1_hidden, lstm_2_hidden)

reg_input = lstm_2.output

# need log_reg and cross-covariate layers
log_reg = LogisticRegression(reg_input, lstm_2_hidden, 41)

#lin_reg = LinearRegression(reg_input,lstm_2_hidden,1,True)

log_reg.reconstruct(log_reg.p_y_given_x)
#lin_reg.reconstruct(lin_reg.E_y_given_x)

#reconstructed_regressions = T.concatenate([log_reg.reconstructed_x,lin_reg.reconstructed_x],axis=1)
#
#reverse_layer = LinearRegression(reconstructed_regressions, 2*lstm_2_hidden, lstm_2_hidden,False)

lstm_3 = LSTM(rng, log_reg.reconstructed_x, lstm_2_hidden, lstm_1_hidden)

lstm_4 = LSTM(rng, lstm_3.output, lstm_1_hidden, 30)

init_reg.reconstruct(lstm_4.output)
Example #3
ahead = T.matrix('ahead')
sent = T.matrix('sentence')
phonemes = T.imatrix('phonemes')

rng = numpy.random.RandomState(1234)

init_reg = LinearRegression(x, 1, 30, True)

lstm_1 = LSTM(rng, init_reg.E_y_given_x, 30, lstm_1_hidden)

lstm_2 = LSTM(rng, lstm_1.output, lstm_1_hidden, lstm_2_hidden)

reg_input = lstm_2.output

# need log_reg and cross-covariate layers
log_reg = LogisticRegression(reg_input, lstm_2_hidden, 41)

#lin_reg = LinearRegression(reg_input,lstm_2_hidden,1,True)

log_reg.reconstruct(log_reg.p_y_given_x)
#lin_reg.reconstruct(lin_reg.E_y_given_x)

#reconstructed_regressions = T.concatenate([log_reg.reconstructed_x,lin_reg.reconstructed_x],axis=1)
#
#reverse_layer = LinearRegression(reconstructed_regressions, 2*lstm_2_hidden, lstm_2_hidden,False)

lstm_3 = LSTM(rng, log_reg.reconstructed_x, lstm_2_hidden, lstm_1_hidden)

lstm_4 = LSTM(rng, lstm_3.output, lstm_1_hidden, 30)

init_reg.reconstruct(lstm_4.output)
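# --- Hedged sketch, not part of the original snippet ---
# The example stops once init_reg.reconstruct(lstm_4.output) has been called.
# A natural continuation is a reconstruction cost over the input.  This assumes
# that reconstruct() stores its result in a reconstructed_x attribute (as the
# use of log_reg.reconstructed_x above suggests) and that every layer exposes a
# params list, as the LSTM and LogisticRegression layers do in Example #1.
reconstruction_cost = T.mean(T.sqr(init_reg.reconstructed_x - x))
params = (init_reg.params + lstm_1.params + lstm_2.params + log_reg.params +
          lstm_3.params + lstm_4.params)
grads = T.grad(reconstruction_cost, params)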
Example #4
    poolsize=(1, 1),
    dim2=1
)

layer3 = LeNetConvPoolLayer(
    rng,
    input=layer2.output,
    image_shape=(minibatch_size, layer2_filters, 15, 15),
    filter_shape=(layer3_filters, layer2_filters, 2, 2),
    poolsize=(1, 1),
    dim2=1
)

reg_input = layer3.output.flatten(2)

log_reg = LogisticRegression(reg_input, 15 * 15 * layer3_filters, 41)

lin_reg = LinearRegressionRandom(reg_input, 15 * 15 * layer3_filters, 2, True)

log_input = log_reg.p_y_given_x
lin_input = lin_reg.E_y_given_x

log_reg.reconstruct(log_input)
lin_reg.reconstruct(lin_input)

reconstructed_regressions = T.concatenate(
    [log_reg.reconstructed_x, lin_reg.reconstructed_x], axis=1)

reverse_layer = LinearRegression(reconstructed_regressions,
                                 2 * 15 * 15 * layer3_filters,
                                 15 * 15 * layer3_filters, False)

reconstruct = reverse_layer.E_y_given_x.reshape(
    (minibatch_size, layer3_filters, 15, 15))
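# --- Hedged sketch, not part of the original snippet ---
# The snippet ends after reshaping the reconstruction back to feature-map form.
# One plausible continuation is a mean-squared reconstruction cost against the
# feature maps that were flattened into reg_input (layer3.output); the choice
# of target and the params attribute on each layer are assumptions.
cost = T.mean(T.sqr(reconstruct - layer3.output))
params = layer3.params + log_reg.params + lin_reg.params + reverse_layer.params
grads = T.grad(cost, params)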
Example #5
                                          2),
                            poolsize=(1, 1),
                            dim2=1)

layer3 = LeNetConvPoolLayer(rng,
                            input=layer2.output,
                            image_shape=(minibatch_size, layer2_filters, 15,
                                         15),
                            filter_shape=(layer3_filters, layer2_filters, 2,
                                          2),
                            poolsize=(1, 1),
                            dim2=1)

reg_input = layer3.output.flatten(2)

log_reg = LogisticRegression(reg_input, 15 * 15 * layer3_filters, 41)

lin_reg = LinearRegressionRandom(reg_input, 15 * 15 * layer3_filters, 2, True)

log_input = log_reg.p_y_given_x
lin_input = lin_reg.E_y_given_x

log_reg.reconstruct(log_input)
lin_reg.reconstruct(lin_input)

reconstructed_regressions = T.concatenate(
    [log_reg.reconstructed_x, lin_reg.reconstructed_x], axis=1)

reverse_layer = LinearRegression(reconstructed_regressions,
                                 2 * 15 * 15 * layer3_filters,
                                 15 * 15 * layer3_filters, False)
Example #6
# allocate symbolic variables for the data
index = T.lscalar()  # index to a [mini]batch
x = T.matrix('x')  # the data is presented as a vector of inputs with many exchangeable examples of this vector
x = clip_gradient(x, 1.0)
y = T.matrix('y')  # the data is presented as a vector of inputs with many exchangeable examples of this vector

is_train = T.iscalar('is_train') # pseudo boolean for switching between training and prediction

rng = numpy.random.RandomState(1234)

# Architecture: input --> LSTM --> predict one-ahead

lstm_1 = LSTM(rng, x, n_in=data_set_x.get_value(borrow=True).shape[1], n_out=n_hidden)

output = LogisticRegression(input=lstm_1.output, n_in=n_hidden, n_out=data_set_x.get_value(borrow=True).shape[1])


################################
# Objective function and GD
################################

print('defining cost, parameters, and learning function...')

# the cost we minimize during training is the mean binary cross-entropy
# (the Bernoulli negative log likelihood) between the prediction and y
cost = T.mean(output.cross_entropy_binary(y))

# parameters to be updated during training
params = lstm_1.params + output.params
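# --- Hedged sketch, not part of the original snippet ---
# is_train is declared above for switching between training and prediction,
# but the snippet ends before any function is compiled.  A minimal prediction
# function over one minibatch could look like this; batch_size and the
# p_y_given_x attribute on LogisticRegression (seen in Examples #4 and #5)
# are assumptions.
batch_size = 20

predict_model = theano.function(
    inputs=[index],
    outputs=output.p_y_given_x,
    givens={x: data_set_x[index * batch_size:(index + 1) * batch_size]})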