def createnet():
    return createmodel(
        rnn_layer_layers=RNN_HIDDEN_LAYERS,
        isbrnn=RNN_IS_BIDIRECTIONAL,
        batch_size=BATCH_SIZE,
        n_features=N_FEATURES,
        n_classes=N_CLASSES,
        layer_type_rnn=RNN_LAYER_TYPE,
        padded_seq_len=MAX_SEQ_LENGTH,
        input_layers=INPUT_LAYERS,
        input_layer_dropout=INPUT_DROPOUT_RATE,
        output_layers=OUTPUT_LAYERS,
        final_output_layer=OUTPUT,
        learn_init=LEARN_INIT,
        dropout_rnn=RNN_DROPOUT_RATE,
        output_layer_dropout=OUTPUT_DROPOUT_RATE,
        unittype_rnn=UNITTYPE_RNN,
        relucap=RELU_CAP,
        reluleakyness=RELU_LEAKYNESS)
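
# A minimal usage sketch of the wrapper above (hedged: it assumes, based on the
# call in the else-branch further down, that createmodel returns an
# (output_layer, _, input_layer) tuple and that the upper-case hyperparameter
# constants are defined earlier in this script). Illustrative only, so it is
# left commented out:
#
#   l_out, _, l_in = createnet()
#   # expected final output shape: (BATCH_SIZE, MAX_SEQ_LENGTH, N_CLASSES)
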
        l_in, num_units=102, peepholes=peepholes, learn_init=True)
    recout = lasagne.layers.BidirectionalLSTMLayer(
        recout, num_units=156, peepholes=peepholes, learn_init=True)
    l_reshape = lasagne.layers.ReshapeLayer(
        recout, (BATCH_SIZE*MAX_SEQ_LENGTH, recout.get_output_shape()[-1]))
    l_rec_out = lasagne.layers.DenseLayer(
        l_reshape, num_units=N_CLASSES,
        nonlinearity=lasagne.nonlinearities.softmax)
    l_out = lasagne.layers.ReshapeLayer(
        l_rec_out, (BATCH_SIZE, MAX_SEQ_LENGTH, N_CLASSES))
else:
    l_out, _, l_in = createmodel(
        rnn_layer_layers=[3, 4, 5],
        isbrnn=True,
        batch_size=BATCH_SIZE,
        n_features=N_FEATURES,
        n_classes=N_CLASSES,
        layer_type_rnn="LSTMFAST",
        padded_seq_len=MAX_SEQ_LENGTH,
        output_layers=[7],
        input_layers=None,
        learn_init=True,
        final_output_layer="softmax")

# createnet in LSTMTrainingFunctions can set up networks with a number of
# different architectures.


# Cross-entropy cost function.
# Note that we use the mask to ignore masked (padded) positions during the
# cost calculation.
def costfun(p_y_given_x, y, mask, db='COST:'):
    shape = (BATCH_SIZE*MAX_SEQ_LENGTH, N_CLASSES)
    y_reshape = y.flatten()