# Assemble the graph: shifted sequence -> RNN -> per-timestep dense -> softmax.
model.add_node(shift_layer, name="shift", input="sequence_input")
model.add_node(rnn_layer, name="rnn", input="shift")
model.add_node(TimeDistributedDense(input_dim), name="tdd", input="rnn")
model.add_node(Activation("softmax"), name="softmax", input="tdd")
model.add_output(name="output", input="softmax")
model.compile(loss={"output": "categorical_crossentropy"}, optimizer="adam")

# Train one epoch at a time so we can inspect predictions between epochs.
for iteration in range(1, 200):
    print()
    print("-" * 50)
    print("Iteration", iteration)
    model.fit(
        {"initial_value": i_train, "sequence_input": X_train, "output": y_train},
        batch_size=BATCH_SIZE,
        nb_epoch=1,
        # BUG FIX: the validation dict must also carry the target ("output");
        # without it Keras cannot compute the validation loss/accuracy.
        validation_data={
            "initial_value": i_val,
            "sequence_input": X_val,
            "output": y_val,
        },
        show_accuracy=True,
    )
    ###
    # Visualize errors on the first 10 samples of the validation set.
    # NOTE(review): despite the original "at random" comment, this takes the
    # first 10 rows deterministically.
    for i in range(10):
        idx = np.array([i])
        rowi, rowX, rowy = i_val[idx], X_val[idx], y_val[idx]
        # BUG FIX: Graph models expose predict() (returning a dict of output
        # arrays), not the Sequential-only predict_classes(); the argmax
        # below converts per-timestep probabilities into class indices.
        proba = model.predict(
            {"initial_value": rowi, "sequence_input": rowX}, verbose=0
        )["output"]
        preds = proba.argmax(axis=-1)
        start = convertor.decode(rowX[0])
        correct = convertor.decode(rowy[0])
        guess = convertor.decode(preds[0], calc_argmax=False)
        print("Start : ", start)
# Merge the four feature branches into one dense layer, classify into 10
# classes, and declare the graph's single output.
model.add_node(
    Dense(1024, activation='relu'),
    name='layer4',
    inputs=['n11_f', 'n22_f', 'n23_f', 'n24_f'],
    merge_mode='concat',
)
model.add_node(Dense(10, activation='softmax'), name='layer5', input='layer4')
model.add_output(name='output1', input='layer5')

# NOTE(review): converted Python 2 `print` statements to print() calls for
# consistency with the rest of the file, which already uses the function form.
print('Training....')
model.compile(
    loss={'output1': 'categorical_crossentropy'},
    optimizer='adam',
    metrics=['accuracy'],
)
model.fit(
    {'n00': x_train, 'output1': y_train},
    nb_epoch=nb_epoch,
    batch_size=batch_size,
    validation_split=0.3,
    shuffle=True,
    verbose=1,
)

# Model result:
# BUG FIX: Graph.evaluate() takes a single data dict keyed by input/output
# names (the same shape as fit()), not positional (x, y) arrays.
loss_and_metrics = model.evaluate(
    {'n00': x_train, 'output1': y_train},
    batch_size=batch_size,
    verbose=1,
)
print('Done!')
print('Loss: ', loss_and_metrics[0])
print(' Acc: ', loss_and_metrics[1])
# Saving Model