from math import ceil, log10

from keras.models import Sequential
from keras.layers import Dense, LSTM, RepeatVector, TimeDistributed

# problem configuration (example values; adjust to your task)
alphabet = [str(x) for x in range(10)] + ['+', ' ']
n_numbers = 3      # terms per addition problem
largest = 10       # largest value a term can take
n_samples = 1000   # fresh examples generated per epoch
n_chars = len(alphabet)
n_in_seq_length = n_numbers * ceil(log10(largest + 1)) + n_numbers - 1
n_out_seq_length = ceil(log10(n_numbers * (largest + 1)))
n_batch = 100
n_epoch = 500

# encoder-decoder LSTM: encode the input characters into a fixed vector,
# repeat it once per output step, then decode with a second LSTM
model = Sequential([
    LSTM(100, input_shape=(n_in_seq_length, n_chars)),
    RepeatVector(n_out_seq_length),
    LSTM(50, return_sequences=True),
    TimeDistributed(Dense(n_chars, activation='softmax'))
])
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# train on a new batch of random problems each epoch
for _ in range(n_epoch):
    x, y = generate_data(n_samples, largest, alphabet)
    model.fit(x, y, epochs=1, batch_size=n_batch)
model.save('training/keras_classifier.h5')

# evaluate on some new patterns
x, y = generate_data(n_samples, largest, alphabet)
result = model.predict(x, batch_size=n_batch, verbose=0)

# calculate error by decoding the one-hot vectors back to strings
expected = [invert(seq, alphabet) for seq in y]
predicted = [invert(seq, alphabet) for seq in result]

# show some examples
for i in range(20):
    print('Expected=%s, Predicted=%s' % (expected[i], predicted[i]))
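# generate_data() and invert() are assumed helpers not shown above. A minimal
# sketch of what they might look like, assuming the usual setup of summing
# n_numbers random integers and one-hot encoding space-padded strings (relies
# on the module-level n_numbers, n_in_seq_length and n_out_seq_length):
from random import randint

import numpy as np

def generate_data(n_samples, largest, alphabet):
    # build n_samples random addition problems, one-hot encoded per character
    char_to_int = {c: i for i, c in enumerate(alphabet)}

    def encode(s):
        return [[1 if i == char_to_int[c] else 0
                 for i in range(len(alphabet))] for c in s]

    X, y = [], []
    for _ in range(n_samples):
        terms = [randint(1, largest) for _ in range(n_numbers)]
        X.append(encode('+'.join(str(t) for t in terms).rjust(n_in_seq_length)))
        y.append(encode(str(sum(terms)).rjust(n_out_seq_length)))
    return np.array(X), np.array(y)

def invert(seq, alphabet):
    # map each one-hot/softmax vector back to its most likely character
    int_to_char = dict(enumerate(alphabet))
    return ''.join(int_to_char[int(np.argmax(v))] for v in seq).strip()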
# refactored version using the project's converter/generator helpers
# (same imports as above)
n_out_seq_length = ceil(log10(numbers * (largest + 1)))

model = Sequential([
    LSTM(100, input_shape=(converter.max_number_length, num_chars)),
    RepeatVector(n_out_seq_length),
    LSTM(50, return_sequences=True),
    # one softmax unit per character in the alphabet; Dense(1) with softmax
    # would always output 1.0 and the model could not learn
    TimeDistributed(Dense(num_chars, activation='softmax'))
])
# one-hot character targets call for categorical, not binary, cross-entropy
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])

# sanity check once: the encoded sample shape must match the model's input_shape
x, y = generator.build_sample(samples, largest)
print(converter.max_number_length, num_chars)
print(len(x[0]), len(x[0][0]))

# train on a new batch of random problems each epoch
for _ in range(epoch):
    x, y = generator.build_sample(samples, largest)
    model.fit(x, y, epochs=1, batch_size=batch)
model.save('training/classifier.h5')

# evaluate on some new patterns
x, y = generator.build_sample(samples, largest)
result = model.predict(x, batch_size=batch, verbose=0)

# calculate error by decoding the one-hot vectors back to strings
expected = [converter.invert(seq) for seq in y]
predicted = [converter.invert(seq) for seq in result]

# show some examples
for i in range(20):
    print('Expected=%s, Predicted=%s' % (expected[i], predicted[i]))
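# converter and generator are project-local helpers whose definitions are not
# shown in this section. A rough sketch of the interface the script above
# assumes; only the attribute and method names come from that code, the
# implementations here are guesses:
import numpy as np

class Converter:
    def __init__(self, alphabet, max_number_length):
        self.alphabet = alphabet
        self.max_number_length = max_number_length  # encoded input length

    def invert(self, seq):
        # decode a sequence of probability vectors back into a string
        int_to_char = dict(enumerate(self.alphabet))
        return ''.join(int_to_char[int(np.argmax(v))] for v in seq).strip()

class Generator:
    def __init__(self, converter):
        self.converter = converter

    def build_sample(self, samples, largest):
        # return one-hot (x, y) arrays; here it simply delegates to the
        # generate_data() sketch shown earlier
        return generate_data(samples, largest, self.converter.alphabet)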