import numpy as np
import tensorflow as tf

# X_train is assumed to be a 2-D array of shape (n_rows, num_features)
# and y_train a 1-D array of binary labels, both defined upstream.
look_back = 3
num_features = X_train.shape[1]

# Each sample is a window of `look_back` consecutive rows of X_train;
# its label is the y value immediately after the window.
nb_samples = X_train.shape[0] - look_back

x_train_reshaped = np.zeros((nb_samples, look_back, num_features))
y_train_reshaped = np.zeros(nb_samples)

for i in range(nb_samples):
    y_position = i + look_back
    x_train_reshaped[i] = X_train[i:y_position]
    y_train_reshaped[i] = y_train[y_position]
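
# Loop-free alternative (TF 2.3+): tf.keras.utils.timeseries_dataset_from_array
# builds the same windows but yields a batched tf.data.Dataset instead of
# NumPy arrays. A sketch only; the batch size of 32 is an arbitrary choice.
train_ds = tf.keras.utils.timeseries_dataset_from_array(
    data=X_train,
    targets=y_train[look_back:],  # label for the window starting at i is y_train[i + look_back]
    sequence_length=look_back,
    batch_size=32,
)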

model = tf.keras.Sequential()
# Declare the input shape up front so every layer is built immediately:
# each sample is one (look_back, num_features) window.
model.add(tf.keras.Input(shape=(look_back, num_features)))
# model.add(tf.keras.layers.Embedding(len(test_encoding), 64))
# To stack a second Bidirectional LSTM, pass return_sequences=True to this one.
model.add(tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64)))
# model.add(tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(64)))
# One or more dense layers.
# Edit the list in the `for` line to experiment with layer sizes.
for units in [64, 64]:
    model.add(tf.keras.layers.Dense(units, activation='relu'))

# Output layer: a single sigmoid unit for binary classification.
model.add(tf.keras.layers.Dense(1, activation='sigmoid'))
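
# With the input shape declared above, the layer shapes can be inspected
# before training:
model.summary()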

model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy',
                       tf.keras.metrics.TruePositives(name='tp'),
                       tf.keras.metrics.FalsePositives(name='fp')])
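
# Train on the windowed arrays. The epoch count, batch size, and
# validation split below are placeholder choices, not values from the
# original setup; `model.fit(train_ds, epochs=10)` would work the same
# way with the tf.data pipeline built above.
history = model.fit(x_train_reshaped, y_train_reshaped,
                    epochs=10,
                    batch_size=32,
                    validation_split=0.2)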