# Report predictions and held-out performance of the trained network.
print(pred_1)
eval_1 = model_1.evaluate(x_test, y_test)
print(eval_1)
model_1.summary()

# Plot validation accuracy for the two training runs
# (blue = `training`, red = `training_1`).
plt.plot(training.history['val_acc'], 'b', training_1.history['val_acc'], 'r')
plt.xlabel('Epochs')
plt.ylabel('Validation score')
plt.show()

# Project the `corona` feature matrix to 2-D with t-SNE.
# NOTE(review): this rebinds `model_1` (previously the Keras model) —
# use a distinct name if the network is still needed afterwards.
model_1 = TSNE(learning_rate=150, perplexity=30)
transformed = model_1.fit_transform(corona)
xs = transformed[:, 0]
ys = transformed[:, 1]

# Scatter the embedding and annotate each point with its sample name.
plt.scatter(xs, ys, alpha=0.5)
for a, b, c_virus in zip(xs, ys, samples):
    plt.annotate(c_virus, (a, b), fontsize=5, alpha=0.75)
plt.show()
# fix: removed a duplicated back-to-back plt.show() — the second call was a no-op.

# Render the feedforward network architecture to PNG and display it.
plot_model(model, to_file='corona_model.png')
data = plt.imread('corona_model.png')
plt.imshow(data)
plt.show()
# --- Autoencoder: finish the decoder, train, and extract the bottleneck ---
model.add(Dense(8, activation='elu'))
model.add(Dense(10, activation='elu'))
# Output layer reconstructs the input, hence X.shape[1] units.
model.add(Dense(X.shape[1], activation='sigmoid'))
model.compile(loss='mean_squared_error', optimizer=Adam())
# Autoencoder training: the input is also the target.
model.fit(X, X, batch_size=128, epochs=100, shuffle=True, verbose=1)

# Encoder sub-model: network input -> activations of the 'bottleneck' layer.
encoder = Model(model.input, model.get_layer('bottleneck').output)
bottleneck_representation = encoder.predict(X)

# --- t-SNE on the bottleneck representation ---
model_tsne_auto = TSNE(learning_rate=200, n_components=2, random_state=123,
                       perplexity=90, n_iter=1000, verbose=1)
tsne_auto = model_tsne_auto.fit_transform(bottleneck_representation)
plt.scatter(tsne_auto[:, 0], tsne_auto[:, 1], c=Y, cmap='tab20', s=10)
plt.title('tSNE on Autoencoder: 8 Layers')
plt.xlabel("tSNE1")
plt.ylabel("tSNE2")
# fix: without closing this figure, the UMAP scatter below is drawn on top of
# the t-SNE scatter in the same axes.
plt.show()

# --- UMAP on the pre-reduced data ---
from umap import UMAP
# NOTE(review): rebinds `model` (previously the Keras autoencoder).
model = UMAP(n_neighbors=30, min_dist=0.3, n_components=2)
umap = model.fit_transform(X_reduced)
umap_coords = pd.DataFrame({'UMAP1': umap[:, 0], 'UMAP2': umap[:, 1]})
umap_coords.to_csv('umap_coords_10X_1.3M_MouseBrain.txt', sep='\t')
plt.scatter(umap[:, 0], umap[:, 1], c=Y, cmap='tab20', s=1)
plt.title('UMAP')
plt.xlabel("UMAP1")
plt.ylabel("UMAP2")
plt.show()
# Inspect individual probability rows and the three slice groups of `newProba`.
print(newProba[11])
print(newProba[21])
print("1")
print(newProba[0:9])
print("2")
print(newProba[10:19])
print("3")
print(newProba[20:-1])
print(labels)
print(model.predict_classes(X))

# t-SNE embedding of the class-probability vectors (PCA initialisation).
from sklearn.manifold import TSNE
model = TSNE(n_components=nb_classes, random_state=0, init="pca")
toPlot = model.fit_transform(newProba)
title = "t-SNE embedding of the spectrograms"
# Min-max normalise the embedding to [0, 1] per axis before plotting.
x_min, x_max = np.min(toPlot, 0), np.max(toPlot, 0)
toPlot = (toPlot - x_min) / (x_max - x_min)
print(toPlot.shape)
labelsName = ["bob", "steve", "dave"]
cmap = sns.color_palette("Set2", n_colors=3)
plt.figure()
# Label every embedded point; the plt.text(...) call is cut off at the end of
# this chunk — its arguments continue beyond the visible source.
for i in range(toPlot.shape[0]):
    plt.text(
# Transfer learning: MobileNetV2 backbone + 5-class softmax head.
from keras.layers import Dense
from keras.applications import MobileNetV2
import math

# Shared generator: rescale pixels to [0, 1] plus light shear/zoom augmentation.
# NOTE(review): the same augmenting generator feeds the test split — evaluation
# data is normally only rescaled; confirm the augmentation there is intended.
datagen = ImageDataGenerator(rescale=1./255, shear_range=0.2, zoom_range=0.3)
train_generator = datagen.flow_from_directory('data/dataset/train',
                                              target_size=(224, 224),
                                              batch_size=64,
                                              class_mode='categorical')
test_generator = datagen.flow_from_directory('data/dataset/test',
                                             target_size=(224, 224),
                                             batch_size=64,
                                             class_mode='categorical')

# Pre-trained ImageNet backbone without its classifier, global-average pooled.
mobile = MobileNetV2(include_top=False, weights="imagenet",
                     input_shape=(224, 224, 3), pooling="avg")
model = Sequential()
model.add(mobile)
model.add(Dense(5, activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])

# fix: Keras models have no fit_transform() — the original call would raise
# AttributeError; train with fit().
# fix: step counts must be integers — ceil so every sample is seen per epoch
# (2360 train images, 263 test images, batch size 64).
model.fit(train_generator,
          epochs=10,
          steps_per_epoch=math.ceil(2360 / 64),
          validation_data=test_generator,
          validation_steps=math.ceil(263 / 64))
model.save('data/model.h5')
# Inspect individual probability rows and the three slice groups of `newProba`.
print(newProba[2])
print(newProba[11])
print(newProba[21])
print("1")
print(newProba[0:9])
print("2")
print(newProba[10:19])
print("3")
print(newProba[20:-1])
print(labels)
print(model.predict_classes(X))

# t-SNE embedding of the class-probability vectors (PCA initialisation).
from sklearn.manifold import TSNE
model = TSNE(n_components=nb_classes, random_state=0, init='pca')
toPlot = model.fit_transform(newProba)
title = "t-SNE embedding of the spectrograms"
# Min-max normalise the embedding to [0, 1] per axis before plotting.
x_min, x_max = np.min(toPlot, 0), np.max(toPlot, 0)
toPlot = (toPlot - x_min) / (x_max - x_min)
print(toPlot.shape)
labelsName = ["bob", "steve", "dave"]
cmap = sns.color_palette("Set2", n_colors=3)
plt.figure()
# Place a text label at each embedded point; the plt.text(...) call is cut off
# at the end of this chunk — its remaining arguments are not visible here.
for i in range(toPlot.shape[0]):
    plt.text(toPlot[i, 0], toPlot[i, 1],