Example #1
0
def load_extract_dataset(size, samples):
    """Load VGG16-extracted features for the ISSM dataset, building them on first use.

    Parameters
    ----------
    size : int
        Image side length the features were extracted at (part of the cache filename).
    samples : int
        Number of samples in the extracted dataset (part of the cache filename).

    Returns
    -------
    The unpickled training-feature object stored in
    ``extracted_{samples}_{size}_vgg16.pkl``.
    """
    dataset_path = "/home/shouki/Desktop/Programming/Python/AI/Datasets/ImageData/ISSM/Extracted"
    # Build the cache path once instead of repeating the join/format twice.
    pickle_path = os.path.join(
        dataset_path, "extracted_{}_{}_vgg16.pkl".format(samples, size))

    # First use: extract and cache the features before loading them.
    # (`not os.path.exists(...)` replaces the `== False` comparison.)
    if not os.path.exists(pickle_path):
        x_train, _ = load_regulized_train_dataset(samples, size)
        prepare_extract_dataset(x_train, size, samples)

    with open(pickle_path, "rb") as f:
        return pickle.load(f)
Example #2
0
from unbalance import load_regulized_train_dataset
import numpy as np
import matplotlib.pyplot as plt

# Plot and save a histogram of label frequencies for the balanced ISSM dataset.
# Fixed the typo "x_trian" -> "x_train"; the images themselves are unused here.
x_train, y_train = load_regulized_train_dataset(1000)

# Count how many samples each distinct label has.
unique, count = np.unique(y_train, return_counts=True)

plt.bar(unique, count)
plt.xlabel("Labels")
plt.ylabel("Label Count")
plt.title("ISSM Dataset Labels")
plt.savefig("./issm_regulized_label.png")
Example #3
0
from keras.preprocessing.image import ImageDataGenerator
import numpy as np
import pickle
import tensorflow as tf
from sklearn.model_selection import train_test_split

# Enable on-demand GPU memory growth so TensorFlow does not grab all VRAM.
# Guard against machines with no visible GPU: indexing an empty device list
# would raise IndexError.
physical_devices = tf.config.experimental.list_physical_devices("GPU")
if physical_devices:
    tf.config.experimental.set_memory_growth(physical_devices[0], True)

# Training hyper-parameters.
batch_size = 64
epochs = 200
samples = 20000               # number of training samples to load
size = 64                     # square image side length in pixels
input_size = (size, size, 3)  # RGB input shape fed to the CNN

# Load the class-balanced dataset and one-hot encode the labels.
# NOTE(review): `load_regulized_train_dataset`, `to_categorical`, `cnn_model`,
# `ModelCheckpoint` and `TensorBoard` are not imported in this snippet —
# presumably imported elsewhere in the original file; verify before running.
x_train, y_train = load_regulized_train_dataset(samples, size)
y_train = to_categorical(y_train)
# Stratified 80/20 split keeps the label distribution in both partitions.
x_train, x_validation, y_train, y_validation = train_test_split(
    x_train, y_train, test_size=0.2, stratify=y_train)

model = cnn_model(input_size)

# Checkpoint every epoch and log metrics for TensorBoard.
callbacks = [
    ModelCheckpoint(filepath="./models/model_{epoch:02d}.h5"),
    TensorBoard(log_dir="./logs")
]
model_history = model.fit(x_train,
                          y_train,
                          epochs=epochs,
                          callbacks=callbacks,