Exemplo n.º 1
0
from libs.hdf5datasetwriter import HDF5DatasetWriter

# Strip the classification head (last two layers) off a saved DenseNet121 and
# rebuild the remainder as a Sequential model used as a fixed feature extractor.
model = load_model("DenseNet121")
model1 = keras.Sequential()
model.pop()
model.pop()
for layer in model.layers:
    model1.add(layer)
model = model1
# summary() prints itself and returns None; wrapping it in print() emitted "None".
model.summary()
BATCH_SIZE = 32

trainGen = HDF5DatasetGenerator("hdf5/train2.hdf5", BATCH_SIZE, classes=5)
valGen = HDF5DatasetGenerator("hdf5/val.hdf5", BATCH_SIZE, classes=5)
print(trainGen.db["images"].shape[0])
print(valGen.db["images"].shape[0])

# DenseNet121's pooled penultimate layer yields 1024-d vectors per image.
train_dataset = HDF5DatasetWriter((trainGen.db["images"].shape[0], 1024),
                                  "hdf5/train_features_DenseNet121.hdf5",
                                  dataKey="features",
                                  bufSize=1000)

# Make the prediction batch size explicit (same value Keras defaults to).
features = model1.predict(trainGen.db["images"], batch_size=BATCH_SIZE)
print(features.shape)
train_dataset.add(features, trainGen.db["labels"])
train_dataset.close()

val_dataset = HDF5DatasetWriter((valGen.db["images"].shape[0], 1024),
                                "hdf5/val_features_DenseNet121.hdf5",
                                dataKey="features",
                                bufSize=1000)

features = model1.predict(valGen.db["images"], batch_size=BATCH_SIZE)
print(features.shape)
# The original predicted the validation features but never wrote or closed the
# writer, so the val HDF5 file was left empty; mirror the train half.
val_dataset.add(features, valGen.db["labels"])
val_dataset.close()
Exemplo n.º 2
0
# Strip the classification head (last two layers) off a saved MobileNetV2 and
# rebuild the remainder as a Sequential model used as a fixed feature extractor.
model = load_model("MobileNetV2")
model1 = keras.Sequential()
model.pop()
model.pop()
for layer in model.layers:
    model1.add(layer)
model = model1
# summary() prints itself and returns None; wrapping it in print() emitted "None".
model.summary()
BATCH_SIZE = 32

trainGen = HDF5DatasetGenerator("hdf5/train.hdf5", BATCH_SIZE, classes=5)
valGen = HDF5DatasetGenerator("hdf5/val.hdf5", BATCH_SIZE, classes=5)
print(trainGen.db["images"].shape[0])
print(valGen.db["images"].shape[0])

# MobileNetV2's pooled penultimate layer yields 1280-d vectors per image.
train_dataset = HDF5DatasetWriter((trainGen.db["images"].shape[0], 1280),
                                  "hdf5/train_features_MobileNetV2.hdf5",
                                  dataKey="features",
                                  bufSize=1000)

# Make the prediction batch size explicit (same value Keras defaults to).
features = model1.predict(trainGen.db["images"], batch_size=BATCH_SIZE)
print(features.shape)
train_dataset.add(features, trainGen.db["labels"])
train_dataset.close()

val_dataset = HDF5DatasetWriter((valGen.db["images"].shape[0], 1280),
                                "hdf5/val_features_MobileNetV2.hdf5",
                                dataKey="features",
                                bufSize=1000)

# The original opened the val writer but never filled or closed it, leaving an
# empty features file; mirror the train half.
features = model1.predict(valGen.db["images"], batch_size=BATCH_SIZE)
print(features.shape)
val_dataset.add(features, valGen.db["labels"])
val_dataset.close()
Exemplo n.º 3
0
import json

# Build train/val/test HDF5 image datasets (224x224x3) and record the running
# per-channel means of the training images for later mean subtraction.
testPaths = list(paths.list_images("../../test_resize_224"))
testLabels = test.diagnosis.values

# Carve a stratified 500-image validation split out of the training data.
split = train_test_split(trainPaths, trainLabels,
                         test_size=500, stratify=trainLabels,
                         random_state=42)
(trainPaths, valPaths, trainLabels, valLabels) = split

datasets = [
    ("train1", trainPaths[:4000], trainLabels[:4000], "hdf5/train1.hdf5"),
    ("train2", trainPaths[4000:], trainLabels[4000:], "hdf5/train2.hdf5"),
    ("val", valPaths, valLabels, "hdf5/val.hdf5"),
    ("test", testPaths, testLabels, "hdf5/test.hdf5"),
]
(R, G, B) = ([], [], [])

# Loop variables renamed: the original reused `paths`, shadowing the
# `paths` module called above.
for (dType, imagePaths, imageLabels, outputPath) in datasets:
    print("[INFO] building {}...".format(outputPath))
    writer = HDF5DatasetWriter((len(imagePaths), 224, 224, 3), outputPath)
    # total= gives tqdm a length (zip alone has none, so no progress bar).
    for (i, (path, label)) in enumerate(tqdm(zip(imagePaths, imageLabels),
                                             total=len(imagePaths))):
        image = cv2.imread(path)
        # Check for unreadable images BEFORE using them: the original ran
        # cv2.mean(image) first (raises on None) and then wrote the None
        # entry into the HDF5 file anyway.
        if image is None:
            print(path)
            continue
        if dType in ("train1", "train2"):
            # cv2.mean returns BGR order; accumulate as R/G/B.
            (b, g, r) = cv2.mean(image)[:3]
            R.append(r)
            G.append(g)
            B.append(b)
        writer.add([image], [label])
    writer.close()

print("[INFO] serializing means...")
# float() conversion: json cannot serialize numpy.float64 values.
D = {"R": float(np.mean(R)), "G": float(np.mean(G)), "B": float(np.mean(B))}
# The original opened the file but never wrote or closed it; persist the means.
with open("output/mean.json", "w") as f:
    f.write(json.dumps(D))
Exemplo n.º 4
0
# Strip the classification head (last two layers) off a saved ResNet50 and
# rebuild the remainder as a Sequential model used as a fixed feature extractor.
model = load_model("ResNet50")
model1 = keras.Sequential()
model.pop()
model.pop()
for layer in model.layers:
    model1.add(layer)
model = model1
# summary() prints itself and returns None; wrapping it in print() emitted "None".
model.summary()
BATCH_SIZE = 32

trainGen = HDF5DatasetGenerator("hdf5/train.hdf5", BATCH_SIZE, classes=5)
valGen = HDF5DatasetGenerator("hdf5/val.hdf5", BATCH_SIZE, classes=5)
print(trainGen.db["images"].shape[0])
print(valGen.db["images"].shape[0])

# ResNet50's pooled penultimate layer yields 2048-d vectors per image.
train_dataset = HDF5DatasetWriter((trainGen.db["images"].shape[0], 2048),
                                  "hdf5/train_features_ResNet50.hdf5",
                                  dataKey="features",
                                  bufSize=1000)

# Make the prediction batch size explicit (same value Keras defaults to).
features = model1.predict(trainGen.db["images"], batch_size=BATCH_SIZE)
print(features.shape)
train_dataset.add(features, trainGen.db["labels"])
train_dataset.close()

# NOTE(review): the val features are created below but no add()/close() is
# visible in this chunk — confirm the file continues with the val write.
val_dataset = HDF5DatasetWriter((valGen.db["images"].shape[0], 2048),
                                "hdf5/val_features_ResNet50.hdf5",
                                dataKey="features",
                                bufSize=1000)