Example #1
from model import generate_classifier
from keras.preprocessing import image
import numpy as np
import os
import cv2
import json
from filter_with_model import crop_double_check
input_dirs = []

save_dirs = ["col_pad_bo", "col_pad_bu", "col_pad_pu", "col_pad_no"]
crop_dir = "col_pad_crop"
cutoff = 0.8
m = generate_classifier()
m.load_weights("weights/lastClassifier2.h5")
os.makedirs(crop_dir, exist_ok=True)
for save_dir in save_dirs:
    if not os.path.exists(save_dir):
        print("creating_save_dir")
        os.makedirs(save_dir)


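# classify(): load one .jpg, convert it to an array and (further down in this
# function) run it through the classifier; returns (None, -1) when the file is
# not a readable .jpg.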
def classify(imgname, cutoff=0.9):
    try:
        if not imgname.endswith("jpg"):
            return None, -1
        try:
            img = image.load_img(imgname)
        except Exception:
            # unreadable or corrupt image file
            return None, -1

        img = image.img_to_array(img)
Example #2
import os

# Imports for the names used below: generator.py and model.py are local project
# modules; the ModelCheckpoint import follows the tensorflow.keras path used in Example #4.
import generator
import model
from tensorflow.keras.callbacks import ModelCheckpoint

DATA_FOLDER = "data2"  # e.g. C:\programmering\differentiate
BATCH_SIZE = 128
IMAGE_SIZE = 128
LEARNING_RATE = 0.0005
LOAD_WEIGHTS = False
EPOCHS = 500
classes = len(os.listdir(DATA_FOLDER))

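# generator.generator() first yields the list of class folders, then a total
# count, and after that (image batch, one-hot label) tuples (cf. the yield in
# Example #3).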
gen = generator.generator(batch_size=BATCH_SIZE, data_folder=DATA_FOLDER)

folders = next(gen)
with open("weights/categories.txt", "w") as f:
    f.write(str(folders))
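# Persist the class-folder order so inference code can map prediction indices
# back to class names (this file is read back in Example #4).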

total = next(gen)
modl = model.generate_classifier(IMAGE_SIZE, classes, LEARNING_RATE)

if LOAD_WEIGHTS:
    modl.load_weights("weights/accurate.h5")
print(modl.summary())

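# Two checkpoints: last.h5 is overwritten after every epoch, accurate.h5 only
# when the training loss improves.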
check1 = ModelCheckpoint(filepath="weights/last.h5",
                         save_best_only=False,
                         save_weights_only=True,
                         verbose=1)
check2 = ModelCheckpoint(filepath="weights/accurate.h5",
                         monitor="loss",
                         save_best_only=True,
                         save_weights_only=True,
                         verbose=1)
# The fit call is cut off in this snippet; a plausible completion using the
# values defined above (assumption: `total` is the number of training samples):
modl.fit(gen,
         steps_per_epoch=total // BATCH_SIZE,
         epochs=EPOCHS,
         callbacks=[check1, check2])
Example #3
        #save.save("just_bordered/"+ str(index)+ "-" + str(int(use1)) + ".jpg")
        batch_img.append(img)
        label = np.zeros(classes)
        label[current_class] = 1
        batch_labels.append(label)
        #print(x1,y1,x2,y2)
        if len(batch_img) == batch_size:
            batch_img = np.array(batch_img)
            #print(batch_img.shape)
            batch_labels = np.array(batch_labels)
            #print(batch_labels.shape)
            yield (batch_img, batch_labels)
            batch_img = []
            batch_labels = []
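        # Advance to the next class so samples are drawn round-robin across classes.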
        current_class += 1
        if current_class == classes:
            current_class = 0


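# Training setup: build the classifier, optionally resume from the previous
# weights, and save the weights after every epoch.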
model = generate_classifier()
if load_weights:
    model.load_weights("weights/lastClassifier3.h5")
print(model.summary())
check1 = ModelCheckpoint(filepath="weights/lastClassifier3.h5",
                         save_best_only=False,
                         save_weights_only=True,
                         verbose=1)
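# Note: fit_generator is deprecated in recent Keras/TF releases; model.fit
# accepts generators directly (as in Example #2).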
model.fit_generator(generator=generator(),
                    steps_per_epoch=sum // batch_size,
                    epochs=100,
                    callbacks=[check1])
Example #4
import os

import cv2
import numpy as np
from tensorflow.keras.callbacks import ModelCheckpoint

# Local project modules referenced below (model.py, test_generator.py).
import model
import test_generator

data_folder = r"D:\data\filer\reddit_sub_pasta"  # raw string avoids the invalid "\d" escape
batch_size = 1
image_size = 128
learning_rate = 0.005
classes = len(os.listdir("data"))  # class count is taken from the local "data" folder, not data_folder

gen = test_generator.generator(batch_size=batch_size,
                               data_folder=data_folder,
                               total_classes=classes)
folders = next(gen)  # first yield: the folder list (overwritten below from categories.txt)
total = next(gen)    # second yield from the generator
modl = model.generate_classifier(image_size, classes, learning_rate)
modl.load_weights("weights/accurate.h5")

fold = os.listdir("data")

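# categories.txt was written with str(folder_list) by the training script
# (Example #2), so strip the brackets and quotes to recover the class names.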
with open("weights/categories.txt", "r") as f:
    folders = f.read()[1:-1]
    folders = folders.split(",")
    folders = [folder.strip()[1:-1] for folder in folders]

output = "result\\session"
os.makedirs(output, exist_ok=True)

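# One output sub-folder per class, presumably for sorting the classified images.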
for folds in folders:
    os.makedirs(os.path.join(output, folds), exist_ok=True)