Example #1
import numpy as np
import tensorflow as tf
from tensorflow.contrib.saved_model import load_keras_model
from tensorflow.keras.preprocessing.image import img_to_array, load_img

# preprocess_input (model-specific, e.g. from tensorflow.keras.applications)
# and load_labels are imported elsewhere in the original project.


def classify(args):
    # Load the image, resize it to the network's input size, and add a batch axis.
    image = img_to_array(
        load_img(args.image_path, target_size=(args.input_height, args.input_width)))
    image = image.reshape([1, args.input_height, args.input_width, -1])
    image = preprocess_input(image)

    tf.logging.info("Loading model: {}".format(args.model_root_dir))
    model = load_keras_model(args.model_root_dir)
    model.compile(optimizer="sgd", loss="categorical_crossentropy")

    results = model.predict(image)

    results = np.squeeze(results)

    # Indices of the five highest-scoring classes, best first.
    top_k = results.argsort()[-5:][::-1]

    tf.logging.info(top_k)
    labels = load_labels(args.labels)
    tf.logging.debug(labels)

    for i in top_k:
        print(labels[i], results[i])
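
Example #1 relies on a project-specific load_labels helper that is not part of the snippet. A minimal sketch of such a helper, assuming the labels file lists one class name per line (the file format is an assumption, not taken from the original project):

def load_labels(label_file):
    # Read one class name per line, in the same order as the model's output units.
    with open(label_file) as f:
        return [line.strip() for line in f if line.strip()]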
Example #2
import os

import tensorflow as tf
from tensorflow.contrib import saved_model
from tensorflow.keras.utils import multi_gpu_model

# G (number of GPUs) and INIT_LR (initial learning rate) are defined elsewhere
# in the original project.

validation_split = 0.10
logfile = "evaluation_log_3.txt"
graph_dir = "Graphs/minibatches64epoch1000/"
dir = "Saved_Model_3/"
save_dir = "Saved_Model_4/"

print("[INFO] Searching Latest checkpoint... ")
# Exports are written to integer-timestamped sub-directories, so sorting the
# names numerically puts the newest checkpoint last.
checkpoints = os.listdir(dir)
checkpoints = [int(x) for x in checkpoints]
checkpoints.sort()
checkpoints = [str(x) for x in checkpoints]

# create the base pre-trained model
if G <= 1:
    print("[INFO] training with 1 GPU...")
    model = saved_model.load_keras_model(dir + checkpoints[-1])
else:
    print("[INFO] training with {} GPUs...".format(G))
    with tf.device("/cpu:0"):
        model = saved_model.load_keras_model(dir + checkpoints[-1])
    model = multi_gpu_model(model, gpus=G)

# we need to recompile the model for these modifications to take effect
# we use SGD with a low learning rate
print("[INFO] Compiling Model ... ")
from tensorflow.keras.optimizers import SGD
model.compile(optimizer=SGD(lr=INIT_LR, momentum=0.9),
              loss='categorical_crossentropy',
              metrics=['accuracy'])

print("[INFO] Loading Data... ")
Example #3
import os

import tensorflow as tf
from tensorflow.contrib import saved_model
from tensorflow.keras.optimizers import SGD
from tensorflow.keras.utils import multi_gpu_model
from sklearn import preprocessing

# INIT_LR, maxEpochs, and G are defined elsewhere in the original project.


def poly_decay(epoch):
    # The snippet's original "def" line is missing; the name and signature here
    # are reconstructed for this polynomial learning-rate decay schedule.
    baseLR = INIT_LR
    power = 1.0
    alpha = baseLR * (1 - (epoch / float(maxEpochs))) ** power
    return alpha

print("[INFO] Searching Latest checkpoint... ")
dir = "Saved_Model/"
checkpoints = os.listdir(dir)
checkpoints = [int(x) for x in checkpoints]
checkpoints.sort()
checkpoints = [str(x) for x in checkpoints]

if G <= 1:
    print("[INFO] training with 1 GPU...")
    model = saved_model.load_keras_model(dir + "1548336946")
else:
    print("[INFO] training with {} GPUs...".format(G))
    with tf.device("/cpu:0"):
        model = saved_model.load_keras_model(dir + "1548336946")
    model = multi_gpu_model(model, gpus=G)
	
print("[INFO] compiling model...")
model.compile(optimizer=SGD(lr=INIT_LR, momentum=0.9), loss= 'categorical_crossentropy', metrics=['accuracy'])

print("[INFO] Loading Data... ")
filename = "data0.txt"
filename2 = "labels0.txt"
counter = 1
le = preprocessing.LabelEncoder()
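
Example #3's poly_decay schedule (reconstructed above) is defined but never wired into training within the visible part of the snippet. With tf.keras it would typically be attached through a LearningRateScheduler callback; a minimal sketch (the commented fit call uses placeholder names, not values from the original project):

from tensorflow.keras.callbacks import LearningRateScheduler

# LearningRateScheduler calls poly_decay(epoch) at the start of each epoch and
# applies the returned learning rate to the optimizer.
callbacks = [LearningRateScheduler(poly_decay)]
# model.fit(trainX, trainY, epochs=maxEpochs, batch_size=64, callbacks=callbacks)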
Example #4
from tensorflow.contrib.saved_model import load_keras_model


def load_model(model_dir):
    # Thin wrapper that loads a tf.keras model from a SavedModel directory.
    return load_keras_model(model_dir)
Example #5
from tensorflow.contrib.saved_model import load_keras_model

new_model = load_keras_model("models/model_weigth.h5")
new_model.summary()
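
tensorflow.contrib, and with it load_keras_model, was removed in TensorFlow 2.x. The closest current equivalent is tf.keras.models.load_model, which reads both HDF5 files and SavedModel directories. A rough TF 2 sketch of Example #5, reusing the same path for illustration:

import tensorflow as tf

new_model = tf.keras.models.load_model("models/model_weigth.h5")
new_model.summary()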