# Example no. 1
#     max_scale=5,
#     noise_level=5,
#     interp='bicubic',
#     sub_pixel_flow=True)

# Build a 1000-sample evaluation batch.
# NOTE(review): `gen` is configured above this chunk (see the commented-out
# kwargs: max_scale, noise_level, interp, sub_pixel_flow) — presumably a
# synthetic optical-flow batch generator; confirm against the full file.
inputs, targets = gen.generate_batch(1000)

# 0 prediction

# Baseline: per-sample MSE of always predicting zero.
error3 = np.mean(np.square(targets - 0), axis=1)

# CNN

# Plain (non-split) CNN with input normalization.
cnn = CNN(split=False, normalize=True)

prediction = cnn.predict('checkpoints/normal/step81000.ckpt', inputs)

# Per-sample MSE of the plain CNN.
error = np.mean(np.square(prediction - targets), axis=1)

# CNN-split

# Split-architecture CNN variant with a 500-unit fully-connected layer.
cnn2 = CNN(split=True, normalize=True, fully_connected=500)

# NOTE(review): `prediction` is reused here, so the plain CNN's output is
# no longer available past this point; only its `error` survives.
prediction = cnn2.predict('checkpoints/split/step44000.ckpt', inputs)

# Per-sample MSE of the split CNN.
error4 = np.mean(np.square(prediction - targets), axis=1)

# FAST+LK

# FAST keypoints + Lucas-Kanade optical-flow baseline.
# Argument semantics not visible in this chunk — TODO confirm what
# 40 and True configure (threshold / flag?) in FastLK's definition.
fastlk = FastLK(40, True)
# Example no. 2
    #         print(probs)
    #         index = np.argmax(index)
    #         word.append("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"[index])
    #         print(word)
    # =============================================================================

    # repeat test
    # Repeat test: double each patch's width by duplicating every column,
    # show it, and classify it with the CNN. Appends the top-class
    # confidence (as a percent) to `probs` and the decoded character to
    # `word`, both defined in the enclosing scope.
    alphabet = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
    for patch_idx in range(patches.shape[0]):
        patch = patches[patch_idx]
        # Undo the [-0.5, 0.5] normalization back to integer pixel values.
        pixels = tf.cast((patch + .5) * 255.0, tf.int32)
        # Duplicate every element along axis 1 → patch twice as wide.
        widened = tf.keras.backend.repeat_elements(pixels, 2, axis=1)
        print(widened.shape)
        show_img(widened, scaled=False)
        # Re-normalize to [-0.5, 0.5] and prepend a batch dimension.
        widened = tf.cast(widened, tf.float32)
        batch = tf.expand_dims(widened / 255.0 - .5, axis=0)
        scores = cnn.predict(batch)
        # Top-class probability as an integer percentage.
        probs.append(int(np.max(scores) * 100))
        # Decode the argmax index into its character.
        word.append(alphabet[np.argmax(scores)])
# =============================================================================
#     print(probs)
#     print(word)
# =============================================================================