Example #1
    # Load the two pre-trained random forests: one scores region similarity
    # (rf_same_region), the other scores region salience (rf_salience).
    rf_simi = RandomForest()
    rf_simi.load_model("data/model/rf_same_region.pkl")
    rf_sal = RandomForest()
    rf_sal.load_model("data/model/rf_salience.pkl")

    # Build one salience map per segmentation produced with the similarity
    # forest, then flatten the stack so every pixel becomes a feature vector
    # of its per-segmentation salience scores.
    im_data.get_multi_segs(rf_simi)
    segs_num = len(im_data.rlists)
    height, width = im_data.rmat.shape[:2]
    salience_map = np.zeros([segs_num, height, width])
    for i, rlist in enumerate(im_data.rlists):
        # Positive-class salience score for every region of the i-th segmentation.
        Y = rf_sal.predict(im_data.feature93s[i])[:, 1]
        for j, r in enumerate(rlist):
            salience_map[i][r] = Y[j]
    X_test = salience_map.reshape([-1, height * width]).T

    # Fuse the per-segmentation scores with the pre-trained MLP to obtain the
    # final per-pixel salience, scaled to 0-255 for display.
    mlp = MLP()
    mlp.load_model("data/model/mlp.pkl")
    Y = mlp.predict(X_test).reshape([height, width]) * 255

    # Show the input image and its salience map side by side; the grayscale
    # map is replicated across the three colour channels.
    img = np.zeros([height, width * 2, 3], dtype=np.uint8)
    img[:, :width, :] = cv2.imread(img_path)
    img[:, width:, :] = Y.repeat(3).reshape([height, width, 3])
    print("finished~( •̀ ω •́ )y")
    cv2.imshow("result", img)
    cv2.waitKey(0)
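
The salience_map.reshape([-1, height*width]).T step above turns the stack of per-segmentation maps into one feature row per pixel. A minimal, self-contained NumPy sketch of that layout (toy dimensions only, not taken from the project):

    import numpy as np

    # Toy stack: 3 segmentations of a 2x4 image.
    segs_num, height, width = 3, 2, 4
    salience_map = np.random.rand(segs_num, height, width)

    # Flatten each map, then transpose: one row per pixel,
    # one column per segmentation-level salience score.
    X_test = salience_map.reshape([-1, height * width]).T
    print(X_test.shape)  # (8, 3) == (height * width, segs_num)
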
# Command-line options for evaluating the MLP on the test set.
parser.add_argument('--test_data',
                    default=PATH_PARSED_DATA_TEST,
                    type=str,
                    help='Path to the test data')
parser.add_argument('--dataset_size',
                    default=DEFAULT_DATASET_SIZE,
                    type=int,
                    help='Dataset size')
parser.add_argument('--ctx', default=DEFAULT_CTX, type=str, help='Context')
args = parser.parse_args()
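
A hypothetical invocation (the script name and the flag values are illustrative assumptions; omitted flags fall back to their defaults):

    python test_mlp.py --dataset_size 500 --ctx cpu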

# Rebuild the network with the architecture it was trained with, move it to
# the requested context, and restore the saved weights.
net = MLP(drop_out=args.drop_out, hidden_units=args.hidden_units)
net.set_ctx(args.ctx)
net.load_model()

# Test-set loader settings; shuffling is disabled for evaluation.
data_attr = {
    'path': args.test_data,
    'dataset_size': args.dataset_size,
    'batch_size': args.batch_size,
    'shuffle_data': False,
}

# Evaluate the test set batch by batch, tracking accuracy across batches.
cumulative_accuracy = 0
set_count = 0
data_gen = net.prepare_data(**data_attr)

for test_data in data_gen:
    set_count += 1
    set_acc = net.evaluation(test_data)