help="path to pre-trained model") args = vars(ap.parse_args()) # initialize the class labels classLabels = ["cat", "dog"] print("[INFO] sampling images...") #grab the list of images in the dataset then randomly sample imagePaths = np.array(list(paths.list_images(args['dataset']))) idxs = np.random.randint(0, len(imagePaths), size=(10, )) imagePaths = imagePaths[idxs] sp = SimplePreProcessor(32, 32) ima = ImageToArrayPreprocessor() sdl = SimpleDatasetLoader([sp, ima]) (data, labels) = sdl.load(imagePaths, verbose=500) data = data.astype(dtype='float32') / 255.0 #TODO ********************************* # load the pre-trained network print("[INFO] loading pre-trained network...") model = load_model(args['model']) # ************************************** print("[INFO] predicting...") preds = model.predict(data, batch_size=32).argmax(axis=1) for (i, imagePath) in enumerate(imagePaths): image = cv2.imread(imagePath)
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-d", "--dataset", required=True,
    help="path to input dataset")
args = vars(ap.parse_args())

# grab the list of images that we'll be describing
print("[INFO] loading images...")
imagePaths = list(paths.list_images(args["dataset"]))

# initialize the image preprocessors: resize to 32x32, then convert to a
# Keras-compatible array
sp = SimplePreprocessor(32, 32)
iap = ImageToArrayPreprocessor()

# load the dataset from disk then scale the raw pixel intensities
# to the range [0, 1]
sdl = SimpleDatasetLoader(preprocessors=[sp, iap])
(data, labels) = sdl.load(imagePaths, verbose=500)
data = data.astype("float") / 255.0

# partition the data into training and testing splits using 75% of
# the data for training and the remaining 25% for testing
(trainX, testX, trainY, testY) = train_test_split(data, labels,
    test_size=0.25, random_state=42)

# convert the labels from integers to one-hot vectors; fit the binarizer
# on the training labels ONLY and reuse it to transform the test labels,
# so the class-to-column mapping is identical across both splits (fitting
# a fresh binarizer on testY could silently reorder the columns)
lb = LabelBinarizer()
trainY = lb.fit_transform(trainY)
testY = lb.transform(testY)

# initialize the optimizer and model
classLabels = ["cat", "dog", "panda"] # grab the list of images in the dataset then randomly sample # indexes into the image paths list print("[INFO] sampling images...") imagePaths = np.array(list(paths.list_images(args["dataset"]))) idxs = np.random.randint(0, len(imagePaths), size=(10, )) imagesPaths = imagePaths[idxs] # initialize the image preprocessors sp = SimplePreprocessor(32, 32) iap = ImageToArrayPreprocessor() # load the dataset from disk then scale the raw pixel intensities # to the range [0, 1] sdl = SimpleDatasetLoader(preprocessor=[sp, iap]) (data, labels) = sdl.load(imagePaths) data = data.astype("float") / 255.0 ################################################ # load the pre-trained network print("[INFO] loading pre-trained network...") model = load_model(args["model"]) ################################################ # make prediction on the images print("[INFO] predicting...") preds = model.predict(data, batch_size=32).argmax(axis=1) # loop over the sample images for (i, imagePath) in enumerate(imagePaths):