Example #1
argument_parser = argparse.ArgumentParser()
argument_parser.add_argument('-d',
                             '--dataset',
                             required=True,
                             help='Path to input dataset.')
argument_parser.add_argument('-m',
                             '--model',
                             required=True,
                             help='Path to the output model.')

arguments = vars(argument_parser.parse_args())

print('[INFO] Loading images...')
image_paths = list(paths.list_images(arguments['dataset']))

simple_preprocessor = SimplePreprocessor(32, 32)
image_to_array_preprocessor = ImageToArrayPreprocessor()

simple_dataset_loader = SimpleDatasetLoader(
    preprocessors=[simple_preprocessor, image_to_array_preprocessor])
data, labels = simple_dataset_loader.load(image_paths, verbose=500)
data = data.astype('float') / 255.0

X_train, X_test, y_train, y_test = train_test_split(data,
                                                    labels,
                                                    test_size=.25,
                                                    random_state=42)

label_binarizer = LabelBinarizer()
y_train = label_binarizer.fit_transform(y_train)
y_test = label_binarizer.transform(y_test)

print('[INFO] Compiling model...')
optimizer = SGD(lr=.005)

model = ShallowNet.build(width=32, height=32, depth=3, classes=3)
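
# A minimal sketch of the compile/train steps this snippet is cut off
# before (assumes the usual categorical cross-entropy setup for this
# three-class model):
model.compile(loss='categorical_crossentropy',
              optimizer=optimizer,
              metrics=['accuracy'])
H = model.fit(X_train, y_train,
              validation_data=(X_test, y_test),
              batch_size=32,
              epochs=100,
              verbose=1)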
Example #2
                help="# of nearest neighbors for classification")
ap.add_argument("-j",
                "--jobs",
                type=int,
                default=-1,
                help="# of jobs for knn distance(-1 uses all avaiable cores)")
args = vars(ap.parse_args())

# Grab the list of each image's absolute path, e.g.
# imagePaths[:2] == ['F:\\pyimagesearch\\datasets\\animals\\cats\\cats_00001.jpg',
#                    'F:\\pyimagesearch\\datasets\\animals\\cats\\cats_00002.jpg']
print("[INFO] loading images...")
imagePaths = list(paths.list_images(args["dataset"]))
#print(imagePaths[:3])

sp = SimplePreprocessor(32, 32)
sdl = SimpleDatasetLoader(preprocessors=[sp])
(data, labels) = sdl.load(imagePaths, verbose=500)
data = data.reshape((data.shape[0], 32 * 32 * 3))

print("[INFO] feature matrix: {:.1f}MB".format(data.nbytes / (1024 * 1000.0)))

# Encode the labels as integers.
le = LabelEncoder()
labels = le.fit_transform(labels)

(trainX, testX, trainY, testY) = train_test_split(data,
                                                  labels,
                                                  test_size=0.25,
                                                  random_state=42)

model = KNeighborsClassifier(n_neighbors=args["neighbors"],
                             n_jobs=args["jobs"])
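
# A minimal sketch of the evaluation step that typically follows (assumes
# classification_report is imported from sklearn.metrics; k-NN "training"
# simply stores the feature matrix):
model.fit(trainX, trainY)
print(classification_report(testY, model.predict(testX),
                            target_names=le.classes_))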
Example #3
# grab the list of images in the dataset, then randomly sample
# indexes into the image paths list
print("[INFO] sampling images...")
imagePaths = np.array(list(paths.list_images(args["dataset"])))
idxs = np.random.randint(0, len(imagePaths), size=(10,))
imagePaths = imagePaths[idxs]


# initialize the image preprocessors
sp = SimplePreprocessor(32, 32)
iap = ImageToArrayPreprocessor()

# load the dataset from disk then scale the raw pixel intensities
# to range [0, 1]
sdl = SimpleDatasetLoader(preprocessors=[sp, iap])
data, labels = sdl.load(imagePaths)
data = data.astype(float) / 255.0

# load the pre-trained network
print('[INFO] loading pre-trained network...')
model = load_model(f'{args["model"]}/14.shallownet_weights.hdf5')

# making predictions
print("[INFO] predicting...")
preds = model.predict(data, batch_size=32).argmax(axis=1)


# loop over the sample images
for (i, imagePath) in enumerate(imagePaths):
    # load the example image, draw the prediction, and display it
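    # A minimal sketch of the loop body this snippet is cut off before
    # (assumes cv2 is imported and that a classLabels list mapping class
    # indices to names, e.g. ['cat', 'dog', 'panda'], is defined earlier):
    image = cv2.imread(imagePath)
    cv2.putText(image, 'Label: {}'.format(classLabels[preds[i]]), (10, 30),
                cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 255, 0), 2)
    cv2.imshow('Image', image)
    cv2.waitKey(0)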
Example #4
    "-j",
    "--jobs",
    type=int,
    default=-1,
    help="# of jobs for k-NN distance (-1 uses all available cores)")

args = vars(ap.parse_args())

# grab the list of images that we’ll be describing
print("[INFO] loading images...")
# a list of the path to each labeled image
imagePaths = list(paths.list_images(args["dataset"]))

# resize each image to 32x32 pixels
sp = SimplePreprocessor(32, 32)

# sp will be applied to every image in the dataset
sdl = SimpleDatasetLoader(preprocessors=[sp])

# load the resized data and labels; for 3,000 images the output
# shape is (3000, 32, 32, 3)
(data, labels) = sdl.load(imagePaths, verbose=500)

# flatten each image from a 3D representation into a single list of
# pixel intensities => output shape: (3000, 3072)
data = data.reshape((data.shape[0], 3072))

# show how much memory it takes to store these 3,000 images
print("[INFO] feature matrix: {:.1f}MB".format(data.nbytes / (1024 * 1000.0)))

# build our training and testing splits
le = LabelEncoder()
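
# A minimal sketch of the steps this snippet is cut off before (assumes a
# -k/--neighbors argument in the truncated argparse block above, and the
# same split/evaluation as the other k-NN examples here):
labels = le.fit_transform(labels)
(trainX, testX, trainY, testY) = train_test_split(data,
                                                  labels,
                                                  test_size=0.25,
                                                  random_state=42)
model = KNeighborsClassifier(n_neighbors=args["neighbors"],
                             n_jobs=args["jobs"])
model.fit(trainX, trainY)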
Example #5
import matplotlib.pyplot as plt
import numpy as np

ap = argparse.ArgumentParser()
ap.add_argument("-d", "--dataset", required=True, help="path to input dataset")
ap.add_argument("-o", "--output", required=True, help="path to save model")
args = vars(ap.parse_args())

print("[INFO] loading images...")
image_paths = list(paths.list_images(args["dataset"]))

sp = SimplePreprocessor(32, 32)
iap = ImageToArrayPreprocessor()

sdl = SimpleDatasetLoader(preprocessors=[sp, iap])
data, labels = sdl.load(image_paths, verbose=500)
data = data.astype("float") / 255

labels = LabelBinarizer().fit_transform(labels)
train_X, test_X, train_y, test_y = train_test_split(data, labels, test_size=0.2)

print("[INFO] compiling model...")
model = ShallowNet.build(width=32, height=32, depth=3, classes=3)
model.compile(optimizer=optimizers.RMSprop(lr=1e-4),
              loss=losses.categorical_crossentropy,
              metrics=[metrics.categorical_accuracy])

H = model.fit(train_X, train_y,
              batch_size=32,
              epochs=100,
              validation_data=(test_X, test_y))

print("[INFO] serializing network...")
model.save(args["model"])
Example #6
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("-d",
                        "--dataset",
                        required=True,
                        help="Path to the input dataset")
arg_parser.add_argument("-k",
                        "--neighbours",
                        type=int,
                        default=1,
                        help="Number of nearest neighbours for classification")
arg_parser.add_argument(
    "-j",
    "--jobs",
    type=int,
    default=1,
    help="Number of jobs for k-NN distance (-1 uses all available cores)")
args = vars(arg_parser.parse_args())

# Load the dataset
print("[INFO] loading images...")
image_paths = list(paths.list_images(args["dataset"]))
resize_preprocessor = SimplePreprocessor(32, 32)
dataset_loader = SimpleDatasetLoader(preprocessors=[resize_preprocessor])
data, labels = dataset_loader.load(image_paths, verbose=500)
data = data.reshape((data.shape[0], -1))
print(f"[INFO] features matrix {data.nbytes / 1024 * 1000:1f}MB")

# Encode labels to integers
label_encoder = LabelEncoder()
labels = label_encoder.fit_transform(labels)

# Splitting the dataset
train_X, test_X, train_y, test_y = train_test_split(data,
                                                    labels,
                                                    test_size=0.2)

# Evaluating the Model
model = KNeighborsClassifier(n_neighbors=args["neighbours"],
                             n_jobs=args["jobs"])
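
# A minimal sketch of the evaluation step that typically follows (assumes
# classification_report is imported from sklearn.metrics):
model.fit(train_X, train_y)
print(classification_report(test_y, model.predict(test_X),
                            target_names=label_encoder.classes_))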
Example #7
ap = argparse.ArgumentParser()
ap.add_argument('-d', '--dataset', required=True, help='path to input dataset')
args = vars(ap.parse_args())

print('[INFO] loading dataset...')
image_paths = list(paths.list_images(args['dataset']))
class_names = [
    image_path.split(os.path.sep)[LABEL_PATH_INDEX]
    for image_path in image_paths
]
class_names = [str(x) for x in np.unique(class_names)]

aap = AspectAwarePreprocessor(WIDTH, HEIGHT)
iap = ImageToArrayPreprocessor()

sdl = SimpleDatasetLoader([aap, iap])
data, labels = sdl.load(image_paths, verbose=500)
data = data.astype('float') / 255.0

train_X, test_X, train_y, test_y = train_test_split(data,
                                                    labels,
                                                    test_size=0.25)
label_binarizer = LabelBinarizer()
train_y = label_binarizer.fit_transform(train_y)
test_y = label_binarizer.transform(test_y)

aug = ImageDataGenerator(rotation_range=30,
                         width_shift_range=0.1,
                         height_shift_range=0.1,
                         shear_range=0.2,
                         zoom_range=0.2,