def main():
    #Set up argument parser for console input
    parser = argparse.ArgumentParser(description='Predict category of flower')
    parser.add_argument('image_path', help='path of image to be analyzed')
    parser.add_argument(
        'checkpoint_dir',
        help='directory containing /checkpoint.pth with the pre-trained model to be used for prediction'
    )
    parser.add_argument('--top_k',
                        help='number of top K most likely classes',
                        default=1,
                        type=int)
    parser.add_argument('--category_names', help='Select JSON file')
    parser.add_argument('--gpu', help='Enable GPU', action='store_true')

    args = parser.parse_args()

    #Load pre-trained model from checkpoint
    loaded_model, optimizer, criterion, epochs = fu.load_checkpoint(
        args.checkpoint_dir + '/checkpoint.pth')

    #Set device (use GPU only if requested and available)
    device = torch.device(
        'cuda' if torch.cuda.is_available() and args.gpu else 'cpu')
    print('Device is: ', device)

    #Inference calculation
    probs, classes = fu.predict(args.image_path, loaded_model, args.top_k,
                                device, args.category_names)
    print(probs)
    print(classes)
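Every example in this listing leans on a load_checkpoint helper from a separate functions module that the snippets never show. A minimal sketch of what such a helper might look like, assuming the checkpoint dict stores an architecture name, the classifier, its state_dict, and the class_to_idx mapping (these key names are illustrative, not taken from any of the examples):

# Minimal load_checkpoint sketch; the 'arch', 'classifier', 'state_dict' and
# 'class_to_idx' keys are assumptions about how the checkpoint was saved.
import torch
from torchvision import models


def load_checkpoint(path, device='cpu'):
    checkpoint = torch.load(path, map_location=device)
    # Rebuild the backbone named in the checkpoint (e.g. 'vgg16', 'densenet121')
    model = getattr(models, checkpoint['arch'])(pretrained=True)
    for param in model.parameters():
        param.requires_grad = False  # keep the feature extractor frozen
    # Restore the custom classifier and its trained weights
    model.classifier = checkpoint['classifier']
    model.load_state_dict(checkpoint['state_dict'])
    model.class_to_idx = checkpoint['class_to_idx']
    return model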
Example #2
def main():
    # user inputs from command line
    in_arg = get_input_args()
    # load model checkpoint
    model_checkpoint = load_checkpoint(in_arg.checkpoint)
    # load and process unseen image data
    new_image = process_image(in_arg.image_path)
    # predict on unseen image data
    probs, classes = predict(new_image, model_checkpoint, in_arg.topk,
                             in_arg.gpu)
    # get labels
    get_labels(probs, classes, model_checkpoint)
Example #3
def main():

    # Get CLI arguments
    args = get_input_args()

    # Build model from checkpoint
    model = functions.load_checkpoint(args.checkpoint)

    # Get probabilities, labels and flower name from prediction function
    top_probs, top_labels, top_flowers = functions.predict(
        args.image_path, model, args.category_name, args.gpu, args.top_k)

    # Print result
    for i in zip(top_probs, top_labels, top_flowers):
        print(i)
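Several of the examples preprocess the input with a process_image helper before prediction. A minimal sketch using torchvision transforms; the resize/crop sizes and ImageNet normalization constants match the valid/test transforms shown later in Example #13, everything else is an assumption:

# Minimal process_image sketch: PIL image -> normalized 3x224x224 tensor.
from PIL import Image
from torchvision import transforms


def process_image(image_path):
    preprocess = transforms.Compose([
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize([0.485, 0.456, 0.406],
                             [0.229, 0.224, 0.225]),
    ])
    with Image.open(image_path) as im:
        return preprocess(im.convert('RGB'))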
Example #4
def main():
    parser = argparse.ArgumentParser(
        description='This program predicts a flower name from an image')
    parser.add_argument('image_path', type=str, help='Image path')
    parser.add_argument('checkpoint', type=str, help='Checkpoint')
    parser.add_argument('--top_k',
                        type=int,
                        default=1,
                        help='Top k probabilities')
    parser.add_argument('--category_names',
                        type=str,
                        default='cat_to_name.json',
                        help='Mappings of indices and class names')
    parser.add_argument('--gpu',
                        action='store_true',
                        help='Use GPU',
                        default=False)

    args = parser.parse_args()
    device = torch.device(
        'cuda:0' if args.gpu and torch.cuda.is_available() else 'cpu')

    with open(args.category_names, 'r') as f:
        cat_to_name = json.load(f)

    model_load, optimizer_load, scheduler_load, epoch = load_checkpoint(
        device, args.checkpoint)

    probs, classes, flowers = predict(model_load,
                                      process_image(args.image_path),
                                      cat_to_name, args.top_k)

    print("Predictions for {}: {}".format(args.image_path, flowers))

    probs = np.array(probs)[0]
    print("Probabilities: {}".format(probs))
Example #5
parser = argparse.ArgumentParser(
    description='''Make a flower prediction from a PyTorch checkpoint''',
    formatter_class=argparse.ArgumentDefaultsHelpFormatter)

parser.add_argument('image_path',
                    default=None,
                    help='path to image for prediction')
parser.add_argument('checkpoint',
                    default=None,
                    help='path to PyTorch checkpoint for loading a model')
parser.add_argument('--top_k',
                    default=1,
                    type=int,
                    help='return top K most likely classes')
parser.add_argument('--category_names',
                    default=None,
                    help='json file mapping of categories to real names')
parser.add_argument('--gpu', action='store_true', help='predict on a GPU')

# Get the args
args = parser.parse_args()

# Load up a model checkpoint
model, checkpoint = functions.load_checkpoint(args.checkpoint, args.gpu)

# Preprocess an image to be used for the prediction in the model
image = functions.process_image(args.image_path, model)

# Make a prediction
functions.predict(args.image_path, model, args.gpu, args.top_k,
                  args.category_names)
Example #6
results = parser.parse_args()

save_dir = results.save_directory
image = results.image_path
top_k = results.topk
gpu_mode = results.gpu
cat_names = results.cat_name_dir
with open(cat_names, 'r') as f:
    cat_to_name = json.load(f)

# Establish model template
pre_tr_model = results.pretrained_model
model = getattr(models, pre_tr_model)(pretrained=True)

# Load model
loaded_model = load_checkpoint(model, save_dir, gpu_mode)

# Preprocess image - assumes jpeg format
processed_image = process_image(image)

if gpu_mode:
    processed_image = processed_image.to('cuda')

# Carry out prediction
probs, classes = predict(processed_image, loaded_model, top_k, gpu_mode)

# Print probabilities and predicted classes
print(probs)
print(classes)
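The predict helpers called above differ in signature from example to example (some take an image path, some a preprocessed tensor, some also a device or a category map). A minimal sketch for the tensor-based variant, assuming the model exposes class_to_idx and ends in a LogSoftmax layer, as in the load_checkpoint sketch above:

# Minimal predict sketch: returns the top-k probabilities and class labels.
import torch


def predict(image_tensor, model, topk=5, device='cpu'):
    model.to(device)
    model.eval()
    with torch.no_grad():
        batch = image_tensor.unsqueeze(0).to(device)  # add a batch dimension
        log_probs = model(batch)  # assumes the network outputs log-probabilities
        probs, indices = torch.exp(log_probs).topk(topk, dim=1)
    # Map dataset indices back to the class labels used by cat_to_name
    idx_to_class = {v: k for k, v in model.class_to_idx.items()}
    classes = [idx_to_class[i] for i in indices[0].tolist()]
    return probs[0].tolist(), classes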
Example #7
import pandas as pd
import os
import shutil
import functions

# 1. Directory Setting
save_directory = r'D:/#.Secure Work Folder/1. Data/1. CMI/1. FLD/test4_result/5-4/'
image_directory = r"D:\#.Secure Work Folder\1. Data\1. CMI\1. FLD\test3\5-4\20200830"

# 2. Create DataFrame for results
result_df = pd.DataFrame(columns=['ID', 'FILE', '1', '2', '3', 'JUDGE'])

# 3. load trained model
model_save_path = r"C:/Users/LG/Desktop/ksb/3. CODE/model/"
filename = 'sepa_image_classifier_crop.pth'
model = functions.load_checkpoint(model_save_path, filename)

# 4. Classify images and reorganize them based on the JUDGE from the model

for root, dirs, files in os.walk(image_directory):

    for file in files:

        if not os.path.isdir(save_directory + root[-13:]):
            os.mkdir(save_directory + root[-13:])
        if file.endswith("NG(0).JPG"):
            print(root[-13:])
            print(file)
            probs, classes = functions.predict(os.path.join(root, file), model)
            result_df = result_df.append(
                {
Example #8
args = parser.parse_args()
device = torch.device(
    'cuda:0' if torch.cuda.is_available() else 'cpu') if args.gpu else 'cpu'
top_k = args.k_value
category_names = args.cat_names
image_path = args.image_dir
save_dir = args.save_directory
arch = args.model_arch

# Label mapping
with open(category_names, 'r') as f:
    cat_to_name = json.load(f)

# Load the model
model = getattr(models, arch)(pretrained=True)
model = load_checkpoint(model, save_dir)

# Make predictions
probs, classes = predict(process_image(image_path), model, top_k, device)
flower_names = [cat_to_name[item] for item in classes]

# Print predicted flower names with top K classes
print("The flower is most likely to be a:")
for k in range(top_k):
    print(
        "Number: {}/{}.. ".format(k + 1, top_k),
        "Class name: {}.. ".format(flower_names[k]),
        "Probability: {:.3f}..% ".format(probs[k] * 100),
    )
Example #9
#   Example call:
#    python check.py flowers --arch resnet18 --learning_rate 0.001 --epochs 1 --dropout 0.2 --gpu

import torch
import train
import predict
import functions as fu


original_model, test_loader, criterion = train.main()

#Load pre-trained model from checkpoint
loaded_model, optimizer, criterion, epochs = fu.load_checkpoint('/home/workspace/aipnd-project/checkpoints/checkpoint.pth')

#Set device
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
print('Device is: ', device)

fu.compare_orig_vs_loaded(device, original_model, loaded_model, test_loader, criterion)
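The compare_orig_vs_loaded call above is another helper this listing never shows. A minimal sketch under the assumption that it simply runs both models over the test loader and reports loss and accuracy, so the checkpoint round-trip can be verified:

# Minimal compare_orig_vs_loaded sketch: evaluate both models on the test set.
import torch


def compare_orig_vs_loaded(device, original_model, loaded_model, test_loader, criterion):
    for name, model in [('original', original_model), ('loaded', loaded_model)]:
        model.to(device)
        model.eval()
        total_loss, correct, total = 0.0, 0, 0
        with torch.no_grad():
            for images, labels in test_loader:
                images, labels = images.to(device), labels.to(device)
                output = model(images)
                total_loss += criterion(output, labels).item()
                correct += (output.argmax(dim=1) == labels).sum().item()
                total += labels.size(0)
        print('{} model: test loss {:.3f}, accuracy {:.3f}'.format(
            name, total_loss / len(test_loader), correct / total))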
                    action="store",
                    default='cat_to_name.json')

#call parser and store variables
pa = parser.parse_args()
checkpoint = pa.checkpoint
device = pa.device
input_image = pa.input_image
top_k = pa.top_k
category_names = pa.category_names

#check input
print("The input variables are:\n", pa)

#LOAD CHECKPOINT
my_model, optimizer = functions.load_checkpoint(checkpoint)
print('\nCheckpoint Loaded')

#PROCESS IMAGE
test = functions.process_image(input_image)
print("\nImage processed")

#depending on cpu/gpu usage
if torch.cuda.is_available() and device == 'gpu':
    device = 'cuda'
    my_model.to(device)

#PREDICT CATEGORY NAME
with open(category_names, 'r') as f:
    cat_to_name = json.load(f)
Example #11
                    dest='top_k',
                    help='The number of predictions returned.')

results = parser.parse_args()

image = results.image_path
gpu_mode = results.gpu
category_names = results.cat_to_name
pretrained_model = results.arch
top_k = results.top_k

with open(category_names, 'r') as f:
    cat_to_name = json.load(f)

model = getattr(models, pretrained_model)(pretrained=True)
model_loaded = load_checkpoint(model, checkpoint_path)

image_processed = process_image(image)

probs, classes = predict(image_processed, model_loaded, top_k)

print(probs)
print(classes)

names = []
for i in classes:
    names += [cat_to_name[i]]
final_percent = round(probs[0] * 100, 4)
print("This flower is predicted to be a {} with a probability of {}% ".format(
    names[0], final_percent))
Example #12
checkpoint_dir = results.checkpoint_dir
test_image_path = results.test_image_path
topk = results.topk
cat_to_name = results.cat_to_name
gpu_mode = results.gpu

with open(cat_to_name, 'r') as f:
    cat_to_name = json.load(f)

# load model backbone
checkpoint = torch.load(checkpoint_dir)
load_arch = checkpoint['model structure']
model = getattr(models, load_arch)(pretrained=True)

# load model checkpoint
loaded_model = load_checkpoint(model, checkpoint_dir)

# Image processing (included in the 'predict' function) and predict the top classification
probs, classes = predicting(gpu_mode, test_image_path, loaded_model, topk=topk)

print(probs)
print(classes)

names = []
for prob, cls in zip(probs, classes):
    name = cat_to_name[cls]
    names.append(name)
    print("This flower is likely to be: ", name,
          ", with a probability of ", prob)
Example #13
def main(argv):

    parser = argparse.ArgumentParser(
        description='Predict flower name from an image.',
        prog='predict.py',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument('image_path',
                        type=str,
                        nargs=1,
                        default=['flowers/test/1/image_06754.jpg'],
                        help='Path to the image to use for prediction.')
    parser.add_argument('checkpoint',
                        type=str,
                        nargs=1,
                        default=['checkpoint.pth'],
                        help='Path to the checkpoint containing the model.')
    parser.add_argument('--top_k',
                        type=int,
                        nargs=1,
                        default=[1],
                        help='Return top K most likely classes.')
    parser.add_argument('--category_names',
                        type=str,
                        nargs=1,
                        default=['cat_to_name.json'],
                        help='Use a mapping of categories to real names.')
    parser.add_argument('--gpu',
                        nargs='?',
                        const='cuda',
                        help='Use GPU for inference.')
    args = vars(parser.parse_args(argv))
    image_path = args['image_path'][0]
    checkpoint = args['checkpoint'][0]
    topK = args['top_k'][0]
    category_names_file = args['category_names'][0]
    device = args['gpu']
    if device is None:
        device = 'cpu'

    print('parameters used for prediction:')
    print('-------------------------------')
    print('image_path = {}'.format(image_path))
    print('checkpoint = {}'.format(checkpoint))
    print('topK = {}'.format(topK))
    print('category_names_file = {}'.format(category_names_file))
    print('device = {}'.format(device))

    model = functions.load_checkpoint(checkpoint)
    model.eval()
    model.to(device)

    data_transforms = {
        'train':
        transforms.Compose([
            transforms.RandomRotation(45),
            transforms.RandomResizedCrop(224),
            transforms.RandomHorizontalFlip(),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ]),
        'valid':
        transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ]),
        'test':
        transforms.Compose([
            transforms.Resize(256),
            transforms.CenterCrop(224),
            transforms.ToTensor(),
            transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        ])
    }

    # TODO: Load the datasets with ImageFolder
    image_datasets = {
        data_type: datasets.ImageFolder(os.path.join('flowers', data_type),
                                        data_transforms[data_type])
        for data_type in ['train', 'valid', 'test']
    }

    # TODO: Using the image datasets and the transforms, define the dataloaders
    dataloaders = {
        data_type: torch.utils.data.DataLoader(image_datasets[data_type],
                                               batch_size=32,
                                               shuffle=True)
        for data_type in ['train', 'valid', 'test']
    }

    # TODO: Display an image along with the top 5 classes
    with open(category_names_file, 'r') as f:
        cat_to_name = json.load(f)
    flower_class = image_path.split('/')[2]
    title = cat_to_name[flower_class]

    probas, classes = functions.predict(image_path,
                                        model,
                                        cat_to_name,
                                        topk=topK)

    print()
    print('image of {} has been predicted as:'.format(title))
    print('------------------------------------------')
    for index in range(len(classes)):
        print('{} : {:.2f}%'.format(classes[index], 100 * probas[index]))
Example #14
import argparse

from functions import load_checkpoint, predict, show_results

parser = argparse.ArgumentParser(description='Create Your Own Image Classifier - Command Line Application - Predict')

#Image
parser.add_argument('image_path', action='store', default='./flowers/test/78/image_01856.jpg', help = 'Enter an image path', type=str)
#Checkpoint
parser.add_argument('checkpoint', action='store', default='checkpoint.pth', help = 'Enter a checkpoint to load', type=str)
#Choose GPU use
parser.add_argument('--gpu', dest='gpu', action='store_true', default=False, help = 'Turn on the use of GPU')
#topk
parser.add_argument('--top_k', dest='top_k', action='store', default=5, help = 'Return top K predictions', type=int)
#category_names
parser.add_argument('--category_names', dest='category_names', action='store', default="cat_to_name.json", help = 'Category names to match with model', type=str)

arguments = parser.parse_args()

image_path = arguments.image_path
checkpoint = arguments.checkpoint
top_k = arguments.top_k
gpu = arguments.gpu
category_names = arguments.category_names

#Load checkpoint
print("Loading model from : {}\n".format(checkpoint))
model = load_checkpoint(checkpoint)
#Predict
probs, classes = predict(image_path, model, gpu, top_k)
#Show results
show_results(image_path, probs, classes, category_names)
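show_results is the last helper Example #14 relies on. A minimal sketch that reads the category file and prints the flower names next to their probabilities (the signature follows the call above, the body is an assumption):

# Minimal show_results sketch: print flower names for the predicted classes.
import json


def show_results(image_path, probs, classes, category_names):
    with open(category_names, 'r') as f:
        cat_to_name = json.load(f)
    print("Predictions for {}:".format(image_path))
    for prob, cls in zip(probs, classes):
        print("  {}: {:.2f}%".format(cat_to_name.get(cls, cls), prob * 100))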
Example #15
#Load inputs
inputs = inputs()
print(inputs)

#Assign inputs to variables // Not prompting user for a) saving directory and b) pre-trained model
data_dir = inputs.data_dir
learning_rate = inputs.learning_rate
hidden_layer1 = inputs.hidden_layer1
hidden_layer2 = inputs.hidden_layer2
dropout_prob1 = inputs.dropout_prob1
dropout_prob2 = inputs.dropout_prob2
image_path = inputs.image_path
top_k = inputs.top_k
use_gpu = inputs.gpu

#image processing and predicting

model, optimizer, criterion = load_checkpoint('checkpoint_part2.pth',
                                              hidden_layer1, hidden_layer2,
                                              dropout_prob1, dropout_prob2)

tensor_image = process_image(image_path)

top_probs, top_labels, top_flowers = predict(tensor_image, model, cat_to_name,
                                             top_k)

for i in range(0, top_k):
    print(
        "Likelihood #{}, with probability {:.4%}, is that this flower is a {} that can be seen in folder with label {}"
        .format(i + 1, top_probs[i], top_flowers[i], top_labels[i]))
Example #16
                default='cat_to_name.json')
ap.add_argument('--gpu', default="gpu", action="store", dest="gpu")

pa = ap.parse_args()
path_image = pa.input_img
number_of_outputs = pa.top_k
power = pa.gpu
input_img = pa.input_img
path = pa.checkpoint

model, _, _ = functions.setup()

training_loader, testing_loader, validation_loader, train_data = functions.data(
)

functions.load_checkpoint(path)

with open('cat_to_name.json', 'r') as json_file:
    cat_to_name = json.load(json_file)

probabilities = functions.predict(path_image, model, number_of_outputs, power)

labels = [
    cat_to_name[str(index + 1)] for index in np.array(probabilities[1][0])
]
probability = np.array(probabilities[0][0])

i = 0
while i < number_of_outputs:
    print("{} with a probability of {}".format(labels[i], probability[i]))
    i += 1
Example #17
gpu = arguments.gpu
arch = arguments.arch

# set torch.device cuda/cpu
device = set_device(gpu)

# open the json file with the flower classes
with open(cat_file, 'r') as f:
    cat_to_name = json.load(f)

# get the specified model
model = getattr(models, arch)(pretrained=True)

# load our checkpoint
print('Loading checkpoint...\n')
model = load_checkpoint(model, device, checkpoint_path)
print('Checkpoint loaded!\n')

# process the image
print('Processing image...\n')
proc_image = process_image(input_path)

# run the prediction
print('Running prediction...\n')
probs, classes = predict(proc_image, model, top_k, device)

# get the flower names
flowers = [cat_to_name[i] for i in classes]

# print the top_k number of flowers and probabilities
print('\nThe top ' + str(top_k) + ' classes and probabilities are: ')
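Example #17 wraps device selection in a set_device helper. A minimal sketch matching the pattern used across these listings, falling back to the CPU when CUDA is unavailable (the name follows the call above, the body is an assumption):

# Minimal set_device sketch: honor the --gpu flag but fall back to the CPU.
import torch


def set_device(use_gpu):
    if use_gpu and torch.cuda.is_available():
        return torch.device('cuda')
    return torch.device('cpu')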
Example #18
import functions
import argparse

parser = argparse.ArgumentParser(description='parser_for_predict')
parser.add_argument('image_path', default="./flowers/test/100/image_07896.jpg", type=str)
parser.add_argument('checkpoint', default="./checkpoint.pth", type=str)
parser.add_argument('--top_k', dest="top_k", default=5, type=int)
parser.add_argument('--category_names', dest="category_names", default='cat_to_name.json')
# argparse's type=bool treats any non-empty string as True, so parse the flag explicitly
parser.add_argument('--gpu', dest="gpu", default=True, type=lambda s: s.lower() in ('true', '1', 'yes'))
parsed = parser.parse_args()
image_path = parsed.image_path
checkpoint = parsed.checkpoint
top_k = parsed.top_k
category_names = parsed.category_names
gpu = parsed.gpu

model, optimizer, idx_to_class = functions.load_checkpoint(checkpoint)
cat_to_name = functions.map_label(category_names)
functions.predict(image_path, model, top_k, gpu, cat_to_name, idx_to_class)
Example #19
                    default='cat_to_name.json',
                    help='Mapping of categories to real names')
parser.add_argument('gpu',
                    action='store',
                    help='Use GPU',
                    choices=['GPU', 'CPU'])

args = parser.parse_args()

image_path = args.path_to_image
model_arch = args.arch
save_directory = args.save_directory
topk = args.top_k
category_names = args.category_names
gpu = args.gpu

if gpu == 'GPU':
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
else:
    device = "cpu"

with open(category_names, 'r') as f:
    cat_to_name = json.load(f)

model = functions.load_checkpoint(model_arch, save_directory, gpu)

probs, top_classes = functions.predict(image_path, model, topk)
names = [cat_to_name[cl] for cl in top_classes]
print(probs)
print(top_classes)
print(names)
Example #20
                                              shuffle=False)
    loader_test2, idx_to_class_test2 = f.loader(root=DATA_PATH_TEST2,
                                                transform=TRANSFORM,
                                                batch_size=BATCH_SIZE,
                                                shuffle=False)
    n_batches = len(loader_train)
    n_batches_test = len(loader_test)

    # Networks
    m = network.DQN(RESOLUTION, RESOLUTION, N_ACTIONS)
    m = m.to(DEVICE)
    o = optim.Adam(m.parameters(), lr=1e-5)

    start_epoch = 0
    run = f.Run(CHECKPOINT_DIR)
    start_epoch, m, o = f.load_checkpoint(run.get_checkpoint('32'), m, o)

    # validate_model(m, loader_test, idx_to_class_test)

    for epoch in range(N_EPOCHS):
        print('\n Epoch {}'.format(start_epoch + epoch))
        train_model(m, o, loader_train, idx_to_class_train)

        checkpoint = {
            'epoch': start_epoch + epoch + 1,
            'state_dict': m.state_dict(),
            'optimizer': o.state_dict()
        }
        f.save_checkpoint(checkpoint, CHECKPOINT_DIR, start_epoch + epoch)

        print('FIRST VALIDATION')
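Example #20 resumes training from a checkpoint that also carries the optimizer state. A minimal sketch of matching save/load helpers; the 'epoch', 'state_dict' and 'optimizer' keys mirror the dict built in the loop above, while the file naming is an assumption:

# Minimal save/load sketch that round-trips model and optimizer state.
import os

import torch


def save_checkpoint(checkpoint, checkpoint_dir, tag):
    path = os.path.join(checkpoint_dir, 'checkpoint_{}.pth'.format(tag))
    torch.save(checkpoint, path)


def load_checkpoint(path, model, optimizer):
    checkpoint = torch.load(path, map_location='cpu')
    model.load_state_dict(checkpoint['state_dict'])
    optimizer.load_state_dict(checkpoint['optimizer'])
    return checkpoint['epoch'], model, optimizer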
Example #21
                    default=5,
                    type=int)
parser.add_argument('--category_names',
                    dest="category_names",
                    action="store",
                    default='cat_to_name.json')

results = parser.parse_args()

image_path = results.img_file
checkpoint = results.checkpoint
gpu = results.gpu
topk = results.top_k
cat = results.category_names

model = functions.load_checkpoint(checkpoint)

with open(cat, 'r') as f:
    cat_to_name = json.load(f)

probs, classes = functions.predict(image_path, model, topk)

print(probs)
print(classes)

names = []
for i in classes:
    names += [cat_to_name[i]]

print("Name: {} Probability {}% ".format(names[0], round(100 * acc / total,
                                                         3)))
Example #22
    args = parser.parse_args()
    print('arguments are', args)

    checkpoint_path = str(Path(args.save_dir) / args.chpt_fn)
    pre_folder = str(Path(args.pre_dir) / args.pre_folder)
    pre_img = random.choice(os.listdir(pre_folder))
    pre_img_path = pre_folder + '/' + pre_img
    print(pre_img_path)

    topk = args.topk

    with open(args.category_names, 'r') as f:
        cat_to_name = json.load(f)

    # rebuild the model from checkpoint
    model_2, optimizer_2 = functions.load_checkpoint(checkpoint_path)
    print('load finished!')
    #print(model_2)

    # open an image for prediction
    im = Image.open(pre_img_path)
    #plt.imshow(im)
    # process the image to tensor
    im_ts = functions.process_image(im)
    #helper.imshow(im_ts)
    print("finish image process")

    # predict image
    probs, classes, index = functions.predict(im_ts, model_2, topk)
    print(
        f"top {topk:.0f} probabilities are: {list(map(lambda x:round(x, 4), probs)) }"
Example #23
                    action="store",
                    dest="cat_names",
                    default="cat_to_name.json")
parser.add_argument('--top_k',
                    action="store",
                    dest="top_k",
                    type=int,
                    default=1)
parser.add_argument('--gpu', action="store", dest="gpu", default="gpu")

parse_args = parser.parse_args()

# 'flowers/test/1/image_06752.jpg'
image_path = parse_args.path_to_image
# 'checkpoint.pth'
save_path = parse_args.checkpoint

cat_names = parse_args.cat_names
top_k = parse_args.top_k
gpu = parse_args.gpu

#load saved model
model, device, output_units, criterion, optimizer = load_checkpoint(
    save_path, gpu)

#category names
cat_to_name = label_mapping(cat_names)

#predict
plotting(image_path, model, top_k, cat_to_name)