def predict(path):
    y_true, inputs, files = get_inputs_and_trues(path)

    if config.model == config.MODEL_VGG16:
        if args.store_activations:
            import train_relativity
            util.save_activations(model, inputs, files, 'fc2')
            train_relativity.train_relativity()
        if args.check_relativity:
            af = util.get_activation_function(model, 'fc2')
            acts = util.get_activations(af, [inputs[0]])
            relativity_clf = joblib.load(config.relativity_model_path)
            predicted_relativity = relativity_clf.predict(acts)[0]
            print(relativity_clf.__classes[predicted_relativity])

    if not args.store_activations:
        out = model.predict(np.array(inputs))
        predictions = np.argmax(out, axis=1)

        for i, p in enumerate(predictions):
            recognized_class = list(classes_in_keras_format.keys())[
                list(classes_in_keras_format.values()).index(p)]
            print('{} ({}) ---> {} ({})'.format(y_true[i],
                                                files[i].split(os.sep)[-2], p,
                                                recognized_class))

        if args.accuracy:
            print('accuracy {}'.format(
                accuracy_score(y_true=y_true, y_pred=predictions)))
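A note on the class lookup used throughout these predict() variants: scanning classes_in_keras_format.values() for every prediction is an O(n) search per sample. Assuming the dict maps class names to the integer indices Keras assigns (which is what the lookup above implies), the mapping can be inverted once instead. A minimal sketch, not the original code:

# Sketch only: invert the class-name -> index mapping once.
index_to_class = {v: k for k, v in classes_in_keras_format.items()}
recognized_class = index_to_class[p]  # O(1) lookup per predicted index p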
def predict(path):
    files = get_files(path)
    n_files = len(files)
    print('Found {} files'.format(n_files))

    if args.novelty_detection:
        activation_function = util.get_activation_function(model, model_module.noveltyDetectionLayerName)
        novelty_detection_clf = joblib.load(config.get_novelty_detection_model_path())

    y_trues = []
    predictions = np.zeros(shape=(n_files,))
    nb_batch = int(np.ceil(n_files / float(args.batch_size)))
    for n in range(0, nb_batch):
        print('Batch {}'.format(n))
        n_from = n * args.batch_size
        n_to = min(args.batch_size * (n + 1), n_files)

        y_true, inputs = get_inputs_and_trues(files[n_from:n_to])
        y_trues += y_true

        if args.store_activations:
            util.save_activations(model, inputs, files[n_from:n_to], model_module.noveltyDetectionLayerName, n)

        if args.novelty_detection:
            activations = util.get_activations(activation_function, [inputs[0]])
            nd_preds = novelty_detection_clf.predict(activations)[0]
            print(novelty_detection_clf.__classes[nd_preds])

        if not args.store_activations:
            # Warm up the model
            if n == 0:
                print('Warming up the model')
                start = time.clock()
                model.predict(np.array([inputs[0]]))
                end = time.clock()
                print('Warming up took {} s'.format(end - start))

            # Make predictions
            start = time.clock()
            out = model.predict(np.array(inputs))
            end = time.clock()
            predictions[n_from:n_to] = np.argmax(out, axis=1)
            print('Prediction on batch {} took: {}'.format(n, end - start))

    if not args.store_activations:
        for i, p in enumerate(predictions):
            recognized_class = list(classes_in_keras_format.keys())[list(classes_in_keras_format.values()).index(p)]
            print('| should be {} ({}) -> predicted as {} ({})'.format(y_trues[i], files[i].split(os.sep)[-2], p,
                                                                       recognized_class))

        if args.accuracy:
            print('Accuracy {}'.format(accuracy_score(y_true=y_trues, y_pred=predictions)))

        if args.plot_confusion_matrix:
            cnf_matrix = confusion_matrix(y_trues, predictions)
            util.plot_confusion_matrix(cnf_matrix, config.classes, normalize=False)
            util.plot_confusion_matrix(cnf_matrix, config.classes, normalize=True)
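The batched variant above times each call with time.clock(), which was deprecated in Python 3.3 and removed in Python 3.8. On a current interpreter the same measurement can be taken with time.perf_counter(); a minimal sketch of the timed prediction step, reusing model, inputs and n from the loop above:

import time

start = time.perf_counter()  # high-resolution timer, replacement for time.clock()
out = model.predict(np.array(inputs))
end = time.perf_counter()
print('Prediction on batch {} took: {:.3f} s'.format(n, end - start))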
Example #4
def predict(path):
    files = get_files(path)
    n_files = len(files)
    print('Found {} files'.format(n_files))

    if args.novelty_detection:
        activation_function = util.get_activation_function(
            model, model_module.noveltyDetectionLayerName)
        novelty_detection_clf = joblib.load(
            config.get_novelty_detection_model_path())

    y_trues = []
    predictions = np.zeros(shape=(n_files, ))
    nb_batch = int(np.ceil(n_files / float(args.batch_size)))
    for n in range(0, nb_batch):
        print('Batch {}'.format(n))
        n_from = n * args.batch_size
        n_to = min(args.batch_size * (n + 1), n_files)

        y_true, inputs = get_inputs_and_trues(files[n_from:n_to])
        y_trues += y_true

        if args.store_activations:
            util.save_activations(model, inputs, files[n_from:n_to],
                                  model_module.noveltyDetectionLayerName, n)

        if args.novelty_detection:
            activations = util.get_activations(activation_function,
                                               [inputs[0]])
            nd_preds = novelty_detection_clf.predict(activations)[0]
            print(novelty_detection_clf.__classes[nd_preds])

        if not args.store_activations:
            # Warm up the model
            if n == 0:
                print('Warming up the model')
                start = time.clock()
                model.predict(np.array([inputs[0]]))
                end = time.clock()
                print('Warming up took {} s'.format(end - start))

            # Make predictions
            start = time.clock()
            out = model.predict(np.array(inputs))
            end = time.clock()
            predictions[n_from:n_to] = np.argmax(out, axis=1)
            print('Prediction on batch {} took: {}'.format(n, end - start))
    freq = {}
    for p in predictions:
        if str(p) in freq:
            freq[str(p)] += 1
        else:
            freq[str(p)] = 1  # count the first occurrence as well
    print(freq)
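A more compact way to build the same frequency table is collections.Counter, which counts every occurrence (including the first) without the manual branching:

from collections import Counter

freq = Counter(str(p) for p in predictions)  # same counts as the loop above
print(dict(freq))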
    """
Exemplo n.º 5
0
def predict(
    dir,
    iter_index=0,
    augment_times=1,
    print_detail=True,
):
    """
    对目标数据集进行预测
    :param dir: 待测图片数据文件夹
    :param augment_times: 数据增强倍数
    :param print_detail: 是否打印预测详细信息
    :return: 预测数据
    """
    files = get_files(dir)
    n_files = len(files)
    class_label = dir.split(os.sep)[-2]
    print('Iter {0}, Found {1} files, class is {2}:{3}'.format(
        iter_index, n_files, class_label, labels_en[int(class_label)]))

    if args.novelty_detection:
        activation_function = util.get_activation_function(
            model, model_module.noveltyDetectionLayerName)
        novelty_detection_clf = joblib.load(
            config.get_novelty_detection_model_path())

    y_trues = []
    predictions_cat = np.zeros(shape=(n_files, ))
    predictions_pro = np.zeros(shape=(n_files, ))
    nb_batch = int(np.ceil(n_files / float(args.batch_size)))
    for n in range(0, nb_batch):
        if print_detail: print('Batch {}'.format(n))
        n_from = n * args.batch_size
        n_to = min(args.batch_size * (n + 1), n_files)

        y_true, inputs = get_inputs_and_trues(files[n_from:n_to])
        y_trues += y_true

        if args.store_activations:
            util.save_activations(model, inputs, files[n_from:n_to],
                                  model_module.noveltyDetectionLayerName, n)

        if args.novelty_detection:
            activations = util.get_activations(activation_function,
                                               [inputs[0]])
            nd_preds = novelty_detection_clf.predict(activations)[0]
            if print_detail: print(novelty_detection_clf.__classes[nd_preds])

        if not args.store_activations:
            # Warm up the model
            if n == 0:
                if print_detail: print('Warming up the model')
                start = time.clock()
                model.predict(np.array([inputs[0]]))
                end = time.clock()
                if print_detail:
                    print('Warming up took {} s'.format(end - start))

            # Make predictions (timed), using the augmented-prediction helper
            start = time.clock()
            augmented_predictions = get_augment_predictions(
                inputs, augment_times)
            end = time.clock()
            predictions_cat[n_from:n_to] = augmented_predictions["category"]
            predictions_pro[n_from:n_to] = augmented_predictions["probability"]
            if print_detail:
                print('Prediction on batch {} took: {} s'.format(
                    n, end - start))

    predict_stats = {}
    predict_stats["detail"] = []
    predict_stats["summary"] = {"total": 0, "trues": 0, "falses": 0, "acc": 0}

    if not args.store_activations:
        for i, p in enumerate(predictions_cat):
            recognized_class = list(classes_in_keras_format.keys())[list(
                classes_in_keras_format.values()).index(p)]
            if print_detail:
                print(
                    '[{}:{}] should be {} ({}:{}) -> predicted as {} ({}:{}), probability:{}'
                    .format("%02d" % i, files[i].split(os.sep)[-1], y_trues[i],
                            files[i].split(os.sep)[-2],
                            labels_en[int(files[i].split(os.sep)[-2])], p,
                            recognized_class, labels_en[int(recognized_class)],
                            predictions_pro[i]))

            predict_stats["detail"].append(
                [y_trues[i], files[i].split(os.sep)[-2], p, recognized_class])
            predict_stats["summary"]["total"] += 1

            if files[i].split(os.sep)[-2] == recognized_class:
                predict_stats["summary"]["trues"] += 1
            else:
                predict_stats["summary"]["falses"] += 1

        predict_stats["summary"]["acc"] = float(
            predict_stats["summary"]
            ["trues"]) / predict_stats["summary"]["total"]

        if args.accuracy:
            if print_detail:
                print('Accuracy {}'.format(
                    accuracy_score(y_true=y_trues, y_pred=predictions_cat)))

        if args.plot_confusion_matrix:
            cnf_matrix = confusion_matrix(y_trues, predictions_cat)
            util.plot_confusion_matrix(cnf_matrix,
                                       config.classes,
                                       normalize=False)
            util.plot_confusion_matrix(cnf_matrix,
                                       config.classes,
                                       normalize=True)

    print(predict_stats["summary"])

    return predict_stats
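Because this variant predicts one class folder at a time and returns per-folder statistics, callers presumably aggregate the summaries themselves. A minimal sketch under that assumption; class_dirs is a hypothetical list of per-class folder paths, not part of the original code:

totals = {"total": 0, "trues": 0, "falses": 0}
for class_dir in class_dirs:  # hypothetical list of per-class folders
    stats = predict(class_dir, print_detail=False)["summary"]
    for key in totals:
        totals[key] += stats[key]
totals["acc"] = float(totals["trues"]) / totals["total"]
print(totals)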
Example #6
def predict(path):
    files = get_files(path)  # get the list of file paths
    n_files = len(files)  # number of pictures
    print('Found {} files'.format(n_files))  # print information

    if args.novelty_detection:  # not executed in this run (flag not set)
        activation_function = util.get_activation_function(
            model, model_module.noveltyDetectionLayerName)
        novelty_detection_clf = joblib.load(
            config.get_novelty_detection_model_path())

    y_trues = []
    predictions = np.zeros(shape=(n_files, ))  # create an array of length n_files
    nb_batch = int(np.ceil(
        n_files / float(args.batch_size)))  # number of batches, rounded up
    for n in range(0, nb_batch):
        print('Batch {}'.format(n))  # print information
        n_from = n * args.batch_size  # start index of the current batch
        n_to = min(args.batch_size * (n + 1),
                   n_files)  # end index of the current batch

        y_true, inputs = get_inputs_and_trues(files[n_from:n_to])
        y_trues += y_true

        if args.store_activations:  # not executed in this run (flag not set)
            util.save_activations(model, inputs, files[n_from:n_to],
                                  model_module.noveltyDetectionLayerName, n)

        if args.novelty_detection:  # not executed in this run (flag not set)
            activations = util.get_activations(activation_function,
                                               [inputs[0]])
            nd_preds = novelty_detection_clf.predict(activations)[0]
            print(novelty_detection_clf.__classes[nd_preds])

        if not args.store_activations:
            # Warm up the model
            if n == 0:
                print('Warming up the model')  #print execution information
                start = time.clock()  #record start time
                model.predict(np.array([inputs[0]]))
                end = time.clock()  #record end time
                print('Warming up took {} s'.format(
                    end - start))  #print execution time

            # Make predictions
            start = time.clock()  #record start time
            out = model.predict(np.array(inputs))  #predict!
            end = time.clock()  #record end time
            predictions[n_from:n_to] = np.argmax(
                out, axis=1
            )  # index of the maximum value along the class dimension
            print('Prediction on batch {} took: {}'.format(
                n, end - start))  # print execution time

    if not args.store_activations:
        for i, p in enumerate(predictions):
            recognized_class = list(classes_in_keras_format.keys())[list(
                classes_in_keras_format.values()).index(p)]
            print('| should be {} ({}) -> predicted as {} ({})'.format(
                y_trues[i], files[i].split(os.sep)[-2], p, recognized_class))

        if args.accuracy:  # not executed in this run (flag not set)
            print('Accuracy {}'.format(
                accuracy_score(y_true=y_trues, y_pred=predictions)))

        if args.plot_confusion_matrix:  # not executed in this run (flag not set)
            cnf_matrix = confusion_matrix(y_trues, predictions)
            util.plot_confusion_matrix(cnf_matrix,
                                       config.classes,
                                       normalize=False)
            util.plot_confusion_matrix(cnf_matrix,
                                       config.classes,
                                       normalize=True)
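A final detail shared by all the variants: predictions is allocated as a float array, so each predicted class index prints as e.g. 3.0 and is compared against the integer values in classes_in_keras_format via float equality. If integer output is preferred, the array can be given an explicit dtype; this is a tweak on the original code, not its behaviour:

predictions = np.zeros(shape=(n_files,), dtype=np.int64)  # integer class indices
# ... inside the batch loop, argmax already yields integers:
predictions[n_from:n_to] = np.argmax(out, axis=1)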