Example #1
import tensorflow as tf
import numpy as np
from tensorflow.contrib.learn.python import SKCompat

# Load the Iris data; the last three columns hold one-hot encoded labels.
iris = np.loadtxt('Data/Data/iris_softmax.csv',
                  delimiter=',',
                  dtype=np.float32)

x = iris[:, :-3]
y = iris[:, -3:]
# Convert the one-hot label columns into integer class indices.
y = np.argmax(y, axis=1)

# The unnamed ('') column is the key SKCompat uses when plain numpy arrays are
# passed to fit(); the shape must match the number of feature columns in x.
feature_columns = [tf.feature_column.numeric_column('', shape=[5])]

clf = tf.contrib.learn.DNNClassifier(hidden_units=[10, 20, 5],
                                     feature_columns=feature_columns,
                                     n_classes=3)

# Wrap the estimator so it exposes the scikit-learn-style fit/score interface
# and accepts plain numpy arrays.
clf = SKCompat(clf)

clf.fit(x=x, y=y, max_steps=1000)

print(clf.score(x=x, y=y))
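The tf.contrib.learn classes used above were deprecated during the TensorFlow 1.x series. As a point of comparison, here is a minimal sketch of the same Iris pipeline on the core tf.estimator API (TF 1.x); the feature key 'x', the batch size, and deriving the column count from x.shape[1] are assumptions of this sketch, not part of the original example.

import numpy as np
import tensorflow as tf

iris = np.loadtxt('Data/Data/iris_softmax.csv',
                  delimiter=',',
                  dtype=np.float32)
x = iris[:, :-3]
y = np.argmax(iris[:, -3:], axis=1)

# One numeric column named 'x' covering all feature values of a row.
feature_columns = [tf.feature_column.numeric_column('x', shape=[x.shape[1]])]

clf = tf.estimator.DNNClassifier(hidden_units=[10, 20, 5],
                                 feature_columns=feature_columns,
                                 n_classes=3)

train_input_fn = tf.estimator.inputs.numpy_input_fn(
    x={'x': x}, y=y, batch_size=32, num_epochs=None, shuffle=True)
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
    x={'x': x}, y=y, num_epochs=1, shuffle=False)

clf.train(input_fn=train_input_fn, steps=1000)
print(clf.evaluate(input_fn=eval_input_fn))  # reports accuracy and loss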
Example #2
    # Fragment of a larger script: this block sits inside a per-epoch training
    # loop (hence `i` below), and it relies on imports and helpers defined
    # elsewhere (tensorflow as tf, numpy as np, pickle, os, cv2, pandas as pd,
    # time, tensorflow.contrib.learn as learn, SKCompat, the model_fn
    # `trainTypes`, and the preprocessing helper `processImage`).
    # Train the model
    mnist_classifier.fit(x=train_data,
                         y=train_label_sub,
                         batch_size=128,
                         steps=400,
                         monitors=[logging_hook])

    # Configure the accuracy metric for evaluation
    metrics = {
        "accuracy":
        learn.MetricSpec(metric_fn=tf.metrics.accuracy,
                         prediction_key="classes"),
    }

    # Evaluate the model and print results
    eval_results = mnist_classifier.score(x=test_data,
                                          y=test_label_sub,
                                          metrics=metrics,
                                          batch_size=128)

    print("************************************************************")
    print("epoch: " + str(i + 1))
    print(eval_results)
    print("************************************************************")

#print (train_data.shape)
#print (train_data[1])
#print (test_label_super)
#print (test_label_sub)
#np.savetxt('ytrain1', y_train)
def main(unused_arguments):
    localtime = time.asctime(time.localtime(time.time()))
    print(localtime)
    content = np.loadtxt('train_data')
    # Regroup the flat pixel file into images: every 256 consecutive rows of
    # 256 values become one 256x256 training example.
    train_data = []
    temp = 0
    tmp = []
    for x in content:
        if temp == 255:
            tmp.append(x)
            train_data.append(tmp)
            temp = 0
            tmp = []
        else:
            tmp.append(x)
            temp += 1
    with open('train_labels', 'rb') as fp:
        train_labels = pickle.load(fp)
    train_data = np.array(train_data, dtype=np.float32)
    train_labels = np.array(train_labels, dtype=np.int32)
    # Create the Estimator
    mnist_classifier = SKCompat(
        learn.Estimator(model_fn=trainTypes,
                        model_dir="/Users/praneet/Downloads/model"))
    # Set up logging for predictions
    tensors_to_log = {"probabilities": "softmax_tensor"}
    logging_hook = tf.train.LoggingTensorHook(tensors=tensors_to_log,
                                              every_n_iter=50)
    # Train the model
    mnist_classifier.fit(x=train_data,
                         y=train_labels,
                         batch_size=100,
                         steps=2500,
                         monitors=[logging_hook])
    # Accuracy metric for evaluation
    metrics = {
        "accuracy":
        learn.MetricSpec(metric_fn=tf.metrics.accuracy,
                         prediction_key="classes"),
    }
    # Evaluation
    print('Evaluation: ')
    content = np.loadtxt('eval_data')
    # Regroup the flat evaluation file the same way: 256 rows per image.
    eval_data = []
    temp = 0
    tmp = []
    for x in content:
        if temp == 255:
            tmp.append(x)
            eval_data.append(tmp)
            temp = 0
            tmp = []
        else:
            tmp.append(x)
            temp += 1
    with open('eval_labels', 'rb') as fp:
        eval_labels = pickle.load(fp)
    eval_data = np.array(eval_data, dtype=np.float32)
    eval_labels = np.array(eval_labels, dtype=np.int32)
    # Evaluate the model
    eval_results = mnist_classifier.score(x=eval_data,
                                          y=eval_labels,
                                          metrics=metrics)
    print(eval_results)
    # Predictions
    print('Predictions: ')
    evallst = []
    outputlst = []
    fileCount = 0  # counts processed .jpg files
    evaluatePath = "/Users/praneet/Downloads/test/"
    lst = []
    lst.append("image_name")
    lst.append("Type_1")
    lst.append("Type_2")
    lst.append("Type_3")
    evallst.append(lst)
    outputlst.append(lst)
    for root, dirs, files in os.walk(evaluatePath):
        for fileName in files:
            if fileName.endswith(".jpg"):
                eval_data = []
                filePath = os.path.abspath(os.path.join(root, fileName))
                img = cv2.imread(filePath)
                img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
                img = processImage(img)
                fixedSize = (256, 256)
                img = cv2.resize(img, dsize=fixedSize)
                img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
                # Normalize grayscale pixels to [0, 1].
                tmp_data = np.zeros((256, 256), dtype=np.float32)
                for x in range(img.shape[0]):
                    for y in range(img.shape[1]):
                        tmp_data[x][y] = img[x][y] / 255.0
                eval_data.append(tmp_data)
                eval_data = np.array(eval_data)
                # Predict values for each image
                predictions = mnist_classifier.predict(x=eval_data)
                print(fileName, predictions)
                lst = []
                lst.append(fileName)
                for x in predictions['probabilities']:
                    for y in x:
                        lst.append(y)
                outputlst.append(lst)
                # Replace the probability row with a hard one-hot label when a
                # class clears its hand-tuned confidence threshold; otherwise
                # keep the raw probabilities.
                if lst[1] > 0.25:
                    lst = []
                    lst.append(fileName)
                    lst.append(1)
                    lst.append(0)
                    lst.append(0)
                if lst[3] > 0.29:
                    lst = []
                    lst.append(fileName)
                    lst.append(0)
                    lst.append(0)
                    lst.append(1)
                if lst[2] > 0.58:
                    lst = []
                    lst.append(fileName)
                    lst.append(0)
                    lst.append(1)
                    lst.append(0)
                # lst[1] is an integer (0 or 1) only if one of the thresholds
                # above fired and the row was converted to a one-hot label.
                if lst[1] == 0 or lst[1] == 1:
                    print("Non Ambiguous Prediction")
                evallst.append(lst)
                fileCount += 1
    print('Total files: ', fileCount)
    # Raw probability rows go to the "normalized" file, thresholded one-hot
    # rows to the "integers" file.
    df = pd.DataFrame(outputlst)
    df.to_csv('output_normalized.csv', index=False, header=False)
    df = pd.DataFrame(evallst)
    df.to_csv('output_integers.csv', index=False, header=False)
    localtime = time.asctime(time.localtime(time.time()))
    print(localtime)
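The manual row-grouping and per-pixel normalization in main can also be written with NumPy array operations. A minimal sketch, assuming the flat data files hold rows of 256 values with every 256 consecutive rows forming one 256x256 image, and using a hypothetical example.jpg in place of the real test files:

import numpy as np
import cv2

# Reshape the flat pixel file into (num_images, 256, 256); assumes the row
# count is an exact multiple of 256, as the accumulation loop above implies.
content = np.loadtxt('train_data')
train_data = content.reshape(-1, 256, 256).astype(np.float32)

def normalize_gray(img):
    # Scale uint8 grayscale pixels to [0, 1] as float32.
    return img.astype(np.float32) / 255.0

# Preprocess a single image the way the prediction loop does (the original
# processImage step and the BGR->RGB round trip are omitted here).
img = cv2.imread('example.jpg')                   # hypothetical file name
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img = cv2.resize(img, dsize=(256, 256))
eval_data = normalize_gray(img)[np.newaxis, ...]  # shape (1, 256, 256)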