Example #1
import numpy as np
from sklearn import preprocessing
from sklearn.decomposition import PCA

# TrainFile, TestFile, configs and the helpers below are project-specific utilities.
trainFile = TrainFile("../data/train.csv", True)
trainFile.Read()
testFile = TestFile("../data/test.csv", True)
testFile.Read()
print("Data loaded...")

X = np.array(trainFile.data)
Y = np.array(trainFile.labels)

# As in face recognition, compute the average digit image
avg_digit = compute_avg_digits(X, configs.IMAGE_WIDTH)
print("Avg digit computed...")

# Subtract the average digit from each input, then L2-normalize each sample
X_normalized_avg = normalize_with_avg(X, avg_digit)
X_normalized = preprocessing.normalize(X_normalized_avg)
print("X normalized...")

# Eigenfaces-style PCA: keep roughly 7% of the pixel dimensions as components
n_component = 0.07
pca = PCA(n_components=int(configs.IMAGE_WIDTH * configs.IMAGE_WIDTH * n_component))
features = pca.fit_transform(X_normalized)
print("Transform done...")

# split into training and testing
# cutoff = int(len(Y) * 0.75)
# features_train = np.array(features[:cutoff])
# Y_train = np.array(Y[:cutoff])
# features_test = np.array(features[cutoff:])
# Y_test = np.array(Y[cutoff:])
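
The helpers called above are not shown on this page; a minimal sketch of what compute_avg_digits and normalize_with_avg plausibly do (an assumption based on the eigenfaces recipe, not the repo's actual code) is:

import numpy as np

def compute_avg_digits(X, image_width):
    # Hypothetical sketch: mean pixel vector over all samples; image_width is
    # accepted so the result could be reshaped to (image_width, image_width).
    return np.asarray(X, dtype=np.float64).mean(axis=0)

def normalize_with_avg(X, avg_digit):
    # Hypothetical sketch: subtract the average digit from every sample
    # (mean-centering the data before PCA).
    return np.asarray(X, dtype=np.float64) - avg_digit
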
Example #3
import numpy as np
from sklearn import preprocessing
from sklearn.decomposition import PCA
from sklearn.ensemble import RandomForestClassifier

trainFile = TrainFile("../data/train.csv", True)
trainFile.Read()
testFile = TestFile("../data/test.csv", True)
testFile.Read()
print("Data loaded...")

X = np.array(trainFile.data)
Y = np.array(trainFile.labels)

# As in face recognition, compute the average digit image
avg_digit = compute_avg_digits(X, configs.IMAGE_WIDTH)
print("Avg digit computed...")

# Subtract the average digit from each input, then L2-normalize each sample
X_normalized_avg = normalize_with_avg(X, avg_digit)
X_normalized = preprocessing.normalize(X_normalized_avg)
print("X normalized...")

# Eigenfaces-style PCA: sweep the number of components (6-8% of the pixel dimensions)
for n_component in [0.06, 0.07, 0.08]:
    pca = PCA(n_components=int(configs.IMAGE_WIDTH * configs.IMAGE_WIDTH * n_component))
    features = pca.fit_transform(X_normalized)
    print("Transform done...")

    features = np.array(features)
    Y = np.array(Y)

    # Random forest on the PCA features
    n_trees = 1000
    model = RandomForestClassifier(n_estimators=n_trees)
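
This snippet is cut off at the model construction; a plausible continuation inside the loop, assuming the same 75/25 split used elsewhere on this page (not part of the original example), would be:

    # Hypothetical continuation: hold out 25% of the data and score the forest
    cutoff = int(len(Y) * 0.75)
    model.fit(features[:cutoff], Y[:cutoff])
    accuracy = model.score(features[cutoff:], Y[cutoff:])
    print("n_component = %.2f, accuracy = %.4f" % (n_component, accuracy))
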
Example #4
import numpy as np
from sklearn import preprocessing
from sklearn.decomposition import FastICA

trainFile = TrainFile("../data/train.csv", True)
trainFile.Read()
testFile = TestFile("../data/test.csv", True)
testFile.Read()
print("Data loaded...")

X = np.array(trainFile.data)
Y = np.array(trainFile.labels)

# As in face recognition, compute the average digit image
avg_digit = compute_avg_digits(X, configs.IMAGE_WIDTH)
print("Avg digit computed...")

# Subtract the average digit from each input, then L2-normalize each sample
X_normalized_avg = normalize_with_avg(X, avg_digit)
X_normalized = preprocessing.normalize(X_normalized_avg)
print("X normalized...")

# ICA features: independent component analysis instead of PCA
ica = FastICA()
features = ica.fit_transform(X_normalized)
print("Transform done...")

# split into training and testing (cutoff must be an integer index)
cutoff = int(len(Y) * 0.75)
features_train = np.array(features[:cutoff])
Y_train = np.array(Y[:cutoff])
features_test = np.array(features[cutoff:])
Y_test = np.array(Y[cutoff:])
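
Example #4 stops at the split; a rough sketch of the training and evaluation step, assuming a random forest like the one in the earlier example (not part of the original snippet), would be:

from sklearn.ensemble import RandomForestClassifier

# Hypothetical continuation: fit on the ICA features and check held-out accuracy
model = RandomForestClassifier(n_estimators=1000)
model.fit(features_train, Y_train)
print("Held-out accuracy: %.4f" % model.score(features_test, Y_test))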