# Example #1 (score: 0)

from skimage import feature, color, exposure
from sklearn.neural_network import MLPClassifier

import data_loading
import feature_extraction as extraction
import feature_validation as validation
import image_operations as operations
import util
from distance_model import DistanceModel

# --- Pipeline: colour features on HSV images, evaluated with a distance model ---
# Load the training set: raw images, pole-number labels, and class ids.
training_images, training_labels, training_classes = data_loading.loadTrainingImagesPoleNumbersAndClasses(
)
size = 100  # target edge length (pixels) for the resized images

print("resizing...")
# Crop with margin 0 and rescale every training image to `size`.
# NOTE(review): util.loading_map appears to apply the function to each element
# (project helper with progress output) — confirm in util.
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           training_images)
print("hsv...")
hsv = util.loading_map(color.rgb2hsv, resized)
print("grayscaling...")
grayscaled = util.loading_map(color.rgb2gray, resized)

print("colors")
# Colour statistics computed per sub-image on a 7-way split of each HSV image.
colors = util.loading_map(
    lambda x: extraction.split_image_features(
        extraction.calculateColorFeatures, 7, x), hsv)

n_folds = 5  # cross-validation folds (evaluation code not visible in this chunk)

print("evaluating colors")
model = DistanceModel()
#from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler, RobustScaler
from sklearn.decomposition import PCA
from sklearn import random_projection
from sklearn.ensemble import RandomForestClassifier
from sklearn import feature_selection

#preloading
# Reload the training set for the logistic-regression / HOG sweep below.
print("loading data...")
size = 100  # resize target (pixels)
# NOTE(review): the original called `loader.loadTrainingImagesPoleNumbersAndClasses()`,
# but no name `loader` is bound at this point in the file (the top of the file
# does `import data_loading`, unaliased). Use the imported module directly to
# avoid a NameError.
images, labels, classes = data_loading.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)

print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)
print("grayscaling...")
grayscaled = util.loading_map(color.rgb2gray, resized)

n_folds = 10  # cross-validation folds (evaluation loop not visible in this chunk)

# Multinomial logistic regression on standardised features.
model = Pipeline([("standard scaler", StandardScaler()),
                  ("logistic regression",
                   LogisticRegression(solver='lbfgs',
                                      multi_class='multinomial'))])

for cpb in range(2, 11):
    ppc = int(100 / cpb)
    print("hog_8_", ppc, "_", cpb, " features")
    hog = util.loading_map(
        lambda x: feature.hog(x,
        if (size1 * size2 * size3 != size1_F * size2_F * size3_F):
            print("size:", size1, ",", size2)
        if (i % 100 == 0): print(i, "/", amount)
        array = numpy.array(resized[i])
        a = numpy.reshape(array, (size1 * size2 * size3))
        reshaped = numpy.concatenate((reshaped, [a]), 0)
    return reshaped


print("loading data...")
size = 32  # small resize: raw-pixel feature vectors grow with size**2
# NOTE(review): was `loader.` — unbound at this point; the file imports the
# module as `data_loading` (see top of file).
images, labels, classes = data_loading.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)

print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)

print("hsv...")
hsv = util.loading_map(color.rgb2hsv, resized)
# Flatten each HxWx3 HSV image into one feature vector per sample.
# NOTE(review): `flatten` is defined in a damaged section of this file — verify.
hsv = flatten(hsv, 3)

from sklearn import random_projection
n_folds = 5

print("40")
# Single-hidden-layer perceptron (400 units) trained with SGD.
# NOTE(review): `algorithm=` is the pre-0.18 scikit-learn spelling of what was
# released as `solver=`; kept as-is to match the file's (old) sklearn vintage.
model = Pipeline([("Multi-layer Perceptron",
                   MLPClassifier(algorithm='sgd',
                                 hidden_layer_sizes=(400),
                                 random_state=1,
                                 learning_rate='constant',
                                 max_iter=300))])
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler, RobustScaler
from sklearn.decomposition import PCA
from sklearn import random_projection
from sklearn.ensemble import RandomForestClassifier
from sklearn import feature_selection

#preloading
print("loading data...")
size = 100
# NOTE(review): was `loader.` — unbound at this point; use the module imported
# at the top of the file.
images, labels, classes = data_loading.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)

print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)
print("hsv...")
hsv = util.loading_map(color.rgb2hsv, resized)
print("luv...")
luv = util.loading_map(color.rgb2luv, resized)
print("grayscaling...")
grayscaled = util.loading_map(color.rgb2gray, resized)
print("edges...")
edges = util.loading_map(feature.canny, grayscaled)

print("brightness features")
brightness = util.loading_map(extraction.calculateDarktoBrightRatio, resized)
print("luv features")
# 11 is presumably a grid/sampling parameter of extraction.pixel_features —
# TODO confirm against its definition.
luv_features = util.loading_map(lambda x: extraction.pixel_features(x, 11),
                                luv)
print("hog features")
import util
import numpy
from skimage import color, exposure
from sklearn import lda
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plot

print("loading data...")
size = 100
# NOTE(review): was `loader.` — unbound at this point; use the module imported
# at the top of the file.
images, labels, classes = data_loading.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)

print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)
print("hsv...")
hsv = util.loading_map(color.rgb2hsv, resized)
print("luv...")
luv = util.loading_map(color.rgb2luv, resized)

# Multinomial logistic regression on standardised features.
model = Pipeline([("standard scaler", StandardScaler()),
                  ("logistic regression",
                   LogisticRegression(solver='lbfgs',
                                      multi_class='multinomial'))])

n_folds = 10  # folds for the pixel-feature sweep below (cut off in this chunk)

for i in range(1, 14, 2):
    print("rgb", i)
    rgb_f = util.loading_map(lambda x: extraction.pixel_features(x, i),
# Example #6 (score: 0)
    fd = feature.hog(image,
                     orientations=orient,
                     pixels_per_cell=ppc,
                     cells_per_block=cpb,
                     normalise=normalise)
    return numpy.array(fd).flatten()


#preloading
print("loading data...")
size = 100
# NOTE(review): was `loader.` — unbound at this point; use the module imported
# at the top of the file.
images, labels, classes = data_loading.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)

print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)
print("hsv...")
hsv = util.loading_map(color.rgb2hsv, resized)
print("luv...")
luv = util.loading_map(color.rgb2luv, resized)
print("hed...")
hed = util.loading_map(color.rgb2hed, resized)
print("rgbcie...")
cie = util.loading_map(color.rgb2rgbcie, resized)
print("grayscaling...")
grayscaled = util.loading_map(color.rgb2gray, resized)
print("normalising...")
# Histogram equalisation of the grayscale images.
normalized = util.loading_map(exposure.equalize_hist, grayscaled)
#print("edges...")
#edges = util.loading_map(feature.canny, grayscaled)
def reduce_features(features, number, classes):
    """Project `features` onto `number` LDA components.

    The LDA model is fitted on a subsample (every 10th sample, starting at
    index 1) to keep fitting cheap, then applied to the full matrix.
    """
    # Subsample along the sample axis (rows). The original sliced columns
    # ([:, 1::10]), which (a) raises IndexError on the 1-D `classes` array and
    # (b) would fit LDA on every 10th *feature*, making its dimensionality
    # incompatible with `transform(features)`.
    partial_features = numpy.array(features)[1::10]
    partial_classes = numpy.array(classes)[1::10]
    model = lda.LDA(n_components=number)
    model.fit_transform(partial_features, partial_classes)
    return model.transform(features)


#preloading
print("loading data...")
size = 70
# NOTE(review): was `loader.` — unbound at this point; use the module imported
# at the top of the file.
images, labels, classes = data_loading.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)
print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)
print("grayscaling...")
grayscaled = util.loading_map(color.rgb2gray, resized)

#feature extraction
#print("split color features...")
#split_color_features            = util.loading_map(lambda x: extraction.split_image_features(extraction.color_features, 3, x), resized)
#print("corner count")
#corner_features                    = util.loading_map(lambda x: corner_count(x), grayscaled)
print("daisy features")
# Dense DAISY descriptors on the grayscale image, flattened to one vector.
daisy = util.loading_map(
    lambda x: feature.daisy(
        x, step=8, radius=20, rings=2, histograms=4, orientations=4).flatten(),
    grayscaled)

n_folds = 5
# Example #8 (score: 0)
import feature_validation as validation
import feature_extraction as extraction
import image_operations as operations
import data_loading as loader
import util
import numpy
from skimage import color

#preloading
# Load images and class labels (`loader` alias bound by the import block above).
print("loading data...")
#images, classes = loader.loadUniqueTrainingAndClasses()
images, classes = loader.loadTrainingAndClasses()
amount = len(images)
print("resizing...")
# Fixed 50-pixel resize, margin 0.
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, 50),
                           images)
print("normalizing...")
normalized = util.loading_map(operations.normalizeImage, resized)
print("grayscaling...")
grayscale = util.loading_map(color.rgb2gray, resized)
#print("reducing color space...")
#reduced = util.loading_map(operations.reduceColorSpace, resized)

#feature extraction
#print("color features...")
#color_features                  = util.loading_map(extraction.color_features, resized)
#print("normalized color features...")
#normalized_color_features       = util.loading_map(extraction.color_features, normalized)
print("mean channels features...")
split_color_features = util.loading_map(
    lambda x: extraction.split_image_features(extraction.color_features, 3, x),
# Example #9 (score: 0)
# NOTE(review): this fragment relies on `images` and `size` being defined by
# earlier code in the file.
amount = len(images)

print("resizing...")
# NOTE(review): the original used the bare module names `image_operations` and
# `feature_extraction`, but this file binds them only under the aliases
# `operations` and `extraction` (`import ... as ...` does not bind the bare
# name); use the aliases to avoid NameErrors.
resized = [operations.cropAndResize(img, 0.1, size) for img in images]

print("luv...")
luv = [color.rgb2luv(img) for img in resized]
print("hed...")
hed = [color.rgb2hed(img) for img in resized]

print("grayscaling...")
grayscaled = [color.rgb2gray(img) for img in resized]
#print("edges...")

print("brightness features")
brightness = util.loading_map(extraction.calculateDarktoBrightRatio, resized)

print("luv features")

# Mean/std colour statistics per sub-image on a 7-way split of each LUV image.
luv_features = util.loading_map(lambda x: extraction.split_image_features(
    lambda y: extraction.color_features(y, mean=True, std=True), 7, x), luv)
print('\a')  # terminal bell: audible progress signal
print("hed features")
hed_features = util.loading_map(lambda x: extraction.split_image_features(
    lambda y: extraction.color_features(y, mean=True, std=True), 8, x), hed)

print("hog features")
hog = util.loading_map(lambda x: extraction.calcHOGWrapper(x), grayscaled)

# Example #10 (score: 0)
from sklearn.linear_model import LogisticRegression
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler, RobustScaler
from sklearn.decomposition import PCA
from sklearn import random_projection
from sklearn.ensemble import RandomForestClassifier
from sklearn import feature_selection

#preloading
# Reload the training set at full resolution and precompute colour spaces.
print("loading data...")
size = 100
images, labels, classes = loader.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)

print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)
print("hsv...")
hsv = util.loading_map(color.rgb2hsv, resized)
print("luv...")
luv = util.loading_map(color.rgb2luv, resized)
print("grayscaling...")
grayscaled = util.loading_map(color.rgb2gray, resized)
print("edges...")
edges = util.loading_map(feature.canny, grayscaled)

print("brightness features")
brightness = util.loading_map(extraction.calculateDarktoBrightRatio, resized)
print("hsv 11 + std")
hsv_11_std = util.loading_map(
    lambda x: extraction.split_image_features(
        lambda y: extraction.color_features(y, mean=True, std=True), 11, x),
# Example #11 (score: 0)
    fd = feature.hog(image,
                     orientations=orient,
                     pixels_per_cell=ppc,
                     cells_per_block=cpb,
                     normalise=normalise)
    return numpy.array(fd).flatten()


#preloading
# Reload the data and precompute every colour-space variant used by the
# feature evaluations that follow (continuation truncated in this chunk).
print("loading data...")
size = 100
images, labels, classes = loader.loadTrainingImagesPoleNumbersAndClasses()
amount = len(images)

print("resizing...")
resized = util.loading_map(lambda x: operations.cropAndResize(x, 0, size),
                           images)
print("hsv...")
hsv = util.loading_map(color.rgb2hsv, resized)
print("luv...")
luv = util.loading_map(color.rgb2luv, resized)
print("hed...")
hed = util.loading_map(color.rgb2hed, resized)
print("rgbcie...")
cie = util.loading_map(color.rgb2rgbcie, resized)
print("grayscaling...")
grayscaled = util.loading_map(color.rgb2gray, resized)
#print("edges...")
#edges = util.loading_map(feature.canny, grayscaled)

print("brightness features")
brightness = util.loading_map(extraction.calculateDarktoBrightRatio, resized)