Exemple #1
0
               startTime=600,
               stopTime=6000,
               label=6)

# Load the configured data set; segments may differ in length and raw
# samples are not validated.
a.readDataSet(equalLength=False, checkData=False)

# Toggle: True reloads a previously dumped model, False trains a fresh one.
useDump = False

if useDump:
    # Restore normalization parameters and the pickled classifier,
    # evaluate it, then stream live data through it.
    a.loadDumpNormParam(dumpName="MLPClassifier")
    clf = a.loadDumpClassifier("MLPClassifier")
    a.testClassifier(classifier=clf)
    a.setFileSink(fileSinkName="chris", fileSinkPath="../")
    a.startLiveClassification()
else:
    # Fit feature normalization, train a fresh MLP, persist it under the
    # same dump name, and evaluate.
    a.initFeatNormalization(dumpName="MLPClassifier")
    from sklearn.neural_network import MLPClassifier
    clf = MLPClassifier()
    a.trainClassifier(classifier=clf)
    a.dumpClassifier(dumpName="MLPClassifier")
    a.testClassifier()

# Split the source data into fixed-size windows, one label per window.
windowedData, windowLabels = a.windowSplitSourceDataTT()

# Index permutation used to randomize the train/test split.
# np.arange is the exact, idiomatic way to build 0..n-1; the original
# np.linspace(0, n-1, n, dtype=int) relies on float spacing being
# truncated back to integers.
index = np.arange(len(windowedData))
random.shuffle(index)

# Containers for the upcoming train/test partition.
trainingData = []
trainingLabels = []
testData = []
testLabels = []
Exemple #2
0
               startTime=600,
               stopTime=6000,
               label=6)

# Load the configured data set; segments may differ in length and raw
# samples are not validated.
a.readDataSet(equalLength=False, checkData=False)

# Toggle: True reloads a previously dumped model, False trains a fresh one.
useDump = False

if useDump:
    # Restore normalization parameters and the pickled classifier,
    # evaluate it, then stream live data through it.
    a.loadDumpNormParam(dumpName="dataOnly")
    clf = a.loadDumpClassifier("dataOnly")
    a.testClassifier(classifier=clf)
    a.setFileSink(fileSinkName="chris", fileSinkPath="../")
    a.startLiveClassification()
else:
    # Fit feature normalization, train an RBF-kernel SVM, persist it
    # under the same dump name, and evaluate.
    a.initFeatNormalization(dumpName="dataOnly")
    from sklearn import svm
    clf = svm.SVC(kernel='rbf')
    a.trainClassifier(classifier=clf)
    a.dumpClassifier(dumpName="dataOnly")
    a.testClassifier()

# Split the source data into fixed-size windows, one label per window.
windowedData, windowLabels = a.windowSplitSourceDataTT()

# Index permutation used to randomize the train/test split.
# np.arange builds 0..n-1 exactly; np.linspace(0, n-1, n, dtype=int)
# did the same via float spacing truncated to int.
index = np.arange(len(windowedData))
random.shuffle(index)

# Containers for the upcoming train/test partition.
trainingData = []
trainingLabels = []
testData = []
testLabels = []
Exemple #3
0
               label=5)

# Register the two Igor recordings, each trimmed to its useful time span
# and tagged with gesture label 6.
a.addDataFiles(fileSourceName="igor.txt",
               fileSourcePath="../",
               startTime=100,
               stopTime=2900,
               label=6)
a.addDataFiles(fileSourceName="igor2.txt",
               fileSourcePath="../",
               startTime=600,
               stopTime=6000,
               label=6)

a.readDataSet(equalLength=False, checkData=False)

# Flip to True to reuse the previously dumped model instead of retraining.
useDump = False

dumpName = "KNeighborsClassifier"
if useDump:
    # Reload normalization parameters plus the stored classifier,
    # check it on the test data, then classify live input.
    a.loadDumpNormParam(dumpName=dumpName)
    clf = a.loadDumpClassifier(dumpName)
    a.testClassifier(classifier=clf)
    a.setFileSink(fileSinkName="chris", fileSinkPath="../")
    a.startLiveClassification()
else:
    # Train a fresh 4-nearest-neighbour classifier (Euclidean metric),
    # persist it under dumpName, and evaluate.
    a.initFeatNormalization(dumpName=dumpName)
    from sklearn.neighbors import KNeighborsClassifier
    clf = KNeighborsClassifier(n_neighbors=4, metric='euclidean')
    a.trainClassifier(classifier=clf)
    a.dumpClassifier(dumpName=dumpName)
    a.testClassifier()
Exemple #4
0
               stopTime=22000,
               label=1)

# a.addDataFiles(fileSourceName="chris_c.txt", fileSourcePath="../", startTime=100, stopTime=1600, label=1)

# a.addDataFiles(fileSourceName="ben.txt", fileSourcePath="../", startTime=2000, stopTime=6000, label=1)

# a.addDataFiles(fileSourceName="markus.txt", fileSourcePath="../", startTime=500, stopTime=3300, label=1)

# a.addDataFiles(fileSourceName="igor.txt", fileSourcePath="../", startTime=100, stopTime=2900, label=1)
# a.addDataFiles(fileSourceName="igor2.txt", fileSourcePath="../", startTime=600, stopTime=6000, label=1)

# Read the raw data set and split it into labeled windows.
dataSet = a.readDataSet(equalLength=False, checkData=False)
windowData, windowLabels = a.windowSplitSourceDataTT(inputData=dataSet)

# Fit the feature normalization on the windows, then apply it.
features = a.initFeatNormalization(inputData=windowData)
features = a.featureNormalization(features=features, initDone=True)

features = np.array(features)
windowLabels = np.array(windowLabels)

# Partition the feature rows by class label via boolean-mask indexing.
features0 = features[windowLabels == 0]
features1 = features[windowLabels == 1]

# Per-class, per-feature accumulators and their difference.
m0 = []
m1 = []
diff = []
# Iterate over feature columns; len(features[0, ::]) is the number of
# features in one row (int() is redundant but harmless).
# NOTE(review): the loop body below is entirely commented out in this
# snippet — presumably a per-feature kernel-density comparison; the
# remainder is truncated here, so the loop as shown is incomplete.
for i in range(int(len(features[0, ::]))):
    #f = kernelDensityEstimator(x=features0[::, f0], h=0.15)
    #    plt.scatter(i*np.ones(np.size(features0[::, f0])), features0[::, f0], c='r', marker="s", alpha=0.5)
    # m0.append(kernelDensityEstimator(features0[::, f0], h=0.15))
Exemple #5
0
# Register the two Igor recordings, trimmed to their useful time spans,
# both tagged with gesture label 6.
a.addDataFiles(fileSourceName="igor.txt", fileSourcePath="../", startTime=100, stopTime=2900, label=6)
a.addDataFiles(fileSourceName="igor2.txt", fileSourcePath="../", startTime=600, stopTime=6000, label=6)

a.readDataSet(equalLength=False, checkData=False)


# Toggle: True reloads a previously dumped model, False trains a fresh one.
useDump = False

if useDump:
    # Restore normalization parameters and the pickled classifier,
    # evaluate it, then stream live data through it.
    a.loadDumpNormParam(dumpName="DecisionTreeClassifier")
    clf = a.loadDumpClassifier("DecisionTreeClassifier")
    a.testClassifier(classifier=clf)
    a.setFileSink(fileSinkName="chris", fileSinkPath="../")
    a.startLiveClassification()
else:
    # Fit feature normalization, train a fresh decision tree, persist it
    # under the same dump name, and evaluate.
    a.initFeatNormalization(dumpName="DecisionTreeClassifier")
    from sklearn.tree import DecisionTreeClassifier
    clf = DecisionTreeClassifier()
    a.trainClassifier(classifier=clf)
    a.dumpClassifier(dumpName="DecisionTreeClassifier")
    a.testClassifier()


# Split the source data into fixed-size windows, one label per window.
windowedData, windowLabels = a.windowSplitSourceDataTT()

# Index permutation used to randomize the train/test split.
# np.arange builds 0..n-1 exactly; np.linspace(0, n-1, n, dtype=int)
# did the same via float spacing truncated to int.
index = np.arange(len(windowedData))
random.shuffle(index)

# Containers for the upcoming train/test partition.
trainingData = []
trainingLabels = []
testData = []
Exemple #6
0
               startTime=600,
               stopTime=6000,
               label=6)

# Load the configured data set; segments may differ in length and raw
# samples are not validated.
a.readDataSet(equalLength=False, checkData=False)

# Toggle: True reloads a previously dumped model, False trains a fresh one.
useDump = False

if useDump:
    # Restore normalization parameters and the pickled classifier,
    # evaluate it, then stream live data through it.
    a.loadDumpNormParam(dumpName="GaussianNB")
    clf = a.loadDumpClassifier("GaussianNB")
    a.testClassifier(classifier=clf)
    a.setFileSink(fileSinkName="chris", fileSinkPath="../")
    a.startLiveClassification()
else:
    # Fit feature normalization, train a fresh Gaussian naive-Bayes
    # model, persist it under the same dump name, and evaluate.
    a.initFeatNormalization(dumpName="GaussianNB")
    from sklearn.naive_bayes import GaussianNB
    clf = GaussianNB()
    a.trainClassifier(classifier=clf)
    a.dumpClassifier(dumpName="GaussianNB")
    a.testClassifier()

# Split the source data into fixed-size windows, one label per window.
windowedData, windowLabels = a.windowSplitSourceDataTT()

# Index permutation used to randomize the train/test split.
# np.arange builds 0..n-1 exactly; np.linspace(0, n-1, n, dtype=int)
# did the same via float spacing truncated to int.
index = np.arange(len(windowedData))
random.shuffle(index)

# Containers for the upcoming train/test partition.
trainingData = []
trainingLabels = []
testData = []
testLabels = []
# Register the Igor recordings; the first also carries an explicit
# class name for display purposes.
a.addDataFiles(fileSourceName="igor.txt",
               fileSourcePath="../",
               startTime=100,
               stopTime=2900,
               label=6,
               className="igor")
a.addDataFiles(fileSourceName="igor2.txt",
               fileSourcePath="../",
               startTime=600,
               stopTime=6000,
               label=6)

# Read the data, window it, and compute normalized features.
a.readDataSet(equalLength=False, checkData=False)
windows, labels = a.windowSplitSourceDataTT()
features = a.initFeatNormalization(windows)

# create the RFE model and select 100 attributes
features = np.array(features)
labels = np.array(labels)

model = XGBClassifier()
# n_features_to_select is keyword-only in current scikit-learn;
# the positional form RFE(model, 100) raises a TypeError there.
model = RFE(model, n_features_to_select=100)
model.fit(features, labels)
# summarize the selection of the attributes
print(model.support_)
print(model.ranking_)
fov = a.returnFeatureIndices()
print(fov)
plt.scatter(fov, model.ranking_)