Beispiel #1
0
def classify(classiFile):
    """Classify the previously received data file and re-arm the folder watcher.

    Invoked by FolderWatch whenever a new CSV appears in CLASSIFY_FOLDER.
    The very first call only records the file name (the newest file may
    still be in flight); every later call classifies the *previous* file,
    deletes it, and remembers the new file for the next round.

    Parameters
    ----------
    classiFile : str
        Path of the CSV file that just appeared in the watched folder.
    """
    global previousFile
    if previousFile == "none":
        # First invocation: nothing to classify yet, just remember the file
        # and keep watching.
        previousFile = classiFile
        FolderWatch.FolderWatch(CLASSIFY_FOLDER, classify)
        return

    #print("classifying: " + previousFile)
    dataFile = pd.read_csv(previousFile, header=0)

    try:
        print("trying...")
        dataFile = analyzer.normalize(dataFile)
        dataFile = analyzer.autoCorrelate(dataFile)
        #is already being autocorrelated by getBPM
        autoanalyzer = DataAnalyzer.AutoAnalyzer(dataFile)
        output = autoanalyzer.getLastPeakTime()
        detectedBPM = output['bpm']
        time = output['time']

        # DTW distance from the sample to every training item — this is the
        # feature row the KNN model was trained on.
        row = [analyzer.DTWSimilarity(dataFile, secondData)
               for secondData in training_data]

        print("Classify: \t", str(model.predict([row])), ", BPM: ",
              str(detectedBPM), ", time: ", str(time))
    except Exception as exc:
        # BUG FIX: the original bare `except:` also swallowed
        # KeyboardInterrupt/SystemExit and printed the misleading message
        # 'raising exception' while actually suppressing the error.
        print('[EXCEPTION] during classification:', exc)

    # Consume the processed file and rotate to the newest one.
    os.remove(previousFile)
    previousFile = classiFile
    FolderWatch.FolderWatch(CLASSIFY_FOLDER, classify)
Beispiel #2
0
def classify(classiFile):
  """Classify the previously received data file and re-arm the folder watcher.

  Invoked by FolderWatch whenever a new CSV appears in CLASSIFY_FOLDER.
  The first call only records the file name; every later call classifies
  the *previous* file, reports the result through the socket, archives the
  classified period, deletes the source file, and remembers the new file.

  Parameters
  ----------
  classiFile : str
      Path of the CSV file that just appeared in the watched folder.
  """
  global previousFile
  if previousFile == "none":
    # First invocation: nothing to classify yet.
    previousFile = classiFile
    FolderWatch.FolderWatch(CLASSIFY_FOLDER, classify)
    return

  #print("classifying: " + previousFile)
  dataFile = None
  try:
    dataFile = pd.read_csv(previousFile, header=0)
  except Exception as exc:
    # BUG FIX: the original bare `except: ... pass` left dataFile undefined
    # and still fell through to classification, which then failed with a
    # confusing NameError. Now we skip classification entirely.
    print("[EXCEPTION] cant read previous file:", exc)

  if dataFile is not None:
    try:
      print("Classificating...")
      dataFile = analyzer.normalize(dataFile)
      dataFile = analyzer.autoCorrelate(dataFile)
      #is already being autocorrelated by getBPM
      autoanalyzer = DataAnalyzer.AutoAnalyzer(dataFile)
      output = autoanalyzer.getLastPeakTime() #calculates where the first occuring peak is in the file
      detectedBPM = output['bpm']
      time = output['time']
      peakIndex = output['index'] #index number of the peak in the file
      endPeakIndex = output['endPeriodIndex']

      #get one period of data starting from the peak index.
      periodData = autoanalyzer.getPeriodsFromDataIndex(1, peakIndex)['data']

      # DTW distance from the sample period to every training item — the
      # feature row the KNN model was trained on.
      row = [analyzer.DTWSimilarity(periodData, secondData)
             for secondData in training_data]

      gesture = model.predict([row])[0] #predict the gesture with KNN using the calculated distance matrix

      # if BPM is larger than 200, you know for sure it isn't a movement. Temporary 'knutseloplossing'
      if detectedBPM > 200:
        gesture = "rest"

      print("Classify: \t", str(gesture), ", BPM: ", str(detectedBPM), ", time: ", str(time))
      socket.sendClassify(str(gesture), detectedBPM, time) #send back the classified gesture through the socket.
      # Archive the classified period for later inspection/retraining.
      dataFile[peakIndex:endPeakIndex].to_csv(CLASSIFY_SAVE_FOLDER + str(gesture) + "-" + str(detectedBPM) + "-" + str(time) + ".csv")

    except Exception as exc:
      # Boundary handler: log and carry on so the watch loop never dies.
      # (Bare `except:` replaced — it also caught KeyboardInterrupt.)
      print('[EXCEPTION] during classification:', exc)

  try:
    os.remove(previousFile)
  except OSError as exc:
    print("[EXCEPTION] cant delete previous file:", exc)
  previousFile = classiFile
  FolderWatch.FolderWatch(CLASSIFY_FOLDER, classify)
Beispiel #3
0
def classify(classiFile):
    """Classify the previously received data file and re-arm the folder watcher.

    Invoked by FolderWatch whenever a new CSV appears in CLASSIFY_FOLDER.
    The first call only records the file name; every later call classifies
    the *previous* file, reports the result through the socket, deletes the
    source file, and remembers the new file for the next round.

    Parameters
    ----------
    classiFile : str
        Path of the CSV file that just appeared in the watched folder.
    """
    global previousFile
    if previousFile == "none":
        # First invocation: nothing to classify yet.
        previousFile = classiFile
        FolderWatch.FolderWatch(CLASSIFY_FOLDER, classify)
        return

    #print("classifying: " + previousFile)
    dataFile = None
    try:
        dataFile = pd.read_csv(previousFile, header=0)
    except Exception as exc:
        # BUG FIX: the original bare `except: ... pass` left dataFile
        # undefined and still fell through to classification, which then
        # failed with a confusing NameError. Now we skip classification.
        print("[EXCEPTION] cant read previous file:", exc)

    if dataFile is not None:
        try:
            print("Classificating...")
            dataFile = analyzer.normalize(dataFile)
            dataFile = analyzer.autoCorrelate(dataFile)
            #is already being autocorrelated by getBPM
            autoanalyzer = DataAnalyzer.AutoAnalyzer(dataFile)
            output = autoanalyzer.getLastPeakTime()
            detectedBPM = output['bpm']
            time = output['time']

            # Extract one period of data starting from the second period.
            periodData = autoanalyzer.getPeriods(1, startIndexPeriod=1)['data']

            # DTW distance from the sample period to every training item —
            # the feature row the KNN model was trained on.
            row = [analyzer.DTWSimilarity(periodData, secondData)
                   for secondData in training_data]

            gesture = model.predict([row])[0]

            #knutseloplossing: BPM above 200 cannot be a real movement.
            if detectedBPM > 200:
                gesture = "rest"

            print("Classify: \t", str(gesture), ", BPM: ", str(detectedBPM),
                  ", time: ", str(time))

            socket.sendClassify(str(gesture), detectedBPM, time)
            print("tried to send...")

        except Exception as exc:
            # Boundary handler: log and carry on so the watch loop never
            # dies. (Bare `except:` replaced — it also caught
            # KeyboardInterrupt/SystemExit.)
            print('[EXCEPTION] during classification:', exc)

    try:
        os.remove(previousFile)
    except OSError as exc:
        print("[EXCEPTION] cant delete previous file:", exc)
    previousFile = classiFile
    FolderWatch.FolderWatch(CLASSIFY_FOLDER, classify)
Beispiel #4
0
print("[TESTING]")

# Accumulators for the evaluation run: the extracted sample periods, their
# ground-truth labels, and per-sample lengths (filled by the loop below).
test_data = []
test_labels = []
test_data_length = []


# getDataFileNames("test") presumably lists the CSV files of the test split
# inside DATA_FOLDER — TODO confirm against its definition.
files = getDataFileNames("test")
for trainingFile in files:
  dataObject = pd.read_csv(DATA_FOLDER + trainingFile, header = 0)
  #data = [dataFile['accX'], dataFile['accY'], dataFile['accZ']]
  #data = [dataFile['alpha'], dataFile['beta'], dataFile['gamma'], dataFile['accX'], dataFile['accY'], dataFile['accZ']]
  dataObject = analyzer.normalize(dataObject)
  dataObject = analyzer.autoCorrelate(dataObject)

  autoAnalyzer = DataAnalyzer.AutoAnalyzer(dataObject)

  output = autoAnalyzer.getLastPeakTime(periods=2, startingPeriod=1)
  peakIndex = output['index']
  periodData = autoAnalyzer.getPeriodsFromDataIndex(1, peakIndex)['data']

  #periodData = autoAnalyzer.getPeriods(1, startIndexPeriod=1)['data']

  test_data.append(periodData)
  if "updown" in trainingFile:
    test_labels.append("updown")
  elif "leftright" in trainingFile:
    test_labels.append("leftright")
  elif "rotateclock" in trainingFile:
    test_labels.append("rotateclockwise")
  elif "square" in trainingFile:
Beispiel #5
0
    "../data/FLAWED/lessthan1period/updown-35-49-R53P95G3cV93UMlKAAB0-3_1.csv",
    header=0)
# NOTE(review): `dataFile` is produced by a pd.read_csv(...) call whose
# opening line is outside this view; it appears to hold one flawed sample
# (less than one period) loaded for inspection.
da = DataAnalyzer.DataAnalyzer()

#Normalize each stream individually:
# (kept disabled — skNorm-based per-axis normalization alternative)
'''
dataNorm = dataFile.copy()
dataNorm['accX'] = skNorm(dataNorm['accX'])[0]
dataNorm['accY'] = skNorm(dataNorm['accY'])[0]
dataNorm['accZ'] = skNorm(dataNorm['accZ'])[0]
'''

# Preprocessing deliberately disabled: the raw file is visualized as-is.
#dataFile = da.normalize(dataFile)
#dataFile = da.autoCorrelate(dataFile)

daa = DataAnalyzer.AutoAnalyzer(dataFile)

# Visualize the full (unprocessed) data file.
visualizer = Visualizer.Visualizer(dataFile)
#visualizer = Visualizer.Visualizer(dataFile[90:120])
# Disabled matplotlib comparison plot of normalized vs autocorrelated accZ
# over samples 10..110 (depends on a `dataFileCorrelated` not defined here).
'''
time = np.linspace(0,dataFile[10:110].shape[0], dataFile[10:110].shape[0])
fig = plt.figure(figsize=(12,4))
_ = plt.plot(time, dataFile[10:110]['accZ'], label='Normalized')
_ = plt.plot(time, dataFileCorrelated[10:110]['accZ'], label='Autocorrelated')
#_ = plt.title('DTW distance betw')
_ = plt.ylabel('Amplitude')
_ = plt.xlabel('Time')
_ = plt.legend()
plt.show()

'''