Example #1
    def test_getSingleWaves(self):
        eegData = EEGTableFileUtil().readFile(PATH + "example_32.csv")
        eeg = eegData.getColumn("F3")
        nEeg = len(eeg)
        samplingRate = eegData.getSamplingRate()
        waves = self.util.getWaves(eeg, samplingRate)

        delta = self.util.getDeltaWaves(eeg, samplingRate)
        self.assertEqual(len(delta), nEeg)
        self.assertTrue(all([x in waves["delta"] for x in delta]))

        theta = self.util.getThetaWaves(eeg, samplingRate)
        self.assertEqual(len(theta), nEeg)
        self.assertTrue(all([x in waves["theta"] for x in theta]))

        alpha = self.util.getAlphaWaves(eeg, samplingRate)
        self.assertEqual(len(alpha), nEeg)
        self.assertTrue(all([x in waves["alpha"] for x in alpha]))

        beta = self.util.getBetaWaves(eeg, samplingRate)
        self.assertEqual(len(beta), nEeg)
        self.assertTrue(all([x in waves["alpha"] for x in alpha]))
          
        gamma = self.util.getGammaWaves(eeg, samplingRate)
        self.assertEqual(len(gamma), nEeg)
        self.assertTrue(all([x in waves["gamma"] for x in gamma]))
Example #2
 def test_getWaves(self):
     eegData = EEGTableFileUtil().readFile(PATH + "example_32.csv")
     eeg = eegData.getColumn("F3")
     nEeg = len(eeg)
     waves = self.util.getWaves(eeg, eegData.getSamplingRate())
     
     self.assertEqual(len(waves), 5)
     for _, wave in waves.iteritems():
         self.assertEqual(len(wave), nEeg)
Example #3
class EEGTableConverter(object):
    
    def __init__(self, filePath=None, infinite=True):
        '''
        Reads data from the given CSV file (default: ./../../examples/example_4096.csv) and builds the data structure
        '''
        self.reader = EEGTableFileUtil()
        self.filepath = filePath
        self.infinite = infinite
        self.hasMore = False
        if filePath is None:
            self.filepath = scriptPath + "/../../examples/example_4096.csv"
            #self.filepath = scriptPath + "/../../../captured_data/janis/2016-07-12-11-15_EEG_1.csv"
        self.data = None
        self.index = 0

    def convert(self):
        self._readHeader()
        self._readRawData()

        self.data = self._buildDataStructure()

    def _readHeader(self):
        self.header = self.reader.readHeader(self.filepath)
        
        fields = self.header[:]
        fields.remove("Timestamp")
        fields.remove("Unknown")
        self.fields = filter(lambda x: not (x.startswith("Q")), fields)

    def _readRawData(self):
        self.rawData = self.reader.readData(self.filepath)
        self.len = len(self.rawData)
        if self.len > 0:
            self.hasMore = True
        print "Using %d dummy datasets" % self.len

    def dequeue(self):
        pass

    def _getNextIndex(self):
        self.index += 1
        if self.index >= len(self.data) and not self.infinite:
            self.hasMore = False
        self.index %= self.len

    def close(self):
        pass
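# Usage sketch for EEGTableConverter (hedged: it assumes the default example CSV
# exists on disk and that the class plus EEGTableFileUtil are importable in this scope):
if __name__ == '__main__': # pragma: no cover
    converter = EEGTableConverter()   # falls back to ./../../examples/example_4096.csv
    converter.convert()               # read header and raw data, then build the data structure
    print "converted %d datasets with fields %s" % (converter.len, converter.fields)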
Example #4
 def __init__(self, networkFile=None, demo=False, demoFile=None):
     '''Main class for drowsiness detection
     
     :param string networkFile: file name of the saved neural network (path: "/../../data/<networkFile>.nn")
     '''
     self.demo = demo
     self.running = True
     self.config = ConfigProvider()
     self._initNeuralNetwork(networkFile)
     self._initFeatureExtractor(demoFile)
     self.dm = DrowsinessMonitor()
     self.fileUtil = EEGTableFileUtil()
Example #5
 def __init__(self, filePath=None, infinite=True):
     '''
     Reads data from the given CSV file (default: ./../../examples/example_4096.csv) and builds the data structure
     '''
     self.reader = EEGTableFileUtil()
     self.filepath = filePath
     self.infinite = infinite
     self.hasMore = False
     if filePath is None:
         self.filepath = scriptPath + "/../../examples/example_4096.csv"
         #self.filepath = scriptPath + "/../../../captured_data/janis/2016-07-12-11-15_EEG_1.csv"
     self.data = None
     self.index = 0
Example #6
class PoSDBoS(object):

    def __init__(self, networkFile=None, demo=False, demoFile=None):
        '''Main class for drowsiness detection
        
        :param string networkFile: file name of the saved neural network (path: "/../../data/<networkFile>.nn")
        '''
        self.demo = demo
        self.running = True
        self.config = ConfigProvider()
        self._initPoSDBoS()
        self._initNeuralNetwork(networkFile)
        self._initFeatureExtractor(demoFile)
        self.dm = DrowsinessMonitor()
        self.fileUtil = EEGTableFileUtil()

    def _initPoSDBoS(self):
        posdbosConfig = self.config.getPoSDBoSConfig()
        self.drowsyMinCount = posdbosConfig.get("drowsyMinCount")
        self.awakeMinCount = posdbosConfig.get("awakeMinCount")
        self.classified = [0, 0]
        self.curClass = 0
        self.classCount = 0
        self.found = 0

    def _initNeuralNetwork(self, networkFile):
        nnCreate = self.config.getNNInitConfig()
        self.nn = NeuralNetwork()
        if networkFile is None:
            self.nn.createNew(**nnCreate)
        else:
            self.nn.load(networkFile)

    def _initFeatureExtractor(self, demoFile):
        self.demoFile = demoFile
        collector = self._initDataCollector(self.demoFile)
        self.fe = FeatureExtractor(collector)
        self.inputQueue = self.fe.extractQueue

    def _initDataCollector(self, demoFile):
        collectorConfig = self.config.getCollectorConfig()
        if self.demo:
            return DummyDataCollector(demoFile, **collectorConfig)
        else:
            return EEGDataCollector(None, **collectorConfig)

    def close(self):
        self.running = False

    def run(self):
        fet = threading.Thread(target=self.fe.start)
        fet.start()
        dmt = threading.Thread(target=self.dm.run)
        dmt.start()
        features = []
        total = 0
        start = time.time()
        c = []
        while self.running and dmt.is_alive():
            try:
                #awake = 0, drowsy = 1
                data = self.inputQueue.get(timeout=1)
                features.append(data)
                clazz = self.nn.activate(data, True)
                c.append([clazz, clazz])
                self.setStatus(clazz)
                total += 1
            except Empty:
                print "needed %sms for %d windows" % (time.time() - start, total) 
                pass
            except KeyboardInterrupt:
                self.close()
            except Exception as e:
                print e
                self.close()
        #self.writeFeature(c)
        self.fe.close()
        self.dm.close()
        dmt.join()

    def setStatus(self, clazz):
        self.classified[clazz] += 1
        if self.curClass == clazz:
            self.classCount += 1
        else:
            self.curClass = clazz
            self.classCount = 0

        info = "class %d row (%s)" % (clazz, str(self.classCount))
        if clazz == 1 and self.classCount >= self.drowsyMinCount:
            self.dm.setStatus(clazz, info)
            self.found += 1
        elif clazz == 0 and self.classCount >= self.awakeMinCount:
            self.dm.setStatus(clazz, info)

    def writeFeature(self, data):
        filePath = scriptPath + "/../data/" + "classes.csv"
        #filePath = scriptPath + "/../data/" + "drowsy_full_.csv"

        header = ["clazz", "clazz2"]
        #start = 4
        #end = start + len(data[0])/6
        #for field in self.config.getCollectorConfig().get("fields"):
        #    header.extend([str(x) + "Hz" + field for x in range(start, end)])
        self.fileUtil.writeFile(filePath, data, header)
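# Usage sketch for PoSDBoS (hedged: "record.csv" is a placeholder demo recording,
# and networkFile=None makes _initNeuralNetwork create a fresh network from the config):
if __name__ == '__main__': # pragma: no cover
    p = PoSDBoS(networkFile=None, demo=True, demoFile="record.csv")
    p.run()   # blocks until the DrowsinessMonitor thread stops or the user interrupts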
Example #7
 def _readData(self):
     self.reader = EEGTableFileUtil()
     self.eegData = self.reader.readFile(self.filePath)
Example #8
class SignalStatisticUtil(object):
    '''
    Class to collect, plot, and print statistical values for the channels of an EEG recording
    '''

    def __init__(self, person, filePath, signals=None, save=True, plot=True, logScale=False):
        self.person = person
        self.filePath = filePath
        self._initStatsDict()
        self._readData()
        self._initSignals(signals)
        self.su = SignalUtil()
        self.qu = QualityUtil()
        self.eu = EEGUtil()
        self._initFields()
        self.save = save
        self._initPlotter(person, plot, logScale)
        self.ssPrint = SignalStatisticPrinter(person)
        self.preProcessor = SignalPreProcessor()
        self.processor = SignalProcessor()
        self.windowSize = ConfigProvider().getCollectorConfig().get("windowSize")

    def _initStatsDict(self):
        self.stats = OrderedDict()
        self.stats[GENERAL_KEY] = OrderedDict()
        self.stats[SIGNALS_KEY] = OrderedDict()

    def _readData(self):
        self.reader = EEGTableFileUtil()
        self.eegData = self.reader.readFile(self.filePath)

    def _initSignals(self, signals):
        if not signals:
            signals = ConfigProvider().getEmotivConfig().get("eegFields")
        self.signals = signals

    def _initFields(self):
        self.statFields = STAT_FIELDS
        self.statFields["max"][METHOD] = self.su.maximum 
        self.statFields["min"][METHOD] = self.su.minimum
        self.statFields["mean"][METHOD] = self.su.mean
        self.statFields["std"][METHOD] = self.su.std
        self.statFields["var"][METHOD] = self.su.var
        self.statFields["zeros"][METHOD] = self.qu.countZeros
        self.statFields["seq"][METHOD] = self.qu.countSequences
        self.statFields["out"][METHOD] = self.qu.countOutliners
        self.statFields["nrgy"][METHOD] = self.su.energy
        self.statFields["zcr"][METHOD] = self.su.zcr

    def _initPlotter(self, person, plot, logScale):
        self.plotter = []
        for clazz in PLOTTER:
            plotter = clazz(person, self.eegData, self.signals, self.filePath, self.save, plot, logScale)
            thread = multiprocessing.Process(target=plotter.doPlot)
            self.plotter.append(thread)


    def main(self):
        self.doPlot()

        self.collect_stats()
        self.printStats()

    def doPlot(self):
        for thread in self.plotter:
            thread.start()

    def collect_stats(self):
        self.collectGeneralStats()
        for signal in self.signals:
            self.stats[SIGNALS_KEY][signal] = {}
            self.collectRawStats(signal)

    def collectGeneralStats(self):
        self._addGeneralStatValue("file path", self.filePath)
        self._addGeneralStatValue("sampleRate", ("%f.2" % self.eegData.getSamplingRate()))
        self._addGeneralStatValue("dataLength", ("%d" % self.eegData.len))
        self._addGeneralStatValue("bound", ("%d - %d" % (self.qu.lowerBound, self.qu.upperBound)))

        self._addGeneralTimeStat("start time", "getStartTime", TIME_FORMAT_STRING)
        self._addGeneralTimeStat("end time", "getEndTime", TIME_FORMAT_STRING)
        self._addGeneralTimeStat("duration", "getDuration", DURATION_FORMAT_STRING)

    def _addGeneralTimeStat(self, name, method, formatString):
        time = getattr(self.eegData, method)()
        value = self._buildFormattedTime(time, formatString)
        self._addGeneralStatValue(name, value)

    def _buildFormattedTime(self, time, formatString):
        value = datetime.fromtimestamp(time).strftime(formatString)
        return value
    
    def _addGeneralStatValue(self, name, value):
        self.stats[GENERAL_KEY][name] = value

    def collectRawStats(self, signal):
        data = self.eegData.getColumn(signal)
        self._collectSignalStat(signal, RAW_KEY, data)

    def _collectSignalStat(self, signal, category, data):
        self.stats[SIGNALS_KEY][signal][category] = OrderedDict()
        for field, attributes in self.statFields.iteritems():
            fieldValues = []
            for window in self.getWindows(data):
                fieldValues.append(self._getSignalStat(signal, category, field, attributes["method"], window, 0))
            merged = self._mergeValues(fieldValues, field)
            self._addSignalStatValue(signal, category, field, merged)

    def getWindows(self, raw):
        windows = []
        for start in range(0, len(raw), self.windowSize / 2):
            end = start + self.windowSize
            if end <= len(raw):
                windows.append(raw[start:end])
        return windows

    def _getSignalStat(self, signal, category, name, method, raw, decPlace=2):
        return method(raw)

    def _addSignalStatValue(self, signal, category, name, value):
        self.stats[SIGNALS_KEY][signal][category][name] = value

    def _mergeValues(self, values, field):
        typ = self.statFields[field][TYPE]

        if typ == MAX_TYPE:
            return nanmax(values)
        if typ == MIN_TYPE:
            return nanmin(values)
        if typ == AGGREGATION_TYPE:
            return nansum(values)
        if typ == MEAN_TYPE:
            return nanmean(values)
        if typ == DIFF_TYPE:
            return nanmean(values)

    def setStats(self, stats):
        self.stats = stats

    def printStats(self):
        content = self.ssPrint.getSignalStatsString(self.stats)
        print content
        if self.save:
            filePath = getNewFileName(self.filePath, "txt", "_stats")
            self.ssPrint.saveStats(filePath, content)
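# Usage sketch for SignalStatisticUtil (hedged: the person label and the CSV path are
# placeholders, and the config, plotter classes and stat constants are assumed to be set up):
if __name__ == '__main__': # pragma: no cover
    ssu = SignalStatisticUtil("janis", "some_record.csv", signals=["F3", "F4"], save=False, plot=False)
    ssu.main()   # start the plot processes, collect windowed stats, print the table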
Example #9
 def setUp(self):
     self.reader = EEGTableFileUtil()
Example #10
class TestEEGTableFileUtil(unittest.TestCase):

    def setUp(self):
        self.reader = EEGTableFileUtil()

    def test_readData(self):
        file_path = PATH + "example_32.csv"
        if os.path.isfile(file_path):
            self.reader.readData(file_path)
        else:
            print "'%s' not found" % file_path

    def test_readHeader(self):
        file_path = PATH + "example_32.csv"
        if os.path.isfile(file_path):
            self.reader.readHeader(file_path)
        else:
            print "'%s' not found" % file_path

    def test_readFile(self):
        file_path = PATH + "example_32.csv"
        if os.path.isfile(file_path):
            self.reader.readFile(file_path)
        else:
            print "'%s' not found" % file_path

    def test_writeFile(self):
        filePath = PATH + "test.csv"
        header= ["A", "B", "C"]
        data = np.array([[1.123456789, 2, 3], [-4.123456789, 5, 6], [7.123456789, 8, 99.123]])
        self.reader.writeFile(filePath, data, header)
        
        if os.path.isfile(filePath):
            read = self.reader.readFile(filePath)

            for i in range(len(data)):
                for j in range(len(data[i])):
                    self.assertAlmostEqual(data[i, j], read.data[i, j], delta=0.001)

        removeFile(filePath)

    def test_writeStructredFile(self):
        filePath = PATH + "test_structured.csv"
        data = {
            "A": {
                "value": [1, 2, 3],
                "quality": [-1, -1, -1]
            },
            "B": {
                "value": [4, 5, 6],
                "quality": [-2, -2, -2]
            },
            "C": {
                "value": [7, 8, 9],
                "quality": [-3, -3, -3]
            }
        }
        self.reader.writeStructredFile(filePath, data)
        
        if os.path.isfile(filePath):
            read = self.reader.readFile(filePath)
            for key, values in data.iteritems():
                self.assertTrue(sameEntries(values["value"], read.getColumn(key)))
        removeFile(filePath)

    def test_readFile_NaNValues(self):
        eegData = self.reader.readFile(PATH + "example_32_empty.csv")
        emptyCol = eegData.getColumn("Y")
        self.assertTrue(np.isnan(emptyCol).any())
        
        nonEmptyCol = eegData.getColumn("F3")
        self.assertFalse(np.isnan(nonEmptyCol).any())

    def test_readFile_SeparatorFallback(self):
        eegData = self.reader.readFile(PATH + "example_32_empty.csv")
        semicolonData = eegData.getColumn("F3")

        eegData = self.reader.readFile(PATH + "example_32_comma.csv")
        commaData = eegData.getColumn("F3")

        self.assertTrue((semicolonData == commaData).all())
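# Round-trip sketch for EEGTableFileUtil outside of unittest (hedged: writeFile, readFile,
# getColumn and removeFile are used with the signatures exercised by the tests above):
if __name__ == '__main__': # pragma: no cover
    util = EEGTableFileUtil()
    util.writeFile(PATH + "roundtrip.csv", np.array([[1.0, 2.0], [3.0, 4.0]]), ["A", "B"])
    print util.readFile(PATH + "roundtrip.csv").getColumn("A")
    removeFile(PATH + "roundtrip.csv")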
Example #11
 def __init__(self, files=[]):
     self.files = files
     self.fileUtil = EEGTableFileUtil()
Example #12
class NetworkDataUtil(object):
    '''
    Class to load recorded feature data and split it into training and test sets for a neural network
    '''

    def __init__(self, files=[]):
        self.files = files
        self.fileUtil = EEGTableFileUtil()

    def get(self, separate=True):
        values0, values1 = self.readFiles(self.files)
        if separate:
            return self.buildTestSet(values0, values1)
        else:
            return self.buildFullTestSet(values0, values1)

    def getNInput(self):
        return self.nInputs

    def readFiles(self, files):
        return self.fileUtil.readData(files[0]), self.fileUtil.readData(files[1])

    def buildFullTestSet(self, values0, values1):
        values0 = self._addClass(values0, 0.)
        values1 = self._addClass(values1, 1.)
        rawData = self._postprocData(values0, values1)
        self.nInputs = len(rawData[0])-1

        return self.createData(self.nInputs, rawData)

    def buildTestSet(self, values0, values1):
        train0, test0 = self._preprocData(values0, 0.)
        train1, test1 = self._preprocData(values1, 1.)

        trainRawData = self._postprocData(train0, train1)
        testRawData = self._postprocData(test0, test1)
        self.nInputs = len(trainRawData[0])-1

        trainData = self.createData(self.nInputs, trainRawData)
        testData = self.createData(self.nInputs, testRawData)
        return trainData, testData

    def _preprocData(self, values, clazz):
        np.random.shuffle(values)
        values0 = self._addClass(values, clazz)
        return self._separateData(values0) 

    def _addClass(self, values, clazz):
        shape = values.shape
        clazzArray = np.full((shape[0], shape[1]+1), clazz)
        clazzArray[:,:-1] = values
        return clazzArray

    def _separateData(self, values):
        l = len(values)
        d = (2 * l / 3)
        return values[0:d], values[d:]

    def _postprocData(self, values0, values1):
        values = np.concatenate((values0, values1), axis=0)
        np.random.shuffle(values)
        return values

    def createXORData(self):
        values = [[0, 0, 0], [0, 1, 1], [1, 0, 1], [1, 1, 0]]
        return self.createData(2, values)

    def createData(self, nInput, values):
        ds = SupervisedDataSet(nInput, N_OUTPUT)
        for value in values:
            ds.addSample(value[:nInput], value[nInput])
        return ds
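# Usage sketch for NetworkDataUtil (hedged: the two CSV paths are placeholders for one
# recording per class, and SupervisedDataSet/N_OUTPUT come from the module's imports):
if __name__ == '__main__': # pragma: no cover
    ndu = NetworkDataUtil(["class0_features.csv", "class1_features.csv"])
    train, test = ndu.get(separate=True)   # two pybrain SupervisedDataSets (2/3 train, 1/3 test)
    print "%d training / %d test samples, %d inputs each" % (len(train), len(test), ndu.getNInput())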
Example #13
class PoSDBoS(object):
    
    def __init__(self, networkFile=None, demo=False, demoFile=None):
        '''Main class for drowsiness detection
        
        :param string networkFile: file name of the saved neural network (path: "/../../data/<networkFile>.nn")
        '''
        self.demo = demo
        self.running = True
        self.config = ConfigProvider()
        self._initNeuralNetwork(networkFile)
        self._initFeatureExtractor(demoFile)
        self.dm = DrowsinessMonitor()
        self.fileUtil = EEGTableFileUtil()

    def _initNeuralNetwork(self, networkFile):
        nn_conf = self.config.getNeuralNetworkConfig()
        self.nn = NeuralNetwork()
        if networkFile is None:
            self.nn.createNew(nn_conf["nInputs"], nn_conf["nHiddenLayers"], nn_conf["nOutput"], nn_conf["bias"])
        else:
            self.nn.load(networkFile)

    def _initFeatureExtractor(self, demoFile):
        collector = self._initDataCollector(demoFile)
        self.fe = FeatureExtractor(collector)
        self.inputQueue = self.fe.extractQueue

    def _initDataCollector(self, demoFile):
        collectorConfig = self.config.getCollectorConfig()
        if self.demo:
            return DummyDataCollector(demoFile, **collectorConfig)
        else:
            return EEGDataCollector(None, **collectorConfig)

    def close(self):
        self.running = False

    def run(self):
        fet = threading.Thread(target=self.fe.start)
        fet.start()
        dmt = threading.Thread(target=self.dm.run)
        dmt.start()
        features = []
        while self.running and dmt.is_alive():
            try:
                data = self.inputQueue.get(timeout=1)
                features.append(data)
                # demo: replace the extracted features with a random XOR input
                x = random.randint(1, 10) % 2
                y = random.randint(1, 10) % 2
                data = (x, y)
                
                clazz = self.nn.activate(data)
                info = "%d XOR %d is %d; queue: %d" % (x, y, clazz, self.inputQueue.qsize()) 
                self.dm.setStatus(clazz, info)
                #sleep(1)
            except Empty:
                pass
                #if self.demo:
                #    self.close()
            except KeyboardInterrupt:
                self.close()
            except Exception as e:
                print e
                self.close()
        self.writeFeature(features)
        self.fe.close()
        self.dm.close()
        dmt.join()

    def writeFeature(self, data):
        filePath = scriptPath + "/../data/" + "test.csv"
        header = []
        for field in ["F3", "F4", "F7", "F8"]:
            for i in range(1, 5):
                header.append("%s_%s" % (field ,str(i)))
        self.fileUtil.writeFile(filePath, data, header)
Example #14
            self._plotSignal(signal, self.data[:,i], axes[i])

        self._configurePlot()

        self.savePlot()
        self.showPlot()
        print "plotting done"

    def _initPlot(self):
        signalCount = self._calcSignalCount()

        fig, axes = plt.subplots(signalCount, figsize=self.figsize, dpi=80, sharex=True, sharey=False)
        return fig, axes

    def _plotSignal(self, header, data, axis):
        axis.yaxis.set_label_position("right")
        axis.set_ylabel(header)

        axis.plot(data)
        mean = np.nanmean(data)
        print header, mean
        axis.plot([mean]*len(data))

if __name__ == '__main__': # pragma: no cover
    #filePath = scriptPath + "/../../data/awake_full_.csv"
    filePath = scriptPath + "/../../data/classes.csv"

    fileUtil = EEGTableFileUtil()
    fp = FeaturePlotter(fileUtil.readData(filePath), fileUtil.readHeader(filePath), filePath)
    fp.doPlot()