Example no. 1
 def __init__(self):
     self.su = SignalUtil()
     self.windowSeconds = ConfigProvider().getCollectorConfig().get(
         "windowSeconds")
     config = ConfigProvider().getProcessingConfig()
     self.xMax = config.get("xMax")
     self.yMax = config.get("yMax")
Example no. 2
 def __init__(self):
     config = ConfigProvider().getProcessingConfig()
     self.lowerFreq = config.get("lowerFreq")
     self.upperFreq = config.get("upperFreq")
     self.samplingRate = ConfigProvider().getEmotivConfig().get(
         "samplingRate")
     self.sigUtil = SignalUtil()
Example no. 3
    def normGyroData(self):
        config = ConfigProvider().getProcessingConfig()

        for gyroField in self.getGyroHeader():
            gyroCol = self.getColumn(gyroField) - config.get(
                gyroField.lower() + "Ground")
            self.setColumn(gyroField, gyroCol)
Example no. 4
 def setUp(self):
     self.chain = SignalProcessor()
     self.qualUtil = QualityUtil()
     config = ConfigProvider().getProcessingConfig()
     self.upperBound = config.get("upperBound")
     self.lowerBound = config.get("lowerBound")
     self.minQuality = config.get("minQual")
     self.maxNaNValues = config.get("maxNaNValues")
Example no. 5
 def setUp(self):
     self.chain = SignalProcessor()
     self.qualUtil = QualityUtil()
     config = ConfigProvider().getProcessingConfig()
     self.upperBound = config.get("upperBound")
     self.lowerBound = config.get("lowerBound")
     self.minQuality = config.get("minQual")
     self.maxNaNValues = config.get("maxNaNValues")
Example no. 6
 def __init__(self, verbose=False):
     config = ConfigProvider().getProcessingConfig()
     self.maxNaNValues = config.get("maxNaNValues")
     self.lowerFreq = config.get("lowerFreq")
     self.upperFreq = config.get("upperFreq")
     self.samplingRate = ConfigProvider().getEmotivConfig().get("samplingRate")
     self.qualUtil = QualityUtil()
     self.sigUtil = SignalUtil()
     self.verbose = verbose
Example no. 7
 def __init__(self):
     self.config = ConfigProvider().getProcessingConfig()
     self.upperBound = self.config.get("upperBound")
     self.lowerBound = self.config.get("lowerBound")
     self.minQuality = self.config.get("minQual")
     self.maxSeqLength = self.config.get("maxSeqLength")
     self.maxNaNValues = self.config.get("maxNaNValues")
     self.windowSeconds = ConfigProvider().getCollectorConfig().get(
         "windowSeconds")
Example no. 8
    def _initPoSDBoS(demo):
        posdbos = PoSDBoS()
        posdbos.demo = demo
        posdbos.running = True

        posdbosConfig = ConfigProvider().getPoSDBoSConfig()
        posdbos.drowsyMinCount = posdbosConfig.get("drowsyMinCount")
        posdbos.awakeMinCount = posdbosConfig.get("awakeMinCount")

        posdbos.classified = [0, 0]
        posdbos.curClass = 0
        posdbos.classCount = 0
        posdbos.found = 0
        return posdbos
Example no. 9
    def __init__(self, inputQueue, outputQueue):
        config = ConfigProvider()
        self.eegFields = config.getEmotivConfig()["eegFields"]
        self.gyroFields = config.getEmotivConfig()["gyroFields"]
        self.samplingRate = config.getEmotivConfig()["samplingRate"]
        
        self.processingConfig = config.getProcessingConfig()
        self.preProcessor = SignalPreProcessor()
        self.signalProcessor = SignalProcessor()
        self.fftProcessor = FFTProcessor()

        self.inputQueue = inputQueue
        self.outputQueue = outputQueue
        self.runProcess = True
        self.totalInvalid = 0
        self.totalCount = 0
Example no. 10
 def __init__(self, verbose=False):
     self.samplingRate = ConfigProvider().getEmotivConfig().get(
         "samplingRate")
     self.qualUtil = QualityUtil()
     self.fftUtil = FFTUtil()
     self.eegUtil = EEGUtil()
     self.verbose = verbose
Example no. 11
 def writeFeature(self, data, filePath):
     header = []
     start = 4
     end = start + len(data[0]) / 6
     for field in ConfigProvider().getCollectorConfig().get("eegFields"):
         header.extend([str(x) + "Hz" + field for x in range(start, end)])
     self.fileUtil.saveCSV(filePath, data, header)
Example no. 12
 def __init__(self):
     self.config = ConfigProvider().getProcessingConfig()
     self.upperBound = self.config.get("upperBound")
     self.lowerBound = self.config.get("lowerBound")
     self.minQuality = self.config.get("minQual")
     self.maxSeqLength = self.config.get("maxSeqLength")
     self.maxNaNValues = self.config.get("maxNaNValues")
Example no. 13
def plot(proband, filename):
    experiments = ConfigProvider().getExperimentConfig()
    experimentDir = experiments["filePath"]
    #filePath = "%s/test/%s" % (experimentDir, "awake_full.csv")
    filePath = "%s/%s/%s" % (experimentDir, proband, filename)

    dto = FileUtil().getDto(filePath)
    fp = FeaturePlotter(dto.getData(), dto.getHeader(), filePath)
    fp.doPlot()
Example no. 14
 def __init__(self, nInputs=None, nHiddenLayers=None, bias=True, new=True, fileName=""):
     self.params = {}
     self.config = ConfigProvider().getNNTrainConfig()
     if new:
         self.params["nInputs"] = nInputs
         self.params["nHiddenLayers"] = nHiddenLayers
         self.params["bias"] = bias
         self.nn = NeuralNetwork().createNew(nInputs, nHiddenLayers, N_OUTPUT, bias)
     else:
         self.nn = NeuralNetwork().load(fileName)
Example no. 15
def loadSingle(fileName):
    #files = [scriptPath + "/../../../data/awake_full.csv", scriptPath + "/../../../data/drowsy_full.csv"]
    experimentDir = ConfigProvider().getExperimentConfig().get("filePath")
    files = [experimentDir + "test/awakes_proc_new4.csv", experimentDir + "test/drowsies_proc_new4.csv"]

    ndu = NetworkDataUtil(files)
    data = ndu.get(False)
    nu = NetworkUtil(new=False, fileName=fileName)

    writeResults(nu, data, fileName, files)
Example no. 16
 def __init__(self, networkFile=None, demo=False, demoFile=None):
     '''Main class for drowsiness detection
     
     :param string networkFile: file name of the saved neural network (path: "/../../data/<networkFile>.nn")
     '''
     self.demo = demo
     self.running = True
     self.config = ConfigProvider()
     self._initNeuralNetwork(networkFile)
     self._initFeatureExtractor(demoFile)
     self.dm = DrowsinessMonitor()
     self.fileUtil = EEGTableFileUtil()
Example no. 17
class ConfigTest(BaseTest):
    def setUp(self):
        self.config = ConfigProvider()

    def test_getConfig(self):
        cfg = self.config.getConfig("test")

        self.assertEqual(type(cfg["int"]), int)
        self.assertEqual(type(cfg["float"]), float)
        self.assertEqual(type(cfg["bool"]), bool)
        self.assertEqual(type(cfg["str"]), str)
        self.assertEqual(type(cfg["list"]), list)
        self.assertEqual(type(cfg["dict"]), dict)

    def test_getUnknownConfig(self):
        with self.assertRaises(ConfigParser.NoSectionError):
            self.config.getConfig("unknown")

    def test_getInvalidConfig(self):
        with self.assertRaises(ValueError):
            self.config.getConfig("fail")
Example no. 18
 def __init__(self, verbose=False):
     config = ConfigProvider().getProcessingConfig()
     self.maxNaNValues = config.get("maxNaNValues")
     self.lowerBound = config.get("lowerBound")
     self.upperBound = config.get("upperBound")
     self.normalize = config.get("normalize")
     self.mean = config.get("mean")
     self.samplingRate = ConfigProvider().getEmotivConfig().get(
         "samplingRate")
     self.windowSeconds = ConfigProvider().getCollectorConfig().get(
         "windowSeconds")
     self.qualUtil = QualityUtil()
     self.sigUtil = SignalUtil()
     self.verbose = verbose
Example no. 19
class ConfigTest(unittest.TestCase):

    def setUp(self):
        self.config = ConfigProvider()

    def test_getConfig(self):
        cfg = self.config.getConfig("test")
        
        self.assertEqual(type(cfg["int"]), int)
        self.assertEqual(type(cfg["float"]), float)
        self.assertEqual(type(cfg["bool"]), bool)
        self.assertEqual(type(cfg["str"]), str)
        self.assertEqual(type(cfg["list"]), list)
        self.assertEqual(type(cfg["dict"]), dict)

    def test_getUnknownConfig(self):
        with self.assertRaises(ConfigParser.NoSectionError):
            self.config.getConfig("unknown")

    def test_getInvalidConfig(self):
        with self.assertRaises(ValueError):
            self.config.getConfig("fail")
Example no. 20
    def _calcSamplingRate(self):
        '''
        calculates the sampling rate for the whole dataset based on the timestamp column
        
        :return: samplerate
        :rtype: float

        '''
        try:
            duration = self.getDuration()
            return self.len / duration
        except:
            return ConfigProvider().getEmotivConfig().get("samplingRate")
Example no. 21
 def createEmotivDataCollector(collectedQueue):
     collectorConfig = ConfigProvider().getCollectorConfig()
     fields = collectorConfig.get("eegFields") + collectorConfig.get(
         "gyroFields")
     windowSize = collectorConfig.get("windowSeconds")
     windowCount = collectorConfig.get("windowCount")
     return EEGDataCollector(EmotivConnector(), collectedQueue, fields,
                             windowSize, windowCount)
Example no. 22
 def createDemoEEGDataCollector(demoFile, collectedQueue):
     collectorConfig = ConfigProvider().getCollectorConfig()
     fields = collectorConfig.get("eegFields") + collectorConfig.get(
         "gyroFields")
     windowSeconds = collectorConfig.get("windowSeconds")
     windowCount = collectorConfig.get("windowCount")
     datasource = Factory.createDummyPacketSource(demoFile)
     return EEGDataCollector(datasource, collectedQueue, fields,
                             windowSeconds, windowCount, 128)
Example no. 23
def trainSingle(h, name, convergence):
    experimentDir = ConfigProvider().getExperimentConfig().get("filePath")
    files = [experimentDir + "test/awakes_proc_new8.csv", experimentDir + "test/drowsies_proc_new8.csv"]
    #files = [experimentDir + "mp/awakes_full_norm_proc_4.csv", experimentDir + "mp/drowsies_full_norm_proc_4.csv"]
    ndu = NetworkDataUtil(files)
    train, test = ndu.get()

    nu = NetworkUtil(ndu.getNInput(), 1)
    nu.train(train, convergence)
    nu.test()
    fileName = name + "_" + str(h)
    nu.save(fileName)

    data = ndu.get(False, False)
    writeResults(nu, data, fileName, files, convergence, test)
Example no. 24
 def __init__(self, experimentDir, experiments=None, signals=None, save=False, plot=False, logScale=False):
     self.experimentDir = experimentDir
     self.signals = signals
     self.save = save
     self.plot = plot
     self.logScale = logScale
     if experiments is None:
         self.experiments = ConfigProvider().getExperimentConfig()
     else:
         self.experiments = experiments
     self.stats = []
     self.merge = {}
     self.ssPrint = SignalStatisticPrinter("merge")
     self.dataLen = 0
Example no. 25
    def __init__(self):
        self.mneUtil = MNEUtil()
        config = ConfigProvider().getProcessingConfig()
        self.lowerFreq = config.get("lowerFreq")
        self.upperFreq = config.get("upperFreq")
        self.windowSeconds = ConfigProvider().getCollectorConfig().get(
            "windowSeconds")
        self.resampleFreq = config.get("resamplingRate")
        self.eegFields = ConfigProvider().getEmotivConfig().get("eegFields")

        self.si = SignalUtil()
        self.qu = QualityUtil()
        self.eog = EOGExtractor()
Example no. 26
    def __init__(self, collectedQueue, extractedQueue, eegProcessor,
                 gyroProcessor):
        config = ConfigProvider()
        self.eegFields = config.getEmotivConfig()["eegFields"]
        self.gyroFields = config.getEmotivConfig()["gyroFields"]
        self.samplingRate = config.getEmotivConfig()["samplingRate"]
        self.processingConfig = config.getProcessingConfig()

        self.eegProcessor = eegProcessor
        self.gyroProcessor = gyroProcessor

        self.collectedQueue = collectedQueue
        self.extractedQueue = extractedQueue
        self.runProcess = True
Example no. 27
    def getForTesting():
        self = TestFactory

        app = self._get()
        app.nn = self.loadNeuralNetwork(scriptPath + "/test_data/test", False)
        app.dm = DrowsinessMonitor()

        collectorConfig = ConfigProvider().getCollectorConfig()
        fields = collectorConfig.get("eegFields") + collectorConfig.get(
            "gyroFields")
        windowSeconds = collectorConfig.get("windowSeconds")
        windowCount = collectorConfig.get("windowCount")
        samplingRate = 128
        filePath = scriptPath + "/test_data/example_1024.csv"
        app.dc = self.createTestDataCollector(app.collectedQueue, fields,
                                              windowSeconds, samplingRate,
                                              windowCount, filePath)
        app.dp = self.createDataProcessor(app.collectedQueue,
                                          app.extractedQueue)
        return app
Example no. 28
 def __init__(self):
     config = ConfigProvider().getProcessingConfig()
     self.lowerFreq = config.get("lowerFreq")
     self.upperFreq = config.get("upperFreq")
     self.samplingRate = ConfigProvider().getEmotivConfig().get("samplingRate")
     self.sigUtil = SignalUtil()
Example no. 29
class Analyzer(object):
    def __init__(self):
        self.config = ConfigProvider()
        exConfig = self.config.getExperimentConfig()
        self.experimentPath = exConfig.get("filePath")
        self.probands = exConfig.get("probands")
        self.lengths = []

    def readCSV(self,
                filePath,
                sep,
                dtype,
                parse_dates=None,
                date_parser=None):
        return pd.read_csv(filePath,
                           sep=sep,
                           dtype=dtype,
                           parse_dates=parse_dates,
                           date_parser=date_parser)

    def mergeColumns(self, awake, drowsy):
        df = pd.concat([awake, drowsy], axis=1, ignore_index=True)
        columns = [col + AWAKE_SUF for col in awake.columns
                   ] + [col + DROWSY_SUF for col in awake.columns]
        df.columns = columns
        return self.sortColumns(df)

    def concatRows(self, dfs):
        self.lengths = [len(df) for df in dfs]
        return pd.concat(dfs, axis=0, ignore_index=True)

    def sortColumns(self, df):
        return df.reindex_axis(sorted(df.columns), axis=1)

    def dropOtherColumns(self, df, keeps):
        columns = df.columns.values
        keeps = np.where(np.in1d(columns, keeps))[0]
        drops = np.delete(np.arange(0, len(columns)), keeps)
        return df.drop(df.columns[drops], axis=1)

    def splitAndMerge(self, df):
        columns = self.dataField
        if type(columns) != list:
            columns = [columns]
        awake, drowsy = self.splitStates(df, columns)
        return self.mergeColumns(awake, drowsy)

    def showDifference(self, merge, name):
        self.boxplot(merge, name)
        #self.plot(merge, name)
        #self.printStats(merge, name)

    def plot(self, df, name=""):
        global lined, fig
        fig, ax = plt.subplots()
        fig.suptitle(name, fontsize=20)

        lines = []
        for column in df:
            line, = ax.plot(df[column])
            lines.append(line)
        leg = ax.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
        leg.get_frame().set_alpha(0.4)

        lined = dict()
        for legline, origline in zip(leg.get_lines(), lines):
            legline.set_picker(5)  # 5 pts tolerance
            lined[legline] = origline
        fig.canvas.mpl_connect('pick_event', onpick)

        l = 0
        print self.lengths
        for length in self.lengths:
            l += length
            plt.axvline(x=l, color='r', linestyle='dashed')

    def boxplot(self, df, name=""):
        fig, ax = plt.subplots()
        fig.suptitle(name, fontsize=20)
        df.plot.box(ax=ax)

    def printStats(self, df, name=""):
        print "\n####################\n" + name + "\n####################\n"
        print df.describe()
        print "energy\t" + "\t".join(
            [str(x) for x in (df**2).sum(axis=0).values])
        print df.quantile([0.01, 0.05, 0.1, 0.2, 0.5, 0.80, 0.90, 0.95, 0.99])

    def resetIndex(self, df):
        return df.reset_index(drop=True)
Example no. 30
 def __init__(self):
     self.config = ConfigProvider()
     exConfig = self.config.getExperimentConfig()
     self.experimentPath = exConfig.get("filePath")
     self.probands = exConfig.get("probands")
     self.lengths = []
Example no. 31
class PoSDBoS(object):

    def __init__(self, networkFile=None, demo=False, demoFile=None):
        '''Main class for drowsiness detection
        
        :param string networkFile: file name of the saved neural network (path: "/../../data/<networkFile>.nn")
        '''
        self.demo = demo
        self.running = True
        self.config = ConfigProvider()
        self._initPoSDBoS()
        self._initNeuralNetwork(networkFile)
        self._initFeatureExtractor(demoFile)
        self.dm = DrowsinessMonitor()
        self.fileUtil = EEGTableFileUtil()

    def _initPoSDBoS(self):
        posdbosConfig = self.config.getPoSDBoSConfig()
        self.drowsyMinCount = posdbosConfig.get("drowsyMinCount")
        self.awakeMinCount = posdbosConfig.get("awakeMinCount")
        self.classified = [0, 0]
        self.curClass = 0
        self.classCount = 0
        self.found = 0

    def _initNeuralNetwork(self, networkFile):
        nnCreate = self.config.getNNInitConfig()
        self.nn = NeuralNetwork()
        if networkFile == None:
            self.nn.createNew(**nnCreate)
        else:
            self.nn.load(networkFile)

    def _initFeatureExtractor(self, demoFile):
        self.demoFile = demoFile
        collector = self._initDataCollector(self.demoFile)
        self.fe = FeatureExtractor(collector)
        self.inputQueue = self.fe.extractQueue

    def _initDataCollector(self, demoFile):
        collectorConfig = self.config.getCollectorConfig()
        if self.demo:
            return DummyDataCollector(demoFile, **collectorConfig)
        else:
            return EEGDataCollector(None, **collectorConfig)

    def close(self):
        self.running = False

    def run(self):
        fet = threading.Thread(target=self.fe.start)
        fet.start()
        dmt = threading.Thread(target=self.dm.run)
        dmt.start()
        features = []
        total = 0
        start = time.time()
        c = []
        while self.running and dmt.is_alive():
            try:
                #awake = 0, drowsy = 1
                data = self.inputQueue.get(timeout=1)
                features.append(data)
                clazz = self.nn.activate(data, True)
                c.append([clazz, clazz])
                self.setStatus(clazz)
                total += 1
            except Empty:
                print "needed %sms for %d windows" % (time.time() - start, total) 
                pass
            except KeyboardInterrupt:
                self.close()
            except Exception as e:
                print e.message
                self.close()
        #self.writeFeature(c)
        self.fe.close()
        self.dm.close()
        dmt.join()

    def setStatus(self, clazz):
        self.classified[clazz] += 1
        if self.curClass == clazz:
            self.classCount += 1
        else:
            self.curClass = clazz
            self.classCount = 0

        info = "class %d row (%s)" % (clazz, str(self.classCount))
        if clazz == 1 and self.classCount >= self.drowsyMinCount:
            self.dm.setStatus(clazz, info)
            self.found += 1
        elif clazz == 0 and self.classCount >= self.awakeMinCount:
            self.dm.setStatus(clazz, info)

    def writeFeature(self, data):
        filePath = scriptPath + "/../data/" + "classes.csv"
        #filePath = scriptPath + "/../data/" + "drowsy_full_.csv"

        header = ["clazz", "clazz2"]
        #start = 4
        #end = start + len(data[0])/6
        #for field in self.config.getCollectorConfig().get("fields"):
        #    header.extend([str(x) + "Hz" + field for x in range(start, end)])
        self.fileUtil.writeFile(filePath, data, header)
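A minimal usage sketch for this class, assuming a trained network saved under the name "posdbos" and a recorded demo CSV; both file names are made up for illustration and are not taken from the project.

# hypothetical entry point; file names are illustrative only
if __name__ == "__main__":
    posdbos = PoSDBoS(networkFile="posdbos", demo=True, demoFile="demo_1024.csv")
    try:
        posdbos.run()
    except KeyboardInterrupt:
        posdbos.close()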
Example no. 32
        merge = self.splitAndMerge(df)
        self.showDifference(merge, "%s %s" % (self.name, proband))
        return merge


class EEGAnalyzer(EmotivAnalyzer):
    def __init__(self, fileName="EEG", hasTime=True):
        super(EEGAnalyzer, self).__init__(fileName, hasTime)
        self.name = "EEG"
        self.fields = self.config.getCollectorConfig().get("eegFields")
        #self.fields = self.config.getEmotivConfig().get("eegFields")
        self.dataField = self.fields

    def analyse(self, proband):
        df = self.buildDf(proband)
        merge = self.splitAndMerge(df)
        self.showDifference(merge, "%s %s" % (self.name, proband))
        return merge


probands = ConfigProvider().getExperimentConfig().get("probands")
nProbands = ["1", "2"]
oProbands = ["a", "b", "c", "e"]
ea = GyroAnalyzer("EEGNormed", False)
#for proband in probands:
#    ea.analyse(proband)
ea.analyseMerge(probands)
#ea.analyseMerge(nProbands)
#ea.analyseMerge(oProbands)
#CANAnalyzer().analyse("3")
plt.show()
Example no. 33
:author: Paul Pasler
:organization: Reutlingen University
'''
import logging
logging.basicConfig(
    level=logging.INFO,
    format=
    '%(asctime)s.%(msecs)03d %(levelname)-8s %(module)s.%(funcName)s:%(lineno)d %(message)s',
    datefmt='%H:%M:%S')
from posdbos.util.file_util import FileUtil

import threading
from config.config import ConfigProvider
from posdbos.factory import Factory

exConfig = ConfigProvider().getExperimentConfig()
probands = exConfig.get("probands")
experimentDir = exConfig.get("filePath")

fileUtil = FileUtil()


def getFilePaths(fileName):
    filePaths = []
    for proband in probands:
        filePath = "%s%s/" % (experimentDir, proband)
        filePaths.append(filePath + fileName)
    return filePaths


def splitDtos(filePaths):
Example no. 34
 def __init__(self, verbose=False):
     config = ConfigProvider().getProcessingConfig()
     self.eegFields = config.get("eegFields")
     self.fmin = config.get("fmin")
     self.fmax = config.get("fmax")
     self.mneUtil = MNEUtil()
Example no. 35
class PoSDBoS(object):
    
    def __init__(self, networkFile=None, demo=False, demoFile=None):
        '''Main class for drowsiness detection
        
        :param string networkFile: file name of the saved neural network (path: "/../../data/<networkFile>.nn")
        '''
        self.demo = demo
        self.running = True
        self.config = ConfigProvider()
        self._initNeuralNetwork(networkFile)
        self._initFeatureExtractor(demoFile)
        self.dm = DrowsinessMonitor()
        self.fileUtil = EEGTableFileUtil()

    def _initNeuralNetwork(self, networkFile):
        nn_conf = self.config.getNeuralNetworkConfig()
        self.nn = NeuralNetwork()
        if networkFile == None:
            self.nn.createNew(nn_conf["nInputs"], nn_conf["nHiddenLayers"], nn_conf["nOutput"], nn_conf["bias"])
        else:
            self.nn.load(networkFile)

    def _initFeatureExtractor(self, demoFile):
        collector = self._initDataCollector(demoFile)
        self.fe = FeatureExtractor(collector)
        self.inputQueue = self.fe.extractQueue

    def _initDataCollector(self, demoFile):
        collectorConfig = self.config.getCollectorConfig()
        if self.demo:
            return DummyDataCollector(demoFile, **collectorConfig)
        else:
            return EEGDataCollector(None, **collectorConfig)

    def close(self):
        self.running = False

    def run(self):
        fet = threading.Thread(target=self.fe.start)
        fet.start()
        dmt = threading.Thread(target=self.dm.run)
        dmt.start()
        features = []
        while self.running and dmt.is_alive():
            try:
                data = self.inputQueue.get(timeout=1)
                features.append(data)
                x = random.randint(1, 10)%2
                y = random.randint(1, 10)%2
                data = (x, y)
                
                clazz = self.nn.activate(data)
                info = "%d XOR %d is %d; queue: %d" % (x, y, clazz, self.inputQueue.qsize()) 
                self.dm.setStatus(clazz, info)
                #sleep(1)
            except Empty:
                pass
                #if self.demo:
                #    self.close()
            except KeyboardInterrupt:
                self.close()
            except Exception as e:
                print e.message
                self.close()
        self.writeFeature(features)
        self.fe.close()
        self.dm.close()
        dmt.join()

    def writeFeature(self, data):
        filePath = scriptPath + "/../data/" + "test.csv"
        header = []
        for field in ["F3", "F4", "F7", "F8"]:
            for i in range(1, 5):
                header.append("%s_%s" % (field ,str(i)))
        self.fileUtil.writeFile(filePath, data, header)
Example no. 36
 def createNeuralNetwork():
     nnCreate = ConfigProvider().getNNInitConfig()
     return NeuralNetwork().createNew(**nnCreate)
Example no. 37
 def setUp(self):
     self.config = ConfigProvider()
Example no. 38
class QualityUtil(object):
    """removes signal data with low quality"""
    
    def __init__(self):
        self.config = ConfigProvider().getProcessingConfig()
        self.upperBound = self.config.get("upperBound")
        self.lowerBound = self.config.get("lowerBound")
        self.minQuality = self.config.get("minQual")
        self.maxSeqLength = self.config.get("maxSeqLength")
        self.maxNaNValues = self.config.get("maxNaNValues")

    def _copyArray(self, data):
        return copy(data[:])

    def replaceOutliners(self, data, value=None, lowerBound=None, upperBound=None):
        """outliner values beyond 'lowerBound' and above 'upperBound' will be set to 'value'
        if value is not set, the values will be set to upper and lower bound
          
        inplace method
        :param numpy.array data: list of values
        :param float lowerBound: values < this param will be set to 'value'
        :param float upperBound: values > this param will be set to 'value'
        :param float value: default value for all outside the bounds
        
        :return: data without outliners 
        :rtype: numpy.array
        """
        if lowerBound == None:
            lowerBound=self.lowerBound
        if upperBound == None:
            upperBound=self.upperBound
        #TODO could be nicer / faster?
        # http://stackoverflow.com/questions/19666626/replace-all-elements-of-python-numpy-array-that-are-greater-than-some-value
        with errstate(invalid='ignore'): #avoid warning because of DEFAULT_REPLACE_VALUE value
            ret = self._copyArray(data)
            if value == None:
                ret[ret > upperBound] = upperBound
                ret[ret < lowerBound] = lowerBound
            else:
                ret[ret > upperBound] = value
                ret[ret < lowerBound] = value
        return ret

    def countOutliners(self, data, lowerBound=None, upperBound=None):
        """counts the outliner values beyond 'lowerBound' and above 'upperBound'
     
        :param numpy.array data: list of values
        :param float lowerBound: values < this param will be set to 'value'
        :param float upperBound: values > this param will be set to 'value'
        
        :return: number of outliners in data 
        :rtype: int
        """
        if lowerBound == None:
            lowerBound=self.lowerBound
        if upperBound == None:
            upperBound=self.upperBound
        
        cdata = copy(data[:])
        with errstate(invalid='ignore'): 
            cdata[cdata > upperBound] = DEFAULT_REPLACE_VALUE
            cdata[cdata < lowerBound] = DEFAULT_REPLACE_VALUE
        return count_nonzero(isnan(cdata))

    def replaceBadQuality(self, data, quality, value, threshold=None):
        """replaces values from data with value where quality < threshold
        
        works on a copy, the input array is left unchanged
        :param numpy.array data: list of values
        :param numpy.array quality: list of quality
        :param float threshold: param to compare quality with
        :param float value: param to replace data values
        
        :return: data without bad quality values
        :rtype: numpy.array
        """
        if len(data) != len(quality):
            raise ValueError("data and quality must have the same length")
        
        if threshold == None:
            threshold = self.minQuality
        #TODO make me nice
        ret = self._copyArray(data)
        for i, qual in enumerate(quality):
            if qual < threshold:
                ret[i] = value
        return ret

    def countBadQuality(self, data, quality, threshold=None):
        """counts values from data with value where quality < threshold
        
        inplace method
        :param numpy.array data: list of values
        :param numpy.array quality: list of quality
        :param float threshold: param to compare quality with
        
        :return: number of data with bad quality values
        :rtype: int
        """
        if len(data) != len(quality):
            raise ValueError("data and quality must have the same length")
        if threshold == None:
            threshold = self.minQuality
        
        count = 0
        for _, qual in enumerate(quality):
            if qual < threshold:
                count += 1
        return count

    def countZeros(self, data):
        '''calculates the number of zeros in data

        :param numpy.array data: list of values
        
        :return: zero count
        :rtype: int
        '''
        return len(data) - count_nonzero(data)

    def replaceNans(self, data):
        '''replaces NaNs in data with zero

        :param numpy.array data: list of values
        
        :return: data without Nan
        :rtype: numpy.array
        '''
        return nan_to_num(self._copyArray(data))

    def countNans(self, data):
        '''calculates the number of NaNs in data

        :param numpy.array data: list of values
        
        :return: NaN count
        :rtype: int
        '''
        return count_nonzero(isnan(data))

    def isInvalidData(self, data):
        '''considers a data set invalid, if there are more NaNs than maxNaNValues in the set

        :param numpy.array data: list of values
        
        :return: invalid
        :rtype: boolean
        '''
        return self.maxNaNValues < count_nonzero(isnan(data))

    def replaceZeroSequences(self, data):
        '''replaces zero sequences, which are an unwanted artefact, with DEFAULT_REPLACE_VALUE
        see http://stackoverflow.com/questions/38584956/replace-a-zero-sequence-with-other-value

        :param numpy.array data: list of values

        :return: zero sequences replaced data
        :rtype: numpy.array
        '''
        a_extm = hstack((True,data!=0,True))
        mask = a_extm == binary_closing(a_extm,structure=ones(self.maxSeqLength))
        return where(~a_extm[1:-1] & mask[1:-1],DEFAULT_REPLACE_VALUE, data)

    def countSequences(self, data):
        seqList = self._getSequenceList(data)
        return len([s for s in seqList if len(s) >= self.maxSeqLength])

    def replaceSequences(self, data):
        '''replaces any run of maxSeqLength or more identical values with DEFAULT_REPLACE_VALUE
        see http://stackoverflow.com/questions/38584956/replace-a-zero-sequence-with-other-value

        :param numpy.array data: list of values

        :return: sequences replaced data
        :rtype: numpy.array
        '''
        ret = self._copyArray(data)
        seqList = self._getSequenceList(ret)
        return array( [ item for l in seqList for item in l ] )

    def _getSequenceList(self, data):
        return array([self._getSequence(value, it) for value, it in groupby(data)])

    def _getSequence(self, value, it):
        itLen = sum(1 for _ in it) # length of iterator
    
        if itLen>=self.maxSeqLength:
            return [ DEFAULT_REPLACE_VALUE ]*itLen
        else:
            return [ value ]*itLen
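A short usage sketch for QualityUtil, assuming DEFAULT_REPLACE_VALUE is NaN and the processing config supplies the bounds read in __init__; the sample values are invented.

from numpy import array

qualUtil = QualityUtil()
raw = array([0., 0., 0., 0., 0., 9000., -9000., 7., 7., 7., 7., 7.])

clipped = qualUtil.replaceOutliners(raw)     # values outside the bounds are clipped
marked = qualUtil.replaceSequences(clipped)  # long runs of identical values -> DEFAULT_REPLACE_VALUE
# assumes DEFAULT_REPLACE_VALUE is NaN, so the replaced runs show up as NaN counts
print qualUtil.countNans(marked), qualUtil.isInvalidData(marked)
clean = qualUtil.replaceNans(marked)         # map the remaining NaNs to zero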
Example no. 39
 def getGyroHeader(self):
     gyroFields = ConfigProvider().getEmotivConfig().get("gyroFields")
     return [head for head in self.header if head in gyroFields]
Example no. 40
 def getEEGHeader(self):
     eegFields = ConfigProvider().getEmotivConfig().get("eegFields")
     return [head for head in self.header if head in eegFields]
Example no. 41
class SignalStatisticCollector(object):
    
    def __init__(self, experimentDir, experiments=None, signals=None, save=False, plot=False, logScale=False):
        self.experimentDir = experimentDir
        self.signals = signals
        self.save = save
        self.plot = plot
        self.logScale = logScale
        if experiments is None:
            self.experiments = ConfigProvider().getExperimentConfig()
        else:
            self.experiments = experiments
        self.stats = []
        self.merge = {}
        self.ssPrint = SignalStatisticPrinter("merge")
        self.dataLen = 0
    
    def main(self):
        for person, fileNames in self.experiments.iteritems():
            for fileName in fileNames:
                filePath =  "%s%s/%s" % (self.experimentDir, person, fileName)
                s = SignalStatisticUtil(person, filePath, signals=self.signals, save=self.save, plot=self.plot, logScale=self.logScale)
                self.dataLen += s.eegData.len
                s.main()
                self.stats.append(s.stats)
        if len(self.stats) > 1:
            self._addCollections()
            self.printCollection()
        else:
            print "did not merge 1 stat"

    def _addCollections(self):
        '''[signals][channel][signal][type]'''
        self.merge = self.stats[0][SIGNALS_KEY]
        for stat in self.stats[1:]:
            self._addChannels(stat[SIGNALS_KEY])

        self._mergeChannels(len(self.stats), self.merge)

    def _addChannels(self, channels):
        for channel, value in channels.iteritems():
            self._addValues(channel, value)

    def _addValues(self, channel, dic):
        for key, field in STAT_FIELDS.iteritems():
            typ = field["type"]
            self._addValue(dic, typ, channel, RAW_KEY, key)

    def _addValue(self, dic, typ, channel, signal, key):
        old = float(self.merge[channel][signal][key])
        new = float(dic[signal][key])
        self.merge[channel][signal][key] = self._getByType(typ, old, new)

    def _getByType(self, typ, old, new):
        if typ == MAX_TYPE:
            return new if new > old else old
        if typ == MIN_TYPE:
            return new if new < old else old
        if typ in [AGGREGATION_TYPE, MEAN_TYPE]:
            return new + old
        if typ == DIFF_TYPE:
            return old-new

    def _mergeChannels(self, count, channels):
        for channel, value in channels.iteritems():
            self._mergeValues(count, channel, value)

    def _mergeValues(self, count, channel, dic):
        for key, field in STAT_FIELDS.iteritems():
            typ = field["type"]
            self._mergeValue(dic, typ, count, channel, RAW_KEY, key)

    def _mergeValue(self, dic, typ, count, channel, signal, key):
        self.merge[channel][signal][key] = self._mergeByType(typ, self.merge[channel][signal][key], count)

    def _mergeByType(self, typ, value, count):
        if typ in [MAX_TYPE, MIN_TYPE, AGGREGATION_TYPE, DIFF_TYPE]:
            return str(value)
        if typ == MEAN_TYPE:
            return str(value / float(count))

    def printCollection(self):
        general = {"dataLength": str(self.dataLen)}
        stats = {SIGNALS_KEY: self.merge, GENERAL_KEY: general}
        content = self.ssPrint.getSignalStatsString(stats)
        print content
        filePath = self.experimentDir + "merge.txt"
        self.ssPrint.saveStats(filePath, content) 
Example no. 42
 def setUp(self):
     self.dm = DrowsinessMonitor()
     self.classes = ConfigProvider().getConfig("class")
Example no. 43
:author: Paul Pasler
:organization: Reutlingen University
'''

from posdbos.util.signal_util import SignalUtil
from config.config import ConfigProvider
from numpy import ceil

# TODO should be from 0.5 to 4
DELTA_RANGE = (1, 4)
THETA_RANGE = (4, 8)
ALPHA_RANGE = (8, 13)
BETA_RANGE = (13, 30)

samplingRate = ConfigProvider().getEmotivConfig().get("samplingRate")
gammaMax = min(samplingRate / 2, 99)
GAMMA_RANGE = (30, gammaMax)


class EEGUtil(object):
    '''
    This class does useful things with EEG signals
    
    Like splitting by channel
    DELTA =  0.5 -  4hz
    THETA =  4   -  8hz
    ALPHA =  8   - 13hz
    BETA  = 13   - 30hz
    GAMMA = 30   - 99hz