Example #1
import pandas as pd
from timeflux.core.io import Port               # assumed import path for Port
from timeflux.helpers.port import match_events  # assumed import path for match_events


def test_match_events():
    port = Port()
    events = [['foo', ''], ['bar', ''], ['baz', ''], ['foo', '']]
    times = pd.date_range(start='2018-01-01', periods=4, freq='1s')
    port.set(events, times, ['label', 'data'])
    indices = match_events(port, 'foo').index.values.tolist()
    expected = [1514764800000000000, 1514764803000000000]
    assert indices == expected
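The test above pins down the contract of match_events: it returns only the rows whose label column equals the requested value, preserving their original timestamps as the index, and (as Example #5 shows) None when nothing matches. A minimal sketch of such a helper, assuming the port simply exposes its events as a pandas DataFrame in port.data (the function name and body are illustrative, not necessarily timeflux's actual implementation):

import pandas as pd

def match_events_sketch(port, label):
    """Return the rows of port.data whose 'label' column equals `label`,
    or None when the port carries no data or nothing matches."""
    if port.data is None or port.data.empty:
        return None
    matches = port.data[port.data['label'] == label]
    return matches if not matches.empty else None

With the data from the test, the two 'foo' rows at 2018-01-01 00:00:00 and 00:00:03 are returned, which is exactly what the expected nanosecond timestamps assert.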
Example #2
    def update(self):

        # Append to main buffer
        if self.i.data is not None:
            if not self.i.data.empty:
                if self._buffer is None:
                    self._buffer = self.i.data
                else:
                    self._buffer = self._buffer.append(self.i.data)

        # Detect onset
        matches = match_events(self.i_events, self._event_trigger)
        if matches is not None:
            for index, row in matches.iterrows():
                # Start a new epoch
                low = index - self._before
                high = index + self._after
                self._epochs.append({
                    'data': self._buffer[low:high],
                    'meta': {
                        'onset': index,
                        'context': row['data'],
                        'before': self._before.total_seconds(),
                        'after': self._after.total_seconds()
                    }
                })

        # Trim main buffer
        if self._buffer is not None:
            low = self._buffer.index[-1] - self._before
            self._buffer = self._buffer[low:]

        # Update epochs
        if self._epochs and self.i.ready():
            complete = 0
            for epoch in self._epochs:
                high = epoch['meta']['onset'] + self._after
                last = self.i.data.index[-1]
                if epoch['data'].empty:
                    low = epoch['meta']['onset'] - self._before
                    mask = (self.i.data.index >= low) & (self.i.data.index <=
                                                         high)
                else:
                    low = epoch['data'].index[-1]
                    mask = (self.i.data.index > low) & (self.i.data.index <=
                                                        high)
                # Append
                epoch['data'] = epoch['data'].append(self.i.data[mask])
                # Send if we have enough data
                if last >= high:
                    o = getattr(self, 'o_' + str(complete))
                    o.data = epoch['data']
                    o.meta = {'epoch': epoch['meta']}
                    complete += 1
            if complete > 0:
                del self._epochs[:complete]  # Unqueue
                self.o = self.o_0  # Bind default output to the first epoch
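Two details of this node are worth calling out. The epoch cut self._buffer[low:high] is plain label-based slicing on a DatetimeIndex, which is inclusive on both ends, so the window spans exactly the 'before' interval ahead of the onset up to the 'after' interval past it; and the buffer trim keeps only the last 'before' interval so a later trigger can still look back far enough. A standalone illustration of that slicing behaviour (sampling rate and window sizes are made up):

import numpy as np
import pandas as pd

times = pd.date_range('2018-01-01', periods=50, freq='100ms')
buffer = pd.DataFrame({'ch1': np.random.rand(50)}, index=times)

onset = pd.Timestamp('2018-01-01 00:00:02')
before = pd.Timedelta('500ms')
after = pd.Timedelta('1s')

# Label slicing on a sorted DatetimeIndex includes both endpoints,
# so this keeps every sample from onset-before up to onset+after.
epoch = buffer[onset - before:onset + after]
print(len(epoch))  # 16 samples: 5 before the onset, the onset itself, 10 after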
Example #3
    def update(self):

        # Let's get ready
        self._clear()

        # Are we dealing with continuous data or epochs?
        if self._dimensions is None:
            port_name = "i_training" if self.fit else "i"
            if getattr(self, port_name).ready():
                self._dimensions = 2
            elif len(list(self.iterate(port_name + "_*"))) > 0:
                self._dimensions = 3

        # Set the accumulation boundaries
        if self._accumulation_start is None:
            matches = match_events(self.i_events, self.event_start_accumulation)
            if matches is not None:
                self._accumulation_start = matches.index.values[0]
                self._status = ACCUMULATING
        if self._accumulation_stop is None:
            matches = match_events(self.i_events, self.event_stop_accumulation)
            if matches is not None:
                self._accumulation_stop = matches.index.values[0]

        # Always buffer a few seconds, in case the start event is coming late
        if self._status == IDLE:
            start = (now() - self._buffer_size).to_datetime64()
            stop = max_time()
            self._accumulate(start, stop)

        # Accumulate between boundaries
        if self._status == ACCUMULATING:
            start = self._accumulation_start
            stop = self._accumulation_stop if self._accumulation_stop else max_time()
            self._accumulate(start, stop)

        # Should we start fitting the model?
        if self._status < FITTING:
            if match_events(self.i_events, self.event_start_training) is not None:
                self._status = FITTING
                self._task = Task(
                    self._pipeline, "fit", self._X_train, self._y_train
                ).start()

        # Is the model ready?
        if self._status == FITTING:
            status = self._task.status()
            if status:
                if status["success"]:
                    self._pipeline = status["instance"]
                    self._status = READY
                    self.logger.debug(f"Model fitted in {status['time']} seconds")
                else:
                    self.logger.error(
                        f"An error occurred while fitting: {status['exception'].args[0]}"
                    )
                    self.logger.debug(
                        "\nTraceback (most recent call last):\n"
                        + "".join(status["traceback"])
                    )
                    raise WorkerInterrupt()

        # Run the pipeline
        if self._status == READY:
            self._receive()
            if self._X is not None:
                args = [self._X]
                if self.mode.startswith("fit"):
                    args.append(self._y)
                # TODO: optionally loop through epochs instead of sending them all at once
                self._out = getattr(self._pipeline, self.mode)(*args)

        # Set output streams
        self._send()
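The final dispatch, getattr(self._pipeline, self.mode)(*args), simply looks up a method of the fitted estimator by name and calls it with X (plus y for the fit* modes). With a scikit-learn pipeline standing in for self._pipeline, the pattern reduces to the sketch below (the pipeline, data, and mode are placeholders, not the node's actual configuration):

import numpy as np
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.linear_model import LogisticRegression

X = np.random.rand(20, 3)
y = np.tile([0, 1], 10)

pipeline = make_pipeline(StandardScaler(), LogisticRegression())
pipeline.fit(X, y)

mode = 'predict'  # could also be 'predict_proba', 'transform', 'fit_transform', ...
args = [X]
if mode.startswith('fit'):
    args.append(y)  # fit* modes also need the targets, as in the node above
out = getattr(pipeline, mode)(*args)  # same dynamic dispatch as self._out
print(out.shape)  # (20,)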
Example #4
    def update(self):

        # Append to main buffer
        if self.i.data is not None:
            if not self.i.data.empty:
                if self._buffer is None:
                    self._buffer = self.i.data
                else:
                    self._buffer = self._buffer.append(self.i.data)

        # Detect onset
        matches = match_events(self.i_events, self._event_trigger)
        if matches is not None:
            for index, row in matches.iterrows():
                # Start a new epoch
                low = index - self._before
                high = index + self._after
                if not self._buffer.index.is_monotonic:
                    self.logger.warning(
                        f"Index should be monotonic. Skipping epoch {row['data']}."
                    )
                    return
                self._epochs.append({
                    "data": self._buffer[low:high],
                    "meta": {
                        "onset": index,
                        "context": row["data"],
                        "before": self._before.total_seconds(),
                        "after": self._after.total_seconds(),
                    },
                })

        # Trim main buffer
        if self._buffer is not None:
            low = self._buffer.index[-1] - self._before
            self._buffer = self._buffer[low:]

        # Update epochs
        if self._epochs and self.i.ready():
            complete = 0
            for epoch in self._epochs:
                high = epoch["meta"]["onset"] + self._after
                last = self.i.data.index[-1]
                if epoch["data"].empty:
                    low = epoch["meta"]["onset"] - self._before
                    mask = (self.i.data.index >= low) & (self.i.data.index <=
                                                         high)
                else:
                    low = epoch["data"].index[-1]
                    mask = (self.i.data.index > low) & (self.i.data.index <=
                                                        high)
                # Append
                epoch["data"] = epoch["data"].append(self.i.data[mask])
                # Send if we have enough data
                if last >= high:
                    o = getattr(self, "o_" + str(complete))
                    o.data = epoch["data"]
                    o.meta = {"epoch": epoch["meta"]}
                    complete += 1
            if complete > 0:
                del self._epochs[:complete]  # Unqueue
                self.o = self.o_0  # Bind default output to the first epoch
Example #5
import pandas as pd
from timeflux.core.io import Port               # assumed import path for Port
from timeflux.helpers.port import match_events  # assumed import path for match_events


def test_match_empty():
    port = Port()
    events = [['foo', ''], ['bar', '']]
    times = pd.date_range(start='2018-01-01', periods=2, freq='1s')
    port.set(events, times, ['label', 'data'])
    assert match_events(port, 'baz') is None
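Because match_events returns None rather than an empty frame when no event carries the requested label (which is exactly what this test asserts), callers check the return value before iterating, as the update() methods in Examples #2 to #4 do. Continuing from the test above (same imports and port):

matches = match_events(port, 'baz')
if matches is not None:
    for index, row in matches.iterrows():
        print(index, row['data'])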