def write_file(cls, data, file_name: str, file_mode: str) -> None:
    """write data to file, in file data will be transposed

    :param data: data to store in a file
    :type data: 2d numpy array
    :param file_name: file name to store data
    :type file_name: str
    :param file_mode: 'w' to rewrite file or 'a' to append data to file
    :type file_mode: str
    """
    check_memory_layout_row_major(data, 2)
    # accept either str or pre-encoded bytes; an explicit isinstance check
    # replaces the original bare "except:" which silently swallowed every
    # exception type (including KeyboardInterrupt/SystemExit)
    file = file_name.encode() if isinstance(file_name, str) else file_name
    mode = file_mode.encode() if isinstance(file_mode, str) else file_mode
    data_flatten = data.flatten()
    res = DataHandlerDLL.get_instance().write_file(
        data_flatten, data.shape[0], data.shape[1], file, mode)
    if res != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to write file', res)
def get_psd(cls, data: NDArray[Float64], sampling_rate: int, window: int) -> Tuple:
    """calculate PSD

    :param data: data to calc psd, len of data must be a power of 2
    :type data: NDArray[Float64]
    :param sampling_rate: sampling rate
    :type sampling_rate: int
    :param window: window function
    :type window: int
    :return: amplitude and frequency arrays of len N / 2 + 1
    :rtype: tuple
    """
    check_memory_layout_row_major(data, 1)
    num_points = data.shape[0]
    # the native FFT-based implementation requires a power-of-two length
    if num_points == 0 or (num_points & (num_points - 1)) != 0:
        raise BrainFlowError('data len is not power of 2: %d' % num_points,
                             BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value)
    out_len = int(num_points / 2 + 1)
    ampls = numpy.zeros(out_len).astype(numpy.float64)
    freqs = numpy.zeros(out_len).astype(numpy.float64)
    res = DataHandlerDLL.get_instance().get_psd(
        data, num_points, sampling_rate, window, ampls, freqs)
    if res != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to calc psd', res)
    return ampls, freqs
def get_avg_band_powers(cls, data: NDArray, channels: List, sampling_rate: int, apply_filter: bool) -> Tuple:
    """calculate avg and stddev of BandPowers across all channels

    :param data: 2d array for calculation
    :type data: NDArray
    :param channels: channels - rows of data array which should be used for calculation
    :type channels: List
    :param sampling_rate: sampling rate
    :type sampling_rate: int
    :param apply_filter: apply bandpass and bandstop filters or not
    :type apply_filter: bool
    :return: avg and stddev arrays for bandpowers
    :rtype: tuple
    """
    check_memory_layout_row_major(data, 2)
    avg_bands = numpy.zeros(5).astype(numpy.float64)
    stddev_bands = numpy.zeros(5).astype(numpy.float64)
    # vectorized replacement for the original per-element python copy loop:
    # fancy indexing selects the requested rows and flatten() yields the same
    # row-major, contiguous float64 1d layout the native library expects
    data_1d = data[channels].astype(numpy.float64).flatten()
    res = DataHandlerDLL.get_instance().get_avg_band_powers(
        data_1d, len(channels), data.shape[1], sampling_rate, int(apply_filter), avg_bands, stddev_bands)
    if res != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to get_avg_band_powers', res)
    return avg_bands, stddev_bands
def perform_wavelet_transform(cls, data: NDArray[Float64], wavelet: str, decomposition_level: int) -> Tuple:
    """perform wavelet transform

    :param data: initial data
    :type data: NDArray[Float64]
    :param wavelet: supported vals: db1..db15,haar,sym2..sym10,coif1..coif5,bior1.1,bior1.3,bior1.5,bior2.2,bior2.4,bior2.6,bior2.8,bior3.1,bior3.3,bior3.5 ,bior3.7,bior3.9,bior4.4,bior5.5,bior6.8
    :type wavelet: str
    :param decomposition_level: level of decomposition
    :type decomposition_level: int
    :return: tuple of wavelet coeffs in format [A(J) D(J) D(J-1) ..... D(1)] where J is decomposition level, A - app coeffs, D - detailed coeffs, and array with lengths for each block
    :rtype: tuple
    """
    check_memory_layout_row_major(data, 1)
    # accept either str or pre-encoded bytes; explicit isinstance replaces
    # the original bare "except:" that masked every exception type
    wavelet_func = wavelet.encode() if isinstance(wavelet, str) else wavelet
    # extra headroom (2 * (40 + 1)) for boundary-extension coefficients
    wavelet_coeffs = numpy.zeros(data.shape[0] + 2 * (40 + 1)).astype(numpy.float64)
    lengths = numpy.zeros(decomposition_level + 1).astype(numpy.int32)
    res = DataHandlerDLL.get_instance().perform_wavelet_transform(
        data, data.shape[0], wavelet_func, decomposition_level, wavelet_coeffs, lengths)
    if res != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to perform wavelet transform', res)
    # trim the oversized buffer down to the coefficients actually written
    return wavelet_coeffs[0:sum(lengths)], lengths
def perform_fft(cls, data: NDArray[Float64], window: int) -> NDArray[Complex128]:
    """perform direct fft

    :param data: data for fft, len of data must be a power of 2
    :type data: NDArray[Float64]
    :param window: window function
    :type window: int
    :return: numpy array of complex values, len of this array is N / 2 + 1
    :rtype: NDArray[Complex128]
    """
    check_memory_layout_row_major(data, 1)
    n = data.shape[0]
    # the native FFT requires a power-of-two input length
    if n == 0 or (n & (n - 1)) != 0:
        raise BrainFlowError('data len is not power of 2: %d' % n,
                             BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value)
    half_len = int(n / 2 + 1)
    temp_re = numpy.zeros(half_len).astype(numpy.float64)
    temp_im = numpy.zeros(half_len).astype(numpy.float64)
    res = DataHandlerDLL.get_instance().perform_fft(data, n, window, temp_re, temp_im)
    if res != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to perform fft', res)
    # vectorized assembly of the complex spectrum instead of the original
    # element-by-element python loop; result dtype is complex128 as before
    return (temp_re + 1j * temp_im).astype(numpy.complex128)
def perform_downsampling(cls, data: NDArray[Float64], period: int, operation: int) -> NDArray[Float64]:
    """perform data downsampling, it doesnt apply lowpass filter for you, it just aggregates several data points

    :param data: initial data
    :type data: NDArray[Float64]
    :param period: downsampling period
    :type period: int
    :param operation: int value from AggOperation enum
    :type operation: int
    :return: downsampled data
    :rtype: NDArray[Float64]
    """
    check_memory_layout_row_major(data, 1)
    bad_args = BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value
    if not isinstance(period, int):
        raise BrainFlowError('wrong type for period', bad_args)
    if not isinstance(operation, int):
        raise BrainFlowError('wrong type for operation', bad_args)
    if period <= 0:
        raise BrainFlowError('Invalid value for period', bad_args)
    # each aggregated point consumes `period` samples; any remainder is dropped
    num_output_points = data.shape[0] // period
    downsampled_data = numpy.zeros(num_output_points, dtype=numpy.float64)
    exit_code = DataHandlerDLL.get_instance().perform_downsampling(
        data, data.shape[0], period, operation, downsampled_data)
    if exit_code != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to perform downsampling', exit_code)
    return downsampled_data
def perform_bandstop(cls, data: NDArray[Float64], sampling_rate: int, center_freq: float, band_width: float, order: int, filter_type: int, ripple: float) -> None:
    """apply band stop filter to provided data

    :param data: data to filter, filter works in-place
    :type data: NDArray[Float64]
    :param sampling_rate: board's sampling rate
    :type sampling_rate: int
    :param center_freq: center frequency
    :type center_freq: float
    :param band_width: band width
    :type band_width: float
    :param order: filter order
    :type order: int
    :param filter_type: filter type from special enum
    :type filter_type: int
    :param ripple: ripple value for Chebyshev filter
    :type ripple: float
    """
    check_memory_layout_row_major(data, 1)
    bad_args = BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value
    if not isinstance(sampling_rate, int):
        raise BrainFlowError('wrong type for sampling rate', bad_args)
    if not isinstance(filter_type, int):
        raise BrainFlowError('wrong type for filter type', bad_args)
    exit_code = DataHandlerDLL.get_instance().perform_bandstop(
        data, data.shape[0], sampling_rate, center_freq,
        band_width, order, filter_type, ripple)
    if exit_code != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to apply band stop filter', exit_code)
def remove_environmental_noise(cls, data: NDArray[Float64], sampling_rate: int, noise_type: int) -> None:
    """remove env noise using notch filter

    :param data: data to filter, filter works in-place
    :type data: NDArray[Float64]
    :param sampling_rate: board's sampling rate
    :type sampling_rate: int
    :param noise_type: noise type
    :type noise_type: int
    """
    check_memory_layout_row_major(data, 1)
    # annotation fixed from "float" to "int": both the docstring and the
    # runtime isinstance check below require an int here
    if not isinstance(sampling_rate, int):
        raise BrainFlowError(
            'wrong type for sampling rate',
            BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value)
    if not isinstance(noise_type, int):
        raise BrainFlowError(
            'wrong type for noise type',
            BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value)
    res = DataHandlerDLL.get_instance().remove_environmental_noise(
        data, data.shape[0], sampling_rate, noise_type)
    if res != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to apply notch filter', res)
def detrend(cls, data: NDArray[Float64], detrend_operation: int) -> None:
    """detrend data, modifies the array in-place

    :param data: data to calc psd
    :type data: NDArray[Float64]
    :param detrend_operation: Type of detrend operation
    :type detrend_operation: int
    """
    check_memory_layout_row_major(data, 1)
    exit_code = DataHandlerDLL.get_instance().detrend(
        data, data.shape[0], detrend_operation)
    if exit_code != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to detrend data', exit_code)
def perform_wavelet_denoising(cls, data: NDArray[Float64], wavelet: str, decomposition_level: int) -> None:
    """perform wavelet denoising

    :param data: data to denoise
    :type data: NDArray[Float64]
    :param wavelet: supported vals: db1..db15,haar,sym2..sym10,coif1..coif5,bior1.1,bior1.3,bior1.5,bior2.2,bior2.4,bior2.6,bior2.8,bior3.1,bior3.3,bior3.5 ,bior3.7,bior3.9,bior4.4,bior5.5,bior6.8
    :type wavelet: str
    :param decomposition_level: decomposition level
    :type decomposition_level: int
    """
    check_memory_layout_row_major(data, 1)
    # accept either str or pre-encoded bytes; explicit isinstance replaces
    # the original bare "except:" that masked every exception type
    wavelet_func = wavelet.encode() if isinstance(wavelet, str) else wavelet
    res = DataHandlerDLL.get_instance().perform_wavelet_denoising(
        data, data.shape[0], wavelet_func, decomposition_level)
    if res != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to denoise data', res)
def get_psd_welch(cls, data: NDArray[Float64], nfft: int, overlap: int, sampling_rate: int, window: int) -> Tuple:
    """calculate PSD using Welch method

    :param data: data to calc psd
    :type data: NDArray[Float64]
    :param nfft: FFT Window size, must be power of 2
    :type nfft: int
    :param overlap: overlap of FFT Windows, must be between 0 and nfft
    :type overlap: int
    :param sampling_rate: sampling rate
    :type sampling_rate: int
    :param window: window function
    :type window: int
    :return: amplitude and frequency arrays of len N / 2 + 1
    :rtype: tuple
    """
    check_memory_layout_row_major(data, 1)
    # each FFT segment must have a power-of-two length
    if nfft == 0 or (nfft & (nfft - 1)) != 0:
        raise BrainFlowError('nfft is not power of 2: %d' % nfft,
                             BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value)
    out_len = int(nfft / 2 + 1)
    ampls = numpy.zeros(out_len).astype(numpy.float64)
    freqs = numpy.zeros(out_len).astype(numpy.float64)
    exit_code = DataHandlerDLL.get_instance().get_psd_welch(
        data, data.shape[0], nfft, overlap, sampling_rate, window, ampls, freqs)
    if exit_code != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to calc psd welch', exit_code)
    return ampls, freqs
def perform_rolling_filter(cls, data: NDArray[Float64], period: int, operation: int) -> None:
    """smooth data using moving average or median

    :param data: data to smooth, it works in-place
    :type data: NDArray[Float64]
    :param period: window size
    :type period: int
    :param operation: int value from AggOperation enum
    :type operation: int
    """
    check_memory_layout_row_major(data, 1)
    bad_args = BrainflowExitCodes.INVALID_ARGUMENTS_ERROR.value
    if not isinstance(period, int):
        raise BrainFlowError('wrong type for period', bad_args)
    if not isinstance(operation, int):
        raise BrainFlowError('wrong type for operation', bad_args)
    exit_code = DataHandlerDLL.get_instance().perform_rolling_filter(
        data, data.shape[0], period, operation)
    if exit_code != BrainflowExitCodes.STATUS_OK.value:
        raise BrainFlowError('unable to smooth data', exit_code)