Example No. 1
 def __init__(self, foldername="EEGMA/", filter_order=2, lbf=10, ubf=20,
              sampling_rate=500, download=False, download_colab=False):
     
     # super.__init__(self)
     self.fields = [] 
     self.rows = [] 
     if download:
         print("Downloading...")
         self.download(download=download_colab)
     
     with open(foldername + "subject-info.csv", 'r') as csvfile:
         csvreader = csv.reader(csvfile) 
         self.fields = next(csvreader)  
         for row in csvreader: 
             self.rows.append(row)   
         
     self.X = list()
     self.Y = list()
     
     for i in range(35):
         try:
             signals, signal_headers, header = highlevel.read_edf(foldername + "Subject" + str(i) + "_2.edf")
         except OSError:
             # file names are zero-padded for single-digit subject numbers
             signals, signal_headers, header = highlevel.read_edf(foldername + "Subject0" + str(i) + "_2.edf")
         # band-pass filter every channel between lbf and ubf Hz
         nyq = sampling_rate/2
         b, a = signal.butter(filter_order, [lbf/nyq, ubf/nyq], btype='band')
         for k in range(21):
             signals[k, :] = signal.lfilter(b, a, signals[k, :])
         self.X.append(tf.reshape(signals, (1, 21, -1, 1)))
         self.Y.append(self.Binary(np.ceil(float(self.rows[i][4]))))
     
     self.X = tf.concat(self.X, 0)
     self.Y = np.array(self.Y)
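
Note: in recent SciPy (1.2 and later) the band edges can be given directly in Hz via the fs argument, instead of normalizing by the Nyquist frequency by hand as above. A minimal, self-contained sketch of the same band-pass step; the helper name bandpass_edf and its defaults are illustrative, not part of the example above.

from scipy import signal
from pyedflib import highlevel

def bandpass_edf(path, lbf=10, ubf=20, order=2, fs=500):
    """Read an EDF file and band-pass filter every channel between lbf and ubf Hz."""
    signals, signal_headers, header = highlevel.read_edf(path)
    # with fs given, the band edges are interpreted directly in Hz (SciPy >= 1.2)
    b, a = signal.butter(order, [lbf, ubf], btype='band', fs=fs)
    return signal.lfilter(b, a, signals, axis=-1)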
Example No. 2
    def test_blocksize_auto(self):
        """ test that the blocksize parameter works as intended"""
        file = '{}.edf'.format(self.tmp_testfile)
        siglen = 256 * 155
        signals = np.random.rand(10, siglen)
        sheads = highlevel.make_signal_headers([str(x) for x in range(10)],
                                               sample_frequency=256,
                                               physical_max=1,
                                               physical_min=-1)

        valid_block_sizes = [-1, 1, 5, 31]
        for block_size in valid_block_sizes:
            highlevel.write_edf(file, signals, sheads, block_size=block_size)
            signals2, _, _ = highlevel.read_edf(file)
            np.testing.assert_allclose(signals, signals2, atol=0.01)

        with self.assertRaises(AssertionError):
            highlevel.write_edf(file, signals, sheads, block_size=61)

        with self.assertRaises(AssertionError):
            highlevel.write_edf(file, signals, sheads, block_size=-2)

        # now test non-divisor block_size
        siglen = signals.shape[-1]
        highlevel.write_edf(file, signals, sheads, block_size=60)
        signals2, _, _ = highlevel.read_edf(file)
        self.assertEqual(signals2.shape, (10, 256 * 60 * 3))
        np.testing.assert_allclose(signals2[:, :siglen], signals, atol=0.01)
        np.testing.assert_allclose(signals2[:, siglen:],
                                   np.zeros([10, 25 * 256]),
                                   atol=0.0001)
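
As the test above suggests, write_edf accepts block_size values from -1 (automatic) up to 60 seconds (61 and -2 are rejected), and a block_size that does not evenly divide the recording length leaves the last data record zero-padded on disk. A short usage sketch; the output file name is made up:

import numpy as np
from pyedflib import highlevel

signals = np.random.rand(2, 256 * 155)
sheads = highlevel.make_signal_headers(['ch0', 'ch1'], sample_frequency=256,
                                       physical_max=1, physical_min=-1)
# block_size=-1 lets pyedflib pick the data-record duration itself
highlevel.write_edf('blocksize_demo.edf', signals, sheads, block_size=-1)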
Example No. 3
def data_length(folder):
    seizure_files = [
        (str(seizure))
        for seizure in sorted(Path(folder).glob('seizures/*.edf'))
    ]
    normal_files = [(str(normal))
                    for normal in sorted(Path(folder).glob('normal/*.edf'))]
    print("Number of files with seizures: " + str(len(seizure_files)))
    print("Number of files without siezures: " + str(len(normal_files)))

    signals, _, _ = highlevel.read_edf(str(seizure_files[20]))
    print(signals[10])

    length_seizures = []
    length_normal = []
    for seizure_file in seizure_files:
        signals, _, _ = highlevel.read_edf(str(seizure_file))
        length_seizures.append(len(signals[0]))
    for normal_file in normal_files:
        signals, _, _ = highlevel.read_edf(str(normal_file))
        length_normal.append(len(signals[0]))

    dis = np.sum(np.array(length_seizures))  # data in seizure
    din = np.sum(np.array(length_normal))  # data in normal

    print("Seizure: " + str(dis) + "fr/ " + str(dis / 256) + "s/ " +
          str(dis / 256 / 3600) + "h")
    print("Normal: " + str(din) + "fr/ " + str(din / 256) + "s/ " +
          str(din / 256 / 3600) + "h")


# data_length("/home/jmsvanrijn/Documents/Afstuderen/Code/low-power-epilepsy-detection/data/processed/")
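
When only the total recording time is needed, reading just the EDF headers is much faster than loading the full signals as above. A minimal sketch, under the assumption that highlevel.read_edf_header returns a 'Duration' field in seconds (recent pyedflib versions do); the helper name is made up:

from pathlib import Path
from pyedflib import highlevel

def total_duration_hours(folder, subdir='seizures'):
    # sum per-file durations from the headers only, without reading the signals
    total_s = 0
    for edf in sorted(Path(folder).glob(subdir + '/*.edf')):
        header = highlevel.read_edf_header(str(edf))
        total_s += header['Duration']  # file duration in seconds
    return total_s / 3600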
Example No. 4
    def test_read_write_edf(self):
        startdate = datetime.now()
        t = startdate
        startdate = datetime(t.year, t.month, t.day, t.hour, t.minute,
                             t.second)

        header = highlevel.make_header(technician='tech',
                                       recording_additional='radd',
                                       patientname='name',
                                       patient_additional='padd',
                                       patientcode='42',
                                       equipment='eeg',
                                       admincode='420',
                                       gender='Male',
                                       startdate=startdate,
                                       birthdate='05.09.1980')
        annotations = [[0.01, -1, 'begin'], [0.5, -1, 'middle'],
                       [10, -1, 'end']]
        header['annotations'] = annotations
        signal_headers1 = highlevel.make_signal_headers(
            ['ch' + str(i) for i in range(5)])
        signals = np.random.rand(5, 256 * 300) * 200  #5 minutes of eeg

        success = highlevel.write_edf(self.edfplus_data_file, signals,
                                      signal_headers1, header)
        self.assertTrue(os.path.isfile(self.edfplus_data_file))
        self.assertGreater(os.path.getsize(self.edfplus_data_file), 0)
        self.assertTrue(success)

        signals2, signal_headers2, header2 = highlevel.read_edf(
            self.edfplus_data_file)

        self.assertEqual(len(signals2), 5)
        self.assertEqual(len(signals2), len(signal_headers2))
        for shead1, shead2 in zip(signal_headers1, signal_headers2):
            self.assertDictEqual(shead1, shead2)

        self.assertDictEqual(header, header2)
        np.testing.assert_allclose(signals, signals2, atol=0.01)

        signals = (signals * 100).astype(np.int8)
        success = highlevel.write_edf(self.edfplus_data_file,
                                      signals,
                                      signal_headers1,
                                      header,
                                      digital=True)
        self.assertTrue(os.path.isfile(self.edfplus_data_file))
        self.assertGreater(os.path.getsize(self.edfplus_data_file), 0)
        self.assertTrue(success)

        signals2, signal_headers2, header2 = highlevel.read_edf(
            self.edfplus_data_file, digital=True)

        self.assertEqual(len(signals2), 5)
        self.assertEqual(len(signals2), len(signal_headers2))
        for shead1, shead2 in zip(signal_headers1, signal_headers2):
            self.assertDictEqual(shead1, shead2)

        self.assertDictEqual(header, header2)
        np.testing.assert_array_equal(signals, signals2)
Example No. 5
    def test_drop_channel(self):
        signal_headers = highlevel.make_signal_headers(
            ['ch' + str(i) for i in range(5)])
        signals = np.random.rand(5, 256 * 300) * 200  #5 minutes of eeg
        highlevel.write_edf(self.drop_from, signals, signal_headers)

        dropped = highlevel.drop_channels(self.drop_from,
                                          to_keep=['ch1', 'ch2'],
                                          verbose=True)

        signals2, signal_headers, header = highlevel.read_edf(dropped)

        np.testing.assert_allclose(signals[1:3, :], signals2, atol=0.01)

        highlevel.drop_channels(self.drop_from,
                                self.drop_from[:-4] + '2.edf',
                                to_drop=['ch0', 'ch1', 'ch2'])
        signals2, signal_headers, header = highlevel.read_edf(
            self.drop_from[:-4] + '2.edf')

        np.testing.assert_allclose(signals[3:, :], signals2, atol=0.01)

        with self.assertRaises(AssertionError):
            highlevel.drop_channels(self.drop_from,
                                    to_keep=['ch1'],
                                    to_drop=['ch3'])
Example No. 6
    def test_fortran_write(self):
        # Create Fortran contiguous array
        signals = np.random.randint(-2048, 2048, [4, 5000000])
        signals = np.asfortranarray(signals)
        # Write
        highlevel.write_edf_quick(self.edfplus_data_file,
                                  signals.astype(np.int32),
                                  sfreq=250,
                                  digital=True)
        # Read and check
        signals2, _, _ = highlevel.read_edf(self.edfplus_data_file,
                                            digital=True,
                                            verbose=True)
        np.testing.assert_allclose(signals, signals2)

        # Create Fortran contiguous list
        signals = [np.random.randint(-2048, 2048, (5000000, ))] * 4
        # Write
        highlevel.write_edf_quick(self.edfplus_data_file,
                                  signals,
                                  sfreq=250,
                                  digital=True)
        # Read and check
        signals2, _, _ = highlevel.read_edf(self.edfplus_data_file,
                                            digital=True,
                                            verbose=True)
        np.testing.assert_allclose(signals, signals2)
Example No. 7
 def test_read_write_with_annotations(self):
     signals, signal_headers, header = highlevel.read_edf(self.test_generator)
     expected = [[0.0, -1, 'Recording starts'], [600.0, -1, 'Recording ends']]
     self.assertEqual(header['annotations'], expected)
     
     highlevel.write_edf(self.edfplus_data_file, signals, signal_headers, header)
     signals2, signal_header2s, header2 = highlevel.read_edf(self.edfplus_data_file)
     self.assertEqual(header['annotations'], header2['annotations'])
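
Annotations in the highlevel API are lists of [onset_in_seconds, duration, text], with -1 standing in for an unspecified duration, and they travel inside header['annotations'] as shown above. A small sketch of writing a fresh file with custom annotations, reusing only calls that already appear in these examples; the file name and labels are made up:

import numpy as np
from pyedflib import highlevel

signals = np.random.rand(2, 256 * 60)  # two channels, one minute at the default 256 Hz
signal_headers = highlevel.make_signal_headers(['ch0', 'ch1'])
header = highlevel.make_header(patientname='demo')
header['annotations'] = [[0.0, -1, 'Recording starts'],
                         [30.0, 1.5, 'stimulus'],
                         [60.0, -1, 'Recording ends']]
highlevel.write_edf('annotated_demo.edf', signals, signal_headers, header)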
Example No. 8
 def test_quick_write(self):
     signals = np.random.randint(-2048, 2048, [3, 256*60])
     highlevel.write_edf_quick(self.edfplus_data_file, signals.astype(np.int32), sfreq=256, digital=True)
     signals2, _, _ = highlevel.read_edf(self.edfplus_data_file, digital=True)
     np.testing.assert_allclose(signals, signals2)
     signals = np.random.rand(3, 256*60)
     highlevel.write_edf_quick(self.edfplus_data_file, signals, sfreq=256)
     signals2, _, _ = highlevel.read_edf(self.edfplus_data_file)
     np.testing.assert_allclose(signals, signals2, atol=0.00002)
Example No. 9
def plotEdfs(edfFilename1, edfFilename2):

    signals1, signalHeaders1, header1 = highlevel.read_edf(edfFilename1)
    signals2, signalHeaders2, header2 = highlevel.read_edf(edfFilename2)

    plt.plot(signals1[0], color="green")
    plt.plot(signals2[0], "x", color="blue")
    plt.show()

    plt.plot(signals1[1], color="orange")
    plt.plot(signals2[1], color="red")
    plt.show()
Example No. 10
    def test_read_write_edf(self):
        startdate = datetime.now()
        t = startdate
        startdate = datetime(t.year, t.month, t.day, t.hour, t.minute, t.second)

        header = highlevel.make_header(technician='tech', recording_additional='r_add',
                                       patientname='name', patient_additional='p_add',
                                       patientcode='42', equipment='eeg', admincode='120',
                                       gender='Male', startdate=startdate, birthdate='05.09.1980')
        annotations = [[0.01, -1, 'begin'], [0.5, -1, 'middle'], [10, -1, 'end']]

        signal_headers1 = highlevel.make_signal_headers(['ch'+str(i) for i in range(5)])

        for file_type in [-1,0,1,2,3]:
            if file_type in [0, 2]:
                header['annotations'] = []
            else:
                header['annotations'] = annotations

            file = '{}_{}_phys.edf'.format(self.tmp_testfile, file_type)
            signals = np.random.rand(5, 256*300)*200 #5 minutes of eeg
            success = highlevel.write_edf(file, signals, signal_headers1, header, file_type=file_type)
            self.assertTrue(os.path.isfile(file))
            self.assertGreater(os.path.getsize(file), 0)
            self.assertTrue(success)
            
            signals2, signal_headers2, header2 = highlevel.read_edf(file)
    
            self.assertEqual(len(signals2), 5)
            self.assertEqual(len(signals2), len(signal_headers2))
            for shead1, shead2 in zip(signal_headers1, signal_headers2):
                self.assertDictEqual(shead1, shead2)
            np.testing.assert_allclose(signals, signals2, atol=0.01)
            if file_type in [-1, 1, 3]:
                self.assertDictEqual(header, header2)

            file = '{}_{}_dig.edf'.format(self.tmp_testfile, file_type)
            signals = (signals*100).astype(np.int8)
            success = highlevel.write_edf(file, signals,  signal_headers1, header, digital=True)
            self.assertTrue(os.path.isfile(file))
            self.assertGreater(os.path.getsize(file), 0)
            self.assertTrue(success)
            
            signals2, signal_headers2, header2 = highlevel.read_edf(file, digital=True)
    
            self.assertEqual(len(signals2), 5)
            self.assertEqual(len(signals2), len(signal_headers2))
            np.testing.assert_array_equal(signals, signals2)
            for shead1, shead2 in zip(signal_headers1, signal_headers2):
                self.assertDictEqual(shead1, shead2)
            # EDF/BDF header writing does not correctly work yet
            if file_type in [-1, 1, 3]:
                self.assertDictEqual(header, header2)
Example No. 11
 def test_dig2phys_calc(self):
     signals_phys, shead, _ = highlevel.read_edf(self.test_generator)
     signals_dig, _, _ = highlevel.read_edf(self.test_generator, digital=True)
             
     dmin, dmax = shead[0]['digital_min'],  shead[0]['digital_max']
     pmin, pmax = shead[0]['physical_min'],  shead[0]['physical_max']
     
     # convert to physical
     signal_phys2 = highlevel.dig2phys(signals_dig, dmin, dmax, pmin, pmax)        
     np.testing.assert_allclose(signals_phys, signal_phys2)
     
     # convert to digital
     signals_dig2 = highlevel.phys2dig(signals_phys, dmin, dmax, pmin, pmax)
     signals_dig2 = np.rint(signals_dig2)
     np.testing.assert_allclose(signals_dig, signals_dig2)
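
dig2phys and phys2dig are the two directions of the linear EDF scaling between the digital and physical ranges declared in each signal header. A sketch of the same mapping written out in plain NumPy (function names are illustrative); it should agree with pyedflib's conversion up to rounding:

import numpy as np

def dig_to_phys(dig, dmin, dmax, pmin, pmax):
    bitvalue = (pmax - pmin) / (dmax - dmin)  # physical units per digital step
    return (np.asarray(dig) - dmin) * bitvalue + pmin

def phys_to_dig(phys, dmin, dmax, pmin, pmax):
    bitvalue = (pmax - pmin) / (dmax - dmin)
    return (np.asarray(phys) - pmin) / bitvalue + dmin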
Example No. 12
def get_data(pathname, filename):
    """reads data from files and returns dataframes for ecg and excel."""
    signals, signal_headers, header = highlevel.read_edf(
        os.path.join(pathname, filename))
    start_date = header['startdate']
    sample_rate = {
        header['label']: header['sample_rate']
        for header in signal_headers
    }
    ecg_timestamp = [
        start_date +
        datetime.timedelta(microseconds=1. / sample_rate['ECG'] * n * 1e6)
        for n in range(len(signals[0]))
    ]  # 0 is index of ECG
    acc_timestamp = [
        start_date + datetime.timedelta(
            microseconds=1. / sample_rate['Accelerometer_X'] * n * 1e6)
        for n in range(len(signals[1]))
    ]  # 1 is index of Accel_X
    ecg = pd.DataFrame(signals[0], index=ecg_timestamp, columns=['ECG'])
    acc = pd.DataFrame(
        np.transpose(signals[1:4]),
        index=acc_timestamp,
        columns=[header['label'] for header in signal_headers[1:4]])
    acc['mag'] = np.sqrt(
        np.square(acc.Accelerometer_X) + np.square(acc.Accelerometer_Y) +
        np.square(acc.Accelerometer_Z))
    acc['total'] = acc['Accelerometer_X'] + acc['Accelerometer_Y'] + acc[
        'Accelerometer_Z']
    grav = np.median(acc.mag)
    acc.Accelerometer_X = acc.Accelerometer_X / grav
    acc.Accelerometer_Y = acc.Accelerometer_Y / grav
    acc.Accelerometer_Z = acc.Accelerometer_Z / grav
    acc = acc.dropna()  # dropna returns a new frame; keep the result
    return ecg, acc, sample_rate
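
Building the timestamp index with a Python list comprehension is slow for long recordings; pandas can generate the same evenly spaced index in one call. A minimal sketch, assuming the same start_date and a sample rate in Hz (the helper name is made up):

import pandas as pd

def make_time_index(start_date, n_samples, sample_rate_hz):
    # one timestamp per sample, evenly spaced from start_date
    step = pd.Timedelta(seconds=1.0 / sample_rate_hz)
    return pd.date_range(start=start_date, periods=n_samples, freq=step)

# e.g. ecg = pd.DataFrame(signals[0], index=make_time_index(start_date, len(signals[0]), sample_rate['ECG']), columns=['ECG'])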
Example No. 13
def extract_ecg(edf_file, copy_folder):
    filename = os.path.basename(edf_file)
    new_edf_file = os.path.join(copy_folder, filename)
    if os.path.exists(new_edf_file): return
    try:
        header = highlevel.read_edf_header(edf_file)
    except Exception:
        print(f'error in file {edf_file}')
        return
    channels = header['channels']
    try:
        channels.remove('cs_ECG')
    except ValueError:
        print(f'warning, {edf_file} has no cs_ECG')
    ch_names = [x for x in channels if 'ECG' in x.upper()]
    if len(ch_names) > 1:
        print(
            f'Warning, these are present: {ch_names}, selecting {ch_names[0]}')
    ch_name = ch_names[0]

    signals, shead, header = highlevel.read_edf(edf_file,
                                                ch_names=[ch_name],
                                                digital=True,
                                                verbose=False)

    shead[0]['label'] = 'ECG'

    assert len(signals) > 0, 'signal empty'
    try:
        highlevel.write_edf(new_edf_file, signals, shead, header, digital=True)
    except Exception:
        # retry with digital min/max taken from the data itself
        shead[0]['digital_min'] = signals.min()
        shead[0]['digital_max'] = signals.max()
        highlevel.write_edf(new_edf_file, signals, shead, header, digital=True)
Example No. 14
 def test_read_unicode(self):
     signals = np.random.rand(3, 256*60)
     success = highlevel.write_edf_quick(self.edfplus_data_file, signals, sfreq=256)
     self.assertTrue(success)
     shutil.copy(self.edfplus_data_file, self.test_unicode)
     signals2, _, _ = highlevel.read_edf(self.test_unicode)
     self.assertTrue(os.path.isfile(self.test_unicode), 'File does not exist')
Example No. 15
def detect_energy_spikes():
    seizure_file_1 = "/run/media/jmsvanrijn/3707BCE92020A60C/Data_2010_take_2/1.0.0/chb23/chb23_09.edf"
    normal_file = "/run/media/jmsvanrijn/3707BCE92020A60C/Data_2010_take_2/1.0.0/chb23/chb23_10.edf"
    start_time = 2000 # In seconds
    end_time = 3000 # In seconds
    hz = 256
    signals, signal_headers, header = highlevel.read_edf(str(seizure_file_1))

    recorded_seizures = np.array([0, 2589, 2660, 6885, 6947, 8505, 8532, 9580, 9664, len(signals[0])/hz])*hz
    seiz_23_1 = [29, 47]
    seiz_23_2 = [[30, 50], [53, 59]]
    seiz_23_3 = [2, 90]

    y_axis = np.resize([750, 0], len(recorded_seizures))

    z = np.array(signals[2])
    # energy-like feature: z[n] * (z[n] - z[n-1]) emphasises fast, large deflections
    g = z[1:]**2 - z[1:]*z[:-1]
    # smooth with an 8-sample moving sum
    g_2 = np.convolve(g, [1, 1, 1, 1, 1, 1, 1, 1])

    plt.subplot(211)
    plt.plot(np.transpose(z))
    plt.plot(recorded_seizures, y_axis, drawstyle="steps")

    y_axis = np.resize([np.max(g), 0], len(recorded_seizures))
    plt.subplot(212)
    plt.plot(np.transpose(g))
    plt.plot(np.transpose(g_2))
    plt.plot(recorded_seizures, y_axis, drawstyle="steps")
    plt.show()
Example No. 16
    def get_data(self, name, ignore_skip_status=False, reshape=256):
        record = self.__get_record(name, ignore_skip_status=ignore_skip_status)

        self.__download(name)
        signals, _, _ = highlevel.read_edf('temp.edf')
        channels = record['Channels']
        signals = signals[channels]
        return signals.T.reshape(-1, reshape, 23)
Example No. 17
 def test_read_write_accented(self):
     signals = np.random.rand(3, 256*60)
     highlevel.write_edf_quick(self.test_accented, signals, sfreq=256)
     signals2, _, _ = highlevel.read_edf(self.test_accented)
     
     np.testing.assert_allclose(signals, signals2, atol=0.00002)
     # if os.name!='nt':
     self.assertTrue(os.path.isfile(self.test_accented), 'File does not exist')
Example No. 18
 def __getitem__(self, index):
     filename, label = self.items[index]
     # t = time.time()
     signals, signal_headers, header = highlevel.read_edf(filename)
     # print(time.time()-t)
     loc = random.randint(0, len(signals[0]) - self.window_size)
     signals_cut = signals[:, loc: loc + self.window_size]
     return signals_cut, int(label), filename
Example No. 19
def pearson_correlation(indexed_normal, index_seizures):
    frame_normal = pd.DataFrame(np.zeros((23, 23)))
    frame_seiz = pd.DataFrame(np.zeros((23, 23)))

    for i in range(len(index_seizures) - 150):
        signal_nor, _, _ = highlevel.read_edf(str(indexed_normal[i][0]))
        signal_seiz, _, _ = highlevel.read_edf(str(index_seizures[i][0]))

        frame_normal = frame_normal.add(pd.DataFrame(signal_nor[:15 * 256]))
        frame_seiz = frame_seiz.add(pd.DataFrame(signal_seiz[:15 * 256]))

        # frame_normal = frame_normal.add(pd.DataFrame(abs(signal_nor)).T.corr())
        # frame_seiz = frame_seiz.add(pd.DataFrame(abs(signal_seiz)).T.corr())

    normal_corr = frame_normal.T.corr()
    seiz_corr = frame_seiz.T.corr()

    size = 23
    high_correlation = frame_seiz.to_numpy()
    lowest = np.argmin(high_correlation.reshape(-1), axis=0)
    y_corr = math.floor(lowest / size)
    x_corr = lowest % size

    # plt.figure()
    # plt.imshow(normal_corr, origin="lower", cmap="hot", interpolation="nearest")
    # plt.colorbar()
    #
    # plt.figure()
    # plt.imshow(seiz_corr, origin="lower", cmap="hot", interpolation="nearest")
    # plt.colorbar()
    #
    # plt.figure()
    # plt.imshow(np.abs(normal_corr), origin="lower", cmap="hot", interpolation="nearest")
    # plt.colorbar()
    #
    # plt.figure()
    # plt.imshow(np.abs(seiz_corr), origin="lower", cmap="hot", interpolation="nearest")
    # plt.colorbar()
    #
    # plt.figure()
    # plt.imshow(np.abs(seiz_corr)-np.abs(normal_corr), origin="lower", cmap="hot", interpolation="nearest")
    # plt.colorbar()

    # return x_corr, y_corr

    return normal_corr
Example No. 20
    def __getitem__(self, index):
        
        filename, label, start = self.items[index]
        # t = time.time()
        signals, signal_headers, header = highlevel.read_edf(filename)
        # print(time.time()-t)

        signal_cut = signals[:, int(start)*self.window_size: (int(start)+1)*self.window_size]
        return signal_cut, int(label)
Example No. 21
def max_per_file(path, window_size):
    seizure_files = []
    normal_files = []
    for seizure in sorted(Path(path).glob('seizures/*.edf')):
        signals, _, _ = highlevel.read_edf(str(seizure))
        length_signals = int(math.floor(len(signals[0]) / window_size))
        for i in range(length_signals):
            seizure_files.append((str(seizure), str(1), str(i)))

    for normal in sorted(Path(path).glob('normal/*.edf')):
        signals, _, _ = highlevel.read_edf(str(normal))
        length_signals = int(math.floor(len(signals[0]) / window_size))
        for i in range(length_signals):
            normal_files.append((str(normal), str(0), str(i)))

    print(len(seizure_files + normal_files))

    return random.sample((seizure_files + normal_files),
                         len((seizure_files + normal_files)))
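
Both this index builder and the dataset __getitem__ above carve fixed, non-overlapping windows out of each recording; the same slicing can be done with a single NumPy reshape instead of repeated per-window reads. A minimal sketch (window_size in samples, names illustrative):

import numpy as np

def split_into_windows(signals, window_size):
    # (channels, samples) -> (n_windows, channels, window_size), dropping the incomplete tail
    signals = np.asarray(signals)
    n_windows = signals.shape[-1] // window_size
    trimmed = signals[:, :n_windows * window_size]
    return trimmed.reshape(signals.shape[0], n_windows, window_size).swapaxes(0, 1)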
Example No. 22
 def test_read_write_decimal_sample_rates(self):
     signals = np.random.randint(-2048, 2048, [3, 256 * 60])
     highlevel.write_edf_quick(self.edfplus_data_file,
                               signals.astype(np.int32),
                               sfreq=8.5,
                               digital=True)
     signals2, _, _ = highlevel.read_edf(self.edfplus_data_file,
                                         digital=True,
                                         verbose=True)
     np.testing.assert_allclose(signals, signals2)
     signals = np.random.rand(3, 256 * 60)
     highlevel.write_edf_quick(self.edfplus_data_file,
                               signals,
                               sfreq=8.5,
                               digital=False)
     signals2, _, _ = highlevel.read_edf(self.edfplus_data_file,
                                         digital=False,
                                         verbose=True)
     np.testing.assert_allclose(signals, signals2, atol=0.0001)
Example No. 23
    def _load(self, edf_file, mat_file=None):
        if mat_file is None:
            filename = ospath.basename(edf_file)[:-4]
            folder = ospath.dirname(edf_file)
            print(mat_file)
            mat_file = ospath.list_files(folder, patterns=f'{filename}*.mat')
            if len(mat_file)>0: mat_file = mat_file[0]
            if not mat_file or not os.path.exists(mat_file): 
                print('matfile {} not found'.format(mat_file))
                dir = ospath.dirname(edf_file)
                mat_file = misc.choose_file(dir, exts='mat', 
                        title='Select the corresponding MAT file by Kubios')
            
        signals, sheader, header = highlevel.read_edf(edf_file, ch_names='ECG I')
        sfreq =  sheader[0]['sample_rate']
        data = signals[0].squeeze()
        stime = header['startdate']
        self.starttime = (stime.hour * 60 + stime.minute) * 60 + stime.second
        self.data = data
        self.sfreq = sfreq
        
        try:
            mat = mat73.loadmat(mat_file, verbose=False)
            rr = mat['Res']['HRV']['Data']['RR']
            trrs = mat['Res']['HRV']['Data']['T_RR'] - self.starttime
            rrorig = mat['Res']['HRV']['Data']['T_RRorig'] - self.starttime
            corr = mat['Res']['HRV']['Data']['RRcorrtimes'] - self.starttime
            art = mat['Res']['HRV']['TimeVar']['Artifacts']
            altered = trrs[np.where(np.diff(trrs)!=rr)[0]]
            
        except Exception:
            raise FileNotFoundError('Mat file not found: {}'.format(mat_file))

        artefacts_file = edf_file[:-4] + '.npy'  
        if os.path.exists(artefacts_file):
            self.artefacts = np.load(artefacts_file)
        else:
            art = np.nan_to_num(art, nan=99)
            self.artefacts = np.repeat(art>self.threshold, repeats=2, axis=0).T.reshape([-1,2])
            self.detect_flatline()
            
        self.kubios_art = np.nan_to_num(art.squeeze())
        self.mat = mat
        self.altered = altered.squeeze()
        self.rrorig = rrorig.squeeze()
        self.trrs = trrs.squeeze()
        print(trrs[:10])
        self.corr = corr.squeeze()

        self.file = edf_file
        self.mat_file = mat_file
        self.artefacts_file = artefacts_file
        self.max_page = len(data)//sfreq//self.interval//self.gridsize
        
        self.save()
Example No. 24
def max_per_file_2(path, window_size):
    all_files = []

    for seizure in sorted(Path(path).glob('seizures/*.edf')):
        signals, _, _ = highlevel.read_edf(str(seizure))
        length_signals = int(math.floor(len(signals[0]) / window_size))
        for i in range(length_signals):
            all_files.append(
                (signals[:, i * window_size:(i + 1) * window_size], 1))

    for normal in sorted(Path(path).glob('normal/*.edf')):
        signals, _, _ = highlevel.read_edf(str(normal))
        length_signals = int(math.floor(len(signals[0]) / window_size))
        for i in range(length_signals):
            all_files.append(
                (signals[:, i * window_size:(i + 1) * window_size], 0))

    print(len(all_files))

    return random.sample(all_files, len(all_files))
Example No. 25
    def __init__(self, file_name, sample_spacing, window_size):
        signals, signal_headers, header = highlevel.read_edf(file_name)

        self.sample_spacing = sample_spacing
        self.window_size = window_size
        self.signals = signals
        self.items = range(
            int(
                len(signals[0]) / sample_spacing -
                window_size / sample_spacing - 1))
        self.length = len(self.items)
Example No. 26
def get_edf(fname, channel):
    signals, signal_headers, header = read_edf(fname)
    chn = -1
    for h in range(len(signal_headers)):
        if signal_headers[h]['label'] == channel:
            chn = h
    if chn == -1:
        raise ValueError(channel + " cannot be found in the given EDF.")
    else:
        sf = signal_headers[chn]['sample_rate']
        b = signals[chn]
        return b, sf
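
The linear scan over signal_headers can be shortened with next(), which also makes the missing-channel case explicit. An equivalent sketch plus a hypothetical call (the file name and channel label are made up):

from pyedflib.highlevel import read_edf

def get_channel(fname, channel):
    signals, signal_headers, header = read_edf(fname)
    try:
        chn = next(i for i, h in enumerate(signal_headers) if h['label'] == channel)
    except StopIteration:
        raise ValueError(channel + " cannot be found in the given EDF.")
    return signals[chn], signal_headers[chn]['sample_rate']

# ecg, sf = get_channel("recording.edf", "ECG")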
Example No. 27
 def test_read_write_diff_sfreq(self):
     
     signals = []
     sfreqs = [1, 64, 128, 200]
     sheaders = []
     for sfreq in sfreqs:
         signals.append(np.random.randint(-2048, 2048, sfreq*60).astype(np.int32))
         shead = highlevel.make_signal_header('ch{}'.format(sfreq), sample_rate=sfreq)
         sheaders.append(shead)
     highlevel.write_edf(self.edfplus_data_file, signals, sheaders, digital=True)
     signals2, sheaders2, _ = highlevel.read_edf(self.edfplus_data_file, digital=True)
     for s1, s2 in zip(signals, signals2):
         np.testing.assert_allclose(s1, s2)
Example No. 28
 def test_annotation_bytestring(self):
     header = highlevel.make_header(technician='tech',
                                    recording_additional='radd',
                                    patientname='name',
                                    patient_additional='padd',
                                    patientcode='42',
                                    equipment='eeg',
                                    admincode='420',
                                    gender='Male',
                                    birthdate='05.09.1980')
     annotations = [[0.01, b'-1', 'begin'], [0.5, b'-1', 'middle'],
                    [10, -1, 'end']]
     header['annotations'] = annotations
     signal_headers = highlevel.make_signal_headers(
         ['ch' + str(i) for i in range(3)])
     signals = np.random.rand(3, 256 * 300) * 200  #5 minutes of eeg
     highlevel.write_edf(self.edfplus_data_file, signals, signal_headers,
                         header)
     _, _, header2 = highlevel.read_edf(self.edfplus_data_file)
     highlevel.write_edf(self.edfplus_data_file, signals, signal_headers,
                         header)
     _, _, header3 = highlevel.read_edf(self.edfplus_data_file)
     self.assertEqual(header2['annotations'], header3['annotations'])
Example No. 29
def load_correlations(select_patients):
    path = "/home/jmsvanrijn/Documents/Afstuderen/Code/low-power-epilepsy-detection/data/processed/"
    index_seizures = [([
        str(i),
        str(i).split("/")[-1].split("_")[-3],
        str(i).split("/")[-1].split("_")[-2]
    ]) for i in sorted(Path(path).glob('seizures/*.edf'))]
    index_normal = [([
        str(i),
        str(i).split("/")[-1].split("_")[-3],
        str(i).split("/")[-1].split("_")[-2]
    ]) for i in sorted(Path(path).glob('normal/*.edf'))]
    seizures = []
    normal = []
    selected_seizures = []
    selected_normal = []

    for i, seizure in enumerate(index_seizures):
        try:
            patient_numbers = int(seizure[1][-2:])
        except ValueError:
            # fall back when the last two characters are not purely numeric
            patient_numbers = int(seizure[1][-3:-1])

        if patient_numbers in select_patients:
            selected_seizures.append(seizure)
            selected_normal.append(index_normal[i])

    print("AoSeizures: " + str(len(selected_seizures)))
    print("AoNormals: " + str(len(selected_normal)))

    for i in range(len(selected_normal)):
        signal_seiz, _, _ = highlevel.read_edf(str(selected_seizures[i][0]))
        normal_seiz, _, _ = highlevel.read_edf(str(selected_normal[i][0]))
        seizures.append((signal_seiz, str(1)))
        normal.append((normal_seiz, str(0)))

    return seizures, normal
Example No. 30
def load_file(k=0, foldername="EEGMA/"):

    fields = []
    rows = []
    with open(foldername + "subject-info.csv", 'r') as csvfile:
        csvreader = csv.reader(csvfile)
        fields = next(csvreader)
        for row in csvreader:
            rows.append(row)

    #

    try:
        signals, signal_headers, header = highlevel.read_edf(foldername +
                                                             "Subject" +
                                                             str(k) + "_2.edf")
    except OSError:
        # file names are zero-padded for single-digit subject numbers
        signals, signal_headers, header = highlevel.read_edf(foldername +
                                                             "Subject0" +
                                                             str(k) + "_2.edf")
    print("\n", fields[4], " | ", rows[k][0], " | ", rows[k][4])
    print("######## --- > ")

    return signals, signal_headers, header, fields, rows
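
A hypothetical call to the loader above, assuming the EEGMA/ folder contains subject-info.csv and the SubjectXX_2.edf recordings:

signals, signal_headers, header, fields, rows = load_file(k=3, foldername="EEGMA/")
print(len(signals), "channels,", len(signals[0]), "samples")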