Example No. 1
def setup_module_load(request):
    """
    Fixture to setup module. Performs check for presence of test files,
    creates initial batch object.
    """
    path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data')
    files = [
        "A00001.hea", "A00001.mat", "A00002.hea", "A00002.mat", "A00004.hea",
        "A00004.mat", "A00005.hea", "A00005.mat", "A00008.hea", "A00008.mat",
        "A00013.hea", "A00013.mat", "REFERENCE.csv"
    ]
    # TODO: add a more thorough check for the presence of the .hea files and
    # REFERENCE.csv

    if np.all([os.path.isfile(os.path.join(path, file)) for file in files]):
        ind = bf.FilesIndex(path=os.path.join(path, 'A*.hea'),
                            no_ext=True,
                            sort=True)
    else:
        raise FileNotFoundError("Test files not found in 'tests/data/'!")

    def teardown_module_load():
        """
        Teardown module
        """

    request.addfinalizer(teardown_module_load)
    return ind, path
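In a pytest module this fixture also needs the module imports and a fixture decorator, neither of which is shown above. Below is a minimal, condensed sketch of how such a fixture is typically registered and consumed; the `scope="module"` decorator, the fixture name `data_index`, and the consumer test are illustrative assumptions, not taken from the original suite.

import os

import pytest
import cardio.batchflow as bf


@pytest.fixture(scope="module")
def data_index():                            # hypothetical condensed variant of setup_module_load
    """Build the file index over the test records and return it with the data path."""
    path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data')
    ind = bf.FilesIndex(path=os.path.join(path, 'A*.hea'), no_ext=True, sort=True)
    return ind, path


def test_data_dir_is_complete(data_index):   # hypothetical consumer test
    ind, path = data_index
    assert len(ind.indices) == 6             # six A*.hea records are listed in the fixture above
    assert os.path.isfile(os.path.join(path, "REFERENCE.csv"))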
Example No. 2
 def test_load_wfdb_annotation(self, setup_module_load):  #pylint: disable=redefined-outer-name
     """
     Testing wfdb loader for annotation.
     """
     # Arrange
     path = setup_module_load[1]
     ind = bf.FilesIndex(path=os.path.join(path, 'sel100.hea'),
                         no_ext=True,
                         sort=True)
     batch = EcgBatch(ind)
     # Act
     batch = batch.load(fmt="wfdb",
                        components=["signal", "annotation", "meta"],
                        ann_ext="pu1")
     # Assert
     assert isinstance(batch.signal, np.ndarray)
     assert isinstance(batch.meta, np.ndarray)
     assert isinstance(batch.annotation, np.ndarray)
     assert batch.signal.shape == (1, )
     assert batch.annotation.shape == (1, )
     assert batch.meta.shape == (1, )
     assert isinstance(batch.signal[0], np.ndarray)
     assert isinstance(batch.annotation[0], dict)
     assert isinstance(batch.meta[0], dict)
     assert 'annsamp' in batch.annotation[0]
     assert 'anntype' in batch.annotation[0]
     del batch
Example No. 3
def setup_class_methods(request):
    """
    Fixture to setup class to test EcgBatch methods separately.
    """
    path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data/')
    ind = bf.FilesIndex(path=os.path.join(path, 'A*.hea'),
                        no_ext=True,
                        sort=True)
    batch_loaded = (EcgBatch(ind, unique_labels=["A", "O", "N"]).load(
        fmt="wfdb", components=["signal", "annotation", "meta"]))

    def teardown_class_methods():
        """
        Teardown class
        """

    request.addfinalizer(teardown_class_methods)
    return batch_loaded
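For context, here is a hedged sketch of a test method that could consume this fixture. Only the fixture's return value, an EcgBatch loaded over the six test records, comes from the snippets above; the test name and the exact assertions are illustrative.

 def test_fixture_returns_loaded_batch(self, setup_class_methods):  #pylint: disable=redefined-outer-name
     """
     Illustrative sketch: the fixture yields an EcgBatch with signal,
     annotation and meta components already loaded.
     """
     batch = setup_class_methods
     assert isinstance(batch, EcgBatch)
     assert isinstance(batch.signal, np.ndarray)
     assert batch.signal.shape == (6, )      # one entry per A*.hea test record
     assert isinstance(batch.annotation[0], dict)
     assert isinstance(batch.meta[0], dict)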
Example No. 4
 def test_load_wav(self, setup_module_load):  #pylint: disable=redefined-outer-name
     """
     Testing WAV loader.
     """
     # Arrange
     path = setup_module_load[1]
     ind = bf.FilesIndex(path=os.path.join(path, 'sample*.wav'),
                         no_ext=True,
                         sort=True)
     batch = EcgBatch(ind)
     # Act
     batch = batch.load(fmt="wav",
                        components=["signal", "annotation", "meta"])
     # Assert
     assert isinstance(batch.signal, np.ndarray)
     assert isinstance(batch.meta, np.ndarray)
     assert isinstance(batch.annotation, np.ndarray)
     assert batch.signal.shape == (1, )
     assert batch.annotation.shape == (1, )
     assert batch.meta.shape == (1, )
     assert isinstance(batch.signal[0], np.ndarray)
     assert isinstance(batch.annotation[0], dict)
     assert isinstance(batch.meta[0], dict)
     del batch
Example No. 5
import os
import warnings

import numpy as np

import cardio.batchflow as bf
from cardio import EcgBatch
from my_tools import calculate_old_metrics
from PanTompkinsAlgorithm import testPipeline, PanTompkinsPipeline

warnings.filterwarnings('ignore')

SIGNALS_PATH = "data\\qt-database-1.0.0"  # set path to QT database
SIGNALS_MASK = os.path.join(SIGNALS_PATH, "*.hea")

index = bf.FilesIndex(path=SIGNALS_MASK, no_ext=True, sort=True)
dtst = bf.Dataset(index, batch_class=EcgBatch)
dtst.split([0.1, 0.9])

pipeline = PanTompkinsPipeline(batch_size=len(dtst.train.indices),
                               annot="pan_tomp_annotation")
ppl_inits = (dtst.train >> pipeline).run()
batch: EcgBatch = ppl_inits.next_batch(len(dtst.train.indices))
parameters = calculate_old_metrics(batch, np.array([1], dtype=np.int64), 0,
                                   "pan_tomp_annotation")
print("Pan-Tompkins" + "\tsensitivity= " + str(parameters["sensitivity"]) +
      "\tspecificity= " + str(parameters["specificity"]))

print("end")
Example No. 6
import os

import cardio.batchflow as bf
from cardio import EcgBatch
from tensorflow.keras import optimizers
from sklearn.model_selection import train_test_split
#%%

# https://www.physionet.org/content/qtdb/1.0.0/
# where the data was saved
#SIGNALS_FOLDER = "\\qt-database-1.0.0"                  # the data was saved within the same folder

current_dir = os.getcwd()
SIGNALS_FOLDER = current_dir + "\\qt-database-1.0.0"
SIGNALS_MASK = os.path.join(SIGNALS_FOLDER, "*.hea")  # .hea is the header file describing the signal file contents and format

# When data comes from a file system, it is convenient to use 'FilesIndex'
# from the batchflow module.
index = bf.FilesIndex(path=SIGNALS_MASK,  # glob mask of the record header files
                      no_ext=True,        # strip file extensions that are not part of the id
                      sort=True)          # sort the file index

dtst  = bf.Dataset(index, batch_class=EcgBatch)  # batch_class holds the data and contains processing functions. refer to documentation 'batch classes'
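
# Possible next step (illustrative sketch only, mirroring the pattern of
# Example No. 5; the name load_ppl is an assumption): split the dataset and
# pull one batch of fully loaded records through a plain load pipeline.
dtst.split([0.9, 0.1])                     # 90% train / 10% test
load_ppl = (bf.Pipeline()
            .load(fmt="wfdb",
                  components=["signal", "annotation", "meta"],
                  ann_ext="pu1"))          # QT-database reference annotations, cf. Example No. 2
batch = (dtst.train >> load_ppl).next_batch(len(dtst.train.indices))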
    

#%%


    
def expand_annotation(annsamp_0, anntype_0, length):
    """
    Unravel annotation.

    anntype_0 and annsamp_0 have the same size.