# --- Example 1 (scraped separator; original text: "Ejemplo n.º 1") ---
def load_feature_from_input_list(path_to_load, feature_group_name_record_list):
    """Load the record arrays and metadata for each (group, name) pair.

    Parameters
    ----------
    path_to_load : str
        Path of the signal store (HDF5 file) to read from.
    feature_group_name_record_list : list of (group, name) tuples
        Signals to extract.

    Returns
    -------
    (extracted_features, mdata) on success, ``None`` if loading fails.
    """
    feature_group_name_extracted = feature_group_name_record_list

    try:
        # Load once, inside the try block: the original called load_signal
        # a second time *before* the try, so an I/O failure there escaped
        # the handler entirely.
        signal_structure = load_signal(path_to_load,
                                       feature_group_name_extracted)

        extracted_features = [
            get_multiple_records(
                get_one_signal_structure(signal_structure, group_name))
            for group_name in feature_group_name_extracted
        ]

        mdata = [
            get_mdata_dict(
                get_one_signal_structure(signal_structure, group_name))
            for group_name in feature_group_name_extracted
        ]

        return extracted_features, mdata

    except Exception as e:
        # Debug prints replaced with the module logger used elsewhere here.
        _logger.debug(e)
        return None  # explicit: previously fell off the end implicitly
# --- Example 2 (scraped separator; original text: "Ejemplo n.º 2") ---
def create_heart_beat_dataset(path, name, group, save_dfile=None):
    """Segment a stored ECG record into individual heart beats.

    Loads the signal and its rpeak annotations from the store, then runs
    ecg.extract_heartbeats at 1 kHz with a 0.2 s / 0.4 s window around
    each rpeak.  Returns ``(heart_beat, rpeaks)``.

    Note: ``save_dfile`` is accepted but currently unused.
    """
    raw = load_signal(path=path, name=name, group=group)  # 1 record per row
    rpeak_data = load_signal(path=path, name='rpeaks', group=group)
    # First record, transposed into a 1-D time series for segmentation.
    segment_source = raw['signal'][0, :].T
    heart_beat, rpeaks = ecg.extract_heartbeats(signal=segment_source,
                                                rpeaks=rpeak_data['signal'],
                                                sampling_rate=1000.0,
                                                before=0.2,
                                                after=0.4)
    return heart_beat, rpeaks
def load_baseline_removal(path_to_load, group_list_to_filter, name_list, sampling_rate, compute_flag=False):
    """Load the median-FIR baseline-removed signal, computing it on demand.

    If the dataset is missing -- or ``compute_flag`` forces it -- the
    baseline removal is (re)computed and the result loaded afterwards.

    Returns (records, mdata).
    """
    group_list_baseline_removal = ['medianFIR']
    # Build the (group, name) pairs once; the original rebuilt the zip three
    # times and subscripted it directly (Python-2-only behaviour).
    group_name_pairs = list(zip(group_list_baseline_removal, name_list))
    try:
        if compute_flag:
            # The original used the bare undefined name `make_new_computation`
            # to trigger a NameError; raise explicitly instead.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure_baseline_removal = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        _logger.debug(e)
        baseline_removal(path_to_load, name_list[0], group_list_to_filter[0], sampling_rate)
        signal_structure_baseline_removal = load_signal(path_to_load, group_name_pairs)

    one_signal_structure = get_one_signal_structure(signal_structure_baseline_removal, group_name_pairs[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)
    return records, mdata
# --- Example 4 (scraped separator; original text: "Ejemplo n.º 4") ---
def load_all_feature_windows_from_disk(path_to_load, feature_group_name):
    """Load every 'window_*' signal stored under a feature group.

    Returns (extracted_features, mdata), one entry per window signal.
    Re-raises the underlying exception if loading fails.
    """
    feature_group_extracted = feature_group_name
    # All signals under the group, restricted to the window datasets.
    feature_group_name_extracted = list_group_signals(
        path_to_load, feature_group_extracted)['signals']
    feature_group_name_extracted = [
        group_name for group_name in feature_group_name_extracted
        if 'window_' in group_name[1]
    ]

    try:
        signal_structure = load_signal(path_to_load,
                                       feature_group_name_extracted)
        extracted_features = [
            get_multiple_records(
                get_one_signal_structure(signal_structure, group_name))
            for group_name in feature_group_name_extracted
        ]
        mdata = [
            get_mdata_dict(
                get_one_signal_structure(signal_structure, group_name))
            for group_name in feature_group_name_extracted
        ]

        return extracted_features, mdata
    except Exception as e:
        # The original logged the error and then fell through to a
        # `return extracted_features, mdata` over undefined locals,
        # masking the real failure with a NameError.  Propagate instead.
        _logger.debug(e)
        raise
def load_rpeaks(path_to_load, group_list_to_filter, name_list, sampling_rate, compute_flag=False):
    """Load the five stored rpeak datasets, computing them on demand.

    Dataset names are ``<name>_rpeaks_0`` .. ``<name>_rpeaks_4``.
    Returns a list with one rpeak record array per label.
    """
    labels = [str(i) for i in range(5)]
    rpeaks_names = [name_list[0] + '_rpeaks_' + label for label in labels]
    group_list = group_list_to_filter * len(labels)
    group_name_pairs = list(zip(group_list, rpeaks_names))

    try:
        if compute_flag:
            # Explicit raise replaces the bare-name NameError hack.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure_rpeaks = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        # `print e` debug leftover removed; the logger already records it.
        _logger.debug(e)
        create_rpeak_dataset(path_to_load, list(zip(group_list_to_filter, name_list)), sampling_rate)
        signal_structure_rpeaks = load_signal(path_to_load, group_name_pairs)

    rpeaks = [get_multiple_records(get_one_signal_structure(signal_structure_rpeaks, group_name))
              for group_name in group_name_pairs]
    return rpeaks
# --- Example 6 (scraped separator; original text: "Ejemplo n.º 6") ---
def records_intensive(path_to_load, signal_group_name_list):
    """Return the record arrays for every signal in the given group list."""
    signal_structure = load_signal(path_to_load, signal_group_name_list)
    feature_groups_extracted = []
    for group_name in signal_group_name_list:
        one_structure = get_one_signal_structure(signal_structure, group_name)
        feature_groups_extracted.append(get_multiple_records(one_structure))

    return feature_groups_extracted
def compress(path_to_load, group_name_list):
    """Walk the given signals one at a time, loading each record.

    Signals are loaded individually to keep memory usage low; the loaded
    record is currently discarded (work-in-progress routine).
    """
    for group_name in group_name_list:
        signal_structure = load_signal(path_to_load, [group_name])
        one_signal_structure = get_one_signal_structure(signal_structure, group_name)
        record = get_multiple_records(one_signal_structure)
# --- Example 8 (scraped separator; original text: "Ejemplo n.º 8") ---
def _create_win_from_disk(path_to_load, signal_group_name, sampling_rate):
    """Load the stored 'window_<name>' dataset for a (group, name) pair.

    ``sampling_rate`` is accepted for interface symmetry but not used here.
    """
    feature_group = signal_group_name[0]
    signal_name = signal_group_name[1]

    # Window datasets are stored under the same group, prefixed 'window_'.
    window_key = (feature_group, 'window_' + signal_name)
    win_structure = load_signal(path_to_load, [window_key])
    return get_multiple_records_group_name(win_structure, window_key)
def load_feature(path_to_load, group_to_extract, feature_name, compute_flag=False, even_compress=True):
    """Load the 'features' dataset stored under ``group_to_extract + feature_name``.

    NOTE(review): the original body was syntactically invalid -- a ``try``
    with no ``except``, bare undefined ``stop`` sentinels, and an
    except-branch that referenced names belonging to a different function
    (``name_list``, ``group_list_to_filter`` ...).  This version keeps the
    recoverable intent: load the feature dataset and return (records, mdata).
    ``even_compress`` is accepted for interface compatibility but unused.
    """
    group_file = [group_to_extract + feature_name]
    group_name_pairs = list(zip(group_file, ['features']))
    try:
        if compute_flag:
            # Explicit raise replaces the bare-name NameError hack.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        # No valid recomputation path existed in the original; log and
        # propagate so the caller sees the real failure.
        _logger.debug(e)
        raise

    one_signal_structure = get_one_signal_structure(signal_structure, group_name_pairs[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)
    return records, mdata
# --- Example 10 (scraped separator; original text: "Ejemplo n.º 10") ---
def create_window_segmentation_signal(path_to_load,
                                      segmentation_signal_group_name):
    """Load the rpeak-based segmentation windows for one signal.

    Resolves the rpeak feature group for the given (group, name) pair,
    loads its record array, and returns the first record (the windows).
    """
    signal_name = segmentation_signal_group_name[1]

    # Group that holds the rpeaks corresponding to this signal.
    rpeaks_group_name = segmentation_rpeak_feature(segmentation_signal_group_name)
    rpeaks_key = (rpeaks_group_name, signal_name)

    rpeaks_structure = load_signal(path_to_load, [rpeaks_key])
    rpeaks_array = get_multiple_records(
        get_one_signal_structure(rpeaks_structure, rpeaks_key))
    return rpeaks_array[0]
def load_kalman(path_to_load, group_list_to_filter, name_list, sampling_rate, compute_flag=False):
    """Load the Kalman-smoothed ('esksmooth') signal.

    Unlike the other loaders there is no recomputation fallback: if the
    dataset is missing the error is logged and re-raised.  (The original
    swallowed the exception with ``pass`` and then crashed with a
    NameError on the never-assigned structure variable.)

    Returns (records, mdata).
    """
    group_list_esksmooth = ['esksmooth']
    group_name_pairs = list(zip(group_list_esksmooth, name_list))
    try:
        if compute_flag:
            # Explicit raise replaces the bare-name NameError hack.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        _logger.debug(e)
        raise

    one_signal_structure = get_one_signal_structure(signal_structure, group_name_pairs[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)
    # Debug prints of the records removed; use the logger if needed.
    return records, mdata
# --- Example 12 (scraped separator; original text: "Ejemplo n.º 12") ---
def load_single_feature_from_disk(path_to_load, feature_group_name_record):
    """Load one (group, name) feature record and its metadata.

    Returns ([features], [mdata]) -- single-element lists -- on success,
    or ``None`` if loading fails.
    """
    feature_group_name_extracted = [feature_group_name_record]
    try:
        signal_structure = load_signal(path_to_load,
                                       feature_group_name_extracted)
        extracted_features = [
            get_multiple_records(
                get_one_signal_structure(signal_structure, group_name))
            for group_name in feature_group_name_extracted
        ]
        mdata = [
            get_mdata_dict(
                get_one_signal_structure(signal_structure, group_name))
            for group_name in feature_group_name_extracted
        ]

        return extracted_features, mdata

    except Exception as e:
        _logger.debug(e)
        return None  # explicit: previously fell off the end implicitly
def test_load_signal():
    """Smoke-test load_signal and its accessor helpers on patient 1."""
    path_to_load = '~/Desktop/HSM_data.h5'
    patient_number = 1

    # Every seizure file for the patient.
    group_name_list = list_seizures_files(path_to_load=path_to_load,
                                          patient_number=patient_number)
    X = load_signal(path=path_to_load,
                    group_name_list=group_name_list)

    # Exercise the accessors on the first seizure file.
    one_signal_structure = X[group_name_list[0]]
    Fs = get_sampling_frequency(one_signal_structure)
    seizure_times = get_seizure_times_seconds(one_signal_structure)
    records = get_multiple_records(one_signal_structure)
    record = get_record(one_signal_structure)

    # Two hours of 1 kHz data: one column in `records`, flat in `record`.
    assert Fs == 1000
    assert seizure_times == [4051.593]
    assert np.shape(records) == (2 * Fs * 3600, 1)
    assert np.shape(record) == (2 * Fs * 3600,)
def compress(path_to_load, group_name_list):
    """Load each signal in turn (one at a time to bound memory use).

    The loaded record is currently discarded (work-in-progress routine).

    NOTE(review): in the original the loop body was not indented under the
    ``for`` statement -- a SyntaxError.  The indentation is restored here
    and the stray debug prints removed (the sibling copy of this function
    has them commented out).
    """
    # Memory loop (one signal at a time)
    for i, group_name in enumerate(group_name_list):
        signal_structure = load_signal(path_to_load, [group_name])
        one_signal_structure = get_one_signal_structure(signal_structure, group_name)
        record = get_multiple_records(one_signal_structure)


def load_feature(path_to_load, group_to_extract, feature_name, compute_flag=False, even_compress=True):
    """Load the 'features' dataset stored under ``group_to_extract + feature_name``.

    NOTE(review): the original body was syntactically invalid -- a ``try``
    with no ``except``, bare undefined ``stop`` sentinels, and an
    except-branch that referenced names belonging to a different function
    (``name_list``, ``group_list_to_filter`` ...).  This version keeps the
    recoverable intent: load the feature dataset and return (records, mdata).
    ``even_compress`` is accepted for interface compatibility but unused.
    """
    group_file = [group_to_extract + feature_name]
    group_name_pairs = list(zip(group_file, ['features']))
    try:
        if compute_flag:
            # Explicit raise replaces the bare-name NameError hack.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        # No valid recomputation path existed in the original; log and
        # propagate so the caller sees the real failure.
        _logger.debug(e)
        raise

    one_signal_structure = get_one_signal_structure(signal_structure, group_name_pairs[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)
    return records, mdata


def load_baseline_removal(path_to_load, group_list_to_filter, name_list, sampling_rate, compute_flag=False):
    """Load the median-FIR baseline-removed signal, computing it on demand.

    If the dataset is missing -- or ``compute_flag`` forces it -- the
    baseline removal is (re)computed and the result loaded afterwards.

    Returns (records, mdata).
    """
    group_list_baseline_removal = ['medianFIR']
    # Build the (group, name) pairs once; the original rebuilt the zip three
    # times and subscripted it directly (Python-2-only behaviour).
    group_name_pairs = list(zip(group_list_baseline_removal, name_list))
    try:
        if compute_flag:
            # The original used the bare undefined name `make_new_computation`
            # to trigger a NameError; raise explicitly instead.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure_baseline_removal = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        _logger.debug(e)
        baseline_removal(path_to_load, name_list[0], group_list_to_filter[0], sampling_rate)
        signal_structure_baseline_removal = load_signal(path_to_load, group_name_pairs)

    one_signal_structure = get_one_signal_structure(signal_structure_baseline_removal, group_name_pairs[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)
    return records, mdata


def load_kalman(path_to_load, group_list_to_filter, name_list, sampling_rate, compute_flag=False):
    """Load the Kalman-smoothed ('esksmooth') signal.

    Unlike the other loaders there is no recomputation fallback: if the
    dataset is missing the error is logged and re-raised.  (The original
    swallowed the exception with ``pass`` and then crashed with a
    NameError on the never-assigned structure variable.)

    Returns (records, mdata).
    """
    group_list_esksmooth = ['esksmooth']
    group_name_pairs = list(zip(group_list_esksmooth, name_list))
    try:
        if compute_flag:
            # Explicit raise replaces the bare-name NameError hack.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        _logger.debug(e)
        raise

    one_signal_structure = get_one_signal_structure(signal_structure, group_name_pairs[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)
    # Debug prints of the records removed; use the logger if needed.
    return records, mdata


def load_noise_removal(path_to_load, group_list_to_filter, name_list, sampling_rate, compute_flag=False):
    """Load the 40 Hz FIR low-pass filtered signal, computing it on demand.

    If the dataset is missing -- or ``compute_flag`` forces it -- noise
    removal is (re)computed and the result loaded afterwards.

    Returns (records, mdata).
    """
    group_list_noise_removal = ['FIR_lowpass_40hz']
    # Build the (group, name) pairs once instead of re-zipping three times.
    group_name_pairs = list(zip(group_list_noise_removal, name_list))
    try:
        if compute_flag:
            # Explicit raise replaces the bare-name NameError hack.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure_noise_removal = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        # `print e` debug leftover removed; the logger already records it.
        _logger.debug(e)
        noise_removal(path_to_load, name_list[0], group_list_to_filter[0], sampling_rate)
        signal_structure_noise_removal = load_signal(path_to_load, group_name_pairs)

    one_signal_structure = get_one_signal_structure(signal_structure_noise_removal, group_name_pairs[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)
    return records, mdata


def load_rpeaks(path_to_load, group_list_to_filter, name_list, sampling_rate, compute_flag=False):
    """Load the five stored rpeak datasets, computing them on demand.

    Dataset names are ``<name>_rpeaks_0`` .. ``<name>_rpeaks_4``.
    Returns a list with one rpeak record array per label.
    """
    labels = [str(i) for i in range(5)]
    rpeaks_names = [name_list[0] + '_rpeaks_' + label for label in labels]
    group_list = group_list_to_filter * len(labels)
    group_name_pairs = list(zip(group_list, rpeaks_names))

    try:
        if compute_flag:
            # Explicit raise replaces the bare-name NameError hack.
            raise ValueError('compute_flag set: forcing recomputation')
        signal_structure_rpeaks = load_signal(path_to_load, group_name_pairs)
    except Exception as e:
        # `print e` debug leftover removed; the logger already records it.
        _logger.debug(e)
        create_rpeak_dataset(path_to_load, list(zip(group_list_to_filter, name_list)), sampling_rate)
        signal_structure_rpeaks = load_signal(path_to_load, group_name_pairs)

    rpeaks = [get_multiple_records(get_one_signal_structure(signal_structure_rpeaks, group_name))
              for group_name in group_name_pairs]
    return rpeaks


def plot_single_model(time, model, color, start, end):
    print 'here0'
    plt.figure()
    print len(model)
    for i in xrange(0, len(model)):
        print model[i,:]
        plt.subplot(len(model), 1, i+1)
        plt.plot(model[i,:], color=color)
        # plt.axvline(x=30*60)
        plt.xlim([start, end])
        plt.ylim([-500, 500])

    plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.46)


def plot_single_model_fft(time, model, names, color, start, end):
    plt.figure()
    print len(model)
    for i in xrange(0, len(model)):
        plt.subplot(len(model), 1, i+1)
        sig = model[i, start:end]
        yf = sp.fft(sig)
        P = 1.0 / 1000
        xf = np.linspace(0, 1.0 / (2.0 * P), len(sig) // 2)
        plt.plot(
            xf, 2.0 / len(sig) * np.abs(yf[0:len(sig) // 2]), color=color)
        plt.xlim([0, 100])
        plt.ylim([0, 100])


    plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.46)



def plot_models(times, models, names, colors, start, end):
    """One subplot per model; each column of a model is a separate trace.

    All axes share x-limits [start, end] and y-limits [-500, 500].
    """
    plt.figure()
    n_models = len(models)
    for ii, (time, model, name) in enumerate(zip(times, models, names), 1):
        plt.subplot(n_models, 1, ii)
        plt.title(name)
        for ts, sig, color in zip(time.T, model.T, colors):
            plt.plot(ts, sig, color=color)
            plt.xlim([start, end])
            plt.ylim([-500, 500])

    plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.46)



def plot_models_scatter(time_scatter_models, scatter_models,
                        time_models, models, names, colors, start, end):
    """One subplot per model: line traces plus green scatter overlays."""
    plt.figure()
    grouped = zip(time_scatter_models, scatter_models,
                  time_models, models, names)
    for ii, (ts_scat_model, scat_model, time_model, model, name) in enumerate(grouped, 1):
        plt.subplot(len(models), 1, ii)
        plt.title(name)
        columns = zip(ts_scat_model.T, scat_model.T,
                      time_model.T, model.T, colors)
        for ts_scatter, sig_scatter, ts, sig, color in columns:
            plt.plot(ts, sig, color=color)
            plt.scatter(ts_scatter, sig_scatter, color='g')
            plt.xlim([start, end])
            plt.ylim([-500, 500])

    plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.46)


def fft_plot(times, models, names, colors):
    """Plot the amplitude spectrum of every column of every model.

    Assumes a 1 kHz sampling rate; one subplot per model.
    """
    plt.figure()
    for ii, (time, model, name) in enumerate(zip(times, models, names), 1):
        plt.subplot(len(models), 1, ii)
        plt.title(name)
        for ts, sig, color in zip(time.T, model.T, colors):
            spectrum = sp.fft(sig)
            sampling_period = 1.0 / 1000
            half = len(sig) // 2
            freqs = np.linspace(0, 1.0 / (2.0 * sampling_period), half)
            # Single-sided amplitude spectrum.
            plt.plot(freqs, 2.0 / len(sig) * np.abs(spectrum[0:half]), color=color)
            plt.xlim([0, 100])
            plt.ylim([0, 5])

    plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.46)



# def plot_one_model(time, model, names, color)


def shape_array(array):
    """Wrap ``array`` in an extra leading axis and transpose it.

    For a length-n 1-D input this yields an (n, 1) column vector.
    """
    wrapped = np.array([array])
    return wrapped.T


def main():
    """Experiment driver: loads the esksmooth 'sameni_parameters' record
    for one seizure and prints it.

    NOTE(review): execution is deliberately halted at the bare ``stop``
    name below (NameError debug guard); most of the body is commented-out
    exploration code kept for reference.
    """

    #signal
    time_before_seizure = 30
    time_after_seizure = 10
    # path_to_load = '~/Desktop/phisionet_seizures_new.h5'
    sampling_rate = 1000
    path_to_load = '~/Desktop/seizure_datasets_new.h5'
    name_list = [str(time_before_seizure*60) + '_' + str(time_after_seizure*60)]
    group_list_raw = ['raw']
    group_list_baseline_removal = ['medianFIR']
    group_list_noise_removal = ['FIR_lowpass_40hz']
    group_list_esksmooth = ['esksmooth']

    # Dataset group is '<before_s>_<after_s>/raw'.
    group_list = [str(
    time_before_seizure*60) + '_' + str(time_after_seizure*60) + '/raw']
    group_name_list = list_group_signals(path_to_load, group_list[0])['signals']
    # compress(path_to_load, group_name_list)

    load_extract_feature(path_to_load, group_list, 'baseline_removal', compute_flag = False)
    # print list_group_signals(path_to_load, group_list[0])
    # NOTE(review): 'stop' is undefined -- raises NameError and halts here.
    stop
    # Raw signal 
    # signal_structure_raw = load_signal(path_to_load,zip(group_list_raw, name_list))
    # one_signal_structure_raw = get_one_signal_structure(signal_structure_raw, zip(group_list_raw, name_list)[0])
    # records_raw = get_multiple_records(one_signal_structure_raw)

    # stop # ========================================================

    # # Baseline removal
    # records_baseline_removal, mdata_baseline_removal = load_baseline_removal(path_to_load, group_list_raw, name_list, sampling_rate, compute_flag = False)

    # # stop # =========================================================

    # # Noise removal 
    # records_noise_removal, mdata_noise_removal = load_noise_removal(path_to_load, group_list_baseline_removal, name_list, sampling_rate, compute_flag = False)

    # # stop # =========================================================

    # # Rpeak detection
    # rpeaks_noise_removal = load_rpeaks(path_to_load, group_list_noise_removal, name_list, sampling_rate, compute_flag = False)

    # # Noise removal kalman
    # records_kalman, mdata_kalman = load_kalman(path_to_load, group_list_baseline_removal, name_list, sampling_rate, compute_flag = False)

    # # stop # =========================================================
    # # Allocate time array
    # Fs = sampling_rate
    # N = len(records_noise_removal[0,:])
    # T = (N - 1) / Fs
    # t = np.linspace(0, T, N, endpoint=False)


    # # Visual inspection of rpeak detection
    # start = time_before_seizure*60 - 10
    # end = time_before_seizure*60 + 10
    # seizure_nr = 3

    # #rpeaks
    # rpeaks = rpeaks_noise_removal[seizure_nr]
    # rpeaks_resample = resample_rpeaks(np.diff(rpeaks), rpeaks, t)

    # #signal
    # signal_baseline_removal = records_baseline_removal[seizure_nr,:]
    # signal_noise_removal = records_noise_removal[seizure_nr,:]
    # signal_kalman = records_kalman[seizure_nr,:]


    # #plot
    # plt.subplot(3,1,1)
    # plt.xlim([start*250, end*250])
    # plt.plot(signal_kalman)
    # plt.subplot(3,1,2)
    # plt.plot(t, signal_noise_removal)
    # plt.xlim([start, end])
    # plt.subplot(3,1,3)
    # plt.plot(t, signal_baseline_removal)
    # plt.xlim([start, end])
    # plt.show()
    # # # stop

    # # visual_inspection(signal_noise_removal, rpeaks, rpeaks_resample, t, time_before_seizure,
    # #             start, end, sampling_rate)

    # # Visual inspection of rpeak detection --Kalman
    start = 1300
    end = 1400
    seizure_nr = 1

    # Fs = 250
    # N = len(records_kalman[0,:])
    # T = (N - 1) / Fs
    # t_down = np.linspace(0, T, N, endpoint=False)
    # factor = 4

    # signal_kalman_inter = interpolate_signal(signal_kalman, factor)

    # print np.shape(signal_kalman)
    # print np.shape(t)


    # #rpeaks
    # rpeaks = map(functools.partial(detect_rpeaks,
    #             sampling_rate=sampling_rate), [signal_kalman_inter])[0]
    # print rpeaks

    # rpeaks_resample = resample_rpeaks(np.diff(rpeaks), rpeaks, t)


    # visual_inspection(signal_kalman_inter, rpeaks, rpeaks_resample, t, time_before_seizure,
    #             start, end, 250)

    # beats = compute_beats(signal_kalman_inter, rpeaks)
    # tmp = time.time()
    # values = sameni_evolution(beats)
    # s = time.time() - tmp

    # values = np.asarray(values)
    # print s, 
    # print ' seconds'


    # Load the Sameni parameter record for this seizure (esksmooth group).
    name_list = ['sameni_parameters_nr' + str(seizure_nr)]
    mdata_list = ['']
    signal_structure_baseline_removal = load_signal(path_to_load, zip(group_list_esksmooth, name_list))

    one_signal_structure = get_one_signal_structure(signal_structure_baseline_removal, zip(group_list_esksmooth, name_list)[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)

    print records
    # stop
    color = 'g'

    # plt.plot(records.T[,:])
    # plt.show()

    # plot_single_model(time, records.T, color, start, end)
    # plt.show()




# Script entry point: runs on import (module-level call).
main()
def main():
    """Experiment driver: loads raw/filtered/Kalman seizure datasets,
    detects rpeaks, and plots HRV plus PCA eigenvalue evolution traces.

    NOTE(review): this redefines the ``main`` declared earlier in the file;
    being defined later, this version wins at the module-level call site.
    """

    #signal
    time_before_seizure = 30
    time_after_seizure = 10
    # path_to_load = '~/Desktop/phisionet_seizures.h5'
    sampling_rate = 1000
    path_to_load = '~/Desktop/seizure_datasets.h5'
    name_list = [
        str(time_before_seizure * 60) + '_' + str(time_after_seizure * 60)
    ]
    group_list_raw = ['raw']
    group_list_baseline_removal = ['medianFIR']
    group_list_noise_removal = ['FIR_lowpass_40hz']
    group_list_esksmooth = ['esksmooth']

    # Raw signal
    signal_structure_raw = load_signal(path_to_load,
                                       zip(group_list_raw, name_list))
    # one_signal_structure_raw = get_one_signal_structure(signal_structure, zip(group_list, name_list)[0])
    # records_raw = get_multiple_records(one_signal_structure_raw)

    # stop # ========================================================

    # Baseline removal
    records_baseline_removal, mdata_baseline_removal = load_baseline_removal(
        path_to_load,
        group_list_raw,
        name_list,
        sampling_rate,
        compute_flag=False)

    # stop # =========================================================

    # Noise removal
    records_noise_removal, mdata_noise_removal = load_noise_removal(
        path_to_load,
        group_list_baseline_removal,
        name_list,
        sampling_rate,
        compute_flag=False)

    # Noise removal kalman

    records_kalman, mdata_kalman = load_kalman(path_to_load,
                                               group_list_baseline_removal,
                                               name_list,
                                               sampling_rate,
                                               compute_flag=False)

    # stop # =========================================================

    # Rpeak detection
    # rpeaks_noise_removal = load_rpeaks(path_to_load, group_list_noise_removal, name_list, sampling_rate, compute_flag = False)

    # stop # =========================================================
    # Allocate time array
    # NOTE(review): on Python 2, (N - 1) / Fs is integer division here.
    Fs = 250
    N = len(records_kalman[0, :])
    T = (N - 1) / Fs
    t = np.linspace(0, T, N, endpoint=False)
    factor = 2

    # Visual inspection of rpeak detection
    start = 280
    end = 300
    seizure_nr = 1

    # Seizures below nr 3 use the 1 kHz baseline-removed signal directly;
    # others use the Kalman signal interpolated up to 1 kHz.
    if seizure_nr < 3:
        Fs = 1000
        N = len(records_baseline_removal[0, :])
        T = (N - 1) / Fs
        t_new = np.linspace(0, T, N, endpoint=False)
        signal_inter = records_baseline_removal[seizure_nr]
        rpeaks_RAM = ecg.hamilton_segmenter(
            signal=signal_inter, sampling_rate=sampling_rate)['rpeaks']
    else:
        signal = records_kalman[seizure_nr]
        signal_inter, t_new = interpolate(t, signal, 1000)
        rpeaks_RAM = ecg.hamilton_segmenter(
            signal=signal_inter, sampling_rate=sampling_rate)['rpeaks']

    # stop
    # Inter-beat intervals (samples between consecutive rpeaks).
    y = np.diff(rpeaks_RAM)

    # visual_inspection(signal_inter, rpeaks_RAM, y, t, time_before_seizure,
    #             start, end, sampling_rate)

    t_rpeaks = t_new[rpeaks_RAM[1:]]
    y_new, t_n = interpolate(t_rpeaks, y, 2)
    print(len(t) - 1) / 250
    print(len(t_n) - 1) / 2
    time_before_seizure = time_before_seizure * 60
    print time_before_seizure
    #rpeaks
    # rpeaks = rpeaks_noise_removal[seizure_nr]
    # rpeaks_resample = resample_rpeaks(np.diff(rpeaks), rpeaks, t)
    # rpeaks = find_rpeaks(rpeaks, start * sampling_rate,
    #     end * sampling_rate)

    #signal
    # signal_baseline_removal = records_baseline_removal[seizure_nr,:]
    # signal_noise_removal = records_noise_removal[seizure_nr,:]

    #plot
    plt.subplot(2, 1, 1)
    plt.plot(t_new, signal_inter)
    plt.plot(t_new[rpeaks_RAM], signal_inter[rpeaks_RAM], 'o')
    # plt.axvline(x=time_before_seizure*60, color = 'g')
    plt.subplot(2, 1, 2)
    plt.plot(t_n, y_new)
    plt.axvline(x=time_before_seizure, color='g')
    plt.plot()
    # plt.xlim([start, end])
    # plt.subplot(2,1,2)
    # plt.plot(t, signal_baseline_removal)
    # plt.xlim([start, end])
    plt.show()
    # # stop

    # PCA over fixed-length beats, then eigenvalue evolution over time.
    beats = compute_fixed_beats(signal_inter, rpeaks_RAM)
    print np.shape(beats[0:5])
    pca = compute_PC(beats[0:5])

    pca = np.dot(pca, beats[0:5])
    print np.shape(pca)

    evol = trace_evol_PC(beats)
    print np.shape(evol)
    evol = evol.T

    t_evol = t_new[rpeaks_RAM[6:-1]]
    print len(t_evol)

    ev = [interpolate(t_evol, eigen, 2) for eigen in evol]

    # HRV on top, then eigenvalues 1..5 (largest first), seizure onset marked.
    plt.subplot(6, 1, 1)
    plt.plot(ev[4][1], 1 * 1000 * 60 / y_new[1:len(ev[4][1]) + 1])
    plt.axvline(x=time_before_seizure, color='g')
    plt.legend(['HRV', 'Seizure onset'])
    plt.ylabel('bpm')
    plt.subplot(6, 1, 2)
    plt.plot(ev[4][1], ev[4][0])
    plt.axvline(x=time_before_seizure, color='g')
    plt.legend(['Eigen-Value 1', 'Seizure onset'])
    plt.subplot(6, 1, 3)
    plt.plot(ev[3][1], ev[3][0])
    plt.axvline(x=time_before_seizure, color='g')
    plt.legend(['Eigen-Value 2', 'Seizure onset'])
    plt.subplot(6, 1, 4)
    plt.plot(ev[2][1], ev[2][0])
    plt.axvline(x=time_before_seizure, color='g')
    plt.legend(['Eigen-Value 3', 'Seizure onset'])
    plt.subplot(6, 1, 5)
    plt.plot(ev[1][1], ev[1][0])
    plt.axvline(x=time_before_seizure, color='g')
    plt.legend(['Eigen-Value 4', 'Seizure onset'])
    plt.subplot(6, 1, 6)
    plt.plot(ev[0][1], ev[0][0])
    plt.axvline(x=time_before_seizure, color='g')
    plt.legend(['Eigen-Value 5', 'Seizure onset'])
    plt.xlabel('t (s)')
    plt.subplots_adjust(0.09, 0.1, 0.94, 0.94, 0.26, 0.46)

    plt.show()
def main():
    """Experiment driver (third redefinition of ``main`` in this file):
    loads the esksmooth 'sameni_parameters' record for one seizure.

    NOTE(review): execution is deliberately halted at the bare ``stop``
    name below (NameError debug guard); most of the body is commented-out
    exploration code kept for reference.
    """

    #signal
    time_before_seizure = 30
    time_after_seizure = 10
    # path_to_load = '~/Desktop/phisionet_seizures_new.h5'
    sampling_rate = 1000
    path_to_load = '~/Desktop/seizure_datasets_new.h5'
    name_list = [str(time_before_seizure*60) + '_' + str(time_after_seizure*60)]
    group_list_raw = ['raw']
    group_list_baseline_removal = ['medianFIR']
    group_list_noise_removal = ['FIR_lowpass_40hz']
    group_list_esksmooth = ['esksmooth']

    # Dataset group is '<before_s>_<after_s>/raw'.
    group_list = [str(
    time_before_seizure*60) + '_' + str(time_after_seizure*60) + '/raw']
    group_name_list = list_group_signals(path_to_load, group_list[0])['signals']
    # compress(path_to_load, group_name_list)

    load_extract_feature(path_to_load, group_list, 'baseline_removal', compute_flag = False)
    # print list_group_signals(path_to_load, group_list[0])
    # NOTE(review): 'stop' is undefined -- raises NameError and halts here.
    stop
    # Raw signal 
    # signal_structure_raw = load_signal(path_to_load,zip(group_list_raw, name_list))
    # one_signal_structure_raw = get_one_signal_structure(signal_structure_raw, zip(group_list_raw, name_list)[0])
    # records_raw = get_multiple_records(one_signal_structure_raw)

    # stop # ========================================================

    # # Baseline removal
    # records_baseline_removal, mdata_baseline_removal = load_baseline_removal(path_to_load, group_list_raw, name_list, sampling_rate, compute_flag = False)

    # # stop # =========================================================

    # # Noise removal 
    # records_noise_removal, mdata_noise_removal = load_noise_removal(path_to_load, group_list_baseline_removal, name_list, sampling_rate, compute_flag = False)

    # # stop # =========================================================

    # # Rpeak detection
    # rpeaks_noise_removal = load_rpeaks(path_to_load, group_list_noise_removal, name_list, sampling_rate, compute_flag = False)

    # # Noise removal kalman
    # records_kalman, mdata_kalman = load_kalman(path_to_load, group_list_baseline_removal, name_list, sampling_rate, compute_flag = False)

    # # stop # =========================================================
    # # Allocate time array
    # Fs = sampling_rate
    # N = len(records_noise_removal[0,:])
    # T = (N - 1) / Fs
    # t = np.linspace(0, T, N, endpoint=False)


    # # Visual inspection of rpeak detection
    # start = time_before_seizure*60 - 10
    # end = time_before_seizure*60 + 10
    # seizure_nr = 3

    # #rpeaks
    # rpeaks = rpeaks_noise_removal[seizure_nr]
    # rpeaks_resample = resample_rpeaks(np.diff(rpeaks), rpeaks, t)

    # #signal
    # signal_baseline_removal = records_baseline_removal[seizure_nr,:]
    # signal_noise_removal = records_noise_removal[seizure_nr,:]
    # signal_kalman = records_kalman[seizure_nr,:]


    # #plot
    # plt.subplot(3,1,1)
    # plt.xlim([start*250, end*250])
    # plt.plot(signal_kalman)
    # plt.subplot(3,1,2)
    # plt.plot(t, signal_noise_removal)
    # plt.xlim([start, end])
    # plt.subplot(3,1,3)
    # plt.plot(t, signal_baseline_removal)
    # plt.xlim([start, end])
    # plt.show()
    # # # stop

    # # visual_inspection(signal_noise_removal, rpeaks, rpeaks_resample, t, time_before_seizure,
    # #             start, end, sampling_rate)

    # # Visual inspection of rpeak detection --Kalman
    start = 1300
    end = 1400
    seizure_nr = 1

    # Fs = 250
    # N = len(records_kalman[0,:])
    # T = (N - 1) / Fs
    # t_down = np.linspace(0, T, N, endpoint=False)
    # factor = 4

    # signal_kalman_inter = interpolate_signal(signal_kalman, factor)

    # print np.shape(signal_kalman)
    # print np.shape(t)


    # #rpeaks
    # rpeaks = map(functools.partial(detect_rpeaks,
    #             sampling_rate=sampling_rate), [signal_kalman_inter])[0]
    # print rpeaks

    # rpeaks_resample = resample_rpeaks(np.diff(rpeaks), rpeaks, t)


    # visual_inspection(signal_kalman_inter, rpeaks, rpeaks_resample, t, time_before_seizure,
    #             start, end, 250)

    # beats = compute_beats(signal_kalman_inter, rpeaks)
    # tmp = time.time()
    # values = sameni_evolution(beats)
    # s = time.time() - tmp

    # values = np.asarray(values)
    # print s, 
    # print ' seconds'


    # Load the Sameni parameter record for this seizure (esksmooth group).
    name_list = ['sameni_parameters_nr' + str(seizure_nr)]
    mdata_list = ['']
    signal_structure_baseline_removal = load_signal(path_to_load, zip(group_list_esksmooth, name_list))

    one_signal_structure = get_one_signal_structure(signal_structure_baseline_removal, zip(group_list_esksmooth, name_list)[0])
    records = get_multiple_records(one_signal_structure)
    mdata = get_mdata_dict(one_signal_structure)

    print records
    # stop
    color = 'g'
# --- Example 17 (scraped separator; original text: "Ejemplo n.º 17") ---
def load_feature(path_to_load, path_to_map, feature_to_load,
                 files='just_new', sampling_rate=1000,
                 **feature_groups_required):
    """Load a named feature from an HDF5-style store, computing and
    persisting it first when it is missing.

    The feature extractor itself is dispatched dynamically: a function
    whose name equals ``feature_to_load`` must exist at module level
    (called via ``globals()[feature_to_load]``).

    Parameters
    ----------
    path_to_load : str
        Path of the signal store to read from and write to.
    path_to_map : str
        Path of the text "map" file that records which feature groups
        have already been saved (entries are looked up with
        ``get_feature_group_name_list`` and appended here).
    feature_to_load : str
        Name of the feature; also the name of the module-level function
        used to compute it.
    files : str
        One of ``'just_new'`` (process only records not yet saved),
        ``'all_new'`` (reprocess every record), or ``'existent'``
        (only try to load already-extracted features).
    sampling_rate : int
        Nominal sampling rate; not used directly in the visible body.
    **feature_groups_required
        Keyword bag inspected by substring convention on the key names:
        keys containing ``'process'`` name input feature groups; keys
        containing ``'window'`` / ``'param'`` (and not ``'group'``)
        carry window / parameter overrides (the key prefix is stripped
        with ``k[4:]`` / ``k[6:]``).  Must include
        ``feature_group_to_process``.

    Returns
    -------
    (extracted_features, mdata) when the data can be loaded/extracted;
    otherwise falls through (returns None implicitly in the visible
    portion of the body).
    """

    feature_group_to_process = feature_groups_required['feature_group_to_process']

    # Group path under which this feature's extractions live.
    feature_group_extracted = [feature_group_to_process + '/' + feature_to_load]

    # All keyword inputs that name feature groups to be processed.
    feature_groups_to_process = {k:feature_groups_required[k] for k in feature_groups_required.keys() if 'process' in k}

    # feature_group_aux = {k:feature_groups_required[k] for k in feature_groups_required.keys() if 'group' in k and 'process' not in k}

    # auxiliary_inputs = {k:feature_groups_required[k] for k in feature_groups_required.keys() if 'group' not in k and 'process' not in k}


    # Input and default parameters from feature to extract ---------------------------------------------------------------

    # Strip the conventional key prefixes ('win_' -> k[4:], 'param_' -> k[6:])
    # to recover the bare window/parameter names.
    # NOTE(review): the slicing assumes fixed-length prefixes — confirm the
    # caller's keyword naming convention matches.
    win_inputs = {k[4:]:feature_groups_required[k] for k in feature_groups_required.keys() if 'group' not in k and 'window' in k}
    param_inputs = {k[6:]:feature_groups_required[k] for k in feature_groups_required.keys() if 'group' not in k and 'param' in k}

    # Merge caller overrides with the feature's default window/params.
    win_final, param_final = get_input_and_default_params(win_inputs, param_inputs, feature_to_load)

    print param_final
    # stop
    # Encode the final window/params into the group-name suffix, delimited
    # by the $begin.../...$ sentinel markers used throughout the store.
    win_str = get_str_from_params(win_final, '_$beginwin',
                                      'endwin$_')
    param_str = get_str_from_params(param_final, '_$beginparam',
                                        'endparam$_')


    # Full group name (feature path + encoded window/params) to save under.
    feature_group_to_save = [feature_group_extracted[0] + win_str + param_str]

    # Names of feature groups already recorded in the map file.
    feature_groups_saved_list = get_feature_group_name_list(path_to_map, feature_to_load+ '#')

    # --------------------------------------------------------------------------------------------------------------------

    # Default parameters from feature to process ---------------------------------------------------------------
    # Decode the window/params that were encoded into the *input* group's name.
    win_param_to_process = get_params_from_str(
                                 feature_group_to_process,
                                '$beginwin', 
                                 'endwin$')

    param_to_process = get_params_from_str(
                                   feature_group_to_process, 
                                   '$beginparam',
                                   'endparam$')

    print win_str
    print param_str
    # ---------------------------------------------------------------------------------------------------------------------

    # stop

    print feature_group_to_save

    # stop

    # Register this feature group in the map file if it is not there yet.
    if feature_group_to_save[0] not in feature_groups_saved_list:
        print 'Saving to txt'
        group_name = feature_group_to_save[0]
        # NOTE(review): txtname is computed but never used in the visible body.
        txtname = path_to_load[:-3] + '_map.txt'

        # Append "<path up to feature name>#<suffix>!\n" so the saved-groups
        # list can later be split on '#'.
        with open(path_to_map, 'ab') as inF:
            inF.write(group_name[:group_name.index(feature_to_load) + len(feature_to_load)]
                            + '#'
                            + group_name[group_name.index(feature_to_load) + len(feature_to_load):]
                            + '!' + "\n")  # python will convert \n to os.linesep

        # NOTE(review): redundant — the with-statement above already closed inF.
        inF.close()

    print 'loading feature'
    print param_final
    print win_final

    print 'FIles!!'
    print files


    if files=='just_new':
        print 'here'
        # Only process records that do not already exist under the save group.
        names_already_processed = get_names(list_group_signals(path_to_load, feature_group_to_save[0])['signals'])
        names_to_save = get_names(list_group_signals(path_to_load, feature_group_to_process)['signals'])
        names_to_save = [name for name in names_to_save if name not in names_already_processed]
        # Pair each input group with every record name still to be processed.
        for k in feature_groups_to_process.keys():
            feature_groups_to_process[k] = [(feature_groups_to_process[k], name) for name in names_to_save]
        # stop

    # print feature_groups_to_process
    if files=='all_new':
        # Reprocess every record available under the input group.
        names_to_save = get_names(list_group_signals(path_to_load, feature_group_to_process)['signals'])
        for k in feature_groups_to_process.keys():
            feature_groups_to_process[k] = list_group_signals(path_to_load, feature_groups_to_process[k])['signals']
        names_to_save = get_names(list_group_signals(path_to_load, feature_group_to_process)['signals'])
        # print feature_groups_to_process
        #*****************IMPORTANT CHANGE***************************
        # names_to_save = get_names(list_group_signals(path_to_load, feature_group_to_process)['signals'])

    if files=='existent':
        # Load-only path: return already-extracted features if present.
        feature_group_name_extracted = list_group_signals(path_to_load, feature_group_extracted[0])['signals']
        # print feature_group_name_extracted
        try:
            signal_structure = load_signal(path_to_load, feature_group_name_extracted)
            extracted_features = [get_multiple_records(get_one_signal_structure(signal_structure, group_name)) for group_name in feature_group_name_extracted]
            mdata = [get_mdata_dict(get_one_signal_structure(signal_structure, group_name)) for group_name in feature_group_name_extracted]
            return extracted_features, mdata
        except Exception as e:
            # NOTE(review): if this except fires, execution falls through to the
            # loop below with names_to_save undefined -> NameError. Confirm
            # whether 'existent' is meant to fall back to recomputation.
            _logger.debug(e)

    # Load all signals into memory --INTENSIVE BE CAREFUL
    # for k in feature_groups_to_process.keys():
    #     signal_structure = load_signal(path_to_load, feature_groups_to_process[k])
    #     feature_groups_to_process[k] = [get_multiple_records(get_one_signal_structure(signal_structure, group_name)) for group_name in feature_groups_to_process[k]]

    # Process one record at a time to keep memory bounded.
    for i, name in enumerate(names_to_save):
        dict_to_process = {}
        for k in feature_groups_to_process.keys():
            group_name = feature_groups_to_process[k][i]
            signal_structure = load_signal(path_to_load, [group_name])
            feature_signal = [get_multiple_records(get_one_signal_structure(signal_structure, group_name))]
            dict_to_process[k] = feature_signal

        # MEMORY INTENSIVE - BE CAREFUUL *********************
        # dict_to_process = {}
        # for k in feature_groups_required.keys():
        #     dict_to_process[k] = [feature_groups_required[k][i]]
        #*****************************************************

        # print dict_to_process
        # Dynamic dispatch: the module-level function named feature_to_load
        # performs the actual extraction.
        return_object = globals()[feature_to_load](dict_to_process,
                                                      win_final, param_final,
                                                      win_param_to_process,
                                                      param_to_process)

        extracted_features = return_object[0]
        mdata_list = return_object[1]
        window_list = return_object[2]

        # Overwrite any previous extraction and its window signal for this
        # record before saving the fresh results.
        delete_signal(path_to_load, [name], feature_group_to_save)
        delete_signal(path_to_load, ['window_' + name], feature_group_to_save)
        save_signal(path_to_load, extracted_features,
                    mdata_list,
                    [name], feature_group_to_save)
        save_signal(path_to_load, window_list,
                    [''] * len(window_list),
                    ['window_' + name], feature_group_to_save)