Example #1
def eval_butterfly(n_samples, trained_batterfly=None):
    if trained_batterfly is None:
        # pick the classifier model
        filepath_model = easygui.fileopenbox(
            "select the .h5 file with the trained classifier model")
        trained_batterfly = load_model(filepath_model)
        trained_batterfly.summary()

    # pull the full, unsegmented dataset
    _, x_test, _, y_test = prepare_data(seg_len=None)

    # make sure the dataset matches the model
    output_len = trained_batterfly.output_shape[1]
    num_labels = y_test.shape[1]
    assert output_len == num_labels

    # slice it into segments via the generator
    test_generator = ecg_batches_generator_for_classifier(
        segment_len=ecg_segment_len,
        batch_size=n_samples,
        ecg_dataset=x_test,
        diagnodses=y_test)
    xy = next(test_generator)

    # have the model predict
    prediction = trained_batterfly.predict_on_batch(x=xy[0])
    print("model prediction:")
    print(prediction)
    print("ground truth:")
    print(xy[1])
    return xy[1], prediction
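
A brief usage sketch (the call below is an illustration, not part of the original example; it assumes the surrounding project's helpers are importable and a trained .h5 model file is available to pick):

# Prompts for a trained classifier .h5 file, evaluates it on 100 test segments,
# and returns the ground-truth labels together with the model's predictions.
y_true, y_pred = eval_butterfly(n_samples=100)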
Example #2
def TEST_generator_for_ae():
    segment_len = 3
    batch_size = 1
    x_train, _, _, _ = prepare_data(seg_len=None)
    train_generator = ecg_batches_generator(segment_len=segment_len,
                                            batch_size=batch_size,
                                            ecg_dataset=x_train)
    batch = next(train_generator)
    print("батч имеет форму: " + str(batch[0].shape))
Example #3
def train_batterfly(name):
    x_train, x_test, y_train, y_test = prepare_data(
        seg_len=None)  # pull the full, unsegmented dataset
    num_labels = y_train.shape[1]
    model = create_batterfly(num_labels=num_labels)
    model.summary()
    batch_size = 30

    train_generator = ecg_batches_generator_for_classifier(
        segment_len=ecg_segment_len,
        batch_size=batch_size,
        ecg_dataset=x_train,
        diagnodses=y_train)
    test_generator = ecg_batches_generator_for_classifier(
        segment_len=ecg_segment_len,
        batch_size=300,
        ecg_dataset=x_test,
        diagnodses=y_test)
    steps_per_epoch = 40
    print("батчей за эпоху будет:" + str(steps_per_epoch))
    print("в одном батче " + str(batch_size) + " кардиограмм.")

    # automatically reduce the learning rate on plateau
    #learning_rate_reduction = ReduceLROnPlateau(monitor='val_loss', factor = 0.1, patience = 5, verbose = 1)

    # adjust the LR with the SGDR schedule
    #change_lr = cosine_lr.SGDRScheduler(min_lr=0.0001, max_lr=0.1, steps_per_epoch=np.ceil(15/batch_size), lr_decay=0.8, cycle_length=1, mult_factor=1)

    #tb_callback = TensorBoard(log_dir='./butterfly_logs', histogram_freq=20, write_graph=True, write_grads=True)
    history = model.fit_generator(generator=train_generator,
                                  steps_per_epoch=steps_per_epoch,
                                  epochs=50,
                                  validation_data=test_generator,
                                  validation_steps=1)

    save_history(history, name)
    model.save(name + '.h5')

    eval_generator = ecg_batches_generator_for_classifier(
        segment_len=ecg_segment_len,
        batch_size=700,
        ecg_dataset=x_test,
        diagnodses=y_test)
    xy = next(eval_generator)

    # have the model predict
    prediction = model.predict_on_batch(x=xy[0])
    print("model prediction:")
    print(prediction)
    print("ground truth:")
    print(xy[1])
    return xy[1], prediction
Example #4
def TEST_generator_for_classifier():
    segment_len = 3
    batch_size = 10
    x_train, _, y_train, _ = prepare_data(seg_len=None)
    train_generator = ecg_batches_generator_for_classifier(
        segment_len=segment_len,
        batch_size=batch_size,
        ecg_dataset=x_train,
        diagnodses=y_train)
    batch_xy = next(train_generator)
    print("батч имеет форму: \\n x.shape=" + str(batch_xy[0].shape))
    print("y.shape=" + str(batch_xy[1].shape))
    pprint.pprint(batch_xy[1])
Example #5
from keras import backend as K
from keras.callbacks import (TensorBoard)
from keras.layers import (LSTM, Bidirectional, Dense, Conv1D, MaxPooling1D,
                          UpSampling1D)
from keras.models import (Sequential, load_model)

from dataset_getter import prepare_data

num_decoder_tokens = 4
Size = 2992
num_cores = 32
kernel = 8
num_latent = 50
leads = 12

x_train, x_test, y_train, y_test = prepare_data(seg_len=None)
# cut off the parts without annotation
x_train = x_train[:, 1000:4000, :, :]
x_test = x_test[:, 1000:4000, :, :]
y_train = y_train[:, 1000:4000]
y_test = y_test[:, 1000:4000]


def one_hot(y):
    """
    converts a non-binary mask to one-hot encoding
    :param y:
    :return:
    """
    y_new = []
    for i in range(y.shape[0]):
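
The one_hot function above breaks off after the loop header. Below is a minimal sketch of how the body could be completed, assuming keras.utils.to_categorical and the four classes declared as num_decoder_tokens; the completion (and the name one_hot_sketch) is an assumption, not the original code:

import numpy as np
from keras.utils import to_categorical

def one_hot_sketch(y, num_classes=4):  # 4 matches num_decoder_tokens above
    """Converts an integer mask of shape (samples, timesteps) to one-hot."""
    y_new = []
    for i in range(y.shape[0]):
        # to_categorical expands each integer label into a one-hot vector
        y_new.append(to_categorical(y[i], num_classes=num_classes))
    return np.array(y_new)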
Example #6
def prepare_data_for_canterpillar(segment_len=None):
    x_train, x_test, _, _ = prepare_data(segment_len)
    return x_train, x_test