Example #1
def train(config, X, y, Xval=None, yval=None):

    classes = ['N', 'V', '/', 'A', 'F',
               '~']  #,'L','R','f','j','E','a']#,'J','Q','e','S']
    Xe = np.expand_dims(X, axis=2)
    if not config.split:
        from sklearn.model_selection import train_test_split
        Xe, Xvale, y, yval = train_test_split(Xe,
                                              y,
                                              test_size=0.2,
                                              random_state=1)
        # reshape the labels to (samples, 1, n_classes) as in the else branch,
        # since the model is trained on 3-D targets
        (m, n) = y.shape
        y = y.reshape((m, 1, n))
        (mvl, nvl) = yval.shape
        yval = yval.reshape((mvl, 1, nvl))
    else:
        Xvale = np.expand_dims(Xval, axis=2)
        (m, n) = y.shape
        y = y.reshape((m, 1, n))
        (mvl, nvl) = yval.shape
        yval = yval.reshape((mvl, 1, nvl))

    if config.checkpoint_path is not None:
        # `model` does not exist yet here; load the saved model directly
        # (assumes `from keras.models import load_model`)
        model = load_model(config.checkpoint_path)
        initial_epoch = config.resume_epoch  # epoch to resume training from
    else:
        model = ECG_model(config)
        initial_epoch = 0

    mkdir_recursive('models')
    #lr_decay_callback = LearningRateSchedulerPerBatch(lambda epoch: 0.1)
    callbacks = [
        EarlyStopping(patience=config.patience, verbose=1),
        ReduceLROnPlateau(factor=0.5, patience=3, min_lr=0.01, verbose=1),
        TensorBoard(log_dir='./logs',
                    histogram_freq=0,
                    write_graph=True,
                    write_grads=False,
                    write_images=True),
        ModelCheckpoint('models/{}-latest.hdf5'.format(config.feature),
                        monitor='val_loss',
                        save_best_only=False,
                        verbose=1,
                        period=10)
        # , lr_decay_callback
    ]
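    # Callbacks: EarlyStopping and ReduceLROnPlateau both watch val_loss by
    # default -- training stops after `config.patience` epochs without
    # improvement and the learning rate is halved (floor 0.01) after 3
    # stagnant epochs; TensorBoard logs to ./logs and ModelCheckpoint saves
    # the model every 10 epochs regardless of score (save_best_only=False).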
    if config.is_train:
        model.fit(Xe,
                  y,
                  validation_data=(Xvale, yval),
                  epochs=config.epochs,
                  batch_size=config.batch,
                  callbacks=callbacks,
                  initial_epoch=initial_epoch)
    print_results(
        config,
        model,
        Xvale,
        yval,
        classes,
    )
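For reference, here is a minimal sketch of how this train function could be invoked. The config object below is a hypothetical stand-in exposing only the attributes the function itself reads (split, checkpoint_path, resume_epoch, patience, feature, is_train, epochs, batch); the real project builds its configuration elsewhere, and ECG_model/print_results will typically need further fields (e.g. input_size).

# Hypothetical usage sketch -- not part of the original snippet.
from types import SimpleNamespace
import numpy as np

config = SimpleNamespace(split=False, checkpoint_path=None, resume_epoch=0,
                         patience=10, feature='MLII', is_train=True,
                         epochs=100, batch=256)
X = np.random.rand(1000, 256)                  # 1000 beats of length 256
y = np.eye(6)[np.random.randint(0, 6, 1000)]   # one-hot labels, 6 classes
train(config, X, y)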
(m, n) = target_train.shape
target_train = target_train.reshape((m, 1, n))
(mvl, nvl) = target_test.shape
target_test = target_test.reshape((mvl, 1, nvl))

callbacks = [
    EarlyStopping(patience=config.patience, verbose=1),
    ReduceLROnPlateau(factor=0.5, patience=3, min_lr=0.01, verbose=1),
    TensorBoard(log_dir='./logs',
                histogram_freq=0,
                write_graph=True,
                write_grads=False,
                write_images=True),
    ModelCheckpoint('models/{}-vae-1-latest.hdf5'.format(config.feature),
                    monitor='val_loss',
                    save_best_only=False,
                    verbose=1,
                    period=10)
]

initial_epoch = 0
model = ECG_model(config)
model.fit(Xde,
          target_train,
          validation_data=(Xvalde, target_test),
          epochs=config.epochs,
          batch_size=config.batch,
          callbacks=callbacks,
          initial_epoch=initial_epoch)
print_results(config, model, Xvalde, target_test, classes, "vae-1-")
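This fragment presupposes denoised inputs (Xde, Xvalde) and already-reshaped targets; in the autoencoder example that follows, those arrays are produced roughly like this (a sketch reusing that example's names):

# sketch: the denoised beats fed to ECG_model above are the autoencoder's
# reconstructions of the expanded inputs (see the next example)
Xde = ae.predict(Xe)
Xvalde = ae.predict(Xvale)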
def train(config, X, y, Xval=None, yval=None):

    classes = [
        'A', 'E', 'j', 'L', 'N', 'P', 'R', 'V'
    ]  #['N','V','/','A','F','~']#,'L','R','f','j','E','a']#,'J','Q','e','S']
    Xe = np.expand_dims(X, axis=2)
    if not config.split:
        from sklearn.model_selection import train_test_split
        Xe, Xvale, y, yval = train_test_split(Xe,
                                              y,
                                              test_size=0.25,
                                              random_state=1)

        (m, n) = y.shape
        y = y.reshape((m, 1, n))
        (mvl, nvl) = yval.shape
        yval = yval.reshape((mvl, 1, nvl))

    else:
        Xvale = np.expand_dims(Xval, axis=2)
        (m, n) = y.shape
        y = y.reshape((m, 1, n))
        (mvl, nvl) = yval.shape
        yval = yval.reshape((mvl, 1, nvl))

    if config.checkpoint_path is not None:
        # load the saved model directly (assumes `from keras.models import
        # load_model`); note that `model` is rebuilt by ECG_model(config) below
        model = load_model(config.checkpoint_path)
        initial_epoch = config.resume_epoch  # epoch to resume training from
    else:
        #encoder = encoder_model(config)
        #decoder = decoder_model(config)
        initial_epoch = 0

    mkdir_recursive('models')
    #lr_decay_callback = LearningRateSchedulerPerBatch(lambda epoch: 0.1)
    callbacks = [
        EarlyStopping(patience=config.patience, verbose=1),
        ReduceLROnPlateau(factor=0.5, patience=3, min_lr=0.01, verbose=1),
        TensorBoard(log_dir='./logs',
                    histogram_freq=0,
                    write_graph=True,
                    write_grads=False,
                    write_images=True),
        ModelCheckpoint('models/{}-ae-latest.hdf5'.format(config.feature),
                        monitor='val_loss',
                        save_best_only=False,
                        verbose=1,
                        period=10)
        # , lr_decay_callback
    ]

    ae = ae_model(config)
    ae.fit(
        Xe,
        Xe,
        validation_data=(Xvale, Xvale),
        #epochs=5,
        epochs=config.ae_epochs,
        batch_size=config.batch,
        callbacks=callbacks,
        initial_epoch=initial_epoch)

    Xde = ae.predict(Xe)
    Xvalde = ae.predict(Xvale)

    from matplotlib import pyplot as plt
    xaxis = np.arange(1, 257)
    plt.plot(xaxis, Xvale[0, ...])
    plt.plot(xaxis, Xvalde[0, ...])
    plt.plot(xaxis, Xvale[1, ...])
    plt.plot(xaxis, Xvalde[1, ...])
    plt.plot(xaxis, Xvale[3, ...])
    plt.plot(xaxis, Xvalde[3, ...])
    plt.title('ae_reconstructed_beats_' + str(config.epochs))
    # save before show(): once the figure window is closed, savefig() would
    # write an empty image
    mkdir_recursive('results')
    plt.savefig('results/ae_reconstructed_beats_' + str(config.epochs) +
                '.png')
    plt.show()

    model = ECG_model(config)
    model.fit(Xde,
              y,
              validation_data=(Xvalde, yval),
              epochs=config.epochs,
              batch_size=config.batch,
              callbacks=callbacks,
              initial_epoch=initial_epoch)
    print_results(config, model, Xvalde, yval, classes, "ae-")
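ae_model is not defined in these snippets. Below is a minimal sketch of what such a beat autoencoder could look like, assuming inputs of shape (config.input_size, 1) like Xe above and the standalone keras package used elsewhere; the layer sizes and loss are illustrative assumptions, not the original architecture.

# Hypothetical sketch of ae_model -- NOT the original implementation.
from keras.layers import Input, Dense, Flatten, Reshape
from keras.models import Model


def ae_model(config):
    inp = Input(shape=(config.input_size, 1))
    x = Flatten()(inp)                                  # (input_size,)
    code = Dense(32, activation='relu')(x)              # compressed representation
    decoded = Dense(config.input_size, activation='linear')(code)
    out = Reshape((config.input_size, 1))(decoded)      # back to (input_size, 1)
    ae = Model(inp, out)
    ae.compile(optimizer='adam', loss='mse')            # reconstruction loss
    return ae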
Example #4
def train(config, X, y, Xval=None, yval=None):

    classes = [
        'A', 'E', 'j', 'L', 'N', 'P', 'R', 'V'
    ]  #['N','V','/','A','F','~']#,'L','R','f','j','E','a']#,'J','Q','e','S']
    # the LSTM autoencoder below expects inputs of shape (samples, timesteps, 1)
    X = np.expand_dims(X, axis=2)
    if not config.split:
        from sklearn.model_selection import train_test_split
        X, Xval, y, yval = train_test_split(X,
                                            y,
                                            test_size=0.25,
                                            random_state=1)

        #(m, n) = y.shape
        #y = y.reshape((m, 1, n ))
        #(mvl, nvl) = yval.shape
        #yval = yval.reshape((mvl, 1, nvl))

    else:
        Xval = np.expand_dims(Xval, axis=2)
        (m, n) = y.shape
        y = y.reshape((m, 1, n))
        (mvl, nvl) = yval.shape
        yval = yval.reshape((mvl, 1, nvl))

    if config.checkpoint_path is not None:
        # load the saved model directly (assumes `from keras.models import
        # load_model`); note that `model` is rebuilt by ECG_model(config) below
        model = load_model(config.checkpoint_path)
        initial_epoch = config.resume_epoch  # epoch to resume training from
    else:
        #encoder = encoder_model(config)
        #decoder = decoder_model(config)
        initial_epoch = 0

    mkdir_recursive('models')
    #lr_decay_callback = LearningRateSchedulerPerBatch(lambda epoch: 0.1)
    callbacks = [
        EarlyStopping(patience=config.patience, verbose=1),
        ReduceLROnPlateau(factor=0.5, patience=3, min_lr=0.01, verbose=1),
        TensorBoard(log_dir='./logs',
                    histogram_freq=0,
                    write_graph=True,
                    write_grads=False,
                    write_images=True),
        ModelCheckpoint('models/{}-latest.hdf5'.format(config.feature),
                        monitor='val_loss',
                        save_best_only=False,
                        verbose=1,
                        period=10)
        # , lr_decay_callback
    ]
    print('=====shapes======')
    print("Xe shape", X.shape)
    print("y shape", y.shape)
    print('=================')

    timesteps = config.input_size
    inputs = Input(shape=(timesteps, 1))
    # encoder: compress each beat into a single vector
    encoded = LSTM(timesteps, activation=LeakyReLU(alpha=0.2))(inputs)

    # decoder: repeat the code and unroll it back into a sequence;
    # return_sequences=True with a single unit yields an output of shape
    # (timesteps, 1) that matches the input for reconstruction
    decoded = RepeatVector(timesteps)(encoded)
    decoded = LSTM(1, activation=LeakyReLU(alpha=0.2),
                   return_sequences=True)(decoded)

    autoencoder = Model(inputs, decoded)
    #autoencoder = Model(inputs, encoded)

    adam = Adam(lr=0.1,
                beta_1=0.9,
                beta_2=0.999,
                epsilon=None,
                decay=0.0,
                amsgrad=False)
    # mean squared error is the appropriate loss for reconstructing a
    # continuous signal (categorical_crossentropy expects one-hot targets)
    autoencoder.compile(optimizer=adam,
                        loss='mse',
                        metrics=['accuracy'])
    autoencoder.summary()
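    # with input_size = 256 the summary should show:
    #   Input (256, 1) -> LSTM encoder -> code of length 256
    #   -> RepeatVector -> (256, 256) -> LSTM decoder -> (256, 1) reconstruction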

    autoencoder.fit(X,
                    X,
                    validation_data=(Xval, Xval),
                    epochs=3,
                    batch_size=config.batch,
                    callbacks=callbacks,
                    initial_epoch=initial_epoch)

    Xae = autoencoder.predict(X)
    Xvalae = autoencoder.predict(Xval)

    #print_results(config, decoder, Xvalee, yval, classes, )
    model = ECG_model(config)
    model.fit(Xae,
              y,
              validation_data=(Xvalae, yval),
              epochs=config.epochs,
              batch_size=config.batch,
              callbacks=callbacks,
              initial_epoch=initial_epoch)

    from matplotlib import pyplot as plt
    xaxis = np.arange(1, 257)
    plt.plot(xaxis, Xval[0, ...])
    plt.plot(xaxis, Xvalae[0, ...])
    plt.show()

    print_results(
        config,
        model,
        Xvalae,
        yval,
        classes,
    )
    '''
    vae = vae_model(config)

    vae.fit(Xe, Xe,
            validation_data=(Xvale, Xvale),
            epochs=config.epochs,
            batch_size=config.batch,
            callbacks=callbacks,
            initial_epoch=initial_epoch)   
    '''
    '''