Example #1
# Assumed imports for this excerpt; model_cnn is a keras.models.Sequential
# whose earlier (convolutional) layers are not shown here.
from keras.layers import MaxPooling1D, Flatten, Dense, Dropout
from keras.optimizers import Adam

model_cnn.add(MaxPooling1D(1))

model_cnn.add(Flatten())
model_cnn.add(Dense(hidden_dim, activation='relu'))
model_cnn.add(Dropout(0.5))
model_cnn.add(Dense(numclass, activation='softmax'))

model_cnn.summary()
model_cnn.compile(loss='categorical_crossentropy',
              optimizer=Adam(lr=learning_rate),
              metrics=['acc'])

lr_level = 2
# Track the best validation metrics seen so far
best_score_by_acc = 0.
best_score_by_loss = 999.
x_val, y_val = dataset.get_all_validation_data()
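# Hypothetical helper (not in the original example): one way to build the
# per-class validation slices used in the evaluation loop below, assuming
# x_val / y_val are NumPy arrays and y_val is one-hot encoded.
import numpy as np
val_labels = np.argmax(y_val, axis=1)
x_val_slice = [x_val[val_labels == c] for c in range(numclass)]
y_val_slice = [y_val[val_labels == c] for c in range(numclass)]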
for step in range(dataset.get_step()):
    cur_step = str(step + 1) + "/" + str(dataset.get_step())
    print('\nStep ' + cur_step)
    x_train, y_train = dataset.next_train_batch()

    this_epoch_loss_and_acc = model_cnn.fit(
        x=x_train, y=y_train,
        validation_data=(x_val, y_val),
        epochs=1, verbose=2)
    # Evaluate each class separately on its validation slice
    # (x_val_slice / y_val_slice are per-class subsets of x_val / y_val,
    #  e.g. built as in the sketch above).
    for iters in range(numclass):
        history_test = model_cnn.evaluate(
            x=x_val_slice[iters],
            y=y_val_slice[iters],
            batch_size=None,
            verbose=2
        )
        print('class-%d __ loss: %.4f, acc: %.4f' % (iters, history_test[0], history_test[1]))
    # save best acc
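    # Hypothetical sketch (not part of the original example): one way the
    # best_score_by_acc / best_score_by_loss trackers defined above are used
    # to keep the best weights. The Keras 2.x history keys ('val_acc',
    # 'val_loss') and the save paths are assumptions.
    val_acc = this_epoch_loss_and_acc.history['val_acc'][-1]
    val_loss = this_epoch_loss_and_acc.history['val_loss'][-1]
    if val_acc > best_score_by_acc:
        best_score_by_acc = val_acc
        model_cnn.save('cnn_best_by_acc.h5')
    if val_loss < best_score_by_loss:
        best_score_by_loss = val_loss
        model_cnn.save('cnn_best_by_loss.h5')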
Example #2
train_size = dataset.get_train_length()
val_size = dataset.get_validation_length()
print("train size:" + str(train_size))
print("test size:" + str(val_size))
# Ceiling division: a final partial batch still counts as one step
steps_per_epoch = int((train_size - 1) / args.BATCH) + 1
print("steps_per_epoch:", steps_per_epoch)


def get_train_generator():
    # Infinite generator yielding training batches, as expected by
    # Keras generator-based training.
    while True:
        yield dataset.next_train_batch()


train_generator = get_train_generator()
val_data = dataset.get_all_validation_data()
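# Typical wiring (not shown in this excerpt): once the callbacks below are
# assembled, training would consume the infinite generator with the
# steps_per_epoch computed above, e.g. via Keras 2.x's fit_generator.
# The names model and num_epochs are assumptions:
#   model.fit_generator(train_generator,
#                       steps_per_epoch=steps_per_epoch,
#                       epochs=num_epochs,
#                       validation_data=val_data,
#                       callbacks=callbacks,
#                       verbose=1)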

callbacks = []
# Save the model after each epoch, keeping only the best one by validation accuracy
ModelCheckpoint = keras.callbacks.ModelCheckpoint(
    os.path.join(MODEL_PATH, KERAS_MODEL_NAME),
    monitor='val_acc',
    verbose=1,
    save_best_only=True,
    save_weights_only=False,
    mode='auto',
    period=1)
# Stop training early when val_acc has not improved for config.es_patience epochs
EarlyStopping = keras.callbacks.EarlyStopping(monitor='val_acc',
                                              min_delta=0,
                                              patience=config.es_patience,
                                              verbose=1,
                                              mode='auto',