def classify_train(name, learning_rate, init_weight=None):
    print('start classify_train')
    net = Resnet()
    #net = SimpleVgg()
    model = net.get_model(learning_rate)
    if init_weight is not None:
        model.load_weights(init_weight)
    model.summary()
    generator = DataGenerator(name=name)
    run = '{}-{}-{}'.format(name,
                            time.localtime().tm_hour,
                            time.localtime().tm_min)
    log_dir = CLASSIFY_LOG_DIR.format(run)
    check_point = log_dir + '/' + name + '_checkpoint-{epoch:02d}-{val_loss:.4f}.hdf5'

    print("classify train round {}".format(run))
    tensorboard = TensorBoard(log_dir=log_dir, write_graph=False)
    checkpoint = ModelCheckpoint(filepath=check_point,
                                 monitor='val_loss',
                                 verbose=1,
                                 save_best_only=True)
    early_stopping = EarlyStopping(monitor='val_loss',
                                   patience=TRAIN_EARLY_STOPPING,
                                   verbose=1)

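    # Stream training and validation batches from the generator; the callbacks
    # take care of TensorBoard logging, checkpointing and early stopping.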
    model.fit_generator(
        generator.flow_classfication(mode='train'),
        steps_per_epoch=TRAIN_STEPS_PER_EPOCH,
        validation_data=generator.flow_classfication(mode='val'),
        validation_steps=TRAIN_VALID_STEPS,
        epochs=TRAIN_EPOCHS,
        verbose=1,
        callbacks=[tensorboard, checkpoint, early_stopping])
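
# A minimal usage sketch, assuming the repository's DataGenerator, network
# classes and config constants are importable; the dataset name 'lung' matches
# the defaults used elsewhere in these examples, while the learning rate and
# checkpoint path below are illustrative only.
if __name__ == '__main__':
    classify_train('lung', learning_rate=1e-4)
    # Or resume from an earlier checkpoint (path is hypothetical):
    # classify_train('lung', learning_rate=1e-5,
    #                init_weight='./logs/lung_checkpoint-10-0.1234.hdf5')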
Example #2
def predict_test_only_classification():
    net = Resnet()
    name = 'Resnet'
    model = net.get_model()
    model.load_weights(CLASS_MODEL_PATH)

    columns = [
        'seriesuid', 'coordX', 'coordY', 'coordZ', 'class', 'probability'
    ]
    df = pd.DataFrame(columns=columns)

    # Detect in two rounds: the same stride, but different starting offsets.
    print('Round 1')
    df = predict_box(np.array([16, 16, 16]), model, columns, df)
    print('Round 2')
    df = predict_box(np.array([0, 0, 0]), model, columns, df)

    df.to_csv('./output/result_only_class.csv', index=False)
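
# Why two rounds: scanning with the same stride from two starting offsets
# shifts every window by half a stride, so a nodule split across a window
# boundary in one pass tends to fall inside a window in the other. The helper
# below is a self-contained sketch of that idea along a single axis; it is not
# part of the repository, and the axis length used here is illustrative.
def _window_starts(axis_len, size=32, stride=32, offset=0):
    """Return window start positions for one axis, clamped to the volume."""
    return [min(s, axis_len - size) for s in range(offset, axis_len, stride)]

# _window_starts(100, offset=16) -> [16, 48, 68]
# _window_starts(100, offset=0)  -> [0, 32, 64, 68]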
Example #3
def predict_test(name='lung',
                 mode='test',
                 seg_model_path=SEG_LUNG_TRAIN_WEIGHT,
                 class_model_path=CLASS_LUNG_TRAIN_WEIGHT,
                 seg_thresh_hold=0.8,
                 limit=[0, 0]):
    detect_net = UNet()
    class_net = Resnet()

    detect_model = detect_net.get_model(0.1)
    detect_model.load_weights(seg_model_path)
    class_model = class_net.get_model(0.1)
    class_model.load_weights(class_model_path)

    columns = [
        'seriesuid', 'coordX', 'coordY', 'coordZ', 'class', 'probability'
    ]
    df = pd.DataFrame(columns=columns)
    for img, meta in get_files(name, mode):
        count = 0
        cubs = []
        cub_sizes = []
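        # Slide a fixed-size window through the volume with a 32-voxel stride
        # along each axis, collecting candidate cubes for batched segmentation.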
        for w in range(limit[0], img.shape[0] - limit[0], 32):
            for h in range(limit[1], img.shape[1] - limit[1], 32):
                for d in range(0, img.shape[2], 32):
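                    # Clamp the start indices so a cube never runs past the
                    # edge of the volume.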
                    if d + INPUT_DEPTH > img.shape[2]:
                        d = img.shape[2] - INPUT_DEPTH
                    if h + INPUT_HEIGHT > img.shape[1]:
                        h = img.shape[1] - INPUT_HEIGHT
                    if w + INPUT_WIDTH > img.shape[0]:
                        w = img.shape[0] - INPUT_WIDTH
                    cub = img[w:w + INPUT_WIDTH, h:h + INPUT_HEIGHT,
                              d:d + INPUT_DEPTH]

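                    # Skip cubes whose voxels are all ZERO_CENTER, i.e. empty
                    # background.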
                    if np.all(cub == ZERO_CENTER):
                        continue

                    #batch_cub = cub[np.newaxis, ..., np.newaxis]
                    cubs.append(cub)
                    cub_sizes.append([w, h, d])
        # Feed the collected cubes to the segmentation model in batches of 16.
        for k in range(0, len(cub_sizes), 16):
            t = 16
            if k + 16 >= len(cub_sizes):
                t = len(cub_sizes) - k

            batch_cub = np.array(cubs[k:k + t])
            batch_cub_sizes = cub_sizes[k:k + t]

            batch_cub = batch_cub[..., np.newaxis]
            pre_y_batch = detect_model.predict(batch_cub)
            # j indexes a single cube within the current batch.
            for j in range(pre_y_batch.shape[0]):
                pre_y = pre_y_batch[j, :, :, :, 0] > seg_thresh_hold
                #print('predicted pix:'+ str(np.sum(pre_y)))
                if np.sum(pre_y) > 0:
                    crops, crop_centers, diameter, bboxes = crop_for_class(
                        img, pre_y, np.array(batch_cub_sizes[j]))
                    #print('find:'+str(len(crop_centers)))
                    for i, center in enumerate(crop_centers):
                        crop = crops[i]
                        crop_cub = crop[np.newaxis, ..., np.newaxis]
                        class_type = class_model.predict(crop_cub)
                        class_type = class_type[0]
                        index = np.argmax(class_type)
                        if index > 0:
                            #print('Add one')
                            location = meta['origin'] + center
                            new_row = pd.DataFrame([[
                                meta['seriesuid'], location[0], location[1],
                                location[2], label_softmax_reverse[index],
                                class_type[index]
                            ]], columns=columns)
                            # DataFrame.append was removed in pandas 2.0;
                            # pd.concat is the compatible way to add the row.
                            df = pd.concat([df, new_row], ignore_index=True)
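        # Re-write the accumulated results after every scan so earlier
        # predictions are not lost if the run is interrupted.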
        df.to_csv('./output/predict_' + name + '_' + mode + '.csv',
                  index=False)
    print('finished')
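
# Hedged usage sketch: the default weight paths come from the repository's
# config; 'limit' trims that many voxels from each end of the first two axes
# before the sliding-window scan (the values below are illustrative).
# predict_test(name='lung', mode='test', seg_thresh_hold=0.8, limit=[32, 32])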