Example #1
    def load_data(self, data_path: str) -> list:
        data_list = get_datalist(data_path)
        t_data_list = []
        for img_path, label_path in data_list:
            data = self._get_annotation(label_path)
            # keep only samples that contain at least one text polygon
            if len(data['text_polys']) > 0:
                item = {
                    'img_path': img_path,
                    'img_name': pathlib.Path(img_path).stem
                }
                # e.g. {'img_path': '/home/share/gaoluoluo/dbnet/datasets/train/img/ic15_img0.jpg', 'img_name': 'ic15_img0'}
                item.update(data)
                t_data_list.append(item)
            else:
                print('there is no suitable bbox in {}'.format(label_path))
        return t_data_list
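
Every example on this page leans on a get_datalist helper that is not shown. For detection-style loaders like the one above, it has to yield (image path, label path) pairs; a minimal sketch under that assumption, with a purely illustrative index-file format:

def get_datalist(data_path):
    # Hypothetical index file: one `img_path\tlabel_path` pair per line.
    data_list = []
    with open(data_path, encoding='utf-8') as f:
        for line in f:
            img_path, label_path = line.rstrip('\n').split('\t')
            data_list.append((img_path, label_path))
    return data_list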
Example #2
 def load_data(self, data_path: str) -> list:
     data_list = get_datalist(data_path)
     t_data_list = []
     for img_path, label_path in data_list:
         data = self._get_annotation(label_path)
         if len(data['text_polys']) > 0:
             item = {'img_path': img_path, 'img_name': pathlib.Path(img_path).stem}
             item.update(data)
             t_data_list.append(item)
         else:
             print('there is no suitable bbox in {}'.format(label_path))
     return t_data_list
Example #3
def main():
    opt = parse_opts()
    opt.inputsize = [int(item) for item in opt.input_list.split(',')]
    torch.cuda.set_device(opt.gpuid)
    device = torch.device("cuda:%d" % opt.gpuid)
    folder = 'fold_%d_%sresult' % (opt.fold, opt.model) + time.strftime(
        "%Y_%m_%d_%H_%M_%S", time.localtime())
    save_path = os.path.join(opt.result_path, folder)

    model = get_model(opt)
    model.to(device)
    trainlist, vallist = get_datalist(opt.fold)
    trainset = DATASET(trainlist)
    valset = DATASET_VAL(vallist)

    train_loader = torch.utils.data.DataLoader(trainset,
                                               batch_size=7,
                                               num_workers=8,
                                               shuffle=True)
    val_loader = torch.utils.data.DataLoader(valset,
                                             batch_size=2,
                                             num_workers=6,
                                             shuffle=False)
    optimizer = RAdam(model.parameters(), lr=3e-4, weight_decay=1e-5)
    # despite the name, this tracks a validation distance (lower is better), so start high
    best_meandice = 100

    for epoch in range(50):
        adjust_learning_rate(optimizer, epoch, 3e-4, 5)
        train(model, optimizer, train_loader, epoch, 50, device)

        mean_dist = val(model, val_loader, epoch, device)
        if mean_dist < best_meandice:
            best_meandice = mean_dist
            check_mkdirs(save_path)

            # only checkpoint after the first two epochs
            if epoch > 1:
                print('save model...:' +
                      os.path.join(save_path, '%.4f.pkl' % best_meandice))
                torch.save(
                    model.state_dict(),
                    os.path.join(save_path, '%.4f.pkl' % best_meandice))
        print('Best Mean Dice: %.4f' % best_meandice)

    os.rename(save_path, save_path + '_%.4f' % best_meandice)
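
The adjust_learning_rate(optimizer, epoch, 3e-4, 5) helper is not shown on this page. Below is a minimal step-decay sketch that fits the call signature (initial LR 3e-4, step of 5 epochs); the decay factor is an assumption, not the original implementation:

def adjust_learning_rate(optimizer, epoch, init_lr, step):
    # Assumed step decay: divide the learning rate by 10 every `step` epochs.
    lr = init_lr * (0.1 ** (epoch // step))
    for param_group in optimizer.param_groups:
        param_group['lr'] = lr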
Example #4
 def load_data(self, data_path: str) -> list:
     return get_datalist(data_path, self.num_label)
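
Unlike the earlier examples, this get_datalist takes a second argument. One plausible reading is that num_label caps the label length for a text-recognition dataset; the sketch below is hypothetical, including the tab-separated file format:

def get_datalist(data_path, num_label):
    # Hypothetical variant: keep only samples whose label fits in num_label characters.
    data_list = []
    with open(data_path, encoding='utf-8') as f:
        for line in f:
            img_path, label = line.rstrip('\n').split('\t', 1)
            if len(label) <= num_label:
                data_list.append((img_path, label))
    return data_list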
Example #5
 def load_data(self, data_path: str) -> list:
     return get_datalist(data_path)

import os

import lmdb


def createDataset(data_list, outputPath, checkValid=True):
    # NOTE: the original snippet starts mid-function. This header, the lmdb
    # environment setup, and the loop opening are reconstructed assumptions
    # inferred from the calls below, not the original code.
    env = lmdb.open(outputPath, map_size=1099511627776)
    cache = {}
    cnt = 1
    for imagePath, label in data_list:
        with open(imagePath, 'rb') as f:
            imageBin = f.read()

        if checkValid:
            try:
                if not checkImageIsValid(imageBin):
                    print('%s is not a valid image' % imagePath)
                    continue
            except Exception:
                continue

        imageKey = 'image-%09d'.encode() % cnt
        labelKey = 'label-%09d'.encode() % cnt
        cache[imageKey] = imageBin
        cache[labelKey] = label.encode()

        if cnt % 1000 == 0:
            writeCache(env, cache)
            cache = {}
        cnt += 1
    nSamples = cnt - 1
    cache['num-samples'.encode()] = str(nSamples).encode()
    writeCache(env, cache)
    print('Created dataset with %d samples' % nSamples)


if __name__ == '__main__':
    data_list = [["/data1/ocr/dataset/test_crnn/train.txt"]]
    save_path = '/data1/ocr/dataset/test_crn/lmdb/val'
    os.makedirs(save_path, exist_ok=True)
    train_data_list = get_datalist(data_list)

    createDataset(train_data_list, save_path)
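
The script above also relies on two helpers that are not shown. In crnn-style lmdb tooling they are commonly implemented roughly as below; treat this as a sketch of the assumed behavior, not this project's exact code:

import cv2
import lmdb
import numpy as np


def checkImageIsValid(imageBin):
    # Valid if the raw bytes decode to a non-empty image.
    if imageBin is None:
        return False
    imageBuf = np.frombuffer(imageBin, dtype=np.uint8)
    img = cv2.imdecode(imageBuf, cv2.IMREAD_GRAYSCALE)
    if img is None:
        return False
    h, w = img.shape[:2]
    return h * w > 0


def writeCache(env, cache):
    # Flush the in-memory cache in a single lmdb write transaction.
    with env.begin(write=True) as txn:
        for k, v in cache.items():
            txn.put(k, v)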