Example #1
    log_path = os.path.join(args.work_dir, 'ckpt', DATETIME)
    if not os.path.exists(log_path):
        os.makedirs(log_path)
    log = Logger(os.path.join(log_path, DATETIME + '.log'))
    log.logger.info(args)

    # prepare val data
    DAVIS_ROOT = args.davis
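    # color palette taken from one ground-truth annotation, so predicted
    # masks can later be saved as indexed PNGs with the same colors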
    palette = Image.open(DAVIS_ROOT +
                         '/Annotations/606332/00000.png').getpalette()

    val_dataset = DAVIS(DAVIS_ROOT,
                        phase='val',
                        imset='total_val.txt',
                        resolution='480p',
                        separate_instance=False,
                        only_single=False,
                        target_size=(832, 448))
    val_loader = data.DataLoader(val_dataset,
                                 batch_size=1,
                                 shuffle=False,
                                 num_workers=2,
                                 pin_memory=True)

    model = nn.DataParallel(STM())

    if torch.cuda.is_available():
        model.cuda()

    # load weights.pth
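
The snippet is cut off at the weight-loading step. A minimal sketch of what the "# load weights.pth" comment points to, assuming a hypothetical args.weights path (not an argument defined in the snippet):

    if args.weights:
        # the checkpoint path is an assumption; state-dict keys may need the
        # 'module.' prefix handled because the model is wrapped in DataParallel
        state_dict = torch.load(args.weights, map_location='cpu')
        model.load_state_dict(state_dict)
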
Example #2
File: train_v2.py Project: hukefei/STM
    # prepare data
    clip_size = 8
    iou_ignore_bg = True
    BATCH_SIZE = 1
    base_lr = 1e-4  # 1e-4

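    # machine-specific dataset roots; point these at the local DAVIS and
    # YouTube-VOS copies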
    DAVIS_ROOT = '/cfs/mazhongke/databases/DAVIS2017/'
    YOUTUBE_ROOT = '/cfs/dataset/youtube_complete/'
    palette = Image.open(DAVIS_ROOT +
                         '/Annotations/480p/blackswan/00000.png').getpalette()

    val_dataset = DAVIS(DAVIS_ROOT,
                        phase='val',
                        imset='2016/val.txt',
                        resolution='480p',
                        separate_instance=False,
                        only_single=False,
                        target_size=(864, 480))
    val_loader = data.DataLoader(val_dataset,
                                 batch_size=1,
                                 shuffle=False,
                                 num_workers=2,
                                 pin_memory=True)

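    # training data: the YouTube-VOS style root, served one object instance
    # per sample (separate_instance=True)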
    train_dataset = DAVIS(YOUTUBE_ROOT,
                          phase='train',
                          imset='train.txt',
                          resolution='480p',
                          separate_instance=True,
                          only_single=False,
Example #3
File: train_rgstm.py Project: hukefei/STM
    if not os.path.exists(args.work_dir):
        os.makedirs(args.work_dir)

    GPU = args.gpu
    YEAR = args.year

    # prepare val data
    DAVIS_ROOT = args.davis
    palette = Image.open(DAVIS_ROOT +
                         '/Annotations/480p/blackswan/00000.png').getpalette()

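    # build the validation loader for the DAVIS year requested on the command line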
    if args.year == 2016:
        val_dataset_2016 = DAVIS(DAVIS_ROOT,
                                 phase='val',
                                 imset='2016/val.txt',
                                 resolution='480p',
                                 separate_instance=False,
                                 only_single=False,
                                 target_size=(864, 480))
        val_loader_2016 = data.DataLoader(val_dataset_2016,
                                          batch_size=1,
                                          shuffle=False,
                                          num_workers=2,
                                          pin_memory=True)
    elif args.year == 2017:
        val_dataset_2017 = DAVIS(DAVIS_ROOT,
                                 phase='val',
                                 imset='2017/val.txt',
                                 resolution='480p',
                                 separate_instance=False,
                                 only_single=False,
Example #4
    if not os.path.exists(args.work_dir):
        os.makedirs(args.work_dir)

    GPU = args.gpu
    YEAR = args.year

    # prepare val data
    DAVIS_ROOT = args.davis
    palette = Image.open(DAVIS_ROOT +
                         '/Annotations/480p/blackswan/00000.png').getpalette()

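    # the split file follows args.year, e.g. '2016/val.txt' or '2017/val.txt'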
    val_dataset = DAVIS(DAVIS_ROOT,
                        phase='val',
                        imset=str(args.year) + '/val.txt',
                        resolution='480p',
                        separate_instance=False,
                        only_single=False,
                        target_size=(864, 480))
    val_loader = data.DataLoader(val_dataset,
                                 batch_size=1,
                                 shuffle=False,
                                 num_workers=2,
                                 pin_memory=True)

    # build model
    model = nn.DataParallel(STM())
    if torch.cuda.is_available():
        model.cuda()

    # load weights.pth
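
All four snippets read palette from a ground-truth PNG; it is typically reattached when predicted masks are written back to disk as indexed PNGs. A minimal sketch, where the mask array and the output filename are hypothetical:

    import numpy as np

    # 'mask' is an (H, W) array of integer object ids (0 = background)
    out = Image.fromarray(mask.astype(np.uint8))  # starts as a grayscale 'L' image
    out.putpalette(palette)                       # attaching the palette turns it into a 'P' image
    out.save('00000_pred.png')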