Example #1
    def __init__(self, lr=1e-3, batchs=8, cuda=True):
        '''
        :param lr: learning rate
        :param batchs: batch size
        :param cuda: run on GPU if True
        '''
        # all tensors should have 'volatile' set to True, and to False when updating the network
        self.hungarian = Munkres()
        self.device = torch.device("cuda" if cuda else "cpu")
        self.nEpochs = 999
        self.lr = lr
        self.batchsize = batchs
        self.numWorker = 4

        self.show_process = 0  # interaction
        self.step_input = 1

        print('     Preparing the model...')
        self.resetU()

        self.Uphi = uphi().to(self.device)
        self.Ephi = ephi().to(self.device)

        self.criterion = nn.MSELoss() if criterion_s else nn.CrossEntropyLoss()
        self.criterion = self.criterion.to(self.device)

        self.optimizer = optim.Adam(
            [{'params': self.Uphi.parameters()},
             {'params': self.Ephi.parameters()}],
            lr=lr)

        # seqs = [2, 4, 5, 9, 10, 11, 13]
        # lengths = [600, 1050, 837, 525, 654, 900, 750]
        seqs = [2, 4, 5, 10]
        lengths = [600, 1050, 837, 654]

        for i in range(len(seqs)):
            self.writer = SummaryWriter()
            # print '     Loading Data...'
            seq = seqs[i]
            self.seq_index = seq
            start = time.time()
            sequence_dir = 'MOT16/train/MOT16-%02d' % seq
            self.outName = t_dir + 'result_%02d.txt' % seq
            self.train_set = DatasetFromFolder(sequence_dir, self.outName)

            self.train_test = lengths[i]
            self.tag = 0
            self.loss_threhold = 0.03
            self.update()

            print('     Logging...')
            t_data = time.time() - start
            self.log(t_data)
Example #2
    def __init__(self, lr=5e-3, cuda=True):
        # all tensors should have 'volatile' set to True, and to False when updating the network
        self.hungarian = Munkres()
        self.cuda = cuda
        self.nEpochs = 999
        self.tau = 3
        # self.frame_end = len(self.edges[0])-1
        self.lr = lr
        self.outName = 'result.txt'

        self.show_process = 0  # interaction
        self.step_input = 1

        print('     Loading Data...')
        start = time.time()
        self.train_set = DatasetFromFolder('MOT16/train/MOT16-05',
                                           self.outName)
        t_data = time.time() - start

        # print self.tail
        self.tail = train_test
        self.tau = 1
        self.frame_head = 1
        self.frame_end = self.frame_head + self.tau
        self.loss_threhold = 0.03

        print('     Preparing the model...')
        self.resetU()

        self.Uphi = uphi()
        self.Ephi = ephi()

        self.criterion = nn.MSELoss() if criterion_s else nn.CrossEntropyLoss()

        self.optimizer = optim.Adam(
            [{'params': self.Uphi.parameters()},
             {'params': self.Ephi.parameters()}],
            lr=lr)

        print('     Logging...')
        self.log(t_data)

        if self.cuda:
            print('     >>>>>> CUDA <<<<<<')
            self.Uphi = self.Uphi.cuda()
            self.Ephi = self.Ephi.cuda()
            self.criterion = self.criterion.cuda()
Example #3
    def __init__(self, tt, tag, lr=1e-3, batchs=8, cuda=True):
        '''
        :param tt: train_test
        :param tag: 1 - evaluation on testing data, 0 - without evaluation on testing data
        :param lr: learning rate
        :param batchs: batch size
        :param cuda: run on GPU if True
        '''
        # all tensors should have 'volatile' set to True, and to False when updating the network
        self.writer = SummaryWriter()
        self.hungarian = Munkres()
        self.device = torch.device("cuda" if cuda else "cpu")
        self.nEpochs = 999
        self.lr = lr
        self.batchsize = batchs
        self.numWorker = 4
        self.outName = t_dir + 'result%s.txt' % name

        self.show_process = 0  # interaction
        self.step_input = 1

        # print '     Loading Data...'
        start = time.time()
        self.train_set = DatasetFromFolder(sequence_dir, self.outName)
        t_data = time.time() - start

        self.train_test = tt
        self.tag = tag
        self.loss_threhold = 0.03

        print('     Preparing the model...')
        self.resetU()

        self.Uphi = uphi().to(self.device)
        self.Ephi = ephi().to(self.device)

        self.criterion = nn.MSELoss() if criterion_s else nn.CrossEntropyLoss()
        self.criterion = self.criterion.to(self.device)

        self.optimizer = optim.Adam(
            [{'params': self.Uphi.parameters()},
             {'params': self.Ephi.parameters()}],
            lr=lr)

        print('     Logging...')
        self.log(t_data)
Example #4
    def __init__(self, typeDir, epochs, l, load, dir):
        # epochs, l, load and dir are unused in this truncated snippet
        self.typeDir = typeDir
        print('===> Loading datasets')
        train_set = DatasetFromFolder(self.typeDir)
        self.training_data_loader = DataLoader(dataset=train_set,
                                               num_workers=4,
                                               batch_size=4,
                                               shuffle=True)
Example #5
def get_training_set():
    train_dir = join(root_dir, "train")

    return DatasetFromFolder(train_dir,
                             input_transform=input_transform()
                             #target_transform=target_transform()
                             )
Example #6
def get_test_set(data_dir, upscale_factor):
    root_dir = data_dir   # download_bsd300()
    test_dir = join(root_dir, "test")
    crop_size = calculate_valid_crop_size(256, upscale_factor)
    
    # my code (commented-out experiment): split each test image into YCbCr,
    # apply target_transform(crop_size) to the Y channel, resize Cb/Cr with
    # bicubic interpolation to match, merge back to RGB and save under demo/
    return DatasetFromFolder(test_dir,
                             input_transform=input_transform(crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #7
def get_test_set(upscale_factor):
    root_dir = download_ms()
    test_dir = join(root_dir, "test")
    test_label_dir = join(root_dir, "test_labels")

    return DatasetFromFolder(test_dir, test_label_dir, input_transform,
                             target_transform)
Example #8
def get_test_set(dest, crop_size, upscale_factor, jpeg, noise=None, blur=None):
    root_dir = get_image_dir(dest)
    test_dir = os.path.join(root_dir, "test")
    crop_size = calculate_valid_crop_size(crop_size, upscale_factor)

    return DatasetFromFolder(test_dir,
                             data_transform=test_data_transform(crop_size, upscale_factor))
Example #9
def get_test_set(upscale_factor):
    test_dir = "all_images_colab_size_200k"
    crop_size = calculate_valid_crop_size(256, upscale_factor)

    return DatasetFromFolder(test_dir,
                             input_transform=input_transform(
                                 crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #10
def get_test_set(upscale_factor):
    root_dir = document_dataset()
    test_dir = join(root_dir, "test")
    crop_size = calculate_valid_crop_size(256, upscale_factor)

    return DatasetFromFolder(test_dir,
                             input_transform=input_transform(crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #11
def get_test_set(upscale_factor):
    root_dir = './BSR/BSDS500/data/images'
    test_dir = join(root_dir, "test")
    crop_size = calculate_valid_crop_size(256, upscale_factor)
    return DatasetFromFolder(test_dir,
                             input_transform=input_transform(
                                 crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #12
def get_test_set():
    root_dir = download_bsd300()
    test_dir = join(root_dir, "test")

    return DatasetFromFolder(test_dir,
                             LR_transform=LR_transform(crop_size),
                             HR_2_transform=HR_2_transform(crop_size),
                             HR_4_transform=HR_4_transform(crop_size))
Example #13
def get_test_set(data_dir, dataset, hr, upscale_factor, patch_size):
    hr_dir = join(data_dir, hr)
    lr_dir = join(data_dir, dataset)
    #crop_size = calculate_valid_crop_size(crop_size, upscale_factor)

    return DatasetFromFolder(hr_dir, lr_dir, patch_size, upscale_factor, dataset, data_augmentation=False,
                             input_transform=input_transform(),
                             target_transform=target_transform())
Example #14
def get_training_set(data_dir, hr, upscale_factor, patch_size,
                     data_augmentation):
    hr_dir = join(data_dir, hr)
    return DatasetFromFolder(hr_dir,
                             patch_size,
                             upscale_factor,
                             data_augmentation,
                             transform=transform())
Example #15
def get_training_set(data_dir, upscale_factor):
    root_dir = data_dir  # download_bsd300()
    train_dir = join(root_dir, "train")
    crop_size = calculate_valid_crop_size(256, upscale_factor)

    return DatasetFromFolder(train_dir,
                             input_transform=input_transform(crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
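Functions such as get_training_set / get_test_set above only build a Dataset; they are normally wrapped in DataLoader objects before training. A minimal sketch, assuming the signatures from Examples #6 and #15; the data directory, batch sizes and worker counts are illustrative, not taken from the snippets.

from torch.utils.data import DataLoader

data_dir = 'dataset/BSDS300/images'  # hypothetical path; both functions take it as a parameter
upscale_factor = 3

train_set = get_training_set(data_dir, upscale_factor)  # Example #15
test_set = get_test_set(data_dir, upscale_factor)       # Example #6

training_data_loader = DataLoader(train_set, batch_size=64, shuffle=True, num_workers=4)
testing_data_loader = DataLoader(test_set, batch_size=16, shuffle=False, num_workers=4)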
Example #16
    def loadData(self):
        self.train_test = SEQLEN
        for camera in range(4, 9):
            # print '     Loading Data...'
            self.seq_index = camera
            start = time.time()

            self.outName = t_dir + 'result_%d.txt' % camera
            out = open(self.outName, 'w')
            out.close()

            self.train_set = DatasetFromFolder(camera, self.outName, show=0)

            self.update()

            print('     Logging...')
            t_data = (time.time() - start)/60
            self.log(t_data)
Example #17
def get_test_set():
    root_dir = download_bsd300()
    test_dir = os.path.join(root_dir, "test")

    return DatasetFromFolder(test_dir,
                             LR_transform=LR_transform(CROP_SIZE),
                             HR_2_transform=HR_2_transform(CROP_SIZE),
                             HR_4_transform=HR_4_transform(CROP_SIZE),
                             HR_8_transform=HR_8_transform(CROP_SIZE))
Example #18
def get_test_set(upscale_factor, sr_run):
    root_dir = download_div2k()  # download_bsd300()
    test_dir = join(root_dir, "valid_sr/", str(sr_run))
    test_dir2= join(root_dir, "compressed_valid_png")
    crop_size = calculate_valid_crop_size(128, upscale_factor)

    return DatasetFromFolder(test_dir, test_dir2,
                             input_transform=input_transform(crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #19
def get_test_set(upscale_factor):
    #root_dir = download_bsd300()
    test_dir = join(data_folder, "test")
    crop_size = calculate_valid_crop_size(256, upscale_factor)

    return DatasetFromFolder(test_dir,
                             input_transform=input_transform(
                                 crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #20
def get_training_set(training_size=224,
                     image_type='RGB',
                     mean_std=((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
                     image_source='train'):
    return DatasetFromFolder('train',
                             training_size,
                             image_type,
                             mean_std,
                             image_source=image_source)
Example #21
def get_training_set(upscale_factor, folder):
    root_dir = join("dataset", folder)
    train_dir = join(root_dir, "train")
    crop_size = calculate_valid_crop_size(cropsize, upscale_factor)

    return DatasetFromFolder(train_dir,
                             input_transform=input_transform(
                                 crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #22
def get_training_set(data_dir, nFrames, upscale_factor, data_augmentation,
                     other_dataset, patch_size, future_frame):
    return DatasetFromFolder(data_dir,
                             nFrames,
                             upscale_factor,
                             data_augmentation,
                             other_dataset,
                             patch_size,
                             future_frame,
                             transform=transform())
Example #23
def get_training_set(data_dir, train_dir, patch_size, sr_patch_size,
                     upscale_factor, num_classes, data_augmentation):
    return DatasetFromFolder(data_dir,
                             train_dir,
                             patch_size,
                             sr_patch_size,
                             upscale_factor,
                             num_classes,
                             data_augmentation,
                             transform=transform())
Example #24
def get_test_set():
    root_dir = 'dataset'
    test_dir = join(root_dir, "validation")
    crop_size = (72, 272)
    return DatasetFromFolder(test_dir,
                             input_transform=Compose([
                                 CenterCrop(crop_size),
                                 Resize(crop_size),
                                 ToTensor(),
                             ]))
Example #25
def get_training_set(train_dir=None):
    if train_dir is None:
        root_dir = download_bsd300()
        train_dir = os.path.join(root_dir, "train")

    return DatasetFromFolder(train_dir,
                             LR_transform=LR_transform(CROP_SIZE),
                             HR_2_transform=HR_2_transform(CROP_SIZE),
                             HR_4_transform=HR_4_transform(CROP_SIZE),
                             HR_8_transform=HR_8_transform(CROP_SIZE))
Example #26
def get_test_set(upscale_factor, convert_gray=False):
    root_dir = download_bsd300()
    test_dir = join(root_dir, "test")
    crop_size = calculate_valid_crop_size(256, upscale_factor)

    return DatasetFromFolder(test_dir,
                             input_transform=input_transform(
                                 crop_size, upscale_factor),
                             target_transform=target_transform(crop_size),
                             convert_gray=convert_gray)
Example #27
def get_val_set():
    root_dir = os.path.join(os.path.expanduser('~'), 'data/plate_recognition/plate_e2e')
    val_dir = join(root_dir, "validation")
    crop_size = (72, 272)
    return DatasetFromFolder(val_dir,
                             input_transform=Compose([
                                 CenterCrop(crop_size),
                                 Resize(crop_size),
                                 ToTensor(),
                                 Normalize(mean=[0.485, 0.456, 0.406],
                                           std=[0.229, 0.224, 0.225]),
                             ]))

    # return DatasetFromFolder(val_dir,
    #                          input_transform=Compose([ToTensor(),Normalize(mean=[0.485, 0.456, 0.406],
    #                      std=[0.229, 0.224, 0.225]),]))
                      
Example #28
def get_test_set(upscale_factor):
    # root_dir = download_bsd300()
    # test_dir = join(root_dir, "test")
    root_dir = './data'
    test_dir = root_dir + '/test'
    crop_size = calculate_valid_crop_size(256, upscale_factor)

    return DatasetFromFolder(test_dir,
                             input_transform=input_transform(crop_size, upscale_factor),
                             target_transform=target_transform(crop_size))
Example #29
def get_val_set(root):
    root_dir = root
    val_LR_dir = join(root_dir, "val_LR/")
    val_HR_2_dir = join(root_dir, "val_HR_2/")
    val_HR_4_dir = join(root_dir, "val_HR_4/")

    return DatasetFromFolder(val_LR_dir,
                             val_HR_2_dir,
                             val_HR_4_dir,
                             LR_transform=LR_transform(),
                             HR_2_transform=HR_2_transform(),
                             HR_4_transform=HR_4_transform())
Example #30
def get_test_set(h=IMAGE_HEIGHT,
                 w=IMAGE_WIDTH,
                 download=download_bsd300,
                 upscale_factor=None):
    h = calculate_valid_size(h, upscale_factor)
    w = calculate_valid_size(w, upscale_factor)

    return DatasetFromFolder(
        join(download(), 'test'),
        input_transform=input_transform(h, w, upscale_factor),
        target_transform=target_transform(h, w),
    )