def CreateDataLoader(opt):
    """Build and return a CustomDatasetDataLoader initialized from *opt*."""
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    loader = CustomDatasetDataLoader()
    loader.initialize(opt)
    return loader
def CreateDataLoader(opt):
    """Create a CustomDatasetDataLoader, print its name, and initialize it.

    Parameters:
        opt: parsed options object forwarded to ``initialize``.

    Returns:
        The initialized CustomDatasetDataLoader instance.
    """
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    data_loader = CustomDatasetDataLoader()
    print(data_loader.name())
    data_loader.initialize(opt)
    # Removed dead commented-out Python-2 style `print data_loader` line.
    return data_loader
Example #3
0
def CreateDataLoader(opt):
    """Instantiate CustomDatasetDataLoader, report its name, and
    initialize it with *opt* before returning it."""
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    loader = CustomDatasetDataLoader()
    print(loader.name())
    loader.initialize(opt)
    return loader
Example #4
0
def CreateDataLoader(datafolder, dataroot='./dataset', dataset_mode='2afc',
                     load_size=64, batch_size=1, serial_batches=True,
                     nThreads=4):
    """Build a CustomDatasetDataLoader rooted at ``dataroot/dataset_mode``.

    All keyword arguments are forwarded to the loader's ``initialize``.
    """
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    loader = CustomDatasetDataLoader()
    # The effective data root is the mode-specific subdirectory.
    mode_root = dataroot + '/' + dataset_mode
    loader.initialize(datafolder,
                      dataroot=mode_root,
                      dataset_mode=dataset_mode,
                      load_size=load_size,
                      batch_size=batch_size,
                      serial_batches=serial_batches,
                      nThreads=nThreads)
    return loader
def CreateDataLoader(opt, k):
    """Return an initialized data loader selected by *k*.

    ``k == 0`` uses the standard CustomDatasetDataLoader; any other value
    uses the 'super' variant.  Both are printed and initialized identically.
    """
    if k == 0:
        from data.custom_dataset_data_loader import \
            CustomDatasetDataLoader as LoaderClass
    else:
        from data.custom_dataset_data_loader_super import \
            CustomDatasetDataLoader_super as LoaderClass

    loader = LoaderClass()
    print(loader.name())
    loader.initialize(opt)
    return loader
Example #6
0
def CreateDataLoader(
    datafolder,
    dataroot="./dataset",
    dataset_mode="2afc",
    load_size=64,
    batch_size=1,
    serial_batches=True,
    nThreads=1,
):
    """Create and initialize a CustomDatasetDataLoader.

    Parameters:
        datafolder: data folder argument passed straight to ``initialize``.
        dataroot: base dataset directory; ``dataset_mode`` is appended.
        dataset_mode: sub-dataset name (e.g. "2afc").
        load_size: image load size forwarded to the loader.
        batch_size: batch size forwarded to the loader.
        serial_batches: whether batches are drawn in order.
        nThreads: worker-thread count.  Previously hard-coded to 1; now a
            backward-compatible keyword parameter with the same default.

    Returns:
        The initialized CustomDatasetDataLoader instance.
    """
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    data_loader = CustomDatasetDataLoader()
    data_loader.initialize(
        datafolder,
        dataroot=dataroot + "/" + dataset_mode,
        dataset_mode=dataset_mode,
        load_size=load_size,
        batch_size=batch_size,
        serial_batches=serial_batches,
        nThreads=nThreads,
    )
    return data_loader
Example #7
0
def CreateDataLoader(opt):
    """Construct, announce, and initialize a CustomDatasetDataLoader."""
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    loader = CustomDatasetDataLoader()
    # Announce which loader implementation is in use.
    print(loader.name())
    # Configure the loader from the parsed options.
    loader.initialize(opt)
    return loader
def CreateDataLoader(opt):
    """Return a CustomDatasetDataLoader initialized from *opt*."""
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    dl = CustomDatasetDataLoader()
    print(dl.name())
    dl.initialize(opt)
    return dl
Example #9
0
def CreateDataLoader(opt, rank):
    """Initialize a CustomDatasetDataLoader with *opt* and process *rank*.

    NOTE: relies on ``CustomDatasetDataLoader`` already being imported at
    module scope — no local import here.
    """
    loader = CustomDatasetDataLoader()
    loader.initialize(opt, rank)
    return loader
Example #10
0
def main_task():
    """Train a Deeplab segmentation model and periodically evaluate MIoU.

    Parses options, optionally resumes epoch/best-IoU bookkeeping from
    checkpoint files, then alternates training epochs with validation
    passes, saving 'latest' and 'best' models.  Relies on module-level
    names not visible in this chunk (os, np, time, util, BaseOptions,
    CustomDatasetDataLoader, Deeplab_Solver).
    """

    # define params
    opt = BaseOptions().parse()
    iter_path = os.path.join(opt.checkpoints_dir, 'iter.txt')
    ioupath_path = os.path.join(opt.checkpoints_dir, 'MIoU.txt')

    # load training data
    if opt.continue_train:
        # Resume bookkeeping; falls back to a fresh start when the
        # checkpoint files are missing or unreadable.
        # NOTE(review): bare `except:` also swallows KeyboardInterrupt and
        # SystemExit — `except (OSError, ValueError):` would be safer.
        try:
            start_epoch, epoch_iter = np.loadtxt(iter_path,
                                                 delimiter=',',
                                                 dtype=int)
        except:
            start_epoch, epoch_iter = 1, 0
        try:
            best_iou = np.loadtxt(ioupath_path, dtype=float)
        except:
            best_iou = 0.
    else:
        start_epoch, epoch_iter = 1, 0
        best_iou = 0.

    # Restrict CUDA to the first configured GPU id.
    os.environ["CUDA_VISIBLE_DEVICES"] = str(opt.gpu_ids[0])

    # define data mode
    data_loader = CustomDatasetDataLoader()
    data_loader.initialize(opt)
    dataset, dataset_val = data_loader.load_data()
    dataset_size = len(dataset)

    # define model
    model = Deeplab_Solver(opt)
    # Resume the global step counter from the saved epoch/iteration.
    total_steps = (start_epoch - 1) * dataset_size + epoch_iter

    print("starting training model......")

    for epoch in range(start_epoch, opt.nepochs):
        if epoch != start_epoch:
            epoch_iter = epoch_iter % dataset_size

        # for train
        opt.isTrain = True
        model.model.train()
        for i, data in enumerate(dataset, start=epoch_iter):
            total_steps += opt.batchSize
            epoch_iter += opt.batchSize

            # keep time to watch how times each one epoch
            epoch_start_time = time.time()

            # forward and backward pass
            model.forward(data, isTrain=True)
            model.backward(total_steps,
                           opt.nepochs * dataset_size * opt.batchSize + 1)

            # save latest model
            if total_steps % opt.save_latest_freq == 0:
                print('saving the latest model (epoch %d, total_steps %d)' %
                      (epoch, total_steps))
                model.save('latest')
                np.savetxt(iter_path, (epoch, epoch_iter),
                           delimiter=',',
                           fmt='%d')

        # Early stop once average training loss is below threshold.
        if model.trainingavgloss < 0.010:
            break

        # for eval
        opt.isTrain = False
        model.model.eval()
        if dataset_val != None:
            label_trues, labels_preds = [], []
            for i, data in enumerate(dataset_val):
                seggt, segpred = model.forward(data, isTrain=False)
                # Move ground truth and prediction to host memory for scoring.
                seggt = seggt.data.cpu().numpy()
                segpred = segpred.data.cpu().numpy()

                label_trues.append(seggt)
                labels_preds.append(segpred)

            metrics = util.label_accuracy_score(label_trues,
                                                labels_preds,
                                                n_class=opt.label_nc)
            # Convert fractional metrics to percentages.
            metrics *= 100
            print('''\
                    Validation:
                    Accuracy: {0}
                    AccuracyClass: {1}
                    MeanIOU: {2}
                    FWAVAccuracy: {3}
                    '''.format(*metrics))

            # save model for best
            if metrics[2] > best_iou:
                best_iou = metrics[2]
                model.save('best')

            print('end of epoch %d / %d \t Time Taken: %d sec' %
                  (epoch + 1, opt.nepochs, time.time() - epoch_start_time))
        # NOTE(review): the assignments below reference `self` inside a plain
        # function — they look like an Options-class __init__ fragment pasted
        # here by mistake and would raise NameError if reached.  Confirm
        # against the original source and remove.
        self.resize_or_crop = "resize_and_crop"
        self.save_epoch_freq = 5
        self.save_latest_freq = 5000
        self.serial_batches = False
        self.which_direction = "BtoA"
        self.which_epoch = "latest"
        self.checkpoints_dir = "/data/kdabi/CS698O/Autopainter/CS698-cartoon-painter/saved_models"
        self.results_dir = "/data/kdabi/CS698O/Autopainter/CS698-cartoon-painter/saved_models"


# Top-level training driver: build options, data pipeline, model, and the
# epoch loop.  Relies on names defined elsewhere in the file (Options, time,
# CustomDatasetDataLoader, FeatureLoss, Visualizer, opt fields).
opt = Options()

# opt = TrainOptions().parse()

# Wrap the dataset behind the project's loader abstraction.
data_loader = CustomDatasetDataLoader()
data_loader.initialize(opt)
dataset = data_loader.load_data()
dataset_size = len(data_loader)
# print('#training images = %d' % dataset_size)

model = FeatureLoss(opt)
visualizer = Visualizer(opt)
total_steps = 0  # cumulative images processed across all epochs

# NOTE(review): this loop body appears truncated — it only records timers
# and counters and never invokes the model; confirm against the original.
for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1):
    epoch_start_time = time.time()
    epoch_iter = 0  # images processed within the current epoch

    for i, data in enumerate(dataset):
        iter_start_time = time.time()
        total_steps += opt.batchSize
def CreateDataLoader(datafolder, dataroot='./dataset', dataset_mode='2afc',
                     load_size=64, batch_size=1, serial_batches=True,
                     nThreads=1):
    """Create and initialize a CustomDatasetDataLoader.

    Parameters:
        datafolder: data folder argument passed straight to ``initialize``.
        dataroot: base dataset directory; ``dataset_mode`` is appended.
        dataset_mode: sub-dataset name (e.g. "2afc").
        load_size: image load size forwarded to the loader.
        batch_size: batch size forwarded to the loader.
        serial_batches: whether batches are drawn in order.
        nThreads: worker-thread count.  Previously hard-coded to 1; now a
            backward-compatible keyword parameter with the same default.

    Returns:
        The initialized CustomDatasetDataLoader instance.
    """
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    data_loader = CustomDatasetDataLoader()
    data_loader.initialize(datafolder,
                           dataroot=dataroot + '/' + dataset_mode,
                           dataset_mode=dataset_mode,
                           load_size=load_size,
                           batch_size=batch_size,
                           serial_batches=serial_batches,
                           nThreads=nThreads)
    return data_loader
Example #13
0
def CreateDataLoader(opt, isVal=False):
    """Build a CustomDatasetDataLoader and initialize it for either the
    training or (when *isVal* is true) the validation split."""
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    loader = CustomDatasetDataLoader()
    print(loader.name())
    loader.initialize(opt, isVal)
    return loader
Example #14
0
def CreateDataLoader(config, filename):
    """Return a CustomDatasetDataLoader initialized from *config* and the
    given dataset *filename*."""
    from data.custom_dataset_data_loader import CustomDatasetDataLoader

    loader = CustomDatasetDataLoader()
    print(loader.name())
    loader.initialize(config, filename)
    return loader
Example #15
0
def CreateDataLoader(opt):
    """Return a CustomDatasetDataLoader initialized with *opt*.

    NOTE: assumes ``CustomDatasetDataLoader`` is available in the
    enclosing (module) scope rather than imported locally.
    """
    loader = CustomDatasetDataLoader()
    loader.initialize(opt)
    return loader