Example 1
import argparse
import os

# Params, LOG, print_config, create_dataset and MobileNetv2_DeepLabv3 come from
# the repository's own modules; their imports are omitted in this excerpt.


def main():
    # parse command-line arguments
    parser = argparse.ArgumentParser(
        description='MobileNet_v2_DeepLab_v3 Pytorch Implementation')
    parser.add_argument(
        '--dataset',
        default='cityscapes',
        choices=['cityscapes', 'other'],
        help='Dataset used in training MobileNet v2+DeepLab v3')
    parser.add_argument('--root',
                        default='./data/cityscapes',
                        help='Path to your dataset')
    parser.add_argument('--epoch',
                        default=None,
                        help='Total number of training epochs')
    parser.add_argument('--lr', default=None, help='Base learning rate')
    parser.add_argument('--pretrain',
                        default=None,
                        help='Path to a pre-trained backbone model')
    parser.add_argument('--resume_from',
                        default=None,
                        help='Path to a checkpoint to resume model')

    args = parser.parse_args()
    params = Params()

    # parse args
    if not os.path.exists(args.root):
        if params.dataset_root is None:
            raise ValueError('ERROR: Root %s does not exist!' % args.root)
    else:
        params.dataset_root = args.root
    if args.epoch is not None:
        # argparse returns strings when no type= is given, so cast explicitly
        params.num_epoch = int(args.epoch)
    if args.lr is not None:
        params.base_lr = float(args.lr)
    if args.pretrain is not None:
        params.pre_trained_from = args.pretrain
    if args.resume_from is not None:
        params.resume_from = args.resume_from

    LOG('Network parameters:')
    print_config(params)

    # create dataset and transformation
    LOG('Creating Dataset and Transformation......')
    datasets = create_dataset(params)
    LOG('Creation Succeeded.\n')

    # create model
    LOG('Initializing MobileNet and DeepLab......')
    net = MobileNetv2_DeepLabv3(params, datasets)
    LOG('Model Built.\n')

    # let's start to train!
    net.Train()
    net.Test()
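The casts above are needed because argparse hands every option back as a string unless a type= is given. A minimal, self-contained sketch of that behavior, reusing the --lr flag from the example with a purely illustrative value:

import argparse

# Without type=, the parsed value is a string, which is why the example casts
# --epoch and --lr before assigning them to numeric fields.
parser = argparse.ArgumentParser()
parser.add_argument('--lr', default=None, help='Base learning rate')
args = parser.parse_args(['--lr', '0.0007'])   # illustrative value

assert isinstance(args.lr, str)                # parsed as '0.0007', not 0.0007
base_lr = float(args.lr)                       # explicit cast, as in the example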
Example 2
def main():
    # parse command-line arguments
    parser = argparse.ArgumentParser(description='MobileNet_v2_DeepLab_v3 Pytorch Implementation')
    #todo maybe make it work with multiple datasets?
    #parser.add_argument('--dataset', default='cityscapes', choices=['cityscapes', 'other'],
    #                    help='Dataset used in training MobileNet v2+DeepLab v3')
    parser.add_argument('--root', default='./data/cityscapes', help='Path to your dataset')
    parser.add_argument('--epoch', default=None, help='Total number of training epochs')
    parser.add_argument('--lr', default=None, help='Base learning rate')
    parser.add_argument('--pretrain', default=None, help='Path to a pre-trained backbone model')
    parser.add_argument('--resume_from', default=None, help='Path to a checkpoint to resume model')
    parser.add_argument('--logdir', default=None, help='Directory to save logs for Tensorboard')
    parser.add_argument('--batch_size', default=128, help='Batch size for training')

    args = parser.parse_args()
    params = Params()

    # parse args
    if not os.path.exists(args.root):
        if params.dataset_root is None:
            raise ValueError('ERROR: Root %s doesn\'t exist!' % args.root)
    else:
        params.dataset_root = args.root
    if args.epoch is not None:
        params.num_epoch = int(args.epoch)
    if args.lr is not None:
        params.base_lr = float(args.lr)  # argparse value is a string
    if args.pretrain is not None:
        params.pre_trained_from = args.pretrain
    if args.resume_from is not None:
        params.resume_from = args.resume_from
    if args.logdir is not None:
        params.logdir = args.logdir
    params.summary_dir, params.ckpt_dir = create_train_dir(params.logdir)
    params.train_batch = int(args.batch_size)

    LOG('Network parameters:')
    print_config(params)

    # create dataset and transformation
    LOG('Creating Dataset and Transformation......')
    datasets = create_dataset(params)
    LOG('Creation Succeeded.\n')

    # create model
    LOG('Initializing MobileNet and DeepLab......')
    net = MobileNetv2_DeepLabv3(params, datasets)
    LOG('Model Built.\n')

    # let's start to train!
    net.Train()
    net.Test()
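Example 2 converts --batch_size with int() after parsing; an equivalent approach (a sketch, not taken from the repository) is to pass type=int so argparse performs the conversion itself and rejects non-numeric input at parse time:

import argparse

parser = argparse.ArgumentParser()
# type=int makes argparse convert and validate the value on its own.
parser.add_argument('--batch_size', default=128, type=int,
                    help='Batch size for training')
args = parser.parse_args(['--batch_size', '64'])   # illustrative value

assert args.batch_size == 64   # already an int, so no later cast is needed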
Example 3
def main():
    parser = argparse.ArgumentParser(
        description='MobileNet_V2 Pytorch Implementation')
    parser.add_argument('--dataset',
                        default='cifar10',
                        choices=['imagenet', 'cifar10', 'cifar100', 'other'],
                        help='Dataset used in training MobileNet V2')
    parser.add_argument('--root',
                        default='./data/cifar10',
                        help='Path to your dataset')

    args = parser.parse_args()

    # parse args
    if args.dataset == 'cifar10':
        params = CIFAR10_params()
    elif args.dataset == 'cifar100':
        params = CIFAR100_params()
    else:
        params = Params()
    params.dataset_root = args.root

    if not os.path.exists(args.root):
        print('ERROR: Root %s does not exist!' % args.root)
        exit(1)
    """ TEST CODE """
    # params = CIFAR100_params
    # params.dataset_root = '/home/ubuntu/cifar100'

    # create model
    print('\nInitializing MobileNet......')
    net = MobileNetv2(params)
    print('Initialization Done.\n')

    # create dataset and transformation
    print('Loading Data......')
    dataset = create_dataset(params)
    print('Data Loaded.\n')

    # let's start to train!
    net.train_n_epoch(dataset)
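The if/elif chain that selects a parameter class can also be written as a lookup table. This is only a sketch reusing the class names from the example, with Params kept as the fallback for 'imagenet' and 'other':

# Map dataset names to their parameter classes; anything else falls back to Params.
PARAMS_BY_DATASET = {
    'cifar10': CIFAR10_params,
    'cifar100': CIFAR100_params,
}
params = PARAMS_BY_DATASET.get(args.dataset, Params)()
params.dataset_root = args.root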
Example 4
def create_datasets(params):
    # NOTE: the top of this snippet is cut off on the source page; the function
    # signature, the phase list and the 'train' key are reconstructed from how
    # they are used below (transform[p], phase, and the create_datasets(pp) call).
    # transforms, RandomResizedCrop, RandomHorizontalFlip, ToTensor and
    # Cityscapes are assumed to be imported from the repository's own modules.
    phase = ['train', 'val', 'test']

    transform = {
        'train':
        transforms.Compose([
            RandomResizedCrop(params.image_size, scale=(0.5, 2.0)),
            RandomHorizontalFlip(),
            ToTensor()
        ]),
        'val':
        transforms.Compose([
            RandomResizedCrop(params.image_size, scale=(0.5, 2.0)),
            ToTensor()
        ]),
        'test':
        transforms.Compose([ToTensor()])
    }

    # file_dir = {p: os.path.join(params.dataset_root, p) for p in phase}

    # datasets = {Cityscapes(file_dir[p], mode=p, transforms=transform[p]) for p in phase}
    datasets = {
        p: Cityscapes(params.dataset_root, mode=p, transforms=transform[p])
        for p in phase
    }

    return datasets


if __name__ == '__main__':
    from config import Params
    pp = Params()
    pp.dataset_root = '/media/ubuntu/disk/cityscapes'
    datasets = create_datasets(pp)
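The dictionary returned by create_datasets holds one Dataset per phase; before training these would typically be wrapped in torch.utils.data.DataLoader instances. A minimal sketch, with the batch size, shuffling policy, and worker count chosen purely for illustration (they are not taken from the repository):

from torch.utils.data import DataLoader

# Build one DataLoader per phase; only the training split is shuffled.
loaders = {
    p: DataLoader(datasets[p],
                  batch_size=8,
                  shuffle=(p == 'train'),
                  num_workers=4)
    for p in datasets
}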