Example #1
import scriptconfig as scfg


class EvalDetectionCLI(scfg.Config):
    default = {
        'true': scfg.Path(None, help='true coco dataset'),
        'pred': scfg.Path(None, help='predicted coco dataset'),
        'out_dpath': scfg.Path('./out', help='output directory'),
    }
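
A minimal usage sketch for the class above (the file names are placeholders, not real datasets):

config = EvalDetectionCLI(data={
    'true': 'truth.mscoco.json',        # placeholder path
    'pred': 'predictions.mscoco.json',  # placeholder path
})
print(config['out_dpath'])  # the declared default output directory
# With EvalDetectionCLI(cmdline=True) the same keys are read from sys.argv
# instead, e.g. --true=truth.mscoco.json --pred=predictions.mscoco.json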
Example #2
class SegmentationConfig(scfg.Config):
    """
    Default configuration for setting up a training session
    """
    default = {
        'name': scfg.Value('untitled', help='A human readable tag that is "nice" for humans'),
        'workdir': scfg.Path('~/work/sseg', help='Dump all results in your workdir'),

        'workers': scfg.Value(0, help='number of parallel dataloading jobs'),
        'xpu': scfg.Value('argv', help='See netharn.XPU for details. Can be cpu/gpu/cuda0/0,1,2,3'),

        'augmenter': scfg.Value('simple', help='type of training dataset augmentation'),
        'class_weights': scfg.Value('log-median-idf', help='how to weight imbalanced classes'),
        # 'class_weights': scfg.Value(None, help='how to weight imbalanced classes'),

        'datasets': scfg.Value('special:shapes256', help='Either a special key or a coco file'),
        'train_dataset': scfg.Value(None),
        'vali_dataset': scfg.Value(None),
        'test_dataset': scfg.Value(None),

        'arch': scfg.Value('psp', help='Network architecture code'),
        'optim': scfg.Value('adam', help='Weight optimizer. Can be SGD, ADAM, ADAMW, etc.'),

        'backend': scfg.Value('npy', help='fast lookup backend. May be npy or cog'),
        'input_dims': scfg.Value((224, 224), help='Window size to input to the network'),
        'input_overlap': scfg.Value(0.25, help='amount of overlap when creating a sliding window dataset'),
        'normalize_inputs': scfg.Value(True, help='if True, precompute training mean and std for data whitening'),

        'batch_size': scfg.Value(4, help='number of items per batch'),
        'bstep': scfg.Value(4, help='number of batches before a gradient descent step'),

        'max_epoch': scfg.Value(140, help='Maximum number of epochs'),
        'patience': scfg.Value(140, help='Maximum "bad" validation epochs before early stopping'),

        'lr': scfg.Value(1e-4, help='Base learning rate'),
        'decay':  scfg.Value(1e-5, help='Base weight decay'),

        'focus': scfg.Value(0.0, help='focus for focal loss'),

        'schedule': scfg.Value('step90', help=('Special coercible netharn code, e.g. onecycle50, step50, gamma')),

        'init': scfg.Value('kaiming_normal', help='How to initialize weights (can be a path to a pretrained model)'),
        'pretrained': scfg.Path(help=('alternative way to specify a path to a pretrained model')),
    }

    def normalize(self):
        if self['pretrained'] in ['null', 'None']:
            self['pretrained'] = None

        if self['pretrained'] is not None:
            self['init'] = 'pretrained'
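
The normalize() hook above rewrites values after the config is loaded. A short sketch of its effect (assuming, as in scriptconfig, that normalize() runs when the config is constructed; the checkpoint path is a placeholder):

config = SegmentationConfig(data={'pretrained': '/tmp/checkpoint.pt'})
assert config['init'] == 'pretrained'    # normalize() switched the init strategy
config = SegmentationConfig(data={'pretrained': 'None'})
assert config['pretrained'] is None      # the string 'None' is coerced back to None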
Example #3
import scriptconfig as scfg


class MyConfig(scfg.Config):
    default = {
        'option1': scfg.Value('bar', help='an option'),
        'option2': scfg.Value((1, 2, 3), tuple, help='another option'),
        'option3': None,
        'option4': 'foo',
        'discrete': scfg.Value(None, choices=['a', 'b', 'c']),
        'apath': scfg.Path(help='a path'),
    }
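
Because a Config behaves like a dictionary, values can be read and updated with ordinary item access. A brief sketch (the values are illustrative):

config = MyConfig()
print(config['option2'])   # (1, 2, 3)
config['discrete'] = 'a'   # one of the declared choices
print(dict(config))        # the config exposes its contents like a plain dict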
Example #4
class ImageClfConfig(scfg.Config):
    """
    Default configuration for setting up a training session
    """
    default = {
        'nice': scfg.Path('untitled', help='A human readable tag that is "nice" for humans'),
        'workdir': scfg.Path('~/work/tiny-imagenet', help='Dump all results in your workdir'),

        'workers': scfg.Value(0, help='number of parallel dataloading jobs'),
        'xpu': scfg.Value('argv', help='See netharn.XPU for details. Can be cpu/gpu/cuda0/0,1,2,3'),

        'augmenter': scfg.Value(True, help='type of training dataset augmentation'),

        # 'datasets': scfg.Value('special:tiny_imgnet', help='Eventually you may be able to specify a coco file'),
        'train_dataset': scfg.Value(None),
        'vali_dataset': scfg.Value(None),
        'test_dataset': scfg.Value(None),

        'arch': scfg.Value('resnet50', help='Network architecture code'),
        'optim': scfg.Value('adam', help='Weight optimizer. Can be SGD, ADAM, ADAMW, etc.'),

        'input_dims': scfg.Value((224, 224), help='Window size to input to the network'),

        'batch_size': scfg.Value(6, help='number of items per batch'),

        'max_epoch': scfg.Value(100, help='Maximum number of epochs'),
        'patience': scfg.Value(100, help='Maximum "bad" validation epochs before early stopping'),

        'lr': scfg.Value(1e-3, help='Base learning rate'),
        'decay':  scfg.Value(1e-5, help='Base weight decay'),

        'schedule': scfg.Value('onecycle71', help=('Special coercible netharn code, e.g. onecycle50, step50, gamma')),

        'init': scfg.Value('kaiming_normal', help='How to initialize weights (can be a path to a pretrained model)'),
        'pretrained': scfg.Path(help=('alternative way to specify a path to a pretrained model')),
    }

    def normalize(self):
        if self['pretrained'] in ['null', 'None']:
            self['pretrained'] = None

        if self['pretrained'] is not None:
            self['init'] = 'pretrained'
Example #5
class DetectFitConfig(scfg.Config):
    default = {
        # Personal Preference
        'nice':
        scfg.Value(
            'untitled',
            help=
            ('a human readable tag for your experiment (we also keep a '
             'failsafe computer readable tag in case you update hyperparams, '
             'but forget to update this flag)')),

        # System Options
        'workdir':
        scfg.Path('~/work/detect',
                  help='path where this script can dump stuff'),
        'workers':
        scfg.Value(0, help='number of DataLoader processes'),
        'xpu':
        scfg.Value('argv', help='a CUDA device or a CPU'),

        # Data (the hardest part of machine learning)
        'datasets':
        scfg.Value('special:shapes1024', help='special dataset key'),
        'train_dataset':
        scfg.Value(None, help='override train with a custom coco dataset'),
        'vali_dataset':
        scfg.Value(None, help='override vali with a custom coco dataset'),
        'test_dataset':
        scfg.Value(None, help='override test with a custom coco dataset'),

        # Dataset options
        'multiscale':
        False,
        'visible_thresh':
        scfg.Value(
            0.5,
            help=
            'percentage of a box that must be visible to be included in truth'
        ),
        'input_dims':
        scfg.Value((256, 256), help='window size to input to the network'),
        'normalize_inputs':
        scfg.Value(
            False,
            help='if True, precompute training mean and std for data whitening'
        ),
        'augment':
        scfg.Value('simple',
                   help='key indicating augmentation strategy',
                   choices=['complex', 'simple', None]),
        'ovthresh':
        0.5,

        # High level options
        'arch':
        scfg.Value('yolo2', help='network topology', choices=['yolo2']),
        'optim':
        scfg.Value('adam',
                   help='torch optimizer',
                   choices=['sgd', 'adam', 'adamw']),
        'batch_size':
        scfg.Value(
            4, help='number of images that run through the network at a time'),
        'bstep':
        scfg.Value(8, help='num batches before stepping'),
        'lr':
        scfg.Value(1e-3, help='learning rate'),  # 1e-4,
        'decay':
        scfg.Value(1e-5, help='weight decay'),
        'schedule':
        scfg.Value('step90', help='learning rate / momentum scheduler'),
        'max_epoch':
        scfg.Value(140, help='Maximum number of epochs'),
        'patience':
        scfg.Value(
            140,
            help='Maximum number of bad epochs on validation before stopping'),

        # Initialization
        'init':
        scfg.Value('imagenet', help='initialization strategy'),
        'pretrained':
        scfg.Path(help='path to a netharn deploy file'),

        # Loss Terms
        'focus':
        scfg.Value(0.0, help='focus for Focal Loss'),
    }

    def normalize(self):
        if self['pretrained'] in ['null', 'None']:
            self['pretrained'] = None

        if self['datasets'] == 'special:voc':
            self['train_dataset'] = ub.expandpath(
                '~/data/VOC/voc-trainval.mscoco.json')
            self['vali_dataset'] = ub.expandpath(
                '~/data/VOC/voc-test-2007.mscoco.json')

        key = self.get('pretrained', None) or self.get('init', None)
        if key == 'imagenet':
            self['pretrained'] = yolo2.initial_imagenet_weights()
        elif key == 'lightnet':
            self['pretrained'] = yolo2.demo_voc_weights()

        if self['pretrained'] is not None:
            self['init'] = 'pretrained'
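
The normalize() hook in this example also expands the dataset shortcut: passing 'special:voc' fills in concrete train/vali paths, and init='imagenet' or 'lightnet' resolves to downloaded pretrained weights. A sketch of the dataset expansion only (init is set to 'noop' here purely to sidestep the weight download; it is an illustrative value, not a recommendation):

config = DetectFitConfig(data={'datasets': 'special:voc', 'init': 'noop'})
print(config['train_dataset'])  # expanded path to ~/data/VOC/voc-trainval.mscoco.json
print(config['vali_dataset'])   # expanded path to ~/data/VOC/voc-test-2007.mscoco.json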
Example #6
class ClfConfig(scfg.Config):
    """
    This is the default configuration for running the classification example.

    Instances of this class behave like a dictionary. However, they can also be
    specified on the command line, via kwargs, or by pointing to a YAML/json
    file. See :mod:`scriptconfig` for details of how to use
    :class:`scriptconfig.Config` objects.
    """
    default = {
        'name':
        scfg.Value('clf_example',
                   help='A human readable tag that is "nice" for humans'),
        'workdir':
        scfg.Path('~/work/netharn', help='Dump all results in your workdir'),
        'workers':
        scfg.Value(2, help='number of parallel dataloading jobs'),
        'xpu':
        scfg.Value(
            'auto',
            help=
            'See netharn.XPU for details. Can be auto/cpu/xpu/cuda0/0,1,2,3'),
        'datasets':
        scfg.Value('special:shapes256',
                   help='Either a special key or a coco file'),
        'train_dataset':
        scfg.Value(None),
        'vali_dataset':
        scfg.Value(None),
        'test_dataset':
        scfg.Value(None),
        'sampler_backend':
        scfg.Value(None, help='ndsampler backend'),
        'channels':
        scfg.Value('rgb', help='special channel code. See ChannelSpec'),
        'arch':
        scfg.Value('resnet50', help='Network architecture code'),
        'optim':
        scfg.Value('adam',
                   help='Weight optimizer. Can be SGD, ADAM, ADAMW, etc.'),
        'input_dims':
        scfg.Value((224, 224), help='Window size to input to the network'),
        'normalize_inputs':
        scfg.Value(
            True,
            help=(
                'if True, precompute training mean and std for data whitening'
            )),
        'balance':
        scfg.Value(None, help='balance strategy. Can be category or None'),
        'augmenter':
        scfg.Value('simple', help='type of training dataset augmentation'),
        'batch_size':
        scfg.Value(3, help='number of items per batch'),
        'num_batches':
        scfg.Value(
            'auto',
            help=
            'Number of batches per epoch (mainly for balanced batch sampling)'
        ),
        'max_epoch':
        scfg.Value(140, help='Maximum number of epochs'),
        'patience':
        scfg.Value(
            140, help='Maximum "bad" validation epochs before early stopping'),
        'lr':
        scfg.Value(1e-4, help='Base learning rate'),
        'decay':
        scfg.Value(1e-5, help='Base weight decay'),
        'schedule':
        scfg.Value(
            'step90-120',
            help=
            ('Special coercible netharn code. Eg: onecycle50, step50, gamma, ReduceLROnPlateau-p10-c10'
             )),
        'init':
        scfg.Value(
            'noop',
            help=
            'How to initialize weights: e.g. noop, kaiming_normal, or a path to a pretrained model'
        ),
        'pretrained':
        scfg.Path(
            help=('alternative way to specify a path to a pretrained model')),
    }

    def normalize(self):
        if self['pretrained'] in ['null', 'None']:
            self['pretrained'] = None

        if self['pretrained'] is not None:
            self['init'] = 'pretrained'
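
The docstring above notes that a ClfConfig can be populated from defaults, a dict of kwargs, the command line, or a YAML/json file. A hedged sketch of those entry points (exact constructor keywords can vary between scriptconfig versions, and 'clf_config.json' is a placeholder file name):

config = ClfConfig()                                 # library defaults only
config = ClfConfig(data={'lr': 1e-3, 'xpu': 'cpu'})  # override via a dict
config = ClfConfig(cmdline=True)                     # parse sys.argv, e.g. --lr=1e-3 --xpu=cpu
config = ClfConfig(data='clf_config.json')           # point at a config file on disk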
Example #7
import scriptconfig as scfg
import ubelt as ub


class GrabVOC_Config(scfg.Config):
    default = {
        'dpath': scfg.Path(
            ub.expandpath('~/data/VOC'), help='download location'),
    }
Example #8
class StyleTransferConfig(scfg.Config):
    default = {
        'name':
        scfg.Value('style_example',
                   help='A human readable tag that is "nice" for humans'),
        'workdir':
        scfg.Path('~/work/netharn', help='Dump all results in your workdir'),
        'workers':
        scfg.Value(2, help='number of parallel dataloading jobs'),
        'xpu':
        scfg.Value(
            'auto',
            help=
            'See netharn.XPU for details. Can be auto/cpu/xpu/cuda0/0,1,2,3'),
        'datasets':
        scfg.Value('special:shapes256',
                   help='Either a special key or a coco file'),
        'train_dataset':
        scfg.Value(None),
        'vali_dataset':
        scfg.Value(None),
        'test_dataset':
        scfg.Value(None),
        'sampler_backend':
        scfg.Value(None, help='ndsampler backend'),
        'channels':
        scfg.Value('rgb', help='special channel code. See ChannelSpec'),

        # 'arch': scfg.Value('resnet50', help='Network architecture code'),
        'optim':
        scfg.Value('adam',
                   help='Weight optimizer. Can be SGD, ADAM, ADAMW, etc.'),
        'input_dims':
        scfg.Value((256, 256), help='Window size to input to the network'),

        # TODO
        'normalize_inputs':
        scfg.Value(
            True,
            help=(
                'if True, precompute training mean and std for data whitening'
            )),
        'balance':
        scfg.Value(None, help='balance strategy. Can be category or None'),
        # 'augmenter': scfg.Value('simple', help='type of training dataset augmentation'),
        'batch_size':
        scfg.Value(3, help='number of items per batch'),
        'num_batches':
        scfg.Value(
            'auto',
            help=
            'Number of batches per epoch (mainly for balanced batch sampling)'
        ),
        'max_epoch':
        scfg.Value(140, help='Maximum number of epochs'),
        'patience':
        scfg.Value(
            140, help='Maximum "bad" validation epochs before early stopping'),
        'lr':
        scfg.Value(1e-4, help='Base learning rate'),
        'decay':
        scfg.Value(1e-5, help='Base weight decay'),
        'schedule':
        scfg.Value(
            'step90-120',
            help=
            ('Special coercible netharn code. Eg: onecycle50, step50, gamma, ReduceLROnPlateau-p10-c10'
             )),
        'init':
        scfg.Value(
            'noop',
            help=
            'How to initialize weights: e.g. noop, kaiming_normal, or a path to a pretrained model'
        ),
        # 'pretrained': scfg.Path(help=('alternative way to specify a path to a pretrained model')),
    }