Example #1
0
                        'unsupervised pre-training.')
    # NOTE(review): truncated fragment — the enclosing function, the parser
    # construction, and earlier options ('--epoch', '--level', '--logfile',
    # '--scale' are implied by the uses below) sit outside this view; the
    # comments here are limited to what the visible code demonstrates.
    parser.add_argument('--batch',
                        dest='batchSize',
                        type=int,
                        default=1,
                        help='Batch size for training and test sets.')
    parser.add_argument('--base',
                        dest='base',
                        type=str,
                        default='./leNet5',
                        help='Base name of the network output and temp files.')
    # required positional argument: path of the image to process
    parser.add_argument('image', help='Input image to train.')
    options = parser.parse_args()

    # setup the logger
    # setupLogging is defined elsewhere; may return None (checked below)
    log = setupLogging('likenessFinder', options.level, options.logfile)

    # read the file into memory
    if log is not None:
        log.info('Reading the Input into Memory...')
    imageFromDisk = Image.open(options.image)
    # Image.open is lazy — load() forces the pixel data to be decoded now
    imageFromDisk.load()

    # decimate everything for speed
    # copy first: Image.thumbnail() resizes in place, so the copy keeps the
    # original full-resolution image untouched
    thumbOrig = imageFromDisk.copy()
    # NOTE(review): under Python 3 this is float division ('//' may have been
    # intended), and Image.ANTIALIAS is deprecated/removed in modern Pillow
    # (Image.LANCZOS is the replacement) — TODO confirm target environment
    thumbOrig.thumbnail((imageFromDisk.size[0] / options.scale,
                         imageFromDisk.size[1] / options.scale),
                        Image.ANTIALIAS)

    # load a network from disk or train a new network
    # createNetwork is defined elsewhere in this file/project
    network = createNetwork(thumbOrig, log)
Example #2
0
                        help='Synapse for the deep network to distill. This ' +
                        'network should be trained and ready.')
    # NOTE(review): truncated fragment — parser construction and earlier
    # options (including the one whose cut help text above mentions a deep
    # network to distill, plus '--level', '--logfile', '--profile') are
    # outside this view.
    parser.add_argument('--syn',
                        dest='synapse',
                        type=str,
                        default=None,
                        help='Load from a previously saved network.')
    # required positional argument: dataset location (directory, pkl.gz, or
    # a previously distilled "dark" pickle)
    parser.add_argument('data',
                        help='Directory or pkl.gz file for the ' +
                        'training and test sets. This can be ' +
                        'the location of a dark pickle.')
    options = parser.parse_args()

    # setup the logger
    # the dataset path is folded into the logger/profiler name so parallel
    # runs against different datasets produce distinguishable logs
    logName = 'distillery: ' + options.data
    log = setupLogging(logName, options.level, options.logfile)
    prof = Profiler(log=log, name=logName, profFile=options.profile)

    # create a random number generator for efficiency
    from numpy.random import RandomState
    from operator import mul    # NOTE(review): unused in the visible code — TODO confirm
    from time import time
    # seed from wall-clock time → a different random stream on every run;
    # the commented line below pins the seed for reproducible debugging
    rng = RandomState(int(time()))
    #rng = RandomState(4567) # always initialize the same

    # NOTE: The pickleDataset will silently use previously created pickles if
    #       one exists (for efficiency). So watch out for stale pickles!
    # NOTE: User may pass a dark pickle into here, and the logic will react
    #       appropriately to the situation.
    train, test, labels = ingestImagery(filepath=options.data,
                                        shared=True,
    # NOTE(review): truncated fragment — near-duplicate of the likenessFinder
    # tail seen earlier in this file; the parser construction and the
    # '--level'/'--logfile'/'--scale' options implied below are outside view.
    parser.add_argument('--hidden', dest='hidden', type=int, default=400,
                        help='Number of Neurons in Hidden Layer.')
    parser.add_argument('--neuron', dest='neuron', type=int, default=250,
                        help='Number of Neurons in Output Layer.')
    parser.add_argument('--epoch', dest='numEpochs', type=int, default=15,
                        help='Number of epochs to run per layer during ' +
                             'unsupervised pre-training.')
    parser.add_argument('--batch', dest='batchSize', type=int, default=1,
                        help='Batch size for training and test sets.')
    parser.add_argument('--base', dest='base', type=str, default='./leNet5',
                        help='Base name of the network output and temp files.')
    # required positional argument: path of the image to process
    parser.add_argument('image', help='Input image to train.')
    options = parser.parse_args()

    # setup the logger
    # setupLogging is defined elsewhere; may return None (checked below)
    log = setupLogging('likenessFinder', options.level, options.logfile)

    # read the file into memory
    if log is not None :
        log.info('Reading the Input into Memory...')
    imageFromDisk = Image.open(options.image)
    # Image.open is lazy — load() forces the pixel data to be decoded now
    imageFromDisk.load()

    # decimate everything for speed
    # copy first: Image.thumbnail() resizes in place, so the copy keeps the
    # original full-resolution image untouched
    thumbOrig = imageFromDisk.copy()
    # NOTE(review): float division under Python 3 ('//' may have been
    # intended); Image.ANTIALIAS is deprecated in modern Pillow — TODO confirm
    thumbOrig.thumbnail((imageFromDisk.size[0] / options.scale,
                         imageFromDisk.size[1] / options.scale),
                        Image.ANTIALIAS)

    # load a network from disk or train a new network
    # createNetwork is defined elsewhere in this file/project
    network = createNetwork(thumbOrig, log)
    # NOTE(review): truncated fragment — parser construction and the
    # '--chipSize'/'--level'/'--logfile' options implied below are outside
    # this view.
    # NOTE(review): this help text appears copy-pasted from '--epoch';
    # presumably it should describe the number of chips extracted — TODO confirm
    parser.add_argument('--numChips', dest='numChips', type=int, default=1000,
                        help='Number of epochs to run per layer.')
    parser.add_argument('--epoch', dest='numEpochs', type=int, default=15,
                        help='Number of epochs to run per layer.')
    parser.add_argument('--batch', dest='batchSize', type=int, default=100,
                        help='Batch size for training and test sets.')
    parser.add_argument('--base', dest='base', type=str, default='./leNet5',
                        help='Base name of the network output and temp files.')
    parser.add_argument('--syn', dest='synapse', type=str, default=None,
                        help='Load from a previously saved network.')
    # required positional argument: dataset location
    parser.add_argument('data', help='Directory or pkl.gz file for the ' +
                                     'training and test sets')
    options = parser.parse_args()

    # setup the logger
    # dataset path is folded into the logger name to distinguish runs
    log = setupLogging('cnnPreTrainer: ' + options.data, 
                       options.level, options.logfile)

    # NOTE: The pickleDataset will silently use previously created pickles if
    #       one exists (for efficiency). So watch out for stale pickles!
    # keyword arguments forwarded to the randomChip chipping function
    chipArgs = {'chipSize': options.chipSize, 
                'numChips': options.numChips}
    # ingestImagery/randomChip are defined elsewhere in this project
    train = ingestImagery(filepath=options.data, shared=True,
                          batchSize=options.batchSize, 
                          log=log, chipFunc=randomChip, kwargs=chipArgs)

    # '--syn' given: resume from a previously saved network on disk
    if options.synapse is not None :
        # load a previously saved network
        network = StackedAENetwork(train, log=log)
        network.load(options.synapse)
    else :
        network = buildStackedAENetwork(
Example #5
0
                        help='Synapse for the shallow target network. This ' +
                        'network should be populated with freshly ' + 
                        'initialized layers for optimal results.')
    # NOTE(review): truncated fragment — parser construction and the earlier
    # option whose cut help text above mentions a "shallow target network"
    # are outside this view.
    parser.add_argument('--deep', dest='deep', type=str, default=None,
                        help='Synapse for the deep network to distill. This ' +
                        'network should be trained and ready.')
    parser.add_argument('--dark', dest='dark', type=str, default=None,
                        help='pkl.gz file previously created by the ' +
                        'distillery for dark knowledge transfer.')
    parser.add_argument('--data', dest='data', type=str, default=None,
                        help='Directory or pkl.gz file for the training and ' +
                        'test sets')
    options = parser.parse_args()

    # setup the logger
    # NOTE(review): options.data defaults to None, so this concatenation
    # raises TypeError whenever '--data' is omitted (e.g. the pure '--dark'
    # usage) — TODO confirm whether that path is ever exercised
    log = setupLogging('distillery: ' + options.data, 
                       options.level, options.logfile)

    # if the user specified a deep network and dataset, then distill the
    # knowledge into a new pickle to use for training.
    if options.deep is not None :
        # deferred import: only needed on the distillation path
        from dataset.ingest.distill import distillKnowledge

        # '--deep' and '--dark' are mutually exclusive usages
        if options.dark is not None :
            raise Exception('Only specify one usage, --deep or --dark.')
        # distillation needs a dataset to run the deep network against
        if options.data is None :
            raise Exception('When specifying a deep network, please also ' +
                            'specify a dataset to distill using --data.')

        # distill knowledge out of the deep network into a pickle
        deepNet = ClassifierNetwork(filepath=options.deep, log=log)
        options.dark = distillKnowledge(deepNet=deepNet,
Example #6
0
                        help='Number of inferior validation checks to end ' +
                             'the supervised learning session.')
    # NOTE(review): truncated fragment — parser construction and the earlier
    # option whose cut help text above mentions "inferior validation checks"
    # (an early-stopping limit) are outside this view.
    parser.add_argument('--holdout', dest='holdout', type=float, default=.05,
                        help='Percent of data to be held out for testing.')
    parser.add_argument('--batch', dest='batchSize', type=int, default=5,
                        help='Batch size for training and test sets.')
    parser.add_argument('--base', dest='base', type=str, default='./leNet5',
                        help='Base name of the network output and temp files.')
    parser.add_argument('--syn', dest='synapse', type=str, default=None,
                        help='Load from a previously saved network.')
    # required positional argument: dataset location
    parser.add_argument('data', help='Directory or pkl.gz file for the ' +
                                     'training and test sets')
    options = parser.parse_args()

    # setup the logger
    # dataset path is folded into the logger name to distinguish runs
    log = setupLogging('semiSupervisedTrainer: ' + options.data,
                       options.level, options.logfile)

    # create a random number generator for efficiency
    from numpy.random import RandomState
    from operator import mul    # NOTE(review): unused in the visible code — TODO confirm
    # NOTE(review): time() is used below but 'from time import time' is not
    # visible in this fragment — presumably imported earlier; TODO confirm
    # seed from wall-clock time → a different random stream on every run
    rng = RandomState(int(time()))

    # NOTE: The pickleDataset will silently use previously created pickles if
    #       one exists (for efficiency). So watch out for stale pickles!
    # NOTE(review): shared=False presumably keeps the ingested arrays as
    # host-side data rather than framework shared variables — TODO confirm
    # against ingestImagery's definition
    train, test, labels = ingestImagery(filepath=options.data, shared=False,
                                        batchSize=options.batchSize, 
                                        holdoutPercentage=options.holdout, 
                                        log=log)
    # shape of the first training batch, used to size the network layers below
    trainShape = train[0].shape

    # create the stacked network -- LeNet-5 (minus the output layer)