Example #1
File: gtsrb.py Project: woodygzp/test1
    # one-hot encode the labels
    valid_set.y = np.float32(np.eye(num_outputs)[valid_set.y])
    test_set.y = np.float32(np.eye(num_outputs)[test_set.y])

    # for hinge loss: rescale labels from {0, 1} to {-1, +1}
    train_set.y = 2 * train_set.y - 1.
    valid_set.y = 2 * valid_set.y - 1.
    test_set.y = 2 * test_set.y - 1.

    print('Building the CNN...')

    # Prepare Theano variables for inputs and targets
    input = T.tensor4('inputs')
    target = T.matrix('targets')
    LR = T.scalar('LR', dtype=theano.config.floatX)

    cnn = cnv.genCnv(input, num_outputs, learning_parameters)

    train_output = lasagne.layers.get_output(cnn, deterministic=False)

    # squared hinge loss
    loss = T.mean(T.sqr(T.maximum(0., 1. - target * train_output)))

    # W updates
    W = lasagne.layers.get_all_params(cnn, binary=True)
    W_grads = binary_net.compute_grads(loss, cnn)
    updates = lasagne.updates.adam(loss_or_grads=W_grads,
                                   params=W,
                                   learning_rate=LR)
    updates = binary_net.clipping_scaling(updates, cnn)

    # other parameters updates
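The excerpt stops at the comment above. In BinaryNet-style training scripts this usually continues by adding ADAM updates for the remaining non-binary parameters and compiling the training function; a minimal sketch of that continuation, assuming the usual imports (theano, lasagne, binary_net) and the variables defined above:

    # ADAM updates for all remaining (non-binary) trainable parameters,
    # merged into the same updates dictionary as the binary-weight updates
    params = lasagne.layers.get_all_params(cnn, trainable=True, binary=False)
    updates.update(lasagne.updates.adam(loss_or_grads=loss,
                                        params=params,
                                        learning_rate=LR))

    # compile the training function: one call performs one parameter update
    train_fn = theano.function([input, target, LR], loss, updates=updates)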
Example #2
    # for hinge loss: rescale labels from {0, 1} to {-1, +1}
    train_set.y = 2 * train_set.y - 1.
    valid_set.y = 2 * valid_set.y - 1.
    test_set.y = 2 * test_set.y - 1.

    print('Building Network...')

    # Prepare Theano variables for inputs and targets
    input = T.tensor4('inputs')
    target = T.matrix('targets')
    LR = T.scalar('LR', dtype=theano.config.floatX)

    if args.model == 'cnv':
        import cnv
        cnn = cnv.genCnv(input, classes, learning_parameters)
    elif args.model == 'resnet':
        import resnet
        cnn = resnet.genCnv(input, classes, learning_parameters)
    elif args.model == 'lenet':
        import lenet
        cnn = lenet.genCnv(input, classes, learning_parameters)
    elif args.model == 'inception':
        import inception
        cnn = inception.genCnv(input, classes, learning_parameters)

    train_output = lasagne.layers.get_output(cnn, deterministic=False)

    # squared hinge loss
    loss = T.mean(T.sqr(T.maximum(0., 1. - target * train_output)))
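For reference, the `2 * y - 1.` mapping above converts one-hot rows from {0, 1} to {-1, +1}, and the squared hinge loss then penalises any output whose margin against its target falls below 1. A small NumPy illustration of the same computation (values are made up):

    import numpy as np

    target = np.float32([[-1., +1., -1.]])   # one-hot row mapped to {-1, +1}
    output = np.float32([[-0.4, 0.7, 0.9]])  # hypothetical network outputs
    # elementwise margin max(0, 1 - t*y), squared, then averaged
    loss = np.mean(np.square(np.maximum(0., 1. - target * output)))
    print(loss)  # large contribution from the wrong-signed third unit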
Example #3
    # one-hot encode the labels (30 classes)
    valid_sety = np.float32(np.eye(30)[valid_sety])
    test_sety = np.float32(np.eye(30)[test_sety])

    # for hinge loss: rescale labels from {0, 1} to {-1, +1}
    train_sety = 2 * train_sety - 1.
    valid_sety = 2 * valid_sety - 1.
    test_sety = 2 * test_sety - 1.

    print('Building the CNN...')

    # Prepare Theano variables for inputs and targets
    input = T.tensor4('inputs')
    target = T.matrix('targets')
    LR = T.scalar('LR', dtype=theano.config.floatX)

    cnn = cnv.genCnv(input, 30, learning_parameters)

    train_output = lasagne.layers.get_output(cnn, deterministic=False)

    # squared hinge loss
    loss = T.mean(T.sqr(T.maximum(0., 1. - target * train_output)))

    # W updates
    W = lasagne.layers.get_all_params(cnn, binary=True)
    W_grads = binary_net.compute_grads(loss, cnn)
    updates = lasagne.updates.adam(loss_or_grads=W_grads,
                                   params=W,
                                   learning_rate=LR)
    updates = binary_net.clipping_scaling(updates, cnn)

    # other parameters updates
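Like Example #1, this excerpt is cut off at the non-binary parameter updates. The evaluation side of these scripts typically builds a deterministic output and compiles a validation function; a minimal sketch, assuming the variables defined above and the standard theano/lasagne imports:

    # deterministic forward pass for evaluation
    test_output = lasagne.layers.get_output(cnn, deterministic=True)
    test_loss = T.mean(T.sqr(T.maximum(0., 1. - target * test_output)))
    test_err = T.mean(T.neq(T.argmax(test_output, axis=1),
                            T.argmax(target, axis=1)),
                      dtype=theano.config.floatX)

    # compile a validation function returning loss and error rate
    val_fn = theano.function([input, target], [test_loss, test_err])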