Example #1
def test_eval_plus_two_inputs():
    result = cntk.eval(
        cntk.plus(cntk.input_numpy([[1., 2., 3., 4.]]),
                  cntk.input_numpy([[1., 1., 0., 0.]])))
    TOLERANCE_ABSOLUTE = 1E-06
    assert np.allclose(result,
                       np.asarray([2., 3., 3., 4.]),
                       atol=TOLERANCE_ABSOLUTE)
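These tests exercise the early CNTK Python API: cntk.input_numpy attaches a NumPy array as an input node and cntk.eval runs a one-off forward evaluation of the graph. The expected output is just the elementwise sum of the two inputs, which can be verified in plain NumPy (a minimal sketch, independent of CNTK):

import numpy as np

left = np.asarray([[1., 2., 3., 4.]])
right = np.asarray([[1., 1., 0., 0.]])

# Elementwise sum that cntk.plus is expected to reproduce.
expected = (left + right)[0]
assert np.allclose(expected, np.asarray([2., 3., 3., 4.]), atol=1E-06)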
Example #2
def train_eval_logistic_regression_with_numpy(criterion_name=None,
                                              eval_name=None,
                                              device_id=-1):

    # for repro and tests :-)
    np.random.seed(1)

    train_X, train_y = synthetic_data(train_N, feature_dim, num_classes)
    test_X, test_y = synthetic_data(test_N, feature_dim, num_classes)

    # Set up the training data for CNTK. Before writing the CNTK configuration,
    # the data will be attached to X.reader.batch and y.reader.batch and then
    # serialized.
    X = C.input_numpy(train_X)
    y = C.input_numpy(train_y)

    # define our network -- one weight tensor and a bias
    W = C.parameter(value=np.zeros(shape=(num_classes, feature_dim)))
    b = C.parameter(value=np.zeros(shape=(num_classes, 1)))
    out = C.times(W, X) + b

    ce = C.cross_entropy_with_softmax(y, out)
    ce.tag = 'criterion'
    ce.name = criterion_name

    eval = C.ops.cntk1.SquareError(y, out)
    eval.tag = 'eval'
    eval.name = eval_name

    my_sgd = C.SGDParams(epoch_size=0,
                         minibatch_size=25,
                         learning_rates_per_mb=0.1,
                         max_epochs=3)

    with C.LocalExecutionContext('logreg_numpy',
                                 device_id=device_id,
                                 clean_up=True) as ctx:
        ctx.train(root_nodes=[ce, eval], training_params=my_sgd)

        # For testing, we attach the test data to the input nodes.
        X.reader.batch, y.reader.batch = test_X, test_y
        result = ctx.test(root_nodes=[ce, eval])
        return result
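In this snippet train_N, test_N, feature_dim, num_classes, and synthetic_data come from the surrounding test module and are not shown. Judging from Example #4 below, which inlines a two-class version of the same setup, the data generator presumably looks something like this (a hypothetical sketch, not the original helper):

import numpy as np

def synthetic_data(N, feature_dim, num_classes):
    # Random Gaussian features and one-hot labels, mirroring Example #4.
    X = np.random.randn(N, feature_dim)
    labels = np.random.randint(num_classes, size=N)
    Y = np.eye(num_classes)[labels]
    return X, Y

The model itself is plain multinomial logistic regression: C.times(W, X) + b feeds cross_entropy_with_softmax as the training criterion, SquareError is tagged as an additional evaluation metric, and after training the test arrays are swapped into X.reader.batch and y.reader.batch before ctx.test is called.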
Example #3
def train_eval_logistic_regression_with_numpy(criterion_name=None,
                                              eval_name=None,
                                              device_id=-1):

    # for repro and tests :-)
    np.random.seed(1)

    train_X, train_y = synthetic_data(train_N, feature_dim, num_classes)
    test_X, test_y = synthetic_data(test_N, feature_dim, num_classes)

    # Set up the training data for CNTK. Before writing the CNTK configuration,
    # the data will be attached to X.reader.batch and y.reader.batch and then
    # serialized. 
    X = C.input_numpy(train_X)
    y = C.input_numpy(train_y)

    # define our network -- one weight tensor and a bias
    W = C.parameter(value=np.zeros(shape=(num_classes, feature_dim)))
    b = C.parameter(value=np.zeros(shape=(num_classes, 1)))
    out = C.times(W, X) + b

    ce = C.cross_entropy_with_softmax(y, out)
    ce.tag = 'criterion'
    ce.name = criterion_name    
    
    eval = C.ops.cntk1.SquareError(y, out)
    eval.tag = 'eval'
    eval.name = eval_name

    my_sgd = C.SGDParams(epoch_size=0, minibatch_size=25,
                         learning_rates_per_mb=0.1, max_epochs=3)

    with C.LocalExecutionContext('logreg', clean_up=True) as ctx:
        ctx.device_id = device_id

        ctx.train(root_nodes=[ce, eval],
                  training_params=my_sgd)

        # For testing, we attach the test data to the input nodes.
        X.reader.batch, y.reader.batch = test_X, test_y
        result = ctx.test(root_nodes=[ce, eval])
        return result
Example #4
def train_eval_logistic_regression_with_numpy(criterion_name=None,
                                              eval_name=None,
                                              device_id=-1):

    # for repro and tests :-)
    np.random.seed(1)

    N = 500
    d = 250

    # create synthetic data using numpy
    X = np.random.randn(N, d)
    Y = np.random.randint(size=(N, 1), low=0, high=2)
    Y = np.hstack((Y, 1-Y))

    # set up the training data for CNTK
    x = C.input_numpy(X)
    y = C.input_numpy(Y)

    # define our network -- one weight tensor and a bias
    W = C.parameter(value=np.zeros(shape=(2, d)))
    b = C.parameter(value=np.zeros(shape=(2, 1)))
    out = C.times(W, x) + b

    ce = C.cross_entropy_with_softmax(y, out)
    ce.tag = 'criterion'
    ce.name = criterion_name    
    
    eval = C.ops.cntk1.SquareError(y, out)
    eval.tag = 'eval'
    eval.name = eval_name

    my_sgd = C.SGDParams(epoch_size=0, minibatch_size=25,
                         learning_rates_per_mb=0.1, max_epochs=3)
    with C.LocalExecutionContext('logreg') as ctx:
        ctx.device_id = device_id

        ctx.train(root_nodes=[ce, eval],
                  training_params=my_sgd)

        result = ctx.test(root_nodes=[ce, eval])
        return result
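Example #4 builds the synthetic data inline instead of calling synthetic_data: the features are standard normal and the binary labels are expanded into one-hot rows via np.hstack((Y, 1 - Y)). Since no test batch is attached before ctx.test, the evaluation here runs on the same data used for training. A quick illustration of the label encoding in plain NumPy:

import numpy as np

Y = np.asarray([[0], [1], [1]])
print(np.hstack((Y, 1 - Y)))
# [[0 1]
#  [1 0]
#  [1 0]]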
Example #5
def test_eval_plus_two_inputs():
    result = cntk.eval(
        cntk.plus(cntk.input_numpy([_LEFT]), cntk.input_numpy([_RIGHT])))
    TOLERANCE_ABSOLUTE = 1E-06
    assert np.allclose(result, _EXPECTED, atol=TOLERANCE_ABSOLUTE)
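_LEFT, _RIGHT, and _EXPECTED are module-level constants defined outside this snippet; Example #7 below spells the same test out with literals, so they presumably correspond to values like:

import numpy as np

# Inferred from Example #7, which inlines the same test.
_LEFT = [1., 2., 3., 4.]
_RIGHT = [1., 1., 0., 0.]
_EXPECTED = np.asarray([2., 3., 3., 4.])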
Example #6
def test_eval_plus_two_inputs():
    result = cntk.eval(cntk.plus(cntk.input_numpy([_LEFT]), cntk.input_numpy([_RIGHT])))
    TOLERANCE_ABSOLUTE = 1E-06    
    assert np.allclose(result, _EXPECTED, atol=TOLERANCE_ABSOLUTE)
Example #7
def test_eval_plus_two_inputs():
    result = cntk.eval(cntk.plus(cntk.input_numpy([[1., 2., 3., 4.]]),
                                 cntk.input_numpy([[1., 1., 0., 0.]])))
    TOLERANCE_ABSOLUTE = 1E-06
    assert np.allclose(result, np.asarray([2., 3., 3., 4.]), atol=TOLERANCE_ABSOLUTE)