import numpy as np
import pytest
import caffe
from caffe.proto import caffe_pb2
# GradientChecker is the project's numerical gradient-checking helper; this
# import path is an assumption (the caffe_helper fork of pycaffe ships one).
from caffe.gradient_check_util import GradientChecker


def test_hdf5_layer(tmpdir):
    td = tmpdir.mkdir('test_hdf5_layer')
    path_h5 = td.join('h5layer.in.h5').ensure().strpath
    n_sample = 5
    names = ['data%02d' % i for i in range(n_sample)]
    blob_name = 'blob1'
    batch_size = 2
    blob_shape = (1, 2, 3)
    data = np.random.rand(*((n_sample, ) + blob_shape))
    import h5py
    with h5py.File(path_h5, 'w') as hd:
        for i, name in enumerate(names):
            hd.create_group(name)
            hd[name][blob_name] = data[i]
    import csv
    lpath_source = td.join('source.txt').ensure()
    with lpath_source.open('w') as f:
        csv.writer(f).writerows([[name] for name in names])
    t = caffe.Blob([])
    bottom = []
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.data_layers"
    lp.python_param.layer = "HDF5Layer"
    lp.python_param.param_str = str(
        dict(batch_size=batch_size,
             source=lpath_source.strpath,
             path_h5=path_h5,
             column_id=0,
             blob_name='blob1'))
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    j = 0
    for i in range(3):
        layer.Reshape(bottom, top)
        layer.Forward(bottom, top)
        assert top[0].shape == (batch_size, ) + blob_shape
        for blob in top[0].data:
            j %= n_sample
            assert np.all(blob == data[j].astype(np.float32))
            j += 1

    # Shuffle: output values are not verified here yet.
    lp.python_param.param_str = str(
        dict(batch_size=batch_size,
             source=lpath_source.strpath,
             path_h5=path_h5,
             column_id=0,
             blob_name='blob1',
             shuffle=True,
             random_seed=313))
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    for i in range(3):
        layer.Reshape(bottom, top)
        layer.Forward(bottom, top)
        assert top[0].shape == (batch_size, ) + blob_shape
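
Every Python-layer test in this collection follows the same recipe: fill a
caffe_pb2.LayerParameter with type = "Python", point python_param.module and
python_param.layer at a layer class, optionally pass options through
param_str as a stringified dict, then drive the layer with SetUp, Reshape and
Forward. For orientation, here is a minimal sketch of the other side of that
contract, assuming only the standard caffe.Layer interface; IdentityScaleLayer
is hypothetical and not one of the caffe_helper layers under test.

import ast

import caffe


class IdentityScaleLayer(caffe.Layer):
    """Hypothetical minimal Python layer: top = scale * bottom."""

    def setup(self, bottom, top):
        # param_str arrives exactly as the tests build it with str(dict(...));
        # ast.literal_eval turns it back into a dict of plain literals.
        params = ast.literal_eval(self.param_str) if self.param_str else {}
        self.scale = params.get('scale', 1.0)

    def reshape(self, bottom, top):
        top[0].reshape(*bottom[0].shape)

    def forward(self, bottom, top):
        top[0].data[...] = self.scale * bottom[0].data

    def backward(self, top, propagate_down, bottom):
        if propagate_down[0]:
            bottom[0].diff[...] = self.scale * top[0].diff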
def matrix_mult_layer(request):
    if request.config.getoption('caffe_cpu'):
        raise pytest.skip("MatrixMultLayer requires GPU")
    m1 = caffe.Blob((2, 4, 2))
    m2 = caffe.Blob((2, 2, 3))
    t = caffe.Blob([])
    t1, t2 = request.param
    if t1:
        s = m1.shape
        m1.reshape(s[0], s[2], s[1])
    if t2:
        s = m2.shape
        m2.reshape(s[0], s[2], s[1])
    rng = np.random.RandomState(313)
    m1.data[...] = rng.randn(*m1.shape)
    m2.data[...] = rng.randn(*m2.shape)
    bottom = [m1, m2]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "MatrixMultLayer"
    lp.python_param.param_str = str({'t1': t1, 't2': t2})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    return layer, bottom, top, request.param
def test_morphology_layer():
    ba, c, h, w = (2, 3, 4, 4)
    b = caffe.Blob([ba, c, h, w])
    t = caffe.Blob([])
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.vision_layers"
    lp.python_param.layer = "MorphologyLayer"
    lp.python_param.param_str = str(dict(op='erode', kernel='4nn'))
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    b.data[1, ...] = [[
        [0, 1, 1, 0],
        [1, 1, 1, 1],
        [1, 1, 1, 1],
        [0, 1, 1, 0],
    ]]
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert np.all(top[0].data[1, ...] == [[
        [0, 0, 0, 0],
        [0, 1, 1, 0],
        [0, 1, 1, 0],
        [0, 0, 0, 0],
    ]])
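
The expected output above is just a 4-connected binary erosion; a quick
cross-check outside Caffe, assuming scipy is available (ndimage's default 2-D
structuring element is the same 4-connected cross as the layer's kernel='4nn'):

import numpy as np
from scipy import ndimage

img = np.array([
    [0, 1, 1, 0],
    [1, 1, 1, 1],
    [1, 1, 1, 1],
    [0, 1, 1, 0],
])
eroded = ndimage.binary_erosion(img).astype(int)
# -> zeros on the border and a 2x2 block of ones in the middle,
#    matching the assertion in the test.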
def test_dssim_layer(request):
    if request.config.getoption('caffe_cpu'):
        raise pytest.skip("DSSIMLayer requires GPU")
    x, y = np.ogrid[:5, :5]
    img1 = np.sin(x / 5.0 * np.pi) * np.cos(y / 5.0 * np.pi)
    img1 = np.repeat(img1[..., np.newaxis], 3, 2)
    img1 = (img1 - img1.min()) / (img1.max() - img1.min())
    rng = np.random.RandomState(313)
    img2 = img1 + rng.randn(*img1.shape) * 0.2
    img2[img2 > 1] = 1
    img2[img2 < 0] = 0
    bottom = [caffe.Blob([]), caffe.Blob([])]
    top = [caffe.Blob([])]
    img1 = img1.transpose(2, 0, 1)
    img2 = img2.transpose(2, 0, 1)
    bottom[0].reshape(*((1,) + img1.shape))
    bottom[1].reshape(*((1,) + img2.shape))
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.loss_layers"
    lp.python_param.layer = "DSSIMLayer"
    lp.python_param.param_str = str({'hsize': 3})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    bottom[0].data[...] = img1[np.newaxis]
    bottom[1].data[...] = img2[np.newaxis]
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(
        layer, bottom, top, check_bottom=[0, 1])
def test_crossent_layer(request):
    if request.config.getoption('caffe_cpu'):
        raise pytest.skip("crossentropyLossLayer requires GPU")
    pred = caffe.Blob((5, 8))
    label = caffe.Blob((5, 1))
    loss = caffe.Blob([])
    bottom = [pred, label]
    top = [loss]

    # Fill
    rng = np.random.RandomState(313)
    pred.data[...] = rng.rand(*pred.shape) + 0.1
    label.data[...] = rng.randint(0, 8, label.shape)
    pred.data[...] = pred.data / pred.data.sum(axis=1, keepdims=True)
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.loss_layers"
    lp.python_param.layer = "CrossEntropyLossLayer"
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    ref = -np.mean(
        np.log(
            np.maximum(np.finfo(np.float32).tiny, pred.data)
        ).reshape(pred.shape[0], -1)[
            np.arange(pred.shape[0]), label.data.astype('int32')]
    )
    assert np.isclose(ref, loss.data)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(
        layer, bottom, top, check_bottom=[0])
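
The reference loss above relies on NumPy integer-array indexing to pick, for
each row, the predicted probability of that row's label. A tiny standalone
illustration of that indexing trick:

import numpy as np

probs = np.array([[0.1, 0.7, 0.2],
                  [0.3, 0.3, 0.4]])
labels = np.array([1, 2])
picked = probs[np.arange(probs.shape[0]), labels]  # [0.7, 0.4]
loss = -np.log(picked).mean()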
def test_slice_by_array_layer(blob_4_2322, tmpdir):
    path_indexes = tmpdir.join('indexes.mat').strpath
    from scipy.io import savemat
    indexes = np.array([2, 0])
    savemat(path_indexes, {'indexes': indexes})
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "SliceByArrayLayer"
    lp.python_param.param_str = str(
        {'path_mat': path_indexes, 'key': 'indexes'})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert np.all(top[0].data == bottom[0].data[:, indexes, ...])
    checker = GradientChecker(1e-2, 1e-5)
    checker.check_gradient_exhaustive(
        layer, bottom, top)
def test_gradient_4_layer():
    ba, c, h, w = (2, 3, 4, 4)
    b = caffe.Blob([ba, c, h, w])
    t = caffe.Blob([])
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.vision_layers"
    lp.python_param.layer = "Gradient4Layer"
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert top[0].shape == (ba, 2, c, h, w)
    assert np.all(
        top[0].data[:, 0, :, :, :-1] ==
        bottom[0].data[..., :, :-1] - bottom[0].data[..., :, 1:])
    assert np.all(
        top[0].data[:, 1, :, :-1, :] ==
        bottom[0].data[..., :-1, :] - bottom[0].data[..., 1:, :])
    checker = GradientChecker(1e-2, 1e-4)
    checker.check_gradient_exhaustive(
        layer, bottom, top)
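
The two assertions above say Gradient4Layer emits backward differences along
width (channel 0 of the new axis) and height (channel 1). The same quantities
in plain NumPy, for comparison:

import numpy as np

x = np.arange(12.0).reshape(1, 1, 3, 4)
gx = x[..., :, :-1] - x[..., :, 1:]   # == -np.diff(x, axis=-1)
gy = x[..., :-1, :] - x[..., 1:, :]   # == -np.diff(x, axis=-2)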
def test_tile_layer():
    ba, c, h, w = [2, 3, 3, 4]
    b, t = caffe.Blob([ba, c, h, w]), caffe.Blob([])
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "TileLayer"
    axis = 1
    num = 5
    lp.python_param.param_str = str(
        {'axis': axis, 'num': num})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert t.shape == (ba, c * num, h, w)
    reps = [1 for _ in t.shape]
    reps[axis] = num
    assert np.all(np.tile(b.data, reps) == t.data)
    checker = GradientChecker(1e-2, 1e-5)
    checker.check_gradient_exhaustive(
        layer, bottom, top)
def test_logit_loss_layer(request):
    if request.config.getoption('caffe_cpu'):
        raise pytest.skip("LogitLossLayer requires GPU")
    # data
    y = np.random.rand(2, 3, 4, 5)
    t = (np.random.rand(2, 3, 4, 5) > 0.5).astype(float) \
        - (np.random.rand(2, 3, 4, 5) > 0.5).astype(float)
    # setting up blobs
    bottom = [caffe.Blob([]), caffe.Blob([])]
    top = [caffe.Blob([])]
    bottom[0].reshape(*y.shape)
    bottom[1].reshape(*t.shape)
    bottom[0].data[...] = y
    bottom[1].data[...] = t
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.loss_layers"
    lp.python_param.layer = "LogitLossLayer"
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    # reference computation
    ref = np.sum(np.abs(t) * np.log(1 + np.exp(-y * t))) / np.sum(np.abs(t))
    assert np.isclose(top[0].data, ref)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(
        layer, bottom, top, check_bottom=[0])
 def test_create_conv_layer(self):
     # Setting layer parameter for convolution
     layer_param = caffe_pb2.LayerParameter()
     layer_param.type = 'Convolution'
     layer_param.name = 'conv1'
     cparam = layer_param.convolution_param
     cparam.num_output = 3
     cparam.kernel_size.append(2)
     wfiller = cparam.weight_filler
     wfiller.type = "uniform"
     wfiller.max = 3
     wfiller.min = 1.5
     # Create layer
     conv_layer = caffe.create_layer(layer_param)
     self.assertEqual(conv_layer.type, 'Convolution')
     # Set up layer
     conv_layer.SetUp([self.blobi], [self.blobo])
     weights = conv_layer.blobs[0]
     self.assertTrue(np.all(weights.data >= 1.5))
     self.assertTrue(np.all(weights.data <= 3.0))
     # Reshape out blobs
     conv_layer.Reshape([self.blobi], [self.blobo])
     shapei = self.shapei
     shapeo = self.blobo.data.shape
     self.assertEqual(
         shapeo,
         (shapei[0], cparam.num_output,
          shapei[2] - cparam.kernel_size[0] + 1,
          shapei[3] - cparam.kernel_size[0] + 1))
     # Forward, Backward
     conv_layer.Forward([self.blobi], [self.blobo])
     conv_layer.Backward([self.blobo], [True], [self.blobi])
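
The shape assertion in this test is the stride-1, zero-padding special case of
Caffe's convolution output-size rule. A small helper showing the general
formula (a sketch for the symmetric square case):

def conv_output_dim(input_dim, kernel_size, stride=1, pad=0, dilation=1):
    # Caffe computes each spatial output dimension as
    # floor((input + 2*pad - dilation*(kernel - 1) - 1) / stride) + 1.
    kernel_extent = dilation * (kernel_size - 1) + 1
    return (input_dim + 2 * pad - kernel_extent) // stride + 1

# With kernel_size=2, stride=1, pad=0 this is input_dim - 1, which is exactly
# the shapei[2] - cparam.kernel_size[0] + 1 checked above.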
def upsample_creator(factor, num_in):
    kernel = get_kernel_size(factor)
    stride = factor
    lp = caffe.LayerParameter(
        """name: "upsample", type: "Deconvolution"
        convolution_param { kernel_size: %d stride: %d num_output: %d group: %d pad: %d
        weight_filler: { type: "bilinear_upsampling" } bias_term: false }""" %
        (kernel, stride, num_in, num_in, get_pad(factor)))
    return caffe.create_layer(lp.to_python())
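
get_kernel_size and get_pad are not shown in this snippet. A common convention
for bilinear-upsampling deconvolution layers (the FCN-style one) is sketched
below; the original helpers may differ.

import math

def get_kernel_size(factor):
    # Kernel size of a deconvolution that upsamples by `factor`.
    return 2 * factor - factor % 2

def get_pad(factor):
    # Padding that keeps the output aligned with the input grid.
    return int(math.ceil((factor - 1) / 2.0))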
    def test_mask_loss(self):
        lp = caffe_pb2.LayerParameter()
        lp.type = "Python"
        lp.python_param.module = 'mask_reg_layer.layer'
        lp.python_param.layer = 'MaskLossLayer'
        layer = caffe.create_layer(lp)

        checker = GradientChecker(1e-2, 1e-1)
        checker.check_gradient_exhaustive(
            layer, self.bottom, self.top, check_bottom=[0])
 def view_sigmoid_output(self):
     net = self.solver.net
     lp = caffe_pb2.LayerParameter()
     lp.type = "Sigmoid"
     layer = caffe.create_layer(lp)
     bottom = [net.blobs['cls_score']]
     top = [caffe.Blob([])]
     labels = net.blobs['labels'].data
     layer.SetUp(bottom, top)
     layer.Reshape(bottom, top)
     layer.Forward(bottom, top)
     np.set_printoptions(precision=3, suppress=True)
     print("Sigmoid output v.s. Labels: ")
     print(np.c_[top[0].data, labels])
 def test_euclidean(self):
     lp = caffe_pb2.LayerParameter()
     lp.type = "EuclideanLoss"
     layer = caffe.create_layer(lp)
     layer.SetUp(self.bottom, self.top)
     layer.Reshape(self.bottom, self.top)
     layer.Forward(self.bottom, self.top)
     # manual computation
     loss = np.sum((self.bottom[0].data - self.bottom[1].data) ** 2) \
         / self.bottom[0].shape[0] / 2.0
     self.assertAlmostEqual(float(self.top[0].data), loss, 5)
     checker = GradientChecker(1e-2, 1e-2)
     checker.check_gradient_exhaustive(
         layer, self.bottom, self.top, check_bottom='all')
def reshape_layer(blob_4_2322):
    b, _, _, t = blob_4_2322
    t.reshape(*b.shape)
    bottom = [t]
    top = [t]
    reshape = (2, 12)
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "ReshapeLayer"
    lp.python_param.param_str = str({'shape': reshape})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    return layer
 def view_softmax_output(self):
     net = self.solver.net
     lp = caffe_pb2.LayerParameter()
     lp.type = "Softmax"
     layer = caffe.create_layer(lp)
     bottom = [net.blobs['cls_score']]
     top = [caffe.Blob([])]
     labels = net.blobs['labels'].data
     layer.SetUp(bottom, top)
     layer.Reshape(bottom, top)
     layer.Forward(bottom, top)
     np.set_printoptions(precision=3, suppress=True)
     print("Softmax output v.s. Labels: ")
     # print(np.sum(top[0].data,axis=1))
     # print(np.sum(top[0].data,axis=0))
     # print(top[0].data.shape)
     # print(np.c_[top[0].data, labels])
     print(np.c_[np.argmax(top[0].data, axis=1), labels])
def test_reduction_layer_sum(blob_4_2322):
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "ReductionLayer"
    lp.python_param.param_str = str({'axis': 1, 'op': 'sum'})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert np.all(b.data.sum(layer.axis_, keepdims=True) == t.data)
    checker = GradientChecker(1e-2, 1e-4)
    checker.check_gradient_exhaustive(layer, bottom, top)
def sil2_loss_layer(request, blob_4_2322):
    if request.config.getoption('caffe_cpu'):
        pytest.skip("ScaleInvariantL2LossLayer requires GPU")
    print("Call:", request.fixturename)
    # Create blobs
    pred, label, mask, top = blob_4_2322
    bottom = [pred, label, mask]
    top = [top]
    lam = 0.5
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.loss_layers"
    lp.python_param.layer = "ScaleInvariantL2LossLayer"
    lp.python_param.param_str = str({'lambda': lam})
    # caffe.set_mode_gpu()
    # caffe.set_device(0)
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    return layer
 def test_inner_product(self):
     lp = caffe_pb2.LayerParameter()
     lp.type = "InnerProduct"
     lp.inner_product_param.num_output = 3
     layer = caffe.create_layer(lp)
     layer.SetUp([self.bottom[0]], self.top)
     w = self.rng.randn(*layer.blobs[0].shape)
     b = self.rng.randn(*layer.blobs[1].shape)
     layer.blobs[0].data[...] = w
     layer.blobs[1].data[...] = b
     layer.Reshape([self.bottom[0]], self.top)
     layer.Forward([self.bottom[0]], self.top)
     assert np.allclose(
         self.top[0].data,
         np.dot(
             self.bottom[0].data.reshape(self.bottom[0].shape[0], -1), w.T
             ) + b
         )
     checker = GradientChecker(1e-2, 1e-1)
     checker.check_gradient_exhaustive(
         layer, [self.bottom[0]], self.top, check_bottom=[0])
def test_parameter_layer():
    t = caffe.Blob([])
    bottom = []
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "ParameterLayer"
    lp.python_param.param_str = str(
        dict(shape=(2, 3, 2, 2),
             filler="lambda shape, rng: rng.randn(*shape) * 0.01"))
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    assert len(layer.blobs) == 1
    assert layer.blobs[0].shape == (2, 3, 2, 2)
    param_copy = layer.blobs[0].data.copy()
    layer.Forward(bottom, top)
    assert np.allclose(top[0].data, param_copy)
    checker = GradientChecker(1e-3, 1e-5)
    checker.check_gradient_exhaustive(layer, bottom, top)
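
The filler entry in param_str is passed as source code, so the layer
presumably evaluates it to obtain a function of (shape, rng); that behavior of
ParameterLayer is an assumption here. The pattern itself looks like:

import numpy as np

filler_src = "lambda shape, rng: rng.randn(*shape) * 0.01"
filler = eval(filler_src)  # assumed: the layer does something equivalent
rng = np.random.RandomState(313)
param = filler((2, 3, 2, 2), rng)
assert param.shape == (2, 3, 2, 2)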
def test_axpb_layer(blob_4_2322):
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    va = 0.7
    vb = -0.3
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "AXPBLayer"
    lp.python_param.param_str = str({'a': va, 'b': vb})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert np.all(va * b.data + vb == t.data)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(layer, bottom, top)
def test_broadcast_layer():
    ba, c, h, w = [2, 1, 3, 4]
    b, t = caffe.Blob([ba, c, h, w]), caffe.Blob([])
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "BroadcastLayer"
    lp.python_param.param_str = str({'axis': 1, 'num': 3})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.randn(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    assert t.shape == (ba, 3, h, w)
    for i in range(3):
        assert np.all(b.data == t.data[:, i:i + 1])
    checker = GradientChecker(1e-2, 1e-5)
    checker.check_gradient_exhaustive(layer, bottom, top)
 def test_SoftmaxLoss(self):
     net = self.solver.net
     #print(net.blobs.keys())
     # for i in range(30):
     #     print("shape @ {}: {}".format(i,net.blobs[i].shape))
     # print(dir(net))
     # print(len(net.blobs))
     # print(dir(net.layer_dict['cls_score']))
     # sys.exit()
     #print(net.layer_dict.keys())
     lp = caffe_pb2.LayerParameter()
     lp.type = "Sigmoid"
     layer = caffe.create_layer(lp)
     data = net.blobs['cls_score'].data
     bottom = [caffe.Blob(data.shape)]
     bottom[0].data[...] = data
     top = [caffe.Blob([])]
     layer.SetUp(bottom, top)
     layer.Reshape(bottom, top)
     layer.Forward(bottom, top)
     print("data", data, "top[0].data", top[0].data)
     print(net.blobs['labels'].data - top[0].data)
def test_lp_normalization_layer(blob_4_2322, lpnorm_params):
    axis, p = lpnorm_params
    b, _, _, t = blob_4_2322
    bottom = [b]
    top = [t]
    # Create Layer
    lp = caffe_pb2.LayerParameter()
    lp.type = "Python"
    lp.python_param.module = "caffe_helper.layers.common_layers"
    lp.python_param.layer = "LpNormalizationLayer"
    lp.python_param.param_str = str({'axis': axis, 'p': p})
    layer = caffe.create_layer(lp)
    layer.SetUp(bottom, top)
    rng = np.random.RandomState(313)
    b.data[...] = rng.rand(*b.shape)
    layer.Reshape(bottom, top)
    layer.Forward(bottom, top)
    if axis is None:
        axis = tuple(range(1, len(bottom[0].shape)))
    test_top = b.data / ((b.data**p).sum(axis, keepdims=True)**(1. / p))
    assert np.allclose(test_top, t.data)
    checker = GradientChecker(1e-3, 1e-2)
    checker.check_gradient_exhaustive(layer, bottom, top)