Example #1
0
 def check_fast_conv(self):
     """
     Benchmark the fast conv forward/backward implementations against the
     naive reference versions, printing timings, speedup factors, and
     relative errors between the two outputs.
     """
     # Random test data: 100 images of shape (3, 31, 31), 25 filters of
     # shape (3, 3, 3); with stride 2 / pad 1 the output is (25, 16, 16),
     # which is why dout has that shape.
     x = np.random.randn(100, 3, 31, 31)
     w = np.random.randn(25, 3, 3, 3)
     b = np.random.randn(25,)
     dout = np.random.randn(100, 25, 16, 16)
     conv_param = {'stride': 2, 'pad': 1}

     # Time the naive and fast forward passes on identical inputs.
     t0 = time()
     out_naive, cache_naive = conv_forward_naive(x, w, b, conv_param)
     t1 = time()
     out_fast, cache_fast = conv_forward_fast(x, w, b, conv_param)
     t2 = time()

     print('Testing conv_forward_fast:')
     print('Naive: %fs' % (t1 - t0))
     print('Fast: %fs' % (t2 - t1))
     print('Speedup: %fx' % ((t1 - t0) / (t2 - t1)))
     print('Difference: ', self.rel_error(out_naive, out_fast))

     # Time the backward passes, reusing the caches from the forward runs
     # so each backward variant gets its matching cache.
     t0 = time()
     dx_naive, dw_naive, db_naive = conv_backward_naive(dout, cache_naive)
     t1 = time()
     dx_fast, dw_fast, db_fast = conv_backward_fast(dout, cache_fast)
     t2 = time()

     print('\nTesting conv_backward_fast:')
     print('Naive: %fs' % (t1 - t0))
     print('Fast: %fs' % (t2 - t1))
     print('Speedup: %fx' % ((t1 - t0) / (t2 - t1)))
     print('dx difference: ', self.rel_error(dx_naive, dx_fast))
     print('dw difference: ', self.rel_error(dw_naive, dw_fast))
     print('db difference: ', self.rel_error(db_naive, db_fast))
Example #2
0
def conv_relu_backward(dout, cache):
    """
    Backward pass for the conv-relu convenience layer.

    Inputs:
    - dout: Upstream derivatives.
    - cache: (conv_cache, relu_cache) tuple saved by the forward pass.

    Returns a tuple (dx, dw, db) of gradients for the conv inputs.
    """
    conv_cache, relu_cache = cache
    # Propagate through ReLU first, then through the convolution.
    return conv_backward_fast(relu_backward(dout, relu_cache), conv_cache)
def conv_relu_backward(dout, cache):
    """
    Backward pass for the conv-relu convenience layer.

    NOTE(review): this re-defines a function of the same name declared
    just above; the later definition is the one that takes effect.
    """
    conv_cache, relu_cache = cache
    # Undo the ReLU, then the convolution, in reverse forward order.
    dupstream = relu_backward(dout, relu_cache)
    dx, dw, db = conv_backward_fast(dupstream, conv_cache)
    return dx, dw, db
Example #4
0
def conv_relu_pool_backward(dout, cache):
    """
    Backward pass for the conv-relu-pool convenience layer.

    Inputs:
    - dout: Upstream derivatives.
    - cache: (conv_cache, relu_cache, pool_cache) from the forward pass.

    Returns a tuple (dx, dw, db) of gradients for the conv inputs.
    """
    conv_cache, relu_cache, pool_cache = cache
    # Reverse the forward order: pool -> relu -> conv.
    grad = max_pool_backward_fast(dout, pool_cache)
    grad = relu_backward(grad, relu_cache)
    return conv_backward_fast(grad, conv_cache)
def conv_relu_pool_backward(dout, cache):
    """
    Backward pass for the conv-relu-pool convenience layer.

    NOTE(review): this re-defines a function of the same name declared
    just above; the later definition is the one that takes effect.
    """
    conv_cache, relu_cache, pool_cache = cache
    # Walk the layers backwards: max-pool, then ReLU, then conv.
    dpool = max_pool_backward_fast(dout, pool_cache)
    drelu = relu_backward(dpool, relu_cache)
    dx, dw, db = conv_backward_fast(drelu, conv_cache)
    return dx, dw, db
Example #6
0
# Benchmark the fast conv layers against the naive reference versions.
# NOTE(review): x, w, b, dout, and conv_param are assumed to be defined
# earlier in the file (outside this chunk) -- confirm before running.

# Time the naive vs. fast forward passes on the same inputs.
t0 = time()
out_naive, cache_naive = conv_forward_naive(x, w, b, conv_param)
t1 = time()
out_fast, cache_fast = conv_forward_fast(x, w, b, conv_param)
t2 = time()

print('Testing conv_forward_fast:')
print('Naive: %fs' % (t1 - t0))
print('Fast: %fs' % (t2 - t1))
print('Speedup: %fx' % ((t1 - t0) / (t2 - t1)))
print('Difference: ', rel_error(out_naive, out_fast))

# Time the backward passes, reusing each variant's own forward cache.
t0 = time()
dx_naive, dw_naive, db_naive = conv_backward_naive(dout, cache_naive)
t1 = time()
dx_fast, dw_fast, db_fast = conv_backward_fast(dout, cache_fast)
t2 = time()

print('\nTesting conv_backward_fast:')
print('Naive: %fs' % (t1 - t0))
print('Fast: %fs' % (t2 - t1))
print('Speedup: %fx' % ((t1 - t0) / (t2 - t1)))
print('dx difference: ', rel_error(dx_naive, dx_fast))
print('dw difference: ', rel_error(dw_naive, dw_fast))
print('db difference: ', rel_error(db_naive, db_fast))

# Relative errors should be close to 0.0
# Fresh random data for the max-pool benchmark that follows; seeding
# makes the numbers reproducible across runs.
from cs231n.fast_layers import max_pool_forward_fast, max_pool_backward_fast
np.random.seed(231)
x = np.random.randn(100, 3, 32, 32)
dout = np.random.randn(100, 3, 16, 16)
Example #7
0
 def _backward(self, x, cache):
     """
     Backward pass for this conv layer: writes the weight and bias
     gradients into the module-level ``grads`` dict (keyed by this
     layer's parameter names) and returns the input gradient.
     """
     global grads
     dx, dw, db = conv_backward_fast(x, cache)
     grads[self.n('w')] = dw
     grads[self.n('b')] = db
     return dx
Example #8
0
File: mytest.py  Project: faceteam/cs231n
# Benchmark the fast conv layers against the naive reference versions.
# NOTE(review): x is assumed to be defined earlier in the file (outside
# this chunk) -- confirm before running.
# Random test data: 25 filters of shape (3, 3, 3); with stride 2 / pad 1
# the output is (25, 16, 16), which is why dout has that shape.
w = np.random.randn(25, 3, 3, 3)
b = np.random.randn(25,)
dout = np.random.randn(100, 25, 16, 16)
conv_param = {'stride': 2, 'pad': 1}


# Time the naive vs. fast forward passes on the same inputs.
t0 = time()
out_naive, cache_naive = conv_forward_naive(x, w, b, conv_param)
t1 = time()
out_fast, cache_fast = conv_forward_fast(x, w, b, conv_param)
t2 = time()

# Converted from Python 2 print statements to print() calls for
# consistency with the Python 3 snippets earlier in this file.
print('Testing conv_forward_fast:')
print('Naive: %fs' % (t1 - t0))
print('Fast: %fs' % (t2 - t1))
print('Speedup: %fx' % ((t1 - t0) / (t2 - t1)))
print('Difference: ', rel_error(out_naive, out_fast))

# Time the backward passes, reusing each variant's own forward cache.
t0 = time()
dx_naive, dw_naive, db_naive = conv_backward_naive(dout, cache_naive)
t1 = time()
dx_fast, dw_fast, db_fast = conv_backward_fast(dout, cache_fast)
t2 = time()

print('\nTesting conv_backward_fast:')
print('Naive: %fs' % (t1 - t0))
print('Fast: %fs' % (t2 - t1))
print('Speedup: %fx' % ((t1 - t0) / (t2 - t1)))
print('dx difference: ', rel_error(dx_naive, dx_fast))
print('dw difference: ', rel_error(dw_naive, dw_fast))
print('db difference: ', rel_error(db_naive, db_fast))