def test_previous_gradient_wrt_x_accounted(self):
    op = Conv2D(self.x, self.y, padding='VALID', stride=1)
    op.register(op)
    op.new_context()
    op.forward()
    # A non-uniform upstream gradient, so each output position is
    # weighted differently in the accumulated gradient w.r.t. x.
    gradient = np.arange(16).reshape((2, 2, 2, 2))
    op.accumulate(op, gradient)
    expected = [[[[1.0, 3.0, 5.0, 7.0, 9.0],
                  [14.0, 26.0, 38.0, 50.0, 62.0],
                  [74.0, 86.0, 98.0, 110.0, 122.0],
                  [103.0, 113.0, 123.0, 133.0, 143.0]],
                 [[36.0, 56.0, 76.0, 96.0, 116.0],
                  [296.0, 352.0, 408.0, 464.0, 520.0],
                  [576.0, 632.0, 688.0, 744.0, 800.0],
                  [520.0, 556.0, 592.0, 628.0, 664.0]],
                 [[275.0, 293.0, 311.0, 329.0, 347.0],
                  [762.0, 806.0, 850.0, 894.0, 938.0],
                  [982.0, 1026.0, 1070.0, 1114.0, 1158.0],
                  [657.0, 683.0, 709.0, 735.0, 761.0]]],
                [[[9.0, 43.0, 77.0, 111.0, 145.0],
                  [190.0, 266.0, 342.0, 418.0, 494.0],
                  [570.0, 646.0, 722.0, 798.0, 874.0],
                  [431.0, 473.0, 515.0, 557.0, 599.0]],
                 [[532.0, 616.0, 700.0, 784.0, 868.0],
                  [1608.0, 1792.0, 1976.0, 2160.0, 2344.0],
                  [2528.0, 2712.0, 2896.0, 3080.0, 3264.0],
                  [1656.0, 1756.0, 1856.0, 1956.0, 2056.0]],
                 [[763.0, 813.0, 863.0, 913.0, 963.0],
                  [1898.0, 2006.0, 2114.0, 2222.0, 2330.0],
                  [2438.0, 2546.0, 2654.0, 2762.0, 2870.0],
                  [1465.0, 1523.0, 1581.0, 1639.0, 1697.0]]]]
    actual = self.x.gradient
    self.assertTrue((expected == actual).all())

def test_gradient_wrt_x_with_no_padding_and_stride_equals_1(self):
    da = [[[[1.0, 5.0, 9.0, 13.0, 17.0],
            [22.0, 30.0, 38.0, 46.0, 54.0],
            [62.0, 70.0, 78.0, 86.0, 94.0],
            [41.0, 45.0, 49.0, 53.0, 57.0]],
           [[62.0, 70.0, 78.0, 86.0, 94.0],
            [164.0, 180.0, 196.0, 212.0, 228.0],
            [244.0, 260.0, 276.0, 292.0, 308.0],
            [142.0, 150.0, 158.0, 166.0, 174.0]],
           [[61.0, 65.0, 69.0, 73.0, 77.0],
            [142.0, 150.0, 158.0, 166.0, 174.0],
            [182.0, 190.0, 198.0, 206.0, 214.0],
            [101.0, 105.0, 109.0, 113.0, 117.0]]],
          [[[1.0, 5.0, 9.0, 13.0, 17.0],
            [22.0, 30.0, 38.0, 46.0, 54.0],
            [62.0, 70.0, 78.0, 86.0, 94.0],
            [41.0, 45.0, 49.0, 53.0, 57.0]],
           [[62.0, 70.0, 78.0, 86.0, 94.0],
            [164.0, 180.0, 196.0, 212.0, 228.0],
            [244.0, 260.0, 276.0, 292.0, 308.0],
            [142.0, 150.0, 158.0, 166.0, 174.0]],
           [[61.0, 65.0, 69.0, 73.0, 77.0],
            [142.0, 150.0, 158.0, 166.0, 174.0],
            [182.0, 190.0, 198.0, 206.0, 214.0],
            [101.0, 105.0, 109.0, 113.0, 117.0]]]]
    op = Conv2D(self.x, self.y, padding='VALID', stride=1)
    op.register(op)
    op.new_context()
    result = op.forward()
    # An all-ones upstream gradient isolates the operation's own
    # gradient w.r.t. x.
    gradient = np.ones_like(result)
    op.accumulate(op, gradient)
    actual = self.x.gradient
    self.assertTrue((da == actual).all())

def test_forward_output_with_no_padding_and_stride_equals_1(self):
    op = Conv2D(self.x, self.y, padding='VALID', stride=1)
    actual = op.forward()
    self.assertEqual(actual.shape, (2, 2, 2, 2))
    expected = [[[[20410, 20920], [24760, 25420]],
                 [[37810, 38920], [42160, 43420]]],
                [[[72610, 74920], [76960, 79420]],
                 [[90010, 92920], [94360, 97420]]]]
    self.assertTrue((expected == actual).all())

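# The forward expectations above can be reproduced with a naive reference
# convolution. This is only a reading aid, not the library's implementation:
# it assumes NHWC inputs against an HWIO kernel with VALID padding, which is
# what the shapes in these tests imply, and it matches the forward numbers
# here (e.g. 20410 at [0, 0, 0, 0]) if the setUp fixtures are the
# hypothetical, but consistent, arange tensors
# x = np.arange(120).reshape((2, 3, 4, 5)) and
# y = np.arange(60).reshape((2, 3, 5, 2)).
def _reference_conv2d(self, x, y, stride):
    n, h, w, _ = x.shape
    kh, kw, _, out_channels = y.shape
    # VALID padding: the kernel never leaves the input.
    out_h = (h - kh) // stride + 1
    out_w = (w - kw) // stride + 1
    out = np.zeros((n, out_h, out_w, out_channels))
    for i in range(out_h):
        for j in range(out_w):
            window = x[:, i * stride:i * stride + kh,
                       j * stride:j * stride + kw, :]
            # Contract the window's (kh, kw, channel) axes against the kernel.
            out[:, i, j, :] = np.tensordot(window, y,
                                           axes=([1, 2, 3], [0, 1, 2]))
    return out
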
def test_gradient_wrt_y_with_no_padding_and_stride_equals_1(self):
    db = [[[[340.0, 340.0], [348.0, 348.0], [356.0, 356.0],
            [364.0, 364.0], [372.0, 372.0]],
           [[380.0, 380.0], [388.0, 388.0], [396.0, 396.0],
            [404.0, 404.0], [412.0, 412.0]],
           [[420.0, 420.0], [428.0, 428.0], [436.0, 436.0],
            [444.0, 444.0], [452.0, 452.0]]],
          [[[500.0, 500.0], [508.0, 508.0], [516.0, 516.0],
            [524.0, 524.0], [532.0, 532.0]],
           [[540.0, 540.0], [548.0, 548.0], [556.0, 556.0],
            [564.0, 564.0], [572.0, 572.0]],
           [[580.0, 580.0], [588.0, 588.0], [596.0, 596.0],
            [604.0, 604.0], [612.0, 612.0]]]]
    op = Conv2D(self.x, self.y, padding='VALID', stride=1)
    op.register(op)
    op.new_context()
    result = op.forward()
    gradient = np.ones_like(result)
    op.accumulate(op, gradient)
    actual = self.y.gradient
    self.assertTrue((db == actual).all())

def test_previous_gradient_wrt_y_accounted(self):
    op = Conv2D(self.x, self.y, padding='VALID', stride=1)
    op.register(op)
    op.new_context()
    op.forward()
    gradient = np.arange(16).reshape((2, 2, 2, 2))
    op.accumulate(op, gradient)
    expected = [[[[3520.0, 3860.0], [3576.0, 3924.0], [3632.0, 3988.0],
                  [3688.0, 4052.0], [3744.0, 4116.0]],
                 [[3800.0, 4180.0], [3856.0, 4244.0], [3912.0, 4308.0],
                  [3968.0, 4372.0], [4024.0, 4436.0]],
                 [[4080.0, 4500.0], [4136.0, 4564.0], [4192.0, 4628.0],
                  [4248.0, 4692.0], [4304.0, 4756.0]]],
                [[[4640.0, 5140.0], [4696.0, 5204.0], [4752.0, 5268.0],
                  [4808.0, 5332.0], [4864.0, 5396.0]],
                 [[4920.0, 5460.0], [4976.0, 5524.0], [5032.0, 5588.0],
                  [5088.0, 5652.0], [5144.0, 5716.0]],
                 [[5200.0, 5780.0], [5256.0, 5844.0], [5312.0, 5908.0],
                  [5368.0, 5972.0], [5424.0, 6036.0]]]]
    actual = self.y.gradient
    self.assertTrue((expected == actual).all())

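# The same caveat applies to this backward sketch: it is the naive scatter
# form of the gradients the expectations in these tests encode, not the
# library's code. For an upstream gradient g of shape
# (batch, out_h, out_w, out_channels), each output position sends a
# g-weighted kernel slice back into dx and a g-weighted input window into
# dy; with np.ones_like(result) as g it reproduces the da/db arrays above.
def _reference_conv2d_gradients(self, x, y, g, stride):
    kh, kw = y.shape[0], y.shape[1]
    dx = np.zeros(x.shape)
    dy = np.zeros(y.shape)
    out_h, out_w = g.shape[1], g.shape[2]
    for i in range(out_h):
        for j in range(out_w):
            rows = slice(i * stride, i * stride + kh)
            cols = slice(j * stride, j * stride + kw)
            # dx: upstream gradient times the kernel, contracted over
            # the output channels.
            dx[:, rows, cols, :] += np.tensordot(g[:, i, j, :], y,
                                                 axes=([1], [3]))
            # dy: input window times the upstream gradient, contracted
            # over the batch.
            dy += np.tensordot(x[:, rows, cols, :], g[:, i, j, :],
                               axes=([0], [0]))
    return dx, dy
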
def test_gradient_wrt_y_with_valid_padding_and_stride_equals_2(self):
    db = [[[[60.0, 60.0], [62.0, 62.0], [64.0, 64.0],
            [66.0, 66.0], [68.0, 68.0]],
           [[70.0, 70.0], [72.0, 72.0], [74.0, 74.0],
            [76.0, 76.0], [78.0, 78.0]],
           [[80.0, 80.0], [82.0, 82.0], [84.0, 84.0],
            [86.0, 86.0], [88.0, 88.0]]],
          [[[100.0, 100.0], [102.0, 102.0], [104.0, 104.0],
            [106.0, 106.0], [108.0, 108.0]],
           [[110.0, 110.0], [112.0, 112.0], [114.0, 114.0],
            [116.0, 116.0], [118.0, 118.0]],
           [[120.0, 120.0], [122.0, 122.0], [124.0, 124.0],
            [126.0, 126.0], [128.0, 128.0]]]]
    op = Conv2D(self.x, self.y, padding='VALID', stride=2)
    op.register(op)
    op.new_context()
    result = op.forward()
    gradient = np.ones_like(result)
    op.accumulate(op, gradient)
    actual = self.y.gradient
    self.assertTrue((db == actual).all())

def test_gradient_wrt_x_with_valid_padding_and_stride_equals_2(self):
    # At stride 2 the single valid window never covers the last input
    # column or the bottom input row, so their gradients stay zero.
    da = [[[[1.0, 5.0, 9.0, 13.0, 17.0],
            [21.0, 25.0, 29.0, 33.0, 37.0],
            [41.0, 45.0, 49.0, 53.0, 57.0],
            [0.0, 0.0, 0.0, 0.0, 0.0]],
           [[61.0, 65.0, 69.0, 73.0, 77.0],
            [81.0, 85.0, 89.0, 93.0, 97.0],
            [101.0, 105.0, 109.0, 113.0, 117.0],
            [0.0, 0.0, 0.0, 0.0, 0.0]],
           [[0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0]]],
          [[[1.0, 5.0, 9.0, 13.0, 17.0],
            [21.0, 25.0, 29.0, 33.0, 37.0],
            [41.0, 45.0, 49.0, 53.0, 57.0],
            [0.0, 0.0, 0.0, 0.0, 0.0]],
           [[61.0, 65.0, 69.0, 73.0, 77.0],
            [81.0, 85.0, 89.0, 93.0, 97.0],
            [101.0, 105.0, 109.0, 113.0, 117.0],
            [0.0, 0.0, 0.0, 0.0, 0.0]],
           [[0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0],
            [0.0, 0.0, 0.0, 0.0, 0.0]]]]
    op = Conv2D(self.x, self.y, padding='VALID', stride=2)
    op.register(op)
    op.new_context()
    result = op.forward()
    gradient = np.ones_like(result)
    op.accumulate(op, gradient)
    actual = self.x.gradient
    self.assertTrue((da == actual).all())

def test_forward_output_with_valid_padding_and_stride_equals_2(self):
    op = Conv2D(self.x, self.y, padding='VALID', stride=2)
    actual = op.forward()
    self.assertEqual(actual.shape, (2, 1, 1, 2))
    expected = [[[[20410, 20920]]],
                [[[72610, 74920]]]]
    self.assertTrue((expected == actual).all())

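# A final cross-check of the VALID shape arithmetic the two forward tests
# rely on: out = (in - kernel) // stride + 1 per spatial axis. This helper
# is an illustration of that formula, not library API.
def _valid_output_shape(self, in_hw, kernel_hw, stride):
    # For the (3, 4) input and (2, 3) kernel these fixtures imply:
    # stride 1 gives (2, 2) and stride 2 gives (1, 1), matching the
    # (2, 2, 2, 2) and (2, 1, 1, 2) shapes asserted above.
    return tuple((i - k) // stride + 1 for i, k in zip(in_hw, kernel_hw))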