Example #1
    def test_relu_speed(self):
        import time
        from EggNetExtension import relu4D
        from EggNet import relu

        n_runs = 100
        # Get large image data set
        I, _ = get_uniform_test_image_and_kernel((10, 28, 28, 6),
                                                 (3, 3, 6, 12))

        print("Code         Time")

        t_start = time.time()
        for i in range(n_runs):
            _ = relu4D(I)
        t_end = time.time()
        t_cext = (t_end - t_start) / n_runs
        print("C-Ext       ", t_cext)

        t_start = time.time()
        for i in range(n_runs):
            _ = relu(I)
        t_end = time.time()
        t_py = (t_end - t_start) / n_runs
        print("Python      ", t_py)

        # Speedup is ~2
        print("Speedup:  ", t_py / t_cext)
Example #2
    def test_relu_ndim(self):
        from EggNet import relu
        import EggNetExtension
        import numpy as np

        sizes_to_test = [
            (100, 100),
            (10, 100, 100),
            (10, 100, 100, 10),
        ]
        for size in sizes_to_test:
            x = np.random.rand(*size).astype(np.float32)
            x_relu1 = x.copy()
            x_relu2 = relu(x)

            if x.ndim == 2:
                EggNetExtension.relu2D(x_relu1)
            elif x.ndim == 3:
                EggNetExtension.relu3D(x_relu1)
            elif x.ndim == 4:
                EggNetExtension.relu4D(x_relu1)
            else:
                raise ValueError("Unsupported ndim: {}".format(x.ndim))

            self.assertTrue(
                np.allclose(x_relu1, x_relu2, atol=self.NUMERIC_EPS))
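
The rank-based dispatch in this test suggests a small wrapper; relu_inplace below is a hypothetical helper (not part of EggNetExtension) sketching that idea:

    import numpy as np
    import EggNetExtension

    def relu_inplace(x):
        # Pick the C-extension variant matching the array rank; each
        # one modifies its float32 argument in place.
        dispatch = {
            2: EggNetExtension.relu2D,
            3: EggNetExtension.relu3D,
            4: EggNetExtension.relu4D,
        }
        try:
            dispatch[x.ndim](x)
        except KeyError:
            raise ValueError("Unsupported ndim: {}".format(x.ndim)) from None
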
Example #3
    def test_int(self):
        from EggNet import relu
        import EggNetExtension
        import numpy as np

        # Scale and shift before casting: casting floats in [0, 1) to
        # int16 first would truncate every value to zero.
        x1 = (np.random.rand(100) * 10 - 5).astype(np.int16)
        x2 = x1.copy()
        EggNetExtension.relu_int16_t_inplace(x1)
        x2 = relu(x2)
        self.assertTrue(np.allclose(x1, x2, atol=self.NUMERIC_EPS))
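
For clarity, the in-place int16 variant is assumed to behave like the following NumPy reference (a sketch of the semantics the test checks, not the actual C code):

    import numpy as np

    def relu_int16_inplace_ref(x):
        # Assumed semantics of EggNetExtension.relu_int16_t_inplace:
        # clamp negative entries to zero, writing back into x.
        assert x.dtype == np.int16
        np.maximum(x, 0, out=x)
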
Example #4
    def __call__(self, *args, **kwargs):
        # z = xW + b; np.matmul is what the '@' operator performs
        x = args[0]
        z = np.matmul(x, self.W) + self.b

        if self.activation is None:
            return z
        elif self.activation is "relu":
            # return np.apply_over_axis(relu, z, 1)
            return relu(z)
        elif self.activation is "softmax":
            # return np.apply_over_axis(softmax, z, 1)
            return softmax(z)
        else:
            raise ValueError("Activation of {} is not valid".format(
                self.activation))
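
A self-contained sketch of the same forward pass with explicit shapes (the softmax here is a standard numerically stable implementation, an assumption about what EggNet's softmax computes):

    import numpy as np

    def softmax(z):
        # Row-wise softmax, stabilized by shifting the logits.
        e = np.exp(z - z.max(axis=-1, keepdims=True))
        return e / e.sum(axis=-1, keepdims=True)

    x = np.random.rand(4, 784)          # batch of 4 inputs
    W = np.random.rand(784, 10) * 0.01  # weights
    b = np.zeros(10)                    # bias
    z = np.matmul(x, W) + b             # z = xW + b, shape (4, 10)
    y = softmax(z)
    assert y.shape == (4, 10)
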
Example #5
    def __call__(self, input, *args, **kwargs):
        x = input
        try:
            z = conv2d_fast(x, self.kernel, stride=1)
        except ImportError as imerror:
            print(
                "[ERROR]: The fast C extension could not be loaded (is it "
                "installed?). Falling back to the default Python "
                "implementation: ", imerror)
            z = conv2d(x, self.kernel, stride=1)

        if self.use_bias:
            z += self.b

        if self.activation is None:
            return z
        elif self.activation == "relu":
            return relu(z)
        else:
            raise ValueError("Activation of {} is not valid".format(
                self.activation))
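
An alternative to catching ImportError on every call is to resolve the implementation once at import time; a sketch of that pattern (the exact module layout of conv2d_fast and conv2d is an assumption, since the excerpt does not show their imports):

    # Resolve the convolution implementation once, at import time.
    try:
        from EggNetExtension import conv2d_fast as _conv2d  # C extension
    except ImportError:
        from EggNet import conv2d as _conv2d                # pure-Python fallback

    def conv_forward(x, kernel):
        # Every call site then uses the resolved implementation.
        return _conv2d(x, kernel, stride=1)
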