def fprop_relu(self, layer, x, slope):
    """Forward ReLU, dispatched to the MKL engine when possible.

    Falls back to an element-wise optree (``max(x,0) + slope*min(0,x)``)
    when ``x`` is not an MKL-aware tensor (no ``shape5D`` attribute) or
    when a nonzero leaky slope is requested; otherwise runs the MKL
    primitive in place and returns ``x`` itself.

    Mutates ``x`` (MKL state) and ``layer`` (``shape5D``, ``inputMKL``,
    ``initOk_f``) as side effects.
    """
    if layer is None:
        layer = layer_mkl.ReluLayerMKL()
    # Non-MKL tensor: plain backend optree path, no layer state touched.
    if not hasattr(x, 'shape5D'):
        return self.maximum(x, 0) + slope * self.minimum(0, x)
    # Leaky ReLU is not handled by the MKL primitive below: pull x back
    # to plain layout, drop its MKL state, and compute via optree.
    if slope != 0:
        self.convert(x)
        x.clean_mkl()
        return self.maximum(x, 0) + slope * self.minimum(0, x)
    if x.shape5D is not None:
        C, D, H, W, N = x.shape5D
    else:
        # No 5D shape recorded: treat the 2D tensor as (C, N) with
        # singleton depth/height/width, and cache that on x.
        C, N = x._tensor.shape
        D, H, W = 1, 1, 1
        x.shape5D = C, D, H, W, N
    # Remembered so bprop_relu can stamp the same shape onto deltas.
    layer.shape5D = C, D, H, W, N
    # Raw pointer to the layer's primitive descriptor array, handed to C.
    primitives = c_longlong(layer.dnnPrimitives.ctypes.data)
    # primitive[3] == 0 appears to mean x carries no MKL-layout input;
    # recorded so bprop knows to convert the incoming error. TODO confirm.
    if x.primitive[3] == 0:
        layer.inputMKL = False
    self.mklEngine.Relu_f(x.get_prim(), primitives, layer.initOk_f, N, C, H, W)
    # Mark the forward primitive as initialized for subsequent calls.
    layer.initOk_f = 1
    return x
def bprop_relu(self, layer, x, error, deltas, slope):
    """Backward ReLU, dispatched to the MKL engine when possible.

    For leaky ReLU (``slope != 0``) or a missing ``error`` tensor, returns
    the element-wise derivative mask ``greater(x,0) + slope*less(x,0)``
    (after stripping any MKL state from ``error``). Otherwise runs the MKL
    backward primitive in place: ``error`` is updated by the C call and
    ``deltas`` is pointed at its result.

    Mutates ``error``, ``deltas`` and ``layer`` (``initOk_b``); returns
    None on the MKL path.
    """
    if layer is None:
        layer = layer_mkl.ReluLayerMKL()
    if slope != 0 or error is None:
        if error is not None:
            # Bring error back to plain layout before the optree math.
            self.convert(error)
            error.clean_mkl()
        return self.greater(x, 0) + slope * self.less(x, 0)  # to be moved to C code
    # Forward pass saw a non-MKL input, so the incoming error must be
    # converted out of MKL layout before the primitive consumes it.
    if not layer.inputMKL:
        self.convert(error)
        error.clean_mkl()
    # Raw pointer to the layer's primitive descriptor array, handed to C.
    primitives = c_longlong(layer.dnnPrimitives.ctypes.data)
    self.mklEngine.Relu_b(x.get_prim(), error.get_prim(), primitives, layer.initOk_b)
    # Mark the backward primitive as initialized for subsequent calls.
    layer.initOk_b = 1
    # Share error's MKL buffer with deltas; shape cached by fprop_relu.
    deltas.set_mkl(error)
    deltas.shape5D = layer.shape5D
    # If deltas ended up without an MKL primitive, fall back to a copy.
    if deltas.primitive[3] == 0:
        deltas[:] = error
def relu_layer(self):
    """Create and return a fresh MKL ReLU layer state object."""
    new_layer = layer_mkl.ReluLayerMKL()
    return new_layer