def call(self, inputs):
    """
    :Note: Equivalent to __call__()

    :param inputs: Tensor the layer is applied to
    :type inputs: tf.Tensor
    :return: Tensor after applying the layer, i.e. the degree-n polynomial P(inputs)
    :rtype: tf.Tensor
    """
    output_list = []
    for k in range(self.output_units):
        # Collect, per input feature j, the terms kernel[i, j, k] * inputs[:, j]**i
        # for every degree i of output unit k; reset for each unit so the indexing
        # by j below refers to the current unit's terms.
        polylist = []
        for j in range(self.input_dim):
            polylist.append([
                tf.multiply(intpow_avx2(inputs[:, j], i), self.kernel[i, j, k])
                for i in range(self.deg + 1)
            ])
            if self.use_bias:
                # Include the raw input feature as an extra additive term.
                polylist[j].append(inputs[:, j])
        # Sum over degrees, then over input features, to get output unit k.
        output_list.append(
            tf.add_n([tf.add_n(polylist[jj]) for jj in range(self.input_dim)]))
    output = tf.stack(output_list, axis=-1)
    if self.activation is not None:
        output = self.activation(output)
    return output
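# For reference, the nested loops above compute, for each output unit k,
# sum_j sum_i kernel[i, j, k] * inputs[:, j]**i (plus the raw input feature when
# use_bias is set). A vectorized sketch of the same contraction, ignoring
# use_bias and the activation and substituting tf.pow for intpow_avx2 (the shape
# values below are purely illustrative), could look like this:
import tensorflow as tf

batch, input_dim, output_units, deg = 4, 3, 2, 5
inputs = tf.random.normal((batch, input_dim))
kernel = tf.random.normal((deg + 1, input_dim, output_units))

# powers[b, i, j] = inputs[b, j] ** i
powers = tf.stack([tf.pow(inputs, float(i)) for i in range(deg + 1)], axis=1)
# output[b, k] = sum_{i, j} powers[b, i, j] * kernel[i, j, k]
output = tf.einsum('bij,ijk->bk', powers, kernel)  # shape (batch, output_units)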
def _ndegmul(self, inputs, kernel):
    """
    Same polynomial contraction as :meth:`call`, but for an explicitly supplied
    kernel and without the activation.
    """
    output_list = []
    for k in range(self.output_units):
        polylist = []
        for j in range(self.input_dim):
            polylist.append([
                tf.multiply(intpow_avx2(inputs[:, j], i), kernel[i, j, k])
                for i in range(self.deg + 1)
            ])
            if self.use_bias:
                polylist[j].append(inputs[:, j])
        output_list.append(
            tf.add_n([tf.add_n(polylist[jj]) for jj in range(self.input_dim)]))
    output = tf.stack(output_list, axis=-1)
    return output
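# intpow_avx2 is defined elsewhere in the source; the methods above only rely on
# it returning the elementwise integer power of a tensor. A minimal stand-in under
# that assumption (the name intpow_avx2_sketch is hypothetical), implemented with
# exponentiation by squaring, could look like this:
import tensorflow as tf

def intpow_avx2_sketch(x, n):
    # Elementwise x**n for an integer n >= 0, using exponentiation by squaring,
    # i.e. O(log n) tensor multiplications instead of n.
    result = tf.ones_like(x)
    base = x
    while n > 0:
        if n % 2 == 1:
            result = result * base
        base = base * base
        n //= 2
    return result

# Quick sanity check against tf.pow.
x = tf.constant([1.0, 2.0, 3.0])
print(intpow_avx2_sketch(x, 3))  # expected: [1., 8., 27.]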