def test_rcmatrix_identity():
    """The complement permutation matrix is involutory.

    Complementing twice must restore the original encoding, so the
    matrix times itself yields the identity for every tested order.
    """
    for order in range(1, 4):
        perm = complement_permmatrix(order)
        identity = np.eye(4 ** order)
        np.testing.assert_equal(identity, np.matmul(perm, perm))
def build(self, input_shape):
    """Create the constant complement-permutation matrix for this layer.

    The order of the one-hot encoding is inferred from the last input
    dimension, which holds 4**order channels.
    """
    order = int(numpy.log(input_shape[-1]) / numpy.log(4))
    self.rcmatrix = K.constant(complement_permmatrix(order),
                               dtype=K.floatx())
    super(Complement, self).build(input_shape)
def test_revcomp_rcmatrix(tmpdir):
    """Spot-check the complement permutation matrix for orders 1 and 2."""
    os.environ['JANGGU_OUTPUT'] = tmpdir.strpath

    # Order 1: complementing a single nucleotide flips the one-hot axis,
    # so the matrix is the anti-diagonal identity.
    rcmatrix = complement_permmatrix(1)
    np.testing.assert_equal(rcmatrix,
                            np.array([[0, 0, 0, 1],
                                      [0, 0, 1, 0],
                                      [0, 1, 0, 0],
                                      [1, 0, 0, 0]]))

    # Order 2: each checked row must be one-hot with the 1 at the
    # expected target column (same row/column pairs as before, written
    # as a table instead of eight copy-pasted assertions).
    rcmatrix = complement_permmatrix(2)
    expected_column = {0: 15, 4: 14, 8: 13, 12: 12,
                       1: 11, 5: 10, 9: 9, 13: 8}
    for row, col in expected_column.items():
        onehot = np.zeros(16)
        onehot[col] = 1
        np.testing.assert_equal(rcmatrix[row], onehot)
def build(self, input_shape):
    """Build the forward layer and derive the reverse-complement layer's weights.

    The reverse-complement kernel is the forward kernel flipped along
    the sequence axis with its one-hot channel axis permuted by the
    complement permutation matrix; bias and input spec are shared.
    """
    with K.name_scope(self.forward_layer.name):
        # Guard against rebuilding an already-built layer, which can
        # duplicate weight variables in Keras; this also matches the
        # companion implementation elsewhere in the project.
        if not self.forward_layer.built:
            self.forward_layer.build(input_shape)
    with K.name_scope(self.revcomp_layer.name):
        # The one-hot order follows from the channel count: 4**order.
        rcmatrix = K.constant(
            complement_permmatrix(int(numpy.log(input_shape[-1]) /
                                      numpy.log(4))),
            dtype=K.floatx())
        # Reverse along the sequence axis ...
        kernel = self.forward_layer.kernel[::-1, :, :, :]
        # ... and complement the one-hot channel axis.
        kernel = tf.einsum('ij,sdjc->sdic', rcmatrix, kernel)
        self.revcomp_layer.kernel = kernel
        self.revcomp_layer.bias = self.forward_layer.bias
        self.revcomp_layer.use_bias = self.forward_layer.use_bias
        self.revcomp_layer.input_spec = self.forward_layer.input_spec
    self.built = True
def build(self, input_shape):
    """Build the forward layer and derive the reverse-complement layer's weights.

    The reverse-complement kernel is the forward kernel flipped along
    the first (sequence) axis with its one-hot channel axis permuted by
    the complement permutation matrix; bias and input spec are shared
    with the forward layer.
    """
    # NOTE(review): uses the private `_name` attribute rather than the
    # public `name` property — presumably required by the targeted
    # keras version; confirm before changing.
    with K.name_scope(self.forward_layer._name):
        # Only build once; rebuilding a built layer can duplicate weights.
        if not self.forward_layer.built:
            self.forward_layer.build(input_shape)
    with K.name_scope(self.revcomp_layer._name):
        # The one-hot order follows from the channel count: 4**order.
        rcmatrix = K.constant(complement_permmatrix(
            int(numpy.log(input_shape[-1]) / numpy.log(4))),
            dtype=K.floatx())
        # Reverse along the sequence axis ...
        kernel = self.forward_layer.kernel[::-1, :, :, :]
        # ... and complement the one-hot channel axis.
        kernel = tf.einsum('ij,sdjc->sdic', rcmatrix, kernel)
        self.revcomp_layer.kernel = kernel
        self.revcomp_layer.bias = self.forward_layer.bias
        self.revcomp_layer.use_bias = self.forward_layer.use_bias
        self.revcomp_layer.input_spec = self.forward_layer.input_spec
        # these attributes have changed between keras 2.2 and 2.4
        # so they need to be set depending on the keras version.
        if hasattr(self.forward_layer, '_build_conv_op_input_shape'):
            self.revcomp_layer._build_conv_op_input_shape = self.forward_layer._build_conv_op_input_shape
        if hasattr(self.forward_layer, '_convolution_op'):
            self.revcomp_layer._convolution_op = self.forward_layer._convolution_op
    self.built = True