# Example #1
def test_p4_p4_conv_equivariance():
    """Check that a single P4ConvP4 layer commutes with C4 rotations."""
    from groupy.gfunc import P4FuncArray
    import groupy.garray.C4_array as c4a
    from groupy.gconv.chainer_gconv.p4_conv import P4ConvP4

    # One image, one input channel, 4 rotation-stabilizer planes, 11x11 spatial.
    image = np.random.randn(1, 1, 4, 11, 11).astype('float32')
    conv = P4ConvP4(1, 2, 3)
    check_equivariance(im=image,
                       layers=[conv],
                       input_array=P4FuncArray,
                       output_array=P4FuncArray,
                       point_group=c4a)
# Example #2
def test_p4_net_equivariance():
    """Check that a two-layer Z2 -> P4 -> P4 stack is C4-equivariant."""
    from groupy.gfunc import Z2FuncArray, P4FuncArray
    import groupy.garray.C4_array as c4a
    from groupy.gconv.chainer_gconv.p4_conv import P4ConvZ2, P4ConvP4

    # One image, one channel, 11x11 spatial input on the plane (Z2).
    image = np.random.randn(1, 1, 11, 11).astype('float32')
    stack = [
        P4ConvZ2(in_channels=1, out_channels=2, ksize=3),
        P4ConvP4(in_channels=2, out_channels=3, ksize=3),
    ]
    check_equivariance(im=image,
                       layers=stack,
                       input_array=Z2FuncArray,
                       output_array=P4FuncArray,
                       point_group=c4a)
# Example #3
 def __init__(self):
     """Build a 6-block P4 CNN (Z2 lifting layer + 5 P4 layers) with a
     final 4x4 P4 convolution as the classification head.

     Every hidden block is ConvBNAct(conv, bn, act) with 10 output
     channels, 3x3 kernels, stride 1 and no padding.
     """
     ksize = 3
     bn = True
     act = F.relu
     self.dr = 0.3  # dropout ratio, presumably consumed by __call__ — confirm

     # l1 lifts from the plane (Z2) to the roto-translation group P4;
     # l2..l6 stay on P4. Channel widths: 1 -> 10, then 10 -> 10 throughout.
     links = {}
     for idx in range(1, 7):
         conv_cls = P4ConvZ2 if idx == 1 else P4ConvP4
         c_in = 1 if idx == 1 else 10
         links['l%d' % idx] = ConvBNAct(
             conv=conv_cls(in_channels=c_in,
                           out_channels=10,
                           ksize=ksize,
                           stride=1,
                           pad=0),
             bn=bn,
             act=act)
     links['top'] = P4ConvP4(in_channels=10,
                             out_channels=10,
                             ksize=4,
                             stride=1,
                             pad=0)
     super(P4CNN, self).__init__(**links)
# Example #4
    def __init__(self):
        """Build the P4 variant of the All-CNN-C architecture
        (Springenberg et al.): a Z2 lifting conv followed by eight P4
        convs, each wrapped in ConvBNAct, then re-initialize all conv
        weights with scaled Gaussian noise.
        """
        bn = True
        ksize = 3
        pad = 1
        act = F.relu

        # Per-layer spec for l1..l9:
        # (conv class, in_channels, out_channels, ksize, stride, pad).
        # Stride-2 layers (l3, l6) replace pooling, as in All-CNN-C.
        # Note: it's unusual to have a bn + relu before softmax (l9), but
        # this is what's described by Springenberg et al.
        specs = [
            (P4ConvZ2, 3, 48, ksize, 1, pad),
            (P4ConvP4, 48, 48, ksize, 1, pad),
            (P4ConvP4, 48, 48, ksize, 2, pad),
            (P4ConvP4, 48, 96, ksize, 1, pad),
            (P4ConvP4, 96, 96, ksize, 1, pad),
            (P4ConvP4, 96, 96, ksize, 2, pad),
            (P4ConvP4, 96, 96, ksize, 1, pad),
            (P4ConvP4, 96, 96, 1, 1, 0),
            (P4ConvP4, 96, 10, 1, 1, 0),
        ]
        links = {}
        for idx, (conv_cls, c_in, c_out, k, s, p) in enumerate(specs, start=1):
            links['l%d' % idx] = ConvBNAct(
                conv=conv_cls(in_channels=c_in,
                              out_channels=c_out,
                              ksize=k,
                              stride=s,
                              pad=p),
                bn=bn,
                act=act)
        super(P4AllCNNC, self).__init__(**links)

        # Overwrite every conv weight with N(0, wtscale^2) noise, in the
        # same l1..l9 order as before so the np.random stream is unchanged.
        wtscale = 0.035
        for idx in range(1, 10):
            conv = getattr(self, 'l%d' % idx).conv
            conv.W.data = (np.random.randn(*conv.W.data.shape) *
                           wtscale).astype(np.float32)