Example #1
    def test_biasadd_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import \
            biasadd_op_support

        X = XLayer(type=['BiasAdd'],
                   name='bn1',
                   shapes=[-1, 2, 4, 4],
                   sizes=[32],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={'axis': 1})

        assert biasadd_op_support(X, [], [])

        X = XLayer(type=['BiasAdd'],
                   name='bn1',
                   shapes=[-1, 2570, 4, 4],
                   sizes=[2570 * 16],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={'axis': 1})

        assert not biasadd_op_support(X, [], [])
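
Note (editorial, not from the pyxir sources): the *_op_support helpers exercised throughout these examples all appear to share the signature (XLayer, bottom XLayers, top XLayers) -> bool. A minimal sketch of how that signature could be used to screen several layers at once; the all_supported helper below is hypothetical:

    def all_supported(op_support_check, layers_with_context):
        # layers_with_context: iterable of (XLayer, bottom_Xs, top_Xs) tuples
        return all(op_support_check(X, bottom_Xs, top_Xs)
                   for X, bottom_Xs, top_Xs in layers_with_context)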
Example #2
    def test_concat_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import \
            concat_op_support

        X = XLayer(type=['Concat'],
                   name='layer1',
                   shapes=[-1, 2, 4, 4],
                   sizes=[32],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={'axis': 1})

        assert concat_op_support(X, [], [])

        X = XLayer(type=['Concat'],
                   name='layer1',
                   shapes=[-1, 2570, 4, 4],
                   sizes=[2570 * 16],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={'axis': 1})

        assert not concat_op_support(X, [], [])
Example #3
    def test_mean_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import \
            mean_op_support

        X = XLayer(type=['Mean'],
                   name='layer1',
                   shapes=[-1, 2, 1, 1],
                   sizes=[2],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={
                       'axes': [2, 3],
                       'keepdims': True,
                       'exclude': False
                   })

        assert mean_op_support(X, [], [])

        X = XLayer(type=['Mean'],
                   name='layer1',
                   shapes=[-1, 1, 4, 4],
                   sizes=[16],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={
                       'axes': [1],
                       'keepdims': True,
                       'exclude': False
                   })

        assert not mean_op_support(X, [], [])
Example #4
    def test_prelu_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import \
            prelu_op_support

        X = XLayer(type=['pReLU'],
                   name='layer1',
                   shapes=[-1, 2, 1, 1],
                   sizes=[2],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={'alpha': 0.1})

        assert prelu_op_support(X, [], [])

        X = XLayer(type=['pReLU'],
                   name='layer1',
                   shapes=[-1, 1, 4, 4],
                   sizes=[16],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={'alpha': 0.2})

        assert not prelu_op_support(X, [], [])
Example #5
    def test_convolution_layer_tfl(self):

        iX = XLayer(type=['Input'],
                    name='in1',
                    shapes=[1, 3, 3, 2],
                    sizes=[32],
                    bottoms=[],
                    tops=[],
                    targets=[])

        kX = XLayer(type=['Constant'],
                    name='kernel',
                    shapes=[4, 3, 3, 2],
                    sizes=[54],
                    data=[
                        np.transpose(np.ones((4, 2, 3, 3), dtype=np.float32),
                                     (0, 2, 3, 1))
                    ],
                    bottoms=[],
                    tops=[],
                    targets=[])

        X = xlf.get_xop_factory_func('Convolution')(op_name='conv1',
                                                    kernel_size=[3, 3],
                                                    strides=[1, 1],
                                                    padding_hw=[1, 1],
                                                    dilation=[1, 1],
                                                    groups=1,
                                                    channels=4,
                                                    data_layout='NHWC',
                                                    kernel_layout='OHWI',
                                                    input_layer=iX,
                                                    weights_layer=kX)

        assert X.type[0] == 'Convolution'
        assert X.shapes == [1, 3, 3, 4]
        assert X.attrs['padding'] == [[0, 0], [1, 1], [1, 1], [0, 0]]
        assert X.attrs['data_layout'] == 'NHWC'
        assert X.attrs['kernel_layout'] == 'OIHW'
        assert X.attrs['shape'] == [1, 3, 3, 4]
        assert X.attrs['kernel_size'] == [3, 3]
        assert X.attrs['strides'] == [1, 1]
        assert X.attrs['groups'] == 1
        assert X.attrs['dilation'] == [1, 1]
        assert X.attrs['channels'] == [2, 4]

        np.testing.assert_array_equal(X.data.weights,
                                      np.ones((4, 2, 3, 3), dtype=np.float32))
        np.testing.assert_array_equal(X.data.biases,
                                      np.zeros((4), dtype=np.float32))

        from pyxir.graph.ops.l2_convolution import \
            conv2d_layout_transform

        conv2d_layout_transform(X, target_layout='NCHW')

        assert X.type[0] == 'Convolution'
        assert X.shapes == [1, 4, 3, 3]
        assert X.attrs['data_layout'] == 'NCHW'
        assert X.attrs['padding'] == [[0, 0], [0, 0], [1, 1], [1, 1]]
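
As an aside (not part of the test above), the padding reordering asserted by conv2d_layout_transform follows from permuting the per-axis pad pairs with the layout axes; a minimal sketch assuming the NHWC -> NCHW permutation [0, 3, 1, 2]:

    nhwc_padding = [[0, 0], [1, 1], [1, 1], [0, 0]]   # padding asserted for NHWC above
    axes = [0, 3, 1, 2]                               # NHWC -> NCHW
    nchw_padding = [nhwc_padding[a] for a in axes]
    assert nchw_padding == [[0, 0], [0, 0], [1, 1], [1, 1]]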
Example #6
    def test_pooling_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import \
            pooling_op_support

        X = XLayer(type=['Pooling'],
                   name='layer1',
                   shapes=[-1, 2, 4, 4],
                   sizes=[32],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={
                       'data_layout': 'NCHW',
                       'kernel_size': [2, 2],
                       'strides': [3, 3],
                       'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]
                   })

        assert pooling_op_support(X, [], [])

        X = XLayer(type=['Pooling'],
                   name='layer1',
                   shapes=[-1, 2570, 4, 4],
                   sizes=[2570 * 16],
                   bottoms=[],
                   tops=[],
                   targets=[],
                   attrs={
                       'data_layout': 'NCHW',
                       'kernel_size': [2, 2],
                       'strides': [1, 1],
                       'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]
                   })

        assert not pooling_op_support(X, [], [])
Example #7
    def test_batchnorm_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import batchnorm_op_support

        X = XLayer(
            type=["BatchNorm"],
            name="bn1",
            shapes=[-1, 2, 4, 4],
            sizes=[32],
            bottoms=[],
            tops=[],
            targets=[],
            attrs={"axis": 1},
        )

        assert batchnorm_op_support(X, [], [])

        X = XLayer(
            type=["BatchNorm"],
            name="bn1",
            shapes=[-1, 2570, 4, 4],
            sizes=[2570 * 16],
            bottoms=[],
            tops=[],
            targets=[],
            attrs={"axis": 1},
        )

        assert not batchnorm_op_support(X, [], [])
Example #8
    def test_scale_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import scale_op_support

        X = XLayer(
            type=["Scale"],
            name="layer1",
            shapes=[-1, 2, 4, 4],
            sizes=[32],
            bottoms=[],
            tops=[],
            targets=[],
            attrs={"axis": 1},
        )

        assert scale_op_support(X, [], [])

        X = XLayer(
            type=["Scale"],
            name="layer1",
            shapes=[-1, 2570, 4, 4],
            sizes=[2570 * 16],
            bottoms=[],
            tops=[],
            targets=[],
            attrs={"axis": 1},
        )

        assert not scale_op_support(X, [], [])
Example #9
    def test_xgraph_factory(self):

        xlayers = [
            XLayer(name='in1',
                   type=['Input'],
                   bottoms=[],
                   tops=['conv1'],
                   targets=[]),
            XLayer(name='in2',
                   type=['Input'],
                   bottoms=[],
                   tops=['add1'],
                   targets=[]),
            XLayer(name='conv1',
                   type=['Convolution'],
                   bottoms=['in1'],
                   tops=['add1'],
                   data=ConvData(weights=np.array([[[[1, 2], [3, 4]]]],
                                                  dtype=np.float32),
                                 biases=np.array([0., 1.], dtype=np.float32)),
                   targets=[]),
            XLayer(name='add1',
                   type=['Eltwise'],
                   bottoms=['conv1', 'in2'],
                   tops=[],
                   targets=[])
        ]
        xgraph = TestXGraphBasePass.xgraph_factory.build_from_xlayer(xlayers)

        test_pass = TestPass()
        new_xgraph = test_pass.execute(xgraph)

        assert (len(new_xgraph) == 4)
        assert (new_xgraph.get('conv1').type[0] == 'Pooling')
Example #10
    def test_pad_pooling_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import pad_op_support

        X = XLayer(
            type=["Pad"],
            name="pad1",
            shapes=[-1, 2, 6, 6],
            sizes=[72],
            bottoms=[],
            tops=["layer1"],
            targets=[],
            attrs={"padding": [[0, 0], [0, 0], [2, 2], [2, 2]]},
        )

        tX = XLayer(
            type=["Pooling"],
            name="layer1",
            shapes=[-1, 2, 4, 4],
            sizes=[32],
            bottoms=["pad1"],
            tops=[],
            targets=[],
            attrs={
                "data_layout": "NCHW",
                "kernel_size": [2, 2],
                "strides": [3, 3],
                "padding": [[0, 0], [0, 0], [0, 0], [0, 0]],
            },
        )

        assert pad_op_support(X, [], [tX])

        X = XLayer(
            type=["Pad"],
            name="pad1",
            shapes=[-1, 2, 6, 6],
            sizes=[72],
            bottoms=[],
            tops=["layer1"],
            targets=[],
            attrs={"padding": [[0, 0], [0, 0], [5, 2], [5, 2]]},
        )

        tX = XLayer(
            type=["Pooling"],
            name="layer1",
            shapes=[-1, 2, 4, 4],
            sizes=[32],
            bottoms=["pad1"],
            tops=[],
            targets=[],
            attrs={
                "data_layout": "NCHW",
                "kernel_size": [2, 2],
                "strides": [3, 3],
                "padding": [[0, 0], [0, 0], [0, 0], [0, 0]],
            },
        )

        assert not pad_op_support(X, [], [tX])
Example #11
    def test_xgraph_add_remove(self):

        xgraph = XGraph()
        xgraph.add(
            XLayer(name='in1', type=['Input'], bottoms=[], tops=[],
                   targets=[]))

        assert (len(xgraph) == 1)
        assert (len(xgraph.get_layer_names()) == 1)
        assert (len(xgraph.get_output_names()) == 1)
        assert (len(xgraph.get_input_names()) == 1)

        X_conv = XLayer(name='conv1',
                        type=['Convolution'],
                        bottoms=['in1'],
                        tops=[],
                        data=ConvData(weights=np.array([[[[1, 2], [3, 4]]]],
                                                       dtype=np.float32),
                                      biases=np.array([0., 1.],
                                                      dtype=np.float32)),
                        targets=[])
        xgraph.add(X_conv)

        assert (len(xgraph) == 2)
        assert (len(xgraph.get_layer_names()) == 2)
        assert (len(xgraph.get_output_names()) == 1)
        assert (len(xgraph.get_input_names()) == 1)

        xgraph.remove(X_conv.name)

        assert (len(xgraph) == 1)
        assert (len(xgraph.get_layer_names()) == 1)
        assert (len(xgraph.get_output_names()) == 1)
        assert (len(xgraph.get_input_names()) == 1)
Example #12
    def test_io_basic(self):
        net = [
            XLayer(name='in1',
                   type=['Input'],
                   shapes=TensorShape([1, 1, 4, 4]),
                   bottoms=[],
                   tops=[],
                   targets=[]),
            XLayer(name='in2',
                   type=['Input'],
                   shapes=TensorShape([1, 1, 4, 4]),
                   bottoms=[],
                   tops=[],
                   targets=[]),
            XLayer(name='add',
                   type=['Eltwise'],
                   shapes=TensorShape([1, 1, 4, 4]),
                   bottoms=['in1', 'in2'],
                   tops=[],
                   targets=[])
        ]
        xgraph = TestXGraphIO.xgraph_factory.build_from_xlayer(net)

        TestXGraphIO.xgraph_io.save(xgraph, 'test')

        loaded_xgraph = TestXGraphIO.xgraph_io.load('test.json', 'test.h5')

        # assert(len(loaded_xgraph.get_layers()) == len(xgraph.get_layers()))
        assert all([
            lxl == xl
            for lxl, xl in zip(loaded_xgraph.get_layers(), xgraph.get_layers())
        ])

        os.remove('test.json')
        os.remove('test.h5')
Example #13
    def test_pad_convolution_support(self):
        from pyxir.contrib.dpuv2.ultra96_op_support import \
            pad_op_support

        X = XLayer(type=['Pad'],
                   name='pad1',
                   shapes=[-1, 2, 6, 6],
                   sizes=[72],
                   bottoms=[],
                   tops=['layer1'],
                   targets=[],
                   attrs={'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]})

        tX = XLayer(type=['Convolution'],
                    name='layer1',
                    shapes=[-1, 2, 4, 4],
                    sizes=[32],
                    bottoms=['pad1'],
                    tops=[],
                    targets=[],
                    attrs={
                        'data_layout': 'NCHW',
                        'kernel_size': [2, 2],
                        'strides': [1, 1],
                        'dilation': [1, 1],
                        'padding': [[0, 0], [0, 0], [0, 0], [0, 0]],
                        'channels': [4, 2],
                        'groups': 1
                    })

        assert pad_op_support(X, [], [tX])

        X = XLayer(type=['Pad'],
                   name='pad1',
                   shapes=[-1, 2, 6, 6],
                   sizes=[72],
                   bottoms=[],
                   tops=['layer1'],
                   targets=[],
                   attrs={'padding': [[0, 0], [0, 0], [2, 2], [2, 2]]})

        tX = XLayer(type=['Convolution'],
                    name='layer1',
                    shapes=[-1, 2, 4, 4],
                    sizes=[32],
                    bottoms=['pad1'],
                    tops=[],
                    targets=[],
                    attrs={
                        'data_layout': 'NCHW',
                        'kernel_size': [2, 2],
                        'strides': [1, 1],
                        'dilation': [1, 1],
                        'padding': [[0, 0], [0, 0], [0, 0], [0, 0]],
                        'channels': [4, 2],
                        'groups': 1
                    })

        assert not pad_op_support(X, [], [tX])
Example #14
    def test_xlayer_subgraph(self):

        X = XLayer(subgraph="xp0")

        assert isinstance(X.target, str)

        assert X.subgraph == "xp0"
        X.subgraph = 'xp1'
        assert X.subgraph == 'xp1'
Example #15
    def test_xlayer_target(self):

        X = XLayer(target="dpu")

        assert isinstance(X.target, str)

        assert X.target == "dpu"
        X.target = 'cpu'
        assert X.target == 'cpu'
Example #16
    def test_simple(self):
        net = [
            XLayer(name='in1',
                   type=['Input'],
                   shapes=[1, 1, 4, 4],
                   sizes=[16],
                   bottoms=[],
                   tops=['conv1'],
                   layer=['in1'],
                   targets=[]),
            XLayer(name='conv1',
                   type=['Convolution'],
                   shapes=[1, 2, 3, 3],
                   sizes=[18],
                   bottoms=['in1'],
                   tops=[],
                   layer=['conv1'],
                   data=ConvData(np.array([1, 1]), np.array([0, 0])),
                   attrs={
                       'data_layout': 'NCHW',
                       'padding': [[0, 0], [0, 0], [1, 1], [1, 1]]
                   },
                   targets=[])
        ]
        xgraph = TestLayoutTransformationPass.xgraph_factory\
            .build_from_xlayer(net)

        layout_transform_pass = XGraphLayoutTransformationPass('NHWC')
        new_xgraph = layout_transform_pass.execute(xgraph)

        xlayers = new_xgraph.get_layers()
        # print(xlayers)
        assert len(new_xgraph) == 4
        assert xlayers[0].type[0] == 'Input'
        assert xlayers[1].type[0] == 'Transpose'
        assert xlayers[2].type[0] == 'Convolution'
        assert xlayers[3].type[0] == 'Transpose'

        assert xlayers[0].bottoms == []
        assert xlayers[0].tops == ['conv1_bottom_NCHW>NHWC']
        assert xlayers[0].shapes == [1, 1, 4, 4]
        assert xlayers[1].bottoms == ['in1']
        assert xlayers[1].tops == ['conv1']
        assert xlayers[1].shapes == [1, 4, 4, 1]
        assert xlayers[2].bottoms == ['conv1_bottom_NCHW>NHWC']
        assert xlayers[2].tops == ['conv1_top_NHWC>NCHW']
        assert xlayers[2].shapes == [1, 3, 3, 2]
        assert xlayers[3].bottoms == ['conv1']
        assert xlayers[3].tops == []
        assert xlayers[3].shapes == [1, 2, 3, 3]

        # NCHW -> NHWC
        assert xlayers[1].attrs['axes'] == [0, 2, 3, 1]
        # NHWC -> NCHW
        assert xlayers[3].attrs['axes'] == [0, 3, 1, 2]

        assert xlayers[2].attrs['data_layout'] == 'NHWC'
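
Side note (editorial): the shapes asserted for the inserted Transpose layers can be reproduced with plain NumPy transposes, which is a quick way to sanity-check the axes attributes:

    import numpy as np
    # NCHW -> NHWC with axes [0, 2, 3, 1]
    assert np.transpose(np.zeros((1, 1, 4, 4)), (0, 2, 3, 1)).shape == (1, 4, 4, 1)
    # NHWC -> NCHW with axes [0, 3, 1, 2]
    assert np.transpose(np.zeros((1, 3, 3, 2)), (0, 3, 1, 2)).shape == (1, 2, 3, 3)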
Example #17
    def test_xlayer_internal(self):

        X = XLayer(internal=True)

        assert isinstance(X.internal, bool)

        assert X.internal is True
        X.internal = False
        assert not X.internal
Example #18
    def test_multiply_layer(self):

        iX1 = XLayer(
            type=['Input'],
            name='in1',
            shapes=[-1, 2, 1, 4],
            sizes=[8],
            bottoms=[],
            tops=[],
            targets=[]
        )

        iX2 = XLayer(
            type=['Input'],
            name='in2',
            shapes=[-1, 2, 1, 4],
            sizes=[8],
            bottoms=[],
            tops=[],
            targets=[]
        )

        mX = xlf.get_xop_factory_func('Multiply')('mul2', [iX1, iX2])

        assert mX.type[0] == 'Multiply'
        assert mX.shapes == [-1, 2, 1, 4]

        iX3 = XLayer(
            type=['Input'],
            name='in3',
            shapes=[-1, 1, 4, 1],
            sizes=[4],
            bottoms=[],
            tops=[],
            targets=[]
        )

        mX = xlf.get_xop_factory_func('Multiply')('mul3', [iX1, iX3])

        assert mX.type[0] == 'Multiply'
        assert mX.shapes == [-1, 2, 4, 4]

        iX4 = XLayer(
            type=['Input'],
            name='in4',
            shapes=[4, 1],
            sizes=[4],
            bottoms=[],
            tops=[],
            targets=[]
        )

        mX = xlf.get_xop_factory_func('Multiply')('mul4', [iX1, iX4])

        assert mX.type[0] == 'Multiply'
        assert mX.shapes == [-1, 2, 4, 4]
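
The Multiply output shapes asserted above look like plain NumPy-style broadcasting on the non-batch dimensions; a quick editorial check (np.broadcast_shapes requires NumPy >= 1.20):

    import numpy as np
    assert np.broadcast_shapes((2, 1, 4), (2, 1, 4)) == (2, 1, 4)   # mul2
    assert np.broadcast_shapes((2, 1, 4), (1, 4, 1)) == (2, 4, 4)   # mul3
    assert np.broadcast_shapes((2, 1, 4), (4, 1)) == (2, 4, 4)      # mul4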
Example #19
    def test_multiply_layer(self):

        iX1 = XLayer(
            type=["Input"],
            name="in1",
            shapes=[-1, 2, 1, 4],
            sizes=[8],
            bottoms=[],
            tops=[],
            targets=[],
        )

        iX2 = XLayer(
            type=["Input"],
            name="in2",
            shapes=[-1, 2, 1, 4],
            sizes=[8],
            bottoms=[],
            tops=[],
            targets=[],
        )

        mX = xlf.get_xop_factory_func("Multiply")("mul2", [iX1, iX2])

        assert mX.type[0] == "Multiply"
        assert mX.shapes == [-1, 2, 1, 4]

        iX3 = XLayer(
            type=["Input"],
            name="in3",
            shapes=[-1, 1, 4, 1],
            sizes=[4],
            bottoms=[],
            tops=[],
            targets=[],
        )

        mX = xlf.get_xop_factory_func("Multiply")("mul3", [iX1, iX3])

        assert mX.type[0] == "Multiply"
        assert mX.shapes == [-1, 2, 4, 4]

        iX4 = XLayer(
            type=["Input"],
            name="in4",
            shapes=[4, 1],
            sizes=[4],
            bottoms=[],
            tops=[],
            targets=[],
        )

        mX = xlf.get_xop_factory_func("Multiply")("mul4", [iX1, iX4])

        assert mX.type[0] == "Multiply"
        assert mX.shapes == [-1, 2, 4, 4]
Example #20
    def test_xlayer_shapes(self):

        # TensorShape
        X = XLayer(shapes=[-1, 2, 4, 4])

        assert X.shapes == [-1, 2, 4, 4]
        assert X.shapes == TensorShape([-1, 2, 4, 4])

        X.shapes[1] = 3
        assert X.shapes == [-1, 3, 4, 4]
        assert X.shapes == TensorShape([-1, 3, 4, 4])

        X.shapes = [-1, 3, 5, 5]
        assert X.shapes == [-1, 3, 5, 5]
        assert X.shapes == TensorShape([-1, 3, 5, 5])
        assert X.shapes.get_size() == [75]

        shapes2 = X.shapes._replace(5, 6)
        assert shapes2 == TensorShape([-1, 3, 6, 6])

        # TupleShape
        X = XLayer(shapes=[[-1, 2, 4, 4], [-1, 2, 3, 3]])

        assert X.shapes == [[-1, 2, 4, 4], [-1, 2, 3, 3]]
        assert X.shapes == TupleShape(
            [TensorShape([-1, 2, 4, 4]),
             TensorShape([-1, 2, 3, 3])])
        assert X.shapes.get_size() == [32, 18]

        assert X.shapes[0] == [-1, 2, 4, 4]
        assert X.shapes[1] == [-1, 2, 3, 3]
        assert X.shapes[0] == TensorShape([-1, 2, 4, 4])
        assert X.shapes[1] == TensorShape([-1, 2, 3, 3])

        X.shapes[0] = [-1, 1, 2, 2]
        assert X.shapes == [[-1, 1, 2, 2], [-1, 2, 3, 3]]

        X.shapes[0][1] = 3
        assert X.shapes == [[-1, 3, 2, 2], [-1, 2, 3, 3]]
        assert X.shapes.get_size() == [12, 18]
        assert X.shapes.tolist() == [[-1, 3, 2, 2], [-1, 2, 3, 3]]

        X.shapes[1] = [-1, 3, 4, 4]
        assert X.shapes.get_size() == [12, 48]

        shapes2 = X.shapes._replace(4, 6)
        assert shapes2 == [[-1, 3, 2, 2], [-1, 3, 6, 6]]
        assert shapes2 == TupleShape([[-1, 3, 2, 2], [-1, 3, 6, 6]])
        assert shapes2.get_size() == [12, 108]
        assert X.shapes.get_size() == [12, 48]

        # Tuple one element
        X.shapes = [[1, 2, 3, 3]]
        assert X.shapes == [[1, 2, 3, 3]]
        assert X.shapes == TupleShape([[1, 2, 3, 3]])
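
Judging from the asserts above, get_size() seems to be the product of the dimensions with the -1 batch placeholder ignored; a small illustrative check (the expected_size helper is hypothetical):

    import numpy as np
    def expected_size(shape):
        # product of all dimensions except the -1 batch placeholder
        return int(np.prod([d for d in shape if d != -1]))
    assert expected_size([-1, 3, 5, 5]) == 75
    assert [expected_size(s) for s in ([-1, 3, 2, 2], [-1, 3, 4, 4])] == [12, 48]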
Example #21
    def test_xlayer_name(self):

        X = XLayer(name="Elon")
        assert X.name == "Elon"

        X2 = X.copy()
        assert X2.name == "Elon"

        X2.name = "Musk"
        assert X.name == "Elon"
        assert X2.name == "Musk"
Example #22
    def test_xlayer_name(self):

        X = XLayer(name='Elon')
        assert X.name == 'Elon'

        X2 = X.copy()
        assert X2.name == 'Elon'

        X2.name = 'Musk'
        assert X.name == 'Elon'
        assert X2.name == 'Musk'
Example #23
    def test_take_layer(self):

        iX = XLayer(
            type=["Input"],
            name="in1",
            shapes=[1, 3, 4, 4],
            sizes=[48],
            bottoms=[],
            tops=[],
            targets=[],
        )

        indX1 = XLayer(
            type=["Constant"],
            name="indices",
            shapes=[],
            sizes=[],
            data=[np.array(0, dtype=np.int32)],
            bottoms=[],
            tops=[],
            targets=[],
        )

        tX = xlf.get_xop_factory_func("Take")("take1", [iX, indX1],
                                              axis=1,
                                              mode="clip")

        assert tX.type[0] == "Take"
        assert tX.attrs["axis"] == 1
        assert tX.attrs["mode"] == "clip"
        assert tX.bottoms == ["in1", "indices"]
        assert tX.shapes == [1, 4, 4]
        assert tX.sizes == [16]

        indX2 = XLayer(
            type=["Constant"],
            name="indices",
            shapes=[2],
            sizes=[2],
            data=[np.array([0, 2], dtype=np.int32)],
            bottoms=[],
            tops=[],
            targets=[],
        )

        tX = px.ops.take("take2", [iX, indX2], axis=1, mode="clip")

        assert tX.type[0] == "Take"
        assert tX.attrs["axis"] == 1
        assert tX.attrs["mode"] == "clip"
        assert tX.bottoms == ["in1", "indices"]
        assert tX.shapes == [1, 2, 4, 4]
        assert tX.sizes == [32]
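
The Take output shapes asserted above match NumPy's take semantics: a scalar index drops the indexed axis, while an index array of length k keeps it with size k (editorial check, not part of the test):

    import numpy as np
    x = np.zeros((1, 3, 4, 4))
    assert np.take(x, np.array(0, dtype=np.int32), axis=1).shape == (1, 4, 4)
    assert np.take(x, np.array([0, 2], dtype=np.int32), axis=1).shape == (1, 2, 4, 4)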
Example #24
    def test_scaling_layer(self):

        iX = XLayer(
            type=["Input"],
            name="in1",
            shapes=[1, 2, 4, 4],
            sizes=[32],
            bottoms=[],
            tops=[],
            targets=[],
        )

        gX = XLayer(
            type=["Constant"],
            name="gamma",
            shapes=[2],
            sizes=[2],
            data=[np.array([1.0, 2.0])],
            bottoms=[],
            tops=[],
            targets=[],
        )

        bX = XLayer(
            type=["Constant"],
            name="beta",
            shapes=[2],
            sizes=[2],
            data=[np.array([1.0, -2.0])],
            bottoms=[],
            tops=[],
            targets=[],
        )

        sX = xlf.get_xop_factory_func("Scale")("scale1", iX, gX, bX, axis=1)

        assert sX.type[0] == "Scale"
        assert sX.attrs["axis"] == 1

        np.testing.assert_array_equal(sX.data.gamma, np.array([1.0, 2.0]))
        np.testing.assert_array_equal(sX.data.beta, np.array([1.0, -2.0]))

        from pyxir.graph.ops.l1_basic_nn import scale_transpose_transform

        scale_transpose_transform(sX, axes=[0, 2, 3, 1])

        assert sX.type[0] == "Scale"
        assert sX.shapes == [1, 4, 4, 2]
        assert sX.attrs["axis"] == 3
Example #25
    def test_scaling_layer(self):

        iX = XLayer(
            type=['Input'],
            name='in1',
            shapes=[1, 2, 4, 4],
            sizes=[32],
            bottoms=[],
            tops=[],
            targets=[]
        )

        gX = XLayer(
            type=['Constant'],
            name='gamma',
            shapes=[2],
            sizes=[2],
            data=[np.array([1., 2.])],
            bottoms=[],
            tops=[],
            targets=[]
        )

        bX = XLayer(
            type=['Constant'],
            name='beta',
            shapes=[2],
            sizes=[2],
            data=[np.array([1., -2.])],
            bottoms=[],
            tops=[],
            targets=[]
        )

        sX = xlf.get_xop_factory_func('Scale')('scale1', iX, gX, bX, axis=1)

        assert sX.type[0] == 'Scale'
        assert sX.attrs['axis'] == 1

        np.testing.assert_array_equal(sX.data.gamma, np.array([1., 2.]))
        np.testing.assert_array_equal(sX.data.beta, np.array([1., -2.]))

        from pyxir.graph.ops.l1_basic_nn import scale_transpose_transform

        scale_transpose_transform(sX, axes=[0, 2, 3, 1])

        assert sX.type[0] == 'Scale'
        assert sX.shapes == [1, 4, 4, 2]
        assert sX.attrs['axis'] == 3
Example #26
    def test_xlayer_sizes(self):

        X = XLayer(sizes=[16])

        assert isinstance(X.sizes, IntVector)

        assert X.sizes == [16]
        del X.sizes[0]
        assert X.sizes == []
        X.sizes.append(8)
        assert X.sizes == [8]

        X.sizes = [32]
        assert X.sizes == [32]
        assert len(X.sizes) == 1
Example #27
    def test_pad_layer(self):

        iX = XLayer(
            type=["Input"],
            name="in1",
            shapes=[1, 2, 7, 7],
            sizes=[98],
            bottoms=[],
            tops=[],
            targets=[],
        )

        X = xlf.get_xop_factory_func("Pad")(
            op_name="pad1",
            padding=[[0, 0], [0, 0], [1, 0], [1, 0]],
            pad_value=0,
            input_layer=iX,
        )

        assert X.type[0] == "Pad"
        assert X.shapes == [1, 2, 8, 8]
        assert X.sizes == [128]
        assert X.attrs["padding"] == [[0, 0], [0, 0], [1, 0], [1, 0]]

        from pyxir.graph.ops.l2_convolution import padding_transpose_transform

        padding_transpose_transform(X, axes=(0, 2, 3, 1))

        assert X.type[0] == "Pad"
        assert X.shapes == [1, 8, 8, 2]
        assert X.attrs["padding"] == [[0, 0], [1, 0], [1, 0], [0, 0]]
Example #28
    def test_nn_upsampling2d(self):

        iX = XLayer(type=['Input'],
                    name='in1',
                    shapes=[1, 4, 2, 2],
                    sizes=[16],
                    bottoms=[],
                    tops=[],
                    targets=[])

        sX = xlf.get_xop_factory_func('Upsampling2D')(
            'ups1', [iX],
            scale_h=3,
            scale_w=2,
            data_layout='NCHW',
            method='nearest_neighbor',
            align_corners=False)

        assert sX.type[0] == 'Upsampling2D'
        assert sX.shapes == [1, 4, 6, 4]
        assert sX.sizes == [96]
        assert sX.attrs['scale_h'] == 3
        assert sX.attrs['scale_w'] == 2
        assert sX.attrs['data_layout'] == 'NCHW'
        assert sX.attrs['method'] == 'nearest_neighbor'
        assert sX.attrs['align_corners'] is False

        from pyxir.graph.ops.l2_convolution import \
            upsampling2d_layout_transform

        upsampling2d_layout_transform(sX, target_layout='NHWC')

        assert sX.type[0] == 'Upsampling2D'
        assert sX.shapes == [1, 6, 4, 4]
        assert sX.attrs['data_layout'] == 'NHWC'
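
The Upsampling2D output shape asserted above is the NCHW input shape with H and W scaled by scale_h and scale_w; for nearest-neighbour upsampling the same shape can be reproduced by repeating along the spatial axes (editorial sketch):

    import numpy as np
    x = np.zeros((1, 4, 2, 2))
    y = np.repeat(np.repeat(x, 3, axis=2), 2, axis=3)   # scale_h=3, scale_w=2
    assert y.shape == (1, 4, 6, 4)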
Example #29
    def test_pad_layer(self):

        iX = XLayer(type=['Input'],
                    name='in1',
                    shapes=[1, 2, 7, 7],
                    sizes=[98],
                    bottoms=[],
                    tops=[],
                    targets=[])

        X = xlf.get_xop_factory_func('Pad')(op_name='pad1',
                                            padding=[[0, 0], [0, 0], [1, 0],
                                                     [1, 0]],
                                            pad_value=0,
                                            input_layer=iX)

        assert X.type[0] == 'Pad'
        assert X.shapes == [1, 2, 8, 8]
        assert X.sizes == [128]
        assert X.attrs['padding'] == [[0, 0], [0, 0], [1, 0], [1, 0]]

        from pyxir.graph.ops.l2_convolution import \
            padding_transpose_transform

        padding_transpose_transform(X, axes=(0, 2, 3, 1))

        assert X.type[0] == 'Pad'
        assert X.shapes == [1, 8, 8, 2]
        assert X.attrs['padding'] == [[0, 0], [1, 0], [1, 0], [0, 0]]
Example #30
    def test_global_pooling_layer(self):

        iX = XLayer(type=['Input'],
                    name='in1',
                    shapes=[1, 2, 7, 7],
                    sizes=[98],
                    bottoms=[],
                    tops=[],
                    targets=[])

        X = xlf.get_xop_factory_func('GlobalPooling')(op_name='gp1',
                                                      pool_type='Max',
                                                      layout='NCHW',
                                                      input_layer=iX)

        assert X.type[0] == 'Pooling'
        assert X.shapes == [1, 2, 1, 1]
        assert X.attrs['padding'] == [[0, 0], [0, 0], [0, 0], [0, 0]]
        assert X.attrs['insize'] == [7, 7]
        assert X.attrs['outsize'] == [1, 1]
        assert X.attrs['data_layout'] == 'NCHW'
        assert X.attrs['strides'] == [1, 1]
        assert X.attrs['kernel_size'] == [7, 7]
        assert X.attrs['pool_type'] == 'Max'

        from pyxir.graph.ops.l2_convolution import \
            pooling_layout_transform

        pooling_layout_transform(X, target_layout='NHWC')

        assert X.type[0] == 'Pooling'
        assert X.shapes == [1, 1, 1, 2]
        assert X.attrs['data_layout'] == 'NHWC'
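
For global pooling the kernel covers the whole spatial extent, which is why kernel_size equals insize and outsize is [1, 1] in the asserts above; a minimal check using the usual pooling output formula (no padding and stride 1 assumed, as in the attrs):

    in_h, in_w = 7, 7
    kernel_h, kernel_w = in_h, in_w        # kernel_size == [7, 7]
    stride_h = stride_w = 1                # strides == [1, 1]
    out_h = (in_h - kernel_h) // stride_h + 1
    out_w = (in_w - kernel_w) // stride_w + 1
    assert [out_h, out_w] == [1, 1]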