Code Example #1
    def test_set_static_masks(self):
        main_program = paddle.static.default_main_program()
        startup_program = paddle.static.default_startup_program()
        with paddle.static.program_guard(main_program, startup_program):
            input = paddle.static.data(name='image', shape=[None, 3, 16, 16])
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
            conv1 = conv_bn_layer(input, 8, 1, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 1, "conv2")
            conv3 = fluid.layers.conv2d_transpose(input=conv2,
                                                  num_filters=16,
                                                  filter_size=2,
                                                  stride=2)
            predict = fluid.layers.fc(input=conv3, size=10, act='softmax')
            cost = fluid.layers.cross_entropy(input=predict, label=label)
            adam_optimizer = fluid.optimizer.AdamOptimizer(0.01)
            avg_cost = fluid.layers.mean(cost)
            adam_optimizer.minimize(avg_cost)

        place = paddle.static.cpu_places()[0]
        exe = paddle.static.Executor(place)
        scope = paddle.static.global_scope()
        exe.run(startup_program, scope=scope)

        pruner = UnstructuredPruner(main_program,
                                    'ratio',
                                    scope=scope,
                                    place=place)

        self._update_masks(pruner, 0.0)
        pruner.update_params()
        self._update_masks(pruner, 1.0)
        pruner.set_static_masks()
        sparsity_0 = pruner.total_sparse(main_program)
        x = np.random.random(size=(10, 3, 16, 16)).astype('float32')
        label = np.random.random(size=(10, 1)).astype('int64')
        loss_data, = exe.run(main_program,
                             feed={
                                 "image": x,
                                 "label": label
                             },
                             fetch_list=[cost.name])
        sparsity_1 = UnstructuredPruner.total_sparse(main_program)
        pruner.update_params()
        sparsity_2 = UnstructuredPruner.total_sparse(main_program)
        print(sparsity_0, sparsity_1, sparsity_2)
        self.assertEqual(sparsity_0, 1.0)
        self.assertLess(abs(sparsity_2 - 1), 0.001)
        self.assertLess(sparsity_1, 1.0)
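
The examples on this page depend on helpers from the PaddleSlim test suite that are not shown, most notably conv_bn_layer; this example additionally uses a self._update_masks helper, which presumably fills every pruning mask with a constant (0.0 to prune everything, 1.0 to keep everything). Below is a minimal sketch of what a compatible conv_bn_layer might look like under the legacy fluid static API, inferred from the parameter names the tests assert on ("<name>_weights", "<name>_bn_scale", etc.); it is an assumption, not the helper actually used by PaddleSlim.

import paddle
import paddle.fluid as fluid

paddle.enable_static()  # these snippets all use the static graph mode

# Hypothetical stand-in for the conv_bn_layer helper used throughout this page:
# a conv2d followed by batch_norm, with parameter names matching the tests.
def conv_bn_layer(input, num_filters, filter_size, name, stride=1, groups=1,
                  act=None, bias=False, use_cudnn=True, sync_bn=False):
    # sync_bn is accepted for signature compatibility but not handled here.
    conv = fluid.layers.conv2d(
        input=input,
        num_filters=num_filters,
        filter_size=filter_size,
        stride=stride,
        padding=(filter_size - 1) // 2,
        groups=groups,
        act=None,
        use_cudnn=use_cudnn,
        param_attr=fluid.ParamAttr(name=name + "_weights"),
        bias_attr=fluid.ParamAttr(name=name + "_out.b_0") if bias else False,
        name=name + "_out")
    # Batch norm whose scale/offset/mean/variance follow the "<name>_bn_*"
    # naming convention referenced by the pruning assertions above.
    return fluid.layers.batch_norm(
        input=conv,
        act=act,
        param_attr=fluid.ParamAttr(name=name + "_bn_scale"),
        bias_attr=fluid.ParamAttr(name=name + "_bn_offset"),
        moving_mean_name=name + "_bn_mean",
        moving_variance_name=name + "_bn_variance")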
Code Example #2
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(main_program, startup_program):
                input = fluid.data(name="image", shape=[1, 3, 16, 16])
                conv1 = conv_bn_layer(
                    input, 8, 3, "conv1", act='relu')  #[1, 8, 1, 1]
                # hit default pruning worker
                cast1 = paddle.cast(conv1, dtype="int32")
                # hit unsupported pruning worker
                out = paddle.reshape(cast1, shape=[1, -1])

        graph = GraphWrapper(main_program)
        cls = PRUNE_WORKER.get("conv2d")
        in_var = graph.var("conv1_weights")
        op = in_var.outputs()[0]
        # pruning input of conv op
        pruned_params = []
        ret = {}
        os.environ['OPS_UNSUPPORTED'] = "reshape2"
        worker = cls(op, pruned_params, {}, True)
        hit_unsupported_op = False
        try:
            worker.prune(in_var, 0, [])
        except UnsupportOpError as e:
            hit_unsupported_op = True
            print(e)
        self.assertTrue(hit_unsupported_op)
Code Example #3
    def test_mul(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            fc_0 = paddle.fluid.layers.fc(conv1, size=10)
            fc_1 = paddle.fluid.layers.fc(fc_0, size=10)

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        scope = fluid.Scope()
        exe.run(startup_program, scope=scope)
        pruner = Pruner()
        # test backward search of concat
        pruned_program, _, _ = pruner.prune(main_program,
                                            scope,
                                            params=["conv1_weights"],
                                            ratios=[0.5],
                                            place=place,
                                            lazy=False,
                                            only_graph=True,
                                            param_backup=None,
                                            param_shape_backup=None)
        shapes = {
            "conv1_weights": (4, 3, 3, 3),
            "fc_0.w_0": (1024, 10),
            "fc_1.w_0": (10, 10)
        }
        for param in pruned_program.global_block().all_parameters():
            if param.name in shapes.keys():
                self.assertTrue(shapes[param.name] == param.shape)
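
As a quick check on the expected fc_0.w_0 shape above: pruning conv1 with ratio 0.5 leaves 4 of its 8 filters, and assuming conv_bn_layer uses same-padding so the 16x16 spatial size is preserved, the flattened feature fed to the first fc layer shrinks accordingly.

# conv1 originally has 8 filters; ratio=0.5 prunes it down to 4.
remaining_filters = 8 - int(8 * 0.5)
# With a 16x16 input and same-padding, the conv output stays 16x16,
# so the fc input size is channels * height * width.
fc_in_features = remaining_filters * 16 * 16
print(fc_in_features)  # 1024 -> matches the expected "fc_0.w_0": (1024, 10)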
Code Example #4
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(main_program, startup_program):
                input = fluid.data(name="image", shape=[1, 3, 16, 16])
                conv1 = conv_bn_layer(input, 8, 3, "conv1",
                                      act='relu')  #[1, 8, 1, 1]
                conv2 = conv_bn_layer(input, 8, 3, "conv2",
                                      act='relu')  #[1, 8, 1, 1]
                out = conv1 + conv2

        graph = GraphWrapper(main_program)
        cls = PRUNE_WORKER.get("sum")
        out_var = graph.var(out.name)
        in_var = graph.var(conv1.name)
        op = out_var.inputs()[0]
        # pruning out
        pruned_params = []
        ret = {}
        worker = cls(op, pruned_params, {}, True)
        worker.prune(out_var, 1, [])
        for var, axis, _, _ in pruned_params:
            ret[var.name()] = axis
        self.assertTrue(
            ret == {
                'conv1_weights': 0,
                'conv1_bn_scale': 0,
                'conv1_bn_offset': 0,
                'conv1_bn_mean': 0,
                'conv1_bn_variance': 0
            })

        # pruning inputs
        pruned_params = []
        worker = cls(op, pruned_params, {}, True)
        worker.skip_vars = [out.name]
        try:
            worker.prune(in_var, 0, [])
        except UnsupportOpError as e:
            print(e)
        self.assertTrue(pruned_params == [])
Code Example #5
File: test_loss.py  Project: 666DZY666/PaddleSlim
    def test_loss(self):
        student_main = fluid.Program()
        student_startup = fluid.Program()
        with fluid.program_guard(student_main, student_startup):
            input = fluid.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            student_predict = conv1 + conv2

        teacher_main = fluid.Program()
        teacher_startup = fluid.Program()
        with fluid.program_guard(teacher_main, teacher_startup):
            input = fluid.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            teacher_predict = conv_bn_layer(conv5, 8, 3, "conv6")

        place = fluid.CPUPlace()
        data_name_map = {'image': 'image'}
        merge(teacher_main, student_main, data_name_map, place)
        merged_ops = []
        for block in student_main.blocks:
            for op in block.ops:
                merged_ops.append(op.type)

        def adaptation_loss(t_var, s_var):
            teacher_channel = t_var.shape[1]
            s_hint = fluid.layers.conv2d(s_var, teacher_channel, 1)
            hint_loss = fluid.layers.reduce_mean(
                fluid.layers.square(s_hint - t_var))
            return hint_loss

        with fluid.program_guard(student_main):
            distill_loss = loss(
                adaptation_loss,
                student_main,
                t_var='teacher_conv6_bn_output.tmp_2',
                s_var='conv2_bn_output.tmp_2')
        loss_ops = []
        for block in student_main.blocks:
            for op in block.ops:
                loss_ops.append(op.type)
        self.assertTrue(set(merged_ops).difference(set(loss_ops)) == set())
        self.assertTrue(
            set(loss_ops).difference(set(merged_ops)) ==
            {'reduce_mean', 'elementwise_sub', 'square'})
Code Example #6
    def test_split(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(input, 4, 3, "conv2")
            split_0, split_1 = paddle.split(conv1, 2, axis=1)
            add = split_0 + conv2
            out = conv_bn_layer(add, 4, 3, "conv3")
            out1 = conv_bn_layer(split_1, 4, 4, "conv4")

        shapes = {}
        for param in main_program.global_block().all_parameters():
            shapes[param.name] = param.shape

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        scope = fluid.Scope()
        exe.run(startup_program, scope=scope)
        pruner = Pruner()
        # test backward search of concat
        pruned_program, _, _ = pruner.prune(main_program,
                                            scope,
                                            params=["conv2_weights"],
                                            ratios=[0.5],
                                            place=place,
                                            lazy=False,
                                            only_graph=True,
                                            param_backup=None,
                                            param_shape_backup=None)
        shapes = {
            "conv1_weights": (6, 3, 3, 3),
            "conv2_weights": (2, 3, 3, 3),
            "conv3_weights": (4, 2, 3, 3),
            "conv4_weights": (4, 4, 3, 3),
        }
        for param in pruned_program.global_block().all_parameters():
            if "weights" in param.name and "conv2d" in param.name:
                self.assertTrue(shapes[param.name] == param.shape)
Code Example #7
 def test_prune(self):
     main_program = fluid.Program()
     startup_program = fluid.Program()
     #   X       X              O       X              O
     # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
     #     |            ^ |                    ^
     #     |____________| |____________________|
     #
     # X: prune output channels
     # O: prune input channels
     with fluid.program_guard(main_program, startup_program):
         input = fluid.data(name="image", shape=[None, 3, 16, 16])
         conv1 = conv_bn_layer(input, 8, 3, "conv1")
         conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
         sum1 = conv1 + conv2
         conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
         conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
         sum2 = conv4 + sum1
         conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
         conv6 = conv_bn_layer(conv5, 8, 3, "conv6")
     groups = collect_convs(
         ["conv1_weights", "conv2_weights", "conv3_weights"], main_program)
     self.assertTrue(len(groups) == 2)
     self.assertTrue(len(groups[0]) == 18)
     self.assertTrue(len(groups[1]) == 6)
Code Example #8
    def _gen_model(self):
        self.main_program = paddle.static.default_main_program()
        self.startup_program = paddle.static.default_startup_program()
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        with paddle.static.program_guard(self.main_program,
                                         self.startup_program):
            input = paddle.static.data(name='image', shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")

            conv7 = fluid.layers.conv2d_transpose(input=conv6,
                                                  num_filters=16,
                                                  filter_size=2,
                                                  stride=2)

        place = paddle.static.cpu_places()[0]
        exe = paddle.static.Executor(place)
        self.scope = paddle.static.global_scope()
        exe.run(self.startup_program, scope=self.scope)

        self.pruner = UnstructuredPruner(self.main_program,
                                         'ratio',
                                         scope=self.scope,
                                         place=place)
Code Example #9
    def test_sensitivity(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 1, 28, 28])
            label = fluid.data(name="label", shape=[None, 1], dtype="int64")
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")
            out = fluid.layers.fc(conv6, size=10, act='softmax')
            acc_top1 = fluid.layers.accuracy(input=out, label=label, k=1)
        eval_program = main_program.clone(for_test=True)

        place = fluid.CUDAPlace(0)
        exe = fluid.Executor(place)
        exe.run(startup_program)

        val_reader = paddle.fluid.io.batch(paddle.dataset.mnist.test(),
                                           batch_size=128)

        def eval_func(program):
            feeder = fluid.DataFeeder(feed_list=['image', 'label'],
                                      place=place,
                                      program=program)
            acc_set = []
            for data in val_reader():
                acc_np = exe.run(program=program,
                                 feed=feeder.feed(data),
                                 fetch_list=[acc_top1])
                acc_set.append(float(acc_np[0]))
            acc_val_mean = numpy.array(acc_set).mean()
            print("acc_val_mean: {}".format(acc_val_mean))
            return acc_val_mean

        sensitivity(eval_program,
                    place, ["conv4_weights"],
                    eval_func,
                    "./sensitivities_file_0",
                    pruned_ratios=[0.1, 0.2])

        sensitivity(eval_program,
                    place, ["conv4_weights"],
                    eval_func,
                    "./sensitivities_file_1",
                    pruned_ratios=[0.3, 0.4])

        sens_0 = load_sensitivities('./sensitivities_file_0')
        sens_1 = load_sensitivities('./sensitivities_file_1')
        sens = merge_sensitive([sens_0, sens_1])
        origin_sens = sensitivity(eval_program,
                                  place, ["conv4_weights"],
                                  eval_func,
                                  "./sensitivities_file_1",
                                  pruned_ratios=[0.1, 0.2, 0.3, 0.4])
        self.assertTrue(sens == origin_sens)
Code Example #10
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(main_program, startup_program):
                input = fluid.data(name="image", shape=[1, 3, 16, 16])

                conv1 = conv_bn_layer(input,
                                      6,
                                      3,
                                      "conv1",
                                      groups=1,
                                      bias=True,
                                      act='relu')

        graph = GraphWrapper(main_program)
        cls = PRUNE_WORKER.get("conv2d")
        weight_var = graph.var("conv1_weights")
        in_var = graph.var("image")
        op = in_var.outputs()[0]
        out_var = op.outputs("Output")[0]
        # pruning weights of conv op
        pruned_params = []
        ret = {}
        worker = cls(op, pruned_params, {}, True)
        worker.prune(weight_var, 0, [])
        worker.prune(weight_var, 1, [])
        for var, axis, _, _ in pruned_params:
            if var.name() not in ret:
                ret[var.name()] = []
            ret[var.name()].append(axis)
        self.assertTrue(
            ret == {
                'conv1_weights': [0, 1],
                'conv1_out.b_0': [0],
                'conv1_bn_scale': [0],
                'conv1_bn_offset': [0],
                'conv1_bn_mean': [0],
                'conv1_bn_variance': [0]
            })
        # pruning out of conv op
        pruned_params = []
        ret = {}
        worker = cls(op, pruned_params, visited={}, skip_stranger=True)
        worker.prune(out_var, 1, [])
        for var, axis, _, _ in pruned_params:
            if var.name() not in ret:
                ret[var.name()] = []
            ret[var.name()].append(axis)
        self.assertTrue(ret == {'conv1_weights': [0]})
Code Example #11
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        #   X       X              O       X              O
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        #
        # X: prune output channels
        # O: prune input channels
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")

        shapes = {}
        for param in main_program.global_block().all_parameters():
            shapes[param.name] = param.shape

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        scope = fluid.Scope()
        exe.run(startup_program, scope=scope)

        pruned_flops = 0.5
        pruner = AutoPruner(main_program,
                            scope,
                            place,
                            params=["conv4_weights"],
                            init_ratios=[0.5],
                            pruned_flops=0.5,
                            pruned_latency=None,
                            server_addr=("", 0),
                            init_temperature=100,
                            reduce_rate=0.85,
                            max_try_number=300,
                            max_client_num=10,
                            search_steps=2,
                            max_ratios=[0.9],
                            min_ratios=[0],
                            key="auto_pruner")

        base_flops = flops(main_program)
        program = pruner.prune(main_program)
        self.assertTrue(flops(program) <= base_flops * (1 - pruned_flops))
        pruner.reward(1)

        program = pruner.prune(main_program)
        self.assertTrue(flops(program) <= base_flops * (1 - pruned_flops))
        pruner.reward(1)
Code Example #12
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        #   X       X              O       X              O
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        #
        # X: prune output channels
        # O: prune input channels
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")

        shapes = {}
        for param in main_program.global_block().all_parameters():
            shapes[param.name] = param.shape

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        scope = fluid.Scope()
        exe.run(startup_program, scope=scope)
        criterion = 'bn_scale'
        idx_selector = 'optimal_threshold'
        pruner = Pruner(criterion)
        main_program, _, _ = pruner.prune(
            main_program,
            scope,
            params=["conv4_weights"],
            ratios=[0.5],
            place=place,
            lazy=False,
            only_graph=False,
            param_backup=None,
            param_shape_backup=None)

        shapes = {
            "conv1_weights": (4, 3, 3, 3),
            "conv2_weights": (4, 4, 3, 3),
            "conv3_weights": (8, 4, 3, 3),
            "conv4_weights": (4, 8, 3, 3),
            "conv5_weights": (8, 4, 3, 3),
            "conv6_weights": (8, 8, 3, 3)
        }

        for param in main_program.global_block().all_parameters():
            if "weights" in param.name:
                print("param: {}; param shape: {}".format(param.name,
                                                          param.shape))
Code Example #13
    def test_loss(self):
        input = paddle.static.data(name="image", shape=[None, 3, 224, 224])
        conv1 = conv_bn_layer(input, 8, 3, "conv1")
        conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
        student_predict = conv1 + conv2

        teacher_main = paddle.static.Program()
        teacher_startup = paddle.static.Program()
        with paddle.static.program_guard(teacher_main, teacher_startup):
            input = paddle.static.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            teacher_predict = conv_bn_layer(conv5, 8, 3, "conv6")

        place = paddle.CPUPlace()
        data_name_map = {'image': 'image'}
        merge(teacher_main, paddle.static.default_main_program(),
              data_name_map, place)
        merged_ops = []
        for block in paddle.static.default_main_program().blocks:
            for op in block.ops:
                merged_ops.append(op.type)

        def adaptation_loss(t_var, s_var):
            hint_loss = paddle.mean(
                paddle.nn.functional.square_error_cost(s_var, t_var))
            return hint_loss

        distill_loss = loss(adaptation_loss,
                            t_var='teacher_conv6_bn_output.tmp_2',
                            s_var='conv2_bn_output.tmp_2')
        loss_ops = []
        for block in paddle.static.default_main_program().blocks:
            for op in block.ops:
                loss_ops.append(op.type)
        self.assertTrue(set(merged_ops).difference(set(loss_ops)) == set())
        self.assertTrue(
            set(loss_ops).difference(set(merged_ops)) ==
            {'reduce_mean', 'elementwise_sub', 'square'})
Code Example #14
    def test_merge(self):
        student_main = fluid.Program()
        student_startup = fluid.Program()
        with fluid.program_guard(student_main, student_startup):
            input = fluid.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            student_predict = conv1 + conv2

        teacher_main = fluid.Program()
        teacher_startup = fluid.Program()
        with fluid.program_guard(teacher_main, teacher_startup):
            input = fluid.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            teacher_predict = conv_bn_layer(conv5, 8, 3, "conv6")

        place = fluid.CPUPlace()
        data_name_map = {'image': 'image'}
        merge(teacher_main, student_main, data_name_map, place)
        merged_ops = []
        for block in student_main.blocks:
            for op in block.ops:
                merged_ops.append(op.type)
        with fluid.program_guard(student_main):
            distill_loss = fsp_loss('teacher_conv5_bn_output.tmp_2',
                                    'teacher_conv6_bn_output.tmp_2',
                                    'conv1_bn_output.tmp_2',
                                    'conv2_bn_output.tmp_2', student_main)
        loss_ops = []
        for block in student_main.blocks:
            for op in block.ops:
                loss_ops.append(op.type)
        self.assertTrue(set(merged_ops).difference(set(loss_ops)) == set())
        self.assertTrue(
            set(loss_ops).difference(set(merged_ops)) ==
            {'elementwise_sub', 'reduce_mean', 'square', 'fsp'})
Code Example #15
 def test_prune(self):
     main_program = fluid.Program()
     startup_program = fluid.Program()
     with fluid.program_guard(main_program, startup_program):
         input = fluid.data(name="image", shape=[None, 3, 16, 16])
         conv1 = conv_bn_layer(input, 8, 3, "conv1")
         conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
         sum1 = conv1 + conv2
         conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
         conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
         sum2 = conv4 + sum1
         conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
         conv6 = conv_bn_layer(conv5, 8, 3, "conv6")
     self.assertTrue(3288 == model_size(main_program))
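
A rough accounting of where the asserted size of 3288 could come from, assuming model_size sums the convolution weights plus the four batch-norm tensors (scale, offset, mean, variance) of each conv_bn_layer:

# conv1 sees 3 input channels, conv2..conv6 see 8; all use 8 filters and 3x3 kernels.
conv_weights = 8 * 3 * 3 * 3 + 5 * (8 * 8 * 3 * 3)  # 216 + 2880 = 3096
bn_params = 6 * 4 * 8                                # four tensors of size 8 per conv
print(conv_weights + bn_params)                      # 3288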
Code Example #16
    def test_merge(self):
        student_main = fluid.Program()
        student_startup = fluid.Program()
        with fluid.program_guard(student_main, student_startup):
            input = fluid.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            student_predict = conv1 + conv2
        student_ops = []
        for block in student_main.blocks:
            for op in block.ops:
                student_ops.append(op)

        teacher_main = fluid.Program()
        teacher_startup = fluid.Program()
        with fluid.program_guard(teacher_main, teacher_startup):
            input = fluid.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            teacher_predict = conv_bn_layer(conv5, 8, 3, "conv6")
        teacher_ops = []
        for block in teacher_main.blocks:
            for op in block.ops:
                teacher_ops.append(op)

        place = fluid.CPUPlace()
        data_name_map = {'image': 'image'}
        merge(teacher_main, student_main, data_name_map, place)
        merged_ops = []
        for block in student_main.blocks:
            for op in block.ops:
                merged_ops.append(op)
        self.assertTrue(len(student_ops) + len(teacher_ops) == len(merged_ops))
Code Example #17
    def test_soft_label_loss(self):
        input = paddle.static.data(name="image", shape=[None, 3, 224, 224])
        conv1 = conv_bn_layer(input, 8, 3, "conv1")
        conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
        student_predict = conv1 + conv2

        teacher_main = paddle.static.Program()
        teacher_startup = paddle.static.Program()
        with paddle.static.program_guard(teacher_main, teacher_startup):
            input = paddle.static.data(name="image", shape=[None, 3, 224, 224])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            teacher_predict = conv_bn_layer(conv5, 8, 3, "conv6")

        place = paddle.CPUPlace()
        data_name_map = {'image': 'image'}
        merge(teacher_main,
              paddle.static.default_main_program(), data_name_map, place)
        merged_ops = []
        for block in paddle.static.default_main_program().blocks:
            for op in block.ops:
                merged_ops.append(op.type)
        distill_loss = soft_label_loss('teacher_conv6_bn_output.tmp_2',
                                       'conv2_bn_output.tmp_2')
        loss_ops = []
        for block in paddle.static.default_main_program().blocks:
            for op in block.ops:
                loss_ops.append(op.type)
        self.assertTrue(set(merged_ops).difference(set(loss_ops)) == set())
        self.assertTrue(
            set(loss_ops).difference(set(merged_ops)) ==
            {'cross_entropy', 'softmax', 'reduce_mean', 'scale'})
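
The asserted op set ('cross_entropy', 'softmax', 'reduce_mean', 'scale') hints at what soft_label_loss computes: both variables are softened by a temperature (the scale op), turned into probabilities with softmax, and compared with a soft-label cross entropy that is then averaged. The snippet below is only a conceptual sketch of that computation using the Paddle 2.x functional API; it is not PaddleSlim's implementation, and the exact ops it emits may differ.

import paddle
import paddle.nn.functional as F

def soft_label_loss_sketch(teacher_var, student_var, temperature=1.0):
    # Soften both distributions with a temperature, turn them into
    # probabilities, then average the soft-label cross entropy between
    # the student and the teacher.
    t_soft = F.softmax(teacher_var / temperature)
    s_soft = F.softmax(student_var / temperature)
    loss = F.cross_entropy(s_soft, t_soft, soft_label=True,
                           use_softmax=False, reduction='none')
    return paddle.mean(loss)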
Code Example #18
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(main_program, startup_program):
                input = fluid.data(name="image", shape=[None, 3, 16, 16])
                conv1 = conv_bn_layer(input, 8, 3, "conv1", act='relu')
                out = paddle.unsqueeze(conv1, axis=[0])

        graph = GraphWrapper(main_program)
        cls = PRUNE_WORKER.get("unsqueeze2")
        out_var = graph.var(out.name)
        in_var = graph.var(conv1.name)
        op = out_var.inputs()[0]
        # pruning out
        pruned_params = []
        ret = {}
        worker = cls(op, pruned_params, {}, True)
        worker.prune(out_var, 2, [])
        for var, axis, _, _ in pruned_params:
            ret[var.name()] = axis
        self.assertTrue(
            ret == {
                'conv1_weights': 0,
                'conv1_bn_scale': 0,
                'conv1_bn_offset': 0,
                'conv1_bn_mean': 0,
                'conv1_bn_variance': 0
            })

        # pruning in
        pruned_params = []
        ret = {}
        worker = cls(op, pruned_params, {}, True)
        worker.prune(in_var, 1, [])
        for var, axis, _, _ in pruned_params:
            ret[var.name()] = axis
        self.assertTrue(ret == {})
Code Example #19
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        #   X       X              O       X              O
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        #
        # X: prune output channels
        # O: prune input channels
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")
        collected_groups = collect_convs(
            ["conv1_weights", "conv2_weights", "conv3_weights", "dummy"],
            main_program)
        while [] in collected_groups:
            collected_groups.remove([])
        print(collected_groups)

        params = set([
            param.name for param in main_program.all_parameters()
            if "weights" in param.name
        ])

        expected_groups = [[('conv1_weights', 0), ('conv2_weights', 1),
                            ('conv2_weights', 0), ('conv3_weights', 1),
                            ('conv4_weights', 0), ('conv5_weights', 1)],
                           [('conv3_weights', 0), ('conv4_weights', 1)]]

        self.assertTrue(len(collected_groups) == len(expected_groups))
        for _collected, _expected in zip(collected_groups, expected_groups):
            for _name, _axis, _ in _collected:
                if _name in params:
                    self.assertTrue((_name, _axis) in _expected)
            for _name, _axis in _expected:
                if _name in params:
                    self.assertTrue((_name, _axis, []) in _collected)
Code Example #20
    def _gen_model(self):
        self.main_program = paddle.static.default_main_program()
        self.startup_program = paddle.static.default_startup_program()
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        with paddle.static.program_guard(self.main_program,
                                         self.startup_program):
            input = paddle.static.data(name='image', shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")

            conv7 = fluid.layers.conv2d_transpose(
                input=conv6, num_filters=16, filter_size=2, stride=2)

        place = paddle.static.cpu_places()[0]
        exe = paddle.static.Executor(place)
        self.scope = paddle.static.global_scope()
        exe.run(self.startup_program, scope=self.scope)

        configs = {
            'stable_iterations': 0,
            'pruning_iterations': 1000,
            'tunning_iterations': 1000,
            'resume_iteration': 500,
            'pruning_steps': 20,
            'initial_ratio': 0.05,
        }
        self.pruner = GMPUnstructuredPruner(
            self.main_program,
            scope=self.scope,
            place=place,
            configs=configs,
            ratio=0.55)
        print(self.pruner.ratio)
        self.assertGreater(self.pruner.ratio, 0.3)
Code Example #21
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        #   X       X              O       X              O
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        #
        # X: prune output channels
        # O: prune input channels
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")

        shapes = {}
        for param in main_program.global_block().all_parameters():
            shapes[param.name] = param.shape

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        scope = fluid.Scope()
        exe.run(startup_program, scope=scope)

        graph = GraphWrapper(main_program)

        conv_op = graph.var("conv4_weights").outputs()[0]
        walker = conv2d_walker(conv_op, [])
        walker.prune(graph.var("conv4_weights"), pruned_axis=0, pruned_idx=[])
        print(walker.pruned_params)
Code Example #22
 def cond_block1():
     cond_conv = conv_bn_layer(conv5, 8, 3, "conv_cond1_1")
     return cond_conv
Code Example #23
    def test_prune(self):
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")
            feature = fluid.layers.reshape(conv6, [-1, 128, 16])
            predict = fluid.layers.fc(input=feature, size=10, act='softmax')
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
            print(label.shape)
            print(predict.shape)
            cost = fluid.layers.cross_entropy(input=predict, label=label)
            avg_cost = fluid.layers.mean(cost)
            adam_optimizer = fluid.optimizer.AdamOptimizer(0.01)
            adam_optimizer.minimize(avg_cost)

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)

        scope = fluid.global_scope()
        exe.run(startup_program, scope=scope)
        criterion = 'bn_scale'
        pruner = Pruner(criterion)
        main_program, _, _ = pruner.prune(train_program,
                                          scope,
                                          params=["conv4_weights"],
                                          ratios=[0.5],
                                          place=place,
                                          lazy=False,
                                          only_graph=False,
                                          param_backup=None,
                                          param_shape_backup=None)

        x = numpy.random.random(size=(10, 3, 16, 16)).astype('float32')
        label = numpy.random.random(size=(10, 1)).astype('int64')
        loss_data, = exe.run(train_program,
                             feed={
                                 "image": x,
                                 "label": label
                             },
                             fetch_list=[cost.name])

        save_model(exe, main_program, 'model_file')
        pruned_program = fluid.Program()
        pruned_startup_program = fluid.Program()
        with fluid.program_guard(pruned_program, pruned_startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")
        pruned_test_program = pruned_program.clone(for_test=True)
        exe.run(pruned_startup_program)
        load_model(exe, pruned_program, 'model_file')
        load_model(exe, pruned_test_program, 'model_file')
        shapes = {
            "conv1_weights": (4, 3, 3, 3),
            "conv2_weights": (4, 4, 3, 3),
            "conv3_weights": (8, 4, 3, 3),
            "conv4_weights": (4, 8, 3, 3),
            "conv5_weights": (8, 4, 3, 3),
            "conv6_weights": (8, 8, 3, 3)
        }

        for param in pruned_program.global_block().all_parameters():
            if "weights" in param.name:
                print("param: {}; param shape: {}".format(
                    param.name, param.shape))
                self.assertTrue(param.shape == shapes[param.name])
        for param in pruned_test_program.global_block().all_parameters():
            if "weights" in param.name:
                print("param: {}; param shape: {}".format(
                    param.name, param.shape))
                self.assertTrue(param.shape == shapes[param.name])
Code Example #24
    def test_concat(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        #                                  X
        # conv1   conv2-->concat         conv3-->sum-->out
        #     |            ^ |                    ^
        #     |____________| |____________________|
        #
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(input, 8, 3, "conv2", sync_bn=True)
            tmp = fluid.layers.concat([conv1, conv2], axis=1)
            conv3 = conv_bn_layer(input, 16, 3, "conv3", bias=None)
            out = conv3 + tmp

        shapes = {}
        for param in main_program.global_block().all_parameters():
            shapes[param.name] = param.shape

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        scope = fluid.Scope()
        exe.run(startup_program, scope=scope)
        pruner = Pruner()
        # test backward search of concat
        pruned_program, _, _ = pruner.prune(main_program,
                                            scope,
                                            params=["conv3_weights"],
                                            ratios=[0.5],
                                            place=place,
                                            lazy=False,
                                            only_graph=True,
                                            param_backup=None,
                                            param_shape_backup=None)
        shapes = {
            "conv3_weights": (8, 3, 3, 3),
            "conv2_weights": (4, 3, 3, 3),
            "conv1_weights": (4, 3, 3, 3)
        }
        for param in pruned_program.global_block().all_parameters():
            if "weights" in param.name and "conv2d" in param.name:
                self.assertTrue(shapes[param.name] == param.shape)

        # test forward search of concat
        pruned_program, _, _ = pruner.prune(
            main_program,
            scope,
            params=["conv1_weights", "conv2_weights"],
            ratios=[0.5, 0.5],
            place=place,
            lazy=False,
            only_graph=False,
            param_backup=None,
            param_shape_backup=None)

        shapes = {
            "conv1_weights": (4, 3, 3, 3),
            "conv1_bn_scale": (4, ),
            "conv1_bn_variance": (4, ),
            "conv1_bn_mean": (4, ),
            "conv1_bn_offset": (4, ),
            "conv2_weights": (4, 3, 3, 3),
            "sync_batch_norm_0.w_0": (4, ),
            "sync_batch_norm_0.w_1": (4, ),
            "conv2_bn_scale": (4, ),
            "conv2_bn_offset": (4, ),
            "conv3_weights": (8, 3, 3, 3),
            "conv3_bn_mean": (8, ),
            "conv3_bn_offset": (8, ),
            "conv3_bn_scale": (8, ),
            "conv3_bn_variance": (8, ),
            "conv3_out.b_0": (8, ),
        }

        for param in pruned_program.global_block().all_parameters():
            if "weights" in param.name and "conv2d" in param.name:
                self.assertTrue(shapes[param.name] == param.shape)
Code Example #25
    def test_prune(self):
        train_program = fluid.Program()
        startup_program = fluid.Program()
        with fluid.program_guard(train_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)

        scope = fluid.global_scope()
        exe.run(startup_program, scope=scope)
        criterion = 'bn_scale'
        pruner = Pruner(criterion)
        main_program, _, _ = pruner.prune(train_program,
                                          scope,
                                          params=["conv4_weights"],
                                          ratios=[0.5],
                                          place=place,
                                          lazy=False,
                                          only_graph=False,
                                          param_backup=None,
                                          param_shape_backup=None)

        x = numpy.random.random(size=(10, 3, 16, 16)).astype('float32')
        loss_data, = exe.run(train_program,
                             feed={"image": x},
                             fetch_list=[conv6.name])

        save_model(exe, main_program, 'model_file')
        pruned_program = fluid.Program()
        pruned_startup_program = fluid.Program()
        with fluid.program_guard(pruned_program, pruned_startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            conv1 = conv_bn_layer(input, 8, 3, "conv1")
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2")
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3")
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            conv6 = conv_bn_layer(conv5, 8, 3, "conv6")
        exe.run(pruned_startup_program)
        load_model(exe, pruned_program, 'model_file')
        shapes = {
            "conv1_weights": (4, 3, 3, 3),
            "conv2_weights": (4, 4, 3, 3),
            "conv3_weights": (8, 4, 3, 3),
            "conv4_weights": (4, 8, 3, 3),
            "conv5_weights": (8, 4, 3, 3),
            "conv6_weights": (8, 8, 3, 3)
        }

        for param in pruned_program.global_block().all_parameters():
            if "weights" in param.name:
                print("param: {}; param shape: {}".format(
                    param.name, param.shape))
                self.assertTrue(param.shape == shapes[param.name])
Code Example #26
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        #   X       X              O       X              O
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        #
        # X: prune output channels
        # O: prune input channels
        with fluid.program_guard(main_program, startup_program):
            input = fluid.data(name="image", shape=[None, 3, 16, 16])
            label = fluid.data(name='label', shape=[None, 1], dtype='int64')
            conv1 = conv_bn_layer(input, 8, 3, "conv1", act='relu')
            conv2 = conv_bn_layer(conv1, 8, 3, "conv2", act='leaky_relu')
            sum1 = conv1 + conv2
            conv3 = conv_bn_layer(sum1, 8, 3, "conv3", act='relu6')
            conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
            sum2 = conv4 + sum1
            conv5 = conv_bn_layer(sum2, 8, 3, "conv5")
            sum3 = fluid.layers.sum([sum2, conv5])
            conv6 = conv_bn_layer(sum3, 8, 3, "conv6")
            sub1 = conv6 - sum3
            mult = sub1 * sub1
            conv7 = conv_bn_layer(
                mult, 8, 3, "Depthwise_Conv7", groups=8, use_cudnn=False)
            floored = fluid.layers.floor(conv7)
            scaled = fluid.layers.scale(floored)
            concated = fluid.layers.concat([scaled, mult], axis=1)
            conv8 = conv_bn_layer(concated, 8, 3, "conv8")
            feature = fluid.layers.reshape(conv8, [-1, 128, 16])
            predict = fluid.layers.fc(input=feature, size=10, act='softmax')
            cost = fluid.layers.cross_entropy(input=predict, label=label)
            adam_optimizer = fluid.optimizer.AdamOptimizer(0.01)
            avg_cost = fluid.layers.mean(cost)
            adam_optimizer.minimize(avg_cost)

        params = []
        for param in main_program.all_parameters():
            if 'conv' in param.name:
                params.append(param.name)

        place = fluid.CPUPlace()
        exe = fluid.Executor(place)
        exe.run(startup_program)
        x = np.random.random(size=(10, 3, 16, 16)).astype('float32')
        label = np.random.random(size=(10, 1)).astype('int64')
        loss_data, = exe.run(main_program,
                             feed={"image": x,
                                   "label": label},
                             fetch_list=[cost.name])
        pruner = Pruner()
        main_program, _, _ = pruner.prune(
            main_program,
            fluid.global_scope(),
            params=params,
            ratios=[0.5] * len(params),
            place=place,
            lazy=False,
            only_graph=False,
            param_backup=None,
            param_shape_backup=None)
Code Example #27
 def cond_block2():
     cond_conv1 = conv_bn_layer(conv5, 8, 3, "conv_cond2_1")
     cond_conv2 = conv_bn_layer(cond_conv1, 8, 3,
                                "conv_cond2_2")
     return cond_conv2
Code Example #28
 def cond_block2():
     cond_conv1 = conv_bn_layer(conv5, 8, 3, "conv_cond2_1")
     cond_conv2 = conv_bn_layer(cond_conv1, 8, 3, "conv_cond2_2")
     fluid.layers.assign(input=cond_conv2, output=cond_output)
Code Example #29
 def cond_block1():
     cond_conv = conv_bn_layer(conv5, 8, 3, "conv_cond1_1")
     fluid.layers.assign(input=cond_conv, output=cond_output)
Code Example #30
    def test_prune(self):
        main_program = fluid.Program()
        startup_program = fluid.Program()
        #   X       X              O       X              O
        # conv1-->conv2-->sum1-->conv3-->conv4-->sum2-->conv5-->conv6
        #     |            ^ |                    ^
        #     |____________| |____________________|
        #
        # X: prune output channels
        # O: prune input channels
        with fluid.unique_name.guard():
            with fluid.program_guard(main_program, startup_program):
                input = fluid.data(name="image", shape=[None, 3, 16, 16])
                label = fluid.data(name='label', shape=[None, 1], dtype='int64')
                conv1 = conv_bn_layer(input, 8, 3, "conv1", act='relu')
                conv2 = conv_bn_layer(conv1, 8, 3, "conv2", act='leaky_relu')
                sum1 = conv1 + conv2
                conv3 = conv_bn_layer(sum1, 8, 3, "conv3", act='relu6')
                conv4 = conv_bn_layer(conv3, 8, 3, "conv4")
                sum2 = conv4 + sum1
                conv5 = conv_bn_layer(sum2, 8, 3, "conv5")

                flag = fluid.layers.fill_constant([1], value=1, dtype='int32')
                rand_flag = paddle.randint(2, dtype='int32')
                cond = fluid.layers.less_than(x=flag, y=rand_flag)
                cond_output = fluid.layers.create_global_var(
                    shape=[1],
                    value=0.0,
                    dtype='float32',
                    persistable=False,
                    name='cond_output')

                def cond_block1():
                    cond_conv = conv_bn_layer(conv5, 8, 3, "conv_cond1_1")
                    fluid.layers.assign(input=cond_conv, output=cond_output)

                def cond_block2():
                    cond_conv1 = conv_bn_layer(conv5, 8, 3, "conv_cond2_1")
                    cond_conv2 = conv_bn_layer(cond_conv1, 8, 3, "conv_cond2_2")
                    fluid.layers.assign(input=cond_conv2, output=cond_output)

                fluid.layers.cond(cond, cond_block1, cond_block2)
                sum3 = fluid.layers.sum([sum2, cond_output])

                conv6 = conv_bn_layer(sum3, 8, 3, "conv6")
                sub1 = conv6 - sum3
                mult = sub1 * sub1
                conv7 = conv_bn_layer(
                    mult, 8, 3, "Depthwise_Conv7", groups=8, use_cudnn=False)
                floored = fluid.layers.floor(conv7)
                scaled = fluid.layers.scale(floored)
                concated = fluid.layers.concat([scaled, mult], axis=1)
                conv8 = conv_bn_layer(concated, 8, 3, "conv8")
                predict = fluid.layers.fc(input=conv8, size=10, act='softmax')
                cost = fluid.layers.cross_entropy(input=predict, label=label)
                adam_optimizer = fluid.optimizer.AdamOptimizer(0.01)
                avg_cost = fluid.layers.mean(cost)
                adam_optimizer.minimize(avg_cost)

        params = []
        for param in main_program.all_parameters():
            if 'conv' in param.name:
                params.append(param.name)
        #TODO: To support pruning convolution before fc layer.
        params.remove('conv8_weights')

        place = fluid.CUDAPlace(0)
        exe = fluid.Executor(place)
        exe.run(startup_program)
        x = np.random.random(size=(10, 3, 16, 16)).astype('float32')
        label = np.random.random(size=(10, 1)).astype('int64')
        loss_data, = exe.run(main_program,
                             feed={"image": x,
                                   "label": label},
                             fetch_list=[cost.name])
        pruner = Pruner()
        main_program, _, _ = pruner.prune(
            main_program,
            fluid.global_scope(),
            params=params,
            ratios=[0.5] * len(params),
            place=place,
            lazy=False,
            only_graph=False,
            param_backup=None,
            param_shape_backup=None)

        loss_data, = exe.run(main_program,
                             feed={"image": x,
                                   "label": label},
                             fetch_list=[cost.name])