import os
import pickle
import random

# load_net_, cfg_from_file, load_data_batch_with_label, and the other helpers
# used in these examples come from the surrounding project's Caffe utilities.


def demo_ori():
    model_file = "../test_res18.prototxt"
    weights_file = "../weights_resize_res18.caffemodel"
    data_file = "../test_pad_30_resize_112.txt"
    img_root = "/home_1/data/caffe/DTY_Side"
    cfg_file = "../test_yxx_resize.yml"
    pkl_dir = "../pkl_feature"
    batch_size = 512

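    # load the test-time configuration from the YAML file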
    cfg_from_file(cfg_file)

    net = load_net_(model_file,
                    weights_file=weights_file,
                    GPU_index=3,
                    batch_size=batch_size,
                    forward_type="test")

    with open(data_file, "r") as f:
        img_file_list = f.readlines()

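    # use only the first batch_size images as a single forward batch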
    img_list = img_file_list[0:batch_size]

    input_data, input_label, input_data_name = load_data_batch_with_label(
        net, img_root, img_list)
    feature_v = get_feature_information(net, model_file, input_data)
    # feature_v is keyed by blob name, e.g. feature_v["prob"]

    if not os.path.exists(pkl_dir):
        os.makedirs(pkl_dir)
    save_name = os.path.join(pkl_dir, "feature_res_{}.pkl".format(batch_size))
    with open(save_name, "wb") as f:
        pickle.dump(feature_v, f)
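
A minimal sketch of reading the dumped features back (save_name here stands
for the path built inside demo_ori; feature_v is keyed by blob name, as the
"prob" access above suggests):

    with open(save_name, "rb") as f:
        feature_v = pickle.load(f)
    print(list(feature_v.keys()))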
Example #2
def test_net_cls(args):
    if args.cfg_file is not None:
        cfg_from_file(args.cfg_file)

    net = load_net_(args.deploy_file,
                    args.weights_file,
                    args.gpu_id,
                    batch_size=-1,
                    forward_type="test")
    with open(args.img_list_file, "r") as f:
        img_list = f.readlines()

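    # read the deploy file's input blob to recover its batch size and geometry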
    input_layer_name = net._layer_names[net._inputs[0]]
    batch_size, c, h, w = net.blobs[input_layer_name].shape

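    # number of full batches; images past the last full batch are skipped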
    epoch_num = len(img_list) // batch_size

    predict_res = []
    for epoch_i in range(epoch_num):
        if epoch_i % 10 == 0:
            print "TODO: {}% ".format(1.0 * epoch_i / epoch_num * 100)
        batch_img_list = img_list[epoch_i * batch_size:(epoch_i + 1) * batch_size]
        input_data, input_label, input_data_name = load_data_batch_with_label(
            net, args.img_root_path, batch_img_list)
        single_batch_res = predict_with_label(net, input_data, input_label,
                                              input_data_name)
        predict_res.extend(single_batch_res)

    pkl_dir = os.path.join(args.save_root_path, "res_pkl")
    if not os.path.exists(pkl_dir):
        os.makedirs(pkl_dir)
    save_name = os.path.join(
        pkl_dir, "{}_res.pkl".format(
            os.path.splitext(os.path.basename(args.weights_file))[0]))
    with open(save_name, "wb") as f:
        pickle.dump(predict_res, f)
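
To score the saved predictions offline, something like the sketch below
works; it assumes (nothing in these examples confirms this) that each entry
of predict_res carries a predicted label and a ground-truth label:

    with open(save_name, "rb") as f:
        predict_res = pickle.load(f)
    # hypothetical entry layout: (image_name, predicted_label, true_label)
    correct = sum(1 for _, pred, label in predict_res if pred == label)
    print("accuracy: {:.4f}".format(float(correct) / len(predict_res)))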
Example #3
    def __init__(self, P_parameters):

        # load the original (unpruned) net together with its weights
        self.P_parameters = P_parameters
        self.ori_net = load_net_(self.P_parameters.model_file,
                                 weights_file=self.P_parameters.weights_file,
                                 GPU_index=self.P_parameters.GPU_index,
                                 batch_size=-1,
                                 forward_type="test")
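        # batch_size=-1 presumably keeps the batch size declared in the prototxt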

        self.Net_composition = Net_composition_(self.P_parameters.model_file)

        self.pruned_layers_set = set()
        self.fixed_layers_set = set()
Example #4
    def create_new_net(self):
        self.new_net = load_net_(self.P_parameters.new_model_file,
                                 weights_file=None,
                                 GPU_index=self.P_parameters.GPU_index,
                                 batch_size=-1,
                                 forward_type="test")
Example #5
def my_test():
    args = parse_args()
    print(args)

    args.cfg_file = "../test_yxx_resize.yml"
    args.gpu_id = 3
    args.img_list_file = "../test_pad_30_resize_112.txt"
    args.img_root_path = "/home_1/data/caffe/DTY_Side"

    args.task_root = "../model_2"
    args.model_file = "test_res18"
    args.model_file_train = "train_res18"
    args.weights_file = "weights_resize_res18"
    args.solver_file = "solver_res18"

    batch_size = 128

    cfg_from_file(args.cfg_file)
    P_parameters = Prune_parameters_(args)

    net_com = Net_composition_(P_parameters.model_file)
    weights_graph = net_com.weights_layers_graph

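    # prune one output channel of conv1 (ResNet-18's conv1 has 64 filters;
    # keeping BN indices 0..62 drops the last one)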
    pruning_layer_dict = {"conv1": 1}
    BN_pruning_dict = {"bn_conv1": list(range(63))}

    Prune_ = Prune_tools(P_parameters)
    Prune_.update_model_file(pruning_layer_dict)
    Prune_.create_new_net()

    Weights_pruning_dict = get_all_keep_index(BN_pruning_dict,
                                              pruning_layer_dict,
                                              weights_graph)

    Prune_.update_new_net(Weights_pruning_dict)
    Prune_.save_new_weights()

    model_ori = "/home_1/code/caffe_test/compress/weights_pruning/20190529/model_2/test_res18.prototxt"
    weights_ori = "/home_1/code/caffe_test/compress/weights_pruning/20190529/model_2/weights_resize_res18.caffemodel"
    net_scale = load_net_(model_ori,
                          weights_file=weights_ori,
                          GPU_index=3,
                          batch_size=batch_size,
                          forward_type="test")
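    # zero the last channel's scale and bias so this net mimics removing that
    # channel outright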
    net_scale.params["scale_conv1"][0].data[-1] = 0
    net_scale.params["scale_conv1"][1].data[-1] = 0

    net_ori = load_net_(model_ori,
                        weights_file=weights_ori,
                        GPU_index=3,
                        batch_size=batch_size,
                        forward_type="test")

    model_pruning = "/home_1/code/caffe_test/compress/weights_pruning/20190529/model_2/test_res18_0.prototxt"
    weights_pruning = "/home_1/code/caffe_test/compress/weights_pruning/20190529/model_2/weights_resize_res18_0.caffemodel"
    net_pruning = load_net_(model_pruning,
                            weights_file=weights_pruning,
                            GPU_index=3,
                            batch_size=batch_size,
                            forward_type="test")

    with open(args.img_list_file, "r") as f:
        img_file_list = f.readlines()
    img_list = img_file_list[0:batch_size]
    input_data, input_label, input_data_name = load_data_batch_with_label(
        net_pruning, args.img_root_path, img_list)

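    # forward the same batch through all three nets; the zero-scaled net and
    # the truly pruned net should produce matching outputs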
    out_put_pruning, net_pruning = single_batch_forward(
        net_pruning, input_data)
    out_put_scale, net_scale = single_batch_forward(net_scale, input_data)
    out_put_ori, net_ori = single_batch_forward(net_ori, input_data)
    # print "======================="
    # print "pruning: "
    # print out_put_pruning
    # print "-----------------------"
    # print "scale: "
    # print out_put_scale
    # print "-----------------------"
    for key_ in out_put_pruning:
        for i in range(len(out_put_pruning[key_])):
            print("------------------------")
            print("scale: ")
            print(out_put_scale[key_][i])
            print("pruning: ")
            print(out_put_pruning[key_][i])
            print("ori: ")
            print(out_put_ori[key_][i])

if __name__ == '__main__':
    model_file = "../test_res18.prototxt"
    weights_file = "../weights_resize_res18.caffemodel"
    data_file = "../test_pad_30_resize_112.txt"
    img_root = "/home_1/data/caffe/DTY_Side"
    cfg_file = "../test_yxx_resize.yml"
    pkl_dir = "../pkl_feature"
    batch_size = 512

    cfg_from_file(cfg_file)

    net = load_net_(model_file,
                    weights_file=weights_file,
                    GPU_index=3,
                    batch_size=batch_size,
                    forward_type="test")

    with open(data_file, "r") as f:
        img_file_list = f.readlines()

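    # shuffle so the evaluated batch is a random sample of the list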
    random.shuffle(img_file_list)
    img_list = img_file_list[0:batch_size]

    input_data, input_label, input_data_name = load_data_batch_with_label(
        net, img_root, img_list)
    print(input_data_name)
    print(input_label)
    feature_v = get_pruning_prob_each(net, model_file, input_data)

    if not os.path.exists(pkl_dir):