Пример #1
0
def find_closest_with_brute_force(neighbors_df, ref_location):
    """Return the (name, coords) of the neighbor closest to ``ref_location``.

    Linearly scans every row of ``neighbors_df``, measuring each row's
    coordinates against ``ref_location`` with the module-level ``distance``
    helper.

    Args:
        neighbors_df: DataFrame whose index holds neighbor names and whose
            row values are coordinate components.
        ref_location: Reference coordinate tuple passed to ``distance``.

    Returns:
        Tuple ``(closest_neighbor_name, closest_neighbor_coords)``. Both are
        ``None`` when ``neighbors_df`` is empty.
    """
    # float('inf') replaces the fragile hard-coded sentinel 99999999999,
    # which would silently misbehave for distances beyond that magnitude.
    min_distance = float('inf')
    closest_neighbor_name = None
    closest_neighbor_coords = None
    for name, coords in neighbors_df.iterrows():
        # Materialize the coordinates once instead of twice per improvement.
        candidate = tuple(coords)
        d = distance(ref_location, candidate)
        if d < min_distance:
            min_distance = d
            closest_neighbor_name = name
            closest_neighbor_coords = candidate

    return closest_neighbor_name, closest_neighbor_coords
                        '--result_npy',
                        default='./models/resnet50v1b-sparse-0.npy',
                        help='result numpy to compare reconstructor result')
    # Parse and return the CLI namespace for the reconstruction script.
    # NOTE(review): the start of this parse_args definition is not visible
    # in this fragment.
    return parser.parse_args()


if __name__ == '__main__':
    # Rebuild a PyTorch model from a serialized graph/params pair, report
    # per-module sparsity and model complexity, and optionally compare the
    # forward-pass output against a reference result.
    cli_args = parse_args()

    with open(cli_args.graph, 'r') as graph_file:
        model_graph = json.load(graph_file)
    model_params = np.load(cli_args.params, allow_pickle=True)['arr_0'][()]

    reconstructor = src.TorchReconstructor(model_graph, model_params)
    reconstructor.load_weights()
    reconstructor.eval()
    print(reconstructor)

    # Per-module sparsity report.
    print("spec_name, nnz, sparsity")
    for layer in reconstructor.model:
        print(layer.spec_name, layer.nnz, layer.sparsity)

    # Use the supplied activation when given, otherwise a random
    # ImageNet-sized input.
    if cli_args.input_npy:
        sample = torch.from_numpy(np.load(cli_args.input_npy))
    else:
        sample = torch.randn((1, 3, 224, 224))

    total_params, dense_flops, sparse_flops = src.compute_model_complexity(
        reconstructor, sample, verbose=True)

    # Compare against the reference output only when both npy paths are set.
    if cli_args.result_npy and cli_args.input_npy:
        reference = np.load(cli_args.result_npy)
        prediction = reconstructor(sample)
        print('distance', src.distance(prediction.detach().numpy(), reference))
Пример #3
0
    # NOTE(review): this fragment appears to run inside the script's __main__
    # section; `graph` and `params` are assumed to be loaded above — confirm
    # against the full file. ('dummpy_input' is a typo for 'dummy_input'.)
    dummpy_input = np.random.randn(1, 224, 224, 3)

    # Reference forward pass through the inference-only reconstructor,
    # built in its own graph.
    g = tf.Graph()
    with g.as_default():
        trc_inference = src.TFReconstructor(graph, params)
        result = trc_inference(dummpy_input)

    # Rebuild the same network in trainable form in a fresh graph and check
    # that both its training-mode and test-mode outputs match the inference
    # result.
    new_g = tf.Graph()
    with new_g.as_default():
        trc = src.TFReconstructorTrain(graph, params)
        trc._execute()
        with tf.variable_scope('test', reuse=tf.AUTO_REUSE):
            featmap = trc.model(is_training=True)
            featmap_test = trc.model(is_training=False)

        sess = tf.Session()
        sess.run(tf.global_variables_initializer())
        # frozen_graph_def = trc.frozen()
        # tf.import_graph_def(frozen_graph_def, name='')
        # tf.io.write_graph(frozen_graph_def, os.path.dirname(args.save_path),
        #         os.path.basename(args.save_path), as_text=True)
        # Feed the training-mode input placeholder (scoped name 'test/0:0')
        # and compare the '506' feature map against the inference result.
        input_tsr = new_g.get_tensor_by_name('test/0:0')
        new_train_result = sess.run(featmap,
                                    feed_dict={input_tsr: dummpy_input})
        print(src.distance(result, new_train_result['506']))

        # Same comparison via the test-mode placeholder ('test/0_1:0').
        input_tsr = new_g.get_tensor_by_name('test/0_1:0')
        new_test_result = sess.run(featmap_test,
                                   feed_dict={input_tsr: dummpy_input})
        print(src.distance(result, new_test_result['506']))
                        help='save pb path')
    # Optional postprocessor key and square input spatial size.
    parser.add_argument('--post', default='', help='postprocess key')
    parser.add_argument('--shape', default=224, help='', type=int)
    # Parse and return the CLI namespace for the TF reconstruction script.
    # NOTE(review): the start of this parse_args definition is not visible
    # in this fragment.
    return parser.parse_args()


if __name__ == '__main__':
    # Reconstruct a TensorFlow model from a serialized graph plus
    # MXNet-format weights, run one forward pass, and optionally diff the
    # output against a reference result and/or save the graph.
    opts = parse_args()

    with open(opts.graph, 'r') as graph_file:
        net_graph = json.load(graph_file)
    weights = np.load(opts.params, allow_pickle=True)['arr_0'][()]
    weights = src.transform_weight_from_mxnet_to_tensorflow(weights)

    reconstructor = src.TFReconstructor(net_graph, weights)
    if opts.post:
        reconstructor.set_postprocessor(opts.post)

    # Build the NCHW input batch, then transpose to NHWC for TensorFlow.
    if opts.input_npy:
        batch = np.load(opts.input_npy)
    else:
        batch = np.random.uniform(0, 1, [1, 3, opts.shape, opts.shape])
    batch = batch.transpose((0, 2, 3, 1))

    output = reconstructor(batch)
    # Compare against the reference output only when both npy paths are set.
    if opts.result_npy and opts.input_npy:
        reference = np.load(opts.result_npy)
        print('distance', src.distance(output, reference))

    if opts.save_path:
        reconstructor.save_graph(opts.save_path)
                        help='quantization strategy')
    parser.add_argument('-wq',
                        '--weight-quan',
                        default='',
                        help='per layer or per channel')
    args = parser.parse_args()
    # Load the YAML experiment config and overlay CLI overrides onto it.
    with open(args.config_file, 'r') as f:
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # and unsafe on untrusted input — consider yaml.safe_load.
        config = easydict.EasyDict(yaml.load(f.read()))
    merge_args_to_config(config, args)
    with open(config.MODEL.graph, 'r') as f:
        graph = json.load(f)
    params = np.load(config.MODEL.params, allow_pickle=True)['arr_0'][()]

    print(config)
    # First run: quantized inference per the configured strategy.
    out_quan, quan_act_list = run(graph, params, config.QUAN,
                                  config.EVALUATION, config.SAVE_PATH)
    # Second run: same model with quantization disabled, as the baseline.
    config.QUAN.strategy = 'null'
    print(config)
    out, act_list = run(graph, params, config.QUAN, config.EVALUATION, '')
    # Compare per-node activations between the quantized and baseline runs.
    assert len(quan_act_list) == len(act_list)
    for idx in range(len(act_list)):
        try:
            print(
                act_list[idx]['node'],
                src.distance(quan_act_list[idx]['act'], act_list[idx]['act']))
        except Exception as err:
            # NOTE(review): debugging leftover — drops into an interactive
            # pdb session on any per-node failure; remove before production.
            import pdb
            pdb.set_trace()
            print(act_list[idx])
    print('final distance', src.distance(out_quan, out))