Example #1
0
 def test_case2(self):
     """Smoke-test latency prediction for mobilenet_v2 on '845' hardware.

     Runs the predictor once in fp32 and once in int8; each predicted
     latency must be strictly positive.
     """
     paddle.disable_static()
     net = mobilenet_v2()
     pred = TableLatencyPredictor(
         f'./{opt_tool}',
         hardware='845',
         threads=4,
         power_mode=3,
         batchsize=1)
     # Same model/shape/task for both precisions; only data_type varies.
     for precision in ('fp32', 'int8'):
         latency = pred.predict_latency(
             net,
             input_shape=[1, 3, 224, 224],
             save_dir='./model',
             data_type=precision,
             task_type='cls')
         assert latency > 0
Example #2
0
 def test_case4(self):
     """Latency prediction for ModelCase1 (fp32 classification) is positive."""
     paddle.disable_static()
     net = ModelCase1()
     pred = TableLatencyPredictor(
         f'./{opt_tool}',
         hardware='845',
         threads=4,
         power_mode=3,
         batchsize=1)
     # ModelCase1 expects a [1, 116, 28, 28] input (unlike the 224x224 cases).
     latency = pred.predict_latency(
         net,
         input_shape=[1, 116, 28, 28],
         save_dir='./model',
         data_type='fp32',
         task_type='cls')
     assert latency > 0
Example #3
0
 def test_case7(self):
     """Detection-task latency prediction with multi-input mode enabled."""
     paddle.disable_static()
     net = ModelCase3()
     pred = TableLatencyPredictor(
         f'./{opt_tool}',
         hardware='845',
         threads=4,
         power_mode=3,
         batchsize=1)
     # Detection models may take multiple inputs; switch the predictor
     # into that mode before predicting.
     pred.set_det_multi_input(det_multi_input=True)
     latency = pred.predict_latency(
         net,
         input_shape=[1, 3, 224, 224],
         save_dir='./model',
         data_type='fp32',
         task_type='det')
     assert latency > 0
Example #4
0
def get_latency(model, data_type):
    """Predict and return the '845' latency of *model* for one precision.

    Args:
        model: a paddle dygraph model accepted by ``predict_latency``.
        data_type: precision string passed through (e.g. 'fp32' or 'int8').

    Returns:
        The latency value produced by ``TableLatencyPredictor``.

    Side effects:
        Writes an intermediate model to ``./tmp_model`` and removes it
        again before returning; prints the predicted latency.
    """
    paddle.disable_static()
    tlp = TableLatencyPredictor(
        f'./{opt_tool}',
        hardware='845',
        threads=4,
        power_mode=3,
        batchsize=1)
    latency = tlp.predict_latency(
        model,
        input_shape=[1, 3, 224, 224],
        save_dir='./tmp_model',
        data_type=data_type,
        task_type='cls')
    print('{} latency : {}'.format(data_type, latency))

    # Clean up the temporary export directory.
    subprocess.call('rm -rf ./tmp_model', shell=True)
    # NOTE(review): disable_static() was already called at the top of this
    # function; presumably enable_static() was intended here to restore
    # static-graph mode — confirm with callers before changing.
    paddle.disable_static()
    return latency
Example #5
0
 def test_case6(self):
     """Exports an int8 detection model, then predicts fp32 det latency.

     Asserts that ``opt_model`` produces a file on disk and that the
     subsequent fp32 latency prediction is strictly positive.
     """
     paddle.disable_static()
     net = ModelCase2()
     pred = TableLatencyPredictor(
         f'./{opt_tool}',
         hardware='845',
         threads=4,
         power_mode=3,
         batchsize=1)
     # First exercise the optimizer path: the converted model file
     # must exist on disk.
     optimized = pred.opt_model(
         net,
         input_shape=[1, 3, 224, 224],
         save_dir='./model',
         data_type='int8',
         task_type='det')
     assert os.path.exists(optimized)
     # Then the ordinary prediction path in fp32.
     latency = pred.predict_latency(
         net,
         input_shape=[1, 3, 224, 224],
         save_dir='./model',
         data_type='fp32',
         task_type='det')
     assert latency > 0