Example #1
0
 def test_e2e_on_gpu_vehicle_person_non_vehicle(self):
     """End-to-end GPU run for the vehicle/person/non-vehicle class set.

     Finetunes, evaluates, exports, then evaluates the exported model.
     """
     skip_if_cuda_not_available()
     class_spec = 'vehicle,person,non-vehicle'
     # Full pipeline: train -> eval -> export -> eval exported artifact.
     self.do_finetuning(class_spec, on_gpu=True)
     self.do_evaluation(class_spec, on_gpu=True)
     self.do_export(class_spec, on_gpu=True)
     self.do_evaluation_of_exported_model(class_spec)
Example #2
0
 def test_e2e_on_gpu_person(self):
     """End-to-end GPU run for the single-class 'person' configuration.

     Finetunes, evaluates, exports, then evaluates the exported model.
     """
     skip_if_cuda_not_available()
     class_spec = 'person'
     # Full pipeline: train -> eval -> export -> eval exported artifact.
     self.do_finetuning(class_spec, on_gpu=True)
     self.do_evaluation(class_spec, on_gpu=True)
     self.do_export(class_spec, on_gpu=True)
     self.do_evaluation_of_exported_model(class_spec)
        def test_nncf_compress_and_export(self):
            """Compress and export via the base class, then check that the
            exported model's AP stays within ``test_export_thr`` of the last
            AP seen during compression training."""
            skip_if_cuda_not_available()
            log_file, metrics_path = super().test_nncf_compress_and_export()

            # Final AP value recorded in the compression training log.
            last_compress_ap = collect_ap(log_file)[-1]
            logging.info(f'From training last_compress_ap={last_compress_ap}')

            exported_ap = self._get_bbox_metric(metrics_path)
            logging.info(f'Evaluation after export result ap={exported_ap}')

            # Export may cost a little accuracy, but no more than the threshold.
            self.assertGreater(exported_ap,
                               last_compress_ap - self.test_export_thr)
        def test_nncf_compress_and_eval_on_gpu(self):
            """Run the base-class compress+eval flow and verify that the AP
            from evaluation reproduces the last training-log AP exactly
            (within float tolerance)."""
            skip_if_cuda_not_available()
            result = super().test_nncf_compress_and_eval_on_gpu()
            log_file, metrics_path = result

            ap_history = collect_ap(log_file)
            last_compress_ap = ap_history[-1]
            logging.info(f'From training last_compress_ap={last_compress_ap}')

            eval_ap = self._get_bbox_metric(metrics_path)
            logging.info(f'Evaluation result ap={eval_ap}')
            # Same checkpoint, same data: the two AP values must agree.
            self.assertLess(abs(last_compress_ap - eval_ap), 1e-6)

            return log_file, metrics_path
        def test_nncf_compress_on_gpu(self):
            """Finetune the main/aux model pair, compress them on GPU, and
            check that a best checkpoint exists for both compressed models.

            Returns the path to the compression training log.
            """
            skip_if_cuda_not_available()
            logging.info('Begin test_nncf_compress_on_gpu')
            finetuned = self.do_preliminary_finetuning(True)
            # Preliminary finetuning must yield exactly one main + one aux model.
            self.assertEqual(len(finetuned), 2)
            self.assertIn('model_0', finetuned[0])
            self.assertIn('model_1', finetuned[1])

            log_file = self.do_compress(main_weights_path=finetuned[0],
                                        aux_weights_path=finetuned[1])
            logging.debug('Compression is finished')
            compressed = self._find_best_models(self.output_folder)
            logging.debug(f'Found best compressed models: {compressed}')
            # Compression should likewise produce a best main + aux checkpoint.
            self.assertEqual(len(compressed), 2)
            self.assertIn('model_0', compressed[0])
            self.assertIn('model_1', compressed[1])

            logging.info('End test_nncf_compress_on_gpu')
            return log_file
        def test_nncf_compress_and_export(self):
            """Finetune, compress, export the latest compressed checkpoint to
            ONNX/OpenVINO, and evaluate the resulting IR model.

            Returns the compression log path and the evaluation metrics path.
            """
            skip_if_cuda_not_available()
            logging.info('Begin test_nncf_compress_and_export')
            finetuned = self.do_preliminary_finetuning(True)
            # Preliminary finetuning must yield exactly one main + one aux model.
            self.assertEqual(len(finetuned), 2)
            self.assertIn('model_0', finetuned[0])
            self.assertIn('model_1', finetuned[1])

            log_file = self.do_compress(main_weights_path=finetuned[0],
                                        aux_weights_path=finetuned[1])
            logging.debug('Compression is finished')
            compressed_ckpt = self._find_latest_model(self.output_folder)
            logging.debug(f'Found latest compressed models: {compressed_ckpt}')

            logging.info('Exporting the latest compressed model')
            export_dir = self.output_folder
            # export.py lives next to the template file; run it from there.
            run_through_shell(f'cd {os.path.dirname(self.template_file)};'
                              f'python3 export.py --openvino'
                              f' --load-weights {compressed_ckpt}'
                              f' --save-model-to {export_dir}')
            onnx_files = find_files_by_pattern(export_dir, '*.onnx')
            xml_files = find_files_by_pattern(export_dir, '*.xml')
            bin_files = find_files_by_pattern(export_dir, '*.bin')
            # Exactly one artifact of each kind must have been produced.
            self.assertTrue(len(onnx_files) == 1, 'Export to onnx failed')
            self.assertTrue(len(xml_files) == 1, 'Export to openvino failed')
            self.assertTrue(len(bin_files) == 1, 'Export to openvino failed')

            ir_model = xml_files[0]
            logging.debug(f'Before making evaluation of {ir_model}')
            metrics_path = self.do_eval(ir_model)
            logging.debug(f'After making evaluation of {ir_model}')
            logging.debug(f'    metrics are stored to the file {metrics_path}')

            logging.info('End test_nncf_compress_and_export')

            return log_file, metrics_path
        def test_nncf_compress_and_eval_on_gpu(self):
            """Finetune, compress on GPU, and evaluate the latest compressed
            checkpoint; the rank-1 from the training log must match the
            accuracy extracted from the evaluation metrics file.

            Returns the compression log path and the evaluation metrics path.
            """
            skip_if_cuda_not_available()
            logging.info('Begin test_nncf_compress_and_eval_on_gpu')
            finetuned = self.do_preliminary_finetuning(True)
            # Preliminary finetuning must yield exactly one main + one aux model.
            self.assertEqual(len(finetuned), 2)
            self.assertIn('model_0', finetuned[0])
            self.assertIn('model_1', finetuned[1])

            log_file = self.do_compress(main_weights_path=finetuned[0],
                                        aux_weights_path=finetuned[1])
            logging.debug('Compression is finished')
            compressed_ckpt = self._find_latest_model(self.output_folder)
            logging.debug(f'Found latest compressed models: {compressed_ckpt}')

            last_training_rank1 = self._extract_last_rank1_from_log(log_file)

            logging.debug(f'Before making evaluation of {compressed_ckpt}')
            metrics_path = self.do_eval(compressed_ckpt)
            logging.debug(f'After making evaluation of {compressed_ckpt}')
            logging.debug(f'Metrics are stored in {metrics_path}')

            accuracy = self._extract_accuracy_from_metrics_file(metrics_path)
            # The accuracy parsed from the metrics file should reproduce the
            # rank-1 value written to the training log for the same checkpoint.
            self.assertAlmostEqual(
                last_training_rank1,
                accuracy,
                delta=0.001,
                msg=f'Difference between accuracy from log file {log_file} '
                f'and the accuracy from evaluation metrics file {metrics_path}'
            )
            logging.info('End test_nncf_compress_and_eval_on_gpu')

            return log_file, metrics_path
Example #8
0
 def test_export_on_gpu(self):
     """Export the model on GPU into a dedicated 'gpu_export' sub-folder."""
     skip_if_cuda_not_available()
     gpu_export_dir = os.path.join(self.output_folder, 'gpu_export')
     self.do_export(gpu_export_dir, on_gpu=True)
 def test_nncf_compress_on_gpu(self):
     """Run the base-class GPU compression and require a positive final AP."""
     skip_if_cuda_not_available()
     log_file = super().test_nncf_compress_on_gpu()
     ap_history = collect_ap(log_file)
     # The last AP recorded during compression must be non-zero.
     self.assertGreater(ap_history[-1], 0)
 def test_finetuning_with_classes_on_gpu(self):
     """Finetuning with an explicit class list, executed on GPU."""
     skip_if_cuda_not_available()
     # Delegates entirely to the shared helper; only the device differs.
     self.do_finetuning_with_classes(on_gpu=True)
 def test_e2e_on_gpu(self):
     """End-to-end GPU run with the default class configuration.

     Finetunes, evaluates, exports, then evaluates the exported model.
     """
     skip_if_cuda_not_available()
     # Full pipeline: train -> eval -> export -> eval exported artifact.
     self.do_finetuning(on_gpu=True)
     self.do_evaluation(on_gpu=True)
     self.do_export(on_gpu=True)
     self.do_evaluation_of_exported_model()