def test_parsing_file_with_duplicated_lines(self, mocked_open: MagicMock) -> None:
    """Test parsing of a file with duplicated tune/baseline lines.

    The parser should keep the last occurrence of each repeated entry:
    the second "Tune 2" result and the second "Save quantized model" path.
    """
    mocked_open.return_value.__enter__.return_value = [
        "Foo bar baz",
        "2021-05-27 07:52:50 [INFO] Tune 1 result is: [accuracy: 0.1234, duration (seconds): 5.6789], Best tune result is: None",  # noqa: E501
        "2021-05-27 07:52:50 [INFO] Tune 2 result is: [accuracy: 0.2345, duration (seconds): 0.6789], Best tune result is: [accuracy: 0.2345, duration (seconds): 0.6789]",  # noqa: E501
        "2021-05-27 07:52:27 [INFO] FP32 baseline is: [accuracy: 0.12344, duration (seconds): 5.6789]",  # noqa: E501
        "2021-05-27 07:52:27 [INFO] Save quantized model at /a/b/c.pb",
        "a b c d",
        "2021-05-27 07:52:50 [INFO] Tune 1 result is: [accuracy: 0.1234, duration (seconds): 5.6789], Best tune result is: None",  # noqa: E501
        "2021-05-27 07:52:50 [INFO] Tune 2 result is: [accuracy: 0.99876, duration (seconds): 0.5432], Best tune result is: [accuracy: 0.99876, duration (seconds): 0.5432]",  # noqa: E501
        "2021-05-27 07:52:27 [INFO] FP32 baseline is: [accuracy: 0.12344, duration (seconds): 5.6789]",  # noqa: E501
        "2021-05-27 07:52:27 [INFO] Save quantized model at /foo/bar/baz.pb",
    ]
    tuning_parser = OptimizationParser(["file.log"])
    parsed = tuning_parser.process()
    # Accuracies are rounded to 4 decimal places (0.99876 -> 0.9988);
    # only the last saved-model path survives.
    self.assertEqual(
        {
            "acc_input_model": 0.1234,
            "acc_optimized_model": 0.9988,
            "path_optimized_model": "/foo/bar/baz.pb",
        },
        parsed,
    )
def test_parsing_empty_files(self, mocked_open: MagicMock) -> None:
    """Test parsing of files without any lines."""
    # An opened log file that yields no lines at all.
    mocked_open.return_value.__enter__.return_value = []
    parser = OptimizationParser(["file.log"])
    self.assertEqual({}, parser.process())
def test_parsing_simple_file(self, mocked_open: MagicMock) -> None:
    """Test parsing of file."""
    # A minimal log: noise lines surrounding two tune results and a baseline.
    log_lines = [
        "Foo bar baz",
        "2021-05-27 07:52:50 [INFO] Tune 1 result is: [accuracy: 0.1234, duration (seconds): 5.6789], Best tune result is: None",  # noqa: E501
        "2021-05-27 07:52:50 [INFO] Tune 2 result is: [accuracy: 0.99876, duration (seconds): 0.5432], Best tune result is: [accuracy: 0.99876, duration (seconds): 0.5432]",  # noqa: E501
        "2021-05-27 07:52:27 [INFO] FP32 baseline is: [accuracy: 0.12344, duration (seconds): 5.6789]",  # noqa: E501
        "a b c d",
    ]
    mocked_open.return_value.__enter__.return_value = log_lines
    parser = OptimizationParser(["file.log"])
    # No "Save quantized model" line, so no optimized-model path is reported.
    expected = {
        "acc_input_model": 0.1234,
        "acc_optimized_model": 0.9988,
    }
    self.assertEqual(expected, parser.process())
def test_parsing_empty_file_list(self) -> None:
    """Test parsing of none files."""
    # With no files to read, processing yields an empty result.
    parsed = OptimizationParser([]).process()
    self.assertEqual({}, parsed)
def execute_optimization(data: Dict[str, Any]) -> dict:
    """Run the optimization described by the request and publish its result.

    Loads the workload config for ``data["id"]``, runs the optimization
    command through an ``Executor``, parses the output log, and reports
    success or failure over the ``mq`` message queue.

    :param data: request payload; must contain a non-empty "id" key.
    :return: parsed result dict enriched with ids, sizes and timings.
    :raises Exception: when the request id is missing.
    :raises ClientErrorException: when the optimization process fails.
    """
    from lpot.ux.utils.workload.workload import Workload

    # Validate the request id before doing any work; failures are also
    # published on the queue so the client is notified.
    if not str(data.get("id", "")):
        message = "Missing request id."
        mq.post_error(
            "optimization_finish",
            {"message": message, "code": 404},
        )
        raise Exception(message)

    request_id: str = data["id"]
    workdir = Workdir(request_id=request_id, overwrite=False)
    workload_path: str = workdir.workload_path

    # Load the persisted workload configuration; report and re-raise on failure.
    try:
        workload_data = _load_json_as_dict(
            os.path.join(workload_path, "workload.json"),
        )
    except Exception as err:
        mq.post_error(
            "optimization_finish",
            {"message": repr(err), "code": 404, "id": request_id},
        )
        raise err

    workload = Workload(workload_data)
    optimization: Optimization = OptimizationFactory.get_optimization(
        workload,
        workdir.template_path,
    )
    # Initial payload sent with the "started" notification.
    send_data = {
        "message": "started",
        "id": request_id,
        "size_input_model": get_size(optimization.input_graph),
    }

    # Mark the workload as work-in-progress before launching the process.
    workdir.clean_logs()
    workdir.update_data(
        request_id=request_id,
        model_path=optimization.input_graph,
        input_precision=optimization.input_precision,
        model_output_path=optimization.output_graph,
        output_precision=optimization.output_precision,
        status="wip",
    )

    # Run the optimization command; stdout/stderr are captured to "output".
    executor = Executor(
        workspace_path=workload_path,
        subject="optimization",
        data=send_data,
        log_name="output",
    )
    proc = executor.call(
        optimization.command,
    )
    optimization_time = executor.process_duration
    if optimization_time:
        optimization_time = round(optimization_time, 2)
    log.debug(f"Elapsed time: {optimization_time}")

    # Parse metrics (accuracies, model path) out of the captured log.
    logs = [os.path.join(workload_path, "output.txt")]
    parser = OptimizationParser(logs)
    if proc.is_ok:
        response_data = parser.process()
        if isinstance(response_data, dict):
            response_data["id"] = request_id
            response_data["optimization_time"] = optimization_time
            response_data["size_optimized_model"] = get_size(optimization.output_graph)
            response_data["model_output_path"] = optimization.output_graph
            response_data["size_input_model"] = get_size(optimization.input_graph)
            # NOTE(review): presence of a template path is used as the
            # custom-dataloader indicator — confirm this is intended.
            response_data["is_custom_dataloader"] = bool(workdir.template_path)
        # Persist the successful run's metrics and execution details.
        workdir.update_data(
            request_id=request_id,
            model_path=optimization.input_graph,
            model_output_path=optimization.output_graph,
            metric=response_data,
            status="success",
            execution_details={"optimization": optimization.serialize()},
            input_precision=optimization.input_precision,
            output_precision=optimization.output_precision,
        )
        response_data["execution_details"] = {"optimization": optimization.serialize()}
        log.debug(f"Parsed data is {json.dumps(response_data)}")
        mq.post_success("optimization_finish", response_data)
        return response_data
    else:
        # Process exited with an error: record the failure and notify clients.
        log.debug("FAIL")
        workdir.update_data(
            request_id=request_id,
            model_path=optimization.input_graph,
            input_precision=optimization.input_precision,
            output_precision=optimization.output_precision,
            status="error",
        )
        mq.post_failure("optimization_finish", {"message": "failed", "id": request_id})
        raise ClientErrorException("Optimization failed during execution.")