class TuningParser:
    """Parser class is responsible for parsing tuning log files."""

    def __init__(self, logs: list) -> None:
        """Initialize object."""
        self._logs = logs
        self.metric = Metric()

    def process(self) -> Dict[str, Any]:
        """Process files."""
        for log_file in self._logs:
            log.debug(f"Read from {log_file}")
            with open(log_file) as f:
                for line in f:
                    for key in self.patterns:
                        prog = re.compile(self.patterns[key])
                        match = prog.search(line)
                        if match:
                            self.metric.insert_data(key, match.group(1))
        parsed_data: Dict[str, Any] = self.metric.serialize()  # type: ignore
        return parsed_data

    @property
    def patterns(self) -> dict:
        """Return patterns used to extract metrics from log lines."""
        return {
            "acc_fp32": r".*FP32 baseline is: \[(\d+.\d+),",
            "acc_int8": r".*Best tune result is: \[(\d+.\d+),",
        }
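
# A minimal, self-contained sketch of how the two patterns above behave on
# sample log lines. The sample lines and the printed result are illustrative
# assumptions, not actual LPOT tuning output.
import re

ILLUSTRATIVE_PATTERNS = {
    "acc_fp32": r".*FP32 baseline is: \[(\d+.\d+),",
    "acc_int8": r".*Best tune result is: \[(\d+.\d+),",
}

sample_lines = [
    "2021-01-01 12:00:00 [INFO] FP32 baseline is: [0.7654, 1.0000]",
    "2021-01-01 12:05:00 [INFO] Best tune result is: [0.7612, 1.0000]",
]

extracted = {}
for sample in sample_lines:
    for key, pattern in ILLUSTRATIVE_PATTERNS.items():
        match = re.search(pattern, sample)
        if match:
            # group(1) holds the first bracketed number, e.g. "0.7654".
            extracted[key] = match.group(1)

print(extracted)  # {'acc_fp32': '0.7654', 'acc_int8': '0.7612'}
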
def update_data(
    self,
    request_id: Optional[str],
    model_path: Optional[str] = None,
    input_precision: Optional[str] = None,
    model_output_path: Optional[str] = None,
    output_precision: Optional[str] = None,
    mode: Optional[str] = None,
    metric: Optional[Union[Dict[str, Any], Metric]] = None,
    status: Optional[str] = None,
    execution_details: Optional[Dict[str, Any]] = None,
) -> None:
    """Update data in workloads_list.json."""
    if metric is None:
        # Build the default per call instead of sharing one mutable Metric()
        # instance across all calls through the argument default.
        metric = Metric()
    self.load()
    workload_info = WorkloadInfo(
        workload_path=self.workload_path,
        request_id=request_id,
        model_path=model_path,
        input_precision=input_precision,
        model_output_path=model_output_path,
        output_precision=output_precision,
        mode=mode,
        metric=metric,
        status=status,
        code_template_path=self.template_path,
        execution_details=execution_details,
    ).serialize()
    self.workloads_data["workloads"][request_id] = workload_info
    self.dump()
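
# A minimal sketch of the load -> update -> dump cycle that update_data()
# performs on workloads_list.json. The file layout and the entry fields shown
# here are assumptions for illustration; the real entry is whatever
# WorkloadInfo.serialize() returns.
import json
import os


def update_workloads_list(path: str, request_id: str, workload_info: dict) -> None:
    """Insert or replace one workload entry, keyed by request_id."""
    if os.path.isfile(path):
        with open(path) as f:
            data = json.load(f)
    else:
        data = {"active_workspace_path": "", "workloads": {}}
    data["workloads"][request_id] = workload_info
    with open(path, "w") as f:
        json.dump(data, f, indent=4)


# Hypothetical usage; the entry fields mirror the WorkloadInfo arguments above.
update_workloads_list(
    "/tmp/workloads_list.json",
    request_id="abc123",
    workload_info={"model_path": "/models/resnet50.pb", "status": "success"},
)
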
def __init__(
    self,
    workspace_path: Optional[str] = None,
    request_id: Optional[str] = None,
    model_path: Optional[str] = None,
    input_precision: Optional[str] = None,
    model_output_path: Optional[str] = None,
    output_precision: Optional[str] = None,
    mode: Optional[str] = None,
    metric: Optional[Union[dict, Metric]] = None,
    overwrite: bool = True,
) -> None:
    """Initialize workdir class."""
    self.workdir_path = os.path.join(os.environ.get("HOME", ""), ".lpot")
    self.ensure_working_path_exists()
    self.workloads_json = os.path.join(self.workdir_path, "workloads_list.json")
    self.request_id = request_id
    self.workload_path: str
    if not metric:
        metric = Metric()

    if os.path.isfile(self.workloads_json):
        self.workloads_data = self.load()
    else:
        self.workloads_data = {
            "active_workspace_path": workspace_path,
            "workloads": {},
            "version": "2",
        }

    workload_data = self.get_workload_data(request_id)
    if workload_data:
        self.workload_path = workload_data.get("workload_path", "")
    elif workspace_path and request_id:
        workload_name = request_id
        if model_path:
            workload_name = "_".join(
                [
                    Path(model_path).stem,
                    request_id,
                ],
            )
        self.workload_path = os.path.join(
            workspace_path,
            "workloads",
            workload_name,
        )

    if request_id and overwrite:
        self.update_data(
            request_id=request_id,
            model_path=model_path,
            input_precision=input_precision,
            model_output_path=model_output_path,
            output_precision=output_precision,
            mode=mode,
            metric=metric,
        )
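
# A small sketch of how the workload directory path is derived in the
# constructor above: the model file stem and the request id are joined with an
# underscore, then placed under <workspace_path>/workloads. The paths and ids
# below are made-up examples.
import os
from pathlib import Path

workspace_path = "/home/user/workspace"  # assumed workspace
request_id = "abc123"                    # assumed request id
model_path = "/models/resnet50_v1.pb"    # assumed model file

workload_name = "_".join([Path(model_path).stem, request_id])
workload_path = os.path.join(workspace_path, "workloads", workload_name)

print(workload_path)  # /home/user/workspace/workloads/resnet50_v1_abc123
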
def __init__(
    self,
    workspace_path: Optional[str] = None,
    request_id: Optional[str] = None,
    model_path: Optional[str] = None,
    model_output_path: Optional[str] = None,
    metric: Optional[Union[dict, Metric]] = None,
    overwrite: bool = True,
) -> None:
    """Initialize workdir class."""
    self.workdir_path = os.path.join(os.environ["HOME"], ".lpot")
    self.ensure_working_path_exists()
    self.workloads_json = os.path.join(self.workdir_path, "workloads_list.json")
    self.request_id = request_id
    self.workload_path: str
    if not metric:
        metric = Metric()

    if os.path.isfile(self.workloads_json):
        self.workloads_data = self.load()
    else:
        self.workloads_data = {
            "active_workspace_path": workspace_path,
            "workloads": {},
        }

    if (
        self.workloads_data.get("workloads", None)
        and self.workloads_data.get("workloads", None).get(request_id, None)
    ):
        self.workload_path = self.workloads_data["workloads"][request_id][
            "workload_path"
        ]
    elif workspace_path and request_id:
        workload_name = request_id
        if model_path:
            workload_name = "_".join(
                [
                    Path(model_path).stem,
                    request_id,
                ],
            )
        self.workload_path = os.path.join(
            workspace_path,
            "workloads",
            workload_name,
        )

    if request_id and overwrite:
        self.update_data(request_id, model_path, model_output_path, metric)
def process(self) -> Dict[str, Any]:
    """Process files."""
    metric = Metric()
    partial: Dict[str, List] = {}
    for log_file in self._logs:
        log.debug(f"Read from {log_file}")
        with open(log_file) as f:
            for line in f:
                for key in self.patterns:
                    prog = re.compile(self.patterns[key])
                    match = prog.search(line)
                    if not match:
                        continue
                    metric_name = f"perf_{key}_fp32"
                    metric.insert_data(metric_name, match.group(1))
                    converted_value = getattr(metric, metric_name)
                    parse_result = {
                        key: converted_value,
                    }
                    partial = self.update_partial(partial, parse_result)
    return self.summarize_partial(partial)
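
# update_partial() and summarize_partial() are not shown above. A plausible,
# clearly assumed reading is that partial results are appended per key and then
# reduced (e.g. averaged) at the end; the helpers below are hypothetical
# stand-ins, not the actual implementations.
from typing import Any, Dict, List


def update_partial(partial: Dict[str, List], parse_result: Dict[str, Any]) -> Dict[str, List]:
    """Append each parsed value to the list kept for its key."""
    for key, value in parse_result.items():
        partial.setdefault(key, []).append(float(value))
    return partial


def summarize_partial(partial: Dict[str, List]) -> Dict[str, Any]:
    """Reduce each list of partial values to a single summary value."""
    return {key: sum(values) / len(values) for key, values in partial.items()}


partial: Dict[str, List] = {}
for parsed in ({"throughput": 120.0}, {"throughput": 124.0}, {"latency": 8.2}):
    partial = update_partial(partial, parsed)

print(summarize_partial(partial))  # {'throughput': 122.0, 'latency': 8.2}
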
def __init__(self, logs: list) -> None:
    """Initialize parser."""
    self._logs = logs
    self.metric = Metric()