def store_configuration_map(
    configuration_map: ConfigurationMap, file_path: Path
) -> None:
    """
    Store a `ConfigurationMap` to a file.

    The file is written as a multi-document yaml stream: a version header
    document followed by a mapping of config id -> dumped configuration.

    Args:
        configuration_map: to store
        file_path: to the file
    """
    # Path.suffix includes the leading dot (e.g. ".yaml"), so the check
    # must use dotted suffixes; comparing against ["yml", "yaml"] could
    # never match and warned on every call, even for valid yaml paths.
    if file_path.suffix not in [".yml", ".yaml"]:
        LOG.warning(
            "ConfigurationMap file path does not end in "
            ".yaml or .yml but dumped file is of type yaml."
        )

    with open(file_path, 'w') as stream:
        version_header = VersionHeader.from_version_number(
            "ConfigurationMap", 1
        )
        yaml.dump_all([
            version_header.get_dict(),
            {
                id_config_pair[0]: id_config_pair[1].dump_to_string()
                for id_config_pair in configuration_map.id_config_tuples()
            }
        ],
                      stream,
                      default_flow_style=False,
                      explicit_start=True,
                      explicit_end=True)
def store_artefacts_to_file(artefacts: Artefacts, file_path: Path) -> None:
    """
    Store artefacts in a file.

    Args:
        artefacts: the artefacts to store
        file_path: the file to store in
    """
    # Prepend the version header so the loader can validate the file.
    header = VersionHeader.from_version_number(
        'Artefacts', __ARTEFACTS_FILE_VERSION
    )
    store_as_yaml(file_path, [header, artefacts])
def __init__(self, path: Path) -> None:
    """
    Parse a blame report from a yaml file.

    The file is a multi-document yaml stream read strictly in order:
    (1) version header, (2) report meta data, (3) the result map.

    Args:
        path: to the blame report file
    """
    super().__init__(path)
    with open(path, 'r') as stream:
        documents = yaml.load_all(stream, Loader=yaml.CLoader)
        # Document 1: validate file type and minimum supported version.
        version_header = VersionHeader(next(documents))
        version_header.raise_if_not_type("BlameReport")
        version_header.raise_if_version_is_less_than(4)
        # Version 4 is still accepted but considered outdated.
        if version_header.version < 5:
            LOG.warning("You are using an outdated blame report format "
                        "that might not be supported in the future.")

        # Document 2: report meta data.
        self.__meta_data = BlameReportMetaData \
            .create_blame_report_meta_data(next(documents))

        self.__function_entries: tp.Dict[str, BlameResultFunctionEntry] = {}
        # Document 3: the actual blame results.
        raw_blame_report = next(documents)
        self.__blame_taint_scope = BlameTaintScope.from_string(
            # be backwards compatible with blame report version 4
            raw_blame_report.get('scope', "COMMIT"))
        # Build one function entry per function key in the result map,
        # indexed by the entry's (demangled) name.
        for raw_func_entry in raw_blame_report['result-map']:
            new_function_entry = (
                BlameResultFunctionEntry.
                create_blame_result_function_entry(
                    raw_func_entry,
                    raw_blame_report['result-map'][raw_func_entry]))
            self.__function_entries[
                new_function_entry.name] = new_function_entry
def __init__(self, path: Path) -> None:
    """
    Parse a commit report from a yaml file.

    The file is a multi-document yaml stream read strictly in order:
    (1) version header, (2) function infos and region mappings,
    (3) function graph edges.

    Args:
        path: to the commit report file
    """
    super().__init__(path)
    with open(path, "r") as stream:
        documents = yaml.load_all(stream, Loader=yaml.CLoader)
        # Document 1: validate file type and minimum supported version.
        version_header = VersionHeader(next(documents))
        version_header.raise_if_not_type("CommitReport")
        version_header.raise_if_version_is_less_than(3)

        # Document 2: per-function info and region mappings.
        raw_infos = next(documents)
        self.finfos: tp.Dict[str, FunctionInfo] = {}
        for raw_finfo in raw_infos['function-info']:
            finfo = FunctionInfo(raw_finfo)
            self.finfos[finfo.name] = finfo

        self.region_mappings: tp.Dict[str, RegionMapping] = {}
        # 'region-mapping' may be present but null in the yaml, hence
        # the explicit None check before iterating.
        raw_region_mapping = raw_infos['region-mapping']
        if raw_region_mapping is not None:
            for raw_r_mapping in raw_region_mapping:
                r_mapping = RegionMapping(raw_r_mapping)
                self.region_mappings[r_mapping.id] = r_mapping

        # Document 3: function graph edges, indexed by function id.
        gedges = next(documents)
        self.graph_info: tp.Dict[str, FunctionGraphEdges] = {}
        for raw_fg_edge in gedges:
            f_edge = FunctionGraphEdges(raw_fg_edge)
            self.graph_info[f_edge.fid] = f_edge
def openFile(self) -> None:
    """
    Open an InteractionFilter yaml file chosen via a file dialog.

    Prompts to discard unsaved changes, validates the file's version
    header, loads the filter tree, and re-initializes the model with it.
    On failure an error dialog is shown and the exception is re-raised.
    """
    if self.isWindowModified():
        reply = QMessageBox.question(
            self, "Warning", "Discard unsaved changes?",
            QMessageBox.Yes | QMessageBox.No
        )
        if reply == QMessageBox.No:
            return

    filename = QFileDialog.getOpenFileName(self)
    if not filename[0]:
        # User cancelled the file dialog.
        return

    # TODO (julianbreiteneicher): Warn user (documentation?) that we
    # (have to) use the unsafe loader?
    try:
        with open(filename[0], 'r') as yaml_file:
            documents = yaml_file.read().split("---")
            # PyYAML >= 5.1 deprecates and PyYAML 6 removes calling
            # yaml.load() without a Loader; pass Loader explicitly,
            # matching the root_node load below.
            version_header = VersionHeader(
                yaml.load(documents[0], Loader=yaml.Loader)
            )
            version_header.raise_if_not_type("InteractionFilter")
            version_header.raise_if_version_is_less_than(1)

            root_node = yaml.load(documents[1], Loader=yaml.Loader)
            root_node.fixParentPointers()

            self._filename = filename[0]
            self._file_basename = os.path.basename(filename[0])
            self._model.reInit(root_node)
            self.setWindowModified(False)
            self.updateWindowTitle()
    except Exception as e:
        # Surface the failure to the user before propagating it.
        msg = QMessageBox()
        msg.setIcon(QMessageBox.Critical)
        msg.setInformativeText(str(e))
        msg.setWindowTitle("Error")
        msg.exec_()
        raise e
def __init__(self, path: Path, szz_tool: str):
    """
    Load a SZZ report file and verify its producing tool.

    Args:
        path: to the report file
        szz_tool: name of the SZZ tool the report must stem from

    Raises:
        AssertionError: if the report was produced by a different tool
    """
    super().__init__(path)
    with open(path, 'r') as stream:
        documents = yaml.load_all(stream, Loader=yaml.CLoader)
        # First document: validate file type and minimum version.
        header = VersionHeader(next(documents))
        header.raise_if_not_type("SZZReport")
        header.raise_if_version_is_less_than(1)

        # Second document: the actual report payload.
        raw_report = next(documents)
        if raw_report["szz_tool"] != szz_tool:
            raise AssertionError(
                "Report was not created with the correct tool."
            )
        # Map each fixing commit to its bug (with introducing commits).
        self.__bugs: tp.Dict[str, RawBug] = {
            fix: RawBug(fix, set(introducers), None)
            for fix, introducers in raw_report["bugs"].items()
        }
def create_report(self) -> actions.StepResult:
    """
    Create a report from SZZUnleashed data.

    Reads SZZUnleashed's ``fix_and_introducers_pairs.json`` output,
    aggregates it into fix -> introducing-commits sets, and writes the
    result as a versioned SZZReport yaml file into the varats result
    folder.

    Returns:
        actions.StepResult.OK on completion
    """
    project = self.obj

    varats_result_folder = get_varats_result_folder(project)

    # SZZUnleashed writes its results next to the checked-out sources.
    run_dir = Path(project.source_of_primary).parent
    with (run_dir / "results" /
          "fix_and_introducers_pairs.json").open("r") as result_json:
        szz_result = json.load(result_json)

    bugs: tp.Dict[str, tp.Set[str]] = {}
    # entries are lists of the form [<fix>, <introducing>]
    for result_entry in szz_result:
        bugs.setdefault(result_entry[0], set())
        bugs[result_entry[0]].add(result_entry[1])

    # Sort introducer lists so the dumped yaml is deterministic.
    raw_szz_report = {
        "szz_tool": SZZTool.SZZ_UNLEASHED.tool_name,
        "bugs": {k: sorted(list(v)) for k, v in bugs.items()}
    }

    result_file = SZZUnleashedReport.get_file_name(
        "SZZUnleashed",
        project_name=str(project.name),
        binary_name="none",  # we don't rely on binaries in this experiment
        project_revision=project.version_of_primary,
        project_uuid=str(project.run_uuid),
        extension_type=FSE.SUCCESS)

    # Dump version header and report as a two-document yaml stream.
    with open(f"{varats_result_folder}/{result_file}", "w") as yaml_file:
        yaml_file.write(
            yaml.dump_all([
                VersionHeader.from_version_number("SZZReport",
                                                  1).get_dict(),
                raw_szz_report
            ],
                          explicit_start=True,
                          explicit_end=True))

    return actions.StepResult.OK
def create_report(self) -> actions.StepResult:
    """
    Create a report from SZZ data.

    Queries the project's bug provider for PyDriller-based SZZ bugs,
    maps each fixing commit to its sorted introducing commits, and
    writes the result as a versioned SZZReport yaml file into the
    varats result folder.

    Returns:
        actions.StepResult.OK on completion
    """
    project = self.obj

    bug_provider = BugProvider.get_provider_for_project(project)
    pygit_bugs = bug_provider.find_pygit_bugs()

    varats_result_folder = get_varats_result_folder(project)

    def commit_to_hash(commit: Commit) -> str:
        # Stringify the pygit commit id (the full hash).
        return str(commit.id)

    bugs: tp.Dict[str, tp.List[str]] = {}
    # entries are lists of the form [<fix>, <introducing>]
    for bug in pygit_bugs:
        # Sort introducers so the dumped yaml is deterministic.
        bugs[commit_to_hash(bug.fixing_commit)] = sorted(
            [commit_to_hash(commit) for commit in bug.introducing_commits])

    raw_szz_report = {
        "szz_tool": SZZTool.PYDRILLER_SZZ.tool_name,
        "bugs": bugs
    }

    result_file = PyDrillerSZZReport.get_file_name(
        "PyDrSZZ",
        project_name=str(project.name),
        binary_name="none",  # we don't rely on binaries in this experiment
        project_revision=project.version_of_primary,
        project_uuid=str(project.run_uuid),
        extension_type=FSE.SUCCESS)

    # Dump version header and report as a two-document yaml stream.
    with open(f"{varats_result_folder}/{result_file}", "w") as yaml_file:
        yaml_file.write(
            yaml.dump_all([
                VersionHeader.from_version_number("SZZReport",
                                                  1).get_dict(),
                raw_szz_report
            ],
                          explicit_start=True,
                          explicit_end=True))

    return actions.StepResult.OK
def load_case_study_from_file(file_path: Path) -> CaseStudy:
    """
    Load a case study from a file.

    The file is a multi-document yaml stream: a version header followed
    by the case-study document containing the stages.

    Args:
        file_path: path to the case study file

    Returns:
        the loaded `CaseStudy`
    """
    documents = load_yaml(file_path)
    version_header = VersionHeader(next(documents))
    version_header.raise_if_not_type("CaseStudy")
    version_header.raise_if_version_is_less_than(1)

    raw_case_study = next(documents)
    stages: tp.List[CSStage] = []
    for raw_stage in raw_case_study['stages']:
        hash_id_tuples: tp.List[CSEntry] = []
        for raw_hash_id_tuple in raw_stage['revisions']:
            # 'config_ids' is optional; older files may not have it.
            if 'config_ids' in raw_hash_id_tuple:
                config_ids = [int(x) for x in raw_hash_id_tuple['config_ids']]
            else:
                config_ids = []

            hash_id_tuples.append(
                CSEntry(
                    FullCommitHash(raw_hash_id_tuple['commit_hash']),
                    raw_hash_id_tuple['commit_id'], config_ids))

        # 'or None' normalizes missing AND empty/falsy values to None.
        sampling_method_name = raw_stage.get('sampling_method') or None
        if sampling_method_name:
            # Look up the sampling method type by name and instantiate it.
            sampling_method: tp.Optional[SamplingMethod] = SamplingMethodBase[
                SamplingMethod].get_sampling_method_type(
                    sampling_method_name)()
        else:
            sampling_method = None

        release_type = raw_stage.get('release_type') or None
        stages.append(
            CSStage(
                raw_stage.get('name') or None, sampling_method,
                ReleaseType[release_type] if release_type is not None else
                None, hash_id_tuples))

    return CaseStudy(raw_case_study['project_name'],
                     raw_case_study['version'], stages)
def load_artefacts_from_file(file_path: Path) -> Artefacts:
    """
    Load an artefacts file.

    Artefacts of unknown type or with an outdated artefact-type version
    are skipped with a warning instead of raising.

    Args:
        file_path: path to the artefacts file

    Returns:
        the artefacts created from the given file
    """
    documents = load_yaml(file_path)
    version_header = VersionHeader(next(documents))
    version_header.raise_if_not_type("Artefacts")
    version_header.raise_if_version_is_less_than(__ARTEFACTS_FILE_VERSION)

    raw_artefacts = next(documents)
    artefacts: tp.List[Artefact] = []
    for raw_artefact in raw_artefacts.pop('artefacts'):
        # pop() consumes the known keys so that only artefact-specific
        # options remain in raw_artefact for the **kwargs call below.
        artefact_type_name = raw_artefact.pop('artefact_type')
        artefact_type = Artefact.ARTEFACT_TYPES.get(artefact_type_name, None)
        artefact_type_version = raw_artefact.pop('artefact_type_version')
        name = raw_artefact.pop('name')
        output_dir = raw_artefact.pop('output_dir')

        if not artefact_type:
            LOG.warning(
                f"Skipping artefact of unknown type '{artefact_type_name}'")
            continue
        if artefact_type_version < artefact_type.ARTEFACT_TYPE_VERSION:
            LOG.warning(
                f"Skipping artefact {name} because it uses an outdated "
                f"version of {artefact_type_name}.")
            continue

        # Remaining keys in raw_artefact are artefact-specific options.
        artefacts.append(
            artefact_type.create_artefact(name, output_dir, **raw_artefact))

    return Artefacts(artefacts)
def load_configuration_map(
    file_path: Path, concrete_config_type: tp.Type[Configuration]
) -> ConfigurationMap:
    """
    Load a configuration map from a file.

    Args:
        file_path: to the configuration map file
        concrete_config_type: type of the configuration objects that should
                              be created

    Returns:
        a new `ConfigurationMap` based on the parsed file
    """
    with open(file_path, 'r') as stream:
        documents = yaml.load_all(stream, Loader=yaml.CLoader)
        # First document must be a matching version header.
        header = VersionHeader(next(documents))
        header.raise_if_not_type("ConfigurationMap")
        header.raise_if_version_is_less_than(1)
        # Second document holds the actual configuration map data;
        # materialize it before the stream is closed.
        raw_config_map_doc = next(documents)

    return create_configuration_map_from_yaml_doc(
        raw_config_map_doc, concrete_config_type
    )
def load_configuration_map_from_case_study_file(
    file_path: Path, concrete_config_type: tp.Type[Configuration]
) -> ConfigurationMap:
    """
    Load a configuration map from a case-study file.

    Args:
        file_path: to the configuration map file
        concrete_config_type: type of the configuration objects that should
                              be created

    Returns:
        a new `ConfigurationMap` based on the parsed file
    """
    documents = load_yaml(file_path)

    # The file must start with a CaseStudy version header.
    header = VersionHeader(next(documents))
    header.raise_if_not_type("CaseStudy")
    header.raise_if_version_is_less_than(1)

    # Skip over the case-study document; the configuration map follows it.
    next(documents)
    raw_config_map_doc = next(documents)

    return create_configuration_map_from_yaml_doc(
        raw_config_map_doc, concrete_config_type
    )
def __init__(self, path: Path) -> None:
    """
    Parse a feature analysis report from a yaml file.

    The file is a multi-document yaml stream read strictly in order:
    (1) version header, (2) report meta data, (3) the result map.

    Args:
        path: to the feature analysis report file
    """
    super().__init__(path)
    with open(path, 'r') as stream:
        documents = yaml.load_all(stream, Loader=yaml.CLoader)
        # Document 1: validate file type and minimum supported version.
        version_header = VersionHeader(next(documents))
        version_header.raise_if_not_type("FeatureAnalysisReport")
        version_header.raise_if_version_is_less_than(1)

        # Document 2: report meta data.
        self.__meta_data = FeatureAnalysisReportMetaData \
            .create_feature_analysis_report_meta_data(next(documents))

        self.__function_entries: tp.Dict[
            str, FeatureAnalysisResultFunctionEntry] = {}
        # Document 3: build one function entry per key in the result
        # map, indexed by the entry's name.
        raw_feature_analysis_report = next(documents)
        for raw_func_entry in raw_feature_analysis_report['result-map']:
            new_function_entry = (
                FeatureAnalysisResultFunctionEntry.
                create_feature_analysis_result_function_entry(
                    raw_func_entry, raw_feature_analysis_report['result-map']
                    [raw_func_entry]))
            self.__function_entries[
                new_function_entry.name] = new_function_entry
def getVersionHeader() -> VersionHeader:
    """Build the version header for InteractionFilter files (version 1)."""
    header_type = "InteractionFilter"
    return VersionHeader.from_version_number(header_type, 1)
def __store_case_study_to_file(case_study: CaseStudy, file_path: Path) -> None:
    """Store case study to file."""
    # Prepend the version header so the loader can validate the file.
    header = VersionHeader.from_version_number('CaseStudy', 1)
    store_as_yaml(file_path, [header, case_study])