def analyse_projects(
    projects: List[Tuple[str, str]], is_local: bool = False, entities: Optional[List[str]] = None
) -> None:
    """Run Issues (that are not PRs), PRs, PR Reviews analysis on specified projects.

    Arguments:
        projects {List[Tuple[str, str]]} -- one tuple should be in format (project_name, repository_name)
        is_local {bool} -- if set to False, Ceph will be used
        entities {Optional[List[str]]} -- entities that will be analysed. If not specified, all are used.

    """
    knowledge_root = Path.cwd().joinpath("./srcopsmetrics/bot_knowledge")
    for project in projects:
        _LOGGER.info("######################## Analysing %s ########################\n" % "/".join(project))

        repo = github_knowledge.connect_to_source(project=project)
        repo_path = knowledge_root.joinpath("./" + repo.full_name)
        utils.check_directory(repo_path)

        # Resolve the requested entity names against all known entity classes.
        available = _get_all_entities()
        requested = []
        if entities:
            requested = [candidate for candidate in available if candidate.__name__ in entities]
            if not requested:
                raise NotKnownEntities(message="", specified_entities=entities, available_entities=available)

        # Fall back to every available entity when none were explicitly requested.
        for entity_cls in requested or available:
            _LOGGER.info("%s inspection" % entity_cls.__name__)
            github_knowledge.analyse_entity(
                github_repo=repo, project_path=repo_path, entity_cls=entity_cls, is_local=is_local
            )
            _LOGGER.info("\n")
def create_per_pr_plot(
    self,
    *,
    result_path: Path,
    project: str,
    x_array: List[Any],
    y_array: List[Any],
    x_label: str,
    y_label: str,
    title: str,
    output_name: str,
):
    """Create processed data in time per project plot."""
    figure, axes = plt.subplots()
    axes.plot(x_array, y_array, "ro")

    # Rotate date ticks so they remain readable on the x axis.
    plt.gcf().autofmt_xdate()
    axes.set(xlabel=x_label, ylabel=y_label, title=title)
    axes.grid()

    # Ensure the per-project output directory exists before saving.
    check_directory(result_path.joinpath(project))
    destination = result_path.joinpath(f"{project}/{output_name}.png")
    figure.savefig(destination)
    plt.close()
def analyse_projects(
    projects: List[Tuple[str, str]], is_local: bool = False, entities: Optional[List[str]] = None
) -> None:
    """Run Issues (that are not PRs), PRs, PR Reviews analysis on specified projects.

    Arguments:
        projects {List[Tuple[str, str]]} -- one tuple should be in format (project_name, repository_name)
        is_local {bool} -- if set to False, Ceph will be used
        entities {Optional[List[str]]} -- entities that will be analysed. If not specified, all are used.

    """
    knowledge_root = Path.cwd().joinpath("./srcopsmetrics/bot_knowledge")
    for project in projects:
        _LOGGER.info("######################## Analysing %s ########################\n" % "/".join(project))

        repo = github_knowledge.connect_to_source(project=project)
        repo_path = knowledge_root.joinpath("./" + repo.full_name)
        check_directory(repo_path)

        # Validate any explicitly requested entity names against the enum.
        known = [member.value for member in EntityTypeEnum]
        if entities:
            check_entities = [name for name in entities if name not in known]
            if check_entities:
                raise NotKnownEntities(f"There are Entities requested which are not known: {check_entities}")

        # When no entities were requested, inspect all known ones.
        for entity in entities or known:
            _LOGGER.info("%s inspection" % entity)
            github_knowledge.analyse_entity(repo, repo_path, entity, is_local)
def __call__(self, *args, **kwargs):
    """Load or process knowledge and save it."""

    def compute():
        # Deferred so the wrapped function only runs on a cache miss.
        return self.func(*args, **kwargs)

    project_dir = self.main.joinpath(os.getenv("PROJECT"))
    utils.check_directory(project_dir)
    knowledge_file = project_dir.joinpath(f"{self.func.__name__}.json")

    storage = KnowledgeStorage(os.getenv("IS_LOCAL") == "True")
    knowledge = storage.load_previous_knowledge(file_path=knowledge_file, knowledge_type="Processed Knowledge")

    # Recompute when nothing was cached or reprocessing is forced via env var.
    if knowledge is None or knowledge == {} or os.getenv("PROCESS_KNOWLEDGE") == "True":
        knowledge = compute()
        storage.save_knowledge(file_path=knowledge_file, data=knowledge)
    return knowledge
def file_path(self) -> Path:
    """Get entity file path."""
    base = Path.cwd().joinpath(os.getenv(StoragePath.LOCATION_VAR.value, StoragePath.DEFAULT.value))
    repo_dir = base.joinpath(StoragePath.KNOWLEDGE.value).joinpath("./" + self.repository.full_name)
    utils.check_directory(repo_dir)
    # TODO: implement an as_csv flag so ".csv" can be produced as well.
    extension = ".json"
    return repo_dir.joinpath("./" + self.filename + extension)
def evaluate_and_store_kebechet_metrics(self):
    """Calculate and store metrics for every kebechet manager in repository."""
    for metrics_fn in [self.update_manager]:
        metrics = metrics_fn()

        target_dir = Path(f"./{_ROOT_DIR}/{self.repo_name}/")
        utils.check_directory(target_dir)

        # Daily snapshots get the current date embedded in the file name.
        name = f"kebechet_{metrics_fn.__name__}"
        if self.today:
            name += f"_{str(datetime.now().date())}"
        name += ".json"

        KnowledgeStorage(is_local=self.is_local).save_data(file_path=target_dir.joinpath(name), data=metrics)
def analyse_projects(projects: List[Tuple[str, str]], use_ceph: bool = False) -> None:
    """Run Issues (that are not PRs), PRs, PR Reviews analysis on specified projects.

    Arguments:
        projects {List[Tuple[str, str]]} -- one tuple should be in format (project_name, repository_name)

    """
    knowledge_root = Path.cwd().joinpath("./srcopsmetrics/bot_knowledge")
    for project in projects:
        _LOGGER.info(
            "######################## Starting analysing %s ########################" % "/".join(project))

        repo = connect_to_source(project=project)
        repo_path = knowledge_root.joinpath("./" + repo.full_name)
        check_directory(repo_path)

        # Only Issues and Pull Requests are inspected by this variant.
        for entity_type in ("Issue", "PullRequest"):
            analyse_entity(repo, repo_path, entity_type, use_ceph)

    _LOGGER.info(
        "######################## Analysis ended ########################")
def __call__(self, *args, **kwargs):
    """Load or process knowledge and save it.

    Loads previously processed knowledge for the current PROJECT; when none
    is stored (or PROCESS_KNOWLEDGE=True forces it), runs the wrapped
    function, stores its result, and returns it.
    """

    def wrapper():
        # Deferred so the wrapped function only runs on a cache miss.
        return self.func(*args, **kwargs)

    project = os.getenv('PROJECT')
    preprocessed_dir = Path(StoragePath.PROCESSED.value).joinpath(project)
    utils.check_directory(preprocessed_dir)
    total_path = preprocessed_dir.joinpath(f'{self.func.__name__}.json')

    # BUG FIX: os.getenv returns a string and any non-empty string (even
    # "False") is truthy — compare against 'True' to get a real bool,
    # matching how IS_LOCAL is handled elsewhere in this file.
    storage = KnowledgeStorage(os.getenv('IS_LOCAL') == 'True')
    knowledge = storage.load_previous_knowledge(
        file_path=total_path, knowledge_type='Processed Knowledge')

    # BUG FIX: `is 'True'` compared object identity with a string literal,
    # which is implementation-dependent and effectively always False
    # (CPython >=3.8 warns about it); use `==` for value equality.
    if knowledge is None or knowledge == {} or os.getenv('PROCESS_KNOWLEDGE') == 'True':
        knowledge = wrapper()
        storage.save_knowledge(file_path=total_path, data=knowledge)
    return knowledge