def main():
    """Extract author aliases for the repository configured in config.yml.

    Expects the GitHub PAT as the first command-line argument. Any existing
    alias file is removed before extraction so the output is always fresh.
    """
    try:
        # read configuration
        config = ...  # type: Configuration
        with open("config.yml", "r", encoding="utf-8-sig") as file:
            content = file.read()
            config = yaml.load(content, Loader=yaml.FullLoader)

        # get repository reference
        repo = getRepo(config)

        # get args; fail with a usage hint instead of a bare IndexError
        if len(sys.argv) < 2:
            sys.exit("ERROR: missing GitHub PAT argument")
        token = sys.argv[1]

        # delete existing alias file if present
        if os.path.exists(config.aliasPath):
            os.remove(config.aliasPath)

        # extract aliases
        extractAliases(
            repo,
            config.aliasPath,
            config.repositoryShortname,
            token,
            config.aliasSimilarityMaxDistance,
        )
    finally:
        # close repo to avoid resource leaks; repo is unbound when an earlier
        # step raised, so guard before deleting (matches the other entry points)
        if "repo" in locals():
            del repo
def main(argv):
    """Command-line entry point for the full repository analysis.

    Parses -c/--config and -p/--pat, loads the YAML configuration, wipes any
    previous output for the repository, runs every analysis stage, and opens
    the output directory when done.
    """
    try:
        # collect command-line options
        configFile = ""
        pat = ""
        try:
            parsed, _ = getopt.getopt(argv, "hc:p:", ["help", "config=", "pat="])
        except getopt.GetoptError:
            print(
                "ERROR: incorrect arguments!\nmain.py -c <config.yml> -p <GitHub PAT>"
            )
            sys.exit(2)
        for flag, value in parsed:
            if flag in ("-h", "--help"):
                print("main.py -c <config.yml> -p <GitHub PAT>")
                sys.exit()
            elif flag in ("-c", "--config"):
                configFile = value
            elif flag in ("-p", "--pat"):
                pat = value

        # bail out early when the configuration file is missing
        if not os.path.exists(configFile):
            sys.exit("ERROR: configuration file not found")

        # load YAML configuration
        config = ...  # type: Configuration
        with open(configFile, "r", encoding="utf-8-sig") as file:
            config = yaml.load(file.read(), Loader=yaml.FullLoader)

        # open repository
        repo = getRepo(config)

        # start from a clean output directory for this repository
        if os.path.exists(config.analysisOutputPath):
            shutil.rmtree(config.analysisOutputPath, False)
        os.makedirs(config.analysisOutputPath)

        # fold aliased authors into canonical identities
        commits = list(replaceAliases(repo.iter_commits(), config.aliasPath))

        # run each analysis stage in turn
        tagAnalysis(repo, config.analysisOutputPath)
        authorInfoDict = commitAnalysis(commits, config.analysisOutputPath)
        centralityAnalysis(repo, commits, config.analysisOutputPath)
        issueOrPrDevs = graphqlAnalysis(
            pat, config.repositoryShortname, config.analysisOutputPath
        )
        devAnalysis(authorInfoDict, issueOrPrDevs, config.analysisOutputPath)

        # show results to the user
        explore(config.analysisOutputPath)
    finally:
        # close repo to avoid resource leaks
        if "repo" in locals():
            del repo
def main():
    """Run tag, commit, and centrality analysis using config.yml settings."""
    try:
        # read configuration
        config = ...  # type: Configuration
        with open('config.yml', 'r', encoding='utf-8-sig') as file:
            content = file.read()
            # pass an explicit Loader: bare yaml.load() is unsafe on untrusted
            # input, deprecated, and a TypeError on PyYAML >= 6 — this also
            # matches the other entry points in this project
            config = yaml.load(content, Loader=yaml.FullLoader)

        # get repository reference
        repo = getRepo(config)

        # delete any existing output files
        if os.path.exists(config.analysisOutputPath):
            shutil.rmtree(config.analysisOutputPath)
        os.makedirs(config.analysisOutputPath)

        # handle aliases
        commits = list(replaceAliases(repo, config.aliasPath))

        # run analysis
        tagAnalysis(repo, config.analysisOutputPath)
        commitAnalysis(commits, config.analysisOutputPath)
        centralityAnalysis(repo, commits, config.analysisOutputPath)
    finally:
        # close repo to avoid resource leaks; repo is unbound when an earlier
        # step raised, so guard before deleting
        if "repo" in locals():
            del repo
def main():
    """Extract author aliases into <repositoryPath>/aliases.yml."""
    try:
        # parse args
        config = parseAliasArgs(sys.argv)

        # get repository reference
        repo = getRepo(config)

        # location of the alias file inside the repository checkout
        targetPath = os.path.join(config.repositoryPath, "aliases.yml")

        # start fresh: drop any previously extracted alias file
        if os.path.exists(targetPath):
            os.remove(targetPath)

        # extract aliases
        extractAliases(config, repo, targetPath)
    finally:
        # release the repository handle even on failure
        if "repo" in locals():
            del repo
def main(argv):
    """Run the full developer-network analysis pipeline.

    Validates the runtime environment, parses command-line arguments,
    prepares output folders, then executes commit, tag, centrality, release,
    PR, issue, and politeness analyses, followed by per-batch dev analysis
    and smell detection.
    """
    try:
        # fail fast on an unsupported runtime environment
        _validateEnvironment()

        # parse args
        config = parseDevNetworkArgs(sys.argv)

        # prepare folders
        if os.path.exists(config.resultsPath):
            remove_tree(config.resultsPath)
        os.makedirs(config.metricsPath)

        # get repository reference
        repo = getRepo(config)

        # setup sentiment analysis
        senti = _setupSentiStrength(config)

        # prepare batch delta
        delta = relativedelta(months=+config.batchMonths)

        # handle aliases
        commits = list(replaceAliases(repo.iter_commits(), config))

        # run analysis
        batchDates, authorInfoDict, daysActive = commitAnalysis(
            senti, commits, delta, config)
        tagAnalysis(repo, delta, batchDates, daysActive, config)
        coreDevs = centrality.centralityAnalysis(commits, delta, batchDates,
                                                 config)
        releaseAnalysis(commits, config, delta, batchDates)
        prParticipantBatches, prCommentBatches = prAnalysis(
            config,
            senti,
            delta,
            batchDates,
        )
        issueParticipantBatches, issueCommentBatches = issueAnalysis(
            config,
            senti,
            delta,
            batchDates,
        )
        politenessAnalysis(config, prCommentBatches, issueCommentBatches)

        for batchIdx, batchDate in enumerate(batchDates):
            # get combined author lists
            combinedAuthorsInBatch = (prParticipantBatches[batchIdx] +
                                      issueParticipantBatches[batchIdx])

            # build combined network
            centrality.buildGraphQlNetwork(
                batchIdx,
                combinedAuthorsInBatch,
                "issuesAndPRsCentrality",
                config,
            )

            # get combined unique authors for both PRs and issues
            uniqueAuthorsInPrBatch = set(
                author for pr in prParticipantBatches[batchIdx]
                for author in pr)
            uniqueAuthorsInIssueBatch = set(
                author for pr in issueParticipantBatches[batchIdx]
                for author in pr)
            uniqueAuthorsInBatch = uniqueAuthorsInPrBatch.union(
                uniqueAuthorsInIssueBatch)

            # get batch core team
            batchCoreDevs = coreDevs[batchIdx]

            # run dev analysis
            devAnalysis(
                authorInfoDict,
                batchIdx,
                uniqueAuthorsInBatch,
                batchCoreDevs,
                config,
            )

            # run smell detection
            smellDetection(config, batchIdx)
    finally:
        # close repo to avoid resource leaks
        if "repo" in locals():
            del repo


def _validateEnvironment():
    """Raise when the interpreter, venv, or installed packages are unsuitable."""
    # validate running in venv: inside a virtual environment sys.prefix
    # differs from sys.base_prefix. The previous hasattr(sys, "prefix") test
    # was always true, so the check never fired.
    if sys.prefix == sys.base_prefix:
        raise Exception(
            "The tool does not appear to be running in the virtual environment!\nSee README for activation."
        )

    # validate python version; include the micro version the original format
    # string accepted but never rendered
    if sys.version_info.major != 3 or sys.version_info.minor != 8:
        raise Exception(
            "Expected Python 3.8 as runtime but got {0}.{1}.{2}, the tool might not run as expected!\nSee README for stack requirements."
            .format(
                sys.version_info.major,
                sys.version_info.minor,
                sys.version_info.micro,
            ))

    # validate installed modules
    required = {
        "wheel",
        "networkx",
        "pandas",
        "matplotlib",
        "gitpython",
        "requests",
        "pyyaml",
        "progress",
        "strsimpy",
        "python-dateutil",
        "sentistrength",
        "joblib",
    }
    installed = set(pkg_resources.working_set.by_key)
    missing = required - installed
    if missing:
        raise Exception(
            "Missing required modules: {0}.\nSee README for tool installation."
            .format(missing))


def _setupSentiStrength(config):
    """Build a PySentiStr configured from the jar/data paths in *config*.

    Paths are normalized to forward slashes because SentiStrength is a Java
    tool that expects them even on Windows.
    """
    senti = sentistrength.PySentiStr()
    sentiJarPath = os.path.join(config.sentiStrengthPath,
                                "SentiStrength.jar").replace("\\", "/")
    senti.setSentiStrengthPath(sentiJarPath)
    sentiDataPath = os.path.join(config.sentiStrengthPath,
                                 "SentiStrength_Data").replace("\\", "/") + "/"
    senti.setSentiStrengthLanguageFolderPath(sentiDataPath)
    return senti