def computeSubScore(self, gene, program, err="", exec_time=0):
    """Compute a fitness sub-score for a generated JavaScript *program*.

    The base score rewards fast programs relative to their token count,
    adds weighted bonuses for configured non-terminals found in the
    gene's syntax tree and for compiler warnings in *err*, then divides
    by a smoothed average cyclomatic complexity.

    Returns a ``(score, token_count)`` tuple; ``token_count`` stays 1
    when analysis fails so callers can divide by it safely.

    Fixes: removed the unused ``ti = time()`` assignment and the dead
    ``pass``; log the exception with traceback via ``logging.exception``.
    """
    logging.debug("computeSubScore started")
    pLen = 1
    score = 0.0
    try:
        info = analyze_file.analyze_source_code("test.js", program)
        pLen = info.token_count
        # Base score: penalize execution time relative to program length.
        score = -float(exec_time) / pLen + 1
        # Start at 1 so the total stays positive even with no functions.
        cycloMetricComplexity = 1
        for function in info.function_list:
            cycloMetricComplexity += function.cyclomatic_complexity
        # The +1 on both the list length and the quotient mirrors the
        # original smoothing and keeps the divisor >= 1.
        avgCycloMetricComplx = (
            cycloMetricComplexity / (len(info.function_list) + 1) + 1
        )
        # Weighted bonus per occurrence of each tracked non-terminal.
        nonTerminalsMeticsInfo = CountNestedStructures(
            gene.syntaxTree, self.metricNonTerm.keys())
        for nonTerm, occurrences in nonTerminalsMeticsInfo.items():
            for weight in occurrences:
                score += weight * self.metricNonTerm[nonTerm]
        if "warning" in err:
            logging.debug("warning found: " + err)
            score += 10
        score = score / avgCycloMetricComplx
    except Exception:
        # Best-effort scoring: keep whatever was computed before the failure.
        logging.exception("computeSubScore-exception:")
    logging.debug("computeSubScore completed")
    return score, pLen
async def analyseform(request: Request,
                      language: str = Form(...),
                      code: str = Form(...)):
    """Analyze code submitted via the HTML form and render the results page.

    Fix: read ``nloc`` and ``token_count`` as plain attributes instead of
    reaching through ``__dict__`` — same values, idiomatic access.
    """
    filename: str = get_filename(language)
    analysis = analyze_file.analyze_source_code(filename, code)
    # No detected functions -> the template receives None instead of [].
    functions = (None if len(analysis.function_list) == 0
                 else get_function_attrs(analysis.function_list))
    return templates.TemplateResponse(
        "results.j2",
        {
            "request": request,
            "code": code,
            "rows": {
                "Average Cyclomatic Complexity":
                    analysis.average_cyclomatic_complexity,
                "Lines of Code": analysis.nloc,
                "Average Token Count": analysis.token_count,
            },
            "functions": functions,
        },
    )
def get_csharp_fileinfo(source_code):
    """Return the lizard FileInfo for *source_code* parsed as C#."""
    filename = "a.cs"
    return analyze_file.analyze_source_code(filename, source_code)
def get_php_function_list(source_code):
    """Parse *source_code* as PHP and return the detected function list."""
    analysis = analyze_file.analyze_source_code("a.php", source_code)
    return analysis.function_list
def get_ttcn_function_list(source_code):
    """Parse *source_code* as TTCN-3 and return the detected function list."""
    result = analyze_file.analyze_source_code("a.ttcn", source_code)
    return result.function_list
def get_swift_function_list(source_code):
    """Parse *source_code* as Swift and return the detected function list."""
    analysis = analyze_file.analyze_source_code("a.swift", source_code)
    return analysis.function_list
def get_rust_fileinfo(source_code):
    """Return the lizard FileInfo for *source_code* parsed as Rust."""
    filename = "a.rs"
    return analyze_file.analyze_source_code(filename, source_code)
def get_go_function_list(source_code):
    """Parse *source_code* as Go and return the detected function list."""
    analysis = analyze_file.analyze_source_code("a.go", source_code)
    return analysis.function_list
def post(self):
    """Analyze the source code in the JSON request body and respond with
    the serialized lizard FileInfo as JSON."""
    payload = json.loads(self.request.body)
    language = payload.get("lang", ".c")
    source = payload.get("code", "")
    file_info = analyze_file.analyze_source_code(language, source)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.write(json.dumps(file_info, cls=FileInfoEncoder))
def get_swift_function_list(source_code):
    """Run lizard on *source_code* as Swift and return its function list."""
    info = analyze_file.analyze_source_code("a.swift", source_code)
    return info.function_list
async def analyse(scs: SourceCodeString):
    """Analyze the posted source lines and return the metrics as a dict."""
    joined_code: str = '\n'.join(scs.code)
    target_name: str = get_filename(scs.language)
    result = analyze_file.analyze_source_code(target_name, joined_code)
    return result.__dict__
from lizard import analyze_file

# Demo: analyze a minimal C++ snippet and dump the resulting metrics.
# Fix: converted Python 2 `print` statements (syntax errors under
# Python 3) to print() calls, and iterate the function list directly
# instead of indexing via range(len(...)).
i = analyze_file.analyze_source_code("AllTests.cpp", "int foo(){}")
print(i.__dict__)
print(i.token_count)
# Report the cyclomatic complexity of each detected function.
for function in i.function_list:
    print(function.cyclomatic_complexity)
def analyse():
    """Render the index page with lizard metrics for the submitted C++ code."""
    submitted = request.form['content']
    metrics = analyze_file.analyze_source_code("a.cpp", submitted)
    return render_template('index.html', info=metrics)
def get_fortran_fileinfo(source_code):
    """Return the lizard FileInfo for *source_code* parsed as Fortran 90."""
    filename = 'a.f90'
    return analyze_file.analyze_source_code(filename, source_code)
def compileAuthors(self):
    """Mine all repos in the repo list for commits by those in authors.

    When ``self.authorsToMine`` is empty/None, commits by all authors
    are mined. Updates ``self.authors``, ``self.aliases``,
    ``self.commits`` and ``self.minedRepos`` as a side effect, and
    finally replaces ``self.repos`` with the set of mined repos.
    """
    print("mining " + self.name)
    # Only visit repos not already processed in a previous run.
    reposToMine = []
    for repo in self.repos:
        if repo not in self.minedRepos:
            reposToMine.append(repo)
    for repo in reposToMine:
        try:
            if not os.path.exists(repo + "/.git"):  # in case the repo is one level down
                repo = repo + "/" + os.listdir(repo)[0]
                #print("moved to "+repo)
            if repo in self.minedRepos:
                continue
            self.minedRepos.add(repo)
            #if not os.path.exists(repo+".git"):
            #    repo = os.listdir(repo)[0]
            remote = self.get_remote(repo)
            created = remote.created_at
            if self.authorsToMine:
                # Restrict mining to commits by the requested authors that
                # have not been seen before.
                commitsToMine = []
                for authorName in self.authorsToMine:
                    for commit in remote.get_commits(author=authorName):
                        if commit.sha not in self.commits:
                            commitsToMine.append(commit.sha)
                if not commitsToMine:
                    print("No important commits here, skipping " + repo)
                    continue
                miner = pydriller.repository_mining.RepositoryMining(
                    repo,
                    only_modifications_with_file_types=gitProfileSet.langList,
                    only_no_merge=True,
                    only_commits=commitsToMine,
                    since=created)
            else:
                miner = pydriller.repository_mining.RepositoryMining(
                    repo,
                    only_modifications_with_file_types=gitProfileSet.langList,
                    only_no_merge=True,
                    since=created)
            repository = pydriller.GitRepository(repo)
            print("Scanning repo: " + miner._path_to_repo)
            for commit in tqdm(miner.traverse_commits()):
                try:
                    author = commit.author
                    if author.name not in self.aliases:
                        # First time this git author name is seen: resolve it
                        # to a GitHub login via the remote commit metadata.
                        ghCommit = remote.get_commit(commit.hash)
                        namedUser = ghCommit.author
                        if not namedUser:
                            continue
                        if namedUser.login not in self.authors:
                            self.authors[namedUser.login] = gitAuthor(
                                namedUser)
                        self.aliases[author.name] = namedUser.login
                    author = self.authors[self.aliases[author.name]]
                    if self.authorsToMine and author.name not in self.authorsToMine:
                        continue
                    if commit.hash in author.commits or commit.hash in self.commits:
                        continue  # don't reprocess seen hashes
                    self.commits.add(commit.hash)
                    author.commits.add(commit.hash)
                    if repo not in author.repos:
                        author.repos.add(repo)
                    for mod in commit.modifications:
                        mod._calculate_metrics()
                        # Skip files whose extension is not in the language
                        # allow-list.
                        if mod.new_path is None or not mod.new_path.split(
                                ".")[-1] in gitProfileSet.langList:
                            continue
                        author.files.add(mod.new_path)
                        # parse diff and add lines to list
                        newSC = list()
                        leDiff = repository.parse_diff(mod.diff)
                        for num, line in leDiff["added"]:
                            newSC.append(line)
                        from lizard import analyze_file as liz
                        fileInfo = liz.analyze_source_code(
                            mod.new_path, "\n".join(newSC))
                        # Maintain list of dicts containing the source code of
                        # specific functions. Same format as for lines.
                        # lineIndex is a cursor into leDiff["added"], shared
                        # across the function loop (functions assumed ordered
                        # by line number).
                        lineIndex = 0
                        for fun in fileInfo.function_list:
                            # Make sure these appear in the "function":
                            # require ") {" to show the argument list ended.
                            arg_list_termination = r"\)\s*{"
                            started = False
                            newFun = dict()
                            lineStr = ""
                            try:
                                # Advance past added lines before the function.
                                while (leDiff["added"][lineIndex][0] <
                                       fun.start_line):
                                    lineIndex += 1
                                # Collect the function's added lines, keyed by
                                # (commit hash, path, line number).
                                while (leDiff["added"][lineIndex][0] <
                                       fun.end_line + 1):
                                    last_lineStr = lineStr
                                    lineStr = leDiff["added"][lineIndex][1]
                                    if not started and re.search(
                                            arg_list_termination,
                                            "".join([lineStr, last_lineStr])):
                                        started = True
                                    newFun.update({
                                        (commit.hash, mod.new_path,
                                         leDiff["added"][lineIndex][0]):
                                        lineStr
                                    })
                                    lineIndex += 1
                            except IndexError:
                                # If end of input reached before end of
                                # functions. This is probable when
                                # non-complete functions are submitted.
                                pass
                            # NOTE(review): last_lineStr is only bound inside
                            # the second while loop; if that loop never ran
                            # this line would raise NameError (caught by the
                            # per-commit except below) — confirm intended.
                            if started and len(
                                    newFun) > 1 and '}' in lineStr + last_lineStr:
                                author.lines.update(newFun)
                                author.functions.append(
                                    self.functionToString(newFun))
                except Exception as e:
                    # Best-effort per-commit processing: skip on any failure.
                    continue
        except Exception as e:
            print("problem processing " + repo)
            continue
        except KeyboardInterrupt:
            # Reachable despite the clause above: KeyboardInterrupt is not an
            # Exception subclass. Skip this repo and keep mining the rest.
            print("continuing")
            continue
        self.minedRepos.add(repo)
        print(str("finished" + str(miner._path_to_repo)))
    print(self)
    self.repos = self.minedRepos
def get_java_fileinfo(source_code):
    """Return the lizard FileInfo for *source_code* parsed as Java."""
    filename = "a.java"
    return analyze_file.analyze_source_code(filename, source_code)
def get_scala_function_list(source_code):
    """Parse *source_code* as Scala and return the detected function list."""
    analysis = analyze_file.analyze_source_code("a.scala", source_code)
    return analysis.function_list
def create_objc_lizard(self, source_code):
    """Analyze *source_code* as Objective-C and return its function list."""
    result = analyze_file.analyze_source_code("a.m", source_code)
    return result.function_list
def get_cpp_function_list_with_fans_extension(file_path, source_code):
    """Analyze *source_code* under *file_path* and return the function list."""
    analysis = analyze_file.analyze_source_code(file_path, source_code)
    return analysis.function_list
def get_scala_function_list(source_code):
    """Run lizard on *source_code* as Scala and return its function list."""
    info = analyze_file.analyze_source_code("a.scala", source_code)
    return info.function_list
def get_go_function_list(source_code):
    """Run lizard on *source_code* as Go and return its function list."""
    info = analyze_file.analyze_source_code("a.go", source_code)
    return info.function_list
def get_js_function_list(source_code):
    """Parse *source_code* as JavaScript and return the function list."""
    analysis = analyze_file.analyze_source_code("a.js", source_code)
    return analysis.function_list
def get_ruby_function_list(source_code):
    """Parse *source_code* as Ruby and return the detected function list."""
    analysis = analyze_file.analyze_source_code("a.rb", source_code)
    return analysis.function_list
def get_kotlin_function_list(source_code):
    """Parse *source_code* as Kotlin and return the detected function list."""
    analysis = analyze_file.analyze_source_code("a.kt", source_code)
    return analysis.function_list
def get_cpp_fileinfo(source_code):
    """Return the lizard FileInfo for *source_code* parsed as C++."""
    filename = "a.cpp"
    return analyze_file.analyze_source_code(filename, source_code)
def get_ts_function_list(source_code):
    """Parse *source_code* as TypeScript and return the function list."""
    analysis = analyze_file.analyze_source_code("a.ts", source_code)
    return analysis.function_list
def post(self):
    """Decode the JSON request, run lizard on the supplied code, and write
    the FileInfo back as a JSON response."""
    body = json.loads(self.request.body)
    file_info = analyze_file.analyze_source_code(
        body.get("lang", ".c"),
        body.get("code", ""),
    )
    self.response.headers['Content-Type'] = 'application/json'
    self.response.write(json.dumps(file_info, cls=FileInfoEncoder))
def get_gdscript_function_list(source_code):
    """Parse *source_code* as GDScript and return the function list."""
    analysis = analyze_file.analyze_source_code("a.gd", source_code)
    return analysis.function_list
def analyse():
    """Analyze the posted C++ content and render it on the index page."""
    metrics = analyze_file.analyze_source_code("a.cpp",
                                               request.form['content'])
    return render_template('index.html', info=metrics)