# Excerpt: a method of an ast.NodeVisitor subclass; relies on anytree's
# Node/PreOrderIter plus the search_github/get_url_content helpers.
def retrieve_and_search(self):
    # Descend self._round levels into the forest of traced functions to
    # collect this round's frontier of nodes.
    search_round = 0
    end = self.all_fct_name
    while search_round < self._round:
        intermediate = []
        for elements in end:
            for child in elements.children:
                intermediate.append(child)
        search_round += 1
        end = intermediate
    print_writeofd("Round {}, beginning tracing functions: ".format(self._round) + print_node_list(end), self.ofd)
    for elements in end:
        self.fctname_thisround = []
        self.fct_inspect = elements.name[0]
        self.fct_inspect_url = elements.name[1]
        url_list = search_github(self.fct_inspect, self.repo, self.ofd)
        time.sleep(SLEEP_TIME)
        if url_list is None:  # search_github reports failures as None
            continue
        appear_url_list = list(url_list)  # copy, so removals below don't alias url_list
        to_delete_list = []
        for item in url_list:
            urlcontent = get_url_content(item, self.ofd)
            if urlcontent is None:
                continue
            p = add_backward_links(urlcontent)
            if p is None:
                continue
            self.url_inspect = item
            if item == self.fct_inspect_url and self.manual_control_import:
                print_writeofd("Info: importation file and to-be-imported function are in the same file; importation check fulfilled", self.ofd)
            elif item != self.fct_inspect_url and self.manual_control_import:
                # Check that this file actually imports the function under inspection.
                self.checking_import = True
                self.import_true = False
                self.generic_visit(p)
                self.checking_import = False
                if not self.import_true:
                    print_writeofd("Info: Importation check failed: function [{}] at url [{}], importation occurs in [{}]".format(self.fct_inspect, slice_url(self.fct_inspect_url), slice_url(item)), self.ofd)
                    to_delete_list.append(item)
                    continue
                else:
                    print_writeofd("Info: Importation check succeeded", self.ofd)
            self.generic_visit(p)
        # Attach each caller found this round as a child of the current node;
        # anytree links the Node via parent=, so no handle to it is needed.
        for searched in self.fctname_thisround:
            Node((searched[0], searched[1], []), parent=elements)
        print_writeofd("Traced functions {} based on [{}]".format(self.fctname_thisround, self.fct_inspect), self.ofd)
        for i in to_delete_list:
            appear_url_list.remove(i)
        print_writeofd("This function [{}] appears in {}".format(self.fct_inspect, slice_list(appear_url_list)), self.ofd)
        # Record the appearance URLs on every copy of this node in the forest.
        for tree in self.all_fct_name:
            for members in PreOrderIter(tree):
                if members.name == (elements.name[0], elements.name[1], []):
                    members.name = (elements.name[0], elements.name[1], appear_url_list)
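
# --- Hedged usage sketch (not from the original source) ---------------------
# The method above keeps a forest of anytree Nodes whose .name is a tuple
# (function_name, defining_url, appear_url_list); after each round, every
# copy of a traced node gets its appearance list filled in. The snippet
# below only illustrates that bookkeeping; all names and URLs are made up.
from anytree import Node, PreOrderIter

root = Node(("entry_fn", "https://github.com/owner/repo/a.py", []))
Node(("helper_fn", "https://github.com/owner/repo/b.py", []), parent=root)

appear_urls = ["https://github.com/owner/repo/a.py"]  # stand-in search result
for member in PreOrderIter(root):
    if member.name == ("helper_fn", "https://github.com/owner/repo/b.py", []):
        member.name = member.name[:2] + (appear_urls,)

print([n.name for n in PreOrderIter(root)])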
if __name__ == "__main__":
    for line in infd.readlines():
        functionnames = []
        print_writeofd("=================================================", ofd)
        url = line.split()[1]
        repo = re.sub('https://github.com/', '', url)
        repo = re.sub('\n', '', repo)
        print_writeofd(url, ofd)
        # First pass: does the repository use async at all?
        url_list = []
        for keyword in ASYNC_KEYWORD_LIST_EXIST:
            return_value = search_github(keyword, repo, ofd)
            if return_value:  # skip None and empty results
                url_list.extend(return_value)
        if not url_list:
            print_writeofd("No use of async", ofd)
            continue
        # Second pass: collect the files matching the async keywords proper.
        url_list_1 = []
        for keyword in ASYNC_KEYWORD_LIST:
            return_value = search_github(keyword, repo, ofd)
            if return_value:
                url_list_1.extend(return_value)
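
# --- Hedged sketch of a search_github-style helper (assumption, not the -----
# --- project's actual implementation) ---------------------------------------
# The loops above call search_github(keyword, repo, ofd) and tolerate None;
# a minimal version on GitHub's code-search REST endpoint might look like
# this (authentication and pagination omitted for brevity):
import time
import requests

def search_github_sketch(keyword, repo, ofd, sleep_s=6):
    resp = requests.get(
        "https://api.github.com/search/code",
        params={"q": "{} repo:{}".format(keyword, repo)},
        headers={"Accept": "application/vnd.github+json"},
    )
    time.sleep(sleep_s)  # stay under the code-search rate limit
    if resp.status_code != 200:
        ofd.write("search failed for {}: {}\n".format(keyword, resp.status_code))
        return None
    return [item["html_url"] for item in resp.json().get("items", [])]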
        return content

if __name__ == "__main__":
    for line in infd.readlines():
        try:
            print_writeofd("=================================================", ofd)
            url = line.split()[1]
            repo = re.sub('https://github.com/', '', url)
            repo = re.sub('\n', '', repo)
            print_writeofd(url, ofd)
            url_list = []
            for keyword in KEYWORD1:
                int_url_list = search_github(keyword, repo, ofd)
                if int_url_list is not None:
                    url_list.extend(int_url_list)
            if not url_list:
                continue
            at_least_one_file = False
            w_all_fct_name = []
            for url in url_list:
                urlcontent = get_url_content(url, ofd)
                if urlcontent is None:
                    continue
                p = add_backward_links(urlcontent)
                if p is None:
                    continue
                for keyword in KEYWORD1:
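
# --- Hedged sketch of add_backward_links (assumption) -----------------------
# Judging from its name and from how p is handed to an AST visitor above,
# add_backward_links likely parses the fetched file and threads a parent
# pointer through the AST, so a visitor can walk upward from a call site to
# its enclosing definition. A minimal reconstruction:
import ast

def add_backward_links_sketch(source):
    try:
        tree = ast.parse(source)
    except SyntaxError:
        return None  # unparseable file; callers skip it, as above
    for node in ast.walk(tree):
        for child in ast.iter_child_nodes(node):
            child.parent = node  # backward (child -> parent) link
    return tree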
        fd.close()
        os.remove(os.path.join(PWD, CACHE_FILE_NAME))
        return content

if __name__ == "__main__":
    for line in infd.readlines():
        try:
            print_writeofd("=================================================", ofd)
            url = line.split()[1]
            repo = re.sub('https://github.com/', '', url)
            repo = re.sub('\n', '', repo)
            print_writeofd(url, ofd)
            url_list = search_github(KEYWORD1, repo, ofd)
            if not url_list:
                continue
            at_least_one_file = False
            w_all_fct_name = []
            for url in url_list:
                urlcontent = get_url_content(url, ofd)
                if urlcontent is None:
                    continue
                p = add_backward_links(urlcontent)
                if p is None:
                    continue
                v = RetraceFixedInput()
                v.ofd = ofd
                v.entire_tree = p
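
# --- Hedged sketch of get_url_content (assumption) --------------------------
# The trailing lines of the function above (fd.close(), os.remove(...),
# return content) suggest it downloads a GitHub blob into a temporary cache
# file, reads it back, deletes the cache, and returns the text. A minimal
# reconstruction; the raw-URL rewrite and both constants are assumptions:
import os
import requests

PWD = os.path.dirname(os.path.abspath(__file__))
CACHE_FILE_NAME = "url_cache.tmp"  # hypothetical name

def get_url_content_sketch(url, ofd):
    raw = url.replace("github.com", "raw.githubusercontent.com").replace("/blob/", "/")
    resp = requests.get(raw)
    if resp.status_code != 200:
        ofd.write("could not fetch {}\n".format(url))
        return None
    path = os.path.join(PWD, CACHE_FILE_NAME)
    with open(path, "w") as fd:
        fd.write(resp.text)
    with open(path) as fd:
        content = fd.read()
    os.remove(path)
    return content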
        return True
    return False

if __name__ == "__main__":
    for line in infd.readlines():
        functionnames = []
        print_writeofd("=================================================", ofd)
        url = line.split()[1]
        repo = re.sub('https://github.com/', '', url)
        repo = re.sub('\n', '', repo)
        print_writeofd(url, ofd)
        url_list = search_github(ASYNC_START, repo, ofd)
        if not url_list:
            print_writeofd("No use of async", ofd)
            continue
        v = search_one_repo(remove_list_dup(url_list), ofd)
        if v and functionnames == []:
            print_writeofd("No use of parallelism - no function", ofd)
            continue
        # total_url_list holds the urls returned for the parallel-API keywords
        total_url_list = []
        for key in KEYWORD:
            url_list_2 = search_github(key, repo, ofd)
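
# --- Hedged sketch of remove_list_dup (assumption) --------------------------
# The call above presumably drops duplicate search hits while preserving
# their first-seen order; a minimal version:
def remove_list_dup_sketch(items):
    seen = set()
    out = []
    for item in items:
        if item not in seen:
            seen.add(item)
            out.append(item)
    return out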