def load_and_save_full_data_for_filter(authed_jira, jira_filter, filter_name,
                                       load_full_data, cache_data):
    """Crawl a JIRA filter in parallel batches and optionally cache the result.

    Spawns `max_thread` threads per outer batch, each running `save_jira_task`
    which appends issues into the shared `result` list. When `cache_data` is
    true, the accumulated issues are serialized to
    <project_dir_path>/data/cache_data/<filter_name>.json.

    :param authed_jira: authenticated JIRA client passed through to the workers
    :param jira_filter: JQL filter string to crawl
    :param filter_name: used to name the cache file
    :param load_full_data: forwarded to each worker thread
    :param cache_data: when true, dump the collected issues to disk
    :return: list of issues collected by the worker threads
    """
    result = []
    # NOTE(review): outer batches advance by 10000 but each thread is offset
    # by j * 1000 — unless max_thread == 10 this leaves gaps or overlaps in
    # the startAt ranges. TODO confirm against save_jira_task's page size.
    for batch_start in range(0, max_result, 10000):
        workers = []
        for j in range(0, max_thread):
            workers.append(
                threading.Thread(
                    target=save_jira_task,
                    args=(
                        result,
                        authed_jira,
                        jira_filter,
                        batch_start + (j * 1000),
                        load_full_data,
                    )))
        for worker in workers:
            worker.start()
        # Wait for the whole batch before starting the next one.
        for worker in workers:
            worker.join()

    if cache_data:
        jira_data_file_with_path = str(
            project_dir_path) + "/data/cache_data/" + filter_name + ".json"
        log.info("Saving JIRA Data to - " + jira_data_file_with_path)
        cached = CachedIssues(result)
        # Fix: use a context manager so the file handle is always closed
        # (the original leaked the handle returned by open()).
        with open(jira_data_file_with_path, 'w') as dump_file:
            cached.dump(dump_file)
        log.info("Data Saved!")

    log.info("Total JIRA for filter = " + str(filter_name) + " -" +
             str(len(result)))
    return result
def another_function():
    """Fetch full issue data for every cached ticket and dump it to JSON.

    Connects to JIRA, resolves each ticket's full issue payload, collects
    them keyed by ticket id, and writes the mapping to
    <project_dir_path>/data/cache_data/cas_project_all_jira.json.
    """
    jira_auth = connect_to_jira()
    # Fix: must be a dict — the original initialized a list and then called
    # .update(...) on it, which raises AttributeError on the first ticket.
    my_dict = {}
    data = []
    # get all jira for filter
    for i, ticket in enumerate(data):
        jira_id = get_jira_id(ticket)
        ticket_full_data = jira_auth.issue(jira_id)
        my_dict.update({ticket.id: ticket_full_data})
        log.info("Added for :" + str(i) + " JIRA : " + jira_id)

    jira_data_file_with_path = str(
        project_dir_path) + "/data/cache_data/cas_project_all_jira.json"
    log.info("Saving JIRA Data to -> " + jira_data_file_with_path)
    cached = CachedIssues(my_dict)
    # Fix: context manager so the dump file handle is always closed.
    with open(jira_data_file_with_path, 'w') as dump_file:
        cached.dump(dump_file)
    log.info("Data Saved!")
def other_other_work():
    """Merge per-batch JIRA cache files into one detailed cache file.

    Loads cas_project_all_jira_<i>.json for i in 0..83000 step 1000,
    concatenates the issues (missing batches are skipped best-effort),
    and writes the combined list to cas_project_all_jira_detailed.json.
    """
    my_dict = []
    for i in range(0, 84000, 1000):
        # Best-effort: a missing/corrupt batch file is reported and skipped,
        # matching the original behavior.
        try:
            jira_data_file_with_path = str(
                project_dir_path
            ) + "/data/cache_data/cas_project_all_jira_" + str(i) + ".json"
            # Fix: close the batch file deterministically (was leaked).
            with open(jira_data_file_with_path) as batch_file:
                result = CachedIssues.load(batch_file)
            my_dict.extend(result)
        except Exception as e:
            print(e)

    jira_data_file_with_path = str(
        project_dir_path
    ) + "/data/cache_data/cas_project_all_jira_detailed.json"
    log.info("Saving JIRA Data to -> " + jira_data_file_with_path)
    cached = CachedIssues(my_dict)
    # Fix: context manager so the dump file handle is always closed.
    with open(jira_data_file_with_path, 'w') as dump_file:
        cached.dump(dump_file)
    log.info("Data Saved!")
def save_filter_jira(authed_jira, jira_filter, filter_name):
    """Page through all issues matching `jira_filter` and cache them to JSON.

    Fetches `resultStep` issues per request, starting at 0, until an empty
    page or `maxResultsToReturn` is reached, then dumps the collected issues
    to <project_dir_path>/data/cache_data/<filter_name>.json.

    :return: list of all fetched issues
    """
    log.info("Crawling the filter - " + jira_filter)
    filter_tickets = authed_jira.search_issues(jira_filter,
                                               startAt=0,
                                               maxResults=resultStep)
    # Fix: the next page starts at resultStep, not resultStep + 1 — the
    # original skipped the issue at index resultStep on every crawl.
    for i in range(resultStep, maxResultsToReturn, resultStep):
        log.info("Crawling result for i:" + str(i))
        # Fix: fetch one page (resultStep) per request — the original asked
        # for maxResultsToReturn issues per call while only advancing by
        # resultStep, duplicating almost every page.
        result = authed_jira.search_issues(jira_filter,
                                           startAt=i,
                                           maxResults=resultStep)
        if len(result) == 0:
            break
        filter_tickets.extend(result)

    # Fix: log.exception is for use inside an except block (it appends a
    # traceback); this is a plain progress message.
    log.info(filter_name)

    jira_data_file_with_path = str(
        project_dir_path) + "/data/cache_data/" + filter_name + ".json"
    log.info("Saving JIRA Data to -> " + jira_data_file_with_path)
    cached = CachedIssues(filter_tickets)
    # Fix: context manager so the dump file handle is always closed.
    with open(jira_data_file_with_path, 'w') as dump_file:
        cached.dump(dump_file)
    log.info("Data Saved!")
    return filter_tickets
def load_data_for_filter(authed_jira,
                         jira_filter,
                         filter_name,
                         refresh_filter_result=False,
                         load_full_data=False,
                         cache_data=False):
    """Return issues for a filter, either freshly crawled or from the cache.

    When `refresh_filter_result` is true, delegates to
    load_and_save_full_data_for_filter; otherwise reads the cached JSON at
    <project_dir_path>/data/cache_data/<filter_name>.json.

    :return: list of issues, or None on any failure (logged).
    """
    try:
        if refresh_filter_result:
            result = load_and_save_full_data_for_filter(
                authed_jira, jira_filter, filter_name, load_full_data,
                cache_data)
        else:
            jira_data_file_with_path = str(
                project_dir_path) + "/data/cache_data/" + filter_name + ".json"
            # Fix: close the cache file deterministically (was leaked).
            with open(jira_data_file_with_path) as cache_file:
                result = CachedIssues.load(cache_file)
        return result
    except Exception as e:
        # Deliberate catch-all boundary: any failure is logged with a
        # traceback and signalled to the caller as None.
        log.exception(e)
        return None
""" Fetched test data from public JIRA instance and stores it in JSON file. """ URL = 'https://jira.atlassian.com/' from jira import JIRA from jira_cache import CachedIssues jira = JIRA(URL) issues = jira.search_issues("text ~ Python", expand='changelog') cached = CachedIssues(issues) with open('issues.json', 'w') as fp: cached.dump(fp)
# initialize JIRA print("Initializing JIRA...") options = {'server': 'https://issues.apache.org/jira'} jira = JIRA(options=options) # load issues jira_issue_path = jira_path + 'issue_cache.json' if not os.path.isfile(jira_issue_path): try: ISSUES = jira.search_issues("project=" + repo_name.upper(), maxResults=False, fields="*all") print( "Issues loaded from server! Caching to file for later use...") cached = CachedIssues(ISSUES) cached.dump(open(jira_issue_path, 'w')) except JIRAError as e: if e.status_code == 429: print("Got 429 (rate-limited) response from server.") print(e.text) exit_msg = "Exiting " + time.asctime( time.localtime(time.time())) sys.exit(exit_msg) else: print(e.text) except Exception as e: print("Some other exception occured.") print(e) else:
# Script setup for an SZZ analysis run on the Apache Hive repo: wires up
# local imports, opens the git repo, connects to JIRA, and loads a cached
# issue snapshot; helper functions map commits to JIRA ids/issues.
# NOTE(review): `get_jira_issue` is cut off mid-body in this chunk.

sys.path.append(os.path.dirname(__file__))
import commit_features as cf
import time

output_dir_eszz = "../Enhanced_SZZ/outputs/hive/"
output_dir_szz = "../SZZ/outputs/hive/"
repo_dir = "../Enhanced_SZZ/repos/hive/"
repo_name = "hive"

print("Initializing repo...")
repo = Repo.init(repo_dir)

print("Initializing JIRA...")
options = {'server': 'https://issues.apache.org/jira'}
jira = JIRA(options=options)

print("Load cached jira issues...")
saved_jira_path = "../Enhanced_SZZ/jira/hive/issue_cache.json"
# NOTE(review): file handle from open() is never closed here.
ISSUES = CachedIssues.load(open(saved_jira_path))


def get_jira_id(commit):
    """Extract a JIRA id (e.g. HIVE-123 or HADOOP-123) from a commit message.

    Returns the id with a leading underscore normalized to a dash, or None
    when no id is found.
    """
    # NOTE(review): the class [-,_]{1} also matches a literal comma, so
    # "HIVE,123" would be accepted — presumably [-_] was intended; confirm.
    result = re.search(
        '(' + repo_name.upper() + '[-,_]{1}[0-9]+|HADOOP[-,_]{1}[0-9]+)',
        commit.message, re.IGNORECASE)
    if result is not None:
        return result.group(0).replace("_", "-", 1)
    else:
        return None


def get_jira_issue(commit):
    # Resolve the commit's JIRA id to a full issue (body continues beyond
    # this chunk).
    jira_id = get_jira_id(commit=commit)
    if jira_id is not None:
def test_initialisation_works_for_empty_list():
    """A CachedIssues built from an empty list compares equal to []."""
    empty_cache = CachedIssues([])
    assert [] == empty_cache
"""Fetch Python-related issues from the public Atlassian JIRA, cache them to
python-issues.json, and round-trip the dump to verify it loads back."""
import sys

sys.path.insert(0, '..')

from jira import JIRA
from jira_cache import CachedIssues

print('Connecting to https://jira.atlassian.com...')
jira = JIRA('https://jira.atlassian.com/')
result = jira.search_issues('project=JRA and text ~ "Python"',
                            expand='changelog')

print('Caching %i issues...' % len(result))
cached = CachedIssues(result)
# Fix: context managers so both file handles are always closed
# (the original passed open() inline and leaked both handles).
with open('python-issues.json', 'w') as dump_file:
    cached.dump(dump_file)

print('Loading issues from dump...')
with open('python-issues.json') as load_file:
    print(CachedIssues.load(load_file))