def test_det_patched_files(self):
    """Test det_patched_files function."""
    test_dir = os.path.dirname(__file__)
    patch_file = os.path.join(test_dir, 'sandbox', 'sources', 'toy', 'toy-0.0_typo.patch')
    # by default the 'a/' or 'b/' prefix from the diff headers is retained
    self.assertEqual(ft.det_patched_files(patch_file), ['b/toy-0.0/toy.source'])
    # omit_ab_prefix=True strips that prefix from the reported paths
    self.assertEqual(ft.det_patched_files(patch_file, omit_ab_prefix=True), ['toy-0.0/toy.source'])
def fetch_easyconfigs_from_pr(pr, path=None, github_user=None):
    """
    Fetch patched easyconfig files for a particular PR.

    :param pr: number of the pull request in the easyconfigs repository
    :param path: directory to download the files into; falls back to the
                 'pr_path' build option, and finally to a fresh temporary directory
    :param github_user: GitHub user name; falls back to the 'github_user' build option
    :return: list of paths to the downloaded easyconfig files
    :raises EasyBuildError: on failed API requests, on PRs with more than
                            GITHUB_MAX_PER_PAGE commits, or when not all patched
                            files could be downloaded
    """
    if github_user is None:
        github_user = build_option('github_user')

    if path is None:
        path = build_option('pr_path')
    if path is None:
        path = tempfile.mkdtemp()
    else:
        # make sure path exists, create it if necessary
        mkdir(path, parents=True)

    _log.debug("Fetching easyconfigs from PR #%s into %s" % (pr, path))
    pr_url = lambda g: g.repos[GITHUB_EB_MAIN][GITHUB_EASYCONFIGS_REPO].pulls[pr]
    status, pr_data = github_api_get_request(pr_url, github_user)
    if not status == HTTP_STATUS_OK:
        raise EasyBuildError("Failed to get data for PR #%d from %s/%s (status: %d %s)",
                             pr, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, status, pr_data)

    # 'clean' on successful (or missing) test, 'unstable' on failed tests
    stable = pr_data['mergeable_state'] == GITHUB_MERGEABLE_STATE_CLEAN
    if not stable:
        _log.warning("Mergeable state for PR #%d is not '%s': %s.",
                     pr, GITHUB_MERGEABLE_STATE_CLEAN, pr_data['mergeable_state'])

    for key, val in sorted(pr_data.items()):
        _log.debug("\n%s:\n\n%s\n" % (key, val))

    # determine list of changed files via diff
    diff_fn = os.path.basename(pr_data['diff_url'])
    diff_filepath = os.path.join(path, diff_fn)
    download_file(diff_fn, pr_data['diff_url'], diff_filepath, forced=True)
    diff_txt = read_file(diff_filepath)
    # the diff is only needed to derive the list of patched files, clean it up right away
    os.remove(diff_filepath)

    patched_files = det_patched_files(txt=diff_txt, omit_ab_prefix=True, github=True)
    _log.debug("List of patched files: %s" % patched_files)

    # obtain last commit
    # get all commits, increase to (max of) 100 per page
    if pr_data['commits'] > GITHUB_MAX_PER_PAGE:
        raise EasyBuildError("PR #%s contains more than %s commits, can't obtain last commit",
                             pr, GITHUB_MAX_PER_PAGE)
    status, commits_data = github_api_get_request(lambda g: pr_url(g).commits, github_user,
                                                  per_page=GITHUB_MAX_PER_PAGE)
    # bug fix: the status of the commits request was previously never checked, so a
    # failed request could make the code below operate on an error payload
    if not status == HTTP_STATUS_OK:
        raise EasyBuildError("Failed to get commits for PR #%d from %s/%s (status: %d %s)",
                             pr, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, status, commits_data)
    last_commit = commits_data[-1]
    _log.debug("Commits: %s, last commit: %s" % (commits_data, last_commit['sha']))

    # obtain most recent version of patched files
    for patched_file in patched_files:
        fn = os.path.basename(patched_file)
        sha = last_commit['sha']
        full_url = URL_SEPARATOR.join([GITHUB_RAW, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, sha, patched_file])
        _log.info("Downloading %s from %s" % (fn, full_url))
        download_file(fn, full_url, path=os.path.join(path, fn), forced=True)

    # sanity check: every patched file must have ended up in the download dir
    all_files = [os.path.basename(x) for x in patched_files]
    tmp_files = os.listdir(path)
    if not sorted(tmp_files) == sorted(all_files):
        raise EasyBuildError("Not all patched files were downloaded to %s: %s vs %s",
                             path, tmp_files, all_files)

    ec_files = [os.path.join(path, filename) for filename in tmp_files]

    return ec_files
# NOTE(review): fragment of an older revision of fetch_easyconfigs_from_pr (uses download() and
# pr_url.commits.get()); it starts mid-statement and its enclosing 'def' is not visible here, so
# the code is left untouched. TODO: confirm whether this duplicate/stale copy should be removed.
"Failed to get data for PR #%d from %s/%s (status: %d %s)", pr, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, status, pr_data) # 'clean' on successful (or missing) test, 'unstable' on failed tests stable = pr_data['mergeable_state'] == GITHUB_MERGEABLE_STATE_CLEAN if not stable: _log.warning("Mergeable state for PR #%d is not '%s': %s.", pr, GITHUB_MERGEABLE_STATE_CLEAN, pr_data['mergeable_state']) for key, val in sorted(pr_data.items()): _log.debug("\n%s:\n\n%s\n" % (key, val)) # determine list of changed files via diff diff_txt = download(pr_data['diff_url']) patched_files = det_patched_files(txt=diff_txt, omit_ab_prefix=True) _log.debug("List of patched files: %s" % patched_files) # obtain last commit # get all commits, increase to (max of) 100 per page if pr_data['commits'] > GITHUB_MAX_PER_PAGE: raise EasyBuildError( "PR #%s contains more than %s commits, can't obtain last commit", pr, GITHUB_MAX_PER_PAGE) status, commits_data = pr_url.commits.get(per_page=GITHUB_MAX_PER_PAGE) last_commit = commits_data[-1] _log.debug("Commits: %s, last commit: %s" % (commits_data, last_commit['sha'])) # obtain most recent version of patched files for patched_file in patched_files:
def fetch_easyconfigs_from_pr(pr, path=None, github_user=None):
    """Fetch patched easyconfig files for a particular PR."""
    if github_user is None:
        github_user = build_option('github_user')

    if path is None:
        path = build_option('pr_path')

    if path is None:
        path = tempfile.mkdtemp()
    else:
        # make sure path exists, create it if necessary
        mkdir(path, parents=True)

    _log.debug("Fetching easyconfigs from PR #%s into %s" % (pr, path))

    # callable that resolves the pull request endpoint on a GitHub API object
    pr_endpoint = lambda g: g.repos[GITHUB_EB_MAIN][GITHUB_EASYCONFIGS_REPO].pulls[pr]
    status, pr_data = github_api_get_request(pr_endpoint, github_user)
    if status != HTTP_STATUS_OK:
        raise EasyBuildError("Failed to get data for PR #%d from %s/%s (status: %d %s)",
                             pr, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, status, pr_data)

    # 'clean' on successful (or missing) test, 'unstable' on failed tests
    stable = pr_data['mergeable_state'] == GITHUB_MERGEABLE_STATE_CLEAN
    if not stable:
        _log.warning("Mergeable state for PR #%d is not '%s': %s.",
                     pr, GITHUB_MERGEABLE_STATE_CLEAN, pr_data['mergeable_state'])

    for key, val in sorted(pr_data.items()):
        _log.debug("\n%s:\n\n%s\n" % (key, val))

    # determine list of changed files via diff
    diff_name = os.path.basename(pr_data['diff_url'])
    diff_path = os.path.join(path, diff_name)
    download_file(diff_name, pr_data['diff_url'], diff_path, forced=True)
    diff_txt = read_file(diff_path)
    os.remove(diff_path)

    patched_files = det_patched_files(txt=diff_txt, omit_ab_prefix=True)
    _log.debug("List of patched files: %s" % patched_files)

    # obtain last commit
    # get all commits, increase to (max of) 100 per page
    if pr_data['commits'] > GITHUB_MAX_PER_PAGE:
        raise EasyBuildError("PR #%s contains more than %s commits, can't obtain last commit",
                             pr, GITHUB_MAX_PER_PAGE)
    status, commits_data = github_api_get_request(lambda g: pr_endpoint(g).commits, github_user,
                                                  per_page=GITHUB_MAX_PER_PAGE)
    last_commit = commits_data[-1]
    _log.debug("Commits: %s, last commit: %s" % (commits_data, last_commit['sha']))

    # obtain most recent version of patched files
    sha = last_commit['sha']
    for patched_file in patched_files:
        fn = os.path.basename(patched_file)
        full_url = URL_SEPARATOR.join([GITHUB_RAW, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, sha, patched_file])
        _log.info("Downloading %s from %s" % (fn, full_url))
        download_file(fn, full_url, path=os.path.join(path, fn), forced=True)

    expected = [os.path.basename(x) for x in patched_files]
    downloaded = os.listdir(path)
    if sorted(downloaded) != sorted(expected):
        raise EasyBuildError("Not all patched files were downloaded to %s: %s vs %s",
                             path, downloaded, expected)

    return [os.path.join(path, fn) for fn in downloaded]
# NOTE(review): fragment of an even older revision of fetch_easyconfigs_from_pr (logs the API
# failure via _log.error instead of raising, uses download() and pr_url.commits.get()); it starts
# mid-function and its enclosing 'def' is not visible here, so the code is left untouched.
# TODO: confirm whether this duplicate/stale copy should be removed.
tup = (pr, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, status, pr_data) _log.error("Failed to get data for PR #%d from %s/%s (status: %d %s)" % tup) # 'clean' on successful (or missing) test, 'unstable' on failed tests stable = pr_data['mergeable_state'] == GITHUB_MERGEABLE_STATE_CLEAN if not stable: tup = (pr, GITHUB_MERGEABLE_STATE_CLEAN, pr_data['mergeable_state']) _log.warning("Mergeable state for PR #%d is not '%s': %s." % tup) for key, val in sorted(pr_data.items()): _log.debug("\n%s:\n\n%s\n" % (key, val)) # determine list of changed files via diff diff_txt = download(pr_data['diff_url']) patched_files = det_patched_files(txt=diff_txt, omit_ab_prefix=True) _log.debug("List of patches files: %s" % patched_files) # obtain last commit status, commits_data = pr_url.commits.get() last_commit = commits_data[-1] _log.debug("Commits: %s" % commits_data) # obtain most recent version of patched files for patched_file in patched_files: fn = os.path.basename(patched_file) sha = last_commit['sha'] full_url = URL_SEPARATOR.join([GITHUB_RAW, GITHUB_EB_MAIN, GITHUB_EASYCONFIGS_REPO, sha, patched_file]) _log.info("Downloading %s from %s" % (fn, full_url)) download(full_url, path=os.path.join(path, fn))
def test_det_patched_files(self):
    """Test det_patched_files function."""
    base_dir = os.path.dirname(__file__)
    path_to_patch = os.path.join(base_dir, "sandbox", "sources", "toy", "toy-0.0_typo.patch")
    # default behaviour keeps the 'a/'/'b/' diff prefix
    res = ft.det_patched_files(path_to_patch)
    self.assertEqual(res, ["b/toy-0.0/toy.source"])
    # omit_ab_prefix=True drops the prefix
    res = ft.det_patched_files(path_to_patch, omit_ab_prefix=True)
    self.assertEqual(res, ["toy-0.0/toy.source"])