def test_emptyMergeWorkflow(self):
    """A review whose changes already landed via another branch goes bad.

    The same NEWFILE is landed twice from two branches; the second review
    has nothing left to merge and is expected to be marked bad.  Deleting
    the second review branch resolves it without sending any email.
    """
    # keep a copy of the original branch point so we can come back to it
    self._devCheckoutPushNewBranch("temp/emptyMerge/master")
    self._devPushNewFile("NEWFILE")
    self._phabUpdateWithExpectations(total=0, bad=0)

    # move back to master and land a conflicting change
    with phlsys_fs.chdir_context("developer"):
        runCommands("git checkout master")
    self._devCheckoutPushNewBranch("ph-review/emptyMerge/master")
    self._devPushNewFile("NEWFILE")
    self._phabUpdateWithExpectations(total=1, bad=0)
    self._acceptTheOnlyReview()
    self._phabUpdateWithExpectations(total=0, bad=0)

    # move back to original and try to push and land
    with phlsys_fs.chdir_context("developer"):
        runCommands("git checkout temp/emptyMerge/master")
    self._devCheckoutPushNewBranch("ph-review/emptyMerge2/master")
    self._phabUpdateWithExpectations(total=1, bad=0)
    self._acceptTheOnlyReview()
    # the accepted review cannot land (nothing to merge), so it goes bad
    self._phabUpdateWithExpectations(total=1, bad=1)

    # 'resolve' by abandoning our change
    with phlsys_fs.chdir_context("developer"):
        # pushing an empty source deletes the remote branch
        runCommands("git push origin :ph-review/emptyMerge2/master")
    self._phabUpdateWithExpectations(total=0, bad=0, emails=0)
def test_mergeConflictWorkflow(self):
    """A review that conflicts with already-landed work goes bad.

    NEWFILE is landed with contents "hello"; a second review based on a
    stale branch point writes "goodbye" to the same file and so cannot
    land.  The developer resolves the conflict by merging with
    '-s ours' and force-pushing the review branch through.
    """
    # keep a copy of the original branch point so we can come back to it
    self._devCheckoutPushNewBranch("temp/mergeConflict/master")
    self._devPushNewFile("NEWFILE", contents="hello")
    self._phabUpdateWithExpectations(total=0, bad=0)

    # move back to master and land a conflicting change
    with phlsys_fs.chdir_context("developer"):
        runCommands("git checkout master")
    self._devCheckoutPushNewBranch("ph-review/mergeConflict/master")
    self._devPushNewFile("NEWFILE", contents="goodbye")
    self._phabUpdateWithExpectations(total=1, bad=0)
    self._acceptTheOnlyReview()
    self._phabUpdateWithExpectations(total=0, bad=0)

    # move back to original and try to push and land
    with phlsys_fs.chdir_context("developer"):
        runCommands("git checkout temp/mergeConflict/master")
    self._devCheckoutPushNewBranch("ph-review/mergeConflict2/master")
    self._phabUpdateWithExpectations(total=1, bad=0)
    self._acceptTheOnlyReview()
    # the accepted review cannot land due to the conflict, so it goes bad
    self._phabUpdateWithExpectations(total=1, bad=1)

    # 'resolve' by forcing our change through
    print "force our change"
    with phlsys_fs.chdir_context("developer"):
        runCommands("git fetch -p")
        # '-s ours' discards the landed content in favour of our branch
        runCommands("git merge origin/master -s ours")
        runCommands("git push origin ph-review/mergeConflict2/master")
    print "update again"
    self._phabUpdateWithExpectations(total=1, bad=0)
    self._acceptTheOnlyReview()
    self._phabUpdateWithExpectations(total=0, bad=0, emails=0)
def setUp(self):
    """Create a fresh bare repo with 'developer' and 'phab' clones.

    Also creates an initial README commit on master, a conduit connected
    to the test Phabricator instance, and a mock mail sender/mailer.
    The working directory is changed into 'abd-test' for the duration of
    the test; self._saved_path remembers where to return to.
    """
    # TODO: just make a temp dir
    runCommands("rm -rf abd-test")
    runCommands("mkdir abd-test")
    self._saved_path = os.getcwd()
    os.chdir("abd-test")
    # 'devgit' is the shared bare repo; both clones push/pull through it
    runCommands(
        "git --git-dir=devgit init --bare",
        "git clone devgit developer",
        "git clone devgit phab",
    )
    self._devSetAuthorAccount(self.author_account)
    self._phabSetAuthorAccount(phldef_conduit.PHAB)
    with phlsys_fs.chdir_context("developer"):
        self._createCommitNewFile("README", self.reviewer)
        runCommands("git push origin master")
    with phlsys_fs.chdir_context("phab"):
        runCommands("git fetch origin -p")
    self.conduit = phlsys_conduit.Conduit(
        phldef_conduit.TEST_URI,
        phldef_conduit.PHAB.user,
        phldef_conduit.PHAB.certificate)
    # capture outgoing mail in-memory so tests can assert on email counts
    self.mock_sender = phlmail_mocksender.MailSender()
    self.mailer = abdmail_mailer.Mailer(
        self.mock_sender,
        ["*****@*****.**"],
        "http://server.fake/testrepo.git",
        "http://phabricator.server.fake/")
def test_A_Breathing(self):
    """Round-trip the landinglog: prepend entries, push, clone, compare.

    Works in a temporary directory with a bare 'origin' and a 'dev'
    clone configured to fetch the special landinglog ref.
    """
    with phlsys_fs.chtmpdir_context():
        # map the landinglog ref into the clone's refs/arcyd/origin/ space
        fetch_config = str(
            'remote.origin.fetch=+refs/arcyd/landinglog'
            ':refs/arcyd/origin/landinglog')

        run = phlsys_subprocess.run_commands

        run('git init --bare origin')
        run('git clone origin dev --config ' + fetch_config)

        with phlsys_fs.chdir_context('dev'):

            # make an initial commit on the master branch
            run('touch README')
            run('git add README')
            run('git commit README -m initial_commit')
            run('git push origin master')
            run('git checkout -b myfeature')

            # create a new branch with unique content
            with open('README', 'w') as f:
                f.write('myfeature content')
            run('git add README')
            run('git commit README -m myfeature_content')
            run('git push -u origin myfeature')

        dev = phlsys_git.Repo('dev')

        # make sure we can prepend a branch to the landinglog when empty
        abdt_landinglog.prepend(dev, '1234', 'myfeature', '4567')
        log = abdt_landinglog.get_log(dev)
        self.assertEqual(1, len(log))
        self.assertEqual(log[0].review_sha1, "1234")
        self.assertEqual(log[0].name, "myfeature")
        self.assertEqual(log[0].landed_sha1, "4567")

        # make sure we can prepend another branch; newest entry comes first
        abdt_landinglog.prepend(dev, '5678', 'newfeature', '8901')
        log = abdt_landinglog.get_log(dev)
        self.assertEqual(2, len(log))
        self.assertEqual(log[0].review_sha1, "5678")
        self.assertEqual(log[0].name, "newfeature")
        self.assertEqual(log[0].landed_sha1, "8901")
        self.assertEqual(log[1].review_sha1, "1234")
        self.assertEqual(log[1].name, "myfeature")
        self.assertEqual(log[1].landed_sha1, "4567")

        # make a new, independent clone and make sure we get the same log
        abdt_landinglog.push_log(dev, 'origin')
        run('git clone origin dev2 --config ' + fetch_config)
        with phlsys_fs.chdir_context('dev2'):
            run('git fetch')
        dev2 = phlsys_git.Repo('dev2')
        self.assertListEqual(
            abdt_landinglog.get_log(dev),
            abdt_landinglog.get_log(dev2))
def test_A_Breathing(self):
    """Exercise prepend, get_log and push_log on the landinglog.

    Builds a throwaway origin/dev pair in a temp dir, prepends two
    entries, then verifies an independent clone sees an identical log.
    """
    with phlsys_fs.chtmpdir_context():
        # map the landinglog ref into the clone's refs/arcyd/origin/ space
        fetch_config = str(
            'remote.origin.fetch=+refs/arcyd/landinglog'
            ':refs/arcyd/origin/landinglog')

        run = phlsys_subprocess.run_commands

        run('git init --bare origin')
        run('git clone origin dev --config ' + fetch_config)

        with phlsys_fs.chdir_context('dev'):

            # make an initial commit on the master branch
            run('touch README')
            run('git add README')
            run('git commit README -m initial_commit')
            run('git push origin master')
            run('git checkout -b myfeature')

            # create a new branch with unique content
            with open('README', 'w') as f:
                f.write('myfeature content')
            run('git add README')
            run('git commit README -m myfeature_content')
            run('git push -u origin myfeature')

        dev = phlsys_git.Repo('dev')

        # make sure we can prepend a branch to the landinglog when empty
        abdt_landinglog.prepend(dev, '1234', 'myfeature', '4567')
        log = abdt_landinglog.get_log(dev)
        self.assertEqual(1, len(log))
        self.assertEqual(log[0].review_sha1, "1234")
        self.assertEqual(log[0].name, "myfeature")
        self.assertEqual(log[0].landed_sha1, "4567")

        # make sure we can prepend another branch; newest entry comes first
        abdt_landinglog.prepend(dev, '5678', 'newfeature', '8901')
        log = abdt_landinglog.get_log(dev)
        self.assertEqual(2, len(log))
        self.assertEqual(log[0].review_sha1, "5678")
        self.assertEqual(log[0].name, "newfeature")
        self.assertEqual(log[0].landed_sha1, "8901")
        self.assertEqual(log[1].review_sha1, "1234")
        self.assertEqual(log[1].name, "myfeature")
        self.assertEqual(log[1].landed_sha1, "4567")

        # make a new, independent clone and make sure we get the same log
        abdt_landinglog.push_log(dev, 'origin')
        run('git clone origin dev2 --config ' + fetch_config)
        with phlsys_fs.chdir_context('dev2'):
            run('git fetch')
        dev2 = phlsys_git.Repo('dev2')
        self.assertListEqual(
            abdt_landinglog.get_log(dev),
            abdt_landinglog.get_log(dev2))
def _devPushNewFile(
        self, filename, has_reviewer=True, has_plan=True, contents=""):
    """Commit a new file in the developer clone and push it.

    :filename: name of the file to create and commit
    :has_reviewer: if True, include self.reviewer on the commit
    :has_plan: if True, include a "testplan" test plan
    :contents: text to write into the new file

    """
    with phlsys_fs.chdir_context("developer"):
        if has_reviewer:
            commit_reviewer = self.reviewer
        else:
            commit_reviewer = None
        if has_plan:
            test_plan = "testplan"
        else:
            test_plan = None
        self._createCommitNewFileRaw(
            filename, test_plan, commit_reviewer, contents)
        runCommands("git push")
def _getTheOnlyReviewId(self):
    """Return the id of the single working branch; assert there is one."""
    with phlsys_fs.chdir_context("phab"):
        repo = phlsys_git.GitClone(".")
        remote_branches = phlgit_branch.get_remote(repo, "origin")
        working_branches = abdt_naming.getWorkingBranches(remote_branches)
        self.assertEqual(len(working_branches), 1)
        return working_branches[0].id
def test_badBaseWorkflow(self):
    """A review branch with a bad base goes bad and can be deleted to resolve.

    The branch targets 'blaster' (presumably a nonexistent base - TODO
    confirm) and omits a test plan; the update marks it bad.  Deleting
    the branch clears it with no email sent.
    """
    self._devCheckoutPushNewBranch("ph-review/badBaseWorkflow/blaster")
    self._devPushNewFile("NEWFILE", has_plan=False)
    self._phabUpdateWithExpectations(total=1, bad=1)

    # delete the bad branch
    with phlsys_fs.chdir_context("developer"):
        runCommands("git push origin :ph-review/badBaseWorkflow/blaster")

    self._phabUpdateWithExpectations(total=0, bad=0, emails=0)
def _countPhabBadWorkingBranches(self):
    """Return the number of remote working branches with a bad status."""
    with phlsys_fs.chdir_context("phab"):
        repo = phlsys_git.GitClone(".")
        remote_branches = phlgit_branch.get_remote(repo, "origin")
        working_branches = abdt_naming.getWorkingBranches(remote_branches)
        return sum(
            1 for branch in working_branches
            if abdt_naming.isStatusBad(branch))
def _phabUpdateWithExpectations(self, total=None, bad=None, emails=None):
    """Fetch the phab clone and assert the expected review state.

    :total: expected number of reviews, or None to skip the check
    :bad: expected number of bad reviews, or None to skip the check
    :emails: expected number of emails sent, or None to skip the check

    """
    with phlsys_fs.chdir_context("phab"):
        runCommands("git fetch origin -p")
    # multiple updates should have the same result if we are not fetching
    # and assuming the data in Phabricator doesn't change
    for _ in range(3):
        self._phabUpdateWithExpectationsHelper(total, bad, emails)
def run(dir_path):
    """Return errors from running cppcheck in supplied 'dir_path'.

    :dir_path: string of the path to the directory to run in
    :returns: list of Result

    """
    with phlsys_fs.chdir_context(dir_path):
        # XXX: handle the "couldn't find files" exception
        result = phlsys_subprocess.run(
            'cppcheck', '-q', '.', '--xml', '--xml-version=2')
        return parse_output(result.stderr.strip())
def run(dir_path):
    """Return errors from running cppcheck in supplied 'dir_path'.

    :dir_path: string of the path to the directory to run in
    :returns: list of Result

    """
    cppcheck_args = ('cppcheck', '-q', '.', '--xml', '--xml-version=2')
    with phlsys_fs.chdir_context(dir_path):
        # XXX: handle the "couldn't find files" exception
        xml_report = phlsys_subprocess.run(*cppcheck_args).stderr
        return parse_output(xml_report.strip())
def test_C_exercise_wait_cycles(self):
    """Verify cycle counting: disabled by default, enabled on request.

    Seeds the counter file with '2', runs the counting script once, and
    expects a total of 3.
    """
    with setup_arcyd() as arcyd:
        # [ C] count_cycles is disabled by default
        self.assertFalse(arcyd._has_enabled_count_cycles)
        arcyd.enable_count_cycles_script()
        # [ C] count_cycles is enabled by enable_count_cycles_script
        self.assertTrue(arcyd._has_enabled_count_cycles)
        # pretend two cycles have already been counted
        phlsys_fs.write_text_file(os.path.join(
            arcyd._root_dir, 'cycle_counter'), '2')
        with phlsys_fs.chdir_context(arcyd._root_dir):
            os.system("./count_cycles.sh")
        # [ C] correct number of cycles counted - 3 (2 + 1)
        self.assertEqual(3, arcyd.count_cycles())
def test_C_exercise_wait_cycles(self):
    """Check the enable/count cycle-counter workflow end to end.

    Writes '2' into the counter file, invokes the script once more, and
    expects count_cycles() to report 3.
    """
    with setup_arcyd() as arcyd:
        # [ C] count_cycles is disabled by default
        self.assertFalse(arcyd._has_enabled_count_cycles)
        arcyd.enable_count_cycles_script()
        # [ C] count_cycles is enabled by enable_count_cycles_script
        self.assertTrue(arcyd._has_enabled_count_cycles)
        # pretend two cycles have already been counted
        phlsys_fs.write_text_file(
            os.path.join(arcyd._root_dir, 'cycle_counter'), '2')
        with phlsys_fs.chdir_context(arcyd._root_dir):
            os.system("./count_cycles.sh")
        # [ C] correct number of cycles counted - 3 (2 + 1)
        self.assertEqual(3, arcyd.count_cycles())
def test_changeAlreadyMergedOnBase(self):
    """A review whose change is already on its base goes bad; reset resolves.

    The review branch targets 'landing_branch', which already contains
    the NEWFILE commit; resetting the base back to origin/master makes
    the review viable again.
    """
    self._devCheckoutPushNewBranch("landing_branch")
    self._devPushNewFile("NEWFILE")
    self._devCheckoutPushNewBranch(
        "ph-review/alreadyMerged/landing_branch")
    self._phabUpdateWithExpectations(total=1, bad=1)

    # reset the landing branch back to master to resolve
    with phlsys_fs.chdir_context("developer"):
        runCommands("git checkout landing_branch")
        runCommands("git reset origin/master --hard")
        runCommands("git push origin landing_branch --force")

    self._phabUpdateWithExpectations(total=1, bad=0)
    self._acceptTheOnlyReview()
    self._phabUpdateWithExpectations(total=0, bad=0, emails=0)
def _fetch_if_needed(url_watcher, args, out, arcyd_reporter, repo_desc):
    """Prune and fetch the repo unless its snoop URL says nothing changed.

    :url_watcher: watcher used to decide whether the snoop URL changed
    :args: arguments providing 'repo_snoop_url' and 'repo_path'
    :out: output object used to display progress
    :arcyd_reporter: reporter used to time the fetch
    :repo_desc: short description of the repo for display / retry labels
    :returns: True if a fetch was performed, False otherwise

    """
    def prune_and_fetch():
        phlsys_subprocess.run_commands("git remote prune origin")
        phlsys_subprocess.run_commands("git fetch")

    snoop_url = args.repo_snoop_url

    # skip the fetch entirely when snooping shows no recent change;
    # no snoop URL means we must always fetch
    if snoop_url and not url_watcher.has_url_recently_changed(snoop_url):
        return False

    with phlsys_fs.chdir_context(args.repo_path):
        out.display("fetch (" + repo_desc + "): ")
        with arcyd_reporter.tag_timer_context('git fetch'):
            abdt_tryloop.tryloop(
                prune_and_fetch, 'fetch/prune', repo_desc)

    return True
def test_simpleWorkflow(self):
    """Create, update, accept and land a review; verify author on master."""
    self._devCheckoutPushNewBranch("ph-review/simpleWorkflow/master")
    self._devPushNewFile("NEWFILE")
    self._phabUpdateWithExpectations(total=1, bad=0)
    self._devPushNewFile("NEWFILE2")
    self._phabUpdateWithExpectations(total=1, bad=0)
    self._acceptTheOnlyReview()
    self._phabUpdateWithExpectations(total=0, bad=0, emails=0)

    # check the author on master
    with phlsys_fs.chdir_context("developer"):
        runCommands("git fetch -p", "git checkout master")
        repo = phlsys_git.GitClone(".")
        head = phlgit_log.get_last_commit_hash(repo)
        author_list = phlgit_log.get_author_names_emails_from_hashes(
            repo, [head])
        head_author = author_list[0]
        self.assertEqual(self.author_account.user, head_author[0])
        self.assertEqual(self.author_account.email, head_author[1])
def launch_debug_shell(self):
    """Open an interactive bash shell in the root dir; block until it exits."""
    prompt = "Launching debug shell, exit the shell to continue ..."
    with phlsys_fs.chdir_context(self._root_dir):
        print(prompt)
        subprocess.call('bash')
def _countPhabWorkingBranches(self):
    """Return the number of working branches on the phab clone's origin."""
    with phlsys_fs.chdir_context("phab"):
        repo = phlsys_git.GitClone(".")
        remote_branches = phlgit_branch.get_remote(repo, "origin")
        return len(abdt_naming.getWorkingBranches(remote_branches))
def run_once(args, out):
    """Perform a single fetch-and-process pass over the configured repo.

    Builds a mailer, fetches the repo with retries, connects to conduit
    with retries, then processes the updated repo.  Optionally touches
    marker files before (try) and after (ok) the attempt.

    :args: parsed arguments providing repo, conduit and email settings
    :out: output object used to display progress

    """
    sender = phlmail_sender.MailSender(
        phlsys_sendmail.Sendmail(), args.arcyd_email)
    mailer = abdmail_mailer.Mailer(
        sender,
        [args.admin_email],
        args.repo_desc,
        args.instance_uri)  # TODO: this should be a URI for users not conduit

    # prepare delays in the event of trouble when fetching or connecting
    # TODO: perhaps this policy should be decided higher-up
    delays = [
        datetime.timedelta(seconds=1),
        datetime.timedelta(seconds=1),
        datetime.timedelta(seconds=10),
        datetime.timedelta(seconds=10),
        datetime.timedelta(seconds=100),
        datetime.timedelta(seconds=100),
        datetime.timedelta(seconds=1000),
    ]

    # log.error if we get an exception when fetching
    def on_exception(e, delay):
        logging.error(str(e) + "\nwill wait " + str(delay))

    # marker file to signal that an attempt was made
    if args.try_touch_path:
        try:
            # TODO: don't rely on the touch command
            phlsys_subprocess.run("touch", args.try_touch_path)
        except Exception:
            pass  # XXX: we don't care atm, later log this

    with phlsys_fs.chdir_context(args.repo_path):
        out.display("fetch (" + args.repo_desc + "): ")
        phlsys_tryloop.try_loop_delay(
            lambda: phlsys_subprocess.run_commands("git fetch -p"),
            delays,
            onException=on_exception)

    # XXX: until conduit refreshes the connection, we'll suffer from
    #      timeouts; reduce the probability of this by using a new
    #      conduit each time.

    # create an array so that the 'connect' closure binds to the 'conduit'
    # variable as we'd expect, otherwise it'll just modify a local variable
    # and this 'conduit' will remain 'None'
    # XXX: we can do better in python 3.x
    conduit = [None]

    def connect():
        # nonlocal conduit  # XXX: we'll rebind in python 3.x, instead of array
        conduit[0] = phlsys_conduit.Conduit(
            args.instance_uri,
            args.arcyd_user,
            args.arcyd_cert,
            https_proxy=args.https_proxy)

    phlsys_tryloop.try_loop_delay(connect, delays, onException=on_exception)

    out.display("process (" + args.repo_desc + "): ")
    abdi_processrepo.processUpdatedRepo(
        conduit[0], args.repo_path, "origin", mailer)

    # marker file to signal that the attempt succeeded
    if args.ok_touch_path:
        try:
            # TODO: don't rely on the touch command
            phlsys_subprocess.run("touch", args.ok_touch_path)
        except Exception:
            pass  # XXX: we don't care atm, later log this
def _devCheckoutPushNewBranch(self, branch):
    """Create 'branch' in the developer clone and push it, setting upstream."""
    with phlsys_fs.chdir_context("developer"):
        runCommands(
            "git checkout -b " + branch,
            "git push -u origin " + branch)
def _devResetBranchToMaster(self, branch):
    """Hard-reset the developer checkout to origin/master; force-push 'branch'."""
    with phlsys_fs.chdir_context("developer"):
        runCommands(
            "git reset origin/master --hard",
            "git push -u origin " + branch + " --force")
def _phabUpdate(self):
    """Fetch the phab clone and run one processing pass over it."""
    with phlsys_fs.chdir_context("phab"):
        runCommands("git fetch origin -p")
    # "phab" is passed as a path relative to the test root dir
    abdi_processrepo.processUpdatedRepo(
        self.conduit, "phab", "origin", self.mailer)
def _run_once(args, out, reporter):
    """Perform a single fetch-and-process pass, reporting progress.

    Builds a mailer and plugin manager, prunes and fetches the repo with
    retries, connects to conduit with retries, then processes the repo's
    managed branches.  Tracebacks are forwarded to the reporter before
    being re-raised.

    :args: parsed arguments providing repo, conduit, plugin, email settings
    :out: output object used to display progress
    :reporter: reporter notified of tryloop errors, tracebacks, completion

    """
    sender = phlmail_sender.MailSender(
        phlsys_sendmail.Sendmail(), args.arcyd_email)
    mailer = abdmail_mailer.Mailer(
        sender,
        [args.admin_email],
        args.repo_desc,
        args.instance_uri)  # TODO: this should be a URI for users not conduit

    pluginManager = phlsys_pluginmanager.PluginManager(
        args.plugins, args.trusted_plugins)

    # prepare delays in the event of trouble when fetching or connecting
    # TODO: perhaps this policy should be decided higher-up
    delays = [
        datetime.timedelta(seconds=1),
        datetime.timedelta(seconds=1),
        datetime.timedelta(seconds=10),
        datetime.timedelta(seconds=10),
        datetime.timedelta(seconds=100),
        datetime.timedelta(seconds=100),
        datetime.timedelta(seconds=1000),
    ]

    # log.error if we get an exception when fetching
    def on_tryloop_exception(e, delay):
        reporter.on_tryloop_exception(e, delay)
        logging.error(str(e) + "\nwill wait " + str(delay))

    def prune_and_fetch():
        phlsys_subprocess.run_commands("git remote prune origin")
        phlsys_subprocess.run_commands("git fetch")

    with phlsys_fs.chdir_context(args.repo_path):
        out.display("fetch (" + args.repo_desc + "): ")
        phlsys_tryloop.try_loop_delay(
            prune_and_fetch, delays, onException=on_tryloop_exception)

    # XXX: until conduit refreshes the connection, we'll suffer from
    #      timeouts; reduce the probability of this by using a new
    #      conduit each time.

    # create an array so that the 'connect' closure binds to the 'conduit'
    # variable as we'd expect, otherwise it'll just modify a local variable
    # and this 'conduit' will remain 'None'
    # XXX: we can do better in python 3.x (nonlocal?)
    conduit = [None]

    def connect():
        # nonlocal conduit  # XXX: we'll rebind in python 3.x, instead of array
        conduit[0] = phlsys_conduit.Conduit(
            args.instance_uri,
            args.arcyd_user,
            args.arcyd_cert,
            https_proxy=args.https_proxy)

    phlsys_tryloop.try_loop_delay(
        connect, delays, onException=on_tryloop_exception)

    out.display("process (" + args.repo_desc + "): ")
    arcyd_conduit = abdt_conduit.Conduit(conduit[0])

    # optional callable for rendering a URL from a branch name
    branch_url_callable = None
    if args.branch_url_format:
        def make_branch_url(branch_name):
            return args.branch_url_format.format(branch=branch_name)
        branch_url_callable = make_branch_url

    arcyd_clone = abdt_git.Clone(
        args.repo_path, "origin", args.repo_desc, branch_url_callable)
    branches = arcyd_clone.get_managed_branches()

    try:
        abdi_processrepo.process_branches(
            branches, arcyd_conduit, mailer, pluginManager, reporter)
    except Exception:
        # record the traceback before propagating the failure
        reporter.on_traceback(traceback.format_exc())
        raise

    reporter.on_completed()