class TestGerritHooks(Base):
    """ Functional tests that validate Gerrit hooks.

    A commit message that references a Redmine issue must trigger the
    Gerrit hook that updates the issue status (to 'In Progress' on
    patchset creation, then to a final status on merge).
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        # Resources created during a test; cleaned up in tearDown.
        self.projects = []
        self.dirs_to_delete = []
        self.issues = []
        self.u = config.ADMIN_USER
        self.u2 = config.USER_2
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.u]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.u2]['auth_cookie'])
        self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
        self.gitu = GerritGitUtils(self.u, priv_key_path,
                                   config.USERS[self.u]['email'])

    def tearDown(self):
        for issue in self.issues:
            self.rm.delete_issue(issue)
        for name in self.projects:
            self.msu.deleteProject(name, self.u)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def create_project(self, name, user, options=None):
        """Create a project through managesf and register it for cleanup."""
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def _wait_for_issue_status(self, issue_id, status, retries=10):
        """Poll Redmine until issue_id reaches status, then assert it.

        The Gerrit hook updates the issue asynchronously, so allow up to
        `retries` one-second polls before the final assertion.
        """
        attempt = 0
        while True:
            if self.rm.test_issue_status(issue_id, status):
                break
            if attempt > retries:
                break
            time.sleep(1)
            attempt += 1
        self.assertTrue(self.rm.test_issue_status(issue_id, status))

    def _test_update_issue_hooks(self, comment_template, status):
        """ A referenced issue in commit msg triggers the hook
        """
        pname = 'p_%s' % create_random_str()
        # Be sure the project does not exist
        self.msu.deleteProject(pname, self.u)
        # Create the project
        self.create_project(pname, self.u)
        # Put USER_2 as core for the project
        self.gu.add_group_member(self.u2, "%s-core" % pname)
        # Create an issue on the project.  Register it so tearDown's
        # issue-cleanup loop (previously never fed) actually runs.
        issue_id = self.rm.create_issue(pname, "There is a problem")
        self.issues.append(issue_id)
        # Clone and commit something
        url = "ssh://%s@%s:29418/%s" % (self.u, config.GATEWAY_HOST,
                                        pname)
        clone_dir = self.gitu.clone(url, pname)
        cmt_msg = comment_template % issue_id
        self.gitu.add_commit_and_publish(clone_dir, 'master', cmt_msg)
        # The patchset-created hook should move the issue to 'In Progress'.
        self._wait_for_issue_status(issue_id, 'In Progress')
        self._test_merging(pname, issue_id, status)

    def _test_merging(self, pname, issue_id, status):
        """Approve and submit the change, then check the hook set `status`."""
        # Get the change id and merge the patch
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        self.gu.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu.submit_change_note(change_id, "current", "Workflow", "1")
        self.gu.submit_change_note(change_id, "current", "Verified", "2")
        # A second +2 from another core (USER_2) is given before submit.
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.assertTrue(self.gu.submit_patch(change_id, "current"))
        # The change-merged hook should update the issue to its final status.
        self._wait_for_issue_status(issue_id, status)

    def test_gerrit_hook(self):
        """test various commit messages triggering a hook"""
        for template, final_status in TEST_MSGS:
            self._test_update_issue_hooks(template, final_status)
class TestProjectTestsWorkflow(Base):
    """ Functional tests to verify the configuration of a project test
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)
        # Local checkout of the sample project pushed into new test repos.
        cls.sample_project_dir = \
            os.path.join(config.SF_TESTS_DIR, "sample_project/")

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        # Resources created during a test; cleaned up in tearDown.
        self.projects = []
        self.dirs_to_delete = []
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un, priv_key_path,
                                         config.USERS[self.un]['email'])
        # Clone the config repo and make change to it
        # in order to test the new sample_project
        self.config_clone_dir = self.clone_as_admin("config")
        # Snapshot the three config files so tearDown can restore them.
        # NOTE: uses the Python 2 file() builtin, consistent with the
        # rest of this module.
        self.original_layout = file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml")).read()
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        self.original_project = file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read()
        # Put USER_2 as core for config project
        self.gu.add_group_member(config.USER_2, "config-core")

    def tearDown(self):
        # Restore the config repo first so later tests see pristine
        # zuul/jjb configuration, then delete test projects and clones.
        self.restore_config_repo(self.original_layout,
                                 self.original_project,
                                 self.original_zuul_projects)
        for name in self.projects:
            self.msu.deleteProject(name, config.ADMIN_USER)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def assert_reviewer_approvals(self, change_id, value):
        """Poll up to 90s until the 'jenkins' reviewer's Verified approval
        on change_id equals value, then assert it."""
        approvals = {}
        for _ in range(90):
            approvals = self.gu.get_reviewer_approvals(change_id,
                                                       'jenkins')
            if approvals and approvals.get('Verified') == value:
                break
            time.sleep(1)
        self.assertEqual(value, approvals.get('Verified'))

    def clone_as_admin(self, pname):
        """Clone project pname over ssh as admin; remember the parent
        directory for cleanup (only once per directory)."""
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST,
                                        pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, layout, project, zuul):
        """Write back the saved zuul/jjb files and direct-push the restore
        commit to the config repo's master branch."""
        file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml"), 'w').write(
            layout)
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            project)
        self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore layout.yaml and projects.yaml")

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def push_review_as_admin(self, clone_dir, msg):
        # Stage, commit and push the additions on master for review
        # (goes through Gerrit, unlike commit_direct_push_as_admin).
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.review_push_branch(clone_dir, 'master')

    def create_project(self, name, user, options=None):
        """Create a project through managesf and register it for cleanup."""
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def test_check_project_test_workflow(self):
        """ Validate new project to test via zuul layout.yaml
        """
        # We want to create a project, provide project source
        # code with tests. We then configure zuul/jjb to handle the
        # run of the test cases. We then validate Gerrit has been
        # updated about the test results
        # We use the sample-project (that already exists)

        pname = 'test_workflow_%s' % create_random_str()
        # Be sure the project does not exist
        self.msu.deleteProject(pname, config.ADMIN_USER)
        # Create it
        self.create_project(pname, config.ADMIN_USER)

        # Add the sample-project to the empty repository
        clone_dir = self.clone_as_admin(pname)
        copytree(self.sample_project_dir, clone_dir)
        self.commit_direct_push_as_admin(clone_dir, "Add the sample project")

        # Change to config/zuul/layout.yaml and jobs/projects.yaml
        # in order to test the new project
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("zuul-demo", pname),
        )
        # Duplicate the zuul-demo JJB entry under the new project name.
        ycontent2 = load(file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read())
        sp2 = copy.deepcopy(
            [p for p in ycontent2 if 'project' in p and
                p['project']['name'] == 'zuul-demo'][0])
        sp2['project']['name'] = pname
        ycontent2.append(sp2)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            dump(ycontent2))

        # Retrieve the previous build number for config-check
        last_success_build_num_ch = \
            self.ju.get_last_build_number("config-check",
                                          "lastSuccessfulBuild")
        # Retrieve the previous build number for config-update
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")

        # Send review (config-check) will be triggered
        self.push_review_as_admin(
            self.config_clone_dir,
            "Add config definition in Zuul/JJB config for %s" % pname)

        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")

        # Poll (up to 90s) until lastBuild == lastSuccessfulBuild,
        # i.e. the most recent config-check run succeeded.
        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1

        self.assertEqual(last_build_num_ch, last_success_build_num_ch)
        # let some time to Zuul to update the test result to Gerrit.
        time.sleep(2)

        # Get the change id
        change_ids = self.gu.get_my_changes_for_project("config")
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]

        # Check whether zuul sets verified to +1 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+1')

        # review the change
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu2.submit_change_note(change_id, "current", "Workflow", "1")

        # now zuul processes gate pipeline and runs config-check job
        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")

        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1

        self.assertEqual(last_build_num_ch, last_success_build_num_ch)

        # Check whether zuul sets verified to +2 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+2')

        # verify whether zuul merged the patch
        change = self.gu.get_change('config', 'master', change_id)
        change_status = change['status']
        attempt = 0
        while change_status != 'MERGED':
            if attempt >= 90:
                break
            time.sleep(1)
            change = self.gu.get_change('config', 'master', change_id)
            change_status = change['status']
            attempt += 1
        self.assertEqual(change_status, 'MERGED')

        # Test post pipe line
        # as the patch is merged, post pieline should run config-update job
        # Wait for config-update to finish and verify the success
        self.ju.wait_till_job_completes("config-update",
                                        last_success_build_num_cu,
                                        "lastSuccessfulBuild")
        last_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastBuild")
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_cu, last_success_build_num_cu)

        # Retrieve the prev build number for pname-unit-tests
        # Retrieve the prev build number for pname-functional-tests
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")

        # Test config-update
        # config-update should have created jobs for pname
        # Trigger tests on pname
        # Send a review and check tests has been run
        self.gitu_admin.add_commit_and_publish(
            clone_dir, 'master', "Add useless file", self.un)

        # Wait for pname-unit-tests to finish and verify the success
        self.ju.wait_till_job_completes("%s-unit-tests" % pname,
                                        last_success_build_num_sp_ut,
                                        "lastSuccessfulBuild")
        # Wait for pname-functional-tests to end and check the success
        self.ju.wait_till_job_completes("%s-functional-tests" % pname,
                                        last_success_build_num_sp_ft,
                                        "lastSuccessfulBuild")
        # Check the unit tests succeed
        last_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ut, last_success_build_num_sp_ut)
        # Check the functional tests succeed
        last_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ft, last_success_build_num_sp_ft)

        # Get the change id
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]

        # let some time to Zuul to update the test result to Gerrit.
        for i in range(90):
            if "jenkins" in self.gu.get_reviewers(change_id):
                break
            time.sleep(1)
        self.assert_reviewer_approvals(change_id, '+1')
class TestProjectReplication(Base):
    """ Functional tests to verify the gerrit replication feature

    Creates a project, registers a replication.config section pointing
    at a mirror path on the mysql node, merges a change, and verifies
    the mirror repository received the replicated content.
    """
    def setUp(self):
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un, priv_key_path,
                                         config.USERS[self.un]['email'])

        # Configuration to access mirror repo present in mysql
        self.msql_repo_path = "ssh://%s@%s/%s" \
            % (config.GERRIT_USER, config.GATEWAY_HOST,
               'home/gerrit/site_path/git/')

        # prepare environment for git clone on mirror repo:
        # a temp dir with the gerrit service private key and a GIT_SSH
        # wrapper script that uses it without host-key checking.
        self.mt = Tool()
        self.mt_tempdir = tempfile.mkdtemp()
        priv_key = file(config.GERRIT_SERVICE_PRIV_KEY_PATH, 'r').read()
        priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
        file(priv_key_path, 'w').write(priv_key)
        os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
        ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i " \
            "%s \"$@\"" % priv_key_path
        wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
        file(wrapper_path, 'w').write(ssh_wrapper)
        os.chmod(wrapper_path, stat.S_IRWXU)
        self.mt.env['GIT_SSH'] = wrapper_path

        self.pname = 'test-replication'

    def tearDown(self):
        self.deleteConfigSection(self.un, self.pname)
        self.deleteMirrorRepo(self.pname)
        self.msu.deleteProject(self.pname, self.un)
        self.gu2.del_pubkey(self.k_idx)

    # Can't use GerritGitUtils.clone as not sure when source uri repo in mysql
    # be ready.(i.e gerrit is taking time to create the mirror repo in mysql
    # node) So this clone may succeed or fail, we don't need 'git review -s'
    # and other review commands in clone method
    def clone(self, uri, target):
        """Best-effort git clone of uri into the temp dir; returns the
        expected clone path whether or not the clone succeeded."""
        self.assertTrue(uri.startswith('ssh://'))
        cmd = "git clone %s %s" % (uri, target)
        self.mt.exe(cmd, self.mt_tempdir)
        clone = os.path.join(self.mt_tempdir, target)
        return clone

    def create_project(self, name, user, options=None):
        """Create a project through managesf (no cleanup registration:
        tearDown removes self.pname explicitly)."""
        self.msu.createProject(name, user, options)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
        """Run subcmd (a list) on host over ssh; returns (stdout, stderr)."""
        host = '%s@%s' % (user, host)
        sshcmd = ['ssh', '-o', 'LogLevel=ERROR',
                  '-o', 'StrictHostKeyChecking=no',
                  '-o', 'UserKnownHostsFile=/dev/null',
                  '-i', sshkey_priv_path, host]
        cmd = sshcmd + subcmd
        p = Popen(cmd, stdout=PIPE)
        return p.communicate()

    def deleteMirrorRepo(self, name):
        """Remove the mirror repository for `name` on the mysql node."""
        sshkey_priv_path = config.GERRIT_SERVICE_PRIV_KEY_PATH
        user = '******'
        host = config.GATEWAY_HOST
        mirror_path = '/home/gerrit/site_path/git/%s.git' % name
        cmd = ['rm', '-rf', mirror_path]
        self.ssh_run_cmd(sshkey_priv_path, user, host, cmd)

    def createConfigSection(self, user, project):
        # Section name will be node name and the project
        section = 'mysql_%s' % project
        host = '%s@%s' % (config.GERRIT_USER, config.GATEWAY_HOST)
        mirror_repo_path = '/home/gerrit/site_path/git/\${name}.git'
        url = '%s:%s' % (host, mirror_repo_path)
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'projects', project)
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'url', url)
        push = '+refs/heads/*:refs/heads/*'
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'push', push)
        push = '+refs/tags/*:refs/tags/*'
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'push', push)

    def deleteConfigSection(self, user, project):
        # section name will be node name and the project.
        # BUGFIX: must match the 'mysql_%s' name used by
        # createConfigSection; it previously removed 'managesf_%s',
        # leaving the created section behind after tearDown.
        section = 'mysql_%s' % project
        self.msu.replicationModifyConfig(user, 'remove-section', section)

    def mirror_clone_and_check_files(self, url, pname, us_files):
        """Repeatedly clone the mirror (up to ~30 tries, 3s apart) until
        it contains files, then assert every name in us_files is present
        and that the mirror holds more files than us_files alone."""
        retries = 0
        files = []
        while True:
            clone = self.clone(url, pname)
            # clone may fail, as mirror repo is not yet ready(i.e gerrit not
            # yet replicated the project)
            if os.path.isdir(clone):
                files = [f for f in os.listdir(clone)
                         if not f.startswith('.')]
                shutil.rmtree(clone)
            if us_files and files:
                break
            elif retries > 30:
                break
            else:
                time.sleep(3)
            retries += 1
        if us_files:
            for f in us_files:
                self.assertIn(f, files)
            self.assertTrue((len(us_files) < len(files)))

    def test_replication(self):
        """ Test gerrit replication for review process
        """
        # Be sure the project, mirror repo, project in config don't exist
        self.deleteMirrorRepo(self.pname)
        self.deleteConfigSection(self.un, self.pname)
        self.msu.deleteProject(self.pname, self.un)

        # Create the project
        self.create_project(self.pname, self.un)

        # Create new section for this project in replication.config
        self.createConfigSection(self.un, self.pname)

        # Force gerrit to read its known_hosts file. The only
        # way to do that is by restarting gerrit. The Puppet Gerrit
        # manifest will restart gerrit if a new entry in known_hosts_gerrit
        # is discovered.
        # This may take some time (gerrit in some condition take long
        # to be fully up)
        call("ssh [email protected] systemctl restart gerrit", shell=True)
        call("ssh [email protected] /root/wait4gerrit.sh", shell=True)

        # Clone the project and submit it for review
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        gitu = GerritGitUtils(self.un, priv_key_path,
                              config.USERS[self.un]['email'])
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST,
                                        self.pname)
        clone_dir = gitu.clone(url, self.pname)
        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        # Add 2 files and resubmit for review
        data = "echo Working"
        us_files = ["run_functional-tests.sh", "run_tests.sh"]
        for f in us_files:
            file(os.path.join(clone_dir, f), 'w').write(data)
            os.chmod(os.path.join(clone_dir, f), 0o755)
        gitu.add_commit_and_publish(clone_dir, "master", None,
                                    fnames=us_files)

        # Review the patch and merge it
        change_ids = self.gu.get_my_changes_for_project(self.pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        self.gu.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu.submit_change_note(change_id, "current", "Verified", "2")
        self.gu.submit_change_note(change_id, "current", "Workflow", "1")
        # Put USER_2 as core for config project
        grp_name = '%s-core' % self.pname
        self.gu.add_group_member(config.USER_2, grp_name)
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.assertTrue(self.gu.submit_patch(change_id, "current"))
        shutil.rmtree(clone_dir)

        # Verify if gerrit automatically triggered replication
        # Mirror repo(in mysql node) should have these latest changes
        # Clone the mirror repo(from mysql) and check for the 2 files
        msql_repo_url = self.msql_repo_path + '%s.git' % self.pname
        self.mirror_clone_and_check_files(msql_repo_url, self.pname,
                                          us_files)
class TestGerritHooks(Base):
    """ Functional tests that validate Gerrit hooks.

    A commit message that references a Redmine issue must trigger the
    Gerrit hook that updates the issue status (to 'In Progress' on
    patchset creation, then to a final status on merge).
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        # Resources created during a test; cleaned up in tearDown.
        self.projects = []
        self.dirs_to_delete = []
        self.issues = []
        self.u = config.ADMIN_USER
        self.u2 = config.USER_2
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.u]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.u2]['auth_cookie'])
        self.gu.add_pubkey(config.USERS[self.u]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.u]["privkey"])
        self.gitu = GerritGitUtils(self.u, priv_key_path,
                                   config.USERS[self.u]['email'])

    def tearDown(self):
        for issue in self.issues:
            self.rm.delete_issue(issue)
        for name in self.projects:
            self.msu.deleteProject(name, self.u)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def create_project(self, name, user, options=None):
        """Create a project through managesf and register it for cleanup."""
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def _wait_for_issue_status(self, issue_id, status, retries=10):
        """Poll Redmine until issue_id reaches status, then assert it.

        The Gerrit hook updates the issue asynchronously, so allow up to
        `retries` one-second polls before the final assertion.
        """
        attempt = 0
        while True:
            if self.rm.test_issue_status(issue_id, status):
                break
            if attempt > retries:
                break
            time.sleep(1)
            attempt += 1
        self.assertTrue(self.rm.test_issue_status(issue_id, status))

    def _test_update_issue_hooks(self, comment_template, status):
        """ A referenced issue in commit msg triggers the hook
        """
        pname = 'p_%s' % create_random_str()
        # Be sure the project does not exist
        self.msu.deleteProject(pname, self.u)
        # Create the project
        self.create_project(pname, self.u)
        # Put USER_2 as core for the project
        self.gu.add_group_member(self.u2, "%s-core" % pname)
        # Create an issue on the project.  Register it so tearDown's
        # issue-cleanup loop (previously never fed) actually runs.
        issue_id = self.rm.create_issue(pname, "There is a problem")
        self.issues.append(issue_id)
        # Clone and commit something
        url = "ssh://%s@%s:29418/%s" % (self.u, config.GATEWAY_HOST,
                                        pname)
        clone_dir = self.gitu.clone(url, pname)
        cmt_msg = comment_template % issue_id
        self.gitu.add_commit_and_publish(clone_dir, 'master', cmt_msg)
        # The patchset-created hook should move the issue to 'In Progress'.
        self._wait_for_issue_status(issue_id, 'In Progress')
        self._test_merging(pname, issue_id, status)

    def _test_merging(self, pname, issue_id, status):
        """Approve and submit the change, then check the hook set `status`."""
        # Get the change id and merge the patch
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        self.gu.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu.submit_change_note(change_id, "current", "Workflow", "1")
        self.gu.submit_change_note(change_id, "current", "Verified", "2")
        # A second +2 from another core (USER_2) is given before submit.
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.assertTrue(self.gu.submit_patch(change_id, "current"))
        # The change-merged hook should update the issue to its final status.
        self._wait_for_issue_status(issue_id, status)

    def test_gerrit_hook(self):
        """test various commit messages triggering a hook"""
        for template, final_status in TEST_MSGS:
            self._test_update_issue_hooks(template, final_status)
class TestProjectReplication(Base):
    """ Functional tests to verify the gerrit replication feature
    """
    def setUp(self):
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un, priv_key_path,
                                         config.USERS[self.un]['email'])

        # Configuration to access mirror repo present in managesf
        self.managesf_repo_path = "ssh://%s@%s/home/gerrit/git/" % (
            config.GERRIT_USER, config.GATEWAY_HOST)

        # prepare environment for git clone on mirror repo:
        # a temp dir holding the gerrit service private key and a GIT_SSH
        # wrapper script that uses it without host-key checking.
        self.mt = Tool()
        self.mt_tempdir = tempfile.mkdtemp()
        priv_key = file(config.GERRIT_SERVICE_PRIV_KEY_PATH, 'r').read()
        priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
        file(priv_key_path, 'w').write(priv_key)
        os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
        ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
            priv_key_path)
        wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
        file(wrapper_path, 'w').write(ssh_wrapper)
        os.chmod(wrapper_path, stat.S_IRWXU)
        self.mt.env['GIT_SSH'] = wrapper_path

        self.pname = 'test-replication'

    def tearDown(self):
        # Remove the replication section, the mirror repo and the project,
        # then drop USER_2's temporary public key.
        self.deleteConfigSection(self.un, self.pname)
        self.deleteMirrorRepo(self.pname)
        self.msu.deleteProject(self.pname, self.un)
        self.gu2.del_pubkey(self.k_idx)

    # Can't use GerritGitUtils.clone as not sure when source uri repo
    # be ready.(i.e gerrit is taking time to create the mirror repo in managesf
    # node) So this clone may succeed or fail, we don't need 'git review -s'
    # and other review commands in clone method
    def clone(self, uri, target):
        # Returns the expected clone path whether or not the clone
        # actually succeeded; callers check os.path.isdir on it.
        self.assertTrue(uri.startswith('ssh://'))
        cmd = "git clone %s %s" % (uri, target)
        self.mt.exe(cmd, self.mt_tempdir)
        clone = os.path.join(self.mt_tempdir, target)
        return clone

    def create_project(self, name, user, options=None):
        # No cleanup registration: tearDown removes self.pname explicitly.
        self.msu.createProject(name, user, options)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
        """Run subcmd (a list) on host over ssh; returns (stdout, stderr)."""
        host = '%s@%s' % (user, host)
        sshcmd = [
            'ssh', '-o', 'LogLevel=ERROR', '-o', 'StrictHostKeyChecking=no',
            '-o', 'UserKnownHostsFile=/dev/null', '-i', sshkey_priv_path,
            host
        ]
        cmd = sshcmd + subcmd
        p = Popen(cmd, stdout=PIPE)
        return p.communicate()

    def deleteMirrorRepo(self, name):
        """Remove the mirror repository for `name` on the gateway host."""
        sshkey_priv_path = config.GERRIT_SERVICE_PRIV_KEY_PATH
        user = '******'
        host = config.GATEWAY_HOST
        mirror_path = '/home/gerrit/git/%s.git' % name
        cmd = ['rm', '-rf', mirror_path]
        self.ssh_run_cmd(sshkey_priv_path, user, host, cmd)

    def createConfigSection(self, user, project):
        # Section name will be node name and the project
        section = 'managesf_%s' % project
        host = '%s@%s' % (config.GERRIT_USER, config.GATEWAY_HOST)
        # NOTE(review): the backslash before ${name} is kept literally in
        # the Python string; presumably it survives shell expansion on the
        # way into replication.config — confirm against managesf.
        mirror_repo_path = '/home/gerrit/git/\${name}.git'
        url = '%s:%s' % (host, mirror_repo_path)
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'projects', project)
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'url', url)
        push = '+refs/heads/*:refs/heads/*'
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'push', push)
        push = '+refs/tags/*:refs/tags/*'
        self.msu.replicationModifyConfig(user, 'add', section,
                                         'push', push)

    def deleteConfigSection(self, user, project):
        # section name will be node name and the project
        section = 'managesf_%s' % project
        self.msu.replicationModifyConfig(user, 'remove', section)

    def mirror_clone_and_check_files(self, url, pname, us_files):
        # Retry the clone (up to ~30 tries, 3s apart) until the mirror
        # repo exists and is non-empty, then assert every expected file
        # is present and that the mirror holds more than just us_files.
        retries = 0
        files = []
        while True:
            clone = self.clone(url, pname)
            # clone may fail, as mirror repo is not yet ready(i.e gerrit not
            # yet replicated the project)
            if os.path.isdir(clone):
                files = [f for f in os.listdir(clone)
                         if not f.startswith('.')]
                shutil.rmtree(clone)
            if us_files and files:
                break
            elif retries > 30:
                break
            else:
                time.sleep(3)
            retries += 1
        if us_files:
            for f in us_files:
                self.assertIn(f, files)
            self.assertTrue((len(us_files) < len(files)))

    def test_replication(self):
        """ Test gerrit replication for review process
        """
        # Be sure the project, mirror repo, project in config don't exist
        self.deleteMirrorRepo(self.pname)
        self.deleteConfigSection(self.un, self.pname)
        self.msu.deleteProject(self.pname, self.un)

        # Create the project
        self.create_project(self.pname, self.un)

        # Create new section for this project in replication.config
        self.createConfigSection(self.un, self.pname)

        # Force gerrit to read its known_hosts file. The only
        # way to do that is by restarting gerrit. The Puppet Gerrit
        # manifest will restart gerrit if a new entry in known_hosts_gerrit
        # is discovered.
        # This may take some time (gerrit in some condition take long
        # to be fully up)
        call("ssh %s ssh gerrit systemctl restart gerrit" %
             config.GATEWAY_HOST, shell=True)
        call("ssh %s ssh gerrit /root/wait4gerrit.sh" %
             config.GATEWAY_HOST, shell=True)

        # Clone the project and submit it for review
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        gitu = GerritGitUtils(self.un,
                              priv_key_path,
                              config.USERS[self.un]['email'])
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST,
                                        self.pname)
        clone_dir = gitu.clone(url, self.pname)
        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

        # Add 2 files and resubmit for review
        data = "echo Working"
        us_files = ["run_functional-tests.sh", "run_tests.sh"]
        for f in us_files:
            file(os.path.join(clone_dir, f), 'w').write(data)
            os.chmod(os.path.join(clone_dir, f), 0755)
        gitu.add_commit_and_publish(clone_dir, "master", None,
                                    fnames=us_files)

        # Review the patch and merge it
        change_ids = self.gu.get_my_changes_for_project(self.pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        self.gu.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu.submit_change_note(change_id, "current", "Verified", "2")
        self.gu.submit_change_note(change_id, "current", "Workflow", "1")
        # Put USER_2 as core for config project
        grp_name = '%s-core' % self.pname
        self.gu.add_group_member(config.USER_2, grp_name)
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.assertTrue(self.gu.submit_patch(change_id, "current"))
        shutil.rmtree(clone_dir)

        # Verify if gerrit automatically triggered replication
        repo_url = self.managesf_repo_path + '%s.git' % self.pname
        self.mirror_clone_and_check_files(repo_url, self.pname,
                                          us_files)
class TestProjectTestsWorkflow(Base): """ Functional tests to verify the configuration of a project test """ @classmethod def setUpClass(cls): cls.ru = ResourcesUtils() cls.sample_project_dir = \ os.path.join(config.SF_TESTS_DIR, "sample_project/") @classmethod def tearDownClass(cls): pass def setUp(self): super(TestProjectTestsWorkflow, self).setUp() self.projects = [] self.dirs_to_delete = [] self.un = config.ADMIN_USER self.gu = GerritUtils( config.GATEWAY_URL, auth_cookie=config.USERS[self.un]['auth_cookie']) self.gu2 = GerritUtils( config.GATEWAY_URL, auth_cookie=config.USERS[config.USER_2]['auth_cookie']) self.ju = JenkinsUtils() self.gu.add_pubkey(config.USERS[self.un]["pubkey"]) priv_key_path = set_private_key(config.USERS[self.un]["privkey"]) self.gitu_admin = GerritGitUtils(self.un, priv_key_path, config.USERS[self.un]['email']) # Clone the config repo and keep job/zuul config content self.config_clone_dir = self.clone_as_admin("config") self.original_zuul_projects = file(os.path.join( self.config_clone_dir, "zuul/projects.yaml")).read() self.original_project = file(os.path.join( self.config_clone_dir, "jobs/projects.yaml")).read() self.need_restore_config_repo = False # Put USER_2 as core for config project self.gu.add_group_member(config.USER_2, "config-core") def tearDown(self): super(TestProjectTestsWorkflow, self).tearDown() if self.need_restore_config_repo: self.restore_config_repo(self.original_project, self.original_zuul_projects) for name in self.projects: self.ru.direct_delete_repo(name) for dirs in self.dirs_to_delete: shutil.rmtree(dirs) def assert_reviewer_approvals(self, change_id, value): approvals = {} for _ in range(300): approvals = self.gu.get_reviewer_approvals(change_id, 'jenkins') if approvals and approvals.get('Verified') == value: break time.sleep(1) self.assertEqual(value, approvals.get('Verified')) def clone_as_admin(self, pname): url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST, pname) clone_dir = 
self.gitu_admin.clone(url, pname) if os.path.dirname(clone_dir) not in self.dirs_to_delete: self.dirs_to_delete.append(os.path.dirname(clone_dir)) return clone_dir def restore_config_repo(self, project, zuul): logger.info("Restore {zuul,jobs}/projects.yaml") file(os.path.join( self.config_clone_dir, "zuul/projects.yaml"), 'w').write( zuul) file(os.path.join( self.config_clone_dir, "jobs/projects.yaml"), 'w').write( project) change_sha = self.commit_direct_push_as_admin( self.config_clone_dir, "Restore {zuul,jobs}/projects.yaml") logger.info("Waiting for config-update on %s" % change_sha) self.ju.wait_for_config_update(change_sha) def commit_direct_push_as_admin(self, clone_dir, msg): # Stage, commit and direct push the additions on master self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg) return self.gitu_admin.direct_push_branch(clone_dir, 'master') def push_review_as_admin(self, clone_dir, msg): # Stage, commit and direct push the additions on master self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg) return self.gitu_admin.review_push_branch(clone_dir, 'master') def create_project(self, name): self.ru.direct_create_repo(name) self.projects.append(name) def test_timestamped_logs(self): """Test that jenkins timestamps logs""" # Done here to make sure a config-update job was run and to avoid # duplicating code timestamp_re = re.compile('\d{2}:\d{2}:\d{2}.\d{0,3}') n = self.ju.get_last_build_number("config-update", "lastBuild") cu_logs = self.ju.get_job_logs("config-update", n) self.assertTrue(cu_logs is not None) for l in cu_logs.split('\n'): if l: self.assertRegexpMatches(l, timestamp_re, msg=l) def test_check_project_test_workflow(self): """ Validate new project to test via zuul """ # We want to create a project, provide project source # code with tests. We then configure zuul/jjb to handle the # run of the test cases. 
We then validate Gerrit has been # updated about the test results # We use the sample-project (that already exists) pname = 'test_workflow_%s' % create_random_str() logger.info("Creating project %s" % pname) # Create it self.create_project(pname) logger.info("Populating the project with %s" % self.sample_project_dir) # Add the sample-project to the empty repository clone_dir = self.clone_as_admin(pname) copytree(self.sample_project_dir, clone_dir) self.commit_direct_push_as_admin(clone_dir, "Add the sample project") # Change to config/{zuul,jobs}/projects.yaml # in order to test the new project logger.info("Adding config-repo configuration") ycontent = file(os.path.join( self.config_clone_dir, "zuul/projects.yaml")).read() file(os.path.join( self.config_clone_dir, "zuul/projects.yaml"), 'w').write( ycontent.replace("zuul-demo", pname), ) ycontent2 = load(file(os.path.join( self.config_clone_dir, "jobs/projects.yaml")).read()) sp2 = copy.deepcopy( [p for p in ycontent2 if 'project' in p and p['project']['name'] == 'zuul-demo'][0]) sp2['project']['name'] = pname ycontent2.append(sp2) file(os.path.join( self.config_clone_dir, "jobs/projects.yaml"), 'w').write( dump(ycontent2)) # Send review (config-check) will be triggered logger.info("Submitting the config review") change_sha = self.push_review_as_admin( self.config_clone_dir, "Add config definition in Zuul/JJB config for %s" % pname) change_nr = self.gu.get_change_number(change_sha) logger.info("Waiting for verify +1 on change %d" % change_nr) self.assertEquals(self.gu.wait_for_verify(change_nr), 1) # review the config change as a member from the config-core group logger.info("Approving and waiting for verify +2") self.gu2.submit_change_note(change_nr, "current", "Code-Review", "2") self.gu2.submit_change_note(change_nr, "current", "Workflow", "1") for retry in xrange(60): jenkins_vote = self.gu.get_vote(change_nr, "Verified") if jenkins_vote == 2: break time.sleep(1) self.assertEquals(jenkins_vote, 2) # verify 
whether zuul merged the patch logger.info("Waiting for change to be merged") for retry in xrange(60): change_status = self.gu.get_info(change_nr)['status'] if change_status == "MERGED": break time.sleep(1) self.assertEqual(change_status, 'MERGED') self.need_restore_config_repo = True logger.info("Waiting for config-update") config_update_log = self.ju.wait_for_config_update(change_sha) self.assertIn("Finished: SUCCESS", config_update_log) # Propose a change on a the repo and expect a Verified +1 logger.info("Submiting a test change to %s" % pname) change_sha = self.gitu_admin.add_commit_and_publish( clone_dir, 'master', "Add useless file", self.un) change_nr = self.gu.get_change_number(change_sha) logger.info("Waiting for verify +1 on change %d" % change_nr) self.assertEquals(self.gu.wait_for_verify(change_nr), 1) # Update the change on a the repo and expect a Verified -1 logger.info("Submiting a test change to %s suppose to fail" % pname) data = "#!/bin/bash\nexit 1\n" file(os.path.join(clone_dir, "run_tests.sh"), 'w').write(data) os.chmod(os.path.join(clone_dir, "run_tests.sh"), 0755) self.gitu_admin.add_commit_and_publish( clone_dir, "master", None, fnames=["run_tests.sh"]) logger.info("Waiting for verify -1 on change %d" % change_nr) self.assertEquals(self.gu.wait_for_verify(change_nr), -1) logger.info("Validate jobs ran via the job api %s" % pname) # This piece of code is there by convenience ... # TODO: Should be moved in the job api tests file. 
# Test the manageSF jobs API: query per patch & revision change_ids = self.gu.get_my_changes_for_project(pname) self.assertGreater(len(change_ids), 0) change_id = change_ids[0] patch = self.gu.get_change_last_patchset(change_id)['_number'] cookie = get_cookie(config.ADMIN_USER, config.ADMIN_PASSWORD) cookies = {"auth_pubtkt": cookie} base_url = config.GATEWAY_URL + "/manage/jobs/" for j in ["%s-functional-tests" % pname, "%s-unit-tests" % pname]: job = requests.get(base_url + '%s/?change=%s' % (j, patch), cookies=cookies).json() self.assertTrue("jenkins" in job.keys(), job) self.assertTrue(len(job["jenkins"]) > 1, job)