    def test_check_add_automatic_reviewers(self):
        """ Test if reviewers-by-blame plugin works """
        # Scenario: the admin user authors and merges the initial content,
        # then USER_2 modifies the same file. The reviewers-by-blame plugin
        # should automatically add the original author (admin) as reviewer
        # on USER_2's change.
        data = "this\nis\na\ncouple\nof\nlines"
        # Helper returns the open change, an admin-side GerritUtils client,
        # the registered pubkey index and the project name.
        # NOTE(review): helper is defined elsewhere in this file — its exact
        # contract is assumed from the unpacking here; confirm against it.
        change_id, gu, k1_index, pname = self._prepare_review_submit_testing(
            ('file', data))
        # Merge the change
        gu.submit_change_note(change_id, "current", "Code-Review", "2")
        gu.submit_change_note(change_id, "current", "Verified", "2")
        gu.submit_change_note(change_id, "current", "Workflow", "1")
        self.assertTrue(gu.submit_patch(change_id, "current"))
        gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        # Change the file we have commited with Admin user
        k2_index = gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[config.USER_2]["privkey"])
        gitu2 = GerritGitUtils(config.USER_2, priv_key_path,
                               config.USERS[config.USER_2]['email'])
        url = "ssh://%s@%s:29418/%s" % (config.USER_2, config.GATEWAY_HOST,
                                        pname)
        clone_dir = gitu2.clone(url, pname)
        # Track the clone's parent dir so tearDown removes it.
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        data = ['this', 'is', 'some', 'lines']
        file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data))
        gitu2.add_commit_and_publish(clone_dir, "master", "Test commit",
                                     fnames=["file"])
        # Get the change id
        change_ids = gu2.get_my_changes_for_project(pname)
        self.assertEqual(len(change_ids), 1)
        change_id = change_ids[0]
        # Verify first_u has been automatically added to reviewers
        # (poll up to ~3s to let the plugin react to the new change).
        for retry in xrange(3):
            if len(gu2.get_reviewers(change_id)) > 0:
                break
            time.sleep(1)
        reviewers = gu2.get_reviewers(change_id)
        self.assertEqual(len(reviewers), 1)
        self.assertEqual(reviewers[0], config.ADMIN_USER)
        # Cleanup: drop the ssh pubkeys registered during the test.
        gu.del_pubkey(k1_index)
        gu2.del_pubkey(k2_index)
class TestResourcesWorkflow(Base):
    """Functional tests of the managesf resources workflow.

    Resources (projects, repos, acls, groups) are described in YAML files
    under resources/ in the "config" repository. These tests push such
    files — either directly to master or through a Gerrit review — and
    verify the CI validation (config-check) and application
    (config-update) behave as expected, checking the side effects against
    Gerrit, Storyboard and the managesf /manage/resources/ endpoint.
    """

    def setUp(self):
        # Admin-side git and Gerrit REST clients used by all tests.
        super(TestResourcesWorkflow, self).setUp()
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        self.gitu_admin = GerritGitUtils(
            config.ADMIN_USER, priv_key_path,
            config.USERS[config.ADMIN_USER]['email'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ju = JenkinsUtils()
        # Clone directories registered here are removed in tearDown.
        self.dirs_to_delete = []

    def tearDown(self):
        super(TestResourcesWorkflow, self).tearDown()
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
        """Run `subcmd` (a list) on `host` over ssh as `user`.

        Returns ((stdout, stderr), returncode); stdout/stderr are None
        because both streams are redirected to /dev/null.
        """
        host = '%s@%s' % (user, host)
        sshcmd = ['ssh', '-o', 'LogLevel=ERROR',
                  '-o', 'StrictHostKeyChecking=no',
                  '-o', 'UserKnownHostsFile=/dev/null',
                  '-i', sshkey_priv_path, host]
        cmd = sshcmd + subcmd
        devnull = open(os.devnull, 'wb')
        p = Popen(cmd, stdout=devnull, stderr=devnull)
        return p.communicate(), p.returncode

    def clone_as_admin(self, pname):
        """Clone project `pname` over ssh as admin; track dir for cleanup."""
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def commit_direct_push_as_admin(self, clone_dir, msg):
        """Commit all new additions and push straight to master.

        Returns the value of direct_push_branch — presumably the commit
        sha, as callers feed it to wait_for_config_update; confirm against
        GerritGitUtils.
        """
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def set_resources_then_direct_push(self, fpath,
                                       resources=None, mode='add'):
        """Write ('add') or remove ('del') a resources file in the config
        repo, direct-push to master, and assert the config-update job
        succeeded."""
        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            file(path, 'w').write(resources)
        elif mode == 'del':
            os.unlink(path)
        change_sha = self.commit_direct_push_as_admin(
            config_clone_dir,
            "Add new resources for functional tests")
        config_update_log = self.ju.wait_for_config_update(change_sha)
        self.assertIn("SUCCESS", config_update_log)

    def wait_for_jenkins_note(self, change_id):
        """Poll up to ~90s until 'jenkins' appears among the reviewers of
        `change_id` (i.e. CI reported a result)."""
        attempt = 0
        while "jenkins" not in self.gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1

    def propose_resources_change_check_ci(self, fpath,
                                          resources=None,
                                          mode='add',
                                          expected_note=1,
                                          msg=None):
        """Propose a resources change through a Gerrit review and assert
        the CI Verified note equals `expected_note` (+1 valid, -1 invalid).
        """
        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            file(path, 'w').write(resources)
        elif mode == 'del':
            os.unlink(path)
        if not msg:
            msg = "Validate resources"
        if mode == 'add':
            change_sha = self.gitu_admin.add_commit_and_publish(
                config_clone_dir, "master", msg, fnames=[path])
        if mode == 'del':
            change_sha = self.gitu_admin.add_commit_for_all_new_additions(
                config_clone_dir, msg, publish=True)
        change_nr = self.gu.get_change_number(change_sha)
        note = self.gu.wait_for_verify(change_nr)
        self.assertEqual(note, expected_note)

    def get_resources(self):
        """Fetch the resources tree from the managesf REST endpoint."""
        gateau = config.USERS[config.ADMIN_USER]['auth_cookie']
        resp = requests.get("%s/manage/resources/" % config.GATEWAY_URL,
                            cookies={'auth_pubtkt': gateau})
        return resp.json()

    def test_validate_wrong_resource_workflow(self):
        """ Check resources - wrong model is detected by config-check """
        # This resource is not correct
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      unknown-key: value
      description: test for functional test
"""
        # Add the resource file with review then check CI
        resources = resources % name
        self.propose_resources_change_check_ci(fpath,
                                               resources=resources,
                                               mode='add',
                                               expected_note=-1)

    def test_validate_correct_resource_workflow(self):
        """ Check resources - good model is detected by config-check """
        # This resource is correct
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
"""
        # Add the resource file with review then check CI
        resources = resources % name
        self.propose_resources_change_check_ci(fpath,
                                               resources=resources,
                                               mode='add')

    def test_validate_resources_deletion(self):
        """ Check resources - deletions detected and authorized via flag """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members: []
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Remove the resource file via the review; without the
        # allow-delete flag config-check must refuse (-1).
        self.propose_resources_change_check_ci(fpath, mode='del',
                                               expected_note=-1)
        # Remove the resource file with "allow-delete" flag via the review
        shutil.rmtree(os.path.join(self.gitu_admin.tempdir, 'config'))
        msg = "Remove resource with flag\nsf-resources: allow-delete"
        self.propose_resources_change_check_ci(fpath, mode='del', msg=msg)

    @skipIfServiceMissing('storyboard')
    def test_CUD_project(self):
        """ Check resources - ops on project work as expected """
        sclient = SFStoryboard(config.GATEWAY_URL + "/storyboard_api",
                               config.USERS[config.USER_4]['auth_cookie'])
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      issue-tracker: SFStoryboard
      source-repositories:
        - %(pname)s/%(r1name)s
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
"""
        tmpl_keys = {'pname': create_random_str(),
                     'r1name': create_random_str()}
        resources = resources % tmpl_keys
        # Add the resources file w/o review
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Some checks to validate stuff have been created
        projects = [p.name for p in sclient.projects.get_all()]
        self.assertIn("%s/%s" % (tmpl_keys['pname'], tmpl_keys['r1name']),
                      projects)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertIn(tmpl_keys['pname'], project_groups)
        # Modify the project resource: add a second source repository.
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      issue-tracker: SFStoryboard
      source-repositories:
        - %(pname)s/%(r1name)s
        - %(pname)s/%(r2name)s
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(pname)s
    %(pname)s/%(r2name)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
"""
        tmpl_keys.update({'r2name': create_random_str()})
        resources = resources % tmpl_keys
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Some checks to validate stuff have been updated
        projects = [p.name for p in sclient.projects.get_all()]
        for name in (tmpl_keys['r1name'], tmpl_keys['r2name']):
            self.assertIn("%s/%s" % (tmpl_keys['pname'], name), projects)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertIn(tmpl_keys['pname'], project_groups)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the project group has been deleted
        # Note the project (in storyboard) is not deleted
        # this is a current limitation of the API (01/13/2017)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertFalse(tmpl_keys['pname'] in project_groups)

    def test_CUD_group(self):
        """ Check resources - ops on group work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
        - [email protected]
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check members on Gerrit
        gid = self.gu.get_group_id(name)
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        # Modify resources Add/Remove members w/o review
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
        - [email protected]
"""
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check members on Gerrit
        gid = self.gu.get_group_id(name)
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        self.assertNotIn("*****@*****.**", members)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the group has been deleted
        self.assertFalse(self.gu.get_group_id(name))

    def test_CD_repo(self):
        """ Check resources - ops on git repositories work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  repos:
    %s:
      description: test for functional test
      default-branch: br1
      branches:
        br1: HEAD
        br2: HEAD
        master: '0'
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check the project has been created
        self.assertTrue(self.gu.project_exists(name))
        # Check branches: br1, br2 and HEAD must exist, and HEAD must
        # point to the declared default branch br1.
        branches = self.gu.g.get('/projects/%s/branches/' % name)
        for wref in ("HEAD", "br1", "br2"):
            found = False
            for ref in branches:
                if found:
                    continue
                if ref['ref'].endswith(wref):
                    found = True
                    if ref['ref'] == 'HEAD' and ref['revision'] != "br1":
                        raise Exception("Wrong default branch")
            if not found:
                raise Exception("Requested branch %s not found" % wref)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the project has been deleted
        self.assertFalse(self.gu.project_exists(name))

    def test_CRUD_resources(self):
        """ Check resources - bulk ops on resources work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        tmpl_keys = {'pname': create_random_str(),
                     'r1name': create_random_str(),
                     'r2name': create_random_str(),
                     'aname': create_random_str(),
                     'g1name': create_random_str(),
                     'g2name': create_random_str()}
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      contacts:
        - [email protected]
      source-repositories:
        - %(pname)s/%(r1name)s
        - %(pname)s/%(r2name)s
      website: http://ichiban-cloud.io
      documentation: http://ichiban-cloud.io/docs
      issue-tracker-url: http://ichiban-cloud.bugtrackers.io
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(aname)s
    %(pname)s/%(r2name)s:
      description: The client part
      acl: %(aname)s
  acls:
    %(aname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
          read = group %(pname)s/%(g2name)s
          owner = group %(pname)s/%(g1name)s
        [access "refs/heads/*"]
          label-Code-Review = -2..+2 group %(pname)s/%(g2name)s
          label-Verified = -2..+2 group %(pname)s/%(g1name)s
          label-Workflow = -1..+1 group %(pname)s/%(g2name)s
          submit = group %(pname)s/%(g2name)s
          read = group Anonymous Users
          read = group %(pname)s/%(g2name)s
        [access "refs/meta/config"]
          read = group %(pname)s/%(g2name)s
        [receive]
          requireChangeId = true
        [submit]
          mergeContent = false
          action = fast forward only
      groups:
        - %(pname)s/%(g1name)s
        - %(pname)s/%(g2name)s
  groups:
    %(pname)s/%(g1name)s:
      members:
        - [email protected]
    %(pname)s/%(g2name)s:
      members:
        - [email protected]
        - [email protected]
"""
        # Add the resources file w/o review
        resources = resources % tmpl_keys
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check resources have been created
        self.assertTrue(self.gu.project_exists(
            os.path.join(tmpl_keys['pname'], tmpl_keys['r1name'])))
        self.assertTrue(self.gu.project_exists(
            os.path.join(tmpl_keys['pname'], tmpl_keys['r2name'])))
        gid = self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']))
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertEqual(len(members), 1)
        self.assertIn("*****@*****.**", members)
        gid2 = self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g2name']))
        members = [m['email'] for m in self.gu.get_group_members(gid2)]
        self.assertEqual(len(members), 2)
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        # Verify ACLs have been written for both repo
        for r in ('r1name', 'r2name'):
            rname = os.path.join(tmpl_keys['pname'], tmpl_keys[r])
            acl = self.gu.g.get('access/?project=%s' % rname)
            self.assertIn(
                gid2,
                acl[rname]['local']['refs/heads/*']['permissions']
                ['submit']['rules'].keys())
        # Verify the resources endpoint know about what we pushed
        res = self.get_resources()
        self.assertIn(tmpl_keys['pname'], res['resources']['projects'].keys())
        self.assertIn(tmpl_keys['aname'], res['resources']['acls'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']),
                      res['resources']['groups'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g2name']),
                      res['resources']['groups'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r1name']),
                      res['resources']['repos'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r2name']),
                      res['resources']['repos'].keys())
        # Modify the ACL to verify repos ACL are updated
        # (switch the submit right from g2 to g1 in the ACL text).
        resources = re.sub(
            'submit = group .*',
            'submit = group %s' % os.path.join(tmpl_keys['pname'],
                                               tmpl_keys['g1name']),
            resources)
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Verify ACLs have been updated for both repo
        for r in ('r1name', 'r2name'):
            rname = os.path.join(tmpl_keys['pname'], tmpl_keys[r])
            acl = self.gu.g.get('access/?project=%s' % rname)
            self.assertIn(
                gid,
                acl[rname]['local']['refs/heads/*']['permissions']
                ['submit']['rules'].keys())
        # Now let's remove all that awesome resources
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check resources have been deleted
        self.assertFalse(self.gu.project_exists(
            os.path.join(tmpl_keys['pname'], tmpl_keys['r1name'])))
        self.assertFalse(self.gu.project_exists(
            os.path.join(tmpl_keys['pname'], tmpl_keys['r2name'])))
        self.assertFalse(self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g1name'])))
        self.assertFalse(self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g2name'])))
        res = self.get_resources()
        projects = res['resources'].get('projects', {})
        acls = res['resources'].get('acls', {})
        groups = res['resources'].get('groups', {})
        repos = res['resources'].get('repos', {})
        self.assertNotIn(tmpl_keys['pname'], projects.keys())
        self.assertNotIn(tmpl_keys['aname'], acls.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['g1name']), groups.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['g2name']), groups.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['r1name']), repos.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['r2name']), repos.keys())

    def test_GET_resources(self):
        """ Check resources - GET resources works as expected"""
        cookies = dict(auth_pubtkt=config.USERS[config.USER_1]['auth_cookie'])
        ret = requests.get("%s/manage/resources/" % config.GATEWAY_URL,
                           cookies=cookies)
        self.assertIn('resources', ret.json())

    def test_GET_missing_resources(self):
        """ Check resources - GET missing resources works as expected"""
        token = config.USERS[config.ADMIN_USER]['auth_cookie']
        prev = "resources: {}"
        new = """resources:
  groups:
    %(gname)s:
      description: A test group
      members: ['*****@*****.**']
"""
        group_name = create_random_str()
        data = {'prev': prev, 'new': new % {'gname': group_name}}
        # Direct PUT resources bypassing the config repo workflow
        requests.put("%s/manage/resources/" % config.GATEWAY_URL,
                     json=data, cookies={'auth_pubtkt': token})
        # Verify managesf detects diff and propose a re-sync resource struct
        ret = requests.get("%s/manage/resources/?get_missing_"
                           "resources=true" % config.GATEWAY_URL,
                           cookies={'auth_pubtkt': token})
        logs, resources = ret.json()
        self.assertListEqual(logs, [])
        self.assertIn(group_name, resources['resources']['groups'])
        # Call the resources.sh script on managesf node to propose
        # a review on the config repo to re-sync with the reality
        cmd = ['/usr/local/bin/resources.sh', 'get_missing_resources',
               'submit']
        self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                         config.GATEWAY_HOST, cmd)
        # Get change id of the submitted review (highest change number
        # matching the proposal subject).
        search_string = "Propose missing resources to the config repo"
        r = requests.get('%s/r/changes/?q=%s' % (config.GATEWAY_URL,
                                                 search_string))
        lastid = 0
        # Gerrit prefixes JSON responses with a magic ")]}'" line, hence
        # the [4:] slice before decoding.
        for r in json.loads(r.content[4:]):
            if r['_number'] > lastid:
                lastid = r['_number']
        self.assertEqual(self.gu.wait_for_verify(lastid), 1)
        # Check flag "sf-resources: skip-apply" in the commit msg
        change = self.gu.g.get(
            'changes/?q=%s&o=CURRENT_REVISION&o=CURRENT_COMMIT' % lastid)[0]
        revision = change["current_revision"]
        commit = change['revisions'][revision]["commit"]
        self.assertEqual(commit["message"].split('\n')[0],
                         'Propose missing resources to the config repo')
        self.assertTrue(commit["message"].find('sf-resources: skip-apply') > 0)
        # Approve the change and wait for the +2
        self.gu.submit_change_note(change['id'], "current", "Code-Review", "2")
        self.gu.submit_change_note(change['id'], "current", "Workflow", "1")
        # Check config-update return a success
        # The flag sf-resources: skip-apply should be detected
        # by the config update. Then missing resources won't
        # by concidered new and the resources apply will be skipped.
        # This tests (checking config-update succeed) confirm
        # resource apply have been skipped if not managesf resources
        # apply would have return 409 error making config-update failed too.
        # If not True then we cannot concider config-update succeed
        config_update_log = self.ju.wait_for_config_update(revision)
        self.assertIn("Skip resources apply.", config_update_log)
        self.assertIn("SUCCESS", config_update_log)
        # Checking again missing resources must return nothing
        ret = requests.get("%s/manage/resources/?get_missing_"
                           "resources=true" % config.GATEWAY_URL,
                           cookies={'auth_pubtkt': token})
        logs, resources = ret.json()
        self.assertListEqual(logs, [])
        self.assertEqual(len(resources['resources']), 0)
def test_check_add_automatic_reviewers(self): """ Test if reviewers-by-blame plugin works """ pname = 'p_%s' % create_random_str() u2mail = config.USERS[config.USER_2]['email'] options = {'core-group': u2mail} self.create_project(pname, options) first_u = config.ADMIN_USER gu_first_u = GerritUtils( config.GATEWAY_URL, auth_cookie=config.USERS[first_u]['auth_cookie']) self.assertTrue(gu_first_u.project_exists(pname)) # Push data in the create project as Admin user k1_index = gu_first_u.add_pubkey(config.USERS[first_u]["pubkey"]) priv_key_path = set_private_key(config.USERS[first_u]["privkey"]) gitu = GerritGitUtils(first_u, priv_key_path, config.USERS[first_u]['email']) url = "ssh://%s@%s:29418/%s" % (first_u, config.GATEWAY_HOST, pname) clone_dir = gitu.clone(url, pname) self.dirs_to_delete.append(os.path.dirname(clone_dir)) data = ['this', 'is', 'a', 'couple', 'of', 'lines'] clone_dir = gitu.clone(url, pname) file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data)) gitu.add_commit_and_publish(clone_dir, "master", "Test commit", fnames=["file"]) # Get the change id change_ids = gu_first_u.get_my_changes_for_project(pname) self.assertEqual(len(change_ids), 1) change_id = change_ids[0] # Merge the change gu_first_u.submit_change_note(change_id, "current", "Code-Review", "2") gu_first_u.submit_change_note(change_id, "current", "Verified", "2") gu_first_u.submit_change_note(change_id, "current", "Workflow", "1") second_u = config.USER_2 gu_second_u = GerritUtils( config.GATEWAY_URL, auth_cookie=config.USERS[second_u]['auth_cookie']) self.assertTrue(gu_first_u.submit_patch(change_id, "current")) # Change the file we have commited with Admin user k2_index = gu_second_u.add_pubkey(config.USERS[second_u]["pubkey"]) priv_key_path = set_private_key(config.USERS[second_u]["privkey"]) gitu = GerritGitUtils(second_u, priv_key_path, config.USERS[second_u]['email']) url = "ssh://%s@%s:29418/%s" % (second_u, config.GATEWAY_HOST, pname) clone_dir = gitu.clone(url, pname) 
self.dirs_to_delete.append(os.path.dirname(clone_dir)) data = ['this', 'is', 'some', 'lines'] file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data)) gitu.add_commit_and_publish(clone_dir, "master", "Test commit", fnames=["file"]) # Get the change id change_ids = gu_second_u.get_my_changes_for_project(pname) self.assertEqual(len(change_ids), 1) change_id = change_ids[0] # Verify first_u has been automatically added to reviewers attempts = 0 while True: if len(gu_second_u.get_reviewers(change_id)) > 0 or attempts >= 3: break attempts += 1 time.sleep(1) reviewers = gu_second_u.get_reviewers(change_id) self.assertGreaterEqual(len(reviewers), 1) self.assertTrue(first_u in reviewers) gu_first_u.del_pubkey(k1_index) gu_second_u.del_pubkey(k2_index)
class TestProjectTestsWorkflow(Base):
    """ Functional tests to verify the configuration of a project test
    """
    @classmethod
    def setUpClass(cls):
        # Shared managesf client and path to the bundled sample project
        # used as source content for the projects created in these tests.
        cls.msu = ManageSfUtils(config.GATEWAY_URL)
        cls.sample_project_dir = \
            os.path.join(config.SF_TESTS_DIR, "sample_project/")

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        # Per-test bookkeeping: projects created (deleted in tearDown)
        # and clone directories to remove.
        self.projects = []
        self.dirs_to_delete = []
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])
        # Clone the config repo and make change to it
        # in order to test the new sample_project
        self.config_clone_dir = self.clone_as_admin("config")
        # Keep pristine copies of the Zuul/JJB config files so tearDown
        # can restore them after the test mutated them.
        self.original_layout = file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml")).read()
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        self.original_project = file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read()
        # Put USER_2 as core for config project
        self.gu.add_group_member(config.USER_2, "config-core")

    def tearDown(self):
        self.restore_config_repo(self.original_layout,
                                 self.original_project,
                                 self.original_zuul_projects)
        for name in self.projects:
            self.msu.deleteProject(name, config.ADMIN_USER)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def assert_reviewer_approvals(self, change_id, value):
        """Poll up to ~90s until the 'jenkins' reviewer's Verified
        approval on `change_id` equals `value` (e.g. '+1'), then assert."""
        approvals = {}
        for _ in range(90):
            approvals = self.gu.get_reviewer_approvals(change_id, 'jenkins')
            if approvals and approvals.get('Verified') == value:
                break
            time.sleep(1)
        self.assertEqual(value, approvals.get('Verified'))

    def clone_as_admin(self, pname):
        """Clone project `pname` over ssh as admin; track dir for cleanup."""
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, layout, project, zuul):
        """Write back the saved Zuul/JJB config files and direct-push."""
        file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml"), 'w').write(
            layout)
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            project)
        self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore layout.yaml and projects.yaml")

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def push_review_as_admin(self, clone_dir, msg):
        # Stage, commit and push the additions for review on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.review_push_branch(clone_dir, 'master')

    def create_project(self, name, user, options=None):
        """Create project via managesf and register it for tearDown."""
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def test_check_project_test_workflow(self):
        """ Validate new project to test via zuul layout.yaml
        """
        # We want to create a project, provide project source
        # code with tests. We then configure zuul/jjb to handle the
        # run of the test cases. We then validate Gerrit has been
        # updated about the test results
        # We use the sample-project (that already exists)

        pname = 'test_workflow_%s' % create_random_str()
        # Be sure the project does not exist
        self.msu.deleteProject(pname, config.ADMIN_USER)
        # Create it
        self.create_project(pname, config.ADMIN_USER)
        # Add the sample-project to the empty repository
        clone_dir = self.clone_as_admin(pname)
        copytree(self.sample_project_dir, clone_dir)
        self.commit_direct_push_as_admin(clone_dir, "Add the sample project")
        # Change to config/zuul/layout.yaml and jobs/projects.yaml
        # in order to test the new project: reuse the zuul-demo job
        # definitions by substituting/duplicating them under pname.
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("zuul-demo", pname),
        )
        ycontent2 = load(file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read())
        sp2 = copy.deepcopy(
            [p for p in ycontent2 if 'project' in p and
                p['project']['name'] == 'zuul-demo'][0])
        sp2['project']['name'] = pname
        ycontent2.append(sp2)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            dump(ycontent2))
        # Retrieve the previous build number for config-check
        last_success_build_num_ch = \
            self.ju.get_last_build_number("config-check",
                                          "lastSuccessfulBuild")
        # Retrieve the previous build number for config-update
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        # Send review (config-check) will be triggered
        self.push_review_as_admin(
            self.config_clone_dir,
            "Add config definition in Zuul/JJB config for %s" % pname)
        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")
        # Poll up to ~90s until the last build equals the last successful
        # build, i.e. the most recent config-check run passed.
        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1
        self.assertEqual(last_build_num_ch, last_success_build_num_ch)
        # let some time to Zuul to update the test result to Gerrit.
        time.sleep(2)
        # Get the change id
        change_ids = self.gu.get_my_changes_for_project("config")
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        # Check whether zuul sets verified to +1 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+1')
        # review the change
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu2.submit_change_note(change_id, "current", "Workflow", "1")
        # now zuul processes gate pipeline and runs config-check job
        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")
        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1
        self.assertEqual(last_build_num_ch, last_success_build_num_ch)
        # Check whether zuul sets verified to +2 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+2')
        # verify whether zuul merged the patch (poll up to ~90s)
        change = self.gu.get_change('config', 'master', change_id)
        change_status = change['status']
        attempt = 0
        while change_status != 'MERGED':
            if attempt >= 90:
                break
            time.sleep(1)
            change = self.gu.get_change('config', 'master', change_id)
            change_status = change['status']
            attempt += 1
        self.assertEqual(change_status, 'MERGED')
        # Test post pipe line
        # as the patch is merged, post pieline should run config-update job
        # Wait for config-update to finish and verify the success
        self.ju.wait_till_job_completes("config-update",
                                        last_success_build_num_cu,
                                        "lastSuccessfulBuild")
        last_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastBuild")
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_cu, last_success_build_num_cu)
        # Retrieve the prev build number for pname-unit-tests
        # Retrieve the prev build number for pname-functional-tests
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        # Test config-update
        # config-update should have created jobs for pname
        # Trigger tests on pname
        # Send a review and check tests has been run
        self.gitu_admin.add_commit_and_publish(
            clone_dir, 'master', "Add useless file", self.un)
        # Wait for pname-unit-tests to finish and verify the success
        self.ju.wait_till_job_completes("%s-unit-tests" % pname,
                                        last_success_build_num_sp_ut,
                                        "lastSuccessfulBuild")
        # Wait for pname-functional-tests to end and check the success
        self.ju.wait_till_job_completes("%s-functional-tests" % pname,
                                        last_success_build_num_sp_ft,
                                        "lastSuccessfulBuild")
        # Check the unit tests succeed
        last_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ut, last_success_build_num_sp_ut)
        # Check the functional tests succeed
        last_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ft, last_success_build_num_sp_ft)
        # Get the change id
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        # let some time to Zuul to update the test result to Gerrit.
        for i in range(90):
            if "jenkins" in self.gu.get_reviewers(change_id):
                break
            time.sleep(1)
        self.assert_reviewer_approvals(change_id, '+1')
def test_check_zuul_operations(self):
    """ Test if zuul verifies project correctly through zuul-demo project
    """
    # zuul-demo - test project used exclusively to test zuul installation
    # The necessary project descriptions are already declared in Jenkins
    # and zuul
    pname = 'demo/zuul-demo'
    user = config.ADMIN_USER
    self.create_project(pname, user)
    gerrit = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[user]['auth_cookie'])
    jenkins = JenkinsUtils()
    key_index = gerrit.add_pubkey(config.USERS[user]["pubkey"])

    # Gerrit part
    self.assertTrue(gerrit.project_exists(pname))
    priv_key = set_private_key(config.USERS[user]["privkey"])
    git = GerritGitUtils(user, priv_key, config.USERS[user]['email'])
    repo_dir = git.clone(
        "ssh://%s@%s:29418/%s" % (user, config.GATEWAY_HOST, pname),
        pname)
    self.dirs_to_delete.append(os.path.dirname(repo_dir))

    # Snapshot the current Jenkins build counters so the waits below
    # only account for builds triggered by this test.
    prev_failed_ft = jenkins.get_last_build_number(
        "zuul-demo-functional-tests", "lastFailedBuild")
    prev_failed_ut = jenkins.get_last_build_number(
        "zuul-demo-unit-tests", "lastFailedBuild")
    prev_ok_ft = jenkins.get_last_build_number(
        "zuul-demo-functional-tests", "lastSuccessfulBuild")
    prev_ok_ut = jenkins.get_last_build_number(
        "zuul-demo-unit-tests", "lastSuccessfulBuild")

    git.add_commit_and_publish(repo_dir, "master", "Test commit")

    change_ids = gerrit.get_my_changes_for_project(pname)
    self.assertGreater(len(change_ids), 0)
    change_id = change_ids[0]

    def _poll(condition, retries=90):
        # Re-evaluate condition once per second until it holds or the
        # retry budget is exhausted (Zuul reports asynchronously).
        while not condition():
            if retries <= 0:
                break
            time.sleep(1)
            retries -= 1

    # Give some time for jenkins to work: with no test scripts in the
    # repository both demo jobs are expected to fail.
    jenkins.wait_till_job_completes("zuul-demo-functional-tests",
                                    prev_failed_ft, "lastFailedBuild")
    jenkins.wait_till_job_completes("zuul-demo-unit-tests",
                                    prev_failed_ut, "lastFailedBuild")
    _poll(lambda: "jenkins" in gerrit.get_reviewers(change_id))
    _poll(lambda: gerrit.get_reviewer_approvals(
        change_id, 'jenkins')['Verified'] == '-1')
    self.assertEqual(
        gerrit.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
        '-1')

    # Add the test case files and resubmit for review
    scripts = ["run_functional-tests.sh", "run_tests.sh"]
    for script in scripts:
        target = os.path.join(repo_dir, script)
        open(target, 'w').write("echo Working")
        os.chmod(target, 0o755)
    git.add_commit_and_publish(repo_dir, "master", None, fnames=scripts)

    # Give some time for jenkins to work: both jobs must now succeed
    # and the Verified vote must flip to +1.
    jenkins.wait_till_job_completes("zuul-demo-functional-tests",
                                    prev_ok_ft, "lastSuccessfulBuild")
    jenkins.wait_till_job_completes("zuul-demo-unit-tests",
                                    prev_ok_ut, "lastSuccessfulBuild")
    _poll(lambda: "jenkins" in gerrit.get_reviewers(change_id))
    _poll(lambda: gerrit.get_reviewer_approvals(
        change_id, 'jenkins')['Verified'] == '+1')
    self.assertEqual(
        gerrit.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
        '+1')

    gerrit.del_pubkey(key_index)
def test_check_add_automatic_reviewers(self):
    """ Test if reviewers-by-blame plugin works

    The admin user authors and merges a change in a fresh project.
    When a second user then modifies the same file, the plugin must
    automatically add the admin (the blamed author) as a reviewer on
    the new change.
    """
    pname = 'p_%s' % create_random_str()
    u2mail = config.USERS[config.USER_2]['email']
    options = {'core-group': u2mail}
    self.create_project(pname, options)
    first_u = config.ADMIN_USER
    gu_first_u = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[first_u]['auth_cookie'])
    self.assertTrue(gu_first_u.project_exists(pname))
    # Push data in the created project as Admin user
    k1_index = gu_first_u.add_pubkey(config.USERS[first_u]["pubkey"])
    priv_key_path = set_private_key(config.USERS[first_u]["privkey"])
    gitu = GerritGitUtils(first_u, priv_key_path,
                          config.USERS[first_u]['email'])
    url = "ssh://%s@%s:29418/%s" % (first_u, config.GATEWAY_HOST, pname)
    clone_dir = gitu.clone(url, pname)
    self.dirs_to_delete.append(os.path.dirname(clone_dir))
    data = ['this', 'is', 'a', 'couple', 'of', 'lines']
    # FIX: the repository was cloned a second time here, leaking an
    # unregistered working directory; the first clone is reused instead.
    file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data))
    gitu.add_commit_and_publish(clone_dir, "master", "Test commit",
                                fnames=["file"])
    # Get the change id
    change_ids = gu_first_u.get_my_changes_for_project(pname)
    self.assertEqual(len(change_ids), 1)
    change_id = change_ids[0]
    # Merge the change
    gu_first_u.submit_change_note(change_id, "current", "Code-Review",
                                  "2")
    gu_first_u.submit_change_note(change_id, "current", "Verified", "2")
    gu_first_u.submit_change_note(change_id, "current", "Workflow", "1")
    second_u = config.USER_2
    gu_second_u = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[second_u]['auth_cookie'])
    self.assertTrue(gu_first_u.submit_patch(change_id, "current"))
    # Change the file we have committed with Admin user
    k2_index = gu_second_u.add_pubkey(config.USERS[second_u]["pubkey"])
    priv_key_path = set_private_key(config.USERS[second_u]["privkey"])
    gitu = GerritGitUtils(second_u, priv_key_path,
                          config.USERS[second_u]['email'])
    url = "ssh://%s@%s:29418/%s" % (second_u, config.GATEWAY_HOST,
                                    pname)
    clone_dir = gitu.clone(url, pname)
    self.dirs_to_delete.append(os.path.dirname(clone_dir))
    data = ['this', 'is', 'some', 'lines']
    file(os.path.join(clone_dir, "file"), 'w').write("\n".join(data))
    gitu.add_commit_and_publish(clone_dir, "master", "Test commit",
                                fnames=["file"])
    # Get the change id
    change_ids = gu_second_u.get_my_changes_for_project(pname)
    self.assertEqual(len(change_ids), 1)
    change_id = change_ids[0]
    # Verify first_u has been automatically added to reviewers; allow
    # a few seconds for the plugin to react to the new patchset.
    attempts = 0
    while True:
        if len(gu_second_u.get_reviewers(change_id)) > 0 or \
                attempts >= 3:
            break
        attempts += 1
        time.sleep(1)
    reviewers = gu_second_u.get_reviewers(change_id)
    self.assertGreaterEqual(len(reviewers), 1)
    self.assertTrue(first_u in reviewers)
    gu_first_u.del_pubkey(k1_index)
    gu_second_u.del_pubkey(k2_index)
def test_check_zuul_operations(self):
    """ Test if zuul verifies project correctly through zuul-demo project
    """
    # zuul-demo - test project used exclusively to test zuul installation
    # The necessary project descriptions are already declared in Jenkins
    # and zuul
    project = 'zuul-demo'
    admin = config.ADMIN_USER
    self.create_project(project, admin)
    gu = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[admin]['auth_cookie'])
    ju = JenkinsUtils()
    pubkey_index = gu.add_pubkey(config.USERS[admin]["pubkey"])

    # Gerrit part
    self.assertTrue(gu.project_exists(project))
    key_path = set_private_key(config.USERS[admin]["privkey"])
    gitu = GerritGitUtils(admin, key_path, config.USERS[admin]['email'])
    clone_dir = gitu.clone(
        "ssh://%s@%s:29418/%s" % (admin, config.GATEWAY_HOST, project),
        project)
    self.dirs_to_delete.append(os.path.dirname(clone_dir))

    # Record the current build counters so the waits below only see
    # builds triggered by this test.
    jobs = ("zuul-demo-functional-tests", "zuul-demo-unit-tests")
    baseline = {}
    for job in jobs:
        baseline[(job, "lastFailedBuild")] = \
            ju.get_last_build_number(job, "lastFailedBuild")
        baseline[(job, "lastSuccessfulBuild")] = \
            ju.get_last_build_number(job, "lastSuccessfulBuild")

    gitu.add_commit_and_publish(clone_dir, "master", "Test commit")

    change_ids = gu.get_my_changes_for_project(project)
    self.assertEqual(len(change_ids), 1)
    change_id = change_ids[0]

    # Give some time for jenkins to work: without test scripts both
    # demo jobs are expected to fail.
    for job in jobs:
        ju.wait_till_job_completes(
            job, baseline[(job, "lastFailedBuild")], "lastFailedBuild")
    # Poll (up to 90s each) until jenkins shows up as reviewer and has
    # voted Verified -1 on the change.
    countdown = 90
    while "jenkins" not in gu.get_reviewers(change_id):
        if countdown <= 0:
            break
        time.sleep(1)
        countdown -= 1
    countdown = 90
    while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
            != '-1':
        if countdown <= 0:
            break
        time.sleep(1)
        countdown -= 1
    self.assertEqual(
        gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
        '-1')

    # Add the test case files and resubmit for review
    script_names = ["run_functional-tests.sh", "run_tests.sh"]
    for name in script_names:
        target = os.path.join(clone_dir, name)
        open(target, 'w').write("echo Working")
        os.chmod(target, 0o755)
    gitu.add_commit_and_publish(clone_dir, "master", None,
                                fnames=script_names)

    # Give some time for jenkins to work: the jobs must now succeed and
    # the Verified vote must flip to +1.
    for job in jobs:
        ju.wait_till_job_completes(
            job, baseline[(job, "lastSuccessfulBuild")],
            "lastSuccessfulBuild")
    countdown = 90
    while "jenkins" not in gu.get_reviewers(change_id):
        if countdown <= 0:
            break
        time.sleep(1)
        countdown -= 1
    countdown = 90
    while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
            != '+1':
        if countdown <= 0:
            break
        time.sleep(1)
        countdown -= 1
    self.assertEqual(
        gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
        '+1')

    gu.del_pubkey(pubkey_index)