def setUp(self):
    """Prepare admin and USER_2 Gerrit sessions and snapshot the config repo.

    The original content of the zuul/jobs YAML files is saved so the
    test can later restore the config repository to its initial state.
    """
    self.projects = []
    self.dirs_to_delete = []
    self.un = config.ADMIN_USER
    self.gu = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[self.un]['auth_cookie'])
    self.gu2 = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
    self.ju = JenkinsUtils()
    self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
    priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
    self.gitu_admin = GerritGitUtils(self.un,
                                     priv_key_path,
                                     config.USERS[self.un]['email'])
    # Clone the config repo and make change to it
    # in order to test the new sample_project
    self.config_clone_dir = self.clone_as_admin("config")
    # Use context managers so the descriptors are closed promptly;
    # the previous file(...).read() pattern leaked them.
    with open(os.path.join(self.config_clone_dir,
                           "zuul/layout.yaml")) as f:
        self.original_layout = f.read()
    with open(os.path.join(self.config_clone_dir,
                           "zuul/projects.yaml")) as f:
        self.original_zuul_projects = f.read()
    with open(os.path.join(self.config_clone_dir,
                           "jobs/projects.yaml")) as f:
        self.original_project = f.read()
    # Put USER_2 as core for config project
    self.gu.add_group_member(config.USER_2, "config-core")
def setUp(self):
    """Prepare admin and USER_2 Gerrit sessions and snapshot the config repo.

    Keeps the original zuul/jobs YAML content so a later restore can
    undo the modifications made by the test.
    """
    super(TestProjectTestsWorkflow, self).setUp()
    self.projects = []
    self.dirs_to_delete = []
    self.un = config.ADMIN_USER
    self.gu = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[self.un]['auth_cookie'])
    self.gu2 = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
    self.ju = JenkinsUtils()
    self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
    priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
    self.gitu_admin = GerritGitUtils(self.un,
                                     priv_key_path,
                                     config.USERS[self.un]['email'])
    # Clone the config repo and keep job/zuul config content
    self.config_clone_dir = self.clone_as_admin("config")
    # Context managers close the handles; file(...).read() leaked them.
    with open(os.path.join(self.config_clone_dir,
                           "zuul/projects.yaml")) as f:
        self.original_zuul_projects = f.read()
    with open(os.path.join(self.config_clone_dir,
                           "jobs/projects.yaml")) as f:
        self.original_project = f.read()
    self.need_restore_config_repo = False
    # Put USER_2 as core for config project
    self.gu.add_group_member(config.USER_2, "config-core")
def __init__(self):
    """Load the provisioning resources and build the service helpers."""
    with open("%s/resources.yaml" % pwd, 'r') as rsc:
        # safe_load: the file is plain data; yaml.load without an
        # explicit Loader is unsafe and deprecated.
        self.resources = yaml.safe_load(rsc)
    config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
        config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
    self.msu = ManageSfUtils(config.GATEWAY_URL)
    self.ggu = GerritGitUtils(config.ADMIN_USER,
                              config.ADMIN_PRIV_KEY_PATH,
                              config.USERS[config.ADMIN_USER]['email'])
    self.ju = JenkinsUtils()
    self.rm = RedmineUtils(
        config.GATEWAY_URL + "/redmine/",
        auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
def setUp(self):
    """Build the Gerrit/Jenkins helpers used by the repoxplorer tests."""
    super(TestRepoxplorer, self).setUp()
    admin = config.ADMIN_USER
    key_path = set_private_key(config.USERS[admin]["privkey"])
    self.gitu_admin = GerritGitUtils(
        admin, key_path, config.USERS[admin]['email'])
    self.gu = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[admin]['auth_cookie'])
    self.ju = JenkinsUtils()
    self.dirs_to_delete = []
def __init__(self):
    """Load the provisioning resources and build the service helpers."""
    with open("%s/resources.yaml" % pwd, 'r') as rsc:
        # safe_load: plain data only; yaml.load without an explicit
        # Loader is unsafe and deprecated.
        self.resources = yaml.safe_load(rsc)
    config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
        config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
    self.msu = ManageSfUtils(config.GATEWAY_URL)
    self.ru = ResourcesUtils()
    self.ggu = GerritGitUtils(config.ADMIN_USER,
                              config.ADMIN_PRIV_KEY_PATH,
                              config.USERS[config.ADMIN_USER]['email'])
    self.ju = JenkinsUtils()
    self.stb_client = SFStoryboard(
        config.GATEWAY_URL + "/storyboard_api",
        config.USERS[config.ADMIN_USER]['auth_cookie'])
class TestJenkinsBasic(Base):
    """ Functional tests to validate config repo bootstrap
    """
    def setUp(self):
        # Call the base-class setUp for consistency with the other
        # test classes in this suite.
        super(TestJenkinsBasic, self).setUp()
        self.ju = JenkinsUtils()

    def test_config_jobs_exist(self):
        """ Test if jenkins config-update and config-check are created
        """
        url = '%s/job/config-check/' % self.ju.jenkins_url
        resp = self.ju.get(url)
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(resp.status_code, 200)
        url = '%s/job/config-update/' % self.ju.jenkins_url
        resp = self.ju.get(url)
        self.assertEqual(resp.status_code, 200)
def setUp(self):
    """Rename zuul-demo in zuul/projects.yaml and push the new config.

    Waits for the resulting config-update Jenkins job to succeed before
    the test runs.
    """
    self.projects = []
    self.dirs_to_delete = []
    self.ju = JenkinsUtils()
    priv_key_path = set_private_key(
        config.USERS[config.ADMIN_USER]["privkey"])
    self.gitu_admin = GerritGitUtils(
        config.ADMIN_USER,
        priv_key_path,
        config.USERS[config.ADMIN_USER]['email'])
    # Change to zuul/projects.yaml in order to test a with different name
    self.config_clone_dir = self.clone_as_admin("config")
    zuul_projects = os.path.join(self.config_clone_dir,
                                 "zuul/projects.yaml")
    # Read the file once (the original read it twice) and keep the
    # content for a later restore; context managers close the handles
    # that the previous file(...) calls leaked.
    with open(zuul_projects) as f:
        self.original_zuul_projects = f.read()
    ycontent = self.original_zuul_projects
    with open(zuul_projects, 'w') as f:
        f.write(ycontent.replace("name: zuul-demo",
                                 "name: demo/zuul-demo"))
    last_success_build_num_cu = \
        self.ju.get_last_build_number("config-update",
                                      "lastSuccessfulBuild")
    self.commit_direct_push_as_admin(
        self.config_clone_dir, "Set zuul/projects.yaml")
    self.ju.wait_till_job_completes("config-update",
                                    last_success_build_num_cu,
                                    "lastSuccessfulBuild",
                                    max_retries=60)
def setUpClass(cls):
    """Create a throw-away parameterized Jenkins job and run it once."""
    cls.cookie = {"auth_pubtkt": get_cookie(config.ADMIN_USER,
                                            config.ADMIN_PASSWORD)}
    cls.ju = JenkinsUtils()
    cls.test_job = "test-sleep-" + rand_suffix()
    cls.ju.create_job(cls.test_job, TEST_PARAMETERIZED_JOB)
    cls.ju.run_job(cls.test_job, {'timeout': '1'})
    cls.ju.wait_till_job_completes(cls.test_job, 1, 'lastBuild')
    cls.base_url = config.GATEWAY_URL + "/manage/jobs/"
def setUp(self):
    """Prepare Gerrit sessions and an SSH environment for mirror clones.

    Writes the service private key and an ssh wrapper script into a
    temp dir, wires the wrapper through GIT_SSH, and removes leftovers
    of any previous run.
    """
    super(TestProjectReplication, self).setUp()
    self.ru = ResourcesUtils()
    self.un = config.ADMIN_USER
    self.ju = JenkinsUtils()
    self.gu = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[self.un]['auth_cookie'])
    self.gu2 = GerritUtils(
        config.GATEWAY_URL,
        auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
    self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
    priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
    self.gitu_admin = GerritGitUtils(self.un,
                                     priv_key_path,
                                     config.USERS[self.un]['email'])
    # Prepare environment for git clone on mirror repo
    self.mt = Tool()
    self.mt_tempdir = tempfile.mkdtemp()
    # Copy the service private key in a flat file; context managers
    # close the descriptors the previous file(...) calls leaked.
    with open(config.SERVICE_PRIV_KEY_PATH, 'r') as f:
        priv_key = f.read()
    priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
    with open(priv_key_path, 'w') as f:
        f.write(priv_key)
    os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
    # Prepare the ssh wrapper script
    ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
        priv_key_path)
    wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
    with open(wrapper_path, 'w') as f:
        f.write(ssh_wrapper)
    os.chmod(wrapper_path, stat.S_IRWXU)
    # Set the wrapper as GIT_SSH env variable
    self.mt.env['GIT_SSH'] = wrapper_path
    self.config_clone_dir = None
    # Project we are going to configure the replication for
    self.pname = 'test/replication'
    # Remove artifacts of previous run if any
    self.delete_config_section(self.un, self.pname)
    self.delete_mirror_repo(self.pname)
def __init__(self):
    """Load the provisioning resources and build the service helpers."""
    with open('resources.yaml', 'r') as rsc:
        # safe_load: plain data only; yaml.load without an explicit
        # Loader is unsafe and deprecated.
        self.resources = yaml.safe_load(rsc)
    config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
        config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
    self.msu = ManageSfUtils(config.GATEWAY_URL)
    self.ggu = GerritGitUtils(config.ADMIN_USER,
                              config.ADMIN_PRIV_KEY_PATH,
                              config.USERS[config.ADMIN_USER]['email'])
    self.ju = JenkinsUtils()
    self.rm = RedmineUtils(
        config.REDMINE_URL,
        auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
def __init__(self):
    """Load the provisioning resources and build the service helpers."""
    with open("%s/resources.yaml" % pwd, 'r') as rsc:
        # safe_load: plain data only; yaml.load without an explicit
        # Loader is unsafe and deprecated.
        self.resources = yaml.safe_load(rsc)
    config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
        config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
    self.gu = GerritUtils(
        'http://%s/' % config.GATEWAY_HOST,
        auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
    self.ggu = GerritGitUtils(config.ADMIN_USER,
                              config.ADMIN_PRIV_KEY_PATH,
                              config.USERS[config.ADMIN_USER]['email'])
    self.ju = JenkinsUtils()
    self.rm = RedmineUtils(
        config.GATEWAY_URL + "/redmine/",
        auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
class TestJenkinsBasic(Base):
    """ Functional tests to validate config repo bootstrap
    """
    def setUp(self):
        super(TestJenkinsBasic, self).setUp()
        self.ju = JenkinsUtils()

    def test_sf_service_user_credentials(self):
        """Test if SF_SERVICE_USER's credentials are correctly added
        to the credentials store"""
        creds_uid = '900936e8-a4e0-483e-8ab8-07bca5f80699'
        url = '%s/credential-store/domain/_/credential/%s/api/json'
        url = url % (self.ju.jenkins_url, creds_uid)
        resp = self.ju.get(url)
        # assertEqual: assertEquals is a deprecated unittest alias
        self.assertEqual(200, resp.status_code, url)
        self.assertTrue("SF_SERVICE_USER" in resp.text, url)
class TestProjectReplication(Base):
    """ Functional tests to verify the gerrit replication feature
    """
    def setUp(self):
        """Prepare Gerrit sessions and an SSH environment for mirror clones."""
        super(TestProjectReplication, self).setUp()
        self.ru = ResourcesUtils()
        self.un = config.ADMIN_USER
        self.ju = JenkinsUtils()
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.k_idx = self.gu2.add_pubkey(config.USERS[config.USER_2]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                        priv_key_path,
                                        config.USERS[self.un]['email'])
        # Prepare environment for git clone on mirror repo
        self.mt = Tool()
        self.mt_tempdir = tempfile.mkdtemp()
        # Copy the service private key in a flat file
        # NOTE(review): file(...).read()/write() never close the
        # descriptors — consider open() with a context manager.
        priv_key = file(config.SERVICE_PRIV_KEY_PATH, 'r').read()
        priv_key_path = os.path.join(self.mt_tempdir, 'user.priv')
        file(priv_key_path, 'w').write(priv_key)
        os.chmod(priv_key_path, stat.S_IREAD | stat.S_IWRITE)
        # Prepare the ssh wrapper script
        ssh_wrapper = "ssh -o StrictHostKeyChecking=no -i %s \"$@\"" % (
            priv_key_path)
        wrapper_path = os.path.join(self.mt_tempdir, 'ssh_wrapper.sh')
        file(wrapper_path, 'w').write(ssh_wrapper)
        os.chmod(wrapper_path, stat.S_IRWXU)
        # Set the wrapper as GIT_SSH env variable
        self.mt.env['GIT_SSH'] = wrapper_path
        self.config_clone_dir = None
        # Project we are going to configure the replication for
        self.pname = 'test/replication'
        # Remove artifacts of previous run if any
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)

    def tearDown(self):
        """Remove the replication section, the repos and USER_2's key."""
        super(TestProjectReplication, self).tearDown()
        self.delete_config_section(self.un, self.pname)
        self.delete_mirror_repo(self.pname)
        self.ru.direct_delete_repo(self.pname)
        self.gu2.del_pubkey(self.k_idx)

    def clone(self, uri, target):
        """Clone uri into the temp dir, wiping a previous clone first.

        Returns the clone path; the directory may not exist if the
        git command failed.
        """
        self.assertTrue(uri.startswith('ssh://'))
        cmd = "git clone %s %s" % (uri, target)
        clone = os.path.join(self.mt_tempdir, target)
        if os.path.isdir(clone):
            shutil.rmtree(clone)
        self.mt.exe(cmd, self.mt_tempdir)
        return clone

    def create_project(self, name):
        """Create the repository that will be replicated."""
        logger.info("Create repo to for testing replication %s" % name)
        self.ru.direct_create_repo(name)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
        """Run subcmd on host over ssh; returns ((stdout, stderr), rc)."""
        host = '%s@%s' % (user, host)
        sshcmd = ['ssh', '-o', 'LogLevel=ERROR',
                  '-o', 'StrictHostKeyChecking=no',
                  '-o', 'UserKnownHostsFile=/dev/null',
                  '-i', sshkey_priv_path, host]
        cmd = sshcmd + subcmd
        p = Popen(cmd, stdout=PIPE)
        return p.communicate(), p.returncode

    def delete_mirror_repo(self, name):
        """Delete the mirror repository on the gerrit host."""
        logger.info("Delete mirror repo created by the replication")
        mirror_path = '/var/lib/gerrit/tmp/%s.git' % name
        cmd = ['ssh', 'gerrit.%s' % config.GATEWAY_HOST,
               'rm', '-rf', mirror_path]
        self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                         'root', config.GATEWAY_HOST, cmd)

    def create_config_section(self, project):
        """Add a remote.test_project section and push it via config-update.

        Raises if the section does not land in the deployed
        /etc/gerrit/replication.config.
        """
        logger.info("Add the replication config section")
        host = '%s@%s' % (config.GERRIT_USER, config.GATEWAY_HOST)
        mirror_repo_path = '/var/lib/gerrit/tmp/\${name}.git'
        url = '%s:%s' % (host, mirror_repo_path)
        path = os.path.join(self.config_clone_dir,
                            'gerrit/replication.config')
        call("git config -f %s --remove-section remote.test_project" %
             path, shell=True)
        call("git config -f %s --add remote.test_project.projects %s" %
             (path, project), shell=True)
        call("git config -f %s --add remote.test_project.url %s" %
             (path, url), shell=True)
        self.gitu_admin.add_commit_for_all_new_additions(
            self.config_clone_dir, "Add replication test section")
        # The direct push will trigger the config-update job
        # as we commit through 29418
        change_sha = self.gitu_admin.direct_push_branch(
            self.config_clone_dir, 'master')
        logger.info("Waiting for config-update on %s" % change_sha)
        self.ju.wait_for_config_update(change_sha)
        cmd = ['ssh', 'gerrit.%s' % config.GATEWAY_HOST,
               'grep', 'test_project', '/etc/gerrit/replication.config']
        logger.info("Wait for the replication config section to land")
        _, code = self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                                   'root', config.GATEWAY_HOST, cmd)
        if code == 0:
            return
        raise Exception('replication.config file has not been updated (add)')

    def delete_config_section(self, user, project):
        """Remove the remote.test_project section, if present, and push.

        Raises if the section is still present in the deployed
        replication.config after the update.
        """
        logger.info("Remove the replication config section")
        url = "ssh://%s@%s:29418/config" % (self.un, config.GATEWAY_HOST)
        self.config_clone_dir = self.gitu_admin.clone(url, 'config',
                                                      config_review=True)
        sha = open("%s/.git/refs/heads/master" %
                   self.config_clone_dir).read().strip()
        path = os.path.join(self.config_clone_dir,
                            'gerrit/replication.config')
        call("git config -f %s --remove-section remote.test_project" %
             path, shell=True)
        change_sha = self.gitu_admin.add_commit_for_all_new_additions(
            self.config_clone_dir, "Remove replication test section")
        # The direct push will trigger the config-update job
        # as we commit through 29418
        if change_sha == sha:
            # Nothing have been changed/Nothing to publish
            return
        change_sha = self.gitu_admin.direct_push_branch(
            self.config_clone_dir, 'master')
        logger.info("Waiting for config-update on %s" % change_sha)
        self.ju.wait_for_config_update(change_sha)
        cmd = ['ssh', 'gerrit.%s' % config.GATEWAY_HOST,
               'grep', 'test_project', '/etc/gerrit/replication.config']
        _, code = self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                                   'root', config.GATEWAY_HOST, cmd)
        if code != 0:
            return
        raise Exception('replication.config has not been updated (rm)')

    def mirror_clone_and_check_files(self, url, pname):
        """Poll-clone the mirror until .gitreview shows up (max ~150s)."""
        for retry in xrange(50):
            clone = self.clone(url, pname)
            # clone may fail, as mirror repo is not yet ready(i.e gerrit not
            # yet replicated the project)
            if os.path.isdir(clone):
                logger.info("Files in the mirror repo: %s" %
                            os.listdir(clone))
            if os.path.isdir(clone) and \
               os.path.isfile(os.path.join(clone, '.gitreview')):
                break
            else:
                time.sleep(3)
        self.assertTrue(os.path.exists(os.path.join(clone, '.gitreview')))

    def test_replication(self):
        """ Test gerrit replication for review process
        """
        # Create the project
        self.create_project(self.pname)
        # Be sure sftests.com host key is inside the known_hosts
        cmds = [['ssh', 'gerrit.%s' % config.GATEWAY_HOST,
                 'ssh-keyscan', 'sftests.com', '>',
                 '/var/lib/gerrit/.ssh/known_hosts']]
        for cmd in cmds:
            self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH,
                             'root', config.GATEWAY_HOST, cmd)
        # Create new section for this project in replication.config
        self.create_config_section(self.pname)
        # Verify if gerrit replicated the repo
        self.managesf_repo_path = "ssh://%s@%s/var/lib/gerrit/tmp/" % (
            'root', config.GATEWAY_HOST)
        repo_url = self.managesf_repo_path + '%s.git' % self.pname
        logger.info("Wait for the replication to happen")
        self.mirror_clone_and_check_files(repo_url, self.pname)
class TestProjectTestsWorkflow(Base):
    """ Functional tests to verify the configuration of a project test
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)
        cls.sample_project_dir = \
            os.path.join(config.SF_TESTS_DIR, "sample_project/")

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        """Prepare Gerrit/Jenkins helpers and snapshot the config repo."""
        self.projects = []
        self.dirs_to_delete = []
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])
        # Clone the config repo and make change to it
        # in order to test the new sample_project
        self.config_clone_dir = self.clone_as_admin("config")
        # NOTE(review): file(...).read() leaks the descriptors —
        # consider open() with a context manager.
        self.original_layout = file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml")).read()
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        self.original_project = file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read()
        # Put USER_2 as core for config project
        self.gu.add_group_member(config.USER_2, "config-core")

    def tearDown(self):
        """Restore the config repo and remove test projects and clones."""
        self.restore_config_repo(self.original_layout,
                                 self.original_project,
                                 self.original_zuul_projects)
        for name in self.projects:
            self.msu.deleteProject(name, config.ADMIN_USER)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def assert_reviewer_approvals(self, change_id, value):
        """Poll (up to ~90s) until jenkins' Verified approval equals value."""
        approvals = {}
        for _ in range(90):
            approvals = self.gu.get_reviewer_approvals(change_id, 'jenkins')
            if approvals and approvals.get('Verified') == value:
                break
            time.sleep(1)
        self.assertEqual(value, approvals.get('Verified'))

    def clone_as_admin(self, pname):
        """Clone pname over the admin SSH session; track the dir for cleanup."""
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, layout, project, zuul):
        """Write back the saved yaml contents and direct-push the restore."""
        file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml"), 'w').write(
            layout)
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            project)
        self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore layout.yaml and projects.yaml")

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def push_review_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.review_push_branch(clone_dir, 'master')

    def create_project(self, name, user, options=None):
        """Create a project via managesf and remember it for teardown."""
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def test_check_project_test_workflow(self):
        """ Validate new project to test via zuul layout.yaml
        """
        # We want to create a project, provide project source
        # code with tests. We then configure zuul/jjb to handle the
        # run of the test cases. We then validate Gerrit has been
        # updated about the test results
        # We use the sample-project (that already exists)
        pname = 'test_workflow_%s' % create_random_str()
        # Be sure the project does not exist
        self.msu.deleteProject(pname, config.ADMIN_USER)
        # Create it
        self.create_project(pname, config.ADMIN_USER)
        # Add the sample-project to the empty repository
        clone_dir = self.clone_as_admin(pname)
        copytree(self.sample_project_dir, clone_dir)
        self.commit_direct_push_as_admin(clone_dir, "Add the sample project")
        # Change to config/zuul/layout.yaml and jobs/projects.yaml
        # in order to test the new project
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("zuul-demo", pname),
        )
        ycontent2 = load(file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read())
        # Duplicate the zuul-demo JJB project entry under the new name
        sp2 = copy.deepcopy(
            [p for p in ycontent2 if 'project' in p and
                p['project']['name'] == 'zuul-demo'][0])
        sp2['project']['name'] = pname
        ycontent2.append(sp2)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            dump(ycontent2))
        # Retrieve the previous build number for config-check
        last_success_build_num_ch = \
            self.ju.get_last_build_number("config-check",
                                          "lastSuccessfulBuild")
        # Retrieve the previous build number for config-update
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        # Send review (config-check) will be triggered
        self.push_review_as_admin(
            self.config_clone_dir,
            "Add config definition in Zuul/JJB config for %s" % pname)
        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")
        # Poll until the last build is also the last successful one
        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1
        self.assertEqual(last_build_num_ch, last_success_build_num_ch)
        # let some time to Zuul to update the test result to Gerrit.
        time.sleep(2)
        # Get the change id
        change_ids = self.gu.get_my_changes_for_project("config")
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        # Check whether zuul sets verified to +1 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+1')
        # review the change
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu2.submit_change_note(change_id, "current", "Workflow", "1")
        # now zuul processes gate pipeline and runs config-check job
        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")
        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1
        self.assertEqual(last_build_num_ch, last_success_build_num_ch)
        # Check whether zuul sets verified to +2 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+2')
        # verify whether zuul merged the patch
        change = self.gu.get_change('config', 'master', change_id)
        change_status = change['status']
        attempt = 0
        while change_status != 'MERGED':
            if attempt >= 90:
                break
            time.sleep(1)
            change = self.gu.get_change('config', 'master', change_id)
            change_status = change['status']
            attempt += 1
        self.assertEqual(change_status, 'MERGED')
        # Test post pipe line
        # as the patch is merged, post pieline should run config-update job
        # Wait for config-update to finish and verify the success
        self.ju.wait_till_job_completes("config-update",
                                        last_success_build_num_cu,
                                        "lastSuccessfulBuild")
        last_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastBuild")
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_cu, last_success_build_num_cu)
        # Retrieve the prev build number for pname-unit-tests
        # Retrieve the prev build number for pname-functional-tests
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        # Test config-update
        # config-update should have created jobs for pname
        # Trigger tests on pname
        # Send a review and check tests has been run
        self.gitu_admin.add_commit_and_publish(
            clone_dir, 'master', "Add useless file", self.un)
        # Wait for pname-unit-tests to finish and verify the success
        self.ju.wait_till_job_completes("%s-unit-tests" % pname,
                                        last_success_build_num_sp_ut,
                                        "lastSuccessfulBuild")
        # Wait for pname-functional-tests to end and check the success
        self.ju.wait_till_job_completes("%s-functional-tests" % pname,
                                        last_success_build_num_sp_ft,
                                        "lastSuccessfulBuild")
        # Check the unit tests succeed
        last_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ut, last_success_build_num_sp_ut)
        # Check the functional tests succeed
        last_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ft, last_success_build_num_sp_ft)
        # Get the change id
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        # let some time to Zuul to update the test result to Gerrit.
        for i in range(90):
            if "jenkins" in self.gu.get_reviewers(change_id):
                break
            time.sleep(1)
        self.assert_reviewer_approvals(change_id, '+1')
def test_check_zuul_operations(self): """ Test if zuul verifies project correctly through zuul-demo project """ # zuul-demo - test project used exclusively to test zuul installation # The necessary project descriptions are already declared in Jenkins # and zuul pname = 'zuul-demo' self.create_project(pname, config.ADMIN_USER) un = config.ADMIN_USER gu = GerritUtils( config.GATEWAY_URL, auth_cookie=config.USERS[un]['auth_cookie']) ju = JenkinsUtils() k_index = gu.add_pubkey(config.USERS[un]["pubkey"]) # Gerrit part self.assertTrue(gu.project_exists(pname)) priv_key_path = set_private_key(config.USERS[un]["privkey"]) gitu = GerritGitUtils(un, priv_key_path, config.USERS[un]['email']) url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST, pname) clone_dir = gitu.clone(url, pname) self.dirs_to_delete.append(os.path.dirname(clone_dir)) last_fail_build_num_ft = \ ju.get_last_build_number("zuul-demo-functional-tests", "lastFailedBuild") last_fail_build_num_ut = \ ju.get_last_build_number("zuul-demo-unit-tests", "lastFailedBuild") last_succeed_build_num_ft = \ ju.get_last_build_number("zuul-demo-functional-tests", "lastSuccessfulBuild") last_succeed_build_num_ut = \ ju.get_last_build_number("zuul-demo-unit-tests", "lastSuccessfulBuild") gitu.add_commit_and_publish(clone_dir, "master", "Test commit") change_ids = gu.get_my_changes_for_project(pname) self.assertEqual(len(change_ids), 1) change_id = change_ids[0] # Give some time for jenkins to work ju.wait_till_job_completes("zuul-demo-functional-tests", last_fail_build_num_ft, "lastFailedBuild") ju.wait_till_job_completes("zuul-demo-unit-tests", last_fail_build_num_ut, "lastFailedBuild") attempt = 0 while "jenkins" not in gu.get_reviewers(change_id): if attempt >= 90: break time.sleep(1) attempt += 1 attempt = 0 while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \ != '-1': if attempt >= 90: break time.sleep(1) attempt += 1 self.assertEqual( gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'], '-1') 
# Add the test case files and resubmit for review data = "echo Working" files = ["run_functional-tests.sh", "run_tests.sh"] for f in files: file(os.path.join(clone_dir, f), 'w').write(data) os.chmod(os.path.join(clone_dir, f), 0755) gitu.add_commit_and_publish(clone_dir, "master", None, fnames=files) # Give some time for jenkins to work ju.wait_till_job_completes("zuul-demo-functional-tests", last_succeed_build_num_ft, "lastSuccessfulBuild") ju.wait_till_job_completes("zuul-demo-unit-tests", last_succeed_build_num_ut, "lastSuccessfulBuild") attempt = 0 while "jenkins" not in gu.get_reviewers(change_id): if attempt >= 90: break time.sleep(1) attempt += 1 attempt = 0 while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \ != '+1': if attempt >= 90: break time.sleep(1) attempt += 1 self.assertEqual( gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'], '+1') gu.del_pubkey(k_index)
def setUp(self):
    """Instantiate the Jenkins helper used by each test."""
    self.ju = JenkinsUtils()
class SFchecker:
    """ This checker is only intended for testin SF backup/restore
    and update. It checks that the user data defined in resourses.yaml
    are present on the SF. Those data must have been provisioned by
    SFProvisioner.
    """
    def __init__(self):
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            # NOTE(review): yaml.load without a Loader is unsafe and
            # deprecated — consider yaml.safe_load.
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.gu = GerritUtils(
            'http://%s/' % config.GATEWAY_HOST,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def check_project(self, name):
        """Exit(1) unless the project exists in Gerrit (and Redmine)."""
        print " Check project %s exists ..." % name,
        if not self.gu.project_exists(name) or \
           (is_present('SFRedmine') and not self.rm.project_exists(name)):
            print "FAIL"
            exit(1)
        print "OK"

    def check_files_in_project(self, name, files):
        """Clone the project and exit(1) if an expected file is missing."""
        print " Check files(%s) exists in project ..." % ",".join(files),
        # TODO(fbo); use gateway host instead of gerrit host
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(url, name, config_review=False)
        for f in files:
            if not os.path.isfile(os.path.join(clone_dir, f)):
                print "FAIL"
                exit(1)

    def check_issues_on_project(self, name, issues):
        """Exit(1) unless Redmine has at least len(issues) issues."""
        print " Check that at least %s issues exists for that project ...," %\
            len(issues)
        current_issues = self.rm.get_issues_by_project(name)
        if len(current_issues) < len(issues):
            print "FAIL: expected %s, project has %s" % (
                len(issues), len(current_issues))
            exit(1)
        print "OK"

    def check_jenkins_jobs(self, name, jobnames):
        """Exit(1) unless every '<name>_<jobname>' job exists in Jenkins."""
        print " Check that jenkins jobs(%s) exists ..." % ",".join(jobnames),
        for jobname in jobnames:
            if not '%s_%s' % (name, jobname) in self.ju.list_jobs():
                print "FAIL"
                exit(1)
        print "OK"

    def check_reviews_on_project(self, name, issues):
        """Exit(1) unless enough open reviews exist for the project."""
        reviews = [i for i in issues if i['review']]
        print " Check that at least %s reviews exists for that project ..." %\
            len(reviews),
        pending_reviews = self.ggu.list_open_reviews(name,
                                                     config.GATEWAY_HOST)
        if not len(pending_reviews) >= len(reviews):
            print "FAIL"
            exit(1)
        print "OK"

    def check_pads(self, amount):
        # Not implemented
        pass

    def check_pasties(self, amount):
        # Not implemented
        pass

    def command(self, cmd):
        """Run cmd on the gateway host as root over ssh."""
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"),
                           "root",
                           config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        """Return the remote md5sum of f, or None if no output."""
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def read_file(self, f):
        """Return the remote content of f."""
        return self.command("cat %s" % f)[0]

    def simple_login(self, user, password):
        """log as user"""
        return get_cookie(user, password)

    def check_users_list(self):
        """Exit(1) unless all provisioned users are listed by managesf."""
        print "Check that users are listable ...",
        users = [u['name'] for u in self.resources['users']]
        c = {'auth_pubtkt': config.USERS[config.ADMIN_USER]['auth_cookie']}
        url = 'http://%s/manage/project/membership/' % config.GATEWAY_HOST
        registered = requests.get(url, cookies=c).json()
        # usernames are in first position
        r_users = [u[0] for u in registered]
        if not set(users).issubset(set(r_users)):
            print "FAIL"
            exit(1)
        print "OK"

    def check_checksums(self):
        """Compare remote file checksums against /tmp/pc_checksums.yaml.

        Exits unless "checksum_warn_only" was passed on the command line.
        """
        print "Check that expected file are there"
        # NOTE(review): unsafe yaml.load and an unclosed file() handle —
        # consider yaml.safe_load with a context-managed open().
        checksum_list = yaml.load(file('/tmp/pc_checksums.yaml'))
        mismatch = False
        for f, checksum in checksum_list.items():
            c = self.compute_checksum(f)
            if c == checksum:
                print "Expected checksum (%s) for %s is OK." % (
                    checksum, f)
            else:
                print "Expected checksum (%s) for %s is WRONG (%s)." % (
                    checksum, f, c)
                print "New file is:"
                print " %s" % self.read_file(f).replace("\n", "\n ")
                mismatch = True
        if "checksum_warn_only" not in sys.argv and mismatch:
            sys.exit(1)

    def checker(self):
        """Run every check against the provisioned data."""
        self.check_checksums()
        self.check_users_list()
        for project in self.resources['projects']:
            print "Check user datas for %s" % project['name']
            self.check_project(project['name'])
            self.check_files_in_project(project['name'],
                                        [f['name'] for f in
                                         project['files']])
            if is_present('SFRedmine'):
                self.check_issues_on_project(project['name'],
                                             project['issues'])
            self.check_reviews_on_project(project['name'],
                                          project['issues'])
            self.check_jenkins_jobs(project['name'],
                                    [j['name'] for j in
                                     project['jobnames']])
        self.check_pads(2)
        self.check_pasties(2)
        for user in self.resources['local_users']:
            print "Check user %s can log in ..." % user['username'],
            if self.simple_login(user['username'], user['password']):
                print "OK"
            else:
                print "FAIL"
                exit(1)
class SFchecker:
    """ This checker is only intended for testing SF backup/restore
    and update. It checks that the user data defined in
    resourses.yaml are present on the SF.

    Those data must have been provisioned by SFProvisioner.
    """
    def __init__(self):
        # Load the expected data set and authenticate once as admin;
        # every helper below reuses the resulting auth cookie.
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.gu = GerritUtils(
            'http://%s/' % config.GATEWAY_HOST,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.stb_client = SFStoryboard(
            config.GATEWAY_URL + "/storyboard_api",
            config.USERS[config.ADMIN_USER]['auth_cookie'])

    def check_project(self, name):
        # The project must exist in Gerrit and, when Storyboard is
        # deployed, in Storyboard as well.
        print " Check project %s exists ..." % name,
        if not self.gu.project_exists(name):
            print "FAIL"
            exit(1)
        if is_present('storyboard'):
            if name not in [
                    p.name for p in self.stb_client.projects.get_all()]:
                print "FAIL"
                exit(1)
        print "OK"

    def check_files_in_project(self, name, files):
        # Clone the repo over SSH and verify every provisioned file
        # landed on master.
        print " Check files(%s) exists in project ..." % ",".join(files),
        # TODO(fbo); use gateway host instead of gerrit host
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(url, name, config_review=False)
        for f in files:
            if not os.path.isfile(os.path.join(clone_dir, f)):
                print "FAIL"
                exit(1)

    def check_issues_on_project(self, name, issues):
        # Count the Storyboard tasks attached to this project; the count
        # must match the number of provisioned issues exactly.
        print(" Check that at least %s issues exists "
              "for that project ..." % len(issues))
        p = [p for p in self.stb_client.projects.get_all()
             if p.name == name][0]
        pt = [t for t in self.stb_client.tasks.get_all()
              if t.project_id == p.id]
        if len(pt) != len(issues):
            print "FAIL: expected %s, project has %s" % (len(issues), len(pt))
            exit(1)
        print "OK"

    def check_jenkins_jobs(self, name, jobnames):
        # Every provisioned job must exist as "<project>_<jobname>".
        print " Check that jenkins jobs(%s) exists ..." \
            % ",".join(jobnames)
        for jobname in jobnames:
            if not '%s_%s' % (name, jobname) in self.ju.list_jobs():
                print "FAIL"
                exit(1)
        print "OK"

    def check_reviews_on_project(self, name, issues):
        # Gerrit must hold at least one open review per provisioned
        # issue flagged with 'review'.
        reviews = [i for i in issues if i['review']]
        print " Check that at least %s reviews exists for that project ..." %\
            len(reviews),
        pending_reviews = self.ggu.list_open_reviews(name, config.GATEWAY_HOST)
        if not len(pending_reviews) >= len(reviews):
            print "FAIL"
            exit(1)
        print "OK"

    def check_pads(self, amount):
        # TODO: etherpad restore check not implemented yet.
        pass

    def check_pasties(self, amount):
        # TODO: paste-service restore check not implemented yet.
        pass

    def command(self, cmd):
        # Run a shell command as root on the gateway host over SSH.
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"),
                           "root", config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        # md5sum of a remote file; returns None when the command printed
        # nothing (e.g. the file is missing).
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def read_file(self, f):
        # Fetch the full content of a remote file.
        return self.command("cat %s" % f)[0]

    def simple_login(self, user, password):
        """log as user"""
        return get_cookie(user, password)

    def check_users_list(self):
        # Every provisioned user must appear in the managesf membership
        # listing exposed on the gateway.
        print "Check that users are listable ...",
        users = [u['name'] for u in self.resources['users']]
        c = {'auth_pubtkt': config.USERS[config.ADMIN_USER]['auth_cookie']}
        url = 'http://%s/manage/project/membership/' % config.GATEWAY_HOST
        registered = requests.get(url, cookies=c).json()
        # usernames are in first position
        r_users = [u[0] for u in registered]
        if not set(users).issubset(set(r_users)):
            print "FAIL: expected %s, got %s" % (users, r_users)
            exit(1)
        print "OK"

    def check_checksums(self):
        # Compare current checksums against the ones recorded at
        # provisioning time. "content_<path>" entries hold the original
        # file content and are skipped here; they only feed the diff
        # display below.
        print "Check that expected file are there"
        checksum_list = yaml.load(file('pc_checksums.yaml'))
        mismatch = False
        for f, checksum in checksum_list.items():
            if f.startswith("content_"):
                continue
            c = self.compute_checksum(f)
            if c == checksum:
                print "Expected checksum (%s) for %s is OK." % (checksum, f)
            else:
                print "Expected checksum (%s) for %s is WRONG (%s)." % (
                    checksum, f, c)
                print "New file is:"
                print " %s" % self.read_file(f).replace("\n", "\n ")
                print "Old file was:"
                print " %s" % checksum_list['content_' + f].replace(
                    "\n", "\n ")
                mismatch = True
        # Mismatches are fatal unless the caller opted out with the
        # "checksum_warn_only" command-line argument.
        if "checksum_warn_only" not in sys.argv and mismatch:
            sys.exit(1)

    def checker(self):
        # Entry point: validate every provisioned resource in turn.
        self.check_checksums()
        self.check_users_list()
        for project in self.resources['projects']:
            print "Check user datas for %s" % project['name']
            self.check_project(project['name'])
            self.check_files_in_project(project['name'],
                                        [f['name'] for f in project['files']])
            # Issue tracking is only validated when Storyboard is deployed.
            if is_present('storyboard'):
                self.check_issues_on_project(project['name'],
                                             project['issues'])
            self.check_reviews_on_project(project['name'],
                                          project['issues'])
            self.check_jenkins_jobs(project['name'],
                                    [j['name'] for j in project['jobnames']])
        self.check_pads(2)
        self.check_pasties(2)
        for user in self.resources['local_users']:
            print "Check user %s can log in ..." % user['username'],
            if self.simple_login(user['username'], user['password']):
                print "OK"
            else:
                print "FAIL"
                exit(1)
def setUp(self): super(TestJenkinsBasic, self).setUp() self.ju = JenkinsUtils()
class SFchecker: """ This checker is only intended for testin SF backup/restore and update. It checks that the user data defined in resourses.yaml are present on the SF. Those data must have been provisioned by SFProvisioner. """ def __init__(self): with open("%s/resources.yaml" % pwd, 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.gu = GerritUtils( 'http://%s/' % config.GATEWAY_HOST, auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.ju = JenkinsUtils() self.rm = RedmineUtils( config.GATEWAY_URL + "/redmine/", auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) def check_project(self, name): print " Check project %s exists ..." % name, if not self.gu.project_exists(name) or \ not self.rm.project_exists(name): print "FAIL" exit(1) print "OK" def check_files_in_project(self, name, files): print " Check files(%s) exists in project ..." % ",".join(files), # TODO(fbo); use gateway host instead of gerrit host url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) clone_dir = self.ggu.clone(url, name, config_review=False) for f in files: if not os.path.isfile(os.path.join(clone_dir, f)): print "FAIL" exit(1) def check_issues_on_project(self, name, issues): print " Check that at least %s issues exists for that project ...," %\ len(issues) current_issues = self.rm.get_issues_by_project(name) if len(current_issues) < len(issues): print "FAIL: expected %s, project has %s" % ( len(issues), len(current_issues)) exit(1) print "OK" def check_jenkins_jobs(self, name, jobnames): print " Check that jenkins jobs(%s) exists ..." 
% ",".join(jobnames), for jobname in jobnames: if not '%s_%s' % (name, jobname) in self.ju.list_jobs(): print "FAIL" exit(1) print "OK" def check_reviews_on_project(self, name, issues): reviews = [i for i in issues if i['review']] print " Check that at least %s reviews exists for that project ..." %\ len(reviews), pending_reviews = self.ggu.list_open_reviews(name, config.GATEWAY_HOST) if not len(pending_reviews) >= len(reviews): print "FAIL" exit(1) print "OK" def check_pads(self, amount): pass def check_pasties(self, amount): pass def checker(self): for project in self.resources['projects']: print "Check user datas for %s" % project['name'] self.check_project(project['name']) self.check_files_in_project(project['name'], [f['name'] for f in project['files']]) self.check_issues_on_project(project['name'], project['issues']) self.check_reviews_on_project(project['name'], project['issues']) self.check_jenkins_jobs(project['name'], [j['name'] for j in project['jobnames']]) self.check_pads(2) self.check_pasties(2)
class TestRepoxplorer(Base):
    # Functional tests for the RepoXplorer service behind the gateway.

    def setUp(self):
        # Admin-capable git/Gerrit helpers plus Jenkins access, used to
        # push config changes and wait for their deployment.
        super(TestRepoxplorer, self).setUp()
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        self.gitu_admin = GerritGitUtils(
            config.ADMIN_USER,
            priv_key_path,
            config.USERS[config.ADMIN_USER]['email'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.dirs_to_delete = []

    def tearDown(self):
        super(TestRepoxplorer, self).tearDown()
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def clone_as_admin(self, pname):
        # Clone a repo as admin and register its parent dir for cleanup.
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def set_resources_then_direct_push(self, fpath,
                                       resources=None, mode='add'):
        # Create ('add') or remove ('del') a resources file in the
        # config repo, push straight to master and wait until the
        # config-update job reports SUCCESS.
        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            file(path, 'w').write(resources)
        elif mode == 'del':
            os.unlink(path)
        change_sha = self.commit_direct_push_as_admin(
            config_clone_dir,
            "Add new resources for functional tests")
        config_update_log = self.ju.wait_for_config_update(change_sha)
        self.assertIn("SUCCESS", config_update_log)

    def get_projects(self):
        # Fetch the projects listing from the RepoXplorer JSON API.
        url = config.GATEWAY_URL + "/repoxplorer/projects.json/"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        return resp.json()

    def get_groups(self):
        # Fetch the groups listing from the RepoXplorer JSON API.
        url = config.GATEWAY_URL + "/repoxplorer/api_groups.json/"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        return resp.json()

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_accessible(self):
        """ Test if RepoXplorer is accessible on gateway hosts
        """
        url = config.GATEWAY_URL + "/repoxplorer/"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('[RepoXplorer] - Projects listing' in resp.text)

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_data_indexed(self):
        """ Test if RepoXplorer has indexed the config repository
        """
        url = config.GATEWAY_URL + "/repoxplorer/commits.json?pid=internal"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        # NOTE(review): assumes the third element of the commits.json
        # payload is the commits amount -- confirm against the
        # repoxplorer API.
        self.assertTrue(resp.json()[2] > 0)

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_displayed_top_menu(self):
        """ Test if RepoXplorer link is displayed in the top menu
        """
        url = config.GATEWAY_URL + "/topmenu.html"
        resp = requests.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTrue('href="/repoxplorer/"' in resp.text,
                        'repoxplorer not present as a link')

    @skipIfServiceMissing('repoxplorer')
    def test_repoxplorer_config_from_resources(self):
        """ Test if RepoXPlorer is reconfigured from new resources
        """
        fpath = "resources/%s.yaml" % create_random_str()
        # NOTE(review): the indentation of this YAML template was lost
        # in the source formatting; the structure below is a
        # reconstruction -- verify against the original file.
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      source-repositories:
        - %(pname)s/%(rname)s
  repos:
    %(pname)s/%(rname)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
  groups:
    %(gname)s:
      description: test for functional test
      members:
        - [email protected]
"""
        tmpl_keys = {'pname': create_random_str(),
                     'rname': create_random_str(),
                     'gname': create_random_str()}
        resources = resources % tmpl_keys
        # Publish the resources and check both objects appear ...
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        projects = self.get_projects()
        groups = self.get_groups()
        self.assertIn(tmpl_keys['gname'], groups.keys())
        self.assertIn(tmpl_keys['pname'], projects['projects'].keys())
        project_repos = [
            r['name'] for r in projects['projects'][tmpl_keys['pname']]]
        self.assertIn(tmpl_keys['pname'] + '/' + tmpl_keys['rname'],
                      project_repos)
        # ... then delete the file and check both objects disappear.
        self.set_resources_then_direct_push(fpath, mode='del')
        projects = self.get_projects()
        groups = self.get_groups()
        self.assertNotIn(tmpl_keys['gname'], groups.keys())
        self.assertNotIn(tmpl_keys['pname'], projects['projects'].keys())
class SFchecker: """ This checker is only intended for testin SF backup/restore and update. It checks that the user data defined in resourses.yaml are present on the SF. Those data must have been provisioned by SFProvisioner. """ def __init__(self): with open("%s/resources.yaml" % pwd, 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.gu = GerritUtils( 'http://%s/' % config.GATEWAY_HOST, auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.ju = JenkinsUtils() self.rm = RedmineUtils( config.GATEWAY_URL + "/redmine/", auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) def check_project(self, name): print " Check project %s exists ..." % name, if not self.gu.project_exists(name) or \ (is_present('SFRedmine') and not self.rm.project_exists(name)): print "FAIL" exit(1) print "OK" def check_files_in_project(self, name, files): print " Check files(%s) exists in project ..." % ",".join(files), # TODO(fbo); use gateway host instead of gerrit host url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) clone_dir = self.ggu.clone(url, name, config_review=False) for f in files: if not os.path.isfile(os.path.join(clone_dir, f)): print "FAIL" exit(1) def check_issues_on_project(self, name, issues): print " Check that at least %s issues exists for that project ...," %\ len(issues) current_issues = self.rm.get_issues_by_project(name) if len(current_issues) < len(issues): print "FAIL: expected %s, project has %s" % (len(issues), len(current_issues)) exit(1) print "OK" def check_jenkins_jobs(self, name, jobnames): print " Check that jenkins jobs(%s) exists ..." 
% ",".join(jobnames), for jobname in jobnames: if not '%s_%s' % (name, jobname) in self.ju.list_jobs(): print "FAIL" exit(1) print "OK" def check_reviews_on_project(self, name, issues): reviews = [i for i in issues if i['review']] print " Check that at least %s reviews exists for that project ..." %\ len(reviews), pending_reviews = self.ggu.list_open_reviews(name, config.GATEWAY_HOST) if not len(pending_reviews) >= len(reviews): print "FAIL" exit(1) print "OK" def check_pads(self, amount): pass def check_pasties(self, amount): pass def checker(self): for project in self.resources['projects']: print "Check user datas for %s" % project['name'] self.check_project(project['name']) self.check_files_in_project(project['name'], [f['name'] for f in project['files']]) if is_present('SFRedmine'): self.check_issues_on_project(project['name'], project['issues']) self.check_reviews_on_project(project['name'], project['issues']) self.check_jenkins_jobs(project['name'], [j['name'] for j in project['jobnames']]) self.check_pads(2) self.check_pasties(2)
class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        # Load the data set to provision and authenticate once as
        # admin; every helper below reuses the resulting auth cookie.
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ru = ResourcesUtils()
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.stb_client = SFStoryboard(
            config.GATEWAY_URL + "/storyboard_api",
            config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_project(self, name):
        print " Creating project %s ..." % name
        self.ru.create_repo(name)

    def push_files_in_project(self, name, files):
        # Clone the repo and directly push one commit adding each file.
        # The clone dir is kept on self for the later review creation.
        print " Add files(%s) in a commit ..." % ",".join(files)
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(self.url, name, config_review=False)
        self.clone_dir = clone_dir
        for f in files:
            file(os.path.join(clone_dir, f), 'w').write('data')
            self.ggu.git_add(clone_dir, (f, ))
        self.ggu.add_commit_for_all_new_additions(clone_dir)
        self.ggu.direct_push_branch(clone_dir, 'master')

    def create_storyboard_issue(self, name, issue_name):
        # Create a story plus one task attached to the project; the
        # (task_id, story_id) pair identifies the issue later on.
        project = self.stb_client.projects.get(name)
        story = self.stb_client.stories.create(title=issue_name)
        task = self.stb_client.tasks.create(story_id=story.id,
                                            project_id=project.id,
                                            title=issue_name)
        return task.id, story.id

    def create_issues_on_project(self, name, issues):
        # Generator yielding ((task_id, story_id), review_wanted) pairs.
        # Without Storyboard, random ids stand in for real ones.
        print " Create %s issue(s) for that project ..." \
            % len(issues)
        for i in issues:
            if is_present('storyboard'):
                issue = self.create_storyboard_issue(name, i['name'])
            else:
                issue = (random.randint(1, 100), random.randint(1, 100))
            yield issue, i['review']

    def create_jenkins_jobs(self, name, jobnames):
        # Jobs are named "<project>_<jobname>".
        print " Create Jenkins jobs(%s) ..." % ",".join(jobnames)
        for jobname in jobnames:
            self.ju.create_job("%s_%s" % (name, jobname))

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def simple_login(self, user):
        """log as user to make the user listable"""
        get_cookie(user, config.USERS[user]['password'])

    def create_review(self, project, issue):
        """Very basic review creator for statistics and restore tests
        purposes."""
        # issue is a (task_id, story_id) pair; the commit footer links
        # the review to both.
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(
            self.clone_dir,
            'branch_' + str(issue[0]),
            commit='test\n\nTask: #%s\nStory: #%s' % (issue[0], issue[1]))
        self.ggu.review_push_branch(self.clone_dir,
                                    'branch_' + str(issue[0]))

    def create_local_user(self, username, password, email):
        self.msu.create_user(username, password, email)

    def command(self, cmd):
        # Run a shell command as root on the gateway host over SSH.
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"),
                           "root", config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        # md5sum of a remote file; returns None when the command printed
        # nothing (e.g. the file is missing).
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def read_file(self, f):
        # Fetch the full content of a remote file.
        return self.command("cat %s" % f)[0]

    def provision(self):
        # Entry point: run extra commands, record file checksums and
        # contents for the checker, then create users, projects, files,
        # issues, reviews and jobs.
        for cmd in self.resources['commands']:
            print "Execute command %s" % cmd['cmd']
            print self.command(cmd['cmd'])
        checksum_list = {}
        for checksum in self.resources['checksum']:
            print "Compute checksum for file %s" % checksum['file']
            checksum_list[checksum['file']] = self.compute_checksum(
                checksum['file'])
            # Keep the raw content too so the checker can show a diff.
            checksum_list['content_' + checksum['file']] = self.read_file(
                checksum['file'])
        yaml.dump(checksum_list,
                  file('pc_checksums.yaml', 'w'),
                  default_flow_style=False)
        for user in self.resources['local_users']:
            print "Create local user %s" % user['username']
            self.create_local_user(user['username'], user['password'],
                                   user['email'])
        for u in self.resources['users']:
            print "log in as %s" % u['name']
            self.simple_login(u['name'])
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(
                    project['name'], project['issues']):
                if review:
                    print "Create review for bug %s in %s" % (
                        i, project['name'])
                    self.create_review(project['name'], i)
            self.create_jenkins_jobs(project['name'],
                                     [j['name'] for j in project['jobnames']])
        self.create_pads(2)
        self.create_pasties(2)
class TestZuulOps(Base):
    """ Functional tests to validate config repo bootstrap
    """
    @classmethod
    def setUpClass(cls):
        cls.msu = ManageSfUtils(config.GATEWAY_URL)
        pass

    @classmethod
    def tearDownClass(cls):
        pass

    def clone_as_admin(self, pname):
        # Clone a repo as admin and register its parent dir for cleanup.
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, zuul):
        # Write the saved zuul/projects.yaml content back and push it.
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore zuul/projects.yaml")

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def setUp(self):
        # Save the original zuul/projects.yaml, rewrite the tested
        # project name to demo/zuul-demo, push the change and wait for
        # config-update to deploy it.
        self.projects = []
        self.dirs_to_delete = []
        self.ju = JenkinsUtils()
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        self.gitu_admin = GerritGitUtils(
            config.ADMIN_USER,
            priv_key_path,
            config.USERS[config.ADMIN_USER]['email'])
        # Change to zuul/projects.yaml in order to test a with different name
        self.config_clone_dir = self.clone_as_admin("config")
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("name: zuul-demo", "name: demo/zuul-demo"),
        )
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Set zuul/projects.yaml")
        self.ju.wait_till_job_completes("config-update",
                                        last_success_build_num_cu,
                                        "lastSuccessfulBuild",
                                        max_retries=60)

    def tearDown(self):
        # Undo the zuul config change, then remove created projects and
        # local clones.
        self.restore_config_repo(self.original_zuul_projects)
        for name in self.projects:
            self.msu.deleteProject(name, config.ADMIN_USER)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def create_project(self, name, user, options=None):
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def test_check_zuul_operations(self):
        """ Test if zuul verifies project correctly through zuul-demo project
        """
        # zuul-demo - test project used exclusively to test zuul installation
        # The necessary project descriptions are already declared in Jenkins
        # and zuul
        pname = 'demo/zuul-demo'
        self.create_project(pname, config.ADMIN_USER)
        un = config.ADMIN_USER
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[un]['auth_cookie'])
        ju = JenkinsUtils()
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        # Gerrit part
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un, priv_key_path, config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST, pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        # Record current build numbers so completions can be detected.
        last_fail_build_num_ft = \
            ju.get_last_build_number("zuul-demo-functional-tests",
                                     "lastFailedBuild")
        last_fail_build_num_ut = \
            ju.get_last_build_number("zuul-demo-unit-tests",
                                     "lastFailedBuild")
        last_succeed_build_num_ft = \
            ju.get_last_build_number("zuul-demo-functional-tests",
                                     "lastSuccessfulBuild")
        last_succeed_build_num_ut = \
            ju.get_last_build_number("zuul-demo-unit-tests",
                                     "lastSuccessfulBuild")
        # A commit without test scripts must be verified -1 by jenkins.
        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")
        change_ids = gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        # Give some time for jenkins to work
        ju.wait_till_job_completes("zuul-demo-functional-tests",
                                   last_fail_build_num_ft,
                                   "lastFailedBuild")
        ju.wait_till_job_completes("zuul-demo-unit-tests",
                                   last_fail_build_num_ut,
                                   "lastFailedBuild")
        # Poll (up to ~90s) until jenkins appears as a reviewer ...
        attempt = 0
        while "jenkins" not in gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        # ... and until its Verified vote lands.
        attempt = 0
        while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
                != '-1':
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        self.assertEqual(
            gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
            '-1')
        # Add the test case files and resubmit for review
        data = "echo Working"
        files = ["run_functional-tests.sh", "run_tests.sh"]
        for f in files:
            file(os.path.join(clone_dir, f), 'w').write(data)
            os.chmod(os.path.join(clone_dir, f), 0755)
        gitu.add_commit_and_publish(clone_dir, "master", None, fnames=files)
        # Give some time for jenkins to work
        ju.wait_till_job_completes("zuul-demo-functional-tests",
                                   last_succeed_build_num_ft,
                                   "lastSuccessfulBuild")
        ju.wait_till_job_completes("zuul-demo-unit-tests",
                                   last_succeed_build_num_ut,
                                   "lastSuccessfulBuild")
        # Same polling as above, this time expecting a +1.
        attempt = 0
        while "jenkins" not in gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        attempt = 0
        while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
                != '+1':
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        self.assertEqual(
            gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
            '+1')
        gu.del_pubkey(k_index)
    def test_check_zuul_operations(self):
        """ Test if zuul verifies project correctly through zuul-demo project
        """
        # zuul-demo - test project used exclusively to test zuul installation
        # The necessary project descriptions are already declared in Jenkins
        # and zuul
        pname = 'demo/zuul-demo'
        self.create_project(pname, config.ADMIN_USER)
        un = config.ADMIN_USER
        gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[un]['auth_cookie'])
        ju = JenkinsUtils()
        k_index = gu.add_pubkey(config.USERS[un]["pubkey"])
        # Gerrit part
        self.assertTrue(gu.project_exists(pname))
        priv_key_path = set_private_key(config.USERS[un]["privkey"])
        gitu = GerritGitUtils(un, priv_key_path, config.USERS[un]['email'])
        url = "ssh://%s@%s:29418/%s" % (un, config.GATEWAY_HOST, pname)
        clone_dir = gitu.clone(url, pname)
        self.dirs_to_delete.append(os.path.dirname(clone_dir))
        # Record current build numbers so completions can be detected.
        last_fail_build_num_ft = \
            ju.get_last_build_number("zuul-demo-functional-tests",
                                     "lastFailedBuild")
        last_fail_build_num_ut = \
            ju.get_last_build_number("zuul-demo-unit-tests",
                                     "lastFailedBuild")
        last_succeed_build_num_ft = \
            ju.get_last_build_number("zuul-demo-functional-tests",
                                     "lastSuccessfulBuild")
        last_succeed_build_num_ut = \
            ju.get_last_build_number("zuul-demo-unit-tests",
                                     "lastSuccessfulBuild")
        # A commit without test scripts must be verified -1 by jenkins.
        gitu.add_commit_and_publish(clone_dir, "master", "Test commit")
        change_ids = gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        # Give some time for jenkins to work
        ju.wait_till_job_completes("zuul-demo-functional-tests",
                                   last_fail_build_num_ft,
                                   "lastFailedBuild")
        ju.wait_till_job_completes("zuul-demo-unit-tests",
                                   last_fail_build_num_ut,
                                   "lastFailedBuild")
        # Poll (up to ~90s) until jenkins appears as a reviewer ...
        attempt = 0
        while "jenkins" not in gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        # ... and until its Verified vote lands.
        attempt = 0
        while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
                != '-1':
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        self.assertEqual(
            gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
            '-1')
        # Add the test case files and resubmit for review
        data = "echo Working"
        files = ["run_functional-tests.sh", "run_tests.sh"]
        for f in files:
            file(os.path.join(clone_dir, f), 'w').write(data)
            os.chmod(os.path.join(clone_dir, f), 0755)
        gitu.add_commit_and_publish(clone_dir, "master", None, fnames=files)
        # Give some time for jenkins to work
        ju.wait_till_job_completes("zuul-demo-functional-tests",
                                   last_succeed_build_num_ft,
                                   "lastSuccessfulBuild")
        ju.wait_till_job_completes("zuul-demo-unit-tests",
                                   last_succeed_build_num_ut,
                                   "lastSuccessfulBuild")
        # Same polling as above, this time expecting a +1.
        attempt = 0
        while "jenkins" not in gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        attempt = 0
        while gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'] \
                != '+1':
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1
        self.assertEqual(
            gu.get_reviewer_approvals(change_id, 'jenkins')['Verified'],
            '+1')
        gu.del_pubkey(k_index)
class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        # Load the data set to provision and authenticate once as
        # admin; every helper below reuses the resulting auth cookie.
        with open('resources.yaml', 'r') as rsc:
            self.resources = yaml.load(rsc)
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.REDMINE_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_project(self, name):
        print " Creating project %s ..." % name
        self.msu.createProject(name, config.ADMIN_USER)

    def push_files_in_project(self, name, files):
        # Clone the repo and directly push one commit adding each file.
        # The clone dir is kept on self for the later review creation.
        print " Add files(%s) in a commit ..." % ",".join(files)
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(self.url, name, config_review=False)
        self.clone_dir = clone_dir
        for f in files:
            file(os.path.join(clone_dir, f), 'w').write('data')
            self.ggu.git_add(clone_dir, (f,))
        self.ggu.add_commit_for_all_new_additions(clone_dir)
        self.ggu.direct_push_branch(clone_dir, 'master')

    def create_issues_on_project(self, name, issues):
        # Generator yielding (redmine_issue, review_wanted) pairs.
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            issue = self.rm.create_issue(name, i['name'])
            yield issue, i['review']

    def create_jenkins_jobs(self, name, jobnames):
        # Jobs are named "<project>_<jobname>".
        print " Create Jenkins jobs(%s) ..." \
            % ",".join(jobnames)
        for jobname in jobnames:
            self.ju.create_job("%s_%s" % (name, jobname))

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def create_review(self, project, issue):
        """Very basic review creator for statistics and restore tests
        purposes."""
        # The "Bug:" footer links the pushed change to the issue.
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir,
                                      'branch_' + issue,
                                      commit='test\n\nBug: %s' % issue)
        self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue)

    def provision(self):
        # Entry point: create projects, files, issues, reviews and jobs.
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(
                    project['name'], project['issues']):
                if review:
                    print "Create review for bug %i in %s" % (
                        i, project['name'])
                    self.create_review(project['name'], str(i))
            self.create_jenkins_jobs(project['name'],
                                     [j['name'] for j in project['jobnames']])
        self.create_pads(2)
        self.create_pasties(2)
class TestResourcesWorkflow(Base):
    """Functional tests for the config-repo resources workflow.

    Exercises direct-push and review-based changes to resources/*.yaml
    and verifies the effects on Gerrit, Storyboard and managesf.
    """

    def setUp(self):
        super(TestResourcesWorkflow, self).setUp()
        priv_key_path = set_private_key(
            config.USERS[config.ADMIN_USER]["privkey"])
        self.gitu_admin = GerritGitUtils(
            config.ADMIN_USER,
            priv_key_path,
            config.USERS[config.ADMIN_USER]['email'])
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.dirs_to_delete = []

    def tearDown(self):
        super(TestResourcesWorkflow, self).tearDown()
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def ssh_run_cmd(self, sshkey_priv_path, user, host, subcmd):
        """Run *subcmd* on *host* over ssh.

        Returns ((stdout, stderr), returncode); output is discarded to
        /dev/null so both tuple members are None.
        """
        host = '%s@%s' % (user, host)
        sshcmd = ['ssh', '-o', 'LogLevel=ERROR',
                  '-o', 'StrictHostKeyChecking=no',
                  '-o', 'UserKnownHostsFile=/dev/null',
                  '-i', sshkey_priv_path, host]
        cmd = sshcmd + subcmd
        # Context manager closes the devnull handle once the process is
        # done (the original leaked it).
        with open(os.devnull, 'wb') as devnull:
            p = Popen(cmd, stdout=devnull, stderr=devnull)
            ret = p.communicate()
        return ret, p.returncode

    def clone_as_admin(self, pname):
        """Clone project *pname* as admin; register the dir for cleanup."""
        url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                        config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def set_resources_then_direct_push(self, fpath,
                                       resources=None, mode='add'):
        """Add or delete a resources file and direct-push to master.

        Waits for the triggered config-update job and asserts it
        succeeded.
        """
        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            # 'with' closes the handle before git stages the file
            # (file(path, 'w').write(...) leaked it).
            with open(path, 'w') as fd:
                fd.write(resources)
        elif mode == 'del':
            os.unlink(path)
        change_sha = self.commit_direct_push_as_admin(
            config_clone_dir,
            "Add new resources for functional tests")
        config_update_log = self.ju.wait_for_config_update(change_sha)
        self.assertIn("SUCCESS", config_update_log)

    def wait_for_jenkins_note(self, change_id):
        """Poll up to ~90s until jenkins appears as reviewer on the change."""
        attempt = 0
        while "jenkins" not in self.gu.get_reviewers(change_id):
            if attempt >= 90:
                break
            time.sleep(1)
            attempt += 1

    def propose_resources_change_check_ci(self, fpath, resources=None,
                                          mode='add', expected_note=1,
                                          msg=None):
        """Propose a resources change via review and check the CI note.

        Asserts the Verified note left by CI equals *expected_note*
        (+1 for a valid model, -1 for a rejected one).
        """
        config_clone_dir = self.clone_as_admin("config")
        path = os.path.join(config_clone_dir, fpath)
        if mode == 'add':
            with open(path, 'w') as fd:
                fd.write(resources)
        elif mode == 'del':
            os.unlink(path)
        if not msg:
            msg = "Validate resources"
        if mode == 'add':
            change_sha = self.gitu_admin.add_commit_and_publish(
                config_clone_dir, "master", msg, fnames=[path])
        if mode == 'del':
            change_sha = self.gitu_admin.add_commit_for_all_new_additions(
                config_clone_dir, msg, publish=True)
        change_nr = self.gu.get_change_number(change_sha)
        note = self.gu.wait_for_verify(change_nr)
        self.assertEqual(note, expected_note)

    def get_resources(self):
        """Fetch the resources tree from the managesf endpoint."""
        gateau = config.USERS[config.ADMIN_USER]['auth_cookie']
        resp = requests.get("%s/manage/resources/" % config.GATEWAY_URL,
                            cookies={'auth_pubtkt': gateau})
        return resp.json()

    def test_validate_wrong_resource_workflow(self):
        """ Check resources - wrong model is detected by config-check """
        # This resource is not correct
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      unknown-key: value
      description: test for functional test
"""
        # Add the resource file with review then check CI
        resources = resources % name
        self.propose_resources_change_check_ci(fpath,
                                               resources=resources,
                                               mode='add',
                                               expected_note=-1)

    def test_validate_correct_resource_workflow(self):
        """ Check resources - good model is detected by config-check """
        # This resource is correct
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
"""
        # Add the resource file with review then check CI
        resources = resources % name
        self.propose_resources_change_check_ci(fpath,
                                               resources=resources,
                                               mode='add')

    def test_validate_resources_deletion(self):
        """ Check resources - deletions detected and authorized via flag """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members: []
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Remove the resource file via the review
        self.propose_resources_change_check_ci(fpath, mode='del',
                                               expected_note=-1)
        # Remove the resource file with "allow-delete" flag via the review
        shutil.rmtree(os.path.join(self.gitu_admin.tempdir, 'config'))
        msg = "Remove resource with flag\nsf-resources: allow-delete"
        self.propose_resources_change_check_ci(fpath, mode='del', msg=msg)

    @skipIfServiceMissing('storyboard')
    def test_CUD_project(self):
        """ Check resources - ops on project work as expected """
        sclient = SFStoryboard(config.GATEWAY_URL + "/storyboard_api",
                               config.USERS[config.USER_4]['auth_cookie'])
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      issue-tracker: SFStoryboard
      source-repositories:
        - %(pname)s/%(r1name)s
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
"""
        tmpl_keys = {'pname': create_random_str(),
                     'r1name': create_random_str()}
        resources = resources % tmpl_keys
        # Add the resources file w/o review
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Some checks to validate stuff have been created
        projects = [p.name for p in sclient.projects.get_all()]
        self.assertIn("%s/%s" % (tmpl_keys['pname'], tmpl_keys['r1name']),
                      projects)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertIn(tmpl_keys['pname'], project_groups)
        # Modify the project resource
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      issue-tracker: SFStoryboard
      source-repositories:
        - %(pname)s/%(r1name)s
        - %(pname)s/%(r2name)s
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(pname)s
    %(pname)s/%(r2name)s:
      description: The server part
      acl: %(pname)s
  acls:
    %(pname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
"""
        tmpl_keys.update({'r2name': create_random_str()})
        resources = resources % tmpl_keys
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Some checks to validate stuff have been updated
        projects = [p.name for p in sclient.projects.get_all()]
        for name in (tmpl_keys['r1name'], tmpl_keys['r2name']):
            self.assertIn("%s/%s" % (tmpl_keys['pname'], name), projects)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertIn(tmpl_keys['pname'], project_groups)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the project group has been deleted
        # Note the project (in storyboard) is not deleted
        # this is a current limitation of the API (01/13/2017)
        project_groups = [p.name for p in sclient.project_groups.get_all()]
        self.assertFalse(tmpl_keys['pname'] in project_groups)

    def test_CUD_group(self):
        """ Check resources - ops on group work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
        - [email protected]
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check members on Gerrit
        gid = self.gu.get_group_id(name)
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        # Modify resources Add/Remove members w/o review
        resources = """resources:
  groups:
    %s:
      description: test for functional test
      members:
        - [email protected]
        - [email protected]
"""
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check members on Gerrit
        gid = self.gu.get_group_id(name)
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        self.assertNotIn("*****@*****.**", members)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the group has been deleted
        self.assertFalse(self.gu.get_group_id(name))

    def test_CD_repo(self):
        """ Check resources - ops on git repositories work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        name = create_random_str()
        resources = """resources:
  repos:
    %s:
      description: test for functional test
      default-branch: br1
      branches:
        br1: HEAD
        br2: HEAD
        master: '0'
"""
        # Add the resources file w/o review
        resources = resources % name
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check the project has been created
        self.assertTrue(self.gu.project_exists(name))
        # Check branches
        branches = self.gu.g.get('/projects/%s/branches/' % name)
        for wref in ("HEAD", "br1", "br2"):
            found = False
            for ref in branches:
                if found:
                    continue
                if ref['ref'].endswith(wref):
                    found = True
                    # HEAD must point at the declared default branch
                    if ref['ref'] == 'HEAD' and ref['revision'] != "br1":
                        raise Exception("Wrong default branch")
            if not found:
                raise Exception("Requested branch %s not found" % wref)
        # Del the resources file w/o review
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check the project has been deleted
        self.assertFalse(self.gu.project_exists(name))

    def test_CRUD_resources(self):
        """ Check resources - bulk ops on resources work as expected """
        fpath = "resources/%s.yaml" % create_random_str()
        tmpl_keys = {'pname': create_random_str(),
                     'r1name': create_random_str(),
                     'r2name': create_random_str(),
                     'aname': create_random_str(),
                     'g1name': create_random_str(),
                     'g2name': create_random_str()}
        resources = """resources:
  projects:
    %(pname)s:
      description: An awesome project
      contacts:
        - [email protected]
      source-repositories:
        - %(pname)s/%(r1name)s
        - %(pname)s/%(r2name)s
      website: http://ichiban-cloud.io
      documentation: http://ichiban-cloud.io/docs
      issue-tracker-url: http://ichiban-cloud.bugtrackers.io
  repos:
    %(pname)s/%(r1name)s:
      description: The server part
      acl: %(aname)s
    %(pname)s/%(r2name)s:
      description: The client part
      acl: %(aname)s
  acls:
    %(aname)s:
      file: |
        [access "refs/*"]
          read = group Anonymous Users
          read = group %(pname)s/%(g2name)s
          owner = group %(pname)s/%(g1name)s
        [access "refs/heads/*"]
          label-Code-Review = -2..+2 group %(pname)s/%(g2name)s
          label-Verified = -2..+2 group %(pname)s/%(g1name)s
          label-Workflow = -1..+1 group %(pname)s/%(g2name)s
          submit = group %(pname)s/%(g2name)s
          read = group Anonymous Users
          read = group %(pname)s/%(g2name)s
        [access "refs/meta/config"]
          read = group %(pname)s/%(g2name)s
        [receive]
          requireChangeId = true
        [submit]
          mergeContent = false
          action = fast forward only
      groups:
        - %(pname)s/%(g1name)s
        - %(pname)s/%(g2name)s
  groups:
    %(pname)s/%(g1name)s:
      members:
        - [email protected]
    %(pname)s/%(g2name)s:
      members:
        - [email protected]
        - [email protected]
"""
        # Add the resources file w/o review
        resources = resources % tmpl_keys
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Check resources have been created
        self.assertTrue(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r1name'])))
        self.assertTrue(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r2name'])))
        gid = self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']))
        members = [m['email'] for m in self.gu.get_group_members(gid)]
        self.assertEqual(len(members), 1)
        self.assertIn("*****@*****.**", members)
        gid2 = self.gu.get_group_id(
            os.path.join(tmpl_keys['pname'], tmpl_keys['g2name']))
        members = [m['email'] for m in self.gu.get_group_members(gid2)]
        self.assertEqual(len(members), 2)
        self.assertIn("*****@*****.**", members)
        self.assertIn("*****@*****.**", members)
        # Verify ACLs have been written for both repo
        for r in ('r1name', 'r2name'):
            rname = os.path.join(tmpl_keys['pname'], tmpl_keys[r])
            acl = self.gu.g.get('access/?project=%s' % rname)
            self.assertIn(
                gid2, acl[rname]['local']['refs/heads/*']['permissions']
                ['submit']['rules'].keys())
        # Verify the resources endpoint know about what we pushed
        res = self.get_resources()
        self.assertIn(tmpl_keys['pname'],
                      res['resources']['projects'].keys())
        self.assertIn(tmpl_keys['aname'], res['resources']['acls'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g1name']),
                      res['resources']['groups'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['g2name']),
                      res['resources']['groups'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r1name']),
                      res['resources']['repos'].keys())
        self.assertIn(os.path.join(tmpl_keys['pname'], tmpl_keys['r2name']),
                      res['resources']['repos'].keys())
        # Modify the ACL to verify repos ACL are updated
        resources = re.sub(
            'submit = group .*',
            'submit = group %s' % os.path.join(tmpl_keys['pname'],
                                               tmpl_keys['g1name']),
            resources)
        self.set_resources_then_direct_push(fpath,
                                            resources=resources,
                                            mode='add')
        # Verify ACLs have been updated for both repo
        for r in ('r1name', 'r2name'):
            rname = os.path.join(tmpl_keys['pname'], tmpl_keys[r])
            acl = self.gu.g.get('access/?project=%s' % rname)
            self.assertIn(
                gid, acl[rname]['local']['refs/heads/*']['permissions']
                ['submit']['rules'].keys())
        # Now let's remove all that awesome resources
        self.set_resources_then_direct_push(fpath, mode='del')
        # Check resources have been deleted
        self.assertFalse(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r1name'])))
        self.assertFalse(
            self.gu.project_exists(
                os.path.join(tmpl_keys['pname'], tmpl_keys['r2name'])))
        self.assertFalse(
            self.gu.get_group_id(
                os.path.join(tmpl_keys['pname'], tmpl_keys['g1name'])))
        self.assertFalse(
            self.gu.get_group_id(
                os.path.join(tmpl_keys['pname'], tmpl_keys['g2name'])))
        res = self.get_resources()
        projects = res['resources'].get('projects', {})
        acls = res['resources'].get('acls', {})
        groups = res['resources'].get('groups', {})
        repos = res['resources'].get('repos', {})
        self.assertNotIn(tmpl_keys['pname'], projects.keys())
        self.assertNotIn(tmpl_keys['aname'], acls.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['g1name']), groups.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['g2name']), groups.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['r1name']), repos.keys())
        self.assertNotIn(os.path.join(tmpl_keys['pname'],
                                      tmpl_keys['r2name']), repos.keys())

    def test_GET_resources(self):
        """ Check resources - GET resources works as expected"""
        cookies = dict(auth_pubtkt=config.USERS[config.USER_1]['auth_cookie'])
        ret = requests.get("%s/manage/resources/" % config.GATEWAY_URL,
                           cookies=cookies)
        self.assertIn('resources', ret.json())

    def test_GET_missing_resources(self):
        """ Check resources - GET missing resources works as expected"""
        token = config.USERS[config.ADMIN_USER]['auth_cookie']
        prev = "resources: {}"
        new = """resources:
  groups:
    %(gname)s:
      description: A test group
      members: ['*****@*****.**']
"""
        group_name = create_random_str()
        data = {'prev': prev, 'new': new % {'gname': group_name}}
        # Direct PUT resources bypassing the config repo workflow
        requests.put("%s/manage/resources/" % config.GATEWAY_URL,
                     json=data, cookies={'auth_pubtkt': token})
        # Verify managesf detects diff and propose a re-sync resource struct
        ret = requests.get("%s/manage/resources/?get_missing_"
                           "resources=true" % config.GATEWAY_URL,
                           cookies={'auth_pubtkt': token})
        logs, resources = ret.json()
        self.assertListEqual(logs, [])
        self.assertIn(group_name, resources['resources']['groups'])
        # Call the resources.sh script on managesf node to propose
        # a review on the config repo to re-sync with the reality
        cmd = ['/usr/local/bin/resources.sh', 'get_missing_resources',
               'submit']
        self.ssh_run_cmd(config.SERVICE_PRIV_KEY_PATH, 'root',
                         config.GATEWAY_HOST, cmd)
        # Get change id of the submitted review
        search_string = "Propose missing resources to the config repo"
        # Distinct name for the response: the original reused 'r' as the
        # loop variable below, shadowing the response object.
        resp = requests.get('%s/r/changes/?q=%s' % (config.GATEWAY_URL,
                                                    search_string))
        lastid = 0
        # content[4:] skips Gerrit's ")]}'" XSSI protection prefix
        for r in json.loads(resp.content[4:]):
            if r['_number'] > lastid:
                lastid = r['_number']
        self.assertEqual(self.gu.wait_for_verify(lastid), 1)
        # Check flag "sf-resources: skip-apply" in the commit msg
        change = self.gu.g.get(
            'changes/?q=%s&o=CURRENT_REVISION&o=CURRENT_COMMIT' % lastid)[0]
        revision = change["current_revision"]
        commit = change['revisions'][revision]["commit"]
        self.assertEqual(commit["message"].split('\n')[0],
                         'Propose missing resources to the config repo')
        self.assertTrue(
            commit["message"].find('sf-resources: skip-apply') > 0)
        # Approve the change and wait for the +2
        self.gu.submit_change_note(change['id'], "current",
                                   "Code-Review", "2")
        self.gu.submit_change_note(change['id'], "current",
                                   "Workflow", "1")
        # Check config-update return a success
        # The flag sf-resources: skip-apply should be detected
        # by the config update. Then missing resources won't
        # by concidered new and the resources apply will be skipped.
        # This tests (checking config-update succeed) confirm
        # resource apply have been skipped if not managesf resources
        # apply would have return 409 error making config-update failed too.
        # If not True then we cannot concider config-update succeed
        config_update_log = self.ju.wait_for_config_update(revision)
        self.assertIn("Skip resources apply.", config_update_log)
        self.assertIn("SUCCESS", config_update_log)
        # Checking again missing resources must return nothing
        ret = requests.get("%s/manage/resources/?get_missing_"
                           "resources=true" % config.GATEWAY_URL,
                           cookies={'auth_pubtkt': token})
        logs, resources = ret.json()
        self.assertListEqual(logs, [])
        self.assertEqual(len(resources['resources']), 0)
class SFProvisioner(object): """ This provider is only intended for testing SF backup/restore and update. It provisions some user datas in a SF installation based on a resourses.yaml file. Later those data can be checked by its friend the SFChecker. Provisioned data should remain really simple. """ def __init__(self): with open("%s/resources.yaml" % pwd, 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.msu = ManageSfUtils(config.GATEWAY_URL) self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.ju = JenkinsUtils() self.rm = RedmineUtils( config.GATEWAY_URL + "/redmine/", auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) def create_project(self, name): print " Creating project %s ..." % name self.msu.createProject(name, config.ADMIN_USER) def push_files_in_project(self, name, files): print " Add files(%s) in a commit ..." % ",".join(files) # TODO(fbo); use gateway host instead of gerrit host self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) clone_dir = self.ggu.clone(self.url, name, config_review=False) self.clone_dir = clone_dir for f in files: file(os.path.join(clone_dir, f), 'w').write('data') self.ggu.git_add(clone_dir, (f, )) self.ggu.add_commit_for_all_new_additions(clone_dir) self.ggu.direct_push_branch(clone_dir, 'master') def create_issues_on_project(self, name, issues): print " Create %s issue(s) for that project ..." % len(issues) for i in issues: issue = self.rm.create_issue(name, i['name']) yield issue, i['review'] def create_jenkins_jobs(self, name, jobnames): print " Create Jenkins jobs(%s) ..." 
% ",".join(jobnames) for jobname in jobnames: self.ju.create_job("%s_%s" % (name, jobname)) def create_pads(self, amount): # TODO pass def create_pasties(self, amount): # TODO pass def create_review(self, project, issue): """Very basic review creator for statistics and restore tests purposes.""" self.ggu.config_review(self.clone_dir) self.ggu.add_commit_in_branch(self.clone_dir, 'branch_' + issue, commit='test\n\nBug: %s' % issue) self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue) def provision(self): for project in self.resources['projects']: print "Create user datas for %s" % project['name'] self.create_project(project['name']) self.push_files_in_project(project['name'], [f['name'] for f in project['files']]) for i, review in self.create_issues_on_project( project['name'], project['issues']): if review: print "Create review for bug %i in %s" % (i, project['name']) self.create_review(project['name'], str(i)) self.create_jenkins_jobs(project['name'], [j['name'] for j in project['jobnames']]) self.create_pads(2) self.create_pasties(2)
class SFProvisioner(object): """ This provider is only intended for testing SF backup/restore and update. It provisions some user datas in a SF installation based on a resourses.yaml file. Later those data can be checked by its friend the SFChecker. Provisioned data should remain really simple. """ def __init__(self): with open("%s/resources.yaml" % pwd, 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.msu = ManageSfUtils(config.GATEWAY_URL) self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.ju = JenkinsUtils() self.rm = RedmineUtils( config.GATEWAY_URL + "/redmine/", auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) def create_project(self, name): print " Creating project %s ..." % name self.msu.createProject(name, config.ADMIN_USER) def push_files_in_project(self, name, files): print " Add files(%s) in a commit ..." % ",".join(files) # TODO(fbo); use gateway host instead of gerrit host self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) clone_dir = self.ggu.clone(self.url, name, config_review=False) self.clone_dir = clone_dir for f in files: file(os.path.join(clone_dir, f), 'w').write('data') self.ggu.git_add(clone_dir, (f,)) self.ggu.add_commit_for_all_new_additions(clone_dir) self.ggu.direct_push_branch(clone_dir, 'master') def create_issues_on_project(self, name, issues): print " Create %s issue(s) for that project ..." % len(issues) for i in issues: if is_present('SFRedmine'): issue = self.rm.create_issue(name, i['name']) else: issue = random.randint(1,100) yield issue, i['review'] def create_jenkins_jobs(self, name, jobnames): print " Create Jenkins jobs(%s) ..." 
% ",".join(jobnames) for jobname in jobnames: self.ju.create_job("%s_%s" % (name, jobname)) def create_pads(self, amount): # TODO pass def create_pasties(self, amount): # TODO pass def simple_login(self, user): """log as user to make the user listable""" get_cookie(user, config.USERS[user]['password']) def create_review(self, project, issue): """Very basic review creator for statistics and restore tests purposes.""" self.ggu.config_review(self.clone_dir) self.ggu.add_commit_in_branch(self.clone_dir, 'branch_' + issue, commit='test\n\nBug: %s' % issue) self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue) def create_local_user(self, username, password, email): self.msu.create_user(username, password, email) def command(self, cmd): return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"), "root", config.GATEWAY_HOST, shlex.split(cmd)) def compute_checksum(self, f): out = self.command("md5sum %s" % f)[0] if out: return out.split()[0] def provision(self): for cmd in self.resources['commands']: print "Execute command %s" % cmd['cmd'] print self.command(cmd['cmd']) checksum_list = {} for checksum in self.resources['checksum'] : print "Compute checksum for file %s" % checksum['file'] checksum_list[checksum['file']] = self.compute_checksum( checksum['file']) yaml.dump(checksum_list, file('/tmp/pc_checksums.yaml', 'w')) for user in self.resources['local_users']: print "Create local user %s" % user['username'] self.create_local_user(user['username'], user['password'], user['email']) for u in self.resources['users']: print "log in as %s" % u['name'] self.simple_login(u['name']) for project in self.resources['projects']: print "Create user datas for %s" % project['name'] self.create_project(project['name']) self.push_files_in_project(project['name'], [f['name'] for f in project['files']]) for i, review in self.create_issues_on_project(project['name'], project['issues']): if review: print "Create review for bug %i in %s" % (i, project['name']) 
self.create_review(project['name'], str(i)) self.create_jenkins_jobs(project['name'], [j['name'] for j in project['jobnames']]) self.create_pads(2) self.create_pasties(2)
class TestProjectTestsWorkflow(Base):
    """ Functional tests to verify the configuration of a project test

    Exercises the full path: create a project, wire it into the config
    repo (zuul/jjb), and check that Gerrit changes get Verified votes
    from the CI jobs.
    """

    @classmethod
    def setUpClass(cls):
        # Shared across tests: repo-management helper and the path to the
        # sample project used to seed new repositories.
        cls.ru = ResourcesUtils()
        cls.sample_project_dir = \
            os.path.join(config.SF_TESTS_DIR, "sample_project/")

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        """Prepare admin/user Gerrit clients, a Jenkins client, an admin
        git helper, and snapshot the config repo's job/zuul project
        definitions so tearDown can restore them."""
        super(TestProjectTestsWorkflow, self).setUp()
        self.projects = []           # repos to delete in tearDown
        self.dirs_to_delete = []     # local clone dirs to rmtree in tearDown
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])
        # Clone the config repo and keep job/zuul config content
        self.config_clone_dir = self.clone_as_admin("config")
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        self.original_project = file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read()
        # Only restore the config repo if a test actually merged a change
        # into it (set in test_check_project_test_workflow).
        self.need_restore_config_repo = False
        # Put USER_2 as core for config project
        self.gu.add_group_member(config.USER_2, "config-core")

    def tearDown(self):
        """Undo test side effects: restore the config repo if modified,
        delete created repos and temporary clone directories."""
        super(TestProjectTestsWorkflow, self).tearDown()
        if self.need_restore_config_repo:
            self.restore_config_repo(self.original_project,
                                     self.original_zuul_projects)
        for name in self.projects:
            self.ru.direct_delete_repo(name)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def assert_reviewer_approvals(self, change_id, value):
        """Poll (up to ~300s) until the 'jenkins' reviewer's Verified
        approval on change_id equals 'value', then assert it."""
        approvals = {}
        for _ in range(300):
            approvals = self.gu.get_reviewer_approvals(change_id, 'jenkins')
            if approvals and approvals.get('Verified') == value:
                break
            time.sleep(1)
        self.assertEqual(value, approvals.get('Verified'))

    def clone_as_admin(self, pname):
        """Clone project 'pname' over SSH as admin; register the clone's
        parent directory for cleanup and return the clone path."""
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, project, zuul):
        """Write back the saved jobs/zuul projects.yaml contents, direct
        push to master and wait for the config-update job to apply it."""
        logger.info("Restore {zuul,jobs}/projects.yaml")
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            project)
        change_sha = self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore {zuul,jobs}/projects.yaml")
        logger.info("Waiting for config-update on %s" % change_sha)
        self.ju.wait_for_config_update(change_sha)

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        # (bypasses review). Returns the pushed commit sha.
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def push_review_as_admin(self, clone_dir, msg):
        # Stage, commit and push the additions on master for review
        # (goes through Gerrit). Returns the pushed commit sha.
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.review_push_branch(clone_dir, 'master')

    def create_project(self, name):
        """Create repo 'name' and remember it for deletion in tearDown."""
        self.ru.direct_create_repo(name)
        self.projects.append(name)

    def test_timestamped_logs(self):
        """Test that jenkins timestamps logs"""
        # Done here to make sure a config-update job was run and to avoid
        # duplicating code
        # NOTE(review): pattern is not a raw string and the '.' before the
        # millisecond part matches any character, not just a literal dot —
        # presumably r'\d{2}:\d{2}:\d{2}\.\d{0,3}' was intended; it still
        # matches valid timestamps, just more loosely.
        timestamp_re = re.compile('\d{2}:\d{2}:\d{2}.\d{0,3}')
        n = self.ju.get_last_build_number("config-update", "lastBuild")
        cu_logs = self.ju.get_job_logs("config-update", n)
        self.assertTrue(cu_logs is not None)
        # Every non-empty log line must carry a timestamp prefix.
        for l in cu_logs.split('\n'):
            if l:
                self.assertRegexpMatches(l, timestamp_re, msg=l)

    def test_check_project_test_workflow(self):
        """ Validate new project to test via zuul
        """
        # We want to create a project, provide project source
        # code with tests. We then configure zuul/jjb to handle the
        # run of the test cases. We then validate Gerrit has been
        # updated about the test results
        # We use the sample-project (that already exists)

        pname = 'test_workflow_%s' % create_random_str()
        logger.info("Creating project %s" % pname)
        # Create it
        self.create_project(pname)

        logger.info("Populating the project with %s" %
                    self.sample_project_dir)
        # Add the sample-project to the empty repository
        clone_dir = self.clone_as_admin(pname)
        copytree(self.sample_project_dir, clone_dir)
        self.commit_direct_push_as_admin(clone_dir, "Add the sample project")

        # Change to config/{zuul,jobs}/projects.yaml
        # in order to test the new project
        logger.info("Adding config-repo configuration")
        # zuul config: reuse the zuul-demo pipeline entry for our project.
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("zuul-demo", pname),
        )
        # JJB config: duplicate the zuul-demo job-project entry under the
        # new project name and append it.
        ycontent2 = load(file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read())
        sp2 = copy.deepcopy(
            [p for p in ycontent2
             if 'project' in p and p['project']['name'] == 'zuul-demo'][0])
        sp2['project']['name'] = pname
        ycontent2.append(sp2)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            dump(ycontent2))

        # Send review (config-check) will be triggered
        logger.info("Submitting the config review")
        change_sha = self.push_review_as_admin(
            self.config_clone_dir,
            "Add config definition in Zuul/JJB config for %s" % pname)
        change_nr = self.gu.get_change_number(change_sha)
        logger.info("Waiting for verify +1 on change %d" % change_nr)
        # NOTE(review): assertEquals is a deprecated alias of assertEqual.
        self.assertEquals(self.gu.wait_for_verify(change_nr), 1)

        # review the config change as a member from the config-core group
        logger.info("Approving and waiting for verify +2")
        self.gu2.submit_change_note(change_nr, "current", "Code-Review", "2")
        self.gu2.submit_change_note(change_nr, "current", "Workflow", "1")

        # Wait up to 60s for the gate pipeline to set Verified +2.
        for retry in xrange(60):
            jenkins_vote = self.gu.get_vote(change_nr, "Verified")
            if jenkins_vote == 2:
                break
            time.sleep(1)
        self.assertEquals(jenkins_vote, 2)

        # verify whether zuul merged the patch
        logger.info("Waiting for change to be merged")
        for retry in xrange(60):
            change_status = self.gu.get_info(change_nr)['status']
            if change_status == "MERGED":
                break
            time.sleep(1)
        self.assertEqual(change_status, 'MERGED')
        # From here on the config repo holds our change; tell tearDown to
        # restore the original content.
        self.need_restore_config_repo = True

        logger.info("Waiting for config-update")
        config_update_log = self.ju.wait_for_config_update(change_sha)
        self.assertIn("Finished: SUCCESS", config_update_log)

        # Propose a change on a the repo and expect a Verified +1
        logger.info("Submiting a test change to %s" % pname)
        change_sha = self.gitu_admin.add_commit_and_publish(
            clone_dir, 'master', "Add useless file", self.un)
        change_nr = self.gu.get_change_number(change_sha)
        logger.info("Waiting for verify +1 on change %d" % change_nr)
        self.assertEquals(self.gu.wait_for_verify(change_nr), 1)

        # Update the change on a the repo and expect a Verified -1
        logger.info("Submiting a test change to %s suppose to fail" % pname)
        # Make the project's test script fail so CI votes -1.
        data = "#!/bin/bash\nexit 1\n"
        file(os.path.join(clone_dir, "run_tests.sh"), 'w').write(data)
        # 0755 is the Python 2 octal literal (0o755 in Python 3).
        os.chmod(os.path.join(clone_dir, "run_tests.sh"), 0755)
        # msg=None amends the current change rather than creating a new one.
        self.gitu_admin.add_commit_and_publish(
            clone_dir, "master", None, fnames=["run_tests.sh"])
        logger.info("Waiting for verify -1 on change %d" % change_nr)
        self.assertEquals(self.gu.wait_for_verify(change_nr), -1)

        logger.info("Validate jobs ran via the job api %s" % pname)
        # This piece of code is there by convenience ...
        # TODO: Should be moved in the job api tests file.
        # Test the manageSF jobs API: query per patch & revision
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        patch = self.gu.get_change_last_patchset(change_id)['_number']
        cookie = get_cookie(config.ADMIN_USER, config.ADMIN_PASSWORD)
        cookies = {"auth_pubtkt": cookie}
        base_url = config.GATEWAY_URL + "/manage/jobs/"
        # Both jobs must report more than one Jenkins run for the patchset
        # (the passing run plus the failing one above).
        for j in ["%s-functional-tests" % pname, "%s-unit-tests" % pname]:
            job = requests.get(base_url + '%s/?change=%s' % (j, patch),
                               cookies=cookies).json()
            self.assertTrue("jenkins" in job.keys(), job)
            self.assertTrue(len(job["jenkins"]) > 1, job)