class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        # Load the provisioning description from resources.yaml located
        # next to this script (`pwd` is a module-level constant defined
        # elsewhere in this file).
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary constructors; resources.yaml is test-owned, but
        # yaml.safe_load would be safer — confirm before changing.
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        # Authenticate the admin user once and cache the cookie so all
        # helpers below can reuse it.
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ru = ResourcesUtils()
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.stb_client = SFStoryboard(
            config.GATEWAY_URL + "/storyboard_api",
            config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_resources(self):
        """Create the resources tree via the resources API, then open a
        review on the first few provisioned repositories."""
        print " Creating resources ..."
        # Versions before 2.4.0 do not support the review-dashboard key;
        # strip it so the resources engine accepts the payload.
        if cmp_version(os.environ.get("PROVISIONED_VERSION", "0.0"),
                       "2.4.0"):
            # Remove review-dashboard
            for p in self.resources['resources']['projects'].values():
                del p['review-dashboard']
        self.ru.create_resources("provisioner",
                                 {'resources': self.resources['resources']})
        # Create review for the first few repositories
        for project in self.resources['resources']['repos'].keys()[:3]:
            self.clone_project(project)
            self.create_review(project, "Test review for %s" % project)

    def create_project(self, name):
        """Create a bare repository named *name* via the resources API."""
        print " Creating project %s ..." % name
        self.ru.create_repo(name)

    def clone_project(self, name):
        """Clone *name* over SSH as admin; stores the clone path in
        self.clone_dir for use by later helpers (stateful by design)."""
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        self.clone_dir = self.ggu.clone(self.url, name, config_review=False)

    def push_files_in_project(self, name, files):
        """Create each file in *files* with dummy content and direct-push
        a single commit to master."""
        print " Add files(%s) in a commit ..." % ",".join(files)
        self.clone_project(name)
        for f in files:
            # Python 2 file() builtin; handle is not closed explicitly
            # and relies on refcounting.
            file(os.path.join(self.clone_dir, f), 'w').write('data')
            self.ggu.git_add(self.clone_dir, (f, ))
        self.ggu.add_commit_for_all_new_additions(self.clone_dir)
        self.ggu.direct_push_branch(self.clone_dir, 'master')

    def create_storyboard_issue(self, name, issue_name):
        """Create a story plus one task on project *name* in Storyboard.

        Returns (task_id, story_id)."""
        project = self.stb_client.projects.get(name)
        story = self.stb_client.stories.create(title=issue_name)
        task = self.stb_client.tasks.create(story_id=story.id,
                                            project_id=project.id,
                                            title=issue_name)
        return task.id, story.id

    def create_issues_on_project(self, name, issues):
        """Yield ((task_id, story_id), review_flag) for each issue
        description; issue ids are faked when Storyboard is absent."""
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            if is_present('storyboard'):
                issue = self.create_storyboard_issue(name, i['name'])
            else:
                # No storyboard service: fabricate plausible ids so the
                # rest of the provisioning flow still runs.
                issue = (random.randint(1, 100), random.randint(1, 100))
            yield issue, i['review']

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def simple_login(self, user, password):
        """log as user to make the user listable"""
        get_cookie(user, password)

    def create_review(self, project, commit_message, branch='master'):
        """Very basic review creator for statistics and restore tests
        purposes."""
        # Uses self.clone_dir set by the last clone_project() call.
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir, branch,
                                      commit=commit_message)
        self.ggu.review_push_branch(self.clone_dir, branch)

    def create_review_for_issue(self, project, issue):
        """Open a review whose message references the (task, story) pair
        so Gerrit/Storyboard linking can be verified later."""
        self.create_review(project,
                           'test\n\nTask: #%s\nStory: #%s' % (issue[0],
                                                              issue[1]),
                           'branch_%s' % str(issue[0]))

    def create_local_user(self, username, password, email):
        """Create a local (non-IdP) user through manageSF."""
        self.msu.create_user(username, password, email)

    def command(self, cmd):
        """Run *cmd* as root on the gateway host over SSH and return the
        ssh_run_cmd result tuple."""
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"),
                           "root",
                           config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        """Return the md5 checksum of remote file *f*, or None when the
        command produced no output."""
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def read_file(self, f):
        """Return the content of remote file *f* (stdout of `cat`)."""
        return self.command("cat %s" % f)[0]

    def provision(self):
        """Drive the full provisioning: run prep commands, snapshot file
        checksums, create users, then projects with files/issues/reviews.

        The checksum/content snapshot is dumped to pc_checksums.yaml so
        the SFChecker can compare it after a backup/restore cycle."""
        for cmd in self.resources['commands']:
            print "Execute command %s" % cmd['cmd']
            print self.command(cmd['cmd'])
        checksum_list = {}
        for checksum in self.resources['checksum']:
            print "Compute checksum for file %s" % checksum['file']
            checksum_list[checksum['file']] = self.compute_checksum(
                checksum['file'])
            # Also keep the raw content for a stronger comparison.
            checksum_list['content_' + checksum['file']] = self.read_file(
                checksum['file'])
        yaml.dump(checksum_list, file('pc_checksums.yaml', 'w'),
                  default_flow_style=False)
        for user in self.resources['local_users']:
            print "Create local user %s" % user['username']
            self.create_local_user(user['username'], user['password'],
                                   user['email'])
            # Log in once so the user becomes listable.
            self.simple_login(user['username'], user['password'])
        for u in self.resources['users']:
            print "log in as %s" % u['name']
            self.simple_login(u['name'], config.USERS[u['name']]['password'])
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(
                    project['name'], project['issues']):
                if review:
                    print "Create review for bug %s in %s" % (
                        i, project['name'])
                    self.create_review_for_issue(project['name'], i)
        self.create_resources()
        self.create_pads(2)
        self.create_pasties(2)
class TestProjectTestsWorkflow(Base):
    """ Functional tests to verify the configuration of a project test
    """
    @classmethod
    def setUpClass(cls):
        # Shared manageSF client and the path of the sample project used
        # to seed new repositories.
        cls.msu = ManageSfUtils(config.GATEWAY_URL)
        cls.sample_project_dir = \
            os.path.join(config.SF_TESTS_DIR, "sample_project/")

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        # Per-test bookkeeping: projects to delete and clone directories
        # to remove in tearDown.
        self.projects = []
        self.dirs_to_delete = []
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])
        # Clone the config repo and make change to it
        # in order to test the new sample_project
        self.config_clone_dir = self.clone_as_admin("config")
        # Snapshot the original config files so tearDown can restore them.
        self.original_layout = file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml")).read()
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        self.original_project = file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read()
        # Put USER_2 as core for config project
        self.gu.add_group_member(config.USER_2, "config-core")

    def tearDown(self):
        # Restore the config repo, delete every project created by the
        # test, then remove local clone directories.
        self.restore_config_repo(self.original_layout,
                                 self.original_project,
                                 self.original_zuul_projects)
        for name in self.projects:
            self.msu.deleteProject(name, config.ADMIN_USER)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def assert_reviewer_approvals(self, change_id, value):
        """Poll up to 90s until jenkins' Verified vote on *change_id*
        equals *value*, then assert it."""
        approvals = {}
        for _ in range(90):
            approvals = self.gu.get_reviewer_approvals(change_id, 'jenkins')
            if approvals and approvals.get('Verified') == value:
                break
            time.sleep(1)
        self.assertEqual(value, approvals.get('Verified'))

    def clone_as_admin(self, pname):
        """Clone project *pname* as admin and register its parent dir for
        cleanup; returns the clone directory."""
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, layout, project, zuul):
        """Write back the saved layout/projects contents and direct-push
        the restore commit to master."""
        file(os.path.join(
            self.config_clone_dir, "zuul/layout.yaml"), 'w').write(
            layout)
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            project)
        self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore layout.yaml and projects.yaml")

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def push_review_as_admin(self, clone_dir, msg):
        # Stage, commit and push the additions on master for review
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        self.gitu_admin.review_push_branch(clone_dir, 'master')

    def create_project(self, name, user, options=None):
        """Create *name* via manageSF and remember it for tearDown."""
        self.msu.createProject(name, user, options)
        self.projects.append(name)

    def test_check_project_test_workflow(self):
        """ Validate new project to test via zuul layout.yaml
        """
        # We want to create a project, provide project source
        # code with tests. We then configure zuul/jjb to handle the
        # run of the test cases. We then validate Gerrit has been
        # updated about the test results
        # We use the sample-project (that already exists)

        pname = 'test_workflow_%s' % create_random_str()
        # Be sure the project does not exist
        self.msu.deleteProject(pname,
                               config.ADMIN_USER)
        # Create it
        self.create_project(pname, config.ADMIN_USER)

        # Add the sample-project to the empty repository
        clone_dir = self.clone_as_admin(pname)
        copytree(self.sample_project_dir, clone_dir)
        self.commit_direct_push_as_admin(clone_dir, "Add the sample project")

        # Change to config/zuul/layout.yaml and jobs/projects.yaml
        # in order to test the new project
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("zuul-demo", pname),
        )
        ycontent2 = load(file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read())
        # Duplicate the zuul-demo job definition for the new project.
        sp2 = copy.deepcopy(
            [p for p in ycontent2 if 'project' in p and
                p['project']['name'] == 'zuul-demo'][0])
        sp2['project']['name'] = pname
        ycontent2.append(sp2)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            dump(ycontent2))

        # Retrieve the previous build number for config-check
        last_success_build_num_ch = \
            self.ju.get_last_build_number("config-check",
                                          "lastSuccessfulBuild")
        # Retrieve the previous build number for config-update
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")

        # Send review (config-check) will be triggered
        self.push_review_as_admin(
            self.config_clone_dir,
            "Add config definition in Zuul/JJB config for %s" % pname)

        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")
        # Poll until the last build is also the last successful one
        # (i.e. the new config-check run passed).
        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1

        self.assertEqual(last_build_num_ch, last_success_build_num_ch)
        # let some time to Zuul to update the test result to Gerrit.
        time.sleep(2)

        # Get the change id
        change_ids = self.gu.get_my_changes_for_project("config")
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]

        # Check whether zuul sets verified to +1 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+1')

        # review the change
        self.gu2.submit_change_note(change_id, "current", "Code-Review", "2")
        self.gu2.submit_change_note(change_id, "current", "Workflow", "1")

        # now zuul processes gate pipeline and runs config-check job
        # Wait for config-check to finish and verify the success
        self.ju.wait_till_job_completes("config-check",
                                        last_success_build_num_ch,
                                        "lastSuccessfulBuild")
        last_build_num_ch, last_success_build_num_ch = 0, 1
        attempt = 0
        while last_build_num_ch != last_success_build_num_ch:
            if attempt >= 90:
                break
            time.sleep(1)
            last_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastBuild")
            last_success_build_num_ch = \
                self.ju.get_last_build_number("config-check",
                                              "lastSuccessfulBuild")
            attempt += 1

        self.assertEqual(last_build_num_ch, last_success_build_num_ch)

        # Check whether zuul sets verified to +2 after running the tests
        # let some time to Zuul to update the test result to Gerrit.
        self.assert_reviewer_approvals(change_id, '+2')

        # verify whether zuul merged the patch
        change = self.gu.get_change('config', 'master', change_id)
        change_status = change['status']
        attempt = 0
        while change_status != 'MERGED':
            if attempt >= 90:
                break
            time.sleep(1)
            change = self.gu.get_change('config', 'master', change_id)
            change_status = change['status']
            attempt += 1
        self.assertEqual(change_status, 'MERGED')

        # Test post pipe line
        # as the patch is merged, post pieline should run config-update job
        # Wait for config-update to finish and verify the success
        self.ju.wait_till_job_completes("config-update",
                                        last_success_build_num_cu,
                                        "lastSuccessfulBuild")
        last_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastBuild")
        last_success_build_num_cu = \
            self.ju.get_last_build_number("config-update",
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_cu, last_success_build_num_cu)

        # Retrieve the prev build number for pname-unit-tests
        # Retrieve the prev build number for pname-functional-tests
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")

        # Test config-update
        # config-update should have created jobs for pname
        # Trigger tests on pname
        # Send a review and check tests has been run
        self.gitu_admin.add_commit_and_publish(
            clone_dir, 'master', "Add useless file", self.un)

        # Wait for pname-unit-tests to finish and verify the success
        self.ju.wait_till_job_completes("%s-unit-tests" % pname,
                                        last_success_build_num_sp_ut,
                                        "lastSuccessfulBuild")
        # Wait for pname-functional-tests to end and check the success
        self.ju.wait_till_job_completes("%s-functional-tests" % pname,
                                        last_success_build_num_sp_ft,
                                        "lastSuccessfulBuild")

        # Check the unit tests succeed
        last_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ut = \
            self.ju.get_last_build_number("%s-unit-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ut, last_success_build_num_sp_ut)

        # Check the functional tests succeed
        last_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastBuild")
        last_success_build_num_sp_ft = \
            self.ju.get_last_build_number("%s-functional-tests" % pname,
                                          "lastSuccessfulBuild")
        self.assertEqual(last_build_num_sp_ft, last_success_build_num_sp_ft)

        # Get the change id
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]

        # let some time to Zuul to update the test result to Gerrit.
        for i in range(90):
            if "jenkins" in self.gu.get_reviewers(change_id):
                break
            time.sleep(1)
        self.assert_reviewer_approvals(change_id, '+1')
class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        # Load the provisioning description from resources.yaml in the
        # current working directory.
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary constructors; yaml.safe_load would be safer — confirm.
        with open('resources.yaml', 'r') as rsc:
            self.resources = yaml.load(rsc)
        # Authenticate the admin user once and cache the cookie.
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.REDMINE_URL,
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_project(self, name):
        """Create project *name* via manageSF as the admin user."""
        print " Creating project %s ..." % name
        self.msu.createProject(name, config.ADMIN_USER)

    def push_files_in_project(self, name, files):
        """Create each file in *files* with dummy content and direct-push
        a single commit to master; keeps the clone in self.clone_dir."""
        print " Add files(%s) in a commit ..." % ",".join(files)
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(self.url, name, config_review=False)
        # Remember the clone so create_review() can reuse it later.
        self.clone_dir = clone_dir
        for f in files:
            # Python 2 file() builtin; handle closed by refcounting.
            file(os.path.join(clone_dir, f), 'w').write('data')
            self.ggu.git_add(clone_dir, (f,))
        self.ggu.add_commit_for_all_new_additions(clone_dir)
        self.ggu.direct_push_branch(clone_dir, 'master')

    def create_issues_on_project(self, name, issues):
        """Create each issue on the Redmine project *name* and yield
        (issue_id, review_flag) pairs."""
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            issue = self.rm.create_issue(name, i['name'])
            yield issue, i['review']

    def create_jenkins_jobs(self, name, jobnames):
        """Create one Jenkins job per entry, named '<project>_<job>'."""
        print " Create Jenkins jobs(%s) ..." % ",".join(jobnames)
        for jobname in jobnames:
            self.ju.create_job("%s_%s" % (name, jobname))

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def create_review(self, project, issue):
        """Very basic review creator for statistics and restore tests
        purposes."""
        # Uses self.clone_dir left behind by push_files_in_project().
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir,
                                      'branch_' + issue,
                                      commit='test\n\nBug: %s' % issue)
        self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue)

    def provision(self):
        """Create every project from resources.yaml with its files,
        Redmine issues, reviews and Jenkins jobs."""
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(project['name'],
                                                           project['issues']):
                if review:
                    print "Create review for bug %i in %s" % (
                        i, project['name'])
                    self.create_review(project['name'], str(i))
            self.create_jenkins_jobs(project['name'],
                                     [j['name'] for j in project['jobnames']])
        self.create_pads(2)
        self.create_pasties(2)
class TestProjectTestsWorkflow(Base):
    """ Functional tests to verify the configuration of a project test
    """
    @classmethod
    def setUpClass(cls):
        # Resources API client and the sample project used to seed new
        # repositories.
        cls.ru = ResourcesUtils()
        cls.sample_project_dir = \
            os.path.join(config.SF_TESTS_DIR, "sample_project/")

    @classmethod
    def tearDownClass(cls):
        pass

    def setUp(self):
        super(TestProjectTestsWorkflow, self).setUp()
        # Per-test bookkeeping: repos to delete and clone directories to
        # remove in tearDown.
        self.projects = []
        self.dirs_to_delete = []
        self.un = config.ADMIN_USER
        self.gu = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[self.un]['auth_cookie'])
        self.gu2 = GerritUtils(
            config.GATEWAY_URL,
            auth_cookie=config.USERS[config.USER_2]['auth_cookie'])
        self.ju = JenkinsUtils()
        self.gu.add_pubkey(config.USERS[self.un]["pubkey"])
        priv_key_path = set_private_key(config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         priv_key_path,
                                         config.USERS[self.un]['email'])
        # Clone the config repo and keep job/zuul config content
        self.config_clone_dir = self.clone_as_admin("config")
        self.original_zuul_projects = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        self.original_project = file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read()
        # Only restore the config repo if a test actually modified it.
        self.need_restore_config_repo = False
        # Put USER_2 as core for config project
        self.gu.add_group_member(config.USER_2, "config-core")

    def tearDown(self):
        super(TestProjectTestsWorkflow, self).tearDown()
        if self.need_restore_config_repo:
            self.restore_config_repo(self.original_project,
                                     self.original_zuul_projects)
        for name in self.projects:
            self.ru.direct_delete_repo(name)
        for dirs in self.dirs_to_delete:
            shutil.rmtree(dirs)

    def assert_reviewer_approvals(self, change_id, value):
        """Poll up to 300s until jenkins' Verified vote on *change_id*
        equals *value*, then assert it."""
        approvals = {}
        for _ in range(300):
            approvals = self.gu.get_reviewer_approvals(change_id,
                                                       'jenkins')
            if approvals and approvals.get('Verified') == value:
                break
            time.sleep(1)
        self.assertEqual(value, approvals.get('Verified'))

    def clone_as_admin(self, pname):
        """Clone project *pname* as admin and register its parent dir for
        cleanup; returns the clone directory."""
        url = "ssh://%s@%s:29418/%s" % (self.un, config.GATEWAY_HOST, pname)
        clone_dir = self.gitu_admin.clone(url, pname)
        if os.path.dirname(clone_dir) not in self.dirs_to_delete:
            self.dirs_to_delete.append(os.path.dirname(clone_dir))
        return clone_dir

    def restore_config_repo(self, project, zuul):
        """Write back the saved projects.yaml contents, direct-push the
        restore commit and wait for the resulting config-update job."""
        logger.info("Restore {zuul,jobs}/projects.yaml")
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            zuul)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            project)
        change_sha = self.commit_direct_push_as_admin(
            self.config_clone_dir,
            "Restore {zuul,jobs}/projects.yaml")
        logger.info("Waiting for config-update on %s" % change_sha)
        self.ju.wait_for_config_update(change_sha)

    def commit_direct_push_as_admin(self, clone_dir, msg):
        # Stage, commit and direct push the additions on master;
        # returns the pushed commit sha.
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.direct_push_branch(clone_dir, 'master')

    def push_review_as_admin(self, clone_dir, msg):
        # Stage, commit and push the additions on master for review;
        # returns the pushed commit sha.
        self.gitu_admin.add_commit_for_all_new_additions(clone_dir, msg)
        return self.gitu_admin.review_push_branch(clone_dir, 'master')

    def create_project(self, name):
        """Create repo *name* via the resources API and remember it for
        tearDown."""
        self.ru.direct_create_repo(name)
        self.projects.append(name)

    def test_timestamped_logs(self):
        """Test that jenkins timestamps logs"""
        # Done here to make sure a config-update job was run and to avoid
        # duplicating code
        timestamp_re = re.compile('\d{2}:\d{2}:\d{2}.\d{0,3}')
        n = self.ju.get_last_build_number("config-update",
                                          "lastBuild")
        cu_logs = self.ju.get_job_logs("config-update",
                                       n)
        self.assertTrue(cu_logs is not None)
        for l in cu_logs.split('\n'):
            if l:
                self.assertRegexpMatches(l, timestamp_re, msg=l)

    def test_check_project_test_workflow(self):
        """ Validate new project to test via zuul
        """
        # We want to create a project, provide project source
        # code with tests. We then configure zuul/jjb to handle the
        # run of the test cases. We then validate Gerrit has been
        # updated about the test results
        # We use the sample-project (that already exists)

        pname = 'test_workflow_%s' % create_random_str()
        logger.info("Creating project %s" % pname)
        # Create it
        self.create_project(pname)

        logger.info("Populating the project with %s" %
                    self.sample_project_dir)
        # Add the sample-project to the empty repository
        clone_dir = self.clone_as_admin(pname)
        copytree(self.sample_project_dir, clone_dir)
        self.commit_direct_push_as_admin(clone_dir, "Add the sample project")

        # Change to config/{zuul,jobs}/projects.yaml
        # in order to test the new project
        logger.info("Adding config-repo configuration")
        ycontent = file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml")).read()
        file(os.path.join(
            self.config_clone_dir, "zuul/projects.yaml"), 'w').write(
            ycontent.replace("zuul-demo", pname),
        )
        ycontent2 = load(file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml")).read())
        # Duplicate the zuul-demo job definition for the new project.
        sp2 = copy.deepcopy(
            [p for p in ycontent2 if 'project' in p and
                p['project']['name'] == 'zuul-demo'][0])
        sp2['project']['name'] = pname
        ycontent2.append(sp2)
        file(os.path.join(
            self.config_clone_dir, "jobs/projects.yaml"), 'w').write(
            dump(ycontent2))

        # Send review (config-check) will be triggered
        logger.info("Submitting the config review")
        change_sha = self.push_review_as_admin(
            self.config_clone_dir,
            "Add config definition in Zuul/JJB config for %s" % pname)

        change_nr = self.gu.get_change_number(change_sha)
        logger.info("Waiting for verify +1 on change %d" % change_nr)
        self.assertEquals(self.gu.wait_for_verify(change_nr), 1)

        # review the config change as a member from the config-core group
        logger.info("Approving and waiting for verify +2")
        self.gu2.submit_change_note(change_nr, "current", "Code-Review", "2")
        self.gu2.submit_change_note(change_nr, "current", "Workflow", "1")

        # Poll until jenkins gates the change with Verified +2.
        for retry in xrange(60):
            jenkins_vote = self.gu.get_vote(change_nr, "Verified")
            if jenkins_vote == 2:
                break
            time.sleep(1)
        self.assertEquals(jenkins_vote, 2)

        # verify whether zuul merged the patch
        logger.info("Waiting for change to be merged")
        for retry in xrange(60):
            change_status = self.gu.get_info(change_nr)['status']
            if change_status == "MERGED":
                break
            time.sleep(1)
        self.assertEqual(change_status, 'MERGED')
        # The config repo was modified: tearDown must restore it.
        self.need_restore_config_repo = True

        logger.info("Waiting for config-update")
        config_update_log = self.ju.wait_for_config_update(change_sha)
        self.assertIn("Finished: SUCCESS", config_update_log)

        # Propose a change on a the repo and expect a Verified +1
        logger.info("Submiting a test change to %s" % pname)
        change_sha = self.gitu_admin.add_commit_and_publish(
            clone_dir, 'master', "Add useless file", self.un)
        change_nr = self.gu.get_change_number(change_sha)
        logger.info("Waiting for verify +1 on change %d" % change_nr)
        self.assertEquals(self.gu.wait_for_verify(change_nr), 1)

        # Update the change on a the repo and expect a Verified -1
        logger.info("Submiting a test change to %s suppose to fail" % pname)
        data = "#!/bin/bash\nexit 1\n"
        file(os.path.join(clone_dir, "run_tests.sh"), 'w').write(data)
        os.chmod(os.path.join(clone_dir, "run_tests.sh"), 0755)
        self.gitu_admin.add_commit_and_publish(
            clone_dir, "master", None, fnames=["run_tests.sh"])
        logger.info("Waiting for verify -1 on change %d" % change_nr)
        self.assertEquals(self.gu.wait_for_verify(change_nr), -1)

        logger.info("Validate jobs ran via the job api %s" % pname)
        # This piece of code is there by convenience ...
        # TODO: Should be moved in the job api tests file.
        # Test the manageSF jobs API: query per patch & revision
        change_ids = self.gu.get_my_changes_for_project(pname)
        self.assertGreater(len(change_ids), 0)
        change_id = change_ids[0]
        patch = self.gu.get_change_last_patchset(change_id)['_number']
        cookie = get_cookie(config.ADMIN_USER, config.ADMIN_PASSWORD)
        cookies = {"auth_pubtkt": cookie}
        base_url = config.GATEWAY_URL + "/manage/jobs/"
        for j in ["%s-functional-tests" % pname, "%s-unit-tests" % pname]:
            job = requests.get(base_url + '%s/?change=%s' % (j, patch),
                               cookies=cookies).json()
            self.assertTrue("jenkins" in job.keys(), job)
            self.assertTrue(len(job["jenkins"]) > 1, job)
class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        # Load the provisioning description from resources.yaml next to
        # this script (`pwd` is defined elsewhere in this file).
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary constructors; yaml.safe_load would be safer — confirm.
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        # Authenticate the admin user once and cache the cookie.
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        # Redmine is reached through the gateway in this variant.
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_project(self, name):
        """Create project *name* via manageSF as the admin user."""
        print " Creating project %s ..." % name
        self.msu.createProject(name, config.ADMIN_USER)

    def push_files_in_project(self, name, files):
        """Create each file in *files* with dummy content and direct-push
        a single commit to master; keeps the clone in self.clone_dir."""
        print " Add files(%s) in a commit ..." % ",".join(files)
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(self.url, name, config_review=False)
        # Remember the clone so create_review() can reuse it later.
        self.clone_dir = clone_dir
        for f in files:
            # Python 2 file() builtin; handle closed by refcounting.
            file(os.path.join(clone_dir, f), 'w').write('data')
            self.ggu.git_add(clone_dir, (f, ))
        self.ggu.add_commit_for_all_new_additions(clone_dir)
        self.ggu.direct_push_branch(clone_dir, 'master')

    def create_issues_on_project(self, name, issues):
        """Create each issue on the Redmine project *name* and yield
        (issue_id, review_flag) pairs."""
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            issue = self.rm.create_issue(name, i['name'])
            yield issue, i['review']

    def create_jenkins_jobs(self, name, jobnames):
        """Create one Jenkins job per entry, named '<project>_<job>'."""
        print " Create Jenkins jobs(%s) ..." % ",".join(jobnames)
        for jobname in jobnames:
            self.ju.create_job("%s_%s" % (name, jobname))

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def create_review(self, project, issue):
        """Very basic review creator for statistics and restore tests
        purposes."""
        # Uses self.clone_dir left behind by push_files_in_project().
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir,
                                      'branch_' + issue,
                                      commit='test\n\nBug: %s' % issue)
        self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue)

    def provision(self):
        """Create every project from resources.yaml with its files,
        Redmine issues, reviews and Jenkins jobs."""
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(
                    project['name'], project['issues']):
                if review:
                    print "Create review for bug %i in %s" % (
                        i, project['name'])
                    self.create_review(project['name'], str(i))
            self.create_jenkins_jobs(project['name'],
                                     [j['name'] for j in project['jobnames']])
        self.create_pads(2)
        self.create_pasties(2)
class SFProvisioner(object):
    """ This provider is only intended for testing
    SF backup/restore and update. It provisions some
    user datas in a SF installation based on a resourses.yaml
    file. Later those data can be checked by its friend
    the SFChecker.

    Provisioned data should remain really simple.
    """
    def __init__(self):
        # Load the provisioning description from resources.yaml next to
        # this script (`pwd` is defined elsewhere in this file).
        # NOTE(review): yaml.load without an explicit Loader can execute
        # arbitrary constructors; yaml.safe_load would be safer — confirm.
        with open("%s/resources.yaml" % pwd, 'r') as rsc:
            self.resources = yaml.load(rsc)
        # Authenticate the admin user once and cache the cookie.
        config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie(
            config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password'])
        self.msu = ManageSfUtils(config.GATEWAY_URL)
        self.ggu = GerritGitUtils(config.ADMIN_USER,
                                  config.ADMIN_PRIV_KEY_PATH,
                                  config.USERS[config.ADMIN_USER]['email'])
        self.ju = JenkinsUtils()
        self.rm = RedmineUtils(
            config.GATEWAY_URL + "/redmine/",
            auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie'])

    def create_project(self, name):
        """Create project *name* via manageSF as the admin user."""
        print " Creating project %s ..." % name
        self.msu.createProject(name, config.ADMIN_USER)

    def push_files_in_project(self, name, files):
        """Create each file in *files* with dummy content and direct-push
        a single commit to master; keeps the clone in self.clone_dir."""
        print " Add files(%s) in a commit ..." % ",".join(files)
        # TODO(fbo); use gateway host instead of gerrit host
        self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                             config.GATEWAY_HOST, name)
        clone_dir = self.ggu.clone(self.url, name, config_review=False)
        # Remember the clone so create_review() can reuse it later.
        self.clone_dir = clone_dir
        for f in files:
            # Python 2 file() builtin; handle closed by refcounting.
            file(os.path.join(clone_dir, f), 'w').write('data')
            self.ggu.git_add(clone_dir, (f,))
        self.ggu.add_commit_for_all_new_additions(clone_dir)
        self.ggu.direct_push_branch(clone_dir, 'master')

    def create_issues_on_project(self, name, issues):
        """Yield (issue_id, review_flag) for each issue description;
        ids are faked when the SFRedmine service is absent."""
        print " Create %s issue(s) for that project ..." % len(issues)
        for i in issues:
            if is_present('SFRedmine'):
                issue = self.rm.create_issue(name, i['name'])
            else:
                # No Redmine service: fabricate a plausible id so the
                # rest of the provisioning flow still runs.
                issue = random.randint(1,100)
            yield issue, i['review']

    def create_jenkins_jobs(self, name, jobnames):
        """Create one Jenkins job per entry, named '<project>_<job>'."""
        print " Create Jenkins jobs(%s) ..." % ",".join(jobnames)
        for jobname in jobnames:
            self.ju.create_job("%s_%s" % (name, jobname))

    def create_pads(self, amount):
        # TODO
        pass

    def create_pasties(self, amount):
        # TODO
        pass

    def simple_login(self, user):
        """log as user to make the user listable"""
        get_cookie(user, config.USERS[user]['password'])

    def create_review(self, project, issue):
        """Very basic review creator for statistics and restore tests
        purposes."""
        # Uses self.clone_dir left behind by push_files_in_project().
        self.ggu.config_review(self.clone_dir)
        self.ggu.add_commit_in_branch(self.clone_dir,
                                      'branch_' + issue,
                                      commit='test\n\nBug: %s' % issue)
        self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue)

    def create_local_user(self, username, password, email):
        """Create a local (non-IdP) user through manageSF."""
        self.msu.create_user(username, password, email)

    def command(self, cmd):
        """Run *cmd* as root on the gateway host over SSH and return the
        ssh_run_cmd result tuple."""
        return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"),
                           "root",
                           config.GATEWAY_HOST, shlex.split(cmd))

    def compute_checksum(self, f):
        """Return the md5 checksum of remote file *f*, or None when the
        command produced no output."""
        out = self.command("md5sum %s" % f)[0]
        if out:
            return out.split()[0]

    def provision(self):
        """Drive the full provisioning: run prep commands, snapshot file
        checksums to /tmp/pc_checksums.yaml, create users, then projects
        with files/issues/reviews and Jenkins jobs."""
        for cmd in self.resources['commands']:
            print "Execute command %s" % cmd['cmd']
            print self.command(cmd['cmd'])
        checksum_list = {}
        for checksum in self.resources['checksum']:
            print "Compute checksum for file %s" % checksum['file']
            checksum_list[checksum['file']] = self.compute_checksum(
                checksum['file'])
        yaml.dump(checksum_list, file('/tmp/pc_checksums.yaml', 'w'))
        for user in self.resources['local_users']:
            print "Create local user %s" % user['username']
            self.create_local_user(user['username'], user['password'],
                                   user['email'])
        for u in self.resources['users']:
            print "log in as %s" % u['name']
            self.simple_login(u['name'])
        for project in self.resources['projects']:
            print "Create user datas for %s" % project['name']
            self.create_project(project['name'])
            self.push_files_in_project(project['name'],
                                       [f['name'] for f in project['files']])
            for i, review in self.create_issues_on_project(project['name'],
                                                           project['issues']):
                if review:
                    print "Create review for bug %i in %s" % (
                        i, project['name'])
                    self.create_review(project['name'], str(i))
            self.create_jenkins_jobs(project['name'],
                                     [j['name'] for j in project['jobnames']])
        self.create_pads(2)
        self.create_pasties(2)