def test_upstream(self):
    """ Validate upstream feature of managesf """
    # Create a test upstream project
    pname_us = 'p_upstream'
    self.create_project(pname_us, config.ADMIN_USER)
    ggu_us = GerritGitUtils(config.ADMIN_USER,
                            config.ADMIN_PRIV_KEY_PATH,
                            config.USERS[config.ADMIN_USER]['email'])
    url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                    config.GATEWAY_HOST, pname_us)
    # clone
    us_clone_dir = ggu_us.clone(url, pname_us)
    self.dirs_to_delete.append(os.path.dirname(us_clone_dir))
    # Test that the clone is a success
    self.assertTrue(os.path.isdir(us_clone_dir))
    # push some test files to the upstream project (names "1".."9")
    us_files = [str(x) for x in range(1, 10)]
    for f in us_files:
        # Use a context manager so the descriptor is closed
        # deterministically (file(...).write(...) leaked it).
        with open(os.path.join(us_clone_dir, f), 'w') as fd:
            fd.write(f)
        # 0o755 octal literal is valid in py2.6+ and py3
        os.chmod(os.path.join(us_clone_dir, f), 0o755)
    ggu_us.add_commit_in_branch(us_clone_dir, "master",
                                commit="Adding files 1-10",
                                files=us_files)
    ggu_us.direct_push_branch(us_clone_dir, "master")
    # Now create a test project with upstream pointing to the above
    upstream_url = "ssh://%s@%s:29418/%s" % (
        config.ADMIN_USER, config.GATEWAY_HOST, pname_us)
    pname = 'p_%s' % create_random_str()
    # create the project as admin
    options = {"upstream": upstream_url,
               "upstream-ssh-key": config.ADMIN_PRIV_KEY_PATH}
    self.create_project(pname, config.ADMIN_USER, options=options)
    ggu = GerritGitUtils(config.ADMIN_USER,
                         config.ADMIN_PRIV_KEY_PATH,
                         config.USERS[config.ADMIN_USER]['email'])
    url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                    config.GATEWAY_HOST, pname)
    # clone
    clone_dir = ggu.clone(url, pname)
    self.dirs_to_delete.append(os.path.dirname(clone_dir))
    # Check if the files pushed in upstream project are present
    files = [f for f in os.listdir(clone_dir) if not f.startswith('.')]
    self.assertEqual(set(files), set(us_files))
def test_upstream(self):
    """ Validate upstream feature of managesf """
    # Create a test upstream project
    pname_us = 'p_upstream'
    self.create_project(pname_us, config.ADMIN_USER)
    ggu_us = GerritGitUtils(config.ADMIN_USER,
                            config.ADMIN_PRIV_KEY_PATH,
                            config.USERS[config.ADMIN_USER]['email'])
    url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                    config.GATEWAY_HOST, pname_us)
    # clone
    us_clone_dir = ggu_us.clone(url, pname_us)
    self.dirs_to_delete.append(os.path.dirname(us_clone_dir))
    # Test that the clone is a success
    self.assertTrue(os.path.isdir(us_clone_dir))
    # push some test files to the upstream project (names "1".."9")
    us_files = [str(x) for x in range(1, 10)]
    for f in us_files:
        # Use a context manager so the descriptor is closed
        # deterministically (file(...).write(...) leaked it).
        with open(os.path.join(us_clone_dir, f), 'w') as fd:
            fd.write(f)
        # 0o755 octal literal is valid in py2.6+ and py3
        os.chmod(os.path.join(us_clone_dir, f), 0o755)
    ggu_us.add_commit_in_branch(us_clone_dir, "master",
                                commit="Adding files 1-10",
                                files=us_files)
    ggu_us.direct_push_branch(us_clone_dir, "master")
    # Now create a test project with upstream pointing to the above
    upstream_url = "ssh://%s@%s:29418/%s" % (
        config.ADMIN_USER, config.GATEWAY_HOST, pname_us)
    pname = 'p_%s' % create_random_str()
    # create the project as admin
    options = {"upstream": upstream_url,
               "upstream-ssh-key": config.ADMIN_PRIV_KEY_PATH}
    self.create_project(pname, config.ADMIN_USER, options=options)
    ggu = GerritGitUtils(config.ADMIN_USER,
                         config.ADMIN_PRIV_KEY_PATH,
                         config.USERS[config.ADMIN_USER]['email'])
    url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER,
                                    config.GATEWAY_HOST, pname)
    # clone
    clone_dir = ggu.clone(url, pname)
    self.dirs_to_delete.append(os.path.dirname(clone_dir))
    # Check if the files pushed in upstream project are present
    files = [f for f in os.listdir(clone_dir) if not f.startswith('.')]
    self.assertEqual(set(files), set(us_files))
class SFProvisioner(object): """ This provider is only intended for testing SF backup/restore and update. It provisions some user datas in a SF installation based on a resourses.yaml file. Later those data can be checked by its friend the SFChecker. Provisioned data should remain really simple. """ def __init__(self): with open("%s/resources.yaml" % pwd, 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.msu = ManageSfUtils(config.GATEWAY_URL) self.ru = ResourcesUtils() self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.stb_client = SFStoryboard( config.GATEWAY_URL + "/storyboard_api", config.USERS[config.ADMIN_USER]['auth_cookie']) def create_resources(self): print " Creating resources ..." if cmp_version(os.environ.get("PROVISIONED_VERSION", "0.0"), "2.4.0"): # Remove review-dashboard for p in self.resources['resources']['projects'].values(): del p['review-dashboard'] self.ru.create_resources("provisioner", {'resources': self.resources['resources']}) # Create review for the first few repositories for project in self.resources['resources']['repos'].keys()[:3]: self.clone_project(project) self.create_review(project, "Test review for %s" % project) def create_project(self, name): print " Creating project %s ..." % name self.ru.create_repo(name) def clone_project(self, name): # TODO(fbo); use gateway host instead of gerrit host self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) self.clone_dir = self.ggu.clone(self.url, name, config_review=False) def push_files_in_project(self, name, files): print " Add files(%s) in a commit ..." 
% ",".join(files) self.clone_project(name) for f in files: file(os.path.join(self.clone_dir, f), 'w').write('data') self.ggu.git_add(self.clone_dir, (f, )) self.ggu.add_commit_for_all_new_additions(self.clone_dir) self.ggu.direct_push_branch(self.clone_dir, 'master') def create_storyboard_issue(self, name, issue_name): project = self.stb_client.projects.get(name) story = self.stb_client.stories.create(title=issue_name) task = self.stb_client.tasks.create(story_id=story.id, project_id=project.id, title=issue_name) return task.id, story.id def create_issues_on_project(self, name, issues): print " Create %s issue(s) for that project ..." % len(issues) for i in issues: if is_present('storyboard'): issue = self.create_storyboard_issue(name, i['name']) else: issue = (random.randint(1, 100), random.randint(1, 100)) yield issue, i['review'] def create_pads(self, amount): # TODO pass def create_pasties(self, amount): # TODO pass def simple_login(self, user, password): """log as user to make the user listable""" get_cookie(user, password) def create_review(self, project, commit_message, branch='master'): """Very basic review creator for statistics and restore tests purposes.""" self.ggu.config_review(self.clone_dir) self.ggu.add_commit_in_branch(self.clone_dir, branch, commit=commit_message) self.ggu.review_push_branch(self.clone_dir, branch) def create_review_for_issue(self, project, issue): self.create_review( project, 'test\n\nTask: #%s\nStory: #%s' % (issue[0], issue[1]), 'branch_%s' % str(issue[0])) def create_local_user(self, username, password, email): self.msu.create_user(username, password, email) def command(self, cmd): return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"), "root", config.GATEWAY_HOST, shlex.split(cmd)) def compute_checksum(self, f): out = self.command("md5sum %s" % f)[0] if out: return out.split()[0] def read_file(self, f): return self.command("cat %s" % f)[0] def provision(self): for cmd in self.resources['commands']: print "Execute command %s" 
% cmd['cmd'] print self.command(cmd['cmd']) checksum_list = {} for checksum in self.resources['checksum']: print "Compute checksum for file %s" % checksum['file'] checksum_list[checksum['file']] = self.compute_checksum( checksum['file']) checksum_list['content_' + checksum['file']] = self.read_file( checksum['file']) yaml.dump(checksum_list, file('pc_checksums.yaml', 'w'), default_flow_style=False) for user in self.resources['local_users']: print "Create local user %s" % user['username'] self.create_local_user(user['username'], user['password'], user['email']) self.simple_login(user['username'], user['password']) for u in self.resources['users']: print "log in as %s" % u['name'] self.simple_login(u['name'], config.USERS[u['name']]['password']) for project in self.resources['projects']: print "Create user datas for %s" % project['name'] self.create_project(project['name']) self.push_files_in_project(project['name'], [f['name'] for f in project['files']]) for i, review in self.create_issues_on_project( project['name'], project['issues']): if review: print "Create review for bug %s in %s" % (i, project['name']) self.create_review_for_issue(project['name'], i) self.create_resources() self.create_pads(2) self.create_pasties(2)
class TestLogExportedInElasticSearch(Base):
    """ Functional tests to verify job logs are exported in ElasticSearch
    """
    def setUp(self):
        super(TestLogExportedInElasticSearch, self).setUp()
        self.un = config.ADMIN_USER
        self.priv_key_path = set_private_key(
            config.USERS[self.un]["privkey"])
        self.gitu_admin = GerritGitUtils(self.un,
                                         self.priv_key_path,
                                         config.USERS[self.un]['email'])

    def run_ssh_cmd(self, sshkey_priv_path, user, host, subcmd):
        """Run subcmd on host as user; returns ((stdout, stderr), rc)."""
        host = '%s@%s' % (user, host)
        sshcmd = ['ssh', '-o', 'LogLevel=ERROR',
                  '-o', 'StrictHostKeyChecking=no',
                  '-o', 'UserKnownHostsFile=/dev/null',
                  '-i', sshkey_priv_path, host]
        cmd = sshcmd + subcmd
        p = Popen(cmd, stdout=PIPE)
        return p.communicate(), p.returncode

    def push_request_script(self, index, newhash):
        """Copy a curl query script for (index, newhash) onto the gateway."""
        newhash = newhash.rstrip()
        content = """
curl -s -XPOST 'http://elasticsearch.%s:9200/%s/_search?pretty&size=1' -d '{
  "query": {
    "bool": {
      "must": [
        { "match": { "build_name": "config-update" } },
        { "match": { "build_newrev": "%s" } }
      ]
    }
  }
}'
"""
        with open('/tmp/test_request.sh', 'w') as fd:
            fd.write(content % (config.GATEWAY_HOST, index, newhash))
        cmd = ['scp', '/tmp/test_request.sh',
               'root@%s:/tmp/test_request.sh' % config.GATEWAY_HOST]
        p = Popen(cmd, stdout=PIPE)
        return p.communicate(), p.returncode

    def find_index(self):
        subcmd = "curl -s -XGET http://elasticsearch.%s:9200/_cat/indices" % (
            config.GATEWAY_HOST)
        subcmd = shlex.split(subcmd)
        # A logstash index is created by day
        today_str = datetime.datetime.utcnow().strftime('%Y.%m.%d')
        # Here we fetch the index name, but also we wait for
        # it to appears in ElasticSearch for 5 mins
        index = []
        for retry in xrange(300):
            try:
                out = self.run_ssh_cmd(config.SERVICE_PRIV_KEY_PATH,
                                       'root', config.GATEWAY_HOST, subcmd)
                outlines = out[0][0].split('\n')
                outlines.pop()
                index = [o for o in outlines
                         if o.split()[2].startswith(
                             'logstash-%s' % today_str)]
            except Exception:
                # curl output may be empty/partial while the index is
                # being created; ignore and retry (the bare except also
                # hid KeyboardInterrupt etc.).
                index = []
            if len(index):
                break
            # Sleep on every failed attempt; the original only slept
            # after an exception, busy-looping through the retries
            # when curl succeeded but the index was not there yet.
            time.sleep(1)
        self.assertEqual(
            len(index), 1,
            "No logstash index has been found for today logstash-%s (%s)" % (
                today_str, str(index)))
        index = index[0].split()[2]
        return index

    def verify_logs_exported(self):
        """Poll the uploaded request script until our log doc appears."""
        subcmd = "bash /tmp/test_request.sh"
        subcmd = shlex.split(subcmd)
        for retry in xrange(300):
            out = self.run_ssh_cmd(config.SERVICE_PRIV_KEY_PATH,
                                   'root', config.GATEWAY_HOST, subcmd)
            ret = json.loads(out[0][0])
            if len(ret['hits']['hits']) >= 1:
                break
            elif len(ret['hits']['hits']) == 0:
                time.sleep(1)
        self.assertEqual(len(ret['hits']['hits']), 1,
                         "Fail to find our log in ElasticSeach")
        return ret['hits']['hits'][0]

    def direct_push_in_config_repo(self, url, pname='config'):
        """Push a random file to the repo; return the new HEAD sha."""
        rand_str = ''.join(random.choice(
            string.ascii_uppercase + string.digits) for _ in range(5))
        clone = self.gitu_admin.clone(url, pname)
        with open('%s/test_%s' % (clone, rand_str), 'w') as fd:
            fd.write('test')
        self.gitu_admin.add_commit_in_branch(
            clone, 'master', ['test_%s' % rand_str])
        # Close the ref file deterministically (file(...) leaked it)
        with open('%s/.git/refs/heads/master' % clone) as fd:
            head = fd.read()
        self.gitu_admin.direct_push_branch(clone, 'master')
        return head

    @skipIfServiceMissing('elasticsearch')
    def test_log_indexation(self):
        """ Test job log are exported in Elasticsearch
        """
        head = self.direct_push_in_config_repo(
            'ssh://admin@%s:29418/config' % (
                config.GATEWAY_HOST))
        index = self.find_index()
        self.push_request_script(index, head)
        log = self.verify_logs_exported()
        self.assertEqual(log['_source']["build_name"], "config-update")
class SFProvisioner(object): """ This provider is only intended for testing SF backup/restore and update. It provisions some user datas in a SF installation based on a resourses.yaml file. Later those data can be checked by its friend the SFChecker. Provisioned data should remain really simple. """ def __init__(self): with open('resources.yaml', 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.msu = ManageSfUtils(config.GATEWAY_URL) self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.ju = JenkinsUtils() self.rm = RedmineUtils( config.REDMINE_URL, auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) def create_project(self, name): print " Creating project %s ..." % name self.msu.createProject(name, config.ADMIN_USER) def push_files_in_project(self, name, files): print " Add files(%s) in a commit ..." % ",".join(files) # TODO(fbo); use gateway host instead of gerrit host self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) clone_dir = self.ggu.clone(self.url, name, config_review=False) self.clone_dir = clone_dir for f in files: file(os.path.join(clone_dir, f), 'w').write('data') self.ggu.git_add(clone_dir, (f,)) self.ggu.add_commit_for_all_new_additions(clone_dir) self.ggu.direct_push_branch(clone_dir, 'master') def create_issues_on_project(self, name, issues): print " Create %s issue(s) for that project ..." % len(issues) for i in issues: issue = self.rm.create_issue(name, i['name']) yield issue, i['review'] def create_jenkins_jobs(self, name, jobnames): print " Create Jenkins jobs(%s) ..." 
% ",".join(jobnames) for jobname in jobnames: self.ju.create_job("%s_%s" % (name, jobname)) def create_pads(self, amount): # TODO pass def create_pasties(self, amount): # TODO pass def create_review(self, project, issue): """Very basic review creator for statistics and restore tests purposes.""" self.ggu.config_review(self.clone_dir) self.ggu.add_commit_in_branch(self.clone_dir, 'branch_' + issue, commit='test\n\nBug: %s' % issue) self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue) def provision(self): for project in self.resources['projects']: print "Create user datas for %s" % project['name'] self.create_project(project['name']) self.push_files_in_project(project['name'], [f['name'] for f in project['files']]) for i, review in self.create_issues_on_project(project['name'], project['issues']): if review: print "Create review for bug %i in %s" % (i, project['name']) self.create_review(project['name'], str(i)) self.create_jenkins_jobs(project['name'], [j['name'] for j in project['jobnames']]) self.create_pads(2) self.create_pasties(2)
class SFProvisioner(object): """ This provider is only intended for testing SF backup/restore and update. It provisions some user datas in a SF installation based on a resourses.yaml file. Later those data can be checked by its friend the SFChecker. Provisioned data should remain really simple. """ def __init__(self): with open("%s/resources.yaml" % pwd, 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.msu = ManageSfUtils(config.GATEWAY_URL) self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.ju = JenkinsUtils() self.rm = RedmineUtils( config.GATEWAY_URL + "/redmine/", auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) def create_project(self, name): print " Creating project %s ..." % name self.msu.createProject(name, config.ADMIN_USER) def push_files_in_project(self, name, files): print " Add files(%s) in a commit ..." % ",".join(files) # TODO(fbo); use gateway host instead of gerrit host self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) clone_dir = self.ggu.clone(self.url, name, config_review=False) self.clone_dir = clone_dir for f in files: file(os.path.join(clone_dir, f), 'w').write('data') self.ggu.git_add(clone_dir, (f, )) self.ggu.add_commit_for_all_new_additions(clone_dir) self.ggu.direct_push_branch(clone_dir, 'master') def create_issues_on_project(self, name, issues): print " Create %s issue(s) for that project ..." % len(issues) for i in issues: issue = self.rm.create_issue(name, i['name']) yield issue, i['review'] def create_jenkins_jobs(self, name, jobnames): print " Create Jenkins jobs(%s) ..." 
% ",".join(jobnames) for jobname in jobnames: self.ju.create_job("%s_%s" % (name, jobname)) def create_pads(self, amount): # TODO pass def create_pasties(self, amount): # TODO pass def create_review(self, project, issue): """Very basic review creator for statistics and restore tests purposes.""" self.ggu.config_review(self.clone_dir) self.ggu.add_commit_in_branch(self.clone_dir, 'branch_' + issue, commit='test\n\nBug: %s' % issue) self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue) def provision(self): for project in self.resources['projects']: print "Create user datas for %s" % project['name'] self.create_project(project['name']) self.push_files_in_project(project['name'], [f['name'] for f in project['files']]) for i, review in self.create_issues_on_project( project['name'], project['issues']): if review: print "Create review for bug %i in %s" % (i, project['name']) self.create_review(project['name'], str(i)) self.create_jenkins_jobs(project['name'], [j['name'] for j in project['jobnames']]) self.create_pads(2) self.create_pasties(2)
class SFProvisioner(object): """ This provider is only intended for testing SF backup/restore and update. It provisions some user datas in a SF installation based on a resourses.yaml file. Later those data can be checked by its friend the SFChecker. Provisioned data should remain really simple. """ def __init__(self): with open("%s/resources.yaml" % pwd, 'r') as rsc: self.resources = yaml.load(rsc) config.USERS[config.ADMIN_USER]['auth_cookie'] = get_cookie( config.ADMIN_USER, config.USERS[config.ADMIN_USER]['password']) self.msu = ManageSfUtils(config.GATEWAY_URL) self.ggu = GerritGitUtils(config.ADMIN_USER, config.ADMIN_PRIV_KEY_PATH, config.USERS[config.ADMIN_USER]['email']) self.ju = JenkinsUtils() self.rm = RedmineUtils( config.GATEWAY_URL + "/redmine/", auth_cookie=config.USERS[config.ADMIN_USER]['auth_cookie']) def create_project(self, name): print " Creating project %s ..." % name self.msu.createProject(name, config.ADMIN_USER) def push_files_in_project(self, name, files): print " Add files(%s) in a commit ..." % ",".join(files) # TODO(fbo); use gateway host instead of gerrit host self.url = "ssh://%s@%s:29418/%s" % (config.ADMIN_USER, config.GATEWAY_HOST, name) clone_dir = self.ggu.clone(self.url, name, config_review=False) self.clone_dir = clone_dir for f in files: file(os.path.join(clone_dir, f), 'w').write('data') self.ggu.git_add(clone_dir, (f,)) self.ggu.add_commit_for_all_new_additions(clone_dir) self.ggu.direct_push_branch(clone_dir, 'master') def create_issues_on_project(self, name, issues): print " Create %s issue(s) for that project ..." % len(issues) for i in issues: if is_present('SFRedmine'): issue = self.rm.create_issue(name, i['name']) else: issue = random.randint(1,100) yield issue, i['review'] def create_jenkins_jobs(self, name, jobnames): print " Create Jenkins jobs(%s) ..." 
% ",".join(jobnames) for jobname in jobnames: self.ju.create_job("%s_%s" % (name, jobname)) def create_pads(self, amount): # TODO pass def create_pasties(self, amount): # TODO pass def simple_login(self, user): """log as user to make the user listable""" get_cookie(user, config.USERS[user]['password']) def create_review(self, project, issue): """Very basic review creator for statistics and restore tests purposes.""" self.ggu.config_review(self.clone_dir) self.ggu.add_commit_in_branch(self.clone_dir, 'branch_' + issue, commit='test\n\nBug: %s' % issue) self.ggu.review_push_branch(self.clone_dir, 'branch_' + issue) def create_local_user(self, username, password, email): self.msu.create_user(username, password, email) def command(self, cmd): return ssh_run_cmd(os.path.expanduser("~/.ssh/id_rsa"), "root", config.GATEWAY_HOST, shlex.split(cmd)) def compute_checksum(self, f): out = self.command("md5sum %s" % f)[0] if out: return out.split()[0] def provision(self): for cmd in self.resources['commands']: print "Execute command %s" % cmd['cmd'] print self.command(cmd['cmd']) checksum_list = {} for checksum in self.resources['checksum'] : print "Compute checksum for file %s" % checksum['file'] checksum_list[checksum['file']] = self.compute_checksum( checksum['file']) yaml.dump(checksum_list, file('/tmp/pc_checksums.yaml', 'w')) for user in self.resources['local_users']: print "Create local user %s" % user['username'] self.create_local_user(user['username'], user['password'], user['email']) for u in self.resources['users']: print "log in as %s" % u['name'] self.simple_login(u['name']) for project in self.resources['projects']: print "Create user datas for %s" % project['name'] self.create_project(project['name']) self.push_files_in_project(project['name'], [f['name'] for f in project['files']]) for i, review in self.create_issues_on_project(project['name'], project['issues']): if review: print "Create review for bug %i in %s" % (i, project['name']) 
self.create_review(project['name'], str(i)) self.create_jenkins_jobs(project['name'], [j['name'] for j in project['jobnames']]) self.create_pads(2) self.create_pasties(2)